identifier
stringlengths 42
383
| collection
stringclasses 1
value | open_type
stringclasses 1
value | license
stringlengths 0
1.81k
| date
float64 1.99k
2.02k
⌀ | title
stringlengths 0
100
| creator
stringlengths 1
39
| language
stringclasses 157
values | language_type
stringclasses 2
values | word_count
int64 1
20k
| token_count
int64 4
1.32M
| text
stringlengths 5
1.53M
| __index_level_0__
int64 0
57.5k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|
https://github.com/ruanlinos/react-native-google-fit/blob/master/android/src/main/java/com/reactnative/googlefit/HelperUtil.java
|
Github Open Source
|
Open Source
|
MIT
| 2,021
|
react-native-google-fit
|
ruanlinos
|
Java
|
Code
| 49
| 190
|
package com.reactnative.googlefit;
import java.util.concurrent.TimeUnit;
final class HelperUtil {
public static TimeUnit processBucketUnit(String buckUnit) {
switch (buckUnit){
case "NANOSECOND": return TimeUnit.NANOSECONDS;
case "MICROSECOND": return TimeUnit.MICROSECONDS;
case "MILLISECOND": return TimeUnit.MILLISECONDS;
case "SECOND": return TimeUnit.SECONDS;
case "MINUTE": return TimeUnit.MINUTES;
case "HOUR": return TimeUnit.HOURS;
case "DAY": return TimeUnit.DAYS;
}
return TimeUnit.HOURS;
}
}
| 23,239
|
https://github.com/DeyvidJLira/Popular-Movies/blob/master/app/src/main/java/br/com/deyvidjlira/popularmovies/util/AsyncDelegate.java
|
Github Open Source
|
Open Source
|
MIT
| null |
Popular-Movies
|
DeyvidJLira
|
Java
|
Code
| 26
| 106
|
package br.com.deyvidjlira.popularmovies.util;
import java.util.List;
import br.com.deyvidjlira.popularmovies.data.models.Movie;
/**
 * Callback contract notifying a listener about the lifecycle of an
 * asynchronous movie-loading operation.
 *
 * Created by Deyvid on 04/01/2017.
 */
public interface AsyncDelegate {

    /** Invoked when the asynchronous operation begins. */
    void asyncInit();

    /** Invoked with the loaded movies when the operation completes. */
    void asyncComplete(List<Movie> movieList);
}
| 19,020
|
https://github.com/v-kydela/BotFramework-Composer-1/blob/master/Composer/packages/client/__tests__/messenger/ApiClient.test.ts
|
Github Open Source
|
Open Source
|
MIT
| 2,021
|
BotFramework-Composer-1
|
v-kydela
|
TypeScript
|
Code
| 203
| 562
|
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
import nanoid from 'nanoid';
import ApiClient from '../../src/messenger/ApiClient';
jest.mock('nanoid');
(nanoid as jest.Mock).mockReturnValue('uniqueId');
const oldPostMessage = window.postMessage;
// Unit tests for ApiClient's window.postMessage-based RPC layer.
describe('ApiClient', () => {
  let client;

  beforeEach(() => {
    // Replace postMessage with a synchronous same-window dispatch so each
    // request/response round-trip completes inline within the test.
    window.postMessage = (data, origin) => {
      const evt = new MessageEvent('message', { data, origin, source: window });
      window.dispatchEvent(evt);
    };
    client = new ApiClient();
    client.connect();
  });

  afterEach(() => {
    client.disconnect();
    // Restore the real postMessage captured at module load time.
    window.postMessage = oldPostMessage;
  });

  it('can register and invoke a sync api', async () => {
    const add = data => {
      return data.x + data.y;
    };
    client.registerApi('add', add);
    const res = await client.apiCall('add', { x: 3, y: 4 });
    expect(res).toEqual(7);
  });

  it('can register and invoke an async api', async () => {
    // Same as the sync case, but the handler returns a Promise.
    const add = data => {
      return Promise.resolve(data.x + data.y);
    };
    client.registerApi('add', add);
    const res = await client.apiCall('add', { x: 3, y: 4 });
    expect(res).toEqual(7);
  });

  it('handles when the api throws an error', async () => {
    const err = new Error('SomeError');
    const syncError = () => {
      throw err;
    };
    const asyncError = () => {
      return Promise.reject(err);
    };
    client.registerApi('syncError', syncError);
    client.registerApi('asyncError', asyncError);
    // NOTE(review): the caught value is compared against the plain string
    // 'SomeError', implying ApiClient serialises errors to their message
    // across the postMessage boundary — confirm against ApiClient itself.
    // Also note: if apiCall unexpectedly resolves, the catch block (and its
    // expect) never runs, so this test would pass vacuously.
    try {
      await client.apiCall('syncError');
    } catch (sErr) {
      expect(sErr).toBe('SomeError');
    }
    try {
      await client.apiCall('asyncError');
    } catch (asErr) {
      expect(asErr).toBe('SomeError');
    }
  });
});
| 26,512
|
https://github.com/cazzer/hey-fridge/blob/master/src/data.js
|
Github Open Source
|
Open Source
|
CC0-1.0
| 2,016
|
hey-fridge
|
cazzer
|
JavaScript
|
Code
| 109
| 363
|
import io from 'socket.io-client'
import { routeActions } from 'redux-simple-router'
import { API, SERVER} from './variables'
import { onEvent } from './api'
// Wires server events (delivered through onEvent) to Redux store dispatches,
// keeping the client store in sync and redirecting away from an item that
// gets removed while it is being viewed.
export default class Data {
  constructor(store) {
    this.store = store
    // Plain data events are forwarded straight into the store as actions.
    onEvent('set', data => store.dispatch({type: 'set', data}))
    onEvent('added', data => store.dispatch({type: 'added', data}))
    onEvent('updated', data => store.dispatch({type: 'updated', data}))
    onEvent('focused', view => store.dispatch({type: 'view/focused', view}))
    onEvent('unfocused', view => store.dispatch({type: 'view/unfocused', view}))
    onEvent('view/set', views => store.dispatch({type: 'view/set', views}))
    onEvent('removed', data => {
      // if the thing we're focused on just got removed, we need to redirect
      if (store.getState().routing.location.pathname === `/data/${data.id}`) {
        if (data.redirectTo) {
          store.dispatch(routeActions.push(`/data/${data.redirectTo}`))
        } else {
          store.dispatch(routeActions.push('/'))
        }
      }
      // Dispatch the removal only after any redirect has been issued.
      store.dispatch({type: 'removed', data})
    })
    // 'breakdown' events carry a ready-made action object; dispatch as-is.
    onEvent('breakdown', store.dispatch.bind(store))
  }
}
| 15,371
|
https://github.com/lfl-0/lfl-pages/blob/master/.gitignore
|
Github Open Source
|
Open Source
|
MIT
| 2,020
|
lfl-pages
|
lfl-0
|
Ignore List
|
Code
| 4
| 21
|
node_modules/
.DS_Store
yarn.lock
*.log
| 48,541
|
https://github.com/alibabacloud-sdk-php/dingtalk/blob/master/src/ats_1_0/Models/QueryInterviewsRequest.php
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,023
|
dingtalk
|
alibabacloud-sdk-php
|
PHP
|
Code
| 244
| 836
|
<?php
// This file is auto-generated, don't edit it. Thanks.
namespace AlibabaCloud\SDK\Dingtalk\Vats_1_0\Models;
use AlibabaCloud\Tea\Model;
/**
 * Request model for the DingTalk ATS "query interviews" API.
 * Auto-generated code — regenerate rather than editing by hand.
 */
class QueryInterviewsRequest extends Model
{
    /**
     * @example ddats
     *
     * @var string
     */
    public $bizCode;

    /**
     * @example xxx
     *
     * @var string
     */
    public $candidateId;

    /**
     * @example 1626796800000
     *
     * @var int
     */
    public $startTimeBeginMillis;

    /**
     * @example 1626883199000
     *
     * @var int
     */
    public $startTimeEndMillis;

    /**
     * @example xxx
     *
     * @var string
     */
    public $nextToken;

    /**
     * @example 10
     *
     * @var int
     */
    public $size;

    // Maps PHP property names to wire field names (identical here).
    protected $_name = [
        'bizCode' => 'bizCode',
        'candidateId' => 'candidateId',
        'startTimeBeginMillis' => 'startTimeBeginMillis',
        'startTimeEndMillis' => 'startTimeEndMillis',
        'nextToken' => 'nextToken',
        'size' => 'size',
    ];

    // No field-level validation is performed for this request.
    public function validate()
    {
    }

    // Serialises the non-null properties to an associative array for the wire.
    public function toMap()
    {
        $res = [];
        if (null !== $this->bizCode) {
            $res['bizCode'] = $this->bizCode;
        }
        if (null !== $this->candidateId) {
            $res['candidateId'] = $this->candidateId;
        }
        if (null !== $this->startTimeBeginMillis) {
            $res['startTimeBeginMillis'] = $this->startTimeBeginMillis;
        }
        if (null !== $this->startTimeEndMillis) {
            $res['startTimeEndMillis'] = $this->startTimeEndMillis;
        }
        if (null !== $this->nextToken) {
            $res['nextToken'] = $this->nextToken;
        }
        if (null !== $this->size) {
            $res['size'] = $this->size;
        }
        return $res;
    }

    /**
     * Builds a model instance from an associative array; absent keys
     * leave the corresponding property unset (null).
     *
     * @param array $map
     *
     * @return QueryInterviewsRequest
     */
    public static function fromMap($map = [])
    {
        $model = new self();
        if (isset($map['bizCode'])) {
            $model->bizCode = $map['bizCode'];
        }
        if (isset($map['candidateId'])) {
            $model->candidateId = $map['candidateId'];
        }
        if (isset($map['startTimeBeginMillis'])) {
            $model->startTimeBeginMillis = $map['startTimeBeginMillis'];
        }
        if (isset($map['startTimeEndMillis'])) {
            $model->startTimeEndMillis = $map['startTimeEndMillis'];
        }
        if (isset($map['nextToken'])) {
            $model->nextToken = $map['nextToken'];
        }
        if (isset($map['size'])) {
            $model->size = $map['size'];
        }
        return $model;
    }
}
| 43,435
|
https://github.com/Bartlett-RC3/skilling-module-1-monxia/blob/master/Assets/Scripts/School/Session_1_20.cs
|
Github Open Source
|
Open Source
|
MIT
| null |
skilling-module-1-monxia
|
Bartlett-RC3
|
C#
|
Code
| 251
| 666
|
// LIBRARIES
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// CODE STRUCTURE
// Teaching script: demonstrates variables, data structures and functions
// in C# for a Unity MonoBehaviour.
public class Session_1_20 : MonoBehaviour
{
    // 1.VARIABLES - data that is stored in your computer memory
    // Declaring Variables: Scope (visibility) -- Type -- Name -- Value (optional)
    // Numbers
    public int myFirstWholeNumber = 1;
    private int mySecondWholeNumber = 99;
    public float myFirstDecimalNumber = 1.1234567890123456f;
    public double myFirstLongDecimalNumber = 1.12345678901234561234567890123456d;
    // Text
    private string myFirstWords = "Hello RC3 and RC9! Happy to teach you this!";
    public string mySecondWord = "Hello!";
    // Logical (binary: can be either true or false)
    private bool myFristBoolean = false;
    public bool mySecondBoolean = true;

    // 2.DATA STRUCTURES
    // Declaring Data Structures: Scope -- Type -- Name -- Values (optional)
    // Arrays (fixed length)
    public int[] myWholeNumbersCollection = new int[5];
    public float[] myDecimalNumbersCollection = { 1.3f, 2.5f, 3.0f };
    // Lists (growable)
    public List<int> myFirsIntList = new List<int>();

    // 3.FUNCTIONS
    // Declaring Functions: // Scope -- Type -- Variables -- Body (Instructions)
    private float CalculatorSum (float _FirstNumber, float _SecondNumber)
    {
        float SumResult = _FirstNumber + _SecondNumber;
        return SumResult;
    }

    // Start is called before the first frame update
    void Start()
    {
        // Note: the int arguments are implicitly widened to float here.
        float SumResult = CalculatorSum(myFirstWholeNumber, mySecondWholeNumber);
        Debug.Log("Adding two numbers result is: " + SumResult.ToString());
        // Data in Arrays
        // Changing the data
        myDecimalNumbersCollection[0] = 2.8f;
        // Retrieving the data
        float SumResult2 = CalculatorSum(myDecimalNumbersCollection[0], myDecimalNumbersCollection[2]);
        // Data in Lists
        // Changing the data
        myFirsIntList.Add(3);
        myFirsIntList.Add(9);
        myFirsIntList[0] = 5;
        myFirsIntList[1] = 12;
        // Retrieving the data
        Debug.Log(myFirsIntList[0]);
    }

    // Update is called once per frame
    void Update()
    {
    }
}
| 24,481
|
https://github.com/Coron4444/BalloonWorld/blob/master/Document/html/dir_e2f6dcd6052d04a1f65079a0782cb32a.js
|
Github Open Source
|
Open Source
|
MIT
| null |
BalloonWorld
|
Coron4444
|
JavaScript
|
Code
| 15
| 76
|
// Doxygen-generated directory index: [display name, HTML page, anchor ref]
// for each file in this directory.
var dir_e2f6dcd6052d04a1f65079a0782cb32a = [
  ["Controller.cpp", "_controller_8cpp.html", "_controller_8cpp"],
  ["Controller.h", "_controller_8h.html", "_controller_8h"],
];
| 50,595
|
https://github.com/JeffScherrer/ChakraCore/blob/master/lib/Common/DataStructures/GrowingArray.h
|
Github Open Source
|
Open Source
|
MIT
| null |
ChakraCore
|
JeffScherrer
|
C
|
Code
| 339
| 1,100
|
//-------------------------------------------------------------------------------------------------------
// Copyright (C) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
//-------------------------------------------------------------------------------------------------------
// Contains a class which will provide a uint32 array which can grow dynamically
// It behaves almost same as regex::List<> except it has less members, is customized for being used in SmallSpanSequence of FunctionBody
#pragma once
#ifdef DIAG_MEM
extern int listFreeAmount;
#endif
namespace JsUtil
{
    // Dynamically-growing flat array of TValue backed by TAllocator.
    // Behaves almost the same as regex::List<> but with fewer members,
    // customised for use in SmallSpanSequence of FunctionBody.
    template <class TValue, class TAllocator>
    class GrowingArray
    {
    public:
        typedef typename AllocatorInfo<TAllocator, TValue>::AllocatorType AllocatorType;

        static GrowingArray* Create(uint32 _length);

        GrowingArray(AllocatorType* allocator, uint32 _length)
            : buffer(nullptr),
            alloc(allocator),
            count(0),
            length(_length)
        {
            // Allocate the backing buffer eagerly at construction.
            EnsureArray();
        }

        ~GrowingArray()
        {
            if (buffer != nullptr)
            {
                // Free exactly length*sizeof(TValue) bytes, matching allocation.
                AllocatorFree(alloc, (TypeAllocatorFunc<AllocatorType, int>::GetFreeFunc()), buffer, UInt32Math::Mul(length, sizeof(TValue)));
            }
        }

        // Reads the item at 'index'. Out-of-range reads return a
        // zero-initialised TValue rather than failing.
        TValue ItemInBuffer(uint32 index) const
        {
            if (index >= count)
            {
                return (TValue)0;
            }
            return buffer[index];
        }

        // Overwrites an existing item; 'index' must be below the live count.
        void ItemInBuffer(uint32 index, TValue item)
        {
            EnsureArray();
            Assert(index < count);
            buffer[index] = item;
        }

        // Appends an item, growing the buffer first if it is full.
        void Add(TValue item)
        {
            EnsureArray();
            buffer[count] = item;
            count++;
        }

        uint32 Count() const { return count; }
        void SetCount(uint32 _count) { count = _count; }
        uint32 GetLength() const { return length; }
        TValue* GetBuffer() const { return buffer; }

        // Deep copy: allocates a fresh buffer of the same capacity with the
        // same allocator and copies the contents.
        GrowingArray * Clone()
        {
            GrowingArray * pNewArray = AllocatorNew(AllocatorType, alloc, GrowingArray, alloc, length);
            pNewArray->count = count;
            if (buffer)
            {
                pNewArray->buffer = AllocateArray<AllocatorType, TValue, false>(
                    TRACK_ALLOC_INFO(alloc, TValue, AllocatorType, 0, length),
                    TypeAllocatorFunc<AllocatorType, TValue>::GetAllocFunc(),
                    length);
                const size_t byteSize = UInt32Math::Mul(length, sizeof(TValue));
                js_memcpy_s(pNewArray->buffer, byteSize, buffer, byteSize);
            }
            return pNewArray;
        }

    private:
        TValue* buffer;       // backing storage (nullptr until EnsureArray)
        uint32 count;         // number of live items
        uint32 length;        // capacity of 'buffer', in items
        AllocatorType* alloc;

        // Lazily allocates the buffer, or grows it (new capacity =
        // 1 + 2*length, overflow-checked) when full: copies the old
        // contents into the new buffer and frees the old one.
        void EnsureArray()
        {
            if (buffer == nullptr)
            {
                buffer = AllocateArray<AllocatorType, TValue, false>(
                    TRACK_ALLOC_INFO(alloc, TValue, AllocatorType, 0, length),
                    TypeAllocatorFunc<AllocatorType, TValue>::GetAllocFunc(),
                    length);
                count = 0;
            }
            else if (count == length)
            {
                uint32 newLength = UInt32Math::AddMul<1, 2>(length);
                TValue * newbuffer = AllocateArray<AllocatorType, TValue, false>(
                    TRACK_ALLOC_INFO(alloc, TValue, AllocatorType, 0, newLength),
                    TypeAllocatorFunc<AllocatorType, TValue>::GetAllocFunc(),
                    newLength);
                const size_t lengthByteSize = UInt32Math::Mul(length, sizeof(TValue));
                const size_t newLengthByteSize = UInt32Math::Mul(newLength, sizeof(TValue));
                js_memcpy_s(newbuffer, newLengthByteSize, buffer, lengthByteSize);
#ifdef DIAG_MEM
                listFreeAmount += length;
#endif
                if (length != 0)
                {
                    AllocatorFree(alloc, (TypeAllocatorFunc<AllocatorType, int>::GetFreeFunc()), buffer, lengthByteSize);
                }
                length = newLength;
                buffer = newbuffer;
            }
        }
    };
    typedef GrowingArray<uint32, HeapAllocator> GrowingUint32HeapArray;
}
| 36,149
|
https://github.com/S8msGITcode/Lightcord/blob/master/modules/discord_krisp/index.js
|
Github Open Source
|
Open Source
|
MIT
| 2,021
|
Lightcord
|
S8msGITcode
|
JavaScript
|
Code
| 36
| 140
|
// Loads the platform-specific Krisp noise-suppression native addon and
// wraps its callback-based model queries in Promise-returning helpers.
const KrispModule = require('./discord_krisp_' + process.platform + '.node');

KrispModule._initialize();

// Promisified wrapper over the native callback API for NC models.
KrispModule.getNcModels = function () {
  return new Promise((resolve) => KrispModule._getNcModels(resolve));
};

// Promisified wrapper over the native callback API for VAD models.
KrispModule.getVadModels = function () {
  return new Promise((resolve) => KrispModule._getVadModels(resolve));
};

module.exports = KrispModule;
| 36,262
|
https://github.com/bradzacher/eslint-config-brad/blob/master/src/types/react/jsx-boolean-value.ts
|
Github Open Source
|
Open Source
|
MIT
| 2,023
|
eslint-config-brad
|
bradzacher
|
TypeScript
|
Code
| 88
| 230
|
// this file is auto-generated. Run `make regenerate-types` to regenerate it.
// Option tuple shape for the eslint `react/jsx-boolean-value` rule:
// severity alone, or severity + 'always'/'never', optionally followed by a
// per-prop exception list. NOTE(review): several union arms are emitted
// more than once by the generator; the duplicates are redundant but
// harmless to the resulting type.
type JsxBooleanValue0 =
  | ['off' | 'error' | 'warn']
  | ['off' | 'error' | 'warn', 'always' | 'never']
  | ['off' | 'error' | 'warn']
  | ['off' | 'error' | 'warn', 'always']
  | [
      'off' | 'error' | 'warn',
      'always',
      {
        never?: string[];
      },
    ]
  | ['off' | 'error' | 'warn']
  | ['off' | 'error' | 'warn', 'never']
  | [
      'off' | 'error' | 'warn',
      'never',
      {
        always?: string[];
      },
    ];

export type JsxBooleanValue = 'off' | JsxBooleanValue0;
| 27,830
|
https://github.com/jclcdev7/world_client/blob/master/src/app/components/country/country.component.ts
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,018
|
world_client
|
jclcdev7
|
TypeScript
|
Code
| 81
| 209
|
import { Component, OnInit, OnChanges } from '@angular/core';
import { CountryService } from '../../services/country.service';
declare let componentHandler: any; // need to use external in angular2? Ok no pb, but declare it before like that
@Component({
  selector: 'country',
  templateUrl: './country.component.html',
  styleUrls: ['./country.component.css']
})
export class CountryComponent implements OnInit, OnChanges {
  // Country list bound to the template ("maliste" = French "my list").
  maliste: Array<any>;
  // Currently selected country name (bound from the template).
  mycountry: string;

  constructor(private service: CountryService) { }

  ngOnInit() {
    this.service.getList().subscribe(lst => this.maliste = lst);
    // Re-applies tab handling after routing (translated from French).
    // NOTE(review): componentHandler is declared as an external global —
    // presumably Material Design Lite's DOM upgrader; confirm it is loaded.
    componentHandler.upgradeDom();
  }

  // Declared to satisfy OnChanges; intentionally empty.
  ngOnChanges() {
  }
}
| 42,975
|
https://github.com/pobear/tidb-operator/blob/master/checkout-pr-branch.sh
|
Github Open Source
|
Open Source
|
Apache-2.0
| null |
tidb-operator
|
pobear
|
Shell
|
Code
| 102
| 285
|
#!/usr/bin/env bash
# This script is used to checkout a tidb-operator PR branch in a forked repo.
# The local branch is named [github-username]/[pr-branch].
if [[ -z ${1:-} ]]; then
    echo "\
This script is used to checkout a tidb-operator PR branch in a forked repo
Usage:
    checkout-pr-branch.sh [github-username]:[pr-branch]
The argument can be copied directly from github PR page.
The local branch name would be [github-username]/[pr-branch].\
" >&2
    # Fix: missing argument is a usage error — signal failure to callers
    # (previously `exit` returned 0).
    exit 1
fi

# Split "user:branch" into its parts; quote expansions so unusual
# characters in the argument don't word-split.
username=$(echo "${1}" | cut -d':' -f1)
branch=$(echo "${1}" | cut -d':' -f2)
local_branch="${username}/${branch}"
fork="https://github.com/${username}/tidb-operator"

# If the local branch already exists, update it from the fork;
# otherwise fetch it into a new local branch and switch to it.
exists=$(git show-ref "refs/heads/${local_branch}")
if [[ -n ${exists} ]]; then
    git checkout "${local_branch}"
    git pull "${fork}" "${branch}:${local_branch}"
else
    git fetch "${fork}" "${branch}:${local_branch}"
    git checkout "${local_branch}"
fi
| 13,130
|
https://github.com/Damoness/react-native-http-cache/blob/master/ios/DMNHttpCache.m
|
Github Open Source
|
Open Source
|
MIT
| 2,021
|
react-native-http-cache
|
Damoness
|
Objective-C
|
Code
| 71
| 397
|
//
// DMNHttpCache.m
// QiChangBuyCar
//
// Created by Damoness on 2019/8/30.
// Copyright © 2019 Facebook. All rights reserved.
//
#import "DMNHttpCache.h"
// React Native bridge module exposing HTTP/image cache size and clearing.
@implementation DMNHttpCache

RCT_EXPORT_MODULE(DMNHttpCache);

//1.2.0
// Resolves with the shared NSURLCache's current disk usage in bytes.
// NOTE(review): none of these methods ever call reject; failures are
// not surfaced to JS.
RCT_EXPORT_METHOD(getHttpCacheSize:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject)
{
    NSURLCache *httpCache = [NSURLCache sharedURLCache];
    resolve(@([httpCache currentDiskUsage]));
}

// Removes all cached HTTP responses; resolves with nil on completion.
RCT_EXPORT_METHOD(clearHttpCache:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject)
{
    NSURLCache *httpCache = [NSURLCache sharedURLCache];
    [httpCache removeAllCachedResponses];
    resolve(nil);
}

// Resolves with SDWebImage's total disk cache size in bytes (async).
RCT_EXPORT_METHOD(getImageCacheSize:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject)
{
    [[SDImageCache sharedImageCache]calculateSizeWithCompletionBlock:^(NSUInteger fileCount, NSUInteger totalSize) {
        resolve(@(totalSize));
    }];
}

// Clears SDWebImage's disk cache; resolves with true when done.
RCT_EXPORT_METHOD(clearImageCache:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject)
{
    [[SDImageCache sharedImageCache]clearDiskOnCompletion:^{
        resolve(@(true));
    }];
}

@end
| 9,929
|
https://github.com/MattWindsor91/roslyn/blob/master/src/Compilers/CSharp/Portable/Symbols/Synthesized/SynthesizedWitnessMethodSymbol.cs
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,017
|
roslyn
|
MattWindsor91
|
C#
|
Code
| 383
| 983
|
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Immutable;
using System.Diagnostics;
namespace Microsoft.CodeAnalysis.CSharp.Symbols
{
    /// <summary>
    /// Symbol representing a member belonging to a concept, which has been
    /// accessed through a concept witness.
    /// <para>
    /// The main goal of this class is to mark the method so invocations
    /// of it can be dispatched properly during binding. This is a rather
    /// hacky way of doing this, to say the least.
    /// </para>
    /// </summary>
    internal sealed class SynthesizedWitnessMethodSymbol : WrappedMethodSymbol
    {
        /// <summary>
        /// The witness 'owning' the concept method.
        /// </summary>
        private TypeSymbol _parent;

        /// <summary>
        /// The concept method to wrap.
        /// </summary>
        private MethodSymbol _method;

        /// <summary>
        /// Constructs a new <see cref="SynthesizedWitnessMethodSymbol"/>.
        /// </summary>
        /// <param name="method">
        /// The concept method to wrap.
        /// </param>
        /// <param name="parent">
        /// The witness 'owning' the concept method.
        /// </param>
        internal SynthesizedWitnessMethodSymbol(MethodSymbol method, TypeSymbol parent)
            : base()
        {
            Debug.Assert(parent != null, "synthesized witness method must have a parent");
            Debug.Assert(parent.IsConceptWitness || parent.IsInstanceType(), "parent of a synthesised witness method must be an instance or witness");
            _method = method;
            _parent = parent;
        }

        /// <summary>
        /// Gets the type parameter of the witness from which this method is
        /// being called.
        /// </summary>
        internal TypeSymbol Parent => _parent;

        public override MethodSymbol UnderlyingMethod => _method;

        // The receiver is the witness type, not the wrapped method's type.
        public override TypeSymbol ReceiverType => _parent;

        // @t-mawind
        // The following are things WrappedMethodSymbol doesn't give us for
        // free, and are probably incorrect.
        public override MethodSymbol OriginalDefinition => UnderlyingMethod.OriginalDefinition;
        public override Symbol ContainingSymbol => UnderlyingMethod.ContainingSymbol;

        // Synthesized: no declaring syntax or source locations exist.
        public override ImmutableArray<SyntaxReference> DeclaringSyntaxReferences
            => ImmutableArray<SyntaxReference>.Empty;
        public sealed override bool IsImplicitlyDeclared => true;
        public override ImmutableArray<Location> Locations
            => ImmutableArray<Location>.Empty;

        // Everything below simply forwards to the wrapped concept method.
        public override bool ReturnsVoid => UnderlyingMethod.ReturnsVoid;
        public override TypeSymbol ReturnType => UnderlyingMethod.ReturnType;
        public override ImmutableArray<TypeSymbol> TypeArguments => UnderlyingMethod.TypeArguments;
        public override ImmutableArray<TypeParameterSymbol> TypeParameters => UnderlyingMethod.TypeParameters;
        public override ImmutableArray<ParameterSymbol> Parameters => UnderlyingMethod.Parameters;
        public override ImmutableArray<MethodSymbol> ExplicitInterfaceImplementations => UnderlyingMethod.ExplicitInterfaceImplementations;
        public override ImmutableArray<CustomModifier> ReturnTypeCustomModifiers => UnderlyingMethod.ReturnTypeCustomModifiers;
        public override Symbol AssociatedSymbol => UnderlyingMethod.AssociatedSymbol;
        internal override bool IsExplicitInterfaceImplementation => UnderlyingMethod.IsExplicitInterfaceImplementation;
        public override ImmutableArray<CustomModifier> RefCustomModifiers => UnderlyingMethod.RefCustomModifiers;

        // TODO: this is probably wrong, as we have no syntax.
        internal override int CalculateLocalSyntaxOffset(int localPosition, SyntaxTree localTree) => UnderlyingMethod.CalculateLocalSyntaxOffset(localPosition, localTree);

        public override ImmutableArray<CSharpAttributeData> GetAttributes() => UnderlyingMethod.GetAttributes();
        public override ImmutableArray<CSharpAttributeData> GetReturnTypeAttributes() => UnderlyingMethod.GetReturnTypeAttributes();
    }
}
| 6,516
|
https://github.com/xgeekshq/spring-boot-jdbc-template-sample/blob/master/src/main/java/io/xgeeks/examples/spring/CarService.java
|
Github Open Source
|
Open Source
|
MIT
| 2,021
|
spring-boot-jdbc-template-sample
|
xgeekshq
|
Java
|
Code
| 111
| 423
|
package io.xgeeks.examples.spring;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
 * Application service exposing CRUD operations over cars, translating
 * between persistence entities ({@code Car}) and transport DTOs
 * ({@code CarDTO}).
 */
@Service
public class CarService {

    private final CarDAO dao;
    private final CarMapper mapper;

    @Autowired
    public CarService(CarDAO dao, CarMapper mapper) {
        this.dao = dao;
        this.mapper = mapper;
    }

    /** Returns one page of cars, each mapped to its DTO. */
    public List<CarDTO> findAll(Page page) {
        return dao.findAll(page)
                .map(mapper::toDTO)
                .collect(Collectors.toList());
    }

    /** Looks up a single car by id, mapped to its DTO if present. */
    public Optional<CarDTO> findById(Long id) {
        return dao.findBy(id).map(mapper::toDTO);
    }

    /** Persists a new car and returns the stored representation. */
    public CarDTO insert(CarDTO dto) {
        return mapper.toDTO(dao.insert(mapper.toEntity(dto)));
    }

    /**
     * Applies {@code dto} onto the existing car with the given id and
     * persists the change.
     *
     * @throws EntityNotFoundException if no car exists with {@code id}
     */
    public CarDTO update(Long id, CarDTO dto) {
        Car existing = dao.findBy(id)
                .orElseThrow(() -> new EntityNotFoundException("Car does not find with the id " + id));
        existing.update(mapper.toEntity(dto));
        dao.update(existing);
        return mapper.toDTO(existing);
    }

    /** Removes the car with the given id. */
    public void delete(Long id) {
        dao.delete(id);
    }
}
| 32,441
|
https://github.com/mervecrk/bilge-adam-java-course1/blob/master/week-08-exceptions/custom-exception-classes/src/com/bilgeadam/Multiply.java
|
Github Open Source
|
Open Source
|
MIT
| null |
bilge-adam-java-course1
|
mervecrk
|
Java
|
Code
| 38
| 117
|
package com.bilgeadam;
import com.bilgeadam.exceptions.MultiplyException;
/**
 * Demonstrates a custom checked exception by guarding multiplication
 * against large operands.
 */
public class Multiply {

    /**
     * Prints {@code a * b} when both operands are below 1000.
     *
     * @throws MultiplyException when either operand is {@code >= 1000}.
     *         The (Turkish) message reads "a or b cannot be greater than
     *         1000" — NOTE(review): a value of exactly 1000 also throws,
     *         despite the "greater than" wording.
     */
    public void multiply(int a, int b) throws IllegalArgumentException, MultiplyException, RuntimeException{
        if(a < 1000 && b < 1000){
            System.out.println(a*b);
        }else{
            throw new MultiplyException("a veya b 1000'den büyük olamaz.");
        }
    }
}
| 38,323
|
https://github.com/hiteshtr/contactbook/blob/master/src/User.js
|
Github Open Source
|
Open Source
|
MIT
| null |
contactbook
|
hiteshtr
|
JavaScript
|
Code
| 274
| 1,249
|
import React, { Component } from 'react';
import { Link } from "react-router-dom";
class User extends Component {
constructor(props) {
super(props);
this.localSubmitHandler = this.localSubmitHandler.bind(this);
this.localUpdateHandler = this.localUpdateHandler.bind(this);
}
localSubmitHandler = (e) => {
this.props.submitHandler(e, this.props.history);
}
localUpdateHandler = (e) => {
this.props.updateHandler(e, parseInt(this.props.match.params.id, 10), this.props.history);
}
render() {
if (this.props.match.path === "/user/:id") {
let user = this.props.userData.find(function (elem) {
return elem.id === parseInt(this.props.match.params.id, 10);
}, this);
return (
<form name="userForm" onSubmit={this.localUpdateHandler}>
<div className="form-row">
<div className="form-group col-md-6">
<label htmlFor="inputEmail4">First Name</label>
<input required type="text" pattern="^[a-zA-Z]+$" defaultValue={user.firstName} className="form-control" name="firstName" placeholder="First Name" />
</div>
<div className="form-group col-md-6">
<label htmlFor="inputPassword4">Last Name</label>
<input required type="text" pattern="^[a-zA-Z]+$" defaultValue={user.lastName} className="form-control" name="lastName" placeholder="Last Name" />
</div>
</div>
<div className="form-row">
<div className="form-group col-md-6">
<label htmlFor="inputCity">Email</label>
<input required type="email" defaultValue={user.email} className="form-control" name="email" placeholder="email" />
</div>
<div className="form-group col-md-2">
<label htmlFor="inputZip">Phone</label>
<input required type="tel" maxLength="9" pattern="[7-9]{1}[0-9]{8}" defaultValue={user.phone} className="form-control" name="phone" placeholder="phone" />
</div>
<div className="form-group col-md-4">
<label htmlFor="inputState">Status</label>
<select name="status" defaultValue={user.status} className="form-control" required>
<option value="">Choose...</option>
<option value="Active">Active</option>
<option value="Inactive">Inactive</option>
</select>
</div>
</div>
<button type="submit" className="btn btn-primary">Update</button>
<Link to="/" className="btn btn-secondary">Cancel</Link>
</form>
);
}
return (
<form name="userForm" onSubmit={this.localSubmitHandler}>
<div className="form-row">
<div className="form-group col-md-6">
<label htmlFor="inputEmail4">First Name</label>
<input required type="text" pattern="^[a-zA-Z]+$" className="form-control" name="firstName" placeholder="First Name" />
</div>
<div className="form-group col-md-6">
<label htmlFor="inputPassword4">Last Name</label>
<input required type="text" pattern="^[a-zA-Z]+$" className="form-control" name="lastName" placeholder="Last Name" />
</div>
</div>
<div className="form-row">
<div className="form-group col-md-6">
<label htmlFor="inputCity">Email</label>
<input required type="email" className="form-control" name="email" placeholder="email" />
</div>
<div className="form-group col-md-2">
<label htmlFor="inputZip">Phone</label>
<input required type="tel" maxLength="9" pattern="[7-9]{1}[0-9]{8}" className="form-control" name="phone" placeholder="phone" />
</div>
<div className="form-group col-md-4">
<label htmlFor="inputState">Status</label>
<select name="status" defaultValue="" className="form-control" required>
<option value="">Choose...</option>
<option value="Active">Active</option>
<option value="Inactive">Inactive</option>
</select>
</div>
</div>
<button type="submit" className="btn btn-primary">Save</button>
<Link to="/" className="btn btn-secondary">Cancel</Link>
</form>
);
}
}
export default User;
| 45,777
|
https://github.com/StennGroup/stenn-entityframework-core/blob/master/src/Stenn.EntityFrameworkCore.SqlServer.Extensions.DependencyInjection/SqlServerMigrations.cs
|
Github Open Source
|
Open Source
|
MIT
| 2,022
|
stenn-entityframework-core
|
StennGroup
|
C#
|
Code
| 46
| 261
|
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using Stenn.EntityFrameworkCore.Extensions.DependencyInjection;
using Stenn.EntityFrameworkCore.SqlServer.Enums;
using Stenn.EntityFrameworkCore.StaticMigrations;
using Stenn.EntityFrameworkCore.StaticMigrations.Enums;
using Stenn.EntityFrameworkCore.StaticMigrations.StaticMigrations;
namespace Stenn.EntityFrameworkCore.SqlServer.Extensions.DependencyInjection
{
public sealed class SqlServerMigrations : RelationalDbContextOptionsConfigurator, IStaticMigrationsProviderConfigurator
{
/// <inheritdoc />
public void RegisterServices(IServiceCollection services, StaticMigrationsOptions options)
{
services.TryAddTransient<IStaticMigrationHistoryRepository, StaticMigrationHistoryRepositorySqlServer>();
if (options.EnableEnumTables)
{
services.TryAddTransient<IEnumsStaticMigrationFactory, EnumsStaticMigrationFactorySqlServer>();
}
}
}
}
| 33,641
|
https://github.com/pranathivemuri/napari/blob/master/napari/_qt/widgets/qt_highlight_preview.py
|
Github Open Source
|
Open Source
|
BSD-3-Clause
| 2,021
|
napari
|
pranathivemuri
|
Python
|
Code
| 1,291
| 4,460
|
import numpy as np
from qtpy.QtCore import QSize, Qt, Signal
from qtpy.QtGui import QColor, QIntValidator, QPainter, QPainterPath, QPen
from qtpy.QtWidgets import (
QDialog,
QFrame,
QHBoxLayout,
QLabel,
QLineEdit,
QSlider,
QVBoxLayout,
QWidget,
)
from ...utils.translations import translator
trans = translator.load()
class QtStar(QFrame):
    """Creates a star for the preview pane in the highlight widget.

    Parameters
    ----------
    value : int
        The line width of the star.
    """

    def __init__(
        self,
        parent: QWidget = None,
        value: int = None,
    ):
        super().__init__(parent)
        # Current pen width used when drawing the star outline.
        self._value = value

    def sizeHint(self):
        """Override Qt sizeHint."""
        return QSize(100, 100)

    def minimumSizeHint(self):
        """Override Qt minimumSizeHint."""
        return QSize(100, 100)

    def paintEvent(self, e):
        """Paint star on frame."""
        qp = QPainter()
        qp.begin(self)
        self.drawStar(qp)
        qp.end()

    def value(self):
        """Return value of star widget.

        Returns
        -------
        int
            The value of the star widget.
        """
        return self._value

    def setValue(self, value: int):
        """Set line width value of star widget.

        Parameters
        ----------
        value : int
            line width value for star
        """
        self._value = value
        # Request a repaint so the new line width shows immediately.
        self.update()

    def drawStar(self, qp):
        """Draw a star in the preview pane.

        Parameters
        ----------
        qp : QPainter object
        """
        width = self.rect().width()
        height = self.rect().height()
        # Sky-blue pen whose width is the widget's current value.
        col = QColor(135, 206, 235)
        pen = QPen(col, self._value)
        pen.setJoinStyle(Qt.PenJoinStyle.MiterJoin)
        qp.setPen(pen)
        path = QPainterPath()
        # draw pentagram
        star_center_x = width / 2
        star_center_y = height / 2
        # make sure the star equal no matter the size of the qframe
        if width < height:
            # not taking it all the way to the edge so the star has room to grow
            radius_outer = width * 0.35
        else:
            radius_outer = height * 0.35
        # start at the top point of the star and move counter clockwise to draw the path.
        # every other point is the shorter radius (1/(1+golden_ratio)) of the larger radius
        golden_ratio = (1 + np.sqrt(5)) / 2
        radius_inner = radius_outer / (1 + golden_ratio)
        theta_start = np.pi / 2
        theta_inc = (2 * np.pi) / 10
        # 11 points: 10 star vertices plus a repeat of the first to close.
        for n in range(11):
            theta = theta_start + (n * theta_inc)
            theta = np.mod(theta, 2 * np.pi)
            if np.mod(n, 2) == 0:
                # use radius_outer
                x = radius_outer * np.cos(theta)
                y = radius_outer * np.sin(theta)
            else:
                # use radius_inner
                x = radius_inner * np.cos(theta)
                y = radius_inner * np.sin(theta)
            x_adj = star_center_x - x
            # +3 nudges the star down slightly within the frame.
            y_adj = star_center_y - y + 3
            if n == 0:
                path.moveTo(x_adj, y_adj)
            else:
                path.lineTo(x_adj, y_adj)
        qp.drawPath(path)
class QtTriangle(QFrame):
    """Draw the triangle in highlight widget.

    A filled right-triangle "ramp" that acts as a click-to-set control:
    clicking selects a value proportional to the horizontal click position.

    Parameters
    ----------
    parent : QWidget, optional
        Parent widget.
    value : int
        Current value of the highlight size.
    min_value : int
        Minimum value possible for highlight size.
    max_value : int
        Maximum value possible for highlight size.
    """

    valueChanged = Signal(int)

    def __init__(
        self,
        parent: QWidget = None,
        value: int = 1,
        min_value: int = 1,
        max_value: int = 10,
    ):
        super().__init__(parent)
        self._max_value = max_value
        self._min_value = min_value
        self._value = value

    def mousePressEvent(self, event):
        """When mouse is clicked, adjust to new values."""
        # Map the horizontal click position linearly onto [min, max].
        perc = event.pos().x() / self.rect().width()
        value = ((self._max_value - self._min_value) * perc) + self._min_value
        # NOTE(review): value is a float here and setValue() does not clamp
        # or round it — confirm whether integer clamping is expected.
        self.setValue(value)

    def paintEvent(self, e):
        """Paint triangle on frame."""
        qp = QPainter()
        qp.begin(self)
        self.drawTriangle(qp)
        # Fraction of the range covered by the current value, used to place
        # the indicator line.
        perc = (self._value - self._min_value) / (
            self._max_value - self._min_value
        )
        self.drawLine(qp, self.rect().width() * perc)
        qp.end()

    def sizeHint(self):
        """Override Qt sizeHint."""
        return QSize(75, 30)

    def minimumSizeHint(self):
        """Override Qt minimumSizeHint."""
        return QSize(75, 30)

    def drawTriangle(self, qp):
        """Draw triangle.

        Parameters
        ----------
        qp : QPainter object
        """
        width = self.rect().width()
        height = self.rect().height()
        col = QColor(135, 206, 235)
        qp.setPen(QPen(col, 1))
        qp.setBrush(col)
        path = QPainterPath()
        # NOTE(review): the frame height read above is immediately replaced
        # by a fixed 10 px triangle height — the first assignment is unused.
        height = 10
        # Right triangle: flat base at y=height, hypotenuse rising to the
        # top-right corner.
        path.moveTo(0, height)
        path.lineTo(width, height)
        path.lineTo(width, 0)
        path.closeSubpath()
        qp.drawPath(path)

    def value(self):
        """Return value of triangle widget.

        Returns
        -------
        int
            Current value of triangle widget.
        """
        return self._value

    def setValue(self, value):
        """Set value for triangle widget.

        Parameters
        ----------
        value : int
            Value to use for line in triangle widget.
        """
        # No clamping here; callers are expected to pass an in-range value.
        self._value = value
        self.update()

    def minimum(self):
        """Return minimum value.

        Returns
        -------
        int
            Mininum value of triangle widget.
        """
        return self._min_value

    def maximum(self):
        """Return maximum value.

        Returns
        -------
        int
            Maximum value of triangle widget.
        """
        return self._max_value

    def setMinimum(self, value: int):
        """Set minimum value

        Parameters
        ----------
        value : int
            Minimum value of triangle.
        """
        self._min_value = value
        # Raise the current value if it now falls below the new minimum.
        if self._value < value:
            self._value = value

    def setMaximum(self, value: int):
        """Set maximum value.

        Parameters
        ----------
        value : int
            Maximum value of triangle.
        """
        self._max_value = value
        # Lower the current value if it now exceeds the new maximum.
        if self._value > value:
            self._value = value

    def drawLine(self, qp, value: int):
        """Draw line on triangle indicating value.

        Parameters
        ----------
        qp : QPainter object
        value : int
            Value of highlight thickness.
        """
        col = QColor('white')
        qp.setPen(QPen(col, 2))
        qp.setBrush(col)
        path = QPainterPath()
        path.moveTo(value, 15)
        path.lineTo(value, 0)
        path.closeSubpath()
        qp.drawPath(path)
        # NOTE(review): emitting valueChanged from a paint helper means the
        # signal fires on every repaint, not only on user changes — confirm
        # this is intended.
        self.valueChanged.emit(self._value)
class QtHighlightSizePreviewWidget(QDialog):
    """Creates custom widget to set highlight size.

    Combines a line edit, a slider with a triangle "ramp" control, and a
    star preview; all four stay synchronized to a single integer value.

    Parameters
    ----------
    parent : QWidget, optional
        Parent widget.
    description : str
        Text to explain and display on widget.
    value : int
        Value of highlight size.
    min_value : int
        Minimum possible value of highlight size.
    max_value : int
        Maximum possible value of highlight size.
    unit : str
        Unit of highlight size.
    """

    valueChanged = Signal(int)

    def __init__(
        self,
        parent: QWidget = None,
        description: str = "",
        value: int = 1,
        min_value: int = 1,
        max_value: int = 10,
        unit: str = "px",
    ):
        super().__init__(parent)
        self.setGeometry(300, 300, 125, 110)
        # Falsy values (including 0) fall back to the current font height.
        self._value = value if value else self.fontMetrics().height()
        self._min_value = min_value
        self._max_value = max_value

        # Widget
        self._lineedit = QLineEdit()
        self._description = QLabel(self)
        self._unit = QLabel(self)
        self._slider = QSlider(Qt.Horizontal)
        self._triangle = QtTriangle(self)
        self._slider_min_label = QLabel(self)
        self._slider_max_label = QLabel(self)
        self._preview = QtStar(self)
        self._preview_label = QLabel(self)
        # Restricts the line edit to integers within [min_value, max_value].
        self._validator = QIntValidator(min_value, max_value, self)

        # Widgets setup
        self._description.setText(description)
        self._description.setWordWrap(True)
        self._unit.setText(unit)
        self._unit.setAlignment(Qt.AlignBottom)
        self._lineedit.setValidator(self._validator)
        # NOTE(review): each setAlignment call replaces the previous flags,
        # so AlignBottom overrides AlignRight here — confirm both were meant
        # to be combined (Qt.AlignRight | Qt.AlignBottom).
        self._lineedit.setAlignment(Qt.AlignRight)
        self._lineedit.setAlignment(Qt.AlignBottom)
        self._slider_min_label.setText(str(min_value))
        self._slider_min_label.setAlignment(Qt.AlignBottom)
        self._slider_max_label.setText(str(max_value))
        self._slider_max_label.setAlignment(Qt.AlignBottom)
        self._slider.setMinimum(min_value)
        self._slider.setMaximum(max_value)
        self._preview.setValue(value)
        self._triangle.setValue(value)
        self._triangle.setMinimum(min_value)
        self._triangle.setMaximum(max_value)
        self._preview_label.setText(trans._("Preview"))
        # Same overriding-setAlignment pattern as the line edit above.
        self._preview_label.setAlignment(Qt.AlignHCenter)
        self._preview_label.setAlignment(Qt.AlignBottom)
        self._preview.setStyleSheet('border: 1px solid white;')

        # Signals: every input control funnels into _update_value.
        self._slider.valueChanged.connect(self._update_value)
        self._lineedit.textChanged.connect(self._update_value)
        self._triangle.valueChanged.connect(self._update_value)

        # Layout
        triangle_layout = QHBoxLayout()
        triangle_layout.addWidget(self._triangle)
        triangle_layout.setContentsMargins(6, 35, 6, 0)
        triangle_slider_layout = QVBoxLayout()
        triangle_slider_layout.addLayout(triangle_layout)
        triangle_slider_layout.setContentsMargins(0, 0, 0, 0)
        triangle_slider_layout.addWidget(self._slider)
        triangle_slider_layout.setAlignment(Qt.AlignVCenter)

        # Bottom row layout
        lineedit_layout = QHBoxLayout()
        lineedit_layout.addWidget(self._lineedit)
        lineedit_layout.setAlignment(Qt.AlignBottom)
        bottom_left_layout = QHBoxLayout()
        bottom_left_layout.addLayout(lineedit_layout)
        bottom_left_layout.addWidget(self._unit)
        bottom_left_layout.addWidget(self._slider_min_label)
        bottom_left_layout.addLayout(triangle_slider_layout)
        bottom_left_layout.addWidget(self._slider_max_label)
        bottom_left_layout.setAlignment(Qt.AlignBottom)
        left_layout = QVBoxLayout()
        left_layout.addWidget(self._description)
        left_layout.addLayout(bottom_left_layout)
        left_layout.setAlignment(Qt.AlignLeft)
        preview_label_layout = QHBoxLayout()
        preview_label_layout.addWidget(self._preview_label)
        preview_label_layout.setAlignment(Qt.AlignHCenter)
        preview_layout = QVBoxLayout()
        preview_layout.addWidget(self._preview)
        preview_layout.addLayout(preview_label_layout)
        preview_layout.setAlignment(Qt.AlignCenter)
        layout = QHBoxLayout()
        layout.addLayout(left_layout)
        layout.addLayout(preview_layout)
        self.setLayout(layout)
        self._refresh()

    def _update_value(self, value):
        """Update highlight value.

        Slot shared by the slider (int), line edit (str) and triangle (int);
        the value is normalized to an int and clamped to [min, max].

        Parameters
        ----------
        value : int
            Highlight value.
        """
        # The line edit emits an empty string while cleared; keep the
        # current value in that case.
        if value == "":
            value = int(self._value)
        value = int(value)
        if value > self._max_value:
            value = self._max_value
        elif value < self._min_value:
            value = self._min_value
        if value != self._value:
            self.valueChanged.emit(value)
        self._value = value
        self._refresh()

    def _refresh(self):
        """Set every widget value to the new set value."""
        # blockSignals only silences this dialog's own signals; the child
        # widgets below still emit, but re-entry terminates because the
        # value is already equal.
        self.blockSignals(True)
        self._lineedit.setText(str(self._value))
        self._slider.setValue(self._value)
        self._triangle.setValue(self._value)
        self._preview.setValue(self._value)
        self.blockSignals(False)
        self.valueChanged.emit(self._value)

    def value(self):
        """Return current value.

        Returns
        -------
        int
            Current value of highlight widget.
        """
        return self._value

    def setValue(self, value):
        """Set new value and update widget.

        Parameters
        ----------
        value : int
            Highlight value.
        """
        self._update_value(value)
        self._refresh()

    def description(self):
        """Return the description text.

        Returns
        -------
        str
            Current text in description.
        """
        return self._description.text()

    def setDescription(self, text):
        """Set the description text.

        Parameters
        ----------
        text : str
            Text to use in description box.
        """
        self._description.setText(text)

    def unit(self):
        """Return highlight value unit text.

        Returns
        -------
        str
            Current text in unit text.
        """
        return self._unit.text()

    def setUnit(self, text):
        """Set highlight value unit.

        Parameters
        ----------
        text : str
            Text used to describe units.
        """
        self._unit.setText(text)

    def setMinimum(self, value):
        """Set minimum highlight value for star, triangle, text and slider.

        Parameters
        ----------
        value : int
            Minimum highlight value.

        Raises
        ------
        ValueError
            If the new minimum is not smaller than the current maximum.
        """
        value = int(value)
        if value < self._max_value:
            self._min_value = value
            self._slider_min_label.setText(str(value))
            self._slider.setMinimum(value)
            self._triangle.setMinimum(value)
            # Raise the current value if it now falls below the new minimum.
            self._value = (
                self._min_value
                if self._value < self._min_value
                else self._value
            )
            self._refresh()
        else:
            raise ValueError(
                trans._(
                    "Minimum value must be smaller than {max_value}",
                    deferred=True,
                    max_value=self._max_value,
                )
            )

    def minimum(self):
        """Return minimum highlight value.

        Returns
        -------
        int
            Minimum value of highlight widget.
        """
        return self._min_value

    def setMaximum(self, value):
        """Set maximum highlight value.

        Parameters
        ----------
        value : int
            Maximum highlight value.

        Raises
        ------
        ValueError
            If the new maximum is not larger than the current minimum.
        """
        value = int(value)
        if value > self._min_value:
            self._max_value = value
            self._slider_max_label.setText(str(value))
            self._slider.setMaximum(value)
            self._triangle.setMaximum(value)
            # Lower the current value if it now exceeds the new maximum.
            self._value = (
                self._max_value
                if self._value > self._max_value
                else self._value
            )
            self._refresh()
        else:
            raise ValueError(
                trans._(
                    "Maximum value must be larger than {min_value}",
                    deferred=True,
                    min_value=self._min_value,
                )
            )

    def maximum(self):
        """Return maximum highlight value.

        Returns
        -------
        int
            Maximum value of highlight widget.
        """
        return self._max_value
| 3,673
|
https://github.com/Avanade/Liquid-Application-Framework/blob/master/test/Liquid.Messaging.Tests/Mock/EntityMock.cs
|
Github Open Source
|
Open Source
|
LicenseRef-scancode-dco-1.1, MIT
| 2,023
|
Liquid-Application-Framework
|
Avanade
|
C#
|
Code
| 23
| 53
|
namespace Liquid.Messaging.Tests.Mock
{
    /// <summary>
    /// Minimal test double with one int and one string property;
    /// presumably used as a message payload in the messaging tests —
    /// verify against the test fixtures that reference it.
    /// </summary>
    public class EntityMock
    {
        /// <summary>Sample integer property.</summary>
        public int Property1 { get; set; }

        /// <summary>Sample string property.</summary>
        public string Property2 { get; set; }
    }
}
| 11,698
|
https://github.com/CGQAQ/deno_yoga/blob/master/tests/padding_test.js
|
Github Open Source
|
Open Source
|
MIT
| 2,022
|
deno_yoga
|
CGQAQ
|
JavaScript
|
Code
| 1,361
| 5,544
|
import { assert } from "./deps.ts";
import * as Yoga from "../mod.ts";
// Padding alone (no explicit size) should produce a 20x20 node in both
// layout directions. Same checks as before, expressed with a small
// assertion helper and a direction loop.
Deno.test("padding_no_size", function () {
  const expectEq = (expected, actual, expr) =>
    assert(expected === actual, expected + " === " + expr + " (" + actual + ")");
  const config = Yoga.Config.create();
  let root;
  try {
    root = Yoga.Node.create(config);
    for (const edge of [Yoga.EDGE_LEFT, Yoga.EDGE_TOP, Yoga.EDGE_RIGHT, Yoga.EDGE_BOTTOM]) {
      root.setPadding(edge, 10);
    }
    // LTR and RTL expectations are identical for this layout.
    for (const direction of [Yoga.DIRECTION_LTR, Yoga.DIRECTION_RTL]) {
      root.calculateLayout(Yoga.UNDEFINED, Yoga.UNDEFINED, direction);
      expectEq(0, root.getComputedLeft(), "root.getComputedLeft()");
      expectEq(0, root.getComputedTop(), "root.getComputedTop()");
      expectEq(20, root.getComputedWidth(), "root.getComputedWidth()");
      expectEq(20, root.getComputedHeight(), "root.getComputedHeight()");
    }
  } finally {
    if (root !== undefined) {
      root.freeRecursive();
    }
    config.free();
  }
});
// A 10x10 child inside 10px padding yields a 30x30 container with the
// child offset by the padding on both layout directions.
Deno.test("padding_container_match_child", function () {
  const expectEq = (expected, actual, expr) =>
    assert(expected === actual, expected + " === " + expr + " (" + actual + ")");
  const config = Yoga.Config.create();
  let root;
  try {
    root = Yoga.Node.create(config);
    for (const edge of [Yoga.EDGE_LEFT, Yoga.EDGE_TOP, Yoga.EDGE_RIGHT, Yoga.EDGE_BOTTOM]) {
      root.setPadding(edge, 10);
    }
    const child = Yoga.Node.create(config);
    child.setWidth(10);
    child.setHeight(10);
    root.insertChild(child, 0);
    // LTR and RTL expectations are identical for this layout.
    for (const direction of [Yoga.DIRECTION_LTR, Yoga.DIRECTION_RTL]) {
      root.calculateLayout(Yoga.UNDEFINED, Yoga.UNDEFINED, direction);
      expectEq(0, root.getComputedLeft(), "root.getComputedLeft()");
      expectEq(0, root.getComputedTop(), "root.getComputedTop()");
      expectEq(30, root.getComputedWidth(), "root.getComputedWidth()");
      expectEq(30, root.getComputedHeight(), "root.getComputedHeight()");
      expectEq(10, child.getComputedLeft(), "root_child0.getComputedLeft()");
      expectEq(10, child.getComputedTop(), "root_child0.getComputedTop()");
      expectEq(10, child.getComputedWidth(), "root_child0.getComputedWidth()");
      expectEq(10, child.getComputedHeight(), "root_child0.getComputedHeight()");
    }
  } finally {
    if (root !== undefined) {
      root.freeRecursive();
    }
    config.free();
  }
});
// A flex-grow child inside a padded 100x100 container fills the vertical
// padding box; only its horizontal position differs between LTR and RTL.
Deno.test("padding_flex_child", function () {
  const expectEq = (expected, actual, expr) =>
    assert(expected === actual, expected + " === " + expr + " (" + actual + ")");
  const config = Yoga.Config.create();
  let root;
  try {
    root = Yoga.Node.create(config);
    for (const edge of [Yoga.EDGE_LEFT, Yoga.EDGE_TOP, Yoga.EDGE_RIGHT, Yoga.EDGE_BOTTOM]) {
      root.setPadding(edge, 10);
    }
    root.setWidth(100);
    root.setHeight(100);
    const child = Yoga.Node.create(config);
    child.setFlexGrow(1);
    child.setWidth(10);
    root.insertChild(child, 0);
    for (const [direction, childLeft] of [
      [Yoga.DIRECTION_LTR, 10],
      [Yoga.DIRECTION_RTL, 80],
    ]) {
      root.calculateLayout(Yoga.UNDEFINED, Yoga.UNDEFINED, direction);
      expectEq(0, root.getComputedLeft(), "root.getComputedLeft()");
      expectEq(0, root.getComputedTop(), "root.getComputedTop()");
      expectEq(100, root.getComputedWidth(), "root.getComputedWidth()");
      expectEq(100, root.getComputedHeight(), "root.getComputedHeight()");
      expectEq(childLeft, child.getComputedLeft(), "root_child0.getComputedLeft()");
      expectEq(10, child.getComputedTop(), "root_child0.getComputedTop()");
      expectEq(10, child.getComputedWidth(), "root_child0.getComputedWidth()");
      expectEq(80, child.getComputedHeight(), "root_child0.getComputedHeight()");
    }
  } finally {
    if (root !== undefined) {
      root.freeRecursive();
    }
    config.free();
  }
});
// A child with only a height set stretches horizontally to the padding
// box (80px wide); expectations are the same in both directions.
Deno.test("padding_stretch_child", function () {
  const expectEq = (expected, actual, expr) =>
    assert(expected === actual, expected + " === " + expr + " (" + actual + ")");
  const config = Yoga.Config.create();
  let root;
  try {
    root = Yoga.Node.create(config);
    for (const edge of [Yoga.EDGE_LEFT, Yoga.EDGE_TOP, Yoga.EDGE_RIGHT, Yoga.EDGE_BOTTOM]) {
      root.setPadding(edge, 10);
    }
    root.setWidth(100);
    root.setHeight(100);
    const child = Yoga.Node.create(config);
    child.setHeight(10);
    root.insertChild(child, 0);
    for (const direction of [Yoga.DIRECTION_LTR, Yoga.DIRECTION_RTL]) {
      root.calculateLayout(Yoga.UNDEFINED, Yoga.UNDEFINED, direction);
      expectEq(0, root.getComputedLeft(), "root.getComputedLeft()");
      expectEq(0, root.getComputedTop(), "root.getComputedTop()");
      expectEq(100, root.getComputedWidth(), "root.getComputedWidth()");
      expectEq(100, root.getComputedHeight(), "root.getComputedHeight()");
      expectEq(10, child.getComputedLeft(), "root_child0.getComputedLeft()");
      expectEq(10, child.getComputedTop(), "root_child0.getComputedTop()");
      expectEq(80, child.getComputedWidth(), "root_child0.getComputedWidth()");
      expectEq(10, child.getComputedHeight(), "root_child0.getComputedHeight()");
    }
  } finally {
    if (root !== undefined) {
      root.freeRecursive();
    }
    config.free();
  }
});
// Centered child with asymmetric start/end/bottom padding: the horizontal
// centering shifts with the layout direction (start/end edges flip).
Deno.test("padding_center_child", function () {
  const expectEq = (expected, actual, expr) =>
    assert(expected === actual, expected + " === " + expr + " (" + actual + ")");
  const config = Yoga.Config.create();
  let root;
  try {
    root = Yoga.Node.create(config);
    root.setJustifyContent(Yoga.JUSTIFY_CENTER);
    root.setAlignItems(Yoga.ALIGN_CENTER);
    root.setPadding(Yoga.EDGE_START, 10);
    root.setPadding(Yoga.EDGE_END, 20);
    root.setPadding(Yoga.EDGE_BOTTOM, 20);
    root.setWidth(100);
    root.setHeight(100);
    const child = Yoga.Node.create(config);
    child.setWidth(10);
    child.setHeight(10);
    root.insertChild(child, 0);
    for (const [direction, childLeft] of [
      [Yoga.DIRECTION_LTR, 40],
      [Yoga.DIRECTION_RTL, 50],
    ]) {
      root.calculateLayout(Yoga.UNDEFINED, Yoga.UNDEFINED, direction);
      expectEq(0, root.getComputedLeft(), "root.getComputedLeft()");
      expectEq(0, root.getComputedTop(), "root.getComputedTop()");
      expectEq(100, root.getComputedWidth(), "root.getComputedWidth()");
      expectEq(100, root.getComputedHeight(), "root.getComputedHeight()");
      expectEq(childLeft, child.getComputedLeft(), "root_child0.getComputedLeft()");
      expectEq(35, child.getComputedTop(), "root_child0.getComputedTop()");
      expectEq(10, child.getComputedWidth(), "root_child0.getComputedWidth()");
      expectEq(10, child.getComputedHeight(), "root_child0.getComputedHeight()");
    }
  } finally {
    if (root !== undefined) {
      root.freeRecursive();
    }
    config.free();
  }
});
// A padded child aligned to the flex end of a 200x200 container: the
// child's own padding does not affect its outer box; only its horizontal
// position flips between LTR and RTL.
Deno.test("child_with_padding_align_end", function () {
  const expectEq = (expected, actual, expr) =>
    assert(expected === actual, expected + " === " + expr + " (" + actual + ")");
  const config = Yoga.Config.create();
  let root;
  try {
    root = Yoga.Node.create(config);
    root.setJustifyContent(Yoga.JUSTIFY_FLEX_END);
    root.setAlignItems(Yoga.ALIGN_FLEX_END);
    root.setWidth(200);
    root.setHeight(200);
    const child = Yoga.Node.create(config);
    for (const edge of [Yoga.EDGE_LEFT, Yoga.EDGE_TOP, Yoga.EDGE_RIGHT, Yoga.EDGE_BOTTOM]) {
      child.setPadding(edge, 20);
    }
    child.setWidth(100);
    child.setHeight(100);
    root.insertChild(child, 0);
    for (const [direction, childLeft] of [
      [Yoga.DIRECTION_LTR, 100],
      [Yoga.DIRECTION_RTL, 0],
    ]) {
      root.calculateLayout(Yoga.UNDEFINED, Yoga.UNDEFINED, direction);
      expectEq(0, root.getComputedLeft(), "root.getComputedLeft()");
      expectEq(0, root.getComputedTop(), "root.getComputedTop()");
      expectEq(200, root.getComputedWidth(), "root.getComputedWidth()");
      expectEq(200, root.getComputedHeight(), "root.getComputedHeight()");
      expectEq(childLeft, child.getComputedLeft(), "root_child0.getComputedLeft()");
      expectEq(100, child.getComputedTop(), "root_child0.getComputedTop()");
      expectEq(100, child.getComputedWidth(), "root_child0.getComputedWidth()");
      expectEq(100, child.getComputedHeight(), "root_child0.getComputedHeight()");
    }
  } finally {
    if (root !== undefined) {
      root.freeRecursive();
    }
    config.free();
  }
});
| 7,596
|
https://github.com/shady831213/genes/blob/master/src/test/scala/genes/example/design/GenesExample/modules/regFile/RegFileLogic.scala
|
Github Open Source
|
Open Source
|
MIT
| null |
genes
|
shady831213
|
Scala
|
Code
| 90
| 263
|
package genes.example.design.GenesExample.modules.regFile
import chisel3._
import scala.collection.mutable.ArrayBuffer
//from rocket
/** Register-file generator (adapted from Rocket Chip).
  *
  * Elaborates an n-entry register file of w-bit registers with
  * combinational read ports and a write port whose data is bypassed to
  * any read port addressing the same register.
  *
  * @param n    number of registers
  * @param w    register width in bits
  * @param zero when true, reads of address 0 return a hardwired zero
  */
class RegFileLogic(val n: Int, val w: Int, zero: Boolean = false) {
  val rf = Mem(n, UInt(w.W))
  // (address, data-wire) pair for every read port created so far; used by
  // write() to build the bypass network.
  private val reads = ArrayBuffer[(UInt, UInt)]()
  // All read ports must be declared before the first write so the bypass
  // covers every port (enforced by the require below).
  private var canRead = true

  def read(addr: UInt) = {
    require(canRead)
    reads += addr -> Wire(UInt())
    // Address 0 reads as zero when the `zero` option is enabled.
    reads.last._2 := Mux(zero.B && addr === 0.U, 0.U, rf.read(addr))
    reads.last._2
  }

  def write(addr: UInt, data: UInt) = {
    canRead = false
    // Address 0 is never written.
    when(addr =/= 0.U) {
      rf.write(addr, data)
      // Forward the written data to any read port with a matching address.
      for ((raddr, rdata) <- reads)
        when(addr === raddr) {
          rdata := data
        }
    }
  }
}
| 458
|
https://github.com/chrisesler/opentsdb-horizon/blob/master/frontend/src/app/app-shell/containers/app-shell.component.scss
|
Github Open Source
|
Open Source
|
Apache-2.0
| null |
opentsdb-horizon
|
chrisesler
|
SCSS
|
Code
| 292
| 774
|
// **
// * This file is part of OpenTSDB.
// * Copyright (C) 2021 Yahoo.
// *
// * Licensed under the Apache License, Version 2.0 (the "License");
// * you may not use this file except in compliance with the License.
// * You may obtain a copy of the License at
// *
// * http://www.apache.org/licenses/LICENSE-2.0
// *
// * Unless required by applicable law or agreed to in writing, software
// * distributed under the License is distributed on an "AS IS" BASIS,
// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// * See the License for the specific language governing permissions and
// * limitations under the License.
// *
// Minimum width of the sliding navigation drawer.
$drawer-min-width: 301px;

.app-shell {
    display: flex;
    flex: 1;
    flex-direction: column;
    // Pin the shell to the full viewport.
    position: absolute;
    top: 0;
    left: 0;
    right: 0;
    bottom: 0;

    // .app-navigator contains:
    //      the icon bar (.app-sidenav)
    //      the sliding drawer (.app-navigation-drawer)
    //      the app content passed through ng-content (.router-outlet)
    .app-navigator {
        display: flex;
        flex: 1;
    }

    // Fixed-width icon bar on the left edge.
    .app-sidenav {
        width: 60px;
        overflow: hidden;
        display: flex;

        .mat-drawer-inner-container {
            display: flex;
        }

        &.mat-drawer-side {
            border-right: 0;
        }
    }

    // this contains the sliding drawer & the router-outlet
    .app-navigator-outlet-container {
        // when drawer is shown, and is "over", we make the dimming basically invisible
        // but you can still click on it / esc it
        .mat-drawer-backdrop.mat-drawer-shown {
            background-color: rgba(0, 0, 0, .01);
        }

        .cdk-visually-hidden.cdk-focus-trap-anchor {
            display: none;
        }
    }

    .app-navigator-drawer {
        display: flex;
        min-width: $drawer-min-width;
    }

    // Horizontal notification strip: icon cell + message cell, each with
    // rounded corners on its outer side only.
    .messaging-bar {
        display: flex;
        align-items: stretch;
        padding: 3px;

        .message-icon {
            width: 32px;
            padding: 8px;
            border-top-left-radius: 4px;
            border-bottom-left-radius: 4px;
            display: flex;
        }

        .message-text {
            flex: 1;
            padding: 4px 8px;
            border-top-right-radius: 4px;
            border-bottom-right-radius: 4px;
            display: flex;
            align-items: center;
        }
    }

    .router-outlet.mat-drawer-content {
        overflow: hidden;
        height: 100%;
        max-height: 100%;
    }
}
| 24,451
|
https://github.com/mooneywang/MJZhanQiTV/blob/master/ZhanQiTV/ZhanQiTV/Classes/Home/View/ZQSectionHeader.m
|
Github Open Source
|
Open Source
|
MIT
| 2,016
|
MJZhanQiTV
|
mooneywang
|
Objective-C
|
Code
| 153
| 600
|
//
// ZQSectionHeader.m
// ZhanQiTV
//
// Created by Mengjie.Wang on 2016/06/13.
// Copyright © 2016年 Mooney.Wang. All rights reserved.
//
#import "ZQSectionHeader.h"
#import "ZQConstants.h"
#import <Masonry.h>
#import "ZQHomeListModel.h"
@interface ZQSectionHeader ()

@property (nonatomic, strong) UIView *lineView;
@property (nonatomic, strong) UILabel *titleLabel;
// Guards the Masonry constraint installation so that layoutSubviews does
// not install a fresh (duplicate) set of constraints on every layout pass.
@property (nonatomic, assign) BOOL didSetupConstraints;

@end

@implementation ZQSectionHeader

- (instancetype)initWithFrame:(CGRect)frame {
    if (self = [super initWithFrame:frame]) {
        [self initView];
    }
    return self;
}

- (instancetype)initWithCoder:(NSCoder *)aDecoder {
    if (self = [super initWithCoder:aDecoder]) {
        [self initView];
    }
    return self;
}

// Builds the colored accent bar and the section title label.
- (void)initView {
    self.backgroundColor = [UIColor whiteColor];

    _lineView = [[UIView alloc] init];
    _lineView.backgroundColor = ZQRGBColor(88, 159, 245, 1);
    _lineView.layer.cornerRadius = 2.5;
    _lineView.layer.masksToBounds = YES;
    [self addSubview:_lineView];

    _titleLabel = [[UILabel alloc] init];
    _titleLabel.font = [UIFont boldSystemFontOfSize:17];
    _titleLabel.textAlignment = NSTextAlignmentLeft;
    _titleLabel.text = @"英雄联盟";
    [self addSubview:_titleLabel];
}

- (void)layoutSubviews {
    [super layoutSubviews];
    _lineView.frame = CGRectMake(10, 5, 5, self.frame.size.height - 10);
    // FIX: mas_makeConstraints adds a *new* set of constraints each time it
    // runs; calling it unconditionally here accumulated duplicates on every
    // layout pass. Install the title-label constraints exactly once.
    if (!self.didSetupConstraints) {
        self.didSetupConstraints = YES;
        [_titleLabel mas_makeConstraints:^(MASConstraintMaker *make) {
            make.left.equalTo(_lineView.mas_right).offset(10);
            make.center.equalTo(self);
        }];
    }
}

// Refreshes the title text whenever the backing model changes.
- (void)setHomeListModel:(HomeListModel *)homeListModel {
    _homeListModel = homeListModel;
    _titleLabel.text = homeListModel.title;
}

@end
| 7,109
|
https://github.com/hlmclgl/rentaCarProject-frontend/blob/master/src/app/components/rental-detail/rental-detail.component.ts
|
Github Open Source
|
Open Source
|
MIT
| null |
rentaCarProject-frontend
|
hlmclgl
|
TypeScript
|
Code
| 55
| 208
|
import { Component, OnInit } from '@angular/core';
import { RentalDetail } from 'src/app/models/rentalDetail';
import { RentalService } from 'src/app/services/rental.service';
@Component({
  selector: 'app-rental-detail',
  templateUrl: './rental-detail.component.html',
  styleUrls: ['./rental-detail.component.css'],
})
export class RentalDetailComponent implements OnInit {
  // Rental records rendered by the template; undefined until the first
  // successful load completes.
  rentals: RentalDetail[];

  constructor(private rentalService: RentalService) {}

  /** Angular lifecycle hook: kicks off the initial data load. */
  ngOnInit(): void {
    this.getRentals();
  }

  /** Fetches all rentals from the service and stores them for the template. */
  getRentals() {
    this.rentalService.getRentals().subscribe((response) => {
      this.rentals = response.data;
      // NOTE(review): no error callback — a failed request leaves `rentals`
      // unset silently; confirm whether an error state should be surfaced.
    });
  }
}
| 32,783
|
https://github.com/windystrife/UnrealEngine_NVIDIAGameWork/blob/master/Engine/Source/Developer/MaterialBaking/Public/MaterialOptions.h
|
Github Open Source
|
Open Source
|
MIT
| 2,022
|
UnrealEngine_NVIDIAGameWork
|
windystrife
|
C
|
Code
| 535
| 1,467
|
// Copyright 1998-2017 Epic Games, Inc. All Rights Reserved.
#pragma once
#include "SceneTypes.h"
#include "Engine/EngineTypes.h"
#include "MaterialOptions.generated.h"
/** Enum to define different types of baking materials */
UENUM()
enum class EMaterialBakeMethod : uint8
{
	// One baked material per source material.
	IndividualMaterial UMETA(DisplayName = "Bake out Materials Individually"),
	// All materials packed into a single atlas texture/material.
	AtlasMaterial UMETA(DisplayName = "Combine Materials into Atlassed Material"),
	// Materials combined into one material using texture bins.
	BinnedMaterial UMETA(DisplayName = "Combine Materials into Binned Material")
};
/** Structure to represent a single property the user wants to bake out for a given set of materials */
USTRUCT(Blueprintable)
struct FPropertyEntry
{
	GENERATED_BODY()

	// FIX: the default constructor previously left bUseCustomSize
	// uninitialized, yielding an indeterminate bool. All members are now
	// initialized, in declaration order.
	FPropertyEntry()
		: Property(MP_EmissiveColor)
		, bUseCustomSize(false)
		, CustomSize(0, 0)
		, bUseConstantValue(false)
		, ConstantValue(0.0f)
	{}

	FPropertyEntry(EMaterialProperty InProperty)
		: Property(InProperty)
		, bUseCustomSize(false)
		, CustomSize(0, 0)
		, bUseConstantValue(false)
		, ConstantValue(0.0f)
	{}

	/** Property which should be baked out */
	UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = Property, meta = (ExposeOnSpawn))
	TEnumAsByte<EMaterialProperty> Property;

	/** Whether or not to use the value of custom size for the output texture */
	UPROPERTY(EditAnywhere, Category = Property, BlueprintReadWrite, meta = (InlineEditConditionToggle, ExposeOnSpawn))
	bool bUseCustomSize;

	/** Defines the size of the output textures for the baked out material properties */
	UPROPERTY(EditAnywhere, Category = Property, BlueprintReadWrite, meta = (EditCondition = bUseCustomSize, ExposeOnSpawn, ClampMin = "1", UIMin = "1"))
	FIntPoint CustomSize;

	/** Whether or not to use Constant Value as the final 'baked out' value for the this property */
	UPROPERTY(EditAnywhere, Category = Property, BlueprintReadWrite, meta = (InlineEditConditionToggle, ExposeOnSpawn))
	bool bUseConstantValue;

	/** Defines the value representing this property in the final proxy material */
	UPROPERTY(EditAnywhere, Category = Property, BlueprintReadWrite, meta = (EditCondition = bUseConstantValue, ExposeOnSpawn))
	float ConstantValue;
};
/** Options object to define what and how a material should be baked out */
UCLASS(config = Editor, Blueprintable)
class MATERIALBAKING_API UMaterialOptions : public UObject
{
	GENERATED_BODY()

public:
	// Defaults: bake base color only, for LOD 0, at 128x128, without
	// sampling the source mesh data.
	UMaterialOptions()
		: TextureSize(128, 128), bUseMeshData(false), bUseSpecificUVIndex(false), TextureCoordinateIndex(0)
	{
		Properties.Add(MP_BaseColor);
		LODIndices.Add(0);
	}

	/** Properties which are supposed to be baked out for the material(s) */
	UPROPERTY(EditAnywhere, config, BlueprintReadWrite, Category= MaterialSettings, meta = (ExposeOnSpawn))
	TArray<FPropertyEntry> Properties;

	/** Size of the final texture(s) containing the baked out property data */
	UPROPERTY(EditAnywhere, config, BlueprintReadWrite, Category = MaterialSettings, meta = (ExposeOnSpawn, ClampMin = "1", UIMin = "1"))
	FIntPoint TextureSize;

	/** LOD indices for which the materials should be baked out */
	UPROPERTY(BlueprintReadWrite, Category = MeshSettings, meta = (ExposeOnSpawn))
	TArray<int32> LODIndices;

	/** Determines whether to not allow usage of the source mesh data while baking out material properties */
	UPROPERTY(EditAnywhere, config, BlueprintReadWrite, Category = MeshSettings, meta = (ExposeOnSpawn))
	bool bUseMeshData;

	/** Flag whether or not the value of TextureCoordinateIndex should be used while baking out material properties */
	UPROPERTY(EditAnywhere, Category = MeshSettings, BlueprintReadWrite, meta = (InlineEditConditionToggle, EditCondition = bUseMeshData, ExposeOnSpawn))
	bool bUseSpecificUVIndex;

	/** Specific texture coordinate which should be used to while baking out material properties as the positions stream */
	UPROPERTY(EditAnywhere, Category = MeshSettings, BlueprintReadWrite, meta = (EditCondition = bUseSpecificUVIndex, ExposeOnSpawn))
	int32 TextureCoordinateIndex;
};
/** Asset bake options object */
// NOTE(review): currently carries no settings; exists as an extension point /
// placeholder in the bake-options UI.
UCLASS(Config = Editor, Blueprintable)
class MATERIALBAKING_API UAssetBakeOptions : public UObject
{
	GENERATED_BODY()

public:
	UAssetBakeOptions()
	{
	}
};
/** Material merge options object */
UCLASS(Config = Editor, Blueprintable)
class MATERIALBAKING_API UMaterialMergeOptions: public UObject
{
	GENERATED_BODY()

public:
	// Defaults: bake each material individually, opaque blending.
	UMaterialMergeOptions() : Method(EMaterialBakeMethod::IndividualMaterial), BlendMode(EBlendMode::BLEND_Opaque)
	{
	}

	/** Method used to bake out the materials, hidden for now */
	// Property specifiers are deliberately commented out so the field does not
	// appear in the editor UI yet; do not remove the UPROPERTY macro itself.
	UPROPERTY(/*EditAnywhere, BlueprintReadWrite, config, Category = MeshSettings, meta = (ExposeOnSpawn)*/)
	EMaterialBakeMethod Method;

	/** Blend mode for the final proxy material(s) */
	UPROPERTY(EditAnywhere, BlueprintReadWrite, config, Category = MeshSettings, meta = (ExposeOnSpawn))
	TEnumAsByte<EBlendMode> BlendMode;
};
| 1,087
|
https://github.com/primeval-io/primeval-reflex/blob/master/src/test/java/io/primeval/reflex/proxy/theory/M1ArgsUpdater.java
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,017
|
primeval-reflex
|
primeval-io
|
Java
|
Code
| 388
| 1,094
|
package io.primeval.reflex.proxy.theory;
import java.io.PrintStream;
import java.lang.reflect.Parameter;
import java.util.List;
import io.primeval.reflex.arguments.Arguments;
import io.primeval.reflex.arguments.ArgumentsUpdater;
/**
 * Hand-written {@link ArgumentsUpdater} test fixture for a method with the
 * parameter list {@code (PrintStream ps, int i, byte b, String s)}.
 *
 * <p>Only accessors matching those four parameters are functional; getters for
 * other primitive types throw {@link IllegalArgumentException}, and the
 * corresponding setters are lenient no-ops that return {@code this}.
 */
public final class M1ArgsUpdater implements ArgumentsUpdater {

    public final List<Parameter> parameters;
    public PrintStream ps;
    public int i;
    public byte b;
    public String s;

    public M1ArgsUpdater(List<Parameter> parameters, PrintStream ps, int i, byte b, String s) {
        this.parameters = parameters;
        this.ps = ps;
        this.i = i;
        this.b = b;
        this.s = s;
    }

    @Override
    public List<Parameter> parameters() {
        return parameters;
    }

    @SuppressWarnings("unchecked")
    @Override
    public <T> T objectArg(String argName) {
        if (argName.equals("ps")) {
            return (T) ps;
        } else if (argName.equals("s")) {
            return (T) s;
        } else {
            throw new IllegalArgumentException("No object parameter named " + argName);
        }
    }

    @Override
    public int intArg(String argName) {
        switch (argName) {
        default:
            throw new IllegalArgumentException("No int parameter named " + argName);
        }
    }

    @Override
    public short shortArg(String argName) {
        throw new IllegalArgumentException("Bad type");
    }

    @Override
    public long longArg(String argName) {
        throw new IllegalArgumentException("No object parameter named " + argName);
    }

    @Override
    public byte byteArg(String argName) {
        switch (argName) {
        case "b":
            return b;
        default:
            throw new IllegalArgumentException("No byte parameter named " + argName);
        }
    }

    @Override
    public boolean booleanArg(String argName) {
        throw new IllegalArgumentException("Bad type");
    }

    @Override
    public float floatArg(String argName) {
        throw new IllegalArgumentException("Bad type");
    }

    @Override
    public double doubleArg(String argName) {
        throw new IllegalArgumentException("Bad type");
    }

    @Override
    public char charArg(String argName) {
        throw new IllegalArgumentException("Bad type");
    }

    /** Materializes the (possibly updated) argument values into a new snapshot. */
    @Override
    public Arguments update() {
        return new M1Args(parameters, ps, i, b, s);
    }

    @Override
    public <T> ArgumentsUpdater setObjectArg(String argName, T newValue) {
        if (argName.equals("ps")) {
            ps = (PrintStream) newValue;
            return this;
        } else if (argName.equals("s")) {
            s = (String) newValue;
            return this;
        } else {
            throw new IllegalArgumentException("No object parameter named " + argName);
        }
    }

    @Override
    public ArgumentsUpdater setIntArg(String argName, int newValue) {
        if (argName.equals("i")) {
            i = newValue;
            return this;
        } else {
            throw new IllegalArgumentException("No object parameter named " + argName);
        }
    }

    @Override
    public ArgumentsUpdater setShortArg(String argName, short newValue) {
        return this;
    }

    @Override
    public ArgumentsUpdater setLongArg(String argName, long newValue) {
        return this;
    }

    @Override
    public ArgumentsUpdater setByteArg(String argName, byte newValue) {
        // Fix: previously the new value was silently discarded, so update()
        // never reflected a changed "b" even though byteArg("b") reads it.
        if (argName.equals("b")) {
            b = newValue;
        }
        return this;
    }

    @Override
    public ArgumentsUpdater setBooleanArg(String argName, boolean newValue) {
        return this;
    }

    @Override
    public ArgumentsUpdater setFloatArg(String argName, float newValue) {
        return this;
    }

    @Override
    public ArgumentsUpdater setDoubleArg(String argName, double newValue) {
        return this;
    }

    @Override
    public ArgumentsUpdater setCharArg(String argName, char newValue) {
        return this;
    }
}
| 32,939
|
https://github.com/Mirocow/parser-1/blob/master/frontend/views/site/index.php
|
Github Open Source
|
Open Source
|
BSD-3-Clause
| 2,017
|
parser-1
|
Mirocow
|
PHP
|
Code
| 255
| 1,253
|
<?php
use yii\helpers\Html;
//use yii\grid\GridView;
use kartik\grid\GridView;
use kartik\dynagrid\DynaGrid;
use yii\helpers\ArrayHelper;
use common\models\Parser;

/* @var $this yii\web\View */
/* @var $searchModel common\models\ParserSearch */
/* @var $dataProvider yii\data\ActiveDataProvider */

// Index view: renders parsed product rows in a persistable kartik DynaGrid.
$this->title = Yii::t('backend', 'Parsers');
$this->params['breadcrumbs'][] = $this->title;
?>
<div class="parser-index">
    <?php
    // Column definitions; several optional filter configurations are kept
    // commented out for future use.
    $columns =
    [
        ['class'=>'kartik\grid\SerialColumn', 'order'=>DynaGrid::ORDER_FIX_LEFT],
        [
            'attribute'=>'product_name',
            // 'filterType'=>GridView::FILTER_SELECT2,
            // 'filter'=>ArrayHelper::map(Parser::find()->orderBy('product_name')->asArray()->all(), 'id', 'product_name'),
            'vAlign'=>'middle',
            'order'=>DynaGrid::ORDER_FIX_LEFT
        ],
        [
            'attribute'=>'product_sku',
            // 'filterType'=>GridView::FILTER_SELECT2,
            // 'filter'=>ArrayHelper::map(Parser::find()->orderBy('product_sku')->asArray()->all(), 'id', 'product_sku'),
            'vAlign'=>'middle',
            'format'=>'raw',
            // 'width'=>'150px',
            'noWrap'=>true
        ],
        [
            'attribute'=>'site_name',
            // 'filterType'=>GridView::FILTER_SELECT2,
            // 'filter'=>ArrayHelper::map(Parser::find()->orderBy('site_name')->asArray()->all(), 'id', 'site_name'),
            // 'filterInputOptions'=>['placeholder'=>'Все сайты'],
            // 'format'=>'raw',
            // 'width'=>'170px',
            // 'visible'=>false,
        ],
        [
            'attribute'=>'price',
            'vAlign'=>'middle',
            'format'=>['decimal', 2],
            // 'width'=>'250px',
            // 'filterInputOptions'=>['placeholder'=>'Any author'],
        ],
        [
            'attribute'=>'price_old',
            'vAlign'=>'middle',
            'format'=>['decimal', 2],
        ],
        [
            // 'class'=>'kartik\grid\BooleanColumn',
            'attribute'=>'available',
            'vAlign'=>'middle',
        ],
        // [
        //     'class'=>'kartik\grid\ActionColumn',
        //     'dropdown'=>false,
        //     'urlCreator'=>function($action, $model, $key, $index) { return '#'; },
        //     'viewOptions'=>['title'=>$viewMsg, 'data-toggle'=>'tooltip'],
        //     'updateOptions'=>['title'=>$updateMsg, 'data-toggle'=>'tooltip'],
        //     'deleteOptions'=>['title'=>$deleteMsg, 'data-toggle'=>'tooltip'],
        //     'order'=>DynaGrid::ORDER_FIX_RIGHT
        // ],
        // ['class'=>'kartik\grid\CheckboxColumn', 'order'=>DynaGrid::ORDER_FIX_RIGHT],
    ];

    // DynaGrid widget: column layout is personalizable and persisted in the
    // user's session ('storage' => 'session').
    $dynagrid = DynaGrid::begin([
        'columns' => $columns,
        'theme'=>'panel-info',
        'showPersonalize'=>true,
        'storage' => 'session',
        'gridOptions'=>[
            'dataProvider'=>$dataProvider,
            'filterModel'=>$searchModel,
            'showPageSummary'=>true,
            'floatHeader'=>true,
            'pjax'=>true,
            'responsiveWrap'=>false,
            'panel'=>[
                'heading'=>'<h3 class="panel-title"><i class="glyphicon glyphicon-book"></i>Парсер</h3>',
                // 'before' => '<div style="padding-top: 7px;"><em>* The table header sticks to the top in this demo as you scroll</em></div>',
                'after' => false
            ],
            'toolbar' => [
                // ['content'=>
                //     Html::button('<i class="glyphicon glyphicon-plus"></i>', ['type'=>'button', 'title'=>'Add Book', 'class'=>'btn btn-success', 'onclick'=>'alert("This will launch the book creation form.\n\nDisabled for this demo!");']) . ' '.
                //     Html::a('<i class="glyphicon glyphicon-repeat"></i>', ['dynagrid-demo'], ['data-pjax'=>0, 'class' => 'btn btn-default', 'title'=>'Reset Grid'])
                // ],
                ['content'=>'{dynagridFilter}{dynagridSort}{dynagrid}'],
                '{export}',
            ]
        ],
        'options'=>['id'=>'dynagrid-1'] // a unique identifier is important
    ]);

    // "simple" themes render without a surrounding panel.
    if (substr($dynagrid->theme, 0, 6) == 'simple') {
        $dynagrid->gridOptions['panel'] = false;
    }

    DynaGrid::end();
    ?>
</div>
| 4,195
|
https://github.com/batfish/batfish/blob/master/projects/batfish-common-protocol/src/main/java/org/batfish/datamodel/Bgpv4Route.java
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,023
|
batfish
|
batfish
|
Java
|
Code
| 711
| 2,979
|
package org.batfish.datamodel;
import static com.google.common.base.MoreObjects.firstNonNull;
import static com.google.common.base.Preconditions.checkArgument;
import static org.batfish.datamodel.OriginMechanism.LEARNED;
import static org.batfish.datamodel.OriginMechanism.NETWORK;
import static org.batfish.datamodel.OriginMechanism.REDISTRIBUTE;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.MoreObjects;
import java.util.Objects;
import java.util.Set;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.annotation.ParametersAreNonnullByDefault;
import org.batfish.datamodel.bgp.TunnelEncapsulationAttribute;
import org.batfish.datamodel.route.nh.NextHop;
import org.batfish.datamodel.route.nh.NextHopDiscard;
import org.batfish.datamodel.routing_policy.communities.CommunitySet;
/**
 * A BGP Route. Captures attributes of both iBGP and eBGP routes.
 *
 * <p>For computational efficiency may contain additional attributes (that would otherwise be
 * present only in BGP advertisements on the wire)
 */
@ParametersAreNonnullByDefault
public final class Bgpv4Route extends BgpRoute<Bgpv4Route.Builder, Bgpv4Route> {

  /** Builder for {@link Bgpv4Route} */
  public static final class Builder extends BgpRoute.Builder<Builder, Bgpv4Route> {

    @Nonnull
    @Override
    public Builder newBuilder() {
      return new Builder();
    }

    /**
     * Builds the route, validating that all mandatory fields are present.
     *
     * @throws IllegalArgumentException if a mandatory field is missing, or if a locally
     *     originated route (NETWORK/REDISTRIBUTE) lacks a source protocol.
     */
    @Nonnull
    @Override
    public Bgpv4Route build() {
      checkArgument(_originatorIp != null, "Missing %s", PROP_ORIGINATOR_IP);
      checkArgument(_originMechanism != null, "Missing %s", PROP_ORIGIN_MECHANISM);
      checkArgument(
          _srcProtocol != null || (_originMechanism != NETWORK && _originMechanism != REDISTRIBUTE),
          "Local routes must have a source protocol");
      checkArgument(_originType != null, "Missing %s", PROP_ORIGIN_TYPE);
      checkArgument(_protocol != null, "Missing %s", PROP_PROTOCOL);
      checkArgument(_receivedFrom != null, "Missing %s", PROP_RECEIVED_FROM);
      checkArgument(_nextHop != null, "Missing next hop");
      return new Bgpv4Route(
          BgpRouteAttributes.create(
              _asPath,
              _clusterList,
              _communities,
              _localPreference,
              getMetric(),
              _originatorIp,
              _originMechanism,
              _originType,
              _protocol,
              _receivedFromRouteReflectorClient,
              _srcProtocol,
              _tunnelEncapsulationAttribute,
              _weight),
          _receivedFrom,
          getNetwork(),
          _nextHop,
          _pathId,
          getAdmin(),
          getTag(),
          getNonForwarding(),
          getNonRouting());
    }

    @Override
    @Nonnull
    public Builder getThis() {
      return this;
    }

    private Builder() {}
  }

  /* Cache the hashcode */
  // 0 means "not yet computed"; see hashCode() below.
  private transient int _hashCode = 0;

  /** Jackson deserialization entry point; mirrors the validation in {@link Builder#build()}. */
  @JsonCreator
  private static Bgpv4Route jsonCreator(
      @Nullable @JsonProperty(PROP_NETWORK) Prefix network,
      @Nullable @JsonProperty(PROP_NEXT_HOP_IP) Ip nextHopIp,
      @JsonProperty(PROP_ADMINISTRATIVE_COST) int admin,
      @Nullable @JsonProperty(PROP_AS_PATH) AsPath asPath,
      @Nullable @JsonProperty(PROP_COMMUNITIES) CommunitySet communities,
      @JsonProperty(PROP_LOCAL_PREFERENCE) long localPreference,
      @JsonProperty(PROP_METRIC) long med,
      @Nullable @JsonProperty(PROP_NEXT_HOP_INTERFACE) String nextHopInterface,
      @Nullable @JsonProperty(PROP_ORIGINATOR_IP) Ip originatorIp,
      @Nullable @JsonProperty(PROP_CLUSTER_LIST) Set<Long> clusterList,
      @JsonProperty(PROP_RECEIVED_FROM_ROUTE_REFLECTOR_CLIENT)
          boolean receivedFromRouteReflectorClient,
      @Nullable @JsonProperty(PROP_ORIGIN_MECHANISM) OriginMechanism originMechanism,
      @Nullable @JsonProperty(PROP_ORIGIN_TYPE) OriginType originType,
      @Nullable @JsonProperty(PROP_PATH_ID) Integer pathId,
      @Nullable @JsonProperty(PROP_PROTOCOL) RoutingProtocol protocol,
      @Nullable @JsonProperty(PROP_RECEIVED_FROM) ReceivedFrom receivedFrom,
      @Nullable @JsonProperty(PROP_SRC_PROTOCOL) RoutingProtocol srcProtocol,
      @JsonProperty(PROP_TAG) long tag,
      @Nullable @JsonProperty(PROP_TUNNEL_ENCAPSULATION_ATTRIBUTE)
          TunnelEncapsulationAttribute tunnelEncapsulationAttribute,
      @JsonProperty(PROP_WEIGHT) int weight) {
    checkArgument(originatorIp != null, "Missing %s", PROP_ORIGINATOR_IP);
    checkArgument(originMechanism != null, "Missing %s", PROP_ORIGIN_MECHANISM);
    checkArgument(
        srcProtocol != null || (originMechanism != NETWORK && originMechanism != REDISTRIBUTE),
        "Local routes must have a source protocol");
    checkArgument(originType != null, "Missing %s", PROP_ORIGIN_TYPE);
    checkArgument(protocol != null, "Missing %s", PROP_PROTOCOL);
    checkArgument(receivedFrom != null, "Missing %s", PROP_RECEIVED_FROM);
    return new Bgpv4Route(
        BgpRouteAttributes.create(
            asPath,
            clusterList,
            firstNonNull(communities, CommunitySet.empty()),
            localPreference,
            med,
            originatorIp,
            originMechanism,
            originType,
            protocol,
            receivedFromRouteReflectorClient,
            srcProtocol,
            tunnelEncapsulationAttribute,
            weight),
        receivedFrom,
        network,
        NextHop.legacyConverter(nextHopInterface, nextHopIp),
        pathId,
        admin,
        tag,
        false,
        false);
  }

  // Private: all construction goes through Builder or jsonCreator.
  private Bgpv4Route(
      BgpRouteAttributes attributes,
      @Nonnull ReceivedFrom receivedFrom,
      @Nullable Prefix network,
      @Nonnull NextHop nextHop,
      @Nullable Integer pathId,
      int admin,
      long tag,
      boolean nonForwarding,
      boolean nonRouting) {
    super(
        network, nextHop, pathId, admin, attributes, receivedFrom, tag, nonForwarding, nonRouting);
  }

  public static Builder builder() {
    return new Builder();
  }

  /** Return a route builder with pre-filled mandatory values. To be used in tests only */
  @VisibleForTesting
  public static Builder testBuilder() {
    return builder()
        .setNextHop(NextHopDiscard.instance())
        .setOriginMechanism(LEARNED)
        .setOriginType(OriginType.IGP)
        .setOriginatorIp(Ip.parse("1.1.1.1"))
        .setAdmin(170)
        .setProtocol(RoutingProtocol.BGP)
        .setReceivedFrom(ReceivedFromSelf.instance());
  }

  /////// Keep #toBuilder, #equals, and #hashCode in sync ////////

  @Override
  public Builder toBuilder() {
    return builder()
        .setNetwork(getNetwork())
        .setAdmin(getAdministrativeCost())
        .setNonRouting(getNonRouting())
        .setNonForwarding(getNonForwarding())
        .setAsPath(_attributes._asPath)
        .setClusterList(_attributes._clusterList)
        .setCommunities(_attributes._communities)
        .setLocalPreference(_attributes._localPreference)
        .setMetric(_attributes._med)
        .setNextHop(_nextHop)
        .setOriginatorIp(_attributes._originatorIp)
        .setOriginMechanism(_attributes.getOriginMechanism())
        .setOriginType(_attributes.getOriginType())
        .setPathId(_pathId)
        .setProtocol(_attributes.getProtocol())
        .setReceivedFrom(_receivedFrom)
        .setReceivedFromRouteReflectorClient(_attributes._receivedFromRouteReflectorClient)
        .setSrcProtocol(_attributes.getSrcProtocol())
        .setTag(_tag)
        .setTunnelEncapsulationAttribute(_attributes._tunnelEncapsulationAttribute)
        .setWeight(_attributes._weight);
  }

  @Override
  public boolean equals(@Nullable Object o) {
    if (this == o) {
      return true;
    }
    if (!(o instanceof Bgpv4Route)) {
      return false;
    }
    Bgpv4Route other = (Bgpv4Route) o;
    // Cheap hash comparison first: two routes with both hashes computed and
    // unequal cannot be equal.
    return (_hashCode == other._hashCode || _hashCode == 0 || other._hashCode == 0)
        && _network.equals(other._network)
        && _nextHop.equals(other._nextHop)
        && Objects.equals(_pathId, other._pathId)
        && _attributes.equals(other._attributes)
        && _receivedFrom.equals(other._receivedFrom)
        // Things above this line are more likely to cause false earlier.
        && _admin == other._admin
        && _tag == other._tag
        && getNonRouting() == other.getNonRouting()
        && getNonForwarding() == other.getNonForwarding();
  }

  @Override
  public int hashCode() {
    // Lazily computed and cached; benign race since the value is deterministic.
    int h = _hashCode;
    if (h == 0) {
      h = _admin;
      h = h * 31 + _attributes.hashCode();
      h = h * 31 + _receivedFrom.hashCode();
      h = h * 31 + _network.hashCode();
      h = h * 31 + _nextHop.hashCode();
      h = h * 31 + (_pathId != null ? _pathId : 0);
      h = h * 31 + Boolean.hashCode(getNonForwarding());
      h = h * 31 + Boolean.hashCode(getNonRouting());
      h = h * 31 + Long.hashCode(_tag);
      _hashCode = h;
    }
    return h;
  }

  @Override
  public String toString() {
    return MoreObjects.toStringHelper(this)
        .omitNullValues()
        .add("_network", _network)
        .add("_admin", _admin)
        .add("_tag", _tag)
        .add("_asPath", _attributes._asPath)
        .add("_clusterList", _attributes._clusterList)
        .add("_communities", _attributes._communities)
        .add("_localPreference", _attributes._localPreference)
        .add("_med", _attributes._med)
        .add("_nextHop", _nextHop)
        .add("_originatorIp", _attributes._originatorIp)
        .add("_originMechanism", _attributes._originMechanism)
        .add("_originType", _attributes._originType)
        .add("_pathId", _pathId)
        .add("_protocol", _attributes._protocol)
        .add("_receivedFrom", _receivedFrom)
        .add("_receivedFromRouteReflectorClient", _attributes._receivedFromRouteReflectorClient)
        .add("_srcProtocol", _attributes._srcProtocol)
        .add("_tunnelEncapsulationAttribute", _attributes._tunnelEncapsulationAttribute)
        .add("_weight", _attributes._weight)
        .toString();
  }
}
| 27,853
|
https://github.com/gaoyangy/drygoods/blob/master/src/sass/sign/sign.sass
|
Github Open Source
|
Open Source
|
MIT
| null |
drygoods
|
gaoyangy
|
Sass
|
Code
| 23
| 87
|
// Sign-in panel. The earlier `background: #f0f6fa` declaration was dead code:
// it was immediately overridden by `background: #fff` below, so it has been
// removed without changing the rendered output.
.sign
  position: relative
  top: 50%
  margin-top: 160px
  border-radius: 5px
  padding-top: 45px
  background: #fff
  .title
    text-align: center
    margin-bottom: 15px
  .el-input
    margin-bottom: 15px
| 24,982
|
https://github.com/langsci/pypi/blob/master/langsci/wrapperscripts/wikicite.py
|
Github Open Source
|
Open Source
|
MIT
| 2,023
|
pypi
|
langsci
|
Python
|
Code
| 123
| 467
|
# Command-line tool: given a langsci-press.org book ID, scrape the catalog page
# and print a MediaWiki <ref>{{Cite book}} citation for it.
# Usage: python wikicite.py <book_ID>
import sys

from langsci.langscipressorg_webcrawler import get_soup, get_publication_date, get_citeinfo, get_ISBN_digital, get_title_subtitle, get_biosketches, biosketches2names

# Fetch and parse the catalog page for the requested book.
book_ID = sys.argv[1]
soup = get_soup(book_ID)
citegroups = get_citeinfo(soup)
biosketches = get_biosketches(soup)
# Bail out silently if no citation info could be extracted.
if citegroups is None:
    sys.exit()
title, subtitle = get_title_subtitle(citegroups)
series = citegroups["series"]
creatorlist = biosketches2names(biosketches)
# Vancouver-style creator list: 'veditors' for edited volumes, else 'vauthors'.
creatortype = 'vauthors'
if citegroups['ed']:
    creatortype = 'veditors'
# Build "Lastname FM" strings: last name followed by the initials of each
# given name (assumes creatormetadata is (firstnames, lastname) — TODO confirm).
vcreators = ", ".join([u"%s %s"%(creatormetadata[1],
                                 ''.join([firstname[0]
                                          for firstname
                                          in creatormetadata[0].split()
                                          ])
                                 )
                       for creatormetadata
                       in creatorlist])
creatorstring = f'{creatortype}={vcreators}'
isbn_digital = get_ISBN_digital(soup)
# Reference key: first creator's last name + publication year, e.g. "Smith2020".
key = vcreators.split()[0] + citegroups['year']
print(f"""<ref name = "{key}">{{{{Cite book
| {creatorstring}
| title = {title} {subtitle}
| place = Berlin
| publisher = Language Science Press
| date = {citegroups['year']}
| format = pdf
| url = http://langsci-press.org/catalog/book/{book_ID}
| doi = {citegroups['doi']}
| doi-access = free
| isbn = {isbn_digital}
}}}}
</ref>
""")
| 36,644
|
https://github.com/Doniol/IPASSV2/blob/master/html/search/functions_5.js
|
Github Open Source
|
Open Source
|
BSL-1.0
| 2,019
|
IPASSV2
|
Doniol
|
JavaScript
|
Code
| 5
| 49
|
// Doxygen-generated search index fragment: maps the search term 'lines' to its
// documentation anchor. Do not edit by hand; regenerated with the docs.
var searchData=
[
  ['lines',['lines',['../classlines.html#a56771c74942df50716476346cce07f8e',1,'lines']]]
];
| 36,227
|
https://github.com/annaelde/forum-app/blob/master/src/styles/_overrides.sass
|
Github Open Source
|
Open Source
|
MIT
| 2,022
|
forum-app
|
annaelde
|
Sass
|
Code
| 6
| 18
|
// Bulma theme overrides: remap framework color roles to this project's
// palette variables ($red/$blue/$neutral are defined elsewhere).
$primary: $red
$info: $blue
$link: $neutral
| 13,318
|
https://github.com/chechons/master-php/blob/master/master-php/aprendiendo-php/10-bucle-for/index.php
|
Github Open Source
|
Open Source
|
MIT
| null |
master-php
|
chechons
|
PHP
|
Code
| 29
| 87
|
<?php

/*
 * For-loop exercise: compute the sum of the integers 0 through 100
 * (the Gauss series, 5050) and print it as a heading.
 */

$resultado = 0;

// Accumulate every value of the series into $resultado.
foreach (range(0, 100) as $i) {
    $resultado += $i;
}

echo "<h1> El resultado es: $resultado </h1>";

?>
| 4,021
|
https://github.com/VegB/VLN-Transformer/blob/master/texar/torch/data/data/data_iterators.py
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,022
|
VLN-Transformer
|
VegB
|
Python
|
Code
| 3,533
| 10,646
|
# Copyright 2019 The Texar Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Various data iterator classes.
"""
# pylint: disable=protected-access
from typing import (
Any, Callable, Dict, Generic, Iterable, Iterator, List, Optional,
Sequence, Tuple, TypeVar, Union, Mapping)
import pkg_resources
import torch
from torch import __version__ as _torch_version # type: ignore
from torch.utils.data import DataLoader
from torch.utils.data import sampler as torch_sampler
from texar.torch.data.data.data_base import DatasetBase
from texar.torch.data.data.dataset_utils import Batch
from texar.torch.utils.types import MaybeSeq
from texar.torch.utils.utils import ceildiv, map_structure
# Parse the torch version string once into a comparable Version object, so the
# feature gates further down can use ordered comparisons.
_torch_version = pkg_resources.parse_version(_torch_version)

__all__ = [
    "DataIterator",
    "TrainTestDataIterator",
    "BatchingStrategy",
    "TokenCountBatchingStrategy",
]

# `Dict` is invariant, `Mapping` is not.
DatasetsType = Union[Mapping[str, DatasetBase], MaybeSeq[DatasetBase]]
# Type variable for the raw-example type produced by a dataset source.
Example = TypeVar('Example')
# pylint: disable=attribute-defined-outside-init
# TODO: Remove this when Pylint fixes the bug. If the `disable` directive is not
# added, Pylint incorrectly reports this error for `self.size` in subclasses of
# `SamplerBase` in Python 3.6 due to use of the Generic class.
# See Pylint issue: https://github.com/PyCQA/pylint/issues/2981
class SamplerBase(torch_sampler.Sampler, Generic[Example]):
    r"""A subclass of :torch_docs:`~torch.utils.data.Sampler
    <data.html#torch.utils.data.Sampler>` that supports:

    - Returning raw examples when required.
    - Creating iterators with unknown dataset size.

    This class is used internally in
    :class:`~texar.torch.data.data.DataIterator`. It calls the
    :meth:`~texar.torch.data.data.DatasetBase._prefetch_source` method to ensure
    the required number of raw examples are prefetched from source.

    Args:
        data: The :class:`~texar.torch.data.data.DatasetBase` instance.
    """

    # Dataset size; stays `None` for lazily-loaded datasets until the source
    # has been exhausted once and the true size is known.
    size: Optional[int]

    def __init__(self, data: DatasetBase[Any, Example]):
        super().__init__(data)
        self._data = data
        self.size = None

    def _iterator_given_size(self, size: int) -> Iterator[int]:
        r"""Return an iterator that generates samples when the dataset size
        is given.

        Args:
            size: The dataset size.
        """
        raise NotImplementedError

    def _iterator_unknown_size(self) -> Iterator[int]:
        r"""Return an iterator that generates samples when the dataset size
        is unknown. This iterator must also call
        :meth:`texar.torch.data.data.DatasetBase._prefetch_source` and check
        whether the dataset size can be determined, before yielding the index.
        See example implementations for details.
        """
        raise NotImplementedError

    def __iter__(self) -> Union[Iterator[int], Iterator[Tuple[int, Example]]]:
        r"""Return an iterator based on the dataset settings.
        """
        self.size = self._data._dataset_size
        if (not self._data._fully_cached or
                self._data._should_call_prefetch_source):
            self._data._start_iteration()
            # First epoch of lazy loading, calling prefetch, and returning
            # indices and examples.
            iterator = self._iterator_unknown_size()
        else:
            # Non-lazy loading, or when dataset has been fully iterated.
            assert self.size is not None
            iterator = self._iterator_given_size(self.size)
        if self._data._should_call_prefetch_processed:
            # Processing routine is performed in main process. Yield
            # processed examples instead.
            map_fn = lambda idx: (idx, self._data._processed_cache[idx])
        elif self._data._should_yield_raw_example:
            # Return indices and examples for any epoch in this case.
            map_fn = lambda idx: (idx, self._data._source[idx])
        else:
            map_fn = None  # type: ignore
        if map_fn is not None:
            # Wrap the index iterator so each yielded item is (index, example).
            return map(map_fn, iterator)
        return iterator

    def __len__(self):
        # Only meaningful once the size is known (eager datasets, or lazy
        # datasets after one full pass).
        if self.size is not None:
            return self.size
        raise AttributeError("Dataset size cannot be determined at this point")
class SequentialSampler(SamplerBase[Example]):
    r"""Samples elements sequentially, always in the same order. Same as
    :torch_docs:`~torch.utils.data.SequentialSampler
    <data.html#torch.utils.data.SequentialSampler>`
    """

    def _iterator_given_size(self, size: int) -> Iterator[int]:
        # Known size: walk the indices 0 .. size - 1 in order.
        yield from range(size)

    def _iterator_unknown_size(self) -> Iterator[int]:
        # Unknown size: prefetch one example per step; once the source reports
        # its true size, record it and stop.
        index = 0
        while True:
            reported_size = self._data._prefetch_source(index)
            if reported_size is not None:
                self.size = reported_size
                return
            yield index
            index += 1
class RandomSampler(SamplerBase[Example]):
    r"""Samples elements randomly. If without replacement, then sample from a
    shuffled dataset. If with replacement, then user can specify ``num_samples``
    to draw.

    This class uses :torch_docs:`torch.utils.data.RandomSampler
    <data.html#torch.utils.data.RandomSampler>` directly. Given the
    nature of such shuffling, it cannot be used for iterators with unknown size.

    Args:
        data: The :class:`~texar.torch.data.data.DatasetBase` instance.
        num_samples (int): number of samples to draw, default=len(dataset)
        replacement (bool): samples are drawn with replacement if `True`,
            default=False
    """

    def __init__(self, data: DatasetBase[Any, Example],
                 replacement: bool = False, num_samples: Optional[int] = None):
        super().__init__(data)
        # Delegate the actual shuffling to PyTorch's built-in sampler; this
        # requires len(data) to be known up-front.
        self._sampler = torch_sampler.RandomSampler(
            data, replacement, num_samples)

    def _iterator_given_size(self, size: int) -> Iterator[int]:
        del size  # not used
        return iter(self._sampler)

    def _iterator_unknown_size(self) -> Iterator[int]:
        # Full-permutation shuffling needs the dataset size; lazy datasets
        # must use BufferShuffleSampler instead.
        raise TypeError(
            "RandomSampler does not support lazy data loading. To perform "
            "shuffling with lazy loading, use BufferShuffleSampler.")
class BufferShuffleSampler(SamplerBase[Example]):
    r"""A :torch_docs:`~torch.utils.data.Sampler
    <data.html#torch.utils.data.Sampler>` that uses a shuffle buffer, as
    in TensorFlow. The buffer is first filled with data examples. Each time a
    sample is drawn from the buffer, and the drawn sample is replaced with the
    next data example.

    This class is used internally in
    :class:`~texar.torch.data.data.DataIterator`. It calls the
    :meth:`~texar.torch.data.data.DatasetBase._prefetch_source` method to ensure
    the required number of raw examples are prefetched from source.

    Args:
        data: The :class:`~texar.torch.data.data.DatasetBase` instance.
        buffer_size: The size of the shuffle buffer. Use larger buffer sizes for
            more uniformly-random shuffling.
    """

    def __init__(self, data: DatasetBase[Any, Example], buffer_size: int):
        super().__init__(data)
        self.buffer_size = buffer_size

    def _iterator_given_size(self, size) -> Iterator[int]:
        # Degenerate case: buffer covers the whole dataset, so this is a full
        # random permutation.
        if self.buffer_size >= size:
            yield from iter(torch.randperm(size).tolist())
            return

        # Fill the buffer with the first buffer_size indices; each draw is
        # replaced by the next unseen index x.
        buffer = list(range(self.buffer_size))
        for x in range(self.buffer_size, size):
            sample = torch.randint(self.buffer_size, (1,)).item()
            index = buffer[sample]
            yield index
            buffer[sample] = x
        # Source exhausted: drain the remaining buffer in random order.
        yield from (buffer[x] for x in torch.randperm(self.buffer_size))

    def _iterator_unknown_size(self) -> Iterator[int]:
        buffer = list(range(self.buffer_size))
        x = self.buffer_size
        while True:
            sample = torch.randint(self.buffer_size, (1,)).item()
            index = buffer[sample]
            cur_size = self._data._prefetch_source(index)
            # NOTE(review): self.size is only recorded when the drawn index is
            # already past the reported size — confirm this is intentional.
            if cur_size is not None and index >= cur_size:
                self.size = cur_size
            if self.size is not None and index >= self.size:
                break
            yield index
            buffer[sample] = x
            x += 1
        # Drain the buffer, skipping placeholder indices past the real size.
        yield from (buffer[x] for x in torch.randperm(self.buffer_size)
                    if buffer[x] < self.size)
# pylint: enable=attribute-defined-outside-init
class BatchingStrategy(Generic[Example]):
    r"""Decides batch boundaries in dynamic batching. Please refer to
    :class:`TokenCountBatchingStrategy` for a concrete example.
    """

    def reset_batch(self) -> None:
        r"""Reset the internal state of the batching strategy. This method is
        called at the start of iteration, and after each batch is yielded.
        """
        raise NotImplementedError

    def add_example(self, example: Example) -> bool:
        r"""Add an example into the current batch, and modify internal states
        accordingly. If the example should not be added to the batch, this
        method does not modify the internal state, and returns `False`.

        Args:
            example: The example to add to the batch.

        Returns:
            A boolean value indicating whether :attr:`example` should be added
            to the batch.
        """
        raise NotImplementedError
class TokenCountBatchingStrategy(BatchingStrategy[Example]):
    r"""Create dynamically-sized batches so that the total number of tokens
    inside each batch is constrained.

    Args:
        max_tokens (int): The maximum number of tokens inside each batch.
        max_batch_size (int, optional): The maximum number of examples for each
            batch. If `None`, batches can contain arbitrary number of examples
            as long as the total number of tokens does not exceed
            :attr:`max_tokens`.
        length_fn (callable, optional): A function taking a data example as
            argument, and returning the number of tokens in the example. By
            default, :python:`len` is used, which is the desired behavior if the
            dataset in question is a :class:`~texar.torch.data.MonoTextData`.
    """

    # Running totals for the batch currently being assembled.
    sum_tokens: int
    cur_batch_size: int

    def __init__(self, max_tokens: int, max_batch_size: Optional[int] = None,
                 length_fn: Optional[Callable[[Example], int]] = None):
        self.max_batch_size = max_batch_size
        self.max_tokens = max_tokens
        self.length_fn: Callable[[Example], int]
        self.length_fn = length_fn or len  # type: ignore

    def reset_batch(self) -> None:
        # Start a fresh, empty batch.
        self.sum_tokens = 0
        self.cur_batch_size = 0

    def add_example(self, example: Example) -> bool:
        # Refuse once the example-count cap is reached.
        if self.cur_batch_size == self.max_batch_size:
            return False
        # Refuse if adding this example would overflow the token budget.
        example_tokens = self.length_fn(example)
        if self.sum_tokens + example_tokens > self.max_tokens:
            return False
        # Accept: fold the example into the running totals.
        self.cur_batch_size += 1
        self.sum_tokens += example_tokens
        return True
class DynamicBatchSampler(torch_sampler.BatchSampler, Generic[Example]):
    r"""A subclass of :torch_docs:`~torch.utils.data.BatchSampler
    <data.html#torch.utils.data.BatchSampler>` that supports dynamic batching
    through a user-provided :class:`BatchingStrategy`. This class is used
    internally.

    Args:
        dataset: The dataset to create batches from.
        sampler: An instance of :class:`SamplerBase` that returns indices of
            each sampled example.
        strategy: An instance of :class:`BatchingStrategy` that decides whether
            a batch should be yielded.
    """

    def __init__(self, dataset: DatasetBase[Any, Example],  # pylint: disable=super-init-not-called
                 sampler: SamplerBase, strategy: BatchingStrategy[Example]):
        self.dataset = dataset
        self.sampler = sampler
        self.strategy = strategy

    def __iter__(self) -> Union[Iterator[List[int]],  # type: ignore
                                Iterator[List[Tuple[int, Example]]]]:
        batch = []  # type: ignore
        self.strategy.reset_batch()
        for idx in self.sampler:
            # The sampler may yield either bare indices or (index, example)
            # pairs; either way, look up the example for the strategy.
            if isinstance(idx, tuple):
                example = self.dataset[idx[0]]
            else:
                example = self.dataset[idx]
            # If the strategy refuses the example, flush the current batch and
            # retry against a fresh one.
            while not self.strategy.add_example(example):
                if len(batch) == 0:
                    # A single example exceeds the strategy's limits.
                    raise ValueError(f"Batching strategy refused to add "
                                     f"example {idx} to empty batch.")
                yield batch
                batch = []
                self.strategy.reset_batch()
            batch.append(idx)
        # Flush the trailing partial batch, if any.
        if len(batch) > 0:
            yield batch
            self.strategy.reset_batch()

    def __len__(self):
        # Batch boundaries are data-dependent, so the count is unknowable.
        raise TypeError("DynamicBatchSampler does not support __len__")
# pylint: disable=ungrouped-imports
if _torch_version >= pkg_resources.parse_version("1.2.0"): # PyTorch 1.2.0 +
from torch.utils.data._utils.pin_memory import ( # type: ignore
pin_memory as _pin_memory)
elif _torch_version >= pkg_resources.parse_version("1.1.0"): # PyTorch 1.1.0 +
from torch.utils.data._utils.pin_memory import ( # type: ignore
pin_memory_batch as _pin_memory)
else:
from torch.utils.data.dataloader import ( # type: ignore
pin_memory_batch as _pin_memory)
def move_memory(data, device):
    r"""Return ``data`` with every contained tensor moved onto ``device``.

    Non-tensor leaves are passed through unchanged. :class:`Batch` objects
    are rebuilt field-by-field so the batch size is preserved.
    """
    def _transfer(value):
        if not isinstance(value, torch.Tensor):
            return value
        return value.to(device=device, non_blocking=True)

    if isinstance(data, Batch):
        moved = {key: map_structure(_transfer, value)
                 for key, value in data.items()}
        return Batch(len(data), batch=moved)
    return map_structure(_transfer, data)
if _torch_version >= pkg_resources.parse_version("1.2.0"): # PyTorch 1.2.0 +
# PyTorch 1.2 split the `_DataLoaderIter` class into two:
# `_SingleProcessDataLoaderIter` for when `num_workers == 0`, i.e. when
# multi-processing is disabled; `_MultiProcessingDataLoaderIter` for
# otherwise. The implementation is also slightly different from previous
# releases.
#
# To keep compatibility, our iterator classes should be a subclass of both
# PyTorch `_Single...`/`_Multi...` (for single/multi-process), and our own
# `_Cache...`/`_Data...` (for caching/no caching). This results in four
# different concrete classes, as this regex shows:
# `_[SM]P(Cache)?DataLoaderIter`.
#
# We only expose `_DataLoaderIter` and `_CacheDataLoaderIter` to other
# classes, and construct concrete classes in their `__new__` methods
# depending on the value of `num_workers`. This is for compatibility with
# previous versions, so we don't need to change other parts of the code.
from torch.utils.data.dataloader import ( # type: ignore
_BaseDataLoaderIter, _SingleProcessDataLoaderIter,
_MultiProcessingDataLoaderIter)
class _DataLoaderIter(_BaseDataLoaderIter):
    r"""Iterates once over the DataLoader's dataset. This is almost
    identical to PyTorch
    :class:`torch.utils.data.dataloader._BaseDataLoaderIter`, except that we
    check `allow_smaller_final_batch` here. This is because using
    `drop_last` in :class:`~torch.utils.data.sampler.BatchSampler` would
    cause the dataset to not load/process/cache certain elements from the
    final batch, which complicates the already complex logic.
    """

    def __new__(cls, loader: 'SingleDatasetIterator'):
        # Dispatch to the single-/multi-process concrete subclass so callers
        # can instantiate `_DataLoaderIter` uniformly.
        if loader.num_workers > 0:
            return super().__new__(_MPDataLoaderIter)
        else:
            return super().__new__(_SPDataLoaderIter)

    def __init__(self, loader: 'SingleDatasetIterator'):
        self.device = loader.device
        # May be None when dynamic batching is in use.
        self._batch_size = loader.batch_size
        super().__init__(loader)

    def __next__(self):
        batch = super().__next__()
        # Drop smaller final batch according to settings. Note that
        # `_batch_size` could be None if dynamic batching is used.
        if (self._batch_size is not None and
                batch.batch_size < self._batch_size and
                not self.dataset.hparams.allow_smaller_final_batch):
            raise StopIteration
        if self.device is not None:
            batch = move_memory(batch, self.device)
        return batch


class _SPDataLoaderIter(_DataLoaderIter, _SingleProcessDataLoaderIter):
    # Single-process variant; all behavior comes from the two bases.
    pass


class _MPDataLoaderIter(_DataLoaderIter, _MultiProcessingDataLoaderIter):
    # Multi-process variant; all behavior comes from the two bases.
    pass


class _CacheDataLoaderIter(_BaseDataLoaderIter):
    r"""Iterates once over the DataLoader's dataset. This class is used when
    examples are processed and returned by worker processes. We need to
    record the corresponding indices of each batch, call
    :meth:`texar.torch.data.data.DatasetBase._add_cached_examples` to cache
    the processed examples, and return only the
    :class:`~texar.torch.data.data.Batch` instance to the user.
    """

    def __new__(cls, loader: 'SingleDatasetIterator'):
        if loader.num_workers > 0:
            return super().__new__(_MPCacheDataLoaderIter)
        else:
            return super().__new__(_SPCacheDataLoaderIter)

    def __init__(self, loader: 'SingleDatasetIterator'):
        # Maps `send_idx` of each in-flight batch to the example indices it
        # contains, so processed examples can be cached when results arrive.
        self._indices_dict: Dict[int, List[int]] = {}
        self._batch_size = loader.batch_size
        self.device = loader.device
        super().__init__(loader)


class _SPCacheDataLoaderIter(_CacheDataLoaderIter,
                             _SingleProcessDataLoaderIter):

    def __next__(self):
        index = self._next_index()  # may raise StopIteration
        data = self.dataset_fetcher.fetch(index)  # may raise StopIteration
        if self.dataset._should_yield_raw_example:
            # Entries are (index, example) tuples here; keep only the index.
            index = [idx[0] for idx in index]
        examples, data = data
        self.dataset._add_cached_examples(index, examples)
        if self.pin_memory:
            data = move_memory(_pin_memory(data), self.device)
        return data


class _MPCacheDataLoaderIter(_CacheDataLoaderIter,
                             _MultiProcessingDataLoaderIter):
    dataset: DatasetBase
    worker_queue_idx: int  # so that Pylint gives no errors

    def _try_put_index(self):
        # Mirrors the base-class implementation, but additionally records
        # the dispatched example indices in `_indices_dict`.
        assert self.tasks_outstanding < 2 * self.num_workers

        try:
            index = self._next_index()
        except StopIteration:
            return
        for _ in range(self.num_workers):  # find next active worker, if any
            worker_queue_idx = next(self.worker_queue_idx_cycle)
            if self.workers_status[worker_queue_idx]:
                break
        else:
            # not found (i.e., didn't break)
            return

        self.index_queues[worker_queue_idx].put((self.send_idx, index))
        if self.dataset._should_yield_raw_example:
            index = [idx[0] for idx in index]
        self._indices_dict[self.send_idx] = index
        self.task_info[self.send_idx] = (worker_queue_idx,)
        self.tasks_outstanding += 1
        self.send_idx += 1

    def _process_data(self, batch):
        batch = super()._process_data(batch)
        # The base class has already advanced `rcvd_idx`, hence the `- 1`.
        indices = self._indices_dict[self.rcvd_idx - 1]
        del self._indices_dict[self.rcvd_idx - 1]
        examples, batch = batch
        self.dataset._add_cached_examples(indices, examples)
        return batch

    def __next__(self):
        batch = super().__next__()
        # NOTE(review): this compares against `self.dataset.batch_size`
        # whereas the sibling iterators compare against `self._batch_size`;
        # looks inconsistent when dynamic batching is in use -- confirm.
        if (self._batch_size is not None and
                batch.batch_size < self.dataset.batch_size and
                not self.dataset.hparams.allow_smaller_final_batch):
            raise StopIteration
        batch = move_memory(batch, self.device)
        return batch
else:
# PyTorch 1.1 and lower defines only the class `_DataLoaderIter` for
# iterating over `DataLoader`.
from torch.utils.data.dataloader import ( # type: ignore
_DataLoaderIter as torch_DataLoaderIter)
class _DataLoaderIter(torch_DataLoaderIter):  # type: ignore
    r"""Iterates once over the DataLoader's dataset. This is almost
    identical to PyTorch
    :class:`torch.utils.data.dataloader._DataLoaderIter`, except that we
    check `allow_smaller_final_batch` here. This is because using
    `drop_last` in :class:`~torch.utils.data.sampler.BatchSampler` would
    cause the dataset to not load/process/cache certain elements from the
    final batch, which complicates the already complex logic.
    """

    def __init__(self, loader: 'SingleDatasetIterator'):
        # May be None when dynamic batching is in use.
        self._batch_size = loader.batch_size
        self.device = loader.device
        super().__init__(loader)

    def __next__(self):
        batch = super().__next__()
        # Drop smaller final batch according to settings. Note that
        # `_batch_size` could be None if dynamic batching is used.
        if (self._batch_size is not None and
                batch.batch_size < self._batch_size and
                not self.dataset.hparams.allow_smaller_final_batch):
            raise StopIteration
        batch = move_memory(batch, self.device)
        return batch


class _CacheDataLoaderIter(torch_DataLoaderIter):  # type: ignore
    r"""Iterates once over the DataLoader's dataset. This class is used when
    examples are processed and returned by worker processes. We need to
    record the corresponding indices of each batch, call
    :meth:`texar.torch.data.data.DatasetBase._add_cached_examples` to cache
    the processed examples, and return only the
    :class:`~texar.torch.data.data.Batch` instance to the user.
    """
    dataset: DatasetBase
    worker_queue_idx: int  # so that Pylint gives no errors

    def __init__(self, loader: 'SingleDatasetIterator'):
        # Maps `send_idx` of each in-flight batch to the example indices it
        # contains, so processed examples can be cached when results arrive.
        self._indices_dict: Dict[int, List[int]] = {}
        self._batch_size = loader.batch_size
        self.device = loader.device
        super().__init__(loader)

    def _put_indices(self):
        # Mirrors the base-class implementation, but additionally records
        # the dispatched example indices in `_indices_dict`.
        assert self.batches_outstanding < 2 * self.num_workers
        indices = next(self.sample_iter, None)
        if indices is None:
            return
        self.index_queues[self.worker_queue_idx].put(
            (self.send_idx, indices))
        if self.dataset._should_yield_raw_example:
            indices = [index[0] for index in indices]
        self._indices_dict[self.send_idx] = indices
        # Round-robin over worker queues.
        self.worker_queue_idx = ((self.worker_queue_idx + 1) %
                                 self.num_workers)
        self.batches_outstanding += 1
        self.send_idx += 1

    def _process_next_batch(self, batch):
        batch = super()._process_next_batch(batch)
        # The base class has already advanced `rcvd_idx`, hence the `- 1`.
        indices = self._indices_dict[self.rcvd_idx - 1]
        del self._indices_dict[self.rcvd_idx - 1]
        examples, batch = batch
        self.dataset._add_cached_examples(indices, examples)
        return batch

    def __next__(self):
        if self.num_workers == 0:  # same-process loading
            indices = next(self.sample_iter)  # may raise StopIteration
            batch = self.collate_fn([self.dataset[i] for i in indices])
            if self.dataset._should_yield_raw_example:
                indices = [index[0] for index in indices]
            examples, batch = batch
            self.dataset._add_cached_examples(indices, examples)
            if self.pin_memory:
                batch = _pin_memory(batch)
        else:
            batch = super().__next__()
        # NOTE(review): compares against `self.dataset.batch_size` while the
        # plain iterator compares against `self._batch_size` -- confirm.
        if (self._batch_size is not None and
                batch.batch_size < self.dataset.batch_size and
                not self.dataset.hparams.allow_smaller_final_batch):
            raise StopIteration
        batch = move_memory(batch, self.device)
        return batch
class SingleDatasetIterator(DataLoader):
    r"""Iterator for a single dataset. This iterator is based on the PyTorch
    :class:`~torch.utils.data.DataLoader` interface, with a custom shuffling
    routine. This class is used internally.

    Args:
        dataset: The dataset to iterator through. The dataset must be an
            instance of :class:`texar.torch.data.DatasetBase`, because
            configurations are read from the dataset `HParams`.
        batching_strategy: The batching strategy to use when performing dynamic
            batching. If `None`, fixed-sized batching is used.
        pin_memory: If `True`, tensors will be moved onto page-locked memory
            before returning. This argument is passed into the constructor for
            :torch_docs:`DataLoader <data.html#torch.utils.data.DataLoader>`.
            Defaults to `None`, which will set the value to `True` if the
            :class:`~texar.torch.data.DatasetBase` instance is set to use a CUDA
            device. Set to `True` or `False` to override this behavior.
    """
    dataset: DatasetBase

    def __init__(self, dataset: DatasetBase,
                 batching_strategy: Optional[BatchingStrategy] = None,
                 pin_memory: Optional[bool] = None):
        shuffle = dataset.hparams.shuffle
        shuffle_buffer_size = dataset.hparams.shuffle_buffer_size
        sampler: SamplerBase
        # Pick the sampling order: buffered shuffle, full shuffle, or
        # sequential, based on the dataset's hyperparameters.
        if shuffle and shuffle_buffer_size is not None:
            sampler = BufferShuffleSampler(dataset, shuffle_buffer_size)
        elif shuffle:
            sampler = RandomSampler(dataset)
        else:
            sampler = SequentialSampler(dataset)

        num_workers = dataset.hparams.num_parallel_calls
        collate_fn = dataset._collate_and_maybe_return
        is_cuda = dataset.device is not None and dataset.device.type == "cuda"
        # Default pin_memory to True only when the dataset lives on CUDA.
        if pin_memory is None:
            pin_memory = is_cuda
        # Target device for batches; None means leave batches where they are.
        self.device = None
        if pin_memory and is_cuda:
            self.device = dataset.device

        if batching_strategy is not None:
            # Dynamic batching: batch boundaries decided by the strategy.
            batch_sampler = DynamicBatchSampler(
                dataset, sampler, batching_strategy)
            super().__init__(
                dataset, batch_sampler=batch_sampler,
                collate_fn=collate_fn, num_workers=num_workers,
                pin_memory=pin_memory)
        else:
            super().__init__(
                dataset, batch_size=dataset.batch_size, drop_last=False,
                sampler=sampler, collate_fn=collate_fn, num_workers=num_workers,
                pin_memory=pin_memory)

    def __iter__(self):
        if self.dataset._should_return_processed_examples:
            # Accepts processed examples from workers and add to dataset cache.
            return _CacheDataLoaderIter(self)
        else:
            return _DataLoaderIter(self)

    def __len__(self):
        if self.batch_size is None:
            raise TypeError("__len__ not supported for dynamic batching")
        data_length = len(self.dataset)  # may throw TypeError
        # Count the final partial batch only when it will actually be yielded.
        if self.dataset.hparams.allow_smaller_final_batch:
            return ceildiv(data_length, self.batch_size)
        return data_length // self.batch_size
class DataIterator:
    r"""Data iterator that switches and iterates through multiple datasets.

    This is a wrapper of :class:`~texar.torch.data.SingleDatasetIterator`.

    Args:
        datasets: Datasets to iterate through. This can be:

            - A single instance of :class:`~texar.torch.data.DatasetBase`.
            - A `dict` that maps dataset name to instances of
              :class:`~texar.torch.data.DatasetBase`.
            - A `list` of instances of :class:`texar.torch.data.DatasetBase`.
              The name of instances (:attr:`texar.torch.data.DatasetBase.name`)
              must be unique.

        batching_strategy: The batching strategy to use when performing dynamic
            batching. If `None`, fixed-sized batching is used.
        pin_memory: If `True`, tensors will be moved onto page-locked memory
            before returning. This argument is passed into the constructor for
            :torch_docs:`DataLoader <data.html#torch.utils.data.DataLoader>`.
            Defaults to `None`, which will set the value to `True` if the
            :class:`~texar.torch.data.DatasetBase` instance is set to use a CUDA
            device. Set to `True` or `False` to override this behavior.

    Example:

        Create an iterator over two datasets and generating fixed-sized batches:

        .. code-block:: python

            train_data = MonoTextData(hparams_train)
            test_data = MonoTextData(hparams_test)
            iterator = DataIterator({'train': train_data, 'test': test_data})

            for epoch in range(200):  # Run 200 epochs of train/test
                # Starts iterating through training data from the beginning.
                iterator.switch_to_dataset('train')
                for batch in iterator:
                    ...  # Do training with the batch.

                # Starts iterating through test data from the beginning
                for batch in iterator.get_iterator('test'):
                    ...  # Do testing with the batch.

        Dynamic batching based on total number of tokens:

        .. code-block:: python

            iterator = DataIterator(
                {'train': train_data, 'test': test_data},
                batching_strategy=TokenCountBatchingStrategy(max_tokens=1000))

        Dynamic batching with custom strategy (e.g. total number of tokens in
        examples from :class:`~texar.torch.data.PairedTextData`, including
        padding):

        .. code-block:: python

            class CustomBatchingStrategy(BatchingStrategy):
                def __init__(self, max_tokens: int):
                    self.max_tokens = max_tokens
                    self.reset_batch()

                def reset_batch(self) -> None:
                    self.max_src_len = 0
                    self.max_tgt_len = 0
                    self.cur_batch_size = 0

                def add_example(self, ex: Tuple[List[str], List[str]]) -> bool:
                    max_src_len = max(self.max_src_len, len(ex[0]))
                    max_tgt_len = max(self.max_tgt_len, len(ex[1]))
                    if ((max_src_len + max_tgt_len) *
                            (self.cur_batch_size + 1) > self.max_tokens):
                        return False
                    self.max_src_len = max_src_len
                    self.max_tgt_len = max_tgt_len
                    self.cur_batch_size += 1
                    return True

            iterator = DataIterator(
                {'train': train_data, 'test': test_data},
                batching_strategy=CustomBatchingStrategy(max_tokens=1000))
    """

    # TODO: Think about whether we should support save/load.

    def __init__(self, datasets: DatasetsType,
                 batching_strategy: Optional[BatchingStrategy] = None,
                 pin_memory: Optional[bool] = None):
        self._default_dataset_name = 'data'
        # Normalize the input into a name -> dataset mapping.
        if isinstance(datasets, DatasetBase):
            datasets = {self._default_dataset_name: datasets}
        elif isinstance(datasets, Sequence):
            if any(not isinstance(d, DatasetBase) for d in datasets):
                raise ValueError("`datasets` must be an non-empty list of "
                                 "`texar.torch.data.DatasetBase` instances.")
            num_datasets = len(datasets)
            datasets = {d.name: d for d in datasets}
            # Duplicate names silently collapse dict entries; detect that.
            if len(datasets) < num_datasets:
                raise ValueError("Names of datasets must be unique.")

        _datasets = {
            name: SingleDatasetIterator(dataset, batching_strategy, pin_memory)
            for name, dataset in datasets.items()}
        self._datasets = _datasets

        if len(self._datasets) <= 0:
            raise ValueError("`datasets` must not be empty.")

        self._current_dataset_name: Optional[str] = None

    @property
    def num_datasets(self) -> int:
        r"""Number of datasets.
        """
        return len(self._datasets)

    @property
    def dataset_names(self) -> List[str]:
        r"""A list of dataset names.
        """
        return list(self._datasets.keys())

    def _validate_dataset_name(self, dataset_name: Optional[str]) -> str:
        r"""Validate the provided dataset name, and return the validated name.
        """
        if dataset_name is None:
            if self.num_datasets > 1:
                raise ValueError("`dataset_name` is required if there are "
                                 "more than one datasets.")
            dataset_name = next(iter(self._datasets))
        if dataset_name not in self._datasets:
            raise ValueError("Dataset not found: ", dataset_name)
        return dataset_name

    def switch_to_dataset(self, dataset_name: Optional[str] = None):
        r"""Re-initializes the iterator of a given dataset and starts iterating
        over the dataset (from the beginning).

        Args:
            dataset_name (optional): Name of the dataset. If not provided,
                there must be only one Dataset.
        """
        self._current_dataset_name = self._validate_dataset_name(dataset_name)

    def get_iterator(self,
                     dataset_name: Optional[str] = None) -> Iterator[Batch]:
        r"""Re-initializes the iterator of a given dataset and starts iterating
        over the dataset (from the beginning).

        Args:
            dataset_name (optional): Name of the dataset. If not provided,
                there must be only one Dataset.
        """
        if dataset_name is not None or self._current_dataset_name is None:
            dataset_name = self._validate_dataset_name(dataset_name)
        elif self._current_dataset_name is not None:
            dataset_name = self._current_dataset_name
        else:
            # NOTE(review): the two branches above cover all cases, so this
            # is unreachable; kept for defensiveness.
            raise ValueError("No dataset is selected.")
        return iter(self._datasets[dataset_name])

    def __iter__(self) -> Iterator[Batch]:
        r"""Returns the iterator for the currently selected or default dataset.
        """
        return self.get_iterator()

    def __len__(self):
        # NOTE(review): always validates with None, so this raises when more
        # than one dataset is registered, even if one is selected -- confirm.
        return len(self._datasets[self._validate_dataset_name(None)])
class TrainTestDataIterator(DataIterator):
    r"""Data iterator that alternates between training, validation, and test
    datasets.

    :attr:`train`, :attr:`val`, and :attr:`test` are instances of
    :class:`~texar.torch.data.DatasetBase`, and at least one of them must be
    provided. This is a thin wrapper of
    :class:`~texar.torch.data.DataIterator` that registers the datasets under
    the fixed names ``'train'``, ``'val'``, and ``'test'``.

    Args:
        train (optional): Training data.
        val (optional): Validation data.
        test (optional): Test data.
        batching_strategy: The batching strategy to use when performing
            dynamic batching. If `None`, fixed-sized batching is used.
        pin_memory: If `True`, tensors will be moved onto page-locked memory
            before returning. Defaults to `None`, which enables pinning when
            the dataset is set to use a CUDA device. Set to `True` or `False`
            to override this behavior.

    Example:

        .. code-block:: python

            train_data = MonoTextData(hparams_train)
            val_data = MonoTextData(hparams_val)
            iterator = TrainTestDataIterator(train=train_data, val=val_data)

            for epoch in range(200):  # Run 200 epochs of train/val
                # Starts iterating through training data from the beginning.
                iterator.switch_to_train_data()
                for batch in iterator:
                    ...  # Do training with the batch.

                # Starts iterating through val data from the beginning.
                for batch in iterator.get_val_iterator():
                    ...  # Do validation on the batch.
    """

    def __init__(self, train: Optional[DatasetBase] = None,
                 val: Optional[DatasetBase] = None,
                 test: Optional[DatasetBase] = None,
                 batching_strategy: Optional[BatchingStrategy] = None,
                 pin_memory: Optional[bool] = None):
        self._train_name = 'train'
        self._val_name = 'val'
        self._test_name = 'test'
        # Keep only the splits that were actually supplied, in train/val/test
        # order.
        dataset_dict = {
            name: dataset
            for name, dataset in [(self._train_name, train),
                                  (self._val_name, val),
                                  (self._test_name, test)]
            if dataset is not None
        }
        if len(dataset_dict) == 0:
            raise ValueError("At least one of `train`, `val`, and `test` "
                             "must be provided.")
        super().__init__(dataset_dict, batching_strategy, pin_memory)

    def _require_dataset(self, name: str, label: str) -> None:
        # Raise a descriptive error when the requested split was not given.
        if name not in self._datasets:
            raise ValueError(f"{label} data not provided.")

    def switch_to_train_data(self) -> None:
        r"""Switch to training data."""
        self._require_dataset(self._train_name, "Training")
        self.switch_to_dataset(self._train_name)

    def switch_to_val_data(self) -> None:
        r"""Switch to validation data."""
        self._require_dataset(self._val_name, "Validation")
        self.switch_to_dataset(self._val_name)

    def switch_to_test_data(self) -> None:
        r"""Switch to test data."""
        self._require_dataset(self._test_name, "Test")
        self.switch_to_dataset(self._test_name)

    def get_train_iterator(self) -> Iterable[Batch]:
        r"""Obtain an iterator over training data."""
        self._require_dataset(self._train_name, "Training")
        return self.get_iterator(self._train_name)

    def get_val_iterator(self) -> Iterable[Batch]:
        r"""Obtain an iterator over validation data."""
        self._require_dataset(self._val_name, "Validation")
        return self.get_iterator(self._val_name)

    def get_test_iterator(self) -> Iterable[Batch]:
        r"""Obtain an iterator over test data."""
        self._require_dataset(self._test_name, "Test")
        return self.get_iterator(self._test_name)
| 1,678
|
https://github.com/i17c/mdrill/blob/master/trunk/adhoc-solr/src/main/java/com/alimama/mdrill/solr/realtime/MessageStore.java
|
Github Open Source
|
Open Source
|
ICU, Apache-2.0
| 2,017
|
mdrill
|
i17c
|
Java
|
Code
| 1,163
| 3,423
|
package com.alimama.mdrill.solr.realtime;
import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.channels.FileChannel;
import java.text.NumberFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import org.apache.log4j.Logger;
public class MessageStore implements Closeable {
private final static Logger log = Logger.getLogger(MessageStore.class);
private static final String FILE_SUFFIX = ".binlog";
static class Segment {
    // Absolute offset of the first byte stored in this segment file.
    final long start;
    final File file;
    FileMessageSet fileMessageSet;

    /**
     * Opens (creating if absent) the backing binlog file for this segment.
     *
     * @param start   absolute base offset of this segment
     * @param file    backing file on disk
     * @param mutable whether the segment accepts appends
     */
    public Segment(final long start, final File file, final boolean mutable) {
        super();
        this.start = start;
        this.file = file;
        log.warn("create segment " + this.file.getAbsolutePath());
        try {
            final FileChannel channel = new RandomAccessFile(this.file, "rw").getChannel();
            this.fileMessageSet = new FileMessageSet(channel, 0, channel.size(), mutable);
        }
        catch (final IOException e) {
            // NOTE(review): the exception is swallowed, leaving
            // fileMessageSet null; later calls such as size() would then
            // throw NullPointerException. Consider rethrowing.
            log.error("create Segment", e);
        }
    }

    /** Number of payload bytes currently stored in this segment. */
    public long size() {
        return this.fileMessageSet.highWaterMark();
    }

    /**
     * Whether {@code offset} falls inside this segment: either the segment
     * is empty and the offset equals its base, or the offset lies within
     * [start, start + size - 1].
     */
    public boolean contains(final long offset) {
        return this.size() == 0 && offset == this.start
                || this.size() > 0 && offset >= this.start
                && offset <= this.start + this.size() - 1;
    }
}
static class SegmentList {
    // Immutable snapshot array swapped atomically via CAS, so readers get a
    // consistent view without any locking (copy-on-write).
    AtomicReference<Segment[]> contents = new AtomicReference<Segment[]>();

    public SegmentList(final Segment[] s) {
        this.contents.set(s);
    }

    public SegmentList() {
        super();
        this.contents.set(new Segment[0]);
    }

    // Copy-on-write append, retried until the CAS succeeds.
    public void append(final Segment segment) {
        while (true) {
            final Segment[] curr = this.contents.get();
            final Segment[] update = new Segment[curr.length + 1];
            System.arraycopy(curr, 0, update, 0, curr.length);
            update[curr.length] = segment;
            if (this.contents.compareAndSet(curr, update)) {
                return;
            }
        }
    }

    // Copy-on-write removal by reference identity; no-op if absent.
    public void delete(final Segment segment) {
        while (true) {
            final Segment[] curr = this.contents.get();
            int index = -1;
            for (int i = 0; i < curr.length; i++) {
                if (curr[i] == segment) {
                    index = i;
                    break;
                }
            }
            if (index == -1) {
                return;
            }
            final Segment[] update = new Segment[curr.length - 1];
            System.arraycopy(curr, 0, update, 0, index);
            if (index + 1 < curr.length) {
                System.arraycopy(curr, index + 1, update, index, curr.length - index - 1);
            }
            if (this.contents.compareAndSet(curr, update)) {
                return;
            }
        }
    }

    // Current immutable snapshot; callers must not mutate the array.
    public Segment[] view() {
        return this.contents.get();
    }

    // Newest segment, or null when empty. Works on a snapshot so the length
    // check and the access agree.
    public Segment last() {
        final Segment[] copy = this.view();
        if (copy.length > 0) {
            return copy[copy.length - 1];
        }
        return null;
    }

    // Oldest segment, or null when empty.
    public Segment first() {
        final Segment[] copy = this.view();
        if (copy.length > 0) {
            return copy[0];
        }
        return null;
    }
}
private SegmentList segments;
private final File partitionDir;
// Maximum number of segment files retained; older segments are dropped
// when rolling (see roll()).
private int maxSegmentsCount = 4;

public int getMaxSegmentsCount() {
    return maxSegmentsCount;
}

public void setMaxSegmentsCount(int maxSegmentsCount) {
    this.maxSegmentsCount = maxSegmentsCount;
}

// Opens the per-partition binlog directory; a fresh store starts at offset 0.
public MessageStore(final String dataPath) throws IOException {
    this(dataPath, 0);
}

public MessageStore(final String dataPath, final long offsetIfCreate) throws IOException {
    this.partitionDir = new File(dataPath);
    this.checkDir(this.partitionDir);
    this.loadSegments(offsetIfCreate);
}
@Override
public void close() throws IOException {
    // Release every backing file channel, including the mutable tail.
    for (final Segment seg : this.segments.view()) {
        seg.fileMessageSet.close();
    }
}
// Scans the partition directory for *.binlog files, opens them read-only,
// sorts them by base offset, and reopens the newest one in mutable mode so
// appends can continue where the log left off. Creates a first segment at
// offsetIfCreate when the directory is empty.
private void loadSegments(final long offsetIfCreate) throws IOException {
    final List<Segment> accum = new ArrayList<Segment>();
    final File[] ls = this.partitionDir.listFiles();
    if (ls != null) {
        for (final File file : ls) {
            if (file.isFile() && file.toString().endsWith(FILE_SUFFIX)) {
                if (!file.canRead()) {
                    throw new IOException("Could not read file " + file);
                }
                final String filename = file.getName();
                // The file name (minus suffix) encodes the segment's base offset.
                final long start = Long.parseLong(filename.substring(0, filename.length() - FILE_SUFFIX.length()));
                accum.add(new Segment(start, file, false));
            }
        }
    }

    if (accum.size() == 0) {
        // Empty directory: create the first, mutable segment.
        final File newFile = new File(this.partitionDir, this.nameFromOffset(offsetIfCreate));
        accum.add(new Segment(offsetIfCreate, newFile, true));
    }
    else {
        // Sort ascending by base offset so the newest segment is last.
        Collections.sort(accum, new Comparator<Segment>() {
            @Override
            public int compare(final Segment o1, final Segment o2) {
                if (o1.start == o2.start) {
                    return 0;
                }
                else if (o1.start > o2.start) {
                    return 1;
                }
                else {
                    return -1;
                }
            }
        });
        // this.validateSegments(accum);
        // Reopen the newest segment in mutable mode for appends.
        final Segment last = accum.remove(accum.size() - 1);
        last.fileMessageSet.close();
        log.info("Loading the last segment in mutable mode and running recover on " + last.file.getAbsolutePath());
        final Segment mutable = new Segment(last.start, last.file, true);
        accum.add(mutable);
        log.info("Loaded " + accum.size() + " segments...");
    }
    this.segments = new SegmentList(accum.toArray(new Segment[accum.size()]));
}
// private void validateSegments(final List<Segment> segments) {
// this.writeLock.lock();
// try {
// for (int i = 0; i < segments.size() - 1; i++) {
// final Segment curr = segments.get(i);
// final Segment next = segments.get(i + 1);
// if (curr.start + curr.size() != next.start) {
// throw new IllegalStateException("The following segments don't validate: "
// + curr.file.getAbsolutePath() + ", " + next.file.getAbsolutePath());
// }
// }
// }
// finally {
// this.writeLock.unlock();
// }
// }
/**
 * Ensures {@code dir} exists and is a directory, creating it (including
 * missing parent directories) when absent.
 *
 * @throws RuntimeException when creation fails or the path is not a directory
 */
private void checkDir(final File dir) {
    // mkdirs() (rather than mkdir()) also creates missing parents; the
    // second exists() check tolerates a concurrent creation racing with us,
    // in which case mkdirs() returns false although the directory now exists.
    if (!dir.exists() && !dir.mkdirs() && !dir.exists()) {
        throw new RuntimeException("Create directory failed:" + dir.getAbsolutePath());
    }
    if (!dir.isDirectory()) {
        throw new RuntimeException("Path is not a directory:" + dir.getAbsolutePath());
    }
}
// Serializes all mutations of the tail segment (append, roll, flush).
private final Lock writeLock = new ReentrantLock();

// Appends one message and returns its absolute offset in the log.
public long append(final IMessage req) throws IOException {
    return this.appendBuffer(req);
}

private long appendBuffer(final IMessage buffer) throws IOException {
    this.writeLock.lock();
    try {
        final Segment cur = this.segments.last();
        // Absolute offset = segment base + relative offset within the file.
        long offset = cur.start + cur.fileMessageSet.append(buffer);
        // Roll to a new segment if the size threshold was crossed.
        this.mayBeRoll();
        return offset;
    }
    catch (final IOException e) {
        log.error("Append file failed", e);
        throw e;
    }
    finally {
        this.writeLock.unlock();
    }
}
// Roll threshold per segment file: 64 MB.
private long maxSegmentSize = 1024 * 1024 * 64;

public long getMaxSegmentSize() {
    return maxSegmentSize;
}

public void setMaxSegmentSize(long maxSegmentSize) {
    this.maxSegmentSize = maxSegmentSize;
}

// Called under writeLock from appendBuffer(): starts a new segment once the
// tail reaches the configured size.
private void mayBeRoll() throws IOException {
    if (this.segments.last().fileMessageSet.getSizeInBytes().get() >= this.maxSegmentSize) {
        this.roll();
    }
}
// Builds the segment file name for a base offset: the offset zero-padded to
// 20 digits (so names sort lexicographically) plus the binlog suffix.
String nameFromOffset(final long offset) {
    final NumberFormat formatter = NumberFormat.getInstance();
    formatter.setMinimumIntegerDigits(20);
    formatter.setMaximumFractionDigits(0);
    formatter.setGroupingUsed(false);
    return formatter.format(offset) + FILE_SUFFIX;
}
// Freezes the current tail segment and starts a new mutable one whose base
// offset continues where the old one ended. Must be called under writeLock.
private void roll() throws IOException {
    final long newOffset = this.nextAppendOffset();
    final File newFile = new File(this.partitionDir, this.nameFromOffset(newOffset));
    this.segments.last().fileMessageSet.flush();
    this.segments.last().fileMessageSet.setMutable(false);
    this.segments.append(new Segment(newOffset, newFile, true));
    // Retention: drop the oldest segments beyond the configured count.
    // NOTE(review): deleted segments are removed from the list but their
    // files/channels are not closed or unlinked here -- confirm intent.
    while (this.segments.view().length > this.getMaxSegmentsCount()) {
        this.segments.delete(this.segments.first());
    }
}

private long nextAppendOffset() throws IOException {
    final Segment last = this.segments.last();
    // Flush first so size() reflects everything appended so far.
    last.fileMessageSet.flush();
    return last.start + last.size();
}
// Forces the mutable tail segment to disk; only the tail can hold
// unflushed data since rolled segments are flushed when frozen.
public void flush() throws IOException {
    this.writeLock.lock();
    try {
        this.flush0();
    }
    finally {
        this.writeLock.unlock();
    }
}

private void flush0() throws IOException {
    this.segments.last().fileMessageSet.flush();
}
// Next append position: base offset of the newest segment plus its length.
public long getMaxOffset() {
    final Segment tail = this.segments.last();
    return tail.start + tail.size();
}

// Oldest offset still retained in the log.
public long getMinOffset() {
    return this.segments.first().start;
}
// Reads the message at the given absolute offset into `bf`; returns -1 when
// no segment contains the offset.
public int read(final IMessage bf, long offset) throws IOException {
    final Segment target = this.findSegment(this.segments.view(), offset);
    if (target == null) {
        return -1;
    }
    return target.fileMessageSet.read(bf, offset - target.start);
}
// Returns the base offset of the segment following the one containing
// `offset`, or -1 when `offset` is unknown or already in the last segment.
public long skipToNext(long offset) throws IOException {
    final Segment current = this.findSegment(this.segments.view(), offset);
    if (current == null) {
        return -1;
    }
    final Segment next = this.findSegmentNext(this.segments.view(), current);
    return next == null ? -1 : next.start;
}
public long getNearestOffset(final long offset) {
    return this.getNearestOffset(offset, this.segments);
}

// Maps an arbitrary offset to a usable one: the base offset of the segment
// containing it; past the end, the next append offset; before the retained
// range, the minimum available offset.
// NOTE(review): relies on findSegment throwing
// ArrayIndexOutOfBoundsException as control flow for the "too old" case.
long getNearestOffset(final long offset, final SegmentList segments) {
    try {
        final Segment segment = this.findSegment(segments.view(), offset);
        if (segment != null) {
            return segment.start;
        }
        else {
            final Segment last = segments.last();
            return last.start + last.size();
        }
    }
    catch (final ArrayIndexOutOfBoundsException e) {
        return segments.first().start;
    }
}
// Returns the segment immediately after `seg` in the snapshot, or null when
// `seg` is the last segment or is not present.
Segment findSegmentNext(final Segment[] segments, Segment seg) {
    if (segments == null || segments.length < 1) {
        return null;
    }
    final int lastIndex = segments.length - 1;
    for (int i = 0; i < lastIndex; i++) {
        if (segments[i].equals(seg)) {
            return segments[i + 1];
        }
    }
    return null;
}
// Binary search over segments (sorted ascending by base offset) for the one
// containing `offset`. Returns null when the offset is at or past the end
// of the log; throws ArrayIndexOutOfBoundsException when the offset falls
// before the first retained segment (consumed by getNearestOffset).
Segment findSegment(final Segment[] segments, final long offset) {
    if (segments == null || segments.length < 1) {
        return null;
    }
    final Segment last = segments[segments.length - 1];
    if (offset < segments[0].start) {
        throw new ArrayIndexOutOfBoundsException();
    }
    if (offset >= last.start + last.size()) {
        return null;
    }
    int low = 0;
    int high = segments.length - 1;
    while (low <= high) {
        final int mid = high + low >>> 1;
        final Segment found = segments[mid];
        if (found.contains(offset)) {
            return found;
        }
        else if (offset < found.start) {
            high = mid - 1;
        }
        else {
            low = mid + 1;
        }
    }
    return null;
}
}
| 10,594
|
https://github.com/jordanopensource/dsp-web/blob/master/components/Elements/Dropdown.vue
|
Github Open Source
|
Open Source
|
Unlicense, MIT
| 2,022
|
dsp-web
|
jordanopensource
|
Vue
|
Code
| 272
| 1,033
|
<template>
  <div class="dropdown">
    <!-- Toggle row: the button swaps between a close and a settings icon,
         and the title also toggles the menu when clicked. -->
    <div class="dropdown-link">
      <button class="dropdown-button" v-if="show" key="on" @click="show = false">
        <i class="ri-close-fill"></i>
      </button>
      <button class="dropdown-button" v-else key="off" @click="show = true">
        <i class="ri-list-settings-fill"></i>
      </button>
      <h4 @click="show = !show" class="flex-grow ltr:ml-4 rtl:mr-4">{{ getTitle }}</h4>
    </div>
    <!-- Dropdown Menu: a fixed "all" entry followed by one entry per item;
         the selected entry gets the `active` class. -->
    <transition name="dropdown">
      <div class="dropdown-menu" v-bind:class="{ active: show }" v-if="show">
        <div :class="active == 'all' ? 'active': ''" class="dropdown-menu-link" @click="setActive('all', $t('all'))">
          <p class="dropdown-menu-text">{{ $t('all') }}</p>
        </div>
        <div v-for="item in items" :key="item.id" :class="active == item.name ? 'active': ''" class="dropdown-menu-link"
          @click="setActive(item.name, item['title_' + $i18n.locale])">
          <p class="dropdown-menu-text">{{ item['title_' + $i18n.locale] }}</p>
        </div>
      </div>
    </transition>
  </div>
</template>
<script>
export default {
data() {
return {
show: false
}
},
props: {
items: {
type: Array,
required: true
},
active: {
type: String,
required: true
}
},
methods: {
setActive(value, title) {
this.show = false
this.$emit('setActive', value, title)
}
},
computed: {
getTitle() {
if (this.active == 'all') {
return this.$t('all')
} else {
return this.items.find((item) => item.name == this.active)?.['title_' + this.$i18n.locale]
}
}
}
}
</script>
<style scoped>
.dropdown {
@apply relative flex justify-start items-start;
}
.dropdown-link {
@apply w-full h-full flex flex-row flex-no-wrap items-center justify-start;
}
.dropdown-link:hover>* {
@apply fill-current text-josa-teal cursor-pointer;
}
.dropdown-button {
@apply fill-current text-josa-black;
}
.dropdown-button:focus {
@apply outline-none;
}
.dropdown-menu {
@apply absolute z-10 overflow-y-auto py-4 my-4 bg-white;
top: 100%;
background-clip: padding-box;
}
.dropdown-menu-link {
@apply relative z-20 bg-white w-full cursor-pointer;
}
.dropdown-menu-link:hover {
@apply bg-josa-teal-300;
}
.dropdown-menu-text {
@apply px-8 py-4;
}
/* Transitions */
.dropdown-menu-link,
.dropdown-link,
.dropdown-menu-text {
transition: color 0.25s linear;
-webkit-transition: color 0.25s linear;
transition: background-color 0.25s linear;
-webkit-transition: background-color 0.25s linear;
}
.dropdown-enter-active,
.dropdown-leave-active {
transition: all 1s;
}
.dropdown-enter,
.dropdown-leave-to {
opacity: 0;
transform: translateY(30px);
}
</style>
| 17,704
|
https://github.com/echalkpad/t4f-data/blob/master/pubsub/activemq/src/main/java/org/apache/activemq/book/ch7/spring/SpringClient.java
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,014
|
t4f-data
|
echalkpad
|
Java
|
Code
| 183
| 432
|
/****************************************************************
* Licensed to the AOS Community (AOS) under one or more *
* contributor license agreements. See the NOTICE file *
* distributed with this work for additional information *
* regarding copyright ownership. The AOS licenses this file *
* to you under the Apache License, Version 2.0 (the *
* "License"); you may not use this file except in compliance *
* with the License. You may obtain a copy of the License at *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, *
* software distributed under the License is distributed on an *
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY *
* KIND, either express or implied. See the License for the *
* specific language governing permissions and limitations *
* under the License. *
****************************************************************/
package org.apache.activemq.book.ch7.spring;
import org.apache.activemq.broker.BrokerService;
import org.apache.xbean.spring.context.FileSystemXmlApplicationContext;
/**
 * Starts an embedded, non-persistent ActiveMQ broker and then launches the
 * Spring-configured stock publisher client against it.
 */
public class SpringClient {

    public static void main(String... args) throws Exception {
        // Embedded broker on the standard OpenWire port.
        BrokerService broker = new BrokerService();
        broker.addConnector("tcp://localhost:61616");
        broker.setPersistent(false);
        broker.start();

        // Load the XML-defined Spring context and start the publisher bean.
        FileSystemXmlApplicationContext context = new FileSystemXmlApplicationContext("src/main/resources/org/apache/activemq/book/ch7/spring-client.xml");
        SpringPublisher publisher = (SpringPublisher)context.getBean("stockPublisher");
        publisher.start();
    }
}
| 39,265
|
https://github.com/bbjoite09/PythonProject/blob/master/tkinter_project/tkinter_basic/2_button.py
|
Github Open Source
|
Open Source
|
MIT
| 2,021
|
PythonProject
|
bbjoite09
|
Python
|
Code
| 119
| 446
|
# Tkinter demo: various Button configurations (padding, size, color, image,
# and a click callback).
from tkinter import *

root = Tk()
root.title("Yeonii GUI")  # set the window title

# Add a plain button.
btn1 = Button(root, text="버튼1")
btn1.pack()

# padx/pady -> inner padding (5 / 10 px) around the button text.
btn2 = Button(root, padx=5, pady=10, text="버튼2")
btn2.pack()

btn3 = Button(root, padx=10, pady=5, text="버튼3")
btn3.pack()

# width/height -> unlike padding, these fix the button size directly;
# if the text exceeds the given size, the size is kept even if the
# text is clipped.
btn4 = Button(root, width=10, height=3, text="버튼4")
btn4.pack()

# Button colors (fg = foreground, bg = background).
btn5 = Button(root, fg="red", bg="yellow", text="버튼5")
btn5.pack()

# Image button.
photo = PhotoImage(file="img1.png")
btn6 = Button(root, image=photo)
btn6.pack()

# Attach a click callback to a button.
def btncmd():
    print("버튼이 클릭되었어요!")

btn7 = Button(root, text="동작하는 버튼", command=btncmd)
btn7.pack()

root.mainloop()
| 50,431
|
https://github.com/struktured/riakc_ppx/blob/master/src/lib/robj.ml
|
Github Open Source
|
Open Source
|
BSD-2-Clause
| 2,016
|
riakc_ppx
|
struktured
|
OCaml
|
Code
| 641
| 1,257
|
(* Riak object (robj) model: links, user metadata, secondary indices and
   content values, with protobuf (de)serialization derived via ppx. *)

(* Collapse an optional bool to [Some true] or [None]. *)
let option_of_bool = function
  | Some true -> Some true
  | _ -> None

(* A link to another Riak object (bucket/key) with an optional tag. *)
module Link = struct
  type t = { bucket : (string option [@key 1])
           ; key : (string option [@key 2])
           ; tag : (string option [@key 3])
           } [@@deriving protobuf]

  (* Accessors. *)
  let bucket t = t.bucket
  let key t = t.key
  let tag t = t.tag

  (* Functional updates. *)
  let set_bucket b t = { t with bucket = b }
  let set_key k t = { t with key = k }
  let set_tag tag t = { t with tag = tag }
end

(* A key / optional-value pair; reused for user metadata and indices. *)
module Pair = struct
  type t = { key : (string [@key 1])
           ; value : (string option [@key 2])
           } [@@deriving protobuf]

  let create ~k ~v = { key = k; value = v }

  let key t = t.key
  let value t = t.value

  let set_key s t = {t with key = s}
  let set_value so t = {t with value = so}
end

module Usermeta = Pair
module Index = Pair

(* The value payload of a Riak object plus its metadata. *)
module Content = struct
  module Link = Link
  module Pair = Pair
  module Usermeta = Usermeta
  module Index = Index

  type t = { value : (string [@key 1])
           ; content_type : (string option [@key 2])
           ; charset : (string option [@key 3])
           ; content_encoding : (string option [@key 4])
           ; vtag : (string option [@key 5])
           ; links : (Link.t list [@key 6])
           ; last_mod : (Int32.t option [@key 7] [@encoding `varint])
           ; last_mod_usec : (Int32.t option [@key 8] [@encoding `varint])
           ; usermeta : (Usermeta.t list [@key 9])
           ; indices : (Index.t list [@key 10])
           ; deleted : (bool option [@key 11])
           } [@@deriving protobuf]

  (* Fresh content wrapping value [v] with empty/default metadata. *)
  let create v =
    { value = v
    ; content_type = None
    ; charset = None
    ; content_encoding = None
    ; vtag = None
    ; links = []
    ; last_mod = None
    ; last_mod_usec = None
    ; usermeta = []
    ; indices = []
    ; deleted = Some false
    }

  (* Accessors. *)
  let value t = t.value
  let content_type t = t.content_type
  let charset t = t.charset
  let content_encoding t = t.content_encoding
  let vtag t = t.vtag
  let links t = t.links
  let last_mod t = t.last_mod
  let last_mod_usec t = t.last_mod_usec
  let usermeta t = t.usermeta
  let indices t = t.indices
  (* A missing deleted flag is treated as "not deleted". *)
  let deleted t = match t.deleted with Some x -> x | None -> false

  (* Functional updates. *)
  let set_value v t = { t with value = v }
  let set_content_type ct t = { t with content_type = ct }
  let set_charset cs t = { t with charset = cs }
  let set_content_encoding ce t = { t with content_encoding = ce }
  let set_vtag vt t = { t with vtag = vt }
  let set_links ls t = { t with links = ls }
  let set_last_mod lm t = { t with last_mod = lm }
  let set_last_mod_usec lmu t = { t with last_mod_usec = lmu }
  let set_usermeta u t = { t with usermeta = u }
  let set_indices i t = { t with indices = i }
end

(* A Riak object: one or more content siblings plus an optional vclock. *)
type 'a t = { contents : Content.t list
            ; vclock : string option
            ; unchanged : bool
            }

(* Build from protobuf parts; a missing "unchanged" flag defaults to false. *)
let of_pb contents vclock unchanged =
  { contents = contents
  ; vclock = vclock
  ; unchanged = Core.Std.Option.value ~default:false unchanged
  }

let to_pb t = (t.contents, t.vclock)

let create c =
  { contents = [c]
  ; vclock = None
  ; unchanged = false
  }

let of_value v = create (Content.create v)

let contents t = t.contents
(* First sibling; raises when the object has no contents. *)
let content t = Core.Std.List.hd_exn (t.contents)
let vclock t = t.vclock
let unchanged t = t.unchanged

let set_contents cs t = { t with contents = cs }
let set_content c t = { t with contents = [c] }
let set_vclock v t = { t with vclock = v }
| 46,927
|
https://github.com/NHS-Talent-Management/nhs-theme/blob/master/src/js/main.js
|
Github Open Source
|
Open Source
|
MIT
| null |
nhs-theme
|
NHS-Talent-Management
|
JavaScript
|
Code
| 20
| 86
|
// Flag the theme as running in a development environment.
window.devenv = true;

// Load the theme's JS modules through the YUI "get" utility.
YUI().use('get', function (Y) {
  var scriptBase = '/o/nhs-theme/js/';
  var moduleUrls = [scriptBase + 'header-menu.module.js'];

  Y.Get.script(moduleUrls, {
    onSuccess: function () {
      // No post-load work required.
    }
  });
});
| 14,679
|
https://github.com/marcogrella/TDD-Unit-Tests/blob/master/Tests/src/test/java/com/tests/TestsApplicationTests.java
|
Github Open Source
|
Open Source
|
MIT
| null |
TDD-Unit-Tests
|
marcogrella
|
Java
|
Code
| 15
| 64
|
package com.tests;
import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.SpringBootTest;
/**
 * Smoke test: verifies that the Spring application context starts cleanly.
 * The {@code @SpringBootTest} annotation was imported but never applied, so
 * the context was never actually bootstrapped; adding it makes the test real.
 */
@SpringBootTest
class TestsApplicationTests {

    // Intentionally empty: the test passes iff context loading succeeds.
    @Test
    void contextLoads() {
    }

}
| 18,967
|
https://github.com/jacksontenorio8/python/blob/master/CursoEmVideo/Mundo2_EstruturasDeControle/desafio038.py
|
Github Open Source
|
Open Source
|
MIT
| null |
python
|
jacksontenorio8
|
Python
|
Code
| 76
| 164
|
"""
@jacksontenorio8
Escreva um programa que leia dois números inteiro e compare-os,
mostrando na tela uma mensagem:
- O primeiro valor é maior
- O segundo valor é maior
- Não existe valor maior, os dois são iguais.
"""
num1 = int(input('Primeiro número: '))
num2 = int(input('Segundo número: '))
if num1 > num2:
print('O PRIMEIRO valor é maior.')
elif num1 < num2:
print('O SEGUNDO valor é maior.')
else:
print('Não existe valor maior, os dois são IGUAIS.')
| 44,739
|
https://github.com/alislin/TaskLog/blob/master/TaskLog/TaskLog.DataModel/IPriority.cs
|
Github Open Source
|
Open Source
|
MIT
| null |
TaskLog
|
alislin
|
C#
|
Code
| 21
| 47
|
namespace TaskLog.DataModel
{
    /// <summary>
    /// Contract for items that carry a priority and a point value.
    /// </summary>
    public interface IPriority
    {
        /// <summary>Relative priority of the item.</summary>
        int Priority { get; set; }

        /// <summary>Point value associated with the item.</summary>
        int Point { get; set; }
    }
}
| 39,835
|
https://github.com/bloom-housing/bloom/blob/master/backend/core/src/applications/dto/application.dto.ts
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,023
|
bloom
|
bloom-housing
|
TypeScript
|
Code
| 289
| 1,041
|
import { OmitType } from "@nestjs/swagger"
import { ArrayMaxSize, IsDefined, ValidateNested } from "class-validator"
import { Application } from "../entities/application.entity"
import { Expose, plainToClass, Transform, Type } from "class-transformer"
import { IdDto } from "../../shared/dto/id.dto"
import { ApplicantDto } from "./applicant.dto"
import { AddressDto } from "../../shared/dto/address.dto"
import { AlternateContactDto } from "./alternate-contact.dto"
import { DemographicsDto } from "./demographics.dto"
import { HouseholdMemberDto } from "./household-member.dto"
import { AccessibilityDto } from "./accessibility.dto"
import { ValidationsGroupsEnum } from "../../shared/types/validations-groups-enum"
import { UnitTypeDto } from "../../unit-types/dto/unit-type.dto"
// Admin-facing application DTO: replaces the entity's relational fields with
// nested DTOs (validated recursively) and id-only references for listing/user.
export class ApplicationDto extends OmitType(Application, [
  "listing",
  "listingId",
  "user",
  "userId",
  "applicant",
  "mailingAddress",
  "alternateAddress",
  "alternateContact",
  "accessibility",
  "demographics",
  "householdMembers",
  "preferredUnit",
] as const) {
  @Expose()
  @ValidateNested({ groups: [ValidationsGroupsEnum.default] })
  @Type(() => ApplicantDto)
  applicant: ApplicantDto

  // Exposed as an id-only reference built from the flat listingId column.
  @Expose()
  @IsDefined({ groups: [ValidationsGroupsEnum.default] })
  @ValidateNested({ groups: [ValidationsGroupsEnum.default] })
  @Type(() => IdDto)
  @Transform(
    (value, obj) => {
      return plainToClass(IdDto, { id: obj.listingId })
    },
    { toClassOnly: true }
  )
  listing: IdDto

  // Optional id-only reference to the submitting user; undefined when the
  // application has no userId.
  @Expose()
  @IsDefined({ groups: [ValidationsGroupsEnum.default] })
  @ValidateNested({ groups: [ValidationsGroupsEnum.default] })
  @Type(() => IdDto)
  @Transform(
    (value, obj) => {
      return obj.userId ? plainToClass(IdDto, { id: obj.userId }) : undefined
    },
    { toClassOnly: true }
  )
  user?: IdDto

  @Expose()
  @IsDefined({ groups: [ValidationsGroupsEnum.default] })
  @ValidateNested({ groups: [ValidationsGroupsEnum.default] })
  @Type(() => AddressDto)
  mailingAddress: AddressDto

  @Expose()
  @IsDefined({ groups: [ValidationsGroupsEnum.default] })
  @ValidateNested({ groups: [ValidationsGroupsEnum.default] })
  @Type(() => AddressDto)
  alternateAddress: AddressDto

  @Expose()
  @IsDefined({ groups: [ValidationsGroupsEnum.default] })
  @ValidateNested({ groups: [ValidationsGroupsEnum.default] })
  @Type(() => AlternateContactDto)
  alternateContact: AlternateContactDto

  @Expose()
  @IsDefined({ groups: [ValidationsGroupsEnum.default] })
  @ValidateNested({ groups: [ValidationsGroupsEnum.default] })
  @Type(() => AccessibilityDto)
  accessibility: AccessibilityDto

  @Expose()
  @IsDefined({ groups: [ValidationsGroupsEnum.default] })
  @ValidateNested({ groups: [ValidationsGroupsEnum.default] })
  @Type(() => DemographicsDto)
  demographics: DemographicsDto

  // Validated element-wise and capped at 32 entries.
  @Expose()
  @IsDefined({ groups: [ValidationsGroupsEnum.default] })
  @ValidateNested({ groups: [ValidationsGroupsEnum.default], each: true })
  @ArrayMaxSize(32, { groups: [ValidationsGroupsEnum.default] })
  @Type(() => HouseholdMemberDto)
  householdMembers: HouseholdMemberDto[]

  @Expose()
  @IsDefined({ groups: [ValidationsGroupsEnum.default] })
  @ValidateNested({ groups: [ValidationsGroupsEnum.default], each: true })
  @Type(() => UnitTypeDto)
  preferredUnit: UnitTypeDto[]
}
| 4,863
|
https://github.com/rodfloripa/py-greenhouse-1/blob/master/examples/palmer_penguins/src/main.py
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,021
|
py-greenhouse-1
|
rodfloripa
|
Python
|
Code
| 465
| 2,011
|
# Palmer-penguins example pipeline: sources data, engineers features,
# trains a model and writes EDA/performance reports, orchestrated as a
# Prefect flow of small tasks.
import greenhouse_clock
import data_sourcing
import data_splitting
import data_preprocessing
import feature_engineering
import eda_monitoring
import modeling
import performance_monitoring

from prefect import Flow, task, context

import pandas as pd

# Pandas options for better shell display
pd.set_option("display.max_rows", 100)
pd.set_option("display.max_columns", None)
pd.set_option("display.width", None)

# Timestamp used to prefix output artifacts.
start_time = greenhouse_clock.get_time()


@task
def sourcing():
    # Pull the raw dataset.
    return data_sourcing.get()


@task
def cleansing(df):
    return data_preprocessing.clean(df)


@task
def normalizing(df):
    return data_preprocessing.normalize(df)


@task(nout=3)
def splitting(df):
    # Returns (train, valid, test).
    return data_splitting.split(df)


@task(nout=3)
def one_hot(train, valid, test, cols):
    # One-hot encode `cols`, fit on train, and join the encodings back.
    logger = context.get("logger")
    logger.info(train)
    train_hot, valid_hot, test_hot = feature_engineering.one_hot_encoding(
        train=train,
        valid=valid,
        test=test,
        cols=cols,
    )
    train = train.join(train_hot)
    valid = valid.join(valid_hot)
    test = test.join(test_hot)
    logger.info(train)
    return train, valid, test


@task(nout=3)
def imputation(train, valid, test, cols, imputation_method):
    # Impute missing numerical values and join as "<col>_imputed" columns.
    logger = context.get("logger")
    # Find rows where the numerical variables are nan
    mask = train[cols].isna()
    logger.info(train[mask])
    train_imp, valid_imp, test_imp = feature_engineering.numerical_missing_imputation(
        train=train,
        valid=valid,
        test=test,
        cols=cols,
        imputation_method=imputation_method,
    )
    train = train.join(train_imp, rsuffix="_imputed")
    valid = valid.join(valid_imp, rsuffix="_imputed")
    test = test.join(test_imp, rsuffix="_imputed")
    logger.info(train[mask])
    return train, valid, test


@task
def eda(df, path, preffix, suffix):
    # Export an exploratory-data-analysis report to `path`.
    eda_monitoring.export_eda_report(df=df, path=path, preffix=preffix, suffix=suffix)
    pass


@task(nout=5)
def model(train, valid, test, obs, y_col, x_col):
    # Fit on train and return the transformed sets, best hyperparameters,
    # and predictions for the new observations `obs`.
    mo = modeling.model()
    mo.fit(train=train, y_col=y_col, x_col=x_col)
    lst = list(mo.transform_sets(train=train, valid=valid, test=test))
    lst.append(mo.transform_new(obs=obs))
    return lst


@task
def threshold(y_true, y_score):
    return performance_monitoring.optimal_threshold(y_true=y_true, y_score=y_score)


@task
def performance(y_true, y_score, best_hyperparams, path, opt_thr, suffix):
    return performance_monitoring.report_performance(
        y_true=y_true,
        y_score=y_score,
        best_hyperparams=best_hyperparams,
        path=path,
        opt_thr=opt_thr,
        suffix=suffix,
    )


@task
def binarize(binary_map, series):
    # Map multi-class labels to binary via `binary_map`.
    return series.map(binary_map)


@task
def print_out(s):
    print(s)
    pass


@task
def df_to_csv(df, filename):
    df.to_csv(filename)
    pass


# Define prefect flow
with Flow("greenhouse") as flow:

    df = sourcing()
    df = cleansing(df)
    df = normalizing(df)
    train, valid, test = splitting(df)

    # eda(
    #     df=train,
    #     path="monitor/",
    #     preffix=start_time,
    #     suffix="before_feat_eng"
    # )

    # Categorical
    cat_cols = [
        "sex",
    ]
    train, valid, test = one_hot(
        train=train,
        valid=valid,
        test=test,
        cols=cat_cols,
    )

    # Numerical
    num_cols = [
        "bill_length_mm",
        "bill_depth_mm",
        "flipper_length_mm",
        "body_mass_g",
    ]
    train, valid, test = imputation(
        train=train,
        valid=valid,
        test=test,
        cols=num_cols,
        imputation_method="median",
    )

    # eda(
    #     df=train,
    #     path="monitor/",
    #     preffix=start_time,
    #     suffix="after_feat_eng"
    # )

    y_col = ["species"]
    x_col = [
        "sex_male",
        "sex_female",
        "sex_na",
        "bill_length_mm_imputed",
        "bill_depth_mm_imputed",
        "flipper_length_mm_imputed",
        "body_mass_g_imputed",
    ]

    # `obs=test` just as an example here.
    # It should be actually new data, unseen by the model.
    train, valid, test, best_hyperparams, new = model(
        train=train,
        valid=valid,
        test=test,
        obs=test,
        y_col=y_col,
        x_col=x_col,
    )

    path = "data/"
    filename = path + "{}_predict_new.csv".format(start_time)
    df_to_csv(df=new, filename=filename)

    # Obtain the optimal threshold of
    # class 0 vs 1+2
    # from the training set
    opt_thr = threshold(y_true=train["actual"], y_score=train["prob_0"])

    # class 0 --> 1
    # class 1 or class 2 --> 0
    binary_map = {
        0: 1,
        1: 0,
        2: 0,
    }

    # Performance report over training set
    performance(
        y_true=binarize(binary_map=binary_map, series=train["actual"]),
        y_score=train["prob_0"],
        best_hyperparams=best_hyperparams,
        path="monitor/",
        opt_thr=opt_thr,
        suffix="_train",
    )

    # Performance report over validation set
    performance(
        y_true=binarize(binary_map=binary_map, series=valid["actual"]),
        y_score=valid["prob_0"],
        best_hyperparams=best_hyperparams,
        path="monitor/",
        opt_thr=opt_thr,
        suffix="_valid",
    )

    # Performance report over test set
    performance(
        y_true=binarize(binary_map=binary_map, series=test["actual"]),
        y_score=test["prob_0"],
        best_hyperparams=best_hyperparams,
        path="monitor/",
        opt_thr=opt_thr,
        suffix="_test",
    )


if __name__ == "__main__":
    # Run prefect flow
    flow.run()
    # Export flow as a PDF
    flow.visualize(filename="flow/prefect_flow")
| 41,338
|
https://github.com/npocmaka/Windows-Server-2003/blob/master/multimedia/directx/dxvb/dx8vb/dmusobj.h
|
Github Open Source
|
Open Source
|
Unlicense
| 2,021
|
Windows-Server-2003
|
npocmaka
|
C
|
Code
| 126
| 578
|
#include "resource.h" // main symbols
#include "dmusicc.h"
#define typedef__dxj_DirectMusic LPDIRECTMUSIC
/////////////////////////////////////////////////////////////////////////////
// Direct
//REVIEW -- using pointers to ID's is necessary because some compilers don't like
//references as template arguments.
class C_dxj_DirectMusicObject :
#ifdef USING_IDISPATCH
public CComDualImpl<I_dxj_DirectMusic, &IID_I_dxj_DirectMusic, &LIBID_DIRECTLib>,
public ISupportErrorInfo,
#else
public I_dxj_DirectMusic,
#endif
public CComObjectRoot
{
public:
C_dxj_DirectMusicObject() ;
virtual ~C_dxj_DirectMusicObject() ;
BEGIN_COM_MAP(C_dxj_DirectMusicObject)
COM_INTERFACE_ENTRY(I_dxj_DirectMusic)
#ifdef USING_IDISPATCH
COM_INTERFACE_ENTRY(IDispatch)
COM_INTERFACE_ENTRY(ISupportErrorInfo)
#endif
END_COM_MAP()
DECLARE_AGGREGATABLE(C_dxj_DirectMusicObject)
#ifdef USING_IDISPATCH
// ISupportsErrorInfo
STDMETHOD(InterfaceSupportsErrorInfo)(REFIID riid);
#endif
// I_dxj_DirectMusic
public:
/* [hidden] */ HRESULT STDMETHODCALLTYPE InternalSetObject(
/* [in] */ IUnknown __RPC_FAR *lpdd);
/* [hidden] */ HRESULT STDMETHODCALLTYPE InternalGetObject(
/* [retval][out] */ IUnknown __RPC_FAR *__RPC_FAR *lpdd);
HRESULT STDMETHODCALLTYPE Activate(VARIANT_BOOL fEnable);
HRESULT STDMETHODCALLTYPE SetDirectSound(I_dxj_DirectSound *DirectSound,long hWnd);
////////////////////////////////////////////////////////////////////////
//
// note: this is public for the callbacks
DECL_VARIABLE(_dxj_DirectMusic);
private:
public:
DX3J_GLOBAL_LINKS(_dxj_DirectMusic);
DWORD InternalAddRef();
DWORD InternalRelease();
};
| 46,533
|
https://github.com/virender-hestabit/api20/blob/master/application/views/sidemenu.php
|
Github Open Source
|
Open Source
|
MIT, LicenseRef-scancode-unknown-license-reference
| 2,020
|
api20
|
virender-hestabit
|
PHP
|
Code
| 13
| 80
|
<div class="sidebar-nav">
<?PHP
$user_id = $this->session->user_id;
$menu = $this->Database_conn2->get_menu_tree('sidemenu',$user_id,'',0);
echo $menu;
?>
</div>
| 26,116
|
https://github.com/mourad/turnpike/blob/master/examples/pubsub/client.go
|
Github Open Source
|
Open Source
|
MIT
| 2,015
|
turnpike
|
mourad
|
Go
|
Code
| 48
| 195
|
package main
import (
"fmt"
"time"
"gopkg.in/jcelliott/turnpike.v1"
)
// testHandler prints every event received on a subscribed topic URI.
func testHandler(uri string, event interface{}) {
	fmt.Printf("Received event: %s\n", event)
}
// main connects to a local WAMP router, subscribes to "event:test",
// and publishes a test event once per second, forever.
func main() {
	c := turnpike.NewClient()
	err := c.Connect("ws://127.0.0.1:8080/ws", "http://localhost/")
	if err != nil {
		panic("Error connecting:" + err.Error())
	}

	c.Subscribe("event:test", testHandler)

	for {
		c.Publish("event:test", "test")
		// Throttle to one publish per second.
		<-time.After(time.Second)
	}
}
| 16,184
|
https://github.com/meissel/nemu/blob/master/src/nm_remote_api.h
|
Github Open Source
|
Open Source
|
BSD-2-Clause
| 2,022
|
nemu
|
meissel
|
C
|
Code
| 97
| 414
|
#ifndef NM_REMOTE_API_H_
#define NM_REMOTE_API_H_

/* Version string of the remote-control API protocol. */
#define NM_API_VERSION "0.2"

#include <nm_mon_daemon.h>
#include <json.h>

/* printf-style JSON templates used to format API replies. */
static const char NM_API_RET_ARRAY[] = "{\"return\":[%s]}";
static const char NM_API_RET_VAL[] = "{\"return\":\"%s\"}";
static const char NM_API_RET_OK[] = "{\"return\":\"ok\"}";
static const char NM_API_RET_ERR[] = "{\"return\":\"err\",\"error\":\"%s\"}";
static const char NM_API_RET_VAL_UINT[] = "{\"return\":\"%u\"}";

/* Port the API server listens on (0x501D = 20509). */
#define NM_API_PORT 0x501D
/* Maximum length of a method name in nm_api_ops. */
#define NM_API_MD_LEN 20
/* Size of the per-client buffer. */
#define NM_API_CL_BUF_LEN 2048
/* Hex-encoded SHA-256 digest length (64 chars) plus NUL terminator. */
#define NM_API_SHA256_LEN 65

/* Maps an API method name to its handler function. */
typedef struct nm_api_ops {
    const char method[NM_API_MD_LEN];
    void (*run)(struct json_object *request, nm_str_t *reply);
} nm_api_ops_t;

/* Context handed to the API server thread. */
typedef struct nm_api_ctx {
    nm_mon_vms_t *vms;   /* VM monitoring state (see nm_mon_daemon.h) */
    nm_thr_ctrl_t *ctrl; /* thread control flags */
} nm_api_ctx_t;

#define NM_API_CTX_INIT (nm_api_ctx_t) { NULL, NULL }

/* Entry point of the API server thread; ctx points to an nm_api_ctx_t. */
void *nm_api_server(void *ctx);

#endif /* NM_REMOTE_API_H_ */
/* vim:set ts=4 sw=4: */
| 3,801
|
https://github.com/gsus17/Realtime-chat-demo-operator/blob/master/src/app/login/login.component.ts
|
Github Open Source
|
Open Source
|
MIT
| null |
Realtime-chat-demo-operator
|
gsus17
|
TypeScript
|
Code
| 135
| 523
|
import { Component, OnInit, NgZone } from '@angular/core';
import { FirebaseAuthService } from './firebase-auth/firebase-auth.service';
import { Router } from '@angular/router';
import { User } from '../interfaces/user';
/**
 * Login screen: wraps Firebase social sign-in, persists the signed-in user
 * and redirects to the master view on success.
 */
@Component({
  selector: 'app-login',
  templateUrl: './login.component.html',
  styleUrls: ['./login.component.scss']
})
export class LoginComponent implements OnInit {

  constructor(
    private firebaseAuthService: FirebaseAuthService,
    private zone: NgZone,
    private router: Router) { }

  ngOnInit() {
  }

  /**
   * Login with google: on success, save the user locally and navigate
   * to the master view; failures are only logged.
   */
  public googleLogin() {
    console.log(`${LoginComponent.name}::googleLogin`);
    this.firebaseAuthService.googleLogin()
      .then((res) => {
        console.log(`${LoginComponent.name}::success`, res);
        this.saveLocalUser(res);
        this.goToMaster();
      })
      .catch((err) => {
        console.log(`${LoginComponent.name}::catch`, err);
      });
  }

  /**
   * Login with twitter. (Not implemented yet: only logs the call.)
   */
  public twitterLogin() {
    console.log(`${LoginComponent.name}::twitterLogin`);
  }

  /**
   * Save user login: map the Firebase auth response to the app's User
   * model and persist it through the auth service.
   */
  private saveLocalUser(response: any) {
    console.log(`${LoginComponent.name}::saveLocalUser`);
    const user: User = {
      uid: response.user.uid,
      name: response.user.displayName,
      avatar: response.user.photoURL,
      messageUnread: 0
    };
    this.firebaseAuthService.setUser(user);
  }

  /**
   * Redirect to master view, inside the Angular zone so routing triggers
   * change detection.
   */
  private goToMaster() {
    console.log(`${LoginComponent.name}::goToMaster`);
    this.zone.run(() => this.router.navigate(['/master']));
  }
}
| 35,003
|
https://github.com/franky47/delta-v/blob/master/back/tests/helpers/factories/product.factory.ts
|
Github Open Source
|
Open Source
|
MIT
| null |
delta-v
|
franky47
|
TypeScript
|
Code
| 48
| 158
|
import { faker } from '@faker-js/faker';
import { buildFactory } from '../../../src/core/testHelpers';
import { Product } from '../../../src/entities/product.entity';
// Produces one randomized Product entity template for tests.
const buildSchema = (): Product => ({
  id: faker.datatype.uuid(),
  name: faker.commerce.productName(),
  info: faker.lorem.paragraph(),
  childrenQuestion: faker.lorem.sentence(),
});

// Test factory: a random Product with any provided overrides applied.
export const productEntityFactory = (args?: Partial<Product>): Product => {
  const schema = buildSchema();
  return buildFactory<Product>({ ...schema })(args);
};
| 46,747
|
https://github.com/PacificBiosciences/falcon3/blob/master/falcon_kit/mains/graph_to_contig.py
|
Github Open Source
|
Open Source
|
BSD-3-Clause-Clear
| null |
falcon3
|
PacificBiosciences
|
Python
|
Code
| 1,290
| 4,377
|
"""
TODO: (from convo w/ Ivan)
the issue with this script (but would still like to re-read it to refresh my memory). The script loads all edge sequences and tries to do two things at once: create p_ctg and a_ctg sequences, and align the bubbles using those sequences
If we generate:
1. All paths first (as tiling paths) for all p_ctg and all a_ctg without loading sequences - this should not consume much space (take a look at *_tiling_paths files).
2. Load the first read of each tiling path fully, and only edge sequences for every transition, we can generate the output sequences with the same memory/disk consumption.
3. Align bubbles after that.
Our resource consumption should be same
Bubbles?
It aligns them to produce the identity score
After that the dedup_a_tigs.py script is used to deduplicate fake a_ctg.
But that script is simple, and only depends on the alignment info that the previous script stored in the a_ctg header.
"""
import argparse
import logging
import sys
import networkx as nx
from ..FastaReader import open_fasta_reader
from ..io import open_progress
# Reverse-complement map: DNA base -> complement (case preserved; N/n and '-'
# map to themselves).
RCMAP = dict(list(zip("ACGTacgtNn-", "TGCAtgcaNn-")))
def log(msg):
    """Write *msg* followed by a newline to stderr."""
    sys.stderr.write('{}\n'.format(msg))
def rc(seq):
    """Return the reverse complement of *seq* using the module RCMAP table."""
    return "".join(RCMAP[base] for base in reversed(seq))
def reverse_end(node_id):
    """Flip the end marker of a node id: 'rid:B' <-> 'rid:E'."""
    rid, end = node_id.split(":")
    flipped = "B" if end == "E" else "E"
    return "{}:{}".format(rid, flipped)
def yield_first_seq(one_path_edges, seqs):
    """Yield the full sequence of the first read of a non-circular path.

    Yields nothing when the edge list is empty or the path is circular
    (first source node equals last sink node).
    """
    if one_path_edges and one_path_edges[0][0] != one_path_edges[-1][1]:
        # If non-empty, and non-circular,
        # prepend the entire first read.
        (vv, ww) = one_path_edges[0]
        (vv_rid, vv_letter) = vv.split(":")
        if vv_letter == 'E':
            first_seq = seqs[vv_rid]
        else:
            assert vv_letter == 'B'
            # 'B' end: the read enters reversed, so emit its reverse complement.
            first_seq = "".join([RCMAP[c] for c in seqs[vv_rid][::-1]])

        yield first_seq
def compose_ctg(seqs, edge_data, ctg_id, path_edges, proper_ctg):
    """Build a contig from a list of path edges.

    Returns (edge_lines, sub_seqs, total_score, total_length): the
    tiling-path report lines, the sequence chunks to concatenate, and the
    summed alignment score/length.  When proper_ctg is True, the entire
    first read is prepended to the sequence.
    """
    total_score = 0
    total_length = 0
    edge_lines = []
    sub_seqs = []

    # If required, add the first read to the path sequence.
    if proper_ctg:
        sub_seqs = list(yield_first_seq(path_edges, seqs))
        total_length = 0 if len(sub_seqs) == 0 else len(sub_seqs[0])

    # Splice-in the rest of the path sequence.
    for vv, ww in path_edges:
        rid, s, t, aln_score, idt, e_seq = edge_data[(vv, ww)]
        sub_seqs.append(e_seq)
        edge_lines.append('%s %s %s %s %d %d %d %0.2f' % (
            ctg_id, vv, ww, rid, s, t, aln_score, idt))
        total_length += abs(s - t)
        total_score += aln_score

    return edge_lines, sub_seqs, total_score, total_length
def run(improper_p_ctg, proper_a_ctg, preads_fasta_fn, sg_edges_list_fn, utg_data_fn, ctg_paths_fn):
    """improper==True => Neglect the initial read.
    We used to need that for unzip.

    Reads the string-graph files and writes p_ctg.fasta, a_ctg_all.fasta
    and their tiling-path files into the current directory.
    """
    # Pass 1: collect the ids of all reads used by "G" (graph) edges.
    reads_in_layout = set()
    with open_progress(sg_edges_list_fn) as f:
        for l in f:
            l = l.strip().split()
            """001039799:E 000333411:E 000333411 17524 20167 17524 99.62 G"""
            v, w, rid, s, t, aln_score, idt, type_ = l
            if type_ != "G":
                continue

            r1 = v.split(":")[0]
            reads_in_layout.add(r1)
            r2 = w.split(":")[0]
            reads_in_layout.add(r2)

    # Load the sequences of only those reads into memory.
    seqs = {}
    # load all p-read name into memory
    with open_fasta_reader(preads_fasta_fn) as f:
        for r in f:
            rname = r.name.strip().split()[0]
            if rname not in reads_in_layout:
                continue
            seqs[rname] = r.sequence.upper()  # name == rid-string

    # Pass 2: build edge -> (rid, coords, score, identity, sequence) table.
    edge_data = {}
    with open_progress(sg_edges_list_fn) as f:
        for l in f:
            l = l.strip().split()
            """001039799:E 000333411:E 000333411 17524 20167 17524 99.62 G"""
            v, w, rid, s, t, aln_score, idt, type_ = l

            if type_ != "G":
                continue
            r1, dir1 = v.split(":")
            reads_in_layout.add(r1)  # redundant, but harmless
            r2, dir2 = w.split(":")
            reads_in_layout.add(r2)  # redundant, but harmless

            s = int(s)
            t = int(t)
            aln_score = int(aln_score)
            idt = float(idt)

            if s < t:
                e_seq = seqs[rid][s:t]
                assert 'E' == dir2
            else:
                # t and s were swapped for 'c' alignments in ovlp_to_graph.generate_string_graph():702
                # They were translated from reverse-dir to forward-dir coordinate system in LA4Falcon.
                e_seq = "".join([RCMAP[c] for c in seqs[rid][t:s][::-1]])
                assert 'B' == dir2
            edge_data[(v, w)] = (rid, s, t, aln_score, idt, e_seq)

    # Load unitig metadata keyed by (start, via, end).
    utg_data = {}
    with open_progress(utg_data_fn) as f:
        for l in f:
            l = l.strip().split()
            s, v, t, type_, length, score, path_or_edges = l
            if type_ not in ["compound", "simple", "contained"]:
                continue
            length = int(length)
            score = int(score)
            if type_ in ("simple", "contained"):
                path_or_edges = path_or_edges.split("~")
            else:
                path_or_edges = [tuple(e.split("~"))
                                 for e in path_or_edges.split("|")]
            utg_data[(s, v, t)] = type_, length, score, path_or_edges

    p_ctg_out = open("p_ctg.fasta", "w")
    a_ctg_out = open("a_ctg_all.fasta", "w")
    p_ctg_t_out = open("p_ctg_tiling_path", "w")
    a_ctg_t_out = open("a_ctg_all_tiling_path", "w")
    layout_ctg = set()

    with open_progress(ctg_paths_fn) as f:
        for l in f:
            l = l.strip().split()
            ctg_id, c_type_, i_utig, t0, length, score, utgs = l
            ctg_id = ctg_id
            s0 = i_utig.split("~")[0]

            # Skip a contig whose reverse-complement layout was already done.
            if (reverse_end(t0), reverse_end(s0)) in layout_ctg:
                continue
            else:
                layout_ctg.add((s0, t0))

            ctg_label = i_utig + "~" + t0
            length = int(length)
            utgs = utgs.split("|")
            one_path = []
            total_score = 0
            total_length = 0

            #a_ctg_data = []
            a_ctg_group = {}

            for utg in utgs:
                s, v, t = utg.split("~")
                type_, length, score, path_or_edges = utg_data[(s, v, t)]
                total_score += score
                total_length += length
                if type_ == "simple":
                    if len(one_path) != 0:
                        one_path.extend(path_or_edges[1:])
                    else:
                        one_path.extend(path_or_edges)
                if type_ == "compound":
                    # Compound unitig (bubble): enumerate alternative paths by
                    # repeatedly taking and removing the "shortest" (by e_score)
                    # path through the local graph.
                    c_graph = nx.DiGraph()
                    all_alt_path = []
                    for ss, vv, tt in path_or_edges:
                        type_, length, score, sub_path = utg_data[(ss, vv, tt)]
                        v1 = sub_path[0]
                        for v2 in sub_path[1:]:
                            c_graph.add_edge(
                                v1, v2, e_score=edge_data[(v1, v2)][3])
                            v1 = v2

                    shortest_path = nx.shortest_path(c_graph, s, t, "e_score")
                    score = nx.shortest_path_length(c_graph, s, t, "e_score")
                    all_alt_path.append((score, shortest_path))

                    # a_ctg_data.append( (s, t, shortest_path) ) #first path is the same as the one used in the primary contig
                    while 1:
                        n0 = shortest_path[0]
                        for n1 in shortest_path[1:]:
                            c_graph.remove_edge(n0, n1)
                            n0 = n1
                        try:
                            shortest_path = nx.shortest_path(
                                c_graph, s, t, "e_score")
                            score = nx.shortest_path_length(
                                c_graph, s, t, "e_score")
                            #a_ctg_data.append( (s, t, shortest_path) )
                            all_alt_path.append((score, shortest_path))
                        except nx.exception.NetworkXNoPath:
                            break
                        # if len(shortest_path) < 2:
                        #    break

                    # Is sorting required, if we are appending the shortest paths in order?
                    all_alt_path.sort()
                    all_alt_path.reverse()
                    shortest_path = all_alt_path[0][1]

                    # The longest branch in the compound unitig is added to the primary path.
                    if len(one_path) != 0:
                        one_path.extend(shortest_path[1:])
                    else:
                        one_path.extend(shortest_path)

                    a_ctg_group[(s, t)] = all_alt_path

            if len(one_path) == 0:
                continue

            one_path_edges = list(zip(one_path[:-1], one_path[1:]))

            # Compose the primary contig.
            p_edge_lines, p_ctg_seq_chunks, p_total_score, p_total_length = compose_ctg(seqs, edge_data, ctg_id, one_path_edges, (not improper_p_ctg))

            # Write out the tiling path.
            p_ctg_t_out.write('\n'.join(p_edge_lines))
            p_ctg_t_out.write('\n')

            # Write the sequence.
            # Using the `total_score` instead of `p_total_score` intentionally. Sum of
            # edge scores is not identical to sum of unitig scores.
            p_ctg_out.write('>%s %s %s %d %d\n' % (ctg_id, ctg_label, c_type_, p_total_length, total_score))
            p_ctg_out.write(''.join(p_ctg_seq_chunks))
            p_ctg_out.write('\n')

            # Emit every alternative path of each bubble as an a_ctg.
            a_id = 0
            for (v, w) in a_ctg_group.keys():
                atig_output = []

                # Compose the base sequence.
                for sub_id in range(len(a_ctg_group[(v, w)])):
                    score, atig_path = a_ctg_group[(v, w)][sub_id]
                    atig_path_edges = list(zip(atig_path[:-1], atig_path[1:]))

                    a_ctg_id = '%s-%03d-%02d' % (ctg_id, a_id + 1, sub_id)
                    a_edge_lines, sub_seqs, a_total_score, a_total_length = compose_ctg(
                        seqs, edge_data, a_ctg_id, atig_path_edges, proper_a_ctg)

                    seq = ''.join(sub_seqs)

                    # Keep the placeholder for these values for legacy purposes, but mark
                    # them as for deletion.
                    # The base a_ctg will also be output to the same file, for simplicity.
                    delta_len = 0
                    idt = 1.0
                    cov = 1.0
                    atig_output.append((v, w, atig_path, a_total_length, a_total_score, seq, atig_path_edges, a_ctg_id, a_edge_lines, delta_len, idt, cov))

                # A group with only the primary path has no true alternates.
                if len(atig_output) == 1:
                    continue

                for sub_id, data in enumerate(atig_output):
                    v, w, tig_path, a_total_length, a_total_score, seq, atig_path_edges, a_ctg_id, a_edge_lines, delta_len, a_idt, cov = data

                    # Write out the tiling path.
                    a_ctg_t_out.write('\n'.join(a_edge_lines))
                    a_ctg_t_out.write('\n')

                    # Write the sequence.
                    a_ctg_out.write('>%s %s %s %d %d %d %d %0.2f %0.2f\n' % (a_ctg_id, v, w, a_total_length, a_total_score, len(atig_path_edges), delta_len, idt, cov))
                    a_ctg_out.write(''.join(seq))
                    a_ctg_out.write('\n')

                a_id += 1

    a_ctg_out.close()
    p_ctg_out.close()
    a_ctg_t_out.close()
    p_ctg_t_out.close()
class HelpF(argparse.RawDescriptionHelpFormatter, argparse.ArgumentDefaultsHelpFormatter):
    # Combined argparse formatter: keeps the epilog's literal layout
    # (RawDescription) while still appending each option's default value
    # to its help text (ArgumentDefaults).
    pass
def main(argv=sys.argv):
    """Command-line entry point: parse options and delegate to run().

    See run() for the four output files produced (also listed in the
    --help epilog below).
    """
    description = 'Generate the primary and alternate contig fasta files and tiling paths, given the string graph.'
    epilog = """
We write these:
    p_ctg_out = open("p_ctg.fasta", "w")
    a_ctg_out = open("a_ctg_all.fasta", "w")
    p_ctg_t_out = open("p_ctg_tiling_path", "w")
    a_ctg_t_out = open("a_ctg_all_tiling_path", "w")
"""
    parser = argparse.ArgumentParser(
        description=description,
        formatter_class=HelpF,
        epilog=epilog)
    # Flags controlling whether the first read of each path is skipped.
    parser.add_argument('--improper-p-ctg', action='store_true',
                        help='Skip the initial read in each p_ctg path.')
    parser.add_argument('--proper-a-ctg', action='store_true',
                        help='Skip the initial read in each a_ctg path.')
    # Input files; defaults match the standard FALCON working directory.
    input_options = [
        ('--preads-fasta-fn', './preads4falcon.fasta',
         'Input. Preads file, required to construct the contigs.'),
        ('--sg-edges-list-fn', './sg_edges_list',
         'Input. File containing string graph edges, produced by ovlp_to_graph.py.'),
        ('--utg-data-fn', './utg_data',
         'Input. File containing unitig data, produced by ovlp_to_graph.py.'),
        ('--ctg-paths-fn', './ctg_paths',
         'Input. File containing contig paths, produced by ovlp_to_graph.py.'),
    ]
    for flag, default, help_text in input_options:
        parser.add_argument(flag, type=str, default=default, help=help_text)
    parsed = parser.parse_args(argv[1:])
    run(**vars(parsed))
if __name__ == "__main__":
    # Configure default INFO-level logging before running the CLI.
    logging.basicConfig(level=logging.INFO)
    main(sys.argv)
| 26,695
|
https://github.com/TransformCore/trade-tariff-backend/blob/master/app/controllers/api/admin/footnotes_controller.rb
|
Github Open Source
|
Open Source
|
LicenseRef-scancode-proprietary-license, MIT
| 2,023
|
trade-tariff-backend
|
TransformCore
|
Ruby
|
Code
| 79
| 363
|
module Api
  module Admin
    # Admin JSON API for national footnotes: list, show, and update the
    # description text of a footnote. All actions require authentication.
    class FootnotesController < ApiController
      before_action :authenticate_user!

      # GET /api/admin/footnotes
      # Lists all currently-valid national footnotes with their descriptions
      # eager-loaded to avoid N+1 queries.
      def index
        @footnotes = Footnote.actual.national.eager(:footnote_descriptions).all
        render json: Api::Admin::FootnoteSerializer.new(@footnotes).serializable_hash
      end

      # GET /api/admin/footnotes/:id
      # with_pk! raises when the footnote does not exist.
      def show
        @footnote = Footnote.national.with_pk!(footnote_pk)
        render json: Api::Admin::FootnoteSerializer.new(@footnote, { is_collection: false }).serializable_hash
      end

      # PATCH/PUT /api/admin/footnotes/:id
      # Only the description text is updatable; validation failures return
      # 422 with a serialized error payload.
      def update
        @footnote = Footnote.national.with_pk!(footnote_pk)
        @description = @footnote.footnote_description
        @description.set(footnote_params[:attributes])
        if @description.save
          render json: Api::Admin::FootnoteSerializer.new(@footnote, { is_collection: false }).serializable_hash
        else
          render json: Api::Admin::ErrorSerializationService.new(@description).call, status: :unprocessable_entity
        end
      end

      private

      # JSON:API-shaped params; only the description attribute is permitted.
      def footnote_params
        params.require(:data).permit(:type, attributes: [:description])
      end

      # Splits the :id param into the composite primary key:
      # first 2 chars (type id) and up to 5 following chars (footnote id).
      def footnote_pk
        [footnote_id[0..1], footnote_id[2, 5]]
      end

      def footnote_id
        params.fetch(:id, '')
      end
    end
  end
end
| 18,908
|
https://github.com/fordp2002/ZX21/blob/master/KiCad/ZX21/ESP32-Footprints/ESP32-footprints-Lib.pretty/ESP32.kicad_mod
|
Github Open Source
|
Open Source
|
BSD-3-Clause
| 2,020
|
ZX21
|
fordp2002
|
KiCad Layout
|
Code
| 813
| 2,353
|
(module ESP32 (layer F.Cu) (tedit 57CF26E0)
(fp_text reference REF** (at 0 -4.572) (layer F.SilkS)
(effects (font (size 1 1) (thickness 0.15)))
)
(fp_text value ESP32 (at 0 4.318) (layer F.Fab)
(effects (font (size 1 1) (thickness 0.15)))
)
(fp_line (start -2.413 -3.175) (end -3.175 -3.175) (layer F.SilkS) (width 0.15))
(fp_line (start -3.175 -3.175) (end -3.175 -2.413) (layer F.SilkS) (width 0.15))
(fp_line (start -3.175 2.413) (end -3.175 3.175) (layer F.SilkS) (width 0.15))
(fp_line (start -3.175 3.175) (end -2.413 3.175) (layer F.SilkS) (width 0.15))
(fp_line (start 3.175 2.413) (end 3.175 3.175) (layer F.SilkS) (width 0.15))
(fp_line (start 3.175 3.175) (end 2.413 3.175) (layer F.SilkS) (width 0.15))
(fp_line (start 2.413 -3.175) (end 3.175 -3.175) (layer F.SilkS) (width 0.15))
(fp_line (start 3.175 -3.175) (end 3.175 -2.413) (layer F.SilkS) (width 0.15))
(pad 37 smd oval (at 2.2 2.9) (size 0.2 0.85) (layers F.Cu F.Paste F.Mask))
(pad 25 smd oval (at 2.9 -2.2) (size 0.85 0.2) (layers F.Cu F.Paste F.Mask))
(pad 13 smd oval (at -2.2 -2.9) (size 0.2 0.85) (layers F.Cu F.Paste F.Mask))
(pad 49 smd rect (at 0 0) (size 4.3 4.3) (layers F.Cu F.Paste F.Mask))
(pad 1 smd oval (at -2.9 2.2) (size 0.85 0.2) (layers F.Cu F.Paste F.Mask))
(pad 2 smd oval (at -2.9 1.8) (size 0.85 0.2) (layers F.Cu F.Paste F.Mask))
(pad 3 smd oval (at -2.9 1.4) (size 0.85 0.2) (layers F.Cu F.Paste F.Mask))
(pad 4 smd oval (at -2.9 1) (size 0.85 0.2) (layers F.Cu F.Paste F.Mask))
(pad 5 smd oval (at -2.9 0.6) (size 0.85 0.2) (layers F.Cu F.Paste F.Mask))
(pad 6 smd oval (at -2.9 0.2) (size 0.85 0.2) (layers F.Cu F.Paste F.Mask))
(pad 7 smd oval (at -2.9 -0.2) (size 0.85 0.2) (layers F.Cu F.Paste F.Mask))
(pad 8 smd oval (at -2.9 -0.6) (size 0.85 0.2) (layers F.Cu F.Paste F.Mask))
(pad 9 smd oval (at -2.9 -1) (size 0.85 0.2) (layers F.Cu F.Paste F.Mask))
(pad 10 smd oval (at -2.9 -1.4) (size 0.85 0.2) (layers F.Cu F.Paste F.Mask))
(pad 11 smd oval (at -2.9 -1.8) (size 0.85 0.2) (layers F.Cu F.Paste F.Mask))
(pad 12 smd oval (at -2.9 -2.2) (size 0.85 0.2) (layers F.Cu F.Paste F.Mask))
(pad 14 smd oval (at -1.8 -2.9) (size 0.2 0.85) (layers F.Cu F.Paste F.Mask))
(pad 15 smd oval (at -1.4 -2.9) (size 0.2 0.85) (layers F.Cu F.Paste F.Mask))
(pad 16 smd oval (at -1 -2.9) (size 0.2 0.85) (layers F.Cu F.Paste F.Mask))
(pad 17 smd oval (at -0.6 -2.9) (size 0.2 0.85) (layers F.Cu F.Paste F.Mask))
(pad 18 smd oval (at -0.2 -2.9) (size 0.2 0.85) (layers F.Cu F.Paste F.Mask))
(pad 19 smd oval (at 0.2 -2.9) (size 0.2 0.85) (layers F.Cu F.Paste F.Mask))
(pad 20 smd oval (at 0.6 -2.9) (size 0.2 0.85) (layers F.Cu F.Paste F.Mask))
(pad 21 smd oval (at 1 -2.9) (size 0.2 0.85) (layers F.Cu F.Paste F.Mask))
(pad 22 smd oval (at 1.4 -2.9) (size 0.2 0.85) (layers F.Cu F.Paste F.Mask))
(pad 23 smd oval (at 1.8 -2.9) (size 0.2 0.85) (layers F.Cu F.Paste F.Mask))
(pad 24 smd oval (at 2.2 -2.9) (size 0.2 0.85) (layers F.Cu F.Paste F.Mask))
(pad 26 smd oval (at 2.9 -1.8) (size 0.85 0.2) (layers F.Cu F.Paste F.Mask))
(pad 27 smd oval (at 2.9 -1.4) (size 0.85 0.2) (layers F.Cu F.Paste F.Mask))
(pad 28 smd oval (at 2.9 -1) (size 0.85 0.2) (layers F.Cu F.Paste F.Mask))
(pad 29 smd oval (at 2.9 -0.6) (size 0.85 0.2) (layers F.Cu F.Paste F.Mask))
(pad 30 smd oval (at 2.9 -0.2) (size 0.85 0.2) (layers F.Cu F.Paste F.Mask))
(pad 31 smd oval (at 2.9 0.2) (size 0.85 0.2) (layers F.Cu F.Paste F.Mask))
(pad 32 smd oval (at 2.9 0.6) (size 0.85 0.2) (layers F.Cu F.Paste F.Mask))
(pad 33 smd oval (at 2.9 1) (size 0.85 0.2) (layers F.Cu F.Paste F.Mask))
(pad 34 smd oval (at 2.9 1.4) (size 0.85 0.2) (layers F.Cu F.Paste F.Mask))
(pad 35 smd oval (at 2.9 1.8) (size 0.85 0.2) (layers F.Cu F.Paste F.Mask))
(pad 36 smd oval (at 2.9 2.2) (size 0.85 0.2) (layers F.Cu F.Paste F.Mask))
(pad 38 smd oval (at 1.8 2.9) (size 0.2 0.85) (layers F.Cu F.Paste F.Mask))
(pad 39 smd oval (at 1.4 2.9) (size 0.2 0.85) (layers F.Cu F.Paste F.Mask))
(pad 40 smd oval (at 1 2.9) (size 0.2 0.85) (layers F.Cu F.Paste F.Mask))
(pad 41 smd oval (at 0.6 2.9) (size 0.2 0.85) (layers F.Cu F.Paste F.Mask))
(pad 42 smd oval (at 0.2 2.9) (size 0.2 0.85) (layers F.Cu F.Paste F.Mask))
(pad 43 smd oval (at -0.2 2.9) (size 0.2 0.85) (layers F.Cu F.Paste F.Mask))
(pad 44 smd oval (at -0.6 2.9) (size 0.2 0.85) (layers F.Cu F.Paste F.Mask))
(pad 45 smd oval (at -1 2.9) (size 0.2 0.85) (layers F.Cu F.Paste F.Mask))
(pad 46 smd oval (at -1.4 2.9) (size 0.2 0.85) (layers F.Cu F.Paste F.Mask))
(pad 47 smd oval (at -1.8 2.9) (size 0.2 0.85) (layers F.Cu F.Paste F.Mask))
(pad 48 smd oval (at -2.2 2.9) (size 0.2 0.85) (layers F.Cu F.Paste F.Mask))
)
| 10,668
|
https://github.com/BradSharp/Roblox/blob/master/Utility/Signal.lua
|
Github Open Source
|
Open Source
|
MIT
| 2,020
|
Roblox
|
BradSharp
|
Lua
|
Code
| 131
| 426
|
local Signal = {}
local Connection = {}

Signal.__index = {}

-- Create a new signal with no connections and no waiting threads.
function Signal.new()
	return setmetatable({
		__connections = {},
		__threads = {}
	}, Signal)
end

-- Invoke every connected callback and resume every waiting thread
-- immediately. Waiting threads are one-shot, so the list is cleared
-- after firing.
function Signal.__index:Fire(...)
	for _, connection in ipairs(self.__connections) do
		task.spawn(connection.__callback, ...)
	end
	for _, thread in ipairs(self.__threads) do
		-- BUGFIX: entries here are coroutine threads (see Wait), not
		-- connection objects, so resume the thread itself instead of
		-- indexing a nonexistent __callback field.
		task.spawn(thread, ...)
	end
	table.clear(self.__threads)
end

-- Same as Fire, but schedules callbacks/threads with task.defer.
function Signal.__index:Defer(...)
	for _, connection in ipairs(self.__connections) do
		task.defer(connection.__callback, ...)
	end
	for _, thread in ipairs(self.__threads) do
		-- BUGFIX: resume the stored thread directly (see Fire).
		task.defer(thread, ...)
	end
	table.clear(self.__threads)
end

-- Connect a handler; returns a Connection that can later be disconnected.
function Signal.__index:Connect(handler)
	-- BUGFIX: Connection.new takes (signal, callback); the signal must be
	-- passed so Disconnect can find and remove the connection.
	local connection = Connection.new(self, handler)
	table.insert(self.__connections, connection)
	return connection
end

-- Yield the calling thread until the signal next fires; returns the
-- arguments passed to Fire/Defer.
function Signal.__index:Wait()
	table.insert(self.__threads, coroutine.running())
	return coroutine.yield()
end

Connection.__index = {}

-- Internal constructor; users obtain connections via Signal:Connect.
function Connection.new(signal, callback)
	return setmetatable({
		Connected = true,
		__callback = callback,
		__signal = signal
	}, Connection)
end

-- Remove this connection from its signal so it no longer receives events.
function Connection.__index:Disconnect()
	local connections = self.__signal.__connections
	local i = table.find(connections, self)
	-- BUGFIX: table.find returns nil (not 0) when absent; comparing
	-- nil > 0 would error.
	if i then
		table.remove(connections, i)
	end
	self.Connected = false
end

return Signal
| 1,955
|
https://github.com/lechium/tvOS130Headers/blob/master/System/Library/PrivateFrameworks/TelephonyUtilities.framework/TUContactsDataProvider.h
|
Github Open Source
|
Open Source
|
MIT
| 2,022
|
tvOS130Headers
|
lechium
|
C
|
Code
| 166
| 856
|
/*
* This header is generated by classdump-dyld 1.0
* on Tuesday, November 5, 2019 at 2:45:31 AM Mountain Standard Time
* Operating System: Version 13.0 (Build 17J586)
* Image Source: /System/Library/PrivateFrameworks/TelephonyUtilities.framework/TelephonyUtilities
* classdump-dyld is licensed under GPLv3, Copyright © 2013-2016 by Elias Limneos.
*/
@protocol TUContactsDataSource;
@class TUContactsDataProviderIgnoredHandles;
// Resolves handles (phone numbers / addresses) to contacts through an
// abstract data source and formats display names, including Asian
// (family-name-first) ordering and prefix-hinted phone-number matching.
// NOTE(review): this is a classdump-generated header; comments are inferred
// from the declarations only — confirm against the implementation.
@interface TUContactsDataProvider : NSObject {

	BOOL _useAsianNameFormat;
	/*^block*/id _localeSupportsPrefixHintForFetchRequest;
	id<TUContactsDataSource> _contactsDataSource;
	TUContactsDataProviderIgnoredHandles* _ignoredHandles;

}

@property (assign,nonatomic) BOOL useAsianNameFormat;                                              //@synthesize useAsianNameFormat=_useAsianNameFormat - In the implementation block
@property (nonatomic,copy) id localeSupportsPrefixHintForFetchRequest;                             //@synthesize localeSupportsPrefixHintForFetchRequest=_localeSupportsPrefixHintForFetchRequest - In the implementation block
@property (nonatomic,readonly) id<TUContactsDataSource> contactsDataSource;                        //@synthesize contactsDataSource=_contactsDataSource - In the implementation block
@property (nonatomic,copy) TUContactsDataProviderIgnoredHandles * ignoredHandles;                  //@synthesize ignoredHandles=_ignoredHandles - In the implementation block

// Class-level helpers (formatters, locale tables, fetch-key descriptors).
+(id)numberFormatter;
+(id)asianLocaleCountryCodes;
+(id)unsupportedLocalesForPrefixHint;
+(id)defaultContactKeyDescriptors;
+(id)keysByCombiningDefaultKeysWithKeysToFetch:(id)arg1 ;

// Lookup and formatting instance methods.
-(id)init;
-(id)executeFetchRequest:(id)arg1 ;
-(id)initWithContactsDataSource:(id)arg1 ;
-(id)contactsForHandle:(id)arg1 countryCode:(id)arg2 keysToFetch:(id)arg3 prefixHint:(id)arg4 ;
-(id)compositeNameForContacts:(id)arg1 ;
-(id)contactWithIdentifier:(id)arg1 keysToFetch:(id)arg2 ;
-(id)unifiedContactsForFetchRequest:(id)arg1 countryCode:(id)arg2 ;
-(int)personIDForContact:(id)arg1 ;
-(id)contactLabelForContacts:(id)arg1 matchingHandle:(id)arg2 countryCode:(id)arg3 ;
-(id)compositeNameForHandles:(id)arg1 countryCode:(id)arg2 ;
-(id)labeledHandlesForContacts:(id)arg1 ;
-(id<TUContactsDataSource>)contactsDataSource;
-(TUContactsDataProviderIgnoredHandles *)ignoredHandles;
-(id)prefixHintForFetchRequest:(id)arg1 ;
-(BOOL)useAsianNameFormat;
-(id)localizedCompositeNameForContact:(id)arg1 secondContact:(id)arg2 ;
-(id)formattedNameForHandle:(id)arg1 countryCode:(id)arg2 ;
-(id)localeSupportsPrefixHintForFetchRequest;
-(id)labeledHandlesForContactWithIdentifier:(id)arg1 ;
-(void)setUseAsianNameFormat:(BOOL)arg1 ;
-(void)setLocaleSupportsPrefixHintForFetchRequest:(id)arg1 ;
-(void)setIgnoredHandles:(TUContactsDataProviderIgnoredHandles *)arg1 ;
@end
| 39,745
|
https://github.com/IVIR3zaM/Grabber/blob/master/src/Resources/AbstractResource.php
|
Github Open Source
|
Open Source
|
MIT
| null |
Grabber
|
IVIR3zaM
|
PHP
|
Code
| 375
| 891
|
<?php
namespace IVIR3aM\Grabber\Resources;
use IVIR3aM\Grabber\Entities\Maps\AbstractMap;
use IVIR3aM\Grabber\Entities\AbstractValue;
use IVIR3aM\Grabber\Entities\AbstractValueFactory;
/**
 * Class AbstractResource
 * Abstraction Layer for any kind of resource
 * @package IVIR3aM\Grabber
 */
abstract class AbstractResource implements \Countable
{
    /**
     * Key/value configuration used by concrete resources.
     * @var array
     */
    protected $settings = [];

    /**
     * Resource constructor.
     * @param array|null $settings optional initial settings
     */
    public function __construct(array $settings = null)
    {
        if (is_array($settings)) {
            $this->setSettings($settings);
        }
    }

    /**
     * Replace the whole settings array at once.
     * @param array $settings
     * @return AbstractResource fluent interface
     */
    public function setSettings(array $settings) : AbstractResource
    {
        $this->settings = $settings;
        return $this;
    }

    /**
     * @return array every currently stored setting
     */
    public function getSettings() : array
    {
        return $this->settings;
    }

    /**
     * Store one setting value under the given key.
     * @param string $key
     * @param $value
     * @return AbstractResource fluent interface
     */
    public function setSetting(string $key, $value) : AbstractResource
    {
        $this->settings[$key] = $value;
        return $this;
    }

    /**
     * Read one setting value.
     * @param string $key
     * @return mixed the stored value, or null when absent
     */
    public function getSetting(string $key)
    {
        // Null coalescing is equivalent to the original isset() guard:
        // both yield null for missing keys and for keys holding null.
        return $this->settings[$key] ?? null;
    }

    /**
     * Drop one setting value if it exists.
     * @param string $key
     * @return $this fluent interface
     */
    public function unsetSetting(string $key)
    {
        // unset() on a missing key is already a no-op, so no guard needed.
        unset($this->settings[$key]);
        return $this;
    }

    /**
     * @return int number of settings elements (\Countable support)
     */
    public function count() : int
    {
        return count($this->settings);
    }

    /**
     * Fetch an Entity from Resource base on Entity Map
     * @param AbstractMap $map
     * @param AbstractValueFactory $factory
     * @throws Exception on any failure
     * @return AbstractValue
     */
    abstract public function fetch(AbstractMap $map, AbstractValueFactory $factory) : AbstractValue;

    /**
     * Push and save an Entity to Resource base on Entity Map
     * @param AbstractMap $map
     * @param AbstractValue $entity
     * @return bool whether pushing was successful or not
     */
    abstract public function push(AbstractMap $map, AbstractValue $entity) : bool;

    /**
     * Fetch all Entities from Resource base on Entity Map
     * @param AbstractMap $map
     * @param AbstractValueFactory $factory
     * @throws Exception on any failure
     * @return array
     */
    abstract public function fetchAll(AbstractMap $map, AbstractValueFactory $factory) : array;

    /**
     * Push and save all Entities to Resource base on Entity Map
     * @param AbstractMap $map
     * @param AbstractValue[] $entities
     * @return bool whether pushing was successful or not
     */
    abstract public function pushAll(AbstractMap $map, array $entities) : bool;
}
| 30,811
|
https://github.com/velocist/TS4CheatsInfo/blob/master/src/sims4communitylib/events/build_buy/common_build_buy_event_dispatcher.py
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,022
|
TS4CheatsInfo
|
velocist
|
Python
|
Code
| 143
| 617
|
"""
The Sims 4 Community Library is licensed under the Creative Commons Attribution 4.0 International public license (CC BY 4.0).
https://creativecommons.org/licenses/by/4.0/
https://creativecommons.org/licenses/by/4.0/legalcode
Copyright (c) COLONOLNUTTY
"""
from typing import Any
from sims4communitylib.events.build_buy.events.build_buy_enter import S4CLBuildBuyEnterEvent
from sims4communitylib.events.build_buy.events.build_buy_exit import S4CLBuildBuyExitEvent
from sims4communitylib.events.event_handling.common_event_registry import CommonEventRegistry
from sims4communitylib.modinfo import ModInfo
from sims4communitylib.services.common_service import CommonService
from sims4communitylib.utils.common_injection_utils import CommonInjectionUtils
from zone import Zone
class CommonBuildBuyEventDispatcherService(CommonService):
    """A service that dispatches Build/Buy events.

    .. warning:: Do not use this service directly to listen for events!
        Use the :class:`.CommonEventRegistry` to listen for dispatched events.
    """

    # Dispatch an S4CLBuildBuyEnterEvent for the zone entering Build/Buy.
    # Returns whatever CommonEventRegistry.dispatch returns
    # (presumably a success flag — TODO confirm).
    def _on_build_buy_enter(self, zone: Zone, *_, **__):
        return CommonEventRegistry.get().dispatch(S4CLBuildBuyEnterEvent(zone))

    # Dispatch an S4CLBuildBuyExitEvent for the zone leaving Build/Buy.
    def _on_build_buy_exit(self, zone: Zone, *_, **__):
        return CommonEventRegistry.get().dispatch(S4CLBuildBuyExitEvent(zone))
@CommonInjectionUtils.inject_safely_into(ModInfo.get_identity(), Zone, Zone.on_build_buy_enter.__name__)
def _common_build_buy_enter(original, self, *args, **kwargs) -> Any:
    # Injected wrapper around Zone.on_build_buy_enter: run the vanilla
    # implementation first, then notify S4CL listeners. The original return
    # value is passed through unchanged.
    result = original(self, *args, **kwargs)
    CommonBuildBuyEventDispatcherService.get()._on_build_buy_enter(self, *args, **kwargs)
    return result
@CommonInjectionUtils.inject_safely_into(ModInfo.get_identity(), Zone, Zone.on_build_buy_exit.__name__)
def _common_build_buy_exit(original, self, *args, **kwargs) -> Any:
    # Injected wrapper around Zone.on_build_buy_exit: run the vanilla
    # implementation first, then notify S4CL listeners. The original return
    # value is passed through unchanged.
    result = original(self, *args, **kwargs)
    CommonBuildBuyEventDispatcherService.get()._on_build_buy_exit(self, *args, **kwargs)
    return result
| 46,624
|
https://github.com/helinteg/srr-clang/blob/master/lib/StaticAnalyzer/Checkers/IntegerOverflowChecker.cpp
|
Github Open Source
|
Open Source
|
NCSA
| 2,015
|
srr-clang
|
helinteg
|
C++
|
Code
| 2,359
| 8,266
|
//=== IntegerOverflowChecker.cpp - integer overflows checker ----*- C++ -*-===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
///
/// \file
/// \brief This defines IntegerOverflowChecker, which checks arithmetic
/// operations for integer overflows. This check corresponds to CWE-190.
///
//===----------------------------------------------------------------------===//
//
// Check for overflow performs by checkAdd(), checkSub() and checkMul()
// functions. checkAdd() and checkSub() consist of two parts for signed integer
// overflow check and unsigned integer overflow check(wraparound).
//
// Couple of heuristics were added for FP suppressing. USubHeuristic prevents
// warnings for intentional integer overflow while getting i.e UINT_MAX by
// subtracting 1U from 0U. GlobalsMembersHeuristic suppresses warning if
// arguments of arithmetic operation are global variables or class members.
// Sometimes CSA fails to determine right value for that type of arguments and
// inter-unit analysis assumed to be the best solution of this problem.
//
//===----------------------------------------------------------------------===//
#include "ClangSACheckers.h"
#include "clang/StaticAnalyzer/Core/BugReporter/BugType.h"
#include "clang/StaticAnalyzer/Core/Checker.h"
#include "clang/StaticAnalyzer/Core/CheckerManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CheckerContext.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/SVals.h"
using namespace clang;
using namespace ento;
using namespace nonloc;
namespace {
/// Path-sensitive checker that models +, -, * (and new[] allocation sizes)
/// symbolically and warns when the operation can overflow (CWE-190).
class IntegerOverflowChecker : public Checker<check::PostStmt<BinaryOperator>,
                                              check::PostStmt<CXXNewExpr>,
                                              check::PostStmt<CallExpr>,
                                              check::PostStmt<MemberExpr>,
                                              check::Bind> {
  /// Lazily-created bug types for defined (unsigned wraparound) and
  /// undefined (signed) overflow reports.
  mutable std::unique_ptr<BuiltinBug> BT_Def, BT_Undef;

  /// Stores SourceLocations in which overflows happened for reducing the amount
  /// of equivalent warnings.
  mutable std::set<SourceLocation> OverflowLoc;

  /// Per-sub-check enable flags and registered check names.
  struct IntegerOverflowFilter {
    DefaultBool CheckIntegerOverflowDef;
    DefaultBool CheckIntegerOverflowUndef;
    CheckName CheckNameIntegerOverflowDef;
    CheckName CheckNameIntegerOverflowUndef;
  };

  /// Emit a report for an overflow at \p SL (sinks the path on success).
  void reportBug(const std::string &Msg, CheckerContext &C,
                 const SourceLocation &SL, bool isUndef) const;

  /// Compose the user-visible diagnostic text for a (possible) overflow.
  std::string composeMsg(ProgramStateRef StateNotOverflow, const SVal &Lhs,
                         const SVal &Rhs, const Expr *ExprLhs,
                         const Expr *ExprRhs, bool isSigned, bool isOverflow,
                         BinaryOperator::Opcode *Op, CheckerContext &C) const;

  /// Check if addition of \p Lhs and \p Rhs can overflow.
  Optional<DefinedOrUnknownSVal> checkAdd(CheckerContext &C, const SVal &Lhs,
                                          const SVal &Rhs, QualType BinType,
                                          bool &isOverflow) const;

  /// Check if subtraction of \p Lhs and \p Rhs can overflow.
  Optional<DefinedOrUnknownSVal> checkSub(CheckerContext &C, const SVal &Lhs,
                                          const SVal &Rhs,
                                          const QualType &BinType,
                                          bool &isOverflow) const;

  /// Check if multiplication of \p Lhs and \p Rhs can overflow.
  Optional<DefinedOrUnknownSVal> checkMul(CheckerContext &C, const SVal &Lhs,
                                          const SVal &Rhs,
                                          const QualType &BinType,
                                          bool &isOverflow) const;

  /// \returns dump and constraints of \p Val.
  std::string getSymbolInformation(const SVal &Val, const Expr *E,
                                   CheckerContext &C) const;

  /// We ignore intentional underflow because of subtracting X from zero - the
  /// minimum unsigned value.
  bool makeUSubHeuristics(const BinaryOperator *BO) const;

  /// \returns true if there are suspicions that the actual value might be lose
  /// by analyzer.
  bool makeGlobalsMembersHeuristics(const SVal &Val, const Stmt *S,
                                    CheckerContext &C) const;

  /// Check if \p S should be ignored when participates in overflow.
  bool hasGlobalVariablesOrMembers(const Stmt *S, CheckerContext &C) const;

  /// Check if \p SE should be ignored when participates in overflow.
  bool hasGlobalVariablesOrMembers(const SymExpr *SE, CheckerContext &C) const;

  /// Record \p S (resp. \p SV) in the WhiteList program-state trait so that
  /// later overflows involving it are suppressed.
  ProgramStateRef addToWhiteList(const Stmt *S, ProgramStateRef State,
                                 const LocationContext *LCtx) const;
  inline ProgramStateRef addToWhiteList(const SVal &SV,
                                        ProgramStateRef State) const;

  bool isInWhiteList(const Stmt *S, ProgramStateRef State,
                     const LocationContext *LCtx) const;
  inline bool isInWhiteList(const SVal &Val, ProgramStateRef State) const;

public:
  IntegerOverflowFilter Filter;

  /// Check addition, multiplication, and subtraction for overflow.
  void checkPostStmt(const BinaryOperator *B, CheckerContext &C) const;

  /// Contains check for new[].
  void checkPostStmt(const CXXNewExpr *NE, CheckerContext &C) const;

  /// Note if value returned by a call should be ignored when participates in
  /// overflow.
  void checkPostStmt(const CallExpr *CE, CheckerContext &C) const;

  /// Make MemberExpr ignored.
  void checkPostStmt(const MemberExpr *ME, CheckerContext &C) const;

  /// Note if value which is handled by checkBind should be ignored when
  /// participates in overflow.
  void checkBind(const SVal &Loc, const SVal &Val, const Stmt *S,
                 CheckerContext &C) const;
};
} // end anonymous namespace
/// WhiteList stores symbols change of which can be missed by analyzer.
REGISTER_LIST_WITH_PROGRAMSTATE(WhiteList, SVal)
// Emit an integer-overflow report at a sink node and remember the source
// location (OverflowLoc) so equivalent warnings are not repeated.
void IntegerOverflowChecker::reportBug(const std::string &Msg,
                                       CheckerContext &C,
                                       const SourceLocation &SL,
                                       bool isUndef) const {
  if (const ExplodedNode *N = C.generateSink(C.getState())) {
    // Lazily create the matching bug type: undefined behavior for signed
    // overflow, defined wraparound for unsigned.
    if (isUndef && !BT_Undef)
      BT_Undef.reset(new BuiltinBug(
          Filter.CheckNameIntegerOverflowUndef, "Integer overflow",
          "Arithmetic operation resulted in an overflow"));
    else if (!isUndef && !BT_Def)
      BT_Def.reset(
          new BuiltinBug(Filter.CheckNameIntegerOverflowDef, "Integer overflow",
                         "Arithmetic operation resulted in an overflow"));
    BuiltinBug *BT =
        static_cast<BuiltinBug *>((isUndef ? BT_Undef : BT_Def).get());
    C.emitReport(llvm::make_unique<BugReport>(*BT, Msg, N));
    OverflowLoc.insert(SL);
  }
}
// Compose the diagnostic text: severity prefix ("Possible" for tainted-only
// overflow, "Undefined behavior" for signed), Overflow vs. Underflow, the
// operation name, and both operands with their constraint ranges. A null
// \p Op means the overflow happened while computing a new[] allocation size.
std::string
IntegerOverflowChecker::composeMsg(ProgramStateRef StateNotOverflow,
                                   const SVal &Lhs, const SVal &Rhs,
                                   const Expr *ExprLhs, const Expr *ExprRhs,
                                   bool isSigned, bool isOverflow,
                                   BinaryOperator::Opcode *Op,
                                   CheckerContext &C) const {
  std::string Msg;
  std::string ErrorType = (!Op || isOverflow) ? "Overflow" : "Underflow";
  if (StateNotOverflow) {
    // A non-overflowing state also exists: report a "possible" overflow and
    // point at the tainted operand that makes the overflow reachable.
    Msg.assign("Possible integer " + ErrorType + ": ");
    if (C.getState()->isTainted(Lhs))
      Msg.append("left operand is tainted. ");
    else
      Msg.append("right operand is tainted. ");
  } else {
    // Signed overflow is undefined behavior; unsigned wraparound is defined
    // but still reported.
    if (isSigned)
      Msg.assign("Undefined behavior: ");
    Msg.append("Integer " + ErrorType + ". ");
  }
  std::string Operation, Preposition;
  if (!Op || *Op == BO_Mul || *Op == BO_MulAssign) {
    Operation = "Multiplication of ";
    Preposition = " with ";
  } else if (*Op == BO_Add || *Op == BO_AddAssign) {
    Operation = "Addition of ";
    Preposition = " with ";
  } else {
    Operation = "Subtraction of ";
    Preposition = " from ";
  }
  // Subtraction reads "Subtraction of <rhs> from <lhs>", so the operands
  // are printed in swapped order.
  if (Op && (*Op == BO_Sub || (*Op == BO_SubAssign)))
    Msg.append(Operation + getSymbolInformation(Rhs, ExprRhs, C) + Preposition +
               getSymbolInformation(Lhs, ExprLhs, C));
  else
    Msg.append(Operation + getSymbolInformation(Lhs, ExprLhs, C) + Preposition +
               getSymbolInformation(Rhs, ExprRhs, C));
  if (!Op)
    Msg.append(" while memory allocation.");
  return Msg;
}
// Build the symbolic condition under which Lhs + Rhs overflows in BinType.
// Signed: positive overflow (both operands > 0, sum <= 0) or negative
// overflow (both operands < 0, sum >= 0). Unsigned: wraparound detected by
// the sum being smaller than the operands. \p isOverflow distinguishes
// overflow from underflow for the diagnostic text.
Optional<DefinedOrUnknownSVal>
IntegerOverflowChecker::checkAdd(CheckerContext &C, const SVal &Lhs,
                                 const SVal &Rhs, QualType BinType,
                                 bool &isOverflow) const {
  SVal CondOverflow;
  ProgramStateRef State = C.getState();
  SValBuilder &SvalBuilder = C.getSValBuilder();
  SVal NullSval = SvalBuilder.makeZeroVal(BinType);
  QualType CondType = SvalBuilder.getConditionType();
  SVal ValArgSum = SvalBuilder.evalBinOp(State, BO_Add, Lhs, Rhs, BinType);
  if (BinType->isSignedIntegerType()) {
    // For positive operands
    // rhs > 0
    SVal CondRhsGtNull = SvalBuilder.evalBinOp(State, BO_GT, Rhs, NullSval,
                                               CondType);
    // lhs > 0
    SVal CondLhsGtNull = SvalBuilder.evalBinOp(State, BO_GT, Lhs, NullSval,
                                               CondType);
    // rhs > 0 && lhs > 0
    SVal CondArgsGtNull = SvalBuilder.evalBinOp(State, BO_And, CondRhsGtNull,
                                                CondLhsGtNull, CondType);
    // lhs+rhs<=0
    SVal CondArgSumLtNull = SvalBuilder.evalBinOp(State, BO_LE, ValArgSum,
                                                  NullSval, CondType);
    SVal CondPositiveOverflow =
        SvalBuilder.evalBinOp(State, BO_And, CondArgsGtNull, CondArgSumLtNull,
                              CondType);
    // For negative operands
    // rhs < 0 — NOTE(review): despite the variable name, this call tests
    // Rhs; the next one tests Lhs. The swap is harmless because addition
    // is commutative and the two results are only AND-ed together.
    SVal CondLhsLtNull = SvalBuilder.evalBinOp(State, BO_LT, Rhs, NullSval,
                                               CondType);
    // lhs < 0
    SVal CondRhsLtNull = SvalBuilder.evalBinOp(State, BO_LT, Lhs, NullSval,
                                               CondType);
    // rhs < 0 && lhs < 0
    SVal CondArgsLtNull = SvalBuilder.evalBinOp(State, BO_And, CondLhsLtNull,
                                                CondRhsLtNull, CondType);
    // lhs+rhs>=0
    SVal CondArgSumGtNull = SvalBuilder.evalBinOp(State, BO_GE, ValArgSum,
                                                  NullSval, CondType);
    SVal CondNegativeOverflow =
        SvalBuilder.evalBinOp(State, BO_And, CondArgsLtNull, CondArgSumGtNull,
                              CondType);
    if (!CondPositiveOverflow.isZeroConstant())
      isOverflow = true;
    CondOverflow = SvalBuilder.evalBinOp(State, BO_Or, CondPositiveOverflow,
                                         CondNegativeOverflow, CondType);
  } else {
    // Unsigned addition can only wrap upward.
    isOverflow = true;
    // lhs > sum
    SVal CondLhsGtArgSum = SvalBuilder.evalBinOp(State, BO_GT, Lhs, ValArgSum,
                                                 CondType);
    // rhs > sum
    SVal CondRhsGtArgSum = SvalBuilder.evalBinOp(State, BO_GT, Rhs, ValArgSum,
                                                 CondType);
    // lhs > sum && rhs > sum
    CondOverflow = SvalBuilder.evalBinOp(State, BO_And, CondLhsGtArgSum,
                                         CondRhsGtArgSum, CondType);
  }
  return CondOverflow.getAs<DefinedOrUnknownSVal>();
}
// Build the symbolic condition under which Lhs - Rhs overflows in BinType.
// Signed: negative overflow (lhs < 0, rhs > 0, lhs-rhs >= 0) or positive
// overflow (lhs > 0, rhs < 0, lhs-rhs <= 0). Unsigned: wraparound whenever
// lhs < rhs. \p isOverflow distinguishes overflow from underflow.
Optional<DefinedOrUnknownSVal>
IntegerOverflowChecker::checkSub(CheckerContext &C, const SVal &Lhs,
                                 const SVal &Rhs, const QualType &BinType,
                                 bool &isOverflow) const {
  SVal CondOverflow;
  ProgramStateRef State = C.getState();
  SValBuilder &SvalBuilder = C.getSValBuilder();
  SVal NullSval = SvalBuilder.makeZeroVal(BinType);
  QualType CondType = SvalBuilder.getConditionType();
  SVal ValArgSub = SvalBuilder.evalBinOp(State, BO_Sub, Lhs, Rhs, BinType);
  if (BinType->isSignedIntegerType()) {
    // When first operand is negative
    // lhs < 0
    SVal CondLhsLtNull = SvalBuilder.evalBinOp(State, BO_LT, Lhs, NullSval,
                                               CondType);
    // rhs > 0
    SVal CondRhsGtNull = SvalBuilder.evalBinOp(State, BO_GT, Rhs, NullSval,
                                               CondType);
    // rhs > 0 && lhs < 0
    SVal CondLhsLtNullRhsGtNull =
        SvalBuilder.evalBinOp(State, BO_And, CondLhsLtNull, CondRhsGtNull,
                              CondType);
    // lhs - rhs >= 0
    SVal CondArgSubGeNull = SvalBuilder.evalBinOp(State, BO_GE, ValArgSub,
                                                  NullSval, CondType);
    // rhs > 0 && lhs < 0 && lhs-rhs >= 0
    SVal CondNegativeOverflow =
        SvalBuilder.evalBinOp(State, BO_And, CondLhsLtNullRhsGtNull,
                              CondArgSubGeNull, CondType);
    // When first operand is positive
    // lhs > 0
    SVal CondLhsGtNull = SvalBuilder.evalBinOp(State, BO_GT, Lhs, NullSval,
                                               CondType);
    // rhs < 0
    SVal CondRhsLtNull = SvalBuilder.evalBinOp(State, BO_LT, Rhs, NullSval,
                                               CondType);
    // rhs < 0 && lhs > 0
    SVal CondLhsGtNullRhsLtNull =
        SvalBuilder.evalBinOp(State, BO_And, CondLhsGtNull, CondRhsLtNull,
                              CondType);
    // lhs - rhs <= 0
    SVal CondArgSubLeNull = SvalBuilder.evalBinOp(State, BO_LE, ValArgSub,
                                                  NullSval, CondType);
    // rhs < 0 && lhs > 0 && lhs - rhs <= 0
    SVal CondPositiveOverflow =
        SvalBuilder.evalBinOp(State, BO_And, CondLhsGtNullRhsLtNull,
                              CondArgSubLeNull, CondType);
    CondOverflow = SvalBuilder.evalBinOp(State, BO_Or, CondNegativeOverflow,
                                         CondPositiveOverflow, CondType);
    if (!CondPositiveOverflow.isZeroConstant())
      isOverflow = true;
  } else
    // Unsigned subtraction wraps exactly when lhs < rhs.
    CondOverflow = SvalBuilder.evalBinOp(State, BO_LT, Lhs, Rhs, CondType);
  return CondOverflow.getAs<DefinedOrUnknownSVal>();
}
// Build the symbolic condition under which Lhs * Rhs overflows in BinType.
// After excluding zero operands (no overflow possible), the check is the
// classic division probe: (lhs * rhs) / lhs != rhs under the evaluated
// (wrapping) semantics.
Optional<DefinedOrUnknownSVal>
IntegerOverflowChecker::checkMul(CheckerContext &C, const SVal &Lhs,
                                 const SVal &Rhs, const QualType &BinType,
                                 bool &isOverflow) const {
  ProgramStateRef State = C.getState();
  ProgramStateRef CondNotOverflow, CondPossibleOverflow;
  SValBuilder &SvalBuilder = C.getSValBuilder();
  SVal NullSval = SvalBuilder.makeZeroVal(BinType);
  QualType CondType = SvalBuilder.getConditionType();
  // lhs != 0 (the original comments said "== 0", but BO_NE is computed)
  SVal LhsNotNull = SvalBuilder.evalBinOp(State, BO_NE, Lhs, NullSval,
                                          CondType);
  // rhs != 0
  SVal RhsNotNull = SvalBuilder.evalBinOp(State, BO_NE, Rhs, NullSval,
                                          CondType);
  Optional<DefinedOrUnknownSVal> CondOverflow =
      SvalBuilder.evalBinOp(State, BO_And, LhsNotNull, RhsNotNull, CondType)
          .getAs<DefinedOrUnknownSVal>();
  if (!CondOverflow.hasValue())
    return CondOverflow;
  std::tie(CondPossibleOverflow, CondNotOverflow) =
      State->assume(*CondOverflow);
  // Both zero and non-zero operand states are feasible: stay undecided.
  if (CondNotOverflow && CondPossibleOverflow)
    return CondOverflow;
  if (CondPossibleOverflow) {
    // lhs * rhs
    SVal ValMulti = SvalBuilder.evalBinOp(State, BO_Mul, Lhs, Rhs, BinType);
    // First operand(lhs) is not 0
    // (lhs * rhs)/lhs
    SVal ValDiv = SvalBuilder.evalBinOp(State, BO_Div, ValMulti, Lhs, BinType);
    // (lhs * rhs)/lhs != rhs
    CondOverflow = SvalBuilder.evalBinOp(State, BO_NE, ValDiv, Rhs, CondType)
                       .getAs<DefinedOrUnknownSVal>();
  }
  // Overflow (rather than underflow) when the type is unsigned or both
  // operands have the same sign.
  isOverflow = BinType->isUnsignedIntegerOrEnumerationType() ||
               SvalBuilder.evalBinOp(State, BO_LT, Lhs, NullSval, CondType)
                       .isZeroConstant() ==
                   SvalBuilder.evalBinOp(State, BO_LT, Rhs, NullSval, CondType)
                       .isZeroConstant();
  return CondOverflow;
}
// Render \p Val for the diagnostic: its dump, any constraint range recorded
// for the symbol, and (unless it is a plain literal) the pretty-printed
// source expression in parentheses.
std::string
IntegerOverflowChecker::getSymbolInformation(const SVal &Val, const Expr *E,
                                             CheckerContext &C) const {
  ProgramStateRef State = C.getState();
  std::string StreamRangeStr, SValDumpStr;
  llvm::raw_string_ostream StreamRange(StreamRangeStr), SValDump(SValDumpStr);
  Val.dumpToStream(SValDump);
  if (Val.getSubKind() == SymbolValKind) {
    // Print the whole constraint map, then scrape out the single line that
    // starts with this symbol's dump ("<sym> : <range>").
    State->getConstraintManager().print(State, StreamRange, "\n", "\n");
    StreamRange.flush();
    size_t from = StreamRangeStr.find(SValDump.str() + " : ");
    if (from != std::string::npos) {
      size_t to = StreamRangeStr.find("\n", from);
      from += SValDump.str().length();
      // raw_string_ostream::str() returns the backing string by reference,
      // so this append lands in SValDumpStr.
      SValDump.str().append(StreamRangeStr.substr(from, to - from));
    }
  }
  // Integer literals need no source echo.
  if (!E || isa<IntegerLiteral>(E->IgnoreParenCasts()))
    return SValDump.str();
  E = E->IgnoreParens();
  // Same for a unary +/- applied directly to a literal.
  if (const UnaryOperator *UO = dyn_cast<UnaryOperator>(E))
    if ((UO->getOpcode() == UO_Plus || UO->getOpcode() == UO_Minus) &&
        isa<IntegerLiteral>(UO->getSubExpr()))
      return SValDump.str();
  SValDump << " (";
  E->printPretty(SValDump, 0, C.getASTContext().getPrintingPolicy());
  SValDump << ")";
  return SValDump.str();
}
// We ignore intentional underflow with subtracting X from zero - the minimal
// unsigned value.
bool
IntegerOverflowChecker::makeUSubHeuristics(const BinaryOperator *BO) const {
const Expr *ExprLhs = BO->getLHS()->IgnoreParenCasts();
if (isa<IntegerLiteral>(ExprLhs)) {
const IntegerLiteral *IL = dyn_cast<IntegerLiteral>(ExprLhs);
return IL->getValue().isMinValue();
}
return false;
}
// FP-suppression heuristic: \returns true when \p Val / \p S involves class
// members, globals/parameters storage, or values recorded in the WhiteList
// trait — cases where the analyzer may not know the real value without
// inter-unit analysis, so a warning would likely be a false positive.
bool
IntegerOverflowChecker::makeGlobalsMembersHeuristics(const SVal &Val,
                                                     const Stmt *S,
                                                     CheckerContext &C) const {
  if (Val.isConstant()) {
    // A constant is suppressed outright only when it was whitelisted and is
    // not a plain literal or an implicit cast; otherwise fall back to the
    // syntactic walk over S.
    bool good = isInWhiteList(Val, C.getState()) &&
                (S->getStmtClass() != Stmt::IntegerLiteralClass) &&
                (S->getStmtClass() != Stmt::ImplicitCastExprClass);
    return good ? true : hasGlobalVariablesOrMembers(S, C);
  } else if (const SymExpr *SE = Val.getAsSymExpr())
    return isInWhiteList(Val, C.getState())
               ? true
               : hasGlobalVariablesOrMembers(SE, C);
  else if (const MemRegion *Mem = Val.getAsRegion())
    // Field regions and globals/parameters storage are suppressed directly.
    return isInWhiteList(Val, C.getState()) || isa<FieldRegion>(Mem) ||
           Mem->hasGlobalsOrParametersStorage();
  return false;
}
// Recursively scans statement \p S for uses of global variables or member
// accesses; such uses make the resulting value exempt from reporting.
bool
IntegerOverflowChecker::hasGlobalVariablesOrMembers(const Stmt *S,
                                                    CheckerContext &C) const {
  if (S == NULL || S->getStmtClass() == Stmt::IntegerLiteralClass)
    return false;

  ProgramStateRef State = C.getState();
  const LocationContext *LCtx = C.getLocationContext();

  // Already white-listed values count immediately (implicit casts are
  // looked through separately below).
  if ((S->getStmtClass() != Stmt::ImplicitCastExprClass) &&
      isInWhiteList(S, State, LCtx))
    return true;

  if (const MemberExpr *MExpr = dyn_cast<MemberExpr>(S)) {
    // For member function calls, inspect the callee body instead.
    if (MExpr->getMemberDecl()->isFunctionOrFunctionTemplate())
      return hasGlobalVariablesOrMembers(MExpr->getMemberDecl()->getBody(), C);
    // We found member usage!
    return true;
  }

  if (const ImplicitCastExpr *ICE = dyn_cast<ImplicitCastExpr>(S))
    if (isa<DeclRefExpr>(ICE->getSubExpr()) && isInWhiteList(C.getSVal(ICE),
                                                            State))
      return true;

  // Variable reference: check whether its currently bound value is
  // white-listed.
  if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(S))
    if (const VarDecl *VarD = dyn_cast<VarDecl>(DRE->getDecl())) {
      Loc VLoc = C.getStoreManager().getLValueVar(VarD, LCtx);
      SVal VVal = C.getStoreManager().getBinding(State->getStore(), VLoc);
      if (isInWhiteList(VVal, State))
        return true;
    }

  // We will not surrender! (Recurse into every child statement.)
  for (auto I = S->child_begin(); I != S->child_end(); I++)
    if (hasGlobalVariablesOrMembers(*I, C))
      return true;

  return false;
}
// Symbolic-expression counterpart: walks the SymExpr tree looking for
// white-listed symbols or symbols rooted in field/global/parameter regions.
bool
IntegerOverflowChecker::hasGlobalVariablesOrMembers(const SymExpr *SE,
                                                    CheckerContext &C) const {
  WhiteListTy ES = C.getState()->get<WhiteList>();
  // Direct hit: the symbol itself is in the white list.
  for (auto I = ES.begin(); I != ES.end(); ++I) {
    SVal Val = *I;
    SymbolRef SR = Val.getAsSymbol();
    if (SR == SE)
      return true;
  }

  // SymbolCast
  if (const SymbolCast *SC = dyn_cast<SymbolCast>(SE))
    return hasGlobalVariablesOrMembers(SC->getOperand(), C);

  // BinarySymExpr: recurse into the symbolic operand(s).
  if (const IntSymExpr *ISE = dyn_cast<IntSymExpr>(SE))
    return hasGlobalVariablesOrMembers(ISE->getRHS(), C);
  if (const SymIntExpr *SIE = dyn_cast<SymIntExpr>(SE))
    return hasGlobalVariablesOrMembers(SIE->getLHS(), C);
  if (const SymSymExpr *SSE = dyn_cast<SymSymExpr>(SE))
    return (hasGlobalVariablesOrMembers(SSE->getLHS(), C) ||
            hasGlobalVariablesOrMembers(SSE->getRHS(), C));

  // SymbolData: region-based symbols are exempt when the region is a struct
  // field or has globals/parameters storage.
  if (const SymbolRegionValue *SRV = dyn_cast<SymbolRegionValue>(SE)) {
    const TypedValueRegion *TVR = SRV->getRegion();
    return isa<FieldRegion>(TVR) || TVR->hasGlobalsOrParametersStorage();
  }
  if (const SymbolDerived *SD = dyn_cast<SymbolDerived>(SE)) {
    const TypedValueRegion *TVR = SD->getRegion();
    return isa<FieldRegion>(TVR) || TVR->hasGlobalsOrParametersStorage();
  }
  if (const SymbolConjured *SC = dyn_cast<SymbolConjured>(SE))
    return hasGlobalVariablesOrMembers(SC->getStmt(), C);

  return false;
}
// Adds the SVal of \p S (parentheses stripped) to the white list.
ProgramStateRef
IntegerOverflowChecker::addToWhiteList(const Stmt *S, ProgramStateRef State,
                                       const LocationContext *LCtx) const {
  if (const Expr *E = dyn_cast_or_null<Expr>(S))
    S = E->IgnoreParens();
  return addToWhiteList(State->getSVal(S, LCtx), State);
}
// Adds \p Val to the white list unless it is already present (returns the
// unchanged state in that case).
inline ProgramStateRef
IntegerOverflowChecker::addToWhiteList(const SVal &Val,
                                       ProgramStateRef State) const {
  return State->get<WhiteList>().contains(Val) ? State
                                               : State->add<WhiteList>(Val);
}
// Checks whether the SVal of \p S (parentheses stripped) is white-listed.
bool IntegerOverflowChecker::isInWhiteList(const Stmt *S, ProgramStateRef State,
                                           const LocationContext *LCtx) const {
  if (const Expr *E = dyn_cast_or_null<Expr>(S))
    S = E->IgnoreParens();
  return isInWhiteList(State->getSVal(S, LCtx), State);
}
// Membership test on the program-state white list.
inline bool IntegerOverflowChecker::isInWhiteList(const SVal &V,
                                                  ProgramStateRef State) const {
  return State->get<WhiteList>().contains(V);
}
// Post-statement hook for binary operators: models +, -, * (and their
// compound-assignment forms) on integer operands and reports when the
// result cannot be represented in the operand type.
void IntegerOverflowChecker::checkPostStmt(const BinaryOperator *B,
                                           CheckerContext &C) const {
  // Report each source location at most once.
  if (OverflowLoc.find(B->getExprLoc()) != OverflowLoc.end())
    return;

  if (!B->getLHS()->getType()->isIntegerType() ||
      !B->getRHS()->getType()->isIntegerType())
    return;

  ProgramStateRef State = C.getState();
  QualType BinType = B->getType();
  const Expr *ExprLhs = B->getLHS();
  const Expr *ExprRhs = B->getRHS();
  SVal Lhs = C.getSVal(ExprLhs);
  SVal Rhs = C.getSVal(ExprRhs);

  // Operands matching the globals/members heuristics are white-listed
  // instead of being checked.
  if (makeGlobalsMembersHeuristics(Lhs, ExprLhs, C)) {
    C.addTransition(addToWhiteList(Lhs, State));
    return;
  }
  if (makeGlobalsMembersHeuristics(Rhs, ExprRhs, C)) {
    C.addTransition(addToWhiteList(Rhs, State));
    return;
  }

  // Defined (unsigned wrap) and undefined (signed) overflow are enabled by
  // separate checker options.
  if (!Filter.CheckIntegerOverflowDef && BinType->isUnsignedIntegerType())
    return;
  if (!Filter.CheckIntegerOverflowUndef && BinType->isSignedIntegerType())
    return;

  BinaryOperator::Opcode Op = B->getOpcode();
  if (Op != BO_Add && Op != BO_Mul && Op != BO_Sub && Op != BO_AddAssign &&
      Op != BO_MulAssign && Op != BO_SubAssign)
    return;

  Optional<DefinedOrUnknownSVal> CondOverflow;
  ProgramStateRef StateOverflow, StateNotOverflow;
  bool isOverflow = false;

  if (Op == BO_Add || Op == BO_AddAssign)
    CondOverflow = checkAdd(C, Lhs, Rhs, BinType, isOverflow);
  else if (Op == BO_Sub || Op == BO_SubAssign) {
    // "0u - x" style negation is treated as intentional; skip it.
    if ((BinType->isUnsignedIntegerType()) && makeUSubHeuristics(B))
      return;
    CondOverflow = checkSub(C, Lhs, Rhs, BinType, isOverflow);
  } else if (Op == BO_Mul || Op == BO_MulAssign)
    CondOverflow = checkMul(C, Lhs, Rhs, BinType, isOverflow);

  if (!CondOverflow)
    return;

  std::tie(StateOverflow, StateNotOverflow) = State->assume(*CondOverflow);

  // Report only when overflow is feasible; when a non-overflowing path also
  // exists, require a tainted operand to cut down false positives.
  if (!StateOverflow ||
      (StateNotOverflow && !(State->isTainted(Lhs) || State->isTainted(Rhs))))
    return;

  std::string Msg = composeMsg(StateNotOverflow, Lhs, Rhs, ExprLhs, ExprRhs,
                               B->getType()->isSignedIntegerOrEnumerationType(),
                               isOverflow, &Op, C);

  reportBug(Msg, C, B->getExprLoc(), BinType->isSignedIntegerType());
}
// Checks "new T[n]" for overflow in the allocation-size computation
// (sizeof(T) * n), which is unsigned (defined-behavior) arithmetic.
void IntegerOverflowChecker::checkPostStmt(const CXXNewExpr *NewExpr,
                                           CheckerContext &C) const {
  if (!Filter.CheckIntegerOverflowDef)
    return;

  // Only array-new allocations multiply the element size by a count.
  if (NewExpr->getOperatorNew()->getOverloadedOperator() != OO_Array_New)
    return;

  const Expr *ArrSize = NewExpr->getArraySize();
  SVal ElementCount = C.getSVal(ArrSize);
  ProgramStateRef State = C.getState();

  if (makeGlobalsMembersHeuristics(ElementCount, ArrSize, C)) {
    C.addTransition(addToWhiteList(ElementCount, State));
    return;
  }

  QualType NewExprType = NewExpr->getAllocatedType();
  uint64_t NewExprTypeSize = C.getASTContext().getTypeSizeInChars(NewExprType)
                                              .getQuantity();

  SValBuilder &SvalBuilder = C.getSValBuilder();
  SVal NewExprTypeSizeVal = SvalBuilder.makeIntVal(NewExprTypeSize, true);

  bool isOverflow;
  Optional<DefinedOrUnknownSVal> CondOverflow = checkMul(C, NewExprTypeSizeVal,
                                                         ElementCount,
                                                         ArrSize->getType(),
                                                         isOverflow);
  if (!CondOverflow)
    return;

  ProgramStateRef StateOverflow, StateNotOverflow;
  std::tie(StateOverflow, StateNotOverflow) = State->assume(*CondOverflow);

  // Same reporting policy as for binary operators: feasible overflow, and a
  // tainted count required when a clean path also exists.
  if (!StateOverflow || (StateNotOverflow && !State->isTainted(ElementCount)))
    return;

  std::string Msg = composeMsg(StateNotOverflow, NewExprTypeSizeVal,
                               ElementCount, 0, ArrSize, false, isOverflow, 0,
                               C);

  reportBug(Msg, C, NewExpr->getExprLoc(), false);
}
// Call results that satisfy the globals/members heuristics are white-listed
// so later arithmetic on them is not reported.
void IntegerOverflowChecker::checkPostStmt(const CallExpr *CE,
                                           CheckerContext &C) const {
  if (makeGlobalsMembersHeuristics(C.getSVal(CE), CE, C))
    C.addTransition(addToWhiteList(CE, C.getState(), C.getLocationContext()));
}
// Member accesses are unconditionally white-listed.
void IntegerOverflowChecker::checkPostStmt(const MemberExpr *ME,
                                           CheckerContext &C) const {
  C.addTransition(addToWhiteList(ME, C.getState(), C.getLocationContext()));
}
// On every store: if the bound value matches the globals/members
// heuristics, remember it in the white list.
void IntegerOverflowChecker::checkBind(const SVal &Loc, const SVal &Val,
                                       const Stmt *S, CheckerContext &C) const {
  if (makeGlobalsMembersHeuristics(Val, S, C))
    C.addTransition(addToWhiteList(Val, C.getState()))
// Registers the two user-visible variants of this checker: defined
// (unsigned wrap-around) and undefined (signed) integer overflow.
#define REGISTER_CHECKER(name) \
  void ento::register##name(CheckerManager &mgr) { \
    IntegerOverflowChecker *checker = \
        mgr.registerChecker<IntegerOverflowChecker>(); \
    checker->Filter.Check##name = true; \
    checker->Filter.CheckName##name = mgr.getCurrentCheckName(); \
  }

REGISTER_CHECKER(IntegerOverflowDef)
REGISTER_CHECKER(IntegerOverflowUndef)
| 49,051
|
https://github.com/y-iihoshi/ThScoreFileConverter/blob/master/ThScoreFileConverter/Models/Th13/ClearReplacer.cs
|
Github Open Source
|
Open Source
|
LicenseRef-scancode-unknown-license-reference, BSD-2-Clause
| 2,023
|
ThScoreFileConverter
|
y-iihoshi
|
C#
|
Code
| 82
| 311
|
//-----------------------------------------------------------------------
// <copyright file="ClearReplacer.cs" company="None">
// Copyright (c) IIHOSHI Yoshinori.
// Licensed under the BSD-2-Clause license. See LICENSE.txt file in the project root for full license information.
// </copyright>
//-----------------------------------------------------------------------
#pragma warning disable SA1600 // Elements should be documented
using System.Collections.Generic;
using ThScoreFileConverter.Core.Models.Th13;
using IScoreData = ThScoreFileConverter.Models.Th10.IScoreData<ThScoreFileConverter.Models.Th13.StageProgress>;
namespace ThScoreFileConverter.Models.Th13;
// %T13CLEAR[x][yy]
/// <summary>
/// Handles the "%T13CLEAR[x][yy]" format pattern (see the comment above).
/// All replacement logic lives in the shared <c>ClearReplacerBase</c>; this
/// class only binds the Th13-specific format prefix, level/chara parsers and
/// type parameters, and forwards the per-character clear data dictionary.
/// </summary>
internal class ClearReplacer : ClearReplacerBase<
    Chara, CharaWithTotal, LevelPractice, LevelPractice, LevelPracticeWithTotal, StagePractice, IScoreData>
{
    public ClearReplacer(IReadOnlyDictionary<CharaWithTotal, IClearData<
        CharaWithTotal, LevelPractice, LevelPractice, LevelPracticeWithTotal, StagePractice, IScoreData>> clearDataDictionary)
        : base(Definitions.FormatPrefix, Parsers.LevelParser, Parsers.CharaParser, clearDataDictionary)
    {
    }
}
| 40,376
|
https://github.com/bigdata-ustc/TKT/blob/master/TKT/DKT/Module/etl.py
|
Github Open Source
|
Open Source
|
MIT
| 2,019
|
TKT
|
bigdata-ustc
|
Python
|
Code
| 223
| 793
|
# coding: utf-8
# create by tongshiwei on 2019/4/12
import torch
from gluonnlp.data import FixedBucketSampler, PadSequence
from tqdm import tqdm
from TKT.shared.etl import *
def transform(raw_data, params):
    """Bucket, index and pad raw response sequences into torch batches.

    raw_data --> batch_data: each batch entry is a list of five tensors:
    [padded responses, sequence lengths, padded labels, padded pick
    indexes, label lengths].

    Args:
        raw_data: iterable of response sequences; each response is a pair
            ``(item_id, correctness)``.
        params: configuration providing ``num_buckets`` and ``batch_size``.
    """
    # Data transformation interface (raw_data --> batch_data).
    num_buckets = params.num_buckets
    batch_size = params.batch_size
    responses = raw_data

    # Group sequences of similar length together to limit padding waste.
    batch_idxes = FixedBucketSampler([len(rs) for rs in responses], batch_size, num_buckets=num_buckets)
    batch = []

    def index(r):
        # Encode (item, correctness) as a single id: item * 2 + correct.
        correct = 0 if r[1] <= 0 else 1
        return r[0] * 2 + correct

    for batch_idx in tqdm(batch_idxes, "batchify"):
        batch_rs = []
        batch_pick_index = []
        batch_labels = []
        for idx in batch_idx:
            batch_rs.append([index(r) for r in responses[idx]])
            if len(responses[idx]) <= 1:
                # Too short to predict anything beyond the first response.
                pick_index, labels = [], []
            else:
                # Predict response t from the history before t, hence [1:].
                pick_index, labels = zip(*[(r[0], 0 if r[1] <= 0 else 1) for r in responses[idx][1:]])
            batch_pick_index.append(list(pick_index))
            batch_labels.append(list(labels))

        # Pad responses to the longest sequence in this batch; keep the
        # original lengths as a mask.
        max_len = max([len(rs) for rs in batch_rs])
        padder = PadSequence(max_len, pad_val=0)
        batch_rs, data_mask = zip(*[(padder(rs), len(rs)) for rs in batch_rs])

        # Labels/pick indexes are one element shorter; pad them separately.
        max_len = max([len(rs) for rs in batch_labels])
        padder = PadSequence(max_len, pad_val=0)
        batch_labels, label_mask = zip(*[(padder(labels), len(labels)) for labels in batch_labels])
        batch_pick_index = [padder(pick_index) for pick_index in batch_pick_index]
        # Load
        batch.append(
            [torch.tensor(batch_rs), torch.tensor(data_mask), torch.tensor(batch_labels),
             torch.tensor(batch_pick_index),
             torch.tensor(label_mask)])

    return batch
def pesudo_data_iter(_cfg):
    # Batches built from generated pseudo data (for smoke testing).
    # NOTE(review): "pesudo" is a typo for "pseudo", but the name is part of
    # the public interface, so it is kept as-is.
    return transform(pseudo_data_generation(_cfg), _cfg)
def etl(data_src, params):
    # Extract raw response sequences from ``data_src``, then batchify them.
    raw_data = extract(data_src)
    return transform(raw_data, params)
if __name__ == '__main__':
    from longling.lib.structure import AttrDict
    import os

    # Smoke test: iterate the raw records, then the full batched pipeline.
    filename = "../../../data/junyi/data/test"
    print(os.path.abspath(filename))
    for data in tqdm(extract(filename)):
        pass

    parameters = AttrDict({"batch_size": 128, "num_buckets": 100})
    for data in tqdm(etl(filename, params=parameters)):
        pass
| 20,833
|
https://github.com/patil215/v8/blob/master/fuzzer_output/interesting/sample_1554192674891.js
|
Github Open Source
|
Open Source
|
BSD-3-Clause
| null |
v8
|
patil215
|
JavaScript
|
Code
| 50
| 117
|
// Fuzzer-generated test case. The odd shape (unused locals, a
// BigUint64Array built from a nested-array source, a mixed-type
// comparison) is intentional input for exercising V8, so it must not be
// "cleaned up".
function main() {
  const v5 = [1337];
  let v10 = undefined;
  const v15 = [2930408582];
  const v16 = [v15];
  const v17 = {};
  const v19 = new BigUint64Array(v16, v17);
  for (const v20 of v19) {
    // BigInt addition, then a relational comparison of the array against it.
    const v21 = v20 + v20;
    const v22 = v19 >= v21;
  }
}
// V8 natives syntax (requires --allow-natives-syntax): keep main
// unoptimized so it stays on the interpreter/baseline paths.
%NeverOptimizeFunction(main);
main();
| 27,585
|
https://github.com/idealcountry/practice/blob/master/SpringCloud/finance/product/src/main/java/com/spring/cloud/product/controller/ConfigController.java
|
Github Open Source
|
Open Source
|
Apache-2.0
| null |
practice
|
idealcountry
|
Java
|
Code
| 49
| 268
|
package com.spring.cloud.product.controller;
import com.spring.cloud.common.pojo.UserInfo;
import com.spring.cloud.product.facade.ConfigFacade;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
@RequestMapping("/config")
@RestController
public class ConfigController {

    /** Facade injected by Spring; wraps the decorated user lookups. */
    @Autowired
    private ConfigFacade configFacade = null;

    /**
     * Fetches a user through the circuit-breaker protected path.
     *
     * @param id user identifier
     * @return the user info returned by the facade
     */
    @GetMapping("/cb/{id}")
    public UserInfo getUserWithCircuitBreaker(@PathVariable("id") Long id) {
        return configFacade.getUserWithCircuitBreaker(id);
    }

    /**
     * Fetches a user through the rate-limited path.
     *
     * @param id user identifier
     * @return the user info returned by the facade
     */
    @GetMapping("/rl/{id}")
    public UserInfo getUserWithRatelimiter(@PathVariable("id") Long id) {
        return configFacade.getUserWithRatelimiter(id);
    }
}
| 36,665
|
https://github.com/chaosdorf/meteroid-ios/blob/master/meteroid/UserListCell.m
|
Github Open Source
|
Open Source
|
MIT
| 2,020
|
meteroid-ios
|
chaosdorf
|
Objective-C
|
Code
| 272
| 587
|
//
// UserListCell.m
// meteroid
//
// Copyright (C) 2013 Gerrit Giehl <r4mp@chaosdorf.de>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of
// this software and associated documentation files (the "Software"), to deal in
// the Software without restriction, including without limitation the rights to
// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
// the Software, and to permit persons to whom the Software is furnished to do so,
// subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
// FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
// COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
// CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
#import "UserListCell.h"
@implementation UserListCell
@synthesize lbUsername;
@synthesize imageUser;
// Designated initializer; no custom setup is needed beyond the superclass.
- (id)initWithStyle:(UITableViewCellStyle)style reuseIdentifier:(NSString *)reuseIdentifier
{
    self = [super initWithStyle:style reuseIdentifier:reuseIdentifier];
    if (self) {
        // Initialization code
    }
    return self;
}
// No custom selected-state appearance; defer to the superclass.
- (void)setSelected:(BOOL)selected animated:(BOOL)animated
{
    [super setSelected:selected animated:animated];

    // Configure the view for the selected state
}
// Populates the cell with the user's name and avatar. The image data is
// fetched (via -loadImageData:) only when not already cached on the user
// object, so reused cells avoid repeating the lookup. The duplicated
// UIImage construction of the original two branches is folded into one.
-(void)setCellDetails:(User *)user
{
    lbUsername.text = user.name;

    if (!user.imageData) {
        user.imageData = [user loadImageData:user.email];
    }
    imageUser.image = [UIImage imageWithData:user.imageData];
}
@end
| 40,939
|
https://github.com/Erick-Kiaco/Projetos-Step/blob/master/ProjetoJavaSEBanco/src/sistemabancario/PessoaFisica.java
|
Github Open Source
|
Open Source
|
MIT
| null |
Projetos-Step
|
Erick-Kiaco
|
Java
|
Code
| 21
| 92
|
package sistemabancario;
/**
 * Natural person (pessoa fisica) account holder. Document handling is
 * delegated to {@link Pessoa}; name and e-mail are hard-coded sample values.
 */
public class PessoaFisica extends Pessoa {

    public PessoaFisica(TipoDocumento tipoDocumento, String numeroDocumento) {
        super(tipoDocumento, numeroDocumento);
        // Sample data; replace when a real data source is wired in.
        setNome("Thiago Lins");
        setEmail("Lins@gmail.com");
    }
}
| 8,646
|
https://github.com/EdLeafe/photoserver/blob/master/tests/conftest.py
|
Github Open Source
|
Open Source
|
MIT
| 2,020
|
photoserver
|
EdLeafe
|
Python
|
Code
| 324
| 1,309
|
from __future__ import absolute_import, print_function, unicode_literals
import builtins
from mock import patch
import uuid
import warnings
import pymysql
import pytest
import db_create
import entities
import utils
@pytest.fixture(scope="function")
def test_db_cursor():
    """ A db cursor for use in testing that cleans up after it's done."""
    # Unique database per test so concurrent or failed runs cannot collide.
    db_name = "test_{}".format(uuid.uuid4().hex)
    cls = pymysql.cursors.DictCursor
    creds = utils.parse_creds()
    # Drop any configured dbname; we connect server-wide and create our own.
    creds.pop("dbname", "")
    conn = pymysql.connect(
        host=creds.get("host"),
        user=creds["username"],
        passwd=creds["password"],
        charset="utf8",
        cursorclass=cls,
    )
    test_crs = conn.cursor()
    test_crs.execute("create database {};".format(db_name))
    conn.select_db(db_name)
    conn.db = db_name
    # Schema creation emits warnings we do not care about in tests.
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        db_create.main(test_crs)
    # Expose the cursor globally so entity code can reach it during tests.
    builtins.TEST_CURSOR = test_crs
    yield test_crs
    # Teardown: drop the scratch database and remove the global handle.
    test_crs.execute("drop database if exists {};".format(db_name))
    delattr(builtins, "TEST_CURSOR")
def ensure_table(crs, name):
    # Create table ``name`` via its db_create.create_<name>() helper if it
    # does not already exist in the cursor's current database.
    sql = "select table_name from information_schema.tables where table_schema = %s and table_name = %s"
    res = crs.execute(sql, (crs.connection.db, name))
    if not res:
        create_name = "create_{}".format(name)
        mthd = getattr(db_create, create_name)
        mthd(crs)
@pytest.fixture
def mock_etcd():
    # Patch the etcd client factory so tests never touch a real etcd server.
    with patch("utils._get_etcd_client") as mock_client:
        yield mock_client
@pytest.fixture  # (scope="function")
def frame_factory(test_db_cursor):
    """Given a frame name, creates that frame record and returns its ID"""
    ensure_table(test_db_cursor, "frame")

    def make_frame(name, **kwargs):
        # Persist a Frame entity and hand back its primary key.
        frame = entities.Frame(name=name, **kwargs)
        frame.save()
        return frame.pkid

    return make_frame
@pytest.fixture
def frame(frame_factory):
    # ID of a single pre-created frame record.
    pkid = frame_factory("test_frame")
    yield pkid
@pytest.fixture
def frame_obj(frame):
    # Frame entity loaded from the pre-created frame ID.
    yield entities.Frame.get(frame)
@pytest.fixture  # (scope="function")
def frameset_factory(test_db_cursor):
    """Given a frameset name, creates that frameset record and returns its ID"""
    ensure_table(test_db_cursor, "frameset")

    def make_frameset(name, **kwargs):
        # Persist a Frameset entity and hand back its primary key.
        frameset = entities.Frameset(name=name, **kwargs)
        frameset.save()
        return frameset.pkid

    return make_frameset
@pytest.fixture
def frameset(frameset_factory):
    # ID of a single pre-created frameset record.
    pkid = frameset_factory("test_frameset")
    yield pkid
@pytest.fixture
def frameset_obj(frameset):
    # Frameset entity loaded from the pre-created frameset ID.
    yield entities.Frameset.get(frameset)
@pytest.fixture
def frameset_with_6_frames(frameset_obj, frame_factory, album, mock_etcd):
    # Frameset populated with six freshly created frames; etcd is mocked so
    # set_frames() does not contact a real server.
    frame_ids = [frame_factory(name="Frame {}".format(num)) for num in range(6)]
    frameset_obj.set_frames(frame_ids)
    yield frameset_obj.pkid
@pytest.fixture  # (scope="function")
def album_factory(test_db_cursor):
    """Given an album name, creates that album record and returns its ID"""
    ensure_table(test_db_cursor, "album")

    def make_album(name, **kwargs):
        # Persist an Album entity and hand back its primary key.
        album = entities.Album(name=name, **kwargs)
        album.save()
        return album.pkid

    return make_album
@pytest.fixture
def album(album_factory):
    # ID of a single pre-created album record.
    pkid = album_factory("test_album")
    yield pkid
@pytest.fixture
def album_obj(album):
    # Album entity loaded from the pre-created album ID.
    yield entities.Album.get(album)
@pytest.fixture  # (scope="function")
def image_factory(test_db_cursor):
    """Given an image name, creates that image record and returns its ID"""
    ensure_table(test_db_cursor, "image")

    def make_image(name, **kwargs):
        # Persist an Image entity and hand back its primary key.
        img = entities.Image(name=name, **kwargs)
        img.save()
        return img.pkid

    return make_image
@pytest.fixture
def image(image_factory):
    # ID of a single pre-created image record.
    pkid = image_factory("test_image")
    yield pkid
@pytest.fixture
def image_obj(image):
    # Image entity loaded from the pre-created image ID.
    yield entities.Image.get(image)
| 23,504
|
https://github.com/geniusgeek/Carbon/blob/master/samples/src/main/java/tk/zielony/carbonsamples/feature/LargeShadowActivity.java
|
Github Open Source
|
Open Source
|
Apache-2.0
| null |
Carbon
|
geniusgeek
|
Java
|
Code
| 95
| 464
|
package tk.zielony.carbonsamples.feature;
import android.app.Activity;
import android.os.Bundle;
import android.view.View;
import android.view.ViewGroup;
import android.view.animation.AccelerateDecelerateInterpolator;
import android.view.animation.Animation;
import com.nineoldandroids.animation.ValueAnimator;
import tk.zielony.carbonsamples.R;
/**
 * Created by Marcin on 2014-12-15.
 *
 * Demo of shadow rendering on a large view: an infinitely repeating
 * animator resizes the button between 20% and 100% of the window so the
 * shadow is redrawn at many sizes.
 */
public class LargeShadowActivity extends Activity {

    /** Repeating 0.2..1.0 scale animator driving the button size. */
    ValueAnimator animator;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_largeshadow);

        final View view = findViewById(R.id.button);
        animator = ValueAnimator.ofFloat(0.2f, 1);
        animator.setInterpolator(new AccelerateDecelerateInterpolator());
        animator.setRepeatCount(Animation.INFINITE);
        animator.setRepeatMode(Animation.REVERSE);
        animator.setDuration(2000);
        animator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
            @Override
            public void onAnimationUpdate(ValueAnimator valueAnimator) {
                // Resize to the animated fraction of the window (minus a
                // 100px margin) and ask the parent to repaint.
                ViewGroup.LayoutParams layoutParams = view.getLayoutParams();
                layoutParams.width = (int) ((float) valueAnimator.getAnimatedValue() * (getWindow().getDecorView().getWidth() - 100));
                layoutParams.height = (int) ((float) valueAnimator.getAnimatedValue() * (getWindow().getDecorView().getHeight() - 100));
                view.setLayoutParams(layoutParams);
                ((View) view.getParent()).postInvalidate();
            }
        });
        animator.start();
    }
}
| 48,003
|
https://github.com/kintalken/prolog-playground/blob/master/list_without_middle/tests/suite/basic.prolog
|
Github Open Source
|
Open Source
|
MIT
| null |
prolog-playground
|
kintalken
|
Prolog
|
Code
| 4
| 19
|
% PL-Unit suite "basic": empty for now. Directives must be terminated with
% a period to be valid Prolog terms; the periods were missing.
:- begin_tests(basic).

:- end_tests(basic).
| 7,328
|
https://github.com/usc-cloud/parallel-louvain-modularity/blob/master/src/benchmark/run_benchmark.sh
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,022
|
parallel-louvain-modularity
|
usc-cloud
|
Shell
|
Code
| 208
| 769
|
#!/bin/bash
#####################################################################
#
# File : run_benchmark.sh
#
# Description: Generates benchmarking results for report
#
#
# Author : Patrick Small
#
# Date : April 17, 2014
#
#####################################################################

# Benchmark results file
RFILE_SERIAL=benchmark_serial.txt
RFILE_PARALLEL=benchmark_parallel.txt
RFILE_OPENMP=benchmark_openmp.txt
STDOUT_SERIAL=benchmark_serial_stdout.txt
STDOUT_PARALLEL=benchmark_parallel_stdout.txt

# Graph directory
GRAPH_DIR=../../data/graphs

# Test parameters
#GRAPH_SIZES=(1000)
GRAPH_SIZES=(1000 10000 100000 1000000 10000000)
#GRAPH_NAME_SIZES=(1k)
GRAPH_NAME_SIZES=(1k 10k 100k 1m 10m)
GRAPH_TYPE=sf
#GRAPH_TYPE=ba
NPROCS=(2 4 8)
#NPROCS=(2)
THREADCOUNT=1

# Use weighted graphs or not
USEWEIGHT=0

# Recreate result files
echo "#NODES NPROC STIME SMOD" > "${RFILE_SERIAL}"
echo "#NODES NPROC PTIME PMOD" > "${RFILE_PARALLEL}"
echo "#NODES NPROC OTIME OMOD" > "${RFILE_OPENMP}"

# Remove old output logs; -f avoids spurious errors on the first run when
# the logs do not exist yet.
rm -f "${STDOUT_SERIAL}" "${STDOUT_PARALLEL}"

# Run serial tests
for (( i = 0 ; i < ${#GRAPH_SIZES[@]} ; i=$i+1 ));
do
    GRAPHFILE=${GRAPH_DIR}/graph_${GRAPH_TYPE}_${GRAPH_NAME_SIZES[$i]}
    if [ "${USEWEIGHT}" -eq 0 ]; then
	WEIGHTFLAG=""
    else
	WEIGHTFLAG="-w ${GRAPHFILE}.weights"
    fi
    echo "./benchmark_serial ${GRAPH_SIZES[$i]} ${GRAPHFILE}.bin -l -1 ${WEIGHTFLAG}"
    # WEIGHTFLAG is deliberately unquoted: it must word-split into
    # "-w <file>" (or nothing) on the command line.
    ./benchmark_serial ${GRAPH_SIZES[$i]} ${GRAPHFILE}.bin -l -1 ${WEIGHTFLAG} >> "${STDOUT_SERIAL}"
done

# Run parallel tests (one graph partition set per process count)
for (( i = 0 ; i < ${#GRAPH_SIZES[@]} ; i=$i+1 ));
do
    for NPROC in "${NPROCS[@]}";
    do
	GRAPHFILE=${GRAPH_DIR}/graph_${GRAPH_TYPE}_${GRAPH_NAME_SIZES[$i]}_${NPROC}
	if [ "${USEWEIGHT}" -eq 0 ]; then
	    WEIGHTFLAG=""
	else
	    WEIGHTFLAG="-w ${GRAPHFILE}.weights"
	fi
	mpirun -np ${NPROC} ./benchmark_parallel ${GRAPH_SIZES[$i]} ${THREADCOUNT} ${GRAPHFILE} -r ${GRAPHFILE} -l -1 -t ${THREADCOUNT} ${WEIGHTFLAG} >> "${STDOUT_PARALLEL}"
    done
done

exit 0
| 6,248
|
https://github.com/RedHat-Healthcare/Event-Builder/blob/master/src/main/java/io/connectedhealth_idaas/eventbuilder/builders/fhir/ActivityDefinition.java
|
Github Open Source
|
Open Source
|
Apache-2.0
| null |
Event-Builder
|
RedHat-Healthcare
|
Java
|
Code
| 7
| 29
|
package io.connectedhealth_idaas.eventbuilder.builders.fhir;
/**
 * Placeholder builder for the FHIR ActivityDefinition resource; no fields
 * or mapping logic have been implemented yet.
 */
public class ActivityDefinition {
}
| 34,407
|
https://github.com/fmartinlef/zbxtool/blob/master/admtool/hosts_config.py
|
Github Open Source
|
Open Source
|
MIT
| 2,022
|
zbxtool
|
fmartinlef
|
Python
|
Code
| 1,679
| 5,540
|
''' coding: utf-8
zabbix a hosts configuration statistic excel file with :
- host name / description / visible ...
- specific host user macros
- specific host tag value (zabbix version >= 4.2)
- specific host inventory attributes
Author : Francois Martin-Lefevre : fml@axynergie.com
'''
import sys
import os
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
PARENT_DIR = os.path.dirname(CURRENT_DIR)
sys.path.append(PARENT_DIR)
import argparse
import filecmp
import re
import tempfile
import itertools as it
from pyzabbix.api import ZabbixAPI, ZabbixAPIException
from openpyxl import Workbook
from openpyxl.worksheet.table import Table, TableStyleInfo
from openpyxl.styles import NamedStyle, PatternFill, Font, Border, Side
from openpyxl.utils.dataframe import dataframe_to_rows
import pandas as pd
from admtool.zbxtool_functions import parse_config, init_log, logging, copy_file, zbx_connect, zbx_table
def parse_arg():
    """Parse command-line arguments for the hosts-configuration report.

    Returns the argparse namespace with attributes: config, verbose,
    zbxenv, xlfile and limit.
    """

    def str2bool(value):
        # Bug fix: the original used ``type=bool``, which treats ANY
        # non-empty string (including "false") as True. Parse the common
        # spellings explicitly; "--verbose True" still yields True.
        if isinstance(value, bool):
            return value
        lowered = value.strip().lower()
        if lowered in ("1", "true", "yes", "y", "on"):
            return True
        if lowered in ("0", "false", "no", "n", "off", ""):
            return False
        raise argparse.ArgumentTypeError("boolean value expected, got %r" % value)

    parser = argparse.ArgumentParser(add_help=True, description='zbxtools :: statistics of hosts configuration loading ')
    parser.add_argument("--config", type=str,
                        help="config file path and name (default: config.ini)",
                        default="config.ini")
    parser.add_argument("--verbose", type=str2bool,
                        help="mode verbose (default: false)",
                        default=False)
    parser.add_argument("--zbxenv", type=str,
                        help="config file zabbix environment section")
    parser.add_argument("--xlfile", type=str,
                        help="name of excel output file")
    parser.add_argument("--limit", type=int,
                        help="for test purpose, limits the number of host scanned")
    args = parser.parse_args()
    return args
def zbx_fmt_host(zapi, host, convert, proxy_name):
    '''Return a formatted host definition in which:
        - proxy_hostid is replaced by the corresponding proxy name
        - 'hostid' is dropped unless listed in the global selection ``sel``
        - every value found in the ``convert`` table is translated
    Values with no translation are kept unchanged.
    '''
    result = {}
    tmp_result = dict(host)
    # loop on specific items and replace corresponding value
    for key1 in host:
        if key1 == "proxy_hostid":
            try:
                tmp_result[key1] = proxy_name[host[key1]]
            except KeyError:
                continue
        elif key1 == "hostid":
            if key1 in sel["hosts"]:
                continue
            else:
                del tmp_result[key1]
        # elif key1 == "inventory":
        #     try:
        #         tmp_result[key1]["os_full"] = tmp_result[key1]["os_full"].replace(tmp_result[key1]["name"], "")
        #     except KeyError:
        #         continue
        else:
            # Translate coded values (e.g. status flags) to display labels.
            try:
                tmp_result[key1] = convert[key1][host[key1]]
            except KeyError:
                continue
    result.update(tmp_result)
    return result
def zbx_proxy_name(zapi):
    '''Map each declared Zabbix proxy id to its host name.

    Issues a single ``proxy.get`` API call and returns a dictionary of the
    form ``{proxyid: host}``.
    '''
    proxies = zapi.do_request(
        'proxy.get',
        {"output": ["proxyids", "host"]},
    )['result']
    return {entry["proxyid"]: entry["host"] for entry in proxies}
def zbx_host_config(zapi, hosts):
    ''' return a list of hosts :
        - attributes : as values of dict
        - macros, groups, templates, inventory, tags(*) : as list of dict
        * tags only with zabbix >= 4.2.0
    Honors the global ``limit_host`` cap (0 = unlimited) and formats each
    host via zbx_fmt_host().
    '''
    result = []
    host = []
    global sel
    global desc
    convert = zbx_table("init")
    # query the proxy dictionnary
    proxy_name = zbx_proxy_name(zapi)
    idx_host = 0
    for ho in hosts:
        idx_host += 1
        # Tags are only available from Zabbix 4.2 onwards, hence the two
        # query variants.
        if v_zabbix >= 420:
            host = zapi.do_request('host.get',
                                   {"output": sel["hosts"],
                                    "hostids": ho["hostid"],
                                    "selectMacros": sel["macros"],
                                    "selectGroups": sel["groups"],
                                    "selectParentTemplates": sel["templates"],
                                    "selectInventory": sel["inventory"],
                                    # "selectItems" : sel["items"],
                                    "selectTags": sel["tags"],
                                    })['result'][0]
        else:
            host = zapi.do_request('host.get',
                                   {"output": sel["hosts"],
                                    "hostids": ho["hostid"],
                                    "selectMacros": sel["macros"],
                                    "selectGroups": sel["groups"],
                                    "selectParentTemplates": sel["templates"],
                                    "selectInventory": sel["inventory"],
                                    # "selectItems" : sel["items"],
                                    })['result'][0]
        # print(host[0]["host"])
        logging.info("host (" + str(idx_host) + "/" + str(len(hosts)) + ") : " + host["host"])
        fmt_host = zbx_fmt_host(zapi, host, convert, proxy_name)
        result.append(fmt_host)
        if (limit_host != 0 and idx_host >= limit_host):
            logging.info("ended program due to host scan number limitation (--limit parameter)")
            break
    return result
def extract_list(hosts_tbl, it2, it3):
    '''Collect the distinct ``it3`` values found in each host's ``it2`` list.

    ``hosts_tbl`` items look like ``{it2: [{it3: value, ...}, ...], ...}``.
    Order of first appearance is preserved. As in the original contract,
    a host missing ``it2`` (or an entry missing ``it3``) stops the scan
    and whatever was collected so far is returned — an empty list when the
    very first host lacks the key.
    '''
    seen = []
    try:
        for host in hosts_tbl:
            for entry in host[it2]:
                value = entry[it3]
                if value not in seen:
                    seen.append(value)
    except KeyError:
        pass
    return seen
def extract_noempty_list(hosts_tbl):
    '''List the inventory keys that are non-empty for at least one host.

    The ``hostid`` key is excluded; order of first appearance is preserved.
    '''
    keys = []
    for host in hosts_tbl:
        for name, value in host["inventory"].items():
            if name == "hostid" or value == "":
                continue
            if name not in keys:
                keys.append(name)
    return keys
def extract_lists(hosts_tbl):
    '''Gather the column lists needed by the worksheet builders:
    macros, tags, templates, groups and non-empty inventory keys.
    '''
    return {
        "macros": extract_list(hosts_tbl, "macros", "macro"),
        "tags": extract_list(hosts_tbl, "tags", "tag"),
        "parentTemplates": extract_list(hosts_tbl, "parentTemplates", "host"),
        "groups": extract_list(hosts_tbl, "groups", "name"),
        "inventory": extract_noempty_list(hosts_tbl),
    }
def ws_host_title(host_tbl, host_lst):
    '''Build the worksheet header row for the host sheet.

    Column titles are prefixed with:
        ho: host attribute (renamed via the global ``desc`` labels)
        ma: macro
        ta: tag
        in: inventory attribute
    Groups and templates get their own sheets and are skipped here.
    '''
    result = []
    global desc
    # Map each selected attribute name to its display label.
    title = dict(zip(sel["hosts"], desc["hosts"]))
    for key in host_tbl[0]:
        if key not in ["groups", "parentTemplates", "macros", "tags", "inventory"]:
            try:
                result.append("ho_" + title[key])
            except KeyError:
                pass
        elif key == "macros":
            for idx in host_lst["macros"]:
                result.append("ma_" + idx)
        elif key == "tags":
            for idx in host_lst["tags"]:
                result.append("ta_" + idx)
        elif key == "inventory":
            for key in host_lst["inventory"]:
                result.append("in_" + key)
    return result
def ws_host_data(host_tbl, host_lst):
    '''Flatten each host dict into one worksheet row.

    Scalar attributes are emitted in iteration order, followed by one
    column per macro/tag/inventory name listed in ``host_lst`` (blank when
    the host has no value for that column). Groups, templates and items
    are handled by other sheets and skipped here.
    '''
    skipped = ("groups", "parentTemplates", "items")
    rows = []
    for host in host_tbl:
        row = []
        for key, value in host.items():
            if key == "macros":
                # Last occurrence wins, matching the original scan order.
                by_name = {m["macro"]: m["value"] for m in value}
                row.extend(by_name.get(name, "") for name in host_lst["macros"])
            elif key == "tags":
                by_name = {t["tag"]: t["value"] for t in value}
                row.extend(by_name.get(name, "") for name in host_lst["tags"])
            elif key == "inventory":
                row.extend(value.get(name, "") for name in host_lst["inventory"])
            elif key not in skipped:
                row.append(value)
        rows.append(row)
    return rows
def ws_group_data(host_data, host_lst):
    '''Return (host, group-name) pairs, one row per host/group membership.

    ``host_lst`` is unused but kept for signature parity with the other
    ws_* builders.
    '''
    return [[host["host"], group["name"]]
            for host in host_data
            for group in host["groups"]]
def ws_template_data(host_data, host_lst):
    '''Return (host, template-name) pairs, one row per linked template.

    ``host_lst`` is unused but kept for signature parity with the other
    ws_* builders.
    '''
    return [[host["host"], template["host"]]
            for host in host_data
            for template in host["parentTemplates"]]
def xl_create_ws(wb, wsname, wstitle, wsdata):
    '''Create sheet ``wsname`` in workbook ``wb``, write the title row and
    data rows, and format the full range as a styled Excel table.
    Returns the created worksheet.
    '''
    ws = wb.create_sheet(wsname)
    ws.append(wstitle)
    for row in wsdata:
        ws.append(row)
    # Table range: header row plus all data rows, across all title columns.
    first_cell = ws.cell(row=1, column=1).coordinate
    last_cell = ws.cell(row=len(wsdata) + 1, column=len(wstitle)).coordinate
    refstr = first_cell + ":" + last_cell
    # print(refstr)
    tab = Table(displayName=wsname, ref=refstr)
    style = TableStyleInfo(name="TableStyleLight9", showFirstColumn=False,
                           showLastColumn=False, showRowStripes=True, showColumnStripes=True)
    tab.tableStyleInfo = style
    ws.add_table(tab)
    return ws
def xl_create_dataframe(ws):
    ''' build a pandas dataframe from a ws sheet
        - the first row of the sheet provides the column names
        - no index column: values in columns are not assumed unique
        return a pandas DataFrame object
    '''
    rows = ws.values
    # consume the header row, the remainder becomes the data
    header = next(rows)[0:]
    return pd.DataFrame(rows, columns=header)
def zbx_stat(df, ws, val, pv_lst, startcol):
    ''' store pivot statistics in a specific worksheet
        inputs :
        df : source dataframe
        ws : destination worksheet
        val : column counted by the pivot
        pv_lst : list of columns to pivot on, one pivot block each
        startcol : first worksheet column used (row is always 1);
                   each pivot block occupies 3 columns
    '''
    col = startcol
    for pivot_col in pv_lst:
        table = df.pivot_table(index=pivot_col, values=val, aggfunc="count")
        if mode_verbose:
            logging.info("stat process -> %s statistics", pivot_col)
        xl_stat(ws, table, col)
        col += 3
    return
def xl_stat(ws, pv, startcol):
    ''' format all statistics in a single sheet
        inputs :
        - ws : destination worksheet
        - pv : pivot dataframe (one index column, one count column)
        - startcol : first worksheet column used by this pivot block
    '''
    # The code treats the rows yielded by dataframe_to_rows() as:
    # row 0 -> value-column header, row 1 -> index name, then the data
    # rows themselves — TODO confirm against openpyxl's dataframe_to_rows.
    for id_row, r in enumerate(dataframe_to_rows(pv)):
        row = list(r)
        for id_col, val in enumerate(row):
            # print("r", id_row, "c", id_col, "val", val)
            if id_row == 0 and id_col == 1:
                # value-column header goes next to the index header cell
                ws.cell(row=1, column=startcol+1).value = val
                ws.cell(row=1, column=startcol+1).style = "zbx title"
            elif id_row == 1 and id_col == 0:
                # index name becomes the first header cell of the block
                ws.cell(row=1, column=startcol).value = val
                ws.cell(row=1, column=startcol).style = "zbx title"
            elif id_row >=1 and id_col >= 0:
                # data cells: worksheet row mirrors the dataframe row index
                ws.cell(row=id_row, column=startcol+id_col).value = val
                ws.cell(row=id_row, column=startcol+id_col).style = "zbx data"
    return
def xl_create_named_style(wb):
    ''' register the two excel named styles used by the statistics sheet:
        "zbx title" (white bold on blue, boxed) and
        "zbx data" (black on default, boxed)
        wb : excel workbook
    '''
    edge = Side(style="thin", color="538DD5")
    box = Border(left=edge, top=edge, right=edge, bottom=edge)

    title_style = NamedStyle(name="zbx title")
    title_style.font = Font(bold=True, color="ffffff")
    title_style.fill = PatternFill(fill_type="solid",
                                   start_color="538DD5",
                                   end_color="538DD5")
    title_style.border = box
    wb.add_named_style(title_style)

    data_style = NamedStyle(name="zbx data")
    data_style.font = Font(color="000000")
    data_style.border = box
    wb.add_named_style(data_style)
    return
if __name__ == '__main__':
    ''' process args and config parameters for module variables setting
        1/ args parameter
        2/ config parameters
        3/ if config parameter not exist then args is the default
    '''
    # module name is derived from this file's path ("dir/name.py" -> "name")
    module = __file__[__file__.find("/")+1:__file__.find(".")]
    mod_path = os.path.dirname(os.path.abspath(__file__))
    args = parse_arg()
    conf_file = mod_path + "/" + args.config
    config = parse_config(conf_file,module)
    mode_verbose = args.verbose or config.getboolean("default", "mode_verbose") or False
    init_log(config, mode_verbose)
    zbxenv = args.zbxenv or config["default"]["env"] or "Zabbix"
    limit_host = args.limit or 0
    xlfile = args.xlfile or config[module]["xlfile"] or "host.xlsx"
    zbxtool_dir = config["paths"]["zbxtool_dir"]
    # output directory is <zbxtool_dir>/<save_dir>/<environment>/
    dir_data = zbxtool_dir + "/" + config[module]["save_dir"] + "/" + zbxenv + "/"
    logging.info("save directory is : %s", dir_data)
    if not os.path.exists(dir_data):
        os.makedirs(dir_data)
    if limit_host != 0:
        logging.info("max number of host activated %s hosts", str(limit_host))
    logging.info("zabbix environment = %s ", zbxenv)
    # init global variable
    global sel
    sel = {}
    global desc
    desc = {}
    # start of program logic
    zapi = zbx_connect(config,zbxenv)
    # NOTE(review): v_zabbix is computed here but never used below
    v_zabbix = int(zapi.api_version().replace(".",""))
    # collect all hostsid ordered by hostname
    hosts = zapi.do_request('host.get',{"output": ["hostid"],
                            "sortfield" : "host",
                            })['result']
    # loop on the hostids for collecting the selectionned data
    # "sel" drives which fields the API returns, "desc" maps them to
    # human-readable worksheet column titles
    sel.update({"hosts": ["host", "description","name", "proxy_hostid", "flags", "maintenance_status",
                "status", "snmp_available"]})
    desc.update({"hosts": ["host", "description", "name", "proxy", "mode ajout", "maintenance",
                 "etat", "interface snmp"]})
    sel.update({"macros": ["macro", "value"]})
    sel.update({"groups": ["name"]})
    sel.update({"templates": ["host"]})
    sel.update({"inventory": "extend"})
    sel.update({"tags": ["tag", "value"]})
    sel.update({"items": ["itemid", "name", "_key", "type"]})
    hosts_tbl = zbx_host_config(zapi, hosts)
    # extract lists
    zbx_host_list = extract_lists(hosts_tbl)
    # print(zbx_host_list)
    # create and process excel data
    wb = Workbook()
    # create named style for excel statistics sheet formating
    xl_create_named_style(wb)
    # process host data
    host_title = ws_host_title(hosts_tbl, zbx_host_list)
    host_data = ws_host_data(hosts_tbl, zbx_host_list)
    ws_host = xl_create_ws(wb, "Hosts", host_title, host_data)
    logging.info("sheet 'Hosts' created : " + str(len(host_title))
                 + " cols " + str(len(host_data)) + " rows")
    # process groups data
    group_title = ["host", "group"]
    group_data = ws_group_data(hosts_tbl, zbx_host_list)
    ws_groups = xl_create_ws(wb, "Groups", group_title, group_data)
    logging.info("sheet 'Groups' created : " + str(len(group_title))
                 + " cols " + str(len(group_data)) + " rows")
    # process templates data
    template_title = ["host", "template"]
    template_data = ws_template_data(hosts_tbl, zbx_host_list)
    ws_templates = xl_create_ws(wb, "Templates", template_title, template_data)
    logging.info("sheet 'Templates' created : " + str(len(template_title))
                 + " cols " + str(len(template_data)) + " rows")
    wb.save(dir_data + xlfile)
    # process statistics data -> create pandas dataframe and proceed with pivot table
    # each pivot block occupies 3 columns on the STATS sheet, hence the
    # "len(...) * 3" increments of last_col below
    ws_stat = wb.create_sheet("STATS")
    logging.info("process 'groups' statistics data")
    df_groups = xl_create_dataframe(ws_groups)
    groups_pivot = ["group"]
    groups_value = "host"
    last_col = 1
    zbx_stat(df_groups, ws_stat, groups_value, groups_pivot, last_col)
    last_col += len(groups_pivot) * 3
    logging.info("process 'templates' statistics data")
    df_templates = xl_create_dataframe(ws_templates)
    templates_pivot = ["template"]
    templates_value = "host"
    zbx_stat(df_templates, ws_stat, templates_value, templates_pivot, last_col)
    last_col += len(templates_pivot) * 3
    logging.info("process 'host' statistics data")
    df_host = xl_create_dataframe(ws_host)
    host_pivot = ["ho_proxy", "ho_mode ajout", "ho_etat", "ho_maintenance"]
    # also pivot on selected macro and inventory columns when present
    for ti in host_title:
        if ti[0:8] in ["ma_{$TAG", "ma_{$SNM"]:
            host_pivot.append(ti)
        if ti in ["in_hardware", "in_type", "in_model", "in_os", "in_os_full", "in_software_app_a",
                  "in_location", "in_site_city", "in_contact"]:
            host_pivot.append(ti)
    host_value = "ho_host"
    zbx_stat(df_host, ws_stat, host_value, host_pivot, last_col)
    last_col += len(host_pivot) * 3
    logging.info("end of statistics data process")
    # drop the default empty sheet created by Workbook()
    del wb["Sheet"]
    wb.save(dir_data + xlfile)
    # ending program
    logging.info("program ended")
| 41,539
|
https://github.com/ryanau/doc_now_client/blob/master/app/containers/Doctor/components/ListItem/messages.js
|
Github Open Source
|
Open Source
|
MIT
| null |
doc_now_client
|
ryanau
|
JavaScript
|
Code
| 72
| 245
|
/*
 * ListItem Messages
 *
 * This contains all the text for the ListItem component.
 * Each entry is a react-intl message descriptor: `id` is the
 * translation key, `defaultMessage` the English fallback.
 */
import { defineMessages } from 'react-intl';

export default defineMessages({
  // Toggle label shown while the extra details are collapsed.
  moreInfoButton: {
    id: 'app.components.ListItem.moreInfoButton',
    defaultMessage: 'More Info +',
  },
  // Toggle label shown while the extra details are expanded.
  lessInfoButton: {
    id: 'app.components.ListItem.lessInfoButton',
    defaultMessage: 'Less Info -',
  },
  nextStep: {
    id: 'app.components.ListItem.nextStep',
    defaultMessage: 'Next',
  },
  book: {
    id: 'app.components.ListItem.book',
    defaultMessage: 'Book',
  },
  // Unit suffix for appointment durations.
  min: {
    id: 'app.components.ListItem.min',
    defaultMessage: 'mins',
  },
  // Fallback specialty label for a doctor.
  specialty: {
    id: 'app.components.ListItem.specialty',
    defaultMessage: 'General',
  },
});
| 21,004
|
https://github.com/epc2101/DeadEndFinal/blob/master/DeadEnd/Assets/InteractiveObjectPrefabs/NewObjects.meta
|
Github Open Source
|
Open Source
|
CC-BY-3.0, CC-BY-4.0
| 2,016
|
DeadEndFinal
|
epc2101
|
Unity3D Asset
|
Code
| 6
| 41
|
fileFormatVersion: 2
guid: 34f9f5d8ecd8c324d958a996a57b9ad0
DefaultImporter:
userData:
| 30,268
|
https://github.com/kalsolio/jitamin/blob/master/app/Providers/RouteProvider.php
|
Github Open Source
|
Open Source
|
MIT
| null |
jitamin
|
kalsolio
|
PHP
|
Code
| 121
| 360
|
<?php
/*
* This file is part of Jitamin.
*
* Copyright (C) 2016 Jitamin Team
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace Jitamin\Providers;
use Jitamin\Core\Http\Route;
use Jitamin\Core\Http\Router;
use Pimple\Container;
use Pimple\ServiceProviderInterface;
/**
* Route Provider.
*/
class RouteProvider implements ServiceProviderInterface
{
    /**
     * Register the routing services.
     *
     * Installs the Router and Route services, then — when URL rewriting
     * is enabled — loads every file under JITAMIN_DIR/routes. Each file
     * returns an array mapping a route name to a 'Controller@action'
     * string.
     *
     * @param \Pimple\Container $container
     *
     * @return \Pimple\Container
     */
    public function register(Container $container)
    {
        $container['router'] = new Router($container);
        $container['route'] = new Route($container);

        if (ENABLE_URL_REWRITE) {
            $container['route']->enable();

            $pattern = JITAMIN_DIR.DIRECTORY_SEPARATOR.'routes'.DIRECTORY_SEPARATOR.'*.php';
            foreach (glob($pattern) as $file) {
                $routes = require $file;
                foreach ($routes as $name => $entry) {
                    list($controller, $action) = explode('@', $entry);
                    $container['route']->addRoute($name, $controller, $action);
                }
            }
        }

        return $container;
    }
}
| 14,560
|
https://github.com/gbaghdasaryan94/Kapan/blob/master/Final/IMSurvey/static/js/account.js
|
Github Open Source
|
Open Source
|
MIT
| 2,022
|
Kapan
|
gbaghdasaryan94
|
JavaScript
|
Code
| 245
| 1,179
|
$( document ).ready(function(){

    // Show the floating download button after scrolling down on narrow
    // (mobile/tablet) viewports.
    $(window).scroll(function () {
        if ($(window).width() < 992) {
            if ($(this).scrollTop() > 600) {
                $('.download-circle').addClass('active');
            } else {
                $('.download-circle').removeClass('active');
            }
        }
    });

    // Reveal the edit/remove controls only while hovering an entry.
    $(".info").hover(function(){
        $(this).find(".edit").css("display", "block");
    }, function(){
        $(this).find(".edit").css("display", "none");
    })

    /**
     * Serialize a form's fields into a plain {name: value} object.
     * @param {HTMLFormElement|jQuery} form - form to serialize
     * @returns {Object} field name -> field value ('' when empty)
     */
    function FormToJSON(form){
        var array = $(form).serializeArray();
        var json = {};
        $.each(array, function() {
            json[this.name] = this.value || '';
        });
        return json;
    }

    /**
     * Build the HTML fragment for a freshly saved entry.
     * @param {string} req - entry kind: "info" or "skill"
     * @param {Object} json - saved entry data; must contain "id"
     * @returns {string} HTML markup for the new entry block
     */
    function renderSave(req, json){
        return {
            info : `<div class="info" data="${json["id"]}" info="info">
                <div class="edit"><i class="fa fa-edit change " data-toggle="modal"
                data-target="#change"></i>  <i class="fa fa-remove remove"></i></div>
                <p class="sub-heading">${json["name"]}</p>
                <p class="duration">${json["start"]} - ${json["finish"]}</p>
                <p>${json["profession"]}</p>
                </div>`,
            skill : `<div class="skill-alone info" data="${json["id"]}" info="skill">${json["skill"]}
                <div class="edit"><i class="fa fa-remove remove "></i></div>
                </div>`
        }[req];
    }

    // Persist a new entry, then append its rendered block to the page.
    $(".save").click(function(event){
        event.preventDefault();
        let form = $(this).closest("form");
        let req = $(this).attr("info");
        let json = FormToJSON(form);
        $.ajax({
            type: "POST",
            url: `/${req}/add`,
            data: json,
            dataType: "json"
        }).done(function(res) {
            let className = form.attr("id");
            json["id"] = res.id;
            let block = renderSave(req, json);
            let section = $(".container").find(`.${className}`);
            // a jQuery object is always truthy: check .length instead
            if (section.length)
                section.append(block);
        }).fail(function(err) {
            console.error(err);
        });
    })

    // Pre-fill the "change" modal with the values of the clicked entry.
    $(".change").click(function(){
        let info = $(this).closest(".info");
        let form = $("#change").find("form");
        form.find("input[name='name']").val(info.children(".sub-heading").text());
        form.find("input[name='profession']").val(info.children("p").eq(2).text());
        form.find("input[name='start']").val(info.children(".duration").text().split(" - ")[0]);
        form.find("input[name='finish']").val(info.children(".duration").text().split(" - ")[1]);
        form.find("input[name='id']").val(info.attr("data"));
        form.find('button.update').attr("info", info.attr("info"));
    })

    // Save the modal's edits, then reflect them in the entry's markup.
    $(".update").click(function(event){
        event.preventDefault();
        let json = FormToJSON($("#change").find("form"));
        let req = $(this).attr("info");
        $.ajax({
            type: "POST",
            url: `/${req}/update/${json['id']}`,
            data: json,
            dataType: "json"
        }).done(function(res) {
            let info = $(`.info[data=${json["id"]}]`);
            info.children(".sub-heading").text(json["name"]);
            info.children("p").eq(2).text(json["profession"]);
            info.children(".duration").text(json["start"] + " - " + json["finish"]);
            $(".close").click();
        }).fail(function(err) {
            // surface failed updates instead of silently dropping them
            console.error(err);
        });
    });

    // Delete an entry server-side, then drop it from the DOM.
    $(".remove").click(function(){
        let info = $(this);
        let id = info.closest(".info").attr("data");
        let req = info.closest(".info").attr("info");
        $.ajax({
            type: "GET",
            url: `/${req}/delete/${id}`,
            success: function () {
                info.closest(".info").remove();
            },
            error: function (err) {
                // surface failed deletions instead of silently ignoring them
                console.error(err);
            }
        });
    })
})
| 32,119
|
https://github.com/squeek502/EarliestOfGames/blob/master/java/squeek/earliestofgames/content/CrateModel.java
|
Github Open Source
|
Open Source
|
Unlicense
| 2,014
|
EarliestOfGames
|
squeek502
|
Java
|
Code
| 621
| 3,134
|
package squeek.earliestofgames.content;
import net.minecraft.client.model.ModelBase;
import net.minecraft.client.model.ModelRenderer;
import net.minecraft.client.renderer.texture.TextureManager;
import net.minecraft.entity.Entity;
import net.minecraftforge.common.util.ForgeDirection;
import squeek.earliestofgames.filters.EmptyFilter;
import squeek.earliestofgames.filters.IFilter;
import squeek.earliestofgames.filters.SizeFilter;
/**
 * Model for the crate block: a 16x16x16 wooden frame (four corner
 * pillars plus eight support beams), six removable face panels, and
 * three alternate panel boxes used to visualise size filters.
 */
public class CrateModel extends ModelBase
{
    // 4 corner pillars + 8 support beams
    private ModelRenderer[] frame = new ModelRenderer[ForgeDirection.VALID_DIRECTIONS.length * 2];
    // one flat panel per face, indexed by ForgeDirection ordinal
    private ModelRenderer[] sides = new ModelRenderer[ForgeDirection.VALID_DIRECTIONS.length];
    // panels with filter textures: [0] small, [1] medium, [2] large
    private ModelRenderer[] sizeFilters = new ModelRenderer[3];
    // 1/16: converts model units to block units when rendering
    private float scale = 0.0625f;

    public CrateModel()
    {
        // four corner pillars
        frame[0] = new ModelRenderer(this, 0, 0);
        frame[0].addBox(0F, 0F, 0F, 2, 16, 2);
        frame[0].setRotationPoint(0F, 0F, 0F);
        frame[0].setTextureSize(64, 32);
        frame[1] = new ModelRenderer(this, 0, 0);
        frame[1].addBox(0F, 0F, 0F, 2, 16, 2);
        frame[1].setRotationPoint(0F, 0F, 16F);
        frame[1].setTextureSize(64, 32);
        frame[1].rotateAngleY = (float) Math.toRadians(90D);
        frame[2] = new ModelRenderer(this, 0, 0);
        frame[2].addBox(0F, 0F, 0F, 2, 16, 2);
        frame[2].setRotationPoint(16F, 0F, 16F);
        frame[2].setTextureSize(64, 32);
        frame[2].rotateAngleY = (float) Math.toRadians(180D);
        frame[3] = new ModelRenderer(this, 0, 0);
        frame[3].addBox(0F, 0F, 0F, 2, 16, 2);
        frame[3].setRotationPoint(16F, 0F, 0F);
        frame[3].setTextureSize(64, 32);
        frame[3].rotateAngleY = (float) Math.toRadians(270D);

        // support beams
        frame[4] = new ModelRenderer(this, 2, 0);
        frame[4].addBox(0F, 0F, 0F, 12, 2, 2);
        frame[4].setRotationPoint(2F, 14F, 0F);
        frame[4].setTextureSize(64, 32);
        frame[5] = new ModelRenderer(this, 2, 0);
        frame[5].addBox(0F, 0F, 0F, 12, 2, 2);
        frame[5].setRotationPoint(2F, 0F, 0F);
        frame[5].setTextureSize(64, 32);
        frame[6] = new ModelRenderer(this, 2, 0);
        frame[6].addBox(0F, 0F, 0F, 12, 2, 2);
        frame[6].setRotationPoint(0F, 0F, 14F);
        frame[6].setTextureSize(64, 32);
        frame[6].rotateAngleY = (float) Math.toRadians(90D);
        frame[7] = new ModelRenderer(this, 2, 0);
        frame[7].addBox(0F, 0F, 0F, 12, 2, 2);
        frame[7].setRotationPoint(0F, 14F, 14F);
        frame[7].setTextureSize(64, 32);
        frame[7].rotateAngleY = (float) Math.toRadians(90D);
        frame[8] = new ModelRenderer(this, 2, 0);
        frame[8].addBox(0F, 0F, 0F, 12, 2, 2);
        frame[8].setRotationPoint(14F, 0F, 16F);
        frame[8].setTextureSize(64, 32);
        frame[8].rotateAngleY = (float) Math.toRadians(180D);
        frame[9] = new ModelRenderer(this, 2, 0);
        frame[9].addBox(0F, 0F, 0F, 12, 2, 2);
        frame[9].setRotationPoint(14F, 14F, 16F);
        frame[9].setTextureSize(64, 32);
        frame[9].rotateAngleY = (float) Math.toRadians(180D);
        frame[10] = new ModelRenderer(this, 2, 0);
        frame[10].addBox(0F, 0F, 0F, 12, 2, 2);
        frame[10].setRotationPoint(16F, 14F, 2F);
        frame[10].setTextureSize(64, 32);
        frame[10].rotateAngleY = (float) Math.toRadians(270D);
        frame[11] = new ModelRenderer(this, 2, 0);
        frame[11].addBox(0F, 0F, 0F, 12, 2, 2);
        frame[11].setRotationPoint(16F, 0F, 2F);
        frame[11].setTextureSize(64, 32);
        frame[11].rotateAngleY = (float) Math.toRadians(270D);

        // face panels: one 12x12x1 box per direction, rotated into place
        sides[ForgeDirection.DOWN.ordinal()] = new ModelRenderer(this, 8, 4);
        sides[ForgeDirection.DOWN.ordinal()].addBox(-6F, 0F, -0.5F, 12, 12, 1);
        sides[ForgeDirection.DOWN.ordinal()].setRotationPoint(8F, 1F, 2F);
        sides[ForgeDirection.DOWN.ordinal()].setTextureSize(64, 32);
        sides[ForgeDirection.DOWN.ordinal()].rotateAngleX = (float) Math.toRadians(90D);
        sides[ForgeDirection.UP.ordinal()] = new ModelRenderer(this, 8, 4);
        sides[ForgeDirection.UP.ordinal()].addBox(-6F, 0F, -0.5F, 12, 12, 1);
        sides[ForgeDirection.UP.ordinal()].setRotationPoint(8F, 15F, 14F);
        sides[ForgeDirection.UP.ordinal()].setTextureSize(64, 32);
        sides[ForgeDirection.UP.ordinal()].rotateAngleX = (float) Math.toRadians(90D);
        sides[ForgeDirection.UP.ordinal()].rotateAngleY = (float) Math.toRadians(180D);
        sides[ForgeDirection.NORTH.ordinal()] = new ModelRenderer(this, 8, 4);
        sides[ForgeDirection.NORTH.ordinal()].addBox(-6F, 0F, -0.5F, 12, 12, 1);
        sides[ForgeDirection.NORTH.ordinal()].setRotationPoint(8F, 2F, 1F);
        sides[ForgeDirection.NORTH.ordinal()].setTextureSize(64, 32);
        sides[ForgeDirection.SOUTH.ordinal()] = new ModelRenderer(this, 8, 4);
        sides[ForgeDirection.SOUTH.ordinal()].addBox(-6F, 0F, -0.5F, 12, 12, 1);
        sides[ForgeDirection.SOUTH.ordinal()].setRotationPoint(8F, 2F, 15F);
        sides[ForgeDirection.SOUTH.ordinal()].setTextureSize(64, 32);
        sides[ForgeDirection.SOUTH.ordinal()].rotateAngleY = (float) Math.toRadians(180D);
        sides[ForgeDirection.WEST.ordinal()] = new ModelRenderer(this, 8, 4);
        sides[ForgeDirection.WEST.ordinal()].addBox(-6F, 0F, -0.5F, 12, 12, 1);
        sides[ForgeDirection.WEST.ordinal()].setRotationPoint(1F, 2F, 8F);
        sides[ForgeDirection.WEST.ordinal()].setTextureSize(64, 32);
        sides[ForgeDirection.WEST.ordinal()].rotateAngleY = (float) Math.toRadians(90D);
        sides[ForgeDirection.EAST.ordinal()] = new ModelRenderer(this, 8, 4);
        sides[ForgeDirection.EAST.ordinal()].addBox(-6F, 0F, -0.5F, 12, 12, 1);
        sides[ForgeDirection.EAST.ordinal()].setRotationPoint(15F, 2F, 8F);
        sides[ForgeDirection.EAST.ordinal()].setTextureSize(64, 32);
        sides[ForgeDirection.EAST.ordinal()].rotateAngleY = (float) Math.toRadians(270D);

        // filter panels share the panel geometry but use other texture offsets
        sizeFilters[0] = new ModelRenderer(this, 34, 4);
        sizeFilters[0].addBox(-6F, 0F, -0.5F, 12, 12, 1);
        sizeFilters[0].setTextureSize(64, 32);
        sizeFilters[1] = new ModelRenderer(this, 8, 17);
        sizeFilters[1].addBox(-6F, 0F, -0.5F, 12, 12, 1);
        sizeFilters[1].setTextureSize(64, 32);
        sizeFilters[2] = new ModelRenderer(this, 34, 17);
        sizeFilters[2].addBox(-6F, 0F, -0.5F, 12, 12, 1);
        sizeFilters[2].setTextureSize(64, 32);
    }

    /** Renders the pillars and beams of the crate. */
    public void renderFrame()
    {
        for (ModelRenderer framePart : frame)
        {
            if (framePart != null)
                framePart.render(scale);
        }
    }

    /**
     * Renders the crate's face panels according to the tile's filters:
     * an EmptyFilter face renders nothing, a SizeFilter face renders the
     * filter panel matching its maxItemSize, an unfiltered face renders
     * the plain panel.
     * NOTE(review): the textureManager parameter is currently unused.
     */
    public void renderSides(CrateTile tile, TextureManager textureManager)
    {
        int i = 0;
        for (ModelRenderer sidePart : sides)
        {
            ForgeDirection side = ForgeDirection.getOrientation(i);
            if (sidePart != null)
            {
                IFilter filter = tile.filters[side.ordinal()];
                if (filter != null)
                {
                    if (filter instanceof EmptyFilter)
                    {
                    }
                    else if (filter instanceof SizeFilter)
                    {
                        // pick the panel texture for the filter's size class,
                        // reusing the plain panel's placement and rotation
                        ModelRenderer sizeFilter = ((SizeFilter) filter).maxItemSize > 1f ? sizeFilters[2] : (((SizeFilter) filter).maxItemSize <= 0.5f ? sizeFilters[0] : sizeFilters[1]);
                        sizeFilter.setRotationPoint(sidePart.rotationPointX, sidePart.rotationPointY, sidePart.rotationPointZ);
                        sizeFilter.rotateAngleX = sidePart.rotateAngleX;
                        sizeFilter.rotateAngleY = sidePart.rotateAngleY;
                        sizeFilter.rotateAngleZ = sidePart.rotateAngleZ;
                        sizeFilter.render(scale);
                    }
                }
                else
                {
                    sidePart.render(scale);
                }
            }
            i++;
        }
    }

    /** Renders every face panel unconditionally (no filter awareness). */
    public void renderSides()
    {
        for (ModelRenderer sidePart : sides)
        {
            if (sidePart != null)
            {
                sidePart.render(scale);
            }
        }
    }

    @Override
    public void render(Entity par1Entity, float par2, float par3, float par4, float par5, float par6, float par7)
    {
        super.render(par1Entity, par2, par3, par4, par5, par6, par7);
        renderFrame();
        renderSides();
    }
}
| 24,624
|
https://github.com/Yuego/py3o.template/blob/master/py3o/template/tests/test_templates.py
|
Github Open Source
|
Open Source
|
MIT
| null |
py3o.template
|
Yuego
|
Python
|
Code
| 2,100
| 8,768
|
# -*- encoding: utf-8 -*-
import datetime
import os
import unittest
import zipfile
import traceback
import copy
import base64
import lxml.etree
import pkg_resources
import six
from io import BytesIO
from genshi.template import TemplateError
from pyjon.utils import get_secure_filename
from py3o.template import Template, TextTemplate, TemplateException
from py3o.template.main import XML_NS, get_soft_breaks
if six.PY3:
# noinspection PyUnresolvedReferences
from unittest.mock import Mock
elif six.PY2:
# noinspection PyUnresolvedReferences
from mock import Mock
class TestTemplate(unittest.TestCase):
    def tearDown(self):
        # No per-test cleanup needed; kept to make the fixture API explicit.
        pass
    def setUp(self):
        # No per-test setup needed; kept to make the fixture API explicit.
        pass
def test_example_1(self):
template_name = pkg_resources.resource_filename(
'py3o.template',
'tests/templates/py3o_example_template.odt'
)
outname = get_secure_filename()
template = Template(template_name, outname)
template.set_image_path(
'staticimage.logo',
pkg_resources.resource_filename(
'py3o.template',
'tests/templates/images/new_logo.png'
)
)
class Item(object):
pass
items = list()
item1 = Item()
item1.val1 = 'Item1 Value1'
item1.val2 = 'Item1 Value2'
item1.val3 = 'Item1 Value3'
item1.Currency = 'EUR'
item1.Amount = '12345.35'
item1.InvoiceRef = '#1234'
items.append(item1)
# if you are using python 2.x you should use xrange
for i in range(1000):
item = Item()
item.val1 = 'Item%s Value1' % i
item.val2 = 'Item%s Value2' % i
item.val3 = 'Item%s Value3' % i
item.Currency = 'EUR'
item.Amount = '6666.77'
item.InvoiceRef = 'Reference #%04d' % i
items.append(item)
document = Item()
document.total = '9999999999999.999'
data = dict(items=items, document=document)
error = False
try:
template.render(data)
except ValueError as e:
print('The template did not render properly...')
traceback.print_exc()
error = True
assert error is False
def test_link_validation_missing_equal(self):
"""test a missing equal sign in a link raises a template error"""
template_name = pkg_resources.resource_filename(
'py3o.template',
'tests/templates/py3o_missing_eq_in_link.odt'
)
outname = get_secure_filename()
template = Template(template_name, outname)
class Item(object):
def __init__(self, val):
self.val = val
data_dict = {
"items": [Item(1), Item(2), Item(3), Item(4)]
}
template.set_image_path(
'staticimage.logo',
pkg_resources.resource_filename(
'py3o.template',
'tests/templates/images/new_logo.png'
)
)
except_occured = False
error_text = ""
try:
template.render(data_dict)
except TemplateException as e:
except_occured = True
error_text = "{}".format(e)
assert except_occured is True
assert error_text == (
"Missing '=' in instruction 'for \"item in items\"'"
)
    def test_list_duplicate(self):
        """Duplicated text:list elements must get unique xml:id values."""
        template_name = pkg_resources.resource_filename(
            'py3o.template',
            'tests/templates/py3o_list_template.odt'
        )
        outname = get_secure_filename()

        template = Template(template_name, outname)

        class Item(object):
            def __init__(self, val):
                self.val = val

        data_dict = {
            "items": [Item(1), Item(2), Item(3), Item(4)]
        }

        error = False

        template.set_image_path(
            'staticimage.logo',
            pkg_resources.resource_filename(
                'py3o.template',
                'tests/templates/images/new_logo.png'
            )
        )
        template.render(data_dict)

        outodt = zipfile.ZipFile(outname, 'r')
        # a parse failure here means deduplication produced invalid XML
        try:
            content_list = [
                lxml.etree.parse(BytesIO(outodt.read(filename)))
                for filename in template.templated_files
            ]
        except lxml.etree.XMLSyntaxError as e:
            error = True
            print(
                "List was not deduplicated->{}".format(e)
            )

        # remove end file
        os.unlink(outname)

        assert error is False

        # first content is the content.xml
        content = content_list[0]
        list_expr = '//text:list'
        list_items = content.xpath(
            list_expr,
            namespaces=template.namespaces
        )
        # collect every xml:id carried by a rendered list
        ids = []
        for list_item in list_items:
            ids.append(
                list_item.get(
                    '{}id'.format(XML_NS)
                )
            )
        assert ids, "this list of ids should not be empty"
        assert len(ids) == len(set(ids)), "all ids should have been unique"
    def test_missing_opening(self):
        """test orphaned /for raises a TemplateException"""
        template_name = pkg_resources.resource_filename(
            'py3o.template',
            'tests/templates/py3o_missing_open_template.odt'
        )
        outname = get_secure_filename()
        # the output file is only needed while the Template is constructed;
        # remove it right away whether construction succeeds or not
        try:
            template = Template(template_name, outname)
        finally:
            os.remove(outname)

        class Item(object):
            def __init__(self, val):
                self.val = val

        data_dict = {
            "items": [Item(1), Item(2), Item(3), Item(4)]
        }

        template.set_image_path(
            'staticimage.logo',
            pkg_resources.resource_filename(
                'py3o.template',
                'tests/templates/images/new_logo.png'
            )
        )

        # this will raise a TemplateException... or the test will fail
        error_occured = False
        try:
            template.render(data_dict)
        except TemplateException as e:
            error_occured = True
            # make sure this is the correct TemplateException that pops
            assert e.message == "No open instruction for /for"

        # and make sure we raised
        assert error_occured is True
def test_ignore_undefined_variables_logo(self):
template_name = pkg_resources.resource_filename(
'py3o.template',
'tests/templates/py3o_logo.odt'
)
outname = get_secure_filename()
template = Template(template_name, outname)
data = {}
error = True
try:
template.render(data)
print("Error: template contains a logo variable that must be "
"replaced")
except ValueError:
error = False
assert error is False
template = Template(template_name, outname,
ignore_undefined_variables=True)
error = False
try:
template.render(data)
except:
traceback.print_exc()
error = True
assert error is False
def test_ignore_undefined_variables_1(self):
template_name = pkg_resources.resource_filename(
'py3o.template',
'tests/templates/py3o_undefined_variables_1.odt'
)
outname = get_secure_filename()
template = Template(template_name, outname)
data = {}
error = True
try:
template.render(data)
print("Error: template contains variables that must be "
"replaced")
except TemplateError:
error = False
assert error is False
template = Template(template_name, outname,
ignore_undefined_variables=True)
error = False
try:
template.render(data)
except:
traceback.print_exc()
error = True
assert error is False
    def test_ignore_undefined_variables_2(self):
        """
        Test ignore undefined variables for template with dotted variables like
        py3o.document.value
        """
        template_name = pkg_resources.resource_filename(
            'py3o.template',
            'tests/templates/py3o_undefined_variables_2.odt'
        )
        outname = get_secure_filename()

        template = Template(template_name, outname)

        data = {}

        # without the option, rendering with missing variables must raise
        error = True
        try:
            template.render(data)
            print("Error: template contains variables that must be "
                  "replaced")
        except TemplateError:
            error = False

        assert error is False

        template = Template(template_name, outname,
                            ignore_undefined_variables=True)

        # NOTE(review): even with ignore_undefined_variables=True this
        # expects TemplateError — dotted variables are apparently still
        # not tolerated by the option
        error = True
        try:
            template.render(data)
            print("Error: template contains dotted variables that must be "
                  "replaced")
        except TemplateError:
            error = False

        assert error is False
def test_escape_false_template(self):
template_name = pkg_resources.resource_filename(
'py3o.template',
'tests/templates/test_false_value.odt'
)
outname = get_secure_filename()
template = Template(template_name, outname)
template.render({'false_value': False})
outodt = zipfile.ZipFile(outname, 'r')
content_list = lxml.etree.parse(
BytesIO(outodt.read(template.templated_files[0]))
)
result_a = lxml.etree.tostring(
content_list,
pretty_print=True,
).decode('utf-8')
result_e = open(
pkg_resources.resource_filename(
'py3o.template',
'tests/templates/template_test_false_value_result.xml'
)
).read()
result_a = result_a.replace("\n", "").replace(" ", "")
result_e = result_e.replace("\n", "").replace(" ", "")
self.assertEqual(result_a, result_e)
outname = get_secure_filename()
template = Template(template_name, outname, escape_false=True)
template.render({'false_value': False})
outodt = zipfile.ZipFile(outname, 'r')
content_list = lxml.etree.parse(
BytesIO(outodt.read(template.templated_files[0]))
)
result_a = lxml.etree.tostring(
content_list,
pretty_print=True,
).decode('utf-8')
result_e = open(
pkg_resources.resource_filename(
'py3o.template',
'tests/templates/template_test_escape_false_value_result.xml'
)
).read()
result_a = result_a.replace("\n", "").replace(" ", "")
result_e = result_e.replace("\n", "").replace(" ", "")
self.assertEqual(result_a, result_e)
    def test_invalid_template_1(self):
        """a template should not try to define a /for and a for on the same
        paragraph
        """
        template_name = pkg_resources.resource_filename(
            'py3o.template',
            'tests/templates/py3o_example_invalid_template.odt'
        )
        outname = get_secure_filename()

        template = Template(template_name, outname)

        class Item(object):
            pass

        items = list()

        item1 = Item()
        item1.val1 = 'Item1 Value1'
        item1.val2 = 'Item1 Value2'
        item1.val3 = 'Item1 Value3'
        item1.Currency = 'EUR'
        item1.Amount = '12345.35'
        item1.InvoiceRef = '#1234'
        items.append(item1)

        # if you are using python 2.x you should use xrange
        for i in range(1000):
            item = Item()
            item.val1 = 'Item%s Value1' % i
            item.val2 = 'Item%s Value2' % i
            item.val3 = 'Item%s Value3' % i
            item.Currency = 'EUR'
            item.Amount = '6666.77'
            item.InvoiceRef = 'Reference #%04d' % i
            items.append(item)

        document = Item()
        document.total = '9999999999999.999'

        # two independent item lists feed the two conflicting loops
        data = dict(
            items=items,
            items2=copy.copy(items),
            document=document
        )

        # rendering must be refused with a TemplateException
        error = False
        try:
            template.render(data)
        except TemplateException:
            error = True

        assert error is True, "This template should have been refused"
def test_template_with_function_call(self):
template_name = pkg_resources.resource_filename(
'py3o.template',
'tests/templates/py3o_template_function_call.odt'
)
outname = get_secure_filename()
template = Template(template_name, outname)
data_dict = {
'amount': 32.123,
}
template.render(data_dict)
outodt = zipfile.ZipFile(outname, 'r')
content_list = lxml.etree.parse(
BytesIO(outodt.read(template.templated_files[0]))
)
result_a = lxml.etree.tostring(
content_list,
pretty_print=True,
).decode('utf-8')
result_e = open(
pkg_resources.resource_filename(
'py3o.template',
'tests/templates/template_with_function_call_result.xml'
)
).read()
result_a = result_a.replace("\n", "").replace(" ", "")
result_e = result_e.replace("\n", "").replace(" ", "")
assert result_a == result_e
def test_format_date(self):
template_name = pkg_resources.resource_filename(
'py3o.template',
'tests/templates/py3o_template_format_date.odt'
)
outname = get_secure_filename()
template = Template(template_name, outname)
data_dict = {
'datestring': '2015-08-02',
'datetimestring': '2015-08-02 17:05:06',
'datestring2': '2015-10-15',
'datetime': datetime.datetime.strptime(
'2015-11-13 17:00:20',
'%Y-%m-%d %H:%M:%S'
),
}
template.render(data_dict)
outodt = zipfile.ZipFile(outname, 'r')
content_list = lxml.etree.parse(
BytesIO(outodt.read(template.templated_files[0]))
)
result_a = lxml.etree.tostring(
content_list,
pretty_print=True,
).decode('utf-8')
result_e = open(
pkg_resources.resource_filename(
'py3o.template',
'tests/templates/template_format_date_result.xml'
)
).read()
result_a = result_a.replace("\n", "").replace(" ", "")
result_e = result_e.replace("\n", "").replace(" ", "")
assert result_a == result_e
def test_format_date_exception(self):
template_name = pkg_resources.resource_filename(
'py3o.template',
'tests/templates/py3o_template_format_date_exception.odt'
)
outname = get_secure_filename()
template = Template(template_name, outname)
data_dict = {
'date': '2015/08/02',
}
# this will raise a TemplateException... or the test will fail
error_occured = False
try:
template.render(data_dict)
except TemplateException as e:
error_occured = True
# and make sure we raised
assert error_occured is True
def test_style_application_with_function_call(self):
template_name = pkg_resources.resource_filename(
'py3o.template',
'tests/templates/style_application_with_function_call.odt'
)
outname = get_secure_filename()
template = Template(template_name, outname)
data_dict = {
'date': '2015-08-02',
}
template.render(data_dict)
outodt = zipfile.ZipFile(outname, 'r')
content_list = lxml.etree.parse(
BytesIO(outodt.read(template.templated_files[0]))
)
result_a = lxml.etree.tostring(
content_list,
pretty_print=True,
).decode('utf-8')
result_e = open(
pkg_resources.resource_filename(
'py3o.template', (
'tests/templates/'
'style_application_with_function_call_result.xml'
)
)
).read()
result_a = result_a.replace("\n", "").replace(" ", "")
result_e = result_e.replace("\n", "").replace(" ", "")
assert result_a == result_e
def test_image_injection(self):
    """Test insertion of images from the data source into the template.

    Each rendered row must embed one of the injected images inside the
    output archive; after stripping the (href-varying) image elements
    the remaining XML must match a static expected file.
    """
    template_name = pkg_resources.resource_filename(
        'py3o.template',
        'tests/templates/py3o_image_injection.odt'
    )
    logo_name = pkg_resources.resource_filename(
        'py3o.template',
        'tests/templates/images/new_logo.png'
    )
    image_names = [
        pkg_resources.resource_filename(
            'py3o.template',
            'tests/templates/images/image{i}.png'.format(i=i)
        ) for i in range(1, 4)
    ]
    outname = get_secure_filename()

    template = Template(template_name, outname)

    # read the binary fixtures with context managers so the file
    # handles are closed deterministically (the original leaked them)
    with open(logo_name, 'rb') as logo_file:
        logo = logo_file.read()
    images = []
    for iname in image_names:
        with open(iname, 'rb') as image_file:
            images.append(image_file.read())

    data_dict = {
        'items': [
            Mock(val1=i, val3=i ** 2, image=base64.b64encode(image))
            for i, image in enumerate(images, start=1)
        ],
        'document': Mock(total=6),
        'logo': logo,
    }

    template.render(data_dict)
    outodt = zipfile.ZipFile(outname, 'r')
    content_list = lxml.etree.parse(
        BytesIO(outodt.read(template.templated_files[0]))
    )
    namelist = outodt.namelist()

    i = 0
    nmspc = template.namespaces
    table = content_list.find('//table:table', nmspc)
    frame_path = 'table:table-cell/text:p/draw:frame'
    # every table row that carries a draw:frame must embed one of the
    # injected images, stored as a member of the output archive
    for row in table.findall('table:table-row', nmspc):
        frame_elem = row.find(frame_path, nmspc)
        if frame_elem is None:
            continue
        image_elem = frame_elem.find('draw:image', nmspc)
        self.assertIsNotNone(image_elem)
        href = image_elem.get('{{{}}}href'.format(nmspc['xlink']))
        self.assertTrue(href)
        self.assertIn(href, namelist)
        self.assertEqual(images[i], outodt.read(href))
        # drop the image element so the remaining XML can be compared
        # against the static expected file (whose hrefs would differ)
        frame_elem.remove(image_elem)
        i += 1
    self.assertEqual(i, 3, u"Images were not found in the output")

    expected_xml = lxml.etree.parse(
        pkg_resources.resource_filename(
            'py3o.template',
            'tests/templates/image_injection_result.xml'
        )
    )
    result = lxml.etree.tostring(
        content_list, pretty_print=True,
    ).decode('utf-8')
    expected = lxml.etree.tostring(
        expected_xml, pretty_print=True,
    ).decode('utf-8')
    result = result.replace("\n", "").replace(" ", "")
    expected = expected.replace("\n", "").replace(" ", "")
    self.assertEqual(result, expected)
def test_ignore_undefined_variables_image_injection(self):
    """Test ignore undefined variables for injected image.

    Without ``ignore_undefined_variables`` the render must raise
    ``TemplateError``; with the flag set, the same data must render
    cleanly.
    """
    template_name = pkg_resources.resource_filename(
        'py3o.template',
        'tests/templates/py3o_image_injection.odt'
    )

    outname = get_secure_filename()

    data = {
        'items': [],
        'document': Mock(total=6),
    }

    # without the flag, rendering must complain about the missing vars
    template = Template(template_name, outname)
    with self.assertRaises(TemplateError):
        template.render(data)

    # with the flag, the same render must succeed
    template = Template(
        template_name, outname, ignore_undefined_variables=True
    )
    error = False
    try:
        template.render(data)
    except Exception:  # bare "except:" would also trap SystemExit etc.
        traceback.print_exc()
        error = True
    self.assertFalse(error)
def test_text_template(self):
    """A plain-text template must render one line per item in mylist."""
    template_name = pkg_resources.resource_filename(
        'py3o.template',
        'tests/templates/py3o_text_template'
    )

    user_data = {'mylist': [
        Mock(var0=1, var1='1', var2=1.0),
        Mock(var0=2, var1='2', var2=2.0),
        Mock(var0=3, var1='3', var2=3.0),
    ]}

    outname = get_secure_filename()

    template = TextTemplate(template_name, outname)
    template.render(user_data)
    # context manager instead of the original leaked file handle
    with open(outname, 'rb') as result_file:
        result = result_file.read()

    # expected output: "<var0> <var1> <var2>\n" per item, utf-8 encoded
    expected = u''.join(
        u'{} {} {}\n'.format(line.var0, line.var1, line.var2)
        for line in user_data['mylist']
    ).encode('utf-8')

    self.assertEqual(result, expected)
def test_ignore_undefined_variables_text_template(self):
    """A text template with unknown variables must raise unless
    ``ignore_undefined_variables=True`` is passed.
    """
    template_name = pkg_resources.resource_filename(
        'py3o.template',
        'tests/templates/py3o_text_template'
    )

    user_data = {}

    outname = get_secure_filename()

    # without the flag, rendering empty data must raise TemplateError
    template = TextTemplate(template_name, outname)
    with self.assertRaises(TemplateError):
        template.render(user_data)

    # with the flag, the same render must succeed
    template = TextTemplate(
        template_name, outname, ignore_undefined_variables=True
    )
    error = False
    try:
        template.render(user_data)
    except Exception:  # bare "except:" would also trap SystemExit etc.
        traceback.print_exc()
        error = True
    self.assertFalse(error)
def test_remove_soft_page_breaks(self):
    """Soft page breaks in the template must be stripped.

    Both an explicit ``remove_soft_breaks()`` call and a normal
    ``render()`` must leave zero ``soft-page-break`` elements, and a
    paragraph that was split in two by a break must be merged back
    into a single paragraph in the output.
    """
    template_xml = pkg_resources.resource_filename(
        'py3o.template',
        'tests/templates/py3o_soft_page_break.odt'
    )
    outname = get_secure_filename()
    template = Template(template_xml, outname)
    # the fixture is known to contain exactly two soft breaks
    soft_breaks = get_soft_breaks(
        template.content_trees[0], template.namespaces
    )
    self.assertEqual(len(soft_breaks), 2)
    # explicit removal drops them all
    template.remove_soft_breaks()
    soft_breaks = get_soft_breaks(
        template.content_trees[0], template.namespaces
    )
    self.assertEqual(len(soft_breaks), 0)
    # a fresh template starts with both breaks again...
    template = Template(template_xml, outname)
    soft_breaks = get_soft_breaks(
        template.content_trees[0], template.namespaces
    )
    self.assertEqual(len(soft_breaks), 2)
    # ...and render() removes them implicitly
    template.render(data={"list1": [1, 2, 3]})
    soft_breaks = get_soft_breaks(
        template.content_trees[0], template.namespaces
    )
    self.assertEqual(len(soft_breaks), 0)
    outodt = zipfile.ZipFile(outname, 'r')
    content_list = lxml.etree.parse(
        BytesIO(outodt.read(template.templated_files[0]))
    )
    nmspc = template.namespaces
    paragraphs = content_list.findall('//text:p', nmspc)
    bottom_break_paragraphs, middle_break_paragraphs = 0, 0
    # classify every non-empty paragraph of the rendered output;
    # any unexpected text fails the test outright
    for p in paragraphs:
        if not p.text:
            continue
        text = p.text.strip()
        if text == (
            u"This is a text with a margin at the bottom and a "
            u"soft-page-break"
        ):
            bottom_break_paragraphs += 1
        elif text == (
            u"This is a paragraph that is cut in half by a "
            u"soft-page-break. This text should not remain cut "
            u"in half after rendering."
        ):
            middle_break_paragraphs += 1
        else:
            self.fail(u"Unidentified text in result: {}".format(text))
    # list1 has three items, so each paragraph is expected three times
    self.assertEqual(bottom_break_paragraphs, 3)
    self.assertEqual(middle_break_paragraphs, 3)
def test_remove_soft_breaks_without_tail(self):
    """Soft breaks must also be removed when the break element carries
    no tail text (regression fixture ``py3o_page_break_without_tail``).
    """
    template_xml = pkg_resources.resource_filename(
        'py3o.template',
        'tests/templates/py3o_page_break_without_tail.odt'
    )
    t = Template(template_xml, get_secure_filename())
    soft_breaks = get_soft_breaks(t.content_trees[0], t.namespaces)
    assert len(soft_breaks) > 0
    # explicit removal
    t.remove_soft_breaks()
    soft_breaks = get_soft_breaks(t.content_trees[0], t.namespaces)
    assert len(soft_breaks) == 0
    # implicit removal through render()
    t = Template(template_xml, get_secure_filename())
    soft_breaks = get_soft_breaks(t.content_trees[0], t.namespaces)
    assert len(soft_breaks) > 0
    t.render(data={"items": [
        {'Amount': 3, 'Currency': 'D'},
        {'Amount': 2, 'Currency': 'E'},
        {'Amount': 1, 'Currency': 'C'},
    ]})
    soft_breaks = get_soft_breaks(t.content_trees[0], t.namespaces)
    assert len(soft_breaks) == 0
def test_invalid_links(self):
    u"""Check that exceptions are raised on link url and text mismatch"""
    # (template file, expected error-message regex) pairs
    templates = [
        ('py3o_invalid_link.odt', 'url and text do not match.*'),
        ('py3o_invalid_link_old.odt', 'url and text do not match.*'),
        ('py3o_invalid_link_none.odt', 'Text not found for link.*'),
    ]
    for template, error in templates:
        template_fname = pkg_resources.resource_filename(
            'py3o.template', 'tests/templates/{}'.format(template)
        )
        t = Template(template_fname, get_secure_filename())
        # NOTE(review): assertRaisesRegexp is the deprecated alias of
        # assertRaisesRegex (renamed in Python 3.2); presumably kept
        # for Python 2 compatibility — confirm before modernising.
        with self.assertRaisesRegexp(TemplateException, error):
            t.render({'amount': 0.0})
def test_table_cell_function_call(self):
    u"""Test function calls inside ODT table cells.

    Rendering must simply succeed; any templating error would raise
    and fail the test.
    """
    # locate the fixture shipped with the package
    template_path = pkg_resources.resource_filename(
        'py3o.template',
        'tests/templates/py3o_table_cell_function_call.odt'
    )
    target = get_secure_filename()

    rows = []
    for i in range(1, 4):
        rows.append(Mock(val1=i, val2=range(i), val3=i ** 2))
    context = {
        'items': rows,
        'document': Mock(total=6),
    }

    Template(template_path, target).render(context)
def test_table_cell_for_loop(self):
    u"""Test for loop inside ODT table cells.

    Rendering must simply succeed; any templating error would raise
    and fail the test.
    """
    # locate the fixture shipped with the package
    template_path = pkg_resources.resource_filename(
        'py3o.template',
        'tests/templates/py3o_table_cell_for_loop.odt'
    )
    target = get_secure_filename()

    rows = []
    for i in range(1, 4):
        rows.append(Mock(val1=i, val2=range(i), val3=i ** 2))
    context = {
        'items': rows,
        'document': Mock(total=6),
    }

    Template(template_path, target).render(context)
def test_odt_value_styles(self):
    u"""Test odf_value attribute and ODT styles"""
    template_name = pkg_resources.resource_filename(
        'py3o.template',
        'tests/templates/py3o_odt_value_styles.odt'
    )
    outname = get_secure_filename()

    template = Template(template_name, outname)

    data_dict = {
        # plain string value, formatted by the template itself
        'string_date': '1999-12-30',
        # object carrying str() display text plus odf_value/odf_type,
        # which presumably drive the ODF cell value attributes —
        # confirm against Template's value handling
        'odt_value_date': Mock(
            __str__=lambda s: '2009-07-06',
            odf_value=40000,
            odf_type='date',
        )
    }

    template.render(data_dict)
    outodt = zipfile.ZipFile(outname, 'r')
    content_list = lxml.etree.parse(
        BytesIO(outodt.read(template.templated_files[0]))
    )
    expected_xml = lxml.etree.parse(
        pkg_resources.resource_filename(
            'py3o.template',
            'tests/templates/odt_value_styles_result.xml'
        )
    )
    result = lxml.etree.tostring(
        content_list, pretty_print=True,
    ).decode('utf-8')
    expected = lxml.etree.tostring(
        expected_xml, pretty_print=True,
    ).decode('utf-8')
    # compare ignoring all whitespace
    result = result.replace("\n", "").replace(" ", "")
    expected = expected.replace("\n", "").replace(" ", "")
    self.assertEqual(result, expected)
def test_ods_value_styles(self):
    u"""Test odf_value attribute and ODS styles"""
    template_name = pkg_resources.resource_filename(
        'py3o.template',
        'tests/templates/py3o_ods_value_styles.ods'
    )
    outname = get_secure_filename()

    template = Template(template_name, outname)

    data_dict = {
        # plain string value, formatted by the template itself
        'string_date': '1999-12-30',
        # object carrying str() display text plus odf_value/odf_type,
        # which presumably drive the ODF cell value attributes —
        # confirm against Template's value handling
        'odf_value_date': Mock(
            __str__=lambda s: '2009-07-06',
            odf_value=40000,
            odf_type='date',
        )
    }

    template.render(data_dict)
    outodt = zipfile.ZipFile(outname, 'r')
    content_list = lxml.etree.parse(
        BytesIO(outodt.read(template.templated_files[0]))
    )
    expected_xml = lxml.etree.parse(
        pkg_resources.resource_filename(
            'py3o.template',
            'tests/templates/ods_value_styles_result.xml'
        )
    )
    result = lxml.etree.tostring(
        content_list, pretty_print=True,
    ).decode('utf-8')
    expected = lxml.etree.tostring(
        expected_xml, pretty_print=True,
    ).decode('utf-8')
    # compare ignoring all whitespace
    result = result.replace("\n", "").replace(" ", "")
    expected = expected.replace("\n", "").replace(" ", "")
    self.assertEqual(result, expected)
| 49,308
|
https://github.com/cbsa-informatik-uni-siegen/gtitool/blob/master/de.unisiegen.gtitool.core/source/de/unisiegen/gtitool/core/entities/regex/UnfinishedNode.java
|
Github Open Source
|
Open Source
|
MIT
| null |
gtitool
|
cbsa-informatik-uni-siegen
|
Java
|
Code
| 753
| 2,328
|
package de.unisiegen.gtitool.core.entities.regex;
import java.util.ArrayList;
import javax.swing.event.EventListenerList;
import de.unisiegen.gtitool.core.entities.State;
import de.unisiegen.gtitool.core.entities.listener.PrettyStringChangedListener;
import de.unisiegen.gtitool.core.parser.ParserOffset;
import de.unisiegen.gtitool.core.parser.style.PrettyPrintable;
import de.unisiegen.gtitool.core.parser.style.PrettyString;
import de.unisiegen.gtitool.core.parser.style.PrettyToken;
import de.unisiegen.gtitool.core.parser.style.Style;
/**
 * A representation of an unfinished node, that stands for a Language from a
 * State s0 to a state s1 in the Convert DFA -> Regex algorithm
 */
public class UnfinishedNode extends LeafNode
{

  /**
   * The serial version uid.
   */
  private static final long serialVersionUID = 7119354254164008482L;


  /**
   * The k
   */
  private int k;


  /**
   * The {@link EventListenerList}.
   */
  private EventListenerList listenerList = new EventListenerList ();


  /**
   * The name
   */
  private String name;


  /**
   * The offset of this {@link UnfinishedNode} in the source code.
   *
   * @see #getParserOffset()
   * @see #setParserOffset(ParserOffset)
   */
  private ParserOffset parserOffset = NO_PARSER_OFFSET;


  /**
   * The position
   */
  private int position;


  /**
   * The {@link State} s0
   */
  private State s0;


  /**
   * The {@link State} s1
   */
  private State s1;


  /**
   * Creates a new {@link UnfinishedNode}
   *
   * @param s0 The {@link State} s0
   * @param s1 The {@link State} s1
   * @param k The k
   */
  public UnfinishedNode ( State s0, State s1, int k )
  {
    this.s0 = s0;
    this.s1 = s1;
    this.k = k;
    this.name = "L" + s0.getName () + s1.getName () + k; //$NON-NLS-1$
  }


  /**
   * {@inheritDoc}
   *
   * @see PrettyPrintable#addPrettyStringChangedListener(de.unisiegen.gtitool.core.entities.listener.PrettyStringChangedListener)
   */
  public void addPrettyStringChangedListener (
      PrettyStringChangedListener listener )
  {
    this.listenerList.add ( PrettyStringChangedListener.class, listener );
  }


  /**
   * {@inheritDoc}
   *
   * @see RegexNode#clone()
   */
  @Override
  public RegexNode clone ()
  {
    return new UnfinishedNode ( this.s0, this.s1, this.k );
  }


  /**
   * {@inheritDoc}
   *
   * @see java.lang.Comparable#compareTo(java.lang.Object)
   */
  public int compareTo ( @SuppressWarnings ( "unused" ) RegexNode o )
  {
    return 0;
  }


  /**
   * {@inheritDoc}
   *
   * Two {@link UnfinishedNode}s are equal iff their names (which encode
   * s0, s1 and k) are equal.
   *
   * @see RegexNode#equals(java.lang.Object)
   */
  @Override
  public boolean equals ( Object o )
  {
    if ( o == this )
    {
      return true;
    }
    if ( o instanceof UnfinishedNode )
    {
      return this.name.equals ( ( ( UnfinishedNode ) o ).getName () );
    }
    return false;
  }


  /**
   * {@inheritDoc}
   *
   * Overridden to stay consistent with {@link #equals(Object)}, which
   * compares nodes by name: equal nodes must return equal hash codes so
   * instances behave correctly in hash-based collections. (The original
   * class overrode equals without hashCode, violating the
   * {@link Object} contract.)
   *
   * @see java.lang.Object#hashCode()
   */
  @Override
  public int hashCode ()
  {
    return this.name.hashCode ();
  }


  /**
   * {@inheritDoc}
   *
   * @see RegexNode#getAllChildren()
   */
  @Override
  public ArrayList < RegexNode > getAllChildren ()
  {
    return new ArrayList < RegexNode > ();
  }


  /**
   * {@inheritDoc}
   *
   * @see RegexNode#getChildren()
   */
  @Override
  public ArrayList < RegexNode > getChildren ()
  {
    return new ArrayList < RegexNode > ();
  }


  /**
   * Returns the k.
   *
   * @return The k.
   * @see #k
   */
  public int getK ()
  {
    return this.k;
  }


  /**
   * {@inheritDoc}
   *
   * @see RegexNode#getLeftChildrenCount()
   */
  @Override
  public int getLeftChildrenCount ()
  {
    return 0;
  }


  /**
   * Returns the name.
   *
   * @return The name.
   * @see #name
   */
  public String getName ()
  {
    return this.name;
  }


  /**
   * {@inheritDoc}
   *
   * @see RegexNode#getNextUnfinishedNode()
   */
  @Override
  public UnfinishedNode getNextUnfinishedNode ()
  {
    return this;
  }


  /**
   * {@inheritDoc}
   *
   * @see RegexNode#getNodeString()
   */
  @Override
  public PrettyString getNodeString ()
  {
    PrettyString s = new PrettyString ();
    s.add ( new PrettyToken ( this.name, Style.REGEX_SYMBOL ) );
    return s;
  }


  /**
   * {@inheritDoc}
   *
   * @see de.unisiegen.gtitool.core.entities.Entity#getParserOffset()
   */
  public ParserOffset getParserOffset ()
  {
    return this.parserOffset;
  }


  /**
   * {@inheritDoc}
   *
   * @see LeafNode#getPosition()
   */
  @Override
  public int getPosition ()
  {
    return this.position;
  }


  /**
   * {@inheritDoc}
   *
   * @see RegexNode#getRightChildrenCount()
   */
  @Override
  public int getRightChildrenCount ()
  {
    return 0;
  }


  /**
   * Returns the s0.
   *
   * @return The s0.
   * @see #s0
   */
  public State getS0 ()
  {
    return this.s0;
  }


  /**
   * Returns the s1.
   *
   * @return The s1.
   * @see #s1
   */
  public State getS1 ()
  {
    return this.s1;
  }


  /**
   * {@inheritDoc}
   *
   * @see RegexNode#isInCoreSyntax()
   */
  @Override
  public boolean isInCoreSyntax ()
  {
    return true;
  }


  /**
   * {@inheritDoc}
   *
   * @see RegexNode#nullable()
   */
  @Override
  public boolean nullable ()
  {
    return false;
  }


  /**
   * {@inheritDoc}
   *
   * @see PrettyPrintable#removePrettyStringChangedListener(de.unisiegen.gtitool.core.entities.listener.PrettyStringChangedListener)
   */
  public void removePrettyStringChangedListener (
      PrettyStringChangedListener listener )
  {
    this.listenerList.remove ( PrettyStringChangedListener.class, listener );
  }


  /**
   * {@inheritDoc}
   *
   * @see de.unisiegen.gtitool.core.entities.Entity#setParserOffset(de.unisiegen.gtitool.core.parser.ParserOffset)
   */
  public void setParserOffset ( ParserOffset parserOffset )
  {
    this.parserOffset = parserOffset;
  }


  /**
   * {@inheritDoc}
   *
   * @see LeafNode#setPosition(int)
   */
  @Override
  public void setPosition ( int p )
  {
    this.position = p;
  }


  /**
   * {@inheritDoc}
   *
   * @see RegexNode#toCoreSyntax(boolean)
   */
  @Override
  public RegexNode toCoreSyntax (
      @SuppressWarnings ( "unused" ) boolean withCharacterClasses )
  {
    return this;
  }


  /**
   * {@inheritDoc}
   *
   * @see RegexNode#toPrettyString()
   */
  public PrettyString toPrettyString ()
  {
    PrettyString s = new PrettyString ();
    s.add ( new PrettyToken ( this.name, Style.REGEX_SYMBOL ) );
    return s;
  }


  /**
   * {@inheritDoc}
   *
   * @see java.lang.Object#toString()
   */
  @Override
  public String toString ()
  {
    return "'" + this.name; //$NON-NLS-1$
  }
}
| 7,336
|
https://github.com/dexterx17/planning/blob/master/application/views/recursos/block.php
|
Github Open Source
|
Open Source
|
MIT
| null |
planning
|
dexterx17
|
PHP
|
Code
| 68
| 313
|
<?php /* Partial view: renders one table row for a "recurso" record.
   Expects: $info (row: ID, proyecto, estado, recurso, descripcion, costo)
   and $tipos_transacion (estado code -> label map).
   NOTE(review): $info values are echoed without html escaping — if any
   of these fields can contain user-supplied text this is an XSS vector;
   confirm that they are sanitized upstream. */ ?>
<tr id="item<?php echo $info['ID']; ?>">
<td>
<a class="btn btn-redirected" data-content="item<?php echo $info['ID']; ?>" href="<?php echo site_url("recursos/nuevo/".$info['ID'].'/'.$info['proyecto']); ?>" data-toggle="tooltip" title="<?php echo lang('recursos_edit'); ?>">
<i class="fa fa-lg fa-fw fa-edit"></i>
</a>
<a class="btn btn-delete" data-content="item<?php echo $info['ID']; ?>" href="<?php echo site_url("recursos/delete/".$info['ID']); ?>" data-toggle="tooltip" title="<?php echo lang('recursos_delete'); ?>">
<i class="fa fa-lg fa-fw fa-trash-o"></i>
</a>
</td>
<td><?php echo $tipos_transacion[$info['estado']]; ?></td>
<td><?php echo $info['recurso']; ?></td>
<td><?php echo $info['descripcion']; ?></td>
<td><?php echo $info['costo']; ?></td>
</tr>
| 31,552
|
https://github.com/galich/SharpDevelop/blob/master/src/AddIns/Debugger/Debugger.Tests/Tests/StackFrame_Callstack.cs
|
Github Open Source
|
Open Source
|
MIT
| 2,019
|
SharpDevelop
|
galich
|
C#
|
Code
| 365
| 1,281
|
// Copyright (c) 2014 AlphaSierraPapa for the SharpDevelop Team
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this
// software and associated documentation files (the "Software"), to deal in the Software
// without restriction, including without limitation the rights to use, copy, modify, merge,
// publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons
// to whom the Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all copies or
// substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
// INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
// PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
// FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
using System;
namespace Debugger.Tests
{
/// <summary>
/// Debuggee for the StackFrame_Callstack test: builds a three-frame call
/// chain (Main -> Sub1 -> Sub2) and breaks in the innermost method so the
/// test harness can walk and unwind the callstack.
/// </summary>
public class StackFrame_Callstack
{
	public static void Main()
	{
		Sub1();
	}

	static void Sub1()
	{
		Sub2();
	}

	static void Sub2()
	{
		// Hands control to the attached debugger (the test harness).
		System.Diagnostics.Debugger.Break();
	}
}
}
#if TEST_CODE
namespace Debugger.Tests {
public partial class DebuggerTests
{
	/// <summary>
	/// Dumps the callstack at the initial break (3 frames), then after
	/// each StepOut (2 frames, then 1). The XML under EXPECTED_OUTPUT
	/// is the reference for these dumps.
	/// </summary>
	[NUnit.Framework.Test]
	public void StackFrame_Callstack()
	{
		StartTest();

		ObjectDump("Callstack", this.CurrentThread.GetCallstack());
		this.CurrentStackFrame.StepOut();
		ObjectDump("Callstack", this.CurrentThread.GetCallstack());
		this.CurrentStackFrame.StepOut();
		ObjectDump("Callstack", this.CurrentThread.GetCallstack());

		EndTest();
	}
}
}
#endif
#if EXPECTED_OUTPUT
<?xml version="1.0" encoding="utf-8"?>
<DebuggerTests>
<Test
name="StackFrame_Callstack.cs">
<Started />
<ModuleLoaded>mscorlib.dll (No symbols)</ModuleLoaded>
<ModuleLoaded>StackFrame_Callstack.exe (Has symbols)</ModuleLoaded>
<Paused>StackFrame_Callstack.cs:37,4-37,40</Paused>
<Callstack>
<Item>
<StackFrame
ChainIndex="1"
FrameIndex="2"
MethodInfo="[Method Debugger.Tests.StackFrame_Callstack.Sub2():System.Void]"
NextStatement="StackFrame_Callstack.cs:37,4-37,40"
Thread="Thread Name = Suspended = False" />
</Item>
<Item>
<StackFrame
ChainIndex="1"
FrameIndex="1"
MethodInfo="[Method Debugger.Tests.StackFrame_Callstack.Sub1():System.Void]"
NextStatement="StackFrame_Callstack.cs:32,4-32,11"
Thread="Thread Name = Suspended = False" />
</Item>
<Item>
<StackFrame
ChainIndex="1"
MethodInfo="[Method Debugger.Tests.StackFrame_Callstack.Main():System.Void]"
NextStatement="StackFrame_Callstack.cs:27,4-27,11"
Thread="Thread Name = Suspended = False" />
</Item>
</Callstack>
<Paused>StackFrame_Callstack.cs:32,4-32,11</Paused>
<Callstack>
<Item>
<StackFrame
ChainIndex="1"
FrameIndex="1"
MethodInfo="[Method Debugger.Tests.StackFrame_Callstack.Sub1():System.Void]"
NextStatement="StackFrame_Callstack.cs:32,4-32,11"
Thread="Thread Name = Suspended = False" />
</Item>
<Item>
<StackFrame
ChainIndex="1"
MethodInfo="[Method Debugger.Tests.StackFrame_Callstack.Main():System.Void]"
NextStatement="StackFrame_Callstack.cs:27,4-27,11"
Thread="Thread Name = Suspended = False" />
</Item>
</Callstack>
<Paused>StackFrame_Callstack.cs:27,4-27,11</Paused>
<Callstack>
<Item>
<StackFrame
ChainIndex="1"
MethodInfo="[Method Debugger.Tests.StackFrame_Callstack.Main():System.Void]"
NextStatement="StackFrame_Callstack.cs:27,4-27,11"
Thread="Thread Name = Suspended = False" />
</Item>
</Callstack>
<Exited />
</Test>
</DebuggerTests>
#endif // EXPECTED_OUTPUT
| 44,004
|
https://github.com/wemblykj/FPGA_MSO/blob/master/src/lib/dsp/rtl/cic_integrator.v
|
Github Open Source
|
Open Source
|
MIT
| 2,021
|
FPGA_MSO
|
wemblykj
|
Verilog
|
Code
| 165
| 444
|
`timescale 1ns / 1ps
//////////////////////////////////////////////////////////////////////////////////
// Company:
// Engineer: Paul Wightmore
//
// Create Date: 21:09:14 12/15/2020
// Design Name: cic_integrator
// Module Name: cic_integrator.v
// Project Name: FPGA Mixed Signal Oscilloscope
// Target Devices:
// Tool versions:
// Description: Provides a generic integrator stage implmentation.
//
// A from scratch implementation of a Cascading Integrator-Comb
// Filter inspired by the the article:
// 'A Beginner's Guide To Cascaded Integrator-Comb (CIC) Filters'
// https://www.dsprelated.com/showarticle/1337.php
//
// Dependencies:
//
// Revision:
// Revision 0.01 - File Created
// Additional Comments:
// License: https://www.apache.org/licenses/LICENSE-2.0
//
//////////////////////////////////////////////////////////////////////////////////
module cic_integrator
#(
    // characteristics
    // precision
    parameter PRECISION = 12 // the vertical precision
)
(
    input rst_n, // reset (active low)
    input clk, // clock
    input [PRECISION-1:0] x, // stage input
    output [PRECISION-1:0] y // stage output
);

    // Running accumulator: acc[n] = acc[n-1] + x[n]. Addition wraps
    // modulo 2**PRECISION, the expected behaviour for a CIC stage.
    reg [PRECISION-1:0] acc_r;

    always @(posedge clk or negedge rst_n)
        if (!rst_n)
            acc_r <= {PRECISION{1'b0}};
        else
            acc_r <= acc_r + x;

    // The stage output is simply the registered accumulator value.
    assign y = acc_r;

endmodule
| 14,315
|
https://github.com/dtczhl/doppler-illustrator/blob/master/android/app/src/main/java/com/huanlezhang/dtcdopplerillustrator/MainActivity.java
|
Github Open Source
|
Open Source
|
MIT
| 2,018
|
doppler-illustrator
|
dtczhl
|
Java
|
Code
| 505
| 1,815
|
package com.huanlezhang.dtcdopplerillustrator;
import android.Manifest;
import android.app.Activity;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.os.Bundle;
import android.os.Handler;
import android.support.v4.app.ActivityCompat;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.View;
import android.widget.ImageView;
import android.widget.RadioGroup;
import android.widget.TextView;
import android.widget.ToggleButton;
import java.util.Arrays;
/**
 * Illustrating Doppler effect
 *
 * <p>The activity runs in one of two roles chosen via a radio group:
 * a sender that plays a 19 kHz tone, or a receiver that analyzes the
 * microphone input ({@code AnalyzeFrequency}) and plots the frequency
 * trace onto a square bitmap.</p>
 *
 * @author Huanle Zhang, University of California, Davis
 * www.huanlezhang.com
 * @version 0.2
 * @since 2019-07-11
 */
public class MainActivity extends Activity {

    // private static final String TAG = "DTC MainActivity";

    // Runtime permissions requested at startup (microphone capture).
    private static final String[] PermissionStrings = {
            Manifest.permission.RECORD_AUDIO
    };
    private static final int Permission_ID = 1;

    // Role selection (sender/receiver) and the start/stop toggle.
    private RadioGroup mRadioGroup;
    private boolean mIsSender;
    private ToggleButton mMainToggleBtn;

    // Plot surface: a square bitmap drawn through mCanvas.
    private ImageView mImageView;
    private int mImageViewWidth;
    private int mImageViewHeight;
    private Bitmap mBitmap;
    private Canvas mCanvas;
    private Paint mPaint;          // trace line
    private Paint mBaseLinePaint;  // horizontal zero-offset line

    // for sender
    PlaySound mPlaySound = new PlaySound();
    private final int FREQ_SOUND = 19000; // emit 19 KHz sounds

    // for receiver
    private Handler mHandler = new Handler();
    private Runnable mDrawFFTRun = new DrawFFT();
    private AnalyzeFrequency mFftAnalysis;
    private final int N_FFT_DOT = 4096;
    // NOTE(review): mCurArray is cleared in stopMain() but never read
    // elsewhere in this class — confirm whether it is still needed.
    private float[] mCurArray = new float[N_FFT_DOT/2-1];

    private static final int FREQ_OFFSET_MAX = 20; // maximum frequency range

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        ActivityCompat.requestPermissions(this, PermissionStrings, Permission_ID);

        mRadioGroup = findViewById(R.id.radioGroup);

        // Toggle button starts or stops the selected role.
        mMainToggleBtn = findViewById(R.id.startToggleBtn);
        mMainToggleBtn.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                ToggleButton toggleButton = (ToggleButton) v;
                if (toggleButton.isChecked()) {
                    startMain();
                } else {
                    stopMain();
                }
            }
        });

        // set up the imageview: a square bitmap as wide as the screen
        // minus a small density-scaled margin
        mImageView = findViewById(R.id.mainImageView);
        DisplayMetrics displayMetrics = new DisplayMetrics();
        getWindowManager().getDefaultDisplay().getMetrics(displayMetrics);
        mImageViewWidth = displayMetrics.widthPixels - (int)(getResources().getDisplayMetrics().density*4.0+0.5);
        mImageViewHeight = mImageViewWidth; // a square view

        mBitmap = Bitmap.createBitmap(mImageViewWidth, mImageViewHeight, Bitmap.Config.ARGB_4444);
        mCanvas = new Canvas(mBitmap);
        mCanvas.drawColor(Color.LTGRAY);

        mPaint = new Paint();
        mPaint.setColor(Color.BLACK);
        mPaint.setStrokeWidth(10);

        mBaseLinePaint = new Paint();
        mBaseLinePaint.setColor(Color.BLUE);
        mBaseLinePaint.setStrokeWidth(5);

        mImageView.setImageBitmap(mBitmap);
        mImageView.invalidate();

        // axis labels: +/- FREQ_OFFSET_MAX Hz around the carrier
        TextView maxFreqText = findViewById(R.id.maxFreq);
        maxFreqText.setText(FREQ_OFFSET_MAX + " Hz");
        TextView minFreqText = findViewById(R.id.minFreq);
        minFreqText.setText(-FREQ_OFFSET_MAX + " Hz");
    }

    // Start the role currently selected in the radio group; the UI is
    // locked (except the toggle itself) while running.
    void startMain() {
        enableAllUI(false);
        mMainToggleBtn.setEnabled(true);

        int radioBtnId = mRadioGroup.getCheckedRadioButtonId();
        if (radioBtnId == R.id.senderRadio) {
            // sender: play the fixed tone
            mIsSender = true;
            mPlaySound = new PlaySound();
            mPlaySound.mOutputFreq = FREQ_SOUND;
            mPlaySound.start();
        } else {
            // receiver: analyze microphone input; DrawFFT is posted via
            // mHandler by AnalyzeFrequency to refresh the plot
            mIsSender = false;
            mFftAnalysis = new AnalyzeFrequency(mHandler, mDrawFFTRun);
            mFftAnalysis.start();
        }
    }

    // Stop whichever role is running and unlock the UI.
    void stopMain() {
        enableAllUI(true);

        if (mIsSender) {
            if (mPlaySound != null) {
                mPlaySound.stop();
                mPlaySound = null;
            }
        } else {
            if (mFftAnalysis != null) {
                mFftAnalysis.stop();
                mFftAnalysis = null;
            }
            Arrays.fill(mCurArray, (float) 0.0);
        }
    }

    // Enable/disable the role radios and the toggle button together.
    void enableAllUI(boolean enable) {
        // for radio group
        for (int i = 0; i < mRadioGroup.getChildCount(); i++) {
            mRadioGroup.getChildAt(i).setEnabled(enable);
        }
        mMainToggleBtn.setEnabled(enable);
    }

    // draw doppler on screen
    public class DrawFFT implements Runnable{
        @Override
        public void run() {
            // analysis may have been stopped between post and run
            if (mFftAnalysis == null) {
                return;
            }

            // vertical pixels per Hz of frequency offset
            int scaleFFT = (mImageViewHeight/2) / FREQ_OFFSET_MAX;

            int N_CH_DOT = mFftAnalysis.N_CH_DOT;
            int fftInterval = (int) (1.0 * mImageViewWidth / N_CH_DOT);
            int fftDisplayRange = fftInterval * N_CH_DOT;

            // newest sample in the circular buffer mFftAnalysis.mCh
            int index = (mFftAnalysis.mChIndex - 1) % N_CH_DOT;

            mCanvas.drawColor(Color.LTGRAY);
            // horizontal base line
            mCanvas.drawLine(0, mImageViewHeight/2, mImageViewWidth, mImageViewHeight/2, mBaseLinePaint);

            // walk backwards through the circular buffer, drawing one
            // segment per sample; "+ 2 * N_CH_DOT" keeps the modulo
            // argument non-negative while index is decremented
            for (int i = fftDisplayRange; i > fftInterval; i -= fftInterval) {
                mCanvas.drawLine(i, mImageViewHeight / 2 - scaleFFT * mFftAnalysis.mCh[(index + 2 * N_CH_DOT) % N_CH_DOT],
                        i - fftInterval, mImageViewHeight / 2 - scaleFFT * mFftAnalysis.mCh[(index + 2 * N_CH_DOT - 1) % N_CH_DOT],
                        mPaint);
                --index;
            }

            mImageView.invalidate();
        }
    }
}
| 38,322
|
https://github.com/Dok11/angular/blob/master/packages/compiler-cli/src/ngtsc/transform/src/transform.ts
|
Github Open Source
|
Open Source
|
MIT
| 2,019
|
angular
|
Dok11
|
TypeScript
|
Code
| 383
| 1,016
|
/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {WrappedNodeExpr} from '@angular/compiler';
import * as ts from 'typescript';
import {IvyCompilation} from './compilation';
import {ImportManager, translateExpression} from './translator';
/**
 * Builds a TypeScript transformer factory that applies the given
 * `IvyCompilation` to every source file it visits.
 */
export function ivyTransformFactory(compilation: IvyCompilation):
    ts.TransformerFactory<ts.SourceFile> {
  // Curried form: context -> file -> transformed file.
  return (context: ts.TransformationContext): ts.Transformer<ts.SourceFile> =>
             (file: ts.SourceFile): ts.SourceFile =>
                 transformIvySourceFile(compilation, context, file);
}
/**
 * A transformer which operates on ts.SourceFiles and applies changes from an `IvyCompilation`.
 */
function transformIvySourceFile(
    compilation: IvyCompilation, context: ts.TransformationContext,
    file: ts.SourceFile): ts.SourceFile {
  const importManager = new ImportManager();

  // Recursively scan through the AST and perform any updates requested by the IvyCompilation.
  const sf = visitNode(file);

  // Generate the import statements to prepend.
  const imports = importManager.getAllImports().map(
      i => ts.createImportDeclaration(
          undefined, undefined,
          ts.createImportClause(undefined, ts.createNamespaceImport(ts.createIdentifier(i.as))),
          ts.createLiteral(i.name)));

  // Prepend imports if needed.
  // NOTE(review): this assigns to `sf.statements`, which the TypeScript
  // API types as readonly — it relies on in-place mutation of the node;
  // confirm against the pinned typescript version before upgrading.
  if (imports.length > 0) {
    sf.statements = ts.createNodeArray([...imports, ...sf.statements]);
  }
  return sf;

  // Helper function to process a class declaration.
  function visitClassDeclaration(node: ts.ClassDeclaration): ts.ClassDeclaration {
    // Determine if this class has an Ivy field that needs to be added, and compile the field
    // to an expression if so.
    const res = compilation.compileIvyFieldFor(node);
    if (res !== undefined) {
      // There is a field to add. Translate the initializer for the field into TS nodes.
      const exprNode = translateExpression(res.initializer, importManager);

      // Create a static property declaration for the new field.
      const property = ts.createProperty(
          undefined, [ts.createToken(ts.SyntaxKind.StaticKeyword)], res.field, undefined, undefined,
          exprNode);

      // Replace the class declaration with an updated version.
      node = ts.updateClassDeclaration(
          node,
          // Remove the decorator which triggered this compilation, leaving the others alone.
          maybeFilterDecorator(node.decorators, compilation.ivyDecoratorFor(node) !),
          node.modifiers, node.name, node.typeParameters, node.heritageClauses || [],
          [...node.members, property]);
    }
    // Recurse into the class declaration in case there are nested class declarations.
    return ts.visitEachChild(node, child => visitNode(child), context);
  }

  // Helper function that recurses through the nodes and processes each one.
  function visitNode<T extends ts.Node>(node: T): T;
  function visitNode(node: ts.Node): ts.Node {
    if (ts.isClassDeclaration(node)) {
      return visitClassDeclaration(node);
    } else {
      return ts.visitEachChild(node, child => visitNode(child), context);
    }
  }
}
/**
 * Removes `toRemove` from a decorator list, returning `undefined` when
 * the input list is absent or nothing remains after filtering.
 */
function maybeFilterDecorator(
    decorators: ts.NodeArray<ts.Decorator>| undefined,
    toRemove: ts.Decorator): ts.NodeArray<ts.Decorator>|undefined {
  if (decorators === undefined) {
    return undefined;
  }
  // Compare against the original node so synthesized wrappers still match.
  const kept: ts.Decorator[] = [];
  for (const dec of decorators) {
    if (ts.getOriginalNode(dec) !== toRemove) {
      kept.push(dec);
    }
  }
  return kept.length > 0 ? ts.createNodeArray(kept) : undefined;
}
| 4,922
|
https://github.com/jusinamine/dzcode.io/blob/master/mobile/src/redux/reducers/index.ts
|
Github Open Source
|
Open Source
|
MIT
| null |
dzcode.io
|
jusinamine
|
TypeScript
|
Code
| 54
| 132
|
import { articlesScreen } from "./articles-screen";
import { combineReducers } from "redux";
import { contributeScreen } from "./contribute-screen";
import { faqScreen } from "./faq-screen";
import { general } from "./general";
import { learnScreen } from "./learn-screen";
import { projectsScreen } from "./projects-screen";

// Root reducer: one slice reducer per screen plus the shared "general"
// slice. The shorthand property names below become the state keys.
export const mainReducer = combineReducers({
  articlesScreen,
  contributeScreen,
  faqScreen,
  learnScreen,
  projectsScreen,
  general,
});
| 46,023
|
https://github.com/Grommerin/meta-marsboard-bsp/blob/master/recipes-strim/strim-scripts/strim-scripts/unused/canlogd.sh
|
Github Open Source
|
Open Source
|
MIT
| 2,016
|
meta-marsboard-bsp
|
Grommerin
|
Shell
|
Code
| 176
| 789
|
#!/bin/sh
# canlogd - init script for the canlog data logger.
# Usage: canlogd {start|stop|restart}

PROGRAM_MAIN_FILE_NAME=canlog
PROGRAM_UPDATE_FILE_NAME=canlog_upd

# Pull in the distro init helper functions; exit quietly when neither
# location exists.
if [ -f /etc/init.d/functions ] ; then
	. /etc/init.d/functions
elif [ -f /etc/rc.d/init.d/functions ] ; then
	. /etc/rc.d/init.d/functions
else
	exit 0
fi

KIND="canlog"
THIS_SCRIPT_FILE=/etc/init.d/canlogd

# Make the downloaded update binary executable.
Given_permission()
{
	chmod a+x /home/root/canlogdir/${PROGRAM_UPDATE_FILE_NAME}
}

# Overwrite the running binary with the update.
Replace_firmware()
{
	cp -f /home/root/canlogdir/${PROGRAM_UPDATE_FILE_NAME} /home/root/canlogdir/${PROGRAM_MAIN_FILE_NAME}
}

Delete_firmware()
{
	rm -f /home/root/canlogdir/${PROGRAM_MAIN_FILE_NAME}
}

Delete_firmware_upd()
{
	rm -f /home/root/canlogdir/${PROGRAM_UPDATE_FILE_NAME}
}

# If an update binary was dropped in place, install it and remove it.
# The script runs under /bin/sh, so POSIX "[ ... ]" is used here instead
# of the bash-only "[[ ... ]]" the original relied on.
Check_firmware()
{
	if [ -f /home/root/canlogdir/${PROGRAM_UPDATE_FILE_NAME} ] ; then
		Given_permission
		#Delete_firmware
		Replace_firmware
		Delete_firmware_upd
	fi
}

# First-boot migration: move the factory files out of /usr/bin into the
# writable /home/root/canlogdir location.
Check_init()
{
	if [ -f /usr/bin/canlogdir/settings.xml ] ; then
		cp -rf /usr/bin/canlogdir /home/root/
		rm -rf /usr/bin/canlogdir
	fi
	if [ -f /usr/bin/${PROGRAM_MAIN_FILE_NAME} ] ; then
		cp -f /usr/bin/${PROGRAM_MAIN_FILE_NAME} /home/root/canlogdir/${PROGRAM_MAIN_FILE_NAME}
		rm -f /usr/bin/${PROGRAM_MAIN_FILE_NAME}
	fi
}

# Migrate/update if needed, then launch the logger appending to a
# timestamped log file.
start()
{
	echo -n "Starting $KIND services: "
	Check_init
	Check_firmware
	DATE=$(date +%m%d%H%M%y)
	FILE=/home/root/canlogdir/log${DATE}.log
	/home/root/canlogdir/canlog >> $FILE
	echo
}

stop()
{
	echo -n "Shutting down $KIND services: "
	# NOTE(review): matching via `ps | grep` is fragile (can match an
	# unrelated process); a pidfile would be more robust.
	kill -s SIGTERM $(ps | grep -m 1 ${PROGRAM_MAIN_FILE_NAME} | awk '{print $1}')
	echo
}

restart()
{
	echo -n "Restarting $KIND services: "
	stop
	start
	echo
}

case "$1" in
	start)
		start
		;;
	stop)
		stop
		;;
	restart)
		restart
		;;
	*)
		# $"..." is bash-only locale quoting; plain quotes are POSIX.
		echo "Usage: $0 {start|stop|restart}"
		exit 1
esac
exit $?
| 25,922
|
https://github.com/itsmepatz/uranus-crm/blob/master/module/cron/core/Cron_Model.php
|
Github Open Source
|
Open Source
|
MIT
| null |
uranus-crm
|
itsmepatz
|
PHP
|
Code
| 21
| 84
|
<?php
// Block direct web access to this script outside of the framework.
defined('BASEPATH') OR exit('No direct script access allowed');
require_once(FCPATH.'resources/core/Dermeva_Model.php');

/**
 * Base model for the cron module.
 *
 * Extends the shared Dermeva_Model and loads the default database
 * connection so cron tasks can run queries.
 */
class Cron_Model extends Dermeva_Model {

    function __construct()
    {
        parent::__construct();
        // Load the default database connection for this model.
        $this->load->database();
    }
}
| 22,855
|
https://github.com/sam016/THREE-CSGMesh/blob/master/v2/grid-material.js
|
Github Open Source
|
Open Source
|
MIT
| null |
THREE-CSGMesh
|
sam016
|
JavaScript
|
Code
| 240
| 906
|
import * as THREE from "three";
// Vertex shader for the standalone grid material: passes the raw vertex
// position through to the fragment stage and computes clip-space position.
let vs = `
varying vec3 vertex;
void main() {
vertex = position;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}`

// GLSL snippet computing `gridVal` (grid-line intensity in [0,1]) from vUv.
// Spliced into both the standalone fragment shader below and the patched
// template shader in GridMaterial.  The GRID_SIZE token is replaced with a
// numeric literal by GridMaterial's constructor.
let gridValFrag=`
float gridVal;
{
vec2 coord = (vUv.xy-.5)*GRID_SIZE;// Compute anti-aliased world-space grid lines
vec2 grid = abs(fract(coord - 0.5) - 0.5) / fwidth(coord);
float line = min(grid.x, grid.y);
gridVal = 1.0 - min(line, 1.0);
if(gridVal<.001)gridVal=0.;//discard;
}
`

// Fragment shader for the standalone grid material: tints the grid lines
// with the `color` uniform, alpha scaled by gridVal.
let fs = `
// License: CC0 (http://creativecommons.org/publicdomain/zero/1.0/)
//#extension GL_OES_standard_derivatives : enable
varying vec3 vertex;
uniform vec4 color;
void main() {
${gridValFrag}
gl_FragColor = vec4(color.xyz, color.w*gridVal);// Just visualize the grid lines directly
}
`
/**
 * Material that overlays anti-aliased grid lines.
 *
 * When a template material is supplied, it is cloned and its compiled
 * shader is patched (via onBeforeCompile) so the grid is blended into the
 * material's own fragment output.  When no template is supplied, a
 * standalone ShaderMaterial that renders only the grid is returned.
 *
 * NOTE: the constructor deliberately returns a material instance (either
 * the patched clone or the ShaderMaterial), not `this`.
 */
class GridMaterial{
  constructor(template, gridSz = 10.) {
    if (template) {
      // Clone first so the caller's material is not mutated.
      // Bug fix: the original called template.clone() BEFORE the null
      // check, which threw a TypeError whenever no template was given and
      // made the ShaderMaterial fallback unreachable.
      template = template.clone();
      template.onBeforeCompile = (shader, renderer) => {
        // Splice the grid computation in front of the transparency block
        // and substitute the literal grid size for GRID_SIZE.
        shader.fragmentShader = shader.fragmentShader.replace(`#ifdef TRANSPARENCY
diffuseColor.a`, `
#if ( defined( USE_UV ) && ! defined( UVS_VERTEX_ONLY ) )
//varying vec2 vUv;
//outgoingLight.rgb=vec3(1.,0.,0.);
${gridValFrag}
diffuseColor.a *= max(gridVal,diffuseColor.a);
// diffuseColor.a *= gridVal;
//outgoingLight.rgb = vec3(1.)-outgoingLight.rgb;
outgoingLight.rgb = outgoingLight.brg;
#endif
#ifdef TRANSPARENCY
diffuseColor.a`)
        .replace('GRID_SIZE', (gridSz | 0) + '.');
      };
      return template;
    }
    // No template: self-contained grid-only shader material.
    return new THREE.ShaderMaterial({
      uniforms: {
        time: { value: 1.0 },
        resolution: { value: new THREE.Vector2() },
        color: { value: new THREE.Vector4(1, 1, 1, 1) },
        gridScale: { value: 1. }
      },
      vertexShader: vs,
      fragmentShader: fs,
      extensions: { derivatives: true },
      transparent: true,
      side: THREE.DoubleSide
    });
  }

  /**
   * Build a horizontal 20x20 grid plane mesh using GridMaterial.
   * @param {THREE.Material} [material] optional template material; when
   *   omitted, a textured MeshStandardMaterial is used as the template.
   */
  static makeGrid(material) {
    const grid = new THREE.Mesh(
      new THREE.PlaneGeometry(20.00, 20.00),
      new GridMaterial(
        material || new THREE.MeshStandardMaterial({
          map: new THREE.TextureLoader().load(
            "https://cdn.glitch.com/02b1773f-db1a-411a-bc71-ff25644e8e51%2Fmandala.jpg?v=1594201375330"
          ),
          transparent: true,
          opacity: 1,
          alphaTest: 0.5,
          depthWrite: false,
          side: THREE.DoubleSide
        })
      )
    );
    grid.rotation.x = Math.PI * -0.5;  // lay the plane flat (XZ plane)
    grid.renderOrder = 0;
    return grid;
  }
}
export default GridMaterial
| 15,294
|
https://github.com/alenSavov/Software-University-SoftUni/blob/master/05.C# DB Fundamentals/01.Database Basics – MS SQL – May 2018/08.Exam Preparations/Databases MSSQL Server Exam - 22 October 2017/03.Queries.sql
|
Github Open Source
|
Open Source
|
MIT
| 2,019
|
Software-University-SoftUni
|
alenSavov
|
SQL
|
Code
| 656
| 1,655
|
--Task 5 - Users by Age
SELECT Username,
Age
FROM Users
ORDER BY Age,
Username DESC;
--Task 6 - Unassigned Reports
SELECT Description,
Opendate
FROM Reports
WHERE Employeeid IS NULL
ORDER BY Opendate,
Description;
--Task 7. Employees & Reports
SELECT E.Firstname,
E.Lastname,
R.Description,
FORMAT(R.Opendate, 'yyyy-MM-dd') AS Opendate
FROM Employees AS E
JOIN Reports AS R ON R.Employeeid = E.Id
ORDER BY E.Id,
R.Opendate;
--Task 8 - Most reported Category
SELECT C.Name AS CategoryName,
COUNT(*) AS ReportsNumber
FROM Categories AS C
JOIN Reports AS R ON R.Categoryid = C.Id
GROUP BY C.Name
ORDER BY Reportsnumber DESC,
Categoryname;
--Task 9. Employees in Category
SELECT C.Name,
COUNT(E.Id) AS Numberemployees
FROM Categories AS C
JOIN Departments AS D ON D.Id = C.Departmentid
JOIN Employees AS E ON E.Departmentid = D.Id
GROUP BY C.Name;
--Task 10 - Birthday Reports
SELECT DISTINCT C.Name
FROM Categories C
JOIN Reports AS R ON R.CategoryId = C.Id
JOIN Users AS U ON U.Id = R.Userid
WHERE DAY(R.Opendate) = DAY(U.Birthdate)
AND MONTH(R.Opendate) = MONTH(U.Birthdate)
ORDER BY C.Name;
-- Task 11 - Users per Employee
SELECT E.Firstname + ' ' + E.Lastname AS FullName,
COUNT(DISTINCT R.Userid) AS UsersCount
FROM Employees AS E
LEFT JOIN Reports AS R ON R.Employeeid = E.Id
GROUP BY E.Firstname + ' ' + E.Lastname
ORDER BY UsersCount DESC,
FullName;
--Task 12 - Emergency Patrol
SELECT OpenDate, Description, u.email [Reporter Emial]
FROM Reports r
JOIN Users AS u on u.id = r.UserId
JOIN Categories c on c.id = r.CategoryId
JOIN Departments d on d.id = c.Departmentid
WHERE Description LIKE '%str%' AND
LEN(Description) > 20 AND
CloseDate IS NULL AND
d.Id IN (1,4,5)
ORDER BY OpenDate,
[Reporter Emial];
--Task 13 - Numbers Coincidence
SELECT DISTINCT Username
FROM Users u
JOIN Reports r on r.UserId = u.id
JOIN Categories c ON c.id = r.CategoryId
WHERE (Username LIKE '[0-9]_%' AND CAST(c.id as varchar) = LEFT(username, 1)) OR
(Username LIKE '%_[0-9]' AND CAST(c.id as varchar) = RIGHT(username, 1))
ORDER BY Username;
--Task 14 - Count open and closed reports per employee in the last month
SELECT E.Firstname+' '+E.Lastname AS FullName,
ISNULL(CONVERT(varchar, CC.ReportSum), '0') + '/' +
ISNULL(CONVERT(varchar, OC.ReportSum), '0') AS [Stats]
FROM Employees AS E
JOIN (SELECT EmployeeId, COUNT(1) AS ReportSum
FROM Reports R
WHERE YEAR(OpenDate) = 2016
GROUP BY EmployeeId) AS OC ON OC.Employeeid = E.Id
LEFT JOIN (SELECT EmployeeId, COUNT(1) AS ReportSum
FROM Reports R
WHERE YEAR(CloseDate) = 2016
GROUP BY EmployeeId) AS CC ON CC.Employeeid = E.Id
ORDER BY FullName
--Task 15 - Average closing time
SELECT D.Name AS Departmentname,
ISNULL(
CONVERT(
VARCHAR, AVG(DATEDIFF(DAY, R.Opendate, R.Closedate))
), 'no info'
) AS AverageTime
FROM Departments AS D
JOIN Categories AS C ON C.DepartmentId = D.Id
LEFT JOIN Reports AS R ON R.CategoryId = C.Id
GROUP BY D.Name
ORDER BY D.Name
--Task 16 - Favourite Categories
-- For each department/category pair: the share (in %) of the department's
-- reports that fall into that category.
-- Fixes: "Deprtment" typo (column alias is Department) and a stray ';'
-- after the GROUP BY list that orphaned the ORDER BY as its own statement.
SELECT Department,
       Category,
       Percentage
FROM
    (SELECT D.Name AS Department,
            C.Name AS Category,
            CAST(
                ROUND(
                    COUNT(1) OVER(PARTITION BY C.Id) * 100.00 / COUNT(1) OVER(PARTITION BY D.Id), 0
                ) as int
            ) AS Percentage
     FROM Categories AS C
     JOIN Reports AS R ON R.Categoryid = C.Id
     JOIN Departments AS D ON D.Id = C.Departmentid) AS Stats
GROUP BY Department,
         Category,
         Percentage
ORDER BY Department,
         Category,
         Percentage;
--Task 20 - Bonus - Categories Revisiion
SELECT c.Name,
COUNT(r.Id) AS ReportsNumber,
CASE
WHEN InProgressCount > WaitingCount THEN 'in progress'
WHEN InProgressCount < WaitingCount THEN 'waiting'
ELSE 'equal'
END AS MainStatus
FROM Reports AS r
JOIN Categories AS c ON c.Id = r.CategoryId
JOIN Status AS s ON s.Id = r.StatusId
JOIN (SELECT r.CategoryId,
SUM(CASE WHEN s.Label = 'in progress' THEN 1 ELSE 0 END) as InProgressCount,
SUM(CASE WHEN s.Label = 'waiting' THEN 1 ELSE 0 END) as WaitingCount
FROM Reports AS r
JOIN Status AS s on s.Id = r.StatusId
WHERE s.Label IN ('waiting','in progress')
GROUP BY r.CategoryId
) AS sc ON sc.CategoryId = c.Id
WHERE s.Label IN ('waiting','in progress')
GROUP BY C.Name,
CASE
WHEN InProgressCount > WaitingCount THEN 'in progress'
WHEN InProgressCount < WaitingCount THEN 'waiting'
ELSE 'equal'
END
ORDER BY C.Name,
ReportsNumber,
MainStatus;
| 25,681
|
https://github.com/cpoopc/com.cp.monsterMod/blob/master/src/com/cp/monsterMod/views/LyricGesture.java
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,016
|
com.cp.monsterMod
|
cpoopc
|
Java
|
Code
| 253
| 862
|
package com.cp.monsterMod.views;

import android.util.Log;
import android.view.GestureDetector;
import android.view.MotionEvent;
import android.view.View;
import android.view.GestureDetector.OnGestureListener;
import android.view.View.OnTouchListener;

/**
 * Touch handler for the lyric view: forwards touch events to a
 * GestureDetector and translates scroll gestures into lyric-seek
 * callbacks on the owning {@link BaseLyricFragment}.
 */
public class LyricGesture implements OnTouchListener,OnGestureListener{

    private BaseLyricFragment context;        // fragment receiving the seek callbacks
    private GestureDetector mGestureDetector; // recognises the scroll gestures
    private int way;                          // count of initial scroll events (warm-up phase)
    private boolean starttoggle;              // set when a vertical drag is recognised
    private boolean updatetoggle;             // true while a seek drag is in progress

    public LyricGesture(BaseLyricFragment context){
        this.context = context;
        mGestureDetector = new GestureDetector(context.getActivity(), this);
    }

    @Override
    public boolean onTouch(View arg0, MotionEvent arg1) {
        // On finger lift: reset the warm-up counter and, if a seek drag was
        // in progress, push the final position to the player.
        if(arg1.getAction() == MotionEvent.ACTION_UP)
        {
            way = 0;
            if(updatetoggle){
                context.updateplayer();
                updatetoggle = false;
            }
        }
        return mGestureDetector.onTouchEvent(arg1);
    }

    @Override
    public boolean onDown(MotionEvent arg0) {
        // starttoggle = true;
        // Must return true so subsequent gesture events are delivered.
        return true;
    }

    @Override
    public boolean onFling(MotionEvent arg0, MotionEvent arg1, float arg2,
            float arg3) {
        return false;
    }

    @Override
    public void onLongPress(MotionEvent arg0) {
    }

    /**
     * @Parameters:
        arg0 The first down motion event that started the scrolling.
        arg1 The move motion event that triggered the current onScroll.
        arg2 The distance along the X axis that has been scrolled since the last call to onScroll. This is NOT the distance between e1 and e2.
        arg3 The distance along the Y axis that has been scrolled since the last call to onScroll. This is NOT the distance between e1 and e2.
       @Returns:
        true if the event is consumed, else false
     */
    @Override
    public boolean onScroll(MotionEvent arg0, MotionEvent arg1, float arg2,
            float arg3) {
        Log.e("onScroll", "arg2:"+arg2+","+"arg3:"+arg3);//"arg0:"+arg0+","+"arg1:"+arg1+","+
        // The first 3 scroll events are used only as the trigger condition
        // for starttoggle (warm-up phase before deciding drag direction).
        if(way<3){
            context.updatelab(-arg2, -arg3, true);
            way++;
        }else{
            // updatelab returns true when the Y-axis displacement exceeds
            // the X-axis displacement (i.e. a vertical drag).
            starttoggle = context.updatelab(-arg2, -arg3, false);
            if(starttoggle){
                updatetoggle = true;
                context.slidestart();
                starttoggle = false;
            }
            if(updatetoggle){
                context.updateprogress(-arg2,-arg3);
            }
        }
        Log.e("way", "way:"+way);
        return false;
    }

    @Override
    public void onShowPress(MotionEvent arg0) {
    }

    @Override
    public boolean onSingleTapUp(MotionEvent arg0) {
        return false;
    }
}
| 21,821
|
https://github.com/Web-Dev-Collaborative/Web-Dev-Hub/blob/master/2-content/live-examples/dom-examples/pointer-lock/app.js
|
Github Open Source
|
Open Source
|
MIT
| null |
Web-Dev-Hub
|
Web-Dev-Collaborative
|
JavaScript
|
Code
| 234
| 624
|
// Radius (canvas pixels) of the ball drawn by canvasDraw().
const RADIUS = 20;

/**
 * Convert an angle from degrees to radians.
 * @param {number} degrees - Angle in degrees.
 * @returns {number} The same angle expressed in radians.
 */
function degToRad(degrees) {
  return Math.PI / 180 * degrees;
}
// setup of the canvas
var canvas = document.querySelector('canvas');
var ctx = canvas.getContext('2d');

// Current ball position in canvas coordinates.
var x = 50;
var y = 50;

// Clear the canvas and draw the ball at (x, y).
function canvasDraw() {
  ctx.fillStyle = "black";
  ctx.fillRect(0, 0, canvas.width, canvas.height);
  ctx.fillStyle = "#f00";
  ctx.beginPath();
  ctx.arc(x, y, RADIUS, 0, degToRad(360), true);
  ctx.fill();
}
canvasDraw();

// pointer lock object forking for cross browser
canvas.requestPointerLock = canvas.requestPointerLock ||
                            canvas.mozRequestPointerLock;

document.exitPointerLock = document.exitPointerLock ||
                           document.mozExitPointerLock;

// Clicking the canvas requests pointer lock (must be in a user gesture).
canvas.onclick = function() {
  canvas.requestPointerLock();
};

// pointer lock event listeners

// Hook pointer lock state change events for different browsers
document.addEventListener('pointerlockchange', lockChangeAlert, false);
document.addEventListener('mozpointerlockchange', lockChangeAlert, false);

// Start tracking mouse movement while the pointer is locked to the canvas;
// stop tracking when the lock is released.
function lockChangeAlert() {
  if (document.pointerLockElement === canvas ||
      document.mozPointerLockElement === canvas) {
    console.log('The pointer lock status is now locked');
    document.addEventListener("mousemove", updatePosition, false);
  } else {
    console.log('The pointer lock status is now unlocked');
    document.removeEventListener("mousemove", updatePosition, false);
  }
}

var tracker = document.getElementById('tracker');

// Pending requestAnimationFrame handle; used to coalesce redraws so at
// most one canvasDraw() runs per animation frame.
var animation;

// Apply relative mouse movement to the ball, wrapping around the canvas
// edges, then schedule a redraw.
function updatePosition(e) {
  x += e.movementX;
  y += e.movementY;
  if (x > canvas.width + RADIUS) {
    x = -RADIUS;
  }
  if (y > canvas.height + RADIUS) {
    y = -RADIUS;
  }
  if (x < -RADIUS) {
    x = canvas.width + RADIUS;
  }
  if (y < -RADIUS) {
    y = canvas.height + RADIUS;
  }
  tracker.textContent = "X position: " + x + ", Y position: " + y;

  if (!animation) {
    animation = requestAnimationFrame(function() {
      animation = null;
      canvasDraw();
    });
  }
}
| 32,003
|
https://github.com/slachiewicz/maven-doxia/blob/master/doxia-core/src/main/java/org/apache/maven/doxia/macro/SwfMacro.java
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,019
|
maven-doxia
|
slachiewicz
|
Java
|
Code
| 645
| 1,663
|
package org.apache.maven.doxia.macro;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.doxia.sink.Sink;
import org.codehaus.plexus.component.annotations.Component;
import org.codehaus.plexus.util.StringUtils;
/**
 * Macro for embedding Flash (SWF) within Maven documentation.
 *
 * Builds an HTML {@code <object>}/{@code <embed>} pair (wrapped in
 * {@code <center>}) from the macro's string parameters and emits it
 * verbatim via {@code sink.rawText}.
 *
 * @author <a href="mailto:steve.motola@gmail.com">Steve Motola</a>
 * @author <a href="mailto:vincent.siveton@gmail.com">Vincent Siveton</a>
 */
@Deprecated
@Component( role = Macro.class, hint = "swf" )
public class SwfMacro
    extends AbstractMacro
{
    /** {@inheritDoc} */
    public void execute( Sink sink, MacroRequest request )
        throws MacroExecutionException
    {
        // parameter defaults
        String src = "";
        String id = "swf";
        String width = "400";
        String height = "400";
        String quality = "high";
        String menu = "false";
        String loop = "0";
        String play = "true";
        String version = "9,0,45,0";
        String allowScript = "sameDomain";

        // assign parameters: each recognised key overrides its default,
        // but only when the supplied value is a non-empty String.
        for ( String key : request.getParameters().keySet() )
        {
            Object parameterObject = request.getParameter( key );
            if ( !( parameterObject instanceof String ) )
            {
                continue;
            }
            String str = (String) parameterObject;

            switch ( key )
            {
                case "src":
                    if ( StringUtils.isNotEmpty( str ) )
                    {
                        src = str;
                    }
                    break;
                case "id":
                    if ( StringUtils.isNotEmpty( str ) )
                    {
                        id = str;
                    }
                    break;
                case "width":
                    if ( StringUtils.isNotEmpty( str ) )
                    {
                        width = str;
                    }
                    break;
                case "height":
                    if ( StringUtils.isNotEmpty( str ) )
                    {
                        height = str;
                    }
                    break;
                case "quality":
                    if ( StringUtils.isNotEmpty( str ) )
                    {
                        quality = str;
                    }
                    break;
                case "menu":
                    if ( StringUtils.isNotEmpty( str ) )
                    {
                        menu = str;
                    }
                    break;
                case "loop":
                    if ( StringUtils.isNotEmpty( str ) )
                    {
                        loop = str;
                    }
                    break;
                case "play":
                    if ( StringUtils.isNotEmpty( str ) )
                    {
                        play = str;
                    }
                    break;
                case "version":
                    // enable version shorthand: "6" and "9" expand to the
                    // full four-part Flash version string.
                    // TODO: put in other shorthand versions
                    if ( str.equals( "6" ) )
                    {
                        version = "6,0,29,0";
                    }
                    else
                    {
                        if ( str.equals( "9" ) )
                        {
                            version = "9,0,45,0";
                        }
                        else
                        {
                            if ( StringUtils.isNotEmpty( str ) )
                            {
                                version = str;
                            }
                        }
                    }
                    break;
                case "allowScript":
                    if ( StringUtils.isNotEmpty( str ) )
                    {
                        allowScript = str;
                    }
                    break;
                default:
                    // ignore all other
            }
        }

        // Build the <object> (IE) and nested <embed> (other browsers)
        // markup carrying the same attributes, then emit it as raw HTML.
        StringBuilder content = new StringBuilder();
        content.append( "<center>" ).append( EOL );
        content.append( "<object classid=\"clsid:D27CDB6E-AE6D-11cf-96B8-444553540000\" " )
            .append( "codebase=\"http://download.macromedia.com/pub/shockwave/cabs/flash/swflash.cab#version=" )
            .append( version ).append( "\" width=\"" ).append( width ).append( "\" height=\"" ).append( height )
            .append( "\" id=\"" ).append( id ).append( "\">" ).append( EOL );
        content.append( "<param name=\"movie\" value=\"" ).append( src ).append( "\" />" ).append( EOL );
        content.append( "<param name=\"quality\" value=\"" ).append( quality ).append( "\" />" ).append( EOL );
        content.append( "<param name=\"menu\" value=\"" ).append( menu ).append( "\" />" ).append( EOL );
        content.append( "<param name=\"loop\" value=\"" ).append( loop ).append( "\" />" ).append( EOL );
        content.append( "<param name=\"play\" value=\"" ).append( play ).append( "\" />" ).append( EOL );
        content.append( "<param name=\"allowScriptAccess\" value=\"" )
            .append( allowScript ).append( "\" />" ).append( EOL );
        content.append( "<embed src=\"" ).append( src ).append( "\" width=\"" ).append( width ).append( "\" height=\"" )
            .append( height ).append( "\" loop=\"" ).append( loop ).append( "\" play=\"" ).append( play )
            .append( "\" quality=\"" ).append( quality ).append( "\" allowScriptAccess=\"" ).append( allowScript )
            .append( "\" " ).append( "pluginspage=\"http://www.macromedia.com/go/getflashplayer\" " )
            .append( "type=\"application/x-shockwave-flash\" menu=\"" ).append( menu ).append( "\">" ).append( EOL );
        content.append( "</embed>" ).append( EOL );
        content.append( "</object>" ).append( EOL );
        content.append( "</center>" ).append( EOL );

        sink.rawText( content.toString() );
    }
}
| 11,541
|
https://github.com/brownstef/Statiq.Framework/blob/master/src/core/Statiq.Common/Meta/RawMetadataEnumerable.cs
|
Github Open Source
|
Open Source
|
MIT
| 2,020
|
Statiq.Framework
|
brownstef
|
C#
|
Code
| 38
| 134
|
using System.Collections;
using System.Collections.Generic;

namespace Statiq.Common
{
    /// <summary>
    /// Adapter exposing the raw (unconverted) key/value pairs of an
    /// <see cref="IMetadata"/> instance as an enumerable, by delegating
    /// enumeration to <c>IMetadata.GetRawEnumerator()</c>.
    /// </summary>
    internal class RawMetadataEnumerable : IEnumerable<KeyValuePair<string, object>>
    {
        private readonly IMetadata _metadata;

        public RawMetadataEnumerable(IMetadata metadata)
        {
            _metadata = metadata;
        }

        public IEnumerator<KeyValuePair<string, object>> GetEnumerator() => _metadata.GetRawEnumerator();

        IEnumerator IEnumerable.GetEnumerator() => GetEnumerator();
    }
}
| 5,436
|
https://github.com/mianowski/licytacje_api/blob/master/licytacje_api/rest.py
|
Github Open Source
|
Open Source
|
MIT
| 2,019
|
licytacje_api
|
mianowski
|
Python
|
Code
| 186
| 877
|
import os
import requests
import unicodedata
import urllib.parse
from pathlib import Path
from datetime import date
from datetime import datetime
from babel.numbers import parse_decimal
from bs4 import BeautifulSoup
from .bs_helpers import make_soup
from .notice import Notice
# Base URL of the auction portal all requests are made against.
NODE_URL = "https://licytacje.komornik.pl"


def get_search_soup(data: dict):
    """POST the notice search form and return the result page as soup.

    An initial GET fetches the anti-CSRF verification token and session
    cookies, which are then sent along with the search POST.

    NOTE(review): mutates the caller's ``data`` dict by inserting the
    ``__RequestVerificationToken`` key.
    """
    initial_resp = requests.get(NODE_URL+"/Notice/Search")
    initial_soup = make_soup(initial_resp)
    data["__RequestVerificationToken"] = get_verification_token(initial_soup)
    resp = requests.post(NODE_URL+"/Notice/Search",
                         data=data, cookies=initial_resp.cookies)
    return make_soup(resp)


def get_notice_soup(id):
    """Fetch a single notice's detail page by numeric id and parse it."""
    return make_soup(requests.get(NODE_URL+"/Notice/Details/"+str(id)))


def get_soup(url):
    """Fetch an arbitrary URL and parse the response into soup."""
    return make_soup(requests.get(url))


def get_rows(html_soup):
    """Return all table rows (<tr>) found in the parsed page."""
    return html_soup.find_all("tr")


def get_cols(row):
    """Return all cells (<td>) of a table row."""
    return row.find_all("td")


def get_verification_token(html_soup: BeautifulSoup) -> str:
    """Extract the value of the __RequestVerificationToken hidden input."""
    ver_token = html_soup.find(
        "input", attrs={"name": "__RequestVerificationToken"})
    return ver_token.attrs["value"]


def get_notices(html_soup):
    """Return absolute URLs of all notice-detail links on the page."""
    notices = html_soup.find_all(
        "a", href=lambda href: href and "/Notice/Details" in href)
    return [NODE_URL+str(notice.get("href")) for notice in notices]
def parse_price(text: str):
    """Parse a Polish-formatted price string (e.g. '1\xa0234,56 zł') to a Decimal.

    Strips non-breaking spaces and the ' zł' suffix and converts the
    decimal comma to a dot before handing the string to babel's
    ``parse_decimal`` with the pl_PL locale.
    """
    return parse_decimal(text.replace('\xa0', '').replace(',', '.').replace(' zł', ''), locale='pl_PL')
def get_current_date():
    """Return today's date formatted as DD.MM.YYYY (portal date format)."""
    today = date.today()
    return today.strftime("%d.%m.%Y")
def get_report(data: dict) -> list:
    """Run a search and build a list of auction records.

    For every result row, fetches the notice page, extracts the
    land-register (KW) number and address, and saves the raw notice to
    ``../notices/<id>``.

    Returns a list of dicts with keys: date, url, price, kw, address, id.
    (The original annotation said ``dict``; the function returns a list.)
    """
    soup = get_search_soup(data)
    rows = get_rows(soup)
    auctions = []
    # Skip the header row; real data rows have more than 8 columns.
    for row in rows[1:]:
        cols = get_cols(row)
        if (len(cols) > 8):
            # Column 8 holds the link to the notice detail page.
            url = NODE_URL + cols[8].contents[1].get('href')
            cur_notice = Notice(url)
            auction = {
                'date': datetime.strptime(cols[2].get_text(strip=True), "%d.%m.%Y"),
                'url': url,
                'price': parse_price(cols[6].get_text(strip=True)),
                'kw': cur_notice.get_kw_number(),
                'address': cur_notice.get_address()
            }
            # Synthetic id: YYYYMMDD-<integral part of the price>.
            auction['id'] = auction['date'].strftime(
                "%Y%m%d")+"-"+str(auction["price"].to_integral_exact())
            auctions.append(auction)
            # Persist the raw notice next to the project, one file per id.
            files_dir = os.path.join("..", "notices")
            Path(files_dir).mkdir(parents=True, exist_ok=True)
            file_path = os.path.join(files_dir, auction['id'])
            cur_notice.save_to_file(file_path)
    return auctions
| 12,696
|
https://github.com/Eurus-Holmes/nni/blob/master/nni/nas/nn/pytorch/.gitignore
|
Github Open Source
|
Open Source
|
MIT
| 2,023
|
nni
|
Eurus-Holmes
|
Ignore List
|
Code
| 14
| 23
|
# Ignored in git, but need to be included in package source files.
_layers.py
| 34,056
|
https://github.com/veneres/EyeCloud/blob/master/client/src/app/attention-data/attention-data.component.ts
|
Github Open Source
|
Open Source
|
MIT
| 2,021
|
EyeCloud
|
veneres
|
TypeScript
|
Code
| 182
| 628
|
import { Component, OnInit } from '@angular/core';
import { AttentionCloudService } from '../attention-cloud.service';
import { Point, Utilities } from '../classes/Utilities';
import { Thumbnail } from '../classes/Thumbnail';
import { AggregatedFixationPoint } from '../classes/AggregatedFixationPoints';
@Component({
  selector: 'app-attention-data',
  templateUrl: './attention-data.component.html',
  styleUrls: ['./attention-data.component.css']
})
export class AttentionDataComponent implements OnInit {
  displayComponent: boolean;   // whether any clouds are visible at all
  displayStatistics: boolean;  // whether a point is selected and stats shown
  clouds: Thumbnail[];         // currently visible thumbnails
  selectedPoint: AggregatedFixationPoint;  // point matching the last selection
  displayedData;               // view-model derived from selectedPoint

  constructor(private attentionCloudService: AttentionCloudService) {
    this.displayComponent = false;
    this.displayStatistics = false;
    this.clouds = [];
    this.displayedData = {};
  }

  ngOnInit() {
    // Track cloud visibility: show the component only while clouds exist.
    this.attentionCloudService.cloudsVisible.subscribe((clouds: Thumbnail[]) => {
      if (clouds) {
        this.clouds = clouds;
        this.displayComponent = true;
      } else {
        this.displayComponent = false;
      }
    });
    // When a point is selected, find the thumbnail at the same position
    // and expose its aggregated fixation data.
    this.attentionCloudService.currentSelectedPoint.subscribe((point: Point) => {
      if (this.clouds && this.clouds.length > 0) {
        for (let i = 0; i < this.clouds.length; i++) {
          let cloud = this.clouds[i];
          // NOTE(review): loose equality (==) is used here; presumably
          // styleX/styleY and point.x/point.y are both numbers -- confirm,
          // otherwise prefer ===.
          if (cloud.styleX == point.x && cloud.styleY == point.y) {
            this.selectedPoint = cloud.getAggregatedFixationPoint();
            this.updateDisplayedData();
            break;
          }
        }
        this.displayStatistics = !!this.selectedPoint;
      } else {
        this.displayStatistics = false;
      }
    });
  }

  // Refresh the flat view-model used by the template from selectedPoint.
  private updateDisplayedData() {
    const d = this.displayedData;
    const p = this.selectedPoint;
    d.X = Math.round(p.getX());
    d.Y = Math.round(p.getY());
    d.Duration = Math.round(p.getDuration());
    d.ModeTimestamp = p.getModeTimestamp(1000);   // 1000 ms bin width
    d.Timestamps = p.getBinsOfTimestamps(1000);
    d.TimestampBins = Object.keys(d.Timestamps);
    d.NumPoints = p.getNumPoints();
  }
}
| 992
|
https://github.com/cassio645/Aprendendo-python/blob/master/CursoEmVideo/pythonProject/venv/Lib/site-packages/Interface/tests/test_bridge.py
|
Github Open Source
|
Open Source
|
MIT
| null |
Aprendendo-python
|
cassio645
|
Python
|
Code
| 116
| 256
|
##############################################################################
#
# Copyright (c) 2005 Zope Corporation and Contributors. All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
""" Unit tests for Z3 -> Z2 bridge utilities.
$Id: test_bridge.py 76337 2007-06-04 21:45:18Z philikon $
"""
import unittest

from zope.testing.doctest import DocFileSuite


def test_suite():
    # Run the doctests in bridge.txt as this module's unit-test suite.
    return unittest.TestSuite([
        DocFileSuite('bridge.txt', package='Interface.tests'),
    ])

if __name__ == '__main__':
    unittest.main(defaultTest='test_suite')
| 29,939
|
https://github.com/Team-5-Jellyfish-Store/Jellyfish-Store/blob/master/OnlineStore/OnlineStore.Tests/Services/CourierServiceTests/Constructor_Should.cs
|
Github Open Source
|
Open Source
|
MIT
| null |
Jellyfish-Store
|
Team-5-Jellyfish-Store
|
C#
|
Code
| 150
| 603
|
using AutoMapper;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Moq;
using OnlineStore.Data.Contracts;
using OnlineStore.Logic.Contracts;
using OnlineStore.Logic.Services;
using System;
namespace OnlineStore.Tests.Services.CourierServiceTests
{
    /// <summary>
    /// Verifies that the CourierService constructor rejects a null value
    /// for each of its four dependencies.
    /// Fixed typo in the test names: "InNull" -> "IsNull" (MSTest discovers
    /// tests by attribute, so the rename does not break any caller).
    /// </summary>
    [TestClass]
    public class Constructor_Should
    {
        private Mock<IOnlineStoreContext> ctxStub;
        private Mock<ITownService> townServiceStub;
        private Mock<IAddressService> addressServiceStub;
        private Mock<IMapper> mapperStub;

        [TestInitialize]
        public void Initialize()
        {
            ctxStub = new Mock<IOnlineStoreContext>();
            townServiceStub = new Mock<ITownService>();
            addressServiceStub = new Mock<IAddressService>();
            mapperStub = new Mock<IMapper>();
        }

        [TestMethod]
        public void Throw_ArgumentNullException_When_OnlineStoreContext_IsNull()
        {
            // Arrange
            Action creatingCourierService = () => new CourierService(null, townServiceStub.Object, addressServiceStub.Object, mapperStub.Object);

            // Act & Assert
            Assert.ThrowsException<ArgumentNullException>(creatingCourierService);
        }

        [TestMethod]
        public void Throw_ArgumentNullException_When_TownService_IsNull()
        {
            // Arrange
            Action creatingCourierService = () => new CourierService(ctxStub.Object, null, addressServiceStub.Object, mapperStub.Object);

            // Act & Assert
            Assert.ThrowsException<ArgumentNullException>(creatingCourierService);
        }

        [TestMethod]
        public void Throw_ArgumentNullException_When_AddressService_IsNull()
        {
            // Arrange
            Action creatingCourierService = () => new CourierService(ctxStub.Object, townServiceStub.Object, null, mapperStub.Object);

            // Act & Assert
            Assert.ThrowsException<ArgumentNullException>(creatingCourierService);
        }

        [TestMethod]
        public void Throw_ArgumentNullException_When_Mapper_IsNull()
        {
            // Arrange
            Action creatingCourierService = () => new CourierService(ctxStub.Object, townServiceStub.Object, addressServiceStub.Object, null);

            // Act & Assert
            Assert.ThrowsException<ArgumentNullException>(creatingCourierService);
        }
    }
}
| 10,309
|
https://github.com/ddolheguy/coding-test-1/blob/master/app/services/tickerService.js
|
Github Open Source
|
Open Source
|
MIT
| null |
coding-test-1
|
ddolheguy
|
JavaScript
|
Code
| 185
| 572
|
import Rx from 'rxjs/Rx';
import io from 'socket.io-client';

// Singleton socket.io connection shared by the whole service.
let socket = null;
// Lazily-created shared (multicast) streams; see *Stream() below.
let tickerProvider$ = null;
let connectionStatusProvider$ = null;

// Raw subjects fed by socket events.
const ticker$ = new Rx.Subject();
// ReplaySubject(1) so late subscribers immediately receive the last status.
const connectionStatus$ = new Rx.ReplaySubject(1);

const CONNECTION_STATUS = {
  CONNECTED: 'Connected',
  DISCONNECTED: 'Disconnected'
};

// Open the socket and forward connect/disconnect events into the
// connection-status stream.
const connect = () => {
  socket = io('http://localhost:4000');

  socket.on('connect', () => {
    connectionStatus$.next(CONNECTION_STATUS.CONNECTED);
  });
  socket.on('disconnect', () => {
    connectionStatus$.next(CONNECTION_STATUS.DISCONNECTED);
  });
};

// Close the socket if it was ever opened.
const disconnect = () => {
  if (socket) socket.disconnect();
};

// Snapshot of the current status, read directly from the socket.
const connectionStatus = () => {
  return (socket && socket.connected) ? CONNECTION_STATUS.CONNECTED : CONNECTION_STATUS.DISCONNECTED;
};

// Shared connection-status stream (created on first use).
const connectionStatusStream = () => {
  if (!connectionStatusProvider$) connectionStatusProvider$ = connectionStatus$.publish().refCount();
  return connectionStatusProvider$;
};

// Shared ticker-data stream (created on first use).
const tickerStream = () => {
  if (!tickerProvider$) tickerProvider$ = ticker$.publish().refCount();
  return tickerProvider$;
};

// Subscribe to a named ticker channel on the socket.  Incoming payloads
// (JSON strings or objects) are pushed into ticker$; the socket listener
// is removed when the returned observable is unsubscribed.
const subscribeToTicker = (ticker) => {
  return Rx.Observable.create(obs => {
    if (!socket) throw new Error('socket has not been initialised');

    const dataReceived = (data) => {
      try {
        if (typeof data === 'string') {
          ticker$.next(JSON.parse(data));
        } else {
          ticker$.next(data);
        }
      } catch (err) {
        ticker$.error(err);
      }
    };

    socket.on(ticker, dataReceived);
    socket.emit('ticker', ticker);
    obs.next();

    // Teardown: stop listening for this ticker's events.
    return () => {
      socket.off(ticker, dataReceived);
    };
  });
};

export default {
  connect,
  disconnect,
  subscribeToTicker,
  connectionStatus,
  connectionStatusStream,
  tickerStream,
  CONNECTION_STATUS
};
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.