hexsha
stringlengths 40
40
| size
int64 5
1.05M
| ext
stringclasses 98
values | lang
stringclasses 21
values | max_stars_repo_path
stringlengths 3
945
| max_stars_repo_name
stringlengths 4
118
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
368k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
945
| max_issues_repo_name
stringlengths 4
118
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
134k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
945
| max_forks_repo_name
stringlengths 4
135
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 5
1.05M
| avg_line_length
float64 1
1.03M
| max_line_length
int64 2
1.03M
| alphanum_fraction
float64 0
1
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
40181ae027992843d3d06d1df1c85377d6181240
| 180
|
swift
|
Swift
|
Sources/ParserCombinators/SimpleList.swift
|
fwcd/swift-prolog
|
92f3f37b748f5de75f97636c74c4f48af44674a2
|
[
"MIT"
] | 3
|
2020-03-19T20:53:26.000Z
|
2022-03-17T22:49:57.000Z
|
Sources/ParserCombinators/SimpleList.swift
|
fwcd/swift-prolog
|
92f3f37b748f5de75f97636c74c4f48af44674a2
|
[
"MIT"
] | null | null | null |
Sources/ParserCombinators/SimpleList.swift
|
fwcd/swift-prolog
|
92f3f37b748f5de75f97636c74c4f48af44674a2
|
[
"MIT"
] | null | null | null |
/// A minimal `List` implementation backed by a plain Swift array.
public struct SimpleList<T>: List {
    /// The elements of the list, in order.
    public let values: [T]

    /// Wraps the given array in a `SimpleList`.
    /// - Parameter values: The elements to store.
    /// - Returns: A list containing exactly `values`.
    public static func from(_ values: [T]) -> SimpleList<T> {
        SimpleList(values: values)
    }
}
| 22.5
| 61
| 0.611111
|
fd42c47d19363212fb89c2968f8bd6e826f1d936
| 2,581
|
swift
|
Swift
|
MentionmeSwift/Classes/Requests/MentionmeRefereeRegisterRequest.swift
|
JonathanDowning/mention-me-ios-sdk
|
eb6412ca16f07ddbe2121daa66c2788595411525
|
[
"MIT"
] | 1
|
2019-09-07T23:10:26.000Z
|
2019-09-07T23:10:26.000Z
|
MentionmeSwift/Classes/Requests/MentionmeRefereeRegisterRequest.swift
|
JonathanDowning/mention-me-ios-sdk
|
eb6412ca16f07ddbe2121daa66c2788595411525
|
[
"MIT"
] | 1
|
2019-10-17T08:33:29.000Z
|
2019-10-17T09:13:15.000Z
|
MentionmeSwift/Classes/Requests/MentionmeRefereeRegisterRequest.swift
|
JonathanDowning/mention-me-ios-sdk
|
eb6412ca16f07ddbe2121daa66c2788595411525
|
[
"MIT"
] | 2
|
2019-10-03T10:30:28.000Z
|
2020-07-20T17:37:10.000Z
|
//
// MentionmeRefereeRegisterRequest.swift
// TestMentionme
//
// Created by Andreas Bagias on 05/03/2019.
// Copyright © 2019 Mention-me. All rights reserved.
//

import Foundation

/// Request that registers a referee via the `referee/register` consumer endpoint.
public class MentionmeRefereeRegisterRequest: MentionmeRequest {

    /// Parameters identifying the referrer the referee is signing up under.
    public var mentionmeReferrerParameters: MentionmeReferrerParameters?
    /// Details of the customer being registered as the referee.
    public var mentionmeCustomerParameters: MentionmeCustomerParameters?

    override init() {
        super.init()
    }

    /// Creates a fully configured POST request for `consumer/referee/register`.
    /// - Parameters:
    ///   - mentionmeReferrerParameters: The referrer identification parameters.
    ///   - mentionmeCustomerParameters: The referee's customer details.
    public convenience init(mentionmeReferrerParameters: MentionmeReferrerParameters,
                            mentionmeCustomerParameters: MentionmeCustomerParameters) {
        self.init()
        super.method = MethodType.post
        super.urlSuffix = "referee/register"
        super.urlEndpoint = "consumer"
        self.mentionmeReferrerParameters = mentionmeReferrerParameters
        self.mentionmeCustomerParameters = mentionmeCustomerParameters
    }

    /// Rebuilds `bodyParameters` from the referrer and customer parameters.
    /// When either parameter set is missing, the body is reset to an empty dictionary.
    func createBodyParameters() {
        var body: [String: Any] = [:]
        if let referrer = mentionmeReferrerParameters,
           let customer = mentionmeCustomerParameters {
            body["referrerMentionMeIdentifier"] = referrer.referrerMentionMeIdentifier
            body["referrerToken"] = referrer.referrerToken

            var customerBody: [String: Any] = [:]
            customerBody["emailAddress"] = customer.emailAddress
            customerBody["firstname"] = customer.firstname
            customerBody["surname"] = customer.surname
            // Optional fields are only included when present.
            if let title = customer.title {
                customerBody["title"] = title
            }
            if let uniqueIdentifier = customer.uniqueIdentifier {
                customerBody["uniqueIdentifier"] = uniqueIdentifier
            }
            if let segment = customer.segment {
                customerBody["segment"] = segment
            }
            body["customer"] = customerBody
        }
        bodyParameters = body
    }

    override func createRequest(requestParameters: MentionmeRequestParameters) -> NSMutableURLRequest {
        // Refresh the body immediately before the request is materialized.
        createBodyParameters()
        return super.createRequest(requestParameters: requestParameters)
    }
}
| 33.960526
| 107
| 0.643936
|
93c719815157c295d7387113d2d80dbce22c50d8
| 4,548
|
cs
|
C#
|
src/Detached.Mappers/MapperFactories/Entity/EntityMapperFactory.cs
|
leonardoporro/EntityFrameworkCore.Detached
|
22ea222c05e4149014a1cc0effe108311a518c78
|
[
"MIT"
] | 7
|
2016-09-13T17:10:38.000Z
|
2016-12-22T13:48:54.000Z
|
src/Detached.Mappers/MapperFactories/Entity/EntityMapperFactory.cs
|
leonardoporro/EntityFrameworkCore.Detached
|
22ea222c05e4149014a1cc0effe108311a518c78
|
[
"MIT"
] | 3
|
2016-10-12T15:37:35.000Z
|
2016-12-22T12:29:26.000Z
|
src/Detached.Mappers/MapperFactories/Entity/EntityMapperFactory.cs
|
leonardoporro/EntityFrameworkCore.Detached
|
22ea222c05e4149014a1cc0effe108311a518c78
|
[
"MIT"
] | null | null | null |
using Detached.Mappers.Exceptions;
using Detached.Mappers.TypeMaps;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Linq.Expressions;
using static Detached.RuntimeTypes.Expressions.ExtendedExpression;
using static System.Linq.Expressions.Expression;
namespace Detached.Mappers.MapperFactories.Entity
{
// Base factory for mappers of "entity" types: complex types identified by key members.
// Emits System.Linq.Expressions fragments that are later composed into compiled mappers.
public abstract class EntityMapperFactory : ComplexTypeMapperFactory
{
// Builds an expression that wires the mapped target back to its parent (the inverse
// navigation) when a back-reference is configured; returns Empty() otherwise.
public Expression CreateBackReference(TypeMap typeMap)
{
if (typeMap.BackReferenceMap != null)
{
BackReferenceMap backRef = typeMap.BackReferenceMap;
if (backRef.MemberTypeOptions.IsCollection)
{
// Collection navigation: construct the list if null, then add the parent
// only if it is not already contained (avoids duplicate back-links).
Expression list = backRef.MemberOptions.GetValue(typeMap.TargetExpression, typeMap.BuildContextExpression);
Expression item = backRef.Parent.TargetExpression;
Expression newList = backRef.MemberTypeOptions.Construct(typeMap.BuildContextExpression, null);
return Block(
If(IsNull(list), Assign(list, newList)),
IfThen(Not(Call("Contains", list, item)),
Call("Add", list, item)
)
);
}
else
{
// Single-valued navigation: assign the parent instance directly.
return typeMap.BackReferenceMap.MemberOptions.SetValue(
typeMap.TargetExpression,
typeMap.BackReferenceMap.Parent.TargetExpression,
typeMap.BuildContextExpression);
}
}
else
{
return Empty();
}
}
// True when the type map declares at least one key member.
protected virtual bool CanMapKey(TypeMap typeMap)
{
return typeMap.Members.Where(m => m.IsKey).Any();
}
// Lazily populates typeMap.SourceKeyExpression/TargetKeyExpression with EntityKey<...>
// constructions built from the key members (NoKey.Instance when there are none).
// Always returns Empty(); the useful output is the side effect on typeMap.
protected virtual Expression CreateKey(TypeMap typeMap)
{
if (typeMap.TargetKeyExpression == null || typeMap.SourceKeyExpression == null)
{
List<MemberMap> keyMembers = new List<MemberMap>();
foreach (MemberMap memberMap in typeMap.Members)
{
if (memberMap.IsKey)
keyMembers.Add(memberMap);
}
if (keyMembers.Count == 0)
{
typeMap.SourceKeyExpression = Constant(NoKey.Instance);
typeMap.TargetKeyExpression = Constant(NoKey.Instance);
}
else
{
Expression[] sourceKeyMembers = new Expression[keyMembers.Count];
Expression[] targetKeyMembers = new Expression[keyMembers.Count];
Type[] keyMemberTypes = new Type[keyMembers.Count];
for (int i = 0; i < keyMembers.Count; i++)
{
MemberMap keyMember = keyMembers[i];
sourceKeyMembers[i] = keyMember.SourceOptions.GetValue(typeMap.SourceExpression, typeMap.BuildContextExpression);
targetKeyMembers[i] = keyMember.TargetOptions.GetValue(typeMap.TargetExpression, typeMap.BuildContextExpression);
// The raw source value is passed through the member's mapper with default(T)
// as the target — presumably to convert it to the target member type; confirm
// against CallMapper's contract.
sourceKeyMembers[i] = CallMapper(keyMember.TypeMap, sourceKeyMembers[i], Default(targetKeyMembers[i].Type));
keyMemberTypes[i] = keyMembers[i].TargetOptions.Type;
}
Type keyType = GetKeyType(keyMemberTypes);
typeMap.SourceKeyExpression = New(keyType, sourceKeyMembers);
typeMap.TargetKeyExpression = New(keyType, targetKeyMembers);
}
}
return Empty();
}
// Picks the EntityKey<> generic arity matching the number of key members (1..5).
protected virtual Type GetKeyType(Type[] types)
{
switch (types.Length)
{
case 1:
return typeof(EntityKey<>).MakeGenericType(types);
case 2:
return typeof(EntityKey<,>).MakeGenericType(types);
case 3:
return typeof(EntityKey<,,>).MakeGenericType(types);
case 4:
return typeof(EntityKey<,,,>).MakeGenericType(types);
case 5:
return typeof(EntityKey<,,,,>).MakeGenericType(types);
default:
throw new InvalidOperationException("Maximum of 5 key members allowed.");
}
}
}
}
| 40.972973
| 137
| 0.53584
|
ae300e2a56011ef5dbcd9834c16082e32f824c2b
| 5,443
|
cs
|
C#
|
src/FuncSharp.Tests/Try/TryTests.cs
|
siroky/FuncSharp
|
996c7669fd17de4f17dafbc43c047f62377eb762
|
[
"MIT"
] | 60
|
2015-01-25T02:13:13.000Z
|
2022-02-15T16:58:26.000Z
|
src/FuncSharp.Tests/Try/TryTests.cs
|
siroky/FuncSharp
|
996c7669fd17de4f17dafbc43c047f62377eb762
|
[
"MIT"
] | 62
|
2015-01-06T01:15:17.000Z
|
2021-11-23T13:55:53.000Z
|
src/FuncSharp.Tests/Try/TryTests.cs
|
siroky/FuncSharp
|
996c7669fd17de4f17dafbc43c047f62377eb762
|
[
"MIT"
] | 24
|
2016-01-17T19:39:35.000Z
|
2022-02-03T13:20:03.000Z
|
using System;
using System.Collections.Generic;
using System.Linq;
using Xunit;
namespace FuncSharp.Tests
{
// xUnit tests for the ITry monad: construction, value extraction, mapping and aggregation.
public class TryTests
{
// Shared fixtures: S1/S2 succeed with 42; E1/E2 fail with Exception.
// The *1 variants carry a typed error (NotImplementedException); the *2 variants
// use the non-generic ITry<int> whose error side is a collection of exceptions.
private static readonly Exception Exception = new NotImplementedException();
private static readonly ITry<int, NotImplementedException> S1 = Try.Catch<int, NotImplementedException>(_ => 42);
private static readonly ITry<int, NotImplementedException> E1 = Try.Catch<int, NotImplementedException>(_ => throw Exception);
private static readonly ITry<int> S2 = Try.Create<int, Exception>(_ => 42);
private static readonly ITry<int> E2 = Try.Create<int, Exception>(_ => throw Exception);
[Fact]
public void Catch()
{
Assert.True(S1.IsSuccess);
Assert.Equal(42, S1.Get());
Assert.True(E1.IsError);
Assert.Throws<NotImplementedException>(() => E1.Get());
}
[Fact]
public void Create()
{
Assert.True(S2.IsSuccess);
Assert.Equal(42, S2.Get());
Assert.True(E2.IsError);
Assert.Throws<NotImplementedException>(() => E2.Get());
}
[Fact]
public void Get()
{
// Get() rethrows the stored error; the overload maps the error to a new exception first.
Assert.Equal(42, S1.Get());
Assert.Equal(42, S1.Get(e => new InvalidOperationException("test")));
Assert.Equal(42, S2.Get());
Assert.Equal(42, S2.Get(e => new InvalidOperationException("test")));
Assert.Throws<NotImplementedException>(() => E1.Get());
Assert.Throws<InvalidOperationException>(() => E1.Get(e => new InvalidOperationException("foo", e)));
Assert.Throws<NotImplementedException>(() => E2.Get());
Assert.Throws<InvalidOperationException>(() => E2.Get(e => new InvalidOperationException("foo", e.First())));
}
[Fact]
public void Map()
{
// Map transforms the success value and passes errors through untouched.
Assert.Equal(45, S1.Map(i => i + 3).Get());
Assert.Equal(45, S2.Map(i => i + 3).Get());
Assert.True(E1.Map(i => i + 3).IsError);
Assert.True(E2.Map(i => i + 3).IsError);
}
[Fact]
public void MapError()
{
// MapError transforms the error side and leaves successes untouched.
Assert.Equal(42, S1.MapError(e => new InvalidOperationException("foo", e)).Get());
Assert.Equal(42, S2.MapError(e => new InvalidOperationException("foo", e.First())).Get());
Assert.Throws<InvalidOperationException>(() => E1.MapError(e => new InvalidOperationException("foo", e)).Get());
Assert.Throws<InvalidOperationException>(() => E2.MapError(e => new InvalidOperationException("foo", e.First())).Get());
}
[Fact]
public void Where()
{
// Where keeps a success only if the predicate holds; otherwise produces the supplied error.
Assert.Equal(42, S1.Where(i => i > 40, _ => Exception).Get());
Assert.Throws<NotImplementedException>(() => S1.Where(i => i > 50, _ => Exception).Get());
Assert.Throws<NotImplementedException>(() => E1.Where(i => i > 40, _ => Exception).Get());
Assert.Throws<NotImplementedException>(() => E2.Where(i => i > 50, _ => Exception).Get());
}
[Fact]
public void Aggregate()
{
// Two successes combine into a product of both values.
var as1 = Try.Aggregate<int, int, IProduct2<int, int>>(S1, S1, success: Product2.Create);
Assert.Equal(42, as1.Get().ProductValue1);
Assert.Equal(42, as1.Get().ProductValue2);
var as2 = Try.Aggregate(S2, S2, Product2.Create);
Assert.Equal(42, as2.Get().ProductValue1);
Assert.Equal(42, as2.Get().ProductValue2);
// One failure: the single error is preserved.
var am1 = Try.Aggregate<int, int, IProduct2<int, int>>(S1, E1, Product2.Create);
Assert.Equal(Exception, am1.Error.Get());
Assert.Throws<NotImplementedException>(() => am1.Get());
var am2 = Try.Aggregate(S2, E2, Product2.Create);
Assert.Equal(Exception, am2.Error.FlatMap(e => e.SingleOption()).Get());
Assert.Throws<NotImplementedException>(() => am2.Get());
// Multiple failures: errors are collected (typed tries wrap them in AggregateException).
var ae1 = Try.Aggregate<int, int, IProduct2<int, int>>(E1, E1, Product2.Create);
Assert.True(ae1.Error.Get() is AggregateException a && a.InnerExceptions.SequenceEqual(new[] { Exception, Exception }));
Assert.Throws<AggregateException>(() => ae1.Get());
var ae2 = Try.Aggregate(E2, E2, Product2.Create);
Assert.True(ae2.Error.Get().SequenceEqual(new[] { Exception, Exception }));
Assert.Throws<AggregateException>(() => ae2.Get());
// Collection overloads: all successes yield all values, optionally reduced.
var asc1 = Try.Aggregate(new List<ITry<int, Exception>> { S1, S1, S1 });
Assert.True(asc1.Get().SequenceEqual(new[] { 42, 42, 42 }));
var asc2 = Try.Aggregate(new[] { S2, S2, S2 });
Assert.True(asc2.Get().SequenceEqual(new[] { 42, 42, 42 }));
var ast1 = Try.Aggregate(new List<ITry<int, Exception>> { S1, S1, S1 }, i => i.Sum());
Assert.Equal(126, ast1.Get());
var amc1 = Try.Aggregate(new List<ITry<int, Exception>> { S1, E1, S1, E1 });
Assert.True(amc1.Error.Get() is AggregateException ag && ag.InnerExceptions.SequenceEqual(new[] { Exception, Exception }));
Assert.Throws<AggregateException>(() => amc1.Get());
var amc2 = Try.Aggregate(new[] { S2, E2, S2, E2 });
Assert.True(amc2.Error.Get().SequenceEqual(new[] { Exception, Exception }));
Assert.Throws<AggregateException>(() => amc2.Get());
}
}
}
| 43.544
| 135
| 0.574499
|
a001023f132ec1caa3875968bc702f0724374209
| 1,605
|
tsx
|
TypeScript
|
src/components/Bio.tsx
|
master-atul/blog
|
bff2053a2b3ff3fd3a0569ce0d402bbf4f477228
|
[
"MIT"
] | null | null | null |
src/components/Bio.tsx
|
master-atul/blog
|
bff2053a2b3ff3fd3a0569ce0d402bbf4f477228
|
[
"MIT"
] | 28
|
2021-03-01T21:16:44.000Z
|
2022-02-27T06:08:30.000Z
|
src/components/Bio.tsx
|
master-atul/blog
|
bff2053a2b3ff3fd3a0569ce0d402bbf4f477228
|
[
"MIT"
] | null | null | null |
import React from 'react';
import config from '../config';
// Import typefaces
import 'typeface-montserrat';
import 'typeface-merriweather';
import { rhythm } from '../utils/typography';
// Inline styles: avatar on the left, intro text flowing to the right.
// rhythm() yields spacing values from the typography vertical-rhythm scale.
const styles = {
container: {
display: 'flex',
flexDirection: 'row',
paddingTop: rhythm(1 / 3),
},
avatar: {
width: rhythm(2),
height: rhythm(2),
marginRight: rhythm(1),
},
intro: {
textAlign: 'justify',
},
};
// Short author bio shown with posts: avatar image plus a justified paragraph
// linking to the author's site, book and profiles. External links open in a
// new tab with rel="noopener noreferrer" to avoid reverse-tabnabbing.
const Bio = () => (
<section style={styles.container}>
<img src={config.image} alt="Atul R" style={styles.avatar} />
<p style={styles.intro}>
Written by
<strong>
{' '}
<a
href="https://www.atulr.com"
target="_blank"
rel="noopener noreferrer"
>
Atul R
</a>
</strong>{' '}
a developer 🖥,{' '}
<a
href="https://www.reactnative.guide"
target="_blank"
rel="noopener noreferrer"
>
author
</a>{' '}
📖 and trainer 👨🏽🎓. He primarily works on Javascript ecosystem and
occasionally hacks around in C++, Rust and Python. He is an{' '}
<a
href="https://github.com/a7ul"
target="_blank"
rel="noopener noreferrer"
>
open source
</a>{' '}
enthusiast and <span style={{ color: 'red' }}>❤</span> making useful tools
for humans.{' '}
<a
href="https://twitter.com/a7ulr"
target="_blank"
rel="noopener noreferrer"
>
You should follow him on Twitter{' '}
</a>
</p>
</section>
);
export default Bio;
| 22.605634
| 80
| 0.535202
|
20a56d851131a814fcba1ea9e6a37635d236dbd6
| 2,232
|
cs
|
C#
|
Test.IdentityServer.Utility/Attribute/IdentityAuthorizationAttribute.cs
|
ArjunBhalodiya/IdentityServer4Startup
|
a5dfa321902fbb1501f648c9a1a7107f3a87a852
|
[
"Apache-2.0"
] | 1
|
2020-03-30T14:32:04.000Z
|
2020-03-30T14:32:04.000Z
|
Test.IdentityServer.Utility/Attribute/IdentityAuthorizationAttribute.cs
|
ArjunBhalodiya/IdentityServer4Startup
|
a5dfa321902fbb1501f648c9a1a7107f3a87a852
|
[
"Apache-2.0"
] | null | null | null |
Test.IdentityServer.Utility/Attribute/IdentityAuthorizationAttribute.cs
|
ArjunBhalodiya/IdentityServer4Startup
|
a5dfa321902fbb1501f648c9a1a7107f3a87a852
|
[
"Apache-2.0"
] | 1
|
2019-06-30T18:15:34.000Z
|
2019-06-30T18:15:34.000Z
|
using System;
using System.Linq;
using System.Security.Claims;
using Microsoft.AspNetCore.Mvc.Filters;
namespace Test.IdentityServer.Utility.Attribute
{
    /// <summary>
    /// Action filter that rejects requests from unauthenticated users, and — when a scope
    /// list is supplied — from users lacking every one of the required "scope" claims.
    /// A null <see cref="Scopes"/> enforces authentication only.
    /// </summary>
    [AttributeUsage(AttributeTargets.Class | AttributeTargets.Method, AllowMultiple = true, Inherited = true)]
    public class IdentityAuthorizationAttribute : ActionFilterAttribute
    {
        private const string scopeClaimType = "scope";

        /// <summary>Required scopes; null disables the scope check entirely.</summary>
        public string[] Scopes { get; set; } = new string[] { };

        /// <summary>Authentication-only check: no scope validation.</summary>
        public IdentityAuthorizationAttribute()
        {
            Scopes = null;
        }

        /// <summary>Requires the single given scope.</summary>
        /// <exception cref="ArgumentNullException">When <paramref name="scope"/> is null or empty.</exception>
        public IdentityAuthorizationAttribute(string scope)
        {
            if (string.IsNullOrEmpty(scope))
            {
                throw new ArgumentNullException(nameof(scope));
            }
            Scopes = new[] { scope };
        }

        /// <summary>Requires at least one of the given scopes.</summary>
        /// <exception cref="ArgumentNullException">When <paramref name="scopes"/> is null.</exception>
        public IdentityAuthorizationAttribute(string[] scopes)
        {
            Scopes = scopes ?? throw new ArgumentNullException(nameof(scopes));
        }

        /// <summary>
        /// Runs before the action; throws <see cref="UnauthorizedAccessException"/> when the
        /// caller is unauthenticated or holds none of the required scopes.
        /// </summary>
        public override void OnActionExecuting(ActionExecutingContext context)
        {
            if (!ValidateScope(context))
            {
                throw new UnauthorizedAccessException("Invalid Scope!");
            }
            base.OnActionExecuting(context);
        }

        // Returns true when no scope check is configured or the user holds at least one
        // required scope (case-insensitive). Throws first if the request is unauthenticated.
        private bool ValidateScope(ActionExecutingContext context)
        {
            HandleUnauthorizedRequest(context);
            if (Scopes == null)
            {
                return true;
            }
            // HttpContext.User is already a ClaimsPrincipal in ASP.NET Core; no cast needed.
            var grantedScopes = context.HttpContext.User
                .FindAll(scopeClaimType)
                .Select(c => c.Value)
                .ToList();
            // Any single matching scope authorizes the request.
            return Scopes.Any(scope => grantedScopes.Contains(scope, StringComparer.OrdinalIgnoreCase));
        }

        // Rejects requests without claims or without an authenticated identity.
        private static void HandleUnauthorizedRequest(ActionExecutingContext context)
        {
            if (context.HttpContext.User.Claims == null || !context.HttpContext.User.Identity.IsAuthenticated)
            {
                throw new UnauthorizedAccessException("Unauthorized!");
            }
        }
    }
}
| 29.368421
| 110
| 0.579301
|
0052d0af018cb530854e57635de1a9f19037fcec
| 567
|
sql
|
SQL
|
db/seeds.sql
|
nivaniuc/glowing-broccoli
|
a2ed14be9dea8a763d1004cf59a1b7e88c5f828f
|
[
"MIT"
] | null | null | null |
db/seeds.sql
|
nivaniuc/glowing-broccoli
|
a2ed14be9dea8a763d1004cf59a1b7e88c5f828f
|
[
"MIT"
] | null | null | null |
db/seeds.sql
|
nivaniuc/glowing-broccoli
|
a2ed14be9dea8a763d1004cf59a1b7e88c5f828f
|
[
"MIT"
] | null | null | null |
USE employeesdb;
-- Seed the departments referenced by role.department_id below
-- (ids 1-4 presumably follow insertion order via AUTO_INCREMENT — verify schema).
INSERT INTO department (department_name)
VALUES
('Engineering'),
('Admin'),
('Design'),
('Labor');
-- Seed roles; department_id refers to the departments inserted above.
INSERT INTO role (title, salary, department_id)
VALUES
('Manager', 55000, 1 ),
('CSS', 38000, 2),
('JS', 44000, 3),
('HTML', 48000, 4),
('Entry Level', 32000, 4);
-- Seed employees; manager_id 1 points at the first employee, who has no manager (null).
INSERT INTO employee (first_name, last_name, role_id, manager_id)
VALUES
('Mike', 'Oxlong', 1, null),
('Drew', 'Ligma', 2, 1),
('Joe', 'Rogan', 2, 1),
('Steve', 'Buschemi', 3, 1),
('Danny', 'Devito', 4, 1),
('Becky', 'Lynch', 4, 1),
('Adam', 'Sandler', 4, 1),
('Pete', 'Bulger', 4, 1);
| 21
| 65
| 0.604938
|
74958187077452f1be347134411ce96371571138
| 3,430
|
sql
|
SQL
|
sql/class_sub.sql
|
chichi711/BMO
|
0f5287038515c0e995aa150bf5f219097716b2cb
|
[
"MIT"
] | null | null | null |
sql/class_sub.sql
|
chichi711/BMO
|
0f5287038515c0e995aa150bf5f219097716b2cb
|
[
"MIT"
] | null | null | null |
sql/class_sub.sql
|
chichi711/BMO
|
0f5287038515c0e995aa150bf5f219097716b2cb
|
[
"MIT"
] | null | null | null |
-- phpMyAdmin SQL Dump
-- version 5.0.4
-- https://www.phpmyadmin.net/
--
-- Host: localhost
-- Generation time: Feb 07, 2021 at 20:13
-- Server version: 10.4.17-MariaDB
-- PHP version: 7.4.12
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
START TRANSACTION;
SET time_zone = "+00:00";
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;
--
-- Database: `bmo`
--
-- --------------------------------------------------------
--
-- Table structure for table `class_sub` (book sub-categories;
-- main_id links each row to a parent category, sub_sort orders rows within it)
--
CREATE TABLE `class_sub` (
`sub_id` int(3) NOT NULL,
`sub_sort` int(2) NOT NULL,
`main_id` int(2) NOT NULL,
`sub_name` varchar(100) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Dumping data for table `class_sub`
--
INSERT INTO `class_sub` (`sub_id`, `sub_sort`, `main_id`, `sub_name`) VALUES
(1, 0, 1, '現代華文創作'),
(2, 1, 1, '現代詩'),
(3, 2, 1, '旅行飲食/自然文學'),
(4, 3, 1, '現代翻譯文學'),
(5, 4, 1, '推理/犯罪小說'),
(6, 5, 1, '恐怖/驚悚小說'),
(7, 6, 1, '奇幻/科幻小說'),
(8, 7, 1, '歷史/武俠小說'),
(9, 8, 1, '愛情小說'),
(10, 0, 2, '領導/管理'),
(11, 0, 2, '傳記/實例'),
(12, 0, 2, '管理實務'),
(13, 0, 2, '投資理財'),
(14, 0, 2, '職場工作術'),
(15, 0, 2, '創業/電商'),
(16, 0, 2, '行銷/業務'),
(17, 0, 3, '手作設計'),
(18, 0, 3, '影視娛樂'),
(19, 0, 3, '戶外活動/運動'),
(20, 0, 3, '美妝/時尚'),
(21, 0, 3, '品味/指南'),
(22, 0, 4, '烘焙/點心'),
(23, 2, 4, '主題料理'),
(24, 1, 4, '飲品/冰品'),
(25, 3, 4, '飲食生活'),
(26, 4, 4, '各國料理'),
(27, 0, 5, '自我成長'),
(28, 0, 5, '勵志小品'),
(29, 0, 5, '人性/人際'),
(30, 0, 5, '家庭/兩性'),
(31, 0, 5, '心理諮商/治療'),
(32, 0, 5, '人格天賦/潛能開發'),
(33, 0, 6, '健康飲食'),
(34, 0, 6, '懷孕/育兒'),
(35, 0, 6, '減肥/塑身'),
(36, 0, 6, '健康觀念'),
(37, 0, 6, '疾病百科'),
(38, 0, 6, '舒壓/養生'),
(39, 0, 7, '日本'),
(40, 0, 7, '亞洲'),
(41, 0, 7, '台灣旅遊'),
(42, 0, 7, '世界旅遊'),
(43, 0, 7, '中國'),
(44, 0, 7, '歐洲'),
(45, 0, 7, '美洲'),
(46, 0, 7, '大洋洲'),
(47, 0, 7, '非洲'),
(48, 0, 8, '佛教'),
(49, 0, 8, '基督教'),
(50, 0, 8, '占星/血型'),
(51, 0, 8, '占卜'),
(52, 0, 8, '命理'),
(53, 0, 8, '曆書'),
(54, 0, 8, '其他宗教'),
(55, 6, 9, 'APP開發/程式設計'),
(56, 7, 9, '辦公軟體/應用軟體'),
(57, 3, 9, '資料庫/大數據'),
(58, 1, 9, '網頁開發'),
(59, 2, 9, '網路/架站'),
(60, 4, 9, '工程/3D繪圖'),
(61, 5, 9, '遊戲/數位生活'),
(62, 0, 9, '認證考試'),
(63, 0, 10, '女性時尚'),
(64, 1, 10, '男性時尚'),
(65, 0, 11, '偶像藝人'),
(66, 0, 11, '次文化'),
(67, 0, 12, '0-3歲嬰幼兒'),
(68, 0, 12, '親子教養'),
(69, 0, 12, '親子生活'),
(70, 0, 13, '寶寶書(0-3)'),
(71, 0, 13, '遊戲本'),
(72, 0, 13, '塗鴉/貼紙/黏土'),
(73, 0, 14, '古代羅曼史'),
(74, 0, 14, ' 現代羅曼史'),
(75, 0, 15, '奇幻/科幻輕小說'),
(76, 0, 15, '推理/神怪輕小說'),
(77, 0, 15, '玄幻/仙俠輕小說'),
(78, 0, 16, '戀愛關係'),
(79, 0, 16, '魔幻冒險'),
(80, 0, 17, '英語學習'),
(81, 0, 17, '英語檢定'),
(82, 0, 18, '建築/室內設計'),
(83, 0, 18, ' 攝影'),
(84, 0, 19, '植物/花草生活'),
(85, 0, 19, '動物/寵物'),
(86, 0, 20, '台灣史地'),
(87, 0, 20, '亞洲史地'),
(88, 0, 21, '社會議題'),
(89, 0, 21, '政治/國際情勢'),
(90, 0, 22, '研究所考試'),
(91, 0, 22, ' 升大學/插大'),
(92, 0, 23, '高中學測'),
(93, 0, 23, '高中指考'),
(94, 0, 25, '各國文學'),
(95, 1, 25, '類型文學'),
(96, 0, 27, '女性時尚'),
(97, 0, 27, '男性時尚');
--
-- Indexes for dumped tables
--
--
-- Indexes for table `class_sub`
--
ALTER TABLE `class_sub`
ADD PRIMARY KEY (`sub_id`);
--
-- AUTO_INCREMENT for dumped tables
--
--
-- AUTO_INCREMENT for table `class_sub`
--
ALTER TABLE `class_sub`
MODIFY `sub_id` int(3) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=98;
COMMIT;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
| 20.914634
| 76
| 0.514286
|
433e6ca9efc30ada6cda1688015ed52748e9ed8f
| 566
|
tsx
|
TypeScript
|
src/compounds/card/index.tsx
|
TintKoKo/Test_Project
|
d34cad511a8c6b061196b817ea9f43a57687b519
|
[
"MIT"
] | null | null | null |
src/compounds/card/index.tsx
|
TintKoKo/Test_Project
|
d34cad511a8c6b061196b817ea9f43a57687b519
|
[
"MIT"
] | null | null | null |
src/compounds/card/index.tsx
|
TintKoKo/Test_Project
|
d34cad511a8c6b061196b817ea9f43a57687b519
|
[
"MIT"
] | null | null | null |
import React, { FC } from "react";
import { TouchableOpacity } from "react-native";
import { Spacer, Typography } from "../../elements";
import styles from "./styles";
/** Props accepted by {@link Card}. */
interface CardProps {
  title: string;
  description: string;
  onClick?: () => void;
}

/**
 * Tappable card showing a title above a description, separated by a 16pt spacer.
 * Invokes `onClick` (when provided) on press.
 */
export const Card: FC<CardProps> = (props) => {
  const { title, description, onClick } = props;
  return (
    <TouchableOpacity style={styles.container} onPress={() => onClick?.()}>
      <Typography>{title}</Typography>
      <Spacer height={16} />
      <Typography>{description}</Typography>
    </TouchableOpacity>
  );
};
| 24.608696
| 75
| 0.641343
|
f4e381a68c5123a2901e5bba6e3fb210512f1b37
| 223
|
ts
|
TypeScript
|
packages/core/out/api/factories/transaction/getUnconfirmedTransactions.d.ts
|
fruits-eco-blockchain/fruitsJS
|
be4bc342a77f3e117578175187c854d4ec6a1711
|
[
"Apache-2.0"
] | null | null | null |
packages/core/out/api/factories/transaction/getUnconfirmedTransactions.d.ts
|
fruits-eco-blockchain/fruitsJS
|
be4bc342a77f3e117578175187c854d4ec6a1711
|
[
"Apache-2.0"
] | null | null | null |
packages/core/out/api/factories/transaction/getUnconfirmedTransactions.d.ts
|
fruits-eco-blockchain/fruitsJS
|
be4bc342a77f3e117578175187c854d4ec6a1711
|
[
"Apache-2.0"
] | null | null | null |
import { ChainService } from '../../../service';
import { UnconfirmedTransactionList } from '../../..';
/**
 * Factory that binds a {@link ChainService} and returns a zero-argument async
 * function which resolves to the node's unconfirmed transaction list.
 */
export declare const getUnconfirmedTransactions: (service: ChainService) => () => Promise<UnconfirmedTransactionList>;
| 55.75
| 118
| 0.721973
|
0727921f8efeba87e2701ef77bb4865ef792e8ca
| 26,268
|
h
|
C
|
src/objmemory.h
|
leopardoTenaz/Smalltalk
|
260fd05a0e86eead99f4511253dff554a4ceddec
|
[
"MIT"
] | 687
|
2020-05-18T04:01:53.000Z
|
2022-03-30T09:01:30.000Z
|
src/objmemory.h
|
leopardoTenaz/Smalltalk
|
260fd05a0e86eead99f4511253dff554a4ceddec
|
[
"MIT"
] | 10
|
2020-05-22T22:35:20.000Z
|
2021-12-02T15:30:50.000Z
|
src/objmemory.h
|
leopardoTenaz/Smalltalk
|
260fd05a0e86eead99f4511253dff554a4ceddec
|
[
"MIT"
] | 61
|
2020-05-18T14:37:09.000Z
|
2022-03-30T09:04:40.000Z
|
//
// objmemory.h
// Smalltalk-80
//
// Created by Dan Banay on 2/20/20.
// Copyright © 2020 Dan Banay. All rights reserved.
//
// MIT License
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//
#pragma once
#include <cstdint>
#include <cassert>
#include <functional>
#include "hal.h"
#include "filesystem.h"
#include "realwordmemory.h"
#include "oops.h"
// The Smalltalk-80 VM generates a tremendous amount of circular references as it runs
// -- primarily a MethodContext that references a BlockContext (from a temp field) that
// has a back reference to that MethodContext (the sender field). If a reference counting only
// scheme is used, then free object table entries will eventually be consumed. If, on the other hand,
// a GC only approach is used then memory will fill up with contexts and GC will happen fairly
// frequently. Therefore, the hybrid reference counting approach with full garbage collection
// when too much circular garbage accumulates is recommended.
// GM_MARK_SWEEP and GC_REF_COUNT are not mutually exclusive!
// You can define *BOTH* for a hybrid collector which ref counts until
// memory is exhausted (cyclical data) and then does a full GC
// Mark and sweep collection when memory full
#define GC_MARK_SWEEP
// Ref counting
#define GC_REF_COUNT
// Define to use recursive marking for ref counting/GC
// If undefined the stack space efficient pointer reversal approach described
// on page 678 of G&R is used. Not recommended, and only included for completeness.
//#define RECURSIVE_MARKING
// Perform range checks etc. at runtime
#define RUNTIME_CHECKING
#ifdef RUNTIME_CHECKING
// Two-level macro expansion so __FILE__/__LINE__ are expanded before stringization.
#define RUNTIME_CHECK2(c,f,l) runtime_check(c, "RUNTIME ERROR: (" #c ") at: " f "(" #l ")")
#define RUNTIME_CHECK1(c,f,l) RUNTIME_CHECK2(c,f,l)
#define RUNTIME_CHECK(cond) RUNTIME_CHECK1(cond, __FILE__, __LINE__)
#else
// Checks compile away entirely when RUNTIME_CHECKING is off.
#define RUNTIME_CHECK(cond) ((void)0)
#endif
#ifdef GC_MARK_SWEEP
// Callback interface letting the VM client supply GC roots and react to collections.
// NOTE(review): no virtual destructor — do not delete implementers through this interface.
class IGCNotification
{
public:
// About to garbage collect. Client should call addRoot to specify roots of the world
virtual void prepareForCollection() = 0;
// Garbage collection has been completed
virtual void collectionCompleted() = 0;
};
#endif
class ObjectMemory
{
public:
#ifdef GC_MARK_SWEEP
ObjectMemory(IHardwareAbstractionLayer *halInterface, IGCNotification *notification = 0);
#else
ObjectMemory(IHardwareAbstractionLayer *halInterface);
#endif
bool loadSnapshot(IFileSystem *fileSystem, const char *imageFileName);
bool saveSnapshot(IFileSystem *fileSystem, const char *imageFileName);
// --- BCIInterface ---
inline int oopsLeft()
{
return freeOops;
}
inline std::uint32_t coreLeft()
{
return freeWords;
}
void garbageCollect();
// storePointer:ofObject:withValue:
int storePointer_ofObject_withValue(int fieldIndex, int objectPointer, int valuePointer);
// storeWord:ofObject:withValue:
int storeWord_ofObject_withValue(int wordIndex, int objectPointer, int valueWord);
// increaseReferencesTo:
inline void increaseReferencesTo(int objectPointer)
{
/* "source"
self countUp: objectPointer
*/
#ifdef GC_REF_COUNT
countUp(objectPointer);
#endif
}
// initialInstanceOf:
int initialInstanceOf(int classPointer);
// decreaseReferencesTo:
inline void decreaseReferencesTo(int objectPointer)
{
/* "source"
self countDown: objectPointer
*/
#ifdef GC_REF_COUNT
countDown(objectPointer);
#endif
}
// isIntegerValue:
inline bool isIntegerValue(int valueWord)
{
/* "source"
"ERROR: G&R really cock this up"
"dbanay - still broken in July 1985 ed!"
^valueWord >= -16384 and: [valueWord <= 16383]
*/
return valueWord >= -16384 && valueWord <= 16383;
}
// fetchWord:ofObject:
inline int fetchWord_ofObject(int wordIndex, int objectPointer)
{
/* "source"
^self heapChunkOf: objectPointer word: HeaderSize + wordIndex
*/
RUNTIME_CHECK(wordIndex >= 0 && wordIndex < fetchWordLengthOf(objectPointer));
return heapChunkOf_word(objectPointer, HeaderSize + wordIndex);
}
// integerValueOf:
inline int integerValueOf(int objectPointer)
{
/* "source"
^objectPointer/2
*/
return (std::int16_t)(objectPointer & 0xfffe)/2;
// Right shifting a negative number is undefined according to the standard.
// return ((std::int16_t) objectPointer) >> 1;
}
// swapPointersOf:and:
void swapPointersOf_and(int firstPointer, int secondPointer);
// fetchWordLengthOf:
inline int fetchWordLengthOf(int objectPointer)
{
/* "source"
^(self sizeBitsOf: objectPointer) - HeaderSize
*/
return sizeBitsOf(objectPointer) - HeaderSize;
}
// instantiateClass:withWords:
int instantiateClass_withWords(int classPointer, int length);
// isIntegerObject:
inline bool isIntegerObject(int objectPointer)
{
/* "source"
^(objectPointer bitAnd: 1) = 1
*/
return (objectPointer & 1) == 1;
}
// instantiateClass:withBytes:
int instantiateClass_withBytes(int classPointer, int length);
// hasObject:
bool hasObject(int objectPointer);
// instantiateClass:withPointers:
int instantiateClass_withPointers(int classPointer, int length);
// fetchByte:ofObject:
inline int fetchByte_ofObject(int byteIndex, int objectPointer)
{
/* "source"
^self heapChunkOf: objectPointer byte: (HeaderSize*2 + byteIndex)
*/
return heapChunkOf_byte(objectPointer, (HeaderSize*2 + byteIndex));
}
// fetchPointer:ofObject:
inline int fetchPointer_ofObject(int fieldIndex, int objectPointer)
{
/* "source"
^self heapChunkOf: objectPointer word: HeaderSize + fieldIndex
*/
RUNTIME_CHECK(fieldIndex >= 0 && fieldIndex < fetchWordLengthOf(objectPointer));
return heapChunkOf_word(objectPointer, HeaderSize + fieldIndex);
}
// fetchClassOf:
inline int fetchClassOf(int objectPointer)
{
/* Note that fetchClassOf:objectPointer returns IntegerClass (the object table index of SmallInteger)
if its argument is an immediate integer. G&R pg 686 */
/* "source"
(self isIntegerObject: objectPointer)
ifTrue: [^IntegerClass] "ERROR IntegerClass not defined"
ifFalse: [^self classBitsOf: objectPointer]
*/
if (isIntegerObject(objectPointer))
return ClassSmallInteger;
return classBitsOf(objectPointer);
}
// integerObjectOf:
inline int integerObjectOf(int value)
{
    /* "source"
     ^(value bitShift: 1) + 1
    */
    // Tag `value` as an immediate SmallInteger: shift left one and set the
    // low bit. The shift is performed in the unsigned domain because
    // left-shifting a negative signed value is undefined behavior (prior to
    // C++20); truncating to 16 bits matches the original result cast.
    return (std::uint16_t) (((std::uint16_t) value << 1) | 1);
}
// fetchByteLengthOf:
inline int fetchByteLengthOf(int objectPointer)
{
    /* "source"
     "ERROR in selector of next line"
     ^(self fetchWordLengthOf: objectPointer)*2 - (self oddBitOf: objectPointer)
    */
    // Byte length = word length * 2, minus one when the odd bit indicates
    // the final word holds only a single used byte.
    return fetchWordLengthOf(objectPointer)*2 - oddBitOf(objectPointer);
}
// instanceAfter:
int instanceAfter(int objectPointer);
// storeByte:ofObject:withValue:
inline int storeByte_ofObject_withValue(int byteIndex, int objectPointer, int valueByte)
{
    /* "source"
     ^self heapChunkOf: objectPointer
        byte: (HeaderSize*2 + byteIndex)
        put: valueByte
    */
    // Write byte `byteIndex` of the object's body (header bytes skipped).
    return heapChunkOf_byte_put(objectPointer,
                                HeaderSize*2 + byteIndex,
                                valueByte);
}
// --- ObjectPointers ---
// cantBeIntegerObject:
void cantBeIntegerObject(int objectPointer);
#ifdef GC_MARK_SWEEP
// Register a GC root: mark everything reachable from rootObjectPointer.
// Available only in GC_MARK_SWEEP builds.
void addRoot(int rootObjectPointer) //dbanay
{
    markObjectsAccessibleFrom(rootObjectPointer);
}
#endif
private:
// --- Compaction ---
// sweepCurrentSegmentFrom:
int sweepCurrentSegmentFrom(int lowWaterMark);
// compactCurrentSegment
void compactCurrentSegment();
// releasePointer:
void releasePointer(int objectPointer);
// reverseHeapPointersAbove:
void reverseHeapPointersAbove(int lowWaterMark);
// abandonFreeChunksInSegment:
int abandonFreeChunksInSegment(int segment);
// allocateChunk:
int allocateChunk(int size);
#ifdef GC_MARK_SWEEP
// --- MarkingGarbage ---
// reclaimInaccessibleObjects
void reclaimInaccessibleObjects();
// markObjectsAccessibleFrom:
int markObjectsAccessibleFrom(int rootObjectPointer);
// markAccessibleObjects
void markAccessibleObjects();
// rectifyCountsAndDeallocateGarbage
void rectifyCountsAndDeallocateGarbage();
// zeroReferenceCounts
void zeroReferenceCounts();
#endif
// --- NonpointerObjs ---
// lastPointerOf:
int lastPointerOf(int objectPointer);
// spaceOccupiedBy:
int spaceOccupiedBy(int objectPointer);
// allocate:odd:pointer:extra:class:
int allocate_odd_pointer_extra_class(
int size,
int oddBit,
int pointerBit,
int extraWord,
int classPointer
);
// --- UnallocatedSpc ---
// headOfFreePointerList
int headOfFreePointerList();
// toFreeChunkList:add:
void toFreeChunkList_add(int size, int objectPointer);
// headOfFreeChunkList:inSegment:put:
int headOfFreeChunkList_inSegment_put(int size, int segment, int objectPointer);
// removeFromFreePointerList
int removeFromFreePointerList();
// toFreePointerListAdd:
void toFreePointerListAdd(int objectPointer);
// removeFromFreeChunkList:
int removeFromFreeChunkList(int size);
// resetFreeChunkList:inSegment:
void resetFreeChunkList_inSegment(int size, int segment);
// headOfFreeChunkList:inSegment:
int headOfFreeChunkList_inSegment(int size, int segment);
// headOfFreePointerListPut:
int headOfFreePointerListPut(int objectPointer);
// --- RefCntGarbage ---
// countDown:
int countDown(int rootObjectPointer);
// countUp:
int countUp(int objectPointer);
// deallocate:
void deallocate(int objectPointer);
// forAllOtherObjectsAccessibleFrom:suchThat:do:
int forAllOtherObjectsAccessibleFrom_suchThat_do(
int objectPointer,
const std::function <bool (int)>& predicate,
const std::function <void (int)>& action
);
// forAllObjectsAccessibleFrom:suchThat:do:
int forAllObjectsAccessibleFrom_suchThat_do(int objectPointer,
const std::function <bool (int)>& predicate,
const std::function <void (int)>& action);
// --- ObjectTableEnt ---
// segmentBitsOf:
inline int segmentBitsOf(int objectPointer)
{
    /* "source"
     ^self ot: objectPointer bits: 12 to: 15
    */
    // Heap segment number holding the object's body (OT entry bits 12-15).
    return ot_bits_to(objectPointer, 12, 15);
}
// heapChunkOf:byte:put:
inline int heapChunkOf_byte_put(int objectPointer, int offset, int value)
{
    /* "source"
     ^wordMemory segment: (self segmentBitsOf: objectPointer)
        word: ((self locationBitsOf: objectPointer) + (offset//2))
        byte: (offset\\2) put: value
    */
    // Store one byte at byte-offset `offset` within the object's heap chunk:
    // offset/2 selects the word, offset%2 the byte within that word.
    return wordMemory.segment_word_byte_put(segmentBitsOf(objectPointer),
                                            locationBitsOf(objectPointer) + (offset/2),
                                            offset % 2, value);
}
// pointerBitOf:put:
inline int pointerBitOf_put(int objectPointer, int value)
{
    /* "source"
     ^self ot: objectPointer bits: 9 to: 9 put: value
    */
    // Set/clear the "fields hold oops" flag (OT entry bit 9).
    return ot_bits_to_put(objectPointer, 9, 9, value);
}
// heapChunkOf:word:
inline int heapChunkOf_word(int objectPointer, int offset)
{
    /* "source"
     ^wordMemory segment: (self segmentBitsOf: objectPointer)
        word: ((self locationBitsOf: objectPointer) + offset)
    */
    // Fetch the word at word-offset `offset` within the object's heap chunk.
    return wordMemory.segment_word(segmentBitsOf(objectPointer),
                                   locationBitsOf(objectPointer) + offset);
}
// segmentBitsOf:put:
inline int segmentBitsOf_put(int objectPointer, int value)
{
    /* "source"
     ^self ot: objectPointer bits: 12 to: 15 put: value
    */
    // Record which heap segment holds the object's body.
    return ot_bits_to_put(objectPointer, 12, 15, value);
}
// heapChunkOf:word:put:
inline int heapChunkOf_word_put(int objectPointer, int offset, int value)
{
    /* "source"
     ^wordMemory segment: (self segmentBitsOf: objectPointer)
        word: ((self locationBitsOf: objectPointer) + offset)
        put: value
    */
    // Store the word at word-offset `offset` within the object's heap chunk.
    return wordMemory.segment_word_put(segmentBitsOf(objectPointer),
                                       locationBitsOf(objectPointer) + offset,
                                       value);
}
// oddBitOf:
inline int oddBitOf(int objectPointer)
{
    /* "source"
     ^self ot: objectPointer bits: 8 to: 8
    */
    // 1 when a byte object's length is odd (last byte of last word unused).
    return ot_bits_to(objectPointer, 8, 8);
}
// freeBitOf:
inline int freeBitOf(int objectPointer)
{
    /* "source"
     ^self ot: objectPointer bits: 10 to: 10
    */
    // 1 when this object-table entry is on the free-pointer list.
    return ot_bits_to(objectPointer, 10, 10);
}
// locationBitsOf:
inline int locationBitsOf(int objectPointer)
{
    /* "source"
     self cantBeIntegerObject: objectPointer.
     ^wordMemory segment: ObjectTableSegment
        word: ObjectTableStart + objectPointer + 1
    */
    // Word address of the object's chunk within its segment; stored in the
    // second word of the two-word OT entry.
    cantBeIntegerObject(objectPointer);
    return wordMemory.segment_word(ObjectTableSegment,
                                   ObjectTableStart + objectPointer + 1);
}
// ot:
inline int ot(int objectPointer)
{
    /* "source"
     self cantBeIntegerObject: objectPointer.
     ^wordMemory segment: ObjectTableSegment
        word: ObjectTableStart + objectPointer
    */
    // First word of the OT entry: count, odd, pointer, free and segment bits.
    cantBeIntegerObject(objectPointer);
    return wordMemory.segment_word(ObjectTableSegment,
                                   ObjectTableStart + objectPointer);
}
// freeBitOf:put:
inline int freeBitOf_put(int objectPointer, int value)
{
    /* "source"
     ^self ot: objectPointer bits: 10 to: 10 put: value
    */
    // Mark/unmark this OT entry as free (bit 10).
    return ot_bits_to_put(objectPointer, 10, 10, value);
}
// classBitsOf:put:
inline int classBitsOf_put(int objectPointer, int value)
{
    /* "source"
     ^self heapChunkOf: objectPointer word: 1 put: value
    */
    // The class oop lives in the second word of the object header.
    return heapChunkOf_word_put(objectPointer, 1, value);
}
// heapChunkOf:byte:
inline int heapChunkOf_byte(int objectPointer, int offset)
{
    /* "source"
     ^wordMemory segment: (self segmentBitsOf: objectPointer)
        word: ((self locationBitsOf: objectPointer) + (offset//2))
        byte: (offset\\2)
    */
    // Fetch one byte at byte-offset `offset` within the object's heap chunk.
    return wordMemory.segment_word_byte(segmentBitsOf(objectPointer),
                                        locationBitsOf(objectPointer) + offset/2,
                                        offset % 2);
}
// locationBitsOf:put:
inline int locationBitsOf_put(int objectPointer, int value)
{
    /* "source"
     self cantBeIntegerObject: objectPointer.
     ^wordMemory segment: ObjectTableSegment
        word: ObjectTableStart + objectPointer + 1
        put: value
    */
    // Store the chunk address into the second word of the OT entry.
    cantBeIntegerObject(objectPointer);
    return wordMemory.segment_word_put(ObjectTableSegment,
                                       ObjectTableStart + objectPointer + 1,
                                       value);
}
// sizeBitsOf:
inline int sizeBitsOf(int objectPointer)
{
    /* "source"
     ^self heapChunkOf: objectPointer word: 0
    */
    // Chunk size in words (header included); first word of the header.
    return heapChunkOf_word(objectPointer, 0);
}
// oddBitOf:put:
inline int oddBitOf_put(int objectPointer, int value)
{
    /* "source"
     ^self ot: objectPointer bits: 8 to: 8 put: value
    */
    // Record whether the byte length is odd (bit 8).
    return ot_bits_to_put(objectPointer, 8, 8, value);
}
// ot:put:
inline int ot_put(int objectPointer, int value)
{
    /* "source"
     self cantBeIntegerObject: objectPointer.
     ^wordMemory segment: ObjectTableSegment
        word: ObjectTableStart + objectPointer
        put: value
    */
    // Overwrite the entire first word of the OT entry.
    cantBeIntegerObject(objectPointer);
    return wordMemory.segment_word_put(ObjectTableSegment,
                                       ObjectTableStart + objectPointer,
                                       value);
}
// countBitsOf:put:
inline int countBitsOf_put(int objectPointer, int value)
{
    /* "source"
     ^self ot: objectPointer bits: 0 to: 7 put: value
    */
    // Reference count occupies the low 8 bits of the OT entry.
    return ot_bits_to_put(objectPointer, 0, 7, value);
}
// classBitsOf:
inline int classBitsOf(int objectPointer)
{
    /* "source"
     ^self heapChunkOf: objectPointer word: 1
    */
    // Class oop from the second word of the object header.
    return heapChunkOf_word(objectPointer, 1);
}
// countBitsOf:
inline int countBitsOf(int objectPointer)
{
    /* "source"
     ^self ot: objectPointer bits: 0 to: 7
    */
    // Reference count from the low 8 bits of the OT entry.
    return ot_bits_to(objectPointer, 0, 7);
}
// ot:bits:to:put:
inline int ot_bits_to_put(
    int objectPointer,
    int firstBitIndex,
    int lastBitIndex,
    int value
)
{
    /* "source"
     self cantBeIntegerObject: objectPointer.
     ^wordMemory segment: ObjectTableSegment
        word: ObjectTableStart + objectPointer
        bits: firstBitIndex
        to: lastBitIndex
        put: value
    */
    // Store into the bit field [firstBitIndex..lastBitIndex] of the OT entry.
    cantBeIntegerObject(objectPointer);
    return wordMemory.segment_word_bits_to_put(ObjectTableSegment,
                                               ObjectTableStart + objectPointer,
                                               firstBitIndex,
                                               lastBitIndex,
                                               value);
}
// sizeBitsOf:put:
inline int sizeBitsOf_put(int objectPointer, int value)
{
    /* "source"
     ^self heapChunkOf: objectPointer word: 0 put: value
    */
    // Store the chunk size (in words) into the first header word.
    return heapChunkOf_word_put(objectPointer, 0, value);
}
// ot:bits:to:
inline int ot_bits_to(int objectPointer, int firstBitIndex, int lastBitIndex)
{
    /* "source"
     self cantBeIntegerObject: objectPointer.
     ^wordMemory segment: ObjectTableSegment
        word: ObjectTableStart + objectPointer
        bits: firstBitIndex
        to: lastBitIndex
    */
    // Extract the bit field [firstBitIndex..lastBitIndex] from the OT entry.
    cantBeIntegerObject(objectPointer);
    return wordMemory.segment_word_bits_to(ObjectTableSegment,
                                           ObjectTableStart + objectPointer,
                                           firstBitIndex, lastBitIndex);
}
// pointerBitOf:
inline int pointerBitOf(int objectPointer)
{
    /* "source"
     ^self ot: objectPointer bits: 9 to: 9
    */
    // 1 when the object's fields hold oops rather than raw bytes/words.
    return ot_bits_to(objectPointer, 9, 9);
}
// --- Allocation ---
// obtainPointer:location:
int obtainPointer_location(int size, int location);
// attemptToAllocateChunk:
int attemptToAllocateChunk(int size);
// attemptToAllocateChunkInCurrentSegment:
int attemptToAllocateChunkInCurrentSegment(int size);
void outOfMemoryError();
int auditFreeOops();
#ifdef RUNTIME_CHECKING
// Invariant check used by RUNTIME_CHECK: trips assert(0) (debug builds) and
// reports the failure message through the hardware abstraction layer.
inline void runtime_check(bool condition, const char *errorMessage)
{
    if (!condition)
    {
        assert(0);
        hal->error(errorMessage);
    }
}
#endif
private:
RealWordMemory wordMemory;
// Special Register G&R pg. 667
int currentSegment; // The index of the heap segment currently being used for allocation
int freeWords; // free words remaining (make primitiveFreeCore "fast")
// An a table entry with a free bit set OR that contains a reference to a free chunk
// (free bit clear but count field zero) of memory is counted as a free oop
int freeOops; // free OT entries (make primitiveFreeOops "fast")
// G&R pg. 664 - Object Table Related Constants
// Object Table Segment (last segment) contains the Object Table followed by the
// head of the OT free pointer list
// +-------------------------+
// | | <--- ObjectTableStart
// | |
// | |
// | Object Table |
// | |
// | |
// +-------------------------+
// | FreePointerList |
// +-------------------------+
// |////// UNUSED WORD //////|
// +-------------------------+
//
static const int ObjectTableSegment = RealWordMemory::SegmentCount-1;
static const int ObjectTableStart = 0;
static const int ObjectTableSize = RealWordMemory::SegmentSize - 2;
// The smallest number that is too large to represent in an eight-bit count field; that is, 256.
static const int HugeSize = 256; // G&R pg 661
// The location of the head of the linked list of free object table entries
static const int FreePointerList = ObjectTableStart + ObjectTableSize; // G&R pg. 664
// G&R pg. 664 - Object Table Related Constants
// The smallest size of chunk that is not stored on a list whose chunk share the same size.
// (Theindex of the last free chunk list).
static const int BigSize = 20;
static const int FirstFreeChunkListSize = BigSize+1;
// Heap Constants G&R pg. 658
// The number of heaps segments used in the implementation.
// We reserve the last segment for the Object Table and use the remaining for the heap
static const int HeapSegmentCount = RealWordMemory::SegmentCount - 1;
// Each heap segment is organized as follows:
//
// +-------------------------+
// | |
// | |
// | Object Storage |
// | |
// | |<--- HeapSpaceStop (last word)
// +-------------------------+
// | Array of BigSize+1 |<--- FirstFreeChunkList
// | Free Chunks Linked |
// | List Heads |
// | |<--- LastFreeChunkList
// +-------------------------+
//
// The index of the first memory segmentused to store the heap
static const int FirstHeapSegment = 0;
static const int LastHeapSegment = FirstHeapSegment + HeapSegmentCount - 1;
// The address of the last location used in each heap segment.
static const int HeapSpaceStop = RealWordMemory::SegmentSize - FirstFreeChunkListSize - 1;
static const int HeaderSize = 2; // The number of words in an object header(2).
// If HeaderSize changes, revisit forAllOtherObjectsAccessibleFrom_suchThat_do
// where we test if the offset passes the class field...
// The location of the head of the linked list of free chunks of size zero. Comes right
// after the last word for object storage.
static const int FirstFreeChunkList = HeapSpaceStop + 1;
// The bluebook incorrectly uses LastFreeChunkList in all places it is used! The
// headOfFreeChunkList:inSegment: and headOfFreeChunkList:inSegment:put methods take
// a SIZE as the first parameter not a location.
// The location of the head of the linked list of free chunks of size BigSize or larger.
// static const int LastFreeChunkList = FirstFreeChunkList + BigSize;
// Any sixteen-bit value that cannot be an object table index, e.g.,2**16~1.
static const int NonPointer = 65535;
// Last special oop
// (See SystemTracer in Smalltalk.sources)
static const int LastSpecialOop = 52;
// Snapshots
// Object space starts at offset 512 in the image
static const int ObjectSpaceBaseInImage = 512;
bool loadObjectTable( IFileSystem *fileSystem, int fd);
static bool padToPage(IFileSystem *fileSystem, int fd);
bool loadObjects(IFileSystem *fileSystem, int fd);
bool saveObjects(IFileSystem *fileSystem, int fd);
#ifdef GC_MARK_SWEEP
IGCNotification *gcNotification;
#endif
// Interface to the host operating system
IHardwareAbstractionLayer *hal;
};
| 30.579744
| 109
| 0.613027
|
3382532935823b1bfa60967815895ba065530756
| 7,572
|
go
|
Go
|
httperror.go
|
acoshift/e
|
3b1d24b21f23a02fac5dcb5c99b64cf2e9bd3e4c
|
[
"MIT"
] | 2
|
2017-01-24T12:23:22.000Z
|
2017-05-31T16:33:24.000Z
|
httperror.go
|
acoshift/e
|
3b1d24b21f23a02fac5dcb5c99b64cf2e9bd3e4c
|
[
"MIT"
] | null | null | null |
httperror.go
|
acoshift/e
|
3b1d24b21f23a02fac5dcb5c99b64cf2e9bd3e4c
|
[
"MIT"
] | null | null | null |
// Package httperror is the reusable http error collection
package httperror
import (
"errors"
"fmt"
"net/http"
"google.golang.org/grpc"
"google.golang.org/grpc/codes"
)
// Error is the httperror's Error: an HTTP-status-aware error value that
// serializes to {"status", "code", "message"}.
type Error struct {
	Status  int    `json:"status"`  // http status code
	Code    string `json:"code"`    // error code
	Message string `json:"message"` // error message
}

// Error implements error interface
func (err *Error) Error() string {
	return fmt.Sprintf("%s: [%d] %s", err.Code, err.Status, err.Message)
}
// Clone error
// The value receiver makes `err` a copy of the original, so returning its
// address yields a pointer to a fresh, independent Error.
func (err Error) Clone() *Error {
	return &err
}
// NewError creates new Error
func NewError(status int, code string, message string) error {
	e := Error{
		Status:  status,
		Code:    code,
		Message: message,
	}
	return &e
}
// Func is the error creator function
type Func func(error) error

// New is the helper function for create Func
func New(status int, code string) Func {
	return func(err error) error {
		return &Error{
			Status:  status,
			Code:    code,
			Message: err.Error(),
		}
	}
}
// StatusFunc is the error creator function pre-defined status
type StatusFunc func(string, error) error

// NewWithStatus is the helper function for create StatusFunc
func NewWithStatus(status int) StatusFunc {
	return func(code string, err error) error {
		return &Error{
			Status:  status,
			Code:    code,
			Message: err.Error(),
		}
	}
}
// CodeFunc is the error creator function pre-defined code
type CodeFunc func(int, error) error

// NewWithCode is the helper function for create CodeFunc
func NewWithCode(code string) CodeFunc {
	return func(status int, err error) error {
		return &Error{
			Status:  status,
			Code:    code,
			Message: err.Error(),
		}
	}
}
// NewHTTPError is the helper function for create http error
func NewHTTPError(status int, code string) error {
	// The message is the canonical status text, e.g. "Not Found" for 404.
	return &Error{
		Status:  status,
		Code:    code,
		Message: http.StatusText(status),
	}
}
// mapHTTPStatusCode maps the HTTP status codes used by the pre-defined
// errors below to their snake_case error-code strings.
var mapHTTPStatusCode = map[int]string{
	http.StatusBadRequest:          "bad_request",
	http.StatusUnauthorized:        "unauthorized",
	http.StatusForbidden:           "forbidden",
	http.StatusNotFound:            "not_found",
	http.StatusMethodNotAllowed:    "method_not_allowed",
	http.StatusRequestTimeout:      "request_timeout",
	http.StatusConflict:            "conflict",
	http.StatusGone:                "gone",
	http.StatusInternalServerError: "internal_server_error",
	http.StatusNotImplemented:      "not_implemented",
}
// newPreDefinedHTTPError builds an Error whose message is the standard
// http.StatusText for the given status.
// NOTE(review): a status missing from mapHTTPStatusCode yields code "".
func newPreDefinedHTTPError(status int) error {
	return NewHTTPError(status, mapHTTPStatusCode[status])
}

// newPreDefinedEmptyHTTPError builds an Error with an empty message; used as
// the base operand for the *With helpers that Merge in a caller's error.
func newPreDefinedEmptyHTTPError(status int) error {
	return NewError(status, mapHTTPStatusCode[status], "")
}
// Pre-defined errors
var (
	// Ready-to-use errors whose message is the standard HTTP status text.
	BadRequest          = newPreDefinedHTTPError(http.StatusBadRequest)
	Unauthorized        = newPreDefinedHTTPError(http.StatusUnauthorized)
	Forbidden           = newPreDefinedHTTPError(http.StatusForbidden)
	NotFound            = newPreDefinedHTTPError(http.StatusNotFound)
	MethodNotAllowed    = newPreDefinedHTTPError(http.StatusMethodNotAllowed)
	RequestTimeout      = newPreDefinedHTTPError(http.StatusRequestTimeout)
	Conflict            = newPreDefinedHTTPError(http.StatusConflict)
	Gone                = newPreDefinedHTTPError(http.StatusGone)
	InternalServerError = newPreDefinedHTTPError(http.StatusInternalServerError)
	NotImplemented      = newPreDefinedHTTPError(http.StatusNotImplemented)

	// Empty message errors
	// Bases for the *With helpers: Merge appends the caller's message.
	// NOTE(review): emptyNotImplemented has no *With helper in this file.
	emptyBadRequest          = newPreDefinedEmptyHTTPError(http.StatusBadRequest)
	emptyUnauthorized        = newPreDefinedEmptyHTTPError(http.StatusUnauthorized)
	emptyForbidden           = newPreDefinedEmptyHTTPError(http.StatusForbidden)
	emptyNotFound            = newPreDefinedEmptyHTTPError(http.StatusNotFound)
	emptyMethodNotAllowed    = newPreDefinedEmptyHTTPError(http.StatusMethodNotAllowed)
	emptyRequestTimeout      = newPreDefinedEmptyHTTPError(http.StatusRequestTimeout)
	emptyConflict            = newPreDefinedEmptyHTTPError(http.StatusConflict)
	emptyGone                = newPreDefinedEmptyHTTPError(http.StatusGone)
	emptyInternalServerError = newPreDefinedEmptyHTTPError(http.StatusInternalServerError)
	emptyNotImplemented      = newPreDefinedEmptyHTTPError(http.StatusNotImplemented)
)
// Merge an error with other error
// if one or both errors are Error type, result will be an Error
// if none is Error, result will be native go's error
func Merge(err, other error) error {
	// Merging with nil returns the other operand unchanged.
	if other == nil {
		return err
	}
	if err == nil {
		return other
	}
	// If err is an *Error, keep its status/code and append other's message.
	if e, ok := err.(*Error); ok {
		r := e.Clone()
		if len(r.Message) > 0 {
			r.Message += "; "
		}
		r.Message += other.Error()
		return r
	}
	// Otherwise, if other is an *Error, keep ITS status/code instead.
	// NOTE(review): in this branch the joined message order flips (other's
	// message first, then err's) compared to the branch above -- confirm
	// this asymmetry is intended.
	if e, ok := other.(*Error); ok {
		r := e.Clone()
		if len(r.Message) > 0 {
			r.Message += "; "
		}
		r.Message += err.Error()
		return r
	}
	// Neither operand is an *Error: join the two messages in a plain error.
	return errors.New(err.Error() + "; " + other.Error())
}
// The *With helpers wrap a caller's error in the matching empty pre-defined
// Error via Merge, so the result carries the HTTP status/code plus the
// caller's message.
// NOTE(review): there is no NotImplementedWith helper even though
// emptyNotImplemented exists -- confirm whether one is intended.

// BadRequestWith merges error with bad request
func BadRequestWith(err error) error {
	return Merge(emptyBadRequest, err)
}

// UnauthorizedWith merges error with unauthorized
func UnauthorizedWith(err error) error {
	return Merge(emptyUnauthorized, err)
}

// ForbiddenWith merges error with forbidden
func ForbiddenWith(err error) error {
	return Merge(emptyForbidden, err)
}

// NotFoundWith merges error with not found
func NotFoundWith(err error) error {
	return Merge(emptyNotFound, err)
}

// MethodNotAllowedWith merges error with method not allowed
func MethodNotAllowedWith(err error) error {
	return Merge(emptyMethodNotAllowed, err)
}

// RequestTimeoutWith merges error with request timeout
func RequestTimeoutWith(err error) error {
	return Merge(emptyRequestTimeout, err)
}

// ConflictWith merges error with conflict
func ConflictWith(err error) error {
	return Merge(emptyConflict, err)
}

// GoneWith merges error with gone
func GoneWith(err error) error {
	return Merge(emptyGone, err)
}

// InternalServerErrorWith merges error with internal server error
func InternalServerErrorWith(err error) error {
	return Merge(emptyInternalServerError, err)
}
// GRPC maps grpc error to http error
// NOTE(review): grpc.Code/grpc.ErrorDesc are the legacy accessors; newer
// grpc-go exposes the same data via the status package -- confirm the
// pinned grpc version before migrating.
func GRPC(err error) error {
	if err == nil {
		return nil
	}
	// check is err grpc error
	desc := grpc.ErrorDesc(err)
	switch grpc.Code(err) {
	case codes.OK:
		return nil
	case codes.Canceled:
		return NewError(http.StatusRequestTimeout, "canceled", desc)
	case codes.Unknown:
		return NewError(http.StatusInternalServerError, "unknown", desc)
	case codes.InvalidArgument:
		return NewError(http.StatusBadRequest, "invalid_argument", desc)
	case codes.DeadlineExceeded:
		return NewError(http.StatusRequestTimeout, "deadline_exceeded", desc)
	case codes.NotFound:
		return NewError(http.StatusNotFound, "not_found", desc)
	case codes.AlreadyExists:
		return NewError(http.StatusConflict, "already_exists", desc)
	case codes.PermissionDenied:
		return NewError(http.StatusForbidden, "permission_denied", desc)
	case codes.Unauthenticated:
		return NewError(http.StatusUnauthorized, "unauthenticated", desc)
	case codes.ResourceExhausted:
		// NOTE(review): mapped to 403 rather than 429 Too Many Requests --
		// confirm this is the intended mapping.
		return NewError(http.StatusForbidden, "resource_exhausted", desc)
	case codes.FailedPrecondition:
		return NewError(http.StatusPreconditionFailed, "failed_precondition", desc)
	case codes.Aborted:
		return NewError(http.StatusConflict, "aborted", desc)
	case codes.OutOfRange:
		return NewError(http.StatusBadRequest, "out_of_range", desc)
	case codes.Unimplemented:
		return NewError(http.StatusNotImplemented, "unimplemented", desc)
	case codes.Internal:
		return NewError(http.StatusInternalServerError, "internal", desc)
	case codes.Unavailable:
		return NewError(http.StatusServiceUnavailable, "service_unavailable", desc)
	case codes.DataLoss:
		return NewError(http.StatusInternalServerError, "data_loss", desc)
	default:
		// Unrecognized code: pass the original error through untouched.
		return err
	}
}
| 31.949367
| 87
| 0.750396
|
38c80e14801aa23a67cd97e70bd3a17abd97b9b4
| 246
|
php
|
PHP
|
src/themes/bootstrap5/views/modules/sidebar/header.php
|
bytic/admin-base
|
68a60f28fc04ff7a84bd26896abb1fc03a8e50fb
|
[
"MIT"
] | null | null | null |
src/themes/bootstrap5/views/modules/sidebar/header.php
|
bytic/admin-base
|
68a60f28fc04ff7a84bd26896abb1fc03a8e50fb
|
[
"MIT"
] | null | null | null |
src/themes/bootstrap5/views/modules/sidebar/header.php
|
bytic/admin-base
|
68a60f28fc04ff7a84bd26896abb1fc03a8e50fb
|
[
"MIT"
] | null | null | null |
<?php
// Prefer a project-provided logoUrl() helper when one is defined; otherwise
// fall back to the statically bundled white logo asset.
$logoUrl = function_exists('logoUrl') ? logoUrl('logo-white.png') : asset('/images/logos/logo-white.png');
?>
<li class="nav-profile">
    <!-- NOTE(review): $logoUrl is echoed without escaping; confirm both helpers return a safe URL. -->
    <img src="<?php echo $logoUrl; ?>" class="img-fluid"/>
<!--    <div class="image"></div>-->
</li>
| 35.142857
| 106
| 0.597561
|
2c615bee7ee5bbf21c0e324d25f80ee007fba715
| 40,152
|
py
|
Python
|
nemo/collections/asr/parts/utils/nmesc_clustering.py
|
gkucsko/NeMo
|
c1ae0a7744d9a0ac206f61b2883ce00c9b8339b9
|
[
"Apache-2.0"
] | null | null | null |
nemo/collections/asr/parts/utils/nmesc_clustering.py
|
gkucsko/NeMo
|
c1ae0a7744d9a0ac206f61b2883ce00c9b8339b9
|
[
"Apache-2.0"
] | 1
|
2022-03-06T14:09:02.000Z
|
2022-03-06T14:09:02.000Z
|
nemo/collections/asr/parts/utils/nmesc_clustering.py
|
gkucsko/NeMo
|
c1ae0a7744d9a0ac206f61b2883ce00c9b8339b9
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Copyright (c) 2007-2020 The scikit-learn developers.
# BSD 3-Clause License
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# NME-SC clustering is based on the implementation from the paper
# https://arxiv.org/pdf/2003.02405.pdf and the implementation from
# https://github.com/tango4j/Auto-Tuning-Spectral-Clustering.
from collections import Counter
from typing import Dict, List
import torch
from torch.linalg import eigh
@torch.jit.script
def cos_similarity(a: torch.Tensor, b: torch.Tensor, eps=torch.tensor(3.5e-4)):
    """
    Calculate the cosine similarities between the rows of two matrices.

    Args:
        a: (torch.tensor)
            Matrix containing speaker representation vectors. (N x embedding_dim)
        b: (torch.tensor)
            Matrix containing speaker representation vectors. (N x embedding_dim)
        eps: (torch.tensor)
            Small value added to the row norms to avoid division by zero.

    Returns:
        res (torch.tensor)
            N by N matrix containing the cosine similarities of the values.
    """
    a_norm = a / (torch.norm(a, dim=1).unsqueeze(1) + eps)
    # Bug fix: normalize b by its OWN row norms. The original divided b by
    # the norms of a, which is only correct in the special case a is b.
    b_norm = b / (torch.norm(b, dim=1).unsqueeze(1) + eps)
    res = torch.mm(a_norm, b_norm.transpose(0, 1))
    # The diagonal is forced to 1 (self-similarity).
    res.fill_diagonal_(1)
    return res
@torch.jit.script
def ScalerMinMax(X: torch.Tensor):
    """
    Linearly rescale the affinity matrix X so that its values span [0, 1].

    Args:
        X: (torch.tensor)
            Matrix containing cosine similarity values among embedding vectors (N x N)

    Returns:
        v_norm: (torch.tensor)
            Min-max normalized copy of X.
    """
    lowest = torch.min(X)
    highest = torch.max(X)
    v_norm = (X - lowest) / (highest - lowest)
    return v_norm
@torch.jit.script
def getEuclideanDistance(specEmbA: torch.Tensor, specEmbB: torch.Tensor, device: torch.device = torch.device('cpu')):
    """
    Compute pairwise squared Euclidean distances between two sets of
    spectral embedding vectors.

    Args:
        specEmbA: (torch.tensor)
            Matrix containing spectral embedding vectors from eigenvalue decomposition (N x embedding_dim).
        specEmbB: (torch.tensor)
            Matrix containing spectral embedding vectors from eigenvalue decomposition (N x embedding_dim).
        device: (torch.device)
            Device on which the computation runs.

    Returns:
        dis: (torch.tensor)
            Squared Euclidean distances between every pair of rows, squeezed.
    """
    # Broadcast rows of A against rows of B: (N1, 1, d) - (1, N2, d).
    left = specEmbA.to(device).unsqueeze(dim=1)
    right = specEmbB.to(device).unsqueeze(dim=0)
    squared_diff = (left - right) ** 2.0
    return squared_diff.sum(dim=-1).squeeze()
@torch.jit.script
def kmeans_plusplus_torch(
    X: torch.Tensor,
    n_clusters: int,
    random_state: int,
    n_local_trials: int = 30,
    device: torch.device = torch.device('cpu'),
):
    """
    Choose initial centroids for initializing k-means algorithm. The performance of
    k-means algorithm can vary significantly by the initial centroids. To alleviate
    this problem, k-means++ algorithm chooses initial centroids based on the probability
    proportional to the distance from the formally chosen centroids. The centroids
    selected by k-means++ algorithm improve the chance of getting more accurate and
    stable clustering results. The overall implementation of k-means++ algorithm is
    inspired by the numpy based k-means++ implementation in:
        https://github.com/scikit-learn/scikit-learn

    Originally, the implementation of the k-means++ algorithm in scikit-learn is based
    on the following research article:
        Arthur, David, and Sergei Vassilvitskii. k-means++: The advantages of careful
        seeding. Proceedings of the eighteenth annual ACM-SIAM symposium on Discrete
        algorithms, Society for Industrial and Applied Mathematics (2007)

    Args:
        X: (torch.tensor)
            Matrix containing cosine similarity values among embedding vectors (N x N)
        n_clusters: (int)
            Maximum number of speakers for estimating number of speakers.
            Shows stable performance under 20.
        random_state: (int)
            Seed variable for setting up a random state.
        n_local_trials: (int)
            Number of trials for creating initial values of the center points.
        device: (torch.device)
            Torch device variable.

    Returns:
        centers: (torch.tensor)
            The coordinates for center points that are used for initializing k-means algorithm.
        indices: (torch.tensor)
            The indices of the best candidate center points.
    """
    torch.manual_seed(random_state)
    X = X.to(device)
    n_samples, n_features = X.shape

    # The first center is a uniformly random sample; remaining slots are -1
    # until filled.
    centers = torch.zeros(n_clusters, n_features, dtype=X.dtype)
    center_id = torch.randint(0, n_samples, (1,)).long()
    indices = torch.full([n_clusters,], -1, dtype=torch.int)

    centers[0] = X[center_id].squeeze(0)
    indices[0] = center_id.squeeze(0)

    centers = centers.to(device)
    # Squared distance of every sample to its nearest chosen center so far;
    # current_pot is the total (the "potential") used for sampling.
    closest_dist_diff = centers[0, None].repeat(1, X.shape[0]).view(X.shape[0], -1) - X
    closest_dist_sq = closest_dist_diff.pow(2).sum(dim=1).unsqueeze(dim=0)
    current_pot = closest_dist_sq.sum()

    for c in range(1, n_clusters):
        # Sample n_local_trials candidate ids with probability proportional
        # to the current squared distances (inverse-CDF via searchsorted).
        rand_vals = torch.rand(n_local_trials) * current_pot.item()
        if len(closest_dist_sq.shape) > 1:
            torch_cumsum = torch.cumsum(closest_dist_sq, dim=1)[0]
        else:
            torch_cumsum = torch.cumsum(closest_dist_sq, dim=0)
        candidate_ids = torch.searchsorted(torch_cumsum, rand_vals.to(device))

        # Squared distances from every sample to every candidate center.
        N_ci = candidate_ids.shape[0]
        distance_diff = X[candidate_ids].repeat(1, X.shape[0]).view(X.shape[0] * N_ci, -1) - X.repeat(N_ci, 1)
        distance = distance_diff.pow(2).sum(dim=1).view(N_ci, -1)
        distance_to_candidates = torch.minimum(closest_dist_sq, distance)
        candidates_pot = distance_to_candidates.sum(dim=1)

        # Keep the candidate that minimizes the resulting potential.
        best_candidate = torch.argmin(candidates_pot)
        current_pot = candidates_pot[best_candidate]
        closest_dist_sq = distance_to_candidates[best_candidate]
        best_candidate = candidate_ids[best_candidate]

        centers[c] = X[best_candidate]
        indices[c] = best_candidate
    return centers, indices
@torch.jit.script
def kmeans_torch(
    X: torch.Tensor,
    num_clusters: int,
    threshold: float = 1e-4,
    iter_limit: int = 15,
    random_state: int = 0,
    device: torch.device = torch.device('cpu'),
):
    """
    Run k-means algorithm on the given set of spectral embeddings in X. The threshold
    and iter_limit variables are set to show the best performance on speaker diarization
    tasks. The overall implementation of k-means algorithm is inspired by the k-means
    algorithm implemented in https://github.com/scikit-learn/scikit-learn.

    References:
        Arthur, David, and Sergei Vassilvitskii. k-means++: The advantages of careful
        seeding. Proceedings of the eighteenth annual ACM-SIAM symposium on Discrete
        algorithms, Society for Industrial and Applied Mathematics (2007).

    Args:
        X: (torch.tensor)
            Cosine similarity matrix calculated from speaker embeddings
        num_clusters: (int)
            The estimated number of speakers.
        threshold: (float)
            This threshold limits the change of center values. If the square of
            the center shift values are bigger than this threshold, the iteration stops.
        iter_limit: (int)
            The maximum number of iterations that is allowed by the k-means algorithm.
        random_state: (int)
            Seed forwarded to the k-means++ initialization.
        device: (torch.device)
            Torch device variable

    Returns:
        selected_cluster_indices: (torch.tensor)
            The assigned cluster labels from the k-means clustering.
    """
    # Convert tensor type to float
    X = X.float().to(device)
    input_size = X.shape[0]

    # Initialize the cluster centers with kmeans_plusplus algorithm.
    plusplus_init_states = kmeans_plusplus_torch(X, n_clusters=num_clusters, random_state=random_state, device=device)
    centers = plusplus_init_states[0]

    iter_count = 0
    selected_cluster_indices = torch.zeros(input_size).int()

    for iter_count in range(iter_limit):
        euc_dist = getEuclideanDistance(X, centers, device=device)

        # A 1-D distance tensor means the problem degenerated (e.g. a single
        # row); keep the previous assignment and stop.
        if len(euc_dist.shape) <= 1:
            break
        else:
            selected_cluster_indices = torch.argmin(euc_dist, dim=1)

        center_inits = centers.clone()

        for index in range(num_clusters):
            selected_cluster = torch.nonzero(selected_cluster_indices == index).squeeze().to(device)
            chosen_indices = torch.index_select(X, 0, selected_cluster)

            # Re-seed an empty cluster with one random sample so its center
            # stays defined.
            if chosen_indices.shape[0] == 0:
                chosen_indices = X[torch.randint(len(X), (1,))]

            centers[index] = chosen_indices.mean(dim=0)

        # Calculate the delta from center_inits to centers
        center_delta_pow = torch.pow((centers - center_inits), 2)
        center_shift_pow = torch.pow(torch.sum(torch.sqrt(torch.sum(center_delta_pow, dim=1))), 2)

        # If the cluster centers are not changing significantly, stop the loop.
        if center_shift_pow < threshold:
            break

    return selected_cluster_indices
@torch.jit.script
def getTheLargestComponent(affinity_mat: torch.Tensor, seg_index: int, device: torch.device):
    """
    Find the largest affinity_mat connected components for each given node.
    This is for checking whether the affinity_mat is fully connected.

    Args:
        affinity_mat: (torch.tensor)
            A square matrix (tensor) containing normalized cosine distance values
        seg_index: (int)
            The segment index that is targeted to be explored.
        device: (torch.device)
            Torch device on which the boolean masks are allocated.

    Returns:
        connected_nodes: (torch.tensor)
            A tensor containing booleans that indicate whether the node is connected.
    """
    num_of_segments = affinity_mat.shape[0]
    # Boolean masks: nodes already absorbed into the component, and the current
    # BFS frontier whose neighbors still have to be explored.
    connected_nodes = torch.zeros(num_of_segments, dtype=torch.bool).to(device)
    nodes_to_explore = torch.zeros(num_of_segments, dtype=torch.bool).to(device)
    nodes_to_explore[seg_index] = True
    for k in range(num_of_segments):
        last_num_component = connected_nodes.sum()
        # Absorb the frontier into the connected set.
        torch.logical_or(connected_nodes, nodes_to_explore, out=connected_nodes)
        # Stop as soon as a sweep adds no new node: the component is complete.
        if last_num_component >= connected_nodes.sum():
            break
        indices = (nodes_to_explore == torch.tensor(True)).nonzero().t().squeeze()
        if len(indices.size()) == 0:
            # squeeze() collapsed a single frontier node to a 0-dim tensor; restore 1-dim.
            indices = indices.unsqueeze(0)
        for i in indices:
            # Add every neighbor of node i (nonzero affinity entries) to the frontier.
            neighbors = affinity_mat[i]
            torch.logical_or(nodes_to_explore, neighbors.squeeze(0), out=nodes_to_explore)
    return connected_nodes
@torch.jit.script
def isGraphFullyConnected(affinity_mat: torch.Tensor, device: torch.device):
    """
    Check whether the given affinity matrix is a fully connected graph,
    i.e. whether exploring from node 0 reaches every node.
    """
    num_nodes = affinity_mat.shape[0]
    reachable = getTheLargestComponent(affinity_mat, 0, device)
    return reachable.sum() == num_nodes
@torch.jit.script
def getKneighborsConnections(affinity_mat: torch.Tensor, p_value: int):
    """
    Binarize top-p values for each row from the given affinity matrix.
    The ones are written column-wise (out[top_idx, row]); the caller
    symmetrizes the result afterwards.
    """
    binarized = torch.zeros_like(affinity_mat).int()
    num_rows = affinity_mat.shape[0]
    for row_idx in range(num_rows):
        row = affinity_mat[row_idx, :]
        neighbor_idx = torch.argsort(row, descending=True)[:p_value]
        binarized[neighbor_idx, row_idx] = torch.ones(neighbor_idx.shape[0]).to(affinity_mat.device).int()
    return binarized
@torch.jit.script
def getAffinityGraphMat(affinity_mat_raw: torch.Tensor, p_value: int):
    """
    Calculate a binarized graph matrix (top-p neighbors per row) and
    symmetrize it by averaging with its transpose.
    """
    binarized = getKneighborsConnections(affinity_mat_raw, p_value)
    return 0.5 * (binarized + binarized.T)
@torch.jit.script
def getMinimumConnection(mat: torch.Tensor, max_N: torch.Tensor, n_list: torch.Tensor, device: torch.device):
    """
    Generate connections until fully connect all the nodes in the graph.
    If the graph is not fully connected, it might generate inaccurate results.

    Args:
        mat: (torch.tensor) raw affinity matrix.
        max_N: (torch.tensor) upper bound on the neighbor count p_value.
        n_list: (torch.tensor) candidate p_values to try, in increasing order.
        device: (torch.device) device used for the connectivity check.

    Returns:
        affinity_mat: binarized, symmetrized affinity graph.
        p_value: the neighbor count in effect when the loop stopped.
    """
    p_value = torch.tensor(1)
    affinity_mat = getAffinityGraphMat(mat, p_value)
    for i, p_value in enumerate(n_list):
        # NOTE(review): the connectivity check below inspects the graph built with
        # the PREVIOUS p_value, while affinity_mat is rebuilt with the new p_value
        # before the break — so on exit the returned matrix is one step "ahead" of
        # the matrix that actually passed the check. Confirm this is intentional.
        fully_connected = isGraphFullyConnected(affinity_mat, device)
        affinity_mat = getAffinityGraphMat(mat, p_value)
        if fully_connected or p_value > max_N:
            break
    return affinity_mat, p_value
@torch.jit.script
def getRepeatedList(mapping_argmat: torch.Tensor, score_mat_size: torch.Tensor):
    """
    Count how often each index occurs in the mapping tensor and return a
    per-index repetition count (int32). The counts are later used with
    repeat_interleave to build a repeated affinity matrix for fusing
    multiple affinity values; indices that never occur get count 0.
    """
    repeat_counts = torch.zeros(score_mat_size, dtype=torch.int32)
    unique_idxs, occurrences = torch.unique(mapping_argmat, return_counts=True)
    repeat_counts[unique_idxs] = occurrences.to(torch.int32)
    return repeat_counts
def get_argmin_mat(uniq_scale_dict: dict):
    """
    Calculate the mapping between the base scale and other scales. A segment from a
    longer scale is repeatedly mapped to a segment from a shorter scale or the base
    scale: each base-scale segment is assigned the index of the segment (in the other
    scale) whose center time is closest.

    Args:
        uniq_scale_dict (dict):
            Dictionary of embeddings and timestamps for each scale. Each entry holds
            'time_stamps', a list of "start end" strings.

    Returns:
        session_scale_mapping_dict (dict):
            Dictionary containing argmin arrays indexed by scale index.
    """
    scale_list = sorted(list(uniq_scale_dict.keys()))
    # Center time (anchor) of every segment, per scale.
    anchors = {}
    for scale_idx in scale_list:
        stamps = uniq_scale_dict[scale_idx]['time_stamps']
        bounds = torch.tensor([[float(s.split()[0]), float(s.split()[1])] for s in stamps])
        anchors[scale_idx] = torch.mean(bounds, dim=1)
    # The base scale is the one with the highest index (finest scale).
    base_anchor = anchors[max(scale_list)]
    session_scale_mapping_dict = {}
    for scale_idx in scale_list:
        curr_anchor = anchors[scale_idx]
        # |curr - base| via broadcasting: rows = base segments, cols = current-scale segments.
        dist = torch.abs(curr_anchor.unsqueeze(0) - base_anchor.unsqueeze(1))
        session_scale_mapping_dict[scale_idx] = torch.argmin(dist, dim=1)
    return session_scale_mapping_dict
def getMultiScaleCosAffinityMatrix(uniq_embs_and_timestamps: dict, device: torch.device = torch.device('cpu')):
    """
    Calculate cosine similarity values among speaker embeddings for each scale then
    apply multiscale weights to calculate the fused similarity matrix.

    Args:
        uniq_embs_and_timestamps: (dict)
            The dictionary containing embeddings, timestamps and multiscale weights.
            If uniq_embs_and_timestamps contains only one scale, single scale diarization
            is performed.
        device: (torch.device)
            Torch device on which the per-scale matrices are computed.

    Returns:
        fused_sim_d (torch.tensor):
            Affinity matrix obtained as the weighted sum of the per-scale affinity
            matrices, each expanded to the base-scale resolution.
        base_scale_emb (torch.tensor):
            The base scale embedding (the embeddings from the finest scale).
    """
    uniq_scale_dict = uniq_embs_and_timestamps['scale_dict']
    base_scale_idx = max(uniq_scale_dict.keys())
    base_scale_emb = uniq_scale_dict[base_scale_idx]['embeddings']
    multiscale_weights = uniq_embs_and_timestamps['multiscale_weights'].float().to(device)
    session_scale_mapping_dict = get_argmin_mat(uniq_scale_dict)
    # Removed unused local `score_mat_list` (was never read).
    repeated_tensor_list = []
    for scale_idx in sorted(uniq_scale_dict.keys()):
        mapping_argmat = session_scale_mapping_dict[scale_idx]
        emb_t = uniq_scale_dict[scale_idx]['embeddings'].half().to(device)
        score_mat_torch = getCosAffinityMatrix(emb_t)
        # Expand this scale's affinity matrix to base-scale size by repeating
        # each row/column according to how many base segments map onto it.
        repeat_list = getRepeatedList(mapping_argmat, torch.tensor(score_mat_torch.shape[0])).to(device)
        repeated_tensor_0 = torch.repeat_interleave(score_mat_torch, repeats=repeat_list, dim=0)
        repeated_tensor_1 = torch.repeat_interleave(repeated_tensor_0, repeats=repeat_list, dim=1)
        repeated_tensor_list.append(repeated_tensor_1)
    # Weighted sum over the scale dimension produces the fused affinity matrix.
    repp = torch.stack(repeated_tensor_list).float()
    fused_sim_d = torch.matmul(repp.permute(2, 1, 0), multiscale_weights.t()).squeeze(2).t()
    return fused_sim_d, base_scale_emb
@torch.jit.script
def getCosAffinityMatrix(_emb: torch.Tensor):
    """
    Calculate the cosine-similarity matrix of the speaker embeddings
    (in half precision) and min-max normalize it.
    """
    emb_half = _emb.half()
    affinity = cos_similarity(emb_half, emb_half)
    affinity = ScalerMinMax(affinity)
    return affinity
@torch.jit.script
def getLaplacian(X: torch.Tensor):
    """
    Calculate the unnormalized graph Laplacian L = D - X from an affinity matrix X.
    NOTE: the diagonal of X is zeroed IN PLACE, so the caller's tensor is mutated.
    """
    X.fill_diagonal_(0)
    degree_vec = torch.sum(torch.abs(X), dim=1)
    degree_mat = torch.diag_embed(degree_vec)
    return degree_mat - X
@torch.jit.script
def eigDecompose(laplacian: torch.Tensor, cuda: bool, device: torch.device = torch.device('cpu')):
    """
    Calculate eigenvalues and eigenvectors from the Laplacian matrix.

    Args:
        laplacian: (torch.tensor) symmetric Laplacian matrix.
        cuda: (bool) move the computation to `device` when True.
        device: (torch.device) target device for the CUDA path.

    Returns:
        lambdas: eigenvalues and diffusion_map: eigenvectors, as returned by `eigh`.
    """
    if cuda:
        # NOTE(review): `device` is typed torch.device with a non-None default, so it
        # should never be None here; also torch.cuda.current_device() returns an int
        # index, not a torch.device. This fallback branch looks dead/incorrect —
        # confirm against the callers before relying on it.
        if device is None:
            device = torch.cuda.current_device()
        laplacian = laplacian.float().to(device)
    else:
        laplacian = laplacian.float()
    lambdas, diffusion_map = eigh(laplacian)
    return lambdas, diffusion_map
@torch.jit.script
def getLamdaGaplist(lambdas: torch.Tensor):
    """
    Calculate the gaps between consecutive lambda (eigenvalue) values.
    Imaginary parts are discarded for complex inputs.
    """
    if torch.is_complex(lambdas):
        real_vals = torch.real(lambdas)
    else:
        real_vals = lambdas
    return real_vals[1:] - real_vals[:-1]
@torch.jit.script
def addAnchorEmb(emb: torch.Tensor, anchor_sample_n: int, anchor_spk_n: int, sigma: float):
    """
    Add randomly generated synthetic embeddings to make eigen analysis more stable.
    We refer to these embeddings as anchor embeddings.

    emb (torch.tensor):
        The input embedding from the embedding extractor.
    anchor_sample_n (int):
        Number of embedding samples per speaker.
        anchor_sample_n = 10 is recommended.
    anchor_spk_n (int):
        Number of speakers for synthetic embedding.
        anchor_spk_n = 3 is recommended.
    sigma (float):
        The amplitude of synthetic noise for each embedding vector.
        If the sigma value is too small, under-counting could happen.
        If the sigma value is too large, over-counting could happen.
        sigma = 50 is recommended.

    Returns:
        new_emb_np (torch.tensor): the original embeddings with
        anchor_spk_n * anchor_sample_n synthetic rows stacked in front.
    """
    emb_dim = emb.shape[1]
    # Per-dimension std of the real embeddings; used to scale the synthetic noise.
    std_org = torch.std(emb, dim=0)
    new_emb_list = []
    for _ in range(anchor_spk_n):
        # One random "speaker center", repeated anchor_sample_n times.
        # NOTE: callers seed torch.manual_seed() beforehand; the exact order of
        # the randn() calls below determines the output, so do not reorder them.
        emb_m = torch.tile(torch.randn(1, emb_dim), (anchor_sample_n, 1))
        emb_noise = torch.randn(anchor_sample_n, emb_dim).T
        # Normalize each sample's noise to max-abs 1 across dimensions,
        # then scale dimension d by std_org[d].
        emb_noise = torch.matmul(
            torch.diag(std_org), emb_noise / torch.max(torch.abs(emb_noise), dim=0)[0].unsqueeze(0)
        ).T
        emb_gen = emb_m + sigma * emb_noise
        new_emb_list.append(emb_gen)
    new_emb_list.append(emb)
    new_emb_np = torch.vstack(new_emb_list)
    return new_emb_np
def getEnhancedSpeakerCount(
    emb: torch.Tensor,
    cuda: bool,
    random_test_count: int = 5,
    anchor_spk_n: int = 3,
    anchor_sample_n: int = 10,
    sigma: float = 50,
):
    """
    Calculate the number of speakers using NME analysis with anchor embeddings.
    The input is augmented with synthetic anchor speakers, NME analysis is run for
    `random_test_count` different random seeds, and the final count is the majority
    vote minus the number of synthetic speakers (never below 1).

    Args:
        emb (torch.Tensor): input embeddings from the embedding extractor.
        cuda (bool): use cuda for the operations if cuda==True.
        random_test_count (int): number of randomized trials; higher is more accurate.
        anchor_spk_n (int): number of synthetic speakers (3 recommended).
        anchor_sample_n (int): embedding samples per synthetic speaker (10 recommended).
        sigma (float): amplitude of the synthetic noise (50 recommended; too small
            causes under-counting, too large causes over-counting).
    """
    estimates = []
    for seed in range(random_test_count):
        # Seed so each trial's anchor embeddings are reproducible.
        torch.manual_seed(seed)
        augmented_emb = addAnchorEmb(emb, anchor_sample_n, anchor_spk_n, sigma)
        affinity = getCosAffinityMatrix(augmented_emb)
        nmesc = NMESC(
            affinity,
            max_num_speaker=emb.shape[0],
            max_rp_threshold=0.15,
            sparse_search=True,
            sparse_search_volume=50,
            fixed_thres=-1.0,
            NME_mat_size=300,
            cuda=cuda,
        )
        est_num_of_spk, _ = nmesc.NMEanalysis()
        estimates.append(est_num_of_spk)
    # Majority vote over the trials, then subtract the synthetic speakers.
    most_common_est = Counter(estimates).most_common(1)[0][0]
    return max(most_common_est - anchor_spk_n, 1)
@torch.jit.script
def estimateNumofSpeakers(affinity_mat: torch.Tensor, max_num_speaker: int, cuda: bool = False):
    """
    Estimate the number of speakers using eigendecomposition on the Laplacian Matrix.

    Args:
        affinity_mat: (torch.tensor)
            N by N affinity matrix
        max_num_speaker: (int)
            Maximum number of clusters to consider for each session
        cuda: (bool)
            If cuda available eigendecomposition is computed on GPUs.

    Returns:
        num_of_spk: (torch.tensor) the estimated number of speakers
        lambdas: (torch.tensor) sorted eigenvalues
        gap_list: (torch.tensor) gaps between consecutive eigenvalues
    """
    laplacian = getLaplacian(affinity_mat)
    lambdas, _ = eigDecompose(laplacian, cuda)
    lambdas = torch.sort(lambdas)[0]
    gap_list = getLamdaGaplist(lambdas)
    # The speaker count is the position of the largest eigengap (1-based),
    # restricted to the first max_num_speaker gaps.
    search_span = min(max_num_speaker, gap_list.shape[0])
    num_of_spk = torch.argmax(gap_list[:search_span]) + 1
    return num_of_spk, lambdas, gap_list
@torch.jit.script
class SpectralClustering:
    """
    Perform spectral clustering by calculating spectral embeddings then run k-means clustering
    algorithm on the spectral embeddings.
    """
    def __init__(
        self,
        n_clusters: int = 8,
        random_state: int = 0,
        n_random_trials: int = 1,
        cuda: bool = False,
        device: torch.device = torch.device('cpu'),
    ):
        """
        Initialize the variables needed for spectral clustering and k-means++.

        Args:
            n_clusters (int):
                Number of the estimated (or oracle) number of speakers
            random_state (int):
                Random seed that determines a random state of k-means initialization.
            n_random_trials (int):
                Number of trials with different random seeds for k-means initialization.
                k-means++ algorithm is executed for multiple times then the final result
                is obtained by taking a majority vote.
            cuda (bool):
                if cuda=True, spectral clustering is done on GPU.
            device (torch.device):
                Torch device variable
        """
        self.n_clusters = n_clusters
        self.random_state = random_state
        # Always run at least one k-means trial.
        self.n_random_trials = max(n_random_trials, 1)
        self.cuda = cuda
        self.device = device
    def predict(self, X):
        """
        Call self.clusterSpectralEmbeddings() function to predict cluster labels.

        Args:
            X (torch.tensor):
                Affinity matrix input

        Returns:
            labels (torch.tensor):
                clustering label output
        """
        if X.shape[0] != X.shape[1]:
            raise ValueError("The affinity matrix is not a square matrix.")
        labels = self.clusterSpectralEmbeddings(X, cuda=self.cuda, device=self.device)
        return labels
    def clusterSpectralEmbeddings(self, affinity, cuda: bool = False, device: torch.device = torch.device('cpu')):
        """
        Perform k-means clustering on spectral embeddings. To alleviate the effect of randomness,
        k-means clustering is performed for (self.n_random_trials) times then the final labels are obtained
        by taking a majority vote. If speed is the major concern, self.n_random_trials should be set to 1.
        n_random_trials=30 is recommended to see an improved result.

        Args:
            affinity (torch.tensor):
                Affinity matrix input
            cuda (torch.bool):
                Use cuda for spectral clustering if cuda=True
            device (torch.device):
                Torch device variable

        Returns:
            labels (torch.tensor):
                clustering label output
        """
        spectral_emb = self.getSpectralEmbeddings(affinity, n_spks=self.n_clusters, cuda=cuda)
        labels_set = []
        # Run k-means once per seed in [random_state, random_state + n_random_trials).
        for random_state_seed in range(self.random_state, self.random_state + self.n_random_trials):
            _labels = kmeans_torch(
                X=spectral_emb, num_clusters=self.n_clusters, random_state=random_state_seed, device=device
            )
            labels_set.append(_labels)
        stacked_labels = torch.stack(labels_set)
        # NOTE(review): this selects the single trial whose index is the mode of the
        # per-segment mode indices — i.e. the most "representative" trial — rather
        # than taking a per-segment majority vote. Confirm this is the intended
        # voting scheme.
        label_index = torch.mode(torch.mode(stacked_labels, 0)[1])[0]
        labels = stacked_labels[label_index]
        return labels
    def getSpectralEmbeddings(self, affinity_mat: torch.Tensor, n_spks: int = 8, cuda: bool = False):
        """
        Calculate eigenvalues and eigenvectors to extract spectral embeddings.

        Args:
            affinity_mat (torch.tensor):
                Affinity matrix input
            n_spks (int):
                Number of spectral dimensions (clusters) to keep
            cuda (torch.bool):
                Use cuda for the eigendecomposition if cuda=True

        Returns:
            embedding (torch.Tensor):
                spectral embedding matrix (one row per segment)
        """
        laplacian = getLaplacian(affinity_mat)
        lambdas_, diffusion_map_ = eigDecompose(laplacian, cuda)
        # Keep the first n_spks eigenvectors and reverse their order.
        # NOTE(review): `eigh` conventionally returns ascending eigenvalues, so this
        # keeps the SMALLEST-eigenvalue eigenvectors (standard for Laplacians) —
        # confirm against the `eigh` implementation imported by this file.
        diffusion_map = diffusion_map_[:, :n_spks]
        inv_idx = torch.arange(diffusion_map.size(1) - 1, -1, -1).long()
        embedding = diffusion_map.T[inv_idx, :]
        return embedding[:n_spks].T
@torch.jit.script
class NMESC:
    """
    Normalized Maximum Eigengap based Spectral Clustering (NME-SC)
    uses Eigengap analysis to get an estimated p-value for
    affinity binarization and an estimated number of speakers.

    p_value (also referred to as p_neighbors) is for taking
    top p number of affinity values and convert those to 1 while
    convert the rest of values to 0.

    p_value can be also tuned on a development set without performing
    NME-analysis. Fixing p_value brings about significantly faster clustering
    speed, but the performance is limited to the development set.

    References:
        Tae Jin Park et al., Auto-Tuning Spectral Clustering for Speaker Diarization
        Using Normalized Maximum Eigengap, IEEE Signal Processing Letters 27 (2019),
        https://arxiv.org/abs/2003.02405

    Args:
        Please refer to def __init__().

    Methods:
        NMEanalysis():
            Performs NME-analysis to estimate p_value and the number of speakers
        subsampleAffinityMat(NME_mat_size):
            Subsamples the number of speakers to reduce the computational load
        getPvalueList():
            Generates a list containing p-values that need to be examined.
        getEigRatio(p_neighbors):
            Calculates g_p, which is a ratio between p_neighbors and the maximum eigengap
        getLamdaGaplist(lambdas):
            Calculates lambda gap values from an array contains lambda values
        estimateNumofSpeakers(affinity_mat):
            Estimates the number of speakers using lambda gap list
    """
    def __init__(
        self,
        mat,
        max_num_speaker: int = 10,
        max_rp_threshold: float = 0.15,
        sparse_search: bool = True,
        sparse_search_volume: int = 30,
        use_subsampling_for_NME: bool = True,
        fixed_thres: float = 0.0,
        cuda: bool = False,
        NME_mat_size: int = 512,
        device: torch.device = torch.device('cpu'),
    ):
        """
        Args:
            mat: (torch.tensor)
                Cosine similarity matrix calculated from the provided speaker embeddings.
            max_num_speaker: (int)
                Maximum number of speakers for estimating number of speakers.
                Shows stable performance under 20.
            max_rp_threshold: (float)
                Limits the range of parameter search.
                Clustering performance can vary depending on this range.
                Default is 0.15.
            sparse_search: (bool)
                To increase the speed of parameter estimation, sparse_search=True
                limits the number of p_values we search.
            sparse_search_volume: (int)
                Number of p_values we search during NME analysis.
                Default is 30. The lower the value, the faster NME-analysis becomes.
                However, a value lower than 20 might cause a poor parameter estimation.
            use_subsampling_for_NME: (bool)
                Use subsampling to reduce the calculational complexity.
                Default is True.
            fixed_thres: (float)
                A fixed threshold which can be used instead of estimating the
                threshold with NME analysis. If fixed_thres > 0, the NME-analysis
                p-value search is skipped and the threshold is used directly.
            cuda (bool)
                Use cuda for Eigen decomposition if cuda=True.
            NME_mat_size: (int)
                Targeted size of matrix for NME analysis.
            device: (torch.device)
                Torch device variable.
        """
        self.max_num_speaker: int = max_num_speaker
        self.max_rp_threshold = max_rp_threshold
        self.use_subsampling_for_NME = use_subsampling_for_NME
        self.NME_mat_size: int = NME_mat_size
        self.sparse_search = sparse_search
        self.sparse_search_volume = sparse_search_volume
        self.fixed_thres: float = fixed_thres
        self.cuda: bool = cuda
        # Small constant to avoid division by zero in ratio computations.
        self.eps = 1e-10
        # Upper bound for p_value; filled in by getPvalueList().
        self.max_N = torch.tensor(0)
        self.mat = mat
        self.p_value_list: torch.Tensor = torch.tensor(0)
        self.device = device
    def NMEanalysis(self):
        """
        Subsample the input matrix to reduce the computational load,
        then search p_values for the one with the smallest g_p ratio.
        Returns the estimated number of speakers and the p-value rescaled
        to the original (pre-subsampling) matrix size.
        """
        if self.use_subsampling_for_NME:
            subsample_ratio = self.subsampleAffinityMat(self.NME_mat_size)
        else:
            subsample_ratio = torch.tensor(1)
        # Scans p_values and find a p_value that generates
        # the smallest g_p value.
        eig_ratio_list = []
        est_spk_n_dict: Dict[int, torch.Tensor] = {}
        self.p_value_list = self.getPvalueList()
        for p_value in self.p_value_list:
            est_num_of_spk, g_p = self.getEigRatio(p_value)
            est_spk_n_dict[p_value.item()] = est_num_of_spk
            eig_ratio_list.append(g_p)
        index_nn = torch.argmin(torch.tensor(eig_ratio_list))
        rp_p_value = self.p_value_list[index_nn]
        affinity_mat = getAffinityGraphMat(self.mat, rp_p_value)
        # Checks whether the affinity graph is fully connected.
        # If not, it adds a minimum number of connections to make it fully connected.
        if not isGraphFullyConnected(affinity_mat, device=self.device):
            affinity_mat, rp_p_value = getMinimumConnection(
                self.mat, self.max_N, self.p_value_list, device=self.device
            )
        # Rescale the p-value back to the original matrix size.
        p_hat_value = (subsample_ratio * rp_p_value).type(torch.int)
        est_num_of_spk = est_spk_n_dict[rp_p_value.item()]
        return est_num_of_spk, p_hat_value
    def subsampleAffinityMat(self, NME_mat_size: int):
        """
        Perform subsampling of affinity matrix.
        This subsampling is for calculational complexity, not for performance.
        The smaller NME_mat_size is,
            - the bigger the chance of missing a speaker.
            - the faster p-value estimation speed (based on eigen decomposition).

        The recommended NME_mat_size is 250~750.
        However, if there are speakers who speak for very short period of time in the recording,
        this subsampling might make the system miss underrepresented speakers.
        Use this variable with caution.

        Args:
            NME_mat_size: (int)
                The targeted matrix size

        Returns:
            subsample_ratio : (float)
                The ratio between NME_mat_size and the original matrix size
        """
        # Keep every subsample_ratio-th row/column (strided subsampling).
        subsample_ratio = torch.max(torch.tensor(1), torch.tensor(self.mat.shape[0] / NME_mat_size)).type(torch.int)
        self.mat = self.mat[:: subsample_ratio.item(), :: subsample_ratio.item()]
        return subsample_ratio
    def getEigRatio(self, p_neighbors: int):
        """
        For a given p_neighbors value, calculate g_p, which is a ratio between p_neighbors and the
        maximum eigengap values.

        References:
            Tae Jin Park et al., Auto-Tuning Spectral Clustering for Speaker Diarization Using
            Normalized Maximum Eigengap, IEEE Signal Processing Letters 27 (2019),
            https://arxiv.org/abs/2003.02405

        Args:
            p_neighbors: (int)
                Determines how many binary graph connections we want to keep for each row.

        Returns:
            est_num_of_spk: (int)
                Estimated number of speakers
            g_p: (float)
                The ratio between p_neighbors value and the maximum eigen gap value.
        """
        affinity_mat = getAffinityGraphMat(self.mat, p_neighbors)
        est_num_of_spk, lambdas, lambda_gap_list = estimateNumofSpeakers(affinity_mat, self.max_num_speaker, self.cuda)
        arg_sorted_idx = torch.argsort(lambda_gap_list[: self.max_num_speaker], descending=True)
        max_key = arg_sorted_idx[0]
        # Normalize the largest eigengap by the largest eigenvalue (plus eps).
        max_eig_gap = lambda_gap_list[max_key] / (max(lambdas) + self.eps)
        g_p = (p_neighbors / self.mat.shape[0]) / (max_eig_gap + self.eps)
        return est_num_of_spk, g_p
    def getPvalueList(self):
        """
        Generates a p-value (p_neighbour) list for searching.
        """
        if self.fixed_thres > 0.0:
            # NOTE(review): torch.floor(...) of a scalar yields a 0-dim tensor, so the
            # p_value_list[0] indexing below looks like it would fail for the
            # fixed_thres > 0 path — confirm this branch is actually exercised.
            p_value_list = torch.floor(torch.tensor(self.mat.shape[0] * self.fixed_thres)).type(torch.int)
            self.max_N = p_value_list[0]
        else:
            self.max_N = torch.floor(torch.tensor(self.mat.shape[0] * self.max_rp_threshold)).type(torch.int)
            if self.sparse_search:
                # Search at most sparse_search_volume evenly spaced p-values in [1, max_N].
                N = torch.min(self.max_N, torch.tensor(self.sparse_search_volume).type(torch.int))
                p_value_list = torch.unique(torch.linspace(start=1, end=self.max_N, steps=N).type(torch.int))
            else:
                p_value_list = torch.arange(1, self.max_N)
        return p_value_list
def COSclustering(
    uniq_embs_and_timestamps,
    oracle_num_speakers=None,
    max_num_speaker: int = 8,
    min_samples_for_NMESC: int = 6,
    enhanced_count_thres: int = 80,
    max_rp_threshold: float = 0.15,
    sparse_search_volume: int = 30,
    fixed_thres: float = 0.0,
    cuda=False,
):
    """
    Clustering method for speaker diarization based on cosine similarity.
    NME-SC part is converted to torch.tensor based operations in NeMo 1.9.

    Args:
        uniq_embs_and_timestamps: (dict)
            The dictionary containing embeddings, timestamps and multiscale weights.
            If uniq_embs_and_timestamps contains only one scale, single scale diarization
            is performed.
        oracle_num_speakers: (int or None)
            The oracle number of speakers if known else None
        max_num_speaker: (int)
            The maximum number of clusters to consider for each session
        min_samples_for_NMESC: (int)
            The minimum number of samples required for NME clustering. This avoids
            zero p_neighbour_lists. If the input has fewer segments than min_samples,
            it is directed to the enhanced speaker counting mode.
        enhanced_count_thres: (int)
            For the short audio recordings under 60 seconds, clustering algorithm cannot
            accumulate enough amount of speaker profile for each cluster.
            Thus, getEnhancedSpeakerCount() employs anchor embeddings (dummy representations)
            to mitigate the effect of cluster sparsity.
            enhanced_count_thres = 80 is recommended.
        max_rp_threshold: (float)
            Limits the range of parameter search.
            Clustering performance can vary depending on this range.
            Default is 0.15.
        sparse_search_volume: (int)
            Number of p_values we search during NME analysis.
            Default is 30. The lower the value, the faster NME-analysis becomes.
            Lower than 20 might cause a poor parameter estimation.
        fixed_thres: (float)
            If a positive fixed_thres value is provided, NME-analysis process will be
            skipped. This value should be optimized on a development set to obtain a
            quality result. Default is 0.0, which performs NME-analysis to estimate
            the threshold.

    Returns:
        Y: (torch.tensor[int])
            Speaker label for each segment.
    """
    device = torch.device("cuda") if cuda else torch.device("cpu")
    # Get base-scale (the highest index) information from uniq_embs_and_timestamps.
    uniq_scale_dict = uniq_embs_and_timestamps['scale_dict']
    emb = uniq_scale_dict[max(uniq_scale_dict.keys())]['embeddings']
    # Trivial case: a single segment is always speaker 0.
    if emb.shape[0] == 1:
        return torch.zeros((1,), dtype=torch.int32)
    elif emb.shape[0] <= max(enhanced_count_thres, min_samples_for_NMESC) and oracle_num_speakers is None:
        # Short recording: use anchor-augmented speaker counting.
        est_num_of_spk_enhanced = getEnhancedSpeakerCount(emb, cuda)
    else:
        est_num_of_spk_enhanced = None
    if oracle_num_speakers:
        max_num_speaker = oracle_num_speakers
    mat, emb = getMultiScaleCosAffinityMatrix(uniq_embs_and_timestamps, device)
    nmesc = NMESC(
        mat,
        max_num_speaker=max_num_speaker,
        max_rp_threshold=max_rp_threshold,
        sparse_search=True,
        sparse_search_volume=sparse_search_volume,
        fixed_thres=fixed_thres,
        NME_mat_size=300,
        cuda=cuda,
        device=device,
    )
    if emb.shape[0] > min_samples_for_NMESC:
        est_num_of_spk, p_hat_value = nmesc.NMEanalysis()
        affinity_mat = getAffinityGraphMat(mat, p_hat_value)
    else:
        # Too few segments for NME analysis: cluster on the raw fused affinity.
        affinity_mat = mat
    # Oracle count takes precedence, then the enhanced estimate (if computed).
    if oracle_num_speakers:
        est_num_of_spk = oracle_num_speakers
    elif est_num_of_spk_enhanced:
        est_num_of_spk = est_num_of_spk_enhanced
    spectral_model = SpectralClustering(n_clusters=est_num_of_spk, cuda=cuda, device=device)
    Y = spectral_model.predict(affinity_mat)
    return Y.cpu().numpy()
| 37.915014
| 119
| 0.671424
|
bb6de1d9207427ad650fb6bbe583587a1ecfb155
| 2,872
|
cs
|
C#
|
gsa-simulator/Search.aspx.cs
|
joesoc/google-saml-bridge-for-windows
|
21d97ffba7d01b44d462cc68fdcc276ff7d01c29
|
[
"Apache-2.0"
] | null | null | null |
gsa-simulator/Search.aspx.cs
|
joesoc/google-saml-bridge-for-windows
|
21d97ffba7d01b44d462cc68fdcc276ff7d01c29
|
[
"Apache-2.0"
] | null | null | null |
gsa-simulator/Search.aspx.cs
|
joesoc/google-saml-bridge-for-windows
|
21d97ffba7d01b44d462cc68fdcc276ff7d01c29
|
[
"Apache-2.0"
] | null | null | null |
/*
* Copyright (C) 2006 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
using System;
using System.Collections;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Web;
using System.Web.SessionState;
using System.Web.UI;
using System.Web.UI.WebControls;
using System.Web.UI.HtmlControls;
using System.Security;
/// <summary>
/// Summary description for Search.
/// </summary>
/// <summary>
/// Simulates a GSA search endpoint: when a query parameter is present, it builds
/// a deflated/Base64-encoded SAML request via <see cref="BuildSamlRequest"/> and
/// redirects the browser to the configured login page for authentication.
/// </summary>
public partial class Search : System.Web.UI.Page
{
    protected void Page_Load(object sender, System.EventArgs e)
    {
        Common.log("enter page_load");
        // Put user code to initialize the page here
        // Nothing to do unless a search query ("q") was supplied.
        if (Request.Params["q"] == null || "".Equals(Request.Params["q"]))
        {
            return;
        }
        // Build the redirect query: the SAML request plus the current URL as
        // RelayState so the IdP can send the user back here after login.
        String query = "SamlRequest=" + BuildSamlRequest();
        query += "&RelayState=" + HttpUtility.UrlEncode(Request.Url.AbsoluteUri);
        Common.log(query);
        // Remember the protected resource being tested across the SSO round trip.
        Session.Add("URL", Request.Params["Resource"]);
        Common.log("URL to test: " + Request.Params["Resource"]);
        //Response.Write(query);
        Response.Redirect(Common.GetLoginPage(Request) + "?" + query);
    }
    /// <summary>
    /// Fills the SAML request template with a random ID, the current timestamp and
    /// this machine's name as issuer, then deflate-compresses, Base64-encodes and
    /// URL-encodes it for use as a query-string value.
    /// </summary>
    String BuildSamlRequest()
    {
        Common.log("inside BuildSamlRequest");
        // Put user code to initialize the page here
        String req = Common.SamlRequestTemplate;
        req = req.Replace("%ID", Common.GenerateRandomString());
        req = req.Replace("%INSTANT", Common.FormatNow());
        req = req.Replace("%ISSUER", Server.MachineName);
        Common.log("request before encoding=" + req);
        //byte[] decData = new System.Text.UTF8Encoding().GetBytes(req);
        Common.log("before deflate length=" + req.Length);
        String encoded = Common.Compress(req);
        Common.log("base64 encoded string: " + encoded);
        return HttpUtility.UrlEncode(encoded);
    }
    #region Web Form Designer generated code
    override protected void OnInit(EventArgs e)
    {
        //
        // CODEGEN: This call is required by the ASP.NET Web Form Designer.
        //
        InitializeComponent();
        base.OnInit(e);
    }
    /// <summary>
    /// Required method for Designer support - do not modify
    /// the contents of this method with the code editor.
    /// </summary>
    private void InitializeComponent()
    {
        this.Load += new System.EventHandler(this.Page_Load);
    }
    #endregion
}
| 31.56044
| 81
| 0.667131
|
a3e1f264a377f19c9ad1b3ed8c17e1c2dc768f32
| 451
|
java
|
Java
|
src/gta_text/locations/ChineseTakeaway.java
|
SteveSmith16384/gta-mud
|
f8efd298a98b002236e642a9a73244828a11ebf7
|
[
"MIT"
] | 3
|
2020-04-03T14:49:39.000Z
|
2021-08-28T04:05:55.000Z
|
src/gta_text/locations/ChineseTakeaway.java
|
SteveSmith16384/gta-mud
|
f8efd298a98b002236e642a9a73244828a11ebf7
|
[
"MIT"
] | null | null | null |
src/gta_text/locations/ChineseTakeaway.java
|
SteveSmith16384/gta-mud
|
f8efd298a98b002236e642a9a73244828a11ebf7
|
[
"MIT"
] | null | null | null |
package gta_text.locations;
import java.io.IOException;
import gta_text.npcs.TakeawayAssistant;
/**
 * A Chinese takeaway location that is always staffed by a takeaway assistant NPC.
 */
public class ChineseTakeaway extends Location {

    /**
     * Creates the takeaway location and staffs it with an assistant.
     *
     * @param no location identifier passed through to {@code Location}.
     * @throws IOException if the underlying location data cannot be loaded.
     */
    public ChineseTakeaway(String no) throws IOException {
        super(no);
        new TakeawayAssistant(this);
    }

    /**
     * Regenerates the resident NPCs: respawns the takeaway assistant if it is
     * no longer present at this location.
     *
     * @throws IOException if the assistant cannot be created.
     */
    public void regenChars() throws IOException {
        // Idiomatic negation instead of comparing against "false".
        if (!this.containsCharacter(TakeawayAssistant.NAME)) {
            new TakeawayAssistant(this);
        }
    }
}
| 21.47619
| 70
| 0.687361
|
23e688dc4b36d5306f8dc867c807ba58b1f37b1f
| 414
|
js
|
JavaScript
|
docs/search/variables_3.js
|
MacanPN/single-cell-tools
|
1c6cff36ccaa0263067cb7d0efca96f3cd01e95f
|
[
"MIT"
] | null | null | null |
docs/search/variables_3.js
|
MacanPN/single-cell-tools
|
1c6cff36ccaa0263067cb7d0efca96f3cd01e95f
|
[
"MIT"
] | 2
|
2018-02-05T17:06:48.000Z
|
2018-02-07T00:49:17.000Z
|
docs/search/variables_3.js
|
MacanPN/single-cell-tools
|
1c6cff36ccaa0263067cb7d0efca96f3cd01e95f
|
[
"MIT"
] | 2
|
2018-02-05T17:24:32.000Z
|
2018-02-05T17:26:33.000Z
|
// Auto-generated search index (Doxygen-style, found under docs/search/):
// each entry maps a lowercase key to [display name, [link HTML fragment,
// target page/anchor, tooltip scope]]. Do not edit by hand.
var searchData=
[
  ['result_5ffilename',['result_filename',['../classsc-analyses_1_1settings.html#a8ad29f9ce6fda35a27d688db63be1de9',1,'sc-analyses::settings']]],
  ['run_5fmode',['run_mode',['../classsc-analyses_1_1settings.html#abd0aa46bbc29419b20ee3cc53cb80575',1,'sc-analyses::settings']]],
  ['run_5fmodes',['run_modes',['../namespacesc-analyses.html#a5c4820a68409c2ca9d5965d48c1643c8',1,'sc-analyses']]]
];
| 59.142857
| 145
| 0.763285
|
1b44a2d581c673b65d642aeb7251e9e6b819c0cb
| 783
|
dart
|
Dart
|
ui/kefu_workbench/lib/widgets/sliver_refresh_control.dart
|
weoil/kefu_server
|
2a6d3a881265620d8be90a0e48a4f9dd9931ee0d
|
[
"Apache-2.0"
] | 165
|
2019-12-23T07:02:48.000Z
|
2022-03-28T06:53:50.000Z
|
ui/kefu_workbench/lib/widgets/sliver_refresh_control.dart
|
weoil/kefu_server
|
2a6d3a881265620d8be90a0e48a4f9dd9931ee0d
|
[
"Apache-2.0"
] | 10
|
2020-03-07T12:42:22.000Z
|
2022-03-25T19:24:30.000Z
|
ui/kefu_workbench/lib/widgets/sliver_refresh_control.dart
|
weoil/kefu_server
|
2a6d3a881265620d8be90a0e48a4f9dd9931ee0d
|
[
"Apache-2.0"
] | 60
|
2020-01-03T05:02:40.000Z
|
2022-01-27T06:25:12.000Z
|
import '../core_flutter.dart';
/// Builds a Cupertino pull-to-refresh sliver whose indicator is drawn by
/// `RefreshWidget`, forwarding the given colors and no-refresh configuration.
///
/// [onRefresh] is invoked when the pull gesture triggers a refresh.
/// [isNoRefresh] and [noRefreshWidget] let callers disable refreshing and
/// show a custom placeholder instead.
CupertinoSliverRefreshControl sliverRefreshControl({
  VoidCallback onRefresh,
  Color bgColor = const Color(0xffffffff),
  bool isNoRefresh = false,
  Color iconColor = Colors.grey,
  Color textColor = const Color(0xff666666),
  Widget noRefreshWidget
}){
  return CupertinoSliverRefreshControl(
    // Delegate all indicator drawing to RefreshWidget, passing through the
    // pull state/extent values supplied by the framework.
    builder: (context, refreshState, pulledExtent, refreshTriggerPullDistance, refreshIndicatorExtent){
      return RefreshWidget(
        refreshState,
        pulledExtent,
        refreshTriggerPullDistance,
        refreshIndicatorExtent,
        bgColor: bgColor,
        isNoRefresh: isNoRefresh,
        iconColor: iconColor,
        textColor: textColor,
        noRefreshWidget: noRefreshWidget
      );
    },
    onRefresh: onRefresh
  );
}
| 27
| 103
| 0.708812
|
37513ec12039e2b5f843bfb2a5d15677eaf4d6c3
| 3,657
|
swift
|
Swift
|
Chapter07/0711/SpaceInvaders/SpaceInvaders/Enemy.swift
|
CoderDream/SwiftGameInAction
|
f53523efe85c15ecb39b70472b2f2b702570cf0b
|
[
"MIT"
] | null | null | null |
Chapter07/0711/SpaceInvaders/SpaceInvaders/Enemy.swift
|
CoderDream/SwiftGameInAction
|
f53523efe85c15ecb39b70472b2f2b702570cf0b
|
[
"MIT"
] | null | null | null |
Chapter07/0711/SpaceInvaders/SpaceInvaders/Enemy.swift
|
CoderDream/SwiftGameInAction
|
f53523efe85c15ecb39b70472b2f2b702570cf0b
|
[
"MIT"
] | 1
|
2021-03-21T05:10:57.000Z
|
2021-03-21T05:10:57.000Z
|
//
// Enemy.swift
// SpaceInvaders
//
// Created by CoderDream on 2019/3/25.
// Copyright © 2019 CoderDream. All rights reserved.
//
import Foundation
import UIKit
class Enemy: NSObject {
// 敌人的大小
var eSize: Int = 32
var gameView: UIView = UIView()
var enemyList = Array<UIImageView>() // NSMutableArray = NSMutableArray()
var enemyRows: Int = 5
var enemyColumns: Int = 5
// 移动敌人
var enemyTimer: Timer = Timer()
// 敌人子弹
var enemyBulletTimer: Timer = Timer()
var ememiesBullet: EnemyBullet? = EnemyBullet()
var minXPos: NSInteger = 0
var maxXPos: NSInteger = 0
var goingLeft :Bool = false
func initEnemies(gameView: UIView) {
// 添加敌人
self.gameView = gameView
let enemyImage = UIImage(named: "enemy01.png")
// 敌人位置
self.minXPos = 10
self.maxXPos = 365
var rowCount: Int = 0
let startX: Int = 10
let startY: Int = 0
let enemyCount = enemyRows * enemyColumns
print("enemyCount \(enemyCount)")
for i in 0 ..< enemyCount {
let columnMod: Int = i % enemyColumns
if columnMod == 0 {
rowCount += 1
}
// 设置位置
let xPos: Int = startX + ((eSize * columnMod) + (columnMod * 5))
let yPos: Int = startY + ((eSize * rowCount) + (rowCount * 10))
// 创建一个 enemyView 的图像对象,用来显示敌人
let enemyView = UIImageView(image: enemyImage)
enemyView.frame = CGRect(x: xPos, y: yPos, width: eSize, height: eSize)
enemyView.tag = i
self.enemyList.insert(enemyView, at: i)//(enemyView)
self.gameView.addSubview(enemyView)
}
}
func cleanEnemies() {
//self.enemyList.removeAllObjects()
}
func startTimers() {// 0.03
self.enemyTimer = Timer.scheduledTimer(timeInterval: 0.03, target: self, selector: #selector(moveEnemies), userInfo: nil, repeats: true)
self.enemyBulletTimer = Timer.scheduledTimer(timeInterval: 1, target: self, selector: #selector(dropBomb), userInfo: nil, repeats: true)
}
@objc func moveEnemies(){
var enemyView: UIImageView = self.enemyList[0] //as! UIImageView
// 判断敌人的 x 的位置是否小于最小的 x 的位置,即 10
if Int(enemyView.frame.origin.x) <= self.minXPos {
goingLeft = false
}
enemyView = self.enemyList[enemyColumns - 1] //as! UIImageView
// 判断敌人的 x 的位置是否大于于最大的 x 的位置,即 365(1920*1080)
//print("maxXPos: \(self.maxXPos)")
if Int(enemyView.frame.origin.x) >= self.maxXPos {
goingLeft = true
}
for i in 0 ..< self.enemyList.count {
enemyView = self.enemyList[i] //as! UIImageView;
var xPos: Int = 0
if goingLeft {
xPos = Int(enemyView.frame.origin.x) - 3;
}
else {
xPos = Int(enemyView.frame.origin.x) + 3;
}
enemyView.frame = CGRect(x: xPos, y: Int(enemyView.frame.origin.y), width: eSize, height: eSize)
self.gameView.addSubview(enemyView)
}
}
@objc func dropBomb(){
//let newBullet: EnemyBullet = EnemyBullet()
//newBullet.fireBullet(gameView: self.gameView, enemyList: self.enemyList)
// if self.ememiesBullet == nil { // || self.ememiesBullet?.isActive == false {
ememiesBullet!.fireBullet(gameView: self.gameView, enemyList: (self.enemyList as NSArray) as! Array<UIImageView>)
// }
}
}
| 32.945946
| 144
| 0.566038
|
c9a2f2a2d523e11d40f159b9f26230c4b39fb57c
| 1,705
|
tsx
|
TypeScript
|
src/App.tsx
|
no3forfuk/Frame_rn
|
697d0acbda0288aae697b7dcf0398ce1112f54de
|
[
"MIT"
] | 1
|
2020-07-23T09:24:37.000Z
|
2020-07-23T09:24:37.000Z
|
src/App.tsx
|
no3forfuk/Frame_rn
|
697d0acbda0288aae697b7dcf0398ce1112f54de
|
[
"MIT"
] | null | null | null |
src/App.tsx
|
no3forfuk/Frame_rn
|
697d0acbda0288aae697b7dcf0398ce1112f54de
|
[
"MIT"
] | 1
|
2020-08-28T06:32:18.000Z
|
2020-08-28T06:32:18.000Z
|
import React, {Component, Fragment} from "react"
import "./App.less"
import * as redux from "react-redux"
import SOCKET from "./socket"
import {LOGIN_SWITCH, SOCKET_SWITCH} from "./switch"
import {storage} from "./utils"
import {socketConf} from "./env"
import {renderRoutes} from "react-router-config"
const {connect} = redux
interface Props {
history: any
route: any
}
interface State {}
class App extends Component<Props, State> {
private socket: any
constructor(props: Props) {
super(props)
this.state = {}
}
componentDidMount(): void {
this.isLogin()
}
onSocketMessage = (message: {}) => {
console.log(message)
}
isLogin = () => {
const userId = storage.getSession("userId")
const {history} = this.props
if (userId) {
//开启socket
// this.startSocket();
// this.props.history.push('/home')
} else {
if (LOGIN_SWITCH) {
history.replace("/login")
}
}
}
startSocket = () => {
if (!SOCKET_SWITCH) return
this.socket = new SOCKET({
url: socketConf.url,
onMessage: this.onSocketMessage
})
this.socket.start()
}
public render() {
const {routes} = this.props.route
return (
<Fragment>
<div className="qh-layout">{renderRoutes(routes)}</div>
</Fragment>
)
}
}
const mapStateToProps = (store: {}) => {
return store
}
const mapDispatchToProps = (dispatch: void) => {
return {
dispatch
}
}
export default connect(mapStateToProps, mapDispatchToProps)(App)
| 22.142857
| 71
| 0.558944
|
fa2773883101d19fb4a9236d013891fd8d8796f5
| 2,172
|
cpp
|
C++
|
src/e101_200/q1414.cpp
|
extremedeckguru/leetcode
|
e45923ccbca7ae1c5f85d8c996392e8b492c1306
|
[
"MIT"
] | 9
|
2020-04-09T12:37:50.000Z
|
2021-04-01T14:01:14.000Z
|
src/e101_200/q1414.cpp
|
extremedeckguru/leetcode
|
e45923ccbca7ae1c5f85d8c996392e8b492c1306
|
[
"MIT"
] | 3
|
2020-05-05T02:43:54.000Z
|
2020-05-20T11:12:16.000Z
|
src/e101_200/q1414.cpp
|
extremedeckguru/leetcode
|
e45923ccbca7ae1c5f85d8c996392e8b492c1306
|
[
"MIT"
] | 5
|
2020-04-17T02:32:10.000Z
|
2020-05-20T10:12:26.000Z
|
/*
#面试刷题# 第0101期
#Leetcode# Q1414 找出和为K的斐波那契数的最小个数
难度:中
给定一个数k,返回其总和等于k的斐波那契数的最小值,一个斐波那契数是否可以多次使用。
斐波那契数的定义为。
F1 = 1
F2 = 1
Fn = Fn-1 + Fn-2 ,对于 n > 2。
可以保证,对于给定的约束,我们总是可以找到这样的斐波那契数和k。
约束条件:
1 <= k <= 10^9
示例1:
Input: k = 7
Output: 2
Explanation: The Fibonacci numbers are: 1, 1, 2, 3, 5, 8, 13, ...
For k = 7 we can use 2 + 5 = 7.
示例2:
Input: k = 10
Output: 2
Explanation: For k = 10 we can use 2 + 8 = 10.
示例3:
Input: k = 19
Output: 3
Explanation: For k = 19 we can use 1 + 5 + 13 = 19.
*/
#include "leetcode.h"
namespace q1414 {
template<typename T>
bool run_testcases() {
T slt;
CHECK_RET(2 == slt.findMinFibonacciNumbers(7));
CHECK_RET(2 == slt.findMinFibonacciNumbers(10));
CHECK_RET(3 == slt.findMinFibonacciNumbers(19));
return true;
}
// Runtime: 4 ms, faster than 79.64%
// Memory Usage: 6.5 MB, less than 100.00%
class Solution {
public:
int findMinFibonacciNumbers(int k) {
// compute fibonacci
vector<int> fib{1,1};
for (int i=0; fib[i] + fib[i+1] <= k; ++i) {
fib.push_back(fib[i] + fib[i+1]);
}
// search answer starting from the bigger
int ret = 0;
for (auto iter = fib.rbegin(); iter != fib.rend(); ++iter) {
if (*iter > k) {continue;}
k -= *iter;
++ret;
if(0 == k) {break;}
}
return ret;
}
};
TEST(Q1414, Solution) {EXPECT_TRUE(run_testcases<Solution>());}
// @Tiabeanie2
class Solution02 {
public:
int findMinFibonacciNumbers(int k) {
vector<int> fibos = {1, 1};
while (fibos.back() < 1e9)
{
fibos.push_back(fibos.back() + fibos[fibos.size() - 2]);
}
int ans = 0;
while (k > 1)
{
auto it = std::lower_bound(fibos.begin(), fibos.end(), k);
if (*it == k)
{
ans ++;
break;
}
--it;
ans ++;
k -= *it;
}
if (k == 1) ans ++;
return ans;
}
};
TEST(Q1414, Solution02) {EXPECT_TRUE(run_testcases<Solution02>());}
}; // nam;espace q1414
| 21.939394
| 70
| 0.521179
|
20e41c8ee94ec58159077b9913a4f4539e8bbfb4
| 10,245
|
py
|
Python
|
cogs/tweet.py
|
being24/sagumo
|
be4b4da25a7e19ec1dc86e543bc72333248bb18e
|
[
"MIT"
] | 4
|
2020-04-04T14:56:55.000Z
|
2021-01-02T08:21:08.000Z
|
cogs/tweet.py
|
being24/sagumo
|
be4b4da25a7e19ec1dc86e543bc72333248bb18e
|
[
"MIT"
] | 119
|
2020-01-31T09:57:45.000Z
|
2022-03-25T07:07:01.000Z
|
cogs/tweet.py
|
being24/sagumo
|
be4b4da25a7e19ec1dc86e543bc72333248bb18e
|
[
"MIT"
] | 4
|
2020-04-06T09:05:14.000Z
|
2021-06-04T07:32:51.000Z
|
# !/usr/bin/env python3
# -*- coding: utf-8 -*-
import asyncio
import logging
import os
from datetime import datetime
from functools import partial, wraps
import discord
from discord.ext import commands, tasks
from discord.ext.menus import ListPageSource, MenuPages
from dotenv import load_dotenv
from requests_oauthlib import OAuth1Session
from .utils.common import CommonUtil
from .utils.confirm import Confirm
from .utils.tweet_manager import TweetManager, TweetParameter
def async_wrap(func):
@wraps(func)
async def run(*args, loop=None, executor=None, **kwargs):
if loop is None:
loop = asyncio.get_event_loop()
pfunc = partial(func, *args, **kwargs)
return await loop.run_in_executor(executor, pfunc)
return run
def is_in_guild():
def predicate(ctx):
return ctx.guild.id in [410454762522411009, 609058923353341973]
return commands.check(predicate)
class TweetList(ListPageSource):
def __init__(self, ctx, data):
self.ctx = ctx
super().__init__(data, per_page=10)
async def write_page(self, menu, fields=[]):
offset = (menu.current_page * self.per_page) + 1
len_data = len(self.entries)
embed = discord.Embed(
title="承認待ちのツイートは以下の通りです",
description=f"{len_data}件待機中",
color=0x0088ff)
embed.set_thumbnail(url=self.ctx.guild.me.avatar.replace(format="png").url)
embed.set_footer(
text=f"{offset:,} - {min(len_data, offset+self.per_page-1):,} of {len_data:,} records.")
for num, tweet in enumerate(fields):
time = tweet.created_at.strftime('%Y-%m-%d %H:%M:%S')
tweet_author = self.ctx.guild.get_member(tweet.author_id)
val = f"**ID** : {tweet.message_id} by : {tweet_author.mention} content : {tweet.content} time : {time})"
embed.add_field(
name=f"{num+offset}番目",
value=f"{val}",
inline=False)
return embed
async def format_page(self, menu, entries):
'''
fields = []
for entry in entries:
fields.append((entry.brief, syntax(entry)))
'''
return await self.write_page(menu, entries)
class DiscordTweet(commands.Cog):
def __init__(self, bot) -> None:
self.bot = bot
dotenv_path = os.path.join(os.path.dirname(__file__), '.env')
load_dotenv(dotenv_path)
CK = os.getenv('CONSUMER_KEY')
CS = os.getenv('CONSUMER_SECRET')
AT = os.getenv('ACCESS_TOKEN')
ATS = os.getenv('ACCESS_TOKEN_SECRET')
if any([token is None for token in [CK, CS, AT, ATS]]):
raise FileNotFoundError("API key not found error!")
self.twitter = OAuth1Session(CK, CS, AT, ATS)
self.url = "https://api.twitter.com/1.1/statuses/update.json"
self.async_tweet = async_wrap(self.send_tweet)
self.c = CommonUtil()
self.tweet_mng = TweetManager()
self.finish = '\N{WHITE HEAVY CHECK MARK}'
self.tweet_timer.stop()
self.tweet_timer.start()
@commands.Cog.listener()
async def on_ready(self):
"""on_ready時に発火する関数
"""
await self.tweet_mng.create_table()
async def start_paginating(self, ctx, reaction_list_of_guild):
if reaction_list_of_guild is None:
await ctx.send("集計中のリアクションはありません")
else:
menu = MenuPages(source=TweetList(ctx, reaction_list_of_guild),
delete_message_after=True,
timeout=60.0)
await menu.start(ctx)
def log_tweet(self, ctx):
error_content = f'tweeted\nmessage_content: {ctx.message.content}\nmessage_author : {ctx.message.author}\n{ctx.message.jump_url}'
logging.warning(error_content, exc_info=True)
def send_tweet(self, tweet: str) -> int:
"""tweetをする関数
Args:
tweet (str): 内容
Returns:
bool: 成否
"""
params = {"status": tweet}
res = self.twitter.post(self.url, params=params) # post送信
if res.status_code == 200: # 正常投稿出来た場合
return 200
else: # 正常投稿出来なかった場合
return res.status_code
@commands.command(description='ツイートを実施')
@is_in_guild()
async def tweet(self, ctx, content: str):
"""ツイートを行うコマンド、管理者の1名以上の承認で投稿されます"""
if not await self.c.has_bot_manager(ctx):
return
self.log_tweet(ctx)
now = datetime.now()
embed = discord.Embed(
title="ツイートを行います",
description="管理者1名のリアクションで投稿します",
color=0x37d2c0)
embed.add_field(
name="内容",
value=f"{content}",
inline=True)
embed.set_footer(
text=f"created_at : {now.strftime('%Y/%m/%d %H:%M')}")
msg = await ctx.reply(embed=embed)
await msg.add_reaction(self.finish)
data = TweetParameter(
message_id=msg.id,
author_id=ctx.author.id,
channel_id=ctx.channel.id,
content=content)
await self.tweet_mng.register_tweetdata(data)
@commands.command(description='承認待ちを中止')
@is_in_guild()
async def remove_tweet(self, ctx, message_id: int):
"""DBから情報を削除し、承認待ちを中止するコマンド"""
if not await self.c.has_bot_user(ctx):
return
if await self.tweet_mng.is_exist(message_id):
confirm = await Confirm(f'ID : {message_id}の承認待ちを終了し、削除しますか?').prompt(ctx)
if confirm:
await self.tweet_mng.remove_tweetdata(message_id)
await ctx.reply(f"ID : {message_id}は{ctx.author}により削除されました")
else:
notify_msg = await ctx.send(f"ID : {message_id}の削除を中止しました")
await self.c.autodel_msg(notify_msg)
else:
notify_msg = await ctx.send(f"ID : {message_id}は待機していません")
await self.c.autodel_msg(notify_msg)
@ commands.command(aliases=['lstw', 'lst'],
description='待機中のツイート一覧', invoke_without_command=True)
async def list_tweet(self, ctx):
"""待機中のツイートを表示するコマンド"""
if not await self.c.has_bot_manager(ctx):
return
reaction_list_of_guild = await self.tweet_mng.get_all_tweetdata()
if reaction_list_of_guild is None:
await ctx.send("集計中のリアクションはありません")
return
if len(reaction_list_of_guild) == 0:
await ctx.send("集計中のリアクションはありません")
return
await self.start_paginating(ctx, reaction_list_of_guild)
async def delete_expired_tweet(self) -> None:
"""30日前から待機中のツイートを削除する関数
"""
all_aggregation = await self.tweet_mng.get_all_tweetdata()
if all_aggregation is None:
return
now = datetime.now()
for reaction in all_aggregation:
elapsed_time = now - reaction.created_at
if elapsed_time.days >= 30:
await self.tweet_mng.remove_tweetdata(reaction.message_id)
channel = self.bot.get_channel(reaction.channel_id)
if isinstance(channel, discord.Thread):
if channel.archived:
return
msg = await channel.fetch_message(reaction.message_id)
await msg.clear_reactions()
@ commands.Cog.listener()
async def on_raw_reaction_add(self, reaction: discord.RawReactionActionEvent):
if reaction.member is None:
return
if reaction.member.bot:
return
if reaction.guild_id is None:
return
if tweet_data := await self.tweet_mng.get_tweetdata(reaction.message_id):
main_guild = self.bot.get_guild(410454762522411009)
main_guild = self.bot.get_guild(609058923353341973)
admin_role = discord.utils.get(
main_guild.roles, name='サイト管理者')
member_role_ids = [role.id for role in reaction.member.roles]
member_role_ids.append(reaction.user_id)
channel = self.bot.get_channel(reaction.channel_id)
msg = await channel.fetch_message(reaction.message_id)
if admin_role not in reaction.member.roles:
try:
await msg.remove_reaction(str(reaction.emoji), reaction.member)
except discord.Forbidden:
await channel.send('リアクションの除去に失敗しました.')
notify_msg = await channel.send(f"{reaction.member.mention} 権限無しのリアクションは禁止です!")
# await self.autodel_msg(notify_msg)
return
now = datetime.now()
if reaction.emoji.name == self.finish:
result = await self.async_tweet(tweet_data.content)
if result != 200:
await msg.reply(f'ツイートに失敗しました\nerror_code : {result}')
else:
embed = msg.embeds[0]
embed.add_field(
name="ツイートしました",
value=f"内容 : {tweet_data.content}",
inline=False)
embed.add_field(
name="ツイート日時",
value=f"{now.strftime('%Y/%m/%d %H:%M')}",
inline=False)
await msg.edit(embed=embed)
await msg.clear_reactions()
await self.tweet_mng.remove_tweetdata(reaction.message_id)
@ tasks.loop(hours=12.0)
async def tweet_timer(self) -> None:
await self.delete_expired_tweet()
@tweet_timer.before_loop
async def before_printer(self):
print('tweet waiting...')
await self.bot.wait_until_ready()
@tweet_timer.error
async def error(self, arg):
now = discord.utils.utcnow()
jst_now = self.c.convert_utc_into_jst(now)
print(jst_now, self.qualified_name, arg)
logging.warning(arg)
def setup(bot):
bot.add_cog(DiscordTweet(bot))
"""
if __name__ == "__main__":
DT = DiscordTweet()
async_tweet = async_wrap(DiscordTweet.tweet)
result = asyncio.run(async_tweet(DT, "asyncio test"))
"""
| 32.52381
| 137
| 0.595217
|
b7fce1cd7e16e2041c5aae414ccbd8171fe09443
| 7,445
|
cs
|
C#
|
Samples/SampleBrowser/ProductSamples/WizardSamples/Demo/Features/MainWindow.xaml.cs
|
Actipro/WPF-Controls
|
7a20f3ca11c4c2d24149681145da0b029cedec06
|
[
"MIT"
] | 40
|
2021-02-19T00:57:13.000Z
|
2022-03-09T05:04:28.000Z
|
Samples/SampleBrowser/ProductSamples/WizardSamples/Demo/Features/MainWindow.xaml.cs
|
Actipro/WPF-Controls
|
7a20f3ca11c4c2d24149681145da0b029cedec06
|
[
"MIT"
] | null | null | null |
Samples/SampleBrowser/ProductSamples/WizardSamples/Demo/Features/MainWindow.xaml.cs
|
Actipro/WPF-Controls
|
7a20f3ca11c4c2d24149681145da0b029cedec06
|
[
"MIT"
] | 9
|
2021-02-18T23:52:51.000Z
|
2021-12-03T19:42:12.000Z
|
using ActiproSoftware.Windows.Controls.Wizard;
using ActiproSoftware.Windows.Themes;
using System;
using System.ComponentModel;
using System.Threading;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Interop;
#if WPF
using MessageBox = ActiproSoftware.Windows.Controls.ThemedMessageBox;
#endif
namespace ActiproSoftware.ProductSamples.WizardSamples.Demo.Features {
/// <summary>
/// Provides the demo user control for this sample.
/// </summary>
public partial class MainWindow {
private BackgroundWorker simpleProcessingBackgroundWorker;
/// <summary>
/// Initializes an instance of the <c>MainWindow</c> class.
/// </summary>
public MainWindow() {
InitializeComponent();
}
/// <summary>
/// Occurs when the control is clicked.
/// </summary>
/// <param name="sender">The sender of the event.</param>
/// <param name="e">Event arguments.</param>
private void programmaticSelectedPageNavigationSampleHyperlink_Click(object sender, RoutedEventArgs e) {
wizard.SelectedPage = transitionEffectsPage;
}
/// <summary>
/// Occurs when the control is clicked.
/// </summary>
/// <param name="sender">The sender of the event.</param>
/// <param name="e">Event arguments.</param>
private void programmaticSelectedIndexNavigationSampleHyperlink_Click(object sender, RoutedEventArgs e) {
wizard.SelectedIndex = 1;
}
/// <summary>
/// Occurs when the control is clicked.
/// </summary>
/// <param name="sender">The sender of the event.</param>
/// <param name="e">Event arguments.</param>
private void startProcessingButton_Click(object sender, RoutedEventArgs e) {
// Disable the buttons while processing occurs
startProcessingButton.IsEnabled = false;
processingPage.CancelButtonEnabled = false;
processingPage.BackButtonEnabled = false;
processingPage.NextButtonEnabled = false;
// Initialize the background worker
if (simpleProcessingBackgroundWorker == null) {
simpleProcessingBackgroundWorker = new BackgroundWorker();
simpleProcessingBackgroundWorker.WorkerReportsProgress = true;
simpleProcessingBackgroundWorker.DoWork += delegate(object sndr, DoWorkEventArgs eventArgs) {
// Simply sleep for 100ms to simulate processing
for (int index = 0; index <= 10; index++) {
Thread.Sleep(100);
simpleProcessingBackgroundWorker.ReportProgress(index * 10);
}
};
simpleProcessingBackgroundWorker.ProgressChanged += delegate(object sndr, ProgressChangedEventArgs eventArgs) {
progressTextBlock.Text = (eventArgs.ProgressPercentage < 100 ? eventArgs.ProgressPercentage + "% complete" : "Processing completed");
progressBar.Value = eventArgs.ProgressPercentage;
};
simpleProcessingBackgroundWorker.RunWorkerCompleted += delegate(object sndr, RunWorkerCompletedEventArgs eventArgs) {
// Re-enable the buttons now that the processing is complete
startProcessingButton.IsEnabled = true;
processingPage.CancelButtonEnabled = null;
processingPage.BackButtonEnabled = null;
processingPage.NextButtonEnabled = null;
};
}
// Start the background work
simpleProcessingBackgroundWorker.RunWorkerAsync();
}
/// <summary>
/// Occurs when the selection is changed.
/// </summary>
/// <param name="sender">The sender of the event.</param>
/// <param name="e">Event arguments.</param>
private void themeListBox_SelectionChanged(object sender, RoutedEventArgs e) {
if ((themeListBox.SelectedItem != null) && (((ListBoxItem)themeListBox.SelectedItem).Content != null)) {
// Load the proper resources
bool isAlternateButtonLayout = false;
string themeName = ((ListBoxItem)themeListBox.SelectedItem).Tag as string;
if ((themeName != null) && (themeName.EndsWith(".xaml"))) {
// Load a theme from a resource dictionary in the sample project
this.Resources = (ResourceDictionary)System.Windows.Application.LoadComponent(
new Uri("/ProductSamples/WizardSamples/Demo/Features/Themes/" + themeName, UriKind.Relative));
isAlternateButtonLayout = themeName.StartsWith("Alternate");
}
else {
// Clear any loaded theme resources
this.Resources = null;
}
// Change the button visibilities if using the alternate layout for the button container
wizard.FinishButtonVisible = !isAlternateButtonLayout;
finishPage.NextButtonVisible = !isAlternateButtonLayout;
// Due what seems to be a bug in VisualBrush where style changes don't update the rendered control,
// this will force the brush to repaint the sample exterior page Wizard that is on the Themes page
sampleExteriorPageWizard.Resources = this.Resources;
}
}
/// <summary>
/// Occurs when the wizard's Cancel button is clicked.
/// </summary>
/// <param name="sender">The sender of the event.</param>
/// <param name="e">Event arguments.</param>
private void wizard_Cancel(object sender, RoutedEventArgs e) {
if ((BrowserInteropHelper.IsBrowserHosted) || (!wizard.CancelButtonClosesWindow))
MessageBox.Show("You clicked the Cancel button while on the '" + wizard.SelectedPage.Caption + "' page.", "Wizard Sample");
}
/// <summary>
/// Occurs when the wizard's Finish button is clicked.
/// </summary>
/// <param name="sender">The sender of the event.</param>
/// <param name="e">Event arguments.</param>
private void wizard_Finish(object sender, RoutedEventArgs e) {
if ((BrowserInteropHelper.IsBrowserHosted) || (!wizard.FinishButtonClosesWindow))
MessageBox.Show("You clicked the Finish button while on the '" + wizard.SelectedPage.Caption + "' page.", "Wizard Sample");
}
/// <summary>
/// Occurs when the wizard's Help button is clicked.
/// </summary>
/// <param name="sender">The sender of the event.</param>
/// <param name="e">Event arguments.</param>
private void wizard_Help(object sender, RoutedEventArgs e) {
MessageBox.Show("You clicked the Help button while on the '" + wizard.SelectedPage.Caption + "' page.", "Wizard Sample");
}
/// <summary>
/// Occurs after the wizard's selected page has changed.
/// </summary>
/// <param name="sender">The sender of the event.</param>
/// <param name="e">Event arguments.</param>
private void wizard_SelectedPageChanged(object sender, WizardSelectedPageChangeEventArgs e) {
if (e.NewSelectedPage == processingPage) {
// Clear the processing amount
progressBar.Value = 0;
}
if (e.NewSelectedPage == cancelSelectionChangePage) {
// Update the selection flags TextBlock to indicate what flags were used in selecting this page
selectionFlagsTextBlock.Text = e.SelectionFlags.ToString();
}
}
/// <summary>
/// Occurs before the wizard's selected page has changed.
/// </summary>
/// <param name="sender">The sender of the event.</param>
/// <param name="e">Event arguments.</param>
private void wizard_SelectedPageChanging(object sender, WizardSelectedPageChangeEventArgs e) {
if (e.OldSelectedPage == cancelSelectionChangePage) {
// If the cancel selection change CheckBox is checked, cancel the selection change
if (cancelSelectionChangeCheckBox.IsChecked == true) {
MessageBox.Show("The selected page change is cancelled because you have the CheckBox set. Clear the CheckBox to be able to navigate through the wizard again.", "Wizard Sample");
e.Cancel = true;
}
}
}
}
}
| 41.361111
| 183
| 0.718469
|
43d61a40ea4ca20cec1815651cfebe2bba517b97
| 789
|
ts
|
TypeScript
|
src/core/store.ts
|
my-repositories/friendly2
|
1ec075712ef91fe6fe01cfd5cd4b3b0f04d949b1
|
[
"MIT"
] | null | null | null |
src/core/store.ts
|
my-repositories/friendly2
|
1ec075712ef91fe6fe01cfd5cd4b3b0f04d949b1
|
[
"MIT"
] | null | null | null |
src/core/store.ts
|
my-repositories/friendly2
|
1ec075712ef91fe6fe01cfd5cd4b3b0f04d949b1
|
[
"MIT"
] | 1
|
2020-10-18T12:15:10.000Z
|
2020-10-18T12:15:10.000Z
|
import { injectable } from '~/ioc/decorators/injectable';
@injectable()
export class Store {
private static _linksKey = '__STORE__LINKS__';
private static _maxLinksCount = 500;
get(): string[] {
return JSON.parse(sessionStorage.getItem(Store._linksKey) || '[]');
}
save(newLinks: string[]): void {
if (!newLinks || !newLinks.length) {
return;
}
const storageLinks = this.get();
const mergedLinks = new Set([
...newLinks,
...storageLinks,
]);
const data = Array.from(mergedLinks).slice(0, Store._maxLinksCount);
sessionStorage.setItem(Store._linksKey, JSON.stringify(data));
}
reset(): void {
sessionStorage.setItem(Store._linksKey, '[]');
}
}
| 25.451613
| 76
| 0.588086
|
c0ca9bbcd4c1e00c0a2c3c07ccfcd39fd966b37d
| 4,502
|
sql
|
SQL
|
vote.sql
|
G41mywaifu/ssm-
|
059599c78aa66b9ce775854850b864e6c9fa90e4
|
[
"MIT"
] | 1
|
2021-03-30T08:15:23.000Z
|
2021-03-30T08:15:23.000Z
|
vote.sql
|
G41mywaifu/ssm
|
059599c78aa66b9ce775854850b864e6c9fa90e4
|
[
"MIT"
] | null | null | null |
vote.sql
|
G41mywaifu/ssm
|
059599c78aa66b9ce775854850b864e6c9fa90e4
|
[
"MIT"
] | null | null | null |
drop table if exists tb_admin;
drop table if exists tb_answer_opt;
drop table if exists tb_answer_txt;
drop table if exists tb_question;
drop table if exists tb_question_opt;
drop table if exists tb_survey;
/*==============================================================*/
/* Table: tb_admin */
/*==============================================================*/
create table tb_admin
(
id int(11) not null auto_increment,
account varchar(50),
password varchar(50),
name varchar(50),
phone varchar(50),
remark varchar(200),
primary key (id)
);
/*==============================================================*/
/* Table: tb_answer_opt */
/*==============================================================*/
create table tb_answer_opt
(
id int(11) not null auto_increment,
survey_id int(11),
question_id int(11),
opt_id int(11),
type varchar(20) comment '1radio|2checkbox',
create_time datetime,
voter varchar(50),
primary key (id)
);
/*==============================================================*/
/* Table: tb_answer_txt */
/*==============================================================*/
create table tb_answer_txt
(
id int(11) not null auto_increment,
survey_id int(11),
question_id int(11),
result varchar(200),
create_time datetime,
voter varchar(50),
primary key (id)
);
/*==============================================================*/
/* Table: tb_question */
/*==============================================================*/
create table tb_question
(
id int(11) not null auto_increment,
title varchar(200),
remark varchar(200),
type int(1) comment '1radio|2checkbox|3text|4textarea',
required int(1) comment '0非必填1必填',
check_style varchar(50) comment 'text;number;date',
order_style int(1) comment '0顺序1随机',
show_style int(1) comment '1;2;3;4',
test int(1) comment '0不测评1测评',
score int(3),
orderby int(11),
creator int(11),
create_time datetime,
survey_id int(11),
primary key (id)
);
/*==============================================================*/
/* Table: tb_question_opt */
/*==============================================================*/
create table tb_question_opt
(
id int(11) not null auto_increment,
survey_id int(11),
question_id int(11),
type varchar(20) comment '1radio|2checkbox',
opt varchar(200),
orderby int(11),
answer int(1) comment '默认为NULL;1答案',
primary key (id)
);
/*==============================================================*/
/* Table: tb_survey */
/*==============================================================*/
create table tb_survey
(
ID int(11) not null auto_increment,
title varchar(100),
remark varchar(200),
bounds int(1) comment '0:不限制;1:限制',
start_time datetime,
end_time datetime,
rules int(1) comment '0公开;1密码',
password varchar(50),
url varchar(200),
state varchar(50) comment '创建、执行中、结束',
logo varchar(200),
bgimg varchar(200),
anon int(1) comment '0匿名;1不匿名',
creator int(11),
create_time datetime,
primary key (ID)
);
alter table tb_answer_opt add constraint FK_Reference_2 foreign key (opt_id)
references tb_question_opt (id) on delete restrict on update restrict;
alter table tb_question add constraint FK_Reference_1 foreign key (survey_id)
references tb_survey (ID) on delete restrict on update restrict;
| 36.306452
| 78
| 0.400933
|
1b6491d694aa48e275beb549745ef554abbc5883
| 238
|
rb
|
Ruby
|
server/db/migrate/20140929132049_create_page_errors.rb
|
ryanbahniuk/context
|
8b7fc32e1abfd951c88065c340159f8e7ac08756
|
[
"MIT"
] | null | null | null |
server/db/migrate/20140929132049_create_page_errors.rb
|
ryanbahniuk/context
|
8b7fc32e1abfd951c88065c340159f8e7ac08756
|
[
"MIT"
] | null | null | null |
server/db/migrate/20140929132049_create_page_errors.rb
|
ryanbahniuk/context
|
8b7fc32e1abfd951c88065c340159f8e7ac08756
|
[
"MIT"
] | null | null | null |
class CreatePageErrors < ActiveRecord::Migration
def change
create_table :page_errors do |t|
t.belongs_to :user
t.belongs_to :url
t.text :description
t.boolean :resolved?
t.timestamps
end
end
end
| 19.833333
| 48
| 0.663866
|
4cb6ac90ca6a5994e57e97b5a1daacc1aa9f2626
| 178
|
cs
|
C#
|
Assets/System/Windows/Forms/IContainerControl.cs
|
SixGodZhang/WinformUnity
|
c91f21d48a568b333b0ef83ab4a38c815e5f214e
|
[
"MIT"
] | 442
|
2015-11-23T05:50:37.000Z
|
2022-03-24T23:42:17.000Z
|
System/Windows/Forms/IContainerControl.cs
|
superowner/Unity-WinForms
|
52bf9b9597e57c96804bab9789fdc045820fcece
|
[
"MIT"
] | 49
|
2016-07-23T06:48:28.000Z
|
2022-03-16T15:54:24.000Z
|
System/Windows/Forms/IContainerControl.cs
|
superowner/Unity-WinForms
|
52bf9b9597e57c96804bab9789fdc045820fcece
|
[
"MIT"
] | 79
|
2016-07-27T02:53:24.000Z
|
2022-03-14T00:12:36.000Z
|
namespace System.Windows.Forms
{
public interface IContainerControl
{
Control ActiveControl { get; set; }
bool ActivateControl(Control active);
}
}
| 17.8
| 45
| 0.657303
|
eb8fc0c1a3066dd4bf2894ba8136375ee3e37462
| 23,877
|
lua
|
Lua
|
Scripts/02 consensual_settings.lua
|
arcaneAgilmente/consensual
|
89d5bea56a8e06986fe33871dbe8acf4766861fd
|
[
"BSD-3-Clause"
] | 3
|
2019-03-20T03:03:22.000Z
|
2022-03-18T17:57:53.000Z
|
Scripts/02 consensual_settings.lua
|
AratnitY/consensual
|
89d5bea56a8e06986fe33871dbe8acf4766861fd
|
[
"BSD-3-Clause"
] | null | null | null |
Scripts/02 consensual_settings.lua
|
AratnitY/consensual
|
89d5bea56a8e06986fe33871dbe8acf4766861fd
|
[
"BSD-3-Clause"
] | 1
|
2019-03-20T03:03:24.000Z
|
2019-03-20T03:03:24.000Z
|
local cons_player= {}
function add_cons_player_field(name, value)
cons_player[name]= value
end
function cons_player:clear_init(player_number)
for k, v in pairs(self) do
if k ~= "id" then
self[k]= nil
end
end
local player_state= GAMESTATE:GetPlayerState(player_number)
local option_get= player_state.get_player_options_no_defect or
player_state.GetPlayerOptions
self.player_number= player_number
self.current_options= option_get(player_state, "ModsLevel_Current")
self.song_options= option_get(player_state, "ModsLevel_Song")
self.stage_options= option_get(player_state, "ModsLevel_Stage")
self.preferred_options= option_get(player_state, "ModsLevel_Preferred")
self.judge_totals= {}
self:set_speed_info_from_poptions()
self.dspeed= {min= dspeed_default_min, max= dspeed_default_max, alternate= false}
self:flags_reset()
self:pain_config_reset()
self:combo_qual_reset()
self:unacceptable_score_reset()
self:stage_stats_reset()
self:session_stats_reset()
self.mine_effect= sorted_mine_effect_names[1]
self.sigil_data= {detail= 16, size= 150}
self.play_history= {}
self:load_default_ops()
end
function cons_player:clear_mods()
self:clear_init(self.player_number)
GAMESTATE:ApplyGameCommand("mod,clearall", self.player_number)
-- SM5 will crash if a noteskin is not applied after clearing all mods.
-- Apply the default noteskin first in case Cel doesn't exist.
local default_noteskin= THEME:GetMetric("Common", "DefaultNoteSkinName")
local prev_note, succeeded= self.song_options:NoteSkin("uswcelsm5")
if not succeeded then
prev_note, succeeded= self.song_options:NoteSkin(default_noteskin)
if not succeeded then
Warn("Failed to set default noteskin when clearing player options. Please do not delete the default noteskin.")
end
end
end
function cons_player:noob_mode()
-- TODO: Test whether this accidentally overrides player_config.
-- Move rating_cap and options_level to something similar to
-- profile_flag_setting so the machine owner can configure what each level
-- sets.
self.rating_cap= 5
self.options_level= 1
self.flags= set_player_flag_to_level(self.player_number, 1)
self.pain_config= set_player_pain_to_level(self.player_number, 1)
end
function cons_player:simple_options_mode()
self.rating_cap= 10
self.options_level= 2
self.flags= set_player_flag_to_level(self.player_number, 2)
self.pain_config= set_player_pain_to_level(self.player_number, 2)
end
function cons_player:all_options_mode()
self.rating_cap= 15
self.options_level= 3
self.flags= set_player_flag_to_level(self.player_number, 3)
self.pain_config= set_player_pain_to_level(self.player_number, 3)
end
function cons_player:excessive_options_mode()
self.rating_cap= -1
self.options_level= 4
self.flags= set_player_flag_to_level(self.player_number, 4)
self.pain_config= set_player_pain_to_level(self.player_number, 4)
end
function cons_player:combo_qual_reset()
self.combo_quality= {}
end
-- Build a judgment-count table with every tap and hold judgment zeroed.
-- Keys are the TapNoteScore/HoldNoteScore enum strings.
local function empty_judge_count_set()
  local counts= {}
  for _, score_list in ipairs({TapNoteScore, HoldNoteScore}) do
    for _, score_name in ipairs(score_list) do
      counts[score_name]= 0
    end
  end
  return counts
end
function cons_player:unacceptable_score_reset()
self.unacceptable_score= {
enabled= false, condition= "dance_points", value= 0}
end
function cons_player:stage_stats_reset()
self.stage_stats= {firsts= {}}
local function empty_col_score()
return {
dp= 0, mdp= 0, max_combo= 0, step_timings= {},
judge_counts= empty_judge_count_set(),
}
end
self.fake_score= empty_col_score()
local cur_style= GAMESTATE:GetCurrentStyle(self.player_number)
if cur_style then
local columns= cur_style:ColumnsPerPlayer()
--Trace("Making column score slots for " .. tostring(columns) .. " columns.")
self.column_scores= {}
-- Track indices from the engine are 1-indexed.
-- Column 0 is for all columns combined.
for c= 0, columns do
self.column_scores[c]= empty_col_score()
end
end
end
function cons_player:session_stats_reset()
self.session_stats= {}
-- Columns in the session stats are for every panel on the pad, to handle
-- mixed sessions. Otherwise, a session where P2 played one song on single,
-- and one song on double would put the data for the single song in the
-- wrong columns.
-- style compatibility issue: Dance, Pump, and Techno are the only supported games.
for i= -1, 18 do
self.session_stats[i]= {
dp= 0, mdp= 0, max_combo= 0, judge_counts= {
early= empty_judge_count_set(), late= empty_judge_count_set()}
}
end
end
function cons_player:flags_reset()
self.flags= set_player_flag_to_level(self.player_number, 1)
-- allow_toasty is set here so it will be affected if the preference is changed while the game is running.
self.flags.gameplay.allow_toasty= PREFSMAN:GetPreference("EasterEggs")
end
function cons_player:pain_config_reset()
self.pain_config= set_player_pain_to_level(self.player_number, 1)
end
function cons_player:set_speed_info_from_poptions()
  -- Derive {mode, speed} from the player's preferred options and cache it
  -- in self.speed_info.  Priority order: MMod (MaxScrollBPM set), then
  -- CMod (TimeSpacing set), otherwise XMod.
  local ops= self.preferred_options
  local mode, speed
  if ops:MaxScrollBPM() > 0 then
    mode, speed= "m", ops:MaxScrollBPM()
  elseif ops:TimeSpacing() > 0 then
    mode, speed= "C", ops:ScrollBPM()
  else
    mode, speed= "x", ops:ScrollSpeed()
  end
  self.speed_info= {speed= speed, mode= mode}
  return self.speed_info
end
function cons_player:get_speed_info()
return self.speed_info or self:set_speed_info_from_poptions()
end
local function find_read_bpm_for_player_steps(player_number)
local bpms= get_timing_bpms(GAMESTATE:GetCurrentSteps(player_number),
GAMESTATE:GetCurrentSong())
return bpms[2]
end
if not set_newfield_speed_mod then
function set_newfield_speed_mod() return end
end
-- Push the player's cached speed_info into all four ModsLevel option sets
-- (preferred/stage/song/current) and, if given, into the newfield.
-- mmods are just a poor mask over xmods, so if you set an mmod in
-- the middle of the song, it'll null out. This means that if you
-- use PlayerState:SetPlayerOptions, it'll ruin whatever mmod the
-- player has set. So this code is here to remove that mask.
function set_speed_from_speed_info(player, newfield)
  -- No-op for unjoined/disabled players.
  if not player.player_number or not GAMESTATE:IsPlayerEnabled(player.player_number) then return end
  local speed_info= player:get_speed_info()
  speed_info.prev_bps= nil
  -- One applier per speed mode; dispatched by speed_info.mode below.
  local mode_functions= {
    x= function(speed)
      player.preferred_options:XMod(speed)
      player.stage_options:XMod(speed)
      player.song_options:XMod(speed)
      player.current_options:XMod(speed)
      if newfield then
        newfield:set_speed_mod(false, speed)
      end
    end,
    C= function(speed)
      player.preferred_options:CMod(speed)
      player.stage_options:CMod(speed)
      player.song_options:CMod(speed)
      player.current_options:CMod(speed)
      if newfield then
        newfield:set_speed_mod(true, speed)
      end
    end,
    m= function(speed)
      -- mmod is emulated as an xmod: divide the target BPM by the chart's
      -- display BPM and the rate mod, then apply as an XMod.
      local read_bpm= find_read_bpm_for_player_steps(player.player_number)
      local real_speed= (speed / read_bpm) / get_rate_from_songopts()
      player.preferred_options:XMod(real_speed)
      player.stage_options:XMod(real_speed)
      player.song_options:XMod(real_speed)
      player.current_options:XMod(real_speed)
      if newfield then
        newfield:set_speed_mod(false, speed, read_bpm)
      end
      --player.song_options:MMod(speed)
      --player.current_options:MMod(speed)
    end,
  }
  if mode_functions[speed_info.mode] then
    mode_functions[speed_info.mode](speed_info.speed)
  end
end
function cons_player:load_default_ops()
local defcon= DeepCopy(player_config:get_data(nil))
for k, v in pairs(defcon) do
self[k]= v
end
end
function cons_player:set_ops_from_profile(profile)
if profile then
self.proguid= profile:GetGUID()
end
local prof_slot= pn_to_profile_slot(self.player_number)
self.pain_config= profile_pain_setting:load(prof_slot)
self.flags= profile_flag_setting:load(prof_slot)
self.style_config= style_config:load(prof_slot)
self.shown_noteskins= shown_noteskins:load(prof_slot)
style_config_sanity_enforcer(self.style_config)
local config= player_config:load(prof_slot)
local migrated= update_old_player_config(prof_slot, config)
for k, v in pairs(config) do
self[k]= v
end
if self.preferred_steps_type == "" then
local style_info= first_compat_style_info(1)
if style_info then
self.preferred_steps_type= style_info[1].steps_type
end
end
local ops= self.preferred_options
if migrated then
self:persist_mod("Tilt", ops:Tilt())
self:persist_mod("Skew", ops:Skew())
end
self:reset_to_persistent_mods()
end
function cons_player:reset_to_persistent_mods()
  -- Clear all mods, then reapply the persisted engine mods and the persisted
  -- theme-side ("cons") mods for this player.
  local ops= self.preferred_options
  reset_mods(self, ops)
  for name, value in pairs(self.persistent_mods) do
    -- Only call setters that actually exist on the options object.
    if ops[name] then
      ops[name](ops, value)
    end
  end
  for name, value in pairs(self.cons_persistent_mods) do
    if not self[name] then
      self:set_cons_mod(name, value)
    else
      -- Bug fix: this previously wrote "cons_persistent_mods[name]= nil",
      -- indexing an undefined global instead of this player's table.
      -- Clearing an existing key during pairs() traversal is legal in Lua.
      self.cons_persistent_mods[name]= nil
    end
  end
end
function cons_player:set_cons_mod(name, value)
if not self.cons_mods_set then self.cons_mods_set= {} end
self.cons_mods_set[name]= value and true or nil
self[name]= value
end
function cons_player:unpersist_mod(name, persist_type)
if persist_type == "cons" then
self.cons_persistent_mods[name]= nil
elseif persist_type == "song" then
self.persistent_song_mods[name]= nil
else
self.persistent_mods[name]= nil
end
end
function cons_player:persist_mod(name, value, persist_type)
  -- Store a mod value in the appropriate persistence table.  Falsy values,
  -- zero, near-zero numbers, and a MusicRate within .01 of 1 are treated as
  -- "no mod" and stored as nil (removing the entry).
  if not value or value == 0 then value= nil end
  if type(value) == "number" and math.abs(value) < .001 then
    value= nil
  end
  if value and name == "MusicRate" and math.abs(value - 1) < .01 then
    value= nil
  end
  -- Pick the destination table by persistence scope.
  local dest= self.persistent_mods
  if persist_type == "cons" then
    dest= self.cons_persistent_mods
  elseif persist_type == "song" then
    dest= self.persistent_song_mods
  end
  dest[name]= value
end
function cons_player:get_persist_mod_value(name, persist_type)
  -- Fetch a persisted mod value from the table matching persist_type
  -- ("cons", "song", or the default engine-mod table).
  local source= self.persistent_mods
  if persist_type == "cons" then
    source= self.cons_persistent_mods
  elseif persist_type == "song" then
    source= self.persistent_song_mods
  end
  return source[name]
end
local cons_player_mt= { __index= cons_player}
if cons_players and cons_players[PLAYER_1] and cons_players[PLAYER_2] then
for k, v in pairs(all_player_indices) do
setmetatable(cons_players[v], cons_player_mt)
end
else
cons_players= {}
for k, v in pairs(all_player_indices) do
cons_players[v]= {}
setmetatable(cons_players[v], cons_player_mt)
end
end
function get_preferred_steps_type(pn)
return cons_players[pn].preferred_steps_type
end
function set_preferred_steps_type(pn, value)
cons_players[pn].preferred_steps_type= value
end
function options_allowed()
return true
end
function generic_gsu_flag(flag_field, flag_name)
return
function(player_number)
return cons_players[player_number].flags[flag_field][flag_name]
end,
function(player_number)
cons_players[player_number].flags[flag_field][flag_name]= true
MESSAGEMAN:Broadcast("player_flags_changed", {pn= player_number, field= flag_field, name= flag_name})
end,
function(player_number)
cons_players[player_number].flags[flag_field][flag_name]= false
MESSAGEMAN:Broadcast("player_flags_changed", {pn= player_number, field= flag_field, name= flag_name})
end
end
function generic_flag_control_element(flag_field, flag_name)
local funcs= {generic_gsu_flag(flag_field, flag_name)}
return {name= flag_name, init= funcs[1], set= funcs[2], unset= funcs[3]}
end
local tn_judges= {
"TapNoteScore_Miss", "TapNoteScore_W5", "TapNoteScore_W4", "TapNoteScore_W3", "TapNoteScore_W2", "TapNoteScore_W1"
}
local tn_hold_judges= {
"HoldNoteScore_LetGo", "HoldNoteScore_Held", "HoldNoteScore_Missed"
}
local generic_fake_judge= {
__index= {
initialize=
function(self, pn, tn_settings)
self.settings= DeepCopy(tn_settings)
self.used= {}
for i= 1, #tn_judges do
self.used[i]= 0
end
local steps= GAMESTATE:GetCurrentSteps(pn)
local taps= steps:GetRadarValues(pn):GetValue("RadarCategory_TapsAndHolds")
local holds= steps:GetRadarValues(pn):GetValue("RadarCategory_Holds")
end,
}}
local fake_judges= {
TapNoteScore_Miss= function() return "TapNoteScore_Miss" end,
TapNoteScore_W5= function() return "TapNoteScore_W5" end,
TapNoteScore_W4= function() return "TapNoteScore_W4" end,
TapNoteScore_W3= function() return "TapNoteScore_W3" end,
TapNoteScore_W2= function() return "TapNoteScore_W2" end,
TapNoteScore_W1= function() return "TapNoteScore_W1" end,
Random=
function()
return tn_judges[MersenneTwister.Random(1, #tn_judges)]
end
}
function set_fake_judge(tns)
return
function(player_number)
cons_players[player_number].fake_judge= fake_judges[tns]
end
end
function unset_fake_judge(player_number)
cons_players[player_number].fake_judge= nil
end
function check_fake_judge(tns)
return
function(player_number)
return cons_players[player_number].fake_judge == fake_judges[tns]
end
end
function check_mine_effect(eff)
return
function(player_number)
return cons_players[player_number].mine_effect == eff
end
end
function set_mine_effect(eff)
return
function(player_number)
cons_players[player_number].mine_effect= eff
end
end
function unset_mine_effect(player_number)
cons_players[player_number].mine_effect= "none"
end
function GetPreviousPlayerSteps(player_number)
return cons_players[player_number].prev_steps
end
function GetPreviousPlayerScore(player_number)
return cons_players[player_number].prev_score or 0
end
-- Map a score in [0, 1] to an integer rating adjustment for a chart.
-- Scores at or below .625 earn the full -4 penalty; a perfect 1.0 maps to +4.
-- Higher-rated charts (meter > 8/10/13) have the change damped so their
-- ratings drift more slowly.
function ConvertScoreToFootRateChange(meter, score)
  local change= (math.max(0, score - .625) * (8 / .375)) - 4
  -- Damping factor by difficulty tier.
  local damp= 1
  if meter > 13 then
    damp= .25
  elseif meter > 10 then
    damp= .5
  elseif meter > 8 then
    damp= .75
  end
  change= change * damp
  -- Round half away from zero to an integer.
  if change > 0 then
    return math.floor(change + .5)
  end
  return math.ceil(change - .5)
end
local time_remaining= 0
function set_time_remaining_to_default()
time_remaining= misc_config:get_data().default_credit_time
end
function reduce_time_remaining(amount)
if not GAMESTATE:IsEventMode() then
time_remaining= time_remaining - amount
end
end
function get_time_remaining()
return time_remaining
end
function song_short_enough(s)
if GAMESTATE:IsEventMode() then
return true
else
local maxlen= time_remaining + misc_config:get_data().song_length_grace
if s.GetLastSecond then
local len= s:GetLastSecond() - s:GetFirstSecond()
return len <= maxlen and len > 0
else
local steps_type= GAMESTATE:GetCurrentStyle():GetStepsType()
return (s:GetTotalSeconds(steps_type) or 0) <= maxlen
end
end
end
local censoring_on= true
function toggle_censoring()
censoring_on= not censoring_on
end
function turn_censoring_on()
censoring_on= true
end
local chart_rating_cap= -1
function update_rating_cap()
local old_cap= chart_rating_cap
chart_rating_cap= 0
for i, pn in ipairs(GAMESTATE:GetEnabledPlayers()) do
if cons_players[pn].rating_cap < 0 then
chart_rating_cap= -1
break
else
chart_rating_cap= math.max(cons_players[pn].rating_cap, chart_rating_cap)
end
end
return old_cap ~= chart_rating_cap
end
function get_rating_cap()
return chart_rating_cap
end
function disable_rating_cap()
chart_rating_cap= -1
end
function song_uncensored(song)
if censoring_on and check_censor_list(song) then
return false
end
return true
end
function song_fits_rating_cap(song)
  -- A song passes if any of its (filtered) charts has a meter at or below
  -- the current cap.  A cap <= 0 means "no cap": everything passes.
  if chart_rating_cap <= 0 then
    return true
  end
  for _, steps in ipairs(get_filtered_steps_list(song)) do
    if steps:GetMeter() <= chart_rating_cap then
      return true
    end
  end
  return false
end
function time_short_enough(t)
if GAMESTATE:IsEventMode() then
return true
else
return t <= time_remaining
end
end
local last_song_time= 0
function set_last_song_time(t)
last_song_time= t
end
function get_last_song_time()
return last_song_time
end
function convert_score_to_time(score)
if not score then return 0 end
local conf_data= misc_config:get_data()
local min_score_for_reward= conf_data.min_score_for_reward
if score < min_score_for_reward then return 0 end
local score_factor= score - min_score_for_reward
local reward_factor_high= 1-min_score_for_reward
local min_reward= conf_data.min_reward_pct
local max_reward= conf_data.max_reward_pct
local time_mult= last_song_time
if not conf_data.reward_time_by_pct then
min_reward= conf_data.min_reward_time
max_reward= conf_data.max_reward_time
time_mult= 1
end
return scale(score_factor, 0, reward_factor_high, min_reward, max_reward) * time_mult
end
function cons_can_join()
return GAMESTATE:GetCoinMode() == "CoinMode_Home" or
GAMESTATE:GetCoinMode() == "CoinMode_Free" or
GAMESTATE:GetCoins() >= GAMESTATE:GetCoinsNeededToJoin()
end
function cons_join_player(pn)
local ret= GAMESTATE:JoinInput(pn)
if ret then
cons_players[pn]:clear_init(pn)
if april_fools then
cons_players[pn].fake_judge= fake_judges.Random
end
end
return ret
end
-- Returns credits, leftover coins, and coins-needed-per-credit from the
-- engine's coin state.  When no coins are needed (free play), reports
-- 0 credits / 0 coins.
function get_coin_info()
  local coins= GAMESTATE:GetCoins()
  local needed= GAMESTATE:GetCoinsNeededToJoin()
  -- Guard the zero case BEFORE dividing.  The old code computed
  -- floor(coins / 0) first (inf/NaN in Lua) and then discarded it;
  -- checking first avoids the bogus intermediates entirely.
  if needed == 0 then
    return 0, 0, needed
  end
  local credits= math.floor(coins / needed)
  coins= coins % needed
  return credits, coins, needed
end
local steps_types_to_show= {}
function cons_get_steps_types_to_show()
return steps_types_to_show
end
function update_steps_types_to_show()
steps_types_to_show= {}
for i, data in ipairs(combined_visible_styles()) do
steps_types_to_show[data.stepstype]= true
end
end
-- Set the current steps for a player, switching the current style first if
-- the steps' stepstype doesn't match the active style (e.g. single <-> double).
function cons_set_current_steps(pn, steps)
  local num_players= GAMESTATE:GetNumPlayersEnabled()
  local curr_st= GAMESTATE:GetCurrentStyle(pn):GetStepsType()
  local to_st= steps:GetStepsType()
  if curr_st ~= to_st then
    local curr_style_info= stepstype_to_style[curr_st][num_players]
    if not curr_style_info then
      lua.ReportScriptError("Error when trying to fetch style info. Dumping stepstype_to_style.")
      rec_print_table(stepstype_to_style)
    end
    local to_style= stepstype_to_style[to_st][num_players]
    if to_style then
      if curr_style_info.for_sides > to_style.for_sides then
        -- If the current style is double, and we try to set the style to
        -- single, then we run into the error of having too many sides
        -- joined to change styles.
        -- Unjoining the other side prevents that error.
        GAMESTATE:UnjoinPlayer(other_player[pn])
      elseif curr_style_info.for_sides < to_style.for_sides then
        -- No action necessary.
      end
      set_current_style(to_style.name, pn)
    else
      lua.ReportScriptError("Need to change the style, but no to_style found.")
      return
    end
  end
  -- Re-read the (possibly changed) style and verify it now matches the steps.
  -- NOTE(review): this shadows the earlier curr_st local intentionally.
  local curr_st= GAMESTATE:GetCurrentStyle(pn):GetStepsType()
  if curr_st ~= steps:GetStepsType() then
    lua.ReportScriptError("Attempted to set steps with invalid stepstype: "
    .. curr_st .. " ~= " .. steps:GetStepsType())
    return
  end
  gamestate_set_curr_steps(pn, steps)
end
function JudgmentTransformCommand(self, params)
do return end
local elpos= cons_players[params.Player].gameplay_element_positions
local rev_tilt= cons_players[params.Player].flags.gameplay.reverse_tilts_judge
local x= elpos.judgment_xoffset or 0
local y= elpos.judgment_yoffset or -30
if params.bReverse then
y = y * -1
if rev_tilt then
self:rotationx(180)
end
else
self:rotationx(0)
end
if params.bCentered and rev_tilt then
if params.Player == PLAYER_1 then
self:rotationz(90)
else
self:rotationz(-90)
end
else
self:rotationz(0)
end
self:xy(x, y)
end
local function cons_save_profile(profile, dir)
if profile == PROFILEMAN:GetMachineProfile() then return end
local cp= false
for i, pn in pairs(cons_players) do
if pn.proguid == profile:GetGUID() then
cp= pn
break
end
end
if cp then
local pn= cp.player_number
local prof_slot= pn_to_profile_slot(pn)
profile_pain_setting:save(prof_slot)
profile_flag_setting:set_dirty(prof_slot)
profile_flag_setting:save(prof_slot)
local config_data= player_config:get_data(prof_slot)
for k, v in pairs(config_data) do
if type(v) ~= "table" then
config_data[k]= cp[k]
end
end
player_config:set_dirty(prof_slot)
player_config:save(prof_slot)
style_config:save(prof_slot)
shown_noteskins:save(prof_slot)
end
end
if add_profile_load_callback then
add_profile_save_callback(cons_save_profile)
else
function SaveProfileCustom(profile, dir)
cons_save_profile(profile, dir)
end
end
function player_using_profile(pn)
return PROFILEMAN:IsPersistentProfile(pn)
end
function ops_level(pn)
return cons_players[pn].options_level
end
-- Reset every known engine modifier on the given options object to its
-- default, and clear any theme-side ("cons") mods set on the player.
function reset_mods(player, ops)
  -- Setters that take a specific default value rather than false/0.
  local specific_mods= {
    {"LifeSetting", "LifeType_Bar"},
    {"DrainSetting", "DrainType_Normal"},
    {"BatteryLives", 4},
    {"TimeSpacing", 0},
    {"MaxScrollBPM", 0},
    {"ScrollSpeed", 1},
    {"ScrollBPM", 200},
  }
  -- Boolean turn/transform/removal mods; all reset to false.
  local bool_mods= {
    "TurnNone", "Mirror", "Backwards", "Left", "Right", "Shuffle",
    "SoftShuffle", "SuperShuffle", "NoHolds", "NoRolls", "NoMines",
    "Little", "Wide", "Big", "Quick", "BMRize", "Skippy", "Mines",
    "AttackMines", "Echo", "Stomp", "Planted", "Floored", "Twister",
    "HoldRolls", "NoJumps", "NoHands", "NoLifts", "NoFakes", "NoQuads",
    "NoStretch", "MuteOnError",
  }
  -- Float-magnitude effect/appearance/scroll mods; all reset to 0.
  local float_mods= {
    "Boost", "Brake", "Wave", "Expand", "Boomerang", "Drunk", "Dizzy",
    "Confusion", "Mini", "Tiny", "Flip", "Invert", "Tornado", "Tipsy",
    "Bumpy", "Beat", "Xmode", "Twirl", "Roll", "Hidden", "HiddenOffset",
    "Sudden", "SuddenOffset", "Stealth", "Blink", "RandomVanish", "Reverse",
    "Split", "Alternate", "Cross", "Centered", "Dark", "Blind", "Cover",
    "RandAttack", "NoAttack", "PlayerAutoPlay", "Tilt", "Skew", "Passmark",
    "RandomSpeed",
  }
  for i, mod in ipairs(specific_mods) do
    ops[mod[1]](ops, mod[2])
  end
  for i, mod in ipairs(bool_mods) do
    ops[mod](ops, false)
  end
  for i, mod in ipairs(float_mods) do
    ops[mod](ops, 0)
  end
  -- Remove any theme-side fields that set_cons_mod attached to the player.
  if player.cons_mods_set then
    for name, i in pairs(player.cons_mods_set) do
      player[name]= nil
    end
    player.cons_mods_set= {}
  end
end
function apply_newfield_config(newfield, config, vanxoff, vanyoff)
local torad= math.pi / 180
newfield:get_fov_mod():set_value(config.fov)
newfield:get_vanish_x_mod():set_value(config.vanish_x + vanxoff)
newfield:get_vanish_y_mod():set_value(config.vanish_y + vanyoff)
newfield:get_trans_rot_x():set_value(config.rot_x*torad)
newfield:get_trans_rot_y():set_value(config.rot_y*torad)
newfield:get_trans_rot_z():set_value(config.rot_z*torad)
if config.use_separate_zooms then
newfield:get_trans_zoom_x():set_value(config.zoom_x)
newfield:get_trans_zoom_y():set_value(config.zoom_y)
newfield:get_trans_zoom_z():set_value(config.zoom_z)
else
newfield:get_trans_zoom_x():set_value(config.zoom)
newfield:get_trans_zoom_y():set_value(config.zoom)
newfield:get_trans_zoom_z():set_value(config.zoom)
end
for i, col in ipairs(newfield:get_columns()) do
col:get_reverse_scale():set_value(config.reverse)
col:get_reverse_offset_pixels():set_value(_screen.cy + config.yoffset)
end
end
-- Best-effort lookup of the stepstype a player is (or will be) playing.
-- Falls through: current steps -> current style -> first style for the
-- current game -> profile's last stepstype -> hard-coded dance-single.
function find_current_stepstype(pn)
  local steps= gamestate_get_curr_steps(pn)
  if steps then
    return steps:GetStepsType()
  end
  local style= GAMESTATE:GetCurrentStyle(pn)
  if style then
    return style:GetStepsType()
  end
  style= GAMEMAN:GetStylesForGame(GAMESTATE:GetCurrentGame():GetName())[1]
  if style then
    return style:GetStepsType()
  end
  local last_type= profiles[pn]:get_last_stepstype()
  if last_type then
    return last_type
  end
  return "StepsType_Dance_Single"
end
| 28.698317
| 115
| 0.766721
|
46d0b8a95c7789b8ac8bd87003eafb97485b3d61
| 5,132
|
lua
|
Lua
|
msb-core/openresty-ext/src/assembly/resources/openresty/nginx/luaext/customrouter.lua
|
gongysh2004/common-services-microservice-bus
|
78c23a6a8b4a53ea594f6c68afbb6729867b08f9
|
[
"Apache-2.0"
] | null | null | null |
msb-core/openresty-ext/src/assembly/resources/openresty/nginx/luaext/customrouter.lua
|
gongysh2004/common-services-microservice-bus
|
78c23a6a8b4a53ea594f6c68afbb6729867b08f9
|
[
"Apache-2.0"
] | null | null | null |
msb-core/openresty-ext/src/assembly/resources/openresty/nginx/luaext/customrouter.lua
|
gongysh2004/common-services-microservice-bus
|
78c23a6a8b4a53ea594f6c68afbb6729867b08f9
|
[
"Apache-2.0"
] | null | null | null |
--[[
Copyright 2016 ZTE Corporation.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Author: Zhaoxing Meng
email: meng.zhaoxing1@zte.com.cn
]]
-- put red into the connection pool of size 100,
-- with 10 seconds max idle time
-- Return a redis connection to the keepalive pool.  Safe to call with a
-- nil handle (no-op).  Pool: 100 connections, 10s max idle time.
local function close_redis(red)
    if not red then
        return
    end
    local idle_ms, pool = 10000, 100
    local ok, err = red:set_keepalive(idle_ms, pool)
    if not ok then
        ngx.log(ngx.ERR, "set keepalive error:", err)
    end
    return
end
-- Look up routing info (url + backend server) for a service key in redis,
-- populate the shared-dict cache (5s TTL) and the nginx vars, and return
-- true on success or nil if the service is disabled / incomplete.
local function query_ipurl_updatecache(red,key)
    local keyprefix = "msb:routing:custom:"..key
    local infokey=keyprefix..":info"
    -- first of all check whether the status is 1(enabled)
    local status = red:hget(infokey,"status")
    if not (status=="1") then
        ngx.log(ngx.WARN, key.."is disabled.status=", status)
        return nil
    end
    -- Try to get url for key
    local url, err = red:hget(infokey,"url")
    ngx.log(ngx.WARN, "==url:", url)
    if not url or url == ngx.null then
        return nil
    end
    -- Fetch ip and port separately and validate each BEFORE concatenating.
    -- Bug fix: the old code concatenated the two hget results directly,
    -- which raises "attempt to concatenate" when either field is missing
    -- (nil or ngx.null userdata), so its nil-check was unreachable.
    local serverkey=keyprefix..":lb:server1"
    local ip = red:hget(serverkey,"ip")
    local port = red:hget(serverkey,"port")
    if not ip or ip == ngx.null or not port or port == ngx.null then
        return nil
    end
    local server = ip..":"..port
    ngx.log(ngx.WARN, "==server:", server)
    -- get the local cache
    local cache = ngx.shared.ceryx
    local uri = ngx.var.uri
    -- Save found key to local cache for 5 seconds
    cache:set("custom:key:"..uri,key,5)
    cache:set("custom:server:"..uri,server,5)
    cache:set("custom:url:"..uri,url,5)
    ngx.var.key = key
    ngx.var.server = server
    ngx.var.url = url
    return true
end
-- Collect every custom-routing service name from redis, sort them longest
-- match first, join them with "<>" and cache the joined string for 30s.
-- Returns the joined string ("" on redis failure).
local function query_allkeys_updatecache(red)
    -- Try to get all keys start with "msb:routing:custom:"
    local allkeys, err = red:keys("msb:routing:custom:*")
    if not allkeys or allkeys == ngx.null then
        ngx.log(ngx.ERR,err)
        return ""
    end
    -- Strip the prefix and the ":info"/":lb:server1" suffixes to recover the
    -- bare service name; de-duplicate via a set.
    local key_set={}
    for _, value in ipairs(allkeys) do
        -- Bug fix: "name" was assigned without "local", leaking a global
        -- shared across all requests in the nginx Lua VM.
        local name = string.gsub(string.gsub(string.gsub(value,"msb:routing:custom:",""),":info",""),":lb:server1","")
        key_set[name]=true
    end
    local key_table = {}
    local index = 1
    for key,_ in pairs(key_set) do
        key_table[index] = key
        index = index + 1
    end
    -- Descending sort so longer/later names are tried before their prefixes.
    table.sort(key_table, function (a, b)
        return a > b
    end)
    local servicenames = ""
    local delimiter = "<>"
    for i=1,#key_table do
        servicenames=servicenames..key_table[i]..delimiter
    end
    -- get the local cache
    local cache = ngx.shared.ceryx
    -- Save all keys to local cache for 30 seconds(0.5 minutes)
    cache:set("customrouter:allkeys", servicenames, 30)
    return servicenames;
end
-- Resolve routing info for the current request URI: first from the shared
-- dict cache, otherwise by matching the URI against the cached/looked-up
-- service name list and querying redis for the matching service.
local function query_router_info()
    local uri = ngx.var.uri
    ngx.log(ngx.WARN, "==uri:", uri)
    -- Check if key exists in local cache
    local cache = ngx.shared.ceryx
    local key, flags = cache:get("custom:key:"..uri)
    local server, flags = cache:get("custom:server:"..uri)
    local url, flags = cache:get("custom:url:"..uri)
    if key and server and url then
        ngx.var.key = key
        ngx.var.server = server
        ngx.var.url = url
        ngx.log(ngx.WARN, "==using custom cache:", key.."&&"..server.."&&"..url)
        return
    end
    local redis = require "resty.redis"
    local red = redis:new()
    red:set_timeout(1000) -- 1000 ms
    local redis_host = "127.0.0.1"
    local redis_port = 6379
    local res, err = red:connect(redis_host, redis_port)
    -- Return if could not connect to Redis
    if not res then
        ngx.log(ngx.ERR, "connect to redis error:", err)
        return
    end
    -- Check if all servicenames exists in local cache
    local servicenames, flags = cache:get("customrouter:allkeys")
    if servicenames then
        ngx.log(ngx.WARN,"==get all keys from cache:",servicenames)
    else
        servicenames = query_allkeys_updatecache(red)
    end
    -- Try each known service name against the URI; first match wins.
    local delimiter = "<>"
    for key in string.gmatch(servicenames,"(.-)"..delimiter) do
        ngx.log(ngx.WARN, "==key_table key:", key)
        -- Match "<key>" exactly or "<key>/..." as a path prefix.
        local from, to, err = ngx.re.find(uri, "^"..key.."(/(.*))?$", "jo")
        if from then
            ngx.log(ngx.WARN,"Matched! start-end:",from,"-",to)
            local result = query_ipurl_updatecache(red,key)
            if result then
                break
            end
        else
            ngx.log(ngx.WARN,"not Matched")
            if err then
                -- NOTE(review): returning here skips close_redis, leaking the
                -- connection back to nginx's GC instead of the pool.
                ngx.log(ngx.WARN,"ngx.re.find throw error: ",err)
                return
            end
        end
    end
    return close_redis(red)
end
-- Rewrite the request URI: replace the matched service key prefix with the
-- target url resolved by query_router_info.  A "fallback" server means no
-- route was found, so answer 404.
local function rewrite_router_url()
    local server=ngx.var.server
    if server=="fallback" then
        ngx.status = ngx.HTTP_NOT_FOUND
        ngx.exit(ngx.status)
    end
    local url=ngx.var.url
    local key=ngx.var.key
    -- "^key(.*)" -> "url$1": keep the remainder of the path after the key.
    local rewriteduri = ngx.re.sub(ngx.var.uri, "^"..key.."(.*)", url.."$1", "o")
    ngx.log(ngx.WARN, "==rewrited uri:", rewriteduri)
    ngx.req.set_uri(rewriteduri)
end
query_router_info()
rewrite_router_url()
| 27.44385
| 106
| 0.692907
|
c9ee7b544f05ddc62a4dbeb1ccd4c6a37912c60c
| 783
|
ts
|
TypeScript
|
src/app/home/home.component.ts
|
amiroslaw/scully-plugins
|
7f1f5f9f774c1983cb49b78cd4c35292e32d156c
|
[
"MIT"
] | 20
|
2020-03-14T22:33:21.000Z
|
2021-07-25T03:40:50.000Z
|
src/app/home/home.component.ts
|
amiroslaw/scully-plugins
|
7f1f5f9f774c1983cb49b78cd4c35292e32d156c
|
[
"MIT"
] | 27
|
2020-03-25T02:09:05.000Z
|
2022-03-02T07:44:21.000Z
|
src/app/home/home.component.ts
|
amiroslaw/scully-plugins
|
7f1f5f9f774c1983cb49b78cd4c35292e32d156c
|
[
"MIT"
] | 7
|
2020-05-18T15:27:38.000Z
|
2022-02-22T14:42:58.000Z
|
import { Component, OnInit } from '@angular/core';
import { ScullyRoutesService, ScullyRoute } from '@scullyio/ng-lib';
import { Observable } from 'rxjs';
import { map } from 'rxjs/operators';
@Component({
  selector: 'app-home',
  templateUrl: './home.component.html',
  styleUrls: ['./home.component.scss']
})
export class HomeComponent implements OnInit {
  /**
   * Pre-rendered Scully routes under `/blog/`, sorted by their `date`
   * front-matter field, oldest first.
   */
  links$: Observable<ScullyRoute[]> = this.scully.available$.pipe(
    map(routeList =>
      routeList.filter((route: ScullyRoute) => route.route.startsWith(`/blog/`))
    ),
    // NOTE(review): `a.date < b.date ? -1 : 1` yields ascending (oldest-first)
    // order — confirm that is intended for the blog index.
    map(blogs => blogs.sort((a, b) => (a.date < b.date ? -1 : 1)))
  );
  constructor(private scully: ScullyRoutesService) {}
  ngOnInit() {
    // debug current pages — logs every route emission to the console;
    // consider removing before release.
    this.links$.subscribe(links => {
      console.log(links);
    });
  }
}
| 27.964286
| 80
| 0.651341
|
5db03cd420da69738fbe3acbf8468ad87ef583ce
| 17,405
|
cpp
|
C++
|
xcore/crowd_app/leon/params/params.cpp
|
baidu-research/hydra-xeye
|
67d4464cff8f44f3a396a2c9aed46d0ce02f8de2
|
[
"Apache-2.0"
] | 1
|
2021-09-01T00:50:34.000Z
|
2021-09-01T00:50:34.000Z
|
xcore/crowd_app/leon/params/params.cpp
|
baidu-research/hydra-xeye
|
67d4464cff8f44f3a396a2c9aed46d0ce02f8de2
|
[
"Apache-2.0"
] | null | null | null |
xcore/crowd_app/leon/params/params.cpp
|
baidu-research/hydra-xeye
|
67d4464cff8f44f3a396a2c9aed46d0ce02f8de2
|
[
"Apache-2.0"
] | null | null | null |
#include "params.hpp"
#include <Log.h>
#ifndef MVLOG_UNIT_NAME
#define MVLOG_UNIT_NAME params
#endif
#include <mvLog.h>
#include "utils/utils.hpp"
// Overwrite selected fields of a config JSON string with this object's
// current values (xeye_id, confidence, ccm index, server URLs, local network
// info) and return a freshly printed JSON string.
// Returns NULL if conf_json fails to parse.  The caller owns the returned
// buffer (allocated by cJSON_Print) and must free it.
char* Params::merge_conf_json(const char* conf_json) {
    cJSON* root = cJSON_Parse(conf_json);
    if (root == NULL) {
        LOGE << "invalid json string";
        return NULL;
    }
    // merge xeye_id
    cJSON_ReplaceItemInObject(root, "xeye_id", cJSON_CreateString(m_xeye_id.c_str()));
    // merge confidence
    cJSON_ReplaceItemInObject(root, "confidence", cJSON_CreateNumber(m_confidence));
    // merge sensor ccm index
    cJSON_ReplaceItemInObject(root, "sensor_ccm_index", cJSON_CreateNumber(m_sensor_ccm_index));
    // merge data server url (skipped with a log if the node is absent)
    cJSON* data_server = cJSON_GetObjectItem(root, "data_server");
    if (data_server != NULL) {
        cJSON_ReplaceItemInObject(data_server, "url", \
                cJSON_CreateString(data_server_url().c_str()));
    } else {
        LOGE << "Can not merge data server url";
    }
    // merge heartbeat server url
    cJSON* heartbeat_server = cJSON_GetObjectItem(root, "heartbeat_server");
    if (heartbeat_server != NULL) {
        cJSON_ReplaceItemInObject(heartbeat_server, "url", \
                cJSON_CreateString(heartbeat_server_url().c_str()));
    } else {
        LOGE << "Can not merge heartbeat server url";
    }
    // merge local network info
    cJSON* local_network_info = cJSON_GetObjectItem(root, "local_network_info");
    if (local_network_info != NULL) {
        cJSON_ReplaceItemInObject(local_network_info, "ip_address", \
                cJSON_CreateString(local_ipaddr().c_str()));
        cJSON_ReplaceItemInObject(local_network_info, "net_mask", \
                cJSON_CreateString(local_netmask().c_str()));
        cJSON_ReplaceItemInObject(local_network_info, "gate_way", \
                cJSON_CreateString(local_gateway().c_str()));
    } else {
        LOGE << "Can not merge local network info";
    }
    char* new_json = cJSON_Print(root);
    cJSON_Delete(root);
    return new_json;
}
// Load configuration from a JSON file into this object's members.
// Returns true only if every expected node was present; partial loads still
// populate whatever fields were found.  Invokes m_params_callback at the end.
bool Params::load_from_file(const std::string& filename) {
    // [NOTE]: before calling load_from_file function,
    // this callback function should be initialized first
    // or the xeye_id could not pass to LRT properly.
    assert(m_params_callback != nullptr);
    FILE *f = NULL;
    long len = 0;
    /* open in read binary mode */
    f = fopen(filename.c_str(), "rb");
    if (NULL == f) {
        LOGE << "Can not open file: " << filename;
        return false;
    }
    /* get the length */
    fseek(f, 0, SEEK_END);
    len = ftell(f);
    fseek(f, 0, SEEK_SET);
    if (len < 0) {
        // ftell failed; avoid allocating with a negative size.
        LOGE << "Can not determine size of file: " << filename;
        fclose(f);
        return false;
    }
    // Bug fix: this buffer was previously held in a std::shared_ptr<char>
    // constructed from `new char[len + 1]`; the default deleter calls
    // `delete` instead of `delete[]`, which is undefined behavior.
    // Supply an array deleter explicitly.
    std::shared_ptr<char> data(new char[len + 1], std::default_delete<char[]>());
    // fread may legitimately short-read; terminate at the bytes actually read.
    size_t read_len = fread(data.get(), 1, len, f);
    data.get()[read_len] = '\0';
    if (0 != fclose(f)) {
        // non fatal error, only apply logging
        LOGE << "close json file: " << filename << " FAILED";
    }
    cJSON* root = cJSON_Parse(data.get());
    if (nullptr == root) {
        LOGE << "Invalid cjson format maybe";
        return false;
    }
    // Each missing node clears `succeed` but parsing continues so that as
    // many fields as possible are loaded.
    bool succeed = true;
    cJSON* psub_node = NULL;
    psub_node = cJSON_GetObjectItem(root, "xeye_id");
    if (psub_node != NULL) {
        m_xeye_id = std::string(psub_node->valuestring);
    } else {
        succeed = false;
        LOGE << "no xeye_id nodes in config file";
    }
    mvLog(MVLOG_INFO, "xeye_id: %s", m_xeye_id.c_str());
    psub_node = cJSON_GetObjectItem(root, "confidence");
    if (psub_node != NULL) {
        m_confidence = psub_node->valueint;
    } else {
        succeed = false;
        LOGE << "no confidence sub node in config file";
    }
    mvLog(MVLOG_INFO, "Confidence: %d", m_confidence);
    psub_node = cJSON_GetObjectItem(root, "sensor_ccm_index");
    if (psub_node != NULL) {
        m_sensor_ccm_index = psub_node->valueint;
    } else {
        succeed = false;
        LOGE << "No sensor_ccm_index nodes in config file";
    }
    cJSON* running_mode = cJSON_GetObjectItem(root, "running_mode");
    if (running_mode != NULL) {
        psub_node = cJSON_GetObjectItem(running_mode, "mode");
        if (psub_node != NULL) {
            set_mode(std::string(psub_node->valuestring));
        } else {
            succeed = false;
            LOGE << "No mode nodes in running_mode";
        }
        mvLog(MVLOG_INFO, "Running mode: %s", get_mode_str().c_str());
    } else {
        succeed = false;
        LOGE << "No running_mode nodes in config file";
    }
    // online_grab: tuple of (enabled, strategy, flow-control value).
    cJSON* online_grab = cJSON_GetObjectItem(root, "online_grab");
    if (online_grab != NULL) {
        psub_node = cJSON_GetObjectItem(online_grab, "enable");
        if (psub_node != NULL) {
            std::get<0>(m_online_img_grab) = (psub_node->valueint != 0);
        } else {
            succeed = false;
            LOGE << "No enable node in online_grab";
        }
        psub_node = cJSON_GetObjectItem(online_grab, "strategy");
        if (psub_node != NULL) {
            std::get<1>(m_online_img_grab) = std::string(psub_node->valuestring);
            // Only "interval" and "ondemand" are valid strategies; anything
            // else (or an invalid value) force-disables the feature.
            if (online_image_grab_strategy() != "interval" && \
                    online_image_grab_strategy() != "ondemand") {
                std::get<0>(m_online_img_grab) = false;
                LOGE << "Invalid image grabbing strategy, this feature is forced to be disabled.";
            } else {
                psub_node = cJSON_GetObjectItem(online_grab, "value");
                if (psub_node != NULL && psub_node->valueint > 0) {
                    std::get<2>(m_online_img_grab) = psub_node->valueint;
                } else {
                    // if value is not valie, still forced this feature to be disabled;
                    std::get<0>(m_online_img_grab) = false;
                    LOGE << "Invalid image grab value setting. value should be > 0";
                }
            }
        } else {
            succeed = false;
            LOGE << "No strategy node in online_grab";
        }
        mvLog(MVLOG_INFO, "online grab enable : %d", is_online_image_grab_enable());
        mvLog(MVLOG_INFO, "online grab strategy: %s", online_image_grab_strategy().c_str());
        mvLog(MVLOG_INFO, "online grab value : %d", online_image_flow_control_value());
    } else {
        succeed = false;
        mvLog(MVLOG_ERROR, "No online_grab node in config file");
    }
    // online_update is optional: missing nodes do not clear `succeed`.
    cJSON* online_update = cJSON_GetObjectItem(root, "online_update");
    if (online_update) {
        psub_node = cJSON_GetObjectItem(online_update, "host");
        if (psub_node != NULL) {
            std::get<0>(m_online_update) = std::string(psub_node->valuestring);
        }
        psub_node = cJSON_GetObjectItem(online_update, "url_prefix");
        if (psub_node != NULL) {
            std::get<1>(m_online_update) = std::string(psub_node->valuestring);
        }
    }
    if (!load_servers_config_from_json(root)) {
        succeed = false;
    }
    if (!load_models_config_from_json(root)) {
        succeed = false;
    }
    cJSON_Delete(root);
    // save the path of the conf file
    m_conf_filename = filename;
    // call the callback function
    if (m_params_callback) {
        m_params_callback(this);
    }
    return succeed;
}
bool Params::save_to_file(const std::string& filename) const {
cJSON* root = NULL;
root = cJSON_CreateObject();
cJSON_AddItemToObject(root, "xeye_id", \
cJSON_CreateString(m_xeye_id.c_str()));
cJSON_AddItemToObject(root, "confidence", \
cJSON_CreateNumber(get_confidence()));
cJSON_AddItemToObject(root, "sensor_ccm_index", \
cJSON_CreateNumber(m_sensor_ccm_index));
cJSON* mode = cJSON_CreateObject();
cJSON_AddItemToObject(mode, "comment", \
cJSON_CreateString("running mode, can only be configured as record, normal, live mode"));
cJSON_AddItemToObject(mode, "mode", cJSON_CreateString(get_mode_str().c_str()));
cJSON_AddItemToObject(root, "running_mode", mode);
// online grab image configuration
cJSON* online_grab = cJSON_CreateObject();
cJSON_AddItemToObject(online_grab, "enable", cJSON_CreateBool(is_online_image_grab_enable()));
cJSON_AddItemToObject(online_grab, "strategy", cJSON_CreateString(online_image_grab_strategy().c_str()));
cJSON_AddItemToObject(online_grab, "value", cJSON_CreateNumber(online_image_flow_control_value()));
cJSON_AddItemToObject(root, "online_grab", online_grab);
// online update configuration
cJSON* online_update = cJSON_CreateObject();
cJSON_AddItemToObject(online_update, "host", cJSON_CreateString(online_update_host().c_str()));
cJSON_AddItemToObject(online_update, "url_prefix", cJSON_CreateString(online_update_url_prefix().c_str()));
cJSON_AddItemToObject(root, "online_update", online_update);
save_servers_config_to_json(root);
// models configuration
save_models_config_to_json(root);
// must free out_str here, or will cause a memory leak
char* out_str = cJSON_Print(root);
std::ofstream of(filename, std::ofstream::binary);
of.write(out_str, strlen(out_str));
of.close();
cJSON_Delete(root);
free(out_str);
return true;
}
bool Params::save_servers_config_to_json(cJSON* root) const {
cJSON* data_server = NULL;
cJSON* ntp_server = NULL;
cJSON* heartbeat_server = NULL;
cJSON* playback_server = NULL;
cJSON* local_network_info = NULL;
// data server
cJSON_AddItemToObject(root, "data_server", \
data_server = cJSON_CreateObject());
cJSON_AddItemToObject(data_server, "comment", cJSON_CreateString( \
"the IP address and port of server which xeye send data to"));
cJSON_AddItemToObject(data_server, "addr", \
cJSON_CreateString(data_server_addr().c_str()));
cJSON_AddItemToObject(data_server, "port", \
cJSON_CreateNumber(data_server_port()));
cJSON_AddItemToObject(data_server, "url", \
cJSON_CreateString(data_server_url().c_str()));
// ntp server
cJSON_AddItemToObject(root, "ntp_server", \
ntp_server = cJSON_CreateObject());
cJSON_AddItemToObject(ntp_server, "comment", cJSON_CreateString( \
"the IP address and port of NTP time syncronization server"));
cJSON_AddItemToObject(ntp_server, "addr", \
cJSON_CreateString(ntp_server_addr().c_str()));
cJSON_AddItemToObject(ntp_server, "port", \
cJSON_CreateNumber(ntp_server_port()));
// heartbeat server
cJSON_AddItemToObject(root, "heartbeat_server", \
heartbeat_server = cJSON_CreateObject());
cJSON_AddItemToObject(heartbeat_server, "comment", cJSON_CreateString( \
"the IP address and port of heartbeat server"));
cJSON_AddItemToObject(heartbeat_server, "addr", \
cJSON_CreateString(heartbeat_server_addr().c_str()));
cJSON_AddItemToObject(heartbeat_server, "port", \
cJSON_CreateNumber(heartbeat_server_port()));
cJSON_AddItemToObject(heartbeat_server, "url", \
cJSON_CreateString(heartbeat_server_url().c_str()));
// playback server
cJSON_AddItemToObject(root, "playback_server", \
playback_server = cJSON_CreateObject());
cJSON_AddItemToObject(playback_server, "comment", cJSON_CreateString( \
"the IP address and port of playback server, \
if addr section is empty, then playback mode is disabled"));
cJSON_AddItemToObject(playback_server, "addr", \
cJSON_CreateString(playback_server_addr().c_str()));
cJSON_AddItemToObject(playback_server, "port", \
cJSON_CreateNumber(playback_server_port()));
// local network info
cJSON_AddItemToObject(root, "local_network_info", \
local_network_info = cJSON_CreateObject());
cJSON_AddItemToObject(local_network_info, "comment", cJSON_CreateString( \
"static ip address, netmask and gateway configuration"));
cJSON_AddItemToObject(local_network_info, "ip_address", \
cJSON_CreateString(local_ipaddr().c_str()));
cJSON_AddItemToObject(local_network_info, "net_mask", \
cJSON_CreateString(local_netmask().c_str()));
cJSON_AddItemToObject(local_network_info, "gate_way", \
cJSON_CreateString(local_gateway().c_str()));
return true;
}
bool Params::load_servers_config_from_json(const cJSON* root) {
bool succeed = true;
cJSON* data_server = cJSON_GetObjectItem(root, "data_server");
if (nullptr == data_server) {
LOGE << "NO object named data_server in json file";
succeed = false;
} else {
m_data_server = std::make_tuple( \
std::string(cJSON_GetObjectItem(data_server, "addr")->valuestring), \
cJSON_GetObjectItem(data_server, "port")->valueint, \
cJSON_GetObjectItem(data_server, "url")->valuestring);
}
cJSON* ntp_server = cJSON_GetObjectItem(root, "ntp_server");
if (nullptr == ntp_server) {
LOGE << "NO object named ntp_server in json file";
succeed = false;
} else {
m_ntp_server = std::make_pair( \
std::string(cJSON_GetObjectItem(ntp_server, "addr")->valuestring), \
cJSON_GetObjectItem(ntp_server, "port")->valueint);
}
cJSON* hb_server = cJSON_GetObjectItem(root, "heartbeat_server");
if (nullptr == hb_server) {
LOGE << "NO object named heartbeat_server in json file";
succeed = false;
} else {
m_hb_server = std::make_tuple( \
std::string(cJSON_GetObjectItem(hb_server, "addr")->valuestring), \
cJSON_GetObjectItem(hb_server, "port")->valueint, \
std::string(cJSON_GetObjectItem(hb_server, "url")->valuestring));
}
cJSON* local_network_info = cJSON_GetObjectItem(root, "local_network_info");
if (nullptr == local_network_info) {
LOGE << "NO object named local_network_info in json file";
succeed = false;
} else {
m_local_address = std::make_tuple( \
std::string(cJSON_GetObjectItem(local_network_info, "ip_address")->valuestring), \
std::string(cJSON_GetObjectItem(local_network_info, "net_mask")->valuestring), \
std::string(cJSON_GetObjectItem(local_network_info, "gate_way")->valuestring));
}
cJSON* playback_server = cJSON_GetObjectItem(root, "playback_server");
if (nullptr == playback_server) {
LOGE << "NO object named playback_server in json file";
succeed = false;
} else {
m_playback_server = std::make_pair( \
std::string(cJSON_GetObjectItem(playback_server, "addr")->valuestring), \
cJSON_GetObjectItem(playback_server, "port")->valueint);
}
mvLog(MVLOG_INFO, "data_server:(%s, %d, %s)", data_server_addr().c_str(), \
data_server_port(), data_server_url().c_str());
mvLog(MVLOG_INFO, "playback_server:(%s, %d)", playback_server_addr().c_str(), \
playback_server_port());
mvLog(MVLOG_INFO, "ntp_server:(%s, %d)", ntp_server_addr().c_str(), \
ntp_server_port());
mvLog(MVLOG_INFO, "heartbeat_server:(%s, %d, %s)", heartbeat_server_addr().c_str(), \
heartbeat_server_port(), heartbeat_server_url().c_str());
if (std::get<0>(m_local_address) == "") {
mvLog(MVLOG_INFO, "Use DHCP server");
} else {
mvLog(MVLOG_INFO, "Static ip address: %s, netmask: %s, gateway: %s", \
std::get<0>(m_local_address).c_str(), \
std::get<1>(m_local_address).c_str(), \
std::get<2>(m_local_address).c_str());
}
return succeed;
}
bool Params::save_models_config_to_json(cJSON* root) const {
assert(root != NULL);
cJSON* models = NULL;
// support unlimited models
cJSON_AddItemToObject(root, "models", models = cJSON_CreateArray());
for (size_t i = 0; i < m_models.size(); ++i) {
cJSON* item = cJSON_CreateObject();
cJSON_AddItemToObject(item, "comment", \
cJSON_CreateString("model_name: the name of the model, \
model_path: the abs path of the model file"));
cJSON_AddItemToObject(item, "model_name", \
cJSON_CreateString(m_models[i].first.c_str()));
cJSON_AddItemToObject(item, "model_path", \
cJSON_CreateString(m_models[i].second.c_str()));
cJSON_AddItemToArray(models, item);
}
return true;
}
bool Params::load_models_config_from_json(cJSON* root) {
assert(root != NULL);
cJSON* models = cJSON_GetObjectItem(root, "models");
if (models == nullptr) {
return false;
}
int models_number = cJSON_GetArraySize(models);
if (models_number <= 0) {
mvLog(MVLOG_ERROR, "No models defined in conf.json");
return false;
}
for (int i = 0; i < models_number; ++i) {
cJSON* model = cJSON_GetArrayItem(models, i);
if (model == nullptr) {
LOGE << "No model nodes in models";
return false;
}
m_models.push_back(std::make_pair( \
cJSON_GetObjectItem(model, "model_name")->valuestring,
cJSON_GetObjectItem(model, "model_path")->valuestring));
mvLog(MVLOG_INFO, "model_name: %s, model_path: %s", \
m_models[i].first.c_str(), m_models[i].second.c_str());
}
return (m_models.size() > 0);
}
| 40.382831
| 111
| 0.637863
|
75bd5bca52ac5f263fe4185915dec9ab8e3f35d4
| 151
|
css
|
CSS
|
CSS/coding-164/chapter9/src/02-plugins-module.css
|
zhuyudong/blog
|
f3dbddb0065ce8e314718d1a1201c7cafc8a8465
|
[
"MIT"
] | 1
|
2019-06-04T21:00:07.000Z
|
2019-06-04T21:00:07.000Z
|
CSS/coding-164/chapter9/src/02-plugins-module.css
|
zhuyudong/blog
|
f3dbddb0065ce8e314718d1a1201c7cafc8a8465
|
[
"MIT"
] | 2
|
2020-07-18T13:36:36.000Z
|
2021-05-09T03:07:45.000Z
|
CSS/coding-164/chapter9/src/02-plugins-module.css
|
zhuyudong/blog
|
f3dbddb0065ce8e314718d1a1201c7cafc8a8465
|
[
"MIT"
] | 1
|
2019-08-29T02:59:37.000Z
|
2019-08-29T02:59:37.000Z
|
/* css-reset */
*{
padding:0;
margin:0;
}
body{
margin: 10px 20px 10px 20px;
font-size:14px;
}
body{
background-color:#ffffff;
}
| 10.066667
| 32
| 0.569536
|
0901d3118494ba3323850479185d0df03d780cff
| 309
|
ts
|
TypeScript
|
documentation/techdoc/lst/datatype_diastolic.ts
|
regionostergotland/ipforregionen
|
bbc7a3a1a95feeaaa187b21afa046aa6995192b0
|
[
"MIT"
] | null | null | null |
documentation/techdoc/lst/datatype_diastolic.ts
|
regionostergotland/ipforregionen
|
bbc7a3a1a95feeaaa187b21afa046aa6995192b0
|
[
"MIT"
] | 11
|
2020-04-17T07:27:03.000Z
|
2022-03-02T07:14:24.000Z
|
documentation/techdoc/lst/datatype_diastolic.ts
|
regionostergotland/ipforregionen
|
bbc7a3a1a95feeaaa187b21afa046aa6995192b0
|
[
"MIT"
] | 2
|
2019-06-11T09:35:56.000Z
|
2019-07-12T15:11:50.000Z
|
let diastolicDataType = new DataTypeQuantity(
{
path: 'any_event',
label: 'Diastoliskt',
description: `Det minsta systemiskt arteriella blodtrycket uppmätt
diastoliskt eller i hjärtcykelns avslappningsfas.`,
required: true,
single: false,
visible: true,
}, 'mm[Hg]', 0, 1000
)
| 25.75
| 70
| 0.68932
|
932859bb4684db7d90622ff5f15d5294322508d1
| 3,974
|
go
|
Go
|
src/github.com/cppforlife/bosh-warden-cpi/vm/warden_file_service.go
|
crhntr/bosh-warden-cpi-release
|
ec4f6d68bef61cdc9393dbdd1c9fce601bb6d887
|
[
"Apache-2.0"
] | 9
|
2015-06-03T21:38:43.000Z
|
2021-02-05T12:46:05.000Z
|
src/github.com/cppforlife/bosh-warden-cpi/vm/warden_file_service.go
|
crhntr/bosh-warden-cpi-release
|
ec4f6d68bef61cdc9393dbdd1c9fce601bb6d887
|
[
"Apache-2.0"
] | 16
|
2016-01-06T06:07:48.000Z
|
2018-11-14T19:46:11.000Z
|
src/bosh-warden-cpi/vm/warden_file_service.go
|
cloudfoundry/bosh-warden-cpi-release
|
c8dec68d2364acda70ca633e5962d746f3ecbe13
|
[
"Apache-2.0"
] | 13
|
2015-05-19T20:50:23.000Z
|
2018-11-04T15:05:39.000Z
|
package vm
import (
"archive/tar"
"bytes"
"fmt"
"io"
"io/ioutil"
"path/filepath"
wrdn "code.cloudfoundry.org/garden"
bosherr "github.com/cloudfoundry/bosh-utils/errors"
boshlog "github.com/cloudfoundry/bosh-utils/logger"
)
type wardenFileService struct {
container wrdn.Container
logTag string
logger boshlog.Logger
}
func NewWardenFileService(container wrdn.Container, logger boshlog.Logger) WardenFileService {
return &wardenFileService{
container: container,
logTag: "vm.wardenFileService",
logger: logger,
}
}
func (s *wardenFileService) Download(sourcePath string) ([]byte, error) {
sourceFileName := filepath.Base(sourcePath)
tmpFilePath := filepath.Join("/tmp", sourceFileName)
s.logger.Debug(s.logTag, "Downloading file at %s", sourcePath)
// Copy settings file to a temporary directory
// so that tar (running as vcap) has permission to readdir.
// (/var/vcap/bosh is owned by root.)
script := fmt.Sprintf(
"cp %s %s && chown vcap:vcap %s",
sourcePath,
tmpFilePath,
tmpFilePath,
)
err := s.runPrivilegedScript(script)
if err != nil {
return []byte{}, bosherr.WrapError(err, "Running copy source file script")
}
spec := wrdn.StreamOutSpec{
Path: tmpFilePath,
User: "root",
}
streamOut, err := s.container.StreamOut(spec)
if err != nil {
return []byte{}, bosherr.WrapErrorf(err, "Streaming out file '%s'", sourceFileName)
}
tarReader := tar.NewReader(streamOut)
_, err = tarReader.Next()
if err != nil {
return []byte{}, bosherr.WrapErrorf(err, "Reading tar header for '%s'", sourceFileName)
}
return ioutil.ReadAll(tarReader)
}
func (s *wardenFileService) Upload(destinationPath string, contents []byte) error {
s.logger.Debug(s.logTag, "Uploading file to %s", destinationPath)
destinationFileName := filepath.Base(destinationPath)
// Stream in settings file to a temporary directory
// so that tar (running as vcap) has permission to unpack into dir.
tarReader, err := s.tarReader(destinationFileName, contents)
if err != nil {
return bosherr.WrapError(err, "Creating tar")
}
spec := wrdn.StreamInSpec{
Path: "/tmp/",
User: "root",
TarStream: tarReader,
}
err = s.container.StreamIn(spec)
if err != nil {
return bosherr.WrapError(err, "Streaming in tar")
}
tmpFilePath := filepath.Join("/tmp", destinationFileName)
// Move settings file to its final location
script := fmt.Sprintf(
"mv %s %s",
tmpFilePath,
destinationPath,
)
err = s.runPrivilegedScript(script)
if err != nil {
return bosherr.WrapErrorf(err, "Moving temporary file to destination '%s'", destinationPath)
}
return nil
}
func (s *wardenFileService) runPrivilegedScript(script string) error {
processSpec := wrdn.ProcessSpec{
Path: "bash",
Args: []string{"-c", script},
User: "root",
}
// Collect output for debugging
stdout := new(bytes.Buffer)
stderr := new(bytes.Buffer)
processIO := wrdn.ProcessIO{Stdout: stdout, Stderr: stderr}
process, err := s.container.Run(processSpec, processIO)
if err != nil {
return bosherr.WrapError(err, "Running script")
}
exitCode, err := process.Wait()
if err != nil {
return bosherr.WrapError(err, "Waiting for script")
}
if exitCode != 0 {
return bosherr.Errorf("Script exited with non-0 exit code, stdout: '%s' stderr: '%s'", stdout.String(), stderr.String())
}
return nil
}
func (s *wardenFileService) tarReader(fileName string, contents []byte) (io.Reader, error) {
tarBytes := &bytes.Buffer{}
tarWriter := tar.NewWriter(tarBytes)
fileHeader := &tar.Header{
Name: fileName,
Size: int64(len(contents)),
Mode: 0640,
}
err := tarWriter.WriteHeader(fileHeader)
if err != nil {
return nil, bosherr.WrapError(err, "Writing tar header")
}
_, err = tarWriter.Write(contents)
if err != nil {
return nil, bosherr.WrapError(err, "Writing file to tar")
}
err = tarWriter.Close()
if err != nil {
return nil, bosherr.WrapError(err, "Closing tar writer")
}
return tarBytes, nil
}
| 23.514793
| 122
| 0.70005
|
2ddf409b15816542c99c67db6aa72e8455e160f9
| 5,161
|
dart
|
Dart
|
pkg/fake_gcloud/lib/mem_datastore.dart
|
mingsai/pub-dev
|
faa61a1c3ae5bbe21d88b9e24c15f3a6423717dd
|
[
"BSD-3-Clause"
] | null | null | null |
pkg/fake_gcloud/lib/mem_datastore.dart
|
mingsai/pub-dev
|
faa61a1c3ae5bbe21d88b9e24c15f3a6423717dd
|
[
"BSD-3-Clause"
] | null | null | null |
pkg/fake_gcloud/lib/mem_datastore.dart
|
mingsai/pub-dev
|
faa61a1c3ae5bbe21d88b9e24c15f3a6423717dd
|
[
"BSD-3-Clause"
] | null | null | null |
import 'package:gcloud/common.dart';
import 'package:gcloud/datastore.dart';
/// Implementation of [Datastore] interface with in-memory storage.
///
/// Entities are cloned, and should be safe to modify them once they are stored
/// or read.
class MemDatastore implements Datastore {
final _entities = <Key, Entity>{};
int _unusedId = 0;
@override
Future<List<Key>> allocateIds(List<Key> keys) async {
return keys.map((k) {
if (k.elements.last.id == null) {
final elements = List<KeyElement>.from(k.elements);
final last = elements.removeLast();
elements.add(KeyElement(last.kind, _unusedId++));
return Key(elements, partition: k.partition);
} else {
return k;
}
}).toList();
}
@override
Future<Transaction> beginTransaction({bool crossEntityGroup = false}) async {
return _Transaction();
}
@override
Future<CommitResult> commit(
{List<Entity> inserts,
List<Entity> autoIdInserts,
List<Key> deletes,
Transaction transaction}) async {
deletes?.forEach((key) => _entities.remove(key));
inserts?.forEach((e) {
_entities[e.key] = e;
});
if (autoIdInserts != null && autoIdInserts.isNotEmpty) {
throw UnimplementedError(
'fake_gcloud.Datastore.autoIdInserts is not implemented.');
}
return CommitResult([]);
}
@override
Future<List<Entity>> lookup(List<Key> keys, {Transaction transaction}) async {
if (keys.any((k) => k.elements.any((e) => e.id == null))) {
throw ArgumentError('Key contains null.');
}
return keys.map((key) => _entities[key]).toList();
}
dynamic _getValue(Entity entity, String property) {
if (property == '__key__') return entity.key;
return entity.properties[property];
}
int _compare(dynamic a, dynamic b) {
if (a is Key && b is Key) {
if (a.elements.length != 1) {
throw UnimplementedError();
}
if (b.elements.length != 1) {
throw UnimplementedError();
}
return Comparable.compare(a.elements.single.id as Comparable,
b.elements.single.id as Comparable);
} else {
return Comparable.compare(a as Comparable, b as Comparable);
}
}
@override
Future<Page<Entity>> query(Query query,
{Partition partition, Transaction transaction}) async {
List<Entity> items = _entities.values
.where((e) => e.key.elements.last.kind == query.kind)
.where(
(e) {
if (query.ancestorKey == null) {
return true;
}
if (query.ancestorKey.partition != e.key.partition) {
return false;
}
if (query.ancestorKey.elements.length != e.key.elements.length - 1) {
return false;
}
for (int i = 0; i < query.ancestorKey.elements.length; i++) {
if (query.ancestorKey.elements[i] != e.key.elements[i]) {
return false;
}
}
return true;
},
).where(
(e) {
if (query.filters == null || query.filters.isEmpty) {
return true;
}
return query.filters.every((f) {
final v = _getValue(e, f.name);
if (v == null) return false;
final c = _compare(v, f.value);
switch (f.relation) {
case FilterRelation.Equal:
return c == 0;
case FilterRelation.LessThan:
return c < 0;
case FilterRelation.LessThanOrEqual:
return c <= 0;
case FilterRelation.GreatherThan:
return c > 0;
case FilterRelation.GreatherThanOrEqual:
return c >= 0;
default:
throw UnimplementedError('Not handled relation: ${f.relation}');
}
});
},
).toList();
if (query.orders != null && query.orders.isNotEmpty) {
items.sort((a, b) {
for (Order o in query.orders) {
final ap = _getValue(a, o.propertyName);
final bp = _getValue(b, o.propertyName);
final c = _compare(ap, bp);
if (c == 0) continue;
if (o.direction == OrderDirection.Ascending) {
return c;
} else {
return -c;
}
}
return 0;
});
}
if (query.offset != null && query.offset > 0) {
items = items.skip(query.offset).toList();
}
if (query.limit != null && query.limit < items.length) {
items = items.sublist(0, query.limit);
}
return _Page(items, 0, 100);
}
@override
Future<void> rollback(Transaction transaction) async {
return null;
}
}
class _Transaction implements Transaction {}
class _Page implements Page<Entity> {
final List<Entity> _items;
@override
final List<Entity> items;
final int _offset;
final int _pageSize;
_Page(this._items, this._offset, this._pageSize)
: items = _items.skip(_offset).take(_pageSize).toList();
@override
bool get isLast => _offset + _pageSize > _items.length;
@override
Future<Page<Entity>> next({int pageSize}) async {
return _Page(_items, _offset + _pageSize, pageSize ?? _pageSize);
}
}
| 29.323864
| 80
| 0.585933
|
bb43ffe8c3d3f4f48fa14913dbe6544189f870d8
| 5,126
|
cs
|
C#
|
src/Microsoft.Graph/Generated/requests/IRestrictedSignInRequest.cs
|
luismanez/msgraph-sdk-dotnet
|
ffecbd4a279ee4292c9540c5468e39d90781bc8d
|
[
"MIT"
] | null | null | null |
src/Microsoft.Graph/Generated/requests/IRestrictedSignInRequest.cs
|
luismanez/msgraph-sdk-dotnet
|
ffecbd4a279ee4292c9540c5468e39d90781bc8d
|
[
"MIT"
] | 5
|
2021-04-02T07:32:51.000Z
|
2022-03-07T12:09:19.000Z
|
src/Microsoft.Graph/Generated/requests/IRestrictedSignInRequest.cs
|
luismanez/msgraph-sdk-dotnet
|
ffecbd4a279ee4292c9540c5468e39d90781bc8d
|
[
"MIT"
] | null | null | null |
// ------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All Rights Reserved. Licensed under the MIT License. See License in the project root for license information.
// ------------------------------------------------------------------------------
// **NOTE** This file was generated by a tool and any changes will be overwritten.
// <auto-generated/>
// Template Source: IEntityRequest.cs.tt
namespace Microsoft.Graph
{
using System;
using System.IO;
using System.Net.Http;
using System.Threading;
using System.Linq.Expressions;
/// <summary>
/// The interface IRestrictedSignInRequest.
/// </summary>
public partial interface IRestrictedSignInRequest : IBaseRequest
{
/// <summary>
/// Creates the specified RestrictedSignIn using POST.
/// </summary>
/// <param name="restrictedSignInToCreate">The RestrictedSignIn to create.</param>
/// <returns>The created RestrictedSignIn.</returns>
System.Threading.Tasks.Task<RestrictedSignIn> CreateAsync(RestrictedSignIn restrictedSignInToCreate); /// <summary>
/// Creates the specified RestrictedSignIn using POST.
/// </summary>
/// <param name="restrictedSignInToCreate">The RestrictedSignIn to create.</param>
/// <param name="cancellationToken">The <see cref="CancellationToken"/> for the request.</param>
/// <returns>The created RestrictedSignIn.</returns>
System.Threading.Tasks.Task<RestrictedSignIn> CreateAsync(RestrictedSignIn restrictedSignInToCreate, CancellationToken cancellationToken);
/// <summary>
/// Deletes the specified RestrictedSignIn.
/// </summary>
/// <returns>The task to await.</returns>
System.Threading.Tasks.Task DeleteAsync();
/// <summary>
/// Deletes the specified RestrictedSignIn.
/// </summary>
/// <param name="cancellationToken">The <see cref="CancellationToken"/> for the request.</param>
/// <returns>The task to await.</returns>
System.Threading.Tasks.Task DeleteAsync(CancellationToken cancellationToken);
/// <summary>
/// Gets the specified RestrictedSignIn.
/// </summary>
/// <returns>The RestrictedSignIn.</returns>
System.Threading.Tasks.Task<RestrictedSignIn> GetAsync();
/// <summary>
/// Gets the specified RestrictedSignIn.
/// </summary>
/// <param name="cancellationToken">The <see cref="CancellationToken"/> for the request.</param>
/// <returns>The RestrictedSignIn.</returns>
System.Threading.Tasks.Task<RestrictedSignIn> GetAsync(CancellationToken cancellationToken);
/// <summary>
/// Updates the specified RestrictedSignIn using PATCH.
/// </summary>
/// <param name="restrictedSignInToUpdate">The RestrictedSignIn to update.</param>
/// <returns>The updated RestrictedSignIn.</returns>
System.Threading.Tasks.Task<RestrictedSignIn> UpdateAsync(RestrictedSignIn restrictedSignInToUpdate);
/// <summary>
/// Updates the specified RestrictedSignIn using PATCH.
/// </summary>
/// <param name="restrictedSignInToUpdate">The RestrictedSignIn to update.</param>
/// <param name="cancellationToken">The <see cref="CancellationToken"/> for the request.</param>
/// <exception cref="ClientException">Thrown when an object returned in a response is used for updating an object in Microsoft Graph.</exception>
/// <returns>The updated RestrictedSignIn.</returns>
System.Threading.Tasks.Task<RestrictedSignIn> UpdateAsync(RestrictedSignIn restrictedSignInToUpdate, CancellationToken cancellationToken);
/// <summary>
/// Adds the specified expand value to the request.
/// </summary>
/// <param name="value">The expand value.</param>
/// <returns>The request object to send.</returns>
IRestrictedSignInRequest Expand(string value);
/// <summary>
/// Adds the specified expand value to the request.
/// </summary>
/// <param name="expandExpression">The expression from which to calculate the expand value.</param>
/// <returns>The request object to send.</returns>
IRestrictedSignInRequest Expand(Expression<Func<RestrictedSignIn, object>> expandExpression);
/// <summary>
/// Adds the specified select value to the request.
/// </summary>
/// <param name="value">The select value.</param>
/// <returns>The request object to send.</returns>
IRestrictedSignInRequest Select(string value);
/// <summary>
/// Adds the specified select value to the request.
/// </summary>
/// <param name="selectExpression">The expression from which to calculate the select value.</param>
/// <returns>The request object to send.</returns>
IRestrictedSignInRequest Select(Expression<Func<RestrictedSignIn, object>> selectExpression);
}
}
| 47.462963
| 153
| 0.643387
|
44063d0df5ac65a630b90f2f301d0584e9a77f1a
| 593
|
py
|
Python
|
tests/conftest.py
|
marcoaaguiar/channels-ws-auth
|
a9b4e87f3be81097c5078803dead9fbd8dd7be1f
|
[
"MIT"
] | 1
|
2020-05-04T11:13:22.000Z
|
2020-05-04T11:13:22.000Z
|
tests/conftest.py
|
marcoaaguiar/channels-ws-auth
|
a9b4e87f3be81097c5078803dead9fbd8dd7be1f
|
[
"MIT"
] | 21
|
2020-05-20T23:29:51.000Z
|
2021-06-25T15:38:45.000Z
|
tests/conftest.py
|
marcoaaguiar/channels-ws-auth
|
a9b4e87f3be81097c5078803dead9fbd8dd7be1f
|
[
"MIT"
] | null | null | null |
import pytest
from rest_framework.test import APIClient
from channels_ws_auth.models import WSAuthTicket
@pytest.fixture
@pytest.mark.django_db
def user(django_user_model):
return django_user_model.objects.create_user(username="user")
@pytest.fixture
@pytest.mark.django_db
def ticket(user):
return WSAuthTicket.objects.create(user=user)
@pytest.fixture
def api_client():
return APIClient()
@pytest.fixture
def api_client_with_credentials(db, user, api_client):
api_client.force_authenticate(user=user)
yield api_client
api_client.force_authenticate(user=None)
| 21.178571
| 65
| 0.797639
|
7f0022b1698ca715b875100e029e149e430784dc
| 6,861
|
cs
|
C#
|
StoreApp/StoreLibrary/OwnerRepo.cs
|
2011-nov02-net/paul-project0-version2
|
8347f0a092176626cddd389504bb6e7c5afe0072
|
[
"MIT"
] | null | null | null |
StoreApp/StoreLibrary/OwnerRepo.cs
|
2011-nov02-net/paul-project0-version2
|
8347f0a092176626cddd389504bb6e7c5afe0072
|
[
"MIT"
] | 5
|
2020-12-01T20:06:24.000Z
|
2020-12-11T16:14:39.000Z
|
StoreApp/StoreLibrary/OwnerRepo.cs
|
2011-nov02-net/paul-project0-version2
|
8347f0a092176626cddd389504bb6e7c5afe0072
|
[
"MIT"
] | null | null | null |
using Model;
using System;
using Microsoft.EntityFrameworkCore;
using System.Linq;
using System.Collections.Generic;
namespace StoreLibrary
{
public class OwnerRepo : IProducts
{
private readonly ICollection<Store> _store;
public readonly DbContextOptions<project0Context> _dbContext;
public OwnerRepo(ICollection<Store> store)
{
_store = store ?? throw new ArgumentNullException(nameof(store));
}
public OwnerRepo(DbContextOptions<project0Context> s_dbContextOptions)
{
_store = new List<Store>();
_dbContext = s_dbContextOptions;
}
public void AddStore(string name)
{
// get the context of the db
using var context = new project0Context(_dbContext);
if (name.Length > 0)
{
// create the db model to add
Store store = new Store() { StoreName = name };
//add location to context and save
context.Add(store);
context.SaveChanges();
}
}
public Store GetStore(int id)
{
using var context = new project0Context(_dbContext);
var dbStore = context.Stores.First(s => s.Id == id);
if (dbStore == null) return null;
return new Store(dbStore.StoreName, dbStore.Id, dbStore.Location, dbStore.Inventories, dbStore.Products );
}
public ICollection<Store> GetAllStore()
{
using var context = new project0Context(_dbContext);
var dbStore = context.Stores.ToList();
return dbStore.Select(s => new Store(s.StoreName, s.Id, s.Location, s.Inventories, s.Products)).ToList();
}
public void AddLocation(string name)
{
// get the context of the db
using var context = new project0Context(_dbContext);
if (name.Length > 0)
{
// create the db model to add
Location location = new Location() { Name = name };
//add location to context and save
context.Add(location);
context.SaveChanges();
}
}
public Location GetLocation(int id)
{
// get the context of the db
using var context = new project0Context(_dbContext);
// find the location in the db that has said id
var dbLocation = context.Locations.FirstOrDefault(l => l.Id == id);
// check for null value
if (dbLocation == null) return null;
return new Location(dbLocation.Name, dbLocation.Id);
}
public ICollection<Location> GetAllLocations()
{
// set up context
using var context = new project0Context(_dbContext);
// get all locations from db
var dbLocations = context.Locations.ToList();
// convert and return
return dbLocations.Select(l => new Location(l.Name, l.Id)).ToList();
}
public void AddProduct(string name, string status, decimal price, int stock)
{
// get the context of the db
using var context = new project0Context(_dbContext);
// Create the new product
var product = new Product()
{
Name = name,
Status = status,
Price = price,
Stock = stock,
};
// Add to db
context.Products.Add(product);
context.SaveChanges();
}
public bool IsProduct(string name)
{
// get the context of the db
using var context = new project0Context(_dbContext);
return context.Products.Any(p => p.Name == name);
}
public Product GetProduct(string name)
{
// get the context of the db
using var context = new project0Context(_dbContext);
return context.Products.FirstOrDefault(p => p.Name == name);
}
public Product GetProduct(int id)
{
// get the context of the db
using var context = new project0Context(_dbContext);
var dbProduct = context.Products.First(p => p.Id == id);
return new Product(dbProduct.Name, dbProduct.Id, dbProduct.Price, dbProduct.Status);
}
public ICollection<Inventory> GetStoreInventory(Store store)
{
// set up context
using var context = new project0Context(_dbContext);
// get the inventory for each location
var dbInventory = context.Inventories.Where(s => s.StoreId == store.Id).Include(s => s.Product).ToList();
// get the products related to each
var inventory = new List<Inventory>();
foreach (var item in dbInventory)
{
// create our converted product
Product prod = new Product(item.Product, item.ProductId, item.Stock, item.Store, item.StoreId);
// create the new inventory
inventory.Add(new Inventory(prod, item.Stock));
}
return inventory;
}
public bool AddStoreInventory(Store store, int productId, int stock)
{
using var context = new project0Context(_dbContext);
var inventory = context.Inventories.First(i => i.StoreId == store.Id && i.ProductId == productId);
try
{
inventory.Stock += stock;
context.SaveChanges();
}
catch (DbUpdateException)
{
return false;
}
return true;
}
public bool AddStoreInventory(Store store, Product product, int stock)
{
// set up context
using var context = new project0Context(_dbContext);
// make the new inventory
Inventory inventory = new Inventory()
{
StoreId = store.Id,
Stock = stock,
ProductId = product.Id
};
context.Inventories.Add(inventory);
// ensure that the save works successfully
try
{
context.SaveChanges();
}
catch (DbUpdateException)
{
return false;
}
return true;
}
        // Placeholder — store creation is not implemented yet.
        public void AddStore()
        {
            throw new NotImplementedException();
        }
        // Placeholder overload — not implemented yet.
        public bool AddStoreInventory()
        {
            throw new NotImplementedException();
        }
}
}
| 29.320513
| 118
| 0.529223
|
e4b9a58f11027c30cd6e2d9314fc5eb90eb335c0
| 882
|
rs
|
Rust
|
src/exp.rs
|
svoctor/lisper
|
5290eaabc81d65dacadaf36092636250d092b3a4
|
[
"MIT"
] | 2
|
2021-01-14T20:57:47.000Z
|
2021-06-07T18:18:04.000Z
|
src/exp.rs
|
svoctor/lisper
|
5290eaabc81d65dacadaf36092636250d092b3a4
|
[
"MIT"
] | 17
|
2021-01-08T03:25:05.000Z
|
2021-12-21T17:44:22.000Z
|
src/exp.rs
|
svoctor/lisper
|
5290eaabc81d65dacadaf36092636250d092b3a4
|
[
"MIT"
] | 1
|
2021-02-19T21:34:11.000Z
|
2021-02-19T21:34:11.000Z
|
use std::fmt;
// Represents an individual Lisp expression
#[derive(Clone)]
pub enum LisperExp {
    Bool(bool),
    Symbol(String),
    Number(f64),
    List(Vec<LisperExp>),
    Func(fn(&LisperExp) -> LisperExp),
    Lambda(Vec<LisperExp>),
}

/// Renders an expression for printing: scalars via their own `Display`,
/// lists/lambdas as comma-joined items in parentheses, functions as the
/// literal text `Function`.
impl fmt::Display for LisperExp {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            LisperExp::Symbol(sym) => write!(f, "{}", sym),
            LisperExp::Number(num) => write!(f, "{}", num),
            LisperExp::Bool(flag) => write!(f, "{}", flag),
            LisperExp::List(items) | LisperExp::Lambda(items) => {
                let rendered: Vec<String> = items.iter().map(|e| e.to_string()).collect();
                write!(f, "({})", rendered.join(","))
            }
            LisperExp::Func(_) => write!(f, "Function"),
        }
    }
}
| 30.413793
| 91
| 0.537415
|
f61eada41d605c8284e0bbb196d25322ae8faab8
| 2,103
|
cc
|
C++
|
common/config.cc
|
yonsei-icsl/nebula
|
12bc2440978018d27996df6b97180056dddefa6c
|
[
"BSD-3-Clause"
] | 5
|
2020-02-13T04:12:41.000Z
|
2021-02-08T04:53:55.000Z
|
common/config.cc
|
yonsei-icsl/Nebula
|
12bc2440978018d27996df6b97180056dddefa6c
|
[
"BSD-3-Clause"
] | 1
|
2020-10-29T10:59:08.000Z
|
2020-12-31T04:04:58.000Z
|
common/config.cc
|
yonsei-icsl/Nebula
|
12bc2440978018d27996df6b97180056dddefa6c
|
[
"BSD-3-Clause"
] | 2
|
2021-03-21T02:45:40.000Z
|
2022-03-10T12:31:06.000Z
|
#include <algorithm>
#include <assert.h>
#include <cstdlib>
#include <fstream>
#include <iostream>
#include <locale>
#include "config.h"
#include "utils.h"
namespace nebula {

// Configuration section
section_config_t::section_config_t(std::string m_name) :
    name(m_name) {
}

section_config_t::~section_config_t() {
    settings.clear();
}

// Add a (key, value) pair to the section settings.
// Both sides are stored lowercased so lookups are case-insensitive.
void section_config_t::add_setting(std::string m_key, std::string m_value) {
    settings.insert(std::pair<std::string,std::string>(lowercase(m_key), lowercase(m_value)));
}

// Check if a setting exists.
bool section_config_t::exists(std::string m_key) {
    return settings.find(lowercase(m_key)) != settings.end();
}

// Configuration
config_t::config_t() {
}

config_t::~config_t() {
    sections.clear();
}

// Parse a configuration file of the form:
//   [section]
//   key=value
// Spaces are stripped; blank lines and '#' comments are skipped.
void config_t::parse(std::string m_config_name) {
    std::fstream file_stream;
    file_stream.open(m_config_name.c_str(), std::fstream::in);
    if(!file_stream.is_open()) {
        std::cerr << "Error: failed to open " << m_config_name << std::endl;
        exit(1);
    }

    std::string line;
    while(getline(file_stream, line)) {
        // Erase all spaces
        line.erase(remove(line.begin(),line.end(),' '),line.end());
        // Skip blank lines or comments
        if(!line.size() || (line[0] == '#')) continue;
        // Beginning of [section]
        if(line[0] == '[') {
            std::string section_name = line.substr(1, line.size()-2);
            sections.push_back(section_config_t(section_name));
        }
        else {
            size_t eq = line.find('=');
            if(eq == std::string::npos) {
                std::cerr << "Error: invalid config" << std::endl << line << std::endl;
                exit(1);
            }
            // Bug fix: a key=value line before any [section] header previously
            // indexed sections[-1] (undefined behavior). Fail with a clear
            // message instead.
            if(sections.empty()) {
                std::cerr << "Error: setting outside of any [section]" << std::endl << line << std::endl;
                exit(1);
            }
            // Save (key, value) pair in the latest section setting.
            std::string key = line.substr(0, eq);
            // substr with only a start index takes the rest of the string
            // (the old length argument over-shot and relied on clamping).
            std::string value = line.substr(eq+1);
            sections[sections.size()-1].add_setting(key, value);
        }
    }
}

}
// End of namespace nebula
| 26.620253
| 94
| 0.603899
|
da4a6ae5d8bc5fecf51cd72e91446552f84cb6e7
| 30,954
|
php
|
PHP
|
app/Repositories/Eloquent/YunHeRenderRepository.php
|
b9813147/IES3
|
90e400501f65fe0dd2b9480d152f2db5cf1079e5
|
[
"MIT"
] | null | null | null |
app/Repositories/Eloquent/YunHeRenderRepository.php
|
b9813147/IES3
|
90e400501f65fe0dd2b9480d152f2db5cf1079e5
|
[
"MIT"
] | 2
|
2020-07-18T05:23:02.000Z
|
2021-05-10T13:22:59.000Z
|
app/Repositories/Eloquent/YunHeRenderRepository.php
|
b9813147/IES3
|
90e400501f65fe0dd2b9480d152f2db5cf1079e5
|
[
"MIT"
] | null | null | null |
<?php
/**
* Created by PhpStorm.
* User: ares
* Date: 2018/11/27
* Time: 4:02 PM
*/
namespace App\Repositories\Eloquent;
use App\Models\Districts;
use App\Models\DistrictsAllSchool;
use App\Models\SchoolInfo;
use DB;
use Illuminate\Support\Collection;
class YunHeRenderRepository
{
/**
* 學校資訊
* SchoolName
* SchoolID
* @param int $school
* @return array
*/
public function schoolInfo($school = null)
{
$data = SchoolInfo::query()->select('SchoolName', 'SchoolID')->where('SchoolID', $school)->get();
return $data->toArray();
}
/**
* 驗證此區域 是否存在
* @param int $district_id
* @return bool
*/
public function checkDistrictExist($district_id = null)
{
return Districts::query()->where('district_id', $district_id)->exists();
}
/**
* 檢查此區域 是否有學校存在
* @param int $district_id
* @param int $schoolId
* @return bool
*/
public function checkDistrictBySchoolExist($district_id = null, $schoolId = null)
{
if (isset($schoolId) && isset($district_id)) {
return DistrictsAllSchool::query()->where('districtID', $district_id)->where('schoolID', $schoolId)->select('schoolID')->distinct()->exists();
} elseif (isset($district_id)) {
return DistrictsAllSchool::query()->where('districtID', $district_id)->select('schoolID')->distinct()->exists();
}
}
/**
* @param int $district_id
* @param int $school
* @param int $semester
* @param int $year
* @return \Illuminate\Support\Collection
* @return bool
*/
public function dashboard($district_id = null, $school = null, $semester = null, $year = null)
{
if (isset($district_id) && isset($semester) && isset($year)) {
$data = DB::table('districts_all_schools')
->select('year', 'teachlogintable', 'subjectnum', 'examinationnum', 'textbooknum', 'studylogintable')
->where('districtID', '=', $district_id)
->where('year', '!=', 0)
->where('semester', $semester)
->where('year', $year)
->exists();
if ($data) {
return DB::table('districts_all_schools')
->select('year', 'teachlogintable', 'subjectnum', 'examinationnum', 'textbooknum', 'studylogintable')
->where('districtID', '=', $district_id)
->where('year', '!=', 0)
->where('semester', $semester)
->where('year', $year)
->get();
}
} elseif (isset($district_id)) {
$data = DB::table('districts_all_schools')
->select('year', 'teachlogintable', 'subjectnum', 'examinationnum', 'textbooknum', 'studylogintable')
->where('districtID', '=', $district_id)
->where('year', '!=', 0)
->exists();
if ($data) {
return DB::table('districts_all_schools')
->select('year', 'teachlogintable', 'subjectnum', 'examinationnum', 'textbooknum', 'studylogintable')
->where('districtID', '=', $district_id)
->where('year', '!=', 0)
->get();
}
} elseif (isset($school) && isset($semester) && isset($year)) {
$data = DB::table('districts_all_schools')
->select('year', 'teachlogintable', 'subjectnum', 'examinationnum', 'textbooknum', 'studylogintable')
->where('schoolID', '=', $school)
->where('year', '!=', 0)
->where('semester', $semester)
->where('year', $year)
->exists();
if ($data) {
return DB::table('districts_all_schools')
->select('year', 'teachlogintable', 'subjectnum', 'examinationnum', 'textbooknum', 'studylogintable')
->where('schoolID', '=', ($schoolId == 0) ? 0 : $schoolId)
->where('year', '!=', 0)
->where('semester', $semester)
->where('year', $year)
->get();
}
} elseif (isset($school)) {
$data = DB::table('districts_all_schools')
->select('year', 'teachlogintable', 'subjectnum', 'examinationnum', 'textbooknum', 'studylogintable')
->where('schoolID', '=', $school)
->where('year', '!=', 0)
->exists();
if ($data) {
return DB::table('districts_all_schools')
->select('year', 'teachlogintable', 'subjectnum', 'examinationnum', 'textbooknum', 'studylogintable')
->where('schoolID', '=', $school)
->where('year', '!=', 0)
->get();
}
} else {
$data = false;
}
/** @noinspection PhpIncompatibleReturnTypeInspection */
return $data;
}
    /**
     * Per-school resource counts summed from teacherdata, keyed by month
     * (single semester) or by year (all time).
     *
     * Semester legend: 0 = first term (months 8-12 plus month 1 of year+1),
     * 1 = second term (months 2-7), anything else = all years.
     *
     * @param string $category column name to sum; it is interpolated directly
     *        into selectRaw, so it must NEVER come from untrusted input
     *        (SQL injection risk)
     * @param null $school_id
     * @param int $semester
     * @param int $year
     * @return array ['num' => grand total, 'time' => period keys, 'data' => per-period totals]
     */
    public function resource($category = null, $school_id = null, $semester = 2, $year = null)
    {
        switch ($semester) {
            case 0:
                // NOTE(review): the ORwhere below relies on SQL AND/OR
                // precedence (…AND… OR …AND…) to express "this term OR
                // January of the following year" — confirm the intended
                // grouping; a closure-based orWhere would make it explicit.
                $data = DB::table('teacherdata')
                    ->selectRaw("sum($category) as total,teacherdata.targetmonth date")
                    ->join('member', 'member.MemberID', 'teacherdata.memberid')
                    ->where('member.SchoolID', $school_id)
                    ->where('targetyear', $year)
                    ->whereBetween('targetmonth',[8,12])
                    ->ORwhere('targetyear', $year + 1)
                    ->where('targetmonth', 1)
                    ->where('member.SchoolID', $school_id)
                    ->groupBy('targetmonth')
                    ->get();
                $data = $this->array_json($data);
                return [
                    'num' => array_sum($data),
                    'time' => array_keys($data),
                    'data' => array_values($data),
                ];
                break;
            case 1:
                $data = DB::table('teacherdata')
                    ->selectRaw("sum($category) as total,teacherdata.targetmonth date")
                    ->join('member', 'member.MemberID', 'teacherdata.memberid')
                    ->where('member.SchoolID', $school_id)
                    ->where('targetyear', $year)
                    ->whereBetween('targetmonth',[2,7])
                    ->where('member.SchoolID', $school_id)
                    ->groupBy('targetmonth')
                    ->get();
                $data = $this->array_json($data);
                return [
                    'num' => array_sum($data),
                    'time' => array_keys($data),
                    'data' => array_values($data),
                ];
                break;
            default:
                // All-time view: aggregate by year instead of month.
                // (The duplicated SchoolID where-clause is harmless.)
                $data = DB::table('teacherdata')
                    ->selectRaw("sum($category) as total,teacherdata.targetyear date")
                    ->join('member', 'member.MemberID', 'teacherdata.memberid')
                    ->where('member.SchoolID', $school_id)
                    ->where('member.SchoolID', $school_id)
                    ->groupBy('targetyear')
                    ->get();
                $data = $this->array_json($data);
                return [
                    'num' => array_sum($data),
                    'time' => array_keys($data),
                    'data' => array_values($data),
                ];
        }
    }
    /**
     * Placeholder — intentionally empty; student-side reporting not implemented.
     */
    public function study()
    {
    }
    /**
     * Bulk per-district (optionally per-school) totals from yu_he_districts.
     *
     * NOTE(review): the docblock legend below says 0 = first term (months
     * 8-12 + 1) and 1 = second term (months 2-7), and the sibling methods
     * (resource/dataFormat/schoolCourse/schoolTeacher) follow that — but in
     * THIS method case 1 queries months 8-12(+1) and case 0 queries 2-7,
     * i.e. the two cases appear swapped. Confirm with callers before changing.
     *
     * @param int $district_id
     * @param int $semester
     * @param int $year
     * @param int $school_id
     * @return Collection|string aggregate row(s), or an error message string
     * $semester 0 上學期 8-1 1 下學期 2-7 2 = all
     */
    public function getData($district_id = null, $semester = 2, $year = null, $school_id = null)
    {
        // One sum() per tracked metric; aliases keep the original column names.
        $select = '
        sum(teachnum) teachnum,
        sum(studentnum) studentnum,
        sum(patriarchnum) patriarchnum,
        sum(teachlogintable) teachlogintable,
        sum(studylogintable) studylogintable,
        sum(curriculum) curriculum,
        sum(electronicalnote) electronicalnote,
        sum(uploadMovie) uploadMovie,
        sum(production) production,
        sum(overturnClass) overturnClass,
        sum(analogyTest) analogyTest,
        sum(onlineTest) onlineTest,
        sum(interClassCompetition) interClassCompetition,
        sum(HiTeach) HiTeach,
        sum(performanceLogin) performanceLogin,
        sum(mergeActivity) mergeActivity,
        sum(onlineChecking) onlineChecking,
        sum(allLearningProcess) allLearningProcess,
        sum(sokratesTotal) sokratesTotal,
        sum(personAge) personAge,
        sum(areaShare) areaShare,
        sum(schoolShare) schoolShare,
        sum(overallResourece) overallResourece,
        sum(subjectnum) subjectnum,
        sum(examinationnum) examinationnum,
        sum(textbooknum) textbooknum,
        sum(underway) underway,
        sum(unfinished) unfinished,
        sum(achieve) achieve,
        sum(molecularHomework) molecularHomework,
        sum(denominatorHomework) denominatorHomework,
        sum(denominatorOnlineTest) denominatorOnlineTest,
        sum(molecularOnlineTest) molecularOnlineTest ';
        switch ($semester) {
            case 1:
                // NOTE(review): ORwhere here relies on SQL AND/OR precedence
                // to express "months 8-12 of $year OR month 1 of $year+1".
                if (isset($district_id) && isset($year) && isset($school_id)) {
                    return DB::table('yu_he_districts')
                        ->selectRaw($select)
                        ->where('districtID', '=', $district_id)
                        ->where('year', $year)
                        ->where('schoolId', $school_id)
                        ->whereBetween('month', [8, 12])
                        ->ORwhere('year', $year + 1)
                        ->where('month', 1)
                        ->where('districtID', $district_id)
                        ->where('schoolId', $school_id)
                        ->get();
                } elseif (isset($district_id) && isset($year)) {
                    return DB::table('yu_he_districts')
                        ->selectRaw($select)
                        ->where('districtID', '=', $district_id)
                        ->where('year', $year)
                        ->whereBetween('month', [8, 12])
                        ->ORwhere('year', $year + 1)
                        ->where('month', 1)
                        ->where('districtID', $district_id)
                        ->get();
                } else {
                    return '無此學區級學校數據';
                }
                break;
            case 0:
                if (isset($district_id) && isset($year) && isset($school_id)) {
                    return DB::table('yu_he_districts')
                        ->selectRaw($select)
                        ->where('year', $year)
                        ->where('districtID', '=', $district_id)
                        ->where('schoolId', $school_id)
                        ->whereBetween('month', [2, 7])
                        ->get();
                } elseif (isset($district_id) && isset($year)) {
                    return DB::table('yu_he_districts')
                        ->selectRaw($select)
                        ->where('year', $year)
                        ->where('districtID', '=', $district_id)
                        ->whereBetween('month', [2, 7])
                        ->get();
                } else {
                    return '無此學區級學校數據';
                }
                break;
            default:
                // All-time totals (no year/month filter).
                if (isset($district_id) && isset($school_id)) {
                    return DB::table('yu_he_districts')->selectRaw($select)
                        ->where('districtID', '=', $district_id)
                        ->where('schoolId', $school_id)
                        ->get();
                } elseif ($district_id) {
                    return DB::table('yu_he_districts')->selectRaw($select)
                        ->where('districtID', '=', $district_id)
                        ->get();
                } else {
                    return '無此學區級學校數據';
                }
                break;
        }
    }
    /**
     * Time series of one summed metric for a district (optionally narrowed
     * to a single school), keyed by month (single semester) or year (all).
     *
     * Semester legend: 0 = first term (months 8-12 plus month 1 of year+1),
     * 1 = second term (months 2-7), anything else = all years.
     *
     * @param null $category column to sum; interpolated into selectRaw —
     *        must never come from untrusted input (SQL injection risk)
     * @param null $district_id
     * @param null $school_id
     * @param int $semester
     * @param null $year
     * @return array|string ['num','time','data'] or an error message string
     */
    public function dataFormat($category = null, $district_id = null, $school_id = null, $semester = 2, $year = null)
    {
        switch ($semester) {
            case 0:
                // NOTE(review): ORwhere below relies on SQL AND/OR precedence
                // to express "months 8-12 of $year OR month 1 of $year+1".
                if (isset($district_id) && isset($year) && isset($school_id)) {
                    $data = DB::table('yu_he_districts')
                        ->selectRaw("sum($category) as total,month date")
                        ->where('districtID', $district_id)
                        ->where('schoolId', $school_id)
                        ->where('year', $year)
                        ->whereBetween('month', [8, 12])
                        ->ORwhere('year', $year + 1)
                        ->where('month', 1)
                        ->where('districtID', $district_id)
                        ->where('schoolId', $school_id)
                        ->groupBy('month')
                        ->get();
                    $data = $this->array_json($data);
                    return [
                        'num' => array_sum($data),
                        'time' => array_keys($data),
                        'data' => array_values($data),
                    ];
                } elseif (isset($district_id) && isset($year)) {
                    $data = DB::table('yu_he_districts')
                        ->selectRaw("sum($category) as total,month date")
                        ->where('districtID', $district_id)
                        ->where('year', $year)
                        ->whereBetween('month', [8, 12])
                        ->ORwhere('year', $year + 1)
                        ->where('month', 1)
                        ->where('districtID', $district_id)
                        ->groupBy('month')
                        ->get();
                    $data = $this->array_json($data);
                    return [
                        'num' => array_sum($data),
                        'time' => array_keys($data),
                        'data' => array_values($data),
                    ];
                } else {
                    return '無此學區級學校數據';
                }
                break;
            case 1:
                if (isset($district_id) && isset($year) && isset($school_id)) {
                    $data = DB::table('yu_he_districts')
                        ->selectRaw("sum($category) as total,month date")
                        ->where('districtID', $district_id)
                        ->where('schoolId', $school_id)
                        ->where('year', $year)
                        ->whereBetween('month', [2, 7])
                        ->groupBy('month')
                        ->get();
                    $data = $this->array_json($data);
                    return [
                        'num' => array_sum($data),
                        'time' => array_keys($data),
                        'data' => array_values($data),
                    ];
                } elseif (isset($district_id) && isset($year)) {
                    $data = DB::table('yu_he_districts')
                        ->selectRaw("sum($category) as total,month date")
                        ->where('districtID', $district_id)
                        ->where('year', $year)
                        ->whereBetween('month', [2, 7])
                        ->groupBy('month')
                        ->get();
                    $data = $this->array_json($data);
                    return [
                        'num' => array_sum($data),
                        'time' => array_keys($data),
                        'data' => array_values($data),
                    ];
                } else {
                    return '無此學區級學校數據';
                }
                break;
            default:
                // All-time view: aggregate by year instead of month.
                if (isset($district_id) && isset($school_id)) {
                    $data = DB::table('yu_he_districts')
                        ->selectRaw("sum($category) as total,year as date")
                        ->where('districtID', $district_id)
                        ->where('schoolId', $school_id)
                        ->groupBy('year')
                        ->get();
                    $data = $this->array_json($data);
                    return [
                        'num' => array_sum($data),
                        'time' => array_keys($data),
                        'data' => array_values($data),
                    ];
                } elseif (isset($district_id)) {
                    $data = DB::table('yu_he_districts')
                        ->selectRaw("sum($category) as total,year as date")
                        ->where('districtID', $district_id)
                        ->groupBy('year')
                        ->get();
                    $data = $this->array_json($data);
                    return [
                        'num' => array_sum($data),
                        'time' => array_keys($data),
                        'data' => array_values($data),
                    ];
                } else {
                    return '無此學區級學校數據';
                }
                break;
        }
    }
    /**
     * Per-course session counts for one school.
     *
     * Semester legend: 0 = first term (months 8-12 plus month 1 of year+1),
     * 1 = second term (months 2-7), anything else = all time.
     *
     * @param $school_id
     * @param int $semester
     * @param $year
     * @return array|string ['curriculums','data','curriculumsid'] (each 0 when
     *         the query matched no rows) or an error message string
     */
    public function schoolCourse($school_id = null, $semester = 2, $year = null)
    {
        switch ($semester) {
            case 0:
                if (isset($school_id) && isset($year)) {
                    // NOTE(review): ORwhere relies on SQL AND/OR precedence to
                    // express "months 8-12 of $year OR month 1 of $year+1".
                    $data = DB::table('yu_he_course_details')
                        ->join('course', 'course.CourseNO', 'yu_he_course_details.courseNO')
                        ->selectRaw('count(yu_he_course_details.courseNO) total ,yu_he_course_details.courseNO courseNO ,course.CourseName courseName')
                        ->where('yu_he_course_details.schoolID', $school_id)
                        ->where('yu_he_course_details.year', $year)
                        ->whereBetween('month', [8, 12])
                        ->ORwhere('year', $year + 1)
                        ->where('month', 1)
                        ->where('yu_he_course_details.schoolID', $school_id)
                        ->groupBy('yu_he_course_details.courseNO', 'course.CourseName')
                        ->get();
                    // `!count($data) == 0` parses as `(!count($data)) == 0`,
                    // i.e. "result set is non-empty".
                    if (!count($data) == 0) {
                        foreach ($data as $item) {
                            $curriculums[] = $item->courseName;
                            $total[] = $item->total;
                            $curriculumsid[] = $item->courseNO;
                        }
                        return [
                            'curriculums' => array_values($curriculums),
                            'data' => array_values($total),
                            'curriculumsid' => array_values($curriculumsid),
                        ];
                    }
                    return [
                        'curriculums' => 0,
                        'data' => 0,
                        'curriculumsid' => 0,
                    ];
                } else {
                    return '無此學區級學校數據';
                }
                break;
            case 1:
                if (isset($school_id) && isset($year)) {
                    $data = DB::table('yu_he_course_details')
                        ->join('course', 'course.CourseNO', 'yu_he_course_details.courseNO')
                        ->selectRaw('count(yu_he_course_details.courseNO) total ,yu_he_course_details.courseNO courseNO ,course.CourseName courseName')
                        ->where('yu_he_course_details.schoolID', $school_id)
                        ->where('yu_he_course_details.year', $year)
                        ->whereBetween('month', [2, 7])
                        ->groupBy('yu_he_course_details.courseNO', 'course.CourseName')
                        ->get();
                    if (!count($data) == 0) {
                        foreach ($data as $item) {
                            $curriculums[] = $item->courseName;
                            $total[] = $item->total;
                            $curriculumsid[] = $item->courseNO;
                        }
                        return [
                            'curriculums' => array_values($curriculums),
                            'data' => array_values($total),
                            'curriculumsid' => array_values($curriculumsid),
                        ];
                    }
                    return [
                        'curriculums' => 0,
                        'data' => 0,
                        'curriculumsid' => 0,
                    ];
                } else {
                    return '無此學區級學校數據';
                }
                break;
            default:
                // All-time view: no year/month filter.
                if (isset($school_id)) {
                    $data = DB::table('yu_he_course_details')
                        ->join('course', 'course.CourseNO', 'yu_he_course_details.courseNO')
                        ->selectRaw('count(yu_he_course_details.courseNO) total ,yu_he_course_details.courseNO courseNO ,course.CourseName courseName')
                        ->where('yu_he_course_details.schoolID', $school_id)
                        ->groupBy('yu_he_course_details.courseNO', 'course.CourseName')
                        ->get();
                    if (!count($data) == 0) {
                        foreach ($data as $item) {
                            $curriculums[] = $item->courseName;
                            $total[] = $item->total;
                            $curriculumsid[] = $item->courseNO;
                        }
                        return [
                            'curriculums' => array_values($curriculums),
                            'data' => array_values($total),
                            'curriculumsid' => array_values($curriculumsid),
                        ];
                    }
                    return [
                        'curriculums' => 0,
                        'data' => 0,
                        'curriculumsid' => 0,
                    ];
                } else {
                    return '無此學區級學校數據';
                }
        }
    }
    /**
     * Per-teacher session counts for one school.
     *
     * Semester legend: 0 = first term (months 8-12 plus month 1 of year+1),
     * 1 = second term (months 2-7), anything else = all time.
     *
     * NOTE(review): 'data' => array_keys($total) returns the 0..n-1 indexes,
     * not the counts, while schoolCourse uses array_values — confirm whether
     * array_values was intended here.
     *
     * @param $school_id
     * @param int $semester
     * @param $year
     * @return array|string ['teach','data','teachid','teachnum'] (each 0 when
     *         the query matched no rows) or an error message string
     */
    public function schoolTeacher($school_id = null, $semester = 2, $year = null)
    {
        switch ($semester) {
            case 0:
                if (isset($school_id) && isset($year)) {
                    // NOTE(review): this branch groups by course columns while
                    // selecting member columns (the other branches group by
                    // member.MemberID/RealName) — looks like a copy-paste slip;
                    // it would also fail under ONLY_FULL_GROUP_BY. Verify.
                    $data = DB::table('yu_he_course_details')
                        ->join('course', 'course.CourseNO', 'yu_he_course_details.courseNO')
                        ->join('member', 'member.MemberID', 'course.MemberID')
                        ->selectRaw('count(yu_he_course_details.courseNO) data,member.MemberID memberID ,member.RealName realName')
                        ->where('yu_he_course_details.schoolID', $school_id)
                        ->where('yu_he_course_details.year', $year)
                        ->whereBetween('month', [8, 12])
                        ->ORwhere('year', $year + 1)
                        ->where('month', 1)
                        ->where('yu_he_course_details.schoolID', $school_id)
                        ->groupBy('yu_he_course_details.courseNO', 'course.CourseName')
                        ->get();
                    // `!count($data) == 0` parses as "result set is non-empty".
                    if (!count($data) == 0) {
                        foreach ($data as $item) {
                            $realName[] = $item->realName;
                            $total[] = $item->data;
                            $teachId[] = $item->memberID;
                        }
                        return [
                            'teach' => array_values($realName),
                            'data' => array_keys($total),
                            'teachid' => array_values($teachId),
                            'teachnum' => array_sum($total)
                        ];
                    }
                    return [
                        'teach' => 0,
                        'data' => 0,
                        'teachid' => 0,
                        'teachnum' => 0,
                    ];
                } else {
                    return '無此學區級學校數據';
                }
                break;
            case 1:
                if (isset($school_id) && isset($year)) {
                    $data = DB::table('yu_he_course_details')
                        ->join('course', 'course.CourseNO', 'yu_he_course_details.courseNO')
                        ->join('member', 'member.MemberID', 'course.MemberID')
                        ->selectRaw('count(yu_he_course_details.courseNO) data,member.MemberID memberID ,member.realName')
                        ->where('yu_he_course_details.schoolID', $school_id)
                        ->where('yu_he_course_details.year', $year)
                        ->whereBetween('month', [2, 7])
                        ->groupBy('member.MemberID', 'member.RealName')
                        ->get();
                    if (!count($data) == 0) {
                        foreach ($data as $item) {
                            $realName[] = $item->realName;
                            $total[] = $item->data;
                            $teachId[] = $item->memberID;
                        }
                        return [
                            'teach' => array_values($realName),
                            'data' => array_keys($total),
                            'teachid' => array_values($teachId),
                            'teachnum' => array_sum($total)
                        ];
                    }
                    return [
                        'teach' => 0,
                        'data' => 0,
                        'teachid' => 0,
                        'teachnum' => 0,
                    ];
                } else {
                    return '無此學區級學校數據';
                }
                break;
            default:
                // All-time view: no year/month filter.
                if (isset($school_id)) {
                    $data = DB::table('yu_he_course_details')
                        ->join('course', 'course.CourseNO', 'yu_he_course_details.courseNO')
                        ->join('member', 'member.MemberID', 'course.MemberID')
                        ->selectRaw('count(yu_he_course_details.courseNO) data,member.MemberID memberID ,member.RealName')
                        ->where('yu_he_course_details.schoolID', $school_id)
                        ->groupBy('member.MemberID', 'member.RealName')
                        ->get();
                    if (!count($data) == 0) {
                        foreach ($data as $item) {
                            $realName[] = $item->RealName;
                            $total[] = $item->data;
                            $teachId[] = $item->memberID;
                        }
                        return [
                            'teach' => array_values($realName),
                            'data' => array_keys($total),
                            'teachid' => array_values($teachId),
                            'teachnum' => array_sum($total)
                        ];
                    }
                    return [
                        'teach' => 0,
                        'data' => 0,
                        'teachid' => 0,
                        'teachnum' => 0
                    ];
                } else {
                    return '無此學區級學校數據';
                }
        }
    }
/**
* @param array $data
* @return array
* 格式轉化
*/
public function array_json($data = null)
{
if (!count($data)) {
return $data = [];
}
foreach ($data as $item) {
if (isset($yearData[$item->date])) {
$yearData[$item->date] += intval($item->total);
} else {
$yearData[$item->date] = intval($item->total);
}
}
if ($yearData != null) {
//排序
ksort($yearData);
//存所有學校資訊
return $data = $yearData;
} else {
return $data = [];
}
}
/**
* @param array $arrayData
* @return array
*/
public function array_merge($arrayData)
{
$v = array();
foreach ($arrayData AS $data) {
foreach ($data as $key => $value) {
if (isset($v[$key])) {
$v[$key] += $value;
} else {
$v[$key] = $value;
}
}
}
return $v;
}
}
| 40.304688
| 154
| 0.408251
|
05c03dbed52b4b0b97e0bdecca69b7c9ceae0b97
| 1,091
|
py
|
Python
|
Tasker.py
|
slzKud/SkHelper_Bot
|
7d33406ce1a52f31dc9e21fb927517e2a03caa86
|
[
"MIT"
] | null | null | null |
Tasker.py
|
slzKud/SkHelper_Bot
|
7d33406ce1a52f31dc9e21fb927517e2a03caa86
|
[
"MIT"
] | null | null | null |
Tasker.py
|
slzKud/SkHelper_Bot
|
7d33406ce1a52f31dc9e21fb927517e2a03caa86
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python3
# -*- coding: UTF-8 -*-
import Tools,config,sys, getopt
if __name__ == "__main__":
    # CLI entry point. Usage: Tasker.py -t <task_name>
    if len(sys.argv)<3:
        print("error")
        exit()
    op=sys.argv[1]
    task_name=sys.argv[2]
    if op=="-t":
        if task_name=="say_hello":
            # connectivity smoke test: push a test message to the admin
            Tools.send_to_admin("您好,这是一条测试信息")
        elif task_name=="status":
            # liveness report
            Tools.send_to_admin("我状态很好")
        elif task_name=="check_conoha":
            # warn the admin when the Conoha balance runs low or the VPS stops
            Charge_C=Tools.GetConohaCharge()
            Status_C=Tools.GetConohaStatus()
            if(Charge_C<=config.Conoha_Warning_Charge):
                Tools.send_to_admin("你的Conoha账户余额已经小于70日元,请尽快续费!")
            elif Status_C!="Active" :
                Tools.send_to_admin("你的Conoha服务器已经停止运行,请尽快补清欠费!")
        elif task_name=="check_cloudcone":
            # warn the admin when the CloudCone instance is not online
            M=Tools.GetCloudConeInfo()
            if M["status"]!="online":
                Tools.send_to_admin("你的CloudCone账户已经停机,请尽快缴费!")
        elif task_name=="check_server":
            # placeholder task — not implemented yet
            pass
        else:
            print("Error")
            exit()
    else:
        print("Error")
        exit()
| 29.486486
| 65
| 0.55637
|
385d84473a1a6bf770dbffcc2df6558248e1c8ba
| 2,050
|
php
|
PHP
|
app/Services/Core/Oneloan/Rongshidai/RongshidaiService.php
|
OliverCaiJia/micro_service_api
|
cbb337b674799a05957880e6e77d6296402391a6
|
[
"MIT"
] | null | null | null |
app/Services/Core/Oneloan/Rongshidai/RongshidaiService.php
|
OliverCaiJia/micro_service_api
|
cbb337b674799a05957880e6e77d6296402391a6
|
[
"MIT"
] | 1
|
2019-01-25T11:00:57.000Z
|
2020-12-24T02:50:13.000Z
|
app/Services/Core/Oneloan/Rongshidai/RongshidaiService.php
|
OliverCaiJia/micro_service_api
|
cbb337b674799a05957880e6e77d6296402391a6
|
[
"MIT"
] | null | null | null |
<?php
namespace App\Services\Core\Oneloan\Rongshidai;
use App\Services\AppService;
use App\Helpers\Http\HttpClient;
use App\Services\Core\Oneloan\Rongshidai\RongshidaiConfig\RongshidaiConfig;
use App\Services\Core\Oneloan\Rongshidai\RongshidaiConfig\RsaUtil;
/**
 * Rongshidai (融时代) loan-partner integration.
 */
class RongshidaiService extends AppService
{
    /**
     * Push a loan application to the Rongshidai API.
     *
     * Builds the payload, RSA-encrypts it, signs the plaintext with
     * SHA1WithRSA and POSTs both to the partner endpoint.
     *
     * @param array $params applicant fields: mobile, city, name, house_info,
     *                      car_info, accumulation_fund, social_security,
     *                      has_insurance, money
     * @return mixed decoded JSON response as an associative array
     */
    public static function spread($params = [])
    {
        // Partner endpoint URL
        $url = RongshidaiConfig::REAL_URL;
        // Request payload
        $req = [
            'source' => RongshidaiConfig::SYS_TYPE,
            'applyLoanList' => [
                // Applicant profile
                0 => [
                    'mobileNumber' => $params['mobile'], // required: mobile number
                    'city' => $params['city'], // required: city
                    'name' => $params['name'], // optional: name
                    // Qualification flags ('000' / 0 encode "none")
                    'isHouse' => $params['house_info'] == '000' ? 'N' : 'Y', // required: owns property
                    'isCar' => $params['car_info'] == '000' ? 'N' : 'Y', // required: owns a car
                    'isFund' => $params['accumulation_fund'] == '000' ? 'N' : 'Y', // required: has housing fund
                    'isSocialSecurity' => $params['social_security'] == 0 ? 'N' : 'Y', // required: has social security
                    'isInsurance' => $params['has_insurance'] == 0 ? 'N' : 'Y', // required: has an insurance policy
                    // Loan request
                    'applyAmount' => $params['money'], // required: requested amount
                ],
            ],
        ];
        $data = json_encode($req);
        // RSA-encrypt the JSON payload
        $encryptData = RsaUtil::i()->rsaEncrypt($data);
        // Sign the plaintext payload with SHA1WithRSA
        $sign = RsaUtil::i()->sha1WithRsaSign($data);
        $request = [
            'json' => [
                'data' => $encryptData,
                'sign' => $sign,
                'systype' => RongshidaiConfig::SYS_TYPE,
            ],
        ];
        // NOTE(review): 'verify' => false disables TLS certificate checks —
        // confirm this is intentional for the partner endpoint.
        $response = HttpClient::i(['verify' => false])->request('POST', $url, $request);
        $result = $response->getBody()->getContents();
        $arr = json_decode($result, true);
        return $arr;
    }
}
| 30.597015
| 93
| 0.485854
|
f4726f1b470eb94ff0e73acceb586ea4da94ad7f
| 649
|
sql
|
SQL
|
airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_nested_streams/models/generated/airbyte_ctes/TEST_NORMALIZATION/UNNEST_ALIAS_CHILDREN_AB2.sql
|
vagrantism/airbyte
|
b072a6d7bd6ea0b843ad7e98ff8f98dd3b8aa824
|
[
"MIT"
] | 1
|
2022-03-31T18:23:00.000Z
|
2022-03-31T18:23:00.000Z
|
airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_nested_streams/models/generated/airbyte_ctes/TEST_NORMALIZATION/UNNEST_ALIAS_CHILDREN_AB2.sql
|
vagrantism/airbyte
|
b072a6d7bd6ea0b843ad7e98ff8f98dd3b8aa824
|
[
"MIT"
] | 4
|
2022-01-25T17:48:19.000Z
|
2022-03-17T13:06:32.000Z
|
airbyte-integrations/bases/base-normalization/integration_tests/normalization_test_output/snowflake/test_nested_streams/models/generated/airbyte_ctes/TEST_NORMALIZATION/UNNEST_ALIAS_CHILDREN_AB2.sql
|
vagrantism/airbyte
|
b072a6d7bd6ea0b843ad7e98ff8f98dd3b8aa824
|
[
"MIT"
] | 2
|
2021-04-28T15:15:37.000Z
|
2022-03-28T17:32:15.000Z
|
-- Airbyte dbt normalization intermediate (AB2 stage): casts the unnested
-- children records from raw JSON into typed Snowflake columns.
{{ config(
    cluster_by = ["_AIRBYTE_EMITTED_AT"],
    unique_key = env_var('AIRBYTE_DEFAULT_UNIQUE_KEY', '_AIRBYTE_AB_ID'),
    schema = "_AIRBYTE_TEST_NORMALIZATION",
    tags = [ "nested-intermediate" ]
) }}
-- SQL model to cast each column to its adequate SQL type converted from the JSON schema type
select
    _AIRBYTE_UNNEST_ALIAS_HASHID,
    cast(AB_ID as {{ dbt_utils.type_bigint() }}) as AB_ID,
    cast(OWNER as {{ type_json() }}) as OWNER,
    _AIRBYTE_AB_ID,
    _AIRBYTE_EMITTED_AT,
    {{ current_timestamp() }} as _AIRBYTE_NORMALIZED_AT
from {{ ref('UNNEST_ALIAS_CHILDREN_AB1') }}
-- CHILDREN at unnest_alias/children
where 1 = 1
| 34.157895
| 93
| 0.713405
|
2f82a0aae935f03ba0bf12846289daf70df25fbf
| 414
|
js
|
JavaScript
|
fake/fakeFS.js
|
vegardbb/browser-support-spec
|
2fe10d980eeaa17985be0bf35583dced1186b919
|
[
"MIT"
] | 4
|
2019-10-09T10:14:28.000Z
|
2020-01-10T10:15:56.000Z
|
fake/fakeFS.js
|
vegardbb/browser-support-spec
|
2fe10d980eeaa17985be0bf35583dced1186b919
|
[
"MIT"
] | 1
|
2020-07-12T11:43:54.000Z
|
2020-07-12T11:43:54.000Z
|
fake/fakeFS.js
|
vegardbb/browser-support-spec
|
2fe10d980eeaa17985be0bf35583dced1186b919
|
[
"MIT"
] | null | null | null |
// technically a stub, but we throw in the timer just to simulate I/O
// options is an optional argument
exports.writeFile = function fakeWriteFile(file, data, options, callback) {
let cb = () => true;
if (typeof callback === 'function') {
cb = callback;
}
else if (typeof options === 'function') {
cb = options;
}
else {
throw new Error('Callback required!');
}
setTimeout(cb, 500);
}
| 25.875
| 75
| 0.642512
|
82c26a7cfaa831d3c729e6fec4c527341544354d
| 1,001
|
kt
|
Kotlin
|
app/src/main/java/com/christopherelias/blockchain/di/UtilsModule.kt
|
ChristopherME/compose-blockchain
|
8e7dd621de2cff6f8510b0df9eee0025ad3dc37d
|
[
"Apache-2.0"
] | 18
|
2021-10-31T18:46:24.000Z
|
2022-03-12T10:44:55.000Z
|
app/src/main/java/com/christopherelias/blockchain/di/UtilsModule.kt
|
ChristopherME/Blockchain
|
8e7dd621de2cff6f8510b0df9eee0025ad3dc37d
|
[
"Apache-2.0"
] | null | null | null |
app/src/main/java/com/christopherelias/blockchain/di/UtilsModule.kt
|
ChristopherME/Blockchain
|
8e7dd621de2cff6f8510b0df9eee0025ad3dc37d
|
[
"Apache-2.0"
] | 2
|
2021-11-15T10:01:52.000Z
|
2022-01-17T18:24:09.000Z
|
package com.christopherelias.blockchain.di
import com.christopherelias.blockchain.utils.connectivity.ConnectivityUtils
import com.christopherelias.blockchain.utils.resource_provider.ResourceProvider
import com.christopherelias.blockchain.utils_impl.connectivity.ConnectivityUtilsImpl
import com.christopherelias.blockchain.utils_impl.resource_provider.ResourceProviderImpl
import dagger.Binds
import dagger.Module
import dagger.hilt.InstallIn
import dagger.hilt.components.SingletonComponent
import javax.inject.Singleton
/*
* Created by Christopher Elias on 9/06/2021
* christopher.mike.96@gmail.com
*
* Lima, Peru.
*/
/**
 * Hilt module binding utility interfaces to their implementations
 * as application-wide singletons.
 */
@Module
@InstallIn(SingletonComponent::class)
abstract class UtilsModule {
    /** Binds [ConnectivityUtilsImpl] wherever [ConnectivityUtils] is injected. */
    @Binds
    @Singleton
    abstract fun provideConnectivityUtils(
        connectivityUtilsImpl: ConnectivityUtilsImpl
    ): ConnectivityUtils
    /** Binds [ResourceProviderImpl] wherever [ResourceProvider] is injected. */
    @Binds
    @Singleton
    abstract fun provideResourceProviderUtils(
        resourceProviderImpl: ResourceProviderImpl
    ): ResourceProvider
}
| 28.6
| 88
| 0.815185
|
f46b13e2f4fe3382ec58b2e6590f01b05274190c
| 1,003
|
cs
|
C#
|
GitPVP/RiotObjects/Platform/Summoner/Masterybook/TalentEntry.cs
|
t2hv33/LegendaryClient
|
02646de90612a3a8b77048508cc8003fb8a87dd9
|
[
"BSD-2-Clause"
] | 1
|
2021-08-02T05:28:26.000Z
|
2021-08-02T05:28:26.000Z
|
GitPVP/RiotObjects/Platform/Summoner/Masterybook/TalentEntry.cs
|
t2hv33/LegendaryClient
|
02646de90612a3a8b77048508cc8003fb8a87dd9
|
[
"BSD-2-Clause"
] | null | null | null |
GitPVP/RiotObjects/Platform/Summoner/Masterybook/TalentEntry.cs
|
t2hv33/LegendaryClient
|
02646de90612a3a8b77048508cc8003fb8a87dd9
|
[
"BSD-2-Clause"
] | null | null | null |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using PVPNetConnect.RiotObjects.Platform.Summoner;
namespace PVPNetConnect.RiotObjects.Platform.Summoner.Masterybook
{
    /// <summary>
    /// Typed wrapper for the Riot platform object
    /// "com.riotgames.platform.summoner.masterybook.TalentEntry".
    /// Field mapping is driven by the [InternalName] attributes.
    /// </summary>
    public class TalentEntry : RiotGamesObject
    {
        /// <summary>Fully-qualified Riot type name used for (de)serialization.</summary>
        public override string TypeName
        {
            get
            {
                return this.type;
            }
        }
        private string type = "com.riotgames.platform.summoner.masterybook.TalentEntry";
        public TalentEntry()
        {
        }
        // Callback variant: the callback fires when DoCallback receives a result.
        public TalentEntry(Callback callback)
        {
            this.callback = callback;
        }
        // Builds the entry immediately from an already-decoded result object.
        public TalentEntry(TypedObject result)
        {
            base.SetFields(this, result);
        }
        public delegate void Callback(TalentEntry result);
        private Callback callback;
        /// <summary>Populates fields from the result and invokes the stored callback.</summary>
        public override void DoCallback(TypedObject result)
        {
            base.SetFields(this, result);
            callback(this);
        }
        /// <summary>Points invested in this talent.</summary>
        [InternalName("rank")]
        public Int32 Rank { get; set; }
        /// <summary>Identifier of the talent.</summary>
        [InternalName("talentId")]
        public Int32 TalentId { get; set; }
        /// <summary>Full talent definition object.</summary>
        [InternalName("talent")]
        public Talent Talent { get; set; }
        /// <summary>Owning summoner's id.</summary>
        [InternalName("summonerId")]
        public Double SummonerId { get; set; }
    }
}
| 16.716667
| 80
| 0.768694
|
05d91d032bf9a9ba00fcc63e6d950a4745c5b13a
| 3,737
|
py
|
Python
|
utilities/latent_space_utilities.py
|
jjc2718/netreg
|
292540e911cdfbe18ff6fe0f9bfe8e055053d23c
|
[
"BSD-3-Clause"
] | null | null | null |
utilities/latent_space_utilities.py
|
jjc2718/netreg
|
292540e911cdfbe18ff6fe0f9bfe8e055053d23c
|
[
"BSD-3-Clause"
] | 6
|
2019-07-12T15:52:31.000Z
|
2020-01-13T18:14:41.000Z
|
utilities/latent_space_utilities.py
|
jjc2718/netreg
|
292540e911cdfbe18ff6fe0f9bfe8e055053d23c
|
[
"BSD-3-Clause"
] | 1
|
2019-07-18T18:28:59.000Z
|
2019-07-18T18:28:59.000Z
|
"""
Utilities for processing and comparing latent spaces
"""
import os
import glob
import numpy as np
import pandas as pd
def get_overlap_cols_from_plier(models_dirs):
    """Return the sorted intersection of gene columns across PLIER models.

    For each directory in ``models_dirs``, the first file matching
    ``components_10/plier_*_weight_matrix.tsv.gz`` is read and its column
    names collected; the columns common to every model are returned sorted.
    (Approach 1 from 5.analyze_plier_compression.ipynb.)
    """
    column_sets = []
    for model_dir in models_dirs:
        pattern = os.path.join(model_dir,
                               'components_10',
                               'plier_*_weight_matrix.tsv.gz')
        matches = glob.glob(pattern)
        weights = pd.read_csv(matches[0], sep='\t', index_col=0)
        column_sets.append(set(weights.columns.values))
    first, rest = column_sets[0], column_sets[1:]
    return sorted(first.intersection(*rest))
def get_overlap_cols_from_files(f1, f2):
    """Return the sorted list of column names shared by two TSV weight matrices.

    Both files are read with the first column as index, so only the gene
    columns are compared. (Approach 2 from 5.analyze_plier_compression.ipynb.)
    """
    columns_a = set(pd.read_csv(f1, sep='\t', index_col=0).columns.values)
    columns_b = set(pd.read_csv(f2, sep='\t', index_col=0).columns.values)
    shared = columns_a & columns_b
    return sorted(shared)
def get_matrices_from_files(files, gene_subset, shuffled=False):
    """Load weight matrices from ``files``, restricted to ``gene_subset`` columns.

    Parameters
    ----------
    files : list of str
        Candidate TSV file paths; filtered by the ``shuffled`` flag.
    gene_subset : list of str
        Column names to keep from each matrix (must exist in every file).
    shuffled : bool
        When True keep only paths containing 'shuffled'; otherwise keep only
        paths that do not.

    Returns
    -------
    tuple (matrices, files)
        Parallel lists: the loaded DataFrames and the filtered file paths.
    """
    # Filter by shuffled/unshuffled naming convention before loading anything.
    if shuffled:
        files = [f for f in files if 'shuffled' in f]
    else:
        files = [f for f in files if 'shuffled' not in f]
    # BUGFIX: a second `filenames` list was accumulated but never used; the
    # returned paths are simply the filtered `files` list.
    mtxs = [pd.read_csv(f, sep='\t', header=0, index_col=0)[gene_subset]
            for f in files]
    return (mtxs, files)
def calculate_avg_cca(z_dims, models_map, overlap=False, verbose=False):
    """Compute pairwise average CCA similarity between compression algorithms.

    Parameters
    ----------
    z_dims : iterable of int
        Latent dimensionalities; each selects a ``components_{z}`` directory.
    models_map : dict
        Maps algorithm name -> models directory. The part of the name before
        the first '_' is used as the weight-matrix file prefix.
    overlap : bool
        If True, shared columns are computed from the two files being
        compared; otherwise from all PLIER models (see the two helper
        functions in this module).
    verbose : bool
        Print progress for each pairwise comparison.

    Returns
    -------
    dict mapping z_dim -> symmetric (n_algorithms x n_algorithms) ndarray of
    mean CCA similarity values (averaged over all seed-pair combinations).
    """
    import itertools
    import utilities.cca_core as cca_core
    algorithms = list(models_map.keys())
    # One symmetric result matrix per latent dimensionality.
    avg_cca_mtx = {z_dim: np.zeros((len(algorithms), len(algorithms))) for z_dim in z_dims}
    for z_dim in z_dims:
        # combinations_with_replacement also covers the diagonal (alg vs itself).
        for alg1, alg2 in itertools.combinations_with_replacement(algorithms, 2):
            if verbose:
                print('Comparing {} with {} for z={}...'.format(alg1, alg2, z_dim), end='')
            i1, i2 = algorithms.index(alg1), algorithms.index(alg2)
            cca_values = []
            alg1_pattern = os.path.join(models_map[alg1],
                                        'components_{}'.format(z_dim),
                                        '{}_*_weight_matrix.tsv.gz'.format(alg1.split('_')[0]))
            alg2_pattern = os.path.join(models_map[alg2],
                                        'components_{}'.format(z_dim),
                                        '{}_*_weight_matrix.tsv.gz'.format(alg2.split('_')[0]))
            alg1_files = glob.glob(alg1_pattern)
            alg2_files = glob.glob(alg2_pattern)
            # NOTE(review): in the `overlap=False` branch the column set is
            # identical for every (alg1, alg2, z_dim) iteration and could be
            # hoisted out of the loops — confirm before changing.
            if overlap:
                overlap_cols = get_overlap_cols_from_files(alg1_files[0],
                                                           alg2_files[0])
            else:
                overlap_cols = get_overlap_cols_from_plier(
                    list(set(models_map.values())))
            (alg1_matrices, alg1_files) = get_matrices_from_files(alg1_files,
                                                                  overlap_cols)
            (alg2_matrices, alg2_files) = get_matrices_from_files(alg2_files,
                                                                  overlap_cols)
            # Average CCA similarity over every pair of seeds/replicates.
            for s1, s2 in itertools.product(range(len(alg1_matrices)),
                                            range(len(alg2_matrices))):
                cca_result = cca_core.robust_cca_similarity(alg1_matrices[s1],
                                                            alg2_matrices[s2],
                                                            verbose=False)
                cca_values.append(np.mean(cca_result['mean']))
            # Fill both halves of the symmetric matrix.
            avg_cca_mtx[z_dim][i1, i2] = np.mean(cca_values)
            avg_cca_mtx[z_dim][i2, i1] = avg_cca_mtx[z_dim][i1, i2]
            if verbose:
                print('done')
    return avg_cca_mtx
| 45.573171
| 91
| 0.561948
|
1abc7b9e286b74108932c5449db24cff0b016998
| 3,608
|
py
|
Python
|
src/lib/python/util/retrier.py
|
chen23/f5-bigip-image-generator
|
70c0199ef833bdb1827460b8c12f2800e7808b62
|
[
"Apache-2.0"
] | 34
|
2019-08-21T01:28:27.000Z
|
2021-10-05T07:21:58.000Z
|
src/lib/python/util/retrier.py
|
chen23/f5-bigip-image-generator
|
70c0199ef833bdb1827460b8c12f2800e7808b62
|
[
"Apache-2.0"
] | 34
|
2019-09-13T10:17:31.000Z
|
2022-03-09T00:01:00.000Z
|
src/lib/python/util/retrier.py
|
chen23/f5-bigip-image-generator
|
70c0199ef833bdb1827460b8c12f2800e7808b62
|
[
"Apache-2.0"
] | 16
|
2019-08-21T20:06:17.000Z
|
2022-03-25T11:59:00.000Z
|
#!/usr/bin/env python3
"""retrier module"""
# Copyright (C) 2019 F5 Networks, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import logging
from retry import retry
from util.logger import LOGGER
# pylint would prefer that this entire class be turned into a function which tracks its state
# internally. This would be possible if adding the optional 'tries' and 'delay' variables to the
# calling signature didn't cause ambiguity with the *args and **kwargs passed to the retried method.
# Instead, we use this class as a container for optionally injecting the 'tries' and 'delay'
# variables as state which is external to the signature. For example:
# retrier = Retrier(some_function, value1, arg2=value2) # unambiguous calling signature
# retrier.tries = 5 # state declared optionally
# retrier.execute() # state gets injected into signature here
# pylint: disable=too-few-public-methods
class Retrier:
    """Wrapper class for retry execution using the retry decorator.

    Calls ``func(*args, **kwargs)`` up to ``tries`` times, ``delay`` seconds
    apart, until it returns a truthy value. Progress ('.' per failed attempt)
    is echoed to the console and to any file handlers attached to LOGGER.
    """
    def __init__(self, func, *args, **kwargs):
        self.func = func
        self.args = args
        self.kwargs = kwargs
        self.tries = 10   # maximum number of attempts (overridable by caller)
        self.delay = 30   # seconds to wait between attempts
        self.logs = []
        # Determine log files to record progress to
        for handler in LOGGER.handlers:
            # pylint is worried because _CapturingHandler doesn't have a .baseFilename member.
            # This is actually irrelevant because _CapturingHandler is a pibling of FileHandler and
            # will never pass the isinstance check.
            # pylint: disable=no-member
            if isinstance(handler, logging.FileHandler):
                self.logs.append(handler.baseFilename)

    def _record(self, content):
        """Prints progress info to console and log files."""
        print(content, end='', flush=True)
        for log in self.logs:
            with open(log, 'a+') as file:
                file.write(content)

    def execute(self):
        """Calls the function defined in the constructor with the arguments provided to the
        constructor. Retries the function call <tries> times if it fails to return True. Logs
        progress to the console as well as any file handlers attached to LOGGER.

        Returns:
            bool: True if the call eventually succeeded, False when all tries
            were exhausted.
        """
        @retry(AssertionError, tries=self.tries, delay=self.delay)
        def _execute():
            """In order to access self.tries and self.delay the retry decorator must be
            declared within the scope of the Retrier class. We accomplish this by applying the
            decorator to an inner function within the class scope of the outer method."""
            _execute.remaining -= 1
            if self.func(*self.args, **self.kwargs):
                self._record('\n')
                return True
            self._record('.')
            if _execute.remaining < 1:
                self._record('\n')
                return False
            # Raising AssertionError triggers the @retry decorator's retry loop.
            raise AssertionError('{} tries remaining'.format(_execute.remaining))
        _execute.remaining = self.tries
        # BUGFIX: previously this method fell off the end and returned None on
        # failure; it now always returns an explicit bool (backward compatible,
        # since None and False are both falsy).
        return bool(_execute())
| 44
| 100
| 0.659091
|
3951c149406c38c9ba1728a90d3ed08b419f64c5
| 51
|
py
|
Python
|
rogal/tiles/__init__.py
|
kosciak/ecs-rogal
|
d553104e0ea350d11272d274a900419620b9389e
|
[
"MIT"
] | 4
|
2021-01-23T13:25:46.000Z
|
2021-03-19T03:08:05.000Z
|
rogal/tiles/__init__.py
|
kosciak/ecs-rogal
|
d553104e0ea350d11272d274a900419620b9389e
|
[
"MIT"
] | null | null | null |
rogal/tiles/__init__.py
|
kosciak/ecs-rogal
|
d553104e0ea350d11272d274a900419620b9389e
|
[
"MIT"
] | null | null | null |
from .core import RenderOrder, Glyph, Colors, Tile
| 25.5
| 50
| 0.784314
|
4401e88e4ee3c5327c58a55d01a6834eb0131d6c
| 1,031
|
py
|
Python
|
alphaVantageAPI/utils.py
|
twopirllc/alphaVantageAPI
|
983228a80ce8ced134c05ef509db8671b40f6f5d
|
[
"MIT"
] | 78
|
2018-08-06T04:34:03.000Z
|
2022-03-14T05:04:40.000Z
|
alphaVantageAPI/utils.py
|
twopirllc/alphaVantageAPI
|
983228a80ce8ced134c05ef509db8671b40f6f5d
|
[
"MIT"
] | 4
|
2020-10-04T17:48:56.000Z
|
2021-06-15T03:21:58.000Z
|
alphaVantageAPI/utils.py
|
twopirllc/alphaVantageAPI
|
983228a80ce8ced134c05ef509db8671b40f6f5d
|
[
"MIT"
] | 20
|
2018-09-03T03:36:01.000Z
|
2022-01-05T08:21:03.000Z
|
# -*- coding: utf-8 -*-
import time
from functools import wraps
from pathlib import Path
from time import perf_counter
def final_time(stime):
    """Return the elapsed time since ``stime`` (a ``perf_counter`` reading)
    formatted as "X.XXXX ms (Y.YYYY s)"."""
    elapsed = perf_counter() - stime
    return f"{elapsed * 1000:2.4f} ms ({elapsed:2.4f} s)"
def is_home(path:Path): # -> bool
    """Report whether ``path`` is a user-home path, i.e. its first
    component is '~'. Accepts either a str or a ``Path``; anything else
    (including an empty string) yields False."""
    if isinstance(path, str) and path:
        path = Path(path)
    if not isinstance(path, Path):
        return False
    parts = path.parts
    return len(parts) > 0 and parts[0] == '~'
def timed(fn):
    """Decorator that times a call to ``fn`` and stores the elapsed time as a
    string on ``fn.timed``.

    Note: the wrapper deliberately discards ``fn``'s return value and returns
    ``fn`` itself, so callers can read ``result.timed`` afterwards.
    """
    @wraps(fn)
    def _timer(*args, **kwargs):
        started = time.time()
        fn(*args, **kwargs)
        delta = time.time() - started
        fn.timed = f"[!] {fn.__name__} {delta * 1000:2.2f} ms ({delta:2.2f} s)"
        return fn
    return _timer
| 27.131579
| 81
| 0.603298
|
d61a3c20e055e95f23f158f99f65eed5dc6d5cf4
| 63,290
|
cs
|
C#
|
ambeth/Ambeth.Mapping/ambeth/mapping/ModelTransferMapper.cs
|
Dennis-Koch/ambeth
|
8552b210b8b37d3d8f66bdac2e094bf23c8b5fda
|
[
"Apache-2.0"
] | null | null | null |
ambeth/Ambeth.Mapping/ambeth/mapping/ModelTransferMapper.cs
|
Dennis-Koch/ambeth
|
8552b210b8b37d3d8f66bdac2e094bf23c8b5fda
|
[
"Apache-2.0"
] | 6
|
2017-04-24T06:55:18.000Z
|
2022-01-21T23:15:36.000Z
|
ambeth/Ambeth.Mapping/ambeth/mapping/ModelTransferMapper.cs
|
Dennis-Koch/ambeth
|
8552b210b8b37d3d8f66bdac2e094bf23c8b5fda
|
[
"Apache-2.0"
] | 4
|
2018-10-28T14:05:27.000Z
|
2022-01-08T12:54:51.000Z
|
using De.Osthus.Ambeth.Cache;
using De.Osthus.Ambeth.Collections;
using De.Osthus.Ambeth.Config;
using De.Osthus.Ambeth.Exceptions;
using De.Osthus.Ambeth.Filter.Model;
using De.Osthus.Ambeth.Ioc.Annotation;
using De.Osthus.Ambeth.Log;
using De.Osthus.Ambeth.Mapping.Config;
using De.Osthus.Ambeth.Merge;
using De.Osthus.Ambeth.Merge.Model;
using De.Osthus.Ambeth.Merge.Transfer;
using De.Osthus.Ambeth.Metadata;
using De.Osthus.Ambeth.Model;
using De.Osthus.Ambeth.Proxy;
using De.Osthus.Ambeth.Threading;
using De.Osthus.Ambeth.Typeinfo;
using De.Osthus.Ambeth.Util;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Text;
namespace De.Osthus.Ambeth.Mapping
{
public class ModelTransferMapper : IMapperService, IDisposable
{
        // Sentinel meaning "this VO member value cannot be produced yet"; compared
        // by reference in ResolveProperties.
        protected static readonly Object NOT_YET_READY = new Object();
        [LogInstance]
        public ILogger Log { private get; set; }
        // Injected framework services (Ambeth IoC).
        [Autowired]
        public ICacheHelper CacheHelper { protected get; set; }
        [Autowired]
        public ICacheModification CacheModification { protected get; set; }
        [Autowired]
        public IConversionHelper ConversionHelper { protected get; set; }
        [Autowired]
        public ICache Cache { protected get; set; }
        [Autowired]
        public IEntityFactory EntityFactory { protected get; set; }
        [Autowired]
        public IEntityMetaDataProvider EntityMetaDataProvider { protected get; set; }
        [Autowired]
        public IListTypeHelper ListTypeHelper { protected get; set; }
        [Autowired]
        public IDedicatedMapperRegistry MapperExtensionRegistry { protected get; set; }
        [Autowired]
        public IPrefetchHelper PrefetchHelper { protected get; set; }
        [Autowired]
        public IObjRefHelper OriHelper { protected get; set; }
        [Autowired]
        public IPropertyInfoProvider PropertyInfoProvider { protected get; set; }
        [Autowired]
        public ITypeInfoProvider TypeInfoProvider { protected get; set; }
        // Per-mapper-instance state (this class is stateful and disposable).
        // Cache of VO-type -> (BO member name -> VO member) lookup tables.
        protected readonly HashMap<Type, IMap<String, ITypeInfoItem>> typeToTypeInfoMap = new HashMap<Type, IMap<String, ITypeInfoItem>>();
        protected readonly HashMap<IObjRef, IObjRef> alreadyCreatedObjRefsMap = new HashMap<IObjRef, IObjRef>();
        protected readonly IdentityHashMap<Object, IMap<Type, Object>> boToSpecifiedVOMap = new IdentityHashMap<Object, IMap<Type, Object>>();
        // Identity map from value object to its mapped business object.
        protected readonly IdentityHashMap<Object, Object> voToBoMap = new IdentityHashMap<Object, Object>();
        protected readonly HashMap<CompositIdentityClassKey, Object> reverseRelationMap = new HashMap<CompositIdentityClassKey, Object>();
        // Hard references keeping loaded BOs alive against weak-ref cache eviction.
        protected readonly IdentityHashSet<Object> allBOsToKeepInCache = new IdentityHashSet<Object>();
        // Objects that received temporary (negative) ids, cleaned up later.
        protected readonly IdentityHashSet<Object> bosToRemoveTempIdFrom = new IdentityHashSet<Object>();
        protected readonly IdentityHashSet<Object> vosToRemoveTempIdFrom = new IdentityHashSet<Object>();
        // Next temporary id to hand out; decremented so temp ids are negative.
        protected long nextTempId = -1;
        [Property(MappingConfigurationConstants.InitDirectRelationsInBusinessObjects, DefaultValue = "true")]
        public bool initDirectRelationsInBusinessObjects { protected get; set; }
        /// <summary>
        /// Releases injected service references.
        /// NOTE(review): only a subset of the injected properties is cleared here
        /// (CacheHelper, EntityFactory, etc. are left set) — confirm whether that
        /// is intentional.
        /// </summary>
        public void Dispose()
        {
            ConversionHelper = null;
            EntityMetaDataProvider = null;
            TypeInfoProvider = null;
            OriHelper = null;
            Cache = null;
        }
public Object MapToBusinessObject(Object valueObject)
{
if (valueObject == null)
{
return null;
}
IList<Object> valueObjects = new Object[] { valueObject };
IList<Object> results = MapToBusinessObjectList(valueObjects);
return results[0];
}
        /// <summary>
        /// Unpacks a list-type wrapper object into its value objects and maps them
        /// to business objects via <see cref="MapToBusinessObjectList"/>.
        /// </summary>
        public IList<Object> MapToBusinessObjectListFromListType(Object listTypeObject)
        {
            IList<Object> valueObjectList = (IList<Object>)ListTypeHelper.UnpackListType(listTypeObject);
            return MapToBusinessObjectList(valueObjectList);
        }
        /// <summary>
        /// Maps a list of value objects to their business objects. Resolves the
        /// transitive VO graph, creates/loads the corresponding BOs through the
        /// current cache, copies primitives, wires relations (prefetching when
        /// initDirectRelationsInBusinessObjects is set) and finally marks all
        /// produced BOs as "to be updated".
        /// </summary>
        public IList<Object> MapToBusinessObjectList(IList<Object> valueObjectList)
        {
            if (valueObjectList.Count == 0)
            {
                return EmptyList.Empty<Object>();
            }
            ICacheIntern cache = (ICacheIntern)this.Cache.CurrentCache;
            IEntityMetaDataProvider entityMetaDataProvider = this.EntityMetaDataProvider;
            IdentityHashMap<Object, Object> voToBoMap = this.voToBoMap;
            List<Object> allValueObjects = new List<Object>(valueObjectList.Count);
            bool acquiredHardRefs = cache.AcquireHardRefTLIfNotAlready();
            // Suppress cache-change notifications while mapping writes into the cache.
            bool oldActive = CacheModification.Active;
            CacheModification.Active = true;
            try
            {
                // Collect the transitive closure of reachable value objects.
                ResolveAllValueObjectsDirectly(valueObjectList, allValueObjects, IdentityHashSet<Object>.Create(valueObjectList.Count), null);
                // Create or load the business object for every value object.
                MapBosByVos(allValueObjects, cache);
                for (int i = allValueObjects.Count; i-- > 0; )
                {
                    ResolvePrimitiveProperties(allValueObjects[i], cache);
                }
                List<DirectValueHolderRef> boToPendingRelationsList = new List<DirectValueHolderRef>();
                CHashSet<IObjRef> referencedBOsSet = new CHashSet<IObjRef>();
                HashMap<IObjRef, IObjRef> alreadyCreatedObjRefMap = new HashMap<IObjRef, IObjRef>();
                try
                {
                    for (int i = allValueObjects.Count; i-- > 0; )
                    {
                        CollectReferencedBusinessObjects(allValueObjects[i], referencedBOsSet, boToPendingRelationsList, alreadyCreatedObjRefMap, cache);
                    }
                    IList<IObjRef> referencedBOsList = referencedBOsSet.ToList();
                    if (initDirectRelationsInBusinessObjects)
                    {
                        IPrefetchState prefetchState = PrefetchHelper.Prefetch(boToPendingRelationsList);
                        // Store retrieved BOs to hard ref to suppress Weak GC handling of cache
                        allBOsToKeepInCache.Add(prefetchState);
                        // Every referenced entity must exist; ReturnMisses lets us detect gaps.
                        IList<Object> referencedBOs = cache.GetObjects(referencedBOsList, CacheDirective.FailEarly | CacheDirective.ReturnMisses);
                        for (int a = referencedBOs.Count; a-- > 0; )
                        {
                            Object referencedBO = referencedBOs[a];
                            if (referencedBO == null)
                            {
                                throw new MappingException("At least one entity could not be found: " + referencedBOsList[a].ToString());
                            }
                        }
                        //// Allocate specific pending relations to their bo fields
                        //for (int a = boToPendingRelationsList.Count; a-- > 0;)
                        //{
                        //    PendingRelation pendingRelation = boToPendingRelationsList[a];
                        //    Object businessObject = pendingRelation.BusinessObject;
                        //    IRelationInfoItem member = pendingRelation.Member;
                        //    IList<IObjRef> pendingObjRefs = pendingRelation.PendingObjRefs;
                        //    // Everything which gets missed by now does not exist in the DB.
                        //    // FailEarly is important to suppress redundant tries of previously failed loadings
                        //    IList<Object> pendingObjects = childCache.GetObjects(pendingObjRefs, CacheDirective.failEarly());
                        //    Object convertedPendingObjects = ConvertPrimitiveValue(pendingObjects, member.ElementType, member);
                        //    member.SetValue(businessObject, convertedPendingObjects);
                        //}
                    }
                }
                finally
                {
                    alreadyCreatedObjRefMap = null;
                }
                List<Object> allBusinessObjects = new List<Object>(allValueObjects.Count);
                List<DirectValueHolderRef> objRefContainers = new List<DirectValueHolderRef>(allValueObjects.Count);
                for (int i = allValueObjects.Count; i-- > 0; )
                {
                    Object valueObject = allValueObjects[i];
                    Object businessObject = voToBoMap.Get(valueObject);
                    // Give registered per-type mappers a chance to apply custom logic.
                    IDedicatedMapper dedicatedMapper = MapperExtensionRegistry.GetDedicatedMapper(businessObject.GetType());
                    if (dedicatedMapper != null)
                    {
                        dedicatedMapper.ApplySpecialMapping(businessObject, valueObject, CopyDirection.VO_TO_BO);
                    }
                    allBusinessObjects.Add(businessObject);
                    if (!initDirectRelationsInBusinessObjects)
                    {
                        continue;
                    }
                    IEntityMetaData metaData = ((IEntityMetaDataHolder)businessObject).Get__EntityMetaData();
                    RelationMember[] relationMembers = metaData.RelationMembers;
                    if (relationMembers.Length == 0)
                    {
                        continue;
                    }
                    // Queue every still-uninitialized relation value holder for prefetch.
                    IValueHolderContainer vhc = (IValueHolderContainer)businessObject;
                    for (int relationIndex = relationMembers.Length; relationIndex-- > 0; )
                    {
                        RelationMember relationMember = relationMembers[relationIndex];
                        if (ValueHolderState.INIT == vhc.Get__State(relationIndex))
                        {
                            continue;
                        }
                        objRefContainers.Add(new DirectValueHolderRef(vhc, relationMember));
                    }
                }
                if (objRefContainers.Count > 0)
                {
                    PrefetchHelper.Prefetch(objRefContainers);
                }
                // Re-resolve the root VOs by their (possibly temporary) ids to build
                // the result list in the caller's original order.
                List<IObjRef> orisToGet = new List<IObjRef>(valueObjectList.Count);
                for (int i = 0, size = valueObjectList.Count; i < size; i++)
                {
                    Object rootValueObject = valueObjectList[i];
                    IValueObjectConfig config = GetValueObjectConfig(rootValueObject.GetType());
                    IEntityMetaData metaData = entityMetaDataProvider.GetMetaData(config.EntityType);
                    IMap<String, ITypeInfoItem> boNameToVoMember = GetTypeInfoMapForVo(config);
                    Object id = GetIdFromValueObject(rootValueObject, metaData, boNameToVoMember, config);
                    ObjRef objRef = new ObjRef(metaData.EntityType, ObjRef.PRIMARY_KEY_INDEX, id, null);
                    orisToGet.Add(objRef);
                }
                IList<Object> businessObjectList = cache.GetObjects(orisToGet, CacheDirective.FailEarly | CacheDirective.ReturnMisses);
                ClearObjectsWithTempIds((IWritableCache)cache);
                for (int a = allBusinessObjects.Count; a-- > 0; )
                {
                    Object businessObject = allBusinessObjects[a];
                    if (businessObject is IDataObject)
                    {
                        ((IDataObject)businessObject).ToBeUpdated = true;
                    }
                }
                return businessObjectList;
            }
            finally
            {
                // Always restore notification state and release the hard-ref scope.
                CacheModification.Active = oldActive;
                cache.ClearHardRefs(acquiredHardRefs);
            }
        }
public Object MapToValueObject(Object businessObject, Type valueObjectType)
{
if (businessObject == null)
{
return null;
}
IList<Object> businessObjects = new Object[] { businessObject };
IList<Object> results = MapToValueObjectList(businessObjects, valueObjectType);
return results[0];
}
        /// <summary>
        /// Maps business objects to value objects and packs the result into an
        /// instance of the given list-type wrapper.
        /// </summary>
        public Object MapToValueObjectListType(IList<Object> businessObjectList, Type valueObjectType, Type listType)
        {
            IList<Object> valueObjectList = MapToValueObjectList(businessObjectList, valueObjectType);
            return ListTypeHelper.PackInListType(valueObjectList, listType);
        }
        /// <summary>
        /// Extracts the ids of the given business objects and packs them into an
        /// instance of the given ref-list-type wrapper.
        /// </summary>
        public Object MapToValueObjectRefListType(IList<Object> businessObjectList, Type valueObjectRefListType)
        {
            IList<Object> valueObjectList = MapToValueObjectRefList(businessObjectList);
            return ListTypeHelper.PackInListType(valueObjectList, valueObjectRefListType);
        }
        /// <summary>
        /// Maps a list of business objects (all of the same entity type) to value
        /// objects of <paramref name="valueObjectType"/>. Relations are resolved
        /// iteratively: value holders discovered during mapping are prefetched in
        /// batches and deferred work is re-run until no pending work remains.
        /// </summary>
        /// <exception cref="ArgumentException">If the entity type configured for
        /// <paramref name="valueObjectType"/> does not match the BOs' type.</exception>
        public IList<Object> MapToValueObjectList(IList<Object> businessObjectList, Type valueObjectType)
        {
            if (businessObjectList.Count == 0)
            {
                return EmptyList.Empty<Object>();
            }
            ICache cache = this.Cache.CurrentCache;
            IPrefetchHelper prefetchHelper = this.PrefetchHelper;
            // Ensure all potential value-holders of To-One BOs are initialized in a batch
            prefetchHelper.Prefetch(businessObjectList);
            // Checking for correct types
            IEntityMetaData boMetaData = ((IEntityMetaDataHolder)businessObjectList[0]).Get__EntityMetaData();
            Type businessObjectType = boMetaData.EntityType;
            IValueObjectConfig config = GetValueObjectConfig(valueObjectType);
            if (!config.EntityType.Equals(businessObjectType))
            {
                throw new ArgumentException("'" + businessObjectType.FullName + "' cannot be mapped to '" + valueObjectType.FullName + "'");
            }
            List<Object> pendingValueHolders = new List<Object>();
            List<IBackgroundWorkerDelegate> runnables = new List<IBackgroundWorkerDelegate>();
            List<Object> valueObjectList = new List<Object>(businessObjectList.Count);
            for (int i = 0; i < businessObjectList.Count; i++)
            {
                Object businessObject = businessObjectList[i];
                Object valueObject = SubMapToCachedValueObject(businessObject, valueObjectType, pendingValueHolders, runnables);
                valueObjectList.Add(valueObject);
            }
            // Drain deferred work: prefetch pending value holders, then run queued
            // delegates; either step may enqueue new work, hence the loop.
            while (pendingValueHolders.Count > 0 || runnables.Count > 0)
            {
                if (pendingValueHolders.Count > 0)
                {
                    prefetchHelper.Prefetch(pendingValueHolders);
                    pendingValueHolders.Clear();
                }
                List<IBackgroundWorkerDelegate> runnablesClone = new List<IBackgroundWorkerDelegate>(runnables);
                // Reset ORIGINAL lists because they may have been referenced from within cascading runnables
                runnables.Clear();
                for (int a = 0, size = runnablesClone.Count; a < size; a++)
                {
                    runnablesClone[a]();
                }
                // PendingValueHolders might be (re-)filled after the runnables. So we need a while loop
            }
            ClearObjectsWithTempIds((IWritableCache)cache);
            return valueObjectList;
        }
        /// <summary>
        /// Extracts the id of every business object (using the metadata-selected
        /// id member) into a plain list of id values.
        /// </summary>
        /// <exception cref="ArgumentException">If any business object has a null id.</exception>
        protected IList<Object> MapToValueObjectRefList(IList<Object> businessObjectList)
        {
            if (businessObjectList.Count == 0)
            {
                return EmptyList.Empty<Object>();
            }
            // Checking for correct types
            List<Object> refList = new List<Object>(businessObjectList.Count);
            for (int a = 0, size = businessObjectList.Count; a < size; a++)
            {
                Object businessObject = businessObjectList[a];
                IEntityMetaData metaData = ((IEntityMetaDataHolder)businessObject).Get__EntityMetaData();
                PrimitiveMember idMember = SelectIdMember(metaData);
                Object id = idMember.GetValue(businessObject, false);
                if (id == null)
                {
                    throw new ArgumentException("BusinessObject '" + businessObject + "' at index " + a + " does not have a valid ID");
                }
                refList.Add(id);
            }
            return refList;
        }
        /// <summary>
        /// Copies the primitive members of <paramref name="businessObject"/> into
        /// <paramref name="valueObject"/>, then resolves each mapped relation member.
        /// Relation values not yet available (NOT_YET_READY) are retried via a
        /// delegate queued into <paramref name="runnables"/> after prefetching.
        /// </summary>
        protected void ResolveProperties(Object businessObject, Object valueObject, ICollection<Object> pendingValueHolders,
            ICollection<IBackgroundWorkerDelegate> runnables)
        {
            IEntityMetaDataProvider entityMetaDataProvider = this.EntityMetaDataProvider;
            IEntityMetaData businessObjectMetaData = ((IEntityMetaDataHolder)businessObject).Get__EntityMetaData();
            IValueObjectConfig config = entityMetaDataProvider.GetValueObjectConfig(valueObject.GetType());
            IMap<String, ITypeInfoItem> boNameToVoMember = GetTypeInfoMapForVo(config);
            CopyPrimitives(businessObject, valueObject, config, CopyDirection.BO_TO_VO, businessObjectMetaData, boNameToVoMember);
            RelationMember[] relationMembers = businessObjectMetaData.RelationMembers;
            if (relationMembers.Length == 0)
            {
                return;
            }
            IObjRefContainer vhc = (IObjRefContainer)businessObject;
            for (int relationIndex = relationMembers.Length; relationIndex-- > 0; )
            {
                RelationMember boMember = relationMembers[relationIndex];
                String boMemberName = boMember.Name;
                String voMemberName = config.GetValueObjectMemberName(boMemberName);
                ITypeInfoItem voMember = boNameToVoMember.Get(boMemberName);
                // Skip BO relations the VO does not map or explicitly ignores.
                if (config.IsIgnoredMember(voMemberName) || voMember == null)
                {
                    continue;
                }
                Object voMemberValue = CreateVOMemberValue(vhc, relationIndex, boMember, config, voMember, pendingValueHolders, runnables);
                if (!Object.ReferenceEquals(voMemberValue, NOT_YET_READY))
                {
                    SetPropertyValue(valueObject, voMember, voMemberValue);
                }
                else
                {
                    // Value holder not initialized yet: retry after the batch prefetch.
                    runnables.Add(delegate()
                    {
                        Object voMemberValue2 = CreateVOMemberValue(vhc, relationIndex, boMember, config, voMember, pendingValueHolders, runnables);
                        if (Object.ReferenceEquals(voMemberValue2, NOT_YET_READY))
                        {
                            throw new Exception("Must never happen");
                        }
                        SetPropertyValue(valueObject, voMember, voMemberValue2);
                    });
                }
            }
        }
        /// <summary>
        /// Writes <paramref name="voMemberValue"/> into the given VO member.
        /// Supports getter-only IList properties by clearing the existing list and
        /// copying the new elements into it.
        /// Added to fix https://jira.osthus.de/browse/AMBETH-499
        /// </summary>
        /// <param name="valueObject">Value object instance to write to.</param>
        /// <param name="voMember">Descriptor of the target VO property.</param>
        /// <param name="voMemberValue">Value to assign (or list contents to copy).</param>
        protected void SetPropertyValue(Object valueObject, ITypeInfoItem voMember, Object voMemberValue)
        {
            if (voMember.CanWrite)
            {
                voMember.SetValue(valueObject, voMemberValue);
            }
            else if (voMember.CanRead)
            {
                Object currentValue = voMember.GetValue(valueObject);
                if (currentValue == null)
                {
                    String msg = "Property has only a getter and is null: " + valueObject.GetType().FullName + "." + voMember.Name;
                    throw new Exception(msg);
                }
                Type realType = voMember.RealType;
                if (typeof(IList).IsAssignableFrom(realType))
                {
                    // Getter-only collection: mutate the existing list in place.
                    IList list = (IList)currentValue;
                    list.Clear();
                    foreach (Object elem in (IList)voMemberValue)
                    {
                        list.Add(elem);
                    }
                }
                else
                {
                    String msg = "Handling of getter-only property type " + realType.FullName + " not yet implemented: " + valueObject.GetType().FullName + "."
                        + voMember.Name;
                    throw new Exception(msg);
                }
            }
            else
            {
                String msg = "Property not accessible: " + valueObject.GetType().FullName + "." + voMember.Name;
                throw new Exception(msg);
            }
        }
        /// <summary>
        /// Ensures every value object has a corresponding business object in
        /// voToBoMap: VOs with an id are loaded from the cache (or freshly created
        /// on a miss); VOs without an id get a new BO and a temporary id.
        /// </summary>
        protected void MapBosByVos(IList<Object> valueObjects, ICacheIntern cache)
        {
            List<IObjRef> toLoad = new List<IObjRef>();
            // Parallel list to toLoad: the VO waiting for each requested BO.
            List<Object> waitingVOs = new List<Object>();
            IEntityMetaDataProvider entityMetaDataProvider = this.EntityMetaDataProvider;
            IMap<Object, Object> voToBoMap = this.voToBoMap;
            for (int i = valueObjects.Count; i-- > 0; )
            {
                Object valueObject = valueObjects[i];
                if (valueObject == null || voToBoMap.ContainsKey(valueObject))
                {
                    continue;
                }
                IValueObjectConfig config = GetValueObjectConfig(valueObject.GetType());
                IEntityMetaData boMetaData = entityMetaDataProvider.GetMetaData(config.EntityType);
                IMap<String, ITypeInfoItem> boNameToVoMember = GetTypeInfoMapForVo(config);
                Object businessObject = null;
                Object id = GetIdFromValueObject(valueObject, boMetaData, boNameToVoMember, config);
                if (id != null)
                {
                    if (initDirectRelationsInBusinessObjects)
                    {
                        // Defer: load existing BOs from the cache in one batch below.
                        IObjRef ori = GetObjRef(config.EntityType, ObjRef.PRIMARY_KEY_INDEX, id, alreadyCreatedObjRefsMap);
                        toLoad.Add(ori);
                        waitingVOs.Add(valueObject);
                    }
                    else
                    {
                        businessObject = CreateBusinessObject(boMetaData, cache);
                        voToBoMap.Put(valueObject, businessObject);
                    }
                }
                else
                {
                    // No id yet: create a new BO and assign a temporary id to the VO.
                    businessObject = CreateBusinessObject(boMetaData, cache);
                    SetTempIdToValueObject(valueObject, boMetaData, boNameToVoMember, config);
                    bosToRemoveTempIdFrom.Add(businessObject);
                    id = GetIdFromValueObject(valueObject, boMetaData, boNameToVoMember, config);
                    voToBoMap.Put(valueObject, businessObject);
                }
            }
            if (toLoad.Count > 0)
            {
                // ReturnMisses: a null slot means the entity is unknown -> create new.
                IList<Object> businessObjects = cache.GetObjects(toLoad, CacheDirective.ReturnMisses);
                for (int i = businessObjects.Count; i-- > 0; )
                {
                    Object businessObject = businessObjects[i];
                    Object valueObject = waitingVOs[i];
                    if (businessObject == null)
                    {
                        IValueObjectConfig config = GetValueObjectConfig(valueObject.GetType());
                        IEntityMetaData boMetaData = entityMetaDataProvider.GetMetaData(config.EntityType);
                        businessObject = CreateBusinessObject(boMetaData, cache);
                    }
                    voToBoMap.Put(valueObject, businessObject);
                }
            }
        }
        /// <summary>
        /// Creates a fresh entity for the given metadata and registers it with the
        /// cache before returning it.
        /// </summary>
        protected Object CreateBusinessObject(IEntityMetaData boMetaData, ICacheIntern cache)
        {
            Object businessObject = EntityFactory.CreateEntity(boMetaData);
            cache.AssignEntityToCache(businessObject);
            return businessObject;
        }
        /// <summary>
        /// Copies all primitive members from the value object into its mapped
        /// business object and registers the BO's primitive state with the cache.
        /// Requires the BO to already exist in voToBoMap (see MapBosByVos).
        /// </summary>
        protected void ResolvePrimitiveProperties(Object valueObject, ICacheIntern cache)
        {
            IValueObjectConfig config = GetValueObjectConfig(valueObject.GetType());
            IEntityMetaData boMetaData = EntityMetaDataProvider.GetMetaData(config.EntityType);
            IMap<String, ITypeInfoItem> boNameToVoMember = GetTypeInfoMapForVo(config);
            Object businessObject = voToBoMap.Get(valueObject);
            if (businessObject == null)
            {
                throw new Exception("Must never happen");
            }
            Object[] primitives = CopyPrimitives(businessObject, valueObject, config, CopyDirection.VO_TO_BO, boMetaData, boNameToVoMember);
            Object id = boMetaData.IdMember.GetValue(businessObject, false);
            Object version = boMetaData.VersionMember.GetValue(businessObject, false);
            cache.AddDirect(boMetaData, id, version, businessObject, primitives, null);// relationValues);
        }
protected void CollectReferencedBusinessObjects(Object valueObject, IISet<IObjRef> referencedBOsSet, IList<DirectValueHolderRef> boToPendingRelationsList,
IMap<IObjRef, IObjRef> alreadyCreatedObjRefMap, ICacheIntern cache)
{
IValueObjectConfig config = GetValueObjectConfig(valueObject.GetType());
IEntityMetaDataProvider entityMetaDataProvider = this.EntityMetaDataProvider;
IEntityMetaData boMetaData = entityMetaDataProvider.GetMetaData(config.EntityType);
IMap<String, ITypeInfoItem> boNameToVoMember = GetTypeInfoMapForVo(config);
IdentityHashMap<Object, Object> voToBoMap = this.voToBoMap;
RelationMember[] relationMembers = boMetaData.RelationMembers;
if (relationMembers.Length == 0)
{
return;
}
IValueHolderContainer businessObject = (IValueHolderContainer)voToBoMap.Get(valueObject);
if (businessObject == null)
{
throw new Exception("Must never happen");
}
ICacheHelper cacheHelper = this.CacheHelper;
IConversionHelper conversionHelper = this.ConversionHelper;
IListTypeHelper listTypeHelper = this.ListTypeHelper;
HashMap<CompositIdentityClassKey, Object> reverseRelationMap = this.reverseRelationMap;
StringBuilder sb = new StringBuilder();
for (int relationIndex = relationMembers.Length; relationIndex-- > 0; )
{
RelationMember boMember = relationMembers[relationIndex];
String boMemberName = boMember.Name;
String voMemberName = config.GetValueObjectMemberName(boMemberName);
ITypeInfoItem voMember = boNameToVoMember.Get(boMemberName);
Object voValue = null;
if (voMember != null)
{
if (config.IsIgnoredMember(voMemberName))
{
// Nothing to collect
Object convertedEmptyRelation = ConvertPrimitiveValue(EmptyList.Empty<Object>(), typeof(Object), boMember.RealType, boMember.ElementType);
boMember.SetValue(businessObject, convertedEmptyRelation);
continue;
}
sb.Length = 0;
String voSpecifiedName = sb.Append(boMemberName).Append("Specified").ToString();
ITypeInfoItem voSpecifiedMember = boNameToVoMember.Get(voSpecifiedName);
if (voSpecifiedMember != null && !(bool)(voSpecifiedMember.GetValue(valueObject)))
{
continue;
}
voValue = voMember.GetValue(valueObject);
}
else
{
Object boValue = null;
// Workaround bis das Problem (TODO) behoben ist, um zumindest eindeutige Relationen fehlerfrei
// aufzuloesen.
CompositIdentityClassKey key = new CompositIdentityClassKey(valueObject, boMember.ElementType);
voValue = reverseRelationMap.Get(key);
if (voValue != null)
{
boValue = voToBoMap.Get(voValue);
boMember.SetValue(businessObject, boValue);
continue;
}
Object id = boMetaData.IdMember.GetValue(businessObject, false);
if (id != null)
{
// TODO value ueber die Rueckreferenz finden
// Bis dahin wird es nach dem Mapping beim Speichern knallen, weil der LazyValueHolder bei neuen
// Entitaeten nicht aufgeloest werden kann.
if (ValueHolderState.INIT != businessObject.Get__State(relationIndex))
{
businessObject.Set__Uninitialized(relationIndex, null);
}
}
else if (boMember.RealType.Equals(boMember.ElementType))
{
// To-one relation
boValue = null;
boMember.SetValue(businessObject, boValue);
}
else
{
// To-many relation
boValue = ListUtil.CreateCollectionOfType(boMember.RealType, 0);
boMember.SetValue(businessObject, boValue);
}
continue;
}
if (voValue == null)
{
// Nothing to collect
Object convertedEmptyRelation = ConvertPrimitiveValue(EmptyList.Empty<Object>(), typeof(Object), boMember.RealType, boMember.ElementType);
boMember.SetValue(businessObject, convertedEmptyRelation);
continue;
}
if (config.HoldsListType(voMember.Name))
{
voValue = listTypeHelper.UnpackListType(voValue);
}
IList<Object> voList = ListUtil.AnyToList(voValue);
if (voList.Count == 0)
{
// Nothing to collect
Object convertedEmptyRelation = ConvertPrimitiveValue(EmptyList.Empty<Object>(), typeof(Object), boMember.RealType, boMember.ElementType);
boMember.SetValue(businessObject, convertedEmptyRelation);
continue;
}
IEntityMetaData boMetaDataOfItem = entityMetaDataProvider.GetMetaData(boMember.ElementType);
PrimitiveMember boIdMemberOfItem = SelectIdMember(boMetaDataOfItem);
sbyte idIndex = boMetaDataOfItem.GetIdIndexByMemberName(boIdMemberOfItem.Name);
List<IObjRef> pendingRelations = new List<IObjRef>();
ValueObjectMemberType memberType = config.GetValueObjectMemberType(voMemberName);
bool mapAsBasic = memberType == ValueObjectMemberType.BASIC;
if (!mapAsBasic)
{
for (int a = 0, size = voList.Count; a < size; a++)
{
Object voItem = voList[a];
IValueObjectConfig configOfItem = entityMetaDataProvider.GetValueObjectConfig(voItem.GetType());
if (configOfItem == null)
{
// This is a simple id which we can use
IObjRef objRef = GetObjRef(boMetaDataOfItem.EntityType, idIndex, voItem, alreadyCreatedObjRefsMap);
referencedBOsSet.Add(objRef);
pendingRelations.Add(objRef);
continue;
}
// voItem is a real VO handle
Object boItem = voToBoMap.Get(voItem);
Object idOfItem = GetIdFromBusinessObject(boItem, boMetaDataOfItem);
if (idOfItem == null)
{
throw new Exception("All BOs must have at least a temporary id at this point. " + boItem);
}
IObjRef objRef2 = GetObjRef(boMetaDataOfItem.EntityType, ObjRef.PRIMARY_KEY_INDEX, idOfItem, alreadyCreatedObjRefsMap);
referencedBOsSet.Add(objRef2);
pendingRelations.Add(objRef2);
}
}
if (mapAsBasic)
{
Type targetType = boMember.ElementType;
List<Object> boList = new List<Object>();
for (int a = 0, size = voList.Count; a < size; a++)
{
Object voItem = voList[a];
Object boItem = conversionHelper.ConvertValueToType(targetType, voItem);
boList.Add(boItem);
}
Object relationValue = cacheHelper.ConvertResultListToExpectedType(boList, boMember.RealType, boMember.ElementType);
boMember.SetValue(businessObject, relationValue);
}
else if (pendingRelations.Count == 0)
{
Object relationValue = cacheHelper.CreateInstanceOfTargetExpectedType(boMember.RealType, boMember.ElementType);
boMember.SetValue(businessObject, relationValue);
}
else
{
IObjRef[] objRefs = pendingRelations.Count > 0 ? pendingRelations.ToArray() : ObjRef.EMPTY_ARRAY;
businessObject.Set__Uninitialized(relationIndex, objRefs);
cache.AssignEntityToCache(businessObject);
referencedBOsSet.AddAll(objRefs);
boToPendingRelationsList.Add(new DirectValueHolderRef(businessObject, boMember));
}
}
}
// Returns a canonical IObjRef for (entityType, idIndex, id): if an equal ObjRef
// was created earlier it is reused from the dedup map, otherwise the freshly
// built instance is registered and returned.
protected IObjRef GetObjRef(Type entityType, sbyte idIndex, Object id, IMap<IObjRef, IObjRef> alreadyCreatedObjRefMap)
{
    ObjRef candidate = new ObjRef(entityType, idIndex, id, null);
    IObjRef existing = alreadyCreatedObjRefMap.Get(candidate);
    if (existing != null)
    {
        return existing;
    }
    alreadyCreatedObjRefMap.Put(candidate, candidate);
    return candidate;
}
// Builds the value that goes into one VO relation member from the corresponding
// BO relation. Returns NOT_YET_READY (and queues a pending value holder) when
// the BO relation is not yet initialized; otherwise converts each referenced BO
// into either a basic value, a plain id, or a recursively mapped sub-VO, and
// finally packages the result to match the VO member's declared type
// (list-type wrapper, array, collection, or single value).
protected Object CreateVOMemberValue(IObjRefContainer businessObject, int relationIndex, RelationMember boMember, IValueObjectConfig config, ITypeInfoItem voMember,
ICollection<Object> pendingValueHolders, ICollection<IBackgroundWorkerDelegate> runnables)
{
Object voMemberValue = null;
Type voMemberType = voMember.RealType;
bool holdsListType = config.HoldsListType(voMember.Name);
// A "singular" member holds exactly one value (real type == element type, no list wrapper).
bool singularValue = voMemberType.Equals(voMember.ElementType) && !holdsListType;
// TODO: How to check for instance of IList? what if it is IList<T> ?
//if (!singularValue && !List.class.isAssignableFrom(voMemberType) && !holdsListType)
//{
// throw new ArgumentException("Unsupportet collection type '" + voMemberType.getName() + "'");
//}
// Uninitialized lazy relation: defer mapping until the holder is resolved.
if (ValueHolderState.INIT != businessObject.Get__State(relationIndex))
{
pendingValueHolders.Add(new DirectValueHolderRef(businessObject, boMember));
return NOT_YET_READY;
}
Object boValue = boMember.GetValue(businessObject, false);
IList<Object> referencedBOs = ListUtil.AnyToList(boValue);
IList<Object> referencedVOs = null;
IConversionHelper conversionHelper = this.ConversionHelper;
IEntityMetaDataProvider entityMetaDataProvider = this.EntityMetaDataProvider;
if (referencedBOs.Count > 0)
{
referencedVOs = new List<Object>(referencedBOs.Count);
Type voMemberElementType = voMember.ElementType;
IValueObjectConfig refConfig = entityMetaDataProvider.GetValueObjectConfig(voMemberElementType);
bool mapAsBasic = config.GetValueObjectMemberType(voMember.Name) == ValueObjectMemberType.BASIC;
IEntityMetaData referencedBOMetaData = entityMetaDataProvider.GetMetaData(boMember.ElementType);
PrimitiveMember refBOBuidMember = SelectIdMember(referencedBOMetaData);
PrimitiveMember refBOVersionMember = referencedBOMetaData.VersionMember;
sbyte refBOBuidIndex = referencedBOMetaData.GetIdIndexByMemberName(refBOBuidMember.Name);
Type expectedVOType = config.GetMemberType(voMember.Name);
IObjRefProvider buidOriProvider = new MappingObjRefProvider(refBOBuidMember, refBOVersionMember, refBOBuidIndex);
for (int i = 0; i < referencedBOs.Count; i++)
{
Object refBO = referencedBOs[i];
if (mapAsBasic)
{
// BASIC members: plain type conversion, no VO mapping.
Object refVO = conversionHelper.ConvertValueToType(expectedVOType, refBO);
referencedVOs.Add(refVO);
continue;
}
if (refConfig == null)
{
// Element type has no VO config: the relation is represented by ids only.
IObjRef refOri = OriHelper.GetCreateObjRef(refBO, buidOriProvider);
if (refOri == null || refOri.IdNameIndex != refBOBuidIndex)
{
throw new ArgumentException("ORI of referenced BO is null or does not contain BUID: " + refOri);
}
if (refOri.Id != null)
{
referencedVOs.Add(refOri.Id);
}
else
{
throw new Exception("Relation ID is null:" + refBO);
}
}
else
{
// Element type has its own VO config: map the BO to a (cached) sub-VO.
referencedVOs.Add(SubMapToCachedValueObject(refBO, voMemberElementType, pendingValueHolders, runnables));
}
}
}
// Package the converted values to fit the declared VO member type.
if (!singularValue)
{
if (holdsListType)
{
voMemberValue = ListTypeHelper.PackInListType(referencedVOs, voMemberType);
}
else
{
if (referencedVOs != null && voMemberType.IsAssignableFrom(referencedVOs.GetType()))
{
// The List<Object> built above is already assignable - use it as-is.
voMemberValue = referencedVOs;
}
else if (voMemberType.IsArray)
{
if (referencedVOs == null)
{
referencedVOs = EmptyList.Empty<Object>();
}
voMemberValue = ListUtil.AnyToArray(referencedVOs, voMemberType.GetElementType());
}
else
{
if (referencedVOs == null)
{
referencedVOs = EmptyList.Empty<Object>();
}
voMemberValue = ListUtil.CreateCollectionOfType(voMemberType, referencedVOs.Count);
ListUtil.FillList(voMemberValue, referencedVOs);
}
}
}
else if (referencedVOs != null)
{
// Singular member: take the first (and only expected) converted value.
voMemberValue = referencedVOs[0];
}
// null when the relation was empty and the member is singular/list-typed.
return voMemberValue;
}
// Looks up the value-object configuration for the given VO type.
// A missing configuration is treated as a programming error and reported loudly.
protected IValueObjectConfig GetValueObjectConfig(Type valueObjectType)
{
    IValueObjectConfig config = EntityMetaDataProvider.GetValueObjectConfig(valueObjectType);
    if (config != null)
    {
        return config;
    }
    throw new Exception("No config found for value object type '" + valueObjectType.Name + "'");
}
// Depth-first scan that collects every reachable value object into allDirectVOs
// (cycle-safe via alreadyScannedSet) and records each VO's parent BO type in
// reverseRelationMap so relations without a back-link can be resolved later.
// Enumerables are flattened; the parent is propagated unchanged through them.
protected void ResolveAllValueObjectsDirectly(Object valueObject, IList<Object> allDirectVOs, IdentityHashSet<Object> alreadyScannedSet, Object parent)
{
// Identity-based visited check doubles as the cycle guard.
if (valueObject == null || !alreadyScannedSet.Add(valueObject))
{
return;
}
if (valueObject is IEnumerable)
{
foreach (Object item in (IEnumerable)valueObject)
{
ResolveAllValueObjectsDirectly(item, allDirectVOs, alreadyScannedSet, parent);
}
return;
}
// filling map for resolving relations without back-link
// null for root or non-unique cases
Type parentBoType = null;
if (parent != null)
{
IValueObjectConfig parentConfig = EntityMetaDataProvider.GetValueObjectConfig(parent.GetType());
parentBoType = parentConfig.EntityType;
}
CompositIdentityClassKey key = new CompositIdentityClassKey(valueObject, parentBoType);
if (!reverseRelationMap.ContainsKey(key))
{
reverseRelationMap.Put(key, parent);
}
else
{
// Second sighting of the same key: parent is ambiguous, store null.
reverseRelationMap.Put(key, null);
}
IValueObjectConfig config = EntityMetaDataProvider.GetValueObjectConfig(valueObject.GetType());
if (config == null)
{
// Not a mapped value object - nothing further to scan.
return;
}
allDirectVOs.Add(valueObject);
// Filter/sort descriptors are non-entities: no relation members to follow.
if (HandleNoEntities(valueObject, config))
{
return;
}
IMap<String, ITypeInfoItem> boNameToVoMember = GetTypeInfoMapForVo(config);
IEntityMetaData metaData = EntityMetaDataProvider.GetMetaData(config.EntityType);
// Recurse into every mapped, non-ignored relation member.
foreach (ITypeInfoItem boMember in metaData.RelationMembers)
{
String boMemberName = boMember.Name;
String voMemberName = config.GetValueObjectMemberName(boMemberName);
ValueObjectMemberType valueObjectMemberType = config.GetValueObjectMemberType(voMemberName);
ITypeInfoItem voMember = boNameToVoMember.Get(boMemberName);
if (voMember == null || config.IsIgnoredMember(voMemberName) || valueObjectMemberType == ValueObjectMemberType.BASIC)
{
// ValueObjectMemberType.BASIC members of entityType VO are special case mappings via conversionHelper
continue;
}
Object item = voMember.GetValue(valueObject, false);
if (item == null)
{
// Nothing to resolve
continue;
}
if (config.HoldsListType(voMember.Name))
{
item = ListTypeHelper.UnpackListType(item);
}
ResolveAllValueObjectsDirectly(item, allDirectVOs, alreadyScannedSet, valueObject);
}
}
// Returns true for value-object types that do not map to persisted entities
// (filter and sort descriptors) and therefore need no relation resolution.
// The valueObject parameter is unused but kept for interface compatibility.
protected bool HandleNoEntities(Object valueObject, IValueObjectConfig config)
{
    Type entityType = config.EntityType;
    // Collapsed the former if/else-if chain: both branches returned true,
    // so this is a single boolean expression.
    return typeof(IFilterDescriptor).IsAssignableFrom(entityType)
        || typeof(ISortDescriptor).IsAssignableFrom(entityType);
}
// Maps a referenced (sub-)business object to a value object of the requested
// type, memoizing the result per (BO identity, VO type) in boToSpecifiedVOMap
// so the same BO yields the same VO instance throughout one mapping run.
// BOs without an id get a temporary id first; a registered dedicated mapper,
// if any, is applied on every call (also for cached VOs).
protected Object SubMapToCachedValueObject(Object subBusinessObject, Type valueObjectType, ICollection<Object> pendingValueHolders,
ICollection<IBackgroundWorkerDelegate> runnables)
{
IMap<Type, Object> boVOsMap = boToSpecifiedVOMap.Get(subBusinessObject);
if (boVOsMap == null)
{
// Identity map: one BO instance may map to several VO types.
boVOsMap = new IdentityHashMap<Type, Object>();
boToSpecifiedVOMap.Put(subBusinessObject, boVOsMap);
}
IEntityMetaData metaData = ((IEntityMetaDataHolder)subBusinessObject).Get__EntityMetaData();
Object subValueObject = boVOsMap.Get(valueObjectType);
if (subValueObject == null)
{
// Register the new VO BEFORE resolving properties so cyclic references
// find it in the map instead of recursing forever.
subValueObject = Activator.CreateInstance(valueObjectType);
boVOsMap.Put(valueObjectType, subValueObject);
Object id = GetIdFromBusinessObject(subBusinessObject, metaData);
if (id == null)
{
// New entity: hand out a temp id and remember both objects for cleanup.
SetTempIdToBusinessObject(subBusinessObject, metaData);
vosToRemoveTempIdFrom.Add(subValueObject);
}
ResolveProperties(subBusinessObject, subValueObject, pendingValueHolders, runnables);
}
IDedicatedMapper dedicatedMapper = MapperExtensionRegistry.GetDedicatedMapper(metaData.EntityType);
if (dedicatedMapper != null)
{
dedicatedMapper.ApplySpecialMapping(subBusinessObject, subValueObject, CopyDirection.BO_TO_VO);
}
return subValueObject;
}
// Resolves the id of a value object by delegating to the VO member that
// mirrors the business object's id member.
public Object GetIdFromValueObject(Object valueObject)
{
    IEntityMetaDataProvider metaDataProvider = this.EntityMetaDataProvider;
    IValueObjectConfig config = metaDataProvider.GetValueObjectConfig(valueObject.GetType());
    IMap<String, ITypeInfoItem> boNameToVoMember = GetTypeInfoMapForVo(config);
    IEntityMetaData boMetaData = metaDataProvider.GetMetaData(config.EntityType);
    return GetIdFromValueObject(valueObject, boMetaData, boNameToVoMember, config);
}
// Reads the version value of a value object via the VO member mapped from the
// BO's version member.
// NOTE(review): assumes the entity has a version member and the VO maps it -
// boMetaData.VersionMember or voVersionMember being null would throw here; confirm callers.
public Object GetVersionFromValueObject(Object valueObject)
{
IEntityMetaDataProvider entityMetaDataProvider = this.EntityMetaDataProvider;
IValueObjectConfig config = entityMetaDataProvider.GetValueObjectConfig(valueObject.GetType());
IEntityMetaData boMetaData = entityMetaDataProvider.GetMetaData(config.EntityType);
IMap<String, ITypeInfoItem> boNameToVoMember = GetTypeInfoMapForVo(config);
String boVersionMemberName = boMetaData.VersionMember.Name;
ITypeInfoItem voVersionMember = boNameToVoMember.Get(boVersionMemberName);
// Second argument false - presumably "no lazy initialization"; TODO confirm against ITypeInfoItem.
return voVersionMember.GetValue(valueObject, false);
}
// Reads the VO-side id value via the member mapped from the BO id member.
protected Object GetIdFromValueObject(Object valueObject, IEntityMetaData boMetaData, IMap<String, ITypeInfoItem> boNameToVoMember, IValueObjectConfig config)
{
    return getVoIdMember(config, boMetaData, boNameToVoMember).GetValue(valueObject, false);
}
// Assigns a fresh (negative) temporary id to the value object and tracks it so
// the temp id can be stripped again after mapping.
protected void SetTempIdToValueObject(Object valueObject, IEntityMetaData boMetaData, IMap<String, ITypeInfoItem> boNameToVoMember, IValueObjectConfig config)
{
    ITypeInfoItem idMember = getVoIdMember(config, boMetaData, boNameToVoMember);
    idMember.SetValue(valueObject, GetNextTempIdAs(idMember.ElementType));
    vosToRemoveTempIdFrom.Add(valueObject);
}
// Resets the VO id member to the null-equivalent value of its element type,
// undoing a previously assigned temporary id.
protected void RemoveTempIdFromValueObject(Object valueObject, IEntityMetaData boMetaData, IMap<String, ITypeInfoItem> boNameToVoMember,
    IValueObjectConfig config)
{
    ITypeInfoItem idMember = getVoIdMember(config, boMetaData, boNameToVoMember);
    idMember.SetValue(valueObject, NullEquivalentValueUtil.GetNullEquivalentValue(idMember.ElementType));
}
// Reads the primary id of a business object ('false' mirrors the read flags
// used for all other member reads in this class).
protected Object GetIdFromBusinessObject(Object businessObject, IEntityMetaData metaData)
{
    PrimitiveMember idMember = metaData.IdMember;
    return idMember.GetValue(businessObject, false);
}
// Assigns a fresh temporary id to the business object and tracks it for later
// cleanup in ClearObjectsWithTempIds.
protected void SetTempIdToBusinessObject(Object businessObject, IEntityMetaData metaData)
{
    PrimitiveMember idMember = metaData.IdMember;
    idMember.SetValue(businessObject, GetNextTempIdAs(idMember.ElementType));
    bosToRemoveTempIdFrom.Add(businessObject);
}
// Evicts the business object (keyed by its temporary id) from the cache and
// then clears the temp id. Order matters: the cache entry is removed while the
// object still carries the id the cache knows it by. The passed tempObjRef is
// a reusable scratch instance mutated in place by each call.
protected void RemoveTempIdFromBusinessObject(Object businessObject, IEntityMetaData metaData, ObjRef tempObjRef, IWritableCache cache)
{
PrimitiveMember idMember = metaData.IdMember;
Object id = idMember.GetValue(businessObject);
tempObjRef.RealType = metaData.EntityType;
tempObjRef.IdNameIndex = ObjRef.PRIMARY_KEY_INDEX;
tempObjRef.Id = id;
cache.Remove(tempObjRef);
// Only after cache removal is the temporary id discarded.
idMember.SetValue(businessObject, null);
}
// Maps the BO id member name to its VO-side counterpart.
// (Method name keeps its legacy lower-case spelling for caller compatibility.)
protected ITypeInfoItem getVoIdMember(IValueObjectConfig config, IEntityMetaData boMetaData, IMap<String, ITypeInfoItem> boNameToVoMember)
{
    return boNameToVoMember.Get(boMetaData.IdMember.Name);
}
// Strips all temporary ids handed out during mapping: tracked VOs get their id
// reset to the null-equivalent value, tracked BOs are evicted from the cache
// and their id cleared. Both tracking sets are emptied afterwards.
protected void ClearObjectsWithTempIds(IWritableCache cache)
{
    IEntityMetaDataProvider entityMetaDataProvider = this.EntityMetaDataProvider;
    ISet<Object> vosToRemoveTempIdFrom = this.vosToRemoveTempIdFrom;
    if (vosToRemoveTempIdFrom.Count != 0)
    {
        foreach (Object vo in vosToRemoveTempIdFrom)
        {
            IValueObjectConfig config = entityMetaDataProvider.GetValueObjectConfig(vo.GetType());
            IEntityMetaData boMetaData = entityMetaDataProvider.GetMetaData(config.EntityType);
            IMap<String, ITypeInfoItem> boNameToVoMember = GetTypeInfoMapForVo(config);
            RemoveTempIdFromValueObject(vo, boMetaData, boNameToVoMember, config);
        }
        vosToRemoveTempIdFrom.Clear();
    }
    ISet<Object> bosToRemoveTempIdFrom = this.bosToRemoveTempIdFrom;
    if (bosToRemoveTempIdFrom.Count != 0)
    {
        // One reusable scratch ObjRef serves all cache removals.
        ObjRef tempObjRef = new ObjRef();
        foreach (Object bo in bosToRemoveTempIdFrom)
        {
            IEntityMetaData metaData = ((IEntityMetaDataHolder)bo).Get__EntityMetaData();
            RemoveTempIdFromBusinessObject(bo, metaData, tempObjRef, cache);
        }
        bosToRemoveTempIdFrom.Clear();
    }
}
// Hands out strictly decreasing negative temporary ids, converted to the
// requested id element type. Resets to -1 if the counter ever reaches
// long.MinValue (underflow guard).
protected Object GetNextTempIdAs(Type elementType)
{
    if (nextTempId == long.MinValue)
    {
        nextTempId = -1;
    }
    long tempId = nextTempId--;
    return ConversionHelper.ConvertValueToType(elementType, tempId);
}
// Chooses the member used to identify referenced business objects: the single
// alternate id when exactly one exists, the alternate id named "Buid" when
// several exist (AgriLog-specific convention), otherwise the primary id member.
protected PrimitiveMember SelectIdMember(IEntityMetaData referencedBOMetaData)
{
    if (referencedBOMetaData == null)
    {
        throw new ArgumentException("Business object contains reference to object without metadata");
    }
    int alternateIdCount = referencedBOMetaData.GetAlternateIdCount();
    if (alternateIdCount == 1)
    {
        return referencedBOMetaData.AlternateIdMembers[0];
    }
    if (alternateIdCount > 1)
    {
        // AgriLog specific solution for AlternateIdCount > 1
        foreach (PrimitiveMember alternateIdMember in referencedBOMetaData.AlternateIdMembers)
        {
            if ("Buid".Equals(alternateIdMember.Name))
            {
                return alternateIdMember;
            }
        }
    }
    return referencedBOMetaData.IdMember;
}
// Lazily builds (and caches per VO type) the map from BO member names to the
// corresponding VO members, covering id, version, primitive and relation members.
protected IMap<String, ITypeInfoItem> GetTypeInfoMapForVo(IValueObjectConfig config)
{
    IMap<String, ITypeInfoItem> typeInfoMap = typeToTypeInfoMap.Get(config.ValueType);
    if (typeInfoMap != null)
    {
        return typeInfoMap;
    }
    typeInfoMap = new HashMap<String, ITypeInfoItem>();
    StringBuilder sb = new StringBuilder();
    IEntityMetaData boMetaData = EntityMetaDataProvider.GetMetaData(config.EntityType);
    AddTypeInfoMapping(typeInfoMap, config, boMetaData.IdMember.Name, sb);
    if (boMetaData.VersionMember != null)
    {
        AddTypeInfoMapping(typeInfoMap, config, boMetaData.VersionMember.Name, sb);
    }
    foreach (PrimitiveMember primitiveMember in boMetaData.PrimitiveMembers)
    {
        AddTypeInfoMapping(typeInfoMap, config, primitiveMember.Name, sb);
    }
    foreach (RelationMember relationMember in boMetaData.RelationMembers)
    {
        AddTypeInfoMapping(typeInfoMap, config, relationMember.Name, sb);
    }
    typeToTypeInfoMap.Put(config.ValueType, typeInfoMap);
    return typeInfoMap;
}
// Registers the VO counterpart of one BO member in typeInfoMap. When sb is
// provided, a "<name>Specified" companion member (if present on the VO type)
// is registered as well under the BO-side "<name>Specified" key.
protected void AddTypeInfoMapping(IMap<String, ITypeInfoItem> typeInfoMap, IValueObjectConfig config, String boMemberName, StringBuilder sb)
{
    String voMemberName = config.GetValueObjectMemberName(boMemberName);
    ITypeInfoItem voMember = TypeInfoProvider.GetHierarchicMember(config.ValueType, voMemberName);
    if (voMember == null)
    {
        // No matching member on the VO type: nothing to register.
        return;
    }
    Type elementType = config.GetMemberType(voMemberName);
    if (elementType != null)
    {
        TypeInfoItem.SetEntityType(elementType, voMember, null);
    }
    typeInfoMap.Put(boMemberName, voMember);
    if (sb == null)
    {
        return;
    }
    sb.Length = 0;
    String voSpecifiedName = sb.Append(voMemberName).Append("Specified").ToString();
    ITypeInfoItem voSpecifiedMember = TypeInfoProvider.GetHierarchicMember(config.ValueType, voSpecifiedName);
    if (voSpecifiedMember == null)
    {
        return;
    }
    sb.Length = 0;
    String boSpecifiedName = sb.Append(boMemberName).Append("Specified").ToString();
    typeInfoMap.Put(boSpecifiedName, voSpecifiedMember);
}
/// <summary>
/// Copies all primitive members (including id/version) between a business
/// object and its value object in the given direction. A VO-side
/// "<name>Specified" member, when present, gates VO_TO_BO copies and is set on
/// BO_TO_VO copies. Returns the values written to the BO, index-aligned with
/// businessObjectMetaData.PrimitiveMembers (technical members excluded).
/// </summary>
protected Object[] CopyPrimitives(Object businessObject, Object valueObject, IValueObjectConfig config, CopyDirection direction,
    IEntityMetaData businessObjectMetaData, IMap<String, ITypeInfoItem> boNameToVoMember)
{
    PrimitiveMember[] primitiveMembers = AllPrimitiveMembers(businessObjectMetaData);
    // Deliberately shorter than primitiveMembers: technical id/version members
    // appended by AllPrimitiveMembers are not reported back.
    Object[] primitives = new Object[businessObjectMetaData.PrimitiveMembers.Length];
    StringBuilder sb = new StringBuilder();
    for (int i = primitiveMembers.Length; i-- > 0; )
    {
        PrimitiveMember boMember = primitiveMembers[i];
        String boMemberName = boMember.Name;
        String voMemberName = config.GetValueObjectMemberName(boMemberName);
        sb.Length = 0;
        String boSpecifiedMemberName = sb.Append(boMemberName).Append("Specified").ToString();
        ITypeInfoItem voMember = boNameToVoMember.Get(boMemberName);
        ITypeInfoItem voSpecifiedMember = boNameToVoMember.Get(boSpecifiedMemberName);
        bool isSpecified = true;
        if (config.IsIgnoredMember(voMemberName) || voMember == null)
        {
            continue;
        }
        switch (direction)
        {
            case CopyDirection.VO_TO_BO:
            {
                // Copy primitive from value object to business object
                // TODO: Copy by value instead of copy by reference
                if (voSpecifiedMember != null)
                {
                    isSpecified = (bool)voSpecifiedMember.GetValue(valueObject);
                }
                if (!isSpecified)
                {
                    continue;
                }
                Object value = voMember.GetValue(valueObject, false);
                if (value != null && config.HoldsListType(voMemberName))
                {
                    value = ListTypeHelper.UnpackListType(value);
                }
                value = ConvertPrimitiveValue(value, voMember.ElementType, boMember.RealType, boMember.ElementType);
                // Do not 'kill' technical members except 'version' (for optimistic locking)
                if (boMember.TechnicalMember && !boMember.Equals(businessObjectMetaData.VersionMember)
                    && (value == null || value.Equals(boMember.NullEquivalentValue)))
                {
                    continue;
                }
                if (value == null)
                {
                    value = boMember.NullEquivalentValue;
                }
                boMember.SetValue(businessObject, value);
                if (i < primitives.Length)
                {
                    primitives[i] = value;
                }
                break;
            }
            case CopyDirection.BO_TO_VO:
            {
                // Copy primitive from business object to value object
                // TODO: Copy by value instead of copy by reference
                Object value = boMember.GetValue(businessObject, false);
                isSpecified = value != null;
                if (voSpecifiedMember != null)
                {
                    voSpecifiedMember.SetValue(valueObject, isSpecified);
                }
                if (!isSpecified)
                {
                    continue;
                }
                if (config.HoldsListType(voMemberName))
                {
                    if (value is IEnumerable && !(value is String))
                    {
                        value = ListTypeHelper.PackInListType((IEnumerable)value, voMember.RealType);
                    }
                }
                value = ConvertPrimitiveValue(value, boMember.ElementType, voMember.RealType, voMember.ElementType);
                if (voMember.TechnicalMember && (value == null || value.Equals(voMember.NullEquivalentValue)))
                {
                    continue;
                }
                if (value == null)
                {
                    // FIX: the value is written to the VO member below, so the
                    // null-equivalent must come from voMember (was boMember,
                    // mirroring the VO_TO_BO branch by copy-paste). Reachable
                    // when ConvertPrimitiveValue collapses an empty collection
                    // to null.
                    value = voMember.NullEquivalentValue;
                }
                SetPropertyValue(valueObject, voMember, value);
                break;
            }
            default:
                throw RuntimeExceptionUtil.CreateEnumNotSupportedException(direction);
        }
    }
    return primitives;
}
// Returns the entity's primitive members extended by the technical id member
// and - when present - the version member, both appended at the end.
protected PrimitiveMember[] AllPrimitiveMembers(IEntityMetaData businessObjectMetaData)
{
    PrimitiveMember[] valueMembers = businessObjectMetaData.PrimitiveMembers;
    PrimitiveMember versionMember = businessObjectMetaData.VersionMember;
    int technicalMemberCount = versionMember != null ? 2 : 1;
    PrimitiveMember[] allMembers = new PrimitiveMember[valueMembers.Length + technicalMemberCount];
    Array.Copy(valueMembers, 0, allMembers, 0, valueMembers.Length);
    allMembers[valueMembers.Length] = businessObjectMetaData.IdMember;
    if (versionMember != null)
    {
        allMembers[valueMembers.Length + 1] = versionMember;
    }
    return allMembers;
}
/// <summary>
/// Converts a primitive VO/BO value into the target member type. Scalars are
/// converted directly; enumerables are converted element-wise and materialized
/// as an array, a collection of the target type, or - for scalar targets -
/// unwrapped (empty collection => null, single element => that element).
/// </summary>
/// <exception cref="ArgumentException">When a multi-element collection cannot be mapped to a scalar target.</exception>
protected Object ConvertPrimitiveValue(Object value, Type sourceElementType, Type targetRealType, Type targetElementType)
{
    if (value == null)
    {
        return null;
    }
    if (value.GetType().IsArray && !typeof(String).Equals(targetRealType))
    {
        // Normalize arrays to lists first - but do not treat byte[]/char[]
        // destined for a String target as a collection.
        return ConvertPrimitiveValue(ListUtil.AnyToList(value), sourceElementType, targetRealType, targetElementType);
    }
    // FIX: the original fetched this local but never used it (the loop re-read
    // the property each iteration); it also declared an unused local 'coll'.
    IConversionHelper conversionHelper = this.ConversionHelper;
    if (!(value is IEnumerable) || (value is String))
    {
        // Plain scalar (strings count as scalars despite being IEnumerable).
        return conversionHelper.ConvertValueToType(targetRealType, value);
    }
    // Element-wise conversion of the source collection.
    List<Object> result = new List<Object>();
    foreach (Object item in (IEnumerable)value)
    {
        result.Add(conversionHelper.ConvertValueToType(targetElementType, item));
    }
    if (targetRealType.IsArray)
    {
        Array array = Array.CreateInstance(targetRealType.GetElementType(), result.Count);
        for (int a = result.Count; a-- > 0; )
        {
            array.SetValue(result[a], a);
        }
        return array;
    }
    if (typeof(IEnumerable).IsAssignableFrom(targetRealType) && !typeof(String).Equals(targetRealType))
    {
        Object collection = ListUtil.CreateCollectionOfType(targetRealType, result.Count);
        ListUtil.FillList(collection, result);
        return collection;
    }
    // Scalar target: unwrap empty/singleton collections.
    if (result.Count == 0)
    {
        return null;
    }
    if (result.Count == 1)
    {
        return result[0];
    }
    throw new ArgumentException("Cannot map '" + value.GetType() + "' of '" + sourceElementType + "' to '" + targetRealType
        + "' of '" + targetElementType + "'");
}
// Returns the business object the cache currently holds for the given ObjRef.
// CacheDirective.FailEarly presumably suppresses a lazy load on a miss - TODO
// confirm against the cache implementation.
public Object GetMappedBusinessObject(IObjRef objRef)
{
return Cache.GetObject(objRef, CacheDirective.FailEarly);
}
// Returns every business object currently tracked by the VO-to-BO mapping of
// this mapper instance.
public IList<Object> GetAllActiveBusinessObjects()
{
return voToBoMap.Values();
}
}
}
| 47.196122
| 172
| 0.564434
|
dd640feb310a2693b4e66142a78facc275ab0f6f
| 423
|
java
|
Java
|
apps/wemp/bundles/net.wapwag.wemp.dao/src/main/java/net/wapwag/wemp/dao/model/org/WaterManageAuth.java
|
wpgsh/cloud4water
|
70cf873fcae20196707a76e8841b049006bc2314
|
[
"Apache-2.0"
] | null | null | null |
apps/wemp/bundles/net.wapwag.wemp.dao/src/main/java/net/wapwag/wemp/dao/model/org/WaterManageAuth.java
|
wpgsh/cloud4water
|
70cf873fcae20196707a76e8841b049006bc2314
|
[
"Apache-2.0"
] | null | null | null |
apps/wemp/bundles/net.wapwag.wemp.dao/src/main/java/net/wapwag/wemp/dao/model/org/WaterManageAuth.java
|
wpgsh/cloud4water
|
70cf873fcae20196707a76e8841b049006bc2314
|
[
"Apache-2.0"
] | null | null | null |
package net.wapwag.wemp.dao.model.org;
import net.wapwag.wemp.dao.model.ObjectType;
import javax.persistence.Entity;
import javax.persistence.Table;
/**
 * Water management authority.
 *
 * Concrete {@code Organization} subtype persisted to the
 * "water_management_auth" table; every instance is constructed with
 * {@code ObjectType.WATER_MANAGE_AUTH}.
 *
 * Created by Administrator on 2016/10/25 0025.
 */
@Entity
@Table(name = "water_management_auth")
public class WaterManageAuth extends Organization {
/** Creates an authority organization typed as WATER_MANAGE_AUTH. */
public WaterManageAuth() {
super(ObjectType.WATER_MANAGE_AUTH);
}
}
| 20.142857
| 51
| 0.749409
|
54f82165382866fa1fd459ec0ff268a8ae4de7f2
| 49
|
sql
|
SQL
|
src/resources/database/checkChatByContact.sql
|
VirgilSecurity/virgil-messenger-qt
|
cc29129705c8374580f2d7e76226544093838ca0
|
[
"BSD-3-Clause"
] | 6
|
2020-04-24T06:20:18.000Z
|
2021-09-30T13:06:12.000Z
|
src/resources/database/checkChatByContact.sql
|
VirgilSecurity/virgil-messenger-qt
|
cc29129705c8374580f2d7e76226544093838ca0
|
[
"BSD-3-Clause"
] | 13
|
2020-04-28T08:39:19.000Z
|
2021-04-26T08:09:13.000Z
|
src/resources/database/checkChatByContact.sql
|
VirgilSecurity/virgil-messenger-qt
|
cc29129705c8374580f2d7e76226544093838ca0
|
[
"BSD-3-Clause"
] | 2
|
2020-04-16T11:57:14.000Z
|
2020-09-07T17:34:19.000Z
|
-- Existence probe: yields one row per chat referencing the given contact
-- (constant 1); an empty result means no chat exists for :contactId.
SELECT 1
FROM chats
WHERE contactId = :contactId
| 12.25
| 28
| 0.795918
|
cdd702ee61a5554493fdcba086a6b054f8d36efd
| 452
|
cs
|
C#
|
PlaygroundProject/Assets/Scripts/_DONT_USE_/Editor/Attributes/HealthSystemInspector.cs
|
danieldz7/unityDemo
|
c60014f687d6a7f41e975faa1903be8aa14334fd
|
[
"MIT"
] | null | null | null |
PlaygroundProject/Assets/Scripts/_DONT_USE_/Editor/Attributes/HealthSystemInspector.cs
|
danieldz7/unityDemo
|
c60014f687d6a7f41e975faa1903be8aa14334fd
|
[
"MIT"
] | null | null | null |
PlaygroundProject/Assets/Scripts/_DONT_USE_/Editor/Attributes/HealthSystemInspector.cs
|
danieldz7/unityDemo
|
c60014f687d6a7f41e975faa1903be8aa14334fd
|
[
"MIT"
] | null | null | null |
using UnityEngine;
using System.Collections;
using UnityEditor;
[CanEditMultipleObjects]
[CustomEditor(typeof(HealthSystemAttribute))]
public class PlayerHealthInspector : BaseInspectorWindow
{
    // Info text shown above the default inspector controls.
    // FIX: corrected grammar in the editor-facing string ("This scripts allows"
    // -> "This script allows").
    private string explanation = "This script allows the Players or other objects to receive damage.";

    /// <summary>Draws an explanatory help box, then the inherited inspector UI.</summary>
    public override void OnInspectorGUI()
    {
        GUILayout.Space(10);
        EditorGUILayout.HelpBox(explanation, MessageType.Info);
        base.OnInspectorGUI();
    }
}
| 23.789474
| 100
| 0.79646
|
b0cb2443cc71f8be4b340f059f8ec2a0c17ed77a
| 1,194
|
h
|
C
|
src/cmainsignals.h
|
hansendm/DigitalNote-2
|
a9da4cf46ffc1ba7d5103787e25b89092f75c803
|
[
"MIT"
] | null | null | null |
src/cmainsignals.h
|
hansendm/DigitalNote-2
|
a9da4cf46ffc1ba7d5103787e25b89092f75c803
|
[
"MIT"
] | null | null | null |
src/cmainsignals.h
|
hansendm/DigitalNote-2
|
a9da4cf46ffc1ba7d5103787e25b89092f75c803
|
[
"MIT"
] | null | null | null |
#ifndef CMAINSIGNALS_H
#define CMAINSIGNALS_H
#include <boost/signals2/signal.hpp>
class CTransaction;
class CBlock;
class uint256;
class CBlockLocator;
// Hub of boost::signals2 signals the node core fires so wallets/indexers can
// subscribe without compile-time coupling to the validation code.
struct CMainSignals
{
// Notifies listeners of updated transaction data (passing hash, transaction, and optionally the block it is found in).
boost::signals2::signal<void (const CTransaction&, const CBlock*, bool, bool)> SyncTransaction;
// Notifies listeners of an erased transaction (currently disabled, requires transaction replacement).
boost::signals2::signal<void (const uint256 &)> EraseTransaction;
// Notifies listeners of an updated transaction without new data (for now: a coinbase potentially becoming visible).
boost::signals2::signal<void (const uint256 &)> UpdatedTransaction;
// Notifies listeners of a new active block chain.
boost::signals2::signal<void (const CBlockLocator &)> SetBestChain;
// Notifies listeners about an inventory item being seen on the network.
boost::signals2::signal<void (const uint256 &)> Inventory;
// Tells listeners to broadcast their data.
boost::signals2::signal<void (bool)> Broadcast;
};
#endif // CMAINSIGNALS_H
| 42.642857
| 123
| 0.737018
|
74d9ad3a299ecd2e0ba1871b0e79ff5f37c8aec1
| 1,097
|
css
|
CSS
|
src/css/dropdown.css
|
eb-89/UserInterfaces2021
|
af04fb023df0c7da8fc6ee5652528d794418503b
|
[
"MIT"
] | null | null | null |
src/css/dropdown.css
|
eb-89/UserInterfaces2021
|
af04fb023df0c7da8fc6ee5652528d794418503b
|
[
"MIT"
] | 2
|
2021-02-25T18:16:08.000Z
|
2021-03-04T23:25:38.000Z
|
src/css/dropdown.css
|
eb-89/UserInterfaces2021
|
af04fb023df0c7da8fc6ee5652528d794418503b
|
[
"MIT"
] | null | null | null |
/*
Dropdown menu styling: trigger button, search box and the pop-over list.
Rule order is cascade-significant - do not reorder.
*/
/* Positioning context for the absolutely-positioned .dropdown-content. */
.dropdown-wrp {
position: relative;
display: inline-block;
}
/* The button that opens the dropdown. */
.dropdown-btn {
background-color: #f1e9cd;
color: #341001;
padding: 0.3em 0.7em;
font-size: 20px;
border: 0.3em double #341001;
transition: all 0.8s;
cursor: pointer;
text-align: center;
margin-left: 10px;
}
.dropdown-btn:hover, .dropdown-btn:focus {
background-color: hsl(47deg 56% 95%);
}
/* Search input pinned to the top of the scrolling list. */
#dropdown-search {
box-sizing: border-box;
font-size: 16px;
padding: 15px;
border: none;
position: sticky;
top: 0;
border-bottom: 1px solid #ddd;
}
/* The pop-over itself; hidden until .show is toggled on. */
.dropdown-content {
display: none;
position: absolute;
background-color: #f1e9cd;
min-width: 100%;
box-shadow: 0px 8px 16px 0px rgba(0,0,0,0.2);
z-index: 1;
}
/* Variant with a capped height and vertical scrolling. */
.dropdown-content.scrollable {
max-height: 250px;
overflow: scroll;
overflow-x: hidden;
}
/* Toggled (e.g. by script) to reveal the dropdown. */
.show {
display: block;
}
/* A single entry in the dropdown list. */
.dropdown-text {
color: #341001;
padding: 12px 16px;
display: block;
}
.dropdown-text:hover {
background-color: hsl(47deg 56% 95%);
cursor: pointer;
}
| 17.412698
| 49
| 0.621696
|
44115e0d2224449634b945c062df9862aa55e738
| 588
|
lua
|
Lua
|
gateway/src/apicast/metrics/3scale_backend_calls.lua
|
abarrak/APIcast
|
f018994f1ca447e528636527a31c4683aea9c04e
|
[
"Apache-2.0"
] | 141
|
2016-10-06T19:15:44.000Z
|
2019-03-05T13:27:33.000Z
|
gateway/src/apicast/metrics/3scale_backend_calls.lua
|
abarrak/APIcast
|
f018994f1ca447e528636527a31c4683aea9c04e
|
[
"Apache-2.0"
] | 494
|
2016-10-03T13:21:16.000Z
|
2019-03-14T07:16:18.000Z
|
gateway/src/apicast/metrics/3scale_backend_calls.lua
|
abarrak/APIcast
|
f018994f1ca447e528636527a31c4683aea9c04e
|
[
"Apache-2.0"
] | 80
|
2019-03-19T08:32:47.000Z
|
2022-03-31T02:18:00.000Z
|
local prometheus = require('apicast.prometheus')
local format = string.format
local _M = {}
-- Prometheus counter of calls made to the 3scale backend, labeled by endpoint
-- and bucketed HTTP status. _M.report() guards against this being falsy
-- (presumably when prometheus is not configured - confirm against the module).
local threescale_backend_call = prometheus(
'counter',
'threescale_backend_calls',
"Calls to the 3scale backend",
{ 'endpoint', 'status' }
)
-- Maps a numeric HTTP status to its Prometheus label bucket ("2xx", "4xx", ...).
-- nil, '' and 0 collapse into 'invalid_status'.
local function label_for_status(status)
  if not status or status == '' or status == 0 then
    return 'invalid_status'
  else
    -- FIX: math.floor keeps the format argument integral. Under Lua 5.3+,
    -- string.format("%d", 404/100) raises "number has no integer
    -- representation"; LuaJIT/5.1 behavior for positive statuses is unchanged.
    return string.format("%dxx", math.floor(status/100))
  end
end
-- Increments the 3scale backend-call counter by one for the given
-- endpoint/status pair. No-op when the metric object is falsy.
function _M.report(endpoint, status)
if threescale_backend_call then
threescale_backend_call:inc(1, { endpoint, label_for_status(status) })
end
end
return _M
| 20.275862
| 74
| 0.72449
|
14b9549a888fcef783fe50cae09559963eabdc3e
| 3,429
|
ts
|
TypeScript
|
packages/fx-core/src/plugins/resource/bot/question.ts
|
thure/TeamsFx
|
c17f4b049af79dc9386be2e9097ad5cf24a54a08
|
[
"MIT"
] | null | null | null |
packages/fx-core/src/plugins/resource/bot/question.ts
|
thure/TeamsFx
|
c17f4b049af79dc9386be2e9097ad5cf24a54a08
|
[
"MIT"
] | null | null | null |
packages/fx-core/src/plugins/resource/bot/question.ts
|
thure/TeamsFx
|
c17f4b049af79dc9386be2e9097ad5cf24a54a08
|
[
"MIT"
] | null | null | null |
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
import { MultiSelectQuestion, OptionItem } from "@microsoft/teamsfx-api";
import { getLocalizedString } from "../../../common/localizeUtils";
import { QuestionNames } from "./constants";
import {
HostType,
HostTypes,
NotificationTrigger,
NotificationTriggers,
} from "./resources/strings";
// Option item carrying the bot host type and, for Azure Functions hosts, the
// notification trigger it represents.
export interface HostTypeTriggerOptionItem extends OptionItem {
hostType: HostType;
trigger?: NotificationTrigger;
}
// NOTE: id must be the same as cliName to prevent parsing error for CLI default value.
// Azure Functions host, timer trigger.
export const FunctionsTimerTriggerOptionItem: HostTypeTriggerOptionItem = optionWithL10n({
id: "timer",
hostType: HostTypes.AZURE_FUNCTIONS,
trigger: NotificationTriggers.TIMER,
});
// Azure Functions host, HTTP trigger.
export const FunctionsHttpTriggerOptionItem: HostTypeTriggerOptionItem = optionWithL10n({
id: "http",
hostType: HostTypes.AZURE_FUNCTIONS,
trigger: NotificationTriggers.HTTP,
});
// App Service host (restify-based HTTP endpoint); no trigger concept.
export const AppServiceOptionItem: HostTypeTriggerOptionItem = optionWithL10n({
id: "http-restify",
hostType: HostTypes.APP_SERVICE,
});
// All selectable options for the host-type/trigger question, in display order.
export const HostTypeTriggerOptions: HostTypeTriggerOptionItem[] = [
FunctionsTimerTriggerOptionItem,
FunctionsHttpTriggerOptionItem,
AppServiceOptionItem,
];
// The restrictions of this question:
// - appService and function are mutually exclusive
// - users must select at least one trigger.
export function createHostTypeTriggerQuestion(): MultiSelectQuestion {
  const prefix = "plugins.bot.questionHostTypeTrigger";
  return {
    name: QuestionNames.BOT_HOST_TYPE_TRIGGER,
    title: getLocalizedString(`${prefix}.title`),
    type: "multiSelect",
    staticOptions: HostTypeTriggerOptions,
    default: [FunctionsTimerTriggerOptionItem.id],
    placeholder: getLocalizedString(`${prefix}.placeholder`),
    validation: {
      // Rejects empty selections and selections mixing App Service with
      // function triggers.
      validFunc: async (input: string[]): Promise<string | undefined> => {
        // FIX: removed the redundant `const name = input as string[]` - the
        // parameter is already typed string[].
        if (input.length === 0) {
          return getLocalizedString(`${prefix}.error.emptySelection`);
        }
        if (input.includes(AppServiceOptionItem.id) && input.length > 1) {
          return getLocalizedString(`${prefix}.error.hostTypeConflict`);
        }
        return undefined;
      },
    },
    // Keeps the selection consistent in the UI as the user clicks: App Service
    // and function triggers displace each other.
    onDidChangeSelection: async function (
      currentSelectedIds: Set<string>,
      previousSelectedIds: Set<string>
    ): Promise<Set<string>> {
      if (currentSelectedIds.size > 1 && currentSelectedIds.has(AppServiceOptionItem.id)) {
        if (previousSelectedIds.has(AppServiceOptionItem.id)) {
          // App Service was already selected: the newly added trigger wins.
          currentSelectedIds.delete(AppServiceOptionItem.id);
        } else {
          // App Service was just selected: it replaces all other selections.
          currentSelectedIds = new Set([AppServiceOptionItem.id]);
        }
      }
      return currentSelectedIds;
    },
  };
}
type HostTypeTriggerOptionItemWithoutText = Omit<
HostTypeTriggerOptionItem,
"label" | "cliName" | "description" | "detail"
>;
function optionWithL10n(option: HostTypeTriggerOptionItemWithoutText): HostTypeTriggerOptionItem {
// e.g. expands to plugins.bot.triggers.functionsTimer.label
const prefix = "plugins.bot.triggers";
return {
...option,
label: getLocalizedString(`${prefix}.${option.id}.label`),
cliName: getLocalizedString(`${prefix}.${option.id}.cliName`),
description: getLocalizedString(`${prefix}.${option.id}.description`),
detail: getLocalizedString(`${prefix}.${option.id}.detail`),
};
}
| 33.950495
| 98
| 0.720327
|
0d33d6a5cf37502ebb4f7a3028f543eef8e87a52
| 12,013
|
c
|
C
|
src/xyzsh.c
|
ab25cq/xyzsh
|
d111269fbd630edf5421580f8618f5967149cd4e
|
[
"MIT"
] | 39
|
2015-02-23T07:33:25.000Z
|
2022-02-07T09:36:09.000Z
|
src/xyzsh.c
|
ab25cq/xyzsh
|
d111269fbd630edf5421580f8618f5967149cd4e
|
[
"MIT"
] | null | null | null |
src/xyzsh.c
|
ab25cq/xyzsh
|
d111269fbd630edf5421580f8618f5967149cd4e
|
[
"MIT"
] | 3
|
2018-04-22T04:33:52.000Z
|
2021-12-04T06:32:27.000Z
|
#include "config.h"
#include "xyzsh.h"
#include <string.h>
#include <stdio.h>
#include <signal.h>
#include <sys/stat.h>
#include <locale.h>
#include <fcntl.h>
sObject* gErrMsg;
volatile BOOL gXyzshSigInt;
volatile BOOL gXyzshSigUser;
volatile BOOL gXyzshSigTstp;
volatile BOOL gXyzshSigCont;
sObject* gDirStack;
void xyzsh_read_rc_core(char* path)
{
xyzsh_set_signal_optc();
sRunInfo runinfo;
char** argv = NULL;
int argc = 0;
if(path && access(path, R_OK) == 0) {
memset(&runinfo, 0, sizeof(sRunInfo));
runinfo.mSName = path;
runinfo.mCurrentObject = gRootObject;
if(!load_file(path, gStdin, gStdout, &runinfo, argv, argc))
{
if(runinfo.mRCode == RCODE_BREAK) {
fprintf(stderr, "invalid break. Not in a loop\n");
exit(1);
}
else if(runinfo.mRCode & RCODE_RETURN) {
fprintf(stderr, "invalid return. Not in a function\n");
exit(1);
}
else if(runinfo.mRCode == RCODE_EXIT) {
}
else {
fprintf(stderr, "run time error\n");
fprintf(stderr, "%s", string_c_str(gErrMsg));
exit(1);
}
}
if(runinfo.mRCode != 0) {
fprintf(stderr, "return code is %d\n", runinfo.mRCode);
exit(1);
}
}
xyzsh_restore_signal_default();
}
static void xyzsh_read_rc()
{
char sys_rc_path[PATH_MAX];
snprintf(sys_rc_path, PATH_MAX, "%sxyzsh.xyzsh", SYSCONFDIR);
char help_rc_path[PATH_MAX];
snprintf(help_rc_path, PATH_MAX, "%shelp.xyzsh", SYSCONFDIR);
char completion_rc_path[PATH_MAX];
snprintf(completion_rc_path, PATH_MAX, "%scompletion.xyzsh", SYSCONFDIR);
char user_rc_path[PATH_MAX];
char* home;
if(home = getenv("HOME")) {
snprintf(user_rc_path, PATH_MAX, "%s/.xyzsh/xyzsh.xyzsh", home);
}
else {
strncpy(user_rc_path, "", PATH_MAX);
}
xyzsh_read_rc_core(sys_rc_path);
xyzsh_read_rc_core(help_rc_path);
xyzsh_read_rc_core(completion_rc_path);
xyzsh_read_rc_core(user_rc_path);
}
static void xyzsh_read_rc_mini()
{
char sys_rc_path[PATH_MAX];
snprintf(sys_rc_path, PATH_MAX, "%sxyzsh.xyzsh", SYSCONFDIR);
xyzsh_read_rc_core(sys_rc_path);
}
void xyzsh_init(enum eAppType app_type, BOOL no_runtime_script)
{
setenv("XYZSH_VERSION", "1.5.8", 1);
setenv("XYZSH_DOCDIR", DOCDIR, 1);
setenv("XYZSH_DATAROOTDIR", DOCDIR, 1);
setenv("XYZSH_EXT_PATH", EXTDIR, 1);
setenv("XYZSH_SYSCONFDIR", SYSCONFDIR, 1);
char* home = getenv("HOME");
if(home) {
char home_library[PATH_MAX];
snprintf(home_library, PATH_MAX, "%s/.xyzsh/lib/", home);
char* ld_library_path = getenv("LD_LIBRARY_PATH");
if(ld_library_path) {
char ld_library_path2[512];
snprintf(ld_library_path2, 512, "%s:%s:%s", ld_library_path, EXTDIR, home_library);
setenv("LD_LIBRARY_PATH", ld_library_path2, 1);
}
else {
char ld_library_path2[512];
snprintf(ld_library_path2, 512, "%s:%s", EXTDIR, home_library);
setenv("LD_LIBRARY_PATH", ld_library_path2, 1);
}
}
else {
char* ld_library_path = getenv("LD_LIBRARY_PATH");
if(ld_library_path) {
char ld_library_path2[512];
snprintf(ld_library_path2, 512, "%s:%s", ld_library_path, EXTDIR);
setenv("LD_LIBRARY_PATH", ld_library_path2, 1);
}
else {
char ld_library_path2[512];
snprintf(ld_library_path2, 512, "%s", EXTDIR);
setenv("LD_LIBRARY_PATH", ld_library_path2, 1);
}
}
setlocale(LC_ALL, "");
stack_init(1);;
stack_start_stack();
gErrMsg = STRING_NEW_STACK("");
gXyzshSigInt = FALSE;
gXyzshSigUser = FALSE;
gXyzshSigTstp = FALSE;
gXyzshSigCont = FALSE;
xyzsh_set_signal_other = NULL;
gc_init(1);
run_init(app_type);
load_init();
xyzsh_editline_init();
gDirStack = VECTOR_NEW_GC(10, FALSE);
uobject_put(gXyzshObject, "_dir_stack", gDirStack);
char* term_env = getenv("TERM");
if(term_env != NULL && strcmp(term_env, "") != 0) {
mcurses_init();
}
if(!xyzsh_rehash("init", 0)) {
fprintf(stderr, "run time error\n");
fprintf(stderr, "%s", string_c_str(gErrMsg));
exit(1);
}
if(!no_runtime_script) {
xyzsh_read_rc();
}
else {
xyzsh_read_rc_mini();
}
}
void xyzsh_final()
{
xyzsh_editline_final();
mcurses_final();
load_final();
run_final();
gc_final();
stack_end_stack();
stack_final();
}
void err_msg(char* msg, char* sname, int line)
{
char* tmp = MALLOC(strlen(sname) + 128 + strlen(msg));
snprintf(tmp, strlen(sname) + 128 + strlen(msg), "%s %d: %s\n", sname, line, msg);
string_put(gErrMsg, tmp);
FREE(tmp);
}
void err_msg_adding(char* msg, char* sname, int line)
{
char* tmp = MALLOC(strlen(sname) + 128 + strlen(msg));
snprintf(tmp, strlen(sname) + 128 + strlen(msg), "%s %d: %s\n", sname, line, msg);
string_push_back(gErrMsg, tmp);
FREE(tmp);
}
void sig_int()
{
gXyzshSigInt = TRUE;
#ifdef MDEBUG
fprintf(stderr, "SIGINT!!\n");
#endif
}
void sig_tstp()
{
gXyzshSigInt = TRUE;
#ifdef MDEBUG
fprintf(stderr, "SIGTSTP!!\n");
#endif
}
void sig_int_optc()
{
sigchld_block(1);
gXyzshSigInt = TRUE;
//killpg(0, SIGINT);
#ifdef MDEBUG
//fprintf(stderr, "SIGINT!!\n");
#endif
sigchld_block(0);
}
void sig_tstp_optc()
{
sigchld_block(1);
#ifdef MDEBUG
fprintf(stderr, "SIGTSTP!!\n");
#endif
//msave_ttysettings();
killpg(0, SIGSTOP);
sigchld_block(0);
}
void sig_cont_optc()
{
sigchld_block(1);
//mrestore_ttysettings();
sigchld_block(0);
}
void sig_user()
{
gXyzshSigUser = TRUE;
}
void sigchld_action(int signum, siginfo_t* info, void* ctx)
{
}
void sigchld_block(int block)
{
sigset_t sigset;
sigemptyset(&sigset);
sigaddset(&sigset, SIGCHLD);
if(sigprocmask(block?SIG_BLOCK:SIG_UNBLOCK, &sigset, NULL) != 0)
{
fprintf(stderr, "error\n");
exit(1);
}
}
void sigttou_block(int block)
{
sigset_t sigset;
sigemptyset(&sigset);
sigaddset(&sigset, SIGTTOU);
if(sigprocmask(block?SIG_BLOCK:SIG_UNBLOCK, &sigset, NULL) != 0)
{
fprintf(stderr, "error\n");
exit(1);
}
}
void xyzsh_restore_signal_default()
{
struct sigaction sa;
memset(&sa, 0, sizeof(sa));
sa.sa_handler = SIG_DFL;
if(sigaction(SIGPIPE, &sa, NULL) < 0) {
perror("sigaction1");
exit(1);
}
if(sigaction(SIGCHLD, &sa, NULL) < 0) {
perror("sigaction1");
exit(1);
}
if(sigaction(SIGINT, &sa, NULL) < 0) {
perror("sigaction2");
exit(1);
}
if(sigaction(SIGCONT, &sa, NULL) < 0) {
perror("sigaction3");
exit(1);
}
if(sigaction(SIGWINCH, &sa, NULL) < 0) {
perror("sigaction4");
exit(1);
}
if(sigaction(SIGPROF, &sa, NULL) < 0) {
perror("sigaction5");
exit(1);
}
if(sigaction(SIGTTIN, &sa, NULL) < 0) {
perror("sigaction6");
exit(1);
}
if(sigaction(SIGTTOU, &sa, NULL) < 0) {
perror("sigaction7");
exit(1);
}
if(sigaction(SIGTSTP, &sa, NULL) < 0) {
perror("sigaction8");
exit(1);
}
if(sigaction(SIGQUIT, &sa, NULL) < 0) {
perror("sigaction9");
exit(1);
}
if(sigaction(SIGUSR1, &sa, NULL) < 0) {
perror("sigaction10");
exit(1);
}
}
void (*xyzsh_set_signal_other)();
void xyzsh_set_signal()
{
struct sigaction sa;
memset(&sa, 0, sizeof(sa));
sa.sa_sigaction = sigchld_action;
sa.sa_flags = SA_RESTART|SA_SIGINFO;
if(sigaction(SIGCHLD, &sa, NULL) < 0) {
perror("sigaction1");
exit(1);
}
memset(&sa, 0, sizeof(sa));
sa.sa_flags = SA_SIGINFO;
sa.sa_handler = sig_int;
if(sigaction(SIGINT, &sa, NULL) < 0) {
perror("sigaction2");
exit(1);
}
memset(&sa, 0, sizeof(sa));
if(sigaction(SIGCONT, &sa, NULL) < 0) {
perror("sigaction3");
exit(1);
}
memset(&sa, 0, sizeof(sa));
sa.sa_handler = SIG_DFL;
if(sigaction(SIGWINCH, &sa, NULL) < 0) {
perror("sigaction4");
exit(1);
}
memset(&sa, 0, sizeof(sa));
sa.sa_handler = SIG_IGN;
sa.sa_flags = 0;
if(sigaction(SIGTTOU, &sa, NULL) < 0) {
perror("sigaction5");
exit(1);
}
memset(&sa, 0, sizeof(sa));
sa.sa_handler = SIG_IGN;
sa.sa_flags = 0;
if(sigaction(SIGTTIN, &sa, NULL) < 0) {
perror("sigaction6");
exit(1);
}
memset(&sa, 0, sizeof(sa));
sa.sa_handler = sig_tstp;
sa.sa_flags = 0;
if(sigaction(SIGTSTP, &sa, NULL) < 0) {
perror("sigaction7");
exit(1);
}
memset(&sa, 0, sizeof(sa));
sa.sa_handler = SIG_IGN;
sa.sa_flags = 0;
if(sigaction(SIGQUIT, &sa, NULL) < 0) {
perror("sigaction8");
exit(1);
}
memset(&sa, 0, sizeof(sa));
sa.sa_sigaction = sig_user;
sa.sa_flags = SA_RESTART;
if(sigaction(SIGUSR1, &sa, NULL) < 0) {
perror("sigaction9");
exit(1);
}
memset(&sa, 0, sizeof(sa));
sa.sa_handler = SIG_IGN;
sa.sa_flags = 0;
if(sigaction(SIGPIPE, &sa, NULL) < 0) {
perror("sigaction10");
exit(1);
}
/*
memset(&sa, 0, sizeof(sa));
sa.sa_handler = SIG_DFL;
sa.sa_flags = SA_RESTART;
if(sigaction(SIGSTOP, &sa, NULL) < 0) {
perror("sigaction11");
exit(1);
}
*/
if(xyzsh_set_signal_other) xyzsh_set_signal_other();
}
void xyzsh_set_signal_optc()
{
struct sigaction sa;
memset(&sa, 0, sizeof(sa));
sa.sa_sigaction = sigchld_action;
sa.sa_flags = SA_SIGINFO|SA_RESTART;
if(sigaction(SIGCHLD, &sa, NULL) < 0) {
perror("sigaction1");
exit(1);
}
memset(&sa, 0, sizeof(sa));
sa.sa_flags = SA_SIGINFO;
sa.sa_handler = sig_int_optc;
if(sigaction(SIGINT, &sa, NULL) < 0) {
perror("sigaction2");
exit(1);
}
memset(&sa, 0, sizeof(sa));
sa.sa_sigaction = sig_tstp_optc;
sa.sa_flags = SA_RESTART;
if(sigaction(SIGTSTP, &sa, NULL) < 0) {
perror("sigaction7");
exit(1);
}
memset(&sa, 0, sizeof(sa));
sa.sa_sigaction = sig_cont_optc;
sa.sa_flags = SA_RESTART;
if(sigaction(SIGCONT, &sa, NULL) < 0) {
perror("sigaction3");
exit(1);
}
memset(&sa, 0, sizeof(sa));
sa.sa_handler = SIG_DFL;
if(sigaction(SIGWINCH, &sa, NULL) < 0) {
perror("sigaction4");
exit(1);
}
memset(&sa, 0, sizeof(sa));
sa.sa_handler = SIG_IGN;
sa.sa_flags = 0;
if(sigaction(SIGTTOU, &sa, NULL) < 0) {
perror("sigaction5");
exit(1);
}
memset(&sa, 0, sizeof(sa));
sa.sa_handler = SIG_IGN;
sa.sa_flags = 0;
if(sigaction(SIGTTIN, &sa, NULL) < 0) {
perror("sigaction6");
exit(1);
}
memset(&sa, 0, sizeof(sa));
sa.sa_handler = SIG_IGN;
sa.sa_flags = 0;
if(sigaction(SIGQUIT, &sa, NULL) < 0) {
perror("sigaction8");
exit(1);
}
memset(&sa, 0, sizeof(sa));
sa.sa_handler = sig_user;
sa.sa_flags = 0;
if(sigaction(SIGUSR1, &sa, NULL) < 0) {
perror("sigaction9");
exit(1);
}
memset(&sa, 0, sizeof(sa));
sa.sa_handler = SIG_IGN;
sa.sa_flags = 0;
if(sigaction(SIGPIPE, &sa, NULL) < 0) {
perror("sigaction10");
exit(1);
}
/*
memset(&sa, 0, sizeof(sa));
sa.sa_handler = SIG_DFL;
sa.sa_flags = SA_RESTART;
if(sigaction(SIGSTOP, &sa, NULL) < 0) {
perror("sigaction11");
exit(1);
}
*/
if(xyzsh_set_signal_other) xyzsh_set_signal_other();
}
| 22.538462
| 95
| 0.574128
|
8519630bc125ab7fc348e931a92780515b0b2160
| 5,944
|
cs
|
C#
|
src/testing/Azos.Tests.Nub/Application/ModuleTests.cs
|
JohnPKosh/azos
|
b8cb4c3c43628ce0d5b1df9a1ddd861e0a00d861
|
[
"MIT"
] | 183
|
2018-10-19T21:17:58.000Z
|
2022-03-25T04:48:25.000Z
|
src/testing/Azos.Tests.Nub/Application/ModuleTests.cs
|
JohnPKosh/azos
|
b8cb4c3c43628ce0d5b1df9a1ddd861e0a00d861
|
[
"MIT"
] | 587
|
2018-10-30T02:51:14.000Z
|
2022-03-29T09:30:44.000Z
|
src/testing/Azos.Tests.Nub/Application/ModuleTests.cs
|
JohnPKosh/azos
|
b8cb4c3c43628ce0d5b1df9a1ddd861e0a00d861
|
[
"MIT"
] | 35
|
2018-10-19T21:17:21.000Z
|
2021-12-28T20:27:47.000Z
|
/*<FILE_LICENSE>
* Azos (A to Z Application Operating System) Framework
* The A to Z Foundation (a.k.a. Azist) licenses this file to you under the MIT license.
* See the LICENSE file in the project root for more information.
</FILE_LICENSE>*/
using Azos.Apps;
using Azos.Conf;
using Azos.Scripting;
namespace Azos.Tests.Nub.Application
{
[Runnable]
public class ModuleTests
{
static readonly ConfigSectionNode BASE_CONF = @"
app{
modules
{
module{type='Azos.Tests.Nub.Application.ModuleTests+MyModuleA, Azos.Tests.Nub' order=3 key = 3333}
module{type='Azos.Tests.Nub.Application.ModuleTests+MyModuleA, Azos.Tests.Nub' name='Module1' order=1 key=1000}
module{type='Azos.Tests.Nub.Application.ModuleTests+MyModuleB, Azos.Tests.Nub' name='Module4' order=4 key=4000}
module{type='Azos.Tests.Nub.Application.ModuleTests+MyModuleA, Azos.Tests.Nub' name='Module2' order=2 key= 2200 }
module{type='Azos.Tests.Nub.Application.ModuleTests+MyServiceA, Azos.Tests.Nub' name = 's1'}
module{type='Azos.Tests.Nub.Application.ModuleTests+MyServiceB, Azos.Tests.Nub' name = 's2'}
}
}
".AsLaconicConfig(handling: Data.ConvertErrorHandling.Throw);
static readonly ConfigSectionNode DUPLICATE_CONF1 = @"
app{
modules
{
module{type='Azos.Tests.Nub.Application.ModuleTests+MyModuleA, Azos.Tests.Nub'}
module{type='Azos.Tests.Nub.Application.ModuleTests+MyModuleA, Azos.Tests.Nub'}
}
}
".AsLaconicConfig(handling: Data.ConvertErrorHandling.Throw);
static readonly ConfigSectionNode DUPLICATE_CONF2 = @"
app{
modules
{
module{type='Azos.Tests.Nub.Application.ModuleTests+MyModuleA, Azos.Tests.Nub' name='module1'}
module{type='Azos.Tests.Nub.Application.ModuleTests+MyModuleB, Azos.Tests.Nub' name='module1'}
}
}
".AsLaconicConfig(handling: Data.ConvertErrorHandling.Throw);
[Run]
public void Test_ModuleInjectionAndOrdering()
{
using (var app = new AzosApplication(null, BASE_CONF))
{
Aver.AreEqual(6, app.ModuleRoot.ChildModules.Count);
Aver.AreEqual("s1", app.ModuleRoot.ChildModules[0].Name);//their order is not specified hence 0
Aver.AreEqual("s2", app.ModuleRoot.ChildModules[1].Name);
Aver.AreEqual("Module1", app.ModuleRoot.ChildModules[2].Name);
Aver.AreEqual("Module2", app.ModuleRoot.ChildModules[3].Name);
Aver.AreEqual("Azos.Tests.Nub.Application.ModuleTests+MyModuleA", app.ModuleRoot.ChildModules[4].Name);
Aver.AreEqual("Module4", app.ModuleRoot.ChildModules[5].Name);
Aver.IsTrue(app.ModuleRoot.ChildModules[0] is MyServiceA);
Aver.IsTrue(app.ModuleRoot.ChildModules[1] is MyServiceB);
Aver.IsTrue(app.ModuleRoot.ChildModules[2] is MyModuleA);
Aver.IsTrue(app.ModuleRoot.ChildModules[3] is MyModuleA);
Aver.IsTrue(app.ModuleRoot.ChildModules[4] is MyModuleA);
Aver.IsTrue(app.ModuleRoot.ChildModules[5] is MyModuleB);
}
}
[Run]
public void Test_ModuleInjectionAndAccess()
{
using (var app = new AzosApplication(null, BASE_CONF))
{
Aver.AreEqual(6, app.ModuleRoot.ChildModules.Count);
var logic = app.ModuleRoot.TryGet<IMyLogic>("Module1");
Aver.IsNotNull(logic);
Aver.AreEqual(1000, logic.Key);
logic = app.ModuleRoot.TryGet<IMyLogic>("Module2");
Aver.IsNotNull(logic);
Aver.AreEqual(2200, logic.Key);
logic = app.ModuleRoot.TryGet<IMyLogic>("Module3");
Aver.IsNull(logic);
logic = app.ModuleRoot.TryGet<IMyLogic>("Module4");
Aver.IsNotNull(logic);
Aver.AreEqual(4000, logic.Key);
logic = app.ModuleRoot.TryGet<IMyLogic>("s1");
Aver.IsNull(logic);
Aver.Throws<AzosException>(() => app.ModuleRoot.Get<IMyLogic>("s1"));
var svc = app.ModuleRoot.TryGet<IMyService>("s1");
Aver.IsNotNull(svc);
Aver.IsTrue(svc is MyServiceA);
svc = app.ModuleRoot.TryGet<IMyService>("s2");
Aver.IsNotNull(svc);
Aver.IsTrue(svc is MyServiceB);
}
}
[Run]
[Aver.Throws(typeof(AzosException), Message = "module already contains a child")]
public void Test_DuplicateModule1()
{
using (var app = new AzosApplication(null, DUPLICATE_CONF1)) { }
}
[Run]
[Aver.Throws(typeof(AzosException), Message = "module already contains a child")]
public void Test_DuplicateModule2()
{
using (var app = new AzosApplication(null, DUPLICATE_CONF2)) { }
}
interface IMyLogic : IModule { int Key { get; set; } }
interface IMyService : IModule { }
public class MyModuleA : ModuleBase, IMyLogic
{
public MyModuleA(IApplication app) : base(app) { }
public MyModuleA(IModule parent) : base(parent) { }
public override bool IsHardcodedModule => false;
public override string ComponentLogTopic => "testing";
[Config] public int Key { get; set; }
}
public class MyModuleB : ModuleBase, IMyLogic
{
public MyModuleB(IApplication app) : base(app) { }
public MyModuleB(IModule parent) : base(parent) { }
public override bool IsHardcodedModule => false;
public override string ComponentLogTopic => "testing";
[Config] public int Key { get; set; }
}
public class MyServiceA : ModuleBase, IMyService
{
public MyServiceA(IApplication app) : base(app) { }
public MyServiceA(IModule parent) : base(parent) { }
public override bool IsHardcodedModule => false;
public override string ComponentLogTopic => "testing";
}
public class MyServiceB : ModuleBase, IMyService
{
public MyServiceB(IApplication app) : base(app) { }
public MyServiceB(IModule parent) : base(parent) { }
public override bool IsHardcodedModule => false;
public override string ComponentLogTopic => "testing";
}
}
}
| 33.772727
| 119
| 0.675639
|
aa711bbe155f0dd86f1708dc2558e7e39dc0dfff
| 210
|
rb
|
Ruby
|
lib/ext/marc/control_field.rb
|
cbeer/marc-to-argot
|
c43f62091ab9c6bb7d92c0bd16b7d35a2c3d2414
|
[
"MIT"
] | 15
|
2017-03-09T20:11:24.000Z
|
2021-09-28T14:56:46.000Z
|
lib/ext/marc/control_field.rb
|
cbeer/marc-to-argot
|
c43f62091ab9c6bb7d92c0bd16b7d35a2c3d2414
|
[
"MIT"
] | 29
|
2017-01-20T03:34:34.000Z
|
2019-06-28T14:24:58.000Z
|
lib/ext/marc/control_field.rb
|
cbeer/marc-to-argot
|
c43f62091ab9c6bb7d92c0bd16b7d35a2c3d2414
|
[
"MIT"
] | 5
|
2018-10-18T13:13:39.000Z
|
2021-12-30T20:16:24.000Z
|
module MARC
# Extending the Marc ControlField class to add some helpers
class ControlField
def uses_book_configuration_in_006?
true if value.byteslice(0) =~ /[at]/
end
end
end
| 17.5
| 62
| 0.671429
|
7903edf5b65133e31be0e99f122eb4a37325fe2f
| 495
|
rb
|
Ruby
|
app/channels/chat_engine/message_channel.rb
|
yaseen2211/ChatGem
|
0f9a3c527ddd878b48fac68672cc2fb50dec96f1
|
[
"MIT"
] | null | null | null |
app/channels/chat_engine/message_channel.rb
|
yaseen2211/ChatGem
|
0f9a3c527ddd878b48fac68672cc2fb50dec96f1
|
[
"MIT"
] | 1
|
2019-05-15T16:58:10.000Z
|
2019-05-15T16:58:10.000Z
|
app/channels/chat_engine/message_channel.rb
|
yaseen2211/ChatGem
|
0f9a3c527ddd878b48fac68672cc2fb50dec96f1
|
[
"MIT"
] | 3
|
2020-02-15T21:27:33.000Z
|
2021-08-19T13:19:17.000Z
|
module ChatEngine
class MessageChannel < ApplicationCable::Channel
def subscribed
stream_from "chat_#{params[:chat_id]}" if params[:chat_id]
stream_from "message_notification_#{params[:user_id]}" if params[:user_id]
# user = params[:user_id].split('_')
# user[0].constantize.find(user[1].to_i).update(online:true) if params[:user_id]
# logger.info params
end
def unsubscribed
# Any cleanup needed when channel is unsubscribed
end
end
end
| 30.9375
| 86
| 0.690909
|
16ea38f782f0e29137b8896407e4d605bc2215ca
| 5,176
|
swift
|
Swift
|
Example/Source/Controllers/HorizontalController/HorizontalController.swift
|
PandaSystems/ScrollController
|
c82cac8fd288d1fa9afed1234a5168d4a790dcef
|
[
"MIT"
] | 408
|
2017-11-14T14:17:16.000Z
|
2022-03-29T14:13:07.000Z
|
Example/Source/Controllers/HorizontalController/HorizontalController.swift
|
PandaSystems/ScrollController
|
c82cac8fd288d1fa9afed1234a5168d4a790dcef
|
[
"MIT"
] | 52
|
2017-10-12T17:27:06.000Z
|
2021-11-29T09:51:47.000Z
|
Example/Source/Controllers/HorizontalController/HorizontalController.swift
|
PandaSystems/ScrollController
|
c82cac8fd288d1fa9afed1234a5168d4a790dcef
|
[
"MIT"
] | 29
|
2017-12-28T13:36:59.000Z
|
2022-02-04T19:51:44.000Z
|
//
// HorizontalController.swift
// SlideController_Example
//
// Created by Evgeny Dedovets on 8/10/17.
// Copyright © 2017 Touchlane LLC. All rights reserved.
//
import UIKit
import SlideController
class HorizontalController {
private let internalView = HorizontalView()
private let slideController: SlideController<HorizontalTitleScrollView, HorizontalTitleItem>!
private var addedPagesCount: Int
lazy var removeCurrentPageAction: (() -> Void)? = { [weak self] in
guard let strongSelf = self else { return }
guard let currentPageIndex = strongSelf.slideController.content
.firstIndex(where: { strongSelf.slideController.currentModel === $0 }) else {
return
}
strongSelf.slideController.removeAtIndex(index: currentPageIndex)
}
lazy var insertAction: (() -> Void)? = { [weak self] in
guard let strongSelf = self else { return }
let page = SlideLifeCycleObjectBuilder<ColorPageLifeCycleObject>(object: ColorPageLifeCycleObject())
guard let index = strongSelf.slideController.content
.firstIndex(where: { strongSelf.slideController.currentModel === $0 }) else {
return
}
strongSelf.slideController.insert(object: page, index: index)
strongSelf.addedPagesCount += 1
let titleItems = strongSelf.slideController.titleView.items
guard titleItems.indices.contains(index) else { return }
titleItems[index].titleLabel.text = strongSelf.title(for: strongSelf.addedPagesCount)
}
lazy var appendAction: (() -> Void)? = { [weak self] in
guard let strongSelf = self else { return }
let page = SlideLifeCycleObjectBuilder<ColorPageLifeCycleObject>(object: ColorPageLifeCycleObject())
strongSelf.slideController.append(object: [page])
strongSelf.addedPagesCount += 1
let titleItems = strongSelf.slideController.titleView.items
let lastItemIndex = titleItems.count - 1
titleItems[lastItemIndex].titleLabel.text = strongSelf.title(for: strongSelf.addedPagesCount)
}
private lazy var changePositionAction: ((Int) -> Void)? = { [weak self] position in
guard let strongSelf = self else { return }
switch position {
case 0:
strongSelf.slideController.titleView.position = TitleViewPosition.beside
strongSelf.slideController.titleView.isTransparent = false
case 1:
strongSelf.slideController.titleView.position = TitleViewPosition.above
strongSelf.slideController.titleView.isTransparent = true
default:
break
}
}
init() {
let pagesContent = [
SlideLifeCycleObjectBuilder<ColorPageLifeCycleObject>(object: ColorPageLifeCycleObject()),
SlideLifeCycleObjectBuilder<ColorPageLifeCycleObject>(),
SlideLifeCycleObjectBuilder<ColorPageLifeCycleObject>()]
slideController = SlideController(
pagesContent: pagesContent,
startPageIndex: 0,
slideDirection: SlideDirection.horizontal)
addedPagesCount = pagesContent.count
for index in 0..<addedPagesCount {
slideController.titleView.items[index].titleLabel.text = title(for: index + 1)
}
slideController.titleView.titleSize = 44
internalView.contentView = slideController.view
}
var optionsController: (ViewAccessible & ContentActionable)? {
didSet {
internalView.optionsView = optionsController?.view
optionsController?.removeDidTapAction = removeCurrentPageAction
optionsController?.insertDidTapAction = insertAction
optionsController?.appendDidTapAction = appendAction
optionsController?.changePositionAction = changePositionAction
}
}
}
private typealias PrivateHorizontalController = HorizontalController
private extension PrivateHorizontalController {
func title(for index: Int) -> String {
return "page \(index)"
}
}
private typealias ViewLifeCycleDependableImplementation = HorizontalController
extension ViewLifeCycleDependableImplementation: ViewLifeCycleDependable {
func viewDidAppear() {
slideController.viewDidAppear()
}
func viewDidDisappear() {
slideController.viewDidDisappear()
}
}
private typealias ViewAccessibleImplementation = HorizontalController
extension ViewAccessibleImplementation: ViewAccessible {
var view: UIView {
return internalView
}
}
private typealias StatusBarAccessibleImplementation = HorizontalController
extension StatusBarAccessibleImplementation: StatusBarAccessible {
var statusBarStyle: UIStatusBarStyle {
return .lightContent
}
}
private typealias TitleAccessibleImplementation = HorizontalController
extension TitleAccessibleImplementation: TitleAccessible {
var title: String {
return "Horizontal"
}
}
private typealias TitleColorableImplementation = HorizontalController
extension TitleColorableImplementation: TitleColorable {
var titleColor: UIColor {
return .white
}
}
| 36.450704
| 108
| 0.705371
|
ed5e7ae1fc4b1e6ea7b9d3ccf99f805d68031d36
| 365
|
asm
|
Assembly
|
oeis/240/A240199.asm
|
neoneye/loda-programs
|
84790877f8e6c2e821b183d2e334d612045d29c0
|
[
"Apache-2.0"
] | 11
|
2021-08-22T19:44:55.000Z
|
2022-03-20T16:47:57.000Z
|
oeis/240/A240199.asm
|
neoneye/loda-programs
|
84790877f8e6c2e821b183d2e334d612045d29c0
|
[
"Apache-2.0"
] | 9
|
2021-08-29T13:15:54.000Z
|
2022-03-09T19:52:31.000Z
|
oeis/240/A240199.asm
|
neoneye/loda-programs
|
84790877f8e6c2e821b183d2e334d612045d29c0
|
[
"Apache-2.0"
] | 3
|
2021-08-22T20:56:47.000Z
|
2021-09-29T06:26:12.000Z
|
; A240199: Area under the path specified by n-th composition.
; Submitted by Christian Krause
; 0,1,4,3,9,8,5,6,16,15,12,13,7,8,11,10,25,24,21,22,16,17,20,19,9,10,13,12,18,17,14,15,36,35,32,33,27,28,31,30,20,21,24,23,29,28,25,26,11,12,15,14,20,19,16,17,27,26,23,24,18
seq $0,6068 ; a(n) is Gray-coded into n.
seq $0,197354 ; a(n) = Sum_{k>=0} A030308(n,k)*(2k+1).
| 52.142857
| 173
| 0.660274
|
fe401a0466c34160dc62dae53da025c43f73561f
| 2,549
|
dart
|
Dart
|
example/lib/persisted_uri_card.dart
|
daentech/shared-storage
|
bb2a44e6069c2b4d3e8dc77a033dc838e61d742b
|
[
"MIT"
] | null | null | null |
example/lib/persisted_uri_card.dart
|
daentech/shared-storage
|
bb2a44e6069c2b4d3e8dc77a033dc838e61d742b
|
[
"MIT"
] | null | null | null |
example/lib/persisted_uri_card.dart
|
daentech/shared-storage
|
bb2a44e6069c2b4d3e8dc77a033dc838e61d742b
|
[
"MIT"
] | null | null | null |
import 'package:flutter/cupertino.dart';
import 'package:flutter/material.dart';
import 'package:shared_storage/shared_storage.dart';
import 'package:shared_storage_example/key_value_text.dart';
import 'package:shared_storage_example/list_files.dart';
import 'package:shared_storage_example/simple_card.dart';
class PersistedUriCard extends StatefulWidget {
final UriPermission permissionUri;
final VoidCallback onChange;
const PersistedUriCard(
{Key? key, required this.permissionUri, required this.onChange})
: super(key: key);
@override
_PersistedUriCardState createState() => _PersistedUriCardState();
}
class _PersistedUriCardState extends State<PersistedUriCard> {
void _appendSampleFile(Uri parentUri) async {
/// Create a new file inside the `parentUri`
final documentFile = await parentUri.toDocumentFile();
documentFile?.createFileAsString(
mimeType: 'text/plain',
content: 'Sample File Content',
displayName: 'File created by Shared Storage Sample App',
);
}
void _revokeUri(Uri uri) async {
await releasePersistableUriPermission(uri);
widget.onChange();
}
void _openListFilesPage() {
Navigator.of(context).push(
MaterialPageRoute(
builder: (context) => ListFiles(uri: widget.permissionUri.uri),
),
);
}
Widget _buildActionButton(String text,
{required VoidCallback onTap, Color? color}) {
return TextButton(
style: TextButton.styleFrom(primary: color),
onPressed: onTap,
child: Text(text),
);
}
@override
Widget build(BuildContext context) {
return SimpleCard(
onTap: _openListFilesPage,
children: [
KeyValueText(
entries: {
'isWritePermission': '${widget.permissionUri.isWritePermission}',
'isReadPermission': '${widget.permissionUri.isReadPermission}',
'persistedTime': '${widget.permissionUri.persistedTime}',
'uri': '${widget.permissionUri.uri}',
},
),
Row(
children: [
_buildActionButton(
'Create Sample File',
onTap: () => _appendSampleFile(
widget.permissionUri.uri,
),
),
const Padding(padding: EdgeInsets.all(4)),
_buildActionButton(
'Revoke',
onTap: () => _revokeUri(
widget.permissionUri.uri,
),
color: Colors.red,
),
],
),
],
);
}
}
| 28.322222
| 77
| 0.628874
|
9fc9389aa7ff3bd80f250bda8245bd2574102bd0
| 931
|
lua
|
Lua
|
Tutorial/17.VDoc/.publish/android/VDoc2.lua
|
kennykwok1/PlaygroundOSS
|
f75916582c3044bd59e83f86db56cf2703413a85
|
[
"Apache-2.0"
] | 1
|
2018-06-13T15:43:42.000Z
|
2018-06-13T15:43:42.000Z
|
Tutorial/17.VDoc/VDoc2.lua
|
kennykwok1/PlaygroundOSS
|
f75916582c3044bd59e83f86db56cf2703413a85
|
[
"Apache-2.0"
] | null | null | null |
Tutorial/17.VDoc/VDoc2.lua
|
kennykwok1/PlaygroundOSS
|
f75916582c3044bd59e83f86db56cf2703413a85
|
[
"Apache-2.0"
] | null | null | null |
function setup()
sysLoad("asset://VDoc.lua")
pVDoc = UI_VirtualDoc( nil, 10000,
0, 0, -- x,y
400, 400, -- doc size
400, 400, -- view size
1000, -- command最大数
true -- vertical
)
sysCommand(pVDoc, UI_VDOC_ALIGN, VD_ALIGN_CENTER, 200)
sysCommand(pVDoc, UI_VDOC_FONT, 0, "Courrier New", 16)
sysCommand(pVDoc, UI_VDOC_CLEAR, 0xff, 0x000000)
vDocDraw = {
{ VD_DRAW_TEXT, 0, 0, "aaaaaaaaaa\nbbbbbbbbbbb\n", 0xff, 0xff0000, 0 },
{ VD_DRAW_TEXT, 0, 100, "cccccccccc\nddddddddddd\n", 0xff, 0xff0000, 0 },
}
sysCommand(pVDoc, UI_VDOC_DRAW, vDocDraw)
sysCommand(pVDoc, UI_VDOC_VIEWPOS, 0, 0)
TASK_StageOnly(pVDoc)
end
function execute(deltaT)
end
function leave()
TASK_StageClear()
end
| 28.212121
| 77
| 0.53276
|
52113b9e64fe7613a9ac4a6ea7d904758070a79f
| 1,196
|
swift
|
Swift
|
Sources/App/Routes/SellerLogin.swift
|
Bnslarry/sab-service
|
1a937d7071aa8b4e2727ead1adf5f5e1c490bdae
|
[
"MIT"
] | 1
|
2019-01-24T07:54:35.000Z
|
2019-01-24T07:54:35.000Z
|
Sources/App/Routes/SellerLogin.swift
|
Bnslarry/ubuntu-service
|
c4e069047be8b14edfb8252e03957b59f9dc553f
|
[
"MIT"
] | null | null | null |
Sources/App/Routes/SellerLogin.swift
|
Bnslarry/ubuntu-service
|
c4e069047be8b14edfb8252e03957b59f9dc553f
|
[
"MIT"
] | null | null | null |
//
// sellerLogin.swift
// HelloPackageDescription
//
// Created by Lambert Bns on 2018/3/21.
//
import Foundation
import Vapor
extension Droplet {
func SellerLogin() throws {
post("sellerLogin") { req in
var json = JSON()
guard
let phoneNumber = req.data["phoneNumber"]?.string,
let password = req.data["password"]?.string
else {
try json.set("status", 102)
return json
throw Abort.badRequest
}
let mysqlDriver = try self.mysql()
let result = try mysqlDriver.raw("select * from seller where phone_number = '\(phoneNumber)' and password = '\(password)'")
var arr = result.wrapped.pathIndexableArray
if(arr?.count == 0) //账号不存在或密码错误
{
try json.set("status", 101)
}
else //密码正确
{
json = JSON(node: result)
try json.set("status", 100)
}
return json
}
try resource("psot", PostController.self)
}
}
| 27.181818
| 135
| 0.476589
|
4023c166af547f5e5a68202ca06e2100a15ccc5b
| 3,525
|
rb
|
Ruby
|
vendor/jruby/1.9/gems/reek-1.6.5/spec/reek/cli/report_spec.rb
|
eprislac/guard-yard
|
bb375780b56cddb7cee4f40357bda6d8a60bb0ea
|
[
"MIT"
] | null | null | null |
vendor/jruby/1.9/gems/reek-1.6.5/spec/reek/cli/report_spec.rb
|
eprislac/guard-yard
|
bb375780b56cddb7cee4f40357bda6d8a60bb0ea
|
[
"MIT"
] | 1
|
2015-04-15T06:16:02.000Z
|
2015-04-15T08:12:03.000Z
|
vendor/ruby/2.0.0/gems/reek-1.6.5/spec/reek/cli/report_spec.rb
|
eprislac/guard-yard
|
bb375780b56cddb7cee4f40357bda6d8a60bb0ea
|
[
"MIT"
] | null | null | null |
require 'spec_helper'
require 'reek/examiner'
require 'reek/cli/report/report'
require 'reek/cli/report/formatter'
require 'reek/cli/report/strategy'
require 'rainbow'
require 'stringio'
include Reek
include Reek::Cli
def capture_output_stream
$stdout = StringIO.new
yield
$stdout.string
ensure
$stdout = STDOUT
end
def report_options
{
warning_formatter: Report::SimpleWarningFormatter,
report_formatter: Report::Formatter,
strategy: Report::Strategy::Quiet
}
end
describe Report::TextReport, ' when empty' do
context 'empty source' do
let(:examiner) { Examiner.new('') }
def report(obj)
obj.add_examiner examiner
end
it 'has an empty quiet_report' do
tr = Report::TextReport.new
tr.add_examiner(examiner)
expect { tr.show }.to_not output.to_stdout
end
context 'when output format is html' do
it 'has the text 0 total warnings' do
html_report = report(Report::HtmlReport.new(report_options))
html_report.show
file = File.expand_path('../../../../reek.html', __FILE__)
text = File.read(file)
File.delete(file)
expect(text).to include('0 total warnings')
end
end
context 'when output format is yaml' do
it 'prints empty yaml' do
yaml_report = report(Report::YamlReport.new(report_options))
result = capture_output_stream { yaml_report.show }
expect(result).to match(/^--- \[\]\n.*$/)
end
end
context 'when output format is text' do
it 'prints nothing' do
text_report = report(Report::TextReport.new)
expect { text_report.show }.to_not output.to_stdout
end
end
end
context 'with a couple of smells' do
before :each do
@examiner = Examiner.new('def simple(a) a[3] end')
@rpt = Report::TextReport.new report_options
end
context 'with colors disabled' do
before :each do
Rainbow.enabled = false
@result = @rpt.add_examiner(@examiner).smells.first
end
it 'has a header' do
expect(@result).to match('string -- 2 warnings')
end
it 'should mention every smell name' do
expect(@result).to include('UncommunicativeParameterName')
expect(@result).to include('FeatureEnvy')
end
end
context 'with colors enabled' do
before :each do
Rainbow.enabled = true
end
context 'with non smelly files' do
before :each do
Rainbow.enabled = true
@rpt.add_examiner(Examiner.new('def simple() puts "a" end'))
@rpt.add_examiner(Examiner.new('def simple() puts "a" end'))
@result = @rpt.smells
end
it 'has a footer in color' do
result = capture_output_stream { @rpt.show }
expect(result).to end_with "\e[32m0 total warnings\n\e[0m"
end
end
context 'with smelly files' do
before :each do
Rainbow.enabled = true
@rpt.add_examiner(Examiner.new('def simple(a) a[3] end'))
@rpt.add_examiner(Examiner.new('def simple(a) a[3] end'))
@result = @rpt.smells
end
it 'has a header in color' do
expect(@result.first).
to start_with "\e[36mstring -- \e[0m\e[33m2 warning\e[0m\e[33ms\e[0m"
end
it 'has a footer in color' do
result = capture_output_stream { @rpt.show }
expect(result).to end_with "\e[31m4 total warnings\n\e[0m"
end
end
end
end
end
| 26.503759
| 81
| 0.621844
|
3c9625f482eeece05ee9670b4b03352a1a98ca40
| 4,835
|
ps1
|
PowerShell
|
sambs-installer-update.ps1
|
sambsawsdev/sambs-scoop
|
a6c18e64530aa40058f90c2b953491ae5a309ab8
|
[
"MIT"
] | null | null | null |
sambs-installer-update.ps1
|
sambsawsdev/sambs-scoop
|
a6c18e64530aa40058f90c2b953491ae5a309ab8
|
[
"MIT"
] | null | null | null |
sambs-installer-update.ps1
|
sambsawsdev/sambs-scoop
|
a6c18e64530aa40058f90c2b953491ae5a309ab8
|
[
"MIT"
] | null | null | null |
Param(
[Parameter(Mandatory=$false, Position=0)]
[string]$branch='develop'
)
class SambsInstaller {
[string]$version = '0.0.0'
[string]$license = 'https://github.com/sambsawsdev/sambs-installer/blob/main/LICENSE'
[string]$extract_dir = 'sambs-installer-main'
[string]$url = 'https://github.com/sambsawsdev/sambs-installer/archive/main.zip'
[string]$homepage = 'https://github.com/sambsawsdev/sambs-installer'
[string]$hash = 'sha256:'
[string[]]$bin = @("sambs-installer.cmd", "sambs-installer-uninstall.cmd")
#[string]$persist = 'package'
[string] toString() {
return $this | ConvertTo-Json -Depth 2
}
}
function Update-Installer {
Param(
[Parameter(Mandatory=$true, Position=0)]
[string]$branch
)
Process {
try {
Write-Log 'starting...'
# Use env:SAMBS_HOME or the default <userHome>/.sambs
if ([string]::IsNullOrWhiteSpace($env:SAMBS_HOME)) {
$env:SAMBS_HOME = Join-Path -Path $HOME -ChildPath '/.sambs'
}
# Create the download directory
[string]$installerUpdatePath = Join-Path -Path $env:SAMBS_HOME -ChildPath '/installer-update'
$null = New-Item -Path $installerUpdatePath -ItemType Directory -Force
# Download the sambs-installer zip file
[string]$sambsInstallerFilePath = Join-Path -Path $installerUpdatePath -ChildPath "sambs-installer-$branch.zip"
[System.Net.WebClient]$webClient = [System.Net.WebClient]::new()
$webClient.DownloadFile("https://github.com/sambsawsdev/sambs-installer/archive/$branch.zip", $sambsInstallerFilePath);
# Get the hash of the file
[string]$installerHash = (Get-FileHash -LiteralPath $sambsInstallerFilePath -Algorithm SHA256 | Select-Object Hash).Hash
# Remove the download directory
Remove-Item -LiteralPath $installerUpdatePath -Recurse -Force
# Ensure the mainfest file exists
[string]$sambsInstallerJsonFilePath = Join-Path -Path $PSScriptroot -ChildPath '/bucket/sambs-installer.json'
if ( -not ( Test-Path -LiteralPath $sambsInstallerJsonFilePath -PathType Leaf ) ) {
$null = New-Item -Path $sambsInstallerJsonFilePath -ItemType File -Force
}
# Populate the json into a class
[SambsInstaller]$sambsInstaller = [SambsInstaller]::new()
$sambsInstallerJson = Get-Content -LiteralPath $sambsInstallerJsonFilePath -Raw | ConvertFrom-Json
# Loop through all the properties of the destination
$sambsInstaller | Get-Member -MemberType Properties | ForEach-Object {
# Ensure the property on the json is not null
if (-not [string]::IsNullOrWhiteSpace($sambsInstallerJson."$($_.Name)")) {
# Populate the destination property with the value from the json
$sambsInstaller."$($_.Name)" = $sambsInstallerJson."$($_.Name)"
}
}
# Only update if the hash has changed
if ("sha256:$installerHash" -ne $sambsInstaller.hash) {
# Update all the fields
$sambsInstaller.extract_dir = "sambs-installer-$branch"
$sambsInstaller.url = "https://github.com/sambsawsdev/sambs-installer/archive/$branch.zip"
$sambsInstaller.hash = "sha256:$installerHash"
# Increment the build version
[System.Version]$version = [System.Version]::new($sambsInstaller.version)
$sambsInstaller.version = "$($version.Major).$($version.Minor).$($version.Build+1)"
}
$sambsInstaller | ConvertTo-Json -Depth 2 | Out-File -FilePath $sambsInstallerJsonFilePath -Force
Write-Log "$($sambsInstaller.ToString())"
Write-Log 'completed.'
} catch {
Write-Log -message "failed: $_" -level 'Error' -forgroundColour 'Red'
throw "$_"
}
}
}
function Write-Log {
Param(
[Parameter(Mandatory=$true, Position=0)]
[string]$message,
[Parameter(Mandatory=$false, Position=1)]
[string]$level = 'Info',
[Parameter(Mandatory=$false, Position=2)]
[string]$forgroundColour = 'White'
)
Process {
try {
# Format the message
[string]$formattedMessage = "$(Get-Date -UFormat '%Y/%m/%d %T' ) [$($MyInvocation.MyCommand.Name)] -$level- : Sambs installer update $message"
# Output the message
Write-Host $formattedMessage -ForegroundColor $forgroundColour
} catch {
throw "Sambs installer logging failed: $_"
}
}
}
Update-Installer -branch $branch
| 42.412281
| 154
| 0.609307
|
252852ebb67725882ec00d1df93b5b9aa6e1f4f3
| 13,079
|
css
|
CSS
|
dist/flip.css
|
jplajpla23/Flip
|
fddbdbfd608f5537bdbffd145c321cbf51c2b98f
|
[
"MIT"
] | 72
|
2015-07-22T21:32:17.000Z
|
2022-03-08T16:10:16.000Z
|
dist/flip.css
|
jplajpla23/Flip
|
fddbdbfd608f5537bdbffd145c321cbf51c2b98f
|
[
"MIT"
] | 21
|
2015-10-26T23:58:56.000Z
|
2020-12-11T02:55:08.000Z
|
dist/flip.css
|
jplajpla23/Flip
|
fddbdbfd608f5537bdbffd145c321cbf51c2b98f
|
[
"MIT"
] | 7
|
2016-02-19T18:34:45.000Z
|
2021-04-05T06:26:04.000Z
|
/*
* @pqina/tick v1.7.6 - Counters Made Easy
* Copyright (c) 2020 PQINA - https://github.com/pqina/tick/
*/
.tick {
box-sizing: border-box;
-webkit-user-select: none;
-moz-user-select: none;
-ms-user-select: none;
user-select: none;
cursor: default;
position: relative;
z-index: 1;
line-height: 1.4; }
.tick * {
box-sizing: inherit; }
.tick [data-view] {
max-width: 100%; }
.tick span[data-view] {
display: inline-block; }
.tick [data-layout~='pad'] {
margin: -.25em; }
.tick [data-layout~='pad'] > * {
margin: .25em; }
.tick [data-layout~='horizontal'] {
display: -webkit-box;
display: -webkit-flex;
display: -ms-flexbox;
display: flex;
-webkit-box-orient: horizontal;
-webkit-box-direction: normal;
-webkit-flex-direction: row;
-ms-flex-direction: row;
flex-direction: row;
-webkit-box-pack: center;
-webkit-justify-content: center;
-ms-flex-pack: center;
justify-content: center; }
.tick [data-layout~='horizontal'][data-layout~='baseline'] {
-webkit-box-align: baseline;
-webkit-align-items: baseline;
-ms-flex-align: baseline;
align-items: baseline; }
.tick [data-layout~='horizontal'][data-layout~='center'] {
-webkit-box-pack: center;
-webkit-justify-content: center;
-ms-flex-pack: center;
justify-content: center; }
.tick [data-layout~='horizontal'][data-layout~='right'] {
-webkit-box-pack: end;
-webkit-justify-content: flex-end;
-ms-flex-pack: end;
justify-content: flex-end; }
.tick [data-layout~='horizontal'][data-layout~='left'] {
-webkit-box-pack: start;
-webkit-justify-content: flex-start;
-ms-flex-pack: start;
justify-content: flex-start; }
.tick [data-layout~='horizontal'][data-layout~='stretch'], .tick [data-layout~='horizontal'][data-layout~='fill'] {
-webkit-align-content: stretch;
-ms-flex-line-pack: stretch;
align-content: stretch;
-webkit-flex-wrap: nowrap;
-ms-flex-wrap: nowrap;
flex-wrap: nowrap; }
.tick [data-layout~='horizontal'][data-layout~='stretch'] > *, .tick [data-layout~='horizontal'][data-layout~='fill'] > * {
-webkit-box-flex: 1;
-webkit-flex: 1 0 0;
-ms-flex: 1 0 0px;
flex: 1 0 0; }
.tick [data-layout~='horizontal'][data-layout~='stretch'] > *, .tick [data-layout~='horizontal'][data-layout~='fill'] > * {
width: 100%; }
.tick [data-layout~='horizontal'][data-layout~='multi-line'] {
-webkit-flex-wrap: wrap;
-ms-flex-wrap: wrap;
flex-wrap: wrap; }
.tick [data-layout~='horizontal'][data-layout~='fit'] {
display: -webkit-inline-box;
display: -webkit-inline-flex;
display: -ms-inline-flexbox;
display: inline-flex;
-webkit-flex-wrap: nowrap;
-ms-flex-wrap: nowrap;
flex-wrap: nowrap;
-webkit-align-content: center;
-ms-flex-line-pack: center;
align-content: center;
white-space: nowrap;
-webkit-box-pack: start;
-webkit-justify-content: flex-start;
-ms-flex-pack: start;
justify-content: flex-start; }
.tick [data-layout~='vertical'] {
display: -webkit-box;
display: -webkit-flex;
display: -ms-flexbox;
display: flex;
-webkit-box-orient: vertical;
-webkit-box-direction: normal;
-webkit-flex-direction: column;
-ms-flex-direction: column;
flex-direction: column;
-webkit-box-align: center;
-webkit-align-items: center;
-ms-flex-align: center;
align-items: center; }
.tick [data-layout~='vertical'][data-layout~='top'] {
-webkit-box-pack: start;
-webkit-justify-content: flex-start;
-ms-flex-pack: start;
justify-content: flex-start; }
.tick [data-layout~='vertical'][data-layout~='bottom'] {
-webkit-box-pack: end;
-webkit-justify-content: flex-end;
-ms-flex-pack: end;
justify-content: flex-end;
min-height: 100%; }
.tick [data-layout~='vertical'][data-layout~='middle'] {
-webkit-box-pack: center;
-webkit-justify-content: center;
-ms-flex-pack: center;
justify-content: center;
min-height: 100%; }
.tick [data-layout~='vertical'][data-layout~='left'] {
-webkit-box-align: start;
-webkit-align-items: flex-start;
-ms-flex-align: start;
align-items: flex-start; }
.tick [data-layout~='vertical'][data-layout~='right'] {
-webkit-box-align: end;
-webkit-align-items: flex-end;
-ms-flex-align: end;
align-items: flex-end; }
.tick [data-layout~='vertical'][data-layout~='center'] {
text-align: center; }
.tick [data-layout~='vertical'][data-layout~='stretch'], .tick [data-layout~='vertical'][data-layout~='fill'] {
-webkit-box-align: stretch;
-webkit-align-items: stretch;
-ms-flex-align: stretch;
align-items: stretch;
min-height: 100%; }
.tick [data-layout~='vertical'][data-layout~='stretch'] > *, .tick [data-layout~='vertical'][data-layout~='fill'] > * {
-webkit-box-flex: 1;
-webkit-flex: 1 0 0;
-ms-flex: 1 0 0px;
flex: 1 0 0; }
.tick [data-layout~='vertical'] > * + * {
margin-top: .5em; }
.tick [data-layout~='overlay'] {
position: relative; }
.tick [data-layout~='overlay'] > * {
margin: 0; }
.tick [data-layout~='overlay'][data-layout~='center'] {
text-align: center; }
.tick [data-layout~='overlay'][data-layout~='left'] {
text-align: left; }
.tick [data-layout~='overlay'][data-layout~='right'] {
text-align: right; }
.tick [data-layout~='overlay'] > [data-overlay='stretch'],
.tick [data-layout~='overlay'] > [data-overlay='fill'] {
position: absolute;
left: 0;
right: 0;
top: 0;
bottom: 0; }
.tick [data-layout~='overlay'] > [data-overlay='center'] {
display: -webkit-box;
display: -webkit-flex;
display: -ms-flexbox;
display: flex;
-webkit-box-align: center;
-webkit-align-items: center;
-ms-flex-align: center;
align-items: center;
-webkit-box-pack: center;
-webkit-justify-content: center;
-ms-flex-pack: center;
justify-content: center;
position: absolute;
left: 0;
right: 0;
top: 0;
bottom: 0; }
/*
// Default Visual Style
.tick-text[data-style*='transition'] {
// prevents width changes when animating between empty space and characters
min-width:1ex;
text-align:center;
}
// flipper
.tick-flip {
// distance from other possible flippers
margin-left:.0625em;
margin-right:.0625em;
// prevents width changes when animating between empty space and characters
min-width:1.125em;
// sets default border radius
border-radius:.125em;
// default spacing
letter-spacing: .25em;
text-indent: .25em;
}
.tick-flip-front,
.tick-flip-back {
color:#eee;
background-color:#333;
box-shadow:0 .125em .3125em rgba(0,0,0,.25), 0 .02125em .06125em rgba(0,0,0,.25);
}
// matrix
.tick-matrix-char {
margin:0 .05em;
}
.tick-matrix-pixel {
width:5px;
height:5px;
margin:1px;
background-color:currentColor;
border-radius:2px;
}
// meter (horizontal / vertical)
.tick-meter-rail {
background-color:#eee;
}
.tick-meter-fill {
background-color:currentColor;
}
// meter (ring)
.tick-meter canvas {
}
*/
/*
* @pqina/flip v1.7.7 - A Beautifully Animated Flip Clock
* Copyright (c) 2020 PQINA - https://pqina.nl/flip/
*/
/**
* Layout
*/
.tick-flip {
position: relative;
text-align: center; }
.tick-flip * {
border-radius: inherit;
white-space: pre; }
.tick-flip * {
letter-spacing: inherit;
text-indent: inherit; }
.tick-flip-front {
border-bottom-left-radius: 0;
border-bottom-right-radius: 0; }
.tick-flip-back {
border-top-left-radius: 0;
border-top-right-radius: 0; }
.tick-flip-spacer {
display: block;
visibility: hidden; }
.tick-flip-shadow {
position: absolute;
left: 1px;
right: 1px;
top: 1px;
bottom: 1px;
color: transparent !important;
background: transparent !important; }
.tick-flip-shadow-top {
bottom: calc(50% - 1px); }
.tick-flip-shadow-bottom {
top: calc(50% + 1px); }
.tick-flip-card-shadow {
position: absolute;
left: 0.15em;
right: 0.15em;
bottom: 0.125em;
height: 0.5em;
background-color: transparent;
border-radius: 0;
opacity: 0;
-webkit-transform-origin: 0 100%;
transform-origin: 0 100%;
box-shadow: 0 0.125em 0.25em rgba(0, 0, 0, 0.5), 0 0.125em 0.5em rgba(0, 0, 0, 0.75);
z-index: 0; }
/**
* Card
*/
.tick-flip-card {
position: absolute;
z-index: 1;
left: 0;
top: 0;
width: 100%;
height: 100%;
-webkit-perspective: 4em;
perspective: 4em; }
.tick-flip-panel-front,
.tick-flip-panel-back {
position: absolute;
left: 0;
width: 100%;
height: 51%;
-webkit-backface-visibility: hidden;
backface-visibility: hidden;
-webkit-transform-style: preserve-3d;
transform-style: preserve-3d; }
.tick-flip-panel-front-text,
.tick-flip-panel-back-text {
position: absolute;
left: -1px;
top: 0;
right: -1px;
height: 100%;
overflow: hidden; }
.tick-flip-panel-text-wrapper {
position: absolute;
left: 0;
top: 0;
right: 0;
height: 100%; }
.tick-flip-panel-back-text .tick-flip-panel-text-wrapper {
height: 200%;
top: -100%; }
.tick-flip-panel-front {
-webkit-transform-origin: center bottom;
transform-origin: center bottom;
top: 0;
z-index: 2;
box-shadow: inset 0 1px rgba(255, 255, 255, 0.05); }
.tick-flip-panel-back {
-webkit-transform-origin: center top;
transform-origin: center top;
top: 50%;
z-index: 1;
box-shadow: inset 0 -1px rgba(0, 0, 0, 0.1); }
.tick-flip-panel-back::after {
z-index: 1;
content: '';
position: absolute;
left: 0;
top: 0;
width: 100%;
height: 100%;
background-image: -webkit-linear-gradient(top, rgba(0, 0, 0, 0.3) 1px, rgba(0, 0, 0, 0.15) 1px, transparent 30%);
background-image: linear-gradient(180deg, rgba(0, 0, 0, 0.3) 1px, rgba(0, 0, 0, 0.15) 1px, transparent 30%); }
.tick-flip-panel-back-shadow {
z-index: 2; }
.tick-flip-panel-back-highlight {
z-index: 3; }
.tick-flip-panel-back-shadow,
.tick-flip-panel-back-highlight {
border-bottom-left-radius: inherit;
border-bottom-right-radius: inherit; }
.tick-flip-panel-front-shadow,
.tick-flip-panel-back-shadow,
.tick-flip-panel-back-highlight {
position: absolute;
left: 0;
top: 0;
right: 0;
bottom: 0;
opacity: 0; }
.tick-flip-panel-front-shadow {
background-image: -webkit-linear-gradient(bottom, rgba(0, 0, 0, 0.8), rgba(0, 0, 0, 0.3));
background-image: linear-gradient(to top, rgba(0, 0, 0, 0.8), rgba(0, 0, 0, 0.3)); }
.tick-flip-panel-back-shadow {
background-image: -webkit-linear-gradient(top, rgba(0, 0, 0, 0.7), rgba(0, 0, 0, 0.5));
background-image: linear-gradient(to bottom, rgba(0, 0, 0, 0.7), rgba(0, 0, 0, 0.5)); }
.tick-flip-panel-back-highlight {
background-image: -webkit-linear-gradient(top, rgba(255, 255, 255, 0.15), rgba(255, 255, 255, 0.3));
background-image: linear-gradient(to bottom, rgba(255, 255, 255, 0.15), rgba(255, 255, 255, 0.3)); }
.tick [data-style*='shadow:inner'],
.tick [data-style*='shadow:inner'] .tick-flip-card-shadow {
box-shadow: none; }
.tick [data-style*='shadow:none'] .tick-flip-panel-front,
.tick [data-style*='shadow:none'] .tick-flip-panel-back,
.tick [data-style*='shadow:none'] .tick-flip-shadow,
.tick [data-style*='shadow:none'] .tick-flip-card-shadow {
box-shadow: none; }
.tick [data-style*='shadow:none'] .tick-flip-back::after,
.tick [data-style*='shadow:none'] .tick-flip-panel-front-shadow,
.tick [data-style*='shadow:none'] .tick-flip-panel-back-shadow,
.tick [data-style*='shadow:none'] .tick-flip-panel-back-text::after {
background-image: none; }
.tick [data-style*='rounded:none'] {
border-radius: 0; }
.tick [data-style*='rounded:panels'] .tick-flip-shadow-bottom,
.tick [data-style*='rounded:panels'] .tick-flip-front {
border-bottom-left-radius: inherit;
border-bottom-right-radius: inherit; }
.tick [data-style*='rounded:panels'] .tick-flip-shadow-top,
.tick [data-style*='rounded:panels'] .tick-flip-panel-back::after,
.tick [data-style*='rounded:panels'] .tick-flip-back {
border-top-left-radius: inherit;
border-top-right-radius: inherit; }
.tick-flip {
margin-left: .0625em;
margin-right: .0625em;
min-width: 1.125em;
border-radius: .125em;
letter-spacing: .25em;
text-indent: .25em; }
.tick-flip-panel {
color: #edebeb;
background-color: #333232; }
.tick-flip-shadow {
box-shadow: 0 0.125em 0.3125em rgba(0, 0, 0, 0.25), 0 0.02125em 0.06125em rgba(0, 0, 0, 0.25); }
| 28.871965
| 129
| 0.610521
|
67546cb441310c20748c9eb9d24732a0437fd54b
| 114
|
kt
|
Kotlin
|
06-abstract-classes-interfaces/src/com/jjh/interfaces/Translator.kt
|
johnehunt/kotlin-intro
|
08bc210974ef9ba8f2f6ac8952548a74ae91f053
|
[
"Apache-2.0"
] | 1
|
2020-07-18T11:09:21.000Z
|
2020-07-18T11:09:21.000Z
|
06-abstract-classes-interfaces/src/com/jjh/interfaces/Translator.kt
|
johnehunt/kotlin-intro
|
08bc210974ef9ba8f2f6ac8952548a74ae91f053
|
[
"Apache-2.0"
] | null | null | null |
06-abstract-classes-interfaces/src/com/jjh/interfaces/Translator.kt
|
johnehunt/kotlin-intro
|
08bc210974ef9ba8f2f6ac8952548a74ae91f053
|
[
"Apache-2.0"
] | null | null | null |
package com.jjh.interfaces
interface Translator {
fun sayHello() {
println("Actor - Bonjour")
}
}
| 16.285714
| 34
| 0.631579
|
971928e611c3feab6e2415c7700fc3c24dc704ab
| 698
|
tsx
|
TypeScript
|
src/stories/customize-colors.stories.tsx
|
alicanerdurmaz/react-two-thumb-input-range
|
9f2c643dc5b5dd7df066efa718494ac0e78a9435
|
[
"MIT"
] | null | null | null |
src/stories/customize-colors.stories.tsx
|
alicanerdurmaz/react-two-thumb-input-range
|
9f2c643dc5b5dd7df066efa718494ac0e78a9435
|
[
"MIT"
] | 2
|
2021-03-24T19:46:51.000Z
|
2022-01-28T06:55:26.000Z
|
src/stories/customize-colors.stories.tsx
|
alicanerdurmaz/react-two-thumb-input-range
|
9f2c643dc5b5dd7df066efa718494ac0e78a9435
|
[
"MIT"
] | null | null | null |
import React, { useState } from "react"
import TwoThumbInputRange from "../components/TwoThumbInputRange"
export default {
title: "Two Thumb Input Range/Customize Colors",
component: "TwoThumbInputRange",
argTypes: {
railColor: { control: "color" },
trackColor: { control: "color" },
thumbColor: { control: "color" },
showLabels: { control: "boolean" },
},
}
export const InputRange = (args): React.ReactNode => {
const [value, setValue] = useState<[number, number]>([1000, 5000])
const onValueChange = (values: [number, number]) => {
setValue(values)
}
return <TwoThumbInputRange onChange={onValueChange} values={value} min={1000} max={5000} {...args} />
}
| 29.083333
| 103
| 0.670487
|
0faad1e839d257c01b0a75bbcd01a49560fcfe56
| 7,700
|
go
|
Go
|
plugin/path_role.go
|
nimbolus/vault-plugin-auth-openstack
|
99038f6c749e1c1dd48f9fad958032d102e9af46
|
[
"MIT"
] | null | null | null |
plugin/path_role.go
|
nimbolus/vault-plugin-auth-openstack
|
99038f6c749e1c1dd48f9fad958032d102e9af46
|
[
"MIT"
] | null | null | null |
plugin/path_role.go
|
nimbolus/vault-plugin-auth-openstack
|
99038f6c749e1c1dd48f9fad958032d102e9af46
|
[
"MIT"
] | null | null | null |
package plugin
import (
"context"
"fmt"
"strings"
"time"
"github.com/hashicorp/vault/sdk/framework"
"github.com/hashicorp/vault/sdk/helper/policyutil"
"github.com/hashicorp/vault/sdk/logical"
)
const roleSynopsis = "Register an role with the backend."
const roleDescription = `
A role is required to authenticate with this backend. The role binds
OpenStack instance with token policies and token settings. The bindings,
token polices and token settings can all be configured using this endpoint.
`
const roleListSynopsis = "Lists all the roles registered with the backend."
const roleListDescription = `
The list will contain the names of the roles.
`
var roleFields map[string]*framework.FieldSchema = map[string]*framework.FieldSchema{
"name": {
Type: framework.TypeString,
Description: "Name of the role.",
},
"policies": {
Type: framework.TypeCommaStringSlice,
Description: "Policies to be set on tokens issued using this role.",
},
"ttl": {
Type: framework.TypeDurationSecond,
Default: 0,
Description: "Duration in seconds after which the issued token should expire. Defaults to 0, in which case the value will fallback to the system/mount defaults.",
},
"max_ttl": {
Type: framework.TypeDurationSecond,
Default: 0,
Description: "The maximum allowed lifetime of tokens issued using this role.",
},
"period": {
Type: framework.TypeDurationSecond,
Default: 0,
Description: "If set, indicates that the token generated using this role should never expire. The token should be renewed within the duration specified by this value. At each renewal, the token's TTL will be set to the value of this parameter.",
},
"metadata_key": {
Type: framework.TypeString,
Default: "vault-role",
Description: "The key name of the instance metadata to validate the role specified during authentication. The role name must be specified for the key of metadata of the instance specified here.",
},
"auth_period": {
Type: framework.TypeDurationSecond,
Default: 120,
Description: "The authentication deadline. This is the relative number of seconds since the instance started.",
},
"auth_limit": {
Type: framework.TypeInt,
Default: 1,
Description: "The number of times an instance can try authentication.",
},
"tenant_id": {
Type: framework.TypeString,
Description: "Unique ID of the tenant. Overwrites global tenant_id",
},
"tenant_name": {
Type: framework.TypeString,
Description: "Unique ID of the tenant. Overwrites global tenant_name",
},
"project_id": {
Type: framework.TypeString,
Description: "Unique ID of the project. Overwrites global project_id",
},
"project_name": {
Type: framework.TypeString,
Description: "Unique ID of the project. Overwrites global project_name",
},
}
func NewPathRole(b *OpenStackAuthBackend) []*framework.Path {
return []*framework.Path{
{
Pattern: fmt.Sprintf("role/%s", framework.GenericNameRegex("name")),
Fields: roleFields,
ExistenceCheck: b.checkRoleHandler,
Callbacks: map[logical.Operation]framework.OperationFunc{
logical.CreateOperation: b.updateRoleHandler,
logical.ReadOperation: b.readRoleHandler,
logical.UpdateOperation: b.updateRoleHandler,
logical.DeleteOperation: b.deleteRoleHandler,
},
HelpSynopsis: roleSynopsis,
HelpDescription: roleDescription,
},
{
Pattern: "role/?",
Callbacks: map[logical.Operation]framework.OperationFunc{
logical.ListOperation: b.listRoleHandler,
},
HelpSynopsis: roleListSynopsis,
HelpDescription: roleListDescription,
},
{
Pattern: "roles/?",
Callbacks: map[logical.Operation]framework.OperationFunc{
logical.ListOperation: b.listRoleHandler,
},
HelpSynopsis: roleListSynopsis,
HelpDescription: roleListDescription,
},
}
}
func (b *OpenStackAuthBackend) checkRoleHandler(ctx context.Context, req *logical.Request, data *framework.FieldData) (bool, error) {
roleName := strings.ToLower(data.Get("name").(string))
entry, err := readRole(ctx, req.Storage, roleName)
return (entry != nil), err
}
func (b *OpenStackAuthBackend) readRoleHandler(ctx context.Context, req *logical.Request, data *framework.FieldData) (*logical.Response, error) {
roleName := strings.ToLower(data.Get("name").(string))
if roleName == "" {
return logical.ErrorResponse("role name is required"), nil
}
role, err := readRole(ctx, req.Storage, roleName)
if err != nil {
return nil, err
}
if role == nil {
return nil, nil
}
res := &logical.Response{
Data: map[string]interface{}{
"policies": role.Policies,
"ttl": int64(role.TTL / time.Second),
"max_ttl": int64(role.MaxTTL / time.Second),
"period": int64(role.Period / time.Second),
"metadata_key": role.MetadataKey,
"auth_period": int64(role.AuthPeriod / time.Second),
"auth_limit": role.AuthLimit,
"project_id": role.ProjectID,
"project_name": role.ProjectName,
"tenant_id": role.TenantID,
"tenant_name": role.TenantName,
},
}
return res, nil
}
func (b *OpenStackAuthBackend) updateRoleHandler(ctx context.Context, req *logical.Request, data *framework.FieldData) (*logical.Response, error) {
var val interface{}
var ok bool
roleName := strings.ToLower(data.Get("name").(string))
if roleName == "" {
return logical.ErrorResponse("role name is required"), nil
}
role, err := readRole(ctx, req.Storage, roleName)
if err != nil {
return nil, err
}
if role == nil {
role = &Role{Name: roleName}
}
val, ok = data.GetOk("policies")
if ok {
role.Policies = policyutil.ParsePolicies(val)
}
val, ok = data.GetOk("ttl")
if ok {
role.TTL = time.Duration(val.(int)) * time.Second
}
val, ok = data.GetOk("max_ttl")
if ok {
role.MaxTTL = time.Duration(val.(int)) * time.Second
}
val, ok = data.GetOk("period")
if ok {
role.Period = time.Duration(val.(int)) * time.Second
}
val, ok = data.GetOk("metadata_key")
if ok {
role.MetadataKey = val.(string)
}
val, ok = data.GetOk("auth_period")
if ok {
role.AuthPeriod = time.Duration(val.(int)) * time.Second
}
val, ok = data.GetOk("auth_limit")
if ok {
role.AuthLimit = val.(int)
}
val, ok = data.GetOk("project_id")
if ok {
role.ProjectID = val.(string)
}
val, ok = data.GetOk("project_name")
if ok {
role.ProjectName = val.(string)
}
val, ok = data.GetOk("tenant_id")
if ok {
role.TenantID = val.(string)
}
val, ok = data.GetOk("tenant_name")
if ok {
role.TenantName = val.(string)
}
warnings, err := role.Validate(b.System())
if err != nil {
return logical.ErrorResponse(fmt.Sprintf("invalid role: %v", err)), nil
}
entry, err := logical.StorageEntryJSON(fmt.Sprintf("role/%s", roleName), role)
if err != nil {
return nil, err
}
err = req.Storage.Put(ctx, entry)
if err != nil {
return nil, err
}
res := &logical.Response{
Warnings: warnings,
}
return res, nil
}
func (b *OpenStackAuthBackend) deleteRoleHandler(ctx context.Context, req *logical.Request, data *framework.FieldData) (*logical.Response, error) {
roleName := strings.ToLower(data.Get("name").(string))
if roleName == "" {
return logical.ErrorResponse("role name is required"), nil
}
err := req.Storage.Delete(ctx, fmt.Sprintf("role/%s", roleName))
if err != nil {
return nil, err
}
return nil, nil
}
func (b *OpenStackAuthBackend) listRoleHandler(ctx context.Context, req *logical.Request, data *framework.FieldData) (*logical.Response, error) {
roles, err := req.Storage.List(ctx, "role/")
if err != nil {
return nil, err
}
return logical.ListResponse(roles), nil
}
| 28.10219
| 247
| 0.692857
|
2d41870b65737e51e49b4e57263fb88f74b097c4
| 478
|
dart
|
Dart
|
security_monkey/static/packages/$sdk/lib/internal/internal.dart
|
claytonbrown/security_monkey
|
e1786e26c1160f5ba1ff293a4f5d31a76394a8fc
|
[
"Apache-2.0"
] | 1
|
2019-08-13T23:55:47.000Z
|
2019-08-13T23:55:47.000Z
|
security_monkey/static/packages/$sdk/lib/internal/internal.dart
|
claytonbrown/security_monkey
|
e1786e26c1160f5ba1ff293a4f5d31a76394a8fc
|
[
"Apache-2.0"
] | 21
|
2015-05-22T15:21:13.000Z
|
2018-03-06T14:19:39.000Z
|
security_monkey/static/packages/$sdk/lib/internal/internal.dart
|
claytonbrown/security_monkey
|
e1786e26c1160f5ba1ff293a4f5d31a76394a8fc
|
[
"Apache-2.0"
] | null | null | null |
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library dart._internal;
import 'dart:collection';
import 'dart:core' hide Symbol;
import 'dart:core' as core;
import 'dart:math' show Random;
part 'iterable.dart';
part 'list.dart';
part 'lists.dart';
part 'print.dart';
part 'sort.dart';
part 'symbol.dart';
| 25.157895
| 77
| 0.732218
|
9feec2e06b180daf0b13736c95620fc13e2c8a50
| 194
|
py
|
Python
|
xue/classes/admin.py
|
team-xue/xue
|
e6bd9539803a2bf902f48b65a9df86356b5d46b2
|
[
"BSD-3-Clause"
] | 1
|
2015-11-23T02:33:07.000Z
|
2015-11-23T02:33:07.000Z
|
xue/classes/admin.py
|
team-xue/xue
|
e6bd9539803a2bf902f48b65a9df86356b5d46b2
|
[
"BSD-3-Clause"
] | null | null | null |
xue/classes/admin.py
|
team-xue/xue
|
e6bd9539803a2bf902f48b65a9df86356b5d46b2
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
from xue.classes.models import *
from django.contrib import admin
admin.site.register(Major)
admin.site.register(LogicalClass)
# vim:ai:et:ts=4:sw=4:sts=4:fenc=utf-8
| 17.636364
| 38
| 0.721649
|
b017e701989dd8430a526223e7bf37e20e7f6490
| 151
|
ps1
|
PowerShell
|
Examples/ImportHtml/DemoGraphics.ps1
|
edwardmjackson/ImportExcel
|
5629536f155806d1456e00f8165441542bfe164f
|
[
"Apache-2.0"
] | 1,858
|
2015-03-27T20:07:07.000Z
|
2022-03-31T17:05:56.000Z
|
Examples/ImportHtml/DemoGraphics.ps1
|
edwardmjackson/ImportExcel
|
5629536f155806d1456e00f8165441542bfe164f
|
[
"Apache-2.0"
] | 1,071
|
2015-04-01T01:48:57.000Z
|
2022-03-31T19:45:08.000Z
|
Examples/ImportHtml/DemoGraphics.ps1
|
edwardmjackson/ImportExcel
|
5629536f155806d1456e00f8165441542bfe164f
|
[
"Apache-2.0"
] | 392
|
2015-03-31T19:45:24.000Z
|
2022-03-24T21:33:16.000Z
|
try {Import-Module $PSScriptRoot\..\..\ImportExcel.psd1} catch {throw ; return}
Import-Html "http://en.wikipedia.org/wiki/Demographics_of_India" 4
| 37.75
| 80
| 0.741722
|
a44ca40bfa268f4b67b767db4d0954a52688722b
| 605
|
php
|
PHP
|
database/seeds/ClassroomsTableSeeder.php
|
luigi-smilzo/laravel-base-crud
|
ef5a83cfc1d125617058d076349057271d0c888e
|
[
"MIT"
] | 1
|
2020-07-14T17:26:07.000Z
|
2020-07-14T17:26:07.000Z
|
database/seeds/ClassroomsTableSeeder.php
|
luigi-smilzo/laravel-base-crud
|
ef5a83cfc1d125617058d076349057271d0c888e
|
[
"MIT"
] | 1
|
2021-02-02T19:40:51.000Z
|
2021-02-02T19:40:51.000Z
|
database/seeds/ClassroomsTableSeeder.php
|
luigi-smilzo/laravel-base-crud
|
ef5a83cfc1d125617058d076349057271d0c888e
|
[
"MIT"
] | null | null | null |
<?php
use Illuminate\Database\Seeder;
use Illuminate\Support\Facades\DB;
use Faker\Generator as Faker;
use App\Classroom;
class ClassroomsTableSeeder extends Seeder
{
/**
* Run the database seeds.
*
* @return void
*/
public function run(Faker $faker)
{
DB::table('classrooms')->truncate();
for ($i = 0; $i < 20; $i++)
{
$newClassroom = new Classroom();
$newClassroom->name = $faker->lastName();
$newClassroom->partecipants = $faker->numberBetween(15, 30);
$newClassroom->save();
}
}
}
| 20.862069
| 72
| 0.565289
|
3cdc0629903690aeda9ab89ed3ca76df6cb425aa
| 3,548
|
dart
|
Dart
|
example/lib/routes.dart
|
obadajasm/qlevar_router
|
3545e93d28ec81f297ff60fcb593dddd9fdea1d9
|
[
"MIT"
] | null | null | null |
example/lib/routes.dart
|
obadajasm/qlevar_router
|
3545e93d28ec81f297ff60fcb593dddd9fdea1d9
|
[
"MIT"
] | null | null | null |
example/lib/routes.dart
|
obadajasm/qlevar_router
|
3545e93d28ec81f297ff60fcb593dddd9fdea1d9
|
[
"MIT"
] | null | null | null |
import 'dart:ui';
import 'package:flutter/cupertino.dart';
import 'package:flutter/material.dart';
import 'package:qlevar_router/qlevar_router.dart';
import 'screens/dashboard.dart';
import 'screens/items.dart';
import 'screens/orders.dart';
import 'screens/store.dart';
import 'screens/tests_screens/multi_component_screen.dart';
class AppRoutes {
// Dashboard
static String dashboard = 'Dashboard';
static String dashboardMain = 'Dashboard Main';
static String items = 'Items';
static String tests = 'Tests';
// Items
static String itemsMain = 'Items Main';
static String itemsDetails = 'Items Details';
// Store
static String store = 'Store';
// Tests
static String testMultiSlash = 'Test Multi Slash';
static String testMultiComponent = 'Test Multi Component';
//Other
static String redirect = 'Redirect';
final routes = <QRouteBase>[
QRoute(
name: dashboard,
path: '/dashboard',
page: (childRouter) => DashboardScreen(childRouter),
children: [
QRoute(
name: dashboardMain,
path: '/',
page: (child) => DashboardContent()),
QRoute(
name: items,
path: '/items',
onInit: () => print('onInit Items'),
onDispose: () => print('onDispose Items'),
page: (child) => ItemsScreen(child),
children: [
QRoute(
name: itemsMain,
path: '/',
onInit: () => print('onInit Items Main'),
onDispose: () => print('onDispose Items Main'),
page: (child) => Container()),
QRoute(
name: itemsDetails,
path: '/details',
onInit: () => print('onInit Items Details'),
onDispose: () => print('onDispose Items Details'),
page: (c) => ItemDetailsScreen())
]),
OrdersRoutes(),
QRoute(
name: tests,
path: '/test',
page: (child) => Container(child: child),
children: [
QRoute(
name: testMultiSlash,
path: '/multi/slash/path',
page: (child) => Center(
child: Text(
'It Works',
style: TextStyle(fontSize: 22, color: Colors.yellow),
))),
QRoute(
name: testMultiComponent,
path: '/:number/:name',
page: (child) => TestMultiComponent()),
]),
]),
QRoute(
name: store,
path: '/store',
page: (childRouter) => StoreScreen(childRouter)),
QRoute(
name: redirect,
path: '/redirect',
redirectGuard: (path) => '/dashboard/items'),
];
}
class OrdersRoutes extends QRouteBuilder {
static String orders = 'Orders';
static String ordersMain = 'Orders Main';
static String ordersDetails = 'Orders Details';
@override
QRoute createRoute() => QRoute(
name: orders,
path: '/orders',
page: (child) => OrdersScreen(child),
children: [
QRoute(name: ordersMain, path: '/', page: (child) => Container()),
QRoute(
name: ordersDetails,
path: '/:orderId',
page: (child) => OrderDetails()),
]);
}
| 31.122807
| 79
| 0.505073
|
e0558176dc265d0e77bb1643450fe31f35a43c4c
| 3,053
|
c
|
C
|
tools-src/gnu/glibc/sysdeps/m68k/fpu/switch/switch.c
|
enfoTek/tomato.linksys.e2000.nvram-mod
|
2ce3a5217def49d6df7348522e2bfda702b56029
|
[
"FSFAP"
] | 80
|
2015-01-02T10:14:04.000Z
|
2021-06-07T06:29:49.000Z
|
tools-src/gnu/glibc/sysdeps/m68k/fpu/switch/switch.c
|
unforgiven512/tomato
|
96f09fab4929c6ddde5c9113f1b2476ad37133c4
|
[
"FSFAP"
] | 9
|
2015-05-14T11:03:12.000Z
|
2018-01-04T07:12:58.000Z
|
tools-src/gnu/glibc/sysdeps/m68k/fpu/switch/switch.c
|
unforgiven512/tomato
|
96f09fab4929c6ddde5c9113f1b2476ad37133c4
|
[
"FSFAP"
] | 69
|
2015-01-02T10:45:56.000Z
|
2021-09-06T07:52:13.000Z
|
/* Copyright (C) 1991, 1992, 1997 Free Software Foundation, Inc.
This file is part of the GNU C Library.
The GNU C Library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
The GNU C Library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with the GNU C Library; if not, write to the Free
Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
02111-1307 USA. */
#include <signal.h>
#include <68881-sw.h>
/* The signal that is sent when a 68881 instruction
is executed and there is no 68881. */
#ifndef TRAPSIG
#define TRAPSIG SIGILL
#endif
/* Zero if no 68881, one if we have a 68881, or -1 if we don't know yet. */
static int have_fpu = -1;
/* Signal handler for the trap that happens if we don't have a 68881. */
static void
trap (sig)
int sig;
{
have_fpu = 0;
}
/* This function is called by functions that want to switch.
The calling function must be a `struct switch_caller' in data space.
It determines whether a 68881 is present, and modifies its caller
to be a static jump to either the 68881 version or the soft version.
It then returns into the function it has chosen to do the work. */
void
__68881_switch (dummy)
int dummy;
{
void **return_address_location = &((void **) &dummy)[-1];
struct switch_caller *const caller
= (struct switch_caller *) (((short int *) *return_address_location) - 1);
if (have_fpu < 0)
{
/* Figure out whether or not we have a 68881. */
__sighandler_t handler = signal (TRAPSIG, trap);
if (handler == SIG_ERR)
/* We can't figure it out, so assume we don't have a 68881.
This assumption will never cause us any problems other than
lost performance, while the reverse assumption could cause
the program to crash. */
have_fpu = 0;
else
{
/* We set `have_fpu' to nonzero, and then execute a 68881
no-op instruction. If we have a 68881, this will do nothing.
If we don't have one, this will trap and the signal handler
will clear `have_fpu'. */
have_fpu = 1;
asm ("fnop");
/* Restore the old signal handler. */
(void) signal (TRAPSIG, handler);
}
}
/* Modify the caller to be a jump to the appropriate address. */
caller->insn = JMP;
caller->target = have_fpu ? caller->fpu : caller->soft;
/* Make the address we will return to be the target we have chosen.
Our return will match the `jsr' done by the caller we have
just modified, and it will be just as if that had instead
been a `jmp' to the new target. */
*return_address_location = caller->target;
}
| 34.693182
| 78
| 0.693744
|
434ebcc8d8aca8bf5d82d9b11fd2e79515f1dfcb
| 2,047
|
ts
|
TypeScript
|
types/typeform__embed/index.d.ts
|
jayden-chan/DefinitelyTyped
|
444d0c1530f686080c1ad3364a49a8d84a461d6d
|
[
"MIT"
] | 2
|
2020-07-31T23:57:42.000Z
|
2020-08-01T02:53:11.000Z
|
types/typeform__embed/index.d.ts
|
jayden-chan/DefinitelyTyped
|
444d0c1530f686080c1ad3364a49a8d84a461d6d
|
[
"MIT"
] | 7
|
2020-07-28T05:15:39.000Z
|
2020-07-29T06:54:38.000Z
|
types/typeform__embed/index.d.ts
|
jayden-chan/DefinitelyTyped
|
444d0c1530f686080c1ad3364a49a8d84a461d6d
|
[
"MIT"
] | 1
|
2020-07-31T12:05:25.000Z
|
2020-07-31T12:05:25.000Z
|
// Type definitions for typeform__embed 0.22
// Project: https://github.com/Typeform/embed
// Definitions by: Florian Merz <https://github.com/florianmrz>
// Gabriel Cangussu <https://github.com/gcangussu>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
export interface WidgetOptions {
opacity?: number;
buttonText?: string;
hideScrollbars?: boolean;
hideFooter?: boolean;
hideHeaders?: boolean;
onSubmit?: (event: OnSubmitEvent) => void;
onReady?: () => void;
disableTracking?: boolean;
}
export interface PopupOptions {
mode?: 'popup' | 'drawer_left' | 'drawer_right' | 'popover';
/**
* @deprecated Use `open: 'load'` instead
*/
autoOpen?: boolean;
/**
* Launch based on behavioral triggers
*/
open?: 'exit' | 'load' | 'scroll' | 'time';
/**
* Configuration for behavioral triggers. Based on open:
* - `exit`: exit threshold in pixels
* - `scroll`: % of page scrolled
* - `time`: time in milliseconds
*/
openValue?: number;
autoClose?: number;
hideScrollbars?: boolean;
hideFooter?: boolean;
hideHeaders?: boolean;
/**
* @deprecated Use `width` instead
*/
drawerWidth?: number;
/**
* Specify the width of the drawer or popup (only applies if using mode
* "drawer_left" or "drawer_right" or "popover")
*/
width?: number;
/**
* Specify the height of the popup (only applies if using mode "popover")
*/
height?: number;
onSubmit?: (event: OnSubmitEvent) => void;
onReady?: () => void;
onClose?: () => void;
container?: HTMLElement;
disableTracking?: boolean;
}
export interface OnSubmitEvent {
/** ID of the response */
response_id: string;
}
export interface PopupInstance {
open: () => void;
close: () => void;
}
export function makeWidget(element: HTMLElement, url: string, options?: WidgetOptions): void;
export function makePopup(url: string, options?: PopupOptions): PopupInstance;
| 30.102941
| 93
| 0.632145
|
e2adf16243065648b30e4ff7eb69a129c2038b2f
| 380
|
js
|
JavaScript
|
src/middlewares/multer.js
|
martuwilliner/grupo-1
|
604e40921a0304dd76c4d9b7e227e32ca22467b9
|
[
"CNRI-Python"
] | 1
|
2021-08-02T03:52:36.000Z
|
2021-08-02T03:52:36.000Z
|
src/middlewares/multer.js
|
martuwilliner/grupo-1
|
604e40921a0304dd76c4d9b7e227e32ca22467b9
|
[
"CNRI-Python"
] | null | null | null |
src/middlewares/multer.js
|
martuwilliner/grupo-1
|
604e40921a0304dd76c4d9b7e227e32ca22467b9
|
[
"CNRI-Python"
] | null | null | null |
const multer = require('multer');
const path = require('path');
module.exports = function (folder) {
const storage = multer.diskStorage({
destination: (req, file, cb) => cb(null,path.resolve(__dirname,'../../public/img',folder)),
filename: (req, file, cb) => cb(null,file.fieldname + Date.now() + path.extname(file.originalname))
})
return storage
}
| 34.545455
| 107
| 0.642105
|
af127aa2603d27e780beaa9063eacb83031764f0
| 405
|
py
|
Python
|
python-mundo3/ex104.py
|
abm-astro/estudos-python
|
c0dcd71489e528d445efa25d4986bf2fd08f8fe6
|
[
"MIT"
] | 1
|
2021-08-15T18:18:43.000Z
|
2021-08-15T18:18:43.000Z
|
python-mundo3/ex104.py
|
abm-astro/estudos-python
|
c0dcd71489e528d445efa25d4986bf2fd08f8fe6
|
[
"MIT"
] | null | null | null |
python-mundo3/ex104.py
|
abm-astro/estudos-python
|
c0dcd71489e528d445efa25d4986bf2fd08f8fe6
|
[
"MIT"
] | null | null | null |
def leiaint(num):
while True:
numero = str(input(num))
if numero.isnumeric() == False:
while numero.isnumeric() == False:
print('\033[1;31mERRO! Digite um número inteiro válido.\033[m')
numero = str(input(num))
return numero
print(15*'--')
n = leiaint('Digite um número: ')
print(f'\033[1;34mVocê acabou de digitar o número {n}.')
| 28.928571
| 79
| 0.57037
|
a9df199e12c676f48aa4c7f9ba9d7ab978f36990
| 4,789
|
php
|
PHP
|
src/Zebooka/PD/Assembler.php
|
zebooka/photos-downloader
|
56c94bf8a5718b307b8d1f48bf9f2034d09f50c6
|
[
"Unlicense",
"MIT"
] | 1
|
2018-04-25T12:59:28.000Z
|
2018-04-25T12:59:28.000Z
|
src/Zebooka/PD/Assembler.php
|
zebooka/photos-downloader
|
56c94bf8a5718b307b8d1f48bf9f2034d09f50c6
|
[
"Unlicense",
"MIT"
] | 5
|
2015-08-19T11:53:12.000Z
|
2015-10-02T05:29:46.000Z
|
src/Zebooka/PD/Assembler.php
|
zebooka/photos-downloader
|
56c94bf8a5718b307b8d1f48bf9f2034d09f50c6
|
[
"Unlicense",
"MIT"
] | null | null | null |
<?php
namespace Zebooka\PD;
class Assembler
{
private $configure;
private $hashinator;
/**
* @var FileBunch[]
*/
private $simulated = array();
public function __construct(Configure $configure, Hashinator $hashinator)
{
$this->configure = $configure;
$this->hashinator = $hashinator;
}
public function assemble(Tokens $tokens, FileBunch $fileBunch)
{
if (!$tokens->shot) {
$tokens->shot = 1;
$newBunchId = $this->assembleNewBunchId($tokens, $fileBunch);
if (!$this->bunchTaken($newBunchId, $fileBunch)) {
$tokens->shot = null;
} else {
$tokens->increaseShot();
}
}
while (true) {
$newBunchId = $this->assembleNewBunchId($tokens, $fileBunch);
if (!$this->bunchTaken($newBunchId, $fileBunch)) {
break;
}
$tokens->increaseShot();
}
if ($this->configure->simulate) {
$this->simulated[$newBunchId] = $fileBunch;
}
return $newBunchId;
}
private function assembleNewBunchId(Tokens $tokens, FileBunch $fileBunch)
{
$to = ((!$this->configure->isKeepInPlace() && file_exists($this->configure->to)) ? realpath($this->configure->to) : $this->configure->to);
if (Configure::KEEP_IN_PLACE === $this->configure->to) {
return $fileBunch->directory() . DIRECTORY_SEPARATOR . $tokens->assembleBasename();
} elseif ($this->configure->subDirectoriesStructure && $dir = $tokens->assembleDirectory($this->configure)) {
return $to . DIRECTORY_SEPARATOR . $dir . DIRECTORY_SEPARATOR . $tokens->assembleBasename();
} else {
return $to . DIRECTORY_SEPARATOR . $tokens->assembleBasename();
}
}
private function bunchTaken($newBunchId, FileBunch $fileBunch)
{
// find extensions of new bunch
$foundExtensions = $this->findExtensionsForBunchId($newBunchId);
// if nothing found - return false
if (!$foundExtensions) {
return false;
}
// we will always compare all files having same lowercased extensions
$intersect = array_intersect(
array_map('mb_strtolower', $foundExtensions),
array_map('mb_strtolower', $fileBunch->extensions())
);
// if intersect is empty - return true
if (!$intersect) {
return true;
}
$foundExtensionsIndex = array_reduce(
$foundExtensions,
function ($extensions, $extension) {
$lowercaseExtension = mb_strtolower($extension);
if (!isset($extensions[$lowercaseExtension])) {
$extensions[$lowercaseExtension] = array();
}
$extensions[$lowercaseExtension][] = $extension;
return $extensions;
},
array()
);
// if intersect is of same files (hashes), return false
foreach ($fileBunch->extensions() as $extension) {
$lowercaseExtension = mb_strtolower($extension);
if (!in_array($lowercaseExtension, $intersect)) {
continue;
}
$oldFile = $fileBunch->bunchId() . '.' . $extension;
foreach ($foundExtensionsIndex[$lowercaseExtension] as $foundExtension) {
$newFile = $newBunchId . '.' . $foundExtension;
if (file_exists($newFile) && !is_file($newFile)) {
return true;
} elseif (is_file($newFile) && !$this->hashinator->equal($newFile, $oldFile)) {
return true;
} elseif (!file_exists($newFile) && isset($this->simulated[$newBunchId])
&& !$this->hashinator->equal($this->simulated[$newBunchId]->bunchId() . '.' . $foundExtension, $oldFile)
) {
return true;
}
}
}
return false;
}
private function findExtensionsForBunchId($bunchId)
{
$extensions = array();
if (is_dir(dirname($bunchId))) {
foreach (new \DirectoryIterator(dirname($bunchId)) as $di) {
/** @var \DirectoryIterator $di */
if (!$di->isDot() && preg_match('/^' . preg_quote(basename($bunchId), '/') . '\\.([^\\.]+)$/', $di->getBasename())) {
$extensions[] = $di->getExtension();
}
}
}
if ($this->configure->simulate && isset($this->simulated[$bunchId])) {
$extensions = array_unique(array_merge($extensions, $this->simulated[$bunchId]->extensions()));
}
return $extensions;
}
}
| 36.557252
| 146
| 0.540405
|
1a9136956272b4df358ce0eef6e16c62f6612b4e
| 5,867
|
py
|
Python
|
Ryu_Application/aclswitch/config_loader.py
|
Bairdo/ACLSwitch
|
27376d201444b214887b7a9a2c8b5df6588925f5
|
[
"Apache-2.0"
] | 1
|
2016-12-20T05:55:19.000Z
|
2016-12-20T05:55:19.000Z
|
Ryu_Application/aclswitch/config_loader.py
|
Bairdo/ACLSwitch-2
|
27376d201444b214887b7a9a2c8b5df6588925f5
|
[
"Apache-2.0"
] | null | null | null |
Ryu_Application/aclswitch/config_loader.py
|
Bairdo/ACLSwitch-2
|
27376d201444b214887b7a9a2c8b5df6588925f5
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2015 Jarrod N. Bakker
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Module imports
import json
import json_templates
import logging
__author__ = "Jarrod N. Bakker"
__status__ = "Development"
class ConfigLoader:
"""An object to load configuration parameters.
"""
def __init__(self, policy_file, rule_file, time_rule_file):
self._policy_file = policy_file
self._rule_file = rule_file
self._time_rule_file = time_rule_file
# Logging configuration
min_lvl = logging.DEBUG
console_handler = logging.StreamHandler()
console_handler.setLevel(min_lvl)
#formatter = logging.Formatter("%(asctime)s - %(levelname)s - "
# "%(name)s - %(message)s")
formatter = logging.Formatter("%(levelname)s - %(name)s - %("
"message)s")
console_handler.setFormatter(formatter)
self._logging_config = {"min_lvl": min_lvl, "propagate":
False, "handler": console_handler}
self._logging = logging.getLogger(__name__)
self._logging.setLevel(self._logging_config["min_lvl"])
self._logging.propagate = self._logging_config["propagate"]
self._logging.addHandler(self._logging_config["handler"])
def get_logging_config(self):
"""Return the configuration for logging.
:return: Dict with the configuration.
"""
return self._logging_config
def load_policies(self):
"""Load the policy domains from file.
:return: A list of policies to create.
"""
policies = []
try:
buf_in = open(self._policy_file)
self._logging.info("Reading config from file: %s",
self._policy_file)
for line in buf_in:
if line[0] == "#" or not line.strip():
continue # Skip file comments and empty lines
try:
policy = json.loads(line)
except ValueError:
self._logging.warning("%s could not be parsed as "
"JSON.", line)
continue
if not json_templates.check_policy_json(policy):
self._logging.warning("%s is not valid policy "
"JSON", policy)
continue
self._logging.debug("Read policy: %s", policy)
policies.append(policy["policy"])
except IOError:
self._logging.error("Unable to read from file: %s",
self._policy_file)
finally:
buf_in.close()
return policies
def load_rules(self):
"""Load the rules from file.
:return: A list of rules to create.
"""
rules = []
try:
buf_in = open(self._rule_file)
self._logging.info("Reading config from file: %s",
self._rule_file)
for line in buf_in:
if line[0] == "#" or not line.strip():
continue # Skip file comments and empty lines
try:
rule = json.loads(line)
except ValueError:
self._logging.warning("%s could not be parsed as "
"JSON.", line)
continue
if not json_templates.check_rule_creation_json(rule):
self._logging.warning("%s is not valid rule "
"JSON", rule)
continue
self._logging.debug("Read rule: %s", rule)
rules.append(rule)
buf_in.close()
except IOError:
self._logging.error("Unable to read from file: %s",
self._rule_file)
finally:
buf_in.close()
return rules
def load_time_rules(self):
"""Load the time enforced rules from file.
:return: A list of time enforced rules to create.
"""
time_rules = []
try:
buf_in = open(self._time_rule_file)
self._logging.info("Reading config from file: %s",
self._time_rule_file)
for line in buf_in:
if line[0] == "#" or not line.strip():
continue # Skip file comments and empty lines
try:
rule = json.loads(line)
except ValueError:
self._logging.warning("%s could not be parsed as "
"JSON.", line)
continue
if not json_templates.check_rule_creation_json(rule):
self._logging.warning("%s is not valid time rule "
"JSON", rule)
continue
self._logging.debug("Read rule: %s", rule)
time_rules.append(rule)
buf_in.close()
except IOError:
self._logging.error("Unable to read from file: %s",
self._rule_file)
finally:
buf_in.close()
return time_rules
| 38.598684
| 74
| 0.527016
|
c99b344c9c8818f0a13a19a6e6a71063dd9c364c
| 427
|
ps1
|
PowerShell
|
src/tasks/ToolInstaller/Modules/PowerBI/Scripts/Get-PowerBIDatasetDataSources.ps1
|
rexbenny/powerbi-build-tools
|
91552c64e45cc4a3872a1e7ba00052bf833e611c
|
[
"MIT"
] | 9
|
2020-03-11T17:37:02.000Z
|
2022-02-24T15:04:07.000Z
|
src/tasks/ToolInstaller/Modules/PowerBI/Scripts/Get-PowerBIDatasetDataSources.ps1
|
rexbenny/powerbi-build-tools
|
91552c64e45cc4a3872a1e7ba00052bf833e611c
|
[
"MIT"
] | 13
|
2020-03-11T03:19:17.000Z
|
2022-03-18T12:39:46.000Z
|
src/tasks/ToolInstaller/Modules/PowerBI/Scripts/Get-PowerBIDatasetDataSources.ps1
|
rexbenny/powerbi-build-tools
|
91552c64e45cc4a3872a1e7ba00052bf833e611c
|
[
"MIT"
] | 5
|
2020-07-23T06:39:37.000Z
|
2021-12-25T08:28:45.000Z
|
Function Get-PowerBIDatasetDataSources
{
Param
(
[Parameter(Mandatory = $false)][string]$Group,
[Parameter(Mandatory = $true)][string]$Dataset
)
$GroupId = Get-PowerBIGroup -Group $Group -Id
$DatasetId = Get-PowerBIDataset -Group $GroupId -Dataset $Dataset -Id
$Url = Get-PowerBIUrl -Group $GroupId -Url "datasets/$DatasetId/datasources"
$Results = Invoke-PowerBI -Method Get -Url $Url
return $Results.value
}
| 26.6875
| 77
| 0.725995
|
38732551d813d57e3a9d08ef037285bbdbad09d0
| 310
|
php
|
PHP
|
tests/Mocks/MyCustomDatapackage.php
|
jonrmitchell/datapackage-php
|
48e73fc9b9878fa7bb624f5e9dd0173962077fad
|
[
"MIT"
] | 9
|
2017-04-17T20:18:35.000Z
|
2021-07-04T03:26:34.000Z
|
tests/Mocks/MyCustomDatapackage.php
|
jonrmitchell/datapackage-php
|
48e73fc9b9878fa7bb624f5e9dd0173962077fad
|
[
"MIT"
] | 53
|
2017-03-23T06:59:47.000Z
|
2021-08-29T16:15:24.000Z
|
tests/Mocks/MyCustomDatapackage.php
|
jonrmitchell/datapackage-php
|
48e73fc9b9878fa7bb624f5e9dd0173962077fad
|
[
"MIT"
] | 10
|
2017-03-23T06:55:48.000Z
|
2021-02-23T16:34:49.000Z
|
<?php
namespace frictionlessdata\datapackage\tests\Mocks;
use frictionlessdata\datapackage\Datapackages\DefaultDatapackage;
class MyCustomDatapackage extends DefaultDatapackage
{
public static function handlesDescriptor($descriptor)
{
return isset($descriptor->myCustomDatapackage);
}
}
| 22.142857
| 65
| 0.790323
|
aa6f3870a29fb7c36fcea58761f17c44b4f74d1f
| 98
|
rb
|
Ruby
|
web-app/app/controllers/api/base_controller.rb
|
shapeable/prototype-adpq
|
f2b16b42a0ac6589cd1eeda8741d8b68400cba8a
|
[
"PostgreSQL",
"Ruby",
"MIT"
] | null | null | null |
web-app/app/controllers/api/base_controller.rb
|
shapeable/prototype-adpq
|
f2b16b42a0ac6589cd1eeda8741d8b68400cba8a
|
[
"PostgreSQL",
"Ruby",
"MIT"
] | 79
|
2017-02-21T02:05:44.000Z
|
2017-03-03T21:56:11.000Z
|
web-app/app/controllers/api/base_controller.rb
|
shapeable/prototype-adpq
|
f2b16b42a0ac6589cd1eeda8741d8b68400cba8a
|
[
"PostgreSQL",
"Ruby",
"MIT"
] | 5
|
2017-03-02T21:24:02.000Z
|
2017-03-22T00:17:37.000Z
|
class Api::BaseController < ApplicationController
include Swagger::Docs::ImpotentMethods
end
| 24.5
| 49
| 0.806122
|
c4b3615457b213dec9d96b0c8eaaf5b4af20be21
| 299
|
cpp
|
C++
|
Codeforces Online Judge Solve/dice.cpp
|
Remonhasan/programming-solve
|
5a4ac8c738dd361e1c974162e0eaebbaae72fd80
|
[
"Apache-2.0"
] | null | null | null |
Codeforces Online Judge Solve/dice.cpp
|
Remonhasan/programming-solve
|
5a4ac8c738dd361e1c974162e0eaebbaae72fd80
|
[
"Apache-2.0"
] | null | null | null |
Codeforces Online Judge Solve/dice.cpp
|
Remonhasan/programming-solve
|
5a4ac8c738dd361e1c974162e0eaebbaae72fd80
|
[
"Apache-2.0"
] | null | null | null |
#include<bits/stdc++.h>
using namespace std;
#define ll long long
int main ()
{
ll m,n,k;
cin>>n;
while(n--)
{
cin>>m;
if(m==2||m==3||m==4||m==5||m==6||m==7) cout<<"1"<<endl;
else
{
k=m/2; cout<<k<<endl;
}
}
}
| 16.611111
| 64
| 0.381271
|
45a88e8ad265240d0c726cd00d432c7e87e9a5a9
| 667
|
py
|
Python
|
Random Code Jam Solutions/Moist.py
|
gsidhu/Code_Jam
|
990c85c9036a8b5a87b7a59395f0a710303e69b2
|
[
"MIT"
] | null | null | null |
Random Code Jam Solutions/Moist.py
|
gsidhu/Code_Jam
|
990c85c9036a8b5a87b7a59395f0a710303e69b2
|
[
"MIT"
] | null | null | null |
Random Code Jam Solutions/Moist.py
|
gsidhu/Code_Jam
|
990c85c9036a8b5a87b7a59395f0a710303e69b2
|
[
"MIT"
] | null | null | null |
def moist(filename):
import math
file = open(filename)
out = open("output.txt", "w+")
testcases = int(file.readline())
for i in range(0, testcases):
names = []
count = 0
M = int(file.readline())
for j in range(0, M):
names.append(file.readline().strip('\n'))
for k in range(0, M-1):
if names[k] > names[k+1]:
count+= 1
var = names[k]
names[k] = names[k+1]
names[k+1] = var
result = ("Case #" + str(i+1) + ": " + str(count))
out.write(result + "\n")
file.close()
out.close()
| 30.318182
| 59
| 0.442279
|
ef27d3070405fd88c83a76fe25062ded922c2cb2
| 1,325
|
h
|
C
|
model/v1_custom_resource_definition_names.h
|
ityuhui/client-c
|
1d30380d7ba0fe9b5e97626e0f7507be4ce8f96d
|
[
"curl",
"Apache-2.0"
] | null | null | null |
model/v1_custom_resource_definition_names.h
|
ityuhui/client-c
|
1d30380d7ba0fe9b5e97626e0f7507be4ce8f96d
|
[
"curl",
"Apache-2.0"
] | null | null | null |
model/v1_custom_resource_definition_names.h
|
ityuhui/client-c
|
1d30380d7ba0fe9b5e97626e0f7507be4ce8f96d
|
[
"curl",
"Apache-2.0"
] | null | null | null |
/*
* v1_custom_resource_definition_names.h
*
* CustomResourceDefinitionNames indicates the names to serve this CustomResourceDefinition
*/
#ifndef _v1_custom_resource_definition_names_H_
#define _v1_custom_resource_definition_names_H_
#include <string.h>
#include "../external/cJSON.h"
#include "../include/list.h"
#include "../include/keyValuePair.h"
typedef struct v1_custom_resource_definition_names_t {
list_t *categories; //primitive container
char *kind; // string
char *listKind; // string
char *plural; // string
list_t *shortNames; //primitive container
char *singular; // string
} v1_custom_resource_definition_names_t;
v1_custom_resource_definition_names_t *v1_custom_resource_definition_names_create(
list_t *categories,
char *kind,
char *listKind,
char *plural,
list_t *shortNames,
char *singular
);
void v1_custom_resource_definition_names_free(v1_custom_resource_definition_names_t *v1_custom_resource_definition_names);
v1_custom_resource_definition_names_t *v1_custom_resource_definition_names_parseFromJSON(cJSON *v1_custom_resource_definition_namesJSON);
cJSON *v1_custom_resource_definition_names_convertToJSON(v1_custom_resource_definition_names_t *v1_custom_resource_definition_names);
#endif /* _v1_custom_resource_definition_names_H_ */
| 30.113636
| 137
| 0.813585
|
cc2ac3dda5e976558ce753fc7bc98275f0592c9f
| 812
|
rb
|
Ruby
|
spec/models/hanuman/answer_type_spec.rb
|
wildnote/hanuman
|
c72883c0d5afe34a3b3793ca0987680891eb2ad7
|
[
"MIT"
] | 4
|
2018-11-14T00:41:33.000Z
|
2019-06-22T12:36:25.000Z
|
spec/models/hanuman/answer_type_spec.rb
|
wildnote/hanuman
|
c72883c0d5afe34a3b3793ca0987680891eb2ad7
|
[
"MIT"
] | 19
|
2018-12-20T17:14:06.000Z
|
2022-02-21T21:40:32.000Z
|
spec/models/hanuman/answer_type_spec.rb
|
wildnote/hanuman
|
c72883c0d5afe34a3b3793ca0987680891eb2ad7
|
[
"MIT"
] | 2
|
2018-06-07T08:08:40.000Z
|
2020-04-28T09:55:03.000Z
|
require 'spec_helper'
module Hanuman
RSpec.describe AnswerType, type: :model do
describe 'Validations' do
it 'has a valid factory' do
expect(build(:answer_type)).to be_valid
end
it { is_expected.to validate_presence_of(:name) }
it { is_expected.to validate_uniqueness_of(:name) }
it do
is_expected.to validate_inclusion_of(:status)
.in_array(Hanuman::AnswerType::ANSWER_CHOICE_STATUSES)
end
it do
is_expected.to validate_inclusion_of(:group_type)
.in_array(Hanuman::AnswerType::GROUP_TYPES)
.allow_blank
end
end
describe 'Relations' do
it { is_expected.to have_many(:questions).dependent(:restrict_with_exception) }
end
end
end
| 30.074074
| 85
| 0.626847
|
da409c50abeb681664a9beb60a354952fee7cb82
| 1,777
|
php
|
PHP
|
Framework/Library/Cache/Driver/Predis.php
|
yumancang/Rosergal
|
5e72f727a490f58c011509712181b6e6d148b207
|
[
"MIT"
] | 1
|
2020-04-10T08:50:51.000Z
|
2020-04-10T08:50:51.000Z
|
Framework/Library/Cache/Driver/Predis.php
|
actors315/Rosergal
|
6dad5e5655c1f8e2eff13156adce41fb071a22e2
|
[
"MIT"
] | 1
|
2019-01-23T12:00:19.000Z
|
2019-01-23T12:00:19.000Z
|
Framework/Library/Cache/Driver/Predis.php
|
actors315/Rosergal
|
6dad5e5655c1f8e2eff13156adce41fb071a22e2
|
[
"MIT"
] | 1
|
2019-01-21T08:12:25.000Z
|
2019-01-21T08:12:25.000Z
|
<?php
namespace Twinkle\Library\Cache\Driver;
/**
* Redis - PRedis composer包
* 只做应用层最原始数据的
* JSON或者序列化的操作在调用之前执行
*
* @author yumancang
*
* */
use Twinkle\Library\Cache\CacheAbstract;
class Predis extends CacheAbstract
{
public function __construct(array $config)
{
parent::__construct($config);
switch ($config['mode']) {
case 'predis':
goto PREDIS;
case 'predis_replication':
goto PREDIS_REPLICATION;
case 'predis_sentinels':
goto PREDIS_SENTINELS;
case 'predis_cluser':
goto PREDIS_CLUSER;
}
PREDIS : {
return true;
}
PREDIS_REPLICATION : {
return true;
}
PREDIS_SENTINELS : {
$this->cache = new \Predis\Client($config['config']['sentinels'], $config['config']['options']);
return true;
}
PREDIS_CLUSER : {
return true;
}
}
public function set($key, $val, $expire = 0)
{
$this->cache->set($this->getKey($key), $val);
if ($expire > 0) {
$this->cache->expire($this->getKey($key), $expire);
}
}
public function get($key)
{
return $this->cache->get($this->getKey($key));
}
public function hset($key, $field, $value)
{
return $this->cache->hset($this->getKey($key), $field, $value);
}
public function hget($key, $field)
{
return $this->cache->hget($this->getKey($key), $field);
}
public function delete($key)
{
return $this->cache->del($this->getKey($key));
}
public function getKey($key)
{
return $key;
}
}
| 22.782051
| 108
| 0.513225
|
93cbde9cbf7d7d94bd8ec90c172b972cf5a368fd
| 1,191
|
cs
|
C#
|
WTalk.Universal/Model/Message.cs
|
madagaga/Wtalk
|
70f80d5d3053fe81fe07acc2a326ab5d1ef598a6
|
[
"MIT"
] | 25
|
2016-01-11T12:39:35.000Z
|
2022-02-20T18:31:04.000Z
|
WTalk.Universal/Model/Message.cs
|
madagaga/Wtalk
|
70f80d5d3053fe81fe07acc2a326ab5d1ef598a6
|
[
"MIT"
] | 9
|
2016-05-14T18:33:33.000Z
|
2017-03-03T16:33:59.000Z
|
WTalk.Universal/Model/Message.cs
|
madagaga/Wtalk
|
70f80d5d3053fe81fe07acc2a326ab5d1ef598a6
|
[
"MIT"
] | 13
|
2016-01-11T00:58:12.000Z
|
2018-05-24T09:08:32.000Z
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using WTalk.Core.ProtoJson.Schema;
using WTalk.Core.Utils;
using WTalk.Universal.Observable;
namespace WTalk.Universal.Model
{
public class Message : ObservableObject
{
Event _event;
public Message()
{
}
internal Message(WTalk.Core.ProtoJson.Schema.Event chatEvent)
{
_event = chatEvent;
OnPropertyChanged(nameof(Content));
}
public bool IncomingMessage { get { return _event.sender_id.gaia_id != _event.self_event_state.user_id.gaia_id; } }
public string SenderPhotoUrl { get { return FileCache.Current.Get(_event.sender_id.gaia_id); } }
//public Enums.MessageType Type { get; internal set; }
public string Content { get { return string.Join<string>("\r\n", _event.chat_message.message_content.segment.Select(s => s.text)); } }
public DateTime MessageDate { get { return _event.timestamp.FromUnixTime(); } }
public string LastSegment { get { return _event.chat_message.message_content.segment.Last().text; } }
}
}
| 31.342105
| 142
| 0.674223
|
8c02e032707c61e6425158bd349f20cabdd215ec
| 3,661
|
cs
|
C#
|
src/Http/Http.Results/src/PhysicalFileResult.cs
|
MCCshreyas/aspnetcore
|
132c2315ea435d1c20e00fa6cc80a0dfc548ff4c
|
[
"MIT"
] | 2
|
2021-12-29T20:06:16.000Z
|
2022-03-08T07:29:22.000Z
|
src/Http/Http.Results/src/PhysicalFileResult.cs
|
MCCshreyas/aspnetcore
|
132c2315ea435d1c20e00fa6cc80a0dfc548ff4c
|
[
"MIT"
] | 80
|
2021-05-18T01:02:59.000Z
|
2022-03-28T10:06:22.000Z
|
src/Http/Http.Results/src/PhysicalFileResult.cs
|
stefannikolei/AspNetCore
|
b55573c8a5195836bfcc77730b80c460588b70b4
|
[
"MIT"
] | 1
|
2022-02-20T17:25:32.000Z
|
2022-02-20T17:25:32.000Z
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using Microsoft.AspNetCore.Internal;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
namespace Microsoft.AspNetCore.Http.Result;
/// <summary>
/// A <see cref="PhysicalFileResult"/> on execution will write a file from disk to the response
/// using mechanisms provided by the host.
/// </summary>
internal sealed partial class PhysicalFileResult : FileResult, IResult
{
/// <summary>
/// Creates a new <see cref="PhysicalFileResult"/> instance with
/// the provided <paramref name="fileName"/> and the provided <paramref name="contentType"/>.
/// </summary>
/// <param name="fileName">The path to the file. The path must be an absolute path.</param>
/// <param name="contentType">The Content-Type header of the response.</param>
public PhysicalFileResult(string fileName, string? contentType)
: base(contentType)
{
FileName = fileName;
}
/// <summary>
/// Gets or sets the path to the file that will be sent back as the response.
/// </summary>
public string FileName { get; }
// For testing
public Func<string, FileInfoWrapper> GetFileInfoWrapper { get; init; } =
static path => new FileInfoWrapper(path);
public Task ExecuteAsync(HttpContext httpContext)
{
var fileInfo = GetFileInfoWrapper(FileName);
if (!fileInfo.Exists)
{
throw new FileNotFoundException($"Could not find file: {FileName}", FileName);
}
var logger = httpContext.RequestServices.GetRequiredService<ILogger<PhysicalFileResult>>();
Log.ExecutingFileResult(logger, this, FileName);
var lastModified = LastModified ?? fileInfo.LastWriteTimeUtc;
var fileResultInfo = new FileResultInfo
{
ContentType = ContentType,
EnableRangeProcessing = EnableRangeProcessing,
EntityTag = EntityTag,
FileDownloadName = FileDownloadName,
LastModified = lastModified,
};
var (range, rangeLength, serveBody) = FileResultHelper.SetHeadersAndLog(
httpContext,
fileResultInfo,
fileInfo.Length,
EnableRangeProcessing,
lastModified,
EntityTag,
logger);
if (!serveBody)
{
return Task.CompletedTask;
}
if (range != null && rangeLength == 0)
{
return Task.CompletedTask;
}
var response = httpContext.Response;
if (!Path.IsPathRooted(FileName))
{
throw new NotSupportedException($"Path '{FileName}' was not rooted.");
}
if (range != null)
{
FileResultHelper.Log.WritingRangeToBody(logger);
}
var offset = 0L;
var count = (long?)null;
if (range != null)
{
offset = range.From ?? 0L;
count = rangeLength;
}
return response.SendFileAsync(
FileName,
offset: offset,
count: count);
}
internal readonly struct FileInfoWrapper
{
public FileInfoWrapper(string path)
{
var fileInfo = new FileInfo(path);
Exists = fileInfo.Exists;
Length = fileInfo.Length;
LastWriteTimeUtc = fileInfo.LastWriteTimeUtc;
}
public bool Exists { get; init; }
public long Length { get; init; }
public DateTimeOffset LastWriteTimeUtc { get; init; }
}
}
| 30.508333
| 99
| 0.611582
|
bb5c9596a2d18c7d3c875cafc9042022f7e1730f
| 1,381
|
cs
|
C#
|
src/ShardingCore/Sharding/Enumerators/StreamMergeSync/StreamMergeEnumerator.cs
|
zz110/sharding-core
|
a15a4f61af0c3a0703b3a86427e3d4289de8a833
|
[
"Apache-2.0"
] | 1
|
2021-08-25T00:23:35.000Z
|
2021-08-25T00:23:35.000Z
|
src/ShardingCore/Sharding/Enumerators/StreamMergeSync/StreamMergeEnumerator.cs
|
zz110/sharding-core
|
a15a4f61af0c3a0703b3a86427e3d4289de8a833
|
[
"Apache-2.0"
] | null | null | null |
src/ShardingCore/Sharding/Enumerators/StreamMergeSync/StreamMergeEnumerator.cs
|
zz110/sharding-core
|
a15a4f61af0c3a0703b3a86427e3d4289de8a833
|
[
"Apache-2.0"
] | null | null | null |
using System;
using System.Collections;
using System.Collections.Generic;
namespace ShardingCore.Sharding.Enumerators.StreamMergeSync
{
/*
* @Author: xjm
* @Description:
* @Date: Saturday, 14 August 2021 21:25:50
* @Email: 326308290@qq.com
*/
public class StreamMergeEnumerator<T>:IStreamMergeEnumerator<T>
{
private readonly IEnumerator<T> _source;
private bool skip;
public StreamMergeEnumerator(IEnumerator<T> source)
{
_source = source;
skip = true;
}
public bool MoveNext()
{
if (skip)
{
skip = false;
return null != _source.Current;
}
return _source.MoveNext();
}
public void Reset()
{
throw new NotImplementedException();
}
object IEnumerator.Current => Current;
public T Current => skip?default:_source.Current;
public bool SkipFirst()
{
if (skip)
{
skip = false;
return true;
}
return false;
}
public bool HasElement()
{
return null != _source.Current;
}
public T ReallyCurrent => _source.Current;
public void Dispose()
{
_source?.Dispose();
}
}
}
| 21.920635
| 67
| 0.517741
|
34c7794d99e8fdefadcbcdae1314621ab0cc0ace
| 11,263
|
lua
|
Lua
|
translate.lua
|
arturgontijo/OpenNMT
|
3ab15e928a438be119e655dedb8416b1b4d62428
|
[
"MIT"
] | 2,573
|
2016-10-25T00:19:15.000Z
|
2022-03-29T14:39:52.000Z
|
translate.lua
|
arturgontijo/OpenNMT
|
3ab15e928a438be119e655dedb8416b1b4d62428
|
[
"MIT"
] | 466
|
2016-11-22T16:54:32.000Z
|
2021-05-05T16:22:38.000Z
|
translate.lua
|
arturgontijo/OpenNMT
|
3ab15e928a438be119e655dedb8416b1b4d62428
|
[
"MIT"
] | 572
|
2016-11-17T18:31:46.000Z
|
2022-03-15T06:18:17.000Z
|
require('onmt.init')
local tokenizer = require 'tools.utils.tokenizer'
local BPE = require ('tools.utils.BPE')
local cmd = onmt.utils.ExtendedCmdLine.new('translate.lua')
local options = {
{
'-src', '',
[[Source sequences to translate.]],
{
valid = onmt.utils.ExtendedCmdLine.fileExists
}
},
{
'-tgt', '',
[[Optional true target sequences.]]
},
{
'-output', 'pred.txt',
[[Output file.]]
},
{
'-save_attention', '',
[[Optional attention output file.]]
},
{
'-batch_size', 30,
[[Batch size.]],
{
valid = onmt.utils.ExtendedCmdLine.isInt(1)
}
},
{
'-idx_files', false,
[[If set, source and target files are 'key value' with key match between source and target.]]
},
{
'-detokenize_output', false,
[[Detokenize output.]]
}
}
cmd:setCmdLineOptions(options, 'Data')
onmt.translate.Translator.declareOpts(cmd)
tokenizer.declareOpts(cmd)
-- insert on the fly the option depending if there is a hook selected
onmt.utils.HookManager.updateOpt(arg, cmd)
-- expand options depending on source or target (tokenization, mpreprocessing)
onmt.translate.Translator.expandOpts(cmd, "bitext")
onmt.utils.Cuda.declareOpts(cmd)
onmt.utils.HookManager.declareOpts(cmd)
onmt.utils.Logger.declareOpts(cmd)
cmd:text('')
cmd:text('Other options')
cmd:text('')
cmd:option('-time', false, [[Measure average translation time.]])
local function reportScore(name, scoreTotal, wordsTotal)
_G.logger:info(name .. " AVG SCORE: %.2f, " .. name .. " PPL: %.2f",
scoreTotal / wordsTotal,
math.exp(-scoreTotal/wordsTotal))
end
local function main()
local opt = cmd:parse(arg)
_G.logger = onmt.utils.Logger.new(opt.log_file, opt.disable_logs, opt.log_level, opt.log_tag)
_G.profiler = onmt.utils.Profiler.new()
_G.hookManager = onmt.utils.HookManager.new(opt)
onmt.utils.Cuda.init(opt)
local translator = onmt.translate.Translator.new(opt)
local srcReader = onmt.utils.FileReader.new(opt.src, opt.idx_files, translator:srcFeat())
local srcBatch = {}
local srcIdBatch = {}
-- tokenization options
-- tokenization and preprocessing options
local optTok = { {}, {} }
local optMPr = { {}, {} }
local bpes = {}
for k, v in pairs(opt) do
if k:sub(1,4) == 'tok_' then
local idx = 1
if k:sub(5, 8) == 'tgt_' then
idx = 2
k = k:sub(9)
elseif k:sub(5,8) == 'src_' then
k = k:sub(9)
else
k = k:sub(5)
end
optTok[idx][k] = v
end
if k:sub(1,4) == 'mpr_' then
local idx = 1
if k:sub(5, 8) == 'tgt_' then
idx = 2
k = k:sub(9)
elseif k:sub(5,8) == 'src_' then
k = k:sub(9)
else
k = k:sub(5)
end
optMPr[idx][k] = v
end
end
if opt.tok_src_bpe_model ~= '' then
local myopt = {}
myopt.bpe_model = opt.tok_src_bpe_model
myopt.bpe_EOT_marker = opt.tok_src_bpe_EOT_marker
myopt.bpe_BOT_marker = opt.tok_src_bpe_BOT_marker
myopt.joiner_new = opt.tok_src_joiner_new
myopt.joiner_annotate = opt.tok_src_joiner_annotate
myopt.bpe_mode = opt.tok_src_bpe_mode
myopt.bpe_case_insensitive = opt.tok_src_bpe_case_insensitive
bpes[1] = BPE.new(myopt)
end
if opt.tok_tgt_bpe_model ~= '' then
local myopt = {}
myopt.bpe_model = opt.tok_tgt_bpe_model
myopt.bpe_EOT_marker = opt.tok_tgt_bpe_EOT_marker
myopt.bpe_BOT_marker = opt.tok_tgt_bpe_BOT_marker
myopt.joiner_new = opt.tok_tgt_joiner_new
myopt.joiner_annotate = opt.tok_sgt_joiner_annotate
myopt.bpe_mode = opt.tok_tgt_bpe_mode
myopt.bpe_case_insensitive = opt.tok_tgt_bpe_case_insensitive
bpes[2] = BPE.new(myopt)
end
for i = 1, 2 do
_G.logger:info("Using on-the-fly '"..optTok[i]["mode"].."' tokenization for input "..i)
end
-- if source features - no tokenization
if translator:srcFeat() then
optTok[1] = nil
end
local goldReader
local goldBatch
local withGoldScore = opt.tgt:len() > 0
local withAttention = opt.save_attention:len() > 0
if withGoldScore then
goldReader = onmt.utils.FileReader.new(opt.tgt, opt.idx_files)
goldBatch = {}
end
local outFile = onmt.utils.Error.assert(io.open(opt.output, 'w'))
local attFile
if withAttention then
attFile = onmt.utils.Error.assert(io.open(opt.save_attention, 'w'))
end
local sentId = 1
local batchId = 1
local predScoreTotal = 0
local predWordsTotal = 0
local goldScoreTotal = 0
local goldWordsTotal = 0
local globalUnkCountSrc = 0
local globalTotalCountSrc = 0
local globalUnkCountTgt = 0
local globalTotalCountTgt = 0
local timer
if opt.time then
timer = torch.Timer()
timer:stop()
timer:reset()
end
while true do
local srcSeq, srcSeqId = srcReader:next(false)
local goldOutputSeq
if withGoldScore then
goldOutputSeq = goldReader:next(false)
if goldOutputSeq then
goldOutputSeq = _G.hookManager:call("mpreprocess", optMPr[2], goldOutputSeq) or goldOutputSeq
goldOutputSeq = tokenizer.tokenize(optTok[2], goldOutputSeq, bpes[2])
end
end
if srcSeq then
if srcSeq:len() > 0 then
srcSeq = _G.hookManager:call("mpreprocess", optMPr[1], srcSeq) or srcSeq
if optTok[1] then
srcSeq = tokenizer.tokenize(optTok[1], srcSeq, bpes[1])
end
else
srcSeq = {}
end
table.insert(srcBatch, translator:buildInput(srcSeq))
table.insert(srcIdBatch, srcSeqId)
if withGoldScore then
table.insert(goldBatch, translator:buildInputGold(goldOutputSeq))
end
elseif #srcBatch == 0 then
break
end
if srcSeq == nil or #srcBatch == opt.batch_size then
if opt.time then
timer:resume()
end
local results, unkCountSrc, totalCountSrc = translator:translate(srcBatch, goldBatch)
globalUnkCountSrc = globalUnkCountSrc + unkCountSrc;
globalTotalCountSrc = globalTotalCountSrc + totalCountSrc
if opt.time then
timer:stop()
end
for b = 1, #results do
if (srcBatch[b].words and #srcBatch[b].words == 0
or srcBatch[b].vectors and srcBatch[b].vectors:dim() == 0) then
_G.logger:warning('Line ' .. sentId .. ' is empty.')
outFile:write('\n')
else
if srcBatch[b].words then
_G.logger:info('SENT %d: %s', sentId, translator:buildOutput(srcBatch[b]))
else
_G.logger:info('FEATS %d: IDX - %s - SIZE %d', sentId, srcIdBatch[b], srcBatch[b].vectors:size(1))
end
if withGoldScore then
_G.logger:info('GOLD %d: %s', sentId, translator:buildOutput(goldBatch[b]), results[b].goldScore)
_G.logger:info("GOLD SCORE: %.2f", results[b].goldScore)
goldScoreTotal = goldScoreTotal + results[b].goldScore
goldWordsTotal = goldWordsTotal + #goldBatch[b].words
end
if opt.dump_input_encoding then
outFile:write(sentId, ' ', table.concat(torch.totable(results[b]), " "), '\n')
else
for n = 1, #results[b].preds do
-- count target unknown words and words generated on 1-best
if n == 1 then
globalTotalCountTgt = globalTotalCountTgt + #results[b].preds[n].words
for _, w in ipairs(results[b].preds[n].words) do
globalUnkCountTgt = globalUnkCountTgt + (w==onmt.Constants.UNK_WORD and 1 or 0)
end
end
local sentence
if opt.detokenize_output then
sentence = tokenizer.detokenize(optTok[2],
results[b].preds[n].words,
results[b].preds[n].features)
else
sentence = translator:buildOutput(results[b].preds[n])
end
outFile:write(sentence .. '\n')
if withAttention then
local attentions = results[b].preds[n].attention
local score = results[b].preds[n].score
local targetLength = #attentions
if translator:srcFeat() then
attFile:write(string.format('%d ||| %s ||| %f ||| %d\n',
sentId, sentence, score, targetLength))
else
local source = translator:buildOutput(srcBatch[b])
local sourceLength = #srcBatch[b].words
attFile:write(string.format('%d ||| %s ||| %f ||| %s ||| %d %d\n',
sentId, sentence, score, source,
sourceLength, targetLength))
end
for _, attention in ipairs(attentions) do
if attention ~= nil then
attFile:write(table.concat(torch.totable(attention), ' '))
attFile:write('\n')
end
end
attFile:write('\n')
end
if n == 1 then
predScoreTotal = predScoreTotal + results[b].preds[n].score
predWordsTotal = predWordsTotal + #results[b].preds[n].words
if #results[b].preds > 1 then
_G.logger:info('')
_G.logger:info('BEST HYP:')
end
end
if #results[b].preds > 1 then
_G.logger:info("[%.2f] %s", results[b].preds[n].score, sentence)
else
_G.logger:info("PRED %d: %s", sentId, sentence)
_G.logger:info("PRED SCORE: %.2f", results[b].preds[n].score)
end
end
end
end
_G.logger:info('')
sentId = sentId + 1
end
if srcSeq == nil then
break
end
batchId = batchId + 1
srcBatch = {}
srcIdBatch = {}
if withGoldScore then
goldBatch = {}
end
collectgarbage()
end
end
_G.logger:info("Translated "..globalTotalCountSrc.." words, src unk count: "..globalUnkCountSrc..", coverage: "..
((math.floor(globalUnkCountSrc*1000/globalTotalCountSrc))/10).."%, "..
"tgt words: "..globalTotalCountTgt.." words, tgt unk count: "..globalUnkCountTgt..", coverage: "..
((math.floor(globalUnkCountTgt*1000/globalTotalCountTgt))/10).."%, ")
if opt.time then
local time = timer:time()
local sentenceCount = sentId-1
_G.logger:info("Average sentence translation time (in seconds):\n")
_G.logger:info("avg real\t" .. time.real / sentenceCount .. "\n")
_G.logger:info("avg user\t" .. time.user / sentenceCount .. "\n")
_G.logger:info("avg sys\t" .. time.sys / sentenceCount .. "\n")
end
if opt.dump_input_encoding == false then
reportScore('PRED', predScoreTotal, predWordsTotal)
if withGoldScore then
reportScore('GOLD', goldScoreTotal, goldWordsTotal)
end
end
if opt.save_beam_to:len() > 0 then
translator:saveBeamHistories(opt.save_beam_to)
end
outFile:close()
_G.logger:shutDown()
end
main()
| 30.773224
| 115
| 0.592027
|
da288e2f3470e56411bb75da19247c62e5c35b20
| 281
|
php
|
PHP
|
PHP/wxdev/controllers/LoginController.php
|
originalix/HTML-CSS-Prictice
|
8493167e75d1f7bfedcefb192f499765353fbce7
|
[
"MIT"
] | 47
|
2018-05-08T11:58:21.000Z
|
2021-06-05T13:26:32.000Z
|
PHP/wxdev/controllers/LoginController.php
|
PetalsOnaWet/Original
|
f3867032b94d69cdbaa214f49221446e16173dd6
|
[
"MIT"
] | 6
|
2021-01-03T03:06:17.000Z
|
2022-03-08T22:41:34.000Z
|
PHP/wxdev/controllers/LoginController.php
|
PetalsOnaWet/Original
|
f3867032b94d69cdbaa214f49221446e16173dd6
|
[
"MIT"
] | 48
|
2018-03-19T02:39:06.000Z
|
2022-02-23T13:36:14.000Z
|
<?php
namespace app\controllers;
use Yii;
use yii\web\Controller;
use app\models\AdminUser\AdminUserLogin;
class LoginController extends Controller
{
public function actionIndex()
{
$this->layout = false;
return $this->render('//account/login');
}
}
| 16.529412
| 48
| 0.679715
|
851e9dfe08bd841ba591ec1ed0467660700b4e56
| 658
|
cs
|
C#
|
DiscordJS v12.2.0/JS/ArrayBuffer.cs
|
Panthr75/DiscordJS-CSharp
|
fe82cfa6e96a98ba0d176a0a25ca2b66b3207639
|
[
"MIT"
] | 1
|
2020-08-09T01:26:22.000Z
|
2020-08-09T01:26:22.000Z
|
DiscordJS v12.2.0/JS/ArrayBuffer.cs
|
Panthr75/DiscordJS-CSharp
|
fe82cfa6e96a98ba0d176a0a25ca2b66b3207639
|
[
"MIT"
] | null | null | null |
DiscordJS v12.2.0/JS/ArrayBuffer.cs
|
Panthr75/DiscordJS-CSharp
|
fe82cfa6e96a98ba0d176a0a25ca2b66b3207639
|
[
"MIT"
] | null | null | null |
namespace JavaScript
{
public class ArrayBuffer
{
internal int byteLength;
internal int byteOffset;
internal byte[] block;
public static bool IsView(object arg)
{
//
}
public int ByteLength { get; }
public ArrayBuffer Slice(int begin, int end)
{
//
}
internal static void RawBytesToNumber()
{
//
}
internal static int ElementSize<T, This>(This typedArray) where This : TypedArray<T, This>, new()
{
if (typedArray is Int8Array) return 1;
return 0;
}
}
}
| 20.5625
| 105
| 0.507599
|
be20bfbe0f428336705fe0a457720a166d2dfa02
| 456
|
ts
|
TypeScript
|
packages/core/src/components/svg/SvgTheme.ts
|
reflex-ui/reflex-ui
|
e64cee3300bea89e8604801f516a50491ee1fc9c
|
[
"MIT"
] | 8
|
2019-03-11T19:39:07.000Z
|
2020-08-26T12:22:07.000Z
|
packages/core/src/components/svg/SvgTheme.ts
|
reflex-ui/reflex-ui
|
e64cee3300bea89e8604801f516a50491ee1fc9c
|
[
"MIT"
] | 9
|
2019-04-11T18:25:03.000Z
|
2022-02-26T10:09:37.000Z
|
packages/core/src/components/svg/SvgTheme.ts
|
reflex-ui/reflex-ui
|
e64cee3300bea89e8604801f516a50491ee1fc9c
|
[
"MIT"
] | 2
|
2019-03-12T19:45:36.000Z
|
2019-04-17T20:26:30.000Z
|
/**
* Copyright (c) Flavio Silva https://flsilva.com
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import { ViewStyle } from 'react-native';
import { SvgProps } from 'react-native-svg';
import { PrimitiveComponentTheme } from '../PrimitiveComponentTheme';
export type SvgTheme<ComponentProps> = PrimitiveComponentTheme<
ComponentProps,
SvgProps,
ViewStyle
>;
| 25.333333
| 69
| 0.739035
|