identifier
stringlengths
42
383
collection
stringclasses
1 value
open_type
stringclasses
1 value
license
stringlengths
0
1.81k
date
float64
1.99k
2.02k
title
stringlengths
0
100
creator
stringlengths
1
39
language
stringclasses
157 values
language_type
stringclasses
2 values
word_count
int64
1
20k
token_count
int64
4
1.32M
text
stringlengths
5
1.53M
__index_level_0__
int64
0
57.5k
https://github.com/kaywenith/TorchMPS/blob/master/torchmps/tests/benchmarks/test_benchmark_prob_mps.py
Github Open Source
Open Source
MIT
2,022
TorchMPS
kaywenith
Python
Code
538
1,667
# MIT License # # Copyright (c) 2021 Jacob Miller # # Permission is hereby granted, free of charge, to any person obtaining a copy of # this software and associated documentation files (the "Software"), to deal in # the Software without restriction, including without limitation the rights to # use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of # the Software, and to permit persons to whom the Software is furnished to do so, # subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS # FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR # COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER # IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN # CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
"""Tests for probabilistic MPS functions""" from functools import partial import torch from torchmps import ProbMPS, ProbUnifMPS from torchmps.tests.utils_for_tests import group_name def prob_eval_runner( benchmark, old_eval: bool = False, vec_input: bool = False, parallel: bool = False, uniform: bool = True, cmplx: bool = False, input_dim: int = 10, bond_dim: int = 10, seq_len: int = 100, batch: int = 100, ): # Initialize MPS model, fix evaluation style if uniform: mps_model = ProbUnifMPS(input_dim, bond_dim, cmplx, parallel) else: mps_model = ProbMPS(seq_len, input_dim, bond_dim, cmplx, parallel) mps_model = partial(mps_model, slim_eval=(not old_eval)) # Create fake input data if vec_input: fake_data = torch.randn(seq_len, batch, input_dim).abs() fake_data = fake_data / fake_data.sum(dim=-1, keepdim=True) else: fake_data = torch.randint(input_dim, (seq_len, batch)) # Benchmark get_mat_slices using input benchmark benchmark(mps_model, fake_data) # Shorthand for old vs new evaluations old_eval_runner = partial(prob_eval_runner, old_eval=True) new_eval_runner = partial(prob_eval_runner, old_eval=False) @group_name("old_eval") def test_old_eval_base(benchmark): """Benchmark old probabilistic evaluation with default values""" old_eval_runner(benchmark) @group_name("new_eval") def test_new_eval_base(benchmark): """Benchmark new probabilistic evaluation with default values""" new_eval_runner(benchmark) @group_name("old_eval") def test_old_eval_nonuniform(benchmark): """Benchmark old probabilistic evaluation with non-uniform core tensors""" old_eval_runner(benchmark, uniform=False) @group_name("new_eval") def test_new_eval_nonuniform(benchmark): """Benchmark new probabilistic evaluation with non-uniform core tensors""" new_eval_runner(benchmark, uniform=False) @group_name("old_eval") def test_old_eval_complex(benchmark): """Benchmark old probabilistic evaluation with complex core tensors""" old_eval_runner(benchmark, cmplx=True) @group_name("new_eval") def 
test_new_eval_complex(benchmark): """Benchmark new probabilistic evaluation with complex core tensors""" new_eval_runner(benchmark, cmplx=True) @group_name("old_eval") def test_old_eval_large_seqlen(benchmark): """Benchmark old probabilistic evaluation with 10x longer sequences""" old_eval_runner(benchmark, seq_len=1000) @group_name("new_eval") def test_new_eval_large_seqlen(benchmark): """Benchmark new probabilistic evaluation with 10x longer sequences""" new_eval_runner(benchmark, seq_len=1000) @group_name("old_eval") def test_old_eval_vecs_in(benchmark): """Benchmark old probabilistic evaluation with continuous inputs""" old_eval_runner(benchmark, vec_input=True) @group_name("new_eval") def test_new_eval_vecs_in(benchmark): """Benchmark new probabilistic evaluation with continuous inputs""" new_eval_runner(benchmark, vec_input=True) @group_name("old_eval") def test_old_eval_large_bonddim(benchmark): """Benchmark old probabilistic evaluation with 10x larger bond dimensions""" old_eval_runner(benchmark, bond_dim=100) @group_name("new_eval") def test_new_eval_large_bonddim(benchmark): """Benchmark new probabilistic evaluation with 10x larger bond dimensions""" new_eval_runner(benchmark, bond_dim=100) @group_name("old_eval") def test_old_eval_large_inputdim(benchmark): """Benchmark old probabilistic evaluation with 10x larger input dimensions""" old_eval_runner(benchmark, input_dim=100) @group_name("new_eval") def test_new_eval_large_inputdim(benchmark): """Benchmark new probabilistic evaluation with 10x larger input dimensions""" new_eval_runner(benchmark, input_dim=100) @group_name("old_eval") def test_old_eval_extreme(benchmark): """Benchmark old probabilistic evaluation with larger bond dims and seq lens""" old_eval_runner(benchmark, bond_dim=100, seq_len=1000) @group_name("new_eval") def test_new_eval_extreme(benchmark): """Benchmark new probabilistic evaluation with larger bond dims and seq lens""" new_eval_runner(benchmark, bond_dim=100, seq_len=1000)
14,675
https://github.com/diegorafaelvieira/Programacao-3/blob/master/Aula 10/Exercicios/aula10/Aula-10-Exemplos/exemplo_slide_25.php
Github Open Source
Open Source
MIT
null
Programacao-3
diegorafaelvieira
PHP
Code
34
124
<!DOCTYPE html> <html> <head> <meta charset="UTF-8"> <title>Aula de PHP</title> </head> <body> <?php $nome = "Maria"; $idade = 30; ?> <h1>Welcome, <?php echo $nome?>!</h1> <p> You have <?php echo $idade?> years old. </p> </body> </html>
4,635
https://github.com/collinsauve/csharp-driver/blob/master/src/Cassandra.IntegrationTests/MetadataTests/TokenMapTopologyChangeTests.cs
Github Open Source
Open Source
Apache-2.0, MIT
2,019
csharp-driver
collinsauve
C#
Code
291
1,118
// // Copyright DataStax, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // using System; using System.Linq; using System.Text; using Cassandra.IntegrationTests.TestBase; using Cassandra.IntegrationTests.TestClusterManagement; using Cassandra.Tests; using NUnit.Framework; namespace Cassandra.IntegrationTests.MetadataTests { [TestFixture, Category("short")] public class TokenMapTopologyChangeTests { private ITestCluster TestCluster { get; set; } private ICluster ClusterObj { get; set; } [Test] public void TokenMap_Should_RebuildTokenMap_When_NodeIsDecommissioned() { TestCluster = TestClusterManager.CreateNew(3, new TestClusterOptions { UseVNodes = true }); var keyspaceName = TestUtils.GetUniqueKeyspaceName().ToLower(); ClusterObj = Cluster.Builder().AddContactPoint(TestCluster.InitialContactPoint).Build(); var session = ClusterObj.Connect(); var createKeyspaceCql = $"CREATE KEYSPACE {keyspaceName} WITH replication = {{'class': 'SimpleStrategy', 'replication_factor' : 3}}"; session.Execute(createKeyspaceCql); TestUtils.WaitForSchemaAgreement(ClusterObj); session.ChangeKeyspace(keyspaceName); var replicas = ClusterObj.Metadata.GetReplicas(keyspaceName, Encoding.UTF8.GetBytes("123")); Assert.AreEqual(3, replicas.Count); Assert.AreEqual(3, ClusterObj.Metadata.Hosts.Count); var oldTokenMap = ClusterObj.Metadata.TokenToReplicasMap; this.TestCluster.DecommissionNode(1); TestHelper.RetryAssert(() => { Assert.AreEqual(2, ClusterObj.Metadata.Hosts.Count); replicas = 
ClusterObj.Metadata.GetReplicas(keyspaceName, Encoding.UTF8.GetBytes("123")); Assert.AreEqual(2, replicas.Count); }, 10, 500); Assert.IsFalse(object.ReferenceEquals(ClusterObj.Metadata.TokenToReplicasMap, oldTokenMap)); } [Test] public void TokenMap_Should_RebuildTokenMap_When_NodeIsBootstrapped() { TestCluster = TestClusterManager.CreateNew(2, new TestClusterOptions { UseVNodes = true }); var keyspaceName = TestUtils.GetUniqueKeyspaceName().ToLower(); ClusterObj = Cluster.Builder().AddContactPoint(TestCluster.InitialContactPoint).Build(); var session = ClusterObj.Connect(); var createKeyspaceCql = $"CREATE KEYSPACE {keyspaceName} WITH replication = {{'class': 'SimpleStrategy', 'replication_factor' : 3}}"; session.Execute(createKeyspaceCql); TestUtils.WaitForSchemaAgreement(ClusterObj); session.ChangeKeyspace(keyspaceName); var replicas = ClusterObj.Metadata.GetReplicas(keyspaceName, Encoding.UTF8.GetBytes("123")); Assert.AreEqual(2, replicas.Count); Assert.AreEqual(2, ClusterObj.Metadata.Hosts.Count); var oldTokenMap = ClusterObj.Metadata.TokenToReplicasMap; this.TestCluster.BootstrapNode(3); TestHelper.RetryAssert(() => { Assert.AreEqual(3, ClusterObj.Metadata.Hosts.Count); replicas = ClusterObj.Metadata.GetReplicas(keyspaceName, Encoding.UTF8.GetBytes("123")); Assert.AreEqual(3, replicas.Count); }, 10, 500); Assert.IsFalse(object.ReferenceEquals(ClusterObj.Metadata.TokenToReplicasMap, oldTokenMap)); } [TearDown] public void TearDown() { TestCluster?.Remove(); ClusterObj?.Shutdown(); } } }
9,341
https://github.com/Iyiola-am/webnotes/blob/master/resources/views/layouts/note.blade.php
Github Open Source
Open Source
MIT
2,019
webnotes
Iyiola-am
PHP
Code
26
107
<article class="card shadow-sm my-3"> <div class="card-body"> <a href="{{ route('notes.show', $note->id) }}" class="text text-primary d-3 card-title">{{ $note->title }}</p> </div> <div class="card-footer"> @include('layouts.action', ['note' => $note]) </div> </article>
20,675
https://github.com/dotnet/runtime/blob/master/src/libraries/Common/tests/System/Security/Cryptography/AlgorithmImplementations/EC/ECKeyPemTests.cs
Github Open Source
Open Source
MIT
2,023
runtime
dotnet
C#
Code
920
8,258
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. using System.Text; using Test.Cryptography; using Xunit; namespace System.Security.Cryptography.Tests { [SkipOnPlatform(TestPlatforms.Browser, "Not supported on Browser")] public abstract class ECKeyPemTests<TAlg> where TAlg : ECAlgorithm { private const string AmbiguousExceptionMarker = "multiple keys"; private const string EncryptedExceptionMarker = "encrypted key"; private const string NoPemExceptionMarker = "No supported key"; protected abstract TAlg CreateKey(); [Fact] public void ImportFromPem_NoPem() { using (TAlg key = CreateKey()) { ArgumentException ae = AssertExtensions.Throws<ArgumentException>("input", () => key.ImportFromPem("")); Assert.Contains(NoPemExceptionMarker, ae.Message); } } [Fact] public void ImportFromPem_ECPrivateKey_Simple() { using (TAlg key = CreateKey()) { key.ImportFromPem(@" -----BEGIN EC PRIVATE KEY----- MHcCAQEEIHChLC2xaEXtVv9oz8IaRys/BNfWhRv2NJ8tfVs0UrOKoAoGCCqGSM49 AwEHoUQDQgAEgQHs5HRkpurXDPaabivT2IaRoyYtIsuk92Ner/JmgKjYoSumHVmS NfZ9nLTVjxeD08pD548KWrqmJAeZNsDDqQ== -----END EC PRIVATE KEY-----"); ECParameters ecParameters = key.ExportParameters(true); ECParameters expected = EccTestData.GetNistP256ReferenceKey(); EccTestBase.AssertEqual(expected, ecParameters); } } [Fact] public void ImportFromPem_ECPrivateKey_IgnoresUnrelatedAlgorithm() { using (TAlg key = CreateKey()) { key.ImportFromPem(@" -----BEGIN RSA PRIVATE KEY----- MIIBOwIBAAJBALc/WfXui9VeJLf/AprRaoVDyW0lPlQxm5NTLEHDwUd7idstLzPX uah0WEjgao5oO1BEUR4byjYlJ+F89Cs4BhUCAwEAAQJBAK/m8jYvnK9exaSR+DAh Ij12ip5pB+HOFOdhCbS/coNoIowa6WJGrd3Np1m9BBhouWloF8UB6Iu8/e/wAg+F 9ykCIQDzcnsehnYgVZTTxzoCJ01PGpgESilRyFzNEsb8V60ZewIhAMCyOujqUqn7 Q079SlHzXuvocqIdt4IM1EmIlrlU9GGvAh8Ijv3FFPUSLfANgfOIH9mX7ldpzzGk rmaUzxQvyuVLAiEArCTM8dSbopUADWnD4jArhU50UhWAIaM6ZrKqC8k0RKsCIQDC yZWUxoxAdjfrBGsx+U6BHM0Myqqe7fY7hjWzj4aBCw== -----END RSA PRIVATE KEY----- -----BEGIN EC 
PRIVATE KEY----- MHcCAQEEIHChLC2xaEXtVv9oz8IaRys/BNfWhRv2NJ8tfVs0UrOKoAoGCCqGSM49 AwEHoUQDQgAEgQHs5HRkpurXDPaabivT2IaRoyYtIsuk92Ner/JmgKjYoSumHVmS NfZ9nLTVjxeD08pD548KWrqmJAeZNsDDqQ== -----END EC PRIVATE KEY-----"); ECParameters ecParameters = key.ExportParameters(true); ECParameters expected = EccTestData.GetNistP256ReferenceKey(); EccTestBase.AssertEqual(expected, ecParameters); } } [Fact] public void ImportFromPem_Pkcs8_Simple() { using (TAlg key = CreateKey()) { key.ImportFromPem(@" -----BEGIN PRIVATE KEY----- MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQgcKEsLbFoRe1W/2jP whpHKz8E19aFG/Y0ny19WzRSs4qhRANCAASBAezkdGSm6tcM9ppuK9PYhpGjJi0i y6T3Y16v8maAqNihK6YdWZI19n2ctNWPF4PTykPnjwpauqYkB5k2wMOp -----END PRIVATE KEY-----"); ECParameters ecParameters = key.ExportParameters(true); ECParameters expected = EccTestData.GetNistP256ReferenceKey(); EccTestBase.AssertEqual(expected, ecParameters); } } [Fact] public void ImportFromPem_Pkcs8_IgnoresUnrelatedAlgorithm() { using (TAlg key = CreateKey()) { key.ImportFromPem(@" -----BEGIN RSA PRIVATE KEY----- MIIBOwIBAAJBALc/WfXui9VeJLf/AprRaoVDyW0lPlQxm5NTLEHDwUd7idstLzPX uah0WEjgao5oO1BEUR4byjYlJ+F89Cs4BhUCAwEAAQJBAK/m8jYvnK9exaSR+DAh Ij12ip5pB+HOFOdhCbS/coNoIowa6WJGrd3Np1m9BBhouWloF8UB6Iu8/e/wAg+F 9ykCIQDzcnsehnYgVZTTxzoCJ01PGpgESilRyFzNEsb8V60ZewIhAMCyOujqUqn7 Q079SlHzXuvocqIdt4IM1EmIlrlU9GGvAh8Ijv3FFPUSLfANgfOIH9mX7ldpzzGk rmaUzxQvyuVLAiEArCTM8dSbopUADWnD4jArhU50UhWAIaM6ZrKqC8k0RKsCIQDC yZWUxoxAdjfrBGsx+U6BHM0Myqqe7fY7hjWzj4aBCw== -----END RSA PRIVATE KEY----- -----BEGIN PRIVATE KEY----- MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQgcKEsLbFoRe1W/2jP whpHKz8E19aFG/Y0ny19WzRSs4qhRANCAASBAezkdGSm6tcM9ppuK9PYhpGjJi0i y6T3Y16v8maAqNihK6YdWZI19n2ctNWPF4PTykPnjwpauqYkB5k2wMOp -----END PRIVATE KEY-----"); ECParameters ecParameters = key.ExportParameters(true); ECParameters expected = EccTestData.GetNistP256ReferenceKey(); EccTestBase.AssertEqual(expected, ecParameters); } } [Fact] public void ImportFromPem_Spki_Simple() { using 
(TAlg key = CreateKey()) { key.ImportFromPem(@" -----BEGIN PUBLIC KEY----- MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEgQHs5HRkpurXDPaabivT2IaRoyYt Isuk92Ner/JmgKjYoSumHVmSNfZ9nLTVjxeD08pD548KWrqmJAeZNsDDqQ== -----END PUBLIC KEY-----"); ECParameters ecParameters = key.ExportParameters(false); ECParameters expected = EccTestData.GetNistP256ReferenceKey(); EccTestBase.ComparePublicKey(expected.Q, ecParameters.Q, isEqual: true); } } [Fact] public void ImportFromPem_Spki_PrecedingUnrelatedPemIsIgnored() { using (TAlg key = CreateKey()) { key.ImportFromPem(@" -----BEGIN CERTIFICATE----- MIICTzCCAgmgAwIBAgIJAMQtYhFJ0+5jMA0GCSqGSIb3DQEBBQUAMIGSMQswCQYD VQQGEwJVUzETMBEGA1UECAwKV2FzaGluZ3RvbjEQMA4GA1UEBwwHUmVkbW9uZDEY MBYGA1UECgwPTWljcm9zb2Z0IENvcnAuMSAwHgYDVQQLDBcuTkVUIEZyYW1ld29y ayAoQ29yZUZ4KTEgMB4GA1UEAwwXUlNBIDM4NC1iaXQgQ2VydGlmaWNhdGUwHhcN MTYwMzAyMTY1OTA0WhcNMTYwNDAxMTY1OTA0WjCBkjELMAkGA1UEBhMCVVMxEzAR BgNVBAgMCldhc2hpbmd0b24xEDAOBgNVBAcMB1JlZG1vbmQxGDAWBgNVBAoMD01p Y3Jvc29mdCBDb3JwLjEgMB4GA1UECwwXLk5FVCBGcmFtZXdvcmsgKENvcmVGeCkx IDAeBgNVBAMMF1JTQSAzODQtYml0IENlcnRpZmljYXRlMEwwDQYJKoZIhvcNAQEB BQADOwAwOAIxANrMIthuZxV1Ay4x8gbc/BksZeLVEInlES0JbyiCr9tbeM22Vy/S 9h2zkEciMuPZ9QIDAQABo1AwTjAdBgNVHQ4EFgQU5FG2Fmi86hJOCf4KnjaxOGWV dRUwHwYDVR0jBBgwFoAU5FG2Fmi86hJOCf4KnjaxOGWVdRUwDAYDVR0TBAUwAwEB /zANBgkqhkiG9w0BAQUFAAMxAEzDg/u8TlApCnE8qxhcbTXk2MbX+2n5PCn+MVrW wggvPj3b2WMXsVWiPr4S1Y/nBA== -----END CERTIFICATE----- -----BEGIN PUBLIC KEY----- MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEgQHs5HRkpurXDPaabivT2IaRoyYt Isuk92Ner/JmgKjYoSumHVmSNfZ9nLTVjxeD08pD548KWrqmJAeZNsDDqQ== -----END PUBLIC KEY-----"); ECParameters ecParameters = key.ExportParameters(false); ECParameters expected = EccTestData.GetNistP256ReferenceKey(); EccTestBase.ComparePublicKey(expected.Q, ecParameters.Q, isEqual: true); } } [Fact] public void ImportFromPem_Spki_IgnoresUnrelatedAlgorithms() { using (TAlg key = CreateKey()) { key.ImportFromPem(@" -----BEGIN RSA PRIVATE KEY----- 
MIIBOwIBAAJBALc/WfXui9VeJLf/AprRaoVDyW0lPlQxm5NTLEHDwUd7idstLzPX uah0WEjgao5oO1BEUR4byjYlJ+F89Cs4BhUCAwEAAQJBAK/m8jYvnK9exaSR+DAh Ij12ip5pB+HOFOdhCbS/coNoIowa6WJGrd3Np1m9BBhouWloF8UB6Iu8/e/wAg+F 9ykCIQDzcnsehnYgVZTTxzoCJ01PGpgESilRyFzNEsb8V60ZewIhAMCyOujqUqn7 Q079SlHzXuvocqIdt4IM1EmIlrlU9GGvAh8Ijv3FFPUSLfANgfOIH9mX7ldpzzGk rmaUzxQvyuVLAiEArCTM8dSbopUADWnD4jArhU50UhWAIaM6ZrKqC8k0RKsCIQDC yZWUxoxAdjfrBGsx+U6BHM0Myqqe7fY7hjWzj4aBCw== -----END RSA PRIVATE KEY----- -----BEGIN PUBLIC KEY----- MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEgQHs5HRkpurXDPaabivT2IaRoyYt Isuk92Ner/JmgKjYoSumHVmSNfZ9nLTVjxeD08pD548KWrqmJAeZNsDDqQ== -----END PUBLIC KEY-----"); ECParameters ecParameters = key.ExportParameters(false); ECParameters expected = EccTestData.GetNistP256ReferenceKey(); EccTestBase.ComparePublicKey(expected.Q, ecParameters.Q, isEqual: true); } } [Fact] public void ImportFromPem_Spki_PrecedingMalformedPem() { using (TAlg key = CreateKey()) { key.ImportFromPem(@" -----BEGIN CERTIFICATE----- $$ I AM NOT A PEM -----END CERTIFICATE----- -----BEGIN PUBLIC KEY----- MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEgQHs5HRkpurXDPaabivT2IaRoyYt Isuk92Ner/JmgKjYoSumHVmSNfZ9nLTVjxeD08pD548KWrqmJAeZNsDDqQ== -----END PUBLIC KEY-----"); ECParameters ecParameters = key.ExportParameters(false); ECParameters expected = EccTestData.GetNistP256ReferenceKey(); EccTestBase.ComparePublicKey(expected.Q, ecParameters.Q, isEqual: true); } } [Fact] public void ImportFromPem_Spki_AmbiguousKey_Spki() { using (TAlg key = CreateKey()) { string pem = @" -----BEGIN PUBLIC KEY----- MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEgQHs5HRkpurXDPaabivT2IaRoyYt Isuk92Ner/JmgKjYoSumHVmSNfZ9nLTVjxeD08pD548KWrqmJAeZNsDDqQ== -----END PUBLIC KEY----- -----BEGIN PUBLIC KEY----- MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEgQHs5HRkpurXDPaabivT2IaRoyYt Isuk92Ner/JmgKjYoSumHVmSNfZ9nLTVjxeD08pD548KWrqmJAeZNsDDqQ== -----END PUBLIC KEY-----"; ArgumentException ae = AssertExtensions.Throws<ArgumentException>("input", () => key.ImportFromPem(pem)); 
Assert.Contains(AmbiguousExceptionMarker, ae.Message); } } [Fact] public void ImportFromPem_Spki_AmbiguousKey_EncryptedPkcs8() { using (TAlg key = CreateKey()) { string pem = @" -----BEGIN ENCRYPTED PRIVATE KEY----- MIHgMEsGCSqGSIb3DQEFDTA+MCkGCSqGSIb3DQEFDDAcBAjVvm4KTLb0JgICCAAw DAYIKoZIhvcNAgkFADARBgUrDgMCBwQIuHgfok8Ytl0EgZDkDSJ9vt8UvSesdyV+ Evt9yfvEjiP/6yITq59drw1Kcgp6buOCVCY7LZ06aD6WpogiqGDYMuzfvqg5hNFp opSAJ/pvHONL5kyAJLeNyG9c/mR2qyrP2L9gL0Z5fB9NyPejKTLi0PXMGQWdDTH8 Qh0fqdrNovgFLubbJFMQN/MwwIAfIuf0Mn0WFYYeQiBJ3kg= -----END ENCRYPTED PRIVATE KEY----- -----BEGIN PUBLIC KEY----- MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEgQHs5HRkpurXDPaabivT2IaRoyYt Isuk92Ner/JmgKjYoSumHVmSNfZ9nLTVjxeD08pD548KWrqmJAeZNsDDqQ== -----END PUBLIC KEY-----"; ArgumentException ae = AssertExtensions.Throws<ArgumentException>("input", () => key.ImportFromPem(pem)); Assert.Contains(AmbiguousExceptionMarker, ae.Message); } } [Fact] public void ImportFromPem_Spki_AmbiguousKey_EncryptedPkcs8_Pkcs8First() { using (TAlg key = CreateKey()) { string pem = @" -----BEGIN PUBLIC KEY----- MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEgQHs5HRkpurXDPaabivT2IaRoyYt Isuk92Ner/JmgKjYoSumHVmSNfZ9nLTVjxeD08pD548KWrqmJAeZNsDDqQ== -----END PUBLIC KEY----- -----BEGIN ENCRYPTED PRIVATE KEY----- MIHgMEsGCSqGSIb3DQEFDTA+MCkGCSqGSIb3DQEFDDAcBAjVvm4KTLb0JgICCAAw DAYIKoZIhvcNAgkFADARBgUrDgMCBwQIuHgfok8Ytl0EgZDkDSJ9vt8UvSesdyV+ Evt9yfvEjiP/6yITq59drw1Kcgp6buOCVCY7LZ06aD6WpogiqGDYMuzfvqg5hNFp opSAJ/pvHONL5kyAJLeNyG9c/mR2qyrP2L9gL0Z5fB9NyPejKTLi0PXMGQWdDTH8 Qh0fqdrNovgFLubbJFMQN/MwwIAfIuf0Mn0WFYYeQiBJ3kg= -----END ENCRYPTED PRIVATE KEY-----"; ArgumentException ae = AssertExtensions.Throws<ArgumentException>("input", () => key.ImportFromPem(pem)); Assert.Contains(AmbiguousExceptionMarker, ae.Message); } } [Fact] public void ImportFromPem_EncryptedPrivateKeyFails() { using (TAlg key = CreateKey()) { string pem = @" -----BEGIN ENCRYPTED PRIVATE KEY----- MIHgMEsGCSqGSIb3DQEFDTA+MCkGCSqGSIb3DQEFDDAcBAjVvm4KTLb0JgICCAAw 
DAYIKoZIhvcNAgkFADARBgUrDgMCBwQIuHgfok8Ytl0EgZDkDSJ9vt8UvSesdyV+ Evt9yfvEjiP/6yITq59drw1Kcgp6buOCVCY7LZ06aD6WpogiqGDYMuzfvqg5hNFp opSAJ/pvHONL5kyAJLeNyG9c/mR2qyrP2L9gL0Z5fB9NyPejKTLi0PXMGQWdDTH8 Qh0fqdrNovgFLubbJFMQN/MwwIAfIuf0Mn0WFYYeQiBJ3kg= -----END ENCRYPTED PRIVATE KEY-----"; ArgumentException ae = AssertExtensions.Throws<ArgumentException>("input", () => key.ImportFromPem(pem)); Assert.Contains(EncryptedExceptionMarker, ae.Message); } } [Fact] public void ImportFromPem_MultipleEncryptedPrivateKeyAreAmbiguous() { using (TAlg key = CreateKey()) { string pem = @" -----BEGIN ENCRYPTED PRIVATE KEY----- MIHgMEsGCSqGSIb3DQEFDTA+MCkGCSqGSIb3DQEFDDAcBAjVvm4KTLb0JgICCAAw DAYIKoZIhvcNAgkFADARBgUrDgMCBwQIuHgfok8Ytl0EgZDkDSJ9vt8UvSesdyV+ Evt9yfvEjiP/6yITq59drw1Kcgp6buOCVCY7LZ06aD6WpogiqGDYMuzfvqg5hNFp opSAJ/pvHONL5kyAJLeNyG9c/mR2qyrP2L9gL0Z5fB9NyPejKTLi0PXMGQWdDTH8 Qh0fqdrNovgFLubbJFMQN/MwwIAfIuf0Mn0WFYYeQiBJ3kg= -----END ENCRYPTED PRIVATE KEY----- -----BEGIN ENCRYPTED PRIVATE KEY----- MIHgMEsGCSqGSIb3DQEFDTA+MCkGCSqGSIb3DQEFDDAcBAjVvm4KTLb0JgICCAAw DAYIKoZIhvcNAgkFADARBgUrDgMCBwQIuHgfok8Ytl0EgZDkDSJ9vt8UvSesdyV+ Evt9yfvEjiP/6yITq59drw1Kcgp6buOCVCY7LZ06aD6WpogiqGDYMuzfvqg5hNFp opSAJ/pvHONL5kyAJLeNyG9c/mR2qyrP2L9gL0Z5fB9NyPejKTLi0PXMGQWdDTH8 Qh0fqdrNovgFLubbJFMQN/MwwIAfIuf0Mn0WFYYeQiBJ3kg= -----END ENCRYPTED PRIVATE KEY-----"; ArgumentException ae = AssertExtensions.Throws<ArgumentException>("input", () => key.ImportFromPem(pem)); Assert.Contains(AmbiguousExceptionMarker, ae.Message); } } [Fact] public void ImportFromEncryptedPem_Pkcs8_Char_Simple() { using (TAlg key = CreateKey()) { string pem = @" -----BEGIN ENCRYPTED PRIVATE KEY----- MIHgMEsGCSqGSIb3DQEFDTA+MCkGCSqGSIb3DQEFDDAcBAjVvm4KTLb0JgICCAAw DAYIKoZIhvcNAgkFADARBgUrDgMCBwQIuHgfok8Ytl0EgZDkDSJ9vt8UvSesdyV+ Evt9yfvEjiP/6yITq59drw1Kcgp6buOCVCY7LZ06aD6WpogiqGDYMuzfvqg5hNFp opSAJ/pvHONL5kyAJLeNyG9c/mR2qyrP2L9gL0Z5fB9NyPejKTLi0PXMGQWdDTH8 Qh0fqdrNovgFLubbJFMQN/MwwIAfIuf0Mn0WFYYeQiBJ3kg= -----END ENCRYPTED PRIVATE 
KEY-----"; key.ImportFromEncryptedPem(pem, (ReadOnlySpan<char>)"test"); ECParameters ecParameters = key.ExportParameters(true); ECParameters expected = EccTestData.GetNistP256ReferenceKey(); EccTestBase.AssertEqual(expected, ecParameters); } } [Fact] public void ImportFromEncryptedPem_Pkcs8_Byte_Simple() { using (TAlg key = CreateKey()) { string pem = @" -----BEGIN ENCRYPTED PRIVATE KEY----- MIHsMFcGCSqGSIb3DQEFDTBKMCkGCSqGSIb3DQEFDDAcBAgf9krO2ZiPvAICCAAw DAYIKoZIhvcNAgkFADAdBglghkgBZQMEAQIEEEv4Re1ATH9lHzx+13GoZU0EgZAV iE/+pIb/4quf+Y524bXUKTGYXzdSUE8Dp1qdZFcwDiCYCTtpL+065fGhmf1KZS2c /OMt/tWvtMSj17+dJvShsu/NYJXF5fsfpSJbd3e50Y3AisW0Ob7mmF54KBfg6Y+4 aATwwQdUIKVzUZsQctsHPjbriQKKn7GKSyUOikBUNQ+TozojX8/g7JAsl+T9jGM= -----END ENCRYPTED PRIVATE KEY-----"; key.ImportFromEncryptedPem(pem, "test"u8); ECParameters ecParameters = key.ExportParameters(true); ECParameters expected = EccTestData.GetNistP256ReferenceKey(); EccTestBase.AssertEqual(expected, ecParameters); } } [Fact] public void ImportFromEncryptedPem_AmbiguousPem_Byte() { using (TAlg key = CreateKey()) { string pem = @" -----BEGIN ENCRYPTED PRIVATE KEY----- MIHsMFcGCSqGSIb3DQEFDTBKMCkGCSqGSIb3DQEFDDAcBAgf9krO2ZiPvAICCAAw DAYIKoZIhvcNAgkFADAdBglghkgBZQMEAQIEEEv4Re1ATH9lHzx+13GoZU0EgZAV iE/+pIb/4quf+Y524bXUKTGYXzdSUE8Dp1qdZFcwDiCYCTtpL+065fGhmf1KZS2c /OMt/tWvtMSj17+dJvShsu/NYJXF5fsfpSJbd3e50Y3AisW0Ob7mmF54KBfg6Y+4 aATwwQdUIKVzUZsQctsHPjbriQKKn7GKSyUOikBUNQ+TozojX8/g7JAsl+T9jGM= -----END ENCRYPTED PRIVATE KEY----- -----BEGIN ENCRYPTED PRIVATE KEY----- MIHgMEsGCSqGSIb3DQEFDTA+MCkGCSqGSIb3DQEFDDAcBAjVvm4KTLb0JgICCAAw DAYIKoZIhvcNAgkFADARBgUrDgMCBwQIuHgfok8Ytl0EgZDkDSJ9vt8UvSesdyV+ Evt9yfvEjiP/6yITq59drw1Kcgp6buOCVCY7LZ06aD6WpogiqGDYMuzfvqg5hNFp opSAJ/pvHONL5kyAJLeNyG9c/mR2qyrP2L9gL0Z5fB9NyPejKTLi0PXMGQWdDTH8 Qh0fqdrNovgFLubbJFMQN/MwwIAfIuf0Mn0WFYYeQiBJ3kg= -----END ENCRYPTED PRIVATE KEY-----"; ArgumentException ae = AssertExtensions.Throws<ArgumentException>("input", () => key.ImportFromEncryptedPem(pem, "test"u8)); 
Assert.Contains(AmbiguousExceptionMarker, ae.Message); } } [Fact] public void ImportFromEncryptedPem_AmbiguousPem_Char() { using (TAlg key = CreateKey()) { string pem = @" -----BEGIN ENCRYPTED PRIVATE KEY----- MIHsMFcGCSqGSIb3DQEFDTBKMCkGCSqGSIb3DQEFDDAcBAgf9krO2ZiPvAICCAAw DAYIKoZIhvcNAgkFADAdBglghkgBZQMEAQIEEEv4Re1ATH9lHzx+13GoZU0EgZAV iE/+pIb/4quf+Y524bXUKTGYXzdSUE8Dp1qdZFcwDiCYCTtpL+065fGhmf1KZS2c /OMt/tWvtMSj17+dJvShsu/NYJXF5fsfpSJbd3e50Y3AisW0Ob7mmF54KBfg6Y+4 aATwwQdUIKVzUZsQctsHPjbriQKKn7GKSyUOikBUNQ+TozojX8/g7JAsl+T9jGM= -----END ENCRYPTED PRIVATE KEY----- -----BEGIN ENCRYPTED PRIVATE KEY----- MIHgMEsGCSqGSIb3DQEFDTA+MCkGCSqGSIb3DQEFDDAcBAjVvm4KTLb0JgICCAAw DAYIKoZIhvcNAgkFADARBgUrDgMCBwQIuHgfok8Ytl0EgZDkDSJ9vt8UvSesdyV+ Evt9yfvEjiP/6yITq59drw1Kcgp6buOCVCY7LZ06aD6WpogiqGDYMuzfvqg5hNFp opSAJ/pvHONL5kyAJLeNyG9c/mR2qyrP2L9gL0Z5fB9NyPejKTLi0PXMGQWdDTH8 Qh0fqdrNovgFLubbJFMQN/MwwIAfIuf0Mn0WFYYeQiBJ3kg= -----END ENCRYPTED PRIVATE KEY-----"; ArgumentException ae = AssertExtensions.Throws<ArgumentException>("input", () => key.ImportFromEncryptedPem(pem, (ReadOnlySpan<char>)"")); Assert.Contains(AmbiguousExceptionMarker, ae.Message); } } [Fact] public void ImportFromEncryptedPem_UnencryptedPem_ThrowsNoPem() { using (TAlg key = CreateKey()) { string pem = @" -----BEGIN PRIVATE KEY----- MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQgcKEsLbFoRe1W/2jP whpHKz8E19aFG/Y0ny19WzRSs4qhRANCAASBAezkdGSm6tcM9ppuK9PYhpGjJi0i y6T3Y16v8maAqNihK6YdWZI19n2ctNWPF4PTykPnjwpauqYkB5k2wMOp -----END PRIVATE KEY-----"; byte[] passwordBytes = Array.Empty<byte>(); ArgumentException ae = AssertExtensions.Throws<ArgumentException>("input", () => key.ImportFromEncryptedPem(pem, passwordBytes)); Assert.Contains(NoPemExceptionMarker, ae.Message); } } [Fact] public void ImportFromEncryptedPem_NoPem() { using(TAlg key = CreateKey()) { ArgumentException ae = AssertExtensions.Throws<ArgumentException>("input", () => key.ImportFromEncryptedPem("", (ReadOnlySpan<char>)"")); 
Assert.Contains(NoPemExceptionMarker, ae.Message); } } } }
7,619
https://github.com/LSmyrnaios/exareme/blob/master/Exareme-Docker/src/mip-algorithms/mipframework/runner/__init__.py
Github Open Source
Open Source
MIT
2,020
exareme
LSmyrnaios
Python
Code
7
22
from .runner import create_runner __all__ = ['create_runner']
7,309
https://github.com/orfeas0/kibana/blob/master/packages/kbn-optimizer/src/report_optimizer_stats.ts
Github Open Source
Open Source
Apache-2.0
2,020
kibana
orfeas0
TypeScript
Code
249
519
/* * Licensed to Elasticsearch B.V. under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch B.V. licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import { materialize, mergeMap, dematerialize } from 'rxjs/operators'; import { CiStatsReporter } from '@kbn/dev-utils'; import { OptimizerUpdate$ } from './run_optimizer'; import { OptimizerState, OptimizerConfig } from './optimizer'; import { pipeClosure } from './common'; export function reportOptimizerStats(reporter: CiStatsReporter, config: OptimizerConfig) { return pipeClosure((update$: OptimizerUpdate$) => { let lastState: OptimizerState | undefined; return update$.pipe( materialize(), mergeMap(async n => { if (n.kind === 'N' && n.value?.state) { lastState = n.value?.state; } if (n.kind === 'C' && lastState) { await reporter.metrics( config.bundles.map(bundle => { // make the cache read from the cache file since it was likely updated by the worker bundle.cache.refresh(); return { group: `@kbn/optimizer bundle module count`, id: bundle.id, value: bundle.cache.getModuleCount() || 0, }; }) ); } return n; }), dematerialize() ); }); }
18,461
https://github.com/NoelChaparro/CentroVision/blob/master/app/views/lentescontacto.blade.php
Github Open Source
Open Source
MIT
2,015
CentroVision
NoelChaparro
PHP
Code
639
3,522
@extends('layouts.baseother') @section('titulo') Operación Laser @stop @section('Contenido') {{ Form::open(array('action' => 'LentesContactoController@guardarLentesContacto','class' => 'form-horizontal frmLentesContacto')) }} <input type='hidden' name='varIdPaciente' id="varIdPaciente" value='{{$Paciente[0]->IdPaciente}}' /> <!--</br></br> <div class="row offset1"> <ul class="hover-block"> <li> <a href="#mdlBuscarPaciente" role="button" data-toggle="modal" id="btnBuscarPacienteModal"> <div class="b-blue cont-icon"><i class="icon-users"></i></div> <div class="hover-content b-blue"> <h4>Buscar</h4> Buscar paciente en la base de datos de acuerdo al nombre </div> </a> </li> </ul> </div> <div class="modal hide fade" tabindex="-1" role="dialog" id="mdlBuscarPaciente"> <div class="modal-header"> <a class="close" data-dismiss="modal">&times;</a> <h3>Buscar Paciente</h3> <br /> <input type="text" class="input-xlarge search-query span4" id="txtBuscarPaciente" placeholder="Buscar Paciente"> <button type="submit" class="btn" id="btnBuscarPaciente">Buscar</button> </div> <div class="modal-body"> <table class="table tblBusquedaPacientesModal table-striped"> <thead> <tr> <th>#</th> <th>Nombre</th> <th>Dirección</th> <th>Teléfono</th> </tr> </thead> <tbody></tbody> </table> </div> <div class="modal-footer"> <a href="#" class="btn" data-dismiss="modal">Cerrar</a> </div> </div>--> <br><br> <!--start: Cuadro Respaldo --> <div style="display:none"> <div id="dataRespaldo"> ¿Que respaldo desea realizar? 
<br><br> <button id="btnRespaldarBaseDatos" class="btn btn-info">Base de Datos</button> <button id="btnRespaldarSistema" class="btn btn-info">Archivos de Sistema</button> <div id="loadingRespado"></div> </div> </div> <!--End: Cuadro Respaldo --> <div class="row"> <div class="span7"> <div class="control-group"> {{ Form::label('nombre', 'Nombre:', array('class' => 'control-label')) }} <div class="controls"> {{ Form::text('nombre',$Paciente[0]->Nombre,array('class' => 'input-xxlarge', 'disabled' => '')) }} </div> </div> </div> <div class="span5"> <div class="control-group"> {{ Form::label('fecha', 'Fecha:', array('class' => 'control-label')) }} <div class="controls"> {{ Form::text('fecha',$fechaActual,array('class' => 'input-small', 'disabled' => '')) }} </div> </div> </div> </div> <!-- Tabla de Queratometria y Refraccion --> <table class="table"> <thead> <tr> <th colspan="5" style="text-align:center;">Queratometría</th> <th colspan="3" style="text-align:center;">Refracción</th> </tr> <tr> <th></th> <th style="text-align:center;">Horizontal</th> <th style="text-align:center;">Eje</th> <th style="text-align:center;">Vertical</th> <th style="text-align:center;">Eje</th> <th style="text-align:center;">Sph</th> <th style="text-align:center;">Cyl</th> <th style="text-align:center;">Eje</th> </tr> </thead> <tbody> <tr> <td>OD</td> <td style="text-align:center;">{{ Form::text('queratometriaHorizontalOD','',array('class' => 'input-xlarge','id' => 'queratometriaHorizontalOD')) }}</td> <td style="text-align:center;">{{ Form::text('queratometriaHorizontalEjeOD', '', array('class' => 'input-small','id'=>'queratometriaHorizontalEjeOD')); }}</td> <td style="text-align:center;">{{ Form::text('queratometriaVerticalOD', '', array('class' => 'input-xlarge','id'=>'queratometriaVerticalOD')); }}</td> <td style="text-align:center;">{{ Form::text('queratometriaVerticalEjeOD', '', array('class' => 'input-small','id'=>'queratometriaVerticalEjeOD')); }}</td> <td style="text-align:center;">{{ 
Form::text('refraccionSphOD', '', array('class' => 'input-small','id'=>'refraccionSphOD')); }}</td> <td style="text-align:center;">{{ Form::text('refraccionCylOD', '', array('class' => 'input-small','id'=>'refraccionCylOD')); }}</td> <td style="text-align:center;">{{ Form::text('refraccionEjeOD', '', array('class' => 'input-small','id'=>'refraccionEjeOD')); }}</td> </tr> <tr> <td>OS</td> <td style="text-align:center;">{{ Form::text('queratometriaHorizontalOI','',array('class' => 'input-xlarge','id' => 'queratometriaHorizontalOI')) }}</td> <td style="text-align:center;">{{ Form::text('queratometriaHorizontalEjeOI', '', array('class' => 'input-small','id'=>'queratometriaHorizontalEjeOI')); }}</td> <td style="text-align:center;">{{ Form::text('queratometriaVerticalOI', '', array('class' => 'input-xlarge','id'=>'queratometriaVerticalOI')); }}</td> <td style="text-align:center;">{{ Form::text('queratometriaVerticalEjeOI', '', array('class' => 'input-small','id'=>'queratometriaVerticalEjeOI')); }}</td> <td style="text-align:center;">{{ Form::text('refraccionSphOI', '', array('class' => 'input-small','id'=>'refraccionSphOI')); }}</td> <td style="text-align:center;">{{ Form::text('refraccionCylOI', '', array('class' => 'input-small','id'=>'refraccionCylOI')); }}</td> <td style="text-align:center;">{{ Form::text('refraccionEjeOI', '', array('class' => 'input-small','id'=>'refraccionEjeOI')); }}</td> </tr> </tbody> </table> <!-- Fin tabla Queratometria y Refraccion --> <br><br> <!-- Tabla de Material del lente --> <table class="table"> <thead> <tr> <th style="text-align:center;">Material</th> <th style="text-align:center;">Marca</th> <th style="text-align:center;">Tipo</th> <th style="text-align:center;">Color</th> </tr> </thead> <tbody> <tr> <td style="text-align:center;">{{ Form::text('material', '', array('class' => 'input-xlarge','id'=>'material')); }}</td> <td style="text-align:center;">{{ Form::text('marca', '', array('class' => 'input-xlarge','id'=>'marca')); }}</td> 
<td style="text-align:center;">{{ Form::text('tipo', '', array('class' => 'input-xlarge','id'=>'tipo')); }}</td> <td style="text-align:center;">{{ Form::text('color', '', array('class' => 'input-xlarge','id'=>'color')); }}</td> </tr> </tbody> </table> <!-- Fin Tabla de Material del lente --> <br><br> <!-- Tabla de Material del lente --> <table class="table"> <thead> <tr> <th></th> <th style="text-align:center;">CB</th> <th style="text-align:center;">Diametro</th> <th style="text-align:center;">CPP</th> <th style="text-align:center;">Poder</th> <th style="text-align:center;">SR</th> <th style="text-align:center;">RX Final</th> </tr> </thead> <tbody> <tr> <td>OD</td> <td style="text-align:center;">{{ Form::text('curvaBaseOD', '', array('class' => 'input-medium','id'=>'curvaBaseOD')); }}</td> <td style="text-align:center;">{{ Form::text('diametroOD', '', array('class' => 'input-small','id'=>'diametroOD')); }}</td> <td style="text-align:center;">{{ Form::text('cppOD', '', array('class' => 'input-small','id'=>'cppOD')); }}</td> <td style="text-align:center;">{{ Form::text('poderOD', '', array('class' => 'input-small','id'=>'poderOD')); }}</td> <td style="text-align:center;">{{ Form::text('srOD', '', array('class' => 'input-xlarge','id'=>'srOD')); }}</td> <td style="text-align:center;">{{ Form::text('rxFinalOD', '', array('class' => 'input-small','id'=>'rxFinalOD')); }}</td> </tr> <tr> <td>OS</td> <td style="text-align:center;">{{ Form::text('curvaBaseOI', '', array('class' => 'input-medium','id'=>'curvaBaseOI')); }}</td> <td style="text-align:center;">{{ Form::text('diametroOI', '', array('class' => 'input-small','id'=>'diametroOI')); }}</td> <td style="text-align:center;">{{ Form::text('cppOI', '', array('class' => 'input-small','id'=>'cppOI')); }}</td> <td style="text-align:center;">{{ Form::text('poderOI', '', array('class' => 'input-small','id'=>'poderOI')); }}</td> <td style="text-align:center;">{{ Form::text('srOI', '', array('class' => 
'input-xlarge','id'=>'srOI')); }}</td> <td style="text-align:center;">{{ Form::text('rxFinalOI', '', array('class' => 'input-small','id'=>'rxFinalOI')); }}</td> </tr> </tbody> </table> <!-- Fin Tabla de Material del lente --> <br><br> <!-- Observaciones --> <div class="control-group"> {{ Form::label('observaciones', 'Observaciones:', array('class' => 'control-label')) }} <div class="controls"> {{ Form::textarea('observaciones','',array('class' => 'span10')) }} </div> </div> <!-- Fin Observaciones --> <!-- start: Button --> <div class="control-group" id="btn_box"> <label class="control-label"></label> <div class="controls"> <input value="Guardar" class="btn btn-large btn-primary" type="submit" id="btnGuardarLenteContacto"> <input value="Cancelar" class="btn btn-large btn-danger" type="button" id="btnCancelarLenteContacto"> </div> </div> <!-- end: button--> {{ Form::close() }} <div class="alert alert-error"> <h4>Historial</h4> </div> <div id="historialLentesContacto"> </div> @stop @section('Javascript') <script src="../js/LentesContacto.js"></script> @stop
9,873
https://github.com/HimanshuAgrahari07/Campaign_API/blob/master/src/dtos/signin.dto.ts
Github Open Source
Open Source
MIT
null
Campaign_API
HimanshuAgrahari07
TypeScript
Code
22
61
import { IsString, IsNumber } from 'class-validator'; class SignInDto { @IsString() public email: string; @IsString() public password: string; } export default SignInDto;
12,337
https://github.com/binkley/basilisk/blob/master/basilisk-service/src/main/java/hm/binkley/basilisk/flora/chef/Chef.java
Github Open Source
Open Source
Unlicense
2,019
basilisk
binkley
Java
Code
46
204
package hm.binkley.basilisk.flora.chef; import hm.binkley.basilisk.StandardDomain; import hm.binkley.basilisk.flora.chef.store.ChefRecord; import hm.binkley.basilisk.flora.chef.store.ChefRepository; import hm.binkley.basilisk.flora.chef.store.ChefStore; import lombok.EqualsAndHashCode; import lombok.ToString; @EqualsAndHashCode(callSuper = true) @ToString(callSuper = true) public final class Chef extends StandardDomain< ChefRecord, ChefRepository, ChefStore, Chef> { public Chef(final ChefRecord record) { super(record); } public String getName() { return record.getName(); } }
13,834
https://github.com/nujo/genieparser/blob/master/src/genie/libs/parser/ios/tests/test_show_routing.py
Github Open Source
Open Source
Apache-2.0
2,021
genieparser
nujo
Python
Code
426
2,089
import unittest from unittest.mock import Mock # ATS from pyats.topology import Device from genie.metaparser.util.exceptions import SchemaEmptyParserError, \ SchemaMissingKeyError from genie.libs.parser.ios.show_routing import (ShowIpRoute, ShowIpRouteWord, ShowIpv6Route, ShowIpv6RouteWord, ShowIpv6RouteUpdated, ShowIpRouteSummary) from genie.libs.parser.iosxe.tests.test_show_routing import \ TestShowIpRoute as test_show_ip_route_iosxe,\ TestShowIpv6RouteUpdated as test_show_ipv6_route_updated_iosxe,\ TestShowIpv6RouteWord as test_show_ipv6_route_word_iosxe,\ TestShowIpRouteSummary as test_show_ip_route_summary_iosxe # ============================================ # unit test for 'show ip route' # ============================================= class test_show_ip_route_ios(unittest.TestCase): """ unit test for show ip route """ device = Device(name='aDevice') empty_output = {'execute.return_value': ''} golden_parsed_output_with_route ={ "entry": { "192.168.234.0/24": { "mask": "24", "type": "internal", "known_via": "eigrp 1", "ip": "192.168.234.0", "redist_via": "eigrp", "distance": "90", "metric": "3072", "redist_via_tag": "1", "update": { "age": "3d04h", "interface": "GigabitEthernet0/2.4", "from": "192.168.9.2" }, "paths": { 1: { "age": "3d04h", "interface": "GigabitEthernet0/2.4", "from": "192.168.9.2", "metric": "3072", "share_count": "1", "nexthop": "192.168.9.2", "merge_labels": False, "prefer_non_rib_labels": False, } } } }, "total_prefixes": 1 } golden_output_with_route = {'execute.return_value':''' show ip route 192.168.234.0 Routing entry for 192.168.234.0/24 Known via "eigrp 1", distance 90, metric 3072, type internal Redistributing via eigrp 1 Last update from 192.168.9.2 on GigabitEthernet0/2.4, 3d04h ago Routing Descriptor Blocks: * 192.168.9.2, from 192.168.9.2, 3d04h ago, via GigabitEthernet0/2.4 Route metric is 3072, traffic share count is 1 Total delay is 20 microseconds, minimum bandwidth is 1000000 Kbit Reliability 255/255, minimum MTU 1500 bytes 
Loading 1/255, Hops 1 '''} def test_empty_1(self): self.device = Mock(**self.empty_output) obj = ShowIpRouteWord(device=self.device) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(route='192.168.234.0') def test_show_ip_route_with_route(self): self.maxDiff = None self.device = Mock(**self.golden_output_with_route) obj = ShowIpRouteWord(device=self.device) parsed_output = obj.parse(route='192.168.234.0') self.assertEqual(parsed_output, self.golden_parsed_output_with_route) class test_show_ip_route(test_show_ip_route_iosxe): def test_empty_1(self): self.device = Mock(**self.empty_output) obj = ShowIpRoute(device=self.device) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse() def test_show_ip_route_1(self): self.maxDiff = None self.device = Mock(**self.golden_output_1) obj = ShowIpRoute(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output,self.golden_parsed_output_1) def test_show_ip_route_2_with_vrf(self): self.maxDiff = None self.device = Mock(**self.golden_output_2_with_vrf) obj = ShowIpRoute(device=self.device) parsed_output = obj.parse(vrf='VRF1') self.assertEqual(parsed_output, self.golden_parsed_output_2_with_vrf) ################################################### # unit test for show ipv6 route updated #################################################### class test_show_ipv6_route_updated(test_show_ipv6_route_updated_iosxe): def test_empty_1(self): self.device = Mock(**self.empty_output) obj = ShowIpv6RouteUpdated(device=self.device) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse() def test_show_ipv6_route_1(self): self.maxDiff = None self.device = Mock(**self.golden_output_1) obj = ShowIpv6RouteUpdated(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output,self.golden_parsed_output_1) def test_show_ipv6_route_2(self): self.maxDiff = None self.device = Mock(**self.golden_output_2) obj = ShowIpv6RouteUpdated(device=self.device) 
parsed_output = obj.parse(vrf='VRF1') self.assertEqual(parsed_output,self.golden_parsed_output_2) ################################################### # unit test for show ipv6 route <WROD> #################################################### class test_show_ipv6_route_word(test_show_ipv6_route_word_iosxe): """unit test for show ipv6 route <WORD>""" def test_empty(self): self.device = Mock(**self.empty_output) obj = ShowIpv6RouteWord(device=self.device) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse(route='2001:db8:400:4::4:1') def test_golden(self): self.maxDiff = None self.device = Mock(**self.golden_output_with_ipv6_route) obj = ShowIpv6RouteWord(device=self.device) parsed_output = obj.parse(route='2001:db8:400:4::4:1') self.assertEqual(parsed_output,self.golden_parsed_output_with_route) ################################################### # unit test for show ip route summary #################################################### class test_show_ip_route_summary(test_show_ip_route_summary_iosxe): def test_empty(self): self.device = Mock(**self.empty_output) obj = ShowIpRouteSummary(device=self.device) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse() def test_golden_1(self): self.maxDiff = None self.device = Mock(**self.golden_output_1) obj = ShowIpRouteSummary(device=self.device) parsed_output = obj.parse(vrf='VRF1') self.assertEqual(parsed_output,self.golden_parsed_output_1) if __name__ == '__main__': unittest.main()
14,773
https://github.com/boberfly/the-forge-glfw/blob/master/external/the-forge/Common_3/OS/FileSystem/ZipFileSystem.cpp
Github Open Source
Open Source
Apache-2.0
2,020
the-forge-glfw
boberfly
C++
Code
399
963
///* // * Copyright (c) 2018-2021 The Forge Interactive Inc. // * // * This file is part of The-Forge // * (see https://github.com/ConfettiFX/The-Forge). // * // * Licensed to the Apache Software Foundation (ASF) under one // * or more contributor license agreements. See the NOTICE file // * distributed with this work for additional information // * regarding copyright ownership. The ASF licenses this file // * to you under the Apache License, Version 2.0 (the // * "License"); you may not use this file except in compliance // * with the License. You may obtain a copy of the License at // * // * http://www.apache.org/licenses/LICENSE-2.0 // * // * Unless required by applicable law or agreed to in writing, // * software distributed under the License is distributed on an // * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // * KIND, either express or implied. See the License for the // * specific language governing permissions and limitations // * under the License. //*/ #include "../../ThirdParty/OpenSource/zip/zip.h" #include "../Interfaces/ILog.h" #include "../Interfaces/IMemory.h" static bool ZipOpen(IFileSystem* pIO, const ResourceDirectory resourceDir, const char* fileName, FileMode mode, FileStream* pOut) { // #TODO: Write to zip zip_t* zip = (zip_t*)pIO->pUser; char filePath[FS_MAX_PATH] = {}; fsAppendPathComponent(fsGetResourceDirectory(resourceDir), fileName, filePath); int error = zip_entry_open(zip, filePath); if (error) { LOGF(LogLevel::eINFO, "Error %i finding file %s for opening in zip: %s", error, fileName, fileName); return NULL; } // Extract the contents of the zip entry ssize_t uncompressedSize = zip_entry_size(zip); void* uncompressed = tf_malloc(uncompressedSize); ssize_t bytesRead = zip_entry_noallocread(zip, uncompressed, uncompressedSize); UNREF_PARAM(bytesRead); ASSERT(bytesRead == zip_entry_size(zip)); zip_entry_close(zip); return fsOpenStreamFromMemory(uncompressed, uncompressedSize, mode, true, pOut); } // #NOTE - Only one function 
needed for zip file IO as we unzip the zip entry when it is opened and treat it as memory buffer // More will be needed if we want to support zip write static IFileSystem gZipFileIO = { ZipOpen }; bool fsOpenZipFile(const ResourceDirectory resourceDir, const char* fileName, FileMode mode, IFileSystem* pOut) { char zipMode = 0; if (mode & FM_WRITE) { zipMode = 'w'; } else if (mode & FM_READ) { zipMode = 'r'; } else { zipMode = 'a'; } zip_t* zipFile = zip_open(resourceDir, fileName, ZIP_DEFAULT_COMPRESSION_LEVEL, zipMode); if (!zipFile) { LOGF(LogLevel::eERROR, "Error creating file system from zip file at %s", fileName); return false; } IFileSystem system = gZipFileIO; system.pUser = zipFile; *pOut = system; return true; } bool fsCloseZipFile(IFileSystem* pZip) { zip_close((zip_t*)pZip->pUser); return true; }
18,822
https://github.com/OpenLocalizationTestOrg/ECMA2YamlTestRepo2/blob/master/fulldocset/add/codesnippet/CSharp/m-system.data.oledb.oled_3_1.cs
Github Open Source
Open Source
CC-BY-4.0, MIT
2,017
ECMA2YamlTestRepo2
OpenLocalizationTestOrg
C#
Code
22
70
public void CreateOleDbParameter() { OleDbParameter parameter = new OleDbParameter(); parameter.ParameterName = "Description"; parameter.OleDbType = OleDbType.VarChar; parameter.Direction = ParameterDirection.Output; parameter.Size = 88; }
243
https://github.com/yarikbratashchuk/NNfSiX/blob/master/R/p001.r
Github Open Source
Open Source
MIT
2,022
NNfSiX
yarikbratashchuk
R
Code
29
76
inputs <- c(1.2, 5.1, 2.1) weights <- c(3.1, 2.1, 8.7) bias <- 3 output <- inputs[1] * weights[1] + inputs[2] * weights[2] + inputs[3] * weights[3] + bias output
51,084
https://github.com/jasonpmcculloch/pure_interface/blob/master/pure_interface.py
Github Open Source
Open Source
MIT
2,017
pure_interface
jasonpmcculloch
Python
Code
2,266
6,916
try: from abc import abstractmethod, abstractproperty, abstractclassmethod, abstractstaticmethod except ImportError: from abc import abstractmethod, abstractproperty class abstractclassmethod(classmethod): __isabstractmethod__ = True def __init__(self, callable): callable.__isabstractmethod__ = True super(abstractclassmethod, self).__init__(callable) class abstractstaticmethod(staticmethod): __isabstractmethod__ = True def __init__(self, callable): callable.__isabstractmethod__ = True super(abstractstaticmethod, self).__init__(callable) import abc import collections import dis import inspect import types import sys import warnings import weakref import six __version__ = '1.9.6' IS_DEVELOPMENT = not hasattr(sys, 'frozen') if six.PY2: _six_ord = ord else: _six_ord = lambda x: x class InterfaceError(Exception): pass class _PIAttributes(object): """ rather than clutter the class namespace with lots of _pi_XXX attributes, collect them all here""" def __init__(self, type_is_interface, interface_method_signatures, interface_property_names): self.type_is_pure_interface = type_is_interface self.abstractproperties = frozenset() # properties that must be provided by instances self.interface_method_names = frozenset(interface_method_signatures.keys()) self.interface_property_names = frozenset(interface_property_names) self.interface_method_signatures = interface_method_signatures self.adapters = weakref.WeakKeyDictionary() self.ducktype_subclasses = set() self.impl_wrapper_type = None class AttributeProperty(object): """ Property that stores it's value in the instance dict under the same name. Abstract properties for concrete classes are replaced with these in the type definition to allow implementations to use attributes. 
""" def __init__(self, name): self.name = name super(AttributeProperty, self).__init__() def __get__(self, instance, owner): if instance is None: return self try: return instance.__dict__[self.name] except KeyError: raise AttributeError(self.name) def __set__(self, instance, value): instance.__dict__[self.name] = value class _ImplementationWrapper(object): def __init__(self, implementation, interface): self.__impl = implementation self.__interface = interface self.__method_names = interface._pi.interface_method_names self.__property_names = interface._pi.interface_property_names self.__interface_name = interface.__name__ def __getattr__(self, attr): impl = self.__impl if attr in self.__method_names: return getattr(impl, attr) elif attr in self.__property_names: return getattr(impl, attr) else: raise AttributeError("'{}' interface has no attribute '{}'".format(self.__interface_name, attr)) def _builtin_attrs(name): """ These attributes are ignored when checking ABC types for emptyness. """ return name in ('__doc__', '__module__', '__qualname__', '__abstractmethods__', '__dict__', '__metaclass__', '__weakref__', '_abc_cache', '_abc_registry', '_abc_negative_cache_version', '_abc_negative_cache') def _get_pi_attribute(cls, attr_name, default=None): if hasattr(cls, '_pi'): return getattr(cls._pi, attr_name) else: return default def _type_is_pure_interface(cls): """ Return True if cls is a pure interface or an empty ABC class""" if cls is object: return False if hasattr(cls, '_pi'): return cls._pi.type_is_pure_interface if issubclass(type(cls), abc.ABCMeta): for attr, value in six.iteritems(cls.__dict__): if _builtin_attrs(attr): continue if callable(value): if not _is_empty_function(value): return False elif isinstance(value, property): for func in (value.fget, value.fset, value.fdel): if func is not None and not _is_empty_function(func): return False return True return False def _get_abc_interface_props_and_funcs(cls): properties = set() function_sigs = {} if not 
hasattr(cls, '__abstractmethods__'): return properties, function_sigs for name in cls.__abstractmethods__: if _builtin_attrs(name): pass # shortcut value = getattr(cls, name) if isinstance(value, (staticmethod, classmethod, types.MethodType)): func = six.get_method_function(value) function_sigs[name] = _get_function_signature(func) elif isinstance(value, types.FunctionType): function_sigs[name] = _get_function_signature(value) elif isinstance(value, property): properties.add(name) return properties, function_sigs def _unwrap_function(func): """ Look for decorated functions and return the wrapped function. """ while hasattr(func, '__wrapped__'): func = func.__wrapped__ return func def _is_empty_function(func, unwrap=False): """ Return True if func is considered empty. All functions with no return statement have an implicit return None - this is explicit in the code object. """ if isinstance(func, (staticmethod, classmethod, types.MethodType)): func = six.get_method_function(func) if isinstance(func, property): func = property.fget if unwrap: func = _unwrap_function(func) try: code_obj = six.get_function_code(func) except AttributeError: # This callable is something else - assume it is OK. 
return True # quick check if code_obj.co_code == b'd\x00\x00S' and code_obj.co_consts[0] is None: return True if code_obj.co_code == b'd\x01\x00S' and code_obj.co_consts[1] is None: return True # convert bytes to instructions instructions = _get_instructions(code_obj) if len(instructions) < 2: return True # this never happens as there is always the implicit return None which is 2 instructions assert instructions[-1].opname == 'RETURN_VALUE' # returns TOS (top of stack) instruction = instructions[-2] if not (instruction.opname == 'LOAD_CONST' and code_obj.co_consts[instruction.arg] is None): # TOS is None return False # return is not None instructions = instructions[:-2] if len(instructions) == 0: return True # look for raise NotImplementedError if instructions[-1].opname == 'RAISE_VARARGS': # the thing we are raising should be the result of __call__ (instantiating exception object) if instructions[-2].opname == 'CALL_FUNCTION': for instr in instructions[:-2]: if instr.opname == 'LOAD_GLOBAL' and code_obj.co_names[instr.arg] == 'NotImplementedError': return True return False _Instruction = collections.namedtuple('_Instruction', ('opcode', 'opname', 'arg', 'argval')) def _get_instructions(code_obj): if hasattr(dis, 'get_instructions'): return list(dis.get_instructions(code_obj)) instructions = [] instruction = None for byte in code_obj.co_code: byte = _six_ord(byte) if instruction is None: instruction = [byte] else: instruction.append(byte) if instruction[0] < dis.HAVE_ARGUMENT or len(instruction) == 3: op_code = instruction[0] op_name = dis.opname[op_code] if instruction[0] < dis.HAVE_ARGUMENT: instructions.append(_Instruction(op_code, op_name, None, None)) else: arg = instruction[1] instructions.append(_Instruction(op_code, op_name, arg, arg)) instruction = None return instructions def _get_function_signature(function): """ Returns a list of argument names and the number of default arguments """ code_obj = function.__code__ args = 
code_obj.co_varnames[:code_obj.co_argcount] return args, len(function.__defaults__) if function.__defaults__ is not None else 0 def _signatures_are_consistent(func_sig, base_sig): """ :param func_sig: (args, num_default) tuple for overriding function :param base_sig: (args, num_default) tuple for base class function :return: True if signatures are consistent. 2 function signatures are consistent if: * The argument names match * new arguments in func_sig have defaults * The number of arguments without defaults does not increase """ func_args, func_num_defaults = func_sig base_args, base_num_defaults = base_sig base_num_args = len(base_args) func_num_args = len(func_args) func_num_required = func_num_args - func_num_defaults base_num_required = base_num_args - base_num_defaults return (func_args[:base_num_args] == base_args and # parameter names match func_num_args - base_num_args <= func_num_defaults and # new args have defaults func_num_required <= base_num_required # number of required args does not increase ) def _ensure_everything_is_abstract(attributes): # all methods and properties are abstract on a pure interface namespace = {} functions = [] interface_method_signatures = {} interface_property_names = set() for name, value in six.iteritems(attributes): if _builtin_attrs(name): pass # shortcut elif getattr(value, '__isabstractmethod__', False): if isinstance(value, (staticmethod, classmethod, types.FunctionType)): if isinstance(value, (staticmethod, classmethod)): func = value.__func__ else: func = value functions.append(func) interface_method_signatures[name] = _get_function_signature(func) elif isinstance(value, property): interface_property_names.add(name) elif isinstance(value, staticmethod): func = value.__func__ functions.append(func) interface_method_signatures[name] = _get_function_signature(func) value = abstractstaticmethod(func) elif isinstance(value, classmethod): func = value.__func__ interface_method_signatures[name] = 
_get_function_signature(func) functions.append(func) value = abstractclassmethod(func) elif isinstance(value, types.FunctionType): functions.append(value) interface_method_signatures[name] = _get_function_signature(value) value = abstractmethod(value) elif isinstance(value, property): interface_property_names.add(name) functions.extend([value.fget, value.fset, value.fdel]) # may contain Nones value = abstractproperty(value.fget, value.fset, value.fdel) namespace[name] = value return namespace, functions, interface_method_signatures, interface_property_names def _check_method_signatures(attributes, clsname, interface_method_signatures): """ Scan attributes dict for interface method overrides and check the function signatures are consistent """ for name, base_sig in interface_method_signatures.items(): if name not in attributes: continue value = attributes[name] if not isinstance(value, (staticmethod, classmethod, types.FunctionType)): raise InterfaceError('Interface method over-ridden with non-method') if isinstance(value, (staticmethod, classmethod)): func = value.__func__ else: func = value func_sig = _get_function_signature(func) if not _signatures_are_consistent(func_sig, base_sig): msg = '{module}.{clsname}.{name} argments does not match base class'.format( module=attributes['__module__'], clsname=clsname, name=name) raise InterfaceError(msg) def _patch_properties(cls, base_abstract_properties): """ Create an AttributeProperty for interface properties not provided by an implementation. 
""" abstract_properties = set() functions = [] for attr in cls.__abstractmethods__: value = getattr(cls, attr) if isinstance(value, abstractproperty): functions.extend([value.fget, value.fset, value.fdel]) # may contain Nones setattr(cls, attr, AttributeProperty(attr)) abstract_properties.add(attr) cls._pi.abstractproperties = frozenset(abstract_properties | base_abstract_properties) abstractmethods = set(cls.__abstractmethods__) - abstract_properties for func in functions: if func is not None and func.__name__ in abstractmethods: abstractmethods.discard(func.__name__) cls.__abstractmethods__ = frozenset(abstractmethods) class PureInterfaceType(abc.ABCMeta): """ Meta-Class for PureInterface. This type: * determines if the new class is an interface or a concrete class. * if the type is an interface: * mark all methods and properties as abstract * ensure all method and property bodies are empty * optionally check overriding method signatures match those on base class. * if the type is a concrete class then patch the abstract properties with AttributeProperies. 
""" def __new__(mcs, clsname, bases, attributes): base_types = [(cls, _type_is_pure_interface(cls)) for cls in bases] type_is_interface = all(is_interface for cls, is_interface in base_types) if clsname == 'PureInterface' and attributes['__module__'] == 'pure_interface': type_is_interface = True elif len(bases) > 1 and bases[0] is object: bases = bases[1:] # create a consistent MRO order base_types = base_types[1:] interface_method_signatures = dict() interface_property_names = set() base_abstract_properties = set() for i in range(len(bases)-1, -1, -1): # start at back end base, base_is_interface = base_types[i] if base is object: continue abstract_properties = _get_pi_attribute(base, 'abstractproperties', set()) base_abstract_properties.update(abstract_properties) if base_is_interface: if hasattr(base, '_pi'): method_signatures = _get_pi_attribute(base, 'interface_method_signatures', {}) property_names = _get_pi_attribute(base, 'interface_property_names', set()) else: property_names, method_signatures = _get_abc_interface_props_and_funcs(base) interface_method_signatures.update(method_signatures) interface_property_names.update(property_names) elif not issubclass(base, PureInterface) and IS_DEVELOPMENT: _check_method_signatures(base.__dict__, base.__name__, interface_method_signatures) if IS_DEVELOPMENT: _check_method_signatures(attributes, clsname, interface_method_signatures) if type_is_interface: namespace, functions, method_signatures, property_names = _ensure_everything_is_abstract(attributes) interface_method_signatures.update(method_signatures) interface_property_names.update(property_names) unwrap = getattr(mcs, '_pi_unwrap_decorators', False) for func in functions: if func is None: continue if not _is_empty_function(func, unwrap): raise InterfaceError('Function "{}" is not empty.\nDid you forget to inherit from object to make the class concrete?'.format(func.__name__)) else: # concrete sub-type namespace = attributes cls = super(PureInterfaceType, 
mcs).__new__(mcs, clsname, bases, namespace) cls._pi = _PIAttributes(type_is_interface, interface_method_signatures, interface_property_names) if not type_is_interface: class_properties = set(k for k, v in namespace.items() if isinstance(v, property)) base_abstract_properties.difference_update(class_properties) _patch_properties(cls, base_abstract_properties) if type_is_interface and not cls.__abstractmethods__: cls.__abstractmethods__ = frozenset({''}) # empty interfaces still should not be instantiated return cls def __call__(cls, *args, **kwargs): """ Check that abstract properties are created in constructor """ self = super(PureInterfaceType, cls).__call__(*args, **kwargs) for attr in cls._pi.abstractproperties: if not hasattr(self, attr): raise TypeError('{}.__init__ does not create required attribute "{}"'.format(cls.__name__, attr)) return self # provided_by duck-type checking def _ducktype_check(cls, instance): subclass = type(instance) for attr in cls._pi.interface_method_names: subtype_value = getattr(subclass, attr, None) if not callable(subtype_value): return False for attr in cls._pi.interface_property_names: if not hasattr(instance, attr): return False return True def _class_ducktype_check(cls, subclass): if subclass in cls._pi.ducktype_subclasses: return True for attr in cls._pi.interface_method_names: subtype_value = getattr(subclass, attr, None) if not callable(subtype_value): return False for attr in cls._pi.interface_property_names: if not hasattr(subclass, attr): return False cls._pi.ducktype_subclasses.add(subclass) if IS_DEVELOPMENT: stacklevel = 2 stack = inspect.stack() while stacklevel < len(stack) and 'pure_interface' in stack[stacklevel][1]: stacklevel += 1 warnings.warn('Class {module}.{sub_name} implements {cls_name}.\n' 'Consider inheriting {cls_name} or using {cls_name}.register({sub_name})' .format(cls_name=cls.__name__, sub_name=subclass.__name__, module=cls.__module__), stacklevel=stacklevel) return True def provided_by(cls, obj, 
allow_implicit=True): """ Returns True if obj provides this interface. provided_by(cls, obj) is equivalent to isinstance(obj, cls) unless allow_implicit is True If allow_implicit is True then returns True if interface duck-type check passes. Returns False otherwise. """ if not cls._pi.type_is_pure_interface: raise ValueError('provided_by() can only be called on interfaces') if isinstance(obj, cls): return True if not allow_implicit: return False if cls._class_ducktype_check(type(obj)): return True return cls._ducktype_check(obj) def interface_only(cls, implementation): """ Returns a wrapper around implementation that provides ONLY this interface. """ if cls._pi.impl_wrapper_type is None: type_name = cls.__name__ + 'Only' attributes = {'__module__': cls.__module__} cls._pi.impl_wrapper_type = type(type_name, (_ImplementationWrapper,), attributes) cls.register(cls._pi.impl_wrapper_type) return cls._pi.impl_wrapper_type(implementation, cls) def adapt(cls, obj, allow_implicit=False, interface_only=None): """ Adapts obj to interface, returning obj if to_interface.provided_by(obj, allow_implicit) is True and raising ValueError if no adapter is found If interface_only is True, or interface_only is None and IS_DEVELOPMENT is True then the returned object is wrapped by an object that only provides the methods and properties defined by to_interface. 
""" if interface_only is None: interface_only = IS_DEVELOPMENT if cls.provided_by(obj, allow_implicit=allow_implicit): adapted = obj if interface_only: adapted = cls.interface_only(adapted) return adapted adapters = cls._pi.adapters if not adapters: raise ValueError('Cannot adapt {} to {}'.format(obj, cls.__name__)) for obj_class in type(obj).__mro__: if obj_class in adapters: factory = adapters[obj_class] adapted = factory(obj) if not cls.provided_by(adapted, allow_implicit): raise ValueError('Adapter {} does not implement interface {}'.format(factory, cls.__name__)) if interface_only: adapted = cls.interface_only(adapted) return adapted raise ValueError('Cannot adapt {} to {}'.format(obj, cls.__name__)) def adapt_or_none(cls, obj, allow_implicit=False, interface_only=None): """ Adapt obj to to_interface or return None if adaption fails """ try: return cls.adapt(obj, allow_implicit=allow_implicit, interface_only=interface_only) except ValueError: return None def can_adapt(cls, obj, allow_implicit=False): """ Returns True if adapt(obj, allow_implicit) will succeed.""" try: cls.adapt(obj, allow_implicit=allow_implicit) except ValueError: return False return True def filter_adapt(cls, objects, allow_implicit=False, interface_only=None): """ Generates adaptions of the given objects to this interface. Objects that cannot be adapted to this interface are silently skipped. """ for obj in objects: try: f = cls.adapt(obj, allow_implicit=allow_implicit, interface_only=interface_only) except ValueError: continue yield f ABC = abc.ABC if hasattr(abc, 'ABC') else object @six.add_metaclass(PureInterfaceType) class PureInterface(ABC): pass # adaption def adapts(from_type, to_interface=None): """Class or function decorator for declaring an adapter from a type to an interface. E.g. @adapts(MyClass, MyInterface) def interface_factory(obj): .... If decorating a class to_interface may be None to use the first interface in the class's MRO. E.g. 
@adapts(MyClass) class MyClassToInterfaceAdapter(object, MyInterface): def __init__(self, obj): .... .... will adapt MyClass to MyInterface using MyClassToInterfaceAdapter """ def decorator(cls): if to_interface is None: interfaces = get_type_interfaces(cls) if interfaces: interface = interfaces[0] elif isinstance(cls, type): raise InterfaceError('Class {} does not provide any interfaces'.format(cls.__name__)) else: raise InterfaceError('to_interface must be specified when decorating non-classes') else: interface = to_interface register_adapter(cls, from_type, interface) return cls return decorator def register_adapter(adapter, from_type, to_interface): # types: (from_type) -> to_interface, type, PureInterfaceType """ Registers adapter to convert instances of from_type to objects that provide to_interface for the to_interface.adapt() method. :param adapter: callable that takes an instance of from_type and returns an object providing to_interface. :param from_type: a type to adapt from :param to_interface: a (non-concrete) PureInterface subclass to adapt to. 
""" if not callable(adapter): raise ValueError('adapter must be callable') if not isinstance(from_type, type): raise ValueError('{} must be a type'.format(from_type)) if not (isinstance(to_interface, type) and _get_pi_attribute(to_interface, 'type_is_pure_interface', False)): raise ValueError('{} is not an interface'.format(to_interface)) adapters = _get_pi_attribute(to_interface, 'adapters') if from_type in adapters: raise ValueError('{} already has an adapter to {}'.format(from_type, to_interface)) adapters[from_type] = weakref.proxy(adapter) def type_is_pure_interface(cls): """ Return True if cls is a pure interface""" try: if not issubclass(cls, PureInterface): return False except TypeError: # handle non-classes return False return _get_pi_attribute(cls, 'type_is_pure_interface', False) def get_type_interfaces(cls): """ Returns all interfaces in the cls mro including cls itself if it is an interface """ try: bases = cls.mro() except AttributeError: # handle non-classes return [] return [base for base in bases if type_is_pure_interface(base) and base is not PureInterface] def get_interface_method_names(interface): """ returns a frozen set of names of methods defined by the interface. if interface is not a PureInterface subtype then an empty set is returned """ if type_is_pure_interface(interface): return _get_pi_attribute(interface, 'interface_method_names') else: return frozenset() def get_interface_property_names(interface): """ returns a frozen set of names of properties defined by the interface if interface is not a PureInterface subtype then an empty set is returned """ if type_is_pure_interface(interface): return _get_pi_attribute(interface, 'interface_property_names') else: return frozenset()
44,234
https://github.com/TPNguyen/ThunderRW/blob/master/random_walk/uniform_sampling.h
Github Open Source
Open Source
MIT
2,021
ThunderRW
TPNguyen
C
Code
454
1,588
// // Created by Shixuan Sun on 11/28/20. // #ifndef XTRAGRAPHCOMPUTING_UNIFORM_SAMPLING_H #define XTRAGRAPHCOMPUTING_UNIFORM_SAMPLING_H #include "types.h" #include "amac_frame.h" void uniform_interleaving_move(Graph *graph, BufferSlot *ring, sfmt_t *sfmt, int length) { // Stage 1: generate random number & prefetch the degree. for (int i = 0; i < RING_SIZE; ++i) { BufferSlot& slot = ring[i]; if (!slot.empty_) { slot.prev_ = slot.w_.current_; slot.r_ = sfmt_genrand_uint32(sfmt); _mm_prefetch((void*)(graph->offset_pair_ + slot.w_.current_), PREFETCH_HINT); } } // Stage 2: generate the position & prefetch the neighbor. for (int i = 0; i < RING_SIZE; ++i) { BufferSlot& slot = ring[i]; if (!slot.empty_) { slot.offset_ = graph->offset_pair_[slot.w_.current_]; slot.r_ = slot.offset_.first + (slot.r_ % (slot.offset_.second - slot.offset_.first)); _mm_prefetch((void*)(graph->adj_ + slot.r_), PREFETCH_HINT); } } // Stage 3: update the walker. for (int i = 0; i < RING_SIZE; ++i) { BufferSlot& slot = ring[i]; if (!slot.empty_) { slot.w_.current_ = graph->adj_[slot.r_]; if (slot.w_.length_ < length) { slot.seq_[slot.w_.length_] = slot.w_.current_; } slot.w_.length_ += 1; } } } void uniform_move(Graph *graph, BufferSlot *ring, sfmt_t *sfmt, int length) { for (int i = 0; i < RING_SIZE; ++i) { BufferSlot& slot = ring[i]; if (!slot.empty_) { slot.prev_ = slot.w_.current_; auto neighbors = graph->neighbors(slot.w_.current_); auto random_value = sfmt_genrand_uint32(sfmt); auto selected_position = random_value % neighbors.second; slot.w_.current_ = neighbors.first[selected_position]; if (slot.w_.length_ < length) { slot.seq_[slot.w_.length_] = slot.w_.current_; } slot.w_.length_ += 1; } } } void uniform_amac_move(Graph *graph, BufferSlot *ring, AMAC_uniform_frame* frames, sfmt_t *sfmt, int length) { int search_ring_id = 0; for (int i = 0; i < RING_SIZE; ++i) { BufferSlot& slot = ring[i]; auto* fr = &frames[search_ring_id]; if (!slot.empty_) { if (fr->state == 
AMAC_uniform_frame::Empty) { // Execute stage S0 fr->init(graph->offset_pair_, graph->adj_, slot.w_.current_, i); if (search_ring_id == SEARCH_RING_SIZE - 1) { search_ring_id = 0; } else { search_ring_id += 1; } } else { for (;;) { if (fr->state == AMAC_uniform_frame::S0) { fr->execute_S0(); } else if (fr->state == AMAC_uniform_frame::S1) { fr->execute_S1(sfmt); } else if (fr->state == AMAC_uniform_frame::S2) { fr->execute_S2(); // Find a result. auto& t_slot = ring[fr->id]; t_slot.prev_ = t_slot.w_.current_; t_slot.w_.current_ = fr->value_; if (t_slot.w_.length_ < length) { t_slot.seq_[t_slot.w_.length_] = t_slot.w_.current_; } t_slot.w_.length_ += 1; // initialize the slot. fr->init(graph->offset_pair_, graph->adj_, slot.w_.current_, i); break; } if (search_ring_id == SEARCH_RING_SIZE - 1) search_ring_id = 0; else search_ring_id += 1; fr = &frames[search_ring_id]; } } } } // Stage 2.3: start the search. bool more_work; do { more_work = false; for (int i = 0; i < SEARCH_RING_SIZE; ++i) { auto& frame = frames[i]; if (frame.state == AMAC_uniform_frame::S0) { more_work = true; frame.execute_S0(); } else if (frame.state == AMAC_uniform_frame::S1) { more_work = true; frame.execute_S1(sfmt); } else if (frame.state == AMAC_uniform_frame::S2) { frame.execute_S2(); // Find a result. auto& t_slot = ring[frame.id]; t_slot.prev_ = t_slot.w_.current_; t_slot.w_.current_ = frame.value_; if (t_slot.w_.length_ < length) { t_slot.seq_[t_slot.w_.length_] = t_slot.w_.current_; } t_slot.w_.length_ += 1; } } } while (more_work); } #endif //XTRAGRAPHCOMPUTING_UNIFORM_SAMPLING_H
17,588
https://github.com/tommy351/kosko/blob/master/packages/config/src/types.ts
Github Open Source
Open Source
MIT
2,023
kosko
tommy351
TypeScript
Code
118
358
import { array, string, object, assign, optional, record, boolean, integer, min } from "superstruct"; /** * Environment config type. * * @public */ export interface EnvironmentConfig { require?: string[]; components?: string[]; loaders?: string[]; } export const environmentConfigSchema = object({ require: optional(array(string())), components: optional(array(string())), loaders: optional(array(string())) }); /** * Global config type. * * @public */ export interface Config extends EnvironmentConfig { environments?: Record<string, EnvironmentConfig>; paths?: { environment?: { global?: string; component?: string; }; }; extensions?: string[]; baseEnvironment?: string; bail?: boolean; concurrency?: number; } export const configSchema = assign( environmentConfigSchema, object({ environments: optional(record(string(), environmentConfigSchema)), paths: optional( object({ environment: optional( object({ global: optional(string()), component: optional(string()) }) ) }) ), extensions: optional(array(string())), baseEnvironment: optional(string()), bail: optional(boolean()), concurrency: optional(min(integer(), 1)) }) );
40,340
https://github.com/xiayingfeng/blaze-persistence/blob/master/entity-view/impl/src/main/java/com/blazebit/persistence/view/impl/entity/InverseElementToEntityMapper.java
Github Open Source
Open Source
ECL-2.0, Apache-2.0
null
blaze-persistence
xiayingfeng
Java
Code
149
354
/* * Copyright 2014 - 2022 Blazebit. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.blazebit.persistence.view.impl.entity; import com.blazebit.persistence.view.impl.update.UpdateContext; import com.blazebit.persistence.view.impl.update.flush.DirtyAttributeFlusher; import javax.persistence.Query; /** * * @author Christian Beikov * @since 1.2.0 */ public interface InverseElementToEntityMapper<E> { public void flushEntity(UpdateContext context, Object oldParent, Object newParent, Object child, DirtyAttributeFlusher<?, E, Object> nestedGraphNode); public Query createInverseUpdateQuery(UpdateContext context, Object element, DirtyAttributeFlusher<?, E, Object> nestedGraphNode, DirtyAttributeFlusher<?, ?, ?> inverseAttributeFlusher); }
22,092
https://github.com/lcmr/practica1-g2/blob/master/application/views/layout/footer.php
Github Open Source
Open Source
MIT
null
practica1-g2
lcmr
PHP
Code
17
90
<!-- Bootstrap core JavaScript --> <script src="<?php echo base_url('library/users/vendor/jquery/jquery.min.js'); ?>"></script> <script src="<?php echo base_url('library/users/vendor/bootstrap/js/bootstrap.bundle.min.js'); ?>"></script> </body> </html>
30,642
https://github.com/AndersonMGarcia/Modulo1-Introducao/blob/master/11-ExercicioPraticoVariaveis/index.php
Github Open Source
Open Source
MIT
null
Modulo1-Introducao
AndersonMGarcia
PHP
Code
85
386
<!DOCTYPE html> <html lang="pt-br"> <head> <meta charset="UTF-8"> <meta http-equiv="X-UA-Compatible" content="IE=edge"> <meta name="viewport" content="width=device-width, initial-scale=1.0"> <title>Exercício Prático - Variáveis</title> </head> <body> <pre> <?php $lista = [ 'nome' => 'Anderson', 'idade' => '40', 'atributos' =>[ 'forca' => 60, 'agilidade' => 90, 'destreza' => 50 ], 'vida' => 1000, 'mana' => 928 ]; print_r($lista); echo "<hr>"; echo "Nome: ". $lista['nome']."<br>"; echo "Idade: ". $lista['idade']."<br>"; echo "Força: ". $lista['atributos']['forca']."<br>"; echo "Agilidade: ". $lista['atributos']['agilidade']."<br>"; echo "Destreza: ". $lista['atributos']['destreza']."<br>"; echo "Vida: ". $lista['vida']."<br>"; echo "Mana: ". $lista['mana']."<br>"; ?> </pre> </body> </html>
25,521
https://github.com/randiayeshanikossinna/Log-Analyser/blob/master/src/main/java/com/ConstructionTeam/FileRepository/FileReaderBuffered.java
Github Open Source
Open Source
MIT
null
Log-Analyser
randiayeshanikossinna
Java
Code
48
149
package com.ConstructionTeam.FileRepository; import java.io.BufferedReader; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.IOException; public class FileReaderBuffered implements InputFileReader { @Override public BufferedReader readFile(String path) { FileReader fileReader = null; try { fileReader = new FileReader(path); } catch (FileNotFoundException e) { System.out.print("A file not Founded :"); } return new BufferedReader(fileReader); } }
27,082
https://github.com/SagePtr/TwelveMonkeys/blob/master/sandbox/sandbox-common/src/main/java/com/twelvemonkeys/io/FileLockingTest.java
Github Open Source
Open Source
BSD-3-Clause
2,022
TwelveMonkeys
SagePtr
Java
Code
361
739
/* * Copyright (c) 2009, Harald Kuhr * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * Neither the name "TwelveMonkeys" nor the names of its contributors * may be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package com.twelvemonkeys.io; import java.io.*; import java.nio.channels.Channels; import java.nio.channels.FileChannel; import java.nio.channels.FileLock; /** * FileLockingTest * * @author <a href="mailto:harald.kuhr@gmail.com">Harald Kuhr</a> * @author last modified by $Author: haraldk$ * @version $Id: FileLockingTest.java,v 1.0 May 12, 2009 7:15:38 PM haraldk Exp$ */ public class FileLockingTest { public static void main(final String[] pArgs) throws IOException { FileChannel channel = new RandomAccessFile(pArgs[0], "rw").getChannel(); FileLock lock = channel.tryLock(0, Long.MAX_VALUE, pArgs.length <= 1 || !"false".equalsIgnoreCase(pArgs[1])); // Shared lock for entire file System.out.println("lock: " + lock); if (lock != null) { System.in.read(); InputStream stream = Channels.newInputStream(channel); BufferedReader reader = new BufferedReader(new InputStreamReader(stream)); String line; while ((line = reader.readLine()) != null) { System.out.println(line); } } else { System.out.println("Already locked"); } } }
39,862
https://github.com/caffeinehit/cuffs.js/blob/master/src/compiler.coffee
Github Open Source
Open Source
MIT
2,013
cuffs.js
caffeinehit
CoffeeScript
Code
90
215
define ['./ns'], (Cuffs) -> STOP_DESCENT = 'stop-descent-+"*' # Random string, easier equality comparison walk = (tree, callback)-> # Walk a DOM tree depth first and call a callback on # each node. Escape current descent if the callback returns # STOP_DESCENT and continue with next sibling. recurse = (current, depth = 1)-> stopDescent = callback current, depth if stopDescent != STOP_DESCENT and current.firstChild? recurse current.firstChild, depth + 1 return if not current.nextSibling? return recurse current.nextSibling, depth return recurse tree.firstChild if tree.firstChild? return Cuffs.Compiler = { STOP_DESCENT: STOP_DESCENT walk: walk }
9,044
https://github.com/Andarin/Java_project_studies/blob/master/WkVonMuenzenHerausfinden/Main.java
Github Open Source
Open Source
Apache-2.0
null
Java_project_studies
Andarin
Java
Code
295
943
import java.math.BigInteger; /* * Modell: Es gibt 5 Messreihen, die von 2 gezinkten Muenzen stammen. Man weiß nicht, welche Messreihen * von welcher Muenze stammen, und will nun die wahrscheinlichsten Wk der Muenzen herausfinden. */ public class Main { public static int iteration = 100; public static int[][] messreihe = {{1,0,0,0,1,1,0,1,0,1}, {1,1,1,1,0,1,1,1,1,1}, {1,0,1,1,1,1,1,0,1,1}, {1,0,1,0,0,0,1,1,0,0}, {0,1,1,1,0,1,1,1,0,1}}; public static double wk1 = 0.9, wk2 = 0.8; public static void main(String[] args) { int[] sumarray = new int[5]; for (int i = 0; i < 5; i++) { for (int j = 0; j < 10; j++) { sumarray[i] += messreihe[i][j]; } } double a1sum = 0, a2sum = 0, b1sum = 0, b2sum = 0; for (int i = 0; i < iteration; i++) { for (int row = 0; row < 5; row++) { double a = binCoeff(messreihe[row].length,sumarray[row]).longValue()*Math.pow(wk1,sumarray[row])*Math.pow(1-wk1,messreihe[row].length-sumarray[row]); double b = binCoeff(messreihe[row].length,sumarray[row]).longValue()*Math.pow(wk2,sumarray[row])*Math.pow(1-wk2,messreihe[row].length-sumarray[row]); double c = a+b; a = a/c; b = b/c; double a1 = a*sumarray[row]; double a2 = a*(messreihe[row].length-sumarray[row]); double b1 = b*sumarray[row]; double b2 = b*(messreihe[row].length-sumarray[row]); a1sum += a1; a2sum += a2; b1sum += b1; b2sum += b2; } wk1 = a1sum / (a1sum + a2sum); wk2 = b1sum / (b1sum + b2sum); a1sum = 0; a2sum = 0; b1sum = 0; b2sum = 0; System.out.print(i+" Wk1 = "+wk1+" "); System.out.println("Wk2 = "+wk2); } } public static BigInteger binCoeff(int n, int k) { if ((n < 0) || (k < 0) || (k > n)) throw new IllegalArgumentException(n + ", " + k); if (k > n/2) k = n - k; BigInteger result = BigInteger.ONE; for (int i = n - k + 1; i <= n; i++) result = result.multiply(new BigInteger("" + i)); for (int i = 2; i <= k; i++) result = result.divide(new BigInteger("" + i)); return result; } }
13,796
https://github.com/fused-effects/fused-effects/blob/master/src/Control/Carrier/Empty/Maybe.hs
Github Open Source
Open Source
BSD-3-Clause
2,023
fused-effects
fused-effects
Haskell
Code
420
895
{-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE TypeOperators #-} {-# LANGUAGE UndecidableInstances #-} {- | A carrier for an 'Empty' effect, indicating failure with a 'Nothing' value. Users that need access to an error message should use the 'Control.Effect.Fail.Fail' effect. Note that 'Empty' effects can, when they are the last effect in a stack, be interpreted directly to a 'Maybe' without a call to 'runEmpty'. @since 1.0.0.0 -} module Control.Carrier.Empty.Maybe ( -- * Empty carrier runEmpty , evalEmpty , execEmpty , EmptyC(..) -- * Empty effect , module Control.Effect.Empty ) where import Control.Algebra import Control.Effect.Empty import Control.Monad.Fail as Fail import Control.Monad.Fix import Control.Monad.IO.Class import Control.Monad.Trans.Class import Control.Monad.Trans.Maybe import Data.Functor (void) import Data.Maybe (isJust) -- | Run an 'Empty' effect, returning 'Nothing' for empty computations, or 'Just' the result otherwise. -- -- @ -- 'runEmpty' 'empty' = 'pure' 'Nothing' -- @ -- @ -- 'runEmpty' ('pure' a) = 'pure' ('Just' a) -- @ -- -- @since 1.0.0.0 runEmpty :: EmptyC m a -> m (Maybe a) runEmpty (EmptyC m) = runMaybeT m {-# INLINE runEmpty #-} -- | Run an 'Empty' effect, discarding its result. -- -- This is convenient for using 'empty' to signal early returns without needing to know whether control exited normally or not. -- -- @ -- 'evalEmpty' = 'void' '.' 'runEmpty' -- @ -- -- @since 1.1.0.0 evalEmpty :: Functor m => EmptyC m a -> m () evalEmpty = void . runEmpty {-# INLINE evalEmpty #-} -- | Run an 'Empty' effect, replacing its result with a 'Bool' indicating whether control exited normally. -- -- This is convenient for using 'empty' to signal early returns when all you need to know is whether control exited normally or not, and not what value it exited with. -- -- @ -- 'execEmpty' = 'fmap' 'isJust' '.' 
'runEmpty' -- @ -- @ -- 'execEmpty' ('pure' a) = 'pure' 'True' -- @ -- @ -- 'execEmpty' 'empty' = 'pure' 'False' -- @ -- -- @since 1.1.0.0 execEmpty :: Functor m => EmptyC m a -> m Bool execEmpty = fmap isJust . runEmpty {-# INLINE execEmpty #-} -- | @since 1.0.0.0 newtype EmptyC m a = EmptyC (MaybeT m a) deriving (Algebra (Empty :+: sig), Applicative, Functor, Monad, MonadFix, MonadIO, MonadTrans) -- | 'EmptyC' passes 'Fail.MonadFail' operations along to the underlying monad @m@, rather than interpreting it as a synonym for 'empty' à la 'MaybeT'. instance Fail.MonadFail m => Fail.MonadFail (EmptyC m) where fail = lift . Fail.fail {-# INLINE fail #-}
7,144
https://github.com/neoPix/Edely/blob/master/Lib/Edely/Tool/SqlConnection.class.php
Github Open Source
Open Source
Apache-2.0
2,014
Edely
neoPix
PHP
Code
285
1,058
<?php /** * Classe SqlConnection * @role: Gestion de la connexion a une base de donnée * @creator: Balan David * @updated: 01/06/2013 **/ class SqlConnection{ private static $_connections; private $_con=null; private $_stm=null; public $name = ''; private function __construct($connection='default') { $this->name = $connection; } function __destruct() { unset(SqlConnection::$_connections[$this->name]); } private function open() { try{ $conf = Configure::read('database.'.$this->name); $this->_con = new PDO('mysql:host='.$conf['server'].';dbname='.$conf['base'], $conf['user'], $conf['password']); } catch(Exception $e){ throw new ServerErrorException(__('Unable to connect to SQL server : {message}', array('message'=>$e->getMessage()))); } } /** * Prepare une requête. * @example: $mysql->prepare('SELECT * FROM table WHERE id=:id'); **/ public function prepare($request) { if($this->_con == null) $this->open(); $this->_stm = $this->_con->prepare($request); } /** * Execute une requête préparée et remplace les paramètres par ceux passés * @example: $mysql->exec(array(':id'=>2)); **/ public function exec($params=array()) { try{ $this->_res = $this->_stm->execute($params); if($this->_res===false){ $err = $this->_stm->errorInfo(); throw new Exception($err[2]); } } catch(Exception $e){ throw new ServerErrorException(__('Unable to execute the request : {message}', array('message'=>$e->getMessage()))); } } /** * Lis la ligne de résultat suivante * @example: while($line = $mysql->read())print_r($line); **/ public function read() { return $this->_stm->fetch(PDO::FETCH_ASSOC); } /** * Récupère le dernier id inséré * @example: $id = $mysql->lastInsertedId(); **/ public function lastInsertedId() { $id=null; try{ $this->_con->lastInsertId(); } catch(Exception $e){ throw new ServerErrorException(__('Unable to get the last inserted id : {message}', array('message'=>$e->getMessage()))); } return $id; } /** * Démare une transaction SQL **/ public function beginTransaction() { 
if($this->_con == null) $this->open(); $this->_con->beginTransaction(); } /** * Annule et ferme une transaction SQL **/ public function rollback() { if($this->_con->inTransaction()) $this->_conn->rollBack(); } /** * Enregistre et ferme une transaction SQL **/ public function commit() { if($this->_con->inTransaction()) $this->_conn->commit(); } /** * Singleton, récupère la connexion SQL **/ static public function getConnection($connection='default') { if(!isset(self::$_connections[$connection])) self::$_connections[$connection] = new SqlConnection($connection); return self::$_connections[$connection]; } }
20,702
https://github.com/ewaters/screenshots/blob/master/test/base/all_tests.dart
Github Open Source
Open Source
BSD-2-Clause-FreeBSD
2,022
screenshots
ewaters
Dart
Code
9
37
import 'process_common_test.dart' as process_common_test; void main() { process_common_test.main(); }
16,828
https://github.com/asrani27/kosmetik/blob/master/app/Http/Controllers/KeranjangController.php
Github Open Source
Open Source
MIT
null
kosmetik
asrani27
PHP
Code
137
606
<?php namespace App\Http\Controllers; use Alert; use App\Barang; use App\Keranjang; use Illuminate\Http\Request; class KeranjangController extends Controller { public function add(Request $req) { $checkStok = Barang::find($req->barang_id)->stok; $checkKeranjang = Keranjang::where('barang_id', $req->barang_id)->where('type', $req->type)->first(); if($checkKeranjang == null){ $jumlah_jual = $req->jumlah; }else{ $jumlah_jual = $checkKeranjang->jumlah + $req->jumlah; } if($req->type == 'pembelian'){ $check = Keranjang::where('barang_id', $req->barang_id)->where('type', $req->type)->first(); if($check == null){ $s = new Keranjang; $s->barang_id = $req->barang_id; $s->jumlah = $req->jumlah; $s->type = $req->type; $s->save(); }else{ $s = $check; $s->jumlah = $s->jumlah+$req->jumlah; $s->save(); } return back(); }else{ if($jumlah_jual > $checkStok){ Alert::info('Stok Tidak Cukup', 'Info Message'); return back(); }else{ $check = Keranjang::where('barang_id', $req->barang_id)->where('type', $req->type)->first(); if($check == null){ $s = new Keranjang; $s->barang_id = $req->barang_id; $s->jumlah = $req->jumlah; $s->type = $req->type; $s->save(); }else{ $s = $check; $s->jumlah = $s->jumlah+$req->jumlah; $s->save(); } return back(); } } } public function delete($id) { $data = Keranjang::find($id)->delete(); return back(); } }
49,017
https://github.com/kzvd4729/Problem-Solving/blob/master/UVa/UVA - 10871/Accepted.cpp
Github Open Source
Open Source
MIT
2,022
Problem-Solving
kzvd4729
C++
Code
109
622
/**************************************************************************************** * @author: kzvd4729 created: 2018-03-21 19:19:23 * solution_verdict: Accepted language: C++ * run_time: 0 memory_used: * problem: https://vjudge.net/problem/UVA-10871 ****************************************************************************************/ #include<bits/stdc++.h> #define long long long using namespace std; const int N=1e6; int vis[N+2],lim,n,arr[N+2],qm[N+2],x,f,t,ans,id; void seive(void) { vis[1]=1; lim=sqrt(N)+1; for(int i=4;i<=N;i+=2)vis[i]=1; for(int i=3;i<=N;i+=2) { if(vis[i])continue; if(i>lim)continue; for(int j=i*i;j<=N;j+=2*i)vis[j]=1; } } int main() { ios_base::sync_with_stdio(0); cin.tie(0); seive(); cin>>t; while(t--) { cin>>n; qm[0]=0; for(int i=1;i<=n;i++)cin>>arr[i],qm[i]=qm[i-1]+arr[i]; f=0; for(int j=2;j<=n;j++) { for(int i=1;i<=n;i++) { if(i+j-1>n)break; x=qm[i+j-1]-qm[i-1]; if(x>N)continue; if(vis[x]==0) { f=1; ans=j; id=i; break; } } if(f)break; } if(f) { cout<<"Shortest primed subsequence is length "<<ans<<": "; for(int i=id;i<id+ans;i++) { if(i!=id)cout<<" "; cout<<arr[i]; } cout<<endl; } else cout<<"This sequence is anti-primed."<<endl; } return 0; }
32,431
https://github.com/aitorventura/folklore-musical-exchange-backend/blob/master/src/chat/chat.controller.ts
Github Open Source
Open Source
MIT
null
folklore-musical-exchange-backend
aitorventura
TypeScript
Code
295
1,003
import { Controller, Get, Post, Body, Delete, Put, Param, UseGuards, ForbiddenException, } from '@nestjs/common'; import { ChatDto } from '../chat/chat.dto'; import { MessageDto } from '../chat/message.dto'; import { ChatService } from '../chat/chat.service'; import { ChatDataBaseConnection } from './chat.database'; @Controller('chat') export class ChatController { constructor(private readonly chatService: ChatService) { } /** * Se obtienen los datos de un usuario dado el id * @param id del usuario en sesión */ @Get('/myself/:id') async getMyself(@Param('id') id: number) { console.log('Controlador, id: ' + id); const result = await this.chatService.getMyself(id); return result[0]; } /** * * @param idChat * @param id */ @Get('/participant/:id') async getParticipant(@Param('id') id: number) { console.log('controlador'); const result = await this.chatService.getParticipant(id); return result; } /** * Se obtienen todos los chats en los que participa un usuario */ @Get('/all/:id') async getChats(@Param('id') id: number) { return await this.chatService.getChats(id); } /** * Se obtienen los mensajes de un chat con una persona * @param idA * @param idB */ @Get('/:idA/:idB') async getChat(@Param('idA') idA: number, @Param('idB') idB: number) { const result = await this.chatService.getChat(idA, idB); return result; } @Post('/newmsg/:idA/:idB') async createMessage( @Param('idA') idA: number, @Param('idB') idB: number, @Body() messageDto: MessageDto, ) { return await this.chatService.createMessage(idA, idB, messageDto); } /* @Delete(':id') @UseGuards(AuthGuard) async deleteChat(@AuthUser() requester: Requester, @Param('id') id: string) { if (requester.role !== RequesterRole.MGROUP) { throw new ForbiddenException(); } const listExchanges = await new ChatDataBaseConnection().getChatMGroup( requester.id, ); const array = []; listExchanges.forEach(element => { array.push(element.id); }); if (!array.includes(parseInt(id))) { throw new ForbiddenException(); } return 
this.chatService.deleteChat(parseInt(id)); } @Put(':id') @UseGuards(AuthGuard) async updateChat( @AuthUser() requester: Requester, @Param('id') id: string, @Body() musicalExchangeDto: ChatDto, ) { if (requester.role !== 'MGROUP') { throw new ForbiddenException(); } const listExchanges = await new ChatDataBaseConnection().getChatMGroup( requester.id, ); const array = []; listExchanges.forEach(element => { array.push(element.id); }); if (!array.includes(parseInt(id))) { throw new ForbiddenException(); } musicalExchangeDto.id = parseInt(id); return this.chatService.updateChat(musicalExchangeDto); }*/ }
10,569
https://github.com/ankushjamdagani/semantic-chat/blob/master/client/src/store/reducers.js
Github Open Source
Open Source
MIT
null
semantic-chat
ankushjamdagani
JavaScript
Code
28
107
import { combineReducers } from "redux"; import logoutReducer from "../components/hoc/with-logout/reducers"; import authReducer from "../components/views/auth/reducers"; import homeReducer from "../components/views/home/reducers"; export default combineReducers({ logout: logoutReducer, auth: authReducer, home: homeReducer });
21,263
https://github.com/mrcece/ColaFrameWork/blob/master/Assets/3rd/Behavior Designer/Runtime/Basic Tasks/GameObject/Find.cs
Github Open Source
Open Source
MIT
2,022
ColaFrameWork
mrcece
C#
Code
61
190
using UnityEngine; namespace BehaviorDesigner.Runtime.Tasks.Basic.UnityGameObject { [TaskCategory("Basic/GameObject")] [TaskDescription("Finds a GameObject by name. Returns Success.")] public class Find : Action { [Tooltip("The GameObject name to find")] public SharedString gameObjectName; [Tooltip("The object found by name")] [RequiredField] public SharedGameObject storeValue; public override TaskStatus OnUpdate() { storeValue.Value = GameObject.Find(gameObjectName.Value); return TaskStatus.Success; } public override void OnReset() { gameObjectName = null; storeValue = null; } } }
40,368
https://github.com/ckrause/loda/blob/master/programs/oeis/189/A189480.asm
Github Open Source
Open Source
Apache-2.0
2,021
loda
ckrause
Assembly
Code
30
289
; A189480: [4rn]-4[rn], where r=sqrt(5) and [ ]=floor. ; 0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3,0,0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3,0,1,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,2,3,0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3,3,0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3,0,0,1,2,3,0,1,2,3,0,1,2 mul $0,2 add $0,2 mul $0,4 seq $0,60143 ; a(n) = floor(n/tau), where tau = (1 + sqrt(5))/2. mod $0,4
34,425
https://github.com/illia-okonskyi/PostCore/blob/master/src/PostCore/PostCore.Core/Services/Dao/MailDao.cs
Github Open Source
Open Source
MIT
null
PostCore
illia-okonskyi
C#
Code
1,050
3,510
using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; using Microsoft.EntityFrameworkCore; using PostCore.Core.Activities; using PostCore.Core.Branches; using PostCore.Core.Cars; using PostCore.Core.DbContext; using PostCore.Core.Mail; using PostCore.Core.Users; using PostCore.Utils; namespace PostCore.Core.Services.Dao { public interface IMailDao { Task<IEnumerable<Post>> GetAllAsync( string filterId = null, string filterPersonFrom = null, string filterPersonTo = null, string filterAddressTo = null, long? filterBranchId = null, string filterBranchStockAddress = null, long? filterCarId = null, long? filterSourceBranchId = null, long? filterDestinationBranchId = null, PostState? filterState = null, string sortKey = null, SortOrder sortOrder = SortOrder.Ascending); Task<Post> GetByIdAsync(long postId); Task<IEnumerable<Post>> GetAllForStock( Branch branch, bool withoutAddressOnly = false, long? filterSourceBranchId = null, long? 
filterDestinationBranchId = null, string filterPersonFrom = null, string filterPersonTo = null, string filterAddressTo = null, string sortKey = null, SortOrder sortOrder = SortOrder.Ascending); Task CreateAsync(Post post, User user); Task DeliverAsync(long postId, User user); Task StockAsync(long postId, string address, User user); Task MoveToCarAsync(long postId, Car car, bool courierDelivery, User user); Task MoveToBranchStockAsync(long postId, Branch branch, User user); } public class MailDao : IMailDao { public static List<string> AcceptableSortKeys { get; private set; } = new List<string> { nameof(Post.Id), nameof(Post.PersonFrom), nameof(Post.PersonTo), nameof(Post.AddressTo), nameof(Post.BranchId), nameof(Post.BranchStockAddress), nameof(Post.CarId), nameof(Post.SourceBranchId), nameof(Post.DestinationBranchId), nameof(Post.State) }; private readonly ApplicationDbContext _dbContext; public MailDao(ApplicationDbContext dbContext) { _dbContext = dbContext; } public async Task<IEnumerable<Post>> GetAllAsync( string filterId = null, string filterPersonFrom = null, string filterPersonTo = null, string filterAddressTo = null, long? filterBranchId = null, string filterBranchStockAddress = null, long? filterCarId = null, long? filterSourceBranchId = null, long? filterDestinationBranchId = null, PostState? 
filterState = null, string sortKey = null, SortOrder sortOrder = SortOrder.Ascending) { // 1) Check sortKey if (string.IsNullOrEmpty(sortKey)) { sortKey = AcceptableSortKeys.First(); } if (!AcceptableSortKeys.Contains(sortKey)) { throw new ArgumentException("Must be one of AcceptableSortKeys", nameof(sortKey)); } // 2) Filter var mail = _dbContext.Post .AsNoTracking() .Include(p => p.Branch) .Include(p => p.Car) .Include(p => p.SourceBranch) .Include(p => p.DestinationBranch) .AsQueryable(); if (!string.IsNullOrEmpty(filterId)) { mail = mail.Where(p => p.Id.ToString().Contains(filterId)); } if (!string.IsNullOrEmpty(filterPersonFrom)) { mail = mail.Where(p => p.PersonFrom.Contains(filterPersonFrom)); } if (!string.IsNullOrEmpty(filterPersonTo)) { mail = mail.Where(p => p.PersonTo.Contains(filterPersonTo)); } if (!string.IsNullOrEmpty(filterAddressTo)) { mail = mail.Where(p => p.AddressTo.Contains(filterAddressTo)); } if (filterBranchId.HasValue) { mail = mail.Where(p => p.BranchId == filterBranchId.Value); } if (!string.IsNullOrEmpty(filterBranchStockAddress)) { mail = mail.Where(p => p.BranchStockAddress.Contains(filterBranchStockAddress)); } if (filterCarId.HasValue) { mail = mail.Where(p => p.CarId == filterCarId.Value); } if (filterSourceBranchId.HasValue) { mail = mail.Where(p => p.SourceBranchId == filterSourceBranchId.Value); } if (filterDestinationBranchId.HasValue) { mail = mail.Where(p => p.DestinationBranchId == filterDestinationBranchId.Value); } if (filterState.HasValue) { mail = mail.Where(p => p.State == filterState.Value); } // 3) Sort mail = mail.Order(sortKey, sortOrder); return await mail.ToListAsync(); } public async Task<IEnumerable<Post>> GetAllForStock( Branch branch, bool withoutAddressOnly = false, long? filterSourceBranchId = null, long? 
filterDestinationBranchId = null, string filterPersonFrom = null, string filterPersonTo = null, string filterAddressTo = null, string sortKey = null, SortOrder sortOrder = SortOrder.Ascending) { // 1) Check sortKey if (string.IsNullOrEmpty(sortKey)) { sortKey = AcceptableSortKeys.First(); } if (!AcceptableSortKeys.Contains(sortKey)) { throw new ArgumentException("Must be one of AcceptableSortKeys", nameof(sortKey)); } // 2) Filter var mail = _dbContext.Post .AsNoTracking() .Include(p => p.SourceBranch) .Include(p => p.DestinationBranch) .AsQueryable(); if (withoutAddressOnly) { mail = mail.Where(p => p.BranchStockAddress == null); } if (filterSourceBranchId.HasValue) { mail = mail.Where(p => p.SourceBranchId == filterSourceBranchId.Value); } if (filterDestinationBranchId.HasValue) { mail = mail.Where(p => p.DestinationBranchId == filterDestinationBranchId.Value); } if (!string.IsNullOrEmpty(filterPersonFrom)) { mail = mail.Where(p => p.PersonFrom.Contains(filterPersonFrom)); } if (!string.IsNullOrEmpty(filterPersonTo)) { mail = mail.Where(p => p.PersonTo.Contains(filterPersonTo)); } if (!string.IsNullOrEmpty(filterAddressTo)) { mail = mail.Where(p => p.AddressTo.Contains(filterAddressTo)); } mail = mail .Where(p => p.BranchId == branch.Id) .Where(p => p.State == PostState.Created || p.State == PostState.InBranchStock); // 3) Sort mail = mail.Order(sortKey, sortOrder); return await mail.ToListAsync(); } public async Task<Post> GetByIdAsync(long postId) { return await _dbContext.Post .AsNoTracking() .Include(p => p.Branch) .Include(p => p.SourceBranch) .Include(p => p.DestinationBranch) .Where(p => p.Id == postId) .FirstOrDefaultAsync(); } public async Task CreateAsync(Post post, User user) { using (var transaction = await _dbContext.Database.BeginTransactionAsync()) { post.State = PostState.Created; _dbContext.Post.Add(post); await _dbContext.SaveChangesAsync(); _dbContext.Activity.Add(new Activity { Type = ActivityType.PostCreated, Message = $"Post #{post.Id} 
created", DateTime = DateTime.Now, User = $"{user.FirstName} {user.LastName}", PostId = post.Id, BranchId = post.SourceBranch.Id }); await _dbContext.SaveChangesAsync(); try { transaction.Commit(); } catch (Exception) { transaction.Rollback(); } } } public async Task DeliverAsync(long postId, User user) { using (var transaction = await _dbContext.Database.BeginTransactionAsync()) { var post = await _dbContext.Post.Where(p => p.Id == postId).FirstOrDefaultAsync(); if (post == null) { throw new ArgumentException("Post with such id not found", nameof(postId)); } post.BranchId = null; post.BranchStockAddress = null; post.CarId = null; post.State = PostState.Delivered; _dbContext.Activity.Add(new Activity { Type = ActivityType.PostDelivered, Message = $"Post #{post.Id} delivered", DateTime = DateTime.Now, User = $"{user.FirstName} {user.LastName}", PostId = post.Id, BranchId = post.DestinationBranchId }); await _dbContext.SaveChangesAsync(); try { transaction.Commit(); } catch (Exception) { transaction.Rollback(); } } } public async Task StockAsync(long postId, string address, User user) { using (var transaction = await _dbContext.Database.BeginTransactionAsync()) { var post = await _dbContext.Post.Where(p => p.Id == postId).FirstOrDefaultAsync(); if (post == null) { throw new ArgumentException("Post with such id not found", nameof(postId)); } post.BranchStockAddress = address; post.State = PostState.InBranchStock; _dbContext.Activity.Add(new Activity { Type = ActivityType.PostStocked, Message = $"Post #{post.Id} stocked with adddress \"{address}\"", DateTime = DateTime.Now, User = $"{user.FirstName} {user.LastName}", PostId = post.Id, BranchId = post.BranchId }); await _dbContext.SaveChangesAsync(); try { transaction.Commit(); } catch (Exception) { transaction.Rollback(); } } } public async Task MoveToCarAsync(long postId, Car car, bool courierDelivery, User user) { using (var transaction = await _dbContext.Database.BeginTransactionAsync()) { var post = await 
_dbContext.Post.Where(p => p.Id == postId).FirstOrDefaultAsync(); if (post == null) { throw new ArgumentException("Post with such id not found", nameof(postId)); } var branchId = post.BranchId; var carId = car.Id; post.State = courierDelivery ? PostState.InDeviveryToPerson : PostState.InDeliveryToBranchStock; post.BranchId = null; post.BranchStockAddress = null; post.CarId = carId; _dbContext.Activity.Add(new Activity { Type = ActivityType.PostMovedToCar, Message = $"Post #{post.Id} moved to car", DateTime = DateTime.Now, User = $"{user.FirstName} {user.LastName}", PostId = post.Id, BranchId = branchId, CarId = carId }); await _dbContext.SaveChangesAsync(); try { transaction.Commit(); } catch (Exception) { transaction.Rollback(); } } } public async Task MoveToBranchStockAsync(long postId, Branch branch, User user) { using (var transaction = await _dbContext.Database.BeginTransactionAsync()) { var post = await _dbContext.Post.Where(p => p.Id == postId).FirstOrDefaultAsync(); if (post == null) { throw new ArgumentException("Post with such id not found", nameof(postId)); } var branchId = branch.Id; var carId = post.CarId; post.State = PostState.InBranchStock; post.BranchId = branchId; post.BranchStockAddress = null; post.CarId = null; _dbContext.Activity.Add(new Activity { Type = ActivityType.PostMovedToBranchStock, Message = $"Post #{post.Id} moved to branch stock", DateTime = DateTime.Now, User = $"{user.FirstName} {user.LastName}", PostId = post.Id, BranchId = branchId, CarId = carId }); await _dbContext.SaveChangesAsync(); try { transaction.Commit(); } catch (Exception) { transaction.Rollback(); } } } } }
855
https://github.com/dayansoft01/screenshot/blob/master/lib/src/platform_specific/file_manager/file_manager.dart
Github Open Source
Open Source
MIT
2,022
screenshot
dayansoft01
Dart
Code
29
116
import 'dart:typed_data'; // import 'file_manager_mobile.dart'; import 'file_manager_stub.dart' if (dart.library.io) "file_manager_io.dart" if (dart.library.html) "non_io.dart"; abstract class PlatformFileManager { factory PlatformFileManager() => getFileManager(); Future<String> saveFile(Uint8List fileContent, String path, {String? name}); }
18,621
https://github.com/MichaelLogutov/Rocks.Commands/blob/master/src/Rocks.Commands.Tests/Decorators/Async/Commands/IDecoratableAsyncCommand.cs
Github Open Source
Open Source
MIT
2,019
Rocks.Commands
MichaelLogutov
C#
Code
15
49
namespace Rocks.Commands.Tests.Decorators.Async.Commands { internal interface IDecoratableAsyncCommand { int Number { get; set; } } }
49,667
https://github.com/icestudent/ontl/blob/master/ntl/format.hxx
Github Open Source
Open Source
Zlib, LicenseRef-scancode-stlport-4.5
2,022
ontl
icestudent
C++
Code
649
1,951
/**\file********************************************************************* * \brief * * **************************************************************************** */ #ifndef NTL__FORMAT #define NTL__FORMAT #pragma once #include "basedef.hxx" #include <locale> namespace ntl { namespace fmt { template<typename uint_t> static inline char dec_digit(uint_t v) { return static_cast<char>(v) + '0'; } template<typename uint_t> static inline char hex_digit(uint_t v) { static char h[] = "0123456789ABCDEF"; return h[v & 0x0F]; } template<typename int_t, typename char_t> static inline char_t * to_hex(int_t v, char_t * const p) { p[0] = '0'; p[1] = 'x'; p[2 + 2 * sizeof(int_t)] = '\0'; for ( size_t i = sizeof(int_t) * 2; i; --i ) { p[2 + i - 1] = static_cast<char_t>(hex_digit(v)); v >>= 4; } return &p[2 + 2 * sizeof(int_t)]; } template<typename char_t> static inline char_t* to_hex(char_t* dst, size_t dst_len, const void* src, size_t len) { len = std::min(len, dst_len/2); const uint8_t* p = reinterpret_cast<const uint8_t*>(src); for(size_t i = 0; i < len; i++) { const unsigned char c = *p++; *dst++ = hex_digit(c >> 4); *dst++ = hex_digit(c); } if(len*2 < dst_len) *dst = 0; return dst - len*2; } template<typename char_t> static inline size_t from_hex(void* dst, size_t dst_len, const char_t* src, size_t len, const std::locale& locale) { struct { byte operator()(char_t c) const { if(c >= '0' && c <= '9') return c - '0'; if(c >= 'A' && c <= 'F') return (c - 'A' + 10); if(c >= 'a' && c <= 'f') return (c - 'a' + 10); return 0; } } decode; auto& ctype = std::use_facet<std::ctype<char_t>>(locale); const char_t* end = src + len; uint8_t* p = reinterpret_cast<uint8_t*>(dst); while(src < end) { const char_t c = src[0]; if(ctype.is(ctype.space, c)) { src++; continue; } *p++ = (decode(src[0]) << 4) | decode(src[1]); src += 2; } return p - reinterpret_cast<uint8_t*>(dst); } template<typename char_t> static inline size_t from_hex(void* dst, size_t dst_len, const char_t* src, size_t len) { 
struct { byte operator()(char_t c) const { if(c >= '0' && c <= '9') return c - '0'; if(c >= 'A' && c <= 'F') return (c - 'A' + 10); if(c >= 'a' && c <= 'f') return (c - 'a' + 10); return 0; } } decode; len = std::min(len/2, dst_len); uint8_t* p = reinterpret_cast<uint8_t*>(dst); for(size_t i = 0; i < len; i++, p++, src += 2) { *p = (decode(src[0]) << 4) | decode(src[1]); } return p - reinterpret_cast<uint8_t*>(dst); } template<typename char_t, size_t N> static inline size_t from_hex(void* dst, size_t dst_len, const char_t (&src)[N]) { return from_hex(dst, dst_len, src, N); } template<typename char_t = char> struct hex_str_cast { hex_str_cast(int8_t v) { to_hex(v, buf); } hex_str_cast(int16_t v) { to_hex(v, buf); } hex_str_cast(int32_t v) { to_hex(v, buf); } hex_str_cast(int64_t v) { to_hex(v, buf); } hex_str_cast(uint8_t v) { to_hex(v, buf); } hex_str_cast(uint16_t v) { to_hex(v, buf); } hex_str_cast(uint32_t v) { to_hex(v, buf); } hex_str_cast(uint64_t v) { to_hex(v, buf); } operator const char_t * () { return buf; } private: char_t buf[sizeof(uint64_t) * 2 + sizeof("0x")]; };//struct hex_str_cast template<typename int_t, typename char_t> static inline char_t * to_dec(int_t v, char_t * const p) { static const bool signed_type = static_cast<int_t>(-1) < 0; int i = 0; int j = signed_type && v < 0; if ( j ) { p[i++] = '-'; v = static_cast<int_t>(0-v); } do p[i++] = static_cast<char_t>(dec_digit(v % 10)); while ( v /= 10 ); char_t * const end = &p[i]; p[i--] = '\0'; for ( ; j < i; ++j, --i ) { char_t t = p[i]; p[i] = p[j]; p[j] = t; } return end; } template<typename char_t = char> struct str_cast { str_cast(int8_t v) { to_dec(v, buf); } str_cast(int16_t v) { to_dec(v, buf); } str_cast(int32_t v) { to_dec(v, buf); } str_cast(int64_t v) { to_dec(v, buf); } str_cast(uint8_t v) { to_dec(v, buf); } str_cast(uint16_t v) { to_dec(v, buf); } str_cast(uint32_t v) { to_dec(v, buf); } str_cast(uint64_t v) { to_dec(v, buf); } operator const char_t * () { return buf; } private: 
char_t buf[sizeof(" 18446744073709551615")]; };//struct str_cast }//namespace fmt namespace format = fmt; }//namespace ntl #endif//#ifndef NTL__FORMAT
20,259
https://github.com/austince/flink/blob/master/flink-formats/flink-orc/src/test/java/org/apache/flink/orc/OrcFileSystemITCase.java
Github Open Source
Open Source
MIT, Apache-2.0, MIT-0, BSD-3-Clause
2,020
flink
austince
Java
Code
204
503
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.orc; import org.apache.flink.table.planner.runtime.batch.sql.BatchFileSystemITCaseBase; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; /** * ITCase for {@link OrcFileSystemFormatFactory}. */ @RunWith(Parameterized.class) public class OrcFileSystemITCase extends BatchFileSystemITCaseBase { private final boolean configure; @Parameterized.Parameters(name = "{0}") public static Collection<Boolean> parameters() { return Arrays.asList(false, true); } public OrcFileSystemITCase(boolean configure) { this.configure = configure; } @Override public String[] formatProperties() { List<String> ret = new ArrayList<>(); ret.add("'format'='orc'"); if (configure) { ret.add("'format.orc.compress'='snappy'"); } return ret.toArray(new String[0]); } }
31,517
https://github.com/abdulsamadola/hospital-finder/blob/master/src/components/Home/Home.scss
Github Open Source
Open Source
MIT
2,021
hospital-finder
abdulsamadola
SCSS
Code
116
396
#components-dropdown-demo-dropdown-button .ant-dropdown-button { margin: 0 8px 8px 0; } #components-dropdown-demo-dropdown-button .ant-btn-group-rtl.ant-dropdown-button { margin: 0 0 8px 8px; } .header { position: sticky; top: 0; z-index: 999999999; -webkit-box-shadow: 10px 10px 39px -4px rgba(0, 0, 0, 0.53); -moz-box-shadow: 10px 10px 39px -4px rgba(0, 0, 0, 0.53); box-shadow: 10px 10px 39px -4px rgba(0, 0, 0, 0.53); border-bottom-left-radius: 20px; border-bottom-right-radius: 20px; } .demo-infinite-container { border: 1px solid #e8e8e8; border-radius: 4px; overflow: auto; padding: 8px 24px; height: 300px; } .demo-loading-container { position: absolute; bottom: 40px; width: 100%; text-align: center; } .title { position: abolute; bottom: -80px; color: wheat; } /* Smartphones (portrait) ----------- */ @media only screen and (max-width: 768px) { /* Styles */ .header .title { display: none; } }
17,026
https://github.com/JackTheRipper42/Binding/blob/master/Assets/Scripts/BindingTest/ViewModel.cs
Github Open Source
Open Source
MIT
null
Binding
JackTheRipper42
C#
Code
295
964
using Assets.Scripts.Binding; using UnityEngine; namespace Assets.Scripts.BindingTest { public class ViewModel { public readonly NotifyingObject<Vector3> PositionProperty; public readonly NotifyingObject<string> UpButtonTextProperty; public readonly NotifyingObject<string> DownButtonTextProperty; public readonly NotifyingObject<float> UpThresholdProperty; public readonly NotifyingObject<float> DownThresholdProperty; public readonly NotifyingObject<ICommand> UpButtonCommandProperty; public readonly NotifyingObject<ICommand> DownButtonCommandProperty; public readonly NotifyingObject<bool> UpAvailableProperty; public readonly NotifyingObject<bool> DownAvailableProperty; private readonly Vector3 _upDownVector = new Vector3(0f, 0.5f, 0f); public ViewModel() { PositionProperty = new NotifyingObject<Vector3>(); UpButtonTextProperty = new NotifyingObject<string>(); DownButtonTextProperty = new NotifyingObject<string>(); UpThresholdProperty = new NotifyingObject<float>(); DownThresholdProperty = new NotifyingObject<float>(); UpButtonCommandProperty = new NotifyingObject<ICommand>(); DownButtonCommandProperty = new NotifyingObject<ICommand>(); DownButtonTextProperty = new NotifyingObject<string>(); UpAvailableProperty = new NotifyingObject<bool>(); DownAvailableProperty = new NotifyingObject<bool>(); PositionProperty.PropertyChanged += (o, e) => { SetUpAvailable(); SetDownAvailable(); }; SetUpAvailable(); SetDownAvailable(); UpAvailableProperty.PropertyChanged += (o, e) => SetUpAvailable(); DownAvailableProperty.PropertyChanged += (o, e) => SetDownAvailable(); var upButtonCommand = new DelegateCommand( () => Position += _upDownVector, () => UpAvailable); PositionProperty.PropertyChanged += (o, e) => upButtonCommand.RaiseCanExecuteChanged(); UpThresholdProperty.PropertyChanged += (o, e) => upButtonCommand.RaiseCanExecuteChanged(); UpButtonCommandProperty.SetValue(upButtonCommand); var downButtonCommand = new DelegateCommand( () => Position -= _upDownVector, () => 
DownAvailable); PositionProperty.PropertyChanged += (o, e) => downButtonCommand.RaiseCanExecuteChanged(); DownThresholdProperty.PropertyChanged += (o, e) => downButtonCommand.RaiseCanExecuteChanged(); DownButtonCommandProperty.SetValue(downButtonCommand); } public Vector3 Position { get { return PositionProperty.GetValue(); } set { PositionProperty.SetValue(value); } } public string UpButtonText { get { return UpButtonTextProperty.GetValue(); } set { UpButtonTextProperty.SetValue(value); } } public string DownButtonText { get { return DownButtonTextProperty.GetValue(); } set { DownButtonTextProperty.SetValue(value); } } public float UpThreshold { get { return UpThresholdProperty.GetValue(); } set { UpThresholdProperty.SetValue(value); } } public float DownThreshold { get { return DownThresholdProperty.GetValue(); } set { DownThresholdProperty.SetValue(value); } } public bool UpAvailable { get { return UpAvailableProperty.GetValue(); } set { UpAvailableProperty.SetValue(value); } } public bool DownAvailable { get { return DownAvailableProperty.GetValue(); } set { DownAvailableProperty.SetValue(value); } } private void SetUpAvailable() { UpAvailable = Position.y < UpThreshold; } private void SetDownAvailable() { DownAvailable = Position.y > DownThreshold; } } }
33,968
https://github.com/AaronCGoidel/CLImate/blob/master/interactions/__init__.py
Github Open Source
Open Source
BSD-3-Clause
2,021
CLImate
AaronCGoidel
Python
Code
62
99
"""CLImate: Interactions package Classes which implement various interactions with the user via command line YesNo: prompts the user with a yes or no question Selector: prompts the user with a list of options from which they may choose a specified number Created by: Aaron Goidel """ from .interaction import Interaction from .yes_no import YesNo from .selector import Selector from .user_input import UserInput
38,944
https://github.com/cortexapps/datadog-api-client-java/blob/master/src/main/java/com/datadog/api/v1/client/model/SyntheticsTriggerCITestsResponse.java
Github Open Source
Open Source
Apache-2.0
2,022
datadog-api-client-java
cortexapps
Java
Code
590
2,147
/* * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. * This product includes software developed at Datadog (https://www.datadoghq.com/). * Copyright 2019-Present Datadog, Inc. * * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). * https://openapi-generator.tech * Do not edit the class manually. */ package com.datadog.api.v1.client.model; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonPropertyOrder; import io.swagger.annotations.ApiModel; import io.swagger.annotations.ApiModelProperty; import java.util.ArrayList; import java.util.List; import java.util.Objects; /** Object containing information about the tests triggered. */ @ApiModel(description = "Object containing information about the tests triggered.") @JsonPropertyOrder({ SyntheticsTriggerCITestsResponse.JSON_PROPERTY_BATCH_ID, SyntheticsTriggerCITestsResponse.JSON_PROPERTY_LOCATIONS, SyntheticsTriggerCITestsResponse.JSON_PROPERTY_RESULTS, SyntheticsTriggerCITestsResponse.JSON_PROPERTY_TRIGGERED_CHECK_IDS }) @javax.annotation.Generated(value = "org.openapitools.codegen.languages.JavaClientCodegen") public class SyntheticsTriggerCITestsResponse { @JsonIgnore public boolean unparsed = false; public static final String JSON_PROPERTY_BATCH_ID = "batch_id"; private String batchId; public static final String JSON_PROPERTY_LOCATIONS = "locations"; private List<SyntheticsTriggerCITestLocation> locations = null; public static final String JSON_PROPERTY_RESULTS = "results"; private List<SyntheticsTriggerCITestRunResult> results = null; public static final String JSON_PROPERTY_TRIGGERED_CHECK_IDS = "triggered_check_ids"; private List<String> triggeredCheckIds = null; public SyntheticsTriggerCITestsResponse batchId(String batchId) { this.batchId = batchId; return this; } 
/** * The public ID of the batch triggered. * * @return batchId */ @javax.annotation.Nullable @ApiModelProperty(value = "The public ID of the batch triggered.") @JsonProperty(JSON_PROPERTY_BATCH_ID) @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) public String getBatchId() { return batchId; } public void setBatchId(String batchId) { this.batchId = batchId; } public SyntheticsTriggerCITestsResponse locations( List<SyntheticsTriggerCITestLocation> locations) { this.locations = locations; for (SyntheticsTriggerCITestLocation item : locations) { this.unparsed |= item.unparsed; } return this; } public SyntheticsTriggerCITestsResponse addLocationsItem( SyntheticsTriggerCITestLocation locationsItem) { if (this.locations == null) { this.locations = new ArrayList<>(); } this.locations.add(locationsItem); this.unparsed |= locationsItem.unparsed; return this; } /** * List of Synthetics locations. * * @return locations */ @javax.annotation.Nullable @ApiModelProperty(value = "List of Synthetics locations.") @JsonProperty(JSON_PROPERTY_LOCATIONS) @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) public List<SyntheticsTriggerCITestLocation> getLocations() { return locations; } public void setLocations(List<SyntheticsTriggerCITestLocation> locations) { this.locations = locations; } public SyntheticsTriggerCITestsResponse results(List<SyntheticsTriggerCITestRunResult> results) { this.results = results; for (SyntheticsTriggerCITestRunResult item : results) { this.unparsed |= item.unparsed; } return this; } public SyntheticsTriggerCITestsResponse addResultsItem( SyntheticsTriggerCITestRunResult resultsItem) { if (this.results == null) { this.results = new ArrayList<>(); } this.results.add(resultsItem); this.unparsed |= resultsItem.unparsed; return this; } /** * Information about the tests runs. 
* * @return results */ @javax.annotation.Nullable @ApiModelProperty(value = "Information about the tests runs.") @JsonProperty(JSON_PROPERTY_RESULTS) @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) public List<SyntheticsTriggerCITestRunResult> getResults() { return results; } public void setResults(List<SyntheticsTriggerCITestRunResult> results) { this.results = results; } public SyntheticsTriggerCITestsResponse triggeredCheckIds(List<String> triggeredCheckIds) { this.triggeredCheckIds = triggeredCheckIds; return this; } public SyntheticsTriggerCITestsResponse addTriggeredCheckIdsItem(String triggeredCheckIdsItem) { if (this.triggeredCheckIds == null) { this.triggeredCheckIds = new ArrayList<>(); } this.triggeredCheckIds.add(triggeredCheckIdsItem); return this; } /** * The public IDs of the Synthetics test triggered. * * @return triggeredCheckIds */ @javax.annotation.Nullable @ApiModelProperty(value = "The public IDs of the Synthetics test triggered.") @JsonProperty(JSON_PROPERTY_TRIGGERED_CHECK_IDS) @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) public List<String> getTriggeredCheckIds() { return triggeredCheckIds; } public void setTriggeredCheckIds(List<String> triggeredCheckIds) { this.triggeredCheckIds = triggeredCheckIds; } /** Return true if this SyntheticsTriggerCITestsResponse object is equal to o. 
*/ @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } SyntheticsTriggerCITestsResponse syntheticsTriggerCITestsResponse = (SyntheticsTriggerCITestsResponse) o; return Objects.equals(this.batchId, syntheticsTriggerCITestsResponse.batchId) && Objects.equals(this.locations, syntheticsTriggerCITestsResponse.locations) && Objects.equals(this.results, syntheticsTriggerCITestsResponse.results) && Objects.equals( this.triggeredCheckIds, syntheticsTriggerCITestsResponse.triggeredCheckIds); } @Override public int hashCode() { return Objects.hash(batchId, locations, results, triggeredCheckIds); } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class SyntheticsTriggerCITestsResponse {\n"); sb.append(" batchId: ").append(toIndentedString(batchId)).append("\n"); sb.append(" locations: ").append(toIndentedString(locations)).append("\n"); sb.append(" results: ").append(toIndentedString(results)).append("\n"); sb.append(" triggeredCheckIds: ").append(toIndentedString(triggeredCheckIds)).append("\n"); sb.append("}"); return sb.toString(); } /** * Convert the given object to string with each line indented by 4 spaces (except the first line). */ private String toIndentedString(Object o) { if (o == null) { return "null"; } return o.toString().replace("\n", "\n "); } }
50,239
https://github.com/ArrogantWombatics/openbsd-src/blob/master/gnu/usr.bin/perl/t/uni/universal.t
Github Open Source
Open Source
BSD-3-Clause
2,019
openbsd-src
ArrogantWombatics
Perl
Code
475
1,605
#!./perl # # check UNIVERSAL # BEGIN { chdir 't' if -d 't'; @INC = '../lib'; $| = 1; require "./test.pl"; } use utf8; use open qw( :utf8 :std ); plan tests => 93; $a = {}; bless $a, "Bòb"; ok $a->isa("Bòb"); package Hùmàn; sub èàt {} package Fèmàlè; @ISA=qw(Hùmàn); package Àlìcè; @ISA=qw(Bòb Fèmàlè); sub sìng; sub drìnk { return "drinking " . $_[1] } sub nèw { bless {} } $Àlìcè::VERSION = 2.718; { package Cèdrìc; our @ISA; use base qw(Hùmàn); } { package Prògràmmèr; our $VERSION = 1.667; sub wrìtè_perl { 1 } } package main; $a = nèw Àlìcè; ok $a->isa("Àlìcè"); ok $a->isa("main::Àlìcè"); # check that alternate class names work ok(("main::Àlìcè"->nèw)->isa("Àlìcè")); ok $a->isa("Bòb"); ok $a->isa("main::Bòb"); ok $a->isa("Fèmàlè"); ok $a->isa("Hùmàn"); ok ! $a->isa("Màlè"); ok ! $a->isa('Prògràmmèr'); ok $a->isa("HASH"); ok $a->can("èàt"); ok ! $a->can("sleep"); ok my $ref = $a->can("drìnk"); # returns a coderef is $a->$ref("tèà"), "drinking tèà"; # ... which works ok $ref = $a->can("sìng"); eval { $a->$ref() }; ok $@; # ... but not if no actual subroutine ok $a->can("VERSION"); cmp_ok eval { $a->VERSION }, '==', 2.718; ok ! 
(eval { $a->VERSION(2.719) }); like $@, qr/^Àlìcè version 2.719 required--this is only version 2.718 at /u; ok (!Cèdrìc->isa('Prògràmmèr')); ok (Cèdrìc->isa('Hùmàn')); push(@Cèdrìc::ISA,'Prògràmmèr'); ok (Cèdrìc->isa('Prògràmmèr')); { package Àlìcè; base::->import('Prògràmmèr'); } ok $a->isa('Prògràmmèr'); ok $a->isa("Fèmàlè"); @Cèdrìc::ISA = qw(Bòb); ok (!Cèdrìc->isa('Prògràmmèr')); my $b = 'abc'; my @refs = qw(SCALAR SCALAR LVALUE GLOB ARRAY HASH CODE); my @vals = ( \$b, \3.14, \substr($b,1,1), \*b, [], {}, sub {} ); for ($p=0; $p < @refs; $p++) { for ($q=0; $q < @vals; $q++) { is UNIVERSAL::isa($vals[$p], $refs[$q]), ($p==$q or $p+$q==1); }; }; ok UNIVERSAL::isa(Àlìcè => "UNIVERSAL"); cmp_ok UNIVERSAL::can(Àlìcè => "can"), '==', \&UNIVERSAL::can; eval 'sub UNIVERSAL::slèèp {}'; ok $a->can("slèèp"); { package Pìckùp; no warnings "deprecated"; use UNIVERSAL qw( isa can VERSION ); ::ok isa "Pìckùp", UNIVERSAL; ::cmp_ok can( "Pìckùp", "can" ), '==', \&UNIVERSAL::can; ::ok VERSION "UNIVERSAL" ; } package Fòò; sub DOES { 1 } package Bàr; @Bàr::ISA = 'Fòò'; package Bàz; package main; ok( Fòò->DOES( 'bàr' ), 'DOES() should call DOES() on class' ); ok( Bàr->DOES( 'Bàr' ), '... and should fall back to isa()' ); ok( Bàr->DOES( 'Fòò' ), '... even when inherited' ); ok( Bàz->DOES( 'Bàz' ), '... even without inheriting any other DOES()' ); ok( ! Bàz->DOES( 'Fòò' ), '... returning true or false appropriately' ); package Pìg; package Bòdìnè; Bòdìnè->isa('Pìg'); package main; eval { UNIVERSAL::DOES([], "fòò") }; like( $@, qr/Can't call method "DOES" on unblessed reference/, 'DOES call error message says DOES, not isa' ); # Tests for can seem to be split between here and method.t # Add the verbatim perl code mentioned in the comments of # http://www.xray.mpe.mpg.de/mailing-lists/perl5-porters/2001-05/msg01710.html # but never actually tested. 
is(UNIVERSAL->can("NòSùchPàckàgè::fòò"), undef); @splàtt::ISA = 'zlòpp'; ok (splàtt->isa('zlòpp')); ok (!splàtt->isa('plòp')); # This should reset the ->isa lookup cache @splàtt::ISA = 'plòp'; # And here is the new truth. ok (!splàtt->isa('zlòpp')); ok (splàtt->isa('plòp'));
50,421
https://github.com/jamescaoBJ/springcloud/blob/master/springboot-dao/src/main/java/com/ncme/springboot/mapper/NcmeCourseTypeMapper.java
Github Open Source
Open Source
Apache-2.0
null
springcloud
jamescaoBJ
Java
Code
15
72
package com.ncme.springboot.mapper; import com.ncme.springboot.model.NcmeCourseType; public interface NcmeCourseTypeMapper { int insert(NcmeCourseType record); int insertSelective(NcmeCourseType record); }
17,161
https://github.com/Drebakare/lex-management-system/blob/master/resources/views/Pages/Old/Actions/create-product-drinktype.blade.php
Github Open Source
Open Source
MIT
null
lex-management-system
Drebakare
PHP
Code
470
2,240
@extends('admin_app') @section('contents') <div class="page-content"> <div class="container-fluid"> <!-- start page title --> <div class="row"> <div class="col-12"> <div class="page-title-box d-flex align-items-center justify-content-between"> <h4 class="mb-0 font-size-18">Create Drink Type</h4> <div class="page-title-right"> <ol class="breadcrumb m-0"> <li class="breadcrumb-item"><a href="{{route('admin.dashboard')}}">Dashboards</a></li> <li class="breadcrumb-item active">Create Drink Type</li> </ol> </div> </div> </div> </div> <div class="row"> <div class="col-12"> <div class="card"> <div class="card-body"> <h4 class="card-title">Add a New Drink Type</h4> <p class="card-title-desc">Fill all information below. Ensure all fields are filled Properly as deleting will not be possible after adding a new drink type</p> <form method="post" action="{{route('submit-drinktype-form')}}" enctype="multipart/form-data" > @csrf <div class="row pb-5"> <div class="col-sm-6"> <div class="form-group"> <label for="productname"> Drink Type</label> <input id="productname" name="name" type="text" class="form-control" required> </div> </div> <div class="col-sm-6"> <div class="form-group"> <label for="productname">Select Drink Type Category</label> <select name="drink_category" class="form-control" required> @foreach($categories as $category) <option value="{{$category->id}}">{{$category->name}}</option> @endforeach </select> </div> </div> <div class="col-sm-6"> <h4 class="card-title">Image Upload</h4> <div class="custom-file"> <input type="file" name="image" class="custom-file-input" id="customFile" accept="image/*" required> <label class="custom-file-label" for="customFile">Choose file</label> </div> </div> </div> <button type="submit" class="btn btn-success mr-1 waves-effect waves-light">Add Drink Type</button> </form> </div> </div> </div> </div> <div class="row"> <div class="col-12"> <div class="card"> <div class="card-body"> <h4 class="card-title">Drink Types</h4> <p 
class="card-title-desc"> List of Drink Types </p> <table id="datatable-buttons" class="table table-striped table-bordered dt-responsive nowrap" style="border-collapse: collapse; border-spacing: 0; width: 100%;"> <thead> <tr> <th>ID</th> <th>Reference</th> <th>Drink Type</th> <th>Drink Category</th> <th>Date Created</th> <th>Action</th> </tr> </thead> <tbody> @foreach($drink_types as $key => $drink_type) <tr> <td>{{$drink_type->id}}</td> <td>{{$drink_type->token}}</td> <td>{{$drink_type->name}}</td> <td>{{$drink_type->drinkCategory->name}}</td> <td>{{$drink_type->created_at}}</td> <td> <a href="#edit-drink-{{$key}}" data-toggle="modal"> <span data-toggle="tooltip" data-placement="top" title data-original-title="Edit Drink Type Details"> <i class="mdi mdi-square-edit-outline mdi-24px"></i> </span> </a> <a href="#view-image-{{$key}}" data-toggle="modal"> <span data-toggle="tooltip" data-placement="top" title data-original-title="View Drink Type Image"> <i class="mdi mdi-eye mdi-24px"></i> </span> </a> </td> </tr> @endforeach </tbody> </table> </div> </div> </div> <!-- end col --> </div> </div> </div> @foreach($drink_types as $key => $drink_type) <div class="modal fade" id="view-image-{{$key}}" tabindex="-1" role="dialog" aria-labelledby="mySmallModalLabel" aria-hidden="true"> <div class="modal-dialog modal-sm"> <div class="modal-content"> <div class="modal-header"> <h5 class="modal-title mt-0" id="mySmallModalLabel">Drink Type Displayed Image</h5> <button type="button" class="close" data-dismiss="modal" aria-label="Close"> <span aria-hidden="true">&times;</span> </button> </div> <div class="modal-body"> <div class="justify-content-center"> <img src="{{asset('_landing/assets/images/products/'.$drink_type->image)}}" class="img-fluid" alt=""> </div> </div> </div><!-- /.modal-content --> </div><!-- /.modal-dialog --> </div><!-- /.modal --> @endforeach @foreach($drink_types as $key => $drink_type) <div class="modal fade" id="edit-drink-{{$key}}" tabindex="-1" role="dialog" 
aria-labelledby="mySmallModalLabel" aria-hidden="true"> <div class="modal-dialog modal-sm"> <div class="modal-content"> <div class="modal-header"> <h5 class="modal-title mt-0" id="mySmallModalLabel">Edit Drink Type Details</h5> <button type="button" class="close" data-dismiss="modal" aria-label="Close"> <span aria-hidden="true">&times;</span> </button> </div> <div class="modal-body"> <h4 class="card-title">Edit Drink Type Details</h4> <p class="card-title-desc">Fill all information below correct.</p> <form method="post" action="{{route('edit-drink-details', ['token' => $drink_type->token])}}" enctype="multipart/form-data"> @csrf <div class="row mb-3"> <div class="col-sm-12"> <div class="form-group"> <label for="productname"> Drink Type</label> <input value="{{$drink_type->name}}" id="productname" name="name" type="text" class="form-control" required> </div> </div> <div class="col-sm-12"> <div class="form-group"> <label for="productname">Select Drink Type Category</label> <select name="drink_category" class="form-control" required> @foreach($categories as $category) <option value="{{$category->id}}" @if($category->id == $drink_type->category_id) selected @endif>{{$category->name}}</option> @endforeach </select> </div> </div> <div class="col-sm-12"> <h4 class="card-title">Image Upload</h4> <div class="custom-file"> <input type="file" name="image" class="custom-file-input" id="customFile" accept="image/*"> <label class="custom-file-label" for="customFile">Choose file</label> </div> </div> </div> <button type="submit" class="btn btn-success mr-1 waves-effect waves-light">Edit Drink Type</button> </form> </div> </div><!-- /.modal-content --> </div><!-- /.modal-dialog --> </div><!-- /.modal --> @endforeach @endsection
25,558
https://github.com/madsendennis/scalismo/blob/master/src/main/scala/scalismo/sampling/proposals/MHMixtureProposal.scala
Github Open Source
Open Source
Apache-2.0
2,023
scalismo
madsendennis
Scala
Code
291
729
/* * Copyright 2016 University of Basel, Graphics and Vision Research Group * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package scalismo.sampling.proposals import scalismo.sampling.{MHProposalGenerator, MHSample} import scalismo.utils.Random /** * Mixture proposal for the special case where the proposal is an MHProposal */ class MHMixtureProposal[A](proposals: IndexedSeq[(Double, MHProposalGenerator[A])])(implicit rnd: Random) extends MHProposalGenerator[A] { val generators: IndexedSeq[MHProposalGenerator[A]] = proposals.map(_._2) val mixtureFactors: IndexedSeq[Double] = { val f = proposals.map(_._1) val totalP = f.sum f.map(c => c / totalP) } // cumsum protected val p: IndexedSeq[Double] = mixtureFactors.scanLeft(0.0)((t, p) => t + p).tail // keep state: last active proposal, useful for printing only private var lastActive = 0 override def propose(current: MHSample[A]): MHSample[A] = { val r = rnd.scalaRandom.nextDouble() val i = p.indexWhere(p => p >= r) // find first element larger than random, use l lastActive = i generators(i).propose(current) } override def logTransitionProbability(from: MHSample[A], to: MHSample[A]): Double = { val transitions = generators.map(g => g.logTransitionProbability(from, to)) if (transitions.exists(_.isNaN)) throw new Exception("NaN transition probability encountered!") if (transitions.exists(!_.isInfinite)) { val maxExpo = transitions.max val sum = mixtureFactors.zip(transitions).map { case (f, t) => f * math.exp(t - maxExpo) }.sum val fwd 
= math.log(sum) + maxExpo fwd } else Double.NegativeInfinity } } object MHMixtureProposal { def apply[A](proposals: (Double, MHProposalGenerator[A])*)(implicit rnd: Random): MHMixtureProposal[A] = new MHMixtureProposal[A](proposals.toIndexedSeq) }
22,930
https://github.com/miguelpuyol/hermes/blob/master/hermes-consumers/src/main/java/pl/allegro/tech/hermes/consumers/consumer/rate/calculator/MaximumOutputRateCalculator.java
Github Open Source
Open Source
Apache-2.0
2,017
hermes
miguelpuyol
Java
Code
35
186
package pl.allegro.tech.hermes.consumers.consumer.rate.calculator; import pl.allegro.tech.hermes.api.Subscription; import pl.allegro.tech.hermes.consumers.consumer.ActiveConsumerCounter; class MaximumOutputRateCalculator { private final ActiveConsumerCounter activeConsumerCounter; MaximumOutputRateCalculator(ActiveConsumerCounter activeConsumerCounter) { this.activeConsumerCounter = activeConsumerCounter; } double calculateMaximumOutputRate(Subscription subscription) { int numberOfConsumersOnSubscription = activeConsumerCounter.countActiveConsumers(subscription); return subscription.getSerialSubscriptionPolicy().getRate().doubleValue() / Math.max(numberOfConsumersOnSubscription, 1); } }
50,467
https://github.com/lilyclemson/delete_it/blob/master/ML/Trees.ecl
Github Open Source
Open Source
MIT
null
delete_it
lilyclemson
ECL
Code
5,695
17,911
IMPORT ML; IMPORT * FROM $; IMPORT $.Mat; IMPORT * FROM ML.Types; IMPORT * FROM ML.Sampling; EXPORT Trees := MODULE EXPORT t_node := INTEGER4; // Assumes a maximum of 32 levels presently SHARED t_Index:= INTEGER4; EXPORT t_level := INTEGER1; // Would allow up to 2^256 levels EXPORT Node := RECORD t_node node_id; // The node-id for a given point t_level level; // The level for a given point ML.Types.NumericField; END; EXPORT sNode:= RECORD t_RecordID splitId:=0; Node; BOOLEAN HighBranch:= FALSE; END; EXPORT wNode := RECORD t_node node_id; // The node-id for a given point t_level level; // The level for a given point ML.Types.t_Discrete depend; // The dependant value ML.Types.DiscreteField; END; SHARED gNode := RECORD wNode; t_count group_id; END; EXPORT SplitF := RECORD // data structure for splitting results t_node node_id; // The node that is being split t_level level; // The level the split is occuring ML.Types.t_FieldNumber number; // The column used to split ML.Types.t_Discrete value; // The value for the column in question t_node new_node_id; // The new node that value goes to END; EXPORT gSplitF := RECORD SplitF; t_count group_id; END; SHARED final_node := RECORD Types.t_RecordID root_node := 0; // parent node id INTEGER1 root_level := 0; // parent node level Types.t_RecordID final_node := 0; // final node id, '0' means the parent node is a leaf INTEGER1 final_level := 0; // final node level Types.t_Discrete final_class := -1; // final class value, '-1' means the parent node is a branch END; SHARED final_node_instance := RECORD(final_node) Types.t_RecordID instance_id := 0; // instance id Types.t_Discrete instance_class := -1; // instance class value BOOLEAN match:= FALSE; END; SHARED node_error := RECORD(final_node) UNSIGNED4 e:=0; // error count UNSIGNED4 cnt:=0; // total count REAL8 NxErr_est:=0; // N x error estimated END; SHARED node_error to_node_error(SplitF l):= TRANSFORM SELF.root_node := l.node_id; SELF.root_level := l.level; SELF.final_node := 
l.new_node_id; SELF.final_level := l.level + 1; END;
// NxKoutofM: for each of N groups, randomly select K out of the M feature numbers (1..M).
// Used for per-group random feature selection (e.g. Random Forest style learners).
// Returns a TABLE of (gNum, number) pairs - K rows per group.
EXPORT NxKoutofM(t_Index N, t_FieldNumber K, t_FieldNumber M) := FUNCTION
  // Working layout: group number, feature number and a random shuffle key
  rndFeatRec:= RECORD
    t_count gNum :=0;
    t_FieldNumber number :=0;
    t_FieldReal rnd :=0;
  END;
  seed:= DATASET([{0,0,0}], rndFeatRec);
  // One record per group, distributed by group number so the per-group work below is LOCAL
  group_seed:= DISTRIBUTE(NORMALIZE(seed, N,TRANSFORM(rndFeatRec, SELF.gNum:= COUNTER)), gNum);
  // Expand each group to all M feature numbers, each tagged with a random key
  allFields:= NORMALIZE(group_seed, M, TRANSFORM(rndFeatRec, SELF.number:= (COUNTER % M) +1, SELF.rnd:=RANDOM(), SELF:=LEFT),LOCAL);
  // Shuffle within each group, then keep K out of every M records (ENTH sampling)
  allSorted:= SORT(allFields, gNum, rnd, LOCAL);
  raw_set:= ENTH(allSorted, K, M, 1);
  RETURN TABLE(raw_set, {gNum, number});
END;
/* The NodeIds within a KdTree follow a natural pattern - all the node-ids will have the same number of bits
   - corresponding to the depth of the tree+1. The left-most will always be 1.
   Moving from left to right a 0 always implies taking the 'low' decision at a node and a 1 corresponds to taking a 'high'.
   Thus an ID of 6 = 110 has been split twice; and this group is in the high then low group
   The Splits show the number and value used to split at each point */
EXPORT KdTree(DATASET(ML.Types.NumericField) f,t_level Depth=10,t_level MedianDepth=0) := MODULE
  // Cannot presently support median computation on more than 32K nodes at once due to use of FieldAggregate library
  MedDepth := MIN(MedianDepth,15);
  // Each iteration attempts to work with the next level down the tree; resolving multiple sub-trees at once
  // The reason is to ensure that the full cluster is busy all the time
  // It is assumed that all of the data-nodes are distributed by HASH(id) throughout
  Split(DATASET(Node) nodes, t_level p_level) := FUNCTION
    working_nodes:=nodes(level=p_level);
    /*
    // For every node_id this computes the maximum and minimum extent of the space
    spans := TABLE(working_nodes,{ minv := MIN(GROUP,value); maxv := MAX(GROUP,value); cnt := COUNT(GROUP); node_id,number }, node_id,number, MERGE);
    */
    // For every node_id this computes the maximum and minimum extent of the space and variance
    spans := TABLE(working_nodes,{ minv := MIN(GROUP,value); maxv := MAX(GROUP,value); var:= VARIANCE(GROUP,value); cnt := COUNT(GROUP); node_id,number }, node_id,number, MERGE);
    leafspans:= spans(cnt=1); // single-point nodes - nothing left to split
    onlyvalspan:= spans(cnt>1 and maxv = minv); // every point shares one value in this dimension
    splitwannabes:= spans(cnt>1 and maxv > minv); // genuinely splittable node/dimension pairs
    /*
    // Now find the split points - that is the number with the largest span for each node_id, excluding leafs and only one value spans
    sp := DEDUP( SORT( DISTRIBUTE(splitwannabes, HASH(node_id)),node_id,minv-maxv,LOCAL), node_id, LOCAL );// Here we compute the split point based upon the mean of the range
    */
    // Now find the split points - that is the number with the largest variance (more balanced tree) for each node_id, excluding leafs and only one value spans
    sp := DEDUP( SORT( DISTRIBUTE(splitwannabes, HASH(node_id)),node_id, -var,LOCAL), node_id, LOCAL );// Here we compute the split point based upon the variance
    pass:= JOIN(onlyvalspan, sp, LEFT.node_id = RIGHT.node_id, LEFT ONLY, LOOKUP);
    // Here we compute the split point based upon the mean of the range
    splits_mean := PROJECT( sp, TRANSFORM(Node,SELF.Id := 0, SELF.level := p_level, SELF.value := (LEFT.maxv+LEFT.minv)/2, SELF := LEFT));
    // Here we create split points based upon the median
    // this gives even split points - but it adds an NLgN process into the loop ...
    // Method currently uses field aggregates - which requires the node-id to fit into 16 bits
    into_med := JOIN(working_nodes,sp,LEFT.node_id=RIGHT.node_id AND LEFT.number=RIGHT.number,TRANSFORM(ML.Types.NumericField,SELF.Number := LEFT.node_id,SELF := LEFT),LOOKUP);
    // Transform into splits format - but oops - field is missing
    // When median = minval we use nextval instead of median to avoid endless right-node propagation (all points >= than split value)
    s_median := PROJECT( ML.FieldAggregates(into_med).minMedNext, TRANSFORM(Node, SELF.Id := 0, SELF.level := p_level, SELF.node_id:=LEFT.number, SELF.value := IF(LEFT.median = LEFT.minval, LEFT.nextval, LEFT.median), SELF.number := 0));
    splits_median := JOIN(s_median,sp,LEFT.node_id=RIGHT.node_id,TRANSFORM(Node,SELF.number := RIGHT.number,SELF := LEFT),FEW);
    splits := IF ( p_level < MedDepth, splits_median, splits_mean );
    // based upon the split points we now partition the data - note the split information is assumed to fit inside RAM
    // First we perform the split on field to get record_id/node_id pairs
    r := RECORD
      ML.Types.t_RecordId id;
      t_node node_id;
    END;
    r NoteNI(working_nodes le, splits ri) := TRANSFORM
      SELF.node_id := (le.node_id << 1) + IF(le.value<ri.value,0,1);
      SELF.id := le.id;
    END;
    // The ,LOOKUP means that the result will be distributed by ID still
    ndata := JOIN(working_nodes,splits,LEFT.node_id = RIGHT.node_id AND LEFT.number=RIGHT.number,NoteNI(LEFT,RIGHT),LOOKUP);
    // Then we apply those record_id/node_id pairs back to the original data / we can use local because of the ,LOOKUP above
    patched := JOIN(working_nodes,ndata,LEFT.id=RIGHT.id,TRANSFORM(Node,SELF.node_id := RIGHT.node_id, SELF.level := LEFT.level+1,SELF := LEFT),LOCAL);
    // leafs
    leafs1 := JOIN(working_nodes, leafspans, LEFT.node_id = RIGHT.node_id AND LEFT.number = RIGHT.number, TRANSFORM(LEFT), LOOKUP);
    leafs2 := JOIN(working_nodes, pass, LEFT.node_id = RIGHT.node_id AND LEFT.number = RIGHT.number, TRANSFORM(LEFT), LOOKUP);
    RETURN nodes(level<p_level)+leafs1+ leafs2+splits+patched;
  END;
  d1 := DISTRIBUTE(PROJECT(ML.Utils.Fat(f,0),TRANSFORM(Node,SELF.Level := 1, SELF.node_id := 1,SELF := LEFT)),HASH(id));
  SHARED Res := LOOP(D1,Depth,Split(ROWS(LEFT),COUNTER));
  EXPORT Splits := Res(id=0); // The split points used to partition each node id
  EXPORT Partitioned := Res(id<>0); // The training data - all partitioned
  EXPORT Counts := TABLE(Partitioned(number=1),{ node_id, Cnt := COUNT(GROUP) }, node_id, FEW); // Number of training elements in each partition
  EXPORT CountMean := AVE(Counts,Cnt);
  EXPORT CountVariance := VARIANCE(Counts,Cnt);
  EXPORT Extents := TABLE(Partitioned,{ node_id, number, MinV := MIN(GROUP,Value), MaxV := MAX(GROUP,Value) }, node_id, number, FEW);
  // completeTree: pad the split list so every split node has both children present (as split or empty leaf)
  completeTree(DATASET(Node) nodes):= FUNCTION
    leftChildren:= PROJECT(nodes, TRANSFORM(node, SELF.node_id:= LEFT.node_id << 1, SELF.level := LEFT.level+1, SELF:=[]), LOCAL);
    rightChildren:=PROJECT(nodes, TRANSFORM(node, SELF.node_id:= (LEFT.node_id << 1) + 1, SELF.level := LEFT.level+1, SELF:=[]), LOCAL);
    x:=JOIN(splits, leftChildren + rightChildren, LEFT.node_id = RIGHT.node_id, TRANSFORM(RIGHT), RIGHT ONLY);
    return nodes + x;
  END;
  EXPORT FullTree := DISTRIBUTE(completeTree(Splits),HASH(node_id)); ; // All splits nodes must have 2 children (another split or leaf node)
  // nodeBoundaries: for every split node, walk up the tree collecting the tightest bound each
  // ancestor split imposes on that node's region (one record per node/dimension/direction)
  nodeBoundaries(DATASET(Node) nodes):= FUNCTION
    loopbody(DATASET(sNode) nodes):= FUNCTION
      itself:= PROJECT(nodes, TRANSFORM(sNode, SELF.Id:= LEFT.node_id *2 + IF(LEFT.HighBranch, 1, 0), SELF:=LEFT), LOCAL);
      parentNodeID := PROJECT(nodes, TRANSFORM(sNode, SELF.node_id:= LEFT.node_id DIV 2, SELF.HighBranch:= (LEFT.node_id % 2)=1, SELF:=LEFT), LOCAL);
      parentData := JOIN(splits, parentNodeID, LEFT.node_id=RIGHT.NODE_id, TRANSFORM(sNode, SELF.splitId:= RIGHT.splitId, SELF.HighBranch:= RIGHT.HighBranch, SELF:= LEFT));
      return itself + parentData;
    END;
    loop0:= PROJECT(nodes,TRANSFORM(sNode, SELF.splitId:= LEFT.node_id, SELF:=LEFT), LOCAL);
    allBounds := LOOP(loop0, LEFT.id=0 AND LEFT.level>0,loopbody(ROWS(LEFT)));
    LowBounds := DEDUP(SORT(allBounds(node_id<>splitId, HighBranch=FALSE), splitId, number, -level),splitId, number);
    UpBounds := DEDUP(SORT(allBounds(node_id<>splitId, HighBranch=TRUE) , splitId, number, -level),splitId, number);
    return SORT(LowBounds + UpBounds, splitId, -level); //(node_id<>splitId)
  END;
  EXPORT Boundaries:= nodeBoundaries(FullTree);
  // NOTE(review): the local names inside nodeBoundaries use HighBranch=FALSE for LowBounds, while the
  // EXPORTs below select the opposite polarity - looks inconsistent; confirm which filter is intended.
  EXPORT LowBounds:= Boundaries(HighBranch=TRUE);
  EXPORT UpBounds:= Boundaries(HighBranch=FALSE);
  // NewInstances: drop new data points down the (already built) tree; Locations gives the
  // node each instance reaches after maxLevel rounds of split-following
  EXPORT NewInstances(DATASET(NumericField) newData, DATASET(Node) model= Splits):= MODULE
    maxLevel:= MAX(model, level);
    instSplitRec := RECORD
      t_node node_id; // The node-id for a given point
      t_level level;
      t_FieldNumber snumber;
      t_FieldReal svalue;
      NumericField;
    END;
    loopbody(DATASET(Node) nodes, t_level p_level) := FUNCTION
      allresult:= JOIN(newData, nodes, LEFT.id = RIGHT.id, TRANSFORM(Node, SELF.number:= LEFT.number, SELF.value:= LEFT.value, SELF:=RIGHT), MANY LOOKUP);
      joinall := JOIN(allresult, model, LEFT.number = RIGHT.number AND LEFT.node_id = RIGHT.node_id AND RIGHT.level= p_level, TRANSFORM(instSplitRec, SELF.node_id:= RIGHT.node_id, SELF.level:= RIGHT.level, SELF.snumber:= RIGHT.number, SELF.svalue:= RIGHT.value, SELF:= LEFT), ALL);
      leaf:= JOIN(nodes, model, LEFT.node_id = RIGHT.node_id, LOOKUP, LEFT ONLY);
      split:= PROJECT(joinall, TRANSFORM(Node, SELF.node_id := (LEFT.node_id << 1) + IF(LEFT.value < LEFT.svalue,0,1), SELF.level:= LEFT.level + 1, SELF:= LEFT));
      RETURN leaf + split;
    END;
    root:= PROJECT(newData(number=1), TRANSFORM(Node, SELF.level:= 1, SELF.node_id:=1, SELF:= LEFT));
    EXPORT Locations:= LOOP(root, maxLevel, loopbody(ROWS(LEFT),COUNTER));
  END;
END;
// ... e.g. ID3 (Quinlan).
// PartitionGiniImpurityBased: one level of decision-tree growth.
// Splits every node at 'p_level' on the attribute with the lowest Gini impurity, unless the node
// is already pure enough (1-Purity >= gini), in which case it is passed through unchanged.
// Returns the earlier levels, the pass-through (pure) nodes, the split records and the re-labelled points.
EXPORT PartitionGiniImpurityBased (DATASET(wNode) nodes, t_level p_level, REAL Purity=1.0) := FUNCTION
  this_set0 := nodes(level = p_level); // Only process those 'undecided' nodes
  Purities := ML.Utils.Gini(this_set0(number=1),node_id,depend); // Compute the purities for each node
  // At this level these nodes are pure enough
  PureEnough := Purities(1-Purity >= gini);
  // Remove the 'pure enough' from the working set
  this_set := JOIN(this_set0,PureEnough,LEFT.node_id=RIGHT.node_id,TRANSFORM(LEFT),LEFT ONLY,LOOKUP);
  // Make sure the 'pure enough' get through
  pass_thru := JOIN(this_set0,PureEnough,LEFT.node_id=RIGHT.node_id,TRANSFORM(LEFT),LOOKUP);
  // Implementation note: it is very tempting to want to distribute by node_id - however at any given level there are only 2^level nodes
  // so if you want to distribute on a large number of clusters; you cannot pre-distribute.
  // Implementation note II: this code could be made rather cleaner by re-using the Utils.Gini routine; HOWEVER
  // it would require an extra join and potentially an extra data scan. For now it is assumed that 'code is cheap'
  // In a single step compute the counts for each dependant value for each field for each node
  // Note: the MERGE is to allow for high numbers of dimensions, high cardinalities in the discretes or both
  // for low dimension, low cardinality cases a ,FEW would be significantly quicker
  agg := TABLE(this_set,{node_id,number,value,depend,Cnt := COUNT(GROUP)},node_id,number,value,depend,MERGE);
  // Now to turn those counts into proportions; need the counts independant of depend
  // Could re-count from this_set; but using agg as it is (probably) significantly smaller
  aggc := TABLE(agg,{node_id,number,value,TCnt := SUM(GROUP,Cnt)},node_id,number,value,MERGE);
  r := RECORD
    agg;
    REAL4 Prop; // Proportion pertaining to this dependant value
  END;
  // Now on each row we have the proportion of the node that is that dependant value
  prop := JOIN(agg,aggc,LEFT.node_id=RIGHT.node_id AND LEFT.number=RIGHT.number AND LEFT.value = RIGHT.value, TRANSFORM(r, SELF.Prop := LEFT.Cnt/RIGHT.Tcnt, SELF := LEFT),HASH);
  // Compute 1-gini coefficient for each node for each field for each value
  gini_per := TABLE(prop,{node_id,number,value,tcnt := SUM(GROUP,Cnt),val := SUM(GROUP,Prop*Prop)},node_id,number,value);
  // The gini coeff for each value is then formed into a weighted average to give the impurity based upon the field
  gini := TABLE(gini_per,{node_id,number,gini_t := SUM(GROUP,tcnt*val)/SUM(GROUP,tcnt)},node_id,number,FEW);
  // We can now work out which nodes to split and based upon which column
  splt := DEDUP( SORT( DISTRIBUTE( gini,HASH(node_id) ), node_id, -gini_t, LOCAL ), node_id, LOCAL );
  // We now need to allocate node-ids for the nodes we are about to create; because we cannot control the size of the discrete
  // fields we cannot do this via bit-shifting (as in the kd-trees); rather we will have to enumerate them an allocate sequentially
  // The 'aggc' really has nothing to do with the below; it is just a convenient list of node_id/number/value that
// happens to be laying around - so we are using it rather than hitting a bigger dataset
node_cand0 := JOIN(aggc,splt,LEFT.node_id=RIGHT.node_id AND LEFT.number=RIGHT.number,TRANSFORM(LEFT),LOOKUP);
node_base := MAX(aggc,node_id); // Start allocating new node-ids from the highest previous
// Allocate the new node-ids
node_cand := PROJECT(node_cand0,TRANSFORM({node_cand0, t_node new_nodeid},SELF.new_nodeid := node_base+COUNTER, SELF := LEFT));
// Construct a fake wNode to pass out splitting information
nc0 := PROJECT(node_cand,TRANSFORM(wNode,SELF.value := LEFT.new_nodeid,SELF.depend := LEFT.value,SELF.level := p_level,SELF := LEFT,SELF := []));
// Construct a list of record-ids to (new) node-ids (by joining to the real data)
r1 := RECORD
  ML.Types.t_Recordid id;
  t_node nodeid;
END;
// Mapp will be distributed by id because this_set is - and a ,LOOKUP join does not destroy order
mapp := JOIN(this_set,node_cand,LEFT.node_id=RIGHT.node_id AND LEFT.number=RIGHT.number AND LEFT.value=RIGHT.value, TRANSFORM(r1,SELF.id := LEFT.id,SELF.nodeid:=RIGHT.new_nodeid),LOOKUP);
// Now use the mapping to actually reset all the points
J := JOIN(this_set,mapp,LEFT.id=RIGHT.id,TRANSFORM(wNode,SELF.node_id:=RIGHT.nodeid,SELF.level:=LEFT.level+1,SELF := LEFT),LOCAL);
RETURN J+nc0+nodes(level < p_level)+pass_thru;
END;
/* The decision tree is designed to split a dataset such that the dependent variables are concentrated by value inside the nodes
   Put a different way; we are aiming for a node to have one value for the dependant variable
   It is possible to construct a decision tree with continuous data; for now we are tackling the discrete case
   Assume raw-data distributed by record-id
   The tree building has two independent termination conditions - the tree Depth and the required purity of a given node
   The purity is measured using the Gini co-efficient */
EXPORT Decision(DATASET(ML.Types.DiscreteField) ind,DATASET(ML.Types.DiscreteField) dep,t_level Depth=10,REAL Purity=1.0) := MODULE
  // Working record: every training point carries its current node, level and its dependent value
  SHARED wNode := RECORD
    t_node node_id; // The node-id for a given point
    t_level level; // The level for a given point
    ML.Types.t_Discrete depend; // Actually copies the dependant value to EVERY node - paying memory to avoid downstream cycles
    ML.Types.DiscreteField;
  END;
  ind0 := ML.Utils.FatD(ind); // Ensure no sparsity in independents
  wNode init(ind0 le,dep ri) := TRANSFORM
    SELF.node_id := 1;
    SELF.level := 1;
    SELF.depend := ri.value;
    SELF := le;
  END;
  // BUGFIX: join the fattened ind0 (not the raw ind) so the "no sparsity" fix-up above actually
  // takes effect - this matches the equivalent line in SplitsGiniImpurBased
  ind1 := JOIN(ind0,dep,LEFT.id = RIGHT.id,init(LEFT,RIGHT)); // If we were prepared to force DEP into memory then ,LOOKUP would go quicker
  // Split: one round of Gini-impurity based node splitting (same algorithm as PartitionGiniImpurityBased)
  Split(DATASET(wNode) nodes, t_level p_level) := FUNCTION
    this_set0 := nodes(level = p_level); // Only process those 'undecided' nodes
    Purities := ML.Utils.Gini(this_set0(number=1),node_id,depend); // Compute the purities for each node
    // At this level these nodes are pure enough
    PureEnough := Purities(1-Purity >= gini);
    // Remove the 'pure enough' from the working set
    this_set := JOIN(this_set0,PureEnough,LEFT.node_id=RIGHT.node_id,TRANSFORM(LEFT),LEFT ONLY,LOOKUP);
    // Make sure the 'pure enough' get through
    pass_thru := JOIN(this_set0,PureEnough,LEFT.node_id=RIGHT.node_id,TRANSFORM(LEFT),LOOKUP);
    // Implementation note: it is very tempting to want to distribute by node_id - however at any given level there are only 2^level nodes
    // so if you want to distribute on a large number of clusters; you cannot pre-distribute.
    // Implementation note II: this code could be made rather cleaner by re-using the Utils.Gini routine; HOWEVER
    // it would require an extra join and potentially an extra data scan. For now it is assumed that 'code is cheap'
    // In a single step compute the counts for each dependant value for each field for each node
    // Note: the MERGE is to allow for high numbers of dimensions, high cardinalities in the discretes or both
    // for low dimension, low cardinality cases a ,FEW would be significantly quicker
    agg := TABLE(this_set,{node_id,number,value,depend,Cnt := COUNT(GROUP)},node_id,number,value,depend,MERGE);
    // Now to turn those counts into proportions; need the counts independant of depend
    // Could re-count from this_set; but using agg as it is (probably) significantly smaller
    aggc := TABLE(agg,{node_id,number,value,TCnt := SUM(GROUP,Cnt)},node_id,number,value,MERGE);
    r := RECORD
      agg;
      REAL4 Prop; // Proportion pertaining to this dependant value
    END;
    // Now on each row we have the proportion of the node that is that dependant value
    prop := JOIN(agg,aggc,LEFT.node_id=RIGHT.node_id AND LEFT.number=RIGHT.number AND LEFT.value = RIGHT.value, TRANSFORM(r, SELF.Prop := LEFT.Cnt/RIGHT.Tcnt, SELF := LEFT),HASH);
    // Compute 1-gini coefficient for each node for each field for each value
    gini_per := TABLE(prop,{node_id,number,value,tcnt := SUM(GROUP,Cnt),val := SUM(GROUP,Prop*Prop)},node_id,number,value);
    // The gini coeff for each value is then formed into a weighted average to give the impurity based upon the field
    gini := TABLE(gini_per,{node_id,number,gini_t := SUM(GROUP,tcnt*val)/SUM(GROUP,tcnt)},node_id,number,FEW);
    // We can now work out which nodes to split and based upon which column
    splt := DEDUP( SORT( DISTRIBUTE( gini,HASH(node_id) ), node_id, -gini_t, LOCAL ), node_id, LOCAL );
    // We now need to allocate node-ids for the nodes we are about to create; because we cannot control the size of the discrete
    // fields we cannot do this via bit-shifting (as in the kd-trees); rather we will have to enumerate them an allocate sequentially
    // The 'aggc' really has nothing to do with the below; it is just a convenient list of node_id/number/value that
    // happens to be laying around - so we are using it rather than hitting a bigger dataset
    node_cand0 := JOIN(aggc,splt,LEFT.node_id=RIGHT.node_id AND LEFT.number=RIGHT.number,TRANSFORM(LEFT),LOOKUP);
    node_base := MAX(aggc,node_id); // Start allocating new node-ids from the highest previous
    // Allocate the new node-ids
    node_cand := PROJECT(node_cand0,TRANSFORM({node_cand0, t_node new_nodeid},SELF.new_nodeid := node_base+COUNTER, SELF := LEFT));
    // Construct a fake wNode to pass out splitting information
    nc0 := PROJECT(node_cand,TRANSFORM(wNode,SELF.value := LEFT.new_nodeid,SELF.depend := LEFT.value,SELF.level := p_level,SELF := LEFT,SELF := []));
    // Construct a list of record-ids to (new) node-ids (by joining to the real data)
    r1 := RECORD
      ML.Types.t_Recordid id;
      t_node nodeid;
    END;
    // Mapp will be distributed by id because this_set is - and a ,LOOKUP join does not destroy order
    mapp := JOIN(this_set,node_cand,LEFT.node_id=RIGHT.node_id AND LEFT.number=RIGHT.number AND LEFT.value=RIGHT.value, TRANSFORM(r1,SELF.id := LEFT.id,SELF.nodeid:=RIGHT.new_nodeid),LOOKUP);
    // Now use the mapping to actually reset all the points
    J := JOIN(this_set,mapp,LEFT.id=RIGHT.id,TRANSFORM(wNode,SELF.node_id:=RIGHT.nodeid,SELF.level:=LEFT.level+1,SELF := LEFT),LOCAL);
    RETURN J+nc0+nodes(level < p_level)+pass_thru;
  END;
  SHARED res := LOOP(ind1,Depth,Split(ROWS(LEFT),COUNTER));
  SplitF := RECORD
    t_node node_id; // The node that is being split
    t_level level; // The level the split is occuring
    ML.Types.t_FieldNumber number; // The column used to split
    ML.Types.t_Discrete value; // The value for the column in question
    t_node new_node_id; // The new node that value goes to
  END;
  EXPORT Splits := PROJECT(Res(id=0),TRANSFORM(SplitF,SELF.new_node_id := LEFT.value, SELF.value := LEFT.depend, SELF := LEFT)); // The split points used to partition each node id
  SHARED nsplits := res(id<>0);
  EXPORT Partitioned := PROJECT(nsplits,Node); // The training data - all partitioned
  // Now we want to create records to show the predicted dependant variable for each node; together with a %age hit rate
  mode_r := RECORD
    nsplits.node_id;
    nsplits.depend;
    Cnt := COUNT(GROUP);
    OCnt := 0; // Records 'other' than the current one (filled in later)
  END;
  d := TABLE(nsplits(number=1),mode_r,node_id,depend,MERGE);
  m := SORT( DISTRIBUTE(d,HASH(node_id)), node_id,-Cnt,LOCAL);
  mode_r rol(m le,m ri) := TRANSFORM
    SELF.OCnt := le.OCnt + ri.Cnt;
    SELF := le;
  END;
  m1 := ROLLUP(m,LEFT.node_id=RIGHT.node_id,rol(LEFT,RIGHT),LOCAL);
  EXPORT Modes := TABLE(m1,{node_id,depend,size := Cnt+OCnt,pcnt := 100.0 * Cnt / (Cnt+OCnt)});
  EXPORT Precision := SUM(Modes,size*pcnt/100)/SUM(Modes,size);
  EXPORT Counts := TABLE(Partitioned(number=1),{ node_id, Lvl := MAX(GROUP,level), Cnt := COUNT(GROUP) }, node_id, FEW); // Number of training elements in each partition
  EXPORT Purities := ML.Utils.Gini(nsplits(number=1),node_id,depend);
  EXPORT CountMean := AVE(Counts,Cnt);
  EXPORT CountVariance := VARIANCE(Counts,Cnt);
END;
// Splitting Function Based on Gini Impurity,
// Previously implemented in Decision MODULE by David Bayliss,
// changed to return a dataset with branch nodes and final nodes
EXPORT SplitsGiniImpurBased(DATASET(ML.Types.DiscreteField) ind,DATASET(ML.Types.DiscreteField) dep, t_level Depth=10,REAL Purity=1.0) := FUNCTION
  ind0 := ML.Utils.FatD(ind); // Ensure no sparsity in independents
  wNode init(ind0 le,dep ri) := TRANSFORM
    SELF.node_id := 1;
    SELF.level := 1;
    SELF.depend := ri.value; // Actually copies the dependant value to EVERY node - paying memory to avoid downstream cycles
    SELF := le;
  END;
  ind1 := JOIN(ind0, dep, LEFT.id = RIGHT.id, init(LEFT,RIGHT)); // If we were prepared to force DEP into memory then ,LOOKUP would go quicker
  res := LOOP(ind1, Depth, PartitionGiniImpurityBased(ROWS(LEFT), COUNTER, Purity));
  nodes := PROJECT(res(id=0),TRANSFORM(SplitF, SELF.new_node_id := LEFT.value, SELF.value := LEFT.depend, SELF := LEFT)); // The split points used to partition each node i
  mode_r := RECORD
res.node_id; res.level; res.depend; Cnt := COUNT(GROUP); END;
nsplits := TABLE(res(id<>0, number=1),mode_r,node_id, level, depend, FEW);
leafs:= PROJECT(nsplits, TRANSFORM(SplitF, SELF.number:=0, SELF.value:= LEFT.depend, SELF.new_node_id:=0, SELF:= LEFT));
RETURN nodes + leafs;
END;
// Function to split a set of nodes based on Information Gain Ratio and level.
// PartitionInfoGainRatioBased: one round of C4.5-style tree growth - for every node at p_level,
// pick the attribute with the highest information gain ratio, allocate one child node per attribute
// value and move the node's points down to those children. Pure nodes become leaf records (number=0).
EXPORT PartitionInfoGainRatioBased (DATASET(wNode) nodes, t_level p_level) := FUNCTION
  this_set0 := nodes(level = p_level);
  node_base:= MAX(nodes, node_id);
  // nodes could come from different splits having different sets of remaining attributes
  min_num:= TABLE(this_set0,{node_id, min_number:= MIN(GROUP, number)}, node_id);
  this_set1:= JOIN(this_set0, min_num, LEFT.node_id=RIGHT.node_id AND LEFT.number=RIGHT.min_number, LOOKUP);
  // Calculating Information Entropy of Nodes
  top_dep := TABLE(this_set1, {node_id, depend, cnt:= COUNT(GROUP)}, node_id, depend);
  top_dep_tot := TABLE(top_dep, {node_id, tot:= SUM(GROUP, cnt)}, node_id);
  tdp := RECORD
    top_dep;
    REAL4 prop; // Proportion based only on dependent value
    REAL4 plogp:= 0;
  END;
  P_Log_P(REAL P) := IF(P=1, 0, -P*LOG(P)/LOG(2));
  top_dep_p:= JOIN(top_dep, top_dep_tot, LEFT.node_id = RIGHT.node_id, TRANSFORM(tdp, SELF.prop:= LEFT.cnt/RIGHT.tot, SELF.plogp:= P_LOG_P(LEFT.cnt/RIGHT.tot), SELF:=LEFT));
  top_info := TABLE(top_dep_p, {node_id, info:= SUM(GROUP, plogp)}, node_id); // Information Entropy
  PureNodes := top_info(info = 0); // Pure Nodes have Info Entropy = 0
  // Transforming Pure Nodes into LEAF Nodes to return
  pass_thru:= JOIN(this_set1, PureNodes, LEFT.node_id=RIGHT.node_id, TRANSFORM(wNode, SELF.number:=0, SELF.value:=0, SELF:=LEFT), LOOKUP);
  // New working set after removing Pure Nodes
  this_set := JOIN(this_set0, PureNodes, LEFT.node_id=RIGHT.node_id, TRANSFORM(LEFT),LEFT ONLY, LOOKUP);
  // Looking for nodes with only one attribute to pass to the final set
  ths_NodeNumber:= TABLE(TABLE(this_set,{node_id, number, COUNT(GROUP)}, node_id, number),{node_id, cnt:= COUNT(GROUP)}, node_id);
  ths_one_attrib:= JOIN(this_set, ths_NodeNumber(cnt=1), LEFT.node_id=RIGHT.node_id, TRANSFORM(LEFT), LOOKUP);
  // Calculating Information Gain of possible splits
  child := TABLE(this_set, {node_id, number, value, depend, cnt := COUNT(GROUP)}, node_id, number, value, depend,MERGE);
  child_tot:= TABLE(child, {node_id, number, value, tot := SUM(GROUP,cnt)}, node_id, number, value, MERGE);
  csp := RECORD
    child_tot;
    REAL4 prop;
    REAL4 plogp;
  END;
  // Calculating Intrinsic Information Entropy of each attribute(split) per node
  csplit_p:= JOIN(child_tot, top_dep_tot, LEFT.node_id = RIGHT.node_id, TRANSFORM(csp, SELF.prop:= LEFT.tot/RIGHT.tot, SELF.plogp:= P_LOG_P(LEFT.tot/RIGHT.tot), SELF:=LEFT));
  csplit:= TABLE(csplit_p, {node_id, number, split_info:=SUM(GROUP, plogp)},node_id, number); // Intrinsic Info
  chp := RECORD
    child;
    REAL4 prop; // Proportion pertaining to this dependant value
    REAL4 plogp:= 0;
  END;
  // Information Entropy of new branches per split
  cprop := JOIN(child, child_tot, LEFT.node_id=RIGHT.node_id AND LEFT.number=RIGHT.number AND LEFT.value = RIGHT.value, TRANSFORM(chp, SELF.prop := LEFT.cnt/RIGHT.tot, SELF.plogp:= P_LOG_P(LEFT.cnt/RIGHT.tot), SELF:=LEFT));
  cplogp := TABLE(cprop, {node_id, number, value, cont:= SUM(GROUP,cnt), inf0:= SUM(GROUP, plogp)}, node_id, number, value);
  // Information Entropy of possible splits per node
  cinfo := TABLE(cplogp, {node_id, number, info:=SUM(GROUP, cont*inf0)/SUM(GROUP, cont)}, node_id, number);
  grec := RECORD
    t_node node_id;
    ML.Types.t_Discrete number;
    REAL4 gain;
  END;
  // Information Gain of possible splits per node
  gain := JOIN(cinfo, top_info, LEFT.node_id=RIGHT.node_id, TRANSFORM(grec, SELF.node_id:= LEFT.node_id, SELF.number:=LEFT.number, SELF.gain:= RIGHT.info - LEFT.info));
  grrec := RECORD
    t_node node_id;
    ML.Types.t_Discrete number;
    REAL4 gain_ratio;
  END;
  // Information Gain Ratio of possible splits per node
  gainRatio := JOIN(gain, csplit, LEFT.node_id=RIGHT.node_id AND LEFT.number=RIGHT.number, TRANSFORM(grrec, SELF.node_id:= LEFT.node_id, SELF.number:=LEFT.number, SELF.gain_ratio:= LEFT.gain/RIGHT.split_info));
  // Selecting the split with max Info Gain Ratio per node
  split:= DEDUP(SORT(DISTRIBUTE(gainRatio, HASH(node_id)), node_id, -gain_ratio, LOCAL), node_id, LOCAL);
  // New working set after removing split nodes
  ths_minus_spl:= JOIN(this_set, split, LEFT.node_id=RIGHT.node_id AND LEFT.number=RIGHT.number, TRANSFORM(LEFT), LEFT ONLY, LOOKUP);
  final_set:= ths_minus_spl + ths_one_attrib; // Need to add nodes with only one attrib
  // Creating new Nodes based on splits
  node_cand0:= JOIN(child_tot, split, LEFT.node_id=RIGHT.node_id AND LEFT.number=RIGHT.number, TRANSFORM(LEFT), LOOKUP);
  node_cand := PROJECT(node_cand0, TRANSFORM({node_cand0, t_node new_nodeid}, SELF.new_nodeid:= node_base + COUNTER, SELF:=LEFT));
  new_nodes := PROJECT(node_cand, TRANSFORM(wNode, SELF.value:= LEFT.new_nodeid, SELF.depend:= LEFT.value, SELF.level:=p_level, SELF:= LEFT, SELF:= []));
  // Construct a list of record-ids to (new) node-ids (by joining to the real data)
  r1 := RECORD
    ML.Types.t_Recordid id;
    t_node nodeid;
  END;
  mapp := JOIN(this_set,node_cand,LEFT.node_id=RIGHT.node_id AND LEFT.number=RIGHT.number AND LEFT.value=RIGHT.value, TRANSFORM(r1,SELF.id := LEFT.id,SELF.nodeid:=RIGHT.new_nodeid),LOOKUP);
  // Now use the mapping to actually reset all the points
  J := JOIN(final_set, mapp, LEFT.id=RIGHT.id, TRANSFORM(wNode,SELF.node_id:=RIGHT.nodeid,SELF.level:=LEFT.level+1,SELF := LEFT),LOCAL);
  RETURN J + new_nodes + nodes(level < p_level) + pass_thru;
END;
// Splitting Function Based on Information Gain Ratio,
// Returns a dataset with branch nodes and final nodes
EXPORT SplitsInfoGainRatioBased(DATASET(ML.Types.DiscreteField) ind,DATASET(ML.Types.DiscreteField) dep) := FUNCTION
  ind0 := ML.Utils.FatD(ind); // Ensure no sparsity in independents
  wNode init(ind0 le,dep ri) := TRANSFORM
    SELF.node_id := 1;
    SELF.level := 1;
    SELF.depend := ri.value; // Actually copies the dependant value to EVERY node - paying memory to avoid downstream cycles
    SELF := le;
  END;
  ind1 := JOIN(ind0, dep, LEFT.id = RIGHT.id, init(LEFT,RIGHT)); // If we were prepared to force DEP into memory then ,LOOKUP would go quicker
  // Keep splitting while undecided nodes remain at the current depth
  res := LOOP(ind1, MAX(ROWS(LEFT), level)>= COUNTER, PartitionInfoGainRatioBased(ROWS(LEFT), COUNTER));
  nodes := PROJECT(res(id=0),TRANSFORM(SplitF, SELF.new_node_id := LEFT.value, SELF.value := LEFT.depend, SELF := LEFT));
  mode_r := RECORD
    res.node_id;
    res.level;
    res.depend;
    cnt := COUNT(GROUP);
  END;
  nsplits := TABLE(res(id<>0),mode_r,node_id, level, depend, FEW); // branch nodes
  leafs:= PROJECT(nsplits, TRANSFORM(SplitF, SELF.number:=0, SELF.value:= LEFT.depend, SELF.new_node_id:=0, SELF:= LEFT)); // leaf nodes
  RETURN nodes + leafs;
END;
// SplitInstances: push each instance in Indep down the tree in 'mod'; per instance, keep the chain of
// matched split records, ending with the record whose new_node_id points at the instance's leaf
EXPORT SplitInstances(DATASET(Splitf) mod, DATASET(ML.Types.DiscreteField) Indep) := FUNCTION
  splits:= mod(new_node_id <> 0); // separate split or branches
  leafs := mod(new_node_id = 0); // from final nodes
  join0 := JOIN(Indep, splits, LEFT.number = RIGHT.number AND LEFT.value = RIGHT.value, LOOKUP, MANY);
  sort0 := SORT(join0, id, level, number, node_id, new_node_id);
  dedup0:= DEDUP(sort0, LEFT.id = RIGHT.id AND LEFT.new_node_id != RIGHT.node_id, KEEP 1, LEFT);
  dedup1:= DEDUP(dedup0, LEFT.id = RIGHT.id AND LEFT.new_node_id = RIGHT.node_id, KEEP 1, RIGHT);
  RETURN dedup1;
END;
// gSplitInstances: as SplitInstances, but for a forest of trees - every comparison
// is additionally scoped by group_id (one tree per group)
EXPORT gSplitInstances(DATASET(gSplitf) mod, DATASET(ML.Types.DiscreteField) Indep) := FUNCTION
  splits:= mod(new_node_id <> 0); // separate split or branches
  leafs := mod(new_node_id = 0); // from final nodes
  join0 := JOIN(Indep, splits, LEFT.number = RIGHT.number AND LEFT.value = RIGHT.value, LOOKUP, MANY);
  sort0 := SORT(join0, group_id, id, level, number, node_id, new_node_id);
  dedup0:= DEDUP(sort0, LEFT.group_id = RIGHT.group_id AND LEFT.id = RIGHT.id AND LEFT.new_node_id != RIGHT.node_id, KEEP 1, LEFT);
  dedup1:= DEDUP(dedup0, LEFT.group_id = RIGHT.group_id AND LEFT.id = RIGHT.id AND LEFT.new_node_id = RIGHT.node_id, KEEP 1, RIGHT);
  RETURN
dedup1; END;
// Function that prunes a Decision Tree based on Estimated Error (C4.5, Quinlan)
// Inputs:
// - nodes dataset from a learning process
// - Independent and Dependent datasets, should not use the same used in the learning process
// - z score corresponding to confidence factor, default z = 0.67449
//   default confidence factor = 0.25 -> the positive z for 2 * 0.25% confidence interval = between -0.67449 and 0.67449
EXPORT C45PruneTree(DATASET(Splitf) nodes, DATASET(ML.Types.DiscreteField) Indep, DATASET(ML.Types.DiscreteField) Dep, REAL z=0.67449):= FUNCTION
  splitData := SplitInstances(nodes, Indep); // splits the instances throughout the tree (looking for a leaf node, same as classify)
  branches := nodes(new_node_id <> 0); // identify branch nodes
  leafs := nodes(new_node_id = 0); // identify leaf nodes
  max_level := MAX(nodes, level);
  // Calculate the N x estimated error of a leaf node (upper confidence bound on the observed error rate f=e/N)
  node_error NxErrEst(node_error l):= TRANSFORM
    UNSIGNED4 N:= l.cnt;
    REAL4 f:= l.e/N;
    SELF.NxErr_est:= N*(f + z*z/(2*N) + z*SQRT(f/N - (f*f)/N + (z*z)/(4*N*N)) )/(1 + z*z/N);
    SELF:=l;
  END;
  // Store split_instance/final_node info
  final_node_instance final_nodes(RECORDOF(splitData) l, RECORDOF(leafs) r ):= TRANSFORM
    SELF.instance_id := l.id;
    SELF.root_node := l.node_id;
    SELF.root_level := l.level;
    SELF.final_node := r.node_id;
    SELF.final_level := r.level;
    SELF.final_class := r.value;
  END;
  // Store instance_class value and evaluate match with final_class value
  final_node_instance actual_class(final_node_instance l, ML.Types.DiscreteField r):= TRANSFORM
    SELF.instance_class := r.value;
    SELF.match := l.final_class = r.value;
    SELF:= l;
  END;
  // Return all children nodes of a branch
  ExplodeSubTree(DATASET(SplitF) branch):= FUNCTION
    local_level:= MAX(branch, level);
    RETURN branch + JOIN(nodes, branch(level = local_level), LEFT.node_id=RIGHT.new_node_id);
  END;
  // Return parent's node_id of a node (0 when the node has no parent)
  FindParent(ML.Types.t_RecordID child_id):= FUNCTION
    parent_node:= branches(new_node_id = child_id);
    RETURN if(exists(parent_node), MAX(parent_node,node_id), 0);
  END;
  // Populating instance-nodes with predicted and actual classes, and calculating leaf nodes Error Estimated
  class_as:= JOIN(splitData, leafs, LEFT.new_node_id = RIGHT.node_id, final_nodes(LEFT, RIGHT), LOOKUP); // classified as
  real_class:= JOIN(class_as, Dep, LEFT.instance_id = RIGHT.id, actual_class(LEFT, RIGHT), LOOKUP); // real classes and matches
  rc_err:= TABLE(real_class, {root_node, root_level, final_node, final_level, final_class, e:= COUNT(GROUP,real_class.match=FALSE), cnt:= COUNT(GROUP)}, root_node, root_level, final_node, final_level, final_class);
  leaf_error:=PROJECT(rc_err, node_error);
  leaf_NxErrEst:= PROJECT(leaf_error, NxErrEst(LEFT)); // Calculate N x error estimated on tree's leaf nodes
  // loop body of instance split accumulated
  loopbody1(DATASET(final_node_instance) nodes_inst, INTEGER1 p_level) := FUNCTION
    this_set:= nodes_inst(root_level = p_level);
    final_node_instance trx(SplitF l, final_node_instance r):= TRANSFORM
      SELF.root_node := l.node_id;
      SELF.root_level := l.level;
      SELF.final_node := r.root_node;
      SELF.final_level:= r.root_level;
      SELF:= r;
    END;
    join1:= JOIN(nodes, this_set, left.new_node_id = right.root_node, trx(LEFT, RIGHT));
    RETURN nodes_inst + join1;
  END;
  // Generating possible replacing nodes for each branch -> repo nodes
  acc_split:= LOOP(real_class, max_level, loopbody1(ROWS(LEFT), max_level - COUNTER)); // instance splits accumulated
  g_acc_split:= TABLE(acc_split, {root_node, root_level, final_class, cnt:= COUNT(GROUP)}, root_node, root_level, final_class);
  gtot_acc_split:= TABLE(g_acc_split,{root_node, root_level, tot:=SUM(GROUP, cnt)}, root_node, root_level);
  g_join:= JOIN(g_acc_split, gtot_acc_split, LEFT.root_node = RIGHT.root_node AND LEFT.root_level = RIGHT.root_level, TRANSFORM(node_error, SELF.root_node:= FindParent(LEFT.root_node), SELF.root_level:= LEFT.root_level -1, SELF.final_node:= LEFT.root_node, SELF.final_level:= LEFT.root_level, SELF.final_class:=LEFT.final_class, SELF.e:= RIGHT.tot - LEFT.cnt, SELF.cnt:=RIGHT.tot));
  g_sort:= SORT(g_join, final_node, e); // sorting based on error to choose the final class with less errors
  repo_nodes:= DEDUP(g_sort, final_node); // repo nodes
  repo_NxErrEst:= PROJECT(repo_nodes,NxErrEst(LEFT)); // Calculate N x error estimated on repo nodes
  // loop body of branches and repo nodes error estimated comparison
  loopbody2(DATASET(node_error) all_nodes, INTEGER1 n_level) := FUNCTION
    level_nodes:= all_nodes(root_level = n_level); // get only level nodes
    // calculating error estimated of branch (all leaf nodes with same root_node)
    g_level_nodes:= TABLE(level_nodes,{root_node, root_level, err:=SUM(GROUP, e), tot:= SUM(GROUP, cnt), totErr_est:=SUM(GROUP, nxerr_est)}, root_node, root_level);
    // transforming to update upper level error estimated value
    lnodes_NxErrEst:= PROJECT(g_level_nodes, TRANSFORM(node_error, SELF.root_node:= FindParent(LEFT.root_node), SELF.root_level:=LEFT.root_level -1, SELF.final_node:= LEFT.root_node, SELF.final_level:=LEFT.root_level, SELF.e:= LEFT.err, SELF.cnt:= LEFT.tot, SELF.nxerr_est:= LEFT.toterr_est, SELF.final_class:= -1));
    level_repo:= repo_NxErrEst(final_level= n_level); // get the repo nodes for the level
    to_chose:=SORT(lnodes_NxErrEst + level_repo, final_node, nxerr_est);
    for_update:=DEDUP(to_chose, final_node); // will update dataset with chosen nodes
    for_delete:= for_update(final_class >= 0); // extract leafs nodes from chosen nodes
    to_delete:=PROJECT(for_delete, TRANSFORM(SplitF, SELF.node_id:=LEFT.root_node, SELF.level:= LEFT.root_level, SELF.new_node_id:=LEFT.final_node, SELF:=[]));
    subtree_del := LOOP(to_delete, max_level, ExplodeSubTree(ROWS(LEFT))); // will erase whole subtrees of deleted nodes
    pass_thru_nodes:= JOIN(all_nodes, subtree_del, LEFT.root_node= RIGHT.node_id AND LEFT.final_node=RIGHT.new_node_id, TRANSFORM(LEFT), LEFT ONLY);
    RETURN pass_thru_nodes + for_update;
  END;
  // Comparing branches to repo nodes,
  // if repo node has smaller error estimated than branch, delete branch and replace
  // else generate upper level branch node with NxErrEst updated
  new_nodes:= LOOP(leaf_NxErrEst, max_level -1, loopbody2(ROWS(LEFT), max_level - COUNTER));
  Splitf to_leaf(node_error ne):= TRANSFORM
    SELF.node_id := ne.final_node;
    SELF.level := ne.final_level;
    SELF.number := 0;
    SELF.value := ne.final_class;
    SELF.new_node_id:= 0;
  END;
  // transforming results into Splitf records to return
  new_branches:= JOIN(nodes, new_nodes, LEFT.node_id = RIGHT.root_node AND LEFT.new_node_id = RIGHT.final_node, TRANSFORM(LEFT), LOOKUP);
  new_leafs:= PROJECT(new_nodes(final_class > -1), to_leaf(LEFT));
  RETURN new_branches + new_leafs;
END;
// Function that generates a DT and then prunes it using C45PruneTree
// The function receives Independent and Dependent datasets,
// folds them into numFolds folds, minimum 3,
// uses numFolds - 1 folds to generate the unpruned DT,
// uses 1 fold to prune the DT.
// Returns the pruned DT.
EXPORT SplitsIGR_Pruned(DATASET(Types.DiscreteField) Indep, DATASET(Types.DiscreteField) Dep, INTEGER1 numFolds = 3, REAL z = 0.67449 ):=FUNCTION
  folds:= ML.Sampling.DiscreteDepNFolds(Dep, MAX(3, numFolds));
  trainIndep:= JOIN(Indep, folds(number = 1), LEFT.id = RIGHT.id, LEFT ONLY); // train on folds 2..numFolds
  trainDep := JOIN(Dep, folds(number = 1), LEFT.id = RIGHT.id, LEFT ONLY);
  testIndep := JOIN(Indep, folds(number > 1), LEFT.id = RIGHT.id, LEFT ONLY); // prune with fold 1
  testDep := JOIN(Dep, folds(number > 1), LEFT.id = RIGHT.id, LEFT ONLY);
  raw_tree:= SplitsInfoGainRatioBased(trainIndep, trainDep);
  // BUGFIX: forward the caller-supplied z score - previously it was accepted but silently ignored,
  // so C45PruneTree always ran with its own default confidence factor
  RETURN C45PruneTree(raw_tree, testIndep, testDep, z);
END;
// Function to split a set of nodes based on Feature Selection and Gini Impurity,
// the nodes received were generated sampling with replacement nTrees times.
// Note: it selects kFeatSel out of mTotFeats features for each sample, features must start at 1 and cannot exist a gap in the numeration.
EXPORT RndFeatSelPartitionGIBased(DATASET(gNode) nodes, t_Count nTrees, t_Count kFeatSel, t_Count mTotFeats, t_Count p_level, REAL Purity=1.0):= FUNCTION this_set_all := DISTRIBUTE(nodes, HASH(group_id, node_id)); node_base := MAX(this_set_all, node_id); // Start allocating new node-ids from the highest previous featSet:= NxKoutofM(nTrees, kFeatSel, mTotFeats); // generating list of features selected for each tree minFeats := TABLE(featSet, {gNum, minNumber := MIN(GROUP, number)}, gNum, FEW); // chose the min feature number from the sample this_minFeats:= JOIN(this_set_all, minFeats, LEFT.group_id = RIGHT.gNum AND LEFT.number= RIGHT.minNumber, LOOKUP); Purities := ML.Utils.Gini(this_minFeats, node_id, depend); // Compute the purities for each node PureEnough := Purities(1-Purity >= gini); // just need one match to create a leaf node, all similar instances will fall into the same leaf nodes pass_thru := JOIN(PureEnough, this_set_all , LEFT.node_id=RIGHT.node_id, TRANSFORM(gNode, SELF.id:=0, SELF.number:=0, SELF.value:=0, SELF:=RIGHT), PARTITION RIGHT, KEEP(1)); // splitting the instances that did not reach a leaf node this_set_out:= JOIN(this_set_all, PureEnough, LEFT.node_id=RIGHT.node_id, TRANSFORM(LEFT), LEFT ONLY, LOOKUP); this_set := JOIN(this_set_out, featSet, LEFT.group_id = RIGHT.gNum AND LEFT.number= RIGHT.number, TRANSFORM(LEFT), LOOKUP); agg := TABLE(this_set, {group_id, node_id, number, value, depend,Cnt := COUNT(GROUP)}, group_id, node_id, number, value, depend, LOCAL); aggc := TABLE(agg, {group_id, node_id, number, value, TCnt := SUM(GROUP, Cnt)}, group_id, node_id, number, value, LOCAL); r := RECORD agg; REAL4 Prop; // Proportion pertaining to this dependant value END; prop := JOIN(agg, aggc, LEFT.group_id = RIGHT.group_id AND LEFT.node_id = RIGHT.node_id AND LEFT.number=RIGHT.number AND LEFT.value = RIGHT.value, TRANSFORM(r, SELF.Prop := LEFT.Cnt/RIGHT.Tcnt, SELF := LEFT), HASH); gini_per := TABLE(prop, {group_id, node_id, number, value, tcnt := 
SUM(GROUP,Cnt),val := SUM(GROUP,Prop*Prop)}, group_id, node_id, number, value, LOCAL); gini := TABLE(gini_per, {group_id, node_id, number, gini_t := SUM(GROUP,tcnt*val)/SUM(GROUP,tcnt)}, group_id, node_id, number, FEW, LOCAL); splt := DEDUP(SORT(gini, group_id, node_id, -gini_t, LOCAL), group_id, node_id, LOCAL); node_cand0 := JOIN(aggc, splt, LEFT.group_id = RIGHT.group_id AND LEFT.node_id = RIGHT.node_id AND LEFT.number = RIGHT.number, TRANSFORM(LEFT), LOOKUP, LOCAL); node_cand := PROJECT(node_cand0, TRANSFORM({node_cand0, t_node new_nodeid}, SELF.new_nodeid := node_base + COUNTER, SELF := LEFT)); // new split nodes found nc0 := PROJECT(node_cand, TRANSFORM(gNode, SELF.value := LEFT.new_nodeid, SELF.depend := LEFT.value, SELF.level := p_level, SELF := LEFT, SELF := []), LOCAL); // Assignig instances that didn't reach a leaf node to (new) node-ids (by joining to the sampled data) r1 := RECORD ML.Types.t_Recordid id; t_node nodeid; END; mapp := JOIN(this_set, node_cand, LEFT.group_id = RIGHT.group_id AND LEFT.node_id=RIGHT.node_id AND LEFT.number=RIGHT.number AND LEFT.value=RIGHT.value, TRANSFORM(r1, SELF.id := LEFT.id, SELF.nodeid:=RIGHT.new_nodeid ),LOOKUP, LOCAL); // Now use the mapping to actually reset all the points J := JOIN(this_set_out, mapp,LEFT.id=RIGHT.id,TRANSFORM(gNode, SELF.node_id:=RIGHT.nodeid, SELF.level:=LEFT.level+1, SELF := LEFT),LOCAL); RETURN pass_thru + nc0 + J; // returning leaf nodes, new splits nodes and reassigned instances END; SHARED DepGroupedRec := RECORD(DiscreteField) UNSIGNED group_id := 0; t_RecordID new_id := 0; END; SHARED DepGroupedRec GroupDepRecords (DiscreteField l, idListGroupRec r) := TRANSFORM SELF.group_id := r.gNum; SELF.new_id := r.id; SELF := l; END; // Function used in Random Forest classifier learning // Note: it selects fsNum out of total number of features, they must start at 1 and cannot exist a gap in the numeration. 
// Gini Impurity's default parameters: Purity = 1.0 and maxLevel (Depth) = 32 (up to 126 max iterations) // more info http://www.stat.berkeley.edu/~breiman/RandomForests/cc_home.htm#overview EXPORT SplitFeatureSampleGI(DATASET(Types.DiscreteField) Indep, DATASET(Types.DiscreteField) Dep, t_Index treeNum, t_Count fsNum, REAL Purity=1.0, t_level maxLevel=32) := FUNCTION N := MAX(Dep, id); // Number of Instances totFeat := COUNT(Indep(id=N)); // Number of Features depth := MIN(126, maxLevel); // Max number of iterations when building trees (max 126 levels) // sampling with replacement the original dataset to generate treeNum Datasets grList:= ML.Sampling.GenerateNSampleList(treeNum, N); // the number of records will be N * treeNum groupDep:= JOIN(Dep, grList, LEFT.id = RIGHT.oldId, GroupDepRecords(LEFT, RIGHT)); // if grList were not too big we should use lookup // groupDep:= JOIN(Dep, grList, LEFT.id = RIGHT.oldId, GroupDepRecords(LEFT, RIGHT), MANY LOOKUP); ind0 := ML.Utils.FatD(Indep); // Ensure no sparsity in independents gNode init(DiscreteField ind, DepGroupedRec depG) := TRANSFORM SELF.group_id := depG.group_id; SELF.node_id := depG.group_id; SELF.level := 1; SELF.depend := depG.value; // Actually copies the dependant value to EVERY node - paying memory to avoid downstream cycles SELF.id := depG.new_id; SELF := ind; END; ind1 := JOIN(ind0, groupDep, LEFT.id = RIGHT.id, init(LEFT,RIGHT)); // If we were prepared to force DEP into memory then ,LOOKUP would go quicker // generating best feature_selection-gini_impurity splits, loopfilter level = COUNTER let pass only the nodes to be splitted for any current level res := LOOP(ind1, LEFT.level=COUNTER, COUNTER < depth , RndFeatSelPartitionGIBased(ROWS(LEFT), treeNum, fsNum, totFeat, COUNTER, Purity)); splits := PROJECT(res(id=0, number>0),TRANSFORM(gSplitF, SELF.new_node_id := LEFT.value, SELF.value := LEFT.depend, SELF := LEFT)); // node splits leafs1 := PROJECT(res(id=0, number=0),TRANSFORM(gSplitF, SELF.number:=0, 
SELF.value:= LEFT.depend, SELF.new_node_id:=0, SELF:= LEFT)); // leafs nodes mode_r := RECORD res.group_id; res.node_id; res.level; res.depend; Cnt := COUNT(GROUP); END; // Taking care instances (id>0) that reached maximum level and did not turn into a leaf yet depCnt := TABLE(res(id>0, number=1),mode_r, group_id, node_id, level, depend, FEW); depCntSort := SORT(depCnt, group_id, node_id, cnt); // if more than one dependent value for node_id depCntDedup := DEDUP(depCntSort, group_id, node_id); // the class value with more counts is selected leafs2:= PROJECT(depCntDedup, TRANSFORM(gSplitF, SELF.number:=0, SELF.value:= LEFT.depend, SELF.new_node_id:=0, SELF:= LEFT)); RETURN splits + leafs1+ leafs2; END; END;
41,611
https://github.com/KiroTheBlueFox/MoBlocks/blob/master/src/main/java/kirothebluefox/moblocks/content/decoration/colorableflowerpot/ColorableFlowerPotTileRenderer.java
Github Open Source
Open Source
MIT
2,023
MoBlocks
KiroTheBlueFox
Java
Code
132
761
package kirothebluefox.moblocks.content.decoration.colorableflowerpot; import com.mojang.blaze3d.vertex.PoseStack; import kirothebluefox.moblocks.content.ModTileEntities; import net.minecraft.client.Minecraft; import net.minecraft.client.color.block.BlockColors; import net.minecraft.client.renderer.ItemBlockRenderTypes; import net.minecraft.client.renderer.MultiBufferSource; import net.minecraft.client.renderer.RenderType; import net.minecraft.client.renderer.block.BlockRenderDispatcher; import net.minecraft.client.renderer.block.ModelBlockRenderer; import net.minecraft.client.renderer.blockentity.BlockEntityRenderer; import net.minecraft.client.renderer.blockentity.BlockEntityRendererProvider; import net.minecraft.client.renderer.blockentity.BlockEntityRenderers; import net.minecraft.world.item.BlockItem; import net.minecraft.world.item.Item; import net.minecraft.world.item.ItemStack; import net.minecraft.world.level.Level; import net.minecraft.world.level.block.state.BlockState; import net.minecraftforge.client.model.data.ModelData; public class ColorableFlowerPotTileRenderer implements BlockEntityRenderer<ColorableFlowerPotTile> { public ColorableFlowerPotTileRenderer(BlockEntityRendererProvider.Context context) { } public static void register() { BlockEntityRenderers.register(ModTileEntities.COLORABLE_FLOWER_POT, ColorableFlowerPotTileRenderer::new); } @SuppressWarnings("deprecation") @Override public void render(ColorableFlowerPotTile tileEntityIn, float partialTicks, PoseStack matrixStackIn, MultiBufferSource bufferIn, int combinedLightIn, int combinedOverlayIn) { ItemStack itemstack = tileEntityIn.getItem(); Item item = itemstack.getItem(); if (!itemstack.isEmpty()) { if (item instanceof BlockItem) { matrixStackIn.pushPose(); matrixStackIn.translate(0, 0.25, 0); BlockState blockstate = ((BlockItem) item).getBlock().defaultBlockState(); RenderType renderType = ItemBlockRenderTypes.getMovingBlockRenderType(blockstate); BlockRenderDispatcher blockDispatcher = 
Minecraft.getInstance().getBlockRenderer(); Level world = tileEntityIn.getLevel(); ModelBlockRenderer renderer = new ModelBlockRenderer(BlockColors.createDefault()); renderer.renderModel( matrixStackIn.last(), bufferIn.getBuffer(renderType), blockstate, blockDispatcher.getBlockModel(blockstate), 0.0f, 0.0f, 0.0f, combinedLightIn, combinedOverlayIn ); matrixStackIn.popPose(); } } } }
41,628
https://github.com/limiao2008/ActivitiSourceCode/blob/master/modules/activiti-bpmn-converter/src/main/java/org/activiti/bpmn/converter/BpmnXMLConverter.java
Github Open Source
Open Source
Apache-2.0
2,021
ActivitiSourceCode
limiao2008
Java
Code
2,145
11,681
/* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.activiti.bpmn.converter; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.io.UnsupportedEncodingException; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.xml.XMLConstants; import javax.xml.stream.XMLInputFactory; import javax.xml.stream.XMLOutputFactory; import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamReader; import javax.xml.stream.XMLStreamWriter; import javax.xml.transform.stax.StAXSource; import javax.xml.transform.stream.StreamSource; import javax.xml.validation.Schema; import javax.xml.validation.SchemaFactory; import javax.xml.validation.Validator; import org.activiti.bpmn.constants.BpmnXMLConstants; import org.activiti.bpmn.converter.alfresco.AlfrescoStartEventXMLConverter; import org.activiti.bpmn.converter.alfresco.AlfrescoUserTaskXMLConverter; import org.activiti.bpmn.converter.child.DocumentationParser; import org.activiti.bpmn.converter.child.IOSpecificationParser; import org.activiti.bpmn.converter.child.MultiInstanceParser; import org.activiti.bpmn.converter.export.ActivitiListenerExport; import org.activiti.bpmn.converter.export.BPMNDIExport; import org.activiti.bpmn.converter.export.CollaborationExport; import org.activiti.bpmn.converter.export.DataStoreExport; import 
org.activiti.bpmn.converter.export.DefinitionsRootExport; import org.activiti.bpmn.converter.export.MultiInstanceExport; import org.activiti.bpmn.converter.export.ProcessExport; import org.activiti.bpmn.converter.export.SignalAndMessageDefinitionExport; import org.activiti.bpmn.converter.parser.BpmnEdgeParser; import org.activiti.bpmn.converter.parser.BpmnShapeParser; import org.activiti.bpmn.converter.parser.DataStoreParser; import org.activiti.bpmn.converter.parser.DefinitionsParser; import org.activiti.bpmn.converter.parser.ExtensionElementsParser; import org.activiti.bpmn.converter.parser.ImportParser; import org.activiti.bpmn.converter.parser.InterfaceParser; import org.activiti.bpmn.converter.parser.ItemDefinitionParser; import org.activiti.bpmn.converter.parser.LaneParser; import org.activiti.bpmn.converter.parser.MessageFlowParser; import org.activiti.bpmn.converter.parser.MessageParser; import org.activiti.bpmn.converter.parser.ParticipantParser; import org.activiti.bpmn.converter.parser.PotentialStarterParser; import org.activiti.bpmn.converter.parser.ProcessParser; import org.activiti.bpmn.converter.parser.ResourceParser; import org.activiti.bpmn.converter.parser.SignalParser; import org.activiti.bpmn.converter.parser.SubProcessParser; import org.activiti.bpmn.converter.util.BpmnXMLUtil; import org.activiti.bpmn.converter.util.InputStreamProvider; import org.activiti.bpmn.exceptions.XMLException; import org.activiti.bpmn.model.Activity; import org.activiti.bpmn.model.Artifact; import org.activiti.bpmn.model.Association; import org.activiti.bpmn.model.BaseElement; import org.activiti.bpmn.model.BooleanDataObject; import org.activiti.bpmn.model.BoundaryEvent; import org.activiti.bpmn.model.BpmnModel; import org.activiti.bpmn.model.DateDataObject; import org.activiti.bpmn.model.DoubleDataObject; import org.activiti.bpmn.model.EventSubProcess; import org.activiti.bpmn.model.FlowElement; import org.activiti.bpmn.model.FlowNode; import 
org.activiti.bpmn.model.IntegerDataObject; import org.activiti.bpmn.model.LongDataObject; import org.activiti.bpmn.model.Pool; import org.activiti.bpmn.model.Process; import org.activiti.bpmn.model.SequenceFlow; import org.activiti.bpmn.model.StringDataObject; import org.activiti.bpmn.model.SubProcess; import org.activiti.bpmn.model.TextAnnotation; import org.activiti.bpmn.model.Transaction; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.xml.sax.SAXException; /** * @author Tijs Rademakers * @author Joram Barrez * @desc 此类中并没有构造() * 所以 分析静态代码块 * * * * * 该类的静态代码块主要用于初始化类中的各种属性值 并且类中的静态代码块只会被执行一次 * 由于实例化BpmnXMLConverter 类的同时 该类已经被JVM 加载 所以静态代码块 会先执行 * 而静态代码块会调用 addConverter() * 最终 会将流程元素 以及其对应的解析器添加 到 * convertersToBpmnMap,convertersToXMLMap集合中 * * 流程文档中的大部分元素与之对应的解析器是 一一对应关系, 但是对于dataObject 类型的元素来说 * 就需要特殊处理一下,因为该类型的元素仅仅是数据类型不同而已, 其他属性定义几乎完全相同 * 常用数据类型有 String Boolean Integer 等,因此没必要为每一种具体的数据类型单独定义一个解析器 * 只需要在dataObject元素对应的解析器中根据数据类型进行区分处理即可 * 附带的好处就是 可以将不同数据类型的元素解析工作集中起来管理, 这样也可以控制不同数据类型的元素 * 按照指定的先后顺序进行解析 * * ValuedDataObjectXMLConverter 类负责解析dataObject元素 * * 规律: * 元素解析之后 都会将解析结果添加到父级元素(process 或者subprocess中) 有这样一个问题 * 节点和连线如何关联呢???? * */ public class BpmnXMLConverter implements BpmnXMLConstants { protected static final Logger LOGGER = LoggerFactory.getLogger(BpmnXMLConverter.class); protected static final String BPMN_XSD = "org/activiti/impl/bpmn/parser/BPMN20.xsd"; protected static final String DEFAULT_ENCODING = "UTF-8"; /* 实例化一系列元素解析器 此Map 存储的是 元素 以及 元素对应的解析器, key 为 String ,存储流程文档中定义的元素名称, 对应 cnverter.getXMLElementName() 返回值(流程文档中元素的名称) value为 元素对应的解析器, eg: 解析结束事件(endEvent) 元素的时候 可以直接从 此Map中查找 key为 endEvent的值 这样就查询到了 EndEventXMLConverter类 使用Map结构的好处 如果使用List 会存在如下问题 1)需要U型你换遍历解析器集合才能查找到适配当前元素的解析器, 2)客户端向该集合添加元素解析器时 可以能会造成同一个元素的解析器有多个, 如果同一个元素对应多个解析器, 那么引擎该哪一个结果为准? 
如果使用Map就不会出现上述问题 客户端可以根据key(元素名称) 值 查找元素对应的解析器 而且相同的key值只能存储一个 所以如果想要使用自定义元素解析器 只需要根据key 值覆盖引擎默认的元素解析器即可 */ protected static Map<String, BaseBpmnXMLConverter> convertersToBpmnMap = new HashMap<String, BaseBpmnXMLConverter>(); protected static Map<Class<? extends BaseElement>, BaseBpmnXMLConverter> convertersToXMLMap = new HashMap<Class<? extends BaseElement>, BaseBpmnXMLConverter>(); protected ClassLoader classloader; protected List<String> userTaskFormTypes; protected List<String> startEventFormTypes; //初始化各种内置元素解析器 例如 signalParser protected BpmnEdgeParser bpmnEdgeParser = new BpmnEdgeParser(); protected BpmnShapeParser bpmnShapeParser = new BpmnShapeParser(); protected DefinitionsParser definitionsParser = new DefinitionsParser(); protected DocumentationParser documentationParser = new DocumentationParser(); protected ExtensionElementsParser extensionElementsParser = new ExtensionElementsParser(); protected ImportParser importParser = new ImportParser(); protected InterfaceParser interfaceParser = new InterfaceParser(); protected ItemDefinitionParser itemDefinitionParser = new ItemDefinitionParser(); protected IOSpecificationParser ioSpecificationParser = new IOSpecificationParser(); protected DataStoreParser dataStoreParser = new DataStoreParser(); protected LaneParser laneParser = new LaneParser(); protected MessageParser messageParser = new MessageParser(); protected MessageFlowParser messageFlowParser = new MessageFlowParser(); protected MultiInstanceParser multiInstanceParser = new MultiInstanceParser(); protected ParticipantParser participantParser = new ParticipantParser(); protected PotentialStarterParser potentialStarterParser = new PotentialStarterParser(); protected ProcessParser processParser = new ProcessParser(); protected ResourceParser resourceParser = new ResourceParser(); protected SignalParser signalParser = new SignalParser(); protected SubProcessParser subProcessParser = new SubProcessParser(); static { //省略一系列的元素解析器添加过程 // events 
addConverter(new EndEventXMLConverter()); addConverter(new StartEventXMLConverter()); // tasks addConverter(new BusinessRuleTaskXMLConverter()); addConverter(new ManualTaskXMLConverter()); addConverter(new ReceiveTaskXMLConverter()); addConverter(new ScriptTaskXMLConverter()); addConverter(new ServiceTaskXMLConverter()); addConverter(new SendTaskXMLConverter()); addConverter(new UserTaskXMLConverter()); addConverter(new TaskXMLConverter()); addConverter(new CallActivityXMLConverter()); // gateways addConverter(new EventGatewayXMLConverter()); addConverter(new ExclusiveGatewayXMLConverter()); addConverter(new InclusiveGatewayXMLConverter()); addConverter(new ParallelGatewayXMLConverter()); addConverter(new ComplexGatewayXMLConverter()); // connectors addConverter(new SequenceFlowXMLConverter()); // catch, throw and boundary event addConverter(new CatchEventXMLConverter()); addConverter(new ThrowEventXMLConverter()); addConverter(new BoundaryEventXMLConverter()); // artifacts addConverter(new TextAnnotationXMLConverter()); addConverter(new AssociationXMLConverter()); // data store reference addConverter(new DataStoreReferenceXMLConverter()); /* 流程文档中的大部分元素与之对应的解析器都是一一对应的关系 但是对于dataObject 元素来说 就需要特殊处理一下, 因为该类型的元素仅仅是数据类型的不同而已, 其他的属性定义几乎完全相同, 常用的 有String ,Boolean Integer 等 因此没有必要为每一种具体的数据类型 单独定义一个解析器 只需要在dataObject元素对应的解析器中 根据数据类型 进行区分处理即可 附带的好处 就是 可以将不同数据类型的元素解析工作集中起来管理, 这样也可以控制不同数据类型的元素按照指定的先后顺序进行解析 ValuedDataObjectXMLConverter 负责解析 dataObject元素 */ // data objects addConverter(new ValuedDataObjectXMLConverter(), StringDataObject.class); addConverter(new ValuedDataObjectXMLConverter(), BooleanDataObject.class); addConverter(new ValuedDataObjectXMLConverter(), IntegerDataObject.class); addConverter(new ValuedDataObjectXMLConverter(), LongDataObject.class); addConverter(new ValuedDataObjectXMLConverter(), DoubleDataObject.class); addConverter(new ValuedDataObjectXMLConverter(), DateDataObject.class); // Alfresco types addConverter(new AlfrescoStartEventXMLConverter()); 
addConverter(new AlfrescoUserTaskXMLConverter()); } /* 向此类中的 convertersToBpmnMap 和 convertersToXMLMap 中添加 元素解析器 开发人员可以通过该() 添加自定义元素解析器, 从而替换引擎默认的元素解析器, 该方法 非常重要 */ public static void addConverter(BaseBpmnXMLConverter converter) { addConverter(converter, converter.getBpmnElementType()); } /* 该Map convertersToBpmnMap 存储元素以及元素对应的解析器, key为String 存储流程文档定义的元素名称 converter.getXMLElementName() value为元素对应的解析器 例如 解析结束事件(endEvent) 可以直接从 此Map 中 查找key为endEvent的值 这样就可以查询到 EndEventXMLConverter类 为何使用Map结构???? 方便我们扩展 */ public static void addConverter(BaseBpmnXMLConverter converter, Class<? extends BaseElement> elementType) { convertersToBpmnMap.put(converter.getXMLElementName(), converter); convertersToXMLMap.put(elementType, converter); } public void setClassloader(ClassLoader classloader) { this.classloader = classloader; } public void setUserTaskFormTypes(List<String> userTaskFormTypes) { this.userTaskFormTypes = userTaskFormTypes; } public void setStartEventFormTypes(List<String> startEventFormTypes) { this.startEventFormTypes = startEventFormTypes; } /* 使用BPMN20.XSD 文件以及该文件所引入的 其他XSD 文件来验证流程文档中定义的元素 是否符合其约束 只有validateSchema 为true 才会开启流程文档元素的验证工作 开启之后,根据enableSafeBpmnXML 参数执行不同的逻辑 结论: 不管使用什么方式验证 schema 首先都会调用 createScheam 创建Schema 对象 然后基于该对象获取验证器, 最后直接使用验证器进行流程文档的验证工作, */ public void validateModel(InputStreamProvider inputStreamProvider) throws Exception { Schema schema = createSchema(); Validator validator = schema.newValidator(); validator.validate(new StreamSource(inputStreamProvider.getInputStream())); //使用的是StreamSource对象 } /* 验证是否符合Bpmn规范 */ public void validateModel(XMLStreamReader xmlStreamReader) throws Exception { Schema schema = createSchema(); Validator validator = schema.newValidator(); validator.validate(new StAXSource(xmlStreamReader)); //使用的是STAXSource对象 } /* BPMN_XSD 位于 activiti-bpmn-converter.jar包中 classloader 的使用优先级最高 如果开发人员 想要为classloader 赋值 只需要自定义一个 文档转换器 并且继承BpmnXMLConverter类 然后为其设置classloader属性值即可 */ protected Schema createSchema() throws SAXException { 
//获取工厂类 SchemaFactory factory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI); //获取工厂类 Schema schema = null; //判断当前类的 classloader 是否存在 if (classloader != null) { /* 如果存在则, 直接通过类加载器 获取BPMN_XSD 获取文件流 newSchema() 依赖 BPMN_XSD资源文件 */ schema = factory.newSchema(classloader.getResource(BPMN_XSD)); // } //判断schema为空 if (schema == null) { //创建 Schema 对象 依赖BPMN_XSD 资源文件 // 通过 BpmnXMLConverter 类获取类加载器, 然后再通过该类加载器 获取BPMN_XSD 文件流 schema = factory.newSchema(BpmnXMLConverter.class.getClassLoader().getResource(BPMN_XSD)); } //判断schema 为空 则报错 if (schema == null) { throw new XMLException("BPMN XSD could not be found"); } return schema; } /* 解析流程文档中的元素,最终将元素解析结果封装为 BaseElement 最终返回BpmnModel 对象 可以将BPMNModel 理解为 流程文档解析之后的内存对象 流程文档中的所有元素的解析结果 都存储在 该对象中 开发人员可以直接通过该对象 获取流程文档中定义的所有元素的信息 在正是环境中 enableSafeBpmnXml 建议设置为 true,这样Activiit引擎解析流程文档时会理解验证 流程文档中定义的元素是否符合BPMN20.xsd文件的约束要求, 方便及早发现错误信息 */ public BpmnModel convertToBpmnModel(InputStreamProvider inputStreamProvider, boolean validateSchema, boolean enableSafeBpmnXml) { return convertToBpmnModel(inputStreamProvider, validateSchema, enableSafeBpmnXml, DEFAULT_ENCODING); } public BpmnModel convertToBpmnModel(InputStreamProvider inputStreamProvider, boolean validateSchema, boolean enableSafeBpmnXml, String encoding) { XMLInputFactory xif = XMLInputFactory.newInstance(); //实例化工厂 //为 此对象 添加防护措施, 防止外部DTD 或者XSD入侵 if (xif.isPropertySupported(XMLInputFactory.IS_REPLACING_ENTITY_REFERENCES)) { xif.setProperty(XMLInputFactory.IS_REPLACING_ENTITY_REFERENCES, false); } if (xif.isPropertySupported(XMLInputFactory.IS_SUPPORTING_EXTERNAL_ENTITIES)) { xif.setProperty(XMLInputFactory.IS_SUPPORTING_EXTERNAL_ENTITIES, false); } if (xif.isPropertySupported(XMLInputFactory.SUPPORT_DTD)) { xif.setProperty(XMLInputFactory.SUPPORT_DTD, false); } // 初始化此类 InputStreamReader in = null; try { in = new InputStreamReader(inputStreamProvider.getInputStream(), encoding); XMLStreamReader xtr = xif.createXMLStreamReader(in); // 创建此类 try { if (validateSchema) 
{ //判断是否 开启了Schema验证, 如果开启了 则需要验证流程文档中定义的元素是否符合XSD 文件约束要求 if (!enableSafeBpmnXml) { validateModel(inputStreamProvider); } else { validateModel(xtr); } // 验证完成之后 需要重新打开 InputStreamReader 并且实例化 XMLStreamReader类 in = new InputStreamReader(inputStreamProvider.getInputStream(), encoding); //因为Schema文件验证完毕之后该流已经被关闭了, 因此需要重新打开该流 xtr = xif.createXMLStreamReader(in); } } catch (Exception e) { throw new XMLException(e.getMessage(), e); } // 以上 所有步骤 无误之后 直接 调用该() 开始解析流程文档元素 return convertToBpmnModel(xtr); } catch (UnsupportedEncodingException e) { throw new XMLException("The bpmn 2.0 xml is not UTF8 encoded", e); } catch (XMLStreamException e) { throw new XMLException("Error while reading the BPMN 2.0 XML", e); } finally { //关流 if (in != null) { try { in.close(); } catch (IOException e) { LOGGER.debug("Problem closing BPMN input stream", e); } } } } /* 当元素解析的运行环境(加载 和验证 流程文档信息) 准备完毕 开始调用此() 关于此类 解析流程文档命名空间 因为STAX 解析流程文档的熟悉怒是按照 流程文档中元素定义的先后顺序自上而下解析的 所以首先解析 父元素 definitions 该元素对应的解析器为 DefinitionsParser 该元素可以定义一系列命名空间, URL 形如<definitions xmlns="http://www.omg.org/spec/BPMN/20100524/MODEL"></definitions> 因为流程文档中的元素名称是由开发者定义的 ,为了避免命名冲突 需要引入命名空间对元素加以区分 解析外围元素 什么是外围元素 首先需要搞清楚这个概念, 平时定义的流程元素, 例如开始节点等元素 都作为process元素的子元素存在 这里所说的外围元素 指的是作为definition元素的子元素的同时 还作为process元素的兄弟节点存在 如消息元素 <definitions> <message> 最常见的外围元素有message,signal 等 流程文档中定义的大部分外围元素是没有先后顺序之分的,既可以在 process元素之上,可以在下 并且外围元素怒 种类不多 并且不容易变动 所以该类型的元素解析器都会在当前类中进行实例化, 有关外围元素的解析处理逻辑可以查看对应的解析器 例如 : 消息元素的解析 messageFlowParser.parse(xtr,model) 此() 需要2个入参 xtr 参数 程序可以根据该参数值从流程文档中解析元素的属性值 model BpmnModel对象,元素解析完毕, 可以直接将元解析结果存储到该元素对应的属性承载类实例对象中 然后再将其添加到BpmnModel对象中 ,因为外围元素种类不多 平时开发也不经常使用,为了减少风险,增加可控度 外围元素的定义以及解析不建议修改和扩展 解析通用元素 流程文档中通用元素的种类非常多 例如文档元素 documentation 扩展元素 extensionElements 等 这些元素作为流程定义三大要素的子元素存在 试想一下: 如果每个元素都在自己的解析处理逻辑中对通用元素进行解析, 势必会造成相同的解析代码 分散在各个模块中 无形之间增加了项目维护的复杂度 如果通用元素的解析规则变了, 则需要修改 每一个模块的对应的解析逻辑, 工作量非常大 如果将通用元素的解析功能抽离出来进行统一管理, 则以上这些问题 都不会出现 虽然暂时还没有看到具体元素的解析过程,但是已经看到了大量类似 xx.parse()的调用 因为元素解析是基于STAX迭代器方式 所以首先会获取元素类型, 
然后再根据元素类型委托不同的解析进行解析, 开闭原则 */ public BpmnModel convertToBpmnModel(XMLStreamReader xtr) { BpmnModel model = new BpmnModel();// 此类负责 存储所有元素解析之后的结果 model.setStartEventFormTypes(startEventFormTypes); model.setUserTaskFormTypes(userTaskFormTypes); try { Process activeProcess = null; List<SubProcess> activeSubProcessList = new ArrayList<SubProcess>(); while (xtr.hasNext()) { try { xtr.next(); } catch(Exception e) { LOGGER.debug("Error reading XML document", e); throw new XMLException("Error reading XML", e); } if (xtr.isEndElement() && ELEMENT_SUBPROCESS.equals(xtr.getLocalName())) { activeSubProcessList.remove(activeSubProcessList.size() - 1); } if (xtr.isEndElement() && ELEMENT_TRANSACTION.equals(xtr.getLocalName())) { activeSubProcessList.remove(activeSubProcessList.size() - 1); } if (xtr.isStartElement() == false) { continue; } //解析definitions元素 if (ELEMENT_DEFINITIONS.equals(xtr.getLocalName())) { definitionsParser.parse(xtr, model); // } else if (ELEMENT_RESOURCE.equals(xtr.getLocalName())) { resourceParser.parse(xtr, model); } else if (ELEMENT_SIGNAL.equals(xtr.getLocalName())) { signalParser.parse(xtr, model); } else if (ELEMENT_MESSAGE.equals(xtr.getLocalName())) { messageParser.parse(xtr, model); } else if (ELEMENT_ERROR.equals(xtr.getLocalName())) { if (StringUtils.isNotEmpty(xtr.getAttributeValue(null, ATTRIBUTE_ID))) { model.addError(xtr.getAttributeValue(null, ATTRIBUTE_ID), xtr.getAttributeValue(null, ATTRIBUTE_ERROR_CODE)); } } else if (ELEMENT_IMPORT.equals(xtr.getLocalName())) { importParser.parse(xtr, model); } else if (ELEMENT_ITEM_DEFINITION.equals(xtr.getLocalName())) { itemDefinitionParser.parse(xtr, model); } else if (ELEMENT_DATA_STORE.equals(xtr.getLocalName())) { dataStoreParser.parse(xtr, model); } else if (ELEMENT_INTERFACE.equals(xtr.getLocalName())) { interfaceParser.parse(xtr, model); } else if (ELEMENT_IOSPECIFICATION.equals(xtr.getLocalName())) { ioSpecificationParser.parseChildElement(xtr, activeProcess, model); } else if 
(ELEMENT_PARTICIPANT.equals(xtr.getLocalName())) { participantParser.parse(xtr, model); } else if (ELEMENT_MESSAGE_FLOW.equals(xtr.getLocalName())) { messageFlowParser.parse(xtr, model); //解析process元素 } else if (ELEMENT_PROCESS.equals(xtr.getLocalName())) { Process process = processParser.parse(xtr, model); if (process != null) { activeProcess = process; } } else if (ELEMENT_POTENTIAL_STARTER.equals(xtr.getLocalName())) { potentialStarterParser.parse(xtr, activeProcess); } else if (ELEMENT_LANE.equals(xtr.getLocalName())) { laneParser.parse(xtr, activeProcess, model); } else if (ELEMENT_DOCUMENTATION.equals(xtr.getLocalName())) { BaseElement parentElement = null; if (!activeSubProcessList.isEmpty()) { parentElement = activeSubProcessList.get(activeSubProcessList.size() - 1); } else if (activeProcess != null) { parentElement = activeProcess; } documentationParser.parseChildElement(xtr, parentElement, model); } else if (activeProcess == null && ELEMENT_TEXT_ANNOTATION.equals(xtr.getLocalName())) { String elementId = xtr.getAttributeValue(null, ATTRIBUTE_ID); TextAnnotation textAnnotation = (TextAnnotation) new TextAnnotationXMLConverter().convertXMLToElement(xtr, model); textAnnotation.setId(elementId); model.getGlobalArtifacts().add(textAnnotation); } else if (activeProcess == null && ELEMENT_ASSOCIATION.equals(xtr.getLocalName())) { String elementId = xtr.getAttributeValue(null, ATTRIBUTE_ID); Association association = (Association) new AssociationXMLConverter().convertXMLToElement(xtr, model); association.setId(elementId); model.getGlobalArtifacts().add(association); } else if (ELEMENT_EXTENSIONS.equals(xtr.getLocalName())) { extensionElementsParser.parse(xtr, activeSubProcessList, activeProcess, model); } else if (ELEMENT_SUBPROCESS.equals(xtr.getLocalName())) { subProcessParser.parse(xtr, activeSubProcessList, activeProcess); } else if (ELEMENT_TRANSACTION.equals(xtr.getLocalName())) { subProcessParser.parse(xtr, activeSubProcessList, activeProcess); } else if 
(ELEMENT_DI_SHAPE.equals(xtr.getLocalName())) { bpmnShapeParser.parse(xtr, model); } else if (ELEMENT_DI_EDGE.equals(xtr.getLocalName())) { bpmnEdgeParser.parse(xtr, model); } else { //开始解析 元素 主要解析 事件 网关 活动等 元素信息 if (!activeSubProcessList.isEmpty() && ELEMENT_MULTIINSTANCE.equalsIgnoreCase(xtr.getLocalName())) { multiInstanceParser.parseChildElement(xtr, activeSubProcessList.get(activeSubProcessList.size() - 1), model); } else if (convertersToBpmnMap.containsKey(xtr.getLocalName())) { /* 判断activateProcess 是否为空 ,如果为空 如果该对象为空 ,表示流程文档中没有定义子元素,所以就不需要解析 如果该对象不为空 则第44行 首先根据xtr.getLocalName() 获取元素名称 然后根据元素名称从 convertersToBpmnMap 集合中查找该元素对应的解析器 例如xtr.getLocalName() 值为userTask 那么 对应解析器为 UserTaskXMLConverter */ if (activeProcess != null) { BaseBpmnXMLConverter converter = convertersToBpmnMap.get(xtr.getLocalName()); converter.convertToBpmnModel(xtr, model, activeProcess, activeSubProcessList); } } } } for (Process process : model.getProcesses()) { for (Pool pool : model.getPools()) { if (process.getId().equals(pool.getProcessRef())) { pool.setExecutable(process.isExecutable()); } } processFlowElements(process.getFlowElements(), process); } } catch (XMLException e) { throw e; } catch (Exception e) { LOGGER.error("Error processing BPMN document", e); throw new XMLException("Error processing BPMN document", e); } return model; } /* 节点 和连线 如何关联??? 
1)循环遍历所有已经解析完毕的process对象, 如果流程文档中定义 有participant 元素 (泳道) 则循环遍历所有的 泳道(pool) 并且判断process对象中的id 值 是否与pool对象中的processRef 值相等,, 如果2者相等, 则设置pool对象中的 executuable 属性值 (是否 可以执行 ) 然后调用 本() 进行 节点和连线的关联操作 */ private void processFlowElements(Collection<FlowElement> flowElementList, BaseElement parentScope) { //循环遍历集合 for (FlowElement flowElement : flowElementList) { /* 如果flowElement对象 类型是 SequenceFlow 首先获取 连线中的源节点 sourceNode 并将sequnecFlow 设置到 sourceNode中 然后获取连线中的目标 targetNode 并且 sequneceFlow 设置到 targetNdoe中 */ if (flowElement instanceof SequenceFlow) { SequenceFlow sequenceFlow = (SequenceFlow) flowElement; FlowNode sourceNode = getFlowNodeFromScope(sequenceFlow.getSourceRef(), parentScope); if (sourceNode != null) { sourceNode.getOutgoingFlows().add(sequenceFlow); } FlowNode targetNode = getFlowNodeFromScope(sequenceFlow.getTargetRef(), parentScope); if (targetNode != null) { targetNode.getIncomingFlows().add(sequenceFlow); } //如果flowElemtn 是边界事件 , 则需要将边界事件与其吸附的对象进行关联 } else if (flowElement instanceof BoundaryEvent) { BoundaryEvent boundaryEvent = (BoundaryEvent) flowElement; FlowElement attachedToElement = getFlowNodeFromScope(boundaryEvent.getAttachedToRefId(), parentScope); if(attachedToElement != null) { boundaryEvent.setAttachedToRef((Activity) attachedToElement); ((Activity) attachedToElement).getBoundaryEvents().add(boundaryEvent); } //如果是子流程, 调用 processFlowElements 继续执行以上两个步骤 } else if(flowElement instanceof SubProcess) { SubProcess subProcess = (SubProcess) flowElement; processFlowElements(subProcess.getFlowElements(), subProcess); } } } private FlowNode getFlowNodeFromScope(String elementId, BaseElement scope) { FlowNode flowNode = null; if (StringUtils.isNotEmpty(elementId)) { if (scope instanceof Process) { flowNode = (FlowNode) ((Process) scope).getFlowElement(elementId); } else if (scope instanceof SubProcess) { flowNode = (FlowNode) ((SubProcess) scope).getFlowElement(elementId); } } return flowNode; } /* 将BpmnModel 转化为流程文档内容 该操作 与convertBpmnModel()的操作完全相反, */ 
public byte[] convertToXML(BpmnModel model) { return convertToXML(model, DEFAULT_ENCODING); } public byte[] convertToXML(BpmnModel model, String encoding) { try { ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); XMLOutputFactory xof = XMLOutputFactory.newInstance(); OutputStreamWriter out = new OutputStreamWriter(outputStream, encoding); XMLStreamWriter writer = xof.createXMLStreamWriter(out); XMLStreamWriter xtw = new IndentingXMLStreamWriter(writer); DefinitionsRootExport.writeRootElement(model, xtw, encoding); CollaborationExport.writePools(model, xtw); DataStoreExport.writeDataStores(model, xtw); SignalAndMessageDefinitionExport.writeSignalsAndMessages(model, xtw); for (Process process : model.getProcesses()) { if (process.getFlowElements().isEmpty() && process.getLanes().isEmpty()) { // empty process, ignore it continue; } ProcessExport.writeProcess(process, xtw); for (FlowElement flowElement : process.getFlowElements()) { createXML(flowElement, model, xtw); } for (Artifact artifact : process.getArtifacts()) { createXML(artifact, model, xtw); } // end process element xtw.writeEndElement(); } BPMNDIExport.writeBPMNDI(model, xtw); // end definitions root element xtw.writeEndElement(); xtw.writeEndDocument(); xtw.flush(); outputStream.close(); xtw.close(); return outputStream.toByteArray(); } catch (Exception e) { LOGGER.error("Error writing BPMN XML", e); throw new XMLException("Error writing BPMN XML", e); } } private void createXML(FlowElement flowElement, BpmnModel model, XMLStreamWriter xtw) throws Exception { if (flowElement instanceof SubProcess) { SubProcess subProcess = (SubProcess) flowElement; if (flowElement instanceof Transaction) { xtw.writeStartElement(ELEMENT_TRANSACTION); } else { xtw.writeStartElement(ELEMENT_SUBPROCESS); } xtw.writeAttribute(ATTRIBUTE_ID, subProcess.getId()); if (StringUtils.isNotEmpty(subProcess.getName())) { xtw.writeAttribute(ATTRIBUTE_NAME, subProcess.getName()); } else { xtw.writeAttribute(ATTRIBUTE_NAME, 
"subProcess"); } if (subProcess instanceof EventSubProcess) { xtw.writeAttribute(ATTRIBUTE_TRIGGERED_BY, ATTRIBUTE_VALUE_TRUE); } else if (subProcess instanceof Transaction == false) { if (subProcess.isAsynchronous()) { BpmnXMLUtil.writeQualifiedAttribute(ATTRIBUTE_ACTIVITY_ASYNCHRONOUS, ATTRIBUTE_VALUE_TRUE, xtw); if (subProcess.isNotExclusive()) { BpmnXMLUtil.writeQualifiedAttribute(ATTRIBUTE_ACTIVITY_EXCLUSIVE, ATTRIBUTE_VALUE_FALSE, xtw); } } } if (StringUtils.isNotEmpty(subProcess.getDocumentation())) { xtw.writeStartElement(ELEMENT_DOCUMENTATION); xtw.writeCharacters(subProcess.getDocumentation()); xtw.writeEndElement(); } boolean didWriteExtensionStartElement = ActivitiListenerExport.writeListeners(subProcess, false, xtw); didWriteExtensionStartElement = BpmnXMLUtil.writeExtensionElements(subProcess, didWriteExtensionStartElement, model.getNamespaces(), xtw); if (didWriteExtensionStartElement) { // closing extensions element xtw.writeEndElement(); } MultiInstanceExport.writeMultiInstance(subProcess, xtw); for (FlowElement subElement : subProcess.getFlowElements()) { createXML(subElement, model, xtw); } for (Artifact artifact : subProcess.getArtifacts()) { createXML(artifact, model, xtw); } xtw.writeEndElement(); } else { BaseBpmnXMLConverter converter = convertersToXMLMap.get(flowElement.getClass()); if (converter == null) { throw new XMLException("No converter for " + flowElement.getClass() + " found"); } converter.convertToXML(xtw, flowElement, model); } } private void createXML(Artifact artifact, BpmnModel model, XMLStreamWriter xtw) throws Exception { BaseBpmnXMLConverter converter = convertersToXMLMap.get(artifact.getClass()); if (converter == null) { throw new XMLException("No converter for " + artifact.getClass() + " found"); } converter.convertToXML(xtw, artifact, model); } }
48,513
https://github.com/jpdoyle/verifast/blob/master/examples/busywaiting/clhlock/clhlock.c
Github Open Source
Open Source
MIT
2,023
verifast
jpdoyle
C
Code
2,324
8,190
// Tobias Reinhard and Bart Jacobs. Ghost signals: verifying termination of busy-waiting. 2020. //@ #include "ghost_lists.gh" #include <stdlib.h> #include "atomics.h" #include "clhlock.h" //@ #include <quantifiers.gh> struct node { int lock; //@ struct node *pred; //@ real frac; //@ void *signal; //@ void *predSignal; //@ int level; }; struct lock { struct node *tail; //@ int ghostListId; //@ list<int> level; }; struct lock_thread { struct node *node; struct node *pred; }; /*@ predicate queue( struct lock *l, list<int> lockLevel, predicate() inv, int ghostListId, void *signal, int level, struct node *n, list<struct node *> ns) = 0 <= level &*& n == 0 ? ns == nil : [1/2]n->lock |-> ?lock &*& lock == 0 ? [1/2]n->lock |-> 0 &*& n->pred |-> _ &*& n->frac |-> _ &*& n->level |-> _ &*& n->signal |-> _ &*& n->predSignal |-> _ &*& inv() &*& ns == {n} &*& ghost_list_member_handle(ghostListId, n) &*& malloc_block_node(n) : [1/2]n->pred |-> ?pred &*& [1/2]n->frac |-> ?frac &*& [frac/4]l->ghostListId |-> _ &*& [1/2]n->signal |-> signal &*& [1/2]n->level |-> level &*& [1/2]n->predSignal |-> ?predSignal &*& signal(signal, append(lockLevel, {level}), false) &*& queue(l, lockLevel, inv, ghostListId, predSignal, level - 1, pred, ?ns0) &*& ns == cons(n, ns0); predicate_ctor lock_inv(struct lock *lock, predicate() inv)() = [1/2]lock->ghostListId |-> ?listId &*& [1/2]lock->level |-> ?lockLevel &*& lock->tail |-> ?tail &*& queue(lock, lockLevel, inv, listId, _, _, tail, cons(tail, ?ns)) &*& ghost_list(listId, cons(tail, ns)); predicate lock(struct lock *lock, list<int> level, predicate() inv;) = [1/2]lock->level |-> level &*& level == cons(?level_max_length, ?level0) &*& length(level0) + 1 <= level_max_length &*& atomic_space(lock_inv(lock, inv)) &*& [1/2]lock->ghostListId |-> _ &*& malloc_block_lock(lock); predicate lock_thread(struct lock_thread *thread) = thread->node |-> ?node &*& node != 0 &*& node->lock |-> _ &*& node->pred |-> _ &*& node->frac |-> _ &*& malloc_block_node(node) 
&*& node->predSignal |-> _ &*& node->signal |-> _ &*& node->level |-> _ &*& thread->pred |-> _ &*& malloc_block_lock_thread(thread); predicate locked(struct lock_thread *thread, struct lock *lock, list<int> level, predicate() inv, real frac, pair<void *, list<int> > ob) = thread->node |-> ?node &*& [1/2]node->lock |-> 1 &*& [1/2]node->pred |-> 0 &*& [1/2]node->frac |-> frac &*& malloc_block_node(node) &*& [1/2]node->predSignal |-> _ &*& [1/2]node->signal |-> ?signal &*& [1/2]node->level |-> ?obLevel &*& ob == pair(signal, append(level, {obLevel})) &*& thread->pred |-> ?pred &*& malloc_block_lock_thread(thread) &*& pred->lock |-> _ &*& pred->pred |-> _ &*& pred->frac |-> _ &*& pred->predSignal |-> _ &*& pred->signal |-> _ &*& pred->level |-> _ &*& malloc_block_node(pred) &*& pred != 0 &*& [frac/2]lock->level |-> level &*& level == cons(?level_max_length, ?level0) &*& length(level0) + 1 <= level_max_length &*& [frac]atomic_space(lock_inv(lock, inv)) &*& [frac]malloc_block_lock(lock) &*& [frac/4]lock->ghostListId |-> ?listId &*& ghost_list_member_handle(listId, node); @*/ struct lock *create_lock() //@ requires exists<list<int> >(?lockLevel) &*& exists<predicate()>(?inv) &*& inv() &*& lockLevel == cons(?lockLevel_max_length, ?lockLevel0) &*& length(lockLevel0) + lock_nb_level_dims <= lockLevel_max_length; //@ ensures lock(result, lockLevel, inv); //@ terminates; { //@ open exists(_); struct lock *lock = malloc(sizeof(struct lock)); if (lock == 0) abort(); struct node *sentinel = malloc(sizeof(struct node)); if (sentinel == 0) abort(); sentinel->lock = 0; //@ sentinel->pred = 0; //@ sentinel->level = 0; lock->tail = sentinel; return lock; //@ int ghostListId = create_ghost_list(); //@ lock->ghostListId = ghostListId; //@ lock->level = lockLevel; //@ ghost_list_insert(ghostListId, nil, nil, sentinel); //@ close queue(lock, lockLevel, inv, ghostListId, sentinel->signal, 0, sentinel, {sentinel}); //@ close lock_inv(lock, inv)(); //@ create_atomic_space(lock_inv(lock, 
inv)); //@ close lock(lock, lockLevel, inv); } struct lock_thread *create_lock_thread() //@ requires true; //@ ensures lock_thread(result); //@ terminates; { struct lock_thread *thread = malloc(sizeof(struct lock_thread)); if (thread == 0) abort(); struct node *node = malloc(sizeof(struct node)); if (node == 0) abort(); thread->node = node; return thread; //@ close lock_thread(thread); } /*@ lemma void is_ancestor_of_refl(list<pathcomp> p) requires true; ensures is_ancestor_of(p, p) == true; { switch (p) { case nil: case cons(h, t): } } lemma void is_ancestor_of_trans(list<pathcomp> p1, list<pathcomp> p2, list<pathcomp> p3) requires is_ancestor_of(p1, p2) && is_ancestor_of(p2, p3); ensures is_ancestor_of(p1, p3) == true; { switch (p3) { case nil: case cons(p3h, p3t): if (p2 == p3) { } else { is_ancestor_of_trans(p1, p2, p3t); } } } @*/ void acquire_helper(struct lock_thread *thread, struct lock *lock, struct node *pred) /*@ requires thread->node |-> ?node &*& node != 0 &*& [1/2]node->lock |-> 1 &*& [1/2]node->frac |-> ?frac &*& malloc_block_node(node) &*& thread->pred |-> _ &*& [1/2]node->signal |-> ?signal &*& [1/2]node->level |-> ?level &*& [1/2]node->predSignal |-> ?predSignal &*& [frac/2]lock->level |-> ?lockLevel &*& lockLevel == cons(?max_level_length, ?lockLevel0) &*& length(lockLevel0) + 1 <= max_level_length &*& obs(?p, cons(pair(signal, append(lockLevel, {level})), ?obs)) &*& forall(map(snd, obs), (all_sublevels_lt)(1, lockLevel)) == true &*& wait_perm(p, predSignal, append(lockLevel, {level - 1}), acquire_helper) &*& malloc_block_lock_thread(thread) &*& [frac]malloc_block_lock(lock) &*& exists<predicate()>(?inv) &*& [frac]atomic_space(lock_inv(lock, inv)) &*& [frac/4]lock->ghostListId |-> ?ghostListId &*& ghost_list_member_handle(ghostListId, node) &*& [1/2]node->pred |-> pred &*& pred != 0; @*/ //@ ensures locked(thread, lock, lockLevel, inv, frac, ?ob) &*& inv() &*& obs(?p1, cons(ob, obs)) &*& is_ancestor_of(p, p1) == true &*& level_lt(lockLevel, 
level_of(ob)) == true; //@ terminates; { //@ int acquireThread = currentThread; for (;;) /*@ invariant obs(p, cons(pair(signal, append(lockLevel, {level})), obs)) &*& wait_perm(p, predSignal, append(lockLevel, {level - 1}), acquire_helper) &*& [frac]atomic_space(lock_inv(lock, inv)) &*& [frac/4]lock->ghostListId |-> ghostListId &*& [frac/2]lock->level |-> lockLevel &*& ghost_list_member_handle(ghostListId, node) &*& [1/2]node->pred |-> pred &*& [1/2]node->predSignal |-> predSignal &*& [1/2]node->level |-> level; @*/ { int predLock; { /*@ predicate pre() = obs(p, cons(pair(signal, append(lockLevel, {level})), obs)) &*& wait_perm(p, predSignal, append(lockLevel, {level - 1}), acquire_helper) &*& [frac/4]lock->ghostListId |-> ghostListId &*& [frac/2]lock->level |-> lockLevel &*& ghost_list_member_handle(ghostListId, node) &*& [1/2]node->pred |-> pred &*& [1/2]node->predSignal |-> predSignal &*& [1/2]node->level |-> level; predicate post(int value) = obs(p, cons(pair(signal, append(lockLevel, {level})), obs)) &*& wait_perm(p, predSignal, append(lockLevel, {level - 1}), acquire_helper) &*& [frac/4]lock->ghostListId |-> ghostListId &*& [frac/2]lock->level |-> lockLevel &*& ghost_list_member_handle(ghostListId, node) &*& value == 0 ? 
[1/2]node->pred |-> 0 &*& [1/2]node->predSignal |-> _ &*& [1/2]node->level |-> level &*& pred->lock |-> _ &*& pred->pred |-> _ &*& pred->frac |-> _ &*& pred->predSignal |-> _ &*& pred->signal |-> _ &*& pred->level |-> _ &*& malloc_block_node(pred) &*& inv() : [1/2]node->pred |-> pred &*& [1/2]node->predSignal |-> predSignal &*& [1/2]node->level |-> level &*& call_perm_(currentThread, acquire_helper); lemma void ghop() requires lock_inv(lock, inv)() &*& is_atomic_load_int_op(?op, &pred->lock, ?P, ?Q) &*& P() &*& pre() &*& currentThread == acquireThread; ensures lock_inv(lock, inv)() &*& is_atomic_load_int_op(op, &pred->lock, P, Q) &*& Q(?value) &*& post(value); { open lock_inv(lock, inv)(); open pre(); struct node *tail = lock->tail; ghost_list_member_handle_lemma(); { lemma void iter(struct node *n, list<struct node *> ns0) requires queue(lock, lockLevel, inv, ghostListId, ?sn, ?ln, n, ?ns1) &*& obs(p, cons(pair(signal, append(lockLevel, {level})), obs)) &*& wait_perm(p, predSignal, append(lockLevel, {level - 1}), acquire_helper) &*& ghost_list(ghostListId, ?ns) &*& ns == cons(tail, _) &*& ns == append(ns0, ns1) &*& mem(node, ns1) == true &*& is_atomic_load_int_op(op, &pred->lock, P, Q) &*& P() &*& [1/2]node->pred |-> pred &*& [1/2]node->predSignal |-> predSignal &*& [1/2]node->level |-> level; ensures queue(lock, lockLevel, inv, ghostListId, sn, ln, n, ?ns1_) &*& obs(p, cons(pair(signal, append(lockLevel, {level})), obs)) &*& wait_perm(p, predSignal, append(lockLevel, {level - 1}), acquire_helper) &*& ghost_list(ghostListId, ?ns_) &*& ns_ == cons(tail, _) &*& ns_ == append(ns0, ns1_) &*& is_atomic_load_int_op(op, &pred->lock, P, Q) &*& Q(?predLock_) &*& predLock_ == 0 ? 
[1/2]node->pred |-> 0 &*& [1/2]node->predSignal |-> _ &*& [1/2]node->level |-> level &*& pred->lock |-> _ &*& pred->pred |-> _ &*& pred->frac |-> _ &*& pred->predSignal |-> _ &*& pred->level |-> _ &*& pred->signal |-> _ &*& malloc_block_node(pred) &*& inv() : [1/2]node->pred |-> pred &*& [1/2]node->predSignal |-> predSignal &*& [1/2]node->level |-> level &*& call_perm_(currentThread, acquire_helper); { open queue(lock, lockLevel, inv, ghostListId, sn, ln, n, ns1); if (n == node) { merge_fractions node_pred(n, _); merge_fractions node_predSignal(n, _); merge_fractions node_level(n, _); open queue(lock, lockLevel, inv, ghostListId, ?spred, ?lpred, pred, ?ns1t); open node_lock(pred, _); op(); int predLock_ = pred->lock; if (predLock_ == 0) { node->pred = 0; append_assoc(ns0, {n}, {pred}); ghost_list_remove(ghostListId, append(ns0, {n}), nil, pred); close queue(lock, lockLevel, inv, ghostListId, spred, ln - 1, 0, nil); close queue(lock, lockLevel, inv, ghostListId, sn, ln, n, {n}); } else { is_ancestor_of_refl(p); level0_lt_append(max_level_length, lockLevel0, {level - 1}, {level}); if (!forall(map(snd, obs), (level_lt)(append(lockLevel, {level - 1})))) { list<int> l = not_forall(map(snd, obs), (level_lt)(append(lockLevel, {level - 1}))); forall_elim(map(snd, obs), (all_sublevels_lt)(1, lockLevel), l); assert !level_lt(append(lockLevel, {level - 1}), l); assert l == cons(?l_max_length, ?l0); all_sublevel0s_lt_append(max_level_length, lockLevel0, {level - 1}, l0); assert false; } assert max_level_length >= length(lockLevel0) + 1; assert level_lt(append(lockLevel, {level - 1}), append(lockLevel, {level})) == true; wait(predSignal); close queue(lock, lockLevel, inv, ghostListId, spred, ln - 1, pred, ns1t); close queue(lock, lockLevel, inv, ghostListId, sn, ln, n, ns1); } } else { append_assoc(ns0, {n}, tail(ns1)); iter(n->pred, append(ns0, {n})); assert queue(lock, lockLevel, inv, ghostListId, _, _, _, ?ns1_); append_assoc(ns0, {n}, ns1_); close queue(lock, lockLevel, 
inv, ghostListId, sn, ln, n, cons(n, ns1_)); } } iter(lock->tail, nil); } assert Q(?predLock_); close post(predLock_); close lock_inv(lock, inv)(); } @*/ //@ close pre(); //@ produce_lemma_function_pointer_chunk(ghop) : atomic_load_int_ghost_op(lock_inv(lock, inv), &pred->lock, currentThread, pre, post)() { call(); }; predLock = atomic_load_int(&pred->lock); //@ leak is_atomic_load_int_ghost_op(ghop, _, _, _, _, _); //@ open post(predLock); } if (predLock == 0) break; } thread->pred = pred; //@ close locked(thread, lock, lockLevel, inv, frac, pair(signal, append(lockLevel, {level}))); //@ is_ancestor_of_refl(p); //@ leak wait_perm(p, predSignal, append(lockLevel, {level - 1}), acquire_helper); //@ level0_lt_append(max_level_length, lockLevel0, {}, {level}); } void acquire(struct lock_thread *thread, struct lock *lock) //@ requires obs(?p, ?obs) &*& lock_thread(thread) &*& [?frac]lock(lock, ?lockLevel, ?inv) &*& forall(map(snd, obs), (all_sublevels_lt)(1, lockLevel)) == true; //@ ensures locked(thread, lock, lockLevel, inv, frac, ?ob) &*& inv() &*& obs(?p1, cons(ob, obs)) &*& is_ancestor_of(p, p1) == true &*& level_lt(lockLevel, level_of(ob)) == true; //@ terminates; { //@ open lock_thread(thread); //@ open lock(lock, lockLevel, inv); //@ int ghostListId = lock->ghostListId; struct node *node = thread->node; node->lock = 1; //@ node->frac = frac; struct node *pred; { /*@ predicate pre() = obs(p, obs) &*& node->predSignal |-> _ &*& node->signal |-> _ &*& node->level |-> _ &*& [1/2]node->lock |-> 1 &*& node->pred |-> _ &*& [1/2]node->frac |-> frac &*& [frac/2]lock->level |-> lockLevel &*& [frac/2]lock->ghostListId |-> ghostListId; predicate post(void *result) = ghost_list_member_handle(ghostListId, node) &*& [1/2]node->pred |-> result &*& result != 0 &*& [frac/2]lock->level |-> lockLevel &*& [frac/4]lock->ghostListId |-> ghostListId &*& [1/2]node->predSignal |-> ?predSignal &*& [1/2]node->signal |-> ?signal &*& [1/2]node->level |-> ?level &*& obs(p, cons(pair(signal, 
append(lockLevel, {level})), obs)); lemma void ghop() requires lock_inv(lock, inv)() &*& is_atomic_exchange_pointer_op(?op, &lock->tail, node, ?P, ?Q) &*& P() &*& pre(); ensures lock_inv(lock, inv)() &*& is_atomic_exchange_pointer_op(op, &lock->tail, node, P, Q) &*& Q(?old) &*& post(old); { open lock_inv(lock, inv)(); open pre(); void *old = lock->tail; op(); assert ghost_list(ghostListId, ?ns); open queue(lock, lockLevel, inv, ghostListId, ?oldSignal, ?oldLevel, old, ns); close queue(lock, lockLevel, inv, ghostListId, oldSignal, oldLevel, old, ns); ghost_list_insert(ghostListId, nil, ns, node); node->pred = old; node->signal = create_signal(); init_signal(node->signal, append(lockLevel, {oldLevel + 1})); node->level = oldLevel + 1; node->predSignal = oldSignal; close queue(lock, lockLevel, inv, ghostListId, node->signal, oldLevel + 1, node, cons(node, ns)); close post(old); close lock_inv(lock, inv)(); } @*/ //@ close pre(); //@ produce_lemma_function_pointer_chunk(ghop) : atomic_exchange_pointer_ghost_op(lock_inv(lock, inv), &lock->tail, node, pre, post)() { call(); }; pred = atomic_exchange_pointer(&lock->tail, node); //@ leak is_atomic_exchange_pointer_ghost_op(ghop, _, _, _, _, _); //@ open post(pred); } //@ produce_call_below_perm_(); //@ pathize_call_below_perm_(); //@ create_wait_perm(node->predSignal, append(lockLevel, {node->level - 1}), acquire_helper); //@ close exists(inv); acquire_helper(thread, lock, pred); } void release_with_ghost_op(struct lock_thread *thread) //@ requires locked(thread, ?lock, ?lockLevel, ?inv, ?frac, ?ob) &*& obs(?p, ?obs) &*& is_release_ghost_op(?rgo, currentThread, inv, p, remove(ob, obs), ?pre, ?post) &*& pre(); //@ ensures post(?p1) &*& obs(?p2, remove(ob, obs)) &*& lock_thread(thread) &*& [frac]lock(lock, lockLevel, inv) &*& is_ancestor_of(p1, p2) == true; //@ terminates; { //@ int releaseThread = currentThread; //@ open locked(thread, lock, lockLevel, inv, frac, ob); struct node *node = thread->node; //@ int ghostListId = 
lock->ghostListId; { /*@ predicate ghop_pre() = [frac/4]lock->ghostListId |-> ghostListId &*& [frac/2]lock->level |-> lockLevel &*& ghost_list_member_handle(ghostListId, node) &*& [1/2]node->lock |-> 1 &*& [1/2]node->pred |-> 0 &*& [1/2]node->frac |-> frac &*& [1/2]node->signal |-> ?signal &*& [1/2]node->level |-> ?level &*& ob == pair(signal, append(lockLevel, {level})) &*& [1/2]node->predSignal |-> _ &*& obs(p, obs) &*& malloc_block_node(node) &*& is_release_ghost_op(rgo, currentThread, inv, p, remove(ob, obs), pre, post) &*& pre(); predicate ghop_post() = obs(p, remove(ob, obs)) &*& post(p) &*& [frac/2]lock->ghostListId |-> ghostListId &*& [frac/2]lock->level |-> lockLevel; lemma void ghop() requires lock_inv(lock, inv)() &*& is_atomic_store_int_op(?op, &node->lock, 0, ?P, ?Q) &*& P() &*& ghop_pre() &*& currentThread == releaseThread; ensures lock_inv(lock, inv)() &*& is_atomic_store_int_op(op, &node->lock, 0, P, Q) &*& Q() &*& ghop_post(); { open lock_inv(lock, inv)(); open ghop_pre(); ghost_list_member_handle_lemma(); { lemma void iter() requires queue(lock, lockLevel, inv, ghostListId, ?nsig, ?nl, ?n, ?ns) &*& mem(node, ns) == true &*& ghost_list_member_handle(ghostListId, node) &*& [1/2]node->lock |-> 1 &*& [1/2]node->pred |-> 0 &*& [1/2]node->frac |-> frac &*& [1/2]node->predSignal |-> _ &*& [1/2]node->signal |-> ?signal &*& [1/2]node->level |-> ?level &*& ob == pair(signal, append(lockLevel, {level})) &*& obs(p, obs) &*& malloc_block_node(node) &*& is_release_ghost_op(rgo, currentThread, inv, p, remove(ob, obs), pre, post) &*& pre() &*& is_atomic_store_int_op(op, &node->lock, 0, P, Q) &*& P(); ensures obs(p, remove(ob, obs)) &*& post(p) &*& queue(lock, lockLevel, inv, ghostListId, nsig, nl, n, ns) &*& is_atomic_store_int_op(op, &node->lock, 0, P, Q) &*& Q() &*& [frac/4]lock->ghostListId |-> _; { open queue(lock, lockLevel, inv, ghostListId, nsig, nl, n, ns); if (n == node) { merge_fractions node_lock(n, _); merge_fractions node_pred(n, _); merge_fractions 
node_frac(n, _); merge_fractions node_level(n, _); merge_fractions node_predSignal(n, _); merge_fractions node_signal(n, _); open queue(lock, lockLevel, inv, ghostListId, _, _, 0, _); op(); set_signal(node->signal); leak signal(_, _, true); is_ancestor_of_refl(p); rgo(); leak is_release_ghost_op(rgo, currentThread, inv, p, remove(ob, obs), pre, post); } else { iter(); } close queue(lock, lockLevel, inv, ghostListId, nsig, nl, n, ns); } iter(); } close lock_inv(lock, inv)(); close ghop_post(); } @*/ //@ close ghop_pre(); //@ produce_lemma_function_pointer_chunk(ghop) : atomic_store_int_ghost_op(lock_inv(lock, inv), &node->lock, 0, ghop_pre, ghop_post, currentThread)() { call(); }; atomic_store_int(&node->lock, 0); //@ leak is_atomic_store_int_ghost_op(ghop, _, _, _, _, _, _); //@ open ghop_post(); } thread->node = thread->pred; //@ close [frac]lock(lock, lockLevel, inv); //@ close lock_thread(thread); //@ is_ancestor_of_refl(p); } void release(struct lock_thread *thread) //@ requires locked(thread, ?lock, ?lockLevel, ?inv, ?frac, ?ob) &*& inv() &*& obs(?p, ?obs); //@ ensures obs(?p1, remove(ob, obs)) &*& lock_thread(thread) &*& [frac]lock(lock, lockLevel, inv) &*& is_ancestor_of(p, p1) == true; //@ terminates; { { /*@ predicate pre() = inv(); predicate post(list<pathcomp> p1) = is_ancestor_of(p, p1) == true; @*/ /*@ produce_lemma_function_pointer_chunk release_ghost_op(currentThread, inv, p, remove(ob, obs), pre, post)() { assert obs(?p1, _); open pre(); close post(p1); }; @*/ //@ close pre(); release_with_ghost_op(thread); //@ open post(?p1); //@ assert obs(?p2, _); //@ is_ancestor_of_trans(p, p1, p2); } } void dispose_lock_thead(struct lock_thread *thread) //@ requires lock_thread(thread); //@ ensures true; //@ terminates; { //@ open lock_thread(thread); free(thread->node); free(thread); } void dispose_lock(struct lock *lock) //@ requires lock(lock, ?lockLevel, ?inv); //@ ensures inv(); //@ terminates; { //@ open lock(lock, lockLevel, inv); //@ 
destroy_atomic_space(); //@ open lock_inv(lock, inv)(); //@ open queue(lock, _, inv, _, _, _, _, _); free(lock->tail); free(lock); //@ leak ghost_list(_, _) &*& ghost_list_member_handle(_, _); }
8,168
https://github.com/ocanbascil/validation-draft/blob/master/exercises/util.py
Github Open Source
Open Source
MIT
2,021
validation-draft
ocanbascil
Python
Code
115
355
def compare(expected, actual, path=''):
    """Recursively compare two nested structures to help with debugging.

    Args:
        expected: The reference value (dict / list / tuple / scalar).
        actual: The value under test; must mirror ``expected``'s structure.
        path: Human-readable breadcrumb of the keys/indices leading to the
            current value; used only in error messages.

    Raises:
        AssertionError: On the first type mismatch, missing/extra dict key,
            sequence-length mismatch, or unequal leaf value found.
    """
    assert type(expected) == type(actual), 'Different types, path: {}'.format(path)
    if isinstance(actual, dict):
        missing = set(expected.keys()).difference(actual.keys())
        extra = set(actual.keys()).difference(expected.keys())

        if extra:
            raise AssertionError('Extra keys in data: {}, path: {}'.format(extra, path))

        if missing:
            raise AssertionError('Missing keys from data: {}, path: {}'.format(missing, path))

        for key, value in expected.items():
            compare(value, actual[key], path='{} -> {}'.format(path, key))
    elif isinstance(actual, (list, tuple)):
        len_actual = len(actual)
        len_expected = len(expected)

        if len_actual != len_expected:
            # Bug fix: the original passed only two arguments to a
            # three-placeholder format string (raising IndexError instead of
            # the intended message) and passed a stray ``format(path)`` as a
            # second AssertionError argument. All three values now feed one
            # format call.
            raise AssertionError(
                'Different sequence lengths (expected: {} vs actual: {}), Path: {}'.format(
                    len_expected, len_actual, path))

        for idx, (v_expected, v_actual) in enumerate(zip(expected, actual)):
            compare(v_expected, v_actual, path='{} -> {}'.format(path, idx))
    else:
        if expected != actual:
            raise AssertionError('Expected: {} != Actual: {}, Path: {}'.format(expected, actual, path))
34,230
https://github.com/samjf/silverstripe4-docker-example/blob/master/asset-admin/client/src/mocks/react-redux.js
Github Open Source
Open Source
BSD-3-Clause
2,017
silverstripe4-docker-example
samjf
JavaScript
Code
9
18
// Test double for react-redux's `connect`: whatever map/dispatch arguments
// the caller supplies are ignored, and the returned HOC simply hands the
// wrapped component straight back, bypassing all store wiring.
export function connect() {
  return function (Component) {
    return Component;
  };
}
16,216
https://github.com/excalidraw/excalidraw/blob/master/src/components/DialogActionButton.tsx
Github Open Source
Open Source
MIT, LicenseRef-scancode-free-unknown
2,023
excalidraw
excalidraw
TSX
Code
111
340
import clsx from "clsx";
import { ReactNode } from "react";

import "./DialogActionButton.scss";

import Spinner from "./Spinner";

interface DialogActionButtonProps {
  label: string;
  children?: ReactNode;
  actionType?: "primary" | "danger";
  isLoading?: boolean;
}

/**
 * Generic dialog action button.
 *
 * Renders `label` (and optional `children`, e.g. an icon) inside a styled
 * <button>. While `isLoading` is true the label/children keep their layout
 * space but are hidden, and a spinner overlay is shown instead. All other
 * button attributes (`onClick`, `disabled`, ...) are passed through.
 */
const DialogActionButton = ({
  label,
  onClick,
  className,
  children,
  actionType,
  type = "button",
  isLoading,
  ...rest
}: DialogActionButtonProps & React.ButtonHTMLAttributes<HTMLButtonElement>) => {
  // BEM-style modifier, e.g. "Dialog__action-button--danger"; empty string
  // when no actionType was requested so clsx drops it.
  const modifierClass = actionType
    ? `Dialog__action-button--${actionType}`
    : "";

  return (
    <button
      className={clsx("Dialog__action-button", modifierClass, className)}
      type={type}
      aria-label={label}
      onClick={onClick}
      {...rest}
    >
      {children && (
        <div style={isLoading ? { visibility: "hidden" } : {}}>{children}</div>
      )}
      <div style={isLoading ? { visibility: "hidden" } : {}}>{label}</div>
      {isLoading && (
        <div style={{ position: "absolute", inset: 0 }}>
          <Spinner />
        </div>
      )}
    </button>
  );
};

export default DialogActionButton;
33,980
https://github.com/christyfernandes/Paathshala/blob/master/src/app/client/src/app/modules/shared/services/util/util.service.spec.data.ts
Github Open Source
Open Source
MIT
null
Paathshala
christyfernandes
TypeScript
Code
984
4,195
export const servicemockRes = { formData: { 'code': 'board', 'dataType': 'text', 'name': 'Board', 'label': 'Board/Syllabus', 'description': 'Education Board (Like MP Board, NCERT, etc)', 'editable': true, 'inputType': 'select', 'required': false, 'displayProperty': 'Editable', 'visible': true, 'renderingHints': { 'semanticColumnWidth': 'three' }, 'range': [ { 'associations': [ { 'identifier': 'ncfcopy_gradelevel_kindergarten', 'code': 'kindergarten', 'translations': '{\'hi\':\'बाल विहार\'}', 'name': 'Kindergarten', 'description': '', 'category': 'gradeLevel', 'status': 'Live' }, { 'identifier': 'ncfcopy_gradelevel_grade5', 'code': 'grade5', 'translations': null, 'name': 'Grade 5', 'description': '', 'category': 'gradeLevel', 'status': 'Live' }, { 'identifier': 'ncfcopy_gradelevel_grade1', 'code': 'grade1', 'translations': null, 'name': 'Grade 1', 'description': '', 'category': 'gradeLevel', 'status': 'Live' }, { 'identifier': 'ncfcopy_gradelevel_grade2', 'code': 'grade2', 'translations': null, 'name': 'Grade 2', 'description': '', 'category': 'gradeLevel', 'status': 'Live' }, { 'identifier': 'ncfcopy_gradelevel_grade4', 'code': 'grade4', 'translations': null, 'name': 'Grade 4', 'description': '', 'category': 'gradeLevel', 'status': 'Live' }, { 'identifier': 'ncfcopy_gradelevel_grade3', 'code': 'grade3', 'translations': null, 'name': 'Grade 3', 'description': '', 'category': 'gradeLevel', 'status': 'Live' } ], 'identifier': 'ncfcopy_board_ncert', 'code': 'ncert', 'translations': null, 'name': 'NCERT', 'description': '', 'index': 1, 'category': 'board', 'status': 'Live' }, { 'identifier': 'ncfcopy_board_cbse', 'code': 'cbse', 'translations': null, 'name': 'CBSE', 'description': '', 'index': 2, 'category': 'board', 'status': 'Live' }, { 'identifier': 'ncfcopy_board_icse', 'code': 'icse', 'translations': null, 'name': 'ICSE', 'description': '', 'index': 3, 'category': 'board', 'status': 'Live' }, { 'identifier': 'ncfcopy_board_upboard', 'code': 'upboard', 
'translations': null, 'name': 'UP Board', 'description': '', 'index': 4, 'category': 'board', 'status': 'Live' }, { 'identifier': 'ncfcopy_board_apboard', 'code': 'apboard', 'translations': null, 'name': 'AP Board', 'description': '', 'index': 5, 'category': 'board', 'status': 'Live' }, { 'identifier': 'ncfcopy_board_tnboard', 'code': 'tnboard', 'translations': null, 'name': 'TN Board', 'description': '', 'index': 6, 'category': 'board', 'status': 'Live' }, { 'identifier': 'ncfcopy_board_ncte', 'code': 'ncte', 'translations': null, 'name': 'NCTE', 'description': '', 'index': 7, 'category': 'board', 'status': 'Live' }, { 'identifier': 'ncfcopy_board_mscert', 'code': 'mscert', 'translations': null, 'name': 'MSCERT', 'description': '', 'index': 8, 'category': 'board', 'status': 'Live' }, { 'identifier': 'ncfcopy_board_bser', 'code': 'bser', 'translations': null, 'name': 'BSER', 'description': '', 'index': 9, 'category': 'board', 'status': 'Live' }, { 'identifier': 'ncfcopy_board_others', 'code': 'others', 'translations': null, 'name': 'Others', 'description': '', 'index': 10, 'category': 'board', 'status': 'Live' } ], 'translations': null, 'index': 1 }, successResult: { id: 'api.content.read', ver: '1.0', ts: '2018-05-03T10:51:12.648Z', params: 'params', responseCode: 'OK', result: { content: { mimeType: 'application/vnd.ekstep.ecml-archive', body: 'body', identifier: 'domain_66675', versionKey: '1497028761823', downloadStatus: 'FAILED', } } } }; export const contentList = [ { 'name': 'PHET Simulations Test', 'image': 'content/do_3125010999257169921165/correct_1515820207913.png', 'description': 'In this content, you will see the PHET simulations related to weights and balances.', 'rating': '0', 'subject': 'Physics', 'medium': 'English', 'orgDetails': {}, 'gradeLevel': 'Other', 'contentType': 'Resource', 'topic': '', 'subTopic': '', 'metaData': { 'identifier': 'do_3125010999257169921165', 'mimeType': 'application/vnd.ekstep.html-archive', 'framework': 'NCF', 
'contentType': 'Resource' }, 'completionPercentage': 0, 'mimeTypesCount': 0, 'cardImg': 'content/do_3125010999257169921165/correct_1515820207913.png', 'resourceType': 'Experiment', 'action': { 'onImage': { 'eventName': 'onImage' } }, 'ribbon': { 'left': { 'class': 'ui circular label card-badges-image' }, 'right': { 'name': 'Experiment', 'class': 'ui black right ribbon label' } } }, { 'name': 'SSLC SCIENCE EM PART A 165 QUESTIONS TEST', 'image': 'content/do_3126224885317877761111/assets10science4_3_9292_1538236353_1538236353230.thumb.jpg', 'description': 'SAMACHEER KALVI TAMIL NADU STATE BOARD SCIENCE EM ANY 165 ONE MARKS TEST PART A QUESTIONS OUT OF 170 QUESTIONS', 'rating': '0', 'subject': 'Physics', 'orgDetails': {}, 'gradeLevel': 'Class 10', 'contentType': 'Resource', 'topic': '', 'subTopic': '', 'metaData': { 'identifier': 'do_3126224885317877761111', 'mimeType': 'application/vnd.ekstep.ecml-archive', 'framework': 'NCF', 'contentType': 'Resource' }, 'completionPercentage': 0, 'mimeTypesCount': 0, 'cardImg': 'content/do_3126224885317877761111/assets10science4_3_9292_1538236353_1538236353230.thumb.jpg', 'resourceType': 'Test', 'action': { 'onImage': { 'eventName': 'onImage' } }, 'ribbon': { 'left': { 'class': 'ui circular label card-badges-image' }, 'right': { 'name': 'Test', 'class': 'ui black right ribbon label' } } }, { 'name': 'Copy of Math_testprep_grade10', 'image': 'content/do_31288771643112652813019/notebook_1491393332116.thumb.png', 'description': 'Enter description for TextBook', 'rating': 3, 'subject': 'Assamese (Angkuran)', 'medium': 'Assamese', 'orgDetails': {}, 'gradeLevel': 'Class 7', 'contentType': 'TextBook', 'topic': '', 'subTopic': '', 'metaData': { 'identifier': 'do_31288771643112652813019', 'mimeType': 'application/vnd.ekstep.content-collection', 'framework': 'as_k-12', 'contentType': 'TextBook' }, 'completionPercentage': 0, 'mimeTypesCount': 
'{"application/pdf":7,"application/vnd.ekstep.content-collection":19,"application/vnd.ekstep.ecml-archive":4,"video/mp4":7}', 'cardImg': 'content/do_31288771643112652813019/notebook_1491393332116.thumb.png', 'resourceType': 'Book', 'organisation': [ 'DIKSHA Support' ], 'action': { 'onImage': { 'eventName': 'onImage' } }, 'ribbon': { 'left': { 'class': 'ui circular label card-badges-image' }, 'right': { 'name': 'Book', 'class': 'ui black right ribbon label' } } } ]; export const contentListWithHoverData = [ { 'name': 'PHET Simulations Test', 'image': 'content/do_3125010999257169921165/correct_1515820207913.png', 'description': 'In this content, you will see the PHET simulations related to weights and balances.', 'rating': '0', 'subject': 'Physics', 'medium': 'English', 'orgDetails': {}, 'gradeLevel': 'Other', 'contentType': 'Resource', 'topic': '', 'subTopic': '', 'metaData': { 'identifier': 'do_3125010999257169921165', 'mimeType': 'application/vnd.ekstep.html-archive', 'framework': 'NCF', 'contentType': 'Resource' }, 'completionPercentage': 0, 'mimeTypesCount': 0, 'cardImg': 'content/do_3125010999257169921165/correct_1515820207913.png', 'resourceType': 'Experiment', 'hoverData': { 'note': '', 'actions': [ { 'type': 'save', 'label': undefined, 'disabled': false }, { 'type': 'open', 'label': 'Open' } ] }, 'action': { 'onImage': { 'eventName': 'onImage' } }, 'ribbon': { 'left': { 'class': 'ui circular label card-badges-image' }, 'right': { 'name': 'Experiment', 'class': 'ui black right ribbon label' } } }, { 'name': 'SSLC SCIENCE EM PART A 165 QUESTIONS TEST', 'image': 'content/do_3126224885317877761111/assets10science4_3_9292_1538236353_1538236353230.thumb.jpg', 'description': 'SAMACHEER KALVI TAMIL NADU STATE BOARD SCIENCE EM ANY 165 ONE MARKS TEST PART A QUESTIONS OUT OF 170 QUESTIONS', 'rating': '0', 'subject': 'Physics', 'orgDetails': {}, 'gradeLevel': 'Class 10', 'contentType': 'Resource', 'topic': '', 'subTopic': '', 'metaData': { 'identifier': 
'do_3126224885317877761111', 'mimeType': 'application/vnd.ekstep.ecml-archive', 'framework': 'NCF', 'contentType': 'Resource' }, 'completionPercentage': 0, 'mimeTypesCount': 0, 'cardImg': 'content/do_3126224885317877761111/assets10science4_3_9292_1538236353_1538236353230.thumb.jpg', 'resourceType': 'Test', 'hoverData': { 'note': '', 'actions': [ { 'type': 'save', 'label': undefined, 'disabled': false }, { 'type': 'open', 'label': 'Open' } ] }, 'action': { 'onImage': { 'eventName': 'onImage' } }, 'ribbon': { 'left': { 'class': 'ui circular label card-badges-image' }, 'right': { 'name': 'Test', 'class': 'ui black right ribbon label' } } }, { 'name': 'Copy of Math_testprep_grade10', 'image': 'content/do_31288771643112652813019/notebook_1491393332116.thumb.png', 'description': 'Enter description for TextBook', 'rating': 3, 'subject': 'Assamese (Angkuran)', 'medium': 'Assamese', 'orgDetails': {}, 'gradeLevel': 'Class 7', 'contentType': 'TextBook', 'topic': '', 'subTopic': '', 'metaData': { 'identifier': 'do_31288771643112652813019', 'mimeType': 'application/vnd.ekstep.content-collection', 'framework': 'as_k-12', 'contentType': 'TextBook' }, 'completionPercentage': 0, 'mimeTypesCount': '{"application/pdf":7,"application/vnd.ekstep.content-collection":19,"application/vnd.ekstep.ecml-archive":4,"video/mp4":7}', 'cardImg': 'content/do_31288771643112652813019/notebook_1491393332116.thumb.png', 'resourceType': 'Book', 'organisation': [ 'DIKSHA Support' ], 'hoverData': { 'note': '', 'actions': [ { 'type': 'save', 'label': undefined, 'disabled': false }, { 'type': 'open', 'label': 'Open' } ] }, 'action': { 'onImage': { 'eventName': 'onImage' } }, 'ribbon': { 'left': { 'class': 'ui circular label card-badges-image' }, 'right': { 'name': 'Book', 'class': 'ui black right ribbon label' } } } ];
20,135
https://github.com/dereklamego/JavaPoo/blob/master/ManipulacaoFiles/src/manipulacaofiles/FilesOperations.java
Github Open Source
Open Source
MIT
null
JavaPoo
dereklamego
Java
Code
154
570
package manipulacaofiles;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.util.InputMismatchException;
import java.util.Scanner;

/**
 * Helper class that creates, appends to and reads a semicolon-separated
 * "info.txt" file holding name;age;height records, one per line.
 */
public class FilesOperations {

    // Default file handle ("info.txt" in the working directory); exposed so
    // callers in main can pass it back into the methods below.
    File arquivoCriado = new File("info.txt");

    /**
     * Creates "info.txt" if it does not already exist (best-effort: an I/O
     * failure is reported on stdout but not propagated).
     *
     * @return the file handle, whether or not creation succeeded
     */
    public File criarArquivo() {
        try {
            arquivoCriado.createNewFile();
        } catch (IOException ex) {
            System.out.println("erro ao criar arquivo");
        }
        return arquivoCriado;
    }

    /**
     * Prompts for one record (name, age, height) on stdin and appends it to
     * the given file as "name;age;height\n".
     *
     * @param arquivo destination file (appended, not overwritten)
     * @throws IOException            if the file cannot be written
     * @throws InputMismatchException if age/height are not valid numbers
     */
    public void escreverArquivo(File arquivo) throws FileNotFoundException, IOException, InputMismatchException {
        // Deliberately not closed: closing this Scanner would close System.in.
        Scanner sc = new Scanner(System.in);
        // try-with-resources guarantees the writer is closed even if reading
        // the user's input throws (the original leaked it in that case).
        try (FileWriter escrita = new FileWriter(arquivo.getName(), true)) {
            System.out.println("Nome: ");
            String nome = sc.nextLine();
            System.out.println("Idade: ");
            int idade = sc.nextInt();
            System.out.println("Altura:");
            float altura = sc.nextFloat();
            escrita.write(nome + ";" + idade + ";" + altura + "\n");
            System.out.println("Dados cadastrados no arquivo: " + arquivo.getName() + " no local: " + arquivo.getAbsolutePath());
        }
    }

    /**
     * Prints every record of the file as "Nome: ...; Idade: ...; Altura: ...",
     * or a notice when the file is empty.
     *
     * @param arquivo file previously written by {@link #escreverArquivo(File)}
     * @throws FileNotFoundException if the file does not exist
     */
    public void lerArquivo(File arquivo) throws FileNotFoundException {
        if (arquivo.length() == 0) {
            System.out.println("O arquivo está vazio");
            return;
        }
        // try-with-resources closes the file-backed Scanner on every path.
        try (Scanner leitura = new Scanner(arquivo)) {
            while (leitura.hasNextLine()) {
                String linhaAtual = leitura.nextLine();
                String[] textoSeparado = linhaAtual.split(";");
                System.out.println("Nome: " + textoSeparado[0] + "; Idade: " + textoSeparado[1] + "; Altura: " + textoSeparado[2]);
            }
        }
    }
}
14,164
https://github.com/CrackerCat/snail/blob/master/snail/src/main/java/com/acgist/snail/net/torrent/dht/request/FindNodeRequest.java
Github Open Source
Open Source
Apache-2.0
2,023
snail
CrackerCat
Java
Code
104
438
package com.acgist.snail.net.torrent.dht.request;

import com.acgist.snail.config.DhtConfig;
import com.acgist.snail.net.torrent.dht.DhtRequest;
import com.acgist.snail.net.torrent.dht.NodeContext;
import com.acgist.snail.net.torrent.dht.response.FindNodeResponse;

/**
 * <p>DHT find_node request</p>
 *
 * @author acgist
 */
public final class FindNodeRequest extends DhtRequest {

	private FindNodeRequest() {
		super(DhtConfig.QType.FIND_NODE);
	}

	/**
	 * <p>Creates a new find_node request for the given target</p>
	 *
	 * @param target NodeId or InfoHash to look up
	 *
	 * @return the request
	 */
	public static final FindNodeRequest newRequest(byte[] target) {
		final FindNodeRequest request = new FindNodeRequest();
		request.put(DhtConfig.KEY_TARGET, target);
		return request;
	}

	/**
	 * <p>Handles an incoming find_node request</p>
	 *
	 * @param request the incoming request
	 *
	 * @return a response carrying the serialized nodes found for the target
	 */
	public static final FindNodeResponse execute(DhtRequest request) {
		final FindNodeResponse response = FindNodeResponse.newInstance(request);
		final byte[] target = request.getBytes(DhtConfig.KEY_TARGET);
		final var nodes = NodeContext.getInstance().findNode(target);
		// TODO: support the optional "want" argument
		response.put(DhtConfig.KEY_NODES, serializeNodes(nodes));
		return response;
	}

}
23,651
https://github.com/neoedmund/neoeedit/blob/master/scripts/Concat1.java
Github Open Source
Open Source
BSD-3-Clause
2,023
neoeedit
neoedmund
Java
Code
51
154
import java.util.ArrayList;
import java.util.List;

import neoe.ne.Script;

/**
 * Prototype script that folds all input lines into a single line of quoted
 * Java string fragments: each line `abc` becomes `"abc" + `. Note the output
 * intentionally keeps the trailing ` + ` (prototype behavior).
 */
public class Concat1 implements Script {

	@Override
	public List<CharSequence> run(List<CharSequence> lines) {
		StringBuilder joined = new StringBuilder();
		for (int i = 0; i < lines.size(); i++) {
			joined.append('"').append(lines.get(i)).append("\" + ");
		}
		List<CharSequence> result = new ArrayList<CharSequence>();
		result.add(joined);
		return result;
	}
}
36,481
https://github.com/hmrc/customs-declare-exports-frontend/blob/master/test/services/SubmissionServiceSpec.scala
Github Open Source
Open Source
Apache-2.0
2,023
customs-declare-exports-frontend
hmrc
Scala
Code
564
2,216
/* * Copyright 2023 HM Revenue & Customs * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package services import base.{Injector, MockConnectors, MockExportCacheService, UnitWithMocksSpec} import com.kenshoo.play.metrics.Metrics import connectors.CustomsDeclareExportsConnector import forms.declaration.countries.Country import forms.declaration.{AmendmentSubmission, LegalDeclaration} import metrics.{ExportsMetrics, MetricIdentifiers} import models.DeclarationType import models.declaration.DeclarationStatus import models.declaration.submissions.{Action, Submission, SubmissionAmendment} import org.mockito.ArgumentMatchers.{any, eq => equalTo, notNull} import org.mockito.Mockito.{reset, verify, when} import org.scalatest.OptionValues import org.scalatest.concurrent.ScalaFutures import services.audit.{AuditService, AuditTypes, EventData} import services.cache.SubmissionBuilder import uk.gov.hmrc.http.HeaderCarrier import scala.concurrent.ExecutionContext.global import scala.concurrent.Future class SubmissionServiceSpec extends UnitWithMocksSpec with Injector with MockExportCacheService with MockConnectors with OptionValues with ScalaFutures with SubmissionBuilder { private val auditService = mock[AuditService] private val connector = mock[CustomsDeclareExportsConnector] private val exportMetrics = instanceOf[ExportsMetrics] private val hc: HeaderCarrier = mock[HeaderCarrier] private val legalDeclaration = LegalDeclaration("Name", "Role", "email@test.com", confirmation = true) private 
val amendmentSubmission = AmendmentSubmission("Name", "Role", "email@test.com", "Some reason", confirmation = true) private val auditData = Map( EventData.eori.toString -> "eori", EventData.lrn.toString -> "123LRN", EventData.ducr.toString -> "ducr", EventData.decType.toString -> "STANDARD", EventData.fullName.toString -> legalDeclaration.fullName, EventData.jobRole.toString -> legalDeclaration.jobRole, EventData.email.toString -> legalDeclaration.email, EventData.confirmed.toString -> legalDeclaration.confirmation.toString, EventData.submissionResult.toString -> "Success" ) private val submissionService = new SubmissionService(connector, auditService, exportMetrics) override def beforeEach(): Unit = { super.beforeEach() reset(connector, auditService) } val eori = "eori" val lrn = "123LRN" "SubmissionService.submitDeclaration" should { val registry = instanceOf[Metrics].defaultRegistry val metric = MetricIdentifiers.submissionMetric val timerBefore = registry.getTimers.get(exportMetrics.timerName(metric)).getCount val counterBefore = registry.getCounters.get(exportMetrics.counterName(metric)).getCount "successfully submit to the back end a valid declaration" in { // Given val declaration = aDeclaration( withId("id"), withStatus(DeclarationStatus.DRAFT), withType(DeclarationType.STANDARD), withConsignmentReferences(ducr = "ducr", lrn = lrn) ) declaration.locations.originationCountry.value mustBe Country.GB val expectedSubmission = Submission(uuid = "id", eori = eori, lrn = lrn, actions = Seq.empty[Action], latestDecId = Some("id")) when(connector.submitDeclaration(any[String])(any(), any())).thenReturn(Future.successful(expectedSubmission)) // When val actualSubmission = submissionService.submitDeclaration("eori", declaration, legalDeclaration)(hc, global).futureValue.value actualSubmission.eori mustBe eori actualSubmission.lrn mustBe lrn // Then verify(connector).submitDeclaration(equalTo("id"))(equalTo(hc), any()) 
verify(auditService).auditAllPagesUserInput(equalTo(AuditTypes.SubmissionPayload), equalTo(declaration))(equalTo(hc)) verify(auditService).audit(equalTo(AuditTypes.Submission), equalTo[Map[String, String]](auditData))(equalTo(hc)) registry.getTimers.get(exportMetrics.timerName(metric)).getCount mustBe >(timerBefore) registry.getCounters.get(exportMetrics.counterName(metric)).getCount mustBe >(counterBefore) } } "SubmissionService.submitAmendment" should { val submissionId = "submissionId" "return None" when { "the declaration's parentDeclarationId is not defined" in { submissionService .submitAmendment(eori, aDeclaration(), amendmentSubmission, submissionId, false)(hc, global) .futureValue mustBe None } "the declaration matching the parentDeclarationId is not found" in { when(connector.findDeclaration(any())(any(), any())).thenReturn(Future.successful(None)) val amendedDecl = aDeclaration( withId("id2"), withStatus(DeclarationStatus.AMENDMENT_DRAFT), withParentDeclarationId("id1"), withConsignmentReferences(ducr = "ducr", lrn = lrn), withDestinationCountry(Country(Some("IT"))), withTotalNumberOfItems(Some("654321"), Some("94.1"), Some("GBP"), Some("no")) ) submissionService .submitAmendment(eori, amendedDecl, amendmentSubmission, submissionId, false)(hc, global) .futureValue mustBe None } } val registry = instanceOf[Metrics].defaultRegistry val metric = MetricIdentifiers.submissionAmendmentMetric val timerBefore = registry.getTimers.get(exportMetrics.timerName(metric)).getCount val counterBefore = registry.getCounters.get(exportMetrics.counterName(metric)).getCount "successfully submit to the back end a valid amendment" in { // Given val parentDeclaration = aDeclaration( withId("id1"), withStatus(DeclarationStatus.COMPLETE), withConsignmentReferences(ducr = "ducr", lrn = lrn), withDestinationCountry(), withTotalNumberOfItems(Some("123456"), Some("1.49"), Some("GBP"), Some("yes")) ) when(connector.findDeclaration(any())(any(), 
any())).thenReturn(Future.successful(Some(parentDeclaration))) val amendedDecl = aDeclaration( withId("id2"), withStatus(DeclarationStatus.AMENDMENT_DRAFT), withParentDeclarationId("id1"), withConsignmentReferences(ducr = "ducr", lrn = lrn), withDestinationCountry(Country(Some("IT"))), withTotalNumberOfItems(Some("654321"), Some("94.1"), Some("GBP"), Some("no")) ) val expectedActionId = "actionId" when(connector.submitAmendment(any())(any(), any())).thenReturn(Future.successful(expectedActionId)) // When val result = submissionService.submitAmendment(eori, amendedDecl, amendmentSubmission, submissionId, false)(hc, global) result.futureValue mustBe Some(expectedActionId) // Then val expectedFieldPointers = List( "declaration.locations.destinationCountry", "declaration.totalNumberOfItems.totalAmountInvoiced", "declaration.totalNumberOfItems.exchangeRate" ) val expectedSubmissionAmendment = SubmissionAmendment(submissionId, "id2", expectedFieldPointers) verify(connector).submitAmendment(equalTo(expectedSubmissionAmendment))(any(), any()) verify(auditService).auditAllPagesUserInput(equalTo(AuditTypes.AmendmentPayload), equalTo(amendedDecl))(any()) verify(auditService).auditAmendmentSent(equalTo(AuditTypes.Amendment), notNull())(any) registry.getTimers.get(exportMetrics.timerName(metric)).getCount mustBe >(timerBefore) registry.getCounters.get(exportMetrics.counterName(metric)).getCount mustBe >(counterBefore) } } }
43,388
https://github.com/gsilvers/maptz.vscode.extensions.customfolding/blob/master/src/engine/RegionServices.ts
Github Open Source
Open Source
MIT
2,022
maptz.vscode.extensions.customfolding
gsilvers
TypeScript
Code
147
435
/* #region Imports */
"use strict";
import * as vscode from "vscode";
import * as config from "./../config/Configuration";
import { RegionProvider, CustomRegion } from "./CustomRegions";
/* #endregion */

/* #region RegionService */

/**
 * Tracks the custom folding regions of a single text document and answers
 * queries about which regions surround the active editor selection.
 */
export class RegionService {
  regionProvider: RegionProvider;
  document: vscode.TextDocument;
  regions: CustomRegion[];

  constructor(configService: config.ConfigurationService, document: vscode.TextDocument) {
    this.regionProvider = new RegionProvider(configService);
    this.document = document;
    this.regions = [];
  }

  /** Re-parses the document and caches its completed regions. */
  public update() {
    var result = this.regionProvider.getRegions(this.document);
    this.regions = result.completedRegions;
  }

  /** Returns a freshly re-parsed list of the document's regions. */
  public getRegions() {
    this.update();
    return this.regions;
  }

  /**
   * Regions that contain the active selection of the active editor.
   * Empty when there is no active editor or it shows a different document.
   */
  public currentRegions(): CustomRegion[] {
    this.update();
    var ate = vscode.window.activeTextEditor;
    if (!ate) {
      return [];
    }
    if (this.document !== ate.document) {
      return [];
    }
    var surroundingRegions = [];
    for (let reg of this.regions) {
      if (reg.contains(ate.selection.active)) {
        surroundingRegions.push(reg);
      }
    }
    return surroundingRegions;
  }

  /**
   * First region (in provider order — presumably the outermost; confirm the
   * ordering guaranteed by RegionProvider) containing the selection, or null
   * when there is none.
   *
   * Fixed: removed an unreachable second `return` statement that followed
   * `return currentRegions[0];` — behavior is unchanged.
   */
  public currentRegion(): CustomRegion | null {
    var currentRegions = this.currentRegions();
    if (currentRegions.length === 0) {
      return null;
    }
    return currentRegions[0];
  }
}

/* #endregion */
32,688
https://github.com/sndnv/stasis/blob/master/client-android/lib/src/main/kotlin/stasis/client_android/lib/model/server/api/requests/CreateDatasetDefinition.kt
Github Open Source
Open Source
Apache-2.0
2,023
stasis
sndnv
Kotlin
Code
41
200
package stasis.client_android.lib.model.server.api.requests

import com.squareup.moshi.Json
import com.squareup.moshi.JsonClass
import stasis.client_android.lib.model.server.datasets.DatasetDefinition
import stasis.client_android.lib.model.server.devices.DeviceId

/**
 * Request body for creating a new dataset definition on the server.
 *
 * Serialized with Moshi (adapter generated via [JsonClass]); the annotated
 * properties map to snake_case field names on the wire.
 *
 * @property info informational description of the definition (free-form string)
 * @property device id of the device this definition is associated with
 * @property redundantCopies number of redundant copies (wire: `redundant_copies`)
 * @property existingVersions retention policy for existing versions (wire: `existing_versions`)
 * @property removedVersions retention policy for removed versions (wire: `removed_versions`)
 */
@JsonClass(generateAdapter = true)
data class CreateDatasetDefinition(
    val info: String,
    val device: DeviceId,
    @Json(name = "redundant_copies")
    val redundantCopies: Int,
    @Json(name = "existing_versions")
    val existingVersions: DatasetDefinition.Retention,
    @Json(name = "removed_versions")
    val removedVersions: DatasetDefinition.Retention
)
4,532
https://github.com/FernandoGomezS/Masisa/blob/master/deletejpg.php
Github Open Source
Open Source
MIT
null
Masisa
FernandoGomezS
PHP
Code
7
41
<?php

// Deletes an uploaded image by file name.
// SECURITY: the file name arrives in untrusted POST data, so it is reduced to
// its basename() before use — the original passed it straight into unlink(),
// allowing path traversal (e.g. "../../index.php") to delete arbitrary files.
$data = isset($_POST['jpg']) ? basename($_POST['jpg']) : '';

if ($data === '' || !unlink('/sites/masisa/images/' . $data)) {
    // Same failure reporting as before: dump the last PHP error.
    print_r(error_get_last());
}
?>
21,087
https://github.com/cthacker-udel/NCT-AndroidGUI/blob/master/app/src/main/java/com/example/nctai_trading/bittrex/WalletHealth.java
Github Open Source
Open Source
Apache-2.0
null
NCT-AndroidGUI
cthacker-udel
Java
Code
202
633
/*
 * This file is part of the bittrex4j project.
 *
 * @author CCob
 *
 * For the full copyright and license information, please view the LICENSE
 * file that was distributed with this source code.
 */
package com.example.nctai_trading.bittrex;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;

import java.time.ZonedDateTime;

/**
 * Immutable snapshot of a single currency wallet's health as reported by the
 * Bittrex API. Instances are produced by Jackson through the annotated
 * constructor below; each constructor parameter is bound to the like-named
 * PascalCase JSON property (e.g. {@code Currency}, {@code DepositQueueDepth}).
 */
public class WalletHealth {

    private String currency;
    private int depositQueueDepth;
    private int withdrawQueueDepth;
    private long blockHeight;
    private int walletBalance;
    private int walletConnections;
    private int minutesSinceBHUpdated;
    private ZonedDateTime lastChecked;
    private boolean isActive;

    /**
     * JSON-creator constructor used by Jackson; parameters map 1:1 to the
     * fields above and are exposed read-only through the getters below.
     */
    @JsonCreator
    public WalletHealth(@JsonProperty("Currency") String currency,
                        @JsonProperty("DepositQueueDepth") int depositQueueDepth,
                        @JsonProperty("WithdrawQueueDepth") int withdrawQueueDepth,
                        @JsonProperty("BlockHeight") long blockHeight,
                        @JsonProperty("WalletBalance") int walletBalance,
                        @JsonProperty("WalletConnections") int walletConnections,
                        @JsonProperty("MinutesSinceBHUpdated") int minutesSinceBHUpdated,
                        @JsonProperty("LastChecked") ZonedDateTime lastChecked,
                        @JsonProperty("IsActive") boolean isActive) {
        this.currency = currency;
        this.depositQueueDepth = depositQueueDepth;
        this.withdrawQueueDepth = withdrawQueueDepth;
        this.blockHeight = blockHeight;
        this.walletBalance = walletBalance;
        this.walletConnections = walletConnections;
        this.minutesSinceBHUpdated = minutesSinceBHUpdated;
        this.lastChecked = lastChecked;
        this.isActive = isActive;
    }

    public String getCurrency() {
        return currency;
    }

    public int getDepositQueueDepth() {
        return depositQueueDepth;
    }

    public int getWithdrawQueueDepth() {
        return withdrawQueueDepth;
    }

    public long getBlockHeight() {
        return blockHeight;
    }

    public int getWalletBalance() {
        return walletBalance;
    }

    public int getWalletConnections() {
        return walletConnections;
    }

    public int getMinutesSinceBHUpdated() {
        return minutesSinceBHUpdated;
    }

    public ZonedDateTime getLastChecked() {
        return lastChecked;
    }

    public boolean isActive() {
        return isActive;
    }
}
30,456
https://github.com/KnxAssociation/api-client/blob/master/KNX/Api/Contact.php
Github Open Source
Open Source
MIT
null
api-client
KnxAssociation
PHP
Code
266
705
<?php
/**
 * KNX Api
 *
 * This file is a part of the KNX Api client
 *
 * @package KNX Api
 */

namespace KNX\Api;

class Contact {

    /**
     * ID
     *
     * @access public
     * @var int $id
     */
    public $id;

    /**
     * Details: associative array backing the magic accessors below
     *
     * @access public
     * @var array $details
     */
    public $details;

    /**
     * Constructor
     *
     * @access public
     * @param int|null $id optional id of an existing contact
     */
    public function __construct($id = null) {
        if ($id !== null) {
            $this->id = $id;
        }
    }

    /**
     * Get a field
     *
     * @access public
     * @param string $key
     * @return mixed
     */
    public function __get($key) {
        return $this->details[$key];
    }

    /**
     * Set a field
     *
     * @access public
     * @param string $key
     * @param mixed $value
     */
    public function __set($key, $value) {
        $this->details[$key] = $value;
    }

    /**
     * Isset
     *
     * @access public
     * @param string $key
     * @return bool $isset
     */
    public function __isset($key) {
        if (isset($this->details[$key])) {
            return true;
        } else {
            return false;
        }
    }

    /**
     * Load array: copies each key/value pair onto this contact via __set
     *
     * @access public
     * @param array $data
     */
    public function load_array($data) {
        foreach ($data as $key => $value) {
            $this->$key = $value;
        }
    }

    /**
     * Validate the contact details against the remote API
     *
     * @access public
     * @param array $errors filled (by reference) with per-field validation errors
     * @return bool $validated
     */
    public function validate(&$errors) {
        $data = \KNX\Api\Client\Json::call('contact/validate', $this->details);
        $errors = $data['validate_errors'];
        return $data['success'];
    }

    /**
     * Save the contact via the remote API; sets $this->id on success
     *
     * @access public
     * @throws \Exception when the API reports unvalidated fields
     */
    public function save() {
        $data = \KNX\Api\Client\Json::call('contact/insert', $this->details);
        if (!$data['success']) {
            throw new \Exception('Contact not saved, unvalidated fields: ' . implode(', ', array_keys($data['validate_errors'])));
        }
        $this->id = $data['contact_id'];
    }
}
30,464
https://github.com/letssteam/codal-stm32-B-L475E-IOT01A/blob/master/samples/Basic/MessageBus/MessageBus_Sample.cpp
Github Open Source
Open Source
MIT
null
codal-stm32-B-L475E-IOT01A
letssteam
C++
Code
31
180
#include "MessageBus_Sample.h"

using namespace codal;

// Global sample object so the loop below can read its state flag.
SampleMessageBus* sample;

// Entry point of the MessageBus sample: led1 mirrors sample->state
// (presumably toggled by a message-bus event handler inside
// SampleMessageBus — confirm in MessageBus_Sample.h) while led2 simply
// blinks, toggling every 250 ms.
void MessageBusSample_main(codal::STM32DISCO_L475VG_IOT& discoL475VgIot)
{
    sample = new SampleMessageBus(discoL475VgIot);
    while (true) {
        // led1 follows the sample's boolean state.
        discoL475VgIot.io.led1.setDigitalValue(sample->state ? 1 : 0);
        // led2 inverts its current value each iteration (blink).
        discoL475VgIot.io.led2.setDigitalValue(discoL475VgIot.io.led2.getDigitalValue() ? 0 : 1);
        discoL475VgIot.sleep(250);
    }
}
19,896
https://github.com/andela-jejezie/peopleProject/blob/master/node_modules/grunt-nodemon/node_modules/nodemon/lib/index.js
Github Open Source
Open Source
MIT
2,022
peopleProject
andela-jejezie
JavaScript
Code
3
10
// Package entry-point shim: re-export the real implementation from ./nodemon.
module.exports = require('./nodemon');
18,317
https://github.com/puneet1999/Ryujinx/blob/master/Ryujinx.Cpu/MemoryManager.cs
Github Open Source
Open Source
MIT
2,022
Ryujinx
puneet1999
C#
Code
2,311
6,115
using ARMeilleure.Memory; using Ryujinx.Cpu.Tracking; using Ryujinx.Memory; using Ryujinx.Memory.Tracking; using System; using System.Collections.Generic; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; using System.Threading; namespace Ryujinx.Cpu { /// <summary> /// Represents a CPU memory manager. /// </summary> public sealed class MemoryManager : IMemoryManager, IVirtualMemoryManager, IDisposable { public const int PageBits = 12; public const int PageSize = 1 << PageBits; public const int PageMask = PageSize - 1; private const int PteSize = 8; private readonly InvalidAccessHandler _invalidAccessHandler; /// <summary> /// Address space width in bits. /// </summary> public int AddressSpaceBits { get; } private readonly ulong _addressSpaceSize; private readonly MemoryBlock _backingMemory; private readonly MemoryBlock _pageTable; /// <summary> /// Page table base pointer. /// </summary> public IntPtr PageTablePointer => _pageTable.Pointer; public MemoryTracking Tracking { get; } internal event Action<ulong, ulong> UnmapEvent; /// <summary> /// Creates a new instance of the memory manager. 
/// </summary> /// <param name="backingMemory">Physical backing memory where virtual memory will be mapped to</param> /// <param name="addressSpaceSize">Size of the address space</param> /// <param name="invalidAccessHandler">Optional function to handle invalid memory accesses</param> public MemoryManager(MemoryBlock backingMemory, ulong addressSpaceSize, InvalidAccessHandler invalidAccessHandler = null) { _invalidAccessHandler = invalidAccessHandler; ulong asSize = PageSize; int asBits = PageBits; while (asSize < addressSpaceSize) { asSize <<= 1; asBits++; } AddressSpaceBits = asBits; _addressSpaceSize = asSize; _backingMemory = backingMemory; _pageTable = new MemoryBlock((asSize / PageSize) * PteSize); Tracking = new MemoryTracking(this, backingMemory, PageSize); Tracking.EnablePhysicalProtection = false; // Disabled for now, as protection is done in software. } /// <summary> /// Maps a virtual memory range into a physical memory range. /// </summary> /// <remarks> /// Addresses and size must be page aligned. /// </remarks> /// <param name="va">Virtual memory address</param> /// <param name="pa">Physical memory address</param> /// <param name="size">Size to be mapped</param> public void Map(ulong va, ulong pa, ulong size) { ulong remainingSize = size; ulong oVa = va; ulong oPa = pa; while (remainingSize != 0) { _pageTable.Write((va / PageSize) * PteSize, PaToPte(pa)); va += PageSize; pa += PageSize; remainingSize -= PageSize; } Tracking.Map(oVa, oPa, size); } /// <summary> /// Unmaps a previously mapped range of virtual memory. /// </summary> /// <param name="va">Virtual address of the range to be unmapped</param> /// <param name="size">Size of the range to be unmapped</param> public void Unmap(ulong va, ulong size) { // If size is 0, there's nothing to unmap, just exit early. 
if (size == 0) { return; } UnmapEvent?.Invoke(va, size); ulong remainingSize = size; ulong oVa = va; while (remainingSize != 0) { _pageTable.Write((va / PageSize) * PteSize, 0UL); va += PageSize; remainingSize -= PageSize; } Tracking.Unmap(oVa, size); } /// <summary> /// Reads data from CPU mapped memory. /// </summary> /// <typeparam name="T">Type of the data being read</typeparam> /// <param name="va">Virtual address of the data in memory</param> /// <returns>The data</returns> /// <exception cref="InvalidMemoryRegionException">Throw for unhandled invalid or unmapped memory accesses</exception> public T Read<T>(ulong va) where T : unmanaged { return MemoryMarshal.Cast<byte, T>(GetSpan(va, Unsafe.SizeOf<T>()))[0]; } /// <summary> /// Reads data from CPU mapped memory, with read tracking /// </summary> /// <typeparam name="T">Type of the data being read</typeparam> /// <param name="va">Virtual address of the data in memory</param> /// <returns>The data</returns> public T ReadTracked<T>(ulong va) where T : unmanaged { SignalMemoryTracking(va, (ulong)Unsafe.SizeOf<T>(), false); return MemoryMarshal.Cast<byte, T>(GetSpan(va, Unsafe.SizeOf<T>()))[0]; } /// <summary> /// Reads data from CPU mapped memory. /// </summary> /// <param name="va">Virtual address of the data in memory</param> /// <param name="data">Span to store the data being read into</param> /// <exception cref="InvalidMemoryRegionException">Throw for unhandled invalid or unmapped memory accesses</exception> public void Read(ulong va, Span<byte> data) { ReadImpl(va, data); } /// <summary> /// Writes data to CPU mapped memory. 
/// </summary> /// <typeparam name="T">Type of the data being written</typeparam> /// <param name="va">Virtual address to write the data into</param> /// <param name="value">Data to be written</param> /// <exception cref="InvalidMemoryRegionException">Throw for unhandled invalid or unmapped memory accesses</exception> public void Write<T>(ulong va, T value) where T : unmanaged { Write(va, MemoryMarshal.Cast<T, byte>(MemoryMarshal.CreateSpan(ref value, 1))); } /// <summary> /// Writes data to CPU mapped memory, with write tracking. /// </summary> /// <param name="va">Virtual address to write the data into</param> /// <param name="data">Data to be written</param> /// <exception cref="InvalidMemoryRegionException">Throw for unhandled invalid or unmapped memory accesses</exception> public void Write(ulong va, ReadOnlySpan<byte> data) { if (data.Length == 0) { return; } SignalMemoryTracking(va, (ulong)data.Length, true); WriteImpl(va, data); } /// <summary> /// Writes data to CPU mapped memory, without write tracking. /// </summary> /// <param name="va">Virtual address to write the data into</param> /// <param name="data">Data to be written</param> public void WriteUntracked(ulong va, ReadOnlySpan<byte> data) { if (data.Length == 0) { return; } WriteImpl(va, data); } [MethodImpl(MethodImplOptions.AggressiveInlining)] /// <summary> /// Writes data to CPU mapped memory. 
/// </summary> /// <param name="va">Virtual address to write the data into</param> /// <param name="data">Data to be written</param> private void WriteImpl(ulong va, ReadOnlySpan<byte> data) { try { if (IsContiguousAndMapped(va, data.Length)) { data.CopyTo(_backingMemory.GetSpan(GetPhysicalAddressInternal(va), data.Length)); } else { int offset = 0, size; if ((va & PageMask) != 0) { ulong pa = GetPhysicalAddressInternal(va); size = Math.Min(data.Length, PageSize - (int)(va & PageMask)); data.Slice(0, size).CopyTo(_backingMemory.GetSpan(pa, size)); offset += size; } for (; offset < data.Length; offset += size) { ulong pa = GetPhysicalAddressInternal(va + (ulong)offset); size = Math.Min(data.Length - offset, PageSize); data.Slice(offset, size).CopyTo(_backingMemory.GetSpan(pa, size)); } } } catch (InvalidMemoryRegionException) { if (_invalidAccessHandler == null || !_invalidAccessHandler(va)) { throw; } } } /// <summary> /// Gets a read-only span of data from CPU mapped memory. /// </summary> /// <remarks> /// This may perform a allocation if the data is not contiguous in memory. /// For this reason, the span is read-only, you can't modify the data. /// </remarks> /// <param name="va">Virtual address of the data</param> /// <param name="size">Size of the data</param> /// <param name="tracked">True if read tracking is triggered on the span</param> /// <returns>A read-only span of the data</returns> /// <exception cref="InvalidMemoryRegionException">Throw for unhandled invalid or unmapped memory accesses</exception> public ReadOnlySpan<byte> GetSpan(ulong va, int size, bool tracked = false) { if (size == 0) { return ReadOnlySpan<byte>.Empty; } if (tracked) { SignalMemoryTracking(va, (ulong)size, false); } if (IsContiguousAndMapped(va, size)) { return _backingMemory.GetSpan(GetPhysicalAddressInternal(va), size); } else { Span<byte> data = new byte[size]; ReadImpl(va, data); return data; } } /// <summary> /// Gets a region of memory that can be written to. 
/// </summary> /// <remarks> /// If the requested region is not contiguous in physical memory, /// this will perform an allocation, and flush the data (writing it /// back to guest memory) on disposal. /// </remarks> /// <param name="va">Virtual address of the data</param> /// <param name="size">Size of the data</param> /// <returns>A writable region of memory containing the data</returns> /// <exception cref="InvalidMemoryRegionException">Throw for unhandled invalid or unmapped memory accesses</exception> public WritableRegion GetWritableRegion(ulong va, int size) { if (size == 0) { return new WritableRegion(null, va, Memory<byte>.Empty); } if (IsContiguousAndMapped(va, size)) { return new WritableRegion(null, va, _backingMemory.GetMemory(GetPhysicalAddressInternal(va), size)); } else { Memory<byte> memory = new byte[size]; GetSpan(va, size).CopyTo(memory.Span); return new WritableRegion(this, va, memory); } } /// <summary> /// Gets a reference for the given type at the specified virtual memory address. /// </summary> /// <remarks> /// The data must be located at a contiguous memory region. 
/// </remarks> /// <typeparam name="T">Type of the data to get the reference</typeparam> /// <param name="va">Virtual address of the data</param> /// <returns>A reference to the data in memory</returns> /// <exception cref="MemoryNotContiguousException">Throw if the specified memory region is not contiguous in physical memory</exception> public ref T GetRef<T>(ulong va) where T : unmanaged { if (!IsContiguous(va, Unsafe.SizeOf<T>())) { ThrowMemoryNotContiguous(); } SignalMemoryTracking(va, (ulong)Unsafe.SizeOf<T>(), true); return ref _backingMemory.GetRef<T>(GetPhysicalAddressInternal(va)); } private void ThrowMemoryNotContiguous() => throw new MemoryNotContiguousException(); [MethodImpl(MethodImplOptions.AggressiveInlining)] private bool IsContiguousAndMapped(ulong va, int size) => IsContiguous(va, size) && IsMapped(va); [MethodImpl(MethodImplOptions.AggressiveInlining)] private bool IsContiguous(ulong va, int size) { if (!ValidateAddress(va)) { return false; } ulong endVa = (va + (ulong)size + PageMask) & ~(ulong)PageMask; va &= ~(ulong)PageMask; int pages = (int)((endVa - va) / PageSize); for (int page = 0; page < pages - 1; page++) { if (!ValidateAddress(va + PageSize)) { return false; } if (GetPhysicalAddressInternal(va) + PageSize != GetPhysicalAddressInternal(va + PageSize)) { return false; } va += PageSize; } return true; } /// <summary> /// Gets the physical regions that make up the given virtual address region. /// If any part of the virtual region is unmapped, null is returned. 
/// </summary> /// <param name="va">Virtual address of the range</param> /// <param name="size">Size of the range</param> /// <returns>Array of physical regions</returns> public (ulong address, ulong size)[] GetPhysicalRegions(ulong va, ulong size) { if (!ValidateAddress(va)) { return null; } ulong endVa = (va + size + PageMask) & ~(ulong)PageMask; va &= ~(ulong)PageMask; int pages = (int)((endVa - va) / PageSize); List<(ulong, ulong)> regions = new List<(ulong, ulong)>(); ulong regionStart = GetPhysicalAddressInternal(va); ulong regionSize = PageSize; for (int page = 0; page < pages - 1; page++) { if (!ValidateAddress(va + PageSize)) { return null; } ulong newPa = GetPhysicalAddressInternal(va + PageSize); if (GetPhysicalAddressInternal(va) + PageSize != newPa) { regions.Add((regionStart, regionSize)); regionStart = newPa; regionSize = 0; } va += PageSize; regionSize += PageSize; } regions.Add((regionStart, regionSize)); return regions.ToArray(); } private void ReadImpl(ulong va, Span<byte> data) { if (data.Length == 0) { return; } try { int offset = 0, size; if ((va & PageMask) != 0) { ulong pa = GetPhysicalAddressInternal(va); size = Math.Min(data.Length, PageSize - (int)(va & PageMask)); _backingMemory.GetSpan(pa, size).CopyTo(data.Slice(0, size)); offset += size; } for (; offset < data.Length; offset += size) { ulong pa = GetPhysicalAddressInternal(va + (ulong)offset); size = Math.Min(data.Length - offset, PageSize); _backingMemory.GetSpan(pa, size).CopyTo(data.Slice(offset, size)); } } catch (InvalidMemoryRegionException) { if (_invalidAccessHandler == null || !_invalidAccessHandler(va)) { throw; } } } /// <summary> /// Checks if a memory range is mapped. 
/// </summary> /// <param name="va">Virtual address of the range</param> /// <param name="size">Size of the range in bytes</param> /// <returns>True if the entire range is mapped, false otherwise</returns> public bool IsRangeMapped(ulong va, ulong size) { if (size == 0UL) { return true; } ulong endVa = (va + size + PageMask) & ~(ulong)PageMask; va &= ~(ulong)PageMask; while (va < endVa) { if (!IsMapped(va)) { return false; } va += PageSize; } return true; } /// <summary> /// Checks if the page at a given CPU virtual address is mapped. /// </summary> /// <param name="va">Virtual address to check</param> /// <returns>True if the address is mapped, false otherwise</returns> [MethodImpl(MethodImplOptions.AggressiveInlining)] public bool IsMapped(ulong va) { if (!ValidateAddress(va)) { return false; } return _pageTable.Read<ulong>((va / PageSize) * PteSize) != 0; } private bool ValidateAddress(ulong va) { return va < _addressSpaceSize; } /// <summary> /// Performs address translation of the address inside a CPU mapped memory range. /// </summary> /// <remarks> /// If the address is invalid or unmapped, -1 will be returned. /// </remarks> /// <param name="va">Virtual address to be translated</param> /// <returns>The physical address</returns> public ulong GetPhysicalAddress(ulong va) { // We return -1L if the virtual address is invalid or unmapped. if (!ValidateAddress(va) || !IsMapped(va)) { return ulong.MaxValue; } return GetPhysicalAddressInternal(va); } private ulong GetPhysicalAddressInternal(ulong va) { return PteToPa(_pageTable.Read<ulong>((va / PageSize) * PteSize) & ~(0xffffUL << 48)) + (va & PageMask); } /// <summary> /// Reprotect a region of virtual memory for tracking. Sets software protection bits. 
/// </summary> /// <param name="va">Virtual address base</param> /// <param name="size">Size of the region to protect</param> /// <param name="protection">Memory protection to set</param> public void TrackingReprotect(ulong va, ulong size, MemoryPermission protection) { // Protection is inverted on software pages, since the default value is 0. protection = (~protection) & MemoryPermission.ReadAndWrite; long tag = (long)protection << 48; if (tag > 0) { tag |= long.MinValue; // If any protection is present, the whole pte is negative. } ulong endVa = (va + size + PageMask) & ~(ulong)PageMask; long invTagMask = ~(0xffffL << 48); while (va < endVa) { ref long pageRef = ref _pageTable.GetRef<long>((va >> PageBits) * PteSize); long pte; do { pte = Volatile.Read(ref pageRef); } while (Interlocked.CompareExchange(ref pageRef, (pte & invTagMask) | tag, pte) != pte); va += PageSize; } } /// <summary> /// Obtains a memory tracking handle for the given virtual region. This should be disposed when finished with. /// </summary> /// <param name="address">CPU virtual address of the region</param> /// <param name="size">Size of the region</param> /// <returns>The memory tracking handle</returns> public CpuRegionHandle BeginTracking(ulong address, ulong size) { return new CpuRegionHandle(Tracking.BeginTracking(address, size)); } /// <summary> /// Obtains a memory tracking handle for the given virtual region, with a specified granularity. This should be disposed when finished with. 
/// </summary>
/// <param name="address">CPU virtual address of the region</param>
/// <param name="size">Size of the region</param>
/// <param name="granularity">Desired granularity of write tracking</param>
/// <returns>The memory tracking handle</returns>
public CpuMultiRegionHandle BeginGranularTracking(ulong address, ulong size, ulong granularity)
{
    return new CpuMultiRegionHandle(Tracking.BeginGranularTracking(address, size, granularity));
}

/// <summary>
/// Obtains a smart memory tracking handle for the given virtual region, with a specified granularity. This should be disposed when finished with.
/// </summary>
/// <param name="address">CPU virtual address of the region</param>
/// <param name="size">Size of the region</param>
/// <param name="granularity">Desired granularity of write tracking</param>
/// <returns>The memory tracking handle</returns>
public CpuSmartMultiRegionHandle BeginSmartGranularTracking(ulong address, ulong size, ulong granularity)
{
    return new CpuSmartMultiRegionHandle(Tracking.BeginSmartGranularTracking(address, size, granularity));
}

/// <summary>
/// Alerts the memory tracking that a given region has been read from or written to.
/// This should be called before read/write is performed.
/// </summary>
/// <param name="va">Virtual address of the region</param>
/// <param name="size">Size of the region</param>
/// <param name="write">True if the region is about to be written to, false if it is only read</param>
public void SignalMemoryTracking(ulong va, ulong size, bool write)
{
    // We emulate guard pages for software memory access. This makes for an easy transition to
    // tracking using host guard pages in future, but also supporting platforms where this is not possible.

    // Write tag includes read protection, since we don't have any read actions that aren't performed before write too.
    // 3 sets both tracking bits (read and write) at bit 48+; 1 sets only the read bit.
    long tag = (write ? 3L : 1L) << 48;

    ulong endVa = (va + size + PageMask) & ~(ulong)PageMask;

    while (va < endVa)
    {
        ref long pageRef = ref _pageTable.GetRef<long>((va >> PageBits) * PteSize);

        long pte;

        pte = Volatile.Read(ref pageRef);

        // A tag bit set in the PTE means this page is protected for tracking;
        // fire a single event for the whole access and stop scanning.
        if ((pte & tag) != 0)
        {
            Tracking.VirtualMemoryEvent(va, size, write);
            break;
        }

        va += PageSize;
    }
}

// Builds a page-table entry for a physical address: the host pointer into the
// backing memory block where that physical page lives.
private ulong PaToPte(ulong pa)
{
    return (ulong)_backingMemory.GetPointer(pa, PageSize).ToInt64();
}

// Inverse of PaToPte: recovers the physical address by subtracting the backing
// memory base pointer from the stored host pointer.
private ulong PteToPa(ulong pte)
{
    return (ulong)((long)pte - _backingMemory.Pointer.ToInt64());
}

/// <summary>
/// Disposes of resources used by the memory manager.
/// </summary>
public void Dispose() => _pageTable.Dispose();
}
}
27,576
https://github.com/catercow/react-apollo/blob/master/test/client/graphql/queries/polling.test.tsx
Github Open Source
Open Source
MIT
null
react-apollo
catercow
TypeScript
Code
477
1,257
import React from 'react'; import renderer from 'react-test-renderer'; import gql from 'graphql-tag'; import ApolloClient from 'apollo-client'; import { InMemoryCache as Cache } from 'apollo-cache-inmemory'; import { mockSingleLink } from '../../../../src/test-utils'; import { ApolloProvider, graphql, ChildProps } from '../../../../src'; import { DocumentNode } from 'graphql'; describe('[queries] polling', () => { let error: typeof console.error; beforeEach(() => { error = console.error; console.error = jest.fn(() => {}); // tslint:disable-line jest.useRealTimers(); }); afterEach(() => { console.error = error; }); // polling it('allows a polling query to be created', done => { jest.useFakeTimers(); const POLL_INTERVAL = 250; const POLL_COUNT = 4; const query: DocumentNode = gql` query people { allPeople(first: 1) { people { name } } } `; const data = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; const data2 = { allPeople: { people: [{ name: 'Leia Skywalker' }] } }; const link = mockSingleLink( { request: { query }, result: { data } }, { request: { query }, result: { data: data2 } }, { request: { query }, result: { data } }, ); const client = new ApolloClient({ link, cache: new Cache({ addTypename: false }), }); let count = 0; const Container = graphql(query, { options: () => ({ pollInterval: POLL_INTERVAL, notifyOnNetworkStatusChange: false, }), })(() => { count++; return null; }); const wrapper = renderer.create( <ApolloProvider client={client}> <Container /> </ApolloProvider>, ); jest.runTimersToTime(POLL_INTERVAL * POLL_COUNT); try { expect(count).toEqual(POLL_COUNT); done(); } catch (e) { done.fail(e); } finally { (wrapper as any).unmount(); } }); it('exposes stopPolling as part of the props api', done => { const query: DocumentNode = gql` query people { allPeople(first: 1) { people { name } } } `; const link = mockSingleLink({ request: { query }, result: { data: { allPeople: { people: [{ name: 'Luke Skywalker' }] } } }, }); const client = new 
ApolloClient({ link, cache: new Cache({ addTypename: false }), }); const Container = graphql(query)( class extends React.Component<ChildProps> { componentWillReceiveProps({ data }: ChildProps) { expect(data!.stopPolling).toBeTruthy(); expect(data!.stopPolling instanceof Function).toBeTruthy(); expect(data!.stopPolling).not.toThrow(); done(); } render() { return null; } }, ); renderer.create( <ApolloProvider client={client}> <Container /> </ApolloProvider>, ); }); it('exposes startPolling as part of the props api', done => { const query: DocumentNode = gql` query people { allPeople(first: 1) { people { name } } } `; const link = mockSingleLink({ request: { query }, result: { data: { allPeople: { people: [{ name: 'Luke Skywalker' }] } } }, }); const client = new ApolloClient({ link, cache: new Cache({ addTypename: false }), }); let wrapper: renderer.ReactTestRenderer; const Container = graphql(query, { options: { pollInterval: 10 } })( class extends React.Component<ChildProps> { componentWillReceiveProps({ data }: ChildProps) { expect(data!.startPolling).toBeTruthy(); expect(data!.startPolling instanceof Function).toBeTruthy(); // XXX this does throw because of no pollInterval // expect(data.startPolling).not.toThrow(); setTimeout(() => { wrapper.unmount(); done(); }, 0); } render() { return null; } }, ); wrapper = renderer.create( <ApolloProvider client={client}> <Container /> </ApolloProvider>, ); }); });
38,396
https://github.com/Lin0818/py-study-notebook/blob/master/notebook/vector_v1.py
Github Open Source
Open Source
Apache-2.0
2,018
py-study-notebook
Lin0818
Python
Code
89
339
#vector_v1.py #multi-dimensional Vector class from array import array import reprlib import math class Vector: typecode = 'd' def __init__(self, components): self.__components = array(self.typecode, components) self._score = 3 @property def score(self): return self._score def __iter__(self): return iter(self.__components) def __repr__(self): components = reprlib.repr(self.__components) components = components[components.find('['):-1] return 'Vector({})'.format(components) def __str__(self): return str(tuple(self)) def __bytes(self): return (bytes([ord(self.typecode)]) + bytes(self.__components)) def __eq__(self, other): return tuple(self) == tuple(other) def __abs__(self): return math.sqrt(sum(x * x for x in self)) def __bool__(self): return bool(abs(self)) @classmethod def frombytes(cls, octets): typecode = chr(octets[0]) memv = memoryview(octets[1:]).cast(typecode) return cls(memv)
26,817
https://github.com/aevv/qcmd/blob/master/QCmd/QCmd/Commands/QCommandDefinition.cs
Github Open Source
Open Source
Apache-2.0
2,015
qcmd
aevv
C#
Code
47
121
using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; namespace QCmd.Commands { class QCommandDefinition { public string Command { get; set; } public IList<string> Parameters { get; set; } public bool HideOnFinish { get; set; } public QCommandDefinition() { Parameters = new List<string>(); } } }
30,662
https://github.com/snumrl/TillingPatch/blob/master/program/lego/tgasave.h
Github Open Source
Open Source
Apache-2.0
2,022
TillingPatch
snumrl
C
Code
155
337
// +------------------------------------------------------------------------- // | tgasave.h // | // | Author: Manmyung Kim // +------------------------------------------------------------------------- // | COPYRIGHT: // | Copyright Manmyung Kim 2013 // | See the included COPYRIGHT.txt file for further details. // | // | This file is part of the TilingMotionPatch. // | TilingMotionPatch is free software: you can redistribute it and/or modify // | it under the terms of the MIT License. // | // | You should have received a copy of the MIT License // | along with TilingMotionPatch. If not, see <mit-license.org>. // +------------------------------------------------------------------------- //------------------------------------------------------------- /// \file tgasave.h /// \date 9-feb-2005 /// \author Rob Bateman /// \brief writes a 24 or 32 bit tga file to the disk. //------------------------------------------------------------- #ifndef TGA_SAVE__H__ #define TGA_SAVE__H__ bool WriteTga(const char* filename, const unsigned w, const unsigned h, const unsigned bpp, const unsigned char* pixels); bool WriteBMP(const char* filename, const unsigned w, const unsigned h, const unsigned bpp, const unsigned char* pixels); #endif
22,727
https://github.com/mottl-jan/koin/blob/master/koin-projects/examples/multimodule-ktor/module-b/src/main/kotlin/org/koin/samples/multi_module/module_b/ModuleBService.kt
Github Open Source
Open Source
Apache-2.0
2,022
koin
mottl-jan
Kotlin
Code
18
65
package org.koin.samples.multi_module.module_b import org.koin.samples.multi_module.common.IService class ModuleBService : IService { override fun sayHello() = "Hello! I am ${javaClass.name}" }
9,796
https://github.com/random-entity/off-axis-projection-unity/blob/master/Assets/Scripts/Projection/ProjectionPlaneCamera.cs
Github Open Source
Open Source
MIT
2,021
off-axis-projection-unity
random-entity
C#
Code
315
996
//NOTE: Undefine this if you need to move the plane at runtime //#define PRECALC_PLANE using System.Collections; using UnityEngine; namespace Apt.Unity.Projection { [ExecuteInEditMode] [RequireComponent(typeof(Camera))] public class ProjectionPlaneCamera : MonoBehaviour { //Code based on https://csc.lsu.edu/~kooima/pdfs/gen-perspective.pdf //and https://forum.unity.com/threads/vr-cave-projection.76051/ [Header("Projection plane")] public ProjectionPlane ProjectionScreen; public bool ClampNearPlane = true; [Header("Helpers")] public bool DrawGizmos = true; private Vector3 eyePos; //From eye to projection screen corners private float n, f; Vector3 va, vb, vc, vd; //Extents of perpendicular projection float l, r, b, t; Vector3 viewDir; private Camera cam; private void Awake() { cam = GetComponent<Camera>(); } private void OnDrawGizmos() { if (ProjectionScreen == null) return; if (DrawGizmos) { var pos = transform.position; Gizmos.color = Color.green; Gizmos.DrawLine(pos, pos + va); Gizmos.DrawLine(pos, pos + vb); Gizmos.DrawLine(pos, pos + vc); Gizmos.DrawLine(pos, pos + vd); Vector3 pa = ProjectionScreen.BottomLeft; Vector3 vr = ProjectionScreen.DirRight; Vector3 vu = ProjectionScreen.DirUp; Gizmos.color = Color.white; Gizmos.DrawLine(pos, viewDir); } } private void LateUpdate() { if(ProjectionScreen != null) { Vector3 pa = ProjectionScreen.BottomLeft; Vector3 pb = ProjectionScreen.BottomRight; Vector3 pc = ProjectionScreen.TopLeft; Vector3 pd = ProjectionScreen.TopRight; Vector3 vr = ProjectionScreen.DirRight; Vector3 vu = ProjectionScreen.DirUp; Vector3 vn = ProjectionScreen.DirNormal; Matrix4x4 M = ProjectionScreen.M; eyePos = transform.position; //From eye to projection screen corners va = pa - eyePos; vb = pb - eyePos; vc = pc - eyePos; vd = pd - eyePos; viewDir = eyePos + va + vb + vc + vd; //distance from eye to projection screen plane float d = -Vector3.Dot(va, vn); if (ClampNearPlane) cam.nearClipPlane = d; n = cam.nearClipPlane; f = cam.farClipPlane; float 
nearOverDist = n / d; l = Vector3.Dot(vr, va) * nearOverDist; r = Vector3.Dot(vr, vb) * nearOverDist; b = Vector3.Dot(vu, va) * nearOverDist; t = Vector3.Dot(vu, vc) * nearOverDist; Matrix4x4 P = Matrix4x4.Frustum(l, r, b, t, n, f); //Translation to eye position Matrix4x4 T = Matrix4x4.Translate(-eyePos); Matrix4x4 R = Matrix4x4.Rotate(Quaternion.Inverse(transform.rotation) * ProjectionScreen.transform.rotation); cam.worldToCameraMatrix = M * R * T; cam.projectionMatrix = P; } } } }
44,620
https://github.com/Strumenta/antlr-kotlin/blob/master/ci.sh
Github Open Source
Open Source
Apache-2.0
2,023
antlr-kotlin
Strumenta
Shell
Code
100
256
#!/bin/bash set -eu # JS tests will fail because no headless chrome can be found, so we skip tests extra="-x jsIrBrowserTest -x jsLegacyBrowserTest" if [ -n "$*" ]; then extra="$extra $*" fi echo "INFO: $(date) running clean" ./gradlew clean echo "INFO: $(date) running test" ./gradlew test $extra echo "INFO: $(date) running publishToMavenLocal" ./gradlew publishToMavenLocal $extra echo "INFO: $(date) running check for antlr-kotlin-examples-jvm" cd antlr-kotlin-examples-jvm ../gradlew --info clean check $extra cd .. echo "INFO: $(date) running check for antlr-kotlin-examples-mpp" cd antlr-kotlin-examples-mpp ../gradlew --info clean check $extra cd .. # - cd antlr-kotlin-examples-js && ../gradlew --info clean check $extra && cd ..
4,321
https://github.com/martrics/leetcode/blob/master/src/com/adam/test/Q82.java
Github Open Source
Open Source
MIT
null
leetcode
martrics
Java
Code
170
477
package com.adam.test; /** * @author adam * contact: luminghi@hotmail.com * date: 2021/3/25 11:27 * version: 1.0.0 */ public class Q82 { public static void main(String[] args) { ListNode node1 = new ListNode(1); ListNode node2 = new ListNode(2); ListNode node3 = new ListNode(3); ListNode node4 = new ListNode(3); ListNode node5 = new ListNode(4); ListNode node6 = new ListNode(4); ListNode node7 = new ListNode(5); node1.next = node2; node2.next = node3; node3.next = node4; node4.next = node5; node5.next = node6; node6.next = node7; Solution solution = new Solution(); ListNode node = solution.deleteDuplicates(node1); while(node != null){ System.out.println(node.val + "\t"); node = node.next; } } static class Solution{ public ListNode deleteDuplicates(ListNode head) { if(head == null){ return head; } ListNode dummyHead = new ListNode(Integer.MAX_VALUE); dummyHead.next = head; ListNode cur = dummyHead; while (cur.next != null && cur.next.next != null) { if (cur.next.val == cur.next.next.val) { int x = cur.next.val; while (cur.next != null && cur.next.val == x) { cur.next = cur.next.next; } } else { cur = cur.next; } } return dummyHead.next; } } }
46,566
https://github.com/TBuddha/learnkeys/blob/master/learnkeys-refactoring/src/main/java/org/example/c10简化函数调用/p8以函数取代参数/Demo.java
Github Open Source
Open Source
Apache-2.0
2,021
learnkeys
TBuddha
Java
Code
396
1,203
package org.example.c10简化函数调用.p8以函数取代参数;

/**
 * "Replace Parameter with Method" worked example (Demo .. Demo5 show the
 * successive refactoring steps).
 *
 * The following code computes the discounted price of an order. A discount
 * this small is unlikely in real life, but as a teaching example we will not
 * worry about that.
 *
 * @author zhout
 * @date 2020/5/6 14:47
 */
class Demo {
    public double getPrice() {
        int basePrice = _quantity * _itemPrice;
        // Discount level
        int discountLevel;
        if (_quantity > 100) discountLevel = 2;
        else discountLevel = 1;
        double finalPrice = discountedPrice(basePrice, discountLevel);
        return finalPrice;
    }

    private double discountedPrice(int basePrice, int discountLevel) {
        if (discountLevel == 2) return basePrice * 0.1;
        else return basePrice * 0.05;
    }

    int _quantity;
    int _itemPrice;
}

class Demo1 {
    // Step 1: extract the discount-level computation into its own
    // getDiscountLevel() method.
    public double getPrice() {
        int basePrice = _quantity * _itemPrice;
        int discountLevel = getDiscountLevel();
        double finalPrice = discountedPrice(basePrice, discountLevel);
        return finalPrice;
    }

    private int getDiscountLevel() {
        if (_quantity > 100) return 2;
        else return 1;
    }

    private double discountedPrice(int basePrice, int discountLevel) {
        if (discountLevel == 2) return basePrice * 0.1;
        else return basePrice * 0.05;
    }

    int _quantity;
    int _itemPrice;
}

class Demo2 {
    public double getPrice() {
        int basePrice = _quantity * _itemPrice;
        int discountLevel = getDiscountLevel();
        double finalPrice = discountedPrice(basePrice, discountLevel);
        return finalPrice;
    }

    private int getDiscountLevel() {
        if (_quantity > 100) return 2;
        else return 1;
    }

    // Step 2: replace every use of the discountLevel parameter inside
    // discountedPrice() with a call to getDiscountLevel().
    private double discountedPrice(int basePrice, int discountLevel) {
        if (getDiscountLevel() == 2) return basePrice * 0.1;
        else return basePrice * 0.05;
    }

    int _quantity;
    int _itemPrice;
}

class Demo3 {
    // Step 3: now Remove Parameter can drop the discountLevel parameter.
    public double getPrice() {
        int basePrice = _quantity * _itemPrice;
        // int discountLevel = getDiscountLevel();
        double finalPrice = discountedPrice(basePrice);
        return finalPrice;
    }

    private int getDiscountLevel() {
        if (_quantity > 100) return 2;
        else return 1;
    }

    private double discountedPrice(int basePrice) {
        if (getDiscountLevel() == 2) return basePrice * 0.1;
        else return basePrice * 0.05;
    }

    int _quantity;
    int _itemPrice;
}

class Demo4 {
    // Step 4: remove the remaining unnecessary parameters and the
    // corresponding temporaries, which yields the following code.
    public double getPrice() {
        return discountedPrice();
    }

    private double getBasePrice() {
        return _quantity * _itemPrice;
    }

    private double discountedPrice() {
        if (getDiscountLevel() == 2) return getBasePrice() * 0.1;
        else return getBasePrice() * 0.05;
    }

    private int getDiscountLevel() {
        if (_quantity > 100) return 2;
        else return 1;
    }

    int _quantity;
    int _itemPrice;
}

class Demo5 {
    // Step 5: finally, apply Inline Method to discountedPrice().
    private double getPrice() {
        if (getDiscountLevel() == 2) return getBasePrice() * 0.1;
        else return getBasePrice() * 0.05;
    }

    private int getDiscountLevel() {
        if (_quantity > 100) return 2;
        else return 1;
    }

    private double getBasePrice() {
        return _quantity * _itemPrice;
    }

    int _quantity;
    int _itemPrice;
}
44,781
https://github.com/spicesouls/ducker/blob/master/ducker.py
Github Open Source
Open Source
MIT
2,022
ducker
spicesouls
Python
Code
287
811
#!/bin/env python3
"""ducker -- generate Rubber Ducky payload scripts from a plain-text payload file."""

from src.templates import *
from src.messaging import *

from colorama import init, Style, Fore, Back
init()

import argparse
import sys

banner = Fore.YELLOW + Style.BRIGHT + fr"""
 ____  _  _  ___  __ _  ____  ____        __
(    \/ )( \/ __)(  / )(  __)(  _ \   <(o )___
 ) D () \/ (( (__ )  (  ) _)  )   /    ( ._^ /
(____/\____/ \___)(__\_)(____)(__\_)    `---'{Fore.RESET}
┌────────────────────────────────────────────
└─ {Fore.YELLOW}spicesouls.github.io/ducker
""" + Style.RESET_ALL

print(banner)

# --- Command-line arguments ---
parser = argparse.ArgumentParser(description='Generate rubber ducky payloads with ease.')
parser.add_argument('--payload', type=str, help='A File Containing Your Payload.')
parser.add_argument('-m', choices=['cmd', 'powershell', 'notepad'], help='Mode: Where to inject your payload.')
parser.add_argument('-d', type=int, default=1000, help='The Delay you want between executing lines of your payload in milliseconds. (1000 By Default)')
parser.add_argument('-o', type=str, help='The file to save the finalised script to, the finalised script is outputted in the terminal by default.')
args = parser.parse_args()

if not args.payload:
    alert('Please supply a Payload.')
    sys.exit()

if not args.m:
    alert('Please select a Mode.')
    sys.exit()

# Print to the terminal unless an output file was requested.
clioutput = args.o is None
output = args.o if args.o is not None else 'None'

#########################
### SCRIPT GENERATION ###
#########################

info('Payload: ' + args.payload)
info('Mode: ' + args.m)
info('Delay: ' + str(args.d))
info('Output: ' + output)
print('')

info('Generating Ducky Payload...')

# Map each mode to its template generator; argparse's `choices` guarantees
# args.m is one of these keys (the earlier `if not args.m` guard covers None).
generators = {'cmd': cmd, 'powershell': powershell, 'notepad': notepad}
try:
    script = generators[args.m](args.payload, str(args.d))
except FileNotFoundError:
    warn('Error: Payload File Supplied is not found. Exiting...')
    sys.exit()

good('Finished!')

if clioutput:
    info('Showing Finalised Script...')
    print('\n--- SCRIPT START ---')
    for line in script:
        print(line)
    print('--- SCRIPT END ---\n')
else:
    info('Saving Finalised Script To: ' + output)
    # 'with' closes the file automatically; the explicit close() was redundant.
    with open(output, 'w') as out_file:
        for line in script:
            out_file.write(line + '\n')
    good('Finalised Script Saved To: ' + output)
28,403
https://github.com/daniel-kullmann/intellij-scala/blob/master/test/org/jetbrains/plugins/scala/testingSupport/scalatest/scala2_11/scalatest2_2_1/Scalatest2_11_2_2_1_DuplicateConfigTest.scala
Github Open Source
Open Source
Apache-2.0
2,020
intellij-scala
daniel-kullmann
Scala
Code
20
129
package org.jetbrains.plugins.scala.testingSupport.scalatest.scala2_11.scalatest2_2_1 import org.jetbrains.plugins.scala.testingSupport.scalatest.ScalaTestDuplicateConfigTest /** * @author Roman.Shein * @since 22.01.2015 */ class Scalatest2_11_2_2_1_DuplicateConfigTest extends Scalatest2_11_2_2_1_Base with ScalaTestDuplicateConfigTest { }
18,364
https://github.com/dzena/tuwien/blob/master/bakk/OOP WS08/src/aufgabe7/HardDisk.java
Github Open Source
Open Source
Apache-2.0
2,013
tuwien
dzena
Java
Code
128
366
package aufgabe7; public class HardDisk implements StandAloneMemory{ protected float used_memory; protected float max_memory; //initializes a Harddisk with variable size public HardDisk(float memory) { max_memory=memory; } //returns the available memory size (in Bytes) public float available() { return max_memory-used_memory; } //will use some of the free memory size. Should not exceed max available. public void use(float memory) { used_memory += memory; if(used_memory > max_memory) used_memory = max_memory; } //frees memory - cannot free more than used before public void free(float memory) { used_memory -= memory; if(used_memory <= 0) used_memory = 0; } //Multimethods for Handling several Memories with several Drives public void UsedByBRDrive(BRDrive drive) { drive.detectWrongMedia(); } public void UsedByDVDrive(DVDrive drive) { drive.detectWrongMedia(); } public void UsedByUSBDrive(USBDrive drive) { drive.detectWrongMedia(); } public void UsedByUSBPort(USBPort drive) { drive.take(this); } }
48,142
https://github.com/tthoma24/sysadmin-challenge/blob/master/src/scripts/dockerbackups.cron.d
Github Open Source
Open Source
Apache-2.0
2,017
sysadmin-challenge
tthoma24
D
Code
34
76
# # cron.d/dockerbackups -- Periodically backu Apache and MySQL Docker containers to S3 # # Written by tthoma24 9/23/17 # # By default, run at 19:15 everyday 15 19 * * * root /data/scripts/dockerbackups.sh
47,066
https://github.com/BlitzkriegSoftware/CoreTenantHelpers/blob/master/BlitzkriegSoftware.Tenant.Demo.Web/Libs/TypeSwitch.cs
Github Open Source
Open Source
MIT
null
CoreTenantHelpers
BlitzkriegSoftware
C#
Code
267
729
using System; using System.Diagnostics.CodeAnalysis; namespace BlitzkriegSoftware.Tenant.Demo.Web.Libs { /// <summary> /// Case Info /// </summary> public class CaseInfo { /// <summary> /// Gets or sets a value indicating whether is Default Case /// </summary> public bool IsDefault { get; set; } /// <summary> /// Gets or sets type to switch on /// </summary> public Type Target { get; set; } /// <summary> /// Gets or sets thing to do if this case selected /// </summary> public Action<object> Action { get; set; } } /// <summary> /// Provides for switching on type (handy for error handling) /// <para>From: http://stackoverflow.com/questions/11277036/typeswitching-in-c-sharp</para> /// </summary> [ExcludeFromCodeCoverage] public static class TypeSwitch { /// <summary> /// Do the action /// </summary> /// <param name="source">Source of the action</param> /// <param name="cases">Cases</param> public static void Do(object source, params CaseInfo[] cases) { if (source == null) { return; } var type = source.GetType(); foreach (var entry in cases) { if (entry.IsDefault || entry.Target.IsAssignableFrom(type)) { entry.Action(source); break; } } } /// <summary> /// Case Block /// </summary> /// <typeparam name="T">Type</typeparam> /// <param name="action">Action</param> /// <returns>Case Information</returns> public static CaseInfo Case<T>(Action action) { return new CaseInfo() { Action = x => action(), Target = typeof(T), }; } /// <summary> /// Case Block (Generic) /// </summary> /// <typeparam name="T">Type</typeparam> /// <param name="action">Action</param> /// <returns>Case Information</returns> public static CaseInfo Case<T>(Action<T> action) { return new CaseInfo() { Action = (x) => action((T)x), Target = typeof(T), }; } /// <summary> /// Default Case Block /// </summary> /// <param name="action">Action</param> /// <returns>Case Information</returns> public static CaseInfo Default(Action action) { return new CaseInfo() { Action = x => action(), IsDefault = true, }; } } }
26,918
https://github.com/xiaoyangxxx/dream_car/blob/master/HARDWARE/OSAL/OSAL_Error.c
Github Open Source
Open Source
Apache-2.0
null
dream_car
xiaoyangxxx
C
Code
24
128
#include <stdio.h> #include <stdlib.h> #include "OSAL_Error.h" void osal_error_output(char *API,int err_para) { //printf("\n********************************************************\nosal API:<%s> \nosal EC :<%x>\n********************************************************\n", API, err_para); if(err_para >= osal_error_alert_queue_full) return; //exit(1); }
9,317
https://github.com/Encrylize/MyDictionary/blob/master/config.py
Github Open Source
Open Source
MIT
2,015
MyDictionary
Encrylize
Python
Code
98
336
import os basedir = os.path.abspath(os.path.dirname(__file__)) class Config: """ Base configuration with values used in all configurations. """ SERVER_NAME = 'localhost:5000' SECRET_KEY = os.getenv('MYDICTIONARY_SECRET_KEY') SQLALCHEMY_RECORD_QUERIES = True SQLALCHEMY_TRACK_MODIFICATIONS = True WORDS_PER_PAGE = 20 class DevelopmentConfig(Config): """ Development configuration. Activates the debugger and uses the database specified in the DEV_DATABASE_URL environment variable. """ DEBUG = True SQLALCHEMY_DATABASE_URI = os.getenv('MYDICTIONARY_DEV_DATABASE_URL') WORDS_PER_PAGE = 5 class TestingConfig(Config): """ Testing configuration. Sets the testing flag to True and uses the database specified in the TEST_DATABASE_URL environment variable. """ TESTING = True SQLALCHEMY_DATABASE_URI = os.getenv('MYDICTIONARY_TEST_DATABASE_URL') config = { 'development': DevelopmentConfig, 'testing': TestingConfig, 'default': DevelopmentConfig }
4,870
https://github.com/JLimperg/lean/blob/master/src/tests/util/lru_cache.cpp
Github Open Source
Open Source
Apache-2.0
2,022
lean
JLimperg
C++
Code
186
585
/* Copyright (c) 2014 Microsoft Corporation. All rights reserved. Released under Apache 2.0 license as described in the file LICENSE. Author: Leonardo de Moura */ #include "util/test.h" #include "util/lru_cache.h" using namespace lean; static void tst1(int C = 10000) { lru_cache<int> m_cache(C); for (int i = 0; i < 2*C; i++) { lean_verify(m_cache.insert(i) == nullptr); } for (int i = C; i < 2*C; i++) { lean_verify(*m_cache.insert(i) == i); } lean_assert(m_cache.size() == static_cast<unsigned>(C)); for (int i = 0; i < C; i++) { lean_assert(!m_cache.contains(i)); } for (int i = C; i < 2*C; i++) { lean_assert(m_cache.contains(i)); } m_cache.set_capacity(C/2); lean_assert(m_cache.capacity() == static_cast<unsigned>(C/2)); for (int i = C; i < C + C/2; i++) { lean_assert(!m_cache.contains(i)); } for (int i = C + C/2; i < 2*C; i++) { lean_assert(m_cache.contains(i)); } for (int i = C + C/2; i < 2*C; i++) { lean_assert(*m_cache.find(i) == i); m_cache.erase(i); lean_assert(!m_cache.contains(i)); } lean_assert(m_cache.size() == 0); } static void tst2() { lru_cache<int> m_cache(5); for (int i = 0; i < 10; i++) { m_cache.insert(i); } lean_assert(m_cache.size() == 5); m_cache.clear(); lean_assert(m_cache.empty()); } int main() { tst1(); tst2(); return has_violations() ? 1 : 0; }
40,640
https://github.com/silenaker/mfei/blob/master/examples/base/src/components/MFEIView.vue
Github Open Source
Open Source
MIT
2,021
mfei
silenaker
Vue
Code
149
565
<template> <div ref="container" v-loading="loading" class="mfei-container"></div> </template> <script> import { ServiceLoader } from "mfei"; export default { name: "MFEIView", data() { return { loading: false, }; }, mounted() { if (this.$route.meta.url) { this.load(this.$route.meta.url); } }, methods: { load(url) { // init service loader this.loader = new ServiceLoader(url, { container: this.$refs.container, // register custom response handlers responseHandlers: { customize: () => { return 'hello, you received "customize" type response'; }, }, }); this.loader.on("error", (err) => this.$message.error(err.toString())); this.loader.on("loading", () => (this.loading = true)); this.loader.on("load", () => { this.loading = false; // send request messages this.loader .postMessage("hello") .then((data) => this.$message.info(data.msg)); this.loader .postMessage("getSecretInfo", { token: "secret-token-from-host" }) .then((data) => this.$message.info(data.secret)) .catch((err) => this.$message.error(err.toString())); this.loader .postMessage("getToken") .then((data) => this.$message.info(data.token)) .catch((err) => this.$message.error(err.toString())); this.loader .postMessage("customizedTypeRequestHandler") .then((data) => this.$message.info(data)) .catch((err) => this.$message.error(err.toString())); }); }, }, watch: { $route(to) { this.loader && this.loader.close({ immediate: true }); this.load(to.meta.url); }, }, }; </script> <style> .mfei-container { flex: 1; } </style>
1,585
https://github.com/zhbyak/ZRNormalAppFramework/blob/master/我的网易彩票/我的网易彩票/Classes/ZRMoreFeatureCell.m
Github Open Source
Open Source
Apache-2.0
2,017
ZRNormalAppFramework
zhbyak
Objective-C
Code
50
208
// // ZRMoreFeatureCell.m // 我的网易彩票 // // Created by 张 锐 on 15/6/24. // Copyright (c) 2015年 张 锐. All rights reserved. // #import "ZRMoreFeatureCell.h" #import "ZRTabBarController.h" @interface ZRMoreFeatureCell () @property (weak, nonatomic) IBOutlet UIImageView *backImage; @end @implementation ZRMoreFeatureCell -(void)setBackImgName:(NSString *)backImgName{ _backImgName = backImgName; self.backImage.image = [UIImage imageNamed:backImgName]; } @end
8,254
https://github.com/xzmeng/gamehub/blob/master/app/main/views.py
Github Open Source
Open Source
MIT
2,020
gamehub
xzmeng
Python
Code
660
2,918
from datetime import date from flask import render_template, request, abort, redirect, url_for, flash, current_app from flask_login import current_user, login_required from sqlalchemy import cast, Float from ..models import Game, Genre, Comment, GameRating, Cart, Order, OrderItem, WishList # from .cart import Cart, WishList from .forms import CommentForm from . import main from .. import db @main.route('/') def index(): available_games = Game.query.filter_by(enabled=True) carousel_games = available_games.filter_by(is_recommended=True) editor_pick_games = available_games.filter_by(is_editor_picked=True) popular_games = available_games.order_by( (cast(Game.rating_total, Float) / cast(Game.rating_count, Float)).desc() )[:5] latest_games = available_games.order_by(Game.issued_date.desc())[:5] return render_template('index.html', carousel_games=carousel_games, popular_games=popular_games, latest_games=latest_games, editor_pick_games=editor_pick_games) @main.route('/game_list') def game_list(): available_games = Game.query.filter_by(enabled=True) query = available_games genre_name = request.args.get('genre_name') genre = None if genre_name: genre = Genre.query.filter_by(name=genre_name).first() if not genre: abort(404) query = genre.games.filter_by(enabled=True) page = request.args.get('page', 1, type=int) pagination = query.paginate( page, per_page=10, error_out=False ) games = pagination.items genres = Genre.query.all() return render_template('game_list.html', games=games, pagination=pagination, genre=genre, genres=genres, genre_name=genre_name) @main.route('/game_detail/<int:id>', methods=['POST', 'GET']) def game_detail(id): form = CommentForm(request.form) if request.method == 'POST' and form.validate(): if not current_user.is_authenticated: return current_app.login_manager.unauthorized() comment = Comment(game_id=id, user_id=current_user.id, body=form.body.data) db.session.add(comment) db.session.commit() flash('You commented successfully!') game = 
Game.query.get_or_404(id) if not game.enabled: abort(404) print('*' * 30) print(game.rating_count, game.rating_total) comment_query = game.comments page = request.args.get('page', 1, type=int) pagination = comment_query.paginate( page, per_page=10, error_out=False ) comments = pagination.items return render_template('game_detail.html', game=game, photos=game.photos, comments=comments, pagination=pagination, form=form) @main.route('/rating/<int:id>') @login_required def rating(id): game = Game.query.get_or_404(id) if not game.enabled: abort(404) star_num = request.args.get('rating', type=int) if star_num not in [1, 2, 3, 4, 5]: flash('You must choose one of the 5 ranks!') return redirect(url_for('main.game_detail', id=id)) game_rating = current_user.game_ratings.filter_by(game_id=id).first() if game_rating: flash('You have already rated the game!') else: game_rating = GameRating(user_id=current_user.id, game_id=id, rating=star_num * 2) game.rating_count += 1 game.rating_total += star_num * 2 db.session.add(game_rating) db.session.add(game) db.session.commit() flash('You have rated successfully!') return redirect(url_for('main.game_detail', id=id)) @main.route('/search') def search(): keyword = request.args.get('keyword') if not keyword: pass query = Game.query.filter(Game.title.contains(keyword)).filter_by(enabled=True) page = request.args.get('page', 1, type=int) pagination = query.paginate( page, per_page=10, error_out=False ) games = pagination.items return render_template('search.html', games=games, pagination=pagination, keyword=keyword) @main.route('/cart_detail') @login_required def cart_detail(): cart = current_user.cart if not cart: cart = Cart() cart.user = current_user db.session.add(cart) db.session.commit() total_num = cart.games.count() total_price = sum(game.price for game in cart.games) return render_template('cart_detail.html', games=cart.games, total_num=total_num, total_price=total_price) @main.route('/cart_add/<int:id>') @login_required def 
cart_add(id): game = Game.query.get_or_404(id) if not game.enabled: abort(404) cart = current_user.cart if not cart: cart = Cart() cart.user = current_user if game in current_user.games: flash('You have already owned the game!') elif game in cart.games: flash('The game has already been added to your cart!') else: cart.games.append(game) flash('Successfully added game to your cart !') db.session.add(cart) db.session.commit() return redirect(url_for('main.cart_detail')) @main.route('/cart_remove/<int:id>') @login_required def cart_remove(id): game = Game.query.get_or_404(id) cart = current_user.cart if not cart: abort(404) cart.games.remove(game) db.session.add(cart) db.session.commit() flash('Successfully remove the game from your cart!') return redirect(url_for('main.cart_detail')) @main.route('/cart_checkout') @login_required def cart_checkout(): cart = current_user.cart if not cart: abort(404) order = Order(user_id=current_user.id, total_cost=0) for game in cart.games: if not game.enabled: cart.games.remove(game) continue current_user.games.append(game) order_item = OrderItem(game_id=game.id, status='available') order.order_items.append(order_item) order.date = date.today() order.bill_address = current_user.bill_address order.total_cost += game.price cart.games.remove(game) db.session.add(current_user) db.session.add(order) db.session.commit() flash('You\' payed the money successfully!') return redirect(url_for('main.after_cart_checkout')) @main.route('/wishlist_detail') @login_required def wishlist_detail(): wishlist = current_user.wishlist if not wishlist: wishlist = WishList() wishlist.user = current_user db.session.add(wishlist) db.session.commit() total_num = wishlist.games.count() total_price = sum(game.price for game in wishlist.games) return render_template('wishlist_detail.html', games=wishlist.games, total_num=total_num, total_price=total_price) @main.route('/wishlist_add/<int:id>') @login_required def wishlist_add(id): game = Game.query.get_or_404(id) 
if not game.enabled: abort(404) wishlist = current_user.wishlist if not wishlist: wishlist = WishList() wishlist.user = current_user if game in current_user.games: flash('You have already owned the game!') elif game in wishlist.games: flash('The game has already been added to your wishlist!') else: wishlist.games.append(game) flash('Successfully added game to your wishlist !') db.session.add(wishlist) db.session.commit() return redirect(url_for('main.wishlist_detail')) @main.route('/wishlist_remove/<int:id>') @login_required def wishlist_remove(id): game = Game.query.get_or_404(id) wishlist = current_user.wishlist if not wishlist: abort(404) wishlist.games.remove(game) db.session.add(wishlist) db.session.commit() flash('Successfully remove the game from your wishlist!') return redirect(url_for('main.wishlist_detail')) @main.route('/wishlist_checkout') @login_required def wishlist_checkout(): cart = current_user.cart if not cart: cart = Cart() cart.user = current_user wishlist = current_user.wishlist if not wishlist: abort(404) for game in wishlist.games: if not game.enabled: wishlist.games.remove(game) continue if game not in cart.games: cart.games.append(game) wishlist.games.remove(game) db.session.add(current_user) db.session.commit() flash('You\'ve add the games in wishlist to cart successfully!') return redirect(url_for('main.cart_detail')) @main.route('/after_checkout') @login_required def after_cart_checkout(): return render_template('after_cart_checkout.html') @main.errorhandler(404) def page_not_found(e): return render_template('404.html') @main.errorhandler(403) def forbidden(e): return render_template('403.html')
33,145
https://github.com/enhavo/enhavo/blob/master/src/Enhavo/Bundle/PageBundle/Factory/PageFactory.php
Github Open Source
Open Source
MIT
2,023
enhavo
enhavo
PHP
Code
96
421
<?php namespace Enhavo\Bundle\PageBundle\Factory; use Enhavo\Bundle\BlockBundle\Factory\NodeFactory; use Enhavo\Bundle\ContentBundle\Entity\Content; use Enhavo\Bundle\ContentBundle\Factory\ContentFactory; use Enhavo\Bundle\PageBundle\Entity\Page; use Enhavo\Bundle\RoutingBundle\Factory\RouteFactory; class PageFactory extends ContentFactory { /** * @var NodeFactory */ private $nodeFactory; /** * @var RouteFactory */ private $routeFactory; public function __construct($className, NodeFactory $nodeFactory, RouteFactory $routeFactory) { parent::__construct($className); $this->nodeFactory = $nodeFactory; $this->routeFactory = $routeFactory; } /** * @param Content|null $originalResource * @return Page */ public function duplicate($originalResource) { if (!$originalResource) { return null; } /** @var Page $originalResource */ /** @var Page $newPage */ $newPage = parent::duplicate($originalResource); $newPage->setTitle($originalResource->getTitle() . ' (2)'); $newPage->setPublic(false); $newPage->setCode(null); $newPage->setRoute($this->routeFactory->createNew()); $newContainer = $this->nodeFactory->duplicate($originalResource->getContent()); $newPage->setContent($newContainer); $newPage->setParent(null); return $newPage; } }
33,287
https://github.com/scali/agrest/blob/master/agrest-docs/agrest-docs-protocol/src/docs/asciidoc/_protocol/_protocol-extensions/sencha-adapter.adoc
Github Open Source
Open Source
Apache-2.0
2,021
agrest
scali
AsciiDoc
Code
61
115
=== Sencha Adapter Provides a few extensions to the Agrest protocol to better handle certain Sencha features: * If a to-one relationship property is included in the Collection, the framework would also generate a companion "synthetic" FK property called "propertyName_id" * `"filter"` key - an alternative to `"exp"`. * `"group"` / `"groupDir"` keys that are functionally equivalent to `"sort"` / `"dir"`.
16,435
https://github.com/brlrt/Timecrypt/blob/master/Android/app/src/main/kotlin/co/timecrypt/android/helpers/OnMessageChangedListener.kt
Github Open Source
Open Source
Apache-2.0
2,016
Timecrypt
brlrt
Kotlin
Code
50
99
package co.timecrypt.android.helpers /** * A simple listener for Timecrypt message changes. */ interface OnMessageChangedListener { /** * Notifies the listener instance that the text being typed was invalidated (either empty or not empty). * * @param empty Whether the new text is empty or not */ fun onTextInvalidated(empty: Boolean) }
1,351
https://github.com/bcesardev/design-patterns-examples/blob/master/composite/src/Triangle.java
Github Open Source
Open Source
Apache-2.0
null
design-patterns-examples
bcesardev
Java
Code
15
39
public class Triangle implements SceneObject { @Override public void draw() { System.out.println("Drawing triangle"); } }
40,988
https://github.com/ricardo-jrm/nebula/blob/master/src/components/legal/CookiePolicy/index.stories.tsx
Github Open Source
Open Source
MIT
null
nebula
ricardo-jrm
TypeScript
Code
42
99
import React from 'react'; import { Story, Meta } from '@storybook/react'; import { CookiePolicy } from '.'; export default { title: 'Components/Legal/CookiePolicy', component: CookiePolicy, parameters: { componentSubtitle: 'CookiePolicy component', }, } as Meta; export const Default: Story = () => <CookiePolicy />;
26,653
https://github.com/RobertChinem/backend-coding-platform/blob/master/src/code-environments/dto/create-code-environment.dto.ts
Github Open Source
Open Source
MIT
null
backend-coding-platform
RobertChinem
TypeScript
Code
4
9
export class CreateCodeEnvironmentDto {}
24,191
https://github.com/lechium/tvOS144Headers/blob/master/System/Library/PrivateFrameworks/HomeKitDaemon.framework/HMDCameraAccessorySettingsConfiguration.h
Github Open Source
Open Source
MIT
2,021
tvOS144Headers
lechium
C
Code
205
724
/* * This header is generated by classdump-dyld 1.5 * on Wednesday, April 14, 2021 at 2:30:51 PM Mountain Standard Time * Operating System: Version 14.4 (Build 18K802) * Image Source: /System/Library/PrivateFrameworks/HomeKitDaemon.framework/HomeKitDaemon * classdump-dyld is licensed under GPLv3, Copyright © 2013-2016 by Elias Limneos. Updated by Kevin Bradley. */ #import <HMFoundation/HMFObject.h> @class NSMutableArray; @interface HMDCameraAccessorySettingsConfiguration : HMFObject { BOOL _isRecordingCapable; BOOL _isRecordingEnabled; BOOL _isRecordingAudioEnabled; BOOL _inclusionZone; BOOL _smartBulletinBoardNotificationEnabled; BOOL _reachabilityNotificationEnabled; unsigned long long _recordingEventTriggers; unsigned long long _numActivityZones; NSMutableArray* _numPointsInActivityZones; } @property (nonatomic,retain) NSMutableArray * numPointsInActivityZones; //@synthesize numPointsInActivityZones=_numPointsInActivityZones - In the implementation block @property (readonly) BOOL isRecordingCapable; //@synthesize isRecordingCapable=_isRecordingCapable - In the implementation block @property (readonly) BOOL isRecordingEnabled; //@synthesize isRecordingEnabled=_isRecordingEnabled - In the implementation block @property (readonly) BOOL isRecordingAudioEnabled; //@synthesize isRecordingAudioEnabled=_isRecordingAudioEnabled - In the implementation block @property (readonly) unsigned long long recordingEventTriggers; //@synthesize recordingEventTriggers=_recordingEventTriggers - In the implementation block @property (readonly) unsigned long long numActivityZones; //@synthesize numActivityZones=_numActivityZones - In the implementation block @property (readonly) BOOL inclusionZone; //@synthesize inclusionZone=_inclusionZone - In the implementation block @property (readonly) BOOL smartBulletinBoardNotificationEnabled; //@synthesize smartBulletinBoardNotificationEnabled=_smartBulletinBoardNotificationEnabled - In the implementation block @property (readonly) BOOL 
reachabilityNotificationEnabled; //@synthesize reachabilityNotificationEnabled=_reachabilityNotificationEnabled - In the implementation block -(unsigned long long)recordingEventTriggers; -(BOOL)isRecordingAudioEnabled; -(BOOL)isRecordingEnabled; -(id)initWithCameraAccessory:(id)arg1 ; -(unsigned long long)numActivityZones; -(BOOL)isRecordingCapable; -(NSMutableArray *)numPointsInActivityZones; -(void)setNumPointsInActivityZones:(NSMutableArray *)arg1 ; -(BOOL)inclusionZone; -(BOOL)smartBulletinBoardNotificationEnabled; -(BOOL)reachabilityNotificationEnabled; @end
35,769
https://github.com/frenziedtalon/ExchangeSharp/blob/master/src/ExchangeSharpConsole/Options/OrderDetailsOption.cs
Github Open Source
Open Source
MIT
2,022
ExchangeSharp
frenziedtalon
C#
Code
76
226
using System; using System.Threading.Tasks; using CommandLine; using ExchangeSharpConsole.Options.Interfaces; namespace ExchangeSharpConsole.Options { [Verb("order-details", HelpText = "Fetch the order details from the exchange.")] public class OrderDetailsOption : BaseOption, IOptionPerExchange, IOptionPerOrderId, IOptionWithMarketSymbol { public override async Task RunCommand() { using var api = await GetExchangeInstanceAsync(ExchangeName); Authenticate(api); var orderDetails = await api.GetOrderDetailsAsync(OrderId, marketSymbol: MarketSymbol); Console.WriteLine(orderDetails); WaitInteractively(); } public string ExchangeName { get; set; } public string OrderId { get; set; } public string MarketSymbol { get; set; } } }
15,032
https://github.com/embano1/cloud-provider-vsphere/blob/master/pkg/cloudprovider/vsphere/server/client.go
Github Open Source
Open Source
Apache-2.0
null
cloud-provider-vsphere
embano1
Go
Code
75
291
package server import ( "context" "time" "k8s.io/klog" "google.golang.org/grpc" pb "k8s.io/cloud-provider-vsphere/pkg/cloudprovider/vsphere/proto" vcfg "k8s.io/cloud-provider-vsphere/pkg/common/config" ) // NewVSphereCloudProviderClient creates CloudProviderVsphereClient func NewVSphereCloudProviderClient(ctx context.Context) (pb.CloudProviderVsphereClient, error) { var conn *grpc.ClientConn var err error for i := 0; i < RetryAttempts; i++ { conn, err = grpc.Dial(vcfg.DefaultAPIBinding, grpc.WithInsecure()) if err == nil { break } time.Sleep(1 * time.Second) } if err != nil { klog.Errorf("did not connect: %v", err) return nil, err } c := pb.NewCloudProviderVsphereClient(conn) return c, nil }
7,231
https://github.com/sonoble/oom-py-amd64/blob/master/usr/local/oom/test/oomtest.py
Github Open Source
Open Source
MIT
2,016
oom-py-amd64
sonoble
Python
Code
140
297
# ///////////////////////////////////////////////////////////////////// # # oomtest.py : # For each port, # for read keys, functions, write keys # Read each key, read each function, write each write key # # Not checking values, just making sure every key is coded in a # way that doesn't throw an exception. # # Copyright 2015 Finisar Inc. # # Author: Don Bollinger don@thebollingers.or/ # # //////////////////////////////////////////////////////////////////// # from oom import * # the published OOM Northbound API from oom.decode import hexstr # helper function from the decode pack portlist = oom_get_portlist() for port in portlist: for key in port.mmap: test = oom_get_keyvalue(port, key) for key in port.fmap: test = oom_get_memory(port, key) for key in port.wmap: # read current value, write it back val = oom_get_keyvalue(port, key) test = oom_set_keyvalue(port, key, val) print '%d raw memory reads for port %s' % (port.readcount, port.port_name)
24,465
https://github.com/jshpng/elastic-charts/blob/master/src/chart_types/partition_chart/state/selectors/picked_shapes.ts
Github Open Source
Open Source
MIT, ISC, Apache-2.0
2,021
elastic-charts
jshpng
TypeScript
Code
336
897
/* * Licensed to Elasticsearch B.V. under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch B.V. licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import createCachedSelector from 're-reselect'; import { LayerValue } from '../../../../specs'; import { GlobalChartState } from '../../../../state/chart_state'; import { MODEL_KEY } from '../../layout/types/types'; import { QuadViewModel } from '../../layout/types/viewmodel_types'; import { AGGREGATE_KEY, DEPTH_KEY, getNodeName, PARENT_KEY, PATH_KEY, SORT_INDEX_KEY, } from '../../layout/utils/group_by_rollup'; import { partitionGeometries } from './geometries'; function getCurrentPointerPosition(state: GlobalChartState) { return state.interactions.pointer.current.position; } /** @internal */ export const getPickedShapes = createCachedSelector( [partitionGeometries, getCurrentPointerPosition], (geoms, pointerPosition): QuadViewModel[] => { const picker = geoms.pickQuads; const { diskCenter } = geoms; const x = pointerPosition.x - diskCenter.x; const y = pointerPosition.y - diskCenter.y; return picker(x, y); }, )((state) => state.chartId); /** @internal */ export const getPickedShapesLayerValues = createCachedSelector( [getPickedShapes], pickShapesLayerValues, )((state) => state.chartId); /** @internal */ export function pickShapesLayerValues(pickedShapes: QuadViewModel[]): Array<Array<LayerValue>> { 
const maxDepth = pickedShapes.reduce((acc, curr) => Math.max(acc, curr.depth), 0); return pickedShapes .filter(({ depth }) => depth === maxDepth) // eg. lowest layer in a treemap, where layers overlap in screen space; doesn't apply to sunburst/flame .map<Array<LayerValue>>((viewModel) => { const values: Array<LayerValue> = []; values.push({ groupByRollup: viewModel.dataName, value: viewModel[AGGREGATE_KEY], depth: viewModel[DEPTH_KEY], sortIndex: viewModel[SORT_INDEX_KEY], path: viewModel[PATH_KEY], }); let node = viewModel[MODEL_KEY]; while (node[DEPTH_KEY] > 0) { const value = node[AGGREGATE_KEY]; const dataName = getNodeName(node); values.push({ groupByRollup: dataName, value, depth: node[DEPTH_KEY], sortIndex: node[SORT_INDEX_KEY], path: node[PATH_KEY], }); node = node[PARENT_KEY]; } return values.reverse(); }); }
24,266
https://github.com/sudeeptoG/ios-2/blob/master/EvaKit/Core/SearchModels/EVSearchScope.m
Github Open Source
Open Source
MIT
2,016
ios-2
sudeeptoG
Objective-C
Code
42
129
// // EVSearchScope.m // EvaKit // // Created by Yegor Popovych on 8/18/15. // Copyright (c) 2015 Evature. All rights reserved. // #import "EVSearchScope.h" @implementation EVSearchScope + (instancetype)scopeWithContextTypes:(EVSearchContextType)types { EVSearchScope *scope = [[EVSearchScope new] autorelease]; scope->_type = types; return scope; } @end
9,081
https://github.com/motacano/EasyLOB-Chinook/blob/master/Chinook.UnitTest.xUnit/UnitTests/2 - Service/Service.cs
Github Open Source
Open Source
MIT
2,018
EasyLOB-Chinook
motacano
C#
Code
15
47
using Microsoft.Practices.Unity; using Xunit; namespace Chinook.UnitTest.xUnit { public class ServiceTests : BaseTest { } }
4,698