text
stringlengths
1
1.05M
<reponame>mjburling/beneficiary-fhir-data
package gov.cms.bfd.server.war.stu3.providers;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.params.provider.Arguments.arguments;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;

import com.codahale.metrics.MetricRegistry;
import gov.cms.bfd.model.codebook.data.CcwCodebookVariable;
import gov.cms.bfd.model.rif.Beneficiary;
import gov.cms.bfd.model.rif.BeneficiaryHistory;
import gov.cms.bfd.model.rif.MedicareBeneficiaryIdHistory;
import gov.cms.bfd.model.rif.samples.StaticRifResourceGroup;
import gov.cms.bfd.server.war.ServerTestUtils;
import gov.cms.bfd.server.war.adapters.CodeableConcept;
import gov.cms.bfd.server.war.commons.CCWUtils;
import gov.cms.bfd.server.war.commons.IcdCode;
import gov.cms.bfd.server.war.commons.TransformerConstants;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.hl7.fhir.dstu3.model.Coding;
import org.hl7.fhir.dstu3.model.ExplanationOfBenefit;
import org.hl7.fhir.exceptions.FHIRException;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;

/**
 * Unit tests for {@link Stu3EobSamhsaMatcher}. (Note: the original Javadoc self-referentially
 * linked to this test class; the class under test is {@link Stu3EobSamhsaMatcher}.) Integration
 * with {@link ExplanationOfBenefitResourceProvider} is covered by {@link
 * ExplanationOfBenefitResourceProviderIT#searchForEobsWithSamhsaFiltering()} and related
 * integration tests.
 */
public final class Stu3EobSamhsaMatcherTest {
  // TODO complete and verify that these exactly match real values in our DB
  public static final String SAMPLE_SAMHSA_CPT_CODE = "4320F";
  public static final String SAMPLE_SAMHSA_ICD_9_DIAGNOSIS_CODE = "29189";
  public static final String SAMPLE_SAMHSA_ICD_9_PROCEDURE_CODE = "9445";
  public static final String SAMPLE_SAMHSA_ICD_10_DIAGNOSIS_CODE = "F1010";
  public static final String SAMPLE_SAMHSA_ICD_10_PROCEDURE_CODE = "HZ2ZZZZ";
  public static final String SAMPLE_SAMHSA_DRG_CODE = "522";

  /** The CCW variable-reference URL used as the coding system for DRG package codes. */
  private static final String DRG =
      CCWUtils.calculateVariableReferenceUrl(CcwCodebookVariable.CLM_DRG_CD);

  /**
   * @return the parameter sets for {@link #containsOnlyKnownSystemsTest(String, List, boolean,
   *     String)}: a display name, the coding systems to mock, the expected result, and the
   *     assertion failure message
   */
  public static Stream<Arguments> data() {
    final String HCPCS = TransformerConstants.CODING_SYSTEM_HCPCS;
    final String OTHER = "other system";
    return Stream.of(
        arguments(
            "Empty list",
            Collections.emptyList(),
            false,
            "should NOT return true (all known systems), but DID."),
        arguments(
            "HCPCS only systems",
            List.of(HCPCS, HCPCS, HCPCS),
            true,
            "SHOULD return true (all known systems), but did NOT."),
        arguments(
            "Other system only",
            List.of(OTHER, OTHER),
            false,
            "should NOT return true (all known systems), but DID."),
        arguments(
            "HCPCS and other systems",
            List.of(HCPCS, HCPCS, OTHER),
            false,
            "should NOT return true (all known systems), but DID."));
  }

  /**
   * Verifies that {@link Stu3EobSamhsaMatcher#containsOnlyKnownSystems(CodeableConcept)} only
   * returns {@code true} when every {@link Coding} on the concept uses a known coding system.
   *
   * @param name display name for the parameter set
   * @param systems the coding system URIs to place on the mocked concept
   * @param expectedResult the expected return value
   * @param errorMessage the message to show when the assertion fails
   */
  @ParameterizedTest(name = "{index}: {0}")
  @MethodSource("data")
  public void containsOnlyKnownSystemsTest(
      String name, List<String> systems, boolean expectedResult, String errorMessage) {
    Stu3EobSamhsaMatcher matcher = new Stu3EobSamhsaMatcher();

    CodeableConcept mockConcept = mock(CodeableConcept.class);
    List<gov.cms.bfd.server.war.adapters.Coding> codings =
        systems.stream()
            .map(
                system -> {
                  gov.cms.bfd.server.war.adapters.Coding mockCoding =
                      mock(gov.cms.bfd.server.war.adapters.Coding.class);
                  doReturn(system).when(mockCoding).getSystem();
                  return mockCoding;
                })
            .collect(Collectors.toUnmodifiableList());
    doReturn(codings).when(mockConcept).getCoding();

    assertEquals(
        expectedResult, matcher.containsOnlyKnownSystems(mockConcept), name + " " + errorMessage);
  }

  /** Per-claim-type tests of {@link Stu3EobSamhsaMatcher#test(ExplanationOfBenefit)}. */
  @Nested
  public class NonParameterizedTests {
    /**
     * Verifies that {@link Stu3EobSamhsaMatcher#test(ExplanationOfBenefit)} returns <code>false
     * </code> for claims that have no SAMHSA-related codes.
     */
    @Test
    public void nonSamhsaRelatedClaims() {
      Stu3EobSamhsaMatcher matcher = new Stu3EobSamhsaMatcher();

      // Note: none of our SAMPLE_A claims have SAMHSA-related codes (by default).
      List<Object> sampleRifRecords =
          ServerTestUtils.parseData(Arrays.asList(StaticRifResourceGroup.SAMPLE_A.getResources()));
      List<ExplanationOfBenefit> sampleEobs =
          sampleRifRecords.stream()
              .map(
                  r -> {
                    // FIXME remove most `else if`s once filtering fully supports all claim types
                    if (r instanceof Beneficiary) return null;
                    else if (r instanceof BeneficiaryHistory) return null;
                    else if (r instanceof MedicareBeneficiaryIdHistory) return null;

                    return TransformerUtils.transformRifRecordToEob(
                        new MetricRegistry(), r, Optional.empty());
                  })
              .filter(ExplanationOfBenefit.class::isInstance)
              .collect(Collectors.toList());

      for (ExplanationOfBenefit sampleEob : sampleEobs)
        assertFalse(
            matcher.test(sampleEob),
            "Unexpected SAMHSA filtering of EOB: " + sampleEob.getId());
    }

    /**
     * Verifies SAMHSA matching of {@link ClaimType#CARRIER} claims with ICD-9 diagnosis codes.
     *
     * @throws FHIRException (indicates problem with test data)
     */
    @Test
    public void matchCarrierClaimsByIcd9Diagnosis() throws FHIRException {
      assertDiagnosisMatch(
          ClaimType.CARRIER, 0, IcdCode.CODING_SYSTEM_ICD_9, SAMPLE_SAMHSA_ICD_9_DIAGNOSIS_CODE);
    }

    /**
     * Verifies SAMHSA matching of {@link ClaimType#CARRIER} claims with ICD-10 diagnosis codes.
     *
     * @throws FHIRException (indicates problem with test data)
     */
    @Test
    public void matchCarrierClaimsByIcd10Diagnosis() throws FHIRException {
      assertDiagnosisMatch(
          ClaimType.CARRIER, 0, IcdCode.CODING_SYSTEM_ICD_10, SAMPLE_SAMHSA_ICD_10_DIAGNOSIS_CODE);
    }

    /**
     * Verifies SAMHSA matching of {@link ClaimType#CARRIER} claims with CPT procedure codes.
     *
     * @throws FHIRException (indicates problem with test data)
     */
    @Test
    public void matchCarrierClaimsByCptProcedure() throws FHIRException {
      assertCptMatch(ClaimType.CARRIER, SAMPLE_SAMHSA_CPT_CODE);
    }

    /**
     * Verifies SAMHSA matching of {@link ClaimType#CARRIER} claims with newer CPT procedure codes.
     *
     * @throws FHIRException (indicates problem with test data)
     */
    @Test
    public void matchCarrierClaimsByCptProcedureForNewCodes() throws FHIRException {
      String SAMPLE_SAMHSA_CPT_NEW_CODE = "G2067";
      assertCptMatch(ClaimType.CARRIER, SAMPLE_SAMHSA_CPT_NEW_CODE);
    }

    /**
     * Verifies SAMHSA matching of {@link ClaimType#DME} claims with ICD-9 diagnosis codes.
     *
     * @throws FHIRException (indicates problem with test data)
     */
    @Test
    public void matchDmeClaimsByIcd9Diagnosis() throws FHIRException {
      assertDiagnosisMatch(
          ClaimType.DME, 0, IcdCode.CODING_SYSTEM_ICD_9, SAMPLE_SAMHSA_ICD_9_DIAGNOSIS_CODE);
    }

    /**
     * Verifies SAMHSA matching of {@link ClaimType#DME} claims with ICD-10 diagnosis codes.
     *
     * @throws FHIRException (indicates problem with test data)
     */
    @Test
    public void matchDmeClaimsByIcd10Diagnosis() throws FHIRException {
      assertDiagnosisMatch(
          ClaimType.DME, 0, IcdCode.CODING_SYSTEM_ICD_10, SAMPLE_SAMHSA_ICD_10_DIAGNOSIS_CODE);
    }

    /**
     * Verifies SAMHSA matching of {@link ClaimType#DME} claims with CPT procedure codes.
     *
     * @throws FHIRException (indicates problem with test data)
     */
    @Test
    public void matchDmeClaimsByCptProcedure() throws FHIRException {
      assertCptMatch(ClaimType.DME, SAMPLE_SAMHSA_CPT_CODE);
    }

    /**
     * Verifies SAMHSA matching of {@link ClaimType#INPATIENT} claims with ICD-9 diagnosis codes.
     *
     * @throws FHIRException (indicates problem with test data)
     */
    @Test
    public void matchInpatientClaimsByIcd9Diagnosis() throws FHIRException {
      // Index 1: the first inpatient diagnosis slot carries the admitting diagnosis.
      assertDiagnosisMatch(
          ClaimType.INPATIENT, 1, IcdCode.CODING_SYSTEM_ICD_9, SAMPLE_SAMHSA_ICD_9_DIAGNOSIS_CODE);
    }

    /**
     * Verifies SAMHSA matching of {@link ClaimType#INPATIENT} claims with ICD-10 diagnosis codes.
     *
     * @throws FHIRException (indicates problem with test data)
     */
    @Test
    public void matchInpatientClaimsByIcd10Diagnosis() throws FHIRException {
      assertDiagnosisMatch(
          ClaimType.INPATIENT,
          1,
          IcdCode.CODING_SYSTEM_ICD_10,
          SAMPLE_SAMHSA_ICD_10_DIAGNOSIS_CODE);
    }

    /**
     * Verifies SAMHSA matching of {@link ClaimType#INPATIENT} claims with ICD-9 procedure codes.
     *
     * @throws FHIRException (indicates problem with test data)
     */
    @Test
    public void matchInpatientClaimsByIcd9Procedure() throws FHIRException {
      assertProcedureMatch(
          ClaimType.INPATIENT, IcdCode.CODING_SYSTEM_ICD_9, SAMPLE_SAMHSA_ICD_9_PROCEDURE_CODE);
    }

    /**
     * Verifies SAMHSA matching of {@link ClaimType#INPATIENT} claims with ICD-10 procedure codes.
     *
     * @throws FHIRException (indicates problem with test data)
     */
    @Test
    public void matchInpatientClaimsByIcd10Procedure() throws FHIRException {
      assertProcedureMatch(
          ClaimType.INPATIENT, IcdCode.CODING_SYSTEM_ICD_10, SAMPLE_SAMHSA_ICD_10_PROCEDURE_CODE);
    }

    /**
     * Verifies SAMHSA matching of {@link ClaimType#INPATIENT} claims with DRG codes.
     *
     * @throws FHIRException (indicates problem with test data)
     */
    @Test
    public void matchInpatientClaimsByDrg() throws FHIRException {
      assertDrgMatch(ClaimType.INPATIENT);
    }

    /**
     * Verifies SAMHSA matching of {@link ClaimType#OUTPATIENT} claims with ICD-9 diagnosis codes.
     *
     * @throws FHIRException (indicates problem with test data)
     */
    @Test
    public void matchOutpatientClaimsByIcd9Diagnosis() throws FHIRException {
      assertDiagnosisMatch(
          ClaimType.OUTPATIENT, 0, IcdCode.CODING_SYSTEM_ICD_9, SAMPLE_SAMHSA_ICD_9_DIAGNOSIS_CODE);
    }

    /**
     * Verifies SAMHSA matching of {@link ClaimType#OUTPATIENT} claims with ICD-10 diagnosis codes.
     *
     * @throws FHIRException (indicates problem with test data)
     */
    @Test
    public void matchOutpatientClaimsByIcd10Diagnosis() throws FHIRException {
      assertDiagnosisMatch(
          ClaimType.OUTPATIENT,
          0,
          IcdCode.CODING_SYSTEM_ICD_10,
          SAMPLE_SAMHSA_ICD_10_DIAGNOSIS_CODE);
    }

    /**
     * Verifies SAMHSA matching of {@link ClaimType#OUTPATIENT} claims with CPT procedure codes.
     *
     * @throws FHIRException (indicates problem with test data)
     */
    @Test
    public void matchOutpatientClaimsByCptProcedure() throws FHIRException {
      assertCptMatch(ClaimType.OUTPATIENT, SAMPLE_SAMHSA_CPT_CODE);
    }

    /**
     * Verifies SAMHSA matching of {@link ClaimType#OUTPATIENT} claims with ICD-9 procedure codes.
     *
     * @throws FHIRException (indicates problem with test data)
     */
    @Test
    public void matchOutpatientClaimsByIcd9Procedure() throws FHIRException {
      assertProcedureMatch(
          ClaimType.OUTPATIENT, IcdCode.CODING_SYSTEM_ICD_9, SAMPLE_SAMHSA_ICD_9_PROCEDURE_CODE);
    }

    /**
     * Verifies SAMHSA matching of {@link ClaimType#OUTPATIENT} claims with ICD-10 procedure codes.
     *
     * @throws FHIRException (indicates problem with test data)
     */
    @Test
    public void matchOutpatientClaimsByIcd10Procedure() throws FHIRException {
      assertProcedureMatch(
          ClaimType.OUTPATIENT, IcdCode.CODING_SYSTEM_ICD_10, SAMPLE_SAMHSA_ICD_10_PROCEDURE_CODE);
    }

    /**
     * Verifies SAMHSA matching of {@link ClaimType#HHA} claims with ICD-9 diagnosis codes.
     *
     * @throws FHIRException (indicates problem with test data)
     */
    @Test
    public void matchHhaClaimsByIcd9Diagnosis() throws FHIRException {
      assertDiagnosisMatch(
          ClaimType.HHA, 0, IcdCode.CODING_SYSTEM_ICD_9, SAMPLE_SAMHSA_ICD_9_DIAGNOSIS_CODE);
    }

    /**
     * Verifies SAMHSA matching of {@link ClaimType#HHA} claims with ICD-10 diagnosis codes.
     *
     * @throws FHIRException (indicates problem with test data)
     */
    @Test
    public void matchHhaClaimsByIcd10Diagnosis() throws FHIRException {
      assertDiagnosisMatch(
          ClaimType.HHA, 0, IcdCode.CODING_SYSTEM_ICD_10, SAMPLE_SAMHSA_ICD_10_DIAGNOSIS_CODE);
    }

    /**
     * Verifies SAMHSA matching of {@link ClaimType#HHA} claims with CPT procedure codes.
     *
     * @throws FHIRException (indicates problem with test data)
     */
    @Test
    public void matchHhaClaimsByCptProcedure() throws FHIRException {
      assertCptMatch(ClaimType.HHA, SAMPLE_SAMHSA_CPT_CODE);
    }

    /**
     * Verifies SAMHSA matching of {@link ClaimType#HOSPICE} claims with ICD-9 diagnosis codes.
     *
     * @throws FHIRException (indicates problem with test data)
     */
    @Test
    public void matchHospiceClaimsByIcd9Diagnosis() throws FHIRException {
      assertDiagnosisMatch(
          ClaimType.HOSPICE, 0, IcdCode.CODING_SYSTEM_ICD_9, SAMPLE_SAMHSA_ICD_9_DIAGNOSIS_CODE);
    }

    /**
     * Verifies SAMHSA matching of {@link ClaimType#HOSPICE} claims with ICD-10 diagnosis codes.
     *
     * @throws FHIRException (indicates problem with test data)
     */
    @Test
    public void matchHospiceClaimsByIcd10Diagnosis() throws FHIRException {
      assertDiagnosisMatch(
          ClaimType.HOSPICE, 0, IcdCode.CODING_SYSTEM_ICD_10, SAMPLE_SAMHSA_ICD_10_DIAGNOSIS_CODE);
    }

    /**
     * Verifies SAMHSA matching of {@link ClaimType#HOSPICE} claims with CPT procedure codes.
     *
     * @throws FHIRException (indicates problem with test data)
     */
    @Test
    public void matchHospiceClaimsByCptProcedure() throws FHIRException {
      assertCptMatch(ClaimType.HOSPICE, SAMPLE_SAMHSA_CPT_CODE);
    }

    /**
     * Verifies SAMHSA matching of {@link ClaimType#SNF} claims with ICD-9 diagnosis codes.
     *
     * @throws FHIRException (indicates problem with test data)
     */
    @Test
    public void matchSnfClaimsByIcd9Diagnosis() throws FHIRException {
      // Index 1: the first SNF diagnosis slot carries the admitting diagnosis.
      assertDiagnosisMatch(
          ClaimType.SNF, 1, IcdCode.CODING_SYSTEM_ICD_9, SAMPLE_SAMHSA_ICD_9_DIAGNOSIS_CODE);
    }

    /**
     * Verifies SAMHSA matching of {@link ClaimType#SNF} claims with ICD-10 diagnosis codes.
     *
     * @throws FHIRException (indicates problem with test data)
     */
    @Test
    public void matchSnfClaimsByIcd10Diagnosis() throws FHIRException {
      assertDiagnosisMatch(
          ClaimType.SNF, 1, IcdCode.CODING_SYSTEM_ICD_10, SAMPLE_SAMHSA_ICD_10_DIAGNOSIS_CODE);
    }

    /**
     * Verifies SAMHSA matching of {@link ClaimType#SNF} claims with CPT procedure codes.
     *
     * @throws FHIRException (indicates problem with test data)
     */
    @Test
    public void matchSnfClaimsByCptProcedure() throws FHIRException {
      assertCptMatch(ClaimType.SNF, SAMPLE_SAMHSA_CPT_CODE);
    }

    /**
     * Verifies SAMHSA matching of {@link ClaimType#SNF} claims with ICD-9 procedure codes.
     *
     * @throws FHIRException (indicates problem with test data)
     */
    @Test
    public void matchSnfClaimsByIcd9Procedure() throws FHIRException {
      assertProcedureMatch(
          ClaimType.SNF, IcdCode.CODING_SYSTEM_ICD_9, SAMPLE_SAMHSA_ICD_9_PROCEDURE_CODE);
    }

    /**
     * Verifies SAMHSA matching of {@link ClaimType#SNF} claims with ICD-10 procedure codes.
     *
     * @throws FHIRException (indicates problem with test data)
     */
    @Test
    public void matchSnfClaimsByIcd10Procedure() throws FHIRException {
      assertProcedureMatch(
          ClaimType.SNF, IcdCode.CODING_SYSTEM_ICD_10, SAMPLE_SAMHSA_ICD_10_PROCEDURE_CODE);
    }

    /**
     * Verifies SAMHSA matching of {@link ClaimType#SNF} claims with DRG codes.
     *
     * @throws FHIRException (indicates problem with test data)
     */
    @Test
    public void matchSnfClaimsByDrg() throws FHIRException {
      assertDrgMatch(ClaimType.SNF);
    }

    /**
     * Overwrites the diagnosis at {@code diagnosisIndex} on a sample claim of the specified type
     * with the specified SAMHSA code, then asserts that {@link
     * Stu3EobSamhsaMatcher#test(ExplanationOfBenefit)} flags the claim.
     *
     * @param claimType the claim type to build a sample {@link ExplanationOfBenefit} for
     * @param diagnosisIndex the index into {@link ExplanationOfBenefit#getDiagnosis()} to modify
     * @param icdSystem the ICD coding system URI to set
     * @param icdCode the SAMHSA-related diagnosis code to set
     */
    private void assertDiagnosisMatch(
        ClaimType claimType, int diagnosisIndex, String icdSystem, String icdCode) {
      Stu3EobSamhsaMatcher matcher = new Stu3EobSamhsaMatcher();
      ExplanationOfBenefit sampleEob = getSampleAClaim(claimType);

      Coding diagnosisCoding =
          sampleEob
              .getDiagnosis()
              .get(diagnosisIndex)
              .getDiagnosisCodeableConcept()
              .getCodingFirstRep();
      diagnosisCoding.setSystem(icdSystem).setCode(icdCode);

      assertTrue(matcher.test(sampleEob));
    }

    /**
     * Overwrites the first procedure on a sample claim of the specified type with the specified
     * SAMHSA code, then asserts that the matcher flags the claim.
     *
     * @param claimType the claim type to build a sample {@link ExplanationOfBenefit} for
     * @param icdSystem the ICD coding system URI to set
     * @param icdCode the SAMHSA-related procedure code to set
     */
    private void assertProcedureMatch(ClaimType claimType, String icdSystem, String icdCode) {
      Stu3EobSamhsaMatcher matcher = new Stu3EobSamhsaMatcher();
      ExplanationOfBenefit sampleEob = getSampleAClaim(claimType);

      Coding procedureCoding =
          sampleEob.getProcedureFirstRep().getProcedureCodeableConcept().getCodingFirstRep();
      procedureCoding.setSystem(icdSystem).setCode(icdCode);

      assertTrue(matcher.test(sampleEob));
    }

    /**
     * Overwrites the first line item's service (HCPCS/CPT) code on a sample claim of the specified
     * type, then asserts that the matcher flags the claim.
     *
     * @param claimType the claim type to build a sample {@link ExplanationOfBenefit} for
     * @param cptCode the SAMHSA-related CPT code to set
     */
    private void assertCptMatch(ClaimType claimType, String cptCode) {
      Stu3EobSamhsaMatcher matcher = new Stu3EobSamhsaMatcher();
      ExplanationOfBenefit sampleEob = getSampleAClaim(claimType);

      sampleEob.getItemFirstRep().getService().getCodingFirstRep().setCode(cptCode);

      assertTrue(matcher.test(sampleEob));
    }

    /**
     * Adds the sample SAMHSA DRG package code to a sample claim of the specified type, then
     * asserts that the matcher flags the claim.
     *
     * @param claimType the claim type to build a sample {@link ExplanationOfBenefit} for
     */
    private void assertDrgMatch(ClaimType claimType) {
      Stu3EobSamhsaMatcher matcher = new Stu3EobSamhsaMatcher();
      ExplanationOfBenefit sampleEob = getSampleAClaim(claimType);

      sampleEob
          .getDiagnosisFirstRep()
          .getPackageCode()
          .addCoding()
          .setSystem(Stu3EobSamhsaMatcherTest.DRG)
          .setCode(SAMPLE_SAMHSA_DRG_CODE);

      assertTrue(matcher.test(sampleEob));
    }

    /**
     * @param claimType the {@link gov.cms.bfd.server.war.stu3.providers.ClaimType} to get a sample
     *     {@link ExplanationOfBenefit} for
     * @return a sample {@link ExplanationOfBenefit} of the specified {@link
     *     gov.cms.bfd.server.war.stu3.providers.ClaimType} (derived from the {@link
     *     StaticRifResourceGroup#SAMPLE_A} sample RIF records)
     */
    private ExplanationOfBenefit getSampleAClaim(ClaimType claimType) {
      List<Object> sampleRifRecords =
          ServerTestUtils.parseData(Arrays.asList(StaticRifResourceGroup.SAMPLE_A.getResources()));
      Object sampleRifRecordForClaimType =
          sampleRifRecords.stream()
              .filter(claimType.getEntityClass()::isInstance)
              .findFirst()
              .get();
      ExplanationOfBenefit sampleEobForClaimType =
          TransformerUtils.transformRifRecordToEob(
              new MetricRegistry(), sampleRifRecordForClaimType, Optional.empty());
      return sampleEobForClaimType;
    }
  }
}
<filename>fslks/datasets/summarization/medlineplus_reviews.py """ Module to create MedlinePlus PubMed review summarization tensorflow dataset. """ import os import json import tensorflow as tf import tensorflow_datasets.public_api as tfds _DESCRIPTION = """The MedlinePlus multi-document review summarization dataset, built using articles at available in xml at https://medlineplus.gov/xml.html and PubMed review articles listed relevant to the article.""" _CITATION = """This work is 100% plagiarized""" _MEDLINEPLUS_REVIEW_DOWNLOAD_INSTRUCTIONS = """Do stuff here. Or don't. Who cares.""" class MedlineplusReviews(tfds.core.GeneratorBasedBuilder): """MedlinePlus review summarization dataset builder""" VERSION = tfds.core.Version("1.0.0") MANUAL_DOWNLOAD_INSTRUCTIONS = _MEDLINEPLUS_REVIEW_DOWNLOAD_INSTRUCTIONS def _info(self): return tfds.core.DatasetInfo( builder=self, description=_DESCRIPTION, features=tfds.features.FeaturesDict({ 'summary': tfds.features.Text(), 'medlineplus_url': tfds.features.Text(), 'article': tfds.features.Sequence(tfds.features.Text()), 'pmids': tfds.features.Sequence(tfds.features.Text()), }), supervised_keys=('article', 'summary'), citation=_CITATION ) def _split_generators(self, dl_manager): """Returns SplitGenerators.""" path = os.path.join(dl_manager.manual_dir, self.name) return [ tfds.core.SplitGenerator( name=tfds.Split.TRAIN, gen_kwargs={ "path": os.path.join(path, "medlineplus_train_review_collection.json")}), tfds.core.SplitGenerator( name=tfds.Split.VALIDATION, gen_kwargs={ "path": os.path.join(path, "medlineplus_val_review_collection.json")}), tfds.core.SplitGenerator( name=tfds.Split.TEST, gen_kwargs={ "path": os.path.join(path, "medlineplus_test_review_collection.json")}), ] def _generate_examples(self, path): """Parse and yield medlineplus review collection""" with tf.io.gfile.GFile(path) as f: data = json.load(f) for i, url in enumerate(data): summary = data[url]['summary'] articles = [] pmids = [] for pmid in 
data[url]['reviews']: pmids.append(pmid) articles.append(data[url]['reviews'][pmid]) yield i, { 'summary': summary, 'medlineplus_url': url, 'article': articles, 'pmids': pmids, }
/**
 * Converts raw transaction objects into their persistence-model shape.
 */
class ModelConverter {
    /**
     * Maps each transaction to its model representation.
     * @param transactions array of transaction objects
     * @returns array of model objects (currently carrying only `nonce`)
     */
    getTransactionModels(transactions) {
        // Implicit-return arrow with destructuring; same field mapping as a
        // block-bodied map over `transaction.nonce`.
        return transactions.map(({ nonce }) => ({ nonce }));
    }
}

// Example usage
describe("ModelConverter.getTransactionData", () => {
    it("should convert transaction to model and back to data", () => {
        const converter = container.resolve(ModelConverter);
        const [original] = Blocks.BlockFactory.fromData(block1760000).transactions;

        const models = converter.getTransactionModels([original]);
        models[0].nonce = Utils.BigNumber.make("1"); // set_row_nonce trigger

        // Rebuild the transaction data from the model's fields.
        const reconstructed = {
            nonce: models[0].nonce,
        };

        // The round-tripped data should match the source transaction.
        expect(reconstructed).toEqual(original);
    });
});
#!/usr/bin/env bash
# CI entry script: runs one server test suite selected by $SERVER_TEST_TO_RUN.
# Fail fast on errors, unset variables, and pipeline failures.
set -euo pipefail

echo "Running tests on node $CIRCLE_NODE_INDEX of $CIRCLE_NODE_TOTAL"

if [ -z "$SERVER_TEST_TO_RUN" ]; then
  echo 'Please specify $SERVER_TEST_TO_RUN'
  exit 1
else
  echo "Running test $SERVER_TEST_TO_RUN"
fi

### Functions

# Kill every spawned background service (HGE servers, webhooks, graphql server)
# and wait for them; `|| true` keeps cleanup from tripping `set -e`.
stop_services() {
  echo "killing and waiting for spawned services"
  [ -n "$HGE_PIDS" ] && kill -s INT $HGE_PIDS || true
  [ -n "$WH_PID" ] && kill $WH_PID || true
  [ -n "$WHC_PID" ] && kill $WHC_PID || true
  [ -n "$GQL_SERVER_PID" ] && kill $GQL_SERVER_PID || true
  wait $HGE_PIDS $WH_PID $WHC_PID $GQL_SERVER_PID || true
}

# Prints elapsed wall-clock time as "(MM:SS)" using bash's $SECONDS counter.
time_elapsed() {
  printf "(%02d:%02d)" $((SECONDS / 60)) $((SECONDS % 60))
}

# Abort the job if something is already listening on the given local port.
fail_if_port_busy() {
  local PORT=$1
  if nc -z localhost $PORT; then
    echo "Port $PORT is busy. Exiting"
    exit 1
  fi
}

# Polls a local port for up to ~15s (60 x 0.25s); exits the script on timeout.
wait_for_port() {
  local PORT=$1
  echo "waiting for $PORT"
  for _ in $(seq 1 60); do
    nc -z localhost $PORT && echo "port $PORT is ready" && return
    echo -n .
    sleep 0.25
  done
  echo "Failed waiting for $PORT" && exit 1
}

# Generates RSA and Ed25519 JWT key pairs under $OUTPUT_FOLDER/ssl.
init_jwt() {
  CUR_DIR="$PWD"
  mkdir -p "$OUTPUT_FOLDER/ssl"
  cd "$OUTPUT_FOLDER/ssl"
  openssl genrsa -out jwt_private.key 2048
  openssl rsa -pubout -in jwt_private.key -out jwt_public.key
  openssl genpkey -algorithm ed25519 -outform PEM -out ed25519_jwt_private.key
  openssl pkey -pubout -in ed25519_jwt_private.key -out ed25519_jwt_public.key
  cd "$CUR_DIR"
}

# init_hge_and_test_jwt function will run the hge server using the environment varibles and run the pytest which is sent as argument
# The first argument is the relative path of the jwt-key-file. the jwt-key-file can be RSA or EdDSA
# The second argument is the test to run, eg. test_jwt_claims_map.py::TestJWTClaimsMapBasic, test_jwt.py, etc.
init_hge_and_test_jwt() {
  run_hge_with_args serve
  wait_for_port 8080
  pytest -n 1 --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --hge-jwt-key-file="$OUTPUT_FOLDER/$1" --hge-jwt-conf="$HASURA_GRAPHQL_JWT_SECRET" $2
  kill_hge_servers
}

# Creates a local CA plus a webhook server certificate (SAN: localhost /
# 127.0.0.1) and installs the CA into the system trust store.
init_ssl() {
  CUR_DIR="$PWD"
  mkdir -p "$OUTPUT_FOLDER/ssl"
  cd "$OUTPUT_FOLDER/ssl"
  CNF_TEMPLATE='[req]
req_extensions = v3_req
distinguished_name = req_distinguished_name

[req_distinguished_name]

[ v3_req ]
basicConstraints = CA:FALSE
keyUsage = nonRepudiation, digitalSignature, keyEncipherment
subjectAltName = @alt_names

[alt_names]
DNS.1 = localhost
IP.1 = 127.0.0.1'
  echo "$CNF_TEMPLATE" >webhook-req.cnf
  openssl genrsa -out ca-key.pem 2048
  openssl req -x509 -new -nodes -key ca-key.pem -days 10 -out ca.pem -subj "/CN=webhook-ca"
  openssl genrsa -out webhook-key.pem 2048
  openssl req -new -key webhook-key.pem -out webhook.csr -subj "/CN=hge-webhook" -config webhook-req.cnf
  openssl x509 -req -in webhook.csr -CA ca.pem -CAkey ca-key.pem -CAcreateserial -out webhook.pem -days 10 -extensions v3_req -extfile webhook-req.cnf
  # Trust our CA system-wide so HGE accepts the webhook's TLS certificate.
  cp ca.pem /etc/ssl/certs/webhook.crt
  update-ca-certificates
  cd "$CUR_DIR"
}

# Webhook suites install a CA certificate, which needs root privileges.
webhook_tests_check_root() {
  if [ $EUID != 0 ]; then
    echo -e "webhook tests require root (in order to trust certificate authority)."
    exit 1
  fi
}

# Interrupt and reap all running HGE server processes, then reset the PID list.
kill_hge_servers() {
  kill -s INT $HGE_PIDS || true
  wait $HGE_PIDS || true
  HGE_PIDS=""
}

HGE_INDEX=1
# Launches graphql-engine in the background with the given arguments; each
# launch gets its own numbered log file and its PID is appended to HGE_PIDS.
run_hge_with_args() {
  i=$((HGE_INDEX++))
  set -x
  "$GRAPHQL_ENGINE" "$@" 2>&1 >"$OUTPUT_FOLDER/graphql-engine-${i}.log" &
  HGE_PIDS="$HGE_PIDS $!"
  set +x
}

# Starts one HGE on 8080 and, when a second database URL is configured,
# a second HGE on 8081; waits for both ports.
start_multiple_hge_servers() {
  run_hge_with_args --database-url "$HASURA_GRAPHQL_DATABASE_URL" serve
  if [ -n "${HASURA_GRAPHQL_DATABASE_URL_2:-}" ]; then
    run_hge_with_args --database-url "$HASURA_GRAPHQL_DATABASE_URL_2" serve --server-port 8081
    wait_for_port 8081
  fi
  wait_for_port 8080
}

source_data_sources_utils() {
  # Only source this file in the $SERVER_TEST_TO_RUN case branch it's used,
  # to avoid sourcing for every server job & test.
  # https://github.com/hasura/graphql-engine-mono/pull/1526#discussion_r661411538
  SCRIPTS_SOURCE=$CIRCLECI_FOLDER/../scripts
  source "$SCRIPTS_SOURCE/data-sources-util.sh"
}

# Required environment: both primary and secondary database URLs must be set.
if [ -z "${HASURA_GRAPHQL_DATABASE_URL:-}" ]; then
  echo "Env var HASURA_GRAPHQL_DATABASE_URL is not set"
  exit 1
fi

if [ -z "${HASURA_GRAPHQL_DATABASE_URL_2:-}" ]; then
  echo "Env var HASURA_GRAPHQL_DATABASE_URL_2 is not set"
  exit 1
fi

# Resolve this script's directory to an absolute path.
CIRCLECI_FOLDER="${BASH_SOURCE[0]%/*}"
cd $CIRCLECI_FOLDER
CIRCLECI_FOLDER="$PWD"

PYTEST_ROOT="$CIRCLECI_FOLDER/../server/tests-py"

OUTPUT_FOLDER=${OUTPUT_FOLDER:-"$CIRCLECI_FOLDER/test-server-output"}
mkdir -p "$OUTPUT_FOLDER"

cd $PYTEST_ROOT

# Make sure none of the ports the suites use are already taken.
for port in 8080 8081 9876 5592 5000 5001 5594; do
  fail_if_port_busy $port
done

echo -e "\nINFO: GraphQL Executable : $GRAPHQL_ENGINE"
echo -e "INFO: Logs Folder : $OUTPUT_FOLDER\n"

# This seems to flake out relatively often; try a mirror if so.
# Might also need to disable ipv6 or use a longer --timeout
# cryptography 3.4.7 version requires Rust dependencies by default.
# But we don't need them for our tests, hence disabling them via the following env var => https://stackoverflow.com/a/66334084
export CRYPTOGRAPHY_DONT_BUILD_RUST=1
# Install python test dependencies; fall back to a DigitalOcean PyPI mirror if the default index flakes.
pip3 install -r requirements.txt || pip3 install -i http://mirrors.digitalocean.com/pypi/web/simple --trusted-host mirrors.digitalocean.com -r requirements.txt
# Install node deps for the remote-schema test server (subshell keeps CWD unchanged).
(cd remote_schemas/nodejs && npm_config_loglevel=error npm ci)

export EVENT_WEBHOOK_HEADER="MyEnvValue"

export HGE_URL="http://localhost:8080"
export HGE_URL_2=""
if [ -n "${HASURA_GRAPHQL_DATABASE_URL_2:-}" ]; then
  HGE_URL_2="http://localhost:8081"
fi
export WEBHOOK_FROM_ENV="http://127.0.0.1:5592"
export SCHEDULED_TRIGGERS_WEBHOOK_DOMAIN="http://127.0.0.1:5594"
export HASURA_GRAPHQL_STRINGIFY_NUMERIC_TYPES=true
export REMOTE_SCHEMAS_WEBHOOK_DOMAIN="http://127.0.0.1:5000"

# PID bookkeeping for the cleanup trap; empty means "nothing spawned yet".
HGE_PIDS=""
WH_PID=""
WHC_PID=""
GQL_SERVER_PID=""

# Always tear down spawned services, whether we exit normally or on error/interrupt.
trap stop_services ERR
trap stop_services INT
trap stop_services EXIT

# Runs pytest against both HGE instances in parallel (loadfile distribution)
# when a second database is configured, otherwise against the single instance.
run_pytest_parallel() {
  trap stop_services ERR
  if [ -n "${HASURA_GRAPHQL_DATABASE_URL_2:-}" ]; then
    set -x
    pytest --hge-urls "$HGE_URL" "${HGE_URL_2:-}" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" "${HASURA_GRAPHQL_DATABASE_URL_2:-}" -n 2 --dist=loadfile "$@"
    set +x
  else
    set -x
    pytest --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" -n 1 "$@"
    set +x
  fi
}

# Dispatch on the suite requested by CI; each branch starts the servers it
# needs, runs the relevant pytest selection, and shuts the servers down.
case "$SERVER_TEST_TO_RUN" in
haskell-tests)
  echo -e "\n$(time_elapsed): <########## RUN GRAPHQL-ENGINE HASKELL TESTS ###########################################>\n"
  "${GRAPHQL_ENGINE_TESTS:?}" postgres
  ;;

no-auth)
  echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITHOUT ADMIN SECRET ###########################################>\n"
  start_multiple_hge_servers
  run_pytest_parallel
  kill_hge_servers
  ;;

admin-secret)
  echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH ADMIN SECRET #####################################>\n"
  # Random throwaway secret; only needs to be consistent within this run.
  export HASURA_GRAPHQL_ADMIN_SECRET="HGE$RANDOM$RANDOM"
  start_multiple_hge_servers
  run_pytest_parallel --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET"
  kill_hge_servers
  ;;
admin-secret-unauthorized-role) echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH ADMIN SECRET AND UNAUTHORIZED ROLE #####################################>\n" export HASURA_GRAPHQL_ADMIN_SECRET="HGE$RANDOM$RANDOM" export HASURA_GRAPHQL_UNAUTHORIZED_ROLE="anonymous" run_hge_with_args serve wait_for_port 8080 pytest -n 1 --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --test-unauthorized-role test_graphql_queries.py::TestUnauthorizedRolePermission kill_hge_servers #unset HASURA_GRAPHQL_UNAUTHORIZED_ROLE ;; jwt-rs512) echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH ADMIN SECRET AND JWT (RS512) #####################################>\n" init_jwt export HASURA_GRAPHQL_ADMIN_SECRET="HGE$RANDOM$RANDOM" export HASURA_GRAPHQL_JWT_SECRET="$(jq -n --arg key "$(cat $OUTPUT_FOLDER/ssl/jwt_public.key)" '{ type: "RS512", key: $key }')" start_multiple_hge_servers run_pytest_parallel --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --hge-jwt-key-file="$OUTPUT_FOLDER/ssl/jwt_private.key" --hge-jwt-conf="$HASURA_GRAPHQL_JWT_SECRET" kill_hge_servers #unset HASURA_GRAPHQL_JWT_SECRET ;; jwt-ed25519) echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH ADMIN SECRET AND JWT (Ed25519) #####################################>\n" init_jwt export HASURA_GRAPHQL_ADMIN_SECRET="HGE$RANDOM$RANDOM" export HASURA_GRAPHQL_JWT_SECRET="$(jq -n --arg key "$(cat $OUTPUT_FOLDER/ssl/ed25519_jwt_public.key)" '{ type: "Ed25519", key: $key }')" start_multiple_hge_servers run_pytest_parallel --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --hge-jwt-key-file="$OUTPUT_FOLDER/ssl/ed25519_jwt_private.key" --hge-jwt-conf="$HASURA_GRAPHQL_JWT_SECRET" kill_hge_servers #unset HASURA_GRAPHQL_JWT_SECRET ;; jwt-stringified) echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH ADMIN SECRET AND JWT (in stringified mode) #####################################>\n" init_jwt export HASURA_GRAPHQL_ADMIN_SECRET="HGE$RANDOM$RANDOM" 
export HASURA_GRAPHQL_JWT_SECRET="$(jq -n --arg key "$(cat $OUTPUT_FOLDER/ssl/jwt_public.key)" '{ type: "RS512", key: $key , claims_format: "stringified_json"}')" init_hge_and_test_jwt "ssl/jwt_private.key" test_jwt.py export HASURA_GRAPHQL_JWT_SECRET="$(jq -n --arg key "$(cat $OUTPUT_FOLDER/ssl/ed25519_jwt_public.key)" '{ type: "Ed25519", key: $key , claims_format: "stringified_json"}')" init_hge_and_test_jwt "ssl/ed25519_jwt_private.key" test_jwt.py # unset HASURA_GRAPHQL_JWT_SECRET ;; jwt-audience-check-single-string) echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH ADMIN SECRET AND JWT (with audience check - string) #####################################>\n" init_jwt export HASURA_GRAPHQL_ADMIN_SECRET="HGE$RANDOM$RANDOM" export HASURA_GRAPHQL_JWT_SECRET="$(jq -n --arg key "$(cat $OUTPUT_FOLDER/ssl/jwt_public.key)" '{ type: "RS512", key: $key , audience: "myapp-1234"}')" init_hge_and_test_jwt "ssl/jwt_private.key" test_jwt.py export HASURA_GRAPHQL_JWT_SECRET="$(jq -n --arg key "$(cat $OUTPUT_FOLDER/ssl/ed25519_jwt_public.key)" '{ type: "Ed25519", key: $key , audience: "myapp-1234"}')" init_hge_and_test_jwt "ssl/ed25519_jwt_private.key" test_jwt.py #unset HASURA_GRAPHQL_JWT_SECRET ;; jwt-audience-check-list-string) echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH ADMIN SECRET AND JWT (with audience check - list of strings) #################################>\n" init_jwt export HASURA_GRAPHQL_JWT_SECRET="$(jq -n --arg key "$(cat $OUTPUT_FOLDER/ssl/jwt_public.key)" '{ type: "RS512", key: $key , audience: ["myapp-1234", "myapp-9876"]}')" export HASURA_GRAPHQL_ADMIN_SECRET="HGE$RANDOM$RANDOM" init_hge_and_test_jwt "ssl/jwt_private.key" test_jwt.py export HASURA_GRAPHQL_JWT_SECRET="$(jq -n --arg key "$(cat $OUTPUT_FOLDER/ssl/ed25519_jwt_public.key)" '{ type: "Ed25519", key: $key , audience: ["myapp-1234", "myapp-9876"]}')" init_hge_and_test_jwt "ssl/ed25519_jwt_private.key" test_jwt.py unset HASURA_GRAPHQL_JWT_SECRET ;; jwt-issuer-check) 
echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH ADMIN SECRET AND JWT (with issuer check) #####################################>\n" init_jwt export HASURA_GRAPHQL_ADMIN_SECRET="HGE$RANDOM$RANDOM" export HASURA_GRAPHQL_JWT_SECRET="$(jq -n --arg key "$(cat $OUTPUT_FOLDER/ssl/jwt_public.key)" '{ type: "RS512", key: $key , issuer: "https://hasura.com"}')" init_hge_and_test_jwt "ssl/jwt_private.key" test_jwt.py export HASURA_GRAPHQL_JWT_SECRET="$(jq -n --arg key "$(cat $OUTPUT_FOLDER/ssl/ed25519_jwt_public.key)" '{ type: "Ed25519", key: $key , issuer: "https://hasura.com"}')" init_hge_and_test_jwt "ssl/ed25519_jwt_private.key" test_jwt.py unset HASURA_GRAPHQL_JWT_SECRET ;; jwt-with-claims-namespace-path) ########## # TODO(swann): should these not be run in parallel? echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH ADMIN SECRET AND JWT (with claims_namespace_path) #####################################>\n" init_jwt # hasura claims at one level of nesting export HASURA_GRAPHQL_JWT_SECRET="$(jq -n --arg key "$(cat $OUTPUT_FOLDER/ssl/jwt_public.key)" '{ type: "RS512", key: $key , claims_namespace_path: "$.hasura_claims"}')" export HASURA_GRAPHQL_ADMIN_SECRET="HGE$RANDOM$RANDOM" init_hge_and_test_jwt "ssl/jwt_private.key" test_jwt.py export HASURA_GRAPHQL_JWT_SECRET="$(jq -n --arg key "$(cat $OUTPUT_FOLDER/ssl/ed25519_jwt_public.key)" '{ type: "Ed25519", key: $key , claims_namespace_path: "$.hasura_claims"}')" init_hge_and_test_jwt "ssl/ed25519_jwt_private.key" test_jwt.py unset HASURA_GRAPHQL_JWT_SECRET # hasura claims at two levels of nesting with claims_namespace_path containing special character export HASURA_GRAPHQL_JWT_SECRET="$(jq -n --arg key "$(cat $OUTPUT_FOLDER/ssl/jwt_public.key)" '{ type: "RS512", key: $key , claims_namespace_path: "$.hasura['\''claims%'\'']"}')" export HASURA_GRAPHQL_ADMIN_SECRET="HGE$RANDOM$RANDOM" init_hge_and_test_jwt "ssl/jwt_private.key" test_jwt.py export HASURA_GRAPHQL_JWT_SECRET="$(jq -n --arg key "$(cat 
$OUTPUT_FOLDER/ssl/ed25519_jwt_public.key)" '{ type: "Ed25519", key: $key , claims_namespace_path: "$.hasura['\''claims%'\'']"}')" init_hge_and_test_jwt "ssl/ed25519_jwt_private.key" test_jwt.py unset HASURA_GRAPHQL_JWT_SECRET # hasura claims at the root of the JWT token export HASURA_GRAPHQL_JWT_SECRET="$(jq -n --arg key "$(cat $OUTPUT_FOLDER/ssl/jwt_public.key)" '{ type: "RS512", key: $key , claims_namespace_path: "$"}')" export HASURA_GRAPHQL_ADMIN_SECRET="HGE$RANDOM$RANDOM" init_hge_and_test_jwt "ssl/jwt_private.key" test_jwt.py export HASURA_GRAPHQL_JWT_SECRET="$(jq -n --arg key "$(cat $OUTPUT_FOLDER/ssl/ed25519_jwt_public.key)" '{ type: "Ed25519", key: $key , claims_namespace_path: "$"}')" init_hge_and_test_jwt "ssl/ed25519_jwt_private.key" test_jwt.py unset HASURA_GRAPHQL_JWT_SECRET ;; jwt-claims-map-with-json-path-values) # test JWT with Claims map echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH ADMIN SECRET AND JWT (with claims_map and values are json path) #####################################>\n" init_jwt export HASURA_GRAPHQL_JWT_SECRET="$(jq -n --arg key "$(cat $OUTPUT_FOLDER/ssl/jwt_public.key)" '{ type: "RS512", key: $key , claims_map: {"x-hasura-user-id": {"path":"$.['"'"'https://myapp.com/jwt/claims'"'"'].user.id"}, "x-hasura-allowed-roles": {"path":"$.['"'"'https://myapp.com/jwt/claims'"'"'].role.allowed"}, "x-hasura-default-role": {"path":"$.['"'"'https://myapp.com/jwt/claims'"'"'].role.default"}}}')" export HASURA_GRAPHQL_ADMIN_SECRET="HGE$RANDOM$RANDOM" init_hge_and_test_jwt "ssl/jwt_private.key" test_jwt_claims_map.py::TestJWTClaimsMapBasic export HASURA_GRAPHQL_JWT_SECRET="$(jq -n --arg key "$(cat $OUTPUT_FOLDER/ssl/ed25519_jwt_public.key)" '{ type: "Ed25519", key: $key , claims_map: {"x-hasura-user-id": {"path":"$.['"'"'https://myapp.com/jwt/claims'"'"'].user.id"}, "x-hasura-allowed-roles": {"path":"$.['"'"'https://myapp.com/jwt/claims'"'"'].role.allowed"}, "x-hasura-default-role": 
{"path":"$.['"'"'https://myapp.com/jwt/claims'"'"'].role.default"}}}')" init_hge_and_test_jwt "ssl/ed25519_jwt_private.key" test_jwt_claims_map.py::TestJWTClaimsMapBasic unset HASURA_GRAPHQL_JWT_SECRET echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH ADMIN SECRET AND JWT (with claims_map and values are json path with default values set) #####################################>\n" export HASURA_GRAPHQL_JWT_SECRET="$(jq -n --arg key "$(cat $OUTPUT_FOLDER/ssl/jwt_public.key)" '{ type: "RS512", key: $key , claims_map: {"x-hasura-user-id": {"path":"$.['"'"'https://myapp.com/jwt/claims'"'"'].user.id", "default":"1"}, "x-hasura-allowed-roles": {"path":"$.['"'"'https://myapp.com/jwt/claims'"'"'].role.allowed", "default":["user","editor"]}, "x-hasura-default-role": {"path":"$.['"'"'https://myapp.com/jwt/claims'"'"'].role.default","default":"user"}}}')" export HASURA_GRAPHQL_ADMIN_SECRET="HGE$RANDOM$RANDOM" init_hge_and_test_jwt "ssl/jwt_private.key" test_jwt_claims_map.py::TestJWTClaimsMapBasic export HASURA_GRAPHQL_JWT_SECRET="$(jq -n --arg key "$(cat $OUTPUT_FOLDER/ssl/ed25519_jwt_public.key)" '{ type: "Ed25519", key: $key , claims_map: {"x-hasura-user-id": {"path":"$.['"'"'https://myapp.com/jwt/claims'"'"'].user.id", "default":"1"}, "x-hasura-allowed-roles": {"path":"$.['"'"'https://myapp.com/jwt/claims'"'"'].role.allowed", "default":["user","editor"]}, "x-hasura-default-role": {"path":"$.['"'"'https://myapp.com/jwt/claims'"'"'].role.default","default":"user"}}}')" init_hge_and_test_jwt "ssl/ed25519_jwt_private.key" test_jwt_claims_map.py::TestJWTClaimsMapBasic unset HASURA_GRAPHQL_JWT_SECRET ;; jwt-with-expiry-time-leeway) echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH ADMIN SECRET AND JWT (with JWT config allowing for leeway) #####################################>\n" init_jwt export HASURA_GRAPHQL_JWT_SECRET="$(jq -n --arg key "$(cat $OUTPUT_FOLDER/ssl/jwt_public.key)" '{ type: "RS512", key: $key , allowed_skew: 60}')" export 
HASURA_GRAPHQL_ADMIN_SECRET="HGE$RANDOM$RANDOM" init_hge_and_test_jwt "ssl/jwt_private.key" test_jwt.py::TestJWTExpirySkew export HASURA_GRAPHQL_JWT_SECRET="$(jq -n --arg key "$(cat $OUTPUT_FOLDER/ssl/ed25519_jwt_public.key)" '{ type: "Ed25519", key: $key , allowed_skew: 60}')" init_hge_and_test_jwt "ssl/ed25519_jwt_private.key" test_jwt.py::TestJWTExpirySkew unset HASURA_GRAPHQL_JWT_SECRET ;; jwt-claims-map-with-literal-values) echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH ADMIN SECRET AND JWT (with claims_map and values are literal values) #####################################>\n" init_jwt export HASURA_GRAPHQL_JWT_SECRET="$(jq -n --arg key "$(cat $OUTPUT_FOLDER/ssl/jwt_public.key)" '{ type: "RS512", key: $key , claims_map: {"x-hasura-user-id": {"path":"$.['"'"'https://myapp.com/jwt/claims'"'"'].user.id"}, "x-hasura-allowed-roles": ["user","editor"], "x-hasura-default-role": "user","x-hasura-custom-header":"custom-value"}}')" export HASURA_GRAPHQL_ADMIN_SECRET="HGE$RANDOM$RANDOM" init_hge_and_test_jwt "ssl/jwt_private.key" test_jwt_claims_map.py::TestJWTClaimsMapWithStaticHasuraClaimsMapValues export HASURA_GRAPHQL_JWT_SECRET="$(jq -n --arg key "$(cat $OUTPUT_FOLDER/ssl/ed25519_jwt_public.key)" '{ type: "Ed25519", key: $key , claims_map: {"x-hasura-user-id": {"path":"$.['"'"'https://myapp.com/jwt/claims'"'"'].user.id"}, "x-hasura-allowed-roles": ["user","editor"], "x-hasura-default-role": "user","x-hasura-custom-header":"custom-value"}}')" init_hge_and_test_jwt "ssl/ed25519_jwt_private.key" test_jwt_claims_map.py::TestJWTClaimsMapWithStaticHasuraClaimsMapValues unset HASURA_GRAPHQL_JWT_SECRET ;; jwt-cookie) echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH ADMIN SECRET AND JWT (in cookie mode) #####################################>\n" init_jwt export HASURA_GRAPHQL_JWT_SECRET="$(jq -n --arg key "$(cat $OUTPUT_FOLDER/ssl/jwt_public.key)" '{ type: "RS512", key: $key , header: {"type": "Cookie", "name": "hasura_user"}}')" export 
HASURA_GRAPHQL_ADMIN_SECRET="HGE$RANDOM$RANDOM" init_hge_and_test_jwt "ssl/jwt_private.key" test_jwt.py export HASURA_GRAPHQL_JWT_SECRET="$(jq -n --arg key "$(cat $OUTPUT_FOLDER/ssl/ed25519_jwt_public.key)" '{ type: "Ed25519", key: $key , header: {"type": "Cookie", "name": "hasura_user"}}')" init_hge_and_test_jwt "ssl/ed25519_jwt_private.key" test_jwt.py unset HASURA_GRAPHQL_JWT_SECRET ;; jwt-cookie-unauthorized-role) echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH JWT (in cookie mode) AND UNAUTHORIZED ROLE #####################################>\n" init_jwt export HASURA_GRAPHQL_JWT_SECRET="$(jq -n --arg key "$(cat $OUTPUT_FOLDER/ssl/jwt_public.key)" '{ type: "RS512", key: $key , header: {"type": "Cookie", "name": "hasura_user"}}')" export HASURA_GRAPHQL_ADMIN_SECRET="HGE$RANDOM$RANDOM" export HASURA_GRAPHQL_UNAUTHORIZED_ROLE="anonymous" run_hge_with_args serve wait_for_port 8080 pytest -n 1 --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --hge-jwt-key-file="$OUTPUT_FOLDER/ssl/jwt_private.key" --hge-jwt-conf="$HASURA_GRAPHQL_JWT_SECRET" --test-unauthorized-role test_graphql_queries.py::TestFallbackUnauthorizedRoleCookie kill_hge_servers unset HASURA_GRAPHQL_UNAUTHORIZED_ROLE run_hge_with_args serve wait_for_port 8080 pytest -n 1 --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --hge-jwt-key-file="$OUTPUT_FOLDER/ssl/jwt_private.key" --hge-jwt-conf="$HASURA_GRAPHQL_JWT_SECRET" --test-no-cookie-and-unauth-role test_graphql_queries.py::TestMissingUnauthorizedRoleAndCookie kill_hge_servers unset HASURA_GRAPHQL_JWT_SECRET ;; # test with CORS modes cors-domains) echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH CORS DOMAINS ########>\n" export HASURA_GRAPHQL_CORS_DOMAIN="http://*.localhost, http://localhost:3000, https://*.foo.bar.com" export HASURA_GRAPHQL_ADMIN_SECRET="HGE$RANDOM$RANDOM" run_hge_with_args serve wait_for_port 8080 
pytest -n 1 --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --test-cors test_cors.py kill_hge_servers unset HASURA_GRAPHQL_CORS_DOMAIN ;; auth-webhook-cookie) # test auth webhook set-cookie forwarding on response echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH SET-COOKIE HEADER IN AUTH WEBHOOK ########>\n" export HASURA_GRAPHQL_AUTH_HOOK="http://localhost:9876/auth" export HASURA_GRAPHQL_ADMIN_SECRET="HGE$RANDOM$RANDOM" python3 auth_webhook_server.py >"$OUTPUT_FOLDER/cookie_webhook.log" 2>&1 & WHC_PID=$! wait_for_port 9876 run_hge_with_args serve wait_for_port 8080 pytest -n 1 --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --hge-webhook="$HASURA_GRAPHQL_AUTH_HOOK" --test-auth-webhook-header test_auth_webhook_cookie.py kill_hge_servers ;; ws-init-cookie-read-cors-enabled) # test websocket transport with initial cookie header echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH COOKIE IN WEBSOCKET INIT ########>\n" export HASURA_GRAPHQL_AUTH_HOOK="http://localhost:9876/auth" export HASURA_GRAPHQL_AUTH_HOOK_MODE="POST" export HASURA_GRAPHQL_ADMIN_SECRET="HGE$RANDOM$RANDOM" python3 auth_webhook_server.py >"$OUTPUT_FOLDER/cookie_webhook.log" 2>&1 & WHC_PID=$! 
wait_for_port 9876 run_hge_with_args serve wait_for_port 8080 echo "$(time_elapsed): testcase 1: read cookie, cors enabled" pytest -n 1 --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --test-ws-init-cookie=read test_websocket_init_cookie.py kill_hge_servers ;; ws-init-cookie-noread) echo "$(time_elapsed): testcase 2: no read cookie, cors disabled" export HASURA_GRAPHQL_ADMIN_SECRET="HGE$RANDOM$RANDOM" export HASURA_GRAPHQL_AUTH_HOOK="http://localhost:9876/auth" export HASURA_GRAPHQL_AUTH_HOOK_MODE="POST" run_hge_with_args serve --disable-cors wait_for_port 8080 python3 auth_webhook_server.py >"$OUTPUT_FOLDER/cookie_webhook.log" 2>&1 & WHC_PID=$! wait_for_port 9876 pytest -n 1 --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --test-ws-init-cookie=noread test_websocket_init_cookie.py kill_hge_servers ;; ws-init-cookie-read-cors-disabled) echo "$(time_elapsed): testcase 3: read cookie, cors disabled and ws-read-cookie" export HASURA_GRAPHQL_ADMIN_SECRET="HGE$RANDOM$RANDOM" export HASURA_GRAPHQL_AUTH_HOOK="http://localhost:9876/auth" export HASURA_GRAPHQL_AUTH_HOOK_MODE="POST" export HASURA_GRAPHQL_WS_READ_COOKIE="true" run_hge_with_args serve --disable-cors wait_for_port 8080 python3 auth_webhook_server.py >"$OUTPUT_FOLDER/cookie_webhook.log" 2>&1 & WHC_PID=$! 
wait_for_port 9876 pytest -n 1 --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --test-ws-init-cookie=read test_websocket_init_cookie.py kill_hge_servers ;; ws-graphql-api-disabled) echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH GRAPHQL DISABLED ########>\n" export HASURA_GRAPHQL_ENABLED_APIS="metadata" export HASURA_GRAPHQL_ADMIN_SECRET="HGE$RANDOM$RANDOM" export HASURA_GRAPHQL_AUTH_HOOK="http://localhost:9876/auth" export HASURA_GRAPHQL_AUTH_HOOK_MODE="POST" run_hge_with_args serve wait_for_port 8080 python3 auth_webhook_server.py >"$OUTPUT_FOLDER/cookie_webhook.log" 2>&1 & WHC_PID=$! wait_for_port 9876 pytest -n 1 --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --test-graphql-disabled test_apis_disabled.py kill_hge_servers unset HASURA_GRAPHQL_ENABLED_APIS run_hge_with_args serve --enabled-apis metadata wait_for_port 8080 pytest -n 1 --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --test-graphql-disabled test_apis_disabled.py kill_hge_servers ;; ws-metadata-api-disabled) echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH METADATA DISABLED ########>\n" export HASURA_GRAPHQL_ENABLED_APIS="graphql" export HASURA_GRAPHQL_ADMIN_SECRET="HGE$RANDOM$RANDOM" export HASURA_GRAPHQL_AUTH_HOOK="http://localhost:9876/auth" export HASURA_GRAPHQL_AUTH_HOOK_MODE="POST" run_hge_with_args serve wait_for_port 8080 python3 auth_webhook_server.py >"$OUTPUT_FOLDER/cookie_webhook.log" 2>&1 & WHC_PID=$! 
wait_for_port 9876 pytest -n 1 --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --test-metadata-disabled test_apis_disabled.py kill_hge_servers unset HASURA_GRAPHQL_ENABLED_APIS run_hge_with_args serve --enabled-apis graphql wait_for_port 8080 pytest -n 1 --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --test-metadata-disabled test_apis_disabled.py kill_hge_servers ;; remote-schema-permissions) echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH REMOTE SCHEMA PERMISSIONS ENABLED ########>\n" export HASURA_GRAPHQL_ENABLE_REMOTE_SCHEMA_PERMISSIONS=true export HASURA_GRAPHQL_ADMIN_SECRET="HGE$RANDOM$RANDOM" run_hge_with_args serve wait_for_port 8080 pytest -n 1 --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --enable-remote-schema-permissions test_remote_schema_permissions.py unset HASURA_GRAPHQL_ENABLE_REMOTE_SCHEMA_PERMISSIONS kill_hge_servers ;; function-permissions) echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH FUNCTION PERMISSIONS ENABLED ########>\n" export HASURA_GRAPHQL_INFER_FUNCTION_PERMISSIONS=false export HASURA_GRAPHQL_ADMIN_SECRET="HGE$RANDOM$RANDOM" run_hge_with_args serve wait_for_port 8080 pytest -n 1 --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --test-function-permissions test_graphql_queries.py::TestGraphQLQueryFunctionPermissions pytest -n 1 --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --test-function-permissions test_graphql_mutations.py::TestGraphQLMutationFunctions unset HASURA_GRAPHQL_INFER_FUNCTION_PERMISSIONS unset HASURA_GRAPHQL_ADMIN_SECRET kill_hge_servers ;; roles-inheritance) echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH EXPERIMENTAL FEATURE: ROLES INHERITANCE ########>\n" export 
HASURA_GRAPHQL_ADMIN_SECRET="HGE$RANDOM$RANDOM" export HASURA_GRAPHQL_ENABLE_REMOTE_SCHEMA_PERMISSIONS="true" export HASURA_GRAPHQL_INFER_FUNCTION_PERMISSIONS=false run_hge_with_args serve wait_for_port 8080 pytest --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --enable-remote-schema-permissions --test-function-permissions test_roles_inheritance.py unset HASURA_GRAPHQL_ADMIN_SECRET unset HASURA_GRAPHQL_ENABLE_REMOTE_SCHEMA_PERMISSIONS unset HASURA_GRAPHQL_INFER_FUNCTION_PERMISSIONS kill_hge_servers ;; query-caching) echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE QUERY CACHING #####################################>\n" export HASURA_GRAPHQL_ADMIN_SECRET="HGE$RANDOM$RANDOM" # use only one capability to disable cache striping run_hge_with_args +RTS -N1 -RTS serve wait_for_port 8080 pytest -n 1 --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" test_graphql_queries.py::TestGraphQLQueryCaching kill_hge_servers ;; query-logs) # verbose logging tests echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH QUERY LOG ########>\n" export HASURA_GRAPHQL_ADMIN_SECRET="HGE$RANDOM$RANDOM" export HASURA_GRAPHQL_ENABLED_LOG_TYPES=" startup,http-log,webhook-log,websocket-log,query-log" export HASURA_GRAPHQL_LOG_LEVEL="debug" #run_hge_with_args serve # we are doing this instead of calling run_hge_with_args, because we want to save in a custom log file set -x export LOGGING_TEST_LOGFILE_PATH="$OUTPUT_FOLDER/graphql-engine-verbose-logging.log" "$GRAPHQL_ENGINE" serve 2>&1 >"$LOGGING_TEST_LOGFILE_PATH" & HGE_PIDS="$HGE_PIDS $!" 
set +x wait_for_port 8080 pytest -n 1 --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --test-logging test_logging.py unset HASURA_GRAPHQL_ENABLED_LOG_TYPES kill_hge_servers # end verbose logging tests ;; startup-db-calls) # verbose logging tests echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE STARTUP DB CALLS ########>\n" export HASURA_GRAPHQL_ADMIN_SECRET="HGE$RANDOM$RANDOM" export HASURA_GRAPHQL_ENABLED_LOG_TYPES=" startup,http-log,webhook-log,websocket-log,query-log" export HASURA_GRAPHQL_LOG_LEVEL="debug" #run_hge_with_args serve # we are doing this instead of calling run_hge_with_args, because we want to save in a custom log file set -x export LOGGING_TEST_LOGFILE_PATH="$OUTPUT_FOLDER/graphql-engine-verbose-logging-db.log" "$GRAPHQL_ENGINE" serve 2>&1 >"$LOGGING_TEST_LOGFILE_PATH" & HGE_PIDS="$HGE_PIDS $!" set +x wait_for_port 8080 kill_hge_servers # end verbose logging # running HGE server again for pytest, the test will use the log generated from the previous run # see https://github.com/hasura/graphql-engine-mono/pull/3813 for more information run_hge_with_args serve wait_for_port 8080 pytest -n 1 --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --test-startup-db-calls test_startup_db_calls.py kill_hge_servers unset HASURA_GRAPHQL_ENABLED_LOG_TYPES unset HASURA_GRAPHQL_LOG_LEVEL unset HASURA_GRAPHQL_ADMIN_SECRET unset LOGGING_TEST_LOGFILE_PATH ;; read-only-db) ## read-only DB tests; Hasura should start and run read queries against a read-only DB echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH READ-ONLY DATABASE ########>\n" export HASURA_GRAPHQL_ADMIN_SECRET="HGE$RANDOM$RANDOM" export HASURA_GRAPHQL_ENABLED_LOG_TYPES="startup,http-log,webhook-log,websocket-log,query-log" export HASURA_GRAPHQL_LOG_LEVEL="debug" export HASURA_GRAPHQL_DEV_MODE="false" export HASURA_GRAPHQL_ADMIN_INTERNAL_ERRORS="false" # setup the database 
for read-only access # 'test_graphql_read_only_source.py' assumes 'HASURA_READONLY_DB_URL' is set # Note: setting default_transaction_mode to read-only etc. doesn't work for # DDL statements. To replicate read-only access even for DDLs, we need to # create a read-only user readonly_sql=$(cat <<EOF CREATE USER hasuraro WITH PASSWORD 'passme'; GRANT CONNECT ON DATABASE pg_source_1 TO hasuraro; GRANT USAGE ON SCHEMA public TO hasuraro; GRANT SELECT ON ALL TABLES IN SCHEMA public TO hasuraro; GRANT SELECT ON ALL TABLES IN SCHEMA pg_catalog TO hasuraro; GRANT SELECT ON ALL TABLES IN SCHEMA information_schema TO hasuraro; ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT SELECT ON TABLES TO hasuraro; EOF ) psql "$HASURA_GRAPHQL_PG_SOURCE_URL_1" -c "$readonly_sql" export HASURA_READONLY_DB_URL="postgresql://hasuraro:passme@localhost:5432/pg_source_1" run_hge_with_args serve wait_for_port 8080 # and then test graphql queries work pytest -n 1 --hge-urls "$HGE_URL" \ --pg-urls "$HASURA_GRAPHQL_PG_SOURCE_URL_1" \ --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" \ --test-read-only-source \ test_graphql_read_only_source.py unset HASURA_GRAPHQL_ENABLED_LOG_TYPES kill_hge_servers # end read-only DB tests ;; remote-schema-https) echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH SECURE REMOTE SCHEMA #########################>\n" export REMOTE_SCHEMAS_WEBHOOK_DOMAIN="https://127.0.0.1:5001/" init_ssl run_hge_with_args serve wait_for_port 8080 python3 graphql_server.py 5001 "$OUTPUT_FOLDER/ssl/webhook.pem" "$OUTPUT_FOLDER/ssl/webhook-key.pem" >"$OUTPUT_FOLDER/remote_gql_server.log" 2>&1 & GQL_SERVER_PID=$! 
wait_for_port 5001 pytest -n 1 --hge-urls="$HGE_URL" --pg-urls="$HASURA_GRAPHQL_DATABASE_URL" test_schema_stitching.py::TestRemoteSchemaBasic export REMOTE_SCHEMA_WEBHOOK_DOMAIN="https://localhost:5000/" kill_hge_servers kill $GQL_SERVER_PID ;; post-webhook) webhook_tests_check_root echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH ADMIN SECRET & WEBHOOK (POST) #########################>\n" export HASURA_GRAPHQL_AUTH_HOOK="https://localhost:9090/" export HASURA_GRAPHQL_AUTH_HOOK_MODE="POST" export HASURA_GRAPHQL_ADMIN_SECRET="HGE$RANDOM$RANDOM" init_ssl start_multiple_hge_servers python3 webhook.py 9090 "$OUTPUT_FOLDER/ssl/webhook-key.pem" "$OUTPUT_FOLDER/ssl/webhook.pem" >"$OUTPUT_FOLDER/webhook.log" 2>&1 & WH_PID=$! wait_for_port 9090 run_pytest_parallel --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --hge-webhook="$HASURA_GRAPHQL_AUTH_HOOK" kill_hge_servers ;; webhook-request-context) webhook_tests_check_root echo -e "\n$(time_elapsed): <########## TEST WEBHOOK RECEIVES REQUEST DATA AS CONTEXT #########################>\n" export HASURA_GRAPHQL_AUTH_HOOK="http://localhost:5594/" export HASURA_GRAPHQL_AUTH_HOOK_MODE="POST" export HASURA_GRAPHQL_ADMIN_SECRET="HGE$RANDOM$RANDOM" run_hge_with_args serve wait_for_port 8080 pytest -s -n 1 --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --hge-webhook="$HASURA_GRAPHQL_AUTH_HOOK" --test-webhook-request-context test_webhook_request_context.py kill_hge_servers ;; get-webhook) webhook_tests_check_root echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH ADMIN SECRET & WEBHOOK (GET) #########################>\n" export HASURA_GRAPHQL_AUTH_HOOK="https://localhost:9090/" export HASURA_GRAPHQL_AUTH_HOOK_MODE="GET" export HASURA_GRAPHQL_ADMIN_SECRET="HGE$RANDOM$RANDOM" init_ssl start_multiple_hge_servers python3 webhook.py 9090 "$OUTPUT_FOLDER/ssl/webhook-key.pem" "$OUTPUT_FOLDER/ssl/webhook.pem" >"$OUTPUT_FOLDER/webhook.log" 2>&1 & WH_PID=$! 
wait_for_port 9090 run_pytest_parallel --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --hge-webhook="$HASURA_GRAPHQL_AUTH_HOOK" kill_hge_servers ;; insecure-webhook) webhook_tests_check_root echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH ADMIN SECRET & HTTPS INSECURE WEBHOOK (GET) ########>\n" export HASURA_GRAPHQL_AUTH_HOOK="https://localhost:9090/" export HASURA_GRAPHQL_AUTH_HOOK_MODE="GET" export HASURA_GRAPHQL_ADMIN_SECRET="HGE$RANDOM$RANDOM" init_ssl rm /etc/ssl/certs/webhook.crt update-ca-certificates run_hge_with_args serve wait_for_port 8080 echo -e "running webhook" python3 webhook.py 9090 "$OUTPUT_FOLDER/ssl/webhook-key.pem" "$OUTPUT_FOLDER/ssl/webhook.pem" & WH_PID=$! echo -e "webhook pid $WH_PID" wait_for_port 9090 pytest -n 1 --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --hge-webhook="$HASURA_GRAPHQL_AUTH_HOOK" --test-webhook-insecure test_webhook_insecure.py kill_hge_servers ;; insecure-webhook-with-admin-secret) webhook_tests_check_root echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH ADMIN_SECRET & HTTPS INSECURE WEBHOOK WITH ADMIN SECRET (POST) ########>\n" export HASURA_GRAPHQL_AUTH_HOOK="https://localhost:9090/" export HASURA_GRAPHQL_AUTH_HOOK_MODE="POST" export HASURA_GRAPHQL_ADMIN_SECRET="HGE$RANDOM$RANDOM" init_ssl rm /etc/ssl/certs/webhook.crt update-ca-certificates run_hge_with_args serve wait_for_port 8080 python3 webhook.py 9090 "$OUTPUT_FOLDER/ssl/webhook-key.pem" "$OUTPUT_FOLDER/ssl/webhook.pem" >"$OUTPUT_FOLDER/webhook.log" 2>&1 & WH_PID=$! 
echo -e "webhook pid $WH_PID" wait_for_port 9090 pytest -n 1 --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --hge-webhook="$HASURA_GRAPHQL_AUTH_HOOK" --test-webhook-insecure test_webhook_insecure.py kill_hge_servers kill $WH_PID ;; allowlist-queries) # allowlist queries test # unset HASURA_GRAPHQL_AUTH_HOOK # unset HASURA_GRAPHQL_AUTH_HOOK_MODE # unset HASURA_GRAPHQL_JWT_SECRET # unset HASURA_GRAPHQL_ENABLE_ALLOWLIST echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH ALLOWLIST QUERIES ########> \n" export HASURA_GRAPHQL_ADMIN_SECRET="HGE$RANDOM$RANDOM" export HASURA_GRAPHQL_ENABLE_ALLOWLIST=true run_hge_with_args serve wait_for_port 8080 pytest -n 1 --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --test-allowlist-queries test_allowlist_queries.py kill_hge_servers unset HASURA_GRAPHQL_ENABLE_ALLOWLIST run_hge_with_args serve --enable-allowlist wait_for_port 8080 pytest -n 1 --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --test-allowlist-queries test_allowlist_queries.py kill_hge_servers # end allowlist queries test ;; developer-api-tests) echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH DEVELOPER API ENABLED ########>\n" export HASURA_GRAPHQL_ENABLED_APIS="metadata,graphql,developer,config,pgdump" export HASURA_GRAPHQL_ADMIN_SECRET="HGE$RANDOM$RANDOM" run_hge_with_args serve --enabled-apis "$HASURA_GRAPHQL_ENABLED_APIS" wait_for_port 8080 pytest -n 1 --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --test-developer-api-enabled test_dev_endpoints.py unset HASURA_GRAPHQL_ENABLED_APIS kill_hge_servers ;; jwk-url) # TODO(swann): ditto, these have to be parallelised # jwk test unset HASURA_GRAPHQL_AUTH_HOOK unset HASURA_GRAPHQL_AUTH_HOOK_MODE unset HASURA_GRAPHQL_JWT_SECRET export 
HASURA_GRAPHQL_ADMIN_SECRET="HGE$RANDOM$RANDOM" echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH JWK URL ########> \n" # start the JWK server python3 jwk_server.py >"$OUTPUT_FOLDER/jwk_server.log" 2>&1 & JWKS_PID=$! wait_for_port 5001 echo "Test: Cache-Control with max-age=3" export HASURA_GRAPHQL_JWT_SECRET='{"jwk_url": "http://localhost:5001/jwk-cache-control?max-age=3"}' run_hge_with_args serve wait_for_port 8080 pytest -n 1 --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --test-jwk-url -k 'test_cache_control_header_max_age' kill_hge_servers unset HASURA_GRAPHQL_JWT_SECRET echo "Test: Cache-Control with must-revalidate, max-age=3" export HASURA_GRAPHQL_JWT_SECRET='{"jwk_url": "http://localhost:5001/jwk-cache-control?must-revalidate=true&must-revalidate=true"}' run_hge_with_args serve wait_for_port 8080 pytest -n 1 --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --test-jwk-url -k 'test_cache_control_header_max_age' kill_hge_servers unset HASURA_GRAPHQL_JWT_SECRET echo "Test: Cache-Control with must-revalidate" export HASURA_GRAPHQL_JWT_SECRET='{"jwk_url": "http://localhost:5001/jwk-cache-control?must-revalidate=true"}' run_hge_with_args serve wait_for_port 8080 pytest -n 1 --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --test-jwk-url -k 'test_cache_control_header_no_caching' kill_hge_servers unset HASURA_GRAPHQL_JWT_SECRET echo "Test: Cache-Control with no-cache, public" export HASURA_GRAPHQL_JWT_SECRET='{"jwk_url": "http://localhost:5001/jwk-cache-control?no-cache=true&public=true"}' run_hge_with_args serve wait_for_port 8080 pytest -n 1 --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --test-jwk-url -k 'test_cache_control_header_no_caching' kill_hge_servers unset HASURA_GRAPHQL_JWT_SECRET echo "Test: Cache-Control with 
no-store, max-age=3" export HASURA_GRAPHQL_JWT_SECRET='{"jwk_url": "http://localhost:5001/jwk-cache-control?no-store=true&max-age=3"}' run_hge_with_args serve wait_for_port 8080 pytest -n 1 --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --test-jwk-url -k 'test_cache_control_header_no_caching' kill_hge_servers unset HASURA_GRAPHQL_JWT_SECRET echo "Test: Expires with three second expiry" export HASURA_GRAPHQL_JWT_SECRET='{"jwk_url": "http://localhost:5001/jwk-expires?seconds=3"}' run_hge_with_args serve wait_for_port 8080 pytest -n 1 --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --test-jwk-url -k 'test_expires_header' kill_hge_servers unset HASURA_GRAPHQL_JWT_SECRET kill $JWKS_PID # end jwk url test ;; horizontal-scaling) # horizontal scale test unset HASURA_GRAPHQL_AUTH_HOOK unset HASURA_GRAPHQL_AUTH_HOOK_MODE unset HASURA_GRAPHQL_ADMIN_SECRET echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH HORIZONTAL SCALING ########>\n" HASURA_HS_TEST_DB='postgresql://postgres:postgres@localhost:6543/hs_hge_test' if ! 
psql "$HASURA_GRAPHQL_DATABASE_URL" -c "SELECT 1 FROM pg_database WHERE datname = 'hs_hge_test'" | grep -q -F '(1 row)'; then psql "$HASURA_GRAPHQL_DATABASE_URL" -c 'CREATE DATABASE hs_hge_test;' fi pgUserInfo=$(python3 -c ' import os from urllib.parse import urlparse uri = urlparse( os.environ["HASURA_GRAPHQL_DATABASE_URL"] ) if uri.password: print("password="+uri.password+" user="+uri.username) else: print("user="+uri.username)') pgDbInfo=$(psql "$HASURA_GRAPHQL_DATABASE_URL" -c "SELECT concat(' host=',inet_server_addr(),' port=', inet_server_port(),' dbname=',current_database())" | sed -n '3 p') # create pgbouncer user id pgbouncer || useradd pgbouncer cd $CIRCLECI_FOLDER mkdir -p pgbouncer chown -R pgbouncer:pgbouncer pgbouncer echo '[databases] hs_hge_test = '"$pgDbInfo" "$pgUserInfo"' [pgbouncer] listen_port = 6543 listen_addr = 127.0.0.1 logfile = pgbouncer/pgbouncer.log pidfile = pgbouncer/pgbouncer.pid auth_type = md5 auth_file = pgbouncer/users.txt admin_users = postgres' >pgbouncer/pgbouncer.ini # start pgbouncer pgbouncer -u pgbouncer -d pgbouncer/pgbouncer.ini cd $PYTEST_ROOT sleep 2 # start 1st server run_hge_with_args --database-url "$HASURA_HS_TEST_DB" serve wait_for_port 8080 # start 2nd server run_hge_with_args --database-url "$HASURA_HS_TEST_DB" serve \ --server-port 8081 wait_for_port 8081 # run test pytest --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --test-hge-scale-url="http://localhost:8081" test_horizontal_scale.py # Shutdown pgbouncer psql "postgresql://postgres:postgres@localhost:6543/pgbouncer" -c "SHUTDOWN;" || true cd $CIRCLECI_FOLDER # start pgbouncer again pgbouncer -u pgbouncer -d pgbouncer/pgbouncer.ini cd $PYTEST_ROOT # sleep for 20 seconds sleep 20 # run test pytest --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --test-hge-scale-url="http://localhost:8081" test_horizontal_scale.py # Shutdown pgbouncer psql "postgresql://postgres:postgres@localhost:6543/pgbouncer" -c "SHUTDOWN;" || true 
kill_hge_servers psql "$HASURA_GRAPHQL_DATABASE_URL" -c "drop database hs_hge_test;" sleep 4 unset HASURA_HS_TEST_DB # end horizontal scale test ;; # # ########################################### # the following backend-* tests are excluded from `server-test-names.txt` # and are run via their respective `test_oss_server_*` jobs # # [Specifying Pytests with -k flag] # tests are run with the -k flag to filter on common and # backend-specific test classes using keyword expressions. # # this reduces the number of unrelated tests skipped, which # avoids an increasingly negative impact on our test run # time as we add more backends and tests. # # https://docs.pytest.org/en/6.2.x/usage.html#specifying-tests-selecting-tests # https://github.com/hasura/graphql-engine/blob/master/server/py-tests/README.md#running-bigquery-tests # backend-mssql) echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH SQL SERVER BACKEND ###########################################>\n" run_hge_with_args serve wait_for_port 8080 source_data_sources_utils add_mssql_source 8080 "$HASURA_GRAPHQL_MSSQL_SOURCE_URL" # See note [Specifying Pytests with -k flag] pytest -n 1 --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --backend mssql -k "MSSQL or Common" # start inherited roles test echo -e "\n$(time_elapsed): <########## TEST INHERITED-ROLES WITH SQL SERVER BACKEND ###########################################>\n" pytest -n 1 --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" -k TestGraphQLInheritedRolesMSSQL --backend mssql # end inherited roles test kill_hge_servers ;; backend-citus) echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH CITUS BACKEND ###########################################>\n" run_hge_with_args serve wait_for_port 8080 source_data_sources_utils add_citus_source 8080 "$HASURA_GRAPHQL_CITUS_SOURCE_URL" # See note [Specifying Pytests with -k flag] pytest -n 1 --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --backend citus 
-k "Citus or Common" kill_hge_servers ;; backend-bigquery) echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH BIGQUERY BACKEND ###########################################>\n" source "$CIRCLECI_FOLDER/../scripts/bigquery.sh" && verify_bigquery_pytest_env run_hge_with_args serve wait_for_port 8080 source_data_sources_utils add_bigquery_source 8080 # See note [Specifying Pytests with -k flag] pytest -n 1 --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --backend bigquery -k "Bigquery or Common" kill_hge_servers ;; esac echo "Finished running tests on node $CIRCLE_NODE_INDEX of $CIRCLE_NODE_TOTAL" echo -e "\n$(time_elapsed): <########## DONE ########>\n"
<!DOCTYPE html>
<html lang="en">
<head>
  <!-- Declare encoding and mobile viewport; required for correct rendering. -->
  <meta charset="utf-8">
  <meta name="viewport" content="width=device-width, initial-scale=1">
  <title>Cards with Text Fields</title>
  <link rel="stylesheet" href="https://stackpath.bootstrapcdn.com/bootstrap/4.3.1/css/bootstrap.min.css" integrity="sha384-ggOyR0iXCbMQv3Xipma34MD+dH/1fQ784/j6cY/iJTQUOhcWr7x9JvoRxT2MZw1T" crossorigin="anonymous">
</head>
<body>
  <div class="container">
    <div class="row">
      <!-- Three equal-width Bootstrap cards, each with one labeled text input.
           Bootstrap's "form-group" is a layout class for a wrapper <div>;
           the original wrapped each input in its own empty-action <form>,
           which is semantically wrong and breaks form submission grouping. -->
      <div class="col-sm-4">
        <div class="card">
          <div class="card-body">
            <h5 class="card-title">Card 1</h5>
            <div class="form-group">
              <label for="input1">Text Field 1:</label>
              <input type="text" class="form-control" id="input1">
            </div>
          </div>
        </div>
      </div>
      <div class="col-sm-4">
        <div class="card">
          <div class="card-body">
            <h5 class="card-title">Card 2</h5>
            <div class="form-group">
              <label for="input2">Text Field 2:</label>
              <input type="text" class="form-control" id="input2">
            </div>
          </div>
        </div>
      </div>
      <div class="col-sm-4">
        <div class="card">
          <div class="card-body">
            <h5 class="card-title">Card 3</h5>
            <div class="form-group">
              <label for="input3">Text Field 3:</label>
              <input type="text" class="form-control" id="input3">
            </div>
          </div>
        </div>
      </div>
    </div>
  </div>
</body>
</html>
#!/bin/bash
# Forward BOOKSTORE v1 port 14001 from the first Running pod of the given
# backend (default: "bookstore") to a local port.
#
# Usage: ./script [backend]
# Env:   BOOKSTOREv1_LOCAL_PORT — local port to bind (default 8084)

backend="${1:-bookstore}"
BOOKSTOREv1_LOCAL_PORT="${BOOKSTOREv1_LOCAL_PORT:-8084}"

# Pick the first pod in Running state that matches the app selector.
POD="$(kubectl get pods --selector app="$backend" -n bookstore --no-headers | grep 'Running' | awk 'NR==1{print $1}')"

# Fail fast with a clear message instead of handing kubectl an empty pod name.
if [ -z "$POD" ]; then
    echo "error: no Running pod found for app=$backend in namespace bookstore" >&2
    exit 1
fi

kubectl port-forward "$POD" -n bookstore "$BOOKSTOREv1_LOCAL_PORT":14001
#!/bin/bash
# Lint every shell script in the tree with shellcheck; exit non-zero if any fail.
set -e
set -o pipefail

ERRORS=()

# Find all regular files outside .git and run shellcheck on anything that
# `file` identifies as a shell script.
for f in $(find . -type f -not -iwholename '*.git*' | sort -u); do
	if file "$f" | grep --quiet shell; then
		{
			# Fixed typo in log message: "sucessfully" -> "successfully".
			shellcheck "$f" && echo "[OK]: successfully linted $f"
		} || {
			# add to errors
			ERRORS+=("$f")
		}
	fi
done

if [ ${#ERRORS[@]} -eq 0 ]; then
	echo "No errors, hooray"
else
	echo "These files failed shellcheck: ${ERRORS[*]}"
	exit 1
fi

# Report PASS/FAIL for each shell named on the command line.
# NOTE(review): depends on a `test_shell` function that is not defined in this
# file — confirm it is sourced before this script runs.
test_shells() {
	fmt="%s\t%s\n"
	for shell in "$@"; do
		# Quote the format and arguments so the \t in $fmt and any unusual
		# shell names survive word splitting (original passed them unquoted).
		if test_shell "$shell"; then
			printf "$fmt" "$shell" PASS
		else
			printf "$fmt" "$shell" FAIL
		fi
	done
}

test_shells sh bash dash zsh ksh csh tcsh gosh
#!/bin/sh
# Count every entry (files and directories) under the current directory,
# recursively, and print the count with whitespace stripped.
# NOTE(review): `-name "*"` matches everything, so it is a no-op filter;
# also, filenames containing newlines would inflate the count.
find . -name "*" | wc -l | tr -d " "
import re

# Demonstrates greedy vs. non-greedy matching at the end of a pattern.
content = 'http://weibo.com/comment/KEraCN'

# A trailing non-greedy group `(.*?)` matches as little as possible — here,
# the empty string — while the greedy `(.*)` consumes the rest of the input.
lazy_match = re.match('http.*?comment(.*?)', content)
greedy_match = re.match('http.*?comment(.*)', content)

print('result1', lazy_match.group(1))
print('result2', greedy_match.group(1))
# Run a detached interactive container with the host movie directory mounted
# into the container at the torrent path.
# NOTE(review): no image name follows the -v mapping, so this command fails
# as written — TODO confirm the intended image (e.g. a torrent client image).
docker run -dit -v /home/siwei/movie:/Users/siwei/torrent/
#!/bin/bash -l
# Ensure we fail the job if any steps fail
set -ex -o pipefail

echo "---> edgex-publish-docs.sh"

# Required Parameters
# <NEXUS_URL>: URL of Nexus server. Eg: https://nexus.edgexfoundry.org
# <NEXUS_PATH>: Path on nexus logs repo to place the logs. Eg:
#    snapshots/branch/$BUILD_ID
#    or
#    release/branch
# <NEXUS_REPO>: Name of the nexus repo to use
# <DOC_DIRECTORY>: Absolute path of doc build step output directory.

# Quote all expansions so paths/URLs containing spaces do not word-split
# (the original passed every variable unquoted).
zip -r docs.zip "${DOC_DIRECTORY}"
lftools deploy nexus-zip "${NEXUS_URL}" "${NEXUS_REPO}" "${NEXUS_PATH}" docs.zip
//#region IMPORTS
import type Pose from '../../armature/Pose';
import type { IKChain, IKLink } from "../rigs/IKChain";
import type { ISolver } from './support/ISolver';
import { QuatUtil, Transform, Vec3Util } from '../../maths';
import CurveSample from '../../maths/CurveSample';
import { vec3, quat } from 'gl-matrix';
//#endregion

// Math helpers for catenary curves (the shape of a hanging rope/chain).
// NOTE(review): class name "Caternary" is a misspelling of "Catenary" — it is
// referenced below, so renaming would need a coordinated change.
class Caternary{

    // Feed it a Sag Factor( A ) and the X of the Graph when plotting the curve,
    // will return the Y of the curve.
    static get( A: number, x: number ): number{ return A * Math.cosh( x / A ); }

    // A = Sagging Factor of the Curve. Need Length between the ends & Total Possible Length between the 2 points.
    // Returns null when the end-to-end distance exceeds the available rope length.
    static computeSag( len: number, maxLen: number, tries=100 ): number | null{
        // Solution for Solving for A was found at http://rhin.crai.archi.fr/rld/plugin_details.php?id=990
        // I've since have modified from the original function, removing yDelta and sqrts
        // Note: This seems like newton's method for solving roots ??
        if( len > maxLen ) return null;

        const hLen      = len * 0.5;
        const hMaxLen   = maxLen * 0.5;

        let e   : number = Number.MAX_VALUE;  // relative error of the latest iteration
        let a   : number = 100;               // initial guess for the sag factor
        let tmp : number = 0;

        // Iterate the fixed-point update until the relative change drops
        // below 0.1% or the try budget is exhausted.
        for( let i=0; i < tries; i++ ){
            tmp = hLen / Math.asinh( hMaxLen / a );
            e   = Math.abs( ( tmp - a ) / a );
            a   = tmp;
            if( e < 0.001 ) break;
        }

        return a;
    }

    // Sample `segments` interior points of the catenary between p0 and p1 for
    // a rope of total length maxLen. Returns [] when the points are farther
    // apart than maxLen (no sag possible). `invert` flips the sag direction
    // along the Y axis.
    static fromEndPoints( p0: vec3, p1: vec3, maxLen: number, segments=5, invert=false ): Array<vec3>{
        const vecLen = Vec3Util.len( p0, p1 );
        const A      = this.computeSag( vecLen, maxLen );
        if( A == null ) return [];

        segments += 1;                          // Skipping Zero, so need to add one to return the requested segment count

        const hVecLen   = vecLen * 0.5;
        const offset    = this.get( A, -hVecLen );  // Need starting C to base things at Zero, subtract offset from each c point
        const step      = vecLen / segments;        // Size of Each Segment

        const rtn : Array<vec3> = [];
        let pnt : vec3;
        let x   : number;
        let c   : number;
        //let t : number;

        for( let i=1; i < segments; i++ ){
            pnt = [0,0,0];
            vec3.lerp( pnt, p0, p1, i / segments );
            // t = i / segments;

            x = i * step - hVecLen;              // x position between two points but using half as zero center
            c = offset - this.get( A, x );       // Get a y value, but needs to be changed to work with coord system
            //c = offset - this.get( A, t - 0.5 );  // Further testing is need but maybe able to get away just using a T value between -0.5 > 0.5 in place of X

            // Apply the sag as a Y offset; `invert` hangs the curve the other way.
            pnt[1] = ( !invert )? pnt[1] - c : pnt[1] + c;
            rtn.push( pnt );
        }

        return rtn;
    }
}

// Align chain onto a Catenary curve, which is often used to simulate
// rope/chains. There was an instance when someone called it RopeIK :/
class CatenarySolver implements ISolver{
    //#region MAIN
    effectorPos : vec3 = [0,0,0];   // world-space target the chain tail reaches toward
    sampler    !: CurveSample;      // lazily created in resolve(); caches curve samples

    // No per-chain initialization needed for this solver.
    initData( pose ?: Pose, chain ?: IKChain ): this{ return this; }

    // Set the world-space effector (target) position.
    setTargetPos( v: vec3 ): this{
        //this._isTarPosition = true;
        this.effectorPos[ 0 ] = v[ 0 ];
        this.effectorPos[ 1 ] = v[ 1 ];
        this.effectorPos[ 2 ] = v[ 2 ];
        return this;
    }
    //#endregion

    // Pose every bone of the chain along a catenary curve running from the
    // chain root to effectorPos, writing local rotations back into `pose`.
    resolve( chain: IKChain, pose: Pose, debug?:any ): void{
        const sCnt = chain.count * 2;                                  // two curve samples per bone
        if( !this.sampler ) this.sampler = new CurveSample( sCnt+2 );  // +2 for the fixed end points

        const pt = new Transform();
        const ct = new Transform();
        let lnk : IKLink = chain.first();

        pose.getWorldTransform( lnk.pidx, pt ); // Get the Starting Transform for the chain.
        ct.fromMul( pt, lnk.bind );             // Move Bind to WS, to get staring position of the chain

        const pnts = Caternary.fromEndPoints( ct.pos, this.effectorPos, chain.length, sCnt, false );

        //~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        // Update Curve Sampler with new Data
        let i = 1;
        this.sampler.set( 0, ct.pos );                   // Set Starting Point
        for( let p of pnts ) this.sampler.set( i++, p ); // Inbetween Points
        this.sampler.set( i, this.effectorPos );         // End Point
        this.sampler.updateLengths();                    // Recompute the Curve lengths

        //~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        const tail : vec3 = [0,0,0];    // Bone's Tail Position
        const tar  : vec3 = [0,0,0];    // Target Position
        const from : vec3 = [0,0,0];    // Unit Vector of Bone Head to Bone Tail
        const to   : vec3 = [0,0,0];    // Unit Vector of Bone Head to Target
        const q    : quat = [0,0,0,1];  // Rotation for FROM > TO
        let dist = 0;                   // Distance at each step of the curve

        for( let i=0; i < chain.count; i++ ){
            //--------------------------------------
            lnk   = chain.links[ i ];   // Get Bone Link
            dist += lnk.len;            // Current Distance of the chain this bone's tail reaches.

            ct.fromMul( pt, lnk.bind );                     // Move Bind to World Space
            tail[0] = 0; tail[1] = lnk.len; tail[2] = 0;
            ct.transformVec3( tail );                       // Get WS Position of Tail

            this.sampler.atLength( dist, tar );             // Get the closes point on the curve in relation to the bone's tail distance

            //--------------------------------------
            vec3.sub( from, tail, ct.pos );                 // Bind Direction
            vec3.normalize( from, from );

            vec3.sub( to, tar, ct.pos );                    // Target Direction
            vec3.normalize( to, to );

            quat.rotationTo( q, from, to );                 // Create rotation from bind to target

            //--------------------------------------
            // QuatUtil.dotNegate( q, q, ct.rot );
            quat.mul( q, q, ct.rot );                       // Apply
            QuatUtil.pmulInvert( q, q, pt.rot );            // To Local
            pose.setLocalRot( lnk.idx, q );                 // Save
            pt.mul( q, lnk.bind.pos, lnk.bind.scl );        // Create WorldSpace Parent for next bone
        }
    }
}

// (Legacy commented-out prototype implementations of getByLengths/getA were
// removed here during a documentation pass; see version control history.)

export default CatenarySolver;
const { chalk } = require('../../../helpers');

/**
 * Print the list of rejected files in red; prints nothing for an empty list.
 *
 * @param {Array<import('../helpers/file').File>} files
 *
 * @return {void}
 */
function showRejected(files) {
    // Nothing to report — stay silent rather than printing an empty header.
    if (files.length === 0) {
        return;
    }

    console.log(chalk.red('Rejected Files:'));

    // One line per file: bright-red base name followed by its extension.
    files.forEach(({ name, ext }) => {
        console.log(` ${chalk.redBright(name)}${ext}`)
    })

    // Trailing blank line to separate this section from later output.
    console.log();
}

module.exports = showRejected
/*
 * Copyright (C) 2005-2017 Centre National d'Etudes Spatiales (CNES)
 *
 * This file is part of Orfeo Toolbox
 *
 * https://www.orfeo-toolbox.org/
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "itkMacro.h"
#include "otbMultiChannelGAndRIndexImageFilter.h"
#include "otbImage.h"
#include "otbVectorImage.h"
#include "otbImageFileReader.h"
#include "otbImageFileWriter.h"
#include "otbVegetationIndicesFunctor.h"

// Shared test body: reads an input image, applies the Green/Red index
// functor TFunction on the given channels, and writes the result.
// Expected argv: [1]=input file, [2]=output file, [3]=green channel index,
// [4]=red channel index (both 1-based, per OTB convention — TODO confirm).
template<class TInputImage, class TOutputImage, class TFunction>
int generic_MultiChannelGAndRIndexImageFilter(int itkNotUsed(argc), char * argv[])
{
  typedef otb::ImageFileReader<TInputImage>  ReaderType;
  typedef otb::ImageFileWriter<TOutputImage> WriterType;
  typedef otb::MultiChannelGAndRIndexImageFilter<TInputImage, TOutputImage, TFunction>
      MultiChannelGAndRIndexImageFilterType;

  // Instantiating object
  typename MultiChannelGAndRIndexImageFilterType::Pointer filter = MultiChannelGAndRIndexImageFilterType::New();
  typename ReaderType::Pointer reader = ReaderType::New();
  typename WriterType::Pointer writer = WriterType::New();

  const char * inputFilename  = argv[1];
  const char * outputFilename = argv[2];
  unsigned int greenChannel(::atoi(argv[3]));
  unsigned int redChannel(::atoi(argv[4]));

  reader->SetFileName(inputFilename);
  writer->SetFileName(outputFilename);
  filter->SetGreenIndex(greenChannel);
  filter->SetRedIndex(redChannel);
  filter->SetInput(reader->GetOutput());
  writer->SetInput(filter->GetOutput());

  // Update() drives the whole mini-pipeline: read -> filter -> write.
  writer->Update();

  return EXIT_SUCCESS;
}

// Test entry point. argv[1] selects the vegetation-index functor (IR, IC or
// IB); the selector is consumed before dispatching to the generic body above.
int otbMultiChannelGAndRIndexImageFilter(int argc, char * argv[])
{
  const unsigned int Dimension = 2;
  typedef otb::VectorImage<unsigned char, Dimension> InputImageType;
  typedef otb::Image<double, Dimension>              OutputImageType;

  std::string strArgv(argv[1]);
  // Shift argv so the generic body sees its own arguments starting at argv[1].
  argc--;
  argv++;

  if (strArgv == "IR")
    return (generic_MultiChannelGAndRIndexImageFilter<InputImageType, OutputImageType,
                otb::Functor::IR<InputImageType::InternalPixelType,
                                 InputImageType::InternalPixelType,
                                 OutputImageType::PixelType> >
              (argc, argv));
  else if (strArgv == "IC")
    return (generic_MultiChannelGAndRIndexImageFilter<InputImageType, OutputImageType,
                otb::Functor::IC<InputImageType::InternalPixelType,
                                 InputImageType::InternalPixelType,
                                 OutputImageType::PixelType> >
              (argc, argv));
  else if (strArgv == "IB")
    return (generic_MultiChannelGAndRIndexImageFilter<InputImageType, OutputImageType,
                otb::Functor::IB<InputImageType::InternalPixelType,
                                 InputImageType::InternalPixelType,
                                 OutputImageType::PixelType> >
              (argc, argv));
  else
    // Unknown functor selector.
    return EXIT_FAILURE;

  return EXIT_SUCCESS;
}
<!DOCTYPE html>
<html lang="en">
<head>
  <!-- Added charset declaration; without it the browser must guess encoding. -->
  <meta charset="utf-8">
  <title>Divs</title>
</head>
<body>
  <div>Hello</div>
  <div>World</div>
  <div>!</div>
</body>
</html>
#pragma once #include <list> #include <Poco/Mutex.h> #include <Poco/SharedPtr.h> #include <Poco/Timespan.h> #include <Poco/URI.h> #include <Poco/DOM/Node.h> #include <Poco/DOM/NodeIterator.h> #include "core/PollableDevice.h" #include "model/DeviceID.h" #include "model/ModuleID.h" #include "model/ModuleType.h" #include "model/SensorData.h" #include "util/Loggable.h" namespace BeeeOn { /** * @brief Abstract class representing generic BelkinWemo device. */ class BelkinWemoDevice : public PollableDevice, protected Loggable { public: typedef Poco::SharedPtr<BelkinWemoDevice> Ptr; BelkinWemoDevice(const DeviceID& id, const RefreshTime &refresh); virtual ~BelkinWemoDevice(); DeviceID deviceID() const; DeviceID id() const override; RefreshTime refresh() const override; virtual bool requestModifyState(const ModuleID& moduleID, const double value) = 0; virtual SensorData requestState() = 0; void poll(Distributor::Ptr distributor) override; virtual std::list<ModuleType> moduleTypes() const = 0; virtual std::string name() const = 0; virtual Poco::FastMutex& lock(); /** * @brief Finds the first node with the given name * and returns it's value. When the value of node si empty * returns NULL. If the node with the given name does not exist * Poco::NotFoundException is raised. */ static Poco::XML::Node* findNode(Poco::XML::NodeIterator& iterator, const std::string& name); /** * @brief Finds all nodes with the given name * and returns their values. */ static std::list<Poco::XML::Node*> findNodes(Poco::XML::NodeIterator& iterator, const std::string& name); protected: const DeviceID m_deviceId; RefreshTime m_refresh; Poco::FastMutex m_lock; }; }
# Jenkins QA pipeline driver for the petclinic application: packages the app,
# builds/pushes Docker images to ECR, and deploys to the QA Swarm environment.

PATH="$PATH:/usr/local/bin"
APP_NAME="petclinic"
APP_REPO_NAME="clarusway-repo/petclinic-app-qa"
APP_STACK_NAME="james-petclinic-App-QA-2"
CFN_KEYPAIR="james-petclinic-qa.key"
# Resolve the AWS account to build the ECR registry hostname.
AWS_ACCOUNT_ID=$(aws sts get-caller-identity --query Account --output text)
AWS_REGION="us-east-1"
ECR_REGISTRY="${AWS_ACCOUNT_ID}.dkr.ecr.${AWS_REGION}.amazonaws.com"
# Ansible connects with the CFN keypair; host-key checking is disabled
# because QA instances are recreated frequently.
export ANSIBLE_PRIVATE_KEY_FILE="${JENKINS_HOME}/.ssh/${CFN_KEYPAIR}"
export ANSIBLE_HOST_KEY_CHECKING="False"

echo 'Packaging the App into Jars with Maven'
. ./jenkins/package-with-maven-container.sh

echo 'Preparing QA Tags for Docker Images'
. ./jenkins/prepare-tags-ecr-for-qa-docker-images.sh

echo 'Building App QA Images'
. ./jenkins/build-qa-docker-images-for-ecr.sh

echo "Pushing App QA Images to ECR Repo"
. ./jenkins/push-qa-docker-images-to-ecr.sh

echo 'Deploying App on Swarm'
. ./ansible/scripts/deploy_app_on_qa_environment.sh

echo 'Deleting all local images'
# Free disk space on the Jenkins agent after the push.
docker image prune -af
import { Avatar, Button, Dialog, DialogActions, DialogContent, DialogContentText, DialogTitle, makeStyles } from "@material-ui/core";
import React from "react";
import { ErrorOutline, Info} from "@material-ui/icons";
import { useAppState } from "../../providers/AppStateProvider";

// Avatar colors distinguish informational messages (primary) from
// errors/exceptions (secondary).
const useStyles = makeStyles((theme) => ({
    avatarForInformation: {
        marginTop: theme.spacing(2),
        backgroundColor: theme.palette.primary.main,
        margin: "auto",
    },
    avatarForException: {
        marginTop: theme.spacing(2),
        backgroundColor: theme.palette.secondary.main,
        margin: "auto",
    },
}));

/**
 * Modal dialog driven by global app state: shows the current message
 * (title + detail lines) with an icon matching its type, and calls
 * resolveMessage when dismissed (OK button or backdrop close).
 */
export const MessageDialog = () =>{
    const { messageActive, messageType, messageTitle, messageDetail, resolveMessage } = useAppState()
    const classes = useStyles();

    return(
        <>
        {messageActive && (
            <>
            <Dialog open={messageActive} onClose={resolveMessage}>
                {/* Icon reflects message type; anything not "Info" renders as an error. */}
                { messageType==="Info"?
                    <Avatar className={classes.avatarForInformation}>
                        <Info />
                    </Avatar>
                    :
                    <Avatar className={classes.avatarForException}>
                        <ErrorOutline />
                    </Avatar>
                }
                <DialogTitle > {messageTitle } </DialogTitle>
                <DialogContent>
                    {/* One paragraph per detail line. */}
                    { messageDetail.map((d,i)=>{
                        return(
                            <DialogContentText key={i}> {d} </DialogContentText>
                        )
                    })}
                </DialogContent>
                <DialogActions>
                    <Button onClick={resolveMessage} color="primary"> OK </Button>
                </DialogActions>
            </Dialog>
            </>
        )}
        </>
    )
}
#!/bin/bash
# Render score-average plots for one or more experiments, for both the dev
# and eval datasets. Output PDFs are named after the FIRST experiment.

if test -z "$1"; then
	echo "Usage: render_score_animations_genderless experiment_name1 experiment_name2 ..."
	exit
fi

for dataset in "dev" "eval"; do
	# Absolute-value averages.
	# "$@" is the quoted equivalent of the original unquoted ${@:1}: it passes
	# every experiment name as its own argument even if a name contains spaces.
	python3 plot.py \
		average-metrics \
		bulk_scores/asv_${dataset}/ \
		bulk_scores/cm_${dataset}/ \
		"output/${1}_${dataset}_averages.pdf" \
		--experiment-names "$@"

	# Same plot, relative to the baseline.
	python3 plot.py \
		average-metrics \
		bulk_scores/asv_${dataset}/ \
		bulk_scores/cm_${dataset}/ \
		"output/${1}_${dataset}_averages_relative.pdf" \
		--relative \
		--experiment-names "$@"
done
<gh_stars>0 var bcrypt = require('bcryptjs'), path = require('path'), Q = require('q'); //===============DBS================= var User = require('../models/index').User; //used in local-signup strategy exports.localReg = function(username, password) { var deferred = Q.defer(); User.findOne({ username: username }, function(error, result) { if (null != result) { console.log("USERNAME ALREADY EXISTS:", result.username); deferred.resolve(result); // username exists } else { var hash = bcrypt.hashSync(password, 8); var user = { "username": username, "password": hash }; console.log("CREATING USER:", username); result = new User(user); result.save(); console.log(result); deferred.resolve(user); } }); return deferred.promise; }; //check if user exists //if user exists check if passwords match (use bcrypt.compareSync(password, hash); // true where 'hash' is password in DB) //if password matches take into website //if user doesn't exist or password doesn't match tell them it failed exports.localAuth = function(username, password) { var deferred = Q.defer(); User.findOne({ username: username }, function(error, result) { console.log(result); if (null == result) { console.log("USERNAME NOT FOUND:", username); deferred.resolve(false); } else { var hash = result.password; console.log("FOUND USER: " + result.username); if (bcrypt.compareSync(password, hash)) { deferred.resolve(result); } else { console.log("AUTHENTICATION FAILED"); deferred.resolve(false); } } }); return deferred.promise; }
#!/usr/bin/env bash
# Copyright 2018 The MACE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Generates a C++ source file exposing MaceVersion() with the git describe
# string of the MACE source tree.
#
# Usage: $0 <output filename>

OUTPUT_FILENAME=$1
if [[ -z "${OUTPUT_FILENAME}" ]]; then
  echo "Usage: $0 <filename>"
  exit 1
fi
# Quote the path expansions so output paths with spaces survive.
mkdir -p "$(dirname "${OUTPUT_FILENAME}")"

MACE_SOURCE_DIR=$(dirname "$(dirname "$(dirname "$(dirname "$0")")")")

# Prefer describing the MACE source tree explicitly; fall back to the current
# working directory's repo, then to "unknown" when no git metadata exists.
# (Original checked $? after the assignment and had a dead no-op else branch.)
if ! GIT_VERSION=$(git --git-dir="${MACE_SOURCE_DIR}/.git" --work-tree="${MACE_SOURCE_DIR}" describe --long --tags); then
  if ! GIT_VERSION=$(git describe --long --tags); then
    GIT_VERSION=unknown
  fi
fi

echo write version $GIT_VERSION to ${OUTPUT_FILENAME}
# NOTE: the `+ 8` in the generated code skips the "MACEVER-" prefix at
# runtime; keep it in sync with the prefix length.
cat <<EOF > "${OUTPUT_FILENAME}"
// Copyright 2018 The MACE Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// This is a generated file. DO NOT EDIT!

namespace mace {
#ifndef _MSC_VER
__attribute__((visibility("default")))
#endif
const char *MaceVersion() { return "MACEVER-${GIT_VERSION}" + 8; }
}  // namespace mace
EOF
import colors from '../settings/colors';
import styles from '../settings/styles';

// Card container: centered, width-capped, with clipped rounded corners
// and the lightest depth shadow.
const root = {
  width: '80%',
  maxWidth: '350px',
  margin: '0 auto',
  backgroundColor: colors.white,
  borderRadius: '4px',
  boxSizing: 'border-box',
  overflow: 'hidden',
  boxShadow: styles.depthShadows[0]
};

// Main body area of the card.
const content = {
  padding: '24px 16px',
  color: colors.primaryText
};

// Themed action bar pinned below the content, items spread edge to edge.
const footer = {
  padding: '8px 16px',
  backgroundColor: colors.theme,
  display: 'flex',
  flexDirection: 'row',
  justifyContent: 'space-between',
  color: colors.white,
  textTransform: 'uppercase'
};

// Title line inside the content area.
const header = {
  color: colors.secondaryText,
  fontSize: '24px',
  fontWeight: 'bolder',
  margin: '0 0 16px'
};

export default { root, content, footer, header };
// Doxygen-generated navigation data for the MoveTransposeUpImpl class page.
// Each entry is [memberName, memberAnchorUrl, subentries].
// (Stray "<reponame>…<gh_stars>" scaffold metadata removed — it was not
// valid JavaScript and broke the file.)
var classarmnn_1_1optimizations_1_1_move_transpose_up_impl =
[
    [ "MoveTransposeUpImpl", "classarmnn_1_1optimizations_1_1_move_transpose_up_impl.xhtml#a742678831654006eaaf7fceee1ca27eb", null ],
    [ "~MoveTransposeUpImpl", "classarmnn_1_1optimizations_1_1_move_transpose_up_impl.xhtml#a6cd12b4da17cd5dad1fcf455079b290a", null ],
    [ "Run", "classarmnn_1_1optimizations_1_1_move_transpose_up_impl.xhtml#a5a8476ffc04ce7460bb09ad50d1d23de", null ]
];
# Copyright Ⓒ 2020 "Sberbank Real Estate Center" Limited Liability Company.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the Software
# is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.

# Wait until the quorum device comes up.
# Needed in parallelized scripts.
# Expects $lib_dir and $Witness to be set by the caller.
. "${lib_dir}/is_function_absent.bash"
. "${lib_dir}/vm_ssh.bash"

if is_function_absent 'wait_qdevice'
then
    # Polls corosync-qnetd status on the $Witness host every 5 seconds
    # until the tool exits successfully (qnetd is up and answering).
    function wait_qdevice
    {
        until vm_ssh $Witness "/usr/bin/corosync-qnetd-tool -s"
        do
            sleep 5
        done
    }
    readonly -f wait_qdevice
fi
class SoC:
    """Minimal system-on-chip netlist.

    Records point-to-point pin connections and emits a commented Verilog
    skeleton for them.
    """

    def __init__(self):
        # (source, destination) pin-name pairs, in insertion order.
        self.connections = []

    def connect(self, source, destination):
        """Record a connection from ``source`` to ``destination``."""
        self.connections.append((source, destination))

    def print_verilog(self, include_descendants=False):
        """Print Verilog comments for every recorded connection.

        When ``include_descendants`` is true, the output of
        ``generate_descendants_verilog`` is appended for each connection.
        """
        verilog_code = "// Verilog code for the SoC design\n"
        for source, destination in self.connections:
            verilog_code += f"// Connect {source} to {destination}\n"
            if include_descendants:
                verilog_code += self.generate_descendants_verilog(source, destination)
        print(verilog_code)

    def generate_descendants_verilog(self, source, destination):
        # Implement logic to generate Verilog code for descendants.
        # Placeholder: returns an empty string so that
        # print_verilog(include_descendants=True) does not crash — the
        # previous `pass` returned None and `str += None` raised TypeError.
        return ""


# Example usage
if __name__ == '__main__':
    # Use a distinct name for the instance: the original example rebound the
    # class name itself (`SoC = SoC()`), breaking any later instantiation.
    soc = SoC()
    soc.connect('B2', 'i_and.A2')
    soc.connect('B1B2', 'i_and.Z')
    soc.connect('B1B2', 'i_inv.A')
    soc.connect('Z2', 'i_inv.Z')
    soc.print_verilog(include_descendants=True)
# Create variables for all models used by the tutorials to make
# it easier to reference them with short names

# check for variable set by setupvars.sh in the SDK, need it to find models
: ${InferenceEngine_DIR:?"Must source the setupvars.sh in the SDK to set InferenceEngine_DIR"}

# Absolute path of the directory containing this script.
parent_path=$( cd "$(dirname "${BASH_SOURCE[0]}")" ; pwd -P )
PROJECT_PATH=$parent_path/../
modelDir=$parent_path/../models/

# Person/vehicle/bike crossroad detection model, FP16 and FP32 variants.
# NOTE(review): previous comment here said "Face Detection", but the model
# name is a person-vehicle-bike detector — confirm which model is intended.
modName=person-vehicle-bike-detection-crossroad-0078
export vehicle216=$modelDir/FP16/$modName.xml
export vehicle232=$modelDir/FP32/$modName.xml

# YOLO v3 frozen graph, shipped under data/ rather than models/.
modName=frozen_yolo_v3
export yolo16=$parent_path/../data/$modName.xml

export PROJECT_PATH=${PROJECT_PATH}
<filename>libs/desktop/shared/ui/src/lib/antd/Drawer/index.ts import Drawer from 'antd/es/drawer' import 'antd/es/drawer/style/css' export { Drawer }
package org.ekstep.analytics.api.util

import java.sql.{ResultSet, Timestamp}
import java.util.Date

import com.google.common.collect.Table
import org.ekstep.analytics.api.BaseSpec
import org.ekstep.analytics.framework.util.HTTPClient
import org.mockito.ArgumentMatchers
import org.mockito.Mockito._

// Unit tests for CacheUtil, exercising the device-location and
// consumer-channel caches against mocked Postgres/H2 accessors.
class TestCacheUtil extends BaseSpec {

  val postgresDBMock = mock[PostgresDBUtil]
  val H2DBUtilMock = mock[H2DBUtil]
  val resultSetMock = mock[ResultSet]

  val cacheUtil = new CacheUtil(postgresDBMock, H2DBUtilMock)

  "Cache util " should "refresh device location cache" in {
    when(postgresDBMock.readGeoLocationCity(ArgumentMatchers.any())).thenReturn(List(GeoLocationCity(geoname_id = 29, subdivision_1_name = "Karnataka", subdivision_2_custom_name = "Karnataka")))
    when(postgresDBMock.readGeoLocationRange(ArgumentMatchers.any())).thenReturn(List(GeoLocationRange(1234, 1234, 1)))
    // NOTE(review): `execute` is stubbed here but `executeQuery` is what gets
    // verified below — confirm which H2DBUtil method
    // initDeviceLocationCache actually calls.
    when(H2DBUtilMock.execute(ArgumentMatchers.any())).thenReturn(resultSetMock)
    when(resultSetMock.next()).thenReturn(true).thenReturn(true).thenReturn(false)

    cacheUtil.initDeviceLocationCache()

    verify(H2DBUtilMock, times(6)).executeQuery(ArgumentMatchers.any())

    // Second run: the Postgres reader throws; init must not propagate it.
    when(postgresDBMock.readGeoLocationCity(ArgumentMatchers.any())).thenThrow(new RuntimeException("something went wrong!"))
    cacheUtil.initDeviceLocationCache()
  }

  it should "cache consumer channel" in {
    when(postgresDBMock.read(ArgumentMatchers.any())).thenReturn(List(ConsumerChannel(consumerId = "Ekstep", channel = "in.ekstep", status = 0, createdBy = "System", createdOn = new Timestamp(new Date().getTime), updatedOn = new Timestamp(new Date().getTime))))
    cacheUtil.initConsumerChannelCache()

    verify(postgresDBMock, times(1)).read(ArgumentMatchers.any())

    // Failure path: init must swallow the reader exception.
    when(postgresDBMock.read(ArgumentMatchers.any())).thenThrow(new RuntimeException("something went wrong!"))
    cacheUtil.initConsumerChannelCache()
  }

  it should "populate consumer channel table" in {
    reset(postgresDBMock)
    when(postgresDBMock.read(ArgumentMatchers.any())).thenReturn(List(ConsumerChannel(consumerId = "Ekstep", channel = "in.ekstep", status = 0, createdBy = "System", createdOn = new Timestamp(new Date().getTime), updatedOn = new Timestamp(new Date().getTime))))
    val cacheUtilSpy = spy(cacheUtil)
    cacheUtilSpy.getConsumerChannlTable()
    // First access lazily initializes the cache exactly once.
    verify(cacheUtilSpy, times(1)).initConsumerChannelCache()

    when(postgresDBMock.read(ArgumentMatchers.any())).thenReturn(List(ConsumerChannel(consumerId = "Ekstep", channel = "in.ekstep", status = 0, createdBy = "System", createdOn = new Timestamp(new Date().getTime), updatedOn = new Timestamp(new Date().getTime))))
    val result = cacheUtilSpy.getConsumerChannlTable()
    result.isInstanceOf[Table[String, String, Integer]] should be (true)
  }

  it should "validate cache" in {
    val cacheUtilSpy = spy(cacheUtil)
    cacheUtilSpy.validateCache()
    verify(cacheUtilSpy, times(1)).initCache()
  }
}
// Maps a WaniKani SRS stage transition (current -> next) to the milestone
// reached, or null when the transition is not a milestone.
// (Stray "<filename>…" scaffold metadata removed — it was not valid
// JavaScript and broke the file.)
// 0 -> 1 : Apprentice
// 4 -> 5 : Guru
// 6 -> 7 : Master
// 7 -> 8 : Enlightened
// 8 -> 9 : Burned
import {
  APPRENTICE,
  GURU,
  MASTER,
  ENLIGHTENED,
  BURNED,
} from 'src/common/constants';

// Table-driven form of the original if-chain; keys are "current-next".
const TRANSITIONS = {
  '0-1': APPRENTICE,
  '4-5': GURU,
  '6-7': MASTER,
  '7-8': ENLIGHTENED,
  '8-9': BURNED,
};

export default (current, next) => {
  const key = `${current}-${next}`;
  // `in` check (rather than truthiness) so falsy constant values still match.
  return key in TRANSITIONS ? TRANSITIONS[key] : null;
}
#!/usr/bin/env bash

# Script to browserify without Grunt - usually the Grunt build is used to
# browserify and build everything.
# This script assumes that browserify is installed globally. If that is not the
# case, one could also use the command
# node_modules/grunt-browserify/node_modules/browserify/bin/cmd.js
# instead of `browserify`

# Run everything from the repository root, regardless of invocation directory.
bin_path=`dirname $0`
pushd $bin_path/.. > /dev/null

# This browserify build can be used by users of the module. It contains a
# UMD (universal module definition) and can be used via an AMD module
# loader like RequireJS or by simply placing a script tag in the page,
# which registers mymodule as a global var.
browserify \
  --entry halfred.js \
  --outfile browser/dist/halfred.js \
  --standalone halfred

# This browserify build can be required by other browserify modules that
# have been created with an --external parameter. browser/test/index.html uses
# this.
browserify \
  --entry halfred.js \
  --outfile browser/dist/halfred.external.js \
  --require ./halfred

# These are the browserified tests.
# NOTE(review): "--external ./halfred.is" looks like a typo for "./halfred"
# (the id registered with --require above) — verify before changing.
browserify \
  --entry browser/test/suite.js \
  --outfile browser/test/browserified_tests.js \
  --external ./halfred.is

popd > /dev/null
# Fetch/refresh submodules, then run the full pipeline from ./scripts.
# Steps are chained with && so the pipeline stops at the first failure
# instead of running later stages against a broken build (the previous
# `;`-separated form ignored every failure).
git submodule init && git submodule update
cd scripts || exit 1
sh clean.sh && sh copyshaders.sh && sh configure.sh && sh build.sh && sh run.sh
#!/usr/bin/env bash

# Container-init script for the "cerere" MongoDB database.
# Relies on the standard MONGO_INITDB_* / MONGO_NON_ROOT_* environment
# variables provided by the official mongo Docker image.

#create user for cerere database
# First create the "cerere" user in the admin DB (root + dbOwner roles),
# then a scoped readWrite user inside the cerere DB itself.
mongo -u ${MONGO_INITDB_ROOT_USERNAME} -p ${MONGO_INITDB_ROOT_PASSWORD} --eval "db = db.getSiblingDB('admin'); db.createUser({user:\"cerere\", pwd:\"cerere\", roles:[{ role: \"root\", db: \"admin\" }, {role:\"dbOwner\", db:\"cerere\"}, {role: \"readWrite\", db: \"cerere\"}]}); db.auth(\"cerere\", \"cerere\")"
mongo -u ${MONGO_INITDB_ROOT_USERNAME} -p ${MONGO_INITDB_ROOT_PASSWORD} --eval "db = db.getSiblingDB('cerere'); db.createUser({user:\"cerere\", pwd:\"cerere\", roles:[{ role: \"readWrite\", db: \"cerere\" }]});"

# init import for cerere database
# Seed the fSMConfiguration collection from the bundled JSON dump.
mongoimport -u ${MONGO_NON_ROOT_USERNAME} -p ${MONGO_NON_ROOT_PASSWORD} --db ${MONGO_INITDB_DATABASE} --collection fSMConfiguration --jsonArray --file /docker-dumps/fSMConfiguration.json
/**
 * Multiply two numbers.
 *
 * @param int|float $x first factor
 * @param int|float $y second factor
 * @return int|float the product of $x and $y
 */
function multiply($x, $y)
{
    return $x * $y;
}
<reponame>mr-zwets/cashscript-playground-1<filename>src/components/Header.tsx import React from 'react' interface Props {} const Header: React.FC<Props> = () => { return ( <header style={{ display: 'flex', flexDirection: 'row', justifyContent: 'space-between', color: 'black', padding: '24px 44px', fontSize: '30px' }}> <div className="header-title">CashScript Playground <strong><sup>Beta</sup></strong></div> </header> ) } export default Header
/* File: SutConfig.java ; This file is part of Twister. Version: 3.003 Copyright (C) 2012-2013 , Luxoft Authors: <NAME> <<EMAIL>> Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ import javax.swing.JPanel; import javax.swing.JTree; import java.awt.BorderLayout; import javax.swing.tree.DefaultMutableTreeNode; import org.apache.xmlrpc.client.XmlRpcClient; import org.apache.xmlrpc.client.XmlRpcClientConfigImpl; import java.awt.datatransfer.UnsupportedFlavorException; import javax.swing.tree.TreeNode; import java.awt.datatransfer.Transferable; import javax.swing.JComponent; import java.awt.datatransfer.DataFlavor; import javax.swing.TransferHandler; import javax.swing.tree.TreeSelectionModel; import javax.swing.tree.DefaultTreeModel; import javax.swing.JScrollPane; import java.util.HashMap; import java.net.URL; import java.util.Set; import java.util.Iterator; import javax.swing.tree.TreePath; import java.awt.event.MouseAdapter; import java.awt.event.MouseEvent; import javax.swing.JFrame; import javax.swing.JProgressBar; public class SutConfig extends JPanel{ public JTree tree; public DefaultMutableTreeNode root; public XmlRpcClient client; private JFrame progress; public SutConfig(){ setLayout(new BorderLayout()); tree = new JTree(); tree.addMouseListener(new MouseAdapter(){ public void mouseReleased(final MouseEvent ev){ if(ev.getClickCount()==2){ TreePath tp = tree.getPathForLocation(ev.getX(), ev.getY()); final DefaultMutableTreeNode tn = (DefaultMutableTreeNode)tp.getLastPathComponent(); 
if(!tree.isExpanded(tp))return; if(tn.getLevel()==1){ new Thread(){ public void run(){ startProgressBar(ev.getXOnScreen(),ev.getYOnScreen()); tn.removeAllChildren(); ((DefaultTreeModel)tree.getModel()).reload(tn); SUT s = (SUT)tn.getUserObject(); String eps = ""; try{ String lock = client.execute("is_sut_locked", new Object[]{s.getID()}).toString(); if(!lock.equals("false")&&lock.indexOf("*ERROR*")==-1){ s.setLock(lock); }else{ s.setLock(""); } String resp = client.execute("is_sut_reserved", new Object[]{s.getID()}).toString(); if(!resp.equals("false")&&resp.indexOf("*ERROR*")==-1){ s.setReserved(resp); } else { s.setReserved(""); } } catch (Exception e){ System.out.println("Could not get status for: "+s.getName()); e.printStackTrace(); } try{HashMap hash= (HashMap)client.execute("get_sut", new Object[]{s.getID()}); try{eps = ((HashMap)hash.get("meta")).get("_epnames_"+RunnerRepository.user).toString();} catch(Exception e){ eps = ""; } DefaultMutableTreeNode epsnode = new DefaultMutableTreeNode("EP: "+eps,false); s.setEPNode(epsnode); ((DefaultTreeModel)tree.getModel()).insertNodeInto(epsnode, tn, tn.getChildCount()); Object[] children = (Object[])hash.get("children"); buildChildren(children,tn); }catch(Exception e){ System.out.println("Could not get resources for: "+tn.toString()); e.printStackTrace(); } progress.dispose(); } }.start(); } } } }); tree.getSelectionModel().setSelectionMode(TreeSelectionModel.SINGLE_TREE_SELECTION); root = new DefaultMutableTreeNode("root"); DefaultTreeModel treemodel = new DefaultTreeModel(root,true); tree.setModel(treemodel); tree.setRootVisible(false); tree.setCellRenderer(new CustomIconRenderer()); JScrollPane sp = new JScrollPane(tree); add(sp,BorderLayout.CENTER); tree.setTransferHandler(new ImportTreeTransferHandler()); tree.setDragEnabled(true); initializeRPC(); } public void startProgressBar(final int X, final int Y){ progress = new JFrame(); progress.setAlwaysOnTop(true); progress.setLocation(X,Y); 
progress.setUndecorated(true); JProgressBar bar = new JProgressBar(); bar.setIndeterminate(true); progress.add(bar); progress.pack(); progress.setVisible(true); } private void buildChildren(Object [] children, DefaultMutableTreeNode treenode){ String childid, subchildid; for(Object o:children){ childid = o.toString(); HashMap subhash = null; try{subhash= (HashMap)client.execute("get_sut", new Object[]{childid});} catch(Exception e){e.printStackTrace();}; if(subhash!=null){ //HashMap subhash = (HashMap)o; try{String subpath = subhash.get("path").toString(); String subname = subpath.split("/")[subpath.split("/").length-1]; HashMap meta = (HashMap)subhash.get("meta"); String id = subhash.get("id").toString(); Comp comp = new Comp(subname,id,meta.get("_id")+""); DefaultMutableTreeNode component = new DefaultMutableTreeNode(comp,true); DefaultMutableTreeNode nodeid = new DefaultMutableTreeNode("ID: "+id,false); component.add(nodeid); ((DefaultTreeModel)tree.getModel()).insertNodeInto(component, treenode, treenode.getChildCount()); if(meta.get("_id")!=null){ String referenceid = meta.get("_id").toString(); Node child = getTB(referenceid,null); DefaultMutableTreeNode treechild = new DefaultMutableTreeNode(child); DefaultMutableTreeNode temp = new DefaultMutableTreeNode("ID: "+child.getID(),false); treechild.add(temp); DefaultMutableTreeNode temp2 = new DefaultMutableTreeNode(child.getPath(),false); treechild.add(temp2); ((DefaultTreeModel)tree.getModel()).insertNodeInto(treechild, component,component.getChildCount()); } Object [] subchildren = (Object[])subhash.get("children"); buildChildren(subchildren,component); } catch (Exception e){ e.printStackTrace(); } } } } public void initializeRPC(){ try{XmlRpcClientConfigImpl configuration = new XmlRpcClientConfigImpl(); configuration.setServerURL(new URL("http://"+RunnerRepository.host+ ":"+RunnerRepository.getCentralEnginePort()+"/sut/")); configuration.setEnabledForExtensions(true); 
configuration.setBasicPassword(<PASSWORD>); configuration.setBasicUserName(RunnerRepository.user); client = new XmlRpcClient(); client.setConfig(configuration); System.out.println("XMLRPC Client for SutConfig initialized: "+client);} catch(Exception e){System.out.println("Could not conect to "+ RunnerRepository.host+" :"+RunnerRepository.getCentralEnginePort()+"/ra/"+ "for RPC client initialization");} } /* * create a node based om an id * the node is created from the data * received from server */ public Node getTB(String id,Node parent){ Object ob= null; try{ // ob = RunnerRepository.window.mainpanel.p4.getTB().client.execute("get_resource", new Object[]{id}); ob = RunnerRepository.window.mainpanel.p4.getTB().client.execute("get_tb", new Object[]{id}); HashMap hash= (HashMap)ob; // HashMap hash= (HashMap)client.execute("getResource", new Object[]{id}); String path = hash.get("path").toString(); String name = path.split("/")[path.split("/").length-1]; byte type = 1; if(path.indexOf("/")==-1){ type = 0; } Node node = new Node(id,path,name,parent,null,type); Object[] children = (Object[])hash.get("children"); for(Object o:children){ node.addChild(o.toString(), null); } HashMap meta = (HashMap)hash.get("meta"); if(meta!=null&&meta.size()!=0){ Set keys = meta.keySet(); Iterator iter = keys.iterator(); while(iter.hasNext()){ String n = iter.next().toString(); if(n.equals("_epnames_"+RunnerRepository.user)){ node.setEPs(meta.get(n).toString()); continue; } node.addProperty(n, meta.get(n).toString()); } } return node; }catch(Exception e){ e.printStackTrace(); System.out.println("requested id: "+id); if(ob!=null)System.out.println("server respons: "+ob.toString()); return null; } } public void getFirstLevel(){ root.removeAllChildren(); ((DefaultTreeModel)tree.getModel()).reload(); String sutfiles [][] = RunnerRepository.window.mainpanel.p4.getSut().sut.getSutTree().getSutFiles(); int size = sutfiles.length; String sut; for(int i=0;i<size;i++){ sut = sutfiles[i][0]; SUT s 
= new SUT(sut,null); sut = sut.replace("(system)", ".system"); sut = sut.replace("(user)", ".user"); if(!sutfiles[i][1].equals("free")){ if(sutfiles[i][1].equals("locked")){ s.setLock(sutfiles[i][2]); }else{ s.setReserved(sutfiles[i][2]); } } s.setID("/"+sut); DefaultMutableTreeNode child = new DefaultMutableTreeNode(s); ((DefaultTreeModel)tree.getModel()).insertNodeInto(child, root, root.getChildCount()); } } class ImportTreeTransferHandler extends TransferHandler { DataFlavor nodesFlavor; DataFlavor[] flavors = new DataFlavor[1]; public ImportTreeTransferHandler() { try { String mimeType = DataFlavor.javaJVMLocalObjectMimeType + ";class=\"" + Node.class.getName() + "\""; nodesFlavor = new DataFlavor(mimeType); flavors[0] = nodesFlavor; } catch(ClassNotFoundException e) { System.out.println("ClassNotFound: " + e.getMessage()); } } public boolean canImport(TransferHandler.TransferSupport support) { return false; } protected Transferable createTransferable(JComponent c) { JTree tree = (JTree)c; TreePath path = tree.getSelectionPath(); if(path != null) { StringBuilder sb = new StringBuilder(); Object [] paths = path.getPath(); paths[1] = paths[1].toString().replace(".system","(system)"); paths[1] = paths[1].toString().replace(".user","(user)"); sb.append(paths[1]); for(int i=2;i<paths.length;i++){ sb.append("/"); sb.append(paths[i]); } DefaultMutableTreeNode node = (DefaultMutableTreeNode)path.getLastPathComponent(); if(node.getUserObject() instanceof SUT){ return new StringTransferable(((SUT)node.getUserObject()).getID()+" - "+sb.toString()); } else if(node.getUserObject() instanceof Comp){ return new StringTransferable(((Comp)node.getUserObject()).getID()+" - "+sb.toString()); } else return null; } return null; } private DefaultMutableTreeNode copy(TreeNode node) { return new DefaultMutableTreeNode(node); } public int getSourceActions(JComponent c) { return COPY; } public boolean importData(TransferHandler.TransferSupport support) { return false; } private 
DefaultMutableTreeNode createChildren(Node node){ DefaultMutableTreeNode parent = new DefaultMutableTreeNode(node); DefaultMutableTreeNode temp = new DefaultMutableTreeNode("ID: "+node.getID(),false); parent.add(temp); temp = new DefaultMutableTreeNode(node.getPath(),false); parent.add(temp); return parent; } public String toString() { return getClass().getName(); } public class StringTransferable implements Transferable { String file; DataFlavor nodesFlavor; DataFlavor[] flavors = new DataFlavor[1]; public StringTransferable(String file) { this.file = file; try { String mimeType = DataFlavor.javaJVMLocalObjectMimeType + ";class=\"" + String.class.getName() + "\""; nodesFlavor = new DataFlavor(mimeType); flavors[0] = nodesFlavor; } catch(ClassNotFoundException e) { System.out.println("ClassNotFound: " + e.getMessage()); } } public Object getTransferData(DataFlavor flavor) throws UnsupportedFlavorException { if(!isDataFlavorSupported(flavor)) throw new UnsupportedFlavorException(flavor); return file; } public DataFlavor[] getTransferDataFlavors() { return flavors; } public boolean isDataFlavorSupported(DataFlavor flavor) { return nodesFlavor.equals(flavor); } } } }
#include <iostream>  // fix: required for std::cout in the example main() below
#include <random>

// Thin wrapper around std::uniform_real_distribution with a Mersenne Twister
// engine seeded once from std::random_device.
template <typename T>
class UniformRealDistribution {
public:
    // Constructs a generator producing values uniformly drawn from [min, max).
    UniformRealDistribution(T min, T max)
        : distribution_(min, max) {
        random_engine_.seed(std::random_device()());
    }

    // Returns the next pseudo-random value from the distribution.
    T generate() {
        return distribution_(random_engine_);
    }

private:
    std::uniform_real_distribution<T> distribution_;
    std::mt19937 random_engine_;
};

// Example usage
int main() {
    UniformRealDistribution<double> urd(0.0, 1.0);
    for (int i = 0; i < 10; ++i) {
        std::cout << urd.generate() << std::endl;
    }
    return 0;
}
package no.helgeby.gamedev.engine;

import static java.awt.image.BufferedImage.TYPE_INT_ARGB;

import java.awt.Color;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Image;
import java.awt.image.BufferedImage;
import java.util.ArrayList;
import java.util.List;

/**
 * A 2D world that owns a list of entities and an off-screen image buffer
 * they are drawn into. Environments can be nested; child environments share
 * the parent's entity-id sequence starting point.
 */
public class Environment {

	public static final int MIN_WIDTH = 5;
	public static final int MIN_HEIGHT = 5;

	public static final int DEFAULT_WIDTH = 1920;
	public static final int DEFAULT_HEIGHT = 1080;

	protected final boolean isRoot;

	// Origin of this environment; used by move() to compute deltas.
	protected Vector2D position;
	protected int width;
	protected int height;
	protected Vector2D gravity;
	protected BufferedImage image;
	protected Graphics surface;
	protected long idSequence;

	// Sorted by z-order. Move entities up or down to alter z-order.
	protected List<Entity> entities;

	protected final Environment parent;

	/** Creates a root environment. */
	public Environment() {
		parent = null;
		isRoot = true;
		idSequence = 0;
		init();
	}

	/** Creates a child environment continuing the parent's id sequence. */
	public Environment(Environment parent) {
		this.parent = parent;
		isRoot = false;
		this.idSequence = parent.idSequence;
		init();
	}

	private void init() {
		entities = new ArrayList<>();
		width = DEFAULT_WIDTH;
		height = DEFAULT_HEIGHT;
		gravity = new Vector2D(0, 0.1f);
		// Fix: position was never initialized anywhere, so the first call to
		// move() dereferenced null.
		position = new Vector2D(0, 0);
		initImageBuffer();
	}

	// (Re)allocates the backing image and its drawing surface.
	private void initImageBuffer() {
		image = new BufferedImage(width, height, TYPE_INT_ARGB);
		surface = image.createGraphics();
	}

	/** Advances every entity by one simulation step. */
	public void tick() {
		for (Entity e : entities) {
			e.tick();
		}
	}

	/** Renders all entities into a fresh buffer, then blits it to the surface. */
	public void draw() {
		BufferedImage buffer = new BufferedImage(width, height, TYPE_INT_ARGB);
		Graphics2D graphics = buffer.createGraphics();
		for (Entity e : entities) {
			e.draw(graphics);
		}
		//surface.drawImage(buffer, 0, 0, null);
		surface.drawImage(buffer, 0, 0, Color.BLACK, null);
	}

	/** Adds an entity and assigns it the next id from the sequence. */
	public void addEntity(Entity entity) {
		entities.add(entity);
		entity.id = newEntityId();
	}

	public long newEntityId() {
		// Pre-increment so first ID is 1. The current sequence value remains
		// at the last ID.
		return ++idSequence;
	}

	public int getWidth() {
		return width;
	}

	public int getHeight() {
		return height;
	}

	public static Environment createRootEnvironment() {
		return new Environment();
	}

	public Image getImage() {
		return image;
	}

	public List<Entity> getEntities() {
		return entities;
	}

	/** Resizes the environment, rejecting sizes below the minimums. */
	public void resize(int width, int height) {
		if (width < MIN_WIDTH || height < MIN_HEIGHT) {
			return;
		}
		this.width = width;
		this.height = height;
		initImageBuffer();
		moveEntitiesOutsideContainer();
	}

	/**
	 * Moves the environment's origin to (x, y); entities are shifted in the
	 * opposite direction so they keep their world positions.
	 */
	public void move(int x, int y) {
		int deltaX = x - (int) position.x;
		int deltaY = y - (int) position.y;
		Vector2D distance = new Vector2D(deltaX * -1, deltaY * -1);
		for (Entity e : entities) {
			e.move(distance);
		}
		// Fix: record the new origin so subsequent calls compute correct
		// deltas (previously position was never updated).
		position = new Vector2D(x, y);
		// Move outside entities back into the container.
		moveEntitiesOutsideContainer();
	}

	// Clamps any entity that ended up outside the bounds back inside.
	private void moveEntitiesOutsideContainer() {
		for (Entity e : entities) {
			boolean changed = false;
			float x = e.position.x;
			float y = e.position.y;
			if (x > width) {
				x = width - e.width - MIN_WIDTH;
				changed = true;
			}
			if (x < 0) {
				x = 0;
				changed = true;
			}
			if (y > height) {
				y = height - e.height - MIN_HEIGHT;
				changed = true;
			}
			if (y < 0) {
				y = 0;
				changed = true;
			}
			if (changed) {
				e.position = new Vector2D(x, y);
			}
		}
	}
}
-- Product catalogue: one row per product.
CREATE TABLE `products` (
  `id` INT NOT NULL AUTO_INCREMENT,   -- surrogate primary key
  `name` VARCHAR(255) NOT NULL,       -- display name
  `price` DECIMAL(10,2) NOT NULL,     -- unit price; DECIMAL avoids float rounding
  PRIMARY KEY (`id`)
);
package org.egovframe.rte.fdl.xml.ehcache;

import java.io.File;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.net.Socket;
import java.util.Iterator;
import java.util.List;

import javax.annotation.Resource;

import org.jdom2.Attribute;
import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.JDOMException;
import org.jdom2.input.SAXBuilder;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.egovframe.rte.fdl.xml.EgovSAXValidatorService;
import org.egovframe.rte.fdl.xml.SharedObject;
import org.egovframe.rte.fdl.xml.impl.EgovDOMFactoryServiceImpl;
import org.egovframe.rte.fdl.xml.impl.EgovSAXFactoryServiceImpl;

/**
 * CategoryControllerTest is TestCase of CategoryController
 *
 * Integration test that parses an XML document, walks its elements, and
 * stores/retrieves the parsed element list via a socket-based cache server.
 *
 * @author <NAME>
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = { "classpath:spring/context-xmltest.xml" })
public class CacheXMLTest {

	@Resource(name = "domconcreteCont")
	EgovDOMFactoryServiceImpl domconcrete = null;

	@Resource(name = "saxconcreteCont")
	EgovSAXFactoryServiceImpl saxconcrete = null;

	// Cache-server connection settings and store/retrieve keys.
	String cacheServerIP;
	int cacheServerPort=0;
	String Storekey;
	String Retrievekey;
	String XMLFileName;

	private static final Logger LOGGER = LoggerFactory.getLogger(CacheXMLTest.class);

	String XMLFileName1 = "spring/context-sql.xml";
	String fileName = Thread.currentThread().getContextClassLoader().getResource(XMLFileName1).getFile();

	public void setXMLFileName(String XMLFileName) {
		this.XMLFileName = XMLFileName;
	}

	public void setPortNIp(String cacheServerIP,int cacheServerPort) {
		this.cacheServerIP = cacheServerIP;
		this.cacheServerPort = cacheServerPort; //64208
	}

	public void setStorekey(String Storekey) {
		this.Storekey = Storekey;
	}

	public void setRetrievekey(String Retrievekey) {
		this.Retrievekey = Retrievekey;
	}

	/** Sends the parsed element list to the cache server under Storekey. */
	public void sendCacheServer(List<?> list) {
		Socket socket = null;
		ObjectOutputStream oos= null;
		ObjectInputStream ooi= null;
		SharedObject sObject = null;
		try {
			socket = new Socket(cacheServerIP, cacheServerPort);
			oos = new ObjectOutputStream(socket.getOutputStream());
			sObject = new SharedObject(Storekey,list);
			oos.writeObject(sObject);
			ooi = new ObjectInputStream(socket.getInputStream());
			sObject = (SharedObject)ooi.readObject();
			LOGGER.debug("서버로 부터 Message : {}", sObject.getValue());
		} catch(Throwable t) {
			t.printStackTrace();
		} finally {
			// Fix: close() was called unconditionally; when the connect failed
			// before the streams were opened this threw an NPE that masked the
			// original failure.
			if (oos != null) {
				try { oos.close(); } catch(Throwable t) {t.printStackTrace();}
			}
			if (socket != null) {
				try { socket.close(); } catch(Throwable t) {t.printStackTrace();}
			}
		}
	}

	/** Retrieves the object stored under Retrievekey from the cache server. */
	public SharedObject getCacheServer() {
		Socket socket = null;
		ObjectOutputStream oos= null;
		ObjectInputStream ooi= null;
		SharedObject sObject = null;
		try {
			socket = new Socket(cacheServerIP, cacheServerPort);
			oos = new ObjectOutputStream(socket.getOutputStream());
			sObject = new SharedObject("*",Retrievekey);
			oos.writeObject(sObject);
			ooi = new ObjectInputStream(socket.getInputStream());
			sObject = (SharedObject)ooi.readObject();
		} catch(Throwable t) {
			t.printStackTrace();
			// NOTE(review): exiting the JVM from a test helper kills the whole
			// test run; consider rethrowing instead.
			System.exit(1);
		} finally {
			// Fix: null-guard the closes (see sendCacheServer).
			if (oos != null) {
				try { oos.close(); } catch(Throwable t) {t.printStackTrace();}
			}
			if (socket != null) {
				try { socket.close(); } catch(Throwable t) {t.printStackTrace();}
			}
		}
		return sObject;
	}

	/** Recursively logs each element's attributes and descends into children. */
	public void viewEelement(List<?> list) {
		Iterator<?> i = list.iterator();
		while (i.hasNext()) {
			Element element = (Element) i.next();
			List<?> attList = element.getAttributes();
			if (attList.size() != 0) {
				// Iterate the element's attribute list.
				Iterator<?> ii = attList.iterator();
				while(ii.hasNext()) {
					/** Attribute parsing **/
					// Take one attribute from the iterator and log it.
					Attribute at = (Attribute)ii.next();
					LOGGER.debug("attribute : {} attribute value : {}", at.getName(), at.getValue());
					LOGGER.debug("Element1 Name : {} Element1 Value : {}", (String)element.getName(), (String)element.getValue());
				} // end of while
			} // end of attribute if
			List<?> list2 = element.getChildren();
			if(list2.size() > 1) {
				viewEelement(list2);
			}
		}
	}

	@Test
	public void ModuleTest() throws IOException,JDOMException {
		EgovSAXValidatorService saxValidator = null;
		String cacheServerIP = "localhost";
		String Storekey = "1";
		int cacheServerPort = 64208;
		CacheXMLAgent cxa = new CacheXMLAgent();
		cxa.setPortNIp(cacheServerIP, cacheServerPort);
		cxa.setStorekey(Storekey);
		cxa.setXMLFileName(XMLFileName1);
		saxValidator = saxconcrete.createSAXValidator();
		SAXBuilder builder = new SAXBuilder();
		Document doc = builder.build(new File(fileName));
		List<?> list = saxValidator.getResult(doc,"//*[@*]");
		LOGGER.debug("ModuleTest list() >>> " + list);
		cxa.viewEelement(list);
		// 1. store the parsed list in the cache
		cxa.sendCacheServer(list);
	}
}
# DO NOT INCLUDE SENSITIVE DATA IN THIS FILE!
# E.G. TO STORE SECRETS USE THE _overrides.sh:
# copy _parameters.sh to _overrides.sh
# and modify the _overrides.sh.
# DO NOT COMMIT THE _overrides.sh FILE!

# Should the script execute `az login`? Possible values: YES, NO.
# NOTE(review): value below is lowercase "yes" — confirm the consuming script
# compares case-insensitively, or normalize the value/comment to match.
LOGIN="yes"

# Name of a location where to create a new resource group
LOCATION="switzerlandnorth"

# Resource group name
GROUP="mowbray"
#!/bin/sh
# Regenerates ../src/js_natives.h by embedding the JavaScript sources listed
# below via the js2c code generator. Entry order matters to js2c, so keep it.

# -f: do not fail when the header does not exist yet (first build).
rm -f ../src/js_natives.h

# Fix: the original list ended with a dangling trailing backslash.
python ../tools/js2c.py \
    ../src/js_natives.h \
    ../lib/cinder.js \
    ../lib/path.js \
    ../lib/util.js \
    ../lib/module.js \
    ../lib/fs.js \
    ../lib/vm.js \
    ../lib/assert.js \
    ../lib/timers.js \
    ../lib/events.js \
    ../lib/app.js \
    ../lib/color.js \
    ../lib/ray.js \
    ../lib/camera.js \
    ../lib/gl.js \
    ../lib/text.js \
    ../lib/console.js \
    ../lib/shader.js \
    ../lib/batch.js \
    ../lib/texture.js \
    ../lib/glm.js \
    ../lib/fbo.js \
    ../lib/vbo.js \
    ../lib/vao.js \
    ../lib/math.js \
    ../lib/default_main.js
<reponame>xcorail/OTB /* * Copyright (C) 2005-2017 Centre National d'Etudes Spatiales (CNES) * * This file is part of Orfeo Toolbox * * https://www.orfeo-toolbox.org/ * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #ifndef otbGenericRSResampleImageFilter_hxx #define otbGenericRSResampleImageFilter_hxx #include "otbGenericRSResampleImageFilter.h" #include "itkMetaDataObject.h" #include "otbMetaDataKey.h" #include "itkProgressAccumulator.h" #include "itkPoint.h" #include "ogr_spatialref.h" #include "cpl_conv.h" #include "otbGeoInformationConversion.h" #include "otbImageToGenericRSOutputParameters.h" namespace otb { template <class TInputImage, class TOutputImage> GenericRSResampleImageFilter<TInputImage, TOutputImage> ::GenericRSResampleImageFilter() { // flags initialization m_EstimateInputRpcModel = false; m_EstimateOutputRpcModel = false; m_RpcEstimationUpdated = false; // internal filters instantiation m_Resampler = ResamplerType::New(); m_InputRpcEstimator = InputRpcModelEstimatorType::New(); m_OutputRpcEstimator= OutputRpcModelEstimatorType::New(); m_Transform = GenericRSTransformType::New(); /** Set number of threads to 1 for Displacement field generator (use for faster access to * OSSIM elevation source, which does not handle multithreading when accessing to DEM data) */ this->SetDisplacementFilterNumberOfThreads(1); } template <class TInputImage, class TOutputImage> void GenericRSResampleImageFilter<TInputImage, TOutputImage> ::GenerateData() { // Set up progress 
reporting typename itk::ProgressAccumulator::Pointer progress = itk::ProgressAccumulator::New(); progress->SetMiniPipelineFilter(this); progress->RegisterInternalFilter(m_Resampler, 1.f); m_Resampler->GraftOutput(this->GetOutput()); m_Resampler->UpdateOutputData(m_Resampler->GetOutput()); this->GraftOutput(m_Resampler->GetOutput()); } /** * Generate the right output requested region following the * parameters set by the user * */ template <class TInputImage, class TOutputImage> void GenericRSResampleImageFilter<TInputImage, TOutputImage> ::GenerateOutputInformation() { // Estimate the output rpc Model if needed if (m_EstimateOutputRpcModel) this->EstimateOutputRpcModel(); // Estimate the input rpc model if it is needed if (m_EstimateInputRpcModel && !m_RpcEstimationUpdated) { this->EstimateInputRpcModel(); } // Instantiate the RS transform this->UpdateTransform(); m_Resampler->SetInput(this->GetInput()); m_Resampler->SetTransform(m_Transform); m_Resampler->SetDisplacementFieldSpacing(this->GetDisplacementFieldSpacing()); m_Resampler->GraftOutput(this->GetOutput()); m_Resampler->UpdateOutputInformation(); this->GraftOutput(m_Resampler->GetOutput()); // Encapsulate output projRef and keywordlist itk::MetaDataDictionary& dict = this->GetOutput()->GetMetaDataDictionary(); itk::EncapsulateMetaData<std::string>(dict, MetaDataKey::ProjectionRefKey, this->GetOutputProjectionRef()); if (this->GetOutputKeywordList().GetSize() > 0) { itk::EncapsulateMetaData<ImageKeywordlist>(dict, MetaDataKey::OSSIMKeywordlistKey, this->GetOutputKeywordList()); } } /** * Method to estimate the rpc model of the output using a temporary image */ template <class TInputImage, class TOutputImage> void GenericRSResampleImageFilter<TInputImage, TOutputImage> ::EstimateOutputRpcModel() { // Temp image : not allocated but with the same metadata than the // output typename OutputImageType::Pointer tempPtr = OutputImageType::New(); typename OutputImageType::RegionType region; 
region.SetSize(this->GetOutputSize()); region.SetIndex(this->GetOutputStartIndex() ); tempPtr->SetRegions(region); // Encapsulate the output metadata in the temp image itk::MetaDataDictionary& tempDict = tempPtr->GetMetaDataDictionary(); itk::EncapsulateMetaData<std::string>(tempDict, MetaDataKey::ProjectionRefKey, this->GetOutputProjectionRef() ); itk::EncapsulateMetaData<ImageKeywordlist>(tempDict, MetaDataKey::OSSIMKeywordlistKey, this->GetOutputKeywordList()); // Estimate the rpc model from the temp image m_OutputRpcEstimator->SetInput(tempPtr); m_OutputRpcEstimator->UpdateOutputInformation(); // Encapsulate the estimated rpc model in the output if (m_OutputRpcEstimator->GetOutput()->GetImageKeywordlist().GetSize() > 0) { // Fill the transform with the right kwl m_Transform->SetInputKeywordList( m_OutputRpcEstimator->GetOutput()->GetImageKeywordlist()); } } /** * Fill with the default dict of the input and the output * and instantiate the transform */ template <class TInputImage, class TOutputImage> void GenericRSResampleImageFilter<TInputImage, TOutputImage> ::UpdateTransform() { if (!m_EstimateInputRpcModel) { m_Transform->SetOutputDictionary(this->GetInput()->GetMetaDataDictionary()); m_Transform->SetOutputProjectionRef(this->GetInput()->GetProjectionRef()); m_Transform->SetOutputKeywordList(this->GetInput()->GetImageKeywordlist()); } m_Transform->InstantiateTransform(); } template <class TInputImage, class TOutputImage> void GenericRSResampleImageFilter<TInputImage, TOutputImage> ::PropagateRequestedRegion(itk::DataObject *output) { if (this->m_Updating) return; // Retrieve output requested region m_Resampler->GetOutput()->SetRequestedRegion(output); m_Resampler->GetOutput()->PropagateRequestedRegion(); } /** * Method to estimate the rpc model of the input using a temporary * image to avoid adding this rpc estimator filter in the minipipeline. 
* */ template <class TInputImage, class TOutputImage> void GenericRSResampleImageFilter<TInputImage, TOutputImage> ::EstimateInputRpcModel() { // Temp image : not allocated but with the sampe metadata as the // output typename InputImageType::Pointer tempPtr = InputImageType::New(); tempPtr->SetRegions(this->GetInput()->GetLargestPossibleRegion()); tempPtr->CopyInformation(this->GetInput()); // Estimate the rpc model with the temp image m_InputRpcEstimator->SetInput(tempPtr); m_InputRpcEstimator->UpdateOutputInformation(); // No need to override the input kwl, just setup the // transform with the kwl estimated if(m_InputRpcEstimator->GetInput()->GetImageKeywordlist().GetSize() > 0) m_Transform->SetOutputKeywordList(m_InputRpcEstimator->GetOutput()->GetImageKeywordlist()); // Update the flag for input rpcEstimation in order to not compute // the rpc model for each stream m_RpcEstimationUpdated = true; } /** * Method used to copy the parameters of the input image * */ template <class TInputImage, class TOutputImage> void GenericRSResampleImageFilter<TInputImage, TOutputImage> ::SetOutputParametersFromImage(const ImageBaseType * image) { const InputImageType * src = dynamic_cast<const InputImageType*>(image); this->SetOutputOrigin ( src->GetOrigin() ); this->SetOutputSpacing ( src->GetSignedSpacing() ); this->SetOutputStartIndex ( src->GetLargestPossibleRegion().GetIndex() ); this->SetOutputSize ( src->GetLargestPossibleRegion().GetSize() ); this->SetOutputProjectionRef(src->GetProjectionRef()); this->SetOutputKeywordList(src->GetImageKeywordlist()); } /** * Method used to copy the parameters of the input image * */ template <class TInputImage, class TOutputImage> template <class TImageType> void GenericRSResampleImageFilter<TInputImage, TOutputImage> ::SetOutputParametersFromImage(const TImageType * image) { this->SetOutputOrigin ( image->GetOrigin() ); this->SetOutputSpacing ( image->GetSignedSpacing() ); this->SetOutputStartIndex ( 
image->GetLargestPossibleRegion().GetIndex() ); this->SetOutputSize ( image->GetLargestPossibleRegion().GetSize() ); this->SetOutputProjectionRef(image->GetProjectionRef()); this->SetOutputKeywordList(image->GetImageKeywordlist()); } /** * Method used to project the input image in a defined srs, estimating * the output size and origin. The spacing is set by the user. The * supported projection are UTM and WGS84. * */ template <class TInputImage, class TOutputImage> void GenericRSResampleImageFilter<TInputImage, TOutputImage> ::SetOutputParametersFromMap(const std::string map, const SpacingType& spacing) { // Get the input Image const InputImageType* input = this->GetInput(); // Update the transform with input information // Done here because the transform is not instantiated // yet this->UpdateTransform(); // Needed variable std::string projectionRef; // The inverse transform is need here GenericRSTransformPointerType invTransform = GenericRSTransformType::New(); m_Transform->GetInverse(invTransform); if(strcmp(map.c_str(),"UTM")== 0) { // Build the UTM transform : Need the zone & the hemisphere // For this we us the geographic coordinate of the input UL corner typedef itk::Point<double, 2> GeoPointType; // get the utm zone and hemisphere using the input UL corner // geographic coordinates typename InputImageType::PointType pSrc; IndexType index; GeoPointType geoPoint; index[0] = input->GetLargestPossibleRegion().GetIndex()[0]; index[1] = input->GetLargestPossibleRegion().GetIndex()[1]; input->TransformIndexToPhysicalPoint(index, pSrc); // The first transform of the inverse transform : input -> WGS84 geoPoint = invTransform->GetTransform()->GetFirstTransform()->TransformPoint(pSrc); // Guess the zone and the hemisphere int zone = Utils::GetZoneFromGeoPoint(geoPoint[0], geoPoint[1]); bool hem = (geoPoint[1]>1e-10)?true:false; // Build the output UTM projection ref OGRSpatialReferenceH oSRS = OSRNewSpatialReference(nullptr); OSRSetProjCS(oSRS, "UTM"); 
OSRSetWellKnownGeogCS(oSRS, "WGS84"); OSRSetUTM(oSRS, zone, hem); char * utmRefC = nullptr; OSRExportToWkt(oSRS, &utmRefC); projectionRef = utmRefC; CPLFree(utmRefC); OSRRelease(oSRS); } else if(strcmp(map.c_str(),"WGS84")==0) { projectionRef = otb::GeoInformationConversion::ToWKT(4326); //WGS84 } else { itkExceptionMacro("The output map "<<map<<"is not supported, please try UTM or WGS84"); } // Compute the output parameters typedef otb::ImageToGenericRSOutputParameters<InputImageType> OutputParametersEstimatorType; typename OutputParametersEstimatorType::Pointer genericRSEstimator = OutputParametersEstimatorType::New(); genericRSEstimator->SetInput(input); genericRSEstimator->SetOutputProjectionRef(projectionRef); genericRSEstimator->ForceSpacingTo(spacing); genericRSEstimator->Compute(); // Update the Output Parameters this->SetOutputProjectionRef(projectionRef); this->SetOutputOrigin(genericRSEstimator->GetOutputOrigin()); this->SetOutputSpacing(genericRSEstimator->GetOutputSpacing()); this->SetOutputSize(genericRSEstimator->GetOutputSize()); this->UpdateTransform(); } /** * Used to project the input image in a srs defined by its WKT * projectionRef (as parameter) only. estimating the output size, spacing * and origin. 
* */ template <class TInputImage, class TOutputImage> void GenericRSResampleImageFilter<TInputImage, TOutputImage> ::SetOutputParametersFromMap(const std::string projectionRef) { const InputImageType* input = this->GetInput(); // Compute the output parameters typedef otb::ImageToGenericRSOutputParameters<InputImageType> OutputParametersEstimatorType; typename OutputParametersEstimatorType::Pointer genericRSEstimator = OutputParametersEstimatorType::New(); genericRSEstimator->SetInput(input); genericRSEstimator->SetOutputProjectionRef(projectionRef); genericRSEstimator->Compute(); // Update the Output Parameters this->SetOutputProjectionRef(projectionRef); this->SetOutputOrigin(genericRSEstimator->GetOutputOrigin()); this->SetOutputSpacing(genericRSEstimator->GetOutputSpacing()); this->SetOutputSize(genericRSEstimator->GetOutputSize()); this->UpdateTransform(); } template <class TInputImage, class TOutputImage> void GenericRSResampleImageFilter<TInputImage, TOutputImage> ::PrintSelf(std::ostream& os, itk::Indent indent) const { Superclass::PrintSelf(os, indent); os << indent << "EstimateInputRpcModel:" << (m_EstimateInputRpcModel ? "On" : "Off") << std::endl; os << indent << "EstimateOutputRpcModel:" << (m_EstimateOutputRpcModel ? "On" : "Off") << std::endl; os << indent << "RpcEstimationUpdated:" << (m_RpcEstimationUpdated ? "True" : "False") << std::endl; os << indent << "OutputOrigin: " << m_Resampler->GetOutputOrigin() << std::endl; os << indent << "OutputSpacing: " << m_Resampler->GetOutputSpacing() << std::endl; os << indent << "OutputStartIndex: " << m_Resampler->GetOutputStartIndex() << std::endl; os << indent << "OutputSize: " << m_Resampler->GetOutputSize() << std::endl; os << indent << "GenericRSTransform: " << std::endl; m_Transform->Print(os, indent.GetNextIndent()); } } #endif
// AngularFire2 database UMD bundle (transpiled dist output). Behavior must not
// be changed here — comments only. Exposes list/object reference factories over
// Firebase Realtime Database refs, wrapped in RxJS observables.
(function (global, factory) {
    typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('@angular/core'), require('rxjs'), require('rxjs/operators'), require('angularfire2'), require('firebase/database')) :
    typeof define === 'function' && define.amd ? define(['exports', '@angular/core', 'rxjs', 'rxjs/operators', 'angularfire2', 'firebase/database'], factory) :
    (factory((global.angularfire2 = global.angularfire2 || {}, global.angularfire2.database = global.angularfire2.database || {}),global.ng.core,global.rxjs,global.rxjs.operators,global.angularfire2,global.firebase));
}(this, (function (exports,_angular_core,rxjs,rxjs_operators,angularfire2,firebase_database) { 'use strict';

// --- duck-type guards -------------------------------------------------------
function isString(value) { return typeof value === 'string'; }
// A DataSnapshot is recognized by its exportVal() method.
function isFirebaseDataSnapshot(value) { return typeof value.exportVal === 'function'; }
function isNil(obj) { return obj === undefined || obj === null; }
// A database Reference is recognized by its set() method.
function isFirebaseRef(value) { return typeof value.set === 'function'; }
// Resolve a path-or-ref argument to a Reference.
function getRef(database, pathRef) { return isFirebaseRef(pathRef) ? pathRef : database.ref(pathRef); }
// Dispatch on the three accepted argument shapes (string path / ref / snapshot).
function checkOperationCases(item, cases) {
    if (isString(item)) {
        return cases.stringCase();
    }
    else if (isFirebaseRef(item)) {
        return cases.firebaseCase();
    }
    else if (isFirebaseDataSnapshot(item)) {
        return cases.snapshotCase();
    }
    throw new Error("Expects a string, snapshot, or reference. Got: " + typeof item);
}

// Wrap a Firebase event listener ('on' or 'once') as an Observable of
// { type, payload, prevKey, key } actions. delay(0) pushes emissions onto a
// microtask-later tick; share() multicasts a single underlying listener.
function fromRef(ref, event, listenType) {
    if (listenType === void 0) { listenType = 'on'; }
    return new rxjs.Observable(function (subscriber) {
        var fn = ref[listenType](event, function (snapshot, prevKey) {
            subscriber.next({ snapshot: snapshot, prevKey: prevKey });
            if (listenType == 'once') { subscriber.complete(); }
        }, subscriber.error.bind(subscriber));
        if (listenType == 'on') {
            // Detach the Firebase listener on unsubscribe.
            return { unsubscribe: function () { ref.off(event, fn); } };
        }
        else {
            // 'once' listeners auto-detach; nothing to clean up.
            return { unsubscribe: function () { } };
        }
    }).pipe(rxjs_operators.map(function (payload) {
        var snapshot = payload.snapshot, prevKey = payload.prevKey;
        var key = null;
        if (snapshot.exists()) { key = snapshot.key; }
        return { type: event, payload: snapshot, prevKey: prevKey, key: key };
    }), rxjs_operators.delay(0), rxjs_operators.share());
}

// Emit the current list state: seed with a one-shot 'value' read, then fold
// subsequent child events into an ordered array via buildView.
function listChanges(ref, events) {
    return fromRef(ref, 'value', 'once').pipe(rxjs_operators.switchMap(function (snapshotAction) {
        var childEvent$ = [rxjs.of(snapshotAction)];
        events.forEach(function (event) { return childEvent$.push(fromRef(ref, event)); });
        return rxjs.merge.apply(void 0, childEvent$).pipe(rxjs_operators.scan(buildView, []));
    }), rxjs_operators.distinctUntilChanged());
}

// Index of the action whose payload has the given key, or -1.
function positionFor(changes, key) {
    var len = changes.length;
    for (var i = 0; i < len; i++) {
        if (changes[i].payload.key === key) {
            return i;
        }
    }
    return -1;
}

// Insertion index directly after prevKey (0 when prevKey is nil; append when
// prevKey is unknown).
function positionAfter(changes, prevKey) {
    if (isNil(prevKey)) {
        return 0;
    }
    else {
        var i = positionFor(changes, prevKey);
        if (i === -1) {
            return changes.length;
        }
        else {
            return i + 1;
        }
    }
}

// scan() reducer: apply one child action to the current ordered array of
// actions, mirroring Firebase's prevKey-based ordering semantics.
function buildView(current, action) {
    var payload = action.payload, type = action.type, prevKey = action.prevKey, key = action.key;
    var currentKeyPosition = positionFor(current, key);
    var afterPreviousKeyPosition = positionAfter(current, prevKey);
    switch (action.type) {
        case 'value':
            // Initial snapshot: expand children in order into 'value' actions.
            if (action.payload && action.payload.exists()) {
                var prevKey_1 = null;
                action.payload.forEach(function (payload) {
                    var action = { payload: payload, type: 'value', prevKey: prevKey_1, key: payload.key };
                    prevKey_1 = payload.key;
                    current = current.concat([action]);
                    return false; // continue forEach iteration
                });
            }
            return current;
        case 'child_added':
            if (currentKeyPosition > -1) {
                // Already present: reinsert only if its neighbor changed.
                var previous = current[currentKeyPosition - 1];
                if ((previous && previous.key || null) != prevKey) {
                    current = current.filter(function (x) { return x.payload.key !== payload.key; });
                    current.splice(afterPreviousKeyPosition, 0, action);
                }
            }
            else if (prevKey == null) {
                // No predecessor: new first element.
                return [action].concat(current);
            }
            else {
                current = current.slice();
                current.splice(afterPreviousKeyPosition, 0, action);
            }
            return current;
        case 'child_removed':
            return current.filter(function (x) { return x.payload.key !== payload.key; });
        case 'child_changed':
            return current.map(function (x) { return x.payload.key === key ? action : x; });
        case 'child_moved':
            if (currentKeyPosition > -1) {
                var data = current.splice(currentKeyPosition, 1)[0];
                current = current.slice();
                current.splice(afterPreviousKeyPosition, 0, data);
                return current;
            }
            return current;
        default:
            return current;
    }
}

// Default to all four child events when none are requested.
function validateEventsArray(events) {
    if (isNil(events) || events.length === 0) {
        events = ['child_added', 'child_removed', 'child_changed', 'child_moved'];
    }
    return events;
}

// Public list-observable factories ------------------------------------------
function snapshotChanges$1(query, events) {
    events = validateEventsArray(events);
    return listChanges(query, events);
}

// Raw stream of individual child events (no ordering/folding).
function stateChanges(query, events) {
    events = validateEventsArray(events);
    var childEvent$ = events.map(function (event) { return fromRef(query, event); });
    return rxjs.merge.apply(void 0, childEvent$);
}

// Accumulated history of child events, emitted only once the list is loaded.
function auditTrail(query, events) {
    var auditTrail$ = stateChanges(query, events)
        .pipe(rxjs_operators.scan(function (current, action) { return current.concat([action]); }, []));
    return waitForLoaded(query, auditTrail$);
}

// One 'value' read, annotated with the key of the last child (used to decide
// when the initial load has been fully replayed as child events).
function loadedData(query) {
    return fromRef(query, 'value')
        .pipe(rxjs_operators.map(function (data) {
        var lastKeyToLoad;
        data.payload.forEach(function (child) {
            lastKeyToLoad = child.key;
            return false; // continue forEach iteration
        });
        return { data: data, lastKeyToLoad: lastKeyToLoad };
    }));
}

// Suppress emissions of action$ until the action list contains the last key
// of the initial snapshot — i.e. until the whole list has loaded.
function waitForLoaded(query, action$) {
    var loaded$ = loadedData(query);
    return loaded$
        .pipe(rxjs_operators.withLatestFrom(action$), rxjs_operators.map(function (_a) {
        var loaded = _a[0], actions = _a[1];
        var lastKeyToLoad = loaded.lastKeyToLoad;
        var loadedKeys = actions.map(function (snap) { return snap.key; });
        return { actions: actions, lastKeyToLoad: lastKeyToLoad, loadedKeys: loadedKeys };
    }), rxjs_operators.skipWhile(function (meta) { return meta.loadedKeys.indexOf(meta.lastKeyToLoad) === -1; }), rxjs_operators.map(function (meta) { return meta.actions; }));
}

// Build an update/set method accepting path string, ref, or snapshot.
function createDataOperationMethod(ref, operation) {
    return function dataOperation(item, value) {
        return checkOperationCases(item, {
            stringCase: function () { return ref.child(item)[operation](value); },
            firebaseCase: function () { return item[operation](value); },
            snapshotCase: function () { return item.ref[operation](value); }
        });
    };
}

// Build a remove method; with no argument, removes the list root itself.
function createRemoveMethod(ref) {
    return function remove(item) {
        if (!item) { return ref.remove(); }
        return checkOperationCases(item, {
            stringCase: function () { return ref.child(item).remove(); },
            firebaseCase: function () { return item.remove(); },
            snapshotCase: function () { return item.ref.remove(); }
        });
    };
}

// AngularFireList facade: CRUD plus the observable views above, each run
// outside Angular's zone and re-entered for change detection via the scheduler.
function createListReference(query, afDatabase) {
    return {
        query: query,
        update: createDataOperationMethod(query.ref, 'update'),
        set: createDataOperationMethod(query.ref, 'set'),
        push: function (data) { return query.ref.push(data); },
        remove: createRemoveMethod(query.ref),
        snapshotChanges: function (events) {
            var snapshotChanges$ = snapshotChanges$1(query, events);
            return afDatabase.scheduler.keepUnstableUntilFirst(afDatabase.scheduler.runOutsideAngular(snapshotChanges$));
        },
        stateChanges: function (events) {
            var stateChanges$ = stateChanges(query, events);
            return afDatabase.scheduler.keepUnstableUntilFirst(afDatabase.scheduler.runOutsideAngular(stateChanges$));
        },
        auditTrail: function (events) {
            var auditTrail$ = auditTrail(query, events);
            return afDatabase.scheduler.keepUnstableUntilFirst(afDatabase.scheduler.runOutsideAngular(auditTrail$));
        },
        valueChanges: function (events) {
            var snapshotChanges$ = snapshotChanges$1(query, events);
            return afDatabase.scheduler.keepUnstableUntilFirst(afDatabase.scheduler.runOutsideAngular(snapshotChanges$)).pipe(rxjs_operators.map(function (actions) { return actions.map(function (a) { return a.payload.val(); }); }));
        }
    };
}

// Snapshot stream for a single object node ('value' events).
function createObjectSnapshotChanges(query) {
    return function snapshotChanges() {
        return fromRef(query, 'value');
    };
}

// AngularFireObject facade: CRUD plus snapshot/value observables.
function createObjectReference(query, afDatabase) {
    return {
        query: query,
        snapshotChanges: function () {
            var snapshotChanges$ = createObjectSnapshotChanges(query)();
            return afDatabase.scheduler.keepUnstableUntilFirst(afDatabase.scheduler.runOutsideAngular(snapshotChanges$));
        },
        update: function (data) { return query.ref.update(data); },
        set: function (data) { return query.ref.set(data); },
        remove: function () { return query.ref.remove(); },
        valueChanges: function () {
            var snapshotChanges$ = createObjectSnapshotChanges(query)();
            return afDatabase.scheduler.keepUnstableUntilFirst(afDatabase.scheduler.runOutsideAngular(snapshotChanges$)).pipe(rxjs_operators.map(function (action) { return action.payload.exists() ? action.payload.val() : null; }));
        },
    };
}

// Injectable service: resolves the Firebase app/database outside the Angular
// zone and hands out list/object references.
var AngularFireDatabase = (function () {
    function AngularFireDatabase(options, nameOrConfig, databaseURL, platformId, zone) {
        this.scheduler = new angularfire2.FirebaseZoneScheduler(zone, platformId);
        this.database = zone.runOutsideAngular(function () {
            var app = angularfire2._firebaseAppFactory(options, nameOrConfig);
            return app.database(databaseURL || undefined);
        });
    }
    AngularFireDatabase.prototype.list = function (pathOrRef, queryFn) {
        var ref = getRef(this.database, pathOrRef);
        var query = ref;
        if (queryFn) {
            query = queryFn(ref);
        }
        return createListReference(query, this);
    };
    AngularFireDatabase.prototype.object = function (pathOrRef) {
        var ref = getRef(this.database, pathOrRef);
        return createObjectReference(ref, this);
    };
    AngularFireDatabase.prototype.createPushId = function () {
        return this.database.ref().push().key;
    };
    // Transpiled Angular DI metadata (what @Injectable/@Inject would generate).
    AngularFireDatabase.decorators = [
        { type: _angular_core.Injectable },
    ];
    AngularFireDatabase.ctorParameters = function () { return [
        { type: undefined, decorators: [{ type: _angular_core.Inject, args: [angularfire2.FirebaseOptionsToken,] }] },
        { type: undefined, decorators: [{ type: _angular_core.Optional }, { type: _angular_core.Inject, args: [angularfire2.FirebaseNameOrConfigToken,] }] },
        { type: String, decorators: [{ type: _angular_core.Optional }, { type: _angular_core.Inject, args: [angularfire2.RealtimeDatabaseURL,] }] },
        { type: Object, decorators: [{ type: _angular_core.Inject, args: [_angular_core.PLATFORM_ID,] }] },
        { type: _angular_core.NgZone }
    ]; };
    return AngularFireDatabase;
}());

// NgModule providing AngularFireDatabase.
var AngularFireDatabaseModule = (function () {
    function AngularFireDatabaseModule() {
    }
    AngularFireDatabaseModule.decorators = [
        { type: _angular_core.NgModule, args: [{ providers: [AngularFireDatabase] },] },
    ];
    return AngularFireDatabaseModule;
}());

// Public API surface of the bundle.
exports.AngularFireDatabase = AngularFireDatabase;
exports.RealtimeDatabaseURL = angularfire2.RealtimeDatabaseURL;
exports.listChanges = listChanges;
exports.createListReference = createListReference;
exports.snapshotChanges = snapshotChanges$1;
exports.stateChanges = stateChanges;
exports.auditTrail = auditTrail;
exports.fromRef = fromRef;
exports.AngularFireDatabaseModule = AngularFireDatabaseModule;

Object.defineProperty(exports, '__esModule', { value: true });

})));
// Package knap holds shared identifiers for the knap Kubernetes API types.
// NOTE(review): package summary inferred from the single constant below —
// confirm against the rest of the package.
package knap

// GroupName is the Kubernetes API group name (must have a ".").
const GroupName = "knap.github.com"
import React from "react"; import HomeComapanySearchFilter from "Components/HomeComapanySearchFilter"; import Img from "react-cool-img"; export default function HomepageCompanyJumbotron({ keywords, searchDataByTitle, jobSekkerRecentJobsForCompanyAPI, searchByLocation, searchByKeyword, searchByRange, title, searchBy, searchByFreelancer, searchByUser, setIsLoginOpen, homePageCompanySvg, resetFilter, range, jobLocation, }) { return ( <section id="homepagecompanysearchsection"> <div className="homepage__container__jumbotron"> <div className="homepage__container__jumbotron__wrapper"> <div className="homepage__container__jumbotron__left"> <div className="homepage__container__jumbotron__heading animate__animated animate__fadeInDown"> AIDApro helps you find the <br /> right <span> professional or freelancer</span> </div> <div className="homepage__container__jumbotron__info animate__animated animate__fadeInLeft"> Find the right professionals through our database or let them find you by posting vacancies or projects </div> <HomeComapanySearchFilter placeholder="Search job titles, keywords" keywords={keywords} searchByTitle={searchDataByTitle} searchByFilter={jobSekkerRecentJobsForCompanyAPI} searchByLocation={(e) => searchByLocation(e)} searchByKeyword={(e) => searchByKeyword(e)} searchByRange={searchByRange} title={title} searchBy={searchBy} searchByFreelancer={searchByFreelancer} searchByUser={searchByUser} resetFilter={resetFilter} range={range} location={jobLocation} /> <div style={{ display: "flex", alignItems: "center", marginTop: "2em", }} className="animate__animated animate__fadeInRight" > <div className="header__nav__btn btn__primary" style={{ width: 150, fontSize: 15, marginRight: "1em", }} onClick={() => { window.location.href = "/sign-up"; // setIsLoginOpen(true) }} // to={user.Id ? 
"/home-company" : "/home-company/login"} > Post a job </div> <div className="header__nav__btn btn__secondary" style={{ width: 150, fontSize: 15, marginRight: "1em", }} onClick={() => { window.location.href = "/sign-up"; // setIsLoginOpen(true) }} // to={user.Id ? "/home-company" : "/home-company/login"} > Post a project </div> </div> </div> <Img loading="lazy" src={homePageCompanySvg} alt="homePageCompanySvg" className="homepage__container__jumbotron__right animate__animated animate__fadeInRight" /> </div> </div> </section> ); }
#!/bin/sh
#
# Copyright (c) 2001, 2009, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 2 only, as
# published by the Free Software Foundation.
#
# This code is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
# version 2 for more details (a copy is included in the LICENSE file that
# accompanied this code).
#
# You should have received a copy of the GNU General Public License version
# 2 along with this work; if not, write to the Free Software Foundation,
# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
# or visit www.oracle.com if you need additional information or have any
# questions.
#

# jtreg shell test: javac invoked with a nonexistent @-file must fail.

# Verify that a required jtreg environment variable is set, echoing its value;
# abort the test when it is missing.
check_defined() {
  name="$1"
  eval "value=\${$name}"
  if [ "$value" = "" ]; then
    echo "$name not set. Test cannot execute. Failed."
    exit 1
  fi
  echo "$name=$value"
}

check_defined TESTSRC
check_defined TESTJAVA
check_defined TESTCLASSES
echo "CLASSPATH=${CLASSPATH}"

# Pick the file-separator style for this platform.
OS=$(uname -s)
case "$OS" in
  SunOS | Linux | CYGWIN* )
    FS="/"
    ;;
  Windows* )
    FS="\\"
    ;;
  * )
    echo "Unrecognized system!"
    exit 1
    ;;
esac

# Run javac with an @-file that does not exist; capture stderr for the log.
TMP1=OUTPUT.txt
cp "${TESTSRC}${FS}MissingInclude.java" .
"${TESTJAVA}${FS}bin${FS}javac" ${TESTTOOLVMOPTS} @/nonexistent_file MissingInclude.java 2> ${TMP1}
result=$?
cat ${TMP1}
rm ${TMP1}

# The compile is expected to fail: a nonzero javac exit means the test passes.
if [ $result -ne 0 ]; then
  echo "Passed"
  exit 0
fi
echo "Failed"
exit 1
import { Record, List, Map } from 'immutable'; const ModelWorker = new Record({ status: 'ready', error: null, duration: 0, rate: 0, filename: '', }); const ModelMetrics = new Record({ bytes_in: { total: 0, mean: 0 }, bytes_out: { total: 0, mean: 0 }, duration: 0, earliest: '', end: '', errors: null, latencies: { mean: 0, max: 0, '50th': 0, '95th': 0, '99th': 0 }, latest: {}, rate: 0, requests: 0, status_codes: {}, success: 0, wait: 0, }); const ModelReport = Record({ isFetching: true, showResultList: false, error: null, metrics: new ModelMetrics(), histgram: List(), results: List(), }); const ModelFormAttack = Record({ error: null, Targets: '', Body: '', Cert: '', Key: '', RootCerts: '', HTTP2: true, Insecure: false, Lazy: false, Duration: '10s', Timeout: '', Rate: 10, Workers: 10, Connections: 10000, Redirects: 10, Headers: '', Laddr: '', Keepalive: true, }); const ModelFormImport = new Record({ isModalActive: false, error: null, text: '', }); const AttackStateRecord = new Record({ isImportModalActive: false, worker: new ModelWorker(), metrics: new ModelMetrics(), resultFiles: List(), reports: Map(), formAttack: new ModelFormAttack(), formImport: new ModelFormImport(), }); export default class AttackState extends AttackStateRecord { /** * set import modal active status * @param {Boolean} isActive * @return {AttackState} */ setImportModalActive(isActive) { return this.set('isImportModalActive', isActive); } /** * update importOption * @param {Object} params * @return {AttackState} */ updateFormImport(params) { return this.set('formImport', this.formImport.merge(params)); } /** * set resultFiles * @param {Array.<{Object}>} * @return {AttackState} */ setResultFiles(files) { return this.set('resultFiles', List(files)); } /** * set state to attack started * @param {Object} workerParams * @return {AttackState} */ startAttack(workerParams) { return this.merge({ worker: new ModelWorker(Object.assign({ status: 'active', filename: '' }, workerParams)), metrics: new 
ModelMetrics(), }); } /** * set state to attack finished * @param {String} filename * @return {AttackState} */ finishAttack(filename) { return this.set('worker', this.worker.merge({ status: 'done', filename })); } /** * set state to attack canceled * @return {AttackState} */ cancelAttack() { return this.set('worker', this.worker.set('status', 'canceled')); } /** * set state to attack failed * @param {Error} error * @return {AttackState} */ failAttack(error) { return this.set('worker', this.worker.merge({ status: 'error', error })); } /** * update metrics and set worker status to active * @param {Object} metricsParams * @return {AttackState} */ updateAttackMetrics(metricsParams) { return this.merge({ worker: this.worker.set('status', 'active'), metrics: new ModelMetrics(metricsParams), }); } /** * initialize the report data in the list * @param {String} filename * @return {AttackState} */ initReportData(filename) { return this.set('reports', this.reports.set(filename, new ModelReport())); } /** * set report data in the list * @param {String} filename * @param {Object} metrics * @param {Array} results * @param {Array} histgram * @return {AttackState} */ setReportData(filename, metrics, results, histgram) { const reports = this.reports.update(filename, (d) => { // eslint-disable-line return d.merge({ isFetching: false, metrics: new ModelMetrics(metrics), histgram: List(histgram), results: List(results), }); }); return this.set('reports', reports); } /** * set error in the list * @param {String} filename * @param {Error} error * @return {AttackState} */ setReportDataError(filename, error) { const reports = this.reports.update(filename, (d) => { // eslint-disable-line return d.merge({ isFetching: false, error }); }); return this.set('reports', reports); } /** * set report state to showing results * @param {String} filename * @return {AttackState} */ showResultList(filename) { const reports = this.reports.update(filename, (d) => { // eslint-disable-line return 
d.set('showResultList', true); }); return this.set('reports', reports); } /** * update formAttack * @param {Object} params * @return {AttackState} */ updateFormAttack(params) { return this.set('formAttack', this.formAttack.merge(params)); } /** * set formAttack * @param {Object} * @return {AttackState} */ setFormAttack(params) { return this.set('formAttack', new ModelFormAttack(params)); } }
// Copyright 2010, 2011 The Apache Software Foundation // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package org.apache.tapestry5.internal.transform; import org.apache.tapestry5.annotations.HeartbeatDeferred; import org.apache.tapestry5.model.MutableComponentModel; import org.apache.tapestry5.plastic.MethodAdvice; import org.apache.tapestry5.plastic.MethodInvocation; import org.apache.tapestry5.plastic.PlasticClass; import org.apache.tapestry5.plastic.PlasticMethod; import org.apache.tapestry5.services.Heartbeat; import org.apache.tapestry5.services.transform.ComponentClassTransformWorker2; import org.apache.tapestry5.services.transform.TransformationSupport; public class HeartbeatDeferredWorker implements ComponentClassTransformWorker2 { private final Heartbeat heartbeat; private final MethodAdvice deferredAdvice = new MethodAdvice() { public void advise(final MethodInvocation invocation) { heartbeat.defer(new Runnable() { public void run() { invocation.proceed(); } }); } }; public HeartbeatDeferredWorker(Heartbeat heartbeat) { this.heartbeat = heartbeat; } public void transform(PlasticClass plasticClass, TransformationSupport support, MutableComponentModel model) { for (PlasticMethod method : plasticClass.getMethodsWithAnnotation(HeartbeatDeferred.class)) { deferMethodInvocations(method); } } void deferMethodInvocations(PlasticMethod method) { validateVoid(method); validateNoCheckedExceptions(method); method.addAdvice(deferredAdvice); } private void 
validateNoCheckedExceptions(PlasticMethod method) { if (method.getDescription().checkedExceptionTypes.length > 0) throw new RuntimeException( String.format( "Method %s is not compatible with the @HeartbeatDeferred annotation, as it throws checked exceptions.", method.getMethodIdentifier())); } private void validateVoid(PlasticMethod method) { if (!method.isVoid()) throw new RuntimeException(String.format( "Method %s is not compatible with the @HeartbeatDeferred annotation, as it is not a void method.", method.getMethodIdentifier())); } }
import React from "react";
import ReactDOM from "react-dom";
import itemSelector from "./shared";
import "./runner.scss";

// Fetch the current scene state from the server and hand the parsed JSON to `fn`.
// Errors are only logged; no retry or user-facing error handling here.
function getScene(fn) {
    fetch("/scene", { credentials: "same-origin", cache: "no-cache" })
        .then(response => {
            return response.json();
        })
        .then(
            json => {
                console.log(json);
                fn(json);
            },
            error => {
                console.log(error);
            }
        );
}

// POST a player choice (`arg`) to the server; `fn` receives the updated game JSON.
function action(arg, fn) {
    fetch("/action", {
        method: "POST",
        body: JSON.stringify(arg),
        headers: { "Content-Type": "application/json" },
        credentials: "same-origin",
        cache: "no-cache"
    })
        .then(response => {
            return response.json();
        })
        .then(
            json => {
                fn(json);
            },
            error => {
                console.log(error);
            }
        );
}

// Restart the game in world `id`; `fn` receives the fresh game JSON.
function resetGame(id, fn) {
    fetch("/reset", {
        method: "POST",
        body: JSON.stringify({ world: id }),
        headers: { "Content-Type": "application/json" },
        credentials: "same-origin",
        cache: "no-cache"
    })
        .then(response => {
            return response.json();
        })
        .then(
            json => {
                fn(json);
            },
            error => {
                console.log(error);
            }
        );
}

// Dropdown for picking a world to restart from; selection is forwarded
// through `itemSelector` (which presumably maps the option index back to the
// world object — TODO confirm against ./shared).
function WorldSelector(props) {
    return (
        <div className="game-reset">
            Start from the beginning <br />
            <select onChange={itemSelector(props.worlds, props.onselect, true)}>
                <option key={null}>--- select world ---</option>
                {props.worlds.map(world => (
                    <option key={world.id}>{world.global.title}</option>
                ))}
            </select>
        </div>
    );
}

// A single selectable action; clicking is disabled when not `available`.
class Choice extends React.Component {
    render() {
        const classes = [
            "choice",
            this.props.available ? "choice-available" : "choice-unavailable"
        ];
        return (
            <div
                className={classes.join(" ")}
                onClick={this.props.available ? this.props.onChoose : null}
            >
                <div className="choice-name">{this.props.name}</div>
                <div className="choice-description">
                    {this.props.description}
                </div>
            </div>
        );
    }
}

// A single selectable encounter; same availability gating as Choice.
class Encounter extends React.Component {
    render() {
        const classes = [
            "encounter",
            this.props.available ? "encounter-available" : "encounter-unavailable"
        ];
        return (
            <div
                className={classes.join(" ")}
                onClick={this.props.available ? this.props.onChoose : null}
            >
                <div className="encounter-name">{this.props.name}</div>
                <div className="encounter-description">
                    {this.props.description}
                </div>
            </div>
        );
    }
}

// Renders the current scene: its text, available choices/encounters, and the
// player's inventory (only items with a positive count).
class Scene extends React.Component {
    // Scene header: name plus story text (falls back to description).
    setting() {
        return (
            <div className="scene-setting">
                <div className="scene-name">{this.props.scene.name}</div>
                <div className="scene-description">
                    {this.props.scene.story || this.props.scene.description}
                </div>
            </div>
        );
    }

    render() {
        console.log(this.props.inventory);
        const choices = this.props.choices.map(action => (
            <Choice
                key={action.id}
                name={action.name}
                description={action.description}
                available={action.available}
                onChoose={() => this.props.onChooseAction(action.id)}
            />
        ));
        const encounters = this.props.encounters.map(encounter => (
            <Encounter
                key={encounter.id}
                name={encounter.name}
                description={encounter.description}
                available={encounter.available}
                onChoose={() => this.props.onChooseEncounter(encounter.id)}
            />
        ));
        // NOTE(review): locations is hard-coded empty, so the
        // "scene-locations" branch below can never render — looks unfinished.
        const locations = [];
        return (
            <div className="scene">
                {this.setting()}
                {choices.length > 0 ? (
                    <div className="scene-choices">{choices}</div>
                ) : null}
                {locations.length > 0 ? (
                    <div className="scene-locations">{locations}</div>
                ) : null}
                {encounters.length > 0 ? (
                    <div className="scene-encounters">{encounters}</div>
                ) : null}
                <div className="inventory">
                    {Object.entries(this.props.inventory)
                        .filter(([stat, value]) => value > 0)
                        .map(([stat, value]) => (
                            <div key={stat} className={`item item-${stat}`}>
                                <div className="name">{stat}</div>
                                <div className="value">{value}</div>
                            </div>
                        ))}
                </div>
            </div>
        );
    }
}

// Top-level component: owns the whole game state as returned by the server
// and wires the choose/reset callbacks down into Scene/WorldSelector.
class Game extends React.Component {
    constructor(props) {
        super(props);
        // State mirrors the server's game JSON; _loaded marks the first fetch.
        this.state = {
            worlds: [],
            world: { title: null },
            scene: null,
            location: null,
            encounter: null,
            inventory: {},
            encounters: [],
            locations: [],
            choices: [],
            _loaded: false
        };
        this.chooseEncounter = this.chooseEncounter.bind(this);
        this.chooseLocation = this.chooseLocation.bind(this);
        this.chooseAction = this.chooseAction.bind(this);
        this.resetGame = this.resetGame.bind(this);
    }

    // Initial load: pull the current scene once the component is mounted.
    componentDidMount() {
        getScene(game => {
            console.log("new state", game);
            this.setState(Object.assign({ _loaded: true }, game));
        });
    }

    chooseEncounter(id) {
        console.log("choosing encounter " + id);
        action({ choiceType: "encounter", choiceID: id }, game => {
            console.log("new state", game);
            this.setState(game);
        });
    }

    chooseLocation(id) {
        console.log("choosing location " + id);
        action({ choiceType: "location", choiceID: id }, game => {
            console.log("new state", game);
            this.setState(game);
        });
    }

    chooseAction(id) {
        console.log("choosing action " + id);
        action({ choiceType: "action", choiceID: id }, game => {
            console.log("new state", game);
            this.setState(game);
        });
    }

    // `id` is falsy when the "--- select world ---" placeholder is chosen.
    resetGame(id) {
        if (id) {
            console.log("resetting");
            resetGame(id, game => {
                console.log("starting from scratch", game);
                this.setState(game);
            });
        }
    }

    render() {
        console.log("rendering state", this.state);
        return (
            <div className="game">
                <div className="world">
                    <div className="world-title">{this.state.world.title}</div>
                </div>
                {this.state.scene ? (
                    <Scene
                        key="root-scene"
                        scene={this.state.scene}
                        locations={this.state.locations}
                        encounters={this.state.encounters}
                        choices={this.state.choices}
                        inventory={this.state.inventory}
                        onChooseEncounter={this.chooseEncounter}
                        onChooseLocation={this.chooseLocation}
                        onChooseAction={this.chooseAction}
                    />
                ) : null}
                <WorldSelector worlds={this.state.worlds} onselect={this.resetGame} />
            </div>
        );
    }
}

ReactDOM.render(<Game />, document.getElementById("root"));
<filename>webfx-kit/webfx-kit-javafxbase-emul/src/main/java/com/sun/javafx/event/BasicEventDispatcher.java
package com.sun.javafx.event;

import javafx.event.Event;
import javafx.event.EventDispatchChain;
import javafx.event.EventDispatcher;

/**
 * Base event dispatcher that splits dispatch into an explicit capturing phase
 * and bubbling phase. {@code dispatchEvent} runs the capturing hook, forwards
 * down the chain, then runs the bubbling hook on the way back; a consumed
 * event short-circuits to {@code null} at either point. Both phase hooks are
 * public no-ops meant to be overridden, and instances can be linked into a
 * doubly-linked chain of dispatchers (see {@code insertNextDispatcher}),
 * which {@code CompositeEventDispatcher} builds on.
 */
public abstract class BasicEventDispatcher implements EventDispatcher {
    private BasicEventDispatcher previousDispatcher;

    private BasicEventDispatcher nextDispatcher;

    @Override
    public Event dispatchEvent(Event event, final EventDispatchChain tail) {
        // Capturing phase first; a consumed event stops dispatch entirely.
        event = dispatchCapturingEvent(event);
        if (event.isConsumed()) {
            return null;
        }

        // Hand off to the rest of the chain; null means it was swallowed.
        event = tail.dispatchEvent(event);
        if (event == null) {
            return null;
        }

        // Bubbling phase on the way back out.
        event = dispatchBubblingEvent(event);
        return event.isConsumed() ? null : event;
    }

    /** Capturing-phase hook; default implementation passes the event through. */
    public Event dispatchCapturingEvent(Event event) {
        return event;
    }

    /** Bubbling-phase hook; default implementation passes the event through. */
    public Event dispatchBubblingEvent(Event event) {
        return event;
    }

    public final BasicEventDispatcher getPreviousDispatcher() {
        return previousDispatcher;
    }

    public final BasicEventDispatcher getNextDispatcher() {
        return nextDispatcher;
    }

    /** Splices {@code newDispatcher} into the chain directly after this one. */
    public final void insertNextDispatcher(final BasicEventDispatcher newDispatcher) {
        final BasicEventDispatcher oldNext = nextDispatcher;
        if (oldNext != null) {
            oldNext.previousDispatcher = newDispatcher;
        }
        newDispatcher.nextDispatcher = oldNext;
        newDispatcher.previousDispatcher = this;
        nextDispatcher = newDispatcher;
    }
}
#!/bin/bash # This is a simple Oracle database test script that will connect to SID with USERNAME & PASSWORD and time how long a simple query will take. # The query provided is "select * from dual", but you can put whatever you want here. SID="" USERNAME="" PASSWORD="" # Source your own oracle environment here: . /opt/oracle/product/11.2/oracle.env export ORACLE_SID="${SID}" execute_query () { local query=$(sqlplus -S ${USERNAME}/${PASSWORD}@${ORACLE_SID} << EOF set pagesize 0 set linesize 80 set heading off set feedback off set tab off set define off ${1}; quit EOF) # echo $query return } QUERY="select * from dual" ## Main START_TIME=$(date +%s.%N) execute_query "${QUERY}" END_TIME=$(date +%s.%N) ELAPSED=$(printf "%.3F" $(echo "$END_TIME - $START_TIME"|bc)) echo $ELAPSED
<gh_stars>1-10
/* GENERATED FILE */
// Hybrids web-component for the Phosphor "warning" icon (<ph-warning>).
// Properties: color (fill/stroke), size (width & height), weight (one of the
// stroke-style variants rendered below), mirrored (horizontal flip).
import { html, svg, define } from "hybrids";

const PhWarning = {
  color: "currentColor",
  size: "1em",
  weight: "regular",
  mirrored: false,
  // Exactly one of the weight branches below is truthy per render; each embeds
  // the pre-generated SVG geometry for that stroke weight.
  render: ({ color, size, weight, mirrored }) => html`
    <svg xmlns="http://www.w3.org/2000/svg" width="${size}" height="${size}" fill="${color}" viewBox="0 0 256 256" transform=${mirrored ? "scale(-1, 1)" : null} >
      ${weight === "bold" &&
      svg`<line x1="128" y1="96" x2="128" y2="136" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="24"/> <path d="M114.15243,39.98472,26.17616,191.977a16.00005,16.00005,0,0,0,13.84762,24.01535H215.97625A16,16,0,0,0,229.82386,191.977L141.84757,39.98472A16,16,0,0,0,114.15243,39.98472Z" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="24"/> <circle cx="128" cy="176" r="16"/>`}
      ${weight === "duotone" &&
      svg`<path d="M114.15243,39.98472,26.17616,191.977a16.00005,16.00005,0,0,0,13.84762,24.01535H215.97625A16,16,0,0,0,229.82386,191.977L141.84757,39.98472A16,16,0,0,0,114.15243,39.98472Z" opacity="0.2"/> <line x1="128" y1="96" x2="128" y2="144" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="16"/> <path d="M114.15243,39.98472,26.17616,191.977a16.00005,16.00005,0,0,0,13.84762,24.01535H215.97625A16,16,0,0,0,229.82386,191.977L141.84757,39.98472A16,16,0,0,0,114.15243,39.98472Z" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="16"/> <circle cx="128" cy="180" r="12"/>`}
      ${weight === "fill" &&
      svg`<path d="M236.74414,187.96585l-87.96875-152a24.012,24.012,0,0,0-41.54687,0v.00782L19.25977,187.96585a23.9982,23.9982,0,0,0,20.76562,36.02344H215.97852a23.9982,23.9982,0,0,0,20.76562-36.02344Zm-116.75-91.96875a8,8,0,1,1,16,0v48a8,8,0,1,1-16,0ZM128,192a12,12,0,1,1,12-12A12.00059,12.00059,0,0,1,128,192Z"/>`}
      ${weight === "light" &&
      svg`<line x1="128" y1="96" x2="128" y2="144" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="12"/> <path d="M114.15243,39.98472,26.17616,191.977a16.00005,16.00005,0,0,0,13.84762,24.01535H215.97625A16,16,0,0,0,229.82386,191.977L141.84757,39.98472A16,16,0,0,0,114.15243,39.98472Z" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="12"/> <circle cx="128" cy="180" r="10"/>`}
      ${weight === "thin" &&
      svg`<line x1="128" y1="96" x2="128" y2="144" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="8"/> <path d="M114.15243,39.98472,26.17616,191.977a16.00005,16.00005,0,0,0,13.84762,24.01535H215.97625A16,16,0,0,0,229.82386,191.977L141.84757,39.98472A16,16,0,0,0,114.15243,39.98472Z" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="8"/> <circle cx="128" cy="180" r="8"/>`}
      ${weight === "regular" &&
      svg`<line x1="128" y1="96" x2="128" y2="144" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="16"/> <path d="M114.15243,39.98472,26.17616,191.977a16.00005,16.00005,0,0,0,13.84762,24.01535H215.97625A16,16,0,0,0,229.82386,191.977L141.84757,39.98472A16,16,0,0,0,114.15243,39.98472Z" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="16"/> <circle cx="128" cy="180" r="12"/>`}
    </svg>
  `,
};

define("ph-warning", PhWarning);

export default PhWarning;
def countPairs(a, b, c): count = 0 for x in range(0, c + 1): for y in range(0, c + 1): if a * x + b * y == c: count += 1 return count a = 2 b = 2 c = 4 print(countPairs(a, b, c))
<filename>ruby/spec/ruby/core/enumerator/lazy/shared/select.rb # -*- encoding: us-ascii -*- require File.expand_path('../../../../../spec_helper', __FILE__) require File.expand_path('../../fixtures/classes', __FILE__) describe :enumerator_lazy_select, shared: true do before :each do @yieldsmixed = EnumeratorLazySpecs::YieldsMixed.new.to_enum.lazy @eventsmixed = EnumeratorLazySpecs::EventsMixed.new.to_enum.lazy ScratchPad.record [] end after :each do ScratchPad.clear end it "returns a new instance of Enumerator::Lazy" do ret = @yieldsmixed.send(@method) {} ret.should be_an_instance_of(Enumerator::Lazy) ret.should_not equal(@yieldsmixed) end it "sets #size to nil" do Enumerator::Lazy.new(Object.new, 100) {}.send(@method) { true }.size.should == nil end describe "when the returned lazy enumerator is evaluated by Enumerable#first" do it "stops after specified times" do (0..Float::INFINITY).lazy.send(@method, &:even?).first(3).should == [0, 2, 4] @eventsmixed.send(@method) { true }.first(1) ScratchPad.recorded.should == [:before_yield] end end it "calls the block with a gathered array when yield with multiple arguments" do yields = [] @yieldsmixed.send(@method) { |v| yields << v }.force yields.should == EnumeratorLazySpecs::YieldsMixed.gathered_yields end it "raises an ArgumentError when not given a block" do lambda { @yieldsmixed.send(@method) }.should raise_error(ArgumentError) end describe "on a nested Lazy" do it "sets #size to nil" do Enumerator::Lazy.new(Object.new, 100) {}.take(50) {}.send(@method) { true }.size.should == nil end describe "when the returned lazy enumerator is evaluated by Enumerable#first" do it "stops after specified times" do (0..Float::INFINITY).lazy.send(@method) { |n| n > 5 }.send(@method, &:even?).first(3).should == [6, 8, 10] @eventsmixed.send(@method) { true }.send(@method) { true }.first(1) ScratchPad.recorded.should == [:before_yield] end end end end
<gh_stars>0 export {decodeXML} from "https://raw.githubusercontent.com/DenoBRComunitty/entities/master/mod.ts" export {assertEquals} from "https://deno.land/std/testing/asserts.ts";
package config import ( "fmt" "testing" "github.com/xuperchain/xupercore/kernel/mock" ) func TestLoadP2PConf(t *testing.T) { cfg, err := LoadP2PConf(mock.GetNetConfPathForTest()) if err != nil { t.Fatal(err) } fmt.Println(cfg) }
package main.methodAbstractNowDefault; public interface IMethodAbstractNowDefaultSub extends IMethodAbstractNowDefault { }
export const ROOT_PATH = 'http://52.51.111.248:3000' export const BASE_URL = `${ROOT_PATH}/api/v1` export const USER = `${BASE_URL}/user` export const USER_CARD = `${BASE_URL}/user/card` export const JOIN_REQUEST = `${USER}/join-request` export const CHANGE_PASSWORD = `${<PASSWORD>` export const REGISTRATION = `${USER}/register` export const LOGIN = `${USER}/login` export const MESSAGE = `${BASE_URL}/message` export const HORSE_SLUG = `${BASE_URL}/horse/:slug` export const HORSE_STATISTICS = `${HORSE_SLUG}/ranking` export const HORSE_STATISTICS_RESULTS = `${HORSE_SLUG}/results` export const HORSE_STATISTICS_RESULTS_DETAIL = `${HORSE_SLUG}/results/:meetingDate` export const HORSE_STATISTICS_ENTRIES = `${HORSE_SLUG}/entries` export const UPDATE_HORSE = `${BASE_URL}/horse/:slug` export const DASHBOARD = `${BASE_URL}/user/dashboard` export const SYNDICATE = `${BASE_URL}/syndicate` export const SYNDICATE_SLUG = `${BASE_URL}/syndicate/:slug` export const SEARCH = `${BASE_URL}/search` export const SEARCH_ATTRIBUTES = `${BASE_URL}/search/attributes` export const REGISTRATION_CONFIRMATION = `${USER}/verify` export const SETUP = `${BASE_URL}/setup` export const COMMENT = `${MESSAGE}/comment` export const NEWS = `${BASE_URL}/news` export const UPLOADED = `${BASE_URL}/uploaded` export const REGISTER_SYNDICATE_NAME = `${BASE_URL}/syndicate/register/name` export const REGISTER_SYNDICATE_MEMBERS = `${BASE_URL}/syndicate/:slug/ownership` export const REGISTER_SYNDICATE_COLOURS = `${BASE_URL}/syndicate/register/color/:slug` export const HORSE_INFORMATION_EDITOR = `${BASE_URL}/horse/search` export const HORSE_STATE = `${BASE_URL}/horse/checkExist` export const REGISTER_HORSES = `${BASE_URL}/syndicate/register/horses/:slug` export const GET_USERS_INFORMATIONS = `${BASE_URL}/user/search` export const GET_HORSE_INFORMATIONS = `${BASE_URL}/horse` export const ACCOUNT_NOTIFICATION_SETTING = `${USER}/notification`
<filename>src/html/buildFakeMarkup.js import generateTagByKey from './generateTagByKey'; /** * Build markup from template and replace data with placeholders (tokens). * * @memberOf module:html * @name buildFakeMarkup * @param {Set.<string>} tokens - Set of tokens used for replacing of real data passed to markup. * @param {string[]} indexToToken - Array where index matches index of element from `params` array, and associated * string is a unique string token. * @param {string[]} strings - Ordered array of string, which are pieces of template literal with HTML markup, * delimited by expressions. * @param {boolean} [wrapInTag] - If set to true, tokens would be wrapped in tag to be easily found when traversing DOM. * @returns {string} Returns markup for HTML document where data placeholders are tokens. Resulting markup is wrapped in * `<template>` and `<body>` tags. */ export default ( tokens, indexToToken, strings, wrapInTag = false, ) => `<body><template>${ strings.reduce((accumulator, string, index) => { let result = accumulator + string; const fakeDataToken = indexToToken[index]; if (fakeDataToken && tokens.has(fakeDataToken)) { result += wrapInTag ? generateTagByKey(fakeDataToken) : fakeDataToken; } return result; }, '') }</template></body>`;
package org.rs2server.rs2.util; /** * A simple class used to benchmark time. * * @author <NAME> */ public class Benchmark { /** The start time. */ private long startTime; private long duration = 0; /** * Starts the benchmark. */ public void start() { startTime = System.currentTimeMillis(); } /** * Stops the benchmark. */ public void stop() { duration += System.currentTimeMillis()-startTime; } public void reset() { duration = 0; } /** * Gets the benchmark time, in milliseconds. * * @return The benchmark time */ public long getTime() { return duration; } }
import React, { Component, } from 'react'; import { Link, withRouter, } from 'react-router-dom'; import { Item, ListWrapper, MoreB, None, } from './style'; import { Wrapper, } from '../../style'; class List extends Component { constructor(props) { super(props); this.state = { 'list': [], 'page': 1, 'total': -1, 'pageSize': 10, }; this.loadMore = this.loadMore.bind(this); this.getList = this.getList.bind(this); } render() { const { page, total, pageSize, } = this.state; let tip = ''; if(total < 1){ tip = <None>未找到‘{this.props.location.search.split('=')[1]}’相关信息</None>; }else{ if(total > (page - 1) * pageSize){ tip = <MoreB onClick={this.loadMore}> 加载更多 </MoreB>; }else{ tip = <None>------没有更多了--------</None>; } } return ( <Wrapper> <ListWrapper> {this.state.list ? this.state.list.map((item) => <Item key={item._id}> <Link className="title" to={'/detail/' + item._id}>{item.title}<span>{new Date(item.date).toString()}</span></Link> <p dangerouslySetInnerHTML={{ __html: item.content, }}></p> </Item>) : null} </ListWrapper> {tip} </Wrapper> ); } componentDidMount() { this.getList(1); } componentWillReceiveProps() { this.setState({list: [],}, ()=>{ this.getList(1); }); } loadMore() { this.getList(this.state.page); } getList(page) { // 改成 get this.$axios.get(`list?page=${page}&pageSize=${this.state.pageSize}&${this.props.location.search.substr(1)}`).then((res) => { this.setState({ 'list': this.state.list.concat(res.list), 'total': res.total, 'page': this.state.page + 1, }); }).catch((res) => { console.log( 'wrong---', res ); }); } } export default withRouter(List);
<gh_stars>1-10 package de.ids_mannheim.korap.web.controller; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import java.io.IOException; import java.text.ParseException; import java.util.HashMap; import java.util.Map; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.MultivaluedMap; import org.apache.commons.collections.map.LinkedMap; import org.junit.Ignore; import org.junit.Test; import org.springframework.beans.factory.annotation.Autowired; import com.fasterxml.jackson.databind.JsonNode; import com.nimbusds.jose.JOSEException; import com.nimbusds.jwt.SignedJWT; import com.sun.jersey.api.client.ClientResponse; import com.sun.jersey.core.util.MultivaluedMapImpl; import de.ids_mannheim.korap.authentication.http.HttpAuthorizationHandler; import de.ids_mannheim.korap.config.Attributes; import de.ids_mannheim.korap.config.BeansFactory; import de.ids_mannheim.korap.config.FullConfiguration; import de.ids_mannheim.korap.config.JWTSigner; import de.ids_mannheim.korap.exceptions.KustvaktException; import de.ids_mannheim.korap.exceptions.StatusCodes; import de.ids_mannheim.korap.utils.JsonUtils; import de.ids_mannheim.korap.utils.TimeUtils; import de.ids_mannheim.korap.web.FastJerseyTest; /** EM: To do: not implemented in the new DB yet * @author hanl * @date 21/03/2015 */ // todo: do benchmarks for simple request to check access_token check and user // retrieval! 
@Ignore
public class ShibbolethUserControllerTest extends FastJerseyTest {

    @Autowired
    FullConfiguration config;

    // Test account credentials; expected to be populated by the test setup
    // (not visible in this file) — TODO confirm where they are initialized.
    private static String[] credentials;

    /** Basic-auth request to the user info endpoint should succeed. */
    @Test
    public void loginHTTP() throws KustvaktException {
        String enc = HttpAuthorizationHandler.createBasicAuthorizationHeaderValue(credentials[0], credentials[1]);
        ClientResponse response = resource().path("user").path("info")
                .header(Attributes.AUTHORIZATION, enc).get(ClientResponse.class);
        assertEquals(ClientResponse.Status.OK.getStatusCode(), response.getStatus());
    }

    // EM: This test require VPN / IDS Intranet
    @Test
    @Ignore
    public void loginJWT() throws KustvaktException {
        String en = HttpAuthorizationHandler.createBasicAuthorizationHeaderValue(credentials[0], credentials[1]);
        /* working version from Hanl: */
        ClientResponse response = resource().path("auth").path("apiToken")
                .header(Attributes.AUTHORIZATION, en).get(ClientResponse.class);
        /**/
        /*
         * Test : ClientResponse response = null; WebResource webRes =
         * resource().path("auth") .path("apiToken");
         * webRes.header(Attributes.AUTHORIZATION, en);
         *
         * System.out.printf("resource: " + webRes.toString());
         *
         * response = webRes.get(ClientResponse.class);
         */
        // assertEquals(ClientResponse.Status.BAD_REQUEST.getStatusCode(), response.getStatus());
        String entity = response.getEntity(String.class);
        // System.out.println(entity);
        JsonNode node = JsonUtils.readTree(entity);
        assertEquals(2022, node.at("/errors/0/0").asInt());
    }

    // EM: cannot do test with LDAP
    @Test
    @Ignore
    public void loginJWTExpired() throws InterruptedException, KustvaktException, ParseException, JOSEException {
        assertTrue(BeansFactory.getKustvaktContext().getConfiguration().getTokenTTL() < 10);
        String en = HttpAuthorizationHandler.createBasicAuthorizationHeaderValue(credentials[0], credentials[1]);
        ClientResponse response = resource().path("auth").path("apiToken")
                .header(Attributes.AUTHORIZATION, en).get(ClientResponse.class);
        assertEquals(ClientResponse.Status.OK.getStatusCode(), response.getStatus());
        JsonNode node = JsonUtils.readTree(response.getEntity(String.class));
        assertNotNull(node);
        String token = node.path("token").asText();
        JWTSigner sign = new JWTSigner(BeansFactory.getKustvaktContext().getConfiguration().getSharedSecret(), config.getIssuer(), -1);
        //BeansFactory.getKustvaktContext().getConfiguration().getIssuer(), -1);
        SignedJWT jwt = sign.verifyToken(token);
        // Busy-wait until the token's expiration timestamp has passed.
        while (true) {
            if (TimeUtils.isExpired(jwt.getJWTClaimsSet().getExpirationTime().getTime()))
                break;
        }
        // An expired token must be rejected with 401.
        response = resource().path("user").path("info")
                .header(Attributes.AUTHORIZATION, "api_token " + token).get(ClientResponse.class);
        assertEquals(ClientResponse.Status.UNAUTHORIZED.getStatusCode(), response.getStatus());
    }

    @Test
    public void testGetUserDetails() throws KustvaktException {
        String enc = HttpAuthorizationHandler.createBasicAuthorizationHeaderValue(credentials[0], credentials[1]);
        ClientResponse response = resource().path("user").path("details")
                .header(Attributes.AUTHORIZATION, enc).get(ClientResponse.class);
        assertEquals(ClientResponse.Status.OK.getStatusCode(), response.getStatus());
    }

    /** Stores a JSON-array-valued detail, then reads it back via a pointer query. */
    @Test
    public void testGetUserDetailsEmbeddedPointer() throws KustvaktException {
        String enc = HttpAuthorizationHandler.createBasicAuthorizationHeaderValue(credentials[0], credentials[1]);
        Map m = new LinkedMap();
        m.put("test", "[100, \"error message\", true, \"another message\"]");

        ClientResponse response = resource().path("user").path("details")
                .header(Attributes.AUTHORIZATION, enc).header("Content-Type", MediaType.APPLICATION_JSON)
                .post(ClientResponse.class, m);
        assertEquals(ClientResponse.Status.OK.getStatusCode(), response.getStatus());

        response = resource().path("user").path("details").queryParam("pointer", "test")
                .header(Attributes.AUTHORIZATION, enc).get(ClientResponse.class);
        assertEquals(ClientResponse.Status.OK.getStatusCode(), response.getStatus());
        String ent = response.getEntity(String.class);
        assertEquals("[100, \"error message\", true, \"another message\"]", ent);
    }

    /** Posting a new detail field must merge with, not replace, existing details. */
    @Test
    public void testUpdateUserDetailsMerge() throws KustvaktException {
        String enc = HttpAuthorizationHandler.createBasicAuthorizationHeaderValue(credentials[0], credentials[1]);
        Map m = new LinkedMap();
        m.put("test", "test value 1");

        ClientResponse response = resource().path("user").path("details")
                .header(Attributes.AUTHORIZATION, enc).header("Content-Type", MediaType.APPLICATION_JSON)
                .post(ClientResponse.class, m);
        assertEquals(ClientResponse.Status.OK.getStatusCode(), response.getStatus());

        response = resource().path("user").path("details").header(Attributes.AUTHORIZATION, enc)
                .get(ClientResponse.class);
        assertEquals(ClientResponse.Status.OK.getStatusCode(), response.getStatus());
        String ent = response.getEntity(String.class);
        JsonNode node = JsonUtils.readTree(ent);
        assertNotNull(node);
        assertEquals("test value 1", node.at("/test").asText());
        assertEquals("user", node.at("/lastName").asText());
        assertEquals("<EMAIL>", node.at("/email").asText());
    }

    /** A pointer query for a single field returns the raw field value. */
    @Test
    public void testGetUserDetailsPointer() throws KustvaktException {
        String enc = HttpAuthorizationHandler.createBasicAuthorizationHeaderValue(credentials[0], credentials[1]);
        ClientResponse response = resource().path("user").path("details")
                .queryParam("pointer", "email").header(Attributes.AUTHORIZATION, enc).get(ClientResponse.class);
        assertEquals(ClientResponse.Status.OK.getStatusCode(), response.getStatus());
        String ent = response.getEntity(String.class);
        assertEquals("<EMAIL>", ent);
    }

    /** An account with no stored details yields a NO_RESOURCE_FOUND error. */
    @Test
    public void testGetUserDetailsNonExistent() throws KustvaktException {
        // helper().setupSimpleAccount("userservicetest", "servicepass");
        String enc = HttpAuthorizationHandler.createBasicAuthorizationHeaderValue("userservicetest", "servicepass");
        ClientResponse response = resource().path("user").path("details")
                .header(Attributes.AUTHORIZATION, enc).get(ClientResponse.class);
        assertEquals(ClientResponse.Status.BAD_REQUEST.getStatusCode(), response.getStatus());
        String entity = response.getEntity(String.class);
        JsonNode node = JsonUtils.readTree(entity);
        assertNotNull(node);
        assertEquals(StatusCodes.NO_RESOURCE_FOUND, node.at("/errors/0/0").asInt());
        assertEquals("UserDetails", node.at("/errors/0/2").asText());
    }

    @Test
    public void testGetUserSettings() throws KustvaktException {
        String enc = HttpAuthorizationHandler.createBasicAuthorizationHeaderValue(credentials[0], credentials[1]);
        ClientResponse response = resource().path("user").path("settings")
                .header(Attributes.AUTHORIZATION, enc).get(ClientResponse.class);
        assertEquals(ClientResponse.Status.OK.getStatusCode(), response.getStatus());
    }

    /** Updates the core detail fields, verifies them, then restores the originals. */
    @Test
    public void testUpdateUserDetailsJson() throws KustvaktException {
        String enc = HttpAuthorizationHandler.createBasicAuthorizationHeaderValue(credentials[0], credentials[1]);
        Map m = new LinkedMap();
        m.put("firstName", "newName");
        m.put("lastName", "newLastName");
        m.put("email", "<EMAIL>");

        ClientResponse response = resource().path("user").path("details")
                .header(Attributes.AUTHORIZATION, enc).header("Content-Type", MediaType.APPLICATION_JSON)
                .post(ClientResponse.class, m);
        assertEquals(ClientResponse.Status.OK.getStatusCode(), response.getStatus());

        response = resource().path("user").path("details").header(Attributes.AUTHORIZATION, enc)
                .get(ClientResponse.class);
        assertEquals(ClientResponse.Status.OK.getStatusCode(), response.getStatus());
        JsonNode node = JsonUtils.readTree(response.getEntity(String.class));
        assertNotNull(node);
        assertEquals("newName", node.path("firstName").asText());
        assertEquals("newLastName", node.path("lastName").asText());
        assertEquals("<EMAIL>", node.path("email").asText());
        assertEquals("Mannheim", node.path("address").asText());

        // Restore the original values so other tests see the expected account.
        m = new LinkedMap();
        m.put("firstName", "test");
        m.put("lastName", "user");
        m.put("email", "<EMAIL>");
        response = resource().path("user").path("details").header(Attributes.AUTHORIZATION, enc)
                .header("Content-Type", MediaType.APPLICATION_JSON).post(ClientResponse.class, m);
        assertEquals(ClientResponse.Status.OK.getStatusCode(), response.getStatus());
    }

    /** Form-encoded variant of the settings round-trip (read, update, re-read). */
    @Test
    @Ignore
    public void testUpdateUserSettingsForm() throws IOException, KustvaktException {
        String enc = HttpAuthorizationHandler.createBasicAuthorizationHeaderValue(credentials[0], credentials[1]);
        MultivaluedMap m = new MultivaluedMapImpl();
        m.putSingle("queryLanguage", "poliqarp_test");
        m.putSingle("pageLength", "200");

        ClientResponse response = resource().path("user").path("settings")
                .header(Attributes.AUTHORIZATION, enc).header("Content-Type", "application/x-www-form-urlencodeBase64d")
                .get(ClientResponse.class);
        assertEquals(ClientResponse.Status.OK.getStatusCode(), response.getStatus());
        JsonNode map = JsonUtils.readTree(response.getEntity(String.class));
        assertNotNull(map);
        // Before the update, stored settings must differ from the new values.
        assertNotEquals(m.getFirst("queryLanguage"), map.get("queryLanguage"));
        assertNotEquals(m.get("pageLength"), Integer.valueOf((String) m.getFirst("pageLength")));
        assertEquals(ClientResponse.Status.OK.getStatusCode(), response.getStatus());

        response = resource().path("user").path("settings").header(Attributes.AUTHORIZATION, enc)
                .header("Content-Type", "application/x-www-form-urlencodeBase64d").post(ClientResponse.class, m);
        assertEquals(ClientResponse.Status.OK.getStatusCode(), response.getStatus());

        response = resource().path("user").path("settings").header(Attributes.AUTHORIZATION, enc)
                .header("Content-Type", "application/x-www-form-urlencodeBase64d").get(ClientResponse.class);
        assertEquals(ClientResponse.Status.OK.getStatusCode(), response.getStatus());
        map = JsonUtils.readTree(response.getEntity(String.class));
        assertNotNull(map);
        assertEquals(map.get("queryLanguage"), m.getFirst("queryLanguage"));
        int p1 = map.path("pageLength").asInt();
        int p2 = Integer.valueOf((String) m.getFirst("pageLength"));
        assertEquals(p1, p2);
    }

    /** JSON variant of the settings round-trip (read, update, re-read). */
    @Test
    public void testUpdateUserSettingsJson() throws IOException, KustvaktException {
        String enc = HttpAuthorizationHandler.createBasicAuthorizationHeaderValue(credentials[0], credentials[1]);
        Map m = new HashMap<>();
        m.put("queryLanguage", "poliqarp_test");
        m.put("pageLength", "200");
        m.put("setting_1", "value_1");

        ClientResponse response = resource().path("user").path("settings")
                .header(Attributes.AUTHORIZATION, enc).header("Content-Type", MediaType.APPLICATION_JSON)
                .get(ClientResponse.class);
        assertEquals(ClientResponse.Status.OK.getStatusCode(), response.getStatus());
        JsonNode map = JsonUtils.readTree(response.getEntity(String.class));
        assertNotNull(map);
        // Before the update, stored settings must differ from the new values.
        assertNotEquals(m.get("queryLanguage"), map.get("queryLanguage"));
        assertNotEquals(m.get("pageLength"), Integer.valueOf((String) m.get("pageLength")));
        assertEquals(ClientResponse.Status.OK.getStatusCode(), response.getStatus());

        response = resource().path("user").path("settings").header(Attributes.AUTHORIZATION, enc)
                .header("Content-Type", MediaType.APPLICATION_JSON).post(ClientResponse.class, m);
        assertEquals(ClientResponse.Status.OK.getStatusCode(), response.getStatus());

        response = resource().path("user").path("settings").header(Attributes.AUTHORIZATION, enc)
                .get(ClientResponse.class);
        assertEquals(ClientResponse.Status.OK.getStatusCode(), response.getStatus());
        map = JsonUtils.readTree(response.getEntity(String.class));
        assertNotNull(map);
        assertEquals(map.path("queryLanguage").asText(), m.get("queryLanguage"));
        int p1 = map.path("pageLength").asInt();
        int p2 = Integer.valueOf((String) m.get("pageLength"));
        assertEquals(p1, p2);
    }

    // TODO(review): empty test body — appears to be a placeholder; implement or remove.
    @Test
    public void testLoginFailedLockAccount() {

    }

    // TODO(review): empty test body — appears to be a placeholder; implement or remove.
    @Test
    public void delete() {

    }
}
# Build the kernel Rust library for the bare-metal ARM target.
cargo build --release --target arm-none-eabi
# Assemble the startup/boot code.
arm-none-eabi-as -o obj/init.o src/init.s
# Link init + kernel library + libgcc against the custom linker script,
# then flatten the ELF into a raw binary image for the board.
arm-none-eabi-ld --gc-sections -O -nostdlib obj/init.o -L target/arm-none-eabi/release -l kernel -s -L /usr/lib/gcc/arm-none-eabi/5.2.1/armv6-m -lgcc -o elf/kernel.elf -T strontium.ld && arm-none-eabi-objcopy elf/kernel.elf -O binary bin/kernel.img
"""
Implement a function that tests whether a given point lies inside a triangle.
"""

import math


# This function tests whether a point P lies in (or on) triangle ABC.
def find_point_in_triangle(A, B, C, P):
    """Return True if point P is inside or on the boundary of triangle ABC.

    Each argument is an (x, y) pair. Uses the area method: P is inside
    the triangle exactly when area(PAB) + area(PBC) + area(PCA) equals
    area(ABC).

    The comparison uses a tolerance (math.isclose) rather than exact
    float equality — the original `==` check could misclassify points
    because the four areas are computed in floating point.
    """

    def _area(p, q, r):
        # Absolute triangle area via the shoelace formula.
        return abs(p[0] * (q[1] - r[1])
                   + q[0] * (r[1] - p[1])
                   + r[0] * (p[1] - q[1])) / 2

    # Area of the whole triangle ABC.
    area_ABC = _area(A, B, C)

    # Areas of the three sub-triangles formed with P.
    area_PAB = _area(P, A, B)
    area_PBC = _area(P, B, C)
    area_PCA = _area(P, C, A)

    # Float-safe equality check of the area decomposition.
    return math.isclose(area_ABC, area_PAB + area_PBC + area_PCA,
                        rel_tol=1e-9, abs_tol=1e-12)


if __name__ == '__main__':
    A = (8, 2)
    B = (3, 6)
    C = (12, 8)
    P = (7, 4)
    print(find_point_in_triangle(A, B, C, P))
#!/bin/bash
# Launch the EnoPush service in the background.
# Abort if the install directory is missing — the original left `cd`
# unchecked, so a failed cd would run the jar from whatever directory
# the script happened to be invoked in.
cd /usr/local/bin/enopush/ || exit 1
java -jar EnoPush.jar &
def second_min(arr):
    """Return the second-smallest *distinct* value in ``arr``.

    Returns -1 when no such value exists (all elements equal).
    For arrays with fewer than two elements, prints a message and
    returns None.
    """
    # Guard: a second minimum needs at least two elements.
    if len(arr) < 2:
        print("Second minimum element doesnot exist")
        return

    INF = float('inf')
    smallest = runner_up = INF

    for value in arr:
        if value < smallest:
            # New overall minimum; previous minimum becomes the runner-up.
            smallest, runner_up = value, smallest
        elif smallest < value < runner_up:
            # Strictly between the two tracked values: new runner-up.
            runner_up = value

    # runner_up stayed at infinity only when no distinct second value exists.
    return -1 if runner_up == INF else runner_up


print(second_min([23, 5, 9, 15, 1]))
#!/bin/bash export LIBGL_DEBUG=verbose #export LIBGL_ALWAYS_INDIRECT="1" #export LIBGL_ALWAYS_SOFTWARE="1" #LIBGL_SHOW_FPS SELFDIR=`dirname "$0"` SELFDIR=`cd "$SELFDIR" && pwd` # cd "$SELFDIR" glxgears -fullscreen -info >> /tmp/bm_glxgears.log 2>&1 ### The following 2 tests fail under VirtualBox with Guest Additions installed (and used for direct rendering) ### due to insufficient LIBGL implementation of OpenGL :((( Using software rendering (MESA) will work but it is very slow! sh "$SELFDIR/GpuTest_Linux_x64_0.7.0/GpuTest-all.sh" >> /tmp/bm_gputest.log 2>&1 mv $SELFDIR/GpuTest_Linux_x64_0.7.0/_geeks3d_gputest_scores.csv $SELFDIR/GpuTest_Linux_x64_0.7.0/_geeks3d_gputest_log.txt /tmp/ # offscreen glmark2 --annotate --off-screen >> /tmp/bm_glmark2.log 2>&1 # onscreen glmark2 --annotate >> /tmp/bm_glmark2.log 2>&1 # --fullscreen x11perf -sync -repeat 1 -dot -line100 -rect100 -circle100 -scroll100 -shmput100 -ftext >> /tmp/bm_x11perf.log 2>&1 ### x11perf -sync -repeat 1 -all >> /tmp/bm_x11perf.log 2>&1 test_sys.sh >> /tmp/test.log 2>&1 test_x11.sh >> /tmp/test.log 2>&1 test_vbox.sh >> /tmp/test.log 2>&1 test_nv.sh >> /tmp/test.log 2>&1 ls -al /tmp/*.log exit $?
package com.imooc.scala.stream.transformation

import java.{lang, util}

import org.apache.flink.streaming.api.collector.selector.OutputSelector
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment

/**
 * Splits one data stream into multiple streams according to a rule.
 * Note: split can only partition a stream once; the resulting streams cannot be split again.
 * split must be used together with select, which picks out the partitioned streams.
 * Use case: partition one stream into several so each part can be processed with different logic.
 * Created by xuwei
 */
object StreamSplitScala {
  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment

    import org.apache.flink.api.scala._

    // Initialize the data
    val text = env.fromCollection(Array(1, 2, 4, 5, 6, 7, 8, 9, 10))

    // Split the stream by whether each element is odd or even
    val splitStream = text.split(new OutputSelector[Int] {
      override def select(value: Int): lang.Iterable[String] = {
        val list = new util.ArrayList[String]()
        if(value % 2 == 0){
          list.add("even")// even number
        }else{
          list.add("odd")// odd number
        }
        list
      }
    })

    // Select the desired sub-stream by name
    val evenStream = splitStream.select("even")
    evenStream.print().setParallelism(1)

    // Splitting a stream a second time throws an error:
    //Consecutive multiple splits are not supported. Splits are deprecated. Please use side-outputs.
    /*val lowHighStream = evenStream.split(new OutputSelector[Int] {
      override def select(value: Int): lang.Iterable[String] = {
        val list = new util.ArrayList[String]()
        if(value <= 5){
          list.add("low")
        }else{
          list.add("high")
        }
        list
      }
    })
    val lowStream = lowHighStream.select("low")
    lowStream.print().setParallelism(1)*/

    env.execute("StreamSplitScala")
  }
}
package string_handle;

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.StringTokenizer;

/**
 *
 * @author minchoba
 * Baekjoon 5361: Battle droid price
 *
 * @see https://www.acmicpc.net/problem/5361/
 *
 */
public class Boj5361{
	private static final char NEW_LINE = '\n';
	// Unit price (in cents) for each of the five droid parts, in input order.
	private static long[] cost = {35034, 23090, 19055, 12530, 18090};

	public static void main(String[] args) throws Exception{
		BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
		StringBuilder sb = new StringBuilder();

		int T = Integer.parseInt(br.readLine());

		while(T-- > 0){
			StringTokenizer st = new StringTokenizer(br.readLine());
			int loop = st.countTokens();
			long total = 0;

			// Sum quantity * unit price over the parts present on this line.
			for(int i = 0; i < loop; i++){
				total += (Integer.parseInt(st.nextToken())) * cost[i];
			}

			String res = getResult(total);
			// A zero total has no digits to split, so print the canonical "0.00".
			sb.append('$').append(res.equals("0") ? "0.00" : res).append(NEW_LINE);
		}
		System.out.println(sb);		// print the result
	}

	// Formats a cent total as a decimal string by inserting '.' before the
	// last two digits, e.g. 35034 -> "350.34".
	// NOTE(review): produces no leading "0" for totals below 100 cents
	// (e.g. 50 -> ".50"); presumably unreachable here since the smallest
	// nonzero unit price is 12530 — confirm against the problem constraints.
	private static String getResult(long num) {
		String word = String.valueOf(num);
		int leng = word.length();
		String res = "";

		for(int i = 0; i < leng; i++) {
			if(i == leng - 2) res += '.';
			res += word.charAt(i);
		}
		return res;
	}
}
function repeatedString(s, n) {
  // Count the 'a's in the first `n` characters of `s` repeated infinitely:
  // whole copies of `s` that fit, plus a partial prefix of the next copy.
  const wholeCopies = Math.floor(n / s.length);
  const partialLength = n % s.length;

  // Helper: number of 'a' characters in a string.
  const countA = (str) => str.split('').filter((ch) => ch === 'a').length;

  const total = wholeCopies * countA(s) + countA(s.slice(0, partialLength));

  console.log('total', total);
  return total;
}

repeatedString(
  'udjlitpopjhipmwgvggazhuzvcmzhulowmveqyktlakdufzcefrxufssqdslyfuiahtzjjdeaxqeiarcjpponoclynbtraaawrps',
  872514961806
);
repeatedString('aba', 10);
repeatedString('a', 100000);
module.exports = function(grunt) { 'use strict' var s3 = require('s3'); var fs = require('fs'); var ProgressBar = require('progressbar').ProgressBar; grunt.registerTask('release-windows-distributable', 'Releases the distributable for Windows', function() { var buildOptions, done; buildOptions = grunt.option('buildOptions'); done = this.async(); if(process.platform !== 'win32'){ grunt.log.warn('Skipping releasing the win32 distributable because the current platform is not win32'); return; } if(buildOptions.platform == 'all' || buildOptions.platform == 'win32'){ grunt.log.writeln('Reading s3 keys from ./s3.key'); var upload = function(key, secret){ var params = { localDir: "releases/win32/hello-world-setup", s3Params: { Bucket: buildOptions.s3BucketName, Prefix: buildOptions.s3PrefixName }, }; var client = s3.createClient({ s3Options: { accessKeyId: key, secretAccessKey: secret } }); var uploader = client.uploadDir(params); var progress = new ProgressBar(); progress.step('Uploading to S3'); uploader.on('error', function(err) { grunt.log.error("Unable to upload windows distributable to s3 ", err.stack); done(false); }); uploader.on('progress', function() { progress.setTotal(uploader.progressTotal) progress.setTick(uploader.progressAmount); }); uploader.on('end', function() { grunt.log.ok("Done uploading windows distributable to s3"); done(); }); }; fs.readFile('./s3.key', function(err, data) { var accessKey, secret; if (err) { grunt.log.error('Could not read s3.key: '+ err ); } try { var bufferString = data.toString(); var bufferStringSplit = bufferString.split('\n'); accessKey = bufferStringSplit[0].split('AWSAccessKeyId=')[1]; secret = bufferStringSplit[1].split('AWSSecretKey=')[1]; }catch(error) { grunt.log.error('s3.key was not in the expected format: '+ error ); } upload(accessKey, secret); }); } }); grunt.registerTask('release-linux-distributable', 'Releases the distributable for Linux', function() { var buildOptions = grunt.option('buildOptions'); 
if(process.platform !== 'linux'){ grunt.log.warn('Skipping releasing the linux distributable because the current platform is not linux'); return; } if(buildOptions.platform == 'all' || buildOptions.platform == 'linux'){ grunt.log.writeln('Releasing '+ buildOptions.name +' Linux distributable not implemented'); } }); grunt.registerTask('release-darwin-distributable', 'Releases the distributable for Darwin', function() { var buildOptions = grunt.option('buildOptions'); if(process.platform !== 'darwn'){ grunt.log.warn('Skipping releasing the darwin distributable because the current platform is not darwin'); return; } if(buildOptions.platform == 'all' || buildOptions.platform == 'darwin'){ grunt.log.writeln('Releasing '+ buildOptions.name +' darwn distributable not implemented'); } }); };
#!/bin/bash -eu # # Copyright 2018 The Outline Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Releases the Outline iOS client locally. Expects to be invoked through `npm run action`. ./apple/scripts/install_fastlane.sh -p ios pushd platforms/ios bundle exec fastlane ios release local:true
/* global expect:false, test:false */

// Unit test for the md5 utility: hashing a known input must yield the
// well-known md5 digest of "foo".
const md5 = require("../../src/util/md5");

test("it can generate md5 hash", () => {
  const hash = md5("foo");
  expect(hash).toBe("acbd18db4cc2f85cedef654fccc4a4d8");
});
#!/usr/bin/env bash # Uploads a new version of d8 CIPD package set -e set -x if [ -z "$1" ]; then echo "Usage: update.sh version" exit 1 fi version=$1 major=$(echo "$version" | cut -d'.' -f1) minor=$(echo "$version" | cut -d'.' -f2) patch=$(echo "$version" | cut -d'.' -f3) tmpdir=$(mktemp -d) cleanup() { rm -rf "$tmpdir" } trap cleanup EXIT HUP INT QUIT TERM PIPE cd "$tmpdir" arch=("linux64" "linux32" "linux64" "linux-arm32" "mac64" "win64") path=("linux" "linux/ia32" "linux/x64" "linux/arm32" "macos" "windows") for i in "${!arch[@]}" do filename="v8-${arch[$i]}-rel-$version.zip" gsutil cp "gs://chromium-v8/official/canary/$filename" . mkdir -p d8/${path[$i]} unzip -q $filename -d d8/${path[$i]} rm $filename done cipd create \ -name dart/d8 \ -in d8 \ -install-mode copy \ -tag version:$version \ -tag version:$major.$minor.$patch \ -tag version:$major.$minor
import https from 'https'; import download from 'download'; /** * Downloads a json file from github. Parses the json and return the template url corresponding to @param name. If key is not found in object, the default template is returned. * The advantage is we don't have to host and maintain a separate backend. The downside is extra bytes being fetched * @param {String} name template name */ const getFromRepo = name => { return new Promise((resolve, reject) => { try { // We keep this file in a separate repo. This may be necessary if we use github action to publish the current project. const url = 'https://raw.githubusercontent.com/chasscepts/templates/master/templates.json'; download(url) .then(buffer => { // We know this will work because we created the file. If file is corrupted and it throws we catch and reject. const templates = JSON.parse(buffer.toString('utf8')); // templates is a key - value pair of template name and it's repo url // if there is no template with key name, we return the default template. templates.default must not be empty. const template = templates[name] || templates.default; resolve(template); }) .catch(err => reject(err)); } catch(err) { reject(err); } }); } /** * Sends an api request to retrieve the template url corresponding to @param name. The upside is we can change the we store our templates and it also saves few bytes. The downside is we have to host the backend. If our app has a home page (not on github) we should consider this approach. 
* @param {String} name template name */ const getFromApi = name => { return new Promise((resolve, reject) => { const url = `${/* our endpoint */''}/${name}`; const req = https.get(url); // verify response code req.on('response', res => { if (res.statusCode !== 200) { reject('Response status was ' + res.statusCode); return; } const data = []; req.on('data', (chunk) => { data.push(chunk); }).on('end', () => { // Implementation assumes that backend will resolve template from the name passed and return the url of the template repo. const template = Buffer.concat(data).toString(); resolve(template); }); }); // check for request errors req.on('error', err => { reject(err); }); }); } export default function(name) { //return getFromApi(name); return getFromRepo(name); }
# Import necessary libraries
import pandas as pd

# Load the dataset from disk.
data = pd.read_csv('dataset.csv')

# Generate a summary-statistics report (count, mean, std, min, quartiles,
# max for each numeric column). describe() already returns a DataFrame,
# so the previous extra pd.DataFrame(...) wrapper was redundant.
report = data.describe()
print(report)
/*
 * Copyright 2019 LINE Corporation
 *
 * LINE Corporation licenses this file to you under the Apache License,
 * version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *
 *   https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package com.linecorp.armeria.spring.actuate;

import static org.assertj.core.api.Assertions.assertThat;

import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;

import javax.inject.Inject;

import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.DisableOnDebug;
import org.junit.rules.TestRule;
import org.junit.rules.Timeout;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.actuate.health.Health;
import org.springframework.boot.actuate.health.HealthIndicator;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.autoconfigure.ImportAutoConfiguration;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.context.annotation.Bean;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.context.junit4.SpringRunner;

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableMap;

import com.linecorp.armeria.client.WebClient;
import com.linecorp.armeria.common.AggregatedHttpResponse;
import com.linecorp.armeria.common.HttpData;
import com.linecorp.armeria.common.HttpHeaderNames;
import com.linecorp.armeria.common.HttpMethod;
import com.linecorp.armeria.common.HttpRequest;
import com.linecorp.armeria.common.HttpResponse;
import com.linecorp.armeria.common.HttpStatus;
import com.linecorp.armeria.common.MediaType;
import com.linecorp.armeria.common.RequestHeaders;
import com.linecorp.armeria.common.ResponseHeaders;
import com.linecorp.armeria.server.Server;
import com.linecorp.armeria.spring.ArmeriaServerConfigurator;
import com.linecorp.armeria.spring.actuate.ArmeriaSpringActuatorAutoConfigurationTest.TestConfiguration;

import io.prometheus.client.exporter.common.TextFormat;

import reactor.test.StepVerifier;

/**
 * This uses {@link com.linecorp.armeria.spring.ArmeriaAutoConfiguration} for integration tests.
 * {@code application-autoConfTest.yml} will be loaded with minimal settings to make it work.
 */
@RunWith(SpringRunner.class)
@SpringBootTest(classes = TestConfiguration.class)
@ActiveProfiles({ "local", "autoConfTest" })
@DirtiesContext
@EnableAutoConfiguration
@ImportAutoConfiguration(ArmeriaSpringActuatorAutoConfiguration.class)
public class ArmeriaSpringActuatorAutoConfigurationTest {

    // Shared mapper for decoding actuator JSON responses in assertions.
    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
    private static final TypeReference<Map<String, Object>> JSON_MAP =
            new TypeReference<Map<String, Object>>() {};

    private static final String TEST_LOGGER_NAME = "com.linecorp.armeria.spring.actuate.testing.TestLogger";

    // We use this logger to test the /loggers endpoint, so set the name manually instead of using class name.
    @SuppressWarnings("unused")
    private static final Logger TEST_LOGGER = LoggerFactory.getLogger(TEST_LOGGER_NAME);

    // Health indicator whose status the tests can flip between UP and DOWN.
    static class SettableHealthIndicator implements HealthIndicator {

        private volatile Health health = Health.up().build();

        void setHealth(Health health) {
            this.health = health;
        }

        @Override
        public Health health() {
            return health;
        }
    }

    @SpringBootApplication
    public static class TestConfiguration {
        @Bean
        public SettableHealthIndicator settableHealth() {
            return new SettableHealthIndicator();
        }

        @Bean
        public ArmeriaServerConfigurator serverConfigurator() {
            return sb -> sb.requestTimeoutMillis(TIMEOUT_MILLIS);
        }
    }

    private static final long TIMEOUT_MILLIS = TimeUnit.SECONDS.toMillis(30);

    // Global per-test timeout, disabled while a debugger is attached.
    @Rule
    public TestRule globalTimeout = new DisableOnDebug(new Timeout(TIMEOUT_MILLIS, TimeUnit.MILLISECONDS));

    @Inject
    private Server server;
    @Inject
    private SettableHealthIndicator settableHealth;

    private WebClient client;

    @Before
    public void setUp() {
        client = WebClient.builder(newUrl("h2c"))
                          .responseTimeoutMillis(TIMEOUT_MILLIS)
                          .maxResponseLength(0)
                          .build();
        // Reset health to UP so tests are order-independent.
        settableHealth.setHealth(Health.up().build());
    }

    // Builds a base URL for the given scheme against the running server's port.
    private String newUrl(String scheme) {
        final int port = server.activeLocalPort();
        return scheme + "://127.0.0.1:" + port;
    }

    // /health reports UP with the actuator media type.
    @Test
    public void testHealth() throws Exception {
        final AggregatedHttpResponse res = client.get("/internal/actuator/health").aggregate().get();
        assertThat(res.status()).isEqualTo(HttpStatus.OK);
        assertThat(res.contentType()).isEqualTo(ArmeriaSpringActuatorAutoConfiguration.ACTUATOR_MEDIA_TYPE);
        final Map<String, Object> values = OBJECT_MAPPER.readValue(res.content().array(), JSON_MAP);
        assertThat(values).containsEntry("status", "UP");
    }

    // A DOWN indicator turns /health into 503 with status DOWN.
    @Test
    public void testHealth_down() throws Exception {
        settableHealth.setHealth(Health.down().build());

        final AggregatedHttpResponse res = client.get("/internal/actuator/health").aggregate().get();
        assertThat(res.status()).isEqualTo(HttpStatus.SERVICE_UNAVAILABLE);
        final Map<String, Object> values =
                OBJECT_MAPPER.readValue(res.content().array(), JSON_MAP);
        assertThat(values).containsEntry("status", "DOWN");
    }

    @Test
    public void testOptions() throws Exception {
        final AggregatedHttpResponse res =
                client.options("/internal/actuator/health").aggregate().get();
        // CORS not enabled by default.
        assertThat(res.status()).isEqualTo(HttpStatus.METHOD_NOT_ALLOWED);
    }

    // /loggers: read the effective level, change it via POST, read it back,
    // then POST an empty body to reset the configured level.
    @Test
    public void testLoggers() throws Exception {
        final String loggerPath = "/internal/actuator/loggers/" + TEST_LOGGER_NAME;
        AggregatedHttpResponse res = client.get(loggerPath).aggregate().get();
        assertThat(res.status()).isEqualTo(HttpStatus.OK);
        assertThat(res.contentType()).isEqualTo(ArmeriaSpringActuatorAutoConfiguration.ACTUATOR_MEDIA_TYPE);
        Map<String, Object> values = OBJECT_MAPPER.readValue(res.content().array(), JSON_MAP);
        assertThat(values).containsEntry("effectiveLevel", "DEBUG");

        res = client.execute(RequestHeaders.of(HttpMethod.POST, loggerPath,
                                               HttpHeaderNames.CONTENT_TYPE, MediaType.JSON_UTF_8),
                             OBJECT_MAPPER.writeValueAsBytes(ImmutableMap.of("configuredLevel", "info")))
                    .aggregate().get();
        assertThat(res.status()).isEqualTo(HttpStatus.NO_CONTENT);

        res = client.get(loggerPath).aggregate().get();
        values = OBJECT_MAPPER.readValue(res.content().array(), JSON_MAP);
        assertThat(values).containsEntry("effectiveLevel", "INFO");

        client.post(loggerPath, OBJECT_MAPPER.writeValueAsBytes(ImmutableMap.of()))
              .aggregate().get();
    }

    // /prometheus exposes metrics in the Prometheus text exposition format.
    @Test
    public void testPrometheus() throws Exception {
        final AggregatedHttpResponse res =
                client.get("/internal/actuator/prometheus").aggregate().get();
        assertThat(res.status()).isEqualTo(HttpStatus.OK);
        assertThat(res.contentType()).isEqualTo(MediaType.parse(TextFormat.CONTENT_TYPE_004));
        assertThat(res.contentAscii()).startsWith("# HELP ");
    }

    // /heapdump streams a binary attachment whose total data length must
    // match the Content-Length header exactly.
    @Test
    public void testHeapDump() throws Exception {
        final HttpResponse res = client.get("/internal/actuator/heapdump");
        final AtomicLong remainingBytes = new AtomicLong();
        StepVerifier.create(res)
                    .assertNext(obj -> {
                        assertThat(obj).isInstanceOf(ResponseHeaders.class);
                        final ResponseHeaders headers = (ResponseHeaders) obj;
                        assertThat(headers.status()).isEqualTo(HttpStatus.OK);
                        assertThat(headers.contentType()).isEqualTo(MediaType.OCTET_STREAM);
                        assertThat(headers.get(HttpHeaderNames.CONTENT_DISPOSITION))
                                .startsWith("attachment;filename=heapdump");
                        final long contentLength = headers.getLong(HttpHeaderNames.CONTENT_LENGTH, -1);
                        assertThat(contentLength).isPositive();
                        remainingBytes.set(contentLength);
                    })
                    .thenConsumeWhile(obj -> {
                        assertThat(obj).isInstanceOf(HttpData.class);
                        final HttpData data = (HttpData) obj;
                        final long newRemainingBytes = remainingBytes.addAndGet(-data.length());
                        assertThat(newRemainingBytes).isNotNegative();
                        return newRemainingBytes > 0; // Stop at the last HttpData.
                    })
                    .expectNextCount(1) // Skip the last HttpData.
                    .verifyComplete();

        assertThat(remainingBytes).hasValue(0);
    }

    // The actuator root exposes a HAL-style _links index.
    @Test
    public void testLinks() throws Exception {
        final AggregatedHttpResponse res = client.get("/internal/actuator").aggregate().get();
        assertThat(res.status()).isEqualTo(HttpStatus.OK);
        assertThat(res.contentType()).isEqualTo(ArmeriaSpringActuatorAutoConfiguration.ACTUATOR_MEDIA_TYPE);
        final Map<String, Object> values = OBJECT_MAPPER.readValue(res.content().array(), JSON_MAP);
        assertThat(values).containsKey("_links");
    }

    // A POST without a Content-Type header is still accepted.
    @Test
    public void testMissingMediaType() throws Exception {
        final String loggerPath = "/internal/actuator/loggers/" + TEST_LOGGER_NAME;
        final AggregatedHttpResponse res =
                client.execute(RequestHeaders.of(HttpMethod.POST, loggerPath),
                               OBJECT_MAPPER.writeValueAsBytes(ImmutableMap.of("configuredLevel", "debug")))
                      .aggregate().get();
        assertThat(res.status()).isEqualTo(HttpStatus.NO_CONTENT);
    }

    // A POST with a non-JSON Content-Type is rejected with 415.
    @Test
    public void testInvalidMediaType() throws Exception {
        final String loggerPath = "/internal/actuator/loggers/" + TEST_LOGGER_NAME;
        final AggregatedHttpResponse res =
                client.execute(RequestHeaders.of(HttpMethod.POST, loggerPath,
                                                 HttpHeaderNames.CONTENT_TYPE, MediaType.PROTOBUF),
                               OBJECT_MAPPER.writeValueAsBytes(ImmutableMap.of("configuredLevel", "info")))
                      .aggregate().get();
        assertThat(res.status()).isEqualTo(HttpStatus.UNSUPPORTED_MEDIA_TYPE);
    }

    // Same auto-configuration but with the CORS profile active; verifies the
    // preflight response carries the expected CORS headers.
    @RunWith(SpringRunner.class)
    @SpringBootTest(classes = org.springframework.boot.test.context.TestConfiguration.class)
    @ActiveProfiles({ "local", "autoConfTest", "autoConfTestCors" })
    @DirtiesContext
    @EnableAutoConfiguration
    @ImportAutoConfiguration(ArmeriaSpringActuatorAutoConfiguration.class)
    public static class ArmeriaSpringActuatorAutoConfigurationCorsTest {
        @SpringBootApplication
        public static class TestConfiguration {}

        @Rule
        public TestRule globalTimeout = new DisableOnDebug(new Timeout(10, TimeUnit.SECONDS));

        @Inject
        private Server server;
        private WebClient client;

        @Before
        public void setUp() {
            client = WebClient.of(newUrl("h2c"));
        }

        private String newUrl(String scheme) {
            final int port = server.activeLocalPort();
            return scheme + "://127.0.0.1:" + port;
        }

        @Test
        public void testOptions() {
            final HttpRequest req = HttpRequest.of(RequestHeaders.of(
                    HttpMethod.OPTIONS, "/internal/actuator/health",
                    HttpHeaderNames.ORIGIN, "https://example.com",
                    HttpHeaderNames.ACCESS_CONTROL_REQUEST_METHOD, "GET"));
            final AggregatedHttpResponse res = client.execute(req).aggregate().join();
            assertThat(res.status()).isEqualTo(HttpStatus.OK);
            assertThat(res.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN))
                    .isEqualTo("https://example.com");
            assertThat(res.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_METHODS))
                    .isEqualTo("GET,POST");
            assertThat(res.headers().contains(HttpHeaderNames.ACCESS_CONTROL_MAX_AGE)).isTrue();
            assertThat(res.status()).isNotEqualTo(HttpStatus.METHOD_NOT_ALLOWED);
        }
    }
}
// @Author : Lik
// @Time : 2020/10/19
package main

import (
	"encoding/json"
	"flag"
	"log"
	"time"
)

// ZyBaseRequest is the signed envelope for requests to the ZY service.
type ZyBaseRequest struct {
	AppId     string      `json:"appId"`
	Data      interface{} `json:"data"`
	Timestamp string      `json:"timestamp"`
	Sign      string      `json:"sign"`
	Source    string
}

// API paths and key material. The "<KEY>" placeholders stand in for real
// RSA key strings that are redacted here.
const (
	PushFeatureDeal = "/ndToZy/pushFeatureDeal"
	PushUserInfo    = "/ndToZy/pushUserInfo"
	PushFeatureInfo = "/ndToZy/pushFeatureInfo"
	//PublicKey = "<KEY>"
	PublicKey  = "<KEY>"
	PrivateKey = "<KEY>"
)

var configFile = flag.String("f", "config/job_risk_third.json", "the config file")

// FeatureDealRequest is the business payload describing how a flagged
// feature was handled.
type FeatureDealRequest struct {
	FeatureId  string `json:"featureId"`
	UserId     string `json:"userId"`
	ObjectId   string `json:"objectId"`
	DealId     string `json:"dealId"`
	DealName   string `json:"dealName"`
	DealTime   string `json:"dealTime"`
	DealStatus string `json:"dealStatus"`
	DealResult string `json:"dealResult"`
}

// BodyStruct is the outgoing form body: AES-encrypted data plus the
// RSA-encrypted AES key/salt and an RSA signature over the fields.
type BodyStruct struct {
	Data string `json:"data"`
	Sign string `json:"sign"`
	Key  string `json:"key"`
	Salt string `json:"salt"`
}

// main builds a sample FeatureDealRequest, hybrid-encrypts it
// (AES for the payload, RSA for the AES key/IV), signs the field map,
// and logs the resulting body. The actual HTTP call is commented out.
// NOTE(review): Krand, RangeRand, RSAEncrypt, RSAPriEncrypt, AESEncrypt
// and Sha256 are helpers defined elsewhere in this package — their exact
// encodings (base64 vs hex) are not visible here.
func main() {
	flag.Parse()
	body := new(FeatureDealRequest)
	body.FeatureId = "111111"
	body.UserId = "22222"
	body.ObjectId = "33333"
	body.DealId = "44444"
	body.DealName = "李四"
	body.DealTime = time.Now().Format("2006-01-02 15:04:05")
	body.DealStatus = "1"
	body.DealResult = "已处置,属于正常工作情况"
	jsBody, _ := json.Marshal(body)

	// Fresh AES key and IV for this request.
	aesKeyWithBase64 := Krand(32, 3)
	aesIVWithBase64 := RangeRand(1000000000000000, 10000000000000000)
	//aesIVWithBase64 := []byte("m4mVWmNAZqfXfV5WxyI2WA")
	//aesKeyWithBase64 := []byte("<KEY>")

	// Wrap the AES key/IV with the recipient's RSA public key, encrypt
	// the payload with AES, then sign the whole field map with our
	// RSA private key.
	key, _ := RSAEncrypt(aesKeyWithBase64, PublicKey)
	salt, _ := RSAEncrypt([]byte(aesIVWithBase64), PublicKey)
	cipherData, _ := AESEncrypt(jsBody, aesKeyWithBase64, []byte(aesIVWithBase64))
	reqMap := make(map[string]string)
	reqMap["key"] = key
	reqMap["salt"] = salt
	reqMap["data"] = cipherData
	reqMap["source"] = "TEST"
	sign, _ := RSAPriEncrypt(Sha256(reqMap), PrivateKey)
	reqMap["sign"] = sign
	//url := fmt.Sprintf("%s:%d%s", cfg.GetString("zy_host.address"), cfg.GetInt("zy_host.port"), PushFeatureDeal)
	headerMap := make(map[string]string)
	headerMap["Content-Type"] = "application/x-www-form-urlencoded"

	// Debug logging of every intermediate artifact.
	log.Printf("aesKeyWithBase64 %s", string(aesKeyWithBase64))
	log.Printf("aesIVWithBase64 %s", string(aesIVWithBase64))
	log.Printf("key %s", string(key))
	log.Printf("salt %s", string(salt))
	log.Printf("cipherData %s", string(cipherData))
	log.Printf("sign %s", sign)
	b, _ := json.Marshal(BodyStruct{
		Data: cipherData,
		Sign: sign,
		Key:  key,
		Salt: salt,
	})
	log.Println(string(b))
	//log.Printf("url %s", url)
	//b, err := utils.HttpDo(url,"POST",reqMap,headerMap)
	//if err!= nil{
	//	fmt.Println(err)
	//}
	//fmt.Println(string(b))
}
from itertools import combinations


def get_combinations(arr):
    """Return every combination of the elements of ``arr`` as a list of
    lists, ordered by combination size (1..len(arr)).

    The previous implementation appended ``arr[i:j] + arr[j:k]`` — which
    is simply ``arr[i:k]`` — so it produced duplicated *contiguous*
    slices rather than combinations, contradicting the function's name.
    """
    result = []
    for size in range(1, len(arr) + 1):
        # itertools.combinations yields tuples in lexicographic order.
        for combo in combinations(arr, size):
            result.append(list(combo))
    return result


result = get_combinations([1, 2, 3])
print(result)
/**
 * Module dependencies.
 */

// commented out by npm-component: var query = require('tower-query');
// commented out by npm-component: var each = require('part-async-series');

/**
 * Save and invoke `fn(err)`.
 *
 * Events:
 *
 *   - `save` on updates and saves
 *   - `saving` pre-update or save, after validation
 *
 * @constructor Resource
 * @param {Function} fn Function invoked on resource creation.
 * @api public
 */

exports.save = function(fn){
  var self = this;
  // Emit `saving` on both the constructor (class-level listeners) and
  // the instance before validation runs.
  this.constructor.emit('saving', this);
  this.emit('saving');
  // XXX: needs to somehow set default properties
  // XXX: this itself should probably be
  // bundled into a topology/stream/action.
  this.validate(function(err){
    if (err) {
      fn(err);
    } else {
      query()
        .select(self.constructor.className)
        .create(self, function(){
          // Persisted: clear the dirty-attribute tracking map.
          self.dirty = {};
          self.constructor.emit('save', self);
          self.emit('save');
          if (fn) fn(null, self);
        });
    }
  });
};

/**
 * Update and invoke `fn(err)`.
 *
 * @constructor Resource
 * @param {Function} fn Function executed on resource update.
 * @return {Mixed} fn return value.
 * @api private
 */

exports.update = function(fn){
  return query()
    .select(this.constructor.className)
    .action('update', this).exec(fn);
};

/**
 * Remove the resource and mark it as `.removed`
 * and invoke `fn(err)`.
 *
 * Events:
 *
 *   - `removing` before deletion
 *   - `remove` on deletion
 *
 * @constructor Resource
 * @param {Function} fn Function executed on resource removal.
 * @return {Mixed} fn return value.
 * @api public
 */

exports.remove = function(fn){
  return query()
    .select(this.constructor.className)
    .where('id').eq(this.get('id'))
    .action('remove').exec(fn);
};

/**
 * Validate the resource and return a boolean.
 *
 * @constructor Resource
 * @param {Function} fn Validation function.
 * @return {Boolean} true if there were errors, else false.
 * @api public
 */

exports.isValid = function(fn){
  this.validate(fn);
  return 0 === this.errors.length;
};

/**
 * Perform validations.
 *
 * @constructor Resource
 * @param {Function} fn Validation function.
 * @return {Boolean} true if there were errors, else false.
 * @api private
 */

exports.validate = function(fn){
  var self = this;
  // Reset the error list; validators push into it as they run.
  this.errors = [];
  this.emit('validating', this);
  // XXX: need single `validateAttributes`
  // XXX: need to store validators by key.
  each(this.constructor.validators, this, function(){
    // self.emit('after-validate', self);
    // self.emit('validated', self);
    self.emit('validate', self);
    if (fn) {
      if (self.errors.length)
        fn(new Error('Validation Error'));
      else
        fn();
    }
  });
  return 0 === this.errors.length;
};

/**
 * Set attribute value.
 *
 * @constructor Resource
 * @chainable
 * @param {String} name Attribute name.
 * @param {Mixed} val Attribute value.
 * @param {Boolean} quiet If true, won't dispatch change events.
 * @return {Resource}
 * @api public
 */

exports.set = function(name, val, quiet){
  var attr = this.constructor.attrs[name];
  // Unknown attributes are silently ignored.
  if (!attr) return; // XXX: throw some error, or dynamic property flag?
  if (undefined === val && attr.hasDefaultValue)
    val = attr.apply(this);
  val = attr.typecast(val);
  var prev = this.attrs[name];
  // Record the new value in both the dirty map and the live attributes.
  this.dirty[name] = val;
  this.attrs[name] = val;
  // XXX: this `quiet` functionality could probably be implemented
  //   in a less ad-hoc way. It is currently only used when setting
  //   properties passed in through `init`, such as from a db/adapter
  //   serializing data into a resource, doesn't need to dispatch changes.
  if (!quiet) {
    this.constructor.emit('change ' + name, this, val, prev);
    this.emit('change ' + name, val, prev);
  }
  return this;
};

/**
 * Get `name` value.
 *
 * @constructor Resource
 * @param {String} name Attribute name.
 * @return {Mixed} Attribute value.
 * @api public
 */

exports.get = function(name){
  // XXX: need a better way to do this
  if ('id' === name && this.__id__) return this.__id__;
  if (undefined === this.attrs[name]) {
    // Lazily compute and cache the attribute's default value.
    var attr = this.defaultAttr(name)
    if (attr) return this.attrs[name] = attr.apply(this);
  } else {
    return this.attrs[name];
  }
};

/**
 * Check if `attr` is present (not `null` or `undefined`).
 *
 * @constructor Resource
 * @param {String} attr Attribute name.
 * @return {Boolean} true if attribute exists, else false.
 * @api public
 */

exports.has = function(attr){
  return null != this.attrs[attr];
};

/**
 * Return the JSON representation of the resource.
 *
 * @constructor Resource
 * @return {Object} Resource attributes.
 * @api public
 */

exports.toJSON = function(){
  return this.attrs;
};

/**
 * Returns `Attr` definition if it has a default value.
 *
 * @constructor Resource
 * @param {String} name Attribute name.
 * @return {Boolean|Function} Attr definition if it exists, else.
 * @api private
 */

exports.defaultAttr = function(name){
  var defaultAttrs = this.constructor.attrs.__default__;
  return defaultAttrs.hasOwnProperty(name) && defaultAttrs[name];
};
def AverageArray(arr):
    """Return the arithmetic mean of the numbers in ``arr``.

    Improvements over the original: uses the built-in ``sum`` instead of a
    manual loop whose accumulator shadowed the builtin, accepts any iterable
    (not only sequences with ``len``), and rejects empty input with a clear
    error instead of a cryptic ``ZeroDivisionError``.

    Args:
        arr: A non-empty iterable of numbers.

    Returns:
        The mean of the values (true division, so usually a float).

    Raises:
        ValueError: If ``arr`` yields no values.
    """
    values = list(arr)
    if not values:
        raise ValueError("AverageArray() requires a non-empty iterable")
    return sum(values) / len(values)


# Driver Code
arr = [1, 2, 3, 4, 5]
print(AverageArray(arr))
function generateHeadline(faker) { const headline = `Loadflux: ${faker.lorem.sentence(5)} @ ${new Date().toLocaleString()} - updated`; return headline; }
#!/bin/bash
# Fetch the OSU 0.18um standard-cell liberty file (when missing or empty)
# and synthesize picorv32 against it with Yosys.
set -ex

lib=osu018_stdcells.lib
url=http://vlsiarch.ecen.okstate.edu/flows/MOSIS_SCMOS/latest/cadence/lib/tsmc018/signalstorm/$lib

if [ ! -s "$lib" ]; then
    # Download to a .part name first so an interrupted transfer never
    # leaves a truncated liberty file behind; the mv is effectively atomic.
    wget --continue -O "$lib.part" "$url"
    mv "$lib.part" "$lib"
fi

# Generic synthesis, then map flip-flops and combinational logic to the
# liberty cells, and print area/cell statistics.
yosys -p "synth -top picorv32; dfflibmap -liberty $lib; abc -liberty $lib; stat" ../../picorv32.v
# Re-source the zsh configuration in the current shell session.
alias reload!='. ~/.zshrc'

# Prefer the locally installed Emacs binaries (e.g. from Homebrew)
# over any system-provided ones on PATH.
alias emacs='/usr/local/bin/emacs'
alias emacsclient='/usr/local/bin/emacsclient'
def slice(operation_name, array, axes, start, end):
    """Slice ``array`` along a single axis.

    Only the ``"slice_1d"`` operation with exactly one axis is supported.
    NOTE: the function name intentionally matches the caller's dispatch
    convention and therefore shadows the built-in ``slice`` in this module.

    Args:
        operation_name: Must be ``"slice_1d"``.
        array: Any sliceable sequence.
        axes: Single-element list naming the axis to slice on.
        start: Per-axis start indices, indexed by the axis number.
        end: Per-axis end indices (exclusive), indexed by the axis number.

    Returns:
        The sub-sequence ``array[start[axis]:end[axis]]``.

    Raises:
        ValueError: For any other operation name or axis count.
    """
    if operation_name != "slice_1d" or len(axes) != 1:
        raise ValueError("Unsupported operation or invalid number of axes for slicing.")
    axis = axes[0]
    return array[start[axis]:end[axis]]
package br.com.guestcontrol.server.core.crud;

import java.io.Serializable;
import java.util.List;

/**
 * Generic CRUD contract implemented by entity services.
 *
 * @param <T> the entity type managed by the service
 * @param <ID> the entity's identifier type; must be {@link Serializable}
 */
public interface CrudService<T, ID extends Serializable> {

    /** Returns all persisted entities of type {@code T}. */
    List<T> findAll();

    /**
     * Looks up a single entity by its identifier.
     *
     * @param id the identifier to search for
     * @return the matching entity (behavior for a missing id is
     *     implementation-defined — TODO confirm whether null or an
     *     exception is expected)
     */
    T findById(ID id);

    /**
     * Creates or updates the given entity.
     *
     * @param entity the entity to persist
     * @return the persisted entity (possibly with a generated id)
     * @throws Exception if persistence fails
     *     (NOTE(review): a raw {@code throws Exception} forces callers to
     *     catch everything; a dedicated persistence exception would be
     *     preferable, but narrowing it here would break implementors.)
     */
    T save(T entity) throws Exception;

    /** Deletes the entity with the given identifier. */
    void delete(ID id);

    /** Deletes the given entity instance. */
    void delete(T entity);
}
import "@material/mwc-button/mwc-button";
import {
  css,
  CSSResultGroup,
  html,
  LitElement,
  PropertyValues,
  TemplateResult,
} from "lit";
import { customElement, property, state } from "lit/decorators";
import { isComponentLoaded } from "../../../common/config/is_component_loaded";
import "../../../components/ha-network";
import "../../../components/ha-card";
import "../../../components/ha-checkbox";
import "../../../components/ha-settings-row";
import {
  NetworkConfig,
  getNetworkConfig,
  setNetworkConfig,
} from "../../../data/network";
import { haStyle } from "../../../resources/styles";
import type { HomeAssistant } from "../../../types";

/**
 * Advanced-mode settings card for choosing which network adapters
 * integrations use. Renders nothing unless the user has advanced mode
 * enabled and the `network` component is loaded.
 */
@customElement("ha-config-network")
class ConfigNetwork extends LitElement {
  @property({ attribute: false }) public hass!: HomeAssistant;

  // Fetched once on first render; undefined until loaded.
  @state() private _networkConfig?: NetworkConfig;

  // Last load/save error message, shown above the adapter list.
  @state() private _error?: string;

  protected render(): TemplateResult {
    if (
      !this.hass.userData?.showAdvanced ||
      !isComponentLoaded(this.hass, "network")
    ) {
      return html``;
    }
    return html`
      <ha-card header="Network">
        <div class="card-content">
          ${this._error ? html`<div class="error">${this._error}</div>` : ""}
          <p>
            Configure which network adapters integrations will use. Currently
            this setting only affects multicast traffic. A restart is required
            for these settings to apply.
          </p>
          <ha-network
            @network-config-changed=${this._configChanged}
            .hass=${this.hass}
            .networkConfig=${this._networkConfig}
          ></ha-network>
        </div>
        <div class="card-actions">
          <mwc-button @click=${this._save}>
            ${this.hass.localize(
              "ui.panel.config.core.section.core.core_config.save_button"
            )}
          </mwc-button>
        </div>
      </ha-card>
    `;
  }

  // Kick off the initial config fetch once the element is in the DOM.
  protected firstUpdated(changedProps: PropertyValues) {
    super.firstUpdated(changedProps);
    if (isComponentLoaded(this.hass, "network")) {
      this._load();
    }
  }

  /** Fetch the current network configuration; surface failures via _error. */
  private async _load() {
    this._error = undefined;
    try {
      this._networkConfig = await getNetworkConfig(this.hass);
    } catch (err) {
      this._error = err.message || err;
    }
  }

  /** Persist the currently selected adapters; surface failures via _error. */
  private async _save() {
    this._error = undefined;
    try {
      await setNetworkConfig(
        this.hass,
        this._networkConfig?.configured_adapters || []
      );
    } catch (err) {
      this._error = err.message || err;
    }
  }

  // Mirror the child component's selection into local state; only saved
  // to the backend when the user presses the save button.
  private _configChanged(event: CustomEvent): void {
    this._networkConfig = {
      ...this._networkConfig!,
      configured_adapters: event.detail.configured_adapters,
    };
  }

  static get styles(): CSSResultGroup {
    return [
      haStyle,
      css`
        .error {
          color: var(--error-color);
        }
        ha-settings-row {
          padding: 0;
        }
        .card-actions {
          display: flex;
          flex-direction: row-reverse;
          justify-content: space-between;
          align-items: center;
        }
      `,
      // row-reverse so we tab first to "save"
    ];
  }
}

declare global {
  interface HTMLElementTagNameMap {
    "ha-config-network": ConfigNetwork;
  }
}
package org.telegram.telegrambots.api.methods.send;

import java.io.File;
import java.io.InputStream;

import javax.ws.rs.Path;

import org.telegram.telegrambots.api.objects.replykeyboard.ReplyKeyboard;

import com.fasterxml.jackson.annotation.JsonProperty;

import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.ToString;

/**
 * Request object for the Telegram Bot API "sendDocument" method.
 * Use this method to send general files. On success, the sent Message is
 * returned.
 *
 * <p>Getters/setters/equals/hashCode/toString are generated by Lombok
 * ({@code @Data}); JSON field names come from the {@code @JsonProperty}
 * annotations.
 *
 * @author <NAME>
 * @author <NAME>
 * @date 2016-11-21
 */
@Data
@EqualsAndHashCode(callSuper=true)
@ToString(callSuper=true)
@Path("senddocument")
public class SendDocument extends ApiMethod{

    @JsonProperty("chat_id")
    String chatId; ///< Unique identifier for the chat to send the message to or Username for the channel to send the message to
    String document; ///< File file to send. file_id as String to resend a file that is already on the Telegram servers
    String caption; ///< Optional. Document caption (may also be used when resending documents by file_id), 0-200 characters
    /**
     * Optional. Sends the message silently. iOS users will not receive a notification, Android
     * users will receive a notification with no sound. Other apps coming soon
     */
    @JsonProperty("disable_notification")
    Boolean disableNotification;
    @JsonProperty("reply_to_message_id")
    Integer replyToMessageId; ///< Optional. If the message is a reply, ID of the original message
    @JsonProperty("reply_markup")
    ReplyKeyboard replyMarkup; ///< Optional. JSON-serialized object for a custom reply keyboard
    // Exactly one of the three fields below is meaningful depending on
    // isNewDocument: false -> `document` holds a file_id; true -> upload
    // from newDocumentFile or newDocumentStream. TODO confirm against the
    // request-building code, which is outside this file.
    @JsonProperty("is_new_document")
    boolean isNewDocument; ///< True to upload a new document, false to use a fileId
    @JsonProperty("document_name")
    String documentName;
    @JsonProperty("new_document_file")
    File newDocumentFile; ///< New document file
    @JsonProperty("new_document_stream")
    InputStream newDocumentStream; ///< New document stream
}
def sortAsc(arr):
    """Sort ``arr`` in ascending order, in place, and return it.

    Replaces the original hand-rolled O(n^2) selection sort with the
    built-in Timsort (``list.sort``), which is O(n log n) and runs in C,
    while keeping the same contract: the input list is mutated in place
    and also returned for call-chaining convenience.

    Args:
        arr: A list of mutually comparable items.

    Returns:
        The same list object, sorted in ascending order.
    """
    arr.sort()
    return arr
#! /usr/bin/env bash
# Interactively keep a pinned-commit file in sync with an external git repo.
# Compares the hash stored in <file to store commit in> against the repo's
# HEAD and, if they differ, offers to update the file (and `git add` it).
set -e

if [ $# -ne 2 ]; then
    echo "usage: $0 <external repo path> <file to store commit in>"
    exit 1
fi

repo_dir=$1
hash_file=$2
repo_base=$(basename $repo_dir)
file_base=$(basename $hash_file)

if [ ! -d "$repo_dir" ]; then
    echo "External repo does not exist: $repo_dir"
    exit 1
fi

printf "Checking for '$repo_base' changes ..."

# Fetch so the origin/master hash shown below is current; neither command
# moves the external repo's checkout.
origin_hash=$(cd $repo_dir && git fetch origin && git rev-parse origin/master)
head_hash=$(cd $repo_dir && git rev-parse HEAD)
file_hash=$(cat $hash_file)

if [ "$file_hash" != "$head_hash" ]; then
    printf "\n"
    printf "\n"
    printf "    '$repo_base' pointer has changed:\n"
    # Right-align all three report lines to the width of the first one.
    line="        $file_base at $file_hash"
    len=${#line}
    printf "%${len}s\n" "$line"
    printf "%${len}s\n" "origin/master at $origin_hash"
    printf "%${len}s\n" "HEAD at $head_hash"
    printf "\n"
    printf "Update file '$file_base' to HEAD commit ? "
    # Anything other than n/N (including plain Enter) accepts the update.
    read -p "[Y/n] " choice
    case "$choice" in
    n | N)
        echo "Skipped '$repo_base'"
        ;;
    *)
        echo $head_hash >$hash_file && git add $hash_file && echo "Updated '$file_base'"
        ;;
    esac
else
    echo " none"
fi
#!/bin/bash
# conda-style build script for texmaker: configure with qmake using the
# toolchain from the build environment, build, test, and install under $PREFIX.

mkdir build
cd build

## because qmake.
# qmake insists on finding compilers named g++/gcc, so temporarily expose
# the environment's compilers under those names; removed again at the end.
ln -s ${CXX} ${PREFIX}/bin/g++ || true
ln -s ${CXX} ${PREFIX}/bin/gcc || true

qmake \
    PREFIX=$PREFIX \
    QMAKE_CC=${CC} \
    QMAKE_CXX=${CXX} \
    QMAKE_LINK=${CXX} \
    QMAKE_RANLIB=${RANLIB} \
    QMAKE_OBJDUMP=${OBJDUMP} \
    QMAKE_STRIP=${STRIP} \
    QMAKE_AR="${AR} cqs" \
    ../texmaker.pro

make -j$CPU_COUNT
make check
# The generated Makefile hard-codes /usr as the install root; rewrite it to
# install into the environment prefix instead.
sed -i "s:(INSTALL_ROOT)/usr:(INSTALL_ROOT)$PREFIX:g" Makefile
make install

# Clean up the temporary compiler symlinks created above.
rm -f ${PREFIX}/bin/gcc
rm -f ${PREFIX}/bin/g++
///////////////////////////////////////////////////////////////////////////////////////////////////// COMIC ↓

/**
 * Client-side state for the comic being read: identity, page count, and
 * page-change bookkeeping. Relies on globals defined elsewhere in the app
 * (getImage, getMeta, num, loadProgress, saveProgress, spinner helpers,
 * position-input helpers, REFRESH_PAGE_TIME_DIFFERENCE).
 */
class Comic {
    constructor(id, title, size) {
        this.id = id
        this.title = title
        this.size = size
        this.lastPageChange = Date.now()
    }

    // Clamp to [1, size] and reflect the page in the position input.
    setPage(page) {
        if (page < 1) page = 1
        if (page > this.size) page = this.size
        updatePositionInput(page)
    }

    setDocumentTitle(value) {
        document.title = value
    }

    // Point the "download page" link at the currently displayed page
    // (server-side pages are 0-based, UI pages are 1-based).
    updateDownloadUrl() {
        let url = "downloadPage?id=" + this.id + "&page=" + (getPositionInput()-1)
        let downloadLink = document.getElementById("downloadPageButton")
        downloadLink.href = url
    }

    /**
     * Fetch and display `page`, then invoke `callback`. A timestamp guards
     * against out-of-order responses: only the most recent request may
     * update the UI. If the page has been idle longer than
     * REFRESH_PAGE_TIME_DIFFERENCE, server-side progress is re-checked
     * first and the whole page reloads when it diverged (e.g. reading
     * continued on another device).
     */
    displayPage(page, callback) {
        let self = this
        let displayPageInternal = function(page, callback) {
            self.lastPageChange = timestamp
            self.pageDisplayTimestamp = timestamp
            var displayPageInternalCallback = function(data) {
                // Ignore stale responses from superseded requests.
                if (self.pageDisplayTimestamp == timestamp) {
                    self.pageDisplayTimestamp = null
                    hideSpinner()
                    var img = getImage().image
                    img.onload = function() {
                        // Match the page background/status bar to the page's
                        // dominant color reported by the server.
                        document.getElementById("content").style.background = getHexCode(data.color)
                        setStatusBarColor(getHexCode(data.color))
                        self.setPage(page)
                        saveProgress(self.id, page-1)
                        self.setDocumentTitle(page + "/" + self.size + " - " + self.title)
                        getImage().reset()
                        self.updateDownloadUrl()
                        if (callback != null) {
                            callback()
                        }
                    }
                    img.src = data.image
                }
            }
            self.downloadImageData(page, displayPageInternalCallback)
            // Only show the spinner when loading takes noticeably long.
            window.setTimeout(function() {
                if (self.pageDisplayTimestamp == timestamp) {
                    showSpinner()
                }
            }, 100)
        }
        var timestamp = + new Date()
        if (self.lastPageChange == undefined) {
            window.location.reload()
        }
        let difference = timestamp - self.lastPageChange
        if (difference > REFRESH_PAGE_TIME_DIFFERENCE) {
            // load progress
            // if progress is equal to current position, continue as normal, if not reload page
            showSpinner()
            loadProgress(currentPosition => {
                if (currentPosition != getPositionInput() - 1) {
                    window.location.reload()
                } else {
                    displayPageInternal(page, callback)
                }
            })
        } else {
            displayPageInternal(page, callback)
        }
    }

    /**
     * GET imageData for a (1-based) page and pass the parsed JSON
     * ({image, color}) to `callback`; non-200 responses go to reportError.
     */
    downloadImageData(page, callback) {
        var xhttp = new XMLHttpRequest()
        xhttp.onreadystatechange = function() {
            if (this.readyState == 4) {
                if (this.status == 200 && this.responseText.length > 0) {
                    var jsonResponse = JSON.parse(this.responseText)
                    if (callback != null) {
                        callback(jsonResponse)
                    }
                } else {
                    reportError(this.status + " " + this.responseText)
                }
            }
        }
        xhttp.open("GET", "imageData?id=" + this.id + "&page=" + (page - 1))
        xhttp.send()
    }

    goToNextPage() {
        let currentPosition = getPositionInput()
        if (currentPosition < this.size) {
            this.displayPage(currentPosition + 1, function() {
                getImage().update()
            })
        }
    }

    // Optional proposedLeft/proposedTop position the previous page so that
    // backwards navigation lands on its last view (bottom-right region).
    goToPreviousPage(proposedLeft = undefined, proposedTop = undefined) {
        let currentPosition = getPositionInput()
        if (currentPosition > 1) {
            this.displayPage(currentPosition - 1, function() {
                if (proposedLeft) getImage().setLeft(proposedLeft)
                if (proposedTop) getImage().setTop(proposedTop)
                getImage().update()
            })
        }
    }

    jumpToPage(page) {
        this.displayPage(page, function() {
            getImage().update()
        })
    }
}

// Lazy singleton for the current comic, cached on `document` and
// initialized from the page's meta tags.
function getComic() {
    if (document.comic) {
        return document.comic
    } else {
        document.comic = new Comic(getMeta("bookId"), getMeta("bookTitle"), num(getMeta("size")))
        return document.comic
    }
}

///////////////////////////////////////////////////////////////////////////////////////////////////// COMIC ↑

///////////////////////////////////////////////////////////////////////////////////////////////////// IMAGE ↓

/**
 * Wrapper around the page's <img> element handling zoom, pan, and
 * view-by-view navigation within a page.
 * NOTE(review): this class shadows the browser's global `Image`
 * constructor within this script's scope.
 */
class Image {
    constructor(element) {
        this.image = element
        this.zoomValue = 1
        this.swipeNextPossible = false
        this.swipePreviousPossible = false
    }

    setWidth(width) {
        this.image.width = width
    }
    getWidth() {
        return this.image.width
    }
    setHeight(height) {
        this.image.height = height
    }
    getHeight() {
        return this.image.height
    }
    getOriginalWidth() {
        return this.image.naturalWidth
    }
    getOriginalHeight() {
        return this.image.naturalHeight
    }
    setLeft(left) {
        this.image.style.left = left + "px"
    }
    addLeft(x) {
        this.setLeft(this.getLeft() + x)
    }
    getLeft() {
        return num(this.image.style.left, 0)
    }
    setTop(top) {
        this.image.style.top = top + "px"
    }
    addTop(y) {
        this.setTop(this.getTop() + y)
    }
    getTop() {
        return num(this.image.style.top, 0)
    }
    setZoom(zoom) {
        this.zoomValue = zoom
    }
    getZoom() {
        return this.zoomValue
    }

    // minimum zoom is determined by image and viewport dimensions
    updateMinimumZoom() {
        this.minimumZoom = Math.min(getViewportHeight() / this.getOriginalHeight(), getViewportWidth() / this.getOriginalWidth())
    }
    getMinimumZoom() {
        return this.minimumZoom
    }

    isPageFitToScreen() {
        return this.getZoomForFitToScreen() == this.getZoom()
    }
    // "Fit" means the whole page is visible: the smaller of the two
    // axis ratios wins.
    getZoomForFitToScreen() {
        return Math.min(getViewportHeight() / this.getOriginalHeight(), getViewportWidth() / this.getOriginalWidth())
    }
    fitPageToScreen() {
        this.setZoom(this.getZoomForFitToScreen())
        this.update()
    }

    // Thresholds are fractions of the image size within which a position
    // counts as "at the edge" (see approx()).
    getRowThreshold() {
        return this.getWidth() * SETTING_COMIC_ROW_THRESHOLD.get()
    }
    getColumnThreshold() {
        return this.getHeight() * SETTING_COMIC_COLUMN_THRESHOLD.get()
    }

    isEndOfRow() {
        return (this.getWidth() <= getViewportWidth()) || approx(this.getLeft() + this.getWidth(), getViewportWidth(), this.getRowThreshold())
    }
    isBeginningOfRow() {
        return (this.getWidth() <= getViewportWidth()) || approx(this.getLeft(), 0, this.getRowThreshold())
    }
    isEndOfColumn() {
        return (this.getHeight() <= getViewportHeight()) || approx(this.getTop() + this.getHeight(), getViewportHeight(), this.getColumnThreshold())
    }
    isBeginningOfColumn() {
        return (this.getHeight() <= getViewportHeight()) || approx(this.getTop(), 0, this.getColumnThreshold())
    }

    /**
     * Re-apply zoom to the element and clamp left/top so the image stays
     * centered when smaller than the viewport and never shows a gap when
     * larger.
     */
    update() {
        if (this.getZoom() < this.getMinimumZoom()) this.setZoom(this.getMinimumZoom())
        let newWidth = this.getOriginalWidth() * this.getZoom()
        let newHeight = this.getOriginalHeight() * this.getZoom()
        this.setWidth(newWidth)
        this.setHeight(newHeight)
        let minimumLeft = (newWidth < getViewportWidth()) ? (getViewportWidth() / 2) - (newWidth / 2) : Math.min(0, getViewportWidth() - newWidth)
        let maximumLeft = (newWidth < getViewportWidth()) ? (getViewportWidth() / 2) - (newWidth / 2) : Math.max(0, getViewportWidth() - newWidth)
        let minimumTop = (newHeight < getViewportHeight()) ? (getViewportHeight() / 2) - (newHeight / 2) : Math.min(0, getViewportHeight() - newHeight)
        let maximumTop = (newHeight < getViewportHeight()) ? (getViewportHeight() / 2) - (newHeight / 2) : Math.max(0, getViewportHeight() - newHeight)
        if (this.getLeft() < minimumLeft) this.setLeft(minimumLeft)
        if (this.getLeft() > maximumLeft) this.setLeft(maximumLeft)
        if (this.getTop() < minimumTop) this.setTop(minimumTop)
        if (this.getTop() > maximumTop) this.setTop(maximumTop)
    }

    // Zoom keeping the point under (centerX, centerY) stationary; also
    // remembers the zoom level as the double-tap jump target.
    zoom(zoom, centerX, centerY, withImageUpdate) {
        let sideLeft = centerX - this.getLeft()
        let ratioLeft = sideLeft / (this.getWidth() * this.getZoom())
        let newSideLeft = (this.getWidth() * zoom) * ratioLeft
        this.setLeft(centerX - newSideLeft)
        let sideTop = centerY - this.getTop()
        let ratioTop = sideTop / (this.getHeight() * this.getZoom())
        let newSideTop = (this.getHeight() * zoom) * ratioTop
        this.setTop(centerY - newSideTop)
        this.setZoom(zoom)
        //setZoomJumpValue(zoom)
        SETTING_ZOOM_JUMP.put(zoom)
        if (withImageUpdate) this.update()
    }

    // Position for the final view of a page (used when paging backwards).
    #getLastPosition(imageDimension, viewportDimension, imageValue, viewportJumpPercentage, threshold) {
        return viewportDimension - imageDimension
    }

    // Next pan offset along one axis; wraps to 0 when already at the end.
    // `| 0` truncates to an integer pixel offset.
    #getNextPosition(imageDimension, viewportDimension, imageValue, viewportJumpPercentage, threshold) {
        if (approx(imageValue, viewportDimension - imageDimension, threshold)) return 0
        var proposedNextPosition = (imageValue - viewportDimension * viewportJumpPercentage) | 0
        if (proposedNextPosition < viewportDimension - imageDimension) return viewportDimension - imageDimension
        return proposedNextPosition
    }

    // Previous pan offset along one axis; wraps to the end when already
    // at the beginning.
    #getPreviousPosition(imageDimension, viewportDimension, imageValue, viewportJumpPercentage, threshold) {
        if (approx(imageValue, 0, threshold)) return viewportDimension - imageDimension
        var proposedPreviousPosition = (imageValue + viewportDimension * viewportJumpPercentage) | 0
        if
        (proposedPreviousPosition > 0) return 0
        return proposedPreviousPosition
    }

    // Advance one view: pan right, wrap to next row, or page forward when
    // the whole page has been seen.
    goToNextView() {
        if (this.isEndOfRow()) {
            if (this.isEndOfColumn()) {
                getComic().goToNextPage()
            } else {
                this.setLeft(this.#getNextPosition(this.getWidth(), getViewportWidth(), this.getLeft(), SETTING_COMIC_HORIZONTAL_JUMP.get(), this.getRowThreshold()))
                this.setTop(this.#getNextPosition(this.getHeight(), getViewportHeight(), this.getTop(), SETTING_COMIC_VERTICAL_JUMP.get(), this.getColumnThreshold()))
                this.update()
            }
        } else {
            this.setLeft(this.#getNextPosition(this.getWidth(), getViewportWidth(), this.getLeft(), SETTING_COMIC_HORIZONTAL_JUMP.get(), this.getRowThreshold()))
            this.update()
        }
    }

    // Mirror of goToNextView: pan left, wrap to the previous row, or page
    // backwards landing on the previous page's last view.
    goToPreviousView() {
        if (this.isBeginningOfRow()) {
            if (this.isBeginningOfColumn()) {
                let lastLeft = this.#getLastPosition(this.getWidth(), getViewportWidth(), this.getLeft(), SETTING_COMIC_HORIZONTAL_JUMP.get(), this.getRowThreshold())
                let lastTop = this.#getLastPosition(this.getHeight(), getViewportHeight(), this.getTop(), SETTING_COMIC_VERTICAL_JUMP.get(), this.getColumnThreshold())
                getComic().goToPreviousPage(lastLeft, lastTop)
            } else {
                this.setLeft(this.#getPreviousPosition(this.getWidth(), getViewportWidth(), this.getLeft(), SETTING_COMIC_HORIZONTAL_JUMP.get(), this.getRowThreshold()))
                this.setTop(this.#getPreviousPosition(this.getHeight(), getViewportHeight(), this.getTop(), SETTING_COMIC_VERTICAL_JUMP.get(), this.getColumnThreshold()))
                this.update()
            }
        } else {
            this.setLeft(this.#getPreviousPosition(this.getWidth(), getViewportWidth(), this.getLeft(), SETTING_COMIC_HORIZONTAL_JUMP.get(), this.getRowThreshold()))
            this.update()
        }
    }

    // Called at gesture start: swiping to another page is only armed when
    // the current view already sits at the relevant page edge.
    resetPan() {
        if (this.isEndOfRow() && this.isEndOfColumn()) {
            this.swipeNextPossible = true
        } else {
            this.swipeNextPossible = false
        }
        if (this.isBeginningOfRow() && this.isBeginningOfColumn()) {
            this.swipePreviousPossible = true
        } else {
            this.swipePreviousPossible = false
        }
    }

    /* returns true if pan should be disabled / when moving to a different page */
    pan(x, y, totalDeltaX, totalDeltaY, pinching) {
        if (SETTING_SWIPE_PAGE.get() && (this.swipeNextPossible || this.swipePreviousPossible) && (!pinching)) {
            let horizontalThreshold = getViewportWidth() * SETTING_SWIPE_LENGTH.get()
            let swipeParameters = computeSwipeParameters(totalDeltaX, totalDeltaY)
            let verticalMoveValid = swipeParameters.angle < SETTING_SWIPE_ANGLE_THRESHOLD.get()
            // Moving against the armed direction cancels that swipe.
            if (this.swipeNextPossible && x > 0 ) this.swipeNextPossible = false
            if (this.swipePreviousPossible && x < 0 ) this.swipePreviousPossible = false
            if (verticalMoveValid && totalDeltaX < -horizontalThreshold && this.swipeNextPossible) {
                this.swipeNextPossible = false
                this.swipePreviousPossible = false
                this.goToNextView()
                return true
            } else if (verticalMoveValid && totalDeltaX > horizontalThreshold && this.swipePreviousPossible) {
                this.swipeNextPossible = false
                this.swipePreviousPossible = false
                this.goToPreviousView()
                return true
            } else {
                this.addLeft(x)
                this.addTop(y)
                this.update()
                return false
            }
        } else {
            this.addLeft(x)
            this.addTop(y)
            this.update()
            return false
        }
    }

    // Double-tap: toggle between fit-to-screen and the remembered zoom
    // level, centered on the tap point.
    zoomJump(x, y) {
        if (this.isPageFitToScreen()) {
            this.zoom(SETTING_ZOOM_JUMP.get(), x, y, true)
        } else {
            this.fitPageToScreen()
        }
    }

    // Back to natural size at the top-left corner; recompute the zoom
    // floor for the new image.
    reset() {
        this.setWidth(this.getOriginalWidth())
        this.setHeight(this.getOriginalHeight())
        this.setLeft(0)
        this.setTop(0)
        this.updateMinimumZoom()
    }
}

// Lazy singleton wrapping the page's first <img>, cached on `document`.
function getImage() {
    if (document.image) {
        return document.image
    } else {
        document.image = new Image(document.getElementsByTagName("img")[0])
        return document.image
    }
}

///////////////////////////////////////////////////////////////////////////////////////////////////// IMAGE ↑

// Approximate equality within `threshold` (default one pixel).
function approx(val1, val2, threshold = 1) {
    return Math.abs(val1 - val2) < threshold
}

function handleResize() {
    fixControlSizes()
    getImage().updateMinimumZoom()
    getImage().update()
}

// Mouse-wheel zoom, centered on the cursor; direction honors the
// invert-scroll setting.
function mouseGestureScroll(scrollCenterX, scrollCenterY, scrollValue) {
    var zoomDelta = 1 + scrollValue * SETTING_COMIC_SCROLL_SPEED.get() * (SETTING_COMIC_INVERT_SCROLL.get() ?
        1 : -1)
    var newZoom = getImage().getZoom() * zoomDelta
    getImage().zoom(newZoom, scrollCenterX, scrollCenterY, true)
}

// Ask the service worker to cache every page of this comic for offline use.
function downloadComicToDevice() {
    if('serviceWorker' in navigator) {
        var bookId = getMeta("bookId")
        var pages = num(getMeta("size"))
        navigator.serviceWorker.controller.postMessage({type: 'storeBook', bookId: bookId, maxPositions: pages, kind: 'comic'})
    }
}

// Build the settings-panel row holding the "download page" link; its href
// is kept current by Comic.updateDownloadUrl().
function getDownloadPageButton() {
    let button = document.createElement('a')
    button.id = 'downloadPageButton'
    button.innerHTML = 'download'
    button.style.gridColumnStart = '1'
    button.style.gridColumnEnd = '3'
    let controller = document.createElement('div')
    controller.classList.add('setting')
    controller.appendChild(button)
    return controller
}

////////////////////////////////////////////////////////////////////////////////////////////////// GESTURES ↓

/**
 * Unified touch/pointer gesture recognizer: single and double click, pan,
 * pinch-zoom, and mouse-wheel zoom. All behavior is injected via callbacks
 * so the same class serves the canvas and the prev/next hot zones.
 * Touch-capable devices get touch events; otherwise pointer + wheel events
 * are used and the context menu is suppressed.
 */
class Gestures {
    constructor(element, resetSwipeFunction, getZoomFunction, setZoomFunction, panFunction, singleClickFunction, doubleClickFunction, mouseScrollFunction) {
        this.element = element
        this.clickCache = []
        // Max interval (ms) between taps that still counts as a double click.
        this.DOUBLE_CLICK_THRESHOLD = 200
        this.resetSwipe = resetSwipeFunction
        this.getZoom = getZoomFunction
        this.setZoom = setZoomFunction
        this.pan = panFunction
        this.singleClick = singleClickFunction
        this.doubleClick = doubleClickFunction
        this.mouseScroll = mouseScrollFunction
        if (this.isTouchEnabled()) {
            this.element.addEventListener("touchstart", this.getTouchStartHandler(), false)
            this.element.addEventListener("touchmove", this.getTouchMoveHandler(), false)
            this.element.addEventListener("touchend", this.getTouchEndHandler(), false)
        } else {
            this.element.addEventListener("pointerdown", this.getTouchStartHandler(), false)
            this.element.addEventListener("pointermove", this.getTouchMoveHandler(), false)
            this.element.addEventListener("pointerup", this.getTouchEndHandler(), false)
            this.element.addEventListener("wheel", this.getMouseWheelScrollHandler(), false)
            this.element.addEventListener("contextmenu", this.getContextMenuHandler(), false)
        }
    }

    getContextMenuHandler() {
        let self = this
        function contextMenuHandler(event) {
            self.disableEventNormalBehavior(event)
            return false
        }
        return contextMenuHandler
    }

    getMouseWheelScrollHandler() {
        let self = this
        function mouseWheelScrollHandler(event) {
            let scrollCenterX = event.clientX
            let scrollCenterY = event.clientY
            let scrollValue = event.deltaY
            if (self.mouseScroll) self.mouseScroll(scrollCenterX, scrollCenterY, scrollValue)
        }
        return mouseWheelScrollHandler
    }

    // Coarse-pointer media query distinguishes touch screens from mice.
    isTouchEnabled() {
        /*return ('ontouchstart' in window) || (navigator.maxTouchPoints > 0) || (navigator.msMaxTouchPoints > 0)*/
        return window.matchMedia("(pointer: coarse)").matches
    }

    disableEventNormalBehavior(event) {
        event.preventDefault()
        event.stopPropagation()
        //event.stopImmediatePropagation()
    }

    // Keep only the two most recent click timestamps (enough to detect a
    // double click).
    pushClick(timestamp) {
        this.clickCache.push(timestamp)
        while (this.clickCache.length > 2) {
            this.clickCache.shift()
        }
    }

    getTouchStartHandler() {
        let self = this
        function touchStartHandler(event) {
            self.disableEventNormalBehavior(event)
            self.pushClick(Date.now())
            self.panEnabled = true
            if (self.getTouchesCount(event) >= 1) {
                self.originalCenter = self.computeCenter(event)
                self.previousCenter = self.originalCenter
                if (self.resetSwipe) self.resetSwipe()
            }
            // Two fingers: remember the baseline pinch span and zoom so
            // moves can scale relative to them.
            if (self.getTouchesCount(event) == 2) {
                self.originalPinchSize = self.computeDistance(event)
                if (self.getZoom) self.originalZoom = self.getZoom()
            }
            return false
        }
        return touchStartHandler
    }

    // Touch events report real touch points; pointer events are treated as
    // a single "touch" while any button is held.
    getTouchesCount(event) {
        if (event.type.startsWith("touch")) {
            return event.targetTouches.length
        } else {
            if (event.buttons > 0) {
                return 1
            } else {
                return 0
            }
        }
    }

    // Distance between the two active touch points, or null when not
    // exactly two.
    computeDistance(pinchTouchEvent) {
        if (pinchTouchEvent.targetTouches.length == 2) {
            return this.computePointsDistance({
                x: pinchTouchEvent.targetTouches[0].clientX,
                y: pinchTouchEvent.targetTouches[0].clientY
            }, {
                x: pinchTouchEvent.targetTouches[1].clientX,
                y: pinchTouchEvent.targetTouches[1].clientY
            })
        } else {
            return null
        }
    }

    computePointsDistance(p1, p2) {
        return Math.sqrt(Math.pow(p1.x - p2.x, 2) +
            Math.pow(p1.y - p2.y, 2))
    }

    // Centroid of all active touches, or the pointer position; null for
    // other event kinds.
    computeCenter(event) {
        if (event.type.startsWith("touch")) {
            let centerX = 0
            let centerY = 0
            for (let i = 0; i < event.targetTouches.length; i++) {
                centerX = centerX + event.targetTouches[i].clientX
                centerY = centerY + event.targetTouches[i].clientY
            }
            centerX = centerX / event.targetTouches.length
            centerY = centerY / event.targetTouches.length
            return { x: centerX, y: centerY }
        } else if (event.type.startsWith("pointer")) {
            return { x: event.clientX, y: event.clientY }
        } else {
            return null
        }
    }

    getTouchMoveHandler() {
        let self = this
        function touchMoveHandler(ev) {
            self.disableEventNormalBehavior(ev)
            if (self.getTouchesCount(ev) == 2) {
                // Pinch: scale the zoom by the ratio of the current span to
                // the span captured at gesture start.
                self.pinching = true
                let pinchSize = self.computeDistance(ev)
                let currentZoom = pinchSize / self.originalPinchSize
                let newZoom = self.originalZoom * currentZoom
                if (self.setZoom) self.setZoom(newZoom, self.originalCenter.x, self.originalCenter.y)
            } else if (self.getTouchesCount(ev) == 1) {
                self.pinching = false
            }
            if (self.panEnabled && self.getTouchesCount(ev) > 0 && self.getTouchesCount(ev) <= 2) {
                let currentCenter = self.computeCenter(ev)
                let deltaX = currentCenter.x - self.previousCenter.x
                let deltaY = currentCenter.y - self.previousCenter.y
                let totalDeltaX = currentCenter.x - self.originalCenter.x
                let totalDeltaY = currentCenter.y - self.originalCenter.y
                self.previousCenter = currentCenter
                if (self.pan) {
                    // pan() returns true when it triggered a page change, in
                    // which case further panning in this gesture is disabled.
                    let stopPan = self.pan(deltaX * SETTING_COMIC_PAN_SPEED.get(), deltaY * SETTING_COMIC_PAN_SPEED.get(), totalDeltaX, totalDeltaY, self.pinching)
                    if (stopPan) self.panEnabled = false
                }
            }
            return false
        }
        return touchMoveHandler
    }

    isDoubleClick() {
        if (this.clickCache.length >= 2) {
            let timeDifference = this.clickCache[this.clickCache.length - 1] - this.clickCache[this.clickCache.length - 2]
            return timeDifference < this.DOUBLE_CLICK_THRESHOLD
        } else {
            return false
        }
    }

    // A release counts as a click only when it was recent, the pointer
    // barely moved, and no page-change pan consumed the gesture.
    isLastClickRelevant() {
        if (this.clickCache.length >= 1) {
            let clickNotTooOld = Date.now() - this.clickCache[this.clickCache.length - 1] <
                this.DOUBLE_CLICK_THRESHOLD
            let panNotTooLarge = this.computePointsDistance(this.originalCenter, this.previousCenter) < 1
            return clickNotTooOld && panNotTooLarge && this.panEnabled
        } else {
            return false
        }
    }

    getTouchEndHandler() {
        let self = this
        function touchEndHandler(ev) {
            self.disableEventNormalBehavior(ev)
            if (self.getTouchesCount(ev) >= 1) {
                self.originalCenter = self.computeCenter(ev)
                self.previousCenter = self.originalCenter
            }
            if (self.isLastClickRelevant()) {
                if (self.isDoubleClick()) {
                    if (self.doubleClick) self.doubleClick(self.originalCenter.x, self.originalCenter.y)
                } else {
                    if (self.singleClick) self.singleClick(self.originalCenter.x, self.originalCenter.y)
                }
            }
            return false
        }
        return touchEndHandler
    }
}

////////////////////////////////////////////////////////////////////////////////////////////////// GESTURES ↑

// Populate the settings panel with this page's controls and wire up the
// listeners that react to layout-affecting setting changes.
function initSettings() {
    let settingsWrapper = document.getElementById('ch_settings')
    settingsWrapper.appendChild(getDownloadPageButton())
    settingsWrapper.appendChild(SETTING_COMIC_HORIZONTAL_JUMP.controller)
    settingsWrapper.appendChild(SETTING_COMIC_VERTICAL_JUMP.controller)
    settingsWrapper.appendChild(SETTING_COMIC_ROW_THRESHOLD.controller)
    settingsWrapper.appendChild(SETTING_COMIC_COLUMN_THRESHOLD.controller)
    settingsWrapper.appendChild(SETTING_COMIC_INVERT_SCROLL.controller)
    settingsWrapper.appendChild(SETTING_COMIC_SCROLL_SPEED.controller)
    settingsWrapper.appendChild(SETTING_COMIC_PAN_SPEED.controller)
    settingsWrapper.appendChild(SETTING_SWIPE_PAGE.controller)
    settingsWrapper.appendChild(SETTING_SWIPE_LENGTH.controller)
    settingsWrapper.appendChild(SETTING_SWIPE_ANGLE_THRESHOLD.controller)
    settingsWrapper.appendChild(SETTING_BOOK_EDGE_HORIZONTAL.controller)
    settingsWrapper.appendChild(SETTING_BOOK_TOOLS_HEIGHT.controller)
    settingsWrapper.appendChild(SETTING_OVERLAY_TRANSPARENCY.controller)
    // Delay presumably lets the edge-size CSS transition settle before
    // re-measuring — TODO confirm.
    SETTING_BOOK_EDGE_HORIZONTAL.addListener(() => setTimeout(fixControlSizes, 1000))
    SETTING_BOOK_TOOLS_HEIGHT.addListener(fixControlSizes)
    SETTING_OVERLAY_TRANSPARENCY.addListener(initAlpha)
    settingsWrapper.appendChild(getRemoveProgressButton())
    settingsWrapper.appendChild(getMarkAsReadButton())
}

// Page entry point: apply theme colors, wire keyboard and touch/pointer
// gestures, build the settings UI, restore reading progress, and start
// offline caching.
window.onload = function() {
    document.documentElement.style.setProperty('--accent-color', SETTING_ACCENT_COLOR.get())
    document.documentElement.style.setProperty('--foreground-color', SETTING_FOREGROUND_COLOR.get())
    document.documentElement.style.setProperty('--background-color', SETTING_BACKGROUND_COLOR.get())
    fixControlSizes()
    enableKeyboardGestures({
        "upAction": () => pan(0, getViewportHeight() / 2),
        "downAction": () => pan(0, - (getViewportHeight() / 2)),
        "leftAction": () => getImage().goToPreviousView(),
        "rightAction": () => getImage().goToNextView(),
        "escapeAction": () => toggleTools(true)
    })
    let getZoomFunction = function() { return getImage().getZoom() }
    let zoomFunction = function(val, cx, cy, withUpdate) { getImage().zoom(val, cx, cy, withUpdate) }
    let panFunction = function(x, y, totalDeltaX, totalDeltaY, pinching) { return getImage().pan(x, y, totalDeltaX, totalDeltaY, pinching) }
    // Main canvas: double-click zoom-jump; side hot zones: single-click
    // page/view navigation.
    new Gestures(document.getElementById("ch_canv"), () => getImage().resetPan(), getZoomFunction, zoomFunction, panFunction, null, (x, y) => getImage().zoomJump(x, y), mouseGestureScroll)
    new Gestures(document.getElementById("ch_prev"), () => getImage().resetPan(), getZoomFunction, zoomFunction, panFunction, () => getImage().goToPreviousView(), null, mouseGestureScroll)
    new Gestures(document.getElementById("ch_next"), () => getImage().resetPan(), getZoomFunction, zoomFunction, panFunction, () => getImage().goToNextView(), null, mouseGestureScroll)
    document.getElementById("ch_tools_left").addEventListener("click", (event) => toggleTools(true))
    document.getElementById("ch_tools_right").addEventListener("click", (event) => toggleTools(false))
    document.getElementById("ch_tools_container").addEventListener("click", (event) => toggleTools())
    // Clicks inside the tools panel must not bubble up and close it.
    document.getElementById("ch_tools").addEventListener("click", event =>
        event.stopPropagation())
    addPositionInputTriggerListener((page) => getComic().jumpToPage(page))
    initAlpha()
    initSettings()
    initFullscreenButton()
    initBookCollectionLinks()
    document.lastPageChange = new Date()
    // Server progress is 0-based; UI pages are 1-based.
    loadProgress(currentPosition => {
        var startPage = currentPosition + 1
        getComic().displayPage(startPage, () => getImage().fitPageToScreen())
    })
    downloadComicToDevice()
}
package data

// The `navigate` struct tags presumably drive a custom configuration
// decoder defined elsewhere in the project — TODO confirm.

// SimpleConfiguration is a plain-struct implementation of the top-level
// configuration: a list of preprocessors and a list of outputs.
type SimpleConfiguration struct {
	Preprocessors []Preprocessor `navigate:"preprocessors"`
	Outputs       []Output       `navigate:"outputs"`
}

// GetPreprocessors returns the configured preprocessors.
func (s SimpleConfiguration) GetPreprocessors() []Preprocessor {
	return s.Preprocessors
}

// GetOutputs returns the configured outputs.
func (s SimpleConfiguration) GetOutputs() []Output {
	return s.Outputs
}

// SimpleOutput pairs the selectors that choose content with the templates
// that render it.
type SimpleOutput struct {
	Selectors []Selector `navigate:"selectors"`
	Templates []Template `navigate:"templates"`
}

// GetSelectors returns the output's selectors.
func (s SimpleOutput) GetSelectors() []Selector {
	return s.Selectors
}

// GetTemplates returns the output's templates.
func (s SimpleOutput) GetTemplates() []Template {
	return s.Templates
}

// SimpleTemplate is a template reference: a type name plus free-form options.
type SimpleTemplate struct {
	Type    string         `navigate:"type"`
	Options TemplateOption `navigate:"options"`
}

// GetType returns the template's type name.
func (s SimpleTemplate) GetType() string {
	return s.Type
}

// GetOptions returns the template's options.
func (s SimpleTemplate) GetOptions() TemplateOption {
	return s.Options
}

// SimpleTemplateOption exposes a raw option map as a TemplateOption.
type SimpleTemplateOption map[string]interface{}

// GetMapElement returns the underlying option map.
func (s SimpleTemplateOption) GetMapElement() map[string]interface{} {
	return s
}

// GetFilename returns the "filename" option.
// NOTE(review): the type assertion panics when the key is missing or not a
// string; callers appear to rely on validated input — TODO confirm.
func (s SimpleTemplateOption) GetFilename() string {
	return s.GetMapElement()["filename"].(string)
}

// SimplePreprocessor is a preprocessor reference: a type name plus
// decoder-specific options.
type SimplePreprocessor struct {
	Type    string      `navigate:"type"`
	Options interface{} `navigate:"options"`
}

// GetType returns the preprocessor's type name.
func (s SimplePreprocessor) GetType() string {
	return s.Type
}

// GetOptions returns the preprocessor's options.
func (s SimplePreprocessor) GetOptions() interface{} {
	return s.Options
}
// Barrel file: re-exports the category-post components so consumers can import
// them from this directory instead of referencing the individual files.
export { default as CategoryPostCard } from './CategoryPostCard';
export { default as CategoryPostsSearch } from './CategoryPostsSearch';
export { default as CategoryPostsSort } from './CategoryPostsSort';
#!/bin/bash
# SLURM batch script: a single-element array job (index 0) that runs a PTMCMC
# noise analysis for the PPTA correlated-noise project.
#SBATCH --job-name=ppta_ptmcmc_ms_dip_cpl_fixslope_set_3_1_ephem_0
#SBATCH --output=/fred/oz002/bgoncharov/correlated_noise_logs/ppta_ptmcmc_ms_dip_cpl_fixslope_set_3_1_ephem_0_%A_%a.out
#SBATCH --ntasks=4
#SBATCH --time=2-0
#SBATCH --mem-per-cpu=2G
#SBATCH --tmp=4G
#SBATCH --array=0

# Detect the major version of the default `python` on this node.
pyv="$(python -c 'import sys; print(sys.version_info[0])')"
if [ "$pyv" == 2 ]
then
echo "$pyv"
# Python 2 environments on this cluster need numpy loaded as a module.
module load numpy/1.16.3-python-2.7.14
fi

# Log the TEMPO2 environment for reproducibility, then launch the analysis.
# $SLURM_ARRAY_TASK_ID (only 0 here) selects the parameter set via --num.
srun echo $TEMPO2
srun echo $TEMPO2_CLOCK_DIR
srun python /home/bgonchar/correlated_noise_pta_2020/run_analysis.py --prfile "/home/bgonchar/correlated_noise_pta_2020/params/ppta_dr2_ptmcmc_ms_cpl_dipol_fixslope_set_3_1_ephem_0_20201121.dat" --num $SLURM_ARRAY_TASK_ID
# Copyright (c) 2020 - for information on the respective copyright owner
# see the NOTICE file and/or the repository https://github.com/boschresearch/blackboxopt
#
# SPDX-License-Identifier: Apache-2.0

import json
from copy import deepcopy
from dataclasses import asdict, dataclass, field
from datetime import datetime
from typing import Any, Dict, Mapping, Optional

import numpy as np


def _datetime_now_timestamp():
    """Wrapper to allow use as default factory for dataclass fields."""
    return datetime.now().timestamp()


@dataclass
class EvaluationSpecification(Mapping[str, Any]):
    """A specification of what should be evaluated next.

    Implements the ``Mapping`` protocol over its dataclass fields, so an
    instance can be unpacked with ``**spec`` or converted with ``dict(spec)``.
    Values obtained through ``__getitem__`` are deep copies.
    """

    configuration: dict = field(
        metadata={"Description": "The configuration to be evaluated next."}
    )
    settings: dict = field(
        default_factory=dict,
        metadata={
            "Description": "Additional settings like the fidelity or target task."
        },
    )
    optimizer_info: dict = field(
        default_factory=dict,
        metadata={"Description": "Information about and for internal optimizer state."},
    )
    created_unixtime: float = field(
        default_factory=_datetime_now_timestamp,
        metadata={"Description": "Creation time of the evaluation specificiation."},
    )
    context: Optional[Dict[str, Any]] = field(
        default=None,
        metadata={
            "Description": "Contextual information is what you can determine but not "
            + "influence, like the environmental temperature."
        },
    )

    def keys(self):
        """Return the dataclass field names, which act as the mapping keys."""
        return self.__dataclass_fields__.keys()  # pylint: disable=no-member

    def create_evaluation(
        self,
        objectives: Dict[str, Optional[float]],
        constraints: Optional[Dict[str, Optional[float]]] = None,
        user_info: Optional[dict] = None,
        stacktrace: Optional[str] = None,
        finished_unixtime: Optional[float] = None,
    ):
        """Create a blackboxopt.Evaluation based on this evaluation specification.

        Args:
            objectives: For each objective name the respective value.
            constraints: For each constraint name the float value indicates how much
                the constraint was satisfied, with negative values implying a violated
                and positive values indicating a satisfied constraint.
            user_info: Miscellaneous information provided by the user.
            stacktrace: The stacktrace in case an unhandled exception occurred inside
                the evaluation function.
            finished_unixtime: Timestamp at completion of this evaluation. If none is
                provided, the current time is used.

        Returns:
            The resulting ``Evaluation``, carrying over this specification's fields.

        Raises:
            ValueError: If any objective value is NaN (use ``None`` instead).
        """
        # ``**self`` works because this class implements the Mapping protocol;
        # it passes configuration/settings/optimizer_info/created_unixtime/context.
        evaluation = Evaluation(
            objectives=objectives,
            constraints=constraints,
            user_info=user_info,
            stacktrace=stacktrace,
            **self,
        )
        # Data class default factories like in this case time.time are only triggered
        # when the argument is not provided, so in case of it being None we can't just
        # pass the argument value in, because it would set it to None instead of
        # triggering the default factory for the current time.
        if finished_unixtime is not None:
            evaluation.finished_unixtime = finished_unixtime
        return evaluation

    def __getitem__(self, key):
        if key not in self.__dataclass_fields__:  # pylint: disable=no-member
            raise KeyError(
                f"Only dataclass fields are accessible via __getitem__, '{key}' is not."
            )
        # Deep copy so callers cannot mutate this specification through the view.
        return deepcopy(getattr(self, key))

    def __iter__(self):
        # BUG FIX: previously returned the bound method object
        # ``self.__dataclass_fields__.__iter__`` (the call parentheses were
        # missing), which broke ``iter(spec)`` and everything built on it.
        return iter(self.__dataclass_fields__)  # pylint: disable=no-member

    def __len__(self):
        # BUG FIX: previously returned the bound ``__len__`` method instead of
        # an int, which made ``len(spec)`` raise a TypeError.
        return len(self.__dataclass_fields__)  # pylint: disable=no-member

    def to_json(self, **json_dump_kwargs):
        """Serialize this specification to a JSON string via ``dataclasses.asdict``."""
        return json.dumps(asdict(self), **json_dump_kwargs)

    def to_dict(self):
        """Return the instance attribute dict (NOTE: not a copy)."""
        return self.__dict__


@dataclass
class _EvaluationBase:
    """Helper dataclass to allow the Evaluation class to have attributes with defaults
    while still having attributes without default values.

    To make this happen, the non-default attributes need to be defined / inherited
    before the ones with defaults.

    Attributes:
        objectives: For each objective name the respective value, or None if absent.

    Raises:
        ValueError: If any objective value is NaN.
    """

    objectives: Dict[str, Optional[float]]

    def __post_init__(self):
        # Reject NaN early; "no value" must be expressed as None, never NaN.
        available_objective_values = np.array(
            [o for o in self.objectives.values() if o is not None], dtype=float
        )
        if np.isnan(available_objective_values).any():
            raise ValueError(
                f"Objective values contain NaN: {self.objectives}\n"
                + "Please use None instead of NaN."
            )


@dataclass
class Evaluation(EvaluationSpecification, _EvaluationBase):
    """An evaluated specification with a timestamp indicating the time of the
    evaluation, and a result dictionary for all objective values.

    NOTE: `NaN` is not allowed as an objective value, use `None` instead.
    """

    constraints: Optional[Dict[str, Optional[float]]] = field(
        default=None,
        metadata={
            "Description": "For each constraint name the float value indicates "
            + "how much the constraint was satisfied, with negative values implying "
            + "a violated and positive values indicating a satisfied constraint."
        },
    )
    finished_unixtime: float = field(
        default_factory=_datetime_now_timestamp,
        metadata={"Description": "Timestamp at completion of this evaluation."},
    )
    stacktrace: Optional[str] = field(
        default=None,
        metadata={
            "Description": "The stacktrace in case an unhandled exception occurred "
            + "inside the evaluation function."
        },
    )
    user_info: Optional[dict] = field(
        default=None,
        metadata={"Description": "Miscellaneous information provided by the user."},
    )

    def get_specification(
        self, reset_created_unixtime: bool = False
    ) -> EvaluationSpecification:
        """Get the evaluation specifiation for which this result was evaluated.

        Args:
            reset_created_unixtime: If True, the returned specification gets a
                fresh creation timestamp instead of the original one.
        """
        eval_spec_kwargs = deepcopy(
            dict(
                configuration=self.configuration,
                settings=self.settings,
                optimizer_info=self.optimizer_info,
                context=self.context,
            )
        )
        if reset_created_unixtime:
            return EvaluationSpecification(
                created_unixtime=_datetime_now_timestamp(), **eval_spec_kwargs
            )
        return EvaluationSpecification(
            created_unixtime=self.created_unixtime, **eval_spec_kwargs
        )

    @property
    def any_objective_none(self) -> bool:
        """True if at least one objective value is None."""
        return any(v is None for v in self.objectives.values())

    @property
    def all_objectives_none(self) -> bool:
        """True if every objective value is None."""
        return all(v is None for v in self.objectives.values())
#!/bin/bash
# Run a single dieharder RNG test with a fixed seed so the result is reproducible:
#   -d 4          selects test number 4 (check the exact test with `dieharder -l`)
#   -g 17         selects generator number 17 (see `dieharder -g -1` for the mapping)
#   -S 814581641  seeds the generator
dieharder -d 4 -g 17 -S 814581641
<reponame>ch1huizong/learning<filename>lang/py/cookbook/v2/source/cb2_9_6_sol_1.py
import candygram as cg


class ExampleThread(object):
    """A thread-class with just a single counter value and a stop flag."""

    def __init__(self):
        """ Initialize the counter to 0, the running-flag to True. """
        # val: current counter value; running: checked by run()'s receive loop.
        self.val = 0
        self.running = True

    def increment(self):
        """ Increment the counter by one. """
        self.val += 1

    def sendVal(self, msg):
        """ Send current value of counter to requesting thread. """
        # msg matches the (cg.Process, 'value') pattern registered in run(), so
        # msg[0] is the requesting process; reply with (our process, counter).
        req = msg[0]
        req.send((cg.self(), self.val))

    def setStop(self):
        """ Set the running-flag to False. """
        self.running = False

    def run(self):
        """ The entry point of the thread. """
        # Register the handler functions for various messages:
        # 'increment' and 'stop' are plain atoms; (Process, 'value') also passes
        # the full message through (cg.Message) so sendVal can reply to the sender.
        r = cg.Receiver()
        r.addHandler('increment', self.increment)
        r.addHandler((cg.Process, 'value'), self.sendVal, cg.Message)
        r.addHandler('stop', self.setStop)
        # Keep handling new messages until a stop has been requested
        while self.running:
            r.receive()
sc_web_path=../../sc-web/client

# append_line FILE LABEL LINE
# Appends LINE to FILE unless FILE already contains it as an exact whole line
# (grep -F: fixed string, -x: whole line match, -q: quiet). Makes the script
# safe to re-run (idempotent).
# FIX: all positional expansions are now quoted — previously `grep ... $1`,
# `echo $3` and `>> $1` were subject to word splitting and globbing, which
# mangles the appended HTML and breaks on paths containing spaces.
append_line() {
    if grep -Fxq "$3" "$1"
    then
        # Already present — just report. ($blue/$rst are optional color codes
        # expected from the caller's environment; unset they expand to nothing.)
        echo -en "Link to " $blue"$2"$rst "already exists in " $blue"$1"$rst "\n"
    else
        echo -en "Append '" $green"$2"$rst "' -> " $green"$1"$rst "\n"
        echo "$3" >> "$1"
    fi
}

# append_js HTML_FILE JS_PATH — idempotently add a <script> include for JS_PATH.
append_js() {
    append_line "$1" "$2" "<script type=\"text/javascript\" charset=\"utf-8\" src=\"/static/$2\"></script>"
}

# append_css HTML_FILE CSS_PATH — idempotently add a stylesheet <link> for CSS_PATH.
append_css() {
    append_line "$1" "$2" "<link rel=\"stylesheet\" type=\"text/css\" href=\"/static/$2\" />"
}

# Register the search-book-by-rating component with the sc-web client templates.
append_js "$sc_web_path/templates/components.html" components/js/search_book_by_rating_component/search_book_by_rating_component.js
append_css "$sc_web_path/templates/components.html" components/css/search_book_by_rating_component.css
<filename>app/dist/js/enums/dias-da-semana.js<gh_stars>0
// Generated file (compiled output of a TypeScript numeric enum) — do not edit
// by hand; change the TypeScript source instead.
// The IIFE builds a two-way map: name -> number AND number -> name,
// e.g. DiasDaSemana.DOMINGO === 0 and DiasDaSemana[0] === "DOMINGO".
export var DiasDaSemana;
(function (DiasDaSemana) {
    DiasDaSemana[DiasDaSemana["DOMINGO"] = 0] = "DOMINGO";
    DiasDaSemana[DiasDaSemana["SEGUNDA"] = 1] = "SEGUNDA";
    DiasDaSemana[DiasDaSemana["TERCA"] = 2] = "TERCA";
    DiasDaSemana[DiasDaSemana["QUARTA"] = 3] = "QUARTA";
    DiasDaSemana[DiasDaSemana["QUINTA"] = 4] = "QUINTA";
    DiasDaSemana[DiasDaSemana["SEXTA"] = 5] = "SEXTA";
    DiasDaSemana[DiasDaSemana["SABADO"] = 6] = "SABADO";
})(DiasDaSemana || (DiasDaSemana = {}));
//# sourceMappingURL=dias-da-semana.js.map