repo_name
stringlengths
6
101
path
stringlengths
4
300
text
stringlengths
7
1.31M
yurigabrich/EnergyPlusShadow
html/structEnergyPlus_1_1PlantPipingSystemsManager_1_1FarfieldInfo.js
<filename>html/structEnergyPlus_1_1PlantPipingSystemsManager_1_1FarfieldInfo.js var structEnergyPlus_1_1PlantPipingSystemsManager_1_1FarfieldInfo = [ [ "groundTempModel", "structEnergyPlus_1_1PlantPipingSystemsManager_1_1FarfieldInfo.html#a2f5fa278f6fc2aea16079ff541b3cca1", null ] ];
little-sparkle/growler_engine_android
library/src/main/java/com/littlesparkle/growler/library/update/UpdateHelper.java
package com.littlesparkle.growler.library.update; import android.content.Context; import android.support.annotation.NonNull; import com.littlesparkle.growler.library.http.BaseHttpSubscriber; import com.littlesparkle.growler.library.http.ErrorResponse; import com.littlesparkle.growler.library.http.api.ApiException; import com.littlesparkle.growler.library.misc.MiscHelper; import rx.Subscriber; public class UpdateHelper { public static final void checkUpdate( @NonNull final Context context, @NonNull final Subscriber subscriber, @NonNull final String url) { String version = MiscHelper.getVersionName(context); String packageName = MiscHelper.getPackageName(context); new UpdateRequest(url).update(subscriber, packageName, version); } public static final void checkUpdate( @NonNull final Context context, @NonNull final String url, @NonNull final UpdateListener listener) { Subscriber subscriber = new BaseHttpSubscriber<UpdateResponse>() { @Override public void onNext(UpdateResponse updateResponse) { listener.onUpdateAvailable(updateResponse.data); } @Override protected void onError(ErrorResponse errorResponse) { listener.onNoUpdate(); } }; String version = MiscHelper.getVersionName(context); String packageName = MiscHelper.getPackageName(context); new UpdateRequest(url).update(subscriber, packageName, version); } }
matchd-ch/matchd-backend
api/tests/test_company_add_employee.py
import pytest @pytest.mark.django_db def test_add_employee(login, user_employee, add_employee): company = user_employee.company employee_count = len(company.users.all()) new_username = '<EMAIL>' login(user_employee) data, errors = add_employee(user_employee, new_username, 'John', 'Doe', 'Role') assert errors is None assert data is not None assert data.get('addEmployee') is not None assert data.get('addEmployee').get('success') employee = data.get('addEmployee').get('employee') assert employee is not None assert employee.get('role') == 'Role' assert employee.get('firstName') == 'John' assert employee.get('lastName') == 'Doe' assert employee.get('email') == new_username assert employee_count + 1 == len(company.users.all()) @pytest.mark.django_db def test_add_employee_with_existing_username(login, user_employee, add_employee): company = user_employee.company employee_count = len(company.users.all()) login(user_employee) data, errors = add_employee(user_employee, user_employee.username, 'John', 'Doe', 'Role') assert errors is None assert data is not None assert data.get('addEmployee') is not None assert data.get('addEmployee').get('success') is False assert data.get('addEmployee').get('employee') is None errors = data.get('addEmployee').get('errors') assert errors is not None assert 'username' in errors assert errors.get('username')[0].get('code') == 'unique' assert employee_count == len(company.users.all())
GeertArien/low-level-programming
chapter_11/higher_order/algorithm.c
#include "algorithm.h" void for_each(struct list_t* list, foreach_t* func) { struct list_t* it = list; while (it != NULL) { func(it->value); it = it->next; } } struct list_t* map(struct list_t* list, map_t* func) { struct list_t* it = list; struct list_t* n_list; if (!it) { return NULL; } n_list = list_create(func(it->value)); it = it->next; while (it != NULL) { list_add_back(func(it->value), &n_list); it = it->next; } return n_list; } void map_mut(struct list_t* list, map_t* func) { struct list_t* it = list; while (it != NULL) { it->value = func(it->value); it = it->next; } } int foldl(const int start_value, struct list_t* list, foldl_t* func) { struct list_t* it = list; int result = start_value; while (it != NULL) { result = func(it->value, result); it = it->next; } return result; } struct list_t* iterate(const int value, const size_t length, iter_t* func) { struct list_t* list; struct list_t* it; size_t i; if (length < 1) { return NULL; } list = list_create(value); it = list; for (i = 1; i < length; ++i) { list_add_back(func(it->value), &it); it = it->next; } return list; }
matortheeternal/esp.json
src/Generator/TES5/REVB.js
let { def, uint16, int8, div, uint8, format, struct, subrecord, req, record } = require('../helpers'); module.exports = () => { record('REVB', 'Reverb Parameters', { members: [ def('EDID'), req(subrecord('DATA', struct('Data', [ uint16('Decay Time (ms)'), uint16('HF Reference (Hz)'), int8('Room Filter'), int8('Room HF Filter'), int8('Reflections'), int8('Reverb Amp'), format(uint8('Decay HF Ratio'), div(100)), uint8('Reflect Delay (ms), scaled'), uint8('Reverb Delay (ms)'), uint8('Diffusion %'), uint8('Density %'), uint8('Unknown') ]))) ] }) };
selectiveci/selective
spec/dummy/config/application.rb
# frozen_string_literal: true # Set up gems listed in the Gemfile. ENV["BUNDLE_GEMFILE"] ||= File.expand_path("../../Gemfile", __dir__) require "bundler/setup" if File.exist?(ENV["BUNDLE_GEMFILE"]) require "active_record/railtie" Bundler.reset! # must load development here because gemspec dependencies # are loaded as development gems Bundler.require(:default, :test, :development, :assets) module SelectiveDummyApp class Application < Rails::Application config.root = File.expand_path("../", __dir__) def name "SelectiveDummy" end end end require_relative "../engines/dummy_engine/lib/dummy_engine"
VladTrach/elda
elda-lda/src/test/java/com/epimorphics/lda/tests_support/FileManagerModelLoader.java
/* See lda-top/LICENCE (or https://raw.github.com/epimorphics/elda/master/LICENCE) for the licence for this software. (c) Copyright 2011 Epimorphics Limited $Id$ */ package com.epimorphics.lda.tests_support; import com.epimorphics.lda.core.ModelLoader; import com.epimorphics.lda.support.EldaFileManager; import com.hp.hpl.jena.rdf.model.Model; /** A ModelLoaderI that loads the model using the FileManager. @author chris */ public final class FileManagerModelLoader implements ModelLoader { /** Load the model named by the uri using the FileManager's global instance. */ @Override public Model loadModel( String uri ) { return EldaFileManager.get().loadModel(uri); } }
petamoriken/deno
cli/compat/testdata/conditions/node_modules/imports_exports/require_polyfill.js
<gh_stars>1000+ module.exports = { polyfill: "require", };
YoufuLi/radlog
datalog/src/main/java/edu/ucla/cs/wis/bigdatalog/database/type/DbShort.java
package edu.ucla.cs.wis.bigdatalog.database.type; import edu.ucla.cs.wis.bigdatalog.database.index.MurmurHash; import edu.ucla.cs.wis.bigdatalog.database.store.ByteArrayHelper; import edu.ucla.cs.wis.bigdatalog.exception.InterpreterException; import edu.ucla.cs.wis.bigdatalog.measurement.MemoryMeasurement; import edu.ucla.cs.wis.bigdatalog.type.DataType; public class DbShort extends DbNumericType implements EncodedType { private static final long serialVersionUID = 1L; public static final short cacheSize = 2048; public static DbShort[] cache; static { cache = new DbShort[cacheSize]; for (short i = 0; i < cacheSize; i++) cache[i] = new DbShort(i); } private short value; private DbShort(short val) { this.value = val; } public static DbShort create(short value) { if (value > -1 && value < cacheSize) return cache[value]; return new DbShort(value); } public short getValue() { return this.value; } @Override public boolean equals(Object other) { if (other == null || !(other instanceof DbTypeBase)) return false; if (this == other) return true; if (other instanceof DbShort) return (this.value == ((DbShort)other).value); if (other instanceof DbInteger) return (this.value == ((DbInteger)other).getValue()); if (other instanceof DbLong) return (this.value == ((DbLong)other).getValue()); if (other instanceof DbDouble) return ((DbDouble)other).equals(this); if (other instanceof DbFloat) return ((DbFloat)other).equals(this); if (other instanceof DbShort) return (this.value == ((DbShort)other).getValue()); if (other instanceof DbByte) return (this.value == ((DbByte)other).getValue()); if (other instanceof DbLongLong) return ((DbLongLong)other).equals(this); if (other instanceof DbLongLongLongLong) return ((DbLongLongLongLong)other).equals(this); return false; } @Override public boolean greaterThan(DbTypeBase other) { if (other == null) return false; if (other instanceof DbShort) return (this.value > ((DbShort)other).value); if (other instanceof DbInteger) return (this.value > 
((DbInteger)other).getValue()); if (other instanceof DbLong) return (this.value > ((DbLong)other).getValue()); if (other instanceof DbDouble) return ((DbDouble)other).lessThan(this); if (other instanceof DbFloat) return ((DbFloat)other).lessThan(this); if (other instanceof DbShort) return (this.value > ((DbShort)other).getValue()); if (other instanceof DbByte) return (this.value > ((DbByte)other).getValue()); if (other instanceof DbLongLong) return ((DbLongLong)other).lessThan(this); if (other instanceof DbLongLongLongLong) return ((DbLongLongLongLong)other).lessThan(this); return false; } @Override public boolean lessThan(DbTypeBase other) { if (other == null) return false; if (other instanceof DbShort) return (this.value < ((DbShort)other).value); if (other instanceof DbInteger) return (this.value < ((DbInteger)other).getValue()); if (other instanceof DbLong) return (this.value < ((DbLong)other).getValue()); if (other instanceof DbDouble) return ((DbDouble)other).greaterThan(this); if (other instanceof DbFloat) return ((DbFloat)other).greaterThan(this); if (other instanceof DbShort) return (this.value < ((DbShort)other).getValue()); if (other instanceof DbByte) return (this.value < ((DbByte)other).getValue()); if (other instanceof DbLongLong) return ((DbLongLong)other).greaterThan(this); if (other instanceof DbLongLongLongLong) return ((DbLongLongLongLong)other).greaterThan(this); return false; } @Override public int hashCode() { return (int)MurmurHash.hash(ByteArrayHelper.getIntAsBytes(this.value)); } @Override public long hashCodeL() { return MurmurHash.hash(ByteArrayHelper.getIntAsBytes(this.value)); } @Override public long hashCodeL(int position) { return MurmurHash.hash(ByteArrayHelper.getIntAsBytes(this.value), position); } @Override public String toString() { return Integer.toString(this.value); } @Override public boolean isConstant() { return true; } @Override public DataType getDataType() { return DataType.SHORT; } @Override public DbShort copy() { 
return DbShort.create(this.value); } @Override public int getBytes(byte[] bytes, int offset) { return ByteArrayHelper.getShortAsBytes(this.value, bytes, offset); } @Override public byte[] getBytes() { return ByteArrayHelper.getShortAsBytes(this.value); } @Override public int getKey() { return this.value; } @Override public MemoryMeasurement getSizeOf() { return new MemoryMeasurement(2,2); } @Override public DbNumericType add(DbNumericType addend2) { return DbShort.create((short) (this.value + ((DbShort)addend2).value)); } @Override public DbNumericType subtract(DbNumericType subtrahend) { return DbShort.create((short) (this.value - ((DbShort)subtrahend).value)); } @Override public DbNumericType multiply(DbNumericType multiplier) { return DbShort.create((short) (this.value * ((DbShort)multiplier).value)); } @Override public DbNumericType divide(DbNumericType divisor) { if (((DbShort)divisor).value == 0) throw new InterpreterException("Divided by zero"); return DbDouble.create(((double)this.value) / (double)((DbShort)divisor).value); } @Override public boolean match(DbTypeBase dbTypeObject) { return (this.value == ((DbShort)dbTypeObject).value); } @Override public DbDouble logarithm() { return DbDouble.create(java.lang.Math.log10(this.value)); } @Override public DbDouble exponential() { return DbDouble.create(java.lang.Math.exp(this.value)); } @Override public DbInteger step() { return DbInteger.create(DbNumericType.step_impl(this.value)); } }
madanrajhari10/oci-service-operator
vendor/github.com/oracle/oci-go-sdk/v65/database/update_vm_cluster_details.go
// Copyright (c) 2016, 2018, 2022, Oracle and/or its affiliates. All rights reserved. // This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license. // Code generated. DO NOT EDIT. // Database Service API // // The API for the Database Service. Use this API to manage resources such as databases and DB Systems. For more information, see Overview of the Database Service (https://docs.cloud.oracle.com/iaas/Content/Database/Concepts/databaseoverview.htm). // package database import ( "fmt" "github.com/oracle/oci-go-sdk/v65/common" "strings" ) // UpdateVmClusterDetails Details for updating the VM cluster. Applies to Exadata Cloud@Customer instances only. // For details on updating a cloud VM cluster in an Exadata Cloud Service instance, see UpdateCloudVmClusterDetails type UpdateVmClusterDetails struct { // The number of CPU cores to enable for the VM cluster. CpuCoreCount *int `mandatory:"false" json:"cpuCoreCount"` // The number of OCPU cores to enable for the VM cluster. Only 1 decimal place is allowed for the fractional part. OcpuCount *float32 `mandatory:"false" json:"ocpuCount"` // The memory to be allocated in GBs. MemorySizeInGBs *int `mandatory:"false" json:"memorySizeInGBs"` // The local node storage to be allocated in GBs. DbNodeStorageSizeInGBs *int `mandatory:"false" json:"dbNodeStorageSizeInGBs"` // The data disk group size to be allocated in TBs. DataStorageSizeInTBs *float64 `mandatory:"false" json:"dataStorageSizeInTBs"` // The data disk group size to be allocated in GBs. DataStorageSizeInGBs *float64 `mandatory:"false" json:"dataStorageSizeInGBs"` // The Oracle license model that applies to the VM cluster. The default is BRING_YOUR_OWN_LICENSE. 
LicenseModel UpdateVmClusterDetailsLicenseModelEnum `mandatory:"false" json:"licenseModel,omitempty"` // The public key portion of one or more key pairs used for SSH access to the VM cluster. SshPublicKeys []string `mandatory:"false" json:"sshPublicKeys"` Version *PatchDetails `mandatory:"false" json:"version"` UpdateDetails *VmClusterUpdateDetails `mandatory:"false" json:"updateDetails"` // Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. // For more information, see Resource Tags (https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm). // Example: `{"Department": "Finance"}` FreeformTags map[string]string `mandatory:"false" json:"freeformTags"` // Defined tags for this resource. Each key is predefined and scoped to a namespace. // For more information, see Resource Tags (https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm). DefinedTags map[string]map[string]interface{} `mandatory:"false" json:"definedTags"` DataCollectionOptions *DataCollectionOptions `mandatory:"false" json:"dataCollectionOptions"` } func (m UpdateVmClusterDetails) String() string { return common.PointerString(m) } // ValidateEnumValue returns an error when providing an unsupported enum value // This function is being called during constructing API request process // Not recommended for calling this function directly func (m UpdateVmClusterDetails) ValidateEnumValue() (bool, error) { errMessage := []string{} if _, ok := GetMappingUpdateVmClusterDetailsLicenseModelEnum(string(m.LicenseModel)); !ok && m.LicenseModel != "" { errMessage = append(errMessage, fmt.Sprintf("unsupported enum value for LicenseModel: %s. 
Supported values are: %s.", m.LicenseModel, strings.Join(GetUpdateVmClusterDetailsLicenseModelEnumStringValues(), ","))) } if len(errMessage) > 0 { return true, fmt.Errorf(strings.Join(errMessage, "\n")) } return false, nil } // UpdateVmClusterDetailsLicenseModelEnum Enum with underlying type: string type UpdateVmClusterDetailsLicenseModelEnum string // Set of constants representing the allowable values for UpdateVmClusterDetailsLicenseModelEnum const ( UpdateVmClusterDetailsLicenseModelLicenseIncluded UpdateVmClusterDetailsLicenseModelEnum = "LICENSE_INCLUDED" UpdateVmClusterDetailsLicenseModelBringYourOwnLicense UpdateVmClusterDetailsLicenseModelEnum = "BRING_YOUR_OWN_LICENSE" ) var mappingUpdateVmClusterDetailsLicenseModelEnum = map[string]UpdateVmClusterDetailsLicenseModelEnum{ "LICENSE_INCLUDED": UpdateVmClusterDetailsLicenseModelLicenseIncluded, "BRING_YOUR_OWN_LICENSE": UpdateVmClusterDetailsLicenseModelBringYourOwnLicense, } var mappingUpdateVmClusterDetailsLicenseModelEnumLowerCase = map[string]UpdateVmClusterDetailsLicenseModelEnum{ "license_included": UpdateVmClusterDetailsLicenseModelLicenseIncluded, "bring_your_own_license": UpdateVmClusterDetailsLicenseModelBringYourOwnLicense, } // GetUpdateVmClusterDetailsLicenseModelEnumValues Enumerates the set of values for UpdateVmClusterDetailsLicenseModelEnum func GetUpdateVmClusterDetailsLicenseModelEnumValues() []UpdateVmClusterDetailsLicenseModelEnum { values := make([]UpdateVmClusterDetailsLicenseModelEnum, 0) for _, v := range mappingUpdateVmClusterDetailsLicenseModelEnum { values = append(values, v) } return values } // GetUpdateVmClusterDetailsLicenseModelEnumStringValues Enumerates the set of values in String for UpdateVmClusterDetailsLicenseModelEnum func GetUpdateVmClusterDetailsLicenseModelEnumStringValues() []string { return []string{ "LICENSE_INCLUDED", "BRING_YOUR_OWN_LICENSE", } } // GetMappingUpdateVmClusterDetailsLicenseModelEnum performs case Insensitive comparison on enum value and return 
the desired enum func GetMappingUpdateVmClusterDetailsLicenseModelEnum(val string) (UpdateVmClusterDetailsLicenseModelEnum, bool) { enum, ok := mappingUpdateVmClusterDetailsLicenseModelEnumLowerCase[strings.ToLower(val)] return enum, ok }
wrldwzrd89/older-java-games
Fantastle5/src/net/worldwizard/scoremanager/Score.java
/* Fantastle: A Maze-Solving Game Copyright (C) 2008-2010 <NAME> This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. Any questions should be directed to the author via email at: <EMAIL> */ package net.worldwizard.scoremanager; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.IOException; public final class Score { // Fields private long score; private String name; // Constructors public Score() { this.score = 0L; this.name = "Nobody"; } public Score(final long newScore, final String newName) { this.score = newScore; this.name = newName; } // Methods public long getScore() { return this.score; } public String getName() { return this.name; } public void setScore(final long newScore) { this.score = newScore; } public void setName(final String newName) { this.name = newName; } public static Score readScore(final BufferedReader stream) throws IOException { final Score s = new Score(); s.name = stream.readLine(); s.score = Long.parseLong(stream.readLine()); return s; } public void writeScore(final BufferedWriter stream) throws IOException { stream.write(this.name + "\n"); stream.write(Long.toString(this.score) + "\n"); } }
adunStudio/Sunny
Sunny-Core/06_LAYER/ParticleLayer3D.h
<filename>Sunny-Core/06_LAYER/ParticleLayer3D.h #pragma once #include <iostream> #include <SUNNY.h> #include "MouseLayer2D.h" #include "../05_GAME/graphics/Model3D.h" #include "../05_GAME/graphics/Animation3D.h" #include "../05_GAME/graphics/BulletM.h" #include "../05_GAME/shoot/BulletShooter.h" #include "../05_GAME/shoot/BulletShooter2.h" #include "../05_GAME/shoot/BulletShooter3.h" #include "../05_GAME/shoot/BulletShooter4.h" #include "../05_GAME/shoot/BulletShooter5.h" #include "../05_GAME/shoot/BulletParticle.h" #include "../07_SERVER/BossLockerProtocol.h" #include "../07_SERVER/BossLocker.h" #include "FinalGameLayer2D.h" #include "FinalGame.h" using namespace std; using namespace sunny; using namespace graphics; using namespace directx; using namespace events; using namespace maths; using namespace utils; using namespace game; class ParticleLayer3D : public Layer3D { private: FinalGameLayer2D * layer2d; chrono::steady_clock::time_point start; ParticleSystem * m_particle; ParticleSystem* m_bulletParticle; MaterialInstance* m_SkyboxMaterial; vector<Mesh*> m_animations; vector<Mesh*> m_animations2; vector<Mesh*> m_animations3; vector<Mesh*> m_boss_animations; Animation3D* m_player; Animation3D* m_player2; Animation3D* m_player3; Animation3D* m_boss; vector<BulletM*> m_bullets; Model* m_sphere; BulletShooter* m_shooter; unordered_map<int, BulletShooter*> m_shooters; int m_shooterIndex = 0; float m_radian; float m_degree; public: ParticleLayer3D(); ~ParticleLayer3D(); void OnInit(Renderer3D& renderer) override; void OnTick() override; void OnUpdate(const utils::Timestep& ts) override; void OnEvent(Event& event) override; bool OnKeyPressedEvent(KeyPressedEvent& event); bool OnKeyReleasedEvent(KeyReleasedEvent& event); bool OnMousePressedEvent(MousePressedEvent& event); bool OnMouseReleasedEvent(MouseReleasedEvent& event); bool OnMouseMovedEvent(MouseMovedEvent& event); };
Whizkevina/ft-choir-portal
models/AccessToken.js
<reponame>Whizkevina/ft-choir-portal<gh_stars>0 const mongoose = require('mongoose'); const { Schema } = mongoose; const AccessTokenSchema = new Schema({ token: { type: String, required: true, unique: true, trim: true }, group: { type: Schema.Types.ObjectId, ref: 'Group', required: true }, isValid: { type: Boolean, default: true } }, { timestamps: true }); module.exports = AccessToken = mongoose.model('accesstoken', AccessTokenSchema);
dgusoff/cas
api/cas-server-core-api-configuration-model/src/main/java/org/apereo/cas/configuration/model/core/monitor/ServiceTicketMonitorProperties.java
package org.apereo.cas.configuration.model.core.monitor; import org.apereo.cas.configuration.support.RequiresModule; import com.fasterxml.jackson.annotation.JsonFilter; import lombok.Getter; import lombok.Setter; import lombok.experimental.Accessors; import org.springframework.boot.context.properties.NestedConfigurationProperty; import java.io.Serializable; /** * This is {@link ServiceTicketMonitorProperties}. * * @author <NAME> * @since 6.4.0 */ @RequiresModule(name = "cas-server-core-monitor", automated = true) @Getter @Setter @Accessors(chain = true) @JsonFilter("ServiceTicketMonitorProperties") public class ServiceTicketMonitorProperties implements Serializable { private static final long serialVersionUID = -8167395674267219982L; /** * Warning settings for this monitor. */ @NestedConfigurationProperty private MonitorWarningProperties warn = new MonitorWarningProperties(5000); }
konexios/moonstone
rhea-web/src/main/java/com/arrow/rhea/web/WebSecurity.java
<reponame>konexios/moonstone package com.arrow.rhea.web; import org.springframework.context.annotation.Configuration; import org.springframework.security.config.annotation.web.builders.HttpSecurity; import org.springframework.security.web.csrf.CsrfFilter; import com.arrow.pegasus.security.CoreWebSecurityAbstract; import com.arrow.pegasus.security.CsrfHeaderFilter; @Configuration // @Order(SecurityProperties.ACCESS_OVERRIDE_ORDER) public class WebSecurity extends CoreWebSecurityAbstract { private final static String CSRF_TOKEN_NAME = "XSRF-TOKEN-RHEA-WEB"; @Override // @formatter:off protected void configure(HttpSecurity http) throws Exception { http .authorizeRequests() .antMatchers(getHttpSecureExceptionPaths()) .permitAll() .anyRequest() .authenticated() .and() .formLogin() .loginPage("/api/v1/core/security/login") .defaultSuccessUrl("/api/rhea/security/user", true) .usernameParameter("username") .passwordParameter("password") .permitAll() .and() .logout() .logoutSuccessUrl("/index.html") .invalidateHttpSession(true) .and() .csrf() .csrfTokenRepository(csrfTokenRepository()).ignoringAntMatchers( "/api/v1/core/security/login", "/status") .and() .addFilterAfter(new CsrfHeaderFilter(configureCsrfTokenName(), getCsrfHeaderExceptionPaths()), CsrfFilter.class) .exceptionHandling() .accessDeniedHandler(new RheaAccessDeniedHandler()); // restrict user to only one session at at time // TODO temporarily commented out, need to move to configuration // http.sessionManagement().maximumSessions(1).maxSessionsPreventsLogin(false).sessionRegistry(sessionRegistry()); } // @formatter:on @Override protected String[] getHttpSecureExceptionPaths() { // @formatter:off return new String[] { "/webjars/**", "/assets/css/**", "/assets/img/**", "/assets/vender/**", "/assets/fonts/**", "/node_modules/**", "/scripts/**", "/index.html", "/", "/partials/signin.html", "/partials/home.html", "/api/rhea/webapp/*", "/status", "/favicon.ico"}; // @formatter:on } @Override protected String 
configureCsrfTokenName() { return CSRF_TOKEN_NAME; } @Override protected String[] getCsrfHeaderExceptionPaths() { // @formatter:off return new String[] { "/status"}; // @formatter:on } }
bbejeck/kotlin
compiler/frontend/src/org/jetbrains/jet/lang/resolve/calls/TaskPrioritizers.java
<reponame>bbejeck/kotlin /* * Copyright 2010-2012 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jetbrains.jet.lang.resolve.calls; import com.google.common.base.Predicate; import com.google.common.collect.Collections2; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.jet.lang.descriptors.*; import org.jetbrains.jet.lang.resolve.DescriptorUtils; import org.jetbrains.jet.lang.resolve.scopes.JetScope; import org.jetbrains.jet.lang.types.ErrorUtils; import org.jetbrains.jet.lang.types.JetStandardClasses; import org.jetbrains.jet.lang.types.JetType; import java.util.*; /** * @author abreslav */ public class TaskPrioritizers { /*package*/ static TaskPrioritizer<FunctionDescriptor> FUNCTION_TASK_PRIORITIZER = new TaskPrioritizer<FunctionDescriptor>() { @NotNull @Override protected Collection<FunctionDescriptor> getNonExtensionsByName(JetScope scope, String name) { Set<FunctionDescriptor> functions = Sets.newLinkedHashSet(scope.getFunctions(name)); for (Iterator<FunctionDescriptor> iterator = functions.iterator(); iterator.hasNext(); ) { FunctionDescriptor functionDescriptor = iterator.next(); if (functionDescriptor.getReceiverParameter().exists()) { iterator.remove(); } } addConstructors(scope, name, functions); addVariableAsFunction(scope, name, functions, false); return functions; } @NotNull @Override protected 
Collection<FunctionDescriptor> getMembersByName(@NotNull JetType receiverType, String name) { JetScope receiverScope = receiverType.getMemberScope(); Set<FunctionDescriptor> members = Sets.newHashSet(receiverScope.getFunctions(name)); addConstructors(receiverScope, name, members); addVariableAsFunction(receiverScope, name, members, false); return members; } @NotNull @Override protected Collection<FunctionDescriptor> getExtensionsByName(JetScope scope, String name) { Set<FunctionDescriptor> extensionFunctions = Sets.newHashSet(scope.getFunctions(name)); for (Iterator<FunctionDescriptor> iterator = extensionFunctions.iterator(); iterator.hasNext(); ) { FunctionDescriptor descriptor = iterator.next(); if (!descriptor.getReceiverParameter().exists()) { iterator.remove(); } } addVariableAsFunction(scope, name, extensionFunctions, true); return extensionFunctions; } private void addConstructors(JetScope scope, String name, Collection<FunctionDescriptor> functions) { ClassifierDescriptor classifier = scope.getClassifier(name); if (classifier instanceof ClassDescriptor && !ErrorUtils.isError(classifier.getTypeConstructor())) { ClassDescriptor classDescriptor = (ClassDescriptor) classifier; functions.addAll(classDescriptor.getConstructors()); } } private void addVariableAsFunction(JetScope scope, String name, Set<FunctionDescriptor> functions, boolean receiverNeeded) { VariableDescriptor variable = scope.getLocalVariable(name); if (variable == null) { variable = DescriptorUtils.filterNonExtensionProperty(scope.getProperties(name)); } if (variable != null) { JetType outType = variable.getOutType(); if (outType != null && JetStandardClasses.isFunctionType(outType)) { VariableAsFunctionDescriptor functionDescriptor = VariableAsFunctionDescriptor.create(variable); if ((functionDescriptor.getReceiverParameter().exists()) == receiverNeeded) { functions.add(functionDescriptor); } } } } }; /*package*/ static TaskPrioritizer<VariableDescriptor> VARIABLE_TASK_PRIORITIZER = new 
TaskPrioritizer<VariableDescriptor>() { @NotNull @Override protected Collection<VariableDescriptor> getNonExtensionsByName(JetScope scope, String name) { VariableDescriptor descriptor = scope.getLocalVariable(name); if (descriptor == null) { descriptor = DescriptorUtils.filterNonExtensionProperty(scope.getProperties(name)); } if (descriptor == null) return Collections.emptyList(); return Collections.singleton(descriptor); } @NotNull @Override protected Collection<VariableDescriptor> getMembersByName(@NotNull JetType receiverType, String name) { return receiverType.getMemberScope().getProperties(name); } @NotNull @Override protected Collection<VariableDescriptor> getExtensionsByName(JetScope scope, String name) { return Collections2.filter(scope.getProperties(name), new Predicate<VariableDescriptor>() { @Override public boolean apply(@Nullable VariableDescriptor variableDescriptor) { return (variableDescriptor != null) && variableDescriptor.getReceiverParameter().exists(); } }); } }; /*package*/ static TaskPrioritizer<VariableDescriptor> PROPERTY_TASK_PRIORITIZER = new TaskPrioritizer<VariableDescriptor>() { private Collection<VariableDescriptor> filterProperties(Collection<? 
extends VariableDescriptor> variableDescriptors) { ArrayList<VariableDescriptor> properties = Lists.newArrayList(); for (VariableDescriptor descriptor : variableDescriptors) { if (descriptor instanceof PropertyDescriptor) { properties.add(descriptor); } } return properties; } @NotNull @Override protected Collection<VariableDescriptor> getNonExtensionsByName(JetScope scope, String name) { return filterProperties(VARIABLE_TASK_PRIORITIZER.getNonExtensionsByName(scope, name)); } @NotNull @Override protected Collection<VariableDescriptor> getMembersByName(@NotNull JetType receiver, String name) { return filterProperties(VARIABLE_TASK_PRIORITIZER.getMembersByName(receiver, name)); } @NotNull @Override protected Collection<VariableDescriptor> getExtensionsByName(JetScope scope, String name) { return filterProperties(VARIABLE_TASK_PRIORITIZER.getExtensionsByName(scope, name)); } }; }
Kievkao/Twitteroid
Twitteroid/Modules/Settings/MainSettings/Presenter/TWRSettingsPresenterProtocol.h
//
// TWRSettingsPresenterProtocol.h
// Twitteroid
//
// Created by <NAME> on 6/2/16.
// Copyright © 2016 Kievkao. All rights reserved.
//

#import <Foundation/Foundation.h>

@class TWRUserProfile;

NS_ASSUME_NONNULL_BEGIN

/// Presenter callbacks for the main settings screen's user-profile fetch flow.
@protocol TWRSettingsPresenterProtocol <NSObject>

/// Called when the user profile has been loaded successfully.
- (void)retrieveUserProfileDidLoad:(TWRUserProfile *)userProfile;

/// Called when an asynchronous profile fetch has started (e.g. to show a spinner).
- (void)retrieveUserProfileDidStartAsync;

/// Called when the profile fetch finished with an error.
- (void)retrieveUserProfileDidFinishWithError:(NSError *)error;

@end

NS_ASSUME_NONNULL_END
Anzhangusc/RevealHelper3
website_downloa.js
// website_downloa.js
// Downloads NFT metadata for a range of token ids, tallies trait frequencies,
// scores each token's rarity, and persists the trait state + ranking to DynamoDB.
const fetch = require("node-fetch-commonjs");
var util = require("./util.js");
var constant = require("./const.js");

// Load the AWS SDK for JavaScript and target the us-west-1 region.
var AWS = require('aws-sdk');
AWS.config.update({region: 'us-west-1'});
s3 = new AWS.S3({apiVersion: '2006-03-01'});

// Fold one token's metadata into the aggregate trait tally.
// Mutates `state`: bumps state.num_traits[<trait count>],
// state[<trait_type>][<value>] and state.num (tokens with >= 1 valued trait).
// Returns true when the token contributed at least one trait value.
const traitLoad = (raw, state) => {
    if (!raw || !Array.isArray(raw.attributes) || raw.attributes.length === 0) {
        return false;
    }
    const traits = raw.attributes;
    // A single "unrevealed" placeholder attribute means no real metadata yet.
    if (traits.length === 1 && util.isUnreveal(traits[0])) {
        return false;
    }
    // Count how many tokens carry this number of traits.
    const traitsLength = traits.length.toString();
    if (!state.num_traits) {
        state.num_traits = {};
    }
    state.num_traits[traitsLength] = (state.num_traits[traitsLength] || 0) + 1;

    let found = false;
    for (const trait of traits) {
        if (!trait || trait.trait_type == null) {
            continue;
        }
        const traitKey = trait.trait_type.toString();
        if (!(traitKey in state)) {
            state[traitKey] = {};
        }
        if (trait.value == null) {
            continue;
        }
        found = true;
        const traitValue = trait.value.toString();
        state[traitKey][traitValue] = (state[traitKey][traitValue] || 0) + 1;
    }
    if (found) {
        state.num = (state.num || 0) + 1;
    }
    return found;
};

// Fetch one metadata JSON document; resolves to undefined on fetch/parse error.
const download = async (url) => {
    console.log(url);
    return fetch(url, {
        method: "GET",
        cache: 'no-cache',
        timeout: 7000,
    })
        .then(response => response.json())
        .catch(err => {
            console.log(err);
        });
};

// Sequentially download every token in `items`, tallying traits into `state`
// and keeping successfully-parsed documents in `all` (keyed by token id).
const batchDownload = async (url, postfix, items, state, all) => {
    for (const key of Object.keys(items)) {
        const res = await download(url + key.toString() + postfix);
        if (traitLoad(res, state)) {
            all[key] = res;
        } else {
            console.log("RETRY LATER");
        }
    }
};

// Persist the aggregate trait state when at least one token was tallied.
const writeState = (state, contract) => {
    if (state && state.num) {
        writeStateDB(contract, state);
    }
    console.log(state);
};

module.exports.trigger = trigger;

// Entry point: download metadata for config.start .. config.start+offset-1,
// rank every revealed token by rarity score, write results to DynamoDB.
async function trigger(config) {
    const state = {};
    const items = {};
    const all = {};
    console.log(util.getRandomInt(config.prefix.length));
    const offset = Math.ceil(config.offset);
    for (let i = config.start; i < config.start + offset; i++) {
        items[i] = true;
    }
    await batchDownload(config.prefix, config.postfix, items, state, all);
    writeState(state, config.contract);

    const sortable = [];
    const details = {};
    for (const i of Object.keys(all)) {
        const [score, detail] = rarityScore(i, all[i], state, details);
        sortable.push([i, score, detail]);
    }
    // Highest score (rarest) first.
    sortable.sort((a, b) => b[1] - a[1]);
    console.log(sortable);
    writeRank(sortable, config.contract);
}

// Score one token: each trait contributes 1 / (frequency * #distinct values),
// plus one synthetic "num_traits" component. Returns [score, detail] and
// records the per-trait breakdown in `details[key]`.
const rarityScore = (key, raw, state, details) => {
    let res = -1.0;
    const detail = [];
    const TOTAL_SUPPLY = state.num;
    if (raw && Array.isArray(raw.attributes) && raw.attributes.length > 0) {
        const traits = raw.attributes;
        res = 0.0;
        // BUG FIX: this branch used to `return false`, which blew up the
        // caller's `const [score, detail] = rarityScore(...)` destructuring
        // with a TypeError. Unrevealed tokens now report a neutral [0, []].
        if (traits.length === 1 && util.isUnreveal(traits[0])) {
            details[key] = detail;
            return [res, detail];
        }
        const traitsLength = traits.length.toString();
        if (state.num_traits) {
            const quan = (traitsLength in state.num_traits) ? state.num_traits[traitsLength] : 1;
            const freq = quan / TOTAL_SUPPLY;
            const size = Object.keys(state.num_traits).length;
            const traitScore = 1 / (freq * size);
            detail.push({
                'name': 'num_traits',
                'value': traitsLength,
                'freq': quan.toString(),
                'prob': freq.toString(),
                'score': traitScore.toString()
            });
            res += traitScore;
        }
        for (const trait of traits) {
            if (!trait || trait.trait_type == null) {
                continue;
            }
            const traitKey = trait.trait_type.toString();
            if (!(traitKey in state)) {
                continue;
            }
            if (trait.value == null) {
                continue;
            }
            const traitValue = trait.value.toString();
            const quan = (traitValue in state[traitKey]) ? state[traitKey][traitValue] : 1;
            const freq = quan / TOTAL_SUPPLY;
            const size = Object.keys(state[traitKey]).length;
            const traitScore = 1 / (freq * size);
            detail.push({
                'name': traitKey,
                'value': traitValue,
                'freq': quan.toString(),
                'prob': freq.toString(),
                'score': traitScore.toString()
            });
            res += traitScore;
        }
    }
    details[key] = detail;
    return [res, detail];
};

// Write the sorted ranking (rank, score, detail JSON) into the "rank" table.
const writeRank = (sortable, contract) => {
    const docClient = new AWS.DynamoDB.DocumentClient();
    for (let rank = 0; rank < sortable.length; rank++) {
        const newRank = {
            TableName: "rank",
            Item: {
                Contract: contract.toLowerCase(),
                Token_id: sortable[rank][0],
                Rank: rank + 1,
                Score: sortable[rank][1],
                Detail: JSON.stringify(sortable[rank][2]),
            }
        };
        docClient.put(newRank, function (err, data) {
            if (err) {
                console.error("Unable to add item. Error JSON:", JSON.stringify(err, null, 2));
            } else {
                console.log("Added item:", JSON.stringify(data, null, 2));
            }
        });
    }
};

// Persist the serialized trait state into the "Trait_State" table.
const writeStateDB = async (contract, json) => {
    const docClient = new AWS.DynamoDB.DocumentClient();
    const newState = {
        TableName: "Trait_State",
        Item: {
            Contract: contract.toLowerCase(),
            State: JSON.stringify(json)
        }
    };
    docClient.put(newState, function (err, data) {
        if (err) {
            console.error("Unable to add item. Error JSON:", JSON.stringify(err, null, 2));
        } else {
            console.log("Added item:", JSON.stringify(data, null, 2));
        }
    });
};

const config = {
    "prefix": "https://anzh.mypinata.cloud/ipfs/QmeBWSnYPEnUimvpPfNHuvgcK9wFH9Sa6cZ4KDfgkfJJis/",
    "postfix": "",
    "offset": 10000,
    "contract": "0xed5af388653567af2f388e6224dc7c4b3241c544",
    "start": 0
}

trigger(config);
ajaypp123/DataStructure_Competative_Programing
Compatative_Programing/Dynamic Programing/knapsack0_1/subset_sum_tabulation.java
/*
 * Subset-sum via 0/1-knapsack tabulation: decide whether some subset of
 * `arr[0..item-1]` sums exactly to `target`.
 *
 * T[i][j] == true  <=>  some subset of the first i items sums to j.
 * Recurrence: T[i][j] = T[i-1][j] || T[i-1][j - arr[i-1]]  (when j >= arr[i-1]).
 *
 * Example: W = {2, 5, 1, 3, 4}, target 7 -> true (2+5, 3+4, ...).
 */
public class subset_sum_tabulation {
    public static void main(String[] args) {
        int[] W = {2, 5, 1, 3, 4};
        int capacity = 7;
        int item = 5;
        boolean result = subset(W, capacity, item);
        System.out.println(result);
    }

    // Widened from private so callers/tests can exercise it directly
    // (backward compatible: visibility only broadened).
    public static boolean subset(int[] arr, int target, int item) {
        if (target == 0) {
            return true;
        }
        if (item == 0) {
            return false;
        }
        boolean[][] T = new boolean[item + 1][target + 1];
        // Sum 0 is reachable from any prefix via the empty subset.
        // BUG FIX: the original zeroed the whole first row AFTER this column
        // initialization, clobbering T[0][0] back to false; that made sums
        // reachable only through the first item (e.g. target == arr[0])
        // wrongly report false. Java booleans default to false, so the row
        // needs no explicit false-initialization at all.
        for (int i = 0; i <= item; i++) {
            T[i][0] = true;
        }
        for (int i = 1; i <= item; i++) {
            for (int j = 1; j <= target; j++) {
                if (j - arr[i - 1] < 0) {
                    // Item i-1 is too heavy for sum j: can only skip it.
                    T[i][j] = T[i - 1][j];
                } else {
                    // Either skip item i-1, or take it and reach j - arr[i-1].
                    T[i][j] = T[i - 1][j - arr[i - 1]] || T[i - 1][j];
                }
            }
        }
        return T[item][target];
    }
}
GordsSleek/CheatBreakerPlus-src
src/main/java/me/travis/wurstplus/wurstplustwo/hacks/chat/AutoExcuse.java
<gh_stars>1-10 //Deobfuscated with https://github.com/PetoPetko/Minecraft-Deobfuscator3000 using mappings "1.12 stable mappings"! // // Decompiled by Procyon v0.5.36 // package me.travis.wurstplus.wurstplustwo.hacks.chat; import java.util.Random; import me.travis.wurstplus.wurstplustwo.hacks.WurstplusCategory; import me.travis.wurstplus.wurstplustwo.hacks.WurstplusHack; public class AutoExcuse extends WurstplusHack { int diedTime; public AutoExcuse() { super(WurstplusCategory.WURSTPLUS_CHAT); this.diedTime = 0; this.name = "Auto Excuse"; this.tag = "AutoExcuse"; this.description = "tell people why you died"; } @Override public void update() { if (this.diedTime > 0) { --this.diedTime; } if (AutoExcuse.mc.player.isDead) { this.diedTime = 500; } if (!AutoExcuse.mc.player.isDead && this.diedTime > 0) { final Random rand = new Random(); final int randomNum = rand.nextInt(6) + 1; if (randomNum == 1) { AutoExcuse.mc.player.sendChatMessage("you win because you are a pingplayer :(("); } if (randomNum == 2) { AutoExcuse.mc.player.sendChatMessage("i was in my hacker console :("); } if (randomNum == 3) { AutoExcuse.mc.player.sendChatMessage("bro im good i was testing settings :(("); } if (randomNum == 5) { AutoExcuse.mc.player.sendChatMessage("im desync :("); } if (randomNum == 6) { AutoExcuse.mc.player.sendChatMessage("youre a cheater :("); } this.diedTime = 0; } } }
eschwartz/aerisjs
src/api/collections/pointdatacollection.js
define([
  'aeris/util',
  'aeris/api/collections/aerisapicollection',
  'aeris/api/models/pointdata'
], function(_, AerisApiCollection, PointData) {
  /**
   * A representation of point data from the Aeris Api.
   *
   * @class aeris.api.collections.PointDataCollection
   * @extends aeris.api.collections.AerisApiCollection
   *
   * @constructor
   * @override
   * @param {Array=} opt_models Optional initial models for the collection.
   * @param {Object=} opt_options Collection options, merged over the defaults below.
   */
  var PointDataCollection = function(opt_models, opt_options) {
    // Default to validating models and using PointData as the model type.
    var options = _.extend({
      validate: true,
      model: PointData,
      params: {}
    }, opt_options);

    // Default API request params: at most 100 results within a 3000mi radius;
    // p: ':auto' — presumably an auto-resolved place/location; confirm
    // against the Aeris API documentation.
    _.defaults(options.params, {
      limit: 100,
      p: ':auto',
      radius: '3000mi'
    });

    AerisApiCollection.call(this, opt_models, options);
  };
  _.inherits(PointDataCollection, AerisApiCollection);


  return PointDataCollection;
});
even311379/EbirdTaiwan2020
ebirdtaiwan/home/migrations/0017_upper_home.py
# Generated by Django 3.1 on 2020-09-29 19:04 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('wagtailimages', '0022_uploadedimage'), ('wagtailcore', '0052_pagelogentry'), ('home', '0016_auto_20200913_1430'), ] operations = [ migrations.CreateModel( name='upper_home', fields=[ ('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.page')), ('event1_title', models.CharField(help_text='秋季大亂鬥', max_length=30)), ('event1_description', models.CharField(max_length=150)), ('event1_period', models.CharField(max_length=20)), ('event2_title', models.CharField(help_text='全球觀鳥大日', max_length=30)), ('event2_description', models.CharField(max_length=150)), ('event2_period', models.CharField(max_length=20)), ('event2_url', models.CharField(help_text='10/17全球官鳥大日的網址', max_length=40)), ('event3_title', models.CharField(help_text='台北觀鳥大賽', max_length=30)), ('event3_description', models.CharField(max_length=150)), ('event3_period', models.CharField(max_length=20)), ('event1_page', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailcore.page')), ('event1_thumbnail', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.image')), ('event2_thumbnail', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.image')), ('event3_page', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailcore.page')), ('event3_thumbnail', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.image')), ], options={ 'abstract': False, }, bases=('wagtailcore.page',), ), ]
laohubuzaijia/fbthrift
thrift/compiler/test/fixtures/includes/gen-swift/test/fixtures/includes/MyServiceAsyncReactiveWrapper.java
/**
 * Autogenerated by Thrift
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 * @generated
 */
package test.fixtures.includes;

import java.util.*;

/**
 * Generated adapter exposing the future-based {@code MyService.Async} client
 * through the {@code MyService.Reactive} interface by converting each async
 * result into a Reactor {@code Mono} via {@code FutureUtil.toMono}.
 */
public class MyServiceAsyncReactiveWrapper 
    implements MyService.Reactive {
    // The wrapped future-based client; all calls delegate to it.
    private final MyService.Async _delegate;

    public MyServiceAsyncReactiveWrapper(MyService.Async _delegate) {
        this._delegate = _delegate;
    }

    // Closes the underlying async client.
    @java.lang.Override
    public void close() {
        _delegate.close();
    }

    @java.lang.Override
    public reactor.core.publisher.Mono<Void> query(final test.fixtures.includes.MyStruct s, final test.fixtures.includes.includes.Included i) {
        return com.facebook.swift.transport.util.FutureUtil.toMono(_delegate.query(s, i));
    }

    @java.lang.Override
    public reactor.core.publisher.Mono<Void> hasArgDocs(final test.fixtures.includes.MyStruct s, final test.fixtures.includes.includes.Included i) {
        return com.facebook.swift.transport.util.FutureUtil.toMono(_delegate.hasArgDocs(s, i));
    }
}
trunda/adhearsion
spec/adhearsion/call_controller/menu_dsl/fixnum_match_calculator_spec.rb
# encoding: utf-8

require 'spec_helper'

module Adhearsion
  class CallController
    module MenuDSL

      # Specs for FixnumMatchCalculator: matching dialed digit strings
      # against a Fixnum menu pattern.
      describe FixnumMatchCalculator do
        let(:match_payload) { :main }

        # A strict prefix of the pattern is a potential (not yet exact) match.
        it "a potential match scenario" do
          calculator = FixnumMatchCalculator.new(444, match_payload)
          match = calculator.match '4'
          match.should be_potential_match
          match.should_not be_exact_match
          match.potential_matches.should be == [444]
        end

        # Dialing every digit of a multi-digit pattern is an exact match.
        it "a multi-digit exact match scenario" do
          calculator = FixnumMatchCalculator.new(5555, match_payload)
          match = calculator.match '5555'
          match.should be_exact_match
        end

        # A one-digit pattern matches exactly on that single digit.
        it "a single-digit exact match scenario" do
          calculator = FixnumMatchCalculator.new(1, match_payload)
          match = calculator.match '1'
          match.should be_exact_match
        end

        # The payload handed to the calculator is carried on the CalculatedMatch.
        it "the context name given to the calculator should be passed on the CalculatedMatch" do
          match_payload = :icanhascheezburger
          calculator = FixnumMatchCalculator.new(1337, match_payload)
          calculator.match('1337').match_payload.should be match_payload
        end
      end

    end
  end
end
wyaadarsh/LeetCode-Solutions
Python3/0101-Symmetric-Tree/soln-2.py
# Definition for a binary tree node.
# class TreeNode:
#     def __init__(self, x):
#         self.val = x
#         self.left = None
#         self.right = None

class Solution:
    def isSymmetric(self, root):
        """
        :type root: TreeNode
        :rtype: bool

        A tree is symmetric iff it is a mirror of itself: iteratively
        compare mirrored node pairs with an explicit stack instead of
        recursing.
        """
        pairs = [(root, root)]
        while pairs:
            left, right = pairs.pop()
            if left is None and right is None:
                continue
            if left is None or right is None or left.val != right.val:
                return False
            # Outer children mirror each other, as do inner children.
            pairs.append((left.left, right.right))
            pairs.append((left.right, right.left))
        return True
tsukoyumi/skylicht-engine
Projects/Skylicht/Audio/Source/Driver/ISoundDrvierImp.cpp
/*
!@
MIT License

Copyright (c) 2012 - 2019 Skylicht Technology CO., LTD

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files
(the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify,
merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

This file is part of the "Skylicht Engine".
https://github.com/skylicht-lab/skylicht-engine
!#
*/

#include "stdafx.h"
#include "SkylichtAudioConfig.h"
#include "ISoundDriver.h"
#include "CDriverNull.h"
#include "CDriverMMSystem.h"
#include "CDriverXAudio2.h"
#include "CDriverOpenSL.h"
#include "CDriverOpenAL.h"
#include "CDriverAudioUnit.h"

namespace SkylichtAudio
{
	// Factory method: instantiate the platform audio backend selected at
	// compile time by the USE_* macros (presumably set in
	// SkylichtAudioConfig.h — confirm). Falls back to the silent null
	// driver when no backend macro is defined.
	ISoundDriver* ISoundDriver::createDriver()
	{
#if defined(USE_XAUDIO2)
		return new CDriverXAudio2();
#elif defined(USE_MMDRIVER)
		return new CDriverMMSystem();
#elif defined(USE_OPENSL)
		return new CDriverOpenSL();
#elif defined(USE_OPENAL)
		return new CDriverOpenAL();
#elif defined(USE_AUDIO_UNIT)
		return new CDriverAudioUnit();
#else
		return new CDriverNull();
#endif
	}
}
domkog/AdvancedLogicSimulator
AdvancedLogicSimulator/src/at/fishkog/als/ui/customNodes/ConnectorLine.java
package at.fishkog.als.ui.customNodes;

import at.fishkog.als.sim.data.Connector;
import javafx.scene.shape.Line;

/**
 * A JavaFX line segment bound to a logic-simulator {@link Connector},
 * so UI interactions on the line can be mapped back to the connector.
 */
public class ConnectorLine extends Line{

	// The connector this line represents; set once at construction.
	private Connector con;

	// (d, e) and (f, g) are the start and end coordinates passed to Line.
	public ConnectorLine(double d, double e, double f, double g, Connector c) {
		super(d,e,f,g);
		this.con = c;
	}

	public Connector getCon() {
		return con;
	}
}
wisesky/LeetCode-Practice
src/407. Trapping Rain Water II_learned.py
from typing import List
import heapq


class Solution:
    def trapRainWater(self, heightMap: List[List[int]]) -> int:
        """Return the volume of water trapped by a 2-D elevation map
        (LeetCode 407).

        Flood-fill from the border inward with a min-heap of boundary
        heights: the lowest boundary cell seen so far bounds how much
        water any cell behind it can hold.
        """
        # Guard both the empty grid and a grid of empty rows; the original
        # crashed with an IndexError on [[]].
        if not heightMap or not heightMap[0]:
            return 0
        h, w = len(heightMap), len(heightMap[0])
        visited = set()
        pq = []
        # Seed the heap with every border cell; border cells hold no water.
        for i in range(h):
            for j in (0, w - 1):
                if (i, j) not in visited:
                    heapq.heappush(pq, (heightMap[i][j], (i, j)))
                    visited.add((i, j))
        for j in range(1, w - 1):
            for i in (0, h - 1):
                if (i, j) not in visited:
                    heapq.heappush(pq, (heightMap[i][j], (i, j)))
                    visited.add((i, j))

        h_max = 0  # highest wall crossed so far on any path from the border
        res = 0
        while pq:
            value, (x, y) = heapq.heappop(pq)
            if value < h_max:
                # Water above this cell rises to the lowest enclosing wall.
                res += h_max - value
            else:
                h_max = value
            for nx, ny in ((x - 1, y), (x + 1, y), (x, y - 1), (x, y + 1)):
                if 0 <= nx < h and 0 <= ny < w and (nx, ny) not in visited:
                    heapq.heappush(pq, (heightMap[nx][ny], (nx, ny)))
                    visited.add((nx, ny))
        return res


if __name__ == "__main__":
    so = Solution()
    heightMap = [
        [1, 4, 3, 1, 3, 2],
        [3, 2, 1, 3, 2, 4],
        [2, 3, 3, 2, 3, 1],
    ]  # expected: 4
    print(so.trapRainWater(heightMap))
seeArmy/web
src/truck/LbsLib.js
/* eslint-disable no-mixed-operators */
/* eslint-disable one-var */
/* eslint-disable no-empty */

// Coordinate-conversion helpers for Chinese map services:
// WGS-84 (GPS) -> GCJ-02 ("Mars" datum, gcj_encrypt) -> BD-09 (Baidu, bd_encrypt),
// plus track-point post-processing (transformPoint, delRepeat) and a compass
// direction label helper (getDirection).
export default {
  PI: 3.14159265358979324,
  // Constant used by the GCJ-02 <-> BD-09 conversion formulas.
  x_pi: 3.14159265358979324 * 3000.0 / 180.0,
  // Latitude component of the GCJ-02 obfuscation offset; x/y are offsets
  // from the reference point (105E, 35N).
  transformLat(x, y) {
    let ret = -100.0 + 2.0 * x + 3.0 * y + 0.2 * y * y + 0.1 * x * y + 0.2 * Math.sqrt(Math.abs(x));
    ret += (20.0 * Math.sin(6.0 * x * this.PI) + 20.0 * Math.sin(2.0 * x * this.PI)) * 2.0 / 3.0;
    ret += (20.0 * Math.sin(y * this.PI) + 40.0 * Math.sin(y / 3.0 * this.PI)) * 2.0 / 3.0;
    ret += (160.0 * Math.sin(y / 12.0 * this.PI) + 320 * Math.sin(y * this.PI / 30.0)) * 2.0 / 3.0;
    return ret;
  },
  // Longitude component of the GCJ-02 obfuscation offset.
  transformLon(x, y) {
    let ret = 300.0 + x + 2.0 * y + 0.1 * x * x + 0.1 * x * y + 0.1 * Math.sqrt(Math.abs(x));
    ret += (20.0 * Math.sin(6.0 * x * this.PI) + 20.0 * Math.sin(2.0 * x * this.PI)) * 2.0 / 3.0;
    ret += (20.0 * Math.sin(x * this.PI) + 40.0 * Math.sin(x / 3.0 * this.PI)) * 2.0 / 3.0;
    ret += (150.0 * Math.sin(x / 12.0 * this.PI) + 300.0 * Math.sin(x / 30.0 * this.PI)) * 2.0 / 3.0;
    return ret;
  },
  // Full WGS-84 -> GCJ-02 delta at (lat, lng), scaled onto the ellipsoid
  // (a: semi-major axis, ee: eccentricity squared — Krasovsky values).
  delta(lat, lng) {
    const a = 6378245.0;
    const ee = 0.00669342162296594323;
    let dLat = this.transformLat(lng - 105.0, lat - 35.0);
    let dLon = this.transformLon(lng - 105.0, lat - 35.0);
    const radLat = lat / 180.0 * this.PI;
    let magic = Math.sin(radLat);
    magic = 1 - ee * magic * magic;
    const sqrtMagic = Math.sqrt(magic);
    dLat = dLat * 180.0 / (a * (1 - ee) / (magic * sqrtMagic) * this.PI);
    dLon = dLon * 180.0 / (a / sqrtMagic * Math.cos(radLat) * this.PI);
    return { 'lat': dLat, 'lng': dLon };
  },
  // Rough bounding-box test: the GCJ-02 offset is only applied inside China.
  outOfChina(lat, lng) {
    if (lng < 72.004 || lng > 137.8347) {
      return true;
    }
    if (lat < 0.8293 || lat > 55.8271) {
      return true;
    }
    return false;
  },
  // WGS-84 -> GCJ-02. Coordinates outside China are returned unchanged.
  gcj_encrypt(wgsLat, wgsLon) {
    if (this.outOfChina(wgsLat, wgsLon)) {
      return { 'lat': wgsLat, 'lng': wgsLon };
    }
    const d = this.delta(wgsLat, wgsLon);
    return { 'lat': wgsLat + d.lat, 'lng': wgsLon + d.lng };
  },
  // GCJ-02 -> BD-09 (Baidu Maps datum).
  bd_encrypt(gcjLat, gcjLon) {
    const x = gcjLon;
    const y = gcjLat;
    const z = Math.sqrt(x * x + y * y) + 0.00002 * Math.sin(y * this.x_pi);
    const theta = Math.atan2(y, x) + 0.000003 * Math.cos(x * this.x_pi);
    const bdLon = z * Math.cos(theta) + 0.0065;
    const bdLat = z * Math.sin(theta) + 0.006;
    return { 'lat': bdLat, 'lng': bdLon };
  },
  // Convert every point in-place from WGS-84 to BD-09; points lacking
  // lng/lat are removed.
  // NOTE(review): splicing while iterating with a cached `len` skips the
  // element after each removal and reads past the shortened array — confirm
  // whether malformed points can actually occur here.
  transformPoint(points) {
    const pointArray = points;
    const len = pointArray.length;
    for (let i = 0; i < len; i += 1) {
      if (pointArray[i].lng && pointArray[i].lat) {
        const gcj = this.gcj_encrypt(pointArray[i].lat, pointArray[i].lng);
        const baidu = this.bd_encrypt(gcj.lat, gcj.lng);
        pointArray[i].lng = baidu.lng;
        pointArray[i].lat = baidu.lat;
      } else {
        pointArray.splice(i, 1);
      }
    }
    return pointArray;
  },
  // Annotate each track point (pos_str label, direction text, stop flag)
  // and collapse consecutive points with identical lng/lat.
  // NOTE(review): the outer loop stops at len - 2 and the skip logic jumps
  // i to the last differing index — tail duplicates may be handled
  // unexpectedly; verify against a real track before relying on it.
  delRepeat(array) {
    const len = array.length;
    if (len > 1) {
      const arr = [];
      array.forEach((a) => {
        a.pos_str = '查看位置';
        a.directionDec = this.getDirection(a.direction);
        a.stop_point = !!a.stop_time;
      }, this);
      arr.push(array[0]);
      for (let i = 1; i < len - 2; i++) {
        for (let j = i + 1; j < len; j++) {
          if (array[i].lng === array[j].lng && array[i].lat === array[j].lat) {
          } else {
            arr.push(array[j]);
            i = j;
            break;
          }
        }
      }
      return arr;
    }
    return array;
  },
  // Map a heading in degrees (0 = north, clockwise) to a Chinese
  // compass-direction label.
  getDirection(direction) {
    switch (true) {
      case direction === 0:
        return '北方';
      case direction === 90:
        return '东方';
      case direction === 180:
        return '南方';
      case direction === 270:
        return '西方';
      case direction === 45:
        return '东北方';
      case direction === 135:
        return '东南方';
      case direction === 225:
        return '西南方';
      case direction === 315:
        return '西北方';
      case direction > 0 && direction < 45:
        return '东北方偏北';
      case direction > 45 && direction < 90:
        return '东北方偏东';
      case direction > 90 && direction < 135:
        return '东南方偏东';
      case direction > 135 && direction < 180:
        return '东南方偏南';
      case direction > 180 && direction < 225:
        return '西南方偏南';
      case direction > 225 && direction < 270:
        return '西南方偏西';
      case direction > 270 && direction < 315:
        return '西北方偏西';
      case direction > 315 && direction < 360:
        return '西北方偏北';
      default:
        return '西北方偏北';
    }
  }
};
tomwhite/google-cloud-java
google-api-grpc/proto-google-cloud-redis-v1/src/main/java/com/google/cloud/redis/v1/CreateInstanceRequestOrBuilder.java
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/cloud/redis/v1/cloud_redis.proto

package com.google.cloud.redis.v1;

/**
 * Read-only accessor interface for the {@code google.cloud.redis.v1.CreateInstanceRequest}
 * message. Generated code — regenerate from the .proto instead of editing.
 */
public interface CreateInstanceRequestOrBuilder
    extends
    // @@protoc_insertion_point(interface_extends:google.cloud.redis.v1.CreateInstanceRequest)
    com.google.protobuf.MessageOrBuilder {

  /**
   * Required. Resource name of the instance location, of the form
   * {@code projects/{project_id}/locations/{location_id}} where
   * {@code location_id} refers to a GCP region.
   *
   * <code>string parent = 1;</code>
   */
  java.lang.String getParent();
  /**
   * Bytes form of {@code parent} (field 1); see {@link #getParent()}.
   */
  com.google.protobuf.ByteString getParentBytes();

  /**
   * Required. Logical name of the Redis instance in the customer project:
   * 1-40 characters, lowercase letters/numbers/hyphens, starts with a
   * letter, ends with a letter or number, unique within project/location.
   *
   * <code>string instance_id = 2;</code>
   */
  java.lang.String getInstanceId();
  /**
   * Bytes form of {@code instance_id} (field 2); see {@link #getInstanceId()}.
   */
  com.google.protobuf.ByteString getInstanceIdBytes();

  /**
   * Whether the required {@code instance} field (field 3) is set.
   */
  boolean hasInstance();
  /**
   * Required. The Redis {@code Instance} resource to create (field 3).
   */
  com.google.cloud.redis.v1.Instance getInstance();
  /**
   * Or-builder view of the {@code instance} field; see {@link #getInstance()}.
   */
  com.google.cloud.redis.v1.InstanceOrBuilder getInstanceOrBuilder();
}
pcdas/parquet-mr
parquet-column/src/main/java/parquet/example/data/simple/NanoTime.java
<filename>parquet-column/src/main/java/parquet/example/data/simple/NanoTime.java /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package parquet.example.data.simple; import java.nio.ByteBuffer; import java.nio.ByteOrder; import parquet.Preconditions; import parquet.io.api.Binary; import parquet.io.api.RecordConsumer; public class NanoTime extends Primitive { private final int julianDay; private final long timeOfDayNanos; public static NanoTime fromBinary(Binary bytes) { Preconditions.checkArgument(bytes.length() == 12, "Must be 12 bytes"); ByteBuffer buf = bytes.toByteBuffer(); buf.order(ByteOrder.LITTLE_ENDIAN); long timeOfDayNanos = buf.getLong(); int julianDay = buf.getInt(); return new NanoTime(julianDay, timeOfDayNanos); } public static NanoTime fromInt96(Int96Value int96) { ByteBuffer buf = int96.getInt96().toByteBuffer(); return new NanoTime(buf.getInt(), buf.getLong()); } public NanoTime(int julianDay, long timeOfDayNanos) { this.julianDay = julianDay; this.timeOfDayNanos = timeOfDayNanos; } public int getJulianDay() { return julianDay; } public long getTimeOfDayNanos() { return timeOfDayNanos; } public Binary toBinary() { ByteBuffer buf = ByteBuffer.allocate(12); buf.order(ByteOrder.LITTLE_ENDIAN); buf.putLong(timeOfDayNanos); 
buf.putInt(julianDay); buf.flip(); return Binary.fromByteBuffer(buf); } public Int96Value toInt96() { return new Int96Value(toBinary()); } @Override public void writeValue(RecordConsumer recordConsumer) { recordConsumer.addBinary(toBinary()); } @Override public String toString() { return "NanoTime{julianDay="+julianDay+", timeOfDayNanos="+timeOfDayNanos+"}"; } }
maurizioabba/rose
projects/compass/src/util/MPIAbstraction/alt-mpi-headers/mpich-1.2.7p1/include/mpidefs.h
/* This file contains any configuration-dependent data for mpi.h */
#ifndef MPI_DEFS
#define MPI_DEFS

/* Include Fortran definitions. The `0 == 0` guard is presumably
   configure-substituted; as written the Fortran header branch is taken. */
#if 0 == 0
#include "mpi_fortdefs.h"
#else
typedef int MPI_Fint;
#endif

/* MPI_Aint: integer type used for addresses/displacements in MPI. */
typedef int MPI_Aint;

/* MPI_STATUS_SIZE is not strictly required in C; however, it should
   match the value for Fortran */
#define MPI_STATUS_SIZE 4

/* Status object.  It is the only user-visible MPI data-structure
   The "count" field is PRIVATE; use MPI_Get_count to access it. */
typedef struct {
    int count;
    int MPI_SOURCE;
    int MPI_TAG;
    int MPI_ERROR;
#if (MPI_STATUS_SIZE > 4)
    int extra[MPI_STATUS_SIZE - 4];
#endif
} MPI_Status;

/* Eventually, these should be integrated into mpi.h by using an
   mpi.h.in file */
/* If const is not supported, define HAVE_NO_C_CONST here
   (disabled in this configuration — the `#if 0` never fires). */
#if 0 && !defined(HAVE_NO_C_CONST)
#define HAVE_NO_C_CONST 1
#endif
/* If stdargs supported, define USE_STDARG here */
#if 1 && !defined(USE_STDARG)
#define USE_STDARG 1
#endif

#if 0
# include "mpid_defs.h"
#endif

/* ROMIO supplies the MPI-IO declarations pulled in via mpio.h. */
#define ROMIO 1
#if ROMIO == 1
#include "mpio.h"
#endif

#endif
TaylorNBurrows/Trove_Registry
client/src/components/Main/index.js
import React from 'react';
import { makeStyles } from '@material-ui/core/styles';

// Material-UI style hook for the main content area.
// NOTE(review): the nested `layout` key inside `content` is not a standard
// CSS property and looks like it is silently ignored by JSS — confirm
// whether the marginTop was meant to sit directly on `content`.
const useStyles = makeStyles(theme => ({
    content: {
        flexGrow: 1,
        layout: {
        marginTop:'100px',
        }
    },
}));

// Layout wrapper that renders page children inside a styled <main> element.
const Main = ({ children }) => {
    const classes = useStyles();

    return (
        <main className={classes.content}>
            {children}
        </main>
    )
}

export default Main;
lihongan/external-dns
vendor/github.com/nesv/go-dynect/dynect/dsfs.go
<filename>vendor/github.com/nesv/go-dynect/dynect/dsfs.go package dynect // DSFSResponse is used for holding the data returned by a call to // "https://api.dynect.net/REST/DSF/" with 'detail: Y'. type AllDSFDetailedResponse struct { ResponseBlock Data []DSFService `json:"data"` } // DSFResponse is used for holding the data returned by a call to // "https://api.dynect.net/REST/DSF/SERVICE_ID". type DSFResponse struct { ResponseBlock Data DSFService `json:"data"` } // Type DSFService is used as a nested struct, which holds the data for a // DSF Service returned by a call to "https://api.dynect.net/REST/DSF/SERVICE_ID". type DSFService struct { ID string `json:"service_id"` Label string `json:"label"` Active string `json:"active"` TTL string `json:"ttl"` PendingChange string `json:"pending_change"` Notifiers []Notifier `json:"notifiers"` Nodes []DSFNode `json:"nodes"` Rulesets []DSFRuleset `json:"rulesets"` } type DSFRuleset struct { ID string `json:"dsf_ruleset_id` Label string `json:"label"` CriteriaType string `json:"criteria_type"` Criteria interface{} `json:"criteria"` Ordering string `json:"ordering"` Eligible string `json:"eligible"` PendingChange string `json:"pending_change"` ResponsePools []DSFResponsePool `json:"response_pools"` } type DSFResponsePool struct { ID string `json:"dsf_response_pool_id"` Label string `json:"label"` Automation string `json:"automation"` CoreSetCount string `json:"core_set_count"` Eligible string `json:"eligible"` PendingChange string `json:"pending_change"` RsChains []DSFRecordSetChain `json:"rs_chains"` Rulesets []DSFRuleset `json:"rulesets"` Status string `json:"status"` LastMonitored string `json:"last_monitored"` Notifier string `json:"notifier"` } type DSFRecordSetChain struct { ID string `json:"dsf_record_set_failover_chain_id"` Status string `json:"status"` Core string `json:"core"` Label string `json:"label"` DSFResponsePoolID string `json:"dsf_response_pool_id"` DSFServiceID string `json:"service_id"` PendingChange 
string `json:"pending_change"` DSFRecordSets []DSFRecordSet `json:"record_sets"` } type DSFRecordSet struct { Status string `json:"status"` Eligible string `json:"eligible"` ID string `json:"dsf_record_set_id"` MonitorID string `json:"dsf_monitor_id"` Label string `json:"label"` TroubleCount string `json:"trouble_count"` Records []DSFRecord `json:"records"` FailCount string `json:"fail_count"` TorpidityMax string `json:"torpidity_max"` TTLDerived string `json:"ttl_derived"` LastMonitored string `json:"last_monitored"` TTL string `json:"ttl"` ServiceID string `json:"service_id"` ServeCount string `json:"serve_count"` Automation string `json:"automation"` PendingChange string `json:"pending_change"` } type DSFRecord struct { Status string `json:"status"` Endpoints []string `json:"endpoints"` RDataClass string `json:"rdata_class"` Weight int `json:"weight"` Eligible string `json:"eligible"` ID string `json:"dsf_record_id"` DSFRecordSetID string `json:"dsf_record_set_id"` //RData interface{} `json:"rdata"` EndpointUpCount int `json:"endpoint_up_count"` Label string `json:"label"` MasterLine string `json:"master_line"` Torpidity int `json:"torpidity"` LastMonitored int `json:"last_monitored"` TTL string `json:"ttl"` DSFServiceID string `json:"service_id"` PendingChange string `json:"pending_change"` Automation string `json:"automation"` ReponseTime int `json:"response_time"` Publish string `json:"publish",omit_empty` } type DSFNode struct { Zone string `json:"zone"` FQDN string `json:"fqdn"` } type Notifier struct { ID int `json:"notifier_id"` Label string `json:"label"` Recipients string `json:"recipients"` Active string `json:"active"` }
KhosroPakmanesh/EquityMarketSoftwareEcosystem
project/src/main/java/emSeco/custodianUnit/infrastructure/services/databases/custodianRepositories/implementations/repositories/AllocationDetailRepository.java
package emSeco.custodianUnit.infrastructure.services.databases.custodianRepositories.implementations.repositories; import emSeco.custodianUnit.core.entities.allocationDetail.AllocationDetail; import emSeco.custodianUnit.core.services.infrastructureServices.databases.custodianUnitRepositories.interfaces.repositories.IAllocationDetailRepository; import java.util.ArrayList; import java.util.List; import java.util.UUID; public class AllocationDetailRepository implements IAllocationDetailRepository { private final List<AllocationDetail> allocationDetails; public AllocationDetailRepository() { allocationDetails = new ArrayList<>(); } @Override public void add(List<AllocationDetail> allocationDetail) { this.allocationDetails.addAll(allocationDetail); } @Override public AllocationDetail get(UUID allocationDetailId) { return allocationDetails.stream(). filter(allocationDetail -> allocationDetail.getAllocationDetailInformation(). getAllocationDetailId() == allocationDetailId).findAny().orElse(null); } @Override public List<AllocationDetail> get() { return allocationDetails; } }
felipexperto/hero-project-cra
src/styles/Themes/index.js
<reponame>felipexperto/hero-project-cra<filename>src/styles/Themes/index.js const colors = { white: '#fff', black: '#000', red: '#ff1510', lightred: '#fdecec', lightgreen: '#e7f6e7', transparent: 'transparent', lightestgrey: '#e4e5e9', lightgrey: '#b9b9b9', grey: '#8c8c8c', darkgrey: '#404040', }; const fonts = { family: 'Work Sans', weight: { regular: 400, medium: 500, bold: 700, }, }; const sizes = { largeDesktop: 1200, desktop: 992, tablet: 768, phone: 576, }; const zIndex = {}; const theme = { main: { colors, fonts, sizes, zIndex, }, }; export default theme;
Vyraax/VulkanLab
docs/html/structDtypeSizeCheck.js
var structDtypeSizeCheck = [ [ "dtype", "structDtypeSizeCheck.html#a44bf2c13b8fe13197a3006ce615ca361", null ], [ "name", "structDtypeSizeCheck.html#aa9a8833f0c4308d8a8d0a1559f341cec", null ], [ "size_cpp", "structDtypeSizeCheck.html#a8de36319e0d29a3be4496a6ca5c7e4ae", null ], [ "size_numpy", "structDtypeSizeCheck.html#ab95f6def6c61efe2f684ecc4d4726878", null ] ];
nicho92/mtgDesktopCompanion
src/main/java/org/magic/game/network/actions/AbstractNetworkAction.java
package org.magic.game.network.actions; import java.io.Serializable; public abstract class AbstractNetworkAction implements Serializable { /** * */ private static final long serialVersionUID = 1L; public enum ACTIONS { JOIN, REQUEST_PLAY, RESPONSE, CHANGE_DECK, SPEAK, LIST_PLAYER, CHANGE_STATUS, SHARE } private ACTIONS act; public ACTIONS getAct() { return act; } public void setAct(ACTIONS act) { this.act = act; } }
voxmedia/autotune
rails_template.rb
<reponame>voxmedia/autotune # Add gems gem 'resque', '~> 1.25.2' gem 'omniauth-github', '~> 1.1.2' gem 'foreman', '~> 0.77.0' gem 'unicorn-rails', '~> 2.2.0' gem 's3deploy', :git => 'https://github.com/ryanmark/s3deploy-ruby.git' gem 'autotune', :git => 'https://github.com/voxmedia/autotune.git' # Setup foreman file 'Procfile', <<-CODE redis: redis-server resque_worker: bundle exec rake environment resque:work QUEUE=default,low TERM_CHILD=1 rails: bundle exec unicorn_rails -p 3000 -c config/unicorn.rb CODE # Setup resque append_file 'Rakefile', "require 'resque/tasks'" application 'config.active_job.queue_adapter = :resque' initializer 'resque.rb', <<-CODE Resque.redis = ENV['REDIS_SERVER'] || 'localhost:6379' Resque.redis.namespace = ENV['REDIS_NAMESPACE'] || 'resque:AutoTune' CODE # Setup omniauth initializer initializer 'omniauth.rb', <<-CODE OmniAuth.config.logger = Rails.logger Rails.configuration.omniauth_preferred_provider = Rails.env.production? ? :github : :developer Rails.application.config.middleware.use OmniAuth::Builder do provider :developer unless Rails.env.production? provider :github, ENV['GITHUB_KEY'], ENV['GITHUB_SECRET'] end CODE initializer 'autotune.rb', <<-CODE # Be sure to restart your server when you modify this file. Autotune.configure do |conf| # For notifications conf.redis = Redis.new(:host => ENV['REDIS_SERVER']) # Where should the `FAQ` link go? 
conf.faq_url = 'http://voxmedia.helpscoutdocs.com/category/19-autotune' # Environment variables used when building blueprints conf.build_environment = { # 'AWS_ACCESS_KEY_ID' => ENV['AWS_ACCESS_KEY_ID'], # 'AWS_SECRET_ACCESS_KEY' => ENV['AWS_SECRET_ACCESS_KEY'], # 'GOOGLE_OAUTH_PERSON' => ENV['GOOGLE_OAUTH_PERSON'], # 'GOOGLE_OAUTH_ISSUER' => ENV['GOOGLE_OAUTH_ISSUER'], # 'GOOGLE_OAUTH_KEYFILE' => ENV['GOOGLE_OAUTH_KEYFILE'], 'ENV' => Rails.env } # These are the environment variables used during git operations conf.setup_environment = { 'GIT_HTTP_USERNAME' => ENV['GIT_HTTP_USERNAME'], 'GIT_HTTP_PASSWORD' => ENV['GIT_HTTP_PASSWORD'], 'GIT_ASKPASS' => Rails.configuration.autotune.git_askpass, 'GIT_PRIVATE_KEY' => ENV['GIT_PRIVATE_KEY'], 'GIT_SSH' => Rails.configuration.autotune.git_ssh, 'ENV' => Rails.env } # Theme meta data conf.theme_meta_data = { 'colors' => { 'primary_color' => { 'friendly_name' => 'Primary color', 'helper_text' => 'Dominant color for the theme' }, 'secondary_color' => { 'friendly_name' => 'Secondary color', 'helper_text' => 'Secondary color for the theme' }, 'button_bg_color' => { 'friendly_name' => 'Button background color', 'helper_text' => 'Color for buttons' }, 'button_bg_color_hover' => { 'friendly_name' => 'Button background hover color', 'helper_text' => 'Color for hover state of buttons' }, 'button_font_color' => { 'friendly_name' => 'Button font color', 'helper_text' => 'Color for text on buttons' } }, 'social' => { 'twitter_handle' => { 'friendly_name' => '<NAME>', 'helper_text' => 'Used for via @ text for shares' } } } # Generic theme data conf.generic_theme = { 'colors' => { 'primary_color' => ' #444444', 'secondary_color' => ' #444444', 'button_bg_color' => ' #444444', 'button_bg_color_hover' => 'darken($button-bg-color, 4%)', 'button_font_color' => 'white' }, 'fonts' => { 'font_css' => '', 'body_font_family' => 'Georgia Regular, serif', 'header_font_family' => 'Georgia Bold, serif', 'button_font_family' => 'Georgia Regular, serif', 
'header_font_weight' => 700, 'button_font_weight' => 'normal' }, 'social' => { 'twitter_handle' => 'voxmediainc' } } end # ------------------------------- # Deployment # ------------------------------- # Autotune has deployment targets; preview, publish and media. New projects # are always deployed to the preview target. Projects are deployed to the # publish target when a user clicks the publish button. Media is used to store # thumbnails and other things. # # Deployment settings for production # if Rails.env == 'production' # Autotune.deployment( # :preview, # :connect => 's3://apps.newsorg.com/at-preview', # :base_url => '//apps.newsorg.com/at-preview' # ) # Autotune.deployment( # :publish, # :connect => 's3://apps.newsorg.com/at', # :base_url => '//apps.newsorg.com/at' # ) # Autotune.deployment( # :media, # :connect => 's3://apps.newsorg.com/at-media', # :base_url => 'https://apps.newsorg.com/at-media' # ) # # Deployment settings for staging # elsif Rails.env == 'staging' # Autotune.deployment( # :preview, # :connect => 's3://test.newsorg.com/at-preview', # :base_url => '//test.newsorg.com/at-preview' # ) # Autotune.deployment( # :publish, # :connect => 's3://test.newsorg.com/at', # :base_url => '//test.newsorg.com/at' # ) # Autotune.deployment( # :media, # :connect => 's3://test.newsorg.com/at-media', # :base_url => 'http://test.newsorg.com/at-media' # ) # end # ------------------------------- # Authentication # ------------------------------- # Auth is handled by a configurable callback you can define here. The # callback is passed an omniauth object and can return different things # depending on how you want auth to work. See the Wiki. 
Autotune.config.verify_omniauth = lambda do |omniauth| Rails.logger.debug omniauth # give this user complete access return [:superuser] # or return true # refuse access to a user # return false # give designer access # return [:designer] # give editor access # return [:editor] # give author access # return [:author] # give designer access to specific themes # return :designer => ['My newsorg'] # give author access to specific themes # return :author => ['My newsorg'] # give editor access to specific themes # return :editor => ['My newsorg', 'Generic'] end # ------------------------------- # Theme data customization # ------------------------------- # Getting data for themes is defined as a callback here that you can customize # It is recommended that you merge the final theme data with generic theme to # make sure that all theme variables are available in all themes Autotune.config.get_theme_data = lambda do |theme| Autotune.config.generic_theme end CODE file 'config/unicorn.rb', <<-CODE worker_processes 6 timeout 90 CODE file 'config/theme_map.yml', <<-CODE --- - name: Generic theme: generic CODE # add engine routes route "mount Autotune::Engine => '/'" # disable magic run 'rm config/initializers/wrap_parameters.rb' say "About to download stuff. It'll be a minute." after_bundle do run 'bundle exec rake autotune:install:migrations' run 'bundle exec rake db:migrate' say <<-SAY ======================================================= Your new Autotune application is now ready to rock! cd #{app_path} bundle exec foreman start ======================================================= SAY end
chibicode/factfulness-source.chibicode.com
src/sources/146-graph-battle-deaths.js
import React from 'react' import * as H from '../components/h' export default () => ( <> <p> 戦争の犠牲者数は <H.A href="http://www.correlatesofwar.org/data-sets/COW-war."> Correlates of War Project </H.A> 、<H.A href="http://gapm.io/xnpgfred">Gleditsch</H.A>、 <H.A href="http://ucdp.uu.se/downloads">UCDP[1]</H.A>、 <H.A href="http://gapm.io/xpriod">PRIO</H.A>を参考にした。 <H.FB pageNumber={365}> これらの戦争の犠牲者数は、戦いで亡くなった兵士と一般人の死を含むが、餓死者など戦いの外で亡くなった人は含まれない。 </H.FB>{' '} また、過去の戦争の犠牲者のデータを可視化した『 <H.A href="http://ja.fallen.io/ww2/">Fallen</H.A> 』というインタラクティブ・ドキュメンタリーもおすすめしたい。ほかにも、1990年以降の犠牲者を比較できるツールは <H.A href="http://ucdp.uu.se/">こちら</H.A>。 </p> <p> 戦争の犠牲者数を数えるのは難しい。戦場は、正確なデータを集めるのが最も大変な場所のひとつだ。上記の調査は公式発表とメディアの取材による数字を利用している。だが、これらの数字に異を唱える研究者もいる。 </p> <p> <H.A href="https://www.bmj.com/content/336/7659/1482"> ObermeyerとMurrayは2008年に発表した論文 </H.A> で、計測方法を変えると、戦争の犠牲者数は増えていると論じた。その計測方法とは、戦争が起きた地域で調査を行い、家族のうち何人が戦争の犠牲になったかを数えるというもの(Sibling Methodと呼ばれる)。論文では、「ベトナム戦争以降、戦争の犠牲者数が減っているとは言えない」という結論が出された。 </p> <p> しかし、戦争のような極端な出来事の統計を取る際、母集団から適切な標本を抽出するのは難しい。たくさんの人が犠牲になった家族ばかりを調査したり、その逆ばかりが調査対象になった場合、全体の犠牲者数の推定に大きな誤差が生まれる。過去の戦争に関する新しい一次情報が出てくることは少ないため、誤差がどれだけあるかも予想がつきにくい。 </p> <p> <H.A href="https://journals.sagepub.com/doi/abs/10.1177/0022002709346253"> Spagat </H.A> らが2009年に発表した論文によると、前述した調査にはやはりバイアスがかかっており、各年代ごとに犠牲者が過剰に、または過少に計上されていたことが判明した。また、 <H.A href="https://journals.sagepub.com/doi/abs/10.1177/0022002712459709"> LacinaとGleditschが2012年に発表した論文 </H.A> によると、どんなに悪く見積もったとしても(たとえば、あえて過去の戦争の最も少ない犠牲者数の見積もりと、最近の戦争の最も多い犠牲者数の見積もりを使うなど)、戦争の犠牲者数は減っている。 </p> <H.BlockAddition> <p> <H.BlockAdditionHeader /> 原文では最後の段落にて、Spagatによる主張をLacinaとGleditschによる主張と混同していたので、訳文では修正した。また、 <H.A href="https://link.springer.com/chapter/10.1007/978-3-319-03820-9_10"> Gleditschが2015年に書いたこちらの記事も参考になった </H.A> 。 </p> </H.BlockAddition> </> )
jizillon/hiphop-php
src/system/gen/cls/RuntimeException.h
/* +----------------------------------------------------------------------+ | HipHop for PHP | +----------------------------------------------------------------------+ | Copyright (c) 2010 Facebook, Inc. (http://www.facebook.com) | +----------------------------------------------------------------------+ | This source file is subject to version 3.01 of the PHP license, | | that is bundled with this package in the file LICENSE, and is | | available through the world-wide-web at the following url: | | http://www.php.net/license/3_01.txt | | If you did not receive a copy of the PHP license and are unable to | | obtain it through the world-wide-web, please send a note to | | <EMAIL> so we can mail you a copy immediately. | +----------------------------------------------------------------------+ */ // @generated by HipHop Compiler #ifndef __GENERATED_cls_RuntimeException_h10646350__ #define __GENERATED_cls_RuntimeException_h10646350__ #include <cls/RuntimeException.fw.h> #include <cls/Exception.h> namespace HPHP { /////////////////////////////////////////////////////////////////////////////// /* SRC: classes/exception.php line 220 */ FORWARD_DECLARE_CLASS(RuntimeException); class c_RuntimeException : public c_Exception { public: // Properties // Class Map virtual bool o_instanceof(CStrRef s) const; DECLARE_CLASS_COMMON(RuntimeException, RuntimeException) DECLARE_INVOKE_EX(RuntimeException, RuntimeException, Exception) // DECLARE_STATIC_PROP_OPS public: #define OMIT_JUMP_TABLE_CLASS_STATIC_GETINIT_RuntimeException 1 #define OMIT_JUMP_TABLE_CLASS_STATIC_GET_RuntimeException 1 #define OMIT_JUMP_TABLE_CLASS_STATIC_LVAL_RuntimeException 1 #define OMIT_JUMP_TABLE_CLASS_CONSTANT_RuntimeException 1 // DECLARE_INSTANCE_PROP_OPS public: #define OMIT_JUMP_TABLE_CLASS_GETARRAY_RuntimeException 1 #define OMIT_JUMP_TABLE_CLASS_SETARRAY_RuntimeException 1 #define OMIT_JUMP_TABLE_CLASS_realProp_RuntimeException 1 #define OMIT_JUMP_TABLE_CLASS_realProp_PRIVATE_RuntimeException 1 // 
DECLARE_INSTANCE_PUBLIC_PROP_OPS public: #define OMIT_JUMP_TABLE_CLASS_realProp_PUBLIC_RuntimeException 1 // DECLARE_COMMON_INVOKE static bool os_get_call_info(MethodCallPackage &mcp, int64 hash = -1); #define OMIT_JUMP_TABLE_CLASS_STATIC_INVOKE_RuntimeException 1 virtual bool o_get_call_info(MethodCallPackage &mcp, int64 hash = -1); public: DECLARE_INVOKES_FROM_EVAL void init(); }; extern struct ObjectStaticCallbacks cw_RuntimeException; Object co_RuntimeException(CArrRef params, bool init = true); Object coo_RuntimeException(); /////////////////////////////////////////////////////////////////////////////// } #endif // __GENERATED_cls_RuntimeException_h10646350__
manusa/yakc
quickstarts/quarkus-dashboard/src/main/frontend/src/replicasets/selectors.js
<gh_stars>10-100 /* * Copyright 2020 <NAME> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ const selectors = {}; selectors.statusReplicas = replicaSet => replicaSet?.status?.replicas ?? 0; selectors.statusReadyReplicas = replicaSet => replicaSet?.status?.readyReplicas ?? 0; selectors.isReady = replicaSet => selectors.statusReplicas(replicaSet) === selectors.statusReadyReplicas(replicaSet); selectors.specReplicas = replicaSet => replicaSet?.spec?.replicas ?? 0; // Selectors for array of ReplicaSets selectors.readyCount = replicaSets => replicaSets.reduce( (count, replicaSet) => selectors.isReady(replicaSet) ? count + 1 : count, 0 ); export default selectors;
CDCgov/SDP-Vocabulary-Service
app/views/section_nested_items/show.json.jbuilder
<reponame>CDCgov/SDP-Vocabulary-Service json.partial! 'section_nested_items/section_nested_item', section_nested_item: @section_nested_item
chattertechno/sweden
components/Cards.js
<reponame>chattertechno/sweden<filename>components/Cards.js // @flow import * as React from 'react' import { BackgroundContext } from '../helpers/context' type Props = { children: React.Node, background: string } const Cards = (props: Props) => ( <BackgroundContext.Provider value={props.background}> <div className='cards'> {props.children} <style jsx>{` .cards { display: flex; flex-direction: column; width: 100%; height: 100%; justify-content: space-evenly; } `} </style> </div> </BackgroundContext.Provider> ) export default Cards
IllusionMan1212/ShockPair
app/src/main/c/bluez-android-3.35/utils/audio/gstrtpsbcpay.c
<reponame>IllusionMan1212/ShockPair /* * * BlueZ - Bluetooth protocol stack for Linux * * Copyright (C) 2004-2008 <NAME> <<EMAIL>> * * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA * */ #ifdef HAVE_CONFIG_H #include <config.h> #endif #include "gstrtpsbcpay.h" #include <math.h> #include <string.h> #define RTP_SBC_PAYLOAD_HEADER_SIZE 1 #define DEFAULT_MIN_FRAMES 0 #define RTP_SBC_HEADER_TOTAL (12 + RTP_SBC_PAYLOAD_HEADER_SIZE) #if __BYTE_ORDER == __LITTLE_ENDIAN struct rtp_payload { guint8 frame_count:4; guint8 rfa0:1; guint8 is_last_fragment:1; guint8 is_first_fragment:1; guint8 is_fragmented:1; } __attribute__ ((packed)); #elif __BYTE_ORDER == __BIG_ENDIAN struct rtp_payload { guint8 is_fragmented:1; guint8 is_first_fragment:1; guint8 is_last_fragment:1; guint8 rfa0:1; guint8 frame_count:4; } __attribute__ ((packed)); #else #error "Unknown byte order" #endif enum { PROP_0, PROP_MIN_FRAMES }; GST_DEBUG_CATEGORY_STATIC(gst_rtp_sbc_pay_debug); #define GST_CAT_DEFAULT gst_rtp_sbc_pay_debug GST_BOILERPLATE(GstRtpSBCPay, gst_rtp_sbc_pay, GstBaseRTPPayload, GST_TYPE_BASE_RTP_PAYLOAD); static const GstElementDetails gst_rtp_sbc_pay_details = GST_ELEMENT_DETAILS("RTP packet payloader", "Codec/Payloader/Network", "Payload SBC audio as RTP packets", "<NAME> " "<<EMAIL>>"); static GstStaticPadTemplate 
gst_rtp_sbc_pay_sink_factory = GST_STATIC_PAD_TEMPLATE("sink", GST_PAD_SINK, GST_PAD_ALWAYS, GST_STATIC_CAPS("audio/x-sbc, " "rate = (int) { 16000, 32000, 44100, 48000 }, " "channels = (int) [ 1, 2 ], " "mode = (string) { \"mono\", \"dual\", \"stereo\", \"joint\" }, " "blocks = (int) { 4, 8, 12, 16 }, " "subbands = (int) { 4, 8 }, " "allocation = (string) { \"snr\", \"loudness\" }, " "bitpool = (int) [ 2, 64 ]") ); static GstStaticPadTemplate gst_rtp_sbc_pay_src_factory = GST_STATIC_PAD_TEMPLATE("src", GST_PAD_SRC, GST_PAD_ALWAYS, GST_STATIC_CAPS( "application/x-rtp, " "media = (string) \"audio\"," "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", " "clock-rate = (int) { 16000, 32000, 44100, 48000 }," "encoding-name = (string) \"SBC\"") ); static void gst_rtp_sbc_pay_set_property (GObject * object, guint prop_id, const GValue * value, GParamSpec * pspec); static void gst_rtp_sbc_pay_get_property (GObject * object, guint prop_id, GValue * value, GParamSpec * pspec); static gint gst_rtp_sbc_pay_get_frame_len(gint subbands, gint channels, gint blocks, gint bitpool, const gchar* channel_mode) { gint len; gint join; len = 4 + (4 * subbands * channels)/8; if (strcmp(channel_mode, "mono") == 0 || strcmp(channel_mode, "dual") == 0) len += ((blocks * channels * bitpool)+7) / 8; else { join = strcmp(channel_mode, "joint") == 0 ? 
1 : 0; len += ((join * subbands + blocks * bitpool)+7)/8; } return len; } static gboolean gst_rtp_sbc_pay_set_caps(GstBaseRTPPayload *payload, GstCaps *caps) { GstRtpSBCPay *sbcpay; gint rate, subbands, channels, blocks, bitpool; gint frame_len; const gchar* channel_mode; GstStructure *structure; sbcpay = GST_RTP_SBC_PAY(payload); structure = gst_caps_get_structure(caps, 0); if (!gst_structure_get_int(structure, "rate", &rate)) return FALSE; if (!gst_structure_get_int(structure, "channels", &channels)) return FALSE; if (!gst_structure_get_int(structure, "blocks", &blocks)) return FALSE; if (!gst_structure_get_int(structure, "bitpool", &bitpool)) return FALSE; if (!gst_structure_get_int(structure, "subbands", &subbands)) return FALSE; channel_mode = gst_structure_get_string(structure, "mode"); if (!channel_mode) return FALSE; frame_len = gst_rtp_sbc_pay_get_frame_len(subbands, channels, blocks, bitpool, channel_mode); sbcpay->frame_length = frame_len; gst_basertppayload_set_options (payload, "audio", TRUE, "SBC", rate); GST_DEBUG_OBJECT(payload, "calculated frame length: %d ", frame_len); return gst_basertppayload_set_outcaps (payload, NULL); } static GstFlowReturn gst_rtp_sbc_pay_flush_buffers(GstRtpSBCPay *sbcpay) { guint available; guint max_payload; GstBuffer* outbuf; guint8 *payload_data; guint frame_count; guint payload_length; struct rtp_payload *payload; if (sbcpay->frame_length == 0) { GST_ERROR_OBJECT(sbcpay, "Frame length is 0"); return GST_FLOW_ERROR; } available = gst_adapter_available(sbcpay->adapter); max_payload = gst_rtp_buffer_calc_payload_len( GST_BASE_RTP_PAYLOAD_MTU(sbcpay) - RTP_SBC_PAYLOAD_HEADER_SIZE, 0, 0); max_payload = MIN(max_payload, available); frame_count = max_payload / sbcpay->frame_length; payload_length = frame_count * sbcpay->frame_length; if (payload_length == 0) /* Nothing to send */ return GST_FLOW_OK; outbuf = gst_rtp_buffer_new_allocate(payload_length + RTP_SBC_PAYLOAD_HEADER_SIZE, 0, 0); 
gst_rtp_buffer_set_payload_type(outbuf, GST_BASE_RTP_PAYLOAD_PT(sbcpay)); payload_data = gst_rtp_buffer_get_payload(outbuf); payload = (struct rtp_payload*) payload_data; memset(payload, 0, sizeof(struct rtp_payload)); payload->frame_count = frame_count; gst_adapter_copy(sbcpay->adapter, payload_data + RTP_SBC_PAYLOAD_HEADER_SIZE, 0, payload_length); gst_adapter_flush(sbcpay->adapter, payload_length); GST_BUFFER_TIMESTAMP(outbuf) = sbcpay->timestamp; GST_DEBUG_OBJECT (sbcpay, "Pushing %d bytes", payload_length); return gst_basertppayload_push(GST_BASE_RTP_PAYLOAD(sbcpay), outbuf); } static GstFlowReturn gst_rtp_sbc_pay_handle_buffer(GstBaseRTPPayload *payload, GstBuffer *buffer) { GstRtpSBCPay *sbcpay; guint available; /* FIXME check for negotiation */ sbcpay = GST_RTP_SBC_PAY(payload); sbcpay->timestamp = GST_BUFFER_TIMESTAMP(buffer); gst_adapter_push(sbcpay->adapter, buffer); available = gst_adapter_available(sbcpay->adapter); if (available + RTP_SBC_HEADER_TOTAL >= GST_BASE_RTP_PAYLOAD_MTU(sbcpay) || (sbcpay->min_frames != -1 && available > (sbcpay->min_frames * sbcpay->frame_length))) return gst_rtp_sbc_pay_flush_buffers(sbcpay); return GST_FLOW_OK; } static gboolean gst_rtp_sbc_pay_handle_event(GstPad *pad, GstEvent *event) { GstRtpSBCPay *sbcpay = GST_RTP_SBC_PAY(GST_PAD_PARENT(pad)); switch (GST_EVENT_TYPE(event)) { case GST_EVENT_EOS: gst_rtp_sbc_pay_flush_buffers(sbcpay); break; default: break; } return FALSE; } static void gst_rtp_sbc_pay_base_init(gpointer g_class) { GstElementClass *element_class = GST_ELEMENT_CLASS(g_class); gst_element_class_add_pad_template(element_class, gst_static_pad_template_get(&gst_rtp_sbc_pay_sink_factory)); gst_element_class_add_pad_template(element_class, gst_static_pad_template_get(&gst_rtp_sbc_pay_src_factory)); gst_element_class_set_details(element_class, &gst_rtp_sbc_pay_details); } static void gst_rtp_sbc_pay_finalize(GObject *object) { GstRtpSBCPay *sbcpay = GST_RTP_SBC_PAY(object); g_object_unref (sbcpay->adapter); 
GST_CALL_PARENT (G_OBJECT_CLASS, finalize, (object)); } static void gst_rtp_sbc_pay_class_init(GstRtpSBCPayClass *klass) { GObjectClass *gobject_class; GstBaseRTPPayloadClass *payload_class = GST_BASE_RTP_PAYLOAD_CLASS(klass); gobject_class = G_OBJECT_CLASS(klass); parent_class = g_type_class_peek_parent(klass); gobject_class->finalize = GST_DEBUG_FUNCPTR(gst_rtp_sbc_pay_finalize); gobject_class->set_property = GST_DEBUG_FUNCPTR( gst_rtp_sbc_pay_set_property); gobject_class->get_property = GST_DEBUG_FUNCPTR( gst_rtp_sbc_pay_get_property); payload_class->set_caps = GST_DEBUG_FUNCPTR(gst_rtp_sbc_pay_set_caps); payload_class->handle_buffer = GST_DEBUG_FUNCPTR( gst_rtp_sbc_pay_handle_buffer); payload_class->handle_event = GST_DEBUG_FUNCPTR( gst_rtp_sbc_pay_handle_event); /* properties */ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_MIN_FRAMES, g_param_spec_int ("min-frames", "minimum frame number", "Minimum quantity of frames to send in one packet " "(-1 for maximum allowed by the mtu)", -1, G_MAXINT, DEFAULT_MIN_FRAMES, G_PARAM_READWRITE)); GST_DEBUG_CATEGORY_INIT(gst_rtp_sbc_pay_debug, "rtpsbcpay", 0, "RTP SBC payloader"); } static void gst_rtp_sbc_pay_set_property (GObject * object, guint prop_id, const GValue * value, GParamSpec * pspec) { GstRtpSBCPay *sbcpay; sbcpay = GST_RTP_SBC_PAY (object); switch (prop_id) { case PROP_MIN_FRAMES: sbcpay->min_frames = g_value_get_int(value); break; default: G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); break; } } static void gst_rtp_sbc_pay_get_property (GObject * object, guint prop_id, GValue * value, GParamSpec * pspec) { GstRtpSBCPay *sbcpay; sbcpay = GST_RTP_SBC_PAY (object); switch (prop_id) { case PROP_MIN_FRAMES: g_value_set_int(value, sbcpay->min_frames); break; default: G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); break; } } static void gst_rtp_sbc_pay_init(GstRtpSBCPay *self, GstRtpSBCPayClass *klass) { self->adapter = gst_adapter_new(); self->frame_length = 0; 
self->timestamp = 0; self->min_frames = DEFAULT_MIN_FRAMES; } gboolean gst_rtp_sbc_pay_plugin_init (GstPlugin * plugin) { return gst_element_register (plugin, "rtpsbcpay", GST_RANK_NONE, GST_TYPE_RTP_SBC_PAY); }
ianhomer/odin
odin-core/src/main/java/com/purplepip/odin/midix/MidiDevice.java
<filename>odin-core/src/main/java/com/purplepip/odin/midix/MidiDevice.java /* * Copyright (c) 2017 the original author or authors. All Rights Reserved * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.purplepip.odin.midix; import com.purplepip.odin.audio.AudioSystemWrapper; import com.purplepip.odin.clock.MicrosecondPositionProvider; import com.purplepip.odin.clock.PerformanceTimeConverter; import com.purplepip.odin.common.OdinException; import com.purplepip.odin.devices.AbstractDevice; import com.purplepip.odin.devices.DeviceUnavailableException; import javax.sound.midi.MidiMessage; import javax.sound.midi.MidiUnavailableException; import javax.sound.midi.Receiver; import javax.sound.midi.Synthesizer; import javax.sound.midi.Transmitter; import javax.validation.constraints.NotNull; import lombok.extern.slf4j.Slf4j; @Slf4j public class MidiDevice extends AbstractDevice implements MicrosecondPositionProvider, MidiMessageReceiver { private final javax.sound.midi.MidiDevice device; private final MidiHandle handle; private final PerformanceTimeConverter timeConverter; MidiDevice(@NotNull MidiHandle handle, @NotNull javax.sound.midi.MidiDevice device) throws DeviceUnavailableException { this.device = device; this.handle = handle; timeConverter = new PerformanceTimeConverter(this); open(); initialise(); } public MidiHandle getHandle() { return handle; } @Override public boolean isOpen() { return device.isOpen(); } @Override public boolean isSource() { return 
device.getMaxTransmitters() != 0; } @Override public boolean isSink() { return device.getMaxReceivers() != 0; } /* * TODO : Try to remove external access of MidiDevice, we want to encapsulate all internals * by defining an appropriate device interface. I've marked it as deprecated since it's * only in place to help with a transition. */ @Deprecated public javax.sound.midi.MidiDevice getMidiDevice() { return device; } public Transmitter getTransmitter() throws MidiUnavailableException { return device.getTransmitter(); } @Override protected void initialise() { setProperty("microsecond.position", device.getMicrosecondPosition()); setProperty("receivers.max", device.getMaxReceivers()); setProperty("transmitters.max", device.getMaxTransmitters()); if (device instanceof Synthesizer) { setProperty("synthesizer.latency", ((Synthesizer) device).getLatency() / 1000); } if (device.getMaxReceivers() != 0) { try { setProperty("receiver.default.name", device.getReceiver().getClass().getName()); } catch (MidiUnavailableException e) { setProperty("receiver.default.exception", e.getMessage()); } if (device.getReceivers().size() > 1) { int i = 0; for (Receiver receiver : device.getReceivers()) { setProperty("receiver", i, "name", receiver.getClass().getName()); } } } if (device.getMaxTransmitters() != 0) { try { setProperty("transmitter.default.name", device.getTransmitter().getClass().getName()); } catch (MidiUnavailableException e) { setProperty("transmitter.default.exception", e.getMessage()); } if (device.getTransmitters().size() > 1) { int i = 0; for (Transmitter transmitter : device.getTransmitters()) { setProperty("transmitter", i, "name", transmitter.getClass().getName()); } } } } @Override protected void deviceClose() { device.close(); } @Override public String getName() { return getHandle().getName(); } @Override public String getSummary() { StringBuilder sb = new StringBuilder(); sb.append("μs position = ").append(device.getMicrosecondPosition()); if (device instanceof 
Synthesizer) { sb.append(" - synthesizer latency = ") .append(getProperty("synthesizer.latency")).append("ms"); } if (device.getMaxReceivers() != 0) { sb.append("\n - receivers : max = ").append(device.getMaxReceivers()); try (Receiver receiver = device.getReceiver()) { if (receiver != null) { sb.append(" : default = ").append(getClassNameOrNullMessage(receiver)); } } catch (MidiUnavailableException e) { sb.append(" : ").append(e.getMessage()); } } device.getReceivers().forEach(receiver -> sb.append(" ; active receiver = ").append(getClassNameOrNullMessage(receiver)) ); if (device.getMaxTransmitters() != 0) { sb.append("\n - transmitters : max = ") .append(device.getMaxTransmitters()); try (Transmitter transmitter = device.getTransmitter()) { if (transmitter != null) { sb.append(" : default = ") .append(getClassNameOrNullMessage(transmitter)); } } catch (MidiUnavailableException e) { sb.append(" : ").append(e.getMessage()); } } device.getTransmitters().forEach(transmitter -> sb.append(" ; transmitter = ") .append(getClassNameOrNullMessage(transmitter)) ); return sb.toString(); } private String getClassNameOrNullMessage(Object o) { return o == null ? "(null)" : o.getClass().getName(); } @Override public String toString() { return device.getClass().getName() + " " + device.getDeviceInfo().getName(); } protected void open() throws DeviceUnavailableException { try { device.open(); } catch (MidiUnavailableException e) { new AudioSystemWrapper().dump(true); throw new DeviceUnavailableException("Cannot open device " + device, e); } } @Override public long getMicroseconds() { return device.getMicrosecondPosition(); } /** * Send MIDI message. 
* * @param midiMessage MIDI message * @param time performance time */ @Override public boolean send(MidiMessage midiMessage, long time) throws OdinException { try { if (isOpen()) { device.getReceiver().send(midiMessage, timeConverter.convert(time)); return true; } LOG.warn("Message {} sent to closed device", midiMessage); return false; } catch (MidiUnavailableException e) { throw new OdinException("Cannot handle MIDI message for " + midiMessage, e); } } public void onPerformanceStart() { timeConverter.onPerformanceStart(); } public void onPerformanceStop() { timeConverter.onPerformanceStop(); } }
RafaelGomides/VemPython
exercicios/__init__.py
# Projeto: VemPython/__init__.py # Autor: rafael # Data: 19/03/18 - 16:06 # Objetivo: TODO
GoodyIT/MERN-stack-donate---Bitcoin-Payment
client/util/apiCaller.js
import fetch from 'isomorphic-fetch';
import Config from '../../server/config';

// Resolve the API base URL: absolute on the server / under test, relative in the browser.
export const API_URL = (typeof window === 'undefined' || process.env.NODE_ENV === 'test')
  ? process.env.BASE_URL || (`http://localhost:${process.env.PORT || Config.port}/api`)
  : '/api';

/**
 * Performs a JSON request against an absolute endpoint.
 *
 * The bearer token is read from localStorage (key 'smartproject') unless an
 * explicit `auth` token is supplied. Note: the trailing .then converts
 * rejections into resolved values carrying the error payload, so callers must
 * inspect the result rather than rely on promise rejection.
 *
 * @param {string} endpoint absolute URL to call
 * @param {string} [method='get'] HTTP method
 * @param {Object} [body] request payload, JSON-serialized when present
 * @param {string} [auth] explicit bearer token overriding localStorage
 * @returns {Promise<Object>} parsed JSON body, or the error payload on failure
 */
export function fetchAPI(endpoint, method = 'get', body, auth) {
  let token = '';
  try {
    if (typeof window !== 'undefined') {
      const tokenData = window.localStorage.getItem('smartproject');
      token = JSON.parse(tokenData).token;
    }
  } catch (err) {
    token = '';
  }
  if (auth) {
    token = auth;
  }
  return fetch(endpoint, {
    // BUG FIX: `mode: 'no-cors'` previously sat inside `headers`, where it was
    // transmitted as a meaningless request header instead of acting as a fetch
    // option. It is removed rather than promoted to a real option, because
    // no-cors mode would strip the Authorization header and make the JSON
    // response unreadable.
    headers: {
      'content-type': 'application/json',
      Authorization: 'Bearer ' + token,
    },
    method,
    body: JSON.stringify(body),
  })
    .then(response => response.json().then(json => ({ json, response })))
    .then(({ json, response }) => {
      if (!response.ok) {
        return Promise.reject(json);
      }
      return json;
    })
    .then(
      response => response,
      error => error
    );
}

/**
 * Performs a JSON request against a path relative to API_URL.
 *
 * @param {string} endpoint path under the API base (no leading slash)
 * @param {string} [method='get'] HTTP method
 * @param {Object} [body] request payload
 * @param {string} [auth=''] explicit bearer token
 * @returns {Promise<Object>} see fetchAPI
 */
export default function callApi(endpoint, method = 'get', body, auth = '') {
  return fetchAPI(`${API_URL}/${endpoint}`, method, body, auth);
}
opentable/logstash
lib/logstash/inputs/twitter.rb
require "logstash/inputs/base"
require "logstash/namespace"
require "net/http"
require "json"
#require "net/https"

# Read events from the twitter streaming api.
class LogStash::Inputs::Twitter < LogStash::Inputs::Base

  config_name "twitter"
  milestone 1

  # Your twitter app's consumer key
  #
  # Don't know what this is? You need to create an "application"
  # on twitter, see this url: <https://dev.twitter.com/apps/new>
  config :consumer_key, :validate => :string, :required => true

  # Your twitter app's consumer secret
  #
  # If you don't have one of these, you can create one by
  # registering a new application with twitter:
  # <https://dev.twitter.com/apps/new>
  config :consumer_secret, :validate => :password, :required => true

  # Your oauth token.
  #
  # To get this, login to twitter with whatever account you want,
  # then visit <https://dev.twitter.com/apps>
  #
  # Click on your app (used with the consumer_key and consumer_secret settings)
  # Then at the bottom of the page, click 'Create my access token' which
  # will create an oauth token and secret bound to your account and that
  # application.
  config :oauth_token, :validate => :string, :required => true

  # Your oauth token secret.
  #
  # To get this, login to twitter with whatever account you want,
  # then visit <https://dev.twitter.com/apps>
  #
  # Click on your app (used with the consumer_key and consumer_secret settings)
  # Then at the bottom of the page, click 'Create my access token' which
  # will create an oauth token and secret bound to your account and that
  # application.
  config :oauth_token_secret, :validate => :password, :required => true

  # Any keywords to track in the twitter stream
  config :keywords, :validate => :array, :required => true

  public
  def register
    # This plugin is intentionally disabled: register always raises, which
    # makes everything below the raise unreachable dead code kept only as a
    # sketch of the original implementation.
    raise LogStash::ConfigurationError, "Sorry, this plugin doesn't work anymore. 
We will fix it eventually, but if you need this plugin, please file a ticket on logstash.jira.com :)"
    require "tweetstream"
    TweetStream.configure do |c|
      c.consumer_key = @consumer_key
      c.consumer_secret = @consumer_secret.value
      c.oauth_token = @oauth_token
      c.oauth_token_secret = @oauth_token_secret.value
      c.auth_method = :oauth
    end
  end

  public
  def run(queue)
    client = TweetStream::Client.new
    @logger.info("Starting twitter tracking", :keywords => @keywords)
    client.track(*@keywords) do |status|
      @logger.info? && @logger.info("Got tweet", :user => status.user.screen_name, :text => status.text)
      event = LogStash::Event.new(
        "user" => status.user.screen_name,
        "client" => status.source,
        "retweeted" => status.retweeted
      )
      event["in-reply-to"] = status.in_reply_to_status_id if status.in_reply_to_status_id
      #urls = tweet.urls.collect(&:expanded_url)
      #event["urls"] = urls if urls.size > 0
      queue << event
    end # client.track
  end # def run
end # class LogStash::Inputs::Twitter
0--key/lib
portfolio/Python/GAE/server_test/m/fn.py
import logging
from decimal import Decimal

from google.appengine.api import memcache
from google.appengine.ext import ndb

from models import Result


# MemCache and arithmetics inside
def quremo(a, b):
    """Return (quotient, integer quotient, remainder) of b / a, as strings.

    Operands are converted to Decimal, so the division is exact decimal
    arithmetic rather than binary floating point.
    """
    quotient = Decimal(b) / Decimal(a)
    integer_part = int(quotient)
    remainder = Decimal(b) % Decimal(a)
    return str(quotient), str(integer_part), str(remainder)


def get_quremo(a, b):
    """Memcache or DataStore interaction implementation with aim to avoid
    server calculations overload.

    Results are cached under the key "<a>&<b>" in memcache (60 second TTL)
    and persisted in the Result model, so repeated requests for the same
    pair avoid recomputation.
    """
    a_key = a + '&' + b  # an unique key for each pair
    # looking for MemCache value firstly:
    cached_result = memcache.get(key=a_key)
    if cached_result is not None:
        logging.info("Data was retrieved out from MemCache")
        return cached_result

    # Memcache miss: look for a persisted value before recomputing.
    # (Previously the code ran q.get() and then q.fetch(1) — two datastore
    # round-trips for the same entity; now the entity is fetched once.)
    entity = Result.query(Result.a_key == a_key).get()
    if entity is not None:  # the values are there
        calc_val = tuple(entity.a_value)
        memcache.add(key=a_key, value=calc_val, time=60)
        logging.info("Data was restored out from ndb")
    else:  # values are completely new
        calc_val = quremo(a, b)
        memcache.add(key=a_key, value=calc_val, time=60)
        record = Result()
        record.a_key, record.a_value = a_key, calc_val
        record.put()
        logging.info("Data is new and was cached successfully")
    return calc_val
synacts/digitalid-utility
circumfixes/src/main/java/net/digitalid/utility/circumfixes/Quotes.java
/* * Copyright (C) 2017 Synacts GmbH, Switzerland (<EMAIL>) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.digitalid.utility.circumfixes; import javax.annotation.Nonnull; import javax.annotation.Nullable; import net.digitalid.utility.annotations.method.Pure; import net.digitalid.utility.annotations.ownership.NonCaptured; import net.digitalid.utility.annotations.parameter.Unmodified; import net.digitalid.utility.validation.annotations.type.Immutable; /** * This class enumerates various quotation marks. */ @Immutable public enum Quotes implements Circumfix { /* -------------------------------------------------- Enumerations -------------------------------------------------- */ /** * The single quotes '\'' and '\''. */ SINGLE("'", "'"), /** * The double quotes '"' and '"'. */ DOUBLE("\"", "\""), /** * The angle quotes '«' and '»'. */ ANGLE("«", "»"), /** * The code quotes '"' and '"'. * The intention is to use the quotes * only in case the object is a string. 
*/ CODE("\"", "\""); /* -------------------------------------------------- Prefix -------------------------------------------------- */ private final @Nonnull String prefix; @Pure @Override public @Nonnull String getPrefix() { return prefix; } /* -------------------------------------------------- Suffix -------------------------------------------------- */ private final @Nonnull String suffix; @Pure @Override public @Nonnull String getSuffix() { return suffix; } /* -------------------------------------------------- Constructors -------------------------------------------------- */ private Quotes(@Nonnull String prefix, @Nonnull String suffix) { this.prefix = prefix; this.suffix = suffix; } /* -------------------------------------------------- Formatting -------------------------------------------------- */ /** * Returns the given object surrounded by the given quotes. */ @Pure public static @Nonnull String in(@Nullable Quotes quotes, @NonCaptured @Unmodified @Nullable Object object) { if (quotes == null) { return String.valueOf(object); } else if (quotes == Quotes.CODE) { return object instanceof CharSequence ? "\"" + String.valueOf(object) + "\"" : String.valueOf(object); } else { return quotes.getPrefix() + String.valueOf(object) + quotes.getSuffix(); } } /** * Returns the given object in single quotes. */ @Pure public static @Nonnull String inSingle(@Nullable Object object) { return in(Quotes.SINGLE, object); } /** * Returns the given object in double quotes. */ @Pure public static @Nonnull String inDouble(@Nullable Object object) { return in(Quotes.DOUBLE, object); } /** * Returns the given object in angle quotes. */ @Pure public static @Nonnull String inAngle(@Nullable Object object) { return in(Quotes.ANGLE, object); } /** * Returns the given object in double quotes if it is an instance of {@link CharSequence} or without quotes otherwise. */ @Pure public static @Nonnull String inCode(@Nullable Object object) { return in(Quotes.CODE, object); } }
h3trika/Adept-NASA-Space-Apps-2016-
iOS/Adept_iOS/Adept_iOS/TrainingSelectTableViewController.h
//
//  TrainingSelectTableViewController.h
//  Adept_iOS
//
//  Created by <NAME> on 3/29/16.
//  Copyright © 2016 <NAME>. All rights reserved.
//

#import <UIKit/UIKit.h>
#import "PickerTableCell.h"

// The kinds of training hardware/sources the user can select from.
typedef NS_ENUM(NSUInteger, TrainingSelectEnum) {
    MoovTraining,
    WellshellTraining,
    UniversalTraining,
    AppleWatchTraining,
    AdeptStickTraining,
    OtherTraining
};

// Table view controller through which the user configures a training session
// via picker rows (duration, difficulty, muscle group, heart-rate bounds).
@interface TrainingSelectTableViewController : UITableViewController <UIPickerViewDataSource, UIPickerViewDelegate>

// Currently selected training type.
// NOTE(review): declared as NSInteger; presumably holds TrainingSelectEnum
// values — confirm in the implementation file.
@property (nonatomic, assign) NSInteger selectedType;

// Backing data for each picker (one mutable array per configurable dimension).
@property (nonatomic, strong) NSMutableArray * pickerTimeForExerciseData;
@property (nonatomic, strong) NSMutableArray * pickerHardnessData;
@property (nonatomic, strong) NSMutableArray * pickerMuscleGroupData;
@property (nonatomic, strong) NSMutableArray * pickerMinHeartRateData;
@property (nonatomic, strong) NSMutableArray * pickerMaxHeartRateData;

// Accumulated training configuration.
// NOTE(review): key/value schema is not visible in this header — verify
// against the .m file before relying on specific keys.
@property (nonatomic, strong) NSMutableDictionary * trainingData;

// Titles for the table rows.
@property (nonatomic, strong) NSMutableArray * labelData;

// Detail label values; the getter= attribute restates the default getter name.
@property (nonatomic, strong, getter=detailsLabelData) NSMutableArray * detailsLabelData;

@end
VicRen/go-playground
di/main_test.go
<gh_stars>0 package main import ( "bytes" "io/ioutil" "net/http" "testing" ) type MockHttpClient struct { } func (m *MockHttpClient) Get(string) (*http.Response, error) { response := &http.Response{ Body: ioutil.NopCloser(bytes.NewBuffer([]byte("Test Response"))), } return response, nil } func TestSendWithValidResponse(t *testing.T) { httpClient := &MockHttpClient{} err := send(httpClient, "something") if err != nil { t.Errorf("Shouldn't have received an error with a valid MockHttpClient, got %s", err) } }
Jinksi/vars-annotation
src/main/java/org/mbari/m3/vars/annotation/ui/concepttree/TreeItemPredicate.java
package org.mbari.m3.vars.annotation.ui.concepttree;

import javafx.scene.control.TreeItem;

import java.util.function.Predicate;

/**
 * A {@link Predicate}-like functional interface for filtering values within a
 * {@link TreeItem} hierarchy: the test receives both the parent tree item and
 * the candidate value, so implementations may take tree context into account.
 *
 * @author <NAME>
 * @since 2017-05-16T11:58:00
 */
@FunctionalInterface
public interface TreeItemPredicate<T> {

    /**
     * Evaluates this predicate.
     *
     * @param parent the tree item associated with the value
     * @param value the value under test
     * @return true if the value matches
     */
    boolean test(TreeItem<T> parent, T value);

    /**
     * Adapts a plain {@link Predicate} into a TreeItemPredicate that ignores
     * the parent tree item and tests only the value.
     *
     * @param predicate the value-only predicate to wrap
     * @param <T> the value type
     * @return a TreeItemPredicate delegating to the given predicate
     */
    static <T> TreeItemPredicate<T> create(Predicate<T> predicate) {
        return (parent, value) -> predicate.test(value);
    }
}
gitclonefun/atlas
src/main/java/org/openstreetmap/atlas/streaming/resource/zip/ZipResource.java
<filename>src/main/java/org/openstreetmap/atlas/streaming/resource/zip/ZipResource.java package org.openstreetmap.atlas.streaming.resource.zip; import java.io.BufferedInputStream; import java.io.Closeable; import java.io.IOException; import java.io.InputStream; import java.util.Iterator; import java.util.zip.ZipEntry; import java.util.zip.ZipInputStream; import org.openstreetmap.atlas.exception.CoreException; import org.openstreetmap.atlas.streaming.Streams; import org.openstreetmap.atlas.streaming.resource.AbstractResource; import org.openstreetmap.atlas.streaming.resource.Resource; /** * Zipped {@link Resource} flavored wrapper using {@link ZipInputStream} * * @author matthieun */ public class ZipResource { /** * @author matthieun */ public static class ZipIterator implements Iterator<Resource>, Closeable { private final Resource source; private final ZipInputStream input; private ZipEntry nextEntry = null; private boolean doneReading = true; public ZipIterator(final Resource source) { this.source = source; this.input = new ZipInputStream(new BufferedInputStream(this.source.read())); } @Override public void close() { Streams.close(this.input); } @Override public boolean hasNext() { try { if (this.nextEntry == null) { this.nextEntry = this.input.getNextEntry(); } if (this.nextEntry == null) { close(); } return this.nextEntry != null; } catch (final IOException e) { throw new CoreException("Unable to go to next Zip Entry!", e); } } @Override public Resource next() { if (!this.doneReading) { throw new CoreException(PREMATURE_READ_ERROR_MESSAGE); } if (hasNext()) { this.doneReading = false; final Resource result = new AbstractResource() { private final String name = ZipIterator.this.nextEntry.getName(); @Override public String getName() { return this.name; } @Override protected InputStream onRead() { return new InputStream() { @Override public void close() { // Trick to make sure the resource is read fully before moving // to the next one. 
ZipIterator.this.doneReading = true; } @Override public int read() throws IOException { return ZipIterator.this.input.read(); } }; } }; this.nextEntry = null; return result; } else { return null; } } } public static final String PREMATURE_READ_ERROR_MESSAGE = "Cannot go to the next ZipEntry before the previous one has been fully read."; private final Resource source; public ZipResource(final Resource source) { this.source = source; } /** * @return The entries of the file as an {@link Iterable} of {@link Resource}s. This works only * if each resource is sequentially read. */ public Iterable<Resource> entries() { return () -> new ZipIterator(getSource()); } public String getName() { return this.source.getName(); } @Override public String toString() { return this.getName(); } protected Resource getSource() { return this.source; } }
flyzsd/java-code-snippets
ibm.jdk8/src/java/lang/management/ThreadMXBean.java
/* * Licensed Materials - Property of IBM, * Copyright IBM Corp. 2005, 2016 All Rights Reserved. */ package java.lang.management; /** * The management and monitoring interface for the threading system of the * virtual machine. * <p> * Precisely one instance of this interface will be made available to management * clients. * </p> * <p> * Accessing this <code>MXBean</code> can be done in one of three ways. <br/> * <ol> * <li>Invoking the static {@link ManagementFactory#getThreadMXBean}method. * </li> * <li>Using a javax.management.MBeanServerConnection.</li> * <li>Obtaining a proxy MXBean from the static * {@link ManagementFactory#newPlatformMXBeanProxy}method, passing in * &quot;java.lang:type=Threading&quot; for the value of the second parameter. * </li> * </ol> * </p> * * @since 1.5 */ public interface ThreadMXBean extends PlatformManagedObject { /** * Returns the thread identifiers of every thread in this virtual machine * that is currently blocked in a deadlock situation over a monitor object. * A thread is considered to be deadlocked if it is blocked waiting to run * and owns an object monitor that is sought by another blocked thread. Two * or more threads can be in a deadlock cycle. To determine the threads * currently deadlocked by object monitors <i>and</i> ownable synchronizers * use the {@link #findDeadlockedThreads()} method. * <p> * It is recommended that this method be used solely for problem * determination analysis and not as a means of managing thread * synchronization in a virtual machine. This is because the method may be * very expensive to run. * </p> * * @return an array of the identifiers of every thread in the virtual * machine that has been detected as currently being in a deadlock * situation over an object monitor. May be <code>null</code> if * there are currently no threads in that category. 
* @throws SecurityException * if there is a security manager in effect and the caller does * not have {@link ManagementPermission}of &quot;monitor&quot;. */ public long[] findMonitorDeadlockedThreads(); /** * Returns an array of the identifiers of all of the threads that are alive * in the current virtual machine. When processing the return from this * method it should <i>not </i> be assumed that each identified thread is * still alive. * * @return the identifiers of all of the threads currently alive in the * virtual machine. */ public long[] getAllThreadIds(); /** * If supported by the virtual machine, returns the total CPU usage time for * the currently running thread. The returned time will have nanosecond * precision but may not have nanosecond accuracy. * <p> * Method {@link #isCurrentThreadCpuTimeSupported()}may be used to * determine if current thread CPU timing is supported on the virtual * machine. On virtual machines where current thread CPU timing is * supported, the method {@link #isThreadCpuTimeEnabled()}may be used to * determine if thread CPU timing is actually enabled. * </p> * <p> * The return value is identical to that which would be obtained by calling * {@link #getThreadCpuTime}with an argument * <code>Thread.currentThread().getId())</code>. * </p> * * @return on virtual machines where current thread CPU timing is supported * and thread CPU timing is enabled, the number of nanoseconds CPU * usage by the current thread. On virtual machines where current * thread CPU timing is supported but thread CPU timing is not * enabled, <code>-1</code>. * @throws UnsupportedOperationException * if the virtual machine does not support current thread CPU * timing. * */ public long getCurrentThreadCpuTime(); /** * If supported by the virtual machine, returns the total CPU usage time for * the current thread running in user mode. The returned time will have * nanosecond precision but may not have nanosecond accuracy. 
* <p> * Method {@link #isCurrentThreadCpuTimeSupported()}may be used to * determine if current thread CPU timing is supported on the virtual * machine. On virtual machines where current thread CPU timing is * supported, the method {@link #isThreadCpuTimeEnabled()}may be used to * determine if thread CPU timing is actually enabled. * </p> * <p> * The return value is identical to that which would be obtained by calling * {@link #getThreadUserTime}with an argument * <code>Thread.currentThread().getId())</code>. * </p> * * @return on virtual machines where current thread CPU timing is supported * and thread CPU timing is enabled, the number of nanoseconds CPU * time used by the current thread running in user mode. On virtual * machines where current thread CPU timing is supported but thread * CPU timing is not enabled, <code>-1</code>. * @throws UnsupportedOperationException * if the virtual machine does not support current thread CPU * timing. * */ public long getCurrentThreadUserTime(); /** * Returns the number of daemon threads currently alive in the virtual * machine. * * @return the number of currently alive daemon threads. */ public int getDaemonThreadCount(); /** * Returns the peak number of threads that have ever been alive in the * virtual machine at any one instant since either the virtual machine * start-up or the peak was reset. * * @return the peak number of live threads * @see #resetPeakThreadCount() */ public int getPeakThreadCount(); /** * Returns the number of threads currently alive in the virtual machine. * This includes both daemon threads and non-daemon threads. * * @return the number of currently alive threads. */ public int getThreadCount(); /** * If supported by the virtual machine, returns the total CPU usage time for * the thread with the specified identifier. The returned time will have * nanosecond precision but may not have nanosecond accuracy. 
* <p> * Method {@link #isThreadCpuTimeSupported()}may be used to determine if * the CPU timing of threads is supported on the virtual machine. On virtual * machines where current thread CPU timing is supported, the method * {@link #isThreadCpuTimeEnabled()}may be used to determine if thread CPU * timing is actually enabled. * </p> * * @param id * the identifier for a thread. Must be a positive number greater * than zero. * @return on virtual machines where thread CPU timing is supported and * enabled, and there is a living thread with identifier * <code>id</code>, the number of nanoseconds CPU time used by * the thread. On virtual machines where thread CPU timing is * supported but not enabled, or where there is no living thread * with identifier <code>id</code> present in the virtual machine, * a value of <code>-1</code> is returned. * @throws IllegalArgumentException * if <code>id</code> is &lt;=0. * @throws UnsupportedOperationException * if the virtual machine does not support thread CPU timing. * @see #isThreadCpuTimeSupported() * @see #isThreadCpuTimeEnabled() */ public long getThreadCpuTime(long id); /** * Returns a {@link ThreadInfo}object for the thread with the specified * identifier. The returned object will not have a stack trace so that a * call to its <code>getStackTrace()</code> method will result in an empty * <code>StackTraceElement</code> array. Similarly, the returned object * will hold no details of locked synchronizers or locked object monitors * for the specified thread; calls to <code>getLockedMonitors()</code> and * <code>getLockedSynchronizers</code> will both return array values. * * @param id * the identifier for a thread. Must be a positive number greater * than zero. * @return if the supplied <code>id</code> maps to a living thread in the * virtual machine (i.e. a started thread which has not yet died), * this method returns a {@link ThreadInfo}object corresponding to * that thread. Otherwise, returns <code>null</code>. 
* @throws IllegalArgumentException * if <code>id</code> is &lt;=0. * @throws SecurityException * if there is a security manager in effect and the caller does * not have {@link ManagementPermission}of &quot;monitor&quot;. */ public ThreadInfo getThreadInfo(long id); /** * Returns an array of {@link ThreadInfo}objects ; one for each of the * threads specified in the input array of identifiers. None of the objects * in the return array will have a stack trace so that a call to its * <code>getStackTrace()</code> method will result in an empty * <code>StackTraceElement</code> array. Similarly, the returned object * will hold no details of locked synchronizers or locked object monitors * for the specified thread; calls to <code>getLockedMonitors()</code> and * <code>getLockedSynchronizers</code> will both return array values. * * @param ids * an array of thread identifiers. Each one must be a positive * number greater than zero. * @return an array of {@link ThreadInfo}objects with each entry * corresponding to one of the threads specified in the input array * of identifiers. The return array will therefore have an identical * number of elements to the input <code>ids</code> array. If an * entry in the <code>ids</code> array is invalid (there is no * living thread with the supplied identifier in the virtual * machine) then the corresponding entry in the return array will be * a <code>null</code>. * @throws IllegalArgumentException * if any of the entries in the <code>ids</code> array is * &lt;=0. * @throws SecurityException * if there is a security manager in effect and the caller does * not have {@link ManagementPermission}of &quot;monitor&quot;. */ public ThreadInfo[] getThreadInfo(long[] ids); /** * Returns an array of {@link ThreadInfo}objects ; one for each of the * threads specified in the <code>ids</code> argument. 
The stack trace * information in the returned objects will depend on the value of the * <code>maxDepth</code> argument which specifies the maximum number of * {@link StackTraceElement}instances to try and include. A subsequent call * to any of the returned objects' <code>getStackTrace()</code> method * should result in a {@link StackTraceElement}array of up to * <code>maxDepth</code> elements. A <code>maxDepth</code> value of * <code>Integer.MAX_VALUE</code> will attempt to obtain all of the stack * trace information for each specified thread while a <code>maxDepth</code> * value of zero will yield none. * <p> * The returned object will hold no details of locked synchronizers or * locked object monitors for the specified thread; calls to * <code>getLockedMonitors()</code> and * <code>getLockedSynchronizers</code> will both return array values. * </p> * * @param ids * an array of thread identifiers. Each must be a positive number * greater than zero. * @param maxDepth * the <i>maximum </i> number of stack trace entries to be * included in each of the returned <code>ThreadInfo</code> * objects. Supplying <code>Integer.MAX_VALUE</code> attempts * to obtain all of the stack traces. Only a positive value is * expected. * @return an array of <code>ThreadInfo</code> objects. The size of the * array will be identical to that of the <code>ids</code> * argument. Null elements will be placed in the array if the * corresponding thread identifier in <code>ids</code> does not * resolve to a living thread in the virtual machine (i.e. a started * thread which has not yet died). * @throws IllegalArgumentException * if any element in <code>ids</code> is &lt;=0. * @throws IllegalArgumentException * if <code>maxDepth</code> is &lt;0. * @throws SecurityException * if there is a security manager in effect and the caller does * not have {@link ManagementPermission}of &quot;monitor&quot;. 
*/ public ThreadInfo[] getThreadInfo(long[] ids, int maxDepth); /** * Returns an array of {@link ThreadInfo}objects; one for each of the * threads specified in the <code>ids</code> argument. Each * <code>ThreadInfo</code> will hold details of all of the stack trace * information for each specified thread. The returned * <code>ThreadInfo</code> objects will optionally contain details of all * monitor objects and synchronizers locked by the corresponding thread. In * order to retrieve locked monitor information the * <code>lockedMonitors</code> argument should be set to <code>true</code>; * in order to retrieve locked synchronizers information * <code>lockedSynchronizers</code> should be set to <code>true</code>. * For a given <code>ThreadInfo</code> element of the return array the * optional information may be inspected by calling * {@link ThreadInfo#getLockedMonitors()} and * {@link ThreadInfo#getLockedSynchronizers()} respectively. * <p> * Both <code>lockedMonitors</code> and <code>lockedSynchronizers</code> * arguments should only be set to <code>true</code> if the virtual * machine supports the requested monitoring. * </p> * * @param ids * an array of thread identifiers. Each one must be a positive * number greater than zero. * @param lockedMonitors * boolean indication of whether or not each returned * <code>ThreadInfo</code> should hold information on locked * object monitors * @param lockedSynchronizers * boolean indication of whether or not each returned * <code>ThreadInfo</code> should hold information on locked * synchronizers * @return an array of {@link ThreadInfo}objects with each entry * corresponding to one of the threads specified in the input array * of identifiers. The return array will therefore have an identical * number of elements to the input <code>ids</code> array. 
If an * entry in the <code>ids</code> array is invalid (there is no * living thread with the supplied identifier in the virtual * machine) then the corresponding entry in the return array will be * a <code>null</code>. * @throws IllegalArgumentException * if any of the entries in the <code>ids</code> array is * &lt;=0. * @throws SecurityException * if there is a security manager in effect and the caller does * not have {@link ManagementPermission}of &quot;monitor&quot;. * @throws UnsupportedOperationException * if either of the following conditions apply: * <ul> * <li><code>lockedMonitors</code> is <code>true</code> but * a call to {@link #isObjectMonitorUsageSupported()} would * result in a <code>false</code> value * <li><code>lockedSynchronizers</code> is <code>true</code> * but a call to {@link #isSynchronizerUsageSupported()} would * result in a <code>false</code> value * </ul> */ public ThreadInfo[] getThreadInfo(long[] ids, boolean lockedMonitors, boolean lockedSynchronizers); /** * Returns a {@link ThreadInfo}object for the thread with the specified * identifier. The stack trace information in the returned object will * depend on the value of the <code>maxDepth</code> argument which * specifies the maximum number of {@link StackTraceElement}instances to * include. A subsequent call to the returned object's * <code>getStackTrace()</code> method should then result in a * {@link StackTraceElement}array of up to <code>maxDepth</code> * elements. A <code>maxDepth</code> value of * <code>Integer.MAX_VALUE</code> will obtain all of the stack trace * information for the thread while a <code>maxDepth</code> value of zero * will yield none. * <p> * It is possible that the virtual machine may be unable to supply any stack * trace information for the specified thread. In that case the returned * <code>ThreadInfo</code> object will have an empty array of * <code>StackTraceElement</code>s. 
* </p> * <p> * The returned object will hold no details of locked synchronizers or * locked object monitors for the specified thread; calls to * <code>getLockedMonitors()</code> and * <code>getLockedSynchronizers</code> will both return array values. * </p> * * @param id * the identifier for a thread. Must be a positive number greater * than zero. * @param maxDepth * the <i>maximum </i> number of stack trace entries to be * included in the returned <code>ThreadInfo</code> object. * Supplying <code>Integer.MAX_VALUE</code> obtains all of the * stack trace. Only a positive value is expected. * @return if the supplied <code>id</code> maps to a living thread in the * virtual machine (i.e. a started thread which has not yet died), * this method returns a {@link ThreadInfo}object corresponding to * that thread. Otherwise, returns <code>null</code>. * @throws IllegalArgumentException * if <code>id</code> is &lt;=0. * @throws IllegalArgumentException * if <code>maxDepth</code> is &lt;0. * @throws SecurityException * if there is a security manager in effect and the caller does * not have {@link ManagementPermission}of &quot;monitor&quot;. */ public ThreadInfo getThreadInfo(long id, int maxDepth); /** * If supported by the virtual machine, returns the total CPU usage time for * the thread with the specified identifier when running in user mode. The * returned time will have nanosecond precision but may not have nanosecond * accuracy. * <p> * Method {@link #isThreadCpuTimeSupported()}may be used to determine if * the CPU timing of threads is supported on the virtual machine. On virtual * machines where current thread CPU timing is supported, the method * {@link #isThreadCpuTimeEnabled()}may be used to determine if thread CPU * timing is actually enabled. * </p> * * @param id * the identifier for a thread. Must be a positive number greater * than zero. 
* @return on virtual machines where thread CPU timing is supported and * enabled, and there is a living thread with identifier * <code>id</code>, the number of nanoseconds CPU time used by * the thread running in user mode. On virtual machines where thread * CPU timing is supported but not enabled, or where there is no * living thread with identifier <code>id</code> present in the * virtual machine, a value of <code>-1</code> is returned. * <p> * If thread CPU timing was disabled when the thread was started * then the virtual machine is free to choose any measurement start * time between when the virtual machine started up and when thread * CPU timing was enabled with a call to * {@link #setThreadCpuTimeEnabled(boolean)}. * </p> * @throws IllegalArgumentException * if <code>id</code> is &lt;=0. * @throws UnsupportedOperationException * if the virtual machine does not support thread CPU timing. * @see #isThreadCpuTimeSupported() * @see #isThreadCpuTimeEnabled() */ public long getThreadUserTime(long id); /** * Returns the number of threads that have been started in this virtual * machine since it came into being. * * @return the total number of started threads. */ public long getTotalStartedThreadCount(); /** * Returns a boolean indication of whether or not the virtual machine * supports the CPU timing of the current thread. * <p> * Note that this method must return <code>true</code> if * {@link #isThreadCpuTimeSupported()}returns <code>true</code>. * </p> * * @return <code>true</code> if CPU timing of the current thread is * supported, otherwise <code>false</code>. */ public boolean isCurrentThreadCpuTimeSupported(); /** * Returns a boolean indication of whether or not the monitoring of thread * contention situations is enabled on this virtual machine. * * @return <code>true</code> if thread contention monitoring is enabled, * <code>false</code> otherwise. 
*/ public boolean isThreadContentionMonitoringEnabled(); /** * Returns a boolean indication of whether or not the monitoring of thread * contention situations is supported on this virtual machine. * * @return <code>true</code> if thread contention monitoring is supported, * <code>false</code> otherwise. */ public boolean isThreadContentionMonitoringSupported(); /** * Returns a boolean indication of whether or not the CPU timing of threads * is enabled on this virtual machine. * * @return <code>true</code> if thread CPU timing is enabled, * <code>false</code> otherwise. * @throws UnsupportedOperationException * if the virtual machine does not support thread CPU timing. * @see #isThreadCpuTimeSupported() */ public boolean isThreadCpuTimeEnabled(); /** * Returns a boolean indication of whether or not the virtual machine * supports the CPU time measurement of any threads (current or otherwise). * * @return <code>true</code> if the virtual machine supports the CPU * timing of threads, <code>false</code> otherwise. */ public boolean isThreadCpuTimeSupported(); /** * Resets the peak thread count to be the current number of threads alive in * the virtual machine when the call is made. * * @throws SecurityException * if there is a security manager in effect and the caller does * not have {@link ManagementPermission}of &quot;control&quot;. */ public void resetPeakThreadCount(); /** * Updates the virtual machine to either enable or disable the monitoring of * thread contention situations. * <p> * If it is supported, the virtual machine will initially not monitor thread * contention situations. * </p> * * @param enable * enable thread contention monitoring if <code>true</code>, * otherwise disable thread contention monitoring. * @throws SecurityException * if there is a security manager in effect and the caller does * not have {@link ManagementPermission}of &quot;control&quot;. 
* @throws UnsupportedOperationException * if the virtual machine does not support thread contention * monitoring. * @see #isThreadContentionMonitoringSupported() */ public void setThreadContentionMonitoringEnabled(boolean enable); /** * If supported, updates the virtual machine to either enable or disable the * CPU timing of threads. * <p> * The default value of this property depends on the underlying operating * system on which the virtual machine is running. * </p> * * @param enable * enable thread CPU timing if <code>true</code>, otherwise * disable thread CPU timing * @throws SecurityException * if there is a security manager in effect and the caller does * not have {@link ManagementPermission}of &quot;control&quot;. * @throws UnsupportedOperationException * if the virtual machine does not support thread CPU timing. * @see #isThreadCpuTimeSupported() */ public void setThreadCpuTimeEnabled(boolean enable); /** * Returns a boolean indication of whether or not the virtual machine * supports the monitoring of object monitor usage. * * @return <code>true</code> if object monitor usage is permitted, * otherwise <code>false</code> * @since 1.6 */ public boolean isObjectMonitorUsageSupported(); /** * Returns a boolean indication of whether or not the virtual machine * supports the monitoring of ownable synchronizers (synchronizers that make * use of the <code>AbstractOwnableSynchronizer</code> type and which are * completely owned by a single thread). * * @return <code>true</code> if synchronizer usage monitoring is * permitted, otherwise <code>false</code> */ public boolean isSynchronizerUsageSupported(); /** * If supported by the virtual machine, this method can be used to retrieve * the <code>long</code> id of all threads currently waiting on object * monitors or ownable synchronizers (synchronizers that make use of the * <code>AbstractOwnableSynchronizer</code> type and which are completely * owned by a single thread). 
To determine the threads currently deadlocked * by object monitors only use the {@link #findMonitorDeadlockedThreads()} * method. * <p> * It is recommended that this method be used solely for problem * determination analysis and not as a means of managing thread * synchronization in a virtual machine. This is because the method may be * very expensive to run. * </p> * * @return an array of the identifiers of every thread in the virtual * machine that has been detected as currently being in a deadlock * situation involving object monitors <i>and</i> ownable * synchronizers. If there are no threads in this category a * <code>null</code> is returned. * @throws SecurityException * if there is a security manager in effect and the caller does * not have {@link ManagementPermission}of &quot;monitor&quot;. * @throws UnsupportedOperationException * if the virtual machine does not support any monitoring of * ownable synchronizers. * @see #isSynchronizerUsageSupported() * */ public long[] findDeadlockedThreads(); /** * Returns an array of {@link ThreadInfo} objects holding information on all * threads that were alive when the call was invoked. * * @param lockedMonitors * boolean indication of whether or not information on all * currently locked object monitors is to be included in the * returned array * @param lockedSynchronizers * boolean indication of whether or not information on all * currently locked ownable synchronizers is to be included in * the returned array * @return an array of <code>ThreadInfo</code> objects * @throws SecurityException * if there is a security manager in effect and the caller does * not have {@link ManagementPermission}of &quot;monitor&quot;. 
* @throws UnsupportedOperationException * if either of the following conditions apply: * <ul> * <li><code>lockedMonitors</code> is <code>true</code> but * a call to {@link #isObjectMonitorUsageSupported()} would * result in a <code>false</code> value * <li><code>lockedSynchronizers</code> is <code>true</code> * but a call to {@link #isSynchronizerUsageSupported()} would * result in a <code>false</code> value * </ul> */ public ThreadInfo[] dumpAllThreads(boolean lockedMonitors, boolean lockedSynchronizers); }
Barysman/omim
qt/build_style/build_common.cpp
#include "build_common.h"

#include "platform/platform.hpp"

#include <QtCore/QCoreApplication>
#include <QtCore/QDir>
#include <QtCore/QFile>
#include <QtCore/QProcess>
#include <QtCore/QRegExp>

#include <exception>

// Runs |cmd| synchronously (optionally under a custom environment) and returns
// its exit code together with the captured stdout, with stderr appended.
std::pair<int, QString> ExecProcess(QString const & cmd, QProcessEnvironment const * env)
{
  QProcess process;
  if (env != nullptr)
    process.setProcessEnvironment(*env);

  process.start(cmd);
  process.waitForFinished(-1);  // Block without timeout until the process exits.

  QString combined = process.readAllStandardOutput();
  QString const errText = process.readAllStandardError();
  if (!errText.isEmpty())
  {
    if (!combined.isEmpty())
      combined += "\n";
    combined += errText;
  }

  return std::make_pair(process.exitCode(), combined);
}

// Copies |oldFile| to |newFile|, replacing any existing destination.
// Returns false when the source is missing or the destination can't be replaced.
bool CopyFile(QString const & oldFile, QString const & newFile)
{
  // Identical paths: nothing to do.
  if (oldFile == newFile)
    return true;

  if (!QFile::exists(oldFile))
    return false;

  // QFile::copy never overwrites, so a pre-existing destination must be removed first.
  if (QFile::exists(newFile) && !QFile::remove(newFile))
    return false;

  return QFile::copy(oldFile, newFile);
}

// Copies |name| from the application resources directory into |output|.
// Throws std::runtime_error on failure.
void CopyFromResources(QString const & name, QString const & output)
{
  QString const resourceDir = GetPlatform().ResourcesDir().c_str();
  QString const src = JoinFoldersToPath({resourceDir, name});
  QString const dst = JoinFoldersToPath({output, name});
  if (!CopyFile(src, dst))
  {
    throw std::runtime_error(std::string("Cannot copy file ") +
                             name.toStdString() + " to " + output.toStdString());
  }
}

// Copies |name| from |input| into the resources directory, optionally renaming
// it to |newName|. Throws std::runtime_error on failure.
void CopyToResources(QString const & name, QString const & input, QString const & newName)
{
  QString const resourceDir = GetPlatform().ResourcesDir().c_str();
  QString const targetName = newName.isEmpty() ? name : newName;
  if (!CopyFile(JoinFoldersToPath({input, name}),
                JoinFoldersToPath({resourceDir, targetName})))
  {
    throw std::runtime_error(std::string("Cannot copy file ") +
                             name.toStdString() + " from " + input.toStdString());
  }
}

// Joins the non-empty components with the platform separator and normalizes
// the result via QDir::cleanPath.
QString JoinFoldersToPath(std::initializer_list<QString> const & folders)
{
  QString joined;
  bool needSeparator = false;
  for (auto const & folder : folders)
  {
    // Skip empty components and components that are a bare separator.
    if (folder.isEmpty() || folder == QDir::separator())
      continue;

    if (needSeparator)
      joined.append(QDir::separator());
    joined.append(folder);
    needSeparator = true;
  }

  return QDir::cleanPath(joined);
}

// Looks up |name| under the resources dir: first in |primaryPath|, then in
// |secondaryPath|; as a last resort (macOS-style bundle case, when
// |secondaryPath| is empty) next to the *.app folder. May return a
// non-existent path if nothing was found.
QString GetExternalPath(QString const & name, QString const & primaryPath,
                        QString const & secondaryPath)
{
  QString const resourceDir = GetPlatform().ResourcesDir().c_str();
  QString result = JoinFoldersToPath({resourceDir, primaryPath, name});
  if (!QFileInfo::exists(result))
    result = JoinFoldersToPath({resourceDir, secondaryPath, name});

  // Special case for looking for in application folder.
  if (!QFileInfo::exists(result) && secondaryPath.isEmpty())
  {
    QString const appPath = QCoreApplication::applicationDirPath();
    QRegExp bundleRx("(/[^/]*\\.app)", Qt::CaseInsensitive);
    int const pos = bundleRx.indexIn(appPath);
    if (pos >= 0)
      result = JoinFoldersToPath({appPath.left(pos), name});
  }
  return result;
}

// Location of the protobuf egg used by the kothic style pipeline.
QString GetProtobufEggPath()
{
  return GetExternalPath("protobuf-3.3.0-py2.7.egg", "kothic", "../3party/protobuf");
}
sprect8/settle
src/redux/rfps/store.js
<reponame>sprect8/settle import React, { createContext, useReducer } from 'react'; import { reducer, initialState } from './reducers'; import { useActions } from './actions'; import { applyMiddleware } from './middleware'; const StoreContext = createContext(); const StoreProvider = ({ children }) => { const [state, dispatch] = useReducer(reducer, initialState); const actions = useActions(state, applyMiddleware(dispatch)); console.log("RFPs Initialising", state, actions); return ( <StoreContext.Provider value={{ state, actions }}> {children} </StoreContext.Provider> ) } export { StoreContext, StoreProvider }
IOR-BIC/ALBA
Gui/albaGUIWizardPageNew.h
/*========================================================================= Program: ALBA (Agile Library for Biomedical Applications) Module: albaGUIWizardPageNew Authors: <NAME>, <NAME> Copyright (c) BIC All rights reserved. See Copyright.txt or This software is distributed WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the above copyright notice for more information. =========================================================================*/ #ifndef __albaGUIWizardPageNew_H__ #define __albaGUIWizardPageNew_H__ //---------------------------------------------------------------------------- // Include: //---------------------------------------------------------------------------- #include "wx/wizard.h" #include "albaDefines.h" #include "albaEvent.h" #include "albaObserver.h" #include "albaRWI.h" //---------------------------------------------------------------------------- // forward references : //---------------------------------------------------------------------------- class albaGUIWizard; class albaGUI; class albaGUILutSlider; //---------------------------------------------------------------------------- // Const: //---------------------------------------------------------------------------- /** IDs for the Dialog Style */ enum DIALOG_EXSTYLES { albaWIZARDUSEGUI = 1, albaWIZARDUSERWI = 2, }; /** IDs for the view GUI */ enum GUI_WIZARD_WIDGET_ID { ID_LUT_CHOOSER = 3, }; /** Class Name: albaGUIWizardPageNew. This class is the simplest possible albaGUIWizard implementation. Add gui in panel positions: - lower left - lower right - lower center - lower center bottom */ class ALBA_EXPORT albaGUIWizardPageNew : public wxWizardPageSimple, public albaObserver { public: /** construct. */ albaGUIWizardPageNew (albaGUIWizard *wizardParent,long style = albaWIZARDUSEGUI | albaWIZARDUSERWI, bool ZCrop = false, wxString label=""); /** destructor. 
*/ virtual ~albaGUIWizardPageNew (); /** Set the Listener that will receive event-notification. */ void SetListener(albaObserver *Listener) {m_Listener = Listener;}; /** Precess events coming from other objects. */ void OnEvent(albaEventBase *alba_event); albaObserver *m_Listener; /** Add in wizard gui, another gui in lower left position. */ void AddGuiLowerLeft(albaGUI *gui); /** Add in wizard gui, another gui in lower right position. */ void AddGuiLowerRight(albaGUI *gui); /** Add in wizard gui, another gui in lower center position. */ void AddGuiLowerCenter(albaGUI *gui); /** Add in wizard gui, another gui in lower center bottom position. */ void AddGuiLowerUnderLeft(albaGUI *gui); /** Add in wizard gui, another gui in lower center bottom position. */ void AddGuiLowerUnderCenter(albaGUI *gui); /** Remove in wizard gui, another gui in lower left position. */ void RemoveGuiLowerLeft(albaGUI *gui); /** Remove in wizard gui, another gui under lower left position. */ void RemoveGuiLowerUnderLeft(albaGUI *gui); /** Remove in wizard gui, another gui in lower center position. */ void RemoveGuiLowerCenter(albaGUI *gui); /** Create a chain between this page ad nextPage. */ void SetNextPage(albaGUIWizardPageNew *nextPage); /** Set bounds of ZCrop slider. */ void SetZCropBounds(double ZMin = 0, double ZMax = 1); /** Return bounds of ZCrop slider. */ void GetZCropBounds(double ZCropBpunds[2]); /** Update windowing in order to set correct values in lut slider. */ void UpdateWindowing(); /** Update windowing (with custom range) in order to set correct values in lut slider. (used in dicom importer because the windowing must be that of the total dicom and we see only a slice)*/ void UpdateWindowing(double *scalarRange,double *scalarSubRange); /** Get the windowing range values */ void GetWindowing(double *scalarRange,double *scalarSubRange); /** Update Actor Texture from current lut slider values. */ void UpdateActor(); /** Retrieve the current Render Window. 
*/ albaRWI* GetRWI(){return m_Rwi;}; protected: wxBoxSizer *m_GUISizer; ///< Sizer used for the Lower GUI wxBoxSizer *m_GUIUnderSizer; ///< Sizer used for the under Lower GUI wxBoxSizer *m_LUTSizer; ///< Sizer used for the LUT Slider GUI wxBoxSizer *m_RwiSizer; ///< Sizer used for the vtk render window and if you want to plug any gui on bottom of the RWI wxBoxSizer *m_SizerAll; ///< Vertical sizer used to include all other sizer albaRWI *m_Rwi; ///< Render window albaGUI *m_GuiLowerLeft; ///< Gui variable used to plug custom widgets localized in LOWER LEFT albaGUI *m_GuiLowerCenter; ///< Gui variable used to plug custom widgets localized in LOWER CENTER albaGUI *m_GuiLowerUnderCenter; ///< Gui variable used to plug custom widgets localized under LOWER CENTER albaGUI *m_GuiLowerUnderLeft; ///< Gui variable used to plug custom widgets localized under LOWER LEFT albaGUI *m_GuiView; albaGUILutSlider *m_LutSlider; albaGUILutSlider *m_ZCropSlider; vtkLookupTable *m_ColorLUT; bool m_ZCropOn; friend class albaGUIWizardPageNewTest; //DECLARE_EVENT_TABLE() }; #endif
whysage/python-dependency-injector
tests/unit/providers/test_coroutines_py35.py
"""Dependency injector coroutine providers unit tests."""

import asyncio
import unittest
import warnings

from dependency_injector import (
    providers,
    errors,
)

# Runtime import to get asyncutils module
import os
_TOP_DIR = os.path.abspath(
    os.path.sep.join((
        os.path.dirname(__file__),
        '../',
    )),
)
import sys
sys.path.append(_TOP_DIR)

from asyncutils import AsyncTestCase


async def _example(arg1, arg2, arg3, arg4):
    # Awaits a pre-resolved future so the coroutine actually suspends once.
    future = asyncio.Future()
    future.set_result(None)
    await future
    return arg1, arg2, arg3, arg4


def run(main):
    # Standalone helper to drive a coroutine to completion outside AsyncTestCase.
    loop = asyncio.get_event_loop()
    return loop.run_until_complete(main)


class CoroutineTests(AsyncTestCase):

    def test_init_with_coroutine(self):
        self.assertTrue(providers.Coroutine(_example))

    def test_init_with_not_coroutine(self):
        self.assertRaises(errors.Error, providers.Coroutine, lambda: None)

    def test_init_optional_provides(self):
        provider = providers.Coroutine()
        provider.set_provides(_example)

        self.assertIs(provider.provides, _example)
        # Uses self._run for consistency with the other tests in this class.
        self.assertEqual(self._run(provider(1, 2, 3, 4)), (1, 2, 3, 4))

    def test_set_provides_returns_self(self):
        provider = providers.Coroutine()
        self.assertIs(provider.set_provides(_example), provider)

    def test_call_with_positional_args(self):
        provider = providers.Coroutine(_example, 1, 2, 3, 4)
        self.assertTupleEqual(self._run(provider()), (1, 2, 3, 4))

    def test_call_with_keyword_args(self):
        provider = providers.Coroutine(_example,
                                       arg1=1, arg2=2, arg3=3, arg4=4)
        self.assertTupleEqual(self._run(provider()), (1, 2, 3, 4))

    def test_call_with_positional_and_keyword_args(self):
        provider = providers.Coroutine(_example,
                                       1, 2,
                                       arg3=3, arg4=4)
        # Uses self._run for consistency with the other tests in this class.
        self.assertTupleEqual(self._run(provider()), (1, 2, 3, 4))

    def test_call_with_context_args(self):
        provider = providers.Coroutine(_example, 1, 2)
        self.assertTupleEqual(self._run(provider(3, 4)), (1, 2, 3, 4))

    def test_call_with_context_kwargs(self):
        provider = providers.Coroutine(_example, arg1=1)
        self.assertTupleEqual(
            self._run(provider(arg2=2, arg3=3, arg4=4)),
            (1, 2, 3, 4),
        )

    def test_call_with_context_args_and_kwargs(self):
        provider = providers.Coroutine(_example, 1)
        self.assertTupleEqual(
            self._run(provider(2, arg3=3, arg4=4)),
            (1, 2, 3, 4),
        )

    def test_fluent_interface(self):
        provider = providers.Coroutine(_example) \
            .add_args(1, 2) \
            .add_kwargs(arg3=3, arg4=4)
        self.assertTupleEqual(self._run(provider()), (1, 2, 3, 4))

    def test_set_args(self):
        provider = providers.Coroutine(_example) \
            .add_args(1, 2) \
            .set_args(3, 4)
        self.assertEqual(provider.args, (3, 4))

    def test_set_kwargs(self):
        provider = providers.Coroutine(_example) \
            .add_kwargs(init_arg3=3, init_arg4=4) \
            .set_kwargs(init_arg3=4, init_arg4=5)
        self.assertEqual(provider.kwargs, dict(init_arg3=4, init_arg4=5))

    def test_clear_args(self):
        provider = providers.Coroutine(_example) \
            .add_args(1, 2) \
            .clear_args()
        self.assertEqual(provider.args, tuple())

    def test_clear_kwargs(self):
        provider = providers.Coroutine(_example) \
            .add_kwargs(init_arg3=3, init_arg4=4) \
            .clear_kwargs()
        self.assertEqual(provider.kwargs, dict())

    def test_call_overridden(self):
        provider = providers.Coroutine(_example)

        provider.override(providers.Object((4, 3, 2, 1)))
        provider.override(providers.Object((1, 2, 3, 4)))

        self.assertTupleEqual(provider(), (1, 2, 3, 4))

    def test_deepcopy(self):
        provider = providers.Coroutine(_example)

        provider_copy = providers.deepcopy(provider)

        self.assertIsNot(provider, provider_copy)
        self.assertIs(provider.provides, provider_copy.provides)
        self.assertIsInstance(provider, providers.Coroutine)

    def test_deepcopy_from_memo(self):
        provider = providers.Coroutine(_example)
        provider_copy_memo = providers.Coroutine(_example)

        provider_copy = providers.deepcopy(
            provider, memo={id(provider): provider_copy_memo})

        self.assertIs(provider_copy, provider_copy_memo)

    def test_deepcopy_args(self):
        provider = providers.Coroutine(_example)
        dependent_provider1 = providers.Callable(list)
        dependent_provider2 = providers.Callable(dict)

        provider.add_args(dependent_provider1, dependent_provider2)

        provider_copy = providers.deepcopy(provider)
        dependent_provider_copy1 = provider_copy.args[0]
        dependent_provider_copy2 = provider_copy.args[1]

        self.assertNotEqual(provider.args, provider_copy.args)

        self.assertIs(dependent_provider1.provides,
                      dependent_provider_copy1.provides)
        self.assertIsNot(dependent_provider1, dependent_provider_copy1)

        self.assertIs(dependent_provider2.provides,
                      dependent_provider_copy2.provides)
        self.assertIsNot(dependent_provider2, dependent_provider_copy2)

    def test_deepcopy_kwargs(self):
        provider = providers.Coroutine(_example)
        dependent_provider1 = providers.Callable(list)
        dependent_provider2 = providers.Callable(dict)

        provider.add_kwargs(a1=dependent_provider1, a2=dependent_provider2)

        provider_copy = providers.deepcopy(provider)
        dependent_provider_copy1 = provider_copy.kwargs['a1']
        dependent_provider_copy2 = provider_copy.kwargs['a2']

        self.assertNotEqual(provider.kwargs, provider_copy.kwargs)

        self.assertIs(dependent_provider1.provides,
                      dependent_provider_copy1.provides)
        self.assertIsNot(dependent_provider1, dependent_provider_copy1)

        self.assertIs(dependent_provider2.provides,
                      dependent_provider_copy2.provides)
        self.assertIsNot(dependent_provider2, dependent_provider_copy2)

    def test_deepcopy_overridden(self):
        provider = providers.Coroutine(_example)
        object_provider = providers.Object(object())

        provider.override(object_provider)

        provider_copy = providers.deepcopy(provider)
        object_provider_copy = provider_copy.overridden[0]

        self.assertIsNot(provider, provider_copy)
        self.assertIs(provider.provides, provider_copy.provides)
        self.assertIsInstance(provider, providers.Callable)

        self.assertIsNot(object_provider, object_provider_copy)
        self.assertIsInstance(object_provider_copy, providers.Object)

    def test_repr(self):
        provider = providers.Coroutine(_example)
        self.assertEqual(repr(provider),
                         '<dependency_injector.providers.'
                         'Coroutine({0}) at {1}>'.format(
                             repr(_example),
                             hex(id(provider))))


class DelegatedCoroutineTests(unittest.TestCase):

    def test_inheritance(self):
        self.assertIsInstance(providers.DelegatedCoroutine(_example),
                              providers.Coroutine)

    def test_is_provider(self):
        self.assertTrue(
            providers.is_provider(providers.DelegatedCoroutine(_example)))

    def test_is_delegated_provider(self):
        provider = providers.DelegatedCoroutine(_example)
        self.assertTrue(providers.is_delegated(provider))

    def test_repr(self):
        provider = providers.DelegatedCoroutine(_example)
        self.assertEqual(repr(provider),
                         '<dependency_injector.providers.'
                         'DelegatedCoroutine({0}) at {1}>'.format(
                             repr(_example),
                             hex(id(provider))))


class AbstractCoroutineTests(AsyncTestCase):

    def test_inheritance(self):
        self.assertIsInstance(providers.AbstractCoroutine(_example),
                              providers.Coroutine)

    def test_call_overridden_by_coroutine(self):
        with warnings.catch_warnings():
            warnings.simplefilter('ignore')

            @asyncio.coroutine
            def _abstract_example():
                raise RuntimeError('Should not be raised')

            provider = providers.AbstractCoroutine(_abstract_example)
            provider.override(providers.Coroutine(_example))

            # Fixed: assertTrue(x, y) treats y as the failure message and
            # would pass for any truthy x; assertTupleEqual checks the value.
            self.assertTupleEqual(self._run(provider(1, 2, 3, 4)), (1, 2, 3, 4))

    def test_call_overridden_by_delegated_coroutine(self):
        with warnings.catch_warnings():
            warnings.simplefilter('ignore')

            @asyncio.coroutine
            def _abstract_example():
                raise RuntimeError('Should not be raised')

            provider = providers.AbstractCoroutine(_abstract_example)
            provider.override(providers.DelegatedCoroutine(_example))

            # Fixed: assertTrue(x, y) treats y as the failure message and
            # would pass for any truthy x; assertTupleEqual checks the value.
            self.assertTupleEqual(self._run(provider(1, 2, 3, 4)), (1, 2, 3, 4))

    def test_call_not_overridden(self):
        provider = providers.AbstractCoroutine(_example)

        with self.assertRaises(errors.Error):
            provider(1, 2, 3, 4)

    def test_override_by_not_coroutine(self):
        provider = providers.AbstractCoroutine(_example)

        with self.assertRaises(errors.Error):
            provider.override(providers.Factory(object))

    def test_provide_not_implemented(self):
        provider = providers.AbstractCoroutine(_example)

        with self.assertRaises(NotImplementedError):
            provider._provide((1, 2, 3, 4), dict())

    def test_repr(self):
        provider = providers.AbstractCoroutine(_example)
        self.assertEqual(repr(provider),
                         '<dependency_injector.providers.'
                         'AbstractCoroutine({0}) at {1}>'.format(
                             repr(_example),
                             hex(id(provider))))


class CoroutineDelegateTests(unittest.TestCase):

    def setUp(self):
        self.delegated = providers.Coroutine(_example)
        self.delegate = providers.CoroutineDelegate(self.delegated)

    def test_is_delegate(self):
        self.assertIsInstance(self.delegate, providers.Delegate)

    def test_init_with_not_callable(self):
        self.assertRaises(errors.Error, providers.CoroutineDelegate,
                          providers.Object(object()))
FaranushKarimov/JavaProject
Generator of Random Numbers/src/Generator/RandomRange.java
package Generator;

import java.util.Random;

/**
 * Demo utility: prints ten pseudo-random integers drawn from the inclusive
 * range 1..10.
 */
public class RandomRange {

  public static final void main(String... aArgs){
    log("Generating random integers in the range 1..10.");
    int START = 1;
    int END = 10;
    Random random = new Random();
    for (int idx = 1; idx <= 10; ++idx){
      showRandomInteger(START, END, random);
    }
    log("Done.");
  }

  /**
   * Returns a pseudo-random integer in the inclusive range [aStart, aEnd].
   *
   * @param aStart lower bound (inclusive); must not exceed aEnd
   * @param aEnd upper bound (inclusive)
   * @param aRandom source of randomness
   * @return a value v with aStart &lt;= v &lt;= aEnd
   * @throws IllegalArgumentException if aStart &gt; aEnd
   */
  public static int generateRandomInteger(int aStart, int aEnd, Random aRandom){
    if (aStart > aEnd) {
      throw new IllegalArgumentException("Start cannot exceed End.");
    }
    // Get the range in long arithmetic, discarding upward, to avoid int overflow
    // (translated from the original Russian comment).
    long range = (long)aEnd - (long)aStart + 1;
    // compute a fraction of the range, 0 <= frac < range
    long fraction = (long)(range * aRandom.nextDouble());
    return (int)(fraction + aStart);
  }

  /** Draws one value via generateRandomInteger and logs it. */
  private static void showRandomInteger(int aStart, int aEnd, Random aRandom){
    int randomNumber = generateRandomInteger(aStart, aEnd, aRandom);
    log("Generated : " + randomNumber);
  }

  private static void log(String aMessage){
    System.out.println(aMessage);
  }
}
benhunter/ctf
htb/fatty-10.10.10.174/fatty-client/org/springframework/web/servlet/mvc/condition/RequestCondition.java
package org.springframework.web.servlet.mvc.condition;

import javax.servlet.http.HttpServletRequest;

import org.springframework.lang.Nullable;

/**
 * Contract for request-mapping conditions that can be combined with one
 * another, matched against an incoming request, and ordered relative to
 * each other for a given request.
 *
 * @param <T> the concrete condition type (implementations are compared and
 *            combined with instances of their own type)
 */
public interface RequestCondition<T> {

  /**
   * Combine this condition with another one of the same type, returning the
   * resulting condition.
   */
  T combine(T paramT);

  /**
   * Return the condition that matches the given request, or {@code null}
   * when there is no match.
   */
  @Nullable
  T getMatchingCondition(HttpServletRequest paramHttpServletRequest);

  /**
   * Order this condition against another in the context of the given
   * request (standard compareTo semantics: negative / zero / positive).
   */
  int compareTo(T paramT, HttpServletRequest paramHttpServletRequest);
}


/* Location:              /home/kali/ctf/htb/fatty-10.10.10.174/ftp/fatty-client.jar!/org/springframework/web/servlet/mvc/condition/RequestCondition.class
 * Java compiler version: 8 (52.0)
 * JD-Core Version:       1.1.3
 */
yvmunayev/rainbow-modules
packages/icons/src/components/arrowLeft/index.js
<reponame>yvmunayev/rainbow-modules<gh_stars>10-100 import React from 'react'; import PropTypes from 'prop-types'; const ArrowLeft = (props) => { const { className, style, title } = props; return ( <svg className={className} style={style} width="16px" height="14px" viewBox="0 0 16 14"> <title>{title}</title> <g id="modules" stroke="none" strokeWidth="1" fill="none" fillRule="evenodd"> <path d="M15.3333333,6 L2.03366667,6 L7.12633333,1.14933333 C7.393,0.895333333 7.40333333,0.473333333 7.14933333,0.206666667 C6.89566667,-0.0596666667 6.47366667,-0.0703333333 6.20666667,0.183666667 L0.390666667,5.72366667 C0.139,5.97566667 0,6.31033333 0,6.66666667 C0,7.02266667 0.139,7.35766667 0.402333333,7.62066667 L6.207,13.1493333 C6.336,13.2723333 6.50133333,13.3333333 6.66666667,13.3333333 C6.84266667,13.3333333 7.01866667,13.264 7.14966667,13.1263333 C7.40366667,12.8596667 7.39333333,12.438 7.12666667,12.184 L2.01266667,7.33333333 L15.3333333,7.33333333 C15.7013333,7.33333333 16,7.03466667 16,6.66666667 C16,6.29866667 15.7013333,6 15.3333333,6 L15.3333333,6 Z" id="Path" fill="currentColor" fillRule="nonzero" /> </g> </svg> ); }; ArrowLeft.propTypes = { className: PropTypes.string, style: PropTypes.object, title: PropTypes.string, }; ArrowLeft.defaultProps = { className: undefined, style: undefined, title: undefined, }; export default ArrowLeft;
yaplej/bro
src/analyzer/protocol/rfb/rfb-protocol.pac
# binpac grammar for the RFB (VNC) protocol: handshake banners, the
# security/authentication exchange, and steady-state client/server messages.
# Per-connection handshake progress is tracked by the $context.connection
# object, which supplies the next expected parse state on each side.

# Parse states for the server-to-client direction.
enum ServerState {
	SERVER_VERSION            = 0,
	SERVER_AUTH_TYPE          = 1,
	SERVER_AUTH_TYPE37        = 2,
	SERVER_AUTH_FAILURE       = 3,
	SERVER_AUTH_VNC_CHALLENGE = 4,
	SERVER_AUTH_ARD_CHALLENGE = 5,
	SERVER_AUTH_RESULT        = 6,
	SERVER_INIT               = 7,
	SERVER_MESSAGE_TYPE       = 8,
	SERVER_MESSAGE            = 9,
	SERVER_WAIT               = 99,
	SERVER_INVALID            = 100,
};

# Parse states for the client-to-server direction.
enum ClientState {
	CLIENT_VERSION           = 0,
	CLIENT_AUTH_SELECTION    = 1,
	CLIENT_AUTH_VNC_RESPONSE = 2,
	CLIENT_AUTH_ARD_RESPONSE = 3,
	CLIENT_INIT              = 4,
	CLIENT_MESSAGE_TYPE      = 5,
	CLIENT_MESSAGE           = 6,
	CLIENT_WAIT              = 99,
	CLIENT_INVALID           = 100,
};

# The protocol specifies some 32-bit variable-length data fields with the
# length derived from packet data.
# This value enforces sane length values to help prevent excessive buffering.
let MAX_DATA_LENGTH: uint32 = 65536;

# Protocol banner, e.g. "RFB 003.008\n"; parsed for both directions.
type RFBProtocolVersion (client: bool) = record {
	header:    "RFB ";
	major_ver: bytestring &length=3;
	dot:       ".";
	minor_ver: bytestring &length=3;
	pad:       uint8;
} &let {
	proc:  bool = $context.connection.handle_banners(client, this);
	proc2: bool = $context.flow.proc_rfb_version(client, major_ver, minor_ver);
} &length=12;

# Length-prefixed reason string sent by the server on failure.
type RFBFailReasonString = record {
	len: uint32 &enforce(len < MAX_DATA_LENGTH);
	str: bytestring &length=len;
} &let {
	proc: bool = $context.connection.handle_fail_reason_string(this);
} &length=(4 + len);

# Single security type advertised by a pre-3.7 server.
type RFBSecurityType = record {
	sectype: uint32;
} &let {
	proc:  bool = $context.connection.handle_security_type(this);
	proc2: bool = $context.flow.proc_security_types(this);
} &length=4;

# List of security types advertised by a 3.7+ server.
type RFBSecurityTypes37 = record {
	count: uint8;
	types: uint8[count];
} &let {
	proc: bool = $context.connection.handle_security_types37(this);
} &length=(count + 1);

# Client's chosen security type (3.7+ handshake).
type RFBAuthTypeSelected = record {
	type: uint8;
} &let {
	proc:  bool = $context.connection.handle_auth_type_selected(this);
	proc2: bool = $context.flow.proc_security_types37(this);
} &length=1;

# SecurityResult message: outcome of the authentication attempt.
type RFBSecurityResult = record {
	result: uint32;
} &let {
	proc:  bool = $context.connection.handle_security_result(this);
	proc2: bool = $context.flow.proc_handle_security_result(result);
} &length=4;

# 16-byte VNC DES challenge from the server.
type RFBVNCAuthenticationRequest = record {
	challenge: bytestring &length=16;
} &let {
	proc: bool = $context.connection.handle_auth_request();
} &length=16;

# Client's 16-byte response to the VNC challenge.
type RFBVNCAuthenticationResponse = record {
	response: bytestring &length=16;
} &let {
	proc: bool = $context.connection.handle_auth_response();
} &length=16;

# Apple Remote Desktop (ARD) Diffie-Hellman style challenge.
type RFBSecurityARDChallenge = record {
	# TODO: Not sure if this all is complete/accurate, could not find the spec.
	generator:  uint16;
	key_length: uint16;
	prime_mod:  bytestring &length=key_length;
	publickey:  bytestring &length=key_length;
} &let {
	proc: bool = $context.connection.handle_ard_challenge(this);
} &length=(4 + (2 * key_length));

# Client's ARD response; key length was recorded from the challenge.
type RFBSecurityARDResponse = record {
	publickey: bytestring &length=$context.connection.get_ard_key_length();
	creds:     bytestring &length=$context.connection.get_ard_key_length();
} &let {
	proc: bool = $context.connection.handle_ard_response();
} &length=(2 * $context.connection.get_ard_key_length());

# ClientInit: whether the desktop should be shared with other clients.
type RFBClientInit = record {
	shared_flag: uint8;
} &let {
	proc:  bool = $context.connection.handle_client_init(this);
	proc2: bool = $context.flow.proc_rfb_share_flag(shared_flag);
} &length=1;

# ServerInit: framebuffer geometry, pixel format and desktop name.
type RFBServerInit = record {
	width:        uint16;
	height:       uint16;
	pixel_format: uint8[16];
	len:          uint32 &enforce(len < MAX_DATA_LENGTH);
	name:         uint8[len];
} &let {
	proc:  bool = $context.connection.handle_server_init(this);
	proc2: bool = $context.flow.proc_handle_server_params(this);
} &length=24 + len;

# One byte consumed when the state machine has gone off the rails.
type InvalidData(orig: bool) = record {
	invalid: uint8;
} &let {
	proc: bool = $context.connection.handle_invalid_data(orig);
} &length=1;

# Zero-length placeholder parsed while this direction is waiting on the peer.
type WaitData(orig: bool) = record {
	nothing: bytestring &length=0;
} &let {
	proc: bool = $context.connection.handle_wait_data(orig);
} &length=0;

# One-byte client message-type prefix; the body follows as ClientMessage.
type ClientMessageType = record {
	type: uint8;
} &let {
	proc: bool = $context.connection.handle_client_message_type(type);
} &length=1;

# Client message body, dispatched on the previously-parsed type byte.
type ClientMessage(type: uint8) = case type of {
	0 -> set_pixel_format:           ClientSetPixelFormat;
	2 -> set_encodings:              ClientSetEncodings;
	3 -> framebuffer_update_request: ClientFramebufferUpdateRequest;
	4 -> key_event:                  ClientKeyEvent;
	5 -> pointer_event:              ClientPointerEvent;
	6 -> cut_text:                   ClientCutText;
} &let {
	proc: bool = $context.connection.handle_client_message(type);
};

type ClientSetPixelFormat = record {
	pad:          uint8[3];
	pixel_format: uint8[16];
} &let {
	proc: bool = $context.connection.handle_client_set_pixel_format(this);
} &length=19;

type ClientSetEncodings = record {
	pad:           uint8;
	num_encodings: uint16;
	encodings:     uint32[num_encodings];
} &let {
	proc: bool = $context.connection.handle_client_set_encodings(this);
} &length=3 + (4 * num_encodings);

type ClientFramebufferUpdateRequest = record {
	incremental: uint8;
	xpos:        uint16;
	ypos:        uint16;
	width:       uint16;
	height:      uint16;
} &let {
	proc: bool = $context.connection.handle_client_framebuffer_update_request(this);
} &length=9;

type ClientKeyEvent = record {
	down_flag: uint8;
	pad:       uint16;
	key:       uint32;
} &let {
	proc: bool = $context.connection.handle_client_key_event(this);
} &length=7;

type ClientPointerEvent = record {
	button_mask: uint8;
	xpos:        uint16;
	ypos:        uint16;
} &let {
	proc: bool = $context.connection.handle_client_pointer_event(this);
} &length=5;

type ClientCutText = record {
	pad:  uint8[3];
	len:  uint32 &enforce(len < MAX_DATA_LENGTH);
	text: bytestring &length=len;
} &let {
	proc: bool = $context.connection.handle_client_cut_text(this);
} &length=(7 + len);

# One-byte server message-type prefix; the body follows as ServerMessage.
type ServerMessageType = record {
	type: uint8;
} &let {
	proc: bool = $context.connection.handle_server_message_type(type);
} &length=1;

# Server message body, dispatched on the previously-parsed type byte.
type ServerMessage(type: uint8) = case type of {
	0 -> framebuffer_update:     ServerFramebufferUpdate;
	1 -> set_color_map_entries:  ServerSetColorMapEntries;
	2 -> bell:                   ServerBell;
	3 -> cut_text:               ServerCutText;
} &let {
	proc: bool = $context.connection.handle_server_message(type);
};

# Pixel data of a framebuffer-update rectangle, dispatched on its encoding.
type PixelData(encoding: int32, x: uint16, y: uint16, w: uint16, h: uint16) = case encoding of {
	0  -> raw:      PD_Raw(w, h);
	1  -> copy_rec: PD_CopyRec;
	2  -> rre:      PD_RRE;
	5  -> hextile:  PD_Hextile;
	15 -> trle:     PD_TRLE;
	16 -> zrle:     PD_ZRLE;
	# TODO: binpac is not happy with negative values here
	#-239 -> cursor_pseudo: PD_PsuedoCursor;
	#-223 -> desktop_size: PD_PsuedoDesktopSize;
};

type PD_Raw(w: uint16, h: uint16) = record {
	pixels: bytestring &length=(w * h * $context.connection.get_bytes_per_pixel()) &transient;
} &length=(w * h * $context.connection.get_bytes_per_pixel());

type PD_CopyRec = record {
	xpos: uint16;
	ypos: uint16;
} &length=4;

type RRE_Subrect = record {
	pixel:  bytestring &length=$context.connection.get_bytes_per_pixel();
	xpos:   uint16;
	ypos:   uint16;
	width:  uint16;
	height: uint16;
} &length=$context.connection.get_bytes_per_pixel() + 8;

type PD_RRE = record {
	num_subrects: uint32;
	bg_pixel:     bytestring &length=$context.connection.get_bytes_per_pixel();
	subrects:     RRE_Subrect[num_subrects] &transient;
} &length=4 + $context.connection.get_bytes_per_pixel() + (num_subrects * ($context.connection.get_bytes_per_pixel() + 8));

type PD_Hextile = record {
	# TODO
	nothing: empty;
} &length=0;

type PD_TRLE = record {
	# TODO
	nothing: empty;
} &length=0;

type PD_ZRLE = record {
	len:       uint32;
	zlib_data: bytestring &length=len &transient;
} &length=(4 + len);

type PD_PsuedoCursor(w: uint16, h: uint16) = record {
	pixels:  bytestring &length=(w * h * $context.connection.get_bytes_per_pixel()) &transient;
	bitmask: bytestring &length=(h * ((w + 7) / 8)) &transient;
} &length=(w * h * $context.connection.get_bytes_per_pixel()) + (h * ((w + 7) / 8))

type PD_PsuedoDesktopSize = record {
	# Actually no further data
	nothing: empty;
} &length=0;

# One rectangle inside a FramebufferUpdate.
type Rectangle = record {
	xpos:       uint16;
	ypos:       uint16;
	width:      uint16;
	height:     uint16;
	encoding:   int32;
	pixel_data: PixelData(encoding, xpos, ypos, width, height);
	# TODO add in pixel_data length to &length
} &length=12;

type ServerFramebufferUpdate = record {
	pad:       uint8;
	num_rects: uint16;
	rects:     Rectangle[num_rects];
	# TODO add in Rectangle[] length to &length
} &length=3;

type RGB_Value = record {
	red:   uint16;
	green: uint16;
	blue:  uint16;
} &length=6;

type ServerSetColorMapEntries = record {
	pad:         uint8;
	first_color: uint16;
	num_colors:  uint16;
	colors:      RGB_Value[num_colors];
} &length=5 + (num_colors * 6)

type ServerBell = record {
	nothing: empty;
} &length=0;

type ServerCutText = record {
	pad:  uint8[3];
	len:  uint32 &enforce(len < MAX_DATA_LENGTH);
	text: bytestring &length=len;
} &length=(7 + len);

# Top-level client-direction PDU, dispatched on the connection's client state.
type RFB_PDU_request(state: uint8) = case state of {
	CLIENT_WAIT              -> wait:         WaitData(true);
	CLIENT_INVALID           -> invalid:      InvalidData(true);
	CLIENT_VERSION           -> version:      RFBProtocolVersion(true);
	CLIENT_AUTH_SELECTION    -> authtype:     RFBAuthTypeSelected; # version 3.7+
	CLIENT_AUTH_VNC_RESPONSE -> response:     RFBVNCAuthenticationResponse;
	CLIENT_AUTH_ARD_RESPONSE -> ard_response: RFBSecurityARDResponse;
	CLIENT_INIT              -> shareflag:    RFBClientInit;
	CLIENT_MESSAGE_TYPE      -> msg_type:     ClientMessageType;
	CLIENT_MESSAGE           -> msg:          ClientMessage($context.connection.get_next_msg_type(true));
};

# Top-level server-direction PDU, dispatched on the connection's server state.
type RFB_PDU_response(state: uint8) = case state of {
	SERVER_WAIT               -> wait:          WaitData(false);
	SERVER_INVALID            -> invalid:       InvalidData(false);
	SERVER_VERSION            -> version:       RFBProtocolVersion(false);
	SERVER_AUTH_TYPE          -> auth_type:     RFBSecurityType;
	SERVER_AUTH_TYPE37        -> auth_types37:  RFBSecurityTypes37;
	SERVER_AUTH_FAILURE       -> fail_reason:   RFBFailReasonString;
	SERVER_AUTH_VNC_CHALLENGE -> challenge:     RFBVNCAuthenticationRequest;
	SERVER_AUTH_ARD_CHALLENGE -> ard_challenge: RFBSecurityARDChallenge;
	SERVER_AUTH_RESULT        -> authresult :   RFBSecurityResult;
	SERVER_INIT               -> serverinit:    RFBServerInit;
	SERVER_MESSAGE_TYPE       -> msg_type:      ServerMessageType;
	# TODO: server message parsing (framebuffer update) is not completely implemented
	# as it is mostly uninteresting
	SERVER_MESSAGE            -> msg:           ServerMessage($context.connection.get_next_msg_type(false));
};

# Entry point: pick the parser for each direction from per-connection state.
type RFB_PDU(is_orig: bool) = case is_orig of {
	true  -> request:  RFB_PDU_request($context.connection.get_state(true));
	false -> response: RFB_PDU_response($context.connection.get_state(false));
} &byteorder = bigendian;
sieverssj/incubator-streams
streams-osgi-components/activity-registration/src/main/java/org/apache/streams/osgi/components/impl/ActivityPublisherRegistrationImpl.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.streams.osgi.components.impl;

import java.util.Date;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.streams.osgi.components.ActivityPublisherRegistration;
import org.apache.streams.osgi.components.activityconsumer.ActivityConsumer;
import org.apache.streams.osgi.components.activityconsumer.impl.PushActivityConsumer;

/**
 * Registration handler for activity publishers. The incoming body is expected
 * to be an {@link ActivityConsumer}; it is flagged as authenticated and
 * returned so the caller can add it to the consumer warehouse.
 */
public class ActivityPublisherRegistrationImpl implements ActivityPublisherRegistration {

    private static final transient Log LOG = LogFactory.getLog(ActivityPublisherRegistrationImpl.class);

    private boolean verbose = true;
    private String prefix = "Activity Publisher Registration";

    /**
     * Marks the supplied consumer as authenticated and hands it back.
     *
     * @param body the registering consumer; must be an {@link ActivityConsumer}
     * @return the same consumer, with its authenticated flag set
     */
    public Object register(Object body) {
        // NOTE(review): authorization is currently a stub -- the consumer is
        // unconditionally marked authenticated before being returned.
        String message = prefix + " set body: " + body + " " + new Date();
        LOG.info(">> call >>" + message);

        ActivityConsumer consumer = (ActivityConsumer) body;
        consumer.setAuthenticated(true);
        return consumer;
    }

    public boolean isVerbose() {
        return verbose;
    }

    public void setVerbose(boolean verbose) {
        this.verbose = verbose;
    }

    public String getPrefix() {
        return prefix;
    }

    public void setPrefix(String prefix) {
        this.prefix = prefix;
    }
}
lebicojerome/toolkit
test/toolkit.test.js
describe('Toolkit', () => {

  it('should call lifecycle methods in proper order', async () => {

    // given
    const ParentSymbol = Symbol.for('my/parent');
    const ChildSymbol = Symbol.for('my/child');

    // Every component appends its lifecycle events here so the exact
    // ordering can be asserted after rendering completes.
    const lifecycle = [];
    const record = entry => lifecycle.push(entry);

    class App extends opr.Toolkit.Root {
      onCreated() {
        record('App created');
      }
      onAttached() {
        record('App attached');
      }
      render() {
        return [ParentSymbol];
      }
    }
    loader.define('my/app', App);

    class Parent extends opr.Toolkit.Component {
      onCreated() {
        record('Parent created');
      }
      onAttached() {
        record('Parent attached');
      }
      render() {
        return [ChildSymbol];
      }
    }
    loader.define('my/parent', Parent);

    class Child extends opr.Toolkit.Component {
      onCreated() {
        record('Child created');
      }
      onAttached() {
        record('Child attached');
      }
      render() {
        return ['div'];
      }
    }
    loader.define('my/child', Child);

    // Render the app into a hidden container attached to the document.
    const container = document.createElement('section');
    container.style.display = 'none';
    document.body.appendChild(container);

    opr.Toolkit.configure({
      bundles: [],
      plugins: [],
    });
    await opr.Toolkit.render('my/app', container);

    // then: creation runs top-down, attachment runs bottom-up
    assert.deepEqual(lifecycle, [
      'App created',
      'Parent created',
      'Child created',
      'Child attached',
      'Parent attached',
      'App attached',
    ]);
  });
});
mis1/tetris
app/common/constants.js
/**
 * Shared enums and constants used across the tetris game.
 *
 * @module constants
 */

/**
 * Enum base class from enumify - A JavaScript library for enums.
 * @external Enum
 * @see https://github.com/rauschma/enumify
 */
import {Enum} from 'enumify';

/**
 * Movement directions.
 * @extends {external:Enum}
 * @property {} LEFT
 * @property {} RIGHT
 * @property {} DOWN
 * @property {} UP
 */
class Direction extends Enum {}
Direction.initEnum(['LEFT', 'RIGHT', 'DOWN', 'UP']);

/**
 * Lifecycle states of a game.
 * @extends {external:Enum}
 * @property {} READY
 * @property {} STARTED
 * @property {} PAUSED
 * @property {} STOPPED
 * @property {} FINISHED
 */
class GameStatus extends Enum {}
GameStatus.initEnum(['READY', 'STARTED', 'PAUSED', 'STOPPED', 'FINISHED']);

/**
 * Keys accepted as player input.
 * @extends {external:Enum}
 * @property {} LEFT
 * @property {} RIGHT
 * @property {} UP
 * @property {} DOWN
 */
class InputKey extends Enum {}
InputKey.initEnum(['LEFT', 'RIGHT', 'UP', 'DOWN']);

/**
 * Outcomes of attempting to move a shape.
 * @extends {external:Enum}
 * @property {} YES
 * @property {} NO
 * @property {} FREEZE
 */
class MoveResult extends Enum {}
MoveResult.initEnum(['YES', 'NO', 'FREEZE']);

/**
 * Lookup table translating each InputKey into its matching Direction.
 * @type {Object}
 * @see module:constants~InputKey
 * @see module:constants~Direction
 */
const inputkey2Direction = {
  [InputKey.UP]: Direction.UP,
  [InputKey.LEFT]: Direction.LEFT,
  [InputKey.RIGHT]: Direction.RIGHT,
  [InputKey.DOWN]: Direction.DOWN,
};

/**
 * Board cells start out holding clearSymbol; a shape moving into a cell
 * overwrites it with the shape's own symbol.
 * @type {String}
 */
const clearSymbol = '0';

export {Direction, GameStatus, InputKey, inputkey2Direction, clearSymbol, MoveResult};
rit1200/kairon
kairon/cli/testing.py
import asyncio
from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
from typing import List

from loguru import logger
from rasa.cli import SubParsersAction

from kairon.events.events import EventsTrigger


def run_tests_on_model(args):
    """Kick off model testing for the bot/user parsed from the CLI.

    Logs the bot and user identifiers, then delegates the actual work to
    ``EventsTrigger.trigger_model_testing``.
    """
    logger.info("bot: {}", args.bot)
    logger.info("user: {}", args.user)
    EventsTrigger.trigger_model_testing(args.bot, args.user)


def add_subparser(subparsers: SubParsersAction, parents: List[ArgumentParser]):
    """Register the ``test`` sub-command on the given subparser collection.

    The sub-command takes positional ``bot`` and ``user`` arguments and
    dispatches to :func:`run_tests_on_model` when invoked.
    """
    parser = subparsers.add_parser(
        "test",
        conflict_handler="resolve",
        formatter_class=ArgumentDefaultsHelpFormatter,
        parents=parents,
        help="Tests model on existing stories or test stories.",
    )
    parser.add_argument('bot', type=str, help="Bot id for which command is executed", action='store')
    parser.add_argument('user', type=str, help="Kairon user who is initiating the command", action='store')
    parser.set_defaults(func=run_tests_on_model)
psakar/Resteasy
resteasy-jaxrs-testsuite/src/test/java/org/jboss/resteasy/test/nextgen/resource/MapProviderTest.java
package org.jboss.resteasy.test.nextgen.resource;

import org.jboss.resteasy.test.BaseResourceTest;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;

import javax.ws.rs.Consumes;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedHashMap;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;
import javax.ws.rs.ext.MessageBodyReader;
import javax.ws.rs.ext.MessageBodyWriter;
import javax.ws.rs.ext.Provider;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.lang.annotation.Annotation;
import java.lang.reflect.Type;

import static org.jboss.resteasy.test.TestPortProvider.generateURL;

/**
 * Verifies that a custom application-supplied MessageBodyReader/Writer for
 * MultivaluedMap is selected over the builtin form-urlencoded provider: the
 * reader and writer stamp marker strings ("MapReader"/"MapWriter") into the
 * payload, and the test asserts the writer's marker appears in the response.
 *
 * @author <a href="mailto:<EMAIL>"><NAME></a>
 * @version $Revision: 1 $
 */
public class MapProviderTest extends BaseResourceTest
{
   /** JAX-RS resource that echoes the posted map straight back. */
   @Path("/")
   public static class Resource {
      @Path("map")
      @POST
      public MultivaluedMap<String, String> map(MultivaluedMap<String, String> map) {
         return map;
      }
   }

   /**
    * Base class that derives marker names and an entity length from the
    * concrete subclass's simple name (e.g. "MapProvider" -> "MapReader",
    * "MapWriter").
    */
   public static abstract class AbstractProvider {
      // Entity length reported by getSize(): twice the length of the
      // "<name>" + "writer" marker ("Map" + "writer" -> 2 * 9 = 18).
      public long getLength() {
         String name = getClass().getSimpleName().replace("Provider", "");
         long size = "writer".length() + name.length();
         return 2*size;
      }

      // e.g. "MapProvider" -> "MapWriter"
      public String getWriterName() {
         String name = getClass().getSimpleName().replace("Provider", "Writer");
         return name;
      }

      // e.g. "MapProvider" -> "MapReader"
      public String getReaderName() {
         String name = getClass().getSimpleName().replace("Provider", "Reader");
         return name;
      }
   }

   /**
    * Application-level provider for form-urlencoded MultivaluedMap bodies.
    * Reading produces a map containing {"MapProvider": "MapReader"}; writing
    * emits the value stored under the provider's own name followed by the
    * writer marker, so round-tripping yields "MapReaderMapWriter".
    */
   @Provider
   @Produces(MediaType.APPLICATION_FORM_URLENCODED)
   @Consumes(MediaType.APPLICATION_FORM_URLENCODED)
   public static class MapProvider extends AbstractProvider implements
         MessageBodyReader<MultivaluedMap<String, String>>,
         MessageBodyWriter<MultivaluedMap<String, String>> {

      @Override
      public long getSize(MultivaluedMap<String, String> t, Class<?> type,
                          Type genericType, Annotation[] annotations, MediaType mediaType) {
         return getLength();
      }

      @Override
      public boolean isWriteable(Class<?> type, Type genericType,
                                 Annotation[] annotations, MediaType mediaType) {
         return MultivaluedMap.class.isAssignableFrom(type);
      }

      @Override
      public void writeTo(MultivaluedMap<String, String> t, Class<?> type,
                          Type genericType, Annotation[] annotations, MediaType mediaType,
                          MultivaluedMap<String, Object> httpHeaders, OutputStream entityStream)
            throws IOException, WebApplicationException {
         // Emit the value the reader stored under this provider's name,
         // followed by the writer's own marker string.
         entityStream.write(t.getFirst(getClass().getSimpleName()).getBytes());
         entityStream.write(getWriterName().getBytes());
      }

      @Override
      public boolean isReadable(Class<?> type, Type genericType,
                                Annotation[] annotations, MediaType mediaType) {
         return isWriteable(type, genericType, annotations, mediaType);
      }

      @Override
      public MultivaluedMap<String, String> readFrom(
            Class<MultivaluedMap<String, String>> type, Type genericType,
            Annotation[] annotations, MediaType mediaType,
            MultivaluedMap<String, String> httpHeaders, InputStream entityStream)
            throws IOException, WebApplicationException {
         // Ignore the wire payload entirely; record that this reader ran.
         MultivaluedMap<String, String> map = new MultivaluedHashMap<String, String>();
         map.add(getClass().getSimpleName(), getReaderName());
         return map;
      }
   }

   // Shared JAX-RS client, created in setup() and closed in cleanup().
   static Client client;

   @BeforeClass
   public static void setup() {
      addPerRequestResource(Resource.class);
      deployment.getProviderFactory().register(MapProvider.class);
      client = ClientBuilder.newClient();
   }

   @AfterClass
   public static void cleanup() {
      client.close();
   }

   /**
    * POSTs a form-encoded body and asserts the custom writer handled the
    * response (its "MapWriter" marker must appear in the returned entity).
    */
   @Test
   public void testMap() {
      // writers sorted by type, mediatype, and then by app over builtin
      Response response = client.target(generateURL("/map")).request(MediaType.APPLICATION_FORM_URLENCODED_TYPE).post(Entity.entity("map", MediaType.APPLICATION_FORM_URLENCODED_TYPE));
      Assert.assertEquals(response.getStatus(), 200);
      String data = response.readEntity(String.class);
      System.out.println(data);
      Assert.assertTrue(data.contains("MapWriter"));
      response.close();
   }
}
Dbevan/SunderingShadows
cmds/feats/i/_impale.c
#include <std.h>
#include <daemons.h>
inherit FEAT;

#define FEATTIMER 35

int in_shapeshift;

void create()
{
    ::create();
    feat_type("instant");
    feat_category("TwoHandedWeapons");
    feat_name("impale");
    feat_syntax("impale TARGET");
    feat_prereq("Sweeping Blow, Blade block or Parry");
    feat_desc("The Impale feat has a chance of impaling your target or staggering them back into another attacker. Both targets will take damage and have a chance to be stunned. If used without an argument this feat will pick up a random attacker. Be aware that this power affects multiple targets. A druid with the 'mastery of fang and claw' feat may also use this feat while in dragon form, even if it has not been purchased directly.");
    set_save("fort");
    set_required_for(({ "light weapon", "strength of arm" }));
}

int allow_shifted() { return 0; }

// Requires (blade block or parry) AND sweeping blow on top of the base checks.
int prerequisites(object ob)
{
    if (!objectp(ob)) {
        return 0;
    }
    if (!FEATS_D->has_feat(ob, "blade block") && !FEATS_D->has_feat(ob, "parry")) {
        dest_effect();
        return 0;
    }
    if (!FEATS_D->has_feat(ob, "sweepingblow")) {
        dest_effect();
        return 0;
    }
    return ::prerequisites(ob);
}

// Player command entry point: clones a fresh copy of this feat and starts it.
int cmd_impale(string str)
{
    object feat;
    if (!objectp(TP)) {
        return 0;
    }
    feat = new(base_name(TO));
    feat->setup_feat(TP, str);
    return 1;
}

// Validates the attempt (stance, weapon, cooldown, target) and queues the
// actual strike via spell_kill(); the damage is resolved in execute_attack().
void execute_feat()
{
    object* weapons;
    string type;
    mapping tempmap;
    ::execute_feat();
    if (!objectp(caster)) {
        dest_effect();
        return;
    }
    tempmap = caster->query_property("using impale");
    // No explicit target: pick a random attacker that is off cooldown.
    if (!objectp(target)) {
        object* attackers = caster->query_attackers();
        if (mapp(tempmap)) {
            attackers = filter_array(attackers, (: $2[$1] < time() :), tempmap);
        }
        if (!sizeof(attackers)) {
            tell_object(caster, "%^BOLD%^Nobody to impale.%^RESET%^");
            dest_effect();
            return;
        }
        target = attackers[random(sizeof(attackers))];
    }
    if ((int)caster->query_property("using instant feat")) {
        tell_object(caster, "You are already in the middle of using a feat!");
        dest_effect();
        return;
    }
    weapons = caster->query_wielded();
    // Druid dragon-form shapeshift may use this feat without a weapon.
    if (caster->query_property("shapeshifted") && !caster->query_property("altered")) {
        in_shapeshift = 1;
    }else {
        in_shapeshift = 0;
    }
    if (!in_shapeshift) {
        if (!caster->validate_combat_stance("two hander") || !sizeof(weapons)) {
            tell_object(caster, "You need to be wielding a two handed weapon to use this feat.");
            dest_effect();
            return;
        }
        if (weapons[0]->is_lrweapon()) {
            tell_object(caster, "That weapon is not designed for such an attack.");
            dest_effect();
            return;
        }
    }
    // Per-target cooldown: each victim can only be impaled every FEATTIMER secs.
    if (mapp(tempmap)) {
        if (tempmap[target] > time()) {
            tell_object(caster, "That target is still wary of such an attack!");
            dest_effect();
            return;
        }
    }
    if (!objectp(target) || !present(target, place)) {
        tell_object(caster, "You don't have a target!");
        dest_effect();
        return;
    }
    if (target == caster) {
        tell_object(caster, "Impale yourself? You would have to be as dumb as an ogre to do that!");
        dest_effect();
        return;
    }
    if (in_shapeshift) {
        tell_object(caster, "%^BOLD%^%^BLACK%^You rush and impale " + target->QCN + " onto your claws!");
    } else {
        tell_object(caster, "%^BOLD%^%^BLACK%^You step back and quickly thrust your weapon at " + target->QCN + " with all of your might!%^RESET%^");
        tell_object(target, "%^BOLD%^%^BLACK%^" + caster->QCN + " steps back and quickly thrusts " + caster->QP + " weapon at you with all of " + caster->QP + " might!%^RESET%^");
        tell_room(place, "%^BOLD%^%^BLACK%^" + caster->QCN + " steps back and quickly thrusts " + caster->QP + " weapon at " + target->QCN + " with all of " + caster->QP + " might!%^RESET%^", ({ caster, target }));
    }
    caster->use_stamina(roll_dice(1, 6));
    caster->set_property("using instant feat", 1);
    spell_kill(target, caster);
    return;
}

// Resolves the impale: to-hit, damage, stun saves, optional secondary target
// hit, per-target cooldown bookkeeping and "the reaping" self-heal.
void execute_attack()
{
    object* weapons, * attackers, * okattackers, target_two;
    string type, * keyz, theweapon;
    int i, dam, mod, enchant, timerz, mult, reaping, diff, res;
    mapping tempmap, newmap;
    if (!objectp(caster)) {
        dest_effect();
        return;
    }
    caster->remove_property("using instant feat");
    ::execute_attack();
    weapons = caster->query_wielded();
    if (caster->query_property("shapeshifted") && !caster->query_property("altered")) {
        in_shapeshift = 1;
    }else {
        in_shapeshift = 0;
    }
    if (!in_shapeshift) {
        if (sizeof(weapons) < 2) {
            tell_object(caster, "You need to be wielding a two handed weapon to use this feat!");
            dest_effect();
            return;
        }
        // A two-hander occupies both wield slots with the same object.
        if (weapons[0] != weapons[1]) {
            tell_object(caster, "You need to be wielding a two handed weapon to use this feat!");
            dest_effect();
            return;
        }
        if (weapons[0]->is_lrweapon()) {
            tell_object(caster, "That weapon is not designed for such an attack.");
            dest_effect();
            return;
        }
    }
    if (!objectp(target) || !present(target, place)) {
        tell_object(caster, "Your target is no longer here!");
        dest_effect();
        return;
    }
    if (in_shapeshift) {
        type = "blunt";
    }else {
        type = weapons[0]->query_type();
    }
    if (!stringp(type) || !type || type == "") {
        type = "sharp";
    }
    // Collapse weapon types to "sharp" (slashing/piercing) vs "blunt".
    // BUGFIX: the second strsrch had its paren misplaced —
    // strsrch(type, "pierc" == -1) compared the string to -1 instead of
    // searching for "pierc", so piercing weapons were treated as blunt.
    if (strsrch(type, "slash") == -1 && strsrch(type, "pierc") == -1) {
        type = "blunt";
    }else {
        type = "sharp";
    }
    if (sizeof(weapons)) {
        enchant = (int)weapons[0]->query_property("enchantment");
    }else {
        enchant = 0;
    }
    tempmap = caster->query_property("using impale"); // adding per-target tracking. -N, 9/10.
    if (!mapp(tempmap)) {
        tempmap = ([]);
    }
    if (tempmap[target]) {
        map_delete(tempmap, target);
    }
    attackers = (object*)caster->query_attackers();
    okattackers = ({});
    // Pick up only attackers who have not been impaled recently. -N, 9/10.
    if (sizeof(attackers)) {
        for (i = 0; i < sizeof(attackers); i++) {
            if (!objectp(attackers[i])) {
                continue;
            }
            if (attackers[i] == target) {
                continue;
            }
            if (tempmap[attackers[i]] > time()) {
                continue;
            }
            okattackers += ({ attackers[i] });
            break;
        }
    }
    if (sizeof(okattackers)) {
        target_two = okattackers[0];
    }
    // Rebuild the cooldown map, dropping entries whose victim is gone.
    newmap = ([]);
    keyz = keys(tempmap);
    if (sizeof(keyz)) {
        for (i = 0; i < sizeof(keyz); i++) {
            if (objectp(keyz[i])) {
                newmap += ([ keyz[i] : tempmap[keyz[i]] ]);
            }
        }
    }
    timerz = time() + FEATTIMER;
    newmap += ([ target:timerz ]);
    delay_subject_msg(target, FEATTIMER, "%^BOLD%^%^WHITE%^" + target->QCN + " can be %^CYAN%^impaled%^WHITE%^ again.%^RESET%^");
    caster->remove_property("using impale");
    caster->set_property("using impale", newmap);
    // thaco(): 0 = clean miss (caster left open), -1 = immune, else hit.
    if (!(res = thaco(target, enchant))) {
        tell_object(caster, "%^BOLD%^%^MAGENTA%^" + target->QCN + " sidesteps your thrust at the last instant, leaving you open to attack!%^RESET%^");
        tell_object(target, "%^BOLD%^%^MAGENTA%^You sidestep " + caster->QCN + "'s attack at the last instant, leaving " + caster->QO + " open to attack!%^RESET%^");
        tell_room(place, "%^BOLD%^%^MAGENTA%^" + target->QCN + " sidesteps " + caster->QCN + "'s attack at the last instant, leaving " + caster->QP + " open to attack!%^RESET%^", ({ caster, target }));
        caster->set_paralyzed(roll_dice(1, 6), "%^YELLOW%^You are trying to get back into position!%^RESET%^");
        dest_effect();
        return;
    }else if (res == -1) {
        if (stringp(caster->query("featMiss"))) {
            tell_object(caster, caster->query("featMiss") + " " + query_feat_name() + "!");
            caster->delete("featMiss");
        }else {
            tell_object(caster, "%^RED%^" + target->QCN + " is totally unaffected!%^RESET%^");
            tell_room(place, "%^RED%^" + target->QCN + " is totally unaffected!%^RESET%^", ({ target, caster }));
        }
        dest_effect();
        return;
    }
    mult = 6;
    // BUGFIX: `reaping` was declared and tested below but never assigned, so
    // the self-heal block was dead code. Flag it when the feat is usable.
    if (FEATS_D->usable_feat(caster, "the reaping")) {
        mult = 10;
        reaping = 1;
    }
    // Picking up 12 as a benchmark for druid shift, two-hand sword equiv.
    if (sizeof(weapons)) {
        dam = weapons[0]->query_wc();
    }else {
        dam = 12;
    }
    mod = BONUS_D->query_stat_bonus(caster, "strength");
    dam += clevel; // let it scale properly with caster level. -N, 9/10
    dam = roll_dice(dam, mult) + mod + caster->query_damage_bonus();
    if (!in_shapeshift) {
        theweapon = weapons[0]->query_short();
    }else {
        theweapon = "body";
    }
    switch (type) {
    case "sharp":
        tell_object(caster, "%^BOLD%^%^RED%^You impale " + target->QCN + " with your " + theweapon + ", running " + target->QO + " through violently!%^RESET%^");
        tell_object(target, "%^BOLD%^%^RED%^" + caster->QCN + " impales you with " + caster->QP + " " + theweapon + ", running you through violently!%^RESET%^");
        tell_room(place, "%^BOLD%^%^RED%^" + caster->QCN + " impales " + target->QCN + " with " + caster->QP + " " + theweapon + ", running " + target->QO + " through violently!%^RESET%^", ({ target, caster }));
        break;
    case "blunt":
        tell_object(caster, "%^BOLD%^%^BLUE%^You slam your " + theweapon + " into " + target->QCN + " brutally, staggering " + target->QO + " backwards!%^RESET%^");
        tell_object(target, "%^BOLD%^%^BLUE%^" + caster->QCN + " slams " + caster->QP + " " + theweapon + " into you, staggering you backwards!%^RESET%^");
        tell_room(place, "%^BOLD%^%^BLUE%^" + caster->QCN + " slams " + caster->QP + " " + theweapon + " into " + target->QCN + ", staggering " + target->QP + " backwards!%^RESET%^", ({ target, caster }));
        break;
    }
    // Fort save vs. strength bonus or the primary target is stunned.
    if (!do_save(target, mod)) {
        tell_object(caster, "%^BOLD%^%^GREEN%^Your attack leaves " + target->QCN + " stunned and unable to move!%^RESET%^");
        tell_object(target, "%^BOLD%^%^GREEN%^" + caster->QCN + "'s attack leaves you stunned and unable to move!%^RESET%^");
        tell_room(place, "%^BOLD%^%^GREEN%^" + caster->QCN + "'s attack leaves " + target->QCN + " stunned and unable to move!%^RESET%^", ({ target, caster }));
        target->set_paralyzed(roll_dice(2, 4), "%^YELLOW%^You are struggling to move!%^RESET%^");
    }
    // Secondary target: hit through (sharp) or staggered into (blunt).
    if (objectp(target_two)) {
        switch (type) {
        case "sharp":
            // BUGFIX: removed a stray comma before the second name and added
            // the missing %^RESET%^ terminator on the target_two message.
            tell_object(caster, "%^BOLD%^%^YELLOW%^Your " + theweapon + " penetrates all the way through " + target->QCN + " and hits " + target_two->QCN + "!%^RESET%^");
            tell_object(target_two, "%^BOLD%^%^YELLOW%^" + caster->QCN + "'s " + theweapon + " penetrates all the way through " + target->QCN + " and hits you!%^RESET%^");
            tell_object(target, "%^BOLD%^%^YELLOW%^" + caster->QCN + "'s " + theweapon + " penetrates all the way through you and hits " + target_two->QCN + "!%^RESET%^");
            tell_room(place, "%^BOLD%^%^YELLOW%^" + caster->QCN + "'s " + theweapon + " penetrates all the way through " + target->QCN + " and hits " + target_two->QCN + "!%^RESET%^", ({ target, caster, target_two }));
            break;
        case "blunt":
            tell_object(caster, "%^BOLD%^%^CYAN%^Your attack staggers " + target->QCN + " back, slamming " + target->QO + " into " + target_two->QCN + "!%^RESET%^");
            tell_object(target, "%^BOLD%^%^CYAN%^" + caster->QCN + "'s attack staggers you back into " + target_two->QCN + "!%^RESET%^");
            tell_object(target_two, "%^BOLD%^%^CYAN%^" + caster->QCN + "'s attack staggers " + target->QCN + " backwards into you!%^RESET%^");
            tell_room(place, "%^BOLD%^%^CYAN%^" + caster->QCN + "'s attack staggers " + target->QCN + " back into " + target_two->QCN + "!%^RESET%^", ({ target, target_two, caster }));
            break;
        }
        if (!do_save(target_two, mod)) {
            tell_object(target_two, "%^BOLD%^%^YELLOW%^The attack staggers you, knocking you off balance!%^RESET%^");
            tell_room(place, "%^BOLD%^%^YELLOW%^" + target_two->QCN + " is staggered and knocked out of balance!%^RESET%^", target_two);
            target_two->set_paralyzed(roll_dice(2, 4), "%^YELLOW%^You are trying to regain your balance!%^RESET%^");
        }
    }
    // Primary target damage, typed by the weapon (default "piercing").
    {
        string dtype;
        if (sizeof(weapons)) {
            if (objectp(weapons[0])) {
                dtype = weapons[0]->query_damage_type();
            }
        }
        dtype = dtype ? dtype : "piercing";
        if (target->query_property("weapon resistance")) {
            // NOTE(review): when enchant >= resistance no damage at all is
            // applied here (original behavior, preserved) — verify intent.
            if (enchant < (int)target->query_property("weapon resistance")) {
                target->cause_typed_damage(target, target->return_target_limb(), 0, dtype);
            }
        }else {
            target->cause_typed_damage(target, target->return_target_limb(), dam, dtype);
        }
    }
    if (objectp(target_two)) {
        if (caster->query_property("shapeshifted") && caster->query_unarmed_wc() >= target_two->query_property("weapon resistance")) {
            target_two->cause_typed_damage(target_two, target_two->return_target_limb(), dam, "piercing");
        }else if (target_two->query_property("weapon resistance")) {
            if (enchant < (int)target_two->query_property("weapon resistance") && sizeof(weapons)) {
                target_two->cause_typed_damage(target_two, target_two->return_target_limb(), 0, weapons[0]->query_damage_type());
            }
        }else {
            if (sizeof(weapons)) {
                target_two->cause_typed_damage(target_two, target_two->return_target_limb(), dam, weapons[0]->query_damage_type());
            }
        }
        newmap += ([ target_two:timerz ]);
    }
    // "The reaping": convert part of the carnage into hp / temporary hp.
    if (reaping) {
        if (objectp(target_two)) {
            mod = clevel * 3;
        }else {
            mod = to_int(clevel * 1.5);
        }
        if (caster->query_hp() < caster->query_max_hp()) {
            diff = caster->query_max_hp() - caster->query_hp();
            if (diff >= mod) {
                caster->add_hp(mod);
                mod = 0;
            }else {
                caster->add_hp(diff);
                mod = mod - diff;
            }
        }
        if (mod && (caster->query_extra_hp() < mod)) {
            caster->add_extra_hp(mod - caster->query_extra_hp());
        }
        tell_object(caster, "%^BOLD%^%^RED%^You are filled with bloodlust and eagerness for battle as you reap your foes!");
    }
    caster->remove_property("using impale");
    caster->set_property("using impale", newmap);
    dest_effect();
}

void dest_effect()
{
    ::dest_effect();
    remove_feat(TO);
    return;
}
maze-runnar/modal-component
modal/node_modules/@storybook/core/src/client/manager/conditional-polyfills.js
<gh_stars>1-10 import { window } from 'global'; export const importPolyfills = () => { const polyfills = []; if (!window.fetch) { // manually patch window.fetch; // see issue: <https://github.com/developit/unfetch/issues/101#issuecomment-454451035> const patch = ({ default: fetch }) => { window.fetch = fetch; }; polyfills.push(import('unfetch/dist/unfetch').then(patch)); } return Promise.all(polyfills); };
orchestracities/boost
orionadapter/cache/bootstrap.go
<filename>orionadapter/cache/bootstrap.go package cache // NOTE. Concurrency. Treat this var as a const, you can't touch this :-) var cached *store // Init gets the cache infrastructure ready for use. // It must be the first call to this module and must be done in the main // function before any other thread can possibly call the functions this // module exports. func Init() error { s, err := newStore() if err != nil { return err } cached = s return nil }
kurbmedia/transit-rails
app/controllers/transit/medias_controller.rb
<reponame>kurbmedia/transit-rails module Transit class MediasController < TransitController helper_method :resource, :collection respond_to :json def index @menus = Transit::Media.all respond_with(collection) end def show respond_with(resource) end def create @menu = Transit::Media.new(media_params) resource.save respond_with(resource) and return end def update resource.update(media_params) respond_with(resource) end def destroy resource.destroy respond_with(resource) end protected ## # All available uploads # def collection @menus ||= Transit::Media.order('created_at ASC').all end ## # Optional strong params support # def media_params params.require(:media).permit([ :name, :folder_id ]) end ## # The media item currently being manipulated. # def resource @menu ||= params[:id].present? ? Transit::Media.find(params[:id]) : Transit::Media.new end end end
phatblat/macOSPrivateFrameworks
PrivateFrameworks/ScreenReader/SCRSound.h
//
//     Generated by class-dump 3.5 (64 bit).
//
//     class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2013 by <NAME>.
//

#import <ScreenReader/SCROutputSound.h>

@class NSData, NSString, NSURL;

// Reverse-engineered declaration of ScreenReader's internal sound object:
// a playable sound identified by soundID, backed by a name, URL or raw data,
// with play/stop control and completion-callback bookkeeping.
// NOTE(review): semantics below inferred from class-dump signatures only.
__attribute__((visibility("hidden")))
@interface SCRSound : SCROutputSound
{
    struct nssound_info *_info;                 // opaque driver-side bookkeeping — TODO confirm
    BOOL _ensureSingleCompletionCallbacks;
    BOOL __completionCallbackNeedsSending;
    int _busID;
    float _volume;
    unsigned int __soundFlags;
    unsigned long long _soundID;
    double _audioDuration;
    struct OpaqueAudioFileID *__audioFile;      // AudioToolbox AudioFileID handle
    NSString *__soundName;
    NSURL *__soundURL;
    NSData *__soundData;
}

// Global switch used by tests/automation to suppress actual audio output.
+ (BOOL)shouldShortCircuitSoundOutput;
+ (void)setShouldShortCircuitSoundOutput:(BOOL)arg1;
// Class-level registry mapping soundIDs to live SCRSound instances.
+ (id)_soundForID:(unsigned long long)arg1;
+ (void)_unregisterSound:(id)arg1;
+ (void)_registerSound:(id)arg1;
+ (void)initialize;
@property(nonatomic, setter=_setCompletionCallbackNeedsSending:) BOOL _completionCallbackNeedsSending; // @synthesize _completionCallbackNeedsSending=__completionCallbackNeedsSending;
@property(nonatomic, setter=_setSoundFlags:) unsigned int _soundFlags; // @synthesize _soundFlags=__soundFlags;
@property(copy, nonatomic, setter=_setSoundData:) NSData *_soundData; // @synthesize _soundData=__soundData;
@property(copy, nonatomic, setter=_setSoundURL:) NSURL *_soundURL; // @synthesize _soundURL=__soundURL;
@property(copy, nonatomic, setter=_setSoundName:) NSString *_soundName; // @synthesize _soundName=__soundName;
@property(nonatomic, setter=_setAudioFile:) struct OpaqueAudioFileID *_audioFile; // @synthesize _audioFile=__audioFile;
@property(nonatomic) BOOL ensureSingleCompletionCallbacks; // @synthesize ensureSingleCompletionCallbacks=_ensureSingleCompletionCallbacks;
@property(nonatomic) double audioDuration; // @synthesize audioDuration=_audioDuration;
@property(nonatomic) float volume; // @synthesize volume=_volume;
@property(nonatomic) int busID; // @synthesize busID=_busID;
@property(nonatomic) unsigned long long soundID; // @synthesize soundID=_soundID;
- (void).cxx_destruct;
- (id)description;
- (void)_informDelegateOfCompletionIfNeeded:(BOOL)arg1;
- (void)_audioCompletedCleanup:(struct __CFBoolean *)arg1;
- (void)audioCompletedCleanup;
- (void)_audioSchedulingComplete;
- (BOOL)_audioFailed:(id)arg1;
// Playback control surface.
- (BOOL)stopImmediately;
- (BOOL)stop;
- (BOOL)play;
- (BOOL)isPlaying;
- (id)copyWithZone:(struct _NSZone *)arg1;
- (void)dealloc;
- (id)_initWithContentsOfURL:(id)arg1;
- (id)_initWithContentsOfFile:(id)arg1;
- (void)_postInitialization;
@end
NASA-DEVELOP/dnppy
dnppy/radar/decibel_convert.py
__author__ = ["<NAME>, <EMAIL>", "<NAME>", "Jwely"] from dnppy import raster import numpy def decibel_convert(filename): """ Converts the input UAVSAR .grd file into units of decibels. Note that a .hdr file must be created and accompany the .grd/.inc files for this to work :param filename: the full file path string for the .grd data file :return outname: filepath to output file created by this function. """ #arcpy.CheckOutExtension("Spatial") inRaster, meta = raster.to_numpy(filename) dB_raster = 10 * numpy.log10(inRaster) outname = filename.replace(".grd", "_dB.tif") raster.from_numpy(dB_raster, meta, outname) return outname
aybe/lazyusf
rsp_hle/audio.h
<reponame>aybe/lazyusf /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Mupen64plus-rsp-hle - audio.h * * Mupen64Plus homepage: http://code.google.com/p/mupen64plus/ * * Copyright (C) 2014 <NAME> * * * * This program is free software; you can redistribute it and/or modify * * it under the terms of the GNU General Public License as published by * * the Free Software Foundation; either version 2 of the License, or * * (at your option) any later version. * * * * This program is distributed in the hope that it will be useful, * * but WITHOUT ANY WARRANTY; without even the implied warranty of * * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * * GNU General Public License for more details. * * * * You should have received a copy of the GNU General Public License * * along with this program; if not, write to the * * Free Software Foundation, Inc., * * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ #ifndef AUDIO_H #define AUDIO_H #include <stddef.h> #include <stdint.h> extern const int16_t RESAMPLE_LUT[64 * 4]; int32_t rdot(size_t n, const int16_t *x, const int16_t *y); static inline int16_t adpcm_predict_sample(uint8_t byte, uint8_t mask, unsigned lshift, unsigned rshift) { int16_t sample = (uint16_t)(byte & mask) << lshift; sample >>= rshift; /* signed */ return sample; } void adpcm_compute_residuals(int16_t* dst, const int16_t* src, const int16_t* cb_entry, const int16_t* last_samples, size_t count); #endif
akash2099/LeetCode-Problems
23 Merge k Sorted Lists.cpp
<reponame>akash2099/LeetCode-Problems<gh_stars>1-10 #define ppi pair<int,int> static int fastio=[](){ std::ios::sync_with_stdio(false); cin.tie(NULL); cout.tie(NULL); return 0; }(); /** * Definition for singly-linked list. * struct ListNode { * int val; * ListNode *next; * ListNode() : val(0), next(nullptr) {} * ListNode(int x) : val(x), next(nullptr) {} * ListNode(int x, ListNode *next) : val(x), next(next) {} * }; */ class Solution { public: // Mergin two lists first // ListNode* mergeTwoLists(ListNode* l1, ListNode* l2) { // // Time: O(n) // if(l1 == nullptr) return l2; // else if(l2 == nullptr) return l1; // ListNode* dummy = new ListNode(); // ListNode* curr = dummy; // while(l1 != nullptr && l2!= nullptr){ // if(l1->val <= l2->val){ // curr->next = l1; // l1 = l1->next; // }else { // curr->next = l2; // l2 = l2->next; // } // curr = curr->next; // } // curr->next = l1 == nullptr? l2:l1; // return dummy->next; // } ListNode* mergeKLists(vector<ListNode*>& lists) { // O(nklogk) priority_queue <ppi, vector<ppi>, greater<ppi>> mH; for(int i=0;i<lists.size();++i){ if(lists[i]!=nullptr){ mH.push(make_pair(lists[i]->val,i)); } } ListNode* dummy = new ListNode(); ListNode* curr = dummy; while(mH.size()>0){ int min_value=mH.top().first; int min_index_list=mH.top().second; // cout<<min_value<<" "<<min_index_list<<" "; mH.pop(); // each iteration will fix the size of heap to max(k) curr->next=lists[min_index_list]; lists[min_index_list]=lists[min_index_list]->next; if(lists[min_index_list]!=nullptr){ mH.push(make_pair(lists[min_index_list]->val,min_index_list)); } curr=curr->next; } return dummy->next; } };
pcsx-redux/nugget
openbios/patches/patch_card_info_1.c
/*

MIT License

Copyright (c) 2021 PCSX-Redux authors

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

*/

#include <stdint.h>

#include "openbios/patches/patches.h"
#include "openbios/sio0/card.h"

// clang-format off

/* Found in Tales of Destiny Disc 2 (SLUS-01367):

                    *************************************************************
                    *                          FUNCTION                         *
                    *************************************************************
                             undefined _patch_card_info()
                               assume gp = 0x800c5454
             undefined         v0:1           <RETURN>                                XREF[1]:     80078448(W)
             dword * *         v0:4           B0table                                 XREF[1]:     80078448(W)
                             _patch_card_info                                XREF[1]:     InitCARD:80077f38(c)
        80078438 10 80 01 3c          lui             at,0x8010
             assume gp = <UNKNOWN>
        8007843c 90 37 3f ac          sw              ra,offset DAT_80103790(at)                       = ??
        80078440 57 00 09 24          li              t1,0x57
        80078444 b0 00 0a 24          li              t2,0xb0
        80078448 09 f8 40 01          jalr            t2=>SUB_000000b0
        8007844c 00 00 00 00          _nop
        80078450 09 00 0a 24          li              t2,0x9
        80078454 6c 01 42 8c          lw              B0table,0x16c(B0table)
        80078458 00 00 00 00          nop
        8007845c 88 19 43 20          addi            v1,B0table,0x1988
        80078460 e2 e0 01 0c          jal             FlushCache                                       void FlushCache(void)
        80078464 00 00 60 ac          _sw             zero,0x0(v1)
        80078468 10 80 1f 3c          lui             ra,0x8010
        8007846c 90 37 ff 8f          lw              ra,offset DAT_80103790(ra)                       = ??
        80078470 00 00 00 00          nop
        80078474 08 00 e0 03          jr              ra
        80078478 00 00 00 00          _nop

    The patch simply prevents a fifo write in mcInfoHandler.

 */

// clang-format on

#ifndef GENERATE_HASHES

// Runtime counterpatch for the in-game shim above. `ra` points into the
// game's _patch_card_info code; the three stores neutralize the instruction
// sequence that would zero a word inside the BIOS mcInfoHandler (the fifo
// write mentioned in the disassembly comment).
enum patch_behavior patch_card_info_1_execute(uint32_t* ra) {
    // 0x10000000 is a MIPS beq zero, zero, +0 — an always-taken branch to the
    // next instruction, i.e. an effective no-op replacing the original `li`.
    ra[2] = 0 | 0x10000000;
    // The following two slots are turned into plain nops (sll zero,zero,0).
    ra[3] = 0;
    ra[5] = 0;
    // Tell the sio0 card code that this counterpatch fired so it can keep
    // the handler behavior consistent.
    g_mcCardInfoPatchActivated = 1;
    return PATCH_COUNTERPATCH;
}

#else

#include "openbios/patches/hash.h"

// Per-word 2-bit match masks consumed by patch_hash: 0 = compare the word
// exactly, non-zero values wildcard part or all of the word.
// NOTE(review): exact mask-value semantics live in hash.h — confirm there.
static const uint8_t masks[] = {
    0, 0, 0, 0, // 00
    2, 0, 1, 1, // 10
    0, 0, 0, 3, // 20
    3, 3, 3, 3, // 30
};

// Reference machine-code bytes of the shim's tail (see disassembly above),
// used to fingerprint this particular game's patcher.
static const uint8_t bytes[] = {
    0x09, 0x00, 0x0a, 0x24, 0x6c, 0x01, 0x42, 0x8c, 0x00, 0x00, 0x00, 0x00, 0x88, 0x19, 0x43, 0x20, // 00
    0x00, 0x00, 0x00, 0x0c, 0x00, 0x00, 0x60, 0xac, 0x00, 0x00, 0x1f, 0x3c, 0x00, 0x00, 0xff, 0x8f, // 10
    0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0xe0, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // 20
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // 30
};

// Hashes the reference bytes under the given mask/length (build-time helper).
uint32_t generate_hash_patch_card_info_1(uint32_t mask, unsigned len) {
    return patch_hash((const uint32_t *)bytes, (uint8_t *)&mask, len);
}

// Packs the 16 per-word masks into one uint32_t, two bits per word, with
// masks[0] occupying the least-significant bit pair.
uint32_t generate_mask_patch_card_info_1() {
    uint32_t mask = 0;

    for (unsigned i = 0; i < 16; i++) {
        mask <<= 2;
        mask |= masks[15 - i];
    }

    return mask;
}

#endif
maofw/netty_push_server
src/com/netty/push/pojo/ChannelDeviceInfo.java
<reponame>maofw/netty_push_server package com.netty.push.pojo; import io.netty.channel.Channel; import java.util.List; /** * 渠道设备信息 * * @类名称:ChannelDeviceInfo * @类描述: * @创建人:maofw * @创建时间:2014-10-13 下午3:13:51 * */ public class ChannelDeviceInfo { private Channel channel; private DeviceInfo deviceInfo; private List<MessagePushedInfo> messagePushedInfos; public Channel getChannel() { return channel; } public void setChannel(Channel channel) { this.channel = channel; } public DeviceInfo getDeviceInfo() { return deviceInfo; } public void setDeviceInfo(DeviceInfo deviceInfo) { this.deviceInfo = deviceInfo; } public List<MessagePushedInfo> getMessagePushedInfos() { return messagePushedInfos; } public void setMessagePushedInfos(List<MessagePushedInfo> messagePushedInfos) { if (this.messagePushedInfos != null && this.messagePushedInfos.size() > 0) { this.messagePushedInfos.clear(); this.messagePushedInfos = null; } this.messagePushedInfos = messagePushedInfos; } }
laborautonomo/mibew
src/mibew/styles/pages/default/js/features.js
/*! * Copyright 2005-2014 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 */ (function($) { function updateSurvey() { if ($("#enablepresurvey").is(":checked")) { $(".undersurvey").show(); } else { $(".undersurvey").hide(); } } function updateSSL() { if ($("#enablessl").is(":checked")) { $(".underssl").show(); } else { $(".underssl").hide(); } } function updateGroups() { if ($("#enablegroups").is(":checked")) { $(".undergroups").show(); } else { $(".undergroups").hide(); } } function updateTracking() { if ($("#enabletracking").is(":checked")) { $(".undertracking").show(); } else { $(".undertracking").hide(); } } $(function() { $("#enablepresurvey").change(function() { updateSurvey(); }); $("#enablessl").change(function() { updateSSL(); }); $("#enablegroups").change(function() { updateGroups(); }); $("#enabletracking").change(function() { updateTracking(); }); updateSurvey(); updateSSL(); updateGroups(); updateTracking(); }); })(jQuery);
SamCosta1/Eventlite
src/main/java/uk/ac/man/cs/eventlite/dao/UserDetailsService.java
package uk.ac.man.cs.eventlite.dao; import org.springframework.security.core.userdetails.UsernameNotFoundException; import uk.ac.man.cs.eventlite.helpers.UserDetails; public interface UserDetailsService { UserDetails loadUserByUsername(String username) throws UsernameNotFoundException; }
caicai0/CAIAutoCoding
imtest/iOS_IMKit/Headers/RCChatListViewController.h
// // ConversationListViewController.h // RCIM // // Created by Heq.Shinoda on 14-6-11. // Copyright (c) 2014年 Heq.Shinoda. All rights reserved. // #import "RCBasicViewController.h" #import "RCIMClientHeader.h" #import "RCSelectPersonViewController.h" #import "RCGroupListViewController.h" typedef NS_ENUM(NSInteger, IsAllowScroll){ TableIsScroll = 0, TableIsForbiddenScroll }; @interface RCChatListViewController : RCBasicViewController<RCSelectPersonViewControllerDelegate> { NSTimeInterval startTime, endTime; } @property(nonatomic, strong) NSMutableArray* conversationStore; //@property(nonatomic, strong) NSMutableArray* allConversationItemCell; @property (strong, nonatomic) UITableView *conversationListView; @property(nonatomic, assign) IsAllowScroll isAllowScroll; @property(nonatomic, assign) NSInteger editingCellNum; @property (nonatomic, strong) RCGroupListViewController* currentGroupListView; @property (nonatomic) RCUserAvatarStyle portraitStyle; /** * 启动一对一聊天 * * @param userInfo */ -(void)startPrivateChat:(RCUserInfo*)userInfo; /** * 启动讨论组 * * @param userInfos */ -(void)startDiscussionChat:(NSArray*)userInfos; /** * 导航左面按钮点击事件 */ -(void)leftBarButtonItemPressed:(id)sender; /** * 导航右面按钮点击事件 */ -(void)rightBarButtonItemPressed:(id)sender; /** * 刷新会话列表 */ -(void)refreshChatListView; /** * 会话列表选中调用 * * @param conversation 选中单元的会话信息; */ -(void)onSelectedTableRow:(RCConversation*)conversation; /** * 把chatController的生命周期加入到ChatList管理中 * * @param controller */ -(void)addChatController:(UIViewController*)controller; /** * 获取chatController * * @param targetId * @param conversationType * * @return chatController */ -(id)getChatController:(NSString*)targetId conversationType:(RCConversationType)conversationType; /** * 是否隐藏默认背景,并设置自定义背景,默认为NO */ -(BOOL) showCustomEmptyBackView; @end
secondflying/gym
src/main/java/com/gym/user/entity/CoachLeave.java
package com.gym.user.entity; import java.io.Serializable; import java.util.Date; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.GeneratedValue; import javax.persistence.GenerationType; import javax.persistence.Id; import javax.persistence.Table; import javax.xml.bind.annotation.XmlElement; import com.fasterxml.jackson.annotation.JsonFormat; /** * 教练请假表 * * */ @Entity @Table(name = "coachleave") public class CoachLeave implements Serializable { private static final long serialVersionUID = -7721105669853247233L; @Id @Column(name = "id") @GeneratedValue(strategy = GenerationType.IDENTITY) private Integer id; @XmlElement @Column(name = "coachid") private Integer coachId; @XmlElement @Column(name = "reason") private String reason; @Column(name = "createtime") @JsonFormat(pattern="yyyy-MM-dd HH:mm:ss", timezone = "GMT+8") private Date createTime; @Column(name = "starttime") @JsonFormat(pattern="yyyy-MM-dd HH:mm:ss", timezone = "GMT+8") private Date startTime; @Column(name = "endtime") @JsonFormat(pattern="yyyy-MM-dd HH:mm:ss", timezone = "GMT+8") private Date endTime; public Integer getId() { return id; } public void setId(Integer id) { this.id = id; } public Integer getCoachId() { return coachId; } public void setCoachId(Integer coachId) { this.coachId = coachId; } public String getReason() { return reason; } public void setReason(String reason) { this.reason = reason; } public Date getCreateTime() { return createTime; } public void setCreateTime(Date createTime) { this.createTime = createTime; } public Date getStartTime() { return startTime; } public void setStartTime(Date startTime) { this.startTime = startTime; } public Date getEndTime() { return endTime; } public void setEndTime(Date endTime) { this.endTime = endTime; } }
chokdee/jitty
data/src/test/java/com/jmelzer/jitty/service/CopyManagerTest.java
package com.jmelzer.jitty.service;

import com.jmelzer.jitty.model.PhaseCombination;
import com.jmelzer.jitty.model.TournamentClass;
import com.jmelzer.jitty.model.TournamentGroup;
import com.jmelzer.jitty.model.TournamentPlayer;
import com.jmelzer.jitty.model.dto.TournamentClassDTO;
import org.junit.Test;

import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;

/**
 * Created by <NAME> on 31.07.2016.
 * Unit test for {@link CopyManager}: copying a TournamentClass entity into
 * its DTO representation.
 */
public class CopyManagerTest {

    /**
     * Copies a populated TournamentClass (phase combination, one player,
     * one group) and verifies the DTO carries the entity's name.
     */
    @Test
    public void copyTC() {
        TournamentClass clz = new TournamentClass("1");
        clz.createPhaseCombination(PhaseCombination.GK);
        clz.addPlayer(new TournamentPlayer());
        clz.addGroup(new TournamentGroup());

        TournamentClassDTO dto = CopyManager.copy(clz, true);
        assertThat(dto.getName(), is("1"));
        // NOTE(review): the group-count assertion below is disabled; confirm
        // whether copy(clz, true) is expected to copy groups before re-enabling.
//        assertThat(dto.getGroups().size(), is(1));
    }
}
vesavlad/somado
src/main/java/gui/dialogs/gloss/GlossProductEditNewDialog.java
/*
 *
 *  Somado (Small Deliveries Optimization System)
 *  Goods delivery optimization, OSM data, VRP problem
 *
 *  Author: <NAME> 2016 (BSc thesis, EE Warsaw Univ. of Technology)
 *
 */
package gui.dialogs.gloss;

import datamodel.Product;
import gui.GUI;
import gui.dialogs.GlossDialog;
import somado.Lang;


/**
 *
 * Dialog for adding a new glossary (dictionary) item.
 *
 * @author <NAME>
 * @version 1.0
 *
 */
@SuppressWarnings("serial")
public class GlossProductEditNewDialog extends GlossProductEditDialog {

  /** The item saved by the last successful save; used when refreshing the list. */
  private Product addedItem;

  /**
   * Constructor: dialog for a new glossary item.
   * @param frame Reference to the GUI
   * @param parentDialog The parent dialog
   */
  public GlossProductEditNewDialog(GUI frame, GlossDialog<Product> parentDialog) {

    super(frame, parentDialog, Lang.get("Gloss.GlossItems") + " - " + Lang.get("Gloss.AddItem"));

  }

  /**
   * Persists the item to the database.
   * @param item The item data to save
   * @return true on success
   */
  @Override
  protected boolean saveItem(Product item) {

    addedItem = item;
    return glossProducts.addItem(item, frame.getUser());

  }

  /**
   * Refreshes the items list after the database write, passing the new
   * item's id to the parent dialog's filters.
   */
  @Override
  protected void refreshItemsList() {

    getParentDialog().getFilters().doUpdate(addedItem.getId());

  }
}
JeremyZhao1989/leanTest
cgtkLibs/cgtk_config/studio_config.py
# coding=utf8
"""Access to the studio-wide configuration file (configs/studio.yml).

Values in the file may be declared per-platform: any mapping whose keys are
a subset of {"windows", "linux", "osx"} is collapsed to the entry matching
the current platform (``platform.system().lower()``).
"""
import os
import platform

PROJECT_ROOT_NAME = "CNCGToolKit"


def get_studio_cfg_path():
    """Return the normalized path of configs/studio.yml.

    The file lives two directories above this module, in the ``configs``
    folder.  The path is normalized so it contains no ``..`` segments.
    """
    root_path = os.path.join(os.path.dirname(__file__), "..", "..", "configs", "studio.yml")
    return os.path.normpath(root_path)


def get(item):
    """Load studio.yml and return the platform-resolved value of ``item``.

    Returns None when the key is absent from the file.
    """
    # Lazy import so this module can be imported (and format_result reused)
    # even when the project-local cgtk_yaml package is not on sys.path.
    import cgtk_yaml
    studio_yml_path = get_studio_cfg_path()
    all_cfgs = cgtk_yaml.load_file(studio_yml_path)
    result = all_cfgs.get(item)
    return format_result(result)


def format_result(result):
    """Recursively collapse per-platform mappings to the current platform.

    Any dict whose keys are a subset of {"windows", "linux", "osx"} is
    replaced by its value for ``platform.system().lower()`` (None when the
    current platform has no entry); all other values are returned unchanged.
    """
    if isinstance(result, dict):
        # list() so the dict may be mutated while iterating; .items() works
        # on both Python 2 and 3, unlike the old dict.iteritems().
        for key, value in list(result.items()):
            if isinstance(value, dict):
                result[key] = format_result(value)
        if set(result.keys()).issubset({"windows", "linux", "osx"}):
            result = result.get(platform.system().lower())
    return result


if __name__ == "__main__":
    # print() calls are valid on both Python 2 and 3; the previous print
    # statements were Python-2 only.
    print(get("deadline"))
    print(get("python"))
rohankumardubey/cayenne
cayenne-server/src/test/java/org/apache/cayenne/access/translator/select/LimitOffsetStageTest.java
/*****************************************************************
 *   Licensed to the Apache Software Foundation (ASF) under one
 *  or more contributor license agreements.  See the NOTICE file
 *  distributed with this work for additional information
 *  regarding copyright ownership.  The ASF licenses this file
 *  to you under the Apache License, Version 2.0 (the
 *  "License"); you may not use this file except in compliance
 *  with the License.  You may obtain a copy of the License at
 *
 *    https://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing,
 *  software distributed under the License is distributed on an
 *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 *  KIND, either express or implied.  See the License for the
 *  specific language governing permissions and limitations
 *  under the License.
 ****************************************************************/

package org.apache.cayenne.access.translator.select;

import org.apache.cayenne.access.sqlbuilder.sqltree.LimitOffsetNode;
import org.apache.cayenne.access.sqlbuilder.sqltree.Node;
import org.apache.cayenne.map.DbEntity;
import org.junit.Before;
import org.junit.Test;

import static org.hamcrest.CoreMatchers.instanceOf;
import static org.junit.Assert.*;

/**
 * Unit test for {@link LimitOffsetStage}: the stage must translate the query
 * metadata's fetch limit/offset into a {@link LimitOffsetNode} attached to
 * the generated SELECT tree.
 *
 * @since 4.2
 */
public class LimitOffsetStageTest {

    private TranslatorContext context;

    /** Builds a translator context whose query metadata carries limit=123, offset=321. */
    @Before
    public void prepareContext() {
        DbEntity entity = new DbEntity();
        entity.setName("mock");

        TranslatableQueryWrapper wrapper = new MockQueryWrapperBuilder()
                .withMetaData(new MockQueryMetadataBuilder()
                        .withDbEntity(entity)
                        .withLimitOffset(123, 321)
                        .build())
                .build();
        context = new MockTranslatorContext(wrapper);
    }

    /** Running the stage must append a LimitOffsetNode holding the metadata's values. */
    @Test
    public void perform() {
        LimitOffsetStage stage = new LimitOffsetStage();
        stage.perform(context);

        Node select = context.getSelectBuilder().build();
        Node child = select.getChild(0);
        assertThat(child, instanceOf(LimitOffsetNode.class));

        LimitOffsetNode limitOffsetNode = (LimitOffsetNode) child;
        assertEquals(123, limitOffsetNode.getLimit());
        assertEquals(321, limitOffsetNode.getOffset());
    }
}
dswd/ToMaTo
cli/tests/backend/sites.py
"""API tests for sites: list/info/create/modify/remove.

NOTE(review): the site_* API helpers used below (site_list, site_info,
site_create, site_modify, site_remove) are not imported here; they appear to
be injected into the global namespace by the CLI test runner -- confirm
before running this module stand-alone.
"""
from lib.tests import testCase, testSuite, unicodeTestString

from organization import setUp as setUpOrga, tearDown as tearDownOrga, randomName


def setUp(name):
    """Create a fresh organization and a randomly named site in it.

    Returns the new site's name.
    """
    orga = setUpOrga(name)
    print("Creating site...")
    site = randomName()
    site_create(site, orga, "Just a test")
    return site


def tearDown(site):
    """Remove the site created by setUp() and then its organization."""
    print("Removing site...")
    # Fetch info first: the organization name is needed after the site is gone.
    info = site_info(site)
    site_remove(site)
    tearDownOrga(info["organization"])


@testCase("api.site_list")
def testSiteList(_):
    """site_list must return a non-empty list."""
    print("Retrieving site list...")
    res = site_list()
    assert res, "No sites returned"


@testCase("api.site_info")
def testSiteInfo(_):
    """site_info must return data matching the corresponding list entry."""
    print("Retrieving site list...")
    res = site_list()
    print("Retrieving information on one site...")
    info = site_info(res[0]["name"])
    assert info, "No information returned"
    assert info == res[0], "Info was different from the list entry"


@testCase("api.site_create", setUp=setUp, tearDown=tearDown, requiredFlags=["global_admin"])
def testSiteCreate(name):
    """A site created in setUp() must be retrievable."""
    info = site_info(name)
    assert info, "Site has not been created"


@testCase("api.site_modify", setUp=setUp, tearDown=tearDown, requiredFlags=["global_admin"])
def testSiteModify(name):
    """site_modify must apply attribute changes, including unicode values."""
    print("Modifying site...")
    info = site_modify(name, {
        "description": "Just a test",
        "location": "Neverland",
        "geolocation": {"latitude": 80, "longitude": -20.3},
        "description_text": "Please ignore"
    })
    assert info["description_text"] == "Please ignore", "Modify did not work"
    print("Testing unicode...")
    info = site_modify(name, {"description": unicodeTestString})
    assert info["description"] == unicodeTestString, "Unicode string has been altered"


@testCase("api.site_remove", setUp=setUpOrga, tearDown=tearDownOrga, requiredFlags=["global_admin"])
def testSiteRemove(orga):
    """A removed site must no longer block organization teardown."""
    print("Creating site...")
    site = randomName()
    site_create(site, orga, "Just a test")
    print("Removing site...")
    site_remove(site)


tests = [
    testSiteList,
    testSiteInfo,
    testSiteCreate,
    testSiteModify,
    testSiteRemove,
]

if __name__ == "__main__":
    testSuite(tests)
ChaconLima/orange-talents-05-template-mercado-livre
src/main/java/br/com/zupacademy/mateuschacon/mercadolivre/ProductResource/Dtos/ProductQuestionDetails.java
package br.com.zupacademy.mateuschacon.mercadolivre.ProductResource.Dtos;

import br.com.zupacademy.mateuschacon.mercadolivre.ProductResource.Models.ProductQuestion;

/**
 * Read-only DTO exposing the title of a {@link ProductQuestion}.
 */
public class ProductQuestionDetails {

    /** Question title, captured from the source entity at construction time. */
    private final String title;

    /**
     * @param productquestion source entity whose title is copied eagerly
     */
    public ProductQuestionDetails(ProductQuestion productquestion) {
        this.title = productquestion.getTitle();
    }

    /**
     * @return the question title
     */
    public String getTitle() {
        return title;
    }
}
jj1232727/system_call
linux-3.16/drivers/spi/spi-sh-msiof.c
/* * SuperH MSIOF SPI Master Interface * * Copyright (c) 2009 <NAME> * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License version 2 as * published by the Free Software Foundation. * */ #include <linux/bitmap.h> #include <linux/clk.h> #include <linux/completion.h> #include <linux/delay.h> #include <linux/err.h> #include <linux/gpio.h> #include <linux/interrupt.h> #include <linux/io.h> #include <linux/kernel.h> #include <linux/module.h> #include <linux/of.h> #include <linux/of_device.h> #include <linux/platform_device.h> #include <linux/pm_runtime.h> #include <linux/spi/sh_msiof.h> #include <linux/spi/spi.h> #include <asm/unaligned.h> struct sh_msiof_chipdata { u16 tx_fifo_size; u16 rx_fifo_size; u16 master_flags; }; struct sh_msiof_spi_priv { void __iomem *mapbase; struct clk *clk; struct platform_device *pdev; const struct sh_msiof_chipdata *chipdata; struct sh_msiof_spi_info *info; struct completion done; int tx_fifo_size; int rx_fifo_size; }; #define TMDR1 0x00 /* Transmit Mode Register 1 */ #define TMDR2 0x04 /* Transmit Mode Register 2 */ #define TMDR3 0x08 /* Transmit Mode Register 3 */ #define RMDR1 0x10 /* Receive Mode Register 1 */ #define RMDR2 0x14 /* Receive Mode Register 2 */ #define RMDR3 0x18 /* Receive Mode Register 3 */ #define TSCR 0x20 /* Transmit Clock Select Register */ #define RSCR 0x22 /* Receive Clock Select Register (SH, A1, APE6) */ #define CTR 0x28 /* Control Register */ #define FCTR 0x30 /* FIFO Control Register */ #define STR 0x40 /* Status Register */ #define IER 0x44 /* Interrupt Enable Register */ #define TDR1 0x48 /* Transmit Control Data Register 1 (SH, A1) */ #define TDR2 0x4c /* Transmit Control Data Register 2 (SH, A1) */ #define TFDR 0x50 /* Transmit FIFO Data Register */ #define RDR1 0x58 /* Receive Control Data Register 1 (SH, A1) */ #define RDR2 0x5c /* Receive Control Data Register 2 (SH, A1) */ #define RFDR 0x60 /* Receive FIFO Data Register */ /* 
TMDR1 and RMDR1 */ #define MDR1_TRMD 0x80000000 /* Transfer Mode (1 = Master mode) */ #define MDR1_SYNCMD_MASK 0x30000000 /* SYNC Mode */ #define MDR1_SYNCMD_SPI 0x20000000 /* Level mode/SPI */ #define MDR1_SYNCMD_LR 0x30000000 /* L/R mode */ #define MDR1_SYNCAC_SHIFT 25 /* Sync Polarity (1 = Active-low) */ #define MDR1_BITLSB_SHIFT 24 /* MSB/LSB First (1 = LSB first) */ #define MDR1_FLD_MASK 0x000000c0 /* Frame Sync Signal Interval (0-3) */ #define MDR1_FLD_SHIFT 2 #define MDR1_XXSTP 0x00000001 /* Transmission/Reception Stop on FIFO */ /* TMDR1 */ #define TMDR1_PCON 0x40000000 /* Transfer Signal Connection */ /* TMDR2 and RMDR2 */ #define MDR2_BITLEN1(i) (((i) - 1) << 24) /* Data Size (8-32 bits) */ #define MDR2_WDLEN1(i) (((i) - 1) << 16) /* Word Count (1-64/256 (SH, A1))) */ #define MDR2_GRPMASK1 0x00000001 /* Group Output Mask 1 (SH, A1) */ /* TSCR and RSCR */ #define SCR_BRPS_MASK 0x1f00 /* Prescaler Setting (1-32) */ #define SCR_BRPS(i) (((i) - 1) << 8) #define SCR_BRDV_MASK 0x0007 /* Baud Rate Generator's Division Ratio */ #define SCR_BRDV_DIV_2 0x0000 #define SCR_BRDV_DIV_4 0x0001 #define SCR_BRDV_DIV_8 0x0002 #define SCR_BRDV_DIV_16 0x0003 #define SCR_BRDV_DIV_32 0x0004 #define SCR_BRDV_DIV_1 0x0007 /* CTR */ #define CTR_TSCKIZ_MASK 0xc0000000 /* Transmit Clock I/O Polarity Select */ #define CTR_TSCKIZ_SCK 0x80000000 /* Disable SCK when TX disabled */ #define CTR_TSCKIZ_POL_SHIFT 30 /* Transmit Clock Polarity */ #define CTR_RSCKIZ_MASK 0x30000000 /* Receive Clock Polarity Select */ #define CTR_RSCKIZ_SCK 0x20000000 /* Must match CTR_TSCKIZ_SCK */ #define CTR_RSCKIZ_POL_SHIFT 28 /* Receive Clock Polarity */ #define CTR_TEDG_SHIFT 27 /* Transmit Timing (1 = falling edge) */ #define CTR_REDG_SHIFT 26 /* Receive Timing (1 = falling edge) */ #define CTR_TXDIZ_MASK 0x00c00000 /* Pin Output When TX is Disabled */ #define CTR_TXDIZ_LOW 0x00000000 /* 0 */ #define CTR_TXDIZ_HIGH 0x00400000 /* 1 */ #define CTR_TXDIZ_HIZ 0x00800000 /* High-impedance */ #define 
CTR_TSCKE 0x00008000 /* Transmit Serial Clock Output Enable */ #define CTR_TFSE 0x00004000 /* Transmit Frame Sync Signal Output Enable */ #define CTR_TXE 0x00000200 /* Transmit Enable */ #define CTR_RXE 0x00000100 /* Receive Enable */ /* STR and IER */ #define STR_TEOF 0x00800000 /* Frame Transmission End */ #define STR_REOF 0x00000080 /* Frame Reception End */ static u32 sh_msiof_read(struct sh_msiof_spi_priv *p, int reg_offs) { switch (reg_offs) { case TSCR: case RSCR: return ioread16(p->mapbase + reg_offs); default: return ioread32(p->mapbase + reg_offs); } } static void sh_msiof_write(struct sh_msiof_spi_priv *p, int reg_offs, u32 value) { switch (reg_offs) { case TSCR: case RSCR: iowrite16(value, p->mapbase + reg_offs); break; default: iowrite32(value, p->mapbase + reg_offs); break; } } static int sh_msiof_modify_ctr_wait(struct sh_msiof_spi_priv *p, u32 clr, u32 set) { u32 mask = clr | set; u32 data; int k; data = sh_msiof_read(p, CTR); data &= ~clr; data |= set; sh_msiof_write(p, CTR, data); for (k = 100; k > 0; k--) { if ((sh_msiof_read(p, CTR) & mask) == set) break; udelay(10); } return k > 0 ? 
0 : -ETIMEDOUT; } static irqreturn_t sh_msiof_spi_irq(int irq, void *data) { struct sh_msiof_spi_priv *p = data; /* just disable the interrupt and wake up */ sh_msiof_write(p, IER, 0); complete(&p->done); return IRQ_HANDLED; } static struct { unsigned short div; unsigned short scr; } const sh_msiof_spi_clk_table[] = { { 1, SCR_BRPS( 1) | SCR_BRDV_DIV_1 }, { 2, SCR_BRPS( 1) | SCR_BRDV_DIV_2 }, { 4, SCR_BRPS( 1) | SCR_BRDV_DIV_4 }, { 8, SCR_BRPS( 1) | SCR_BRDV_DIV_8 }, { 16, SCR_BRPS( 1) | SCR_BRDV_DIV_16 }, { 32, SCR_BRPS( 1) | SCR_BRDV_DIV_32 }, { 64, SCR_BRPS(32) | SCR_BRDV_DIV_2 }, { 128, SCR_BRPS(32) | SCR_BRDV_DIV_4 }, { 256, SCR_BRPS(32) | SCR_BRDV_DIV_8 }, { 512, SCR_BRPS(32) | SCR_BRDV_DIV_16 }, { 1024, SCR_BRPS(32) | SCR_BRDV_DIV_32 }, }; static void sh_msiof_spi_set_clk_regs(struct sh_msiof_spi_priv *p, unsigned long parent_rate, u32 spi_hz) { unsigned long div = 1024; size_t k; if (!WARN_ON(!spi_hz || !parent_rate)) div = DIV_ROUND_UP(parent_rate, spi_hz); /* TODO: make more fine grained */ for (k = 0; k < ARRAY_SIZE(sh_msiof_spi_clk_table); k++) { if (sh_msiof_spi_clk_table[k].div >= div) break; } k = min_t(int, k, ARRAY_SIZE(sh_msiof_spi_clk_table) - 1); sh_msiof_write(p, TSCR, sh_msiof_spi_clk_table[k].scr); if (!(p->chipdata->master_flags & SPI_MASTER_MUST_TX)) sh_msiof_write(p, RSCR, sh_msiof_spi_clk_table[k].scr); } static void sh_msiof_spi_set_pin_regs(struct sh_msiof_spi_priv *p, u32 cpol, u32 cpha, u32 tx_hi_z, u32 lsb_first, u32 cs_high) { u32 tmp; int edge; /* * CPOL CPHA TSCKIZ RSCKIZ TEDG REDG * 0 0 10 10 1 1 * 0 1 10 10 0 0 * 1 0 11 11 0 0 * 1 1 11 11 1 1 */ sh_msiof_write(p, FCTR, 0); tmp = MDR1_SYNCMD_SPI | 1 << MDR1_FLD_SHIFT | MDR1_XXSTP; tmp |= !cs_high << MDR1_SYNCAC_SHIFT; tmp |= lsb_first << MDR1_BITLSB_SHIFT; sh_msiof_write(p, TMDR1, tmp | MDR1_TRMD | TMDR1_PCON); if (p->chipdata->master_flags & SPI_MASTER_MUST_TX) { /* These bits are reserved if RX needs TX */ tmp &= ~0x0000ffff; } sh_msiof_write(p, RMDR1, tmp); tmp = 0; tmp |= 
CTR_TSCKIZ_SCK | cpol << CTR_TSCKIZ_POL_SHIFT; tmp |= CTR_RSCKIZ_SCK | cpol << CTR_RSCKIZ_POL_SHIFT; edge = cpol ^ !cpha; tmp |= edge << CTR_TEDG_SHIFT; tmp |= edge << CTR_REDG_SHIFT; tmp |= tx_hi_z ? CTR_TXDIZ_HIZ : CTR_TXDIZ_LOW; sh_msiof_write(p, CTR, tmp); } static void sh_msiof_spi_set_mode_regs(struct sh_msiof_spi_priv *p, const void *tx_buf, void *rx_buf, u32 bits, u32 words) { u32 dr2 = MDR2_BITLEN1(bits) | MDR2_WDLEN1(words); if (tx_buf || (p->chipdata->master_flags & SPI_MASTER_MUST_TX)) sh_msiof_write(p, TMDR2, dr2); else sh_msiof_write(p, TMDR2, dr2 | MDR2_GRPMASK1); if (rx_buf) sh_msiof_write(p, RMDR2, dr2); sh_msiof_write(p, IER, STR_TEOF | STR_REOF); } static void sh_msiof_reset_str(struct sh_msiof_spi_priv *p) { sh_msiof_write(p, STR, sh_msiof_read(p, STR)); } static void sh_msiof_spi_write_fifo_8(struct sh_msiof_spi_priv *p, const void *tx_buf, int words, int fs) { const u8 *buf_8 = tx_buf; int k; for (k = 0; k < words; k++) sh_msiof_write(p, TFDR, buf_8[k] << fs); } static void sh_msiof_spi_write_fifo_16(struct sh_msiof_spi_priv *p, const void *tx_buf, int words, int fs) { const u16 *buf_16 = tx_buf; int k; for (k = 0; k < words; k++) sh_msiof_write(p, TFDR, buf_16[k] << fs); } static void sh_msiof_spi_write_fifo_16u(struct sh_msiof_spi_priv *p, const void *tx_buf, int words, int fs) { const u16 *buf_16 = tx_buf; int k; for (k = 0; k < words; k++) sh_msiof_write(p, TFDR, get_unaligned(&buf_16[k]) << fs); } static void sh_msiof_spi_write_fifo_32(struct sh_msiof_spi_priv *p, const void *tx_buf, int words, int fs) { const u32 *buf_32 = tx_buf; int k; for (k = 0; k < words; k++) sh_msiof_write(p, TFDR, buf_32[k] << fs); } static void sh_msiof_spi_write_fifo_32u(struct sh_msiof_spi_priv *p, const void *tx_buf, int words, int fs) { const u32 *buf_32 = tx_buf; int k; for (k = 0; k < words; k++) sh_msiof_write(p, TFDR, get_unaligned(&buf_32[k]) << fs); } static void sh_msiof_spi_write_fifo_s32(struct sh_msiof_spi_priv *p, const void *tx_buf, int words, 
int fs) { const u32 *buf_32 = tx_buf; int k; for (k = 0; k < words; k++) sh_msiof_write(p, TFDR, swab32(buf_32[k] << fs)); } static void sh_msiof_spi_write_fifo_s32u(struct sh_msiof_spi_priv *p, const void *tx_buf, int words, int fs) { const u32 *buf_32 = tx_buf; int k; for (k = 0; k < words; k++) sh_msiof_write(p, TFDR, swab32(get_unaligned(&buf_32[k]) << fs)); } static void sh_msiof_spi_read_fifo_8(struct sh_msiof_spi_priv *p, void *rx_buf, int words, int fs) { u8 *buf_8 = rx_buf; int k; for (k = 0; k < words; k++) buf_8[k] = sh_msiof_read(p, RFDR) >> fs; } static void sh_msiof_spi_read_fifo_16(struct sh_msiof_spi_priv *p, void *rx_buf, int words, int fs) { u16 *buf_16 = rx_buf; int k; for (k = 0; k < words; k++) buf_16[k] = sh_msiof_read(p, RFDR) >> fs; } static void sh_msiof_spi_read_fifo_16u(struct sh_msiof_spi_priv *p, void *rx_buf, int words, int fs) { u16 *buf_16 = rx_buf; int k; for (k = 0; k < words; k++) put_unaligned(sh_msiof_read(p, RFDR) >> fs, &buf_16[k]); } static void sh_msiof_spi_read_fifo_32(struct sh_msiof_spi_priv *p, void *rx_buf, int words, int fs) { u32 *buf_32 = rx_buf; int k; for (k = 0; k < words; k++) buf_32[k] = sh_msiof_read(p, RFDR) >> fs; } static void sh_msiof_spi_read_fifo_32u(struct sh_msiof_spi_priv *p, void *rx_buf, int words, int fs) { u32 *buf_32 = rx_buf; int k; for (k = 0; k < words; k++) put_unaligned(sh_msiof_read(p, RFDR) >> fs, &buf_32[k]); } static void sh_msiof_spi_read_fifo_s32(struct sh_msiof_spi_priv *p, void *rx_buf, int words, int fs) { u32 *buf_32 = rx_buf; int k; for (k = 0; k < words; k++) buf_32[k] = swab32(sh_msiof_read(p, RFDR) >> fs); } static void sh_msiof_spi_read_fifo_s32u(struct sh_msiof_spi_priv *p, void *rx_buf, int words, int fs) { u32 *buf_32 = rx_buf; int k; for (k = 0; k < words; k++) put_unaligned(swab32(sh_msiof_read(p, RFDR) >> fs), &buf_32[k]); } static int sh_msiof_spi_setup(struct spi_device *spi) { struct device_node *np = spi->master->dev.of_node; struct sh_msiof_spi_priv *p = 
spi_master_get_devdata(spi->master); if (!np) { /* * Use spi->controller_data for CS (same strategy as spi_gpio), * if any. otherwise let HW control CS */ spi->cs_gpio = (uintptr_t)spi->controller_data; } /* Configure pins before deasserting CS */ sh_msiof_spi_set_pin_regs(p, !!(spi->mode & SPI_CPOL), !!(spi->mode & SPI_CPHA), !!(spi->mode & SPI_3WIRE), !!(spi->mode & SPI_LSB_FIRST), !!(spi->mode & SPI_CS_HIGH)); if (spi->cs_gpio >= 0) gpio_set_value(spi->cs_gpio, !(spi->mode & SPI_CS_HIGH)); return 0; } static int sh_msiof_prepare_message(struct spi_master *master, struct spi_message *msg) { struct sh_msiof_spi_priv *p = spi_master_get_devdata(master); const struct spi_device *spi = msg->spi; /* Configure pins before asserting CS */ sh_msiof_spi_set_pin_regs(p, !!(spi->mode & SPI_CPOL), !!(spi->mode & SPI_CPHA), !!(spi->mode & SPI_3WIRE), !!(spi->mode & SPI_LSB_FIRST), !!(spi->mode & SPI_CS_HIGH)); return 0; } static int sh_msiof_spi_txrx_once(struct sh_msiof_spi_priv *p, void (*tx_fifo)(struct sh_msiof_spi_priv *, const void *, int, int), void (*rx_fifo)(struct sh_msiof_spi_priv *, void *, int, int), const void *tx_buf, void *rx_buf, int words, int bits) { int fifo_shift; int ret; /* limit maximum word transfer to rx/tx fifo size */ if (tx_buf) words = min_t(int, words, p->tx_fifo_size); if (rx_buf) words = min_t(int, words, p->rx_fifo_size); /* the fifo contents need shifting */ fifo_shift = 32 - bits; /* setup msiof transfer mode registers */ sh_msiof_spi_set_mode_regs(p, tx_buf, rx_buf, bits, words); /* write tx fifo */ if (tx_buf) tx_fifo(p, tx_buf, words, fifo_shift); /* setup clock and rx/tx signals */ ret = sh_msiof_modify_ctr_wait(p, 0, CTR_TSCKE); if (rx_buf) ret = ret ? ret : sh_msiof_modify_ctr_wait(p, 0, CTR_RXE); ret = ret ? ret : sh_msiof_modify_ctr_wait(p, 0, CTR_TXE); /* start by setting frame bit */ reinit_completion(&p->done); ret = ret ? 
ret : sh_msiof_modify_ctr_wait(p, 0, CTR_TFSE); if (ret) { dev_err(&p->pdev->dev, "failed to start hardware\n"); goto err; } /* wait for tx fifo to be emptied / rx fifo to be filled */ wait_for_completion(&p->done); /* read rx fifo */ if (rx_buf) rx_fifo(p, rx_buf, words, fifo_shift); /* clear status bits */ sh_msiof_reset_str(p); /* shut down frame, rx/tx and clock signals */ ret = sh_msiof_modify_ctr_wait(p, CTR_TFSE, 0); ret = ret ? ret : sh_msiof_modify_ctr_wait(p, CTR_TXE, 0); if (rx_buf) ret = ret ? ret : sh_msiof_modify_ctr_wait(p, CTR_RXE, 0); ret = ret ? ret : sh_msiof_modify_ctr_wait(p, CTR_TSCKE, 0); if (ret) { dev_err(&p->pdev->dev, "failed to shut down hardware\n"); goto err; } return words; err: sh_msiof_write(p, IER, 0); return ret; } static int sh_msiof_transfer_one(struct spi_master *master, struct spi_device *spi, struct spi_transfer *t) { struct sh_msiof_spi_priv *p = spi_master_get_devdata(master); void (*tx_fifo)(struct sh_msiof_spi_priv *, const void *, int, int); void (*rx_fifo)(struct sh_msiof_spi_priv *, void *, int, int); int bits; int bytes_per_word; int bytes_done; int words; int n; bool swab; bits = t->bits_per_word; if (bits <= 8 && t->len > 15 && !(t->len & 3)) { bits = 32; swab = true; } else { swab = false; } /* setup bytes per word and fifo read/write functions */ if (bits <= 8) { bytes_per_word = 1; tx_fifo = sh_msiof_spi_write_fifo_8; rx_fifo = sh_msiof_spi_read_fifo_8; } else if (bits <= 16) { bytes_per_word = 2; if ((unsigned long)t->tx_buf & 0x01) tx_fifo = sh_msiof_spi_write_fifo_16u; else tx_fifo = sh_msiof_spi_write_fifo_16; if ((unsigned long)t->rx_buf & 0x01) rx_fifo = sh_msiof_spi_read_fifo_16u; else rx_fifo = sh_msiof_spi_read_fifo_16; } else if (swab) { bytes_per_word = 4; if ((unsigned long)t->tx_buf & 0x03) tx_fifo = sh_msiof_spi_write_fifo_s32u; else tx_fifo = sh_msiof_spi_write_fifo_s32; if ((unsigned long)t->rx_buf & 0x03) rx_fifo = sh_msiof_spi_read_fifo_s32u; else rx_fifo = sh_msiof_spi_read_fifo_s32; } else { 
bytes_per_word = 4; if ((unsigned long)t->tx_buf & 0x03) tx_fifo = sh_msiof_spi_write_fifo_32u; else tx_fifo = sh_msiof_spi_write_fifo_32; if ((unsigned long)t->rx_buf & 0x03) rx_fifo = sh_msiof_spi_read_fifo_32u; else rx_fifo = sh_msiof_spi_read_fifo_32; } /* setup clocks (clock already enabled in chipselect()) */ sh_msiof_spi_set_clk_regs(p, clk_get_rate(p->clk), t->speed_hz); /* transfer in fifo sized chunks */ words = t->len / bytes_per_word; bytes_done = 0; while (bytes_done < t->len) { void *rx_buf = t->rx_buf ? t->rx_buf + bytes_done : NULL; const void *tx_buf = t->tx_buf ? t->tx_buf + bytes_done : NULL; n = sh_msiof_spi_txrx_once(p, tx_fifo, rx_fifo, tx_buf, rx_buf, words, bits); if (n < 0) break; bytes_done += n * bytes_per_word; words -= n; } return 0; } static const struct sh_msiof_chipdata sh_data = { .tx_fifo_size = 64, .rx_fifo_size = 64, .master_flags = 0, }; static const struct sh_msiof_chipdata r8a779x_data = { .tx_fifo_size = 64, .rx_fifo_size = 256, .master_flags = SPI_MASTER_MUST_TX, }; static const struct of_device_id sh_msiof_match[] = { { .compatible = "renesas,sh-msiof", .data = &sh_data }, { .compatible = "renesas,sh-mobile-msiof", .data = &sh_data }, { .compatible = "renesas,msiof-r8a7790", .data = &r8a779x_data }, { .compatible = "renesas,msiof-r8a7791", .data = &r8a779x_data }, {}, }; MODULE_DEVICE_TABLE(of, sh_msiof_match); #ifdef CONFIG_OF static struct sh_msiof_spi_info *sh_msiof_spi_parse_dt(struct device *dev) { struct sh_msiof_spi_info *info; struct device_node *np = dev->of_node; u32 num_cs = 1; info = devm_kzalloc(dev, sizeof(struct sh_msiof_spi_info), GFP_KERNEL); if (!info) return NULL; /* Parse the MSIOF properties */ of_property_read_u32(np, "num-cs", &num_cs); of_property_read_u32(np, "renesas,tx-fifo-size", &info->tx_fifo_override); of_property_read_u32(np, "renesas,rx-fifo-size", &info->rx_fifo_override); info->num_chipselect = num_cs; return info; } #else static struct sh_msiof_spi_info *sh_msiof_spi_parse_dt(struct 
device *dev) { return NULL; } #endif static int sh_msiof_spi_probe(struct platform_device *pdev) { struct resource *r; struct spi_master *master; const struct of_device_id *of_id; struct sh_msiof_spi_priv *p; int i; int ret; master = spi_alloc_master(&pdev->dev, sizeof(struct sh_msiof_spi_priv)); if (master == NULL) { dev_err(&pdev->dev, "failed to allocate spi master\n"); return -ENOMEM; } p = spi_master_get_devdata(master); platform_set_drvdata(pdev, p); of_id = of_match_device(sh_msiof_match, &pdev->dev); if (of_id) { p->chipdata = of_id->data; p->info = sh_msiof_spi_parse_dt(&pdev->dev); } else { p->chipdata = (const void *)pdev->id_entry->driver_data; p->info = dev_get_platdata(&pdev->dev); } if (!p->info) { dev_err(&pdev->dev, "failed to obtain device info\n"); ret = -ENXIO; goto err1; } init_completion(&p->done); p->clk = devm_clk_get(&pdev->dev, NULL); if (IS_ERR(p->clk)) { dev_err(&pdev->dev, "cannot get clock\n"); ret = PTR_ERR(p->clk); goto err1; } i = platform_get_irq(pdev, 0); if (i < 0) { dev_err(&pdev->dev, "cannot get platform IRQ\n"); ret = -ENOENT; goto err1; } r = platform_get_resource(pdev, IORESOURCE_MEM, 0); p->mapbase = devm_ioremap_resource(&pdev->dev, r); if (IS_ERR(p->mapbase)) { ret = PTR_ERR(p->mapbase); goto err1; } ret = devm_request_irq(&pdev->dev, i, sh_msiof_spi_irq, 0, dev_name(&pdev->dev), p); if (ret) { dev_err(&pdev->dev, "unable to request irq\n"); goto err1; } p->pdev = pdev; pm_runtime_enable(&pdev->dev); /* Platform data may override FIFO sizes */ p->tx_fifo_size = p->chipdata->tx_fifo_size; p->rx_fifo_size = p->chipdata->rx_fifo_size; if (p->info->tx_fifo_override) p->tx_fifo_size = p->info->tx_fifo_override; if (p->info->rx_fifo_override) p->rx_fifo_size = p->info->rx_fifo_override; /* init master code */ master->mode_bits = SPI_CPOL | SPI_CPHA | SPI_CS_HIGH; master->mode_bits |= SPI_LSB_FIRST | SPI_3WIRE; master->flags = p->chipdata->master_flags; master->bus_num = pdev->id; master->dev.of_node = pdev->dev.of_node; 
master->num_chipselect = p->info->num_chipselect; master->setup = sh_msiof_spi_setup; master->prepare_message = sh_msiof_prepare_message; master->bits_per_word_mask = SPI_BPW_RANGE_MASK(8, 32); master->auto_runtime_pm = true; master->transfer_one = sh_msiof_transfer_one; ret = devm_spi_register_master(&pdev->dev, master); if (ret < 0) { dev_err(&pdev->dev, "spi_register_master error.\n"); goto err2; } return 0; err2: pm_runtime_disable(&pdev->dev); err1: spi_master_put(master); return ret; } static int sh_msiof_spi_remove(struct platform_device *pdev) { pm_runtime_disable(&pdev->dev); return 0; } static struct platform_device_id spi_driver_ids[] = { { "spi_sh_msiof", (kernel_ulong_t)&sh_data }, { "spi_r8a7790_msiof", (kernel_ulong_t)&r8a779x_data }, { "spi_r8a7791_msiof", (kernel_ulong_t)&r8a779x_data }, {}, }; MODULE_DEVICE_TABLE(platform, spi_driver_ids); static struct platform_driver sh_msiof_spi_drv = { .probe = sh_msiof_spi_probe, .remove = sh_msiof_spi_remove, .id_table = spi_driver_ids, .driver = { .name = "spi_sh_msiof", .owner = THIS_MODULE, .of_match_table = of_match_ptr(sh_msiof_match), }, }; module_platform_driver(sh_msiof_spi_drv); MODULE_DESCRIPTION("SuperH MSIOF SPI Master Interface Driver"); MODULE_AUTHOR("<NAME>"); MODULE_LICENSE("GPL v2"); MODULE_ALIAS("platform:spi_sh_msiof");
EnjoyLifeFund/macHighSierra-py36-pkgs
dendropy/calculate/profiledistance.py
#! /usr/bin/env python

##############################################################################
##  DendroPy Phylogenetic Computing Library.
##
##  Copyright 2010 <NAME> and <NAME>.
##  All rights reserved.
##
##  See "LICENSE.txt" for terms and conditions of usage.
##
##  If you use this work or any portion thereof in published work,
##  please cite it as:
##
##     <NAME>. and <NAME>. 2010. DendroPy: a Python library
##     for phylogenetic computing. Bioinformatics 26: 1569-1571.
##
##############################################################################

"""
Profile distances.
"""

import math
import collections
from dendropy.utility import constants
from dendropy.model import coalescent

class MeasurementProfile(object):
    """
    A sorted profile of scalar tree measurements (edge lengths, node ages,
    coalescence intervals, ...) that can be interpolated up to a common
    number of points so that two profiles of different raw sizes can be
    compared by Euclidean distance.
    """

    @staticmethod
    def _euclidean_distance(v1, v2, is_weight_values_by_comparison_profile_size=True):
        """
        Euclidean distance between the aligned tails of two sequences.

        If the sequences differ in length, the leading entries of the longer
        one are skipped so the comparison runs over equal-length tails.  When
        ``is_weight_values_by_comparison_profile_size`` is True, every value
        is divided by the comparison length before squaring.
        """
        v1_size = len(v1)
        v2_size = len(v2)
        v1_idx = 0
        v2_idx = 0
        # Align tails: skip the head of the longer vector; the weight is the
        # length of the overlapping region actually compared.
        if v1_size > v2_size:
            v1_idx = v1_size - v2_size
            weight = float(v2_size)
        elif v2_size > v1_size:
            v2_idx = v2_size - v1_size
            weight = float(v1_size)
        else:
            weight = float(v1_size)
        if not is_weight_values_by_comparison_profile_size:
            weight = 1.0
        ss = 0.0
        while v1_idx < v1_size and v2_idx < v2_size:
            ss += pow(v1[v1_idx]/weight - v2[v2_idx]/weight, 2)
            v1_idx += 1
            v2_idx += 1
        return math.sqrt(ss)

    def __init__(self,
            profile_data=None,
            interpolation_method="piecewise_linear"):
        """
        ``profile_data``: iterable of raw measurement values (stored sorted).
        ``interpolation_method``: "piecewise_linear" or "staircase".
        """
        self.set_data(profile_data)
        # If > 0, all comparisons involving this profile are locked to
        # exactly this many interpolated points.
        self.fixed_size = 0
        self.interpolation_method = interpolation_method

    def add(self, value):
        """
        Add a single raw measurement, keeping the data sorted and
        invalidating any cached interpolations.
        """
        self._profile_data.append(value)
        self._profile_data = sorted(self._profile_data)
        self._raw_data_size = len(self._profile_data)
        self._interpolated_profiles = {}

    def set_data(self, values):
        """
        Replace the raw data with ``values`` (None means empty), sorted, and
        clear the interpolation cache.
        """
        if values is None:
            values = []
        self._profile_data = sorted(values)
        self._raw_data_size = len(self._profile_data)
        self._interpolated_profiles = {}

    def __len__(self):
        return self._raw_data_size

    def distance(self,
            other,
            profile_size,
            is_weight_values_by_comparison_profile_size=True):
        """
        Euclidean distance to ``other``, with both profiles interpolated to
        ``profile_size`` points.  If ``profile_size`` is None, a common size
        is negotiated via ``_get_profile_comparison_size``.
        """
        if profile_size is None:
            profile_size = self._get_profile_comparison_size(other)
        v1 = self._get_profile_for_size(profile_size)
        v2 = other._get_profile_for_size(profile_size)
        return MeasurementProfile._euclidean_distance(v1, v2,
                is_weight_values_by_comparison_profile_size=is_weight_values_by_comparison_profile_size)

    def _get_profile_comparison_size(self, other):
        """
        Determine the profile size at which ``self`` and ``other`` can be
        compared, honoring each side's ``fixed_size`` lock and raising
        ValueError on irreconcilable constraints.
        """
        if self.fixed_size > 0 and other.fixed_size > 0:
            if self.fixed_size != other.fixed_size:
                raise ValueError("Comparing two profiles locked to different sizes: {} and {}".format(
                    self.fixed_size,
                    other.fixed_size))
            return self.fixed_size
        elif self.fixed_size == 0 and other.fixed_size > 0:
            if other.fixed_size < self._raw_data_size:
                raise ValueError("Cannot interpolate points in current profile: current raw data size is {} but other profile is locked to a smaller fixed size ({})".format(
                    self._raw_data_size,
                    other.fixed_size))
            return other.fixed_size
        elif self.fixed_size > 0 and other.fixed_size == 0:
            if self.fixed_size < other._raw_data_size:
                raise ValueError("Cannot interpolate points in other profile: other raw data size is {} but current profile is locked to a smaller fixed size ({})".format(
                    other._raw_data_size,
                    self.fixed_size,
                    ))
            return self.fixed_size
        else:
            # Neither side is locked: use the larger raw size so nothing is
            # downsampled.
            return max(self._raw_data_size, other._raw_data_size)

    def _get_profile_for_size(self, profile_size):
        """
        Return the profile data interpolated to ``profile_size`` points,
        caching the result per size.
        """
        if not profile_size:
            # BUG FIX: this previously returned ``self._raw_data_size`` (an
            # int); callers expect a sequence of measurement values.
            return self._profile_data
        try:
            return self._interpolated_profiles[profile_size]
        except KeyError:
            self._interpolated_profiles[profile_size] = self._generate_interpolated_profile(profile_size)
            return self._interpolated_profiles[profile_size]

    def _generate_interpolated_profile(self, profile_size):
        """
        Build a list of exactly ``profile_size`` points interpolated from the
        raw data using ``self.interpolation_method``.  Raises ValueError if
        the profile is empty or ``profile_size`` is smaller than the raw
        data size.
        """
        if profile_size == self._raw_data_size:
            # No interpolation needed; return a copy so cache entries never
            # alias the raw data list.
            return list(self._profile_data)
        if self._raw_data_size == 0:
            raise ValueError("No data in profile")
        if profile_size < self._raw_data_size:
            raise ValueError("Error interpolating points in profile: number of requested interpolated points ({}) is less than raw data size ({})".format(
                profile_size,
                self._raw_data_size))
        default_bin_size = int(profile_size / self._raw_data_size)
        if default_bin_size == 0:
            raise ValueError("Profile size ({}) is too small for raw data size ({}), resulting in a null bin size".
                    format(profile_size, self._raw_data_size))
        bin_sizes = [default_bin_size] * self._raw_data_size
        # Due to rounding error, the default bin size may not be enough;
        # distribute the leftover points along the length of the line.
        diff = profile_size - (default_bin_size * self._raw_data_size)
        if diff > 0:
            dv = float(diff) / self._raw_data_size
            cv = 0.0
            for bin_idx in range(len(bin_sizes)):
                if diff <= 1.0:
                    break
                cv += dv
                if cv >= 1.0:
                    bin_sizes[bin_idx] += 1
                    diff -= 1.0
                    cv = cv - 1.0
        interpolated_profile = []
        if self.interpolation_method == "staircase":
            # Repeat each raw value a fixed number of times.
            for original_data_value in self._profile_data:
                self._interpolate_flat(
                        interpolated_profile=interpolated_profile,
                        value=original_data_value,
                        num_points=default_bin_size)
        elif self.interpolation_method == "piecewise_linear":
            # Linearly interpolate between each consecutive pair of raw
            # values, then append the final raw value.
            for bin_idx, original_data_value in enumerate(self._profile_data[:-1]):
                self._interpolate_linear(
                        interpolated_profile=interpolated_profile,
                        x1=bin_idx,
                        y1=self._profile_data[bin_idx],
                        y2=self._profile_data[bin_idx+1],
                        num_points=bin_sizes[bin_idx],
                        max_points=profile_size,
                        )
            interpolated_profile.append(self._profile_data[-1])
        return interpolated_profile

    def _interpolate_flat(self, interpolated_profile, value, num_points):
        """Append ``value`` to the profile ``num_points`` times."""
        interpolated_profile += [value] * num_points
        return interpolated_profile

    def _interpolate_linear(self, interpolated_profile, x1, y1, y2, num_points, max_points):
        """
        Append ``num_points`` linearly-spaced points from ``y1`` toward
        ``y2`` (exclusive), never growing the profile beyond ``max_points``.
        """
        assert num_points > 0
        slope = float(y2 - y1) / num_points
        for xi in range(num_points):
            if max_points and len(interpolated_profile) >= max_points:
                return interpolated_profile
            interpolated_profile.append((slope * xi) + y1)

class TreeProfile(object):
    """
    A bundle of MeasurementProfile instances extracted from a single tree,
    one per enabled measurement class, with pairwise distance support.
    """

    def __init__(self,
            tree,
            is_measure_edge_lengths=True,
            is_measure_patristic_distances=False,
            is_measure_patristic_steps=False,
            is_measure_node_distances=True,
            is_measure_node_steps=True,
            is_measure_node_ages=True,
            is_measure_coalescence_intervals=True,
            is_normalize=True,
            ultrametricity_precision=constants.DEFAULT_ULTRAMETRICITY_PRECISION,
            tree_phylogenetic_distance_matrix=None,
            tree_node_distance_matrix=None,
            tree_id=None,
            is_skip_normalization_on_zero_division_error=False,
            ):
        """
        Compile the requested measurement profiles from ``tree``.
        Precomputed matrices, if given, avoid recomputation in compile().
        """
        self.tree_id = tree_id
        self.is_measure_edge_lengths = is_measure_edge_lengths
        self.is_measure_patristic_distances = is_measure_patristic_distances
        self.is_measure_patristic_steps = is_measure_patristic_steps
        self.is_measure_node_distances = is_measure_node_distances
        self.is_measure_node_steps = is_measure_node_steps
        self.is_measure_node_ages = is_measure_node_ages
        self.is_measure_coalescence_intervals = is_measure_coalescence_intervals
        self.is_normalize = is_normalize
        self.is_skip_normalization_on_zero_division_error = is_skip_normalization_on_zero_division_error
        self.ultrametricity_precision = ultrametricity_precision
        self.measurement_profiles = collections.OrderedDict()
        # BUG FIX: the precomputed matrices were previously accepted by this
        # constructor but silently dropped instead of being forwarded.
        self.compile(tree,
                tree_phylogenetic_distance_matrix=tree_phylogenetic_distance_matrix,
                tree_node_distance_matrix=tree_node_distance_matrix)

    def compile(self,
            tree,
            tree_phylogenetic_distance_matrix=None,
            tree_node_distance_matrix=None,
            ):
        """
        Populate ``self.measurement_profiles`` from ``tree`` according to
        the ``is_measure_*`` flags set on this instance.
        """
        if self.is_measure_edge_lengths:
            if self.is_normalize:
                # Normalize edge lengths by total tree length (guard against
                # a zero-length tree).
                tree_length_nf = tree.length()
                if tree_length_nf == 0:
                    tree_length_nf = 1.0
            else:
                tree_length_nf = 1.0
            self.measurement_profiles["Edge.Lengths"] = MeasurementProfile(
                    profile_data=[float(e.length)/tree_length_nf for e in tree.postorder_edge_iter() if e.length is not None])
        if self.is_measure_patristic_distances or self.is_measure_patristic_steps:
            if tree_phylogenetic_distance_matrix is None:
                tree_phylogenetic_distance_matrix = tree.phylogenetic_distance_matrix()
            if self.is_measure_patristic_distances:
                self.measurement_profiles["Patristic.Distances"] = MeasurementProfile(
                        profile_data=tree_phylogenetic_distance_matrix.distances(
                            is_weighted_edge_distances=True,
                            is_normalize_by_tree_size=self.is_normalize,))
            if self.is_measure_patristic_steps:
                self.measurement_profiles["Patristic.Steps"] = MeasurementProfile(
                        profile_data=tree_phylogenetic_distance_matrix.distances(
                            is_weighted_edge_distances=False,
                            is_normalize_by_tree_size=self.is_normalize,))
        if self.is_measure_node_distances or self.is_measure_node_steps:
            if tree_node_distance_matrix is None:
                tree_node_distance_matrix = tree.node_distance_matrix()
            if self.is_measure_node_distances:
                self.measurement_profiles["Node.Distances"] = MeasurementProfile(
                        profile_data=tree_node_distance_matrix.distances(
                            is_weighted_edge_distances=True,
                            is_normalize_by_tree_size=self.is_normalize,))
            if self.is_measure_node_steps:
                self.measurement_profiles["Node.Steps"] = MeasurementProfile(
                        profile_data=tree_node_distance_matrix.distances(
                            is_weighted_edge_distances=False,
                            is_normalize_by_tree_size=self.is_normalize,))
        if self.is_measure_node_ages:
            node_ages = tree.calc_node_ages(ultrametricity_precision=self.ultrametricity_precision)
            if self.is_normalize:
                s = sum(node_ages)
                try:
                    normalized_node_ages = [a/s for a in node_ages]
                    node_ages = normalized_node_ages
                except ZeroDivisionError:
                    # All ages zero: optionally fall back to the raw values.
                    if self.is_skip_normalization_on_zero_division_error:
                        pass
                    else:
                        raise
            self.measurement_profiles["Node.Ages"] = MeasurementProfile(profile_data=node_ages,)
        if self.is_measure_coalescence_intervals:
            cf = coalescent.extract_coalescent_frames(
                    tree=tree,
                    ultrametricity_precision=self.ultrametricity_precision,)
            # Materialize to a list so the values can be summed and reused.
            waiting_times = list(cf.values())
            if self.is_normalize:
                s = sum(waiting_times)
                try:
                    normalized_waiting_times = [w/s for w in waiting_times]
                    waiting_times = normalized_waiting_times
                except ZeroDivisionError:
                    if self.is_skip_normalization_on_zero_division_error:
                        pass
                    else:
                        raise
            self.measurement_profiles["Coalescence.Intervals"] = MeasurementProfile(profile_data=waiting_times,)

    @property
    def measurement_names(self):
        """Names of the measurement classes enabled on this profile."""
        names = []
        if self.is_measure_edge_lengths:
            names.append("Edge.Lengths")
        if self.is_measure_patristic_distances:
            names.append("Patristic.Distances")
        if self.is_measure_patristic_steps:
            names.append("Patristic.Steps")
        if self.is_measure_node_distances:
            names.append("Node.Distances")
        if self.is_measure_node_steps:
            names.append("Node.Steps")
        if self.is_measure_node_ages:
            names.append("Node.Ages")
        if self.is_measure_coalescence_intervals:
            names.append("Coalescence.Intervals")
        return names

    def measure_distances(self,
            other_tree_profile,
            profile_size=None,
            is_weight_values_by_comparison_profile_size=True):
        """
        Return an OrderedDict mapping each measurement name to the Euclidean
        distance between this profile and ``other_tree_profile``.
        """
        d = collections.OrderedDict()
        for pm_name in self.measurement_profiles:
            p1 = self.measurement_profiles[pm_name]
            p2 = other_tree_profile.measurement_profiles[pm_name]
            # BUG FIX: ``profile_size`` was previously hard-coded to None
            # here, silently ignoring the caller's argument.
            d[pm_name] = p1.distance(p2,
                    profile_size=profile_size,
                    is_weight_values_by_comparison_profile_size=is_weight_values_by_comparison_profile_size)
        return d
dvlpsh/leetcode-1
python/116_Populating_Next_Right_Pointers_in_Each_Node.py
# Definition for binary tree with next pointer. # class TreeLinkNode(object): # def __init__(self, x): # self.val = x # self.left = None # self.right = None # self.next = None class Solution(object): def connect(self, root): """ :type root: TreeLinkNode :rtype: nothing """ if root is None: return nodes = [root] while len(nodes) != 0: next_step = [] last = None for node in nodes: if last is not None: last.next = node if node.left is not None: next_step.append(node.left) if node.right is not None: next_step.append(node.right) last = node nodes = next_step
juancgalvis/aerolinea-udea-fsi-android
app/src/main/java/es/hol/galvisoft/aerolina/activities/MainActivity.java
<gh_stars>0 package es.hol.galvisoft.aerolina.activities; import android.app.Activity; import android.os.Bundle; import android.util.Log; import android.widget.ListView; import com.parse.ParseException; import com.parse.ParseObject; import com.parse.SaveCallback; import es.hol.galvisoft.aerolina.R; import es.hol.galvisoft.aerolina.adapters.OptionsMainAdapter; import es.hol.galvisoft.aerolina.data.AirportDataManager; import es.hol.galvisoft.aerolina.model.Airport; public class MainActivity extends Activity { @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); ListView listView = (ListView) findViewById(R.id.option_list); listView.setAdapter(new OptionsMainAdapter(this)); listView.setOnItemClickListener((OptionsMainAdapter) listView.getAdapter()); } private void crearAeropuertos() { AirportDataManager airportDataManager = new AirportDataManager(getApplicationContext()); for (final Airport airport : airportDataManager.getAll()) { final ParseObject airportParse = ParseObject.create("Airport"); if (airport.getId() == null) { airportParse.put("name", airport.getName()); airportParse.put("city", airport.getCity()); airportParse.put("iata", airport.getIata()); airportParse.saveInBackground(new SaveCallback() { @Override public void done(ParseException e) { Log.e("air", "INSERT INTO airport VALUES(\"" + airportParse.getObjectId() + "\",\"" + airportParse.getString("name") + "\",\"" + airportParse.getString("city") + "\",\"" + airportParse.getString("iata") + "\");"); } }); } } } }
devrchancay/react-rainbow
src/components/WeeklyCalendar/styled/controls.js
import styled from 'styled-components'; import { MARGIN_MEDIUM } from '../../../styles/margins'; const StyledControls = styled.div` display: flex; flex: 0 0 auto; align-items: center; justify-content: space-between; padding: 0 0 8px 0; > div:first-child { display: flex; justify-content: flex-start; align-items: center; flex-grow: 1; margin-right: ${MARGIN_MEDIUM}; } `; export default StyledControls;
elloop/OpenGL-ES-2.0-cpp
First/scenes/ELDrawable.h
#pragma once #include "elloop/inc.h" #include "basic/ELRef.h" NS_BEGIN(elloop); class Drawable : public Ref { public: virtual void render() {} }; NS_END(elloop);
amoeba/treestats.ne
util/multi/test_multi.rb
# Test the /send method with a fake POST require 'net/http' require 'json' require 'base64' # uri = URI.parse("http://floating-meadow-8649.herokuapp.com/") uri = URI.parse("http://localhost:9292") endpoint = Net::HTTP.new(uri.host, uri.port) files = Dir["./characters/*.json"] exit unless files.length > 0 files.each do |file| request = Net::HTTP::Post.new(uri.request_uri) request.body = File.open(file, 'rb') { |file| file.read } response = endpoint.request(request) puts response end
YanDoroshenko/avataxsdk
modules/core/src/main/scala/org/upstartcommerce/avataxsdk/core/data/models/NoticeModel.scala
/* Copyright 2019 UpStart Commerce, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.upstartcommerce.avataxsdk.core.data.models

import java.sql.Date
import org.upstartcommerce.avataxsdk.core.data.enums._

/**
 * Immutable data model for a tax-notice record.
 *
 * Required fields (no default): id, companyId, statusId, receivedDate,
 * customerTypeId, reasonId, priorityId, hideFromCustomer,
 * showResolutionDateToCustomer.  All other fields are Option-typed and
 * default to None.
 *
 * Every `withX` method returns an updated copy (immutable-builder style):
 * required fields are set directly, optional fields are wrapped in Some.
 */
final case class NoticeModel(
    id: Int,
    companyId: Int,
    statusId: Int,
    status: Option[String] = None,
    receivedDate: Date,
    closedDate: Option[Date] = None,
    totalRemit: Option[BigDecimal] = None,
    customerTypeId: NoticeCustomerType,
    country: Option[String] = None,
    region: Option[String] = None,
    taxAuthorityId: Option[Int] = None,
    filingFrequency: Option[FilingFrequencyId] = None,
    filingTypeId: Option[TaxNoticeFilingTypeId] = None,
    ticketReferenceNo: Option[String] = None,
    ticketReferenceUrl: Option[String] = None,
    salesForceCase: Option[String] = None,
    salesForceCaseUrl: Option[String] = None,
    taxPeriod: Option[String] = None,
    reasonId: Int,
    reason: Option[String] = None,
    typeId: Option[Int] = None,
    `type`: Option[String] = None,
    customerFundingOptionId: Option[FundingOption] = None,
    priorityId: NoticePriorityId,
    customerComment: Option[String] = None,
    hideFromCustomer: Boolean,
    expectedResolutionDate: Option[Date] = None,
    showResolutionDateToCustomer: Boolean,
    closedByUserId: Option[Int] = None,
    createdByUserName: Option[String] = None,
    ownedByUserId: Option[Int] = None,
    description: Option[String] = None,
    avaFileFormId: Option[Int] = None,
    revenueContactId: Option[Int] = None,
    complianceContactId: Option[Int] = None,
    taxFormCode: Option[String] = None,
    documentReference: Option[String] = None,
    jurisdictionName: Option[String] = None,
    jurisdictionType: Option[String] = None,
    comments: Option[List[NoticeCommentModel]] = None,
    finances: Option[List[NoticeFinanceModel]] = None,
    responsibility: Option[List[NoticeResponsibilityDetailModel]] = None,
    rootCause: Option[List[NoticeRootCauseDetailModel]] = None,
    createdDate: Option[Date] = None,
    createdUserId: Option[Int] = None,
    modifiedDate: Option[Date] = None,
    modifiedUserId: Option[Int] = None) {

  // Convenience accessors: flatten the optional lists, substituting an
  // empty list for None.  Lazily computed once per instance.
  lazy val commentsRaw: List[NoticeCommentModel] = comments.getOrElse(List.empty)
  lazy val financesRaw: List[NoticeFinanceModel] = finances.getOrElse(List.empty)
  lazy val responsibilityRaw: List[NoticeResponsibilityDetailModel] = responsibility.getOrElse(List.empty)
  lazy val rootCauseRaw: List[NoticeRootCauseDetailModel] = rootCause.getOrElse(List.empty)

  // Copy-on-write setters, one per field, in declaration order.
  def withId(value: Int): NoticeModel = copy(id = value)
  def withCompanyId(value: Int): NoticeModel = copy(companyId = value)
  def withStatusId(value: Int): NoticeModel = copy(statusId = value)
  def withStatus(value: String): NoticeModel = copy(status = Some(value))
  def withReceivedDate(value: Date): NoticeModel = copy(receivedDate = value)
  def withClosedDate(value: Date): NoticeModel = copy(closedDate = Some(value))
  def withTotalRemit(value: BigDecimal): NoticeModel = copy(totalRemit = Some(value))
  def withCustomerTypeId(value: NoticeCustomerType): NoticeModel = copy(customerTypeId = value)
  def withCountry(value: String): NoticeModel = copy(country = Some(value))
  def withRegion(value: String): NoticeModel = copy(region = Some(value))
  def withTaxAuthorityId(value: Int): NoticeModel = copy(taxAuthorityId = Some(value))
  def withFilingFrequency(value: FilingFrequencyId): NoticeModel = copy(filingFrequency = Some(value))
  def withFilingTypeId(value: TaxNoticeFilingTypeId): NoticeModel = copy(filingTypeId = Some(value))
  def withTicketReferenceNo(value: String): NoticeModel = copy(ticketReferenceNo = Some(value))
  def withTicketReferenceUrl(value: String): NoticeModel = copy(ticketReferenceUrl = Some(value))
  def withSalesForceCase(value: String): NoticeModel = copy(salesForceCase = Some(value))
  def withSalesForceCaseUrl(value: String): NoticeModel = copy(salesForceCaseUrl = Some(value))
  def withTaxPeriod(value: String): NoticeModel = copy(taxPeriod = Some(value))
  def withReasonId(value: Int): NoticeModel = copy(reasonId = value)
  def withReason(value: String): NoticeModel = copy(reason = Some(value))
  def withTypeId(value: Int): NoticeModel = copy(typeId = Some(value))
  def withType(value: String): NoticeModel = copy(`type` = Some(value))
  def withCustomerFundingOptionId(value: FundingOption): NoticeModel = copy(customerFundingOptionId = Some(value))
  def withPriorityId(value: NoticePriorityId): NoticeModel = copy(priorityId = value)
  def withCustomerComment(value: String): NoticeModel = copy(customerComment = Some(value))
  def withHideFromCustomer(value: Boolean): NoticeModel = copy(hideFromCustomer = value)
  def withExpectedResolutionDate(value: Date): NoticeModel = copy(expectedResolutionDate = Some(value))
  def withShowResolutionDateToCustomer(value: Boolean): NoticeModel = copy(showResolutionDateToCustomer = value)
  def withClosedByUserId(value: Int): NoticeModel = copy(closedByUserId = Some(value))
  def withCreatedByUserName(value: String): NoticeModel = copy(createdByUserName = Some(value))
  def withOwnedByUserId(value: Int): NoticeModel = copy(ownedByUserId = Some(value))
  def withDescription(value: String): NoticeModel = copy(description = Some(value))
  def withAvaFileFormId(value: Int): NoticeModel = copy(avaFileFormId = Some(value))
  def withRevenueContactId(value: Int): NoticeModel = copy(revenueContactId = Some(value))
  def withComplianceContactId(value: Int): NoticeModel = copy(complianceContactId = Some(value))
  def withTaxFormCode(value: String): NoticeModel = copy(taxFormCode = Some(value))
  def withDocumentReference(value: String): NoticeModel = copy(documentReference = Some(value))
  def withJurisdictionName(value: String): NoticeModel = copy(jurisdictionName = Some(value))
  def withJurisdictionType(value: String): NoticeModel = copy(jurisdictionType = Some(value))
  def withComments(value: List[NoticeCommentModel]): NoticeModel = copy(comments = Some(value))
  def withFinances(value: List[NoticeFinanceModel]): NoticeModel = copy(finances = Some(value))
  def withResponsibility(value: List[NoticeResponsibilityDetailModel]): NoticeModel = copy(responsibility = Some(value))
  def withRootCause(value: List[NoticeRootCauseDetailModel]): NoticeModel = copy(rootCause = Some(value))
  def withCreatedDate(value: Date): NoticeModel = copy(createdDate = Some(value))
  def withCreatedUserId(value: Int): NoticeModel = copy(createdUserId = Some(value))
  def withModifiedDate(value: Date): NoticeModel = copy(modifiedDate = Some(value))
  def withModifiedUserId(value: Int): NoticeModel = copy(modifiedUserId = Some(value))
}
yiv/yivserver
game/service/ucclient.go
<filename>game/service/ucclient.go package service import ( "context" "io" "time" stdopentracing "github.com/opentracing/opentracing-go" "google.golang.org/grpc" "github.com/go-kit/kit/endpoint" "github.com/go-kit/kit/log" "github.com/go-kit/kit/log/level" "github.com/go-kit/kit/sd" ketcd "github.com/go-kit/kit/sd/etcd" "github.com/go-kit/kit/sd/lb" "fmt" "github.com/yiv/yivgame/game/gamer" ucgrpccli "github.com/yiv/yivgame/usercenter/client" "github.com/yiv/yivgame/usercenter/service" ) type userCenter struct { logger log.Logger endpoints service.Endpoints } func (u *userCenter) GetInfo(id gamer.UserID) (info *gamer.PlayerInfo, err error) { var ctx = context.Background() user, err := u.endpoints.GetUserInfo(ctx, int64(id)) if err != nil { level.Error(u.logger).Log("userCenter", "GetInfo", "id", id, "err", err.Error()) return nil, err } var friends []gamer.UserID for _, f := range user.Friends { friends = append(friends, gamer.UserID(f)) } info = &gamer.PlayerInfo{ Token: user.Token, SeatCode: gamer.SeatCode(user.Online), Coin: user.Coin, Gem: user.Gem, Nick: user.Nick, Avatar: user.Avatar, Friends: friends, Character: user.Others["character"], } return } func NewUserCenter(serviceName string, etcdAddr []string, retryMax int, retryTimeout time.Duration, logger log.Logger) (uc *userCenter, err error) { var ctx = context.Background() tracer := stdopentracing.GlobalTracer() // no-op client, err := ketcd.NewClient(ctx, etcdAddr, ketcd.ClientOptions{}) if err != nil { level.Error(logger).Log("userCenter", "NewUserCenter", "err", err.Error()) return nil, fmt.Errorf("NewClient err : %s", err.Error()) } instancer, err := ketcd.NewInstancer(client, serviceName, logger) endpoints := service.Endpoints{} { factory := factoryFor(service.MakeGetUserInfoEndpoint, tracer, logger) endpointer := sd.NewEndpointer(instancer, factory, logger) balancer := lb.NewRoundRobin(endpointer) retry := lb.Retry(retryMax, retryTimeout, balancer) endpoints.GetUserInfoEndpoint = retry } uc = 
&userCenter{ logger: logger, endpoints: endpoints, } return } func factoryFor(makeEndpoint func(service.Service) endpoint.Endpoint, tracer stdopentracing.Tracer, logger log.Logger) sd.Factory { return func(instance string) (endpoint.Endpoint, io.Closer, error) { conn, err := grpc.Dial(instance, grpc.WithInsecure()) if err != nil { level.Error(logger).Log("userCenter", "factoryFor", "err", err.Error()) return nil, nil, err } svr := ucgrpccli.New(conn, tracer, logger) ep := makeEndpoint(svr) return ep, conn, nil } }
tripplan/tripplan
src/components/Form/Fields/index.js
<gh_stars>0 import ToggleComponent from "./Toggle" import InputComponent from "./Input" import ListComponent from "./List" import Field from "./Field" export const Toggle = Field(ToggleComponent) export const Input = Field(InputComponent) export const List = Field(ListComponent)
glycerine/inferno
limbo/fns.h
/*
 * Shared extern function prototypes for the compiler sources.
 * Declarations are kept in alphabetical order.
 *
 * The `#pragma varargck argpos f n` lines mark argument n of f as a
 * print-style format string for Plan 9-style vararg checking; other
 * compilers ignore the unknown pragma.
 */
int addfile(File*);
void addfnptrs(Decl*, int);
void addiface(Decl*, Decl*);
void addinclude(char*);
char *addrprint(char*, char*, int, Addr*);
Typelist *addtype(Type*, Typelist*);
Node *adtdecl(Decl *ids, Node *fields);
void adtdecled(Node *n);
void adtdefd(Type*);
Decl *adtmeths(Type*);
void adtstub(Decl*);
long align(long, int);
void *allocmem(ulong);
void altcheck(Node *an, Type *ret);
void altcom(Node*);
Inst *andand(Node*, int, Inst*);
Decl *appdecls(Decl*, Decl*);
int argcompat(Node*, Decl*, Node*);
void arraycom(Node*, Node*);
void arraydefault(Node*, Node*);
Type *arrowtype(Type*, Decl*);
void asmdesc(Desc*);
void asmentry(Decl*);
void asmexc(Except*);
void asminitializer(long, Node*);
void asminst(Inst*);
void asmldt(long, Decl*);
void asmmod(Decl*);
void asmpath(void);
void asmstring(long, Sym*);
void asmvar(long, Decl*);
int assignindices(Node*);
void bccom(Node*, Inst**);
Inst *bcom(Node*, int, Inst*);
void bindnames(Node*);
void bindtypes(Type *t);
Ok callcast(Node*, int, int);
void callcom(Src*, int, Node*, Node*);
Type* calltype(Type*, Node*, Type*);
double canontod(ulong v[2]);
void casecheck(Node *cn, Type *ret);
int casecmp(Type*, Node*, Node*);
void casecom(Node*);
Node *caselist(Node*, Node*);
void casesort(Type*, Label*, Label*, int, int);
Case *checklabels(Node *inits, Type *ctype, int nlab, char *title);
void checkrefs(Decl*);
Node *checkused(Node*);
int circlval(Node*, Node*);
void concheck(Node *n, int isglobal);
Node *condecl(Decl*, Node*);
void condecled(Node *n);
void constub(Decl*);
Type *copytypeids(Type*);
char *ctprint(char*, char*, Type*);
int ctypeconv(Fmt*);
Line curline(void);
Decl *curscope(void);
int cycarc(Type*, Type*);
void cycfield(Type*, Decl*);
void cycsizetype(Type*);
void cyctype(Type*);
int dasdecl(Node *n);
void declaserr(Node*);
int declasinfer(Node*, Type*);
int declconv(Fmt*);
Decl *declsort(Decl*);
void declstart(void);
void decltozero(Node *n);
void deldecl(Decl*);
int dequal(Decl*, Decl*, int);
long descmap(Decl*, uchar*, long);
void disaddr(int, Addr*);
void disbcon(long);
void discon(long);
void disdata(int, long);
void disdesc(Desc*);
void disentry(Decl*);
void disexc(Except*);
void disinst(Inst*);
void disldt(long, Decl*);
void dismod(Decl*);
void dispath(void);
void disvar(long, Decl*);
void disword(long);
int dotconv(Fmt*);
char *dotprint(char*, char*, Decl*, int);
Type *dottype(Type*, Decl*);
void dtocanon(double, ulong[2]);
Decl *dupdecl(Decl*);
Decl *dupdecls(Decl*);
Node *dupn(int, Src*, Node*);
Node *eacom(Node*, Node*, Node*);
Ok echeck(Node *n, int typeok, int isglobal, Node* par);
Node *ecom(Src*, Node*, Node*);
Node *efold(Node *n);
Node *elemsort(Node*);
void emit(Decl*);
Decl *encpolys(Decl*);
Sym *enter(char*, int);
Desc *enterdesc(uchar*, long, long);
Sym *enterstring(char*, int);
char *eprint(char*, char*, Node*);
char *eprintlist(char*, char*, Node*, char*);
void error(Line, char*, ...);
#pragma varargck argpos error 2
int etconv(Fmt*);
Node *etolist(Node*);
void excheck(Node *n, int isglobal);
void exccheck(Node *cn, Type *ret);
void excom(Node*);
Node *exdecl(Decl*, Decl*);
void exdecled(Node *n);
Type *expandtype(Type*, Type*, Decl*, Tpair**);
Type *expandtypes(Type*, Decl*);
int expconv(Fmt*);
Type *exptotype(Node*);
void fatal(char*, ...);
#pragma varargck argpos fatal 1
void fielddecled(Node *n);
Node *fielddecl(int store, Decl *ids);
int findlab(Type *ty, Node *v, Label *labs, int nlab);
int fixop(int, Type*, Type*, Type*, int*, int*);
Fline fline(int);
void fmtcheck(Node*, Node*, Node*);
void fncheck(Decl *d);
Decl *fnchk(Node *n);
void fncom(Decl*);
Node *fndecl(Node *n, Type *t, Node *body);
void fndecled(Node *n);
Decl* fnlookup(Sym*, Type*, Node**);
Node *fold(Node*);
void foldbranch(Inst*);
Node *foldc(Node*);
Node *foldcast(Node*, Node*);
Node *foldcasti(Node*, Node*);
Node *foldr(Node*);
Node *foldvc(Node*);
void gbind(Node *n);
int gcheck(Node*, Decl**, int);
void gdasdecl(Node *n);
void gdecl(Node *n);
Addr genaddr(Node*);
Inst *genbra(Src*, int, Node*, Node*);
Inst *genchan(Src*, Node*, Type*, Node*);
Desc *gendesc(Decl*, long, Decl*);
Inst *genfixcastop(Src*, int, Node*, Node*);
Inst *genmove(Src*, int, Type*, Node*, Node*);
Inst *genop(Src*, int, Node*, Node*, Node*);
Inst *genrawop(Src*, int, Node*, Node*, Node*);
void genstart(void);
long getpc(Inst*);
int gfltconv(Fmt*);
Decl *globalBconst(Node*);
Node *globalas(Node*, Node*, int);
Decl *globalbconst(Node*);
Decl *globalconst(Node*);
Decl *globalfconst(Node*);
Decl *globalsconst(Node*);
void gsort(Node*);
int hasasgns(Node*);
int hascall(Node*);
Node *hascomm(Node*);
int hasside(Node*, int);
long idindices(Decl*);
long idoffsets(Decl*, long, int);
Type *idtype(Type*);
void importcheck(Node *n, int isglobal);
void importchk(Node*);
Node *importdecl(Node *m, Decl *ids);
void importdecled(Node *n);
void includef(Sym*);
Node *indsascom(Src*, Node*, Node*);
int initable(Node*, Node*, int);
int inloop(void);
void installids(int store, Decl *ids);
int instconv(Fmt*);
Type *insttype(Type*, Decl*, Tpair**);
Type *internaltype(Type*);
int isimpmod(Sym*);
int islval(Node*);
int ispoly(Decl*);
int ispolyadt(Type*);
int istuple(Node*);
void joiniface(Type*, Type*);
void lexinit(void);
void lexstart(char*);
int lineconv(Fmt*);
int local(Decl*);
Decl *lookdot(Decl*, Sym*);
Decl *lookup(Sym*);
int mapconv(Fmt*);
int marklval(Node*);
int mathchk(Node*, int);
void mergepolydecs(Type*);
Type *mkadtcon(Type*);
Type *mkadtpickcon(Type*, Type*);
Type *mkarrowtype(Line*, Line*, Type*, Sym*);
Node *mkbin(int, Node*, Node*);
Node *mkconst(Src*, Long);
Decl *mkdecl(Src*, int, Type*);
Node *mkdeclname(Src*, Decl*);
Desc *mkdesc(long, Decl*);
Type *mkdottype(Line*, Line*, Type*, Sym*);
Type *mkexbasetype(Type*);
Type *mkextuptype(Type*);
Type *mkextype(Type*);
File *mkfile(char*, int, int, int, char*, int, int);
Decl *mkids(Src*, Sym*, Type*, Decl*);
Type *mkidtype(Src*, Sym*);
Type *mkiface(Decl*);
Inst *mkinst(void);
Type *mkinsttype(Src*, Type*, Typelist*);
Node *mkname(Src*, Sym*);
Node *mknil(Src*);
Node *mkn(int, Node*, Node*);
Node *mkrconst(Src*, Real);
Node *mksconst(Src*, Sym*);
Node *mkscope(Node *body);
Type *mktalt(Case*);
Desc *mktdesc(Type*);
Node *mktn(Type*);
Type *mktype(Line*, Line*, int, Type*, Decl*);
Node *mkunary(int, Node*);
Type *mkvarargs(Node*, Node*);
Teq *modclass(void);
void modcode(Decl*);
void modcom(Decl*);
void moddataref(void);
Node *moddecl(Decl *ids, Node *fields);
void moddecled(Node *n);
Decl *modglobals(Decl*, Decl*);
Decl *modimp(Dlist*, Decl*);
void modrefable(Type*);
void modresolve(void);
void modstub(Decl*);
void modtab(Decl*);
Decl *module(Decl*);
int mustzero(Decl *);
int mpatof(char*, double*);
Decl *namedot(Decl*, Sym*);
Decl *namesort(Decl*);
void narrowmods(void);
void nerror(Node*, char*, ...);
#pragma varargck argpos nerror 2
int nested(void);
Inst *nextinst(void);
int nodeconv(Fmt*);
int nodes(Node*);
char *nprint(char*, char*, Node*, int);
void nwarn(Node*, char*, ...);
#pragma varargck argpos nwarn 2
int occurs(Decl*, Node*);
int opconv(Fmt*);
void optabinit(void);
void optim(Inst*, Decl*);
Inst *oror(Node*, int, Inst*);
Decl *outerpolys(Node*);
Node *passfns(Src*, Decl*, Node*, Node*, Type*, Tpair*);
Node *passimplicit(Node*, Node*);
void patch(Inst*, Inst*);
void pickcheck(Node*, Type*);
int pickdecled(Node *n);
Decl *pickdefd(Type*, Decl*);
void pickdupcom(Src*, Node*, Node*);
Decl* polydecl(Decl*);
int polyequal(Decl*, Decl*);
void popblock(void);
Decl *popids(Decl*);
void popscopes(void);
Decl *popscope(void);
void printdecls(Decl*);
int pushblock(void);
void pushlabel(Node*);
void pushscope(Node *, int);
void raisescheck(Type*);
int raisescompat(Node*, Node*);
void reach(Inst*);
void *reallocmem(void*, ulong);
void recvacom(Src*, Node*, Node*);
void redecl(Decl *d);
void reftype(Type*);
void repushblock(int);
void repushids(Decl*);
void resizetype(Type*);
long resolvedesc(Decl*, long, Decl*);
Decl* resolveldts(Decl*, Decl**);
int resolvemod(Decl*);
long resolvepcs(Inst*);
Node *retalloc(Node*, Node*);
Decl *revids(Decl*);
Node *rewrite(Node *n);
Node *rewritecomm(Node*, Node*, Node*, Node*);
Inst *rewritedestreg(Inst*, int, int);
Inst *rewritesrcreg(Inst*, int, int, int);
void rmfnptrs(Decl*);
Node *rotater(Node*);
double rpow(double, int);
int sameaddr(Node*, Node*);
int sametree(Node*, Node*);
void sblfiles(void);
void sblfn(Decl**, int);
void sblinst(Inst*, long);
void sblmod(Decl*);
void sblty(Decl**, int);
void sblvar(Decl*);
double scale(Type*);
double scale2(Type*, Type*);
Node* scheck(Node*, Type*, int);
void scom(Node*);
char *secpy(char*, char*, char*);
char *seprint(char*, char*, char*, ...);
#pragma varargck argpos seprint 3
void shareloc(Decl*);
int shiftchk(Node*);
ulong sign(Decl*);
Node *simplify(Node*);
Szal sizeids(Decl*, long);
void sizetype(Type*);
Node *slicelcom(Src*, Node*, Node*);
int specific(Type*);
int srcconv(Fmt*);
char* srcpath(char*, int);
int storeconv(Fmt*);
char *stprint(char*, char*, Type*);
Sym *stringcat(Sym*, Sym*);
char *stringpr(char*, char*, Sym*);
Long strtoi(char*, int);
int stypeconv(Fmt*);
Node *sumark(Node*);
int symcmp(Sym*, Sym*);
Node *tacquire(Node*);
Ok tagcast(Node*, Node*, Node*, Decl*, int, int);
Node *talloc(Node*, Type*, Node*);
int tcompat(Type*, Type*, int);
void tcycle(Type*);
Decl *tdecls(void);
long tdescmap(Type*, uchar*, long);
void teqclass(Type*);
int tequal(Type*, Type*);
void tfree(Node*);
void tfreelater(Node*);
void tfreenow(void);
void tinit(void);
int tmustzero(Type *);
Type *toptype(Src*, Type*);
Type *topvartype(Type *t, Decl *id, int tyok, int polyok);
Type* tparent(Type*, Type*);
char *tprint(char*, char*, Type*);
void translate(char*, char*, char*);
void trelease(Node*);
int tunify(Type*, Type*, Tpair**);
int tupaliased(Node*, Node*);
int tupsaliased(Node*, Node*);
void tupcom(Node*, Node*);
void tuplcom(Node*, Node*);
void tuplrcom(Node*, Node*);
Decl *tuplefields(Node*);
void typebuiltin(Decl*, Type*);
Decl *typecheck(int);
int typeconv(Fmt*);
Node *typedecl(Decl *ids, Type *t);
void typedecled(Node *n);
Decl *typeids(Decl*, Type*);
void typeinit(void);
void typestart(void);
Decl *undefed(Src *src, Sym *s);
Desc *usedesc(Desc*);
void usedty(Type*);
Type *usetype(Type*);
Type *validtype(Type*, Decl*);
int valistype(Node*);
Type *valtmap(Type*, Tpair*);
void varcheck(Node *n, int isglobal);
int varcom(Decl*);
Node *vardecl(Decl*, Type*);
void vardecled(Node *n);
Node *varinit(Decl*, Node*);
Decl *varlistdecl(Decl*, Node*);
Decl *vars(Decl*);
int vcom(Decl*);
Type *verifytypes(Type*, Decl*, Decl*);
void warn(Line, char*, ...);
#pragma varargck argpos warn 2
void yyerror(char*, ...);
#pragma varargck argpos yyerror 1
int yylex(void);
int yyparse(void);
void zcom(Node *, Node**);
void zcom0(Decl *, Node**);
void zcom1(Node *, Node**);