hexsha stringlengths 40 40 | size int64 5 1.05M | ext stringclasses 98
values | lang stringclasses 21
values | max_stars_repo_path stringlengths 3 945 | max_stars_repo_name stringlengths 4 118 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 368k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 945 | max_issues_repo_name stringlengths 4 118 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 134k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 945 | max_forks_repo_name stringlengths 4 135 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 5 1.05M | avg_line_length float64 1 1.03M | max_line_length int64 2 1.03M | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
df017d7dd402e851ddd14351e50b1183f6d6ac3b | 3,078 | sql | SQL | scorewater-minimum-viable-iot-agent/scorewater-turbinator/src/main/resources/sql/03_create_views.sql | CivityNL/SCOREwater-Java | 355a591454335acaae275f5845a7464225cf84c4 | [
"BSD-3-Clause"
] | null | null | null | scorewater-minimum-viable-iot-agent/scorewater-turbinator/src/main/resources/sql/03_create_views.sql | CivityNL/SCOREwater-Java | 355a591454335acaae275f5845a7464225cf84c4 | [
"BSD-3-Clause"
] | null | null | null | scorewater-minimum-viable-iot-agent/scorewater-turbinator/src/main/resources/sql/03_create_views.sql | CivityNL/SCOREwater-Java | 355a591454335acaae275f5845a7464225cf84c4 | [
"BSD-3-Clause"
] | null | null | null | /*
* Create views
* Usage: PGPASSWORD=password psql -U turbinator -h host.docker.internal -d turbinator -p 5432 -f 03_create_views.sql
*/
BEGIN;
/*
 * WaterQualityObserved view for device "wipawoik".
 *
 * Shapes raw turbinator measurements into the FIWARE WaterQualityObserved
 * column layout. This device only measures turbidity, so every other
 * measurand is emitted as a typed NULL placeholder to keep the column set
 * complete. Each measurement is joined to the location record that was in
 * effect at its recording time.
 */
CREATE OR REPLACE VIEW water_quality_observed_wipawoik AS (
    SELECT
        a.entity_id,
        DATE(a.recording_timestamp) AS recording_date,
        -- row_number = 1 marks the most recent measurement per entity
        ROW_NUMBER() OVER (PARTITION BY a.entity_id ORDER BY a.recording_timestamp DESC) AS row_number,
        a.recording_timestamp,
        'WaterQualityObserved'::TEXT AS type,
        'WaterQualityObserved'::TEXT AS dtype,
        'IVL'::TEXT AS data_provider,
        a.recording_timestamp AS date_modified,
        NULL::TIMESTAMP WITHOUT TIME ZONE AS date_created,
        a.recording_timestamp AS date_observed,
        b.lon AS lon,
        b.lat AS lat,
        -- WGS84 (SRID 4326) point built from the matched location record
        ST_SETSRID(ST_MAKEPOINT(b.lon, b.lat), 4326) AS geom,
        NULL::TEXT AS properties,
        NULL::TEXT AS address,
        NULL::TEXT AS ref_point_of_interest,
        'IVL'::TEXT AS source,
        NULL::FLOAT AS temperature,
        NULL::TIMESTAMP WITHOUT TIME ZONE AS temperature_timestamp,
        NULL::FLOAT AS conductivity,
        NULL::TIMESTAMP WITHOUT TIME ZONE AS conductivity_timestamp,
        NULL::FLOAT AS conductance,
        NULL::TIMESTAMP WITHOUT TIME ZONE AS conductance_timestamp,
        NULL::FLOAT AS tss,
        NULL::TIMESTAMP WITHOUT TIME ZONE AS tss_timestamp,
        NULL::FLOAT AS tds,
        NULL::TIMESTAMP WITHOUT TIME ZONE AS tds_timestamp,
        a.turbidity AS turbidity,
        a.recording_timestamp AS turbidity_timestamp,
        NULL::FLOAT AS salinity,
        NULL::TIMESTAMP WITHOUT TIME ZONE AS salinity_timestamp,
        NULL::FLOAT AS ph,
        NULL::TIMESTAMP WITHOUT TIME ZONE AS ph_timestamp,
        NULL::FLOAT AS orp,
        NULL::TIMESTAMP WITHOUT TIME ZONE AS orp_timestamp,
        NULL::BYTEA AS measurand,
        NULL::FLOAT AS o2,
        NULL::TIMESTAMP WITHOUT TIME ZONE AS o2_timestamp,
        NULL::FLOAT AS chla,
        NULL::TIMESTAMP WITHOUT TIME ZONE AS chla_timestamp,
        NULL::FLOAT AS pe,
        NULL::TIMESTAMP WITHOUT TIME ZONE AS pe_timestamp,
        NULL::FLOAT AS pc,
        NULL::TIMESTAMP WITHOUT TIME ZONE AS pc_timestamp,
        NULL::FLOAT AS nh4,
        NULL::TIMESTAMP WITHOUT TIME ZONE AS nh4_timestamp,
        NULL::FLOAT AS nh3,
        NULL::TIMESTAMP WITHOUT TIME ZONE AS nh3_timestamp,
        NULL::FLOAT AS cl,
        NULL::TIMESTAMP WITHOUT TIME ZONE AS cl_timestamp,
        NULL::FLOAT AS no3,
        NULL::TIMESTAMP WITHOUT TIME ZONE AS no3_timestamp
    FROM turbinator_measurement_wipawoik AS a
    -- For each location row compute its validity window
    -- [recording_timestamp, next_recording_timestamp); the latest row per
    -- entity has an open-ended window (next_recording_timestamp IS NULL).
    INNER JOIN (
        SELECT
            entity_id,
            recording_timestamp,
            LEAD(recording_timestamp) OVER (PARTITION BY entity_id ORDER BY recording_timestamp) AS next_recording_timestamp,
            lon,
            lat
        FROM public.turbinator_location_wipawoik
    ) AS b
        ON a.entity_id = b.entity_id
        AND a.recording_timestamp >= b.recording_timestamp
        AND (a.recording_timestamp < b.next_recording_timestamp OR b.next_recording_timestamp IS NULL)
);
ALTER TABLE water_quality_observed_wipawoik OWNER TO turbinator;
END; | 40.5 | 178 | 0.67024 |
fd85e269a25b6a485e101c3c0d878a96afc93327 | 4,607 | h | C | zircon/kernel/arch/x86/include/arch/x86/pv.h | allansrc/fuchsia | a2c235b33fc4305044d496354a08775f30cdcf37 | [
"BSD-2-Clause"
] | 210 | 2019-02-05T12:45:09.000Z | 2022-03-28T07:59:06.000Z | zircon/kernel/arch/x86/include/arch/x86/pv.h | PlugFox/fuchsia | 39afe5230d41628b3c736a6e384393df954968c8 | [
"BSD-2-Clause"
] | 56 | 2021-06-03T03:16:25.000Z | 2022-03-20T01:07:44.000Z | zircon/kernel/arch/x86/include/arch/x86/pv.h | PlugFox/fuchsia | 39afe5230d41628b3c736a6e384393df954968c8 | [
"BSD-2-Clause"
] | 73 | 2019-03-06T18:55:23.000Z | 2022-03-26T12:04:51.000Z | // Copyright 2020 The Fuchsia Authors
//
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file or at
// https://opensource.org/licenses/MIT
#ifndef ZIRCON_KERNEL_ARCH_X86_INCLUDE_ARCH_X86_PV_H_
#define ZIRCON_KERNEL_ARCH_X86_INCLUDE_ARCH_X86_PV_H_
#include <zircon/types.h>
#include <ktl/atomic.h>
#include <vm/pmm.h>
static constexpr uint32_t kKvmSystemTimeMsrOld = 0x12;
static constexpr uint32_t kKvmSystemTimeMsr = 0x4b564d01;
static constexpr uint32_t kKvmBootTimeOld = 0x11;
static constexpr uint32_t kKvmBootTime = 0x4b564d00;
static constexpr uint32_t kKvmFeatureClockSourceOld = 1u << 0;
static constexpr uint32_t kKvmFeatureClockSource = 1u << 3;
static constexpr uint8_t kKvmSystemTimeStable = 1u << 0;
// Both structures below are part of the ABI used by Xen and KVM. This ABI is
// not defined by us, we just follow it. For more detail please refer to the
// documentation (https://www.kernel.org/doc/Documentation/virtual/kvm/msr.txt).
struct pv_clock_boot_time {
  // With multiple VCPUs it is possible that one VCPU can try to read boot time
  // while we are updating it because another VCPU asked for the update. In this
  // case an odd version value serves as an indicator for the guest that an
  // update is in progress. Therefore we need to update version before we write
  // anything else and after, and we also need to use proper memory barriers.
  // The same logic applies to the system time version below; even though system
  // time is per VCPU, other VCPUs can still access the system times of other
  // VCPUs (Linux however never does that).
  uint32_t version;
  uint32_t seconds;   // Boot wall-clock seconds, per the ABI referenced above.
  uint32_t nseconds;  // Nanosecond remainder of the boot wall-clock time.
};
static_assert(sizeof(struct pv_clock_boot_time) == 12, "sizeof(pv_clock_boot_time) should be 12");
struct pv_clock_system_time {
  // Update-in-progress indicator; odd while the host is writing (see the
  // versioning note on pv_clock_boot_time above).
  uint32_t version;
  uint32_t pad0;  // Explicit padding required by the ABI layout.
  // Host TSC reading taken when system_time was sampled (per the KVM pvclock
  // ABI referenced above).
  uint64_t tsc_timestamp;
  uint64_t system_time;  // Host time at tsc_timestamp, per the ABI.
  // Fixed-point multiplier/shift the guest uses to convert TSC deltas into
  // nanoseconds (per the ABI).
  uint32_t tsc_mul;
  int8_t tsc_shift;
  uint8_t flags;  // Status bits, e.g. kKvmSystemTimeStable.
  uint8_t pad1[2];
};
static_assert(sizeof(struct pv_clock_system_time) == 32,
              "sizeof(pv_clock_system_time) should be 32");
// Initialize the para-virtualized clock.
//
// This function should only be called by CPU 0.
zx_status_t pv_clock_init();
// Shutsdown the para-virtualized clock.
//
// This function should only be called by CPU 0.
void pv_clock_shutdown();
bool pv_clock_is_stable();
uint64_t pv_clock_get_tsc_freq();
// Send para-virtualized IPI.
//
// @param mask_low Low part of CPU mask.
// @param mask_high High part of CPU mask.
// @param start_id APIC ID that the CPU mask starts at.
// @param icr APIC ICR value.
// @return The number of CPUs that the IPI was delivered to, or an error value.
int pv_ipi(uint64_t mask_low, uint64_t mask_high, uint64_t start_id, uint64_t icr);
class MsrAccess;
// PvEoi provides optimized end-of-interrupt signaling for para-virtualized environments.
//
// The initialization sequence of PvEoi instances is tricky. All PvEoi instances should be
// initialized by the boot CPU prior to bringing the secondary CPUs online (see |InitAll|).
class PvEoi final {
 public:
  ~PvEoi();

  // Initialize all PvEoi instances.
  //
  // Must be called from a context in which blocking is allowed.
  static void InitAll();

  // Initialize this PvEoi instance.
  //
  // Must be called from a context in which blocking is allowed.
  void Init();

  // Get the current CPU's PvEoi instance.
  static PvEoi* get();

  // Enable PV_EOI for the current CPU. After it is enabled, callers may use Eoi() rather than
  // access a local APIC register if desired.
  //
  // Once enabled, this PvEoi object must be disabled prior to destruction.
  //
  // It is an error to enable a PvEoi object more than once over its lifetime.
  void Enable(MsrAccess* msr);

  // Disable PV_EOI for the current CPU.
  void Disable(MsrAccess* msr);

  // Attempt to acknowledge and signal an end-of-interrupt (EOI) for the current CPU via a
  // paravirtual interface. If a fast acknowledge was not available, the function returns
  // false and the caller must signal an EOI via the legacy mechanism.
  bool Eoi();

 private:
  // state_ must be contained within a single page. If its alignment is greater than or equal to
  // its size, then we know it's not straddling a page boundary.
  ktl::atomic<uint64_t> state_{0};
  static_assert(sizeof(PvEoi::state_) < PAGE_SIZE &&
                alignof(decltype(PvEoi::state_)) >= sizeof(PvEoi::state_));

  // The physical address of state_.
  paddr_t state_paddr_{0};

  ktl::atomic<bool> enabled_{false};
};
#endif // ZIRCON_KERNEL_ARCH_X86_INCLUDE_ARCH_X86_PV_H_
| 34.901515 | 98 | 0.746907 |
2f4e9e6a2989d1e2ed3b65ea96cfc21be4d1f63e | 1,123 | php | PHP | application/modules/front/controllers/Dashboard.php | dnspace/e-logistic | f3547d7baee4a5ea6bd22b3b72da2a0bb7c4ef45 | [
"Apache-2.0"
] | null | null | null | application/modules/front/controllers/Dashboard.php | dnspace/e-logistic | f3547d7baee4a5ea6bd22b3b72da2a0bb7c4ef45 | [
"Apache-2.0"
] | null | null | null | application/modules/front/controllers/Dashboard.php | dnspace/e-logistic | f3547d7baee4a5ea6bd22b3b72da2a0bb7c4ef45 | [
"Apache-2.0"
] | null | null | null | <?php
defined('BASEPATH') OR exit('No direct script access allowed');
require APPPATH . '/libraries/BaseController.php';
/**
* Class : Dashboard (DashboardController)
* Dashboard Class to control Dashboard.
* @author : Sigit Prayitno
* @version : 1.0
* @since : Mei 2017
*/
class Dashboard extends BaseController
{
/**
* This is default constructor of the class
*/
public function __construct()
{
    parent::__construct();
    // Session guard: isLoggedIn() is expected to redirect unauthenticated
    // users to the login screen (defined in BaseController — confirm there).
    $this->isLoggedIn();
}
/**
* This function used to load the first screen of the user
*/
/**
 * Renders the dashboard landing page.
 *
 * Populates the shared layout values ($this->global) and the view-local
 * data, then hands off to the common view loader.
 */
public function index()
{
    // Layout/chrome values consumed by the shared templates.
    $layout = array(
        'pageTitle'     => 'Dashboard - '.APP_NAME,
        'pageMenu'      => 'Dashboard',
        'contentHeader' => 'Dashboard',
        'contentTitle'  => 'Welcome to E-Logistic',
        'role'          => $this->role,
        'name'          => $this->name,
        'repo'          => $this->repo,
    );
    foreach ($layout as $key => $value) {
        $this->global[$key] = $value;
    }

    // View-local data: today's date formatted by tgl_indo().
    $data = array('logtime' => tgl_indo(date("Y-m-d")));

    $this->loadViews('front/v_dashboard', $this->global, $data, NULL);
}
} | 27.390244 | 74 | 0.586821 |
41c2411339e8d55ffcaa144f5ab1daefb82d6122 | 2,742 | h | C | tools/math/inc/shape-sphere.h | TankleL/liblight | 77e14df6c0b0345b0f1e8d69d7bb21d190833a56 | [
"MIT"
] | 3 | 2018-08-28T12:30:56.000Z | 2021-02-22T10:17:56.000Z | tools/math/inc/shape-sphere.h | TankleL/liblight | 77e14df6c0b0345b0f1e8d69d7bb21d190833a56 | [
"MIT"
] | null | null | null | tools/math/inc/shape-sphere.h | TankleL/liblight | 77e14df6c0b0345b0f1e8d69d7bb21d190833a56 | [
"MIT"
] | null | null | null | /* ****************************************************************************
shape-sphere.h
-------------------------------------------------------------------------------
Copyright (c) 2018, Tain L.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
**************************************************************************** */
#pragma once
#include "my-prerequisites.h"
#include "shape.h"
namespace Light
{
namespace Math
{
// Analytic sphere primitive used for ray-intersection tests.
class ShapeSphere : public Shape
{
public:
	// pos: sphere center in world space; r: radius.
	ShapeSphere(const Point3& pos, decimal r);

public:
	// See Shape::intersected. Fills `inters` and returns true iff `ray_in`
	// hits this sphere.
	virtual bool intersected(Intersection& inters, const Ray3& ray_in) override;

protected:
	Point3 m_p;   // Center.
	decimal m_r;  // Radius.
};

inline ShapeSphere::ShapeSphere(const Point3& pos, decimal r)
	: m_p(pos)
	, m_r(r)
{}
// Ray/sphere intersection.
//
// With ray origin o, direction d and sphere center p, solves the quadratic
//   t^2*d.d + 2*t*(o-p).d + (o-p).(o-p) - R^2 = 0
// and keeps the smallest root greater than epsilon, so hits behind the
// origin (and self-hits exactly at the surface) are rejected.
// NOTE(review): the simplified discriminant assumes ray_in.m_direction is
// normalized — confirm against Ray3's invariants.
inline bool ShapeSphere::intersected(Intersection& inters, const Ray3& ray_in)
{
	bool res = false;
	Vector3 op = m_p - ray_in.m_origin;
	decimal b = op.dot(ray_in.m_direction);
	decimal det = b * b - op.dot(op) + m_r * m_r;
	if (det >= 0)  // Negative discriminant: the ray misses entirely.
	{
		det = sqrt(det);
		decimal t = b - det;  // Try the near root first.
		if (t > epsilon)
		{
			res = true;
		}
		else if ((t = b + det) > epsilon)  // Far root: origin lies inside the sphere.
		{
			res = true;
		}
		if (res)
		{
			inters.m_hit_point = ray_in.on_ray(t);
			inters.m_normal = (inters.m_hit_point - m_p);
			inters.m_normal.normalize();  // Outward unit normal at the hit point.
			inters.m_ray_in = ray_in;
			inters.m_travel = t;  // Parametric distance along the ray.
		}
	}
	return res;
}
} // namespace Math
} // namespace Light
| 29.483871 | 92 | 0.64442 |
8eb48b63013a7bc775267fdfdba79b7c90d2c3a8 | 3,670 | kt | Kotlin | app/src/main/java/space/narrate/waylan/android/ui/list/ListItemViewHolder.kt | narrate-co/waylan_android | c5640fb3bab994fd2c78cabd83fa962fbff75911 | [
"Apache-2.0"
] | 13 | 2019-08-12T02:42:41.000Z | 2021-02-07T11:26:29.000Z | app/src/main/java/space/narrate/waylan/android/ui/list/ListItemViewHolder.kt | narrate-co/words_android | c5640fb3bab994fd2c78cabd83fa962fbff75911 | [
"Apache-2.0"
] | 41 | 2019-09-28T20:53:00.000Z | 2021-01-27T15:46:01.000Z | app/src/main/java/space/narrate/waylan/android/ui/list/ListItemViewHolder.kt | narrate-co/waylan_android | c5640fb3bab994fd2c78cabd83fa962fbff75911 | [
"Apache-2.0"
] | null | null | null | package space.narrate.waylan.android.ui.list
import android.view.View
import android.view.ViewGroup
import android.widget.LinearLayout
import androidx.appcompat.widget.AppCompatTextView
import androidx.recyclerview.widget.RecyclerView
import com.google.android.material.chip.ChipGroup
import org.threeten.bp.OffsetDateTime
import space.narrate.waylan.android.R
import space.narrate.waylan.android.databinding.ListBannerLayoutBinding
import space.narrate.waylan.android.databinding.ListItemLayoutBinding
import space.narrate.waylan.android.util.toChip
import space.narrate.waylan.core.data.wordset.Synonym
import space.narrate.waylan.core.ui.widget.BannerCardView
import space.narrate.waylan.core.util.AdapterUtils
sealed class ListItemViewHolder<T : ListItemModel>(val view: View): RecyclerView.ViewHolder(view) {
abstract fun bind(item: T)
/**
 * Binds a [ListItemModel.HeaderModel] into the banner card shown at the top
 * of the list; banner button taps are forwarded to [listener].
 */
class HeaderViewHolder(
    private val binding: ListBannerLayoutBinding,
    listener: BannerCardView.Listener
): ListItemViewHolder<ListItemModel.HeaderModel>(binding.root) {

    init {
        // setLisenter is the (misspelled) BannerCardView API name.
        binding.banner.setLisenter(listener)
    }

    override fun bind(item: ListItemModel.HeaderModel) {
        binding.banner
            .setText(item.text)
            .setTopButton(item.topButtonText)
            .setBottomButton(item.bottomButtonText)
            .setLabel(item.label)
    }
}
/**
 * Binds a [ListItemModel.UserWordModel]: word text, first part-of-speech and
 * definition previews, and one chip per synonym preview. Word and synonym
 * clicks are forwarded to [listener].
 */
class UserWordViewHolder(
    private val binding: ListItemLayoutBinding,
    private val listener: ListItemAdapter.ListItemListener
): ListItemViewHolder<ListItemModel.UserWordModel>(binding.root) {

    override fun bind(item: ListItemModel.UserWordModel) {
        binding.run {
            // Transition name keyed by the word id — presumably for a
            // shared-element transition into the details screen (confirm).
            itemContainer.transitionName = item.userWord.id
            word.text = item.userWord.word
            // First part-of-speech preview, or empty when none.
            partOfSpeech.text = item.userWord.partOfSpeechPreview.keys.firstOrNull() ?: ""
            // First definition preview, if any.
            item.userWord.defPreview.map { it.key }.firstOrNull()?.let {
                definition.text = it
            }
            // Rebuild the synonym chips from scratch on every bind.
            expandedChipGroup.removeAllViews()
            item.userWord.synonymPreview.forEach {syn ->
                val synonym = Synonym(syn.key, OffsetDateTime.now(), OffsetDateTime.now())
                expandedChipGroup.addView(
                    synonym.toChip(view.context, expandedChipGroup) {
                        listener.onWordClicked(it.synonym, binding.root, false)
                    }
                )
            }
            itemContainer.setOnClickListener {
                listener.onWordClicked(item.userWord.word, binding.root, true)
            }
        }
    }
}
/**
 * Binds a [ListItemModel.GlobalWordModel]. Mirrors [UserWordViewHolder]'s
 * layout but, unlike it, does not set a transition name on the container.
 */
class GlobalWordViewHolder(
    private val binding: ListItemLayoutBinding,
    private val listener: ListItemAdapter.ListItemListener
) : ListItemViewHolder<ListItemModel.GlobalWordModel>(binding.root) {

    override fun bind(item: ListItemModel.GlobalWordModel) {
        binding.run {
            word.text = item.globalWord.word
            // First part-of-speech preview, or empty when none.
            partOfSpeech.text = item.globalWord.partOfSpeechPreview.keys.firstOrNull() ?: ""
            // First definition preview, if any.
            item.globalWord.defPreview.map { it.key }.firstOrNull()?.let {
                definition.text = it
            }
            // Rebuild the synonym chips from scratch on every bind.
            expandedChipGroup.removeAllViews()
            item.globalWord.synonymPreview.forEach {
                val synonym = Synonym(it.key, OffsetDateTime.now(), OffsetDateTime.now())
                expandedChipGroup.addView(
                    synonym.toChip(view.context, expandedChipGroup) {
                        listener.onWordClicked(it.synonym, binding.root, false)
                    }
                )
            }
            itemContainer.setOnClickListener {
                listener.onWordClicked(item.globalWord.word, binding.root, true)
            }
        }
    }
}
}
| 32.477876 | 99 | 0.698093 |
bd5e0fd1da6a8c96b81a6e57d55b533733ede540 | 4,251 | swift | Swift | SwiftGit2/Experimental/Struct/Options/RemoteCallbacks.swift | serg-vinnie/SwiftGit2 | c590501f77bfdc2ef8e8ba8404fc1d5194c58073 | [
"MIT"
] | 1 | 2020-01-25T04:47:11.000Z | 2020-01-25T04:47:11.000Z | SwiftGit2/Experimental/Struct/Options/RemoteCallbacks.swift | serg-vinnie/SwiftGit2 | c590501f77bfdc2ef8e8ba8404fc1d5194c58073 | [
"MIT"
] | null | null | null | SwiftGit2/Experimental/Struct/Options/RemoteCallbacks.swift | serg-vinnie/SwiftGit2 | c590501f77bfdc2ef8e8ba8404fc1d5194c58073 | [
"MIT"
] | 1 | 2019-11-27T13:19:40.000Z | 2019-11-27T13:19:40.000Z | //
// RemoteCallbacks.swift
// SwiftGit2-OSX
//
// Created by loki on 25.04.2021.
// Copyright © 2021 GitHub, Inc. All rights reserved.
//
import Clibgit2
import Foundation
public typealias TransferProgressCB = (git_indexer_progress) -> (Bool) // return false to cancel progree
public typealias AuthCB = (_ url: String?, _ username: String?) -> (Credentials)
/// How credentials are supplied to a remote operation.
public enum Auth {
    /// Ask a callback for credentials, given the remote URL and username.
    case match(AuthCB)
    /// Use a single fixed credential.
    case credentials(Credentials)
    /// Try a list of credentials in the order given.
    case list([Credentials])
}
/// Bridges libgit2's `git_remote_callbacks` to Swift, supplying credentials
/// and transfer-progress notifications during remote operations.
public class RemoteCallbacks: GitPayload {
    // Remaining credentials to try; stored reversed so popLast() yields them
    // in the caller-supplied order.
    var list = [Credentials]()
    // Fallback credential provider when the list is exhausted.
    var callback : AuthCB?
    // Last credential handed to libgit2 (written by credentialsCallback).
    var recentCredentials = Credentials.default

    private var remote_callbacks = git_remote_callbacks()
    // Optional progress hook; returning false cancels the transfer.
    public var transferProgress: TransferProgressCB?

    public init(auth: Auth) {
        switch auth {
        case .match(let callback):
            self.callback = callback
        case .credentials(let cred):
            self.list = [cred]
        case .list(let list):
            self.list = Array(list.reversed()) // reversed to use popLast
        }

        // Fill remote_callbacks with libgit2's defaults for this ABI version.
        let result = git_remote_init_callbacks(&remote_callbacks, UInt32(GIT_REMOTE_CALLBACKS_VERSION))
        assert(result == GIT_OK.rawValue)
    }

    /// Next credential to try: first from the list, then from the callback,
    /// finally .none once both are exhausted.
    func next(url: String?, username: String?) -> Credentials {
        if let cred = list.popLast() {
            return cred
        } else if let cb = callback {
            return cb(url, username)
        }

        return .none
    }

    #if DEBUG
    deinit {
        // print("RemoteCallbacks deinit")
    }
    #endif
}
extension RemoteCallbacks {
    /// Installs this object's payload pointer and the C trampolines into a
    /// `git_remote_callbacks` struct and runs `body` with it.
    ///
    /// The payload is retained for the duration of the call and released in
    /// a `defer`, so the trampolines may safely dereference it while `body`
    /// is executing.
    func with_git_remote_callbacks<T>(_ body: (inout git_remote_callbacks) -> T) -> T {
        remote_callbacks.payload = toRetainedPointer()
        remote_callbacks.credentials = credentialsCallback
        remote_callbacks.transfer_progress = transferCallback

        defer {
            RemoteCallbacks.release(pointer: remote_callbacks.payload)
        }
        return body(&remote_callbacks)
    }
}
/// Handle the request of credentials, passing through to a wrapped block after converting the arguments.
/// Converts the result to the correct error code required by libgit2 (0 = success, 1 = rejected setting creds,
/// -1 = error)
/// C trampoline installed as `git_remote_callbacks.credentials`.
///
/// Pulls the next `Credentials` value from the `RemoteCallbacks` payload and
/// converts it into a `git_cred`. Returns 0 on success, 1 when no credential
/// is available (libgit2 then reports that authentication is required), and
/// -1 on error.
private func credentialsCallback(
    cred: UnsafeMutablePointer<UnsafeMutablePointer<git_cred>?>?,
    url: UnsafePointer<CChar>?,
    username: UnsafePointer<CChar>?,
    _: UInt32,
    payload: UnsafeMutableRawPointer?
) -> Int32 {
    guard let payload = payload else { return -1 }

    let url = url.map(String.init(cString:))
    let name = username.map(String.init(cString:))
    let result: Int32

    // Unretained borrow: ownership is held by with_git_remote_callbacks'
    // retain/release pair for the duration of the remote operation.
    let _payload = RemoteCallbacks.unretained(pointer: payload)
    _payload.recentCredentials = _payload.next(url: url, username: name)

    switch _payload.recentCredentials {
    case .none:
        return 1 // will fail with: [git_remote_connect]: remote authentication required but no callback set
    case .default:
        result = git_credential_default_new(cred)
    case .sshAgent:
        result = git_credential_ssh_key_from_agent(cred, name!)
    case let .plaintext(username, password):
        result = git_credential_userpass_plaintext_new(cred, username, password)
    case let .sshMemory(username, publicKey, privateKey, passphrase):
        result = git_credential_ssh_key_memory_new(cred, username, publicKey, privateKey, passphrase)
    case let .ssh(publicKey: publicKey, privateKey: privateKey, passphrase: passphrase):
        result = git_credential_ssh_key_new(cred, name, publicKey, privateKey, passphrase)
    }

    return (result != GIT_OK.rawValue) ? -1 : 0
}
/// C trampoline installed as `git_remote_callbacks.transfer_progress`.
///
/// Forwards indexer statistics to the payload's optional `transferProgress`
/// closure. Returns a negative value to make libgit2 cancel the transfer:
/// -1 when the arguments are invalid or the closure returns false, 0 to
/// continue (including when no progress closure is installed).
private func transferCallback(stats: UnsafePointer<git_indexer_progress>?, payload: UnsafeMutableRawPointer?) -> Int32 {
    guard let progress = stats?.pointee, let rawPayload = payload else { return -1 }

    let callbacks = RemoteCallbacks.unretained(pointer: rawPayload)
    guard let onProgress = callbacks.transferProgress else {
        // No progress hook installed: keep transferring.
        return 0
    }
    return onProgress(progress) ? 0 : -1
}
| 32.953488 | 120 | 0.679134 |
868a9bbcd92a59bf3b83edc19484be4ecf1ffc17 | 34,401 | rs | Rust | core/src/progress_map.rs | leonardgee/solana | 559d3d8f49e114e9da0cbd27f898a6c4487846c8 | [
"Apache-2.0"
] | 1 | 2021-04-28T01:50:20.000Z | 2021-04-28T01:50:20.000Z | core/src/progress_map.rs | leonardgee/solana | 559d3d8f49e114e9da0cbd27f898a6c4487846c8 | [
"Apache-2.0"
] | 30 | 2021-11-01T08:28:05.000Z | 2022-03-30T08:39:09.000Z | core/src/progress_map.rs | joeaba/solana-new | 2c1493a63834f6631210e43caca4e4b71b70395b | [
"Apache-2.0"
] | 1 | 2022-02-26T06:44:22.000Z | 2022-02-26T06:44:22.000Z | use crate::{
cluster_info_vote_listener::SlotVoteTracker,
cluster_slots::SlotPubkeys,
replay_stage::SUPERMINORITY_THRESHOLD,
{consensus::Stake, consensus::VotedStakes},
};
use solana_ledger::blockstore_processor::{ConfirmationProgress, ConfirmationTiming};
use solana_runtime::{bank::Bank, bank_forks::BankForks, vote_account::ArcVoteAccount};
use solana_sdk::{clock::Slot, hash::Hash, pubkey::Pubkey};
use std::{
collections::{BTreeMap, HashMap, HashSet},
sync::{Arc, RwLock},
};
type VotedSlot = Slot;
type ExpirationSlot = Slot;
pub(crate) type LockoutIntervals = BTreeMap<ExpirationSlot, Vec<(VotedSlot, Pubkey)>>;
/// Newtype over `ConfirmationTiming` collecting per-slot replay timings.
#[derive(Default)]
pub(crate) struct ReplaySlotStats(ConfirmationTiming);

// Deref/DerefMut expose the inner ConfirmationTiming fields directly
// (e.g. `stats.replay_elapsed`) without forwarding accessors.
impl std::ops::Deref for ReplaySlotStats {
    type Target = ConfirmationTiming;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl std::ops::DerefMut for ReplaySlotStats {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
pub fn report_stats(&self, slot: Slot, num_entries: usize, num_shreds: u64) {
datapoint_info!(
"replay-slot-stats",
("slot", slot as i64, i64),
("fetch_entries_time", self.fetch_elapsed as i64, i64),
(
"fetch_entries_fail_time",
self.fetch_fail_elapsed as i64,
i64
),
(
"entry_poh_verification_time",
self.poh_verify_elapsed as i64,
i64
),
(
"entry_transaction_verification_time",
self.transaction_verify_elapsed as i64,
i64
),
("replay_time", self.replay_elapsed as i64, i64),
(
"replay_total_elapsed",
self.started.elapsed().as_micros() as i64,
i64
),
("total_entries", num_entries as i64, i64),
("total_shreds", num_shreds as i64, i64),
("check_us", self.execute_timings.check_us, i64),
("load_us", self.execute_timings.load_us, i64),
("execute_us", self.execute_timings.execute_us, i64),
("store_us", self.execute_timings.store_us, i64),
(
"serialize_us",
self.execute_timings.details.serialize_us,
i64
),
(
"create_vm_us",
self.execute_timings.details.create_vm_us,
i64
),
(
"execute_inner_us",
self.execute_timings.details.execute_us,
i64
),
(
"deserialize_us",
self.execute_timings.details.deserialize_us,
i64
),
(
"changed_account_count",
self.execute_timings.details.changed_account_count,
i64
),
(
"total_account_count",
self.execute_timings.details.total_account_count,
i64
),
(
"total_data_size",
self.execute_timings.details.total_data_size,
i64
),
(
"data_size_changed",
self.execute_timings.details.data_size_changed,
i64
),
);
}
}
/// Stake context for a slot on which this validator is the leader; used to
/// seed `PropagatedStats` (see `ForkProgress::new`).
#[derive(Debug)]
pub(crate) struct ValidatorStakeInfo {
    pub validator_vote_pubkey: Pubkey,
    // This validator's own epoch stake.
    pub stake: u64,
    // Total stake of the epoch; the denominator for propagation thresholds.
    pub total_epoch_stake: u64,
}

impl Default for ValidatorStakeInfo {
    fn default() -> Self {
        Self {
            stake: 0,
            validator_vote_pubkey: Pubkey::default(),
            // Deliberately 1 (not 0) so stake / total_epoch_stake remains
            // well-defined for consumers such as `ForkProgress::new`.
            total_epoch_stake: 1,
        }
    }
}

impl ValidatorStakeInfo {
    pub fn new(validator_vote_pubkey: Pubkey, stake: u64, total_epoch_stake: u64) -> Self {
        Self {
            validator_vote_pubkey,
            stake,
            total_epoch_stake,
        }
    }
}
pub(crate) struct ForkProgress {
pub(crate) is_dead: bool,
pub(crate) fork_stats: ForkStats,
pub(crate) propagated_stats: PropagatedStats,
pub(crate) replay_stats: ReplaySlotStats,
pub(crate) replay_progress: ConfirmationProgress,
pub(crate) duplicate_stats: DuplicateStats,
// Note `num_blocks_on_fork` and `num_dropped_blocks_on_fork` only
// count new blocks replayed since last restart, which won't include
// blocks already existing in the ledger/before snapshot at start,
// so these stats do not span all of time
pub(crate) num_blocks_on_fork: u64,
pub(crate) num_dropped_blocks_on_fork: u64,
}
impl ForkProgress {
pub fn new(
last_entry: Hash,
prev_leader_slot: Option<Slot>,
duplicate_stats: DuplicateStats,
validator_stake_info: Option<ValidatorStakeInfo>,
num_blocks_on_fork: u64,
num_dropped_blocks_on_fork: u64,
) -> Self {
let (
is_leader_slot,
propagated_validators_stake,
propagated_validators,
is_propagated,
total_epoch_stake,
) = validator_stake_info
.map(|info| {
(
true,
info.stake,
vec![info.validator_vote_pubkey].into_iter().collect(),
{
if info.total_epoch_stake == 0 {
true
} else {
info.stake as f64 / info.total_epoch_stake as f64
> SUPERMINORITY_THRESHOLD
}
},
info.total_epoch_stake,
)
})
.unwrap_or((false, 0, HashSet::new(), false, 0));
Self {
is_dead: false,
fork_stats: ForkStats::default(),
replay_stats: ReplaySlotStats::default(),
replay_progress: ConfirmationProgress::new(last_entry),
duplicate_stats,
num_blocks_on_fork,
num_dropped_blocks_on_fork,
propagated_stats: PropagatedStats {
propagated_validators,
propagated_validators_stake,
is_propagated,
is_leader_slot,
prev_leader_slot,
total_epoch_stake,
..PropagatedStats::default()
},
}
}
pub fn new_from_bank(
bank: &Bank,
my_pubkey: &Pubkey,
voting_pubkey: &Pubkey,
prev_leader_slot: Option<Slot>,
duplicate_stats: DuplicateStats,
num_blocks_on_fork: u64,
num_dropped_blocks_on_fork: u64,
) -> Self {
let validator_fork_info = {
if bank.collector_id() == my_pubkey {
let stake = bank.epoch_vote_account_stake(voting_pubkey);
Some(ValidatorStakeInfo::new(
*voting_pubkey,
stake,
bank.total_epoch_stake(),
))
} else {
None
}
};
Self::new(
bank.last_blockhash(),
prev_leader_slot,
duplicate_stats,
validator_fork_info,
num_blocks_on_fork,
num_dropped_blocks_on_fork,
)
}
/// Whether this fork has been duplicate-confirmed (see `DuplicateStats`).
pub fn is_duplicate_confirmed(&self) -> bool {
    self.duplicate_stats.is_duplicate_confirmed
}

/// Marks this fork duplicate-confirmed, which also clears any recorded
/// unconfirmed duplicate ancestor.
pub fn set_duplicate_confirmed(&mut self) {
    self.duplicate_stats.set_duplicate_confirmed();
}
}
#[derive(Debug, Clone, Default)]
pub(crate) struct ForkStats {
pub(crate) weight: u128,
pub(crate) fork_weight: u128,
pub(crate) total_stake: Stake,
pub(crate) block_height: u64,
pub(crate) has_voted: bool,
pub(crate) is_recent: bool,
pub(crate) is_empty: bool,
pub(crate) vote_threshold: bool,
pub(crate) is_locked_out: bool,
pub(crate) voted_stakes: VotedStakes,
pub(crate) is_supermajority_confirmed: bool,
pub(crate) computed: bool,
pub(crate) lockout_intervals: LockoutIntervals,
pub(crate) bank_hash: Option<Hash>,
}
#[derive(Clone, Default)]
pub(crate) struct PropagatedStats {
pub(crate) propagated_validators: HashSet<Pubkey>,
pub(crate) propagated_node_ids: HashSet<Pubkey>,
pub(crate) propagated_validators_stake: u64,
pub(crate) is_propagated: bool,
pub(crate) is_leader_slot: bool,
pub(crate) prev_leader_slot: Option<Slot>,
pub(crate) slot_vote_tracker: Option<Arc<RwLock<SlotVoteTracker>>>,
pub(crate) cluster_slot_pubkeys: Option<Arc<RwLock<SlotPubkeys>>>,
pub(crate) total_epoch_stake: u64,
}
#[derive(Clone, Default)]
pub(crate) struct DuplicateStats {
latest_unconfirmed_duplicate_ancestor: Option<Slot>,
is_duplicate_confirmed: bool,
}
impl DuplicateStats {
    /// Builds stats for a slot whose nearest unconfirmed duplicate ancestor
    /// (if any) is already known; the slot itself starts out unconfirmed.
    pub fn new_with_unconfirmed_duplicate_ancestor(
        latest_unconfirmed_duplicate_ancestor: Option<Slot>,
    ) -> Self {
        Self {
            latest_unconfirmed_duplicate_ancestor,
            is_duplicate_confirmed: false,
        }
    }

    /// Marks this slot duplicate-confirmed. A confirmed slot can no longer
    /// have an outstanding unconfirmed duplicate ancestor, so that field is
    /// cleared as well.
    fn set_duplicate_confirmed(&mut self) {
        self.is_duplicate_confirmed = true;
        self.latest_unconfirmed_duplicate_ancestor = None;
    }

    /// Clears the recorded duplicate ancestor once an ancestor with a slot
    /// number at least as high has been confirmed.
    fn update_with_newly_confirmed_duplicate_ancestor(&mut self, newly_confirmed_ancestor: Slot) {
        let ancestor_now_confirmed = self
            .latest_unconfirmed_duplicate_ancestor
            .map_or(false, |ancestor| ancestor <= newly_confirmed_ancestor);
        if ancestor_now_confirmed {
            self.latest_unconfirmed_duplicate_ancestor = None;
        }
    }
}
impl PropagatedStats {
pub fn add_vote_pubkey(&mut self, vote_pubkey: Pubkey, stake: u64) {
if self.propagated_validators.insert(vote_pubkey) {
self.propagated_validators_stake += stake;
}
}
/// Credits this slot with the stake of every vote account delegated to
/// `node_pubkey`, according to `bank`'s epoch stakes.
///
/// Idempotent per node: a node id already present in
/// `propagated_node_ids` is skipped so its stake is not double counted.
///
/// Panics if the bank's own epoch has no epoch stakes (which must exist).
pub fn add_node_pubkey(&mut self, node_pubkey: &Pubkey, bank: &Bank) {
    if self.propagated_node_ids.contains(node_pubkey) {
        return;
    }
    let node_vote_accounts = bank
        .epoch_vote_accounts_for_node_id(node_pubkey)
        .map(|v| &v.vote_accounts);

    if let Some(node_vote_accounts) = node_vote_accounts {
        self.add_node_pubkey_internal(
            node_pubkey,
            node_vote_accounts,
            bank.epoch_vote_accounts(bank.epoch())
                .expect("Epoch stakes for bank's own epoch must exist"),
        );
    }
}
fn add_node_pubkey_internal(
&mut self,
node_pubkey: &Pubkey,
vote_account_pubkeys: &[Pubkey],
epoch_vote_accounts: &HashMap<Pubkey, (u64, ArcVoteAccount)>,
) {
self.propagated_node_ids.insert(*node_pubkey);
for vote_account_pubkey in vote_account_pubkeys.iter() {
let stake = epoch_vote_accounts
.get(vote_account_pubkey)
.map(|(stake, _)| *stake)
.unwrap_or(0);
self.add_vote_pubkey(*vote_account_pubkey, stake);
}
}
}
#[derive(Default)]
pub(crate) struct ProgressMap {
progress_map: HashMap<Slot, ForkProgress>,
}
impl std::ops::Deref for ProgressMap {
type Target = HashMap<Slot, ForkProgress>;
fn deref(&self) -> &Self::Target {
&self.progress_map
}
}
impl std::ops::DerefMut for ProgressMap {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.progress_map
}
}
impl ProgressMap {
pub fn insert(&mut self, slot: Slot, fork_progress: ForkProgress) {
self.progress_map.insert(slot, fork_progress);
}
pub fn get_propagated_stats(&self, slot: Slot) -> Option<&PropagatedStats> {
self.progress_map
.get(&slot)
.map(|fork_progress| &fork_progress.propagated_stats)
}
pub fn get_propagated_stats_mut(&mut self, slot: Slot) -> Option<&mut PropagatedStats> {
self.progress_map
.get_mut(&slot)
.map(|fork_progress| &mut fork_progress.propagated_stats)
}
pub fn get_fork_stats(&self, slot: Slot) -> Option<&ForkStats> {
self.progress_map
.get(&slot)
.map(|fork_progress| &fork_progress.fork_stats)
}
pub fn get_fork_stats_mut(&mut self, slot: Slot) -> Option<&mut ForkStats> {
self.progress_map
.get_mut(&slot)
.map(|fork_progress| &mut fork_progress.fork_stats)
}
pub fn is_dead(&self, slot: Slot) -> Option<bool> {
self.progress_map
.get(&slot)
.map(|fork_progress| fork_progress.is_dead)
}
pub fn get_hash(&self, slot: Slot) -> Option<Hash> {
self.progress_map
.get(&slot)
.and_then(|fork_progress| fork_progress.fork_stats.bank_hash)
}
pub fn is_propagated(&self, slot: Slot) -> bool {
let leader_slot_to_check = self.get_latest_leader_slot(slot);
// prev_leader_slot doesn't exist because already rooted
// or this validator hasn't been scheduled as a leader
// yet. In both cases the latest leader is vacuously
// confirmed
leader_slot_to_check
.map(|leader_slot_to_check| {
// If the leader's stats are None (isn't in the
// progress map), this means that prev_leader slot is
// rooted, so return true
self.get_propagated_stats(leader_slot_to_check)
.map(|stats| stats.is_propagated)
.unwrap_or(true)
})
.unwrap_or(true)
}
pub fn get_latest_leader_slot(&self, slot: Slot) -> Option<Slot> {
let propagated_stats = self
.get_propagated_stats(slot)
.expect("All frozen banks must exist in the Progress map");
if propagated_stats.is_leader_slot {
Some(slot)
} else {
propagated_stats.prev_leader_slot
}
}
pub fn is_unconfirmed_duplicate(&self, slot: Slot) -> Option<bool> {
self.get(&slot).map(|p| {
p.duplicate_stats
.latest_unconfirmed_duplicate_ancestor
.map(|ancestor| ancestor == slot)
.unwrap_or(false)
})
}
pub fn latest_unconfirmed_duplicate_ancestor(&self, slot: Slot) -> Option<Slot> {
self.get(&slot)
.map(|p| p.duplicate_stats.latest_unconfirmed_duplicate_ancestor)
.unwrap_or(None)
}
pub fn set_unconfirmed_duplicate_slot(&mut self, slot: Slot, descendants: &HashSet<u64>) {
if let Some(fork_progress) = self.get_mut(&slot) {
if fork_progress.is_duplicate_confirmed() {
assert!(fork_progress
.duplicate_stats
.latest_unconfirmed_duplicate_ancestor
.is_none());
return;
}
if fork_progress
.duplicate_stats
.latest_unconfirmed_duplicate_ancestor
== Some(slot)
{
// Already been marked
return;
}
fork_progress
.duplicate_stats
.latest_unconfirmed_duplicate_ancestor = Some(slot);
for d in descendants {
if let Some(fork_progress) = self.get_mut(&d) {
fork_progress
.duplicate_stats
.latest_unconfirmed_duplicate_ancestor = Some(std::cmp::max(
fork_progress
.duplicate_stats
.latest_unconfirmed_duplicate_ancestor
.unwrap_or(0),
slot,
));
}
}
}
}
pub fn set_confirmed_duplicate_slot(
&mut self,
slot: Slot,
ancestors: &HashSet<u64>,
descendants: &HashSet<u64>,
) {
for a in ancestors {
if let Some(fork_progress) = self.get_mut(&a) {
fork_progress.set_duplicate_confirmed();
}
}
if let Some(slot_fork_progress) = self.get_mut(&slot) {
// Setting the fields here is nly correct and necessary if the loop above didn't
// already do this, so check with an assert.
assert!(!ancestors.contains(&slot));
let slot_had_unconfirmed_duplicate_ancestor = slot_fork_progress
.duplicate_stats
.latest_unconfirmed_duplicate_ancestor
.is_some();
slot_fork_progress.set_duplicate_confirmed();
if slot_had_unconfirmed_duplicate_ancestor {
for d in descendants {
if let Some(descendant_fork_progress) = self.get_mut(&d) {
descendant_fork_progress
.duplicate_stats
.update_with_newly_confirmed_duplicate_ancestor(slot);
}
}
} else {
// Neither this slot `S`, nor earlier ancestors were marked as duplicate,
// so this means all descendants either:
// 1) Have no duplicate ancestors
// 2) Have a duplicate ancestor > `S`
// In both cases, there's no need to iterate through descendants because
// this confirmation on `S` is irrelevant to them.
}
}
}
pub fn set_supermajority_confirmed_slot(&mut self, slot: Slot) {
let slot_progress = self.get_mut(&slot).unwrap();
slot_progress.fork_stats.is_supermajority_confirmed = true;
}
pub fn is_supermajority_confirmed(&self, slot: Slot) -> Option<bool> {
self.progress_map
.get(&slot)
.map(|s| s.fork_stats.is_supermajority_confirmed)
}
pub fn is_duplicate_confirmed(&self, slot: Slot) -> Option<bool> {
self.progress_map
.get(&slot)
.map(|s| s.is_duplicate_confirmed())
}
pub fn get_bank_prev_leader_slot(&self, bank: &Bank) -> Option<Slot> {
let parent_slot = bank.parent_slot();
self.get_propagated_stats(parent_slot)
.map(|stats| {
if stats.is_leader_slot {
Some(parent_slot)
} else {
stats.prev_leader_slot
}
})
.unwrap_or(None)
}
pub fn handle_new_root(&mut self, bank_forks: &BankForks) {
self.progress_map
.retain(|k, _| bank_forks.get(*k).is_some());
}
pub fn log_propagated_stats(&self, slot: Slot, bank_forks: &RwLock<BankForks>) {
if let Some(stats) = self.get_propagated_stats(slot) {
info!(
"Propagated stats:
total staked: {},
observed staked: {},
vote pubkeys: {:?},
node_pubkeys: {:?},
slot: {},
epoch: {:?}",
stats.total_epoch_stake,
stats.propagated_validators_stake,
stats.propagated_validators,
stats.propagated_node_ids,
slot,
bank_forks.read().unwrap().get(slot).map(|x| x.epoch()),
);
}
}
}
#[cfg(test)]
mod test {
use super::*;
use crate::consensus::test::VoteSimulator;
use trees::tr;
#[test]
fn test_add_vote_pubkey() {
let mut stats = PropagatedStats::default();
let mut vote_pubkey = solana_sdk::pubkey::new_rand();
// Add a vote pubkey, the number of references in all_pubkeys
// should be 2
stats.add_vote_pubkey(vote_pubkey, 1);
assert!(stats.propagated_validators.contains(&vote_pubkey));
assert_eq!(stats.propagated_validators_stake, 1);
// Adding it again should change no state since the key already existed
stats.add_vote_pubkey(vote_pubkey, 1);
assert!(stats.propagated_validators.contains(&vote_pubkey));
assert_eq!(stats.propagated_validators_stake, 1);
// Adding another pubkey should succeed
vote_pubkey = solana_sdk::pubkey::new_rand();
stats.add_vote_pubkey(vote_pubkey, 2);
assert!(stats.propagated_validators.contains(&vote_pubkey));
assert_eq!(stats.propagated_validators_stake, 3);
}
#[test]
fn test_add_node_pubkey_internal() {
let num_vote_accounts = 10;
let staked_vote_accounts = 5;
let vote_account_pubkeys: Vec<_> = std::iter::repeat_with(solana_sdk::pubkey::new_rand)
.take(num_vote_accounts)
.collect();
let epoch_vote_accounts: HashMap<_, _> = vote_account_pubkeys
.iter()
.skip(num_vote_accounts - staked_vote_accounts)
.map(|pubkey| (*pubkey, (1, ArcVoteAccount::default())))
.collect();
let mut stats = PropagatedStats::default();
let mut node_pubkey = solana_sdk::pubkey::new_rand();
// Add a vote pubkey, the number of references in all_pubkeys
// should be 2
stats.add_node_pubkey_internal(&node_pubkey, &vote_account_pubkeys, &epoch_vote_accounts);
assert!(stats.propagated_node_ids.contains(&node_pubkey));
assert_eq!(
stats.propagated_validators_stake,
staked_vote_accounts as u64
);
// Adding it again should not change any state
stats.add_node_pubkey_internal(&node_pubkey, &vote_account_pubkeys, &epoch_vote_accounts);
assert!(stats.propagated_node_ids.contains(&node_pubkey));
assert_eq!(
stats.propagated_validators_stake,
staked_vote_accounts as u64
);
// Adding another pubkey with same vote accounts should succeed, but stake
// shouldn't increase
node_pubkey = solana_sdk::pubkey::new_rand();
stats.add_node_pubkey_internal(&node_pubkey, &vote_account_pubkeys, &epoch_vote_accounts);
assert!(stats.propagated_node_ids.contains(&node_pubkey));
assert_eq!(
stats.propagated_validators_stake,
staked_vote_accounts as u64
);
// Adding another pubkey with different vote accounts should succeed
// and increase stake
node_pubkey = solana_sdk::pubkey::new_rand();
let vote_account_pubkeys: Vec<_> = std::iter::repeat_with(solana_sdk::pubkey::new_rand)
.take(num_vote_accounts)
.collect();
let epoch_vote_accounts: HashMap<_, _> = vote_account_pubkeys
.iter()
.skip(num_vote_accounts - staked_vote_accounts)
.map(|pubkey| (*pubkey, (1, ArcVoteAccount::default())))
.collect();
stats.add_node_pubkey_internal(&node_pubkey, &vote_account_pubkeys, &epoch_vote_accounts);
assert!(stats.propagated_node_ids.contains(&node_pubkey));
assert_eq!(
stats.propagated_validators_stake,
2 * staked_vote_accounts as u64
);
}
#[test]
fn test_is_propagated_status_on_construction() {
// If the given ValidatorStakeInfo == None, then this is not
// a leader slot and is_propagated == false
let progress = ForkProgress::new(
Hash::default(),
Some(9),
DuplicateStats::default(),
None,
0,
0,
);
assert!(!progress.propagated_stats.is_propagated);
// If the stake is zero, then threshold is always achieved
let progress = ForkProgress::new(
Hash::default(),
Some(9),
DuplicateStats::default(),
Some(ValidatorStakeInfo {
total_epoch_stake: 0,
..ValidatorStakeInfo::default()
}),
0,
0,
);
assert!(progress.propagated_stats.is_propagated);
// If the stake is non zero, then threshold is not achieved unless
// validator has enough stake by itself to pass threshold
let progress = ForkProgress::new(
Hash::default(),
Some(9),
DuplicateStats::default(),
Some(ValidatorStakeInfo {
total_epoch_stake: 2,
..ValidatorStakeInfo::default()
}),
0,
0,
);
assert!(!progress.propagated_stats.is_propagated);
// Give the validator enough stake by itself to pass threshold
let progress = ForkProgress::new(
Hash::default(),
Some(9),
DuplicateStats::default(),
Some(ValidatorStakeInfo {
stake: 1,
total_epoch_stake: 2,
..ValidatorStakeInfo::default()
}),
0,
0,
);
assert!(progress.propagated_stats.is_propagated);
// Check that the default ValidatorStakeInfo::default() constructs a ForkProgress
// with is_propagated == false, otherwise propagation tests will fail to run
// the proper checks (most will auto-pass without checking anything)
let progress = ForkProgress::new(
Hash::default(),
Some(9),
DuplicateStats::default(),
Some(ValidatorStakeInfo::default()),
0,
0,
);
assert!(!progress.propagated_stats.is_propagated);
}
#[test]
fn test_is_propagated() {
let mut progress_map = ProgressMap::default();
// Insert new ForkProgress for slot 10 (not a leader slot) and its
// previous leader slot 9 (leader slot)
progress_map.insert(
10,
ForkProgress::new(
Hash::default(),
Some(9),
DuplicateStats::default(),
None,
0,
0,
),
);
progress_map.insert(
9,
ForkProgress::new(
Hash::default(),
None,
DuplicateStats::default(),
Some(ValidatorStakeInfo::default()),
0,
0,
),
);
// None of these slot have parents which are confirmed
assert!(!progress_map.is_propagated(9));
assert!(!progress_map.is_propagated(10));
// Insert new ForkProgress for slot 8 with no previous leader.
// The previous leader before 8, slot 7, does not exist in
// progress map, so is_propagated(8) should return true as
// this implies the parent is rooted
progress_map.insert(
8,
ForkProgress::new(
Hash::default(),
Some(7),
DuplicateStats::default(),
None,
0,
0,
),
);
assert!(progress_map.is_propagated(8));
// If we set the is_propagated = true, is_propagated should return true
progress_map
.get_propagated_stats_mut(9)
.unwrap()
.is_propagated = true;
assert!(progress_map.is_propagated(9));
assert!(progress_map.get(&9).unwrap().propagated_stats.is_propagated);
// Because slot 9 is now confirmed, then slot 10 is also confirmed b/c 9
// is the last leader slot before 10
assert!(progress_map.is_propagated(10));
// If we make slot 10 a leader slot though, even though its previous
// leader slot 9 has been confirmed, slot 10 itself is not confirmed
progress_map
.get_propagated_stats_mut(10)
.unwrap()
.is_leader_slot = true;
assert!(!progress_map.is_propagated(10));
}
fn setup_set_unconfirmed_and_confirmed_duplicate_slot_tests(
smaller_duplicate_slot: Slot,
larger_duplicate_slot: Slot,
) -> (ProgressMap, RwLock<BankForks>) {
// Create simple fork 0 -> 1 -> 2 -> 3 -> 4 -> 5
let forks = tr(0) / (tr(1) / (tr(2) / (tr(3) / (tr(4) / tr(5)))));
let mut vote_simulator = VoteSimulator::new(1);
vote_simulator.fill_bank_forks(forks, &HashMap::new());
let VoteSimulator {
mut progress,
bank_forks,
..
} = vote_simulator;
let descendants = bank_forks.read().unwrap().descendants().clone();
// Mark the slots as unconfirmed duplicates
progress.set_unconfirmed_duplicate_slot(
smaller_duplicate_slot,
&descendants.get(&smaller_duplicate_slot).unwrap(),
);
progress.set_unconfirmed_duplicate_slot(
larger_duplicate_slot,
&descendants.get(&larger_duplicate_slot).unwrap(),
);
// Correctness checks
for slot in bank_forks.read().unwrap().banks().keys() {
if *slot < smaller_duplicate_slot {
assert!(progress
.latest_unconfirmed_duplicate_ancestor(*slot)
.is_none());
} else if *slot < larger_duplicate_slot {
assert_eq!(
progress
.latest_unconfirmed_duplicate_ancestor(*slot)
.unwrap(),
smaller_duplicate_slot
);
} else {
assert_eq!(
progress
.latest_unconfirmed_duplicate_ancestor(*slot)
.unwrap(),
larger_duplicate_slot
);
}
}
(progress, bank_forks)
}
#[test]
fn test_set_unconfirmed_duplicate_confirm_smaller_slot_first() {
let smaller_duplicate_slot = 1;
let larger_duplicate_slot = 4;
let (mut progress, bank_forks) = setup_set_unconfirmed_and_confirmed_duplicate_slot_tests(
smaller_duplicate_slot,
larger_duplicate_slot,
);
let descendants = bank_forks.read().unwrap().descendants().clone();
let ancestors = bank_forks.read().unwrap().ancestors();
// Mark the smaller duplicate slot as confirmed
progress.set_confirmed_duplicate_slot(
smaller_duplicate_slot,
&ancestors.get(&smaller_duplicate_slot).unwrap(),
&descendants.get(&smaller_duplicate_slot).unwrap(),
);
for slot in bank_forks.read().unwrap().banks().keys() {
if *slot < larger_duplicate_slot {
// Only slots <= smaller_duplicate_slot have been duplicate confirmed
if *slot <= smaller_duplicate_slot {
assert!(progress.is_duplicate_confirmed(*slot).unwrap());
} else {
assert!(!progress.is_duplicate_confirmed(*slot).unwrap());
}
// The unconfirmed duplicate flag has been cleared on the smaller
// descendants because their most recent duplicate ancestor has
// been confirmed
assert!(progress
.latest_unconfirmed_duplicate_ancestor(*slot)
.is_none());
} else {
assert!(!progress.is_duplicate_confirmed(*slot).unwrap(),);
// The unconfirmed duplicate flag has not been cleared on the smaller
// descendants because their most recent duplicate ancestor,
// `larger_duplicate_slot` has not yet been confirmed
assert_eq!(
progress
.latest_unconfirmed_duplicate_ancestor(*slot)
.unwrap(),
larger_duplicate_slot
);
}
}
// Mark the larger duplicate slot as confirmed, all slots should no longer
// have any unconfirmed duplicate ancestors, and should be marked as duplciate confirmed
progress.set_confirmed_duplicate_slot(
larger_duplicate_slot,
&ancestors.get(&larger_duplicate_slot).unwrap(),
&descendants.get(&larger_duplicate_slot).unwrap(),
);
for slot in bank_forks.read().unwrap().banks().keys() {
// All slots <= the latest duplciate confirmed slot are ancestors of
// that slot, so they should all be marked duplicate confirmed
assert_eq!(
progress.is_duplicate_confirmed(*slot).unwrap(),
*slot <= larger_duplicate_slot
);
assert!(progress
.latest_unconfirmed_duplicate_ancestor(*slot)
.is_none());
}
}
#[test]
fn test_set_unconfirmed_duplicate_confirm_larger_slot_first() {
let smaller_duplicate_slot = 1;
let larger_duplicate_slot = 4;
let (mut progress, bank_forks) = setup_set_unconfirmed_and_confirmed_duplicate_slot_tests(
smaller_duplicate_slot,
larger_duplicate_slot,
);
let descendants = bank_forks.read().unwrap().descendants().clone();
let ancestors = bank_forks.read().unwrap().ancestors();
// Mark the larger duplicate slot as confirmed
progress.set_confirmed_duplicate_slot(
larger_duplicate_slot,
&ancestors.get(&larger_duplicate_slot).unwrap(),
&descendants.get(&larger_duplicate_slot).unwrap(),
);
// All slots should no longer have any unconfirmed duplicate ancestors
progress.set_confirmed_duplicate_slot(
larger_duplicate_slot,
&ancestors.get(&larger_duplicate_slot).unwrap(),
&descendants.get(&larger_duplicate_slot).unwrap(),
);
for slot in bank_forks.read().unwrap().banks().keys() {
// All slots <= the latest duplciate confirmed slot are ancestors of
// that slot, so they should all be marked duplicate confirmed
assert_eq!(
progress.is_duplicate_confirmed(*slot).unwrap(),
*slot <= larger_duplicate_slot
);
assert!(progress
.latest_unconfirmed_duplicate_ancestor(*slot)
.is_none());
}
}
}
| 35.174847 | 98 | 0.570943 |
0ce1ead7fccfec4e0bc42fdbdc128b022ce3b62a | 9,982 | py | Python | test/adb_test.py | bugobliterator/python-adb | 2f4f5bcdf5dab5ccf8bf58ff9e91cde4d134f1c0 | [
"Apache-2.0"
] | 1,549 | 2015-01-04T04:45:48.000Z | 2022-03-31T08:01:59.000Z | test/adb_test.py | bugobliterator/python-adb | 2f4f5bcdf5dab5ccf8bf58ff9e91cde4d134f1c0 | [
"Apache-2.0"
] | 174 | 2015-01-04T04:47:39.000Z | 2022-03-24T10:42:12.000Z | test/adb_test.py | bugobliterator/python-adb | 2f4f5bcdf5dab5ccf8bf58ff9e91cde4d134f1c0 | [
"Apache-2.0"
] | 356 | 2015-01-09T10:10:33.000Z | 2022-03-27T19:25:01.000Z | #!/usr/bin/env python
# Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for adb."""
from io import BytesIO
import struct
import unittest
from mock import mock
from adb import common
from adb import adb_commands
from adb import adb_protocol
from adb.usb_exceptions import TcpTimeoutException, DeviceNotFoundError
import common_stub
BANNER = b'blazetest'
LOCAL_ID = 1
REMOTE_ID = 2
class BaseAdbTest(unittest.TestCase):
@classmethod
def _ExpectWrite(cls, usb, command, arg0, arg1, data):
usb.ExpectWrite(cls._MakeHeader(command, arg0, arg1, data))
usb.ExpectWrite(data)
if command == b'WRTE':
cls._ExpectRead(usb, b'OKAY', 0, 0)
@classmethod
def _ExpectRead(cls, usb, command, arg0, arg1, data=b''):
usb.ExpectRead(cls._MakeHeader(command, arg0, arg1, data))
if data:
usb.ExpectRead(data)
if command == b'WRTE':
cls._ExpectWrite(usb, b'OKAY', LOCAL_ID, REMOTE_ID, b'')
@classmethod
def _ConvertCommand(cls, command):
return sum(c << (i * 8) for i, c in enumerate(bytearray(command)))
@classmethod
def _MakeHeader(cls, command, arg0, arg1, data):
command = cls._ConvertCommand(command)
magic = command ^ 0xFFFFFFFF
checksum = adb_protocol.AdbMessage.CalculateChecksum(data)
return struct.pack(b'<6I', command, arg0, arg1, len(data), checksum, magic)
@classmethod
def _ExpectConnection(cls, usb):
cls._ExpectWrite(usb, b'CNXN', 0x01000000, 4096, b'host::%s\0' % BANNER)
cls._ExpectRead(usb, b'CNXN', 0, 0, b'device::\0')
@classmethod
def _ExpectOpen(cls, usb, service):
cls._ExpectWrite(usb, b'OPEN', LOCAL_ID, 0, service)
cls._ExpectRead(usb, b'OKAY', REMOTE_ID, LOCAL_ID)
@classmethod
def _ExpectClose(cls, usb):
cls._ExpectRead(usb, b'CLSE', REMOTE_ID, 0)
cls._ExpectWrite(usb, b'CLSE', LOCAL_ID, REMOTE_ID, b'')
@classmethod
def _Connect(cls, usb):
return adb_commands.AdbCommands.Connect(usb, BANNER)
class AdbTest(BaseAdbTest):
@classmethod
def _ExpectCommand(cls, service, command, *responses):
usb = common_stub.StubUsb(device=None, setting=None)
cls._ExpectConnection(usb)
cls._ExpectOpen(usb, b'%s:%s\0' % (service, command))
for response in responses:
cls._ExpectRead(usb, b'WRTE', REMOTE_ID, 0, response)
cls._ExpectClose(usb)
return usb
def testConnect(self):
usb = common_stub.StubUsb(device=None, setting=None)
self._ExpectConnection(usb)
dev = adb_commands.AdbCommands()
dev.ConnectDevice(handle=usb, banner=BANNER)
def testConnectSerialString(self):
dev = adb_commands.AdbCommands()
with mock.patch.object(common.UsbHandle, 'FindAndOpen', return_value=None):
with mock.patch.object(adb_commands.AdbCommands, '_Connect', return_value=None):
dev.ConnectDevice(serial='/dev/invalidHandle')
def testSmallResponseShell(self):
command = b'keepin it real'
response = 'word.'
usb = self._ExpectCommand(b'shell', command, response)
dev = adb_commands.AdbCommands()
dev.ConnectDevice(handle=usb, banner=BANNER)
self.assertEqual(response, dev.Shell(command))
def testBigResponseShell(self):
command = b'keepin it real big'
# The data doesn't have to be big, the point is that it just concatenates
# the data from different WRTEs together.
responses = [b'other stuff, ', b'and some words.']
usb = self._ExpectCommand(b'shell', command, *responses)
dev = adb_commands.AdbCommands()
dev.ConnectDevice(handle=usb, banner=BANNER)
self.assertEqual(b''.join(responses).decode('utf8'),
dev.Shell(command))
def testUninstall(self):
package_name = "com.test.package"
response = 'Success'
usb = self._ExpectCommand(b'shell', ('pm uninstall "%s"' % package_name).encode('utf8'), response)
dev = adb_commands.AdbCommands()
dev.ConnectDevice(handle=usb, banner=BANNER)
self.assertEqual(response, dev.Uninstall(package_name))
def testStreamingResponseShell(self):
command = b'keepin it real big'
# expect multiple lines
responses = ['other stuff, ', 'and some words.']
usb = self._ExpectCommand(b'shell', command, *responses)
dev = adb_commands.AdbCommands()
dev.ConnectDevice(handle=usb, banner=BANNER)
response_count = 0
for (expected,actual) in zip(responses, dev.StreamingShell(command)):
self.assertEqual(expected, actual)
response_count = response_count + 1
self.assertEqual(len(responses), response_count)
def testReboot(self):
usb = self._ExpectCommand(b'reboot', b'', b'')
dev = adb_commands.AdbCommands()
dev.ConnectDevice(handle=usb, banner=BANNER)
dev.Reboot()
def testRebootBootloader(self):
usb = self._ExpectCommand(b'reboot', b'bootloader', b'')
dev = adb_commands.AdbCommands()
dev.ConnectDevice(handle=usb, banner=BANNER)
dev.RebootBootloader()
def testRemount(self):
usb = self._ExpectCommand(b'remount', b'', b'')
dev = adb_commands.AdbCommands()
dev.ConnectDevice(handle=usb, banner=BANNER)
dev.Remount()
def testRoot(self):
usb = self._ExpectCommand(b'root', b'', b'')
dev = adb_commands.AdbCommands()
dev.ConnectDevice(handle=usb, banner=BANNER)
dev.Root()
def testEnableVerity(self):
usb = self._ExpectCommand(b'enable-verity', b'', b'')
dev = adb_commands.AdbCommands()
dev.ConnectDevice(handle=usb, banner=BANNER)
dev.EnableVerity()
def testDisableVerity(self):
usb = self._ExpectCommand(b'disable-verity', b'', b'')
dev = adb_commands.AdbCommands()
dev.ConnectDevice(handle=usb, banner=BANNER)
dev.DisableVerity()
class FilesyncAdbTest(BaseAdbTest):
@classmethod
def _MakeSyncHeader(cls, command, *int_parts):
command = cls._ConvertCommand(command)
return struct.pack(b'<%dI' % (len(int_parts) + 1), command, *int_parts)
@classmethod
def _MakeWriteSyncPacket(cls, command, data=b'', size=None):
if not isinstance(data, bytes):
data = data.encode('utf8')
return cls._MakeSyncHeader(command, size or len(data)) + data
@classmethod
def _ExpectSyncCommand(cls, write_commands, read_commands):
usb = common_stub.StubUsb(device=None, setting=None)
cls._ExpectConnection(usb)
cls._ExpectOpen(usb, b'sync:\0')
while write_commands or read_commands:
if write_commands:
command = write_commands.pop(0)
cls._ExpectWrite(usb, b'WRTE', LOCAL_ID, REMOTE_ID, command)
if read_commands:
command = read_commands.pop(0)
cls._ExpectRead(usb, b'WRTE', REMOTE_ID, LOCAL_ID, command)
cls._ExpectClose(usb)
return usb
def testPush(self):
filedata = b'alo there, govnah'
mtime = 100
send = [
self._MakeWriteSyncPacket(b'SEND', b'/data,33272'),
self._MakeWriteSyncPacket(b'DATA', filedata),
self._MakeWriteSyncPacket(b'DONE', size=mtime),
]
data = b'OKAY\0\0\0\0'
usb = self._ExpectSyncCommand([b''.join(send)], [data])
dev = adb_commands.AdbCommands()
dev.ConnectDevice(handle=usb, banner=BANNER)
dev.Push(BytesIO(filedata), '/data', mtime=mtime)
def testPull(self):
filedata = b"g'ddayta, govnah"
recv = self._MakeWriteSyncPacket(b'RECV', b'/data')
data = [
self._MakeWriteSyncPacket(b'DATA', filedata),
self._MakeWriteSyncPacket(b'DONE'),
]
usb = self._ExpectSyncCommand([recv], [b''.join(data)])
dev = adb_commands.AdbCommands()
dev.ConnectDevice(handle=usb, banner=BANNER)
self.assertEqual(filedata, dev.Pull('/data'))
class TcpTimeoutAdbTest(BaseAdbTest):
@classmethod
def _ExpectCommand(cls, service, command, *responses):
tcp = common_stub.StubTcp('10.0.0.123')
cls._ExpectConnection(tcp)
cls._ExpectOpen(tcp, b'%s:%s\0' % (service, command))
for response in responses:
cls._ExpectRead(tcp, b'WRTE', REMOTE_ID, 0, response)
cls._ExpectClose(tcp)
return tcp
def _run_shell(self, cmd, timeout_ms=None):
tcp = self._ExpectCommand(b'shell', cmd)
dev = adb_commands.AdbCommands()
dev.ConnectDevice(handle=tcp, banner=BANNER)
dev.Shell(cmd, timeout_ms=timeout_ms)
def testConnect(self):
tcp = common_stub.StubTcp('10.0.0.123')
self._ExpectConnection(tcp)
dev = adb_commands.AdbCommands()
dev.ConnectDevice(handle=tcp, banner=BANNER)
def testTcpTimeout(self):
timeout_ms = 1
command = b'i_need_a_timeout'
self.assertRaises(
TcpTimeoutException,
self._run_shell,
command,
timeout_ms=timeout_ms)
class TcpHandleTest(unittest.TestCase):
def testInitWithHost(self):
tcp = common_stub.StubTcp('10.11.12.13')
self.assertEqual('10.11.12.13:5555', tcp._serial_number)
self.assertEqual(None, tcp._timeout_ms)
def testInitWithHostAndPort(self):
tcp = common_stub.StubTcp('10.11.12.13:5678')
self.assertEqual('10.11.12.13:5678', tcp._serial_number)
self.assertEqual(None, tcp._timeout_ms)
def testInitWithTimeout(self):
tcp = common_stub.StubTcp('10.0.0.2', timeout_ms=234.5)
self.assertEqual('10.0.0.2:5555', tcp._serial_number)
self.assertEqual(234.5, tcp._timeout_ms)
def testInitWithTimeoutInt(self):
tcp = common_stub.StubTcp('10.0.0.2', timeout_ms=234)
self.assertEqual('10.0.0.2:5555', tcp._serial_number)
self.assertEqual(234.0, tcp._timeout_ms)
if __name__ == '__main__':
unittest.main()
| 31.588608 | 102 | 0.696053 |
2f0674f696153906c025195063f1c35df441bd48 | 4,736 | php | PHP | application/views/header.php | olatiferreira/denunciae-frontend-admin | 1641b66589ff9a60aea1b5a2219248246857503c | [
"MIT"
] | null | null | null | application/views/header.php | olatiferreira/denunciae-frontend-admin | 1641b66589ff9a60aea1b5a2219248246857503c | [
"MIT"
] | null | null | null | application/views/header.php | olatiferreira/denunciae-frontend-admin | 1641b66589ff9a60aea1b5a2219248246857503c | [
"MIT"
] | null | null | null | <?php defined('BASEPATH') OR exit('No direct script access allowed'); ?>
<!DOCTYPE html>
<html lang="pt-br">
<head>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta content="width=device-width, initial-scale=1, maximum-scale=1, user-scalable=no" name="viewport">
<title>DenunciAê!</title>
<!-- Favicon -->
<link rel="shortcut icon" type="image/png" href="<?= base_url('assets/img/logo.gif')?>"/>
<!-- CSS -->
<link rel="stylesheet" type="text/css" href="<?= base_url('assets/css/bootstrap.min.css')?>">
<link rel="stylesheet" type="text/css" href="<?= base_url('assets/css/font-awesome.min.css')?>">
<link rel="stylesheet" type="text/css" href="<?= base_url('assets/css/ionicons.min.css')?>">
<link rel="stylesheet" type="text/css" href="<?= base_url('assets/css/daterangepicker.css')?>">
<link rel="stylesheet" type="text/css" href="<?= base_url('assets/css/bootstrap-datepicker.min.css')?>">
<!-- <link rel="stylesheet" type="text/css" href="<?= base_url('assets/css/all.css')?>"> -->
<link rel="stylesheet" type="text/css" href="<?= base_url('assets/css/bootstrap-colorpicker.min.css')?>">
<link rel="stylesheet" type="text/css" href="<?= base_url('assets/css/select2.min.css')?>">
<link rel="stylesheet" type="text/css" href="<?= base_url('assets/css/AdminLTE.min.css')?>">
<link rel="stylesheet" type="text/css" href="<?= base_url('assets/css/all-skins.min.css')?>">
<!-- <link rel="stylesheet" type="text/css" href="<?= base_url('assets/css/bootstrap3-wysihtml5.min.css')?>"> -->
<link rel="stylesheet" type="text/css" href="<?= base_url('assets/css/jquery-jvectormap.css')?>">
<link rel="stylesheet" type="text/css" href="<?= base_url('assets/css/morris.css')?>">
<link rel="stylesheet" type="text/css" href="<?= base_url('assets/fonts/font-awesome/css/font-awesome.min.css')?>">
<link rel="stylesheet" type="text/css" href="<?= base_url('assets/css/sweetalert2.min.css')?>">
<link rel="stylesheet" type="text/css" href="<?= base_url('assets/css/style.css')?>">
<!-- Google Font -->
<link rel="stylesheet" href="https://fonts.googleapis.com/css?family=Source+Sans+Pro:300,400,600,700,300italic,400italic,600italic">
<!-- Json -->
<link rel="manifest" href="<?= base_url('assets/manifest.json')?>">
<!-- Scripts-->
<script src="http://code.angularjs.org/1.0.1/angular-1.0.1.min.js"></script>
<script type="text/javascript" src="<?= base_url('assets/js/jquery.min.js')?>"></script>
<script type="text/javascript" src="<?= base_url('assets/js/bootstrap.min.js')?>"></script>
<script type="text/javascript" src="<?= base_url('assets/js/select2.full.min.js')?>"></script>
<script src="<?= base_url('assets/js/jquery.inputmask.js')?>"></script>
<script src="<?= base_url('assets/js/jquery.inputmask.date.extensions.js')?>"></script>
<script src="<?= base_url('assets/js/jquery.inputmask.extensions.js')?>"></script>
<script type="text/javascript" src="<?= base_url('assets/js/moment.min.js')?>"></script>
<script type="text/javascript" src="<?= base_url('assets/js/daterangepicker.js')?>"></script>
<script type="text/javascript" src="<?= base_url('assets/js/bootstrap-datepicker.min.js')?>"></script>
<script src="<?= base_url('assets/js/bootstrap-colorpicker.min.js')?>"></script>
<script src="<?= base_url('assets/js/bootstrap-timepicker.min.js')?>"></script>
<script type="text/javascript" src="<?= base_url('assets/js/jquery.slimscroll.min.js')?>"></script>
<script src="<?= base_url('assets/js/icheck.min.js')?>"></script>
<script type="text/javascript" src="<?= base_url('assets/js/fastclick.js')?>"></script>
<script type="text/javascript" src="<?= base_url('assets/js/adminlte.min.js')?>"></script>
<script type="text/javascript" src="<?= base_url('assets/js/demo.js')?>"></script>
<script type="text/javascript" src="<?= base_url('assets/js/raphael.min.js')?>"></script>
<script type="text/javascript" src="<?= base_url('assets/js/morris.min.js')?>"></script>
<script type="text/javascript" src="<?= base_url('assets/js/jquery.sparkline.min.js')?>"></script>
<script type="text/javascript" src="<?= base_url('assets/js/jquery-jvectormap-1.2.2.min.js')?>"></script>
<script type="text/javascript" src="<?= base_url('assets/js/jquery-jvectormap-world-mill-en.js')?>"></script>
<script type="text/javascript" src="<?= base_url('assets/js/jquery.knob.min.js')?>"></script>
<script type="text/javascript" src="<?= base_url('assets/js/jquery-ui.min.js')?>"></script>
<script type="text/javascript" src="<?= base_url('assets/js/sweetalert2.min.js')?>"></script>
</head>
<body class="hold-transition skin-blue sidebar-mini"> | 59.2 | 134 | 0.654139 |
2719e9ac57fbb0698f90a1fd225d523b74ae4de5 | 4,782 | h | C | backends/cpu/lib/kernels/fused_matmul_kernel.h | Nexuscompute/runtime | 805afefc84acea69a552507ab6d167b956d21a62 | [
"Apache-2.0"
] | null | null | null | backends/cpu/lib/kernels/fused_matmul_kernel.h | Nexuscompute/runtime | 805afefc84acea69a552507ab6d167b956d21a62 | [
"Apache-2.0"
] | 20 | 2021-12-29T09:53:18.000Z | 2022-01-21T03:24:40.000Z | backends/cpu/lib/kernels/fused_matmul_kernel.h | Nexuscompute/runtime | 805afefc84acea69a552507ab6d167b956d21a62 | [
"Apache-2.0"
] | null | null | null | /*
* Copyright 2020 The TensorFlow Runtime Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// MatMul + Fusion kernel implementation (fusion added via output kernel).
#ifndef TFRT_BACKENDS_CPU_LIB_KERNELS_CPU_FUSED_MATMUL_KERNEL_H_
#define TFRT_BACKENDS_CPU_LIB_KERNELS_CPU_FUSED_MATMUL_KERNEL_H_
#include "./matmul_kernel.h"
#include "tfrt/common/compat/eigen/contraction_output_kernel.h"
#include "tfrt/common/compat/eigen/eigen_dtype.h"
#include "tfrt/common/compat/eigen/eigen_evaluator.h"
#include "tfrt/common/compat/eigen/eigen_kernel.h"
#include "tfrt/common/compat/eigen/tensor_types.h"
#include "tfrt/support/forward_decls.h"
#include "tfrt/tensor/dense_host_tensor.h"
#include "tfrt/tensor/dense_host_tensor_view.h"
namespace tfrt {
namespace cpu {
namespace {
template <typename OutputKernel, typename T, typename EigenEvaluator>
typename EigenEvaluator::DependencyToken FusedMatMulInternal(
const DenseHostTensor& a, const DenseHostTensor& b, DenseHostTensor* output,
const DenseHostTensor& fusion_input, bool transpose_a, bool transpose_b,
EigenEvaluator eigen) {
DHTArrayView<T> bias_view(&fusion_input);
OutputKernel output_kernel(compat::AsEigenConstTensor(bias_view));
return cpu::MatMul<T>(1.0, a, b, 0.0, output, transpose_a, transpose_b,
std::move(output_kernel), eigen);
}
} // namespace
template <typename T, typename EigenEvaluator, typename FuseInputsRange>
typename EigenEvaluator::DependencyToken FusedMatMul(
const DenseHostTensor& a, const DenseHostTensor& b, DenseHostTensor* output,
FuseInputsRange fusion_inputs, bool transpose_a, bool transpose_b,
AggregateAttr fused_ops_attr, const ExecutionContext& exec_ctx) {
static_assert(std::is_same<std::decay_t<decltype(fusion_inputs[0])>,
DenseHostTensor>::value,
"fusion_inputs must be a range of DenseHostTensor");
EigenEvaluator eigen{exec_ctx.host()};
// Parse the MatMul fusion config.
llvm::SmallVector<string_view, 4> fused_ops(fused_ops_attr.GetNumElements());
for (int i = 0; i < fused_ops_attr.GetNumElements(); ++i) {
fused_ops[i] = fused_ops_attr.GetAttribute(i).cast<StringAttr>().GetValue();
}
if (fused_ops.empty()) {
return eigen.MakeError("FusedMatMul must specify fused operations");
}
// Match the fusion to Eigen contraction output kernel.
auto match_fusion = [&](std::initializer_list<string_view> ops) -> bool {
return fused_ops.size() == ops.size() &&
std::equal(fused_ops.begin(), fused_ops.end(), ops.begin());
};
// Dispatch fusion to correct Eigen contraction output kernel.
// Validate BiasAdd operands.
if (fused_ops[0] == "BiasAdd") {
auto& bias = fusion_inputs[0];
if (bias.shape().GetRank() != 1)
return eigen.MakeError("Bias tensor must a vector");
const Index inner_dim = output->shape().GetDimensionSize(1);
if (bias.NumElements() != inner_dim)
return eigen.MakeError("The number of bias elements ", bias.NumElements(),
" doesn't match output inner dimension ",
inner_dim);
}
// Fusion: BiasAdd
if (match_fusion({"BiasAdd"})) {
return FusedMatMulInternal<compat::BiasAddOutputKernel<T>, T>(
a, b, output, fusion_inputs[0], transpose_a, transpose_b, eigen);
}
// Fusion: BiasAdd + Relu
if (match_fusion({"BiasAdd", "Relu"})) {
return FusedMatMulInternal<compat::BiasAddOutputKernel<T, compat::Relu>, T>(
a, b, output, fusion_inputs[0], transpose_a, transpose_b, eigen);
}
// Fusion: BiasAdd + Relu6
if (match_fusion({"BiasAdd", "Relu6"})) {
return FusedMatMulInternal<compat::BiasAddOutputKernel<T, compat::Relu6>,
T>(a, b, output, fusion_inputs[0], transpose_a,
transpose_b, eigen);
}
// Fusion: BiasAdd + Elu
if (match_fusion({"BiasAdd", "Elu"})) {
return FusedMatMulInternal<compat::BiasAddOutputKernel<T, compat::Elu>, T>(
a, b, output, fusion_inputs[0], transpose_a, transpose_b, eigen);
}
return eigen.MakeError("Unsupported fusion type");
}
} // namespace cpu
} // namespace tfrt
#endif // TFRT_BACKENDS_CPU_LIB_KERNELS_CPU_FUSED_MATMUL_KERNEL_H_
| 38.878049 | 80 | 0.707235 |
cb5b218ca932a786da47b1393d253a271601c5fc | 4,616 | h | C | unittest/UnitTest.h | iotivity/iotivity-alljoyn-bridge | 997fa14a85d6827180753a511570c4421a84f221 | [
"Apache-2.0"
] | 3 | 2017-08-24T08:02:53.000Z | 2020-01-19T15:00:37.000Z | unittest/UnitTest.h | iotivity/iotivity-alljoyn-bridge | 997fa14a85d6827180753a511570c4421a84f221 | [
"Apache-2.0"
] | null | null | null | unittest/UnitTest.h | iotivity/iotivity-alljoyn-bridge | 997fa14a85d6827180753a511570c4421a84f221 | [
"Apache-2.0"
] | 4 | 2017-03-21T11:59:53.000Z | 2022-03-07T14:50:06.000Z | //******************************************************************
//
// Copyright 2017 Intel Corporation All Rights Reserved.
//
//-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
//-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
#ifndef _UNITTEST_H
#define _UNITTEST_H
#include "octypes.h"
#include "Resource.h"
#include "VirtualResource.h"
#include <gtest/gtest.h>
struct LocalizedString
{
const char *language;
const char *value;
};
struct DiscoverContext
{
const char *m_uri;
Device *m_device;
Resource *m_resource;
DiscoverContext(const char *uri = NULL) : m_uri(uri), m_device(NULL), m_resource(NULL) { }
~DiscoverContext() { delete m_device; }
};
class AJOCSetUp : public testing::Test
{
public:
static void SetUpStack();
static void TearDownStack();
protected:
virtual ~AJOCSetUp() { }
virtual void SetUp();
virtual void TearDown();
void Wait(long waitMs);
};
class Callback
{
public:
Callback(OCClientResponseHandler cb, void *context = NULL);
OCStackResult Wait(long waitMs);
operator OCCallbackData *() { return &m_cbData; }
private:
OCClientResponseHandler m_cb;
void *m_context;
bool m_called;
OCCallbackData m_cbData;
static OCStackApplicationResult handler(void *ctx, OCDoHandle handle, OCClientResponse *clientResponse);
};
class ResourceCallback
{
public:
OCClientResponse *m_response;
ResourceCallback();
virtual ~ResourceCallback();
OCStackResult Wait(long waitMs);
void Reset();
operator OCCallbackData *() { return &m_cbData; }
protected:
bool m_called;
virtual OCStackApplicationResult Handler(OCDoHandle handle, OCClientResponse *response);
private:
OCCallbackData m_cbData;
static OCStackApplicationResult handler(void *ctx, OCDoHandle handle,
OCClientResponse *response);
};
class ObserveCallback : public ResourceCallback
{
public:
virtual ~ObserveCallback() { }
void Reset();
protected:
virtual OCStackApplicationResult Handler(OCDoHandle handle, OCClientResponse *response);
};
class CreateCallback
{
public:
CreateCallback();
OCStackResult Wait(long waitMs);
operator VirtualResource::CreateCB () { return cb; }
private:
bool m_called;
static void cb(void *ctx);
};
class MethodCall : public ajn::MessageReceiver
{
public:
MethodCall(ajn::BusAttachment *bus, ajn::ProxyBusObject *proxyObj);
QStatus Call(const char *iface, const char *method, const ajn::MsgArg *args, size_t numArgs);
QStatus Wait(long waitMs);
ajn::Message &Reply() { return m_reply; }
private:
ajn::ProxyBusObject *m_proxyObj;
ajn::Message m_reply;
bool m_called;
void ReplyHandler(ajn::Message &reply, void *context);
};
OCStackApplicationResult Discover(void *ctx, OCDoHandle handle, OCClientResponse *response);
OCStackResult ParseJsonPayload(OCRepPayload** outPayload, const char* payload);
#define A_SIZEOF(a) ((size_t) (sizeof(a) / sizeof(a[0])))
class Row
{
public:
Row(ajn::MsgArg arg, int64_t i);
Row(ajn::MsgArg arg, double d);
Row(ajn::MsgArg arg, bool b);
Row(ajn::MsgArg arg, const char *str);
Row(ajn::MsgArg arg, size_t len, const uint8_t *bytes);
Row(ajn::MsgArg arg, const OCRepPayload *payload);
Row(ajn::MsgArg arg, size_t d0, size_t d1, size_t d2, const int64_t *iArray);
Row(ajn::MsgArg arg, size_t d0, size_t d1, size_t d2, const double *dArray);
Row(ajn::MsgArg arg, size_t d0, size_t d1, size_t d2, const bool *bArray);
Row(ajn::MsgArg arg, size_t d0, size_t d1, size_t d2, const char **strArray);
Row(ajn::MsgArg arg, size_t d0, size_t d1, size_t d2, const uint8_t *bytes);
Row(ajn::MsgArg arg, size_t d0, size_t d1, size_t d2, const OCRepPayload *payload);
Row(ajn::MsgArg arg, uint8_t y);
Row(ajn::MsgArg arg, uint64_t t);
ajn::MsgArg m_arg;
OCRepPayloadValue m_value;
};
class FromDBus : public ::testing::TestWithParam<Row> { };
class FromOC : public ::testing::TestWithParam<Row> { };
#endif /* _UNITTEST_H */
| 30.169935 | 108 | 0.680676 |
5bec53a7e3bdbb9d9c0856220fb43f75deac7d43 | 6,020 | c | C | ftt/ftt_lib/ftt_find.c | moibenko/enstore | 6f2ff5b67ff73872a9e68f2a68b0bdaa70cef9b9 | [
"Intel",
"Unlicense"
] | 4 | 2021-10-17T11:17:59.000Z | 2022-02-28T16:58:40.000Z | ftt/ftt_lib/ftt_find.c | moibenko/enstore | 6f2ff5b67ff73872a9e68f2a68b0bdaa70cef9b9 | [
"Intel",
"Unlicense"
] | 17 | 2021-10-05T21:44:06.000Z | 2022-03-31T16:58:40.000Z | ftt/ftt_lib/ftt_find.c | moibenko/enstore | 6f2ff5b67ff73872a9e68f2a68b0bdaa70cef9b9 | [
"Intel",
"Unlicense"
] | 8 | 2021-09-02T18:55:49.000Z | 2022-03-09T21:05:28.000Z | static char rcsid[] = "@(#)$Id$";
#include <stdio.h>
#include <string.h>
#include <ctype.h>
#include <ftt_private.h>
#ifdef WIN32 /* this is Windows */
#include <process.h>
#include <windows.h>
#define geteuid() -1
#define popen _popen
#define pclose _pclose
char * ftt_get_os() {
char ver[20],rel[20];
char *os = "WINNT";
OSVERSIONINFO buf;
buf.dwOSVersionInfoSize =sizeof(OSVERSIONINFO);
GetVersionEx(&buf);
if (buf.dwPlatformId != VER_PLATFORM_WIN32_NT ) os = "WIN32";
sprintf(rel,"%d",buf.dwMajorVersion);
sprintf(ver,"%d",buf.dwMinorVersion);
return ftt_make_os_name( "WINNT", rel,ver);
}
#else /* this is UNIX */
#include <unistd.h>
#include <sys/utsname.h>
char *
ftt_get_os() {
struct utsname buf;
uname(&buf);
return ftt_make_os_name( buf.sysname, buf.release, buf.version);
}
#endif
char *
ftt_make_os_name(char *sys, char *release , char *version) {
static char sysname[512];
sprintf(sysname,"%s+%s.%s", sys, release, version);
return sysname;
}
int
ftt_findslot (char *basename, char *os, char *drivid,
void *p1, void *p2, void *p3) {
int i;
char *lastpart;
int res;
DEBUG4(stderr,"Entering: ftt_findslot %s %s %s\n", basename, os, drivid );
/* tables now only deal with the last directory and file
** component of the pathname
*/
lastpart = ftt_find_last_part(basename);
DEBUG3(stderr,"looking at '%s' part of name\n", lastpart);
for( i = 0; devtable[i].os !=0 ; i++ ) {
if (ftt_matches(os, devtable[i].os) &&
ftt_matches(drivid, devtable[i].drivid)) {
DEBUG4(stderr,"trying format \"%s\" against %s\n",
devtable[i].baseconv_in, lastpart);
res = sscanf(lastpart,devtable[i].baseconv_in,p1,p2,p3);
if (devtable[i].nconv == res ) {
DEBUG3(stderr, "format Matches (\"%s\" against %s)!\n",devtable[i].baseconv_in, lastpart);
return i;
}
DEBUG3(stderr, "format missed... got %d, not %d\n",
res, devtable[i].nconv);
}
}
return -1;
}
extern char *
ftt_strip_to_basename(const char *basename,char *os) {
static char buf[512];
static char buf2[512];
static union { int n; char s[512];} s1, s2, s3;
int i;
char *lastpart;
DEBUG4(stderr, "Entering: ftt_strip_to_basename\n");
memset(buf,0, 512);
memset(buf2,0, 512);
memset(s1.s,0, 512);
strncpy(buf, basename, 512);
#ifdef WIN32
strlwr( buf);
#endif
#ifdef DO_SKIP_SYMLINKS
{
int maxlinks=512;
while( 0 < readlink(buf, buf2, 512) && maxlinks-- >0 ) {
if( buf2[0] == '/' ) {
/* absolute pathname, replace the whole buffer */
strncpy(buf,buf2,512);
} else {
/* relative pathname, replace after last /, if any */
if ( 0 == (p = strrchr(buf,'/'))) {
p = buf;
} else {
p++;
}
strncpy(p, buf2, 512 - (p - buf));
}
}
}
#endif
i = ftt_findslot(buf, os, "", &s1, &s2, &s3);
if (i < 0) {
return 0;
}
/* tables now only deal with the last directory and file component of
** the pathname
*/
lastpart = ftt_find_last_part(buf);
/*
** first item in the format can be either a string or a digit;
** check for strings
*/
if ( devtable[i].baseconv_out[1] == 's') {
sprintf(lastpart, devtable[i].baseconv_out, s1.s, s2.n, s3.n);
} else {
sprintf(lastpart, devtable[i].baseconv_out, s1.n, s2.n, s3.n);
}
return strdup(buf);
}
/*
** search for last 2 slashes in pathname,
** and return the pointer to the character after the next to last one.
** if there isn't one, return the pointer to the original string
*/
char *
ftt_find_last_part( char *p ) {
char *s, *s1 = 0, *s2 = 0;
char s_find = '/';
/* -------------------- for Windows NT ------------------------------- */
#ifdef WIN32
s_find = '\\';
#endif
s = p;
while( s && *s ) {
if( *s == s_find ) {
s2 = s1;
s1 = s;
}
s++;
}
if( s2 ) {
return s2+1;
} else {
return p;
}
}
/*
** get_driveid guesses the drive id the best it can from the available
** system configuration command(s).
** Returns a SCSI device id, or a prefix thereof
*/
extern char *
ftt_get_driveid(char *basename,char *os) {
static char cmdbuf[512];
static char output[512];
static union { int n; char s[512];} s1, s2, s3;
FILE *pf;
char *res = 0;
int i;
DEBUG4(stderr, "Entering: ftt_get_driveid\n");
i = ftt_findslot(basename, os, "", &s1, &s2, &s3);
if (i < 0) {
return 0;
}
if ( 0 != geteuid() && (devtable[i].flags & FTT_FLAG_SUID_DRIVEID) ) {
DEBUG3( stderr, "Running ftt_suid...\n" );
sprintf(cmdbuf, "ftt_suid -i %s", basename );
pf = popen(cmdbuf, "r");
if (pf != 0) {
res = fgets(output,512,pf);
pclose(pf);
} else {
res = 0;
}
} else {
if (FTT_GUESS_ID) {
if ( devtable[i].drividcmd[1] == 's') {
sprintf(cmdbuf, devtable[i].drividcmd, s1.s, s2.n, s3.n);
} else {
sprintf(cmdbuf, devtable[i].drividcmd, s1.n, s2.n, s3.n);
}
DEBUG4(stderr,"Running \"%s\" to get drivid (lenght %d < 512 ) \n", cmdbuf,strlen(cmdbuf));
pf = popen(cmdbuf, "r");
if (pf) {
res = fgets(output, 512,pf);
pclose(pf);
}
} else {
/* Actually look it up... */
ftt_descriptor tmp;
ftt_stat_buf b;
char *pc;
tmp = ftt_open_logical(basename, ftt_get_os(), "XXXXXX", 1);
b = ftt_alloc_stat();
ftt_get_stats(tmp, b);
if (ftt_debug > 3) {
printf("stats at open are:\n");
ftt_dump_stats(b,stdout);
}
pc = ftt_extract_stats(b,FTT_PRODUCT_ID);
if (pc) {
res = strcpy(output, pc);
strcat(output, "\n");
} else {
strcpy( output , "\n");
}
ftt_free_stat(b);
ftt_close(tmp);
}
}
if (res != 0) {
output[strlen(output)-1] = 0; /* stomp the newline */
res = strdup(output);
}
DEBUG3(stderr, "returning %s\n", res);
return res;
}
| 23.515625 | 97 | 0.572757 |
2a884014ec51d7119c337bb206e75e6504752643 | 6,242 | java | Java | jdo/jibx/build/maven/jibx-bind/src/main/java/org/jibx/util/ChainedMap.java | sergiomt/judal | bda7f8a34b6811e979edd988ec87d89f4c84450e | [
"Apache-2.0"
] | null | null | null | jdo/jibx/build/maven/jibx-bind/src/main/java/org/jibx/util/ChainedMap.java | sergiomt/judal | bda7f8a34b6811e979edd988ec87d89f4c84450e | [
"Apache-2.0"
] | 5 | 2020-03-04T21:41:43.000Z | 2021-11-06T23:33:41.000Z | jdo/jibx/build/maven/jibx-bind/src/main/java/org/jibx/util/ChainedMap.java | sergiomt/judal | bda7f8a34b6811e979edd988ec87d89f4c84450e | [
"Apache-2.0"
] | null | null | null | /*
* Copyright (c) 2009, Dennis M. Sosnoski. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
* following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following
* disclaimer. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
* following disclaimer in the documentation and/or other materials provided with the distribution. Neither the name of
* JiBX nor the names of its contributors may be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.jibx.util;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
/**
* Partial implementation of {@link java.util.Map} which provides a merged view of a defaults map with an overrides map.
* Although this can be used as a map for most purposes, methods which return live views of the keys or values in the
* map only take into account the overrides, not the defaults.
*/
public class ChainedMap implements Map
{
/** Default values map. */
private final Map m_defaults;
/** Override values map. */
private final Map m_overrides;
/**
* Constructor.
*
* @param defaults map providing defaults for keys not set directly
*/
public ChainedMap(Map defaults) {
m_defaults = defaults;
m_overrides = new HashMap();
}
/**
* Clear all override key-value pairs. This only effects the overrides, not the defaults.
*/
public void clear() {
m_defaults.clear();
}
/**
* Check if a key has a defined value. This will return <code>true</code> if the key is present in the overrides map
* with a non-null value, or if the key is not present in the overrides map but is present in the defaults map.
*
* @param key
* @return <code>true</code> if key defined, <code>false</code> if not
*/
public boolean containsKey(Object key) {
if (m_overrides.containsKey(key)) {
return m_overrides.get(key) != null;
} else {
return m_defaults.containsKey(key);
}
}
/**
* Check if a value is present. This only checks for the value in the overrides map.
*
* @param value
* @return <code>true</code> if value present as an override, <code>false</code> if not
*/
public boolean containsValue(Object value) {
return m_overrides.containsValue(value);
}
/**
* Get the set of entries. This only returns the entries in the overrides map.
*
* @return override entries
*/
public Set entrySet() {
return m_overrides.entrySet();
}
/**
* Get value for key. If the key is present in the overrides map, the value from that map is returned; otherwise,
* the value for the key in the defaults map is returned.
*
* @param key
* @return value (<code>null</code> if key not present)
*/
public Object get(Object key) {
if (m_overrides.containsKey(key)) {
return m_overrides.get(key);
} else {
return m_defaults.get(key);
}
}
/**
* Check if no overrides are defined.
*
* @return <code>true</code> if no overrides, <code>false</code> if any present
*/
public boolean isEmpty() {
return m_overrides.isEmpty();
}
/**
* Get the set of keys. This only returns the keys in the overrides map.
*
* @return keys
*/
public Set keySet() {
return m_overrides.keySet();
}
/**
* Set an override value. This just adds the key-value pair to the override map.
*
* @param key
* @param value
* @return previous value for key (from default map, if not present in overrides)
*/
public Object put(Object key, Object value) {
Object prior;
if (m_overrides.containsKey(key)) {
prior = m_overrides.put(key, value);
} else {
m_overrides.put(key, value);
prior = m_defaults.get(key);
}
return prior;
}
/**
* Add all key-value pairs from another map into the overrides map.
*
* @param map
*/
public void putAll(Map map) {
m_overrides.putAll(map);
}
/**
* Remove a key-value pair. If the key was previously present in the overrides map it is simply removed from that
* map. If it was not present in the overrides map but is present in the defaults map, a null entry is added to the
* overrides map for that key.
*
* @param key
* @return previous value for key
*/
public Object remove(Object key) {
if (m_overrides.containsKey(key)) {
return m_overrides.remove(key);
} else {
return m_defaults.remove(key);
}
}
/**
* Get the number of entries in the map. This returns the entry count for the overrides map only.
*
* @return entry count
*/
public int size() {
return m_overrides.size();
}
/**
* Get the values. This returns only the values in the overrides map.
*
* @return values
*/
public Collection values() {
return m_overrides.values();
}
} | 33.55914 | 120 | 0.646748 |
71c93cb15243802170fe3df03855d238e9ee1949 | 3,557 | kt | Kotlin | http4k-aws/src/test/kotlin/org/http4k/aws/AwsRealChunkKeyContentsIfRequiredTest.kt | savagematt/http4k | 9cc8ef11121bfbe10a1cd0ca58a17885c297af52 | [
"Apache-2.0"
] | null | null | null | http4k-aws/src/test/kotlin/org/http4k/aws/AwsRealChunkKeyContentsIfRequiredTest.kt | savagematt/http4k | 9cc8ef11121bfbe10a1cd0ca58a17885c297af52 | [
"Apache-2.0"
] | null | null | null | http4k-aws/src/test/kotlin/org/http4k/aws/AwsRealChunkKeyContentsIfRequiredTest.kt | savagematt/http4k | 9cc8ef11121bfbe10a1cd0ca58a17885c297af52 | [
"Apache-2.0"
] | null | null | null | package org.http4k.aws
import com.natpryce.hamkrest.assertion.assertThat
import com.natpryce.hamkrest.containsSubstring
import com.natpryce.hamkrest.equalTo
import org.http4k.client.ApacheClient
import org.http4k.core.BodyMode
import org.http4k.core.HttpHandler
import org.http4k.core.Method.DELETE
import org.http4k.core.Method.GET
import org.http4k.core.Method.PUT
import org.http4k.core.Request
import org.http4k.core.Status.Companion.NO_CONTENT
import org.http4k.core.Status.Companion.OK
import org.http4k.core.then
import org.http4k.filter.ChunkKeyContentsIfRequired
import org.http4k.filter.ClientFilters
import org.http4k.filter.DebuggingFilters
import org.http4k.filter.Payload
import org.junit.jupiter.api.Disabled
import org.junit.jupiter.api.Test
class AwsRealChunkKeyContentsIfRequiredTest : AbstractAwsRealS3TestCase() {
@Test
fun `default usage`() {
val requestBodyMode = BodyMode.Memory
bucketLifecycle(ClientFilters.ChunkKeyContentsIfRequired(requestBodyMode = requestBodyMode)
.then(awsClientFilter(Payload.Mode.Signed))
.then(DebuggingFilters.PrintResponse())
.then(ApacheClient(requestBodyMode = requestBodyMode)))
}
@Test
@Disabled
fun `streaming usage`() {
val requestBodyMode = BodyMode.Stream
bucketLifecycle(ClientFilters.ChunkKeyContentsIfRequired(requestBodyMode = requestBodyMode)
.then(awsClientFilter(Payload.Mode.Unsigned))
.then(DebuggingFilters.PrintResponse())
.then(ApacheClient(requestBodyMode = requestBodyMode)))
}
private fun bucketLifecycle(client: HttpHandler) {
val aClient = aClient()
val contentOriginal = (1..10 * 1024 * 1024).map { 'a' }.joinToString("")
assertThat(
"Bucket should not exist in root listing",
aClient(Request(GET, s3Root)).bodyString(),
!containsSubstring(bucketName))
assertThat(
"Put of bucket should succeed",
aClient(Request(PUT, bucketUrl)).status,
equalTo(OK))
assertThat(
"Bucket should exist in root listing",
aClient(Request(GET, s3Root)).bodyString(),
containsSubstring(bucketName))
assertThat(
"Key should not exist in bucket listing",
aClient(Request(GET, bucketUrl)).bodyString(),
!containsSubstring(key))
client(Request(PUT, keyUrl)
.body(contentOriginal.byteInputStream(), contentOriginal.length.toLong()))
assertThat(
"Key should appear in bucket listing",
aClient(Request(GET, bucketUrl)).bodyString(),
containsSubstring(key))
assertThat(
"Key contents should be as expected",
aClient(Request(GET, keyUrl)).bodyString().length,
equalTo(contentOriginal.length))
assertThat(
"Delete of key should succeed",
aClient(Request(DELETE, keyUrl)).status,
equalTo(NO_CONTENT))
assertThat(
"Key should no longer appear in bucket listing",
aClient(Request(GET, bucketUrl)).bodyString(),
!containsSubstring(key))
assertThat(
"Delete of bucket should succeed",
aClient(Request(DELETE, bucketUrl)).status,
equalTo(NO_CONTENT))
assertThat(
"Bucket should no longer exist in root listing",
aClient(Request(GET, s3Root)).bodyString(),
!containsSubstring(bucketName))
}
}
| 37.442105 | 99 | 0.66826 |
ee43b94ab9b98da9d4368955ab05dd92010720e7 | 955 | lua | Lua | Ladder.lua | MangoSister/Grabbit | 7c45963c06aaddd9c8370e3a57d260fcb1a6bafb | [
"MIT"
] | null | null | null | Ladder.lua | MangoSister/Grabbit | 7c45963c06aaddd9c8370e3a57d260fcb1a6bafb | [
"MIT"
] | null | null | null | Ladder.lua | MangoSister/Grabbit | 7c45963c06aaddd9c8370e3a57d260fcb1a6bafb | [
"MIT"
] | null | null | null |
local CLASS = require('Class')
local Assets = require('Assets')
local Audio = require('Audio')
local Entity = require('Entity')
local World = require('World')
local Ladder = CLASS(Entity)
Ladder.class = "Ladder"
Ladder.type = "kinematic"
Ladder.nogravity = true
Ladder.shape = love.physics.newEdgeShape(0, 250, 0, -300)
function Ladder:init()
self.body:setGravityScale(0)
self.body:setSleepingAllowed(false)
-- TODO(yang): make rabbit not colliding with ladder
-- self.fixture:setSensor(true)
end
function Ladder:update(dt)
Entity.update(self, dt)
self.px, self.py = self.body:getPosition()
end
function Ladder:draw()
love.graphics.setColor(255, 0, 0, 255)
love.graphics.line(self.px, self.py+250, self.px, self.py-300)
end
function Ladder:collides(other)
if other.class then
print("Ladder: collides( " .. other.class .. " )")
else
print("Ladder: collides()")
end
end
return Ladder
| 22.209302 | 66 | 0.685864 |
5c92d7747a4d099720c114cde8e1d7de8f44ab0b | 747 | c | C | STM32F103/209_RTT/user/main.c | xqt2010a/GD32F450 | d98fbc8b09b129681a2c5667491cc9daf0d34b0b | [
"MIT"
] | 1 | 2019-07-30T08:06:40.000Z | 2019-07-30T08:06:40.000Z | STM32F103/209_RTT/user/main.c | xqt2010a/GD32F450 | d98fbc8b09b129681a2c5667491cc9daf0d34b0b | [
"MIT"
] | null | null | null | STM32F103/209_RTT/user/main.c | xqt2010a/GD32F450 | d98fbc8b09b129681a2c5667491cc9daf0d34b0b | [
"MIT"
] | 1 | 2019-07-30T08:06:44.000Z | 2019-07-30T08:06:44.000Z | #include "SEGGER_RTT.h"
#include "string.h"
#include "stdio.h"
#include "stdint.h"
#define BUF_LEN 64
#define PRT(...) SEGGER_RTT_printf(0, __VA_ARGS__)
uint8_t Rx_size;
uint8_t Rx_buf[BUF_LEN];
void Delay(void)
{
uint32_t i;
for(i=0; i<0xFFFF; i++);
}
void main(void)
{
SEGGER_RTT_ConfigUpBuffer(0, NULL, NULL, 0, SEGGER_RTT_MODE_NO_BLOCK_SKIP);
PRT("hello world!\r\ntoday is :%s,%s\r\n",__DATE__, __TIME__);
printf("RTT demo!\r\n");
while(1)
{
if(SEGGER_RTT_HasKey()){
Delay();
Rx_size = SEGGER_RTT_HasData(0);
SEGGER_RTT_Read(0, Rx_buf, Rx_size);
SEGGER_RTT_WriteString(0,(char const *)Rx_buf);
memset(Rx_buf, 0, Rx_size);
}
}
}
| 21.970588 | 79 | 0.61178 |
e3fb8c4782ceeaf0c535ec8ecc6763532a6b7539 | 3,898 | go | Go | formactions.go | aosfather/bingo | 16c039860bbaa215a5a5cc2fb51d5a9ae7b6f700 | [
"Apache-2.0"
] | 51 | 2018-04-04T13:46:42.000Z | 2022-03-14T02:15:29.000Z | formactions.go | aosfather/bingo | 16c039860bbaa215a5a5cc2fb51d5a9ae7b6f700 | [
"Apache-2.0"
] | null | null | null | formactions.go | aosfather/bingo | 16c039860bbaa215a5a5cc2fb51d5a9ae7b6f700 | [
"Apache-2.0"
] | 11 | 2018-10-19T07:09:09.000Z | 2021-12-06T14:34:38.000Z | package main
import (
"bytes"
"github.com/aosfather/bingo_mvc/sqltemplate"
"github.com/aosfather/bingo_utils/lua"
l "github.com/yuin/gopher-lua"
"io"
"net/http"
"strings"
"text/template"
"time"
)
/**
表单action
支持的类型:SQL、GET、POST、LUA
*/
//表单action
type FormActions struct {
DB *sqltemplate.DataSource `Inject:""`
pool *lua.LuaPool
}
func (this *FormActions) Init() {
//设置lua引擎的lib脚本查找路径。
lua.SetLuaPath(".\\libs\\lua")
//加载自定义库,http,db
this.pool = lua.NewLuaPool2(100, "bingo", &lua.LuaLib{"http", httplibs}, &lua.LuaLib{"db", CreateDataBasseLib(this.DB)},
&lua.LuaLib{"sys", syslibs})
}
func (this *FormActions) getLuaScript(code string, content string) *lua.LuaScript {
script := &lua.LuaScript{Log: this.lualog}
script.SetPool(this.pool)
script.Load(code, content)
return script
}
func (this *FormActions) lualog(str string) {
debug("lua:", str)
}
func (this *FormActions) Execute(meta *FormMeta, parameter map[string]interface{}) (interface{}, error) {
switch meta.ScriptType {
case "GET":
headers, body := this.processHttpScript(meta, parameter)
return this.doGet(meta.Extends["url"], headers, strings.TrimSpace(body))
case "POST":
headers, body := this.processHttpScript(meta, parameter)
return this.doPost(meta.Extends["url"], headers, strings.TrimSpace(body))
case "LUA":
return this.processLuaScript(meta.Code, meta.Script, parameter)
}
return nil, nil
}
//处理lua脚本,将lua脚本作为整个的运行逻辑
func (this *FormActions) processLuaScript(name, script string, parameter map[string]interface{}) (interface{}, error) {
luascript := this.getLuaScript(name, script)
beforeRun := func(l *l.LState) {
l.SetGlobal("_inputs", lua.SetReadOnly(l, lua.ToLuaTable2(l, parameter)))
}
afterRun := func(l *l.LState) {
}
return luascript.Call(beforeRun, afterRun)
}
//处理http请求
func (this *FormActions) processHttpScript(meta *FormMeta, parameter map[string]interface{}) (map[string]string, string) {
t := template.New(meta.Code)
_, err := t.Parse(meta.Script)
if err != nil {
errs("parse template error!", err.Error())
} else {
headers := make(map[string]string)
buffer := new(bytes.Buffer)
err = t.Execute(buffer, parameter)
if err != nil {
errs("execute template error!", err.Error())
}
return headers, buffer.String()
}
return nil, ""
}
func (this *FormActions) doGet(url string, headers map[string]string, body string) (string, error) {
debug("header:", headers)
buffer := new(bytes.Buffer)
err := doHttpRequest("GET", url, body, buffer, headers)
if err != nil {
return "", err
}
return buffer.String(), nil
}
func (this *FormActions) doPost(url string, headers map[string]string, body string) (string, error) {
if _, ok := headers["Content-Type"]; !ok {
if strings.Index(body, "{") >= 0 {
headers["Content-Type"] = "application/json;charset=utf-8"
} else {
headers["Content-Type"] = "application/x-www-form-urlencoded"
}
}
debug("header:", headers)
buffer := new(bytes.Buffer)
err := doHttpRequest("POST", url, body, buffer, headers)
if err != nil {
return "", err
}
return buffer.String(), nil
}
//网络访问超时设置
const _ClientTimeout = 20 * time.Second
func doHttpRequest(method string, url string, content string, writer io.Writer, headers map[string]string) error {
//post
c := &http.Client{Timeout: _ClientTimeout}
times := 1
DO_POST:
req, err := http.NewRequest(method, url, strings.NewReader(string(content)))
if err != nil {
return err
}
if headers != nil {
for k, v := range headers {
req.Header.Set(k, v)
}
}
debug("values:", content)
resp, err := c.Do(req)
if err != nil {
errs(err.Error())
if times < 3 {
times++
time.Sleep(time.Second)
debug("try the ", times, " times!")
goto DO_POST
} else {
return err
}
}
defer resp.Body.Close()
_, err = io.Copy(writer, resp.Body)
if err != nil {
errs(err.Error())
return err
}
return nil
}
| 23.768293 | 122 | 0.679836 |
d9cacdd06056b03fb88c670cbbe4115e03300189 | 10,083 | rs | Rust | crates/holochain_types/src/dht_op/tests.rs | mhuesch/holochain | 8cade151329117c40e47533449a2f842187c373a | [
"Linux-OpenIB"
] | 619 | 2020-09-15T16:19:12.000Z | 2022-03-31T10:56:45.000Z | crates/holochain_types/src/dht_op/tests.rs | mhuesch/holochain | 8cade151329117c40e47533449a2f842187c373a | [
"Linux-OpenIB"
] | 332 | 2020-09-16T09:57:39.000Z | 2022-03-30T22:31:37.000Z | crates/holochain_types/src/dht_op/tests.rs | mhuesch/holochain | 8cade151329117c40e47533449a2f842187c373a | [
"Linux-OpenIB"
] | 87 | 2020-09-18T01:25:53.000Z | 2022-03-23T14:01:21.000Z | use crate::fixt::AgentValidationPkgFixturator;
use crate::fixt::CloseChainFixturator;
use crate::fixt::CreateFixturator;
use crate::fixt::CreateLinkFixturator;
use crate::fixt::DeleteLinkFixturator;
use crate::fixt::DnaFixturator;
use crate::fixt::EntryFixturator;
use crate::fixt::EntryHashFixturator;
use crate::fixt::EntryTypeFixturator;
use crate::fixt::InitZomesCompleteFixturator;
use crate::fixt::OpenChainFixturator;
use crate::fixt::UpdateFixturator;
use crate::prelude::*;
use ::fixt::prelude::*;
use holo_hash::fixt::HeaderHashFixturator;
use holo_hash::*;
use holochain_zome_types::Entry;
use holochain_zome_types::HeaderHashed;
use observability;
use tracing::*;
struct ElementTest {
entry_type: EntryType,
entry_hash: EntryHash,
original_entry_hash: EntryHash,
commons: Box<dyn Iterator<Item = HeaderBuilderCommon>>,
header_hash: HeaderHash,
sig: Signature,
entry: Entry,
link_add: CreateLink,
link_remove: DeleteLink,
dna: Dna,
chain_close: CloseChain,
chain_open: OpenChain,
agent_validation_pkg: AgentValidationPkg,
init_zomes_complete: InitZomesComplete,
}
impl ElementTest {
fn new() -> Self {
let entry_type = fixt!(EntryType);
let entry_hash = fixt!(EntryHash);
let original_entry_hash = fixt!(EntryHash);
let commons = HeaderBuilderCommonFixturator::new(Unpredictable);
let header_hash = fixt!(HeaderHash);
let sig = fixt!(Signature);
let entry = fixt!(Entry);
let link_add = fixt!(CreateLink);
let link_remove = fixt!(DeleteLink);
let dna = fixt!(Dna);
let chain_open = fixt!(OpenChain);
let chain_close = fixt!(CloseChain);
let agent_validation_pkg = fixt!(AgentValidationPkg);
let init_zomes_complete = fixt!(InitZomesComplete);
Self {
entry_type,
entry_hash,
original_entry_hash,
commons: Box::new(commons),
header_hash,
sig,
entry,
link_add,
link_remove,
dna,
chain_close,
chain_open,
agent_validation_pkg,
init_zomes_complete,
}
}
fn create_element(&mut self) -> (Create, Element) {
let entry_create = builder::Create {
entry_type: self.entry_type.clone(),
entry_hash: self.entry_hash.clone(),
}
.build(self.commons.next().unwrap());
let element = self.to_element(entry_create.clone().into(), Some(self.entry.clone()));
(entry_create, element)
}
fn update_element(&mut self) -> (Update, Element) {
let entry_update = builder::Update {
original_entry_address: self.original_entry_hash.clone(),
entry_type: self.entry_type.clone(),
entry_hash: self.entry_hash.clone(),
original_header_address: self.header_hash.clone().into(),
}
.build(self.commons.next().unwrap());
let element = self.to_element(entry_update.clone().into(), Some(self.entry.clone()));
(entry_update, element)
}
fn entry_create(mut self) -> (Element, Vec<DhtOp>) {
let (entry_create, element) = self.create_element();
let header: Header = entry_create.clone().into();
let ops = vec![
DhtOp::StoreElement(
self.sig.clone(),
header.clone(),
Some(self.entry.clone().into()),
),
DhtOp::RegisterAgentActivity(self.sig.clone(), header.clone()),
DhtOp::StoreEntry(
self.sig.clone(),
NewEntryHeader::Create(entry_create),
self.entry.clone().into(),
),
];
(element, ops)
}
fn entry_update(mut self) -> (Element, Vec<DhtOp>) {
let (entry_update, element) = self.update_element();
let header: Header = entry_update.clone().into();
let ops = vec![
DhtOp::StoreElement(
self.sig.clone(),
header.clone(),
Some(self.entry.clone().into()),
),
DhtOp::RegisterAgentActivity(self.sig.clone(), header.clone()),
DhtOp::StoreEntry(
self.sig.clone(),
NewEntryHeader::Update(entry_update.clone()),
self.entry.clone().into(),
),
DhtOp::RegisterUpdatedContent(
self.sig.clone(),
entry_update.clone(),
Some(self.entry.clone().into()),
),
DhtOp::RegisterUpdatedElement(
self.sig.clone(),
entry_update,
Some(self.entry.clone().into()),
),
];
(element, ops)
}
fn entry_delete(mut self) -> (Element, Vec<DhtOp>) {
let entry_delete = builder::Delete {
deletes_address: self.header_hash.clone(),
deletes_entry_address: self.entry_hash.clone(),
}
.build(self.commons.next().unwrap());
let element = self.to_element(entry_delete.clone().into(), None);
let header: Header = entry_delete.clone().into();
let ops = vec![
DhtOp::StoreElement(self.sig.clone(), header.clone(), None),
DhtOp::RegisterAgentActivity(self.sig.clone(), header.clone()),
DhtOp::RegisterDeletedBy(self.sig.clone(), entry_delete.clone()),
DhtOp::RegisterDeletedEntryHeader(self.sig, entry_delete),
];
(element, ops)
}
fn link_add(mut self) -> (Element, Vec<DhtOp>) {
let element = self.to_element(self.link_add.clone().into(), None);
let header: Header = self.link_add.clone().into();
let ops = vec![
DhtOp::StoreElement(self.sig.clone(), header.clone(), None),
DhtOp::RegisterAgentActivity(self.sig.clone(), header.clone()),
DhtOp::RegisterAddLink(self.sig.clone(), self.link_add.clone()),
];
(element, ops)
}
fn link_remove(mut self) -> (Element, Vec<DhtOp>) {
let element = self.to_element(self.link_remove.clone().into(), None);
let header: Header = self.link_remove.clone().into();
let ops = vec![
DhtOp::StoreElement(self.sig.clone(), header.clone(), None),
DhtOp::RegisterAgentActivity(self.sig.clone(), header.clone()),
DhtOp::RegisterRemoveLink(self.sig.clone(), self.link_remove.clone()),
];
(element, ops)
}
fn others(mut self) -> Vec<(Element, Vec<DhtOp>)> {
let mut elements = Vec::new();
elements.push(self.to_element(self.dna.clone().into(), None));
elements.push(self.to_element(self.chain_open.clone().into(), None));
elements.push(self.to_element(self.chain_close.clone().into(), None));
elements.push(self.to_element(self.agent_validation_pkg.clone().into(), None));
elements.push(self.to_element(self.init_zomes_complete.clone().into(), None));
let mut chain_elements = Vec::new();
for element in elements {
let header: Header = element.header().clone();
let ops = vec![
DhtOp::StoreElement(self.sig.clone(), header.clone(), None),
DhtOp::RegisterAgentActivity(self.sig.clone(), header.clone()),
];
chain_elements.push((element, ops));
}
chain_elements
}
fn to_element(&mut self, header: Header, entry: Option<Entry>) -> Element {
let h = HeaderHashed::with_pre_hashed(header.clone(), self.header_hash.clone());
let h = SignedHeaderHashed::with_presigned(h, self.sig.clone());
Element::new(h, entry.clone())
}
}
#[tokio::test(flavor = "multi_thread")]
async fn test_all_ops() {
observability::test_run().ok();
let builder = ElementTest::new();
let (element, expected) = builder.entry_create();
let result = produce_ops_from_element(&element).unwrap();
assert_eq!(result, expected);
let builder = ElementTest::new();
let (element, expected) = builder.entry_update();
let result = produce_ops_from_element(&element).unwrap();
assert_eq!(result, expected);
let builder = ElementTest::new();
let (element, expected) = builder.entry_delete();
let result = produce_ops_from_element(&element).unwrap();
assert_eq!(result, expected);
let builder = ElementTest::new();
let (element, expected) = builder.link_add();
let result = produce_ops_from_element(&element).unwrap();
assert_eq!(result, expected);
let builder = ElementTest::new();
let (element, expected) = builder.link_remove();
let result = produce_ops_from_element(&element).unwrap();
assert_eq!(result, expected);
let builder = ElementTest::new();
let elements = builder.others();
for (element, expected) in elements {
debug!(?element);
let result = produce_ops_from_element(&element).unwrap();
assert_eq!(result, expected);
}
}
#[tokio::test(flavor = "multi_thread")]
async fn test_dht_basis() {
// Create a header that points to an entry
let original_header = fixt!(Create);
let expected_entry_hash: AnyDhtHash = original_header.entry_hash.clone().into();
let original_header_hash =
HeaderHashed::from_content_sync(Header::Create(original_header.clone()));
let original_header_hash = original_header_hash.into_inner().1;
// Create the update header with the same hash
let update_new_entry = fixt!(Entry);
let mut entry_update = fixt!(Update, update_new_entry.clone());
entry_update.original_entry_address = original_header.entry_hash.clone();
entry_update.original_header_address = original_header_hash;
// Create the op
let op = DhtOp::RegisterUpdatedContent(
fixt!(Signature),
entry_update,
Some(update_new_entry.into()),
);
// Get the basis
let result = op.dht_basis();
// Check the hash matches
assert_eq!(expected_entry_hash, result);
}
| 36.665455 | 93 | 0.613805 |
df971cb7f86de38ee4e25e3e47bf4158c7fece5a | 1,723 | ts | TypeScript | src/commands/takeoff/destroy-project.ts | Takeoff-Env/takeoff | 5cfac571159c3ee7815582c6888096422460a015 | [
"MIT"
] | 101 | 2017-09-26T03:23:23.000Z | 2022-01-23T05:06:49.000Z | src/commands/takeoff/destroy-project.ts | Takeoff-Env/takeoff | 5cfac571159c3ee7815582c6888096422460a015 | [
"MIT"
] | 18 | 2017-09-27T17:02:01.000Z | 2018-12-24T11:02:37.000Z | src/commands/takeoff/destroy-project.ts | Takeoff-Env/takeoff | 5cfac571159c3ee7815582c6888096422460a015 | [
"MIT"
] | 12 | 2017-10-11T16:59:19.000Z | 2018-05-16T12:46:33.000Z | import { CommandResult, TakeoffCommand } from 'commands';
import { TakeoffCmdParameters } from 'takeoff';
import { ExitCode } from 'task';
/**
* Destroys an project in a non-reversable way
*/
export = ({
shell,
args,
opts,
rcFile,
pathExists,
printMessage,
runCommand,
}: TakeoffCmdParameters): TakeoffCommand => ({
args: '<name>',
command: 'destroy',
description:
'Destroys the docker containers for a project. Can also optionally remove the folder, this operation cannot be reversed.',
group: 'takeoff',
options: [
{
description: 'Also removes the directory, otherwise only docker images and volumes are destroyed',
option: '-r, --remove-dir',
},
],
handler(): CommandResult {
const [project]: string[] = args.length > 0 ? args : ['default'];
printMessage(`Destroying project ${project}`);
const envDir = `${rcFile.rcRoot}/projects/${project}`;
if (!pathExists(envDir)) {
return { code: ExitCode.Error, fail: `The project ${project} doesn't exist` };
}
const runCmd = runCommand(`docker-compose -f docker/docker-compose.yml down --rmi all`, envDir);
if (runCmd.code !== 0) {
return { extra: runCmd.stderr, code: runCmd.code, fail: `Error destroying ${project}` };
}
if (opts['r'] || opts['remove-dir']) {
printMessage(`Removing folder ${envDir}`);
const removeFolder = shell.rm('-rf', `${envDir}`);
if (removeFolder.code !== 0) {
return { extra: removeFolder.stderr, code: removeFolder.code, fail: `Error deleting ${project}` };
}
printMessage(`Folder ${envDir} removed`);
}
return { code: ExitCode.Success, success: `Successfully destroyed ${project}` };
},
});
| 30.22807 | 126 | 0.639582 |
7d9304bcb5274c738a552fcc4b9e587ae3cfc878 | 28 | html | HTML | src/app/components/comp-3073/comp-3073.component.html | angular/angular-cli-stress-test | 4c4df522fbbd0989068810c3d77dd0563f87b58b | [
"MIT"
] | 4 | 2017-06-08T06:44:29.000Z | 2021-10-30T02:37:50.000Z | src/app/components/comp-3073/comp-3073.component.html | angular/angular-cli-stress-test | 4c4df522fbbd0989068810c3d77dd0563f87b58b | [
"MIT"
] | null | null | null | src/app/components/comp-3073/comp-3073.component.html | angular/angular-cli-stress-test | 4c4df522fbbd0989068810c3d77dd0563f87b58b | [
"MIT"
] | 5 | 2017-07-15T23:26:24.000Z | 2021-05-14T15:47:04.000Z | <p>
comp-3073 works!
</p>
| 7 | 18 | 0.535714 |
95e50168b1d4ab6401c125f8af435ebdb528c53c | 1,278 | css | CSS | data/usercss/150865.user.css | 33kk/uso-archive | 2c4962d1d507ff0eaec6dcca555efc531b37a9b4 | [
"MIT"
] | 118 | 2020-08-28T19:59:28.000Z | 2022-03-26T16:28:40.000Z | data/usercss/150865.user.css | 33kk/uso-archive | 2c4962d1d507ff0eaec6dcca555efc531b37a9b4 | [
"MIT"
] | 38 | 2020-09-02T01:08:45.000Z | 2022-01-23T02:47:24.000Z | data/usercss/150865.user.css | 33kk/uso-archive | 2c4962d1d507ff0eaec6dcca555efc531b37a9b4 | [
"MIT"
] | 21 | 2020-08-19T01:12:43.000Z | 2022-03-15T21:55:17.000Z | /* ==UserStyle==
@name getpocket.com - MayaDark
@namespace USO Archive
@author nostrenz
@description `Just a realy simple dark theme for getpocket.com`
@version 20171108.22.28
@license NO-REDISTRIBUTION
@preprocessor uso
==/UserStyle== */
@-moz-document url-prefix("https://getpocket.com/a/") {
body,
.side-nav {
background-color: #262626 !important;
}
.pkt-nav,
.searchtoolbar_screenbar,
.side-nav,
.item_content_actionscontainer {
border: none !important;
}
.pkt-nav,
.searchtoolbar_screenbar {
background: #212121 !important;
}
.queue_togglesection_text,
.pagenav_options_link,
.nav-sublist,
.section-archive,
.filter-tags,
.section-favorites,
.reader_content .reader_head h1 {
color: white !important;
}
.item_content,
.item_content_text,
.reader_content {
background-color: #333333;
border: none !important;
color: #D2D2D2;
}
.item_content > .title,
.item_content_text > .title,
.queue_title {
color: #D2D2D2;
}
.item_content > .sub,
.item_content_actions {
background-color: #5C5C5C !important;
}
.item:hover .buttons,
.sub:hover .buttons,
.item_content > .buttons:hover {
background-color: #5C5C5C !important;
}
}
| 20.285714 | 66 | 0.661189 |
c7ee74f718e9c03369bf61d1f159bfb87fbdf316 | 4,364 | java | Java | engine/src/test/java/org/teiid/query/processor/TestArrayTable.java | GavinRay97/teiid | f7ae9acdc372718e15aa9f5827b267dfd68db5a5 | [
"Apache-2.0"
] | 249 | 2015-01-04T12:32:56.000Z | 2022-03-22T07:00:46.000Z | engine/src/test/java/org/teiid/query/processor/TestArrayTable.java | GavinRay97/teiid | f7ae9acdc372718e15aa9f5827b267dfd68db5a5 | [
"Apache-2.0"
] | 312 | 2015-01-06T19:01:51.000Z | 2022-03-10T17:49:37.000Z | engine/src/test/java/org/teiid/query/processor/TestArrayTable.java | GavinRay97/teiid | f7ae9acdc372718e15aa9f5827b267dfd68db5a5 | [
"Apache-2.0"
] | 256 | 2015-01-06T18:14:39.000Z | 2022-03-23T17:55:42.000Z | /*
* Copyright Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags and
* the COPYRIGHT.txt file distributed with this work.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.teiid.query.processor;
import static org.junit.Assert.*;
import static org.teiid.query.processor.TestProcessor.*;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.junit.Test;
import org.teiid.core.TeiidProcessingException;
import org.teiid.query.parser.QueryParser;
import org.teiid.query.unittest.RealMetadataFactory;
@SuppressWarnings({"unchecked", "nls"})
public class TestArrayTable {
@Test public void testCorrelatedArrayTable() throws Exception {
String sql = "select x.* from bqt1.smalla, arraytable(objectvalue COLUMNS x string, y integer) x"; //$NON-NLS-1$
List<?>[] expected = new List[] {
Arrays.asList("a", 1),
Arrays.asList("b", 3),
};
process(sql, expected);
}
@Test public void testCorrelatedArrayTable1() throws Exception {
String sql = "select z from bqt1.smalla, arraytable(objectvalue COLUMNS x string, y integer, z long) x"; //$NON-NLS-1$
List<?>[] expected = new List[] {
Arrays.asList(Long.valueOf(2)),
Arrays.asList(Long.valueOf(6)),
};
process(sql, expected);
}
@Test(expected=TeiidProcessingException.class) public void testCorrelatedArrayTable2() throws Exception {
String sql = "select y from bqt1.smalla, arraytable(objectvalue COLUMNS y integer) x"; //$NON-NLS-1$
List<?>[] expected = new List[] {};
process(sql, expected);
}
@Test public void testCorrelatedArrayTable3() throws Exception {
String sql = "select x.* from bqt1.smalla, arraytable(objectvalue COLUMNS x string, y integer, z integer, aa object) x"; //$NON-NLS-1$
List<?>[] expected = new List[] {
Arrays.asList("a", 1, 2, null),
Arrays.asList("b", 3, 6, null),
};
process(sql, expected);
}
//should not work as we are passing 1-dimensional arrays
@Test(expected=TeiidProcessingException.class) public void testCorrelatedMultiRowArrayTable() throws Exception {
String sql = "select z from bqt1.smalla, arraytable(rows objectvalue COLUMNS z long) x"; //$NON-NLS-1$
process(sql, null);
}
@Test public void testMultiRowArrayTable() throws Exception {
String sql = "select * from arraytable(rows ((1,'a'),(2,'b'),(3,)) COLUMNS x integer, y string) x"; //$NON-NLS-1$
assertEquals("SELECT * FROM ARRAYTABLE(ROWS ((1, 'a'), (2, 'b'), (3,)) COLUMNS x integer, y string) AS x", QueryParser.getQueryParser().parseCommand(sql).toString());
List<?>[] expected = new List[] {
Arrays.asList(1, "a"),
Arrays.asList(2, "b"),
Arrays.asList(3, null),
};
process(sql, expected);
}
@Test(expected=TeiidProcessingException.class) public void testMultiRowArrayTableFails() throws Exception {
String sql = "select * from arraytable(rows (1,'a') COLUMNS x integer, y string) x"; //$NON-NLS-1$
process(sql, null);
}
public static void process(String sql, List<?>[] expectedResults) throws Exception {
HardcodedDataManager dataManager = new HardcodedDataManager();
dataManager.addData("SELECT BQT1.SmallA.ObjectValue FROM BQT1.SmallA", new List[] {Collections.singletonList(new Object[] {"a", 1, 2}), Collections.singletonList(new Object[] {"b", 3, 6}), Collections.singletonList(null)} );
ProcessorPlan plan = helpGetPlan(helpParse(sql), RealMetadataFactory.exampleBQTCached());
helpProcess(plan, createCommandContext(), dataManager, expectedResults);
}
}
| 38.964286 | 232 | 0.663611 |
752c59408df602ac6da29eb178cbb99d466f7523 | 347 | h | C | sources/MachIncludes/_ToolCabinet.h | mariusl/step2mach | 3ac7ff6c3b29a25f4520e4325e7922f2d34c547a | [
"MIT"
] | 6 | 2019-05-22T03:18:38.000Z | 2022-02-07T20:54:38.000Z | sources/MachIncludes/_ToolCabinet.h | mariusl/step2mach | 3ac7ff6c3b29a25f4520e4325e7922f2d34c547a | [
"MIT"
] | 1 | 2019-11-10T05:57:09.000Z | 2020-07-01T05:50:49.000Z | sources/MachIncludes/_ToolCabinet.h | mariusl/step2mach | 3ac7ff6c3b29a25f4520e4325e7922f2d34c547a | [
"MIT"
] | 9 | 2019-05-20T06:03:55.000Z | 2022-02-01T10:16:41.000Z | // _ToolCabinet - replacement for class based header
//
#pragma once
struct _Tool
{
short tiptype;
double diameter;
double direction;
TCHAR * description;
double xoffset;
double zoffset;
double xWear;
double zWear;
double turretang;
double toolradius;
int id;
bool front;
};
struct _ToolCabinet
{
_Tool Tools[255];
};
| 13.346154 | 52 | 0.70317 |
2a03da48f61dbfd21377faea85f2508c1578bae3 | 542 | java | Java | Program.java | phucnm/compiler-course | 459e9daccb5e5a1feff2b5656864eaab93e3988f | [
"MIT"
] | null | null | null | Program.java | phucnm/compiler-course | 459e9daccb5e5a1feff2b5656864eaab93e3988f | [
"MIT"
] | null | null | null | Program.java | phucnm/compiler-course | 459e9daccb5e5a1feff2b5656864eaab93e3988f | [
"MIT"
] | null | null | null | import java.util.Vector;
public class Program implements ASTNode {
Vector<Function> functions;
public Program()
{
functions = new Vector<Function>();
}
public void addFunction(Function s)
{
functions.add(s);
}
public Function getFunction(int index)
{
return functions.elementAt(index);
}
public int getFunctionCount()
{
return functions.size();
}
@Override
public Object accept(Visitor visitor)
{
return visitor.visit(this);
}
} | 17.483871 | 43 | 0.605166 |
d29a64249662933eed11e9ead4c0a07c4caae362 | 1,040 | php | PHP | app/Role.php | apurv4193/RYEC-Backend | 8682463777afad323d30f832693d5802f00c1dcd | [
"MIT"
] | null | null | null | app/Role.php | apurv4193/RYEC-Backend | 8682463777afad323d30f832693d5802f00c1dcd | [
"MIT"
] | null | null | null | app/Role.php | apurv4193/RYEC-Backend | 8682463777afad323d30f832693d5802f00c1dcd | [
"MIT"
] | null | null | null | <?php
namespace App;
use Illuminate\Database\Eloquent\Model;
use Auth;
use DB;
use Config;
class Role extends Model
{
protected $table = 'roles';
protected $fillable = ['id', 'slug', 'name', 'created_by', 'updated_by', 'status'];
public function insertUpdate($role)
{
if (isset($data['id']) && $data['id'] != '' && $data['id'] > 0) {
$updateData = [];
foreach ($this->fillable as $field) {
if (array_key_exists($field, $data)) {
$updateData[$field] = $data[$field];
}
}
return Role::where('id', $data['id'])->update($updateData);
} else {
return Role::create($data);
}
}
public function getAllRoles()
{
$roles = Role::where('status', '<>', Config::get('constant.DELETED_FLAG'))
->orderBy('id', 'DESC')
->paginate(Config::get('constant.ADMIN_RECORD_PER_PAGE'));
return $roles;
}
}
| 25.365854 | 90 | 0.493269 |
9cd8960e7fa0ed1792d1b9fe84ef85aa1dd1c2fa | 1,422 | lua | Lua | frameworks/cocos2d-x/cocos/scripting/lua-bindings/auto/api/WorldClock.lua | TshineZheng/DragonbonesCocos2dx | cf5e251092d23161dd4876353fa26dfe6425ff18 | [
"MIT"
] | 6 | 2016-12-28T08:38:00.000Z | 2019-03-28T04:51:54.000Z | frameworks/cocos2d-x/cocos/scripting/lua-bindings/auto/api/WorldClock.lua | TshineZheng/DragonbonesCocos2dx | cf5e251092d23161dd4876353fa26dfe6425ff18 | [
"MIT"
] | 2 | 2017-02-10T03:48:11.000Z | 2017-03-03T10:14:35.000Z | frameworks/cocos2d-x/cocos/scripting/lua-bindings/auto/api/WorldClock.lua | TshineZheng/DragonbonesCocos2dx | cf5e251092d23161dd4876353fa26dfe6425ff18 | [
"MIT"
] | 5 | 2017-04-20T07:31:39.000Z | 2022-01-16T15:38:14.000Z |
--------------------------------
-- @module WorldClock
-- @extend IAnimateble
-- @parent_module db
--------------------------------
--
-- @function [parent=#WorldClock] clear
-- @param self
-- @return WorldClock#WorldClock self (return value: db.WorldClock)
--------------------------------
--
-- @function [parent=#WorldClock] contains
-- @param self
-- @param #db.IAnimateble value
-- @return bool#bool ret (return value: bool)
--------------------------------
--
-- @function [parent=#WorldClock] advanceTime
-- @param self
-- @param #float passedTime
-- @return WorldClock#WorldClock self (return value: db.WorldClock)
--------------------------------
--
-- @function [parent=#WorldClock] remove
-- @param self
-- @param #db.Armature armature
-- @return WorldClock#WorldClock self (return value: db.WorldClock)
--------------------------------
--
-- @function [parent=#WorldClock] add
-- @param self
-- @param #db.Armature armature
-- @return WorldClock#WorldClock self (return value: db.WorldClock)
--------------------------------
--
-- @function [parent=#WorldClock] getInstance
-- @param self
-- @return WorldClock#WorldClock ret (return value: db.WorldClock)
--------------------------------
--
-- @function [parent=#WorldClock] WorldClock
-- @param self
-- @return WorldClock#WorldClock self (return value: db.WorldClock)
return nil
| 26.333333 | 67 | 0.548523 |
6521d6a54b4f63498690134ed41a4f7c9285231c | 2,936 | rs | Rust | tests/simple.rs | RustRome/dot-http | f4c7c7b754f53fc42e3c267ab95027dab631a561 | [
"Apache-2.0"
] | 116 | 2020-01-21T00:42:24.000Z | 2022-03-02T20:49:05.000Z | tests/simple.rs | RustRome/dot-http | f4c7c7b754f53fc42e3c267ab95027dab631a561 | [
"Apache-2.0"
] | 25 | 2020-01-20T22:09:29.000Z | 2022-01-14T02:59:56.000Z | tests/simple.rs | RustRome/dot-http | f4c7c7b754f53fc42e3c267ab95027dab631a561 | [
"Apache-2.0"
] | 6 | 2020-02-01T19:29:47.000Z | 2022-01-08T00:12:24.000Z | use crate::common::{create_file, DebugWriter};
use dot_http::output::parse_format;
use dot_http::output::print::FormattedOutputter;
use dot_http::{ClientConfig, Runtime};
use httpmock::{Mock, MockServer};
use std::borrow::BorrowMut;
mod common;
#[test]
fn simple_get() {
let server = MockServer::start();
Mock::new()
.expect_method(httpmock::Method::GET)
.expect_path("/simple_get/30")
.return_status(200)
.return_header("date", "")
.create_on(&server);
let env = "dev";
let snapshot_file = create_file("{}");
let env_file = create_file(r#"{"dev": {"id": 30}}"#);
let script_file = create_file(&format!(
"GET http://localhost:{port}/simple_get/{{{{id}}}}",
port = server.port()
));
let writer = &mut DebugWriter(String::new());
let request_format = "%R\n";
let response_format = "%R\n%H\n%B\n";
let mut outputter = FormattedOutputter::new(
writer,
parse_format(request_format).unwrap(),
parse_format(response_format).unwrap(),
);
let mut runtime = Runtime::new(
env,
&snapshot_file,
&env_file,
outputter.borrow_mut(),
ClientConfig::default(),
)
.unwrap();
runtime.execute(&script_file, 1, false).unwrap();
let DebugWriter(buf) = writer;
debug_assert_eq!(
*buf,
format!(
"\
GET http://localhost:{}/simple_get/30
HTTP/1.1 200 OK
date: \n\
content-length: 0\
\n\n\n",
server.port()
)
);
}
#[test]
fn simple_post() {
let server = MockServer::start();
Mock::new()
.expect_method(httpmock::Method::POST)
.expect_path("/simple_post")
.return_status(200)
.return_header("date", "")
.return_body(r#"{"value": true}"#)
.create_on(&server);
let env = "dev";
let snapshot_file = create_file("{}");
let env_file = create_file("{}");
let script_file = create_file(&format!(
"\
POST http://localhost:{port}/simple_post
{{
\"test\": \"body\"
}}\
",
port = server.port(),
));
let writer = &mut DebugWriter(String::new());
let request_format = "%R\n";
let response_format = "%R\n%H\n%B\n";
let mut outputter = FormattedOutputter::new(
writer,
parse_format(request_format).unwrap(),
parse_format(response_format).unwrap(),
);
let mut runtime = Runtime::new(
env,
&snapshot_file,
&env_file,
outputter.borrow_mut(),
ClientConfig::default(),
)
.unwrap();
runtime.execute(&script_file, 1, false).unwrap();
let DebugWriter(buf) = writer;
debug_assert_eq!(
*buf,
format!(
"\
POST http://localhost:{port}/simple_post
HTTP/1.1 200 OK
date: \n\
content-length: 15\
\n\n\
{{
\"value\": true
}}\n\
",
port = server.port()
)
);
}
| 23.11811 | 60 | 0.56812 |
5801b03c13ef3f52a81a86b0887eca78abce588e | 952 | c | C | release/src-ra-4300/linux/linux-2.6.36.x/arch/tile/lib/delay.c | zhoutao0712/rtn11pb1 | 09e6b6c7ef4b91be0a9374daeacc3ac9f2fa3a05 | [
"Apache-2.0"
] | 1 | 2022-03-19T06:38:01.000Z | 2022-03-19T06:38:01.000Z | release/src-ra-4300/linux/linux-2.6.36.x/arch/tile/lib/delay.c | zhoutao0712/rtn11pb1 | 09e6b6c7ef4b91be0a9374daeacc3ac9f2fa3a05 | [
"Apache-2.0"
] | null | null | null | release/src-ra-4300/linux/linux-2.6.36.x/arch/tile/lib/delay.c | zhoutao0712/rtn11pb1 | 09e6b6c7ef4b91be0a9374daeacc3ac9f2fa3a05 | [
"Apache-2.0"
] | 1 | 2022-03-19T06:38:03.000Z | 2022-03-19T06:38:03.000Z | /*
* Copyright 2010 Tilera Corporation. All Rights Reserved.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation, version 2.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE, GOOD TITLE or
* NON INFRINGEMENT. See the GNU General Public License for
* more details.
*/
#include <linux/module.h>
#include <linux/delay.h>
#include <linux/thread_info.h>
#include <asm/fixmap.h>
#include <hv/hypervisor.h>
void __udelay(unsigned long usecs)
{
hv_nanosleep(usecs * 1000);
}
EXPORT_SYMBOL(__udelay);
void __ndelay(unsigned long nsecs)
{
hv_nanosleep(nsecs);
}
EXPORT_SYMBOL(__ndelay);
/* FIXME: should be declared in a header somewhere. */
EXPORT_SYMBOL(__delay);
| 27.2 | 72 | 0.739496 |
5765c733af6ba1cea0b8111d75ad3c206f1e1092 | 30,592 | h | C | 3rdparty/aws-sdk-cpp-master/aws-cpp-sdk-elasticache/include/aws/elasticache/model/CopySnapshotRequest.h | prateek-s/mesos | 4b81147797e4d9a45e0b2f5e5634d4a214dbc4e8 | [
"Apache-2.0"
] | 2 | 2019-02-08T21:29:57.000Z | 2021-07-27T06:59:19.000Z | 3rdparty/aws-sdk-cpp-master/aws-cpp-sdk-elasticache/include/aws/elasticache/model/CopySnapshotRequest.h | prateek-s/mesos | 4b81147797e4d9a45e0b2f5e5634d4a214dbc4e8 | [
"Apache-2.0"
] | null | null | null | 3rdparty/aws-sdk-cpp-master/aws-cpp-sdk-elasticache/include/aws/elasticache/model/CopySnapshotRequest.h | prateek-s/mesos | 4b81147797e4d9a45e0b2f5e5634d4a214dbc4e8 | [
"Apache-2.0"
] | null | null | null | /*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
#pragma once
#include <aws/elasticache/ElastiCache_EXPORTS.h>
#include <aws/elasticache/ElastiCacheRequest.h>
#include <aws/core/utils/memory/stl/AWSString.h>
namespace Aws
{
namespace ElastiCache
{
namespace Model
{
/**
* <p>Represents the input of a <i>CopySnapshotMessage</i> action.</p>
*/
class AWS_ELASTICACHE_API CopySnapshotRequest : public ElastiCacheRequest
{
public:
CopySnapshotRequest();
Aws::String SerializePayload() const override;
/**
* <p>The name of an existing snapshot from which to make a copy.</p>
*/
inline const Aws::String& GetSourceSnapshotName() const{ return m_sourceSnapshotName; }
/**
* <p>The name of an existing snapshot from which to make a copy.</p>
*/
inline void SetSourceSnapshotName(const Aws::String& value) { m_sourceSnapshotNameHasBeenSet = true; m_sourceSnapshotName = value; }
/**
* <p>The name of an existing snapshot from which to make a copy.</p>
*/
inline void SetSourceSnapshotName(Aws::String&& value) { m_sourceSnapshotNameHasBeenSet = true; m_sourceSnapshotName = value; }
/**
* <p>The name of an existing snapshot from which to make a copy.</p>
*/
inline void SetSourceSnapshotName(const char* value) { m_sourceSnapshotNameHasBeenSet = true; m_sourceSnapshotName.assign(value); }
/**
* <p>The name of an existing snapshot from which to make a copy.</p>
*/
inline CopySnapshotRequest& WithSourceSnapshotName(const Aws::String& value) { SetSourceSnapshotName(value); return *this;}
/**
* <p>The name of an existing snapshot from which to make a copy.</p>
*/
inline CopySnapshotRequest& WithSourceSnapshotName(Aws::String&& value) { SetSourceSnapshotName(value); return *this;}
/**
* <p>The name of an existing snapshot from which to make a copy.</p>
*/
inline CopySnapshotRequest& WithSourceSnapshotName(const char* value) { SetSourceSnapshotName(value); return *this;}
/**
* <p>A name for the snapshot copy. ElastiCache does not permit overwriting a
* snapshot, therefore this name must be unique within its context - ElastiCache or
* an Amazon S3 bucket if exporting.</p> <p class="title"> <b>Error Message</b>
* </p> <ul> <li> <p> <b>Error Message:</b> The S3 bucket %s already contains an
* object with key %s.</p> <p> <b>Solution:</b> Give the <i>TargetSnapshotName</i>
* a new and unique value. If exporting a snapshot, you could alternatively create
* a new Amazon S3 bucket and use this same value for
* <i>TargetSnapshotName</i>.</p> </li> </ul>
*/
inline const Aws::String& GetTargetSnapshotName() const{ return m_targetSnapshotName; }
/**
* <p>A name for the snapshot copy. ElastiCache does not permit overwriting a
* snapshot, therefore this name must be unique within its context - ElastiCache or
* an Amazon S3 bucket if exporting.</p> <p class="title"> <b>Error Message</b>
* </p> <ul> <li> <p> <b>Error Message:</b> The S3 bucket %s already contains an
* object with key %s.</p> <p> <b>Solution:</b> Give the <i>TargetSnapshotName</i>
* a new and unique value. If exporting a snapshot, you could alternatively create
* a new Amazon S3 bucket and use this same value for
* <i>TargetSnapshotName</i>.</p> </li> </ul>
*/
inline void SetTargetSnapshotName(const Aws::String& value) { m_targetSnapshotNameHasBeenSet = true; m_targetSnapshotName = value; }
/**
* <p>A name for the snapshot copy. ElastiCache does not permit overwriting a
* snapshot, therefore this name must be unique within its context - ElastiCache or
* an Amazon S3 bucket if exporting.</p> <p class="title"> <b>Error Message</b>
* </p> <ul> <li> <p> <b>Error Message:</b> The S3 bucket %s already contains an
* object with key %s.</p> <p> <b>Solution:</b> Give the <i>TargetSnapshotName</i>
* a new and unique value. If exporting a snapshot, you could alternatively create
* a new Amazon S3 bucket and use this same value for
* <i>TargetSnapshotName</i>.</p> </li> </ul>
*/
inline void SetTargetSnapshotName(Aws::String&& value) { m_targetSnapshotNameHasBeenSet = true; m_targetSnapshotName = value; }
/**
* <p>A name for the snapshot copy. ElastiCache does not permit overwriting a
* snapshot, therefore this name must be unique within its context - ElastiCache or
* an Amazon S3 bucket if exporting.</p> <p class="title"> <b>Error Message</b>
* </p> <ul> <li> <p> <b>Error Message:</b> The S3 bucket %s already contains an
* object with key %s.</p> <p> <b>Solution:</b> Give the <i>TargetSnapshotName</i>
* a new and unique value. If exporting a snapshot, you could alternatively create
* a new Amazon S3 bucket and use this same value for
* <i>TargetSnapshotName</i>.</p> </li> </ul>
*/
inline void SetTargetSnapshotName(const char* value) { m_targetSnapshotNameHasBeenSet = true; m_targetSnapshotName.assign(value); }
/**
* <p>A name for the snapshot copy. ElastiCache does not permit overwriting a
* snapshot, therefore this name must be unique within its context - ElastiCache or
* an Amazon S3 bucket if exporting.</p> <p class="title"> <b>Error Message</b>
* </p> <ul> <li> <p> <b>Error Message:</b> The S3 bucket %s already contains an
* object with key %s.</p> <p> <b>Solution:</b> Give the <i>TargetSnapshotName</i>
* a new and unique value. If exporting a snapshot, you could alternatively create
* a new Amazon S3 bucket and use this same value for
* <i>TargetSnapshotName</i>.</p> </li> </ul>
*/
inline CopySnapshotRequest& WithTargetSnapshotName(const Aws::String& value) { SetTargetSnapshotName(value); return *this;}
/**
* <p>A name for the snapshot copy. ElastiCache does not permit overwriting a
* snapshot, therefore this name must be unique within its context - ElastiCache or
* an Amazon S3 bucket if exporting.</p> <p class="title"> <b>Error Message</b>
* </p> <ul> <li> <p> <b>Error Message:</b> The S3 bucket %s already contains an
* object with key %s.</p> <p> <b>Solution:</b> Give the <i>TargetSnapshotName</i>
* a new and unique value. If exporting a snapshot, you could alternatively create
* a new Amazon S3 bucket and use this same value for
* <i>TargetSnapshotName</i>.</p> </li> </ul>
*/
inline CopySnapshotRequest& WithTargetSnapshotName(Aws::String&& value) { SetTargetSnapshotName(value); return *this;}
/**
* <p>A name for the snapshot copy. ElastiCache does not permit overwriting a
* snapshot, therefore this name must be unique within its context - ElastiCache or
* an Amazon S3 bucket if exporting.</p> <p class="title"> <b>Error Message</b>
* </p> <ul> <li> <p> <b>Error Message:</b> The S3 bucket %s already contains an
* object with key %s.</p> <p> <b>Solution:</b> Give the <i>TargetSnapshotName</i>
* a new and unique value. If exporting a snapshot, you could alternatively create
* a new Amazon S3 bucket and use this same value for
* <i>TargetSnapshotName</i>.</p> </li> </ul>
*/
inline CopySnapshotRequest& WithTargetSnapshotName(const char* value) { SetTargetSnapshotName(value); return *this;}
/**
* <p>The Amazon S3 bucket to which the snapshot will be exported. This parameter
* is used only when exporting a snapshot for external access.</p> <p>When using
* this parameter to export a snapshot, be sure Amazon ElastiCache has the needed
* permissions to this S3 bucket. For more information, see <a
* href="http://docs.aws.amazon.com/AmazonElastiCache/AmazonElastiCache/latest/UserGuide/Snapshots.Exporting.html#Snapshots.Exporting.GrantAccess">Step
* 2: Grant ElastiCache Access to Your Amazon S3 Bucket</a> in the <i>Amazon
* ElastiCache User Guide</i>.</p> <p> <b>Error Messages:</b> </p> <p>You could
* receive one of the following error messages.</p> <p class="title"> <b>Erorr
* Messages</b> </p> <ul> <li> <p> <b>Error Message: </b> ElastiCache has not been
* granted READ permissions %s on the S3 Bucket.</p> <p> <b>Solution:</b> Add List
* and Read permissions on the bucket.</p> </li> <li> <p> <b>Error Message: </b>
* ElastiCache has not been granted WRITE permissions %s on the S3 Bucket.</p> <p>
* <b>Solution:</b> Add Upload/Delete permissions on the bucket.</p> </li> <li> <p>
* <b>Error Message: </b> ElastiCache has not been granted READ_ACP permissions %s
* on the S3 Bucket.</p> <p> <b>Solution:</b> Add View Permissions permissions on
* the bucket.</p> </li> <li> <p> <b>Error Message:</b> The S3 bucket %s is outside
* of the region.</p> <p> <b>Solution:</b> Before exporting your snapshot, create a
* new Amazon S3 bucket in the same region as your snapshot. For more information,
* see <a
* href="http://docs.aws.amazon.com/AmazonElastiCache/latest/UserGuide/Snapshots.Exporting.html#Snapshots.Exporting.CreateBucket">Step
* 1: Create an Amazon S3 Bucket</a>.</p> </li> <li> <p> <b>Error Message:</b> The
* S3 bucket %s does not exist.</p> <p> <b>Solution:</b> Create an Amazon S3 bucket
* in the same region as your snapshot. For more information, see <a
* href="http://docs.aws.amazon.com/AmazonElastiCache/latest/UserGuide/Snapshots.Exporting.html#Snapshots.Exporting.CreateBucket">Step
* 1: Create an Amazon S3 Bucket</a>.</p> </li> <li> <p> <b>Error Message:</b> The
* S3 bucket %s is not owned by the authenticated user.</p> <p> <b>Solution:</b>
* Create an Amazon S3 bucket in the same region as your snapshot. For more
* information, see <a
* href="http://docs.aws.amazon.com/AmazonElastiCache/latest/UserGuide/Snapshots.Exporting.html#Snapshots.Exporting.CreateBucket">Step
* 1: Create an Amazon S3 Bucket</a>.</p> </li> <li> <p> <b>Error Message:</b> The
* authenticated user does not have sufficient permissions to perform the desired
* activity.</p> <p> <b>Solution:</b> Contact your system administrator to get the
* needed permissions.</p> </li> </ul> <p>For more information, see <a
* href="http://docs.aws.amazon.com/AmazonElastiCache/latest/UserGuide/Snapshots.Exporting.html">Exporting
* a Snapshot</a> in the <i>Amazon ElastiCache User Guide</i>.</p>
*/
inline const Aws::String& GetTargetBucket() const{ return m_targetBucket; }
/**
* <p>The Amazon S3 bucket to which the snapshot will be exported. This parameter
* is used only when exporting a snapshot for external access.</p> <p>When using
* this parameter to export a snapshot, be sure Amazon ElastiCache has the needed
* permissions to this S3 bucket. For more information, see <a
* href="http://docs.aws.amazon.com/AmazonElastiCache/AmazonElastiCache/latest/UserGuide/Snapshots.Exporting.html#Snapshots.Exporting.GrantAccess">Step
* 2: Grant ElastiCache Access to Your Amazon S3 Bucket</a> in the <i>Amazon
* ElastiCache User Guide</i>.</p> <p> <b>Error Messages:</b> </p> <p>You could
* receive one of the following error messages.</p> <p class="title"> <b>Erorr
* Messages</b> </p> <ul> <li> <p> <b>Error Message: </b> ElastiCache has not been
* granted READ permissions %s on the S3 Bucket.</p> <p> <b>Solution:</b> Add List
* and Read permissions on the bucket.</p> </li> <li> <p> <b>Error Message: </b>
* ElastiCache has not been granted WRITE permissions %s on the S3 Bucket.</p> <p>
* <b>Solution:</b> Add Upload/Delete permissions on the bucket.</p> </li> <li> <p>
* <b>Error Message: </b> ElastiCache has not been granted READ_ACP permissions %s
* on the S3 Bucket.</p> <p> <b>Solution:</b> Add View Permissions permissions on
* the bucket.</p> </li> <li> <p> <b>Error Message:</b> The S3 bucket %s is outside
* of the region.</p> <p> <b>Solution:</b> Before exporting your snapshot, create a
* new Amazon S3 bucket in the same region as your snapshot. For more information,
* see <a
* href="http://docs.aws.amazon.com/AmazonElastiCache/latest/UserGuide/Snapshots.Exporting.html#Snapshots.Exporting.CreateBucket">Step
* 1: Create an Amazon S3 Bucket</a>.</p> </li> <li> <p> <b>Error Message:</b> The
* S3 bucket %s does not exist.</p> <p> <b>Solution:</b> Create an Amazon S3 bucket
* in the same region as your snapshot. For more information, see <a
* href="http://docs.aws.amazon.com/AmazonElastiCache/latest/UserGuide/Snapshots.Exporting.html#Snapshots.Exporting.CreateBucket">Step
* 1: Create an Amazon S3 Bucket</a>.</p> </li> <li> <p> <b>Error Message:</b> The
* S3 bucket %s is not owned by the authenticated user.</p> <p> <b>Solution:</b>
* Create an Amazon S3 bucket in the same region as your snapshot. For more
* information, see <a
* href="http://docs.aws.amazon.com/AmazonElastiCache/latest/UserGuide/Snapshots.Exporting.html#Snapshots.Exporting.CreateBucket">Step
* 1: Create an Amazon S3 Bucket</a>.</p> </li> <li> <p> <b>Error Message:</b> The
* authenticated user does not have sufficient permissions to perform the desired
* activity.</p> <p> <b>Solution:</b> Contact your system administrator to get the
* needed permissions.</p> </li> </ul> <p>For more information, see <a
* href="http://docs.aws.amazon.com/AmazonElastiCache/latest/UserGuide/Snapshots.Exporting.html">Exporting
* a Snapshot</a> in the <i>Amazon ElastiCache User Guide</i>.</p>
*/
inline void SetTargetBucket(const Aws::String& value) { m_targetBucketHasBeenSet = true; m_targetBucket = value; }
/**
* <p>The Amazon S3 bucket to which the snapshot will be exported. This parameter
* is used only when exporting a snapshot for external access.</p> <p>When using
* this parameter to export a snapshot, be sure Amazon ElastiCache has the needed
* permissions to this S3 bucket. For more information, see <a
* href="http://docs.aws.amazon.com/AmazonElastiCache/AmazonElastiCache/latest/UserGuide/Snapshots.Exporting.html#Snapshots.Exporting.GrantAccess">Step
* 2: Grant ElastiCache Access to Your Amazon S3 Bucket</a> in the <i>Amazon
* ElastiCache User Guide</i>.</p> <p> <b>Error Messages:</b> </p> <p>You could
* receive one of the following error messages.</p> <p class="title"> <b>Erorr
* Messages</b> </p> <ul> <li> <p> <b>Error Message: </b> ElastiCache has not been
* granted READ permissions %s on the S3 Bucket.</p> <p> <b>Solution:</b> Add List
* and Read permissions on the bucket.</p> </li> <li> <p> <b>Error Message: </b>
* ElastiCache has not been granted WRITE permissions %s on the S3 Bucket.</p> <p>
* <b>Solution:</b> Add Upload/Delete permissions on the bucket.</p> </li> <li> <p>
* <b>Error Message: </b> ElastiCache has not been granted READ_ACP permissions %s
* on the S3 Bucket.</p> <p> <b>Solution:</b> Add View Permissions permissions on
* the bucket.</p> </li> <li> <p> <b>Error Message:</b> The S3 bucket %s is outside
* of the region.</p> <p> <b>Solution:</b> Before exporting your snapshot, create a
* new Amazon S3 bucket in the same region as your snapshot. For more information,
* see <a
* href="http://docs.aws.amazon.com/AmazonElastiCache/latest/UserGuide/Snapshots.Exporting.html#Snapshots.Exporting.CreateBucket">Step
* 1: Create an Amazon S3 Bucket</a>.</p> </li> <li> <p> <b>Error Message:</b> The
* S3 bucket %s does not exist.</p> <p> <b>Solution:</b> Create an Amazon S3 bucket
* in the same region as your snapshot. For more information, see <a
* href="http://docs.aws.amazon.com/AmazonElastiCache/latest/UserGuide/Snapshots.Exporting.html#Snapshots.Exporting.CreateBucket">Step
* 1: Create an Amazon S3 Bucket</a>.</p> </li> <li> <p> <b>Error Message:</b> The
* S3 bucket %s is not owned by the authenticated user.</p> <p> <b>Solution:</b>
* Create an Amazon S3 bucket in the same region as your snapshot. For more
* information, see <a
* href="http://docs.aws.amazon.com/AmazonElastiCache/latest/UserGuide/Snapshots.Exporting.html#Snapshots.Exporting.CreateBucket">Step
* 1: Create an Amazon S3 Bucket</a>.</p> </li> <li> <p> <b>Error Message:</b> The
* authenticated user does not have sufficient permissions to perform the desired
* activity.</p> <p> <b>Solution:</b> Contact your system administrator to get the
* needed permissions.</p> </li> </ul> <p>For more information, see <a
* href="http://docs.aws.amazon.com/AmazonElastiCache/latest/UserGuide/Snapshots.Exporting.html">Exporting
* a Snapshot</a> in the <i>Amazon ElastiCache User Guide</i>.</p>
*/
inline void SetTargetBucket(Aws::String&& value) { m_targetBucketHasBeenSet = true; m_targetBucket = value; }
/**
* <p>The Amazon S3 bucket to which the snapshot will be exported. This parameter
* is used only when exporting a snapshot for external access.</p> <p>When using
* this parameter to export a snapshot, be sure Amazon ElastiCache has the needed
* permissions to this S3 bucket. For more information, see <a
* href="http://docs.aws.amazon.com/AmazonElastiCache/AmazonElastiCache/latest/UserGuide/Snapshots.Exporting.html#Snapshots.Exporting.GrantAccess">Step
* 2: Grant ElastiCache Access to Your Amazon S3 Bucket</a> in the <i>Amazon
* ElastiCache User Guide</i>.</p> <p> <b>Error Messages:</b> </p> <p>You could
* receive one of the following error messages.</p> <p class="title"> <b>Erorr
* Messages</b> </p> <ul> <li> <p> <b>Error Message: </b> ElastiCache has not been
* granted READ permissions %s on the S3 Bucket.</p> <p> <b>Solution:</b> Add List
* and Read permissions on the bucket.</p> </li> <li> <p> <b>Error Message: </b>
* ElastiCache has not been granted WRITE permissions %s on the S3 Bucket.</p> <p>
* <b>Solution:</b> Add Upload/Delete permissions on the bucket.</p> </li> <li> <p>
* <b>Error Message: </b> ElastiCache has not been granted READ_ACP permissions %s
* on the S3 Bucket.</p> <p> <b>Solution:</b> Add View Permissions permissions on
* the bucket.</p> </li> <li> <p> <b>Error Message:</b> The S3 bucket %s is outside
* of the region.</p> <p> <b>Solution:</b> Before exporting your snapshot, create a
* new Amazon S3 bucket in the same region as your snapshot. For more information,
* see <a
* href="http://docs.aws.amazon.com/AmazonElastiCache/latest/UserGuide/Snapshots.Exporting.html#Snapshots.Exporting.CreateBucket">Step
* 1: Create an Amazon S3 Bucket</a>.</p> </li> <li> <p> <b>Error Message:</b> The
* S3 bucket %s does not exist.</p> <p> <b>Solution:</b> Create an Amazon S3 bucket
* in the same region as your snapshot. For more information, see <a
* href="http://docs.aws.amazon.com/AmazonElastiCache/latest/UserGuide/Snapshots.Exporting.html#Snapshots.Exporting.CreateBucket">Step
* 1: Create an Amazon S3 Bucket</a>.</p> </li> <li> <p> <b>Error Message:</b> The
* S3 bucket %s is not owned by the authenticated user.</p> <p> <b>Solution:</b>
* Create an Amazon S3 bucket in the same region as your snapshot. For more
* information, see <a
* href="http://docs.aws.amazon.com/AmazonElastiCache/latest/UserGuide/Snapshots.Exporting.html#Snapshots.Exporting.CreateBucket">Step
* 1: Create an Amazon S3 Bucket</a>.</p> </li> <li> <p> <b>Error Message:</b> The
* authenticated user does not have sufficient permissions to perform the desired
* activity.</p> <p> <b>Solution:</b> Contact your system administrator to get the
* needed permissions.</p> </li> </ul> <p>For more information, see <a
* href="http://docs.aws.amazon.com/AmazonElastiCache/latest/UserGuide/Snapshots.Exporting.html">Exporting
* a Snapshot</a> in the <i>Amazon ElastiCache User Guide</i>.</p>
*/
inline void SetTargetBucket(const char* value) { m_targetBucketHasBeenSet = true; m_targetBucket.assign(value); }
/**
* <p>The Amazon S3 bucket to which the snapshot will be exported. This parameter
* is used only when exporting a snapshot for external access.</p> <p>When using
* this parameter to export a snapshot, be sure Amazon ElastiCache has the needed
* permissions to this S3 bucket. For more information, see <a
* href="http://docs.aws.amazon.com/AmazonElastiCache/AmazonElastiCache/latest/UserGuide/Snapshots.Exporting.html#Snapshots.Exporting.GrantAccess">Step
* 2: Grant ElastiCache Access to Your Amazon S3 Bucket</a> in the <i>Amazon
* ElastiCache User Guide</i>.</p> <p> <b>Error Messages:</b> </p> <p>You could
* receive one of the following error messages.</p> <p class="title"> <b>Erorr
* Messages</b> </p> <ul> <li> <p> <b>Error Message: </b> ElastiCache has not been
* granted READ permissions %s on the S3 Bucket.</p> <p> <b>Solution:</b> Add List
* and Read permissions on the bucket.</p> </li> <li> <p> <b>Error Message: </b>
* ElastiCache has not been granted WRITE permissions %s on the S3 Bucket.</p> <p>
* <b>Solution:</b> Add Upload/Delete permissions on the bucket.</p> </li> <li> <p>
* <b>Error Message: </b> ElastiCache has not been granted READ_ACP permissions %s
* on the S3 Bucket.</p> <p> <b>Solution:</b> Add View Permissions permissions on
* the bucket.</p> </li> <li> <p> <b>Error Message:</b> The S3 bucket %s is outside
* of the region.</p> <p> <b>Solution:</b> Before exporting your snapshot, create a
* new Amazon S3 bucket in the same region as your snapshot. For more information,
* see <a
* href="http://docs.aws.amazon.com/AmazonElastiCache/latest/UserGuide/Snapshots.Exporting.html#Snapshots.Exporting.CreateBucket">Step
* 1: Create an Amazon S3 Bucket</a>.</p> </li> <li> <p> <b>Error Message:</b> The
* S3 bucket %s does not exist.</p> <p> <b>Solution:</b> Create an Amazon S3 bucket
* in the same region as your snapshot. For more information, see <a
* href="http://docs.aws.amazon.com/AmazonElastiCache/latest/UserGuide/Snapshots.Exporting.html#Snapshots.Exporting.CreateBucket">Step
* 1: Create an Amazon S3 Bucket</a>.</p> </li> <li> <p> <b>Error Message:</b> The
* S3 bucket %s is not owned by the authenticated user.</p> <p> <b>Solution:</b>
* Create an Amazon S3 bucket in the same region as your snapshot. For more
* information, see <a
* href="http://docs.aws.amazon.com/AmazonElastiCache/latest/UserGuide/Snapshots.Exporting.html#Snapshots.Exporting.CreateBucket">Step
* 1: Create an Amazon S3 Bucket</a>.</p> </li> <li> <p> <b>Error Message:</b> The
* authenticated user does not have sufficient permissions to perform the desired
* activity.</p> <p> <b>Solution:</b> Contact your system administrator to get the
* needed permissions.</p> </li> </ul> <p>For more information, see <a
* href="http://docs.aws.amazon.com/AmazonElastiCache/latest/UserGuide/Snapshots.Exporting.html">Exporting
* a Snapshot</a> in the <i>Amazon ElastiCache User Guide</i>.</p>
*/
inline CopySnapshotRequest& WithTargetBucket(const Aws::String& value) { SetTargetBucket(value); return *this;}
/**
* <p>The Amazon S3 bucket to which the snapshot will be exported. This parameter
* is used only when exporting a snapshot for external access.</p> <p>When using
* this parameter to export a snapshot, be sure Amazon ElastiCache has the needed
* permissions to this S3 bucket. For more information, see <a
* href="http://docs.aws.amazon.com/AmazonElastiCache/AmazonElastiCache/latest/UserGuide/Snapshots.Exporting.html#Snapshots.Exporting.GrantAccess">Step
* 2: Grant ElastiCache Access to Your Amazon S3 Bucket</a> in the <i>Amazon
* ElastiCache User Guide</i>.</p> <p> <b>Error Messages:</b> </p> <p>You could
* receive one of the following error messages.</p> <p class="title"> <b>Erorr
* Messages</b> </p> <ul> <li> <p> <b>Error Message: </b> ElastiCache has not been
* granted READ permissions %s on the S3 Bucket.</p> <p> <b>Solution:</b> Add List
* and Read permissions on the bucket.</p> </li> <li> <p> <b>Error Message: </b>
* ElastiCache has not been granted WRITE permissions %s on the S3 Bucket.</p> <p>
* <b>Solution:</b> Add Upload/Delete permissions on the bucket.</p> </li> <li> <p>
* <b>Error Message: </b> ElastiCache has not been granted READ_ACP permissions %s
* on the S3 Bucket.</p> <p> <b>Solution:</b> Add View Permissions permissions on
* the bucket.</p> </li> <li> <p> <b>Error Message:</b> The S3 bucket %s is outside
* of the region.</p> <p> <b>Solution:</b> Before exporting your snapshot, create a
* new Amazon S3 bucket in the same region as your snapshot. For more information,
* see <a
* href="http://docs.aws.amazon.com/AmazonElastiCache/latest/UserGuide/Snapshots.Exporting.html#Snapshots.Exporting.CreateBucket">Step
* 1: Create an Amazon S3 Bucket</a>.</p> </li> <li> <p> <b>Error Message:</b> The
* S3 bucket %s does not exist.</p> <p> <b>Solution:</b> Create an Amazon S3 bucket
* in the same region as your snapshot. For more information, see <a
* href="http://docs.aws.amazon.com/AmazonElastiCache/latest/UserGuide/Snapshots.Exporting.html#Snapshots.Exporting.CreateBucket">Step
* 1: Create an Amazon S3 Bucket</a>.</p> </li> <li> <p> <b>Error Message:</b> The
* S3 bucket %s is not owned by the authenticated user.</p> <p> <b>Solution:</b>
* Create an Amazon S3 bucket in the same region as your snapshot. For more
* information, see <a
* href="http://docs.aws.amazon.com/AmazonElastiCache/latest/UserGuide/Snapshots.Exporting.html#Snapshots.Exporting.CreateBucket">Step
* 1: Create an Amazon S3 Bucket</a>.</p> </li> <li> <p> <b>Error Message:</b> The
* authenticated user does not have sufficient permissions to perform the desired
* activity.</p> <p> <b>Solution:</b> Contact your system administrator to get the
* needed permissions.</p> </li> </ul> <p>For more information, see <a
* href="http://docs.aws.amazon.com/AmazonElastiCache/latest/UserGuide/Snapshots.Exporting.html">Exporting
* a Snapshot</a> in the <i>Amazon ElastiCache User Guide</i>.</p>
*/
inline CopySnapshotRequest& WithTargetBucket(Aws::String&& value) { SetTargetBucket(value); return *this;}
/**
* <p>The Amazon S3 bucket to which the snapshot will be exported. This parameter
* is used only when exporting a snapshot for external access.</p> <p>When using
* this parameter to export a snapshot, be sure Amazon ElastiCache has the needed
* permissions to this S3 bucket. For more information, see <a
* href="http://docs.aws.amazon.com/AmazonElastiCache/AmazonElastiCache/latest/UserGuide/Snapshots.Exporting.html#Snapshots.Exporting.GrantAccess">Step
* 2: Grant ElastiCache Access to Your Amazon S3 Bucket</a> in the <i>Amazon
* ElastiCache User Guide</i>.</p> <p> <b>Error Messages:</b> </p> <p>You could
* receive one of the following error messages.</p> <p class="title"> <b>Erorr
* Messages</b> </p> <ul> <li> <p> <b>Error Message: </b> ElastiCache has not been
* granted READ permissions %s on the S3 Bucket.</p> <p> <b>Solution:</b> Add List
* and Read permissions on the bucket.</p> </li> <li> <p> <b>Error Message: </b>
* ElastiCache has not been granted WRITE permissions %s on the S3 Bucket.</p> <p>
* <b>Solution:</b> Add Upload/Delete permissions on the bucket.</p> </li> <li> <p>
* <b>Error Message: </b> ElastiCache has not been granted READ_ACP permissions %s
* on the S3 Bucket.</p> <p> <b>Solution:</b> Add View Permissions permissions on
* the bucket.</p> </li> <li> <p> <b>Error Message:</b> The S3 bucket %s is outside
* of the region.</p> <p> <b>Solution:</b> Before exporting your snapshot, create a
* new Amazon S3 bucket in the same region as your snapshot. For more information,
* see <a
* href="http://docs.aws.amazon.com/AmazonElastiCache/latest/UserGuide/Snapshots.Exporting.html#Snapshots.Exporting.CreateBucket">Step
* 1: Create an Amazon S3 Bucket</a>.</p> </li> <li> <p> <b>Error Message:</b> The
* S3 bucket %s does not exist.</p> <p> <b>Solution:</b> Create an Amazon S3 bucket
* in the same region as your snapshot. For more information, see <a
* href="http://docs.aws.amazon.com/AmazonElastiCache/latest/UserGuide/Snapshots.Exporting.html#Snapshots.Exporting.CreateBucket">Step
* 1: Create an Amazon S3 Bucket</a>.</p> </li> <li> <p> <b>Error Message:</b> The
* S3 bucket %s is not owned by the authenticated user.</p> <p> <b>Solution:</b>
* Create an Amazon S3 bucket in the same region as your snapshot. For more
* information, see <a
* href="http://docs.aws.amazon.com/AmazonElastiCache/latest/UserGuide/Snapshots.Exporting.html#Snapshots.Exporting.CreateBucket">Step
* 1: Create an Amazon S3 Bucket</a>.</p> </li> <li> <p> <b>Error Message:</b> The
* authenticated user does not have sufficient permissions to perform the desired
* activity.</p> <p> <b>Solution:</b> Contact your system administrator to get the
* needed permissions.</p> </li> </ul> <p>For more information, see <a
* href="http://docs.aws.amazon.com/AmazonElastiCache/latest/UserGuide/Snapshots.Exporting.html">Exporting
* a Snapshot</a> in the <i>Amazon ElastiCache User Guide</i>.</p>
*/
inline CopySnapshotRequest& WithTargetBucket(const char* value) { SetTargetBucket(value); return *this;}
private:
Aws::String m_sourceSnapshotName;
bool m_sourceSnapshotNameHasBeenSet;
Aws::String m_targetSnapshotName;
bool m_targetSnapshotNameHasBeenSet;
Aws::String m_targetBucket;
bool m_targetBucketHasBeenSet;
};
} // namespace Model
} // namespace ElastiCache
} // namespace Aws
| 69.527273 | 155 | 0.681747 |
a1baef8ae5894d585e6fa5ef690710b66f025a24 | 1,940 | go | Go | lint/internal/astwalk/type_expr_walker.go | olshevskiy87/go-critic | a453eb79bfc744eb7abf2c41ebeca0c2ec2ed0f3 | [
"MIT"
] | null | null | null | lint/internal/astwalk/type_expr_walker.go | olshevskiy87/go-critic | a453eb79bfc744eb7abf2c41ebeca0c2ec2ed0f3 | [
"MIT"
] | null | null | null | lint/internal/astwalk/type_expr_walker.go | olshevskiy87/go-critic | a453eb79bfc744eb7abf2c41ebeca0c2ec2ed0f3 | [
"MIT"
] | 1 | 2018-07-01T22:51:23.000Z | 2018-07-01T22:51:23.000Z | package astwalk
import (
"go/ast"
"go/token"
"go/types"
)
// typeExprWalker traverses a file's declarations and notifies the wrapped
// visitor about every type expression it encounters.
type typeExprWalker struct {
	visitor TypeExprVisitor // receives EnterFunc/VisitTypeExpr/EnterChilds callbacks
	info    *types.Info     // type information used to resolve identifiers
}
// WalkFile visits every type expression reachable from the top-level
// declarations of f. Import declarations are skipped entirely; for each
// function declaration the visitor's EnterFunc is consulted exactly once
// before the signature and body are inspected.
//
// Fixes over the previous version: EnterFunc was invoked twice per
// *ast.FuncDecl (once in a pre-check and again inside the type switch),
// which double-fired any side effects in the visitor; and a function
// declaration without a body (e.g. one implemented in assembly) would
// have been passed as a nil *ast.BlockStmt to ast.Inspect.
func (w *typeExprWalker) WalkFile(f *ast.File) {
	for _, decl := range f.Decls {
		switch decl := decl.(type) {
		case *ast.FuncDecl:
			// Consult the visitor exactly once per function declaration.
			if !w.visitor.EnterFunc(decl) {
				continue
			}
			w.walkSignature(decl.Type)
			// Body is nil for declarations without a Go body.
			if decl.Body != nil {
				ast.Inspect(decl.Body, w.walk)
			}
		case *ast.GenDecl:
			if decl.Tok == token.IMPORT {
				continue
			}
			ast.Inspect(decl, w.walk)
		}
	}
}
// visit notifies the visitor about the type expression x and reports
// whether x's child nodes should be traversed as well.
func (w *typeExprWalker) visit(x ast.Expr) bool {
	w.visitor.VisitTypeExpr(x)
	return w.visitor.EnterChilds(x)
}
// walk is the ast.Inspect callback: it reports type expressions to the
// visitor and returns whether traversal should descend into n's children.
func (w *typeExprWalker) walk(n ast.Node) bool {
	switch n := n.(type) {
	case *ast.ParenExpr:
		// A parenthesized expression is only interesting when it wraps a type.
		if !w.isTypeExpr(n.X) {
			return true
		}
		return w.visit(n)
	case *ast.MapType, *ast.FuncType, *ast.StructType, *ast.ArrayType:
		// Composite type literals are reported directly; descent is up to
		// the visitor's EnterChilds answer.
		return w.visit(n.(ast.Expr))
	case *ast.InterfaceType:
		if !w.visit(n) {
			return false
		}
		// Walk the interface elements by hand: method signatures go through
		// walkSignature, embedded interfaces are re-walked as type exprs.
		for _, field := range n.Methods.List {
			if sig, ok := field.Type.(*ast.FuncType); ok {
				w.walkSignature(sig)
			} else {
				// Embedded interface.
				w.walk(field.Type)
			}
		}
		return false
	}
	return true
}
// isTypeExpr reports whether x denotes a type: either a composite type
// literal, or an identifier that resolves to a type name.
func (w *typeExprWalker) isTypeExpr(x ast.Expr) bool {
	switch x := x.(type) {
	case *ast.FuncType, *ast.StructType, *ast.InterfaceType, *ast.ArrayType, *ast.MapType:
		return true
	case *ast.Ident:
		// An identifier is a type expression when the object it refers
		// to is a type name.
		_, isTypeName := w.info.ObjectOf(x).(*types.TypeName)
		return isTypeName
	}
	return false
}
// walkSignature inspects the parameter and (if present) result types of a
// function signature for type expressions.
func (w *typeExprWalker) walkSignature(typ *ast.FuncType) {
	inspectFields := func(fields []*ast.Field) {
		for _, field := range fields {
			ast.Inspect(field.Type, w.walk)
		}
	}
	inspectFields(typ.Params.List)
	if results := typ.Results; results != nil {
		inspectFields(results.List)
	}
}
| 19.207921 | 87 | 0.644845 |
dd168901399a8fd9c0d4fc790ee7a41d0662aafa | 1,876 | go | Go | exec/client.go | lyr885/chaosblade-exec-cri | 18677916e3fd152f35b96ad87edec79ba2ae04ee | [
"Apache-2.0"
] | null | null | null | exec/client.go | lyr885/chaosblade-exec-cri | 18677916e3fd152f35b96ad87edec79ba2ae04ee | [
"Apache-2.0"
] | null | null | null | exec/client.go | lyr885/chaosblade-exec-cri | 18677916e3fd152f35b96ad87edec79ba2ae04ee | [
"Apache-2.0"
] | null | null | null | /*
* Copyright 1999-2019 Alibaba Group Holding Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package exec
import (
"context"
"io/ioutil"
"time"
"github.com/docker/docker/api/types"
"github.com/docker/docker/client"
"github.com/sirupsen/logrus"
)
// cli is the package-level shared client instance.
var cli *Client

// Client wraps the Docker SDK client used to query container logs and
// inspect/remove images.
type Client struct {
	client *client.Client
}
// waitAndGetOutput waits briefly for the container to settle and then reads
// back its combined stdout/stderr log stream as a single string.
func (c *Client) waitAndGetOutput(containerId string) (string, error) {
	containerWait()
	logOptions := types.ContainerLogsOptions{
		ShowStdout: true,
		ShowStderr: true,
	}
	reader, err := c.client.ContainerLogs(context.Background(), containerId, logOptions)
	if err != nil {
		logrus.Warningf("Get container: %s log err: %s", containerId, err)
		return "", err
	}
	defer reader.Close()
	content, err := ioutil.ReadAll(reader)
	return string(content), err
}
// containerWait blocks for a fixed 500ms settle delay, giving a container
// started just beforehand time to produce output. It always returns nil;
// the error return is kept only so existing callers keep compiling.
//
// The previous implementation built a one-shot Timer and selected on its
// channel, which is just an obfuscated time.Sleep.
func containerWait() error {
	time.Sleep(500 * time.Millisecond)
	return nil
}
//GetImageInspectById
func (c *Client) getImageInspectById(imageId string) (types.ImageInspect, error) {
inspect, _, err := c.client.ImageInspectWithRaw(context.Background(), imageId)
return inspect, err
}
//DeleteImageByImageId
func (c *Client) deleteImageByImageId(imageId string) error {
_, err := c.client.ImageRemove(context.Background(), imageId, types.ImageRemoveOptions{
Force: false,
PruneChildren: true,
})
return err
}
| 25.69863 | 99 | 0.727612 |
f74656d53e59f485a4012a6ffd70e221e290ffd9 | 6,090 | h | C | src/hammer/ToolSelection.h | cstom4994/SourceEngineRebuild | edfd7f8ce8af13e9d23586318350319a2e193c08 | [
"MIT"
] | 6 | 2022-01-23T09:40:33.000Z | 2022-03-20T20:53:25.000Z | src/hammer/ToolSelection.h | cstom4994/SourceEngineRebuild | edfd7f8ce8af13e9d23586318350319a2e193c08 | [
"MIT"
] | null | null | null | src/hammer/ToolSelection.h | cstom4994/SourceEngineRebuild | edfd7f8ce8af13e9d23586318350319a2e193c08 | [
"MIT"
] | 1 | 2022-02-06T21:05:23.000Z | 2022-02-06T21:05:23.000Z | //===== Copyright � 1996-2005, Valve Corporation, All rights reserved. ======//
//
// Purpose:
//
// $NoKeywords: $
//===========================================================================//
#ifndef SELECTION3D_H
#define SELECTION3D_H
#ifdef _WIN32
#pragma once
#endif
#include "Box3D.h"
#include "MapClass.h" // For CMapObjectList
#include "ToolInterface.h"
#include "UtlVector.h"
class CMapWorld;
class CMapView;
class CMapView2D;
class CMapView3D;
class GDinputvariable;
class CRender2D;
class Selection3D : public Box3D {
public:
Selection3D();
~Selection3D();
void Init(CMapDoc *pDocument);
inline bool IsBoxSelecting();
inline bool IsLogicalBoxSelecting();
void EndBoxSelection();
// Start, end logical selection
void StartLogicalBoxSelection(CMapViewLogical *pView, const Vector &vStart);
void EndLogicalBoxSelection();
// Tool3D implementation.
virtual void SetEmpty();
virtual bool IsEmpty();
//
// CBaseTool implementation.
//
virtual void OnActivate();
virtual void OnDeactivate();
virtual ToolID_t GetToolID() { return TOOL_POINTER; }
virtual bool OnContextMenu2D(CMapView2D *pView, UINT nFlags, const Vector2D &vPoint);
virtual bool OnKeyDown2D(CMapView2D *pView, UINT nChar, UINT nRepCnt, UINT nFlags);
virtual bool OnLMouseDown2D(CMapView2D *pView, UINT nFlags, const Vector2D &vPoint);
virtual bool OnLMouseUp2D(CMapView2D *pView, UINT nFlags, const Vector2D &vPoint);
virtual bool OnMouseMove2D(CMapView2D *pView, UINT nFlags, const Vector2D &vPoint);
virtual bool OnKeyDown3D(CMapView3D *pView, UINT nChar, UINT nRepCnt, UINT nFlags);
virtual bool OnLMouseDblClk3D(CMapView3D *pView, UINT nFlags, const Vector2D &vPoint);
virtual bool OnLMouseDown3D(CMapView3D *pView, UINT nFlags, const Vector2D &vPoint);
virtual bool OnLMouseUp3D(CMapView3D *pView, UINT nFlags, const Vector2D &vPoint);
virtual bool OnMouseMove3D(CMapView3D *pView, UINT nFlags, const Vector2D &vPoint);
virtual void RenderTool2D(CRender2D *pRender);
virtual void RenderToolLogical(CRender2D *pRender);
virtual void RenderTool3D(CRender3D *pRender);
virtual bool OnContextMenuLogical(CMapViewLogical *pView, UINT nFlags, const Vector2D &vPoint);
virtual bool OnKeyDownLogical(CMapViewLogical *pView, UINT nChar, UINT nRepCnt, UINT nFlags);
virtual bool OnLMouseDownLogical(CMapViewLogical *pView, UINT nFlags, const Vector2D &vPoint);
virtual bool OnLMouseUpLogical(CMapViewLogical *pView, UINT nFlags, const Vector2D &vPoint);
virtual bool OnMouseMoveLogical(CMapViewLogical *pView, UINT nFlags, const Vector2D &vPoint);
virtual bool OnLMouseDblClkLogical(CMapViewLogical *pView, UINT nFlags, const Vector2D &vPoint);
void UpdateSelectionBounds();
bool m_bBoxSelection;
protected:
void TransformSelection();
void TransformLogicalSelection(const Vector2D &vecTranslation);
void FinishTranslation(bool bSave, bool bClone);
void StartTranslation(CMapView *pView, const Vector2D &vPoint, const Vector &vHandleOrigin);
bool StartBoxSelection(CMapView *pView, const Vector2D &vPoint, const Vector &vStart);
void UpdateHandleState();
virtual unsigned int GetConstraints(unsigned int nKeyFlags);
void NudgeObjects(CMapView *pView, int nChar, bool bSnap, bool bClone);
GDinputvariable *ChooseEyedropperVar(CMapView *pView, CUtlVector<GDinputvariable *> &VarList);
CMapEntity *FindEntityInTree(CMapClass *pObject);
void SelectInBox(CMapDoc *pDoc, bool bInsideOnly);
CBaseTool *GetToolObject(CMapView2D *pView, const Vector2D &ptScreen, bool bAttach);
CBaseTool *GetToolObjectLogical(CMapViewLogical *pView, const Vector2D &vPoint, bool bAttach);
void SetEyedropperCursor();
void EyedropperPick2D(CMapView2D *pView, const Vector2D &vPoint);
void EyedropperPick3D(CMapView3D *pView, const Vector2D &vPoint);
void EyedropperPick(CMapView *pView, CMapClass *pObject);
void OnEscape(CMapDoc *pDoc);
//
// Tool3D implementation.
//
virtual int HitTest(CMapView *pView, const Vector2D &pt, bool bTestHandles = false);
// Methods related to logical operations
void EyedropperPickLogical(CMapViewLogical *pView, const Vector2D &vPoint);
bool HitTestLogical(CMapView *pView, const Vector2D &ptClient);
void SelectInLogicalBox(CMapDoc *pDoc, bool bInsideOnly);
CSelection *m_pSelection; // the documents selection opject
bool m_bEyedropper; // True if we are holding down the eyedropper hotkey.
bool m_bSelected; // Did we select an object on left button down?
bool m_b3DEditMode; // editing mode in 3D on/off
bool m_bDrawAsSolidBox; // sometimes we want to render the tool bbox solid
// These are fields related to manipulation in logical views
Vector2D m_vLDownLogicalClient; // Logical client pos at which lbutton was pressed.
Vector2D m_vecLogicalSelBoxMins;
Vector2D m_vecLogicalSelBoxMaxs;
bool m_bInLogicalBoxSelection; // Are we doing box selection in the logical mode?
COLORREF m_clrLogicalBox; // The color of the logical box
Vector2D m_vLastLogicalDragPoint; // Last point at which we dragged (world coords)
Vector2D m_vLogicalTranslation;
bool m_bIsLogicalTranslating; // true while translation in logical view
bool m_bLButtonDown;
bool m_bLeftDragged;
};
//-----------------------------------------------------------------------------
// Are we in box selection?
//-----------------------------------------------------------------------------
inline bool Selection3D::IsBoxSelecting() {
return m_bBoxSelection;
}
inline bool Selection3D::IsLogicalBoxSelecting() {
return m_bInLogicalBoxSelection;
}
#endif // SELECTION3D_H
| 31.071429 | 101 | 0.682759 |
deef3dda9620658fb4a36b2d4c1f835e36b7c8d2 | 172 | rs | Rust | workout_plan/src/bin/bin2.rs | tshrpl/rust_learn | d50b27fcf9f04b791a053357b0bac56088820f9d | [
"MIT"
] | null | null | null | workout_plan/src/bin/bin2.rs | tshrpl/rust_learn | d50b27fcf9f04b791a053357b0bac56088820f9d | [
"MIT"
] | null | null | null | workout_plan/src/bin/bin2.rs | tshrpl/rust_learn | d50b27fcf9f04b791a053357b0bac56088820f9d | [
"MIT"
] | null | null | null |
fn main() {
let x = vec![1, 2, 3];
let equal_to_x = move |z| z == x;
// println!("Cant use x here: {:?}", x);
let y = vec![1, 2, 3];
assert!(equal_to_x(y));
}
| 10.75 | 41 | 0.482558 |
f06e2cbdbcd2679f16c4731459ef5b92bd76ef69 | 1,108 | js | JavaScript | docs/src/components/Footer/index.js | kumarharsh/storybook | 9d4438ba495d361acf635b836fe3e8dceaffc9c0 | [
"MIT"
] | 1 | 2019-04-16T01:01:50.000Z | 2019-04-16T01:01:50.000Z | docs/src/components/Footer/index.js | kumarharsh/storybook | 9d4438ba495d361acf635b836fe3e8dceaffc9c0 | [
"MIT"
] | 7 | 2020-07-21T11:41:38.000Z | 2022-01-15T02:35:21.000Z | docs/src/components/Footer/index.js | kumarharsh/storybook | 9d4438ba495d361acf635b836fe3e8dceaffc9c0 | [
"MIT"
] | 1 | 2021-05-20T18:00:46.000Z | 2021-05-20T18:00:46.000Z | import React from 'react';
import Link from 'gatsby-link';
import slackIcon from './images/slack-icon.png';
import githubIcon from './images/github-icon.png';
import './style.css';
const Footer = () => (
<div id="footer" className="row">
<div className="col-md-12">
<div className="row logos">
<div className="col-xs-12">
<center>
Maintained by the <Link to="/basics/community/">Storybook Community</Link>
.
</center>
<center>
<a
href="https://now-examples-slackin-nqnzoygycp.now.sh/"
target="_blank"
rel="noreferrer noopener"
>
<img src={slackIcon} alt="Storybook Slack" />
</a>
<a
href="https://github.com/storybooks/storybook"
target="_blank"
rel="noreferrer noopener"
>
<img src={githubIcon} alt="Storybook GitHub" style={{ padding: '7px' }} />
</a>
</center>
</div>
</div>
</div>
</div>
);
export default Footer;
| 28.410256 | 88 | 0.513538 |
0cf20d68ff93bb50029ab4621417fc5c929819f7 | 11,612 | py | Python | mpas_analysis/ocean/time_series_sst.py | alicebarthel/MPAS-Analysis | a8c568180abf96879e890a73e848db58642cfdb6 | [
"MIT",
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | mpas_analysis/ocean/time_series_sst.py | alicebarthel/MPAS-Analysis | a8c568180abf96879e890a73e848db58642cfdb6 | [
"MIT",
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | mpas_analysis/ocean/time_series_sst.py | alicebarthel/MPAS-Analysis | a8c568180abf96879e890a73e848db58642cfdb6 | [
"MIT",
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | # This software is open source software available under the BSD-3 license.
#
# Copyright (c) 2020 Triad National Security, LLC. All rights reserved.
# Copyright (c) 2020 Lawrence Livermore National Security, LLC. All rights
# reserved.
# Copyright (c) 2020 UT-Battelle, LLC. All rights reserved.
#
# Additional copyright and license information can be found in the LICENSE file
# distributed with this code, or at
# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/master/LICENSE
from __future__ import absolute_import, division, print_function, \
unicode_literals
from mpas_analysis.shared import AnalysisTask
from mpas_analysis.shared.plot import timeseries_analysis_plot, savefig
from mpas_analysis.shared.time_series import combine_time_series_with_ncrcat
from mpas_analysis.shared.io import open_mpas_dataset
from mpas_analysis.shared.timekeeping.utility import date_to_days, \
days_to_datetime
from mpas_analysis.shared.io.utility import build_config_full_path, \
make_directories, check_path_exists
from mpas_analysis.shared.html import write_image_xml
class TimeSeriesSST(AnalysisTask):
"""
Performs analysis of the time-series output of sea-surface temperature
(SST).
Attributes
----------
mpasTimeSeriesTask : ``MpasTimeSeriesTask``
The task that extracts the time series from MPAS monthly output
controlConfig : ``MpasAnalysisConfigParser``
Configuration options for a control run (if any)
"""
# Authors
# -------
# Xylar Asay-Davis, Milena Veneziani
def __init__(self, config, mpasTimeSeriesTask, controlConfig=None):
# {{{
"""
Construct the analysis task.
Parameters
----------
config : ``MpasAnalysisConfigParser``
Configuration options
mpasTimeSeriesTask : ``MpasTimeSeriesTask``
The task that extracts the time series from MPAS monthly output
controlConfig : ``MpasAnalysisConfigParser``, optional
Configuration options for a control run (if any)
"""
# Authors
# -------
# Xylar Asay-Davis
# first, call the constructor from the base class (AnalysisTask)
super(TimeSeriesSST, self).__init__(
config=config,
taskName='timeSeriesSST',
componentName='ocean',
tags=['timeSeries', 'sst', 'publicObs'])
self.mpasTimeSeriesTask = mpasTimeSeriesTask
self.controlConfig = controlConfig
self.run_after(mpasTimeSeriesTask)
# }}}
def setup_and_check(self): # {{{
"""
Perform steps to set up the analysis and check for errors in the setup.
Raises
------
OSError
If files are not present
"""
# Authors
# -------
# Xylar Asay-Davis
# first, call setup_and_check from the base class (AnalysisTask),
# which will perform some common setup, including storing:
# self.inDirectory, self.plotsDirectory, self.namelist, self.streams
# self.calendar
super(TimeSeriesSST, self).setup_and_check()
config = self.config
self.startDate = self.config.get('timeSeries', 'startDate')
self.endDate = self.config.get('timeSeries', 'endDate')
self.variableList = \
['timeMonthly_avg_avgValueWithinOceanRegion_avgSurfaceTemperature']
self.mpasTimeSeriesTask.add_variables(variableList=self.variableList)
if config.get('runs', 'preprocessedReferenceRunName') != 'None':
check_path_exists(config.get('oceanPreprocessedReference',
'baseDirectory'))
self.inputFile = self.mpasTimeSeriesTask.outputFile
mainRunName = config.get('runs', 'mainRunName')
regions = config.getExpression('timeSeriesSST', 'regions')
self.xmlFileNames = []
self.filePrefixes = {}
for region in regions:
filePrefix = 'sst_{}_{}'.format(region, mainRunName)
self.xmlFileNames.append('{}/{}.xml'.format(self.plotsDirectory,
filePrefix))
self.filePrefixes[region] = filePrefix
return # }}}
def run_task(self): # {{{
"""
Performs analysis of the time-series output of sea-surface temperature
(SST).
"""
# Authors
# -------
# Xylar Asay-Davis, Milena Veneziani
self.logger.info("\nPlotting SST time series...")
self.logger.info(' Load SST data...')
config = self.config
calendar = self.calendar
mainRunName = config.get('runs', 'mainRunName')
preprocessedReferenceRunName = \
config.get('runs', 'preprocessedReferenceRunName')
preprocessedInputDirectory = config.get('oceanPreprocessedReference',
'baseDirectory')
movingAveragePoints = config.getint('timeSeriesSST',
'movingAveragePoints')
regions = config.getExpression('regions', 'regions')
plotTitles = config.getExpression('regions', 'plotTitles')
regionsToPlot = config.getExpression('timeSeriesSST', 'regions')
regionIndicesToPlot = [regions.index(region) for region in
regionsToPlot]
outputDirectory = build_config_full_path(config, 'output',
'timeseriesSubdirectory')
make_directories(outputDirectory)
dsSST = open_mpas_dataset(fileName=self.inputFile,
calendar=calendar,
variableList=self.variableList,
startDate=self.startDate,
endDate=self.endDate)
yearStart = days_to_datetime(dsSST.Time.min(), calendar=calendar).year
yearEnd = days_to_datetime(dsSST.Time.max(), calendar=calendar).year
timeStart = date_to_days(year=yearStart, month=1, day=1,
calendar=calendar)
timeEnd = date_to_days(year=yearEnd, month=12, day=31,
calendar=calendar)
if self.controlConfig is not None:
baseDirectory = build_config_full_path(
self.controlConfig, 'output', 'timeSeriesSubdirectory')
controlFileName = '{}/{}.nc'.format(
baseDirectory, self.mpasTimeSeriesTask.fullTaskName)
controlStartYear = self.controlConfig.getint(
'timeSeries', 'startYear')
controlEndYear = self.controlConfig.getint('timeSeries', 'endYear')
controlStartDate = '{:04d}-01-01_00:00:00'.format(controlStartYear)
controlEndDate = '{:04d}-12-31_23:59:59'.format(controlEndYear)
dsRefSST = open_mpas_dataset(
fileName=controlFileName,
calendar=calendar,
variableList=self.variableList,
startDate=controlStartDate,
endDate=controlEndDate)
else:
dsRefSST = None
if preprocessedReferenceRunName != 'None':
self.logger.info(' Load in SST for a preprocesses reference '
'run...')
inFilesPreprocessed = '{}/SST.{}.year*.nc'.format(
preprocessedInputDirectory, preprocessedReferenceRunName)
outFolder = '{}/preprocessed'.format(outputDirectory)
make_directories(outFolder)
outFileName = '{}/sst.nc'.format(outFolder)
combine_time_series_with_ncrcat(inFilesPreprocessed,
outFileName, logger=self.logger)
dsPreprocessed = open_mpas_dataset(fileName=outFileName,
calendar=calendar,
timeVariableNames='xtime')
yearEndPreprocessed = days_to_datetime(dsPreprocessed.Time.max(),
calendar=calendar).year
if yearStart <= yearEndPreprocessed:
dsPreprocessedTimeSlice = \
dsPreprocessed.sel(Time=slice(timeStart, timeEnd))
else:
self.logger.warning('Preprocessed time series ends before the '
'timeSeries startYear and will not be '
'plotted.')
preprocessedReferenceRunName = 'None'
self.logger.info(' Make plots...')
for regionIndex in regionIndicesToPlot:
region = regions[regionIndex]
title = '{} SST'.format(plotTitles[regionIndex])
xLabel = 'Time [years]'
yLabel = r'[$\degree$C]'
varName = self.variableList[0]
SST = dsSST[varName].isel(nOceanRegions=regionIndex)
filePrefix = self.filePrefixes[region]
outFileName = '{}/{}.png'.format(self.plotsDirectory, filePrefix)
lineColors = ['k']
lineWidths = [3]
fields = [SST]
legendText = [mainRunName]
if dsRefSST is not None:
refSST = dsRefSST[varName].isel(nOceanRegions=regionIndex)
fields.append(refSST)
lineColors.append('r')
lineWidths.append(1.5)
controlRunName = self.controlConfig.get('runs', 'mainRunName')
legendText.append(controlRunName)
if preprocessedReferenceRunName != 'None':
SST_v0 = dsPreprocessedTimeSlice.SST
fields.append(SST_v0)
lineColors.append('purple')
lineWidths.append(1.5)
legendText.append(preprocessedReferenceRunName)
if config.has_option(self.taskName, 'firstYearXTicks'):
firstYearXTicks = config.getint(self.taskName,
'firstYearXTicks')
else:
firstYearXTicks = None
if config.has_option(self.taskName, 'yearStrideXTicks'):
yearStrideXTicks = config.getint(self.taskName,
'yearStrideXTicks')
else:
yearStrideXTicks = None
timeseries_analysis_plot(config, fields, calendar=calendar,
title=title, xlabel=xLabel, ylabel=yLabel,
movingAveragePoints=movingAveragePoints,
lineColors=lineColors,
lineWidths=lineWidths,
legendText=legendText,
firstYearXTicks=firstYearXTicks,
yearStrideXTicks=yearStrideXTicks)
savefig(outFileName)
caption = 'Running Mean of {} Sea Surface Temperature'.format(
region)
write_image_xml(
config=config,
filePrefix=filePrefix,
componentName='Ocean',
componentSubdirectory='ocean',
galleryGroup='Time Series',
groupLink='timeseries',
thumbnailDescription='{} SST'.format(region),
imageDescription=caption,
imageCaption=caption)
# }}}
# }}}
# vim: foldmethod=marker ai ts=4 sts=4 et sw=4 ft=python
| 38.072131 | 79 | 0.577678 |
16b799ac2f6f0695cf6c952dac5bc9594dc7924b | 11,203 | c | C | gpio/gpio_json_api.c | daanpape/stumon-breakout-server | d202c722266e41c24410a2a209ac0db050d0b4d8 | [
"BSD-2-Clause"
] | 2 | 2015-06-04T00:35:05.000Z | 2016-07-20T06:35:22.000Z | gpio/gpio_json_api.c | dptechnics/DPT-Board-SERV | 3323ab8418bf3977efd54516578d49955d5ed271 | [
"BSD-2-Clause"
] | 1 | 2016-08-17T13:01:01.000Z | 2016-09-08T08:11:05.000Z | gpio/gpio_json_api.c | daanpape/stumon-breakout-server | d202c722266e41c24410a2a209ac0db050d0b4d8 | [
"BSD-2-Clause"
] | null | null | null | /*
* Copyright (c) 2014, Daan Pape
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
* File: gpio_json_api.c
* Created on April 6, 2015, 5:02 PM
*/
#include <stdio.h>
#include <stdio.h>
#include <json-c/json.h>
#include <string.h>
#include <stdlib.h>
#include "../logger.h"
#include "../helper.h"
#include "../uhttpd.h"
#include "gpio_json_api.h"
#include "gpio.h"
/**
* Route all get requests concerning the gpio module.
* @param cl the client who made the request.
* @param request the request part of the url.
* @return the result of the called function.
*/
json_object* gpio_get_router(struct client *cl, char *request) {
if (helper_str_startswith(request, "layout", 0))
{
return gpio_get_layout(cl, request + 7);
}
else if (helper_str_startswith(request, "state", 0))
{
return gpio_get_status(cl, request + 6);
}
else if (helper_str_startswith(request, "overview", 0))
{
return gpio_get_overview(cl, request + 9);
}
else if (helper_str_startswith(request, "states", 0))
{
return gpio_get_all_states(cl, request + 10);
}
else
{
log_message(LOG_WARNING, "GPIO API got unknown GET request '%s'\r\n", request);
return NULL;
}
}
/**
* Route all post requests concerning the gpio module.
* @param cl the client who made the request.
* @param request the request part of the url.
* @return the result of the called function.
*/
json_object* gpio_post_router(struct client *cl, char *request) {
return NULL;
}
/**
* Route all put requests concerning the gpio module.
* @param cl the client who made the request.
* @param request the request part of the url.
* @return the result of the called function.
*/
json_object* gpio_put_router(struct client *cl, char *request) {
if (helper_str_startswith(request, "state", 0))
{
return gpio_put_status(cl, request + 6);
}
else if (helper_str_startswith(request, "dir", 0))
{
return gpio_put_direction(cl, request + 4);
}
else if (helper_str_startswith(request, "pulse", 0))
{
return gpio_put_pulse_output(cl, request + 6);
}
else
{
log_message(LOG_WARNING, "GPIO API got unknown PUT request '%s'\r\n", request);
return NULL;
}
}
/**
* Get the layout of the GPIO ports of the board.
* @cl the client who made the request
* @request the request part of the url
*/
json_object* gpio_get_layout(struct client *cl, char *request) {
int i;
/* Create the json object */
json_object *jobj = json_object_new_object();
json_object *jarray = json_object_new_array();
/* Check all the IO ports for existance */
for(i = 0; i < (sizeof(gpio_config) / sizeof(bool)); ++i) {
if(gpio_config[i]){
/* This IO is available, put it in the array */
json_object *portnr = json_object_new_int(i);
json_object_array_add(jarray, portnr);
}
}
/* Add the array to the json object */
json_object_object_add(jobj, "ioports", jarray);
/* Return status ok */
cl->http_status = r_ok;
return jobj;
}
/**
* Get the layout of the GPIO ports and also the current GPIO port
* state.
* @param cl the client who made the request.
* @param request the request part of the url.
*/
json_object* gpio_get_overview(struct client *cl, char *request) {
int i;
/* Create the json object */
json_object *jobj = json_object_new_object();
json_object *jarray = json_object_new_array();
/* Check the state for every IO port */
for(i = 0; i < (sizeof(gpio_config) / sizeof(bool)); ++i) {
if(gpio_config[i]){
json_object *j_gpio_port = json_object_new_object();
/* Add port number */
json_object_object_add(j_gpio_port, "number", json_object_new_int(i));
/* Add port direction and state */
int state = 2;
int dir = 2;
if(gpio_reserve(i)) {
state = gpio_get_state(i);
dir = gpio_get_direction(i);
gpio_release(i);
}
json_object_object_add(j_gpio_port, "state", json_object_new_int(state));
json_object_object_add(j_gpio_port, "direction", json_object_new_int(dir));
/* Add the port info to the array */
json_object_array_add(jarray, j_gpio_port);
}
}
/* Add the array to the json object */
json_object_object_add(jobj, "ports", jarray);
/* Return status ok */
cl->http_status = r_ok;
return jobj;
}
/**
* Get the states of all GPIO ports.
* @param cl the client who made the request.
* @param request the request part of the url.
*/
json_object* gpio_get_all_states(struct client *cl, char *request){
int i;
/* Create the json object */
json_object *jobj = json_object_new_object();
json_object *jarray = json_object_new_array();
/* Check the state for every IO port */
for(i = 0; i < (sizeof(gpio_config) / sizeof(bool)); ++i) {
if(gpio_reserve(i)){
json_object *j_gpio_port = json_object_new_object();
/* Add data */
json_object_object_add(j_gpio_port, "port-number", json_object_new_int(i));
json_object_object_add(j_gpio_port, "port-state", json_object_new_int(gpio_get_state(i)));
json_object_array_add(jarray, j_gpio_port);
/* Release the port */
gpio_release(i);
}
}
/* Add the array to the json object */
json_object_object_add(jobj, "ports", jarray);
/* Return status ok */
cl->http_status = r_ok;
return jobj;
}
/**
* Get the state of a given GPIO port.
* @cl the client who made the request.
* @request the request part of the url.
*/
json_object* gpio_get_status(struct client *cl, char *request)
{
int gpio_pin;
int gpio_state;
/* If sscanf fails the request is malformed */
if(sscanf(request, "%d", &gpio_pin) != 1) {
log_message(LOG_WARNING, "GPIO GET status request failed, bad request");
cl->http_status = r_bad_req;
return NULL;
}
/* Read the GPIO pin state */
gpio_state = gpio_read_and_close(gpio_pin);
/* Check if there was no error reading the pin */
if(gpio_state == -1){
cl->http_status = r_error;
return NULL;
}
/* Put data in JSON object */
json_object *jobj = json_object_new_object();
json_object *j_pin = json_object_new_int(gpio_pin);
json_object *j_state = json_object_new_int(gpio_state);
json_object_object_add(jobj, "pin", j_pin);
json_object_object_add(jobj, "state", j_state);
/* Return status ok */
cl->http_status = r_ok;
return jobj;
}
/**
* Turn on or of a GPIO port.
* @cl the client who made the request
* @request the request part of the url
*/
json_object* gpio_put_status(struct client *cl, char *request)
{
/* This functions expects the following request /<gpiopin>/<state> */
int gpio_pin;
int gpio_state;
/* If sscanf fails the request is malformed */
if (sscanf(request, "%d/%d", &gpio_pin, &gpio_state) != 2) {
cl->http_status = r_bad_req;
return NULL;
}
if (!gpio_write_and_close(gpio_pin, gpio_state == 1 ? GPIO_HIGH : GPIO_LOW)) {
cl->http_status = r_error;
return NULL;
}
/* Put data in JSON object */
json_object *jobj = json_object_new_object();
json_object *j_pin = json_object_new_int(gpio_pin);
json_object *j_state = json_object_new_int(gpio_state);
json_object_object_add(jobj, "pin", j_pin);
json_object_object_add(jobj, "state", j_state);
/* Return status ok */
cl->http_status = r_ok;
return jobj;
}
/**
* Set-up the direction of a GPIO port.
* @param cl the client who made the request.
* @param request the request part of the url.
*/
json_object* gpio_put_direction(struct client *cl, char *request)
{
/* This functions expects the following request /<gpiopin>/<direction> */
int gpio_pin;
int gpio_direction;
/* If sscanf fails the request is malformed */
if(sscanf(request, "%d/%d", &gpio_pin, &gpio_direction) != 2) {
cl->http_status = r_bad_req;
return NULL;
}
if(!gpio_set_direction(gpio_pin, gpio_direction)) {
cl->http_status = r_error;
return NULL;
}
/* Put data in JSON object */
json_object *jobj = json_object_new_object();
json_object *j_pin = json_object_new_int(gpio_pin);
json_object *j_dir = json_object_new_int(gpio_direction);
json_object_object_add(jobj, "pin", j_pin);
json_object_object_add(jobj, "direction", j_dir);
/* Return status ok */
cl->http_status = r_ok;
return jobj;
}
/**
* Pulse an output for a number of milliseconds.
* @param cl the client who made the request.
* @param request the request part of the url.
*/
json_object* gpio_put_pulse_output(struct client *cl, char *request)
{
/* This functions expects the following request /<gpiopin>/<mode>/<nr_of_ms> */
int gpio_pin;
int gpio_mode;
int ms;
/* If sscanf fails the request is malformed */
if(sscanf(request, "%d/%d/%d", &gpio_pin, &gpio_mode, &ms) != 3) {
cl->http_status = r_bad_req;
return NULL;
}
if(!gpio_pulse(gpio_pin, ms*1000, gpio_mode)) {
cl->http_status = r_error;
return NULL;
}
/* Put data in JSON object */
json_object *jobj = json_object_new_object();
json_object *j_pin = json_object_new_int(gpio_pin);
json_object *j_ms = json_object_new_int(ms);
json_object_object_add(jobj, "pin", j_pin);
json_object_object_add(jobj, "pulse_time", j_ms);
/* Return status ok */
cl->http_status = r_ok;
return jobj;
}
| 30.777473 | 102 | 0.647505 |
3321f706d02f1fdbc011b2d0a28c18fa45d4fe4b | 810 | py | Python | Line_chart.py | sanabasangare/data-visualization | 09a03d0414941d28e312037ccaa0b283dbb2ec06 | [
"MIT"
] | null | null | null | Line_chart.py | sanabasangare/data-visualization | 09a03d0414941d28e312037ccaa0b283dbb2ec06 | [
"MIT"
] | null | null | null | Line_chart.py | sanabasangare/data-visualization | 09a03d0414941d28e312037ccaa0b283dbb2ec06 | [
"MIT"
] | null | null | null | import matplotlib.pyplot as plt
from collections import Counter
def line_graph(plt):
# years observed since 2000
years = [2000, 2002, 2005, 2007, 2010, 2012, 2014, 2015]
# total number of websites on the world wide web
# (source: Internet Live Stats)
websites = [17, 38, 64, 121, 206, 697, 968, 863]
# create a line chart with years on x-axis and number of websites on y-axis
plt.plot(years, websites, color='blue', marker='o', linestyle='solid',
linewidth=2)
# adjust the x and y axis markers
plt.xlim(2000, 2015)
plt.ylim(10, 1000)
# add a title to the chart
plt.title("Total number of websites online")
# add a label to the y-axis
plt.ylabel("Websites (millions)")
plt.show()
if __name__ == "__main__":
line_graph(plt)
| 24.545455 | 79 | 0.650617 |
9d84bf32c2b80a5f28d98f3e880852db8d2e82e4 | 1,611 | swift | Swift | SwiftBooster/Classes/Extension/Helpers/JSON.swift | 3pehrbehroozi/SwiftBooster | 33651a5718fb2d8334ba6551facd1d02c3cf2001 | [
"Apache-2.0"
] | null | null | null | SwiftBooster/Classes/Extension/Helpers/JSON.swift | 3pehrbehroozi/SwiftBooster | 33651a5718fb2d8334ba6551facd1d02c3cf2001 | [
"Apache-2.0"
] | null | null | null | SwiftBooster/Classes/Extension/Helpers/JSON.swift | 3pehrbehroozi/SwiftBooster | 33651a5718fb2d8334ba6551facd1d02c3cf2001 | [
"Apache-2.0"
] | null | null | null | //
// JSON.swift
// SwiftBooster
//
// Created by Sepehr Behroozi on 4/17/19.
// Copyright © 2019 ayantech.ir. All rights reserved.
//
import Foundation
/// Typealias for [String: Any]
public typealias JSONObject = [String: Any]
/// Typealias for [Any]
public typealias JSONArray = [Any]
public func getValue<T>(input: Any?, subscripts: Any...) -> T? {
var extractingValue = input
subscripts.forEach { (key) in
if let intKey = key as? Int {
extractingValue = (extractingValue as? [Any])?[intKey]
}
if let stringKey = key as? String {
extractingValue = (extractingValue as? [String: Any])?[stringKey]
}
}
switch T.self {
case is Int.Type:
let result = extractingValue as? Int ?? (extractingValue as? String)?.toInt()
return result as? T
case is Double.Type:
let result = extractingValue as? Double ?? (extractingValue as? String)?.toDouble()
return result as? T
case is String.Type:
let result = extractingValue as? String ?? (extractingValue as? Int)?.toString() ?? (extractingValue as? Double)?.toString()
if result == "null" {
return nil
} else {
return result as? T
}
case is Bool.Type:
var result = extractingValue as? Bool ?? (extractingValue as? String)?.toBool()
if result == nil {
if let resultInt = extractingValue as? Int {
result = resultInt == 1
}
}
return result as? T
default:
return extractingValue as? T
}
}
| 28.767857 | 132 | 0.58473 |
a1bdbef83a6f87d5571c1f8db58aaf15b7fe7210 | 1,599 | h | C | include/history.h | tyounkin/gitr-1-intermediate | 4a57364594223fdc3831c07a3c36ec3a8268f7cf | [
"MIT"
] | null | null | null | include/history.h | tyounkin/gitr-1-intermediate | 4a57364594223fdc3831c07a3c36ec3a8268f7cf | [
"MIT"
] | null | null | null | include/history.h | tyounkin/gitr-1-intermediate | 4a57364594223fdc3831c07a3c36ec3a8268f7cf | [
"MIT"
] | null | null | null | #ifndef _HISTORY_
#define _HISTORY_
#ifdef __CUDACC__
#define CUDA_CALLABLE_MEMBER_DEVICE __device__
#else
#define CUDA_CALLABLE_MEMBER_DEVICE
#endif
#include "Particles.h"
#include "Boundary.h"
#include <math.h>
#include <vector>
// Functor that records per-particle state snapshots into flat history arrays.
// Every `subSampleFac`-th timestep, particle `indx`'s previous position,
// velocity, and charge are copied into row tt/subSampleFac of the buffers.
struct history {
  Particles *particlesPointer;  // source of particle state (device/host data)
  int tt;                       // current timestep
  int subSampleFac;             // record every subSampleFac timesteps
  int nP;                       // particle count == row stride of the buffers
  float *histX;
  float *histY;
  float *histZ;
  float *histvx;
  float *histvy;
  float *histvz;
  float *histcharge;

  history(Particles *_particlesPointer, int _tt, int _subSampleFac, int _nP,
          float *_histX, float *_histY, float *_histZ,
          float *_histvx, float *_histvy, float *_histvz, float *_histcharge)
      : particlesPointer(_particlesPointer), tt(_tt),
        subSampleFac(_subSampleFac), nP(_nP),
        histX(_histX), histY(_histY), histZ(_histZ),
        histvx(_histvx), histvy(_histvy), histvz(_histvz),
        histcharge(_histcharge) {}

  CUDA_CALLABLE_MEMBER_DEVICE
  void operator()(std::size_t indx) const {
    // Only record on sub-sampled timesteps.
    if (tt % subSampleFac != 0) {
      return;
    }
    // Row offset shared by every output buffer for this snapshot.
    const std::size_t slot = (tt / subSampleFac) * nP + indx;
    histX[slot] = particlesPointer->xprevious[indx];
    histY[slot] = particlesPointer->yprevious[indx];
    histZ[slot] = particlesPointer->zprevious[indx];
    histvx[slot] = particlesPointer->vx[indx];
    histvy[slot] = particlesPointer->vy[indx];
    histvz[slot] = particlesPointer->vz[indx];
    histcharge[slot] = particlesPointer->charge[indx];
  }
};
#endif
| 31.352941 | 125 | 0.673546 |
d0a82c20d83e32466167e271c75c3ea5316636b5 | 2,088 | css | CSS | data/usercss/120494.user.css | 33kk/uso-archive | 2c4962d1d507ff0eaec6dcca555efc531b37a9b4 | [
"MIT"
] | 118 | 2020-08-28T19:59:28.000Z | 2022-03-26T16:28:40.000Z | data/usercss/120494.user.css | 33kk/uso-archive | 2c4962d1d507ff0eaec6dcca555efc531b37a9b4 | [
"MIT"
] | 38 | 2020-09-02T01:08:45.000Z | 2022-01-23T02:47:24.000Z | data/usercss/120494.user.css | 33kk/uso-archive | 2c4962d1d507ff0eaec6dcca555efc531b37a9b4 | [
"MIT"
] | 21 | 2020-08-19T01:12:43.000Z | 2022-03-15T21:55:17.000Z | /* ==UserStyle==
@name Twitter Heart 2 Star
@namespace USO Archive
@author fricolicious
@description `Replaces Twitter's new red hearts with orange stars.`
@version 20151106.16.2
@license CC0-1.0
@preprocessor uso
==/UserStyle== */
@-moz-document domain("twitter.com") {
  /* Swap the heart badge glyph for the star glyph (rosettaicons \f001). */
  .Icon--heartBadge:before {
    content: "\f001";
  }
  /* Hide the animated heart and overlay a static star in its place. */
  .HeartAnimationContainer {
    visibility: hidden;
  }
  .HeartAnimationContainer:after {
    content: "\f001";
    visibility: visible;
    display: block;
    position: absolute;
    font-family: "rosettaicons";
    font-weight: normal;
    font-style: normal;
    text-align: center;
    -webkit-font-smoothing: antialiased;
  }
  /* Recolor every (un)favorite icon and counter to star-orange.
     Duplicate selectors present in the original list were removed. */
  .Icon--colorHeart,
  .Icon--circleActiveHeart,
  .Icon--heartBadge,
  .in .HeartAnimationContainer:after,
  .ProfileTweet-action--favorite.withHeartIcon .ProfileTweet-actionButton:hover,
  .ProfileTweet-action--favorite.withHeartIcon .ProfileTweet-actionButton:focus,
  .ProfileTweet-action--favorite.withHeartIcon .ProfileTweet-actionCount:hover,
  .ProfileTweet-action--favorite.withHeartIcon .ProfileTweet-actionCount:focus,
  .favorited .ProfileTweet-action--favorite .Icon--heart,
  .favorited .ProfileTweet-action--favorite.withHeartIcon .ProfileTweet-actionButtonUndo,
  .stream-container .grid-tweet .grid-tweet-actions .favorite .Icon--heart:hover,
  .stream-container .grid-tweet .grid-tweet-actions .unfavorite .Icon--heart,
  .ProfileTweet-action--favorite .ProfileTweet-actionButton:hover,
  .ProfileTweet-action--favorite .ProfileTweet-actionButton:focus,
  .ProfileTweet-action--favorite .ProfileTweet-actionCount:hover,
  .ProfileTweet-action--favorite .ProfileTweet-actionCount:focus,
  .favorited .ProfileTweet-action--favorite .ProfileTweet-actionButtonUndo {
    color: #FF9900;
  }
} | 37.285714 | 88 | 0.742337 |
816a3c4ccc004e1ff1e9e713ed4c5443e0728a13 | 134 | rs | Rust | src/test/ui/macros/macro-path-prelude-fail-2.rs | Eric-Arellano/rust | 0f6f2d681b39c5f95459cd09cb936b6ceb27cd82 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 66,762 | 2015-01-01T08:32:03.000Z | 2022-03-31T23:26:40.000Z | src/test/ui/macros/macro-path-prelude-fail-2.rs | Eric-Arellano/rust | 0f6f2d681b39c5f95459cd09cb936b6ceb27cd82 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 76,993 | 2015-01-01T00:06:33.000Z | 2022-03-31T23:59:15.000Z | src/test/ui/macros/macro-path-prelude-fail-2.rs | Eric-Arellano/rust | 0f6f2d681b39c5f95459cd09cb936b6ceb27cd82 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 11,787 | 2015-01-01T00:01:19.000Z | 2022-03-31T19:03:42.000Z | mod m {
fn check() {
Result::Ok!(); //~ ERROR failed to resolve: partially resolved path in a macro
}
}
fn main() {}
| 16.75 | 86 | 0.552239 |
f087b664ea69f7444bd6a9b4b78ad51a9b5acf37 | 494 | js | JavaScript | src/1979.find-greatest-common-divisor-of-array.2106/1979.find-greatest-common-divisor-of-array.2106.js | jiangshanmeta/meta | 8f9d084cda91988d42208ac7a029612e9edc693b | [
"MIT"
] | 221 | 2018-10-26T07:05:12.000Z | 2022-03-30T03:23:10.000Z | src/1979.find-greatest-common-divisor-of-array.2106/1979.find-greatest-common-divisor-of-array.2106.js | ralap18/meta | 82d660a6eabb15e398a7dcc2a0fa99342143bb12 | [
"MIT"
] | 23 | 2018-09-24T14:50:58.000Z | 2020-09-17T14:23:45.000Z | src/1979.find-greatest-common-divisor-of-array.2106/1979.find-greatest-common-divisor-of-array.2106.js | ralap18/meta | 82d660a6eabb15e398a7dcc2a0fa99342143bb12 | [
"MIT"
] | 45 | 2019-03-29T03:36:19.000Z | 2022-03-25T20:57:13.000Z | /**
* @param {number[]} nums
* @return {number}
*/
var findGCD = function (nums) {
    // Track the extremes of the array in a single pass; the answer only
    // depends on gcd(max, min) -- every other element is irrelevant.
    let largest = -Infinity;
    let smallest = Infinity;
    for (const value of nums) {
        if (value > largest) {
            largest = value;
        }
        if (value < smallest) {
            smallest = value;
        }
    }
    return gcd(largest, smallest);
};
// Euclid's algorithm for the greatest common divisor of two numbers.
function gcd (a, b) {
    // Establish larger >= smaller up front, mirroring the original's
    // explicit swap, then iterate on the remainder.
    let larger = Math.max(a, b);
    let smaller = Math.min(a, b);
    while (smaller !== 0) {
        [larger, smaller] = [smaller, larger % smaller];
    }
    return larger;
}
| 17.642857 | 43 | 0.425101 |
4762c7b7fd994117749ebface27ab878c54587e2 | 1,379 | html | HTML | openstack_dashboard/dashboards/admin/projects/templates/projects/_update_members.html | dreamhost/horizon | 55569d540e6c1a6957d5127f9bae6a699ed60823 | [
"Apache-2.0"
] | 3 | 2017-02-13T15:11:01.000Z | 2021-07-28T08:28:09.000Z | openstack_dashboard/dashboards/admin/projects/templates/projects/_update_members.html | dreamhost/horizon | 55569d540e6c1a6957d5127f9bae6a699ed60823 | [
"Apache-2.0"
] | null | null | null | openstack_dashboard/dashboards/admin/projects/templates/projects/_update_members.html | dreamhost/horizon | 55569d540e6c1a6957d5127f9bae6a699ed60823 | [
"Apache-2.0"
] | 2 | 2018-08-29T10:56:04.000Z | 2019-11-11T11:45:20.000Z | {% load i18n %}
<noscript><h3>{{ step }}</h3></noscript>
<div class="project_membership">
<div class="header">
<div class="help_text">{% trans "From here you can add and remove members to this project from the list of all available users." %}</div>
<div class="left">
<div class="fake_table fake_table_header">
<span class="users_title">{% trans "All Users" %}</span>
<input type="text" name="available_users_filter" id="available_users" class="filter" value="Filter">
</div>
</div>
<div class="right">
<div class="fake_table fake_table_header">
<span class="users_title">{% trans "Project Members" %}</span>
<input type="text" name="project_members_filter" id="project_members" class="filter" value="Filter">
</div>
</div>
</div>
<div class="left filterable">
<div class="fake_table" id="available_users">
<ul class="available_users"></ul>
<ul class="no_results" id="no_available_users"><li>{% trans "No users found." %}</li></ul>
</div>
</div>
<div class="right filterable">
<div class="fake_table" id="project_members">
<ul class="project_members"></ul>
<ul class="no_results" id="no_project_members"><li>{% trans "No users found." %}</li></ul>
</div>
</div>
</div>
<div class="hide">
{% include "horizon/common/_form_fields.html" %}
</div>
| 34.475 | 141 | 0.629442 |
f413999a599202ad40175b7bccd66b01bd537e18 | 1,259 | kt | Kotlin | android/src/main/java/au/com/crazybean/mobilex/kurir/modules/auth/verify/VerifyActivity.kt | crazybeanstudio/kurir-x | 7b4e96434246eb1ddae81a6e8688ffc182cf17ce | [
"Apache-2.0"
] | null | null | null | android/src/main/java/au/com/crazybean/mobilex/kurir/modules/auth/verify/VerifyActivity.kt | crazybeanstudio/kurir-x | 7b4e96434246eb1ddae81a6e8688ffc182cf17ce | [
"Apache-2.0"
] | null | null | null | android/src/main/java/au/com/crazybean/mobilex/kurir/modules/auth/verify/VerifyActivity.kt | crazybeanstudio/kurir-x | 7b4e96434246eb1ddae81a6e8688ffc182cf17ce | [
"Apache-2.0"
] | null | null | null | package au.com.crazybean.mobilex.kurir.modules.auth.verify
import android.view.View
import android.widget.EditText
import au.com.crazybean.mobilex.kurir.R
import au.com.crazybean.mobilex.kurir.data.model.User
import au.com.crazybean.mobilex.kurir.modules.base.BaseActivity
import org.koin.android.ext.android.inject
import org.koin.core.parameter.parametersOf
/**
 * Passcode-verification screen.
 *
 * Reads the code typed into [editText] and forwards it to the Koin-injected
 * [VerifyActor]; renders the actor's [VerifyScene] callbacks as an error
 * toast and a loading spinner.
 */
class VerifyActivity : BaseActivity<VerifyActor>(), VerifyScene {
    // Actor resolved by Koin; this activity is passed as the scene parameter.
    override val actor: VerifyActor? by inject {
        parametersOf(this)
    }
    override val layoutRes: Int
        get() = R.layout.sketch_verify
    // Lazy so findViewById runs only after the layout has been inflated.
    private val editText by lazy {
        findViewById<EditText>(R.id.passcode_edit)
    }
    override fun onViewLoad() {
        super.onViewLoad()
        findViewById<View>(R.id.verify_button)?.setOnClickListener {
            // NOTE(review): if editText resolves to null, `?.text.toString()`
            // produces the literal string "null" -- confirm the view id exists.
            actor?.onVerifyClick(editText?.text.toString().trim())
        }
    }
    // Shown when the actor reports an incorrect verification code.
    override fun showCodeError() {
        showError(R.string.error_wrong_code)
    }
    // Navigation to the profile screen is not implemented yet.
    override fun showProfile(user: User?) {
        TODO("not implemented") //To change body of created functions use File | Settings | File Templates.
    }
    override fun showSpinner() {
        showLoading()
    }
    override fun hideSpinner() {
        hideLoading()
    }
} | 27.369565 | 107 | 0.691819 |
6534588d4ecf52898349353cb160282e5abd6d54 | 3,360 | py | Python | graalpython/benchmarks/src/meso/euler31.py | muellren/graalpython | 9104425805f1d38ad7a521c75e53798a3b79b4f0 | [
"UPL-1.0",
"Apache-2.0",
"OpenSSL"
] | 1 | 2019-05-28T13:04:32.000Z | 2019-05-28T13:04:32.000Z | graalpython/benchmarks/src/meso/euler31.py | muellren/graalpython | 9104425805f1d38ad7a521c75e53798a3b79b4f0 | [
"UPL-1.0",
"Apache-2.0",
"OpenSSL"
] | null | null | null | graalpython/benchmarks/src/meso/euler31.py | muellren/graalpython | 9104425805f1d38ad7a521c75e53798a3b79b4f0 | [
"UPL-1.0",
"Apache-2.0",
"OpenSSL"
] | null | null | null | #!/usr/bin/env python
# Copyright 2008-2010 Isaac Gouy
# Copyright (c) 2013, 2014, Regents of the University of California
# Copyright (c) 2017, 2018, Oracle and/or its affiliates.
# All rights reserved.
#
# Revised BSD license
#
# This is a specific instance of the Open Source Initiative (OSI) BSD license
# template http://www.opensource.org/licenses/bsd-license.php
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# Neither the name of "The Computer Language Benchmarks Game" nor the name of
# "The Computer Language Shootout Benchmarks" nor the name "nanobench" nor the
# name "bencher" nor the names of its contributors may be used to endorse or
# promote products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#runas solve()
#unittest.skip recursive generator
#pythran export solve()
# 01/08/14 modified for benchmarking by Wei Zhang
COINS = [1, 2, 5, 10, 20, 50, 100, 200]
# test
def _sum(iterable):
sum = None
for i in iterable:
if sum is None:
sum = i
else:
sum += i
return sum
def balance(pattern):
    # Total value in pence of `pattern`, where pattern[x] is how many of
    # COINS[x] are used.  An empty pattern yields None (via _sum).
    return _sum(COINS[x]*pattern[x] for x in range(0, len(pattern)))
def gen(pattern, coinnum, num):
    # Recursively yield every coin-count tuple that extends `pattern` with a
    # count for COINS[coinnum] (and, recursively, the larger coins) so the
    # total value is exactly `num` pence.
    coin = COINS[coinnum]
    for p in range(0, num//coin + 1):
        newpat = pattern[:coinnum] + (p,)
        bal = balance(newpat)
        if bal > num:
            # Totals only grow with p, so every later p overshoots as well.
            return
        elif bal == num:
            yield newpat
        elif coinnum < len(COINS)-1:
            # Explicit re-yield loop instead of `yield from` -- presumably
            # kept for pythran/older-interpreter compatibility (see header);
            # confirm before modernizing.
            for pat in gen(newpat, coinnum+1, num):
                yield pat
def solve(total):
    '''
    In England the currency is made up of pound, P, and pence, p, and there are eight coins in general circulation:
    1p, 2p, 5p, 10p, 20p, 50p, P1 (100p) and P2 (200p).
    It is possible to make P2 in the following way:
    1 P1 + 1 50p + 2 20p + 1 5p + 1 2p + 3 1p
    How many different ways can P2 be made using any number of coins?

    Returns the number of combinations that make `total` pence.
    '''
    # Count all generated patterns.  Note: _sum has no zero seed, so this
    # returns None (not 0) when no combination exists.
    return _sum(1 for pat in gen((), 0, total))
def measure(num):
    # One benchmark iteration: solve for `num` pence and print the count so
    # the harness output can be checked for correctness.
    result = solve(num)
    print('total number of different ways: ', result)
def __benchmark__(num=200):
    # Benchmark-harness entry point; 200p is the classic Project Euler 31
    # problem size.
    measure(num)
| 34.639175 | 115 | 0.698214 |
8140d895c5819dfb77454f3bfe78bc8e30085b14 | 5,479 | kt | Kotlin | lbmlib/src/main/kotlin/com/alekseyzhelo/lbm/boundary/SlidingBoundary.kt | AlekseyZhelo/LBM | 09f583fd6bdf9ee196c3b2d0e569fe1cfc9ba8bb | [
"Apache-2.0"
] | null | null | null | lbmlib/src/main/kotlin/com/alekseyzhelo/lbm/boundary/SlidingBoundary.kt | AlekseyZhelo/LBM | 09f583fd6bdf9ee196c3b2d0e569fe1cfc9ba8bb | [
"Apache-2.0"
] | null | null | null | lbmlib/src/main/kotlin/com/alekseyzhelo/lbm/boundary/SlidingBoundary.kt | AlekseyZhelo/LBM | 09f583fd6bdf9ee196c3b2d0e569fe1cfc9ba8bb | [
"Apache-2.0"
] | null | null | null | package com.alekseyzhelo.lbm.boundary
import com.alekseyzhelo.lbm.core.lattice.LatticeD2
/**
 * D2Q9 sliding-wall (moving-lid) boundary condition for the LBM lattice.
 *
 * Populations are bounced back at the wall while the wall's tangential
 * motion ([slideVelocity]) is injected by redistributing the two diagonal
 * populations with weights p/q derived from the wall speed and the local
 * density.  Only TOP and BOTTOM walls are implemented.
 */
class SlidingBoundary(
    position: BoundaryPosition, lattice: LatticeD2<*>,
    x0: Int, x1: Int, y0: Int, y1: Int,
    val slideVelocity: DoubleArray
) : BoundaryCondition(position, lattice, x0, x1, y0, y1) {
    override fun getType(): BoundaryType {
        return BoundaryType.SLIDING
    }
    // Applies only the bounce-back part for a single cell (i, j), writing the
    // reflected populations into the cell's streaming buffer fBuf.
    override fun streamOutgoing(i: Int, j: Int) {
        // NOTE(review): every branch here selects slideVelocity[1], while
        // boundaryStream() below uses slideVelocity[0]; for LEFT/RIGHT walls
        // the tangential component should presumably differ -- confirm.
        val coordVel = when (position) {
            BoundaryPosition.LEFT -> slideVelocity[1]
            BoundaryPosition.TOP -> slideVelocity[1]
            BoundaryPosition.RIGHT -> slideVelocity[1]
            BoundaryPosition.BOTTOM -> slideVelocity[1]
        }
        // computeRhoU()[0] appears to be the local density rho -- TODO confirm.
        // q/p split the wall momentum between the two diagonal populations.
        val q = coordVel / (2.0 * lattice.cells[i][j].computeRhoU()[0])
        val p = 1.0 - q
        when (position) {
            BoundaryPosition.TOP -> {
                // Reflect f2 -> f4 and mix f5/f6 into f7/f8 with the wall momentum.
                lattice.cells[i][j].fBuf[4] = lattice.cells[i][j].f[2]
                lattice.cells[i][j].fBuf[7] = p * lattice.cells[i][j].f[5] + q * lattice.cells[i][j].f[6]
                lattice.cells[i][j].fBuf[8] = q * lattice.cells[i][j].f[5] + p * lattice.cells[i][j].f[6]
            }
            BoundaryPosition.BOTTOM -> {
                // Mirror image of the TOP case: f4 -> f2, f7/f8 mixed into f5/f6.
                lattice.cells[i][j].fBuf[2] = lattice.cells[i][j].f[4]
                lattice.cells[i][j].fBuf[5] = p * lattice.cells[i][j].f[7] + q * lattice.cells[i][j].f[8]
                lattice.cells[i][j].fBuf[6] = q * lattice.cells[i][j].f[7] + p * lattice.cells[i][j].f[8]
            }
            else -> {
                throw UnsupportedOperationException("not implemented yet")
            }
        }
    }
    // Streams the interior-bound populations of every boundary cell normally
    // and applies the sliding-wall bounce-back to the wall-bound ones.
    // Statement order matters: in-lattice streaming writes must happen before
    // the cell's own fBuf slots are overwritten.
    override fun boundaryStream() {
        when (position) {
            BoundaryPosition.TOP -> {
                for (i in x0..x1) {
                    val iPlus = i + 1
                    val iSub = i - 1
                    for (j in y0..y1) {
                        val jSub = j - 1
                        // TODO: try: only 4, 7, 8 are inside and should be considered for rho and U
                        val q = slideVelocity[0] / (2.0 * lattice.cells[i][j].computeRhoU()[0])
                        val p = 1.0 - q
                        // Stream populations that stay inside the lattice.
                        lattice.cells[i][j].fBuf[0] = lattice.cells[i][j].f[0]
                        lattice.cells[iPlus][j].fBuf[1] = lattice.cells[i][j].f[1]
                        lattice.cells[iSub][j].fBuf[3] = lattice.cells[i][j].f[3]
                        lattice.cells[i][jSub].fBuf[4] = lattice.cells[i][j].f[4]
                        lattice.cells[iSub][jSub].fBuf[7] = lattice.cells[i][j].f[7]
                        lattice.cells[iPlus][jSub].fBuf[8] = lattice.cells[i][j].f[8]
                        // Bounce the wall-bound populations back with momentum.
                        lattice.cells[i][j].fBuf[4] = lattice.cells[i][j].f[2]
                        lattice.cells[i][j].fBuf[7] = p * lattice.cells[i][j].f[5] + q * lattice.cells[i][j].f[6]
                        lattice.cells[i][j].fBuf[8] = q * lattice.cells[i][j].f[5] + p * lattice.cells[i][j].f[6]
                    }
                }
            }
            BoundaryPosition.BOTTOM -> {
                for (i in x0..x1) {
                    val iPlus = i + 1
                    val iSub = i - 1
                    for (j in y0..y1) {
                        val jPlus = j + 1
                        val q =
                            slideVelocity[0] / (2.0 * lattice.cells[i][j].computeRhoU()[0]) // TODO: correct as approximation to rho_w * u_w / (2.0 * f_8 - f_7)?
                        val p = 1.0 - q
                        // Stream populations that stay inside the lattice.
                        lattice.cells[i][j].fBuf[0] = lattice.cells[i][j].f[0]
                        lattice.cells[iPlus][j].fBuf[1] = lattice.cells[i][j].f[1]
                        lattice.cells[i][jPlus].fBuf[2] = lattice.cells[i][j].f[2]
                        lattice.cells[iSub][j].fBuf[3] = lattice.cells[i][j].f[3]
                        lattice.cells[iPlus][jPlus].fBuf[5] = lattice.cells[i][j].f[5]
                        lattice.cells[iSub][jPlus].fBuf[6] = lattice.cells[i][j].f[6]
                        // Bounce the wall-bound populations back with momentum.
                        lattice.cells[i][j].fBuf[2] = lattice.cells[i][j].f[4]
                        lattice.cells[i][j].fBuf[5] = p * lattice.cells[i][j].f[7] + q * lattice.cells[i][j].f[8]
                        lattice.cells[i][j].fBuf[6] = q * lattice.cells[i][j].f[7] + p * lattice.cells[i][j].f[8]
                    }
                }
            }
            else -> {
                throw UnsupportedOperationException("not implemented yet")
            }
        }
    }
    // In all four overloads the U argument is deliberately ignored: a sliding
    // wall always imposes slideVelocity on its cells -- TODO confirm intended.
    override fun defineBoundaryRhoU(rho: Double, U: DoubleArray) {
        for (i in x0..x1) {
            for (j in y0..y1) {
                lattice.cells[i][j].defineRhoU(rho, slideVelocity)
            }
        }
    }
    override fun defineBoundaryRhoU(rho: Double, U: (i: Int, j: Int) -> DoubleArray) {
        for (i in x0..x1) {
            for (j in y0..y1) {
                lattice.cells[i][j].defineRhoU(rho, slideVelocity)
            }
        }
    }
    override fun defineBoundaryRhoU(rho: (i: Int, j: Int) -> Double, U: DoubleArray) {
        for (i in x0..x1) {
            for (j in y0..y1) {
                lattice.cells[i][j].defineRhoU(rho(i, j), slideVelocity)
            }
        }
    }
    override fun defineBoundaryRhoU(rho: (i: Int, j: Int) -> Double, U: (i: Int, j: Int) -> DoubleArray) {
        for (i in x0..x1) {
            for (j in y0..y1) {
                lattice.cells[i][j].defineRhoU(rho(i, j), slideVelocity)
            }
        }
    }
} | 41.195489 | 161 | 0.469246 |
53ac6825d1f293f3458feb35487365dacd9c68f4 | 234 | java | Java | location-tracker/src/test/java/com/rsakin/getaxi/locationtracker/LocationTrackerApplicationTests.java | ramazansakin/GeTaxiApp2 | 5ded3802597c8f1eb211a03a809dc159aef5982d | [
"MIT"
] | 10 | 2021-05-01T01:02:35.000Z | 2021-12-21T06:41:40.000Z | location-tracker/src/test/java/com/rsakin/getaxi/locationtracker/LocationTrackerApplicationTests.java | My-Organization-Sample-Test-Usage/GeTaxiApp2 | 5ded3802597c8f1eb211a03a809dc159aef5982d | [
"MIT"
] | 77 | 2020-12-28T17:27:01.000Z | 2021-09-20T06:48:43.000Z | location-tracker/src/test/java/com/rsakin/getaxi/locationtracker/LocationTrackerApplicationTests.java | My-Organization-Sample-Test-Usage/GeTaxiApp2 | 5ded3802597c8f1eb211a03a809dc159aef5982d | [
"MIT"
] | 6 | 2021-02-04T15:26:51.000Z | 2021-09-21T06:39:41.000Z | package com.rsakin.getaxi.locationtracker;
import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.SpringBootTest;
// Smoke test: boots the full Spring application context and fails if any
// bean wiring or configuration error prevents the location-tracker service
// from starting.
@SpringBootTest
class LocationTrackerApplicationTests {
    // Intentionally empty: loading the context is the assertion.
    @Test
    void contextLoads() {
    }
}
| 16.714286 | 60 | 0.803419 |
cb3588a2e72ccb867d78d6cc9cec6b4fa7e3ab33 | 398 | go | Go | pkg/csv/csv.go | mchirico/zcovid | 0561f03a907c27089366053a24edb215bb8f10fb | [
"Apache-2.0"
] | null | null | null | pkg/csv/csv.go | mchirico/zcovid | 0561f03a907c27089366053a24edb215bb8f10fb | [
"Apache-2.0"
] | 3 | 2020-11-11T18:52:36.000Z | 2020-11-12T21:53:31.000Z | pkg/csv/csv.go | mchirico/zcovid | 0561f03a907c27089366053a24edb215bb8f10fb | [
"Apache-2.0"
] | null | null | null | package csv
import (
"encoding/csv"
"github.com/mchirico/zcovid/pkg/httputils"
"log"
"strings"
)
// GetCSVRecords downloads the document at url and parses the response body
// as CSV, returning all records.
//
// On failure it returns a nil slice and the error. Previously an HTTP
// failure returned (nil, nil) -- indistinguishable from an empty file -- and
// a CSV parse failure called log.Fatal, terminating the whole process from
// library code even though the signature already returns an error.
func GetCSVRecords(url string) (records [][]string, err error) {
	h := httputils.NewHTTP()
	r, err := h.Get(url)
	if err != nil {
		// Propagate the download error instead of swallowing it.
		return nil, err
	}
	rr := csv.NewReader(strings.NewReader(string(r)))
	records, err = rr.ReadAll()
	if err != nil {
		// Log for visibility, but let the caller decide how to handle it.
		log.Printf("GetCSVRecords: failed to parse CSV from %s: %v", url, err)
		return nil, err
	}
	return records, nil
}
| 14.740741 | 64 | 0.648241 |
61365f4d60179aec725208d1c8603ff8875acce3 | 85 | kt | Kotlin | j2k/testData/fileOrElement/classExpression/complexExample.kt | errandir/kotlin | 09435ad0e305b6630146dae426c88952ec5e532d | [
"Apache-2.0"
] | 1 | 2017-09-10T07:15:28.000Z | 2017-09-10T07:15:28.000Z | j2k/tests/testData/ast/classExpression/complexExample.kt | hhariri/kotlin | d150bfbce0e2e47d687f39e2fd233ea55b2ccd26 | [
"Apache-2.0"
] | null | null | null | j2k/tests/testData/ast/classExpression/complexExample.kt | hhariri/kotlin | d150bfbce0e2e47d687f39e2fd233ea55b2ccd26 | [
"Apache-2.0"
] | null | null | null | val constrArgTypes = array<Class<*>>(javaClass<Array<String>>(), javaClass<String>()) | 85 | 85 | 0.729412 |
bcb9c62a24b30a8ab2cab649824b3f79ac4400a8 | 819 | js | JavaScript | src/control/selection/RadioButton.js | skylark-integration/skylark-zebra | 8455099bf05c2255328738f38c97445c59a945ae | [
"MIT"
] | null | null | null | src/control/selection/RadioButton.js | skylark-integration/skylark-zebra | 8455099bf05c2255328738f38c97445c59a945ae | [
"MIT"
] | null | null | null | src/control/selection/RadioButton.js | skylark-integration/skylark-zebra | 8455099bf05c2255328738f38c97445c59a945ae | [
"MIT"
] | null | null | null | define([
"qscript/lang/Class",
"qfacex/windows/control/primitives/selection/RadioButtonBoxView"
], function(Class, DjtRadioButton,){
pkg.RadioView = Class(View, [
function() {
this.$this("rgb(15, 81, 205)", "rgb(65, 131, 255)");
},
function(col1, col2) {
this.color1 = col1;
this.color2 = col2;
},
function $prototype() {
this.paint = function(g,x,y,w,h,d){
g.beginPath();
g.fillStyle = this.color1;
g.arc(x + w/2, y + h/2 , w/3 , 0, 2* Math.PI, 1, false);
g.fill();
g.beginPath();
g.fillStyle = this.color2;
g.arc(x + w/2, y + h/2 , w/4 , 0, 2* Math.PI, 1, false);
g.fill();
};
}
]);
return Class.declare([DjtRadioButton], {
});
});
| 24.088235 | 68 | 0.490842 |
446d194104b3d762a597d152208b5266b6ef5e93 | 2,188 | swift | Swift | ofo_2/ofo_2/RightButtonsView.swift | yanmingLiu/swift-ofo | 9d2fee82d43a84600649b1ec0e8b206502696421 | [
"MIT"
] | null | null | null | ofo_2/ofo_2/RightButtonsView.swift | yanmingLiu/swift-ofo | 9d2fee82d43a84600649b1ec0e8b206502696421 | [
"MIT"
] | null | null | null | ofo_2/ofo_2/RightButtonsView.swift | yanmingLiu/swift-ofo | 9d2fee82d43a84600649b1ec0e8b206502696421 | [
"MIT"
] | null | null | null | //
// RightButtonsView.swift
// ofo_2
//
// Created by lym on 2017/12/27.
// Copyright © 2017年 liuyanming. All rights reserved.
//
import UIKit
class RightButtonsView: UIView {
var loctionBtn = UIButton()
override init(frame: CGRect) {
super.init(frame: frame)
setupSubviews()
}
required init?(coder aDecoder: NSCoder) {
super.init(coder: aDecoder)
}
func setupSubviews() -> Void {
let btnWH = 44
let margin: CGFloat = 15
let serviceBtnFrame = CGRect(x: 0, y: 0, width: btnWH, height: btnWH)
let serviceBtn = creatButton(frame: serviceBtnFrame, image: #imageLiteral(resourceName: "rightBottomImage"))
addSubview(serviceBtn)
let loctionBtnFrame = CGRect(x: 0, y: Int(serviceBtn.frame.maxY + margin), width: btnWH, height: btnWH)
loctionBtn = creatButton(frame: loctionBtnFrame, image: #imageLiteral(resourceName: "leftBottomImage"))
addSubview(loctionBtn)
NotificationCenter.default.addObserver(self, selector: .panelViewMove, name: NSNotification.Name(rawValue: "panelViewOpenOrClose"), object: nil)
}
private func creatButton(frame: CGRect, image: UIImage) -> UIButton {
let serviceBtn = UIButton(frame: frame)
serviceBtn.setImage(image, for: .normal)
serviceBtn.layer.shadowOpacity = 0.4
serviceBtn.layer.shadowRadius = 3
serviceBtn.layer.shadowColor = UIColor.lightGray.cgColor
serviceBtn.layer.shadowOffset = CGSize(width: 0.5, height: 0.5)
return serviceBtn
}
@objc func panelViewMove (notofication: Notification) {
let minY = notofication.userInfo!["minY"] as! CGFloat
print(minY)
let margin: CGFloat = 80
UIView.animate(withDuration: 0.3, animations: {
self.frame.origin.y = minY - margin
})
}
/// 移除通知
deinit {
NotificationCenter.default.removeObserver(self)
}
}
private extension Selector {
static let panelViewMove = #selector(RightButtonsView.panelViewMove(notofication:))
}
| 28.415584 | 152 | 0.628428 |
135e2fd945367f7cf6610fb8f44ff25c4239b6c3 | 914 | h | C | Modules/zcb_sign.h | bodhix/Python | cb2d7e29d34eefa99206ea2b7f762a39453821b1 | [
"PSF-2.0"
] | null | null | null | Modules/zcb_sign.h | bodhix/Python | cb2d7e29d34eefa99206ea2b7f762a39453821b1 | [
"PSF-2.0"
] | null | null | null | Modules/zcb_sign.h | bodhix/Python | cb2d7e29d34eefa99206ea2b7f762a39453821b1 | [
"PSF-2.0"
] | null | null | null | #define SIGN_CBZHANG_L1 " _____ _____ ______ _ _ ___ __ _ _____ "
#define SIGN_CBZHANG_L2 " / ___| | _ \\ |___ / | | | | / | | \\ | | / ___| "
#define SIGN_CBZHANG_L3 " | | | |_| | / / | |_| | / /| | | \\| | | | "
#define SIGN_CBZHANG_L4 " | | | _ { / / | _ | / / | | | |\\ | | | _ "
#define SIGN_CBZHANG_L5 " | |___ | |_| | / /__ | | | | / / | | | | \\ | | |_| | "
#define SIGN_CBZHANG_L6 " \\_____| |_____/ /_____| |_| |_| /_/ |_| |_| \\_| \\_____/ "
#define PRINT_SIGN_CBZHANG(io) \
{ \
fprintf(io, "%s\n", SIGN_CBZHANG_L1); \
fprintf(io, "%s\n", SIGN_CBZHANG_L2); \
fprintf(io, "%s\n", SIGN_CBZHANG_L3); \
fprintf(io, "%s\n", SIGN_CBZHANG_L4); \
fprintf(io, "%s\n", SIGN_CBZHANG_L5); \
fprintf(io, "%s\n", SIGN_CBZHANG_L6); \
fprintf(io, "\n"); \
}
| 48.105263 | 92 | 0.432166 |
1fb3fc18865894e0091022e6f1f1dc00e66f25b6 | 108 | css | CSS | projects/profile.print.css | towbi/towbi.github.io | 1d19c69999b45b11d19f62e79bde25537036974d | [
"MIT"
] | null | null | null | projects/profile.print.css | towbi/towbi.github.io | 1d19c69999b45b11d19f62e79bde25537036974d | [
"MIT"
] | null | null | null | projects/profile.print.css | towbi/towbi.github.io | 1d19c69999b45b11d19f62e79bde25537036974d | [
"MIT"
] | null | null | null | .cell {
page-break-inside: avoid; }
.cell.head .inner a, .cell.head .inner input {
display: none; }
| 21.6 | 48 | 0.62037 |
f721480ebd35278f7cb6182c74b8c25c849a355c | 928 | h | C | peerd/constants.h | doitmovin/chromiumos-platform2 | 6462aaf43072307b5a40eb045a89e473381b5fda | [
"BSD-3-Clause"
] | 5 | 2019-01-19T15:38:48.000Z | 2021-10-06T03:59:46.000Z | peerd/constants.h | doitmovin/chromiumos-platform2 | 6462aaf43072307b5a40eb045a89e473381b5fda | [
"BSD-3-Clause"
] | null | null | null | peerd/constants.h | doitmovin/chromiumos-platform2 | 6462aaf43072307b5a40eb045a89e473381b5fda | [
"BSD-3-Clause"
] | 2 | 2021-01-26T12:37:19.000Z | 2021-05-18T13:37:57.000Z | // Copyright 2014 The Chromium OS Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef PEERD_CONSTANTS_H_
#define PEERD_CONSTANTS_H_
namespace peerd {
namespace constants {
extern const char kSerbusServiceId[];
namespace mdns {
// The record type of a serbus record.
extern const char kSerbusServiceType[];
// Keys inside the Serbus TXT record.
extern const char kSerbusVersion[];
extern const char kSerbusPeerId[];
extern const char kSerbusName[];
extern const char kSerbusNote[];
extern const char kSerbusServiceList[];
extern const char kSerbusServiceDelimiter[];
} // namespace mdns
namespace options {
namespace service {
extern const char kMDNSSectionName[];
extern const char kMDNSPort[];
} // namespace service
} // namespace options
} // namespace constants
} // namespace peerd
#endif // PEERD_CONSTANTS_H_
| 20.622222 | 73 | 0.761853 |
5cf0c98dfe684391a1a3174cd33ef373ab627e2a | 631 | css | CSS | src/components/Search/Search.module.css | kaoru-nk/nextjs-microcms-blog | c729966953c6e80d72efd8d0314aa6e9d024ee1d | [
"Apache-2.0"
] | 42 | 2021-05-10T04:11:31.000Z | 2022-03-29T16:09:28.000Z | src/components/Search/Search.module.css | kaoru-nk/nextjs-microcms-blog | c729966953c6e80d72efd8d0314aa6e9d024ee1d | [
"Apache-2.0"
] | 8 | 2021-08-30T08:46:24.000Z | 2022-02-17T21:50:23.000Z | src/components/Search/Search.module.css | kaoru-nk/nextjs-microcms-blog | c729966953c6e80d72efd8d0314aa6e9d024ee1d | [
"Apache-2.0"
] | 5 | 2021-09-14T13:52:34.000Z | 2022-03-19T00:23:33.000Z | .label {
display: block;
font-size: 14px;
font-weight: bold;
color: var(--color-text-sub);
}
.input {
border: 1px solid var(--color-border);
width: 100%;
box-sizing: border-box;
margin-top: 5px;
border-radius: 5px;
height: 40px;
font-size: 16px;
background: url('/images/icon_search.svg') no-repeat 10px center, var(--color-bg-purple-light);
padding-left: 40px;
box-shadow: none;
-webkit-appearance: none;
transition: box-shadow 0.2s ease;
&:hover {
box-shadow: 0 1px 4px rgba(0, 0, 0, 0.1) inset;
}
&:focus {
outline: none;
box-shadow: 0 1px 4px rgba(0, 0, 0, 0.1) inset;
}
}
| 21.033333 | 97 | 0.630745 |
74385d4bc8e8b060f952b9e64ee8c117c8d63b33 | 220 | h | C | src/OGFrp.Linux.Common/libs.h | OldGodShen/OGFrp | 58458f622c9a6b3e93b1ac6c62e8118fc6b787ac | [
"Apache-2.0"
] | 5 | 2021-08-09T06:49:24.000Z | 2022-01-17T08:24:40.000Z | src/OGFrp.Linux.Common/libs.h | OldGodShen/OGFrp | 58458f622c9a6b3e93b1ac6c62e8118fc6b787ac | [
"Apache-2.0"
] | null | null | null | src/OGFrp.Linux.Common/libs.h | OldGodShen/OGFrp | 58458f622c9a6b3e93b1ac6c62e8118fc6b787ac | [
"Apache-2.0"
] | null | null | null | #pragma once
#include <stdio.h>
#include <string.h>
#include <sys/statfs.h>
#include <limits.h>
#include <unistd.h>
#include <iostream>
#include <string>
/// get executable path
std::string get_cur_executable_path_(); | 16.923077 | 39 | 0.727273 |
945f4d8e6ab03fb74b43b38ebba90b9ccfb0885b | 196 | rs | Rust | pipelined/bevy_render2/src/renderer/mod.rs | bch29/bevy | c53efeed6fb8b1e41737ea4502051fa4cf535784 | [
"MIT"
] | null | null | null | pipelined/bevy_render2/src/renderer/mod.rs | bch29/bevy | c53efeed6fb8b1e41737ea4502051fa4cf535784 | [
"MIT"
] | null | null | null | pipelined/bevy_render2/src/renderer/mod.rs | bch29/bevy | c53efeed6fb8b1e41737ea4502051fa4cf535784 | [
"MIT"
] | null | null | null | mod headless_render_resource_context;
mod render_context;
mod render_resource_context;
pub use headless_render_resource_context::*;
pub use render_context::*;
pub use render_resource_context::*;
| 24.5 | 44 | 0.841837 |
f2233743688c41c69e95f729bf297587f9267a58 | 7,209 | lua | Lua | libs/ffi.lua | LoveBabyForeve/cgmsv-lua | f7a1756237f4180acf5ee03be03e32263f7e381c | [
"Apache-2.0"
] | null | null | null | libs/ffi.lua | LoveBabyForeve/cgmsv-lua | f7a1756237f4180acf5ee03be03e32263f7e381c | [
"Apache-2.0"
] | null | null | null | libs/ffi.lua | LoveBabyForeve/cgmsv-lua | f7a1756237f4180acf5ee03be03e32263f7e381c | [
"Apache-2.0"
] | null | null | null | local ffi = require "ffi";
ffi.cdef [[
void Sleep(int ms);
char *strstr(const char *str1, const char *str2);
]];
--- Reads a 32-bit unsigned integer (DWORD) from `addr`.
-- A zero address is treated as a null pointer and yields nil.
function ffi.readMemoryDWORD(addr)
  if addr ~= 0 then
    return ffi.cast("uint32_t*", addr)[0]
  end
  return nil
end
--- Reads a 32-bit signed integer from `addr`; nil when addr is 0 (null).
function ffi.readMemoryInt32(addr)
  if addr == 0 then
    return nil;
  end
  return ffi.cast("int32_t*", addr)[0]
end
--- Writes the 32-bit signed integer `value` to `addr`.
-- Returns false (without writing) for a null address or non-number value,
-- true on success.
function ffi.setMemoryInt32(addr, value)
  if addr == 0 then
    return false;
  end
  if type(value) ~= 'number' then
    return false;
  end
  ffi.cast("int32_t*", addr)[0] = value;
  return true;
end
--- Writes the 32-bit unsigned integer `value` to `addr`.
-- Returns false (without writing) for a null address or non-number value,
-- true on success.
function ffi.setMemoryDWORD(addr, value)
  if addr == 0 then
    return false;
  end
  if type(value) ~= 'number' then
    return false;
  end
  ffi.cast("uint32_t*", addr)[0] = value;
  return true;
end
--- Writes a single byte `value` to `addr`.
-- Returns false (without writing) for a null address or non-number value,
-- true on success.
-- NOTE(review): there is no range check; values outside 0-255 are converted
-- per C uint8_t semantics by the cast -- callers should range-check.
function ffi.setMemoryByte(addr, value)
  if addr == 0 then
    return false;
  end
  if type(value) ~= 'number' then
    return false;
  end
  ffi.cast("uint8_t*", addr)[0] = value;
  return true;
end
--- Reads a 16-bit unsigned integer (WORD) from `addr`; nil when addr is 0.
function ffi.readMemoryWORD(addr)
  if addr == 0 then
    return nil;
  end
  return ffi.cast("uint16_t*", addr)[0]
end
--- Reads a single unsigned byte from `addr`; nil when addr is 0 (null).
function ffi.readMemoryBYTE(addr)
  if addr == 0 then
    return nil;
  end
  return ffi.cast("uint8_t*", addr)[0]
end
--- Reads a NUL-terminated C string from `addr`.
-- Returns nil for addresses outside the valid 32-bit range or when the
-- cast yields a null pointer.
function ffi.readMemoryString(addr)
  if addr <= 0 or addr >= 0xffffffff then
    return nil;
  end
  local d = ffi.cast("char*", addr);
  if d == nil then
    return nil;
  end
  -- Reuse the pointer computed above; the original performed the same
  -- cast a second time here.
  return ffi.string(d)
end
--HOOKS
local hook = { hooks = {} }
ffi.cdef [[
int VirtualProtect(void* lpAddress, unsigned long dwSize, unsigned long flNewProtect, unsigned long* lpflOldProtect);
]]
function hook.new(cast, callback, hook_addr, size)
local _size = size or 5
local new_hook = {}
local detour_addr = tonumber(ffi.cast('intptr_t', ffi.cast('void*', ffi.cast(cast, callback))))
local hookFnPtr = ffi.cast('void*', hook_addr)
local old_prot = ffi.new('unsigned long[1]')
local old_prot2 = ffi.new('unsigned long[1]')
local org_bytes = ffi.new('uint8_t[?]', _size + 10)
ffi.C.VirtualProtect(org_bytes, _size + 10, 0x40, old_prot)
ffi.copy(org_bytes, hookFnPtr, _size)
org_bytes[_size] = 0xE9;
ffi.cast('uint32_t*', org_bytes + _size + 1)[0] = hook_addr + size - (ffi.cast('uint32_t', org_bytes) + _size + 5);
local hook_bytes = ffi.new('uint8_t[?]', _size, 0x90)
hook_bytes[0] = 0xE9
ffi.cast('uint32_t*', hook_bytes + 1)[0] = detour_addr - hook_addr - 5
ffi.C.VirtualProtect(hookFnPtr, _size, 0x40, old_prot)
ffi.copy(hookFnPtr, hook_bytes, _size)
ffi.C.VirtualProtect(hookFnPtr, _size, old_prot[0], old_prot2)
--local orgHookedPtr = ffi.cast(cast, ffi.cast('void*', ffi.cast('uint32_t', org_bytes)));
--ffi.C.VirtualProtect(org_bytes, _size, old_prot[0], old_prot2)
new_hook.uninstall = function()
ffi.C.VirtualProtect(hookFnPtr, _size, 0x40, old_prot)
ffi.copy(hookFnPtr, org_bytes, _size)
ffi.C.VirtualProtect(hookFnPtr, _size, old_prot[0], old_prot2)
hook.hooks[tostring(hook_addr)] = nil;
end
new_hook.call = ffi.cast(cast, org_bytes)
new_hook.org_bytes = org_bytes;
new_hook.callback = callback;
hook.hooks[tostring(hook_addr)] = new_hook;
return setmetatable(new_hook, {
__call = function(self, ...)
local res = self.call(...)
return res
end
})
end
function hook.inlineHook(cast, callback, hookAddr, size, prefixCode, postCode, config)
if config == nil then
config = {
ignoreOriginCode = false,
}
end
local callbackAddr = type(callback) == 'function' and tonumber(ffi.cast('intptr_t', ffi.cast('void*', ffi.cast(cast, callback)))) or 0;
if type(callback) ~= 'function' and callback then
callbackAddr = callback;
end
local hookFnPtr = ffi.cast('void*', hookAddr)
local oldProtectFlag = ffi.new('unsigned long[1]')
local tmpProtectFlag = ffi.new('unsigned long[1]')
local detourBytes = ffi.new('uint8_t[?]', 2048)
local backup = ffi.new('uint8_t[?]', size)
ffi.C.VirtualProtect(backup, size, 0x40, oldProtectFlag)
-- make backup
ffi.copy(backup, hookFnPtr, size);
-- prefixCode
for i, v in ipairs(prefixCode) do
detourBytes[i - 1] = v;
end
--call callback
if callback then
detourBytes[#prefixCode] = 0xE8;
ffi.cast('uint32_t*', detourBytes + #prefixCode + 1)[0] = callbackAddr - (ffi.cast('uint32_t', detourBytes) + #prefixCode + 5);
else
detourBytes[#prefixCode] = 0x90;
detourBytes[#prefixCode + 1] = 0x90;
detourBytes[#prefixCode + 2] = 0x90;
detourBytes[#prefixCode + 3] = 0x90;
detourBytes[#prefixCode + 4] = 0x90;
end
-- postCode
for i, v in ipairs(postCode) do
detourBytes[i - 1 + 5 + #prefixCode] = v;
end
--origin code
if config.ignoreOriginCode then
for i = 1, size do
detourBytes[#prefixCode + 5 + #postCode + i - 1] = 0x90;
end
else
ffi.copy(detourBytes + #prefixCode + 5 + #postCode, hookFnPtr, size);
end
--jmp to origin code
detourBytes[#prefixCode + 5 + size + #postCode] = 0xE9;
ffi.cast('int32_t*', detourBytes + #prefixCode + 5 + size + #postCode + 1)[0] = ffi.cast('int32_t', (hookAddr + size) - (ffi.cast('int32_t', detourBytes) + size + #postCode + #prefixCode + 10));
--mark memory executable
ffi.C.VirtualProtect(detourBytes, 2048, 0x40, tmpProtectFlag);
--mark memory writable
ffi.C.VirtualProtect(hookFnPtr, size, 0x40, oldProtectFlag)
--jmp to hook code
ffi.cast('uint8_t*', hookAddr)[0] = 0xE9;
ffi.cast('uint32_t*', hookAddr + 1)[0] = ffi.cast('uint32_t', detourBytes) - (hookAddr + 5);
for i = 5, size - 1 do
ffi.cast('uint8_t*', hookAddr + i)[0] = 0x90;
end
--restore memory protect
ffi.C.VirtualProtect(hookFnPtr, size, oldProtectFlag[0], tmpProtectFlag)
local new_hook = {}
new_hook.uninstall = function()
ffi.C.VirtualProtect(hookFnPtr, size, 0x40, oldProtectFlag)
ffi.copy(hookFnPtr, backup, size)
ffi.C.VirtualProtect(hookFnPtr, size, oldProtectFlag[0], tmpProtectFlag)
hook.hooks[tostring(hookAddr)] = nil;
end
--new_hook.call = ffi.cast(cast, detourBytes)
new_hook.detourBytes = detourBytes;
new_hook.backup = backup;
new_hook.callback = callback;
hook.hooks[tostring(hookAddr)] = new_hook;
return new_hook;
--return setmetatable(new_hook, {
-- __call = function(self, ...)
-- local res = self.call(...)
-- return res
-- end
--})
end
--HOOKS
---@param hookFnPtr number
---@param value number[]
function ffi.patch(hookFnPtr, value)
local old_prot = ffi.new('unsigned long[1]')
local old_prot2 = ffi.new('unsigned long[1]')
ffi.C.VirtualProtect(ffi.cast('void*', hookFnPtr), #value, 0x40, old_prot);
for i = 1, #value do
ffi.cast('uint8_t*', hookFnPtr)[i - 1] = value[i];
end
ffi.C.VirtualProtect(ffi.cast('void*', hookFnPtr), #value, old_prot[0], old_prot2);
end
function printAsHex(...)
print(table.unpack(table.map({ ... }, function(e)
if type(e) == 'number' and e > 0 then
return string.formatNumber(e, 16)
end
return e;
end)))
end
ffi.hook = hook;
_G.FFI = ffi;
_G.ffi = ffi;
| 32.768182 | 197 | 0.649466 |
ddd585d85eb71e885e09e9fa706b7ad1e098c048 | 572 | php | PHP | components/Test.php | 20100204/shopping | af3c6b5190d298d7d2b34211a192062764d9e78d | [
"BSD-3-Clause"
] | null | null | null | components/Test.php | 20100204/shopping | af3c6b5190d298d7d2b34211a192062764d9e78d | [
"BSD-3-Clause"
] | null | null | null | components/Test.php | 20100204/shopping | af3c6b5190d298d7d2b34211a192062764d9e78d | [
"BSD-3-Clause"
] | null | null | null | <?php
/**
* Created by PhpStorm.
* User: jason
* Date: 2016/9/9
* Time: 17:25
*/
namespace app\components;
class Test
{
public $db;
public function __construct(Db $db)
{
$this->db = $db;
}
public function getList(){
return $this->db->getList();
}
}
interface Db{
public function getList();
}
class Mysql implements Db{
public function getList(){
return ['name'=>'mysql','age'=>100];
}
}
class Oracle implements Db{
public function getList(){
return ['name'=>'oracle','age'=>100];
}
} | 14.3 | 45 | 0.568182 |
7aa9663068d9ab9c036dd5f364cf7759769698da | 1,305 | sql | SQL | employeedbschema.sql | akirillychev/Employee-Tracker | 1d189b209c24fa188486dfa6e80eef76856b09d1 | [
"MIT"
] | null | null | null | employeedbschema.sql | akirillychev/Employee-Tracker | 1d189b209c24fa188486dfa6e80eef76856b09d1 | [
"MIT"
] | null | null | null | employeedbschema.sql | akirillychev/Employee-Tracker | 1d189b209c24fa188486dfa6e80eef76856b09d1 | [
"MIT"
] | null | null | null | --CREATE DATABASE employee_db;
USE employees_db;
CREATE TABLE department (
id INT AUTO_INCREMENT NOT NULL,
name VARCHAR(30),
PRIMARY KEY (id)
);
CREATE TABLE role (
id INT AUTO_INCREMENT NOT NULL,
title VARCHAR(30),
salary DECIMAL,
department_id INT,
PRIMARY KEY (id),
FOREIGN KEY (department_id) REFERENCES department(id) ON DELETE CASCADE
);
CREATE TABLE employee(
id INT AUTO_INCREMENT NOT NULL,
first_name VARCHAR(30),
last_name VARCHAR(30),
role_id INT NOT NULL,
manager_id INT,
PRIMARY KEY (id),
FOREIGN KEY (role_id) REFERENCES role(id) ON DELETE CASCADE,
FOREIGN KEY (manager_id) REFERENCES employee(id) ON DELETE CASCADE
);
INSERT INTO department (name) VALUES ("Research");
INSERT INTO department (name) VALUES ("Acounting");
INSERT INTO department (name) VALUES ("Finance");
INSERT INTO role (title, salary, department_id) VALUES ("Research Analyst", 80000, 1);
INSERT INTO role (title, salary, department_id) VALUES ("Manager", 10000, 2);
INSERT INTO role (title, salary, department_id) VALUES ("Finance Analyst",20000 ,);
INSERT INTO employee (first_name, last_name, role_id, manager_id) VALUES ("Andrew", "K", 1,);
INSERT INTO employee (first_name, last_name, role_id) VALUES ("Ross", "S", 2);
INSERT INTO employee (first_name, last_name, role_id) VALUES ("Al", "T", 3); | 31.829268 | 93 | 0.744828 |
3fceced9affbb7c8cf58f2e378460fa74e31e7e1 | 13,751 | c | C | freebsd5/sys/dev/mii/mii_physubr.c | MarginC/kame | 2ef74fe29e4cca9b4a87a1d5041191a9e2e8be30 | [
"BSD-3-Clause"
] | 91 | 2015-01-05T15:18:31.000Z | 2022-03-11T16:43:28.000Z | freebsd5/sys/dev/mii/mii_physubr.c | MarginC/kame | 2ef74fe29e4cca9b4a87a1d5041191a9e2e8be30 | [
"BSD-3-Clause"
] | 1 | 2016-02-25T15:57:55.000Z | 2016-02-25T16:01:02.000Z | freebsd5/sys/dev/mii/mii_physubr.c | MarginC/kame | 2ef74fe29e4cca9b4a87a1d5041191a9e2e8be30 | [
"BSD-3-Clause"
] | 21 | 2015-02-07T08:23:07.000Z | 2021-12-14T06:01:49.000Z | /* $NetBSD: mii_physubr.c,v 1.5 1999/08/03 19:41:49 drochner Exp $ */
/*-
* Copyright (c) 1998, 1999, 2000, 2001 The NetBSD Foundation, Inc.
* All rights reserved.
*
* This code is derived from software contributed to The NetBSD Foundation
* by Jason R. Thorpe of the Numerical Aerospace Simulation Facility,
* NASA Ames Research Center.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. All advertising materials mentioning features or use of this software
* must display the following acknowledgement:
* This product includes software developed by the NetBSD
* Foundation, Inc. and its contributors.
* 4. Neither the name of The NetBSD Foundation nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
* ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
/*
* Subroutines common to all PHYs.
*/
#include <sys/param.h>
#include <sys/systm.h>
#include <sys/kernel.h>
#include <sys/socket.h>
#include <sys/errno.h>
#include <sys/module.h>
#include <sys/bus.h>
#include <net/if.h>
#include <net/if_media.h>
#include <dev/mii/mii.h>
#include <dev/mii/miivar.h>
#include "miibus_if.h"
#if !defined(lint)
static const char rcsid[] =
"$FreeBSD: src/sys/dev/mii/mii_physubr.c,v 1.16 2002/05/04 11:08:49 phk Exp $";
#endif
/*
* Media to register setting conversion table. Order matters.
*/
const struct mii_media mii_media_table[MII_NMEDIA] = {
/* None */
{ BMCR_ISO, ANAR_CSMA,
0, },
/* 10baseT */
{ BMCR_S10, ANAR_CSMA|ANAR_10,
0, },
/* 10baseT-FDX */
{ BMCR_S10|BMCR_FDX, ANAR_CSMA|ANAR_10_FD,
0, },
/* 100baseT4 */
{ BMCR_S100, ANAR_CSMA|ANAR_T4,
0, },
/* 100baseTX */
{ BMCR_S100, ANAR_CSMA|ANAR_TX,
0, },
/* 100baseTX-FDX */
{ BMCR_S100|BMCR_FDX, ANAR_CSMA|ANAR_TX_FD,
0, },
/* 1000baseX */
{ BMCR_S1000, ANAR_CSMA,
0, },
/* 1000baseX-FDX */
{ BMCR_S1000|BMCR_FDX, ANAR_CSMA,
0, },
/* 1000baseT */
{ BMCR_S1000, ANAR_CSMA,
GTCR_ADV_1000THDX },
/* 1000baseT-FDX */
{ BMCR_S1000, ANAR_CSMA,
GTCR_ADV_1000TFDX },
};
void
mii_phy_setmedia(struct mii_softc *sc)
{
struct mii_data *mii = sc->mii_pdata;
struct ifmedia_entry *ife = mii->mii_media.ifm_cur;
int bmcr, anar, gtcr;
if (IFM_SUBTYPE(ife->ifm_media) == IFM_AUTO) {
if ((PHY_READ(sc, MII_BMCR) & BMCR_AUTOEN) == 0)
(void) mii_phy_auto(sc);
return;
}
/*
* Table index is stored in the media entry.
*/
KASSERT(ife->ifm_data >=0 && ife->ifm_data < MII_NMEDIA,
("invalid ife->ifm_data (0x%x) in mii_phy_setmedia",
ife->ifm_data));
anar = mii_media_table[ife->ifm_data].mm_anar;
bmcr = mii_media_table[ife->ifm_data].mm_bmcr;
gtcr = mii_media_table[ife->ifm_data].mm_gtcr;
if (mii->mii_media.ifm_media & IFM_ETH_MASTER) {
switch (IFM_SUBTYPE(ife->ifm_media)) {
case IFM_1000_T:
gtcr |= GTCR_MAN_MS|GTCR_ADV_MS;
break;
default:
panic("mii_phy_setmedia: MASTER on wrong media");
}
}
if (ife->ifm_media & IFM_LOOP)
bmcr |= BMCR_LOOP;
PHY_WRITE(sc, MII_ANAR, anar);
PHY_WRITE(sc, MII_BMCR, bmcr);
if (sc->mii_flags & MIIF_HAVE_GTCR)
PHY_WRITE(sc, MII_100T2CR, gtcr);
}
int
mii_phy_auto(struct mii_softc *sc)
{
/*
* Check for 1000BASE-X. Autonegotiation is a bit
* different on such devices.
*/
if (sc->mii_flags & MIIF_IS_1000X) {
uint16_t anar = 0;
if (sc->mii_extcapabilities & EXTSR_1000XFDX)
anar |= ANAR_X_FD;
if (sc->mii_extcapabilities & EXTSR_1000XHDX)
anar |= ANAR_X_HD;
if (sc->mii_flags & MIIF_DOPAUSE) {
/* XXX Asymmetric vs. symmetric? */
anar |= ANLPAR_X_PAUSE_TOWARDS;
}
PHY_WRITE(sc, MII_ANAR, anar);
} else {
uint16_t anar;
anar = BMSR_MEDIA_TO_ANAR(sc->mii_capabilities) |
ANAR_CSMA;
if (sc->mii_flags & MIIF_DOPAUSE)
anar |= ANAR_FC;
PHY_WRITE(sc, MII_ANAR, anar);
if (sc->mii_flags & MIIF_HAVE_GTCR) {
uint16_t gtcr = 0;
if (sc->mii_extcapabilities & EXTSR_1000TFDX)
gtcr |= GTCR_ADV_1000TFDX;
if (sc->mii_extcapabilities & EXTSR_1000THDX)
gtcr |= GTCR_ADV_1000THDX;
PHY_WRITE(sc, MII_100T2CR, gtcr);
}
}
PHY_WRITE(sc, MII_BMCR, BMCR_AUTOEN | BMCR_STARTNEG);
return (EJUSTRETURN);
}
int
mii_phy_tick(struct mii_softc *sc)
{
struct ifmedia_entry *ife = sc->mii_pdata->mii_media.ifm_cur;
struct ifnet *ifp = sc->mii_pdata->mii_ifp;
int reg;
/* Just bail now if the interface is down. */
if ((ifp->if_flags & IFF_UP) == 0)
return (EJUSTRETURN);
/*
* If we're not doing autonegotiation, we don't need to do
* any extra work here. However, we need to check the link
* status so we can generate an announcement if the status
* changes.
*/
if (IFM_SUBTYPE(ife->ifm_media) != IFM_AUTO)
return (0);
/* Read the status register twice; BMSR_LINK is latch-low. */
reg = PHY_READ(sc, MII_BMSR) | PHY_READ(sc, MII_BMSR);
if (reg & BMSR_LINK) {
/*
* See above.
*/
return (0);
}
/*
* Only retry autonegotiation every N seconds.
*/
if (sc->mii_anegticks == 0)
sc->mii_anegticks = 17;
if (++sc->mii_ticks != sc->mii_anegticks)
return (EJUSTRETURN);
sc->mii_ticks = 0;
mii_phy_reset(sc);
mii_phy_auto(sc);
return (0);
}
void
mii_phy_reset(struct mii_softc *sc)
{
int reg, i;
if (sc->mii_flags & MIIF_NOISOLATE)
reg = BMCR_RESET;
else
reg = BMCR_RESET | BMCR_ISO;
PHY_WRITE(sc, MII_BMCR, reg);
/* Wait 100ms for it to complete. */
for (i = 0; i < 100; i++) {
reg = PHY_READ(sc, MII_BMCR);
if ((reg & BMCR_RESET) == 0)
break;
DELAY(1000);
}
if (sc->mii_inst != 0 && ((sc->mii_flags & MIIF_NOISOLATE) == 0))
PHY_WRITE(sc, MII_BMCR, reg | BMCR_ISO);
}
void
mii_phy_down(struct mii_softc *sc)
{
}
void
mii_phy_update(struct mii_softc *sc, int cmd)
{
struct mii_data *mii = sc->mii_pdata;
if (sc->mii_media_active != mii->mii_media_active ||
cmd == MII_MEDIACHG) {
MIIBUS_STATCHG(sc->mii_dev);
sc->mii_media_active = mii->mii_media_active;
}
if (sc->mii_media_status != mii->mii_media_status) {
MIIBUS_LINKCHG(sc->mii_dev);
sc->mii_media_status = mii->mii_media_status;
}
}
/*
* Given an ifmedia word, return the corresponding ANAR value.
*/
int
mii_anar(media)
int media;
{
int rv;
switch (media & (IFM_TMASK|IFM_NMASK|IFM_FDX)) {
case IFM_ETHER|IFM_10_T:
rv = ANAR_10|ANAR_CSMA;
break;
case IFM_ETHER|IFM_10_T|IFM_FDX:
rv = ANAR_10_FD|ANAR_CSMA;
break;
case IFM_ETHER|IFM_100_TX:
rv = ANAR_TX|ANAR_CSMA;
break;
case IFM_ETHER|IFM_100_TX|IFM_FDX:
rv = ANAR_TX_FD|ANAR_CSMA;
break;
case IFM_ETHER|IFM_100_T4:
rv = ANAR_T4|ANAR_CSMA;
break;
default:
rv = 0;
break;
}
return (rv);
}
/*
* Given a BMCR value, return the corresponding ifmedia word.
*/
int
mii_media_from_bmcr(bmcr)
int bmcr;
{
int rv = IFM_ETHER;
if (bmcr & BMCR_S100)
rv |= IFM_100_TX;
else
rv |= IFM_10_T;
if (bmcr & BMCR_FDX)
rv |= IFM_FDX;
return (rv);
}
/*
* Initialize generic PHY media based on BMSR, called when a PHY is
* attached. We expect to be set up to print a comma-separated list
* of media names. Does not print a newline.
*/
void
mii_add_media(struct mii_softc *sc)
{
const char *sep = "";
struct mii_data *mii;
mii = device_get_softc(sc->mii_dev);
if ((sc->mii_capabilities & BMSR_MEDIAMASK) == 0) {
printf("no media present");
return;
}
#define ADD(m, c) ifmedia_add(&mii->mii_media, (m), (c), NULL)
#define PRINT(s) printf("%s%s", sep, s); sep = ", "
if (sc->mii_capabilities & BMSR_10THDX) {
ADD(IFM_MAKEWORD(IFM_ETHER, IFM_10_T, 0, sc->mii_inst), 0);
PRINT("10baseT");
}
if (sc->mii_capabilities & BMSR_10TFDX) {
ADD(IFM_MAKEWORD(IFM_ETHER, IFM_10_T, IFM_FDX, sc->mii_inst),
BMCR_FDX);
PRINT("10baseT-FDX");
}
if (sc->mii_capabilities & BMSR_100TXHDX) {
ADD(IFM_MAKEWORD(IFM_ETHER, IFM_100_TX, 0, sc->mii_inst),
BMCR_S100);
PRINT("100baseTX");
}
if (sc->mii_capabilities & BMSR_100TXFDX) {
ADD(IFM_MAKEWORD(IFM_ETHER, IFM_100_TX, IFM_FDX, sc->mii_inst),
BMCR_S100|BMCR_FDX);
PRINT("100baseTX-FDX");
}
if (sc->mii_capabilities & BMSR_100T4) {
/*
* XXX How do you enable 100baseT4? I assume we set
* XXX BMCR_S100 and then assume the PHYs will take
* XXX watever action is necessary to switch themselves
* XXX into T4 mode.
*/
ADD(IFM_MAKEWORD(IFM_ETHER, IFM_100_T4, 0, sc->mii_inst),
BMCR_S100);
PRINT("100baseT4");
}
if (sc->mii_capabilities & BMSR_ANEG) {
ADD(IFM_MAKEWORD(IFM_ETHER, IFM_AUTO, 0, sc->mii_inst),
BMCR_AUTOEN);
PRINT("auto");
}
#undef ADD
#undef PRINT
}
/*
* Initialize generic PHY media based on BMSR, called when a PHY is
* attached. We expect to be set up to print a comma-separated list
* of media names. Does not print a newline.
*/
void
mii_phy_add_media(struct mii_softc *sc)
{
struct mii_data *mii = sc->mii_pdata;
const char *sep = "";
#define ADD(m, c) ifmedia_add(&mii->mii_media, (m), (c), NULL)
#define PRINT(s) printf("%s%s", sep, s); sep = ", "
if ((sc->mii_flags & MIIF_NOISOLATE) == 0)
ADD(IFM_MAKEWORD(IFM_ETHER, IFM_NONE, 0, sc->mii_inst),
MII_MEDIA_NONE);
/*
* There are different interpretations for the bits in
* HomePNA PHYs. And there is really only one media type
* that is supported.
*/
if (sc->mii_flags & MIIF_IS_HPNA) {
if (sc->mii_capabilities & BMSR_10THDX) {
ADD(IFM_MAKEWORD(IFM_ETHER, IFM_HPNA_1, 0,
sc->mii_inst),
MII_MEDIA_10_T);
PRINT("HomePNA1");
}
return;
}
if (sc->mii_capabilities & BMSR_10THDX) {
ADD(IFM_MAKEWORD(IFM_ETHER, IFM_10_T, 0, sc->mii_inst),
MII_MEDIA_10_T);
PRINT("10baseT");
}
if (sc->mii_capabilities & BMSR_10TFDX) {
ADD(IFM_MAKEWORD(IFM_ETHER, IFM_10_T, IFM_FDX, sc->mii_inst),
MII_MEDIA_10_T_FDX);
PRINT("10baseT-FDX");
}
if (sc->mii_capabilities & BMSR_100TXHDX) {
ADD(IFM_MAKEWORD(IFM_ETHER, IFM_100_TX, 0, sc->mii_inst),
MII_MEDIA_100_TX);
PRINT("100baseTX");
}
if (sc->mii_capabilities & BMSR_100TXFDX) {
ADD(IFM_MAKEWORD(IFM_ETHER, IFM_100_TX, IFM_FDX, sc->mii_inst),
MII_MEDIA_100_TX_FDX);
PRINT("100baseTX-FDX");
}
if (sc->mii_capabilities & BMSR_100T4) {
ADD(IFM_MAKEWORD(IFM_ETHER, IFM_100_T4, 0, sc->mii_inst),
MII_MEDIA_100_T4);
PRINT("100baseT4");
}
if (sc->mii_extcapabilities & EXTSR_MEDIAMASK) {
/*
* XXX Right now only handle 1000SX and 1000TX. Need
* XXX to handle 1000LX and 1000CX some how.
*
* Note since it can take 5 seconds to auto-negotiate
* a gigabit link, we make anegticks 10 seconds for
* all the gigabit media types.
*/
if (sc->mii_extcapabilities & EXTSR_1000XHDX) {
sc->mii_anegticks = 17;
sc->mii_flags |= MIIF_IS_1000X;
ADD(IFM_MAKEWORD(IFM_ETHER, IFM_1000_SX, 0,
sc->mii_inst), MII_MEDIA_1000_X);
PRINT("1000baseSX");
}
if (sc->mii_extcapabilities & EXTSR_1000XFDX) {
sc->mii_anegticks = 17;
sc->mii_flags |= MIIF_IS_1000X;
ADD(IFM_MAKEWORD(IFM_ETHER, IFM_1000_SX, IFM_FDX,
sc->mii_inst), MII_MEDIA_1000_X_FDX);
PRINT("1000baseSX-FDX");
}
/*
* 1000baseT media needs to be able to manipulate
* master/slave mode. We set IFM_ETH_MASTER in
* the "don't care mask" and filter it out when
* the media is set.
*
* All 1000baseT PHYs have a 1000baseT control register.
*/
if (sc->mii_extcapabilities & EXTSR_1000THDX) {
sc->mii_anegticks = 17;
sc->mii_flags |= MIIF_HAVE_GTCR;
mii->mii_media.ifm_mask |= IFM_ETH_MASTER;
ADD(IFM_MAKEWORD(IFM_ETHER, IFM_1000_T, 0,
sc->mii_inst), MII_MEDIA_1000_T);
PRINT("1000baseT");
}
if (sc->mii_extcapabilities & EXTSR_1000TFDX) {
sc->mii_anegticks = 17;
sc->mii_flags |= MIIF_HAVE_GTCR;
mii->mii_media.ifm_mask |= IFM_ETH_MASTER;
ADD(IFM_MAKEWORD(IFM_ETHER, IFM_1000_T, IFM_FDX,
sc->mii_inst), MII_MEDIA_1000_T_FDX);
PRINT("1000baseT-FDX");
}
}
if (sc->mii_capabilities & BMSR_ANEG) {
ADD(IFM_MAKEWORD(IFM_ETHER, IFM_AUTO, 0, sc->mii_inst),
MII_NMEDIA); /* intentionally invalid index */
PRINT("auto");
}
#undef ADD
#undef PRINT
}
int
mii_phy_detach(device_t dev)
{
struct mii_softc *sc;
struct mii_data *mii;
sc = device_get_softc(dev);
mii = device_get_softc(device_get_parent(dev));
mii_phy_down(sc);
sc->mii_dev = NULL;
LIST_REMOVE(sc, mii_list);
return(0);
}
const struct mii_phydesc *
mii_phy_match(const struct mii_attach_args *ma, const struct mii_phydesc *mpd)
{
for (; mpd->mpd_name != NULL; mpd++) {
if (MII_OUI(ma->mii_id1, ma->mii_id2) == mpd->mpd_oui &&
MII_MODEL(ma->mii_id2) == mpd->mpd_model)
return (mpd);
}
return (NULL);
}
| 25.13894 | 81 | 0.688095 |
413cfa0ac10d5e5f978c921ba2e93899837fb6f0 | 1,478 | asm | Assembly | binutils-2.21.1/gcc-4.5.1/gcc/config/xtensa/crtn.asm | cberner12/xv6 | 53c4dfef0d48287ca0d0f9d27eab7a6ed7fee845 | [
"MIT-0"
] | 51 | 2015-01-31T01:51:39.000Z | 2022-02-18T02:01:50.000Z | binutils-2.21.1/gcc-4.5.1/gcc/config/xtensa/crtn.asm | cberner12/xv6 | 53c4dfef0d48287ca0d0f9d27eab7a6ed7fee845 | [
"MIT-0"
] | 7 | 2017-05-29T09:29:00.000Z | 2019-03-11T16:01:39.000Z | binutils-2.21.1/gcc-4.5.1/gcc/config/xtensa/crtn.asm | cberner12/xv6 | 53c4dfef0d48287ca0d0f9d27eab7a6ed7fee845 | [
"MIT-0"
] | 12 | 2015-03-26T08:05:38.000Z | 2022-02-18T02:01:51.000Z | # End of .init and .fini sections.
# Copyright (C) 2003, 2009 Free Software Foundation, Inc.
#
# This file is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GCC is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
#
# Under Section 7 of GPL version 3, you are granted additional
# permissions described in the GCC Runtime Library Exception, version
# 3.1, as published by the Free Software Foundation.
#
# You should have received a copy of the GNU General Public License and
# a copy of the GCC Runtime Library Exception along with this program;
# see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
# <http://www.gnu.org/licenses/>.
# This file just makes sure that the .fini and .init sections do in
# fact return. Users may put any desired instructions in those sections.
# This file is the last thing linked into any executable.
#include "xtensa-config.h"
.section .init
#if XCHAL_HAVE_WINDOWED && !__XTENSA_CALL0_ABI__
retw
#else
l32i a0, sp, 0
addi sp, sp, 32
ret
#endif
.section .fini
#if XCHAL_HAVE_WINDOWED && !__XTENSA_CALL0_ABI__
retw
#else
l32i a0, sp, 0
addi sp, sp, 32
ret
#endif
| 31.446809 | 73 | 0.752368 |
0ba1fd63450f9018e6ce074ff65e67d347246466 | 124 | js | JavaScript | search/classes_a.js | jvegh/TimeAwareComputing.github.io | 8fde9932c6db662851095db2f0b40e6473f30e12 | [
"CC0-1.0"
] | 2 | 2021-03-19T20:24:46.000Z | 2021-03-19T20:24:50.000Z | search/classes_a.js | jvegh/TimeAwareComputing.github.io | 8fde9932c6db662851095db2f0b40e6473f30e12 | [
"CC0-1.0"
] | null | null | null | search/classes_a.js | jvegh/TimeAwareComputing.github.io | 8fde9932c6db662851095db2f0b40e6473f30e12 | [
"CC0-1.0"
] | null | null | null | var searchData=
[
['timedigpmessage_5ftype_489',['TimedIGPMessage_type',['../structTimedIGPMessage__type.html',1,'']]]
];
| 24.8 | 102 | 0.733871 |
98d5fed8190bb0bcbc29d4f583a300db071b54bf | 407 | html | HTML | javawebparts/WEB-INF/src/javawebparts/taglib/uiwidgets/package.html | fzammetti/java-web-parts | 1422b90430c996f8eda26805b3f3d4a69be5a15c | [
"Apache-2.0"
] | 2 | 2019-09-06T23:28:01.000Z | 2020-02-17T20:26:45.000Z | javawebparts/WEB-INF/src/javawebparts/taglib/uiwidgets/package.html | fzammetti/java-web-parts | 1422b90430c996f8eda26805b3f3d4a69be5a15c | [
"Apache-2.0"
] | null | null | null | javawebparts/WEB-INF/src/javawebparts/taglib/uiwidgets/package.html | fzammetti/java-web-parts | 1422b90430c996f8eda26805b3f3d4a69be5a15c | [
"Apache-2.0"
] | null | null | null | <body>
This package contains the UIWidgets taglib. This taglib
is capable of rendering various user interface elements (widgets) to
provide higher levels of UI functionality without any extra work for the
developer to create them. Here you will find things like the Swapper
widget, among others.
<br><br>
This package depends on the following extra packages to compile and run: None.
</body> | 45.222222 | 80 | 0.771499 |
bdf799adf795743a9820388615f174e48d7f9552 | 38,048 | rs | Rust | src/int_blocks.rs | FCG-LLC/hyena | 5354ed498675aee6ed517c75e13b15849cea5c42 | [
"Apache-2.0"
] | null | null | null | src/int_blocks.rs | FCG-LLC/hyena | 5354ed498675aee6ed517c75e13b15849cea5c42 | [
"Apache-2.0"
] | null | null | null | src/int_blocks.rs | FCG-LLC/hyena | 5354ed498675aee6ed517c75e13b15849cea5c42 | [
"Apache-2.0"
] | 2 | 2019-11-12T09:20:35.000Z | 2019-12-16T09:50:53.000Z | use bincode::{serialize, deserialize, Infinite};
use api::ScanComparison;
use scan::BlockScanConsumer;
use catalog::BlockType;
use std::cmp;
// Sorry for this copypasta, it took me bit more time to make templates work and still had some issues, so consider this just a mock
pub trait Scannable<T> {
fn scan(&self, op : ScanComparison, val : &T, scan_consumer : &mut BlockScanConsumer);
}
pub trait Deletable {
fn delete(&mut self, offsets : &Vec<u32>);
}
pub trait Upsertable<T> {
fn multi_upsert(&mut self, offsets : &Vec<u32>, val : &T);
fn upsert(&mut self, data : &Block);
}
pub trait Movable {
fn move_data(&mut self, target : &mut Block, scan_consumer : &BlockScanConsumer);
}
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
pub enum Block {
Int64Dense(Int64DenseBlock),
Int64Sparse(Int64SparseBlock),
Int32Sparse(Int32SparseBlock),
Int16Sparse(Int16SparseBlock),
Int8Sparse(Int8SparseBlock),
StringBlock(StringBlock)
}
impl Block {
pub fn create_block(block_type: &BlockType) -> Block {
match block_type {
&BlockType::Int64Dense => Block::Int64Dense(Int64DenseBlock { data: Vec::new() }),
&BlockType::Int64Sparse => Block::Int64Sparse(Int64SparseBlock { data: Vec::new() }),
&BlockType::Int32Sparse => Block::Int32Sparse(Int32SparseBlock { data: Vec::new() }),
&BlockType::Int16Sparse => Block::Int16Sparse(Int16SparseBlock { data: Vec::new() }),
&BlockType::Int8Sparse => Block::Int8Sparse(Int8SparseBlock { data: Vec::new() }),
&BlockType::String => Block::StringBlock(StringBlock::new())
}
}
pub fn len(&self) -> usize {
match self {
&Block::Int64Dense(ref b) => b.data.len(),
&Block::Int64Sparse(ref b) => b.data.len(),
&Block::Int32Sparse(ref b) => b.data.len(),
&Block::Int16Sparse(ref b) => b.data.len(),
&Block::Int8Sparse(ref b) => b.data.len(),
&Block::StringBlock(ref b) => b.index_data.len()
}
}
pub fn consume(&self, scan_consumer : &BlockScanConsumer) -> Block {
let output_block:Block;
match self {
&Block::Int64Dense(ref b) => {
let mut block = Int64DenseBlock::new();
for index in &scan_consumer.matching_offsets {
block.data.push(b.data[*index as usize]);
}
output_block = Block::Int64Dense(block);
},
&Block::Int64Sparse(ref b) => {
output_block = Block::Int64Sparse(b.filter_scan_results(scan_consumer));
},
&Block::Int32Sparse(ref b) => {
output_block = Block::Int32Sparse(b.filter_scan_results(scan_consumer));
},
&Block::Int16Sparse(ref b) => {
output_block = Block::Int16Sparse(b.filter_scan_results(scan_consumer));
},
&Block::Int8Sparse(ref b) => {
output_block = Block::Int8Sparse(b.filter_scan_results(scan_consumer));
},
&Block::StringBlock(ref b) => {
output_block = Block::StringBlock(b.filter_scan_results(scan_consumer))
},
_ => panic!("Unrecognized block type")
}
output_block
}
}
impl Deletable for Block {
fn delete(&mut self, offsets : &Vec<u32>) {
match self {
&mut Block::StringBlock(ref mut b) => b.delete(offsets),
&mut Block::Int64Sparse(ref mut b) => b.delete(offsets),
&mut Block::Int32Sparse(ref mut b) => b.delete(offsets),
&mut Block::Int16Sparse(ref mut b) => b.delete(offsets),
&mut Block::Int8Sparse(ref mut b) => b.delete(offsets),
_ => panic!("I don't know how to handle such block type")
}
}
}
impl Movable for Block {
fn move_data(&mut self, target: &mut Block, scan_consumer: &BlockScanConsumer) {
match self {
&mut Block::StringBlock(ref mut b) => match target {
&mut Block::StringBlock(ref mut c) => b.move_data(c, scan_consumer),
_ => panic!("Not matching block types")
},
&mut Block::Int64Sparse(ref mut b) => match target {
&mut Block::Int64Sparse(ref mut c) => b.move_data(c, scan_consumer),
_ => panic!("Not matching block types")
},
&mut Block::Int32Sparse(ref mut b) => match target {
&mut Block::Int32Sparse(ref mut c) => b.move_data(c, scan_consumer),
_ => panic!("Not matching block types")
},
&mut Block::Int16Sparse(ref mut b) => match target {
&mut Block::Int16Sparse(ref mut c) => b.move_data(c, scan_consumer),
_ => panic!("Not matching block types")
},
&mut Block::Int8Sparse(ref mut b) => match target {
&mut Block::Int8Sparse(ref mut c) => b.move_data(c, scan_consumer),
_ => panic!("Not matching block types")
},
_ => panic!("I don't know how to handle such block type")
}
}
}
impl Scannable<String> for Block {
fn scan(&self, op: ScanComparison, val: &String, scan_consumer: &mut BlockScanConsumer) {
match self {
&Block::StringBlock(ref b) => b.scan(op, val, scan_consumer),
_ => panic!("Wrong block type for String scan")
}
}
}
impl Upsertable<String> for Block {
fn multi_upsert(&mut self, offsets : &Vec<u32>, val : &String) {
match self {
&mut Block::StringBlock(ref mut b) => b.multi_upsert(offsets, val.as_bytes()),
_ => panic!("Wrong block type for String scan")
}
}
fn upsert(&mut self, data : &Block) {
match self {
&mut Block::StringBlock(ref mut b) => match data {
&Block::StringBlock(ref c) => b.upsert(c),
_ => panic!("Wrong block type")
},
_ => panic!("Wrong block type")
}
}
}
impl Scannable<u64> for Block {
fn scan(&self, op : ScanComparison, val : &u64, scan_consumer : &mut BlockScanConsumer) {
match self {
&Block::Int64Dense(ref b) => b.scan(op, val, scan_consumer),
&Block::Int64Sparse(ref b) => b.scan(op, val, scan_consumer),
&Block::Int32Sparse(ref b) => b.scan(op, &(*val as u32), scan_consumer),
&Block::Int16Sparse(ref b) => b.scan(op, &(*val as u16), scan_consumer),
&Block::Int8Sparse(ref b) => b.scan(op, &(*val as u8), scan_consumer),
_ => panic!("Unrecognized u64 block type")
}
}
}
impl Upsertable<u64> for Block {
fn multi_upsert(&mut self, offsets : &Vec<u32>, val : &u64) {
match self {
&mut Block::Int64Sparse(ref mut b) => b.multi_upsert(offsets, *val),
_ => panic!("Wrong block type")
}
}
fn upsert(&mut self, data : &Block) {
match self {
&mut Block::Int64Sparse(ref mut b) => match data {
&Block::Int64Sparse(ref c) => b.upsert(c),
_ => panic!("Wrong block")
},
_ => panic!("Wrong block type")
}
}
}
impl Scannable<u32> for Block {
fn scan(&self, op: ScanComparison, val: &u32, scan_consumer: &mut BlockScanConsumer) {
match self {
&Block::Int32Sparse(ref b) => b.scan(op, val, scan_consumer),
_ => println!("Unrecognized u32 block type")
}
}
}
/// Routes u32 upserts to the `Int32Sparse` variant.
impl Upsertable<u32> for Block {
    /// Writes `*val` at every offset in `offsets`.
    /// Panics if this block is not `Int32Sparse`.
    fn multi_upsert(&mut self, offsets : &Vec<u32>, val : &u32) {
        if let &mut Block::Int32Sparse(ref mut b) = self {
            b.multi_upsert(offsets, *val);
        } else {
            panic!("Wrong block type");
        }
    }
    /// Merges another `Int32Sparse` block into this one.
    fn upsert(&mut self, data : &Block) {
        match (self, data) {
            (&mut Block::Int32Sparse(ref mut b), &Block::Int32Sparse(ref c)) => b.upsert(c),
            _ => panic!("Wrong block type"),
        }
    }
}
/// Dispatches a u16 scan to the `Int16Sparse` variant.
impl Scannable<u16> for Block {
    /// NOTE(review): mismatched variants only print instead of panicking,
    /// unlike the u64/String impls — confirm this is intentional.
    fn scan(&self, op: ScanComparison, val: &u16, scan_consumer: &mut BlockScanConsumer) {
        match self {
            &Block::Int16Sparse(ref b) => b.scan(op, val, scan_consumer),
            _ => println!("Unrecognized u16 block type")
        }
    }
}
/// Routes u16 upserts to the `Int16Sparse` variant.
impl Upsertable<u16> for Block {
    /// Writes `*val` at every offset in `offsets`.
    /// Panics if this block is not `Int16Sparse`.
    fn multi_upsert(&mut self, offsets : &Vec<u32>, val : &u16) {
        if let &mut Block::Int16Sparse(ref mut b) = self {
            b.multi_upsert(offsets, *val);
        } else {
            panic!("Wrong block type");
        }
    }
    /// Merges another `Int16Sparse` block into this one.
    fn upsert(&mut self, data : &Block) {
        match (self, data) {
            (&mut Block::Int16Sparse(ref mut b), &Block::Int16Sparse(ref c)) => b.upsert(c),
            _ => panic!("Wrong block type"),
        }
    }
}
/// Dispatches a u8 scan to the `Int8Sparse` variant.
impl Scannable<u8> for Block {
    /// NOTE(review): mismatched variants only print instead of panicking,
    /// unlike the u64/String impls — confirm this is intentional.
    fn scan(&self, op: ScanComparison, val: &u8, scan_consumer: &mut BlockScanConsumer) {
        match self {
            &Block::Int8Sparse(ref b) => b.scan(op, val, scan_consumer),
            _ => println!("Unrecognized u8 block type")
        }
    }
}
/// Routes u8 upserts to the `Int8Sparse` variant.
impl Upsertable<u8> for Block {
    /// Writes `*val` at every offset in `offsets`.
    /// Panics if this block is not `Int8Sparse`.
    fn multi_upsert(&mut self, offsets : &Vec<u32>, val : &u8) {
        if let &mut Block::Int8Sparse(ref mut b) = self {
            b.multi_upsert(offsets, *val);
        } else {
            panic!("Wrong block type");
        }
    }
    /// Merges another `Int8Sparse` block into this one.
    fn upsert(&mut self, data : &Block) {
        match (self, data) {
            (&mut Block::Int8Sparse(ref mut b), &Block::Int8Sparse(ref c)) => b.upsert(c),
            _ => panic!("Wrong block type"),
        }
    }
}
/// Dense u64 column: one value per row, indexed implicitly by position.
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
pub struct Int64DenseBlock {
    // Row offset == vector index; no explicit offsets are stored.
    pub data : Vec<u64>
}
impl Int64DenseBlock {
    /// Creates an empty dense block.
    pub fn new() -> Int64DenseBlock {
        Int64DenseBlock { data: vec![] }
    }
    /// Appends one value; its offset is its position in `data`.
    pub fn append(&mut self, v: u64) {
        self.data.push(v);
    }
    /// Wraps this block into the `Block` enum.
    pub fn encapsulate_in_block(self) -> Block {
        Block::Int64Dense(self)
    }
    /// Builds a new dense block containing only the values at the offsets
    /// the scan matched, in the scan's order.
    pub fn filter_scan_results(&self, scan_consumer : &BlockScanConsumer) -> Int64DenseBlock {
        Int64DenseBlock {
            data: scan_consumer
                .matching_offsets
                .iter()
                .map(|offset| self.data[*offset as usize])
                .collect(),
        }
    }
}
// As of now this is byte array essentially
/// Sparse string column stored as one flat byte buffer plus an index of
/// (row offset, start position) pairs; an entry's end is the next entry's
/// start (or the end of `str_data` for the last entry).
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
pub struct StringBlock {
    // Pair: offset, start position in array; the end position might be implied
    pub index_data : Vec<(u32, usize)>,
    // Concatenated string bytes, in index order.
    pub str_data : Vec<u8>
}
impl StringBlock {
    /// Creates an empty string block.
    pub fn new() -> StringBlock {
        StringBlock{ index_data: Vec::new(), str_data: Vec::new() }
    }
    /// Removes the entries whose row offsets appear in `offsets`.
    ///
    /// Both `offsets` and `index_data` are walked as ascending sequences
    /// (merge style) — assumes both are sorted; TODO confirm at call sites.
    /// Rebuilds both vectors rather than mutating in place.
    pub fn delete(&mut self, offsets: &Vec<u32>) {
        // Because the structure is bit more complex here, lets just be naive and rewrite the str_data while updating index data?
        let mut new_index_data:Vec<(u32, usize)> = Vec::new();
        let mut new_str_data:Vec<u8> = Vec::new();
        let mut offsets_index = 0 as usize;  // cursor into `offsets`
        let mut data_index = 0 as usize;     // cursor into `self.index_data`
        while data_index < self.index_data.len() {
            let cur_index = self.index_data[data_index];
            let cur_offset = cur_index.0;
            // Skip removal offsets that lie before the current entry.
            while offsets_index < offsets.len() && offsets[offsets_index] < cur_offset {
                offsets_index += 1;
            }
            // The next offset to remove is somewhere in front, so lets copy this entry
            if offsets_index == offsets.len() || offsets[offsets_index] > cur_offset {
                // End of this entry's bytes: next entry's start, or buffer end for the last entry.
                let end_str_index = if data_index == self.index_data.len()-1 {
                    self.str_data.len()
                } else {
                    self.index_data[data_index+1].1
                };
                let last_index = new_str_data.len();
                let cur_str_data = &self.str_data[cur_index.1..end_str_index];
                new_index_data.push((cur_index.0, last_index));
                new_str_data.extend_from_slice(cur_str_data);
            }
            data_index += 1;
        }
        self.index_data = new_index_data;
        self.str_data = new_str_data;
    }
    /// Sets the value `v` at every offset in `offsets`, inserting entries that
    /// do not exist yet. Merge-walks the sorted `offsets` and `index_data`.
    pub fn multi_upsert(&mut self, offsets: &Vec<u32>, v: &[u8]) {
        // Because the structure is bit more complex here, lets just be naive and rewrite the str_data while updating index data?
        let mut new_index_data:Vec<(u32, usize)> = Vec::new();
        let mut new_str_data:Vec<u8> = Vec::new();
        let mut offsets_index = 0 as usize;  // cursor into `offsets`
        let mut data_index = 0 as usize;     // cursor into `self.index_data`
        while offsets_index < offsets.len() || data_index < self.index_data.len() {
            let last_str_index = new_str_data.len();
            // Offset of the next existing entry, or of the next upsert target
            // once the existing data is exhausted.
            let cur_offset = if data_index < self.index_data.len() {
                self.index_data[data_index].0
            } else {
                offsets[offsets_index]
            };
            if offsets_index < offsets.len() && offsets[offsets_index] <= cur_offset {
                // Update/insert the value
                new_index_data.push((offsets[offsets_index], last_str_index));
                new_str_data.extend_from_slice(v);
                if offsets[offsets_index] == cur_offset {
                    // If we did update rather then insert to a non-existing entry
                    data_index += 1;
                }
                offsets_index += 1;
            } else if data_index < self.index_data.len() {
                let cur_index = self.index_data[data_index];
                // Copy from existing block
                let end_str_index = if data_index < self.index_data.len()-1 {
                    self.index_data[data_index+1].1
                } else {
                    self.str_data.len()
                };
                let cur_str_data = &self.str_data[cur_index.1..end_str_index];
                new_index_data.push((cur_index.0, last_str_index));
                new_str_data.extend_from_slice(cur_str_data);
                data_index += 1;
            }
        }
        self.index_data = new_index_data;
        self.str_data = new_str_data;
    }
    /// Merges `data` into this block: matching offsets are overwritten,
    /// new offsets are inserted in order. Same merge-walk shape as
    /// `multi_upsert`, but values come from the other block.
    pub fn upsert(&mut self, data : &StringBlock) {
        // Because the structure is bit more complex here, lets just be naive and rewrite the str_data while updating index data?
        let mut new_index_data:Vec<(u32, usize)> = Vec::new();
        let mut new_str_data:Vec<u8> = Vec::new();
        let mut input_index = 0 as usize;  // cursor into `data.index_data`
        let mut data_index = 0 as usize;   // cursor into `self.index_data`
        while input_index < data.index_data.len() || data_index < self.index_data.len() {
            let last_str_index = new_str_data.len();
            let cur_offset = if data_index < self.index_data.len() {
                self.index_data[data_index].0
            } else {
                data.index_data[input_index].0
            };
            if input_index < data.index_data.len() && data.index_data[input_index].0 <= cur_offset {
                // Update/insert the value
                new_index_data.push((data.index_data[input_index].0, last_str_index));
                // Byte range of the incoming entry inside `data.str_data`.
                let input_str_end = if input_index == data.index_data.len()-1 {
                    data.str_data.len()
                } else {
                    data.index_data[input_index+1].1
                };
                let input_str_slice = &data.str_data[data.index_data[input_index].1..input_str_end];
                new_str_data.extend_from_slice(input_str_slice);
                if data.index_data[input_index].0 == cur_offset {
                    // If we did update rather then insert to a non-existing entry
                    data_index += 1;
                }
                input_index += 1;
            } else if data_index < self.index_data.len() {
                let cur_index = self.index_data[data_index];
                // Copy from existing block
                let end_str_index = if data_index == self.index_data.len()-1 {
                    self.str_data.len()
                } else {
                    self.index_data[data_index+1].1
                };
                let cur_str_data = &self.str_data[cur_index.1..end_str_index];
                new_index_data.push((cur_index.0, last_str_index));
                new_str_data.extend_from_slice(cur_str_data);
                data_index += 1;
            }
        }
        self.index_data = new_index_data;
        self.str_data = new_str_data;
    }
    /// Rewrites the row offsets in place, positionally: entry `i` receives
    /// `new_offsets[i]`. Assumes `new_offsets.len() <= index_data.len()`.
    pub fn transpose_offsets(&mut self, new_offsets : &Vec<u32>) {
        for i in 0..new_offsets.len() {
            let record = &mut self.index_data[i];
            record.0 = new_offsets[i];
        }
    }
    /// Moves the rows matched by `scan_consumer` into `target`
    /// (filter → renumber offsets → upsert into target → delete from self).
    pub fn move_data(&mut self, target : &mut StringBlock, scan_consumer : &BlockScanConsumer) {
        let mut temp_block = self.filter_scan_results(scan_consumer);
        temp_block.transpose_offsets(&scan_consumer.matching_offsets);
        target.upsert(&temp_block);
        self.delete(&scan_consumer.matching_offsets);
    }
    /// Appends one string at row offset `o`; assumes appends arrive in
    /// ascending offset order — TODO confirm, the merge walks rely on it.
    pub fn append(&mut self, o: u32, v: &[u8]) {
        let last_index = self.str_data.len();
        let str_bytes = v;
        self.index_data.push((o, last_index));
        self.str_data.extend_from_slice(str_bytes);
    }
    /// Builds a new block containing only the matched rows. Note that the
    /// resulting entries are re-offset to the scan's positional index
    /// (`scan_data_index`), not the original row offsets.
    pub fn filter_scan_results(&self, scan_consumer: &BlockScanConsumer) -> StringBlock {
        let mut out_block = StringBlock::new();
        // TODO: binary search-like operations would be faster usually (binary-search + scans)
        let mut block_data_index = 0 as usize;  // cursor into `self.index_data`
        let mut scan_data_index = 0 as usize;   // cursor into matched offsets
        while scan_data_index < scan_consumer.matching_offsets.len() && block_data_index < self.index_data.len() {
            let target_offset = scan_consumer.matching_offsets[scan_data_index];
            // Advance to the first entry at or beyond the wanted offset.
            while block_data_index < self.index_data.len() && self.index_data[block_data_index].0 < target_offset {
                block_data_index += 1;
            }
            if block_data_index < self.index_data.len() && self.index_data[block_data_index].0 == target_offset {
                let arr_start_position = self.index_data[block_data_index].1.to_owned();
                let arr_end_position = if block_data_index < self.index_data.len()-1 {
                    self.index_data[block_data_index+1].1.to_owned()
                } else {
                    self.str_data.len().to_owned()
                };
                let val = &self.str_data[arr_start_position..arr_end_position];
                out_block.append(scan_data_index as u32, val);
                block_data_index += 1;
            }
            // Move on regardless
            scan_data_index += 1;
        }
        out_block
    }
}
/// Generic sparse column: only rows that have a value are stored, as
/// (row offset, value) pairs kept in ascending offset order.
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
pub struct TSparseBlock<T:Clone> {
    pub data : Vec<(u32,T)>
}
impl<T : Clone> TSparseBlock<T> {
    /// Appends one (offset, value) pair; assumes appends arrive in ascending
    /// offset order — TODO confirm, the merge walks below rely on it.
    pub fn append(&mut self, o: u32, v: T) {
        self.data.push((o, v));
    }
    /// Removes the entries whose offsets appear in `offsets`.
    /// Both sequences are walked as sorted (merge style); matched vector
    /// indexes are collected first and removed back-to-front so the
    /// remaining indexes stay valid.
    pub fn delete(&mut self, offsets: &Vec<u32>) {
        let mut indexes:Vec<usize> = Vec::new();
        let mut offsets_index = 0 as usize;
        let mut data_index = 0 as usize;
        while offsets_index < offsets.len() && data_index < self.data.len() {
            let target_offset = offsets[offsets_index];
            while data_index < self.data.len() && self.data[data_index].0 < target_offset {
                data_index += 1;
            }
            if data_index < self.data.len() && self.data[data_index].0 == target_offset {
                indexes.push(data_index);
                data_index += 1;
            }
            // Move on regardless
            offsets_index += 1;
        }
        indexes.reverse();
        for i in indexes {
            self.data.remove(i);
        }
    }
    /// Rewrites offsets positionally: entry `i` receives `new_offsets[i]`.
    /// Assumes `new_offsets.len() <= data.len()`.
    pub fn transpose_offsets(&mut self, new_offsets : &Vec<u32>) {
        for i in 0..new_offsets.len() {
            let record = &mut self.data[i];
            record.0 = new_offsets[i];
        }
    }
    /// Moves the rows matched by `scan_consumer` into `target`
    /// (filter → renumber offsets → upsert into target → delete from self).
    pub fn move_data(&mut self, target : &mut TSparseBlock<T>, scan_consumer : &BlockScanConsumer) {
        let mut temp_block = self.filter_scan_results(scan_consumer);
        temp_block.transpose_offsets(&scan_consumer.matching_offsets);
        target.upsert(&temp_block);
        self.delete(&scan_consumer.matching_offsets);
    }
    // Put specific value to multiple columns
    /// Sets `v` at every offset in `offsets`, updating existing entries and
    /// inserting missing ones in sorted position.
    /// (Removed an unused `indexes` local that only triggered a warning.)
    pub fn multi_upsert(&mut self, offsets: &Vec<u32>, v: T) {
        let mut offsets_index = 0 as usize;
        let mut data_index = 0 as usize;
        while offsets_index < offsets.len() {
            let target_offset = offsets[offsets_index];
            // Forward the self.data position to current offset
            while data_index < self.data.len() && self.data[data_index].0 < target_offset {
                data_index += 1;
            }
            if data_index < self.data.len() {
                if self.data[data_index].0 == target_offset {
                    let record = &mut self.data[data_index];
                    record.1 = v.to_owned();
                } else {
                    // insert
                    self.data.insert(data_index, (target_offset, v.to_owned()));
                }
            } else {
                // append
                self.data.push((target_offset, v.to_owned()));
            }
            // Move on regardless
            offsets_index += 1;
        }
    }
    // Upsert specific values
    /// Merges `upsert_data` into this block: matching offsets are
    /// overwritten, new offsets inserted in sorted position.
    /// (Removed an unused `indexes` local that only triggered a warning.)
    pub fn upsert(&mut self, upsert_data : &TSparseBlock<T>) {
        let mut offsets_index = 0 as usize;
        let mut data_index = 0 as usize;
        while offsets_index < upsert_data.data.len() {
            let target_offset = upsert_data.data[offsets_index].0;
            // Forward the self.data position to current offset
            while data_index < self.data.len() && self.data[data_index].0 < target_offset {
                data_index += 1;
            }
            if data_index < self.data.len() {
                if self.data[data_index].0 == target_offset {
                    let record = &mut self.data[data_index];
                    record.1 = upsert_data.data[offsets_index].1.to_owned();
                } else {
                    // insert
                    self.data.insert(data_index, (target_offset, upsert_data.data[offsets_index].1.to_owned()));
                }
            } else {
                // append
                self.data.push((target_offset, upsert_data.data[offsets_index].1.to_owned()));
            }
            // Move on regardless
            offsets_index += 1;
        }
    }
    /// Builds a new block containing only the matched rows. Entries are
    /// re-offset to the scan's positional index, not the original offsets.
    pub fn filter_scan_results(&self, scan_consumer : &BlockScanConsumer) -> TSparseBlock<T> {
        let mut out_block = TSparseBlock { data: Vec::new() };
        // TODO: binary search-like operations would be faster usually (binary-search + scans)
        let mut offsets_index = 0 as usize;
        let mut data_index = 0 as usize;
        while offsets_index < scan_consumer.matching_offsets.len() && data_index < self.data.len() {
            let target_offset = scan_consumer.matching_offsets[offsets_index];
            while data_index < self.data.len() && self.data[data_index].0 < target_offset {
                data_index += 1;
            }
            if data_index < self.data.len() && self.data[data_index].0 == target_offset {
                let val:T = self.data[data_index].1.to_owned();
                out_block.append(offsets_index as u32, val);
                data_index += 1;
            }
            // Move on regardless
            offsets_index += 1;
        }
        out_block
    }
}
//#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
// Concrete sparse-block instantiations for each supported integer width.
pub type Int64SparseBlock = TSparseBlock<u64>;
pub type Int32SparseBlock = TSparseBlock<u32>;
pub type Int16SparseBlock = TSparseBlock<u16>;
pub type Int8SparseBlock = TSparseBlock<u8>;
impl Int64SparseBlock {
    /// Creates an empty sparse u64 block.
    pub fn new() -> Int64SparseBlock {
        Int64SparseBlock { data: vec![] }
    }
    /// Wraps this block into the `Block` enum.
    pub fn encapsulate_in_block(self) -> Block {
        Block::Int64Sparse(self)
    }
}
impl Int32SparseBlock {
    /// Creates an empty sparse u32 block.
    pub fn new() -> Int32SparseBlock {
        Int32SparseBlock { data: vec![] }
    }
    /// Wraps this block into the `Block` enum.
    pub fn encapsulate_in_block(self) -> Block {
        Block::Int32Sparse(self)
    }
}
impl Int16SparseBlock {
    /// Creates an empty sparse u16 block.
    pub fn new() -> Int16SparseBlock {
        Int16SparseBlock { data: vec![] }
    }
    /// Wraps this block into the `Block` enum.
    pub fn encapsulate_in_block(self) -> Block {
        Block::Int16Sparse(self)
    }
}
impl Int8SparseBlock {
    /// Creates an empty sparse u8 block.
    pub fn new() -> Int8SparseBlock {
        Int8SparseBlock { data: vec![] }
    }
    /// Wraps this block into the `Block` enum.
    pub fn encapsulate_in_block(self) -> Block {
        Block::Int8Sparse(self)
    }
}
impl Scannable<u64> for Int64DenseBlock {
    /// Pushes the positional offset of every dense value that satisfies
    /// `op` against `val` into `scan_consumer`.
    fn scan(&self, op : ScanComparison, val : &u64, scan_consumer : &mut BlockScanConsumer) {
        // Resolve the comparison once, outside the per-row loop.
        let keep: fn(&u64, &u64) -> bool = match op {
            ScanComparison::Lt => |a, b| a < b,
            ScanComparison::LtEq => |a, b| a <= b,
            ScanComparison::Eq => |a, b| a == b,
            ScanComparison::GtEq => |a, b| a >= b,
            ScanComparison::Gt => |a, b| a > b,
            ScanComparison::NotEq => |a, b| a != b,
        };
        for (offset_usize, value) in self.data.iter().enumerate() {
            if keep(value, val) {
                scan_consumer.matching_offsets.push(offset_usize as u32);
            }
        }
    }
}
// This is not utf-8 aware
/// Byte-wise lexicographic comparison used by the string scan for the
/// ordering operators (`Lt`, `LtEq`, `Gt`, `GtEq`); `Eq`/`NotEq` are handled
/// by the caller. Returns whether `s1 <op> s2` holds; any other operator
/// yields `false`.
fn strings_ne_match(s1 : &[u8], op : &ScanComparison, s2 : &[u8]) -> bool {
    // The first differing byte decides the ordering outright.
    for i in 0..cmp::min(s1.len(), s2.len()) {
        if s1[i] == s2[i] {
            // just continue
        } else {
            match op {
                &ScanComparison::LtEq => return s1[i] < s2[i],
                &ScanComparison::Lt => return s1[i] < s2[i],
                &ScanComparison::Gt => return s1[i] > s2[i],
                &ScanComparison::GtEq => return s1[i] > s2[i],
                _ => return false
            }
        }
    }
    // The shorter string was a substring of the longer one (or both are
    // equal), so order by length.
    // BUG FIX: LtEq/GtEq previously used strict `<`/`>` here, so two equal
    // strings satisfied neither `<=` nor `>=`.
    match op {
        &ScanComparison::LtEq => return s1.len() <= s2.len(),
        &ScanComparison::Lt => return s1.len() < s2.len(),
        &ScanComparison::Gt => return s1.len() > s2.len(),
        &ScanComparison::GtEq => return s1.len() >= s2.len(),
        _ => return false
    }
}
impl Scannable<String> for StringBlock {
    /// Screams naiive!
    ///
    /// Linear scan over all strings. Each iteration evaluates the *previous*
    /// entry, because an entry's byte range is only known once the next
    /// entry's start position is seen; the final entry is handled after the
    /// loop using the end of `str_data`.
    fn scan(&self, op : ScanComparison, str_val : &String, scan_consumer : &mut BlockScanConsumer) {
        let mut prev_offset = 0 as u32;
        let mut prev_position = 0 as usize;
        let val = str_val.as_bytes();
        let mut index = 0;
        for &(offset_usize, position) in self.index_data.iter() {
            // Byte length of the previous entry's string.
            let size = position - prev_position;
            let offset = offset_usize as u32;
            // Skip the very first iteration: there is no previous entry yet.
            if index > 0 {
                match op {
                    ScanComparison::Eq => if size == val.len() && val == &self.str_data[prev_position..position] { scan_consumer.matching_offsets.push(prev_offset) },
                    // ScanComparison::Lt =>
                    ScanComparison::NotEq => {
                        if size != val.len() || val != &self.str_data[prev_position..position] { scan_consumer.matching_offsets.push(prev_offset) }
                    },
                    // Ordering operators delegate to the byte-wise comparator.
                    _ => if strings_ne_match(&self.str_data[prev_position..position], &op, val) { scan_consumer.matching_offsets.push(prev_offset) }
                }
            }
            prev_position = position;
            prev_offset = offset;
            index += 1;
        }
        // last element
        // TODO: extract/refactor
        if index > 0 {
            let size = self.str_data.len() - prev_position;
            let offset = prev_offset;  // NOTE(review): this binding is unused below
            let position = self.str_data.len();
            match op {
                ScanComparison::Eq => if size == val.len() && val == &self.str_data[prev_position..position] { scan_consumer.matching_offsets.push(prev_offset) },
                ScanComparison::NotEq => if size != val.len() || val != &self.str_data[prev_position..position] { scan_consumer.matching_offsets.push(prev_offset) },
                _ => if strings_ne_match(&self.str_data[prev_position..position], &op, val) { scan_consumer.matching_offsets.push(prev_offset) }
            }
        }
    }
}
impl Scannable<u64> for Int64SparseBlock {
    /// Pushes the stored offset of every (offset, value) pair whose value
    /// satisfies `op` against `val`.
    fn scan(&self, op : ScanComparison, val : &u64, scan_consumer : &mut BlockScanConsumer) {
        // Resolve the comparison once, outside the per-entry loop.
        let keep: fn(&u64, &u64) -> bool = match op {
            ScanComparison::Lt => |a, b| a < b,
            ScanComparison::LtEq => |a, b| a <= b,
            ScanComparison::Eq => |a, b| a == b,
            ScanComparison::GtEq => |a, b| a >= b,
            ScanComparison::Gt => |a, b| a > b,
            ScanComparison::NotEq => |a, b| a != b,
        };
        for &(offset, value) in self.data.iter() {
            if keep(&value, val) {
                scan_consumer.matching_offsets.push(offset);
            }
        }
    }
}
impl Scannable<u32> for Int32SparseBlock {
    /// Pushes the stored offset of every (offset, value) pair whose value
    /// satisfies `op` against `val`.
    fn scan(&self, op : ScanComparison, val : &u32, scan_consumer : &mut BlockScanConsumer) {
        // Resolve the comparison once, outside the per-entry loop.
        let keep: fn(&u32, &u32) -> bool = match op {
            ScanComparison::Lt => |a, b| a < b,
            ScanComparison::LtEq => |a, b| a <= b,
            ScanComparison::Eq => |a, b| a == b,
            ScanComparison::GtEq => |a, b| a >= b,
            ScanComparison::Gt => |a, b| a > b,
            ScanComparison::NotEq => |a, b| a != b,
        };
        for &(offset, value) in self.data.iter() {
            if keep(&value, val) {
                scan_consumer.matching_offsets.push(offset);
            }
        }
    }
}
impl Scannable<u16> for Int16SparseBlock {
    /// Pushes the stored offset of every (offset, value) pair whose value
    /// satisfies `op` against `val`.
    fn scan(&self, op : ScanComparison, val : &u16, scan_consumer : &mut BlockScanConsumer) {
        // Resolve the comparison once, outside the per-entry loop.
        let keep: fn(&u16, &u16) -> bool = match op {
            ScanComparison::Lt => |a, b| a < b,
            ScanComparison::LtEq => |a, b| a <= b,
            ScanComparison::Eq => |a, b| a == b,
            ScanComparison::GtEq => |a, b| a >= b,
            ScanComparison::Gt => |a, b| a > b,
            ScanComparison::NotEq => |a, b| a != b,
        };
        for &(offset, value) in self.data.iter() {
            if keep(&value, val) {
                scan_consumer.matching_offsets.push(offset);
            }
        }
    }
}
impl Scannable<u8> for Int8SparseBlock {
    /// Pushes the stored offset of every (offset, value) pair whose value
    /// satisfies `op` against `val`.
    fn scan(&self, op : ScanComparison, val : &u8, scan_consumer : &mut BlockScanConsumer) {
        // Resolve the comparison once, outside the per-entry loop.
        let keep: fn(&u8, &u8) -> bool = match op {
            ScanComparison::Lt => |a, b| a < b,
            ScanComparison::LtEq => |a, b| a <= b,
            ScanComparison::Eq => |a, b| a == b,
            ScanComparison::GtEq => |a, b| a >= b,
            ScanComparison::Gt => |a, b| a > b,
            ScanComparison::NotEq => |a, b| a != b,
        };
        for &(offset, value) in self.data.iter() {
            if keep(&value, val) {
                scan_consumer.matching_offsets.push(offset);
            }
        }
    }
}
#[test]
fn delete_sparse_block() {
    // Deleting offsets 2, 3 and 6 must drop exactly those sparse entries.
    let mut input_block = Int32SparseBlock {
        data: vec![
            (1, 100),
            (2, 200),
            (3, 300),
            (6, 600),
            (8, 800),
            (11, 1100)
        ]
    };
    // `expected_block` is never mutated; the spurious `mut` was removed.
    let expected_block = Int32SparseBlock {
        data: vec![
            (1, 100),
            (8, 800),
            (11, 1100)
        ]
    };
    let offsets = vec![2,3,6];
    input_block.delete(&offsets);
    assert_eq!(expected_block, input_block);
}
#[test]
fn delete_string_block() {
    // Build a block with three strings, then delete the entry at offset 2.
    let mut actual = StringBlock::new();
    actual.append(1, "foo".as_bytes());
    actual.append(2, "bar".as_bytes());
    actual.append(13, "snafu".as_bytes());
    // Offsets 3 and 6 have no entries, so only offset 2 is removed.
    let mut expected = StringBlock::new();
    expected.append(1, "foo".as_bytes());
    expected.append(13, "snafu".as_bytes());
    actual.delete(&vec![2,3,6]);
    assert_eq!(expected, actual);
}
#[test]
fn multi_upsert_string_block() {
    // Upserting "lol" at 0, 2 and 3: offset 2 is overwritten, 0 and 3 inserted.
    let mut actual = StringBlock::new();
    actual.append(1, "foo".as_bytes());
    actual.append(2, "bar".as_bytes());
    actual.append(13, "snafu".as_bytes());
    let mut expected = StringBlock::new();
    expected.append(0, "lol".as_bytes());
    expected.append(1, "foo".as_bytes());
    expected.append(2, "lol".as_bytes());
    expected.append(3, "lol".as_bytes());
    expected.append(13, "snafu".as_bytes());
    actual.multi_upsert(&vec![0,2,3], "lol".as_bytes());
    assert_eq!(expected, actual);
}
#[test]
fn upsert_string_block() {
    // Upserting three entries: offset 1 overwritten, offsets 0 and 3 inserted.
    let mut input_block = StringBlock::new();
    input_block.append(1, "foo".as_bytes());
    input_block.append(2, "bar".as_bytes());
    input_block.append(13, "snafu".as_bytes());
    let mut upsert_data = StringBlock::new();
    upsert_data.append(0, "a0".as_bytes());
    upsert_data.append(1, "b1".as_bytes());
    upsert_data.append(3, "c2".as_bytes());
    let mut expected_block = StringBlock::new();
    expected_block.append(0, "a0".as_bytes());
    expected_block.append(1, "b1".as_bytes());
    expected_block.append(2, "bar".as_bytes());
    expected_block.append(3, "c2".as_bytes());
    expected_block.append(13, "snafu".as_bytes());
    // Removed a dead `let offsets = vec![0,2,3];` binding that was never used.
    input_block.upsert(&upsert_data);
    assert_eq!(expected_block, input_block);
}
#[test]
fn multi_upsert_sparse_block() {
    // Upserting 9999 at 0, 1, 2, 12: offset 1 overwritten, the rest inserted.
    let mut input_block = Int32SparseBlock {
        data: vec![
            (1, 100),
            (8, 800),
            (11, 1100)
        ]
    };
    // `expected_block` is never mutated; the spurious `mut` was removed.
    let expected_block = Int32SparseBlock {
        data: vec![
            (0, 9999),
            (1, 9999),
            (2, 9999),
            (8, 800),
            (11, 1100),
            (12, 9999)
        ]
    };
    let offsets = vec![0,1,2,12];
    input_block.multi_upsert(&offsets, 9999);
    assert_eq!(expected_block, input_block);
}
#[test]
fn upsert_sparse_block() {
    // Upserting (1,101) and (2,202): offset 1 overwritten, offset 2 inserted.
    let mut input_block = Int32SparseBlock {
        data: vec![
            (1, 100),
            (8, 800),
            (11, 1100)
        ]
    };
    // `expected_block` is never mutated; the spurious `mut` was removed.
    let expected_block = Int32SparseBlock {
        data: vec![
            (1, 101),
            (2, 202),
            (8, 800),
            (11, 1100),
        ]
    };
    let upsert_data = Int32SparseBlock {
        data: vec![
            (1,101),
            (2,202)
        ]
    };
    input_block.upsert(&upsert_data);
    assert_eq!(expected_block, input_block);
}
#[test]
fn string_block() {
    // Internal layout check: appends concatenate into str_data and record
    // (offset, start) pairs; the empty string shares its successor's start.
    // `expected_block` is never mutated; the spurious `mut` was removed.
    let expected_block = StringBlock {
        index_data: vec![
            (0, 0), // foo
            (1, 3), // bar
            (2, 6), // ""
            (3, 6), // snafu
        ],
        str_data: "foobarsnafu".as_bytes().to_vec()
    };
    let mut str_block = StringBlock::new();
    str_block.append(0, "foo".as_bytes());
    str_block.append(1, "bar".as_bytes());
    str_block.append(2, "".as_bytes());
    str_block.append(3, "snafu".as_bytes());
    assert_eq!(expected_block, str_block);
    // Eq matches only the exact string.
    let mut consumer = BlockScanConsumer::new();
    str_block.scan(ScanComparison::Eq, &String::from("bar"), &mut consumer);
    assert_eq!(consumer.matching_offsets, vec![1]);
    // NotEq matches everything else, including the empty string.
    consumer = BlockScanConsumer::new();
    str_block.scan(ScanComparison::NotEq, &String::from("bar"), &mut consumer);
    assert_eq!(consumer.matching_offsets, vec![0,2,3]);
    // The last entry (handled by the post-loop branch) is also matchable.
    consumer = BlockScanConsumer::new();
    str_block.scan(ScanComparison::Eq, &String::from("snafu"), &mut consumer);
    assert_eq!(consumer.matching_offsets, vec![3]);
}
#[test]
fn it_filters_sparse_block() {
    // `filter_scan_results` takes &self, so `data_block` needs no `mut`.
    let data_block = Int64SparseBlock {
        data: vec![
            (1, 100),
            (2, 200),
            (3, 300),
            (6, 600),
            (8, 800),
            (11, 1100)
        ]
    };
    // Offset 4 has no entry and is simply skipped.
    let scan_consumer = BlockScanConsumer {
        matching_offsets: vec![2,3,4,11]
    };
    // The offsets are now changed to be with order of scan consumer
    let expected_output = Int64SparseBlock {
        data: vec![
            (0, 200),
            (1, 300),
            (3, 1100)
        ]
    };
    let actual_output = data_block.filter_scan_results(&scan_consumer);
    assert_eq!(expected_output, actual_output);
}
| 34.810613 | 166 | 0.566495 |
d23f8f69f33213a32321f930b3c673a50162d387 | 2,001 | php | PHP | app/Http/Controllers/Auth/AuthController.php | DMK-Innovations/BusinessModelToolkit | 5c3706bcc609fbbec0c14b2cfbb1b1718ae446fd | [
"MIT"
] | null | null | null | app/Http/Controllers/Auth/AuthController.php | DMK-Innovations/BusinessModelToolkit | 5c3706bcc609fbbec0c14b2cfbb1b1718ae446fd | [
"MIT"
] | 1 | 2017-04-15T08:11:41.000Z | 2017-04-15T08:11:41.000Z | app/Http/Controllers/Auth/AuthController.php | DMK-INNOVATIONS/BusinessModelToolkit | 5c3706bcc609fbbec0c14b2cfbb1b1718ae446fd | [
"MIT"
] | null | null | null | <?php
namespace App\Http\Controllers\Auth;
use App\Http\Controllers\Controller;
use Illuminate\Contracts\Auth\Guard;
use Illuminate\Contracts\Auth\Registrar;
use Illuminate\Foundation\Auth\AuthenticatesAndRegistersUsers;
use Illuminate\Http\Request;
use Mail;
class AuthController extends Controller {
	/*
	 * |-------------------------------------------------------------------------- | Registration & Login Controller |-------------------------------------------------------------------------- | | This controller handles the registration of new users, as well as the | authentication of existing users. By default, this controller uses | a simple trait to add these behaviors. Why don't you explore it? |
	 */
	use AuthenticatesAndRegistersUsers;
	/**
	 * Create a new authentication controller instance.
	 *
	 * Stores the auth guard and registrar and restricts these routes to
	 * guests, except for logout.
	 *
	 * NOTE(review): $this->auth and $this->registrar are assigned without
	 * being declared as class properties here — confirm they are declared
	 * on a parent class or via the trait.
	 *
	 * @param \Illuminate\Contracts\Auth\Guard $auth
	 * @param \Illuminate\Contracts\Auth\Registrar $registrar
	 * @return void
	 */
	public function __construct(Guard $auth, Registrar $registrar) {
		$this->auth = $auth;
		$this->registrar = $registrar;
		$this->middleware ( 'guest', [
			'except' => 'getLogout'
		] );
	}
	/**
	 * Handles the registration form POST: validates input, creates and
	 * logs in the user, e-mails a verification link, and renders the
	 * post-registration view.
	 *
	 * NOTE(review): raw $_POST superglobals are read here instead of the
	 * injected $request (e.g. $request->input('email')) — the CSRF '_token'
	 * is also reused as the e-mail verification code; confirm both are
	 * intentional.
	 *
	 * @param \Illuminate\Http\Request $request
	 * @return \Illuminate\Http\Response
	 */
	public function postRegister(Request $request) {
		$validator = $this->registrar->validator ( $request->all () );
		if ($validator->fails ()) {
			$this->throwValidationException ( $request, $validator );
		}
		$token= $_POST['_token'];
		$email= $_POST['email'];
		$name= $_POST['name'];
		$this->auth->login ( $this->registrar->create ( $request->all () ) );
		// NOTE(review): $data is assigned but never used afterwards —
		// presumably leftover from an earlier mail payload; verify and remove.
		$data['verification_code'] = $token;
		Mail::send('registering.emailsend', ['token'=>$token, 'email'=>$email], function($message) use ($email)
		{
			$message->from('support@toolkit.builders', 'support@toolkit.builders');
			$message->to($email);
			$message->subject('Complete your toolkit.builders sign up');
		});
		return response()->view('registering.register', [
			'name' => $name,
			'email' => $email,
			'token'=> $token,
		]);
	}
}
| 31.265625 | 401 | 0.618691 |
d3bbf78c213e4cecf5fe1674415c41ebcc0c8580 | 1,079 | lua | Lua | csv.lua | prototux/haproxy-summary | db04e6d9416388c1c0f4b386288111cf2b23764b | [
"BSD-2-Clause"
] | 1 | 2017-01-28T17:19:03.000Z | 2017-01-28T17:19:03.000Z | csv.lua | prototux/haproxy-summary | db04e6d9416388c1c0f4b386288111cf2b23764b | [
"BSD-2-Clause"
] | null | null | null | csv.lua | prototux/haproxy-summary | db04e6d9416388c1c0f4b386288111cf2b23764b | [
"BSD-2-Clause"
] | null | null | null | -- Small CSV helper
-- Source: http://lua-users.org/wiki/LuaCsv
local csv = {}

-- Splits one CSV line into a table of field strings.
-- Supports double-quoted fields, including embedded separators and
-- doubled quotes ("") as an escaped quote character.
-- line: the raw CSV line (no trailing newline expected)
-- sep:  single-character field separator; defaults to ','
function csv.parse(line, sep)
    local res = {}
    local pos = 1
    sep = sep or ','
    while true do
        local c = string.sub(line,pos,pos)
        -- End of line: stop (note a trailing separator's empty last field
        -- is not appended by this loop).
        if (c == "") then break end
        if (c == '"') then
            -- Quoted field: consume balanced "..." chunks; a doubled quote
            -- immediately after a chunk means an escaped quote, so keep going.
            local txt = ""
            repeat
                local startp,endp = string.find(line,'^%b""',pos)
                txt = txt..string.sub(line,startp+1,endp-1)
                pos = endp + 1
                c = string.sub(line,pos,pos)
                if (c == '"') then txt = txt..'"' end
            until (c ~= '"')
            table.insert(res,txt)
            -- After a quoted field the next char must be the separator or EOL.
            assert(c == sep or c == "")
            pos = pos + 1
        else
            -- Unquoted field: runs until the next separator or end of line.
            local startp,endp = string.find(line,sep,pos)
            if (startp) then
                table.insert(res,string.sub(line,pos,startp-1))
                pos = endp + 1
            else
                table.insert(res,string.sub(line,pos))
                break
            end
        end
    end
    return res
end

return csv
81eb4a4916543a1ff893bba612beebe9fcf055de | 4,380 | kt | Kotlin | android-lang/testSrc/com/android/tools/idea/lang/multiDexKeep/MultiDexKeepParserTest.kt | qq1056779951/android | b9677e7537be580437756b17bfca83a907f18598 | [
"Apache-2.0"
] | 831 | 2016-06-09T06:55:34.000Z | 2022-03-30T11:17:10.000Z | android-lang/testSrc/com/android/tools/idea/lang/multiDexKeep/MultiDexKeepParserTest.kt | qq1056779951/android | b9677e7537be580437756b17bfca83a907f18598 | [
"Apache-2.0"
] | 19 | 2017-10-27T00:36:35.000Z | 2021-02-04T13:59:45.000Z | android-lang/testSrc/com/android/tools/idea/lang/multiDexKeep/MultiDexKeepParserTest.kt | qq1056779951/android | b9677e7537be580437756b17bfca83a907f18598 | [
"Apache-2.0"
] | 210 | 2016-07-05T12:22:36.000Z | 2022-03-19T09:07:15.000Z | /*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.tools.idea.lang.multiDexKeep
import com.android.tools.idea.flags.StudioFlags
import com.android.tools.idea.lang.AndroidParsingTestCase
import com.intellij.psi.TokenType
/**
 * Parser/lexer tests for the multiDexKeep file format: each test feeds text
 * through the parser and compares the resulting PSI tree (as text) against
 * an expected dump; the lexer is additionally checked for BAD_CHARACTER
 * tokens in [check].
 */
class MultiDexKeepParserTest : AndroidParsingTestCase(MultiDexKeepFileType.INSTANCE.defaultExtension, MultiDexKeepParserDefinition()) {

  override fun getTestDataPath() = com.android.tools.idea.lang.getTestDataPath()

  override fun setUp() {
    super.setUp()
    // Feature is behind a flag; force it on for the duration of each test.
    StudioFlags.MULTI_DEX_KEEP_FILE_SUPPORT_ENABLED.override(true)
  }

  override fun tearDown() {
    try {
      StudioFlags.MULTI_DEX_KEEP_FILE_SUPPORT_ENABLED.clearOverride()
    }
    finally {
      super.tearDown()
    }
  }

  /**
   * Asserts that [input] parses without errors and lexes without producing
   * any BAD_CHARACTER tokens.
   */
  private fun check(input: String) {
    assert(getErrorMessage(input) == null, lazyMessage = { toParseTreeText(input) })

    val lexer = MultiDexKeepLexerAdapter()
    lexer.start(input)
    while (lexer.tokenType != null) {
      assert(lexer.tokenType != TokenType.BAD_CHARACTER) { "BAD_CHARACTER ${lexer.tokenText}" }
      lexer.advance()
    }
  }

  // Spaces are not valid inside class names; the parser must emit an error
  // element and BAD_CHARACTER tokens instead of crashing.
  fun testParseResultOnFailure() {
    assertEquals(
      """
      MultiDexKeep File
        MultiDexKeepClassNamesImpl(CLASS_NAMES)
          MultiDexKeepClassNameImpl(CLASS_NAME)
            PsiElement(class file name)('this')
          PsiErrorElement:class file name or new line expected
            PsiElement(BAD_CHARACTER)(' ')
          PsiElement(class file name)('should')
          PsiElement(BAD_CHARACTER)(' ')
          PsiElement(class file name)('fail')
      """.trimIndent(),
      toParseTreeText("this should fail")
    )
  }

  fun testNoParseErrors() {
    check("com/somePackage/SomeClass.class")
  }

  fun testParsedResult() {
    assertEquals(
      """
      MultiDexKeep File
        MultiDexKeepClassNamesImpl(CLASS_NAMES)
          MultiDexKeepClassNameImpl(CLASS_NAME)
            PsiElement(class file name)('com/somePackage/SomeClass.class')
      """.trimIndent(),
      toParseTreeText("""
        com/somePackage/SomeClass.class
      """.trimIndent())
    )
  }

  // Inner classes use '$' in their file names; it must lex as part of the name.
  fun testInnerClasses() {
    assertEquals(
      """
      MultiDexKeep File
        MultiDexKeepClassNamesImpl(CLASS_NAMES)
          MultiDexKeepClassNameImpl(CLASS_NAME)
            PsiElement(class file name)('com/somePackage/SomeClass${'$'}Inner.class')
      """.trimIndent(),
      toParseTreeText("com/somePackage/SomeClass\$Inner.class")
    )
  }

  fun testMultipleLinesParsedResult() {
    assertEquals(
      """
      MultiDexKeep File
        MultiDexKeepClassNamesImpl(CLASS_NAMES)
          MultiDexKeepClassNameImpl(CLASS_NAME)
            PsiElement(class file name)('com/somePackage/SomeClass.class')
          MultiDexKeepClassNameImpl(CLASS_NAME)
            PsiElement(class file name)('com/somePackage/AnotherClass.class')
      """.trimIndent(),
      toParseTreeText("""
        com/somePackage/SomeClass.class
        com/somePackage/AnotherClass.class
      """.trimIndent())
    )
  }

  // Blank lines between entries are tolerated and do not appear in the tree.
  fun testEmptyLinesBetweenClassNamesParsedResult() {
    assertEquals(
      """
      MultiDexKeep File
        MultiDexKeepClassNamesImpl(CLASS_NAMES)
          MultiDexKeepClassNameImpl(CLASS_NAME)
            PsiElement(class file name)('com/somePackage/SomeClass.class')
          MultiDexKeepClassNameImpl(CLASS_NAME)
            PsiElement(class file name)('com/somePackage/AnotherClass.class')
          MultiDexKeepClassNameImpl(CLASS_NAME)
            PsiElement(class file name)('com/somePackage/OneLastClass.class')
      """.trimIndent(),
      toParseTreeText("""
        com/somePackage/SomeClass.class

        com/somePackage/AnotherClass.class


        com/somePackage/OneLastClass.class
      """.trimIndent())
    )
  }
}
df8d1ae2242076bdf69f9fa487f491b863946bc4 | 6,998 | tsx | TypeScript | app/components/zaposleni/components/zaposleniModal/zaposleniModal.tsx | BogMil/racunovodja | 1ba95eafb2a04056ea279f7a93fb1b034564060a | [
"MIT"
] | 1 | 2020-09-19T19:21:58.000Z | 2020-09-19T19:21:58.000Z | app/components/zaposleni/components/zaposleniModal/zaposleniModal.tsx | BogMil/racunovodja | 1ba95eafb2a04056ea279f7a93fb1b034564060a | [
"MIT"
] | 3 | 2021-01-28T21:01:59.000Z | 2022-02-08T17:50:37.000Z | app/components/zaposleni/components/zaposleniModal/zaposleniModal.tsx | BogMil/racunovodja | 1ba95eafb2a04056ea279f7a93fb1b034564060a | [
"MIT"
] | null | null | null | import React from 'react';
import { Button, Modal, Form, Row, Col } from 'react-bootstrap';
import { useSelector, useDispatch } from 'react-redux';
import {
close,
updateZaposleniState,
setErrors
} from './zaposleniModal.actions';
import { AppStore } from '../../../../reducers';
import { reloadEmployees } from '../../zaposleni.actions';
import * as Service from '../../zaposleni.service';
import { handleResponse } from '../../../../utils/responseHandler';
import { CREATE_MODE, EDIT_MODE } from '../../../../constants/modalModes';
import { ErrorText } from '../../../common/errorText';
import { User } from '../../../auth/auth.store.types';
/**
 * Modal dialog for creating or editing an employee ("zaposleni").
 *
 * All state (the employee draft, CREATE/EDIT mode, visibility, title,
 * municipality list and server-side validation errors) lives in the Redux
 * store; this component only renders it and dispatches actions.
 *
 * Fix: the change handler previously only treated an input named 'active'
 * as a checkbox, but the actual checkbox below is named "aktivan" — so
 * toggling it dispatched the string `value` ("on") instead of the boolean
 * `checked`. The handler now keys on the input *type*, which covers any
 * checkbox regardless of its name.
 */
export default function ZaposleniModalComponent() {
  const dispatch = useDispatch();

  // Modal-local slice of the store: employee draft + dialog metadata.
  const { zaposleni, mode, show, title, opstine, errors } = useSelector(
    (state: AppStore) => {
      return state.zaposleniPage.zaposleniModal;
    }
  );

  // Access rights of the logged-in user; `prava_pristupa.opiro` gates the
  // municipality selector below.
  const { prava_pristupa } = useSelector((state: AppStore) => {
    return state.auth.user as User;
  });

  const handleClose = () => {
    dispatch(close());
  };

  // Generic controlled-input handler: checkboxes report their state through
  // `checked`, every other input through `value`.
  const handleChange = (e: any) => {
    let value = e.target.value;
    let name = e.target.name;
    if (e.target.type === 'checkbox') value = e.target.checked;
    dispatch(updateZaposleniState(name, value));
  };

  // Persists the draft: creates in CREATE_MODE, updates in EDIT_MODE.
  // On success the employee list is reloaded and the modal closed; on a
  // validation failure the server errors are pushed into the store and
  // rendered next to the corresponding fields.
  const handleSave = async () => {
    if (mode == CREATE_MODE)
      handleResponse(
        await Service.createEmployee(zaposleni),
        () => {
          dispatch(reloadEmployees());
          dispatch(close());
        },
        () => {},
        (response: any) => {
          dispatch(setErrors(response.data.errors));
        }
      );
    else if (mode == EDIT_MODE)
      handleResponse(
        await Service.updateEmployee(zaposleni),
        () => {
          dispatch(reloadEmployees());
          dispatch(close());
        },
        () => {},
        (response: any) => {
          dispatch(setErrors(response.data.errors));
        }
      );
  };

  return (
    <Modal
      backdrop="static"
      centered
      show={show}
      onHide={handleClose}
      className="noselect"
    >
      <Modal.Header closeButton style={{}}>
        <Modal.Title as="h5">{title}</Modal.Title>
      </Modal.Header>
      <Modal.Body>
        <Form>
          <Row>
            <Col md={6}>
              <Form.Group>
                <Form.Label>JMBG</Form.Label>
                <Form.Control
                  name="jmbg"
                  placeholder="Unesite JMBG"
                  value={zaposleni.jmbg}
                  onChange={handleChange}
                />
                <ErrorText text={errors?.jmbg} />
              </Form.Group>
            </Col>
            <Col md={6}>
              <Form.Group>
                <Form.Label>Broj zaposlenog</Form.Label>
                <Form.Control
                  name="sifra"
                  onChange={handleChange}
                  placeholder="Unesite broj zaposlenog"
                  value={zaposleni.sifra}
                />
                <ErrorText text={errors?.sifra} />
              </Form.Group>
            </Col>
          </Row>
          <Row>
            <Col md={7}>
              <Form.Group>
                <Form.Label>Prezime</Form.Label>
                <Form.Control
                  name="prezime"
                  onChange={handleChange}
                  placeholder="Unesite prezime"
                  value={zaposleni.prezime}
                />
                <ErrorText text={errors?.prezime} />
              </Form.Group>
            </Col>
            <Col md={5}>
              <Form.Group>
                <Form.Label>Ime</Form.Label>
                <Form.Control
                  name="ime"
                  onChange={handleChange}
                  placeholder="Unesite ime"
                  value={zaposleni.ime}
                />
                <ErrorText text={errors?.ime} />
              </Form.Group>
            </Col>
          </Row>
          <Row>
            <Col md={6}>
              <Form.Group>
                <Form.Label>Broj računa</Form.Label>
                <Form.Control
                  name="bankovni_racun"
                  onChange={handleChange}
                  placeholder="Unesite broj računa"
                  value={zaposleni.bankovni_racun}
                />
                <ErrorText text={errors?.bankovni_racun} />
              </Form.Group>
            </Col>
            <Col md={6}>
              {prava_pristupa.opiro && (
                <Form.Group>
                  <Form.Label>Opština stanovanja</Form.Label>
                  <Form.Control
                    as="select"
                    custom
                    name="id_opstine"
                    onChange={handleChange}
                    value={zaposleni.id_opstine}
                  >
                    <>
                      <option value="">---</option>
                      {opstine.map(opstina => {
                        return (
                          <option key={opstina.id} value={opstina.id}>
                            {opstina.naziv}
                          </option>
                        );
                      })}
                    </>
                  </Form.Control>
                  {/* NOTE(review): field is named "id_opstine" but the error
                      key read here is "opstina_id" — confirm against the
                      server's error payload. */}
                  <ErrorText text={errors?.opstina_id} />
                </Form.Group>
              )}
            </Col>
          </Row>
          <Row>
            <Col md={9}>
              <Form.Group>
                <Form.Label>Email</Form.Label>
                <Form.Control
                  name="email1"
                  onChange={handleChange}
                  placeholder="Unesite Email adresu"
                  value={zaposleni.email1 ?? ''}
                />
                <ErrorText text={errors?.email1} />
              </Form.Group>
            </Col>
            <Col md={3}>
              <Form.Group
                controlId="formBasicCheckbox"
                style={{ marginTop: 35 }}
              >
                <Form.Check
                  custom
                  name="aktivan"
                  type="checkbox"
                  label="Aktivan?"
                  checked={zaposleni.aktivan}
                  onChange={handleChange}
                />
                <ErrorText text={errors?.aktivan} />
              </Form.Group>
            </Col>
          </Row>
          <Row>
            <Col md={9}>
              <Form.Group>
                <Form.Label>Email 2 (opciono)</Form.Label>
                <Form.Control
                  name="email2"
                  onChange={handleChange}
                  placeholder="Unesite Email adresu"
                  value={zaposleni.email2 ?? ''}
                />
                <ErrorText text={errors?.email2} />
              </Form.Group>
            </Col>
          </Row>
        </Form>
      </Modal.Body>
      <Modal.Footer>
        <Button variant="primary" onClick={handleSave}>
          Sačuvaj
        </Button>
      </Modal.Footer>
    </Modal>
  );
}
| 30.294372 | 74 | 0.438697 |
d8d02339b668d288a2c120877157fe869cbd3ed5 | 7,623 | lua | Lua | soccar/gamestates/lobby.lua | Dummiesman/KissMP-Soccar | 07eec8951a117daac4f74ecf596ff8f537f78ac9 | [
"MIT"
] | 2 | 2021-04-25T20:01:14.000Z | 2021-04-26T02:31:40.000Z | soccar/gamestates/lobby.lua | Dummiesman/KissMP-Soccar | 07eec8951a117daac4f74ecf596ff8f537f78ac9 | [
"MIT"
] | null | null | null | soccar/gamestates/lobby.lua | Dummiesman/KissMP-Soccar | 07eec8951a117daac4f74ecf596ff8f537f78ac9 | [
"MIT"
] | null | null | null | local M = {}
M.name = "Lobby"

-- Seconds accumulated since all clients became ready (0 = not counting down).
local readyTimer = 0
-- client_id -> TEAM_* assignment chosen while in the lobby.
local lobbyTeamMap = {}
-- team related stuff
-- Returns the client id of some member of `team`, or nil when the team is
-- empty. pairs() iteration order is unspecified, so "first" means "an
-- arbitrary member".
local function getFirstIdOnTeam(team)
  for id, assignedTeam in pairs(lobbyTeamMap) do
    if assignedTeam == team then
      return id
    end
  end
  return nil
end
-- Counts how many lobby members are currently assigned to `team`.
local function getTeamMemberCount(team)
  local members = 0
  for _, assignedTeam in pairs(lobbyTeamMap) do
    if assignedTeam == team then
      members = members + 1
    end
  end
  return members
end
-- True when every currently connected client has picked a team, i.e. the
-- number of connections (cc) equals the number of connections present in
-- lobbyTeamMap (ctc).
local function allClientsOnTeams()
  local cc = 0
  local ctc = 0
  for client_id, connection in pairs(getConnections()) do
    if lobbyTeamMap[client_id] then ctc = ctc + 1 end
    cc = cc + 1
  end
  return cc == ctc
end
-- Returns an array of client ids for connections that have not yet been
-- assigned a team.
local function getClientsTableWithoutTeam()
  local t = {}
  for client_id, connection in pairs(getConnections()) do
    if not lobbyTeamMap[client_id] then table.insert(t, client_id) end
  end
  return t
end
-- True when `team` cannot accept more members.
-- A missing entry in TEAM_LIMITS counts as full (defensive); a negative
-- limit means "no limit".
local function checkTeamFull(team)
  local limit = TEAM_LIMITS[team]
  if not limit then return true end
  if limit < 0 then return false end
  return getTeamMemberCount(team) >= limit
end
-- Assigns `client` to `team` in lobbyTeamMap and sends a chat message
-- describing whether this was a change, a no-op (already on the team), or a
-- first-time assignment. Callers are expected to have checked team capacity
-- via checkTeamFull() beforehand.
local function setTeam(client, team)
  local currentTeam = lobbyTeamMap[client:getID()]
  local newTeamName = TEAM_NAMES[team]
  lobbyTeamMap[client:getID()] = team
  if currentTeam and currentTeam ~= team then
    local currentTeamName = TEAM_NAMES[currentTeam]
    sendChatMessage(client, "Changed team from " .. currentTeamName .. " to " .. newTeamName .. ".", {r=1,g=1})
  elseif currentTeam and currentTeam == team then
    sendChatMessage(client, "You're already on the " .. newTeamName .. " team.", {r=1,g=1})
  else
    sendChatMessage(client, "Set team to " .. newTeamName .. ".", {r=1,g=1})
  end
end
-- Transitions from the lobby into a running game:
--   1. If every connected player ended up on the same team, moves one
--      player to the opposite team so both teams are populated.
--   2. Resets GameData and registers every connection as a participant on
--      the team chosen in the lobby.
--   3. Removes each player's 2nd+ vehicles, never touching the ball vehicle.
--   4. Switches the state machine to GAMESTATE_RUNNING.
-- Fix: the "reassigned" message for the Blue->Red case was missing the
-- sentence-ending period present in the Red->Blue case.
local function startGame()
  -- first off, move someone off their team if the other team is empty
  local cc = getConnectionCount()
  if cc > 1 then
    local rc = getTeamMemberCount(TEAM_RED)
    local bc = getTeamMemberCount(TEAM_BLUE)
    if rc == cc or bc == cc then
      -- We must reassign someone
      if rc == cc then
        local id = getFirstIdOnTeam(TEAM_RED)
        lobbyTeamMap[id] = TEAM_BLUE
        sendChatMessage(getConnection(id), "*** Your team has been reassigned because everyone was on one team. Your new team is Blue ***", {r=1,g=1})
      else
        local id = getFirstIdOnTeam(TEAM_BLUE)
        lobbyTeamMap[id] = TEAM_RED
        sendChatMessage(getConnection(id), "*** Your team has been reassigned because everyone was on one team. Your new team is Red ***", {r=1,g=1})
      end
    end
  end

  -- clear existing game participants leftover from any previous runs
  GameData.reset()

  -- add everyone to participants list, mirrored into the per-team tables
  for client_id, _ in pairs(getConnections()) do
    local participant = GameData.createPlayer(client_id)
    GameData.participants[client_id] = participant
    GameData.teams[lobbyTeamMap[client_id]].participants[client_id] = participant
    GameData.participants[client_id].team = lobbyTeamMap[client_id]
  end

  -- remove players 2nd+ vehicles (the ball vehicle is explicitly excluded)
  local removeVehiclesTable = {}
  for client_id, _ in pairs(getConnections()) do
    local vc = 0
    for vehicle_id, vehicle in pairs(vehicles) do
      if vehicle:getData():getOwner() == client_id and vehicle:getData():getID() ~= GameData.ballVehicleId then
        vc = vc + 1
        if vc > 1 then
          table.insert(removeVehiclesTable, vehicle)
        end
      end
    end
  end
  for _, vehicle in pairs(removeVehiclesTable) do
    vehicle:remove()
  end

  -- move to running state
  StateManager.switchToState(GAMESTATE_RUNNING)
end
-- state stuff
-- Drops the disconnecting client's team assignment so they do not count
-- toward readiness or team sizes.
local function onPlayerDisconnected(client_id)
  lobbyTeamMap[client_id] = nil
end
-- Resets lobby state on entry: everyone must pick a team again and the
-- ready countdown starts over.
local function onEnterState()
  lobbyTeamMap = {}
  readyTimer = 0
end
-- Lobby chat-command handler. Returning "" consumes the message (it is not
-- broadcast); returning nothing lets normal chat pass through.
-- Commands: /s (debug start), /team blue | /blue, /team red | /red,
-- /random (random team), /setball | /ball (mark caller's vehicle as ball).
local function onChatMessage(client_id, message)
  local messageLower = message:lower()

  -- debug: /s starts the game immediately (note: case-sensitive on purpose,
  -- checked against the raw message)
  if GameData.DEBUG_MODE then
    if message == "/s" then startGame() return "" end
  end

  -- team assignment
  if messageLower == "/team blue" or messageLower == "/blue" then
    if not checkTeamFull(TEAM_BLUE) then
      setTeam(getConnection(client_id), TEAM_BLUE)
    else
      sendChatMessage(getConnection(client_id), "This team is full", {r=1})
    end
    return ""
  end

  if messageLower == "/team red" or messageLower == "/red" then
    if not checkTeamFull(TEAM_RED) then
      setTeam(getConnection(client_id), TEAM_RED)
    else
      sendChatMessage(getConnection(client_id), "This team is full", {r=1})
    end
    return ""
  end

  if messageLower == "/random" then
    -- Pick a random team; fall back to the other one if the pick is full.
    local r = math.random()
    local attemptTeam = nil
    local alternateTeam = nil
    if r > 0.5 then
      attemptTeam = TEAM_RED
      alternateTeam = TEAM_BLUE
    else
      attemptTeam = TEAM_BLUE
      alternateTeam = TEAM_RED
    end

    if checkTeamFull(attemptTeam) then
      attemptTeam = alternateTeam
    end

    if checkTeamFull(attemptTeam) then
      -- can't assign any team?
      sendChatMessage(getConnection(client_id), "All teams are full", {r=1})
    else
      sendChatMessage(getConnection(client_id), "The randomizer assigns you to the " .. TEAM_NAMES[attemptTeam] .. " team.", {r=1, g=1})
      setTeam(getConnection(client_id), attemptTeam)
    end
    return ""
  end

  -- ball assignment
  if messageLower == "/setball" or messageLower == "/ball" then
    -- get clients active vehicle and set it as ballVehicleId
    local client = getConnection(client_id)
    local vehicleId = vehicleIdWrapper(client:getCurrentVehicle())
    if not vehicleId then
      sendChatMessage(getConnection(client_id), "Failed to set ball vehicle", {r=1})
      return ""
    end
    local vehicle = vehicles[vehicleId]
    if not vehicle then
      sendChatMessage(getConnection(client_id), "Failed to set ball vehicle", {r=1})
      return ""
    end
    sendChatMessage(getConnection(client_id), "Ball vehicle set", {g=1})
    GameData.ballVehicleId = vehicle:getData():getID()
    return ""
  end
end
-- Per-tick lobby update (dt = seconds since last tick).
-- When all (>= 2) clients are ready, runs a start countdown (5 s in debug,
-- 10 s otherwise) and then starts the game. Otherwise resets the countdown
-- and, roughly once per minute, nags the players who still lack a team.
local function update(dt)
  local ready = allClientsOnTeams()
  local connectionCount = getConnectionCount()
  if ready and connectionCount >= 2 then
    -- if the timer is 0, we've just entered ready state. Notify clients.
    local startTime = GameData.DEBUG_MODE and 5 or 10
    if readyTimer == 0 then
      broadcastChatMessageAndToast("The game will start in " .. tostring(startTime) .. " second(s)", {r=1,g=1})
    end
    readyTimer = readyTimer + dt

    -- start game after timer ends
    if readyTimer > startTime then
      startGame()
    end
  else
    -- if the timer is not 0, we *were* in ready state, and something happened
    if readyTimer ~= 0 then
      broadcastChatMessageAndToast("Start timer interrupted. All clients are no longer ready.")
    end

    -- notify players that they need a team; the modulo wrap-around detects
    -- each 60-second boundary of time spent in this state
    local lobbyNotifTimer = StateManager.timeInState % 60
    local lobbyNotifTimerNext = (StateManager.timeInState + dt) % 60
    if lobbyNotifTimerNext < lobbyNotifTimer then
      broadcastChatMessage("In lobby mode. Waiting for all players to assign a team.")
      -- get the players who have no team
      local noTeamMap = getClientsTableWithoutTeam()
      local noTeamNameMap = {}
      for _,id in pairs(noTeamMap) do
        table.insert(noTeamNameMap, getConnection(id):getName())
      end
      broadcastChatMessage("The following players have not assigned a team yet: " .. strTableToStr(noTeamNameMap), {r=1})
    end
    --
    readyTimer = 0
  end
end
-- Gamestate interface consumed by the state machine.
M.onEnterState = onEnterState
M.onChatMessage = onChatMessage
M.onPlayerDisconnected = onPlayerDisconnected
M.update = update

return M
3ad035d48c553ac0ad6305088be65627cb060760 | 1,114 | sql | SQL | taskana-adapter-camunda-spring-boot-test/src/main/resources/sql/clear-camunda-db.sql | sofie29/TaskanaAdapter | 1d442605b6348dcbad6d46e87ffe0e8e6500b4f4 | [
"Apache-2.0"
] | null | null | null | taskana-adapter-camunda-spring-boot-test/src/main/resources/sql/clear-camunda-db.sql | sofie29/TaskanaAdapter | 1d442605b6348dcbad6d46e87ffe0e8e6500b4f4 | [
"Apache-2.0"
] | null | null | null | taskana-adapter-camunda-spring-boot-test/src/main/resources/sql/clear-camunda-db.sql | sofie29/TaskanaAdapter | 1d442605b6348dcbad6d46e87ffe0e8e6500b4f4 | [
"Apache-2.0"
] | null | null | null | -- ENGINE BPMN --
-- Dont clear ByteArray and procdef! --
DELETE FROM ACT_GE_PROPERTY;
DELETE FROM ACT_RU_JOB;
DELETE FROM ACT_RU_JOBDEF;
DELETE FROM ACT_RU_TASK;
DELETE FROM ACT_RU_VARIABLE;
DELETE FROM ACT_RU_EXECUTION;
DELETE FROM ACT_RU_EVENT_SUBSCR;
DELETE FROM ACT_RU_IDENTITYLINK;
DELETE FROM ACT_RU_EVENT_SUBSCR;
DELETE FROM ACT_RU_INCIDENT;
DELETE FROM ACT_RU_FILTER;
DELETE FROM ACT_RU_METER_LOG;
DELETE FROM ACT_RU_EXT_TASK;
DELETE FROM ACT_RU_BATCH;
--DMN--
DELETE FROM ACT_RE_DECISION_DEF;
--CMMN--
DELETE FROM ACT_RU_CASE_EXECUTION;
DELETE FROM ACT_RU_CASE_SENTRY_PART;
DELETE FROM ACT_RE_CASE_DEF;
--History--
DELETE FROM ACT_HI_OP_LOG;
DELETE FROM ACT_HI_DETAIL;
DELETE FROM ACT_HI_TASKINST;
DELETE FROM ACT_HI_VARINST;
DELETE FROM ACT_HI_JOB_LOG;
DELETE FROM ACT_HI_BATCH;
DELETE FROM ACT_HI_IDENTITYLINK;
DELETE FROM ACT_HI_PROCINST;
DELETE FROM ACT_HI_ACTINST;
DELETE FROM ACT_HI_INCIDENT;
DELETE FROM ACT_HI_COMMENT;
DELETE FROM ACT_HI_ATTACHMENT;
DELETE FROM ACT_HI_CASEINST;
DELETE FROM ACT_HI_CASEACTINST;
DELETE FROM ACT_HI_DECINST;
DELETE FROM ACT_HI_DEC_IN;
DELETE FROM ACT_HI_DEC_OUT; | 25.906977 | 39 | 0.841113 |
937f7a346b5c8fd8dc08a7c0e0dce54c9897fa80 | 525 | kt | Kotlin | ansi/src/main/kotlin/uk/tvidal/ansi/AnsiMove.kt | tvidal-net/tvidal-lib | 02b10c64c0dfbf69320d5a798ee165f4ddf10767 | [
"MIT"
] | null | null | null | ansi/src/main/kotlin/uk/tvidal/ansi/AnsiMove.kt | tvidal-net/tvidal-lib | 02b10c64c0dfbf69320d5a798ee165f4ddf10767 | [
"MIT"
] | null | null | null | ansi/src/main/kotlin/uk/tvidal/ansi/AnsiMove.kt | tvidal-net/tvidal-lib | 02b10c64c0dfbf69320d5a798ee165f4ddf10767 | [
"MIT"
] | null | null | null | package uk.tvidal.ansi
import java.lang.System.lineSeparator
/**
 * ANSI CSI cursor / erase commands, keyed by their final command character.
 * Invoking a constant prints the escape sequence "ESC[<value><direction>"
 * when ANSI is supported, otherwise prints [default] (a newline by default).
 */
enum class AnsiMove(val direction: Char) {
    UP('A'),
    DOWN('B'),
    RIGHT('C'),
    LEFT('D'),
    NEXT('E'),
    PREV('F'),
    COL('G'),
    POS('H'),
    CLEAR_DISPLAY('J'),
    CLEAR_LINE('K'),
    SCROLL_UP('S'),
    SCROLL_DOWN('T'),
    SAVE_CURSOR('s'),
    RESTORE_CURSOR('u');

    /**
     * Emits the escape sequence with [value] as the numeric/positional
     * argument; falls back to printing [default] on non-ANSI terminals.
     */
    operator fun invoke(value: Any, default: String = lineSeparator()) {
        if (hasAnsiSupport) print("$ESC[$value$direction")
        else print(default)
    }
}
fb031942defd4bee2474e2992265fdec3f9f400d | 354 | php | PHP | src/Exceptions/InvalidHttpVerbException.php | angelxmoreno/copyleaks | 0e6f1a2a93598bf796677361047404a0d2d50207 | [
"MIT"
] | null | null | null | src/Exceptions/InvalidHttpVerbException.php | angelxmoreno/copyleaks | 0e6f1a2a93598bf796677361047404a0d2d50207 | [
"MIT"
] | 2 | 2018-05-18T04:31:41.000Z | 2018-05-19T22:47:03.000Z | src/Exceptions/InvalidHttpVerbException.php | angelxmoreno/copyleaks | 0e6f1a2a93598bf796677361047404a0d2d50207 | [
"MIT"
] | null | null | null | <?php
namespace Axm\CopyLeaks\Exceptions;
use Axm\CopyLeaks\Constants;
/**
 * Thrown when a caller passes an HTTP verb other than the supported
 * GET/POST to the CopyLeaks client.
 *
 * Class InvalidHttpVerbException
 * @package Axm\CopyLeaks\Exceptions
 */
class InvalidHttpVerbException extends ExceptionBase
{
    /**
     * sprintf template for the exception message; %s is the invalid verb.
     * @var string
     */
    protected $msg_tpl = '%s is not valid. please use ' . Constants::HTTP_POST . ' or ' . Constants::HTTP_GET;
}
c6d7a8763e708add2c984976e84271ca6ca539ba | 2,705 | rb | Ruby | werd.rb | rk/werd | eaf2b101ee648df1e95b6d41215215f881b5814f | [
"Ruby"
] | 5 | 2015-11-05T03:15:19.000Z | 2021-09-13T21:48:53.000Z | werd.rb | rk/werd | eaf2b101ee648df1e95b6d41215215f881b5814f | [
"Ruby"
] | null | null | null | werd.rb | rk/werd | eaf2b101ee648df1e95b6d41215215f881b5814f | [
"Ruby"
] | null | null | null | #encoding:utf-8
# Random Word Generator with simple rules (see datafiles); recursive;
# details on datafile format can be found in the English ruleset;
# by Robert Kosek, robert.kosek@thewickedflea.com.
#
# Based on the Perl version by Chris Pound (pound@rice.edu), which was
# based on Mark Rosenfelder's Pascal implementation.
#
# Improvements:
# - Now parsed via a PEG parser, with greater flexibility such as
# slashes within the regular expressions.
# - Mutations via Regex! Now you can separate syllables with dashes
# and then perform substitution on it.
# - Optional sections can be wrapped in parenthesis!
# CV(N)C => CVC || CVNC
# - Nestable parenthesis, in case it becomes useful to someone.
# - Generation of an infinite series of words
# - Technical support for Unicode (touch not ye first line)
# - Vertical compaction with tab-delimited list instead of new-lines
require 'ostruct'
require 'optparse'
require 'rubygems'
require './lib/language'
# Global run options with defaults.
# NOTE(review): "seperator" is a misspelling of "separator", but it is used
# consistently throughout this script; left as-is to avoid churn.
$options = OpenStruct.new
$options.number = 50          # how many words to generate
$options.seperator = "\n"     # printed between words
$options.morphology= false    # apply morphology derivations
$options.debug = false
$options.keep_syllables = false

# Command-line interface; the ruleset file is the first positional argument.
op = OptionParser.new do |opts|
  opts.banner = "Usage: #{$0} [FILE] [$options]"

  $options.file = ARGV.first

  opts.on("-n", "--number NUM", Integer, "How many words to generate") do |n|
    $options.number = n
  end

  # -1 is the sentinel for "generate forever"
  opts.on('-i', '--infinite', 'Generates an infinite set of words') do
    $options.number = -1
  end

  opts.on('-c', '--compact', 'Seperates words with a tab') do
    $options.seperator = "\t"
  end

  opts.on('-m', '--[no-]mutate', 'Perform morphology derivations') do |m|
    $options.morphology = m
  end

  opts.on('--keep-syllables', 'Leave syllable breaks in the output') do
    $options.keep_syllables = true
  end

  opts.on('--debug', 'Enable debug output') do
    $options.debug = true
  end

  opts.on_tail("-h", "--help", "Show this message") do
    puts opts
    exit
  end
end

# Parse; any parse error (or a missing FILE argument) prints the error plus
# usage and exits.
begin
  op.parse!
  raise OptionParser::MissingArgument.new("[FILE] must be specified.") if $options.file.nil?
rescue
  puts $!, op
  exit
end

# Load the language ruleset and time how long parsing took.
start = Time.now
lang = Language.from_file($options.file)
printf("Took %.4f seconds to load the config file\n" % (Time.now - start))
srand

unless lang.empty?
  if $options.number == -1
    puts "Generating an infinite set of words from #{File.basename($options.file)}"
    loop do
      print lang.generate, $options.seperator
    end
  else
    puts "Generating #{$options.number} words from #{File.basename($options.file)}"
    ($options.number).times { print lang.generate, $options.seperator }
  end
  # compact mode ends without a trailing newline; add one for the shell
  puts if $options.seperator == "\t"
else
  raise "Cannot generate words without valid rules!"
end
| 27.886598 | 92 | 0.692791 |
39e65af1f7ab91d02e4c66b0913193fa51653d41 | 2,263 | java | Java | src/net/sf/l2j/gameserver/network/GameCrypt.java | abramsba/arc-server | 231db555a3322a9060414297658350ca10925536 | [
"MIT"
] | 3 | 2020-03-25T10:13:52.000Z | 2021-11-27T16:51:36.000Z | src/net/sf/l2j/gameserver/network/GameCrypt.java | abramsba/arc-server | 231db555a3322a9060414297658350ca10925536 | [
"MIT"
] | null | null | null | src/net/sf/l2j/gameserver/network/GameCrypt.java | abramsba/arc-server | 231db555a3322a9060414297658350ca10925536 | [
"MIT"
] | 3 | 2020-03-18T16:00:10.000Z | 2021-11-27T16:51:51.000Z | /*
* This program is free software: you can redistribute it and/or modify it under
* the terms of the GNU General Public License as published by the Free Software
* Foundation, either version 3 of the License, or (at your option) any later
* version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
* details.
*
* You should have received a copy of the GNU General Public License along with
* this program. If not, see <http://www.gnu.org/licenses/>.
*/
package net.sf.l2j.gameserver.network;
/**
* @author KenM
*/
/**
 * XOR stream cipher for game-client packets.
 *
 * Each byte is XOR-ed with a 16-byte key (cycled via {@code i & 15}) and
 * with the previous ciphertext byte, chaining the stream. After every
 * packet, key bytes 8..11 — treated as a little-endian 32-bit counter —
 * are advanced by the packet size, so the keystream differs per packet.
 */
public class GameCrypt
{
	// Independent key copies per direction: each side's counter advances
	// by the sizes of the packets flowing in that direction only.
	private final byte[] _inKey = new byte[16];
	private final byte[] _outKey = new byte[16];
	// Crypt stays off until the first call to encrypt() (see below), so the
	// initial outgoing packet is sent in the clear.
	private boolean _isEnabled;

	/** Seeds both direction keys with the same 16-byte key. */
	public void setKey(byte[] key)
	{
		System.arraycopy(key, 0, _inKey, 0, 16);
		System.arraycopy(key, 0, _outKey, 0, 16);
	}

	/**
	 * Decrypts {@code size} bytes of {@code raw} in place, starting at
	 * {@code offset}. Each plaintext byte is ciphertext ^ key byte ^
	 * previous ciphertext byte; afterwards the inbound key counter is
	 * advanced by {@code size}. No-op while crypt is disabled.
	 */
	public void decrypt(byte[] raw, final int offset, final int size)
	{
		if (!_isEnabled)
			return;

		// temp carries the *previous ciphertext* byte across iterations
		int temp = 0;
		for (int i = 0; i < size; i++)
		{
			int temp2 = raw[offset + i] & 0xFF;
			raw[offset + i] = (byte) (temp2 ^ _inKey[i & 15] ^ temp);
			temp = temp2;
		}

		// Roll the little-endian counter stored in key bytes 8..11.
		int old = _inKey[8] & 0xff;
		old |= _inKey[9] << 8 & 0xff00;
		old |= _inKey[10] << 0x10 & 0xff0000;
		old |= _inKey[11] << 0x18 & 0xff000000;

		old += size;

		_inKey[8] = (byte) (old & 0xff);
		_inKey[9] = (byte) (old >> 0x08 & 0xff);
		_inKey[10] = (byte) (old >> 0x10 & 0xff);
		_inKey[11] = (byte) (old >> 0x18 & 0xff);
	}

	/**
	 * Encrypts {@code size} bytes of {@code raw} in place, starting at
	 * {@code offset}. The very first call only enables the cipher and
	 * returns, leaving that packet unencrypted. Each ciphertext byte is
	 * plaintext ^ key byte ^ previous ciphertext byte; afterwards the
	 * outbound key counter is advanced by {@code size}.
	 */
	public void encrypt(byte[] raw, final int offset, final int size)
	{
		if (!_isEnabled)
		{
			_isEnabled = true;
			return;
		}

		// temp carries the previous *ciphertext* byte (the chaining value)
		int temp = 0;
		for (int i = 0; i < size; i++)
		{
			int temp2 = raw[offset + i] & 0xFF;
			temp = temp2 ^ _outKey[i & 15] ^ temp;
			raw[offset + i] = (byte) temp;
		}

		// Roll the little-endian counter stored in key bytes 8..11.
		int old = _outKey[8] & 0xff;
		old |= _outKey[9] << 8 & 0xff00;
		old |= _outKey[10] << 0x10 & 0xff0000;
		old |= _outKey[11] << 0x18 & 0xff000000;

		old += size;

		_outKey[8] = (byte) (old & 0xff);
		_outKey[9] = (byte) (old >> 0x08 & 0xff);
		_outKey[10] = (byte) (old >> 0x10 & 0xff);
		_outKey[11] = (byte) (old >> 0x18 & 0xff);
	}
}
| 26.011494 | 80 | 0.615555 |
e2e78a5daaa6857dcfda56300079eae65c0627b8 | 12,968 | kt | Kotlin | app/src/main/java/com/alwaystinkering/sandbot/repo/SandbotRepository.kt | grammesm/SandBot-Android | db392f967076e31f27f2e5d90b483e97c259e789 | [
"Apache-2.0"
] | null | null | null | app/src/main/java/com/alwaystinkering/sandbot/repo/SandbotRepository.kt | grammesm/SandBot-Android | db392f967076e31f27f2e5d90b483e97c259e789 | [
"Apache-2.0"
] | null | null | null | app/src/main/java/com/alwaystinkering/sandbot/repo/SandbotRepository.kt | grammesm/SandBot-Android | db392f967076e31f27f2e5d90b483e97c259e789 | [
"Apache-2.0"
] | null | null | null | package com.alwaystinkering.sandbot.repo
import android.util.Log
import androidx.core.util.Consumer
import androidx.lifecycle.MutableLiveData
import com.alwaystinkering.sandbot.api.*
import com.alwaystinkering.sandbot.model.LedState
import com.alwaystinkering.sandbot.model.pattern.*
import com.alwaystinkering.sandbot.util.RefreshLiveData
import com.alwaystinkering.sandbot.util.toInt
import okhttp3.MediaType
import okhttp3.MediaType.Companion.toMediaTypeOrNull
import okhttp3.MultipartBody
import okhttp3.RequestBody
import okhttp3.RequestBody.Companion.toRequestBody
import okhttp3.ResponseBody
import org.apache.commons.io.FilenameUtils
import retrofit2.Call
import retrofit2.Callback
import retrofit2.Response
import javax.inject.Inject
/**
 * Retrofit-backed repository for the SandBot controller: status polling,
 * pattern-file management (list/save/play/delete/load), motion commands
 * (resume/pause/stop/home) and LED control.
 */
class SandbotRepository @Inject constructor(
    private val api: SandbotService,
) {
    private val TAG: String = "SandbotRepository"

    // Last status returned by getStatus(); used as the baseline for LED
    // writes. NOTE(review): lateinit — writeLedOnOff()/writeLedValue() will
    // throw if invoked before the first successful getStatus() response.
    private lateinit var lastStatus: BotStatus

    /**
     * Polls the bot status endpoint; the returned RefreshLiveData re-issues
     * the request whenever its refresh callback fires. Also caches the
     * result into [lastStatus]. Delivers null on network failure.
     */
    fun getStatus(): RefreshLiveData<BotStatus> {
        return RefreshLiveData { callback ->
            api.getStatus().enqueue(object : Callback<BotStatus> {
                override fun onResponse(call: Call<BotStatus>, response: Response<BotStatus>) {
                    if (response.isSuccessful) {
                        lastStatus = response.body()!!
                        callback.onDataLoaded(response.body())
                    }
                }

                override fun onFailure(call: Call<BotStatus>, t: Throwable) {
                    Log.e(TAG, "Status Failure: ${t.localizedMessage}")
                    callback.onDataLoaded(null)
                }
            })
        }
    }

    /**
     * Lists the bot's pattern files, mapped into [AbstractPattern]
     * subclasses via [createPatternFromFile]. Delivers null on failure.
     */
    fun getFileList(): RefreshLiveData<List<AbstractPattern>> {
        return RefreshLiveData { callback ->
            api.listFiles().enqueue(object : Callback<FileListResult> {
                override fun onFailure(call: Call<FileListResult>, t: Throwable) {
                    callback.onDataLoaded(null)
                    Log.e(TAG, "File list error ${t.localizedMessage}")
                }

                override fun onResponse(
                    call: Call<FileListResult>,
                    response: Response<FileListResult>
                ) {
                    if (response.isSuccessful) {
                        Log.d(TAG, "File List Retrieved")
                        callback.onDataLoaded(response.body()!!.sandBotFiles!!.map { createPatternFromFile(it) })
                    }
                }
            })
        }
    }

    /** Like [getFileList] but delivers the raw [FileListResult] unmapped. */
    fun getFileListResult(): RefreshLiveData<FileListResult> {
        return RefreshLiveData { callback ->
            api.listFiles().enqueue(object : Callback<FileListResult> {
                override fun onFailure(call: Call<FileListResult>, t: Throwable) {
                    Log.e(TAG, "File List Result Failure: ${t.localizedMessage}")
                }

                override fun onResponse(
                    call: Call<FileListResult>,
                    response: Response<FileListResult>
                ) {
                    if (response.isSuccessful) {
                        callback.onDataLoaded(response.body())
                    }
                }
            })
        }
    }

    /** Fire-and-forget: resume the current pattern. */
    fun resume() {
        api.resume().enqueue(object : Callback<CommandResult> {
            override fun onFailure(call: Call<CommandResult>, t: Throwable) {
                Log.e(TAG, "Resume Command Failure")
            }

            override fun onResponse(
                call: Call<CommandResult>,
                response: Response<CommandResult>
            ) {
                if (response.isSuccessful) {
                    //if (response.body().rslt)
                    Log.d(TAG, "Resume Command Success")
                }
            }
        })
    }

    /** Fire-and-forget: pause the current pattern. */
    fun pause() {
        api.pause().enqueue(object : Callback<CommandResult> {
            override fun onFailure(call: Call<CommandResult>, t: Throwable) {
                Log.e(TAG, "Pause Command Failure")
            }

            override fun onResponse(
                call: Call<CommandResult>,
                response: Response<CommandResult>
            ) {
                if (response.isSuccessful) {
                    //if (response.body().rslt)
                    Log.d(TAG, "Pause Command Success")
                }
            }
        })
    }

    /** Fire-and-forget: stop the current pattern. */
    fun stop() {
        api.stop().enqueue(object : Callback<CommandResult> {
            override fun onFailure(call: Call<CommandResult>, t: Throwable) {
                Log.e(TAG, "Stop Command Failure")
            }

            override fun onResponse(
                call: Call<CommandResult>,
                response: Response<CommandResult>
            ) {
                if (response.isSuccessful) {
                    //if (response.body().rslt)
                    Log.d(TAG, "Stop Command Success")
                }
            }
        })
    }

    /** Fire-and-forget: home the gantry. */
    fun home() {
        api.home().enqueue(object : Callback<CommandResult> {
            override fun onFailure(call: Call<CommandResult>, t: Throwable) {
                Log.e(TAG, "Home Command Failure")
            }

            override fun onResponse(
                call: Call<CommandResult>,
                response: Response<CommandResult>
            ) {
                if (response.isSuccessful) {
                    //if (response.body().rslt)
                    Log.d(TAG, "Home Command Success")
                }
            }
        })
    }

    /** Toggles the LED on/off, keeping the last-known brightness value. */
    fun writeLedOnOff(ledOn: Boolean) {
        writeLedState(lastStatus.ledValue!!, ledOn.toInt())
    }

    /** Sets the LED brightness, keeping the last-known on/off state. */
    fun writeLedValue(ledValue: Int) {
        writeLedState(ledValue, lastStatus.ledOn!!)
    }

    // Serializes the desired LED state (based on lastStatus) to compact JSON
    // and posts it as a plain-text body; refreshes status on success.
    private fun writeLedState(ledValue: Int, ledOn: Int) {
        val ledState = LedState(lastStatus)
        ledState.ledValue = ledValue
        ledState.ledOn = ledOn
        // Strip ALL whitespace from the JSON payload before sending.
        val json = ledState.ledJson().replace("\\s+".toRegex(), "")
        val body = json.toRequestBody("text/plain".toMediaTypeOrNull())
        api.setLed(body).enqueue(object : Callback<ResponseBody> {
            override fun onFailure(call: Call<ResponseBody>, t: Throwable) {
                Log.e(TAG, "LED Config Write Failure")
            }

            override fun onResponse(call: Call<ResponseBody>, response: Response<ResponseBody>) {
                if (response.isSuccessful) {
                    Log.d(TAG, "LED Config Write Success")
                    getStatus()
                }
            }
        })
    }

//    fun getFileList(): MutableLiveData<List<AbstractPattern>> {
//        val fileList = MutableLiveData<List<AbstractPattern>>()
//        api.listFiles().enqueue(object : Callback<FileListResult> {
//            override fun onFailure(call: Call<FileListResult>, t: Throwable) {
//                Log.e(TAG, t.localizedMessage)
//            }
//
//            override fun onResponse(
//                call: Call<FileListResult>,
//                response: Response<FileListResult>
//            ) {
//                if (response.isSuccessful) {
//                    Log.d(TAG, "File List Retrieved")
//                    fileList.value =
//                        response.body()!!.sandBotFiles!!.map { createPatternFromFile(it) }
//                }
//            }
//
//        })
//        return fileList
//    }

    /**
     * Uploads [pattern] as a multipart file. Currently only SEQUENCE
     * patterns are serialized (joined with newlines); other types are
     * logged and skipped.
     */
    fun saveFile(pattern: AbstractPattern) {
        var contents = ""
        when (pattern.fileType) {
            FileType.SEQUENCE -> {
                contents = (pattern as SequencePattern).sequenceContents.joinToString("\n")
            }
            else -> {
                Log.d(TAG, "Save ${pattern.fileType} file type not implemented")
            }
        }
        if (contents.isEmpty()) {
            return
        }

        Log.d(TAG, "Save file: " + pattern.name + " Contents: " + contents)
        val filePart = MultipartBody.Part.createFormData(
            "file", pattern.name, RequestBody.create(
                "plain/text".toMediaTypeOrNull(), contents
            )
        )
        api.saveFile(filePart).enqueue(object : Callback<ResponseBody> {
            override fun onResponse(call: Call<ResponseBody>, response: Response<ResponseBody>) {
                if (response.isSuccessful) {
                    Log.d(TAG, "File upload success: " + pattern.name)
                }
            }

            override fun onFailure(call: Call<ResponseBody>, t: Throwable) {
                Log.e(TAG, "saveFile Error")
            }
        })
    }

    /** Fire-and-forget: play the named pattern from the "sd" file system. */
    fun playFile(name: String) {
        Log.d(TAG, "Play File: $name")
        api.playFile("sd", name).enqueue(object : Callback<CommandResult> {
            override fun onFailure(call: Call<CommandResult>, t: Throwable) {
                Log.e(TAG, "playFile Error")
            }

            override fun onResponse(
                call: Call<CommandResult>,
                response: Response<CommandResult>
            ) {
                if (response.isSuccessful) {
                    //if (response.body().rslt)
                    Log.d(TAG, "Play File: $name Command Success")
                }
            }
        })
    }

    /**
     * Deletes the named file from the "sd" file system; [callback] receives
     * true on HTTP success, false on network failure.
     */
    fun deleteFile(name: String, callback: Consumer<Boolean>) {
        Log.d(TAG, "Delete File: $name")
        api.deleteFile("sd", name).enqueue(object : Callback<CommandResult> {
            override fun onFailure(call: Call<CommandResult>, t: Throwable) {
                Log.e(TAG, "deleteFile Error")
                callback.accept(false)
            }

            override fun onResponse(
                call: Call<CommandResult>,
                response: Response<CommandResult>
            ) {
                if (response.isSuccessful) {
                    //if (response.body().rslt)
                    Log.d(TAG, "Delete File: $name Command Success")
                    callback.accept(true)
                }
            }
        })
    }

    /**
     * Fetches the named file and wraps it in the pattern type matching
     * [type]. The LiveData stays empty on failure (errors are only logged).
     */
    fun getFile(name: String, type: FileType): MutableLiveData<AbstractPattern> {
        Log.d(TAG, "Get File: $name - $type")
        val pattern = MutableLiveData<AbstractPattern>()
        when (type) {
            FileType.PARAM -> {
                api.getParametricFile("sd", name).enqueue(object : Callback<ParametricFile> {
                    override fun onFailure(call: Call<ParametricFile>, t: Throwable) {
                        Log.e(TAG, "Get Parametric Error: ${t.localizedMessage}")
                    }

                    override fun onResponse(
                        call: Call<ParametricFile>,
                        response: Response<ParametricFile>
                    ) {
                        if (response.isSuccessful) {
                            pattern.value = ParametricPattern(
                                name,
                                response.body()!!.setup,
                                response.body()!!.loop
                            )
                        }
                    }
                })
            }
            FileType.THETA_RHO -> {
                api.getThetaRhoFile("sd", name).enqueue(object : Callback<String> {
                    override fun onFailure(call: Call<String>, t: Throwable) {
                        Log.e(TAG, "Get Theta Rho Error: ${t.localizedMessage}")
                    }

                    override fun onResponse(
                        call: Call<String>,
                        response: Response<String>
                    ) {
                        if (response.isSuccessful) {
                            pattern.value = ThetaRhoPattern(name, response.body())
                        }
                    }
                })
            }
            FileType.SEQUENCE -> {
                api.getSequenceFile("sd", name).enqueue(object : Callback<String> {
                    override fun onFailure(call: Call<String>, t: Throwable) {
                        Log.e(TAG, "Get Sequence Error: ${t.localizedMessage}")
                    }

                    override fun onResponse(call: Call<String>, response: Response<String>) {
                        if (response.isSuccessful) {
                            pattern.value = SequencePattern(name, response.body())
                        }
                    }
                })
            }
        }
        return pattern
    }

    /**
     * Maps a file-list entry to the pattern type implied by its extension.
     * NOTE(review): this `when` handles only PARAM / THETA_RHO / SEQUENCE;
     * the commented-out lines suggest FileType may have more values (GCODE,
     * UNKNOWN) — confirm against the FileType declaration that this is
     * exhaustive for everything fromExtension() can return.
     */
    fun createPatternFromFile(file: SandBotFile): AbstractPattern {
        val extension = FilenameUtils.getExtension(file.name)
        val fileType = FileType.fromExtension(extension)
        //Log.d(TAG, "SandBotFile Type: $fileType")
//        if (fileType === FileType.UNKNOWN) {
//            return false
//        }
        return when (fileType) {
            FileType.PARAM -> ParametricPattern(file.name)
            // /FileType.GCODE -> ParametricPattern(file.name)
            FileType.THETA_RHO -> ThetaRhoPattern(file.name)
            FileType.SEQUENCE -> SequencePattern(file.name)
        }
    }
}
0a209224c11e646b5549e6e059bd1190ee224706 | 7,218 | h | C | Firmware/roboticwristcode/catkin_ws/src/ros_tivac/TUhand/include/spi_AMIS_30543_stepper.h | Biological-Robotics-at-Tulsa/The-TU-Hand | c12c3875572e88ec821000cd2640d4d5f6f9017b | [
"MIT"
] | null | null | null | Firmware/roboticwristcode/catkin_ws/src/ros_tivac/TUhand/include/spi_AMIS_30543_stepper.h | Biological-Robotics-at-Tulsa/The-TU-Hand | c12c3875572e88ec821000cd2640d4d5f6f9017b | [
"MIT"
] | null | null | null | Firmware/roboticwristcode/catkin_ws/src/ros_tivac/TUhand/include/spi_AMIS_30543_stepper.h | Biological-Robotics-at-Tulsa/The-TU-Hand | c12c3875572e88ec821000cd2640d4d5f6f9017b | [
"MIT"
] | null | null | null | #include <stdbool.h>
#include <stdint.h>
#include <algorithm>
#include <queue>
#include <string>
// TivaC specific includes
extern "C"
{
#include <driverlib/interrupt.h>
#include <driverlib/sysctl.h>
#include <driverlib/gpio.h>
#include <driverlib/adc.h>
#include "inc/hw_ints.h"
#include "driverlib/pin_map.h"
#include "driverlib/timer.h"
#include "driverlib/ssi.h"
#include "inc/hw_types.h"
#include "inc/hw_gpio.h"
#include "inc/hw_memmap.h"
#define TARGET_IS_BLIZZARD_RB1
#include "driverlib/rom.h"
}
// extern "C"
// {
// #include "driverlib/ssi.h"
// #include <driverlib/gpio.h>
// #include "inc/hw_types.h"
// #include "inc/hw_gpio.h"
// #include "inc/hw_memmap.h"
// #define TARGET_IS_BLIZZARD_RB1
// #include "driverlib/rom.h"
// }
//Control registers
#define WR 0x0
#define CR0 0x1
#define CR1 0x2
#define CR2 0x3
#define CR3 0x9
//Status registers
#define SR0 0x4
#define SR1 0x5
#define SR2 0x6
#define SR3 0x7
#define SR4 0xA
// Configure SSI2 (PB4 = CLK, PB6 = RX, PB7 = TX) as an SPI master for the
// stepper driver: Motorola frame format mode 0, 800 kHz clock, 16-bit frames.
// Finishes by draining the receive FIFO so stale words cannot be mistaken
// for register reads later.
void SetupStepperSPIMaster(void){
    ROM_GPIOPinConfigure(GPIO_PB4_SSI2CLK);
    ROM_GPIOPinConfigure(GPIO_PB7_SSI2TX);
    ROM_GPIOPinConfigure(GPIO_PB6_SSI2RX);
    ROM_GPIOPinTypeSSI(GPIO_PORTB_BASE,GPIO_PIN_4|GPIO_PIN_6|GPIO_PIN_7);
    ROM_SSIConfigSetExpClk(SSI2_BASE, SysCtlClockGet(), SSI_FRF_MOTO_MODE_0, SSI_MODE_MASTER, 800000, 16);
    ROM_SSIEnable(SSI2_BASE);
    uint32_t buf;
    while(ROM_SSIDataGetNonBlocking(SSI2_BASE, &buf) != 0); //clear spi fifo buffer
}
// Forward declaration (the definition follows below).
uint32_t SPIReadByte(uint32_t CSPort, uint8_t CSPins, uint8_t address);

// Write one driver register over SPI. The 16-bit frame is
// bit 15 = write flag | bits 14:8 = register address | bits 7:0 = data
// (a read request leaves bit 15 clear -- see SPIReadByte). The active-low
// chip select on (CSPort, CSPins) is held asserted for the whole frame.
void SPIWriteByte(uint32_t CSPort, uint8_t CSPins, uint8_t address, uint8_t data){
    ROM_GPIOPinWrite(CSPort, CSPins, 0); //Pull CS LOW
    uint32_t ui32Data = 0b1000000000000000 | (address << 8) | data;
    ROM_SSIDataPut(SSI2_BASE, ui32Data);
    // Busy-wait until the frame has fully clocked out before releasing CS.
    while(ROM_SSIBusy(SSI2_BASE))
    {
    }
    ROM_GPIOPinWrite(CSPort, CSPins, 255); //Pull CS HIGH
}
// Read one driver register: clock out a frame with just the register address
// in the high byte (write flag clear) and return the word the slave shifted
// back during that frame.
// NOTE(review): the receive FIFO is drained with non-blocking gets and only
// the *last* word read is kept; if the FIFO happens to be empty, dataIn is
// returned as 0 rather than an error -- confirm this matches the driver's
// one-put/one-get framing.
uint32_t SPIReadByte(uint32_t CSPort, uint8_t CSPins, uint8_t address){
    ROM_GPIOPinWrite(CSPort, CSPins, 0); //Pull CS LOW
    uint32_t readRequest = (address << 8);
    uint32_t dataIn =0;
    ROM_SSIDataPut(SSI2_BASE, readRequest);
    // Wait for the full-duplex transfer to finish; the reply arrives while
    // the request is being clocked out.
    while(ROM_SSIBusy(SSI2_BASE))
    {
    }
    while(ROM_SSIDataGetNonBlocking(SSI2_BASE, &dataIn) != 0);
    ROM_GPIOPinWrite(CSPort, CSPins, 255); //Pull CS HIGH
    return dataIn;
}
// Zero every writable control register (WR, CR0..CR3) so the driver starts
// from a known state before it is configured.
void ClearStepperRegisters(uint32_t CSPort, uint8_t CSPins){
    const uint8_t controlRegs[5] = { WR, CR0, CR1, CR2, CR3 };
    for (unsigned int idx = 0; idx < 5; ++idx) {
        SPIWriteByte(CSPort, CSPins, controlRegs[idx], 0);
    }
}
// Program the coil current limit via the 5-bit CUR field of CR0.
// The (threshold, code) pairs come from Table 13 of the AMIS-30543
// datasheet: pick the largest tabulated current that does not exceed the
// request; anything below 245 mA falls through to code 0 (minimum current).
//
// BUG FIX: the original chain compared with `<=` against *descending*
// thresholds, so the very first test (milliamps <= 3000) matched every
// request up to 3000 mA and all the remaining branches were unreachable --
// every request programmed the 3000 mA maximum. The table must be scanned
// with `>=` in descending order.
void SetStepperCurrent(uint32_t CSPort, uint8_t CSPins, uint16_t milliamps)
{
    uint8_t code = 0;
    if      (milliamps >= 3000) { code = 0b11001; }
    else if (milliamps >= 2845) { code = 0b11000; }
    else if (milliamps >= 2700) { code = 0b10111; }
    else if (milliamps >= 2440) { code = 0b10110; }
    else if (milliamps >= 2240) { code = 0b10101; }
    else if (milliamps >= 2070) { code = 0b10100; }
    else if (milliamps >= 1850) { code = 0b10011; }
    else if (milliamps >= 1695) { code = 0b10010; }
    else if (milliamps >= 1520) { code = 0b10001; }
    else if (milliamps >= 1405) { code = 0b10000; }
    else if (milliamps >= 1260) { code = 0b01111; }
    else if (milliamps >= 1150) { code = 0b01110; }
    else if (milliamps >= 1060) { code = 0b01101; }
    else if (milliamps >= 955)  { code = 0b01100; }
    else if (milliamps >= 870)  { code = 0b01011; }
    else if (milliamps >= 780)  { code = 0b01010; }
    else if (milliamps >= 715)  { code = 0b01001; }
    else if (milliamps >= 640)  { code = 0b01000; }
    else if (milliamps >= 585)  { code = 0b00111; }
    else if (milliamps >= 540)  { code = 0b00110; }
    else if (milliamps >= 485)  { code = 0b00101; }
    else if (milliamps >= 445)  { code = 0b00100; }
    else if (milliamps >= 395)  { code = 0b00011; }
    else if (milliamps >= 355)  { code = 0b00010; }
    else if (milliamps >= 245)  { code = 0b00001; }
    // Keep the step-mode bits (CR0[7:5]) and replace the CUR bits (CR0[4:0]).
    uint8_t CR0_reg = (SPIReadByte(CSPort, CSPins, CR0) & 0b11100000) | code;
    SPIWriteByte(CSPort, CSPins, CR0, CR0_reg);
}
// Set the motor direction by rewriting bit 7 of CR1: cleared for forward,
// set for reverse. A read-modify-write cycle preserves the other CR1 bits.
void SetStepperDirection(uint32_t CSPort, uint8_t CSPins, bool forward){
    const uint8_t current = SPIReadByte(CSPort, CSPins, CR1);
    const uint8_t updated = forward ? (current & 0b01111111)
                                    : (current | 0b10000000);
    SPIWriteByte(CSPort, CSPins, CR1, updated);
}
// Enable the driver by setting bit 7 of CR2; the other CR2 bits are
// preserved via read-modify-write.
void SPIStepperEnable(uint32_t CSPort, uint8_t CSPins){
    uint8_t CR2_reg = (SPIReadByte(CSPort, CSPins, CR2) | 0b10000000);
    SPIWriteByte(CSPort, CSPins, CR2, CR2_reg);
}
// Disable the driver by writing CR2 = 0.
// NOTE(review): unlike SPIStepperEnable, this clobbers *all* CR2 bits, not
// just the enable bit -- confirm that losing the other CR2 settings on
// disable is intended.
void SPIStepperDisable(uint32_t CSPort, uint8_t CSPins){
    SPIWriteByte(CSPort, CSPins, CR2, 0);
}
#define STEPMODE_MICRO_2 2
#define STEPMODE_MICRO_4 4
#define STEPMODE_MICRO_8 8
#define STEPMODE_MICRO_16 16
#define STEPMODE_MICRO_32 32
#define STEPMODE_MICRO_64 64
#define STEPMODE_MICRO_128 128
#define STEPMODE_COMP_HALF 2
#define STEPMODE_COMP_FULL_2PH 1
#define STEPMODE_COMP_FULL_1PH 200
#define STEPMODE_UNCOMP_HALF 201
#define STEPMODE_UNCOMP_FULL 202
// Program the step mode: SM goes into CR0 bits 7:5 (current bits 4:0 are
// preserved) and ESM is written to CR3. The SM/ESM pairs appear to follow
// the AMIS-30543 step-mode table -- confirm against the datasheet.
// NOTE(review): STEPMODE_COMP_HALF shares the value 2 with STEPMODE_MICRO_2,
// so its branch is unreachable (harmless here: both select the same pair).
// Unknown stepmode values fall through with sm=0, esm=0 rather than being
// rejected.
void SetStepperStepMode(uint32_t CSPort, uint8_t CSPins, uint8_t stepmode){
    uint8_t sm = 0;
    uint8_t esm = 0;
    if (stepmode == STEPMODE_MICRO_2) { sm = 0b100; esm = 0b000; }
    else if (stepmode == STEPMODE_MICRO_4) { sm = 0b011; esm = 0b000; }
    else if (stepmode == STEPMODE_MICRO_8) { sm = 0b010; esm = 0b000; }
    else if (stepmode == STEPMODE_MICRO_16) { sm = 0b001; esm = 0b000; }
    else if (stepmode == STEPMODE_MICRO_32) { sm = 0b000; esm = 0b000; }
    else if (stepmode == STEPMODE_MICRO_64) { sm = 0b000; esm = 0b010; }
    else if (stepmode == STEPMODE_MICRO_128) { sm = 0b000; esm = 0b001; }
    else if (stepmode == STEPMODE_COMP_HALF) { sm = 0b100; esm = 0b000; }
    else if (stepmode == STEPMODE_COMP_FULL_2PH) { sm = 0b000; esm = 0b011; }
    else if (stepmode == STEPMODE_COMP_FULL_1PH) { sm = 0b000; esm = 0b100; }
    else if (stepmode == STEPMODE_UNCOMP_HALF) { sm = 0b101; esm = 0b000; }
    else if (stepmode == STEPMODE_UNCOMP_FULL) { sm = 0b111; esm = 0b000; }
    uint8_t CR0_reg = (SPIReadByte(CSPort, CSPins, CR0) & 0b00011111) | (sm << 5);
    SPIWriteByte(CSPort, CSPins, CR0, CR0_reg);
    SPIWriteByte(CSPort, CSPins, CR3, esm);
}
// Read status registers SR0..SR2 and build a human-readable summary of the
// fault bits this code recognizes (thermal warning/shutdown, watchdog,
// open coil, overcurrent).
// NOTE(review): the result always starts with "Error: ", even when no fault
// bit is set, so callers cannot distinguish "no error" without comparing the
// string. "Temp Warning"/"Temp Shutdown" also lack the trailing space the
// other messages carry, so concatenated faults run together.
std::string SPIStepperGetErrors(uint32_t CSPort, uint8_t CSPins)
{
    uint32_t sr0_stat = SPIReadByte(CSPort, CSPins, SR0);
    uint32_t sr1_stat = SPIReadByte(CSPort, CSPins, SR1);
    uint32_t sr2_stat = SPIReadByte(CSPort, CSPins, SR2);
    std::string errormsg;
    errormsg = "Error: ";
    if(sr0_stat & 0b01000000)        // SR0 bit 6: reported as thermal warning
        errormsg.append("Temp Warning");
    if(sr2_stat & 0b00000100)        // SR2 bit 2: reported as thermal shutdown
        errormsg.append("Temp Shutdown");
    if(sr0_stat & 0b00010000)        // SR0 bit 4: reported as watchdog event
        errormsg.append("Watchdog ");
    if(sr0_stat & 0b00001100)        // SR0 bits 3:2: reported as open coil
        errormsg.append("Open coil ");
    if((sr1_stat & 0b01111000 ) || (sr2_stat & 0b01111000 ))
        errormsg.append("Overcurrent ");
    return errormsg;
}
| 31.657895 | 106 | 0.658908 |
e7f06cecae55d479e6604b53a295b76a9bdf0276 | 5,005 | py | Python | backend/tests/unit/protocols/application/test_lists.py | pez-globo/pufferfish-software | b42fecd652731dd80fbe366e95983503fced37a4 | [
"Apache-2.0"
] | 1 | 2020-10-20T23:47:23.000Z | 2020-10-20T23:47:23.000Z | backend/tests/unit/protocols/application/test_lists.py | pez-globo/pufferfish-software | b42fecd652731dd80fbe366e95983503fced37a4 | [
"Apache-2.0"
] | 242 | 2020-10-23T06:44:01.000Z | 2022-01-28T05:50:45.000Z | backend/tests/unit/protocols/application/test_lists.py | pez-globo/pufferfish-vent-software | f1e5e47acf1941e7c729adb750b85bf26c38b274 | [
"Apache-2.0"
] | 1 | 2021-04-12T02:10:18.000Z | 2021-04-12T02:10:18.000Z | """Test the functionality of protocols.application.states classes."""
from ventserver.protocols.application import lists
from ventserver.protocols.protobuf import mcu_pb as pb
def test_send_new_elements() -> None:
    """Test adding new elements to a list for sending."""
    # 20 single-event updates; with max_len=10 the synchronizer can only
    # retain the most recent 10 of them (ids 10..19).
    example_sequence = [
        lists.UpdateEvent(new_elements=[pb.LogEvent(id=i)])
        for i in range(20)
    ]
    synchronizer = lists.SendSynchronizer(
        segment_type=pb.NextLogEvents,
        max_len=10, max_segment_len=5
    )
    assert synchronizer.output() is None
    for update_event in example_sequence:
        synchronizer.input(update_event)
    # Nothing is emitted until the peer announces a next_expected id.
    assert synchronizer.output() is None
    # The first 10 events should've been discarded
    for next_expected in range(10):
        synchronizer.input(lists.UpdateEvent(next_expected=next_expected))
        output = synchronizer.output()
        assert isinstance(output, pb.NextLogEvents)
        assert output.next_expected == next_expected
        assert output.total == 10
        assert output.remaining == 10
        # Requests below the retained window are clamped to start at id 10.
        for (i, event) in enumerate(output.elements):
            assert event.id == 10 + i
    # Segments should be returned as requested
    for next_expected in range(10, 20):
        synchronizer.input(lists.UpdateEvent(next_expected=next_expected))
        output = synchronizer.output()
        assert isinstance(output, pb.NextLogEvents)
        assert output.next_expected == next_expected
        assert output.total == 10
        assert output.remaining == 10 - (next_expected - 10)
        for (i, event) in enumerate(output.elements):
            assert event.id == next_expected + i
        # Segments are capped at max_segment_len=5 until fewer events remain.
        if next_expected <= 15:
            assert len(output.elements) == 5
        else:
            assert len(output.elements) == 5 - (next_expected - 15)
    # New elements should be in the segment resulting from a repeated request
    assert synchronizer.output() is None
    synchronizer.input(lists.UpdateEvent(
        new_elements=[pb.LogEvent(id=20)], next_expected=19
    ))
    output = synchronizer.output()
    assert isinstance(output, pb.NextLogEvents)
    assert output.next_expected == 19
    assert output.total == 10
    assert output.remaining == 2
    for (i, event) in enumerate(output.elements):
        assert event.id == 19 + i
    assert len(output.elements) == 2

# TODO: add a test where we send all events, then reset expected event to 0.
# All events should be sent again.
def test_receive_new_elements() -> None:
    """Test adding new elements to a list from receiving."""
    # Segments fed to the synchronizer, in arrival order. The third segment
    # overlaps ids 7-10 with the second, the fourth replays already-seen ids,
    # and the last two switch to a new session.
    incoming_segments = [
        pb.NextLogEvents(
            session_id=0, elements=[pb.LogEvent(id=i) for i in range(0, 5)]
        ),
        pb.NextLogEvents(
            session_id=0, elements=[pb.LogEvent(id=i) for i in range(5, 10)]
        ),
        pb.NextLogEvents(
            session_id=0, elements=[pb.LogEvent(id=i) for i in range(7, 11)]
        ),
        pb.NextLogEvents(
            session_id=0, elements=[pb.LogEvent(id=i) for i in range(0, 4)]
        ),
        pb.NextLogEvents(session_id=1),
        pb.NextLogEvents(
            session_id=1, elements=[pb.LogEvent(id=i) for i in range(0, 4)]
        ),
    ]
    synchronizer: lists.ReceiveSynchronizer[pb.LogEvent] = \
        lists.ReceiveSynchronizer()
    assert synchronizer.output() is None
    for segment in incoming_segments:
        synchronizer.input(segment)
    # One expected update per input segment:
    # (session_id, next_expected, ids of genuinely new elements).
    expected_updates = [
        (0, 5, [0, 1, 2, 3, 4]),
        (0, 10, [5, 6, 7, 8, 9]),
        (0, 11, [10]),        # only id 10 is new out of the 7-10 overlap
        (0, 11, []),          # replayed ids yield no new elements
        (1, 0, []),           # empty segment announces session 1
        (1, 4, [0, 1, 2, 3]),
    ]
    for session_id, next_expected, new_ids in expected_updates:
        update_event = synchronizer.output()
        assert update_event is not None
        assert update_event.session_id == session_id
        assert update_event.next_expected == next_expected
        assert [element.id for element in update_event.new_elements] == new_ids
| 36.532847 | 77 | 0.675524 |
5766a2be31ee4e5aaaea618cb6fae88f9b96bb81 | 2,621 | h | C | src/SimpleMath/SimpleMathLU.h | ORB-HD/MeshUp | 17a59b52436946caa99357a6042091fe898596a6 | [
"MIT"
] | 4 | 2019-01-08T00:56:41.000Z | 2021-06-11T10:05:39.000Z | src/SimpleMath/SimpleMathLU.h | ORB-HD/MeshUp | 17a59b52436946caa99357a6042091fe898596a6 | [
"MIT"
] | 8 | 2018-10-25T08:51:03.000Z | 2019-03-28T11:41:39.000Z | src/SimpleMath/SimpleMathLU.h | ORB-HD/MeshUp | 17a59b52436946caa99357a6042091fe898596a6 | [
"MIT"
] | 2 | 2019-03-28T17:35:43.000Z | 2019-05-06T12:51:46.000Z | #ifndef _SIMPLE_MATH_LU_H
#define _SIMPLE_MATH_LU_H
#include <iostream>
#include <limits>
#include "SimpleMathFixed.h"
#include "SimpleMathDynamic.h"
#include "SimpleMathBlock.h"
namespace SimpleMath {
// Partial-pivoting LU factorization of a square matrix: the constructor
// copies the matrix and factorizes it; solve() back-substitutes one
// right-hand side.
//
// NOTE(review): this class cannot compile as written. compute() and solve()
// reference names that are declared nowhere in the class (`A`, `pivot`,
// `b`, `x`, `mR`; solve() also uses `j` and `n` undeclared), and `d` is
// declared in one loop body but used after it. It looks like partially
// ported code whose members were renamed to matrixLU/mPermutations without
// finishing the port. mPermutations is also never freed (no destructor).
// Kept byte-identical below; only review comments added.
template <typename matrix_type>
class PartialPivLU {
    public:
        // Scalar type of the wrapped matrix.
        typedef typename matrix_type::value_type value_type;

    private:
        // Default construction is forbidden; a matrix is required.
        PartialPivLU() {}

        typedef Dynamic::Matrix<value_type> MatrixXXd;
        typedef Dynamic::Matrix<value_type> VectorXd;

        bool mIsFactorized;          // set once compute() finishes
        unsigned int *mPermutations; // column permutation (never deleted -- leak)
        MatrixXXd matrixLU;          // working copy factorized in place

    public:
        // Copies the matrix, initializes the identity permutation, factorizes.
        PartialPivLU(const matrix_type &matrix) :
            mIsFactorized(false),
            matrixLU(matrix) {
            // We can only solve quadratic systems
            assert (matrixLU.rows() == matrixLU.cols());
            mPermutations = new unsigned int [matrix.cols()];
            for (unsigned int i = 0; i < matrix.cols(); i++) {
                mPermutations[i] = i;
            }
            compute();
        }
        // Gaussian elimination with column pivoting.
        // NOTE(review): the pivot search uses matrixLU/mPermutations but the
        // elimination update below switches to undeclared A/pivot/b, and `d`
        // is scoped to the wrong loop -- broken as written.
        PartialPivLU compute() {
            unsigned int n = matrixLU.rows();
            unsigned int pi;
            unsigned int i,j,k;
            for (j = 0; j < n; j++) {
                double pv = fabs (matrixLU(j,mPermutations[j]));
                // LOG << "j = " << j << " pv = " << pv << std::endl;
                // find the pivot
                for (k = j; k < n; k++) {
                    double pt = fabs (matrixLU(j,mPermutations[k]));
                    if (pt > pv) {
                        pv = pt;
                        pi = k;
                        unsigned int p_swap = mPermutations[j];
                        mPermutations[j] = mPermutations[pi];
                        mPermutations[pi] = p_swap;
                        // LOG << "swap " << j << " with " << pi << std::endl;
                        // LOG << "j = " << j << " pv = " << pv << std::endl;
                    }
                }
                for (i = j + 1; i < n; i++) {
                    if (fabs(A(j,pivot[j])) <= std::numeric_limits<double>::epsilon()) {
                        std::cerr << "Error: pivoting failed for matrix A = " << std::endl;
                        std::cerr << "A = " << std::endl << A << std::endl;
                        std::cerr << "b = " << b << std::endl;
                    }
                    // assert (fabs(A(j,pivot[j])) > std::numeric_limits<double>::epsilon());
                    double d = A(i,pivot[j])/A(j,pivot[j]);
                }
                // NOTE(review): `i` and `d` are out of scope / stale here.
                for (k = j; k < n; k++) {
                    A(i,pivot[k]) -= A(j,pivot[k]) * d;
                }
            }
            mIsFactorized = true;
            return *this;
        }
        // Back-substitution for one right-hand side.
        // NOTE(review): uses mR/A/pivot/b and returns `x`, none declared;
        // `j` and `n` are also undeclared in this scope.
        Dynamic::Matrix<value_type> solve (
            const Dynamic::Matrix<value_type> &rhs
            ) const {
            assert (mIsFactorized);
            // temporary result vector which contains the pivoted result
            VectorXd px = rhs;
            for (int i = mR.cols() - 1; i >= 0; --i) {
                for (j = i + 1; j < n; j++) {
                    px[i] += A(i, pivot[j]) * px[j];
                }
                px[i] = (b[i] - px[i]) / A (i, pivot[i]);
            }
            return x;
        }
};
}
/* _SIMPLE_MATH_LU_H */
#endif
| 24.268519 | 80 | 0.548264 |
94308bc13d20e0efa4d6589cc2632cfaa6d94a09 | 413 | asm | Assembly | programs/oeis/279/A279077.asm | neoneye/loda | afe9559fb53ee12e3040da54bd6aa47283e0d9ec | [
"Apache-2.0"
] | 22 | 2018-02-06T19:19:31.000Z | 2022-01-17T21:53:31.000Z | programs/oeis/279/A279077.asm | neoneye/loda | afe9559fb53ee12e3040da54bd6aa47283e0d9ec | [
"Apache-2.0"
] | 41 | 2021-02-22T19:00:34.000Z | 2021-08-28T10:47:47.000Z | programs/oeis/279/A279077.asm | neoneye/loda | afe9559fb53ee12e3040da54bd6aa47283e0d9ec | [
"Apache-2.0"
] | 5 | 2021-02-24T21:14:16.000Z | 2021-08-09T19:48:05.000Z | ; A279077: Maximum starting value of X such that repeated replacement of X with X-ceiling(X/7) requires n steps to reach 0.
; 0,1,2,3,4,5,6,8,10,12,15,18,22,26,31,37,44,52,61,72,85,100,117,137,160,187,219,256,299,349,408,477,557,650,759,886,1034,1207,1409,1644,1919,2239,2613,3049,3558,4152,4845,5653,6596,7696,8979,10476,12223,14261
; LODA program: loop n times ($0 counts down), growing the value in $2 and
; keeping the current answer in $1. Each pass computes, literally:
; $2 := $2 + 1; $1 := $2; $2 := floor($2 / 6) + $1 -- presumably the inverse
; of one X -> X - ceil(X/7) step (see the sequence description above);
; confirm against the OEIS entry.
lpb $0
sub $0,1  ; one loop iteration consumed
add $2,1  ; grow the running value by one
mov $1,$2 ; remember it as the answer so far
div $2,6  ; floor division by 6
add $2,$1 ; next value = answer + floor(answer'/6)
lpe
mov $0,$1 ; result is returned in $0
| 34.416667 | 209 | 0.694915 |
6e38590f35cfd0744e60e86b54b8291c21a65ac0 | 31 | kt | Kotlin | src/Email.kt | waterlink/SignInExample | b4571b468f7c9122b5713a3fe0f7ba4359b68b71 | [
"MIT"
] | null | null | null | src/Email.kt | waterlink/SignInExample | b4571b468f7c9122b5713a3fe0f7ba4359b68b71 | [
"MIT"
] | null | null | null | src/Email.kt | waterlink/SignInExample | b4571b468f7c9122b5713a3fe0f7ba4359b68b71 | [
"MIT"
] | null | null | null | class Email(email: String) {
} | 10.333333 | 28 | 0.677419 |
ddfed4466070467deaf2c545a2032e16833f67db | 1,125 | c | C | snippets/linux/getrlimit.c | qeedquan/misc_utilities | 94c6363388662ac8ebbf075b9c853ce6defbb5b3 | [
"MIT"
] | 8 | 2018-10-17T18:17:25.000Z | 2022-03-18T09:02:53.000Z | snippets/linux/getrlimit.c | qeedquan/misc_utilities | 94c6363388662ac8ebbf075b9c853ce6defbb5b3 | [
"MIT"
] | null | null | null | snippets/linux/getrlimit.c | qeedquan/misc_utilities | 94c6363388662ac8ebbf075b9c853ce6defbb5b3 | [
"MIT"
] | 3 | 2020-07-01T13:52:42.000Z | 2022-03-18T09:10:59.000Z | #include <stdio.h>
#include <string.h>
#include <errno.h>
#include <sys/time.h>
#include <sys/resource.h>
#define nelem(x) (sizeof(x) / sizeof(x[0]))
/*
 * Walk a table of Linux resource limits and print one line per limit:
 * "<name>: <soft> <hard>", or "<name>: <errno text>" when getrlimit()
 * fails for that resource. Always exits 0.
 */
int
main(void)
{
	static const struct {
		int resource;
		char str[256];
	} limits[] = {
		{RLIMIT_AS, "RLIMIT_AS"},
		{RLIMIT_CORE, "RLIMIT_CORE"},
		{RLIMIT_CPU, "RLIMIT_CPU"},
		{RLIMIT_DATA, "RLIMIT_DATA"},
		{RLIMIT_FSIZE, "RLIMIT_FSIZE"},
		{RLIMIT_LOCKS, "RLIMIT_LOCKS"},
		{RLIMIT_MEMLOCK, "RLIMIT_MEMLOCK"},
		{RLIMIT_MSGQUEUE, "RLIMIT_MSGQUEUE"},
		{RLIMIT_NICE, "RLIMIT_NICE"},
		{RLIMIT_NOFILE, "RLIMIT_NOFILE"},
		{RLIMIT_NPROC, "RLIMIT_NPROC"},
		{RLIMIT_RSS, "RLIMIT_RSS"},
		{RLIMIT_RTPRIO, "RLIMIT_RTPRIO"},
		{RLIMIT_RTTIME, "RLIMIT_RTTIME"},
		{RLIMIT_SIGPENDING, "RLIMIT_SIGPENDING"},
		{RLIMIT_STACK, "RLIMIT_STACK"},
	};
	struct rlimit rlim;
	size_t idx;

	for (idx = 0; idx < nelem(limits); idx++) {
		if (getrlimit(limits[idx].resource, &rlim) != 0) {
			/* Report the failure inline and keep walking the table. */
			printf("%s: %s\n", limits[idx].str, strerror(errno));
			continue;
		}
		/* rlim_t width varies; cast so the format stays portable. */
		printf("%s: %lld %lld\n", limits[idx].str, (long long)rlim.rlim_cur, (long long)rlim.rlim_max);
	}
	return 0;
}
| 24.456522 | 93 | 0.631111 |
2f402b0a4158484d3a44afef47bad31ee0f2703b | 147 | php | PHP | config/constants.php | ziggyou8/msas_backend | dd9f13f12ac2502cd07e8dd3317f60f3c70cf1e2 | [
"MIT"
] | null | null | null | config/constants.php | ziggyou8/msas_backend | dd9f13f12ac2502cd07e8dd3317f60f3c70cf1e2 | [
"MIT"
] | null | null | null | config/constants.php | ziggyou8/msas_backend | dd9f13f12ac2502cd07e8dd3317f60f3c70cf1e2 | [
"MIT"
] | null | null | null | <?php
// Application constants, read elsewhere via config('constants.*').
return [
    'frontend' => [
        // Base URL of the deployed frontend; override with URL_FRONTEND.
        'url' => env('URL_FRONTEND', 'http://msas-frontend.herokuapp.com')
    ],
    // NOTE(review): a hard-coded default password committed to source is a
    // security smell -- confirm this is only used for seeding/test fixtures.
    'password' => 'passer'
];
| 18.375 | 74 | 0.52381 |
e754832dab77d8c61522e8d0299d1eb2b720fed1 | 22,157 | js | JavaScript | source/actions.js | RahavLussato/redux-react-firebase | f66b2ca8b0b39b5a51e0538e7e2ca43ab25d1556 | [
"MIT"
] | 287 | 2016-01-13T12:20:08.000Z | 2022-02-21T03:15:31.000Z | source/actions.js | enkuush-ca/redux-react-firebase | 004bfd20d60e6ac8c5793c7aa66b161ae6fc9f03 | [
"MIT"
] | 60 | 2016-01-26T15:01:45.000Z | 2018-10-18T22:34:14.000Z | source/actions.js | enkuush-ca/redux-react-firebase | 004bfd20d60e6ac8c5793c7aa66b161ae6fc9f03 | [
"MIT"
] | 48 | 2016-01-18T17:38:37.000Z | 2021-03-23T23:46:45.000Z |
import {
SET,
SET_REQUESTED,
SET_PROFILE,
LOGIN,
LOGOUT,
LOGIN_ERROR,
PERMISSION_DENIED_ERROR,
START,
INIT_BY_PATH
// NO_VALUE
} from './constants'
import { Promise } from 'es6-promise'
import _ from 'lodash'
// Canonical watcher id for an event/path pair, e.g. "value:/some/path"
// (the '#' query suffix is stripped and a leading '/' is ensured).
const getWatchPath = (event, path) => event + ':' + ((getCleanPath(path).substring(0, 1) === '/') ? '' : '/') + getCleanPath(path)

// Register a watcher for `event:path` under ConnectId and bump its
// reference count; returns the per-path map of ConnectId -> count.
const setWatcher = (firebase, event, path, ConnectId='Manual') => {
  const id = getWatchPath(event, path);
  firebase._.watchers[id] = firebase._.watchers[id] || {};
  if (Object.keys(firebase._.watchers[id]).includes(ConnectId)) {
    firebase._.watchers[id][ConnectId]++
  } else {
    firebase._.watchers[id][ConnectId] = 1
  }
  return firebase._.watchers[id]
}
// Drop one reference on a 'once' watcher (removing the whole entry when the
// last ConnectId goes away), then run any listener detachments that were
// queued on `shouldClearAfterOnce` while the once-read was in flight (see
// unsetWatcher). Unless a queued entry asked to skip cleanup, the cached
// state for its path is reset via INIT_BY_PATH.
const cleanOnceWatcher = (firebase, dispatch, event, path, ConnectId) => {
  const id = getWatchPath(event, path);
  if (firebase._.watchers[id]) {
    if (firebase._.watchers[id][ConnectId] <= 1) {
      delete firebase._.watchers[id][ConnectId];
      if (Object.keys(firebase._.watchers[id]).length === 0) {
        delete firebase._.watchers[id];
      }
    } else if (firebase._.watchers[id][ConnectId]) {
      firebase._.watchers[id][ConnectId]--
    }
  }
  if(firebase._.shouldClearAfterOnce[id]) {
    for (let clean of firebase._.shouldClearAfterOnce[id]) {
      firebase.database().ref().child(clean.path).off(clean.event);
      if(!clean.isSkipClean){
        dispatch({
          type: INIT_BY_PATH,
          path: clean.path
        })
      }
    }
    delete firebase._.shouldClearAfterOnce[id];
  }
  return firebase._.watchers[id]
}
// Number of distinct ConnectIds currently watching `event:path`
// (undefined when nothing is registered for that id).
const getWatcherCount = (firebase, event, path) => {
  const id = getWatchPath(event, path);
  const watchers = firebase._.watchers[id];
  return watchers && Object.keys(watchers).length
}
// Strip the optional '#'-delimited query suffix, leaving the database path.
const getCleanPath = (path) => {
  const [basePath] = path.split('#')
  return basePath
}
// Release one reference on a watcher (or all of them for the special
// ConnectId 'CleanAll'). When the last reference goes away -- or when a new
// query/set is replacing the old listener (`isNewQuery`) -- the Firebase
// listener is detached and, unless `isSkipClean`, the cached state for the
// path is reset via INIT_BY_PATH. If a 'once' read for the same path is
// still pending, the detach/cleanup is queued on `shouldClearAfterOnce`
// and drained later by cleanOnceWatcher.
const unsetWatcher = (firebase, dispatch, event, path, ConnectId='Manual', isSkipClean=false, isNewQuery=false) => {
  const id = getWatchPath(event, path);
  const onceEvent = getWatchPath('once', path);
  path = path.split('#')[0]
  if ((firebase._.watchers[id] && firebase._.watchers[id][ConnectId] <= 1) || isNewQuery || ConnectId === 'CleanAll') {
    // Cancel any pending aggregation flush for this path.
    var aggregationId = getWatchPath('child_aggregation', path);
    if (firebase._.timeouts && firebase._.timeouts[aggregationId]) {
      clearTimeout(firebase._.timeouts[aggregationId]);
      firebase._.timeouts[aggregationId] = undefined;
    }
    ConnectId !== 'CleanAll' && delete firebase._.watchers[id][ConnectId];
    const countWatchers = ConnectId !== 'CleanAll' ? Object.keys(firebase._.watchers[id]).length : 0;
    if (countWatchers === 0 || isNewQuery) {
      countWatchers === 0 && delete firebase._.watchers[id];
      if (event!='once'){
        if (!firebase._.watchers[onceEvent]) {
          event !== 'all' && firebase.database().ref().child(path).off(event);
          if(!isSkipClean){
            dispatch({
              type: INIT_BY_PATH,
              path
            })
          }
        } else {
          // A 'once' read is in flight: defer the off()/cleanup until it lands.
          firebase._.shouldClearAfterOnce[onceEvent] = firebase._.shouldClearAfterOnce[onceEvent] || [];
          firebase._.shouldClearAfterOnce[onceEvent].push({path, event, isSkipClean});
        }
      }
    }
  } else if (firebase._.watchers[id] && firebase._.watchers[id][ConnectId]) {
    firebase._.watchers[id][ConnectId]--
  }
}
// True when at least one watcher is registered for `event:path`.
// BUG FIX: `firebase._.watchers[id]` holds a map of ConnectId -> refcount
// (see setWatcher), not a number, so the original `watchers[id] > 0`
// comparison was always false and this function always returned false.
// A path is watched when at least one ConnectId is registered for it.
export const isWatchPath = (firebase, dispatch, event, path) => {
  const id = getWatchPath(event, path)
  const watchers = firebase._.watchers[id]
  return !!watchers && Object.keys(watchers).length > 0
}
// True when `n` parses fully as a number: for numeric input the difference
// between its coercion and parseFloat is 0; anything else yields NaN
// (e.g. '12px' parses to 12 but coerces to NaN).
function isNumeric(n) {
  const delta = n - parseFloat(n)
  return !Number.isNaN(delta)
}
// Attach a Firebase listener for `event` at `path` and mirror results into
// the store.
//
// `path` may carry a query suffix after '#', e.g.
// 'items#orderByChild=rank&limitToFirst=5'; supported params mirror the
// Firebase query API (orderBy*, limitTo*, equalTo, startAt, endAt, plus
// 'doNotParse' to suppress numeric coercion of boundary values).
//
// Re-registering an already-watched path only bumps its reference count,
// unless a new query or new setOptions replaces the old listener.
//
// Delivery modes:
//  - event 'once': a single read, dispatched as one SET (or via setFunc).
//  - 'child_added' with isListenOnlyOnDelta: one initial full-value read,
//    then child_added events only for items arriving afterwards.
//  - otherwise: a plain q.on(event, ...) subscription.
// With isAggregation, child events are buffered and flushed in bulk after
// one second (see dispatchBulk).
export const watchEvent = (firebase, dispatch, event, path, ConnectId='Manual', isListenOnlyOnDelta=false,
                           isAggregation=false, setFunc=undefined, setOptions=undefined) => {
  if (path) {
    const isNewQuery = path.includes('#')
    const isNewSet = setOptions !== undefined
    let queryParams = []
    if (isNewQuery) {
      // Split 'realPath#param=a&param=b' into the path and its query params.
      let pathSplitted = path.split('#')
      path = pathSplitted[0]
      queryParams = pathSplitted[1].split('&')
    }
    const watchPath = path
    const counter = getWatcherCount(firebase, event, watchPath)
    if (counter > 0) {
      if (isNewQuery || isNewSet) {
        // Replace the existing listener rather than stacking a second one.
        unsetWatcher(firebase, dispatch, event, path, ConnectId, false, isNewQuery || isNewSet)
      } else {
        // Same watch already active: just add a reference and bail.
        setWatcher(firebase, event, watchPath, ConnectId)
        return
      }
    }
    setWatcher(firebase, event, watchPath, ConnectId)
    let query = firebase.database().ref().child(path)
    if (isNewQuery) {
      // Translate each 'name=value[=childKey]' parameter into a Firebase
      // query modifier. 'doNotParse' must appear before the boundary params
      // it is meant to affect (forEach evaluates in order).
      let doNotParse = false
      queryParams.forEach((param) => {
        param = param.split('=')
        switch (param[0]) {
          case 'doNotParse':
            doNotParse = true
            break
          case 'orderByValue':
            query = query.orderByValue()
            doNotParse = true
            break
          case 'orderByPriority':
            query = query.orderByPriority()
            doNotParse = true
            break
          case 'orderByKey':
            query = query.orderByKey()
            doNotParse = true
            break
          case 'orderByChild':
            query = query.orderByChild(param[1])
            break
          case 'limitToFirst':
            query = query.limitToFirst(parseInt(param[1]))
            break
          case 'limitToLast':
            query = query.limitToLast(parseInt(param[1]))
            break
          case 'equalTo':
            // Coerce numeric strings unless parsing is suppressed; the
            // literal string 'null' means a null boundary.
            let equalToParam = (!doNotParse && isNumeric(param[1])) ? parseFloat(param[1]) || (param[1] === '0' ? 0 : param[1]) : param[1]
            equalToParam = equalToParam === 'null' ? null : equalToParam
            query = param.length === 3
              ? query.equalTo(equalToParam, param[2])
              : query.equalTo(equalToParam)
            break
          case 'startAt':
            let startAtParam = (!doNotParse && isNumeric(param[1])) ? parseFloat(param[1]) || (param[1] === '0' ? 0 : param[1]) : param[1]
            startAtParam = startAtParam === 'null' ? null : startAtParam
            query = param.length === 3
              ? query.startAt(startAtParam, param[2])
              : query.startAt(startAtParam)
            break
          case 'endAt':
            let endAtParam = (!doNotParse && isNumeric(param[1])) ? parseFloat(param[1]) || (param[1] === '0' ? 0 : param[1]) : param[1]
            endAtParam = endAtParam === 'null' ? null : endAtParam
            query = param.length === 3
              ? query.endAt(endAtParam, param[2])
              : query.endAt(endAtParam)
            break
          default:
            break
        }
      })
    }
    // Runs the prepared query `q` for event `e` at path `p`, dispatching
    // START first so reducers can flag the path as "requesting".
    const runQuery = (q, e, p) => {
      dispatch({
        type: START,
        timestamp: Date.now(),
        requesting: true,
        requested: false,
        path
      })
      let aggregationId = getWatchPath('child_aggregation', path);
      if (e === 'once') {
        // NOTE(review): dispatchPermissionDeniedError is passed directly as
        // the rejection handler here and in the delta branch, so its `p`
        // parameter is undefined in those cases (only the final q.on call
        // wraps it to forward `p`) -- confirm this is intended.
        q.once('value')
          .then(snapshot => {
            cleanOnceWatcher(firebase, dispatch, event, watchPath, ConnectId)
            if (snapshot.val() !== null) {
              if (setFunc) {
                setFunc(snapshot, 'value', dispatch, setOptions);
                dispatch({
                  type: SET_REQUESTED,
                  path: p,
                  key: snapshot.key,
                  timestamp: Date.now(),
                  requesting: false,
                  requested: true
                });
              } else {
                dispatch({
                  type: SET,
                  path: p,
                  data: snapshot.val(),
                  snapshot: Object.assign(snapshot, {_event: 'value'}),
                  key: snapshot.key,
                  timestamp: Date.now(),
                  requesting: false,
                  requested: true,
                  isChild: false,
                  isMixSnapshot: false,
                  isMergeDeep: false
                })
              }
            }
          }, dispatchPermissionDeniedError)
      } else if (e === 'child_added' && isListenOnlyOnDelta) {
        // Delta mode: suppress the initial burst of child_added events; only
        // items arriving after the first full read (below) are dispatched.
        let newItems = false;
        q.on(e, snapshot => {
          if (!newItems) return;
          let tempSnapshot = Object.assign(snapshot, {_event: e});
          if (isAggregation) {
            // Buffer into the aggregation maps and (re)arm a 1s flush.
            if (!firebase._.timeouts[aggregationId]) {
              firebase._.aggregatedData[aggregationId] = {}
              firebase._.aggregatedSnapshot[aggregationId] = {}
              firebase._.timeouts[aggregationId] = setTimeout(() => {
                dispatchBulk(p, aggregationId)
              }, 1000);
            }
            firebase._.aggregatedData[aggregationId][snapshot.key] = snapshot.val()
            firebase._.aggregatedSnapshot[aggregationId][snapshot.key] = tempSnapshot;
          } else {
            if (setFunc) {
              setFunc(snapshot, 'child_added', dispatch, setOptions);
              dispatch({
                type: SET_REQUESTED,
                path: p,
                key: snapshot.key,
                timestamp: Date.now(),
                requesting: false,
                requested: true
              });
            } else {
              dispatch({
                type: SET,
                path: p,
                data: snapshot.val(),
                snapshot: tempSnapshot,
                key: snapshot.key,
                timestamp: Date.now(),
                requesting: false,
                requested: true,
                isChild: true,
                isMixSnapshot: true,
                isMergeDeep: false
              })
            }
          }
        }, dispatchPermissionDeniedError)
        // Initial full read; flips newItems so later child_added fire through.
        q.once('value')
          .then(snapshot => {
            newItems = true;
            if (snapshot.val() !== null) {
              if (setFunc) {
                setFunc(snapshot, 'value', dispatch, setOptions);
                dispatch({
                  type: SET_REQUESTED,
                  path: p,
                  key: snapshot.key,
                  timestamp: Date.now(),
                  requesting: false,
                  requested: true
                });
              } else {
                dispatch({
                  type: SET,
                  path: p,
                  data: snapshot.val(),
                  snapshot: Object.assign(snapshot, {_event: 'value'}),
                  key: snapshot.key,
                  timestamp: Date.now(),
                  requesting: false,
                  requested: true,
                  isChild: false,
                  isMixSnapshot: true,
                  isMergeDeep: false
                })
              }
            }
          }, dispatchPermissionDeniedError)
      } else {
        // Plain subscription; child_removed is marked with a sentinel value.
        q.on(e, snapshot => {
          let data = (e === 'child_removed') ? '_child_removed' : snapshot.val();
          let tempSnapshot = Object.assign(snapshot, {_event: e});
          if (e !== 'value' && isAggregation) {
            if (!firebase._.timeouts[aggregationId]) {
              firebase._.aggregatedData[aggregationId] = {}
              firebase._.aggregatedSnapshot[aggregationId] = {}
              firebase._.timeouts[aggregationId] = setTimeout(() => {
                dispatchBulk(p, aggregationId)
              }, 1000);
            }
            firebase._.aggregatedData[aggregationId][snapshot.key] = data
            firebase._.aggregatedSnapshot[aggregationId][snapshot.key] = tempSnapshot
          } else {
            if (setFunc) {
              setFunc(tempSnapshot, e, dispatch, setOptions);
            } else {
              dispatch({
                type: SET,
                path: p,
                data,
                snapshot: tempSnapshot,
                key: snapshot.key,
                timestamp: Date.now(),
                requesting: false,
                requested: true,
                isChild: e !== 'value',
                isMixSnapshot: isListenOnlyOnDelta,
                isMergeDeep: false
              })
            }
          }
        }, (permError) => dispatchPermissionDeniedError(permError, p))
      }
    }
    // Flush everything buffered under aggregationId as one bulk dispatch
    // (key '_NONE', deep-merged into existing state), then disarm the timer.
    const dispatchBulk = (p, aggregationId) => {
      if (setFunc) {
        setFunc(firebase._.aggregatedSnapshot[aggregationId], 'aggregated', dispatch, setOptions);
        dispatch({
          type: SET_REQUESTED,
          path: p,
          key: '_NONE',
          timestamp: Date.now(),
          requesting: false,
          requested: true
        });
      } else {
        dispatch({
          type: SET,
          path: p,
          data: firebase._.aggregatedData[aggregationId],
          snapshot: firebase._.aggregatedSnapshot[aggregationId],
          key: '_NONE',
          timestamp: Date.now(),
          requesting: false,
          requested: true,
          isChild: false,
          isMixSnapshot: true,
          isMergeDeep: true
        })
      }
      firebase._.timeouts[aggregationId] = undefined
    }
    // Record a PERMISSION_DENIED failure in the store (other errors are only
    // re-thrown). The 'undefined' message check filters messages containing
    // that word; the error is always re-raised for upstream handling.
    const dispatchPermissionDeniedError = (permError, p) => {
      if (permError && permError.code === 'PERMISSION_DENIED' &&
          permError.message && !permError.message.includes('undefined')) {
        dispatch({
          type: PERMISSION_DENIED_ERROR,
          data: undefined,
          snapshot: {val: () => undefined},
          path: p,
          timestamp: Date.now(),
          requesting: false,
          requested: true,
          permError
        })
      }
      throw permError
    }
    runQuery(query, event, path)
  }
}
// Detach a single watcher registration (see unsetWatcher for the rules).
export const unWatchEvent = (firebase, dispatch, event, path, ConnectId, isSkipClean=false) => {
  unsetWatcher(firebase, dispatch, event, path, ConnectId, isSkipClean)
}

// Attach every event in `events` (objects shaped like
// { name, path, isListenOnlyOnDelta, isAggregation, setFunc, setOptions }).
export const watchEvents = (firebase, dispatch, events, ConnectId='Manual') =>
  events.forEach(event => watchEvent(firebase, dispatch, event.name, event.path, ConnectId, event.isListenOnlyOnDelta, event.isAggregation, event.setFunc, event.setOptions))

// Detach every event in `events`; on unmount the per-event
// isSkipCleanOnUnmount flag decides whether cached state survives.
export const unWatchEvents = (firebase, dispatch, events, ConnectId='Manual', isUnmount=false) =>
  events.forEach(event => unWatchEvent(firebase, dispatch, event.name, event.path, ConnectId, isUnmount ? !!event.isSkipCleanOnUnmount : event.isSkipClean))
// Record an authentication failure (or clear it by passing null).
const dispatchLoginError = (dispatch, authError) =>
  dispatch({
    type: LOGIN_ERROR,
    authError
  })

// Record a successful authentication and clear any previous auth error.
const dispatchLogin = (dispatch, auth) =>
  dispatch({
    type: LOGIN,
    auth,
    authError: null
  })
// Stop the active profile listener, if any. Profile data lives at
// `${config.userProfile}/${authUid}` in the database.
const unWatchUserProfile = (firebase) => {
  const authUid = firebase._.authUid
  const userProfile = firebase._.config.userProfile
  if (firebase._.profileWatch) {
    firebase.database().ref().child(`${userProfile}/${authUid}`).off('value', firebase._.profileWatch)
    firebase._.profileWatch = null
  }
}
// (Re)subscribe to the current user's profile node and mirror every change
// into the store via SET_PROFILE. Only active when `config.userProfile` is
// configured; any previous profile listener is detached first.
const watchUserProfile = (dispatch, firebase) => {
  const authUid = firebase._.authUid
  const userProfile = firebase._.config.userProfile
  unWatchUserProfile(firebase)
  if (firebase._.config.userProfile) {
    firebase._.profileWatch = firebase.database().ref().child(`${userProfile}/${authUid}`).on('value', snap => {
      dispatch({
        type: SET_PROFILE,
        profile: snap.val()
      })
    })
  }
}
// Resolve the right Firebase sign-in method for the given credentials:
// a plain string is treated as a custom auth token; an object carrying
// `email` and `password` signs in with those; anything else is rejected.
const createLoginPromise = (firebase, credentials) => {
  const auth = firebase.auth()
  if (_.isString(credentials)) {
    return auth.signInWithCustomToken(credentials)
  } else if (_.has(credentials, "email") && _.has(credentials, "password")) {
    // BUG FIX: the original called signInWithEmailAndPassword(email, password)
    // with identifiers that are not defined anywhere in this scope (a
    // ReferenceError at runtime); the values live on the credentials object.
    return auth.signInWithEmailAndPassword(credentials.email, credentials.password)
  } else {
    return Promise.reject(new Error(`Malformed credentials or unsupported way of logging in: ${credentials}`))
  }
}
// Sign in with the given credentials (see createLoginPromise for supported
// shapes). Any previous auth error is cleared first; failures are recorded
// in the store and surfaced to the caller via the rejected promise.
export const login = (dispatch, firebase, credentials) => {
  return new Promise((resolve, reject) => {
    dispatchLoginError(dispatch, null)
    createLoginPromise(firebase, credentials)
      .then(resolve)
      .catch(err => {
        dispatchLoginError(dispatch, err)
        reject(err)
      });
  })
}
// Wire Firebase authentication into the store. On every auth state change:
// run the optional `config.onAuthStateChanged` hook, dispatch LOGOUT when
// signed out, otherwise record the uid, start the profile watcher, flush
// watchers queued while auth was pending, and dispatch LOGIN.
export const init = (dispatch, firebase) => {
  firebase.auth().onAuthStateChanged(authData => {
    // Run onAuthStateChanged if it exists in config.
    // BUG FIX: this call previously sat *after* the listener registration,
    // where `authData` is not in scope, so configuring the hook raised a
    // ReferenceError; it now runs inside the listener for every transition.
    if (firebase._.config.onAuthStateChanged) {
      firebase._.config.onAuthStateChanged(authData, firebase)
    }
    if (!authData) {
      return dispatch({type: LOGOUT})
    }
    firebase._.authUid = authData.uid
    watchUserProfile(dispatch, firebase)
    // Attach any watchers that were requested before auth completed.
    if (!!firebase._.firebasePendingEvents) {
      for (let key of Object.keys(firebase._.firebasePendingEvents)) {
        watchEvents(firebase, dispatch, firebase._.firebasePendingEvents[key], key);
      }
      firebase._.firebasePendingEvents = undefined
    }
    dispatchLogin(dispatch, authData)
  });
}
// Sign the user out, dispatch LOGOUT (optionally preserving or removing
// selected state slices), and tear down the profile listener.
export const logout = (dispatch, firebase, preserve = [], remove = []) => {
  firebase.auth().signOut()
  dispatch({type: LOGOUT, preserve, remove})
  firebase._.authUid = null
  unWatchUserProfile(firebase)
}
// Create an email/password account, optionally seed its profile node at
// `${config.userProfile}/${uid}`, then sign the new user in. Resolves with
// the new uid; failures are recorded via LOGIN_ERROR and rejected.
// NOTE(review): the profile write's completion is not awaited before
// logging in -- confirm that ordering is acceptable.
export const createUser = (dispatch, firebase, credentials, profile) =>
  new Promise((resolve, reject) => {
    dispatchLoginError(dispatch, null)
    firebase.auth().createUserWithEmailAndPassword(credentials.email, credentials.password)
      .then((userData) => {
        if (profile && firebase._.config.userProfile) {
          firebase.database().ref().child(`${firebase._.config.userProfile}/${userData.uid}`).set(profile)
        }
        login(dispatch, firebase, credentials)
          .then(() => resolve(userData.uid))
          .catch(err => reject(err))
      })
      .catch(err => {
        dispatchLoginError(dispatch, err)
        return reject(err)
      })
  })
// Send a password-reset email. A missing account is mapped to a friendlier
// error message; every failure is recorded via LOGIN_ERROR.
// NOTE(review): 'INVALID_USER' looks like a legacy (pre-v3) Firebase error
// code -- confirm the current SDK doesn't report 'auth/user-not-found'
// instead, which would make the special-case branch dead.
export const resetPassword = (dispatch, firebase, email) => {
  dispatchLoginError(dispatch, null)
  return firebase.auth().sendPasswordResetEmail(email).catch((err) => {
    if (err) {
      switch (err.code) {
        case 'INVALID_USER':
          dispatchLoginError(dispatch, new Error('The specified user account does not exist.'))
          break
        default:
          dispatchLoginError(dispatch, err)
      }
      return
    }
  })
}

// Bundled default export; the individual named exports above remain available.
export default { watchEvents, unWatchEvents, init, logout, createUser, resetPassword, isWatchPath }
| 38.33391 | 175 | 0.463104 |
c3edaa2fd78a8dc0ca30ded213b4654916154891 | 249 | go | Go | main.go | amaro0/uuid | f7e8d5e115d694d58103abb6c0cf33ee307af326 | [
"MIT"
] | null | null | null | main.go | amaro0/uuid | f7e8d5e115d694d58103abb6c0cf33ee307af326 | [
"MIT"
] | null | null | null | main.go | amaro0/uuid | f7e8d5e115d694d58103abb6c0cf33ee307af326 | [
"MIT"
] | null | null | null | package main
import (
"fmt"
"github.com/google/uuid"
)
import "github.com/atotto/clipboard"
// main generates a random UUID, copies it to the system clipboard and
// confirms on stdout.
//
// Fixes over the original: the error returned by clipboard.WriteAll was
// silently ignored (the program claimed success even when the copy failed),
// and the result variable shadowed the imported uuid package.
func main() {
	id, err := uuid.NewRandom()
	if err != nil {
		panic(err)
	}
	if err := clipboard.WriteAll(id.String()); err != nil {
		panic(err)
	}
	fmt.Println("UUID ready to paste")
}
| 13.105263 | 36 | 0.666667 |
fd6e50c9578fd720829607749ebde510502104b1 | 136 | sql | SQL | src/test/resources/loadext.test_1.sql | jdkoren/sqlite-parser | 9adf75ff5eca36f6e541594d2e062349f9ced654 | [
"MIT"
] | 131 | 2015-03-31T18:59:14.000Z | 2022-03-09T09:51:06.000Z | src/test/resources/loadext.test_1.sql | jdkoren/sqlite-parser | 9adf75ff5eca36f6e541594d2e062349f9ced654 | [
"MIT"
] | 20 | 2015-03-31T21:35:38.000Z | 2018-07-02T16:15:51.000Z | src/test/resources/loadext.test_1.sql | jdkoren/sqlite-parser | 9adf75ff5eca36f6e541594d2e062349f9ced654 | [
"MIT"
] | 43 | 2015-04-28T02:01:55.000Z | 2021-06-06T09:33:38.000Z | -- loadext.test
--
-- db eval {
-- SELECT sqlite3_status('MEMORY_USED') AS mused
-- }
SELECT sqlite3_status('MEMORY_USED') AS mused | 22.666667 | 52 | 0.683824 |
571f35f00deb9ce1c8a0a96fc4f3f361e59ca68a | 647 | h | C | Headers/CoreMaterial/MTColor.h | Shade-Zepheri/Borealis | c427fec52eb5189c96c0b1a293cae15171c80131 | [
"MIT"
] | 4 | 2020-06-29T08:01:31.000Z | 2021-06-08T18:25:59.000Z | Headers/CoreMaterial/MTColor.h | Mighel881/Borealis | 5971492de31d939275804722ef352300ffd782cb | [
"MIT"
] | null | null | null | Headers/CoreMaterial/MTColor.h | Mighel881/Borealis | 5971492de31d939275804722ef352300ffd782cb | [
"MIT"
] | 3 | 2020-06-29T08:12:59.000Z | 2021-07-08T15:07:36.000Z | #import <Foundation/Foundation.h>
#import "CoreMaterial+Structs.h"
/// Private CoreMaterial color object. Wraps color values and conversions
/// to/from CGColor and CAColorMatrix for material rendering.
/// NOTE(review): reverse-engineered header — semantics inferred from
/// selector names; confirm against runtime behavior.
@interface MTColor : NSObject

// Preset colors.
+ (instancetype)blackColor;
+ (instancetype)whiteColor;
+ (instancetype)pinkColor;

// Component-based constructors (grayscale and RGBA).
+ (instancetype)colorWithWhite:(CGFloat)white alpha:(CGFloat)alpha;
+ (instancetype)colorWithRed:(CGFloat)red green:(CGFloat)green blue:(CGFloat)blue alpha:(CGFloat)alpha;
// Wraps an existing Core Graphics color.
+ (instancetype)colorWithCGColor:(CGColorRef)CGColor;

// Core Graphics representation of the receiver.
- (CGColorRef)CGColor;
// Color matrix used for source-over compositing.
- (CAColorMatrix)sourceOverColorMatrix;

// Derived colors (alpha adjustment and blending).
- (MTColor *)colorWithAlphaComponent:(CGFloat)alpha;
- (MTColor *)colorWithAdditionalAlphaComponent:(CGFloat)alpha;
- (MTColor *)colorBlendedWithColor:(MTColor *)color;
@end | 32.35 | 103 | 0.79289 |
3ee91217e36ba3da44eafc2396e12d047512ff5a | 13,196 | c | C | acados/ocp_qp/ocp_qp_full_condensing_solver.c | besticka/acados1 | e2fd3e9ca6f02b78b936b269c1d711880e84b4db | [
"BSD-2-Clause"
] | null | null | null | acados/ocp_qp/ocp_qp_full_condensing_solver.c | besticka/acados1 | e2fd3e9ca6f02b78b936b269c1d711880e84b4db | [
"BSD-2-Clause"
] | null | null | null | acados/ocp_qp/ocp_qp_full_condensing_solver.c | besticka/acados1 | e2fd3e9ca6f02b78b936b269c1d711880e84b4db | [
"BSD-2-Clause"
] | null | null | null | /*
* Copyright 2019 Gianluca Frison, Dimitris Kouzoupis, Robin Verschueren, Andrea Zanelli, Niels van Duijkeren, Jonathan Frey, Tommaso Sartor, Branimir Novoselnik, Rien Quirynen, Rezart Qelibari, Dang Doan, Jonas Koenemann, Yutao Chen, Tobias Schöls, Jonas Schlagenhauf, Moritz Diehl
*
* This file is part of acados.
*
* The 2-Clause BSD License
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
// external
#include <assert.h>
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
// acados
#include "acados/dense_qp/dense_qp_common.h"
#include "acados/ocp_qp/ocp_qp_common.h"
#include "acados/ocp_qp/ocp_qp_full_condensing.h"
#include "acados/ocp_qp/ocp_qp_full_condensing_solver.h"
#include "acados/utils/mem.h"
#include "acados/utils/timing.h"
#include "acados/utils/types.h"
/************************************************
* opts
************************************************/
/* Returns the number of bytes needed to store the options of the
 * full-condensing solver: wrapper struct + condensing opts + dense QP
 * solver opts (sized for the condensed problem). */
int ocp_qp_full_condensing_solver_opts_calculate_size(void *config_, ocp_qp_dims *dims)
{
    ocp_qp_xcond_solver_config *config = config_;
    qp_solver_config *qp_solver = config->qp_solver;
    int size = 0;
    size += sizeof(ocp_qp_full_condensing_solver_opts);
    // dimensions of the dense QP obtained by condensing the OCP QP
    dense_qp_dims ddims;
    compute_dense_qp_dims(dims, &ddims);
    size += ocp_qp_full_condensing_opts_calculate_size(dims);
    // TODO(dimitris): shouldn't we pass config->qp_solver instead of config_ below?
    size += qp_solver->opts_calculate_size(config_, &ddims);
    return size;
}
/* Places the options struct into caller-provided raw memory and assigns
 * the sub-option blocks (condensing, then 8-byte aligned QP solver opts).
 * raw_memory must be at least ..._opts_calculate_size() bytes. */
void *ocp_qp_full_condensing_solver_opts_assign(void *config_, ocp_qp_dims *dims, void *raw_memory)
{
    ocp_qp_xcond_solver_config *config = config_;
    qp_solver_config *qp_solver = config->qp_solver;
    char *c_ptr = (char *) raw_memory;
    ocp_qp_full_condensing_solver_opts *opts = (ocp_qp_full_condensing_solver_opts *) c_ptr;
    c_ptr += sizeof(ocp_qp_full_condensing_solver_opts);
    dense_qp_dims ddims;
    compute_dense_qp_dims(dims, &ddims);
    assert((size_t) c_ptr % 8 == 0 && "memory not 8-byte aligned!");
    // condensing options live directly after the wrapper struct
    opts->cond_opts = ocp_qp_full_condensing_opts_assign(dims, c_ptr);
    c_ptr += ocp_qp_full_condensing_opts_calculate_size(dims);
    align_char_to(8, &c_ptr);
    // dense QP solver options follow, 8-byte aligned
    opts->qp_solver_opts = qp_solver->opts_assign(qp_solver, &ddims, c_ptr);
    c_ptr += qp_solver->opts_calculate_size(qp_solver, &ddims);
    // sanity check: we consumed exactly the advertised size
    assert((char *) raw_memory + ocp_qp_full_condensing_solver_opts_calculate_size(config_, dims) ==
           c_ptr);
    return (void *) opts;
}
/* Initializes both sub-option blocks (condensing + dense QP solver) to
 * their default values. */
void ocp_qp_full_condensing_solver_opts_initialize_default(void *config_, ocp_qp_dims *dims,
                                                           void *opts_)
{
    ocp_qp_xcond_solver_config *config = config_;
    qp_solver_config *qp_solver = config->qp_solver;
    // full cond solver
    ocp_qp_full_condensing_solver_opts *opts = (ocp_qp_full_condensing_solver_opts *) opts_;
    // full condensing
    ocp_qp_full_condensing_opts_initialize_default(dims, opts->cond_opts);
    // qp solver
    qp_solver->opts_initialize_default(qp_solver, NULL,
                                       opts->qp_solver_opts);  // TODO(all): pass dense_qp_dims ???
}
/* Propagates an options update to both sub-option blocks after fields
 * have been changed externally. */
void ocp_qp_full_condensing_solver_opts_update(void *config_, ocp_qp_dims *dims, void *opts_)
{
    ocp_qp_xcond_solver_config *config = config_;
    qp_solver_config *qp_solver = config->qp_solver;
    // full cond solver
    ocp_qp_full_condensing_solver_opts *opts = (ocp_qp_full_condensing_solver_opts *) opts_;
    // full condensing
    ocp_qp_full_condensing_opts_update(dims, opts->cond_opts);
    // qp solver
    qp_solver->opts_update(qp_solver, NULL, opts->qp_solver_opts);  // TODO(all): pass dense_qp_dims
}
/* Routes a single option to the right sub-module. Fields prefixed with
 * "cond_" or "expand_" go to the condensing module (prefix stripped);
 * everything else is forwarded to the dense QP solver.
 *
 * BUG FIX: when `field` contains no '_', ptr_module stayed NULL and was
 * passed to strcmp(), which is undefined behavior. The module comparison is
 * now guarded by a NULL check, and the bitwise '|' between the strcmp
 * results is replaced by the intended logical '||'. */
void ocp_qp_full_condensing_solver_opts_set(void *config_, void *opts_, const char *field, void* value)
{
    ocp_qp_full_condensing_solver_opts *opts = (ocp_qp_full_condensing_solver_opts *) opts_;
    ocp_qp_xcond_solver_config *config = config_;

    int ii;
    char module[MAX_STR_LEN];
    char *ptr_module = NULL;
    int module_length = 0;

    // extract module name: the part of `field` before the first '_'
    char *char_ = strchr(field, '_');
    if (char_ != NULL)
    {
        module_length = char_ - field;
        for (ii = 0; ii < module_length; ii++)
            module[ii] = field[ii];
        module[module_length] = '\0';  // add end of string
        ptr_module = module;
    }

    if (ptr_module != NULL &&
        (!strcmp(ptr_module, "cond") || !strcmp(ptr_module, "expand")))
    {
        // pass option (prefix stripped) to the (partial) condensing module
        ocp_qp_full_condensing_opts_set(opts->cond_opts, field + module_length + 1, value);
    }
    else  // pass option to the QP solver module
    {
        config->qp_solver->opts_set(config->qp_solver, opts->qp_solver_opts, field, value);
    }

    return;
}
/************************************************
* memory
************************************************/
/* Returns the number of bytes needed for the solver memory: wrapper struct,
 * condensing memory, dense QP solver memory, and the condensed qp_in/qp_out
 * structures. */
int ocp_qp_full_condensing_solver_memory_calculate_size(void *config_, ocp_qp_dims *dims,
                                                        void *opts_)
{
    ocp_qp_xcond_solver_config *config = config_;
    qp_solver_config *qp_solver = config->qp_solver;
    ocp_qp_full_condensing_solver_opts *opts = (ocp_qp_full_condensing_solver_opts *) opts_;
    dense_qp_dims ddims;
    compute_dense_qp_dims(dims, &ddims);
    int size = 0;
    size += sizeof(ocp_qp_full_condensing_solver_memory);
    size += ocp_qp_full_condensing_memory_calculate_size(dims, opts->cond_opts);
    size += qp_solver->memory_calculate_size(qp_solver, &ddims, opts->qp_solver_opts);
    size += dense_qp_in_calculate_size(qp_solver, &ddims);
    size += dense_qp_out_calculate_size(qp_solver, &ddims);
    return size;
}
/* Lays the solver memory out in caller-provided raw memory in the same
 * order as ..._memory_calculate_size(): wrapper, condensing memory, dense QP
 * solver memory, condensed qp_in, condensed qp_out. Each section must be
 * 8-byte aligned (checked by the asserts). */
void *ocp_qp_full_condensing_solver_memory_assign(void *config_, ocp_qp_dims *dims, void *opts_,
                                                  void *raw_memory)
{
    ocp_qp_xcond_solver_config *config = config_;
    qp_solver_config *qp_solver = config->qp_solver;
    ocp_qp_full_condensing_solver_opts *opts = (ocp_qp_full_condensing_solver_opts *) opts_;
    dense_qp_dims ddims;
    compute_dense_qp_dims(dims, &ddims);
    char *c_ptr = (char *) raw_memory;
    ocp_qp_full_condensing_solver_memory *mem = (ocp_qp_full_condensing_solver_memory *) c_ptr;
    c_ptr += sizeof(ocp_qp_full_condensing_solver_memory);
    assert((size_t) c_ptr % 8 == 0 && "memory not 8-byte aligned!");
    mem->cond_memory = (ocp_qp_full_condensing_memory *) ocp_qp_full_condensing_memory_assign(
        dims, opts->cond_opts, c_ptr);
    c_ptr += ocp_qp_full_condensing_memory_calculate_size(dims, opts->cond_opts);
    assert((size_t) c_ptr % 8 == 0 && "memory not 8-byte aligned!");
    mem->solver_memory = qp_solver->memory_assign(qp_solver, &ddims, opts->qp_solver_opts, c_ptr);
    c_ptr += qp_solver->memory_calculate_size(qp_solver, &ddims, opts->qp_solver_opts);
    assert((size_t) c_ptr % 8 == 0 && "memory not 8-byte aligned!");
    // condensed (dense) QP input/output used as scratch between condense/expand
    mem->qpd_in = dense_qp_in_assign(qp_solver, &ddims, c_ptr);
    c_ptr += dense_qp_in_calculate_size(qp_solver, &ddims);
    assert((size_t) c_ptr % 8 == 0 && "memory not 8-byte aligned!");
    mem->qpd_out = dense_qp_out_assign(qp_solver, &ddims, c_ptr);
    c_ptr += dense_qp_out_calculate_size(qp_solver, &ddims);
    // sanity check: we consumed exactly the advertised size
    assert((char *) raw_memory +
               ocp_qp_full_condensing_solver_memory_calculate_size(config_, dims, opts_) ==
           c_ptr);
    return mem;
}
/************************************************
* workspace
************************************************/
/* Returns the number of bytes needed for the solver workspace: wrapper
 * struct + condensing workspace + dense QP solver workspace. */
int ocp_qp_full_condensing_solver_workspace_calculate_size(void *config_, ocp_qp_dims *dims,
                                                           void *opts_)
{
    ocp_qp_xcond_solver_config *config = config_;
    qp_solver_config *qp_solver = config->qp_solver;
    ocp_qp_full_condensing_solver_opts *opts = (ocp_qp_full_condensing_solver_opts *) opts_;
    dense_qp_dims ddims;
    compute_dense_qp_dims(dims, &ddims);
    int size = sizeof(ocp_qp_full_condensing_solver_workspace);
    size += ocp_qp_full_condensing_workspace_calculate_size(dims, opts->cond_opts);
    size += qp_solver->workspace_calculate_size(qp_solver, &ddims, opts->qp_solver_opts);
    return size;
}
/* Sets the workspace sub-pointers (condensing work, dense QP solver work)
 * into the flat workspace buffer. Must mirror the layout assumed by
 * ..._workspace_calculate_size(). */
static void cast_workspace(void *config_, ocp_qp_dims *dims,
                           ocp_qp_full_condensing_solver_opts *opts,
                           ocp_qp_full_condensing_solver_memory *mem,
                           ocp_qp_full_condensing_solver_workspace *work)
{
    ocp_qp_xcond_solver_config *config = config_;
    qp_solver_config *qp_solver = config->qp_solver;
    // dense dims of the condensed problem, stored in memory during assign
    dense_qp_dims *ddims = mem->qpd_in->dim;
    char *c_ptr = (char *) work;
    c_ptr += sizeof(ocp_qp_full_condensing_solver_workspace);
    work->cond_work = c_ptr;
    c_ptr += ocp_qp_full_condensing_workspace_calculate_size(dims, opts->cond_opts);
    work->solver_workspace = c_ptr;
    c_ptr += qp_solver->workspace_calculate_size(qp_solver, ddims, opts->qp_solver_opts);
    // sanity check: must not run past the allocated workspace
    assert((char *) work +
               ocp_qp_full_condensing_solver_workspace_calculate_size(config_, dims, opts) >=
           c_ptr);
}
/************************************************
* functions
************************************************/
/* Solves an OCP QP by full condensing: condense to a dense QP, solve it
 * with the configured dense QP solver, then expand the solution back.
 * Timing/iteration statistics are written into qp_out->misc (ocp_qp_info).
 * Returns the dense QP solver's status code. */
int ocp_qp_full_condensing_solver(void *config_, ocp_qp_in *qp_in, ocp_qp_out *qp_out, void *opts_,
                                  void *mem_, void *work_)
{
    ocp_qp_xcond_solver_config *config = config_;
    qp_solver_config *qp_solver = config->qp_solver;
    ocp_qp_info *info = (ocp_qp_info *) qp_out->misc;
    acados_timer tot_timer, cond_timer;
    acados_tic(&tot_timer);
    // cast data structures
    ocp_qp_full_condensing_solver_opts *opts = (ocp_qp_full_condensing_solver_opts *) opts_;
    ocp_qp_full_condensing_solver_memory *memory = (ocp_qp_full_condensing_solver_memory *) mem_;
    ocp_qp_full_condensing_solver_workspace *work =
        (ocp_qp_full_condensing_solver_workspace *) work_;
    // cast workspace
    cast_workspace(config_, qp_in->dim, opts, memory, work);
    // condense
    acados_tic(&cond_timer);
    ocp_qp_full_condensing(qp_in, memory->qpd_in, opts->cond_opts, memory->cond_memory,
                           work->cond_work);
    info->condensing_time = acados_toc(&cond_timer);
    // solve qp
    int solver_status =
        qp_solver->evaluate(qp_solver, memory->qpd_in, memory->qpd_out, opts->qp_solver_opts,
                            memory->solver_memory, work->solver_workspace);
    // expand (condensing_time accumulates condense + expand)
    acados_tic(&cond_timer);
    ocp_qp_full_expansion(memory->qpd_out, qp_out, opts->cond_opts, memory->cond_memory,
                          work->cond_work);
    info->condensing_time += acados_toc(&cond_timer);
    info->total_time = acados_toc(&tot_timer);
    // copy the dense solver's statistics into the OCP-level info struct
    info->solve_QP_time = ((dense_qp_info *) (memory->qpd_out->misc))->solve_QP_time;
    info->interface_time = ((dense_qp_info *) (memory->qpd_out->misc))->interface_time;
    info->num_iter = ((dense_qp_info *) (memory->qpd_out->misc))->num_iter;
    info->t_computed = ((dense_qp_info *) (memory->qpd_out->misc))->t_computed;
    return solver_status;
}
/* Fills the xcond solver config's function-pointer table with this
 * module's implementations. */
void ocp_qp_full_condensing_solver_config_initialize_default(void *config_)
{
    ocp_qp_xcond_solver_config *config = config_;
    config->dims_set = &ocp_qp_dims_set;
    config->opts_calculate_size = &ocp_qp_full_condensing_solver_opts_calculate_size;
    config->opts_assign = &ocp_qp_full_condensing_solver_opts_assign;
    config->opts_set = &ocp_qp_full_condensing_solver_opts_set;
    config->opts_initialize_default = &ocp_qp_full_condensing_solver_opts_initialize_default;
    config->opts_update = &ocp_qp_full_condensing_solver_opts_update;
    config->memory_calculate_size = &ocp_qp_full_condensing_solver_memory_calculate_size;
    config->memory_assign = &ocp_qp_full_condensing_solver_memory_assign;
    config->workspace_calculate_size = &ocp_qp_full_condensing_solver_workspace_calculate_size;
    config->evaluate = &ocp_qp_full_condensing_solver;
    return;
}
| 37.067416 | 758 | 0.701576 |
d086a98323abca978eeca0e646afdca9d98a887b | 273 | css | CSS | public/css/foundation/single/dashboard.css | HaraDev001/eg-okeicom | 910db1d89694c83b5d3eb286a7ffed72e4988536 | [
"MIT"
] | null | null | null | public/css/foundation/single/dashboard.css | HaraDev001/eg-okeicom | 910db1d89694c83b5d3eb286a7ffed72e4988536 | [
"MIT"
] | null | null | null | public/css/foundation/single/dashboard.css | HaraDev001/eg-okeicom | 910db1d89694c83b5d3eb286a7ffed72e4988536 | [
"MIT"
] | null | null | null | .dashboard{padding:0 20px}.dashboard-content{background:#fff}.dashboard-content:not(:last-child){margin-bottom:20px}.dashboard-content-list{border-bottom:1px solid #F5F5F5}.dashboard-content-list a{display:block;padding:15px 20px}
/*# sourceMappingURL=dashboard.css.map */
| 91 | 230 | 0.802198 |
f16a4c0abcbe9f3811a1d34fd2b925a33028ae43 | 2,477 | rb | Ruby | app/controllers/metrics_controller.rb | tkowark/repmine | 6d358e1178892fb715ece18e5bc5722c6eb882c9 | [
"MIT"
] | 3 | 2017-10-24T18:49:46.000Z | 2020-12-22T17:35:32.000Z | app/controllers/metrics_controller.rb | tkowark/repmine | 6d358e1178892fb715ece18e5bc5722c6eb882c9 | [
"MIT"
] | 7 | 2016-05-02T14:26:41.000Z | 2016-05-03T13:52:31.000Z | app/controllers/metrics_controller.rb | tkowark/repmine | 6d358e1178892fb715ece18e5bc5722c6eb882c9 | [
"MIT"
] | 1 | 2020-05-09T13:48:43.000Z | 2020-05-09T13:48:43.000Z | class MetricsController < ApplicationController
autocomplete :tag, :name, :class_name => 'ActsAsTaggableOn::Tag'
def create
@metric = Metric.new
@metric.save(validate: false)
redirect_to metric_path(@metric)
end
def show
@metric = Metric.find(params[:id])
@measurable_groups = Metric.grouped([@metric.id]).merge(Pattern.grouped){|key, val1, val2| val1 + val2}
@existing_connections = []
@metric.metric_nodes.each do |node|
node.children.each do |child|
@existing_connections << {:source => node.id, :target => child.id}
end
end
@title = @metric.name.blank? ? "New metric" : "Metric '#{@metric.name}'"
end
def update
metric = Metric.find(params[:id])
if metric.update_attributes(params[:metric])
flash[:notice] = "Successfully saved metric!"
render json: {}
else
flash[:error] = "Could not save metric! <br/> #{metric.errors.full_messages.join("<br />")}"
render json: {}, :status => :unprocessable_entity
end
end
def create_connection
source = MetricNode.find(params[:source_id])
target = MetricNode.find(params[:target_id])
target.parent = source
target.save(validate: false)
render :nothing => true, :status => 200, :content_type => 'text/html'
end
def destroy_connection
begin
source = MetricNode.find(params[:source_id])
target = MetricNode.find(params[:target_id])
target.parent = nil
target.save
rescue Exception => e
end
render :nothing => true, :status => 200, :content_type => 'text/html'
end
def download_csv
repository = Repository.find(params[:repository_id])
metric = Metric.find(params[:metrics].first)
metric.calculate(repository)
send_data(
File.open(metric.metrics_path("csv", repository)).read,
:type => 'text/csv; charset=utf-8; header=present',
:filename => metric.fancy_metric_file_name(repository)
)
end
def create_node
metric = Metric.find(params[:metric_id])
measurable = Measurable.find(params[:pattern_id])
node = metric.create_node(measurable)
render :partial => "metric_nodes/show", :layout => false, :locals => {:node => node}
end
def create_operator
metric = Metric.find(params[:metric_id])
node = MetricOperatorNode.create(:operator_cd => params[:operator])
metric.metric_nodes << node
render :partial => "metric_nodes/show", :layout => false, :locals => {:node => node}
end
end | 32.592105 | 107 | 0.664514 |
baed4476a0c7c49062aafb9c1c04e48380d82166 | 1,209 | asm | Assembly | programs/oeis/189/A189894.asm | karttu/loda | 9c3b0fc57b810302220c044a9d17db733c76a598 | [
"Apache-2.0"
] | null | null | null | programs/oeis/189/A189894.asm | karttu/loda | 9c3b0fc57b810302220c044a9d17db733c76a598 | [
"Apache-2.0"
] | null | null | null | programs/oeis/189/A189894.asm | karttu/loda | 9c3b0fc57b810302220c044a9d17db733c76a598 | [
"Apache-2.0"
] | null | null | null | ; A189894: Number of isosceles right triangles on a 2nX(n+1) grid
; 4,50,208,582,1308,2556,4528,7460,11620,17310,24864,34650,47068,62552,81568,104616,132228,164970,203440,248270,300124,359700,427728,504972,592228,690326,800128,922530,1058460,1208880,1374784,1557200,1757188,1975842,2214288,2473686,2755228,3060140,3389680,3745140,4127844,4539150,4980448,5453162,5958748,6498696,7074528,7687800,8340100,9033050,9768304,10547550,11372508,12244932,13166608,14139356,15165028,16245510,17382720,18578610,19835164,21154400,22538368,23989152,25508868,27099666,28763728,30503270,32320540,34217820,36197424
; Machine-generated LODA program computing OEIS A189894 (see header above).
; $0 holds the input n; the result is left in $1. lpb/lpe delimit loops,
; clr clears a range of cells starting at the given cell.
; NOTE(review): auto-generated code — the meaning of individual registers
; beyond input/output is unverified.
mov $11,$0
mov $13,$0
add $13,1
; outermost summation loop (runs n+1 times, counting $13 down)
lpb $13,1
clr $0,11
mov $0,$11
sub $13,1
sub $0,$13
mov $8,$0
mov $10,$0
add $10,1
; second-level summation loop
lpb $10,1
clr $0,8
mov $0,$8
sub $10,1
sub $0,$10
mov $5,$0
mov $7,$0
add $7,1
; third-level summation loop
lpb $7,1
mov $0,$5
sub $7,1
sub $0,$7
mul $0,2
mov $2,$0
pow $0,0
div $2,2
mul $2,33
; innermost loop: accumulates the per-term contribution in $3
lpb $0,1
sub $0,1
mov $4,2
add $4,$2
add $4,1
div $4,2
add $4,1
mov $3,$4
mul $3,2
lpe
add $6,$3
lpe
add $9,$6
lpe
add $12,$9
lpe
; final result
mov $1,$12
| 24.673469 | 531 | 0.61373 |
39d4c0caba0db8b9bba352c95a835e126feb5441 | 1,948 | swift | Swift | ProjectSwiftDemo/Pods/CocoaChainKit/CocoaChainKit/Classes/UIButton+Chain.swift | ZSMHup/ProjectSwiftDemo | 1df8c733ee016eb325dad4e8fa4311f4bba1e804 | [
"Apache-2.0"
] | 1 | 2018-08-25T03:11:30.000Z | 2018-08-25T03:11:30.000Z | SwiftTool/Pods/CocoaChainKit/CocoaChainKit/Classes/UIButton+Chain.swift | ZSMHup/SwiftTool | c13251bfd9a0a54c479cb0c13ed3f0634a86cb53 | [
"Apache-2.0"
] | null | null | null | SwiftTool/Pods/CocoaChainKit/CocoaChainKit/Classes/UIButton+Chain.swift | ZSMHup/SwiftTool | c13251bfd9a0a54c479cb0c13ed3f0634a86cb53 | [
"Apache-2.0"
] | 1 | 2018-11-20T18:59:19.000Z | 2018-11-20T18:59:19.000Z | //
// UIButton+Chain.swift
// CocoaChainKit
//
// Created by GorXion on 2018/5/8.
//
/// Chainable UIButton configuration helpers.
/// Each method forwards to the wrapped button (`base`) and returns the
/// chain so calls can be fluently composed. Variadic `state` parameters
/// apply the same value to every listed control state.
public extension Chain where Base: UIButton {

    /// Sets the title for one or more control states.
    @discardableResult
    func title(_ title: String?, for state: ControlState...) -> Chain {
        for controlState in state {
            base.setTitle(title, for: controlState)
        }
        return self
    }

    /// Sets the title color for one or more control states.
    @discardableResult
    func titleColor(_ color: UIColor?, for state: ControlState...) -> Chain {
        for controlState in state {
            base.setTitleColor(color, for: controlState)
        }
        return self
    }

    /// Sets the foreground image for one or more control states.
    @discardableResult
    func image(_ image: UIImage?, for state: ControlState...) -> Chain {
        for controlState in state {
            base.setImage(image, for: controlState)
        }
        return self
    }

    /// Sets the background image for one or more control states.
    @discardableResult
    func backgroundImage(_ image: UIImage?, for state: ControlState...) -> Chain {
        for controlState in state {
            base.setBackgroundImage(image, for: controlState)
        }
        return self
    }

    /// Sets the attributed title for one or more control states.
    @discardableResult
    func attributedTitle(_ attributedTitle: NSAttributedString?, for state: ControlState...) -> Chain {
        for controlState in state {
            base.setAttributedTitle(attributedTitle, for: controlState)
        }
        return self
    }

    /// Sets the title edge insets from a UIEdgeInsets value.
    @discardableResult
    func titleEdgeInsets(_ edgeInsets: UIEdgeInsets) -> Chain {
        base.titleEdgeInsets = edgeInsets
        return self
    }

    /// Sets the title edge insets from individual components.
    @discardableResult
    func titleEdgeInsets(top: CGFloat, left: CGFloat, bottom: CGFloat, right: CGFloat) -> Chain {
        base.titleEdgeInsets = UIEdgeInsets(top: top, left: left, bottom: bottom, right: right)
        return self
    }

    /// Sets the image edge insets from a UIEdgeInsets value.
    @discardableResult
    func imageEdgeInsets(_ edgeInsets: UIEdgeInsets) -> Chain {
        base.imageEdgeInsets = edgeInsets
        return self
    }

    /// Sets the image edge insets from individual components.
    @discardableResult
    func imageEdgeInsets(top: CGFloat, left: CGFloat, bottom: CGFloat, right: CGFloat) -> Chain {
        base.imageEdgeInsets = UIEdgeInsets(top: top, left: left, bottom: bottom, right: right)
        return self
    }
}
| 30.4375 | 103 | 0.637064 |
2a344605a96e2aa9208fb816713bfa628ca5e669 | 175 | java | Java | src/main/java/com/hamusuke/twitter4mc/utils/LinkFinder.java | hamusuke0323/TwitterForMinecraftFabric | 3578d94b3993bf3658274093aca841352b70ec74 | [
"Apache-2.0",
"CC-BY-4.0",
"MIT"
] | 9 | 2021-04-05T05:00:08.000Z | 2021-12-18T22:58:17.000Z | src/main/java/com/hamusuke/twitter4mc/utils/LinkFinder.java | hamusuke0323/TwitterForMinecraftFabric | 3578d94b3993bf3658274093aca841352b70ec74 | [
"Apache-2.0",
"CC-BY-4.0",
"MIT"
] | null | null | null | src/main/java/com/hamusuke/twitter4mc/utils/LinkFinder.java | hamusuke0323/TwitterForMinecraftFabric | 3578d94b3993bf3658274093aca841352b70ec74 | [
"Apache-2.0",
"CC-BY-4.0",
"MIT"
] | null | null | null | package com.hamusuke.twitter4mc.utils;
import net.fabricmc.api.EnvType;
import net.fabricmc.api.Environment;
//TODO
/**
 * Empty client-side stub — presumably intended to locate hyperlinks
 * (see the TODO above); no implementation yet.
 */
@Environment(EnvType.CLIENT)
public class LinkFinder {
}
| 17.5 | 38 | 0.8 |
755e2debe8d6b2c7bac6230a47392fd5f5df1922 | 9,725 | lua | Lua | command.lua | AntumMT/mod-server_shop | 76f1874323bd042a1d3741b4de5f498a2a5dcc16 | [
"MIT"
] | null | null | null | command.lua | AntumMT/mod-server_shop | 76f1874323bd042a1d3741b4de5f498a2a5dcc16 | [
"MIT"
] | null | null | null | command.lua | AntumMT/mod-server_shop | 76f1874323bd042a1d3741b4de5f498a2a5dcc16 | [
"MIT"
] | null | null | null |
--- Server Shops Chat Commands
--
-- @topic commands
-- Module-local aliases: mod namespace and translator.
local ss = server_shop
local S = core.get_translator(ss.modname)

-- Declarative table of chat sub-commands. Each entry holds the command
-- name, optional parameter synopsis, a translated description, and
-- `persists = true` when the command writes changes to the config file.
-- Used both for dispatch help text and by format_usage() below.
local commands = {
	{
		cmd = "help",
		params = "[" .. S("command") .. "]",
		desc = S("Shows usage info."),
	},
	{
		cmd = "list",
		desc = S("Lists all registered shop IDs."),
	},
	{
		cmd = "info",
		params = "<" .. S("ID") .. ">",
		desc = S("Lists information about a shop."),
	},
	{
		cmd = "register",
		params = "<" .. S("ID") .. ">" .. " <sell/buy> "
			.. " [" .. S("product1=value,product2=value,...") .. "]",
		desc = S("Registers a new shop."),
		persists = true,
	},
	{
		cmd = "unregister",
		params = "<" .. S("ID") .. ">",
		desc = S("Unregisters a shop."),
		persists = true,
	},
	{
		cmd = "add",
		params = "<" .. S("ID") .. "> <" .. S("product1=value,product2=value,...") .. ">",
		desc = S("Adds one or more items to a shop's product list."),
		persists = true,
	},
	{
		cmd = "remove",
		params = "<" .. S("ID") .. "> <" .. S("product") .. ">",
		desc = S("Removes first instance of an item from a shop's product list."),
		persists = true,
	},
	{
		cmd = "removeall",
		params = "<" .. S("ID") .. "> <" .. S("product") .. ">",
		desc = S("Removes all instances of an item from a shop's product list."),
		persists = true,
	},
	{
		cmd = "reload",
		desc = S("Reloads shops configuration."),
	},
}
--- Builds usage text from the `commands` table.
--
-- With a command name, returns that command's description (tagged when it
-- persists to config) followed by its synopsis; with no argument, returns
-- a synopsis line for every command.
--
-- @local
-- @param cmd Optional command name.
-- @return Formatted usage string.
local format_usage = function(cmd)
	local text = S("Usage:")
	if not cmd then
		-- no specific command: list every command's synopsis
		for _, entry in ipairs(commands) do
			text = text .. "\n  /" .. ss.modname .. " " .. entry.cmd
			if entry.params then
				text = text .. " " .. entry.params
			end
		end
		return text
	end
	-- look up the requested command's metadata
	local desc, params, persists
	for _, entry in ipairs(commands) do
		if entry.cmd == cmd then
			desc = entry.desc
			params = entry.params
			persists = entry.persists
			break
		end
	end
	text = text .. "\n  /" .. ss.modname .. " " .. cmd
	if params then
		text = text .. " " .. params
	end
	if desc then
		if persists then
			desc = desc .. " " .. S("(changes are written to config)")
		end
		-- description goes above the usage synopsis
		text = desc .. "\n\n" .. text
	end
	return text
end
--- Manages shops & config.
--
-- @chatcmd server_shop
-- @param command Command to execute.
-- @param[opt] params Parameters associated with command.
-- @usage
-- /server_shop <command> [<params>]
--
-- Commands:
-- - help
-- - Shows usage info.
-- - parameters: [command]
-- - list
-- - Lists all registered shop IDs.
-- - info
-- - Lists information about a shop.
-- - parameters: <id>
-- - register
-- - Registers new shop & updates configuration.
-- - parameters: <id> <sell/buy> [product1=value,product2=value,...]
-- - unregister
-- - Unregisters shop & updates configuration.
-- - parameters: <id>
-- - add
-- - Adds 1 or more items to a shop's product list.
-- - parameters: <id> <product1=value,product2=value,...>
-- - remove
-- - Removes the first instance of an item from a shop's product list.
-- - parameters: <id> <product>
-- - removeall
-- - Removes all instances of an item from a shop's product list.
-- - parameters: <id> <product>
-- - reload
-- - Reloads shops configuration.
-- Registers the /server_shop chat command (admin shop management).
-- Dispatches on the first word of `param` to the sub-commands declared in
-- `commands` above; returns (success, message) per Minetest convention.
--
-- Fixes over the original:
--  * "help <unknown>": the error message formatted `sub_cmd`, which is
--    always nil in that branch — now reports the name the user typed.
--  * "list" with extra parameters: the error message had a dangling
--    `"\n\n"` with the usage text missing — now appended, matching the
--    equivalent check in the "reload" branch.
core.register_chatcommand(ss.modname, {
	description = S("Manage shops configuration.") .. "\n\n"
		.. format_usage(),
	privs = {server=true},
	params = "<" .. S("command") .. "> [" .. S("params") .. "]",
	func = function(name, param)
		local params = param:split(" ")
		local cmd = params[1]
		-- params now holds only the sub-command's arguments
		table.remove(params, 1)
		if not cmd then
			return false, S("Must provide a command:") .. "\n\n" .. format_usage()
		end
		-- first argument of most sub-commands is the shop ID
		local shop_id = params[1]
		if cmd == "help" then
			if #params > 1 then
				return false, S("Too many parameters.") .. "\n\n"
					.. format_usage(cmd)
			end
			if params[1] then
				local sub_cmd
				for _, c in ipairs(commands) do
					if params[1] == c.cmd then
						sub_cmd = c.cmd
						break
					end
				end
				if sub_cmd then
					return true, format_usage(sub_cmd)
				else
					-- FIX: report the name the user typed (sub_cmd is nil here)
					return false, S("Unknown command: @1", params[1])
				end
			end
			return true, S("Manage shops configuration.") .. "\n\n" .. format_usage()
		elseif cmd == "reload" then
			if #params > 0 then
				return false, S('"@1" command takes no parameters.', cmd) .. "\n\n"
					.. format_usage(cmd)
			end
			ss.file_load()
			ss.prune_shops()
			return true, S("Shops configuration loaded.")
		elseif cmd == "register" then
			if #params > 3 then
				return false, S("Too many parameters.") .. "\n\n"
					.. format_usage(cmd)
			end
			local shop_type = params[2]
			local shop_products = params[3]
			if not shop_id then
				return false, S("Must provide ID.") .. "\n\n" .. format_usage(cmd)
			elseif not shop_type then
				return false, S("Must provide type.") .. "\n\n" .. format_usage(cmd)
			end
			if shop_type ~= "sell" and shop_type ~= "buy" then
				return false, S('Shop type must be "@1" or "@2".', "sell", "buy")
					.. "\n\n" .. format_usage(cmd)
			end
			-- parse optional "item=value,item=value,..." product list
			local products = {}
			if shop_products then
				shop_products = shop_products:split(",")
				for _, p in ipairs(shop_products) do
					local item = p:split("=")
					local item_name = item[1]
					local item_value = tonumber(item[2])
					if not core.registered_items[item_name] then
						return false, S('"@1" is not a recognized item.', item_name)
							.. "\n\n" .. format_usage(cmd)
					elseif not item_value then
						return false, S("Item value must be a number.")
							.. "\n\n" .. format_usage(cmd)
					end
					table.insert(products, {item_name, item_value})
				end
			end
			ss.register_persist(shop_id, products, shop_type == "buy")
			return true, S("Registered shop with ID: @1", shop_id)
		elseif cmd == "unregister" then
			if #params > 1 then
				return false, S("Too many parameters.") .. "\n\n"
					.. format_usage(cmd)
			end
			if not shop_id then
				return false, S("Must provide ID.") .. "\n\n" .. format_usage(cmd)
			end
			if not ss.unregister_persist(shop_id) then
				return false, S("Cannot unregister shop with ID: @1", shop_id)
			end
			return true, S("Unregistered shop with ID: @1", shop_id)
		elseif cmd == "add" then
			if #params > 2 then
				return false, S("Too many parameters.") .. "\n\n"
					.. format_usage(cmd)
			end
			if not shop_id then
				return false, S("Must provide ID.") .. "\n\n" .. format_usage(cmd)
			end
			if not ss.is_registered(shop_id) then
				return false, S("Shop ID @1 is not registered.", shop_id)
			end
			local shop_products = params[2]
			if not shop_products then
				return false, S("Must provide product.") .. "\n\n" .. format_usage(cmd)
			end
			local products = {}
			shop_products = shop_products:split(",")
			for _, p in ipairs(shop_products) do
				local item = p:split("=")
				local item_name = item[1]
				local item_value = tonumber(item[2])
				if not core.registered_items[item_name] then
					return false, S('"@1" is not a recognized item.', item_name)
						.. "\n\n" .. format_usage(cmd)
				elseif not item_value then
					return false, S("Item value must be a number.")
						.. "\n\n" .. format_usage(cmd)
				end
				table.insert(products, {item_name, item_value})
			end
			ss.add_product_persist(shop_id, products)
			if #products == 1 then
				return true, S("Added 1 item to shop ID @1.", shop_id)
			else
				return true, S("Added @1 items to shop ID @2.", #products, shop_id)
			end
		elseif cmd == "remove" or cmd == "removeall" then
			if #params > 2 then
				return false, S("Too many parameters.") .. "\n\n"
					.. format_usage(cmd)
			end
			if not shop_id then
				return false, S("Must provide ID.").. "\n\n" .. format_usage(cmd)
			end
			local product = params[2]
			if not product then
				return false, S("Must provide product.") .. "\n\n" .. format_usage(cmd)
			end
			-- "removeall" removes every instance, "remove" only the first
			local count = 0
			if cmd == "remove" then
				count = ss.remove_product_persist(shop_id, product, false)
			else
				count = ss.remove_product_persist(shop_id, product, true)
			end
			if count then
				if count == 1 then
					return true, S("Removed 1 item from shop ID @1.", shop_id)
				elseif count > 1 then
					return true, S("Removed @1 items from shop ID @2.", count, shop_id)
				end
			end
			return false, S("Shop ID @1 does not contain @2 in its product list.", shop_id, product)
		elseif cmd == "list" then
			if #params > 0 then
				-- FIX: usage text was missing after the dangling "\n\n"
				return false, S('"@1" command takes no parameters.', cmd) .. "\n\n"
					.. format_usage(cmd)
			end
			local shops_list = {}
			for id in pairs(ss.get_shops()) do
				table.insert(shops_list, id)
			end
			local msg
			local id_count = #shops_list
			if id_count > 0 then
				if id_count == 1 then
					msg = S("There is 1 shop registered: @1", table.concat(shops_list, ", "))
				else
					msg = S("There are @1 shops registered: @2", id_count, table.concat(shops_list, ", "))
				end
			else
				msg = S("There are no shops registered.")
			end
			return true, msg
		elseif cmd == "info" then
			if #params > 1 then
				return false, S("Too many parameters.") .. "\n\n"
					.. format_usage(cmd)
			end
			if not shop_id then
				return false, S("Must provide ID.").. "\n\n" .. format_usage(cmd)
			end
			local shop = ss.get_shop(shop_id)
			if not shop then
				return false, S("Shop ID @1 is not registered.", shop_id)
			end
			local s_type = S("seller")
			if shop.buyer then
				s_type = S("buyer")
			end
			-- format each product as "name (value [currency])"
			local product_list = {}
			for _, p in ipairs(shop.products) do
				p = p[1] .. " (" .. p[2]
				if ss.currency_suffix then
					p = p .. " " .. ss.currency_suffix
				end
				p = p .. ")"
				table.insert(product_list, p)
			end
			return true, S("Information about shop ID: @1", shop_id)
				.. "\n" .. S("Type: @1", s_type)
				.. "\n" .. S("Products: @1", table.concat(product_list, ", "))
		end
		return false, S("Unknown command: @1", cmd)
	end,
})
| 25.795756 | 91 | 0.595373 |
2f75efd361693eddbbfce6344fb39427c47ea9a9 | 526 | php | PHP | resources/views/book/index.blade.php | alexandr-vasiliev-sv/book | f5d29cfacf1ca25709edebfc999128b0d2e4a510 | [
"MIT"
] | null | null | null | resources/views/book/index.blade.php | alexandr-vasiliev-sv/book | f5d29cfacf1ca25709edebfc999128b0d2e4a510 | [
"MIT"
] | null | null | null | resources/views/book/index.blade.php | alexandr-vasiliev-sv/book | f5d29cfacf1ca25709edebfc999128b0d2e4a510 | [
"MIT"
] | null | null | null | @extends('layouts.app')
{{-- Breadcrumb trail: the books index is the active (terminal) crumb. --}}
@section('breadcrumbs')
<li class="active">Books</li>
@stop
@section('content')
{{-- Page header with a shortcut button to the book-creation form. --}}
<div class="row">
<div class="col-md-12">
<h1>
Books
<a href="{{ route('books.create') }}" class="btn btn-default pull-right">Create</a>
</h1>
</div>
</div>
{{-- Book listing delegated to the shared _booksTable partial; --}}
{{-- $books is presumably passed in by the controller -- TODO confirm. --}}
<div class="row">
<div class="col-md-12">
@include('book._booksTable', [
'books' => $books
])
</div>
</div>
@stop
2615fb7b5687656a411150c55990c304a0d68b84 | 184 | java | Java | src/main/java/com/baibai/rush/db/dao/RushCommodityDao.java | xinnywinne/rush | a5e4bb3141bfb8e97f3d5ba968c6eb9309a0ff77 | [
"Apache-2.0"
] | 1 | 2021-02-27T23:13:27.000Z | 2021-02-27T23:13:27.000Z | src/main/java/com/baibai/rush/db/dao/RushCommodityDao.java | xinnywinne/rush | a5e4bb3141bfb8e97f3d5ba968c6eb9309a0ff77 | [
"Apache-2.0"
] | null | null | null | src/main/java/com/baibai/rush/db/dao/RushCommodityDao.java | xinnywinne/rush | a5e4bb3141bfb8e97f3d5ba968c6eb9309a0ff77 | [
"Apache-2.0"
] | null | null | null | package com.baibai.rush.db.dao;
import com.baibai.rush.db.po.RushCommodity;
public interface RushCommodityDao {
public RushCommodity queryRushCommodityById(long commodityId);
}
| 20.444444 | 66 | 0.804348 |
262a75992dfe0757641787cc1c4f260bade78c02 | 210 | java | Java | src/main/java/us/ihmc/simulationconstructionset/gui/BookmarkedVariableAddedListener.java | ihmcrobotics/simulation-construction-set | 98d7bb859164025aad1a7ce5d1a021051c8c2537 | [
"Apache-2.0"
] | 11 | 2018-11-03T02:51:24.000Z | 2022-01-08T03:11:31.000Z | src/main/java/us/ihmc/simulationconstructionset/gui/BookmarkedVariableAddedListener.java | ihmcrobotics/simulation-construction-set | 98d7bb859164025aad1a7ce5d1a021051c8c2537 | [
"Apache-2.0"
] | 28 | 2018-08-13T21:01:10.000Z | 2022-03-01T19:45:50.000Z | src/main/java/us/ihmc/simulationconstructionset/gui/BookmarkedVariableAddedListener.java | ihmcrobotics/simulation-construction-set | 98d7bb859164025aad1a7ce5d1a021051c8c2537 | [
"Apache-2.0"
] | 3 | 2018-06-22T19:06:27.000Z | 2022-01-06T23:00:57.000Z | package us.ihmc.simulationconstructionset.gui;
import us.ihmc.yoVariables.variable.YoVariable;
public interface BookmarkedVariableAddedListener
{
public abstract void bookmarkAdded(YoVariable variable);
}
| 23.333333 | 59 | 0.842857 |
2847402368c67c7d5da6df0cd5871aa90c8b8f51 | 1,581 | rb | Ruby | spec/fcom/git_helpers_spec.rb | davidrunger/fcom | 3990caeaf1848169af3f133198c4aa7c0acc07aa | [
"MIT"
] | 3 | 2019-12-29T13:16:51.000Z | 2020-06-20T22:06:47.000Z | spec/fcom/git_helpers_spec.rb | davidrunger/fcom | 3990caeaf1848169af3f133198c4aa7c0acc07aa | [
"MIT"
] | 29 | 2020-05-21T15:11:29.000Z | 2022-02-14T23:40:40.000Z | spec/fcom/git_helpers_spec.rb | davidrunger/fcom | 3990caeaf1848169af3f133198c4aa7c0acc07aa | [
"MIT"
] | null | null | null | # frozen_string_literal: true
RSpec.describe Fcom::GitHelpers do
subject(:git_helper) { Fcom::GitHelpers.new }
describe '#repo' do
subject(:repo) { git_helper.repo }
context 'when Fcom::GitHelpers#repo is not stubbed' do
# rubocop:disable RSpec/AnyInstance
before { allow_any_instance_of(Fcom::GitHelpers).to receive(:repo).and_call_original }
# rubocop:enable RSpec/AnyInstance
context 'when `git remote ...` indicates that the remote is davidrunger/fcom' do
before do
# rubocop:disable RSpec/AnyInstance
expect_any_instance_of(Kernel).
to receive(:`).
with('git remote show origin').
and_return(<<~GIT_REMOTE_OUTPUT)
* remote origin
Fetch URL: git@github.com:davidrunger/fcom.git
Push URL: git@github.com:davidrunger/fcom.git
HEAD branch: master
Remote branch:
master tracked
Local branches configured for 'git pull':
add-spec-timing merges with remote master
master merges with remote master
safe merges with remote master
Local ref configured for 'git push':
master pushes to master (up to date)
GIT_REMOTE_OUTPUT
# rubocop:enable RSpec/AnyInstance
end
it 'returns a string in form "username/repo" representing the repo' do
expect(repo).to eq('davidrunger/fcom')
end
end
end
end
end
| 35.931818 | 92 | 0.59456 |
18320a5035ac3defa5d061eb02e83a5f05fbf867 | 8,406 | swift | Swift | NKTimeIntervalTextTransformation/Classes/Extensions/NKTextTimeIntervalConfiguration+DefaultValues.swift | nkopilovskii/NKTimeIntervalTextTransformation | 2b658019d015833d33f3b55e3b343c4f37f570b7 | [
"MIT"
] | null | null | null | NKTimeIntervalTextTransformation/Classes/Extensions/NKTextTimeIntervalConfiguration+DefaultValues.swift | nkopilovskii/NKTimeIntervalTextTransformation | 2b658019d015833d33f3b55e3b343c4f37f570b7 | [
"MIT"
] | 2 | 2019-07-16T09:30:42.000Z | 2019-07-16T10:32:20.000Z | NKTimeIntervalTextTransformation/Classes/Extensions/NKTextTimeIntervalConfiguration+DefaultValues.swift | nkopilovskii/NKTimeIntervalTextTransformation | 2b658019d015833d33f3b55e3b343c4f37f570b7 | [
"MIT"
] | null | null | null | //
//Copyright (c) 2019 nkopilovskii <nkopilovskii@gmail.com>
//
//Permission is hereby granted, free of charge, to any person obtaining a copy
//of this software and associated documentation files (the "Software"), to deal
//in the Software without restriction, including without limitation the rights
//to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
//copies of the Software, and to permit persons to whom the Software is
//furnished to do so, subject to the following conditions:
//
//The above copyright notice and this permission notice shall be included in
//all copies or substantial portions of the Software.
//
//THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
//IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
//FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
//AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
//LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
//OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
//THE SOFTWARE.
//
// NKTextTimeIntervalConfiguration+DefaultValues.swift
//
// Created by Nick Kopilovskii on 30.08.2018.
//
import Foundation
//MARK: - NKTextTimeIntervalConfiguration default configurations
/**
This extension contains static methods that generate configurations based on rules for declining the numerals of some languages
*/
/// Ready-made locale configurations (English, Russian, Ukrainian).
///
/// Each setup method installs:
///  - `pastFormat` / `futureFormat` templates containing the number and
///    time-component placeholder keys, plus a `zeroTimeIntervalPlaceholder`;
///  - one `NKDateComponent` per time unit whose closure maps a count to a
///    `(word, Bool)` tuple, or `nil` when the count is zero.
/// NOTE(review): the Bool presumably tells the formatter whether to render
/// the numeric value (false => word only, e.g. "a minute ago") -- TODO
/// confirm against NKDateComponent's declaration.
public extension NKTextTimeIntervalConfiguration {
/// English: only two forms are needed — singular ("a year") vs plural ("years").
mutating func setupDefaultEnglish() {
pastFormat = "\(NKTextTimeIntervalConfiguration.numberValueKey) \(NKTextTimeIntervalConfiguration.timeComponentValueKey) ago"
zeroTimeIntervalPlaceholder = "now"
futureFormat = "in \(NKTextTimeIntervalConfiguration.numberValueKey) \(NKTextTimeIntervalConfiguration.timeComponentValueKey)"
// For every unit: zero counts are skipped (nil); |1| gets the article form
// with the number suppressed (false); everything else the plural with number.
components.insert(NKDateComponent.centuries({
if Int($0) == 0 { return nil }
return abs($0) == 1 ? ("a century", false) : ("centuries", true)
}))
components.insert(NKDateComponent.years({
if Int($0) == 0 { return nil }
return abs($0) == 1 ? ("a year", false) : ("years", true)
}))
components.insert(NKDateComponent.months({
if Int($0) == 0 { return nil }
return abs($0) == 1 ? ("a month", false) : ("months", true)
}))
components.insert(NKDateComponent.weeks({
if Int($0) == 0 { return nil }
return abs($0) == 1 ? ("a week", false) : ("weeks", true)
}))
components.insert(NKDateComponent.days({
if Int($0) == 0 { return nil }
return abs($0) == 1 ? ("a day", false) : ("days", true)
}))
components.insert(NKDateComponent.hours({
if Int($0) == 0 { return nil }
return abs($0) == 1 ? ("an hour", false) : ("hours", true)
}))
components.insert(NKDateComponent.minutes({
if Int($0) == 0 { return nil }
return abs($0) == 1 ? ("a minute", false) : ("minutes", true)
}))
components.insert(NKDateComponent.seconds({
if Int($0) == 0 { return nil }
return abs($0) == 1 ? ("a second", false) : ("seconds", true)
}))
}
/// Russian: three declension forms chosen by the last digit(s) of the count.
mutating func setupDefaultRussian() {
pastFormat = "\(NKTextTimeIntervalConfiguration.numberValueKey) \(NKTextTimeIntervalConfiguration.timeComponentValueKey) назад"
zeroTimeIntervalPlaceholder = "сейчас"
futureFormat = "через \(NKTextTimeIntervalConfiguration.numberValueKey) \(NKTextTimeIntervalConfiguration.timeComponentValueKey)"
// Nominative singular: last digit 1, except the teens (11).
let ruleLastOne: (Double) -> Bool = { return Int($0) % 10 == 1 && Int($0) % 100 != 11 }
// Paucal form: last digit 2-4, except the teens (12-14).
let ruleLastTwoThreeFour: (Double) -> Bool = {
if ( (Int($0) % 10 == 2 && Int($0) % 100 != 12) || (Int($0) % 10 == 3 && Int($0) % 100 != 13) || (Int($0) % 10 == 4 && Int($0) % 100 != 14) ) { return true }
return false
}
// For each unit: singular / paucal / genitive-plural word forms. The number
// is suppressed only when the count is exactly 1 (Int($0) != 1).
components.insert(NKDateComponent.centuries({
if Int($0) == 0 { return nil }
if ruleLastOne($0) { return ("век", Int($0) != 1) }
if ruleLastTwoThreeFour($0) { return ("века", true) }
return ("веков", true)
}))
components.insert(NKDateComponent.years({
if Int($0) == 0 { return nil }
if ruleLastOne($0) { return ("год", Int($0) != 1) }
if ruleLastTwoThreeFour($0) { return ("года", true) }
return ("лет", true)
}))
components.insert(NKDateComponent.months({
if Int($0) == 0 { return nil }
if ruleLastOne($0) { return ("месяц", Int($0) != 1) }
if ruleLastTwoThreeFour($0) { return ("месяца", true) }
return ("месяцев", true)
}))
components.insert(NKDateComponent.weeks({
if Int($0) == 0 { return nil }
if ruleLastOne($0) { return ("неделю", Int($0) != 1) }
if ruleLastTwoThreeFour($0) { return ("недели", true) }
return ("недель", true)
}))
// NOTE(review): unlike the other units, the singular branch always keeps the
// number here ("1 день назад") -- possibly intentional, verify.
components.insert(NKDateComponent.days({
if Int($0) == 0 { return nil }
if ruleLastOne($0) { return ("день", true) }
if ruleLastTwoThreeFour($0) { return ("дня", true) }
return ("дней", true)
}))
components.insert(NKDateComponent.hours({
if Int($0) == 0 { return nil }
if ruleLastOne($0) { return ("час", Int($0) != 1) }
if ruleLastTwoThreeFour($0) { return ("часа", true) }
return ("часов", true)
}))
components.insert(NKDateComponent.minutes({
if Int($0) == 0 { return nil }
if ruleLastOne($0) { return ("минуту", Int($0) != 1) }
if ruleLastTwoThreeFour($0) { return ("минуты", true) }
return ("минут", true)
}))
components.insert(NKDateComponent.seconds({
if Int($0) == 0 { return nil }
if ruleLastOne($0) { return ("секунду", Int($0) != 1) }
if ruleLastTwoThreeFour($0) { return ("секунды", true) }
return ("секунд", true)
}))
}
/// Ukrainian: same last-digit declension rules as Russian, Ukrainian word
/// forms; components are inserted smallest-unit first here (insertion order
/// presumably does not matter for a Set -- TODO confirm `components` type).
mutating func setupDefaultUkrainian() {
pastFormat = "\(NKTextTimeIntervalConfiguration.numberValueKey) \(NKTextTimeIntervalConfiguration.timeComponentValueKey) тому"
zeroTimeIntervalPlaceholder = "зараз"
futureFormat = "через \(NKTextTimeIntervalConfiguration.numberValueKey) \(NKTextTimeIntervalConfiguration.timeComponentValueKey)"
// Nominative singular: last digit 1, except the teens (11).
let ruleLastOne: (Double) -> Bool = { return Int($0) % 10 == 1 && Int($0) % 100 != 11 }
// Paucal form: last digit 2-4, except the teens (12-14).
let ruleLastTwoThreeFour: (Double) -> Bool = {
if ( (Int($0) % 10 == 2 && Int($0) % 100 != 12) || (Int($0) % 10 == 3 && Int($0) % 100 != 13) || (Int($0) % 10 == 4 && Int($0) % 100 != 14) ) { return true }
return false
}
components.insert(NKDateComponent.seconds({
if Int($0) == 0 { return nil }
if ruleLastOne($0) { return ("секунду", Int($0) != 1) }
if ruleLastTwoThreeFour($0) { return ("секунди", true) }
return ("секунд", true)
}))
components.insert(NKDateComponent.minutes({
if Int($0) == 0 { return nil }
if ruleLastOne($0) { return ("хвилину", Int($0) != 1) }
if ruleLastTwoThreeFour($0) { return ("хвилини", true) }
return ("хвилин", true)
}))
components.insert(NKDateComponent.hours({
if Int($0) == 0 { return nil }
if ruleLastOne($0) { return ("годину", Int($0) != 1) }
if ruleLastTwoThreeFour($0) { return ("години", true) }
return ("годин", true)
}))
// NOTE(review): singular branch keeps the number ("1 день тому"), matching
// the Russian days case above -- possibly intentional, verify.
components.insert(NKDateComponent.days({
if Int($0) == 0 { return nil }
if ruleLastOne($0) { return ("день", true) }
if ruleLastTwoThreeFour($0) { return ("дні", true) }
return ("днів", true)
}))
components.insert(NKDateComponent.weeks({
if Int($0) == 0 { return nil }
if ruleLastOne($0) { return ("тиждень", Int($0) != 1) }
if ruleLastTwoThreeFour($0) { return ("тижні", true) }
return ("тижнів", true)
}))
components.insert(NKDateComponent.months({
if Int($0) == 0 { return nil }
if ruleLastOne($0) { return ("місяць", Int($0) != 1) }
if ruleLastTwoThreeFour($0) { return ("місяці", true) }
return ("місяців", true)
}))
components.insert(NKDateComponent.years({
if Int($0) == 0 { return nil }
if ruleLastOne($0) { return ("рік", Int($0) != 1) }
if ruleLastTwoThreeFour($0) { return ("роки", true) }
return ("років", true)
}))
components.insert(NKDateComponent.centuries({
if Int($0) == 0 { return nil }
if ruleLastOne($0) { return ("століття", Int($0) != 1) }
if ruleLastTwoThreeFour($0) { return ("століття", true) }
return ("століть", true)
}))
}
}
| 39.650943 | 163 | 0.620628 |
39bbc23b09ce3a74a9fc444f4b0ffe5eced02149 | 1,958 | js | JavaScript | src/__tests__/rockets.test.js | Ntwali-Josue/space-travelers-hub | 7898960ed4c671dfe0bbdb13934e9b977e587f88 | [
"MIT"
] | 4 | 2021-12-13T17:00:30.000Z | 2021-12-22T19:54:41.000Z | src/__tests__/rockets.test.js | Ntwali-Josue/space-travelers-hub | 7898960ed4c671dfe0bbdb13934e9b977e587f88 | [
"MIT"
] | 26 | 2021-10-04T14:48:25.000Z | 2021-10-08T15:53:41.000Z | src/__tests__/rockets.test.js | Ntwali-Josue/space-travelers-hub | 7898960ed4c671dfe0bbdb13934e9b977e587f88 | [
"MIT"
] | 1 | 2021-12-13T17:00:12.000Z | 2021-12-13T17:00:12.000Z | import { render, screen } from '@testing-library/react';
import { Provider } from 'react-redux';
import store from '../redux/configureStore';
import rocketsReducer from '../redux/rockets/rocketsReducer';
import App from '../App';
import fetchAPI from '../redux/fetchAPI';
import Rocket from '../components/rockets/Rocket';
// Unit tests for the rockets Redux slice and its React presentation.
describe('Rockets reducer', () => {
test('initial state', () => {
// An unknown action on an undefined state must yield the slice defaults.
expect(rocketsReducer(undefined, {})).toEqual(
{
status: 'empty',
rocketList: [],
reservedRockets: [],
},
);
});
test('fetch rockets action', () => {
const initialState = {
status: 'empty',
rocketList: [],
reservedRockets: [],
};
// Simulate the fulfilled async-thunk action carrying the fetched rockets.
const action = {
type: 'spaceX/rockets/FETCH_ROCKETS/fulfilled',
payload: [{ name: 'Falcon 1' }, { name: 'Falcon 9' }],
};
// The reducer should flip status to 'fetched' and store the payload as-is.
expect(rocketsReducer(initialState, action)).toEqual(
{
status: 'fetched',
rocketList: action.payload,
reservedRockets: [],
},
);
});
test('App', () => {
// Smoke-render the whole app inside the real store and snapshot the header.
render(
<Provider store={store}>
<App />
</Provider>,
);
expect(screen.getByText('Space Travelers Hub')).toMatchSnapshot();
});
test('render rockets', async () => {
// NOTE(review): this hits the live SpaceX API over the network, so the
// suite is neither hermetic nor deterministic -- consider mocking fetchAPI.
const rocketList = await fetchAPI('https://api.spacexdata.com/v3/rockets');
const mapRockets = rocketList.map(
(rocket) => (
<Rocket
key={rocket.id}
id={rocket.rocket_id}
imgURL={`${rocket.flickr_images[0]}`}
name={rocket.rocket_name}
description={rocket.description}
/>
),
);
render(
<Provider store={store}>
{mapRockets}
</Provider>,
);
// Snapshot the four rocket names the v3 API is expected to return.
expect(screen.getByText('Falcon 1')).toMatchSnapshot();
expect(screen.getByText('Falcon 9')).toMatchSnapshot();
expect(screen.getByText('Falcon Heavy')).toMatchSnapshot();
expect(screen.getByText('Starship')).toMatchSnapshot();
});
});