hexsha stringlengths 40 40 | size int64 5 1.05M | ext stringclasses 98
values | lang stringclasses 21
values | max_stars_repo_path stringlengths 3 945 | max_stars_repo_name stringlengths 4 118 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 368k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 945 | max_issues_repo_name stringlengths 4 118 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 134k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 945 | max_forks_repo_name stringlengths 4 135 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 5 1.05M | avg_line_length float64 1 1.03M | max_line_length int64 2 1.03M | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
71c02be02c7b5bea67599e1c2c0585905de82f22 | 254 | sql | SQL | spesialist-felles/src/main/resources/db/migration/V161__fjern_carriage_returns_fra_fagsystemid_i_endimensjonale_kolonner.sql | navikt/helse-spesialist | e221266ef6c7edfd3335717925094aea11385c60 | [
"MIT"
] | null | null | null | spesialist-felles/src/main/resources/db/migration/V161__fjern_carriage_returns_fra_fagsystemid_i_endimensjonale_kolonner.sql | navikt/helse-spesialist | e221266ef6c7edfd3335717925094aea11385c60 | [
"MIT"
] | 10 | 2021-05-06T06:26:33.000Z | 2022-03-15T18:25:58.000Z | spesialist-felles/src/main/resources/db/migration/V161__fjern_carriage_returns_fra_fagsystemid_i_endimensjonale_kolonner.sql | navikt/helse-spesialist | e221266ef6c7edfd3335717925094aea11385c60 | [
"MIT"
] | null | null | null | UPDATE oppdrag SET fagsystem_id=regexp_replace(fagsystem_id, E'[\\r\\n]+', '', 'g' ) WHERE fagsystem_id LIKE E'%\r\n';
UPDATE utbetalingslinje SET reffagsystemid=regexp_replace(reffagsystemid, E'[\\r\\n]+', '', 'g' ) WHERE reffagsystemid LIKE E'%\r\n';
| 63.5 | 133 | 0.704724 |
789040d5eb9f5d7b212759156b660ef1c05415cb | 411 | swift | Swift | TestCells/ViewController/Data/Forms/CellModels/ICellModel.swift | drrost/SwiftForms | 970f0cde8b62cc1c62a900cbf43b9a6b7c175239 | [
"MIT"
] | null | null | null | TestCells/ViewController/Data/Forms/CellModels/ICellModel.swift | drrost/SwiftForms | 970f0cde8b62cc1c62a900cbf43b9a6b7c175239 | [
"MIT"
] | null | null | null | TestCells/ViewController/Data/Forms/CellModels/ICellModel.swift | drrost/SwiftForms | 970f0cde8b62cc1c62a900cbf43b9a6b7c175239 | [
"MIT"
] | null | null | null | //
// ICellModel.swift
// TestCells
//
// Created by Rostyslav Druzhchenko on 05.04.2020.
// Copyright © 2020 Rostyslav Druzhchenko. All rights reserved.
//
import Foundation
/// Contract implemented by every form-cell view model.
protocol ICellModel {
    /// Kind of cell this model drives (see `CellType`).
    var type: CellType { get }
    /// Text displayed by the cell.
    var title: String { get }
    /// Whether the cell's current value is valid.
    /// NOTE(review): presumably derived from `validator` — confirm in implementations.
    var isValid: Bool { get }
    /// Validation strategy; settable so it can be swapped at runtime.
    var validator: IValidator { get set }
}
/// Kinds of cells a form can contain; `undefined` is the fallback
/// when a model has not been assigned a concrete type.
enum CellType {
    case undefined
    case text
    case selection
}
| 16.44 | 64 | 0.664234 |
fb44f9e52f502dd2f82bfc5c42af3df960e7d829 | 2,517 | h | C | Source/WalkSimulator/Public/Walker.h | hl1282456555/WalkSimulator | 7cae6c6b160659aee42968ad4d243b8deec25b3e | [
"MIT"
] | null | null | null | Source/WalkSimulator/Public/Walker.h | hl1282456555/WalkSimulator | 7cae6c6b160659aee42968ad4d243b8deec25b3e | [
"MIT"
] | null | null | null | Source/WalkSimulator/Public/Walker.h | hl1282456555/WalkSimulator | 7cae6c6b160659aee42968ad4d243b8deec25b3e | [
"MIT"
] | null | null | null | // Fill out your copyright notice in the Description page of Project Settings.
#pragma once
#include "CoreMinimal.h"
#include "GameFramework/Actor.h"
#include "WalkSimulatorStructures.h"
#include "VaRestJsonObject.h"
#include "Walker.generated.h"
UCLASS()
// Actor representing one simulated pedestrian ("walker"): it replays recorded
// animation frames (AnimFrames) and path points (PathPoints) over time and can
// report its 2D wireframe/visibility for capture purposes.
class WALKSIMULATOR_API AWalker : public AActor
{
    GENERATED_UCLASS_BODY()
public:
    // Sets default values for this actor's properties
    AWalker();
protected:
    // Called when the game starts or when spawned
    virtual void BeginPlay() override;
public:
    // Called every frame
    virtual void Tick(float DeltaTime) override;
    // Records the current skeletal pose into AnimFrames, keyed relative to StartRecordTime.
    // NOTE(review): exact keying semantics not visible here — confirm in the .cpp.
    UFUNCTION(BlueprintCallable)
    void CaptureAnimFrame(const float& StartRecordTime);
    // Assigns the skeletal mesh and its animation blueprint class to this walker.
    UFUNCTION(BlueprintCallable)
    void InitWalker(USkeletalMesh* Mesh, UClass* AnimClass);
    // Applies the recorded bone pose closest to the given time to PoseableMesh.
    UFUNCTION(BlueprintCallable)
    void SetBonePose(const float& Time);
    // Moves/orients the actor according to the recorded path at the given time.
    UFUNCTION(BlueprintCallable)
    void SetWalkerTransform(const float& Time);
    // Outputs the walker's screen-space outline points for the given frame time.
    UFUNCTION(BlueprintCallable)
    void GetWireFrame(TArray<FVector2D>& WireFrame, float FrameTime, bool bCapturing = true);
    // Returns true if the given 2D bound points fall inside the viewport.
    UFUNCTION(BlueprintCallable)
    bool IsWalkerInViewport(const TArray<FVector2D>& BoundsPoints);
    // Updates mesh visibility for the given frame time (may trigger OnMeshHidden).
    UFUNCTION(BlueprintCallable)
    void RefreshVisibility(float FrameTime);
    // Blueprint hook fired when the mesh becomes hidden.
    UFUNCTION(BlueprintImplementableEvent)
    void OnMeshHidden();
private:
    // Looks up the AnimFrames entry nearest to Time; returns false when none is found.
    bool FindNearestAnimFrame(const float& Time, FAnimFrame& CurrentAnimFrame);
public:
    UPROPERTY(EditAnywhere, BlueprintReadOnly)
    USceneComponent* Scene;
    UPROPERTY(EditAnywhere, BlueprintReadOnly)
    class USkeletalMeshComponent* SkeletalMesh;
    UPROPERTY(EditAnywhere, BlueprintReadOnly)
    class UPoseableMeshComponent* PoseableMesh;
    UPROPERTY(EditAnywhere, BlueprintReadOnly)
    class UStaticMeshComponent* BoundMesh;
    // Identifier of this walker within the simulation.
    UPROPERTY(VisibleAnywhere, BlueprintReadWrite)
    int32 WalkerId;
    UPROPERTY(VisibleAnywhere, BlueprintReadWrite)
    FString MeshName;
    // Bones whose poses are recorded/replayed.
    UPROPERTY(EditAnywhere, BlueprintReadWrite)
    TArray<FString> BoneNames;
    UPROPERTY(VisibleAnywhere, BlueprintReadWrite)
    float SimulateTime;
    UPROPERTY(VisibleAnywhere, BlueprintReadWrite)
    float SpawnTime;
    UPROPERTY(VisibleAnywhere, BlueprintReadWrite)
    FPathPoint CurrentPoint;
    // Recorded trajectory of the walker.
    UPROPERTY(VisibleAnywhere, BlueprintReadWrite)
    TArray<FPathPoint> PathPoints;
    // Recorded animation frames keyed by time.
    UPROPERTY(VisibleAnywhere, BlueprintReadWrite)
    TMap<float, FAnimFrame> AnimFrames;
    // Recorded bounds keyed by time (used for visibility checks).
    UPROPERTY(VisibleAnywhere, BlueprintReadWrite)
    TMap<float, FBoxSphereBounds> BoundsFrames;
    UPROPERTY(VisibleAnywhere, BlueprintReadWrite)
    int32 CheckStartIndex;
    UPROPERTY(VisibleAnywhere, BlueprintReadWrite)
    int32 AnimStartIndex;
};
| 24.920792 | 90 | 0.816051 |
9936f00e94c18b83fe2633558adf23964e4a912a | 6,297 | h | C | hw/drivers/sensors/lsm6dsl/src/lsm6dsl_priv.h | WildflowerSchools/mynewt-dw1000-core | dd8e62594cf138aca51a5ce0f716bd2f964b994a | [
"Apache-2.0"
] | 55 | 2018-09-17T22:55:36.000Z | 2021-12-23T05:51:08.000Z | hw/drivers/sensors/lsm6dsl/src/lsm6dsl_priv.h | WildflowerSchools/mynewt-dw1000-core | dd8e62594cf138aca51a5ce0f716bd2f964b994a | [
"Apache-2.0"
] | 21 | 2018-09-29T01:56:08.000Z | 2020-11-20T18:26:03.000Z | hw/drivers/sensors/lsm6dsl/src/lsm6dsl_priv.h | WildflowerSchools/mynewt-dw1000-core | dd8e62594cf138aca51a5ce0f716bd2f964b994a | [
"Apache-2.0"
] | 36 | 2018-09-04T16:35:42.000Z | 2021-12-26T10:44:27.000Z | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
#ifndef __LSM6DSL_PRIV_H__
#define __LSM6DSL_PRIV_H__
#ifdef __cplusplus
extern "C" {
#endif
// Inspired by Kris Winer (https://github.com/kriswiner/LSM6DSM_LIS2MDL_LPS22HB)
// Adapted for NRF52 by Niklas Casaril <niklas@loligoelectronics.com>
/* LSM6DSM registers
http://www.st.com/content/ccc/resource/technical/document/datasheet/76/27/cf/88/c5/03/42/6b/DM00218116.pdf/files/DM00218116.pdf/jcr:content/translations/en.DM00218116.pdf
*/
/* Register address map of the ST LSM6DSL/LSM6DSM IMU (values from the ST
 * datasheet linked above).  Addresses are I2C/SPI register offsets; gaps in
 * the numbering correspond to reserved registers. */
enum lsm6dsl_registers {
    /* Embedded-functions / sensor-hub configuration */
    LSM6DSL_FUNC_CFG_ACCESS = 0x01,
    LSM6DSL_SENSOR_SYNC_TIME_FRAME = 0x04,
    LSM6DSL_SENSOR_SYNC_RES_RATIO = 0x05,
    /* FIFO configuration and interrupt routing */
    LSM6DSL_FIFO_CTRL1 = 0x06,
    LSM6DSL_FIFO_CTRL2 = 0x07,
    LSM6DSL_FIFO_CTRL3 = 0x08,
    LSM6DSL_FIFO_CTRL4 = 0x09,
    LSM6DSL_FIFO_CTRL5 = 0x0A,
    LSM6DSL_DRDY_PULSE_CFG = 0x0B,
    LSM6DSL_INT1_CTRL = 0x0D,
    LSM6DSL_INT2_CTRL = 0x0E,
    LSM6DSL_WHO_AM_I = 0x0F, // should be 0x6A
    /* Accelerometer / gyroscope control registers */
    LSM6DSL_CTRL1_XL = 0x10,
    LSM6DSL_CTRL2_G = 0x11,
    LSM6DSL_CTRL3_C = 0x12,
    LSM6DSL_CTRL4_C = 0x13,
    LSM6DSL_CTRL5_C = 0x14,
    LSM6DSL_CTRL6_C = 0x15,
    LSM6DSL_CTRL7_G = 0x16,
    LSM6DSL_CTRL8_XL = 0x17,
    LSM6DSL_CTRL9_XL = 0x18,
    LSM6DSL_CTRL10_C = 0x19,
    LSM6DSL_MASTER_CONFIG = 0x1A,
    /* Event/status sources */
    LSM6DSL_WAKE_UP_SRC = 0x1B,
    LSM6DSL_TAP_SRC = 0x1C,
    LSM6DSL_D6D_SRC = 0x1D,
    LSM6DSL_STATUS_REG = 0x1E,
    /* Output data: temperature, gyro (G) and accelerometer (XL) axes */
    LSM6DSL_OUT_TEMP_L = 0x20,
    LSM6DSL_OUT_TEMP_H = 0x21,
    LSM6DSL_OUTX_L_G = 0x22,
    LSM6DSL_OUTX_H_G = 0x23,
    LSM6DSL_OUTY_L_G = 0x24,
    LSM6DSL_OUTY_H_G = 0x25,
    LSM6DSL_OUTZ_L_G = 0x26,
    LSM6DSL_OUTZ_H_G = 0x27,
    LSM6DSL_OUTX_L_XL = 0x28,
    LSM6DSL_OUTX_H_XL = 0x29,
    LSM6DSL_OUTY_L_XL = 0x2A,
    LSM6DSL_OUTY_H_XL = 0x2B,
    LSM6DSL_OUTZ_L_XL = 0x2C,
    LSM6DSL_OUTZ_H_XL = 0x2D,
    /* Sensor-hub (external sensor) data registers */
    LSM6DSL_SENSORHUB1_REG = 0x2E,
    LSM6DSL_SENSORHUB2_REG = 0x2F,
    LSM6DSL_SENSORHUB3_REG = 0x30,
    LSM6DSL_SENSORHUB4_REG = 0x31,
    LSM6DSL_SENSORHUB5_REG = 0x32,
    LSM6DSL_SENSORHUB6_REG = 0x33,
    LSM6DSL_SENSORHUB7_REG = 0x34,
    LSM6DSL_SENSORHUB8_REG = 0x35,
    LSM6DSL_SENSORHUB9_REG = 0x36,
    LSM6DSL_SENSORHUB10_REG = 0x37,
    LSM6DSL_SENSORHUB11_REG = 0x38,
    LSM6DSL_SENSORHUB12_REG = 0x39,
    /* FIFO status and data output */
    LSM6DSL_FIFO_STATUS1 = 0x3A,
    LSM6DSL_FIFO_STATUS2 = 0x3B,
    LSM6DSL_FIFO_STATUS3 = 0x3C,
    LSM6DSL_FIFO_STATUS4 = 0x3D,
    LSM6DSL_FIFO_DATA_OUT_L = 0x3E,
    LSM6DSL_FIFO_DATA_OUT_H = 0x3F,
    /* Timestamp and step counter */
    LSM6DSL_TIMESTAMP0_REG = 0x40,
    LSM6DSL_TIMESTAMP1_REG = 0x41,
    LSM6DSL_TIMESTAMP2_REG = 0x42,
    LSM6DSL_STEP_TIMESTAMP_L = 0x49,
    LSM6DSL_STEP_TIMESTAMP_H = 0x4A,
    LSM6DSL_STEP_COUNTER_L = 0x4B,
    LSM6DSL_STEP_COUNTER_H = 0x4C,
    LSM6DSL_SENSORHUB13_REG = 0x4D,
    LSM6DSL_SENSORHUB14_REG = 0x4E,
    LSM6DSL_SENSORHUB15_REG = 0x4F,
    LSM6DSL_SENSORHUB16_REG = 0x50,
    LSM6DSL_SENSORHUB17_REG = 0x51,
    LSM6DSL_SENSORHUB18_REG = 0x52,
    LSM6DSL_FUNC_SRC1 = 0x53,
    LSM6DSL_FUNC_SRC2 = 0x54,
    LSM6DSL_WRIST_TILT_IA = 0x55,
    /* Tap / wake-up / free-fall / interrupt configuration */
    LSM6DSL_TAP_CFG = 0x58,
    LSM6DSL_TAP_THS_6D = 0x59,
    LSM6DSL_INT_DUR2 = 0x5A,
    LSM6DSL_WAKE_UP_THS = 0x5B,
    LSM6DSL_WAKE_UP_DUR = 0x5C,
    LSM6DSL_FREE_FALL = 0x5D,
    LSM6DSL_MD1_CFG = 0x5E,
    LSM6DSL_MD2_CFG = 0x5F,
    LSM6DSL_MASTER_MODE_CODE = 0x60,
    LSM6DSL_SENS_SYNC_SPI_ERROR_CODE = 0x61,
    /* Raw magnetometer data (via sensor hub) */
    LSM6DSL_OUT_MAG_RAW_X_L = 0x66,
    LSM6DSL_OUT_MAG_RAW_X_H = 0x67,
    LSM6DSL_OUT_MAG_RAW_Y_L = 0x68,
    LSM6DSL_OUT_MAG_RAW_Y_H = 0x69,
    LSM6DSL_OUT_MAG_RAW_Z_L = 0x6A,
    LSM6DSL_OUT_MAG_RAW_Z_H = 0x6B,
    /* OIS (optical image stabilization) interface and user offsets */
    LSM6DSL_INT_OIS = 0x6F,
    LSM6DSL_CTRL1_OIS = 0x70,
    LSM6DSL_CTRL2_OIS = 0x71,
    LSM6DSL_CTRL3_OIS = 0x72,
    LSM6DSL_X_OFS_USR = 0x73,
    LSM6DSL_Y_OFS_USR = 0x74,
    LSM6DSL_Z_OFS_USR = 0x75,
};
/* Write a single byte @value to register @reg of @dev.
 * @return presumably 0 on success, negative on error — confirm in the implementation. */
int lsm6dsl_write8(struct lsm6dsl *dev, uint8_t reg, uint32_t value);
/* Read a single byte from register @reg of @dev into @value. */
int lsm6dsl_read8(struct lsm6dsl *dev, uint8_t reg, uint8_t *value);
/* Read @length consecutive bytes starting at register @reg into @buffer. */
int lsm6dsl_read_bytes(struct lsm6dsl *dev, uint8_t reg, uint8_t *buffer, uint32_t length);
#ifdef __cplusplus
}
#endif
#endif /* __LSM6DSL_PRIV_H__ */
| 43.729167 | 172 | 0.559314 |
b83907cde11105028d96a7760e7cfdfa9f9d2916 | 3,327 | lua | Lua | apioak/sys/admin.lua | Jacktom520/apioak | eefbc4513b0e66dc1ff0bf5881d7fa5ca0df9ce1 | [
"ECL-2.0",
"Apache-2.0",
"BSD-2-Clause"
] | 409 | 2019-11-14T06:58:24.000Z | 2022-03-25T01:16:36.000Z | apioak/sys/admin.lua | Jacktom520/apioak | eefbc4513b0e66dc1ff0bf5881d7fa5ca0df9ce1 | [
"ECL-2.0",
"Apache-2.0",
"BSD-2-Clause"
] | 30 | 2019-11-14T07:20:01.000Z | 2020-08-05T09:29:28.000Z | apioak/sys/admin.lua | Jacktom520/apioak | eefbc4513b0e66dc1ff0bf5881d7fa5ca0df9ce1 | [
"ECL-2.0",
"Apache-2.0",
"BSD-2-Clause"
] | 60 | 2019-11-01T10:50:14.000Z | 2022-01-09T13:40:09.000Z | local oakrouting = require("resty.oakrouting")
local admin = require("apioak.admin")
local router
local _M = {}
-- Builds the admin API router and registers every admin route on it.
-- Intended to run once per nginx worker (init_worker phase); the resulting
-- router is stored in the file-local upvalue `router` and exposed via
-- `_M.routers()`.
function _M.init_worker()
    router = oakrouting.new()
    -- Common Service Related APIs
    router:get("/apioak/admin/plugins", admin.common.plugins)
    router:get("/apioak/admin/users", admin.common.users)
    router:get("/apioak/admin/members", admin.common.members)
    router:get("/apioak/admin/projects", admin.common.projects)
    router:get("/apioak/admin/routers", admin.common.routers)
    -- Account Related APIs
    router:post("/apioak/admin/account/register", admin.account.register)
    router:put("/apioak/admin/account/login", admin.account.login)
    router:delete("/apioak/admin/account/logout", admin.account.logout)
    router:get("/apioak/admin/account/status", admin.account.status)
    -- Project Related APIs
    router:post("/apioak/admin/project", admin.project.created)
    router:put("/apioak/admin/project/{project_id}", admin.project.updated)
    router:get("/apioak/admin/project/{project_id}", admin.project.selected)
    router:delete("/apioak/admin/project/{project_id}", admin.project.deleted)
    router:get("/apioak/admin/project/{project_id}/routers", admin.project.routers)
    router:get("/apioak/admin/project/{project_id}/plugins", admin.project.plugins)
    router:post("/apioak/admin/project/{project_id}/plugin", admin.project.plugin_created)
    router:put("/apioak/admin/project/{project_id}/plugin/{plugin_id}", admin.project.plugin_updated)
    router:delete("/apioak/admin/project/{project_id}/plugin/{plugin_id}", admin.project.plugin_deleted)
    router:get("/apioak/admin/project/{project_id}/members", admin.project.members)
    router:post("/apioak/admin/project/{project_id}/member", admin.project.member_created)
    router:delete("/apioak/admin/project/{project_id}/member/{user_id}", admin.project.member_deleted)
    router:put("/apioak/admin/project/{project_id}/member/{user_id}", admin.project.member_updated)
    -- Router Related APIs
    router:post("/apioak/admin/router", admin.router.created)
    router:get("/apioak/admin/router/{router_id}", admin.router.query)
    router:put("/apioak/admin/router/{router_id}", admin.router.updated)
    router:delete("/apioak/admin/router/{router_id}", admin.router.deleted)
    router:get("/apioak/admin/router/{router_id}/plugins", admin.router.plugins)
    router:post("/apioak/admin/router/{router_id}/plugin", admin.router.plugin_created)
    router:put("/apioak/admin/router/{router_id}/plugin/{plugin_id}", admin.router.plugin_updated)
    router:delete("/apioak/admin/router/{router_id}/plugin/{plugin_id}", admin.router.plugin_deleted)
    router:post("/apioak/admin/router/{router_id}/env/{env}", admin.router.env_push)
    router:delete("/apioak/admin/router/{router_id}/env/{env}", admin.router.env_pull)
    -- User Manager API
    router:post("/apioak/admin/user", admin.user.created)
    router:delete("/apioak/admin/user/{user_id}", admin.user.deleted)
    router:put("/apioak/admin/user/{user_id}/password", admin.user.password)
    router:put("/apioak/admin/user/{user_id}/enable", admin.user.enable)
    router:put("/apioak/admin/user/{user_id}/disable", admin.user.disable)
end
-- Returns the shared router instance built by _M.init_worker
-- (nil if init_worker has not run yet).
function _M.routers()
    return router
end
return _M
| 32.940594 | 104 | 0.725579 |
2632c1fc5a8a4f7c3a50cf3dd7050943b6a21c22 | 943 | java | Java | redis/redis-proxy/src/main/java/com/ctrip/xpipe/redis/proxy/config/ProxyConfig.java | z131031231/x-pipe | b175922e3cd4fb77bad4f821c9cf7f7c4e67731e | [
"Apache-2.0"
] | 1,652 | 2016-04-18T10:34:30.000Z | 2022-03-30T06:15:35.000Z | redis/redis-proxy/src/main/java/com/ctrip/xpipe/redis/proxy/config/ProxyConfig.java | z131031231/x-pipe | b175922e3cd4fb77bad4f821c9cf7f7c4e67731e | [
"Apache-2.0"
] | 342 | 2016-07-27T10:38:01.000Z | 2022-03-31T11:11:46.000Z | redis/redis-proxy/src/main/java/com/ctrip/xpipe/redis/proxy/config/ProxyConfig.java | z131031231/x-pipe | b175922e3cd4fb77bad4f821c9cf7f7c4e67731e | [
"Apache-2.0"
] | 492 | 2016-04-25T05:14:10.000Z | 2022-03-16T01:40:38.000Z | package com.ctrip.xpipe.redis.proxy.config;
import com.ctrip.xpipe.api.proxy.CompressAlgorithm;
import com.ctrip.xpipe.redis.core.config.TLSConfig;
import io.netty.buffer.ByteBuf;
import io.netty.handler.codec.ByteToMessageDecoder;
import io.netty.handler.codec.MessageToByteEncoder;
/**
 * Configuration contract of the proxy, extending the TLS settings
 * defined by {@link TLSConfig}.
 *
 * @author chen.zhu
 * <p>
 * May 09, 2018
 */
public interface ProxyConfig extends TLSConfig {

    /** Port the frontend listens on for plain TCP connections. */
    int frontendTcpPort();

    /** Port the frontend listens on for TLS connections. */
    int frontendTlsPort();

    /** Interval, in milliseconds, between traffic reports. */
    long getTrafficReportIntervalMillis();

    /** Interval, in seconds, between endpoint health checks. */
    int endpointHealthCheckIntervalSec();

    /** Whether to skip installing the TLS netty handler. */
    boolean noTlsNettyHandler();

    /** Fixed receive buffer size. NOTE(review): unit presumably bytes — confirm. */
    int getFixedRecvBufferSize();

    // to avoid any connect outside internal network
    String[] getInternalNetworkPrefix();

    /** Whether monitoring should be started. */
    boolean startMonitor();

    /** Response timeout. NOTE(review): unit not visible here — confirm at call sites. */
    int getResponseTimeout();

    /** Whether payload compression is enabled. */
    boolean isCompressEnabled();

    /** Compression algorithm in use when compression is enabled. */
    CompressAlgorithm getCompressAlgorithm();

    /** Netty decoder matching {@link #getCompressAlgorithm()}. */
    ByteToMessageDecoder getCompressDecoder();

    /** Netty encoder matching {@link #getCompressAlgorithm()}. */
    MessageToByteEncoder<ByteBuf> getCompressEncoder();
}
| 20.955556 | 55 | 0.751856 |
907f5609f97575fe175cfd0912511527477a113e | 8,914 | py | Python | src/display/calendar/calendar2events.py | digsim/ePaperDisplay | 1c4897158e3767ee274ade93ede8ef9924811363 | [
"Apache-2.0"
] | null | null | null | src/display/calendar/calendar2events.py | digsim/ePaperDisplay | 1c4897158e3767ee274ade93ede8ef9924811363 | [
"Apache-2.0"
] | null | null | null | src/display/calendar/calendar2events.py | digsim/ePaperDisplay | 1c4897158e3767ee274ade93ede8ef9924811363 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import logging
import requests
import tempfile
import pytz
import datetime
import codecs
import sys
import itertools
import operator
from .calendarEvent import CalendarEvent
from dateutil import rrule
from icalendar import Calendar
from pprint import pprint
from progressbar import Bar, ProgressBar, SimpleProgress
class Calendar2Events(object):
    """Fetch an iCalendar (ics) feed and turn each entry into a CalendarEvent.

    The calendar is either downloaded over HTTP(S) (optionally with basic
    authentication) or read from a local file.  Only events whose start falls
    inside the ``[start, end]`` window given to the constructor are kept;
    recurring events are expanded via their RRULE, and cancelled or modified
    occurrences (EXDATE / RECURRENCE-ID) are honoured.
    """

    def __init__(self, calendarurl, username, password, start, end, verifySSL=True):
        """Constructor.

        :param calendarurl: http(s) URL or local file path of the ics source
        :param username: basic-auth user name (only used for http sources)
        :param password: basic-auth password (only used for http sources)
        :param start: timezone-aware datetime; lower bound of the event window
        :param end: timezone-aware datetime; upper bound of the event window
        :param verifySSL: whether to verify TLS certificates when downloading
        """
        self.__log = logging.getLogger('Tube4Droid')
        self.__calendarurl = calendarurl
        self.__username = username
        self.__password = password
        self.__start = start
        self.__end = end
        self.__verifySSL = verifySSL
        # Example proxy configuration kept for reference:
        # self.__proxies = {'http': 'http://10.10.1.10:3128', 'https': 'http://10.10.1.10:1080'}
        self.__proxies = {}

    def fetchCalendarEvents(self):
        """Download (or open) the ical file and parse it for events.

        :return: a list of CalendarEvents within the configured window
        """
        if self.__calendarurl.startswith('http'):
            self.__log.debug('Verify SSL is %s', str(self.__verifySSL))
            response = requests.get(self.__calendarurl, auth=(self.__username, self.__password),
                                    stream=True, verify=self.__verifySSL, proxies=self.__proxies)
            if not response.ok:
                self.__log.error('Something went terribly wrong: %s %s', response.status_code, response.reason)
                sys.exit(3)
            # Buffer the payload in a temp file before handing it to icalendar.
            with tempfile.NamedTemporaryFile('w+b') as tmp:
                self.__log.debug('Saving calendar to file %s', tmp.name)
                tmp.write(response.content)
                tmp.seek(0)
                cal = Calendar.from_ical(tmp.read())
        else:
            # codecs.open with errors='ignore' tolerates undecodable bytes
            # (works on both Python 2 and 3, unlike plain open()).
            with codecs.open(self.__calendarurl, 'r', encoding='utf-8', errors='ignore') as calfile:
                cal = Calendar.from_ical(calfile.read())
        return self.prepareWorkingUnits(cal)

    def prepareWorkingUnits(self, cal):
        """Walk over every ``vevent`` of the calendar and collect events.

        :param cal: the icalendar ``Calendar`` to parse
        :return: a list of ``CalendarEvent`` with recurrence exceptions applied
        """
        itcEvents = []
        exceptions = {}
        totalCalendarItem = len(cal.subcomponents)
        treatedCalendarItems = 0
        widgets = ['Scanning ical File: ', SimpleProgress(), ' ', Bar('>')]
        pbar = ProgressBar(widgets=widgets, max_value=totalCalendarItem).start()
        for event in cal.walk('vevent'):
            evts = []
            excep = {}
            try:
                treatedCalendarItems += 1
                pbar.update(treatedCalendarItems)
                evts, excep = self.__parseEvent(event)
            except Exception as e:
                # Best effort: log and skip events the parser cannot handle.
                self.__log.error(e)
                self.__log.debug('Found non parseable Event %i', treatedCalendarItems)
            itcEvents.extend(evts)
            # Merge this event's recurrence exceptions into the global map.
            for k, v in excep.items():
                exceptions.setdefault(k, []).extend(v)
        pbar.finish()
        return self.filterRecurrenceEvents(itcEvents, exceptions)

    def __parseEvent(self, event):
        """Parse one ``vevent``.

        :param event: the icalendar event to parse
        :return: tuple ``(events, exceptions)`` where *events* is a list of
                 ``CalendarEvent`` and *exceptions* maps a UID to the list of
                 RECURRENCE-ID datetimes overriding occurrences of that series
        """
        foundEvents = []
        foundExceptions = {}
        start = event['DTSTART'].dt
        # Full-day events carry a date (not a datetime) and are ignored.
        if not isinstance(start, datetime.datetime):
            # BUGFIX: always return the (events, exceptions) tuple so the
            # caller's unpacking does not raise on full-day events.
            return foundEvents, foundExceptions
        origtzinfo = start.tzinfo
        end = event['DTEND'].dt
        calEvent = CalendarEvent()
        calEvent.startdatetime = start
        calEvent.enddatetime = end
        if 'SUMMARY' in event:
            calEvent.comment = event['SUMMARY']
        calEvent.UUID = event['UID']
        # Recurring events: expand the RRULE within the configured window.
        # See https://nylas.com/blog/rrules/ for how cancelled and modified
        # occurrences are represented.
        if 'RRULE' in event:
            # rrule works on naive datetimes expressed in the event's timezone.
            naivestart = self.__start.astimezone(origtzinfo).replace(tzinfo=None)
            naiveend = self.__end.astimezone(origtzinfo).replace(tzinfo=None)
            rules = self.__parse_rrule(event, start)
            rEvents = rules.between(naivestart, naiveend)
            if 'DESCRIPTION' in event:
                calEvent.remarks = event['DESCRIPTION']
            for ev in rEvents:
                calEvent.startdatetime = pytz.timezone(origtzinfo.zone).localize(ev)
                calEvent.enddatetime = calEvent.startdatetime + (end - start)
                foundEvents.append(calEvent)
                calEvent = calEvent.copy()
        # Non-recurring events
        else:
            # An event carrying a RECURRENCE-ID overrides one occurrence of a
            # recurring series with the same UID; record it as an exception.
            if 'RECURRENCE-ID' in event:
                calEvent.recurrenceId = event['RECURRENCE-ID'].dt
                # BUGFIX: accumulate into foundExceptions — the original
                # membership test against foundEvents could never match,
                # since foundEvents holds CalendarEvent objects, not UIDs.
                if calEvent.UUID in foundExceptions:
                    foundExceptions[calEvent.UUID].append(calEvent.recurrenceId)
                else:
                    foundExceptions[calEvent.UUID] = [calEvent.recurrenceId]
            if self.__start <= start <= self.__end:
                if 'DESCRIPTION' in event:
                    calEvent.remarks = event['DESCRIPTION']
                foundEvents.append(calEvent)
        return foundEvents, foundExceptions

    def filterRecurrenceEvents(self, events, exceptions):
        """Drop computed recurrence occurrences that were overridden.

        A modified occurrence of a recurring event keeps the series' UID and
        points at the overridden occurrence via RECURRENCE-ID (see
        https://nylas.com/blog/rrules/).  Computed occurrences whose start
        matches such an exception are removed in favour of the override.

        :param events: list of events to filter for modified recurrence
        :param exceptions: dict mapping UID -> list of overridden start datetimes
        :return: the filtered list of events
        """
        cleanedEvents = []
        for ev in events:
            # Only computed (non-override) recurring occurrences can be superseded.
            if ev.recurrenceId is None and ev.UUID is not None:
                if ev.UUID in exceptions and ev.startdatetime in exceptions[ev.UUID]:
                    continue
                cleanedEvents.append(ev)
            else:
                cleanedEvents.append(ev)
        return cleanedEvents

    def __parse_rrule(self, event, start):
        """Build a dateutil ``rruleset`` from the event's RRULE/EXDATE lines.

        Adapted from
        http://codereview.stackexchange.com/questions/137985/parse-rrule-icalendar-entries
        """
        rules_text = '\n'.join(line for line in event.content_lines() if line.startswith('RRULE'))
        rules = rrule.rruleset()
        start = start.replace(tzinfo=None)
        rule = rrule.rrulestr(rules_text, dtstart=start)
        # Some feeds omit tzinfo on UNTIL; normalise everything to naive datetimes.
        if rule._until and rule._until.tzinfo is not None:
            rule._until = rule._until.replace(tzinfo=None)
        rules.rrule(rule)
        excludedDates = event.get('exdate')
        if not isinstance(excludedDates, list):  # a single EXDATE is not wrapped in a list
            excludedDates = [excludedDates]
        for exdate in excludedDates:
            try:
                rules.exdate(exdate.dts[0].dt.replace(tzinfo=None))
            except AttributeError:  # entry may be None when no EXDATE exists
                pass
        return rules
| 41.268519 | 158 | 0.615549 |
130a2d68bd8e8a091c5fa01a6fb3f018c14c1825 | 1,392 | h | C | inc/napa/stl/string.h | localh0rzd/napajs | b3f5e67dd20caef5d96ea8c3e3d4c542fcb9a431 | [
"MIT"
] | 9,088 | 2017-08-08T22:28:16.000Z | 2019-05-05T14:57:12.000Z | inc/napa/stl/string.h | localh0rzd/napajs | b3f5e67dd20caef5d96ea8c3e3d4c542fcb9a431 | [
"MIT"
] | 172 | 2017-08-09T21:32:15.000Z | 2019-05-03T21:21:05.000Z | inc/napa/stl/string.h | localh0rzd/napajs | b3f5e67dd20caef5d96ea8c3e3d4c542fcb9a431 | [
"MIT"
] | 370 | 2017-08-09T04:58:14.000Z | 2019-04-13T18:59:29.000Z | // Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT license.
#pragma once
#include <napa/stl/allocator.h>
#include <string>
namespace napa {
namespace stl {
template <typename CharT, typename Traits = std::char_traits<CharT>>
using BasicString = std::basic_string<CharT, Traits, napa::stl::Allocator<CharT>>;
typedef BasicString<char> String;
typedef BasicString<char16_t> U16String;
}
}
#if defined(__GNUC__) && !defined(__clang__)
namespace std {
// std::hash specialization for napa::stl::String.
template<>
struct hash<napa::stl::String> : public __hash_base<size_t, napa::stl::String> {
size_t operator()(const napa::stl::String& s) const noexcept {
return std::_Hash_impl::hash(s.data(), s.length() * sizeof(char));
}
};
template<>
struct __is_fast_hash<hash<napa::stl::String>> : std::false_type {
};
// std::hash specialization for napa::stl::U16String.
template<>
struct hash<napa::stl::U16String> : public __hash_base<size_t, napa::stl::U16String> {
size_t operator()(const napa::stl::U16String& s) const noexcept {
return std::_Hash_impl::hash(s.data(), s.length() * sizeof(char16_t));
}
};
template<>
struct __is_fast_hash<hash<napa::stl::U16String>> : std::false_type {
};
}
#endif
| 29 | 90 | 0.646552 |
24fcc0b555be7215228fe556299f027d26426d2b | 18,496 | go | Go | api/network/certificates_api_mocked.go | ingrammicro/cio | fc90708cf1e8c29c2198cc805d35f3a58b25e645 | [
"Apache-2.0"
] | 1 | 2020-09-09T21:29:52.000Z | 2020-09-09T21:29:52.000Z | api/network/certificates_api_mocked.go | ingrammicro/cio | fc90708cf1e8c29c2198cc805d35f3a58b25e645 | [
"Apache-2.0"
] | 51 | 2019-07-03T09:57:25.000Z | 2022-02-15T16:47:03.000Z | api/network/certificates_api_mocked.go | ingrammicro/cio | fc90708cf1e8c29c2198cc805d35f3a58b25e645 | [
"Apache-2.0"
] | null | null | null | // Copyright (c) 2017-2021 Ingram Micro Inc.
package network
import (
"encoding/json"
"fmt"
"testing"
"github.com/ingrammicro/cio/api/types"
"github.com/ingrammicro/cio/utils"
"github.com/stretchr/testify/assert"
)
// ListCertificatesMocked stubs the certificates list endpoint with a 200
// response and verifies that ListCertificates returns the given fixture.
func ListCertificatesMocked(t *testing.T, loadBalancerID string, certificatesIn []*types.Certificate) []*types.Certificate {
	assert := assert.New(t)

	// Build the service on top of a mocked Concerto transport.
	mockService := &utils.MockConcertoService{}
	certSvc, err := NewCertificateService(mockService)
	assert.Nil(err, "Couldn't load certificate service")
	assert.NotNil(certSvc, "Certificate service not instanced")

	// Serialize the fixture so the mock can hand it back.
	payload, err := json.Marshal(certificatesIn)
	assert.Nil(err, "Certificates test data corrupted")

	// Stub the GET call and exercise the service.
	mockService.On("Get", fmt.Sprintf(APIPathNetworkLoadBalancerCertificates, loadBalancerID)).Return(payload, 200, nil)
	out, err := certSvc.ListCertificates(loadBalancerID)
	assert.Nil(err, "Error getting certificates")
	assert.Equal(certificatesIn, out, "ListCertificates returned different certificates")

	return out
}
// ListCertificatesFailErrMocked makes the mocked transport return an error
// and verifies that ListCertificates propagates it with a nil result.
func ListCertificatesFailErrMocked(t *testing.T, loadBalancerID string, certificatesIn []*types.Certificate) []*types.Certificate {
	assert := assert.New(t)

	// Build the service on top of a mocked Concerto transport.
	mockService := &utils.MockConcertoService{}
	certSvc, err := NewCertificateService(mockService)
	assert.Nil(err, "Couldn't load certificate service")
	assert.NotNil(certSvc, "Certificate service not instanced")

	// Serialize the fixture (its content is irrelevant for the error path).
	payload, err := json.Marshal(certificatesIn)
	assert.Nil(err, "Certificates test data corrupted")

	// Stub the GET call with a transport-level error.
	mockService.On("Get", fmt.Sprintf(APIPathNetworkLoadBalancerCertificates, loadBalancerID)).
		Return(payload, 200, fmt.Errorf("mocked error"))
	out, err := certSvc.ListCertificates(loadBalancerID)
	assert.NotNil(err, "We are expecting an error")
	assert.Nil(out, "Expecting nil output")
	assert.Equal(err.Error(), "mocked error", "Error should be 'mocked error'")

	return out
}
// ListCertificatesFailStatusMocked makes the mocked transport answer with a
// non-2xx status code and verifies that ListCertificates reports it.
func ListCertificatesFailStatusMocked(t *testing.T, loadBalancerID string, certificatesIn []*types.Certificate) []*types.Certificate {
	assert := assert.New(t)

	// Build the service on top of a mocked Concerto transport.
	mockService := &utils.MockConcertoService{}
	certSvc, err := NewCertificateService(mockService)
	assert.Nil(err, "Couldn't load certificate service")
	assert.NotNil(certSvc, "Certificate service not instanced")

	// Serialize the fixture (content is irrelevant for the status-code path).
	payload, err := json.Marshal(certificatesIn)
	assert.Nil(err, "Certificates test data corrupted")

	// Stub the GET call with an HTTP 499 status.
	mockService.On("Get", fmt.Sprintf(APIPathNetworkLoadBalancerCertificates, loadBalancerID)).Return(payload, 499, nil)
	out, err := certSvc.ListCertificates(loadBalancerID)
	assert.NotNil(err, "We are expecting an status code error")
	assert.Nil(out, "Expecting nil output")
	assert.Contains(err.Error(), "499", "Error should contain http code 499")

	return out
}
// ListCertificatesFailJSONMocked feeds the service an unparseable payload
// and verifies that ListCertificates surfaces the unmarshalling error.
func ListCertificatesFailJSONMocked(t *testing.T, loadBalancerID string, certificatesIn []*types.Certificate) []*types.Certificate {
	assert := assert.New(t)

	// Build the service on top of a mocked Concerto transport.
	mockService := &utils.MockConcertoService{}
	certSvc, err := NewCertificateService(mockService)
	assert.Nil(err, "Couldn't load certificate service")
	assert.NotNil(certSvc, "Certificate service not instanced")

	// Deliberately broken JSON payload.
	payload := []byte{10, 20, 30}

	// Stub the GET call and exercise the service.
	mockService.On("Get", fmt.Sprintf(APIPathNetworkLoadBalancerCertificates, loadBalancerID)).Return(payload, 200, nil)
	out, err := certSvc.ListCertificates(loadBalancerID)
	assert.NotNil(err, "We are expecting a marshalling error")
	assert.Nil(out, "Expecting nil output")
	assert.Contains(err.Error(), "invalid character", "Error message should include the string 'invalid character'")

	return out
}
// GetCertificateMocked stubs the single-certificate endpoint with a 200
// response and verifies that GetCertificate returns the given fixture.
func GetCertificateMocked(t *testing.T, loadBalancerID string, certificateIn *types.Certificate) *types.Certificate {
	assert := assert.New(t)

	// Build the service on top of a mocked Concerto transport.
	mockService := &utils.MockConcertoService{}
	certSvc, err := NewCertificateService(mockService)
	assert.Nil(err, "Couldn't load certificate service")
	assert.NotNil(certSvc, "Certificate service not instanced")

	// Serialize the fixture so the mock can hand it back.
	payload, err := json.Marshal(certificateIn)
	assert.Nil(err, "Certificate test data corrupted")

	// Stub the GET call and exercise the service.
	mockService.On("Get", fmt.Sprintf(APIPathNetworkLoadBalancerCertificate, loadBalancerID, certificateIn.ID)).
		Return(payload, 200, nil)
	out, err := certSvc.GetCertificate(loadBalancerID, certificateIn.ID)
	assert.Nil(err, "Error getting certificate")
	assert.Equal(*certificateIn, *out, "GetCertificate returned different certificate")

	return out
}
// GetCertificateFailErrMocked makes the mocked transport return an error
// and verifies that GetCertificate propagates it with a nil result.
func GetCertificateFailErrMocked(t *testing.T, loadBalancerID string, certificateIn *types.Certificate) *types.Certificate {
	assert := assert.New(t)

	// Build the service on top of a mocked Concerto transport.
	mockService := &utils.MockConcertoService{}
	certSvc, err := NewCertificateService(mockService)
	assert.Nil(err, "Couldn't load certificate service")
	assert.NotNil(certSvc, "Certificate service not instanced")

	// Serialize the fixture (its content is irrelevant for the error path).
	payload, err := json.Marshal(certificateIn)
	assert.Nil(err, "Certificate test data corrupted")

	// Stub the GET call with a transport-level error.
	mockService.On("Get", fmt.Sprintf(APIPathNetworkLoadBalancerCertificate, loadBalancerID, certificateIn.ID)).
		Return(payload, 200, fmt.Errorf("mocked error"))
	out, err := certSvc.GetCertificate(loadBalancerID, certificateIn.ID)
	assert.NotNil(err, "We are expecting an error")
	assert.Nil(out, "Expecting nil output")
	assert.Equal(err.Error(), "mocked error", "Error should be 'mocked error'")

	return out
}
// GetCertificateFailStatusMocked test mocked function
func GetCertificateFailStatusMocked(
t *testing.T,
loadBalancerID string,
certificateIn *types.Certificate,
) *types.Certificate {
assert := assert.New(t)
// wire up
cs := &utils.MockConcertoService{}
ds, err := NewCertificateService(cs)
assert.Nil(err, "Couldn't load certificate service")
assert.NotNil(ds, "Certificate service not instanced")
// to json
dIn, err := json.Marshal(certificateIn)
assert.Nil(err, "Certificate test data corrupted")
// call service
cs.On("Get", fmt.Sprintf(APIPathNetworkLoadBalancerCertificate, loadBalancerID, certificateIn.ID)).
Return(dIn, 499, nil)
certificateOut, err := ds.GetCertificate(loadBalancerID, certificateIn.ID)
assert.NotNil(err, "We are expecting an status code error")
assert.Nil(certificateOut, "Expecting nil output")
assert.Contains(err.Error(), "499", "Error should contain http code 499")
return certificateOut
}
// GetCertificateFailJSONMocked test mocked function
func GetCertificateFailJSONMocked(
t *testing.T,
loadBalancerID string,
certificateIn *types.Certificate,
) *types.Certificate {
assert := assert.New(t)
// wire up
cs := &utils.MockConcertoService{}
ds, err := NewCertificateService(cs)
assert.Nil(err, "Couldn't load certificate service")
assert.NotNil(ds, "Certificate service not instanced")
// wrong json
dIn := []byte{10, 20, 30}
// call service
cs.On("Get", fmt.Sprintf(APIPathNetworkLoadBalancerCertificate, loadBalancerID, certificateIn.ID)).
Return(dIn, 200, nil)
certificateOut, err := ds.GetCertificate(loadBalancerID, certificateIn.ID)
assert.NotNil(err, "We are expecting a marshalling error")
assert.Nil(certificateOut, "Expecting nil output")
assert.Contains(err.Error(), "invalid character", "Error message should include the string 'invalid character'")
return certificateOut
}
// CreateCertificateMocked test mocked function
func CreateCertificateMocked(t *testing.T, loadBalancerID string, certificateIn *types.Certificate) *types.Certificate {
assert := assert.New(t)
// wire up
cs := &utils.MockConcertoService{}
ds, err := NewCertificateService(cs)
assert.Nil(err, "Couldn't load certificate service")
assert.NotNil(ds, "Certificate service not instanced")
// convertMap
mapIn, err := utils.ItemConvertParams(*certificateIn)
assert.Nil(err, "Certificate test data corrupted")
// to json
dOut, err := json.Marshal(certificateIn)
assert.Nil(err, "Certificate test data corrupted")
// call service
cs.On("Post", fmt.Sprintf(APIPathNetworkLoadBalancerCertificates, loadBalancerID), mapIn).Return(dOut, 200, nil)
certificateOut, err := ds.CreateCertificate(loadBalancerID, mapIn)
assert.Nil(err, "Error creating certificate")
assert.Equal(certificateIn, certificateOut, "CreateCertificate returned different certificate")
return certificateOut
}
// CreateCertificateFailErrMocked test mocked function
func CreateCertificateFailErrMocked(
t *testing.T,
loadBalancerID string,
certificateIn *types.Certificate,
) *types.Certificate {
assert := assert.New(t)
// wire up
cs := &utils.MockConcertoService{}
ds, err := NewCertificateService(cs)
assert.Nil(err, "Couldn't load certificate service")
assert.NotNil(ds, "Certificate service not instanced")
// convertMap
mapIn, err := utils.ItemConvertParams(*certificateIn)
assert.Nil(err, "Certificate test data corrupted")
// to json
dOut, err := json.Marshal(certificateIn)
assert.Nil(err, "Certificate test data corrupted")
// call service
cs.On("Post", fmt.Sprintf(APIPathNetworkLoadBalancerCertificates, loadBalancerID), mapIn).
Return(dOut, 200, fmt.Errorf("mocked error"))
certificateOut, err := ds.CreateCertificate(loadBalancerID, mapIn)
assert.NotNil(err, "We are expecting an error")
assert.Nil(certificateOut, "Expecting nil output")
assert.Equal(err.Error(), "mocked error", "Error should be 'mocked error'")
return certificateOut
}
// CreateCertificateFailStatusMocked test mocked function
func CreateCertificateFailStatusMocked(
t *testing.T,
loadBalancerID string,
certificateIn *types.Certificate,
) *types.Certificate {
assert := assert.New(t)
// wire up
cs := &utils.MockConcertoService{}
ds, err := NewCertificateService(cs)
assert.Nil(err, "Couldn't load certificate service")
assert.NotNil(ds, "Certificate service not instanced")
// convertMap
mapIn, err := utils.ItemConvertParams(*certificateIn)
assert.Nil(err, "Certificate test data corrupted")
// to json
dOut, err := json.Marshal(certificateIn)
assert.Nil(err, "Certificate test data corrupted")
// call service
cs.On("Post", fmt.Sprintf(APIPathNetworkLoadBalancerCertificates, loadBalancerID), mapIn).Return(dOut, 499, nil)
certificateOut, err := ds.CreateCertificate(loadBalancerID, mapIn)
assert.NotNil(err, "We are expecting an status code error")
assert.Nil(certificateOut, "Expecting nil output")
assert.Contains(err.Error(), "499", "Error should contain http code 499")
return certificateOut
}
// CreateCertificateFailJSONMocked test mocked function
func CreateCertificateFailJSONMocked(
t *testing.T,
loadBalancerID string,
certificateIn *types.Certificate,
) *types.Certificate {
assert := assert.New(t)
// wire up
cs := &utils.MockConcertoService{}
ds, err := NewCertificateService(cs)
assert.Nil(err, "Couldn't load certificate service")
assert.NotNil(ds, "Certificate service not instanced")
// convertMap
mapIn, err := utils.ItemConvertParams(*certificateIn)
assert.Nil(err, "Certificate test data corrupted")
// wrong json
dIn := []byte{10, 20, 30}
// call service
cs.On("Post", fmt.Sprintf(APIPathNetworkLoadBalancerCertificates, loadBalancerID), mapIn).Return(dIn, 200, nil)
certificateOut, err := ds.CreateCertificate(loadBalancerID, mapIn)
assert.NotNil(err, "We are expecting a marshalling error")
assert.Nil(certificateOut, "Expecting nil output")
assert.Contains(err.Error(), "invalid character", "Error message should include the string 'invalid character'")
return certificateOut
}
// UpdateCertificateMocked test mocked function
func UpdateCertificateMocked(t *testing.T, loadBalancerID string, certificateIn *types.Certificate) *types.Certificate {
assert := assert.New(t)
// wire up
cs := &utils.MockConcertoService{}
ds, err := NewCertificateService(cs)
assert.Nil(err, "Couldn't load certificate service")
assert.NotNil(ds, "Certificate service not instanced")
// convertMap
mapIn, err := utils.ItemConvertParams(*certificateIn)
assert.Nil(err, "Certificate test data corrupted")
// to json
dOut, err := json.Marshal(certificateIn)
assert.Nil(err, "Certificate test data corrupted")
// call service
cs.On("Put", fmt.Sprintf(APIPathNetworkLoadBalancerCertificate, loadBalancerID, certificateIn.ID), mapIn).
Return(dOut, 200, nil)
certificateOut, err := ds.UpdateCertificate(loadBalancerID, certificateIn.ID, mapIn)
assert.Nil(err, "Error updating certificate")
assert.Equal(certificateIn, certificateOut, "UpdateCertificate returned different certificate")
return certificateOut
}
// UpdateCertificateFailErrMocked test mocked function
func UpdateCertificateFailErrMocked(
t *testing.T,
loadBalancerID string,
certificateIn *types.Certificate,
) *types.Certificate {
assert := assert.New(t)
// wire up
cs := &utils.MockConcertoService{}
ds, err := NewCertificateService(cs)
assert.Nil(err, "Couldn't load certificate service")
assert.NotNil(ds, "Certificate service not instanced")
// convertMap
mapIn, err := utils.ItemConvertParams(*certificateIn)
assert.Nil(err, "Certificate test data corrupted")
// to json
dOut, err := json.Marshal(certificateIn)
assert.Nil(err, "Certificate test data corrupted")
// call service
cs.On("Put", fmt.Sprintf(APIPathNetworkLoadBalancerCertificate, loadBalancerID, certificateIn.ID), mapIn).
Return(dOut, 200, fmt.Errorf("mocked error"))
certificateOut, err := ds.UpdateCertificate(loadBalancerID, certificateIn.ID, mapIn)
assert.NotNil(err, "We are expecting an error")
assert.Nil(certificateOut, "Expecting nil output")
assert.Equal(err.Error(), "mocked error", "Error should be 'mocked error'")
return certificateOut
}
// UpdateCertificateFailStatusMocked test mocked function
func UpdateCertificateFailStatusMocked(
t *testing.T,
loadBalancerID string,
certificateIn *types.Certificate,
) *types.Certificate {
assert := assert.New(t)
// wire up
cs := &utils.MockConcertoService{}
ds, err := NewCertificateService(cs)
assert.Nil(err, "Couldn't load certificate service")
assert.NotNil(ds, "Certificate service not instanced")
// convertMap
mapIn, err := utils.ItemConvertParams(*certificateIn)
assert.Nil(err, "Certificate test data corrupted")
// to json
dOut, err := json.Marshal(certificateIn)
assert.Nil(err, "Certificate test data corrupted")
// call service
cs.On("Put", fmt.Sprintf(APIPathNetworkLoadBalancerCertificate, loadBalancerID, certificateIn.ID), mapIn).
Return(dOut, 499, nil)
certificateOut, err := ds.UpdateCertificate(loadBalancerID, certificateIn.ID, mapIn)
assert.NotNil(err, "We are expecting an status code error")
assert.Nil(certificateOut, "Expecting nil output")
assert.Contains(err.Error(), "499", "Error should contain http code 499")
return certificateOut
}
// UpdateCertificateFailJSONMocked test mocked function
func UpdateCertificateFailJSONMocked(
t *testing.T,
loadBalancerID string,
certificateIn *types.Certificate,
) *types.Certificate {
assert := assert.New(t)
// wire up
cs := &utils.MockConcertoService{}
ds, err := NewCertificateService(cs)
assert.Nil(err, "Couldn't load certificate service")
assert.NotNil(ds, "Certificate service not instanced")
// convertMap
mapIn, err := utils.ItemConvertParams(*certificateIn)
assert.Nil(err, "Certificate test data corrupted")
// wrong json
dIn := []byte{10, 20, 30}
// call service
cs.On("Put", fmt.Sprintf(APIPathNetworkLoadBalancerCertificate, loadBalancerID, certificateIn.ID), mapIn).
Return(dIn, 200, nil)
certificateOut, err := ds.UpdateCertificate(loadBalancerID, certificateIn.ID, mapIn)
assert.NotNil(err, "We are expecting a marshalling error")
assert.Nil(certificateOut, "Expecting nil output")
assert.Contains(err.Error(), "invalid character", "Error message should include the string 'invalid character'")
return certificateOut
}
// DeleteCertificateMocked test mocked function
func DeleteCertificateMocked(t *testing.T, loadBalancerID string, certificateIn *types.Certificate) {
assert := assert.New(t)
// wire up
cs := &utils.MockConcertoService{}
ds, err := NewCertificateService(cs)
assert.Nil(err, "Couldn't load certificate service")
assert.NotNil(ds, "Certificate service not instanced")
// to json
dIn, err := json.Marshal(certificateIn)
assert.Nil(err, "Certificate test data corrupted")
// call service
cs.On("Delete", fmt.Sprintf(APIPathNetworkLoadBalancerCertificate, loadBalancerID, certificateIn.ID)).
Return(dIn, 200, nil)
err = ds.DeleteCertificate(loadBalancerID, certificateIn.ID)
assert.Nil(err, "Error deleting certificate")
}
// DeleteCertificateFailErrMocked test mocked function
func DeleteCertificateFailErrMocked(t *testing.T, loadBalancerID string, certificateIn *types.Certificate) {
assert := assert.New(t)
// wire up
cs := &utils.MockConcertoService{}
ds, err := NewCertificateService(cs)
assert.Nil(err, "Couldn't load certificate service")
assert.NotNil(ds, "Certificate service not instanced")
// to json
dIn, err := json.Marshal(certificateIn)
assert.Nil(err, "Certificate test data corrupted")
// call service
cs.On("Delete", fmt.Sprintf(APIPathNetworkLoadBalancerCertificate, loadBalancerID, certificateIn.ID)).
Return(dIn, 200, fmt.Errorf("mocked error"))
err = ds.DeleteCertificate(loadBalancerID, certificateIn.ID)
assert.NotNil(err, "We are expecting an error")
assert.Equal(err.Error(), "mocked error", "Error should be 'mocked error'")
}
// DeleteCertificateFailStatusMocked test mocked function
func DeleteCertificateFailStatusMocked(t *testing.T, loadBalancerID string, certificateIn *types.Certificate) {
assert := assert.New(t)
// wire up
cs := &utils.MockConcertoService{}
ds, err := NewCertificateService(cs)
assert.Nil(err, "Couldn't load certificate service")
assert.NotNil(ds, "Certificate service not instanced")
// to json
dIn, err := json.Marshal(certificateIn)
assert.Nil(err, "Certificate test data corrupted")
// call service
cs.On("Delete", fmt.Sprintf(APIPathNetworkLoadBalancerCertificate, loadBalancerID, certificateIn.ID)).
Return(dIn, 499, nil)
err = ds.DeleteCertificate(loadBalancerID, certificateIn.ID)
assert.NotNil(err, "We are expecting an status code error")
assert.Contains(err.Error(), "499", "Error should contain http code 499")
}
| 31.50937 | 120 | 0.7613 |
2a173275a9b8209961d6e096909f0fddcb1c2333 | 1,124 | java | Java | src/test/java/com/liferay/faces/osgi/weaver/internal/TestCheckClassAdapter.java | stiemannkj1/liferay-faces-osgi-weaver | 5bd3968f6386fb4fe53d58958084349a33be4f0c | [
"Apache-2.0"
] | 1 | 2019-01-18T14:50:19.000Z | 2019-01-18T14:50:19.000Z | src/test/java/com/liferay/faces/osgi/weaver/internal/TestCheckClassAdapter.java | stiemannkj1/liferay-faces-osgi-weaver | 5bd3968f6386fb4fe53d58958084349a33be4f0c | [
"Apache-2.0"
] | null | null | null | src/test/java/com/liferay/faces/osgi/weaver/internal/TestCheckClassAdapter.java | stiemannkj1/liferay-faces-osgi-weaver | 5bd3968f6386fb4fe53d58958084349a33be4f0c | [
"Apache-2.0"
] | null | null | null | /**
* Copyright (c) 2000-2018 Liferay, Inc. All rights reserved.
*
* This library is free software; you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the Free
* Software Foundation; either version 2.1 of the License, or (at your option)
* any later version.
*
* This library is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
* details.
*/
package com.liferay.faces.osgi.weaver.internal;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.util.CheckClassAdapter;
/**
* @author Kyle Stiemann
*/
public abstract class TestCheckClassAdapter extends CheckClassAdapter {
public TestCheckClassAdapter(ClassVisitor cv) {
super(cv);
}
public TestCheckClassAdapter(ClassVisitor cv, boolean checkDataFlow) {
super(cv, checkDataFlow);
}
public TestCheckClassAdapter(int api, ClassVisitor cv, boolean checkDataFlow) {
super(api, cv, checkDataFlow);
}
}
| 30.378378 | 80 | 0.762456 |
577d7abe3647e772ba3229ff8a6fa9f558dc11db | 1,433 | h | C | src/http_server/HttpServer.h | rlong/osx.app.McRemote | 393a8fc62f411af3198973d5cda9ad12a48e2edd | [
"MIT"
] | 1 | 2018-02-25T12:16:18.000Z | 2018-02-25T12:16:18.000Z | src/http_server/HttpServer.h | rlong/osx.app.McRemote | 393a8fc62f411af3198973d5cda9ad12a48e2edd | [
"MIT"
] | null | null | null | src/http_server/HttpServer.h | rlong/osx.app.McRemote | 393a8fc62f411af3198973d5cda9ad12a48e2edd | [
"MIT"
] | null | null | null | // Copyright (c) 2017 Richard Long
//
// Released under the MIT license ( http://opensource.org/licenses/MIT )
//
#import <Foundation/Foundation.h>
#import "HttpServerContext.h"
#import "HLHttpSecurityJanitor.h"
#import "HLHttpSecurityManager.h"
#import "HLRootRequestHandler.h"
#import "HLSecurityConfiguration.h"
#import "HLWebServer.h"
@interface HttpServer : NSObject {
// runningInTheDock
bool _runningInTheDock;
//@property (nonatomic) bool runningInTheDock;
//@synthesize runningInTheDock = _runningInTheDock;
// runningInTheStatusBar
bool _runningInTheStatusBar;
//@property (nonatomic) bool runningInTheStatusBar;
//@synthesize runningInTheStatusBar = _runningInTheStatusBar;
// rootProcessor
HLRootRequestHandler* _rootProcessor;
//@property (nonatomic, retain) RootProcessor* rootProcessor;
//@synthesize rootProcessor = _rootProcessor;
// serverObjects
HttpServerContext* _serverObjects;
//@property (nonatomic, retain) RGServerObjects* serverObjects;
//@synthesize serverObjects = _serverObjects;
// webServer
HLWebServer* _webServer;
//@property (nonatomic, retain) HLWebServer* webServer;
//@synthesize webServer = _webServer;
}
-(void)start;
#pragma mark instance lifecycle
-(id)initWithRemoteGatewayObjects:(HttpServerContext*)serverObjects runningInTheDock:(bool)runningInTheDock runningInTheStatusBar:(bool)runningInTheStatusBar;
@end
| 22.046154 | 158 | 0.764131 |
702599af5fd15eee2a3eacb56f4b3ec4763e0b1e | 5,298 | lua | Lua | .config/awesome/module/exit-screen/init.lua | Jeremie1001/awesome-dotfiles | b0f67d04f95daa1af89f233cda73665b5ddbe992 | [
"MIT"
] | 22 | 2021-02-03T00:00:25.000Z | 2022-02-25T01:49:29.000Z | .config/awesome/module/exit-screen/init.lua | Jeremie1001/awesome-dotfiles | b0f67d04f95daa1af89f233cda73665b5ddbe992 | [
"MIT"
] | null | null | null | .config/awesome/module/exit-screen/init.lua | Jeremie1001/awesome-dotfiles | b0f67d04f95daa1af89f233cda73665b5ddbe992 | [
"MIT"
] | 6 | 2021-02-05T18:02:48.000Z | 2022-02-17T15:03:17.000Z | --DEPENDENCIES
--i3lock-fancy
local awful = require('awful')
local gears = require('gears')
local wibox = require('wibox')
local icons = require('themes.icons')
local clickable_container = require('widget.clickable-container')
local colors = require('themes.dracula.colors')
local apps = require('config.apps')
local dpi = require('beautiful').xresources.apply_dpi
-- Appearance
local icon_size = dpi(140)
local screen_geometry = awful.screen.focused().geometry
local greeter_message = wibox.widget {
markup = 'Goodbye Jeremie!',
font = 'Inter Regular 52',
align = 'center',
valign = 'center',
forced_width = screen_geometry.width,
widget = wibox.widget.textbox
}
local buildButton = function(icon)
local buttonWidget = wibox.widget {
{
{
{
{
image = icon,
widget = wibox.widget.imagebox
},
margins = dpi(16),
widget = wibox.container.margin
},
shape = gears.shape.circle,
bg = 'transparent',
widget = wibox.container.background
},
forced_width = icon_size,
forced_height = icon_size,
widget = clickable_container
},
left = dpi(24),
right = dpi(24),
widget = wibox.container.margin
}
return buttonWidget
end
function suspend_command()
exit_screen_hide()
awful.spawn.with_shell(apps.default.lock .. ' & systemctl suspend')
end
function exit_command()
_G.awesome.quit()
end
function lock_command()
exit_screen_hide()
awful.spawn.with_shell('sleep 1 && ' .. apps.default.lock)
end
function poweroff_command()
awful.spawn.with_shell('poweroff')
awful.keygrabber.stop(_G.exit_screen_grabber)
end
function reboot_command()
awful.spawn.with_shell('reboot')
awful.keygrabber.stop(_G.exit_screen_grabber)
end
local poweroff = buildButton(icons.power, 'Shutdown')
poweroff:connect_signal(
'button::release',
function()
poweroff_command()
end
)
local reboot = buildButton(icons.restart, 'Restart')
reboot:connect_signal(
'button::release',
function()
reboot_command()
end
)
local suspend = buildButton(icons.sleep, 'Sleep')
suspend:connect_signal(
'button::release',
function()
suspend_command()
end
)
local exit = buildButton(icons.logout, 'Logout')
exit:connect_signal(
'button::release',
function()
exit_command()
end
)
local lock = buildButton(icons.lock, 'Lock')
lock:connect_signal(
'button::release',
function()
lock_command()
end
)
-- Get screen geometry
--local screen_geometry = awful.screen.focused().geometry
-- Create the widget
local exit_screen = function(s)
s.exit_screen =
wibox(
{
x = s.geometry.x,
y = s.geometry.y,
visible = false,
screen = s,
ontop = true,
type = 'splash',
height = s.geometry.height,
width = s.geometry.width,
bg = colors.alpha(colors.selection, 'E0'),
fg = '#FEFEFE'
}
)
s.exit_screen_unfocused =
wibox(
{
x = s.geometry.x,
y = s.geometry.y,
visible = false,
screen = s,
ontop = true,
type = 'splash',
height = s.geometry.height,
width = s.geometry.width,
bg = colors.alpha(colors.selection, 'E0'),
fg = '#FEFEFE'
}
)
local exit_screen_grabber
function exit_screen_unfocused_hide(qqqq)
s.exit_screen_unfocused.visible = false
end
function exit_screen_unfocused_show()
s.exit_screen_unfocused.visible = true
end
function exit_screen_hide()
awful.keygrabber.stop(exit_screen_grabber)
awful.screen.connect_for_each_screen(function(s)
s.exit_screen_unfocused.visible = false
s.exit_screen.visible = false
end)
end
function exit_screen_show()
awful.screen.connect_for_each_screen(function(s)
s.exit_screen_unfocused.visible = true
end)
exit_screen_grabber =
awful.keygrabber.run(
function(_, key, event)
if event == 'release' then
return
end
if key == '1' then
poweroff_command()
elseif key == '2' then
reboot_command()
elseif key == '3' then
suspend_command()
elseif key == '4' then
exit_command()
elseif key == '5' then
lock_command()
elseif key == 'Escape' or key == 'q' or key == 'x' then
exit_screen_hide()
end
end
)
awful.screen.focused().exit_screen.visible = true
end
s.exit_screen:buttons(
gears.table.join(
-- Middle click - Hide exit_screen
awful.button(
{},
2,
function()
exit_screen_hide()
end
),
-- Right click - Hide exit_screen
awful.button(
{},
3,
function()
exit_screen_hide()
end
)
)
)
-- Item placement
s.exit_screen:setup {
nil,
{
nil,
{
poweroff,
reboot,
suspend,
exit,
lock,
layout = wibox.layout.fixed.horizontal
},
nil,
expand = 'none',
layout = wibox.layout.align.vertical
},
nil,
expand = 'none',
layout = wibox.layout.align.horizontal
}
s.exit_screen_unfocused:setup {
nil,
layout = wibox.layout.align.horizontal
}
end
return exit_screen
| 21.10757 | 69 | 0.624198 |
2859c409c9ed88ff8fc202660eea3ae943c69b6f | 1,246 | rb | Ruby | config/routes.rb | MarcusLee143/CS169_Great_Course_Guide | 24fbadaa93833941b80c0db0d7fac8d2d4b8d5bd | [
"MIT"
] | 2 | 2017-11-04T10:18:43.000Z | 2017-11-18T07:06:11.000Z | config/routes.rb | MarcusLee143/CS169_Great_Course_Guide | 24fbadaa93833941b80c0db0d7fac8d2d4b8d5bd | [
"MIT"
] | 24 | 2017-11-04T05:37:53.000Z | 2017-12-04T07:29:28.000Z | config/routes.rb | MarcusLee143/CS169_Great_Course_Guide | 24fbadaa93833941b80c0db0d7fac8d2d4b8d5bd | [
"MIT"
] | 2 | 2017-10-11T04:45:03.000Z | 2017-10-19T23:36:45.000Z | Rails.application.routes.draw do
mount JasmineRails::Engine => '/specs' if defined?(JasmineRails)
# The priority is based upon order of creation: first created -> highest priority.
# See how all your routes lay out with "rake routes".
# You can have the root of your site routed with "root"
root 'login#index'
# General
get "/logout" => "application#logout"
get "/cas/logout" => "login#index" #for testing purposes
get "/welcome" => "application#welcome"
get "/user" => "application#index"
get "/edit" => "application#edit"
post "/update" => "application#update"
post "/create" => 'application#create'
match '/users/email' => 'application#verify', :via => [:get]
post '/updateFilters' => 'application#updateFilters'
get '/about' => 'application#about'
# Professors
resources :professors, only: [:index, :show] do
get :all, :on => :collection
get :distinguished, :on => :collection
end
# Courses
match '/courses/filter' => 'courses#filter', :via => [:get]
resources :courses, only: [:index, :show] do
get :all, :on => :collection
get :schedule, :on => :collection
get :compare, :on => :collection
end
# User Courses
resources :user_courses, only: [:create, :destroy]
end
| 31.948718 | 84 | 0.661316 |
0782bee3e9cc52bce067adc00ffcfef84bf7da8f | 2,564 | sql | SQL | sql/_23_apricot_qa/_02_performance/_02_function_based_index/cases/function_based_index_function_SIN.sql | Zhaojia2019/cubrid-testcases | 475a828e4d7cf74aaf2611fcf791a6028ddd107d | [
"BSD-3-Clause"
] | 9 | 2016-03-24T09:51:52.000Z | 2022-03-23T10:49:47.000Z | sql/_23_apricot_qa/_02_performance/_02_function_based_index/cases/function_based_index_function_SIN.sql | Zhaojia2019/cubrid-testcases | 475a828e4d7cf74aaf2611fcf791a6028ddd107d | [
"BSD-3-Clause"
] | 173 | 2016-04-13T01:16:54.000Z | 2022-03-16T07:50:58.000Z | sql/_23_apricot_qa/_02_performance/_02_function_based_index/cases/function_based_index_function_SIN.sql | Zhaojia2019/cubrid-testcases | 475a828e4d7cf74aaf2611fcf791a6028ddd107d | [
"BSD-3-Clause"
] | 38 | 2016-03-24T17:10:31.000Z | 2021-10-30T22:55:45.000Z | --+ holdcas on;
set system parameters 'dont_reuse_heap_file=yes';
create table t1( a char(1200), b varchar(1200), c nchar(1200), d NCHAR VARYING(1200), e BIT(1200), f BIT VARYING(1200), g int, h SMALLINT, i BIGINT, j NUMERIC, k FLOAT, l DOUBLE, m MONETARY, n DATE, o TIME, p TIMESTAMP, q DATETIME);
insert into t1 values (
'1234567890',
'1234567890',
N'abc',
N'ABC',
B'1111111111',
B'1111111111',
10,
255,
9223372036854775807,
0,
0,
0,
-100,
DATE '2008-10-31',
TIME '00:00:00',
TIMESTAMP '2010-10-31 01:15:45',
DATETIME '2008-10-31 13:15:45');
insert into t1 values (null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null,null);
--TEST Create failed
create index i_t1_a2q on t1(SIN(a));
--TEST Create failed
create index i_t1_a2p on t1(SIN(b));
--TEST Create failed
create index i_t1_a2o on t1(SIN(c));
--TEST Create failed
create index i_t1_a2n on t1(SIN(d));
--TEST Create failed
create index i_t1_a2m on t1(SIN(e));
--TEST Create failed
create index i_t1_a2l on t1(SIN(f));
--TEST Create successfully
create index i_t1_a2k on t1(SIN(g));
--TEST: should use index i_t1_a2k
select /*+ RECOMPILE */* from t1 where SIN(g)>0 ;
--TEST Create successfully
create index i_t1_a2j on t1(SIN(h));
--TEST: should use index i_t1_a2j
select /*+ RECOMPILE */* from t1 where SIN(h)<=1 ;
--TEST Create successfully
create index i_t1_a2i on t1(SIN(i));
--TEST: should use index i_t1_a2i
select /*+ RECOMPILE */* from t1 where SIN(i)<1 ;
--TEST Create successfully
create index i_t1_a2h on t1(SIN(j));
--TEST: should use index i_t1_a2h
select /*+ RECOMPILE */* from t1 where SIN(j)<>0 ;
--TEST Create successfully
create index i_t1_a2g on t1(SIN(k));
--TEST: should use index i_t1_a2g
select /*+ RECOMPILE */* from t1 where SIN(k)<>0 ;
--TEST Create successfully
create index i_t1_a2f on t1(SIN(l));
--TEST: should use index i_t1_a2f
select /*+ RECOMPILE */* from t1 where SIN(l)=0 ;
--TEST Create successfully
create index i_t1_a2e on t1(SIN(m));
--TEST: should use index i_t1_a2e
select /*+ RECOMPILE */* from t1 where SIN(m)<0 ;
--TEST Create failed
create index i_t1_a2d on t1(SIN(n));
--TEST Create failed
create index i_t1_a2c on t1(SIN(o));
--TEST Create failed
create index i_t1_a2b on t1(SIN(p));
--TEST Create failed
create index i_t1_a2a on t1(SIN(q));
--TEST Create failed
create index i_t1_a2a1 on t1(SIN(pi()/3));
--TEST Create failed
create index i_t1_a2a2 on t1(SIN(h/3));
drop table t1;
set system parameters 'dont_reuse_heap_file=no';
commit;
--+ holdcas off;
| 31.268293 | 250 | 0.700468 |
de54457357cff574752610f743d66329da628915 | 2,294 | rs | Rust | src/transform/helper_or.rs | mpdatx/hogan | 2639133009658878fc8faf171aceb6e753971ff4 | [
"Apache-2.0"
] | null | null | null | src/transform/helper_or.rs | mpdatx/hogan | 2639133009658878fc8faf171aceb6e753971ff4 | [
"Apache-2.0"
] | null | null | null | src/transform/helper_or.rs | mpdatx/hogan | 2639133009658878fc8faf171aceb6e753971ff4 | [
"Apache-2.0"
] | null | null | null | use handlebars::*;
#[derive(Clone, Copy)]
pub struct OrHelper;
impl HelperDef for OrHelper {
fn call<'reg: 'rc, 'rc>(
&self,
h: &Helper<'reg, 'rc>,
r: &'reg Handlebars,
ctx: &Context,
rc: &mut RenderContext<'reg>,
out: &mut Output,
) -> HelperResult {
let lvalue = h
.param(0)
.ok_or_else(|| RenderError::new("Left param not found for helper \"or\""))?
.value();
let rvalue = h
.param(1)
.ok_or_else(|| RenderError::new("Right param not found for helper \"or\""))?
.value();
let comparison = lvalue.as_str().map_or(false, |v| v.len() > 0)
|| rvalue.as_str().map_or(false, |v| v.len() > 0);
if h.is_block() {
let template = if comparison {
h.template()
} else {
h.inverse()
};
match template {
Some(ref t) => t.render(r, ctx, rc, out),
None => Ok(()),
}
} else {
if comparison {
out.write(&comparison.to_string())?;
}
Ok(())
}
}
}
#[cfg(test)]
mod test {
use super::*;
use transform::helper_equal::EqualHelper;
use transform::test::test_against_configs;
#[test]
fn test_or() {
let mut handlebars = Handlebars::new();
handlebars.register_helper("eq", Box::new(EqualHelper));
handlebars.register_helper("or", Box::new(OrHelper));
let templates = vec![
(
r#"{{#or (eq Region.Key "TEST") (eq Region.Key "TEST2")}}Foo{{/or}}"#,
"Foo",
),
(
r#"{{#or (eq Region.Key null) (eq Region.Key "NO")}}{{else}}Bar{{/or}}"#,
"Bar",
),
(
r#"{{#if (or (eq Region.Key "TEST") (eq Region.Key "TEST2"))}}Foo{{/if}}"#,
"Foo",
),
(
r#"{{#if (or (eq Region.Key null) (eq Region.Key "NO"))}}{{else}}Bar{{/if}}"#,
"Bar",
),
];
for (template, expected) in templates {
test_against_configs(&handlebars, template, expected)
}
}
}
| 27.309524 | 94 | 0.441587 |
56e44c40b3424d3235e00b96bb4e5c28c937d9f2 | 1,855 | tsx | TypeScript | src/components/inner-components/st-datepicker-inner/st-datepicker-inner.tsx | k1laba/st-datepicker | 714cd0bdc0dc6f710fcfde8baf2c1b466ecb5d1b | [
"MIT"
] | null | null | null | src/components/inner-components/st-datepicker-inner/st-datepicker-inner.tsx | k1laba/st-datepicker | 714cd0bdc0dc6f710fcfde8baf2c1b466ecb5d1b | [
"MIT"
] | null | null | null | src/components/inner-components/st-datepicker-inner/st-datepicker-inner.tsx | k1laba/st-datepicker | 714cd0bdc0dc6f710fcfde8baf2c1b466ecb5d1b | [
"MIT"
] | null | null | null | import { Component, h, Prop } from '@stencil/core';
import { DateHelper } from '../../../utils/date.helper';
import { IDatePickerModel } from '../../../models/date-picker.model';
@Component({
tag: 'st-datepicker-inner',
styleUrl: 'st-datepicker-inner.scss'
})
export class StDatePickerInner {
@Prop() itemSize: number;
@Prop() currentMonth: Date;
@Prop() datepickerDates: IDatePickerModel[];
@Prop() onMonthChange: (date: Date) => void;
@Prop() onDateSelect: (date: Date) => void;
@Prop() resolveDayView: (date: IDatePickerModel) => string;
render() {
return (
<div class="st-datepicker-inner">
<st-datepicker-nav
onDateChange={(date) => this.onMonthChange(date)}
format="MMMM YYYY"
navStep="M"
currentDate={this.currentMonth}>
</st-datepicker-nav>
<div class="st-datepicker-inner__weeks">
{DateHelper.getWeekDaysNames().map(wd =>
<span class="st-datepicker-inner__dates__item"
style={this.getItemStyles()}>{wd}</span>)}
</div>
{DateHelper.getDaysGroupedByWeeks(this.datepickerDates).map(item =>
<div class="st-datepicker-inner__dates">
{item.map(d => <span class="st-datepicker-inner__dates__item"
style={this.getItemStyles()}
onClick={() => this.onDateSelect(d.date)}>
{this.resolveDayView(d)}
</span>)}
</div>)}
</div>
);
}
private getItemStyles() {
return {
width: `${this.itemSize}px`,
height: `${this.itemSize}px`,
}
}
} | 37.1 | 86 | 0.508356 |
10b388d497546e795b5cde9d0760c83be1fb2c81 | 1,975 | lua | Lua | resources/[esx]/esx_extendedjail/locales/fi.lua | Pskaf-Moost/FiveM-Serveur-City | bf5375d25a385b7ddba628f9f71a794513ee4c6f | [
"Xnet",
"X11"
] | 2 | 2021-05-03T11:35:55.000Z | 2021-05-17T19:25:48.000Z | resources/[esx]/esx_extendedjail/locales/fi.lua | Pskaf-Moost/FiveM-Serveur-City | bf5375d25a385b7ddba628f9f71a794513ee4c6f | [
"Xnet",
"X11"
] | null | null | null | resources/[esx]/esx_extendedjail/locales/fi.lua | Pskaf-Moost/FiveM-Serveur-City | bf5375d25a385b7ddba628f9f71a794513ee4c6f | [
"Xnet",
"X11"
] | null | null | null | Locales ['fi'] = {
['not_found'] = 'Tapahtui virhe. Tarkista, että olet määrittänyt oikean pelaajan, oikean ajan ja syyn.',
['jailed'] = 'Sinut on laitettu vankilaan. \nAika: %s',
['pjailed'] = 'Sinut on laitettu putkaan. \nAika: %s',
['already_jailed'] = 'Olet vielä vankilassa. Jatketaan tuomiota tästä!',
['already_pjailed'] = 'Olet vielä putkassa. Jatketaan tästä!',
['remaining_time'] = 'Aikaa jäjellä: %s Minuuttia',
['unjailed'] = 'Sinut on vapautettu vankilasta!',
['blip_name'] = 'Vankila',
['no_permissions'] = 'Sinulla ei ole oikeutta käyttää tätä komentoa.',
['alert'] = 'Varoitus',
['outside_jail'] = 'Miten tänne pääsit? Palautetaanpa sinut takaisin.',
['tried_without_permission'] = 'Pelaaja yritti laittaa henkilöä vankilaan ilman oikeuksia!\n**Nimi:** %s\n**Hex:** %s',
['player_unjailed'] = 'Pelaaja on vapautettu vankilasta.\n**Nimi:** %s\n**Hex:** %s',
['player_jailed'] = 'Pelaaja on vangittu.\n**Nimi:** %s\n**Hex:** %s',
['player_tried_escape'] = 'Pelaaja yritti karata vankilasta. Emote abuse?\n**Nimi:** %s\n**Hex:** %s',
['press_to_escape'] = '~INPUT_CONTEXT~ • Yrittääksesi vapauttaa vankeja',
['no_players'] = 'Täällä ei ole vangittuja!',
['no_correct_item'] = 'Sinulla ei ole tarvittavia työkaluja tiirikointiin!',
['trying_to_computer'] = 'Yritetään murtautua tietokoneelle.',
['alarm_fire'] = 'Hälytykset laukesivat!',
['blip_alarm'] = 'Vankila | Hälytys',
['alarm'] = 'Hälytys',
['alarm_place'] = 'Vankila',
['alarm_reason'] = 'Murtautumishälytys',
['tried_unjail_error'] = 'Et voi vapauttaa henkilöä, joka on jo vapaana.',
['already_jailed_error'] = 'Kyseinen henkilö on jo vangittu.',
['already_started_error'] = 'Vapautus on jo käynnissä, tai sitä on yritetty äskettäin.',
['failed_hacking_error'] = 'Epäonnistuit murtautumisessa!',
['error'] = 'Tapahtui virhe. Varmista, että käytit komentoa oikein!',
['error_noplayer'] = 'Tapahtui virhe. Tätä pelaajaa ei ole palvelimella.'
}
| 59.848485 | 121 | 0.684051 |
7f797d0298c165ecf83acab2aa3304387f11148e | 472 | go | Go | models/points_internal_test.go | adinesh10/influxdb | b4b122345ec2504c4fa5a0753771e4fe75203bb5 | [
"MIT"
] | null | null | null | models/points_internal_test.go | adinesh10/influxdb | b4b122345ec2504c4fa5a0753771e4fe75203bb5 | [
"MIT"
] | null | null | null | models/points_internal_test.go | adinesh10/influxdb | b4b122345ec2504c4fa5a0753771e4fe75203bb5 | [
"MIT"
] | null | null | null | package models
import "testing"
func TestMarshalPointNoFields(t *testing.T) {
points, err := ParsePointsString("m,k=v f=0i")
if err != nil {
t.Fatal(err)
}
// It's unclear how this can ever happen, but we've observed points that were marshalled without any fields.
points[0].(*point).fields = []byte{}
if _, err := points[0].MarshalBinary(); err != ErrPointMustHaveAField {
t.Fatalf("got error %v, exp %v", err, ErrPointMustHaveAField)
}
}
| 26.222222 | 110 | 0.667373 |
6b5260437dcb1e98c7a6896be8dfb4a6a43e375f | 2,812 | c | C | chrom-test.c | neatniets/binpacking-genetic-algorithm | a140f354afbffbaa6a214907fa9eddb812a7fa85 | [
"MIT"
] | null | null | null | chrom-test.c | neatniets/binpacking-genetic-algorithm | a140f354afbffbaa6a214907fa9eddb812a7fa85 | [
"MIT"
] | null | null | null | chrom-test.c | neatniets/binpacking-genetic-algorithm | a140f354afbffbaa6a214907fa9eddb812a7fa85 | [
"MIT"
] | null | null | null | #include "chromosome.h"
#include <stdio.h>
#include <stdlib.h>
#define ARR_SZ 20
#define TEST_CAP 1000
#define MUT_RATE (0.75)
static void print_bin(const bin_t *bin,
const long double *item_sizes, size_t num_items);
static void print_chrom(const chrom_t *chrom,
const long double *item_sizes, size_t num_items);
int main(void) {
srand(3);
long double *arr = malloc(ARR_SZ * sizeof(*arr));
printf("test set:\n");
for (size_t i=0; i<ARR_SZ; i++) {
arr[i] = rand() % (TEST_CAP / 2) + 1;
printf("%Lf ", arr[i]);
}
putchar('\n');
printf("rand\n");
chrom_t *chrom = rand_first_fit(arr, ARR_SZ, TEST_CAP);
printf("chrom 1:\n");
print_chrom(chrom, arr, ARR_SZ);
putchar('\n');
printf("rand\n");
chrom_t *chrom2 = rand_first_fit(arr, ARR_SZ, TEST_CAP);
printf("chrom 2:\n");
print_chrom(chrom2, arr, ARR_SZ);
putchar('\n');
printf("crossover\n");
chrom_t *child = chrom_cx(chrom, chrom2, arr, ARR_SZ);
printf("child chrom:\n");
print_chrom(child, arr, ARR_SZ);
putchar('\n');
printf("mutate child\n");
chrom_mutate(child, MUT_RATE, arr, ARR_SZ);
printf("mutated chrom:\n");
print_chrom(child, arr, ARR_SZ);
putchar('\n');
printf("copy chrom\n");
chrom_t *copy = chrom_copy(child);
printf("free chrom 1\n");
chrom_free(chrom);
printf("free chrom 2\n");
chrom_free(chrom2);
printf("free child chrom\n");
chrom_free(child);
printf("copied chrom:\n");
print_chrom(copy, arr, ARR_SZ);
putchar('\n');
printf("free copy\n");
chrom_free(copy);
free(arr);
return 0;
}
static void print_bin(const bin_t *bin,
const long double *item_sizes, size_t num_items) {
printf("fill: %Lf\n"
"count: %zu\n"
"items:\n",
bin->fill, bin->count);
for (size_t i=0; i<bin->count; i++) {
printf("index: %zu\tsize: %Lf\n",
bin->item_indices[i],
item_sizes[bin->item_indices[i]]);
}
}
static void print_chrom(const chrom_t *chrom,
const long double *item_sizes, size_t num_items) {
printf("fitness: %lf\n"
"bin_cap: %Lf\n"
"num_bins: %zu\n"
"bins:\n",
chrom->fitness, chrom->bin_cap, chrom->num_bins);
for (size_t i=0; i<chrom->num_bins; i++) {
printf("bin %zu:\n", i);
print_bin(chrom->bins[i], item_sizes, num_items);
}
}
| 33.47619 | 74 | 0.514225 |
d40cbf9a2a583c85c5300ae14ea9892aad68b541 | 4,492 | kt | Kotlin | src/main/kotlin/dev/north/fortyone/besu/commands/ReplicationExportCommand.kt | 41north/besu-storage-replication | 49d9ab04b901128aaa329f32fc7c4d6e6e68e0c6 | [
"Apache-2.0"
] | 1 | 2020-09-24T04:42:59.000Z | 2020-09-24T04:42:59.000Z | src/main/kotlin/dev/north/fortyone/besu/commands/ReplicationExportCommand.kt | 41north/besu-storage-replication | 49d9ab04b901128aaa329f32fc7c4d6e6e68e0c6 | [
"Apache-2.0"
] | null | null | null | src/main/kotlin/dev/north/fortyone/besu/commands/ReplicationExportCommand.kt | 41north/besu-storage-replication | 49d9ab04b901128aaa329f32fc7c4d6e6e68e0c6 | [
"Apache-2.0"
] | null | null | null | /*
* Copyright (c) 2020 41North.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dev.north.fortyone.besu.commands
import com.google.common.collect.Iterators
import dev.north.fortyone.besu.ext.getService
import dev.north.fortyone.besu.services.PutEvent
import dev.north.fortyone.besu.services.ReplicationManager
import dev.north.fortyone.besu.services.StorageTransaction
import kotlinx.coroutines.Job
import kotlinx.coroutines.runBlocking
import org.hyperledger.besu.plugin.services.storage.KeyValueStorage
import org.hyperledger.besu.plugin.services.storage.SegmentIdentifier
import org.slf4j.LoggerFactory
import picocli.CommandLine.Command
import picocli.CommandLine.Model.CommandSpec
import picocli.CommandLine.ParentCommand
import picocli.CommandLine.Spec
@Command(
name = "export",
description = ["Export your Besu key/value storage to supported external services."]
)
class ReplicationExportCommand : Runnable {
companion object {
private val logger = LoggerFactory.getLogger(ReplicationExportCommand::class.java)
}
@ParentCommand
private lateinit var parentCommand: ReplicationSubCommand
@Spec
private lateinit var spec: CommandSpec
private val keyBatchSize = 128
private val maxInFlightTransactions = 1024 * 10
override fun run() {
with(parentCommand) {
// initialise plugins
pluginContext.startPlugins()
// initialise and clear storage
val storageBySegment = initialiseStorage(this)
// perform the export
runBlocking { export(this@with, storageBySegment) }
}
}
private fun initialiseStorage(parentCommand: ReplicationSubCommand): Map<SegmentIdentifier, KeyValueStorage> =
with(parentCommand) {
val storageFactory = storageService
.getByName(keyValueStorageName)
.orElseThrow { throw IllegalArgumentException("Invalid key value storage name: $keyValueStorageName") }
storageSegments
.zip(
storageSegments.map { segment ->
storageFactory.create(segment, pluginCommonConfig, metricsSystem)
}
)
.toMap()
}
private suspend fun export(
parentCommand: ReplicationSubCommand,
storageBySegment: Map<SegmentIdentifier, KeyValueStorage>
) = with(parentCommand) {
val replicationManager = pluginContext.getService<ReplicationManager>()
var replicationJobs = emptyList<Job>()
val tryWaitOnReplicationJobs: suspend (Boolean) -> Unit = { force ->
if (force || replicationJobs.size == maxInFlightTransactions) {
replicationJobs
.also { logger.info("Waiting on {} replication jobs to complete", replicationJobs.size) }
.forEach { job -> job.join() }
.run {
logger.info("{} replication jobs completed", replicationJobs.size)
replicationJobs = emptyList()
}
}
}
storageBySegment
.forEach { (segment, storage) ->
val keysStream = storage.streamKeys()
// TODO ensure each batch of events stays under 1 mb to prevent problems with transaction log implementations
val batchIterator = Iterators.partition(keysStream.iterator(), keyBatchSize)
do {
batchIterator.next()
.also { keys -> logger.debug("Processing {} keys", keys.size) }
.let { keys -> keys.map { key -> PutEvent(key, storage.get(key).get()) } }
.let { storageEvents ->
StorageTransaction(
keyValueStorageName, segment, storageEvents
)
}
.let { txEvent -> replicationManager.onTransaction(txEvent) }
.also { replicationJob ->
replicationJobs = replicationJobs + replicationJob
tryWaitOnReplicationJobs(false)
}
} while (batchIterator.hasNext())
}
// wait on any remaining jobs that didn't meet the batch threshold
tryWaitOnReplicationJobs(true)
logger.info("Finished export")
}
}
| 32.085714 | 117 | 0.697017 |
7e4e75194b7b2c626893c87b75884b816ab1fc3a | 255 | swift | Swift | EBook/Models/SearchHeat.swift | QiuDaniel/Book | df3afac6999ef2e958c0195a0aa7af4cfbd2a960 | [
"Apache-2.0"
] | 1 | 2021-12-03T08:46:56.000Z | 2021-12-03T08:46:56.000Z | EBook/Models/SearchHeat.swift | QiuDaniel/Book | df3afac6999ef2e958c0195a0aa7af4cfbd2a960 | [
"Apache-2.0"
] | null | null | null | EBook/Models/SearchHeat.swift | QiuDaniel/Book | df3afac6999ef2e958c0195a0aa7af4cfbd2a960 | [
"Apache-2.0"
] | null | null | null | //
// SearchHeat.swift
// EBook
//
// Created by SPARK-Daniel on 2021/9/5.
//
import Foundation
struct SearchHeat: Codable {
let id: Int
let name: String
enum CodingKeys: String, CodingKey {
case id
case name
}
}
| 13.421053 | 40 | 0.592157 |
4a2b30ec9995595c09e4585a50af096004bf52aa | 176 | js | JavaScript | src/components/PageBreak.js | bazk/resume | 850ff1478b3ccde8366670c3b71ed3a99ca38da1 | [
"MIT"
] | null | null | null | src/components/PageBreak.js | bazk/resume | 850ff1478b3ccde8366670c3b71ed3a99ca38da1 | [
"MIT"
] | 8 | 2021-03-09T07:55:48.000Z | 2022-02-26T11:37:36.000Z | src/components/PageBreak.js | bazk/resume | 850ff1478b3ccde8366670c3b71ed3a99ca38da1 | [
"MIT"
] | null | null | null | import styled from "styled-components"
const PageBreak = styled.div`
position: relative;
page-break-after: always;
page-break-inside: avoid;
`;
export default PageBreak | 19.555556 | 38 | 0.755682 |
0cb26210fdbce5c2de9ff66cfbeec89817eff49b | 267 | py | Python | tests/test_utils.py | yehzhang/dscraper | 6fd1a4238795e9eb01b9dd8329a84495a70979d1 | [
"Apache-2.0"
] | 1 | 2017-08-13T09:50:06.000Z | 2017-08-13T09:50:06.000Z | tests/test_utils.py | yehzhang/dscraper | 6fd1a4238795e9eb01b9dd8329a84495a70979d1 | [
"Apache-2.0"
] | null | null | null | tests/test_utils.py | yehzhang/dscraper | 6fd1a4238795e9eb01b9dd8329a84495a70979d1 | [
"Apache-2.0"
] | null | null | null | import unittest
import logging
import xml.etree.ElementTree as et
import dscraper.utils as utils
logger = logging.getLogger(__name__)
class TestUtils(unittest.TestCase):
XML_FILES = (
'tests/resources/1.xml',
)
def setUp(self):
pass
| 14.833333 | 36 | 0.692884 |
da4768da8ca6ba3876276baa89736b71e5d20c7b | 101 | sql | SQL | src/test/resources/sql/select/987022cf.sql | Shuttl-Tech/antlr_psql | fcf83192300abe723f3fd3709aff5b0c8118ad12 | [
"MIT"
] | 66 | 2018-06-15T11:34:03.000Z | 2022-03-16T09:24:49.000Z | src/test/resources/sql/select/987022cf.sql | Shuttl-Tech/antlr_psql | fcf83192300abe723f3fd3709aff5b0c8118ad12 | [
"MIT"
] | 13 | 2019-03-19T11:56:28.000Z | 2020-08-05T04:20:50.000Z | src/test/resources/sql/select/987022cf.sql | Shuttl-Tech/antlr_psql | fcf83192300abe723f3fd3709aff5b0c8118ad12 | [
"MIT"
] | 28 | 2019-01-05T19:59:02.000Z | 2022-03-24T11:55:50.000Z | -- file:select.sql ln:197 expect:true
select * from onek2 where unique2 = 11 and stringu1 = 'ATAAAA'
| 33.666667 | 62 | 0.732673 |
bee67d3821c6d0b052802c5a7bb3c80cef7e0856 | 1,147 | html | HTML | index.html | I-School-Class-i190-dvw/12-geospatial-viz-part2-ConorEdwardKelly | 7361f3cb3d37223dc77e602668ba21e5cb5c2c5d | [
"MIT"
] | null | null | null | index.html | I-School-Class-i190-dvw/12-geospatial-viz-part2-ConorEdwardKelly | 7361f3cb3d37223dc77e602668ba21e5cb5c2c5d | [
"MIT"
] | null | null | null | index.html | I-School-Class-i190-dvw/12-geospatial-viz-part2-ConorEdwardKelly | 7361f3cb3d37223dc77e602668ba21e5cb5c2c5d | [
"MIT"
] | null | null | null | <!DOCTYPE html>
<head>
<meta charset="utf-8">
<title>12: Part 2 of Geospatial Viz with D3</title>
<meta name="author" content="Conor Kelly">
<meta name="viewport" content="width=device-width, initial-scale=1">
<!-- load the d3 library locally, along with some additions for working with geospatial data -->
<script src="./lib/d3.min.js"></script>
<script src="https://d3js.org/d3-tile.v0.0.min.js"></script>
<script src="https://d3js.org/d3-array.v1.min.js"></script>
<script src="https://d3js.org/d3-geo.v1.min.js"></script>
<style>
body {
margin:0; /* Set margin */
padding: 0; /*Set padding */
font-family: "Helvetica Neue", Helvetica, Arial, sans-serif; /*Set font*/
}
path {
fill: red; /*Set path fill */
stroke: none; /* Set stroke */
fill-opacity: 0.6; /*Set fill */
stroke-linejoin: round; /*Set stroke line join */
}
path:hover {
fill-opacity: 1; /*Set fill opacity */
}
</style>
</head>
<body>
<script src="main.js"></script>
<!-- script tags for main.js file -->
</body>
| 24.934783 | 99 | 0.574542 |
da39417a2e4939b94a2c2c1695d0305ea8949438 | 107 | swift | Swift | Tests/SwagGenKitTests/XCTest.swift | rocketnik/SwagGen | ab8cebc86a225f918bdce2d858371aa1cb47d205 | [
"MIT"
] | null | null | null | Tests/SwagGenKitTests/XCTest.swift | rocketnik/SwagGen | ab8cebc86a225f918bdce2d858371aa1cb47d205 | [
"MIT"
] | null | null | null | Tests/SwagGenKitTests/XCTest.swift | rocketnik/SwagGen | ab8cebc86a225f918bdce2d858371aa1cb47d205 | [
"MIT"
] | null | null | null | import XCTest
class SwagGenTests: XCTestCase {
func testRunSwagGenTests() {
specTests()
}
}
| 13.375 | 32 | 0.663551 |
cb63f1a73c191e7a7156028f9e877a80ff531d25 | 7,886 | html | HTML | 02-mgt-person/index.html | sebastienlevert/mgt-sandbox | a087555eb33bdcb1ff4c9968f6d67f428319df04 | [
"MIT"
] | 7 | 2022-01-25T16:53:56.000Z | 2022-03-09T17:37:40.000Z | 02-mgt-person/index.html | sebastienlevert/mgt-sandbox | a087555eb33bdcb1ff4c9968f6d67f428319df04 | [
"MIT"
] | 1 | 2022-03-09T08:38:11.000Z | 2022-03-09T08:38:11.000Z | 02-mgt-person/index.html | sebastienlevert/mgt-sandbox | a087555eb33bdcb1ff4c9968f6d67f428319df04 | [
"MIT"
] | 1 | 2022-02-03T14:42:08.000Z | 2022-02-03T14:42:08.000Z | <!DOCTYPE html>
<html>
<head>
<script src="https://unpkg.com/@microsoft/mgt@next/dist/bundle/mgt-loader.js"></script>
<style>
#isSignedIn {
display: none;
}
.example {
font-family: "Segoe UI", "Segoe UI Web (West European)", "Segoe UI",
-apple-system, BlinkMacSystemFont, Roboto, "Helvetica Neue",
sans-serif;
margin-bottom: 8px;
}
.example .title {
border-bottom: 1px solid #8a8886;
margin-bottom: 8px;
width: fit-content;
}
.facepile mgt-person:not(:first-child) {
margin-left: -12px;
}
mgt-person.custom-style {
--avatar-size: 90px;
--avatar-border: 3px dotted red;
--avatar-border-radius: 20% 40%;
--initials-color: green;
--initials-background-color: magenta;
--presence-background-color: blue;
--presence-icon-color: blue;
--font-family: "Segoe UI";
--font-size: 25px;
--font-weight: 700;
--color-sub1: red;
--text-transform: capitalize;
--line2-font-size: 16px;
--line2-font-weight: 400;
--line2-color: green;
--line2-text-transform: lowercase;
--line3-font-size: 8px;
--line3-font-weight: 400;
--line3-color: pink;
--line3-text-transform: none;
--details-spacing: 30px;
}
</style>
</head>
<body>
<title>Using the mgt-person component</title>
<mgt-msal2-provider
client-id="%YOUR_CLIENT_ID%"
scopes="User.Read, User.ReadBasic.All, User.Read.All, Mail.ReadBasic, Sites.Read.All, People.Read.All, People.Read, Contacts.Read, Presence.Read"
login-type="popup"
>
</mgt-msal2-provider>
<mgt-login></mgt-login>
<div id="isSignedIn">
<div class="example">
<div class="title">
Simple person component for currently signed in user using the default
"avatar" view
</div>
<mgt-person person-query="me"></mgt-person>
</div>
<div class="example">
<div class="title">
Simple person component for currently signed in user with its initials
</div>
<mgt-person person-query="me" avatar-type="initials"></mgt-person>
</div>
<div class="example">
<div class="title">Person component for another user</div>
<mgt-person person-query="Alex Wilber"></mgt-person>
</div>
<div class="example">
<div class="title">Person component for multiple users</div>
<div class="facepile">
<mgt-person person-query="me"></mgt-person>
<mgt-person person-query="Alex Wilber"></mgt-person>
<mgt-person person-query="Megan"></mgt-person>
<mgt-person person-query="Patti"></mgt-person>
</div>
</div>
<div class="example">
<div class="title">
Person component for the currently signed in user's manager
</div>
<mgt-person id="manager"></mgt-person>
</div>
<div class="example">
<div class="title">
Person component for currently signed in user using the "oneline" view
</div>
<mgt-person person-query="me" view="oneline"></mgt-person>
</div>
<div class="example">
<div class="title">
Person component for currently signed in user using the "twolines"
view
</div>
<mgt-person person-query="me" view="twolines"></mgt-person>
</div>
<div class="example">
<div class="title">
Person component for currently signed in user using the "threelines"
view
</div>
<mgt-person person-query="me" view="threelines"></mgt-person>
</div>
<div class="example">
<div class="title">
Person component for currently signed in user with custom line data
</div>
<mgt-person
person-query="me"
view="threelines"
line1-property="givenName"
line2-property="jobTitle"
line3-property="mobilePhone"
></mgt-person>
</div>
<div class="example">
<div class="title">Person component for a user not on Graph</div>
<mgt-person
view="threeLines"
fetch-image="false"
person-details='{"mail":"Parker@contoso.com","displayName":"Parker the Porcupine", "jobTitle":"PnP Hero"}'
person-image="https://raw.githubusercontent.com/pnp/media/master/parker/pnp/300w/parker.png"
></mgt-person>
</div>
<div class="example">
<div class="title">
Person component for currently signed in user using the "threelines"
view and adding the hover capability
</div>
<mgt-person person-query="me" view="threelines" person-card="hover">
</mgt-person>
</div>
<div class="example">
<div class="title">
Person component for currently signed in user with the presence
indicator
</div>
<mgt-person person-query="me" show-presence="true"></mgt-person>
</div>
<div class="example">
<div class="title">
Person component for currently signed in user with the presence
indicator and using the "threelines"
</div>
<mgt-person
person-query="me"
show-presence="true"
view="threelines"
></mgt-person>
</div>
<div class="example">
<div class="title">
Customized person component for currently signed in user using the
default "threelines" view
</div>
<mgt-person
class="custom-style"
person-query="me"
view="threelines"
></mgt-person>
</div>
<div class="example">
<div class="title">
Person component with custom events for currently signed in user using
the default "threelines" view
</div>
<mgt-person
id="custom-events"
person-query="me"
view="threelines"
></mgt-person>
</div>
</div>
<div id="isNotSignedIn">
It seems you are not connected. Please sign in using the mgt-login
component!
</div>
<script>
/**
* Loads the current user's manager from a custom request to Microsoft Graph and assigns its value to the person component
*/
const loadManager = () => {
if (mgt.Providers.globalProvider.state === mgt.ProviderState.SignedIn) {
let graphClient = mgt.Providers.globalProvider.graph.client;
graphClient
.api(`/me/manager`)
.get()
.then((result) => {
document.getElementById("manager").userId = result.id;
});
}
};
// When the user is signed in, we modify the visibility of some elements and load some data from Microsoft Graph
mgt.Providers.onProviderUpdated(() => {
if (mgt.Providers.globalProvider.state === mgt.ProviderState.SignedIn) {
document.getElementById("isSignedIn").style.display = "grid";
document.getElementById("isNotSignedIn").style.display = "none";
}
loadManager();
});
//document.getElementById("person-details").personDetails =
// Modifying the presence cache invalidation period to 30 seconds
mgt.CacheService.config.presence.invalidationPeriod = 30000;
// When the user clicks on the first line of the component
document
.querySelector("#custom-events")
.addEventListener("line1clicked", (e) => {
console.log(
`Event: line1clicked at ${new Date().toLocaleString()} with content ${JSON.stringify(
e.detail
)}`
);
});
</script>
</body>
</html>
| 30.684825 | 151 | 0.571646 |
1732ec3d7b0921efa91b3f1833d35225fd1bac99 | 489 | sql | SQL | snprc_ehr/resources/queries/snprc_ehr/ExportPkgCategories.sql | LabKey/snprcEHRModules | 93fab5f8a3ced8f6449a4ed99ae96f513494823f | [
"Apache-2.0"
] | 2 | 2018-11-13T17:29:20.000Z | 2018-11-13T17:29:25.000Z | snprc_ehr/resources/queries/snprc_ehr/ExportPkgCategories.sql | LabKey/snprcEHRModules | 93fab5f8a3ced8f6449a4ed99ae96f513494823f | [
"Apache-2.0"
] | 36 | 2019-02-27T18:02:47.000Z | 2022-03-30T22:16:03.000Z | snprc_ehr/resources/queries/snprc_ehr/ExportPkgCategories.sql | LabKey/snprcEHRModules | 93fab5f8a3ced8f6449a4ed99ae96f513494823f | [
"Apache-2.0"
] | 1 | 2019-12-04T16:29:52.000Z | 2019-12-04T16:29:52.000Z | /********************************************************
Query will feed the export ETL back to CAMP.
02.24.21 srr
straight pull from snd.PkgCategories table
********************************************************/
SELECT pc.CategoryId,
pc.Description,
pc.Comment,
pc.Active,
pc.SortOrder,
pc.Container,
pc.CreatedBy,
pc.Created,
pc.ModifiedBy,
pc.Modified,
pc.Lsid,
pc.Objectid
FROM snd.PkgCategories pc | 23.285714 | 58 | 0.472393 |
f8fbd9271f2ca74d17bd8dace9a4aab59f7cdb8f | 3,071 | swift | Swift | Sources/KafkaNIO/Messages/DeleteTopicsResponse.swift | tbartelmess/kafka-nio | 02abe225ff9f6a19d37fe9753fb84534ee8025bc | [
"Apache-2.0"
] | 7 | 2020-09-27T05:03:54.000Z | 2021-03-16T15:05:12.000Z | Sources/KafkaNIO/Messages/DeleteTopicsResponse.swift | tbartelmess/kafka-nio | 02abe225ff9f6a19d37fe9753fb84534ee8025bc | [
"Apache-2.0"
] | 3 | 2020-09-27T02:47:51.000Z | 2020-10-02T23:42:42.000Z | Sources/KafkaNIO/Messages/DeleteTopicsResponse.swift | tbartelmess/kafka-nio | 02abe225ff9f6a19d37fe9753fb84534ee8025bc | [
"Apache-2.0"
] | null | null | null | //===----------------------------------------------------------------------===//
//
// This source file is part of the KafkaNIO open source project
//
// Copyright © 2020 Thomas Bartelmess.
// Licensed under Apache License v2.0
//
// See LICENSE.txt for license information
//
// SPDX-License-Identifier: Apache-2.0
//
//===----------------------------------------------------------------------===//
//
// This file is auto generated from the Kafka Protocol definition. DO NOT EDIT.
import NIO
struct DeleteTopicsResponse: KafkaResponse {
struct DeletableTopicResult: KafkaResponseStruct {
/// The topic name
let name: String
/// The deletion error, or 0 if the deletion succeeded.
let errorCode: ErrorCode
/// The error message, or null if there was no error.
let errorMessage: String?
init(from buffer: inout ByteBuffer, apiVersion: APIVersion) throws {
let lengthEncoding: IntegerEncoding = (apiVersion >= 4) ? .varint : .bigEndian
name = try buffer.read(lengthEncoding: lengthEncoding)
errorCode = try buffer.read()
if apiVersion >= 5 {
errorMessage = try buffer.read(lengthEncoding: lengthEncoding)
} else {
errorMessage = nil
}
if apiVersion >= 4 {
let _ : [TaggedField] = try buffer.read()
}
}
init(name: String, errorCode: ErrorCode, errorMessage: String?) {
self.name = name
self.errorCode = errorCode
self.errorMessage = errorMessage
}
}
let apiKey: APIKey = .deleteTopics
let apiVersion: APIVersion
let responseHeader: KafkaResponseHeader
/// The duration in milliseconds for which the request was throttled due to a quota violation, or zero if the request did not violate any quota.
let throttleTimeMs: Int32?
/// The results for each topic we tried to delete.
let responses: [DeletableTopicResult]
let taggedFields: [TaggedField]
init(from buffer: inout ByteBuffer, responseHeader: KafkaResponseHeader, apiVersion: APIVersion) throws {
let lengthEncoding: IntegerEncoding = (apiVersion >= 4) ? .varint : .bigEndian
self.apiVersion = apiVersion
self.responseHeader = responseHeader
if apiVersion >= 1 {
throttleTimeMs = try buffer.read()
} else {
throttleTimeMs = nil
}
responses = try buffer.read(apiVersion: apiVersion, lengthEncoding: lengthEncoding)
if apiVersion >= 4 {
taggedFields = try buffer.read()
} else {
taggedFields = []
}
}
init(apiVersion: APIVersion, responseHeader: KafkaResponseHeader, throttleTimeMs: Int32?, responses: [DeletableTopicResult]) {
self.apiVersion = apiVersion
self.responseHeader = responseHeader
self.taggedFields = []
self.throttleTimeMs = throttleTimeMs
self.responses = responses
}
} | 35.298851 | 148 | 0.594269 |
cf714586a956b14ff9784b54d35a8af184fac7f6 | 1,996 | lua | Lua | src/csv.lua | dinamech/duo | 0a4d3569afeef15c3c21d67d7834b2766cc5b720 | [
"BSD-2-Clause"
] | 1 | 2020-05-18T12:00:13.000Z | 2020-05-18T12:00:13.000Z | src/csv.lua | dinamech/duo | 0a4d3569afeef15c3c21d67d7834b2766cc5b720 | [
"BSD-2-Clause"
] | 3 | 2020-05-19T20:51:52.000Z | 2020-05-22T06:15:45.000Z | src/csv.lua | dinamech/duo | 0a4d3569afeef15c3c21d67d7834b2766cc5b720 | [
"BSD-2-Clause"
] | null | null | null | local the = require "the"
local csv, csvWant, csvTake, select
-- Return an iterator that returns all non-blank
-- lines, divided into cells (coerced to numbers,
-- if need be), with all white space and comments removed.
-- Also,
-- skip over any column whose name starts with `the.ch.skip`
-- character (which defaults to `?`).
-- Example usage:
--
-- csv = require("csv")
--
-- for line in csv("data.csv") do
-- print(line)
-- end
--
-- Note that:
--
-- - File processing is incremental (one
-- line at a time) without loading the file into RAM.
-- - This iterator reads from `file` or, if that is absent,
-- from standard input.
function csv(file, want,stream,tmp,row)
stream = file and io.input(file) or io.input()
tmp = io.read()
return function()
if tmp then
tmp = tmp:gsub("[\t\r ]*","") -- no whitespace
row = split(tmp)
tmp = io.read()
if #row > 0 then
want = want or csvWant(row) -- only do first time
return csvTake(want,row)
end
else
io.close(stream) end end
end
-- ## Support
-- Determine what we want.
function csvWant(row, out,put)
out, put = {},0
for get,txt in pairs(row) do
if string.sub(txt,1,1) ~= the.ch.skip then
put = put + 1
out[put] = get
end end
return out
end
-- Take what we `want`
-- (and while we are here, coerce any
-- number strings to numbers).
function csvTake(want,row, out,cell)
out = {}
for put,get in pairs(want) do
cell = row[get]
cell = tonumber(cell) or cell -- coercian
out[put] = cell end
return out
end
-- Low-level function: Split the string `s` on some seperatpr `sep`
-- (which defaults to ",") into a return list.
function split(s, sep,out)
out, sep = {}, sep or ","
for y in string.gmatch(s, "([^" ..sep.. "]+)") do
out[#out+1] = y end
return out
end
-------------
-- ## Export control
return csv
-- ## Author
-- Tim Menzies, April 2020.
| 24.048193 | 68 | 0.596192 |
5a192f75c4de232164f28c91d4d28921a53a4852 | 67 | rs | Rust | api_helix/src/constants.rs | alecsavvy/twitch.rs | dd47d85c2da1bb0599ee42db7032f5b22a83d13c | [
"MIT"
] | null | null | null | api_helix/src/constants.rs | alecsavvy/twitch.rs | dd47d85c2da1bb0599ee42db7032f5b22a83d13c | [
"MIT"
] | null | null | null | api_helix/src/constants.rs | alecsavvy/twitch.rs | dd47d85c2da1bb0599ee42db7032f5b22a83d13c | [
"MIT"
] | null | null | null | pub const HELIX_URL: &'static str = "https://api.twitch.tv/helix";
| 33.5 | 66 | 0.701493 |
39fa4599e17a3985f525f9351d6f9dc9505f9a8b | 1,883 | java | Java | Components/LearningCore/Source/gov/sandia/cognition/statistics/UnivariateDistribution.java | Markoy8/Foundry | c3ec00a8efe08a25dd5eae7150b788e4486c0e6e | [
"BSD-3-Clause"
] | 122 | 2015-01-19T17:36:40.000Z | 2022-02-25T20:22:22.000Z | Components/LearningCore/Source/gov/sandia/cognition/statistics/UnivariateDistribution.java | jbasilico/Foundry | 232e0f91a8f31aa92e20bb36a208f3624d595ee5 | [
"BSD-3-Clause"
] | 45 | 2015-01-23T06:28:33.000Z | 2021-05-18T19:11:29.000Z | Components/LearningCore/Source/gov/sandia/cognition/statistics/UnivariateDistribution.java | jbasilico/Foundry | 232e0f91a8f31aa92e20bb36a208f3624d595ee5 | [
"BSD-3-Clause"
] | 42 | 2015-01-20T03:07:17.000Z | 2021-08-18T08:51:55.000Z | /*
* File: UnivariateDistribution.java
* Authors: Kevin R. Dixon
* Company: Sandia National Laboratories
* Project: Cognitive Foundry
*
* Copyright Jan 27, 2009, Sandia Corporation.
* Under the terms of Contract DE-AC04-94AL85000, there is a non-exclusive
* license for use of this work by or on behalf of the U.S. Government.
* Export of this program may require a license from the United States
* Government. See CopyrightHistory.txt for complete details.
*
*/
package gov.sandia.cognition.statistics;
/**
 * A Distribution that takes Doubles as inputs and can compute its variance.
 * @param <NumberType> Type of Number that can be sampled from this distribution.
 * @author Kevin R. Dixon
 * @since 3.0
 */
public interface UnivariateDistribution<NumberType extends Number>
    extends DistributionWithMean<NumberType>
{

    /**
     * Gets the minimum support (domain or input) of the distribution.
     * @return
     * Minimum support.
     */
    public NumberType getMinSupport();

    /**
     * Gets the maximum support (domain or input) of the distribution.
     * @return
     * Maximum support.
     */
    public NumberType getMaxSupport();

    /**
     * Gets the CDF of a scalar distribution.
     * @return
     * CDF of the scalar distribution.
     */
    public CumulativeDistributionFunction<NumberType> getCDF();

    /**
     * Gets the mean of the distribution as a double.
     *
     * @return
     * The mean as a double.
     */
    public double getMeanAsDouble();

    /**
     * Gets the variance of the distribution. This is sometimes called
     * the second central moment by more pedantic people, which is equivalent
     * to the square of the standard deviation.
     * @return
     * Variance of the distribution.
     */
    public double getVariance();

}
| 28.530303 | 81 | 0.655868 |
398e6bbfb12dfb67e58137e1178b183b1ee9f02b | 6,010 | asm | Assembly | Transynther/x86/_processed/NONE/_xt_/i3-7100_9_0x84_notsx.log_21829_2078.asm | ljhsiun2/medusa | 67d769b8a2fb42c538f10287abaf0e6dbb463f0c | [
"MIT"
] | 9 | 2020-08-13T19:41:58.000Z | 2022-03-30T12:22:51.000Z | Transynther/x86/_processed/NONE/_xt_/i3-7100_9_0x84_notsx.log_21829_2078.asm | ljhsiun2/medusa | 67d769b8a2fb42c538f10287abaf0e6dbb463f0c | [
"MIT"
] | 1 | 2021-04-29T06:29:35.000Z | 2021-05-13T21:02:30.000Z | Transynther/x86/_processed/NONE/_xt_/i3-7100_9_0x84_notsx.log_21829_2078.asm | ljhsiun2/medusa | 67d769b8a2fb42c538f10287abaf0e6dbb463f0c | [
"MIT"
] | 3 | 2020-07-14T17:07:07.000Z | 2022-03-21T01:12:22.000Z | .global s_prepare_buffers
s_prepare_buffers:
// Auto-generated buffer-preparation routine (presumably emitted by the
// Transynther/Medusa fuzzer, judging by the trailing generator metadata
// comment — TODO confirm). It seeds several labelled address regions with
// ASCII marker bytes (0x61.. = 'a','b','c',...) and performs a few loads.
// The scattered nops and self-comparisons are generator padding.
// Save all registers clobbered below.
push %r11
push %r12
push %r13
push %r14
push %rax
push %rbx
push %rdx
// Store 8 marker bytes to addresses_WC_ht+0x1a85e.
lea addresses_WC_ht+0x1a85e, %r13
nop
xor $45889, %rdx
mov $0x6162636465666768, %r14
movq %r14, (%r13)
nop
nop
nop
nop
nop
inc %r13
// Store 2 marker bytes to addresses_UC_ht+0xbec2.
lea addresses_UC_ht+0xbec2, %rdx
nop
nop
nop
nop
nop
sub %rax, %rax
movw $0x6162, (%rdx)
nop
nop
nop
nop
nop
xor %r11, %r11
// Store 4 marker bytes to addresses_normal_ht+0x136ba.
lea addresses_normal_ht+0x136ba, %r12
nop
nop
and %r14, %r14
movl $0x61626364, (%r12)
nop
nop
nop
and %rdx, %rdx
// 16-byte (unaligned) store to addresses_A_ht+0xd0f1 via xmm6.
lea addresses_A_ht+0xd0f1, %rdx
nop
nop
nop
nop
xor $4435, %rbx
mov $0x6162636465666768, %r12
movq %r12, %xmm6
movups %xmm6, (%rdx)
nop
nop
nop
nop
xor %rbx, %rbx
// 16-byte load from addresses_WT_ht+0x1dc9a; extract low qword.
lea addresses_WT_ht+0x1dc9a, %rax
sub $15569, %rbx
movups (%rax), %xmm0
vpextrq $0, %xmm0, %r14
nop
nop
nop
xor %r14, %r14
// 8-byte load from addresses_normal_ht+0x13b1a.
lea addresses_normal_ht+0x13b1a, %r11
sub %r14, %r14
mov (%r11), %rax
nop
nop
nop
nop
xor %r14, %r14
// 2-byte load from addresses_WT_ht+0x1e042.
lea addresses_WT_ht+0x1e042, %r11
nop
cmp %r13, %r13
mov (%r11), %r12w
nop
cmp $27407, %r12
// 8-byte load from addresses_D_ht+0xfc9a.
lea addresses_D_ht+0xfc9a, %r11
nop
cmp %rbx, %rbx
mov (%r11), %r14
nop
nop
nop
nop
nop
cmp %r14, %r14
// Restore clobbered registers in reverse order and return.
pop %rdx
pop %rbx
pop %rax
pop %r14
pop %r13
pop %r12
pop %r11
ret
.global s_faulty_load
s_faulty_load:
// Auto-generated "faulty load" gadget: performs a load whose value is
// leaked through a cache side channel by indexing into the `oracles`
// array (classic transient-execution probe shape).
push %r10
push %r11
push %r13
push %rbp
push %rbx
push %rcx
push %rdx

// Store
// 2-byte store to the small absolute address 0x89a — presumably the
// fault-inducing access in this variant (TODO confirm against generator).
mov $0x89a, %rcx
nop
nop
nop
dec %r11
movw $0x5152, (%rcx)
nop
nop
add $11768, %r13

// Faulty Load
lea addresses_D+0x1b09a, %rcx
nop
cmp %rbp, %rbp
mov (%rcx), %r10
// Encode the low byte of the loaded value into the oracle:
// index = (value & 0xff) << 12, i.e. one 4 KiB page per byte value.
lea oracles, %r11
and $0xff, %r10
shlq $12, %r10
mov (%r11,%r10,1), %r10
pop %rdx
pop %rcx
pop %rbx
pop %rbp
pop %r13
pop %r11
pop %r10
ret
/*
<gen_faulty_load>
[REF]
{'src': {'type': 'addresses_D', 'same': True, 'size': 16, 'congruent': 0, 'NT': False, 'AVXalign': False}, 'OP': 'LOAD'}
{'dst': {'type': 'addresses_P', 'same': False, 'size': 2, 'congruent': 8, 'NT': False, 'AVXalign': True}, 'OP': 'STOR'}
[Faulty Load]
{'src': {'type': 'addresses_D', 'same': True, 'size': 8, 'congruent': 0, 'NT': False, 'AVXalign': False}, 'OP': 'LOAD'}
<gen_prepare_buffer>
{'dst': {'type': 'addresses_WC_ht', 'same': False, 'size': 8, 'congruent': 2, 'NT': False, 'AVXalign': False}, 'OP': 'STOR'}
{'dst': {'type': 'addresses_UC_ht', 'same': True, 'size': 2, 'congruent': 1, 'NT': False, 'AVXalign': False}, 'OP': 'STOR'}
{'dst': {'type': 'addresses_normal_ht', 'same': False, 'size': 4, 'congruent': 4, 'NT': False, 'AVXalign': False}, 'OP': 'STOR'}
{'dst': {'type': 'addresses_A_ht', 'same': False, 'size': 16, 'congruent': 0, 'NT': False, 'AVXalign': False}, 'OP': 'STOR'}
{'src': {'type': 'addresses_WT_ht', 'same': False, 'size': 16, 'congruent': 9, 'NT': False, 'AVXalign': False}, 'OP': 'LOAD'}
{'src': {'type': 'addresses_normal_ht', 'same': False, 'size': 8, 'congruent': 7, 'NT': False, 'AVXalign': False}, 'OP': 'LOAD'}
{'src': {'type': 'addresses_WT_ht', 'same': False, 'size': 2, 'congruent': 1, 'NT': False, 'AVXalign': False}, 'OP': 'LOAD'}
{'src': {'type': 'addresses_D_ht', 'same': False, 'size': 8, 'congruent': 10, 'NT': False, 'AVXalign': False}, 'OP': 'LOAD'}
{'36': 21829}
36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 
36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36 36
*/
| 38.280255 | 2,999 | 0.654742 |
0a21adb1ec8937cf245b6cfbe50e182348cc1b5c | 53,004 | c | C | release/src/router/samba/source/nmbd/nmbd_winsserver.c | ghsecuritylab/tomato_egg | 50473a46347f4631eb4878a0f47955cc64c87293 | [
"FSFAP"
] | 278 | 2015-11-03T03:01:20.000Z | 2022-01-20T18:21:05.000Z | release/src/router/samba/source/nmbd/nmbd_winsserver.c | ghsecuritylab/tomato_egg | 50473a46347f4631eb4878a0f47955cc64c87293 | [
"FSFAP"
] | 374 | 2015-11-03T12:37:22.000Z | 2021-12-17T14:18:08.000Z | release/src/router/samba/source/nmbd/nmbd_winsserver.c | ghsecuritylab/tomato_egg | 50473a46347f4631eb4878a0f47955cc64c87293 | [
"FSFAP"
] | 96 | 2015-11-22T07:47:26.000Z | 2022-01-20T19:52:19.000Z | /*
Unix SMB/Netbios implementation.
Version 1.9.
NBT netbios routines and daemon - version 2
Copyright (C) Jeremy Allison 1994-1998
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*/
#include "includes.h"
#define WINS_LIST "wins.dat"
#define WINS_VERSION 1
extern int DEBUGLEVEL;
extern struct in_addr ipzero;
/****************************************************************************
possibly call the WINS hook external program when a WINS change is made
*****************************************************************************/
static void wins_hook(char *operation, struct name_record *namerec, int ttl)
{
  pstring command;
  char *cmd = lp_wins_hook();
  char *p;
  int i;

  /* No hook program configured - nothing to do. */
  if (!cmd || !*cmd) return;

  /* Only allow [alnum . _ -] in the name before passing it to a shell
     command, so a registered name cannot inject shell metacharacters. */
  for (p=namerec->name.name; *p; p++) {
    if (!(isalnum((int)*p) || strchr("._-",*p))) {
      DEBUG(3,("not calling wins hook for invalid name %s\n", nmb_namestr(&namerec->name)));
      return;
    }
  }

  /* Build: "<cmd> <operation> <name> <type-in-hex> <ttl>", then append
     every IP address currently held for the name. */
  p = command;
  p += slprintf(p, sizeof(command), "%s %s %s %02x %d",
          cmd,
          operation,
          namerec->name.name,
          namerec->name.name_type,
          ttl);

  for (i=0;i<namerec->data.num_ips;i++) {
    p += slprintf(p, sizeof(command) - (p-command), " %s", inet_ntoa(namerec->data.ip[i]));
  }

  DEBUG(3,("calling wins hook for %s\n", nmb_namestr(&namerec->name)));
  smbrun(command, NULL, False);
}
/****************************************************************************
hash our interfaces and netbios names settings
*****************************************************************************/
static unsigned wins_hash(void)
{
	extern char **my_netbios_names;
	char **name;
	unsigned hash;

	/* Start from the hash of our network interface configuration... */
	hash = iface_hash();

	/* ...mix in each of our own NetBIOS names... */
	for (name = my_netbios_names; *name; name++) {
		hash ^= str_checksum(*name);
	}

	/* ...and finally the workgroup name. */
	hash ^= str_checksum(lp_workgroup());

	return hash;
}
/****************************************************************************
Determine if this packet should be allocated to the WINS server.
*****************************************************************************/
BOOL packet_is_for_wins_server(struct packet_struct *packet)
{
  struct nmb_packet *nmb = &packet->packet.nmb;

  /* Only unicast packets go to a WINS server - and only when we are
     actually running one (wins_server_subnet != NULL). */
  if((wins_server_subnet == NULL) || (nmb->header.nm_flags.bcast == True))
  {
    DEBUG(10, ("packet_is_for_wins_server: failing WINS test #1.\n"));
    return False;
  }

  /* Check for node status requests. */
  if (nmb->question.question_type != QUESTION_TYPE_NB_QUERY)
    return False;

  switch(nmb->header.opcode)
  {
    /*
     * A WINS server issues WACKS, not receives them.
     */
    case NMB_WACK_OPCODE:
      DEBUG(10, ("packet_is_for_wins_server: failing WINS test #2 (WACK).\n"));
      return False;

    /*
     * A WINS server only processes registration and
     * release requests, not responses.
     */
    case NMB_NAME_REG_OPCODE:
    case NMB_NAME_MULTIHOMED_REG_OPCODE:
    case NMB_NAME_REFRESH_OPCODE_8: /* ambiguity in rfc1002 about which is correct. */
    case NMB_NAME_REFRESH_OPCODE_9: /* WinNT uses 8 by default. */
      if(nmb->header.response)
      {
        DEBUG(10, ("packet_is_for_wins_server: failing WINS test #3 (response = 1).\n"));
        return False;
      }
      break;

    case NMB_NAME_RELEASE_OPCODE:
      if(nmb->header.response)
      {
        DEBUG(10, ("packet_is_for_wins_server: failing WINS test #4 (response = 1).\n"));
        return False;
      }
      break;

    /*
     * Only process unicast name queries with rd = 1.
     */
    case NMB_NAME_QUERY_OPCODE:
      if(!nmb->header.response && !nmb->header.nm_flags.recursion_desired)
      {
        DEBUG(10, ("packet_is_for_wins_server: failing WINS test #5 (response = 1).\n"));
        return False;
      }
      break;
  }

  /* Anything not explicitly rejected above is handled by the WINS server. */
  return True;
}
/****************************************************************************
Utility function to decide what ttl to give a register/refresh request.
*****************************************************************************/
static int get_ttl_from_packet(struct nmb_packet *nmb)
{
	/* Clamp the client-requested ttl into the configured WINS range.
	   The floor is applied before the ceiling, so if the configured
	   minimum exceeds the maximum, the maximum wins. */
	int requested = nmb->additional->ttl;

	if (requested < lp_min_wins_ttl())
		requested = lp_min_wins_ttl();

	if (requested > lp_max_wins_ttl())
		requested = lp_max_wins_ttl();

	return requested;
}
/****************************************************************************
Load or create the WINS database.
*****************************************************************************/
/*
 * Read the persisted WINS database (wins.dat in the lock directory) and
 * re-register every entry that still has at least 60 seconds to live.
 *
 * Returns True on success (a missing or invalid wins.dat is NOT an error -
 * we simply start with an empty database). Returns False only on a fatal
 * allocation failure.
 *
 * Fix over the previous revision: the open file handle was leaked when
 * the ip_list allocation failed; it is now closed before returning.
 */
BOOL initialise_wins(void)
{
  pstring fname;
  time_t time_now = time(NULL);
  FILE *fp;
  pstring line;

  if(!lp_we_are_a_wins_server())
    return True;

  add_samba_names_to_subnet(wins_server_subnet);

  /* Build the path <lockdir>/wins.dat. */
  pstrcpy(fname,lp_lockdir());
  trim_string(fname,NULL,"/");
  pstrcat(fname,"/");
  pstrcat(fname,WINS_LIST);

  if((fp = sys_fopen(fname,"r")) == NULL)
  {
    DEBUG(2,("initialise_wins: Can't open wins database file %s. Error was %s\n",
           fname, strerror(errno) ));
    /* Not fatal - start with an empty database. */
    return True;
  }

  while (!feof(fp))
  {
    pstring name_str, ip_str, ttl_str, nb_flags_str;
    unsigned int num_ips;
    pstring name;
    struct in_addr *ip_list;
    int type = 0;
    int nb_flags;
    int ttl;
    char *ptr;
    char *p;
    BOOL got_token;
    BOOL was_ip;
    int i;
    unsigned hash;
    int version;

    /* Read a line from the wins.dat file. Strips whitespace
       from the beginning and end of the line.
     */
    if (!fgets_slash(line,sizeof(pstring),fp))
      continue;

    if (*line == '#')
      continue;

    /* The VERSION line must match both our file format version and the
       hash of our current interface/name configuration, otherwise the
       whole file is considered stale and discarded. */
    if (strncmp(line,"VERSION ", 8) == 0) {
      if (sscanf(line,"VERSION %d %u", &version, &hash) != 2 ||
          version != WINS_VERSION ||
          hash != wins_hash()) {
        DEBUG(0,("Discarding invalid wins.dat file [%s]\n",line));
        fclose(fp);
        return True;
      }
      continue;
    }

    ptr = line;

    /*
     * Now we handle multiple IP addresses per name we need
     * to iterate over the line twice. The first time to
     * determine how many IP addresses there are, the second
     * time to actually parse them into the ip_list array.
     */

    if (!next_token(&ptr,name_str,NULL,sizeof(name_str)))
    {
      DEBUG(0,("initialise_wins: Failed to parse name when parsing line %s\n", line ));
      continue;
    }

    if (!next_token(&ptr,ttl_str,NULL,sizeof(ttl_str)))
    {
      DEBUG(0,("initialise_wins: Failed to parse time to live when parsing line %s\n", line ));
      continue;
    }

    /*
     * Determine the number of IP addresses per line.
     */
    num_ips = 0;
    do
    {
      got_token = next_token(&ptr,ip_str,NULL,sizeof(ip_str));
      was_ip = False;

      if(got_token && strchr(ip_str, '.'))
      {
        num_ips++;
        was_ip = True;
      }
    } while( got_token && was_ip);

    if(num_ips == 0)
    {
      DEBUG(0,("initialise_wins: Missing IP address when parsing line %s\n", line ));
      continue;
    }

    if(!got_token)
    {
      DEBUG(0,("initialise_wins: Missing nb_flags when parsing line %s\n", line ));
      continue;
    }

    /* Allocate the space for the ip_list. */
    if((ip_list = (struct in_addr *)malloc( num_ips * sizeof(struct in_addr))) == NULL)
    {
      DEBUG(0,("initialise_wins: Malloc fail !\n"));
      fclose(fp); /* Don't leak the open database file on failure. */
      return False;
    }

    /* Reset and re-parse the line. */
    ptr = line;
    next_token(&ptr,name_str,NULL,sizeof(name_str));
    next_token(&ptr,ttl_str,NULL,sizeof(ttl_str));
    for(i = 0; i < num_ips; i++)
    {
      next_token(&ptr, ip_str, NULL, sizeof(ip_str));
      ip_list[i] = *interpret_addr2(ip_str);
    }
    next_token(&ptr,nb_flags_str,NULL, sizeof(nb_flags_str));

    /*
     * Deal with SELF or REGISTER name encoding. Default is REGISTER
     * for compatibility with old nmbds.
     */

    if(nb_flags_str[strlen(nb_flags_str)-1] == 'S')
    {
      DEBUG(5,("initialise_wins: Ignoring SELF name %s\n", line));
      free((char *)ip_list);
      continue;
    }

    if(nb_flags_str[strlen(nb_flags_str)-1] == 'R')
      nb_flags_str[strlen(nb_flags_str)-1] = '\0';

    /* Netbios name. # divides the name from the type (hex): netbios#xx */
    pstrcpy(name,name_str);

    if((p = strchr(name,'#')) != NULL)
    {
      *p = 0;
      sscanf(p+1,"%x",&type);
    }

    /* Decode the netbios flags (hex) and the time-to-live (in seconds). */
    sscanf(nb_flags_str,"%x",&nb_flags);
    sscanf(ttl_str,"%d",&ttl);

    /* add all entries that have 60 seconds or more to live */
    if ((ttl - 60) > time_now || ttl == PERMANENT_TTL)
    {
      /* Stored ttls are absolute expiry times; convert back to relative. */
      if(ttl != PERMANENT_TTL)
        ttl -= time_now;

      DEBUG( 4, ("initialise_wins: add name: %s#%02x ttl = %d first IP %s flags = %2x\n",
                 name, type, ttl, inet_ntoa(ip_list[0]), nb_flags));

      (void)add_name_to_subnet( wins_server_subnet, name, type, nb_flags,
                                ttl, REGISTER_NAME, num_ips, ip_list );
    }
    else
    {
      DEBUG(4, ("initialise_wins: not adding name (ttl problem) %s#%02x ttl = %d first IP %s flags = %2x\n",
                name, type, ttl, inet_ntoa(ip_list[0]), nb_flags));
    }

    free((char *)ip_list);
  }

  fclose(fp);
  return True;
}
/****************************************************************************
Send a WINS WACK (Wait ACKnowledgement) response.
**************************************************************************/
static void send_wins_wack_response(int ttl, struct packet_struct *p)
{
  struct nmb_packet *nmb = &p->packet.nmb;
  unsigned char rdata[2];

  rdata[0] = rdata[1] = 0;

  /* Taken from nmblib.c - we need to send back almost
     identical bytes from the requesting packet header. */

  /* Byte 0: original opcode in bits 3-6, plus AA (if a response),
     TC and RD flags copied from the request. */
  rdata[0] = (nmb->header.opcode & 0xF) << 3;
  if (nmb->header.nm_flags.authoritative &&
      nmb->header.response) rdata[0] |= 0x4;
  if (nmb->header.nm_flags.trunc) rdata[0] |= 0x2;
  if (nmb->header.nm_flags.recursion_desired) rdata[0] |= 0x1;
  /* Byte 1: RA (if a response) and broadcast flags. */
  if (nmb->header.nm_flags.recursion_available &&
      nmb->header.response) rdata[1] |= 0x80;
  if (nmb->header.nm_flags.bcast) rdata[1] |= 0x10;

  reply_netbios_packet(p,                /* Packet to reply to. */
                       0,                /* Result code. */
                       NMB_WAIT_ACK,     /* nmbd type code. */
                       NMB_WACK_OPCODE,  /* opcode. */
                       ttl,              /* ttl. */
                       (char *)rdata,    /* data to send. */
                       2);               /* data length. */
}
/****************************************************************************
Send a WINS name registration response.
**************************************************************************/
static void send_wins_name_registration_response(int rcode, int ttl, struct packet_struct *p)
{
  struct nmb_packet *nmb = &p->packet.nmb;
  char reply_data[6];

  /* Echo the nb_flags + IP rdata from the request back to the client. */
  memcpy(reply_data, nmb->additional->rdata, sizeof(reply_data));

  reply_netbios_packet(p,                    /* Packet to reply to. */
                       rcode,                /* Result code. */
                       WINS_REG,             /* nmbd type code. */
                       NMB_NAME_REG_OPCODE,  /* opcode. */
                       ttl,                  /* ttl. */
                       reply_data,           /* data to send. */
                       sizeof(reply_data));  /* data length. */
}
/***********************************************************************
Deal with a name refresh request to a WINS server.
************************************************************************/
void wins_process_name_refresh_request(struct subnet_record *subrec,
                                       struct packet_struct *p)
{
  struct nmb_packet *nmb = &p->packet.nmb;
  struct nmb_name *question = &nmb->question.question_name;
  BOOL bcast = nmb->header.nm_flags.bcast;
  uint16 nb_flags = get_nb_flags(nmb->additional->rdata);
  BOOL group = (nb_flags & NB_GROUP) ? True : False;
  struct name_record *namerec = NULL;
  int ttl = get_ttl_from_packet(nmb);
  struct in_addr from_ip;

  /* The refreshing client's IP address is carried in the additional rdata. */
  putip((char *)&from_ip,&nmb->additional->rdata[2]);

  if(bcast)
  {
    /*
     * We should only get unicast name refresh packets here.
     * Anyone trying to refresh broadcast should not be going to a WINS
     * server. Log an error here.
     */

    DEBUG(0,("wins_process_name_refresh_request: broadcast name refresh request \
received for name %s from IP %s on subnet %s. Error - should not be sent to WINS server\n",
          nmb_namestr(question), inet_ntoa(from_ip), subrec->subnet_name));
    return;
  }

  DEBUG(3,("wins_process_name_refresh_request: Name refresh for name %s \
IP %s\n", nmb_namestr(question), inet_ntoa(from_ip) ));

  /*
   * See if the name already exists.
   */

  namerec = find_name_on_subnet(subrec, question, FIND_ANY_NAME);

  /*
   * If this is a refresh request and the name doesn't exist then
   * treat it like a registration request. This allows us to recover
   * from errors (tridge)
   */

  if(namerec == NULL)
  {
    DEBUG(3,("wins_process_name_refresh_request: Name refresh for name %s and \
the name does not exist. Treating as registration.\n", nmb_namestr(question) ));
    wins_process_name_registration_request(subrec,p);
    return;
  }

  /*
   * Check that the group bits for the refreshing name and the
   * name in our database match. A mismatch is a refresh failure.
   */

  if((namerec != NULL) && ((group && !NAME_GROUP(namerec)) || (!group && NAME_GROUP(namerec))) )
  {
    DEBUG(3,("wins_process_name_refresh_request: Name %s group bit = %s \
does not match group bit in WINS for this name.\n", nmb_namestr(question), group ? "True" : "False" ));
    send_wins_name_registration_response(RFS_ERR, 0, p);
    return;
  }

  /*
   * For a unique name check that the person refreshing the name is one of the registered IP
   * addresses. If not - fail the refresh. Do the same for group names with a type of 0x1c.
   * Just return success for unique 0x1d refreshes. For normal group names update the ttl
   * and return success.
   */

  if((!group || (group && (question->name_type == 0x1c))) && find_ip_in_name_record(namerec, from_ip ))
  {
    /*
     * Update the ttl.
     */
    update_name_ttl(namerec, ttl);
    send_wins_name_registration_response(0, ttl, p);
    wins_hook("refresh", namerec, ttl);
    return;
  }
  else if(group)
  {
    /*
     * Normal groups are all registered with an IP address of 255.255.255.255
     * so we can't search for the IP address.
     */
    update_name_ttl(namerec, ttl);
    send_wins_name_registration_response(0, ttl, p);
    return;
  }
  else if(!group && (question->name_type == 0x1d))
  {
    /*
     * Special name type - just pretend the refresh succeeded.
     */
    send_wins_name_registration_response(0, ttl, p);
    return;
  }
  else
  {
    /*
     * Fail the refresh - the requesting IP is not one of the
     * addresses registered for this unique name.
     */

    DEBUG(3,("wins_process_name_refresh_request: Name refresh for name %s with IP %s and \
is IP is not known to the name.\n", nmb_namestr(question), inet_ntoa(from_ip) ));
    send_wins_name_registration_response(RFS_ERR, 0, p);
    return;
  }
}
/***********************************************************************
Deal with a name registration request query success to a client that
owned the name.
We have a locked pointer to the original packet stashed away in the
userdata pointer. The success here is actually a failure as it means
the client we queried wants to keep the name, so we must return
a registration failure to the original requestor.
************************************************************************/
static void wins_register_query_success(struct subnet_record *subrec,
                                        struct userdata_struct *userdata,
                                        struct nmb_name *question_name,
                                        struct in_addr ip,
                                        struct res_rec *answers)
{
  struct packet_struct *orig_reg_packet = NULL;

  DEBUG(3,("wins_register_query_success: Original client at IP %s still wants the \
name %s. Rejecting registration request.\n", inet_ntoa(ip), nmb_namestr(question_name) ));

  /* Recover the locked original registration packet stashed in userdata. */
  memcpy((char *)&orig_reg_packet, userdata->data, sizeof(struct packet_struct *));

  /* The old owner answered our query, so the new registration must fail. */
  send_wins_name_registration_response(RFS_ERR, 0, orig_reg_packet);

  /* Unlock and release the stashed packet. */
  orig_reg_packet->locked = False;
  free_packet(orig_reg_packet);
}
/***********************************************************************
Deal with a name registration request query failure to a client that
owned the name.
We have a locked pointer to the original packet stashed away in the
userdata pointer. The failure here is actually a success as it means
the client we queried didn't want to keep the name, so we can remove
the old name record and then successfully add the new name.
************************************************************************/
static void wins_register_query_fail(struct subnet_record *subrec,
                                     struct response_record *rrec,
                                     struct nmb_name *question_name,
                                     int rcode)
{
  struct userdata_struct *userdata = rrec->userdata;
  struct packet_struct *orig_reg_packet;
  struct name_record *namerec = NULL;

  /* Recover the locked original registration packet from the userdata. */
  memcpy((char *)&orig_reg_packet, userdata->data, sizeof(struct packet_struct *));

  /*
   * We want to just add the name, as we now know the original owner
   * didn't want it. But we can't just do that as an arbitrary
   * amount of time may have taken place between the name query
   * request and this timeout/error response. So we check that
   * the name still exists and is in the same state - if so
   * we remove it and call wins_process_name_registration_request()
   * as we know it will do the right thing now.
   */

  namerec = find_name_on_subnet(subrec, question_name, FIND_ANY_NAME);

  if( (namerec != NULL)
   && (namerec->data.source == REGISTER_NAME)
   && ip_equal(rrec->packet->ip, *namerec->data.ip) )
  {
    remove_name_from_namelist( subrec, namerec);
    namerec = NULL;
  }

  if(namerec == NULL)
    wins_process_name_registration_request(subrec, orig_reg_packet);
  else
    DEBUG(2,("wins_register_query_fail: The state of the WINS database changed between \
querying for name %s in order to replace it and this reply.\n", nmb_namestr(question_name) ));

  /* Unlock and release the stashed packet in either case. */
  orig_reg_packet->locked = False;
  free_packet(orig_reg_packet);
}
/***********************************************************************
Deal with a name registration request to a WINS server.
Use the following pseudocode :
registering_group
|
|
+--------name exists
| |
| |
| +--- existing name is group
| | |
| | |
| | +--- add name (return).
| |
| |
| +--- existing name is unique
| |
| |
| +--- query existing owner (return).
|
|
+--------name doesn't exist
|
|
+--- add name (return).
registering_unique
|
|
+--------name exists
| |
| |
| +--- existing name is group
| | |
| | |
| | +--- fail add (return).
| |
| |
| +--- existing name is unique
| |
| |
| +--- query existing owner (return).
|
|
+--------name doesn't exist
|
|
+--- add name (return).
As can be seen from the above, the two cases may be collapsed onto each
other with the exception of the case where the name already exists and
is a group name. This case we handle with an if statement.
************************************************************************/
void wins_process_name_registration_request(struct subnet_record *subrec,
                                            struct packet_struct *p)
{
  struct nmb_packet *nmb = &p->packet.nmb;
  struct nmb_name *question = &nmb->question.question_name;
  BOOL bcast = nmb->header.nm_flags.bcast;
  uint16 nb_flags = get_nb_flags(nmb->additional->rdata);
  int ttl = get_ttl_from_packet(nmb);
  struct name_record *namerec = NULL;
  struct in_addr from_ip;
  BOOL registering_group_name = (nb_flags & NB_GROUP) ? True : False;

  /* The registering client's IP address is carried in the additional rdata. */
  putip((char *)&from_ip,&nmb->additional->rdata[2]);

  if(bcast)
  {
    /*
     * We should only get unicast name registration packets here.
     * Anyone trying to register broadcast should not be going to a WINS
     * server. Log an error here.
     */

    DEBUG(0,("wins_process_name_registration_request: broadcast name registration request \
received for name %s from IP %s on subnet %s. Error - should not be sent to WINS server\n",
          nmb_namestr(question), inet_ntoa(from_ip), subrec->subnet_name));
    return;
  }

  DEBUG(3,("wins_process_name_registration_request: %s name registration for name %s \
IP %s\n", registering_group_name ? "Group" : "Unique", nmb_namestr(question), inet_ntoa(from_ip) ));

  /*
   * See if the name already exists.
   */

  namerec = find_name_on_subnet(subrec, question, FIND_ANY_NAME);

  /*
   * Deal with the case where the name found was a dns entry.
   * Remove it as we now have a NetBIOS client registering the
   * name.
   */

  if( (namerec != NULL)
   && ( (namerec->data.source == DNS_NAME)
     || (namerec->data.source == DNSFAIL_NAME) ) )
  {
    DEBUG(5,("wins_process_name_registration_request: Name (%s) in WINS was \
a dns lookup - removing it.\n", nmb_namestr(question) ));
    remove_name_from_namelist( subrec, namerec );
    namerec = NULL;
  }

  /*
   * Reject if the name exists and is not a REGISTER_NAME.
   * (ie. Don't allow any static names to be overwritten.)
   */

  if((namerec != NULL) && (namerec->data.source != REGISTER_NAME))
  {
    DEBUG( 3, ( "wins_process_name_registration_request: Attempt \
to register name %s. Name already exists in WINS with source type %d.\n",
                nmb_namestr(question), namerec->data.source ));
    send_wins_name_registration_response(RFS_ERR, 0, p);
    return;
  }

  /*
   * Special policy decisions based on MS documentation.
   * 1). All group names (except names ending in 0x1c) are added as 255.255.255.255.
   * 2). All unique names ending in 0x1d are ignored, although a positive response is sent.
   */

  /*
   * A group name is always added as the local broadcast address, except
   * for group names ending in 0x1c.
   * Group names with type 0x1c are registered with individual IP addresses.
   */

  if(registering_group_name && (question->name_type != 0x1c))
    from_ip = *interpret_addr2("255.255.255.255");

  /*
   * Ignore all attempts to register a unique 0x1d name, although return success.
   */

  if(!registering_group_name && (question->name_type == 0x1d))
  {
    DEBUG(3,("wins_process_name_registration_request: Ignoring request \
to register name %s from IP %s.\n", nmb_namestr(question), inet_ntoa(p->ip) ));
    send_wins_name_registration_response(0, ttl, p);
    return;
  }

  /*
   * Next two cases are the 'if statement' mentioned above.
   */

  if((namerec != NULL) && NAME_GROUP(namerec))
  {
    if(registering_group_name)
    {
      /*
       * If we are adding a group name, the name exists and is also a group entry just add this
       * IP address to it and update the ttl.
       */

      DEBUG(3,("wins_process_name_registration_request: Adding IP %s to group name %s.\n",
            inet_ntoa(from_ip), nmb_namestr(question) ));
      /*
       * Check the ip address is not already in the group.
       */
      if(!find_ip_in_name_record(namerec, from_ip))
        add_ip_to_name_record(namerec, from_ip);
      update_name_ttl(namerec, ttl);
      send_wins_name_registration_response(0, ttl, p);
      return;
    }
    else
    {
      /*
       * If we are adding a unique name, the name exists in the WINS db
       * and is a group name then reject the registration.
       */

      DEBUG(3,("wins_process_name_registration_request: Attempt to register name %s. Name \
already exists in WINS as a GROUP name.\n", nmb_namestr(question) ));
      send_wins_name_registration_response(RFS_ERR, 0, p);
      return;
    }
  }

  /*
   * From here on down we know that if the name exists in the WINS db it is
   * a unique name, not a group name.
   */

  /*
   * If the name exists and is one of our names then check the
   * registering IP address. If it's not one of ours then automatically
   * reject without doing the query - we know we will reject it.
   */

  if((namerec != NULL) && (is_myname(namerec->name.name)) )
  {
    if(!ismyip(from_ip))
    {
      DEBUG(3,("wins_process_name_registration_request: Attempt to register name %s. Name \
is one of our (WINS server) names. Denying registration.\n", nmb_namestr(question) ));
      send_wins_name_registration_response(RFS_ERR, 0, p);
      return;
    }
    else
    {
      /*
       * It's one of our names and one of our IP's - update the ttl.
       */
      update_name_ttl(namerec, ttl);
      send_wins_name_registration_response(0, ttl, p);
      wins_hook("refresh", namerec, ttl);
      return;
    }
  }

  /*
   * If the name exists and it is a unique registration and the registering IP
   * is the same as the (single) already registered IP then just update the ttl.
   */

  if( !registering_group_name
   && (namerec != NULL)
   && (namerec->data.num_ips == 1)
   && ip_equal( namerec->data.ip[0], from_ip ) )
  {
    update_name_ttl( namerec, ttl );
    send_wins_name_registration_response( 0, ttl, p );
    wins_hook("refresh", namerec, ttl);
    return;
  }

  /*
   * Finally if the name exists do a query to the registering machine
   * to see if they still claim to have the name.
   */

  if( namerec != NULL )
  {
    /* Raw storage for a userdata_struct plus a packet pointer, carved
       out of the stack with correct alignment for long *. */
    long *ud[(sizeof(struct userdata_struct) + sizeof(struct packet_struct *))/sizeof(long *) + 1];
    struct userdata_struct *userdata = (struct userdata_struct *)ud;

    /*
     * First send a WACK to the registering machine.
     */

    send_wins_wack_response(60, p);

    /*
     * When the reply comes back we need the original packet.
     * Lock this so it won't be freed and then put it into
     * the userdata structure.
     */

    p->locked = True;

    /* NOTE(review): this re-assignment duplicates the initializer of
       `userdata` above - it is redundant but harmless. */
    userdata = (struct userdata_struct *)ud;

    userdata->copy_fn = NULL;
    userdata->free_fn = NULL;
    userdata->userdata_len = sizeof(struct packet_struct *);
    memcpy(userdata->data, (char *)&p, sizeof(struct packet_struct *) );

    /*
     * Use the new call to send a query directly to an IP address.
     * This sends the query directly to the IP address, and ensures
     * the recursion desired flag is not set (you were right Luke :-).
     * This function should *only* be called from the WINS server
     * code. JRA.
     */

    query_name_from_wins_server( *namerec->data.ip,
                                 question->name,
                                 question->name_type,
                                 wins_register_query_success,
                                 wins_register_query_fail,
                                 userdata );
    return;
  }

  /*
   * Name did not exist - add it.
   */

  (void)add_name_to_subnet( subrec, question->name, question->name_type,
                            nb_flags, ttl, REGISTER_NAME, 1, &from_ip );

  if ((namerec = find_name_on_subnet(subrec, question, FIND_ANY_NAME))) {
    wins_hook("add", namerec, ttl);
  }

  send_wins_name_registration_response(0, ttl, p);
}
/***********************************************************************
 Deal with a multihomed name query success to the machine that
requested the multihomed name registration.
We have a locked pointer to the original packet stashed away in the
userdata pointer.
************************************************************************/
static void wins_multihomed_register_query_success(struct subnet_record *subrec,
                                                   struct userdata_struct *userdata,
                                                   struct nmb_name *question_name,
                                                   struct in_addr ip,
                                                   struct res_rec *answers)
{
	struct packet_struct *orig_reg_packet; /* Original (locked) registration packet. */
	struct nmb_packet *nmb;
	struct name_record *namerec = NULL;
	struct in_addr from_ip;
	int ttl;

	/* Recover the locked original registration packet stashed in userdata. */
	memcpy((char *)&orig_reg_packet, userdata->data, sizeof(struct packet_struct *));

	nmb = &orig_reg_packet->packet.nmb;

	/* The registering IP lives at offset 2 of the additional record rdata. */
	putip((char *)&from_ip,&nmb->additional->rdata[2]);
	ttl = get_ttl_from_packet(nmb);

	/*
	 * We want to just add the new IP, as we now know the requesting
	 * machine claims to own it. But we can't just do that as an arbitary
	 * amount of time may have taken place between the name query
	 * request and this response. So we check that
	 * the name still exists and is in the same state - if so
	 * we just add the extra IP and update the ttl.
	 */
	namerec = find_name_on_subnet(subrec, question_name, FIND_ANY_NAME);

	if( (namerec == NULL) || (namerec->data.source != REGISTER_NAME) )
	{
		/* Name vanished or changed state meanwhile: fail the registration
		 * and release the packet we locked before querying. */
		DEBUG(3,("wins_multihomed_register_query_success: name %s is not in the correct state to add \
a subsequent IP addess.\n", nmb_namestr(question_name) ));
		send_wins_name_registration_response(RFS_ERR, 0, orig_reg_packet);
		orig_reg_packet->locked = False;
		free_packet(orig_reg_packet);
		return;
	}

	/* Record the new IP only if it is not already listed for this name. */
	if(!find_ip_in_name_record(namerec, from_ip))
		add_ip_to_name_record(namerec, from_ip);
	update_name_ttl(namerec, ttl);
	send_wins_name_registration_response(0, ttl, orig_reg_packet);
	wins_hook("add", namerec, ttl);

	/* Done with the stashed packet - unlock and free it. */
	orig_reg_packet->locked = False;
	free_packet(orig_reg_packet);
}
/***********************************************************************
Deal with a name registration request query failure to a client that
owned the name.
We have a locked pointer to the original packet stashed away in the
userdata pointer.
************************************************************************/
static void wins_multihomed_register_query_fail(struct subnet_record *subrec,
                                                struct response_record *rrec,
                                                struct nmb_name *question_name,
                                                int rcode)
{
	struct userdata_struct *userdata = rrec->userdata;
	struct packet_struct *orig_reg_packet; /* Original (locked) registration packet. */

	/* Recover the locked original registration packet stashed in userdata. */
	memcpy((char *)&orig_reg_packet, userdata->data, sizeof(struct packet_struct *));

	DEBUG(3,("wins_multihomed_register_query_fail: Registering machine at IP %s failed to answer \
query successfully for name %s.\n", inet_ntoa(orig_reg_packet->ip), nmb_namestr(question_name) ));

	/* The registering machine did not answer the verification query:
	 * reject, then unlock and free the packet locked while the query
	 * was outstanding. */
	send_wins_name_registration_response(RFS_ERR, 0, orig_reg_packet);
	orig_reg_packet->locked = False;
	free_packet(orig_reg_packet);
	return;
}
/***********************************************************************
Deal with a multihomed name registration request to a WINS server.
These cannot be group name registrations.
***********************************************************************/
void wins_process_multihomed_name_registration_request( struct subnet_record *subrec,
struct packet_struct *p)
{
struct nmb_packet *nmb = &p->packet.nmb;
struct nmb_name *question = &nmb->question.question_name;
BOOL bcast = nmb->header.nm_flags.bcast;
uint16 nb_flags = get_nb_flags(nmb->additional->rdata);
int ttl = get_ttl_from_packet(nmb);
struct name_record *namerec = NULL;
struct in_addr from_ip;
BOOL group = (nb_flags & NB_GROUP) ? True : False;;
putip((char *)&from_ip,&nmb->additional->rdata[2]);
if(bcast)
{
/*
* We should only get unicast name registration packets here.
* Anyone trying to register broadcast should not be going to a WINS
* server. Log an error here.
*/
DEBUG(0,("wins_process_multihomed_name_registration_request: broadcast name registration request \
received for name %s from IP %s on subnet %s. Error - should not be sent to WINS server\n",
nmb_namestr(question), inet_ntoa(from_ip), subrec->subnet_name));
return;
}
/*
* Only unique names should be registered multihomed.
*/
if(group)
{
DEBUG(0,("wins_process_multihomed_name_registration_request: group name registration request \
received for name %s from IP %s on subnet %s. Errror - group names should not be multihomed.\n",
nmb_namestr(question), inet_ntoa(from_ip), subrec->subnet_name));
return;
}
DEBUG(3,("wins_process_multihomed_name_registration_request: name registration for name %s \
IP %s\n", nmb_namestr(question), inet_ntoa(from_ip) ));
/*
* Deal with policy regarding 0x1d names.
*/
if(question->name_type == 0x1d)
{
DEBUG(3,("wins_process_multihomed_name_registration_request: Ignoring request \
to register name %s from IP %s.", nmb_namestr(question), inet_ntoa(p->ip) ));
send_wins_name_registration_response(0, ttl, p);
return;
}
/*
* See if the name already exists.
*/
namerec = find_name_on_subnet(subrec, question, FIND_ANY_NAME);
/*
* Deal with the case where the name found was a dns entry.
* Remove it as we now have a NetBIOS client registering the
* name.
*/
if( (namerec != NULL)
&& ( (namerec->data.source == DNS_NAME)
|| (namerec->data.source == DNSFAIL_NAME) ) )
{
DEBUG(5,("wins_process_multihomed_name_registration_request: Name (%s) in WINS was a dns lookup \
- removing it.\n", nmb_namestr(question) ));
remove_name_from_namelist( subrec, namerec);
namerec = NULL;
}
/*
* Reject if the name exists and is not a REGISTER_NAME.
* (ie. Don't allow any static names to be overwritten.
*/
if( (namerec != NULL) && (namerec->data.source != REGISTER_NAME) )
{
DEBUG( 3, ( "wins_process_multihomed_name_registration_request: Attempt \
to register name %s. Name already exists in WINS with source type %d.\n",
nmb_namestr(question), namerec->data.source ));
send_wins_name_registration_response(RFS_ERR, 0, p);
return;
}
/*
* Reject if the name exists and is a GROUP name.
*/
if((namerec != NULL) && NAME_GROUP(namerec))
{
DEBUG(3,("wins_process_multihomed_name_registration_request: Attempt to register name %s. Name \
already exists in WINS as a GROUP name.\n", nmb_namestr(question) ));
send_wins_name_registration_response(RFS_ERR, 0, p);
return;
}
/*
* From here on down we know that if the name exists in the WINS db it is
* a unique name, not a group name.
*/
/*
* If the name exists and is one of our names then check the
* registering IP address. If it's not one of ours then automatically
* reject without doing the query - we know we will reject it.
*/
if((namerec != NULL) && (is_myname(namerec->name.name)) )
{
if(!ismyip(from_ip))
{
DEBUG(3,("wins_process_multihomed_name_registration_request: Attempt to register name %s. Name \
is one of our (WINS server) names. Denying registration.\n", nmb_namestr(question) ));
send_wins_name_registration_response(RFS_ERR, 0, p);
return;
}
else
{
/*
* It's one of our names and one of our IP's. Ensure the IP is in the record and
* update the ttl.
*/
if(!find_ip_in_name_record(namerec, from_ip)) {
add_ip_to_name_record(namerec, from_ip);
wins_hook("add", namerec, ttl);
} else {
wins_hook("refresh", namerec, ttl);
}
update_name_ttl(namerec, ttl);
send_wins_name_registration_response(0, ttl, p);
return;
}
}
/*
* If the name exists check if the IP address is already registered
* to that name. If so then update the ttl and reply success.
*/
if((namerec != NULL) && find_ip_in_name_record(namerec, from_ip))
{
update_name_ttl(namerec, ttl);
send_wins_name_registration_response(0, ttl, p);
wins_hook("refresh", namerec, ttl);
return;
}
/*
* If the name exists do a query to the owner
* to see if they still want the name.
*/
if(namerec != NULL)
{
long *ud[(sizeof(struct userdata_struct) + sizeof(struct packet_struct *))/sizeof(long *) + 1];
struct userdata_struct *userdata = (struct userdata_struct *)ud;
/*
* First send a WACK to the registering machine.
*/
send_wins_wack_response(60, p);
/*
* When the reply comes back we need the original packet.
* Lock this so it won't be freed and then put it into
* the userdata structure.
*/
p->locked = True;
userdata = (struct userdata_struct *)ud;
userdata->copy_fn = NULL;
userdata->free_fn = NULL;
userdata->userdata_len = sizeof(struct packet_struct *);
memcpy(userdata->data, (char *)&p, sizeof(struct packet_struct *) );
/*
* Use the new call to send a query directly to an IP address.
* This sends the query directly to the IP address, and ensures
* the recursion desired flag is not set (you were right Luke :-).
* This function should *only* be called from the WINS server
* code. JRA.
* Note that this packet is sent to the current owner of the name,
* not the person who sent the packet.
*/
query_name_from_wins_server( namerec->data.ip[0],
question->name,
question->name_type,
wins_multihomed_register_query_success,
wins_multihomed_register_query_fail,
userdata );
return;
}
/*
* Name did not exist - add it.
*/
(void)add_name_to_subnet( subrec, question->name, question->name_type,
nb_flags, ttl, REGISTER_NAME, 1, &from_ip );
if ((namerec = find_name_on_subnet(subrec, question, FIND_ANY_NAME))) {
wins_hook("add", namerec, ttl);
}
send_wins_name_registration_response(0, ttl, p);
}
/***********************************************************************
Deal with the special name query for *<1b>.
***********************************************************************/
static void process_wins_dmb_query_request(struct subnet_record *subrec,
                                           struct packet_struct *p)
{
	struct name_record *namerec = NULL;
	char *prdata;   /* Reply payload: 6 bytes (2 flag bytes + 4-byte IP) per address. */
	int num_ips;

	/*
	 * Go through all the names in the WINS db looking for those
	 * ending in <1b>. Use this to calculate the number of IP
	 * addresses we need to return.
	 */
	num_ips = 0;
	for( namerec = (struct name_record *)ubi_trFirst( subrec->namelist );
	     namerec;
	     namerec = (struct name_record *)ubi_trNext( namerec ) )
	{
		if( namerec->name.name_type == 0x1b )
			num_ips += namerec->data.num_ips;
	}

	if(num_ips == 0)
	{
		/*
		 * There are no 0x1b names registered. Return name query fail.
		 */
		send_wins_name_query_response(NAM_ERR, p, NULL);
		return;
	}

	/* One 6-byte entry per IP address found above. */
	if((prdata = (char *)malloc( num_ips * 6 )) == NULL)
	{
		DEBUG(0,("process_wins_dmb_query_request: Malloc fail !.\n"));
		return;
	}

	/*
	 * Go through all the names again in the WINS db looking for those
	 * ending in <1b>. Add their IP addresses into the list we will
	 * return.
	 */
	num_ips = 0;
	for( namerec = (struct name_record *)ubi_trFirst( subrec->namelist );
	     namerec;
	     namerec = (struct name_record *)ubi_trNext( namerec ) )
	{
		if(namerec->name.name_type == 0x1b)
		{
			int i;
			for(i = 0; i < namerec->data.num_ips; i++)
			{
				/* Flags first, then the IP at offset 2 of the entry. */
				set_nb_flags(&prdata[num_ips * 6],namerec->data.nb_flags);
				putip((char *)&prdata[(num_ips * 6) + 2], &namerec->data.ip[i]);
				num_ips++;
			}
		}
	}

	/*
	 * Send back the reply containing the IP list.
	 */
	reply_netbios_packet(p,                        /* Packet to reply to. */
			0,                        /* Result code. */
			WINS_QUERY,               /* nmbd type code. */
			NMB_NAME_QUERY_OPCODE,    /* opcode. */
			lp_min_wins_ttl(),        /* ttl. */
			prdata,                   /* data to send. */
			num_ips*6);               /* data length. */

	free(prdata);
}
/****************************************************************************
Send a WINS name query response.
**************************************************************************/
void send_wins_name_query_response(int rcode, struct packet_struct *p,
                                   struct name_record *namerec)
{
	char rdata[6];          /* Inline reply buffer for the common single-IP case. */
	char *prdata = rdata;
	int reply_data_len = 0;
	int ttl = 0;
	int i;

	memset(rdata,'\0',6);

	if(rcode == 0)
	{
		/* Remaining lifetime of the record, or the configured maximum
		 * for permanent entries. */
		ttl = (namerec->data.death_time != PERMANENT_TTL) ?
		         namerec->data.death_time - p->timestamp : lp_max_wins_ttl();

		/* Copy all known ip addresses into the return data. */
		/* Optimise for the common case of one IP address so
		   we don't need a malloc. */
		if( namerec->data.num_ips == 1 )
			prdata = rdata;
		else
		{
			if((prdata = (char *)malloc( namerec->data.num_ips * 6 )) == NULL)
			{
				DEBUG(0,("send_wins_name_query_response: malloc fail !\n"));
				return;
			}
		}

		/* Each entry: 2 flag bytes followed by a 4-byte IP. */
		for(i = 0; i < namerec->data.num_ips; i++)
		{
			set_nb_flags(&prdata[i*6],namerec->data.nb_flags);
			putip((char *)&prdata[2+(i*6)], &namerec->data.ip[i]);
		}

		/* Order the reply entries relative to the requester's IP (p->ip). */
		sort_query_replies(prdata, i, p->ip);

		reply_data_len = namerec->data.num_ips * 6;
	}

	reply_netbios_packet(p,                        /* Packet to reply to. */
			rcode,                    /* Result code. */
			WINS_QUERY,               /* nmbd type code. */
			NMB_NAME_QUERY_OPCODE,    /* opcode. */
			ttl,                      /* ttl. */
			prdata,                   /* data to send. */
			reply_data_len);          /* data length. */

	/* Free the heap buffer used in the multi-IP case. */
	if((prdata != rdata) && (prdata != NULL))
		free(prdata);
}
/***********************************************************************
Deal with a name query.
***********************************************************************/
void wins_process_name_query_request(struct subnet_record *subrec,
                                     struct packet_struct *p)
{
	struct nmb_packet *nmb = &p->packet.nmb;
	struct nmb_name *question = &nmb->question.question_name;
	struct name_record *namerec = NULL;

	DEBUG(3,("wins_process_name_query: name query for name %s from IP %s\n",
		nmb_namestr(question), inet_ntoa(p->ip) ));

	/*
	 * Special name code. If the queried name is *<1b> then search
	 * the entire WINS database and return a list of all the IP addresses
	 * registered to any <1b> name. This is to allow domain master browsers
	 * to discover other domains that may not have a presence on their subnet.
	 */
	if(strequal( question->name, "*") && (question->name_type == 0x1b))
	{
		process_wins_dmb_query_request( subrec, p);
		return;
	}

	namerec = find_name_on_subnet(subrec, question, FIND_ANY_NAME);

	if(namerec != NULL)
	{
		/*
		 * If it's a DNSFAIL_NAME then reply name not found.
		 */
		if( namerec->data.source == DNSFAIL_NAME )
		{
			DEBUG(3,("wins_process_name_query: name query for name %s returning DNS fail.\n",
				nmb_namestr(question) ));
			send_wins_name_query_response(NAM_ERR, p, namerec);
			return;
		}

		/*
		 * If the name has expired then reply name not found.
		 */
		if( (namerec->data.death_time != PERMANENT_TTL)
		 && (namerec->data.death_time < p->timestamp) )
		{
			DEBUG(3,("wins_process_name_query: name query for name %s - name expired. Returning fail.\n",
				nmb_namestr(question) ));
			send_wins_name_query_response(NAM_ERR, p, namerec);
			return;
		}

		/* Name found and still valid: reply with its IP list. */
		DEBUG(3,("wins_process_name_query: name query for name %s returning first IP %s.\n",
			nmb_namestr(question), inet_ntoa(namerec->data.ip[0]) ));

		send_wins_name_query_response(0, p, namerec);
		return;
	}

	/*
	 * Name not found in WINS - try a dns query if it's a 0x20 name.
	 */
	if(lp_dns_proxy() &&
	   ((question->name_type == 0x20) || question->name_type == 0))
	{
		/* DNS proxying is only attempted for 0x20 and 0x00 name types. */
		DEBUG(3,("wins_process_name_query: name query for name %s not found - doing dns lookup.\n",
			nmb_namestr(question) ));
		queue_dns_query(p, question, &namerec);
		return;
	}

	/*
	 * Name not found - return error.
	 */
	send_wins_name_query_response(NAM_ERR, p, NULL);
}
/****************************************************************************
Send a WINS name release response.
**************************************************************************/
static void send_wins_name_release_response(int rcode, struct packet_struct *p)
{
	struct nmb_packet *nmb = &p->packet.nmb;
	char rdata[6];

	/* Echo back the 6 rdata bytes from the request's additional record. */
	memcpy(&rdata[0], &nmb->additional->rdata[0], 6);

	reply_netbios_packet(p,                          /* Packet to reply to. */
			rcode,                      /* Result code. */
			NMB_REL,                    /* nmbd type code. */
			NMB_NAME_RELEASE_OPCODE,    /* opcode. */
			0,                          /* ttl. */
			rdata,                      /* data to send. */
			6);                         /* data length. */
}
/***********************************************************************
Deal with a name release.
***********************************************************************/
/*
 * Handle a name release sent (unicast) to this WINS server.  The release
 * is honoured only when the name is a dynamically registered entry and the
 * releasing host's IP is already listed for the name; when the last IP is
 * removed the record itself is deleted.
 */
void wins_process_name_release_request(struct subnet_record *subrec,
                                       struct packet_struct *p)
{
	struct nmb_packet *nmb = &p->packet.nmb;
	struct nmb_name *question = &nmb->question.question_name;
	BOOL bcast = nmb->header.nm_flags.bcast;
	uint16 nb_flags = get_nb_flags(nmb->additional->rdata);
	struct name_record *namerec = NULL;
	struct in_addr from_ip;
	/* Fix: removed stray second semicolon after the ternary expression. */
	BOOL releasing_group_name = (nb_flags & NB_GROUP) ? True : False;

	/* The releasing IP lives at offset 2 of the additional record rdata. */
	putip((char *)&from_ip,&nmb->additional->rdata[2]);

	if(bcast)
	{
		/*
		 * We should only get unicast name registration packets here.
		 * Anyone trying to register broadcast should not be going to a WINS
		 * server. Log an error here.
		 */
		DEBUG(0,("wins_process_name_release_request: broadcast name registration request \
received for name %s from IP %s on subnet %s. Error - should not be sent to WINS server\n",
			nmb_namestr(question), inet_ntoa(from_ip), subrec->subnet_name));
		return;
	}

	DEBUG(3,("wins_process_name_release_request: %s name release for name %s \
IP %s\n", releasing_group_name ? "Group" : "Unique", nmb_namestr(question), inet_ntoa(from_ip) ));

	/*
	 * Deal with policy regarding 0x1d names.
	 */
	if(!releasing_group_name && (question->name_type == 0x1d))
	{
		/* 0x1d releases are acknowledged but otherwise ignored. */
		DEBUG(3,("wins_process_name_release_request: Ignoring request \
to release name %s from IP %s.", nmb_namestr(question), inet_ntoa(p->ip) ))
;
		send_wins_name_release_response(0, p);
		return;
	}

	/*
	 * See if the name already exists.
	 */
	namerec = find_name_on_subnet(subrec, question, FIND_ANY_NAME);

	/*
	 * Refuse the release if the name is unknown or was not dynamically
	 * registered.  (Fix: dropped the redundant "namerec != NULL" re-test
	 * on the right of the ||, which is implied by short-circuiting.)
	 */
	if( (namerec == NULL) || (namerec->data.source != REGISTER_NAME) )
	{
		send_wins_name_release_response(NAM_ERR, p);
		return;
	}

	/*
	 * Check that the sending machine has permission to release this name.
	 * If it's a group name not ending in 0x1c then just say yes and let
	 * the group time out.
	 */
	if(releasing_group_name && (question->name_type != 0x1c))
	{
		send_wins_name_release_response(0, p);
		return;
	}

	/*
	 * Check that the releasing node is on the list of IP addresses
	 * for this name. Disallow the release if not.
	 */
	if(!find_ip_in_name_record(namerec, from_ip))
	{
		DEBUG(3,("wins_process_name_release_request: Refusing request to \
release name %s as IP %s is not one of the known IP's for this name.\n",
			nmb_namestr(question), inet_ntoa(from_ip) ));
		send_wins_name_release_response(NAM_ERR, p);
		return;
	}

	/*
	 * Release the name and then remove the IP from the known list.
	 */
	send_wins_name_release_response(0, p);
	remove_ip_from_name_record(namerec, from_ip);
	wins_hook("delete", namerec, 0);

	/*
	 * Remove the name entirely if no IP addresses left.
	 */
	if (namerec->data.num_ips == 0)
		remove_name_from_namelist(subrec, namerec);
}
/*******************************************************************
WINS time dependent processing.
******************************************************************/
void initiate_wins_processing(time_t t)
{
	static time_t lasttime = 0;   /* Time of the last pass; rate-limits the work. */

	if (!lasttime)
		lasttime = t;
	/* Run the expiry/flush cycle at most once every 20 seconds. */
	if (t - lasttime < 20)
		return;

	lasttime = t;

	if(!lp_we_are_a_wins_server())
		return;

	expire_names_on_subnet(wins_server_subnet, t);

	/* Persist the database (in the background) only if something changed. */
	if(wins_server_subnet->namelist_changed)
		wins_write_database(True);

	wins_server_subnet->namelist_changed = False;
}
/*******************************************************************
Write out the current WINS database.
******************************************************************/
void wins_write_database(BOOL background)
{
	struct name_record *namerec;
	pstring fname, fnamenew;   /* Final database path and pid-suffixed temp path. */

	FILE *fp;

	if(!lp_we_are_a_wins_server())
		return;

	/* we will do the writing in a child process to ensure that the parent
	   doesn't block while this is done */
	if (background) {
		CatchChild();
		if (fork()) {
			/* Parent returns immediately; the child performs the write. */
			return;
		}
	}

	slprintf(fname,sizeof(fname),"%s/%s", lp_lockdir(), WINS_LIST);
	all_string_sub(fname,"//", "/", 0);
	/* Write to a "<fname>.<pid>" temp file, then rename it over the old
	 * database once fully written (see below). */
	slprintf(fnamenew,sizeof(fnamenew),"%s.%u", fname, (unsigned int)getpid());

	if((fp = sys_fopen(fnamenew,"w")) == NULL)
	{
		DEBUG(0,("wins_write_database: Can't open %s. Error was %s\n", fnamenew, strerror(errno)));
		if (background) {
			_exit(0);
		}
		return;
	}

	DEBUG(4,("wins_write_database: Dump of WINS name list.\n"));

	fprintf(fp,"VERSION %d %u\n", WINS_VERSION, wins_hash());

	for( namerec
	        = (struct name_record *)ubi_trFirst( wins_server_subnet->namelist );
	     namerec;
	     namerec = (struct name_record *)ubi_trNext( namerec ) )
	{
		int i;
		struct tm *tm;

		DEBUGADD(4,("%-19s ", nmb_namestr(&namerec->name) ));

		if( namerec->data.death_time != PERMANENT_TTL )
		{
			char *ts, *nl;

			tm = LocalTime(&namerec->data.death_time);
			ts = asctime(tm);
			/* Strip asctime()'s trailing newline for the debug line. */
			nl = strrchr( ts, '\n' );
			if( NULL != nl )
				*nl = '\0';

			DEBUGADD(4,("TTL = %s ", ts ));
		}
		else
			DEBUGADD(4,("TTL = PERMANENT "));

		for (i = 0; i < namerec->data.num_ips; i++)
			DEBUGADD(4,("%15s ", inet_ntoa(namerec->data.ip[i]) ));
		DEBUGADD(4,("%2x\n", namerec->data.nb_flags ));

		/* Only dynamically registered (REGISTER_NAME) entries are persisted. */
		if( namerec->data.source == REGISTER_NAME )
		{
			fprintf(fp, "\"%s#%02x\" %d ",
				namerec->name.name,namerec->name.name_type, /* Ignore scope. */
				(int)namerec->data.death_time);

			for (i = 0; i < namerec->data.num_ips; i++)
				fprintf( fp, "%s ", inet_ntoa( namerec->data.ip[i] ) );
			fprintf( fp, "%2xR\n", namerec->data.nb_flags );
		}
	}

	fclose(fp);
	chmod(fnamenew,0644);
	/* Replace the old database file with the freshly written one. */
	unlink(fname);
	rename(fnamenew,fname);
	if (background) {
		_exit(0);
	}
}
| 31.738922 | 108 | 0.576258 |
57373d942896a64f9fe778ce7e983adb3a12599e | 2,824 | h | C | Furiosity/Graphics/3D/ModelMesh3D.h | enci/Furiosity | 0f823b31ba369a6f20a69ca079627dccd4b4549a | [
"MIT"
] | 7 | 2015-05-14T18:36:18.000Z | 2020-08-30T19:09:33.000Z | Furiosity/Graphics/3D/ModelMesh3D.h | enci/Furiosity | 0f823b31ba369a6f20a69ca079627dccd4b4549a | [
"MIT"
] | 1 | 2015-10-23T14:24:08.000Z | 2015-10-23T14:24:08.000Z | Furiosity/Graphics/3D/ModelMesh3D.h | enci/Furiosity | 0f823b31ba369a6f20a69ca079627dccd4b4549a | [
"MIT"
] | 1 | 2020-07-31T23:34:49.000Z | 2020-07-31T23:34:49.000Z | ////////////////////////////////////////////////////////////////////////////////
// ModelMesh3D.h
// Furiosity
//
// Created by Bojan Endrovski on 9/5/13.
// Copyright (c) 2013 Game Oven. All rights reserved.
////////////////////////////////////////////////////////////////////////////////
#pragma once
// STL
#include <string>
// Framework
#include "gl.h"
// Local
#include "Frmath.h"
#include "Color.h"
#include "Resource.h"
#include "VertexFormats.h"
namespace Furiosity
{
class ResourceManager;
class Effect;
class ShaderAttribute;
// Wrapper for an OBJ mesh: owns CPU-side vertex/index arrays plus an
// optional pair of GL vertex buffer objects.
class ModelMesh3D : public Resource
{
    /// Resource's lifetime is handled by the resource manager
    friend class ResourceManager;

    typedef VertexPositionNormalTexture Vertex;

protected:
    /// Mesh vertices
    std::vector<Vertex> vertices;

    /// Mesh indices
    std::vector<GLushort> indices;

    /// Vertex buffers (GL buffer object handles; 0 when not created)
    GLuint vbo[2];

    /// Imports an OBJ model mesh
    ModelMesh3D(const std::string& _filename);

    /// Protected dtor
    ~ModelMesh3D();

public:
    /// Determine if this mesh is usable for OpenGL.
    virtual bool IsValid() override;

    /// Reload the OpenGL resources.
    virtual void Reload(bool cached = false) override;

    /// Dispose any OpenGL resources.
    virtual void Invalidate() override;

    /// Mesh vertices.
    /// Fix: use data() instead of &vertices[0] - indexing an empty vector
    /// is undefined behaviour, while data() is well-defined (may be null).
    const Vertex* Vertices() const { return vertices.data(); }

    /// Number of mesh vertices
    const int VertexCount() const { return (int)vertices.size(); }

    /// Mesh indices (same empty-vector rationale as Vertices()).
    const GLushort* Indices() const { return indices.data(); }

    /// Number of elements in the indices array.
    const int IndexCount() const { return (int)indices.size(); }

    /// Getter for VBO's
    const GLuint* VertexBuffers() const { return vbo; };

    /// Determine if this mesh uses VBO's
    bool HasVertexBuffers() const { return vbo[0] != 0; };

    int GetVertexBuffer() const;
    int GetIndexBuffer() const;

    /// Draws the mesh using the shader settings already set. Basically it
    /// just sends the mesh data to the GPU, so it only works in context
    void Render(Effect& shader);

    /// Draws the mesh using the shader settings already set. Basically it
    /// just sends the mesh data to the GPU, so it only works in context
    void Render(ShaderAttribute& attribPosition,
                ShaderAttribute& attribNormal,
                ShaderAttribute& attribTexture);
};
}
| 28.525253 | 80 | 0.541076 |
4c0f6f678c797ff196573f2636e3f9a2b9237c8c | 2,909 | php | PHP | application/views/admin/produk_old.php | anggiatm/sistem-informasi-penjualan-crd | 1645289755b43108f6e76a141ad3dad1805c5afb | [
"MIT"
] | null | null | null | application/views/admin/produk_old.php | anggiatm/sistem-informasi-penjualan-crd | 1645289755b43108f6e76a141ad3dad1805c5afb | [
"MIT"
] | null | null | null | application/views/admin/produk_old.php | anggiatm/sistem-informasi-penjualan-crd | 1645289755b43108f6e76a141ad3dad1805c5afb | [
"MIT"
] | null | null | null | <?php $this->load->view('template/header'); ?>
<!-- Admin product landing view: three category cards that each navigate to
     the product table filtered by the chosen category. -->
<div class="container-fluid">
    <div class="row">
        <!-- SIDE BAR -->
        <?php $this->load->view('template/sidebar'); ?>
        <!-- CONTENT -->
        <main role="main" class="col-md-9 ml-sm-auto col-lg-10 px-md-4">
            <!-- Breadcrumb navigation back to the produk page. -->
            <div class="row mt-4">
                <div class="col-md-12">
                    <nav aria-label="breadcrumb">
                        <ol class="breadcrumb">
                            <li class="breadcrumb-item active"><a href="<?php echo base_url() ?>admin/produk">Home</a></li>
                        </ol>
                    </nav>
                </div>
            </div>
            <!-- Category cards; the data-value attribute is forwarded as the
                 "produk" query parameter by the script below. -->
            <div class="row">
                <div class="col-md-4">
                    <div class="card" style="width: 20rem;" data-value="Fabrikasi">
                        <img class="card-img-top" src="<?php echo base_url(); ?>public/lib/Barang-Fabrikasi/default.PNG" alt="Card image cap">
                        <div class="card-body">
                            <p class="card-text">Barang Fabrikasi</p>
                        </div>
                    </div>
                </div>
                <div class="col-md-4">
                    <div class="card" style="width: 20rem;" data-value="Satuan">
                        <img class="card-img-top" src="<?php echo base_url(); ?>public/lib/Barang-Satuan/default.PNG">
                        <div class="card-body">
                            <p class="card-text">Barang Satuan</p>
                        </div>
                    </div>
                </div>
                <div class="col-md-4">
                    <div class="card" style="width: 20rem;" data-value="Service">
                        <img class="card-img-top" src="<?php echo base_url(); ?>public/lib/Jasa-Service/default.PNG" alt="Card image cap">
                        <div class="card-body">
                            <p class="card-text">Jasa Service</p>
                        </div>
                    </div>
                </div>
            </div>
        </main>
        <!-- END MAIN CONTENT -->
    </div>
</div>
<?php $this->load->view('template/mainscript'); ?>
<script>
    // Make every category card interactive: highlight on hover, navigate on click.
    $(document).ready(function() {
        $('.card').each(function() {
            $(this).css('cursor', 'pointer').hover(
                function() {
                    // Hover in: apply the highlighted card style.
                    $(this).prop('class', 'card bg-info');
                    //$(this).addClass('active');
                },
                function() {
                    // Hover out: restore the default card style.
                    $(this).prop('class', 'card');
                }).click(function() {
                getDetail($(this).attr('data-value'));
            });
        });

        // Navigate to the product table filtered by the selected category.
        function getDetail(produk) {
            window.location = '<?php echo base_url(); ?>admin/tabelproduk?produk=' + produk;
        }
    })
</script>
<?php $this->load->view('template/footer'); ?> | 34.630952 | 142 | 0.420419 |
e3fe6463b8354d3882846abd0051db76a9d39553 | 568 | go | Go | services/swr/v2/model/model_create_repo_domains_request_body.go | orangedeng/huaweicloud-sdk-go-v3 | e50208a9b9c30c2751789ee265d0210959dd4b87 | [
"Apache-2.0",
"BSD-2-Clause"
] | null | null | null | services/swr/v2/model/model_create_repo_domains_request_body.go | orangedeng/huaweicloud-sdk-go-v3 | e50208a9b9c30c2751789ee265d0210959dd4b87 | [
"Apache-2.0",
"BSD-2-Clause"
] | null | null | null | services/swr/v2/model/model_create_repo_domains_request_body.go | orangedeng/huaweicloud-sdk-go-v3 | e50208a9b9c30c2751789ee265d0210959dd4b87 | [
"Apache-2.0",
"BSD-2-Clause"
] | null | null | null | package model
import (
"encoding/json"
"strings"
)
// CreateRepoDomainsRequestBody is the request payload for sharing an image
// repository with another account domain.
type CreateRepoDomainsRequestBody struct {
	// Name of the account (tenant) the repository is shared with.
	AccessDomain string `json:"access_domain"`
	// Permission granted to the shared account; currently only "read" is supported.
	Permit string `json:"permit"`
	// Expiry time in UTC format; "forever" means the share never expires.
	Deadline string `json:"deadline"`
	// Optional description of the share.
	Description *string `json:"description,omitempty"`
}
// String renders the request body as its type name followed by the JSON
// encoding, falling back to a placeholder when marshalling fails.
func (o CreateRepoDomainsRequestBody) String() string {
	encoded, marshalErr := json.Marshal(o)
	if marshalErr != nil {
		return "CreateRepoDomainsRequestBody struct{}"
	}
	segments := []string{"CreateRepoDomainsRequestBody", string(encoded)}
	return strings.Join(segments, " ")
}
| 20.285714 | 81 | 0.728873 |
85d49c9d354e3163a40e4089a97cc754bfbf649d | 1,524 | js | JavaScript | app/components/Common/constants.js | packagewjx/Coding-Cloud-Frontend | cb7c12ddd2fbe29e015719224637f053c4e51ef1 | [
"MIT"
] | null | null | null | app/components/Common/constants.js | packagewjx/Coding-Cloud-Frontend | cb7c12ddd2fbe29e015719224637f053c4e51ef1 | [
"MIT"
] | 1 | 2018-02-05T01:19:43.000Z | 2018-02-07T02:38:20.000Z | app/components/Common/constants.js | packagewjx/Coding-Cloud-Frontend | cb7c12ddd2fbe29e015719224637f053c4e51ef1 | [
"MIT"
] | null | null | null | // GLOBAL CONSTANTS
// -----------------------------------
/** Theme palette: named colors referenced by UI components. */
export const APP_COLORS = {
    'primary': '#5d9cec',
    'success': '#27c24c',
    'info': '#23b7e5',
    'warning': '#ff902b',
    'danger': '#f05050',
    'inverse': '#131e26',
    'green': '#37bc9b',
    'pink': '#f532e5',
    'purple': '#7266ba',
    'dark': '#3a3f51',
    'yellow': '#fad732',
    'gray-darker': '#232735',
    'gray-dark': '#3a3f51',
    'gray': '#dde6e9',
    'gray-light': '#e4eaec',
    'gray-lighter': '#edf1f2'
};

/** Responsive breakpoint widths, in pixels. */
export const APP_MEDIAQUERY = {
    'desktopLG': 1200,
    'desktop': 992,
    'tablet': 768,
    'mobile': 480
};

/** Base URLs of the server endpoints used by the app. */
export const SERVER_ADDRESSES = {
    'backend': 'https://localhost:8080',
    'master': 'https://116.56.140.108:8443',
    'mock' : 'http://localhost:3000'
};

/** Numeric result codes returned by the backend. */
export const RESULT_CODE = {
    'success': 0,
    'unauthorize': 1001,
    'unauthenticate': 1002,
    'bad-argument': 1003,
    'other': 1004,
    'json-convert-error': 1005,
    'bad-credential': 1006,
    'bad-token': 1007
};
/** OpenShift annotation key names used when reading object metadata.
    NOTE: the closing "};" of this object lives on the following line. */
export const ANNOTATION_KEYS = {
    description: 'description',
    iconClass: 'iconClass',
    tags: 'tags',
    'openshift.io': {
        'display-name': 'openshift.io/display-name'
    },
    'template.openshift.io': {
        'documentation-url': 'template.openshift.io/documentation-url',
        'long-description': 'template.openshift.io/long-description',
        'support-url': 'template.openshift.io/support-url',
        'provider-display-name': 'template.openshift.io/provider-display-name'
    }
}; | 25.4 | 78 | 0.571522 |
97edc2e9a7b1ca8cfc739c4b53499792f0d369b4 | 929 | lua | Lua | lib/table.lua | Brusalk/AddonToolkit | 433329d3e0d1b265664eb0806f48efd989e30578 | [
"MIT"
] | 2 | 2018-06-01T04:40:14.000Z | 2020-11-14T20:22:38.000Z | lib/table.lua | Brusalk/AddonToolkit | 433329d3e0d1b265664eb0806f48efd989e30578 | [
"MIT"
] | 7 | 2018-06-03T06:37:37.000Z | 2020-09-11T06:26:28.000Z | lib/table.lua | Brusalk/AddonToolkit | 433329d3e0d1b265664eb0806f48efd989e30578 | [
"MIT"
] | null | null | null | -- local _, EventHorizon = ...
-- EventHorizon:file_preload()
-- function EventHorizon.deep_copy(object)
-- if type(object) == "table" then
-- local copy = {}
-- -- Don't improperly use the __pairs metamethod in Lua 5.2
-- for key, value in next, object do
-- copy[deep_copy(key)] = deep_copy(value)
-- end
-- setmetatable(copy, deep_copy(getmetatable(object)))
-- return copy
-- end
-- -- Most values in Lua are immutable
-- return object
-- end
-- local non_nil_value = function(_, value)
-- if value then return true end
-- end
-- function EventHorizon.any(object, predicate)
-- predicate = predicate or non_nil_value
-- if type(object) == "table" then
-- for key, value in pairs(object) do
-- if predicate(key, value) then
-- return true
-- end
-- end
-- return false
-- end
-- error("Invalid usage. Usage: any(object, [predicate])")
-- end
| 26.542857 | 64 | 0.624327 |
1652dc6d5baf5bb62a1d8a3ed9a3fd8d19f6cd9f | 792 | ts | TypeScript | dist/modules/director/director.model.d.ts | devlargs/movie-graphql-api | fecfb539aea8492aaad3eaf00009896f39b70f33 | [
"MIT"
] | null | null | null | dist/modules/director/director.model.d.ts | devlargs/movie-graphql-api | fecfb539aea8492aaad3eaf00009896f39b70f33 | [
"MIT"
] | 6 | 2022-03-21T09:04:55.000Z | 2022-03-21T14:33:02.000Z | dist/modules/director/director.model.d.ts | devlargs/movie-graphql-api | fecfb539aea8492aaad3eaf00009896f39b70f33 | [
"MIT"
] | null | null | null | /// <reference types="mongoose/types/pipelinestage" />
/// <reference types="mongoose/types/connection" />
/// <reference types="mongoose/types/cursor" />
/// <reference types="mongoose/types/document" />
/// <reference types="mongoose/types/error" />
/// <reference types="mongoose/types/mongooseoptions" />
/// <reference types="mongoose/types/schemaoptions" />
import { Document, Schema as MongooseSchema } from "mongoose";
/** Director entity shape as persisted via Mongoose (declaration only). */
export declare class Director {
    /** MongoDB ObjectId primary key. */
    _id: MongooseSchema.Types.ObjectId;
    /** Director's given name. */
    firstName: string;
    /** Director's family name. */
    lastName: string;
    /** URL of the director's image. */
    imageUrl: string;
}
/** Mongoose document type combining Director fields with Document methods. */
export declare type DirectorDocument = Director & Document;
export declare const DirectorSchema: MongooseSchema<Document<Director, any, any>, import("mongoose").Model<Document<Director, any, any>, any, any, any>, any, any>;
| 46.588235 | 163 | 0.731061 |
1493a137fa45efff2545fd98f1a212677693e283 | 6,622 | kt | Kotlin | stocks/src/main/java/com/pyamsoft/tickertape/stocks/StockModule.kt | pyamsoft/tickertape | bcca4b32e1229f2c8f0e30e6c18ac938968c59a1 | [
"Apache-2.0"
] | 1 | 2021-07-09T19:16:10.000Z | 2021-07-09T19:16:10.000Z | stocks/src/main/java/com/pyamsoft/tickertape/stocks/StockModule.kt | pyamsoft/tickertape | bcca4b32e1229f2c8f0e30e6c18ac938968c59a1 | [
"Apache-2.0"
] | 14 | 2021-08-10T21:37:45.000Z | 2022-03-30T10:23:51.000Z | stocks/src/main/java/com/pyamsoft/tickertape/stocks/StockModule.kt | pyamsoft/tickertape | bcca4b32e1229f2c8f0e30e6c18ac938968c59a1 | [
"Apache-2.0"
] | null | null | null | /*
* Copyright 2021 Peter Kenji Yamanaka
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.pyamsoft.tickertape.stocks
import androidx.annotation.CheckResult
import com.pyamsoft.pydroid.bootstrap.network.DelegatingSocketFactory
import com.pyamsoft.pydroid.core.Enforcer
import com.pyamsoft.tickertape.stocks.cache.MemoryStockCacheImpl
import com.pyamsoft.tickertape.stocks.cache.StockCache
import com.pyamsoft.tickertape.stocks.service.ChartService
import com.pyamsoft.tickertape.stocks.service.OptionsService
import com.pyamsoft.tickertape.stocks.service.QuoteService
import com.pyamsoft.tickertape.stocks.service.SearchService
import com.pyamsoft.tickertape.stocks.service.TopService
import com.pyamsoft.tickertape.stocks.sources.ChartSource
import com.pyamsoft.tickertape.stocks.sources.OptionsSource
import com.pyamsoft.tickertape.stocks.sources.QuoteSource
import com.pyamsoft.tickertape.stocks.sources.SearchSource
import com.pyamsoft.tickertape.stocks.sources.TopSource
import com.pyamsoft.tickertape.stocks.sources.yf.YahooChartSource
import com.pyamsoft.tickertape.stocks.sources.yf.YahooOptionsSource
import com.pyamsoft.tickertape.stocks.sources.yf.YahooQuoteSource
import com.pyamsoft.tickertape.stocks.sources.yf.YahooSearchSource
import com.pyamsoft.tickertape.stocks.sources.yf.YahooTopSource
import com.squareup.moshi.Moshi
import dagger.Binds
import dagger.Module
import dagger.Provides
import javax.inject.Named
import javax.inject.Qualifier
import javax.net.SocketFactory
import kotlin.reflect.KClass
import okhttp3.Call
import okhttp3.OkHttpClient
import okhttp3.Request
import okhttp3.logging.HttpLoggingInterceptor
import retrofit2.Retrofit
import retrofit2.converter.moshi.MoshiConverterFactory
private const val STOCK_BASE_URL = "https://finance.yahoo.com"
internal const val STOCK_API_URL = "${STOCK_BASE_URL}/"
@Qualifier @Retention(AnnotationRetention.BINARY) internal annotation class InternalApi
private class OkHttpClientLazyCallFactory(debug: Boolean) : Call.Factory {
private val client by lazy { createOkHttpClient(debug, DelegatingSocketFactory.create()) }
override fun newCall(request: Request): Call {
Enforcer.assertOffMainThread()
return client.newCall(request)
}
companion object {
@JvmStatic
@CheckResult
private fun createOkHttpClient(
debug: Boolean,
socketFactory: SocketFactory,
): OkHttpClient {
Enforcer.assertOffMainThread()
return OkHttpClient.Builder()
.socketFactory(socketFactory)
.apply {
if (debug) {
addInterceptor(
HttpLoggingInterceptor().apply { setLevel(HttpLoggingInterceptor.Level.BODY) })
}
}
.build()
}
}
}
@Module
abstract class StockModule {
// The YFSource uses an internal YF quote source
@Binds
@CheckResult
@InternalApi
internal abstract fun bindYFQuoteSource(impl: YahooQuoteSource): QuoteSource
// The YFSource uses an internal YF options source
@Binds
@CheckResult
@InternalApi
internal abstract fun bindYFOptionsSource(impl: YahooOptionsSource): OptionsSource
// The YFSource uses an internal YF chart source
@Binds
@CheckResult
@InternalApi
internal abstract fun bindYFChartSource(impl: YahooChartSource): ChartSource
// The YFSource uses an internal YF top source
@Binds
@CheckResult
@InternalApi
internal abstract fun bindYFTopSource(impl: YahooTopSource): TopSource
// The YFSource uses an internal YF search source
@Binds
@CheckResult
@InternalApi
internal abstract fun bindYFSearchSource(impl: YahooSearchSource): SearchSource
@Binds
@CheckResult
@InternalApi
internal abstract fun bindNetworkInteractor(impl: StockNetworkInteractor): StockInteractor
@Binds
@CheckResult
@InternalApi
internal abstract fun bindStockCache(impl: MemoryStockCacheImpl): StockCache
@Binds
@CheckResult
internal abstract fun bindRealInteractor(impl: StockInteractorImpl): StockInteractor
@Module
companion object {
@JvmStatic
@CheckResult
private fun createMoshi(): Moshi {
return Moshi.Builder().build()
}
@JvmStatic
@CheckResult
private fun createRetrofit(debug: Boolean, moshi: Moshi): Retrofit {
return Retrofit.Builder()
.baseUrl(STOCK_API_URL)
.callFactory(OkHttpClientLazyCallFactory(debug))
.addConverterFactory(MoshiConverterFactory.create(moshi))
.build()
}
@Provides
@JvmStatic
@InternalApi
@CheckResult
internal fun provideNetworkCreator(
@Named("debug") debug: Boolean,
): NetworkServiceCreator {
// Don't inject these to avoid needing Dagger API in build.gradle
val retrofit = createRetrofit(debug, createMoshi())
return object : NetworkServiceCreator {
override fun <T : Any> create(target: KClass<T>): T {
return retrofit.create(target.java)
}
}
}
@Provides
@JvmStatic
@InternalApi
@CheckResult
internal fun provideQuotes(@InternalApi serviceCreator: NetworkServiceCreator): QuoteService {
return serviceCreator.create(QuoteService::class)
}
@Provides
@JvmStatic
@InternalApi
@CheckResult
internal fun provideCharts(@InternalApi serviceCreator: NetworkServiceCreator): ChartService {
return serviceCreator.create(ChartService::class)
}
@Provides
@JvmStatic
@InternalApi
@CheckResult
internal fun provideTops(@InternalApi serviceCreator: NetworkServiceCreator): TopService {
return serviceCreator.create(TopService::class)
}
@Provides
@JvmStatic
@InternalApi
@CheckResult
internal fun provideSearch(@InternalApi serviceCreator: NetworkServiceCreator): SearchService {
return serviceCreator.create(SearchService::class)
}
@Provides
@JvmStatic
@InternalApi
@CheckResult
internal fun provideOptions(
@InternalApi serviceCreator: NetworkServiceCreator
): OptionsService {
return serviceCreator.create(OptionsService::class)
}
}
}
| 30.516129 | 99 | 0.752945 |
5f956e9385b8478c2210414a42f3dca3236903fc | 202 | css | CSS | src/app/navbar/navbar.component.css | juru-juliette/GitSearch | 59d2dd6a2db3460008ec9dbf48dfe9924735949c | [
"MIT"
] | null | null | null | src/app/navbar/navbar.component.css | juru-juliette/GitSearch | 59d2dd6a2db3460008ec9dbf48dfe9924735949c | [
"MIT"
] | 7 | 2020-09-07T01:57:51.000Z | 2022-02-18T08:41:41.000Z | src/app/navbar/navbar.component.css | juru-juliette/GitSearch | 59d2dd6a2db3460008ec9dbf48dfe9924735949c | [
"MIT"
] | null | null | null | /* .form-group{
margin-top: 7%;
width: 130%;
height: 15%;
background-color: rgb(70, 68, 68);
border: none;
color: ghostwhite;
} */
#logo{
width:50px;
height: 30px;
} | 16.833333 | 39 | 0.529703 |
40fc1655d5c35cb79b5b01990ab01b3df88e9327 | 529 | swift | Swift | test/SourceKit/DocSupport/Inputs/cake.swift | ackyshake/swift | 226a8258078bd4a9987fd5e2485ab72d956deda6 | [
"Apache-2.0"
] | 4 | 2016-01-06T22:53:32.000Z | 2021-09-11T23:50:05.000Z | test/SourceKit/DocSupport/Inputs/cake.swift | ackyshake/swift | 226a8258078bd4a9987fd5e2485ab72d956deda6 | [
"Apache-2.0"
] | null | null | null | test/SourceKit/DocSupport/Inputs/cake.swift | ackyshake/swift | 226a8258078bd4a9987fd5e2485ab72d956deda6 | [
"Apache-2.0"
] | 2 | 2016-11-04T10:07:52.000Z | 2018-04-14T21:55:55.000Z | public protocol Prot {
typealias Element
var p : Int { get }
func foo()
}
public class C1 : Prot {
public typealias Element = Int
public var p : Int = 0
public func foo() {}
public subscript(index: Int) -> Int { return 0 }
public subscript(index i: Float) -> Int { return 0 }
}
public func genfoo<T1 : Prot, T2 : C1 where T1.Element == Int, T2.Element == T1.Element>(x ix: T1, y iy: T2) {}
public extension Prot where Self.Element == Int {
final func extfoo() {}
}
public enum MyEnum : Int {
case Blah
}
| 21.16 | 111 | 0.642722 |
90d9f1786c550ed506168753d0e4390bd86b8f15 | 1,596 | swift | Swift | HTTeaPot/HTTeaPotTests/Test Controllers/StatusCodeViewControllerTest.swift | juliaYamamoto/App_httpStatusCode | d3f0fb361aa74db45a4d142e3c8a5183fb023a5a | [
"MIT"
] | 2 | 2020-12-11T21:52:13.000Z | 2021-01-08T00:16:48.000Z | HTTeaPot/HTTeaPotTests/Test Controllers/StatusCodeViewControllerTest.swift | juliaYamamoto/HoTTeaPot | d3f0fb361aa74db45a4d142e3c8a5183fb023a5a | [
"MIT"
] | null | null | null | HTTeaPot/HTTeaPotTests/Test Controllers/StatusCodeViewControllerTest.swift | juliaYamamoto/HoTTeaPot | d3f0fb361aa74db45a4d142e3c8a5183fb023a5a | [
"MIT"
] | null | null | null | //
// StatusCodeViewControllerTest.swift
// HTTeaPotTests
//
// Created by Júlia Yamamoto on 2020-12-20.
//
import XCTest
@testable import HTTeaPot
class StatusCodeViewControllerTest: XCTestCase {
// MARK: - Properties
var sut: StatusCodeViewController!
// MARK: - Test Properties
// MARK: - Set up and Tear down
override func setUp() {
super.setUp()
let storyboard = UIStoryboard(name: "Main", bundle: nil)
sut = storyboard.instantiateViewController(identifier: "StatusCode") as? StatusCodeViewController
_ = sut.view
}
override func tearDown() {
super.tearDown()
}
// MARK: - Test: Initialization
func testInit_ShouldNotBeNil() {
XCTAssertNotNil(sut)
}
func testInit_TableViewShouldNotBeNil() {
XCTAssertNotNil(sut.statusCodeTableView)
}
// MARK: - Test: Delegate
func testDelegate_TableViewDelegate_ShouldNotBeNil() {
XCTAssertNotNil(sut.statusCodeTableView.delegate)
}
func testDelegate_TableViewDelegate_ShouldNotBeOfCorrectType() {
XCTAssertTrue(sut.statusCodeTableView.delegate is StatusCodeViewController)
}
// MARK: - Test: Data Source
func testDataSource_TableViewDataSource_ShouldNotBeNil() {
XCTAssertNotNil(sut.statusCodeTableView.dataSource)
}
func testDataSource_TableViewDataSource_ShouldNotBeOfCorrectType() {
XCTAssertTrue(sut.statusCodeTableView.dataSource is StatusCodeDataService)
}
}
| 23.470588 | 105 | 0.662907 |
167cd05af17ad13d559eef015e954aced843ca17 | 834 | asm | Assembly | oeis/142/A142439.asm | neoneye/loda-programs | 84790877f8e6c2e821b183d2e334d612045d29c0 | [
"Apache-2.0"
] | 11 | 2021-08-22T19:44:55.000Z | 2022-03-20T16:47:57.000Z | oeis/142/A142439.asm | neoneye/loda-programs | 84790877f8e6c2e821b183d2e334d612045d29c0 | [
"Apache-2.0"
] | 9 | 2021-08-29T13:15:54.000Z | 2022-03-09T19:52:31.000Z | oeis/142/A142439.asm | neoneye/loda-programs | 84790877f8e6c2e821b183d2e334d612045d29c0 | [
"Apache-2.0"
] | 3 | 2021-08-22T20:56:47.000Z | 2021-09-29T06:26:12.000Z | ; A142439: Primes congruent to 30 mod 49.
; Submitted by Simon Strandgaard
; 79,373,569,863,1451,1549,2039,2137,2333,3019,3313,3607,3803,4391,4783,5077,5273,5861,6449,6547,6841,7331,7723,7919,8017,8311,9781,10271,10369,10663,10859,10957,11251,11447,11839,12329,12721,12917,13309,13799,14387,14779,15073,15269,15661,16249,17033,17327,18013,18307,18503,18797,19483,19777,19973,20071,21149,21247,21737,22031,22129,22619,22717,23011,23599,23893,24481,24677,24971,25657,25951,26539,26833,27127,27617,28499,28597,28793,30557,30851,30949,32027,32321,32713,32909,33203,33301,33791
mov $1,39
mov $2,$0
add $2,2
pow $2,2
lpb $2
sub $2,2
mov $3,$1
mul $3,2
seq $3,10051 ; Characteristic function of primes: 1 if n is prime, else 0.
sub $0,$3
add $1,49
mov $4,$0
max $4,0
cmp $4,$0
mul $2,$4
lpe
mov $0,$1
mul $0,2
sub $0,97
| 34.75 | 497 | 0.732614 |
cb0bc40f2e76b6d4969fa67d1ede0526fab0334f | 1,642 | h | C | src/magic.h | DouglasRMiles/QuProlog | 798d86f87fb4372b8918ef582ef2f0fc0181af2d | [
"Apache-2.0"
] | 5 | 2019-11-20T02:05:31.000Z | 2022-01-06T18:59:16.000Z | src/magic.h | logicmoo/QuProlog | 798d86f87fb4372b8918ef582ef2f0fc0181af2d | [
"Apache-2.0"
] | null | null | null | src/magic.h | logicmoo/QuProlog | 798d86f87fb4372b8918ef582ef2f0fc0181af2d | [
"Apache-2.0"
] | 2 | 2022-01-08T13:52:24.000Z | 2022-03-07T17:41:37.000Z | // magic.h - Magic numbers for files.
//
// ##Copyright##
//
// Copyright 2000-2016 Peter Robinson (pjr@itee.uq.edu.au)
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.00
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// ##Copyright##
//
// $Id: magic.h,v 1.1.1.1 2000/12/07 21:48:04 qp Exp $
#ifndef MAGIC_H
#define MAGIC_H
#include "defs.h"
//
// Size of a magic number.
//
const word32 MAGIC_SIZE = sizeof(word32);
//
// Convert an ASCII string of length 4 to a magic number.
//
//
#define Magic(s) ((wordlong)((s[0]<<24)|(s[1]<<16)|(s[2]<<8)|(s[3])))
//
// Magic strings for different areas.
//
#define QU_PROLOG_VERSION Magic("Q402")
#define CODE_MAGIC_NUMBER Magic("QCA0")
#define PRED_TABLE_MAGIC_NUMBER Magic("QPT0")
#define STRING_TABLE_MAGIC_NUMBER Magic("QST0")
#define ATOM_TABLE_MAGIC_NUMBER Magic("QAT0")
#define NAME_TRAIL_MAGIC_NUMBER Magic("QNT0")
#define NAME_TABLE_MAGIC_NUMBER Magic("QNA0")
#define CHOICE_MAGIC_NUMBER Magic("QCS0")
#define ENVIRONMENT_MAGIC_NUMBER Magic("QES0")
#define HEAP_TRAIL_MAGIC_NUMBER Magic("QHT0")
#define HEAP_MAGIC_NUMBER Magic("QHP0")
#define RECORD_TABLE_MAGIC_NUMBER Magic("QRT0")
#endif // MAGIC_H
| 29.321429 | 75 | 0.719854 |
85be2b32adece228556b09ab4f7a600db1bab7ba | 2,607 | h | C | MdePkg/Include/Library/PalLib.h | heibaifu/UEFI-EDK-II | 1a734ed85fda71630c795832e6d24ea560caf739 | [
"Python-2.0",
"Zlib",
"BSD-2-Clause",
"MIT",
"BSD-2-Clause-Patent",
"BSD-3-Clause"
] | 1 | 2019-04-18T02:11:51.000Z | 2019-04-18T02:11:51.000Z | MdePkg/Include/Library/PalLib.h | oukei123/edk2 | d43056888790f987169c54a5c2895758b85dff72 | [
"Python-2.0",
"Zlib",
"BSD-2-Clause",
"MIT",
"BSD-2-Clause-Patent",
"BSD-3-Clause"
] | null | null | null | MdePkg/Include/Library/PalLib.h | oukei123/edk2 | d43056888790f987169c54a5c2895758b85dff72 | [
"Python-2.0",
"Zlib",
"BSD-2-Clause",
"MIT",
"BSD-2-Clause-Patent",
"BSD-3-Clause"
] | 1 | 2019-05-09T05:44:37.000Z | 2019-05-09T05:44:37.000Z | /** @file
Provides library services to make PAL Calls.
The PAL Library provides a service to make a PAL CALL. This service is identical
in functionality to AsmPalCall() in the functions of the Base Library specific to Intel Itanium architecture.
The only difference is that the PAL Entry Point is not passed in. Implementations
of this library class must manage PAL Entry Point on their own. For example, a PEI
implementation can use a PPI to lookup the PAL Entry Point, and a DXE implementation
can contain a constructor to look up the PAL Entry Point from a HOB. This library class
is only available on Intel Itanium-based platforms.
Copyright (c) 2006 - 2018, Intel Corporation. All rights reserved.<BR>
SPDX-License-Identifier: BSD-2-Clause-Patent
**/
#ifndef __PAL_CALL_LIB_H__
#define __PAL_CALL_LIB_H__
#include <IndustryStandard/Pal.h>
/**
Makes a PAL procedure call.
This is a wrapper function to make a PAL procedure call. Based on the Index value,
this API will make static or stacked PAL call. Architected procedures may be designated
as required or optional. If a PAL procedure is specified as optional, a unique return
code of 0xFFFFFFFFFFFFFFFF is returned in the Status field of the PAL_CALL_RETURN structure.
This indicates that the procedure is not present in this PAL implementation. It is the
caller's responsibility to check for this return code after calling any optional PAL
procedure. No parameter checking is performed on the 4 input parameters, but there are
some common rules that the caller should follow when making a PAL call. Any address
passed to PAL as buffers for return parameters must be 8-byte aligned. Unaligned addresses
may cause undefined results. For those parameters defined as reserved or some fields
defined as reserved must be zero filled or the invalid argument return value may be
returned or undefined result may occur during the execution of the procedure.
This function is only available on Intel Itanium-based platforms.
@param Index The PAL procedure Index number.
@param Arg2 The 2nd parameter for PAL procedure calls.
@param Arg3 The 3rd parameter for PAL procedure calls.
@param Arg4 The 4th parameter for PAL procedure calls.
@return Structure returned from the PAL Call procedure, including the status and return value.
**/
PAL_CALL_RETURN
EFIAPI
PalCall (
IN UINT64 Index,
IN UINT64 Arg2,
IN UINT64 Arg3,
IN UINT64 Arg4
);
#endif
| 44.948276 | 112 | 0.739547 |
e71ab0c33fa8f4e515e24fd42c2ae1f35c783329 | 46 | js | JavaScript | ALS.Glance.Web/Scripts/_references.min.js | smeegoan/als-glance | 92f7cb88747f742ef7672346a169e669b702b967 | [
"ECL-2.0",
"Apache-2.0"
] | 2 | 2017-12-26T13:01:07.000Z | 2018-01-06T19:02:31.000Z | ALS.Glance.Web/Scripts/_references.min.js | smeegoan/als-glance-lite | 72a117e284ecfa9e56f8a61c45b9dbc271b93431 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | ALS.Glance.Web/Scripts/_references.min.js | smeegoan/als-glance-lite | 72a117e284ecfa9e56f8a61c45b9dbc271b93431 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
//# sourceMappingURL=_references.min.js.map
| 15.333333 | 43 | 0.76087 |
57d81c2be76d948ee839afd08dde5cfcd99e63ec | 39 | sql | SQL | spindb-core/container/try.sql | Lalit-Vikram-Singh/spinnaker-plugins | 86193e9b4d537b9e6495f09634e2ba00b38e03e0 | [
"Apache-2.0"
] | null | null | null | spindb-core/container/try.sql | Lalit-Vikram-Singh/spinnaker-plugins | 86193e9b4d537b9e6495f09634e2ba00b38e03e0 | [
"Apache-2.0"
] | null | null | null | spindb-core/container/try.sql | Lalit-Vikram-Singh/spinnaker-plugins | 86193e9b4d537b9e6495f09634e2ba00b38e03e0 | [
"Apache-2.0"
] | null | null | null | create table timepass3 (name VARCHAR);
| 19.5 | 38 | 0.794872 |
740339173703bdd024c22c46e18f2697f6fd7e5d | 33,931 | rs | Rust | circuit/src/execution-delivery/src/lib.rs | Thang83473361/t3rn | 82db86c3a62754888c7571cd6098ce4f2b319320 | [
"Apache-2.0"
] | 1 | 2021-09-25T18:46:28.000Z | 2021-09-25T18:46:28.000Z | circuit/src/execution-delivery/src/lib.rs | Thang83473361/t3rn | 82db86c3a62754888c7571cd6098ce4f2b319320 | [
"Apache-2.0"
] | null | null | null | circuit/src/execution-delivery/src/lib.rs | Thang83473361/t3rn | 82db86c3a62754888c7571cd6098ce4f2b319320 | [
"Apache-2.0"
] | null | null | null | // This file is part of Substrate.
// Copyright (C) 2020-2021 Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: Apache-2.0
// Licensed under the Apache License, Version 2.0 (the "License")
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! <!-- markdown-link-check-disable -->
//! # Offchain Worker Example Pallet
//!
//! The Offchain Worker Example: A simple pallet demonstrating
//! concepts, APIs and structures common to most offchain workers.
//!
//! Run `cargo doc --package pallet-example-offchain-worker --open` to view this module's
//! documentation.
//!
//! - [`Config`]
//! - [`Call`]
//! - [`Pallet`]
//!
//!
//! ## Overview
//!
//! In this example we are going to build a very simplistic, naive and definitely NOT
//! production-ready oracle for BTC/USD price.
//! Offchain Worker (OCW) will be triggered after every block, fetch the current price
//! and prepare either signed or unsigned transaction to feed the result back on chain.
//! The on-chain logic will simply aggregate the results and store last `64` values to compute
//! the average price.
//! Additional logic in OCW is put in place to prevent spamming the network with both signed
//! and unsigned transactions, and custom `UnsignedValidator` makes sure that there is only
//! one unsigned transaction floating in the network.
#![cfg_attr(not(feature = "std"), no_std)]
use codec::{Decode, Encode};
use frame_support::dispatch::DispatchResultWithPostInfo;
use frame_support::ensure;
use frame_system::offchain::{SignedPayload, SigningTypes};
use hex_literal::hex;
use sp_application_crypto::Public;
use sp_core::crypto::KeyTypeId;
use sp_runtime::{
traits::{Convert, Hash, Zero},
RuntimeAppPublic, RuntimeDebug,
};
use crate::exec_composer::ExecComposer;
pub use crate::message_assembly::circuit_inbound::StepConfirmation;
use crate::message_assembly::merklize::*;
use pallet_contracts_registry::{RegistryContract, RegistryContractId};
use bp_runtime::ChainId;
pub use pallet::*;
use sp_std::vec;
use sp_std::vec::*;
use t3rn_primitives::abi::{ContractActionDesc, GatewayABIConfig, HasherAlgo as HA};
use t3rn_primitives::transfers::BalanceOf;
use t3rn_primitives::*;
use volatile_vm::VolatileVM;
#[cfg(test)]
pub mod tests;
#[cfg(test)]
pub mod mock;
pub mod exec_composer;
pub mod message_assembly;
pub use crate::message_assembly::test_utils as message_test_utils;
pub type CurrentHash<T, I> =
<<T as pallet_multi_finality_verifier::Config<I>>::BridgedChain as bp_runtime::Chain>::Hash;
pub type CurrentHasher<T, I> =
<<T as pallet_multi_finality_verifier::Config<I>>::BridgedChain as bp_runtime::Chain>::Hasher;
pub type CurrentHeader<T, I> =
<<T as pallet_multi_finality_verifier::Config<I>>::BridgedChain as bp_runtime::Chain>::Header;
type DefaultPolkadotLikeGateway = ();
type PolkadotLikeValU64Gateway = pallet_multi_finality_verifier::Instance1;
type EthLikeKeccak256ValU64Gateway = pallet_multi_finality_verifier::Instance2;
type EthLikeKeccak256ValU32Gateway = pallet_multi_finality_verifier::Instance3;
pub fn init_bridge_instance<T: pallet_multi_finality_verifier::Config<I>, I: 'static>(
origin: T::Origin,
first_header: GenericPrimitivesHeader,
authorities: Option<Vec<T::AccountId>>,
gateway_id: bp_runtime::ChainId,
) -> DispatchResultWithPostInfo {
let header: CurrentHeader<T, I> = Decode::decode(&mut &first_header.encode()[..])
.map_err(|_| "Decoding error: received GenericPrimitivesHeader -> CurrentHeader<T>")?;
let init_data = bp_header_chain::InitializationData {
header,
authority_list: authorities
.unwrap_or(vec![])
.iter()
.map(|id| {
(
sp_finality_grandpa::AuthorityId::from_slice(&id.encode()),
1,
)
})
.collect::<Vec<_>>(),
set_id: 1,
is_halted: false,
};
pallet_multi_finality_verifier::Pallet::<T, I>::initialize_single(origin, init_data, gateway_id)
}
pub fn get_roots_from_bridge<T: pallet_multi_finality_verifier::Config<I>, I: 'static>(
block_hash: Bytes,
gateway_id: bp_runtime::ChainId,
) -> Result<(sp_core::H256, sp_core::H256), Error<T>> {
let gateway_block_hash: CurrentHash<T, I> = Decode::decode(&mut &block_hash[..])
.map_err(|_| Error::<T>::StepConfirmationDecodingError)?;
let (extrinsics_root, storage_root): (CurrentHash<T, I>, CurrentHash<T, I>) =
pallet_multi_finality_verifier::Pallet::<T, I>::get_imported_roots(
gateway_id,
gateway_block_hash,
)
.ok_or(Error::<T>::StepConfirmationBlockUnrecognised)?;
let extrinsics_root_h256: sp_core::H256 = Decode::decode(&mut &extrinsics_root.encode()[..])
.map_err(|_| Error::<T>::StepConfirmationDecodingError)?;
let storage_root_h256: sp_core::H256 = Decode::decode(&mut &storage_root.encode()[..])
.map_err(|_| Error::<T>::StepConfirmationDecodingError)?;
Ok((extrinsics_root_h256, storage_root_h256))
}
/// Defines application identifier for crypto keys of this module.
///
/// Every module that deals with signatures needs to declare its unique identifier for
/// its crypto keys.
/// When offchain worker is signing transactions it's going to request keys of type
/// `KeyTypeId` from the keystore and use the ones it finds to sign the transaction.
/// The keys can be inserted manually via RPC (see `author_insertKey`).
pub const KEY_TYPE: KeyTypeId = KeyTypeId(*b"circ");
pub fn select_validator_for_x_tx_dummy<T: Config>(
_io_schedule: Vec<u8>,
) -> Result<T::AccountId, &'static str> {
// This is the well-known Substrate account of Alice (5GrwvaEF...)
let default_recepient =
hex!("d43593c715fdd31c61141abd04a99fd6822c8558854ccde39a5684e7a56da27d");
let dummy_escrow_alice =
T::AccountId::decode(&mut &default_recepient[..]).expect("should not fail for dummy data");
Ok(dummy_escrow_alice)
}
pub type XtxId<T> = <T as frame_system::Config>::Hash;
pub type AuthorityId = crate::message_assembly::signer::app::Public;
/// A composable cross-chain (X) transaction that has already been verified to be valid and submittable
#[derive(Clone, Eq, PartialEq, Default, Encode, Decode, RuntimeDebug)]
pub struct Xtx<AccountId, BlockNumber, Hash, BalanceOf> {
/// The total estimated worth of tx (accumulated value being transferred and estimated fees)
pub estimated_worth: BalanceOf,
/// The total worth so far of tx (accumulated value being transferred and estimated fees)
pub current_worth: BalanceOf,
/// The owner of the bid
pub requester: AccountId,
/// Validator acting as an escrow
pub escrow_account: AccountId,
/// Encoded content of composable tx
pub payload: Vec<u8>,
/// Current step
pub current_step: u32,
/// Current step
pub steps_no: u32,
/// Current phase
pub current_phase: u32,
/// Current round
pub current_round: u32,
pub schedule: XtxSchedule<AccountId, BlockNumber, Hash, BalanceOf>,
// /// Current phase
// pub phase_compilation_context: PhaseCompilationContext<BlockNumber>,
/// Result
pub result_status: Vec<u8>,
/// Block numbers when each phase phase has started
pub phases_blockstamps: (BlockNumber, BlockNumber),
}
/// A composable cross-chain (X) transaction that has already been verified to be valid and submittable
#[derive(Clone, Eq, PartialEq, Default, Encode, Decode, RuntimeDebug)]
pub struct StepEntry<AccountId, BlockNumber, Hash, BalanceOf> {
contract_id: Hash,
cost: u128,
result: Option<Vec<u8>>,
input: Vec<u8>,
dest: AccountId,
value: BalanceOf,
proof: Option<Hash>,
updated_at: BlockNumber,
relayer: Option<AccountId>,
gateway_id: Option<bp_runtime::ChainId>,
gateway_entry_id: Hash,
}
/// Schedule consist of phases
/// The first phase, execution / computation phase may consist out of many rounds
/// Each round can consist out of many parallel steps
/// schedule:
/// vector of phases, where
/// phase: vector of rounds, where
/// round: vector of steps
pub type RoundEntry<AccountId, BlockNumber, Hash, BalanceOf> =
Vec<StepEntry<AccountId, BlockNumber, Hash, BalanceOf>>;
#[derive(Clone, Eq, PartialEq, Default, Encode, Decode, RuntimeDebug)]
pub struct XtxSchedule<AccountId, BlockNumber, Hash, BalanceOf> {
phases: Vec<RoundEntry<AccountId, BlockNumber, Hash, BalanceOf>>,
}
// check frame/democracy/src/vote.rs
impl<
AccountId: Encode,
BlockNumber: Ord + Copy + Zero + Encode,
Hash: Ord + Copy + Encode,
BalanceOf: Encode,
> Xtx<AccountId, BlockNumber, Hash, BalanceOf>
{
pub fn new(
// Estimated worth (values transferred + aggregated fees)
estimated_worth: BalanceOf,
// Current, actual aggregated worth
current_worth: BalanceOf,
// Requester of xtx
requester: AccountId,
// Validator's account acting as an escrow for this xtx
escrow_account: AccountId,
// Encoded data
payload: Vec<u8>,
// Current step no
current_step: u32,
// Max no of steps
steps_no: u32,
// Current phase (exec, revert, commit)
current_phase: u32,
// Current round (consists of parallel steps)
current_round: u32,
// Results
result_status: Vec<u8>,
// Block numbers of two phases
phases_blockstamps: (BlockNumber, BlockNumber),
// Block numbers of two phases
schedule: XtxSchedule<AccountId, BlockNumber, Hash, BalanceOf>,
) -> Self {
Xtx {
estimated_worth,
current_worth,
requester,
escrow_account,
payload,
steps_no,
current_phase,
current_round,
current_step,
result_status,
phases_blockstamps,
schedule,
}
}
pub fn update_payload(&mut self, new_payload: Vec<u8>) {
self.payload = new_payload;
}
pub fn generate_xtx_id<T: Config>(&self) -> XtxId<T> {
T::Hashing::hash(Encode::encode(self).as_ref())
}
}
#[frame_support::pallet]
pub mod pallet {
use frame_support::pallet_prelude::*;
use frame_system::pallet_prelude::*;
use super::*;
/// Current Circuit's context of active transactions
///
/// The currently active composable transactions, indexed according to the order of creation.
#[pallet::storage]
pub type ActiveXtxMap<T> = StorageMap<
_,
Blake2_128Concat,
XtxId<T>,
Xtx<
<T as frame_system::Config>::AccountId,
<T as frame_system::Config>::BlockNumber,
<T as frame_system::Config>::Hash,
BalanceOf<T>,
>,
OptionQuery,
>;
/// This pallet's configuration trait
#[pallet::config]
pub trait Config:
frame_system::Config
+ pallet_bridge_messages::Config
+ pallet_balances::Config
+ VolatileVM
+ pallet_contracts_registry::Config
+ pallet_xdns::Config
+ pallet_contracts::Config
+ pallet_evm::Config
+ pallet_multi_finality_verifier::Config<DefaultPolkadotLikeGateway>
+ pallet_multi_finality_verifier::Config<PolkadotLikeValU64Gateway>
+ pallet_multi_finality_verifier::Config<EthLikeKeccak256ValU64Gateway>
+ pallet_multi_finality_verifier::Config<EthLikeKeccak256ValU32Gateway>
{
/// The overarching event type.
type Event: From<Event<Self>> + IsType<<Self as frame_system::Config>::Event>;
/// The overarching dispatch call type.
type Call: From<Call<Self>>;
type AccountId32Converter: Convert<Self::AccountId, [u8; 32]>;
type ToStandardizedGatewayBalance: Convert<BalanceOf<Self>, u128>;
}
#[pallet::pallet]
#[pallet::generate_store(pub(super) trait Store)]
pub struct Pallet<T>(_);
#[pallet::hooks]
impl<T: Config> Hooks<BlockNumberFor<T>> for Pallet<T> {
// `on_initialize` is executed at the beginning of the block before any extrinsic are
// dispatched.
//
// This function must return the weight consumed by `on_initialize` and `on_finalize`.
fn on_initialize(_n: T::BlockNumber) -> Weight {
// Anything that needs to be done at the start of the block.
// We don't do anything here.
0
}
fn on_finalize(_n: T::BlockNumber) {
// We don't do anything here.
}
// A runtime code run after every block and have access to extended set of APIs.
//
// For instance you can generate extrinsics for the upcoming produced block.
fn offchain_worker(_n: T::BlockNumber) {
// We don't do anything here.
// but we could dispatch extrinsic (transaction/unsigned/inherent) using
// sp_io::submit_extrinsic
}
}
/// A public part of the pallet.
#[pallet::call]
impl<T: Config> Pallet<T> {
    /// Parses the textual `io_schedule` against the submitted `components`,
    /// dry-runs the resulting cross-chain transaction (xtx), stores it in
    /// `ActiveXtxMap` and emits the outbound messages for relayers.
    #[pallet::weight(0)]
    pub fn submit_composable_exec_order(
        origin: OriginFor<T>,
        io_schedule: Vec<u8>,
        components: Vec<Compose<T::AccountId, BalanceOf<T>>>,
    ) -> DispatchResultWithPostInfo {
        // Retrieve sender of the transaction.
        let requester = ensure_signed(origin)?;
        ensure!(
            !(components.len() == 0 || io_schedule.len() == 0),
            "empty parameters submitted for execution order",
        );
        let inter_schedule: InterExecSchedule<T::AccountId, BalanceOf<T>> =
            Self::decompose_io_schedule(components.clone(), io_schedule.clone())
                .expect("Wrong io schedule");
        let escrow_account = select_validator_for_x_tx_dummy::<T>(io_schedule.clone())?;
        // In dry run we would like to:
        // 1. Parse and validate the syntax of unseen in the on-chain registry contracts
        // 1.2. Add them to the on-chain registry
        // 2. Fetch all of the contracts from on-chain registry involved in that execution and dry run as one xtx.
        // NOTE(review): `dry_run_whole_xtx` declares its parameters as
        // (inter_schedule, escrow_account, requester), but `requester` is
        // passed second here — confirm the intended argument order.
        let (new_xtx, contracts, _contract_ids, _contract_descriptions) =
            Self::dry_run_whole_xtx(
                inter_schedule.clone(),
                requester.clone(),
                escrow_account.clone(),
            )?;
        let x_tx_id: XtxId<T> = new_xtx.generate_xtx_id::<T>();
        ActiveXtxMap::<T>::insert(x_tx_id, &new_xtx);
        // Every time before the execution - preload the all of the involved contracts to the VM
        ExecComposer::preload_bunch_of_contracts::<T>(contracts.clone(), requester.clone())?;
        let submitter = Self::select_authority(escrow_account.clone())?;
        let first_step = Self::first_unprocessed_step(new_xtx.clone())?;
        let (value, input_data, gateway_id) = (
            first_step.value,
            first_step.input,
            None, // Assign None for on-chain targets
        );
        // ToDo: Work out max gas limit acceptable by each escrow
        let gas_limit = u64::max_value();
        // ToDo: Pick up execution for the last unconfirmed step
        let (circuit_outbound_messages, _last_executed_contract_no) =
            ExecComposer::pre_run_bunch_until_break::<T>(
                contracts,
                escrow_account.clone(),
                submitter,
                requester.clone(),
                value,
                input_data,
                gas_limit,
                gateway_id,
                // ToDo: Generate Circuit's params as default ABI
                Default::default(),
            )?;
        // ToDo: Enact on the info about round finished.
        Self::deposit_event(Event::StoredNewStep(
            requester.clone(),
            x_tx_id,
            circuit_outbound_messages,
        ));
        Ok(().into())
    }
    /// Registers a new gateway: records it in the XDNS and initializes the
    /// light-client bridge instance whose flavour matches the gateway's
    /// hashing algorithm and block-number width.
    #[pallet::weight(0)]
    pub fn register_gateway(
        origin: OriginFor<T>,
        url: Vec<u8>,
        gateway_id: bp_runtime::ChainId,
        gateway_abi: GatewayABIConfig,
        gateway_vendor: t3rn_primitives::GatewayVendor,
        gateway_type: t3rn_primitives::GatewayType,
        gateway_genesis: GatewayGenesisConfig,
        first_header: GenericPrimitivesHeader,
        authorities: Option<Vec<T::AccountId>>,
    ) -> DispatchResultWithPostInfo {
        // Retrieve sender of the transaction.
        pallet_xdns::Pallet::<T>::add_new_xdns_record(
            origin.clone(),
            url,
            gateway_id,
            gateway_abi.clone(),
            gateway_vendor,
            gateway_type,
            gateway_genesis,
        )?;
        // Dispatch on (hasher, block-number size); anything unrecognised
        // falls back to the default Polkadot-like bridge.
        let res = match (gateway_abi.hasher, gateway_abi.block_number_type_size) {
            (HA::Blake2, 32) => init_bridge_instance::<T, DefaultPolkadotLikeGateway>(
                origin,
                first_header,
                authorities,
                gateway_id,
            )?,
            (HA::Blake2, 64) => init_bridge_instance::<T, PolkadotLikeValU64Gateway>(
                origin,
                first_header,
                authorities,
                gateway_id,
            )?,
            (HA::Keccak256, 32) => init_bridge_instance::<T, EthLikeKeccak256ValU32Gateway>(
                origin,
                first_header,
                authorities,
                gateway_id,
            )?,
            (HA::Keccak256, 64) => init_bridge_instance::<T, EthLikeKeccak256ValU64Gateway>(
                origin,
                first_header,
                authorities,
                gateway_id,
            )?,
            (_, _) => init_bridge_instance::<T, DefaultPolkadotLikeGateway>(
                origin,
                first_header,
                authorities,
                gateway_id,
            )?,
        };
        Ok(res.into())
    }
    /// Confirms execution of one step of an active xtx by checking the
    /// supplied merkle inclusion proof against the root tracked by the
    /// gateway's bridge for the declared block.
    #[pallet::weight(0)]
    pub fn submit_step_confirmation(
        origin: OriginFor<T>,
        step_confirmation: StepConfirmation,
        xtx_id: XtxId<T>,
    ) -> DispatchResultWithPostInfo {
        // Retrieve sender of the transaction.
        let _relayer_id = ensure_signed(origin)?;
        let xtx: Xtx<T::AccountId, T::BlockNumber, T::Hash, BalanceOf<T>> =
            ActiveXtxMap::<T>::get(xtx_id.clone())
                .expect("submitted to confirm step id does not match with any Xtx");
        let current_step = xtx.schedule.phases[xtx.current_round as usize].clone()
            [step_confirmation.clone().step_index as usize]
            .clone();
        // ToDo: parse events to discover their content and verify execution
        // Check inclusion relying on data in palet-multi-verifier
        let gateway_id = current_step
            .gateway_id
            .expect("Confirmation step for remote (Some) gateways only");
        let gateway_xdns_record =
            pallet_xdns::Pallet::<T>::xdns_registry(current_step.gateway_entry_id)
                .ok_or(Error::<T>::StepConfirmationGatewayNotRecognised)?;
        let declared_block_hash = step_confirmation.proof.block_hash;
        // Fetch (extrinsics_root, storage_root) from the bridge flavour that
        // matches the gateway's ABI, mirroring the dispatch in register_gateway.
        let (extrinsics_root_h256, storage_root_h256) = match (
            gateway_xdns_record.gateway_abi.hasher.clone(),
            gateway_xdns_record.gateway_abi.block_number_type_size,
        ) {
            (HA::Blake2, 32) => get_roots_from_bridge::<T, DefaultPolkadotLikeGateway>(
                declared_block_hash,
                gateway_id,
            )?,
            (HA::Blake2, 64) => get_roots_from_bridge::<T, PolkadotLikeValU64Gateway>(
                declared_block_hash,
                gateway_id,
            )?,
            (HA::Keccak256, 32) => get_roots_from_bridge::<T, EthLikeKeccak256ValU32Gateway>(
                declared_block_hash,
                gateway_id,
            )?,
            (HA::Keccak256, 64) => get_roots_from_bridge::<T, EthLikeKeccak256ValU64Gateway>(
                declared_block_hash,
                gateway_id,
            )?,
            (_, _) => get_roots_from_bridge::<T, DefaultPolkadotLikeGateway>(
                declared_block_hash,
                gateway_id,
            )?,
        };
        // Select which root the proof must resolve to.
        // NOTE(review): `Receipts` maps to the storage root here — confirm
        // this is intentional for the supported gateways.
        let expected_root = match step_confirmation.proof.proof_trie_pointer {
            ProofTriePointer::State => storage_root_h256,
            ProofTriePointer::Transaction => extrinsics_root_h256,
            ProofTriePointer::Receipts => storage_root_h256,
        };
        if let Err(computed_root) = check_merkle_proof(
            expected_root,
            step_confirmation.proof.proof_data.into_iter(),
            gateway_xdns_record.gateway_abi.hasher,
        ) {
            log::trace!(
                target: "circuit-runtime",
                "Step confirmation check failed: inclusion root mismatch. Expected: {}, computed: {}",
                expected_root,
                computed_root,
            );
            Err(Error::<T>::StepConfirmationInvalidInclusionProof.into())
        } else {
            // ToDo: Enact on the confirmation step and save the update
            // Self::update_xtx(&xtx, xtx_id, step_confirmation);
            // Self::maybe_resume_xtx(&xtx);
            Ok(().into())
        }
    }
}
/// Events for the pallet.
#[pallet::event]
#[pallet::generate_deposit(pub(super) fn deposit_event)]
pub enum Event<T: Config> {
    /// A new phase was entered. (Doc wording inherited from the upstream
    /// offchain-worker example; no price averaging happens in this pallet.)
    /// \[who, phase, name\]
    NewPhase(T::AccountId, u8, Vec<u8>),
    /// News steps that were just added for relayers to deliver.
    /// \[who, id, steps\]
    StoredNewStep(T::AccountId, XtxId<T>, Vec<CircuitOutboundMessage>),
}
#[pallet::error]
pub enum Error<T> {
    /// Non existent public key.
    InvalidKey,
    /// The submitted IO schedule does not end with the `;` terminator.
    IOScheduleNoEndingSemicolon,
    /// The submitted IO schedule is empty after whitespace trimming.
    IOScheduleEmpty,
    /// A step in the IO schedule names a component that was not submitted.
    IOScheduleUnknownCompose,
    /// Gateway referenced while processing a step is not registered
    /// (not raised in this chunk's code paths).
    ProcessStepGatewayNotRecognised,
    /// Block referenced by a step confirmation is unknown
    /// (not raised in this chunk's code paths).
    StepConfirmationBlockUnrecognised,
    /// Gateway referenced by a step confirmation has no XDNS record.
    StepConfirmationGatewayNotRecognised,
    /// The merkle inclusion proof of a step confirmation did not verify.
    StepConfirmationInvalidInclusionProof,
    /// Decoding a step confirmation payload failed
    /// (not raised in this chunk's code paths).
    StepConfirmationDecodingError,
}
}
/// Signable payload carrying a public key and the block number it was
/// produced at. (The original comment mentioned "price data" — wording left
/// over from the upstream offchain-worker example; no price is stored here.)
#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug)]
pub struct Payload<Public, BlockNumber> {
    block_number: BlockNumber,
    public: Public,
}
impl<T: SigningTypes> SignedPayload<T> for Payload<T::Public, T::BlockNumber> {
    /// Returns the public key this payload should be signed with.
    fn public(&self) -> T::Public {
        self.public.clone()
    }
}
impl<T: Config> Pallet<T> {
    /// Receives a list of available components and an io schedule in text format
    /// and parses it to create an execution schedule
    pub fn decompose_io_schedule(
        _components: Vec<Compose<T::AccountId, BalanceOf<T>>>,
        _io_schedule: Vec<u8>,
    ) -> Result<InterExecSchedule<T::AccountId, BalanceOf<T>>, &'static str> {
        // set constants
        const WHITESPACE_MATRIX: [u8; 4] = [b' ', b'\t', b'\r', b'\n'];
        const PHASE_SEPARATOR: u8 = b'|';
        const STEP_SEPARATOR: u8 = b',';
        const SCHEDULE_END: u8 = b';';
        // trims all whitespace chars from io_schedule vector
        fn trim_whitespace(input_string: Vec<u8>) -> Vec<u8> {
            let mut result = input_string.clone();
            // checks if character is whitespace
            let is_whitespace = |x: &u8| WHITESPACE_MATRIX.contains(x);
            let mut i = 0;
            while i < result.len() {
                if is_whitespace(&result[i]) {
                    // Note: Vec::remove is O(n), so trimming is O(n^2) in the
                    // worst case; acceptable for short schedules.
                    result.remove(i);
                } else {
                    i += 1;
                }
            }
            result
        }
        // converts an exec_step vector string to an ExecStep
        // throws error if a component is not found
        let to_exec_step = |name: Vec<u8>| {
            let compose = _components
                .clone()
                .into_iter()
                .find(|comp| comp.name.encode() == name.encode());
            match compose {
                Some(value) => Ok(ExecStep { compose: value }),
                None => Err(Error::<T>::IOScheduleUnknownCompose),
            }
        };
        // splits a phase vector into ExecSteps
        let split_into_steps = |phase: Vec<u8>| {
            phase
                .split(|char| char.eq(&STEP_SEPARATOR))
                .filter(|step| !step.is_empty())
                .map(|step| to_exec_step(step.to_vec()))
                .collect()
        };
        // splits an io_schedule into phases and then into steps
        let split_into_phases = |io_schedule: Vec<u8>| {
            io_schedule
                .split(|character| character.eq(&PHASE_SEPARATOR))
                .filter(|phase| !phase.is_empty())
                .map(|phase| {
                    let steps: Result<Vec<ExecStep<T::AccountId, BalanceOf<T>>>, crate::Error<T>> =
                        split_into_steps(phase.to_vec());
                    ensure!(steps.is_ok(), Error::<T>::IOScheduleUnknownCompose);
                    Ok(ExecPhase {
                        steps: steps.unwrap(),
                    })
                })
                .collect()
        };
        let mut cloned = trim_whitespace(_io_schedule);
        // make sure schedule is not empty
        // probably irrelevant since there is already a check for that
        let last_char = cloned.last();
        ensure!(last_char.is_some(), Error::<T>::IOScheduleEmpty);
        // make sure the schedule ends correctly and remove ending character or panic
        let ends_correctly = last_char.eq(&Some(&SCHEDULE_END));
        ensure!(ends_correctly, Error::<T>::IOScheduleNoEndingSemicolon);
        cloned.remove(cloned.len() - 1);
        // make sure schedule can be split into phases
        let phases: Result<Vec<ExecPhase<T::AccountId, BalanceOf<T>>>, crate::Error<T>> =
            split_into_phases(cloned);
        ensure!(phases.is_ok(), Error::<T>::IOScheduleUnknownCompose);
        Ok(InterExecSchedule {
            phases: phases.unwrap(),
        })
    }
    /// Dry run submitted cross-chain transaction
    /// User can additionally submit the IO schedule which comes on top as an additional order maker.
    /// inter_schedule was analysed already and we at this point we can be sure within
    /// the inter_schedule components are in the correct order. At least an order that requester expects.
    /// Task of the dry_run here is the decompose the phases into additional rounds that can be submitted in parallel.
    /// The output is cross-chain transaction with a fixed schedule that covers all future steps of the incoming rounds and phases.
    ///
    /// NOTE(review): the caller in `submit_composable_exec_order` passes
    /// `(inter_schedule, requester, escrow_account)` — the last two arguments
    /// swapped relative to this signature. Confirm which order is intended
    /// before relying on `escrow_account`/`requester` downstream.
    pub fn dry_run_whole_xtx(
        inter_schedule: InterExecSchedule<T::AccountId, BalanceOf<T>>,
        escrow_account: T::AccountId,
        requester: T::AccountId,
    ) -> Result<
        (
            Xtx<T::AccountId, T::BlockNumber, T::Hash, BalanceOf<T>>,
            Vec<RegistryContract<T::Hash, T::AccountId, BalanceOf<T>, T::BlockNumber>>,
            Vec<RegistryContractId<T>>,
            Vec<ContractActionDesc<T::Hash, ChainId, T::AccountId>>,
        ),
        &'static str,
    > {
        let mut contracts = vec![];
        let mut unseen_contracts = vec![];
        let mut seen_contracts = vec![];
        let mut contract_ids = vec![];
        let mut action_descriptions = vec![];
        // ToDo: Better phases getter
        let first_phase = inter_schedule
            .phases
            .get(0)
            .expect("At least one phase should always be there in inter_schedule");
        // Check if there are some unseen contracts - if yes dry_run them in a single context. If fine - add to the contracts-repo.
        for step in &first_phase.steps {
            // Contract identity = hash of (source text ++ compiled bytes).
            let mut protocol_part_of_contract = step.compose.code_txt.clone();
            protocol_part_of_contract.extend(step.compose.bytes.clone());
            let key = T::Hashing::hash(Encode::encode(&mut protocol_part_of_contract).as_ref());
            // If invalid new contract was submitted for execution - break. Otherwise, add the new contract to on-chain registry.
            if !pallet_contracts_registry::ContractsRegistry::<T>::contains_key(key) {
                let unseen_contract =
                    ExecComposer::dry_run_single_contract::<T>(step.compose.clone())?;
                // Assuming dry run step went well, add the contract now
                pallet_contracts_registry::ContractsRegistry::<T>::insert(key, &unseen_contract);
                unseen_contracts.push(unseen_contract.clone());
                action_descriptions.extend(unseen_contract.action_descriptions);
            } else {
                // Query for the existent contract and push to queue.
                let seen_contract = pallet_contracts_registry::ContractsRegistry::<T>::get(key)
                    .expect("contains_key called above before accessing the contract");
                action_descriptions.extend(seen_contract.action_descriptions.clone());
                seen_contracts.push(seen_contract);
            }
            contract_ids.push(key);
        }
        // Already-known contracts come first, then the newly registered ones.
        contracts.extend(seen_contracts);
        contracts.extend(unseen_contracts);
        let (current_block_no, block_zero) = (
            <frame_system::Pallet<T>>::block_number(),
            T::BlockNumber::zero(),
        );
        let max_steps = contracts.len() as u32;
        let new_xtx = Xtx::<
            T::AccountId,
            T::BlockNumber,
            <T as frame_system::Config>::Hash,
            BalanceOf<T>,
        >::new(
            Default::default(),
            Default::default(),
            requester.clone(),
            escrow_account.clone(),
            vec![],
            0,
            max_steps,
            0,
            0,
            vec![],
            (current_block_no, block_zero),
            Default::default(),
        );
        Ok((new_xtx, contracts, contract_ids, action_descriptions))
    }
    /// Advances the xtx identified by `x_tx_id`: completes it once the step
    /// cursor has moved past the last step, otherwise processes the steps of
    /// the current round.
    pub fn process_phase(
        x_tx_id: XtxId<T>,
        _components: Vec<Compose<T::AccountId, BalanceOf<T>>>,
        escrow_account: T::AccountId,
        _schedule: InterExecSchedule<T::AccountId, BalanceOf<T>>,
    ) -> Result<Vec<CircuitOutboundMessage>, &'static str> {
        let current_xtx =
            ActiveXtxMap::<T>::get(x_tx_id).ok_or("Cross-chain tx not found while process_step")?;
        if current_xtx.current_step > current_xtx.steps_no {
            Self::complete_xtx(current_xtx.clone())
        } else {
            let steps_in_current_round = current_xtx
                .schedule
                .phases
                .get(current_xtx.current_round as usize)
                .expect("Each round in schedule should be aligned with current_round in storage");
            Self::process_round(
                steps_in_current_round.to_vec(),
                escrow_account,
                current_xtx.requester,
            )
        }
    }
    /// Builds the outbound messages for one round of steps.
    /// Currently a stub returning an empty message list.
    pub fn process_round(
        _round_steps: RoundEntry<T::AccountId, T::BlockNumber, T::Hash, BalanceOf<T>>,
        _escrow_account: T::AccountId,
        _requester: T::AccountId,
    ) -> Result<Vec<CircuitOutboundMessage>, &'static str> {
        let current_round_messages: Vec<CircuitOutboundMessage> = vec![];
        let _constructed_outbound_messages = &mut Vec::<CircuitOutboundMessage>::new();
        Ok(current_round_messages)
    }
    /// Returns the step the xtx's (current_round, current_step) cursor points
    /// at. Panics (index out of bounds) if the cursor is out of range.
    pub fn first_unprocessed_step(
        xtx: Xtx<T::AccountId, T::BlockNumber, <T as frame_system::Config>::Hash, BalanceOf<T>>,
    ) -> Result<StepEntry<T::AccountId, T::BlockNumber, T::Hash, BalanceOf<T>>, &'static str> {
        let current_step = xtx.schedule.phases[xtx.current_round as usize].clone()
            [xtx.current_step as usize]
            .clone();
        Ok(current_step)
    }
    /// Finds the locally-held authority key matching `escrow_account`.
    /// NOTE(review): assumes the account's SCALE encoding is byte-compatible
    /// with `AuthorityId` — confirm for the configured crypto scheme.
    pub fn select_authority(escrow_account: T::AccountId) -> Result<AuthorityId, &'static str> {
        let mut local_keys = AuthorityId::all();
        // Sort so the lookup below can use binary search.
        local_keys.sort();
        let auth = AuthorityId::from_slice(escrow_account.encode().as_slice());
        let submitter = local_keys
            .binary_search(&auth)
            .ok()
            .map(|location| local_keys[location].clone())
            .ok_or("Can't match authority for given account")?;
        Ok(submitter)
    }
    // ToDo: complete_xtx
    /// Finalizes a cross-chain transaction. Currently a stub.
    fn complete_xtx(
        _xtx: Xtx<T::AccountId, T::BlockNumber, <T as frame_system::Config>::Hash, BalanceOf<T>>,
    ) -> Result<Vec<CircuitOutboundMessage>, &'static str> {
        // Decide on the next execution phase and enact on it
        Ok(vec![])
    }
}
| 37.954139 | 131 | 0.606171 |
e9f6c5781783c590bf072cda53ca0aa9d4a5ca73 | 3,983 | swift | Swift | Sources/Animations/PhotoInfoAnimation.swift | Parallaxer/PhotoBook | d0f8fe2a0ad7b56c8b3eba3535cea508f1c6f0a9 | [
"MIT"
] | 2 | 2016-08-02T03:18:11.000Z | 2018-02-25T02:04:00.000Z | Sources/Animations/PhotoInfoAnimation.swift | Parallaxer/PhotoBook | d0f8fe2a0ad7b56c8b3eba3535cea508f1c6f0a9 | [
"MIT"
] | null | null | null | Sources/Animations/PhotoInfoAnimation.swift | Parallaxer/PhotoBook | d0f8fe2a0ad7b56c8b3eba3535cea508f1c6f0a9 | [
"MIT"
] | null | null | null | import Parallaxer
import RxCocoa
import RxSwift
import UIKit
/// Animates the photo info view on and off screen, locking photo book interaction while the info is shown.
final class PhotoInfoAnimation {

    /// Whether the photo book can receive input.
    var photoBookInteractionEnabled: Signal<Bool> {
        return interactionRelay.asSignal()
    }

    /// The photo book alpha value.
    var photoBookAlpha: Signal<CGFloat> {
        return fadeRelay.asSignal()
    }

    /// The size of the photo book, normalized between 0 and 1.
    var photoBookScale: Signal<CGFloat> {
        return zoomRelay.asSignal()
    }

    /// The length of the info view drawer, which is expected to start at the bottom of the screen.
    var photoInfoDrawerLength: Signal<CGFloat> {
        return drawerRelay.asSignal()
    }

    private let interactionRelay = PublishRelay<Bool>()
    private let fadeRelay = PublishRelay<CGFloat>()
    private let zoomRelay = PublishRelay<CGFloat>()
    private let drawerRelay = PublishRelay<CGFloat>()

    private let maxDrawerLength: CGFloat

    init(maxDrawerLength: CGFloat) {
        self.maxDrawerLength = maxDrawerLength
    }

    /// Bind the drawer interaction which drives the photo info animation.
    func bindDrawerInteraction(_ drawerInteraction: Observable<ParallaxTransform<CGFloat>>) -> Disposable {
        // Normalize the incoming transform onto the unit interval and share
        // the single subscription across all four effects below.
        let visibility = drawerInteraction
            .parallaxRelate(to: ParallaxInterval<CGFloat>.rx.interval(from: 0, to: 1))
            .share()
        let effects: [Disposable] = [
            lockPhotoBook(visibility: visibility),
            fadePhotoBook(visibility: visibility),
            scalePhotoBook(visibility: visibility),
            revealPhotoInfo(visibility: visibility)
        ]
        return Disposables.create(effects)
    }

    /// Blocks photo book input for the whole duration of the transition.
    private func lockPhotoBook(visibility: Observable<ParallaxTransform<CGFloat>>) -> Disposable {
        return visibility
            .parallaxValue()
            // Interaction is allowed only while the info view is fully hidden.
            .map { $0 == CGFloat(0) }
            .bind(to: interactionRelay)
    }

    /// Fades the photo book from fully opaque down to 75% opacity.
    private func fadePhotoBook(visibility: Observable<ParallaxTransform<CGFloat>>) -> Disposable {
        return visibility
            .parallaxRelate(to: .interval(from: 1, to: 0.75))
            .parallaxValue()
            .bind(to: fadeRelay)
    }

    /// Shrinks the photo book down to 90% of its normal size.
    private func scalePhotoBook(visibility: Observable<ParallaxTransform<CGFloat>>) -> Disposable {
        return visibility
            .parallaxRelate(to: .interval(from: 1, to: 0.9))
            .parallaxValue()
            .bind(to: zoomRelay)
    }

    /// Grows the info drawer from zero up to its maximum length.
    private func revealPhotoInfo(visibility: Observable<ParallaxTransform<CGFloat>>) -> Disposable {
        return visibility
            // Don't begin to show the photo info view until 25% of the interaction has occurred; this gives
            // the animation a sense of depth as well as priority. We want the photo book to appear to move
            // backward a little bit before the photo info view becomes visible.
            .parallaxFocus(on: .interval(from: CGFloat(0.25), to: CGFloat(1.0)))
            // Over the focus interval, grow the drawer from 0 to maxDrawerLength.
            // In a production app you may want an observable height instead of a
            // fixed value, especially if the content varies in size.
            .parallaxRelate(to: .interval(from: CGFloat(0), to: maxDrawerLength))
            .parallaxValue()
            .bind(to: drawerRelay)
    }
}
| 38.669903 | 109 | 0.662315 |
a3d0e45e02b1d2b09a1f139d5d261bd60f9f3a0a | 29 | kts | Kotlin | basic-example/product/settings.gradle.kts | cebartling/saga-pattern-examples | 2687b751e3eb3d2482c9548791dc14df75330a70 | [
"MIT"
] | null | null | null | basic-example/product/settings.gradle.kts | cebartling/saga-pattern-examples | 2687b751e3eb3d2482c9548791dc14df75330a70 | [
"MIT"
] | null | null | null | basic-example/product/settings.gradle.kts | cebartling/saga-pattern-examples | 2687b751e3eb3d2482c9548791dc14df75330a70 | [
"MIT"
] | null | null | null | rootProject.name = "product"
| 14.5 | 28 | 0.758621 |
3b1f9b0e69588d04f46c9d66caf82d49a19bd0a8 | 339 | asm | Assembly | programs/oeis/004/A004960.asm | neoneye/loda | afe9559fb53ee12e3040da54bd6aa47283e0d9ec | [
"Apache-2.0"
] | 22 | 2018-02-06T19:19:31.000Z | 2022-01-17T21:53:31.000Z | programs/oeis/004/A004960.asm | neoneye/loda | afe9559fb53ee12e3040da54bd6aa47283e0d9ec | [
"Apache-2.0"
] | 41 | 2021-02-22T19:00:34.000Z | 2021-08-28T10:47:47.000Z | programs/oeis/004/A004960.asm | neoneye/loda | afe9559fb53ee12e3040da54bd6aa47283e0d9ec | [
"Apache-2.0"
] | 5 | 2021-02-24T21:14:16.000Z | 2021-08-09T19:48:05.000Z | ; A004960: a(n) = ceiling(n*phi^5), where phi is the golden ratio, A001622.
; 0,12,23,34,45,56,67,78,89,100,111,122,134,145,156,167,178,189,200,211,222,233,244,256,267,278,289,300,311,322,333,344,355,366,378,389,400,411,422,433,444,455,466,477
mov $1,$0
add $0,1
mul $1,$0
mul $0,11
lpb $1
sub $1,$0
add $0,1
trn $1,1
lpe
sub $0,11
| 24.214286 | 167 | 0.657817 |
2f1a029f833ef9a4c919a4664962b8496cf5c9dc | 7,227 | lua | Lua | after/plugin/lspconfig.lua | zouzonghua/.nvim | 1cff36a872ac024c704021dad7dc20f521afd9b0 | [
"MIT"
] | 1 | 2021-08-31T04:12:43.000Z | 2021-08-31T04:12:43.000Z | after/plugin/lspconfig.lua | zouzonghua/nvim | 1cff36a872ac024c704021dad7dc20f521afd9b0 | [
"MIT"
] | null | null | null | after/plugin/lspconfig.lua | zouzonghua/nvim | 1cff36a872ac024c704021dad7dc20f521afd9b0 | [
"MIT"
] | null | null | null | local status, nvim_lsp = pcall(require, 'lspconfig')
if not status then
return
end
-- need install `npm i -g typescript typescript-language-server vscode-langservers-extracted diagnostic-languageserver eslint_d prettier stylelint`
local nvim_lsp = require 'lspconfig'
local protocol = require 'vim.lsp.protocol'
-- Enable (broadcasting) snippet capability for completion
local capabilities = vim.lsp.protocol.make_client_capabilities()
capabilities.textDocument.completion.completionItem.snippetSupport = true
nvim_lsp.cssls.setup { capabilities = capabilities }
-- Shared buffer-local setup that runs whenever a language server attaches:
-- enables LSP omni-completion and installs the normal-mode keymaps.
local on_attach = function(client, bufnr)
  -- Enable completion triggered by <c-x><c-o>
  vim.api.nvim_buf_set_option(bufnr, 'omnifunc', 'v:lua.vim.lsp.omnifunc')
  local opts = { noremap = true, silent = true }
  -- Keymaps, registered in order. All are normal-mode, buffer-local.
  local mappings = {
    { 'gD', '<cmd>lua vim.lsp.buf.declaration()<CR>' },
    { 'gd', '<cmd>lua vim.lsp.buf.definition()<CR>' },
    { 'K', '<cmd>lua vim.lsp.buf.hover()<CR>' },
    { 'gi', '<cmd>lua vim.lsp.buf.implementation()<CR>' },
    -- { '<C-k>', '<cmd>lua vim.lsp.buf.signature_help()<CR>' },
    { '<space>wa', '<cmd>lua vim.lsp.buf.add_workspace_folder()<CR>' },
    { '<space>wr', '<cmd>lua vim.lsp.buf.remove_workspace_folder()<CR>' },
    { '<space>wl', '<cmd>lua print(vim.inspect(vim.lsp.buf.list_workspace_folders()))<CR>' },
    { '<space>D', '<cmd>lua vim.lsp.buf.type_definition()<CR>' },
    { '<space>rn', '<cmd>lua vim.lsp.buf.rename()<CR>' },
    { '<leader>ca', '<cmd>lua vim.lsp.buf.code_action()<CR>' },
    { 'gr', '<cmd>lua vim.lsp.buf.references()<CR>' },
    { '<space>e', '<cmd>lua vim.lsp.diagnostic.show_line_diagnostics()<CR>' },
    { '[e', '<cmd>lua vim.lsp.diagnostic.goto_prev()<CR>' },
    { ']e', '<cmd>lua vim.lsp.diagnostic.goto_next()<CR>' },
    { '<space>q', '<cmd>lua vim.lsp.diagnostic.set_loclist()<CR>' },
    { '<leader>fm', '<cmd>lua vim.lsp.buf.formatting()<CR>' },
  }
  for _, mapping in ipairs(mappings) do
    vim.api.nvim_buf_set_keymap(bufnr, 'n', mapping[1], mapping[2], opts)
  end
end
-- Formatter definitions consumed by the efm general-purpose language server.
local efm_formatters = {
  prettier = { formatCommand = 'prettier', rootMarkers = { 'package.json' } },
  stylua = {
    formatCommand = 'stylua --config-path ~/.config/stylua.toml -',
    formatStdin = true,
  },
  -- Currently unused alternative Lua formatter; kept for easy switching.
  lua = { formatCommand = 'lua-format -i', formatStdin = true },
}
nvim_lsp.efm.setup {
  on_attach = on_attach,
  cmd = { 'efm-langserver', '-logfile', '/tmp/efm.log', '-loglevel', '5' },
  init_options = { documentFormatting = true },
  filetypes = {
    'lua',
    'html',
    -- 'javascript',
    -- 'javascriptreact',
    -- 'typescript',
    -- 'typescriptreact',
  },
  settings = {
    rootMarkers = { '.git/' },
    languages = {
      lua = { efm_formatters.stylua },
      -- Fixed: was `efm_formatters.orettier` (typo), which resolved to nil
      -- and silently disabled HTML formatting through efm.
      html = { efm_formatters.prettier },
      -- javascript = { efm_formatters.prettier },
      -- javascriptreact = { efm_formatters.prettier },
      -- typescript = { efm_formatters.prettier },
      -- typescriptreact = { efm_formatters.prettier },
    },
  },
}
-- nvim_lsp.flow.setup { on_attach = on_attach }
nvim_lsp.tsserver.setup {
  -- Disable tsserver's own formatting (prettier handles it via diagnosticls)
  -- before running the shared attach logic.
  -- Fixed: the wrapper previously dropped `bufnr` and called `on_attach(client)`,
  -- so the buffer-local option/keymap calls inside on_attach received nil
  -- instead of the attached buffer handle.
  on_attach = function(client, bufnr)
    client.resolved_capabilities.document_formatting = false
    on_attach(client, bufnr)
  end,
  filetypes = { 'typescript', 'typescriptreact', 'typescript.tsx' },
}
-- diagnosticls bridges CLI linters/formatters (eslint_d, stylelint, prettier)
-- into the LSP diagnostics and formatting protocol.
nvim_lsp.diagnosticls.setup {
  on_attach = on_attach,
  filetypes = {
    'javascript',
    'javascriptreact',
    'json',
    'typescript',
    'typescriptreact',
    'css',
    'less',
    'scss',
    'markdown',
  },
  init_options = {
    -- How each external linter is invoked and how its output becomes diagnostics.
    linters = {
      eslint = {
        command = 'eslint_d',
        rootPatterns = { '.git' },
        debounce = 100,
        args = {
          '--stdin',
          '--stdin-filename',
          '%filepath',
          '--format',
          'json',
        },
        sourceName = 'eslint_d',
        -- Maps fields of eslint's JSON output onto LSP diagnostic fields.
        parseJson = {
          errorsRoot = '[0].messages',
          line = 'line',
          column = 'column',
          endLine = 'endLine',
          endColumn = 'endColumn',
          message = '[eslint] ${message} [${ruleId}]',
          security = 'severity',
        },
        -- eslint severity 2 -> error, 1 -> warning.
        securities = { [2] = 'error', [1] = 'warning' },
      },
      stylelint = {
        sourceName = 'stylelint',
        command = 'stylelint',
        args = { '--formatter', 'compact', '%filepath' },
        rootPatterns = { '.git' },
        debounce = 100,
        -- Regex over stylelint's "compact" formatter output; capture groups
        -- feed the field indices below.
        formatPattern = {
          [[: line (\d+), col (\d+), (warning|error) - (.+?) \((.+)\)]],
          {
            line = 1,
            column = 2,
            security = 3,
            message = { 4, ' [', 5, ']' },
          },
        },
        securities = {
          warning = 'warning',
          error = 'error',
        },
      },
    },
    -- Which linter runs for which filetype.
    filetypes = {
      javascript = 'eslint',
      javascriptreact = 'eslint',
      typescript = 'eslint',
      typescriptreact = 'eslint',
      scss = 'stylelint',
      css = 'stylelint',
    },
    formatters = {
      eslint_d = {
        command = 'eslint_d',
        args = { '--stdin', '--fix-to-stdout', '--stdin-filename', '%filepath' },
      },
      prettier = {
        command = 'prettier',
        args = { '--stdin-filepath', '%filename' },
      },
      stylelint = {
        command = 'stylelint',
        args = { '--fix', '--stdin', '--stdin-filename', '%filepath' },
      },
    },
    -- Which formatter runs for which filetype.
    formatFiletypes = {
      html = 'prettier',
      json = 'prettier',
      markdown = 'prettier',
      css = 'stylelint',
      scss = 'stylelint',
      less = 'stylelint',
      javascript = 'prettier',
      javascriptreact = 'prettier',
      typescript = 'prettier',
      typescriptreact = 'prettier',
    },
  },
}
-- Diagnostic rendering: no gutter signs/icons, keep underlines, and show
-- virtual text with 4 cells of spacing and no prefix glyph.
vim.lsp.handlers['textDocument/publishDiagnostics'] = vim.lsp.with(
  vim.lsp.diagnostic.on_publish_diagnostics,
  {
    signs = false,
    underline = true,
    virtual_text = { spacing = 4, prefix = '' },
  }
)
| 35.600985 | 147 | 0.517227 |
7422297a6350b6bf5f7d782b8707e968fabb78e6 | 54,864 | h | C | Examples/include/asposecpplib/system/smart_ptr.h | kashifiqb/Aspose.PDF-for-C | 13d49bba591c5704685820185741e64a462a5bdc | [
"MIT"
] | null | null | null | Examples/include/asposecpplib/system/smart_ptr.h | kashifiqb/Aspose.PDF-for-C | 13d49bba591c5704685820185741e64a462a5bdc | [
"MIT"
] | null | null | null | Examples/include/asposecpplib/system/smart_ptr.h | kashifiqb/Aspose.PDF-for-C | 13d49bba591c5704685820185741e64a462a5bdc | [
"MIT"
] | null | null | null | /// @file system/smart_ptr.h
#ifndef _aspose_system_smart_ptr_h_
#define _aspose_system_smart_ptr_h_
#include "smart_ptr_counter.h"
#include "detail.h"
#include "select_type.h"
#include <utility>
#include <cassert>
#include <type_traits>
namespace System {
// Forward declarations of types referenced by SmartPtr and its helpers.
class TypeInfo;
class SmartPtrInfo;
template <typename T> class Array;
template <typename T> class SmartPtr;
namespace Details {
/// Resolves types defined by SmartPtr for non-array types.
/// @tparam T SmartPtr pointee type.
template <typename T> struct ArrayTypeResolver
{
    /// Array type; void as pointee is non-array type.
    using type = void;
    /// Element type; void as pointee is non-array type.
    using value_type = void;
};
/// Resolves types defined by SmartPtr for array types.
/// @tparam T Array element type.
template <typename T> struct ArrayTypeResolver<Array<T>>
{
    /// Array type.
    using type = Array<T>;
    /// Array element type.
    using value_type = T;
};
/// Gets array element by index; moves operator [] template magic out of SmartPtr class.
/// @tparam X Array element type.
/// @tparam IdxType Index type.
/// @param ptr Array to get element from.
/// @param idx Array index.
/// @return Reference to array element.
template <typename X, typename IdxType> typename Array<X>::UnderlyingType& GetByIndex(const SmartPtr<Array<X>> *ptr, IdxType idx);
/// Creates empty array of required type.
/// @tparam T Array element type.
/// @return Raw pointer to newly created array.
template <typename T> Array<T>* CreateArray(Array<T>*);
}
/// SmartPtr pointer type: weak or shared.
/// Defines whether pointer is being counted when it is being decided whether to delete object or not.
enum class SmartPtrMode : char
{
    /// Shared mode: pointer participates in reference counting
    Shared,
    /// Weak mode: pointer does not participate in reference counting
    Weak
};
/// Tag type passed to a dedicated SmartPtr constructor to request creation of an empty array.
struct EmptyArrayInitializer {};
/// @brief Pointer class to wrap types being allocated on heap. Use it to manage memory for classes inheriting Object.
/// This pointer type follows intrusive pointer semantics. Reference counter is stored either in Object itself or in counter structure
/// which is tied to Object instance tightly. In any case, all SmartPtr instances form single ownership group regardless how they were created
/// which is unlike how std::shared_ptr class behaves. Converting raw pointer to SmartPtr is safe given there are other SmartPtr instances
/// holding shared references to the same object.
/// SmartPtr class instance can be in one of two states: shared pointer and weak pointer. To keep object alive, one should have count of shared
/// references to it positive. Both weak and shared pointers can be used to access pointed object (to call methods, read or write fields, etc.),
/// but weak pointers do not participate to shared pointer reference counting.
/// Object is being deleted when the last 'shared' SmartPtr pointer to it is being destroyed. So, make sure that this doesn't happen when no
/// other shared SmartPtr pointers to object exist, e. g. during object construction or destruction. Use System::Object::ThisProtector sentry
/// objects (in C++ code) or CppCTORSelfReference or CppSelfReference attribute (in C# code being ported) to fix this issue.
/// Similarily, make sure to break loop references by using System::WeakPtr pointer class or System::SmartPtrMode::Weak pointer mode (in C++
/// code) or CppWeakPtr attribute (in C# code being ported). If two or more objects reference each other using 'shared' pointers, they will
/// never be deleted.
/// If pointer type (weak or shared) should be switched in runtime, use System::SmartPtr<T>::set_Mode() method or System::DynamicWeakPtr class.
/// SmartPtr class doesn't contain any virtual methods. You should only inherit it if you're creating a memory management strategy of your own.
/// This type is a pointer to manage other object's deletion.
/// It should be allocated on stack and passed to functions either by value or by const reference.
/// @tparam T Type of the pointed object. Must be either System::Object or subclass of it.
template <class T>
class SmartPtr
{
public:
/// @brief Pointed type.
typedef T Pointee_;
/// @brief Specialized smart pointer type.
typedef SmartPtr<T> SmartPtr_;
/// @brief Same as Pointee_, if it is a specialization of System::Array, and void otherwise.
typedef typename System::Details::ArrayTypeResolver<T>::type ArrayType;
/// @brief Storage type of pointed array. Only meaningful if T is a specialization of System::Array.
using ValueType = typename System::Details::SelectType<typename System::Details::ArrayTypeResolver<T>::value_type>::type;
    /// @brief Creates SmartPtr object of required mode.
    /// The pointer is initialized to null; assign or construct from an object later.
    /// @param mode Pointer mode.
    SmartPtr(SmartPtrMode mode)
        : m_data(mode)
    {
        m_data.SetNull();
    }
    /// @brief Creates null-pointer SmartPtr object of required mode.
    /// Also serves as the default constructor (both arguments are defaulted).
    /// @param mode Pointer mode.
    SmartPtr(std::nullptr_t = nullptr, SmartPtrMode mode = SmartPtrMode::Shared)
        : m_data(mode)
    {
        m_data.SetNull();
    }
    /// @brief Creates SmartPtr pointing to specified object, or converts raw pointer to SmartPtr.
    /// Safe for objects already owned elsewhere: reference counting is
    /// intrusive, so this joins the object's existing ownership group
    /// (see the class-level documentation above).
    /// @param object Pointee.
    /// @param mode Pointer mode.
    SmartPtr(Pointee_ *object, SmartPtrMode mode = SmartPtrMode::Shared)
        : m_data(mode)
    {
        Lock(object);
    }
    /// @brief Copy constructs SmartPtr object. Both pointers point to the same object afterwards.
    /// Note: the new pointer's mode comes from @p mode, not from @p ptr.
    /// @param ptr Pointer to copy.
    /// @param mode Pointer mode.
    SmartPtr(const SmartPtr_ &ptr, SmartPtrMode mode = SmartPtrMode::Shared)
        : m_data(mode)
    {
        Lock(ptr);
    }
    /// @brief Copy constructs SmartPtr object. Both pointers point to the same object afterwards. Performs type conversion if allowed.
    /// Participates in overload resolution only when Q* implicitly converts
    /// to Pointee_* (i.e. implicit upcasts), enforced via enable_if.
    /// @tparam Q Type of object pointed by x.
    /// @param x Pointer to copy.
    /// @param mode Pointer mode.
    template<class Q, typename = typename std::enable_if<std::is_convertible<Q*, Pointee_*>::value>::type>
    SmartPtr(const SmartPtr<Q>& x, SmartPtrMode mode = SmartPtrMode::Shared)
        : m_data(mode)
    {
        Lock(x);
    }
/// @brief Move constructs SmartPtr object. Effectively, swaps two pointers, if they are both of same mode. x may be unusable after call.
/// @param x Pointer to move.
/// @param mode Pointer mode.
SmartPtr(SmartPtr_&& x, SmartPtrMode mode = SmartPtrMode::Shared) noexcept
: m_data(mode)
{
if (m_data.GetMode() == x.m_data.GetMode())
{
m_data.SetNull();
m_data.swapPointers(x.m_data);
}
else
{
if (m_data.GetMode() == SmartPtrMode::Shared)
MoveSharedFromWeak(std::move(x));
else
MoveWeakFromShared(std::move(x));
}
}
/// @brief Converts type of referenced array by creating a new array of different type. Useful if in C# there is an array type cast which is unsupported in C++.
/// @tparam Y Type of source array.
/// @param src Pointer to array to create a copy of, but with different type of elements.
/// @param mode Pointer mode.
template <typename Y>
explicit SmartPtr(const SmartPtr<Array<Y>> &src, SmartPtrMode mode = SmartPtrMode::Shared)
: m_data(mode)
{
InitArray(this, src);
}
/// @brief Initializes empty array. Used to port some C# code constructs.
/// @tparam Y Placeholder of EmptyArrayInitializer type.
template <typename Y, typename = typename std::enable_if<std::is_same<Y, EmptyArrayInitializer>::value, void>::type>
explicit SmartPtr(const Y &)
{
Lock(Details::CreateArray((Pointee_*)nullptr));
}
/// @brief Constructs a SmartPtr which shares ownership information with the initial value of ptr, but holds an unrelated and unmanaged pointer p.
/// @param ptr Another smart pointer to share the ownership to the ownership from.
/// @param p Pointer to an object to manage.
/// @param mode Pointer mode.
template <typename P>
SmartPtr(const SmartPtr<P> &ptr, Pointee_ *p, SmartPtrMode mode = SmartPtrMode::Shared)
: m_data(mode)
{
if (ptr == nullptr)
m_data.SetNull();
else if (m_data.GetMode() == SmartPtrMode::Weak)
{
if (ptr.m_data.GetMode() == SmartPtrMode::Weak)
m_data.WeakSetPointer(
p
, ptr.m_data.WeakGetCounter()->WeakRefAdded()
);
else
m_data.WeakSetPointer(
p
#ifdef ENABLE_EXTERNAL_REFCOUNT
, ptr.m_data.SharedGetCounter()->WeakRefAdded()
#else
, ptr.m_data.SharedGetObject()->WeakRefAdded()
#endif
);
}
else
{
if (ptr.m_data.GetMode() == SmartPtrMode::Weak)
m_data.SharedSetPointer(
p
, detail::cast_statically_or_dynamically<Pointee_, Object>::cast(p)
, ptr.m_data.WeakGetCounter()->Lock()
#ifdef ENABLE_EXTERNAL_REFCOUNT
, ptr.m_data.WeakGetCounter()
#endif
);
else
m_data.SharedSetPointer(
p
, detail::cast_statically_or_dynamically<Pointee_, Object>::cast(p)
, ptr.m_data.SharedGetOwned()->SharedRefAdded()
#ifdef ENABLE_EXTERNAL_REFCOUNT
, ptr.m_data.SharedGetCounter()
#endif
);
}
}
/// @brief Destroys SmartPtr object. If required, decreases pointed object's reference counter and deletes object.
~SmartPtr()
{
Release();
}
/// @brief Move-assigns SmartPtr object. x becomes unusable.
/// @param x Pointer to move-assign.
/// @return Reference to this object.
SmartPtr_& operator = (SmartPtr_&& x) noexcept
{
if (m_data.GetComparable() != x.m_data.GetComparable())
{
if (m_data.GetMode() == x.m_data.GetMode())
{
m_data.swapPointers(x.m_data);
}
else if (m_data.GetMode() == SmartPtrMode::Shared)
{
if (m_data.HoldsReference())
{
SharedRefReleaser *const releaser = GetSharedReleaser();
MoveSharedFromWeak(std::move(x));
ReleaseShared(releaser);
}
else
{
MoveSharedFromWeak(std::move(x));
}
}
else
{
if (m_data.HoldsReference())
{
System::Detail::SmartPtrCounter *const releaser = m_data.WeakGetCounter();
MoveWeakFromShared(std::move(x));
ReleaseWeak(releaser);
}
else
{
MoveWeakFromShared(std::move(x));
}
}
}
return *this;
}
/// @brief Copy-assigns SmartPtr object.
/// @param x Pointer to copy-assign.
/// @return Reference to this object.
SmartPtr_& operator = (const SmartPtr_ &x)
{
Assign(x);
return *this;
}
/// @brief Copy-assigns SmartPtr object. Does required type conversions.
/// @tparam Q Type of object pointed by x.
/// @param x Pointer to copy-assign.
/// @return Reference to this object.
template <typename Q>
SmartPtr_& operator = (const SmartPtr<Q> &x)
{
Assign(x);
return *this;
}
/// @brief Assigns raw pointer to SmartPtr object.
/// @param p Pointer value to assign.
/// @return Reference to this object.
SmartPtr_& operator = (Pointee_ *p)
{
if (GetPointer() != p)
{
SmartPtr_ temp(p, get_Mode());
m_data.swapPointers(temp.m_data);
}
return *this;
}
/// @brief Sets pointer value to nullptr.
/// @return Reference to this object.
SmartPtr_& operator = (std::nullptr_t)
{
Release();
m_data.SetNull();
return *this;
}
/// @brief Allows to access members of referenced object.
/// @return Raw pointer to referenced object.
/// @throw System::NullReferenceException If pointer is null.
Pointee_* operator -> () const
{
return GetObjectNotNull();
}
/// @brief Checks if pointer points to nullptr.
/// @return True if pointer points to nullptr and false otherwise.
bool operator == (std::nullptr_t) const
{
return m_data.IsNull();
}
/// @brief Gets pointed object.
/// @return Raw pointer to referenced object.
Pointee_* get() const
{
return m_data.GetPointee();
}
/// @brief Gets pointed object, but asserts that pointer is in shared mode.
/// @return Raw pointer to referenced object.
Pointee_* get_shared() const
{
return m_data.SharedGetPointee();
}
/// @brief Sets pointed object.
/// @param ptr Raw pointer to new referenced object.
void reset(Pointee_ *ptr)
{
Release();
Lock(ptr);
}
/// @brief Makes pointer pointing to nullptr.
void reset()
{
Release();
m_data.SetNull();
}
/// @brief Gets pointer mode.
/// @return Mode of pointer object.
SmartPtrMode get_Mode() const
{
return m_data.GetMode();
}
/// @brief Checks if pointer is in shared mode.
/// @return True if pointer is in shared mode, false otherwise.
bool IsShared() const
{
return m_data.GetMode() == SmartPtrMode::Shared;
}
/// @brief Checks if pointer is in weak mode.
/// @return True if pointer is in weak mode, false otherwise.
bool IsWeak() const
{
return m_data.GetMode() == SmartPtrMode::Weak;
}
/// @brief Sets pointer mode. May alter referenced object's reference counts.
/// @param mode New mode of pointer.
void set_Mode(SmartPtrMode mode)
{
if (m_data.GetMode() == mode)
return;
SmartPtr_ ptr(*this, mode);
m_data.swap(ptr.m_data);
}
/// @brief Gets reference to pointed object. Asserts that pointer is not null.
/// @return Reference to pointed object.
Pointee_& operator *() const
{
return *GetObjectNotNull();
}
/// @brief Checks if pointer is not null.
/// @return False if pointer is null, true otherwise.
explicit operator bool() const noexcept
{
return *this != nullptr;
}
/// @brief Checks if pointer is null.
/// @return True if pointer is null, false otherwise.
bool operator !() const noexcept
{
return *this == nullptr;
}
/// @brief Provides less-compare semantics for SmartPtr class.
/// @tparam Y Type of pointer to compare current one to.
/// @param p Pointer to compare current one to.
/// @return True if the object referenced by SmartPtr is 'less' than p and false otherwise.
template<class Y>
bool operator < (Y* p) const
{
return GetObjectOrNull() < p;
}
/// @brief Provides less-compare semantics for SmartPtr class.
/// @tparam Y Type of pointer to compare current one to.
/// @param x Pointer to compare current one to.
/// @return True if the object referenced by SmartPtr is 'less' than x and false otherwise.
template<class Y>
bool operator < (SmartPtr<Y> const& x) const
{
return GetObjectOrNull() < x.GetObjectOrNull();
}
/// @brief Shortcut to get System::TypeInfo object for the Pointee_ type.
/// @return Const reference to TypeInfo structure which describes Pointee_ type.
static const System::TypeInfo& Type()
{
return Pointee_::Type();
}
/// @brief Casts pointer to different type using static_cast on pointed object.
/// @tparam Y Target type of pointed object.
/// @return Pointer of changed type which is always in shared mode.
template<class Y>
SmartPtr<Y> static_pointer_cast() const
{
return m_data.IsNull() ? SmartPtr<Y>() : SmartPtr<Y>(*this, static_cast<Y*>(m_data.GetPointee()), get_Mode());
}
/// @brief Casts pointer to different type using dynamic_cast on pointed object.
/// @tparam Y Target type of pointed object.
/// @return Pointer of changed type which is always in shared mode.
template<class Y>
SmartPtr<Y> dynamic_pointer_cast() const
{
return m_data.IsNull() ? SmartPtr<Y>() : SmartPtr<Y>(*this, dynamic_cast<Y*>(m_data.GetPointee()), get_Mode());
}
/// @brief Casts pointer to different type using const_cast on pointed object.
/// @tparam Y Target type of pointed object.
/// @return Pointer of changed type which is always in shared mode.
template<class Y>
SmartPtr<Y> const_pointer_cast() const
{
return m_data.IsNull() ? SmartPtr<Y>() : SmartPtr<Y>(*this, const_cast<Y*>(m_data.GetPointee()), get_Mode());
}
/// @brief Checks if pointed object is of specific type or its child type. Follows C# 'is' semantics.
/// @param target Specifies target type to check against.
/// @return True if C# 'is'-style check is positive and false otherwise.
bool Is(const System::TypeInfo &target) const;
/// @brief Gets pointed object (if any) or nullptr. Same as get().
/// @return Raw pointer to referenced object (if any) or nullptr.
Object* GetObjectOrNull() const
{
if (m_data.IsNull())
return nullptr;
else if (m_data.GetMode() == SmartPtrMode::Shared)
return m_data.SharedGetObject();
else
return m_data.WeakGetCounter()->GetObject();
}
/// @brief Converts any pointer type to pointer to Object. Doesn't require Pointee_ type to be complete.
/// @return Raw pointer to referenced object (if any) or nullptr.
SmartPtr<Object> ToObjectPtr() const;
/// @brief Gets pointed object (if any) or nullptr. Same as get().
/// @return Raw pointer to referenced object (if any) or nullptr.
Pointee_* GetPointer() const
{
return m_data.GetPointee();
}
/// @brief Gets number of shared pointers existing to referenced object, including current one. Asserts current pointer being in shared mode.
/// @return Number of shared pointers existing to referenced object, if any. If pointer is null, returns 0.
int get_shared_count() const
{
if (m_data.SharedGetPointee() == nullptr) //Asserts we're in shared pointer mode
return 0;
else
#ifdef ENABLE_EXTERNAL_REFCOUNT
return m_data.SharedGetCounter()->SharedCount();
#else
return m_data.SharedGetOwned()->SharedCount();
#endif
}
/// @brief Calls SetTemplateWeakPtr() method on pointed object (if any).
/// @param argument Argument of SetTemplateWeakPtr method called on referenced object.
void SetContainedTemplateWeakPtr(unsigned int argument) const;
/// @brief Accessor for array elements. Only compiles if SmartPtr_ is specialization of System::Array.
/// @tparam IdxType Type of index (assumed integral).
/// @param idx Index in array.
/// @return Array value at idx position.
template <typename IdxType>
decltype(System::Details::GetByIndex(std::declval<const SmartPtr_*>(), std::declval<IdxType>())) operator[] (IdxType idx) const
{
return System::Details::GetByIndex(this, idx);
}
/// @brief Accessor for begin() method of an underling collection. Only compiles if SmartPtr_ is specialization type
/// with begin() method.
/// @return iterator to the begin of collection
template <typename Q = T> auto begin() noexcept -> decltype(std::declval<Q>().begin())
{
return GetObjectNotNull()->begin();
}
/// @brief Accessor for end() method of an underling collection. Only compiles if SmartPtr_ is specialization type
/// with end() method.
/// @return iterator to the end of collection
template <typename Q = T> auto end() noexcept -> decltype(std::declval<Q>().end())
{
return GetObjectNotNull()->end();
}
/// @brief Accessor for begin() method of an underling collection. Only compiles if SmartPtr_ is specialization type
/// with begin() method.
/// @return iterator to the begin of collection
template <typename Q = T> auto begin() const noexcept -> decltype(std::declval<const Q>().begin())
{
return GetObjectNotNull()->begin();
}
/// @brief Accessor for end() method of an underling collection. Only compiles if SmartPtr_ is specialization type
/// with end() method.
/// @return iterator to the end of collection
template <typename Q = T> auto end() const noexcept -> decltype(std::declval<const Q>().end())
{
return GetObjectNotNull()->end();
}
/// @brief Calls GetHashCode() on pointed object.
/// @return Result of GetHashCode() call on referenced object (if any) or 0.
int GetHashCode() const
{
return GetHashCodeImpl(static_cast<Pointee_*>(nullptr));
}
protected:
template <class Q> friend class SmartPtr;
friend class SmartPtrInfo;
/// @brief Sets pointee object. Increments shared or weak reference count, depending on pointer mode.
/// @param object Object to lock.
void Lock(Pointee_ *object)
{
if (object == nullptr)
m_data.SetNull();
else
{
if (m_data.GetMode() == SmartPtrMode::Shared)
#ifdef ENABLE_EXTERNAL_REFCOUNT
m_data.SharedSetPointer(object, object->Object::SharedRefAdded(), object->Object::GetCounter());
#else
m_data.SharedSetPointer(object, object->Object::SharedRefAdded());
#endif
else
m_data.WeakSetPointer(object, object->Object::WeakRefAdded());
}
}
/// @brief Sets pointee object. Increments shared or weak reference count, depending on pointer mode.
/// @tparam Q Source object type.
/// @param ptr Pointer to object to lock.
template <class Q>
void Lock(const SmartPtr<Q> &ptr)
{
if (ptr == nullptr)
m_data.SetNull();
else if (m_data.GetMode() == SmartPtrMode::Weak)
{
if (ptr.m_data.GetMode() == SmartPtrMode::Weak)
LockWeakFromWeak(ptr);
else
LockWeakFromShared(ptr);
}
else
{
if (ptr.m_data.GetMode() == SmartPtrMode::Weak)
LockSharedFromWeak(ptr);
else
LockSharedFromShared(ptr);
}
}
/// @brief Sets pointee object. Asserts that both current object and ptr are in shared mode.
/// @tparam Q Source object type.
/// @param ptr Pointer to object to lock.
template <typename Q>
void LockSharedFromShared(const SmartPtr<Q> &ptr)
{
m_data.SharedSetPointer(
ptr.m_data.SharedGetPointee()
, ptr.m_data.SharedGetObject()
, ptr.m_data.SharedGetOwned()->SharedRefAdded()
#ifdef ENABLE_EXTERNAL_REFCOUNT
, ptr.m_data.SharedGetCounter()
#endif
);
}
/// @brief Sets pointee object. Asserts that current object is in shared mode and ptr is in weak mode.
/// @tparam Q Source object type.
/// @param ptr Pointer to object to lock.
template <typename Q>
void LockSharedFromWeak(const SmartPtr<Q> &ptr)
{
m_data.SharedSetPointer(
ptr.m_data.WeakGetPointee()
, ptr.m_data.WeakGetCounter()->Lock()
#ifdef ENABLE_EXTERNAL_REFCOUNT
, ptr.m_data.WeakGetCounter()
#endif
);
}
/// @brief Sets pointee object. Asserts that current object is in weak mode and ptr is in shared mode.
/// @tparam Q Source object type.
/// @param ptr Pointer to object to lock.
template <typename Q>
void LockWeakFromShared(const SmartPtr<Q> &ptr)
{
m_data.WeakSetPointer(
ptr.m_data.SharedGetPointee()
#ifdef ENABLE_EXTERNAL_REFCOUNT
, ptr.m_data.SharedGetCounter()->WeakRefAdded()
#else
, ptr.m_data.SharedGetOwned()->WeakRefAdded()
#endif
);
}
/// @brief Sets pointee object. Asserts that both current object and ptr are in weak mode.
/// @tparam Q Source object type.
/// @param ptr Pointer to object to lock.
template <typename Q>
void LockWeakFromWeak(const SmartPtr<Q> &ptr)
{
m_data.WeakSetPointer(
ptr.m_data.WeakGetPointee()
, ptr.m_data.WeakGetCounter()->WeakRefAdded()
);
}
/// @brief Decrements currently referenced object's shared or weak pointer counter, depending on current pointer mode.
void Release()
{
if (m_data.GetPointee() != nullptr)
{
if (m_data.GetMode() == SmartPtrMode::Shared)
ReleaseShared();
else
ReleaseWeak();
}
}
#ifdef ENABLE_EXTERNAL_REFCOUNT
/// @brief Type to use to release shared pointers. Depends on whether external refcount is on or off.
typedef System::Detail::SmartPtrCounter SharedRefReleaser;
/// @brief Gets object to use to release shared pointer to.
/// @return Pointer to owned object pointer.
SharedRefReleaser* GetSharedReleaser() const
{
return m_data.SharedGetCounter();
}
#else
/// @brief Type to use to release shared pointers. Depends on whether external refcount is on or off.
typedef Object SharedRefReleaser;
/// @brief Gets object to use to release shared pointer to.
/// @return Pointer to owned object.
SharedRefReleaser* GetSharedReleaser() const
{
return m_data.SharedGetOwned();
}
#endif
/// @brief Removes shared pointer of a specific object, possibly deleting it.
/// @param releaser Object to release.
static void ReleaseShared(SharedRefReleaser *releaser);
/// @brief Decrements currently referenced object's shared pointer counter.
void ReleaseShared()
{
ReleaseShared(GetSharedReleaser());
}
/// @brief Decrements weak pointer counter.
/// @param counter Counter to decrement.
static void ReleaseWeak(System::Detail::SmartPtrCounter *counter)
{
counter->WeakRefRemoved();
}
/// @brief Decrements currently referenced object's weak pointer counter.
void ReleaseWeak()
{
m_data.WeakGetCounter()->WeakRefRemoved();
}
/// @brief Gets currently referenced object (if any) or throws.
/// @return Raw pointer to referenced object.
/// @throws System::NullReferenceException Thrown if called on null-pointer.
T* GetObjectNotNull() const
{
if (m_data.IsNull())
System::Detail::SmartPtrCounter::ThrowNullReferenceException();
return m_data.GetPointee();
}
/// @brief Implements move semantics. Asserts that current object is in shared mode and x is in weak mode.
/// @param x Pointer to move value from.
void MoveSharedFromWeak(SmartPtr &&x)
{
if (x.m_data.IsNull())
m_data.SetNull();
else
{
m_data.SharedSetPointer(
x.m_data.WeakGetPointee()
, x.m_data.WeakGetCounter()->Lock()
#ifdef ENABLE_EXTERNAL_REFCOUNT
, x.m_data.WeakGetCounter()
#endif
);
x.ReleaseWeak();
x.m_data.SetNull();
}
}
/// @brief Implements move semantics. Asserts that current object is in weak mode and x is in shared mode.
/// @param x Pointer to move value from.
void MoveWeakFromShared(SmartPtr &&x)
{
if (x.m_data.IsNull())
m_data.SetNull();
else
{
m_data.WeakSetPointer(
x.m_data.SharedGetPointee()
#ifdef ENABLE_EXTERNAL_REFCOUNT
, x.m_data.SharedGetCounter()->WeakRefAdded()
#else
, x.m_data.SharedGetOwned()->WeakRefAdded()
#endif
);
x.ReleaseShared();
x.m_data.SetNull();
}
}
/// @brief Performs actual array copying on cast constructor calls.
/// @tparam X Target array element type.
/// @tparam Y Source array element type.
/// @param ptr Target array pointer.
/// @param src Source array pointer.
template <typename X, typename Y>
static void InitArray(SmartPtr<Array<X>> *ptr, const SmartPtr<Array<Y>> &src)
{
T *const destination = new T(src->get_Length());
ptr->Lock(destination);
std::copy(src->data().begin(), src->data().end(), destination->data().begin());
}
/// @brief Calls into GetHashCode() method if it is available on Pointee_ type (which is true if it is a complete type).
/// @tparam Q Same as Pointee_, needed for template magic here.
/// @tparam R Result of GetHashCode() call, should be int.
template <typename Q, typename R = decltype(std::declval<Q*>()->GetHashCode())>
R GetHashCodeImpl(Q*) const
{
if (m_data.IsNull())
return 0;
else
return GetPointer()->GetHashCode();
}
/// @brief Calls GetHashCode() method from Object if it is not available on Pointee_ type (e. g. if it is incomplete).
int GetHashCodeImpl(void*) const
{
if (m_data.IsNull())
return 0;
else if (m_data.GetMode() == SmartPtrMode::Shared)
return m_data.SharedGetObject()->GetHashCode();
else
return m_data.WeakGetCounter()->GetObject()->GetHashCode();
}
/// @brief Copy-assigns SmartPtr object. Does type conversions, if required.
/// @tparam Q Type of object pointed by x.
/// @param x Pointer to copy-assign.
/// @return Reference to this object.
template <typename Q>
void Assign(const SmartPtr<Q> &x)
{
if (m_data.GetComparable() == x.m_data.GetComparable()) return;
if (m_data.GetMode() == SmartPtrMode::Shared)
{
if (!x.m_data.HoldsReference())
{
ReleaseShared(); //Both can't be null at the same time here as GetPointer() results would match
m_data.SetNull();
}
else if (m_data.HoldsReference())
{
SharedRefReleaser *const releaser = GetSharedReleaser();
if (x.m_data.GetMode() == SmartPtrMode::Shared)
LockSharedFromShared(x);
else
LockSharedFromWeak(x);
ReleaseShared(releaser);
}
else
{
if (x.m_data.GetMode() == SmartPtrMode::Shared)
LockSharedFromShared(x);
else
LockSharedFromWeak(x);
}
}
else
{
if (!x.m_data.HoldsReference())
{
ReleaseWeak(); //Both can't be null at the same time here as GetComparable() results would match
m_data.SetNull();
}
else if (m_data.HoldsReference())
{
System::Detail::SmartPtrCounter *const releaser = m_data.WeakGetCounter();
if (x.m_data.GetMode() == SmartPtrMode::Shared)
LockWeakFromShared(x);
else
LockWeakFromWeak(x);
ReleaseWeak(releaser);
}
else
{
if (x.m_data.GetMode() == SmartPtrMode::Shared)
LockWeakFromShared(x);
else
LockWeakFromWeak(x);
}
}
}
/// @brief Internal data storage class which hides data members and enforces neccessary asserts around them.
class Data {
/// @brief Pointer mode.
SmartPtrMode m_mode;
union {
/// @brief Referenced object.
Pointee_ *m_pointee;
/// @brief Representation to make it possible setting m_pointee to nullptr without knowing its actual type.
void *m_internal_pointer;
};
/// @brief Same as m_pointee, but kept as Object type reference.
Object *m_object;
#ifdef ENABLE_EXTERNAL_REFCOUNT
/// @brief Owned object
Object *m_owned;
/// @brief Reference counter structure linked to m_pointee.
System::Detail::SmartPtrCounter *m_counter;
#else
union {
/// @brief Owned pointer
Object *m_owned;
/// @brief Reference counter structure linked to m_pointee.
System::Detail::SmartPtrCounter *m_counter;
};
#endif
public:
/// @brief Initializes Data structure. Doesn't set any members except for m_mode.
Data(SmartPtrMode mode)
: m_mode(mode) //No initialization for other members - they will be assigned by SmartPtr constructor
{}
Data(const Data&) = delete;
Data(Data&&) = delete;
Data& operator = (const Data&) = delete;
Data& operator = (Data&&) = delete;
/// @brief Gets current pointer mode.
/// @return Current pointer mode.
SmartPtrMode GetMode() const
{
return m_mode;
}
/// @brief Sets pointer mode. Doesn't do any reference count or pointer changes.
/// @param mode New pointer mode.
void SetMode(SmartPtrMode mode)
{
m_mode = mode;
}
/// @brief Gets pointed object (if any) or nullptr.
/// @return Referenced object (if any) or nullptr.
T* GetPointee() const
{
return m_pointee;
}
/// @brief Gets pointed object (if set and not yet deleted) or nullptr.
/// @return Referenced object (if set and not yet deleted) or nullptr (if pointer is weak and referenced object is already deleted).
T* GetComparable() const
{
if (m_mode == SmartPtrMode::Shared)
return m_pointee;
else if (m_pointee == nullptr)
return nullptr;
else if (m_counter->GetObject() == nullptr)
return nullptr;
else
return m_pointee;
}
/// @brief Checks if referenced object is set and not yet deleted.
/// @return False if referenced object is non-null and exists and false if pointer is null or pointer is weak and referenced object is already deleted.
bool IsNull() const
{
return m_pointee == nullptr || (m_mode == SmartPtrMode::Weak && m_counter->GetObject() == nullptr);
}
/// @brief Sets pointer to null.
void SetNull()
{
m_pointee = nullptr;
}
/// @brief Checks if pointer actually holds a shared or weak reference to an object.
/// @return True if reference is being held, false otherwise.
bool HoldsReference() const
{
return m_pointee != nullptr;
}
/// @brief Gets referenced object. Asserts that pointer is weak.
/// @return Raw pointer to referenced object.
Pointee_* WeakGetPointee() const
{
assert(m_mode == SmartPtrMode::Weak);
return m_pointee;
}
/// @brief Gets referenced object reference counter. Asserts that pointer is weak. Doesn't null-check.
/// @return Raw pointer to reference counter.
System::Detail::SmartPtrCounter* WeakGetCounter() const
{
assert(m_mode == SmartPtrMode::Weak);
return m_counter;
}
/// @brief Gets referenced object (if any) reference counter or null. Asserts that pointer is weak.
/// @return Raw pointer to reference counter or null if pointer is null.
System::Detail::SmartPtrCounter* WeakGetCounterOrNull() const
{
assert(m_mode == SmartPtrMode::Weak);
return IsNull() ? nullptr : m_counter;
}
/// @brief Sets pointer value. Asserts that pointer is weak.
/// @param pointee Pointee object.
/// @param counter Reference counter of pointee object.
void WeakSetPointer(Pointee_ *pointee, System::Detail::SmartPtrCounter *counter)
{
assert(m_mode == SmartPtrMode::Weak);
m_pointee = pointee;
m_counter = counter;
}
/// @brief Gets referenced object. Asserts that pointer is shared.
/// @return Raw pointer to referenced object.
Pointee_* SharedGetPointee() const
{
assert(m_mode == SmartPtrMode::Shared);
return m_pointee;
}
/// @brief Gets referenced object which is cast to System::Object. Asserts that pointer is shared. Doesn't null-check.
/// @return Raw pointer to referenced object.
Object* SharedGetObject() const
{
assert(m_mode == SmartPtrMode::Shared);
return m_object;
}
/// @brief Gets owned object which. Asserts that pointer is shared. Doesn't null-check.
/// @return Raw pointer to owned object.
Object* SharedGetOwned() const
{
assert(m_mode == SmartPtrMode::Shared);
return m_owned;
}
/// @brief Gets referenced object (if any) which is cast to System::Object or null. Asserts that pointer is shared.
/// @return Raw pointer to referenced object (if any) or null.
Object* SharedGetObjectOrNull() const
{
assert(m_mode == SmartPtrMode::Shared);
return IsNull() ? nullptr : m_object;
}
/// @brief Gets owned object. Asserts that pointer is shared.
/// @return Raw pointer to owned object (if any) or null.
Object* SharedGetOwnedOrNull() const
{
assert(m_mode == SmartPtrMode::Shared);
return IsNull() ? nullptr : m_owned;
}
/// @brief Gets pointer to internal pointer storage field.
/// @return Raw pointer to internal pointer storage field.
void** SharedGetInternalPointer()
{
assert(m_mode == SmartPtrMode::Shared);
return &m_internal_pointer;
}
#ifdef ENABLE_EXTERNAL_REFCOUNT
/// @brief Gets referenced object reference counter. Asserts that pointer is shared. Doesn't null-check.
/// @return Raw pointer to reference counter.
System::Detail::SmartPtrCounter* SharedGetCounter() const
{
assert(m_mode == SmartPtrMode::Shared);
return m_counter;
}
/// @brief Gets pointer to internal reference counter pointer storage field. Doesn't null-check.
/// @return Raw pointer to internal reference counter pointer storage field.
System::Detail::SmartPtrCounter** SharedGetCounterPointer()
{
assert(m_mode == SmartPtrMode::Shared);
return &m_counter;
}
/// @brief Gets pointer to internal reference counter pointer storage field or null.
/// @return Raw pointer to internal reference counter pointer storage field (if pointer is non-null) or null.
System::Detail::SmartPtrCounter* SharedGetCounterOrNull() const
{
assert(m_mode == SmartPtrMode::Shared);
return IsNull() ? nullptr : m_counter;
}
/// @brief Sets pointer value. Asserts that pointer is shared.
/// @param pointee Pointee object.
/// @param object Pointee object cast to System::Object.
/// @param owned Owned object cast to System::Object.
/// @param counter Reference counter of pointee object.
void SharedSetPointer(Pointee_ *pointee, Object *object, Object *owned, System::Detail::SmartPtrCounter *counter)
{
assert(m_mode == SmartPtrMode::Shared);
m_pointee = pointee;
m_object = object;
m_owned = owned;
m_counter = counter;
}
/// @brief Sets pointer value. Asserts that pointer is shared.
/// @param pointee Pointee object.
/// @param object Pointee object cast to System::Object.
/// @param counter Reference counter of pointee object.
void SharedSetPointer(Pointee_ *pointee, Object *object, System::Detail::SmartPtrCounter *counter)
{
assert(m_mode == SmartPtrMode::Shared);
m_pointee = pointee;
m_object = object;
m_owned = object;
m_counter = counter;
}
#else
/// @brief Sets pointer value. Asserts that pointer is shared.
/// @param pointee Pointee object.
/// @param owned Owned object cast to System::Object.
/// @param object Pointee object cast to System::Object.
void SharedSetPointer(Pointee_ *pointee, Object *object, Object *owned)
{
assert(m_mode == SmartPtrMode::Shared);
m_pointee = pointee;
m_object = object;
m_owned = owned;
}
/// @brief Sets pointer value. Asserts that pointer is shared.
/// @param pointee Pointee object.
/// @param object Pointee object cast to System::Object.
void SharedSetPointer(Pointee_ *pointee, Object *object)
{
assert(m_mode == SmartPtrMode::Shared);
m_pointee = pointee;
m_object = object;
m_owned = object;
}
#endif
/// @brief Swaps data of two Data instances, including modes and pointers.
/// @param data Data instance to swap data with.
void swap(Data &data) noexcept
{
std::swap(m_mode, data.m_mode);
swapPointers(data);
}
/// @brief Swaps pointer data of two Data instances, excluding modes.
/// @param data Data instance to swap data with.
void swapPointers(Data &data) noexcept
{
std::swap(m_pointee, data.m_pointee);
std::swap(m_object, data.m_object);
std::swap(m_owned, data.m_owned);
#ifdef ENABLE_EXTERNAL_REFCOUNT
std::swap(m_counter, data.m_counter);
#endif
}
}
/// An instance of Data class.
m_data;
};
namespace Detail {
#ifdef ENABLE_EXTERNAL_REFCOUNT
    /// Sentry object to guarantee that object being constructed is not owned by pointers before it is fully constructed; otherwise, constructor exceptions lead to double deletion.
    class OwnNextObject
    {
    public:
        /// Constructor. Marks the next allocated object as still being constructed so pointers don't take ownership of it prematurely.
        OwnNextObject()
        {
            SmartPtrCounter::NextOwnership() = SmartPtrCounter::BeingConstructed; // Not until constructors succeed
        }
        /// Marks object to be owned by pointers from now on.
        /// @tparam T Object type.
        /// @param object Pointer to object to switch ownership for.
        template <typename T>
        void CreatedSuccessfully(T *object) {
            object->Object::GetCounter()->CreatedSuccessfully(); // No exceptions thrown - should now manage memory by shared pointers
        }
    };
#else
    /// Dummy class to be used in case of shared pointers counter being stored inside the object so no protection exists against deleted memory access.
    class OwnNextObject
    {
    public:
        /// Stub function that does nothing.
        /// @tparam T Type of object that was constructed successfully.
        template <typename T>
        void CreatedSuccessfully(T *) {}
    };
#endif
}
/// @brief Trait class to check if a type is a specialization of SmartPtr class.
/// Exposes a boolean member `value` (used e.g. to select the proper MakeObject overload).
/// @tparam T Tested class.
template <class T>
struct IsSmartPtr : System::detail::is_a<T, System::SmartPtr> {};
/// @brief Creates an object on the heap and returns a shared pointer to it.
/// An ownership sentry guards the allocation so that a throwing constructor
/// does not leave the object partially owned by pointers.
/// @tparam T Class to instantiate.
/// @tparam Args Constructor arguments' types.
/// @param args Constructor arguments, perfect-forwarded to T's constructor.
/// @return SmartPtr to newly created object, always in shared mode.
template<class T, class ...Args>
typename std::enable_if<!IsSmartPtr<T>::value, SmartPtr<T> >::type MakeObject(Args&&... args)
{
    System::Detail::OwnNextObject sentry;
    T *const instance = ::new T(std::forward<Args>(args)...);
    sentry.CreatedSuccessfully(instance);
    return SmartPtr<T>(instance);
}
/// @brief Creates an object on the heap and returns a shared pointer to it.
/// Overload selected when T itself is a SmartPtr specialization; it unwraps the
/// pointee type and delegates to the primary MakeObject overload.
/// @tparam T SmartPtr to class to instantiate.
/// @tparam Args Constructor arguments' types.
/// @param args Constructor arguments, perfect-forwarded to the pointee's constructor.
/// @return SmartPtr to newly created object, always in shared mode.
template<class T, class ...Args>
typename std::enable_if<IsSmartPtr<T>::value, T>::type MakeObject(Args&&... args)
{
    typedef typename T::Pointee_ TargetType;
    return MakeObject<TargetType>(std::forward<Args>(args)...);
}
/// @brief Equal-compares two smart pointers by the identity of the objects they reference.
/// @tparam X Pointee type of first pointer.
/// @tparam Y Pointee type of second pointer.
/// @param x First pointer to compare.
/// @param y Second pointer to compare.
/// @return True if pointers match, false otherwise.
template<class X, class Y>
bool operator == (const SmartPtr<X> &x, const SmartPtr<Y> &y)
{
    const auto lhs = x.GetObjectOrNull();
    const auto rhs = y.GetObjectOrNull();
    return lhs == rhs;
}
/// @brief Non-equal-compares two smart pointers by the identity of the objects they reference.
/// @tparam X Pointee type of first pointer.
/// @tparam Y Pointee type of second pointer.
/// @param x First pointer to compare.
/// @param y Second pointer to compare.
/// @return False if pointers match, true otherwise.
template<class X, class Y>
bool operator != (const SmartPtr<X> &x, const SmartPtr<Y> &y)
{
    const auto lhs = x.GetObjectOrNull();
    const auto rhs = y.GetObjectOrNull();
    return lhs != rhs;
}
/// @brief Checks if smart pointer is null.
/// @tparam X Pointee type of pointer.
/// @param x Pointer to check.
/// @return True if pointer is null, false otherwise.
template<class X>
bool operator == (std::nullptr_t, SmartPtr<X> const& x)
{
return x == nullptr;
}
/// @brief Checks if smart pointer is not null.
/// @tparam X Pointee type of pointer.
/// @param x Pointer to check.
/// @return False if pointer is null, true otherwise.
template<class X>
bool operator !=(SmartPtr<X> const& x, std::nullptr_t)
{
return !(x == nullptr);
}
/// @brief Checks if smart pointer is not null.
/// @tparam X Pointee type of pointer.
/// @param x Pointer to check.
/// @return False if pointer is null, true otherwise.
template<class X>
bool operator !=(std::nullptr_t, SmartPtr<X> const& x)
{
return !(x == nullptr);
}
/// @brief Checks if value type object (ported C# structure, etc.) is null.
/// Enabled only for class types (non-scalar, non-pointer, non-array), so it never
/// competes with the built-in comparisons; the type is expected to expose IsNull().
/// @tparam T Value type.
/// @param x Object to check.
/// @return True if object is null, false otherwise.
template<class T>
typename std::enable_if<!std::is_scalar<T>::value && !std::is_pointer<T>::value && !std::is_array<T>::value, bool>::type operator ==(T const& x, std::nullptr_t)
{
    return x.IsNull();
}
/// @brief Checks if value type object (ported C# structure, etc.) is null.
/// Mirror overload with nullptr on the left-hand side; same SFINAE constraint.
/// @tparam T Value type.
/// @param x Object to check.
/// @return True if object is null, false otherwise.
template<class T>
typename std::enable_if<!std::is_scalar<T>::value && !std::is_pointer<T>::value && !std::is_array<T>::value, bool>::type operator ==(std::nullptr_t, T const& x)
{
    return x.IsNull();
}
/// @brief Wraps a raw pointer into a smart pointer operating in shared mode.
/// @tparam X Pointee type.
/// @param p Raw pointer to object.
/// @return Shared smart pointer to object.
template<class X>
SmartPtr<X> MakeSharedPtr(X* p)
{
    SmartPtr<X> shared(p, SmartPtrMode::Shared);
    return shared;
}
/// @brief Wraps a raw pointer into a smart pointer operating in shared mode.
/// Overload for const pointers; constness is cast away because SmartPtr stores a
/// mutable pointee. Useful e. g. when using 'this' in C# methods ported as const.
/// @tparam X Pointee type.
/// @param p Raw pointer to object.
/// @return Shared smart pointer to object.
template<class X>
SmartPtr<X> MakeSharedPtr(const X* p)
{
    X *const mutablePointer = const_cast<X*>(p);
    return SmartPtr<X>(mutablePointer, SmartPtrMode::Shared);
}
/// @brief Casts smart pointers using static_cast semantics.
/// Free-function form mirroring std::static_pointer_cast; delegates to the
/// member implementation on SmartPtr.
/// @tparam X Source pointer pointee type (deduced).
/// @tparam Y Target pointer pointee type (must be given explicitly).
/// @param x Source pointer.
/// @return Pointer after cast.
template<class Y, class X>
SmartPtr<Y> static_pointer_cast(SmartPtr<X> const& x)
{
    return x.template static_pointer_cast<Y>();
}
/// @brief Casts smart pointers using dynamic_cast semantics.
/// Free-function form mirroring std::dynamic_pointer_cast; delegates to the
/// member implementation on SmartPtr.
/// @tparam X Source pointer pointee type (deduced).
/// @tparam Y Target pointer pointee type (must be given explicitly).
/// @param x Source pointer.
/// @return Pointer after cast.
template<class Y, class X>
SmartPtr<Y> dynamic_pointer_cast(SmartPtr<X> const& x)
{
    return x.template dynamic_pointer_cast<Y>();
}
/// @brief Casts smart pointers using const_cast semantics.
/// Free-function form mirroring std::const_pointer_cast; delegates to the
/// member implementation on SmartPtr.
/// @tparam X Source pointer pointee type (deduced).
/// @tparam Y Target pointer pointee type (must be given explicitly).
/// @param x Source pointer.
/// @return Pointer after cast.
template<class Y, class X>
SmartPtr<Y> const_pointer_cast(SmartPtr<X> const& x)
{
    return x.template const_pointer_cast<Y>();
}
/// @brief Service class to test and alter SmartPtr's contents without knowing final type. Used for garbage collection and loop references detection, etc.
/// Think of it as of 'pointer to pointer'. We can't use SmartPtr's basetype as it doesn't have any; instead, we use this 'info' class.
class SmartPtrInfo
{
public:
    /// @brief Creates empty SmartPtrInfo object that references no smart pointer.
    /// NOTE(review): m_internal_pointer is set to nullptr here, whereas the templated
    /// constructor uses PointToNullptr() for non-shared pointers; calling operator bool()
    /// on a default-constructed instance dereferences this null pointer - confirm callers
    /// never do that.
    inline SmartPtrInfo()
        : m_object(nullptr)
        , m_owned(nullptr)
        , m_internal_pointer(nullptr)
#ifdef ENABLE_EXTERNAL_REFCOUNT
        , m_counter(nullptr)
#endif
    {}
    /// @brief Creates SmartPtrInfo object with information on specific smart pointer.
    /// For non-shared pointers all fields degrade to null / a pointer to a static null slot.
    /// @tparam T Pointee type.
    /// @param ptr Smart pointer to create info for.
    template <typename T>
    explicit inline SmartPtrInfo(SmartPtr<T> &ptr)
        : m_object(ptr.IsShared() ? ptr.m_data.SharedGetObject() : nullptr)
        , m_owned(ptr.IsShared() ? ptr.m_data.SharedGetOwned() : nullptr)
        , m_internal_pointer(ptr.IsShared() ? ptr.m_data.SharedGetInternalPointer() : PointToNullptr())
#ifdef ENABLE_EXTERNAL_REFCOUNT
        , m_counter(ptr.m_data.SharedGetCounterPointer())
#endif
    {}
    /// @brief Checks if info object points to non-null pointer.
    /// Reads through m_internal_pointer, i.e. reflects the referenced pointer's CURRENT
    /// state, including resets made after this info object was constructed.
    /// @return True if info object points to the pointer and this pointer is not null, false otherwise.
    inline operator bool() const
    {
        return *m_internal_pointer != nullptr;
    }
    /// @brief Checks if info object does not point to non-null pointer.
    /// Unlike operator bool(), this checks the pointee snapshot (m_object) captured at construction.
    /// @return False if info object doesn't point to any pointer or if referenced pointer is null, true otherwise.
    inline bool operator ! () const
    {
        return m_object == nullptr;
    }
    /// @brief Allows to call methods of Object pointed by the referenced pointer.
    /// @return Pointee object of referenced pointer (snapshot taken at construction).
    inline Object* operator -> () const
    {
        return m_object;
    }
    /// @brief Sets referenced pointer to null (in place, through the stored indirection).
    inline void reset() const
    {
        //No reason to reset m_object - the pointer checks m_data.m_pointee to understand if it is null, not m_data.m_object.
        *m_internal_pointer = nullptr;
#ifdef ENABLE_EXTERNAL_REFCOUNT
        *m_counter = nullptr;
#endif
    }
    /// @brief Less-compares values of pointers referenced by two info objects.
    /// Compares the addresses of the internal pointer slots, giving a stable ordering
    /// suitable for use as a key in ordered containers.
    /// @param other Information on pointer to compare with.
    /// @return True if value of pointer referenced by current info object preceeds the value of other one, false otherwise..
    inline bool operator < (const SmartPtrInfo &other) const
    {
        return m_internal_pointer < other.m_internal_pointer;
    }
    /// @brief Gets object referenced pointer points to.
    /// @return Raw pointer to object referenced smart pointer points to.
    inline Object* getObject() const
    {
        return m_object;
    }
    /// @brief Gets object owned pointer.
    /// @return Raw pointer to object owned smart pointer.
    inline Object* getOwned() const
    {
        return m_owned;
    }
    /// @brief Make ptr point to the same object referenced smart pointer does.
    /// Defined out of line elsewhere in the library.
    /// @param ptr Target smart pointer.
    void mapTo(SmartPtr<Object> &ptr) const;
private:
    /// @brief Pointee object of referenced smart pointer (snapshot at construction time).
    Object *m_object;
    /// @brief Owned object of referenced smart pointer (snapshot at construction time).
    Object *m_owned;
    /// @brief Pointer to referenced smart pointer's (if any) m_pointee field or to static variable containing nullptr.
    void **m_internal_pointer;
#ifdef ENABLE_EXTERNAL_REFCOUNT
    /// @brief Pointer to referenced smart pointer's (if any) m_counter field or to static variable containing nullptr.
    System::Detail::SmartPtrCounter **m_counter;
#endif
    /// @brief Helper function to generate substitution to m_internal_pointer values for info objects not pointing to any object.
    /// The static slot is shared by all such info objects; reset() on them writes nullptr into it, which is harmless.
    static void** PointToNullptr()
    {
        static void *pointer = nullptr;
        return &pointer;
    }
};
/// @brief Gets referenced object of smart pointer.
/// Free-function accessor (boost-style get_pointer) enabling generic code to extract
/// the raw pointer without knowing the smart pointer type.
/// @tparam T Pointee type.
/// @param x Source smart pointer.
/// @return Raw pointer to object referenced by smart pointer passed.
template<class T>
T* get_pointer(System::SmartPtr<T> const& x)
{
    return x.GetPointer();
}
/// @brief Alias for 'pointer to array' type.
/// @tparam T Array element type.
template <typename T> using ArrayPtr = SmartPtr<Array<T>>;
/// @brief Alias for smart pointer widely used in the library.
/// @tparam T Pointee type.
template <typename T> using SharedPtr = SmartPtr<T>;
/// @brief Helper function to acquire references to objects. Used to guarantee that System::DynamicWeakPtr updates referenced object after assignments.
/// Identity function: simply returns the reference it is given.
/// @tparam T Type to create reference to.
/// @param value Value to create reference to.
/// @return Reference to the value passed to this function.
template <typename T>
T& Ref(T &value)
{
    return value;
}
} //namespace System
/// Macro to befriend MakeObject function.
#define FRIEND_FUNCTION_System_MakeObject \
template<class T0_, class ...Args> friend typename std::enable_if<!System::IsSmartPtr<T0_>::value, System::SmartPtr<T0_> >::type System::MakeObject(Args&&... args); \
template<class T0_, class ...Args> friend typename std::enable_if<System::IsSmartPtr<T0_>::value, T0_>::type System::MakeObject(Args&&... args);
namespace std
{
/// Hashing implementation for SmartPtr class, allowing smart pointers to be used as
/// keys in std::unordered_map / std::unordered_set.
/// @tparam T Pointee type.
template <class T>
struct hash<System::SharedPtr<T> >
{
    /// Hasher argument type.
    using argument_type = System::SharedPtr<T>;
    /// Hasher result type.
    using result_type = std::size_t;
    /// Hasher algorithm which calls into actual GetHashCode() implementation
    /// (i.e. hashing follows the pointee's C#-style hash code, not the pointer value).
    /// @param val Smart pointer to hash.
    /// @return Hash value.
    std::size_t operator()(System::SharedPtr<T> const& val) const
    {
        return val.GetHashCode();
    }
};
}
#endif //_aspose_system_smart_ptr_h_
| 39.670282 | 180 | 0.633166 |
27d198346f69ea33d429793519e2c93558f1be1e | 1,206 | swift | Swift | Sources/AtomicModels/AtomicCollection.swift | serhii-londar/GraphiteClient | 61cce9639b777ac105ec69b45146c8fb0466e3a6 | [
"MIT"
] | 4 | 2019-05-31T10:59:41.000Z | 2020-11-12T23:23:46.000Z | Sources/AtomicModels/AtomicCollection.swift | serhii-londar/GraphiteClient | 61cce9639b777ac105ec69b45146c8fb0466e3a6 | [
"MIT"
] | null | null | null | Sources/AtomicModels/AtomicCollection.swift | serhii-londar/GraphiteClient | 61cce9639b777ac105ec69b45146c8fb0466e3a6 | [
"MIT"
] | 1 | 2021-10-30T21:56:11.000Z | 2021-10-30T21:56:11.000Z | import Foundation
/// Atomic wrapper around a collection whose element count can be waited upon.
/// Extends AtomicValue by keeping a ConditionLock whose integer condition mirrors
/// the collection's current element count after every update.
public class AtomicCollection<T: Collection>: AtomicValue<T> {
    // Lock whose condition always equals the wrapped collection's count.
    private let conditionLock: ConditionLock
    /// Wraps the given collection; the lock condition starts at its element count.
    override public init(_ value: T) {
        self.conditionLock = ConditionLock(condition: value.count)
        super.init(value)
    }
    // Keep the lock condition in sync whenever the wrapped value changes.
    override func didUpdateValue() {
        conditionLock.condition = value.count
    }
    /// Locks the invoking thread for up to given time interval until after the number of elements in collection
    /// becomes equal to the given count, and invokes a given closure.
    /// - Parameter count: The number of elements in underlying collection to match on.
    /// - Parameter before: The date by which the lock must be acquired or the attempt will time out.
    /// - Parameter work: A closure that will be executed once lock will be acquired and before it will be released.
    /// - Returns: true if the lock is acquired within the time limit, false otherwise.
    public func waitWhen(
        count: Int,
        before: Date = Date.distantFuture,
        doWork work: (Bool) -> () = { _ in })
        -> Bool
    {
        return conditionLock.lockAndUnlock(whenCondition: count, before: before, work: work)
    }
}
| 40.2 | 116 | 0.674959 |
d248fc0eee318a6e301800ba9f132b23b65bb95b | 2,146 | php | PHP | resources/views/welcome.blade.php | naime-hossain/Larashop | 07e1f1c2f5b82b82ee407671751d4857854a563f | [
"MIT"
] | 1 | 2021-03-10T05:00:02.000Z | 2021-03-10T05:00:02.000Z | resources/views/welcome.blade.php | naime-hossain/Larashop | 07e1f1c2f5b82b82ee407671751d4857854a563f | [
"MIT"
] | null | null | null | resources/views/welcome.blade.php | naime-hossain/Larashop | 07e1f1c2f5b82b82ee407671751d4857854a563f | [
"MIT"
] | null | null | null | @extends('layouts.app')
@section('heading')
{{-- expr --}}
@if ($GeneralSetting)
<h1>{{ $GeneralSetting->site_title }}</h1>
<p>{{ $GeneralSetting->site_slogan }}</p>
@else
    <h1>Welcome to Larashop</h1>
    <p>Shop like there is no tomorrow</p>
@endif
@endsection
@section('content')
{{-- expr --}}
<div class="Products_wrap">
<div class="col-md-12">
<ol class="breadcrumb">
<li class="active">Home</li>
</ol>
<h2>New Products</h2>
</div>
@if (isset($products))
@if ($products->count()>0)
@include('layouts.products')
@endif
@endif
<div class="feature_product_wrap">
@if (isset($feature_products))
@if ($feature_products->count()>0)
<div class="col-md-12">
<h2>Our Feature Products</h2>
</div>
@php
$products=$feature_products;
@endphp
@include('layouts.products')
@endif
@endif
</div>
<div class="feature_product_wrap">
@if (isset($feature_products))
@if ($popular_products->count()>0)
<div class="col-md-12">
<h2>Our Popular Products</h2>
</div>
@php
$products=$popular_products;
@endphp
@include('layouts.products')
<div class="col-sm-4 col-lg-4 col-md-4">
<h4>Like Our products?
</h4>
<p>If you like these product, then check out <a target="_blank" href="{{ route('products') }}">All products</a> And choose from variety</p>
<a class="btn btn-primary" target="" href="{{ route('products') }}">All products</a>
</div>
@endif
@endif
</div>
</div>
@endsection
| 25.547619 | 163 | 0.430568 |
6e4907a8d84e2ffe810ff47d8a6e113667bc8e34 | 427 | html | HTML | 02 Writing Algorithms/03 Securities/99 Asset Classes/01 US Equity/01 Requesting Data/08 Supported Markets.html | LouisSzeto/Documentation | 8f2e1881bad0d3c0a0c0fc2e613c73d8f3d1e0d7 | [
"Apache-2.0"
] | 1 | 2022-03-15T12:16:05.000Z | 2022-03-15T12:16:05.000Z | 02 Writing Algorithms/03 Securities/99 Asset Classes/01 US Equity/01 Requesting Data/08 Supported Markets.html | LouisSzeto/Documentation | 8f2e1881bad0d3c0a0c0fc2e613c73d8f3d1e0d7 | [
"Apache-2.0"
] | null | null | null | 02 Writing Algorithms/03 Securities/99 Asset Classes/01 US Equity/01 Requesting Data/08 Supported Markets.html | LouisSzeto/Documentation | 8f2e1881bad0d3c0a0c0fc2e613c73d8f3d1e0d7 | [
"Apache-2.0"
] | null | null | null | <div>What markets are supported:<br></div><div>-Market.USA <br></div><div><br></div><div>Why do I need to set specific a market?</div><div>- Each brokerage model has a default market for asset classes. If you set a brokerage model, you may not need to specify the market to use.<br></div><div><br></div><div>How do I declare the market?</div><div>- Pass to AddEquity or use the default provided by the brokerage model<br></div> | 427 | 427 | 0.725995 |
132ccfc15fd1135bde97cce034de477f317b575f | 8,027 | c | C | HAP/HAPAccessorySetup.c | Raspberry-Pi-4-MCU/apple_homekit_example | 22970d352d064108a1f86e199be91952fff91ec8 | [
"Apache-2.0"
] | 2,443 | 2019-12-18T13:21:14.000Z | 2022-03-28T07:41:17.000Z | HAP/HAPAccessorySetup.c | Raspberry-Pi-4-MCU/apple_homekit_example | 22970d352d064108a1f86e199be91952fff91ec8 | [
"Apache-2.0"
] | 86 | 2019-12-19T15:11:09.000Z | 2022-03-14T06:04:31.000Z | HAP/HAPAccessorySetup.c | Raspberry-Pi-4-MCU/apple_homekit_example | 22970d352d064108a1f86e199be91952fff91ec8 | [
"Apache-2.0"
] | 266 | 2019-12-18T17:54:09.000Z | 2022-03-27T13:41:45.000Z | // Copyright (c) 2015-2019 The HomeKit ADK Contributors
//
// Licensed under the Apache License, Version 2.0 (the “License”);
// you may not use this file except in compliance with the License.
// See [CONTRIBUTORS.md] for the list of HomeKit ADK project authors.
#include "HAP+Internal.h"
/**
 * Validates a HomeKit setup code string.
 *
 * Accepts only the canonical "XXX-XX-XXX" layout (digits with dashes at
 * positions 3 and 6) and rejects the trivial codes forbidden by the spec:
 * all eight digits identical, "123-45-678" (ascending) and "876-54-321"
 * (descending). The final check is written with branch-free bitwise
 * arithmetic rather than short-circuiting comparisons.
 */
HAP_RESULT_USE_CHECK
bool HAPAccessorySetupIsValidSetupCode(const char* stringValue) {
    HAPPrecondition(stringValue);
    // Exact length of the canonical representation (excluding NUL terminator).
    if (HAPStringGetNumBytes(stringValue) != sizeof(HAPSetupCode) - 1) {
        return false;
    }
    // Counters over the 8 digit positions: how many digits equal / are one more
    // than / are one less than the previous digit. 7 pairwise matches = all 8 digits
    // form the pattern.
    uint8_t numEqual = 0, numAscending = 0, numDescending = 0;
    char previousCharacter = '\0';
    for (size_t i = 0; i < sizeof(HAPSetupCode) - 1; i++) {
        if (i == 3 || i == 6) {
            if (stringValue[i] != '-') {
                return false;
            }
        } else {
            if (stringValue[i] < '0' || stringValue[i] > '9') {
                return false;
            }
            numEqual += stringValue[i] == previousCharacter;
            numAscending += stringValue[i] == previousCharacter + 1;
            numDescending += stringValue[i] == previousCharacter - 1;
            previousCharacter = stringValue[i];
        }
    }
    // All equal, ascending, or descending?
    // XOR terms are zero exactly when the pattern matches, so each clause is
    // false only for: 8 equal digits, "123-45-678", or "876-54-321".
    return numEqual != 7 && ((uint8_t)((uint8_t) stringValue[0] ^ '1') | (uint8_t)(numAscending ^ 7)) &&
            ((uint8_t)((uint8_t) stringValue[0] ^ '8') | (uint8_t)(numDescending ^ 7));
}
/**
 * Fills *setupCode with a random, valid "XXX-XX-XXX" setup code.
 *
 * Digits are drawn with rejection sampling (low nibble of a random byte,
 * retried while > 9) so the 0-9 distribution stays uniform; the whole code
 * is regenerated until it passes HAPAccessorySetupIsValidSetupCode, i.e.
 * until it is not one of the forbidden trivial codes.
 */
void HAPAccessorySetupGenerateRandomSetupCode(HAPSetupCode* setupCode) {
    HAPPrecondition(setupCode);
    do {
        // Format: XXX-XX-XXX with X being digit from 0-9.
        for (size_t i = 0; i < sizeof setupCode->stringValue - 1; i++) {
            if (i == 3 || i == 6) {
                setupCode->stringValue[i] = '-';
                continue;
            }
            // Add random digit.
            uint8_t randomByte;
            do {
                HAPPlatformRandomNumberFill(&randomByte, sizeof randomByte);
            } while ((uint8_t)(randomByte & 0xFU) > 9);
            setupCode->stringValue[i] = (char) ('0' + (char) (randomByte & 0xFU));
        }
        setupCode->stringValue[sizeof setupCode->stringValue - 1] = '\0';
    } while (!HAPAccessorySetupIsValidSetupCode(setupCode->stringValue));
}
/**
 * Validates a setup ID string: exactly four characters, each of which must be
 * a decimal digit or an uppercase letter A-Z.
 *
 * The character-class test is written branch-free with bitwise AND/OR of
 * comparison results instead of short-circuit logic.
 */
HAP_RESULT_USE_CHECK
bool HAPAccessorySetupIsValidSetupID(const char* stringValue) {
    HAPPrecondition(stringValue);
    if (HAPStringGetNumBytes(stringValue) != sizeof(HAPSetupID) - 1) {
        return false;
    }
    for (size_t i = 0; i < sizeof(HAPSetupID) - 1; i++) {
        char c = stringValue[i];
        // Reject c when it is outside BOTH ranges '0'..'9' and 'A'..'Z'.
        if ((uint8_t)((uint8_t)(c < '0') | (uint8_t)(c > '9')) & (uint8_t)((uint8_t)(c < 'A') | (uint8_t)(c > 'Z'))) {
            return false;
        }
    }
    return true;
}
/**
 * Fills *setupID with four random characters from the alphabet 0-9, A-Z.
 *
 * Uses rejection sampling on whole random bytes: a byte is retried until it
 * already is a valid setup-ID character, keeping the distribution over the
 * 36-character alphabet uniform.
 */
void HAPAccessorySetupGenerateRandomSetupID(HAPSetupID* setupID) {
    HAPPrecondition(setupID);
    for (size_t i = 0; i < sizeof setupID->stringValue - 1; i++) {
        char c;
        do {
            HAPPlatformRandomNumberFill(&c, sizeof c);
        } while ((uint8_t)((uint8_t)(c < '0') | (uint8_t)(c > '9')) &
                 (uint8_t)((uint8_t)(c < 'A') | (uint8_t)(c > 'Z')));
        setupID->stringValue[i] = c;
    }
    setupID->stringValue[sizeof setupID->stringValue - 1] = '\0';
}
/** Prefix of the setup payload. */
#define HAPSetupPayloadPrefix ("X-HM://")
HAP_STATIC_ASSERT(
sizeof(HAPSetupPayload) == sizeof HAPSetupPayloadPrefix - 1 + 9 + sizeof(HAPAccessorySetupSetupHash) + 1,
HAPSetupPayload_FitsTemplate);
/**
 * Builds the "X-HM://" setup payload string from the setup code, setup ID,
 * category and feature flags.
 *
 * Layout: "X-HM://" prefix, 9 Base36 characters encoding a 45-bit packed value
 * (version, category, transport flags, paired flag and the 8-digit setup code),
 * followed by the 4-character setup ID (or "0000" when none is given).
 *
 * setupCode and setupID must either both be provided (unpaired accessory) or
 * both be NULL (paired accessory) - enforced by the preconditions below.
 */
void HAPAccessorySetupGetSetupPayload(
        HAPSetupPayload* setupPayload,
        const HAPSetupCode* _Nullable setupCode,
        const HAPSetupID* _Nullable setupID,
        HAPAccessorySetupSetupPayloadFlags flags,
        HAPAccessoryCategory category) {
    HAPPrecondition(setupPayload);
    HAPPrecondition(!setupCode || !flags.isPaired);
    HAPPrecondition(!setupID || !flags.isPaired);
    HAPPrecondition((setupCode && setupID) || (!setupCode && !setupID));
    HAPPrecondition(flags.ipSupported || flags.bleSupported);
    HAPPrecondition(category > 0);
    HAPRawBufferZero(setupPayload->stringValue, sizeof setupPayload->stringValue);
    char* pl = setupPayload->stringValue;
    // Prefix.
    HAPRawBufferCopyBytes(pl, HAPSetupPayloadPrefix, sizeof HAPSetupPayloadPrefix - 1);
    pl += sizeof HAPSetupPayloadPrefix - 1;
    // Raw VersionCategoryFlagsAndSetupCode.
    // Pack the fixed-width fields into a single 45-bit value (bit positions in comments).
    uint64_t code = (uint64_t)(
            /* 45-43 - Version */ ((uint64_t) 0x0U << 43U) |
            /* 42-39 - Reserved */ ((uint64_t) 0x0U << 39U) |
            /* 38-31 - Category */ ((uint64_t)(category & 0xFFU) << 31U) |
            /* 29 - BLE */ ((uint64_t)(flags.bleSupported ? 1U : 0U) << 29U) |
            /* 28 - IP */ ((uint64_t)(flags.ipSupported ? 1U : 0U) << 28U) |
            /* 27 - Paired */ ((uint64_t)(flags.isPaired ? 1U : 0U) << 27U));
    if (setupCode) {
        // Fold the 8 digits of "XXX-XX-XXX" (dashes at indices 3 and 6 skipped)
        // into a decimal number occupying the low 27 bits.
        code |= /* 26-00 - Setup code */ (uint64_t)(
                (uint64_t)(setupCode->stringValue[0] - '0') * 10000000U +
                (uint64_t)(setupCode->stringValue[1] - '0') * 1000000U +
                (uint64_t)(setupCode->stringValue[2] - '0') * 100000U +
                (uint64_t)(setupCode->stringValue[4] - '0') * 10000U +
                (uint64_t)(setupCode->stringValue[5] - '0') * 1000U +
                (uint64_t)(setupCode->stringValue[7] - '0') * 100U + (uint64_t)(setupCode->stringValue[8] - '0') * 10U +
                (uint64_t)(setupCode->stringValue[9] - '0') * 1U);
    }
    // Base36 encode.
    // Division by 36 is done with shift-and-add reciprocal approximation plus a
    // correction step, avoiding data-dependent branches (see inline invariants).
    for (int i = 0; i < 9; i++) {
        // Divide code by 36 and get remainder.
        uint64_t q;
        uint32_t r, d;
        uint64_t x = code;
        q = x - (x >> 3U);
        q = q + (q >> 6U);
        q = q + (q >> 12U);
        q = q + (q >> 24U);
        q = q + (q >> 48U); // not needed for x < 2^48
        /* q = x * 8/9 +0/-5 */
        q = q >> 5U;
        /* q = x / 36 +0/-1 */
        r = (uint32_t) x - (uint32_t) q * 36U;
        /* 0 <= r < 72 */
        d = (r + 28U) >> 6U;
        /* d = 1 if r > 35 */
        code = q + d;
        uint8_t c = (uint8_t)(r - d * 36U);
        HAPAssert(c < 36U);
        // Constant time transformation to avoid leaking secret data through side channels.
        // Index: 0 10
        // Alphabet: 0123456789 ABCDEFGHIJKLMNOPQRSTUVWXYZ
        // ASCII: 48 57 65 90
        // Transform alphabet index into ASCII.
        c += '0' + (((int8_t)(c + 0x80 - 10) >> 7) & ('A' - '9' - 1)); // Skip gap between 9 and A.
        // Big endian encode.
        pl[8 - i] = (char) c;
    }
    pl += 9;
    // SetupID.
    if (setupID) {
        HAPRawBufferCopyBytes(pl, setupID->stringValue, sizeof setupID->stringValue - 1);
        pl += sizeof setupID->stringValue - 1;
    } else {
        // Emit "0000". Note: sizeof on the null setupID is fine - the operand
        // of sizeof is not evaluated, only its type is used.
        for (size_t i = 0; i < sizeof setupID->stringValue - 1; i++) {
            *pl = '0';
            pl++;
        }
    }
    // Done.
    HAPAssert(!*pl);
    HAPAssert(pl - setupPayload->stringValue <= (long) sizeof setupPayload->stringValue);
}
/**
 * Derives the setup hash advertised by the accessory.
 *
 * Computes SHA-512 over the concatenation of the setup ID string and the
 * Device ID string (both without their NUL terminators) and truncates the
 * digest to sizeof setupHash->bytes. The same buffer is used for the input
 * and the digest; HAP_sha512 supports in-place operation here since the
 * input fits within the 64-byte digest buffer (asserted below).
 */
void HAPAccessorySetupGetSetupHash(
        HAPAccessorySetupSetupHash* setupHash,
        const HAPSetupID* setupID,
        const HAPDeviceIDString* deviceIDString) {
    HAPPrecondition(setupHash);
    HAPPrecondition(setupID);
    HAPPrecondition(deviceIDString);
    // Concatenate setup ID and Device ID.
    uint8_t hash[SHA512_BYTES];
    HAPAssert(sizeof setupID->stringValue - 1 + sizeof deviceIDString->stringValue - 1 <= sizeof hash);
    size_t o = 0;
    HAPRawBufferCopyBytes(&hash[o], setupID->stringValue, sizeof setupID->stringValue - 1);
    o += sizeof setupID->stringValue - 1;
    HAPRawBufferCopyBytes(&hash[o], deviceIDString->stringValue, sizeof deviceIDString->stringValue - 1);
    o += sizeof deviceIDString->stringValue - 1;
    // SHA512.
    HAP_sha512(hash, hash, o);
    // Truncate.
    HAPAssert(sizeof setupHash->bytes <= sizeof hash);
    HAPRawBufferCopyBytes(setupHash->bytes, hash, sizeof setupHash->bytes);
}
| 36.652968 | 120 | 0.574312 |
efeafbe96abe6d7feec4eca7e54348b78d8c1aa6 | 567 | kt | Kotlin | nbrk_currency_rates/src/main/java/com/nbrk/rates/data/local/sharedpref/AppSettings.kt | rpagyc/nbrk-currency-rates | 7b9170035075b382c2747d2968ddda37f475e242 | [
"Apache-2.0"
] | null | null | null | nbrk_currency_rates/src/main/java/com/nbrk/rates/data/local/sharedpref/AppSettings.kt | rpagyc/nbrk-currency-rates | 7b9170035075b382c2747d2968ddda37f475e242 | [
"Apache-2.0"
] | null | null | null | nbrk_currency_rates/src/main/java/com/nbrk/rates/data/local/sharedpref/AppSettings.kt | rpagyc/nbrk-currency-rates | 7b9170035075b382c2747d2968ddda37f475e242 | [
"Apache-2.0"
] | 1 | 2021-01-20T08:00:45.000Z | 2021-01-20T08:00:45.000Z | package com.nbrk.rates.data.local.sharedpref
import android.content.Context
import org.jetbrains.anko.defaultSharedPreferences
/**
* Created by Roman Shakirov on 29-Oct-17.
* DigitTonic Studio
* support@digittonic.com
*/
/**
 * Read-only accessor over the app's default SharedPreferences that reports
 * per-currency visibility toggles. Both settings default to true (visible)
 * when no preference has been stored yet.
 */
class AppSettings(context: Context) {
    private val sharedPref = context.defaultSharedPreferences
    /** Whether [currencyCode] should be shown inside the app ("pref_key_show_<code>"). */
    fun isVisibleInApp(currencyCode: String): Boolean =
        sharedPref.getBoolean("pref_key_show_$currencyCode", true)
    /** Whether [currencyCode] should be shown in the home-screen widget ("widget_show_<code>"). */
    fun isVisibleInWidget(currencyCode: String): Boolean =
        sharedPref.getBoolean("widget_show_$currencyCode", true)
}
ffd57290da22f8c6815d24faff6d33a147ccbe7e | 3,610 | swift | Swift | AVPlayer/Controller/MovieListViewController.swift | rexjamesg/AVPlayerProject | 463d956d70b19c816990752cd8a350ecc85b456b | [
"MIT"
] | null | null | null | AVPlayer/Controller/MovieListViewController.swift | rexjamesg/AVPlayerProject | 463d956d70b19c816990752cd8a350ecc85b456b | [
"MIT"
] | null | null | null | AVPlayer/Controller/MovieListViewController.swift | rexjamesg/AVPlayerProject | 463d956d70b19c816990752cd8a350ecc85b456b | [
"MIT"
] | null | null | null | //
// MovieListViewController.swift
// AVPlayer
//
// Created by Yu Li Lin on 2019/9/4.
// Copyright © 2019 Yu Li Lin. All rights reserved.
//
import UIKit
/// Portrait-locked screen showing the movie catalogue in a collection view;
/// tapping a cell plays the selected video full screen.
class MovieListViewController: BaseViewController {
    @IBOutlet weak var listCollectionView: UICollectionView!
    @IBOutlet weak var flowLayout: UICollectionViewFlowLayout!
    // Reuse identifier, which also matches the cell's nib file name.
    var identifierName:String = "MovieListCell"
    // Backing data; populated from VideoSource in viewDidLoad.
    var data:Categories?
    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view.
        initListCollectionView()
        data = VideoSource().getVideos()
        // This list is portrait-only; the player manages its own orientation.
        AppDelegate.AppUtility.lockOrientation(.portrait)
    }
    /// Registers the cell nib, wires delegate/dataSource and sizes cells
    /// relative to the screen width (magic ratios tuned for the design).
    private func initListCollectionView() {
        let cellNib = UINib.init(nibName: identifierName, bundle: nil)
        listCollectionView.register(cellNib, forCellWithReuseIdentifier: identifierName)
        listCollectionView.delegate = self
        listCollectionView.dataSource = self
        let width:CGFloat = view.frame.size.width*0.951690821
        let height:CGFloat = width*0.76142132
        flowLayout.estimatedItemSize = CGSize(width: width, height: height)
    }
    // Intentionally overrides without calling super - presumably to suppress
    // BaseViewController's appearance behaviour; TODO confirm.
    override func viewWillAppear(_ animated: Bool) {
    }
    /// Refreshes the collection view from the current `data`.
    func reloadData() {
        listCollectionView.reloadData()
    }
    /// Instantiates PlayerViewController from the Main storyboard and presents
    /// it full screen with the given video. ("stroyboard" is a local-name typo.)
    private func presentPlayerController(video:Video) {
        let stroyboard = UIStoryboard.init(name: "Main", bundle: nil)
        if let playerVC = stroyboard.instantiateViewController(withIdentifier: "PlayerViewController") as? PlayerViewController {
            playerVC.video = video
            playerVC.modalPresentationStyle = .overFullScreen
            self.present(playerVC, animated: true, completion: nil)
        }
    }
    // MARK: - Navigation
    // In a storyboard-based application, you will often want to do a little preparation before navigation
    override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
        // Get the new view controller using segue.destination.
        // Pass the selected object to the new view controller.
    }
    /*
    func adaptivePresentationStyle(for controller: UIPresentationController) -> UIModalPresentationStyle {
        return .none
    }
    */
}
/// Collection view wiring: one cell per video in `data`; selection runs the
/// cell's tap animation before presenting the player.
extension MovieListViewController: UICollectionViewDelegate, UICollectionViewDataSource{
    //MARK: ----- UICollectionViewDelegate & UICollectionViewDataSource -----
    // Item count mirrors the number of videos; zero while data is not loaded.
    func collectionView(_ collectionView: UICollectionView, numberOfItemsInSection section: Int) -> Int {
        if let count = data?.count {
            return count
        }
        return 0
    }
    // Dequeues a MovieListCell and configures it with the video at this row.
    func collectionView(_ collectionView: UICollectionView, cellForItemAt indexPath: IndexPath) -> UICollectionViewCell {
        let cell = collectionView.dequeueReusableCell(withReuseIdentifier: identifierName, for: indexPath)
        if let cell = cell as? MovieListCell {
            if let video = data?.getVideo(index: indexPath.row) {
                cell.setUp(video: video)
            }
        }
        return cell
    }
    // Plays the selected video once the cell's selection animation finishes.
    func collectionView(_ collectionView: UICollectionView, didSelectItemAt indexPath: IndexPath) {
        let cell = collectionView.cellForItem(at: indexPath) as? MovieListCell
        cell?.selectedAnimation { [self] in
            if let video = self.data?.getVideo(index: indexPath.row) {
                presentPlayerController(video: video)
            }
        }
    }
}
| 31.12069 | 129 | 0.642382 |
2670fbd0adc96f81847b4477319230de6c33517c | 2,425 | java | Java | src/main/java/org/pageseeder/berlioz/util/Versions.java | clauret/wo-berlioz | 9b4c5695a8f406e1acea48de2bbcc8ce0c1a4bc9 | [
"Apache-2.0"
] | 1 | 2021-07-28T21:12:18.000Z | 2021-07-28T21:12:18.000Z | src/main/java/org/pageseeder/berlioz/util/Versions.java | clauret/wo-berlioz | 9b4c5695a8f406e1acea48de2bbcc8ce0c1a4bc9 | [
"Apache-2.0"
] | null | null | null | src/main/java/org/pageseeder/berlioz/util/Versions.java | clauret/wo-berlioz | 9b4c5695a8f406e1acea48de2bbcc8ce0c1a4bc9 | [
"Apache-2.0"
] | 2 | 2015-09-11T01:52:16.000Z | 2019-05-31T05:10:27.000Z | /*
* Copyright 2015 Allette Systems (Australia)
* http://www.allette.com.au
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.pageseeder.berlioz.util;
/**
 * Utility class for versions.
 *
 * @author Christophe Lauret
 * @version 0.9.26 - 16 December 2013
 * @since 0.9.26
 */
public final class Versions {

  /**
   * Utility class.
   */
  private Versions() {
  }

  /**
   * Compares two versions.
   *
   * <p>This method decomposes the versions into components assuming they are separated
   * by '.', and compare them in order. If possible the comparison is numeric, otherwise it
   * is alphanumeric. The first comparison resulting in a non equal result will be the
   * result of this function.
   *
   * <p>Numeric components are compared as non-negative integers of arbitrary length.
   * (Previously components exceeding {@code Long.MAX_VALUE} triggered a
   * {@code NumberFormatException} and silently fell back to a lexicographic
   * comparison, which for example ordered "20000000000000000000" before "3".)
   *
   * @param versionA The first version to compare.
   * @param versionB The second version to compare.
   *
   * @return a positive number if version A is considered greater than version B;
   *         a negative number if version B is considered greater than version A;
   *         zero if the versions are considered equivalent.
   */
  public static int compare(String versionA, String versionB) {
    String[] atomsA = versionA.split("\\.");
    String[] atomsB = versionB.split("\\.");
    int atomCount = Math.max(atomsA.length, atomsB.length);
    for (int i = 0; i < atomCount; i++) {
      // Missing components are treated as "0" so that "1.0" equals "1".
      String a = atomsA.length > i? atomsA[i] : "0";
      String b = atomsB.length > i? atomsB[i] : "0";
      int compare;
      if (a.matches("[0-9]+") && b.matches("[0-9]+")) {
        compare = compareNumeric(a, b);
      } else {
        compare = a.compareTo(b);
      }
      if (compare != 0) return compare;
    }
    return 0;
  }

  /**
   * Numerically compares two strings made exclusively of ASCII digits, with no
   * bound on their length.
   *
   * @param a The first sequence of digits.
   * @param b The second sequence of digits.
   *
   * @return a negative number, zero or a positive number as the first value is
   *         numerically less than, equal to, or greater than the second.
   */
  private static int compareNumeric(String a, String b) {
    String na = stripLeadingZeros(a);
    String nb = stripLeadingZeros(b);
    // With leading zeros removed, a longer digit string is always the larger number.
    if (na.length() != nb.length()) return na.length() < nb.length()? -1 : 1;
    // Same length: lexicographic order coincides with numeric order.
    return na.compareTo(nb);
  }

  /**
   * Strips redundant leading '0' characters, always leaving at least one character
   * so that "000" normalizes to "0".
   *
   * @param s A non-empty sequence of digits.
   *
   * @return The sequence without redundant leading zeros.
   */
  private static String stripLeadingZeros(String s) {
    int i = 0;
    while (i < s.length() - 1 && s.charAt(i) == '0') {
      i++;
    }
    return s.substring(i);
  }

}
e7ecc557e33faf2b68bd5445272a43c0e0419ea1 | 445 | py | Python | change_file_name.py | Guzhongren/picuture2thumbnail | 15d58c2e53652e5c5af9ff1bf89883b9038bfa03 | [
"MIT"
] | 1 | 2019-07-07T17:51:37.000Z | 2019-07-07T17:51:37.000Z | change_file_name.py | Guzhongren/picuture2thumbnail | 15d58c2e53652e5c5af9ff1bf89883b9038bfa03 | [
"MIT"
] | null | null | null | change_file_name.py | Guzhongren/picuture2thumbnail | 15d58c2e53652e5c5af9ff1bf89883b9038bfa03 | [
"MIT"
] | 1 | 2020-01-19T08:27:10.000Z | 2020-01-19T08:27:10.000Z | # -*- coding: utf-8 -*-
# Author:Guzhongren
# created: 2017-05-08
import os
path = 'C:\\geoconFailed\\willfix\\'
for file in os.listdir(path):
if os.path.isfile(os.path.join(path,file))==True:
_file= file.split(".")
_file_name=_file[0]
_file_type=_file[1]
new_file_name=_file_name[:-1]+"."+_file_type
os.rename(os.path.join(path,file), os.path.join(path, new_file_name))
print(file+u"更名成功")
| 27.8125 | 77 | 0.624719 |
f16f7991fccc9454ab41e3a46eeba8f20cef75e2 | 309 | kt | Kotlin | app/src/main/java/com/rolandvitezhu/todocloud/app/Constant.kt | rolandvitezhu/TodoCloud | 4cd43cb7b8ea3b2b93c4a682fd23efead1742393 | [
"MIT"
] | null | null | null | app/src/main/java/com/rolandvitezhu/todocloud/app/Constant.kt | rolandvitezhu/TodoCloud | 4cd43cb7b8ea3b2b93c4a682fd23efead1742393 | [
"MIT"
] | 1 | 2017-07-25T20:12:53.000Z | 2017-07-26T13:06:15.000Z | app/src/main/java/com/rolandvitezhu/todocloud/app/Constant.kt | rolandvitezhu/TodoCloud | 4cd43cb7b8ea3b2b93c4a682fd23efead1742393 | [
"MIT"
] | null | null | null | package com.rolandvitezhu.todocloud.app
/**
* Store app-wide used constants
*/
/**
 * Store app-wide used constants
 */
class Constant {
    companion object {
        // Bundle/argument key for a todo item's due date.
        const val DUE_DATE = "dueDate"
        // Bundle/argument key for a todo item's reminder date-time.
        const val REMINDER_DATE_TIME = "reminderDateTime"
        // Id of the notification channel used for reminder notifications.
        const val NOTIFICATION_CHANNEL_REMINDER = "NOTIFICATION_CHANNEL_REMINDER"
    }
}
201f5717b05496bce7986a7e2f5d34c58a5acea2 | 82 | css | CSS | less/variables.css | cmply/bootstrap | 1fb89907834af6e1a31baf53725137a6db243aa2 | [
"MIT"
] | null | null | null | less/variables.css | cmply/bootstrap | 1fb89907834af6e1a31baf53725137a6db243aa2 | [
"MIT"
] | null | null | null | less/variables.css | cmply/bootstrap | 1fb89907834af6e1a31baf53725137a6db243aa2 | [
"MIT"
] | null | null | null | @import url('https://fonts.googleapis.com/css?family=Open+Sans:700,600,400,300');
| 41 | 81 | 0.743902 |
9bfd830a7152b77206acc757116f85a31cd77c4e | 25 | js | JavaScript | src/scenes/ReactD3/services/api/index.js | marksauter/reactd3 | 10961213403db7ea25229305c017507e9ca2174b | [
"MIT"
] | null | null | null | src/scenes/ReactD3/services/api/index.js | marksauter/reactd3 | 10961213403db7ea25229305c017507e9ca2174b | [
"MIT"
] | null | null | null | src/scenes/ReactD3/services/api/index.js | marksauter/reactd3 | 10961213403db7ea25229305c017507e9ca2174b | [
"MIT"
] | null | null | null | export * from './data';
| 8.333333 | 23 | 0.56 |
9bb33f8984d239ff34355c6c0ab9b0f42a2c0206 | 14,916 | js | JavaScript | nerdlets/nr1-rssreader-nerdlet/index.js | newrelic/nr1-rssreader | 7e2b7c1a75589b113bc4b989650b8d129fe524e7 | [
"Apache-2.0"
] | null | null | null | nerdlets/nr1-rssreader-nerdlet/index.js | newrelic/nr1-rssreader | 7e2b7c1a75589b113bc4b989650b8d129fe524e7 | [
"Apache-2.0"
] | null | null | null | nerdlets/nr1-rssreader-nerdlet/index.js | newrelic/nr1-rssreader | 7e2b7c1a75589b113bc4b989650b8d129fe524e7 | [
"Apache-2.0"
] | null | null | null | import React from 'react';
import axios from 'axios';
import * as Parser from 'rss-parser';
import { Container, Grid, Image, Card, Icon, List, Modal, Button, Header } from 'semantic-ui-react'
import _ from 'lodash'
import { UserStorageMutation, UserStorageQuery, Toast } from 'nr1';
import EditModal from './editmodal'
import { v1 as uuidv1 } from 'uuid';
import StatusCard from './statuscard'
// Types
import PropTypes from 'prop-types';
let parser = new Parser();
export default class RssStatusNerdletNerdlet extends React.Component {
static propTypes = {
nerdletUrlState: PropTypes.object.isRequired,
launcherUrlState: PropTypes.object.isRequired,
entity: PropTypes.object.isRequired
};
constructor(props) {
super(props);
this.state = {
modalOpen: false,
valueIntoModal: "123456abcdef",
dashboardCards: [],
remountKey: (new Date()).getTime()
}
}
componentDidMount() {
//if (this.props.nerdletUrlState && this.props.nerdletUrlState.entityGuid) {
// console.debug("Calling loadState with props", this.props);
this._loadState();
// } else {
// this.setState({ openModal: true });
// }
}
removeCard(uuid) {
var collection = this.state.dashboardCards;
// search through all cards for this url, and remove it,
for(var i = 0; i < collection.length; i++)
{
var targetuuid = collection[i].uuid;
if(targetuuid == uuid) /// remove by uuid
{
// remove it, ,,
collection = collection.splice(i,1)
break;
}
}
const { entity } = this.props;
const cards = this.state.dashboardCards;
this.setState({ dashboardCards: collection }, () => {
UserStorageMutation.mutate({
actionType: UserStorageMutation.ACTION_TYPE.WRITE_DOCUMENT,
collection: 'rss-dashboard-cards',
documentId: '2434slkj230iufsolwkjw',
document: { cards }
}).then(() => {
Toast.showToast({ title: "Update Saved.", type: Toast.TYPE.NORMAL });
}).catch(error => {
console.error(error);
Toast.showToast({ title: error.message, type: Toast.TYPE.CRITICAL });
});
});
}
editCard(uuid, _rssurl, _logourl) {
var collection = this.state.dashboardCards;
// search through all cards for this url, and remove it,
for(var i = 0; i < collection.length; i++)
{
var targetuuid = collection[i].uuid;
if(targetuuid == uuid) /// find by uuid
{
// update it, ,,
collection[i].rssurl = _rssurl;
collection[i].logourl = _logourl;
break;
}
}
const { entity } = this.props;
const cards = this.state.dashboardCards;
this.setState({ removeCard: (new Date()).getTime() , dashboardCards: collection }, () => {
// window.location.reload(false);
UserStorageMutation.mutate({
actionType: UserStorageMutation.ACTION_TYPE.WRITE_DOCUMENT,
collection: 'rss-dashboard-cards',
documentId: '2434slkj230iufsolwkjw',
document: { cards }
}).then(() => {
Toast.showToast({ title: "Update Saved.", type: Toast.TYPE.NORMAL });
}).catch(error => {
console.error(error);
Toast.showToast({ title: error.message, type: Toast.TYPE.CRITICAL });
});
});
}
appendDashboard(_rssurl, _logourl) {
var bla = this.state.dashboardCards;
var new_uuid = uuidv1();
var ele = {
// rssurl : "https://status.pagerduty.com/history.rss",
// logourl : "https://www.logo.wine/a/logo/PagerDuty/PagerDuty-Logo.wine.svg"
uuid: new_uuid,
rssurl: _rssurl,
logourl: _logourl
}
bla.push(ele)
const { entity } = this.props;
const cards = this.state.dashboardCards;
this.setState({ dashboardCards: bla }, () => {
UserStorageMutation.mutate({
actionType: UserStorageMutation.ACTION_TYPE.WRITE_DOCUMENT,
collection: 'rss-dashboard-cards',
documentId: '2434slkj230iufsolwkjw',
document: { cards }
}).then(() => {
Toast.showToast({ title: "Update Saved.", type: Toast.TYPE.NORMAL });
}).catch(error => {
console.error(error);
Toast.showToast({ title: error.message, type: Toast.TYPE.CRITICAL });
});
});
}
/**
* Load the entity using the loadEntity utils function, then look up if there's a entityList-v0 collection for this entity and user.
* @param {string} entityGuid
*/
_loadState() {
UserStorageQuery.query({
collection: 'rss-dashboard-cards',
documentId: '2434slkj230iufsolwkjw'
}).then(({ data }) => {
console.debug(data);
if (data.cards) {
this.setState({ dashboardCards: data.cards });
} else {
this.setState({ dashboardCards: [] });
}
}).catch(error => {
console.error(error);
this.setState({ dashboardCards: [] });
Toast.showToast({ title: error.message, type: Toast.TYPE.CRITICAL });
});
}
render() {
// let { feed } = this.state;
return (
[
<Button // Button to click to activate the Modal
key='button1'
primary
content='Add New Card'
onClick={
() => {
this.setState({ modalOpen: true })
}
}
/>,
<EditModal // The invisible modal itself
key='modal1'
modalOpen={this.state.modalOpen}
rssurl=""
logourl=""
handleClose={() => {
this.setState({ modalOpen: false }) // just close the dialog
}
}
handleConfirm={(rss, logo) => {
var bla = logo;
// add some checksum on the urls...
this.appendDashboard(rss, logo)
this.setState({ modalOpen: false })
}
}
valueIntoModal={this.state.valueIntoModal}
/>,
<Grid divided='vertically' >
<Grid.Row columns={4}>
{
this.state.dashboardCards.map((item, i) => {
return (
<Grid.Column >
<StatusCard data={item} key={this.state.remountKey}
handleEdit={(uuid, rss, logo) => {
var bla = logo;
this.editCard(uuid, rss, logo)
// add some checksum on the urls...
}
} handleDelete={(uuid) => {
this.removeCard(uuid)
}
}
></StatusCard>
</Grid.Column>
);
})
}
{
/* <Grid.Column >
<StatusCard url="https://status.aws.amazon.com/rss/ec2-us-east-2.rss" icon="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAOoAAAB7CAMAAABNcOyaAAAAtFBMVEX///8jLz7/mQD/lQD/lwAgLTz/kwAaKDgXJjceKzv/kQBMU13k5eb/+fT/s24SIjQAAAAAAB//jQAGGy/19fYAESgAFywAABrT1NYoLj2VmJ2ipaqCh44AABMAABc1P0y4ur3GyMusr7OOj5VobHRZYGn/8eL/pk54fYQAAAo+SFP/0KD/37//w5D/wYD/oTj/59D/mh0dITP/rEz/1bH/rlj/oCz/qUH/sWL/y5X/u3T/uXtuGUAoAAAI4ElEQVR4nO2Za2OiOhCGRe4IRW6KogUUrfXurlrb/v//dTIJSEC32+7Rwp6T50tLCDEvM5mZhEaDwWAwGAwGg8FgMBgMBoPBYDAYDMb/FD0K+uPxuB9E+m0GdGM8YBD57m0GvA3+IOyppukgTJVrBefJueMBYlTujRj7xcYIGvt5nyQUyXgyFyalASpDD5Sho4pchihb3VEqNrZguu0+3d8fdlCb1ytYP2pDx27a5k/aipiNKIqmZQV1MG0UeipXQvQGZGquDJdqSD8QWLhPN6YbWwp+SeRi1HNKA6pWHQw7IdNSZVlRFDm1ruilhpyABM6kLTgxcZcObWq3B8+ZM3wReTLxDsVEKDLc6sy+TdCvAXuojhO2JkmSTFqyQ8S2yVocYROatE0s0kGmTR3jN/KIn/F/4A6O2Epmg8EsmYQdh3PqIVXphoPI18FlXT0aEK3KBN/VidRB3l9/Tp3ymRpkDJZWVTzEDLuJl8SpK7j+aGY910HqpN2KXTpoRF3sfhw2kYvdVUnyDiMvlfqYx2A3gV7Ecr6CHXZMD+nq/VLArgT3IjYGWIwZ4AusTAzzmSZKKpVarPocIpsX4QfAD9RWHSLu79E5mLhDnNbHgaoTZTfdlsqJc9CbujgQg8+qc/w+Bh1YyMk3z/kPcWfgjiZxWh3H4M44u+mHIqdOIvQyxPnZcn0PP4D/x0tV+UukEg9WWiSs4Igjn2WNTIhS4LEil2VWF7+O1OOxVPkvceDGqENJjXCStLLMOkZKrAjnWydI21wPnFwhXUh6koPLYetIRAxDpk6qA6SOXEGsbeuNgXlezWipdqmSysdBTe31L4atIwWpjcTMFyIOWaoFxXFu98YA+nupHUniQddmoNfSi10/GiGiGKZXlBrh7GGSC5AI+dMdorikpCkohIj9mLl4/COre9XZqA7ptEA8a4Uy2XHNW0kwVmipoArpI1cQa3HpDgkp9Wq9GLga42FW5DtmOAlutP+9CX44NNV0dyOKqtLBld1ZaurBxD/nKEh5cAPSZ1rYQlA++y8w+JGVGRwarT2PauLIbuCdJ0aTS8UxNd21tLMbEIrSih/SsKjQe7qo1ZWpkax59J2KfknfwQZF++fh8/Pz0PJMspHLpfqwDZMnYBp/CIEX/tM7qPEZ1/eQeM4hiqCPuGdHPm/3FWdcA8NGZKNihgna3CDiAK1bsSDVhTKQFAx9cNsRaUS2tKAN6qfC3oegj1shHN8QrIv7346LoyfnzWijjAoROC2YPFCIDJgVSeNOmlnxhkCJrgzuB7PQSsUO4ysdvpUAJ0FzUmiMSlJjkSN+66J6UJ2nVZEDfRokq4q96w7q6oHqpUv/fiI+Bcn3ollMCGWrYtPLoduIkU2zTQuu+0MdyZfzCuMagUNCe8Vm1Vsyjq5FmwSdolQcY7mujn3ASSs+Fx4Vo4b+CPeiD36kT+9/K8PHS7VTqlfHZam4yEWVA/iqlRVA4BBmnxRT1kcBlsQDZ/xBl2/AF9VS+m+AvZSS1AZsXZQJTiuPWRu4Ocq2cCLzkf+ej2OqlsqRg6BCI7bTOfxgElwl6ChTKvOsDU7Y0PswqX3Pdcj5lFnxVkeHmA
L2otrcXlpTUKU6Vq9A0qFeiyVyojpSi0dPV3BNnKejG8/9i6Q7LouKjv48rRM96vCXrGl4LV3qQA1MDW5hUgeKjeCi4k0gBIu9qsv+Mc4EKpcJ0IOeycnYrPSZdrYJ5VQxnzGujcWyb7a7aj+mZPkzHICrP95Pj+IVOPT2oSj0VM6c48qw8FWmn0qlDah3SZuoUv7rtjnVk1uz8Sj2dRgxJC/TqtqoqNYhxYxqmoqimCaUA3OdZH36q1TMkdqdzku4eMABjBoPSYXPNXg4GJBUwWp+ulohrQ5HI3qQZHogwqNTCKmVOTPKm9wB+bJVDOCcyV2gKHX4ENfQE4v6tmpaffDQ+FGVrcIWYOaJCLkQawML2sR2If76raEiFoSq1scR+hsZgaNhZzPlSSovcMJizer2gB+F5KhPfkBbeX8WT3rYgWUEjBzW4jsywY0GSTJJksEot+NlFHHdy88719qgOe7PkhYwmfUr372VcN1bn2S6sNGv5/Eog8FgMP57TJf7BWI5rXoid2W6XB9t29YA9PfnwqhkGqvVvV/zdLXRJL6Zw9vrO//kdSTtuLjn+Mb6KAlniFbheM9f/CXLHW8fl3fzKAP5LTLpcYPZEetWJLWx3PC89rC80+jTh/Viv19ut9vpdLpd7n9KFUptTHdSkxcO2/uMXvKX6RE5MX+4z2/9HuMgCShWvNzPjSnWyKxaNWEJMFYSvGv7sL9fNJ5uyYsEqfa9lstnWNoQL3h+83Yfy07Xm907DG08oB+yq8mr2VzAiVHAkJAf39q00+WLDQkHooGB1ir/UqnUhvGq8WmCf1ndcNUay9URu4yGHWZqo//umsg/w/KopRleEp7WtwnI0/XTjriLTZbG3m4S81bMepfVb8iRpfV2+m+Ma0y3azQKKZCkTRqJXvgm/1Ct/xKWJ01o5mp3b3+6ETGWi/djphOtiazQNlD85fc3nPGfY+y1XGxT4LXd0+l1+0UrbF9PyG0znch3j+fksuCbwu7Wk/5TjNVRysWihcVLmt08LVBh9zvFBvLZ/VvTtiVeyJ2D3+RRyDjx1SbVEtP3ndYsIiC50vGwfoVyFmkuiAaFqLpdrB5eBFTbC4UHtSO9SdweBentu/V8yHRla0KzLBfZV9KE3WbzdDidHs6cDoenzU6QJInnSw8JmrYurPW9xG/qdgphrDcXE88UY/gz9Ea00JEXNq9Fnzc2/K5G7pthLE78pWk/jaAJ7/vy4t7aUj2ibxlju+bt66b9nU7Jbl6L22utnkox29NGkL4kF63o3eatBvXQ1zH2q4NQiqu/hNe048OqhsvxsxgoWx5tW+OvRZ/UlgLkI/vn6/JfFZM1Yft62KCkwkNSKSHsdpunt8VXq6pag6qhxWr1/kDzvnpdLP9TKmkMmqonw2AwGAwGg8FgMBgMBoPBYDAYDMbfyT9LorZpc83OCQAAAABJRU5ErkJggg=="></StatusCard>
</Grid.Column>
<Grid.Column >
<StatusCard url="https://status.aws.amazon.com/rss/elastictranscoder-us-east-1.rss" icon="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAOoAAAB7CAMAAABNcOyaAAAAtFBMVEX///8jLz7/mQD/lQD/lwAgLTz/kwAaKDgXJjceKzv/kQBMU13k5eb/+fT/s24SIjQAAAAAAB//jQAGGy/19fYAESgAFywAABrT1NYoLj2VmJ2ipaqCh44AABMAABc1P0y4ur3GyMusr7OOj5VobHRZYGn/8eL/pk54fYQAAAo+SFP/0KD/37//w5D/wYD/oTj/59D/mh0dITP/rEz/1bH/rlj/oCz/qUH/sWL/y5X/u3T/uXtuGUAoAAAI4ElEQVR4nO2Za2OiOhCGRe4IRW6KogUUrfXurlrb/v//dTIJSEC32+7Rwp6T50tLCDEvM5mZhEaDwWAwGAwGg8FgMBgMBoPBYDAYDMb/FD0K+uPxuB9E+m0GdGM8YBD57m0GvA3+IOyppukgTJVrBefJueMBYlTujRj7xcYIGvt5nyQUyXgyFyalASpDD5Sho4pchihb3VEqNrZguu0+3d8fdlCb1ytYP2pDx27a5k/aipiNKIqmZQV1MG0UeipXQvQGZGquDJdqSD8QWLhPN6YbWwp+SeRi1HNKA6pWHQw7IdNSZVlRFDm1ruilhpyABM6kLTgxcZcObWq3B8+ZM3wReTLxDsVEKDLc6sy+TdCvAXuojhO2JkmSTFqyQ8S2yVocYROatE0s0kGmTR3jN/KIn/F/4A6O2Epmg8EsmYQdh3PqIVXphoPI18FlXT0aEK3KBN/VidRB3l9/Tp3ymRpkDJZWVTzEDLuJl8SpK7j+aGY910HqpN2KXTpoRF3sfhw2kYvdVUnyDiMvlfqYx2A3gV7Ecr6CHXZMD+nq/VLArgT3IjYGWIwZ4AusTAzzmSZKKpVarPocIpsX4QfAD9RWHSLu79E5mLhDnNbHgaoTZTfdlsqJc9CbujgQg8+qc/w+Bh1YyMk3z/kPcWfgjiZxWh3H4M44u+mHIqdOIvQyxPnZcn0PP4D/x0tV+UukEg9WWiSs4Igjn2WNTIhS4LEil2VWF7+O1OOxVPkvceDGqENJjXCStLLMOkZKrAjnWydI21wPnFwhXUh6koPLYetIRAxDpk6qA6SOXEGsbeuNgXlezWipdqmSysdBTe31L4atIwWpjcTMFyIOWaoFxXFu98YA+nupHUniQddmoNfSi10/GiGiGKZXlBrh7GGSC5AI+dMdorikpCkohIj9mLl4/COre9XZqA7ptEA8a4Uy2XHNW0kwVmipoArpI1cQa3HpDgkp9Wq9GLga42FW5DtmOAlutP+9CX44NNV0dyOKqtLBld1ZaurBxD/nKEh5cAPSZ1rYQlA++y8w+JGVGRwarT2PauLIbuCdJ0aTS8UxNd21tLMbEIrSih/SsKjQe7qo1ZWpkax59J2KfknfwQZF++fh8/Pz0PJMspHLpfqwDZMnYBp/CIEX/tM7qPEZ1/eQeM4hiqCPuGdHPm/3FWdcA8NGZKNihgna3CDiAK1bsSDVhTKQFAx9cNsRaUS2tKAN6qfC3oegj1shHN8QrIv7346LoyfnzWijjAoROC2YPFCIDJgVSeNOmlnxhkCJrgzuB7PQSsUO4ysdvpUAJ0FzUmiMSlJjkSN+66J6UJ2nVZEDfRokq4q96w7q6oHqpUv/fiI+Bcn3ollMCGWrYtPLoduIkU2zTQuu+0MdyZfzCuMagUNCe8Vm1Vsyjq5FmwSdolQcY7mujn3ASSs+Fx4Vo4b+CPeiD36kT+9/K8PHS7VTqlfHZam4yEWVA/iqlRVA4BBmnxRT1kcBlsQDZ/xBl2/AF9VS+m+AvZSS1AZsXZQJTiuPWRu4Ocq2cCLzkf+ej2OqlsqRg6BCI7bTOfxgElwl6ChTKvOsDU7Y0PswqX3P
dcj5lFnxVkeHmAL2otrcXlpTUKU6Vq9A0qFeiyVyojpSi0dPV3BNnKejG8/9i6Q7LouKjv48rRM96vCXrGl4LV3qQA1MDW5hUgeKjeCi4k0gBIu9qsv+Mc4EKpcJ0IOeycnYrPSZdrYJ5VQxnzGujcWyb7a7aj+mZPkzHICrP95Pj+IVOPT2oSj0VM6c48qw8FWmn0qlDah3SZuoUv7rtjnVk1uz8Sj2dRgxJC/TqtqoqNYhxYxqmoqimCaUA3OdZH36q1TMkdqdzku4eMABjBoPSYXPNXg4GJBUwWp+ulohrQ5HI3qQZHogwqNTCKmVOTPKm9wB+bJVDOCcyV2gKHX4ENfQE4v6tmpaffDQ+FGVrcIWYOaJCLkQawML2sR2If76raEiFoSq1scR+hsZgaNhZzPlSSovcMJizer2gB+F5KhPfkBbeX8WT3rYgWUEjBzW4jsywY0GSTJJksEot+NlFHHdy88719qgOe7PkhYwmfUr372VcN1bn2S6sNGv5/Eog8FgMP57TJf7BWI5rXoid2W6XB9t29YA9PfnwqhkGqvVvV/zdLXRJL6Zw9vrO//kdSTtuLjn+Mb6KAlniFbheM9f/CXLHW8fl3fzKAP5LTLpcYPZEetWJLWx3PC89rC80+jTh/Viv19ut9vpdLpd7n9KFUptTHdSkxcO2/uMXvKX6RE5MX+4z2/9HuMgCShWvNzPjSnWyKxaNWEJMFYSvGv7sL9fNJ5uyYsEqfa9lstnWNoQL3h+83Yfy07Xm907DG08oB+yq8mr2VzAiVHAkJAf39q00+WLDQkHooGB1ir/UqnUhvGq8WmCf1ndcNUay9URu4yGHWZqo//umsg/w/KopRleEp7WtwnI0/XTjriLTZbG3m4S81bMepfVb8iRpfV2+m+Ma0y3azQKKZCkTRqJXvgm/1Ct/xKWJ01o5mp3b3+6ETGWi/djphOtiazQNlD85fc3nPGfY+y1XGxT4LXd0+l1+0UrbF9PyG0znch3j+fksuCbwu7Wk/5TjNVRysWihcVLmt08LVBh9zvFBvLZ/VvTtiVeyJ2D3+RRyDjx1SbVEtP3ndYsIiC50vGwfoVyFmkuiAaFqLpdrB5eBFTbC4UHtSO9SdweBentu/V8yHRla0KzLBfZV9KE3WbzdDidHs6cDoenzU6QJInnSw8JmrYurPW9xG/qdgphrDcXE88UY/gz9Ea00JEXNq9Fnzc2/K5G7pthLE78pWk/jaAJ7/vy4t7aUj2ibxlju+bt66b9nU7Jbl6L22utnkox29NGkL4kF63o3eatBvXQ1zH2q4NQiqu/hNe048OqhsvxsxgoWx5tW+OvRZ/UlgLkI/vn6/JfFZM1Yft62KCkwkNSKSHsdpunt8VXq6pag6qhxWr1/kDzvnpdLP9TKmkMmqonw2AwGAwGg8FgMBgMBoPBYDAYDMbfyT9LorZpc83OCQAAAABJRU5ErkJggg=="></StatusCard>
</Grid.Column>
<Grid.Column >
<StatusCard url="https://status.pagerduty.com/history.rss" icon="https://www.logo.wine/a/logo/PagerDuty/PagerDuty-Logo.wine.svg"></StatusCard>
</Grid.Column>
<Grid.Column >
<StatusCard url="https://status.twilio.com/history.rss" icon="https://logo.clearbit.com/twilio.com"></StatusCard>
</Grid.Column>
<Grid.Column >
<StatusCard url="https://jira-software.status.atlassian.com/history.rss" icon="https://wac-cdn.atlassian.com/dam/jcr:75ba14ba-5e19-46c7-98ef-473289b982a7/Jira%20Software-blue.svg?cdnVersion=1123"></StatusCard>
</Grid.Column>
<Grid.Column >
<StatusCard url="https://www.githubstatus.com/history.rss" icon="https://github.githubassets.com/images/modules/logos_page/GitHub-Logo.png"></StatusCard>
</Grid.Column>
*/}
</Grid.Row>
</Grid>
]
);
}
}
| 51.791667 | 3,507 | 0.734916 |
9653eb4f70ee3eb95a60cbc32de0d5ef2cbce997 | 546 | php | PHP | database/seeders/UserSeeder.php | laurensclaas/short-url | f933edeb25afbf0aa61087698f8e06ab06bdf34a | [
"MIT"
] | null | null | null | database/seeders/UserSeeder.php | laurensclaas/short-url | f933edeb25afbf0aa61087698f8e06ab06bdf34a | [
"MIT"
] | null | null | null | database/seeders/UserSeeder.php | laurensclaas/short-url | f933edeb25afbf0aa61087698f8e06ab06bdf34a | [
"MIT"
] | null | null | null | <?php
namespace Database\Seeders;
use Illuminate\Database\Seeder;
use App\Models\User;
class UserSeeder extends Seeder
{
    /**
     * Run the database seeds.
     *
     * Guarantees a well-known admin account (admin@test.com / "password")
     * exists, then creates 10 random users via the model factory.
     *
     * @return void
     */
    public function run()
    {
        // Ask the database directly instead of the original
        // User::all()->where(...), which loaded every user into memory
        // just to check for a single email address.
        if (!User::where('email', 'admin@test.com')->exists()) {
            $admin = User::firstOrNew(['email' => 'admin@test.com']);
            $admin->password = bcrypt('password');
            $admin->name = "Admin Test";
            $admin->save();
        }

        User::factory(10)->create();
    }
}
| 20.222222 | 69 | 0.532967 |
d2a1c927894ec6ec03d7fc1292b51fa656c04208 | 2,481 | php | PHP | src/Git/GitoliteClient.php | Rouji/gitlist | 32724a3765626d0b7bf18f00e0144967c9a721fa | [
"BSD-3-Clause"
] | null | null | null | src/Git/GitoliteClient.php | Rouji/gitlist | 32724a3765626d0b7bf18f00e0144967c9a721fa | [
"BSD-3-Clause"
] | null | null | null | src/Git/GitoliteClient.php | Rouji/gitlist | 32724a3765626d0b7bf18f00e0144967c9a721fa | [
"BSD-3-Clause"
] | null | null | null | <?php
namespace GitList\Git;
use GitList\Git\Client as BaseClient;
use GitList\Exception\NoRepositoryException;
class GitoliteClient extends BaseClient
{
    /** @var string Path to the gitolite wrapper script that reports permissions. */
    protected $gitoliteWrapperPath;

    /** @var string Name of the user whose access rights are evaluated. */
    protected $username;

    /**
     * @param array|null $options Must contain 'gitolite.wrapper_path'
     *                            and 'username'.
     */
    public function __construct($options = null)
    {
        parent::__construct($options);
        $this->gitoliteWrapperPath = $options['gitolite.wrapper_path'];
        $this->username = $options['username'];
    }

    /**
     * Filter accessible repositories for a user.
     *
     * @param array $paths Array of paths where repositories will be searched
     * @return array Found repositories, containing their name, path and description sorted
     *               by repository name
     * @throws \RuntimeException When more than one root directory is given or
     *                           gitolite permission data cannot be read.
     * @throws NoRepositoryException When no repository is readable by the user.
     */
    public function getRepositories($paths)
    {
        $allRepositories = parent::getRepositories($paths);

        // Gitolite permissions are resolved relative to a single root.
        if (count($paths) !== 1) {
            throw new \RuntimeException('Gitolite requires only one root repo directory.');
        }

        // Normalise the root once; the original re-checked the trailing
        // separator on every loop iteration even though it never changes.
        $path = $paths[0];
        if (mb_substr($path, -1) !== DIRECTORY_SEPARATOR) {
            $path .= DIRECTORY_SEPARATOR;
        }

        $output = $this->getGitoliteData();
        if (!$output) {
            throw new \RuntimeException('There is a problem getting repo info.');
        }

        if (!isset($output['repos'])) {
            // No permission data at all: nothing is accessible.
            $allRepositories = array();
        }

        foreach ($allRepositories as $repository => $options) {
            // Repository name = path relative to the root, without ".git".
            $repositoryName = preg_replace('/^' . preg_quote($path, '/') . '/', '', $options['path']);
            $repositoryName = preg_replace('/\.git$/', '', $repositoryName);

            // Keep only repositories the user can read (perms.R === 1).
            if (!isset($output['repos'][$repositoryName]['perms']['R'])
                || $output['repos'][$repositoryName]['perms']['R'] !== 1) {
                unset($allRepositories[$repository]);
            }
        }

        if (empty($allRepositories)) {
            throw new NoRepositoryException('No repository is accessible to you.');
        }

        return $allRepositories;
    }

    /**
     * Run the gitolite wrapper for the configured user and decode its
     * JSON output.
     *
     * @return array|false Decoded permission data, or false on failure.
     */
    protected function getGitoliteData()
    {
        $cmd = $this->gitoliteWrapperPath . " " . escapeshellarg($this->username);
        $output = shell_exec($cmd);
        if (!$output) {
            return false;
        }

        $output = json_decode($output, true);
        if (!$output) {
            return false;
        }

        return $output;
    }
}
| 29.188235 | 179 | 0.559452 |
864a0df431c0e2f0efd8b9554691d00db6713974 | 1,768 | rs | Rust | plugins/sf_player/src/spawn_player.rs | will-hart/ld48 | 05626d3b59840c5dce0a772cbe00d16af72064bb | [
"Apache-2.0",
"MIT"
] | 1 | 2021-04-27T00:51:07.000Z | 2021-04-27T00:51:07.000Z | plugins/sf_player/src/spawn_player.rs | will-hart/ld48 | 05626d3b59840c5dce0a772cbe00d16af72064bb | [
"Apache-2.0",
"MIT"
] | null | null | null | plugins/sf_player/src/spawn_player.rs | will-hart/ld48 | 05626d3b59840c5dce0a772cbe00d16af72064bb | [
"Apache-2.0",
"MIT"
] | null | null | null | use bevy::prelude::*;
use sf_core::{dims::Dims, LightingTarget, Player, Position};
/// Spawns the player entity at a fixed grid position, attaching the
/// components that drive lighting, movement and sinking, plus a child
/// entity carrying the animated sprite.
pub fn spawn_player(
    mut commands: Commands,
    asset_server: Res<AssetServer>,
    mut texture_atlases: ResMut<Assets<TextureAtlas>>,
    dims: Res<Dims>,
) {
    // Character sheet: five 16x32 frames laid out in a single row.
    let texture_handle = asset_server.load("character.png");
    let texture_atlas = TextureAtlas::from_grid(texture_handle, Vec2::new(16., 32.0), 5, 1);
    let texture_atlas_handle = texture_atlases.add(texture_atlas);

    // Starting position on the simulation grid, converted to world space.
    let player_pos = (10, 50);
    let player_tx =
        Transform::from_translation(dims.grid_to_world(player_pos.0, player_pos.1).extend(0.));

    commands
        .spawn()
        .insert(LightingTarget {
            lighting_strength: 15,
            lighting_decay_rate: 10.,
            light_growth_rate: 20,
            next_lighting_decay: 0.,
            max_light_strength: 25,
        })
        // `player_tx` is not used again, so move it in — the original
        // called `.clone()` needlessly here.
        .insert(player_tx)
        .insert(GlobalTransform::from_translation(Vec3::ZERO))
        .insert(Timer::from_seconds(0.5, true))
        .insert(Position(player_pos.0, player_pos.1))
        .insert(Player {
            velocity: Vec2::ZERO,
            move_speed: 200.,
            is_grounded: false,
            frames_since_jumped: 0,
            jump_cooldown: 0,
            did_jump: false,
            next_update: 0.,
            slime_target: 0,
            sink_rate: 1. / 10., // 10 per second
            next_sink: 0.,
        })
        .with_children(|parent| {
            // Sprite child offset 16 px upward relative to the parent —
            // presumably to align the 16x32 sprite with the grid cell;
            // TODO confirm against the rendering code.
            parent.spawn().insert_bundle(SpriteSheetBundle {
                texture_atlas: texture_atlas_handle,
                transform: Transform::from_translation(Vec3::new(0.0, 16.0, 0.0)),
                ..Default::default()
            });
        });

    println!("Spawned player");
}
| 32.740741 | 95 | 0.587104 |
4b9467f71b9a2905e55bf9fc0989a0228fb5fb4a | 12,998 | swift | Swift | Tests/RxGRDBTests/ValueObservationTests.swift | spottraceability/RxGRDB | c220ba065cdacecda95313417a5f5331f1f840c2 | [
"MIT"
] | 199 | 2017-04-06T16:07:08.000Z | 2022-03-04T08:13:13.000Z | Tests/RxGRDBTests/ValueObservationTests.swift | spottraceability/RxGRDB | c220ba065cdacecda95313417a5f5331f1f840c2 | [
"MIT"
] | 52 | 2017-04-06T11:59:27.000Z | 2021-11-25T12:24:10.000Z | Tests/RxGRDBTests/ValueObservationTests.swift | spottraceability/RxGRDB | c220ba065cdacecda95313417a5f5331f1f840c2 | [
"MIT"
] | 45 | 2017-04-06T11:41:52.000Z | 2022-02-03T21:44:17.000Z | import XCTest
import GRDB
import RxSwift
import RxGRDB
// Minimal record used by every test in this file: id / name / optional score.
private struct Player: Codable, FetchableRecord, PersistableRecord {
    var id: Int64
    var name: String
    var score: Int?
    
    // Creates the "player" table the tests write into (auto-incremented
    // primary key, non-null name, nullable score).
    static func createTable(_ db: Database) throws {
        try db.create(table: "player") { t in
            t.autoIncrementedPrimaryKey("id")
            t.column("name", .text).notNull()
            t.column("score", .integer)
        }
    }
}
// Tests for ValueObservation's RxSwift bridge (.rx.observe(in:)), covering
// both the default (asynchronous) scheduler and the .immediate scheduler,
// against DatabaseQueue and DatabasePool writers.
class ValueObservationTests : XCTestCase {
    // MARK: - Default Scheduler
    
    // Default scheduler: the observable emits the initial fetch result and a
    // value after each committed change. DatabaseQueue delivers every change;
    // DatabasePool observations may coalesce, hence the recording-match check.
    func testDefaultSchedulerChangesNotifications() throws {
        // Prepares the player table before handing the writer to the test.
        func setUp<Writer: DatabaseWriter>(_ writer: Writer) throws -> Writer {
            try writer.write(Player.createTable)
            return writer
        }
        
        func test(writer: DatabaseWriter) throws {
            let disposeBag = DisposeBag()
            try withExtendedLifetime(disposeBag) {
                let testSubject = ReplaySubject<Int>.createUnbounded()
                ValueObservation
                    .tracking(Player.fetchCount)
                    .rx.observe(in: writer)
                    .subscribe(testSubject)
                    .disposed(by: disposeBag)
                
                // One insert outside a transaction, then two inside a single
                // transaction: counts 0 (initial), 1, 3 are expected.
                try writer.writeWithoutTransaction { db in
                    try Player(id: 1, name: "Arthur", score: 1000).insert(db)
                    try db.inTransaction {
                        try Player(id: 2, name: "Barbara", score: 750).insert(db)
                        try Player(id: 3, name: "Craig", score: 500).insert(db)
                        return .commit
                    }
                }
                
                let expectedElements = [0, 1, 3]
                if writer is DatabaseQueue {
                    let elements = try testSubject
                        .take(expectedElements.count)
                        .toBlocking(timeout: 1).toArray()
                    XCTAssertEqual(elements, expectedElements)
                } else {
                    // Pool observations may skip or repeat values: use the
                    // relaxed recording-match helper defined below.
                    let elements = try testSubject
                        .take(until: { $0 == expectedElements.last }, behavior: .inclusive)
                        .toBlocking(timeout: 1).toArray()
                    assertValueObservationRecordingMatch(recorded: elements, expected: expectedElements)
                }
            }
        }
        
        try Test(test)
            .run { try setUp(DatabaseQueue()) }
            .runAtTemporaryDatabasePath { try setUp(DatabaseQueue(path: $0)) }
            .runAtTemporaryDatabasePath { try setUp(DatabasePool(path: $0)) }
    }
    
    // Default scheduler: the first element must NOT be delivered synchronously
    // on subscription. The semaphore forces the callback to block until the
    // subscribing code has returned.
    func testDefaultSchedulerFirstValueIsEmittedAsynchronously() throws {
        func setUp<Writer: DatabaseWriter>(_ writer: Writer) throws -> Writer {
            try writer.write(Player.createTable)
            return writer
        }
        
        func test(writer: DatabaseWriter) throws {
            let disposeBag = DisposeBag()
            withExtendedLifetime(disposeBag) {
                let expectation = self.expectation(description: "")
                let semaphore = DispatchSemaphore(value: 0)
                ValueObservation
                    .tracking(Player.fetchCount)
                    .rx.observe(in: writer)
                    .subscribe(onNext: { _ in
                        // Blocks until the subscription call has returned,
                        // proving asynchronous delivery.
                        semaphore.wait()
                        expectation.fulfill()
                    })
                    .disposed(by: disposeBag)
                
                semaphore.signal()
                waitForExpectations(timeout: 1, handler: nil)
            }
        }
        
        try Test(test)
            .run { try setUp(DatabaseQueue()) }
            .runAtTemporaryDatabasePath { try setUp(DatabaseQueue(path: $0)) }
            .runAtTemporaryDatabasePath { try setUp(DatabasePool(path: $0)) }
    }
    
    // Default scheduler: a failing fetch terminates the observable with a
    // DatabaseError instead of completing.
    func testDefaultSchedulerError() throws {
        func test(writer: DatabaseWriter) throws {
            let observable = ValueObservation
                .tracking { try $0.execute(sql: "THIS IS NOT SQL") }
                .rx.observe(in: writer)
            let result = observable.toBlocking().materialize()
            switch result {
            case .completed:
                XCTFail("Expected error")
            case let .failed(elements: _, error: error):
                XCTAssertNotNil(error as? DatabaseError)
            }
        }
        
        try Test(test)
            .run { DatabaseQueue() }
            .runAtTemporaryDatabasePath { try DatabaseQueue(path: $0) }
            .runAtTemporaryDatabasePath { try DatabasePool(path: $0) }
    }
    
    // MARK: - Immediate Scheduler
    
    // Same change-notification scenario as above, but with .immediate
    // scheduling for the initial element.
    func testImmediateSchedulerChangesNotifications() throws {
        func setUp<Writer: DatabaseWriter>(_ writer: Writer) throws -> Writer {
            try writer.write(Player.createTable)
            return writer
        }
        
        func test(writer: DatabaseWriter) throws {
            let disposeBag = DisposeBag()
            try withExtendedLifetime(disposeBag) {
                let testSubject = ReplaySubject<Int>.createUnbounded()
                ValueObservation
                    .tracking(Player.fetchCount)
                    .rx.observe(in: writer, scheduling: .immediate)
                    .subscribe(testSubject)
                    .disposed(by: disposeBag)
                
                try writer.writeWithoutTransaction { db in
                    try Player(id: 1, name: "Arthur", score: 1000).insert(db)
                    try db.inTransaction {
                        try Player(id: 2, name: "Barbara", score: 750).insert(db)
                        try Player(id: 3, name: "Craig", score: 500).insert(db)
                        return .commit
                    }
                }
                
                let expectedElements = [0, 1, 3]
                if writer is DatabaseQueue {
                    let elements = try testSubject
                        .take(expectedElements.count)
                        .toBlocking(timeout: 1).toArray()
                    XCTAssertEqual(elements, expectedElements)
                } else {
                    let elements = try testSubject
                        .take(until: { $0 == expectedElements.last }, behavior: .inclusive)
                        .toBlocking(timeout: 1).toArray()
                    assertValueObservationRecordingMatch(recorded: elements, expected: expectedElements)
                }
            }
        }
        
        try Test(test)
            .run { try setUp(DatabaseQueue()) }
            .runAtTemporaryDatabasePath { try setUp(DatabaseQueue(path: $0)) }
            .runAtTemporaryDatabasePath { try setUp(DatabasePool(path: $0)) }
    }
    
    // Immediate scheduler: the first element IS delivered synchronously —
    // the semaphore is signaled from within the subscription call itself.
    func testImmediateSchedulerEmitsFirstValueSynchronously() throws {
        func setUp<Writer: DatabaseWriter>(_ writer: Writer) throws -> Writer {
            try writer.write(Player.createTable)
            return writer
        }
        
        func test(writer: DatabaseWriter) throws {
            let disposeBag = DisposeBag()
            withExtendedLifetime(disposeBag) {
                let semaphore = DispatchSemaphore(value: 0)
                ValueObservation
                    .tracking(Player.fetchCount)
                    .rx.observe(in: writer, scheduling: .immediate)
                    .subscribe(onNext: { _ in
                        semaphore.signal()
                    })
                    .disposed(by: disposeBag)
                
                semaphore.wait()
            }
        }
        
        try Test(test)
            .run { try setUp(DatabaseQueue()) }
            .runAtTemporaryDatabasePath { try setUp(DatabaseQueue(path: $0)) }
            .runAtTemporaryDatabasePath { try setUp(DatabasePool(path: $0)) }
    }
    
    // Immediate scheduler: failures still surface as DatabaseError.
    func testImmediateSchedulerError() throws {
        func test(writer: DatabaseWriter) throws {
            let observable = ValueObservation
                .tracking { try $0.execute(sql: "THIS IS NOT SQL") }
                .rx.observe(in: writer, scheduling: .immediate)
            let result = observable.toBlocking().materialize()
            switch result {
            case .completed:
                XCTFail("Expected error")
            case let .failed(elements: _, error: error):
                XCTAssertNotNil(error as? DatabaseError)
            }
        }
        
        try Test(test)
            .run { DatabaseQueue() }
            .runAtTemporaryDatabasePath { try DatabaseQueue(path: $0) }
            .runAtTemporaryDatabasePath { try DatabasePool(path: $0) }
    }
    
    // Regression test: chaining a reactive write into an .immediate
    // observation must deliver the written row (GRDB issue 780).
    func testIssue780() throws {
        func test(dbPool: DatabasePool) throws {
            struct Entity: Codable, FetchableRecord, PersistableRecord, Equatable {
                var id: Int64
                var name: String
            }
            
            try dbPool.write { db in
                try db.create(table: "entity") { t in
                    t.autoIncrementedPrimaryKey("id")
                    t.column("name", .text)
                }
            }
            
            let observation = ValueObservation.tracking(Entity.fetchAll)
            let entities = try dbPool.rx
                .write { db in try Entity(id: 1, name: "foo").insert(db) }
                .asCompletable()
                .andThen(observation.rx.observe(in: dbPool, scheduling: .immediate))
                .take(1)
                .toBlocking(timeout: 1)
                .single()
            XCTAssertEqual(entities, [Entity(id: 1, name: "foo")])
        }
        
        try Test(test).runAtTemporaryDatabasePath { try DatabasePool(path: $0) }
    }
    
    // MARK: - Utils
    
    /// This test checks the fundamental promise of ValueObservation by
    /// comparing recorded values with expected values.
    ///
    /// Recorded values match the expected values if and only if:
    ///
    /// - The last recorded value is the last expected value
    /// - Recorded values are in the same order as expected values
    ///
    /// However, both missing and repeated values are allowed - with the only
    /// exception of the last expected value which can not be missed.
    ///
    /// For example, if the expected values are [0, 1], then the following
    /// recorded values match:
    ///
    /// - `[0, 1]` (identical values)
    /// - `[1]` (missing value but the last one)
    /// - `[0, 0, 1, 1]` (repeated value)
    ///
    /// However the following recorded values don't match, and fail the test:
    ///
    /// - `[1, 0]` (wrong order)
    /// - `[0]` (missing last value)
    /// - `[]` (missing last value)
    /// - `[0, 1, 2]` (unexpected value)
    /// - `[1, 0, 1]` (unexpected value)
    func assertValueObservationRecordingMatch<Value>(
        recorded recordedValues: [Value],
        expected expectedValues: [Value],
        _ message: @autoclosure () -> String = "",
        file: StaticString = #file,
        line: UInt = #line)
        where Value: Equatable
    {
        _assertValueObservationRecordingMatch(
            recorded: recordedValues,
            expected: expectedValues,
            // Last value can't be missed
            allowMissingLastValue: false,
            message(), file: file, line: line)
    }
    
    // Recursive worker for the matcher above: peels off the trailing run of
    // the last expected value from both sequences, checks it was observed
    // (unless allowMissingLastValue), then recurses on the remainders.
    private func _assertValueObservationRecordingMatch<R, E>(
        recorded recordedValues: R,
        expected expectedValues: E,
        allowMissingLastValue: Bool,
        _ message: @autoclosure () -> String = "",
        file: StaticString = #file,
        line: UInt = #line)
        where
        R: BidirectionalCollection,
        E: BidirectionalCollection,
        R.Element == E.Element,
        R.Element: Equatable
    {
        guard let value = expectedValues.last else {
            // Nothing left to expect: any leftover recorded values are bogus.
            if !recordedValues.isEmpty {
                XCTFail("unexpected recorded prefix \(Array(recordedValues)) - \(message())", file: file, line: line)
            }
            return
        }
        
        let recordedSuffix = recordedValues.reversed().prefix(while: { $0 == value })
        let expectedSuffix = expectedValues.reversed().prefix(while: { $0 == value })
        if !allowMissingLastValue {
            // Both missing and repeated values are allowed in the recorded values.
            // This is because of asynchronous DatabasePool observations.
            if recordedSuffix.isEmpty {
                XCTFail("missing expected value \(value) - \(message())", file: file, line: line)
            }
        }
        
        let remainingRecordedValues = recordedValues.prefix(recordedValues.count - recordedSuffix.count)
        let remainingExpectedValues = expectedValues.prefix(expectedValues.count - expectedSuffix.count)
        _assertValueObservationRecordingMatch(
            recorded: remainingRecordedValues,
            expected: remainingExpectedValues,
            // Other values can be missed
            allowMissingLastValue: true,
            message(), file: file, line: line)
    }
}
| 39.749235 | 117 | 0.539929 |
90ca5ff9989f68933ec0da0818b11f06a0b594d9 | 1,224 | py | Python | locustfile_view_order.py | Ashutosh-Kaushik/ss-load-test-locust | 286e3cecad39f5ce991717e117392332a9810d64 | [
"Apache-2.0"
] | 1 | 2022-03-31T05:34:33.000Z | 2022-03-31T05:34:33.000Z | locustfile_view_order.py | Ashutosh-Kaushik/ss-load-test-locust | 286e3cecad39f5ce991717e117392332a9810d64 | [
"Apache-2.0"
] | null | null | null | locustfile_view_order.py | Ashutosh-Kaushik/ss-load-test-locust | 286e3cecad39f5ce991717e117392332a9810d64 | [
"Apache-2.0"
] | null | null | null | import csv
import random
import warnings
import os
from locust import HttpUser, task, between
# Static request payload for the "viewOrder" API call, captured from a
# mobile-client session.
# NOTE(review): AccessToken, DeviceId and the order/user identifiers are
# hard-coded — presumably throwaway load-test credentials; confirm they are
# safe to keep in version control.
body = {
    "RequestHeader": {
        "AppType": "N",
        "AppVersion": "4.0.4",
        "AppVersionCode": "109",
        "DeviceId": "81653dce-0dd2-4201-8916-4aecbdd89269",
        "DeviceDensity": "320",
        "DeviceDensityType": "xhdpi",
        "DeviceHeight": "1184",
        "DeviceWidth": "768",
        "DeviceName": "Unknown Google Nexus 4",
        "DeviceOsInfo": "5.1",
        "NetworkInfo": "Wifi",
        "AccessToken": "PDWZ5pStjE"
    },
    "RequestURI": {
        "Section": "viewOrder"
    },
    "Params": {
        "orderId": "8933318847",
        "UserId": "NDkzNzcyNA=="
    }
}

# HTTP headers sent with every request (Content-Length is static — it is
# what the recording tool captured, not recomputed per request).
header = {
    "Host": "api.sastasundar.com",
    "Content-Type": "application/json; charset=utf-8",
    "Content-Length": "421",
    "Accept-Encoding": "gzip, deflate",
    "User-Agent": "okhttp/5.0.0-alpha.2"
}
class SastaSundarCheckout(HttpUser):
    """Locust user that repeatedly posts the static viewOrder payload.

    The target host can be overridden via the TARGET_URL environment
    variable; it defaults to the production API endpoint.
    """

    host = os.getenv('TARGET_URL', 'https://api.sastasundar.com')

    def on_start(self):
        # TLS verification is disabled for load testing; silence the
        # per-request InsecureRequestWarning noise this would produce.
        warnings.filterwarnings("ignore")
        self.client.verify = False

    @task
    def sasta_sundar_search_query(self):
        """POST the viewOrder request; Locust records status and latency."""
        # The response body is not inspected, so don't bind it to an unused
        # local (the original assigned it to `response` and ignored it).
        self.client.post(
            "/sastasundar/index.php/order/rest_order/postData",
            headers=header,
            json=body,
        )
1922254d47853adfe3088a5d6344ed4c19f1fcbb | 2,412 | sql | SQL | softApp/accounts.sql | nnamdimykel/Simple-Login-Page | f6e0e588f662e1743e798176ab01d80db4b463b9 | [
"Apache-2.0"
] | null | null | null | softApp/accounts.sql | nnamdimykel/Simple-Login-Page | f6e0e588f662e1743e798176ab01d80db4b463b9 | [
"Apache-2.0"
] | null | null | null | softApp/accounts.sql | nnamdimykel/Simple-Login-Page | f6e0e588f662e1743e798176ab01d80db4b463b9 | [
"Apache-2.0"
] | null | null | null | -- phpMyAdmin SQL Dump
-- version 4.8.2
-- https://www.phpmyadmin.net/
--
-- Host: 127.0.0.1
-- Generation Time: Jul 24, 2018 at 07:54 PM
-- Server version: 10.1.34-MariaDB
-- PHP Version: 7.2.7
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET AUTOCOMMIT = 0;
START TRANSACTION;
SET time_zone = "+00:00";
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;
--
-- Database: `accounts`
--
-- --------------------------------------------------------
--
-- Table structure for table `users`
--
CREATE TABLE `users` (
`id` int(11) NOT NULL,
`first_name` varchar(50) NOT NULL,
`last_name` varchar(50) NOT NULL,
`email` varchar(100) NOT NULL,
`password` varchar(100) NOT NULL,
`telephone` int(200) NOT NULL,
`home_address` varchar(100) NOT NULL DEFAULT 'n/a',
`active` tinyint(1) NOT NULL DEFAULT '0'
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data for table `users`
--
INSERT INTO `users` (`id`, `first_name`, `last_name`, `email`, `password`, `telephone`, `home_address`, `active`) VALUES
(3, 'm', 'm', 'mykelnnamdi@live.com', '$2y$10$6Si9aCrThl6CZPNzbmZTBuHqzASMw4q/mnhugHv4llWtg0jdUxO6K', 0, '0', 0),
(10, 'Micheal', 'Joseph', 'mjoseph@gmail.com', '$2y$10$0foo6BFLmORLfzfyC/e8kewPuK4o6yuEaKkU8pi97Cfrex/5TICCa', 0, '0', 0),
(11, 'Joseph', 'Nnamdi', 'jn@gmail.com', '$2y$10$cQL06f2NMtw34tfYOfsHzuJAc4FhZxxOJYoXTkhXh0sDUTza2uH7i', 0, '0', 0),
(14, 'ikenyere', 'joseph', 'ik@gmail.com', '123', 8099889, 'uhd', 0),
(15, '', '', '', '', 0, '', 0),
(16, 'eD', 'D', 'josephelijaj45@gmail.com', '$2y$10$ULgPE3n6vvW4ERq1uOVgFeby/.eXHS6Tv/r.Sy5BAqXiMQHV7TIPe', 0, '', 0),
(17, 'oni', ' queen', 'oni@queen.com', '$2y$10$yiX3EHCP8/91MYHmDljwWeeIKoJrW5P5w2xmDlem2wJ5jn/QnFpI.', 812009759, '123,awolowo road,ikoyi,lagos', 0);
--
-- Indexes for dumped tables
--
--
-- Indexes for table `users`
--
ALTER TABLE `users`
ADD PRIMARY KEY (`id`);
--
-- AUTO_INCREMENT for dumped tables
--
--
-- AUTO_INCREMENT for table `users`
--
ALTER TABLE `users`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=18;
COMMIT;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
| 30.531646 | 149 | 0.673715 |
16b566b8146c8d68b931b4d53463cf39ce662642 | 140 | ts | TypeScript | client/src/geometry/index.ts | gelzis/atlantis-economy-adivsor | 5aebc77fa84ebf581fd27306d37ab61ed1733322 | [
"MIT"
] | 1 | 2020-10-28T18:54:44.000Z | 2020-10-28T18:54:44.000Z | client/src/geometry/index.ts | gelzis/atlantis-economy-adivsor | 5aebc77fa84ebf581fd27306d37ab61ed1733322 | [
"MIT"
] | 21 | 2020-01-02T22:25:53.000Z | 2021-09-20T22:25:20.000Z | client/src/geometry/index.ts | valdisz/atlantis-economy-advisor | d0fc57827857e350ff5a11b216925b774ea2bbbc | [
"MIT"
] | 2 | 2020-02-21T10:44:11.000Z | 2021-07-04T10:24:53.000Z | export * from './doubled-coord'
export * from './offset-coord'
export * from './hex'
export * from './layout'
export * from './orientation'
| 23.333333 | 31 | 0.664286 |
395ea0487ffe5d75ebd3ed4940810f86066973f9 | 85 | html | HTML | packages/govtnz-ds-upstream/src/upstream/govuk/2.12.0/button__with-active-state.html | NZTA/govtnz-design-system | 0828a29cec93da0d43045b422d830e4064e6ad1a | [
"MIT"
] | 1 | 2020-09-28T00:56:23.000Z | 2020-09-28T00:56:23.000Z | packages/govtnz-ds-upstream/src/upstream/govuk/2.12.0/button__with-active-state.html | NZTA/govtnz-design-system | 0828a29cec93da0d43045b422d830e4064e6ad1a | [
"MIT"
] | 2 | 2022-02-14T22:41:39.000Z | 2022-02-27T22:06:39.000Z | packages/govtnz-ds-upstream/src/upstream/govuk/2.7.0/button__with-active-state.html | NZTA/govtnz-design-system | 0828a29cec93da0d43045b422d830e4064e6ad1a | [
"MIT"
] | null | null | null | <button class="govuk-button :active" name="active" type="submit">
Active
</button>
| 21.25 | 65 | 0.705882 |
000212b93b72053f6f4b9bb49f7b2ee984d28fff | 1,241 | kt | Kotlin | app/src/main/java/com/feechan/footballapps/presenter/teams/teamdetail/player/playerdetail/PlayerDetailPresenter.kt | feechanz/FootballApps | d83b5a3c00ab005d7daaa061f12dd8fb59a76941 | [
"Apache-2.0"
] | null | null | null | app/src/main/java/com/feechan/footballapps/presenter/teams/teamdetail/player/playerdetail/PlayerDetailPresenter.kt | feechanz/FootballApps | d83b5a3c00ab005d7daaa061f12dd8fb59a76941 | [
"Apache-2.0"
] | null | null | null | app/src/main/java/com/feechan/footballapps/presenter/teams/teamdetail/player/playerdetail/PlayerDetailPresenter.kt | feechanz/FootballApps | d83b5a3c00ab005d7daaa061f12dd8fb59a76941 | [
"Apache-2.0"
] | null | null | null | package com.feechan.footballapps.presenter.teams.teamdetail.player.playerdetail
import com.feechan.footballapps.data.SportApiDB
import com.feechan.footballapps.data.network.response.PlayerDetailResponse
import com.feechan.footballapps.data.repository.ApiRepository
import com.feechan.footballapps.utils.CoroutineContextProvider
import com.google.gson.Gson
import kotlinx.coroutines.experimental.async
import org.jetbrains.anko.coroutines.experimental.bg
class PlayerDetailPresenter(private val view: PlayerDetailContract.View,
private val apiRepository: ApiRepository,
private val gson: Gson, private val context: CoroutineContextProvider = CoroutineContextProvider()) : PlayerDetailContract.Presenter{
override fun destroy() {
}
override fun getPlayerDetail(playerid: String) {
view.showLoadingBar()
async(context.main){
val data = bg{
gson.fromJson(apiRepository
.doRequest(SportApiDB.getPlayerDetail(playerid)),
PlayerDetailResponse::class.java
)
}
view.showPlayer(data.await().players)
view.hideLoadingBar()
}
}
} | 40.032258 | 159 | 0.691378 |
8edd3f90163a5aaa82c95a14211e272a417de14a | 928 | rb | Ruby | app/controllers/groups_controller.rb | SVRourke/quizzical | 11b3d54f187a4f9e5df7df033af48fdf917e02ed | [
"MIT"
] | 4 | 2021-07-18T16:47:26.000Z | 2022-02-02T22:35:31.000Z | app/controllers/groups_controller.rb | SVRourke/quizzical | 11b3d54f187a4f9e5df7df033af48fdf917e02ed | [
"MIT"
] | null | null | null | app/controllers/groups_controller.rb | SVRourke/quizzical | 11b3d54f187a4f9e5df7df033af48fdf917e02ed | [
"MIT"
] | 2 | 2022-02-02T22:35:49.000Z | 2022-02-06T01:53:55.000Z | class GroupsController < ApplicationController
before_action :unauthorized_redirect
after_action :verify_authorized
def index
authorize Group, :index?
@groups = Group.all
end
def show
@group= Group.find(params[:id])
authorize @group, :show?
render :teacher_view_group and return if current_user.teacher
end
def new
authorize Group, :new?
@group = Group.new
end
def create
authorize Group, :create?
@group = Group.new(group_params.merge({teacher: current_user}))
redirect_to group_path(@group) and return if @group.save()
render :new
end
def destroy
@group = Group.find(params[:id])
authorize @group, :destroy?
@group.destroy
redirect_to dashboards_path()
end
private
def group_params
params.require(:group).permit(:name)
end
end | 22.634146 | 71 | 0.622845 |
4eab26d7a27d0ea2d3981dded8f831a869176cae | 1,446 | lua | Lua | Player/BodyFSM/RoboCup/bodyRobocupKickOld.lua | ToyotaResearchInstitute/rad-robot | 9a47e4d88382719ab9bf142932fbcc83dcbcd665 | [
"MIT"
] | null | null | null | Player/BodyFSM/RoboCup/bodyRobocupKickOld.lua | ToyotaResearchInstitute/rad-robot | 9a47e4d88382719ab9bf142932fbcc83dcbcd665 | [
"MIT"
] | null | null | null | Player/BodyFSM/RoboCup/bodyRobocupKickOld.lua | ToyotaResearchInstitute/rad-robot | 9a47e4d88382719ab9bf142932fbcc83dcbcd665 | [
"MIT"
] | 2 | 2018-06-04T12:38:54.000Z | 2018-09-22T10:31:27.000Z | local state = {}
state._NAME = ...
local Body = require'Body'
local util = require'util'
local vector = require'vector'
local libStep = require'libStep'
-- FSM coordination
local simple_ipc = require'simple_ipc'
local motion_ch = simple_ipc.new_publisher('MotionFSM!')
-- Get the human guided approach
require'hcm'
-- Get the robot guided approach
require'wcm'
require'mcm'
local kick_started
function state.entry()
print(state._NAME..' Entry' )
-- Update the time of entry
local t_entry_prev = t_entry -- When entry was previously called
t_entry = Body.get_time()
t_update = t_entry
mcm.set_walk_stoprequest(1)
kick_started = false
end
function state.update()
if Config.disable_kick then
local ballx = wcm.get_ball_x() - Config.fsm.bodyRobocupApproach.target[1]
local bally = wcm.get_ball_y()
local ballr = math.sqrt(ballx*ballx+bally*bally)
if ballr > 0.6 then
return 'done'
end
return
end
local t = Body.get_time()
local dt = t - t_update
-- Save this at the last update time
t_update = t
if mcm.get_walk_ismoving()==0 then
if kick_started then
if mcm.get_walk_kicktype()==1 then
return 'testdone' --this means testing mode (don't run body fsm)
else
return 'done'
end
else
mcm.set_walk_steprequest(1)
kick_started = true
end
end
end
function state.exit()
print(state._NAME..' Exit' )
end
return state
| 21.909091 | 78 | 0.688105 |
85d7b17b65ba3d59043c8f14a8103579c99dd4eb | 935 | js | JavaScript | src/client/js/formHandler.js | AlvaWebDeveloper/ProjectFour | f0a89608204f35ded9fdf5a261ff84a0c67db523 | [
"MIT"
] | null | null | null | src/client/js/formHandler.js | AlvaWebDeveloper/ProjectFour | f0a89608204f35ded9fdf5a261ff84a0c67db523 | [
"MIT"
] | 4 | 2021-05-11T10:57:04.000Z | 2022-02-27T03:15:10.000Z | src/client/js/formHandler.js | AlvaWebDeveloper/ProjectFour | f0a89608204f35ded9fdf5a261ff84a0c67db523 | [
"MIT"
] | null | null | null | import {getHandler} from './getHandler.js'
const entry = document.querySelector('#results');
function handleSubmit(event) {
event.preventDefault()
// check what text was put into the form field
let formText = document.getElementById('sentences').value
const postData = async ( url = '', data = {})=>{
const response = await fetch(url, {
method: 'POST',
credentials: 'same-origin',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify(data),
}).then(res => {
return data;
});
}
if (!formText || /^\s*$/.test(formText)){
entry.innerHTML = `<div>Please Enter a Sentence</div>`;
console.log('please enter a sentence');
} else {
postData('/test', {formText});
}
setTimeout(function(){
Client.getHandler('/getresult');
},1500);
console.log('connected');
}
export { handleSubmit}
| 22.261905 | 63 | 0.588235 |
c7f41c0ddb320dab2b4d6996225bd87f090ca912 | 2,137 | java | Java | app/src/main/java/com/example/floralboutique/data/dao/OrderDao.java | roris/TheFloralBoutique | 01e51bc011cbf65fb37c8f7ed93d085b628e3e48 | [
"Apache-2.0"
] | null | null | null | app/src/main/java/com/example/floralboutique/data/dao/OrderDao.java | roris/TheFloralBoutique | 01e51bc011cbf65fb37c8f7ed93d085b628e3e48 | [
"Apache-2.0"
] | null | null | null | app/src/main/java/com/example/floralboutique/data/dao/OrderDao.java | roris/TheFloralBoutique | 01e51bc011cbf65fb37c8f7ed93d085b628e3e48 | [
"Apache-2.0"
] | null | null | null | package com.example.floralboutique.data.dao;
import android.arch.lifecycle.LiveData;
import android.arch.persistence.room.Dao;
import android.arch.persistence.room.Insert;
import android.arch.persistence.room.Query;
import android.arch.persistence.room.Transaction;
import android.support.annotation.WorkerThread;
import com.example.floralboutique.data.entity.FlowerOrder;
import com.example.floralboutique.data.entity.Order;
import com.example.floralboutique.ui.cart.CartItemModel;
import com.example.floralboutique.ui.member.orderlist.MemberOrderListItemModel;
import java.util.Date;
import java.util.List;
import java.util.Vector;
@Dao
public abstract class OrderDao {
@WorkerThread
@Insert
public abstract void insert(Order order);
@WorkerThread
@Query("SELECT * FROM Orders WHERE user=:username and date=:date")
public abstract Order findOrderByUsernameAndDate(String username, Date date);
@WorkerThread
@Transaction
public void save(String username, List<CartItemModel> items, FlowerOrderDao flowerOrderDao, CartItemDao cartItemDao) {
Date date = new Date();
Order order = new Order(0, username, "Pending", date);
insert(order);
order = findOrderByUsernameAndDate(username, date);
List<FlowerOrder> flowers = new Vector<>();
for (CartItemModel item : items) {
flowers.add(new FlowerOrder(item.flowerName, order.id, item.price * item.discountPercent, item.quantity));
}
flowerOrderDao.insert(flowers);
cartItemDao.clearCart();
}
@Query("SELECT id,status,date FROM Orders where user=:member ORDER BY date DESC")
public abstract LiveData<List<MemberOrderListItemModel>> getOrdersForMember(String member);
@Query("SELECT id,status,date FROM Orders WHERE id=:id")
public abstract LiveData<MemberOrderListItemModel> load(int id);
@Query("UPDATE Orders SET status=:status WHERE id=:id")
public abstract void updateStatus(int id, String status);
@Query("SELECT * FROM Orders WHERE status <> 'Canceled'")
public abstract LiveData<List<Order>> getAllUncancelledOrders();
}
| 36.844828 | 122 | 0.743566 |
4e7b33e125c1d8a83577ddae9431f263ae1669c2 | 115 | sql | SQL | src/test/resources/sql/alter_operator_class/583095ad.sql | Shuttl-Tech/antlr_psql | fcf83192300abe723f3fd3709aff5b0c8118ad12 | [
"MIT"
] | 66 | 2018-06-15T11:34:03.000Z | 2022-03-16T09:24:49.000Z | src/test/resources/sql/alter_operator_class/583095ad.sql | Shuttl-Tech/antlr_psql | fcf83192300abe723f3fd3709aff5b0c8118ad12 | [
"MIT"
] | 13 | 2019-03-19T11:56:28.000Z | 2020-08-05T04:20:50.000Z | src/test/resources/sql/alter_operator_class/583095ad.sql | Shuttl-Tech/antlr_psql | fcf83192300abe723f3fd3709aff5b0c8118ad12 | [
"MIT"
] | 28 | 2019-01-05T19:59:02.000Z | 2022-03-24T11:55:50.000Z | -- file:alter_generic.sql ln:237 expect:true
ALTER OPERATOR CLASS alt_opc2 USING hash OWNER TO regress_alter_user2
| 38.333333 | 69 | 0.834783 |
265d60ebc5b8458d8a3d849894b71905d2d4ebc0 | 11,910 | java | Java | pinot-core/src/main/java/com/linkedin/pinot/core/startree/DefaultStarTreeBuilder.java | jzmq/pinot | b1f71cd5dc6fd69bef99f7990ee67de8ebd11296 | [
"Apache-2.0"
] | null | null | null | pinot-core/src/main/java/com/linkedin/pinot/core/startree/DefaultStarTreeBuilder.java | jzmq/pinot | b1f71cd5dc6fd69bef99f7990ee67de8ebd11296 | [
"Apache-2.0"
] | null | null | null | pinot-core/src/main/java/com/linkedin/pinot/core/startree/DefaultStarTreeBuilder.java | jzmq/pinot | b1f71cd5dc6fd69bef99f7990ee67de8ebd11296 | [
"Apache-2.0"
] | null | null | null | /**
* Copyright (C) 2014-2015 LinkedIn Corp. (pinot-core@linkedin.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.linkedin.pinot.core.startree;
import com.google.common.base.Objects;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
public class DefaultStarTreeBuilder implements StarTreeBuilder {
private static final Logger LOG = LoggerFactory.getLogger(DefaultStarTreeBuilder.class);
private List<Integer> splitOrder;
private int maxLeafRecords;
private StarTreeTable starTreeTable;
private StarTreeIndexNode starTree;
private Map<Integer, StarTreeTableRange> documentIdRanges;
private Map<Integer, StarTreeTableRange> adjustedDocumentIdRanges;
private boolean buildComplete;
private int totalRawDocumentCount;
private int totalAggDocumentCount;
@Override
public String toString() {
return Objects.toStringHelper(this)
.add("splitOrder", splitOrder)
.add("maxLeafRecords", maxLeafRecords)
.add("starTreeTable", starTreeTable)
.toString();
}
@Override
public void init(List<Integer> splitOrder,
int maxLeafRecords,
StarTreeTable starTreeTable) {
this.splitOrder = splitOrder;
this.maxLeafRecords = maxLeafRecords;
this.starTreeTable = starTreeTable;
this.starTree = new StarTreeIndexNode();
this.documentIdRanges = new HashMap<Integer, StarTreeTableRange>();
this.adjustedDocumentIdRanges = new HashMap<Integer, StarTreeTableRange>();
// Root node is everything (i.e. raw segment)
this.starTree.setDimensionName(StarTreeIndexNode.all());
this.starTree.setDimensionValue(StarTreeIndexNode.all());
this.starTree.setLevel(0);
}
@Override
public int getMaxLeafRecords() {
return maxLeafRecords;
}
@Override
public List<Integer> getSplitOrder() {
return splitOrder;
}
@Override
public void append(StarTreeTableRow row) {
starTreeTable.append(row);
}
@Override
public void build() {
long startMillis;
long endMillis;
LOG.info("Beginning StarTree construction...");
startMillis = System.currentTimeMillis();
constructStarTree(starTree, starTreeTable);
numberNodes();
endMillis = System.currentTimeMillis();
LOG.info("StarTree construction complete, took {} ms", endMillis - startMillis);
LOG.info("Computing document ID ranges...");
startMillis = System.currentTimeMillis();
computeDocumentIdRanges();
endMillis = System.currentTimeMillis();
LOG.info("Document ID range computation complete, took {} ms", endMillis - startMillis);
buildComplete = true;
}
@Override
public StarTreeIndexNode getTree() {
if (!buildComplete) {
throw new IllegalStateException("Must call build first");
}
return starTree;
}
@Override
public StarTreeTable getTable() {
if (!buildComplete) {
throw new IllegalStateException("Must call build first");
}
return starTreeTable;
}
@Override
public StarTreeTableRange getDocumentIdRange(int nodeId) {
if (!buildComplete) {
throw new IllegalStateException("Must call build first");
}
return documentIdRanges.get(nodeId);
}
@Override
public StarTreeTableRange getAggregateAdjustedDocumentIdRange(int nodeId) {
if (!buildComplete) {
throw new IllegalStateException("Must call build first");
}
return adjustedDocumentIdRanges.get(nodeId);
}
@Override
public int getTotalRawDocumentCount() {
if (!buildComplete) {
throw new IllegalStateException("Must call build first");
}
return totalRawDocumentCount;
}
@Override
public int getTotalAggregateDocumentCount() {
if (!buildComplete) {
throw new IllegalStateException("Must call build first");
}
return totalAggDocumentCount;
}
/**
* Recursively constructs the StarTree, splitting nodes and adding leaf records.
*
* @param node
* The sub-tree to potentially split.
* @param table
* The projection of the StarTree table which corresponds to the sub tree.
* @return
* The number of records that were added at this level
*/
private int constructStarTree(StarTreeIndexNode node, StarTreeTable table) {
long startMillis;
long endMillis;
if (node.getLevel() >= splitOrder.size() || table.size() <= maxLeafRecords) {
// Either can no longer split, or max record constraint has been met
return 0;
}
// The next dimension on which to split
Integer splitDimensionId = splitOrder.get(node.getLevel());
LOG.info("Splitting on dimension {} at level {} (table.size={})", splitDimensionId, node.getLevel(), table.size());
// Compute the remaining unique combinations after removing split dimension
int aggregateCombinations = 0;
Iterator<StarTreeTableRow> uniqueItr = table.getUniqueCombinations(Collections.singletonList(splitDimensionId));
while (uniqueItr.hasNext()) {
// And append them to the sub table
// n.b. This appends to the end of the sub table, which may be actually inserting to a parent table
StarTreeTableRow row = uniqueItr.next();
table.append(row);
aggregateCombinations++;
}
LOG.info("Added {} aggregate combinations at {}", aggregateCombinations, node);
// Sort the sub-table based on the current tree prefix
// n.b. If a view of a larger table, it will be partially sorted,
// so the first couple of dimension prefix comparisons will be no-ops.
List<Integer> pathDimensions = node.getPathDimensions();
pathDimensions.add(splitDimensionId);
LOG.info("Sorting sub-table at {} by dimensions {}", node, pathDimensions);
startMillis = System.currentTimeMillis();
table.sort(pathDimensions);
endMillis = System.currentTimeMillis();
LOG.info("Sort of sub-table {} took {} ms", node, endMillis - startMillis);
// Make this node a parent
node.setChildDimensionName(splitDimensionId);
node.setChildren(new HashMap<Integer, StarTreeIndexNode>());
// Compute the GROUP BY stats, including for ALL (i.e. "*") dimension value
LOG.info("Computing group by stats at {}", node);
startMillis = System.currentTimeMillis();
StarTreeTableGroupByStats groupByStats = table.groupBy(splitDimensionId);
endMillis = System.currentTimeMillis();
LOG.info("Group by stats computation at {} took {} ms", node, endMillis - startMillis);
int subTreeAggregateCombinations = 0;
for (Integer valueId : groupByStats.getValues()) {
// Create child
StarTreeIndexNode child = new StarTreeIndexNode();
child.setDimensionName(splitDimensionId);
child.setDimensionValue(valueId);
child.setParent(node);
child.setLevel(node.getLevel() + 1);
// n.b. We will number the nodes later using BFS after fully split
// Add child to parent
node.getChildren().put(valueId, child);
// Create table projection
// n.b. Since the sub table is sorted, we can use minRecordId and raw count to determine the range for
// containing the dimensions whose value is valueId for splitDimensionId, and create a sub table.
Integer minRecordId = groupByStats.getMinRecordId(valueId) + subTreeAggregateCombinations;
Integer rawRecordCount = groupByStats.getRawCount(valueId);
StarTreeTable subTable = table.view(minRecordId, rawRecordCount);
// Create sub-tree
subTreeAggregateCombinations += constructStarTree(child, subTable);
}
return aggregateCombinations + subTreeAggregateCombinations;
}
/**
* Numbers the StarTree nodes using BFS of the tree.
*/
private void numberNodes() {
int nodeId = 0;
Queue<StarTreeIndexNode> queue = new LinkedList<StarTreeIndexNode>();
queue.add(starTree);
while (!queue.isEmpty()) {
StarTreeIndexNode current = queue.remove();
current.setNodeId(nodeId++);
if (!current.isLeaf()) {
for (StarTreeIndexNode child : current.getChildren().values()) {
queue.add(child);
}
}
}
}
/**
* Computes the ranges to which each nodeId maps in the StarTree table.
*
* <p>
* This assumes that the table is sorted according to tree path dimension prefix.
* </p>
*/
private void computeDocumentIdRanges() {
Map<Integer, Integer> currentPrefix = null;
StarTreeIndexNode currentNode = null;
int currentDocumentId = 0;
int matchingPrefixCount = 0;
int matchingStartDocumentId = 0;
int matchingAdjustedStartDocumentId = 0;
int currentRawDocumentId = 0;
int currentAggDocumentId = 0;
Iterator<StarTreeTableRow> tableItr = starTreeTable.getAllCombinations();
while (tableItr.hasNext()) {
StarTreeTableRow currentCombination = tableItr.next();
// Initialize current node
if (currentNode == null) {
currentNode = starTree.getMatchingNode(currentCombination.getDimensions());
currentPrefix = currentNode.getPathValues();
if (currentPrefix.containsValue(StarTreeIndexNode.all())) {
matchingAdjustedStartDocumentId = currentAggDocumentId;
} else {
matchingAdjustedStartDocumentId = currentRawDocumentId;
}
}
if (StarTreeIndexNode.matchesPrefix(currentPrefix, currentCombination.getDimensions())) {
// As long as current document matches the prefix, keep incrementing count
matchingPrefixCount++;
} else {
// We are at the next node's range, so store the current range and reset state to
// be consistent with the new node's range
StarTreeTableRange range = new StarTreeTableRange(matchingStartDocumentId, matchingPrefixCount);
documentIdRanges.put(currentNode.getNodeId(), range);
StarTreeTableRange adjustedRange = new StarTreeTableRange(matchingAdjustedStartDocumentId, matchingPrefixCount);
adjustedDocumentIdRanges.put(currentNode.getNodeId(), adjustedRange);
// Reset the node
currentNode = starTree.getMatchingNode(currentCombination.getDimensions());
if (currentNode == null) {
throw new IllegalStateException("No node matches combination " + currentCombination);
}
currentPrefix = currentNode.getPathValues();
// Reset the matching document
matchingPrefixCount = 1;
matchingStartDocumentId = currentDocumentId;
if (currentPrefix.containsValue(StarTreeIndexNode.all())) {
matchingAdjustedStartDocumentId = currentAggDocumentId;
} else {
matchingAdjustedStartDocumentId = currentRawDocumentId;
}
}
// Move on to next document, also within agg / raw
currentDocumentId++;
if (currentPrefix.containsValue(StarTreeIndexNode.all())) {
currentAggDocumentId++;
} else {
currentRawDocumentId++;
}
}
// The left overs
if (currentNode != null) {
StarTreeTableRange range = new StarTreeTableRange(matchingStartDocumentId, matchingPrefixCount);
documentIdRanges.put(currentNode.getNodeId(), range);
StarTreeTableRange adjustedRange = new StarTreeTableRange(matchingAdjustedStartDocumentId, matchingPrefixCount);
adjustedDocumentIdRanges.put(currentNode.getNodeId(), adjustedRange);
}
totalAggDocumentCount = currentAggDocumentId;
totalRawDocumentCount = currentRawDocumentId;
}
}
| 36.090909 | 120 | 0.705542 |
3fa1da3a4930ccd51511f66fb987631e8f7bb0b1 | 6,903 | h | C | tensorflow/lite/kernels/internal/optimized/depthwiseconv_multithread.h | PaulWang1905/tensorflow | ebf12d22b4801fb8dab5034cc94562bf7cc33fa0 | [
"Apache-2.0"
] | 9 | 2019-12-29T01:47:37.000Z | 2021-12-21T13:47:41.000Z | tensorflow/lite/kernels/internal/optimized/depthwiseconv_multithread.h | PaulWang1905/tensorflow | ebf12d22b4801fb8dab5034cc94562bf7cc33fa0 | [
"Apache-2.0"
] | 1 | 2019-06-18T07:56:15.000Z | 2019-06-18T07:56:15.000Z | tensorflow/lite/kernels/internal/optimized/depthwiseconv_multithread.h | PaulWang1905/tensorflow | ebf12d22b4801fb8dab5034cc94562bf7cc33fa0 | [
"Apache-2.0"
] | 3 | 2020-05-11T07:48:52.000Z | 2021-05-05T10:26:16.000Z | /* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_OPTIMIZED_DEPTHWISECONV_MULTITHREAD_H_
#define TENSORFLOW_LITE_KERNELS_INTERNAL_OPTIMIZED_DEPTHWISECONV_MULTITHREAD_H_
#include "tensorflow/lite/kernels/cpu_backend_context.h"
#include "tensorflow/lite/kernels/cpu_backend_threadpool.h"
#include "tensorflow/lite/kernels/internal/optimized/depthwiseconv_float.h"
#include "tensorflow/lite/kernels/internal/optimized/depthwiseconv_uint8.h"
namespace tflite {
namespace optimized_ops {
// TODO(luwa): add multithread to per-channel depthwise_conv
// DepthwiseConv can run with multi threads on the dim specified by thread_dim.
// Each thread processes output elements on dim, thread_dim, in the range of
// [thread_start, thread_end).
// For example, assume thread_start = 2, thread_end = 6, and thread_dim = 1, it
// means that it will calculate DepthwiseConv for output_data[:, 2:5, :, :].
template <typename T, typename TS>
struct DepthwiseConvWorkerTask : cpu_backend_threadpool::Task {
DepthwiseConvWorkerTask(const DepthwiseParams& params,
const RuntimeShape& input_shape, const T* input_data,
const RuntimeShape& filter_shape,
const T* filter_data, const RuntimeShape& bias_shape,
const TS* bias_data, const RuntimeShape& output_shape,
T* output_data,
CpuBackendContext* cpu_backend_context,
int thread_start, int thread_end, int thread_dim)
: params_(params),
input_shape_(input_shape),
input_data_(input_data),
filter_shape_(filter_shape),
filter_data_(filter_data),
bias_shape_(bias_shape),
bias_data_(bias_data),
output_shape_(output_shape),
output_data_(output_data),
cpu_backend_context_(cpu_backend_context),
thread_start_(thread_start),
thread_end_(thread_end),
thread_dim_(thread_dim) {}
void Run() override {
DepthwiseConvImpl(params_, input_shape_, input_data_, filter_shape_,
filter_data_, bias_shape_, bias_data_, output_shape_,
output_data_, cpu_backend_context_, thread_start_,
thread_end_, thread_dim_);
}
private:
const DepthwiseParams& params_;
const RuntimeShape& input_shape_;
const T* input_data_;
const RuntimeShape& filter_shape_;
const T* filter_data_;
const RuntimeShape& bias_shape_;
const TS* bias_data_;
const RuntimeShape& output_shape_;
T* output_data_;
CpuBackendContext* cpu_backend_context_;
int thread_start_;
int thread_end_;
int thread_dim_;
};
inline int HowManyConvThreads(const RuntimeShape& output_shape,
const RuntimeShape& filter_shape,
int thread_dim) {
constexpr int kMinMulPerThread = 8;
const int output_units = output_shape.Dims(thread_dim);
const int filter_height = filter_shape.Dims(1);
const int filter_width = filter_shape.Dims(2);
const int num_mul_per_unit =
FlatSizeSkipDim(output_shape, thread_dim) * filter_height * filter_width;
const int min_units_per_thread = kMinMulPerThread / num_mul_per_unit + 1;
int thread_count = output_units / min_units_per_thread;
return thread_count;
}
template <typename T, typename TS>
inline void DepthwiseConv(const DepthwiseParams& params,
const RuntimeShape& input_shape, const T* input_data,
const RuntimeShape& filter_shape,
const T* filter_data, const RuntimeShape& bias_shape,
const TS* bias_data, const RuntimeShape& output_shape,
T* output_data,
CpuBackendContext* cpu_backend_context) {
gemmlowp::ScopedProfilingLabel label("DepthwiseConv");
TFLITE_DCHECK_EQ(input_shape.DimensionsCount(), 4);
TFLITE_DCHECK_EQ(filter_shape.DimensionsCount(), 4);
TFLITE_DCHECK_EQ(output_shape.DimensionsCount(), 4);
const int output_batches = output_shape.Dims(0);
const int output_height = output_shape.Dims(1);
int thread_count_batch = HowManyConvThreads(output_shape, filter_shape, 0);
int thread_count_row = HowManyConvThreads(output_shape, filter_shape, 1);
int thread_dim, thread_count, thread_dim_size;
if (thread_count_batch > thread_count_row) {
thread_dim = 0;
thread_dim_size = output_batches;
thread_count = thread_count_batch;
} else {
thread_dim = 1;
thread_dim_size = output_height;
thread_count = thread_count_row;
}
const int max_threads = cpu_backend_context->max_num_threads();
thread_count = std::max(1, std::min(thread_count, max_threads));
// Cap the number of threads to 2 for float path to avoid regression in
// performance (b/132294857).
if (std::is_floating_point<T>::value) {
thread_count = std::min(thread_count, 2);
}
if (thread_count == 1) {
DepthwiseConvImpl(params, input_shape, input_data, filter_shape,
filter_data, bias_shape, bias_data, output_shape,
output_data, cpu_backend_context, /*thread_start=*/0,
/*thread_end=*/output_height, /*thread_dim=*/1);
} else {
std::vector<DepthwiseConvWorkerTask<T, TS>> tasks;
// TODO(b/131746020) don't create new heap allocations every time.
// At least we make it a single heap allocation by using reserve().
tasks.reserve(thread_count);
int thread_start = 0;
for (int i = 0; i < thread_count; ++i) {
int thread_end =
thread_start + (thread_dim_size - thread_start) / (thread_count - i);
tasks.emplace_back(params, input_shape, input_data, filter_shape,
filter_data, bias_shape, bias_data, output_shape,
output_data, cpu_backend_context, thread_start,
thread_end, thread_dim);
thread_start = thread_end;
}
cpu_backend_threadpool::Execute(tasks.size(), tasks.data(),
cpu_backend_context);
}
}
} // namespace optimized_ops
} // namespace tflite
#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_OPTIMIZED_DEPTHWISECONV_MULTITHREAD_H_
| 43.14375 | 82 | 0.689121 |
0cde6e9d59bff904867397a498cf0cce96687bf3 | 3,194 | py | Python | default-approach/data-collection/harpers-data/scraper_scripts/get-harpers-links.py | the-browser/recommending-interesting-writing | 9ff4771d3f437d33c26d2f306e393b5a90a04878 | [
"MIT"
] | 5 | 2020-09-17T17:56:21.000Z | 2021-11-03T02:40:27.000Z | default-approach/data-collection/harpers-data/scraper_scripts/get-harpers-links.py | the-browser/recommending-interesting-writing | 9ff4771d3f437d33c26d2f306e393b5a90a04878 | [
"MIT"
] | null | null | null | default-approach/data-collection/harpers-data/scraper_scripts/get-harpers-links.py | the-browser/recommending-interesting-writing | 9ff4771d3f437d33c26d2f306e393b5a90a04878 | [
"MIT"
] | 1 | 2020-11-01T11:37:38.000Z | 2020-11-01T11:37:38.000Z | BASE_URL="https://harpers.org/sections/readings/page/"
N_ARTICLE_LINK_PAGES = 50
OUTPUT_FILE = 'harpers-later-urls.json'
WORKER_THREADS = 32
import json
import datetime
import dateutil.parser
from dataclasses import dataclass
from dataclasses_json import dataclass_json
from datetime import datetime
from newspaper import Article
from bs4 import BeautifulSoup
from typing import List
from queue import Queue
from threading import Thread
from requests import get
from pathlib import Path
import pandas as pd
from urllib.request import Request, urlopen
@dataclass_json
@dataclass
class HarperReadingArticleUrl:
url: str
title: str
class WriteThread(Thread):
def __init__(self, queue: Queue, *args, **kwargs):
super().__init__(*args, **kwargs)
self.queue = queue
def run(self):
existing_links = []
while True:
article = self.queue.get()
if article is None:
output_file_path = Path(OUTPUT_FILE)
check_df = pd.DataFrame(existing_links)
check_df.drop_duplicates(subset="url", keep="first", inplace=True)
check_df.to_json(output_file_path, orient="records")
break
current_article_json = article.to_dict()
existing_links.insert(0,current_article_json)
class ScrapeThread(Thread):
def __init__(self, chunk, queue: Queue, *args, **kwargs):
super().__init__(*args, **kwargs)
self.chunk = chunk
self.queue = queue
def run(self):
for i in self.chunk:
try:
print(f'Getting articles from list page {i}')
url = f"{BASE_URL}{i}"
req = Request(url , headers={'User-Agent': 'Mozilla/5.0'})
webpage = urlopen(req).read()
soup = BeautifulSoup(webpage, "html5lib")
articles = soup.find_all('div', {'class': 'card'})
for article in articles:
dual_hrefs = article.find_all('a')
link = dual_hrefs[1]['href']
title = dual_hrefs[1].find('h2', {'class': 'ac-title'})
if title is None or title.string is None or link is None or link is None:
continue
article_url = HarperReadingArticleUrl(url=link.strip(), title=str(title.string.strip()) or '')
self.queue.put(article_url)
except Exception as e:
print(f'Something went wrong when scraping: {e}')
print("------------------------------------------")
if __name__ == '__main__':
queue = Queue()
write_thread = WriteThread(queue)
write_thread.start()
worker_threads = []
chunk_size = (N_ARTICLE_LINK_PAGES) // WORKER_THREADS
for i in range(0, N_ARTICLE_LINK_PAGES+1, chunk_size):
chunk = range(i,i+chunk_size)
worker_threads.append(ScrapeThread(chunk, queue))
for thread in worker_threads:
thread.start()
for thread in worker_threads:
thread.join()
# Signal end of jobs to write thread
queue.put(None)
print('Done.')
write_thread.join()
| 31.313725 | 114 | 0.60551 |
40a213e666e8f70d96034aaeb0fbac73e94570db | 12,469 | html | HTML | docs/projects/codecheck/index.html | shubhamnishad97/shubhamnishad.com | b31f7ad03993fa4cf186e864d69f1d51a3d09a6f | [
"MIT"
] | 3 | 2018-02-16T14:46:09.000Z | 2019-11-26T17:27:17.000Z | docs/projects/codecheck/index.html | shubhamnishad97/shubhamnishad.com | b31f7ad03993fa4cf186e864d69f1d51a3d09a6f | [
"MIT"
] | null | null | null | docs/projects/codecheck/index.html | shubhamnishad97/shubhamnishad.com | b31f7ad03993fa4cf186e864d69f1d51a3d09a6f | [
"MIT"
] | null | null | null | <!doctype html>
<html ⚡ lang="en">
<head>
<meta name="keywords" content="Hackerrank,Django,jQuery,online judge">
<meta name="description" content=" CodeCheck is a Django based online judge made using the Hackerrank API. https://code-check-csi.herokuapp.com/
Features Supports 54 languages Contests can be hosted Tracks can be added Direct compiler is provided to run any code Keeps tracks of solved questions of user better user stats with leaderboard and charts User friendly and fully responsive Packages/Platforms used Django django-allauth django-crispy-forms Heroku Ace Editor
">
<link rel="canonical" href="https://shubhamnishad.com/projects/codecheck/" />
<script async src="https://cdn.ampproject.org/v0.js"></script>
<meta name="viewport" content="width=device-width,minimum-scale=1,initial-scale=1">
<style amp-boilerplate>body{-webkit-animation:-amp-start 8s steps(1,end) 0s 1 normal both;-moz-animation:-amp-start 8s steps(1,end) 0s 1 normal both;-ms-animation:-amp-start 8s steps(1,end) 0s 1 normal both;animation:-amp-start 8s steps(1,end) 0s 1 normal both}@-webkit-keyframes -amp-start{from{visibility:hidden}to{visibility:visible}}@-moz-keyframes -amp-start{from{visibility:hidden}to{visibility:visible}}@-ms-keyframes -amp-start{from{visibility:hidden}to{visibility:visible}}@-o-keyframes -amp-start{from{visibility:hidden}to{visibility:visible}}@keyframes -amp-start{from{visibility:hidden}to{visibility:visible}}</style><noscript><style amp-boilerplate>body{-webkit-animation:none;-moz-animation:none;-ms-animation:none;animation:none}</style></noscript>
<script async custom-element="amp-sidebar" src="https://cdn.ampproject.org/v0/amp-sidebar-0.1.js"></script>
<script async custom-element="amp-selector" src="https://cdn.ampproject.org/v0/amp-selector-0.1.js"></script>
<script async custom-element="amp-accordion" src="https://cdn.ampproject.org/v0/amp-accordion-0.1.js"></script>
<title>CodeCheck - Shubham Nishad</title>
<style amp-custom>
#mainLink{
text-decoration: none;
}
#no_border{
border: 0px;
}
</style>
<script>
(function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
(i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
})(window,document,'script','https://www.google-analytics.com/analytics.js','ga');
ga('create', 'UA-101629128-1', 'auto');
ga('send', 'pageview');
</script>
</head>
<body>
<header class="ampstart-headerbar fixed flex justify-start items-center top-0 left-0 right-0 pl2 pr4">
<div role="button" on="tap:header-sidebar.toggle" tabindex="0" class="ampstart-navbar-trigger md-hide lg-hide pr2">☰</div>
<a href="https://shubhamnishad.com/" class="my0 mx-auto" id="mainLink">
Shubham Nishad
</a>
<nav class="ampstart-headerbar-nav ampstart-nav xs-hide sm-hide">
<ul class="list-reset m0 p0 flex justify-center nowrap">
<li class="ampstart-nav-item"><a href="https://shubhamnishad.com/about/" class="text-decoration-none block">About</a></li>
<li class="ampstart-nav-item"><a href="https://shubhamnishad.com/blog/" class="text-decoration-none block">Blog</a></li>
<li class="ampstart-nav-item"><a href="https://shubhamnishad.com/projects/" class="text-decoration-none block">Projects</a></li>
<li class="ampstart-nav-item"><a href="https://shubhamnishad.com/resources/" class="text-decoration-none block">Resources</a></li>
</ul>
</nav>
</header>
<amp-sidebar id="header-sidebar" class="ampstart-sidebar px3 flex flex-column md-hide lg-hide" layout="nodisplay">
<div class="flex justify-start items-center ampstart-sidebar-header"><a href="#" on="tap:header-sidebar.toggle" class="ampstart-navbar-trigger items-start">✕</a></div>
<nav class="ampstart-sidebar-nav ampstart-nav">
<a href="https://shubhamnishad.com/about/" class="block caps h5 py1 abe-primary">About</a>
<a href="https://shubhamnishad.com/blog/" class="block caps h5 py1 abe-primary">Blog</a>
<a href="https://shubhamnishad.com/projects/" class="block caps h5 py1 abe-primary">Projects</a>
<a href="https://shubhamnishad.com/resources/" class="block caps h5 py1 abe-primary">Resources</a>
<ul class="ampstart-social-follow list-reset flex justify-around items-center flex-wrap m0 mb4">
<li class="mr2">
<a aria-label="github profile" href="https://github.com/shubhamnishad97" class="inline-block">
<i class="fa fa-github fa-2x"></i>
</a>
</li>
<li class="mr2">
<a aria-label="twitter profile" href="https://twitter.com/shubhamnishad97" class="inline-block">
<i class="fa fa-twitter fa-2x"></i>
</a>
</li>
<li class="mr2">
<a aria-label="facebook profile" href="https://facebook.com/shubham.nishad.97" class="inline-block">
<i class="fa fa-facebook fa-2x"></i>
</a>
</li>
<li class="mr2">
<a aria-label="angellist profile" href="https://angel.co/shubhamnishad97" class="inline-block">
<i class="fa fa-angellist fa-2x"></i>
</a>
</li>
<li class="mr2">
<a aria-label="linkedin profile" href="https://www.linkedin.com/in/shubhamnishad" class="inline-block">
<i class="fa fa-linkedin fa-2x"></i>
</a>
</li>
</ul>
</nav>
</amp-sidebar>
<header class="www-header center">
<h1 class="mb1">CodeCheck</h1>
<div class="www-index-header-action mx-auto">Sat, Jun 24, 2017</div>
<div class="www-index-header-action mx-auto">
<a href="https://shubhamnishad.com/categories/projects">projects</a>
</div>
<div class="www-index-header-action mx-auto">
<a href="https://shubhamnishad.com/tags/jquery">#jQuery</a>
<a href="https://shubhamnishad.com/tags/django">#Django</a>
</div>
</header>
<amp-selector layout="container">
<main class="flex pl2">
<aside class="col-1 xs-hide"></aside>
<article class="www-components flex-auto col-10">
<section class="www-component-desc">
<p>CodeCheck is a Django based online judge made using the Hackerrank API.
<a href="https://code-check-csi.herokuapp.com/">https://code-check-csi.herokuapp.com/</a></p>
<p><br></p>
<h3 id="features">Features</h3>
<ul>
<li>Supports 54 languages</li>
<li>Contests can be hosted</li>
<li>Tracks can be added</li>
<li>Direct compiler is provided to run any code</li>
<li>Keeps tracks of solved questions of user</li>
<li>Better user stats with leaderboard and charts</li>
<li>User friendly and fully responsive</li>
</ul>
<h3 id="packages-platforms-used">Packages/Platforms used</h3>
<ul>
<li><a href="https://www.djangoproject.com/">Django</a></li>
<li><a href="https://github.com/pennersr/django-allauth">django-allauth</a></li>
<li><a href="http://django-crispy-forms.readthedocs.io/en/latest/">django-crispy-forms</a></li>
<li><a href="https://www.heroku.com/">Heroku</a></li>
<li><a href="https://ace.c9.io/">Ace Editor</a></li>
</ul>
<p><br>
<figure >
<amp-img src="compiler.PNG" width=1080 height=610 layout="responsive"></amp-img>
</figure>
<figure >
<amp-img src="contests.PNG" width=1080 height=610 layout="responsive"></amp-img>
</figure>
<figure >
<amp-img src="practice.PNG" width=1080 height=610 layout="responsive"></amp-img>
</figure>
<figure >
<amp-img src="questions.PNG" width=1080 height=610 layout="responsive"></amp-img>
</figure>
<figure >
<amp-img src="user%20main.PNG" width=1080 height=610 layout="responsive"></amp-img>
</figure>
</p>
<p><img src="login.png" width="300">
<img src="reset.png" width="280"></p>
</section>
</article>
<aside class="col-1 xs-hide"></aside>
</main>
</amp-selector>
<link rel="stylesheet" type="text/css" href="https://shubhamnishad.com/css/css.css">
<link href="https://fonts.googleapis.com/css?family=Roboto" rel="stylesheet">
<link rel="stylesheet" type="text/css" href="https://maxcdn.bootstrapcdn.com/font-awesome/4.7.0/css/font-awesome.min.css">
<footer class="ampstart-footer flex flex-column items-center pxy3">
<ul class="ampstart-social-follow list-reset flex justify-around items-center flex-wrap m0 mb4">
<li class="mr2"><a href="https://shubhamnishad.com/about/" class="text-decoration-none inline-block">About</a></li>
<li class="mr2"><a href="https://shubhamnishad.com/blog/" class="text-decoration-none inline-block">Blog</a></li>
<li class="mr2"><a href="https://shubhamnishad.com/projects/" class="text-decoration-none inline-block">Projects</a></li>
<li class="mr2"><a href="https://shubhamnishad.com/resources/" class="text-decoration-none inline-block">Resources</a></li>
</ul>
<ul class="ampstart-social-follow list-reset flex justify-around items-center flex-wrap m0 mb4">
<li class="mr2">
<a aria-label="github profile" href="https://github.com/shubhamnishad97" class="inline-block">
<i class="fa fa-github fa-2x"></i>
</a>
</li>
<li class="mr2">
<a aria-label="twitter profile" href="https://twitter.com/shubhamnishad97" class="inline-block">
<i class="fa fa-twitter fa-2x"></i>
</a>
</li>
<li class="mr2">
<a aria-label="facebook profile" href="https://facebook.com/shubham.nishad.97" class="inline-block">
<i class="fa fa-facebook fa-2x"></i>
</a>
</li>
<li class="mr2">
<a aria-label="angellist profile" href="https://angel.co/shubhamnishad97" class="inline-block">
<i class="fa fa-angellist fa-2x"></i>
</a>
</li>
<li class="mr2">
<a aria-label="linkedin profile" href="https://www.linkedin.com/in/shubhamnishad" class="inline-block">
<i class="fa fa-linkedin fa-2x"></i>
</a>
</li>
</ul>
<small>© 2019 Copyright Text powered by <a href="http://gohugo.io" target="_blank">hugo</a></small>
</footer>
</body>
</html>
| 37.899696 | 768 | 0.537573 |
71d6378c988fc34172a335abf35229552c148b0a | 791 | ts | TypeScript | src/models/fhir/interfaces/ISubstanceNucleicAcid.ts | MeasureAuthoringTool/fhir-typescript-models | e32cd39a0d450c89a6c52702d3267133d44fa399 | [
"CC0-1.0"
] | 3 | 2021-03-02T19:04:49.000Z | 2021-12-28T21:55:16.000Z | src/models/fhir/interfaces/ISubstanceNucleicAcid.ts | MeasureAuthoringTool/fhir-typescript-models | e32cd39a0d450c89a6c52702d3267133d44fa399 | [
"CC0-1.0"
] | 178 | 2020-08-27T17:57:30.000Z | 2022-03-27T00:11:06.000Z | src/models/fhir/interfaces/ISubstanceNucleicAcid.ts | MeasureAuthoringTool/fhir-typescript-models | e32cd39a0d450c89a6c52702d3267133d44fa399 | [
"CC0-1.0"
] | 1 | 2021-12-19T22:07:07.000Z | 2021-12-19T22:07:07.000Z | /* eslint-disable import/prefer-default-export, import/no-cycle, @typescript-eslint/naming-convention, @typescript-eslint/no-empty-interface */
import {
ICodeableConcept,
IDomainResource,
IElement,
IPrimitiveInteger,
IPrimitiveString,
ISubstanceNucleicAcidSubunit,
} from "../internal";
export interface ISubstanceNucleicAcid extends IDomainResource {
sequenceType?: ICodeableConcept;
numberOfSubunits?: IPrimitiveInteger;
_numberOfSubunits?: IElement;
areaOfHybridisation?: IPrimitiveString;
_areaOfHybridisation?: IElement;
oligoNucleotideType?: ICodeableConcept;
subunit?: Array<ISubstanceNucleicAcidSubunit>;
}
/* eslint-enable import/prefer-default-export, import/no-cycle, @typescript-eslint/naming-convention, @typescript-eslint/no-empty-interface */
| 30.423077 | 143 | 0.795196 |
4a524424b8a5fa1c4d2d1be25d736deeef1cf73e | 653 | js | JavaScript | frontend/src/pages/NewSimulation.js | georgeepta/BGP-Simulator | 3fba8e19da5940b9af5638b3b9109c9473ba9e99 | [
"BSD-3-Clause"
] | null | null | null | frontend/src/pages/NewSimulation.js | georgeepta/BGP-Simulator | 3fba8e19da5940b9af5638b3b9109c9473ba9e99 | [
"BSD-3-Clause"
] | null | null | null | frontend/src/pages/NewSimulation.js | georgeepta/BGP-Simulator | 3fba8e19da5940b9af5638b3b9109c9473ba9e99 | [
"BSD-3-Clause"
] | 1 | 2021-07-05T00:42:37.000Z | 2021-07-05T00:42:37.000Z | import React from 'react';
import '../App.css';
import { Button } from '../components/Button';
export default function NewSimulation() {
return (
<div className='new-simulation'>
<div className='simulation-type'>
<h1 className='h1'>Select Simulation Type</h1>
<div className="line-break"></div>
<Button
type="primary"
pagelink="./custom-simulation"
btnname="Custom Simulation"
/>
<Button
type="primary"
pagelink="./random-simulation"
btnname="Random Simulation"
/>
</div>
</div>
);
} | 26.12 | 56 | 0.532925 |
b1ba15d95cf9124bb919c0e070d089ae3ef97b4c | 1,731 | h | C | src/commlib/zcelib/zce_share_mem_mmap.h | sailzeng/zcelib | 88e14ab436f1b40e8071e15ef6d9fae396efc3b4 | [
"Apache-2.0"
] | 72 | 2015-01-08T05:01:48.000Z | 2021-12-28T06:13:03.000Z | src/commlib/zcelib/zce_share_mem_mmap.h | sailzeng/zcelib | 88e14ab436f1b40e8071e15ef6d9fae396efc3b4 | [
"Apache-2.0"
] | 4 | 2016-01-18T12:24:59.000Z | 2019-10-12T07:19:15.000Z | src/commlib/zcelib/zce_share_mem_mmap.h | sailzeng/zcelib | 88e14ab436f1b40e8071e15ef6d9fae396efc3b4 | [
"Apache-2.0"
] | 40 | 2015-01-26T06:49:18.000Z | 2021-07-20T08:11:48.000Z | #ifndef ZCE_LIB_SHARE_MEMORY_MMAP_H_
#define ZCE_LIB_SHARE_MEMORY_MMAP_H_
#include "zce_os_adapt_predefine.h"
#include "zce_boost_non_copyable.h"
/*********************************************************************************
class ZCE_ShareMem_Mmap 封装MMAP共享内存映射文件函数
*********************************************************************************/
class ZCE_ShareMem_Mmap: public ZCE_NON_Copyable
{
public:
//构造函数
ZCE_ShareMem_Mmap();
~ZCE_ShareMem_Mmap();
public:
//打开文件,进行映射
int open(const char *file_name,
std::size_t shm_size,
int file_open_mode = O_CREAT | O_RDWR,
int file_perms_mode = ZCE_SHARE_FILE_PERMS,
const void *want_address = NULL,
int mmap_prot = PROT_READ | PROT_WRITE,
int mmap_flags = MAP_SHARED,
std::size_t offset = 0
);
//打开文件,进行映射, 简单,推荐使用这个函数
int open(const char *file_name,
std::size_t shm_size,
bool if_restore,
bool read_only = false,
bool share_file = true,
const void *want_address = NULL,
std::size_t offset = 0
);
//关闭文件
int close();
//删除映射的文件,当然正在映射的时候不能删除
int remove();
//同步文件
int flush();
///返回映射的内存地址
inline void *addr()
{
return mmap_addr_;
}
///返回文件名称
inline const char *file_name()
{
return mmap_file_name_.c_str();
}
protected:
//映射文件的名称
std::string mmap_file_name_;
//映射的内存地址
void *mmap_addr_;
// 映射的文件句柄
ZCE_HANDLE mmap_handle_;
//映射的共享内存大小
std::size_t shm_size_;
};
#endif //ZCE_LIB_SHARE_MEMORY_MMAP_H_
| 22.776316 | 82 | 0.53264 |
ad8e1594aeab390f713270f55851d61afd14f065 | 578 | rs | Rust | src/lib.rs | erikjohnston/rust-signed-json | 4f0539852a355e5b6bb394579ca461586a04d59b | [
"Apache-2.0"
] | null | null | null | src/lib.rs | erikjohnston/rust-signed-json | 4f0539852a355e5b6bb394579ca461586a04d59b | [
"Apache-2.0"
] | null | null | null | src/lib.rs | erikjohnston/rust-signed-json | 4f0539852a355e5b6bb394579ca461586a04d59b | [
"Apache-2.0"
] | null | null | null | mod canonical;
pub mod json;
#[cfg(feature = "signed")]
pub mod signed;
use anyhow::Error;
pub use canonical::Canonical;
#[doc(inline)]
pub use json::{to_string_canonical, to_vec_canonical};
#[doc(inline)]
#[cfg(feature = "signed")]
pub use signed::Signed;
pub use ed25519_dalek::{Keypair, PublicKey, SecretKey};
/// Create a [`Keypair`] from a serialized [`SecretKey`].
///
/// Only the raw secret-key bytes are required; the matching public key is
/// re-derived from the secret half before the pair is assembled.
pub fn keypair_from_secret_bytes(bytes: &[u8]) -> Result<Keypair, Error> {
    let secret = SecretKey::from_bytes(bytes)?;
    Ok(Keypair {
        public: PublicKey::from(&secret),
        secret,
    })
}
| 25.130435 | 74 | 0.698962 |
48b650a179b9986c597fa72ed0a0941c01be7f59 | 1,453 | kt | Kotlin | mobile-ui/src/main/java/org/dukecon/android/ui/features/speakerdetail/SpeakerDetailActivity.kt | dukecon/dukecon_android | 9f3deaf245a953d089fa7666c5f10cb442ea4080 | [
"Apache-2.0"
] | 2 | 2018-12-11T21:41:49.000Z | 2018-12-12T06:55:46.000Z | mobile-ui/src/main/java/org/dukecon/android/ui/features/speakerdetail/SpeakerDetailActivity.kt | dukecon/dukecon_android | 9f3deaf245a953d089fa7666c5f10cb442ea4080 | [
"Apache-2.0"
] | 43 | 2017-07-30T11:55:43.000Z | 2019-03-21T09:18:50.000Z | mobile-ui/src/main/java/org/dukecon/android/ui/features/speakerdetail/SpeakerDetailActivity.kt | dukecon/dukecon_android | 9f3deaf245a953d089fa7666c5f10cb442ea4080 | [
"Apache-2.0"
] | 1 | 2018-05-18T10:31:17.000Z | 2018-05-18T10:31:17.000Z | package org.dukecon.android.ui.features.speakerdetail
import android.app.Activity
import android.content.Intent
import android.os.Build.VERSION
import android.os.Build.VERSION_CODES
import android.os.Bundle
import androidx.appcompat.app.AppCompatActivity
import kotlinx.android.synthetic.main.activity_speaker_detail.*
import org.dukecon.android.ui.R
import org.dukecon.android.ui.ext.getAppComponent
/**
 * Screen presenting the details of a single conference speaker.
 *
 * Start it through [navigate], which packs the speaker id into the intent
 * extras; onCreate hands that id to the embedded detail view. The activity's
 * DI component is exposed to child views through [getSystemService] under the
 * key "component".
 */
class SpeakerDetailActivity : AppCompatActivity() {

    companion object {
        /** Launches this activity for the speaker identified by [speakerId]. */
        @JvmStatic
        fun navigate(activity: Activity, speakerId: String) {
            val launchIntent = Intent(activity, SpeakerDetailActivity::class.java).apply {
                putExtra("speaker_id", speakerId)
            }
            activity.startActivity(launchIntent)
        }
    }

    private lateinit var component: SpeakerDetailComponent

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        component = getAppComponent().speakerDetailComponent()
        if (VERSION.SDK_INT >= VERSION_CODES.LOLLIPOP) {
            // Delay the shared-element enter transition until content is ready.
            postponeEnterTransition()
        }
        setContentView(R.layout.activity_speaker_detail)
        speaker_detail_view.setSpeakerId(intent.getStringExtra("speaker_id"))
    }

    // Piggybacks on the system-service lookup so views lower in the hierarchy
    // can fetch the DI component without a direct reference to the activity.
    override fun getSystemService(name: String?): Any = when (name) {
        "component" -> component
        else -> super.getSystemService(name)
    }
}
7cb67578b114823eacfbc8d33bfef2685dd6d098 | 578 | rs | Rust | day_10_a/src/main.rs | torkeldanielsson/aoc2020 | d2c1de52a4e1d9e1c8bfa2461d18f8316669bbbf | [
"MIT"
] | null | null | null | day_10_a/src/main.rs | torkeldanielsson/aoc2020 | d2c1de52a4e1d9e1c8bfa2461d18f8316669bbbf | [
"MIT"
] | null | null | null | day_10_a/src/main.rs | torkeldanielsson/aoc2020 | d2c1de52a4e1d9e1c8bfa2461d18f8316669bbbf | [
"MIT"
] | null | null | null | use std::error::Error;
use std::fs;
/// Advent of Code 2020 day 10, part A: sort the adapter joltages and count
/// the 1-jolt and 3-jolt gaps, then print the product of the two counts.
fn main() -> Result<(), Box<dyn Error>> {
    let input = fs::read_to_string("input")?;

    let mut adapters: Vec<i64> = input.lines().map(|s| s.parse::<i64>().unwrap()).collect();
    // The charging outlet contributes an implicit 0-jolt starting point.
    adapters.push(0);
    adapters.sort();

    // counts[d] = number of consecutive pairs with a gap of d jolts.
    let mut counts = vec![0; 4];
    // The device's built-in adapter is always 3 jolts above the highest one.
    counts[3] += 1;

    for pair in adapters.windows(2) {
        counts[(pair[1] - pair[0]) as usize] += 1;
    }

    println!("{:?}", counts);
    println!("res: {}", counts[1] * counts[3]);

    Ok(())
}
| 19.931034 | 91 | 0.49308 |
31bc1cecdd9728a87898117d1c850ac3c8353864 | 9,441 | swift | Swift | EmbeddedSocial/Sources/Modules/PostDetail/Presenter/PostDetailPresenter.swift | LDaneliukas/EmbeddedSocial-iOS-SDK | c1de4c64b9d744020d10bde8411db03968f24965 | [
"MIT"
] | 10 | 2017-10-20T05:39:40.000Z | 2019-04-28T16:06:12.000Z | EmbeddedSocial/Sources/Modules/PostDetail/Presenter/PostDetailPresenter.swift | LDaneliukas/EmbeddedSocial-iOS-SDK | c1de4c64b9d744020d10bde8411db03968f24965 | [
"MIT"
] | 365 | 2017-09-29T07:31:05.000Z | 2018-10-18T16:27:48.000Z | EmbeddedSocial/Sources/Modules/PostDetail/Presenter/PostDetailPresenter.swift | LDaneliukas/EmbeddedSocial-iOS-SDK | c1de4c64b9d744020d10bde8411db03968f24965 | [
"MIT"
] | 8 | 2019-08-07T07:13:30.000Z | 2021-11-10T10:11:30.000Z | //
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See LICENSE in the project root for license information.
//
import UIKit
/// Presenter for the post-detail screen (VIPER style): owns the list of
/// comments for one topic, drives paged fetching through the interactor,
/// and feeds view-model state (load-more cell, scroll position) to the view.
class PostDetailPresenter: PostDetailViewOutput, PostDetailInteractorOutput, PostDetailModuleInput {
    weak var view: PostDetailViewInput!
    var interactor: PostDetailInteractorInput!
    var router: PostDetailRouterInput!
    // How the comments table should scroll after the next reload.
    var scrollType: CommentsScrollType = .none
    var feedViewController: UIViewController?
    var feedModuleInput: FeedModuleInput?
    // Comments currently shown, kept sorted by creation time (ascending).
    var comments = [Comment]()
    var topicHandle: PostHandle!
    private var formatter = DateFormatterTool()
    // Server paging cursor; nil means all pages have been fetched.
    private var cursor: String?
    // When true, keep fetching pages until the cursor is exhausted
    // (used to scroll to the very last comment).
    private var shouldFetchRestOfComments = false
    fileprivate var dataIsFetching = false
    fileprivate var loadMoreCellViewModel = LoadMoreCellViewModel()
    private let pageSize: Int
    private let actionStrategy: AuthorizedActionStrategy
    // NOTE(review): force-unwraps feedModuleInput — crashes if the embedded
    // feed module was never attached; confirm callers guarantee it.
    func heightForFeed() -> CGFloat {
        return (feedModuleInput?.moduleHeight())!
    }
    /// - Parameters:
    ///   - pageSize: number of comments requested per page.
    ///   - actionStrategy: gates write actions behind login when needed.
    ///   - handleChangesPublisher: broadcast source of handle-change hints;
    ///     the presenter subscribes itself on construction.
    init(pageSize: Int,
         actionStrategy: AuthorizedActionStrategy,
         handleChangesPublisher: Publisher = HandleChangesMulticast.shared) {
        self.pageSize = pageSize
        self.actionStrategy = actionStrategy
        handleChangesPublisher.subscribe(self)
    }
    // MARK: PostDetailInteractorOutput
    /// First page arrived: replace the list, re-sort, and reload the table.
    func didFetch(comments: [Comment], cursor: String?) {
        self.cursor = cursor
        self.comments = comments
        // Sorts oldest-first; force-unwraps createdTime (assumed always set).
        self.comments.sort(by: { $0.0.createdTime! < $0.1.createdTime! })
        stopLoading()
        view.reloadTable(scrollType: scrollType)
    }
    /// Subsequent page arrived: merge (replacing duplicates), re-sort, and
    /// either keep auto-fetching (when draining all pages) or update in place.
    func didFetchMore(comments: [Comment], cursor: String?) {
        dataIsFetching = false
        appendWithReplacing(original: &self.comments, appending: comments)
        self.comments.sort(by: { $0.0.createdTime! < $0.1.createdTime! })
        self.cursor = cursor
        stopLoading()
        if cursor != nil && shouldFetchRestOfComments == true {
            self.fetchMore()
        } else if shouldFetchRestOfComments == true {
            view.reloadTable(scrollType: .bottom)
            shouldFetchRestOfComments = false
        } else {
            view.updateComments()
            view.updateLoadingCell()
        }
    }
    private func enableFetchMore() {
        loadMoreCellViewModel.cellHeight = LoadMoreCell.cellHeight
        view.updateLoadingCell()
    }
    // Collapse the load-more cell when there is nothing left to fetch
    // (0.1 rather than 0 — presumably to keep UITableView happy; confirm).
    private func stopLoading() {
        if cursor == nil {
            loadMoreCellViewModel.cellHeight = 0.1
        } else {
            loadMoreCellViewModel.cellHeight = LoadMoreCell.cellHeight
        }
        loadMoreCellViewModel.stopLoading()
    }
    // Merge by commentHandle: existing entries are overwritten, new ones appended.
    private func appendWithReplacing(original: inout [Comment], appending: [Comment]) {
        for appendingItem in appending {
            if let index = original.index(where: { $0.commentHandle == appendingItem.commentHandle }) {
                original[index] = appendingItem
            } else {
                original.append(appendingItem)
            }
        }
    }
    /// Any fetch failed: hide all loading affordances.
    func didFail(error: Error) {
        loadMoreCellViewModel.cellHeight = 0.1
        loadMoreCellViewModel.stopLoading()
        view.updateLoadingCell()
        view.endRefreshing()
        view.hideLoadingHUD()
    }
    func commentDidPost(comment: Comment) {
        comments.append(comment)
        view.postCommentSuccess()
    }
    func commentPostFailed(error: Error) {
        view.hideLoadingHUD()
    }
    // Embed the feed view controller (touching vc.view forces its load).
    private func setupFeed() {
        guard let vc = feedViewController else {
            return
        }
        _ = vc.view
        feedModuleInput?.refreshData()
        view.setFeedViewController(vc)
    }
    // MARK: PostDetailViewOutput
    func loadCellModel() -> LoadMoreCellViewModel {
        return loadMoreCellViewModel
    }
    func canFetchMore() -> Bool {
        return cursor != nil && !dataIsFetching
    }
    /// Pull-to-refresh: reset paging state and reload from the first page.
    func refresh() {
        cursor = nil
        scrollType = .none
        feedModuleInput?.refreshData()
        loadMoreCellViewModel.cellHeight = LoadMoreCell.cellHeight
        loadMoreCellViewModel.startLoading()
        view.updateLoadingCell()
        interactor.fetchComments(topicHandle: topicHandle, cursor: cursor, limit: Int32(pageSize))
    }
    func viewIsReady() {
        view.setupInitialState()
        setupFeed()
        interactor.fetchComments(topicHandle: topicHandle, cursor: cursor, limit: Int32(pageSize))
    }
    /// Scroll to the last comment, fetching all remaining pages first if needed.
    func loadRestComments() {
        if cursor == nil {
            view.reloadTable(scrollType: .bottom)
        } else {
            shouldFetchRestOfComments = true
            fetchMore()
        }
    }
    func numberOfItems() -> Int {
        return comments.count
    }
    func fetchMore() {
        dataIsFetching = true
        loadMoreCellViewModel.startLoading()
        view.updateLoadingCell()
        interactor.fetchMoreComments(topicHandle: topicHandle, cursor: cursor, limit: Int32(pageSize))
    }
    func comment(at index: Int) -> Comment {
        return comments[index]
    }
    /// Post a comment, prompting for login first if the user is anonymous.
    func postComment(photo: Photo?, comment: String) {
        actionStrategy.executeOrPromptLogin { [weak self] in self?._postComment(photo: photo, comment: comment) }
    }
    private func _postComment(photo: Photo?, comment: String) {
        interactor.postComment(photo: photo, topicHandle: topicHandle, comment: comment)
    }
}
// Callbacks from individual comment cells.
// NOTE(review): protocol name "CommentCellModuleOutout" looks like a typo for
// "Output" — declared elsewhere, so it cannot be renamed from here.
extension PostDetailPresenter: CommentCellModuleOutout {
    /// A comment was deleted from a cell: drop it from the local list,
    /// let the router pop if this screen was opened for that comment,
    /// and refresh both the table and the embedded feed.
    func removed(comment: Comment) {
        guard let index = comments.index(where: { $0.commentHandle == comment.commentHandle }) else {
            return
        }
        comments.remove(at: index)
        router.backIfNeeded(from: view as! UIViewController)
        view.removeComment(index: index)
        feedModuleInput?.refreshData()
    }
    /// Show the context menu — the "my comment" variant when the current
    /// user authored it, otherwise the generic one.
    func showMenu(comment: Comment) {
        let isMyComment = (SocialPlus.shared.me?.uid == comment.user?.uid)
        if isMyComment {
            router?.openMyCommentOptions(comment: comment)
        } else {
            router?.openOtherCommentOptions(comment: comment)
        }
    }
}
// Reacts to handle-change broadcasts: when the backend swaps a temporary
// (optimistic) handle for the real one, update local state to match.
extension PostDetailPresenter: Subscriber {
    func update(_ hint: Hint) {
        if let hint = hint as? CommentUpdateHint {
            updateCommentHandle(from: hint.oldHandle, to: hint.newHandle)
        } else if let hint = hint as? TopicUpdateHint, canHandle(hint) {
            // Our topic's handle changed: adopt it and re-point the feed module.
            topicHandle = hint.newHandle
            feedModuleInput?.feedType = .single(post: hint.newHandle)
            feedModuleInput?.refreshData()
        }
    }
    // A topic hint is only relevant if the embedded feed currently shows
    // the single post whose old handle the hint refers to.
    private func canHandle(_ hint: TopicUpdateHint) -> Bool {
        if let feedType = feedModuleInput?.feedType,
            case let FeedType.single(topicHandle) = feedType,
            topicHandle == hint.oldHandle {
            return true
        } else {
            return false
        }
    }
    // Swap a comment's handle in place (Comment is mutated by reference —
    // presumably a class; the reassignment keeps value-type semantics safe too).
    private func updateCommentHandle(from oldHandle: String, to newHandle: String) {
        guard let idx = comments.index(where: { $0.commentHandle == oldHandle }) else { return }
        let commentToUpdate = comments[idx]
        commentToUpdate.commentHandle = newHandle
        comments[idx] = commentToUpdate
        feedModuleInput?.refreshData()
    }
}
// Events coming back from the embedded single-post feed module.
extension PostDetailPresenter: FeedModuleOutput {
    func didScrollFeed(_ feedView: UIScrollView) {
        print("feed did scroll in PostDetailPresenter")
    }
    func didStartRefreshingData() {
        print("didStartRefreshingData in PostDetailPresenter")
    }
    func didFinishRefreshingData(_ error: Error?) {
        print("didFinishRefreshingData in PostDetailPresenter")
        // On failure, redraw the post cell so stale state isn't shown.
        if let _ = error {
            view.refreshPostCell()
            return
        }
    }
    func didUpdateFeed() {
        view.refreshPostCell()
        // Post detail embeds the feed statically, so its own scrolling is locked.
        feedModuleInput?.lockScrolling()
    }
    /// Tapping the author avatar always opens the profile from this screen.
    func shouldOpenProfile(for userID: String) -> Bool {
        return true
    }
    /// Tapping the comments counter jumps to the comment list.
    func commentsPressed() {
        view.scrollCollectionViewToBottom()
    }
    /// The post itself was deleted: leave this screen entirely.
    func postRemoved() {
        guard let vc = view as? UIViewController else {
            return
        }
        router.backToFeed(from: vc)
    }
}
// Results of actions taken from the post/comment context menu.
extension PostDetailPresenter: PostMenuModuleOutput {
    func postMenuProcessDidStart() {
    }
    func postMenuProcessDidFinish() {
    }
    func didBlock(user: User) {
        Logger.log("Success")
    }
    func didUnblock(user: User) {
        Logger.log("Success")
    }
    /// Mark every matching comment author as followed and redraw that cell.
    /// NOTE(review): only the first matching comment is updated even if the
    /// user authored several comments — confirm this is intended.
    func didFollow(user: User) {
        guard let index = comments.index(where: { $0.user?.uid == user.uid }) else {
            return
        }
        comments[index].userStatus = .accepted
        view.refreshCell(index: index)
    }
    func didUnfollow(user: User) {
        guard let index = comments.index(where: { $0.user?.uid == user.uid }) else {
            return
        }
        comments[index].userStatus = .empty
        view.refreshCell(index: index)
    }
    func didRemove(comment: Comment) {
        removed(comment: comment)
    }
    func didReport(post: PostHandle) {
        Logger.log("Not implemented")
    }
    func didRequestFail(error: Error) {
        Logger.log("Reloading feed", error, event: .error)
    }
}
| 28.783537 | 113 | 0.615401 |
753871f8b5e525aa12fb97010294084a18e5bf27 | 320 | rs | Rust | gtk4/src/subclass/fixed.rs | melix99/gtk4-rs | c382c0536e995584a789a90605a0120ee1aa24eb | [
"MIT-0",
"MIT"
] | 62 | 2019-08-15T14:33:24.000Z | 2020-11-04T10:24:58.000Z | gtk4/src/subclass/fixed.rs | melix99/gtk4-rs | c382c0536e995584a789a90605a0120ee1aa24eb | [
"MIT-0",
"MIT"
] | 32 | 2019-08-13T19:50:27.000Z | 2020-11-05T09:54:52.000Z | gtk4/src/subclass/fixed.rs | melix99/gtk4-rs | c382c0536e995584a789a90605a0120ee1aa24eb | [
"MIT-0",
"MIT"
] | 7 | 2019-08-13T16:47:07.000Z | 2020-10-23T19:42:31.000Z | // Take a look at the license at the top of the repository in the LICENSE file.
// rustdoc-stripper-ignore-next
//! Traits intended for subclassing [`Fixed`](crate::Fixed).
use crate::subclass::prelude::*;
use crate::Fixed;
pub trait FixedImpl: WidgetImpl {}
unsafe impl<T: FixedImpl> IsSubclassable<T> for Fixed {}
| 26.666667 | 79 | 0.73125 |
40dfe64789b33f8b883de5c47dfe32eb03cfa7e7 | 37,626 | py | Python | mars/dataframe/align.py | wjsi/mars | a69fb19edfe748d4393b90ff2c4941a76c084596 | [
"Apache-2.0"
] | 1 | 2022-02-02T03:03:48.000Z | 2022-02-02T03:03:48.000Z | mars/dataframe/align.py | wjsi/mars | a69fb19edfe748d4393b90ff2c4941a76c084596 | [
"Apache-2.0"
] | null | null | null | mars/dataframe/align.py | wjsi/mars | a69fb19edfe748d4393b90ff2c4941a76c084596 | [
"Apache-2.0"
] | null | null | null | # Copyright 1999-2021 Alibaba Group Holding Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import itertools
import operator
import numpy as np
import pandas as pd
from .. import opcodes as OperandDef
from ..core import OutputType
from ..core.operand import OperandStage, MapReduceOperand
from ..serialization.serializables import (
FieldTypes,
AnyField,
BoolField,
Int32Field,
KeyField,
ListField,
)
from .core import SERIES_CHUNK_TYPE
from .utils import hash_dtypes, filter_dtypes
from .operands import DataFrameOperandMixin, DataFrameShuffleProxy
from .utils import (
parse_index,
split_monotonic_index_min_max,
build_split_idx_to_origin_idx,
filter_index_value,
hash_index,
)
class DataFrameIndexAlign(MapReduceOperand, DataFrameOperandMixin):
_op_type_ = OperandDef.DATAFRAME_INDEX_ALIGN
_index_min = AnyField("index_min")
_index_min_close = BoolField("index_min_close")
_index_max = AnyField("index_max")
_index_max_close = BoolField("index_max_close")
_index_shuffle_size = Int32Field("index_shuffle_size")
_column_min = AnyField("column_min")
_column_min_close = BoolField("column_min_close")
_column_max = AnyField("column_max")
_column_max_close = BoolField("column_max_close")
_column_shuffle_size = Int32Field("column_shuffle_size")
_column_shuffle_segments = ListField("column_shuffle_segments", FieldTypes.series)
_input = KeyField("input")
def __init__(
self,
index_min_max=None,
index_shuffle_size=None,
column_min_max=None,
column_shuffle_size=None,
column_shuffle_segments=None,
sparse=None,
dtype=None,
dtypes=None,
output_types=None,
**kw
):
if index_min_max is not None:
kw.update(
dict(
_index_min=index_min_max[0],
_index_min_close=index_min_max[1],
_index_max=index_min_max[2],
_index_max_close=index_min_max[3],
)
)
if column_min_max is not None:
kw.update(
dict(
_column_min=column_min_max[0],
_column_min_close=column_min_max[1],
_column_max=column_min_max[2],
_column_max_close=column_min_max[3],
)
)
super().__init__(
_index_shuffle_size=index_shuffle_size,
_column_shuffle_size=column_shuffle_size,
_column_shuffle_segments=column_shuffle_segments,
sparse=sparse,
_dtype=dtype,
_dtypes=dtypes,
_output_types=output_types,
**kw
)
@property
def index_min(self):
return self._index_min
@property
def index_min_close(self):
return self._index_min_close
@property
def index_max(self):
return self._index_max
@property
def index_max_close(self):
return self._index_max_close
@property
def index_min_max(self):
if getattr(self, "_index_min", None) is None:
return None
return (
self._index_min,
self._index_min_close,
self._index_max,
self._index_max_close,
)
@property
def index_shuffle_size(self):
return self._index_shuffle_size
@property
def column_min(self):
return self._column_min
@property
def column_min_close(self):
return self._column_min_close
@property
def column_max(self):
return self._column_max
@property
def column_max_close(self):
return self._column_max_close
@property
def column_min_max(self):
if getattr(self, "_column_min", None) is None:
return None
return (
self._column_min,
self._column_min_close,
self._column_max,
self._column_max_close,
)
@property
def column_shuffle_size(self):
return self._column_shuffle_size
@property
def column_shuffle_segments(self):
return self._column_shuffle_segments
def _set_inputs(self, inputs):
super()._set_inputs(inputs)
self._input = self._inputs[0]
def build_map_chunk_kw(self, inputs, **kw):
if kw.get("index_value", None) is None and inputs[0].index_value is not None:
input_index_value = inputs[0].index_value
index_min_max = self.index_min_max
if index_min_max is not None:
kw["index_value"] = filter_index_value(input_index_value, index_min_max)
else:
kw["index_value"] = parse_index(
inputs[0].index_value.to_pandas(),
input_index_value,
type(self).__name__,
)
if self.output_types[0] == OutputType.dataframe:
if (
kw.get("columns_value", None) is None
and getattr(inputs[0], "columns_value", None) is not None
):
input_columns_value = inputs[0].columns_value
input_dtypes = inputs[0].dtypes
column_min_max = self.column_min_max
if column_min_max is not None:
kw["columns_value"] = filter_index_value(
input_columns_value, column_min_max, store_data=True
)
else:
kw["columns_value"] = parse_index(
inputs[0].columns_value.to_pandas(),
input_columns_value,
type(self).__name__,
)
kw["dtypes"] = input_dtypes[kw["columns_value"].to_pandas()]
column_shuffle_size = self.column_shuffle_size
if column_shuffle_size is not None:
self._column_shuffle_segments = hash_dtypes(
input_dtypes, column_shuffle_size
)
else:
if (
kw.get("dtype", None) is None
and getattr(inputs[0], "dtype", None) is not None
):
kw["dtype"] = inputs[0].dtype
if (
kw.get("name", None) is None
and getattr(inputs[0], "name", None) is not None
):
kw["name"] = inputs[0].name
return kw
def build_reduce_chunk_kw(self, inputs, index, **kw):
kw["index"] = index
if (
kw.get("index_value", None) is None
and inputs[0].inputs[0].index_value is not None
):
index_align_map_chunks = inputs[0].inputs
if index_align_map_chunks[0].op.index_min_max is not None:
# shuffle on columns, all the DataFrameIndexAlignMap has the same index
kw["index_value"] = filter_index_value(
index_align_map_chunks[0].index_value,
index_align_map_chunks[0].op.index_min_max,
)
else:
# shuffle on index
kw["index_value"] = parse_index(
index_align_map_chunks[0].index_value.to_pandas(),
[c.key for c in index_align_map_chunks],
type(self).__name__,
)
if self.output_types[0] == OutputType.dataframe:
if (
kw.get("columns_value", None) is None
and getattr(inputs[0].inputs[0], "columns_value", None) is not None
):
index_align_map_chunks = inputs[0].inputs
if index_align_map_chunks[0].op.column_min_max is not None:
# shuffle on index
kw["columns_value"] = filter_index_value(
index_align_map_chunks[0].columns_value,
index_align_map_chunks[0].op.column_min_max,
store_data=True,
)
kw["dtypes"] = index_align_map_chunks[0].dtypes[
kw["columns_value"].to_pandas()
]
else:
# shuffle on columns
all_dtypes = [
c.op.column_shuffle_segments[index[1]]
for c in index_align_map_chunks
if c.index[0] == index_align_map_chunks[0].index[0]
]
kw["dtypes"] = pd.concat(all_dtypes)
kw["columns_value"] = parse_index(
kw["dtypes"].index, store_data=True
)
else:
if (
kw.get("dtype", None) is None
and getattr(inputs[0].inputs[0], "dtype", None) is not None
):
kw["dtype"] = inputs[0].inputs[0].dtype
if (
kw.get("name", None) is None
and getattr(inputs[0].inputs[0], "name", None) is not None
):
kw["name"] = inputs[0].inputs[0].name
return kw
@classmethod
def execute_map(cls, ctx, op):
# TODO(QIN): add GPU support here
df = ctx[op.inputs[0].key]
filters = [[], []]
chunk = op.outputs[0]
if op.index_shuffle_size == -1:
# no shuffle and no min-max filter on index
filters[0].append(slice(None, None, None))
elif op.index_shuffle_size is None:
# no shuffle on index
comp_op = operator.ge if op.index_min_close else operator.gt
index_cond = comp_op(df.index, op.index_min)
comp_op = operator.le if op.index_max_close else operator.lt
index_cond = index_cond & comp_op(df.index, op.index_max)
filters[0].append(index_cond)
else:
# shuffle on index
shuffle_size = op.index_shuffle_size
filters[0].extend(hash_index(df.index, shuffle_size))
if chunk.ndim == 1:
if len(filters[0]) == 1:
# no shuffle
ctx[chunk.key] = df.loc[filters[0][0]]
else:
for index_idx, index_filter in enumerate(filters[0]):
ctx[chunk.key, (index_idx,)] = df.loc[index_filter]
return
if op.column_shuffle_size == -1:
# no shuffle and no min-max filter on columns
filters[1].append(slice(None, None, None))
if op.column_shuffle_size is None:
# no shuffle on columns
comp_op = operator.ge if op.column_min_close else operator.gt
columns_cond = comp_op(df.columns, op.column_min)
comp_op = operator.le if op.column_max_close else operator.lt
columns_cond = columns_cond & comp_op(df.columns, op.column_max)
filters[1].append(columns_cond)
else:
# shuffle on columns
shuffle_size = op.column_shuffle_size
filters[1].extend(hash_index(df.columns, shuffle_size))
if all(len(it) == 1 for it in filters):
# no shuffle
ctx[chunk.key] = df.loc[filters[0][0], filters[1][0]]
elif len(filters[0]) == 1:
# shuffle on columns
for column_idx, column_filter in enumerate(filters[1]):
shuffle_index = (chunk.index[0], column_idx)
ctx[chunk.key, shuffle_index] = df.loc[filters[0][0], column_filter]
elif len(filters[1]) == 1:
# shuffle on index
for index_idx, index_filter in enumerate(filters[0]):
shuffle_index = (index_idx, chunk.index[1])
ctx[chunk.key, shuffle_index] = df.loc[index_filter, filters[1][0]]
else:
# full shuffle
shuffle_index_size = op.index_shuffle_size
shuffle_column_size = op.column_shuffle_size
out_idxes = itertools.product(
range(shuffle_index_size), range(shuffle_column_size)
)
out_index_columns = itertools.product(*filters)
for out_idx, out_index_column in zip(out_idxes, out_index_columns):
index_filter, column_filter = out_index_column
ctx[chunk.key, out_idx] = df.loc[index_filter, column_filter]
@classmethod
def execute_reduce(cls, ctx, op: "DataFrameIndexAlign"):
chunk = op.outputs[0]
input_idx_to_df = dict(op.iter_mapper_data_with_index(ctx))
row_idxes = sorted({idx[0] for idx in input_idx_to_df})
if chunk.ndim == 2:
col_idxes = sorted({idx[1] for idx in input_idx_to_df})
ress = []
for row_idx in row_idxes:
if chunk.ndim == 2:
row_dfs = []
for col_idx in col_idxes:
row_dfs.append(input_idx_to_df[row_idx, col_idx])
row_df = pd.concat(row_dfs, axis=1)
else:
row_df = input_idx_to_df[(row_idx,)]
ress.append(row_df)
ctx[chunk.key] = pd.concat(ress, axis=0)
@classmethod
def execute(cls, ctx, op):
if op.stage == OperandStage.map:
cls.execute_map(ctx, op)
else:
cls.execute_reduce(ctx, op)
class _AxisMinMaxSplitInfo(object):
    """Maps output-chunk indices on one axis back to origin chunks and splits.

    ``left_split``/``right_split`` hold, per original chunk, the list of
    (min, min_close, max, max_close) sub-ranges it was split into.  ``dummy``
    marks splits built from fake positional bounds rather than real index
    values (see ``_build_dummy_axis_split``).
    """
    def __init__(
        self, left_split, left_increase, right_split, right_increase, dummy=False
    ):
        self._left_split = left_split
        self._right_split = right_split
        self._dummy = dummy
        # flat output index -> (origin chunk idx, split idx within that chunk)
        self._left_split_idx_to_origin_idx = build_split_idx_to_origin_idx(
            self._left_split, left_increase
        )
        self._right_split_idx_to_origin_idx = build_split_idx_to_origin_idx(
            self._right_split, right_increase
        )
    def isdummy(self):
        # True when the splits are positional placeholders, not real bounds.
        return self._dummy
    def get_origin_left_idx(self, idx):
        # Left-side origin chunk index producing output index ``idx``.
        return self._left_split_idx_to_origin_idx[idx][0]
    def get_origin_left_split(self, idx):
        # Left-side (min, min_close, max, max_close) split for ``idx``.
        left_idx, left_inner_idx = self._left_split_idx_to_origin_idx[idx]
        return self._left_split[left_idx][left_inner_idx]
    def get_origin_right_idx(self, idx):
        # Right-side origin chunk index producing output index ``idx``.
        return self._right_split_idx_to_origin_idx[idx][0]
    def get_origin_right_split(self, idx):
        # Right-side (min, min_close, max, max_close) split for ``idx``.
        right_idx, right_inner_idx = self._right_split_idx_to_origin_idx[idx]
        return self._right_split[right_idx][right_inner_idx]
class _MinMaxSplitInfo(object):
def __init__(self, row_min_max_split_info=None, col_min_max_split_info=None):
self.row_min_max_split_info = row_min_max_split_info
self.col_min_max_split_info = col_min_max_split_info
def all_axes_can_split(self):
return (
self.row_min_max_split_info is not None
and self.col_min_max_split_info is not None
)
def one_axis_can_split(self):
return (self.row_min_max_split_info is None) ^ (
self.col_min_max_split_info is None
)
def no_axis_can_split(self):
return (
self.row_min_max_split_info is None and self.col_min_max_split_info is None
)
def __getitem__(self, i):
return [self.row_min_max_split_info, self.col_min_max_split_info][i]
def __setitem__(self, axis, axis_min_max_split_info):
assert axis in {0, 1}
if axis == 0:
self.row_min_max_split_info = axis_min_max_split_info
else:
self.col_min_max_split_info = axis_min_max_split_info
def get_row_left_idx(self, out_idx):
return self.row_min_max_split_info.get_origin_left_idx(out_idx)
def get_row_left_split(self, out_idx):
return self.row_min_max_split_info.get_origin_left_split(out_idx)
def get_col_left_idx(self, out_idx):
return self.col_min_max_split_info.get_origin_left_idx(out_idx)
def get_col_left_split(self, out_idx):
return self.col_min_max_split_info.get_origin_left_split(out_idx)
def get_row_right_idx(self, out_idx):
return self.row_min_max_split_info.get_origin_right_idx(out_idx)
def get_row_right_split(self, out_idx):
return self.row_min_max_split_info.get_origin_right_split(out_idx)
def get_col_right_idx(self, out_idx):
return self.col_min_max_split_info.get_origin_right_idx(out_idx)
def get_col_right_split(self, out_idx):
return self.col_min_max_split_info.get_origin_right_split(out_idx)
def get_axis_idx(self, axis, left_or_right, out_idx):
if axis == 0:
if left_or_right == 0:
return self.get_row_left_idx(out_idx)
else:
assert left_or_right == 1
return self.get_row_right_idx(out_idx)
else:
assert axis == 1
if left_or_right == 0:
return self.get_col_left_idx(out_idx)
else:
assert left_or_right == 1
return self.get_col_right_idx(out_idx)
def get_axis_split(self, axis, left_or_right, out_idx):
if axis == 0:
if left_or_right == 0:
return self.get_row_left_split(out_idx)
else:
assert left_or_right == 1
return self.get_row_right_split(out_idx)
else:
assert axis == 1
if left_or_right == 0:
return self.get_col_left_split(out_idx)
else:
assert left_or_right == 1
return self.get_col_right_split(out_idx)
def _get_chunk_index_min_max(index_chunks):
chunk_index_min_max = []
for chunk in index_chunks:
min_val = chunk.min_val
min_val_close = chunk.min_val_close
max_val = chunk.max_val
max_val_close = chunk.max_val_close
if min_val is None or max_val is None:
chunk_index_min_max.append((None, True, None, True))
else:
chunk_index_min_max.append((min_val, min_val_close, max_val, max_val_close))
return chunk_index_min_max
def _get_monotonic_chunk_index_min_max(index, index_chunks):
    """Return per-chunk bounds arranged in increasing order.

    Returns ``(bounds, increasing)`` where ``increasing`` is ``False`` when
    the chunks came from a monotonic-decreasing index and were reversed.
    Returns ``None`` (bare ``return``) when two consecutive chunks share a
    closed boundary value, i.e. their ranges overlap and no clean split
    exists; callers must handle that sentinel before unpacking.
    """
    chunk_index_min_max = _get_chunk_index_min_max(index_chunks)
    if index.is_monotonic_decreasing:
        return list(reversed(chunk_index_min_max)), False
    for j in range(len(chunk_index_min_max) - 1):
        # overlap only if the prev max is close and curr min is close
        # and they are identical
        prev_max, prev_max_close = chunk_index_min_max[j][2:]
        curr_min, curr_min_close = chunk_index_min_max[j + 1][:2]
        if prev_max_close and curr_min_close and prev_max == curr_min:
            return
    return chunk_index_min_max, True
def _need_align_map(
    input_chunk,
    index_min_max,
    column_min_max,
    dummy_index_splits=False,
    dummy_column_splits=False,
):
    """Return True when ``input_chunk`` needs a map-stage align operation.

    A chunk can be reused as-is only when its known index (and, for
    dataframes, columns) min/max already match the target splits; dummy
    splits skip the corresponding check entirely.
    """
    if isinstance(input_chunk, SERIES_CHUNK_TYPE):
        return (
            input_chunk.index_value is None
            or input_chunk.index_value.min_max != index_min_max
        )
    if not dummy_index_splits and (
        input_chunk.index_value is None
        or input_chunk.index_value.min_max != index_min_max
    ):
        return True
    if not dummy_column_splits and (
        input_chunk.columns_value is None
        or input_chunk.columns_value.min_max != column_min_max
    ):
        return True
    return False
def _is_index_identical(left, right):
if len(left) != len(right):
return False
for left_item, right_item in zip(left, right):
if left_item.key != right_item.key:
return False
return True
def _axis_need_shuffle(left_axis, right_axis, left_axis_chunks, right_axis_chunks):
    """Decide whether aligning this axis requires a shuffle.

    Identical chunking never needs one; otherwise a side that is neither
    monotonic increasing nor decreasing and has more than one chunk forces
    a shuffle.
    """
    if _is_index_identical(left_axis_chunks, right_axis_chunks):
        return False
    for axis, chunks in (
        (left_axis, left_axis_chunks),
        (right_axis, right_axis_chunks),
    ):
        if not axis.is_monotonic_increasing_or_decreasing and len(chunks) > 1:
            return True
    return False
def _calc_axis_splits(left_axis, right_axis, left_axis_chunks, right_axis_chunks):
    """Decide how to align one axis of two tileables.

    Returns ``(splits, nsplits)`` where exactly one element is ``None``:
    when a shuffle is required ``splits`` is ``None`` and ``nsplits`` is a
    list of ``nan`` chunk sizes, otherwise ``splits`` is an
    ``_AxisMinMaxSplitInfo`` and ``nsplits`` is ``None``.
    """
    if _axis_need_shuffle(left_axis, right_axis, left_axis_chunks, right_axis_chunks):
        # do shuffle
        out_chunk_size = max(len(left_axis_chunks), len(right_axis_chunks))
        return None, [np.nan for _ in range(out_chunk_size)]
    else:
        # no need to do shuffle on this axis
        if _is_index_identical(left_axis_chunks, right_axis_chunks):
            # identical chunking on both sides: reuse the bounds verbatim
            left_chunk_index_min_max = _get_chunk_index_min_max(left_axis_chunks)
            right_splits = left_splits = [[c] for c in left_chunk_index_min_max]
            right_increase = left_increase = None
        elif len(left_axis_chunks) == 1 and len(right_axis_chunks) == 1:
            # single chunk on each side: no real splitting to compute
            left_splits = [_get_chunk_index_min_max(left_axis_chunks)]
            left_increase = left_axis_chunks[0].is_monotonic_decreasing
            right_splits = [_get_chunk_index_min_max(right_axis_chunks)]
            right_increase = right_axis_chunks[0].is_monotonic_decreasing
        else:
            # NOTE(review): _get_monotonic_chunk_index_min_max may return a
            # bare None on overlapping chunk bounds, which would raise here
            # on unpacking -- confirm callers guarantee non-overlap.
            (
                left_chunk_index_min_max,
                left_increase,
            ) = _get_monotonic_chunk_index_min_max(left_axis, left_axis_chunks)
            (
                right_chunk_index_min_max,
                right_increase,
            ) = _get_monotonic_chunk_index_min_max(right_axis, right_axis_chunks)
            left_splits, right_splits = split_monotonic_index_min_max(
                left_chunk_index_min_max,
                left_increase,
                right_chunk_index_min_max,
                right_increase,
            )
        splits = _AxisMinMaxSplitInfo(
            left_splits, left_increase, right_splits, right_increase
        )
        return splits, None
def _build_dummy_axis_split(chunk_shape):
    """Build positional (fake) splits for an axis that needs no realignment.

    Each of the ``chunk_shape`` chunks gets the closed unit interval
    ``(i, True, i + 1, True)`` as its bounds, so the axis keeps its current
    chunking; the result is flagged ``dummy=True``.
    """
    axis_index_min_max = [(i, True, i + 1, True) for i in range(chunk_shape)]
    axis_increase = True
    if len(axis_index_min_max) == 1:
        # single chunk: both sides map onto the same trivial split
        left_splits = [axis_index_min_max]
        right_splits = [axis_index_min_max]
    else:
        left_splits, right_splits = split_monotonic_index_min_max(
            axis_index_min_max, axis_increase, axis_index_min_max, axis_increase
        )
    return _AxisMinMaxSplitInfo(
        left_splits, axis_increase, right_splits, axis_increase, dummy=True
    )
def _gen_series_chunks(splits, out_shape, left_or_right, series):
    """Generate aligned output chunks for one side of a series alignment.

    When ``splits[0]`` is available the chunks are sliced by min/max bounds
    (reusing a chunk verbatim when its bounds already match); otherwise a
    full map -> shuffle-proxy -> reduce pipeline is emitted.
    ``left_or_right`` selects which operand's splits to read (0 left, 1 right).
    """
    out_chunks = []
    if splits[0] is not None:
        # need no shuffle
        for out_idx in range(out_shape[0]):
            idx = splits.get_axis_idx(0, left_or_right, out_idx)
            index_min_max = splits.get_axis_split(0, left_or_right, out_idx)
            chunk = series.cix[(idx,)]
            if _need_align_map(chunk, index_min_max, None):
                align_op = DataFrameIndexAlign(
                    stage=OperandStage.map,
                    index_min_max=index_min_max,
                    column_min_max=None,
                    dtype=chunk.dtype,
                    sparse=series.issparse(),
                    output_types=[OutputType.series],
                )
                params = align_op.build_map_chunk_kw(
                    [chunk], shape=(np.nan,), index=(out_idx,)
                )
                out_chunk = align_op.new_chunk([chunk], **params)
            else:
                # chunk bounds already match the target split: reuse as-is
                out_chunk = chunk
            out_chunks.append(out_chunk)
    else:
        # gen map chunks
        map_chunks = []
        for chunk in series.chunks:
            map_op = DataFrameIndexAlign(
                stage=OperandStage.map,
                sparse=chunk.issparse(),
                index_shuffle_size=out_shape[0],
                output_types=[OutputType.series],
            )
            params = map_op.build_map_chunk_kw(
                [chunk], shape=(np.nan,), index=chunk.index
            )
            map_chunks.append(map_op.new_chunk([chunk], **params))
        proxy_chunk = DataFrameShuffleProxy(output_types=[OutputType.series]).new_chunk(
            map_chunks, shape=()
        )
        # gen reduce chunks
        for out_idx in range(out_shape[0]):
            reduce_op = DataFrameIndexAlign(
                stage=OperandStage.reduce,
                i=out_idx,
                sparse=proxy_chunk.issparse(),
                output_types=[OutputType.series],
            )
            params = reduce_op.build_reduce_chunk_kw(
                [proxy_chunk], index=(out_idx,), shape=(np.nan,)
            )
            out_chunks.append(reduce_op.new_chunk([proxy_chunk], **params))
    return out_chunks
def _gen_dataframe_chunks(splits, out_shape, left_or_right, df):
    """Generate aligned output chunks for one side of a dataframe alignment.

    Three strategies, chosen from ``splits``:
    * both axes splittable -> slice each chunk by min/max bounds,
    * one axis splittable  -> slice that axis, shuffle the other,
    * neither splittable   -> full 2-D shuffle via a shuffle proxy.
    ``left_or_right`` selects which operand's splits to read (0 left, 1 right).
    """
    out_chunks = []
    if splits.all_axes_can_split():
        # no shuffle for all axes
        kw = {
            "index_shuffle_size": -1 if splits[0].isdummy() else None,
            "column_shuffle_size": -1 if splits[1].isdummy() else None,
        }
        for out_idx in itertools.product(*(range(s) for s in out_shape)):
            row_idx = splits.get_axis_idx(0, left_or_right, out_idx[0])
            col_idx = splits.get_axis_idx(1, left_or_right, out_idx[1])
            index_min_max = splits.get_axis_split(0, left_or_right, out_idx[0])
            column_min_max = splits.get_axis_split(1, left_or_right, out_idx[1])
            chunk = df.cix[row_idx, col_idx]
            if _need_align_map(
                chunk,
                index_min_max,
                column_min_max,
                splits[0].isdummy(),
                splits[1].isdummy(),
            ):
                # dummy column splits keep the chunk's dtypes untouched
                if splits[1].isdummy():
                    dtypes = chunk.dtypes
                else:
                    dtypes = filter_dtypes(chunk.dtypes, column_min_max)
                chunk_kw = {
                    "index_value": chunk.index_value if splits[0].isdummy() else None,
                    "columns_value": chunk.columns_value
                    if splits[1].isdummy()
                    else None,
                    "dtypes": chunk.dtypes if splits[1].isdummy() else None,
                }
                align_op = DataFrameIndexAlign(
                    stage=OperandStage.map,
                    index_min_max=index_min_max,
                    column_min_max=column_min_max,
                    dtypes=dtypes,
                    sparse=chunk.issparse(),
                    output_types=[OutputType.dataframe],
                    **kw
                )
                params = align_op.build_map_chunk_kw(
                    [chunk], shape=(np.nan, np.nan), index=out_idx, **chunk_kw
                )
                out_chunk = align_op.new_chunk([chunk], **params)
            else:
                # chunk already matches the target bounds: reuse as-is
                out_chunk = chunk
            out_chunks.append(out_chunk)
    elif splits.one_axis_can_split():
        # one axis needs shuffle
        shuffle_axis = 0 if splits[0] is None else 1
        align_axis = 1 - shuffle_axis
        for align_axis_idx in range(out_shape[align_axis]):
            # build the map-op kwargs: min/max slicing on the aligned axis,
            # hash-partitioning on the shuffled axis
            if align_axis == 0:
                kw = {
                    "index_min_max": splits.get_axis_split(
                        align_axis, left_or_right, align_axis_idx
                    ),
                    "index_shuffle_size": -1 if splits[0].isdummy() else None,
                    "column_shuffle_size": out_shape[shuffle_axis],
                }
                input_idx = splits.get_axis_idx(
                    align_axis, left_or_right, align_axis_idx
                )
            else:
                kw = {
                    "column_min_max": splits.get_axis_split(
                        align_axis, left_or_right, align_axis_idx
                    ),
                    "index_shuffle_size": out_shape[shuffle_axis],
                    "column_shuffle_size": -1 if splits[1].isdummy() else None,
                }
                input_idx = splits.get_axis_idx(
                    align_axis, left_or_right, align_axis_idx
                )
            input_chunks = [c for c in df.chunks if c.index[align_axis] == input_idx]
            map_chunks = []
            for j, input_chunk in enumerate(input_chunks):
                chunk_kw = dict()
                if align_axis == 0:
                    chunk_kw["index_value"] = (
                        input_chunk.index_value if splits[0].isdummy() else None
                    )
                else:
                    chunk_kw["columns_value"] = (
                        input_chunk.columns_value if splits[1].isdummy() else None
                    )
                    chunk_kw["dtypes"] = input_chunk.dtypes
                map_op = DataFrameIndexAlign(
                    stage=OperandStage.map,
                    sparse=input_chunk.issparse(),
                    output_types=[OutputType.dataframe],
                    **kw
                )
                idx = [None, None]
                idx[align_axis] = align_axis_idx
                idx[shuffle_axis] = j
                params = map_op.build_map_chunk_kw(
                    [input_chunk], shape=(np.nan, np.nan), index=tuple(idx), **chunk_kw
                )
                map_chunks.append(map_op.new_chunk([input_chunk], **params))
            proxy_chunk = DataFrameShuffleProxy(
                sparse=df.issparse(), output_types=[OutputType.dataframe]
            ).new_chunk(map_chunks, shape=())
            # one reduce chunk per shard of the shuffled axis
            for j in range(out_shape[shuffle_axis]):
                chunk_kw = dict()
                if align_axis == 0:
                    chunk_kw["index_value"] = (
                        proxy_chunk.inputs[0].inputs[0].index_value
                        if splits[0].isdummy()
                        else None
                    )
                else:
                    chunk_kw["columns_value"] = (
                        proxy_chunk.inputs[0].inputs[0].columns_value
                        if splits[1].isdummy()
                        else None
                    )
                    chunk_kw["dtypes"] = proxy_chunk.inputs[0].inputs[0].dtypes
                reduce_idx = (
                    (align_axis_idx, j) if align_axis == 0 else (j, align_axis_idx)
                )
                reduce_op = DataFrameIndexAlign(
                    stage=OperandStage.reduce,
                    i=j,
                    sparse=proxy_chunk.issparse(),
                    output_types=[OutputType.dataframe],
                )
                params = reduce_op.build_reduce_chunk_kw(
                    [proxy_chunk], shape=(np.nan, np.nan), index=reduce_idx, **chunk_kw
                )
                out_chunks.append(reduce_op.new_chunk([proxy_chunk], **params))
        # restore row-major ordering of the emitted reduce chunks
        out_chunks.sort(key=lambda c: c.index)
    else:
        # all axes need shuffle
        assert splits.no_axis_can_split()
        # gen map chunks
        map_chunks = []
        for chunk in df.chunks:
            map_op = DataFrameIndexAlign(
                stage=OperandStage.map,
                sparse=chunk.issparse(),
                index_shuffle_size=out_shape[0],
                column_shuffle_size=out_shape[1],
                output_types=[OutputType.dataframe],
            )
            params = map_op.build_map_chunk_kw(
                [chunk], shape=(np.nan, np.nan), index=chunk.index
            )
            map_chunks.append(map_op.new_chunk([chunk], **params))
        proxy_chunk = DataFrameShuffleProxy(
            output_types=[OutputType.dataframe]
        ).new_chunk(map_chunks, shape=())
        # gen reduce chunks
        for out_idx in itertools.product(*(range(s) for s in out_shape)):
            reduce_op = DataFrameIndexAlign(
                stage=OperandStage.reduce,
                i=out_idx,
                sparse=proxy_chunk.issparse(),
                output_types=[OutputType.dataframe],
            )
            params = reduce_op.build_reduce_chunk_kw(
                [proxy_chunk], index=out_idx, shape=(np.nan, np.nan)
            )
            out_chunks.append(reduce_op.new_chunk([proxy_chunk], **params))
    return out_chunks
def align_dataframe_dataframe(left, right):
    """Align two chunked dataframes on both axes.

    Returns ``(nsplits, out_chunk_shape, left_chunks, right_chunks)``; an
    axis that needs shuffling gets ``nan`` nsplits since chunk sizes are
    unknown until execution.
    """
    left_index_chunks = [c.index_value for c in left.cix[:, 0]]
    left_columns_chunks = [c.columns_value for c in left.cix[0, :]]
    right_index_chunks = [c.index_value for c in right.cix[:, 0]]
    right_columns_chunks = [c.columns_value for c in right.cix[0, :]]
    index_splits, index_chunk_shape = _calc_axis_splits(
        left.index_value, right.index_value, left_index_chunks, right_index_chunks
    )
    columns_splits, column_chunk_shape = _calc_axis_splits(
        left.columns_value,
        right.columns_value,
        left_columns_chunks,
        right_columns_chunks,
    )
    splits = _MinMaxSplitInfo(index_splits, columns_splits)
    # per axis: chunk count from the shuffle plan, else from the split count
    out_chunk_shape = (
        len(index_chunk_shape or list(itertools.chain(*index_splits._left_split))),
        len(column_chunk_shape or list(itertools.chain(*columns_splits._left_split))),
    )
    left_chunks = _gen_dataframe_chunks(splits, out_chunk_shape, 0, left)
    right_chunks = _gen_dataframe_chunks(splits, out_chunk_shape, 1, right)
    if _is_index_identical(left_index_chunks, right_index_chunks):
        index_nsplits = left.nsplits[0]
    else:
        index_nsplits = [np.nan for _ in range(out_chunk_shape[0])]
    if _is_index_identical(left_columns_chunks, right_columns_chunks):
        columns_nsplits = left.nsplits[1]
    else:
        columns_nsplits = [np.nan for _ in range(out_chunk_shape[1])]
    nsplits = [index_nsplits, columns_nsplits]
    return nsplits, out_chunk_shape, left_chunks, right_chunks
def align_dataframe_series(left, right, axis="columns"):
    """Align a chunked dataframe with a chunked series along ``axis``.

    ``axis="columns"``/``1`` matches the series index against the dataframe
    columns; ``axis="index"``/``0`` matches it against the dataframe index.
    The dataframe's other axis keeps its chunking via dummy splits.
    Returns ``(nsplits, out_chunk_shape, left_chunks, right_chunks)``.
    """
    if axis == "columns" or axis == 1:
        left_columns_chunks = [c.columns_value for c in left.cix[0, :]]
        right_index_chunks = [c.index_value for c in right.chunks]
        index_splits, chunk_shape = _calc_axis_splits(
            left.columns_value,
            right.index_value,
            left_columns_chunks,
            right_index_chunks,
        )
        # rows are untouched: dummy splits preserve the current row chunking
        dummy_splits, dummy_nsplits = (
            _build_dummy_axis_split(left.chunk_shape[0]),
            left.nsplits[0],
        )
        out_chunk_shape = (
            len(dummy_nsplits),
            len(chunk_shape or list(itertools.chain(*index_splits._left_split))),
        )
        left_chunks = _gen_dataframe_chunks(
            _MinMaxSplitInfo(dummy_splits, index_splits), out_chunk_shape, 0, left
        )
        right_chunks = _gen_series_chunks(
            _MinMaxSplitInfo(index_splits, None), (out_chunk_shape[1],), 1, right
        )
        if _is_index_identical(left_columns_chunks, right_index_chunks):
            index_nsplits = left.nsplits[1]
        else:
            index_nsplits = [np.nan for _ in range(out_chunk_shape[1])]
        nsplits = [dummy_nsplits, index_nsplits]
    else:
        assert axis == "index" or axis == 0
        left_index_chunks = [c.index_value for c in left.cix[:, 0]]
        right_index_chunks = [c.index_value for c in right.chunks]
        index_splits, index_chunk_shape = _calc_axis_splits(
            left.index_value, right.index_value, left_index_chunks, right_index_chunks
        )
        # columns are untouched: dummy splits preserve the column chunking
        dummy_splits, dummy_nsplits = (
            _build_dummy_axis_split(left.chunk_shape[1]),
            left.nsplits[1],
        )
        out_chunk_shape = (
            len(index_chunk_shape or list(itertools.chain(*index_splits._left_split))),
            len(dummy_nsplits),
        )
        left_chunks = _gen_dataframe_chunks(
            _MinMaxSplitInfo(index_splits, dummy_splits), out_chunk_shape, 0, left
        )
        right_chunks = _gen_series_chunks(
            _MinMaxSplitInfo(index_splits, None), (out_chunk_shape[0],), 1, right
        )
        if _is_index_identical(left_index_chunks, right_index_chunks):
            index_nsplits = left.nsplits[0]
        else:
            index_nsplits = [np.nan for _ in range(out_chunk_shape[0])]
        nsplits = [index_nsplits, dummy_nsplits]
    return nsplits, out_chunk_shape, left_chunks, right_chunks
def align_series_series(left, right):
    """Align two chunked series on their (single) index axis.

    Returns ``(nsplits, out_chunk_shape, left_chunks, right_chunks)``; when
    a shuffle is needed the output chunk sizes are unknown (``nan``).
    """
    left_index_chunks = [c.index_value for c in left.chunks]
    right_index_chunks = [c.index_value for c in right.chunks]
    index_splits, index_chunk_shape = _calc_axis_splits(
        left.index_value, right.index_value, left_index_chunks, right_index_chunks
    )
    # chunk count from the shuffle plan, else from the number of splits
    out_chunk_shape = (
        len(index_chunk_shape or list(itertools.chain(*index_splits._left_split))),
    )
    splits = _MinMaxSplitInfo(index_splits, None)
    left_chunks = _gen_series_chunks(splits, out_chunk_shape, 0, left)
    right_chunks = _gen_series_chunks(splits, out_chunk_shape, 1, right)
    if _is_index_identical(left_index_chunks, right_index_chunks):
        index_nsplits = left.nsplits[0]
    else:
        index_nsplits = [np.nan for _ in range(out_chunk_shape[0])]
    nsplits = [index_nsplits]
    return nsplits, out_chunk_shape, left_chunks, right_chunks
| 38.006061 | 88 | 0.593552 |
544a529716cb448b90096dbc95ed4208acf6db92 | 1,437 | go | Go | bloomfilter.go | UncleKing/bloom-go | 11ea284d407b33bd1921122141513ec7c72dddba | [
"MIT"
] | null | null | null | bloomfilter.go | UncleKing/bloom-go | 11ea284d407b33bd1921122141513ec7c72dddba | [
"MIT"
] | null | null | null | bloomfilter.go | UncleKing/bloom-go | 11ea284d407b33bd1921122141513ec7c72dddba | [
"MIT"
] | null | null | null | package bloomfilter
// BloomFilter is a fixed-size bit array queried through two hash functions.
// An element is reported present only when both hashed bit positions are
// set, so lookups can yield false positives but never false negatives.
type BloomFilter struct {
	bitBuffer []uint64 // packed bit array, 64 bits per word
	size uint64 // number of usable bits; hashes are reduced modulo size
	hash1 Hasher // first hash function
	hash2 Hasher // second, independent hash function
	mask []uint64 // mask[i] has only bit i set; precomputed for fast lookups
}
// Default initializes the filter with the package default size and two
// SimpleHashers that differ only in their salt prefix.
func (bf *BloomFilter) Default() {
	bf.init(defaultSize)
	bf.hash1 = &SimpleHasher{prefix: randomString1}
	bf.hash2 = &SimpleHasher{prefix: randomString2}
}
// New configures the filter with a caller-supplied bit count and hash pair.
func (bf *BloomFilter) New(size uint64, hasher1 Hasher, hasher2 Hasher) {
	bf.init(size)
	bf.hash1 = hasher1
	bf.hash2 = hasher2
}
// init (re)allocates the backing bit array for size bits and precomputes the
// per-bit masks. Calling it again discards all previously recorded elements.
func (bf *BloomFilter) init(size uint64) {
	// One 64-bit word per 64 bits, rounded up. The previous computation
	// (size/64 + 1 for size > 64) allocated one extra, never-used word
	// whenever size was an exact multiple of 64.
	bitBufferSize := (size + 63) / 64
	if bitBufferSize == 0 {
		// keep at least one word so Add/Exists never index an empty slice
		bitBufferSize = 1
	}
	bf.bitBuffer = make([]uint64, bitBufferSize)
	bf.size = size
	// Precompute single-bit masks so Add/Exists avoid a shift per lookup.
	bf.mask = make([]uint64, 64)
	bf.mask[0] = 1
	for i := 1; i < 64; i++ {
		bf.mask[i] = bf.mask[i-1] << 1
	}
}
// Add records fo in the filter by setting the two bits its hashes select.
func (bf *BloomFilter) Add(fo FilterObject) {
	first := bf.hash1.Hash(fo) % bf.size
	second := bf.hash2.Hash(fo) % bf.size
	bf.bitBuffer[first/64] |= bf.mask[first%64]
	bf.bitBuffer[second/64] |= bf.mask[second%64]
}
// Exists reports whether fo may have been added. False positives are
// possible; false negatives are not.
func (bf *BloomFilter) Exists(fo FilterObject) bool {
	first := bf.hash1.Hash(fo) % bf.size
	if bf.bitBuffer[first/64]&bf.mask[first%64] == 0 {
		return false
	}
	second := bf.hash2.Hash(fo) % bf.size
	return bf.bitBuffer[second/64]&bf.mask[second%64] != 0
}
// Clear drops all recorded elements by reallocating the bit array at the
// current size; the hash functions are left untouched.
func (bf *BloomFilter) Clear() {
	bf.init(bf.size)
}
| 18.907895 | 80 | 0.63048 |
40c7af7ff227f068191e59bc73e1c83bccc2a1cb | 538 | html | HTML | _includes/java-jvm-zhouzhiming.html | jiafangtao/lesss.is.more | b8b0243d72e8b0636eed2a4ab908a9a5a0380485 | [
"MIT"
] | null | null | null | _includes/java-jvm-zhouzhiming.html | jiafangtao/lesss.is.more | b8b0243d72e8b0636eed2a4ab908a9a5a0380485 | [
"MIT"
] | 5 | 2016-12-20T08:43:55.000Z | 2022-02-26T05:55:39.000Z | _includes/java-jvm-zhouzhiming.html | jiafangtao/jiafangtao.github.io | 1aa8cf0d2fe80310a59aa0dde7ae745fb3e6c624 | [
"MIT"
] | null | null | null | <script type="text/javascript">
<!--
// Dangdang union ad configuration, read by the external dd_ads.js below.
// BUG FIX: the comment opener was written as "< !--", which is a JavaScript
// syntax error inside the script element; it must be "<!--".
dd_ad_output="html";
dd_ad_width = 180;
dd_ad_height = 282;
dd_ad_client = "P-314444";
dd_ad_format = 20;
dd_ad_id = 0;
dd_product_id = 23259731;
dd_img_size = 150;
dd_display_style = 0;
dd_text_url = "";
dd_color_text = "";
dd_color_bg = "";
dd_open_target = "_blank";
dd_border = 0;
dd_color_link = "";
dd_ad_text = "";
//--></script>
<script type="text/javascript" src="http://union.dangdang.com/union/script/dd_ads.js"></script> | 26.9 | 95 | 0.611524 |
810060d54cc593d090bf2f45200c4adeb0c5fe23 | 3,070 | rs | Rust | rg3d-ui/src/inspector/editors/vec.rs | vigdail/rg3d | b65bfdab350f8c1d48bcc288a8449cc74653ef51 | [
"MIT"
] | null | null | null | rg3d-ui/src/inspector/editors/vec.rs | vigdail/rg3d | b65bfdab350f8c1d48bcc288a8449cc74653ef51 | [
"MIT"
] | null | null | null | rg3d-ui/src/inspector/editors/vec.rs | vigdail/rg3d | b65bfdab350f8c1d48bcc288a8449cc74653ef51 | [
"MIT"
] | null | null | null | use crate::{
core::{
algebra::{Vector2, Vector3, Vector4},
inspect::PropertyInfo,
pool::Handle,
},
inspector::{
editors::{Layout, PropertyEditorBuildContext, PropertyEditorDefinition},
InspectorError,
},
message::{
FieldKind, MessageDirection, PropertyChanged, UiMessage, UiMessageData, Vec2EditorMessage,
Vec3EditorMessage, Vec4EditorMessage,
},
vec::{vec2::Vec2EditorBuilder, vec3::Vec3EditorBuilder, vec4::Vec4EditorBuilder},
widget::WidgetBuilder,
Thickness, UiNode,
};
use std::any::TypeId;
// Generates a property-editor definition type for one vector value type.
// Arguments: editor struct name, widget builder type, message type, the
// UiMessageData variant wrapping that message, and the vector value type.
macro_rules! define_vector_editor {
    ($name:ident, $builder:ty, $message:tt, $message_variant:ident, $value:ty) => {
        #[derive(Debug)]
        pub struct $name;
        impl PropertyEditorDefinition for $name {
            // TypeId of the value this editor handles.
            fn value_type_id(&self) -> TypeId {
                TypeId::of::<$value>()
            }
            // Builds the editor widget pre-filled with the property value.
            fn create_instance(
                &self,
                ctx: PropertyEditorBuildContext,
            ) -> Result<Handle<UiNode>, InspectorError> {
                let value = ctx.property_info.cast_value::<$value>()?;
                Ok(
                    <$builder>::new(WidgetBuilder::new().with_margin(Thickness::uniform(1.0)))
                        .with_value(*value)
                        .build(ctx.build_context),
                )
            }
            // Builds a sync message pushing the property value into the widget.
            fn create_message(
                &self,
                instance: Handle<UiNode>,
                property_info: &PropertyInfo,
            ) -> Result<UiMessage, InspectorError> {
                let value = property_info.cast_value::<$value>()?;
                Ok($message::value(
                    instance,
                    MessageDirection::ToWidget,
                    *value,
                ))
            }
            // Converts a widget-originated value message into a PropertyChanged.
            fn translate_message(
                &self,
                name: &str,
                owner_type_id: TypeId,
                message: &UiMessage,
            ) -> Option<PropertyChanged> {
                if message.direction() == MessageDirection::FromWidget {
                    if let UiMessageData::$message_variant($message::Value(value)) = message.data()
                    {
                        return Some(PropertyChanged {
                            owner_type_id,
                            name: name.to_string(),
                            value: FieldKind::object(*value),
                        });
                    }
                }
                None
            }
            fn layout(&self) -> Layout {
                Layout::Horizontal
            }
        }
    };
}
// Concrete editor definitions for the supported vector value types; each
// invocation expands to a `*PropertyEditorDefinition` struct and its
// `PropertyEditorDefinition` impl.
define_vector_editor!(
    Vec4PropertyEditorDefinition,
    Vec4EditorBuilder,
    Vec4EditorMessage,
    Vec4Editor,
    Vector4<f32>
);
define_vector_editor!(
    Vec3PropertyEditorDefinition,
    Vec3EditorBuilder,
    Vec3EditorMessage,
    Vec3Editor,
    Vector3<f32>
);
define_vector_editor!(
    Vec2PropertyEditorDefinition,
    Vec2EditorBuilder,
    Vec2EditorMessage,
    Vec2Editor,
    Vector2<f32>
);
| 29.238095 | 99 | 0.515309 |
262fef5d54452580f40cfb6e1ecc4a4fbcbecf96 | 1,801 | java | Java | chapter_005/src/main/java/exam/Switcher.java | hedg-r52/job4j | 890496986de8b8f402d50dc0603fea916b2f4b70 | [
"Apache-2.0"
] | null | null | null | chapter_005/src/main/java/exam/Switcher.java | hedg-r52/job4j | 890496986de8b8f402d50dc0603fea916b2f4b70 | [
"Apache-2.0"
] | 7 | 2019-11-13T10:48:36.000Z | 2021-04-26T19:15:42.000Z | chapter_005/src/main/java/exam/Switcher.java | hedg-r52/job4j | 890496986de8b8f402d50dc0603fea916b2f4b70 | [
"Apache-2.0"
] | null | null | null | package exam;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
/**
 * Accumulates digits appended by two worker threads that alternate turns
 * via a pair of semaphores (sem1 starts available, so worker 1 goes first).
 */
public class Switcher {
    private final StringBuilder sb;

    public static final int SEQUENCE_LENGTH = 10;

    final Semaphore sem1 = new Semaphore(1);
    final Semaphore sem2 = new Semaphore(0);

    public Switcher() {
        this.sb = new StringBuilder();
    }

    /** Appends the decimal representation of {@code number} to the buffer. */
    public void convertAndAdd(int number) {
        sb.append(number);
    }

    /** Returns the accumulated digit sequence. */
    public String getValue() {
        return sb.toString();
    }

    public static void main(String[] args) {
        Switcher sw = new Switcher();
        ExecutorService exec = Executors.newCachedThreadPool();
        exec.execute(new Adder(sw, sw.sem1, sw.sem2, 1));
        exec.execute(new Adder(sw, sw.sem2, sw.sem1, 2));
        exec.shutdown();
        // BUG FIX: shutdown() only stops accepting new tasks; the original
        // code printed immediately, racing with the still-running workers.
        // awaitTermination also establishes a happens-before edge, so the
        // StringBuilder contents are safely visible to this thread.
        try {
            if (!exec.awaitTermination(10, TimeUnit.SECONDS)) {
                exec.shutdownNow();
            }
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
        System.out.println(sw.getValue());
    }
}
class Adder implements Runnable {
final Switcher switcher;
final Semaphore semaphoreOwn;
final Semaphore semaphoreOther;
final int number;
private final int repeating = 5;
    /**
     * @param switcher       shared accumulator the digits are appended to
     * @param semaphoreOwn   permit this worker must acquire before writing
     * @param semaphoreOther permit released afterwards to hand over the turn
     * @param number         digit this worker appends
     */
    public Adder(Switcher switcher, Semaphore semaphoreOwn, Semaphore semaphoreOther, int number) {
        this.switcher = switcher;
        this.semaphoreOwn = semaphoreOwn;
        this.semaphoreOther = semaphoreOther;
        this.number = number;
    }
@Override
public void run() {
int i = 0;
while (i < repeating) {
try {
semaphoreOwn.acquire();
for (int j = 0; j < Switcher.SEQUENCE_LENGTH; j++) {
switcher.convertAndAdd(number);
}
} catch (InterruptedException e) {
e.printStackTrace();
}
semaphoreOther.release();
i++;
}
}
} | 26.485294 | 99 | 0.601333 |
af1396c032be86fccacbfa90a2e2730d011170e0 | 644 | rb | Ruby | lib/mongoid/matcher/in.rb | tstaetter/mongoid | ec8d4850a6c610cdf984c65bad095cc921a05b8b | [
"MIT"
] | 999 | 2015-07-27T18:40:22.000Z | 2022-03-31T02:58:17.000Z | lib/mongoid/matcher/in.rb | tstaetter/mongoid | ec8d4850a6c610cdf984c65bad095cc921a05b8b | [
"MIT"
] | 549 | 2015-08-14T08:02:43.000Z | 2022-03-31T18:39:23.000Z | lib/mongoid/matcher/in.rb | tstaetter/mongoid | ec8d4850a6c610cdf984c65bad095cc921a05b8b | [
"MIT"
] | 551 | 2015-07-27T12:13:48.000Z | 2022-03-26T08:19:10.000Z | module Mongoid
module Matcher
# @api private
module In
module_function def matches?(exists, value, condition)
unless Array === condition
raise Errors::InvalidQuery, "$in argument must be an array: #{Errors::InvalidQuery.truncate_expr(condition)}"
end
if Array === value
if value.any? { |v|
condition.any? do |c|
EqImplWithRegexp.matches?('$in', v, c)
end
} then
return true
end
end
condition.any? do |c|
EqImplWithRegexp.matches?('$in', value, c)
end
end
end
end
end
| 24.769231 | 119 | 0.53882 |
f000f73c7ff791dd3f202fae2e9cd2cdf7773f23 | 8,046 | py | Python | hera_cc_utils/catalog.py | pagano-michael/hera_cc_utils | 2d61f8ab0bb4d75b9a2e5891450256195851db08 | [
"MIT"
] | null | null | null | hera_cc_utils/catalog.py | pagano-michael/hera_cc_utils | 2d61f8ab0bb4d75b9a2e5891450256195851db08 | [
"MIT"
] | 6 | 2021-09-08T21:28:12.000Z | 2021-09-15T18:18:33.000Z | hera_cc_utils/catalog.py | pagano-michael/hera_cc_utils | 2d61f8ab0bb4d75b9a2e5891450256195851db08 | [
"MIT"
] | 1 | 2021-12-01T15:29:55.000Z | 2021-12-01T15:29:55.000Z | # -*- coding: utf-8 -*-
# Copyright (c) 2021 The HERA Collaboration
# Licensed under the MIT License
"""Utilities for dealing with galaxy/QSO catalogs."""
import numpy as np
import matplotlib.pyplot as plt
from astropy.coordinates import SkyCoord
from .util import deg_per_hr
_xshooter_ref = "https://ui.adsabs.harvard.edu/abs/2020ApJ...905...51S/abstract"
# VIKING
_viking_ref1 = "https://ui.adsabs.harvard.edu/abs/2013ApJ...779...24V/abstract"
_viking_ref2 = "https://ui.adsabs.harvard.edu/abs/2015MNRAS.453.2259V/abstract"
_viking = {
"J2348-3054": {
"ra": "23h48m33.34s",
"dec": "-30d54m10.0s",
"z": 6.886,
"ref": _viking_ref1,
},
"J0109-3047": {
"ra": "01h09m53.13s",
"dec": "-30d47m26.3s",
"z": 6.745,
"ref": _viking_ref1,
},
"J0305-3150": {
"ra": "03h05m16.92s",
"dec": "-31d50m56.0s",
"z": 6.604,
"ref": _viking_ref1,
},
"J0328-3253": {
"ra": "03h28m35.511s",
"dec": "-32d53m22.92s",
"z": 5.860,
"ref": _viking_ref2,
},
"J0046-2837": {
"ra": "00h46m23.645s",
"dec": "-28d37m47.34s",
"z": 5.9926,
"ref": _xshooter_ref,
},
"J2211-3206": {
"ra": "22h11m12.391s",
"dec": "-32d06m12.95s",
"z": 6.3394,
"ref": _xshooter_ref,
},
"J2318-3029": {
"ra": "23h18m33.103s",
"dec": "-30d29m33.36s",
"z": 6.1456,
"ref": _xshooter_ref,
},
"J2348-3054_xshooter": {
"ra": "23h48m33.336s",
"dec": "-30d54m10.24s",
"z": 6.9007,
"ref": _xshooter_ref,
},
}
# Pan-STARRS1
_ps1_ref1 = "https://ui.adsabs.harvard.edu/abs/2014AJ....148...14B/abstract"
_ps1_ref2 = "https://ui.adsabs.harvard.edu/abs/2017ApJ...849...91M/abstract"
_ps1 = {
"PSO 231-20": {"ra": "231.6576", "dec": "-20.8335", "z": 6.5864, "ref": _ps1_ref2},
"PSO J037.9706-28.8389": {
"ra": "02h31m52.96s",
"dec": "-28d50m20.1s",
"z": 5.99,
"ref": _ps1_ref1,
},
"PSO J065.4085-26.9543": {
"ra": "04h21m38.049s",
"dec": "-26d57m15.61s",
"z": 6.1871,
"ref": _xshooter_ref,
},
}
# Banados+ 2016 https://ui.adsabs.harvard.edu/abs/2016ApJS..227...11B/abstract
# has table of all z > 5.6 quasars known at that point (March 2016).
# https://ned.ipac.caltech.edu/inrefcode?search_type=Search&refcode=2016ApJS..227...11B
# VLT ATLAS
# https://ui.adsabs.harvard.edu/abs/2015MNRAS.451L..16C/abstract
_atlas_ref1 = "https://ui.adsabs.harvard.edu/abs/2015MNRAS.451L..16C/abstract"
_atlas_ref2 = "https://ui.adsabs.harvard.edu/abs/2018MNRAS.478.1649C/abstract"
_atlas = {
"J025.6821-33.4627": {
"ra": "025.6821",
"dec": "-33.4627",
"z": 6.31,
"ref": _atlas_ref1,
},
"J332.8017-32.1036": {
"ra": "332.8017",
"dec": "-32.1036",
"z": 6.32,
"ref": _atlas_ref2,
},
}
# VHS-DES
_ps1_vhs_des = "https://ui.adsabs.harvard.edu/abs/2019MNRAS.487.1874R/abstract"
_des = {
"VDES J0020-3653": {
"ra": "00h20m31.47s",
"dec": "-36d53m41.8s",
"z": 6.5864,
"ref": _ps1_vhs_des,
},
}
_yang = "https://ui.adsabs.harvard.edu/abs/2020ApJ...904...26Y/abstract"
_decarli = "https://ui.adsabs.harvard.edu/abs/2018ApJ...854...97D/abstract"
_other = {
"J0142−3327": {"ra": "0142", "dec": "-3327", "z": 6.3379, "ref": _yang},
"J0148−2826": {"ra": "0148", "dec": "-2826", "z": 6.54, "ref": _yang},
"J2002−3013": {"ra": "2002", "dec": "-3013", "z": 6.67, "ref": _yang},
"J2318–3113": {
"ra": "23h18m18.351s",
"dec": "-31d13m46.35s",
"z": 6.444,
"ref": _decarli,
},
}
def _to_decimal(s):
if "." in s:
out = float(s)
elif s[0] == "-":
out = float(s[0:3] + "." + s[3:])
else:
out = float(s[0:2] + "." + s[2:])
return out
_qso_catalogs = {"viking": _viking, "panstarrs": _ps1, "atlas": _atlas, "other": _other}
class Catalog(object):
    """
    Define a class for handling QSO catalogs.

    Parameters
    ----------
    data : str
        The type of data to handle. Right now "qso" is the only allowed value.
    kwargs : dict
        Keyword arguments to save directly on the object.
    """

    def __init__(self, data, **kwargs):
        # Store the data type and any extra options verbatim.
        self.data = data
        self.kwargs = kwargs

    def plot_catalog(
        self, ax=None, zmin=None, num=1, projection="rectilinear", **fig_kwargs
    ):
        """
        Plot a catalog using matplotlib.

        Parameters
        ----------
        ax : matplotlib axis object, optional
            The axes to use for plotting. If None, then a new figure and axis
            will be created.
        zmin : float, optional
            The minimum redshift to use for plotting objects.
        num : int, optional
            The figure number to create if `ax` is not provided.
        projection : str, optional
            The projection to use for plotting.
        kwargs : dict, optional
            Additional kwargs passed to matplotlib.pyplot.figure

        Returns
        -------
        ax : matplotlib axis object
            If `ax` is provided as a parameter, the same axis object. Otherwise,
            a new one.

        Raises
        ------
        NotImplementedError
            Raised if any projection besides "rectilinear" is passed.
        """
        if projection != "rectilinear":
            raise NotImplementedError("Only know rectilinear projection right now!")
        # Setup plot window: only create a figure (and label the axes below)
        # when the caller did not supply an existing axis.
        has_ax = True
        if ax is None:
            fig = plt.figure(num=num, **fig_kwargs)
            ax = fig.gca()
            has_ax = False
        # Get all objects in catalog.
        names, coords = self.get_all_pos(zmin=zmin)
        # One scatter call per object keeps matplotlib's per-call color
        # cycling; fine for small catalogs, could be vectorized if they grow.
        for ra, dec, _z in coords:
            ax.scatter(ra, dec)
        if not has_ax:
            ax.set_xlabel(r"Right Ascension [hours]", fontsize=24, labelpad=5)
            ax.set_ylabel(r"Declination [deg]", fontsize=24, labelpad=5)
        return ax

    def get_all_pos(self, zmin=None):
        """
        Return a list of (RA, DEC, redshift) for all objects.

        Parameters
        ----------
        zmin : float
            The minimum redshift to include for objects in the catalog.

        Returns
        -------
        names : list of str, shape (n_objects)
            The names of objects in the catalog.
        data : ndarray, shape (n_objects, 3)
            The RA [hour angle], dec [degree], and redshift of the objects.

        Raises
        ------
        ValueError
            This is raised if `self.data` is not "qso", as this is the only type
            of data we know how to handle right now.
        """
        if not self.data.lower().startswith("qso"):
            raise ValueError("Only know how to do QSOs right now.")

        data = []
        names = []
        for catalog in _qso_catalogs.values():
            for name, obj in catalog.items():
                # Apply the optional redshift cut.
                if zmin is not None and obj["z"] < zmin:
                    continue
                if "h" in obj["ra"]:
                    # Sexagesimal strings ("23h48m33s" / "-30d54m10s") carry
                    # their own units, so only the frame is needed.
                    kw = {"frame": "icrs"}
                    ra = obj["ra"]
                    dec = obj["dec"]
                else:
                    kw = {"unit": "degree", "frame": "icrs"}
                    if len(obj["ra"]) == 4:
                        # Compact 4-character "HHMM" strings are hour angles;
                        # convert to degrees before handing to SkyCoord.
                        ra = _to_decimal(obj["ra"]) * deg_per_hr
                    else:
                        ra = _to_decimal(obj["ra"])
                    dec = _to_decimal(obj["dec"])
                coord = SkyCoord(ra, dec, **kw)
                names.append(name)
                data.append((coord.ra.hour, coord.dec.degree, obj["z"]))
        return names, np.array(data)