text
stringlengths 27
775k
|
|---|
import { TypeTreeNode } from "../../Type Scope/TypeScope";
import { ITypeCheckingType } from "../ExpressionType";
import { Expression } from "./Expression";
/**
 * Expression node that refers to a type by name (rather than to a value).
 * The referenced type is resolved through the supplied TypeTreeNode.
 */
export class TypeReferenceExpression extends Expression {
  // Node-kind tag; presumably used for dispatch/printing elsewhere — confirm.
  public name = "TypeReferenceExpression";
  // Type-checking type of the referenced type; undefined when the tree node
  // has no typeCheckingType attached yet.
  public type: ITypeCheckingType | undefined;
  // Scope-tree node naming the referenced type.
  public node: TypeTreeNode;
  constructor(node: TypeTreeNode) {
    super();
    this.node = node;
    this.type = node.typeCheckingType;
    // A type reference is a leaf expression: it has no child expressions.
    this.children = [];
  }
}
|
<?php
namespace common\tests\unit\models;
use Codeception\Test\Unit;
use common\components\NonPresentAppleException;
use common\fixtures\AppleFixture;
use common\models\apple\Apple;
use common\tests\UnitTester;
use yii\base\Exception;
/**
 * Tests that present apples can be fetched from fixtures while apples dated
 * in the future raise NonPresentAppleException.
 */
class FutureEatingTest extends Unit
{
    /**
     * @var UnitTester
     */
    protected $tester;

    /**
     * Loads the apple fixtures used by every test in this class.
     */
    public function _fixtures()
    {
        return [
            'apple' => AppleFixture::class,
        ];
    }

    /**
     * Present apples load successfully with their expected status ids
     * (1 = hanging, 3 = rotten, 2 = fallen — inferred from the assertions;
     * confirm against the Apple status constants).
     */
    public function testPresentApples()
    {
        $this->tester->amGoingTo('Check obtaining of present apples');
        $this->tester->expectTo('obtain successfully');
        /** @var Apple $hanging */
        $hanging = $this->tester->grabFixture('apple', 'green hanging');
        $this->assertEquals(1, $hanging->status_id);
        /** @var Apple $rotten */
        $rotten = $this->tester->grabFixture('apple', 'green rotten bitten');
        $this->assertEquals(3, $rotten->status_id);
        /** @var Apple $fallen */
        $fallen = $this->tester->grabFixture('apple', 'green fallen bitten');
        $this->assertEquals(2, $fallen->status_id);
    }

    /**
     * Fetching an apple whose appearance date is in the future must throw
     * NonPresentAppleException; reaching the end of the try block is a failure.
     */
    public function testFutureApple()
    {
        $this->tester->amGoingTo('Check obtaining of future apple');
        $this->tester->expect('thrown exception');
        try {
            $this->tester->grabFixture('apple', 'yellow future');
        } catch (Exception $e) {
            $this->assertInstanceOf(NonPresentAppleException::class, $e);
            return;
        }
        $this->fail('should catch an exception');
    }
}
|
---
title: "SD-WAN"
linkTitle: "Document"
weight: 5
collapsible: true
icon: "/images/icons/index/product-icon-network.svg"
Section1:
children:
- title: 企业云网
url: "/sd-wan/sdwan/intro/intro/"
- title: 光盒
url: "/sd-wan/light-box/intro/intro/"
- title: SD-WAN(新版)
url: "/sd-wan/sdwan_new/intro/10_sdwan/"
---
|
//
// GLRCoreAppearControl.h
// GLRouter
//
// Created by liguoliang on 2020/10/10.
//
#import <UIKit/UIKit.h>
#import <GLRouter/GLRBase.h>
@interface GLRCoreAppearControl : GLRBase

/// Push a page onto a navigation stack.
/// @param cls Class of the page to push.
/// @param params Optional parameters applied to the page after initialization.
/// @param container Optional parent container. When nil, searches up to two
///   levels down from the root for a navigation controller to push onto; if
///   none is found the push fails (returns nothing).
/// @param handle Optional precondition block. When nil the push is unconditional.
- (void)pushTargetClass:(Class)cls
             withParams:(NSDictionary *)params
              container:(UIViewController *)container
            inCondition:(BOOL (^)(UIViewController *target))handle;

/// Present a page modally.
/// @param cls Class of the page to present.
/// @param params Optional parameters applied to the page after initialization.
/// @param container Optional presenting container. When nil, the key window's
///   rootViewController is used.
/// @param handle Optional precondition block. When nil the presentation is
///   unconditional.
- (void)presentTargetClass:(Class)cls
                withParams:(NSDictionary *)params
                 container:(UIViewController *)container
               inCondition:(BOOL (^)(UIViewController *target))handle;
@end
|
<?php
/**
 * Search controller: renders posts whose title matches the "query" GET
 * parameter.
 */
class Buscar extends Controller{
    public $MODULE = "buscar";
    public $TITLE = "Resultados de tu búsqueda";

    /**
     * Renders the search results page.
     */
    public function index(){
        // Default to an empty string when "query" is missing (avoids an
        // undefined-index notice on direct visits without a query).
        $query = isset($_GET["query"]) ? $_GET["query"] : "";
        // Build the filter with json_encode instead of string interpolation:
        // quotes/backslashes in user input can no longer break out of the
        // JSON structure (the original concatenated raw $_GET input).
        $filter = json_encode(array(
            "filter" => array(
                "title" => $query
            )
        ));
        $response = $this->model->getCollectionEntries("posts", $filter);
        $posts = $response->entries;
        require APP . 'view/_templates/header.php';
        require APP . 'view/busqueda/busqueda.php';
        require APP . 'view/_templates/footer.php';
    }
}
|
# Book Errata
A considerable amount of time was expended proofreading the book details.
Besides the author there were several other people who also looked for errors.
Sadly, not all errors were caught during production.
This document describes the errors found.
## Errors
### Preface
Page IX, 4th paragraph, second line, should read:
"spelled out in full. Thus, no abbreviated parameter names or positional parameters. This"
### Chapter 1 - Establishing a PowerShell Administrative Environment
Page 11 - small error in screen shots. they should not be different.
Page 12, 2nd Paragraph, penultimate line - the sentence should read: "And for versions later than 1803, the
mechanism may change again."
And with the passage of time since the book was written, the mechanism has indeed changed again.
Page 15 - Step 15 should read SRV1, not SRV2
### Chapter 2 -Managing Windows Networking
Page 65/66 - Step 6 is not actually shown. The output (on page 66) is from step 7, not step 6.
### Chapter 4 - Managing Windows Storage
Page 134 - The code is fine in the book, but the GITHUB copy had an error in step 5 which is fixed.
### Chapter 12 - Managing Azure
Page 446, last paragraph in intro = this recipe does not check whether the RG and the SA are already created.
Page 447, step 2 - the text is wrong, only login to the Azure account
Page 447. step 5 - the name of the share should be $ShareName.
### Chapter 14 - Managing Performance and Usage
page 467 - step 9, second line - the Counter should be $Counter2
Page 471 - photo for step 9 is likewise incorrect in terms of counter name even though the output values are correct.
Page 471 - the lead in to the graphic for step 9, the server name cited should be HV1.
Page 479 - the values for LogFileFormat mentioned in the text are wrong and should be like this:
```powershell
public enum LogFileFormat
{
    CommaSeparated = 0,
    TabSeparated = 1,
    Sql = 2,
    Binary = 3,
}
```
Page 482 - graphic for step 2 is incorrect.
Page 493 - step 13. Error in getting CPU numbers
(The script has been updated too.)
```powershell
# this
$VMReport.VMCPU = $VM.CPUUsage
# should be
$VMReport.VMCPU = $VM.ProcessorCount
```
|
// No copyright
#include "mnian/widget_history_tree.h"
#include <imgui.h>
#include <algorithm>
#include <cstring>
#include <queue>
#include <stack>
#include <vector>
#include "mnian/app.h"
namespace mnian {
// Registers this widget type with the deserializer registry so that saved
// projects can reconstruct a HistoryTreeWidget.
void HistoryTreeWidget::Register(core::DeserializerRegistry* reg) {
  reg->RegisterType<core::iWidget, HistoryTreeWidget>();
}
// Renders the history-tree window for the current frame.
void HistoryTreeWidget::Update() {
  if (ImGui::Begin(strId().c_str())) {
    // Collapse all expanded branches as soon as the pointer leaves the window.
    if (!ImGui::IsWindowHovered()) {
      open_.clear();
    }
    UpdateItem(&app_->project().history().origin());
  }
  // ImGui::End() must be called regardless of Begin()'s return value.
  ImGui::End();
}
// Renders `item` and its main-line descendants iteratively, recursing only
// into side branches of items present in `open_`. Returns true when an
// ancestor should stop rendering further branches.
bool HistoryTreeWidget::UpdateItem(core::History::Item* item, bool skip_text) {
  const auto& style = ImGui::GetStyle();
  const auto text_color = ImGui::GetStyleColorVec4(ImGuiCol_Text);
  const float indent_unit = style.IndentSpacing*1.f;
  const float bullet_size = ImGui::GetTreeNodeToLabelSpacing();
  const auto start_gpos = ImGui::GetCursorScreenPos();
  auto draw = ImGui::GetWindowDrawList();
  bool skip_branch = false;
  for (;;) {
    const auto& branch = item->branch();
    ImGui::PushID(item);
    // The current head is highlighted in red.
    ImVec4 col {1, 1, 1, 1};
    if (item == &app_->project().history().head()) {
      col = {1, 0, 0, 1};
    }
    ImGui::BeginGroup();
    ImGui::PushStyleColor(ImGuiCol_Text, col);
    ImGui::Bullet();
    if (!skip_text) {
      // Route the description through "%s": passing user-provided text as
      // the format string is undefined behavior when it contains '%'.
      ImGui::Text("%s", item->command().GetDescription().c_str());
    }
    ImGui::PopStyleColor();
    ImGui::EndGroup();
    ImGui::SameLine();
    if (ImGui::IsItemHovered()) {
      // Same format-string fix as above.
      ImGui::SetTooltip("%s", item->command().GetDescription().c_str());
      Hover(item);
    }
    if (ImGui::IsItemClicked()) {
      MoveTo(item);
    }
    ImGui::PopID();
    ImGui::NewLine();
    // draw the item's branches recursively
    const auto gpos = ImGui::GetCursorScreenPos();
    if (!skip_branch && open_.contains(item)) {
      const auto pos = ImGui::GetCursorPos();
      size_t i;
      for (i = 1; i < branch.size() && !skip_branch; ++i) {
        const float indent = indent_unit*static_cast<float>(i);
        ImGui::Indent(indent);
        if (UpdateItem(branch[branch.size()-i-1].get(), true)) {
          skip_branch = true;
        }
        ImGui::Unindent(indent);
        ImGui::SetCursorPos(pos);
      }
      skip_text = true;
      skip_branch = true;
      // Horizontal connector line spanning the expanded branches.
      const float w = static_cast<float>(i)*indent_unit;
      draw->AddLine({gpos.x +bullet_size/2.f, gpos.y},
                    {gpos.x+w-bullet_size/2.f, gpos.y},
                    ImGui::GetColorU32(text_color));
    }
    if (branch.size() == 0) {
      // Leaf reached: draw the vertical spine down from where we started.
      draw->AddLine({gpos.x+bullet_size/2.f, start_gpos.y},
                    {gpos.x+bullet_size/2.f, gpos.y},
                    ImGui::GetColorU32(text_color));
      return skip_branch;
    }
    // Follow the main line (last child) iteratively instead of recursing.
    item = branch.back().get();
  }
}
// Adjusts the set of expanded items when `item` is hovered: opens its own
// branch list (when it has more than one child) plus every ancestor that is
// not on its parent's main line, collapsing everything else.
void HistoryTreeWidget::Hover(core::History::Item* item) {
  if (open_.contains(item)) return;
  open_.clear();
  // Only expand the hovered item itself when there is an actual fork.
  if (item->branch().size() > 1) {
    open_.insert(item);
  }
  // Walk up to the origin, keeping open each ancestor whose main-line child
  // (branch().back()) is not on the path to the hovered item.
  while (!item->isOrigin()) {
    auto parent = &item->parent();
    if (parent->branch().back().get() != item) {
      open_.insert(parent);
    }
    item = parent;
  }
}
// Moves the history head to `item`: undoes back to the lowest common
// ancestor of the head and the target, then redoes down the path to the
// target.
void HistoryTreeWidget::MoveTo(core::History::Item* item) {
  auto& history = app_->project().history();
  auto* head = &history.head();
  auto lca = &item->FindLowestCommonAncestor(*head);
  // Rewind until the head sits on the common ancestor.
  while (&history.head() != lca) history.UnDo();
  // Collect the chain from the target up to (excluding) the LCA; the stack
  // reverses it into top-down order for replay.
  std::stack<core::History::Item*> path;
  for (auto itr = item; itr != lca; itr = &itr->parent()) {
    path.push(itr);
  }
  while (!path.empty()) {
    // Find which branch index leads toward the target, then redo into it.
    // NOTE(review): assumes path.top() is always found in branch — confirm
    // History guarantees this; otherwise the scan runs out of bounds.
    const auto& branch = history.head().branch();
    size_t i;
    for (i = 0; branch[i].get() != path.top(); ++i) {}
    history.ReDo(i);
    path.pop();
  }
}
// Reconstructs the widget from serialized state. The stored param is a
// placeholder (see SerializeParam), so only the deserializer's owning app
// is needed.
std::unique_ptr<HistoryTreeWidget> HistoryTreeWidget::DeserializeParam(
    core::iDeserializer* des) {
  // Removed the misleading `(void) des;` unused-parameter cast: `des` is in
  // fact dereferenced on the next line.
  return std::make_unique<HistoryTreeWidget>(&des->app());
}
// Serializes a placeholder value; the widget has no persistent parameters.
void HistoryTreeWidget::SerializeParam(core::iSerializer* serial) const {
  serial->SerializeValue(int64_t{0});
}
} // namespace mnian
|
import {CommunicationObserver, Headers} from '../results'
/**
 * Wraps a possibly-partial CommunicationObserver into a complete one.
 * The returned observer forwards to the supplied callbacks while enforcing
 * the protocol: data is delivered only before termination, and error or
 * complete is reported at most once.
 */
export default function completeCommunicationObserver(
  callbacks: Partial<CommunicationObserver<any>> = {}
): Omit<Required<CommunicationObserver<any>>, 'useCancellable'> {
  // 0 = receiving, 1 = terminated by error, 2 = terminated by completion
  let state = 0
  function forwardNext(data: any): void {
    const active = state === 0
    if (active && callbacks.next && data !== null && data !== undefined) {
      callbacks.next(data)
    }
  }
  function forwardError(error: Error): void {
    /* istanbul ignore else propagate error at most once */
    if (state === 0) {
      state = 1
      /* istanbul ignore else safety check */
      if (callbacks.error) callbacks.error(error)
    }
  }
  function forwardComplete(): void {
    if (state === 0) {
      state = 2
      /* istanbul ignore else safety check */
      if (callbacks.complete) callbacks.complete()
    }
  }
  function forwardResponseStarted(headers: Headers, statusCode?: number): void {
    // Unlike the terminal events, responseStarted is always forwarded.
    if (callbacks.responseStarted) {
      callbacks.responseStarted(headers, statusCode)
    }
  }
  return {
    next: forwardNext,
    error: forwardError,
    complete: forwardComplete,
    responseStarted: forwardResponseStarted,
  }
}
|
module Cms::Addon
  # Addon that stores the outcome of a link-check run on a document.
  module CheckLinks
    extend ActiveSupport::Concern
    extend SS::Addon

    included do
      # Collected link-check errors; the element structure is defined by the
      # checker elsewhere — confirm before relying on it.
      field :check_links_errors, type: Array
      # When the stored errors were last refreshed.
      field :check_links_errors_updated, type: DateTime
      # Documents that currently have link-check errors recorded.
      scope :has_check_links_errors, ->{ where(:check_links_errors.exists => true) }
    end
  end
end
|
## Help Wanted
- Firefox/Safari/Edge port
## Things I am aware of:
- Text replacement is aggressive and does not discriminate on what domains it runs on
- More than just the term "millennials" gets replaced
- There is a lack of a glossary. This is intended.
- I'll sometimes allow new terms, but not on any given schedule or via any published criteria
- There is no indication that the extension is running other than text has been replaced
- There is no count of how many words have been changed, either on a given page or historically
- The hypothesis of linguistic determinism is now generally agreed to be false
- Come at me, bloodmouths
|
> Docs ▸ **Table of Content**
### [Getting started](Getting-started.md)
### [API Reference](api/index.md)
* [Brief version of everything](api/index.md)
* module: [AbstractChart](api/AbstractChart.md)
* module: [CanvasChart](api/CanvasChart.md)
* module: [HybridChart](api/HybridChart.md)
* module: [SvgChart](api/SvgChart.md)
* module: [AbstractPlate](api/AbstractPlate.md)
* module: [CanvasPlate](api/CanvasPlate.md)
* module: [DivPlate](api/DivPlate.md)
* module: [SvgPlate](api/SvgPlate.md)
* module: [LayerOrganizer](api/LayerOrganizer.md)
* module: [helper](api/helper.md)
### [Gallery](Gallery.md)
### Development
* [Developing d3Kit](Developing.md)
* [Versioning](Versioning.md)
|
using System;
using System.Collections.Generic;
using System.IO;
using System.Json;
using System.Linq;
namespace chess_pos_db_gui
{
/// <summary>
/// Keeps a list of UCI engine profiles in memory and persists it as a JSON
/// array at a fixed file path.
/// </summary>
public class UciEngineProfileStorage
{
    // Path of the JSON file holding the serialized profile list.
    private string ProfileListPath { get; set; }

    // In-memory profile list; kept in sync with the file by the mutators.
    public IList<UciEngineProfile> Profiles { get; private set; }

    public UciEngineProfileStorage(string path)
    {
        ProfileListPath = path;
        Profiles = new List<UciEngineProfile>();
        DeserializeEngineList();
    }

    /// <summary>Whether a profile with the given name exists.</summary>
    public bool ExistsWithName(string name)
    {
        return GetByName(name) != null;
    }

    /// <summary>Adds a profile and persists the updated list.</summary>
    /// <exception cref="ArgumentException">A profile with the same name already exists.</exception>
    public void AddProfile(UciEngineProfile profile)
    {
        if (ExistsWithName(profile.Name))
        {
            throw new ArgumentException("Name already used");
        }
        profile.SetParent(this);
        Profiles.Add(profile);
        SerializeEngineList();
    }

    /// <summary>Returns the profile with the given name, or null.</summary>
    public UciEngineProfile GetByName(string name)
    {
        return Profiles.FirstOrDefault(p => p.Name == name);
    }

    /// <summary>
    /// Removes the named profile, if present, and persists the list.
    /// No-op when no such profile exists; the original dereferenced the
    /// lookup result unconditionally and threw NullReferenceException.
    /// </summary>
    public void RemoveProfileByName(string name)
    {
        var obj = GetByName(name);
        if (obj == null)
        {
            return;
        }
        obj.SetParent(null);
        Profiles.Remove(obj);
        SerializeEngineList();
    }

#pragma warning disable IDE0060 // Remove unused parameter
    /// <summary>Persists the list after a profile changed in place.</summary>
    public void OnProfileUpdated(UciEngineProfile profile)
#pragma warning restore IDE0060 // Remove unused parameter
    {
        SerializeEngineList();
    }

    // Loads the profile list from disk; a missing file leaves the list empty.
    private void DeserializeEngineList()
    {
        if (File.Exists(ProfileListPath))
        {
            var profileListJson = JsonValue.Parse(File.ReadAllText(ProfileListPath));
            foreach (JsonValue json in profileListJson)
            {
                Profiles.Add(new UciEngineProfile(this, json));
            }
        }
    }

    // Serializes every profile into a JSON array and writes it out, creating
    // the parent directory when needed.
    private void SerializeEngineList()
    {
        var json = new JsonArray();
        foreach (var profile in Profiles)
        {
            json.Add(profile.ToJson());
        }
        Directory.CreateDirectory(Path.GetDirectoryName(ProfileListPath));
        File.WriteAllText(ProfileListPath, json.ToString());
    }
}
}
|
#!/usr/bin/env bash
# Builds a GraalVM native-image bundle and deploys it with the Serverless
# framework. Aborts on the first failed command so a broken build is never
# deployed.
set -euo pipefail

APPDIR=target
RESDIR=src/main/resources
BUNDLEDIR=target/bundle
#Exchange with your ARN
LAMBDA_ROLE_ARN=arn:aws:iam::XXXXXXXX:role/yourlambdaarn

function bundle() {
    rm -rf "${BUNDLEDIR}"
    mvn clean package -Pnative-image -Dnative-image.docker-build=true
    mkdir -p "${BUNDLEDIR}"
    cp -r "${APPDIR}"/*-runner "${BUNDLEDIR}"
    ## You have to copy your cacerts & libsunec.so for the resource dir
    cp -r "${RESDIR}/cacerts" "${RESDIR}/bootstrap" "${RESDIR}/libsunec.so" "${BUNDLEDIR}"
    chmod 755 "${BUNDLEDIR}/bootstrap"
    # Zip inside a subshell so the caller's working directory is untouched
    # even if zip fails (the original 'cd dir; ...; cd -' left you stranded).
    (cd "${BUNDLEDIR}" && zip -q function.zip bootstrap cacerts libsunec.so ./*-runner)
}

echo "##### Bundle function #####"
bundle
echo "##### Deploying function #####"
sls deploy
|
@file:OptIn(ExperimentalSerializationApi::class)
package com.tajmoti.libtvprovider.kinox.model
import kotlinx.serialization.ExperimentalSerializationApi
import kotlinx.serialization.Serializable
import kotlinx.serialization.json.JsonNames
/**
 * One stream entry as returned by the Kinox API.
 * [JsonNames] maps the API's PascalCase JSON keys onto these properties.
 */
@Serializable
data class StreamReferenceObject(
    // The stream reference itself ("Stream" in the payload).
    @JsonNames("Stream")
    val stream: String,
    // Display name of the hosting site ("HosterName").
    @JsonNames("HosterName")
    val hosterName: String,
    // Home page of the hosting site ("HosterHome").
    @JsonNames("HosterHome")
    val hosterHome: String,
)
|
'use strict';
/**
 * @class
 * NOTE(review): the member names below deliberately contain '#', '~' and '.'
 * — this looks like a fixture exercising how documentation tooling escapes
 * special characters in names; confirm before renaming anything.
 */
function Socket() {
  /** Port number. */
  this['port#number'] = 0;
}

/** Open a connection. */
Socket.prototype['open~a.connection#now'] = function() {};
|
; Base area SMSQ system sprites
	section header

	xref	smsq_end

	include	'dev8_keys_con'
	include	'dev8_smsq_smsq_base_keys'
	include	'dev8_smsq_smsq_config_keys'
	include	'dev8_keys_qdos_sms'
	include	'dev8_keys_qdos_ioa'
	include	'dev8_keys_qdos_io'
	include	'dev8_keys_sys'

; Module header block; field meanings per the inline comments.
header_base
	dc.l	sys_sprt-header_base	; length of header
	dc.l	0	; module length unknown
	dc.l	smsq_end-sys_sprt	; loaded length
	dc.l	0	; checksum
	dc.l	0	; select
	dc.b	1	; 1 level down
	dc.b	0
	dc.w	smsq_name-*	; relative pointer to the module name below

; Counted module-name string.
smsq_name
	dc.w	20
	dc.w	'SMSQ System sprites '
	dc.l	' '
	dc.w	$200a

	section base

	xref	sp_table

; Just add all sprites we have to the system sprite table.
sys_sprt
	moveq	#sms.xtop,d0
	trap	#do.smsq	; SMSQ system trap
	move.l	sys_clnk(a6),d0
	beq.s	ss_exit	; bail out when the link is zero
	move.l	d0,a3
	move.l	pt_vecs(a3),a2	; vector table pointer
	lea	sp_table,a4
	move.l	a4,a5	; a5 = table base for relative entries
	move.w	(a4)+,d5	; that many sprites
	moveq	#0,d1	; sprite index counter
ss_loop
	move.l	(a4)+,a1	; get pointer to sprite
	add.l	a5,a1	; entries are stored relative to the table base
	jsr	pv_sspr(a2)	; install sprite d1 (presumably — confirm vector semantics)
	addq.w	#1,d1
	cmp.w	d5,d1
	blt.s	ss_loop	; repeat for all sprites
ss_exit
	rts

	end
|
/************************************************************
* Use of this source code is governed by a BSD-style license
* that can be found in the License file.
*
* Author : yefeng
* Email : yefeng38083120@126.com
* Create time : 2016-10-04 17:49
* Last modified : 2016-10-04 18:17
* Filename : Box.h
* Description :
* *********************************************************/
#ifndef BRIDGE_BOX_H_
# define BRIDGE_BOX_H_
class BoxImp;

// Abstraction side of the Bridge pattern: Box defines the interface and a
// concrete subclass delegates the actual work to a BoxImp implementor.
class Box
{
public:
    virtual ~Box() {}
    // Primitive operation; implemented by refined abstractions.
    virtual void operation() = 0;
protected:
    // Protected: Box instances are created only through subclasses.
    Box() {}
private:
};

// Refined abstraction holding a pointer to its implementor.
// NOTE(review): ownership of imp_ is not visible in this header — confirm
// whether the destructor deletes it.
class RedefinedBox : public Box
{
public:
    RedefinedBox(BoxImp* imp);
    ~RedefinedBox();
    void operation();
private:
    BoxImp* imp_;
};
#endif // BRIDGE_BOX_H_
|
---
title: 建立適用于 Azure VPN 用戶端的 Intune 設定檔
titleSuffix: Azure VPN Gateway
description: 瞭解如何建立 Intune 自訂設定檔以部署 Azure VPN 用戶端設定檔
author: cherylmc
ms.service: vpn-gateway
ms.topic: how-to
ms.date: 09/15/2020
ms.author: cherylmc
ms.openlocfilehash: 63b1d7f95711017feba52cad97f05128d83734da
ms.sourcegitcommit: 8e7316bd4c4991de62ea485adca30065e5b86c67
ms.translationtype: MT
ms.contentlocale: zh-TW
ms.lasthandoff: 11/17/2020
ms.locfileid: "94655165"
---
# <a name="create-an-intune-profile-to-deploy-vpn-client-profiles"></a>建立 Intune 設定檔以部署 VPN 用戶端設定檔
您可以使用 Microsoft Intune (Windows 10) 來部署 Azure VPN 用戶端的設定檔。 本文可協助您使用自訂設定來建立 Intune 設定檔。
> [!NOTE]
> 此方法僅適用于部署使用 Azure Active Directory 或一般憑證進行用戶端驗證的設定檔。 如果使用唯一的用戶端憑證,則每位使用者都必須在 Azure VPN Client 內手動選取正確的憑證。
>
## <a name="prerequisites"></a>先決條件
* 裝置已向 Intune MDM 註冊。
* 已在用戶端電腦上部署 Windows 10 的 Azure VPN Client。
* 僅支援 Windows 版本19H2 或更高版本。
## <a name="modify-xml"></a><a name="xml"></a>修改 XML
在下列步驟中,我們會使用適用于 Intune 之自訂 OMA-URI 設定檔的範例 XML,其具有下列設定:
* 自動連接開啟
* 受信任的網路偵測已啟用。
如需其他支援的選項,請參閱 [>VPNV2 CSP](/windows/client-management/mdm/vpnv2-csp) 文章。
1. 從 Azure 入口網站下載 VPN 設定檔,並從套件中解壓縮 *azurevpnconfig.xml* 檔案。
1. 將下列文字複製並貼到新的文字編輯器檔案中。
```xml-interactive
<VPNProfile>
<!--<EdpModeId>corp.contoso.com</EdpModeId>-->
<RememberCredentials>true</RememberCredentials>
<AlwaysOn>true</AlwaysOn>
<TrustedNetworkDetection>contoso.com,test.corp.contoso.com</TrustedNetworkDetection>
<DeviceTunnel>false</DeviceTunnel>
<RegisterDNS>false</RegisterDNS>
<PluginProfile>
<ServerUrlList>azuregateway-7cee0077-d553-4323-87df-069c331f58cb-053dd0f6af02.vpn.azure.com</ServerUrlList>
<CustomConfiguration>
</CustomConfiguration>
<PluginPackageFamilyName>Microsoft.AzureVpn_8wekyb3d8bbwe</PluginPackageFamilyName>
</PluginProfile>
</VPNProfile>
```
1. ```<ServerUrlList>``` ```</ServerUrlList>``` 使用您所下載設定檔中的專案來修改和之間的專案 ( # A0) 。 變更 "TrustedNetworkDetection" FQDN 以符合您的環境。
1. 開啟 Azure 下載的設定檔 ( # A0) ,藉由反白顯示文字,然後按 (ctrl) + C,將整個內容複寫到剪貼簿。
1. 將上一個步驟所複製的文字貼到您在步驟2中于標記之間建立的檔案 ```<CustomConfiguration> </CustomConfiguration>``` 。 使用 xml 副檔名來儲存檔案。
1. 記下標記中的值 ```<name> </name>``` 。 這是設定檔的名稱。 當您在 Intune 中建立設定檔時,您將需要此名稱。 關閉檔案,並記住儲存檔案的位置。
## <a name="create-intune-profile"></a>建立 Intune 設定檔
在本節中,您會建立具有自訂設定的 Microsoft Intune 設定檔。
1. 登入 Intune,然後流覽至 [ **裝置]->** 設定設定檔。 選取 [ **+ 建立設定檔**]。
:::image type="content" source="./media/create-profile-intune/configuration-profile.png" alt-text="組態設定檔":::
1. 針對 [平台] ,選取 [Windows 10 及更新版本] 。 針對 [ **設定檔**],選取 [ **自訂**]。 然後,選取 [Create] \(建立\)。
1. 提供設定檔的名稱和描述,然後選取 **[下一步]**。
1. 在 [ **設定設定** ] 索引標籤上,選取 [ **新增**]。
* **名稱:** 輸入設定的名稱。
* **描述:** 選擇性描述。
* **OMA-URI:** ```./User/Vendor/MSFT/VPNv2/<name of your connection>/ProfileXML``` (可以在 <name></name>標記) 的 azurevpnconfig.xml 檔案中找到這項資訊。
* **資料類型:** (XML 檔案) 的字串。
選取資料夾圖示,然後選取您在步驟6中儲存的檔案( [XML](#xml) 步驟)。 選取 [加入] 。
:::image type="content" source="./media/create-profile-intune/configuration-settings.png" alt-text="組態設定" lightbox="./media/create-profile-intune/configuration-settings.png":::
1. 選取 [下一步]。
1. 在 [指派] 底下,選取您要推送 **設定** 的群組。 然後,選取 [下一步]。
1. 適用性規則是選擇性的。 視需要定義任何規則,然後選取 **[下一步]**。
1. 在 [ **審核 + 建立** ] 頁面上,選取 [ **建立**]。
:::image type="content" source="./media/create-profile-intune/create-profile.png" alt-text="建立設定檔":::
1. 現在已建立您的自訂設定檔。 如需部署此設定檔的 Microsoft Intune 步驟,請參閱 [指派使用者和裝置設定檔](/mem/intune/configuration/device-profile-assign)。
## <a name="next-steps"></a>後續步驟
如需點對站詳細資訊,請參閱[關於點對站](point-to-site-about.md)。
|
require 'sqlite3'
require 'time'
# Thin wrapper around the clearance SQLite catalog: creates the table,
# rotates timestamps between runs, upserts rows, and prints summary counts.
class Database
  # Opens (touching into existence if needed) the database file and records
  # the wall-clock second this run started at.
  def initialize
    @timestamp = Time.now.to_i
    file = '/var/local/clearance.sqlite'
    system(%Q[touch "#{file}"])
    @sqlite = SQLite3::Database.new file
  end

  # Creates the catalog table when missing, then rotates every row's
  # latest_time into previous_time in preparation for a fresh crawl.
  def setup
    create_command = 'CREATE TABLE IF NOT EXISTS catalog
      (
      sku TEXT,
      url TEXT,
      latest_time NUM,
      previous_time NUM,
      PRIMARY KEY (sku)
      ON CONFLICT REPLACE
      )'
    @sqlite.execute(create_command)
    update_command = 'UPDATE catalog SET previous_time = latest_time'
    @sqlite.execute(update_command)
  end

  # Inserts or replaces a catalog row, carrying the row's prior latest_time
  # into previous_time via the correlated subquery.
  def upsert(sku, url)
    upsert_command = 'INSERT INTO catalog
      (sku, url, latest_time, previous_time)
      VALUES
      (?, ?, ?, (SELECT latest_time FROM catalog WHERE sku = ?))'
    @sqlite.execute(upsert_command, [sku, url, @timestamp, sku])
  end

  # Prints counts of brand-new, currently-seen, and unchanged rows.
  def report
    count_query = 'SELECT
      (SELECT COUNT(sku) FROM catalog WHERE previous_time IS NULL)
      AS new_count,
      (SELECT COUNT(sku) FROM catalog WHERE latest_time = (SELECT MAX(latest_time) FROM catalog))
      AS current_count,
      (SELECT COUNT(sku) FROM catalog WHERE latest_time = previous_time)
      AS old_count'
    @sqlite.execute(count_query) { |row| p row }
    # Debug helper retained from the original:
    # @sqlite.execute('SELECT * FROM catalog') { |row| p row }
  end
end
|
# Publishes the built test site (tests/client/wwwroot plus the Blazor dist
# output) to the gh-pages branch. The original header said src/wwwroot,
# which did not match what the script actually copies.
param ([string] $env = "local")
$msg = 'gh-pages.ps1: tests/client/wwwroot -> gh-pages'
$gitURL = "https://github.com/fsbolero/bolero"
write-host -foregroundColor "green" "=====> $msg"
# Removes the working clone so every run starts from a clean checkout.
function clearDir() {
    rm -r build/gh-pages -errorAction ignore
}
if ($env -eq "appveyor") {
    # CI mode: clone from GitHub and authenticate pushes via the GH_TOKEN secret.
    clearDir
    $d = mkdir -force build
    git clone $gitURL build/gh-pages
    cd build/gh-pages
    git config credential.helper "store --file=.git/credentials"
    $t = $env:GH_TOKEN
    $cred = "https://" + $t + ":@github.com"
    $d = pwd
    [System.IO.File]::WriteAllText("$pwd/.git/credentials", $cred)
    git config user.name "AppVeyor"
    git config user.email "websharper-support@intellifactory.com"
} else {
    # Local mode: clone the repository itself, so pushes stay local.
    clearDir
    cd build
    git clone .. gh-pages
    cd gh-pages
}
git checkout gh-pages
# Replace the branch contents wholesale with the freshly built site.
git rm -rf *
cp -r -force ../../tests/client/wwwroot/* .
# The site is served from /Bolero/, not the domain root.
(get-content '.\index.html').replace('<base href="/"', '<base href="/Bolero/"') | set-content '.\index.html'
cp -r -force ../../tests/client/bin/Release/netstandard2.0/dist/_framework .
cp -r -force ../../tests/client/bin/Release/netstandard2.0/dist/_content .
git add . 2>git.log
# --amend keeps gh-pages as a single commit; the -f push below rewrites it.
git commit --amend -am $msg
git push -f -u origin gh-pages
cd ../..
clearDir
write-host -foregroundColor "green" "=====> DONE"
|
package examples.java.receiver.publicfield;
/** Example of correct private-field usage: a post-increment counter. */
public class Correct {
    private int value = 0;

    /** Returns the current counter value, then advances the counter by one. */
    public int increment() {
        final int previous = value;
        value = previous + 1;
        return previous;
    }
}
|
/*
* NRF NFManagement Service
*
* NRF NFManagement Service
*
* API version: 1.0.1
* Generated by: OpenAPI Generator (https://openapi-generator.tech)
*/
package models
// NotificationType identifies the kind of event carried by an NRF
// NFManagement notification. Generated code: the values mirror the
// OpenAPI enum verbatim.
type NotificationType string

// List of NotificationType
const (
	NotificationType_N1_MESSAGES               NotificationType = "N1_MESSAGES"
	NotificationType_N2_INFORMATION            NotificationType = "N2_INFORMATION"
	NotificationType_LOCATION_NOTIFICATION     NotificationType = "LOCATION_NOTIFICATION"
	NotificationType_DATA_REMOVAL_NOTIFICATION NotificationType = "DATA_REMOVAL_NOTIFICATION"
	NotificationType_DATA_CHANGE_NOTIFICATION  NotificationType = "DATA_CHANGE_NOTIFICATION"
)
|
import { Task } from "./Task";
import { Agent } from "./Agent";
/**
 * Input model for an assignment problem instance.
 * NOTE(review): both fields are declared but never initialized here — they
 * are presumably populated by the caller; confirm before use.
 */
export class PulpAssignmentUniqueCost {
  // Tasks to be assigned.
  tasks: Task[];
  // Agents available to take tasks.
  agents: Agent[];
}
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Auth
{
    /// <summary>
    /// Abstraction over user persistence and role management.
    /// </summary>
    public interface IUserService
    {
        /// <summary>Returns the roles assigned to the given user.</summary>
        Task<IEnumerable<Role>> GetRolesAsync(User user);
        /// <summary>Persists a new user and returns the stored entity.</summary>
        Task<User> AddUserAsync(User user);
        /// <summary>Adds the user (identified by the string argument) to the named role.</summary>
        Task<User> AddUserToRoleAsync(string user, string role);
        /// <summary>Looks up a user by id.</summary>
        Task<User> GetUserByIdAsync(string id);
        /// <summary>Returns all users.</summary>
        Task<IEnumerable<User>> GetUsersAsync();
    }
}
|
import { ResetPasswordDto } from './dto/reset-password.dto';
import { BadRequestException, Injectable } from '@nestjs/common';
import { UsersRespository } from '../../repositories/users/users.respository';
import { LoginAuthDto } from './dto/login-auth.dto';
import { HelperService } from '../../services/helper/helper.service';
import { User } from '@prisma/client';
/**
 * Authentication flows: credential login, forget-password token issuance,
 * and token-based password reset.
 */
@Injectable()
export class AuthService {
  constructor(
    private usersRepository: UsersRespository,
    private helperService: HelperService,
  ) {}

  /**
   * Validates credentials and returns a signed JWT.
   * Throws BadRequestException with an identical message for unknown user
   * and wrong password, so callers cannot probe for existing accounts.
   */
  async login(loginAuthDto: LoginAuthDto) {
    const email = loginAuthDto.email;
    const user =
      await this.usersRepository.getOneByEmailOrUsernameOrPhoneNumber(email);
    if (!user) {
      throw new BadRequestException('Invalid login credentials');
    }
    const isPasswordMatch = await this.helperService.hashMatches({
      hash: user.password,
      plainText: loginAuthDto.password,
    });
    if (!isPasswordMatch) {
      throw new BadRequestException('Invalid login credentials');
    }
    return this.helperService.signJwt(user);
  }

  /**
   * Generates a forget-password token, stores it (with its expiration) on
   * the user record, and emails it to the user.
   */
  async forgetPassword(email: string): Promise<Partial<User>> {
    const user =
      await this.usersRepository.getOneByEmailOrUsernameOrPhoneNumber(email);
    if (!user) {
      throw new BadRequestException('Invalid email address');
    }
    const forgetPasswordToken =
      this.helperService.generateForgetPasswordToken();
    const forgetPasswordTokenExpiration =
      this.helperService.getForgetPasswordTokenExpiration();
    const update = await this.usersRepository.updateForgetPasswordTokenByUserId(
      {
        id: user.id,
        forgetPasswordToken,
        forgetPasswordTokenExpiration,
      },
    );
    await this.helperService.sendEmail({
      to: user.email,
      subject: 'Forget password ',
      html: forgetPasswordToken,
      title: 'Forget password',
    });
    return update;
  }

  /**
   * Resets the password of the user matching the given token and clears the
   * token and its expiration.
   * NOTE(review): only token presence is checked here — the stored
   * expiration is not compared against the current time; confirm that
   * getOneByForgetPasswordToken enforces expiry.
   */
  async resetPassword(data: ResetPasswordDto) {
    const user = await this.usersRepository.getOneByForgetPasswordToken(
      data.forgetPasswordToken,
    );
    if (!user) {
      throw new BadRequestException('Invalid or expired token');
    }
    const hashPassword = await this.helperService.hash(data.password);
    // Await the update so failures propagate to the caller instead of
    // becoming an unhandled promise rejection (the original fired and
    // forgot this call).
    await this.usersRepository.updatePasswordByUserId({
      id: user.id,
      password: hashPassword,
      forgetPasswordToken: null,
      forgetPasswordTokenExpiration: null,
    });
  }
}
|
use ckb_occupied_capacity::Result as CapacityResult;
use crate::{core::Capacity, packed, prelude::*};
impl packed::Script {
    /// Capacity occupied by this script when stored on-chain: the total
    /// byte length of all args plus 33 fixed bytes (presumably the 32-byte
    /// code hash plus the 1-byte hash type — confirm against the schema).
    pub fn occupied_capacity(&self) -> CapacityResult<Capacity> {
        Capacity::bytes(
            self.args()
                .into_iter()
                .map(|arg| arg.as_reader().as_unpack_slice().len())
                .sum::<usize>()
                + 32
                + 1,
        )
    }
}
impl packed::CellOutput {
    /// Capacity this output actually occupies: 8 bytes for the capacity
    /// field itself, plus the cell data, the lock script, and (when
    /// present) the type script. All additions are overflow-checked.
    pub fn occupied_capacity(&self, data_capacity: Capacity) -> CapacityResult<Capacity> {
        Capacity::bytes(8)
            .and_then(|x| x.safe_add(data_capacity))
            .and_then(|x| self.lock().occupied_capacity().and_then(|y| y.safe_add(x)))
            .and_then(|x| {
                // A missing type script contributes zero capacity.
                self.type_()
                    .to_opt()
                    .as_ref()
                    .map(packed::Script::occupied_capacity)
                    .transpose()
                    .and_then(|y| y.unwrap_or_else(Capacity::zero).safe_add(x))
            })
    }

    /// Whether the declared capacity is smaller than the space this output
    /// actually occupies.
    pub fn is_lack_of_capacity(&self, data_capacity: Capacity) -> CapacityResult<bool> {
        self.occupied_capacity(data_capacity)
            .map(|cap| cap > self.capacity().unpack())
    }
}
impl packed::CellOutputBuilder {
    /// Builds the output with its capacity field set to exactly the
    /// occupied capacity: the 8-byte capacity field plus data, lock, and
    /// optional type script — mirroring `CellOutput::occupied_capacity`.
    pub fn build_exact_capacity(
        self,
        data_capacity: Capacity,
    ) -> CapacityResult<packed::CellOutput> {
        Capacity::bytes(8)
            .and_then(|x| x.safe_add(data_capacity))
            .and_then(|x| self.lock.occupied_capacity().and_then(|y| y.safe_add(x)))
            .and_then(|x| {
                // A missing type script contributes zero capacity.
                self.type_
                    .to_opt()
                    .as_ref()
                    .map(packed::Script::occupied_capacity)
                    .transpose()
                    .and_then(|y| y.unwrap_or_else(Capacity::zero).safe_add(x))
            })
            .map(|x| self.capacity(x.pack()).build())
    }
}
impl packed::CellOutputVec {
    /// Sum of the declared capacities of all outputs, using overflow-checked
    /// addition throughout.
    pub fn total_capacity(&self) -> CapacityResult<Capacity> {
        self.as_reader()
            .iter()
            .map(|output| {
                let cap: Capacity = output.capacity().unpack();
                cap
            })
            .try_fold(Capacity::zero(), Capacity::safe_add)
    }
}
|
<?php
/**
 * Tests for handling of the CSS "page-break-after" property in the
 * rendering pipeline.
 */
class TestCSSPageBreakAfter extends GenericTest {
    /**
     * The element's computed page-break-after value should be "avoid".
     */
    function testCSSPageBreakAfter1() {
        $tree = $this->runPipeline(file_get_contents('test.css.page.break.after.1.html'));
        $div = $tree->get_element_by_id('div');
        $this->assertEqual(PAGE_BREAK_AVOID, $div->getCSSProperty(CSS_PAGE_BREAK_AFTER));
    }
    /**
     * A forced break after the div should split the document into two pages,
     * with the first page exactly as tall as the div.
     * NOTE(review): $media is used before any visible assignment — it is
     * presumably populated by runPipeline via a by-reference parameter;
     * confirm, otherwise this operates on an undefined variable.
     */
    function testCSSPageBreakAfter2() {
        $tree = $this->runPipeline(file_get_contents('test.css.page.break.after.2.html'),
        $media);
        $page_heights = PageBreakLocator::getPages($tree,
        mm2pt($media->real_height()),
        mm2pt($media->height() - $media->margins['top']));
        $this->assertEqual(count($page_heights), 2);
        $div = $tree->get_element_by_id('div');
        $h1 = $tree->get_element_by_id('h1');
        $this->assertEqual($page_heights[0], $div->get_full_height());
    }
}
?>
|
package nodemcu
import (
"bufio"
"bytes"
"errors"
"fmt"
"io"
"io/ioutil"
"log"
"os"
"path/filepath"
"strconv"
"strings"
"time"
"github.com/tarm/serial"
)
var (
errUnexpectedData = errors.New("Unexpected data")
errACKFail = errors.New("ACK failure")
errNotReady = errors.New("Device isn't ready to receive data")
)
// NodeMCU is the main data structure
type NodeMCU struct {
	cfg    *serial.Config // serial-port configuration (not used in this chunk — presumably for opening port)
	port   *serial.Port   // open serial connection to the device
	logger *log.Logger    // destination for progress/debug output
	ackBuf []byte         // one-byte scratch buffer, allocated lazily by ReadACK
	GPIO   *GPIOModule    // GPIO submodule bound to this device
}

// File wraps FS ops
type File struct {
	Name string   // file name on the device's filesystem
	Size int      // size in bytes as reported by the device
	node *NodeMCU // owning device connection
}
// Remove removes a file from the device's filesystem via file.remove() and
// drains the REPL response.
func (f *File) Remove() error {
	s := fmt.Sprintf("file.remove(\"%s\")\r\n", f.Name)
	// Fixed copy-pasted log text: this is Remove, not Run.
	f.node.logger.Printf("Remove is called: %s\n", s)
	err := f.node.WriteString(s)
	if err != nil {
		return err
	}
	_, err = f.node.ReadStrings()
	return err
}
// Run is an alias for NodeMCU.Run
// It executes this file on the device via dofile().
func (f *File) Run() error {
	return f.node.Run(f.Name)
}
// HardwareInfo contains hardware info.
// Field values come from the device table parsed in NodeMCU.HardwareInfo.
type HardwareInfo struct {
	ChipID     int // "chip_id" as reported by the device
	FlashSize  int // "flash_size"
	FlashMode  int // "flash_mode"
	FlashSpeed int // "flash_speed"
	FlashID    int // "flash_id"
}
// WriteString sends the given string to the device over the serial port,
// returning any write error.
func (n *NodeMCU) WriteString(input string) error {
	_, err := n.port.Write([]byte(input))
	return err
}
// ReadStrings reads multiline output
// It blocks until the next '>' prompt character, then returns the output
// split on CRLF line endings. The port is flushed on return.
func (n *NodeMCU) ReadStrings() ([]string, error) {
	defer n.port.Flush()
	reader := bufio.NewReader(n.port)
	text, err := reader.ReadString('>')
	if err != nil {
		return nil, err
	}
	splits := strings.Split(text, "\r\n")
	return splits, nil
}
// fixStrings strips the echoed command (first line) and the trailing prompt
// line from output produced by ReadStrings.
func (n *NodeMCU) fixStrings(splits []string) []string {
	// Guard against short input: slicing [1:len-1] panics on fewer than
	// two elements.
	if len(splits) < 2 {
		return nil
	}
	return splits[1 : len(splits)-1]
}
// TODO: handle errors
// parseTab converts "key | value" lines into a map. When intValue is true,
// values are converted with strconv.Atoi; a failed conversion currently
// stores 0 silently (see TODO above).
func (n *NodeMCU) parseTab(input []string, intValue bool) (map[string]interface{}, error) {
	tab := make(map[string]interface{}, 0)
	for _, ln := range input {
		// Skip lines that are not key|value pairs.
		if !strings.Contains(ln, "|") {
			continue
		}
		splits := strings.Split(ln, "|")
		key := strings.TrimSpace(splits[0])
		val := strings.TrimSpace(splits[1])
		if !intValue {
			tab[key] = val
			continue
		}
		i, _ := strconv.Atoi(val)
		tab[key] = i
	}
	return tab, nil
}
// Sync runs test code
// TODO: add timeout handler
// It repeatedly asks the REPL to print 1024*2 and returns once "2048"
// appears in the echoed output, proving the device is responsive.
// NOTE(review): loops forever if the device never answers (see TODO).
func (n *NodeMCU) Sync() error {
	defer n.port.Flush()
	n.logger.Println("Sync is called")
	for {
		if err := n.WriteString("print(1024*2);\r\n"); err != nil {
			return err
		}
		output, err := n.ReadStrings()
		if err != nil {
			return err
		}
		if len(output) == 0 {
			return errUnexpectedData
		}
		for _, ln := range output {
			if strings.Contains(ln, "2048") {
				n.logger.Println("Sync ok")
				return nil
			}
		}
	}
}
// ListFiles returns a list of File entries, including file size, by running
// the device-side listing code and parsing its "name | size" output lines.
func (n *NodeMCU) ListFiles() ([]File, error) {
	n.logger.Println("ListFiles is called")
	files := make([]File, 0)
	// Propagate write failures instead of silently parsing stale output
	// (the original ignored this error).
	if err := n.WriteString(listFilesCode); err != nil {
		return nil, err
	}
	output, err := n.ReadStrings()
	if err != nil {
		return nil, err
	}
	output = n.fixStrings(output)
	for _, v := range output {
		splits := strings.Split(v, "|")
		// Skip lines without a "name | size" shape; indexing splits[1]
		// unconditionally would panic on them.
		if len(splits) < 2 {
			continue
		}
		sz, err := strconv.Atoi(strings.TrimSpace(splits[1]))
		if err != nil {
			continue
		}
		name := strings.TrimSpace(splits[0])
		f := File{Name: name, Size: sz, node: n}
		files = append(files, f)
	}
	n.logger.Printf("Found %d files\n", len(files))
	return files, nil
}
// Run invokes an existing Lua script
// TODO: capture output
// It sends dofile("<filename>") and drains the REPL response.
func (n *NodeMCU) Run(filename string) error {
	s := fmt.Sprintf("dofile(\"%s\")\r\n", filename)
	n.logger.Printf("Run is called: %s\n", s)
	err := n.WriteString(s)
	if err != nil {
		return err
	}
	_, err = n.ReadStrings()
	return err
}
// HardwareInfo gets HW info
// It runs the device-side info code and maps the returned "key | value"
// table into a HardwareInfo struct.
// NOTE(review): the WriteString error is ignored here — a failed write
// leads to parsing stale output; consider propagating it.
func (n *NodeMCU) HardwareInfo() (*HardwareInfo, error) {
	n.logger.Println("HardwareInfo is called")
	n.WriteString(hardwareInfoCode)
	output, err := n.ReadStrings()
	if err != nil {
		return nil, err
	}
	output = n.fixStrings(output)
	m, err := n.parseTab(output, true)
	if err != nil {
		return nil, err
	}
	hwInfo := &HardwareInfo{}
	// Map the known device keys onto struct fields; unknown keys are ignored.
	for k, v := range m {
		switch k {
		case "chip_id":
			hwInfo.ChipID = v.(int)
		case "flash_size":
			hwInfo.FlashSize = v.(int)
		case "flash_mode":
			hwInfo.FlashMode = v.(int)
		case "flash_speed":
			hwInfo.FlashSpeed = v.(int)
		case "flash_id":
			hwInfo.FlashID = v.(int)
		}
	}
	return hwInfo, nil
}
// ReadACK reads a single byte from the port and verifies it is the device's
// ACK marker (0x06).
func (n *NodeMCU) ReadACK() error {
	// Lazily allocate the one-byte scratch buffer.
	if n.ackBuf == nil {
		n.ackBuf = make([]byte, 1)
	}
	readBytes, err := n.port.Read(n.ackBuf)
	if err != nil {
		return err
	}
	// The original re-checked `err != nil` here as well, which was dead
	// code: err is always nil past the early return above.
	if readBytes == 0 {
		return errACKFail
	}
	if n.ackBuf[0] != 0x06 {
		return errACKFail
	}
	n.logger.Println("ACK ok")
	return nil
}
// ReadyToRecv checks if the node is ready to receive data: it reads up to
// 64 bytes from the port and looks for the 'C' ready marker among the bytes
// actually received.
func (n *NodeMCU) ReadyToRecv() bool {
	n.logger.Println("ReadyToRecv is called")
	signalBuf := make([]byte, 64)
	signalReadBytes, err := n.port.Read(signalBuf)
	if err != nil || signalReadBytes == 0 {
		return false
	}
	// Only inspect the bytes that were actually read; scanning the whole
	// 64-byte buffer would look at stale/zeroed bytes past the read length.
	return bytes.ContainsRune(signalBuf[:signalReadBytes], 'C')
}
// SendFile uploads a file to the device.
//
// Protocol (as implemented here): load the Lua receiver (recvCode), invoke
// recv(), wait for the device to signal readiness ('C'), send the
// NUL-terminated base file name, then stream the file as frames of
// [0x01, payloadLen, 128 payload bytes] (zero-padded past payloadLen).
// A frame with payloadLen == 0 terminates the transfer. Every frame and the
// file name must be acknowledged by the device (see ReadACK).
func (n *NodeMCU) SendFile(inputFile string) error {
	n.logger.Println("SendFile is called")
	startTime := time.Now()
	file, err := os.Open(inputFile)
	if err != nil {
		return err
	}
	defer file.Close()
	n.logger.Printf("File opened '%s'\n", inputFile)
	n.logger.Println("SendFile is called, loading recv code")
	n.WriteString(recvCode)
	n.ReadStrings()
	n.port.Flush()
	// Give the interpreter time to load the receiver before invoking it.
	time.Sleep(1 * time.Second)
	n.logger.Println("Calling recv()")
	n.WriteString("recv()\r\n")
	n.port.Flush()
	time.Sleep(1 * time.Second)
	if !n.ReadyToRecv() {
		return errNotReady
	}
	n.logger.Println("Device is ready to receive data")
	// File name is sent NUL-terminated.
	filename := append([]byte(filepath.Base(inputFile)), 0)
	n.logger.Println("Passing filename to recv()")
	n.port.Write(filename)
	n.port.Flush()
	if err = n.ReadACK(); err != nil {
		return err
	}
	reader := bufio.NewReader(file)
	sentBytes := 0
	noChunks := 1
	n.logger.Println("Starting to write file data")
	for {
		buf := make([]byte, 128)
		// Keep the read error separate: the original assigned the ACK
		// result into the same variable, silently discarding read errors.
		l, readErr := reader.Read(buf)
		if readErr != nil && readErr != io.EOF {
			return readErr
		}
		// Frame: marker byte, payload length, fixed 128-byte payload.
		data := append([]byte{0x1, byte(l)}, buf...)
		n.port.Write(data)
		n.port.Flush()
		// Return (rather than panic) on a missing ACK — callers expect an
		// error from this method, not a crash.
		if err = n.ReadACK(); err != nil {
			return err
		}
		n.logger.Printf("Sending chunk %d, initial size is %d bytes\n", noChunks, len(data))
		if l == 0 {
			// Zero-length frame signals end of transfer to the receiver.
			break
		}
		noChunks++
		sentBytes += len(data)
	}
	diff := time.Since(startTime)
	n.logger.Printf("SendFile finished, took %d milliseconds, %d chunks sent, total bytes %d\n",
		int64(diff.Milliseconds()), noChunks, sentBytes)
	return nil
}
// Restart calls node.restart() on the device.
// The port is flushed after the command is written.
func (n *NodeMCU) Restart() error {
	defer n.port.Flush()
	n.logger.Println("Restart is called")
	return n.WriteString("node.restart()\r\n")
}
// Compile calls node.compile() for the given Lua source file on the device.
func (n *NodeMCU) Compile(filename string) error {
	cmd := fmt.Sprintf("compile(\"%s\")\r\n", filename)
	n.logger.Printf("Compile is called: %s\n", cmd)
	if writeErr := n.WriteString(cmd); writeErr != nil {
		return writeErr
	}
	// Drain the device's reply; only the error status is propagated.
	_, readErr := n.ReadStrings()
	return readErr
}
// SetLogger sets the logger used for diagnostic output.
// By default (see NewNodeMCU) logging goes to ioutil.Discard; install a real
// logger here to trace the serial protocol.
func (n *NodeMCU) SetLogger(l *log.Logger) {
	n.logger = l
}
// NewNodeMCU creates a new NodeMCU object and initializes the serial connection.
// Logging is disabled by default; use SetLogger to enable it.
// Note: the node is returned even when opening the port fails, alongside the
// error (matching the original named-return behavior).
func NewNodeMCU(port string, baudRate int) (*NodeMCU, error) {
	n := &NodeMCU{
		cfg:    &serial.Config{Name: port, Baud: baudRate},
		logger: log.New(ioutil.Discard, "", log.LstdFlags),
	}
	// Enable GPIO module:
	n.GPIO = &GPIOModule{node: n}
	conn, openErr := serial.OpenPort(n.cfg)
	n.port = conn
	return n, openErr
}
|
<?php
namespace App\Http\Controllers;
use App\Actions\Account\StoreAccount;
use App\Actions\Account\UpdateAccount;
use App\Actions\Account\DeleteAccount;
use Illuminate\Http\Request;
use Inertia\Inertia;
use App\Models\User;
use Illuminate\Support\Facades\Redirect;
class AccountController extends Controller
{
    /**
     * Show the account creation page.
     */
    public function create()
    {
        return Inertia::render('Account', ['is-new' => true]);
    }

    /**
     * Persist a new account, then render the home page with the refreshed
     * user list.
     */
    public function store(StoreAccount $action, Request $request, User $user)
    {
        $action->execute($request->all(), $user);

        return Inertia::render('Home', [
            // Fixed class-name casing (was `user::all()`): PHP resolves class
            // names case-insensitively, but the lowercase form breaks PSR
            // autoloading conventions and static analysis.
            'users' => User::all(),
        ]);
    }

    /**
     * Show the edit page for an existing account.
     */
    public function edit(User $user)
    {
        return Inertia::render('Account', ['user' => $user]);
    }

    /**
     * Apply changes to an existing account and return to the home page.
     */
    public function update(UpdateAccount $action, Request $request, User $user)
    {
        $action->execute($request->all(), $user);

        return Redirect::route('home.index');
    }

    /**
     * Show the delete-confirmation page for an account.
     */
    public function confirmation(User $user)
    {
        return Inertia::render('Account', [
            'user' => $user,
            'is-confirmation' => true,
        ]);
    }

    /**
     * Delete an account and return to the home page.
     */
    public function destroy(DeleteAccount $action, User $user)
    {
        $action->execute($user);

        return Redirect::route('home.index');
    }
}
|
#!/usr/bin/env zsh
# Entry point for fzf-based composer completion: complete script names when
# the command line starts with "composer", otherwise fall back to plain path
# completion.
_fzf_complete_composer() {
    if [[ $@ != 'composer'* ]]; then
        _fzf_path_completion "$prefix" $@
        return
    fi
    _fzf_complete_composer-run-script '' $@
}
# Complete composer script names via fzf.
# $1: extra fzf options (may be empty); remaining args are passed through.
# Reads the "scripts" section of ./composer.json in the current directory.
_fzf_complete_composer-run-script() {
    local fzf_options=$1
    shift
    local composer=./composer.json
    # Nothing to complete without a composer.json in the working directory.
    if [[ ! -f $composer ]]; then
        return
    fi
    # ${(Q)${(Z+n+)fzf_options}} tokenizes fzf_options like the shell would
    # and strips one level of quoting. The inline PHP prints the script names
    # NUL-separated so --read0/--print0 keep names with whitespace intact;
    # PHP errors (e.g. malformed JSON) are silenced via 2> /dev/null.
    _fzf_complete --ansi --read0 --print0 --tiebreak=index ${(Q)${(Z+n+)fzf_options}} -- $@ < <(php -r '
        echo implode(
            "\0",
            array_keys(
                (array) json_decode(
                    stream_get_contents(STDIN)
                )->scripts
            )
        );' < $composer 2> /dev/null)
}
# Post-process fzf's NUL-separated selection (read from stdin) into
# shell-quoted words, one per line.
_fzf_complete_composer-run-script_post() {
    local script
    local input=$(cat)
    # ${(0)input} splits the captured output on NUL bytes.
    for script in ${(0)input}; do
        # ${(q+)script} minimally quotes the name; the substitution then
        # re-escapes literal "\n" sequences so they survive insertion.
        echo ${${(q+)script}//\\n/\\\\n}
    done
}
|
# fdt-rs
[](https://crates.io/crates/fdt-rs)
[](https://crates.io/crates/fdt-rs)
[](https://docs.rs/fdt-rs/)
[](https://gitlab.com/ertos/fdt-rs)
[](https://gitlab.com/ertos/fdt-rs)
A Flattened Device Tree parser for embedded no-std environments
## Usage
Add this to your `Cargo.toml`:
```toml
[dependencies.fdt-rs]
version = "0.3"
```
and this to your crate root:
```rust
extern crate fdt_rs;
```
## Features
This crate can be used without the standard library (`#![no_std]`) by disabling
the default `std` feature. Use this in `Cargo.toml`:
```toml
[dependencies.fdt-rs]
version = "0.3"
default-features = false
```
## Example
The following example stashes a flattened device tree in memory, parses that
device tree into a `fdt_rs::DevTree` object, searches the device tree for
"ns16550a" compatible nodes, and (if found) prints each node's name.
```rust
extern crate fdt_rs;
use fdt_rs::prelude::*;
use fdt_rs::base::*;
// Place a device tree image into the rust binary and
// align it to a 32-byte boundary by using a wrapper struct.
#[repr(align(4))] struct _Wrapper<T>(T);
pub const FDT: &[u8] = &_Wrapper(*include_bytes!("../tests/riscv64-virt.dtb")).0;
fn main() {
// Initialize the devtree using an &[u8] array.
let devtree = unsafe {
// Get the actual size of the device tree after reading its header.
let size = DevTree::read_totalsize(FDT).unwrap();
let buf = &FDT[..size];
// Create the device tree handle
DevTree::new(buf).unwrap()
};
// Iterate through all "ns16550a" compatible nodes within the device tree.
// If found, print the name of each node (including unit address).
let mut node_iter = devtree.compatible_nodes("ns16550a");
while let Some(node) = node_iter.next().unwrap() {
println!("{}", node.name().unwrap());
}
}
```
|
<?php
namespace Zanichelli\HealthCheck\Http\Models;
use Exception;
use Illuminate\Support\Facades\Log;
use Illuminate\Support\Facades\Storage;
use Zanichelli\HealthCheck\Http\Constants\Service;
use Zanichelli\HealthCheck\Http\Models\Status;
class S3Checker implements CheckerInterface
{
    /** @var string name of the Laravel filesystem disk to probe */
    private $diskName;

    public function __construct(string $diskName)
    {
        $this->diskName = $diskName;
    }

    /**
     * Probes the configured S3 disk and reports its availability.
     */
    public function check(): Status
    {
        $status = new Status(Service::S3 . '/' . $this->diskName);

        try {
            // Any successful round-trip proves connectivity; the probed file
            // itself does not need to exist.
            Storage::disk($this->diskName)->exists('file.txt');

            return $status;
        } catch (Exception $exception) {
            Log::error($exception->getMessage());

            $status->setAvailable(false);
            $status->setMessage(trans('healthcheck::messages.ErrorConnectionS3'));

            return $status;
        }
    }
}
|
<?php
namespace App\Http\Controllers\Notes;
use App\Http\Controllers\Controller;
use App\Models\NotesModel;
use Illuminate\Http\Request;
/**
 * Notes controller that contains methods for the RESTful API
 *
 * Class NotesController
 */
class NotesController extends Controller
{
    /**
     * Gets previously set note using note ID
     *
     * @param Request $request
     * @param int $noteId
     * @return \Illuminate\Http\JsonResponse
     */
    public function getNote(Request $request, $noteId)
    {
        $note = NotesModel::find($noteId);

        if (empty($note)) {
            // TODO: extend ResponseFactory and use success() and fail() to handle JSON success and JSON fail
            return $this->notFoundResponse();
        }

        return response()->json($note->toArray());
    }

    /**
     * Create new note
     *
     * @param Request $request
     * @return \Illuminate\Http\JsonResponse
     */
    public function postNote(Request $request)
    {
        // TODO: extend exception handler to output JSON message on validation failure
        if (($failure = $this->validateNoteRequest($request)) !== null) {
            return $failure;
        }

        $note = NotesModel::create($this->noteInput($request));

        return response()->json($note->toArray());
    }

    /**
     * Overwrites a note
     *
     * @param Request $request
     * @param int $noteId
     * @return \Illuminate\Http\JsonResponse
     */
    public function putNote(Request $request, $noteId)
    {
        if (($failure = $this->validateNoteRequest($request)) !== null) {
            return $failure;
        }

        $note = NotesModel::find($noteId);

        // Assume we do not want users to insert at arbitrary IDs, return failure
        if (empty($note)) {
            return $this->notFoundResponse();
        }

        $note->update($this->noteInput($request));

        return response()->json($note->toArray());
    }

    /**
     * Deletes a note
     *
     * @param Request $request
     * @param int $noteId
     * @return \Illuminate\Http\JsonResponse
     */
    public function deleteNote(Request $request, $noteId)
    {
        $note = NotesModel::find($noteId);

        if (empty($note)) {
            return $this->notFoundResponse();
        }

        $note->delete();

        return response()->json('OK');
    }

    /**
     * Validates the title/note payload shared by create and update.
     * Returns a 400 JSON response on failure, or null when the input is valid.
     *
     * @param Request $request
     * @return \Illuminate\Http\JsonResponse|null
     */
    private function validateNoteRequest(Request $request)
    {
        try {
            $this->validate($request, [
                'title' => 'required|max:50',
                'note' => 'max:1000',
            ]);
        } catch (\Exception $e) {
            // TODO: actually show the errors... in JSON (see note above)
            return response()->json('Bad parameters.', 400);
        }

        return null;
    }

    /**
     * Extracts the persistable note attributes from the request.
     *
     * @param Request $request
     * @return array
     */
    private function noteInput(Request $request)
    {
        return [
            'title' => $request->input('title'),
            'note' => $request->input('note', ''),
        ];
    }

    /**
     * Standard 404 JSON response used by all lookup failures.
     *
     * @return \Illuminate\Http\JsonResponse
     */
    private function notFoundResponse()
    {
        return response()->json('Not found.', 404);
    }
}
|
/**
* @license
* Copyright Alibaba.com All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://github.com/NG-ZORRO/ng-zorro-antd/blob/master/LICENSE
*/
import {
ChangeDetectionStrategy,
ChangeDetectorRef,
Component,
ElementRef,
Input,
OnChanges,
SimpleChanges,
ViewChild,
ViewEncapsulation
} from '@angular/core';
import { NgStyleInterface } from 'ng-zorro-antd/core/types';
import { InputBoolean } from 'ng-zorro-antd/core/util';
import { NzTooltipDirective } from 'ng-zorro-antd/tooltip';
import { NzSliderService } from './slider.service';
import { NzSliderShowTooltip } from './typings';
@Component({
  changeDetection: ChangeDetectionStrategy.OnPush,
  encapsulation: ViewEncapsulation.None,
  selector: 'nz-slider-handle',
  exportAs: 'nzSliderHandle',
  preserveWhitespaces: false,
  template: `
    <div
      #handle
      class="ant-slider-handle"
      tabindex="0"
      nz-tooltip
      [ngStyle]="style"
      [nzTooltipTitle]="tooltipFormatter === null || tooltipVisible === 'never' ? null : tooltipTitle"
      [nzTooltipTrigger]="null"
      [nzTooltipPlacement]="tooltipPlacement"
    ></div>
  `,
  host: {
    '(mouseenter)': 'enterHandle()',
    '(mouseleave)': 'leaveHandle()'
  }
})
export class NzSliderHandleComponent implements OnChanges {
  @ViewChild('handle', { static: false }) handleEl: ElementRef;
  @ViewChild(NzTooltipDirective, { static: false }) tooltip: NzTooltipDirective;

  @Input() vertical: string;
  @Input() offset: number;
  @Input() value: number;
  @Input() tooltipVisible: NzSliderShowTooltip = 'default';
  @Input() tooltipPlacement: string;
  @Input() tooltipFormatter: (value: number) => string;
  @Input() @InputBoolean() active = false;

  // Rendered tooltip text, derived from `value` via `tooltipFormatter`.
  tooltipTitle: string;
  // Positioning style for the handle element, derived from `offset`.
  style: NgStyleInterface = {};

  constructor(private sliderService: NzSliderService, private cdr: ChangeDetectorRef) {}

  ngOnChanges(changes: SimpleChanges): void {
    const { offset, value, active, tooltipVisible } = changes;
    if (offset) {
      this.updateStyle();
    }
    if (value) {
      this.updateTooltipTitle();
      this.updateTooltipPosition();
    }
    if (active) {
      // Show the tooltip while the handle is active, hide it otherwise
      // (collapses the previous if/else into a single call).
      this.toggleTooltip(!!active.currentValue);
    }
    if (tooltipVisible?.currentValue === 'always') {
      // Defer one microtask so the tooltip directive is ready before it is
      // forced open.
      Promise.resolve().then(() => this.toggleTooltip(true, true));
    }
  }

  enterHandle = () => {
    if (!this.sliderService.isDragging) {
      this.toggleTooltip(true);
      this.updateTooltipPosition();
      this.cdr.detectChanges();
    }
  };

  leaveHandle = () => {
    if (!this.sliderService.isDragging) {
      this.toggleTooltip(false);
      this.cdr.detectChanges();
    }
  };

  // Moves keyboard focus to the handle element.
  focus(): void {
    this.handleEl.nativeElement.focus();
  }

  // Shows or hides the tooltip. Unless `force` is set, this is a no-op when
  // visibility is not 'default' or the tooltip directive is absent.
  private toggleTooltip(show: boolean, force: boolean = false): void {
    if (!force && (this.tooltipVisible !== 'default' || !this.tooltip)) {
      return;
    }
    if (show) {
      this.tooltip.show();
    } else {
      this.tooltip.hide();
    }
  }

  private updateTooltipTitle(): void {
    this.tooltipTitle = this.tooltipFormatter ? this.tooltipFormatter(this.value) : `${this.value}`;
  }

  private updateTooltipPosition(): void {
    if (this.tooltip) {
      Promise.resolve().then(() => this.tooltip.updatePosition());
    }
  }

  private updateStyle(): void {
    // Vertical sliders position from the bottom; horizontal from the left
    // (with a -50% translate to center the handle on the offset).
    this.style = {
      [this.vertical ? 'bottom' : 'left']: `${this.offset}%`,
      transform: this.vertical ? null : 'translateX(-50%)'
    };
    this.cdr.markForCheck();
  }
}
|
/**
 * @type {import('electron-builder').Configuration}
 * @see https://www.electron.build/configuration/configuration
 */
const config = {
  appId: 'com.electron.scrapbook',
  productName: 'Scrapbook',
  // Installer file name template, e.g. "scrapbook-1.0.0-win-x64-setup.exe".
  artifactName: '${name}-${version}-${os}-${arch}-setup.${ext}',
  directories: {
    // Where built installers are written.
    output: 'dist',
    // Icons and other packaging resources.
    buildResources: 'buildResources'
  },
  // Only the compiled output of each workspace package is bundled.
  files: [
    'packages/**/dist/**'
  ],
  // NSIS (Windows installer) behavior: assisted per-user install with a
  // Start-menu shortcut only; do not launch the app when setup finishes.
  nsis: {
    oneClick: false,
    perMachine: false,
    createDesktopShortcut: false,
    createStartMenuShortcut: true,
    runAfterFinish: false
  },
  win: {
    target: {
      target: 'nsis',
      arch: 'x64'
    }
  }
};
module.exports = config;
|
---
title: "cowsay mixin"
vanity: "https://github.com/carolynvs/porter-cowsay"
url: "/mixin/cowsay/"
---
|
/**
 * Plain data-transfer object describing an employee's programme assignment.
 * All fields default to empty strings so a blank instance can be created and
 * filled by form binding.
 *
 * NOTE(review): field semantics below are inferred from the abbreviations —
 * confirm against the consuming service/API.
 */
export class EmployeeProgram {
    constructor(
        //public efid: string = "",
        public empid: string = "",    // employee id
        public finyear: string = "",  // financial year
        public pgid: string = "",     // programme id
        public depid: string = "",    // department id
        public desid: string = "",    // designation id
        public reportto: string = "", // id of the employee reported to
    ) { }
}
|
AF-A0AV96-F1-model_v1_Repair.pdb RA381G; A . .
AF-A1A4S6-F1-model_v1_Repair.pdb IA340T; A . .
AF-A2PYH4-F1-model_v1_Repair.pdb SA227T; A . .
AF-A4D1E1-F1-model_v1_Repair.pdb RA406I; A . .
AF-A6H8Y1-F1-model_v1_Repair.pdb GA254V; A . .
AF-A6NDV4-F1-model_v1_Repair.pdb RA377H; A . .
AF-A6NI47-F1-model_v1_Repair.pdb GA411A; A . .
AF-A6NJG6-F1-model_v1_Repair.pdb QA136E; A . .
AF-A6NKT7-F1-model_v1_Repair.pdb SA875L; A . .
AF-A6NN14-F1-model_v1_Repair.pdb GA1054C; A . .
AF-A8MTZ7-F1-model_v1_Repair.pdb SA163C; A . .
AF-B2RTY4-F1-model_v1_Repair.pdb MA2158I; A . .
AF-B4DX44-F1-model_v1_Repair.pdb VA41F; A . .
AF-O00186-F1-model_v1_Repair.pdb DA575N; A . .
AF-O00522-F1-model_v1_Repair.pdb NA441S; A . .
AF-O14525-F1-model_v1_Repair.pdb RA629S; A . .
AF-O14593-F1-model_v1_Repair.pdb RA236Q; A . .
AF-O14936-F1-model_v1_Repair.pdb PA420R; A . .
AF-O15131-F1-model_v1_Repair.pdb HA343Q; A . .
AF-O15169-F1-model_v1_Repair.pdb TA656S; A . .
AF-O15234-F1-model_v1_Repair.pdb GA166D; A . .
AF-O15327-F1-model_v1_Repair.pdb NA835K; A . .
AF-O15379-F1-model_v1_Repair.pdb DA391N; A . .
AF-O15511-F1-model_v1_Repair.pdb DA90N; A . .
AF-O43156-F1-model_v1_Repair.pdb VA699L; A . .
AF-O43236-F1-model_v1_Repair.pdb MA474L; A . .
AF-O43292-F1-model_v1_Repair.pdb PA136Q; A . .
AF-O43295-F1-model_v1_Repair.pdb NA431K; A . .
AF-O43345-F1-model_v1_Repair.pdb FA1268C; A . .
AF-O43451-F1-model_v1_Repair.pdb NA647K; A . .
AF-O43451-F1-model_v1_Repair.pdb SA732I; A . .
AF-O43491-F1-model_v1_Repair.pdb YA311H; A . .
AF-O43663-F1-model_v1_Repair.pdb QA231H; A . .
AF-O43759-F1-model_v1_Repair.pdb AA127T; A . .
AF-O60264-F1-model_v1_Repair.pdb RA142L; A . .
AF-O60303-F1-model_v1_Repair.pdb KA8E; A . .
AF-O60336-F1-model_v1_Repair.pdb LA1296V; A . .
AF-O60504-F1-model_v1_Repair.pdb GA252R; A . .
AF-O60667-F1-model_v1_Repair.pdb PA82Q; A . .
AF-O60732-F1-model_v1_Repair.pdb RA1066C; A . .
AF-O75051-F1-model_v1_Repair.pdb PA1019L; A . .
AF-O75051-F1-model_v1_Repair.pdb RA1323Q; A . .
AF-O75095-F1-model_v1_Repair.pdb TA1459N; A . .
AF-O75116-F1-model_v1_Repair.pdb RA491L; A . .
AF-O75147-F1-model_v1_Repair.pdb QA1517H; A . .
AF-O75150-F1-model_v1_Repair.pdb EA276D; A . .
AF-O75165-F1-model_v1_Repair.pdb LA2006V; A . .
AF-O75165-F1-model_v1_Repair.pdb NA1049D; A . .
AF-O75197-F1-model_v1_Repair.pdb TA1014I; A . .
AF-O75694-F1-model_v1_Repair.pdb VA75A; A . .
AF-O75881-F1-model_v1_Repair.pdb IA475N; A . .
AF-O75955-F1-model_v1_Repair.pdb CA34Y; A . .
AF-O94813-F1-model_v1_Repair.pdb KA1396N; A . .
AF-O94900-F1-model_v1_Repair.pdb AA456T; A . .
AF-O94913-F1-model_v1_Repair.pdb PA188L; A . .
AF-O94972-F1-model_v1_Repair.pdb EA187G; A . .
AF-O95025-F1-model_v1_Repair.pdb SA274Y; A . .
AF-O95140-F1-model_v1_Repair.pdb EA436K; A . .
AF-O95163-F1-model_v1_Repair.pdb FA429V; A . .
AF-O95210-F1-model_v1_Repair.pdb GA345C; A . .
AF-O95271-F1-model_v1_Repair.pdb SA1264G; A . .
AF-O95274-F1-model_v1_Repair.pdb RA306S; A . .
AF-O95363-F1-model_v1_Repair.pdb RA423W; A . .
AF-O95411-F1-model_v1_Repair.pdb GA111V; A . .
AF-O95425-F1-model_v1_Repair.pdb DA2147H; A . .
AF-O95861-F1-model_v1_Repair.pdb VA114F; A . .
AF-P00519-F1-model_v1_Repair.pdb VA1128E; A . .
AF-P00533-F1-model_v1_Repair.pdb QA701R; A . .
AF-P01116-F1-model_v1_Repair.pdb GA12V; A . .
AF-P01215-F1-model_v1_Repair.pdb CA31F; A . .
AF-P01241-F1-model_v1_Repair.pdb RA42H; A . .
AF-P02549-F1-model_v1_Repair.pdb SA1676I; A . .
AF-P02686-F1-model_v1_Repair.pdb TA17K; A . .
AF-P02760-F1-model_v1_Repair.pdb TA67M; A . .
AF-P02768-F1-model_v1_Repair.pdb GA95E; A . .
AF-P04062-F1-model_v1_Repair.pdb KA512R; A . .
AF-P04259-F1-model_v1_Repair.pdb AA412T; A . .
AF-P04279-F1-model_v1_Repair.pdb HA399Y; A . .
AF-P04637-F1-model_v1_Repair.pdb CA176W; A . .
AF-P07197-F1-model_v1_Repair.pdb EA723D; A . .
AF-P07205-F1-model_v1_Repair.pdb NA32K; A . .
AF-P08123-F1-model_v1_Repair.pdb GA57C; A . .
AF-P08567-F1-model_v1_Repair.pdb CA250F; A . .
AF-P0CG32-F1-model_v1_Repair.pdb RA85G; A . .
AF-P0CG39-F1-model_v1_Repair.pdb WA917C; A . .
AF-P11215-F1-model_v1_Repair.pdb KA830N; A . .
AF-P11216-F1-model_v1_Repair.pdb MA148V; A . .
AF-P11362-F1-model_v1_Repair.pdb EA792K; A . .
AF-P12111-F1-model_v1_Repair.pdb RA137W; A . .
AF-P12270-F1-model_v1_Repair.pdb DA936Y; A . .
AF-P13611-F1-model_v1_Repair.pdb SA618C; A . .
AF-P13611-F2-model_v1_Repair.pdb SA418C; A . .
AF-P13611-F3-model_v1_Repair.pdb SA218C; A . .
AF-P13611-F4-model_v1_Repair.pdb SA18C; A . .
AF-P14207-F1-model_v1_Repair.pdb HA82Q; A . .
AF-P14210-F1-model_v1_Repair.pdb DA626N; A . .
AF-P15498-F1-model_v1_Repair.pdb RA828L; A . .
AF-P15918-F1-model_v1_Repair.pdb RA443S; A . .
AF-P16234-F1-model_v1_Repair.pdb PA6Q; A . .
AF-P16444-F1-model_v1_Repair.pdb GA395W; A . .
AF-P16885-F1-model_v1_Repair.pdb GA363W; A . .
AF-P17038-F1-model_v1_Repair.pdb KA782N; A . .
AF-P17040-F1-model_v1_Repair.pdb EA798D; A . .
AF-P18505-F1-model_v1_Repair.pdb RA167I; A . .
AF-P20023-F1-model_v1_Repair.pdb PA777L; A . .
AF-P20930-F1-model_v1_Repair.pdb SA657Y; A . .
AF-P20930-F2-model_v1_Repair.pdb SA457Y; A . .
AF-P20930-F3-model_v1_Repair.pdb SA257Y; A . .
AF-P20930-F4-model_v1_Repair.pdb SA57Y; A . .
AF-P21589-F1-model_v1_Repair.pdb NA190H; A . .
AF-P21817-F13-model_v1_Repair.pdb VA1348G; A . .
AF-P21817-F14-model_v1_Repair.pdb VA1148G; A . .
AF-P21817-F15-model_v1_Repair.pdb VA948G; A . .
AF-P21817-F16-model_v1_Repair.pdb VA748G; A . .
AF-P21817-F17-model_v1_Repair.pdb VA548G; A . .
AF-P21817-F18-model_v1_Repair.pdb VA348G; A . .
AF-P21817-F19-model_v1_Repair.pdb VA148G; A . .
AF-P22680-F1-model_v1_Repair.pdb KA309E; A . .
AF-P27037-F1-model_v1_Repair.pdb LA147R; A . .
AF-P27544-F1-model_v1_Repair.pdb PA131Q; A . .
AF-P27694-F1-model_v1_Repair.pdb FA523I; A . .
AF-P28566-F1-model_v1_Repair.pdb NA2K; A . .
AF-P31645-F1-model_v1_Repair.pdb VA281M; A . .
AF-P35236-F1-model_v1_Repair.pdb EA99K; A . .
AF-P35749-F1-model_v1_Repair.pdb PA517T; A . .
AF-P40197-F1-model_v1_Repair.pdb AA307D; A . .
AF-P43115-F1-model_v1_Repair.pdb GA205W; A . .
AF-P45880-F1-model_v1_Repair.pdb PA147L; A . .
AF-P46093-F1-model_v1_Repair.pdb VA46A; A . .
AF-P47992-F1-model_v1_Repair.pdb SA105P; A . .
AF-P48051-F1-model_v1_Repair.pdb DA226V; A . .
AF-P48357-F1-model_v1_Repair.pdb SA189R; A . .
AF-P48454-F1-model_v1_Repair.pdb RA213K; A . .
AF-P49590-F1-model_v1_Repair.pdb AA190T; A . .
AF-P50395-F1-model_v1_Repair.pdb DA415E; A . .
AF-P50479-F1-model_v1_Repair.pdb TA154P; A . .
AF-P55287-F1-model_v1_Repair.pdb EA379D; A . .
AF-P56715-F1-model_v1_Repair.pdb PA836L; A . .
AF-P57081-F1-model_v1_Repair.pdb KA116N; A . .
AF-P59047-F1-model_v1_Repair.pdb GA307V; A . .
AF-P62502-F1-model_v1_Repair.pdb FA9L; A . .
AF-P62993-F1-model_v1_Repair.pdb RA207C; A . .
AF-P63000-F1-model_v1_Repair.pdb YA32C; A . .
AF-P82987-F1-model_v1_Repair.pdb EA735D; A . .
AF-P82987-F1-model_v1_Repair.pdb LA1382I; A . .
AF-P98160-F10-model_v1_Repair.pdb HA362Y; A . .
AF-P98160-F11-model_v1_Repair.pdb HA162Y; A . .
AF-P98160-F5-model_v1_Repair.pdb HA1362Y; A . .
AF-P98160-F6-model_v1_Repair.pdb HA1162Y; A . .
AF-P98160-F7-model_v1_Repair.pdb HA962Y; A . .
AF-P98160-F8-model_v1_Repair.pdb HA762Y; A . .
AF-P98160-F9-model_v1_Repair.pdb HA562Y; A . .
AF-Q01082-F1-model_v1_Repair.pdb QA914L; A . .
AF-Q01538-F1-model_v1_Repair.pdb SA38A; A . .
AF-Q01814-F1-model_v1_Repair.pdb YA940H; A . .
AF-Q04759-F1-model_v1_Repair.pdb VA180F; A . .
AF-Q04771-F1-model_v1_Repair.pdb QA278P; A . .
AF-Q06141-F1-model_v1_Repair.pdb MA5I; A . .
AF-Q06455-F1-model_v1_Repair.pdb GA79V; A . .
AF-Q06587-F1-model_v1_Repair.pdb GA216R; A . .
AF-Q06787-F1-model_v1_Repair.pdb AA413G; A . .
AF-Q07002-F1-model_v1_Repair.pdb PA106L; A . .
AF-Q08345-F1-model_v1_Repair.pdb MA311V; A . .
AF-Q08477-F1-model_v1_Repair.pdb RA488S; A . .
AF-Q08830-F1-model_v1_Repair.pdb LA156V; A . .
AF-Q08ER8-F1-model_v1_Repair.pdb AA3T; A . .
AF-Q09666-F11-model_v1_Repair.pdb IA1322M; A . .
AF-Q09666-F12-model_v1_Repair.pdb IA1122M; A . .
AF-Q09666-F13-model_v1_Repair.pdb IA922M; A . .
AF-Q09666-F14-model_v1_Repair.pdb IA722M; A . .
AF-Q09666-F15-model_v1_Repair.pdb IA522M; A . .
AF-Q09666-F16-model_v1_Repair.pdb IA322M; A . .
AF-Q09666-F17-model_v1_Repair.pdb IA122M; A . .
AF-Q13263-F1-model_v1_Repair.pdb GA414S; A . .
AF-Q13561-F1-model_v1_Repair.pdb MA88L; A . .
AF-Q13591-F1-model_v1_Repair.pdb PA435R; A . .
AF-Q14123-F1-model_v1_Repair.pdb SA129R; A . .
AF-Q14145-F1-model_v1_Repair.pdb DA294Y; A . .
AF-Q14246-F1-model_v1_Repair.pdb FA494L; A . .
AF-Q14449-F1-model_v1_Repair.pdb HA147R; A . .
AF-Q14511-F1-model_v1_Repair.pdb DA630E; A . .
AF-Q14515-F1-model_v1_Repair.pdb TA119I; A . .
AF-Q14520-F1-model_v1_Repair.pdb GA458E; A . .
AF-Q14562-F1-model_v1_Repair.pdb VA864I; A . .
AF-Q14669-F1-model_v1_Repair.pdb IA1396M; A . .
AF-Q14678-F1-model_v1_Repair.pdb VA586A; A . .
AF-Q14746-F1-model_v1_Repair.pdb PA413Q; A . .
AF-Q14789-F2-model_v1_Repair.pdb KA1255Q; A . .
AF-Q14789-F3-model_v1_Repair.pdb KA1055Q; A . .
AF-Q14789-F4-model_v1_Repair.pdb KA855Q; A . .
AF-Q14789-F5-model_v1_Repair.pdb KA655Q; A . .
AF-Q14789-F6-model_v1_Repair.pdb KA455Q; A . .
AF-Q14789-F7-model_v1_Repair.pdb KA255Q; A . .
AF-Q14789-F8-model_v1_Repair.pdb KA55Q; A . .
AF-Q14966-F1-model_v1_Repair.pdb SA1433A; A . .
AF-Q14999-F1-model_v1_Repair.pdb HA1266R; A . .
AF-Q15063-F1-model_v1_Repair.pdb QA836R; A . .
AF-Q15262-F1-model_v1_Repair.pdb TA854P; A . .
AF-Q15303-F1-model_v1_Repair.pdb GA107V; A . .
AF-Q15413-F1-model_v1_Repair.pdb AA371D; A . .
AF-Q15413-F2-model_v1_Repair.pdb AA171D; A . .
AF-Q15427-F1-model_v1_Repair.pdb GA19V; A . .
AF-Q15438-F1-model_v1_Repair.pdb PA247R; A . .
AF-Q15814-F1-model_v1_Repair.pdb VA65I; A . .
AF-Q15910-F1-model_v1_Repair.pdb SA533L; A . .
AF-Q16526-F1-model_v1_Repair.pdb RA431H; A . .
AF-Q16777-F1-model_v1_Repair.pdb SA123I; A . .
AF-Q16799-F1-model_v1_Repair.pdb PA280S; A . .
AF-Q2TB10-F1-model_v1_Repair.pdb PA444L; A . .
AF-Q2VIQ3-F1-model_v1_Repair.pdb LA434P; A . .
AF-Q2WGJ9-F1-model_v1_Repair.pdb KA1329R; A . .
AF-Q3MIS6-F1-model_v1_Repair.pdb VA221F; A . .
AF-Q3T8J9-F1-model_v1_Repair.pdb NA605I; A . .
AF-Q4G0P3-F10-model_v1_Repair.pdb NA1095K; A . .
AF-Q4G0P3-F11-model_v1_Repair.pdb NA895K; A . .
AF-Q4G0P3-F12-model_v1_Repair.pdb NA695K; A . .
AF-Q4G0P3-F13-model_v1_Repair.pdb NA495K; A . .
AF-Q4G0P3-F14-model_v1_Repair.pdb NA295K; A . .
AF-Q4G0P3-F15-model_v1_Repair.pdb NA95K; A . .
AF-Q4G0P3-F2-model_v1_Repair.pdb IA1247V; A . .
AF-Q4G0P3-F3-model_v1_Repair.pdb IA1047V; A . .
AF-Q4G0P3-F4-model_v1_Repair.pdb IA847V; A . .
AF-Q4G0P3-F5-model_v1_Repair.pdb IA647V; A . .
AF-Q4G0P3-F6-model_v1_Repair.pdb IA447V; A . .
AF-Q4G0P3-F7-model_v1_Repair.pdb IA247V; A . .
AF-Q4G0P3-F8-model_v1_Repair.pdb IA47V; A . .
AF-Q4G0P3-F9-model_v1_Repair.pdb NA1295K; A . .
AF-Q4LDE5-F1-model_v1_Repair.pdb AA1135P; A . .
AF-Q4LDE5-F2-model_v1_Repair.pdb AA935P; A . .
AF-Q4LDE5-F3-model_v1_Repair.pdb AA735P; A . .
AF-Q4LDE5-F4-model_v1_Repair.pdb AA535P; A . .
AF-Q4LDE5-F5-model_v1_Repair.pdb AA335P; A . .
AF-Q4LDE5-F6-model_v1_Repair.pdb AA135P; A . .
AF-Q4LE39-F1-model_v1_Repair.pdb AA407T; A . .
AF-Q4ZHG4-F1-model_v1_Repair.pdb NA439K; A . .
AF-Q4ZJI4-F1-model_v1_Repair.pdb PA91S; A . .
AF-Q53FD0-F1-model_v1_Repair.pdb SA309Y; A . .
AF-Q53LP3-F1-model_v1_Repair.pdb KA520R; A . .
AF-Q5H8C1-F1-model_v1_Repair.pdb LA814I; A . .
AF-Q5H9I0-F1-model_v1_Repair.pdb DA357E; A . .
AF-Q5JXA9-F1-model_v1_Repair.pdb DA154Y; A . .
AF-Q5JYT7-F1-model_v1_Repair.pdb PA1094S; A . .
AF-Q5MCW4-F1-model_v1_Repair.pdb YA242F; A . .
AF-Q5SVQ8-F1-model_v1_Repair.pdb GA542D; A . .
AF-Q5SVQ8-F1-model_v1_Repair.pdb VA382F; A . .
AF-Q5T1R4-F1-model_v1_Repair.pdb IA86V; A . .
AF-Q5T200-F1-model_v1_Repair.pdb RA499L; A . .
AF-Q5T4S7-F10-model_v1_Repair.pdb RA736C; A . .
AF-Q5T4S7-F11-model_v1_Repair.pdb RA536C; A . .
AF-Q5T4S7-F12-model_v1_Repair.pdb RA336C; A . .
AF-Q5T4S7-F13-model_v1_Repair.pdb RA136C; A . .
AF-Q5T4S7-F16-model_v1_Repair.pdb IA1254M; A . .
AF-Q5T4S7-F17-model_v1_Repair.pdb IA1054M; A . .
AF-Q5T4S7-F18-model_v1_Repair.pdb IA854M; A . .
AF-Q5T4S7-F19-model_v1_Repair.pdb IA654M; A . .
AF-Q5T4S7-F20-model_v1_Repair.pdb IA454M; A . .
AF-Q5T4S7-F7-model_v1_Repair.pdb RA1336C; A . .
AF-Q5T4S7-F8-model_v1_Repair.pdb RA1136C; A . .
AF-Q5T4S7-F9-model_v1_Repair.pdb RA936C; A . .
AF-Q5TAH2-F1-model_v1_Repair.pdb LA790R; A . .
AF-Q5VST9-F1-model_v1_Repair.pdb RA465H; A . .
AF-Q5VST9-F2-model_v1_Repair.pdb RA265H; A . .
AF-Q5VST9-F3-model_v1_Repair.pdb RA65H; A . .
AF-Q5VTH9-F1-model_v1_Repair.pdb EA413K; A . .
AF-Q5VU65-F1-model_v1_Repair.pdb LA35R; A . .
AF-Q5VYS8-F1-model_v1_Repair.pdb RA1458K; A . .
AF-Q5VZL5-F1-model_v1_Repair.pdb DA91E; A . .
AF-Q5W5X9-F1-model_v1_Repair.pdb VA275F; A . .
AF-Q66GS9-F1-model_v1_Repair.pdb MA725K; A . .
AF-Q685J3-F1-model_v1_Repair.pdb PA1284H; A . .
AF-Q685J3-F2-model_v1_Repair.pdb PA1084H; A . .
AF-Q685J3-F3-model_v1_Repair.pdb PA884H; A . .
AF-Q685J3-F4-model_v1_Repair.pdb PA684H; A . .
AF-Q685J3-F5-model_v1_Repair.pdb PA484H; A . .
AF-Q685J3-F6-model_v1_Repair.pdb PA284H; A . .
AF-Q685J3-F7-model_v1_Repair.pdb PA84H; A . .
AF-Q68CP9-F1-model_v1_Repair.pdb DA1282H; A . .
AF-Q6ECI4-F1-model_v1_Repair.pdb KA103N; A . .
AF-Q6FI13-F1-model_v1_Repair.pdb SA123I; A . .
AF-Q6GPH4-F1-model_v1_Repair.pdb PA212Q; A . .
AF-Q6ISB3-F1-model_v1_Repair.pdb SA19G; A . .
AF-Q6KC79-F1-model_v1_Repair.pdb GA884W; A . .
AF-Q6KC79-F2-model_v1_Repair.pdb GA684W; A . .
AF-Q6KC79-F3-model_v1_Repair.pdb GA484W; A . .
AF-Q6KC79-F4-model_v1_Repair.pdb GA284W; A . .
AF-Q6KC79-F5-model_v1_Repair.pdb GA84W; A . .
AF-Q6NUQ1-F1-model_v1_Repair.pdb DA612H; A . .
AF-Q6P995-F1-model_v1_Repair.pdb NA792S; A . .
AF-Q6P9B9-F1-model_v1_Repair.pdb DA475V; A . .
AF-Q6TFL4-F1-model_v1_Repair.pdb WA378R; A . .
AF-Q6UVM3-F1-model_v1_Repair.pdb LA1065H; A . .
AF-Q6UWB4-F1-model_v1_Repair.pdb PA178R; A . .
AF-Q6UXX5-F1-model_v1_Repair.pdb HA101P; A . .
AF-Q6UXX5-F1-model_v1_Repair.pdb QA169E; A . .
AF-Q6W4X9-F1-model_v1_Repair.pdb TA1498K; A . .
AF-Q6WKZ4-F1-model_v1_Repair.pdb MA273K; A . .
AF-Q6YHK3-F1-model_v1_Repair.pdb GA653V; A . .
AF-Q6ZP80-F1-model_v1_Repair.pdb PA93L; A . .
AF-Q6ZR52-F1-model_v1_Repair.pdb RA297I; A . .
AF-Q6ZT98-F1-model_v1_Repair.pdb IA234V; A . .
AF-Q6ZU80-F1-model_v1_Repair.pdb HA1012D; A . .
AF-Q6ZU80-F1-model_v1_Repair.pdb KA1024N; A . .
AF-Q6ZU80-F1-model_v1_Repair.pdb SA69G; A . .
AF-Q6ZUT9-F1-model_v1_Repair.pdb DA811V; A . .
AF-Q6ZVN8-F1-model_v1_Repair.pdb HA166R; A . .
AF-Q70CQ2-F10-model_v1_Repair.pdb PA797S; A . .
AF-Q70CQ2-F11-model_v1_Repair.pdb PA597S; A . .
AF-Q70CQ2-F12-model_v1_Repair.pdb PA397S; A . .
AF-Q70CQ2-F7-model_v1_Repair.pdb PA1397S; A . .
AF-Q70CQ2-F8-model_v1_Repair.pdb PA1197S; A . .
AF-Q70CQ2-F9-model_v1_Repair.pdb PA997S; A . .
AF-Q70EK9-F1-model_v1_Repair.pdb GA361S; A . .
AF-Q7L622-F1-model_v1_Repair.pdb TA312I; A . .
AF-Q7LBC6-F1-model_v1_Repair.pdb VA593G; A . .
AF-Q7RTV2-F1-model_v1_Repair.pdb VA161L; A . .
AF-Q7Z388-F1-model_v1_Repair.pdb EA85D; A . .
AF-Q7Z407-F1-model_v1_Repair.pdb EA476D; A . .
AF-Q7Z407-F2-model_v1_Repair.pdb EA276D; A . .
AF-Q7Z407-F3-model_v1_Repair.pdb EA76D; A . .
AF-Q7Z570-F1-model_v1_Repair.pdb KA809R; A . .
AF-Q7Z6G8-F1-model_v1_Repair.pdb AA744S; A . .
AF-Q7Z6I8-F1-model_v1_Repair.pdb HA174Y; A . .
AF-Q86TZ1-F1-model_v1_Repair.pdb AA262T; A . .
AF-Q86UY8-F1-model_v1_Repair.pdb KA355E; A . .
AF-Q86VD1-F1-model_v1_Repair.pdb IA678F; A . .
AF-Q86WA6-F1-model_v1_Repair.pdb SA72N; A . .
AF-Q86WI1-F1-model_v1_Repair.pdb AA390E; A . .
AF-Q86WI1-F2-model_v1_Repair.pdb AA190E; A . .
AF-Q86WJ1-F1-model_v1_Repair.pdb QA644K; A . .
AF-Q86WN1-F1-model_v1_Repair.pdb RA142W; A . .
AF-Q86WZ0-F1-model_v1_Repair.pdb LA484I; A . .
AF-Q86X02-F1-model_v1_Repair.pdb KA441E; A . .
AF-Q86Y13-F1-model_v1_Repair.pdb QA33P; A . .
AF-Q8IUG5-F1-model_v1_Repair.pdb GA330C; A . .
AF-Q8IUG5-F1-model_v1_Repair.pdb GA330V; A . .
AF-Q8IVF2-F10-model_v1_Repair.pdb GA864C; A . .
AF-Q8IVF2-F11-model_v1_Repair.pdb GA664C; A . .
AF-Q8IVF2-F12-model_v1_Repair.pdb GA464C; A . .
AF-Q8IVF2-F13-model_v1_Repair.pdb GA264C; A . .
AF-Q8IVF2-F14-model_v1_Repair.pdb GA64C; A . .
AF-Q8IVF2-F18-model_v1_Repair.pdb MA1247R; A . .
AF-Q8IVF2-F19-model_v1_Repair.pdb MA1047R; A . .
AF-Q8IVF2-F1-model_v1_Repair.pdb LA1032V; A . .
AF-Q8IVF2-F20-model_v1_Repair.pdb MA847R; A . .
AF-Q8IVF2-F21-model_v1_Repair.pdb MA647R; A . .
AF-Q8IVF2-F22-model_v1_Repair.pdb MA447R; A . .
AF-Q8IVF2-F23-model_v1_Repair.pdb MA247R; A . .
AF-Q8IVF2-F2-model_v1_Repair.pdb LA832V; A . .
AF-Q8IVF2-F3-model_v1_Repair.pdb LA632V; A . .
AF-Q8IVF2-F4-model_v1_Repair.pdb LA432V; A . .
AF-Q8IVF2-F5-model_v1_Repair.pdb LA232V; A . .
AF-Q8IVF2-F6-model_v1_Repair.pdb LA32V; A . .
AF-Q8IVF2-F8-model_v1_Repair.pdb GA1264C; A . .
AF-Q8IVF2-F9-model_v1_Repair.pdb GA1064C; A . .
AF-Q8IVP9-F1-model_v1_Repair.pdb LA46F; A . .
AF-Q8IWI9-F1-model_v1_Repair.pdb DA116Y; A . .
AF-Q8IZF2-F1-model_v1_Repair.pdb PA1226Q; A . .
AF-Q8IZF2-F1-model_v1_Repair.pdb QA1216L; A . .
AF-Q8IZH2-F1-model_v1_Repair.pdb EA779Q; A . .
AF-Q8IZU9-F1-model_v1_Repair.pdb TA66K; A . .
AF-Q8IZX4-F1-model_v1_Repair.pdb HA1182Q; A . .
AF-Q8IZY2-F1-model_v1_Repair.pdb VA950L; A . .
AF-Q8N0U7-F1-model_v1_Repair.pdb LA525S; A . .
AF-Q8N143-F1-model_v1_Repair.pdb LA28R; A . .
AF-Q8N145-F1-model_v1_Repair.pdb AA509G; A . .
AF-Q8N1G4-F1-model_v1_Repair.pdb VA329I; A . .
AF-Q8N2C7-F1-model_v1_Repair.pdb DA1327Y; A . .
AF-Q8N2C7-F2-model_v1_Repair.pdb DA1127Y; A . .
AF-Q8N2C7-F3-model_v1_Repair.pdb DA927Y; A . .
AF-Q8N2C7-F4-model_v1_Repair.pdb DA727Y; A . .
AF-Q8N2C7-F5-model_v1_Repair.pdb DA527Y; A . .
AF-Q8N2C7-F6-model_v1_Repair.pdb DA327Y; A . .
AF-Q8N2C7-F7-model_v1_Repair.pdb DA127Y; A . .
AF-Q8N2S1-F1-model_v1_Repair.pdb GA940R; A . .
AF-Q8N393-F1-model_v1_Repair.pdb CA651S; A . .
AF-Q8N4C6-F1-model_v1_Repair.pdb KA1197Q; A . .
AF-Q8N4C6-F1-model_v1_Repair.pdb KA1247R; A . .
AF-Q8N4N8-F1-model_v1_Repair.pdb CA395R; A . .
AF-Q8N7R1-F1-model_v1_Repair.pdb DA193E; A . .
AF-Q8N806-F1-model_v1_Repair.pdb GA123V; A . .
AF-Q8N8A2-F1-model_v1_Repair.pdb RA977G; A . .
AF-Q8N8U9-F1-model_v1_Repair.pdb PA397S; A . .
AF-Q8N998-F1-model_v1_Repair.pdb KA144T; A . .
AF-Q8NB14-F1-model_v1_Repair.pdb KA947N; A . .
AF-Q8NB50-F1-model_v1_Repair.pdb DA760Y; A . .
AF-Q8NCM8-F15-model_v1_Repair.pdb HA1375P; A . .
AF-Q8NCM8-F16-model_v1_Repair.pdb HA1175P; A . .
AF-Q8ND71-F1-model_v1_Repair.pdb HA34Y; A . .
AF-Q8NEK8-F1-model_v1_Repair.pdb DA49N; A . .
AF-Q8NET1-F1-model_v1_Repair.pdb AA20G; A . .
AF-Q8NET4-F1-model_v1_Repair.pdb AA802V; A . .
AF-Q8NEZ3-F1-model_v1_Repair.pdb DA355V; A . .
AF-Q8NFH4-F1-model_v1_Repair.pdb HA317Q; A . .
AF-Q8NFY9-F1-model_v1_Repair.pdb VA499L; A . .
AF-Q8NG85-F1-model_v1_Repair.pdb PA166S; A . .
AF-Q8NGJ8-F1-model_v1_Repair.pdb MA303K; A . .
AF-Q8NGS8-F1-model_v1_Repair.pdb PA79T; A . .
AF-Q8NGX8-F1-model_v1_Repair.pdb RA16L; A . .
AF-Q8NGZ0-F1-model_v1_Repair.pdb LA72F; A . .
AF-Q8NH18-F1-model_v1_Repair.pdb KA298T; A . .
AF-Q8NH49-F1-model_v1_Repair.pdb FA12C; A . .
AF-Q8NH56-F1-model_v1_Repair.pdb HA253N; A . .
AF-Q8NH69-F1-model_v1_Repair.pdb SA188T; A . .
AF-Q8TAT5-F1-model_v1_Repair.pdb TA469K; A . .
AF-Q8TB69-F1-model_v1_Repair.pdb LA105W; A . .
AF-Q8TB73-F1-model_v1_Repair.pdb IA298M; A . .
AF-Q8TBE7-F1-model_v1_Repair.pdb QA32K; A . .
AF-Q8TDB6-F1-model_v1_Repair.pdb PA8Q; A . .
AF-Q8TDW7-F10-model_v1_Repair.pdb NA1010H; A . .
AF-Q8TDW7-F11-model_v1_Repair.pdb NA810H; A . .
AF-Q8TDW7-F12-model_v1_Repair.pdb NA610H; A . .
AF-Q8TDW7-F13-model_v1_Repair.pdb NA410H; A . .
AF-Q8TDW7-F14-model_v1_Repair.pdb NA210H; A . .
AF-Q8TDW7-F15-model_v1_Repair.pdb NA10H; A . .
AF-Q8TDW7-F9-model_v1_Repair.pdb NA1210H; A . .
AF-Q8WWQ2-F1-model_v1_Repair.pdb KA287N; A . .
AF-Q8WXG9-F1-model_v1_Repair.pdb EA815V; A . .
AF-Q8WXG9-F2-model_v1_Repair.pdb EA615V; A . .
AF-Q8WXG9-F3-model_v1_Repair.pdb EA415V; A . .
AF-Q8WXG9-F4-model_v1_Repair.pdb EA215V; A . .
AF-Q8WXG9-F5-model_v1_Repair.pdb EA15V; A . .
AF-Q8WXI8-F1-model_v1_Repair.pdb TA42S; A . .
AF-Q8WY22-F1-model_v1_Repair.pdb SA56G; A . .
AF-Q8WZ75-F1-model_v1_Repair.pdb IA512S; A . .
AF-Q8WZ92-F1-model_v1_Repair.pdb EA84D; A . .
AF-Q92736-F1-model_v1_Repair.pdb PA816T; A . .
AF-Q92736-F2-model_v1_Repair.pdb PA616T; A . .
AF-Q92736-F3-model_v1_Repair.pdb PA416T; A . .
AF-Q92736-F4-model_v1_Repair.pdb PA216T; A . .
AF-Q92736-F5-model_v1_Repair.pdb PA16T; A . .
AF-Q92750-F1-model_v1_Repair.pdb VA247F; A . .
AF-Q92841-F1-model_v1_Repair.pdb SA685F; A . .
AF-Q92932-F1-model_v1_Repair.pdb GA471V; A . .
AF-Q93009-F1-model_v1_Repair.pdb MA761L; A . .
AF-Q93052-F1-model_v1_Repair.pdb GA336V; A . .
AF-Q96AE4-F1-model_v1_Repair.pdb RA65K; A . .
AF-Q96AQ1-F1-model_v1_Repair.pdb AA239T; A . .
AF-Q96EH3-F1-model_v1_Repair.pdb YA188S; A . .
AF-Q96FK6-F1-model_v1_Repair.pdb VA144G; A . .
AF-Q96G91-F1-model_v1_Repair.pdb FA107C; A . .
AF-Q96HP4-F1-model_v1_Repair.pdb AA254V; A . .
AF-Q96JB1-F2-model_v1_Repair.pdb SA1203Y; A . .
AF-Q96JB1-F3-model_v1_Repair.pdb SA1003Y; A . .
AF-Q96JB1-F4-model_v1_Repair.pdb SA803Y; A . .
AF-Q96JB1-F5-model_v1_Repair.pdb SA603Y; A . .
AF-Q96JB1-F6-model_v1_Repair.pdb SA403Y; A . .
AF-Q96JB1-F7-model_v1_Repair.pdb SA203Y; A . .
AF-Q96JB1-F8-model_v1_Repair.pdb SA3Y; A . .
AF-Q96JN8-F1-model_v1_Repair.pdb NA820H; A . .
AF-Q96JT2-F1-model_v1_Repair.pdb LA199I; A . .
AF-Q96K37-F1-model_v1_Repair.pdb SA162C; A . .
AF-Q96M91-F1-model_v1_Repair.pdb IA241M; A . .
AF-Q96MU6-F1-model_v1_Repair.pdb DA425V; A . .
AF-Q96N67-F1-model_v1_Repair.pdb HA622Y; A . .
AF-Q96NE9-F1-model_v1_Repair.pdb VA459F; A . .
AF-Q96NH3-F1-model_v1_Repair.pdb DA158G; A . .
AF-Q96PP9-F1-model_v1_Repair.pdb TA433R; A . .
AF-Q96Q89-F1-model_v1_Repair.pdb MA1467I; A . .
AF-Q96QU8-F1-model_v1_Repair.pdb DA283Y; A . .
AF-Q96RJ6-F1-model_v1_Repair.pdb RA94K; A . .
AF-Q96RS0-F1-model_v1_Repair.pdb GA361S; A . .
AF-Q96RW7-F1-model_v1_Repair.pdb EA1139Q; A . .
AF-Q96RW7-F2-model_v1_Repair.pdb EA939Q; A . .
AF-Q96RW7-F3-model_v1_Repair.pdb EA739Q; A . .
AF-Q96RW7-F4-model_v1_Repair.pdb EA539Q; A . .
AF-Q96RW7-F5-model_v1_Repair.pdb EA339Q; A . .
AF-Q96RW7-F6-model_v1_Repair.pdb EA139Q; A . .
AF-Q96RY5-F1-model_v1_Repair.pdb SA1107I; A . .
AF-Q96SJ8-F1-model_v1_Repair.pdb GA62W; A . .
AF-Q96SN8-F1-model_v1_Repair.pdb EA1338V; A . .
AF-Q99466-F1-model_v1_Repair.pdb GA426A; A . .
AF-Q99661-F1-model_v1_Repair.pdb YA374C; A . .
AF-Q99683-F1-model_v1_Repair.pdb DA674V; A . .
AF-Q99707-F1-model_v1_Repair.pdb GA492R; A . .
AF-Q99715-F10-model_v1_Repair.pdb GA1009V; A . .
AF-Q99715-F9-model_v1_Repair.pdb GA1209V; A . .
AF-Q9BQK8-F1-model_v1_Repair.pdb QA758L; A . .
AF-Q9BRQ6-F1-model_v1_Repair.pdb SA126N; A . .
AF-Q9BRR3-F1-model_v1_Repair.pdb YA246N; A . .
AF-Q9BSE2-F1-model_v1_Repair.pdb RA235H; A . .
AF-Q9BSH5-F1-model_v1_Repair.pdb EA161K; A . .
AF-Q9BSJ2-F1-model_v1_Repair.pdb HA668Q; A . .
AF-Q9BTY2-F1-model_v1_Repair.pdb IA83K; A . .
AF-Q9BXR5-F1-model_v1_Repair.pdb QA702R; A . .
AF-Q9BXU7-F1-model_v1_Repair.pdb FA194Y; A . .
AF-Q9BYJ1-F1-model_v1_Repair.pdb AA586T; A . .
AF-Q9BYK8-F1-model_v1_Repair.pdb RA329C; A . .
AF-Q9BYP7-F1-model_v1_Repair.pdb RA967K; A . .
AF-Q9BZE3-F1-model_v1_Repair.pdb AA17E; A . .
AF-Q9C0B6-F1-model_v1_Repair.pdb SA355C; A . .
AF-Q9C0C7-F1-model_v1_Repair.pdb EA347K; A . .
AF-Q9C0C9-F1-model_v1_Repair.pdb TA1052K; A . .
AF-Q9C0F0-F1-model_v1_Repair.pdb TA153N; A . .
AF-Q9GZV5-F1-model_v1_Repair.pdb YA118F; A . .
AF-Q9H0A0-F1-model_v1_Repair.pdb EA20V; A . .
AF-Q9H112-F1-model_v1_Repair.pdb SA33I; A . .
AF-Q9H171-F1-model_v1_Repair.pdb NA202I; A . .
AF-Q9H2S9-F1-model_v1_Repair.pdb TA396R; A . .
AF-Q9H2T7-F1-model_v1_Repair.pdb PA76A; A . .
AF-Q9H4A5-F1-model_v1_Repair.pdb RA162H; A . .
AF-Q9H5Y7-F1-model_v1_Repair.pdb PA476L; A . .
AF-Q9H6S0-F1-model_v1_Repair.pdb DA855N; A . .
AF-Q9H799-F1-model_v1_Repair.pdb MA419V; A . .
AF-Q9H799-F2-model_v1_Repair.pdb MA219V; A . .
AF-Q9H799-F3-model_v1_Repair.pdb MA19V; A . .
AF-Q9H8M5-F1-model_v1_Repair.pdb EA431D; A . .
AF-Q9H939-F1-model_v1_Repair.pdb DA124H; A . .
AF-Q9H993-F1-model_v1_Repair.pdb GA51D; A . .
AF-Q9HAN9-F1-model_v1_Repair.pdb IA177V; A . .
AF-Q9HBW9-F1-model_v1_Repair.pdb DA277Y; A . .
AF-Q9HC10-F1-model_v1_Repair.pdb AA1053S; A . .
AF-Q9HCU9-F1-model_v1_Repair.pdb LA145Q; A . .
AF-Q9HDC9-F1-model_v1_Repair.pdb PA159S; A . .
AF-Q9NP64-F1-model_v1_Repair.pdb YA118C; A . .
AF-Q9NP73-F1-model_v1_Repair.pdb YA621D; A . .
AF-Q9NP85-F1-model_v1_Repair.pdb KA126N; A . .
AF-Q9NPH5-F1-model_v1_Repair.pdb CA113F; A . .
AF-Q9NQ75-F1-model_v1_Repair.pdb AA271P; A . .
AF-Q9NRM7-F1-model_v1_Repair.pdb YA506F; A . .
AF-Q9NS40-F1-model_v1_Repair.pdb AA674D; A . .
AF-Q9NSD7-F1-model_v1_Repair.pdb DA5H; A . .
AF-Q9NUB1-F1-model_v1_Repair.pdb TA438S; A . .
AF-Q9NX38-F1-model_v1_Repair.pdb RA118C; A . .
AF-Q9NY27-F1-model_v1_Repair.pdb VA56E; A . .
AF-Q9NY99-F1-model_v1_Repair.pdb MA286V; A . .
AF-Q9NYC9-F1-model_v1_Repair.pdb LA800I; A . .
AF-Q9NYC9-F2-model_v1_Repair.pdb LA600I; A . .
AF-Q9NYC9-F3-model_v1_Repair.pdb LA400I; A . .
AF-Q9NYC9-F4-model_v1_Repair.pdb LA200I; A . .
AF-Q9NYF8-F1-model_v1_Repair.pdb RA667L; A . .
AF-Q9NYU2-F1-model_v1_Repair.pdb KA306Q; A . .
AF-Q9NYW7-F1-model_v1_Repair.pdb SA109R; A . .
AF-Q9NZL4-F1-model_v1_Repair.pdb GA170C; A . .
AF-Q9NZQ3-F1-model_v1_Repair.pdb RA602C; A . .
AF-Q9NZQ9-F1-model_v1_Repair.pdb RA102G; A . .
AF-Q9NZR2-F1-model_v1_Repair.pdb EA1331G; A . .
AF-Q9NZR2-F2-model_v1_Repair.pdb EA1131G; A . .
AF-Q9NZR2-F3-model_v1_Repair.pdb EA931G; A . .
AF-Q9NZR2-F4-model_v1_Repair.pdb EA731G; A . .
AF-Q9NZR2-F5-model_v1_Repair.pdb EA531G; A . .
AF-Q9NZR2-F6-model_v1_Repair.pdb EA331G; A . .
AF-Q9NZR2-F7-model_v1_Repair.pdb EA131G; A . .
AF-Q9P0V3-F1-model_v1_Repair.pdb DA147E; A . .
AF-Q9P219-F1-model_v1_Repair.pdb KA867E; A . .
AF-Q9P219-F1-model_v1_Repair.pdb RA629W; A . .
AF-Q9P2D1-F1-model_v1_Repair.pdb TA687A; A . .
AF-Q9P2D1-F2-model_v1_Repair.pdb TA487A; A . .
AF-Q9P2D1-F3-model_v1_Repair.pdb TA287A; A . .
AF-Q9P2D1-F4-model_v1_Repair.pdb TA87A; A . .
AF-Q9P2F8-F1-model_v1_Repair.pdb QA1622H; A . .
AF-Q9P2H3-F1-model_v1_Repair.pdb CA518Y; A . .
AF-Q9P2N2-F1-model_v1_Repair.pdb DA229E; A . .
AF-Q9UBS3-F1-model_v1_Repair.pdb TA143P; A . .
AF-Q9UBZ9-F1-model_v1_Repair.pdb EA578K; A . .
AF-Q9UDY6-F1-model_v1_Repair.pdb RA70Q; A . .
AF-Q9UER7-F1-model_v1_Repair.pdb AA226G; A . .
AF-Q9UER7-F1-model_v1_Repair.pdb RA397W; A . .
AF-Q9UFH2-F12-model_v1_Repair.pdb KA1324Q; A . .
AF-Q9UFH2-F13-model_v1_Repair.pdb KA1124Q; A . .
AF-Q9UFH2-F14-model_v1_Repair.pdb KA924Q; A . .
AF-Q9UFH2-F15-model_v1_Repair.pdb KA724Q; A . .
AF-Q9UFH2-F16-model_v1_Repair.pdb KA524Q; A . .
AF-Q9UFH2-F17-model_v1_Repair.pdb KA324Q; A . .
AF-Q9UH99-F1-model_v1_Repair.pdb RA190C; A . .
AF-Q9UHB7-F1-model_v1_Repair.pdb SA795F; A . .
AF-Q9UHC1-F1-model_v1_Repair.pdb GA762V; A . .
AF-Q9UHD2-F1-model_v1_Repair.pdb QA553E; A . .
AF-Q9UHE8-F1-model_v1_Repair.pdb FA272V; A . .
AF-Q9UJ90-F1-model_v1_Repair.pdb GA48R; A . .
AF-Q9UJ99-F1-model_v1_Repair.pdb PA274L; A . .
AF-Q9UK11-F1-model_v1_Repair.pdb PA427S; A . .
AF-Q9UKQ9-F1-model_v1_Repair.pdb GA221C; A . .
AF-Q9UKT9-F1-model_v1_Repair.pdb HA220Q; A . .
AF-Q9UKZ9-F1-model_v1_Repair.pdb SA241T; A . .
AF-Q9ULH4-F1-model_v1_Repair.pdb NA457I; A . .
AF-Q9ULL0-F1-model_v1_Repair.pdb DA520N; A . .
AF-Q9ULM6-F1-model_v1_Repair.pdb LA500V; A . .
AF-Q9UMR5-F1-model_v1_Repair.pdb FA241L; A . .
AF-Q9UQ52-F1-model_v1_Repair.pdb VA641L; A . .
AF-Q9Y216-F1-model_v1_Repair.pdb RA344K; A . .
AF-Q9Y250-F1-model_v1_Repair.pdb EA305D; A . .
AF-Q9Y297-F1-model_v1_Repair.pdb VA479G; A . .
AF-Q9Y2A7-F1-model_v1_Repair.pdb DA777Y; A . .
AF-Q9Y2B2-F1-model_v1_Repair.pdb SA104F; A . .
AF-Q9Y3R0-F1-model_v1_Repair.pdb QA989H; A . .
AF-Q9Y3Z3-F1-model_v1_Repair.pdb GA289R; A . .
AF-Q9Y3Z3-F1-model_v1_Repair.pdb RA55M; A . .
AF-Q9Y4G2-F1-model_v1_Repair.pdb RA304L; A . .
AF-Q9Y512-F1-model_v1_Repair.pdb RA346L; A . .
AF-Q9Y5E5-F1-model_v1_Repair.pdb PA133L; A . .
AF-Q9Y5G1-F1-model_v1_Repair.pdb IA442V; A . .
AF-Q9Y5H0-F1-model_v1_Repair.pdb RA198W; A . .
AF-Q9Y5H1-F1-model_v1_Repair.pdb NA165K; A . .
AF-Q9Y5H7-F1-model_v1_Repair.pdb KA123M; A . .
AF-Q9Y5P0-F1-model_v1_Repair.pdb RA309M; A . .
AF-Q9Y5X3-F1-model_v1_Repair.pdb EA359D; A . .
|
module Spree
  module LineItems
    # Service object that updates a single line item's attributes and then
    # triggers a recalculation of its owning order.
    class Update
      prepend Spree::ServiceModule::Base
      include Helper

      # Updates +line_item+ with +line_item_attributes+ inside a database
      # transaction, then recalculates the order via +recalculate_service+
      # (presumably provided by Helper - confirm its source).
      #
      # @param line_item [Spree::LineItem] the line item to update
      # @param line_item_attributes [Hash] attributes passed to ActiveRecord#update
      # @param options [Hash] forwarded to the recalculate service
      # @return failure(line_item) when the update fails validation,
      #   success(line_item) otherwise
      def call(line_item:, line_item_attributes: {}, options: {})
        ActiveRecord::Base.transaction do
          # NOTE(review): `return` from inside a transaction block exits #call
          # directly (skipping `success` below). Returning from a transaction
          # block is deprecated in Rails 6.1+ - confirm the target Rails version.
          return failure(line_item) unless line_item.update(line_item_attributes)

          recalculate_service.call(order: line_item.order, line_item: line_item, options: options)
        end

        success(line_item)
      end
    end
  end
end
|
// Copyright © 2015 ~ 2017 Sunsoft Studio, All rights reserved.
// Umizoo is a framework can help you develop DDD and CQRS style applications.
//
// Created by young.han with Visual Studio 2017 on 2017-08-08.
using System.Runtime.Serialization;
namespace Umizoo.Messaging
{
/// <summary>
/// Represents a command that creates or modifies a single aggregate root.
/// </summary>
[DataContract]
public abstract class Command<TAggregateRootId> : Command
{
    /// <summary>
    /// Identifier of the aggregate root targeted by this command.
    /// </summary>
    [DataMember(Name = "aggregateRootId")]
    public TAggregateRootId AggregateRootId { get; set; }

    /// <summary>
    /// Routes by the aggregate root id; yields no routing key when the id is unset.
    /// </summary>
    protected override string GetRoutingKey()
    {
        return AggregateRootId == null ? null : AggregateRootId.ToString();
    }

    /// <summary>
    /// Returns a readable identity string (type name + aggregate root id).
    /// </summary>
    public override string ToString()
    {
        return string.Concat(GetType().FullName, "#", AggregateRootId);
    }
}
}
|
/*
* @Description:
* @Version: 1.0
* @Autor: gll
* @Date: 2020-12-05 14:47:32
* @LastEditors: gll
* @LastEditTime: 2020-12-11 14:39:30
*/
import customZhCn from '@/locale/lang/zh-CN'
import customEnUs from '@/locale/lang/en-US'
import zhCnLocale from '@/locale/lang/zh-CN'
import enUsLocale from '@/locale/lang/en-US'

// Locale message table keyed by BCP-47 language tag.
//
// NOTE(review): each "custom"/base import pair above resolves to the SAME
// module path, so the merge is currently a no-op - presumably one pair was
// meant to point at a UI-library locale; verify the intended paths.
//
// Merge into fresh objects: the previous Object.assign(zhCnLocale, ...)
// wrote the merge result back into the shared, imported module object,
// mutating it for every other importer.
const messages = {
  'zh-CN': Object.assign({}, zhCnLocale, customZhCn),
  'en-US': Object.assign({}, enUsLocale, customEnUs)
}
export default messages
|
#!/usr/bin/env bats
load helpers "install.sh"
#function setup() {}
#function teardown() {}
# Files that are NOT in the skip list should be installed, so the function
# must return non-zero for them. (The previous description said "should
# return 0", contradicting the `-ne 0` assertions below.)
@test '#should_the_dotfile_be_skipped should not return 0 when the file that is out of target was specified' {
    run should_the_dotfile_be_skipped ".vim"
    [[ "$status" -ne 0 ]]

    run should_the_dotfile_be_skipped ".tmux.conf"
    [[ "$status" -ne 0 ]]

    run should_the_dotfile_be_skipped "foo"
    [[ "$status" -ne 0 ]]
}
# Files in the skip list (VCS metadata, swap files, the dot/backup dirs)
# must be skipped, i.e. the function returns 0. (The previous description
# said "should not return 0", contradicting the `-eq 0` assertions below.)
@test '#should_the_dotfile_be_skipped should return 0 when the file that is in the scope of target was specified' {
    run should_the_dotfile_be_skipped ".git"
    [[ "$status" -eq 0 ]]

    run should_the_dotfile_be_skipped ".DS_Store"
    [[ "$status" -eq 0 ]]

    run should_the_dotfile_be_skipped ".gitignore"
    [[ "$status" -eq 0 ]]

    run should_the_dotfile_be_skipped ".gitmodules"
    [[ "$status" -eq 0 ]]

    run should_the_dotfile_be_skipped ".vim.swp"
    [[ "$status" -eq 0 ]]

    run should_the_dotfile_be_skipped ".foo.swp"
    [[ "$status" -eq 0 ]]

    run should_the_dotfile_be_skipped "${DOTDIR}"
    [[ "$status" -eq 0 ]]

    run should_the_dotfile_be_skipped "${BACKUPDIR}"
    [[ "$status" -eq 0 ]]
}
|
% Copyright (c) Facebook, Inc. and its affiliates.
%
% This source code is licensed under the MIT license found in the
% LICENSE file in the root directory of this source tree.
%% Analyzer test fixture: functions suffixed _Ok are expected to be reported
%% safe, functions suffixed _Bad are expected to trigger a report. Do not
%% "fix" the _Bad functions - their crashes are the point of the test.
-module(features_short_circuit).

-export([
    test_and_Ok/0,
    test_and_Bad/0,
    test_andalso_Ok/0,
    test_andalso_Bad/0,
    test_or_Ok/0,
    test_or_Bad/0,
    test_orelse_Ok/0,
    test_orelse_Bad/0
]).

%% Single clause matching only 1: any other argument crashes with a
%% function_clause error, which is what the _Bad cases rely on.
accepts_one(1) ->
    true.

test_and_Ok() ->
    % All fine here
    true and accepts_one(1).

test_and_Bad() ->
    % Fails because no short circuit: `and` always evaluates both operands
    false and accepts_one(0).

test_andalso_Ok() ->
    % Ok because short circuit: RHS never evaluated when LHS is false
    false andalso accepts_one(0).

test_andalso_Bad() ->
    % Fails because LHS comes first
    accepts_one(0) andalso false.

test_or_Ok() ->
    % All fine here
    false or accepts_one(1).

test_or_Bad() ->
    % Fails because no short circuit: `or` always evaluates both operands
    true or accepts_one(0).

test_orelse_Ok() ->
    % Ok because short circuit: RHS never evaluated when LHS is true
    true orelse accepts_one(0).

test_orelse_Bad() ->
    % Fails because LHS comes first
    accepts_one(0) orelse true.
|
package com.example.simpletodo;
import androidx.room.Dao;
import androidx.room.Delete;
import androidx.room.Insert;
import androidx.room.Query;
import androidx.room.Update;
import java.util.List;
@Dao
public interface TodoDao {

    /** Returns every row of the {@code todos} table. */
    @Query("SELECT * FROM todos")
    List<Todo> getAll();

    /** Inserts one todo; Room fails on primary-key conflict by default. */
    @Insert
    void insert(Todo todo);

    /** Deletes the row whose primary key matches {@code todo}'s. */
    @Delete
    void delete(Todo todo);

    /** Replaces the task text of the todo with the given {@code id}. */
    @Query("UPDATE todos SET task = :newTask WHERE id = :id")
    void update(int id, String newTask);

    /** Fetches the todo with the given {@code id} (empty list when absent). */
    @Query("SELECT * from todos WHERE id = :id")
    List<Todo> select(int id);
}
|
module correlation
! JLR 30/6/2014
!
! uneven sampled correlation using Gaussian kernel using method of
! Rehfeld et al, Nonlinear Processes in Geophysics, 2011
implicit none
contains

! Gaussian-kernel cross-correlation of two unevenly sampled series.
!   x, tx : values and sample times of the first series
!   y, ty : values and sample times of the second series
! Returns the kernel-weighted Pearson-style correlation num/sqrt(sdx*sdy).
function correlate_gaussian(x,y,tx,ty)
real :: correlate_gaussian
real, intent(in) :: x(:),y(:),tx(:),ty(:)
integer :: n,m,i,j
real :: b,h,d,xmean,ymean,txmean,tymean,delta_t,pi
! Per-i partial sums, sized by the first series; each element accumulates
! over all j of the second series.
real :: sdx(size(x)),sdy(size(x)),num(size(x))
real :: sdxg,sdyg,numg,deng
n=size(x,1)
m=size(y,1)
! pi via atan2 so no literal constant is needed
pi=abs(atan2(0.0,-1.0))
! calc mean for x series
xmean=0.0
do i=1,n
xmean=xmean+x(i)
enddo
xmean=xmean/n
! mean sampling interval of the x time axis
txmean=(tx(n)-tx(1))/(n-1)
! calc mean for y series
ymean=0.0
do j=1,m
ymean=ymean+y(j)
enddo
ymean=ymean/m
! mean sampling interval of the y time axis
tymean=(ty(m)-ty(1))/(m-1)
sdx=0.0
sdy=0.0
num=0.0
! kernel bandwidth: a quarter of the coarser mean sampling interval
delta_t=max(txmean,tymean)
h=delta_t/4
!$acc kernels loop gang, vector(64)
do i=1,n
do j=1,m
d=ty(j)-tx(i)
! Gaussian weight for the time lag d.
! NOTE(review): the normalisation sqrt(2*pi*h) differs from the standard
! Gaussian density factor h*sqrt(2*pi); any constant factor cancels in the
! final ratio num/sqrt(sdx*sdy), so the correlation value is unaffected -
! confirm if b is ever used on its own.
b=exp(-d**2/(2*h**2))/sqrt(2*pi*h)
num(i)=num(i)+(x(i)-xmean)*(y(j)-ymean)*b
sdx(i)=sdx(i)+b*(x(i)-xmean)**2
sdy(i)=sdy(i)+b*(y(j)-ymean)**2
enddo
enddo
!$acc end kernels
! reduce the per-i partial sums to scalars
sdxg=0.0
sdyg=0.0
numg=0.0
do i=1,n
sdxg=sdxg+sdx(i)
sdyg=sdyg+sdy(i)
numg=numg+num(i)
enddo
correlate_gaussian=numg/sqrt(sdxg*sdyg)
end function
end module
|
// Copyright (c) Microsoft. All rights reserved.
namespace TestResultCoordinator
{
using System;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Hosting;
using Microsoft.Azure.Devices.Edge.ModuleUtil;
using Microsoft.Azure.Devices.Edge.Util;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
class Program
{
    // Module-wide logger tagged with the module name.
    static readonly ILogger Logger = ModuleUtil.CreateLogger("TestResultCoordinator");

    // Entry point: starts the ASP.NET Core web host and blocks until either
    // the host exits or the shutdown handler cancels the token.
    static async Task Main(string[] args)
    {
        Logger.LogInformation($"Starting TestResultCoordinator with the following settings:\r\n{Settings.Current}");

        // Install a shutdown handler with a 5-second grace period; it owns
        // the cancellation token and the completion event used below.
        (CancellationTokenSource cts, ManualResetEventSlim completed, Option<object> handler) = ShutdownHandler.Init(TimeSpan.FromSeconds(5), Logger);
        Logger.LogInformation("Creating WebHostBuilder...");
        Task webHost = CreateHostBuilder(args).Build().RunAsync(cts.Token);
        await Task.WhenAny(cts.Token.WhenCanceled(), webHost);
        // Signal the shutdown handler that cleanup is done.
        completed.Set();
        // Keep the handler object reachable for the process lifetime.
        handler.ForEach(h => GC.KeepAlive(h));
        Logger.LogInformation("TestResultCoordinator Main() exited.");
    }

    // Builds the web host, binding to the port from module settings.
    static IHostBuilder CreateHostBuilder(string[] args) =>
        Host.CreateDefaultBuilder(args)
            .ConfigureWebHostDefaults(webBuilder =>
            {
                webBuilder
                    .UseUrls($"http://*:{Settings.Current.WebHostPort}")
                    .UseSockets()
                    .UseStartup<Startup>();
            });
}
}
|
/**
* Created by Administrator on 2016/8/28.
*/
$(function () {
    // On viewports wider than 767px, a click on the dropdown toggle
    // navigates straight to the shop home page instead of opening a menu.
    $('.dropdown-toggle').on('click', function () {
        if ($(window).width() <= 767) {
            return;
        }
        location.href = "http://myshop.index.com/";
    });
});
|
/*
*
* Copyright 2018 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.genie.agent.cli;
import com.beust.jcommander.Parameter;
import com.fasterxml.jackson.databind.JsonNode;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.netflix.genie.common.internal.dto.v4.Criterion;
import com.netflix.genie.common.util.GenieObjectMapper;
import lombok.Getter;
import org.springframework.stereotype.Component;
import java.io.File;
import java.util.List;
import java.util.Set;
/**
* Implementation of JobRequestArguments arguments delegate.
*
* @author mprimi
* @since 4.0.0
*/
@Getter
@Component
class JobRequestArgumentsImpl implements ArgumentDelegates.JobRequestArguments {

    @VisibleForTesting
    static final String DEFAULT_JOBS_DIRECTORY = "/tmp/genie/jobs/";

    // Notice this is a 'main' argument: it has no name and binds to all unnamed arguments that
    // appear on the command-line after the named options.
    // (Description brackets balanced: one closing bracket per optional group.)
    @Parameter(description = "[commandArg1, [commandArg2 [...]]]")
    private List<String> commandArguments = Lists.newArrayList();

    @Parameter(
        names = {"--jobDirectoryLocation"},
        description = "The local directory in which the job directory is created and executed from",
        converter = ArgumentConverters.FileConverter.class
    )
    private File jobDirectoryLocation = new File(DEFAULT_JOBS_DIRECTORY);

    @Parameter(
        names = {"--interactive"},
        // Typo fixed: "Proxi" -> "Proxy" in the user-visible help text.
        description = "Proxy standard {input,output,error} to the parent terminal, also disable console logging"
    )
    private boolean interactive;

    @Parameter(
        names = {"--archivalDisabled"},
        description = "Disable server-side archival of logs (does not disable logs upload and short-term retrieval)"
    )
    private boolean archivalDisabled;

    @Parameter(
        names = {"--timeout"},
        description = "Time (in seconds) after which a running job is forcefully terminated"
    )
    private Integer timeout;

    @Parameter(
        names = {"--jobId"},
        description = "Unique job identifier"
    )
    private String jobId;

    @Parameter(
        names = {"--clusterCriterion"},
        description = "Criterion for cluster selection, can be repeated (see CRITERION SYNTAX)",
        converter = ArgumentConverters.CriterionConverter.class
    )
    private List<Criterion> clusterCriteria = Lists.newArrayList();

    @Parameter(
        names = {"--commandCriterion"},
        description = "Criterion for command selection (see CRITERION SYNTAX)",
        converter = ArgumentConverters.CriterionConverter.class
    )
    private Criterion commandCriterion;

    @Parameter(
        names = {"--applicationIds"},
        description = "Override the applications a command normally depends on, can be repeated"
    )
    private List<String> applicationIds = Lists.newArrayList();

    @Parameter(
        names = {"--jobName"},
        description = "Name of the job"
    )
    private String jobName;

    @Parameter(
        names = {"--user"},
        description = "Username launching this job",
        hidden = true // Not advertised to avoid abuse, but available for legitimate use cases
    )
    private String user = System.getProperty("user.name", "unknown-user");

    @Parameter(
        names = {"--email"},
        description = "Email address where to send a job completion notification"
    )
    private String email;

    @Parameter(
        names = {"--grouping"},
        description = "Group of jobs this job belongs to"
    )
    private String grouping;

    @Parameter(
        names = {"--groupingInstance"},
        description = "Group instance this job belongs to"
    )
    private String groupingInstance;

    @Parameter(
        names = {"--jobDescription"},
        description = "Job description"
    )
    private String jobDescription;

    @Parameter(
        names = {"--jobTag"},
        description = "Job tag, can be repeated"
    )
    private Set<String> jobTags = Sets.newHashSet();

    @Parameter(
        names = {"--jobVersion"},
        description = "Job version"
    )
    private String jobVersion;

    @Parameter(
        names = {"--jobMetadata"},
        description = "JSON job metadata",
        converter = ArgumentConverters.JSONConverter.class
    )
    private JsonNode jobMetadata = GenieObjectMapper.getMapper().createObjectNode();

    @Parameter(
        names = {"--jobSpecificationFile"},
        description = "The local file where to load a job specification from, rather than resolving via server",
        converter = ArgumentConverters.FileConverter.class
    )
    private File jobSpecificationFile;
}
|
package eightsidedsquare.Illegal.common.block;
import net.minecraft.block.Block;
import net.minecraft.block.BlockState;
import net.minecraft.block.ShapeContext;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.shape.VoxelShape;
import net.minecraft.world.BlockView;
public class SmallBlock extends Block {
private final VoxelShape OUTLINE_SHAPE;
public SmallBlock(Settings settings, double diameter) {
super(settings);
OUTLINE_SHAPE = Block.createCuboidShape(8.0D - diameter / 2.0D, 0, 8.0D - diameter / 2.0D, 8.0D + diameter / 2.0D, diameter, 8.0D + diameter / 2.0D);
}
@SuppressWarnings("deprecation")
public VoxelShape getOutlineShape(BlockState state, BlockView world, BlockPos pos, ShapeContext context) {
return this.OUTLINE_SHAPE;
}
}
|
; Miniscule: the world's smallest generic virus (only 31 bytes long!)
; (C) 1992 Nowhere Man and [NuKE] WaReZ
; Written on January 22, 1991
code segment 'CODE'
assume cs:code,ds:code,es:code,ss:code
org 0100h
main proc near
; Find the name of the first file and return it in the DTA. No checking
; is done for previous infections, and ANY file (except directory "files")
; will be infected, including data, texts, etc. So either a file is corrupted
; (in the case of data or text) or infected (.EXE and .COM files). Files that
; have the read-only flag set are immune to Miniscule.
mov ah,04Eh ; DOS find first file function
mov cl,020h ; CX holds attribute mask
mov dx,offset star_dot_com ; DX points to the file mask
int 021h
; Open the file that we've found for writing only and put the handle into
; BX (DOS stupidly returns the file handle in AX, but all other DOS functions
; require it to be in AX, so we have to move it).
mov ax,03D01h ; DOS open file function, w/o
mov dx,009Eh ; DX points to the found file
int 021h
xchg bx,ax ; BX holds the file handle
; Write the virus to the file. The first 31 bytes at offset 0100h (ie: the
; virus) are written into the beginning of the victim. No attempt is made
; to preserve the victim's executability. This also destroys the file's date
; and time, making Miniscule's activity painfully obvious. Also, if the
; victim is smaller than 31 bytes (rare), then it will grow to exactly 31.
mov ah,040h ; DOS write to file function
dec cx ; CX now holds 01Fh (length)
mov dx,offset main ; DX points to start of code
int 021h
; Exit. I chose to use a RET statement here to save one byte (RET is one byte
; long, INT 020h is two), so don't try to compile this as an .EXE file; it
; will crash, as only .COMs RETurn correctly (DOS again). However INFECTED
; .EXE programs will run successfully (unless they are larger than 64k, in
; which case DOS will refuse to run it.
ret ; RETurn to DOS
main endp
; The only data required in this program, and it's only four bytes long. This
; is the file mask that the DOS find first file function will use when
; searching. Do not change this to .EXE (or whatever) because this virus
; is size dependent (if you know what you're doing, go ahead [at you're own
; risk]).
star_dot_com db "*.*",0 ; File search mask
finish label near
code ends
end main
; There you have it: thirty-one bytes of pure terror -- NOT! As you can
; pretty well guess, this virus is very lame. Due to its poor reproduction,
; it is hardly a threat (hitting one file, if you're lucky), but it works,
; and it fits the definition of a virus. There is no way to make this code
; any smaller (at least under MS-DOS), except if you made it only infect
; one specific file (and the file would have to have a one- or two-byte name,
; too), and that would be next to useless.
|
# Numerical Vectors
<div class="warning">
Work in progress: this section covers fairly complex material and is still being written.
</div>
|
/**
* Created by Kelvin on 6/28/2016.
*/
'use strict';
describe('Controller: RSVPCodeCtrl', function () {

    // load the controller's module
    beforeEach(module('tiffanyAndKelvin'));
    beforeEach(module('templates'));

    var RSVPCodeCtrl,
        scope,
        rootScope,
        deferred;

    // Stub factory: getData returns a promise the tests resolve/reject manually.
    var rsvpFactory = {};
    var state = {
        go: function(name, args) {
            console.log(name);
        }
    };

    // Initialize the controller and a mock scope
    beforeEach(inject(function ($controller, $rootScope, $q) {
        rootScope = $rootScope;
        scope = $rootScope.$new();
        deferred = $q.defer();
        var promise = deferred.promise;
        //var resolvedValue;
        rsvpFactory.getData = function() {
            return promise;
        };

        RSVPCodeCtrl = $controller('RSVPCodeCtrl', {
            RSVPFactory: rsvpFactory,
            $state: state
        });

        // Spy installed after instantiation so the constructor's own calls
        // (if any) hit the real stub; tests below assert on this spy.
        spyOn(state, 'go');
    }));

    it('should initialize isBusy to false', function() {
        expect(RSVPCodeCtrl.isBusy).toBe(false);
    });

    it('should initialize invalidCode to null', function() {
        expect(RSVPCodeCtrl.invalidCode).toBe(null);
    });

    it('should initialize formModel to have a code property', function() {
        var expected = {
            code: ''
        };
        expect(RSVPCodeCtrl.formModel).toEqual(expected);
    });

    it('should initialize form to an empty object', function() {
        var expected = {};
        expect(RSVPCodeCtrl.form).toEqual(expected);
    });

    it('should not change invalidCode if it is null', function() {
        var expected = null;
        var code = 'test';
        RSVPCodeCtrl.invalidCode = null;
        RSVPCodeCtrl.codeWatcher(code);
        expect(RSVPCodeCtrl.invalidCode).toBe(expected)
    });

    it('should change invalidCode to null if it does not match the code', function() {
        var expected = 'hello';
        var code = 'test';
        RSVPCodeCtrl.invalidCode = expected;
        RSVPCodeCtrl.codeWatcher(code);
        expect(RSVPCodeCtrl.invalidCode).toBe(null);
    });

    it('should not change invalidCode to null if it does match the code', function() {
        var expected = 'test';
        var code = 'test';
        RSVPCodeCtrl.invalidCode = expected;
        RSVPCodeCtrl.codeWatcher(code);
        expect(RSVPCodeCtrl.invalidCode).toBe(expected);
    });

    describe('submit', function() {
        var formModel = {
            code: 'foo'
        };

        beforeEach(function() {
            RSVPCodeCtrl.submit(formModel);
        });

        it('should call state go with rsvp and the formModel code when successful', function() {
            deferred.resolve();
            // $apply flushes the resolved promise through the digest cycle.
            scope.$apply();
            expect(state.go).toHaveBeenCalledWith('rsvp', {code: formModel.code});
        });

        it('should set invalidCode to the formModel code when unsuccessful', function() {
            deferred.reject();
            scope.$apply();
            expect(RSVPCodeCtrl.invalidCode).toEqual(formModel.code);
        });

        it('should set isBusy to true', function() {
            expect(RSVPCodeCtrl.isBusy).toBe(true);
        });

        it('should set isBusy to false when the promise finishes', function() {
            deferred.reject();
            scope.$apply();
            expect(RSVPCodeCtrl.isBusy).toBe(false);
        });
    });
});
|
import static org.junit.Assert.*;
import java.io.IOException;
import java.util.List;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
public class SearchUnitTest1 {

    private Graph g1;
    public final static int DF = 0;
    public final static int BF = 1;
    public final static int SAME = 2;
    public static boolean printValues = true;

    private enum Tests {
        TestGraphBDS01("TestGraph01", SAME, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9),
        TestGraphBS03("TestGraph02", BF, 0, 4, 2, 1, 3),
        TestGraphDS03("TestGraph02", DF, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9);

        String name;
        int[] reihenfolge;
        int art;

        Tests(String name, int art, int... zahl) {
            this.name = name;
            this.art = art;
            this.reihenfolge = zahl;
        }
    }

    @Before
    public void setUp() throws Exception {
        // read graph from file
        try {
            g1 = GraphIO.loadGraph("tests/testgraphen/graphBFS_VS_DFS.txt");
            g1.setShowSteps(false);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    @Test
    public void testDepthFirstOrderEmptyGraph() {
        Graph g2 = new DiGraph();
        boolean succeed=false;
        try{
            List<Node> nodeList = g2.depthFirstSearch(0);
            if(nodeList==null)
                succeed=true;
        }
        catch(RuntimeException e){
            succeed=true;
        }
        assertTrue("testDepthFirstOrderEmptyGraph failed",succeed);
    }

    @Test
    public void testDepthFirstOrderNonExistingNode() {
        boolean succeed=false;
        try{
            List<Node> nodeList = g1.depthFirstSearch(100);
            if(nodeList==null)
                succeed=true;
        }
        catch(RuntimeException e){
            succeed=true;
        }
        // Assertion message now names THIS test (was copy-pasted from the
        // empty-graph test above).
        assertTrue("testDepthFirstOrderNonExistingNode failed",succeed);
    }

    @Test
    public void testDepthFirstOrder() {
        Tests test = Tests.TestGraphBDS01;
        g1 = loadTxt(test.name);
        if (!compareIDS(g1.depthFirstSearch(0), DF, test,printValues))
            fail("Reihenfolge nicht korrekt");
    }

    @Test
    public void testBreadthFirstOrderEmptyGraph() {
        Graph g2 = new DiGraph();
        boolean succeed=false;
        try{
            List<Node> nodeList = g2.breadthFirstSearch(0);
            if(nodeList==null)
                succeed=true;
        }
        catch(RuntimeException e){
            succeed=true;
        }
        // Assertion message now names THIS test (was copy-pasted from the
        // depth-first variant).
        assertTrue("testBreadthFirstOrderEmptyGraph failed",succeed);
    }

    @Test
    public void testBreadthFirstOrderNonExistingNode() {
        boolean succeed=false;
        try{
            List<Node> nodeList = g1.breadthFirstSearch(100);
            if(nodeList==null)
                succeed=true;
        }
        catch(RuntimeException e){
            succeed=true;
        }
        assertTrue("testBreadthFirstOrderNonExistingNode failed",succeed);
    }

    @Test
    public void testBreadthFirstOrder() {
        Tests test = Tests.TestGraphBS03;
        g1 = loadTxt(test.name);
        if (!compareIDS(g1.breadthFirstSearch(0), BF, test, printValues))
            fail("Reihenfolge nicht korrekt");
    }

    public static Graph loadTxt(String name) {
        Graph ret = null;
        try {
            ret = GraphIO.loadGraph("tests/testgraphen/" + name + ".txt");
            ret.setShowSteps(false);
        } catch (IOException e) {
            System.out.println("file not found: " + "tests/testgraphen/" + name);
            e.printStackTrace();
        }
        return ret;
    }

    /**
     * Prints the list in the console
     *
     * @param kind
     *            what kind of search
     * @param nodeList
     *            the list
     */
    public static void printNodeList(int kind, List<Node> nodeList) {
        if (nodeList == null || nodeList.size() == 0) {
            System.out.println("CompareList: " + nodeList);
            return;
        }
        System.out.println("\n" + (kind == 0 ? "depth first search" : "breadth first search"));
        for (Node node : nodeList) {
            System.out.print("ID: " + node.id + " edges --> ");
            for (Edge edge : node.edges) {
                System.out.print(edge.endnode.id + ", ");
            }
            System.out.println("");
        }
        System.out.println("");
    }

    public static boolean compareIDS(List<Node> returnValue, int kindOfSearch, Tests testCase, boolean printCompare) {
        if (returnValue == null || testCase == null || returnValue.size() != testCase.reihenfolge.length) {
            if(printCompare)
                System.out.println("CompareList: " + returnValue);
            return false;
        }
        if (kindOfSearch != testCase.art && testCase.art != 2)
            System.out.println("Achtung du verwendest evtl die Falsche Suche");
        for (int i = 0; i < returnValue.size(); i++) {
            if (printCompare)
                System.out.println(returnValue.get(i).id + " == " + testCase.reihenfolge[i]);
            if (returnValue.get(i).id != testCase.reihenfolge[i])
                return false;
        }
        return true;
    }
}
|
-- Pizza catalogue: category defaults to SANS_VIANDE (meat-free);
-- `code` is an optional, unique 3-character short code.
CREATE TABLE `pizza` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `categorie` varchar(50) NOT NULL DEFAULT 'SANS_VIANDE',
  `code` varchar(3) DEFAULT NULL,
  `nom` varchar(50) DEFAULT NULL,
  `prix` decimal(10,2) NOT NULL,
  `url_image` varchar(500) DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `UK_uwl42bkm53vnwm0ncklemrij` (`code`)
);
-- Per-service execution-time log.
-- Fixed: removed the trailing comma (syntax error) and added the PRIMARY KEY
-- that MySQL requires for an AUTO_INCREMENT column.
CREATE TABLE `performance` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `service` varchar(255) DEFAULT NULL,
  `date` timestamp,
  `temps_execution` int(255) DEFAULT NULL,
  PRIMARY KEY (`id`)
);
|
module DMPRoadmap
  class Application < Rails::Application
    # Point WickedPdf at the wkhtmltopdf binary shipped in the gem bundle
    # (the Docker image installs it under /usr/local/bundle/bin).
    WickedPdf.config = {
      exe_path: '/usr/local/bundle/bin/wkhtmltopdf'
    }
  end
end
|
-- Seed a single demo row; the primary key is left to the database.
INSERT INTO demo(title, description) VALUES ('Call of the wild', 'bla bla bla bla')
|
# Force the test environment before the application boots.
ENV["RAILS_ENV"] = "test"
# Put this directory and ../lib on the load path so relative requires resolve.
$:.unshift File.dirname(__FILE__)
$:.unshift File.expand_path('../../lib', __FILE__)
require "scenario/config/environment"
require 'rspec/rails'
require 'capybara/rspec'
require 'pry'
require 'database_cleaner/active_record'
RSpec.configure do |config|
# Truncate once up front, then wrap every example in a rolled-back transaction.
config.before(:suite) do
DatabaseCleaner.strategy = :transaction
DatabaseCleaner.clean_with(:truncation)
end
config.around(:each) do |example|
DatabaseCleaner.cleaning do
example.run
end
end
end
# Patching Rack::FakeCAS so that it uses the real, configured service URL as the service param
require 'rack/fake_cas'
class Rack::FakeCAS
protected
# Overrides the gem's login page: the hidden "service" field is filled from
# RackCAS.config.service (instead of the request URL) so the fake CAS
# round-trip redirects back to the app's configured service.
def login_page
<<-EOS
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8"/>
<title>Fake CAS</title>
</head>
<body>
<form action="#{@request.script_name}/login" method="post">
<input type="hidden" name="service" value="#{RackCAS.config.service}"/>
<label for="username">Username</label>
<input id="username" name="username" type="text"/>
<label for="password">Password</label>
<input id="password" name="password" type="password"/>
<input type="submit" value="Login"/>
</form>
</body>
</html>
EOS
end
end
# Auto-load every helper under spec/support (shared contexts, matchers, etc.).
Dir["#{File.dirname(__FILE__)}/support/**/*.rb"].each { |f| require f }
|
module SportDb
  # Imports one season of a league from CSV text: resolves league, season and
  # team names via the Import catalog, then syncs the event, its teams and
  # its matches into the database.
  class CsvEventImporter
    # Reads the CSV file at path (as UTF-8) and imports it.
    def self.read( path, league:, season:,
                         headers: nil )
      txt = File.open( path, 'r:utf-8' ) {|f| f.read }
      parse( txt, league:  league,
                  season:  season,
                  headers: headers )
    end

    # Imports the given CSV text; league and season are strings.
    def self.parse( txt, league:, season:,
                          headers: nil )
      new( txt, league:  league,
                season:  season,
                headers: headers ).parse
    end

    def initialize( txt, league:, season:, headers: nil )
      @txt     = txt
      @headers = headers

      ## fix: `raise ArgumentError(...)` called a (non-existent) method named
      ##      ArgumentError, blowing up with NoMethodError instead of raising;
      ##      use the `raise Class, message` form.
      raise ArgumentError, "string expected for league; got #{league.class.name}"   unless league.is_a? String
      raise ArgumentError, "string expected for season; got #{season.class.name}"   unless season.is_a? String

      ## try mapping of league here - why? why not?
      @league = Import.catalog.leagues.find!( league )
      @season = Season.parse( season )
    end

    # Runs the import; returns the event database record.
    def parse
      ## todo/fix: add headers options (pass through to CsvMatchReader)
      ##   add filters too why? why not?

      ## todo/fix:
      ##   add normalize: false/mapping: false flag for NOT mapping club/team names
      ##    make normalize: false the default, anyways - why? why not?
      opts = {}
      opts[:headers] = @headers   if @headers

      matches   = CsvMatchParser.parse( @txt, **opts )
      matchlist = Import::Matchlist.new( matches )

      team_names = matchlist.teams   ## was: find_teams_in_matches_txt( matches_txt )
      puts "#{team_names.size} teams:"
      pp team_names

      ## note: allows duplicates - will return uniq struct recs in teams
      teams = Import.catalog.teams.find_by!( name:   team_names,
                                             league: @league )

      ## build mapping - name => team struct record
      team_mappings = team_names.zip( teams ).to_h
      pp team_mappings

      #######
      # start with database updates / sync here
      event_rec = Sync::Event.find_or_create_by( league: @league,
                                                 season: @season )

      ## todo/fix:
      ##  add check if event has teams
      ##   if yes - only double check and do NOT create / add teams
      ##   number of teams must match (use teams only for lookup/alt name matches)

      ## note: allows duplicates - will return uniq db recs in teams
      ##       and mappings from names to uniq db recs
      ## todo/fix: rename to team_recs_cache or team_cache - why? why not?
      #  maps struct record "canonical" team name to active record db record!!
      ## note: use "canonical" team name as hash key for now (and NOT the object itself) - why? why not?
      team_recs = Sync::Team.find_or_create( team_mappings.values.uniq )

      ## todo/fix/check:
      ##  add check if event has teams
      ##   if yes - only double check and do NOT create / add teams
      ##   number of teams must match (use teams only for lookup/alt name matches)

      ## add teams to event
      ##  todo/fix: check if team is already included?
      ##     or clear/destroy_all first!!!
      event_rec.teams = team_recs   ## todo/check/fix: use team_ids instead - why? why not?

      ## add catch-all/unclassified "dummy" round
      #  round_rec = Model::Round.create!(
      #    event_id: event_rec.id,
      #    title:    'Matchday ??? / Missing / Catch-All', ## find a better name?
      #    pos:      999,
      #    start_at: event_rec.start_at.to_date
      #  )

      ## add matches
      matches.each do |match|
        team1_rec = Sync::Team.cache[ team_mappings[match.team1].name ]
        team2_rec = Sync::Team.cache[ team_mappings[match.team2].name ]

        if match.date.nil?
          puts "!!! WARN: skipping match - play date missing!!!!!"
          pp match
        else
          ## find last pos - check if it can be nil? yes, is nil if no records found
          max_pos = Model::Match.where( event_id: event_rec.id ).maximum( 'pos' )
          max_pos = max_pos ? max_pos+1 : 1

          rec = Model::Match.create!(
                  event_id: event_rec.id,
                  team1_id: team1_rec.id,
                  team2_id: team2_rec.id,
                  ## round_id: round_rec.id,  -- note: now optional
                  pos:      max_pos,
                  date:     Date.strptime( match.date, '%Y-%m-%d' ),
                  score1:   match.score1,
                  score2:   match.score2,
                  score1i:  match.score1i,
                  score2i:  match.score2i,
                )
        end
      end  # each match

      event_rec   # note: return event database record
    end # method parse
  end # class CsvEventImporter
end # module SportDb
|
package annotator.tests;
/**
 * Test fixture: a class whose only member is a constructor with a single
 * int parameter -- presumably a target for annotation insertion in the
 * annotator tests (package annotator.tests); verify against the harness.
 */
public class ConstructorParam {
public ConstructorParam(int paramB) {}
}
|
package zapmixin
import "go.uber.org/zap/zapcore"
// AllLevels lists zap levels in ascending severity; getLevelThreshold uses
// it to expand a level into "this level and everything more severe".
// NOTE(review): zapcore.DPanicLevel (between Error and Panic) is not in this
// list -- confirm whether the omission is intentional.
var AllLevels = []zapcore.Level{
zapcore.DebugLevel,
zapcore.InfoLevel,
zapcore.WarnLevel,
zapcore.ErrorLevel,
zapcore.PanicLevel,
zapcore.FatalLevel,
}
// getLevelThreshold returns the suffix of AllLevels starting at l, i.e. l
// plus every level listed after it; it returns an empty slice when l does
// not appear in AllLevels.
func getLevelThreshold(l zapcore.Level) []zapcore.Level {
	for i, level := range AllLevels {
		if level == l {
			return AllLevels[i:]
		}
	}
	return []zapcore.Level{}
}
|
### 2.0.0 - 2018-03-28
This is a breaking maintenance release.
* (SERVER-2160) Remove logic for JRuby 1.7 vs 9k, assuming 9k always
* Bump JRuby version to 9.1.16.0-1
### 1.1.0 - 2018-02-27
* (SERVER-2130) Default to jit compile-mode for jruby 9k
* (SERVER-2081) Set KCode, source encoding and external encoding to ensure
source files are parsed as UTF-8 by default in jruby 1.7
### 1.0.0 - 2018-01-09
* (SERVER-2060) Splay JRuby instance flushing
### 0.12.0 - 2018-01-25
This is a minor release.
* (SERVER-2081) Change the encoding settings on the JRuby scripting container
to use UTF-8 for parsing source files, external encoding, and KCode. This
resolves a bug with mismatched encodings that can sometimes appear when
interpolating translated strings into non-translated ones.
### 0.11.2 - 2017-12-14
This is a patch release.
* (SERVER-1780) Insert hashCode into profiler output file name, to ensure
that new files don't clobber each other.
### 0.11.1 - 2017-12-12
This is a patch release.
* (SERVER-1874) Update to jruby 9.1.15.0
### 0.11.0 - 2017-12-11
This is a feature release
* (SERVER-1821) Bump JRuby 9k dep to 9.1.9.0-2 for ffi memory leak fix
* (SERVER-1840) Bump to JRuby 9.1.11.0-1
* (SERVER-1780) Enable jruby profiling for ScriptingContainers
### 0.10.0 - 2017-05-26
This is a breaking maintenance / minor feature release.
Maintenance:
* Bump JRuby 1.7 dependencies to 1.7.27.
Breaking changes:
* Support for configuring the Ruby language `compat-version`, including the
related functions, has been removed in this release.
The `compat-version` was only previously functional for JRuby 1.7.x
releases since each JRuby 9k release only supports a single language
version. JRuby 1.7.27 effectively breaks the ability to use Ruby language
version 2.0, however, due to a regression - see:
https://github.com/jruby/jruby/issues/4613 - which is unlikely to be fixed
in the future since the JRuby 1.7.x series has been declared EOL. For
Ruby language 2+ support, users will now have to use jruby-utils with a
jruby-deps 9.x.x.x version dependency.
Features:
* Added a `jruby-version-info` function for getting the version string that
JRuby reports at the command line.
### 0.9.1 - 2017-05-19
This is a minor release.
* Remove warning for jar deletion failure. Changes in upstream JRuby will
remove some of the temporary jars that are copied at runtime, so the logging
for our cleanup of these jars is not necessary.
* jruby-deps version bumps - JRuby 9k to 9.1.9.0-1 and JRuby 1.7 to 1.7.26-2.
### 0.9.0 - 2017-05-03
This is a feature release.
* [SERVER-1630](https://tickets.puppetlabs.com/browse/SERVER-1630) Added
support for exercising JRuby 9k. jruby-utils continues to depend upon
JRuby 1.7.26 but jruby-utils consumers can now successfully override
the 1.7.26 dependency with JRuby 9k-based dependencies. JRuby dependencies
are now derived from single upstream Maven artifact, puppetlabs/jruby-deps.
A jruby9k profile was added for executing the jruby-utils tests under
JRuby 9k.
### 0.8.0 - 2017-03-01
This is a minor feature and bugfix release.
* Introduce `:flush-timeout` config setting to specify how long a pool flush
attempt will wait for jruby instances to be returned to the pool before
aborting the attempt
* Add `lockWithTimeout` method to the `JrubyPool` class to facilitate the new
`:flush-timeout` setting
### 0.7.0 - 2017-01-03
This is a bugfix and internal improvement release.
* Remove the need for a second `JrubyPool` during pool flushes to improve
system stability
* Fix possibility of race condition where the pool lock can be granted even
when some jruby instances are still doing work
### 0.6.0 - 2016-12-19
This is a feature release.
* [SERVER-1475](https://tickets.puppetlabs.com/browse/SERVER-1475) Introduce
an Slf4jLogger class for use in routing JRuby log output to slf4j.
### 0.5.0 - 2016-11-22
This is a minor feature release.
* [SERVER-377](https://tickets.puppetlabs.com/browse/SERVER-377) Allow proxy
environment variables to pass through to scripting container.
### 0.4.1 - 2016-11-02
This is a minor release.
* Introduce a ScriptingContainer interface as an abstraction in place of a concrete
ScriptingContainer class in various public APIs.
### 0.4.0 - 2016-10-19
This is a minor feature release.
Features:
* Introduce support for a `compat-version` config option, which allows the MRI
compatibility version to be configured. Only supports 1.9 or 2.0, with 1.9
being the default if not specified.
### 0.3.0 - 2016-10-05
This is a minor feature release and maintenance release.
Features:
* Introduce support for `gem-path` config option, which will result in having the
GEM_PATH set for the jruby interpreters.
Maintenance:
* Introduce lein-parent for managing dependency versions
* Fixes to race conditions and timeouts in tests
### 0.2.1
This is a maintenance release.
* Bump dependencies to JRuby 1.7.26.
### 0.2.0
This is a feature release.
* [SERVER-584](https://tickets.puppetlabs.com/browse/SERVER-584)
Introduce a setting to allow configured environment variables to be passed
through to the JRuby scripting container.
### 0.1.0
* Initial release.
|
// IntelliJ SSR (structural search & replace) inspection test data.
// The <warning descr="SSR"> markers delimit the ranges the inspection is
// expected to highlight; they are consumed by the test framework and are
// not valid Kotlin on their own. Do not "fix" or reformat the markup.
var a: String? = "Hello world"
var b: String? = "Hello world"
var c: String? = "Hello other world"
val d = <warning descr="SSR">a != b</warning>
val e = <warning descr="SSR">!(a?.equals(b) ?: (b === null))</warning>
val f = a != c
val g = c != b
val h = !(a?.equals(c) ?: (b === null))
val i = !(c?.equals(b) ?: (b === null))
val j = !(a?.equals(b) ?: (c === null))
val k = !(a?.equals(b)!!)
val l = a !== b
val m = a == b
|
;; Flipping out
;; Write a higher-order function which flips the order of the arguments of an input function.
(defn flip-out
  "Returns a function that calls f with its two arguments swapped."
  [f]
  #(f %2 %1))
;; Tests
;; Each expression should print true.
(println (= 3 ((flip-out nth) 2 [1 2 3 4 5])))
(println (= true ((flip-out >) 7 8)))
(println (= 4 ((flip-out quot) 2 8)))
(println (= [1 2 3] ((flip-out take) [1 2 3 4 5] 3)))
|
-- pg-promise style UPDATE template: updates one row of the "<type>s" table
-- by primary key. ${type:raw} and ${keyvalues:raw} are spliced in verbatim
-- (no escaping) and must come from trusted, server-side values only;
-- ${id} is passed as a properly escaped value.
UPDATE ${type:raw}s
SET
${keyvalues:raw}
WHERE
id = ${id}
;
|
//
// classEmptyPointVisitFunc.cpp
// CppCore
//
// Created by paramAki on 2021/11/21.
//
//#include <iostream>
//using namespace std;
//
//class Person{
//public:
// void showName(){
// cout << "thi is a Person class" << endl;
// }
//
// void showAge(){
// if(this == NULL) return;
// cout << "age = " << m_Age << endl;
// }
//private:
// int m_Age;
//};
//
//int main(){
// Person *p = NULL;
// // 空指针访问成员函数
// p->showName();
// p->showAge();
//
// return 0;
//}
|
#include "ShaderData.h"

#include <iostream>
// Lazily compiles and links the shader program from the stored vertex and
// fragment source paths; the result is cached, so subsequent calls return
// the same program.
std::shared_ptr<QOpenGLShaderProgram> ShaderData::get_shader_program() {
  if (shader_program == nullptr) {
    shader_program = std::make_shared<QOpenGLShaderProgram>();
    // Fixed: addShaderFromSourceFile()/link() return false on failure; the
    // original ignored them, silently handing back a broken program. We
    // still return the (cached) program, but surface the build log.
    bool ok = shader_program->addShaderFromSourceFile(QOpenGLShader::Vertex,
                                                      vertex.c_str());
    ok = shader_program->addShaderFromSourceFile(QOpenGLShader::Fragment,
                                                 fragment.c_str()) && ok;
    if (!shader_program->link() || !ok) {
      std::cerr << "ShaderData: shader build failed: "
                << shader_program->log().toStdString() << std::endl;
    }
  }
  return shader_program;
}
|
# Sequelize
本文档介绍如何在 Midway 中使用 Sequelize 模块。
相关信息:
| 描述 | |
| -------------------- | ---- |
| 可作为主框架独立使用 | ❌ |
| 包含自定义日志 | ❌ |
| 可独立添加中间件 | ❌ |
## 使用方法:
```bash
$ npm i @midwayjs/sequelize@beta --save
```
## 安装数据库 Driver
常用数据库驱动如下,选择你对应连接的数据库类型安装:
```bash
# for MySQL or MariaDB,也可以使用 mysql2 替代
npm install mysql --save
npm install mysql2 --save
# for PostgreSQL or CockroachDB
npm install pg --save
# for SQLite
npm install sqlite3 --save
# for Microsoft SQL Server
npm install mssql --save
# for sql.js
npm install sql.js --save
# for Oracle
npm install oracledb --save
# for MongoDB(experimental)
npm install mongodb --save
```
## 引入模块
在 configuration.ts 文件中
```typescript
import { App, Configuration } from '@midwayjs/decorator';
import { ILifeCycle } from '@midwayjs/core';
import { Application } from 'egg';
import { join } from 'path';
import * as sequlize from '@midwayjs/sequelize';
@Configuration({
imports: [sequlize],
importConfigs: [join(__dirname, './config')],
})
export class ContainerLifeCycle implements ILifeCycle {
@App()
app: Application;
async onReady() {
}
}
```
## 配置
在 config.default.ts 中配置:
```typescript
export const sequelize = {
options: {
database: 'test4',
username: 'root',
password: '123456',
host: '127.0.0.1', // 此处支持idb上面vipserver key的那种方式,也支持aliyun的地址。
port: 3306,
encrypt: false,
dialect: 'mysql',
define: { charset: 'utf8' },
timezone: '+08:00',
logging: console.log
},
sync: false // 本地的时候,可以通过sync: true直接createTable
}
```
## 业务层
定义Entity
```typescript
import { Column, Model } from "sequelize-typescript";
import { BaseTable } from "@midwayjs/sequelize";
@BaseTable
export class Photo extends Model{
@Column({
comment: '名字'
})
name: string;
}
```
使用Entity:
查询列表
```typescript
import { Config, Controller, Get, Provide } from '@midwayjs/decorator';
import { Photo } from '../entity/Photo';
@Provide()
@Controller('/')
export class HomeController {
@Get('/')
async home() {
let result = await Photo.findAll()
console.log(result);
return 'hello world'
}
}
```
增加数据:
```typescript
import { Config, Controller, Get, Post, Provide } from '@midwayjs/decorator';
import { Photo } from '../entity/Photo';
@Provide()
@Controller('/')
export class HomeController {
@Post('/add')
async home() {
let result = await Photo.create({
name: '123'
});
console.log(result);
return 'hello world'
}
}
```
删除:
```typescript
import { Config, Controller, Get, Post, Provide } from '@midwayjs/decorator';
import { Photo } from '../entity/Photo';
@Provide()
@Controller('/')
export class HomeController {
@Post('/delete')
async home() {
    await Photo.destroy({
where: {
name: '123'
}
});
return 'hello world'
}
}
```
查找单个:
```typescript
import { Config, Controller, Get, Post, Provide } from '@midwayjs/decorator';
import { Photo } from '../entity/Photo';
@Provide()
@Controller('/')
export class HomeController {
  @Post('/findOne')
async home() {
    let result = await Photo.findOne({
where: {
name: '123'
}
});
return 'hello world'
}
}
```
联合查询:
```typescript
import { Config, Controller, Get, Provide } from '@midwayjs/decorator';
import { Photo } from '../entity/Photo';
import { Op } from 'sequelize';
@Provide()
@Controller('/')
export class HomeController {
@Get('/')
async home() {
let result = await Photo.findAll({
where: {
[Op.or]: [{name: "23"}, {name: "34"}] // SELECT * FROM photo WHERE name = "23" OR name = "34";
}
})
console.log(result);
return 'hello world'
}
}
```
关于OP的更多用法:[https://sequelize.org/v5/manual/querying.html](https://sequelize.org/v5/manual/querying.html)
如果遇到比较复杂的,可以使用raw query方法:
[https://sequelize.org/v5/manual/raw-queries.html](https://sequelize.org/v5/manual/raw-queries.html)
|
<?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use App\Models\Author;
use Illuminate\Support\Facades\Validator;
use Response;
class AuthorController extends Controller
{
    // Author model instance shared by every action.
    public $data;

    public function __construct(){
        $this->data = new Author();
    }

    /**
     * Validate and persist a new author message.
     *
     * Review fixes: validate before mutating the model, validate the injected
     * $request (the original used the request() helper there, inconsistently),
     * and treat save()'s boolean result directly instead of `== 1`.
     *
     * @param Request $request expects 'author_name' and 'msg' fields
     * @return \Illuminate\Http\JsonResponse|string
     */
    public function saveMessage(Request $request){
        $validator = Validator::make($request->all(), [
            'author_name' => 'required',
            'msg' => 'required|max:255',
        ]);
        if ($validator->fails()) {
            return response()->json([
                'errors' => $validator->errors(),
            ]);
        }

        // Assign only after validation has passed.
        $this->data->author_name = $request->author_name;
        $this->data->author_msg = $request->msg;

        if ($this->data->save()) {
            return "Data Saved successfully";
        }
        return "Something Went Wrong";
    }

    /**
     * Render the author index view with all author rows.
     */
    public function getAuthorData(){
        $getTable = $this->data->getAuthorTable();
        return view('authorIndex')->with('display',$getTable);
    }

    /**
     * Return one author's name, message and creation date as an array.
     *
     * NOTE(review): assumes getAuthorTableByID() returns an object for a
     * valid $id; a missing id would make the property reads fail -- confirm
     * and guard if $id comes from user input.
     */
    public function getAuthorDataByID($id){
        $result = $this->data->getAuthorTableByID($id);
        $data = [
            'Author Name' => $result->author_name,
            'Author Message' => $result->author_msg,
            'Date' => $result->created_at
        ];
        return $data;
    }
}
|
(ns appengine.datastore.core
(:import (com.google.appengine.api.datastore
DatastoreConfig DatastoreServiceFactory
Entity Key Query KeyFactory Transaction)))
;; Bound per-thread as part of the dotransaction macro using binding
;; Do not change this value directly except if you know what you are
;; doing. If you are using this manually, after you are done with a
;; transaction, make sure this is nil.
;; NOTE(review): on Clojure 1.3+ `binding` requires the var to be declared
;; ^:dynamic; this code (which uses pre-1.2 #^ hints) relies on the old
;; default. Confirm the target Clojure version before upgrading.
(def *thread-local-transaction* nil)
(defn datastore
"Creates a DatastoreService using the default or the provided
configuration.
Examples:
(datastore)
; => #<DatastoreServiceImpl com.google.appengine.api.datastore.DatastoreServiceImpl@a7b68a>"
([] (datastore DatastoreConfig/DEFAULT))
([configuration] (DatastoreServiceFactory/getDatastoreService configuration)))
(defn create-key
"Creates a new Key using the given kind and id. If the parent-key is
given, the new key will be a child of the parent-key.
Returns nil when id is nil or the empty string.
Examples:
(create-key \"country\" \"de\")
; => #<Key country(\"de\")>
(create-key (create-key \"continent\" \"eu\") \"country\" \"de\")
; => #<Key continent(\"eu\")/country(\"de\")>"
([kind id]
(create-key nil kind id))
([#^Key parent-key kind id]
;; if no id passed, return nil. Works
;; with entities.clj to get make-entity and create-entity macros
;; where the key is automatically generated by the ds
(if-not (or (nil? id) (.equals "" id))
;; numeric ids become Longs, everything else a string id
(KeyFactory/createKey parent-key kind
(if (integer? id) (Long/valueOf (str id))
(str id)))
nil)))
(defn key->string
"Returns a \"websafe\" string from the given Key.
Examples:
(key->string (create-key \"continent\" \"eu\"))
; => \"agR0ZXN0chELEgljb250aW5lbnQiAmV1DA\"
(key->string (create-key (create-key \"continent\" \"eu\") \"country\" \"de\")
; => \"agR0ZXN0ciALEgljb250aW5lbnQiAmV1DAsSB2NvdW50cnkiAmRlDA\""
[key] (KeyFactory/keyToString key))
(defn string->key
"Returns a Key from the given \"websafe\" string (inverse of key->string).
Examples:
(string->key \"agR0ZXN0chELEgljb250aW5lbnQiAmV1DA\")
; => #<Key country(\"de\")>
(string->key \"agR0ZXN0ciALEgljb250aW5lbnQiAmV1DAsSB2NvdW50cnkiAmRlDA\")
; => #<Key continent(\"eu\")/country(\"de\")>"
[string] (KeyFactory/stringToKey string))
(defn entity->map
"Converts an instance of com.google.appengine.api.datastore. Entity
to a PersistentHashMap with properties stored under keyword keys, plus
the entity's kind stored under :kind, key stored under :key, and the
key of the entity's parent, if any, under :parent-key.
Examples:
(entity->map (doto (Entity. \"continent\") (.setProperty \"name\" \"Europe\")))
; => {:name \"Europe\", :kind \"continent\", :key #<Key continent(no-id-yet)>}"
[#^Entity entity]
;; fold each datastore property into the base map of :kind/:key(/:parent-key)
(reduce #(assoc %1 (keyword (key %2)) (val %2))
(merge {:kind (.getKind entity) :key (.getKey entity)}
(if (.. entity getKey getParent)
{:parent-key (.. entity getKey getParent)} {}))
(.entrySet (.getProperties entity))))
;; forward declaration: map->entity below uses properties, defined after it
(declare properties)
(defn map->entity
"Converts a PersistentHashMap or struct into a Entity instance. The
map must have the key or kind of the entity stored under the :key or
a :kind keywords. If the map has a :parent-key Key, the Entity
instance will be a child of the Entity with the :parent-key Key.
Examples:
(map->entity {:key \"continent\" :name \"Europe\"})
; => #<Entity <Entity [continent(no-id-yet)]:
; name = Europe"
[map]
;; copy every property onto a fresh Entity built either from :kind plus
;; :parent-key, or (when no parent) from :key or :kind
(reduce #(do (.setProperty %1 (name (first %2)) (second %2)) %1)
(if (and (:kind map) (:parent-key map))
(Entity. (:kind map) (:parent-key map))
(Entity. (or (:key map) (:kind map))))
(properties map)))
(defmulti properties
"Returns the properties of the given record as a map."
class)
(defmethod properties Entity [#^Entity entity]
(properties (entity->map entity)))
;; default: strip the bookkeeping keys; everything else is a property
(defmethod properties :default [map]
(dissoc (merge {} map) :key :kind :parent-key))
;; Fixed: the doc string was placed after the argument vector, where it is
;; merely an evaluated-and-discarded expression; moved before [keys] so it
;; is attached to the var as real documentation.
(defn filter-keys
"Takes a sequence and returns a sequence of Key objects, doing
conversions from Entity or map (with a :key), if possible."
[keys]
(remove nil? (map #(cond (instance? Key %) %
(instance? Entity %) (.getKey %)
(instance? Key (:key %)) (:key %)) keys)))
(defmulti get-entity
"Retrieves a PersistentHashMap of an Entity in the datastore. The
entity can be retrieved by an instance of Key, Entity, a
PersistentHashMap with a :key keyword, or an ISeq of keys, in which
case a PersistentHashMap of appengine Keys to PersistentHashmaps of
Entities is returned."
(fn [& args] (if (second args) (class (second args)) :add-transaction)))
;; single-argument arity: no transaction supplied, so re-dispatch with the
;; thread-local transaction (nil outside dotransaction)
(defmethod get-entity :add-transaction
[identifier] (get-entity *thread-local-transaction* (if (nil? identifier)
{} identifier)))
(defmethod get-entity Entity
[transaction #^Entity entity]
(get-entity transaction (.getKey entity)))
(defmethod get-entity Key
[transaction key]
(entity->map (.get (datastore) transaction key)))
(defmethod get-entity clojure.lang.PersistentVector
[transaction keys]
(get-entity transaction (seq keys)))
;; batch get: returns {Key => entity-map} for all found keys
(defmethod get-entity clojure.lang.ISeq
[transaction keys]
(into {} (for [[key entity] (.get (datastore) transaction
(filter-keys keys))]
[key (entity->map entity)])))
(defmethod get-entity :default
[transaction map]
(if-let [key (:key map)]
(entity->map (.get (datastore) transaction key))))
(defmulti put-entity
"Puts the given record into the datastore and returns a
PersistentHashMap of the record. The record must be an instance of
Entity or PersistentHashMap."
(fn [& args] (if (second args) (class (second args)) :add-transaction)))
;; single-argument arity: fall back to the thread-local transaction
(defmethod put-entity :add-transaction
[identifier] (put-entity *thread-local-transaction* (if (nil? identifier)
{} identifier)))
(defmethod put-entity Entity
[transaction #^Entity entity]
;; .put assigns/returns the (possibly new) key; reflect it in the result map
(let [key (.put (datastore) transaction entity)]
(assoc (entity->map entity) :key key)))
(defmethod put-entity :default
[transaction map] (put-entity transaction (map->entity map)))
(defn find-all
"Executes the given com.google.appengine.api.datastore.Query
and returns the results as a lazy sequence of items converted
with entity->map."
([#^Query query] (find-all *thread-local-transaction* query))
([transaction #^Query query]
(let [data-service (datastore)
results (.asIterable (.prepare data-service transaction query))]
(map entity->map results))))
(defn create-entity
"Takes a map of keyword-value pairs or struct and puts a new Entity
in the Datastore. The map or struct must include a :kind String. If
the passed-in record includes a :parent-key Key, then the entity
created will be a child of the Entity with key :parent-key.
Returns the saved Entity converted with entity->map (which will
include the assigned :key)."
([record] (put-entity record))
([transaction record] (put-entity transaction record)))
(defmulti update-entity
"Updates the record with the given properties. The record must be an
instance of Entity, Key or PersistentHashMap. If one of the attributes'
values is :remove, then the corresponding property name is removed
from that entity. If one of the attributes' values is nil, then the
corresponding property name is set to Java's null for that property."
(fn [arg & args] (if (second args) (class (first args)) :add-transaction)))
;; two-argument arity: no transaction supplied, use the thread-local one
(defmethod update-entity :add-transaction
[identifier attributes] (update-entity *thread-local-transaction*
(if (nil? identifier) {} identifier)
attributes))
(defmethod update-entity Entity
[transaction #^Entity entity attributes]
;; apply each attribute; :remove deletes the property instead of setting it
(doseq [[attribute value] attributes]
(if (not= value :remove)
(.setProperty entity (name attribute) value)
(.removeProperty entity (name attribute))))
(put-entity transaction entity))
(defmethod update-entity Key
[transaction key attributes]
(update-entity transaction (get-entity transaction key) attributes))
(defmethod update-entity :default
[transaction map attributes]
(update-entity transaction (map->entity map) attributes))
(defmulti delete-entity
"Deletes the record. Accepts an Entity, a Key, a map with a :key, or a
vector/seq of any of those (batch delete)."
(fn [& args] (if (second args) (class (second args)) :add-transaction)))
;; Uses the boolean value false from the multimethod to catch
;; all calls where no transaction has been specified. Set
;; the transaction to *thread-local-transaction* (which is nil except
;; if we are in a (dotransaction body). The datastore treats nil
;; as a request to create a per-request-only transaction.
;;
;; same basic principle behind update, put, create, etc.
(defmethod delete-entity :add-transaction
[identifier] (delete-entity *thread-local-transaction* (if (nil? identifier)
{} identifier)))
(defmethod delete-entity Entity
[transaction #^Entity entity]
(delete-entity transaction (.getKey entity)))
(defmethod delete-entity Key
[transaction key]
(.delete (datastore) transaction (into-array [key])))
(defmethod delete-entity clojure.lang.PersistentVector
[transaction keys]
(delete-entity transaction (seq keys)))
(defmethod delete-entity clojure.lang.ISeq
[transaction keys]
(.delete (datastore) transaction (filter-keys keys)))
(defmethod delete-entity :default
[transaction map]
(if-let [key (:key map)]
(delete-entity transaction key)))
|
#!/bin/bash
# CI task: build and test the common-api Gradle module in ./source-code.
# -e: abort on the first failing command; -x: echo commands into the log.
set -ex
pushd source-code
echo "Fetching Dependencies & Building Code..."
# Discard assemble output; dependency-resolution noise is not useful here.
./gradlew common-api:assemble > /dev/null
echo "Running Tests..."
./gradlew common-api:test
popd
exit 0
|
## Azure Migrate for Servers
1. [Lift & Shift DC Migration Overview](./overview.md)
2. [Scan State](./scan.md)
3. [Assess and Select Migration Waves](./assess.md)
4. [Design and Build Landing Zone](./landingzone.md)
5. [Replication of Migration Waves](./replication.md)
6. [End to End Test of Migration Waves](./testing.md)
7. [Migration Waves Execution and Post Go-Live](./migration.md)
8. [QnA](./faq.md)
9. [Resources & Training](./resources.md)
|
#!/usr/bin/ruby -w
# Loops
# Demonstrates Ruby loop forms: while, begin/end-while (post-test), for, redo.
$i = 0
$num = 5
# Pre-test while loop; `do` is optional (see the commented variants below).
while $i < $num do
# while $i < $num
# while $i < $num ;
# while $i < $num \\
puts("inside loop i = #$i")
$i += 1
end
$i2 = 1
$num2 = 6
# begin/end while runs the body at least once (post-test loop).
begin
puts("modifier while i = #$i2")
$i2 += 2
end while $i2 < $num2
for i in 0..5
puts "Value of local variable is #{i}"
end
# WARNING: this loop never terminates once i exceeds 2 -- `redo` re-runs the
# current iteration with the same i, so the condition below stays true and
# the manual `i += 1` is never reached again.
for i in 0..5
if i > 2 then
puts "i is #{i}"
redo
end
# note: `for` reassigns i on each pass, so this increment has no effect on
# the iteration sequence
i += 1
end
# for i in 0..5
# retry if i > 2
# puts "Value of local variable is #{i}"
# end
# for i in 9..19
# retry if i >= 11
# puts "i = #{i}"
#
# end
|
---
title: 北邮计算机考研大纲
date: 2020-02-20 17:12:35 +0800
category: conquer
tags: 考研
excerpt: BUPT考研大纲
---
# 803计算机学科基础综合
## 考查目标
计算机学科基础综合考试涵盖数据结构、计算机组成原理、操作系统和计算机网络等学科专业基础课程。要求考生比较系统地掌握上述专业基础课程的基本概念、基本原理和基本方法,能够综合运用所学的基本原理和基本方法分析、判断和解决有关理论问题和实际问题。
## 考试形式和试卷结构
1、试卷满分及考试时间
本试卷满分为150分,考试时间为180分钟。
2、答题方式
答题方式为闭卷、笔试。
3、试卷内容结构
数据结构 45分
计算机组成原理 45分
操作系统 35分
计算机网络 25分
4、试卷题型结构
单项选择题 80分 (40小题,每小题2分)
综合应用题 70分
## 考查内容
### 数据结构
【考查目标】
1、掌握数据结构的基本概念、基本原理和基本方法。
2、掌握数据的逻辑结构、存储结构及基本操作的实现,能够对算法进行基本的时间复杂度与空间复杂度的分析。
3、能够运用数据结构基本原理和方法进行问题的分析与求解,具备采用C或C++语言设计与实现算法的能力。
一、线性表
(一)线性表的定义和基本操作
(二)线性表的实现
1、顺序存储
2、链式存储
3、线性表的应用
二、栈、队列和数组
(一)栈和队列的基本概念
(二)栈和队列的顺序存储结构
(三)栈和队列的链式存储结构
(四)栈和队列的应用
(五)特殊矩阵的压缩存储
三、树与二叉树
(一)树的基本概念
(二)二叉树
1、二叉树的定义及其主要特征
2、二叉树的顺序存储结构和链式存储结构
3、二叉树的遍历
4、线索二叉树的基本概念和构造
(三)树、森林
1、树的存储结构
2、森林与二叉树的转换
3、树和森林的遍历
(四)树与二叉树的应用
1、二叉排序树
2、平衡二叉树
3、哈夫曼(Huffman)树和哈夫曼编码
四、图
(一)图的基本概念
(二)图的存储及基本操作
1、邻接矩阵法
2、邻接表法
3、邻接多重表、十字链表
(三)图的遍历
1、深度优先搜索
2、广度优先搜索
(四)图的基本应用
1、最小(代价)生成树
2、最短路径
3、拓扑排序
4、关键路径
五、查找
(一)查找的基本概念
(二)顺序查找法
(三)分块查找法
(四)折半查找法
(五)B树及其基本操作、B+树的基本概念
(六)散列(Hash)表
(七)字符串模式匹配
(八)查找算法的分析及应用
六、排序
(一)排序的基本概念
(二)插入排序
1、直接插入排序
2、折半插入排序
(三)气泡排序(bubble sort)
(四)简单选择排序
(五)希尔排序(shell sort)
(六)快速排序
(七)堆排序
(八)二路归并排序(merge sort)
(九)基数排序
(十)外部排序
(十一)各种内部排序算法的比较
(十二)排序算法的应用
### 计算机组成原理
【考查目标】
1、理解单处理器计算机系统中各部件的内部工作原理、组成结构以及相互连接方式,具有完整的计算机系统的整机概念。
2、理解计算机系统层次化结构概念,熟悉硬件与软件之间的界面,掌握指令集体系结构的基本知识和基本实现方法。
3、能够综合运用计算机组成的基本原理和基本方法,对有关计算机硬件系统中的理论和实际问题进行计算、分析,对一些基本部件进行简单设计;并能对高级程序设计语言(如C语言)中的相关问题进行分析。
一、计算机系统概述
(一)计算机发展历程
(二)计算机系统层次结构
1、计算机系统的基本组成
2、计算机硬件的基本组成
3、计算机软件和硬件的关系
4、计算机的工作过程
(三)计算机性能指标
吞吐量、响应时间;CPU时钟周期、主频、CPI、CPU执行时间;MIPS、MFLOPS 、GFLOPS、TFLOPS、PFLOPS。
二、数据的表示和运算
(一)数制与编码
1、进位计数制及其相互转换
2、真值和机器数
3、BCD码
4、字符与字符串
5、校验码
(二)定点数的表示和运算
1、定点数的表示
无符号数的表示;有符号整数的表示。
2、定点数的运算
定点数的位移运算;原码定点数的加减运算;补码定点数的加/减运算;定点数的乘/除运算;溢出概念和判别方法。
(三)浮点数的表示和运算
1、浮点数的表示
IEEE 754标准
2、浮点数的加/减运算
(四)算术逻辑单元ALU
1、串行加法器和并行加法器
2、算术逻辑单元ALU的功能和结构
三、存储器层次结构
(一)存储器的分类
(二)存储器的层次化结构
(三)半导体随机存取存储器
1、SRAM存储器
2、DRAM存储器
3、只读存储器
4、Flash存储器
(四)主存储器与CPU的连接
(五)双口RAM和多模块存储器
(六)高速缓冲存储器(Cache)
1、Cache的基本工作原理
2、Cache和主存之间的映射方式
3、Cache中主存块的替换算法
4、Cache写策略
(七)虚拟存储器
1、虚拟存储器的基本概念
2、页式虚拟存储器
3、段式虚拟存储器
4、段页式虚拟存储器
5、TLB(快表)
四、指令系统
(一)指令格式
1、指令的基本格式
2、定长操作码指令格式
3、扩展操作码指令格式
(二)指令的寻址方式
1、有效地址的概念
2、数据寻址和指令寻址
3、常见寻址方式
(三)CISC和RISC的基本概念
五、中央处理器(CPU)
(一)CPU的功能和基本结构
(二)指令执行过程
(三)数据通路的功能和基本结构
(四)控制器的功能和工作原理
1、硬布线控制器
2、微程序控制器
微程序、微指令和微命令;微指令的编码方式;微地址的形式方式。
(五)指令流水线
1、指令流水线的基本概念
2、指令流水线的基本实现
3、超标量和动态流水线的基本概念
(六)多核处理器的基本概念
六、总线
(一)总线概述
1、总线的基本概念
2、总线的分类
3、总线的组成及性能指标
(二 )总线仲裁
1、集中仲裁方式
2、分布仲裁方式
(三)总线操作和定时
1、同步定时方式
2、异步定时方式
(四)总线标准
七、输入输出(I/O)系统
(一)I/O系统基本概念
(二)外部设备
1、输入设备:键盘、鼠标
2、输出设备:显示器、打印机
3、外存储器:硬盘存储器、磁盘阵列、光盘存储器
(三)I/O接口(I/O控制器)
1、I/O接口的功能和基本结构
2、I/O端口及其编址
(四)I/O方式
1、程序查询方式
2、程序中断方式
中断的基本概念;中断响应过程;中断处理过程;多重中断和中断屏蔽的概念。
3、DMA方式
DMA控制器的组成,DMA传送过程。
### 操作系统
【考查目标】
1、掌握操作系统的基本概念、基本原理和基本功能,理解操作系统的整体运行过程。
2、掌握操作系统进程、内存、文件和I/O管理的策略、算法、机制以及相互关系。
3、能够运用所学的操作系统原理、方法与技术分析问题和解决问题,并能利用C语言描述相关算法。
一、操作系统概述
(一)操作系统的概念、特征、功能和提供的服务
(二)操作系统的发展与分类
(三)操作系统的运行环境
1、内核态与用户态
2、中断、异常
3、系统调用
(四)操作系统体系结构
二、进程管理
(一)进程与线程
1、进程概念
2、进程的状态与转换
3、进程控制
4、进程组织
5、进程通信
共享存储系统;消息传递系统;管道通信。
6、线程概念与多线程模型
(二)处理机调度
1、调度的基本概念
2、调度时机、切换与过程
3、调度的基本准则
4、调度方式
5、典型调度算法
先来先服务调度算法;短作业(短进程、短线程)优先调度算法;时间片轮转
调度算法;优先级调度算法;高响应比优先调度算法;多级反馈队列调度算法。
(三)同步与互斥
1、进程同步的基本概念
2、实现临界区互斥的基本方法
软件实现方法;硬件实现方法。
3、信号量
4、管程
5、经典同步问题
生产者-消费者问题;读者-写者问题;哲学家进餐问题。
(四)死锁
1、死锁的概念
2、死锁处理策略
3、死锁预防
4、死锁避免
系统安全状态,银行家算法。
5、死锁检测和解除
三、内存管理
(一)内存管理基础
1、内存管理概念
程序装入与链接;逻辑地址与物理地址空间;内存保护。
2、交换与覆盖
3、连续分配管理方式
4、非连续分配管理方式
分页管理方式;分段管理方式;段页式管理方式。
(二)虚拟内存管理
1、虚拟内存基本概念
2、请求分页管理方式
3、页面置换算法
最佳置换算法(OPT);先进先出置换算法(FIFO);最近最少使用置换算法(LRU);时钟置换算法(CLOCK)。
4、页面分配策略
5、工作集
6.、抖动
四、文件管理
(一)文件系统基础
1、文件概念
2、文件的逻辑结构
顺序文件;索引文件;索引顺序文件。
3、目录结构
文件控制块和索引节点;单级目录结构和两级目录结构;树形目录结构;图形目录结构。
4、文件共享
5、文件保护
访问类型;访问控制。
(二)文件系统实现
1、文件系统层次结构
2、目录实现
3、文件实现
(三)磁盘组织与管理
1、磁盘的结构
2、磁盘调度算法
3、磁盘的管理
五、输入输出(I/O)管理
(一)I/O管理概述
1、I/O控制方式
2.、I/O软件层次结构
(二)I/O核心子系统
1、I/O调度概念
2、高速缓存与缓冲区
3、设备分配与回收
4、假脱机技术(SPOOLing)
### 计算机网络
【考查目标】
1、掌握计算机网络的基本概念、基本原理和基本方法。
2、掌握计算机网络的体系结构和典型网络协议,了解典型网络设备的组成和特点,理解典型网络设备的工作原理。
3、能够运用计算机网络的基本概念、基本原理和基本方法进行网络系统的分析、设计和应用。
一、计算机网络体系结构
(一)计算机网络概述
1、计算机网络的概念、组成与功能
2、计算机网络的分类
3、计算机网络的标准化工作及相关组织
(二)计算机网络体系结构与参考模型
1、计算机网络分层结构
2、计算机网络协议、接口、服务等概念
3、ISO/OSI参考模型和TCP/IP模型
二、物理层
(一)通信基础
1、信道、信号、宽带、码元、波特、速率、信源与信宿等基本概念
2、奈奎斯特定理与香农定理
3、编码与调制
4、电路交换、报文交换与分组交换
5、数据报与虚电路
(二)传输介质
1、双绞线、同轴电缆、光纤与无线传输介质
2、物理层接口的特性
(三)物理层设备
1、中继器
2、集线器
三、数据链路层
(一)数据链路层的功能
(二)组帧
(三)差错控制
1、检错编码
2、纠错编码
(四)流量控制与可靠传输机制
1、流量控制、可靠传输与滑动窗口机制
2、停止-等待协议
3、后退N帧协议(GBN)
4、选择重传协议(SR)
(五)介质访问控制
1、信道划分
频分多路复用、时分多路复用、波分多路复用、码分多路复用的概念和基本原理。
2、随机访问
ALOHA协议;CSMA协议;CSMA/CD协议;CSMA/CA协议。
3、轮询访问:令牌传递协议
(六)局域网
1、局域网的基本概念与体系结构
2、以太网与IEEE 802.3
3、IEEE 802.11
4、令牌环网的基本原理
(七)广域网
1.广域网的基本概念
2、PPP协议
3、HDLC协议
(八)数据链路层设备
1、网桥的概念和基本原理
2、局域网交换机及其工作原理。
四、网络层
(一)网络层的功能
1、异构网络互联
2、路由与转发
3、拥塞控制
(二)路由算法
1、静态路由与动态路由
2、距离-向量路由算法
3、链路状态路由算法
4、层次路由
(三)IPv4
1、IPv4分组
2、IPv4地址与NAT
3、子网划分与子网掩码、CIDR
4、ARP协议、DHCP协议与ICMP协议
(四)IPv6
1、IPv6的主要特点 2、IPv6地址
(五)路由协议
1、自治系统
2、域内路由与域间路由
3、RIP路由协议
4、OSPF路由协议
5、BGP路由协议
(六)IP组播
1、组播的概念
2、IP组播地址
(七)移动IP
1、移动IP的概念
2、移动IP的通信过程
(八)网络层设备
1、路由器的组成和功能
2、路由表与路由转发
五、传输层
(一)传输层提供的服务
1、传输层的功能
2、传输层寻址与端口
3、无连接服务与面向连接服务
(二)UDP协议
1.UDP数据报
2.UDP校验
(三)TCP协议
1、TCP段
2、TCP连接管理
3、TCP可靠传输
4、TCP流量控制与拥塞控制
六、应用层
(一)网络应用模型
1、客户/服务器模型
2、P2P模型
(二)DNS系统
1、层次域名空间
2、域名服务器
3、域名解析过程
(三)FTP
1、FTP协议的工作原理
2、控制连接与数据连接
(四)电子邮件
1、电子邮件系统的组成结构
2、电子邮件格式与MIME
3、SMTP协议与POP3协议
(五)WWW
1、WWW的概念与组成结构
2、HTTP协议
|
/**
* Copyright (c) OpenLens Authors. All rights reserved.
* Licensed under MIT License. See LICENSE in root directory for more information.
*/
import { LensExtension } from "../lens-extension";
import { Console } from "console";
import { stdout, stderr } from "process";
console = new Console(stdout, stderr);

let ext: LensExtension;

describe("lens extension", () => {
  beforeEach(async () => {
    // Minimal manifest satisfying the LensExtension constructor.
    const manifest = {
      name: "foo-bar",
      version: "0.1.1",
      engines: { lens: "^5.5.0" },
    };

    ext = new LensExtension({
      manifest,
      id: "/this/is/fake/package.json",
      absolutePath: "/absolute/fake/",
      manifestPath: "/this/is/fake/package.json",
      isBundled: false,
      isEnabled: true,
      isCompatible: true,
    });
  });

  describe("name", () => {
    it("returns name", () => {
      // `name` should reflect the manifest's `name` field.
      expect(ext.name).toBe("foo-bar");
    });
  });
});
|
package objects
import "fmt"
// Game represents the state of the game.
type Game struct {
	Cursor           Cursor
	Dimensions       Dimensions // rendered as "window" by String below
	World            Dimensions // rendered as "world" by String below
	Buildings        []Building
	Turn             Turn
	Funds            int
	Revenue          int // NOTE(review): presumably per-turn income — confirm against game loop
	Population       int
	PopulationGrowth int
	Flashes          []Flash
	Frame            int
	Debug            bool // enables debug behavior when true
}
// String returns a user readable version of the game.
func (state *Game) String() string {
	var (
		world  = &state.World
		window = &state.Dimensions
		cursor = &state.Cursor.Location
	)
	return fmt.Sprintf("<world %s | window %s | cursor %s>", world, window, cursor)
}
|
//
// GMVPNServer.h
// VPNTest
//
// Created by Георгий Малюков on 22.12.16.
// Copyright © 2016 Georgiy Malyukov. All rights reserved.
//
#import <Foundation/Foundation.h>
@interface GMVPNServer : NSObject<NSCoding> {
}

@property (copy, nonatomic) NSString *name;
@property (copy, nonatomic) NSString *address;
@property (copy, nonatomic) NSString *remoteId;
// BUG FIX: was declared `weak`. A weak NSString is released as soon as the
// caller's strong reference goes away (often immediately for autoreleased or
// literal-backed strings), silently nilling the password. `copy` matches the
// other string properties and keeps an immutable snapshot.
@property (copy, nonatomic) NSString *password;
@property (readonly, nonatomic) NSString *keychainService;
@property (readonly, nonatomic) NSData *keychainPasswordReference;

#pragma mark - Utils

// Returns YES if the receiver's fields match those of `server`.
- (BOOL)isEqualToServer:(GMVPNServer *)server;

@end
|
use ggez::{Context, filesystem, GameResult};
use crate::npc::NPCTable;
use crate::scene::Scene;
use crate::shared_game_state::SharedGameState;
use crate::stage::StageData;
use crate::text_script::TextScript;
/// Scene shown while game assets (stage table, NPC table, scripts) are loaded.
pub struct LoadingScene {
    // number of ticks elapsed since the scene became active
    tick: usize,
}
impl LoadingScene {
pub fn new() -> Self {
Self {
tick: 0,
}
}
}
impl Scene for LoadingScene {
    /// Performs the actual asset loading on the second tick, then hands off
    /// to the intro. Any I/O or parse failure propagates via `?`.
    fn tick(&mut self, state: &mut SharedGameState, ctx: &mut Context) -> GameResult {
        // deferred to let the loading image draw
        if self.tick == 1 {
            // Stage table first; scripts below may reference stages.
            let stages = StageData::load_stage_table(ctx, &state.base_path)?;
            state.stages = stages;

            let npc_tbl = filesystem::open(ctx, [&state.base_path, "/npc.tbl"].join(""))?;
            let npc_table = NPCTable::load_from(npc_tbl)?;
            state.npc_table = npc_table;

            // Head.tsc holds globally-available script events.
            let head_tsc = filesystem::open(ctx, [&state.base_path, "/Head.tsc"].join(""))?;
            let head_script = TextScript::load_from(head_tsc, &state.constants)?;
            state.textscript_vm.set_global_script(head_script);

            // ArmsItem.tsc drives the inventory screen.
            let arms_item_tsc = filesystem::open(ctx, [&state.base_path, "/ArmsItem.tsc"].join(""))?;
            let arms_item_script = TextScript::load_from(arms_item_tsc, &state.constants)?;
            state.textscript_vm.set_inventory_script(arms_item_script);

            // StageSelect.tsc drives the teleporter menu.
            let stage_select_tsc = filesystem::open(ctx, [&state.base_path, "/StageSelect.tsc"].join(""))?;
            let stage_select_script = TextScript::load_from(stage_select_tsc, &state.constants)?;
            state.textscript_vm.set_stage_select_script(stage_select_script);

            state.start_intro(ctx)?;
        }

        self.tick += 1;
        Ok(())
    }

    /// Draws the "Loading" graphic centered on the canvas.
    fn draw(&self, state: &mut SharedGameState, ctx: &mut Context) -> GameResult {
        let batch = state.texture_set.get_or_load_batch(ctx, &state.constants, "Loading")?;
        batch.add(((state.canvas_size.0 - batch.width() as f32) / 2.0).floor(),
                  ((state.canvas_size.1 - batch.height() as f32) / 2.0).floor());
        batch.draw(ctx)?;
        Ok(())
    }
}
}
|
// Copyright 2022 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use common_datavalues::prelude::*;
use common_exception::Result;
use serde_json::json;
use crate::scalars::scalar_function_test::test_scalar_functions;
use crate::scalars::scalar_function_test::ScalarFunctionTest;
#[test]
// Only JSON booleans map to Some(bool); every other variant yields None.
fn test_as_boolean_function() -> Result<()> {
    let tests = vec![
        ScalarFunctionTest {
            name: "test_as_boolean",
            columns: vec![
                Series::from_data(vec![
                    VariantValue::from(json!(true)),
                    VariantValue::from(json!(false)),
                    VariantValue::from(json!(123)),
                    VariantValue::from(json!(12.34)),
                    VariantValue::from(json!("abc")),
                    VariantValue::from(json!([1,2,3])),
                    VariantValue::from(json!({"a":"b"})),
                ]),
            ],
            expect: Series::from_data(vec![Some(true), Some(false), None, None, None, None, None]),
            error: "",
        },
    ];

    test_scalar_functions("as_boolean", &tests)
}
// BUG FIX: this function was missing the `#[test]` attribute, so `cargo test`
// never executed it (it only compiled as unused code).
#[test]
fn test_as_integer_function() -> Result<()> {
    let tests = vec![
        ScalarFunctionTest {
            name: "test_as_integer",
            columns: vec![
                Series::from_data(vec![
                    VariantValue::from(json!(true)),
                    VariantValue::from(json!(false)),
                    VariantValue::from(json!(123)),
                    VariantValue::from(json!(12.34)),
                    VariantValue::from(json!("abc")),
                    VariantValue::from(json!([1,2,3])),
                    VariantValue::from(json!({"a":"b"})),
                ]),
            ],
            expect: Series::from_data(vec![None, None, Some(123), None, None, None, None]),
            error: "",
        },
    ];

    test_scalar_functions("as_integer", &tests)
}
// BUG FIX: this function was missing the `#[test]` attribute, so `cargo test`
// never executed it.
#[test]
fn test_as_float_function() -> Result<()> {
    let tests = vec![
        ScalarFunctionTest {
            name: "test_as_float",
            columns: vec![
                Series::from_data(vec![
                    VariantValue::from(json!(true)),
                    VariantValue::from(json!(false)),
                    VariantValue::from(json!(123)),
                    VariantValue::from(json!(12.34)),
                    VariantValue::from(json!("abc")),
                    VariantValue::from(json!([1,2,3])),
                    VariantValue::from(json!({"a":"b"})),
                ]),
            ],
            expect: Series::from_data(vec![None, None, Some(123.0), Some(12.34), None, None, None]),
            error: "",
        },
    ];

    test_scalar_functions("as_float", &tests)
}
// BUG FIX: this function was missing the `#[test]` attribute, so `cargo test`
// never executed it.
#[test]
fn test_as_string_function() -> Result<()> {
    let tests = vec![
        ScalarFunctionTest {
            name: "test_as_string",
            columns: vec![
                Series::from_data(vec![
                    VariantValue::from(json!(true)),
                    VariantValue::from(json!(false)),
                    VariantValue::from(json!(123)),
                    VariantValue::from(json!(12.34)),
                    VariantValue::from(json!("abc")),
                    VariantValue::from(json!([1,2,3])),
                    VariantValue::from(json!({"a":"b"})),
                ]),
            ],
            expect: Series::from_data(vec![None, None, None, None, Some("abc"), None, None]),
            error: "",
        },
    ];

    test_scalar_functions("as_string", &tests)
}
// BUG FIX: this function was missing the `#[test]` attribute, so `cargo test`
// never executed it.
#[test]
fn test_as_array_function() -> Result<()> {
    let tests = vec![
        ScalarFunctionTest {
            name: "test_as_array",
            columns: vec![
                Series::from_data(vec![
                    VariantValue::from(json!(true)),
                    VariantValue::from(json!(false)),
                    VariantValue::from(json!(123)),
                    VariantValue::from(json!(12.34)),
                    VariantValue::from(json!("abc")),
                    VariantValue::from(json!([1,2,3])),
                    VariantValue::from(json!({"a":"b"})),
                ]),
            ],
            expect: Series::from_data(vec![None, None, None, None, None, Some(VariantValue::from(json!([1,2,3]))), None]),
            error: "",
        },
    ];

    test_scalar_functions("as_array", &tests)
}
// BUG FIX: this function was missing the `#[test]` attribute, so `cargo test`
// never executed it.
#[test]
fn test_as_object_function() -> Result<()> {
    let tests = vec![
        ScalarFunctionTest {
            name: "test_as_object_function",
            columns: vec![
                Series::from_data(vec![
                    VariantValue::from(json!(true)),
                    VariantValue::from(json!(false)),
                    VariantValue::from(json!(123)),
                    VariantValue::from(json!(12.34)),
                    VariantValue::from(json!("abc")),
                    VariantValue::from(json!([1,2,3])),
                    VariantValue::from(json!({"a":"b"})),
                ]),
            ],
            expect: Series::from_data(vec![None, None, None, None, None, None, Some(VariantValue::from(json!({"a":"b"})))]),
            error: "",
        },
    ];

    test_scalar_functions("as_object", &tests)
}
|
//! Output.
/// Data-output primitives.
pub mod data;
// Re-export the submodule's contents so callers can use them directly.
pub use self::data::*;
|
package insertion_sort_list
import (
"testing"
)
// buildTestList constructs the unsorted fixture 1000 -> 4 -> 2 -> 5 -> 1.
func buildTestList() *ListNode {
	fifth := &ListNode{1, nil}
	fourth := &ListNode{5, fifth}
	third := &ListNode{2, fourth}
	second := &ListNode{4, third}
	return &ListNode{1000, second}
}

// assertSorted walks head and fails the test if its values differ from want.
// BUG FIX: the original tests dereferenced head.Val unconditionally, so a
// sort that returned a short list would panic instead of failing cleanly.
func assertSorted(t *testing.T, head *ListNode, want []int) {
	t.Helper()
	for i, v := range want {
		if head == nil {
			t.Fatalf("list too short: got %d nodes, want %d", i, len(want))
		}
		if head.Val != v {
			t.Errorf("node %d = %d, want %d", i, head.Val, v)
		}
		head = head.Next
	}
	if head != nil {
		t.Error("list longer than expected")
	}
}

func TestInsertionSortListIteration(t *testing.T) {
	assertSorted(t, insertionSortListIteration(buildTestList()), []int{1, 2, 4, 5, 1000})
}

func TestInsertionSortListRecursion(t *testing.T) {
	assertSorted(t, insertionSortListRecursion(buildTestList()), []int{1, 2, 4, 5, 1000})
}
|
class OprahsFavoriteThings::CLI
  # Runs the interactive loop: scrape once, then repeatedly show the list,
  # print the chosen favorite, and ask whether to continue.
  #
  # BUG FIXES vs. the original:
  # - The prompt said "enter a number from 1-15" but the list was printed with
  #   0-based indices and the raw input was used as an array index (off by one).
  # - `print_favorite` was called up to four times per pass, twice in a row
  #   at one point, interleaved with redundant prompts.
  # - Out-of-range input crashed on nil; it is now rejected with a message.
  # - Answering "y" recursed into `start`, re-scraping the site every time;
  #   the loop now simply repeats.
  def start
    OprahsFavoriteThings::Scraper.scrape_favorites
    favorites = OprahsFavoriteThings::Favorites.all

    input = nil
    while input != "exit"
      puts ""
      puts "Welcome to Oprah's Favorite Things of 2018. From this list, enter a number from 1-#{favorites.size}:"
      favorites.each_with_index do |favorite, index|
        puts "#{index + 1}. #{favorite.title}" # 1-based to match the prompt
      end
      puts ""

      selection = gets.strip.to_i
      favorite = favorites[selection - 1] if selection.between?(1, favorites.size)
      if favorite
        print_favorite(favorite)
      else
        puts "Sorry, that is not a valid choice."
      end

      puts ""
      puts "Would you like to see another favorite? Please enter Y or N"
      input = gets.strip.downcase
      if input == "n"
        puts ""
        puts" Thank you. Goodbye."
        exit
      end
    end
  end

  # Prints the description, price and retailer of a single favorite.
  def print_favorite(favorite)
    puts ""
    puts ""
    puts "Description: #{favorite.description}"
    puts "Price: #{favorite.price}"
    puts "Retailer: #{favorite.retailer}"
  end
end
|
package org.craftsrecords.talkadvisor.recommendation.profile
/**
 * Thrown when an attempt is made to create a profile for a user
 * ([userId]) that already has one.
 */
class ProfileAlreadyExistsException(userId: String) : RuntimeException("A profile already exists for the user $userId")
|
#!/bin/bash
# Emits a YAML config for the votes app, looking up identity/cluster IDs in
# Azure. Expects shared.sh to define the *_NAME / *_RESOURCE_GROUP variables.
DIR=$(dirname "${BASH_SOURCE[0]}")
source "$DIR/shared.sh"

# Quote all expansions so names containing spaces or globs cannot break the
# az invocations (shellcheck SC2086).
ASO_IDENTITY_ID=$(az identity show -n "$ASO_IDENTITY_NAME" -g "$INFRA_RESOURCE_GROUP" --query "id" -o tsv)
ASO_IDENTITY_CLIENTID=$(az identity show -n "$ASO_IDENTITY_NAME" -g "$INFRA_RESOURCE_GROUP" --query "clientId" -o tsv)
APP_IDENTITY_ID=$(az identity show -n "$APP_IDENTITY_NAME" -g "$APP_RESOURCE_GROUP" --query "id" -o tsv)
APP_IDENTITY_CLIENTID=$(az identity show -n "$APP_IDENTITY_NAME" -g "$APP_RESOURCE_GROUP" --query "clientId" -o tsv)
# NOTE(review): ASO_IDENTITY_ID, APP_IDENTITY_ID and AKS_SP_ID are not used in
# the output below — confirm whether they are needed or can be dropped.
AKS_SP_ID=$(az aks show -g "$INFRA_RESOURCE_GROUP" -n "$AKS_NAME" --query "servicePrincipalProfile.clientId" -o tsv)
# -s keeps curl's progress meter out of stderr.
DEV_IP=$(curl -s ifconfig.me)

cat << EOF
tenantID: ${AZURE_TENANT_ID}
subscription: ${AZURE_SUBSCRIPTION_ID}
aks:
  name: ${AKS_NAME}
  rg: ${INFRA_RESOURCE_GROUP}
  vnet: <MANUAL_STEP>
dev:
  ip: ${DEV_IP}
app:
  name: ${APP_SUFFIX}
  namespace: default
  image: docker.io/jupflueg/aso-votes-app:latest
  identity:
    name: ${APP_IDENTITY_NAME}
    clientID: ${APP_IDENTITY_CLIENTID}
aso:
  clientID: ${ASO_IDENTITY_CLIENTID}
EOF
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Net;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
using Maestro.Data;
using Maestro.Web.Api.v2020_02_20.Models;
using Microsoft.AspNetCore.ApiVersioning;
using Microsoft.AspNetCore.ApiVersioning.Swashbuckle;
using Microsoft.AspNetCore.Mvc;
using Microsoft.DotNet.Services.Utility;
using Microsoft.EntityFrameworkCore;
using Channel = Maestro.Data.Models.Channel;
namespace Maestro.Web.Api.v2020_02_20.Controllers
{
/// <summary>
///   Exposes methods to Create/Read/Delete <see cref="DefaultChannel"/> mapping information.
/// </summary>
[Route("default-channels")]
[ApiVersion("2020-02-20")]
public class DefaultChannelsController : v2018_07_16.Controllers.DefaultChannelsController
{
    // EF Core context used for all DefaultChannel/Channel queries.
    private readonly BuildAssetRegistryContext _context;

    // Branch names can't possibly start with -, so we'll use this fact to guarantee the user
    // wants to use a regex, and not direct matching.
    private const string _regexBranchPrefix = "-regex:";

    /// <summary>
    ///   Creates the controller, forwarding the context to the v2018_07_16 base.
    /// </summary>
    public DefaultChannelsController(BuildAssetRegistryContext context)
        : base(context)
    {
        _context = context;
    }

    /// <summary>
    ///   Gets a list of all <see cref="DefaultChannel"/> mappings that match the given search criteria.
    /// </summary>
    /// <param name="repository"></param>
    /// <param name="branch"></param>
    /// <param name="channelId"></param>
    /// <param name="enabled">True if the default channel should be initially enabled or disabled.</param>
    [HttpGet]
    [SwaggerApiResponse(HttpStatusCode.OK, Type = typeof(List<DefaultChannel>), Description = "The list of DefaultChannels")]
    public override IActionResult List(string repository = null, string branch = null, int? channelId = null, bool? enabled = null)
    {
        IQueryable<Data.Models.DefaultChannel> query = _context.DefaultChannels.Include(dc => dc.Channel)
            .AsNoTracking();

        if (!string.IsNullOrEmpty(repository))
        {
            query = query.Where(dc => dc.Repository == repository);
        }

        if (channelId.HasValue)
        {
            query = query.Where(dc => dc.ChannelId == channelId.Value);
        }

        if (enabled.HasValue)
        {
            query = query.Where(dc => dc.Enabled == enabled.Value);
        }

        List<DefaultChannel> results = query.AsEnumerable().Select(dc => new DefaultChannel(dc)).ToList();

        // Branch filtering happens in memory because stored branches may be
        // regex patterns, which SQL cannot evaluate.
        if (!string.IsNullOrEmpty(branch))
        {
            List<DefaultChannel> branchFilteredResults = new List<DefaultChannel>();

            foreach (DefaultChannel defaultChannel in results)
            {
                // Branch name expressed as a regular expression: must start with '-regex:' and have at least one more character.
                // - Skips NormalizeBranchName here because internally everything is stored without that.
                //   If there's a pattern of users doing '-regex:/refs/heads/release.*' this could be revisited.
                if (defaultChannel.Branch.StartsWith(_regexBranchPrefix, StringComparison.InvariantCultureIgnoreCase) &&
                    defaultChannel.Branch.Length > _regexBranchPrefix.Length &&
                    new Regex(defaultChannel.Branch.Substring(_regexBranchPrefix.Length)).IsMatch(branch))
                {
                    branchFilteredResults.Add(defaultChannel);
                }
                else if (defaultChannel.Branch == GitHelpers.NormalizeBranchName(branch))
                {
                    branchFilteredResults.Add(defaultChannel);
                }
            }

            return Ok(branchFilteredResults);
        }

        return Ok(results);
    }

    // The v2018_07_16 Create shape is retired in this API version.
    [ApiRemoved]
    public override Task<IActionResult> Create([FromBody, Required] v2018_07_16.Models.DefaultChannel.DefaultChannelCreateData data)
    {
        throw new NotImplementedException();
    }

    /// <summary>
    ///   Creates a <see cref="DefaultChannel"/> mapping.
    /// </summary>
    /// <param name="data">An object containing the data for the new <see cref="DefaultChannel"/></param>
    [HttpPost]
    [SwaggerApiResponse(HttpStatusCode.Created, Description = "DefaultChannel successfully created")]
    [SwaggerApiResponse(HttpStatusCode.Conflict, Description = "A DefaultChannel matching the data already exists")]
    [ValidateModelState]
    public async Task<IActionResult> Create([FromBody, Required] DefaultChannel.DefaultChannelCreateData data)
    {
        int channelId = data.ChannelId;
        Channel channel = await _context.Channels.FindAsync(channelId);
        if (channel == null)
        {
            return NotFound(new ApiError($"The channel with id '{channelId}' was not found."));
        }

        Data.Models.DefaultChannel defaultChannel;

        // Due to abundant retry logic, we'll return a normal response even if this is creating a duplicate, by simply
        // returning the one that already exists vs. HTTP 409 / 500
        var existingInstance = _context.DefaultChannels
            .Where(d => d.Channel == channel &&
                        d.Repository == data.Repository &&
                        d.Branch == data.Branch)
            .FirstOrDefault();

        if (existingInstance != null)
        {
            defaultChannel = existingInstance;
        }
        else
        {
            defaultChannel = new Data.Models.DefaultChannel
            {
                Channel = channel,
                Repository = data.Repository,
                Branch = data.Branch,
                Enabled = data.Enabled ?? true
            };

            await _context.DefaultChannels.AddAsync(defaultChannel);
            await _context.SaveChangesAsync();
        }

        return CreatedAtRoute(
            new
            {
                action = "Get",
                id = defaultChannel.Id
            },
            new DefaultChannel(defaultChannel));
    }

    // The v2018_07_16 Update shape is retired in this API version.
    [ApiRemoved]
    public override Task<IActionResult> Update(int id, [FromBody] v2018_07_16.Models.DefaultChannel.DefaultChannelUpdateData update)
    {
        throw new NotImplementedException();
    }

    /// <summary>
    ///   Update an existing default channel with new data.
    /// </summary>
    /// <param name="id">Id of default channel</param>
    /// <param name="update">Default channel update data</param>
    /// <returns>Updated default channel data.</returns>
    [HttpPatch("{id}")]
    [SwaggerApiResponse(HttpStatusCode.OK, Type = typeof(DefaultChannel), Description = "Default channel successfully updated")]
    [SwaggerApiResponse(HttpStatusCode.NotFound, Description = "The existing default channel does not exist.")]
    [SwaggerApiResponse(HttpStatusCode.Conflict, Description = "A DefaultChannel matching the data already exists")]
    [ValidateModelState]
    public async Task<IActionResult> Update(int id, [FromBody] DefaultChannel.DefaultChannelUpdateData update)
    {
        Data.Models.DefaultChannel defaultChannel = await _context.DefaultChannels.FindAsync(id);

        if (defaultChannel == null)
        {
            return NotFound();
        }

        // Apply only the fields present in the patch; track whether anything changed.
        bool doUpdate = false;
        if (!string.IsNullOrEmpty(update.Branch))
        {
            defaultChannel.Branch = update.Branch;
            doUpdate = true;
        }

        if (!string.IsNullOrEmpty(update.Repository))
        {
            defaultChannel.Repository = update.Repository;
            doUpdate = true;
        }

        if (update.ChannelId.HasValue)
        {
            int channelId = update.ChannelId.Value;
            Channel channel = await _context.Channels.FindAsync(channelId);
            if (channel == null)
            {
                return NotFound(new ApiError($"The channel with id '{channelId}' was not found."));
            }

            defaultChannel.ChannelId = channelId;
            defaultChannel.Channel = channel;
            doUpdate = true;
        }

        if (update.Enabled.HasValue)
        {
            defaultChannel.Enabled = update.Enabled.Value;
            doUpdate = true;
        }

        if (doUpdate)
        {
            _context.DefaultChannels.Update(defaultChannel);
            await _context.SaveChangesAsync();
        }

        return Ok(new DefaultChannel(defaultChannel));
    }

    /// <summary>
    ///   Gets a single <see cref="DefaultChannel"/>.
    /// </summary>
    /// <param name="id">The id of the <see cref="DefaultChannel"/></param>
    [HttpGet("{id}")]
    [SwaggerApiResponse(HttpStatusCode.OK, Type = typeof(DefaultChannel), Description = "The requested DefaultChannel")]
    [ValidateModelState]
    public override async Task<IActionResult> Get(int id)
    {
        Data.Models.DefaultChannel defaultChannel = await _context.DefaultChannels.FindAsync(id);

        if (defaultChannel == null)
        {
            return NotFound();
        }

        return Ok(new DefaultChannel(defaultChannel));
    }
}
}
|
//! Parsing of token-stream into data-structures.
use super::*;
/// Parses the stream of tokens into a list.
///
/// Consumes tokens until a closing `]` (which is eaten) or the end of the
/// stream; `,` separators are skipped. Anything else is parsed as an
/// expression and appended to the result.
///
/// NOTE(review): reaching end-of-stream without a closing `]` currently
/// succeeds silently — confirm this is intentional.
pub fn parse_list(
    tokens: &mut PeekableTokenStream<impl TokenStream>
) -> Result<Vec<Expression>, ParseError> {
    let mut list = Vec::default();

    while let Some(t) = tokens.peek() {
        if let TokenContent::Symbol(s) = t.content {
            // End of the list: consume the `]` and stop.
            if s == Symbol::BraketRight {
                drop(tokens.next());
                break;
            }

            // Separator: consume the `,` and continue.
            if s == Symbol::Comma {
                drop(tokens.next());
                continue;
            }
        }

        let expr = parse_expression(tokens, false)?;
        list.push(expr);
    }

    Ok(list)
}
/// Parses the stream of tokens into a key/value-map.
///
/// Entries have the form `"key" = <expression>`, separated by `,` and
/// terminated by `}` (which is eaten). Any other token is an error.
pub fn parse_map(
    tokens: &mut PeekableTokenStream<impl TokenStream>
) -> Result<FxHashMap<CompactString, Expression>, ParseError> {
    let mut map = FxHashMap::default();

    while let Some(t) = tokens.peek().cloned() {
        match t.content {
            // Unlexable input: surface the position and the raw remainder.
            TokenContent::Remainder(r )
                => return Err(ParseError::Unrecognized(t.start, r)),

            TokenContent::Symbol(s) => {
                // End of the map: consume the `}` and stop.
                if s == Symbol::CurlyRight {
                    drop(tokens.next());
                    break;
                }

                // Separator: consume the `,` and continue.
                if s == Symbol::Comma {
                    drop(tokens.next());
                    continue;
                }

                return Err(ParseError::Unexpected(format!("symbol '{}'", s).into()));
            },

            // A string literal starts a `key = value` entry.
            TokenContent::Literal(Literal::Str(s)) => {
                let key = s;
                drop(tokens.next()); // eat key

                // The key must be followed by `=`.
                let next = tokens.next();
                if let Some(Token {content: TokenContent::Symbol(Symbol::EqualSign), ..}) = next {
                    // everything checks out, continue on...
                } else {
                    return Err(ParseError::Unexpected("token".into()));
                }

                let expr = parse_expression(tokens, false)?;
                map.insert(key, expr);
            },

            TokenContent::Literal(l) => return Err(ParseError::Unexpected(format!("literal {:?}", l).into())),

            g @ TokenContent::Group(_, _) => return Err(ParseError::Unexpected(format!("group {:?}", g).into()))
        };
    }

    Ok(map)
}
|
require 'fiber'
module CartoDB
  # A small keyed pool of database connections, capped at MAX_POOL_SIZE.
  class ConnectionPool
    MAX_POOL_SIZE = 300

    def initialize
      @pool = {}
    end

    # Returns the cached connection for `configuration`, refreshing its
    # last-accessed timestamp; otherwise yields to create one, evicting the
    # least-recently-used connection first when the pool is full.
    def fetch(configuration, &block)
      id = connection_id(configuration)
      if @pool[id]
        Rails.logger.debug "[pool] Found a connection for #{id} (#{@pool.keys.size})"
        @pool[id][:last_accessed] = Time.now
        @pool[id][:connection]
      else
        close_oldest_connection! if @pool.size >= MAX_POOL_SIZE
        connection = yield
        @pool[id] = { :connection => connection, :last_accessed => Time.now }
        Rails.logger.debug "[pool] Creating a new connection for #{id} (#{@pool.keys.size})"
        connection
      end
    end

    # Disconnects and drops every pooled connection.
    def close_connections!
      @pool.each do |connection_id, conn|
        conn[:connection].disconnect
      end
      @pool = {}
    end

    # Disconnects and removes the least-recently-used connection.
    #
    # BUG FIX: the comparison was `oldest_access < conn[:last_accessed]`,
    # which selected the MOST-recently-used connection, evicting exactly the
    # wrong entry. `>` keeps the smallest (oldest) timestamp.
    def close_oldest_connection!
      oldest_id = nil
      oldest_access = nil
      @pool.each do |connection_id, conn|
        if oldest_access.nil? || oldest_access > conn[:last_accessed]
          oldest_access = conn[:last_accessed]
          oldest_id = connection_id
        end
      end
      return if oldest_id.nil? # nothing pooled

      @pool[oldest_id][:connection].disconnect
      @pool.delete(oldest_id)
    end

    private

    # Pool key: one connection per database/username pair.
    def connection_id(configuration)
      "#{configuration['database']}:#{configuration['username']}"
    end
  end
end
|
# parallax-news-corp
News blog theme made with Materialize CSS.<br>
Link to the Parallax News Corp theme:<br>
https://news-corp.netlify.com/
|
package com.infinum.dbinspector.domain.shared.base
/**
 * Base contract for interactors: transforms an [InputModel] into an
 * [OutputModel] inside a coroutine, invoked like a function.
 */
internal interface BaseInteractor<InputModel, OutputModel> {

    suspend operator fun invoke(input: InputModel): OutputModel
}
|
# Semana06
Hola Mundo!
Primaria: https://grmack01.github.io/Semana06.io/
Secundaria: https://grmack01.github.io/Semana06.io/index.html
|
class LodestoneLogger
  extend SingleForwardable

  # Forward the common logging calls to the lazily-built shared logger.
  def_delegators :logger, :info, :error, :warn, :level

  class << self
    # Memoizes a Logger writing (unbuffered, append mode) to the
    # environment-specific log file.
    def logger
      @logger ||= begin
        file = File.new("log/#{ENV['RACK_ENV']}.log", 'a')
        file.sync = true
        Logger.new(file)
      end
    end
  end
end
|
import request from '@/utils/request'
/**
 * Fetches jobs matching `query`.
 * @param {Object} query - sent as URL query parameters
 * @returns {Promise} the request promise
 */
export function fetchJob(query) {
  return request({
    url: '/job/job',
    method: 'get',
    params: query
  })
}

/**
 * Creates or updates a job.
 * NOTE(review): the payload is sent via `params` (query string) rather than
 * a request body, even though this is a POST — confirm the backend expects that.
 * @param {Object} query - job fields
 * @returns {Promise} the request promise
 */
export function editJob(query) {
  return request({
    url: '/job/job',
    method: 'post',
    params: query
  })
}

/**
 * Deletes the job identified by `query.id`.
 * @param {Object} query - must contain `id`
 * @returns {Promise} the request promise
 */
export function deleteJob(query) {
  return request({
    url: '/job/job/' + query.id,
    method: 'delete'
  })
}

/**
 * Triggers an immediate run of a job.
 * @param {Object} query - sent as URL query parameters
 * @returns {Promise} the request promise
 */
export function runJob(query) {
  return request({
    url: '/job/job/run',
    method: 'post',
    params: query
  })
}

/**
 * Changes a job's enabled/disabled status.
 * @param {Object} query - sent as URL query parameters
 * @returns {Promise} the request promise
 */
export function enableJob(query) {
  return request({
    url: '/job/job/status',
    method: 'post',
    params: query
  })
}
|
// Copyright (c) 2014 GitHub, Inc.
// Use of this source code is governed by the MIT license that can be
// found in the LICENSE file.
#ifndef ATOM_BROWSER_EXTENSIONS_ATOM_EXTENSION_SYSTEM_FACTORY_H_
#define ATOM_BROWSER_EXTENSIONS_ATOM_EXTENSION_SYSTEM_FACTORY_H_
#include "atom/browser/extensions/atom_extension_system.h"
#include "base/memory/singleton.h"
#include "components/keyed_service/content/browser_context_keyed_service_factory.h"
#include "extensions/browser/extension_system_provider.h"
namespace extensions {
class ExtensionSystem;
// Keyed-service factory for the AtomExtensionSystem::Shared instance
// associated with a BrowserContext. Singleton; use GetInstance().
class AtomExtensionSystemSharedFactory :
    public BrowserContextKeyedServiceFactory {
 public:
  // Returns the Shared service for |context|, creating it if needed.
  static AtomExtensionSystem::Shared* GetForBrowserContext(
      content::BrowserContext* context);

  static AtomExtensionSystemSharedFactory* GetInstance();

 private:
  friend struct base::DefaultSingletonTraits<AtomExtensionSystemSharedFactory>;

  AtomExtensionSystemSharedFactory();
  ~AtomExtensionSystemSharedFactory() override;

  // BrowserContextKeyedServiceFactory implementation:
  KeyedService* BuildServiceInstanceFor(
      content::BrowserContext* context) const override;
  content::BrowserContext* GetBrowserContextToUse(
      content::BrowserContext* context) const override;

  DISALLOW_COPY_AND_ASSIGN(AtomExtensionSystemSharedFactory);
};
// A factory that provides ShellExtensionSystem for app_shell.
// A factory that provides ShellExtensionSystem for app_shell.
// Singleton; use GetInstance().
class AtomExtensionSystemFactory : public ExtensionSystemProvider {
 public:
  // ExtensionSystemProvider implementation:
  ExtensionSystem* GetForBrowserContext(
      content::BrowserContext* context) override;

  static AtomExtensionSystemFactory* GetInstance();

 private:
  friend struct base::DefaultSingletonTraits<AtomExtensionSystemFactory>;

  AtomExtensionSystemFactory();
  ~AtomExtensionSystemFactory() override;

  // BrowserContextKeyedServiceFactory implementation:
  KeyedService* BuildServiceInstanceFor(
      content::BrowserContext* context) const override;
  content::BrowserContext* GetBrowserContextToUse(
      content::BrowserContext* context) const override;
  bool ServiceIsCreatedWithBrowserContext() const override;

  DISALLOW_COPY_AND_ASSIGN(AtomExtensionSystemFactory);
};
} // namespace extensions
#endif // ATOM_BROWSER_EXTENSIONS_ATOM_EXTENSION_SYSTEM_FACTORY_H_
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::cmp;
use std::iter::Peekable;
use syntax::codemap::{self, CodeMap, BytePos};
use Indent;
use comment::{FindUncommented, rewrite_comment, find_comment_end};
use config::Config;
/// Formatting tactic for lists. This will be cast down to a
/// DefinitiveListTactic depending on the number and length of the items and
/// their comments.
#[derive(Eq, PartialEq, Debug, Copy, Clone)]
pub enum ListTactic {
    /// One item per row.
    Vertical,
    /// All items on one row.
    Horizontal,
    /// Try Horizontal layout, if that fails then vertical.
    HorizontalVertical,
    /// HorizontalVertical with a soft limit of n characters.
    LimitedHorizontalVertical(usize),
    /// Pack as many items as possible per row over (possibly) many rows.
    Mixed,
}

impl_enum_decodable!(ListTactic, Vertical, Horizontal, HorizontalVertical, Mixed);
/// When to emit the list separator after an item.
#[derive(Eq, PartialEq, Debug, Copy, Clone)]
pub enum SeparatorTactic {
    /// Always emit a trailing separator.
    Always,
    /// Never emit a trailing separator.
    Never,
    /// Emit a trailing separator only in vertical layout.
    Vertical,
}

impl_enum_decodable!(SeparatorTactic, Always, Never, Vertical);
impl SeparatorTactic {
    /// Maps `true` to `Always` and `false` to `Never`.
    pub fn from_bool(b: bool) -> SeparatorTactic {
        match b {
            true => SeparatorTactic::Always,
            false => SeparatorTactic::Never,
        }
    }
}
/// All parameters needed by `write_list` to lay out a list of items.
pub struct ListFormatting<'a> {
    pub tactic: DefinitiveListTactic,
    /// Separator text placed between items (e.g. `","`).
    pub separator: &'a str,
    pub trailing_separator: SeparatorTactic,
    pub indent: Indent,
    /// Available width for the list.
    pub width: usize,
    // Non-expressions, e.g. items, will have a new line at the end of the list.
    // Important for comment styles.
    pub ends_with_newline: bool,
    pub config: &'a Config,
}
/// Formats a function's argument list; returns `None` if the rewrite fails.
pub fn format_fn_args<I>(items: I, width: usize, offset: Indent, config: &Config) -> Option<String>
where
    I: Iterator<Item = ListItem>,
{
    // ...
}
/// Formats an item list with the `HorizontalVertical` tactic; returns `None`
/// if the rewrite fails.
pub fn format_item_list<I>(
    items: I,
    width: usize,
    offset: Indent,
    config: &Config,
) -> Option<String>
where
    I: Iterator<Item = ListItem>,
{
    list_helper(items, width, offset, config, ListTactic::HorizontalVertical)
}
/// Resolves `tactic` into a definitive tactic for `items` and writes the
/// list with comma separators and no trailing separator.
pub fn list_helper<I>(
    items: I,
    width: usize,
    offset: Indent,
    config: &Config,
    tactic: ListTactic,
) -> Option<String>
where
    I: Iterator<Item = ListItem>,
{
    let item_vec: Vec<_> = items.collect();
    let tactic = definitive_tactic(&item_vec, tactic, width);
    let fmt = ListFormatting {
        tactic: tactic,
        separator: ",",
        trailing_separator: SeparatorTactic::Never,
        indent: offset,
        width: width,
        ends_with_newline: false,
        config: config,
    };

    write_list(&item_vec, &fmt)
}
// Identity AsRef so generic list functions can take ListItem by value or
// by reference uniformly.
impl AsRef<ListItem> for ListItem {
    fn as_ref(&self) -> &ListItem {
        self
    }
}
/// A single list item plus its surrounding comments.
pub struct ListItem {
    // None for comments mean that they are not present.
    pub pre_comment: Option<String>,
    // Item should include attributes and doc comments. None indicates a failed
    // rewrite.
    pub item: Option<String>,
    pub post_comment: Option<String>,
    // Whether there is extra whitespace before this item.
    pub new_lines: bool,
}
impl ListItem {
    /// True if the rendered item spans more than one line.
    pub fn is_multiline(&self) -> bool {
        // ...
    }

    /// True if this item is preceded by a `//` line comment, which forces
    /// vertical layout.
    pub fn has_line_pre_comment(&self) -> bool {
        self.pre_comment.as_ref().map_or(false, |comment| comment.starts_with("//"))
    }

    /// Builds a bare item (no comments, no extra whitespace) from a string.
    pub fn from_str<S: Into<String>>(s: S) -> ListItem {
        ListItem {
            pre_comment: None,
            item: Some(s.into()),
            post_comment: None,
            new_lines: false,
        }
    }
}
#[derive(Eq, PartialEq, Debug, Copy, Clone)]
/// The definitive formatting tactic for lists.
pub enum DefinitiveListTactic {
    Vertical,
    Horizontal,
    Mixed,
}
/// Resolves a `ListTactic` into a `DefinitiveListTactic` for the given items
/// and available `width`. Pre-item line comments force vertical layout.
pub fn definitive_tactic<I, T>(items: I, tactic: ListTactic, width: usize) -> DefinitiveListTactic
where
    I: IntoIterator<Item = T> + Clone,
    T: AsRef<ListItem>,
{
    let pre_line_comments =
        items.clone().into_iter().any(|item| item.as_ref().has_line_pre_comment());

    let limit = match tactic {
        _ if pre_line_comments => return DefinitiveListTactic::Vertical,
        ListTactic::Mixed => return DefinitiveListTactic::Mixed,
        ListTactic::Horizontal => return DefinitiveListTactic::Horizontal,
        ListTactic::Vertical => return DefinitiveListTactic::Vertical,
        ListTactic::LimitedHorizontalVertical(limit) => ::std::cmp::min(width, limit),
        ListTactic::HorizontalVertical => width,
    };

    let (sep_count, total_width) = calculate_width(items.clone());
    let sep_len = ", ".len(); // FIXME: make more generic?
    let total_sep_len = sep_len * sep_count.checked_sub(1).unwrap_or(0);
    let real_total = total_width + total_sep_len;

    // BUG FIX: the function previously fell off the end without returning a
    // value after computing `real_total` (a compile error). Lay the list out
    // horizontally only when everything fits within the limit.
    if real_total <= limit {
        DefinitiveListTactic::Horizontal
    } else {
        DefinitiveListTactic::Vertical
    }
}
// Format a list of commented items into a string.
// TODO: add unit tests
/// Renders `items` according to `formatting`, returning `None` when any
/// item's rewrite failed.
pub fn write_list<I, T>(items: I, formatting: &ListFormatting) -> Option<String>
where
    I: IntoIterator<Item = T>,
    T: AsRef<ListItem>,
{
    let tactic = formatting.tactic;
    let sep_len = formatting.separator.len();

    // Now that we know how we will layout, we can decide for sure if there
    // will be a trailing separator.
    let trailing_separator = needs_trailing_separator(formatting.trailing_separator, tactic);
    let mut result = String::new();
    let mut iter = items.into_iter().enumerate().peekable();
    // Running width of the current line; only used by the Mixed tactic.
    let mut line_len = 0;
    let indent_str = &formatting.indent.to_string(formatting.config);

    while let Some((i, item)) = iter.next() {
        let item = item.as_ref();
        // A failed item rewrite aborts the whole list.
        let inner_item = try_opt!(item.item.as_ref());
        let first = i == 0;
        let last = iter.peek().is_none();
        let separate = !last || trailing_separator;
        let item_sep_len = if separate { sep_len } else { 0 };

        // Item string may be multi-line. Its length (used for block comment alignment)
        // Should be only the length of the last line.
        let item_last_line = if item.is_multiline() {
            inner_item.lines().last().unwrap_or("")
        } else {
            inner_item.as_ref()
        };
        let mut item_last_line_width = item_last_line.len() + item_sep_len;
        if item_last_line.starts_with(indent_str) {
            item_last_line_width -= indent_str.len();
        }

        // Emit the layout-dependent prefix (space or newline+indent).
        match tactic {
            DefinitiveListTactic::Horizontal if !first => {
                result.push(' ');
            }
            DefinitiveListTactic::Vertical if !first => {
                result.push('\n');
                result.push_str(indent_str);
            }
            DefinitiveListTactic::Mixed => {
                let total_width = total_item_width(item) + item_sep_len;

                // 1 is space between separator and item.
                if line_len > 0 && line_len + 1 + total_width > formatting.width {
                    result.push('\n');
                    result.push_str(indent_str);
                    line_len = 0;
                }

                if line_len > 0 {
                    result.push(' ');
                    line_len += 1;
                }

                line_len += total_width;
            }
            _ => {}
        }

        result.push_str(&inner_item[..]);

        if separate {
            result.push_str(formatting.separator);
        }

        // Preserve a blank line after items that originally had one.
        if !last && tactic == DefinitiveListTactic::Vertical && item.new_lines {
            result.push('\n');
        }
    }

    Some(result)
}
/// Iterator adapter that pairs each item of `inner` with the comments found
/// around its span in the source (see `itemize_list`).
pub struct ListItems<'a, I, F1, F2, F3>
where
    I: Iterator,
{
    codemap: &'a CodeMap,
    inner: Peekable<I>,
    // Callbacks extracting an item's span start / span end / rendered text.
    get_lo: F1,
    get_hi: F2,
    get_item_string: F3,
    // Source positions bounding the comment search.
    prev_span_end: BytePos,
    next_span_start: BytePos,
    terminator: &'a str,
}
impl<'a, T, I, F1, F2, F3> Iterator for ListItems<'a, I, F1, F2, F3>
where
    I: Iterator<Item = T>,
    F1: Fn(&T) -> BytePos,
    F2: Fn(&T) -> BytePos,
    F3: Fn(&T) -> Option<String>,
{
    type Item = ListItem;

    /// Yields the next item together with its pre/post comments.
    fn next(&mut self) -> Option<Self::Item> {
        // ...
    }
}
/// Creates an iterator over a list's items with associated comments.
///
/// `get_lo`/`get_hi` extract each item's span boundaries, `get_item_string`
/// renders an item, and `prev_span_end`/`next_span_start` bound the whole
/// list so comments before the first and after the last item can be found.
pub fn itemize_list<'a, T, I, F1, F2, F3>(
    codemap: &'a CodeMap,
    inner: I,
    terminator: &'a str,
    get_lo: F1,
    get_hi: F2,
    get_item_string: F3,
    prev_span_end: BytePos,
    next_span_start: BytePos,
) -> ListItems<'a, I, F1, F2, F3>
where
    I: Iterator<Item = T>,
    F1: Fn(&T) -> BytePos,
    F2: Fn(&T) -> BytePos,
    F3: Fn(&T) -> Option<String>,
{
    // Field-init shorthand replaces the redundant `field: field` pairs.
    ListItems {
        codemap,
        inner: inner.peekable(),
        get_lo,
        get_hi,
        get_item_string,
        prev_span_end,
        next_span_start,
        terminator,
    }
}
/// Decides whether a separator should follow the final list item, given the
/// configured separator tactic and the chosen list layout.
fn needs_trailing_separator(
    separator_tactic: SeparatorTactic,
    list_tactic: DefinitiveListTactic,
) -> bool {
    match separator_tactic {
        // Unconditional: always emit the trailing separator.
        SeparatorTactic::Always => true,
        // Unconditional: never emit one.
        SeparatorTactic::Never => false,
        // Conditional: only when the list is actually laid out vertically.
        SeparatorTactic::Vertical => list_tactic == DefinitiveListTactic::Vertical,
    }
}
/// Returns the count and total width of the list items.
/// (Body elided in this view of the file.)
fn calculate_width<I, T>(items: I) -> (usize, usize)
where
    I: IntoIterator<Item = T>,
    T: AsRef<ListItem>,
{
    // ...
}
/// Display width of a single `ListItem` — presumably the item text plus its
/// comments (see `comment_len`); TODO confirm, body elided in this view.
fn total_item_width(item: &ListItem) -> usize {
    // ...
}
/// Width contribution of an optional inline comment.
///
/// A non-empty comment is rendered with ` /*` before and ` */` after it,
/// adding six characters around the trimmed text; an absent or
/// whitespace-only comment contributes nothing.
fn comment_len(comment: Option<&str>) -> usize {
    comment.map_or(0, |text| {
        let trimmed_len = text.trim().len();
        match trimmed_len {
            0 => 0,
            len => len + 6, // " /*" + " */" wrappers
        }
    })
}
|
import React from 'react';
import { ButtonGroup } from 'react-button-group-tis';
import 'react-button-group-tis/dist/index.css';
const App = () => {
return (
<div className='App'>
<div className='Title'>
ButtonGroup Tis
</div>
<ButtonGroup
className='ButtonGroup'
items={['Windows', 'Linux']}
onItemClick={(index, item) => console.log(`"${item}" selected!`)}
darkMode={false}
/>
</div>
);
};
export default App;
|
<?php
declare(strict_types=1);
namespace Longman\LaravelLodash\Redis;
use Illuminate\Redis\Connectors\PredisConnector;
use Illuminate\Redis\RedisManager as BaseRedisManager;
use InvalidArgumentException;
use Longman\LaravelLodash\Redis\Connectors\PhpRedisConnector;
class RedisManager extends BaseRedisManager
{
    /**
     * Get the connector instance for the current driver.
     *
     * @throws \InvalidArgumentException When the configured driver is neither "predis" nor "phpredis".
     *
     * @return \Longman\LaravelLodash\Redis\Connectors\PhpRedisConnector|\Illuminate\Redis\Connectors\PredisConnector
     */
    protected function connector(): PredisConnector|PhpRedisConnector
    {
        return match ($this->driver) {
            'predis' => new PredisConnector(),
            'phpredis' => new PhpRedisConnector(),
            // Fail fast on unknown drivers. (BUGFIX: message grammar was
            // "does not exists".)
            default => throw new InvalidArgumentException('Redis driver ' . $this->driver . ' does not exist'),
        };
    }
}
|
import { Power3, TimelineMax, Back } from 'gsap/gsap-core'
import * as THREE from 'three'
// Builds a cube and runs a looping GSAP timeline on it, then registers the
// cube with the given engine (expected to expose an `add(object)` method —
// TODO confirm engine's interface).
export default (engine: any) => {
  // Create a new cube with simple geometry & material.
  const geometry = new THREE.BoxGeometry(15, 15, 15)
  // BUGFIX: `transparent` must be enabled, otherwise the `opacity` tweens
  // below have no visible effect — three.js only alpha-blends materials
  // explicitly marked transparent.
  const material = new THREE.MeshBasicMaterial({ transparent: true })
  const cube = new THREE.Mesh(geometry, material)

  // Create a new timeline.
  // I like to add my timeline to my element object so I know to which element it refers to
  // But we could declare a new variable which contains the timeline
  const tl = new TimelineMax({
    repeat: -1,
    repeatDelay: 1
  })

  // The cube turns on itself.
  tl.to(cube.rotation, 3, {
    y: -Math.PI * 3.25,
    x: -Math.PI * 1.25,
    ease: Back.easeInOut
  })

  // Then it moves to the left and fades out (fade overlaps the move by 1s).
  tl.to(cube.position, 1, {
    x: -150,
    ease: Power3.easeOut
  })
  tl.to(cube.material, 1, {
    opacity: 0,
    ease: Power3.easeOut
  }, '-=1')

  // Reposition the cube instantly, without a transition.
  tl.set(cube.position, {
    x: 100,
    y: -100
  })

  // Fade the cube back in.
  tl.to(cube.material, 1, {
    opacity: 1,
    ease: Power3.easeOut
  })

  // It returns to its initial position and orientation simultaneously.
  tl.to(cube.position, 3, {
    x: 0,
    y: 0,
    ease: Back.easeInOut.config(2)
  })
  tl.to(cube.rotation, 3, {
    x: 0,
    y: 0,
    ease: Back.easeInOut.config(2)
  }, '-=3')

  // Add the cube to the scene.
  engine.add(cube)
}
|
<?php
namespace App\Http\Controllers;
use App\Adress;
use Illuminate\Http\Request;
class UserController extends Controller
{
    /**
     * Render the dashboard view with every stored address record.
     *
     * NOTE(review): the model name "Adress" (sic) is a project-wide
     * identifier; renaming it would touch callers outside this file.
     *
     * @return \Illuminate\Contracts\View\View
     */
    public function index()
    {
        $adress = Adress::all();
        // @dd($adress)
        // echo json_encode($adress);
        return view('dashboard', compact('adress'));
    }
}
|
import 'package:dojin_hub/log/debug_log.dart';
import 'package:flutter/widgets.dart';
/// Base class that owns a [FocusNode] and exposes focus helpers to
/// subclasses. [dispose] must be called when the owning widget goes away,
/// otherwise the node (and every listener registered on it) leaks.
abstract class WidgetListener {
  // Node used for all focus operations of this listener.
  final FocusNode focusNode = FocusNode();

  WidgetListener();

  /// Moves keyboard focus to [focusNode] within [context]'s focus scope.
  void requestFocus(BuildContext context) {
    FocusScope.of(context).requestFocus(focusNode);
  }

  /// Removes focus from the scope's current primary focus.
  /// NOTE(review): this unfocuses the whole scope, not specifically
  /// [focusNode] — confirm that is intended.
  void unFocus(BuildContext context) {
    DebugLog.d('call unFocus');
    FocusScope.of(context).unfocus();
  }

  /// Invokes [f] each time [focusNode] gains focus.
  /// NOTE(review): every call adds a new listener and none are ever
  /// removed individually; they live until [dispose].
  void onHasFocus(Function f) {
    focusNode.addListener(() {
      if (focusNode.hasFocus) {
        f();
      }
    });
  }

  /// Invokes [f] each time [focusNode] loses focus.
  /// Same listener-accumulation caveat as [onHasFocus].
  void onLostFocus(Function f) {
    focusNode.addListener(() {
      if (!focusNode.hasFocus) {
        f();
      }
    });
  }

  /// Releases [focusNode] and all listeners registered on it.
  void dispose() {
    focusNode.dispose();
  }
}
|
package edu.rice.owltorrent.network.messages;
import edu.rice.owltorrent.common.entity.Bitfield;
import java.io.IOException;
import org.junit.Test;
/** @author shijie */
/** Round-trip (encode/decode) tests for {@link BitfieldMessage}. */
public class BitfieldMessageTest {

  /** A sparse 16-bit bitfield survives serialization and parsing. */
  @Test
  public void bytesAndBack() throws IOException {
    Bitfield bitfield = new Bitfield(16);
    // Same pattern as before: bits 0, 8-13 and 15 set.
    for (int index : new int[] {0, 8, 9, 10, 11, 12, 13, 15}) {
      bitfield.setBit(index);
    }
    GenericMessageTestHelper.binaryAndBackWorks(new BitfieldMessage(bitfield));
  }

  /** A fully-set, non-byte-aligned (706-bit) bitfield round-trips. */
  @Test
  public void bytesAndBack2() throws IOException {
    Bitfield bitfield = new Bitfield(706);
    for (int bit = 0; bit < 706; bit++) {
      bitfield.setBit(bit);
    }
    GenericMessageTestHelper.binaryAndBackWorks(new BitfieldMessage(bitfield));
  }
}
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2010 British Broadcasting Corporation and Kamaelia Contributors(1)
#
# (1) Kamaelia Contributors are listed in the AUTHORS file and at
# http://www.kamaelia.org/AUTHORS - please extend this file,
# not this notice.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -------------------------------------------------------------------------
#
import Axon as _Axon
from Kamaelia.Internet.TCPClient import TCPClient
from Kamaelia.Util.Graphline import Graphline
from Axon.Ipc import producerFinished, shutdownMicroprocess
class channel(object):
    """Thin wrapper emitting raw IRC protocol lines for one channel.

    ``sock`` is anything exposing ``send`` — the original author notes it
    may be a socket or a component (a deliberate, admittedly ugly hack).
    """

    def __init__(self, sock, channel):
        self.sock = sock
        self.channel = channel

    def join(self):
        # Enter the channel.
        self.sock.send('JOIN %s\r\n' % self.channel)

    def say(self, message):
        # Send one line of chat to the channel.
        self.sock.send('PRIVMSG %s :%s\r\n' % (self.channel, message))

    def leave(self):
        # Depart the channel.
        self.sock.send("PART %s\r\n" % self.channel)

    def topic(self, newTopic):
        # Replace the channel topic.
        self.sock.send("TOPIC %s :%s\r\n" % (self.channel, newTopic))
class IRC_Client(_Axon.Component.component):
    """\
    This is the base client. It is broken in the same way as
    the earliest internet handling code was. In many respects this
    is the logical counterpart to a TCPServer which upon connection
    should spawn the equivalent of a Connected Socket Adaptor. Since
    this could happen in various different ways, please hack on a
    COPY of this file, rather that this file. (Keeps this file
    relatively simple)

    Specifically - consider that in order to make this work "properly"
    it needs to handle the chat session multiplexing that happens by
    default in IRC. There are MANY ways this could be achieved.
    """
    Inboxes = ["inbox", "control", "talk", "topic"]
    Outboxes = ["outbox", "signal", "heard" ]

    def __init__(self, nick="kamaeliabot",
                 nickinfo="Kamaelia",
                 defaultChannel="#kamaeliatest"):
        super(IRC_Client, self).__init__()
        self.nick = nick
        self.nickinfo = nickinfo
        self.defaultChannel = defaultChannel
        self.channels = {}  # channel name -> channel wrapper object

    def login(self, nick, nickinfo, password = None, username=None):
        """Register with the server: NICK, optional PASS, then USER.

        Should be abstracted out as far as possible.
        Protocol can be abstracted into the following kinds of items:
           - The independent atoms of the transactions in the protocol
           - The orchestration of the molecules of the atoms of
             transactions of the protocol.
           - The higher level abstractions for handling the protocol
        """
        self.send ( 'NICK %s\r\n' % nick )
        if password:
            self.send('PASS %s\r\n' % password )
        if not username:
            username = nick
        self.send ( 'USER %s %s %s :%s\r\n' % (username,nick,nick, nickinfo))

    def join(self, someChannel):
        # Wrap this component as the "sock" so the channel helper's sends
        # go out through our default outbox.
        chan = channel(self,someChannel)
        chan.join()
        return chan

    def main(self):
        "Handling here is pretty naff really :-)"
        self.login(self.nick, self.nickinfo)
        self.channels[self.defaultChannel] = self.join(self.defaultChannel)
        seen_VERSION = False
        while not self.shutdown():
            data=""
            if self.dataReady("talk"):
                # Text from our user: relay it to the default channel.
                data = self.recv("talk")
                self.channels[self.defaultChannel].say(data)
            elif self.dataReady("topic"):
                newtopic = self.recv("topic")
                self.channels[self.defaultChannel].topic(newtopic)
            elif self.dataReady("inbox"):
                # Raw server traffic: normalise line endings and split.
                lines = self.recv()
                if "\r" in lines:
                    # BUGFIX: str.replace returns a new string; the original
                    # discarded the result, so "\r" was never normalised.
                    lines = lines.replace("\r","\n")
                lines = lines.split("\n")
                for data in lines:
                    if "PRIVMSG" in data:
                        if data[0] == ":":
                            data = data[1:]
                        if ("VERSION" in data) and not seen_VERSION:
                            # BUGFIX: was `seen_Version = True`, a typo that
                            # bound a new local and never set the flag, so
                            # every VERSION line was swallowed.
                            seen_VERSION = True
                        else:
                            # Strip the "<prefix> PRIVMSG <target> :" header.
                            data = data[data.find(":")+1:]
                            self.send(data, "heard")
                    elif "PING" in data:
                        # Keepalive: echo the server's payload back as PONG.
                        reply = "PONG" + data[data.find("PING")+4:]
                        self.send(reply+"\r\n")
                    if data.find(self.nick) != -1:
                        if data.find("LEAVE") != -1:
                            break
            if not self.anyReady(): # Axon 1.1.3 (See CVS)
                self.pause() # Wait for response :-)
            yield 1
        self.channels[self.defaultChannel].leave()
        # print self.nick + "... is leaving\n" # Check with and IRC client instead.

    def shutdown(self):
        # Returns True when a shutdown message arrives on "control".
        while self.dataReady("control"):
            msg = self.recv("control")
            if isinstance(msg, producerFinished) or isinstance(msg, shutdownMicroprocess):
                return True
        return False
class SimpleIRCClient(_Axon.Component.component):
    """Sample integration of the IRCClient into a networked environment.

    Wires an IRC_Handler protocol component to a TCPClient inside a
    Graphline, passing through chat text, topic changes and shutdown
    signals from/to this component's own boxes.
    """
    Inboxes = {
       "inbox" : "Stuff that's being said on the channel",
       "control" : "Shutdown control/info",
       "topic" : "Change topic on the channel",
    }
    Outboxes = ["outbox", "signal"]

    def __init__(self, host="127.0.0.1",
                 port=6667,
                 nick="kamaeliabot",
                 nickinfo="Kamaelia",
                 defaultChannel="#kamaeliatest",
                 IRC_Handler=IRC_Client):
        super(SimpleIRCClient, self).__init__()
        self.host = host
        self.port = port
        self.nick = nick
        self.nickinfo = nickinfo
        self.defaultChannel = defaultChannel
        # Injectable protocol class, defaulting to IRC_Client above.
        self.IRC_Handler = IRC_Handler

    def main(self):
        import random
        port=self.port
        host = self.host
#        client = TCPClient(host,port)
#        clientProtocol = self.IRC_Handler(self.nick, self.nickinfo, self.defaultChannel)
        # The Graphline below replaces the hand-written link() calls kept
        # (commented out) underneath for reference.
        subsystem = Graphline(
             SELF = self,
             CLIENT = TCPClient(host,port),
             PROTO = self.IRC_Handler(self.nick, self.nickinfo, self.defaultChannel),
             linkages = {
                 # NOTE(review): the four empty-string entries below contain
                 # duplicate dict keys — ("CLIENT", "") and ("PROTO", "")
                 # each appear twice, so Python silently keeps only the later
                 # value of each. They look like leftover placeholders;
                 # confirm intent and remove if dead.
                 ("CLIENT" , "") : ("CLIENT", ""),
                 ("CLIENT" , "") : ("PROTO" , ""),
                 ("PROTO" , "") : ("CLIENT", ""),
                 ("PROTO" , "") : ("PROTO" , ""),
                 ("CLIENT" , "outbox") : ("PROTO" , "inbox"),
                 ("PROTO" , "outbox") : ("CLIENT", "inbox"),
                 ("PROTO" , "heard") : ("SELF", "outbox"),
                 ("SELF" , "inbox") : ("PROTO" , "talk"),
                 ("SELF" , "topic") : ("PROTO" , "topic"),
                 ("SELF" , "control") : ("PROTO" , "control"),
                 ("PROTO" , "signal") : ("CLIENT", "control"),
                 ("CLIENT" , "signal") : ("SELF" , "signal"),
             }
        )
#        self.link((client,"outbox"), (clientProtocol,"inbox"))
#        self.link((clientProtocol,"outbox"), (client,"inbox"))
#
#        self.link((clientProtocol, "heard"), (self, "outbox"), passthrough=2)
#        self.link((self, "inbox"), (clientProtocol, "talk"), passthrough=1)
#        self.link((self, "topic"), (clientProtocol, "topic"), passthrough=1)
#
#        self.link((self, "control"), (clientProtocol, "control"), passthrough=1)
#        self.link((clientProtocol, "signal"), (client, "control"))
#        self.link((client, "signal"), (self, "signal"), passthrough=2)
#
        self.addChildren(subsystem)
        yield _Axon.Ipc.newComponent(*(self.children))
        # All real work happens in the children; this component just idles.
        while 1:
           self.pause()
           yield 1
if __name__ == '__main__':
    # Demo: console input is relayed to #kamtest on a local IRC server and
    # everything heard is rendered in a Pygame ticker.
    from Axon.Scheduler import scheduler  # NOTE(review): appears unused — confirm
    from Kamaelia.Util.Console import ConsoleReader
    from Kamaelia.UI.Pygame.Ticker import Ticker
    from Kamaelia.Util.PipelineComponent import pipeline
    pipeline(
        ConsoleReader(),
        SimpleIRCClient(host="127.0.0.1", nick="kamaeliabot", defaultChannel="#kamtest"),
        Ticker(render_right = 800,render_bottom = 600),
    ).run()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.