Dataset Preview
The full dataset viewer is not available (click to read why). Only showing a preview of the rows.
The dataset generation failed
Error code: DatasetGenerationError
Exception: TypeError
Message: Couldn't cast array of type
struct<content_hash: string, timestamp: string, source: string, line_count: int64, max_line_length: int64, avg_line_length: double, alnum_prop: double, repo_name: string, id: string, size: string, binary: bool, copies: string, ref: string, path: string, mode: string, license: string, language: list<item: struct<name: string, bytes: string>>, symlink_target: string>
to
{'content_hash': Value(dtype='string', id=None), 'timestamp': Value(dtype='string', id=None), 'source': Value(dtype='string', id=None), 'line_count': Value(dtype='int64', id=None), 'max_line_length': Value(dtype='int64', id=None), 'avg_line_length': Value(dtype='float64', id=None), 'alnum_prop': Value(dtype='float64', id=None), 'repo_name': Value(dtype='string', id=None), 'id': Value(dtype='string', id=None), 'size': Value(dtype='string', id=None), 'binary': Value(dtype='bool', id=None), 'copies': Value(dtype='string', id=None), 'ref': Value(dtype='string', id=None), 'path': Value(dtype='string', id=None), 'mode': Value(dtype='string', id=None), 'license': Value(dtype='string', id=None), 'language': [{'name': Value(dtype='string', id=None), 'bytes': Value(dtype='string', id=None)}]}
Traceback: Traceback (most recent call last):
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 2011, in _prepare_split_single
writer.write_table(table)
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/arrow_writer.py", line 585, in write_table
pa_table = table_cast(pa_table, self._schema)
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 2302, in table_cast
return cast_table_to_schema(table, schema)
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 2261, in cast_table_to_schema
arrays = [cast_array_to_feature(table[name], feature) for name, feature in features.items()]
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 2261, in <listcomp>
arrays = [cast_array_to_feature(table[name], feature) for name, feature in features.items()]
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 1802, in wrapper
return pa.chunked_array([func(chunk, *args, **kwargs) for chunk in array.chunks])
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 1802, in <listcomp>
return pa.chunked_array([func(chunk, *args, **kwargs) for chunk in array.chunks])
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 2122, in cast_array_to_feature
raise TypeError(f"Couldn't cast array of type\n{_short_str(array.type)}\nto\n{_short_str(feature)}")
TypeError: Couldn't cast array of type
struct<content_hash: string, timestamp: string, source: string, line_count: int64, max_line_length: int64, avg_line_length: double, alnum_prop: double, repo_name: string, id: string, size: string, binary: bool, copies: string, ref: string, path: string, mode: string, license: string, language: list<item: struct<name: string, bytes: string>>, symlink_target: string>
to
{'content_hash': Value(dtype='string', id=None), 'timestamp': Value(dtype='string', id=None), 'source': Value(dtype='string', id=None), 'line_count': Value(dtype='int64', id=None), 'max_line_length': Value(dtype='int64', id=None), 'avg_line_length': Value(dtype='float64', id=None), 'alnum_prop': Value(dtype='float64', id=None), 'repo_name': Value(dtype='string', id=None), 'id': Value(dtype='string', id=None), 'size': Value(dtype='string', id=None), 'binary': Value(dtype='bool', id=None), 'copies': Value(dtype='string', id=None), 'ref': Value(dtype='string', id=None), 'path': Value(dtype='string', id=None), 'mode': Value(dtype='string', id=None), 'license': Value(dtype='string', id=None), 'language': [{'name': Value(dtype='string', id=None), 'bytes': Value(dtype='string', id=None)}]}
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/src/services/worker/src/worker/job_runners/config/parquet_and_info.py", line 1529, in compute_config_parquet_and_info_response
parquet_operations = convert_to_parquet(builder)
File "/src/services/worker/src/worker/job_runners/config/parquet_and_info.py", line 1154, in convert_to_parquet
builder.download_and_prepare(
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 1027, in download_and_prepare
self._download_and_prepare(
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 1122, in _download_and_prepare
self._prepare_split(split_generator, **prepare_split_kwargs)
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 1882, in _prepare_split
for job_id, done, content in self._prepare_split_single(
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 2038, in _prepare_split_single
raise DatasetGenerationError("An error occurred while generating the dataset") from e
datasets.exceptions.DatasetGenerationError: An error occurred while generating the dataset

Need help to make the dataset viewer work? Make sure to review how to configure the dataset viewer, and open a discussion for direct support.
| text (string) | meta (dict) |
|---|---|
# Make the gem's lib/ directory loadable, then pull in the library under test.
lib_dir = File.expand_path('../../lib', __FILE__)
$LOAD_PATH.unshift(lib_dir)

require 'moromi'
|
{
"content_hash": "0fc720dfd2e3f4a25cc89fe0210ab058",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 58,
"avg_line_length": 38,
"alnum_prop": 0.6447368421052632,
"repo_name": "moromi/moromi",
"id": "0809e4f67ce1a189f3f50a437d83cd295eb507e0",
"size": "76",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spec/spec_helper.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "2353"
},
{
"name": "Shell",
"bytes": "131"
}
]
}
|
// Copyright (c) Microsoft Corporation. All rights reserved.
using System;
using System.Security.Cryptography;
using Security.Cryptography.Properties;
namespace Security.Cryptography
{
/// <summary>
///     <para>
///         The AesCng class provides a wrapper for the CNG implementation of the AES algorithm. It
///         provides the same interface as the other AES implementations shipped with the .NET Framework,
///         including <see cref="AesManaged" /> and <see cref="AesCryptoServiceProvider" />.
///     </para>
///     <para>
///         AesCng uses the BCrypt layer of CNG to do its work, and requires Windows Vista and the .NET
///         Framework 3.5.
///     </para>
///     <para>
///         Since most of the AesCng APIs are inherited from the <see cref="Aes" /> base class, see the
///         documentation for Aes for a complete API description.
///     </para>
/// </summary>
public sealed class AesCng : Aes, ICngSymmetricAlgorithm
{
    // CNG (BCrypt) implementation object that every property and operation below forwards to.
    private BCryptSymmetricAlgorithm m_symmetricAlgorithm;

    /// <summary>
    ///     Constructs an AesCng object. The default settings for this object are:
    ///     <list type="bullet">
    ///         <item>Algorithm provider - Microsoft Primitive Algorithm Provider</item>
    ///         <item>Block size - 128 bits</item>
    ///         <item>Feedback size - 8 bits</item>
    ///         <item>Key size - 256 bits</item>
    ///         <item>Cipher mode - CipherMode.CBC</item>
    ///         <item>Padding mode - PaddingMode.PKCS7</item>
    ///     </list>
    /// </summary>
    public AesCng() : this(CngProvider2.MicrosoftPrimitiveAlgorithmProvider)
    {
    }

    /// <summary>
    ///     Constructs an AesCng object using the specified algorithm provider. The default settings for
    ///     this object are:
    ///     <list type="bullet">
    ///         <item>Block size - 128 bits</item>
    ///         <item>Feedback size - 8 bits</item>
    ///         <item>Key size - 256 bits</item>
    ///         <item>Cipher mode - CipherMode.CBC</item>
    ///         <item>Padding mode - PaddingMode.PKCS7</item>
    ///     </list>
    /// </summary>
    /// <exception cref="ArgumentNullException">if <paramref name="algorithmProvider"/> is null</exception>
    /// <param name="algorithmProvider">algorithm provider to use for AES computation</param>
    public AesCng(CngProvider algorithmProvider)
    {
        if (algorithmProvider == null)
            throw new ArgumentNullException("algorithmProvider");

        m_symmetricAlgorithm = new BCryptSymmetricAlgorithm(new CngAlgorithm(BCryptNative.AlgorithmName.Aes),
                                                            algorithmProvider,
                                                            LegalBlockSizesValue,
                                                            LegalKeySizesValue);

        // Propagate the default properties from the Aes class to the implementation algorithm.
        m_symmetricAlgorithm.BlockSize = BlockSizeValue;
        m_symmetricAlgorithm.KeySize = KeySizeValue;
        m_symmetricAlgorithm.Mode = ModeValue;
        m_symmetricAlgorithm.Padding = PaddingValue;
    }

    protected override void Dispose(bool disposing)
    {
        try
        {
            if (disposing && m_symmetricAlgorithm != null)
            {
                (m_symmetricAlgorithm as IDisposable).Dispose();
            }
        }
        finally
        {
            base.Dispose(disposing);
        }
    }

    //
    // Forwarded APIs
    //

    public override int BlockSize
    {
        get { return m_symmetricAlgorithm.BlockSize; }
        set { m_symmetricAlgorithm.BlockSize = value; }
    }

    public CngChainingMode CngMode
    {
        get { return m_symmetricAlgorithm.CngMode; }
        set { m_symmetricAlgorithm.CngMode = value; }
    }

    public override int FeedbackSize
    {
        get { return m_symmetricAlgorithm.FeedbackSize; }
        set { m_symmetricAlgorithm.FeedbackSize = value; }
    }

    public override byte[] IV
    {
        get { return m_symmetricAlgorithm.IV; }
        set { m_symmetricAlgorithm.IV = value; }
    }

    public override byte[] Key
    {
        get { return m_symmetricAlgorithm.Key; }
        set { m_symmetricAlgorithm.Key = value; }
    }

    public override int KeySize
    {
        get { return m_symmetricAlgorithm.KeySize; }
        set { m_symmetricAlgorithm.KeySize = value; }
    }

    public override KeySizes[] LegalBlockSizes
    {
        get { return m_symmetricAlgorithm.LegalBlockSizes; }
    }

    public override KeySizes[] LegalKeySizes
    {
        // BUGFIX: this previously forwarded to LegalBlockSizes, so callers querying the
        // legal *key* sizes were given the legal *block* sizes instead.
        get { return m_symmetricAlgorithm.LegalKeySizes; }
    }

    /// <summary>
    ///     Gets or sets the cipher mode to use during encryption or decryption. Supported modes are:
    ///     <list type="bullet">
    ///         <item>CipherMode.CBC</item>
    ///         <item>CipherMode.ECB</item>
    ///         <item>CipherMode.CFB</item>
    ///     </list>
    /// </summary>
    public override CipherMode Mode
    {
        get { return m_symmetricAlgorithm.Mode; }
        set { m_symmetricAlgorithm.Mode = value; }
    }

    public override PaddingMode Padding
    {
        get { return m_symmetricAlgorithm.Padding; }
        set { m_symmetricAlgorithm.Padding = value; }
    }

    public CngProvider Provider
    {
        get { return m_symmetricAlgorithm.Provider; }
    }

    public override ICryptoTransform CreateDecryptor()
    {
        return m_symmetricAlgorithm.CreateDecryptor();
    }

    public override ICryptoTransform CreateDecryptor(byte[] rgbKey, byte[] rgbIV)
    {
        return m_symmetricAlgorithm.CreateDecryptor(rgbKey, rgbIV);
    }

    public override ICryptoTransform CreateEncryptor()
    {
        return m_symmetricAlgorithm.CreateEncryptor();
    }

    public override ICryptoTransform CreateEncryptor(byte[] rgbKey, byte[] rgbIV)
    {
        return m_symmetricAlgorithm.CreateEncryptor(rgbKey, rgbIV);
    }

    public override void GenerateIV()
    {
        m_symmetricAlgorithm.GenerateIV();
    }

    public override void GenerateKey()
    {
        m_symmetricAlgorithm.GenerateKey();
    }
}
}
|
{
"content_hash": "0a4538a603ac13e9d010d404ffabee20",
"timestamp": "",
"source": "github",
"line_count": 194,
"max_line_length": 113,
"avg_line_length": 35.75773195876289,
"alnum_prop": 0.5479313824419778,
"repo_name": "hdracer/HIISDA",
"id": "67db02a22a09389acffd89599688961ce5e9ecb5",
"size": "6939",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Security.Cryptography/src/AesCng.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "663869"
}
]
}
|
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.videoanalyzer.models;
import com.azure.core.util.ExpandableStringEnum;
import com.fasterxml.jackson.annotation.JsonCreator;
import java.util.Collection;
/** Defines values for PublicNetworkAccess. */
public final class PublicNetworkAccess extends ExpandableStringEnum<PublicNetworkAccess> {

    /** Static value Enabled for PublicNetworkAccess. */
    public static final PublicNetworkAccess ENABLED = fromString("Enabled");

    /** Static value Disabled for PublicNetworkAccess. */
    public static final PublicNetworkAccess DISABLED = fromString("Disabled");

    /**
     * Gets known PublicNetworkAccess values.
     *
     * @return known PublicNetworkAccess values.
     */
    public static Collection<PublicNetworkAccess> values() {
        return values(PublicNetworkAccess.class);
    }

    /**
     * Creates or finds a PublicNetworkAccess from its string representation.
     *
     * @param name a name to look for.
     * @return the corresponding PublicNetworkAccess.
     */
    @JsonCreator
    public static PublicNetworkAccess fromString(String name) {
        return fromString(name, PublicNetworkAccess.class);
    }
}
|
{
"content_hash": "77bc6359616d43d1a6f28336c77cdb98",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 90,
"avg_line_length": 35.1578947368421,
"alnum_prop": 0.7357784431137725,
"repo_name": "Azure/azure-sdk-for-java",
"id": "5f8455885a91b0e4b6809cac2f40f81e0395931e",
"size": "1336",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/videoanalyzer/azure-resourcemanager-videoanalyzer/src/main/java/com/azure/resourcemanager/videoanalyzer/models/PublicNetworkAccess.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "8762"
},
{
"name": "Bicep",
"bytes": "15055"
},
{
"name": "CSS",
"bytes": "7676"
},
{
"name": "Dockerfile",
"bytes": "2028"
},
{
"name": "Groovy",
"bytes": "3237482"
},
{
"name": "HTML",
"bytes": "42090"
},
{
"name": "Java",
"bytes": "432409546"
},
{
"name": "JavaScript",
"bytes": "36557"
},
{
"name": "Jupyter Notebook",
"bytes": "95868"
},
{
"name": "PowerShell",
"bytes": "737517"
},
{
"name": "Python",
"bytes": "240542"
},
{
"name": "Scala",
"bytes": "1143898"
},
{
"name": "Shell",
"bytes": "18488"
},
{
"name": "XSLT",
"bytes": "755"
}
]
}
|
using namespace serial;
// Serial-link Bluetooth controller interface: sends angle/speed control frames
// and toggles a buzzer over a serial::Serial connection.
class Bluetooth {
private:
// Open serial connection to the Bluetooth device (owned pointer — NOTE(review):
// ownership/cleanup not visible here; confirm who deletes it).
Serial *connection;
bool buzz;
bool isConnected;
public:
// Opens `port` at `baud` and returns true on success.
bool connect(const std::string &port,int baud);
// Transmits one control frame: target angle plus optional speed and zero-angle offset.
void sendDatas(int16_t angle,int8_t speed = 3,int8_t zero_angle = 0);
void turnOnBuzzer();
void turnOffBuzzer();
void start();
void stop();
// Reports whether `port` is already in use.
bool isUsed(const std::string &port);
};
#endif
|
{
"content_hash": "57e26a32fcc12390db6025fc873c331f",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 77,
"avg_line_length": 25.41176470588235,
"alnum_prop": 0.5902777777777778,
"repo_name": "AyricRobo/28Dey",
"id": "32e48de6b8c48494ab300a06ff255aee54aa8042",
"size": "520",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "include/bluetooth.hpp",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "11858"
},
{
"name": "CMake",
"bytes": "260"
}
]
}
|
package org.openqa.selenium;
/**
 * Thrown when an XPath-based element lookup fails.
 *
 * @deprecated Use InvalidSelectorException instead
 */
@Deprecated
public class XPathLookupException extends InvalidSelectorException {
/**
 * @param message detail message describing the failed XPath lookup
 */
public XPathLookupException(String message) {
super(message);
}
/**
 * @param message detail message describing the failed XPath lookup
 * @param cause underlying exception that triggered this one
 */
public XPathLookupException(String message, Throwable cause) {
super(message, cause);
}
}
|
{
"content_hash": "eb4f12c11f23ea2807ddf27a2beb81c4",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 68,
"avg_line_length": 18.210526315789473,
"alnum_prop": 0.7543352601156069,
"repo_name": "qamate/iOS-selenium-server",
"id": "27463c3199d52132434d483f9dd3665db12800ae",
"size": "956",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "java/client/src/org/openqa/selenium/XPathLookupException.java",
"mode": "33188",
"license": "apache-2.0",
"language": []
}
|
package com.github.agjacome.httpserver.model.repository;
import java.time.Instant;
import java.util.List;
import java.util.UUID;
import com.github.agjacome.httpserver.model.Session;
import com.github.agjacome.httpserver.model.User;
/**
 * Repository of {@link Session} entities keyed by UUID, with convenience
 * lookups by owning user and by creation instant.
 */
public interface SessionRepository extends Repository<UUID, Session> {

    /** Finds every session belonging to the given user. */
    default ResultBuilder<Session, List<Session>> searchByUser(final User user) {
        return search(session -> session.getUser().equals(user));
    }

    /** Finds every session whose instant equals the given one. */
    default ResultBuilder<Session, List<Session>> searchByInstant(final Instant instant) {
        return search(session -> session.getInstant().equals(instant));
    }

}
|
{
"content_hash": "28516d4543f42e387115776556db2822",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 73,
"avg_line_length": 27.166666666666668,
"alnum_prop": 0.7177914110429447,
"repo_name": "agjacome/httpserver-web-test",
"id": "edde5f2236d001ca949759a06c9a5d9c833dd84b",
"size": "652",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/com/github/agjacome/httpserver/model/repository/SessionRepository.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "2005"
},
{
"name": "Java",
"bytes": "154439"
}
]
}
|
# Persists tax-related preference changes made from the admin UI.
class Admin::TaxSettingsController < Admin::BaseController
  # Writes the submitted preferences into Spree::Config, then sends the
  # browser back to the tax settings page.
  def update
    Spree::Config.set(params[:preferences])
    respond_to do |format|
      format.html { redirect_to admin_tax_settings_path }
    end
  end
end
|
{
"content_hash": "88cb61fece94568f0b4cc423fb703c2e",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 58,
"avg_line_length": 18.153846153846153,
"alnum_prop": 0.6610169491525424,
"repo_name": "evil-c/spree_core",
"id": "76d71a83c21fc126e50b46b088ac45d9513bd868",
"size": "236",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/controllers/admin/tax_settings_controller.rb",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "353468"
},
{
"name": "Ruby",
"bytes": "649944"
}
]
}
|
package org.jmxdatamart.Loader;
import org.jmxdatamart.common.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.Connection;
import java.util.Properties;
/**
 * Holds the configuration of the target data-mart database: the handler for the
 * concrete database engine, the bookkeeping field definitions used on import,
 * and the connection settings supplied by the caller.
 */
public class DataMartDB {

    private final org.slf4j.Logger logger = LoggerFactory.getLogger(this.getClass());

    /** Name of the main table every import references. */
    private final String mainTableName = "mainTable";

    // Bookkeeping columns written during import.
    private FieldAttribute testID;
    private FieldAttribute importTime;
    private FieldAttribute importedFile; // used to check if the embedded database has already been imported, avoiding duplicate imports

    private Setting.DBInfo dbInfo;
    private DBHandler targetDatabase;
    private Properties additional;
    private Connection targetConn; // NOTE(review): never used in this class as shown — confirm before removing

    public Setting.DBInfo getDbInfo() {
        return dbInfo;
    }

    public Properties getAdditional() {
        return additional;
    }

    public String getMainTableName() {
        return mainTableName;
    }

    public FieldAttribute getTestID() {
        return testID;
    }

    public FieldAttribute getImportTime() {
        return importTime;
    }

    public FieldAttribute getImportedFile() {
        return importedFile;
    }

    public DBHandler getTargetDatabase() {
        return targetDatabase;
    }

    /**
     * @param dbInfo connection settings including the database type and JDBC URL
     * @param ad     additional connection properties passed through to the driver
     */
    public DataMartDB(Setting.DBInfo dbInfo, Properties ad) {
        this.dbInfo = dbInfo;
        this.additional = ad;

        testID = new FieldAttribute("testId", DataType.LONG, true);
        importTime = new FieldAttribute("importTime", DataType.DATETIME, false);
        importedFile = new FieldAttribute("importFile", DataType.STRING, false);

        if (dbInfo.getDatabaseType().equals(DataType.SupportedDatabase.MSSQL)) {
            targetDatabase = new MssqlHandler();
            ((MssqlHandler) targetDatabase).setJdbcurl(dbInfo.getJdbcUrl());
        } else if (dbInfo.getDatabaseType().equals(DataType.SupportedDatabase.HSQL)) {
            targetDatabase = new HypersqlHandler();
        } else if (dbInfo.getDatabaseType().equals(DataType.SupportedDatabase.DERBY)) {
            targetDatabase = new DerbyHandler();
        } else {
            // BUGFIX: an unrecognized type previously left targetDatabase null with no
            // diagnostic, deferring the failure to a later NullPointerException.
            logger.warn("Unsupported database type {}; targetDatabase is left unset",
                    dbInfo.getDatabaseType());
        }
    }
}
|
{
"content_hash": "76e6a484700695824a552c2bf2c34fc8",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 124,
"avg_line_length": 30.352941176470587,
"alnum_prop": 0.6952519379844961,
"repo_name": "TeamDewberry/jmxdatamart",
"id": "b421b8c8b2214c5b55fdf5ff29784e99d6663f63",
"size": "3443",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "jmx-loader/src/main/java/org/jmxdatamart/Loader/DataMartDB.java",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Java",
"bytes": "245215"
}
]
}
|
# Adds the spacepub_tags table holding tag names and their URL slugs.
class CreateSpacepubTags < ActiveRecord::Migration
  def change
    create_table :spacepub_tags do |t|
      # Both columns default to an empty string rather than NULL.
      [:name, :slug].each { |column| t.string column, :default => "" }

      t.timestamps
    end
  end
end
|
{
"content_hash": "ac292ac371f7a4ef01dad05f77f49779",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 50,
"avg_line_length": 23.77777777777778,
"alnum_prop": 0.6308411214953271,
"repo_name": "hacknite/spacepub",
"id": "d9d3f4f177ed328f9d00d05bbf086bce8b403541",
"size": "214",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "db/migrate/20120523042912_create_spacepub_tags.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CoffeeScript",
"bytes": "14614"
},
{
"name": "JavaScript",
"bytes": "27978"
},
{
"name": "Ruby",
"bytes": "54626"
}
]
}
|
ACCEPTED
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null
|
{
"content_hash": "2446e53d5ab91e1a5052be78ac14a397",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 39,
"avg_line_length": 10.307692307692308,
"alnum_prop": 0.6940298507462687,
"repo_name": "mdoering/backbone",
"id": "7c6b0ecf4710637ba526092971718be418e5a8dc",
"size": "197",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Bacillariophyta/Bacillariophyceae/Acnanthales/Achnanthaceae/Achnanthes/Achnanthes okunoi/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": []
}
|
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.tanc.proxy</groupId>
<artifactId>proxy-demo</artifactId>
<version>1.0-SNAPSHOT</version>
<packaging>jar</packaging>
<name>proxy-demo</name>
<parent>
<groupId>org.demo</groupId>
<artifactId>tanc-java-demo</artifactId>
<version>${configure.maven.version}</version>
</parent>
<dependencies>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
</project>
|
{
"content_hash": "de707b66799347566bd270540f277d92",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 108,
"avg_line_length": 32,
"alnum_prop": 0.62875,
"repo_name": "gogotanc/java-demo",
"id": "e6741ad2fc1e1cdf28eb5662f9a2b050a0774650",
"size": "800",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "proxy/pom.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "112316"
}
]
}
|
# This file is copied to spec/ when you run 'rails generate rspec:install'
ENV["RAILS_ENV"] ||= 'test'
require File.expand_path("../../config/environment", __FILE__)
require 'rspec/rails'
require 'capybara/rspec'
# Requires supporting ruby files with custom matchers and macros, etc, in
# spec/support/ and its subdirectories. Files matching `spec/**/*_spec.rb` are
# run as spec files by default. This means that files in spec/support that end
# in _spec.rb will both be required and run as specs, causing the specs to be
# run twice. It is recommended that you do not name files matching this glob to
# end with _spec.rb. You can configure this pattern with the --pattern
# option on the command line or in ~/.rspec, .rspec or `.rspec-local`.
Dir[Rails.root.join("spec/support/**/*.rb")].each { |f| require f }
# Expose Warden's test helpers (login_as/logout) globally and switch Warden to
# test mode so authentication can be stubbed without real HTTP round-trips.
include Warden::Test::Helpers
Warden.test_mode!
RSpec.configure do |config|
# Remove this line if you're not using ActiveRecord or ActiveRecord fixtures
config.fixture_path = "#{::Rails.root}/spec/fixtures"
# If you're not using ActiveRecord, or you'd prefer not to run each of your
# examples within a transaction, remove the following line or assign false
# instead of true.
config.use_transactional_fixtures = false
# Reference data loaded once for the whole suite. String entries name tables
# outside the default schema (e.g. 'sys.…', 'class4.…').
config.global_fixtures = [
:pops,
:nodes,
'sys.sensor_modes',
:guiconfig,
:sortings,
:destination_rate_policy,
:session_refresh_methods,
'sys.sensor_levels',
:disconnect_policy,
:diversion_policy,
:filter_types,
:sdp_c_location,
:codecs,
:dump_level,
'class4.dtmf_send_modes',
'class4.dtmf_receive_modes'
]
# RSpec Rails can automatically mix in different behaviours to your tests
# based on their file location, for example enabling you to call `get` and
# `post` in specs under `spec/controllers`.
#
# You can disable this behaviour by removing the line below, and instead
# explicitly tag your specs with their type, e.g.:
#
# RSpec.describe UsersController, :type => :controller do
# # ...
# end
#
# The different available types are documented in the features, such as in
# https://relishapp.com/rspec/rspec-rails/docs
config.infer_spec_type_from_file_location!
config.include FactoryGirl::Syntax::Methods
# DatabaseCleaner scheme: start from a truncated database, then wrap ordinary
# examples in a transaction; JS (Capybara) examples fall back to truncation
# because the browser-driven app uses a separate connection that cannot see
# an uncommitted transaction.
config.before(:suite) do
DatabaseCleaner.clean_with :truncation
end
config.before(:each) do
DatabaseCleaner.strategy = :transaction
end
config.before(:each, js: true) do
DatabaseCleaner.strategy = :truncation
end
# before/after hooks run in definition order, so the strategy selected above
# is started here and cleaned up after each example.
config.before(:each) do
DatabaseCleaner.start
end
config.after(:each) do
DatabaseCleaner.clean
end
end
|
{
"content_hash": "32c8b10df73f9c3466384da3bffd6b49",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 79,
"avg_line_length": 32.13253012048193,
"alnum_prop": 0.6970378702662168,
"repo_name": "sashker/yeti-web",
"id": "e025f96647f78a9e4c79cc0a7edf0aabf7de0a4d",
"size": "2667",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spec/spec_helper.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "8459"
},
{
"name": "CoffeeScript",
"bytes": "1300"
},
{
"name": "HTML",
"bytes": "41176"
},
{
"name": "JavaScript",
"bytes": "26512"
},
{
"name": "Makefile",
"bytes": "3574"
},
{
"name": "PLpgSQL",
"bytes": "13572210"
},
{
"name": "Python",
"bytes": "8459"
},
{
"name": "Ruby",
"bytes": "788438"
},
{
"name": "SQLPL",
"bytes": "1451929"
},
{
"name": "Shell",
"bytes": "10054"
}
]
}
|
package com.dremio.exec.planner.cost;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.apache.calcite.plan.AbstractRelOptPlanner;
import org.apache.calcite.plan.RelOptPlanner;
import org.apache.calcite.plan.hep.HepRelVertex;
import org.apache.calcite.plan.volcano.RelSubset;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.metadata.NullSentinel;
import org.apache.calcite.rel.metadata.RelMetadataCache;
import com.dremio.common.exceptions.UserException;
import com.dremio.exec.planner.physical.PlannerSettings;
import com.dremio.exec.planner.physical.PrelUtil;
/**
 * A {@code RelMetadataCache} whose backing map is guarded by a single
 * {@link ReentrantReadWriteLock}: reads take the read lock, all mutations the
 * write lock. {@link #put} additionally counts its invocations and aborts
 * planning with a {@code UserException} once a planner-configured ceiling
 * ({@code PlannerSettings.maxMetadataCallCount()}) is exceeded.
 */
public class DremioRelMetadataCache implements RelMetadataCache {
public static final String MAX_METADATA_CALL_ERROR_MESSAGE =
"Max Rel Metadata call count exceeded";
// Lifetime count of put() calls; compared against the planner's ceiling below.
private final AtomicLong putCallCount = new AtomicLong();
// Guards all access to 'map' (read lock for get, write lock for put/remove/clear).
private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
// Per-RelNode rows of cached metadata values, keyed by the metadata-call arguments.
private final Map<RelNode, Map<Object, Object>> map = new HashMap<>();
// Drops all cached metadata for one RelNode; returns true if anything was cached.
@Override
public boolean clear(RelNode rel) {
lock.writeLock().lock();
try {
return map.remove(rel) != null;
} finally {
lock.writeLock().unlock();
}
}
// Drops the entire cache. Note: does not reset putCallCount.
public void clear() {
lock.writeLock().lock();
try {
map.clear();
} finally {
lock.writeLock().unlock();
}
}
// Removes and returns a single cached entry, or null if absent.
@Override
public Object remove(RelNode relNode, Object args) {
lock.writeLock().lock();
try {
Map<Object, Object> row = map.get(relNode);
if (row == null) {
return null;
}
return row.remove(args);
} finally {
lock.writeLock().unlock();
}
}
// Looks up a cached entry under the read lock; null if absent.
@Override
public Object get(RelNode relNode, Object args) {
lock.readLock().lock();
try {
Map<Object, Object> row = map.get(relNode);
if (row == null) {
return null;
}
return row.get(args);
} finally {
lock.readLock().unlock();
}
}
@Override
public Object put(RelNode relNode, Object args, Object value) {
RelOptPlanner planner = relNode.getCluster().getPlanner();
// Count every attempt before taking the lock (AtomicLong, so no lock needed).
long pcc = putCallCount.incrementAndGet();
lock.writeLock().lock();
try {
// NullSentinel.ACTIVE marks an in-progress metadata computation; it is only
// cached for RelSubset/HepRelVertex nodes — NOTE(review): presumably because
// their metadata calls can legitimately re-enter; confirm against Calcite.
if (value != NullSentinel.ACTIVE || relNode instanceof RelSubset || relNode instanceof HepRelVertex) {
Map<Object, Object> row = map.get(relNode);
if (row == null) {
// Only check when we see a new RelNode to make sure the overhead is minimized.
final PlannerSettings settings;
if (planner instanceof AbstractRelOptPlanner
&& null != (settings = PrelUtil.getPlannerSettings(planner))) {
long maxCallCount = settings.maxMetadataCallCount();
if (pcc > maxCallCount) {
throw UserException.planError()
.message(MAX_METADATA_CALL_ERROR_MESSAGE).buildSilently();
}
// Also honor planner cancellation while we are here.
((AbstractRelOptPlanner) planner).checkCancel();
}
row = new HashMap<>();
map.put(relNode, row);
}
return row.put(args, value);
} else {
return null;
}
} finally {
lock.writeLock().unlock();
}
}
}
|
{
"content_hash": "2056a312c1678cb559edbffdea239ef9",
"timestamp": "",
"source": "github",
"line_count": 108,
"max_line_length": 108,
"avg_line_length": 29.85185185185185,
"alnum_prop": 0.6522952853598015,
"repo_name": "dremio/dremio-oss",
"id": "de8daca169cfcc29e5faa7ca1ab9b9651f0cf741",
"size": "3834",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sabot/kernel/src/main/java/com/dremio/exec/planner/cost/DremioRelMetadataCache.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "47376"
},
{
"name": "Dockerfile",
"bytes": "1668"
},
{
"name": "FreeMarker",
"bytes": "132156"
},
{
"name": "GAP",
"bytes": "15936"
},
{
"name": "HTML",
"bytes": "6544"
},
{
"name": "Java",
"bytes": "39679012"
},
{
"name": "JavaScript",
"bytes": "5439822"
},
{
"name": "Less",
"bytes": "547002"
},
{
"name": "SCSS",
"bytes": "95688"
},
{
"name": "Shell",
"bytes": "16063"
},
{
"name": "TypeScript",
"bytes": "887739"
}
]
}
|
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null
|
{
"content_hash": "2f1fcda99788c208f812ec26e60930e1",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 31,
"avg_line_length": 9.692307692307692,
"alnum_prop": 0.7063492063492064,
"repo_name": "mdoering/backbone",
"id": "d248bad381f74167fd6150c51869673407d7fff5",
"size": "174",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Magnoliopsida/Lamiales/Lamiaceae/Salvia/Salvia shannoni/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": []
}
|
# Version of the asset pipeline; bump to expire every compiled asset.
Rails.application.config.assets.version = '1.0'

# Precompile additional assets.
# application.js, application.css, and all non-JS/CSS in app/assets folder are already added.
%w( RadarChart.js data.js ).each do |asset|
  Rails.application.config.assets.precompile += [asset]
end
|
{
"content_hash": "6ae91c7537cfaba0534f8a64485289b9",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 93,
"avg_line_length": 50.166666666666664,
"alnum_prop": 0.770764119601329,
"repo_name": "egarza54/Student-Radar-Maps",
"id": "77c8a0a3d3c3c44000c8b46d800eedea35d05192",
"size": "439",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "config/initializers/assets.rb",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2102"
},
{
"name": "CoffeeScript",
"bytes": "0"
},
{
"name": "JavaScript",
"bytes": "13372"
},
{
"name": "Ruby",
"bytes": "21579"
}
]
}
|
-- Generated protobuf binding for the LengthenIndate / BceLengthenIndate messages
-- (package com.xinqihd.sns.gameserver.proto).
-- NOTE(review): this file looks machine-generated; prefer regenerating from the
-- .proto definition over hand-editing.
-- NOTE(review): the numeric label/type/cpp_type values appear to follow the
-- standard protobuf descriptor enums (e.g. label 1 = optional, 2 = required,
-- 3 = repeated; type 5 = int32, 9 = string, 11 = message) — confirm against the
-- Lua protobuf runtime in use.
local protobuf = require "protobuf"
module('BceLengthenIndate_pb', package.seeall)
-- Descriptor skeletons; fields are filled in below, then wired together.
local LENGTHENINDATE = protobuf.Descriptor();
local LENGTHENINDATE_SHOPID_FIELD = protobuf.FieldDescriptor();
local LENGTHENINDATE_INDATETYPE_FIELD = protobuf.FieldDescriptor();
local LENGTHENINDATE_ID_FIELD = protobuf.FieldDescriptor();
local LENGTHENINDATE_PROPPOS_FIELD = protobuf.FieldDescriptor();
local BCELENGTHENINDATE = protobuf.Descriptor();
local BCELENGTHENINDATE_PEW_FIELD = protobuf.FieldDescriptor();
local BCELENGTHENINDATE_INDATETYPE_FIELD = protobuf.FieldDescriptor();
local BCELENGTHENINDATE_INDATEGOLDTYPE_FIELD = protobuf.FieldDescriptor();
local BCELENGTHENINDATE_LENGTHENINDATESET_FIELD = protobuf.FieldDescriptor();
-- LengthenIndate.shopid (field 1)
LENGTHENINDATE_SHOPID_FIELD.name = "shopid"
LENGTHENINDATE_SHOPID_FIELD.full_name = ".com.xinqihd.sns.gameserver.proto.LengthenIndate.shopid"
LENGTHENINDATE_SHOPID_FIELD.number = 1
LENGTHENINDATE_SHOPID_FIELD.index = 0
LENGTHENINDATE_SHOPID_FIELD.label = 1
LENGTHENINDATE_SHOPID_FIELD.has_default_value = true
LENGTHENINDATE_SHOPID_FIELD.default_value = 0
LENGTHENINDATE_SHOPID_FIELD.type = 5
LENGTHENINDATE_SHOPID_FIELD.cpp_type = 1
-- LengthenIndate.indatetype (field 2)
LENGTHENINDATE_INDATETYPE_FIELD.name = "indatetype"
LENGTHENINDATE_INDATETYPE_FIELD.full_name = ".com.xinqihd.sns.gameserver.proto.LengthenIndate.indatetype"
LENGTHENINDATE_INDATETYPE_FIELD.number = 2
LENGTHENINDATE_INDATETYPE_FIELD.index = 1
LENGTHENINDATE_INDATETYPE_FIELD.label = 1
LENGTHENINDATE_INDATETYPE_FIELD.has_default_value = true
LENGTHENINDATE_INDATETYPE_FIELD.default_value = 0
LENGTHENINDATE_INDATETYPE_FIELD.type = 5
LENGTHENINDATE_INDATETYPE_FIELD.cpp_type = 1
-- LengthenIndate.id (field 3, string)
LENGTHENINDATE_ID_FIELD.name = "id"
LENGTHENINDATE_ID_FIELD.full_name = ".com.xinqihd.sns.gameserver.proto.LengthenIndate.id"
LENGTHENINDATE_ID_FIELD.number = 3
LENGTHENINDATE_ID_FIELD.index = 2
LENGTHENINDATE_ID_FIELD.label = 1
LENGTHENINDATE_ID_FIELD.has_default_value = true
LENGTHENINDATE_ID_FIELD.default_value = ""
LENGTHENINDATE_ID_FIELD.type = 9
LENGTHENINDATE_ID_FIELD.cpp_type = 9
-- LengthenIndate.proppos (field 4)
LENGTHENINDATE_PROPPOS_FIELD.name = "proppos"
LENGTHENINDATE_PROPPOS_FIELD.full_name = ".com.xinqihd.sns.gameserver.proto.LengthenIndate.proppos"
LENGTHENINDATE_PROPPOS_FIELD.number = 4
LENGTHENINDATE_PROPPOS_FIELD.index = 3
LENGTHENINDATE_PROPPOS_FIELD.label = 1
LENGTHENINDATE_PROPPOS_FIELD.has_default_value = true
LENGTHENINDATE_PROPPOS_FIELD.default_value = 0
LENGTHENINDATE_PROPPOS_FIELD.type = 5
LENGTHENINDATE_PROPPOS_FIELD.cpp_type = 1
-- LengthenIndate message: wire the four fields onto the descriptor.
LENGTHENINDATE.name = "LengthenIndate"
LENGTHENINDATE.full_name = ".com.xinqihd.sns.gameserver.proto.LengthenIndate"
LENGTHENINDATE.nested_types = {}
LENGTHENINDATE.enum_types = {}
LENGTHENINDATE.fields = {LENGTHENINDATE_SHOPID_FIELD, LENGTHENINDATE_INDATETYPE_FIELD, LENGTHENINDATE_ID_FIELD, LENGTHENINDATE_PROPPOS_FIELD}
LENGTHENINDATE.is_extendable = false
LENGTHENINDATE.extensions = {}
-- BceLengthenIndate.pew (field 1)
BCELENGTHENINDATE_PEW_FIELD.name = "pew"
BCELENGTHENINDATE_PEW_FIELD.full_name = ".com.xinqihd.sns.gameserver.proto.BceLengthenIndate.pew"
BCELENGTHENINDATE_PEW_FIELD.number = 1
BCELENGTHENINDATE_PEW_FIELD.index = 0
BCELENGTHENINDATE_PEW_FIELD.label = 2
BCELENGTHENINDATE_PEW_FIELD.has_default_value = true
BCELENGTHENINDATE_PEW_FIELD.default_value = 0
BCELENGTHENINDATE_PEW_FIELD.type = 5
BCELENGTHENINDATE_PEW_FIELD.cpp_type = 1
-- BceLengthenIndate.indateType (field 2)
BCELENGTHENINDATE_INDATETYPE_FIELD.name = "indateType"
BCELENGTHENINDATE_INDATETYPE_FIELD.full_name = ".com.xinqihd.sns.gameserver.proto.BceLengthenIndate.indateType"
BCELENGTHENINDATE_INDATETYPE_FIELD.number = 2
BCELENGTHENINDATE_INDATETYPE_FIELD.index = 1
BCELENGTHENINDATE_INDATETYPE_FIELD.label = 2
BCELENGTHENINDATE_INDATETYPE_FIELD.has_default_value = true
BCELENGTHENINDATE_INDATETYPE_FIELD.default_value = 0
BCELENGTHENINDATE_INDATETYPE_FIELD.type = 5
BCELENGTHENINDATE_INDATETYPE_FIELD.cpp_type = 1
-- BceLengthenIndate.indateGoldType (field 3)
BCELENGTHENINDATE_INDATEGOLDTYPE_FIELD.name = "indateGoldType"
BCELENGTHENINDATE_INDATEGOLDTYPE_FIELD.full_name = ".com.xinqihd.sns.gameserver.proto.BceLengthenIndate.indateGoldType"
BCELENGTHENINDATE_INDATEGOLDTYPE_FIELD.number = 3
BCELENGTHENINDATE_INDATEGOLDTYPE_FIELD.index = 2
BCELENGTHENINDATE_INDATEGOLDTYPE_FIELD.label = 1
BCELENGTHENINDATE_INDATEGOLDTYPE_FIELD.has_default_value = true
BCELENGTHENINDATE_INDATEGOLDTYPE_FIELD.default_value = 0
BCELENGTHENINDATE_INDATEGOLDTYPE_FIELD.type = 5
BCELENGTHENINDATE_INDATEGOLDTYPE_FIELD.cpp_type = 1
-- BceLengthenIndate.lengthenindateset (field 4): nested LengthenIndate messages.
BCELENGTHENINDATE_LENGTHENINDATESET_FIELD.name = "lengthenindateset"
BCELENGTHENINDATE_LENGTHENINDATESET_FIELD.full_name = ".com.xinqihd.sns.gameserver.proto.BceLengthenIndate.lengthenindateset"
BCELENGTHENINDATE_LENGTHENINDATESET_FIELD.number = 4
BCELENGTHENINDATE_LENGTHENINDATESET_FIELD.index = 3
BCELENGTHENINDATE_LENGTHENINDATESET_FIELD.label = 3
BCELENGTHENINDATE_LENGTHENINDATESET_FIELD.has_default_value = false
BCELENGTHENINDATE_LENGTHENINDATESET_FIELD.default_value = {}
BCELENGTHENINDATE_LENGTHENINDATESET_FIELD.message_type = LENGTHENINDATE
BCELENGTHENINDATE_LENGTHENINDATESET_FIELD.type = 11
BCELENGTHENINDATE_LENGTHENINDATESET_FIELD.cpp_type = 10
-- BceLengthenIndate message: wire the four fields onto the descriptor.
BCELENGTHENINDATE.name = "BceLengthenIndate"
BCELENGTHENINDATE.full_name = ".com.xinqihd.sns.gameserver.proto.BceLengthenIndate"
BCELENGTHENINDATE.nested_types = {}
BCELENGTHENINDATE.enum_types = {}
BCELENGTHENINDATE.fields = {BCELENGTHENINDATE_PEW_FIELD, BCELENGTHENINDATE_INDATETYPE_FIELD, BCELENGTHENINDATE_INDATEGOLDTYPE_FIELD, BCELENGTHENINDATE_LENGTHENINDATESET_FIELD}
BCELENGTHENINDATE.is_extendable = false
BCELENGTHENINDATE.extensions = {}
-- Exported message constructors and module-global descriptor handles.
BceLengthenIndate = protobuf.Message(BCELENGTHENINDATE)
LengthenIndate = protobuf.Message(LENGTHENINDATE)
_G.BCELENGTHENINDATE_PB_BCELENGTHENINDATE = BCELENGTHENINDATE
_G.LENGTHENINDATE_PB_LENGTHENINDATE = LENGTHENINDATE
|
{
"content_hash": "76426b16ad9ea9ef73e427e842d4eb15",
"timestamp": "",
"source": "github",
"line_count": 116,
"max_line_length": 175,
"avg_line_length": 49.11206896551724,
"alnum_prop": 0.8328945058802879,
"repo_name": "wangqi/gameserver",
"id": "423935886a41f775fb511bdbd4ce6c83e9c83fea",
"size": "5740",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "server/src/gensrc/lua/BceLengthenIndate_pb.lua",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "IDL",
"bytes": "33142"
},
{
"name": "Java",
"bytes": "7197480"
},
{
"name": "Lua",
"bytes": "2013259"
},
{
"name": "Shell",
"bytes": "37724"
}
]
}
|
package us.misterwok.app.obj;
/**
* Created by hoyin on 17/4/14.
*/
/**
 * Immutable value object for a single entry in the app's left navigation menu:
 * a display name paired with an icon resource id.
 */
public class LeftMenuItem {

    /** Display label shown in the menu row. */
    String name;
    /** Icon resource identifier for the menu row. */
    int icon;

    /**
     * @param icon icon resource identifier
     * @param name display label
     */
    public LeftMenuItem(int icon, String name) {
        this.name = name;
        this.icon = icon;
    }

    public int getIcon() {
        return icon;
    }

    public String getName() {
        return name;
    }
}
|
{
"content_hash": "3e1bf2386746344a7338f9343aff3dab",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 48,
"avg_line_length": 15.391304347826088,
"alnum_prop": 0.5621468926553672,
"repo_name": "hoyin258/MisterWok",
"id": "1f6d80c0dfce5483582a9d43a3fb53b113206138",
"size": "354",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/src/main/java/us/misterwok/app/obj/LeftMenuItem.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Groovy",
"bytes": "2002"
},
{
"name": "Java",
"bytes": "1316074"
}
]
}
|
// Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the MIT Licence.
// See the LICENCE file in the repository root for full licence text.
using System;
using System.Collections.Generic;
using System.Linq;
using NUnit.Framework;
using osu.Framework.Graphics;
using osu.Framework.Graphics.Colour;
using osu.Framework.Graphics.Shapes;
using osu.Game.Screens.Menu;
using osuTK.Graphics;
namespace osu.Game.Tests.Visual
{
    /// <summary>
    /// Visual test for the main-menu button system: shows the buttons over a
    /// gradient background with the osu! logo attached, and adds one step per
    /// reachable <c>ButtonSystemState</c> so each state can be exercised manually.
    /// </summary>
    [TestFixture]
    public class TestCaseButtonSystem : OsuTestCase
    {
        // Types this test case exercises (used by the test browser framework).
        public override IReadOnlyList<Type> RequiredTypes => new[]
        {
            typeof(ButtonSystem),
            typeof(ButtonArea),
            typeof(Button)
        };

        public TestCaseButtonSystem()
        {
            OsuLogo logo;
            ButtonSystem buttons;

            Children = new Drawable[]
            {
                // Gradient backdrop so the buttons are visible against something.
                new Box
                {
                    Colour = ColourInfo.GradientVertical(Color4.Gray, Color4.WhiteSmoke),
                    RelativeSizeAxes = Axes.Both,
                },
                buttons = new ButtonSystem(),
                logo = new OsuLogo()
            };

            // The button system animates relative to the logo.
            buttons.SetOsuLogo(logo);

            // One step per ButtonSystemState, skipping the first enum value
            // (the initial state the system already starts in).
            foreach (var s in Enum.GetValues(typeof(ButtonSystemState)).OfType<ButtonSystemState>().Skip(1))
                AddStep($"State to {s}", () => buttons.State = s);
        }
    }
}
|
{
"content_hash": "89607ea6c2ece4ca24aa2e33a7bda895",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 108,
"avg_line_length": 28.8125,
"alnum_prop": 0.586406362979031,
"repo_name": "naoey/osu",
"id": "8ea2ab9dde240779100ebcb8019a242b22806b6e",
"size": "1385",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "osu.Game.Tests/Visual/TestCaseButtonSystem.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "4122627"
},
{
"name": "PowerShell",
"bytes": "2550"
},
{
"name": "Ruby",
"bytes": "6173"
},
{
"name": "Shell",
"bytes": "1031"
}
]
}
|
ACCEPTED
#### According to
Index Fungorum
#### Published in
null
#### Original name
Sphaeria comptoniae Westend.
### Remarks
null
|
{
"content_hash": "614abaaa44bb78f79ffc6f220486554e",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 28,
"avg_line_length": 10.23076923076923,
"alnum_prop": 0.7142857142857143,
"repo_name": "mdoering/backbone",
"id": "11be0b9ba7174d2e214991b99a7d7ecb747e0dc9",
"size": "214",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Fungi/Ascomycota/Sordariomycetes/Diaporthales/Gnomoniaceae/Cryptodiaporthe/Cryptodiaporthe aubertii/Cryptodiaporthe aubertii comptoniae/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": []
}
|
require 'spec_helper'
module Qernel
  # Verifies Slot.factory's type dispatch: the :elastic type yields a
  # Slot::Elastic subclass, while nil or unrecognized types fall back to a
  # plain Qernel::Slot.
  describe Slot do
    before :all do
      # Reset memoized state and point ETSource at the spec fixtures so the
      # factory runs against a known dataset.
      NastyCache.instance.expire!
      Etsource::Base.loader('spec/fixtures/etsource')
    end

    describe '.factory' do
      let(:node) { FactoryBot.build(:node) }

      context 'when type=nil' do
        it 'should be an ordinary slot' do
          slot = Qernel::Slot.factory(
            nil, 1, node,
            Qernel::Carrier.new(key: :electricity), :output)

          expect(slot).to be_a(Qernel::Slot)
          expect(slot).not_to be_a(Qernel::Slot::Elastic)
        end
      end

      context 'when type=invalid' do
        # Unrecognized types are silently treated like nil (no error raised).
        it 'should be an ordinary slot' do
          slot = Qernel::Slot.factory(:invalid,
                                      1, node, Qernel::Carrier.new(key: :loss), :input)

          expect(slot).to be_a(Qernel::Slot)
          expect(slot).not_to be_a(Qernel::Slot::Elastic)
        end
      end

      context 'when type=elastic' do
        it 'should be an elastic slot' do
          slot = Qernel::Slot.factory(:elastic,
                                      1, node, Qernel::Carrier.new(key: :loss), :output)

          expect(slot).to be_a(Qernel::Slot::Elastic)
        end
      end
    end
  end
end
|
{
"content_hash": "200eb148be5bb73f334e690b0c8e0b3c",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 62,
"avg_line_length": 26.333333333333332,
"alnum_prop": 0.5687763713080168,
"repo_name": "quintel/etengine",
"id": "7f73f97ed3a7ed9f163f2316426d76af3d695ead",
"size": "1185",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spec/models/qernel/slot_spec.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "8720"
},
{
"name": "CoffeeScript",
"bytes": "10941"
},
{
"name": "Dockerfile",
"bytes": "570"
},
{
"name": "HTML",
"bytes": "24245"
},
{
"name": "Haml",
"bytes": "73816"
},
{
"name": "JavaScript",
"bytes": "480986"
},
{
"name": "Ruby",
"bytes": "1556794"
},
{
"name": "SCSS",
"bytes": "5800"
},
{
"name": "Sass",
"bytes": "17259"
},
{
"name": "Shell",
"bytes": "2088"
}
]
}
|
<?xml version='1.0'?>
<root xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance'
xsi:noNamespaceSchemaLocation="NCName.xsd" >
<complexTest>
<!-- value=-foo -->
<comp_foo>-foo</comp_foo>
</complexTest>
<simpleTest>-foo</simpleTest>
</root>
|
{
"content_hash": "5b26e26219ff2bfb9e0c67701504661c",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 59,
"avg_line_length": 26.1,
"alnum_prop": 0.6436781609195402,
"repo_name": "titellus/schematron",
"id": "ab5f212a0e0dc8f5f4ee971d0d0cc662d43af53d",
"size": "261",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "xsd2sch/test/msData/datatypes/NCName006.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2398"
},
{
"name": "Java",
"bytes": "68299"
},
{
"name": "Red",
"bytes": "9003"
},
{
"name": "Shell",
"bytes": "593"
},
{
"name": "XProc",
"bytes": "16202"
},
{
"name": "XSLT",
"bytes": "1003199"
}
]
}
|
// Scales an ingredient's nutrients to a new quantity.
//
// Produces a detached copy (id: null) that remembers which ingredient it was
// derived from via parentId/parentQuantity; each nutrient is scaled by
// quantity / ingredient.quantity.
export const multiply = (ingredient, quantity) => {
  const factor = quantity / ingredient.quantity
  const scaled = {}
  for (const nutrient of ['calories', 'protein', 'carbs', 'fats']) {
    scaled[nutrient] = ingredient[nutrient] * factor
  }
  return {
    id: null,
    name: ingredient.name,
    parentId: ingredient.id,
    parentQuantity: ingredient.quantity,
    unit: ingredient.unit,
    ...scaled
  }
}
// Looks up an ingredient by id in the store's ingredient list.
//
// Returns the first ingredient whose id strictly equals ingredientID, or null
// when none matches. Uses Array.prototype.find so the scan short-circuits at
// the first hit, instead of filtering the entire list just to take element 0
// as the previous implementation did.
export const getById = (store, ingredientID) => {
  const ingredients = store.state.ingredients.ingredients
  const match = ingredients.find((i) => i.id === ingredientID)
  return match === undefined ? null : match
}
// Reducer that accumulates a food's nutrients into a running total.
// Mutates and returns the `totals` accumulator (suitable for Array#reduce).
export const nutrientReducer = (totals, food) => {
  for (const nutrient of ['calories', 'protein', 'carbs', 'fats']) {
    totals[nutrient] += food[nutrient]
  }
  return totals
}
// Builds a fresh, blank food record: no id or parent link, gram unit, and all
// quantities/nutrients zeroed. Returns a new object on every call.
export const getEmptyFood = () => ({
  id: null,
  name: '',
  unit: 'g',
  parentId: null,
  parentQuantity: null,
  quantity: 0,
  calories: 0,
  protein: 0,
  carbs: 0,
  fats: 0
})
|
{
"content_hash": "2d93d50f41d29be7e03005cf3a247434",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 55,
"avg_line_length": 22.78,
"alnum_prop": 0.6619841966637401,
"repo_name": "pgk/hmgf",
"id": "ff945dfc85ac97f8fdadef6bdee753978ecd81a1",
"size": "1140",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "hmgf_spa/src/models/Foods.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "196"
},
{
"name": "JavaScript",
"bytes": "25750"
},
{
"name": "Vue",
"bytes": "37204"
}
]
}
|
import React from 'react';
import PropTypes from 'prop-types';
import DialogView from './dialog-view';
import RepositoryHomeSelectionView from './repository-home-selection-view';
import DirectorySelect from './directory-select';
import RemoteConfigurationView from './remote-configuration-view';
import TabGroup from '../tab-group';
import {TabbableInput} from './tabbable';
import Octicon from '../atom/octicon';
// Static copy for the two modes this dialog is launched in; the key
// ('create' or 'publish') matches request.identifier in CreateDialogView.
const DIALOG_TEXT = {
  create: {
    heading: 'Create GitHub repository',
    hostPath: 'Destination path:',
    progressMessage: 'Creating repository...',
    acceptText: 'Create',
  },
  publish: {
    heading: 'Publish GitHub repository',
    hostPath: 'Local path:',
    progressMessage: 'Publishing repository...',
    acceptText: 'Publish',
  },
};
// Presentational component for the create/publish-GitHub-repository dialog.
// All state and callbacks arrive via props; mode-specific copy is chosen from
// DIALOG_TEXT by request.identifier.
export default class CreateDialogView extends React.Component {
  static propTypes = {
    // Relay
    user: PropTypes.object,

    // Model
    request: PropTypes.shape({
      identifier: PropTypes.oneOf(['create', 'publish']).isRequired,
      getParams: PropTypes.func.isRequired,
      cancel: PropTypes.func.isRequired,
    }).isRequired,
    error: PropTypes.instanceOf(Error),
    isLoading: PropTypes.bool.isRequired,
    inProgress: PropTypes.bool.isRequired,
    selectedOwnerID: PropTypes.string.isRequired,
    repoName: PropTypes.object.isRequired,
    selectedVisibility: PropTypes.oneOf(['PUBLIC', 'PRIVATE']).isRequired,
    localPath: PropTypes.object.isRequired,
    sourceRemoteName: PropTypes.object.isRequired,
    selectedProtocol: PropTypes.oneOf(['https', 'ssh']).isRequired,
    acceptEnabled: PropTypes.bool.isRequired,

    // Change callbacks
    didChangeOwnerID: PropTypes.func.isRequired,
    didChangeVisibility: PropTypes.func.isRequired,
    didChangeProtocol: PropTypes.func.isRequired,
    accept: PropTypes.func.isRequired,

    // Atom environment
    currentWindow: PropTypes.object.isRequired,
    workspace: PropTypes.object.isRequired,
    commands: PropTypes.object.isRequired,
    config: PropTypes.object.isRequired,
  }

  constructor(props) {
    super(props);

    // Single TabGroup shared by every tabbable control in the dialog so that
    // keyboard tab order spans all of them.
    this.tabGroup = new TabGroup();
  }

  render() {
    // Mode-specific strings: heading, progress message, accept-button label.
    const text = DIALOG_TEXT[this.props.request.identifier];

    return (
      <DialogView
        progressMessage={text.progressMessage}
        acceptEnabled={this.props.acceptEnabled}
        acceptText={text.acceptText}
        accept={this.props.accept}
        cancel={this.props.request.cancel}
        tabGroup={this.tabGroup}
        inProgress={this.props.inProgress}
        error={this.props.error}
        workspace={this.props.workspace}
        commands={this.props.commands}>

        <h1 className="github-Create-header">
          <Octicon icon="globe" />
          {text.heading}
        </h1>
        {/* Owner + repository name selection. */}
        <div className="github-Create-repo block">
          <RepositoryHomeSelectionView
            tabGroup={this.tabGroup}
            commands={this.props.commands}
            autofocusName
            user={this.props.user}
            nameBuffer={this.props.repoName}
            selectedOwnerID={this.props.selectedOwnerID}
            didChangeOwnerID={this.props.didChangeOwnerID}
            isLoading={this.props.isLoading}
          />
        </div>
        {/* Public/private radio pair; both feed this.didChangeVisibility. */}
        <div className="github-Create-visibility block">
          <span className="github-Create-visibilityHeading">Visibility:</span>
          <label className="github-Create-visibilityOption input-label">
            <TabbableInput
              tabGroup={this.tabGroup}
              commands={this.props.commands}
              className="input-radio"
              type="radio"
              name="visibility"
              value="PUBLIC"
              checked={this.props.selectedVisibility === 'PUBLIC'}
              onChange={this.didChangeVisibility}
            />
            <Octicon icon="globe" />
            Public
          </label>
          <label className="github-Create-visibilityOption input-label">
            <TabbableInput
              tabGroup={this.tabGroup}
              commands={this.props.commands}
              className="input-radio"
              type="radio"
              name="visibility"
              value="PRIVATE"
              checked={this.props.selectedVisibility === 'PRIVATE'}
              onChange={this.didChangeVisibility}
            />
            <Octicon icon="mirror-private" />
            Private
          </label>
        </div>
        {/* Local clone path; fixed (disabled) when publishing an existing repo. */}
        <div className="github-Create-localPath">
          <DirectorySelect
            tabGroup={this.tabGroup}
            commands={this.props.commands}
            currentWindow={this.props.currentWindow}
            buffer={this.props.localPath}
            disabled={this.props.request.identifier === 'publish'}
          />
        </div>
        <RemoteConfigurationView
          tabGroup={this.tabGroup}
          commands={this.props.commands}
          currentProtocol={this.props.selectedProtocol}
          didChangeProtocol={this.props.didChangeProtocol}
          sourceRemoteBuffer={this.props.sourceRemoteName}
        />
      </DialogView>
    );
  }

  componentDidMount() {
    // Focus the first tabbable control once everything is rendered.
    this.tabGroup.autofocus();
  }

  // Forwards the selected radio value ('PUBLIC' | 'PRIVATE') to the owner.
  didChangeVisibility = event => this.props.didChangeVisibility(event.target.value);
}
|
{
"content_hash": "e0f5a8ba5568ad4922bdf0c164439231",
"timestamp": "",
"source": "github",
"line_count": 157,
"max_line_length": 84,
"avg_line_length": 33.35031847133758,
"alnum_prop": 0.6361726508785333,
"repo_name": "atom/github",
"id": "77cf1d8806e31dfcd1301bbd845fedf8dfc608a3",
"size": "5236",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/views/create-dialog-view.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "991"
},
{
"name": "HTML",
"bytes": "568"
},
{
"name": "JavaScript",
"bytes": "3100517"
},
{
"name": "Less",
"bytes": "96627"
},
{
"name": "Shell",
"bytes": "3284"
}
]
}
|
using System;
using System.Collections.Generic;
namespace TurkishStemmer.Transitions
{
    using States;
    using Suffixes;

    /// <summary>
    /// An edge in the stemmer's state machine: applying <see cref="Suffix"/>
    /// to <see cref="Word"/> moves from <see cref="StartState"/> to
    /// <see cref="NextState"/>. All fields are immutable except the
    /// <see cref="Marked"/> bookkeeping flag.
    /// </summary>
    public class Transition
    {
        private readonly State startState;
        private readonly State nextState;
        private readonly string word;
        private readonly Suffix suffix;
        private bool marked;

        /// <param name="startState">State this transition leaves.</param>
        /// <param name="nextState">State this transition enters.</param>
        /// <param name="word">Word being processed along this transition.</param>
        /// <param name="suffix">Suffix applied by this transition.</param>
        /// <param name="marked">Initial value of the <see cref="Marked"/> flag.</param>
        public Transition(State startState, State nextState, string word, Suffix suffix, bool marked)
        {
            this.startState = startState;
            this.nextState = nextState;
            this.word = word;
            this.suffix = suffix;
            // BUG FIX: the 'marked' argument was previously ignored and the field
            // was unconditionally set to false. Callers that passed false (the
            // common case) are unaffected.
            this.marked = marked;
        }

        public State StartState
        {
            get { return startState; }
        }

        public State NextState
        {
            get { return nextState; }
        }

        public String Word
        {
            get { return word; }
        }

        public Suffix Suffix
        {
            get { return suffix; }
        }

        /// <summary>Mutable flag used by callers to mark visited transitions.</summary>
        public Boolean Marked
        {
            get { return marked; }
            set { marked = value; }
        }

        /// <summary>
        /// Lazily yields every transition in <paramref name="transitions"/> that
        /// shares this transition's start and next states (reference equality).
        /// </summary>
        public IEnumerable<Transition> SimilarTransitions(IEnumerable<Transition> transitions)
        {
            foreach (Transition transition in transitions)
            {
                if (this.startState == transition.startState &&
                    this.nextState == transition.nextState)
                {
                    yield return transition;
                }
            }
        }
    }
}
|
{
"content_hash": "5bc4bac89d0bceea13cc19de3290a6c0",
"timestamp": "",
"source": "github",
"line_count": 63,
"max_line_length": 101,
"avg_line_length": 24.46031746031746,
"alnum_prop": 0.5236859182349124,
"repo_name": "otuncelli/turkish-stemmer-csharp",
"id": "3be085777e14bd30386e4dd6e46e76c2994edcb6",
"size": "1543",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Transitions/Transition.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "49622"
}
]
}
|
// Delegate protocol for NotCompletedViewController.
@protocol NotCompletedDelegate
// NOTE(review): "ZhiFuBao" is pinyin for Alipay — presumably switches the UI
// into an Alipay-style presentation; confirm with delegate implementers.
-(void) ZhiFuBaoStyle;
@end

// Table-based screen listing maintenance work that has not been completed.
@interface NotCompletedViewController : SZTableViewController

// Maintenance entries backing the table.
@property (nonatomic, strong) NSArray *maintain;
// Weak delegate notified via NotCompletedDelegate callbacks.
@property (nonatomic, weak) id<NotCompletedDelegate> delegate;
// Visibility flag — TODO confirm what is hidden; not evident from this header.
@property (nonatomic , assign) BOOL isHidden;

// Returns the maintenance-unit item matching the scanned QR-code item, if any.
-(SZFinalMaintenanceUnitItem *)isExistenceWithQRCode:(SZQRCodeProcotolitem *)item;
@end
|
{
"content_hash": "e73e442dfbfb292eec4e54c1c2bf2e35",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 82,
"avg_line_length": 33.63636363636363,
"alnum_prop": 0.8162162162162162,
"repo_name": "sunzeboy/OTISPJ",
"id": "e3413d9d730f06a94f4dc56630fb6584447433ea",
"size": "578",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "OTIS_PJ/OTIS_PJ/Classes/Maintenance/Controller/NotCompletedViewController.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "6578"
},
{
"name": "HTML",
"bytes": "4710"
},
{
"name": "Objective-C",
"bytes": "5858662"
},
{
"name": "Ruby",
"bytes": "305"
},
{
"name": "Shell",
"bytes": "7953"
},
{
"name": "Swift",
"bytes": "15281"
}
]
}
|
// JScript.NET assembly metadata for the log4net ConsoleApp tutorial sample:
// title/description attributes plus (disabled) strong-name signing settings.
import System.Reflection;
import System.Runtime.CompilerServices;
[assembly: System.CLSCompliant(true)]
//
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
//
[assembly:AssemblyTitle("log4net - ConsoleApp")]
[assembly:AssemblyDescription("log4net ConsoleApp")]
[assembly:AssemblyConfiguration("")]
[assembly:AssemblyProduct("log4net - ConsoleApp")]
[assembly:AssemblyCulture("")]
//
// In order to sign your assembly you must specify a key to use. Refer to the
// Microsoft .NET Framework documentation for more information on assembly signing.
//
// Use the attributes below to control which key is used for signing.
//
// Notes:
// (*) If no key is specified, the assembly is not signed.
// (*) KeyName refers to a key that has been installed in the Crypto Service
// Provider (CSP) on your machine. KeyFile refers to a file which contains
// a key.
// (*) If the KeyFile and the KeyName values are both specified, the
// following processing occurs:
// (1) If the KeyName can be found in the CSP, that key is used.
// (2) If the KeyName does not exist and the KeyFile does exist, the key
// in the KeyFile is installed into the CSP and used.
// (*) In order to create a KeyFile, you can use the sn.exe (Strong Name) utility.
// When specifying the KeyFile, the location of the KeyFile should be
// relative to the project output directory which is
// %Project Directory%\obj\<configuration>. For example, if your KeyFile is
// located in the project directory, you would specify the AssemblyKeyFile
// attribute as [assembly: AssemblyKeyFile("..\\..\\mykey.snk")]
// (*) Delay Signing is an advanced option - see the Microsoft .NET Framework
// documentation for more information on this.
//
[assembly:AssemblyDelaySign(false)]
[assembly:AssemblyKeyFile("")]
// We do not use a CSP key for strong naming
// [assembly: AssemblyKeyName("")]
|
{
"content_hash": "c31724bf0b3ba9a1657d6f6ef03cb0d1",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 84,
"avg_line_length": 46.13333333333333,
"alnum_prop": 0.7148362235067437,
"repo_name": "babaru/log4net",
"id": "ccf309dca03a1f19c432cdba69321fe8e572ba21",
"size": "2691",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/sscli/1.0/Tutorials/ConsoleApp/js/src/AssemblyInfo.js",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ASP",
"bytes": "4796"
},
{
"name": "C#",
"bytes": "1998536"
},
{
"name": "C++",
"bytes": "7532"
},
{
"name": "JavaScript",
"bytes": "32440"
},
{
"name": "Shell",
"bytes": "2722"
},
{
"name": "Visual Basic",
"bytes": "51625"
}
]
}
|
layout: post
title: "Habit Burger: Charburger"
subtitle: "The ultimate combination of delicious burgers and great customer service"
date: 2016-07-16 12:00:00
author: "Philip Yoo"
header-img: "img/post-img/2016-07-16-habit-burger-header.jpg"
---
<!-- http://emoji.codes/# -->
<div class="convert-emoji">
<p>:smiley::smiley::hamburger::fries::beer::heavy_plus_sign::tomato::cheese::bread::cow::heavy_dollar_sign::eight::thumbsup:</p>
<hr/>
<p>:pager::tada::anguished::anguished::arrow_left::sweat::pray::ok_hand::ok_hand::hamburger::arrow_right::smiley::smiley::speech_balloon:</p>
<hr/>
<p>:tongue::100::100::fire::hamburger::tongue::thumbsup::fries::ok_hand::hot_pepper::hot_pepper::hamburger::x::scream:</p>
<hr/>
<p>:slight_smile::arrow_right::smiley::smiley::free::hamburger::exclamation::thumbsup::thumbsup::clap::clap::leftwards_arrow_with_hook:</p>
<hr/>
<p>:tongue::eight::cloud::seven::slight_smile::keycap_ten::dollar::nine::leftwards_arrow_with_hook::keycap_ten:</p>
<hr/>
<p>:four::four::heavy_division_sign::five::zero:</p>
</div>
<button type="button" class="btn" id="review-toggle">Read the Text Review</button>
<div class="text-review">
<img src="{{ site.baseurl }}/img/post-img/2016-07-16-habit-burger-img1.jpg" alt="Habit Burger Meal Image">
<p>Today, I had lunch at Habit Burger in Fair Lawn, NJ. I've been to a Habit Burger in a different location once before, and I enjoyed the food so much that I invited my friend to come along. I decided to go with my previous order: a Charburger with fries and a fountain drink. The burger comes with mayonnaise, pickles, tomato, lettuce, caramelized onions, cheese, toasted bun, and slightly-charred beef patty (hence its name). The total order came to around $8 with tax included, so it's very affordable. My friend had the same.</p>
<p>While we waited for our orders, the pager buzzed indicating it was time to pick up our delicious meal. Unfortunately, a different customer mistook our burgers for his and took them to his table before we arrived to the counter. The manager apologized and told us that he would remake our burgers. This was no problem since this would allow my friend and me to further catch up and converse. We thanked him and went back to our table. A few minutes later, our orders were brought to the table.</p>
<p>It was simply delicious. It had all the ingredients a burger should have with the slight charred flavor you get from grilling a patty over coals or charcoal. The fries were crispy but weren't out of this world. But then again, the burger is the star of the meal. There's also a pepper bar where you can pick up pepperoncini, sliced or whole, to accompany your meal. Before I knew it, the burger was gone even though I tried to savor each bite.</p>
<p>Later, an employee, apologizing for the mistake, came over with a coupon (pictured below) for a free Charburger. I was totally fine with the hiccup and it wasn't the restaurant's fault. However, this level of customer service and the quality of the food convinced me to return to Habit Burger once more with an empty stomach.</p>
<img src="{{ site.baseurl }}/img/post-img/2016-07-16-habit-burger-img2.jpg" alt="Habit Burger Coupon Image">
<h3>Rating:</h3>
<ul>
<li>Taste = 8/10</li>
<li>Atmosphere = 7/10</li>
<li>Customer Service = 10/10</li>
<li>Cost = 9/10</li>
<li>Making restaurant visits a habit = 10/10</li>
</ul>
<strong>Total: 44/50</strong>
</div>
|
{
"content_hash": "f62a63138be0f4418f2fe92b492df2e9",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 532,
"avg_line_length": 86.375,
"alnum_prop": 0.7319826338639652,
"repo_name": "philipsdyoo/emojireviews",
"id": "5d490345f17614dc2f4def54b98f8ca3441cffcb",
"size": "3459",
"binary": false,
"copies": "1",
"ref": "refs/heads/gh-pages",
"path": "_posts/2016-07-16-habit-burger.markdown",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "20034"
},
{
"name": "HTML",
"bytes": "17583"
},
{
"name": "JavaScript",
"bytes": "53914"
},
{
"name": "Ruby",
"bytes": "985"
}
]
}
|
package org.apache.activemq.artemis.tests.integration.journal;
import java.io.File;
import java.io.FilenameFilter;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.LockSupport;
import org.apache.activemq.artemis.api.core.Pair;
import org.apache.activemq.artemis.core.config.Configuration;
import org.apache.activemq.artemis.core.io.IOCallback;
import org.apache.activemq.artemis.core.io.SequentialFile;
import org.apache.activemq.artemis.core.io.SequentialFileFactory;
import org.apache.activemq.artemis.core.io.nio.NIOSequentialFileFactory;
import org.apache.activemq.artemis.core.journal.PreparedTransactionInfo;
import org.apache.activemq.artemis.core.journal.RecordInfo;
import org.apache.activemq.artemis.core.journal.impl.AbstractJournalUpdateTask;
import org.apache.activemq.artemis.core.journal.impl.JournalCompactor;
import org.apache.activemq.artemis.core.journal.impl.JournalFile;
import org.apache.activemq.artemis.core.journal.impl.JournalFileImpl;
import org.apache.activemq.artemis.core.journal.impl.JournalImpl;
import org.apache.activemq.artemis.core.message.impl.CoreMessage;
import org.apache.activemq.artemis.core.persistence.impl.journal.JournalStorageManager;
import org.apache.activemq.artemis.core.persistence.impl.journal.OperationContextImpl;
import org.apache.activemq.artemis.tests.unit.core.journal.impl.JournalImplTestBase;
import org.apache.activemq.artemis.tests.unit.core.journal.impl.fakes.SimpleEncoding;
import org.apache.activemq.artemis.tests.util.ActiveMQTestBase;
import org.apache.activemq.artemis.utils.ActiveMQThreadFactory;
import org.apache.activemq.artemis.utils.IDGenerator;
import org.apache.activemq.artemis.utils.actors.OrderedExecutorFactory;
import org.apache.activemq.artemis.utils.SimpleIDGenerator;
import org.apache.activemq.artemis.utils.critical.EmptyCriticalAnalyzer;
import org.jboss.logging.Logger;
import org.junit.After;
import org.junit.Assert;
import org.junit.Test;
public class NIOJournalCompactTest extends JournalImplTestBase {
private static final Logger logger = Logger.getLogger(NIOJournalCompactTest.class);
private static final int NUMBER_OF_RECORDS = 100;
IDGenerator idGenerator = new SimpleIDGenerator(100000);
// General tests
// =============
/**
 * Round-trips the compactor control file: writes lists of data files, new
 * files, and rename pairs via {@code AbstractJournalUpdateTask.writeControlFile},
 * reads them back with {@code JournalCompactor.readControlFile}, and verifies
 * every entry survives in order.
 */
@Test
public void testControlFile() throws Exception {
   ArrayList<JournalFile> dataFiles = new ArrayList<>();

   for (int i = 0; i < 5; i++) {
      SequentialFile file = fileFactory.createSequentialFile("file-" + i + ".tst");
      dataFiles.add(new JournalFileImpl(file, 0, JournalImpl.FORMAT_VERSION));
   }

   ArrayList<JournalFile> newFiles = new ArrayList<>();

   for (int i = 0; i < 3; i++) {
      SequentialFile file = fileFactory.createSequentialFile("file-" + i + ".tst.new");
      newFiles.add(new JournalFileImpl(file, 0, JournalImpl.FORMAT_VERSION));
   }

   ArrayList<Pair<String, String>> renames = new ArrayList<>();
   renames.add(new Pair<>("a", "b"));
   renames.add(new Pair<>("c", "d"));

   AbstractJournalUpdateTask.writeControlFile(fileFactory, dataFiles, newFiles, renames);

   ArrayList<String> strDataFiles = new ArrayList<>();
   ArrayList<String> strNewFiles = new ArrayList<>();
   ArrayList<Pair<String, String>> renamesRead = new ArrayList<>();

   Assert.assertNotNull(JournalCompactor.readControlFile(fileFactory, strDataFiles, strNewFiles, renamesRead));

   Assert.assertEquals(dataFiles.size(), strDataFiles.size());
   Assert.assertEquals(newFiles.size(), strNewFiles.size());
   Assert.assertEquals(renames.size(), renamesRead.size());

   // Data files come back with the same names, in the same order.
   Iterator<String> iterDataFiles = strDataFiles.iterator();
   for (JournalFile file : dataFiles) {
      Assert.assertEquals(file.getFile().getFileName(), iterDataFiles.next());
   }
   Assert.assertFalse(iterDataFiles.hasNext());

   // Same for the new files.
   Iterator<String> iterNewFiles = strNewFiles.iterator();
   for (JournalFile file : newFiles) {
      Assert.assertEquals(file.getFile().getFileName(), iterNewFiles.next());
   }
   Assert.assertFalse(iterNewFiles.hasNext());

   // And for the rename pairs.
   Iterator<Pair<String, String>> iterRename = renames.iterator();
   for (Pair<String, String> rename : renamesRead) {
      Pair<String, String> original = iterRename.next();
      Assert.assertEquals(original.getA(), rename.getA());
      Assert.assertEquals(original.getB(), rename.getB());
   }
   // BUG FIX: this previously re-asserted iterNewFiles (already exhausted
   // above) — a copy-paste slip. The rename iterator is what must be spent.
   Assert.assertFalse(iterRename.hasNext());
}
// The scenario tests below all delegate to internalCompactTest with 13 boolean
// flags selecting crash points, pending/delayed transactions, and concurrent
// operations. NOTE(review): internalCompactTest's signature is not visible in
// this chunk — confirm each flag's meaning against its parameter list before
// relying on the comments implied by the method names.
@Test
public void testCrashRenamingFiles() throws Exception {
   internalCompactTest(false, false, true, false, false, false, false, false, false, false, true, false, false);
}

@Test
public void testCrashDuringCompacting() throws Exception {
   internalCompactTest(false, false, true, false, false, false, false, false, false, false, false, false, false);
}

@Test
public void testCompactwithPendingXACommit() throws Exception {
   internalCompactTest(true, false, false, false, false, false, false, true, false, false, true, true, true);
}

@Test
public void testCompactwithPendingXAPrepareAndCommit() throws Exception {
   internalCompactTest(false, true, false, false, false, false, false, true, false, false, true, true, true);
}

@Test
public void testCompactwithPendingXAPrepareAndDelayedCommit() throws Exception {
   internalCompactTest(false, true, false, false, false, false, false, true, false, true, true, true, true);
}

@Test
public void testCompactwithPendingCommit() throws Exception {
   internalCompactTest(true, false, false, false, false, false, false, true, false, false, true, true, true);
}

@Test
public void testCompactwithDelayedCommit() throws Exception {
   internalCompactTest(false, true, false, false, false, false, false, true, false, true, true, true, true);
}

@Test
public void testCompactwithPendingCommitFollowedByDelete() throws Exception {
   internalCompactTest(false, false, false, false, false, false, false, true, true, false, true, true, true);
}

// Runs each concurrent-update scenario twice with a fresh journal in between
// (tearDown/setUp), varying the concurrent-delete flags on the second pass.
@Test
public void testCompactwithConcurrentUpdateAndDeletes() throws Exception {
   internalCompactTest(false, false, true, false, true, true, false, false, false, false, true, true, true);
   tearDown();
   setUp();
   internalCompactTest(false, false, true, false, true, false, true, false, false, false, true, true, true);
}

@Test
public void testCompactwithConcurrentDeletes() throws Exception {
   internalCompactTest(false, false, true, false, false, true, false, false, false, false, true, true, true);
   tearDown();
   setUp();
   internalCompactTest(false, false, true, false, false, false, true, false, false, false, true, true, true);
}
public void testCompactwithConcurrentDeletes() throws Exception {
internalCompactTest(false, false, true, false, false, true, false, false, false, false, true, true, true);
tearDown();
setUp();
internalCompactTest(false, false, true, false, false, false, true, false, false, false, true, true, true);
}
@Test
public void testCompactwithConcurrentUpdates() throws Exception {
internalCompactTest(false, false, true, false, true, false, false, false, false, false, true, true, true);
}
@Test
public void testCompactWithConcurrentAppend() throws Exception {
internalCompactTest(false, false, true, true, false, false, false, false, false, false, true, true, true);
}
/**
 * After deleting every record but one and compacting, a restart must load
 * exactly the single surviving record (id 2, "finalRecord").
 */
@Test
public void testCompactFirstFileReclaimed() throws Exception {
setup(2, 60 * 1024, false);
final byte recordType = (byte) 0;
journal = new JournalImpl(fileSize, minFiles, minFiles, 0, 0, fileFactory, filePrefix, fileExtension, maxAIO);
journal.start();
journal.loadInternalOnly();
// Record 1 is added, updated and deleted across a file boundary; only record 2 survives.
journal.appendAddRecord(1, recordType, "test".getBytes(), true);
journal.forceMoveNextFile();
journal.appendUpdateRecord(1, recordType, "update".getBytes(), true);
journal.appendDeleteRecord(1, true);
journal.appendAddRecord(2, recordType, "finalRecord".getBytes(), true);
// Churn: many short-lived records, each spanning a file boundary, to give compact work.
for (int i = 10; i < 100; i++) {
journal.appendAddRecord(i, recordType, ("tst" + i).getBytes(), true);
journal.forceMoveNextFile();
journal.appendUpdateRecord(i, recordType, ("uptst" + i).getBytes(), true);
journal.appendDeleteRecord(i, true);
}
journal.testCompact();
journal.stop();
List<RecordInfo> records1 = new ArrayList<>();
List<PreparedTransactionInfo> preparedRecords = new ArrayList<>();
// Restart and reload: only "finalRecord" must remain.
journal.start();
journal.load(records1, preparedRecords, null);
assertEquals(1, records1.size());
}
/**
 * A transaction prepared while a compact is in flight must survive a restart
 * and be committable during a later compact.
 */
@Test
public void testCompactPrepareRestart() throws Exception {
setup(2, 60 * 1024, false);
createJournal();
startJournal();
load();
startCompact();
// addTx + prepare happen while the compactor is paused.
addTx(1, 2);
prepare(1, new SimpleEncoding(10, (byte) 0));
finishCompact();
stopJournal();
createJournal();
startJournal();
loadAndCheck();
// Commit the prepared TX during a second compact, then verify a clean reload.
startCompact();
commit(1);
finishCompact();
journal.testCompact();
stopJournal();
createJournal();
startJournal();
loadAndCheck();
}
/**
 * Variant of testCompactPrepareRestart: the TX is prepared before any compact,
 * the journal is restarted, and the commit happens during a compact.
 */
@Test
public void testCompactPrepareRestart2() throws Exception {
setup(2, 60 * 1024, false);
createJournal();
startJournal();
load();
addTx(1, 2);
prepare(1, new SimpleEncoding(10, (byte) 0));
stopJournal();
createJournal();
startJournal();
loadAndCheck();
startCompact();
commit(1);
finishCompact();
journal.testCompact();
stopJournal();
createJournal();
startJournal();
loadAndCheck();
}
/**
 * Variant with a two-record prepared TX: prepared before the compact starts,
 * committed while it runs, verified after a restart.
 */
@Test
public void testCompactPrepareRestart3() throws Exception {
setup(2, 60 * 1024, false);
createJournal();
startJournal();
load();
addTx(1, 2, 3);
prepare(1, new SimpleEncoding(10, (byte) 0));
startCompact();
commit(1);
finishCompact();
journal.testCompact();
stopJournal();
createJournal();
startJournal();
loadAndCheck();
}
/**
 * A rolled-back transactional update on a live record must not corrupt the
 * record through a compact/restart cycle.
 */
@Test
public void testOnRollback() throws Exception {
setup(2, 60 * 1024, false);
createJournal();
startJournal();
journal.setAutoReclaim(false);
load();
add(1);
// TX 2 updates record 1, then is rolled back — record 1 must keep its original state.
updateTx(2, 1);
rollback(2);
journal.testCompact();
stopJournal();
startJournal();
loadAndCheck();
stopJournal();
}
/**
 * A committed TX whose records span two files must survive compacting when the
 * commit record sits in a later file than the adds.
 */
@Test
public void testCompactSecondFileReclaimed() throws Exception {
setup(2, 60 * 1024, false);
createJournal();
startJournal();
load();
addTx(1, 1, 2, 3, 4);
journal.forceMoveNextFile();
addTx(1, 5, 6, 7, 8);
commit(1);
journal.forceMoveNextFile();
journal.testCompact();
add(10);
stopJournal();
startJournal();
loadAndCheck();
stopJournal();
}
/**
 * A TX left open across two compacts and committed afterwards must still load
 * correctly on restart.
 */
@Test
public void testIncompleteTXDuringcompact() throws Exception {
setup(2, 60 * 1024, false);
createJournal();
startJournal();
load();
add(1);
// TX 2 is still pending while both compacts run.
updateTx(2, 1);
journal.testCompact();
journal.testCompact();
commit(2);
stopJournal();
startJournal();
loadAndCheck();
stopJournal();
}
/**
 * Parameterized compacting scenario shared by most tests in this class.
 * <p>
 * Flow: seed the journal (optionally with regular adds and/or pending
 * transactions), start a compact on a background thread that pauses inside
 * onCompactDone, run concurrent appends/updates/deletes while the compactor is
 * paused, resume it, then stop/reload and verify with loadAndCheck().
 * The createControlFile/deleteControlFile/renameFilesAfterCompacting flags
 * simulate crashes at the corresponding stages of the compact protocol by
 * overriding those hooks on the JournalImpl subclass below.
 */
private void internalCompactTest(final boolean preXA,
// prepare before compact
final boolean postXA,
// prepare after compact
final boolean regularAdd,
final boolean performAppend,
final boolean performUpdate,
boolean performDelete,
boolean performNonTransactionalDelete,
final boolean pendingTransactions,
final boolean deleteTransactRecords,
final boolean delayCommit,
final boolean createControlFile,
final boolean deleteControlFile,
final boolean renameFilesAfterCompacting) throws Exception {
// The two delete modes are mutually exclusive. NOTE(review): these guards are
// order-dependent — if both flags arrive true, the first guard clears
// performDelete and non-transactional delete wins.
if (performNonTransactionalDelete) {
performDelete = false;
}
if (performDelete) {
performNonTransactionalDelete = false;
}
setup(2, 60 * 4096, true);
ArrayList<Long> liveIDs = new ArrayList<>();
ArrayList<Pair<Long, Long>> transactedRecords = new ArrayList<>();
// latchDone: compactor signals it reached onCompactDone; latchWait: test releases it.
final CountDownLatch latchDone = new CountDownLatch(1);
final CountDownLatch latchWait = new CountDownLatch(1);
journal = new JournalImpl(fileSize, minFiles, minFiles, 0, 0, fileFactory, filePrefix, fileExtension, maxAIO) {
@Override
protected SequentialFile createControlFile(final List<JournalFile> files,
final List<JournalFile> newFiles,
final Pair<String, String> pair) throws Exception {
if (createControlFile) {
return super.createControlFile(files, newFiles, pair);
} else {
throw new IllegalStateException("Simulating a crash during compact creation");
}
}
@Override
protected void deleteControlFile(final SequentialFile controlFile) throws Exception {
if (deleteControlFile) {
super.deleteControlFile(controlFile);
}
}
@Override
protected void renameFiles(final List<JournalFile> oldFiles,
final List<JournalFile> newFiles) throws Exception {
if (renameFilesAfterCompacting) {
super.renameFiles(oldFiles, newFiles);
}
}
@Override
public void onCompactDone() {
// Hand control to the test thread, then block until it finishes its
// concurrent operations.
latchDone.countDown();
instanceLog.debug("Waiting on Compact");
try {
ActiveMQTestBase.waitForLatch(latchWait);
} catch (InterruptedException e) {
e.printStackTrace();
}
instanceLog.debug("Done");
}
};
journal.setAutoReclaim(false);
startJournal();
load();
long transactionID = 0;
// Phase 1: seed the journal with non-transactional and transactional add/update pairs.
if (regularAdd) {
for (int i = 0; i < NIOJournalCompactTest.NUMBER_OF_RECORDS / 2; i++) {
add(i);
if (i % 10 == 0 && i > 0) {
journal.forceMoveNextFile();
}
update(i);
}
for (int i = NIOJournalCompactTest.NUMBER_OF_RECORDS / 2; i < NIOJournalCompactTest.NUMBER_OF_RECORDS; i++) {
addTx(transactionID, i);
updateTx(transactionID, i);
if (i % 10 == 0) {
journal.forceMoveNextFile();
}
commit(transactionID++);
update(i);
}
}
// Phase 2: optionally leave 100 transactions pending (prepared if preXA) across the compact.
if (pendingTransactions) {
for (long i = 0; i < 100; i++) {
long recordID = idGenerator.generateID();
addTx(transactionID, recordID);
updateTx(transactionID, recordID);
if (preXA) {
prepare(transactionID, new SimpleEncoding(10, (byte) 0));
}
transactedRecords.add(new Pair<>(transactionID++, recordID));
}
}
// Phase 3: delete 9 out of every 10 seeded records; the rest are the "live" set.
if (regularAdd) {
for (int i = 0; i < NIOJournalCompactTest.NUMBER_OF_RECORDS; i++) {
if (!(i % 10 == 0)) {
delete(i);
} else {
liveIDs.add((long) i);
}
}
}
journal.forceMoveNextFile();
// Phase 4: run the compact on a background thread; it parks in onCompactDone.
Thread t = new Thread() {
@Override
public void run() {
try {
journal.testCompact();
} catch (Exception e) {
e.printStackTrace();
}
}
};
t.start();
ActiveMQTestBase.waitForLatch(latchDone);
// Phase 5: concurrent operations while the compactor is paused.
int nextID = NIOJournalCompactTest.NUMBER_OF_RECORDS;
if (performAppend) {
for (int i = 0; i < 50; i++) {
add(nextID++);
if (i % 10 == 0) {
journal.forceMoveNextFile();
}
}
for (int i = 0; i < 50; i++) {
// A Total new transaction (that was created after the compact started) to add new record while compacting
// is still working
addTx(transactionID, nextID++);
commit(transactionID++);
if (i % 10 == 0) {
journal.forceMoveNextFile();
}
}
}
if (performUpdate) {
int count = 0;
for (Long liveID : liveIDs) {
if (count++ % 2 == 0) {
update(liveID);
} else {
// A Total new transaction (that was created after the compact started) to update a record that is being
// compacted
updateTx(transactionID, liveID);
commit(transactionID++);
}
}
}
if (performDelete) {
int count = 0;
for (long liveID : liveIDs) {
if (count++ % 2 == 0) {
instanceLog.debug("Deleting no trans " + liveID);
delete(liveID);
} else {
instanceLog.debug("Deleting TX " + liveID);
// A Total new transaction (that was created after the compact started) to delete a record that is being
// compacted
deleteTx(transactionID, liveID);
commit(transactionID++);
}
instanceLog.debug("Deletes are going into " + ((JournalImpl) journal).getCurrentFile());
}
}
if (performNonTransactionalDelete) {
for (long liveID : liveIDs) {
delete(liveID);
}
}
// Resolve the pending TXs before releasing the compactor (unless delayCommit).
if (pendingTransactions && !delayCommit) {
for (Pair<Long, Long> tx : transactedRecords) {
if (postXA) {
prepare(tx.getA(), new SimpleEncoding(10, (byte) 0));
}
if (tx.getA() % 2 == 0) {
commit(tx.getA());
if (deleteTransactRecords) {
delete(tx.getB());
}
} else {
rollback(tx.getA());
}
}
}
/** Some independent adds and updates */
for (int i = 0; i < 1000; i++) {
long id = idGenerator.generateID();
add(id);
delete(id);
if (i % 100 == 0) {
journal.forceMoveNextFile();
}
}
journal.forceMoveNextFile();
// Phase 6: release the compactor and wait for it to finish.
latchWait.countDown();
t.join();
// With delayCommit, the pending TXs are resolved only after the compact completed.
if (pendingTransactions && delayCommit) {
for (Pair<Long, Long> tx : transactedRecords) {
if (postXA) {
prepare(tx.getA(), new SimpleEncoding(10, (byte) 0));
}
if (tx.getA() % 2 == 0) {
commit(tx.getA());
if (deleteTransactRecords) {
delete(tx.getB());
}
} else {
rollback(tx.getA());
}
}
}
long lastId = idGenerator.generateID();
add(lastId);
// Only run a second full compact when no crash was simulated.
if (createControlFile && deleteControlFile && renameFilesAfterCompacting) {
journal.testCompact();
}
journal.flush();
// Phase 7: two stop/reload cycles verify the journal state is consistent.
stopJournal();
createJournal();
startJournal();
loadAndCheck();
journal.forceMoveNextFile();
update(lastId);
stopJournal();
createJournal();
startJournal();
loadAndCheck();
}
/**
 * While a compact is paused: a record is added and committed by one TX,
 * updated and committed by another, then deleted. The delete must win through
 * compact and restart.
 */
@Test
public void testCompactAddAndUpdateFollowedByADelete() throws Exception {
setup(2, 60 * 1024, false);
SimpleIDGenerator idGen = new SimpleIDGenerator(1000);
createJournal();
journal.setAutoReclaim(false);
startJournal();
load();
long consumerTX = idGen.generateID();
long firstID = idGen.generateID();
long appendTX = idGen.generateID();
long addedRecord = idGen.generateID();
addTx(consumerTX, firstID);
startCompact();
// All of the following happen while the compactor is paused.
addTx(appendTX, addedRecord);
commit(appendTX);
updateTx(consumerTX, addedRecord);
commit(consumerTX);
delete(addedRecord);
finishCompact();
journal.forceMoveNextFile();
long newRecord = idGen.generateID();
add(newRecord);
update(newRecord);
journal.testCompact();
instanceLog.debug("Debug after compact\n" + journal.debug());
stopJournal();
createJournal();
startJournal();
loadAndCheck();
}
/**
 * Stress test: one thread compacts in a tight loop while the main thread
 * appends and commits transactional records.
 */
@Test
public void testStressAppends() throws Exception {
setup(2, 60 * 1024, true);
final int NUMBER_OF_RECORDS = 200;
SimpleIDGenerator idGen = new SimpleIDGenerator(1000);
createJournal();
journal.setAutoReclaim(false);
startJournal();
load();
AtomicBoolean running = new AtomicBoolean(true);
// Background compactor: loops until the main thread clears the flag.
Thread t = new Thread() {
@Override
public void run() {
while (running.get()) {
journal.testCompact();
}
}
};
t.start();
for (int i = 0; i < NUMBER_OF_RECORDS; i++) {
long tx = idGen.generateID();
addTx(tx, idGen.generateID());
// Small pause between add and commit to widen the race window with the compactor.
LockSupport.parkNanos(1000);
commit(tx);
}
running.set(false);
t.join(50000);
if (t.isAlive()) {
t.interrupt();
Assert.fail("supposed to join thread");
}
stopJournal();
createJournal();
startJournal();
loadAndCheck();
}
/**
 * Runs several full compacts between a transactional add and its commit; the
 * commit must still apply correctly on reload.
 */
@Test
public void testSimpleCommitCompactInBetween() throws Exception {
setup(2, 60 * 1024, false);
final int NUMBER_OF_RECORDS = 1;
SimpleIDGenerator idGen = new SimpleIDGenerator(1000);
createJournal();
journal.setAutoReclaim(false);
startJournal();
load();
for (int i = 0; i < NUMBER_OF_RECORDS; i++) {
long tx = idGen.generateID();
addTx(tx, idGen.generateID());
// Four compacts with the TX still pending.
journal.testCompact();
journal.testCompact();
journal.testCompact();
journal.testCompact();
logger.debug("going to commit");
commit(tx);
}
stopJournal();
createJournal();
startJournal();
loadAndCheck();
}
/**
 * Like testCompactAddAndUpdateFollowedByADelete, but the final delete is
 * transactional (its own TX committed before the compact resumes).
 */
@Test
public void testCompactAddAndUpdateFollowedByADelete2() throws Exception {
setup(2, 60 * 1024, false);
SimpleIDGenerator idGen = new SimpleIDGenerator(1000);
createJournal();
journal.setAutoReclaim(false);
startJournal();
load();
long firstID = idGen.generateID();
long consumerTX = idGen.generateID();
long appendTX = idGen.generateID();
long addedRecord = idGen.generateID();
addTx(consumerTX, firstID);
startCompact();
addTx(appendTX, addedRecord);
commit(appendTX);
updateTx(consumerTX, addedRecord);
commit(consumerTX);
// Transactional delete while the compactor is still paused.
long deleteTXID = idGen.generateID();
deleteTx(deleteTXID, addedRecord);
commit(deleteTXID);
finishCompact();
journal.forceMoveNextFile();
journal.testCompact();
stopJournal();
createJournal();
startJournal();
loadAndCheck();
}
/**
 * A TX that already updated an existing record before the compact also adds a
 * new record during it; after commit, the new record is deleted. Both outcomes
 * must survive compacting and restart.
 */
@Test
public void testCompactAddAndUpdateFollowedByADelete3() throws Exception {
setup(2, 60 * 1024, false);
SimpleIDGenerator idGen = new SimpleIDGenerator(1000);
createJournal();
journal.setAutoReclaim(false);
startJournal();
load();
long firstID = idGen.generateID();
long consumerTX = idGen.generateID();
long addedRecord = idGen.generateID();
add(firstID);
updateTx(consumerTX, firstID);
startCompact();
addTx(consumerTX, addedRecord);
commit(consumerTX);
delete(addedRecord);
finishCompact();
journal.testCompact();
stopJournal();
createJournal();
startJournal();
loadAndCheck();
}
/**
 * Variant where ALL transactional activity (both TXs, the update and the
 * delete) happens while the compactor is paused.
 */
@Test
public void testCompactAddAndUpdateFollowedByADelete4() throws Exception {
setup(2, 60 * 1024, false);
SimpleIDGenerator idGen = new SimpleIDGenerator(1000);
createJournal();
startJournal();
load();
long consumerTX = idGen.generateID();
long firstID = idGen.generateID();
long appendTX = idGen.generateID();
long addedRecord = idGen.generateID();
startCompact();
addTx(consumerTX, firstID);
addTx(appendTX, addedRecord);
commit(appendTX);
updateTx(consumerTX, addedRecord);
commit(consumerTX);
delete(addedRecord);
finishCompact();
journal.forceMoveNextFile();
long newRecord = idGen.generateID();
add(newRecord);
update(newRecord);
journal.testCompact();
stopJournal();
createJournal();
startJournal();
loadAndCheck();
}
/**
 * During a compact: one TX adds a record and rolls back, a second TX re-adds
 * the same id plus another and commits. Later updates and deletes (one of them
 * during a second compact) must all resolve correctly.
 */
@Test
public void testCompactAddAndUpdateFollowedByADelete6() throws Exception {
setup(2, 60 * 1024, false);
SimpleIDGenerator idGen = new SimpleIDGenerator(1000);
createJournal();
journal.setAutoReclaim(false);
startJournal();
load();
long tx0 = idGen.generateID();
long tx1 = idGen.generateID();
long add1 = idGen.generateID();
long add2 = idGen.generateID();
startCompact();
// tx0 adds add1 then rolls back; tx1 re-adds add1 (and add2) and commits.
addTx(tx0, add1);
rollback(tx0);
addTx(tx1, add1, add2);
commit(tx1);
finishCompact();
long tx2 = idGen.generateID();
updateTx(tx2, add1, add2);
commit(tx2);
delete(add1);
// add2 is deleted while a second compact is paused.
startCompact();
delete(add2);
finishCompact();
journal.forceMoveNextFile();
journal.testCompact();
stopJournal();
createJournal();
startJournal();
loadAndCheck();
}
/**
 * Deletes part of the live records while a compact ("cleanup") is paused and
 * part after it finishes, then asserts the delete (negative) count the current
 * file holds against the first data file.
 */
@Test
public void testDeleteWhileCleanup() throws Exception {
setup(2, 60 * 1024, false);
createJournal();
startJournal();
load();
for (int i = 0; i < 100; i++) {
add(i);
}
journal.forceMoveNextFile();
// Records 10..89 deleted up front; 0..9 and 90..99 remain live.
for (int i = 10; i < 90; i++) {
delete(i);
}
startCompact();
// Delete part of the live records while cleanup still working
for (int i = 1; i < 5; i++) {
delete(i);
}
finishCompact();
// Delete part of the live records after cleanup is done
for (int i = 5; i < 10; i++) {
delete(i);
}
// 9 deletes (ids 1..9) must be accounted against the first data file.
assertEquals(9, journal.getCurrentFile().getNegCount(journal.getDataFiles()[0]));
journal.forceMoveNextFile();
stopJournal();
createJournal();
startJournal();
loadAndCheck();
}
/**
 * During a compact, a TX that already added one record adds a second, commits,
 * and a second TX updates both. No delete here (intentionally commented out) —
 * both records must survive the compact and restart.
 */
@Test
public void testCompactAddAndUpdateFollowedByADelete5() throws Exception {
setup(2, 60 * 1024, false);
SimpleIDGenerator idGen = new SimpleIDGenerator(1000);
createJournal();
startJournal();
load();
long appendTX = idGen.generateID();
long appendOne = idGen.generateID();
long appendTwo = idGen.generateID();
long updateTX = idGen.generateID();
addTx(appendTX, appendOne);
startCompact();
addTx(appendTX, appendTwo);
commit(appendTX);
updateTx(updateTX, appendOne);
updateTx(updateTX, appendTwo);
commit(updateTX);
// delete(appendTwo);
finishCompact();
journal.testCompact();
stopJournal();
createJournal();
startJournal();
loadAndCheck();
}
/**
 * End-to-end compacting test: forces reclaiming with short-lived records, then
 * builds a mix of live and deleted records (every 10th survives) and compacts
 * twice before verifying the reload.
 */
@Test
public void testSimpleCompacting() throws Exception {
setup(2, 60 * 1024, false);
createJournal();
startJournal();
load();
int NUMBER_OF_RECORDS = 1000;
// add and remove some data to force reclaiming
{
ArrayList<Long> ids = new ArrayList<>();
for (int i = 0; i < NUMBER_OF_RECORDS; i++) {
long id = idGenerator.generateID();
ids.add(id);
add(id);
if (i > 0 && i % 100 == 0) {
journal.forceMoveNextFile();
}
}
for (Long id : ids) {
delete(id);
}
journal.forceMoveNextFile();
journal.checkReclaimStatus();
}
long transactionID = 0;
// First half: plain add/update pairs.
for (int i = 0; i < NUMBER_OF_RECORDS / 2; i++) {
add(i);
if (i % 10 == 0 && i > 0) {
journal.forceMoveNextFile();
}
update(i);
}
// Second half: transactional add/update, committed, then a plain update.
for (int i = NUMBER_OF_RECORDS / 2; i < NUMBER_OF_RECORDS; i++) {
addTx(transactionID, i);
updateTx(transactionID, i);
if (i % 10 == 0) {
journal.forceMoveNextFile();
}
commit(transactionID++);
update(i);
}
// Delete everything except every 10th record.
for (int i = 0; i < NUMBER_OF_RECORDS; i++) {
if (!(i % 10 == 0)) {
delete(i);
}
}
journal.forceMoveNextFile();
instanceLog.debug("Number of Files: " + journal.getDataFilesCount());
instanceLog.debug("Before compact ****************************");
instanceLog.debug(journal.debug());
instanceLog.debug("*****************************************");
journal.testCompact();
add(idGenerator.generateID());
journal.testCompact();
stopJournal();
createJournal();
startJournal();
loadAndCheck();
}
/**
 * Checks per-file live-size accounting: each data file should carry exactly
 * one record's size while its record is live, and zero after all records are
 * deleted — both before and after a restart.
 */
@Test
public void testLiveSize() throws Exception {
setup(2, 60 * 1024, true);
createJournal();
startJournal();
loadAndCheck();
ArrayList<Long> listToDelete = new ArrayList<>();
ArrayList<Integer> expectedSizes = new ArrayList<>();
for (int i = 0; i < 10; i++) {
long id = idGenerator.generateID();
listToDelete.add(id);
// One add per file, one update per file; each contributes
// recordLength + SIZE_ADD_RECORD + 1 of live size to its file.
expectedSizes.add(recordLength + JournalImpl.SIZE_ADD_RECORD + 1);
add(id);
journal.forceMoveNextFile();
update(id);
expectedSizes.add(recordLength + JournalImpl.SIZE_ADD_RECORD + 1);
journal.forceMoveNextFile();
}
JournalFile[] files = journal.getDataFiles();
stopJournal();
createJournal();
startJournal();
loadAndCheck();
journal.forceMoveNextFile();
JournalFile[] files2 = journal.getDataFiles();
// Live sizes must match before and after the restart.
Assert.assertEquals(files.length, files2.length);
for (int i = 0; i < files.length; i++) {
Assert.assertEquals(expectedSizes.get(i).intValue(), files[i].getLiveSize());
Assert.assertEquals(expectedSizes.get(i).intValue(), files2[i].getLiveSize());
}
for (long id : listToDelete) {
delete(id);
}
journal.forceMoveNextFile();
// After deleting everything, every data file's live size must drop to zero.
JournalFile[] files3 = journal.getDataFiles();
for (JournalFile file : files3) {
Assert.assertEquals(0, file.getLiveSize());
}
stopJournal();
createJournal();
startJournal();
loadAndCheck();
files3 = journal.getDataFiles();
for (JournalFile file : files3) {
Assert.assertEquals(0, file.getLiveSize());
}
}
/**
 * A TX whose adds sit in the first file is committed in the middle of later
 * activity; compacting must not lose the committed records.
 */
@Test
public void testCompactFirstFileWithPendingCommits() throws Exception {
setup(2, 60 * 1024, true);
createJournal();
startJournal();
loadAndCheck();
long tx = idGenerator.generateID();
for (int i = 0; i < 10; i++) {
addTx(tx, idGenerator.generateID());
}
journal.forceMoveNextFile();
ArrayList<Long> listToDelete = new ArrayList<>();
for (int i = 0; i < 10; i++) {
// Commit the pending TX halfway through these unrelated adds.
if (i == 5) {
commit(tx);
}
long id = idGenerator.generateID();
listToDelete.add(id);
add(id);
}
journal.forceMoveNextFile();
for (Long id : listToDelete) {
delete(id);
}
journal.forceMoveNextFile();
// This operation used to be journal.cleanup(journal.getDataFiles()[0]); when cleanup was still in place
journal.testCompact();
journal.checkReclaimStatus();
journal.testCompact();
stopJournal();
createJournal();
startJournal();
loadAndCheck();
}
/**
 * Like the variant above, but the first-file TX is ultimately rolled back
 * before the compact.
 */
@Test
public void testCompactFirstFileWithPendingCommits3() throws Exception {
setup(2, 60 * 1024, true);
createJournal();
startJournal();
loadAndCheck();
long tx = idGenerator.generateID();
for (int i = 0; i < 10; i++) {
addTx(tx, idGenerator.generateID());
}
journal.forceMoveNextFile();
ArrayList<Long> listToDelete = new ArrayList<>();
for (int i = 0; i < 10; i++) {
long id = idGenerator.generateID();
listToDelete.add(id);
add(id);
}
journal.forceMoveNextFile();
for (Long id : listToDelete) {
delete(id);
}
journal.forceMoveNextFile();
rollback(tx);
journal.forceMoveNextFile();
journal.checkReclaimStatus();
journal.testCompact();
stopJournal();
createJournal();
startJournal();
loadAndCheck();
}
/**
 * The pending first-file TX is committed while a compact is paused
 * (startCompact/finishCompact window).
 */
@Test
public void testCompactFirstFileWithPendingCommits2() throws Exception {
setup(2, 60 * 1024, true);
createJournal();
startJournal();
loadAndCheck();
long tx = idGenerator.generateID();
for (int i = 0; i < 10; i++) {
addTx(tx, idGenerator.generateID());
}
journal.forceMoveNextFile();
ArrayList<Long> listToDelete = new ArrayList<>();
for (int i = 0; i < 10; i++) {
long id = idGenerator.generateID();
listToDelete.add(id);
add(id);
}
journal.forceMoveNextFile();
for (Long id : listToDelete) {
delete(id);
}
journal.forceMoveNextFile();
startCompact();
instanceLog.debug("Committing TX " + tx);
commit(tx);
finishCompact();
journal.checkReclaimStatus();
journal.testCompact();
stopJournal();
createJournal();
startJournal();
loadAndCheck();
}
/**
 * While a compact is paused: the original TX (tx0) is rolled back, its record
 * ids are re-added under a new TX (tx1), and tx1 is committed. The re-added
 * records must survive.
 */
@Test
public void testCompactFirstFileWithPendingCommits4() throws Exception {
setup(2, 60 * 1024, true);
createJournal();
startJournal();
loadAndCheck();
long[] ids = new long[10];
long tx0 = idGenerator.generateID();
for (int i = 0; i < 10; i++) {
ids[i] = idGenerator.generateID();
addTx(tx0, ids[i]);
}
long tx1 = idGenerator.generateID();
journal.forceMoveNextFile();
ArrayList<Long> listToDelete = new ArrayList<>();
for (int i = 0; i < 10; i++) {
long id = idGenerator.generateID();
listToDelete.add(id);
add(id);
}
journal.forceMoveNextFile();
for (Long id : listToDelete) {
delete(id);
}
journal.forceMoveNextFile();
startCompact();
// NOTE(review): this log line is misleading — tx0 is rolled back next;
// tx1 is only committed further below.
instanceLog.debug("Committing TX " + tx1);
rollback(tx0);
for (int i = 0; i < 10; i++) {
addTx(tx1, ids[i]);
}
journal.forceMoveNextFile();
commit(tx1);
finishCompact();
journal.checkReclaimStatus();
journal.testCompact();
stopJournal();
createJournal();
startJournal();
loadAndCheck();
}
/**
 * NOTE(review): this test body is byte-identical to
 * testCompactFirstFileWithPendingCommits4 — likely an accidental duplicate;
 * confirm whether a distinct scenario was intended.
 */
@Test
public void testCompactFirstFileWithPendingCommits5() throws Exception {
setup(2, 60 * 1024, true);
createJournal();
startJournal();
loadAndCheck();
long[] ids = new long[10];
long tx0 = idGenerator.generateID();
for (int i = 0; i < 10; i++) {
ids[i] = idGenerator.generateID();
addTx(tx0, ids[i]);
}
long tx1 = idGenerator.generateID();
journal.forceMoveNextFile();
ArrayList<Long> listToDelete = new ArrayList<>();
for (int i = 0; i < 10; i++) {
long id = idGenerator.generateID();
listToDelete.add(id);
add(id);
}
journal.forceMoveNextFile();
for (Long id : listToDelete) {
delete(id);
}
journal.forceMoveNextFile();
startCompact();
instanceLog.debug("Committing TX " + tx1);
rollback(tx0);
for (int i = 0; i < 10; i++) {
addTx(tx1, ids[i]);
}
journal.forceMoveNextFile();
commit(tx1);
finishCompact();
journal.checkReclaimStatus();
journal.testCompact();
stopJournal();
createJournal();
startJournal();
loadAndCheck();
}
/**
 * Records committed via a TX are all deleted while a compact is paused;
 * nothing should survive the reload.
 */
@Test
public void testCompactFirstFileWithPendingCommits6() throws Exception {
setup(2, 60 * 1024, true);
createJournal();
startJournal();
loadAndCheck();
long[] ids = new long[10];
long tx0 = idGenerator.generateID();
for (int i = 0; i < 10; i++) {
ids[i] = idGenerator.generateID();
addTx(tx0, ids[i]);
}
commit(tx0);
startCompact();
for (int i = 0; i < 10; i++) {
delete(ids[i]);
}
finishCompact();
stopJournal();
createJournal();
startJournal();
loadAndCheck();
}
/**
 * Two transactional records spanning a file boundary are committed, then
 * deleted across further boundaries; a plain compact must reconcile it all.
 */
@Test
public void testCompactFirstFileWithPendingCommits7() throws Exception {
setup(2, 60 * 1024, true);
createJournal();
startJournal();
loadAndCheck();
long tx0 = idGenerator.generateID();
add(idGenerator.generateID());
long[] ids = new long[]{idGenerator.generateID(), idGenerator.generateID()};
addTx(tx0, ids[0]);
addTx(tx0, ids[1]);
journal.forceMoveNextFile();
commit(tx0);
journal.forceMoveNextFile();
delete(ids[0]);
delete(ids[1]);
journal.forceMoveNextFile();
journal.testCompact();
stopJournal();
createJournal();
startJournal();
loadAndCheck();
}
/**
 * Transactional counterpart of testLiveSize: per-file live sizes for addTx /
 * updateTx records (commit files expected at zero), checked across a restart
 * and after a transactional delete of everything.
 */
@Test
public void testLiveSizeTransactional() throws Exception {
setup(2, 60 * 1024, true);
createJournal();
startJournal();
loadAndCheck();
ArrayList<Long> listToDelete = new ArrayList<>();
ArrayList<Integer> expectedSizes = new ArrayList<>();
for (int i = 0; i < 10; i++) {
long tx = idGenerator.generateID();
long id = idGenerator.generateID();
listToDelete.add(id);
// Append Record Transaction will make the recordSize as exactly recordLength (discounting SIZE_ADD_RECORD_TX)
addTx(tx, id);
expectedSizes.add(recordLength);
journal.forceMoveNextFile();
updateTx(tx, id);
// uPDATE Record Transaction will make the recordSize as exactly recordLength (discounting SIZE_ADD_RECORD_TX)
expectedSizes.add(recordLength);
journal.forceMoveNextFile();
// The file holding only the commit record carries no live size.
expectedSizes.add(0);
commit(tx);
journal.forceMoveNextFile();
}
JournalFile[] files = journal.getDataFiles();
stopJournal();
createJournal();
startJournal();
loadAndCheck();
journal.forceMoveNextFile();
JournalFile[] files2 = journal.getDataFiles();
Assert.assertEquals(files.length, files2.length);
for (int i = 0; i < files.length; i++) {
Assert.assertEquals(expectedSizes.get(i).intValue(), files[i].getLiveSize());
Assert.assertEquals(expectedSizes.get(i).intValue(), files2[i].getLiveSize());
}
// Delete everything in one TX; every file's live size must drop to zero.
long tx = idGenerator.generateID();
for (long id : listToDelete) {
deleteTx(tx, id);
}
commit(tx);
journal.forceMoveNextFile();
JournalFile[] files3 = journal.getDataFiles();
for (JournalFile file : files3) {
Assert.assertEquals(0, file.getLiveSize());
}
stopJournal();
createJournal();
startJournal();
loadAndCheck();
files3 = journal.getDataFiles();
for (JournalFile file : files3) {
Assert.assertEquals(0, file.getLiveSize());
}
}
/**
 * Stress test against JournalStorageManager with sync disabled: a producer
 * thread stores 100-message transactions (plus one surviving message each
 * round) and schedules their deletion on completion, while a second thread
 * compacts every 500ms. After ~1s both threads are stopped, all executors are
 * drained, and no error may have been recorded.
 */
@Test
public void testStressDeletesNoSync() throws Throwable {
Configuration config = createBasicConfig().setJournalFileSize(100 * 1024).setJournalSyncNonTransactional(false).setJournalSyncTransactional(false).setJournalCompactMinFiles(0).setJournalCompactPercentage(0);
final AtomicInteger errors = new AtomicInteger(0);
final AtomicBoolean running = new AtomicBoolean(true);
final AtomicLong seqGenerator = new AtomicLong(1);
final ExecutorService executor = Executors.newCachedThreadPool(ActiveMQThreadFactory.defaultThreadFactory());
final ExecutorService ioexecutor = Executors.newCachedThreadPool(ActiveMQThreadFactory.defaultThreadFactory());
OrderedExecutorFactory factory = new OrderedExecutorFactory(executor);
OrderedExecutorFactory iofactory = new OrderedExecutorFactory(ioexecutor);
final ExecutorService deleteExecutor = Executors.newCachedThreadPool(ActiveMQThreadFactory.defaultThreadFactory());
final JournalStorageManager storage = new JournalStorageManager(config, EmptyCriticalAnalyzer.getInstance(), factory, iofactory);
storage.start();
try {
storage.loadInternalOnly();
((JournalImpl) storage.getMessageJournal()).setAutoReclaim(false);
final LinkedList<Long> survivingMsgs = new LinkedList<>();
Runnable producerRunnable = new Runnable() {
@Override
public void run() {
try {
while (running.get()) {
final long[] values = new long[100];
long tx = seqGenerator.incrementAndGet();
OperationContextImpl ctx = new OperationContextImpl(executor);
storage.setContext(ctx);
for (int i = 0; i < 100; i++) {
long id = seqGenerator.incrementAndGet();
values[i] = id;
CoreMessage message = new CoreMessage(id, 100);
message.getBodyBuffer().writeBytes(new byte[1024]);
storage.storeMessageTransactional(tx, message);
}
CoreMessage message = new CoreMessage(seqGenerator.incrementAndGet(), 100);
survivingMsgs.add(message.getMessageID());
logger.debug("Going to store " + message);
// This one will stay here forever
storage.storeMessage(message);
logger.debug("message stored " + message);
logger.debug("Going to commit " + tx);
storage.commit(tx);
logger.debug("Committed " + tx);
// Once the commit's I/O completes, delete the 100 transactional
// messages asynchronously so deletes race with the compactor.
ctx.executeOnCompletion(new IOCallback() {
@Override
public void onError(int errorCode, String errorMessage) {
}
@Override
public void done() {
deleteExecutor.execute(new Runnable() {
@Override
public void run() {
try {
for (long messageID : values) {
storage.deleteMessage(messageID);
}
} catch (Throwable e) {
e.printStackTrace();
errors.incrementAndGet();
}
}
});
}
});
}
} catch (Throwable e) {
e.printStackTrace();
errors.incrementAndGet();
}
}
};
Runnable compressRunnable = new Runnable() {
@Override
public void run() {
try {
while (running.get()) {
Thread.sleep(500);
instanceLog.debug("Compacting");
((JournalImpl) storage.getMessageJournal()).testCompact();
((JournalImpl) storage.getMessageJournal()).checkReclaimStatus();
}
} catch (Throwable e) {
e.printStackTrace();
errors.incrementAndGet();
}
}
};
Thread producerThread = new Thread(producerRunnable);
producerThread.start();
Thread compactorThread = new Thread(compressRunnable);
compactorThread.start();
Thread.sleep(1000);
running.set(false);
producerThread.join();
compactorThread.join();
deleteExecutor.shutdown();
// Fixed typo in the failure message ("failted" -> "failed").
assertTrue("delete executor failed to terminate", deleteExecutor.awaitTermination(30, TimeUnit.SECONDS));
storage.stop();
executor.shutdown();
assertTrue("executor failed to terminate", executor.awaitTermination(30, TimeUnit.SECONDS));
ioexecutor.shutdown();
assertTrue("ioexecutor failed to terminate", ioexecutor.awaitTermination(30, TimeUnit.SECONDS));
Assert.assertEquals(0, errors.get());
} catch (Throwable e) {
e.printStackTrace();
throw e;
} finally {
// Best-effort cleanup; shutdownNow is idempotent if shutdown already completed.
try {
storage.stop();
} catch (Exception e) {
e.printStackTrace();
}
executor.shutdownNow();
deleteExecutor.shutdownNow();
ioexecutor.shutdownNow();
}
}
/**
 * After each test, asserts that every journal file on disk has exactly the
 * configured fileSize, then delegates to the base-class teardown.
 */
@Override
@After
public void tearDown() throws Exception {
File testDir = new File(getTestDir());
File[] files = testDir.listFiles(new FilenameFilter() {
@Override
public boolean accept(File dir, String name) {
return name.startsWith(filePrefix) && name.endsWith(fileExtension);
}
});
// File.listFiles() returns null when the directory does not exist or is not
// readable; guard against the NPE so super.tearDown() still runs in that case.
if (files != null) {
for (File file : files) {
assertEquals("File " + file + " doesn't have the expected number of bytes", fileSize, file.length());
}
}
super.tearDown();
}
/**
 * Supplies the NIO-backed file factory used by this test class, rooted at the
 * per-test directory.
 */
@Override
protected SequentialFileFactory getFileFactory() throws Exception {
return new NIOSequentialFileFactory(getTestDirfile(), 1);
}
}
|
{
"content_hash": "f91dcc98b41963d3ebeb7d86e32c4b4f",
"timestamp": "",
"source": "github",
"line_count": 1856,
"max_line_length": 213,
"avg_line_length": 25.10075431034483,
"alnum_prop": 0.5763625045613583,
"repo_name": "kjniemi/activemq-artemis",
"id": "dee22fbf3599118a161ccedaf73efa3db36b6b42",
"size": "47386",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/integration-tests/src/test/java/org/apache/activemq/artemis/tests/integration/journal/NIOJournalCompactTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "11634"
},
{
"name": "C",
"bytes": "26250"
},
{
"name": "C++",
"bytes": "1197"
},
{
"name": "CMake",
"bytes": "4260"
},
{
"name": "CSS",
"bytes": "11732"
},
{
"name": "HTML",
"bytes": "19106"
},
{
"name": "Java",
"bytes": "23126786"
},
{
"name": "Shell",
"bytes": "31153"
}
]
}
|
using Griffin.Networking.Messages;
namespace Griffin.Networking
{
/// <summary>
/// Context assigned to each channel to be able to continue down the chain or to change direction in the pipe
/// </summary>
/// <remarks>
/// It depends on the type of channel how the processing is done. A <see cref="IDownstreamHandler"/> will let the
/// processing continue down the pipe when calling <see cref="SendDownstream"/> while it moves the message to the beginning
/// of the pipe if calling <see cref="SendUpstream"/> (to let all up stream handlers have a chance to process the message).
/// </remarks>
public interface IPipelineHandlerContext
{
/// <summary>
/// Send the message up towards the client
/// </summary>
/// <param name="message">Message to process</param>
void SendUpstream(IPipelineMessage message);
/// <summary>
/// Send the message down towards the channel
/// </summary>
/// <param name="message">Message to process</param>
void SendDownstream(IPipelineMessage message);
}
}
|
{
"content_hash": "1289b3a08635ff902fe967efec8a6038",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 127,
"avg_line_length": 42.03703703703704,
"alnum_prop": 0.6405286343612335,
"repo_name": "NanaYngvarrdottir/Software-Testing",
"id": "8b6fc2adc72247969cd911805d64a3361b275621",
"size": "1135",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Dependencies/Griffin.Networking/IPipelineHandlerContext.cs",
"mode": "33188",
"license": "bsd-3-clause",
"language": []
}
|
package password
import (
"crypto/rand"
"crypto/sha512"
"crypto/subtle"
"encoding/base64"
"fmt"
"strconv"
"strings"
"golang.org/x/crypto/pbkdf2"
)
const (
saltLen = 16
keyLen = 64
encodePassword = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789./"
)
// Iterations is the number of iterations used when hashing a password
var Iterations = 25000
// Hash returns a hashed string password in PBKDF2/SHA-512 format:
// "$pbkdf2-sha512$<iterations>$<salt>$<key>". (The previous comment said
// SHA256, but the code derives the key with sha512.New.)
func Hash(password string) (string, error) {
	// Unpadded base64 using the crypt(3)-style "./" alphabet defined above.
	encoding := base64.NewEncoding(encodePassword).WithPadding(base64.NoPadding)
	randByte := make([]byte, saltLen)
	_, err := rand.Read(randByte)
	if err != nil {
		return "", err
	}
	// Note: the base64-ENCODED salt bytes are fed into PBKDF2, so the
	// effective salt is exactly the textual form stored in the hash string.
	salt := make([]byte, encoding.EncodedLen(saltLen))
	encoding.Encode(salt, randByte)
	dk := pbkdf2.Key([]byte(password), salt, Iterations, keyLen, sha512.New)
	hashedPW := fmt.Sprintf("$pbkdf2-sha512$%d$%s$%s", Iterations, string(salt), encoding.EncodeToString(dk))
	return hashedPW, nil
}
// Verify returns true if the given password matches with the given hash.
// The expected hash format is "$pbkdf2-sha512$<iterations>$<salt>$<key>",
// as produced by Hash. Malformed hashes (too few '$'-separated fields or a
// non-positive/non-numeric iteration count) are rejected with false instead
// of panicking with an index-out-of-range, as the previous version did.
func Verify(password, hash string) bool {
	encoding := base64.NewEncoding(encodePassword).WithPadding(base64.NoPadding)
	split := strings.Split(strings.TrimPrefix(hash, "$"), "$")
	// Fields: [0]=algorithm id, [1]=iterations, [2]=salt, [3]=derived key.
	if len(split) < 4 {
		return false
	}
	iter, err := strconv.Atoi(split[1])
	if err != nil || iter <= 0 {
		return false
	}
	salt := []byte(split[2])
	// Re-derive the key with the stored parameters and compare the complete
	// hash strings in constant time to avoid leaking timing information.
	dk := pbkdf2.Key([]byte(password), salt, iter, keyLen, sha512.New)
	hashedPW := fmt.Sprintf("$pbkdf2-sha512$%d$%s$%s", iter, string(salt), encoding.EncodeToString(dk))
	return subtle.ConstantTimeCompare([]byte(hash), []byte(hashedPW)) == 1
}
|
{
"content_hash": "905302e1206ed3a2d71a1ff1844c229c",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 106,
"avg_line_length": 27.473684210526315,
"alnum_prop": 0.710727969348659,
"repo_name": "hexya-erp/hexya",
"id": "20171e1e87175c6837c6cf2422b76ea02c9e2f82",
"size": "1820",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/tools/password/password.go",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Go",
"bytes": "1077198"
},
{
"name": "Shell",
"bytes": "274"
}
]
}
|
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0 Frameset//EN""http://www.w3.org/TR/REC-html40/frameset.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<!-- Generated by javadoc on Mon May 07 18:52:23 PDT 2001 -->
<TITLE>
HTTPClient API: Interface CookiePolicyHandler
</TITLE>
<LINK REL ="stylesheet" TYPE="text/css" HREF="../stylesheet.css" TITLE="Style">
</HEAD>
<BODY BGCOLOR="white">
<!-- ========== START OF NAVBAR ========== -->
<A NAME="navbar_top"><!-- --></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_top_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../overview-summary.html"><FONT ID="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT ID="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Class</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-tree.html"><FONT ID="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../deprecated-list.html"><FONT ID="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../index-all.html"><FONT ID="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../help-doc.html"><FONT ID="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../HTTPClient/AuthorizationPrompter.html"><B>PREV CLASS</B></A>
<A HREF="../HTTPClient/FilenameMangler.html"><B>NEXT CLASS</B></A></FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../index.html" TARGET="_top"><B>FRAMES</B></A>
<A HREF="CookiePolicyHandler.html" TARGET="_top"><B>NO FRAMES</B></A></FONT></TD>
</TR>
<TR>
<TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2">
SUMMARY: INNER | FIELD | CONSTR | <A HREF="#method_summary">METHOD</A></FONT></TD>
<TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2">
DETAIL: FIELD | CONSTR | <A HREF="#method_detail">METHOD</A></FONT></TD>
</TR>
</TABLE>
<!-- =========== END OF NAVBAR =========== -->
<HR>
<!-- ======== START OF CLASS DATA ======== -->
<H2>
<FONT SIZE="-1">
HTTPClient</FONT>
<BR>
Interface CookiePolicyHandler</H2>
<HR>
<DL>
<DT>public interface <B>CookiePolicyHandler</B></DL>
<P>
This is the interface that a cookie policy handler must implement. A
policy handler allows you to control which cookies are accepted and
which are sent.
<P>
<DL>
<DT><B>Since: </B><DD>V0.3</DD>
<DT><B>Version: </B><DD>0.3-3 06/05/2001</DD>
<DT><B>Author: </B><DD>Ronald Tschalär</DD>
<DT><B>See Also: </B><DD><A HREF="../HTTPClient/CookieModule.html#setCookiePolicyHandler(HTTPClient.CookiePolicyHandler)"><CODE>CookieModule.setCookiePolicyHandler(HTTPClient.CookiePolicyHandler)</CODE></A></DL>
<HR>
<P>
<!-- ======== INNER CLASS SUMMARY ======== -->
<!-- =========== FIELD SUMMARY =========== -->
<!-- ======== CONSTRUCTOR SUMMARY ======== -->
<!-- ========== METHOD SUMMARY =========== -->
<A NAME="method_summary"><!-- --></A>
<TABLE BORDER="1" CELLPADDING="3" CELLSPACING="0" WIDTH="100%">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TD COLSPAN=2><FONT SIZE="+2">
<B>Method Summary</B></FONT></TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> boolean</CODE></FONT></TD>
<TD><CODE><B><A HREF="../HTTPClient/CookiePolicyHandler.html#acceptCookie(HTTPClient.Cookie, HTTPClient.RoRequest, HTTPClient.RoResponse)">acceptCookie</A></B>(<A HREF="../HTTPClient/Cookie.html">Cookie</A> cookie,
<A HREF="../HTTPClient/RoRequest.html">RoRequest</A> req,
<A HREF="../HTTPClient/RoResponse.html">RoResponse</A> resp)</CODE>
<BR>
This method is called for each cookie that a server tries to set via
the Set-Cookie header.</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> boolean</CODE></FONT></TD>
<TD><CODE><B><A HREF="../HTTPClient/CookiePolicyHandler.html#sendCookie(HTTPClient.Cookie, HTTPClient.RoRequest)">sendCookie</A></B>(<A HREF="../HTTPClient/Cookie.html">Cookie</A> cookie,
<A HREF="../HTTPClient/RoRequest.html">RoRequest</A> req)</CODE>
<BR>
This method is called for each cookie that is eligible for sending
with a request (according to the matching rules for the path, domain,
protocol, etc).</TD>
</TR>
</TABLE>
<P>
<!-- ============ FIELD DETAIL =========== -->
<!-- ========= CONSTRUCTOR DETAIL ======== -->
<!-- ============ METHOD DETAIL ========== -->
<A NAME="method_detail"><!-- --></A>
<TABLE BORDER="1" CELLPADDING="3" CELLSPACING="0" WIDTH="100%">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TD COLSPAN=1><FONT SIZE="+2">
<B>Method Detail</B></FONT></TD>
</TR>
</TABLE>
<A NAME="acceptCookie(HTTPClient.Cookie, HTTPClient.RoRequest, HTTPClient.RoResponse)"><!-- --></A><H3>
acceptCookie</H3>
<PRE>
public boolean <B>acceptCookie</B>(<A HREF="../HTTPClient/Cookie.html">Cookie</A> cookie,
<A HREF="../HTTPClient/RoRequest.html">RoRequest</A> req,
<A HREF="../HTTPClient/RoResponse.html">RoResponse</A> resp)</PRE>
<DL>
<DD>This method is called for each cookie that a server tries to set via
the Set-Cookie header. This enables you to implement your own
cookie acceptance policy.<DD><DL>
<DT><B>Parameters:</B><DD><CODE>cookie</CODE> - the cookie in question<DD><CODE>req</CODE> - the request sent which prompted the response<DD><CODE>resp</CODE> - the response which is trying to set the cookie<DT><B>Returns:</B><DD>true if this cookie should be accepted, false if it is to
be rejected.</DL>
</DD>
</DL>
<HR>
<A NAME="sendCookie(HTTPClient.Cookie, HTTPClient.RoRequest)"><!-- --></A><H3>
sendCookie</H3>
<PRE>
public boolean <B>sendCookie</B>(<A HREF="../HTTPClient/Cookie.html">Cookie</A> cookie,
<A HREF="../HTTPClient/RoRequest.html">RoRequest</A> req)</PRE>
<DL>
<DD>This method is called for each cookie that is eligible for sending
with a request (according to the matching rules for the path, domain,
protocol, etc). This enables you to control the sending of cookies.<DD><DL>
<DT><B>Parameters:</B><DD><CODE>cookie</CODE> - the cookie in question<DD><CODE>req</CODE> - the request this cookie is to be sent with<DT><B>Returns:</B><DD>true if this cookie should be sent, false if it is to be
ignored.</DL>
</DD>
</DL>
<!-- ========= END OF CLASS DATA ========= -->
<HR>
<!-- ========== START OF NAVBAR ========== -->
<A NAME="navbar_bottom"><!-- --></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_bottom_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../overview-summary.html"><FONT ID="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT ID="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Class</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-tree.html"><FONT ID="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../deprecated-list.html"><FONT ID="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../index-all.html"><FONT ID="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../help-doc.html"><FONT ID="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../HTTPClient/AuthorizationPrompter.html"><B>PREV CLASS</B></A>
<A HREF="../HTTPClient/FilenameMangler.html"><B>NEXT CLASS</B></A></FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../index.html" TARGET="_top"><B>FRAMES</B></A>
<A HREF="CookiePolicyHandler.html" TARGET="_top"><B>NO FRAMES</B></A></FONT></TD>
</TR>
<TR>
<TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2">
SUMMARY: INNER | FIELD | CONSTR | <A HREF="#method_summary">METHOD</A></FONT></TD>
<TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2">
DETAIL: FIELD | CONSTR | <A HREF="#method_detail">METHOD</A></FONT></TD>
</TR>
</TABLE>
<!-- =========== END OF NAVBAR =========== -->
<HR>
</BODY>
</HTML>
|
{
"content_hash": "589ebf9556afbefb57a564d108b44499",
"timestamp": "",
"source": "github",
"line_count": 211,
"max_line_length": 287,
"avg_line_length": 44.62085308056872,
"alnum_prop": 0.6371747211895911,
"repo_name": "unrelatedlabs/java-wemo-bridge",
"id": "feadf9435175f9714fd4797eb47be064bcf37209",
"size": "9415",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "target/HTTPClient/doc/api/HTTPClient/CookiePolicyHandler.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "1559"
},
{
"name": "Groovy",
"bytes": "1157"
},
{
"name": "HTML",
"bytes": "1316028"
},
{
"name": "Java",
"bytes": "860256"
},
{
"name": "JavaScript",
"bytes": "3237"
},
{
"name": "Makefile",
"bytes": "3715"
},
{
"name": "Perl",
"bytes": "218"
},
{
"name": "Shell",
"bytes": "721"
}
]
}
|
var fs = require('fs');
var path = require('path');
var merge = require('merge-stream');
var gulp = require('gulp');
var rename = require('gulp-rename');
var uglify = require('gulp-uglify');
var concat = require('gulp-concat');
var autoprefixer = require('gulp-autoprefixer');
var plumber = require('gulp-plumber');
var sourcemaps = require('gulp-sourcemaps');
var sass = require('gulp-sass');
var zip = require('gulp-zip');
var del = require('del');
var processhtml = require('gulp-processhtml');
var browserSync = require('browser-sync').create();
var reload = browserSync.reload;
var generateIndex = require('./generate-index');
var gulpCopy = require('gulp-copy');
var gulpSequence = require('gulp-sequence');
// Pug Templates
var pug = require('gulp-pug');
// Image compression
var imagemin = require('gulp-imagemin');
var imageminPngquant = require('imagemin-pngquant');
var imageminJpegRecompress = require('imagemin-jpeg-recompress');
// File paths
var DIST_PATH = 'dist';
var SRC_PATH = 'src/banner_list';
var ZIP_PATH = 'dist';
var FOLDERS = getFolders(SRC_PATH);
var GSCONTROL = 'src/scripts/GSDevTools.min.js';
// get banners dirs for process
function getFolders(dir) {
return fs.readdirSync(dir)
.filter(function(file) {
return fs.statSync(path.join(dir, file)).isDirectory();
});
}
// Static server
// Serve the built banners from DIST_PATH on port 9000 with live reload.
gulp.task('server', function() {
browserSync.init({
notify: false,
port: 9000,
// Delay reloads so file writes from other tasks settle first.
reloadDelay: 1000,
server: {
baseDir: DIST_PATH
}
});
});
// SASS
// Compile each banner's .scss into a single compressed styles.css and stream
// the result to BrowserSync.
gulp.task('sass', function() {
    console.log('>>>> STARTING STYLES TASK 🖌 <<<<');
    var cssTask = FOLDERS.map(function(FOLDERS) {
        return gulp.src(path.join(SRC_PATH, FOLDERS, '/scss/*.scss'))
            .pipe(plumber(function(err) {
                console.log('>>>> STYLES TASK ERROR 💔 <<<<');
                console.log(err);
                this.emit('end');
            }))
            .pipe(sourcemaps.init())
            // Compile Sass FIRST, then post-process the resulting CSS.
            // Previously autoprefixer ran before sass(), i.e. on the raw
            // .scss source, so it had no effect on the compiled output.
            .pipe(sass({
                outputStyle: 'compressed'
            }))
            .pipe(autoprefixer({
                browsers: ['last 3 versions']
            }))
            .pipe(rename("styles.css"))
            .pipe(gulp.dest(DIST_PATH + '/' + FOLDERS + '/css'))
            .pipe(browserSync.stream());
    });
    return (cssTask);
});
// Scripts
// Minify and concatenate each banner's JS into a single main.js bundle.
gulp.task('scripts', function() {
    console.log('>>>> STARTING SCRIPTS TASK <<<<');
    var jsTask = FOLDERS.map(function(folder) {
        var scriptsGlob = path.join(SRC_PATH, folder, '/js/*.js');
        return gulp.src(scriptsGlob)
            .pipe(plumber(function(err) {
                console.log('SCRIPTS TASK ERROR');
                console.log(err);
                this.emit('end');
            }))
            .pipe(sourcemaps.init())
            .pipe(uglify())
            .pipe(concat('main.js'))
            .pipe(gulp.dest(DIST_PATH + '/' + folder + '/js'));
    });
    return jsTask;
});
// Development variant of 'scripts': prepends the GSDevTools control script
// (GSCONTROL) to each banner bundle so animations can be scrubbed locally.
gulp.task('scriptsDev', function () {
console.log('>>>> STARTING SCRIPTS TASK <<<<');
var jsTask = FOLDERS.map(function (FOLDERS) {
return gulp.src([GSCONTROL, path.join(SRC_PATH, FOLDERS, '/js/*.js')])
.pipe(plumber(function (err) {
console.log('SCRIPTS TASK ERROR');
console.log(err);
this.emit('end');
}))
.pipe(sourcemaps.init())
.pipe(uglify())
.pipe(concat('main.js'))
.pipe(gulp.dest(DIST_PATH + '/' + FOLDERS + '/js'))
});
return (jsTask);
});
// Images
// Compress every image of each banner into dist/<folder>/images.
gulp.task('images', function() {
    console.log('>>>> STARTING IMAGES TASK 🖼 <<<<');
    var imgsTask = FOLDERS.map(function(folder) {
        var imagesGlob = path.join(SRC_PATH, folder, '/img/*');
        return gulp.src(imagesGlob)
            .pipe(imagemin())
            .pipe(gulp.dest(DIST_PATH + '/' + folder + '/images'));
    });
    return imgsTask;
});
// Pug
// Render each banner's Pug templates to a minified index.html in its
// dist/<folder> directory.
gulp.task('templates', function() {
console.log('>>>> STARTING TEMPLATES TASK 📄 <<<<');
var pugTask = FOLDERS.map(function(FOLDERS) {
return gulp.src(path.join(SRC_PATH, FOLDERS, '/pug/*.pug'))
.pipe(pug({
pretty: false
}))
.pipe(rename("index.html"))
.pipe(gulp.dest(DIST_PATH + '/' + FOLDERS));
});
return (pugTask);
})
// Delete dest folder before build
// Synchronous removal of the whole DIST_PATH tree.
gulp.task('clean', function() {
console.log('>>>> STARTING DEL TASK ✂️ <<<<');
return del.sync([
DIST_PATH
]);
});
// Zip banners per folder
// Archive each built banner directory into dist/ZIPS/<folder>.zip.
gulp.task('zips', function() {
    console.log('>>>> STARTING ZIPS TASK 🗜 <<<<');
    var zipTask = FOLDERS.map(function(folder) {
        var bannerGlob = path.join(ZIP_PATH, folder, '**/*');
        return gulp.src(bannerGlob)
            .pipe(zip(folder + '.zip'))
            .pipe(gulp.dest(ZIP_PATH + '/' + 'ZIPS'));
    });
    return zipTask;
});
// Generate the dynamic index.html listing all banners.
gulp.task('processHtml', function() {
    var templateData = { data: { bannerList: generateIndex() } };
    return gulp.src('src/index.html')
        .pipe(processhtml(templateData))
        .pipe(gulp.dest(DIST_PATH + '/'));
});
// Copy static folder for distribute
// prefix: 1 strips the leading path segment ("src") so files land directly
// under DIST_PATH.
gulp.task('copy', function () {
return gulp.src('src/static/*')
.pipe(gulpCopy(DIST_PATH + '/', {
prefix: 1,
}));
})
// Tasks
// 'build'    : full production build (includes 'copy' of static assets).
// 'scaffold' : development build using 'scriptsDev' (bundles GSDevTools).
gulp.task('build', ['images', 'templates', 'sass', 'scripts', 'processHtml', 'copy'], function() {});
gulp.task('scaffold', ['images', 'templates', 'sass', 'scriptsDev', 'processHtml'], function() {});
// Watch sources and re-run the matching task, then reload the browser.
gulp.task('watch', ['scaffold', 'server'], function() {
console.log('>>>> STARTING WATCH TASK 👀 <<<<');
gulp.watch(SRC_PATH + '/**/scss/*.scss', ['sass', browserSync.reload]);
gulp.watch(SRC_PATH + '/**/img/*.{png,jpeg,jpg,svg,gif}', ['images', browserSync.reload]);
gulp.watch(SRC_PATH + '/**/pug/*.pug', ['templates', reload]);
gulp.watch(SRC_PATH + '/**/js/*.js', ['scriptsDev', browserSync.reload]);
});
// Default: clean then start the dev server + watchers.
gulp.task('default', ['clean'], function() {
gulp.start('watch');
});
// Production pipeline: clean, build, then zip.
// NOTE(review): the 4s setTimeout is a workaround -- it waits for the build's
// file writes to flush before zipping, since 'build' does not signal true
// completion here; confirm before replacing it with a proper dependency.
// The `cb` parameter is accepted but never invoked.
gulp.task('distribute', function(cb) {
gulpSequence('clean', 'build', function () {
setTimeout(function (){
gulp.start('zips');
}, 4000);
});
});
|
{
"content_hash": "a334c2c93b196def88aa8c07ca6b2a55",
"timestamp": "",
"source": "github",
"line_count": 233,
"max_line_length": 101,
"avg_line_length": 26.476394849785407,
"alnum_prop": 0.5564921381099044,
"repo_name": "hangarlabs/base-html5-banner",
"id": "6bd85b950c8e69d28b4e876f8be31a8ab8e903b7",
"size": "6191",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gulpfile.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "10727"
},
{
"name": "HTML",
"bytes": "8285"
},
{
"name": "JavaScript",
"bytes": "18400"
}
]
}
|
// Auto-generated snippet file; the [START]/[END] region tags below are
// consumed by documentation tooling and must not be altered.
namespace Google.Cloud.Dialogflow.V2Beta1.Snippets
{
    // [START dialogflow_v2beta1_generated_EntityTypes_GetEntityType_sync_flattened1_resourceNames]
    using Google.Cloud.Dialogflow.V2Beta1;
    public sealed partial class GeneratedEntityTypesClientSnippets
    {
        /// <summary>Snippet for GetEntityType</summary>
        /// <remarks>
        /// This snippet has been automatically generated and should be regarded as a code template only.
        /// It will require modifications to work:
        /// - It may require correct/in-range values for request initialization.
        /// - It may require specifying regional endpoints when creating the service client as shown in
        /// https://cloud.google.com/dotnet/docs/reference/help/client-configuration#endpoint.
        /// </remarks>
        public void GetEntityType1ResourceNames()
        {
            // Create client
            EntityTypesClient entityTypesClient = EntityTypesClient.Create();
            // Initialize request argument(s)
            EntityTypeName name = EntityTypeName.FromProjectEntityType("[PROJECT]", "[ENTITY_TYPE]");
            // Make the request
            EntityType response = entityTypesClient.GetEntityType(name);
        }
    }
    // [END dialogflow_v2beta1_generated_EntityTypes_GetEntityType_sync_flattened1_resourceNames]
}
|
{
"content_hash": "d5a9e96b663d1ed36301d3bba729f353",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 105,
"avg_line_length": 49.888888888888886,
"alnum_prop": 0.6889383815887157,
"repo_name": "googleapis/google-cloud-dotnet",
"id": "543e5ce4b59951a5f7272640d44fcb6a0fcb52c3",
"size": "1969",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "apis/Google.Cloud.Dialogflow.V2Beta1/Google.Cloud.Dialogflow.V2Beta1.GeneratedSnippets/EntityTypesClient.GetEntityType1ResourceNamesSnippet.g.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "767"
},
{
"name": "C#",
"bytes": "319820004"
},
{
"name": "Dockerfile",
"bytes": "3415"
},
{
"name": "PowerShell",
"bytes": "3303"
},
{
"name": "Python",
"bytes": "2744"
},
{
"name": "Shell",
"bytes": "65881"
}
]
}
|
/*
 * dscommons.h
 *
 * Created on: Jan 25, 2016
 * Author: pvan
 *
 * Shared type definitions and macro-generated function-pointer typedefs used
 * by the data-structure modules (hash maps, lists, etc.) in this library.
 */
#ifndef INCLUDE_DATASTRUCT_DSCOMMONS_H_
#define INCLUDE_DATASTRUCT_DSCOMMONS_H_
#ifdef __cplusplus
extern "C" {
#endif
#include <sys/kclib.h>
#include <stdint.h>
#include <stdbool.h>
#include <stddef.h>
#include <stdlib.h>
/* Width of the register-sized unsigned integer; override before inclusion
 * to target a different word size. */
#ifndef REGISTER_UINT_TYPE
#define REGISTER_UINT_TYPE uint64_t
#endif
typedef REGISTER_UINT_TYPE ruint_t;
typedef char* string;
/* Search predicate: returns true when `element` matches; `passed_data` is an
 * opaque pointer forwarded from the caller. */
#define SEARCH_PREDICATE_TYPE(type) search_predicate_ ## type ## _t
#define SEARCH_PREDICATE(type) typedef bool (* SEARCH_PREDICATE_TYPE(type))(type element, void* passed_data)
/* Three-way comparison function (qsort-style contract presumed -- confirm). */
#define CMP_FUNC_TYPE(type) cmp_func_ ## type ## _t
#define CMP_FUNC(type) typedef int (*CMP_FUNC_TYPE(type))(const type a, const type b)
/* Deallocators invoked when a container releases a key or value. */
#define KEY_DEALLOC_FUNC_TYPE(type) key_deallocator_func ## type ## _t
#define KEY_DEALLOC_FUNC(type) typedef void (*KEY_DEALLOC_FUNC_TYPE(type))(type a)
#define VALUE_DEALLOC_FUNC_TYPE(type) value_deallocator_func ## type ## _t
#define VALUE_DEALLOC_FUNC(type) typedef void (*VALUE_DEALLOC_FUNC_TYPE(type))(type a)
/* Hash and key-equality functions for hash-based containers. */
#define HASH_FUNC_TYPE(type) hash_function_ ## type ## _t
#define HASH_FUNC(type) typedef uint32_t (* HASH_FUNC_TYPE(type)) (type key)
#define EQ_FUNC_TYPE(type) eq_function_ ## type ## _t
#define EQ_FUNC(type) typedef bool (* EQ_FUNC_TYPE(type)) (type key_a, type key_b)
/* Concrete func definitions */
uint32_t uint32_hash_function(uint32_t integer);
bool uint32_eq_function(uint32_t a, uint32_t b);
uint32_t uint64_hash_function(uint64_t integer);
bool uint64_eq_function(uint64_t a, uint64_t b);
uint32_t string_hash_function(string string);
bool string_eq_function(string a, string b);
/* Doubly-linked chain element embedding a `type` payload; `pq` is a 32-bit
 * field used by priority-queue code -- presumably the priority; confirm. */
#define CE_TYPE(type) __chained_element_ ## type ## _s
#define CE_TYPEDEF(type) \
struct CE_TYPE(type) { \
struct CE_TYPE(type)* previous; \
struct CE_TYPE(type)* next; \
type data; \
uint32_t pq; \
}
/* Accessor that maps a payload back to its enclosing chain element. */
#define CE_ELEMENT_GETTER_TYPE(type) ce_getter_ ## type ## _t
#define CE_ELEMENT_GETTER(type) typedef struct CE_TYPE(type)* (*CE_ELEMENT_GETTER_TYPE(type))(type data)
#ifdef __cplusplus
}
#endif
#endif /* INCLUDE_DATASTRUCT_DSCOMMONS_H_ */
|
{
"content_hash": "144dca60335c6ae176cdf57ec4d4f4ab",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 108,
"avg_line_length": 33.296875,
"alnum_prop": 0.7038948850305021,
"repo_name": "Enerccio/kclib",
"id": "28cccd5b4f1fa5893094b8195c9ac11081fb7c4e",
"size": "2131",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "include/datastruct/dscommons.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "4852"
},
{
"name": "C",
"bytes": "178637"
},
{
"name": "C++",
"bytes": "10678"
},
{
"name": "Makefile",
"bytes": "7078"
},
{
"name": "Objective-C",
"bytes": "9390"
}
]
}
|
jQuery(document).ready(function(){
    // Lazy-load the player: the real media URL is stashed in the
    // "future_src" attribute and only copied into "src" on click.
    var playButtons = jQuery('.<%= params.slugifiedContentName %>Bloc .infos .play');
    playButtons.on('click', function() {
        var playerFrame = jQuery(this).parent().parent().siblings('.player');
        playerFrame.attr('src', playerFrame.attr('future_src'));
        playerFrame.show();
    });
});
|
{
"content_hash": "2101342c0a5620f62f5d806a70993d5d",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 82,
"avg_line_length": 37.57142857142857,
"alnum_prop": 0.6539923954372624,
"repo_name": "Inouit/generator-wecce",
"id": "023b9fdc1db8e850f75f1d729e7704c9a1c8a62a",
"size": "263",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/templates/clickToPlay/assets/_clickToPlay.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "686"
},
{
"name": "JavaScript",
"bytes": "17307"
},
{
"name": "TypeScript",
"bytes": "4107"
}
]
}
|
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<!-- Generated by javadoc (build 1.6.0_17) on Mon Apr 12 12:09:32 PDT 2010 -->
<TITLE>
Uses of Package org.apache.hadoop.chukwa.util (chukwa 0.4.0 API)
</TITLE>
<META NAME="date" CONTENT="2010-04-12">
<LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../stylesheet.css" TITLE="Style">
<SCRIPT type="text/javascript">
function windowTitle()
{
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Package org.apache.hadoop.chukwa.util (chukwa 0.4.0 API)";
}
}
</SCRIPT>
<NOSCRIPT>
</NOSCRIPT>
</HEAD>
<BODY BGCOLOR="white" onload="windowTitle();">
<HR>
<!-- ========= START OF TOP NAVBAR ======= -->
<A NAME="navbar_top"><!-- --></A>
<A HREF="#skip-navbar_top" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_top_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Class</FONT> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../index.html?org/apache/hadoop/chukwa/util/package-use.html" target="_top"><B>FRAMES</B></A>
<A HREF="package-use.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_top"></A>
<!-- ========= END OF TOP NAVBAR ========= -->
<HR>
<CENTER>
<H2>
<B>Uses of Package<br>org.apache.hadoop.chukwa.util</B></H2>
</CENTER>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Packages that use <A HREF="../../../../../org/apache/hadoop/chukwa/util/package-summary.html">org.apache.hadoop.chukwa.util</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#org.apache.hadoop.chukwa.database"><B>org.apache.hadoop.chukwa.database</B></A></TD>
<TD> </TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#org.apache.hadoop.chukwa.util"><B>org.apache.hadoop.chukwa.util</B></A></TD>
<TD> </TD>
</TR>
</TABLE>
<P>
<A NAME="org.apache.hadoop.chukwa.database"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Classes in <A HREF="../../../../../org/apache/hadoop/chukwa/util/package-summary.html">org.apache.hadoop.chukwa.util</A> used by <A HREF="../../../../../org/apache/hadoop/chukwa/database/package-summary.html">org.apache.hadoop.chukwa.database</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../../../org/apache/hadoop/chukwa/util/class-use/DatabaseWriter.html#org.apache.hadoop.chukwa.database"><B>DatabaseWriter</B></A></B>
<BR>
</TD>
</TR>
</TABLE>
<P>
<A NAME="org.apache.hadoop.chukwa.util"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Classes in <A HREF="../../../../../org/apache/hadoop/chukwa/util/package-summary.html">org.apache.hadoop.chukwa.util</A> used by <A HREF="../../../../../org/apache/hadoop/chukwa/util/package-summary.html">org.apache.hadoop.chukwa.util</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../../../org/apache/hadoop/chukwa/util/class-use/ConstRateValidator.ByteRange.html#org.apache.hadoop.chukwa.util"><B>ConstRateValidator.ByteRange</B></A></B>
<BR>
</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../../../org/apache/hadoop/chukwa/util/class-use/DaemonWatcher.html#org.apache.hadoop.chukwa.util"><B>DaemonWatcher</B></A></B>
<BR>
</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../../../org/apache/hadoop/chukwa/util/class-use/Filter.html#org.apache.hadoop.chukwa.util"><B>Filter</B></A></B>
<BR>
</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../../../org/apache/hadoop/chukwa/util/class-use/PidFile.html#org.apache.hadoop.chukwa.util"><B>PidFile</B></A></B>
<BR>
</TD>
</TR>
</TABLE>
<P>
<HR>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<A NAME="navbar_bottom"><!-- --></A>
<A HREF="#skip-navbar_bottom" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_bottom_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Class</FONT> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../index.html?org/apache/hadoop/chukwa/util/package-use.html" target="_top"><B>FRAMES</B></A>
<A HREF="package-use.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_bottom"></A>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<HR>
Copyright © ${year} The Apache Software Foundation
</BODY>
</HTML>
|
{
"content_hash": "8cd65dda380773437344c67edd1c91f3",
"timestamp": "",
"source": "github",
"line_count": 207,
"max_line_length": 258,
"avg_line_length": 42.14009661835749,
"alnum_prop": 0.6287974320761206,
"repo_name": "intel-hadoop/HiTune",
"id": "b36a4d220d1b234a95d2891c5f5595abc9acd7b9",
"size": "8723",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "chukwa-hitune-dist/docs/api/org/apache/hadoop/chukwa/util/package-use.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "2358813"
},
{
"name": "JavaScript",
"bytes": "815932"
},
{
"name": "Perl",
"bytes": "9633"
},
{
"name": "Racket",
"bytes": "3118"
},
{
"name": "Shell",
"bytes": "67322"
},
{
"name": "XML",
"bytes": "1335"
}
]
}
|
/* jslint node: true */
"use strict";
// Monkeypatches Date.prototype.format used by decycleWalker's date branch.
require("date.format");
// Module-level guard: this file patches the global JSON object, so make
// sure the patch is applied only once per process.
global.modulesCache = global.modulesCache || {};
if(global.modulesCache['json.decycled']){
return;
} else {
global.modulesCache['json.decycled'] = true;
}
// Public API: attach the three entry points to the global JSON object.
JSON.decycler = decycler;
JSON.decycled = decycled;
JSON.revive = revive;
/**
 * Produce a cycle-free, JSON-safe copy of `val`.
 * `config` may be a plain options object or a number, which is shorthand
 * for `{deep: n}` (maximum recursion depth; defaults to 10).
 */
function decycler(val, config) {
  var options;
  if (typeof config === 'number') {
    options = { deep: config };
  } else {
    options = config || {};
  }
  if (!options.deep) {
    options.deep = 10;
  }
  return decycleWalker([], [], val, options);
}
/**
 * Stringify `val` after removing cycles. Accepts the same numeric
 * shorthand as decycler(); `config.spacer` is forwarded to JSON.stringify.
 * On stringification failure the error object itself is returned.
 */
function decycled(val, config) {
  var options = (typeof config === 'number') ? { deep: config } : (config || {});
  var safe = decycler(val, options);
  try {
    return JSON.stringify(safe, undefined, options.spacer);
  } catch (err) {
    return err;
  }
}
// Tag patterns recognized by revive(); they must match the tagged string
// forms emitted by decycleWalker() below.
var reviverDate = /^\[Date:((\d{4})\/(\d{2})\/(\d{2}) (\d{2})\:(\d{2})\:(\d{2})\:(\d{4})) UTC\]$/;
var reviverRegExp = /^\[Regexp:\/(.+)\/\]$/;
var reviverError = /^\[Error:([\W\w]+)\]$/;
var reviverFunction = /^\[Function:(.+)\]$/;
/**
 * Parse JSON produced by decycled(), reconstructing Date, RegExp and Error
 * values from their tagged string forms. Functions are only revived when
 * `functions` is truthy (they are evaluated with `new Function`, which is
 * unsafe on untrusted input). Parse errors are returned, not thrown.
 */
function revive(val, functions) {
  try {
    return JSON.parse(val, reviver);
  } catch (err) {
    return err;
  }
  function reviver(key, value) {
    if (reviverDate.test(value)) {
      var m = reviverDate.exec(value);
      var utc = Date.UTC(m[2], parseInt(m[3], 10) - 1, m[4], m[5], m[6], m[7], m[8]);
      return new Date(utc);
    }
    if (reviverRegExp.test(value)) {
      // NOTE(review): regex flags are not round-tripped by the tag pattern.
      return new RegExp(reviverRegExp.exec(value)[1]);
    }
    if (reviverError.test(value)) {
      var text = reviverError.exec(value)[1];
      // First line of the serialized text is the message; the whole text
      // becomes the stack when the platform exposes one.
      var error = new Error(text.split('\n')[0]);
      if (error.stack) {
        error.stack = text;
      }
      return error;
    }
    if (functions && reviverFunction.test(value)) {
      var body = reviverFunction.exec(value)[1];
      try {
        return (new Function("return " + body + ";"))();
      } catch (evalError) {
        return evalError;
      }
    }
    return value;
  }
}
/**
 * Recursively copy `val`, replacing cycles, over-deep subtrees and
 * non-JSON-safe values (Date, RegExp, Error, Function) with tagged strings
 * that revive() can reconstruct.
 *
 * @param parents ancestor objects on the current walk (cycle detection)
 * @param path    keys/indices leading to `val`, parallel to `parents`
 * @param val     value being copied
 * @param config  {deep, dates, regexps, errors, functions} flags
 */
function decycleWalker(parents,path,val,config){
if(['undefined','number','boolean','string'].indexOf(typeof val)>=0 || val === null){
// Primitives are JSON-safe as-is.
return val;
} else if(typeof val === 'object' && val.constructor === Date){
// Relies on the date.format monkeypatch required at module load.
return config.dates!==false?'[Date:'+val.format('{YYYY}/{MM}/{DD} {hh}:{mm}:{ss}:{mss} UTC',true)+']':val;
//val.format('{YYYY}/{MM}/{DD} {hh}:{mm}:{ss} UTC:·{params.tz>=0?"+"+params.tz:params.tz}·');
} else if(typeof val === 'object' && val.constructor === RegExp){
return config.regexps!==false?'[Regexp:'+val.toString()+']':val;
} else if(typeof val === 'object' && val.constructor && typeof val.constructor.name === 'string' && val.constructor.name.slice(-5)==='Error'){
var stack = (val.stack || '').split('\n').slice(1);
var message = (val.message || val.toString());
// BUGFIX: `stack` is an array; the old `message+"\n"+stack` concatenation
// comma-joined the frames, so revive() could not restore the stack text.
// Join with newlines to match the split('\n') performed by revive().
var error = message+"\n"+stack.join('\n');
return config.errors!==false?'[Error:'+error+']':val;
} else if(typeof val === 'object'){
if(parents.indexOf(val) >= 0){
// Cycle: tag with the dotted path to the repeated ancestor.
var point = path.slice(0,parents.indexOf(val)).join('.');
return '[Circular'+(point?':'+point:'')+']';
} else {
var copy,i,k,l;
if(val.constructor && typeof val.constructor.name === 'string' && val.constructor.name.slice(-5)==='Array'){
if(parents.length>=config.deep){
// Depth limit reached: summarize instead of recursing.
return '[Array:'+val.constructor.name+']';
} else {
copy = [];
for(i=0,l=val.length;i<l;i++){
copy[i]=decycleWalker(parents.concat([val]),path.concat(i),val[i],config);
}
return copy;
}
} else {
if(parents.length>=config.deep){
return '[Object:'+(val.constructor && val.constructor.name?val.constructor.name:'Object')+']';
} else {
copy = {};
for(i=0,k=Object.keys(val),l=k.length;i<l;i++){
copy[k[i]]=decycleWalker(parents.concat([val]),path.concat([k[i]]),val[k[i]],config);
}
return copy;
}
}
}
} else if(typeof val === 'function') {
// Functions are dropped (undefined) unless explicitly enabled.
return config.functions===true?'[Function:'+val.toString()+']':undefined;
} else {
return val.toString();
}
}
|
{
"content_hash": "5bf6f747b97a068431efef053d4681b9",
"timestamp": "",
"source": "github",
"line_count": 116,
"max_line_length": 144,
"avg_line_length": 34.14655172413793,
"alnum_prop": 0.5703105276445342,
"repo_name": "bifuer/JSON.decycled",
"id": "41baea14dc89f4f795bacbcde0595ff9a9d590a7",
"size": "3963",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "index.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "7635"
}
]
}
|
# Record container boot time (useful when debugging startup ordering).
date > /tmp/boottime.txt
# First-boot only: generate wp-config.php if it does not exist yet.
if [ ! -f /mnt/hhvm/wp-config.php ]; then
WORDPRESS_DB="wordpress"
# Random 12-char passwords for the MySQL root user and the WordPress DB user.
MYSQL_PASSWORD=`pwgen -c -n -1 12`
WORDPRESS_PASSWORD=`pwgen -c -n -1 12`
#This is so the passwords show up in logs.
echo mysql root password: $MYSQL_PASSWORD
echo wordpress password: $WORDPRESS_PASSWORD
echo $MYSQL_PASSWORD > /mysql-pw.txt
echo $WORDPRESS_PASSWORD > /wordpress-pw.txt
# Fill the sample config: DB name/user/password, plus a fresh 65-char secret
# for every WordPress auth key and salt (one pwgen call per line).
sed -e "s/database_name_here/$WORDPRESS_DB/
s/username_here/$WORDPRESS_DB/
s/password_here/$WORDPRESS_PASSWORD/
/'AUTH_KEY'/s/put your unique phrase here/`pwgen -c -n -1 65`/
/'SECURE_AUTH_KEY'/s/put your unique phrase here/`pwgen -c -n -1 65`/
/'LOGGED_IN_KEY'/s/put your unique phrase here/`pwgen -c -n -1 65`/
/'NONCE_KEY'/s/put your unique phrase here/`pwgen -c -n -1 65`/
/'AUTH_SALT'/s/put your unique phrase here/`pwgen -c -n -1 65`/
/'SECURE_AUTH_SALT'/s/put your unique phrase here/`pwgen -c -n -1 65`/
/'LOGGED_IN_SALT'/s/put your unique phrase here/`pwgen -c -n -1 65`/
/'NONCE_SALT'/s/put your unique phrase here/`pwgen -c -n -1 65`/" /mnt/hhvm/wp-config-sample.php > /mnt/hhvm/wp-config.php
# zh_TW version
sed -i "s/'WPLANG', ''/'WPLANG', 'zh_TW'/g" /mnt/hhvm/wp-config.php
#
# DB host is resolved at runtime from the linked container's env vars.
DB_HOST='getenv("DB_1_PORT_3306_TCP_ADDR") . ":" . getenv("DB_1_PORT_3306_TCP_PORT")'
sed -i "s/'DB_HOST', 'localhost'/'DB_HOST', $DB_HOST/g" /mnt/hhvm/wp-config.php
# set permissions for plugin installation without ftp/ftps
# NOTE(review): world-writable content dir is a security trade-off — confirm.
chmod -R 777 /mnt/hhvm/wp-content
sed -i "s/define('WP_DEBUG.*/define('FS_METHOD','direct');\ndefine('FS_CHMOD_DIR', 0777);\ndefine('FS_CHMOD_FILE', 0777);\n&/" /mnt/hhvm/wp-config.php
chown -R www-data:www-data /mnt/hhvm
fi
|
{
"content_hash": "cbd3dc6619982dc4641a9c1f2ae648d1",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 150,
"avg_line_length": 46.42857142857143,
"alnum_prop": 0.6892307692307692,
"repo_name": "y12studio/y12wordpress",
"id": "c972b83d82d4704bfa876414fcec91b12095ffc0",
"size": "1635",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "projects/fig-banana/wpinit.sh",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "31"
},
{
"name": "CSS",
"bytes": "157"
},
{
"name": "JavaScript",
"bytes": "13980"
},
{
"name": "PHP",
"bytes": "51069"
},
{
"name": "Shell",
"bytes": "4016"
}
]
}
|
import m from "mithril";
import $ from "jquery";
import '@progress/kendo-ui/js/kendo.datetimepicker';
/**
 * Mithril component wrapping a Kendo DateTimePicker.
 * attrs: dateTimePickerId, dateTime (moment-like, must support .format() and
 * .isSame()), onChange(value), and optionally enabled.
 */
function DateTimePicker(ignore) {
    // Value most recently handed to the widget, so view() can detect when
    // the caller passes in a different dateTime from outside.
    let currentValue;
    let widget;

    let TrackedDateTime = function(initial) {
        this.dateTime = initial;
        this.format = function() {
            return this.dateTime.format();
        };
    };

    return {
        oninit: function(vnode) {
            currentValue = new TrackedDateTime(vnode.attrs.dateTime);
        },
        oncreate: function(vnode) {
            const selector = "#" + vnode.attrs.dateTimePickerId;
            $(selector).kendoDateTimePicker({
                value: currentValue.format(),
                dateInput: true,
                format: "MMM dd, yyyy HH:mm",
                change: function(element) {
                    // `this` is the kendo widget inside its change handler.
                    vnode.attrs.onChange(this.value());
                }
            });
            widget = $(selector).data("kendoDateTimePicker");
        },
        view: function(vnode) {
            if (widget && vnode.attrs.enabled !== undefined) {
                widget.enable(vnode.attrs.enabled);
            }
            // Push an externally-changed dateTime into the widget.
            if (!vnode.attrs.dateTime.isSame(currentValue.dateTime)) {
                currentValue.dateTime = vnode.attrs.dateTime;
                widget.value(currentValue.dateTime.format());
            }
            return m("input", {id: vnode.attrs.dateTimePickerId, class: "kendo-date-time-picker"});
        }
    };
}
export default DateTimePicker
|
{
"content_hash": "bc5beec45e681ac6d967368eb5f9f485",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 103,
"avg_line_length": 32.03921568627451,
"alnum_prop": 0.5587515299877601,
"repo_name": "pietermartin/sqlg",
"id": "1e181491e613bb05f69610a7413c1832db3e59f3",
"size": "1634",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sqlg-ui/src/main/web/sqlg/v1/src/components/datatime/dateTimePicker.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "506736"
},
{
"name": "D",
"bytes": "103"
},
{
"name": "Dockerfile",
"bytes": "74"
},
{
"name": "Groovy",
"bytes": "53"
},
{
"name": "HTML",
"bytes": "1789673"
},
{
"name": "Java",
"bytes": "5902303"
},
{
"name": "JavaScript",
"bytes": "1419859"
},
{
"name": "SCSS",
"bytes": "34849"
},
{
"name": "Shell",
"bytes": "298"
}
]
}
|
'''
Vacuum environment UI
=====================
Kivy front end for an AIMA-style vacuum-cleaner world. It renders the
environment grid, lets you choose a map and up to four agents from
spinners, and steps the simulation one tick (or 100 ticks) at a time
while displaying each agent's performance score.
'''
from kivy.core.window import Window
from kivy.uix.button import Button
from kivy.uix.spinner import Spinner
from kivy.uix.widget import Widget
from kivy.uix.label import Label
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.splitter import Splitter
from kivy.uix.image import Image
from kivy.app import App
from kivy.clock import Clock
from kivy.graphics import Color, Rectangle
from kivy.uix.behaviors import ToggleButtonBehavior
from kivy.uix.popup import Popup
from random import random as r
from functools import partial
from os import path, walk
from agent_dir import *
import agent_list
import env_list
def gen_popup(title, text, dismiss=True):
    """Build a Popup showing ``text``; add a Close button when ``dismiss``."""
    layout = BoxLayout(orientation='vertical')
    close_btn = Button(text='Close', size_hint=(1, .3))
    layout.add_widget(Label(text=text))
    popup = Popup(title=title, content=layout)
    if dismiss:
        layout.add_widget(close_btn)
        close_btn.bind(on_press=popup.dismiss)
    return popup
class Renderer(Widget):
    """Widget that draws the vacuum environment grid on its canvas."""

    def __init__(self):
        super(Renderer, self).__init__()
        # Texture cache: image file stem -> kivy Image widget.
        self._imgs = {}
        # Pixel size of one grid cell; recomputed via set_tile_size().
        self._tile_size = (0, 0)
        self.add_images_from_folder(self._imgs, './img')
        self.add_images_from_folder(self._imgs, './agent_dir/img')

    def add_images_from_folder(self, container, folder):
        """Load every .png found (recursively) under ``folder`` into ``container``."""
        images = {}
        for root, dirs, files in walk(path.abspath(folder)):
            for file_ in files:
                name, ext = path.splitext(path.basename(file_))
                if ext == '.png':
                    images[name] = Image(source=path.join(root, file_))
        container.update(images)

    def real_pos(self, x, y):
        """Convert grid coordinates to canvas (pixel) coordinates."""
        return (self.x + x * self._tile_size[0],
                self.y + y * self._tile_size[1])

    def set_tile_size(self, env):
        """Recompute per-cell pixel size from the env's largest location."""
        if env is not None:
            # assumes thing.location is a 0-based (x, y) tuple — TODO confirm
            n_x, n_y = max([thing.location for thing in env.things])
            tile_x = self.width / float(n_x + 1)
            tile_y = self.height / float(n_y + 1)
            self._tile_size = (tile_x, tile_y)

    def clear(self):
        """Wipe the widget's canvas."""
        self.canvas.clear()

    def draw(self, env):
        """Redraw all things in ``env``; draw order is walls, dirt, clean
        tiles, then agents, so agents end up on top."""
        self.clear()
        if env is not None:
            walls = []
            dirts = []
            cleans = []
            agents = []
            for thing in env.things:
                if isinstance(thing, Wall):
                    walls.append(thing)
                elif isinstance(thing, Dirt):
                    dirts.append(thing)
                elif isinstance(thing, Clean):
                    cleans.append(thing)
                elif isinstance(thing, Agent):
                    agents.append(thing)
            with self.canvas:
                for wall in walls:
                    Rectangle(texture=self._imgs.get('wall').texture,
                              pos=self.real_pos(*wall.location),
                              size=self._tile_size)
                for dirt in dirts:
                    # Color(0.9, 0, 0, 0.6)
                    # 2016: campo trivelle. RGB=(51, 153, 255)
                    Color(0.2, 0.6, 1, 1)
                    Rectangle(
                        pos=self.real_pos(*dirt.location),
                        size=self._tile_size)
                    # Reset tint so the trash texture keeps its own colors.
                    Color(1, 1, 1, 1)
                    Rectangle(texture=self._imgs.get('trash').texture,
                              pos=self.real_pos(*dirt.location),
                              size=self._tile_size)
                for clean in cleans:
                    # Color(0, 0.9, 0, 0.6)
                    # 2016: campo trivelle. RGB=(51, 153, 255)
                    Color(0.2, 0.6, 1, 1)
                    Rectangle(
                        pos=self.real_pos(*clean.location),
                        size=self._tile_size)
                for agent in agents:
                    Color(1, 1, 1, 1)
                    # Fall back to the agent id when it carries no custom image.
                    Rectangle(
                        texture=self._imgs.get(
                            agent.img if agent.img is not None else agent.id.lower()).texture,
                        pos=self.real_pos(*agent.location),
                        size=self._tile_size)
class ToggleButton(ToggleButtonBehavior, Image):
    """Checkbox-style toggle drawn with the default-theme atlas icons."""

    _SOURCES = {
        'down': 'atlas://data/images/defaulttheme/checkbox_on',
        'normal': 'atlas://data/images/defaulttheme/checkbox_off',
    }

    def __init__(self, **kwargs):
        super(ToggleButton, self).__init__(**kwargs)
        self.source = self._SOURCES['normal']

    def on_state(self, widget, value):
        # Any state other than 'down' shows the unchecked icon.
        self.source = self._SOURCES.get(value, self._SOURCES['normal'])
class VacuumEnv(App):
    """Kivy App wiring the Renderer to map/agent selection controls."""

    def __init__(self, **kwargs):
        super(VacuumEnv, self).__init__(**kwargs)
        # name -> loaded agent class; name -> environment class.
        self._agents = {}
        self._maps = {}
        self._wid = None
        self._env = None
        self._step = 0
        # Guards against re-entrant "100 steps" runs.
        self._100_steps_pressed = False
        # One slot per spinner; None means no agent selected.
        self._agent_objs = {
            'agent_1': None,
            'agent_2': None,
            'agent_3': None,
            'agent_4': None
        }
        self._loading = gen_popup("WARNING", "Loading...", dismiss=False)
        self._load_done = gen_popup(
            "INFO", "All resources loaded", dismiss=True)

    def _resize_env(self, *largs):
        """Recompute tile size and redraw (scheduled after a resize)."""
        self._wid.set_tile_size(self._env)
        self._wid.draw(self._env)

    def on_resize(self, window, width, height, *largs):
        # Defer one frame so the widget has its final size before drawing.
        Clock.schedule_once(self._resize_env)

    def splitter_on_release(self):
        """Redraw when the splitter drag ends (renderer area changed)."""
        self._wid.set_tile_size(self._env)
        self._wid.draw(self._env)

    def splitter_on_press(self):
        """Redraw when the splitter drag starts."""
        self._wid.set_tile_size(self._env)
        self._wid.draw(self._env)

    def load_agents_and_maps(self, spinner_list, spinner_map):
        """Populate the agent spinners and the map spinner from disk."""
        self._loading.open()
        self._agents = agent_list.load_agents()
        self._maps = env_list.get_maps()
        for num, spinner in enumerate(spinner_list, 1):
            # Keep the placeholder entry ("agent_N") as the first value.
            spinner.values = [
                "agent_{0}".format(num)] + [elm for elm in sorted(self._agents.keys())]
        spinner_map.values = [
            elm for elm in sorted(self._maps.keys())] + ['Maps']
        self._loading.dismiss()
        self._load_done.open()

    def select_map(self, t_btn_random, label_steps, instance, data, *largs):
        """Map-spinner callback: instantiate the map and re-add agents."""
        self._wid.clear()
        self._env = self._maps.get(data, None)
        if self._env is not None:
            # _maps stores classes; instantiate the chosen environment.
            self._env = self._env()
            self._step = 0
            label_steps.text = '{0}'.format(self._step)
            for name, agent in self._agent_objs.items():
                # NOTE(review): __init__ is invoked even for empty (None)
                # slots; confirm this reset-in-place is intentional.
                agent.__init__()
                if agent is not None:
                    if t_btn_random.state == 'down':
                        self._env.add_thing(agent,
                                            location=self._env.random_location())
                    else:
                        self._env.add_thing(agent,
                                            location=self._env.start_from)
        self._wid.set_tile_size(self._env)
        self._wid.draw(self._env)

    def select_agent(self, agent_id, t_btn_random, spinner, text, *largs):
        """Agent-spinner callback: swap the agent occupying ``agent_id``."""
        if text in ['agent_1', 'agent_2', 'agent_3', 'agent_4']:
            # Placeholder picked again -> clear the slot.
            spinner.text = agent_id
            if self._env is not None and self._agent_objs[agent_id] is not None:
                self._env.delete_thing(self._agent_objs[agent_id])
            self._agent_objs[agent_id] = None
        else:
            # Replace any previous agent in this slot with the new one.
            if self._agent_objs[agent_id] is not None:
                self._env.delete_thing(self._agent_objs[agent_id])
            self._agent_objs[agent_id] = self._agents[text]()
            self._agent_objs[agent_id].id = agent_id
            if self._env is not None:
                if t_btn_random.state == 'down':
                    self._env.add_thing(self._agent_objs[agent_id],
                                        location=self._env.random_location())
                else:
                    self._env.add_thing(self._agent_objs[agent_id],
                                        location=self._env.start_from)
            self._wid.draw(self._env)

    def step(self, *largs, **kwargs):
        """Advance the simulation one tick and refresh the score labels."""
        if self._env is not None:
            self._env.step()
            self._wid.draw(self._env)
            self._step += 1
            kwargs['label_steps'].text = '{0}'.format(self._step)
            for id_, label in kwargs['label_agents'].items():
                if self._agent_objs[id_] is not None:
                    label.text = "{0}".format(
                        self._agent_objs[id_].performance)

    def evt_100_steps(self, steps, *largs, **kwargs):
        """Run 100 steps at ~30 fps by re-scheduling itself on the clock."""
        if self._env is not None:
            if not self._100_steps_pressed and steps == 100:
                # First call of a fresh run: latch the button and start.
                self._100_steps_pressed = True
                kwargs['btn_100step'].state = 'down'
                self.step(*largs, **kwargs)
                Clock.schedule_once(
                    partial(self.evt_100_steps, steps-1, *largs, **kwargs), 1. / 30.)
            elif steps < 100:
                if steps == 0:
                    # Run finished: release the button.
                    kwargs['btn_100step'].state = 'normal'
                    self._100_steps_pressed = False
                else:
                    kwargs['btn_100step'].state = 'down'
                    self.step(*largs, **kwargs)
                    Clock.schedule_once(
                        partial(self.evt_100_steps, steps-1, *largs, **kwargs), 1. / 30.)

    def evt_step(self, *largs, **kwargs):
        """Queue a single simulation step on the Kivy clock."""
        Clock.schedule_once(partial(self.step, *largs, **kwargs))

    def reset(self, spinn_map, t_btn_random, label_steps, spinn_agents, label_agents, *largs, **kwargs):
        """Rebuild the environment from the current spinner selections."""
        self._wid.clear()
        self._step = 0
        label_steps.text = '{0}'.format(self._step)
        for label in label_agents:
            label.text = '{0}'.format(self._step)
        self._env = self._maps.get(spinn_map.text, None)
        if self._env is not None:
            self._env = self._env()
            for spinner in spinn_agents:
                # Skip spinners still showing their placeholder text.
                if spinner.text not in ['agent_1', 'agent_2', 'agent_3', 'agent_4']:
                    self._agent_objs[spinner.id] = self._agents[spinner.text]()
                    self._agent_objs[spinner.id].id = spinner.id
                    if t_btn_random.state == 'down':
                        self._env.add_thing(self._agent_objs[spinner.id],
                                            location=self._env.random_location())
                    else:
                        self._env.add_thing(self._agent_objs[spinner.id],
                                            location=self._env.start_from)
        self._wid.set_tile_size(self._env)
        self._wid.draw(self._env)

    def build(self):
        """Construct the widget tree and hook up all event handlers."""
        self._wid = Renderer()
        ##
        # First row
        label_steps = Label(
            text='{0}'.format(self._step), color=(0.1, 1, 0.1, 1))
        btn_load = Button(text='Load')
        btn_step = Button(text='Step >')
        btn_100step = Button(text='100 Step >')
        spinn_map = Spinner(
            text='Maps',
            shorten=True,
            shorten_from='right',
            halign='left',
            text_size=(64, None),
            values=["Maps"]
        )
        label_random_pos = Label(
            text='rand p', size=(100, 42), size_hint=(None, 1))
        t_btn_random = ToggleButton(size=(64, 42), size_hint=(None, 1))
        btn_reset = Button(text='Reset')
        lay_splitter = BoxLayout(orientation='vertical')
        ##
        # Second row
        spinn_agent_01 = Spinner(
            id='agent_1',
            text='agent_1',
            shorten=True,
            shorten_from='right',
            text_size=(200, None),
            values=["agent_1"]
        )
        label_agent_01 = Label(text='0')
        spinn_agent_02 = Spinner(
            id='agent_2',
            text='agent_2',
            shorten=True,
            shorten_from='right',
            text_size=(200, None),
            values=["agent_1"]
        )
        label_agent_02 = Label(text='0')
        ##
        # Third row
        spinn_agent_03 = Spinner(
            id='agent_3',
            text='agent_3',
            shorten=True,
            shorten_from='right',
            text_size=(200, None),
            values=["agent_1"]
        )
        label_agent_03 = Label(text='0')
        spinn_agent_04 = Spinner(
            id='agent_4',
            text='agent_4',
            shorten=True,
            shorten_from='right',
            text_size=(200, None),
            values=["agent_1"]
        )
        label_agent_04 = Label(text='0')
        ##
        # Layout
        lay_actions = BoxLayout()
        lay_actions.add_widget(btn_load)
        lay_actions.add_widget(btn_step)
        lay_actions.add_widget(btn_100step)
        lay_actions.add_widget(label_steps)
        lay_actions.add_widget(spinn_map)
        lay_actions.add_widget(label_random_pos)
        lay_actions.add_widget(t_btn_random)
        lay_actions.add_widget(btn_reset)
        lay_splitter.add_widget(lay_actions)
        lay_agent_row_0 = BoxLayout()
        lay_agent_row_0.add_widget(spinn_agent_01)
        lay_agent_row_0.add_widget(label_agent_01)
        lay_agent_row_0.add_widget(label_agent_02)
        lay_agent_row_0.add_widget(spinn_agent_02)
        lay_agent_row_1 = BoxLayout()
        lay_agent_row_1.add_widget(spinn_agent_03)
        lay_agent_row_1.add_widget(label_agent_03)
        lay_agent_row_1.add_widget(label_agent_04)
        lay_agent_row_1.add_widget(spinn_agent_04)
        lay_splitter.add_widget(lay_agent_row_0)
        lay_splitter.add_widget(lay_agent_row_1)
        splitter = Splitter(sizable_from='top')
        splitter.add_widget(lay_splitter)
        splitter.min_size = 128
        splitter.size_hint_y = 0.16
        root = BoxLayout(orientation='vertical')
        root.add_widget(self._wid)
        root.add_widget(splitter)
        Window.minimum_width = 640
        Window.minimum_height = 480
        ##
        # Events
        Window.bind(on_resize=self.on_resize)
        btn_load.on_press = partial(self.load_agents_and_maps,
                                    [spinn_agent_01,
                                     spinn_agent_02,
                                     spinn_agent_03,
                                     spinn_agent_04],
                                    spinn_map)
        btn_step.on_press = partial(self.evt_step,
                                    label_steps=label_steps,
                                    label_agents={
                                        'agent_1': label_agent_01,
                                        'agent_2': label_agent_02,
                                        'agent_3': label_agent_03,
                                        'agent_4': label_agent_04,
                                    })
        btn_100step.on_press = partial(self.evt_100_steps, 100,
                                       label_steps=label_steps,
                                       btn_100step=btn_100step,
                                       label_agents={
                                           'agent_1': label_agent_01,
                                           'agent_2': label_agent_02,
                                           'agent_3': label_agent_03,
                                           'agent_4': label_agent_04,
                                       })
        btn_reset.on_press = partial(self.reset,
                                     spinn_map,
                                     t_btn_random,
                                     label_steps,
                                     [
                                         spinn_agent_01,
                                         spinn_agent_02,
                                         spinn_agent_03,
                                         spinn_agent_04
                                     ],
                                     [
                                         label_agent_01,
                                         label_agent_02,
                                         label_agent_03,
                                         label_agent_04,
                                     ])
        spinn_map.bind(
            text=partial(self.select_map, t_btn_random, label_steps))
        spinn_agent_01.bind(
            text=partial(self.select_agent, spinn_agent_01.id, t_btn_random,))
        spinn_agent_02.bind(
            text=partial(self.select_agent, spinn_agent_02.id, t_btn_random,))
        spinn_agent_03.bind(
            text=partial(self.select_agent, spinn_agent_03.id, t_btn_random,))
        spinn_agent_04.bind(
            text=partial(self.select_agent, spinn_agent_04.id, t_btn_random,))
        splitter.on_press = partial(self.splitter_on_press)
        splitter.on_release = partial(self.splitter_on_release)
        return root
# Launch the Kivy application when executed as a script.
if __name__ == '__main__':
    VacuumEnv().run()
|
{
"content_hash": "200f88caba2e4bb5529dae21196efada",
"timestamp": "",
"source": "github",
"line_count": 473,
"max_line_length": 104,
"avg_line_length": 36.35729386892178,
"alnum_prop": 0.4874105948711985,
"repo_name": "DMIunipg/AI-Project-VacuumEnvironment",
"id": "6626572eb13969cf3df2595bb6c6db843ed7cef4",
"size": "17197",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "aima-ui-4a.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "254211"
}
]
}
|
from ElementBase import ElementBase
from ElementParameter import ElementParameter
from ElementValue import ElementValue
import speech
class SpeakText(ElementBase):
    """Flow element that speaks its string input aloud via the speech module."""

    def __init__(self):
        self.status = 'running'
        self.output = None
        self.params = []
        self.type = 'Standard'
        self.setup_params()

    def can_handle_list(self):
        # Only single string inputs are supported.
        return False

    def setup_params(self):
        """Register the single configurable parameter: the speech language."""
        language_param = ElementParameter(name='speechlanguage',displayName='Speech Language',display=True,type='list',value='en_US',allowedValues=speech.get_languages(), isVariableAllowed=False)
        self.params.append(language_param)

    def get_status(self):
        return self.status

    def get_input_type(self):
        return 'string'

    def get_output(self):
        return self.output

    def get_output_type(self):
        # Speaking produces no downstream value.
        return None

    def get_params(self):
        return self.params

    def set_params(self, params = None):
        self.params = params or []

    def get_description(self):
        return 'Speak the text that is input to it'

    def get_title(self):
        return 'Speak Text'

    def get_icon(self):
        return 'iob:volume_medium_32'

    def get_category(self):
        return 'Text'

    def get_type(self):
        return self.type

    def run(self, input):
        """Speak ``input.value`` in the configured language, then finish."""
        language = self.get_param_by_name('speechlanguage')
        speech.say(input.value, language.value)
        self.status = 'complete'
|
{
"content_hash": "31224a78b9301dbdb79c74d7672176c4",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 192,
"avg_line_length": 22.75,
"alnum_prop": 0.7127158555729984,
"repo_name": "shaun-h/istaflow",
"id": "ba72ff3314cf914c0ebf9ea23676f24dd528012e",
"size": "1290",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "elements/SpeakText.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "160332"
}
]
}
|
// Default constructor: no explicit setup required; members (if any) rely on
// their own default construction.
DNServiceManager::DNServiceManager()
{
}

// Destructor: nothing to release explicitly.
DNServiceManager::~DNServiceManager()
{
}
|
{
"content_hash": "eb252e795806f1c1aee1c7121cc88391",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 37,
"avg_line_length": 10.625,
"alnum_prop": 0.7529411764705882,
"repo_name": "XDApp/libDNService",
"id": "6273b00f1feabed590d5e7ac9ad1fa63fcdedd91",
"size": "137",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Source/DNService/DNServiceManager.cpp",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "3335163"
},
{
"name": "C++",
"bytes": "2532879"
},
{
"name": "CMake",
"bytes": "1468"
},
{
"name": "Objective-C",
"bytes": "58256"
}
]
}
|
// bbn-grid-configuration (minified/generated — do not hand-edit logic).
// Registers a Vue component: a button that opens a floater containing a
// rows × cols grid of cells; hovering highlights the rectangle from (1,1)
// to the hovered cell, and clicking emits 'select' with [colidx, rowidx].
// The template literal below is injected into the page as a
// <script type="text/x-template"> element, so no comments may be placed
// inside it.
((bbn)=>{let script=document.createElement('script');script.innerHTML=`<!-- HTML Document -->
<div :class="['bbn-iblock', componentClass]">
<bbn-button :notext="true"
icon="nf nf-mdi-grid"
:text="_('Open selector')"
@click="showWindow=!showWindow"
ref="button"/>
<bbn-floater :title="false"
v-if="showWindow"
@close="showWindow=false"
:auto-hide="true"
:scrollable="false"
:element="buttonElement">
<div class="bbn-grid-configuration-container bbn-grid"
:style="{gridTemplateRows: 'repeat(' + rows + ', ' + realCellSize + ')'}">
<div v-for="rowidx in rows"
class="bbn-grid-configuration-container bbn-grid"
:style="{gridTemplateColumns: 'repeat(' + cols + ', ' + realCellSize + ')'}">
<div v-for="colidx in cols"
:class="['bbn-bordered', {'bbn-state-selected': (currentRow >= rowidx) && (currentCol >= colidx)}]"
@mouseenter="mouseEnter(colidx, rowidx)"
@mouseleave="mouseLeave()"
@click="$emit('select', [colidx, rowidx])"
:title="_('Row') + ': ' + rowidx + ' / ' + _('Col') + ': ' + colidx"/>
</div>
</div>
</bbn-floater>
</div>`;script.setAttribute('id','bbn-tpl-component-grid-configuration');script.setAttribute('type','text/x-template');document.body.insertAdjacentElement('beforeend',script);(function(){"use strict";Vue.component('bbn-grid-configuration',{name:'bbn-grid-configuration',mixins:[bbn.vue.basicComponent],props:{rows:{type:Number,default:20},cols:{type:Number,default:20},cellSize:{type:[String,Number],default:'1rem'}},data(){return{showWindow:false,currentRow:-1,currentCol:-1}},computed:{realCellSize(){return bbn.fn.isNumber(this.cellSize)?this.cellSize+'px':this.cellSize;},numGrids(){return this.cols*this.rows;},buttonElement(){let btn=this.getRef("button");if(btn){return btn.$el;}
return null;}},methods:{mouseEnter(colidx,rowidx){this.currentRow=rowidx;this.currentCol=colidx;},mouseLeave(){this.currentRow=-1;this.currentCol=-1;}}});})();})(bbn);
|
{
"content_hash": "597a16760d2be4a6244e2274af517124",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 686,
"avg_line_length": 71.33333333333333,
"alnum_prop": 0.6191588785046729,
"repo_name": "nabab/bbn-vue",
"id": "172c86dbcd20a48d99da1db2a6ff895e2ec9305b",
"size": "2140",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dist/js_single_files/components/grid-configuration/grid-configuration.min.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "324685"
},
{
"name": "JavaScript",
"bytes": "1932157"
},
{
"name": "Less",
"bytes": "129552"
}
]
}
|
package birdz.lib.simulations;
import birdz.lib.genetic.FitnessCalc;
import birdz.lib.genetic.Individual;
/**
 * Fitness function that evolves a network's outputs toward the encoded
 * characters of a target string ("hello").
 */
public class StringFitnessCalc implements FitnessCalc {

    /** Alphabet used to encode characters as fractions in [0, 1). */
    String alphabet = "abcdefghijklmnopqrstuvwxyz";
    /** The string the population is evolving toward. */
    String target = "hello";
    /** Outputs from the most recent {@link #getFitness(Individual)} call. */
    double[] outputs;

    /**
     * Fitness is the negated sum of absolute errors between the encoded
     * target values and the individual's outputs; 0 is a perfect score.
     */
    @Override
    public double getFitness(Individual i) {
        double[] targetValues = getTargetValues();
        outputs = i.fire(targetValues);
        double error = 0.0;
        for (int j = 0; j < target.length(); j++)
            error += Math.abs(targetValues[j] - outputs[j]);
        return 0 - error;
    }

    @Override
    public double getIdealFitness() {
        // Slightly below zero so a near-perfect match terminates the run.
        return -0.000001;
    }

    @Override
    public int getNumInputs() {
        return target.length();
    }

    /** Encodes each target character as its alphabet index scaled to [0, 1). */
    private double[] getTargetValues() {
        double[] inputs = new double[target.length()];
        char[] targetChars = target.toCharArray();
        for (int i = 0; i < target.length(); i++)
            // BUGFIX: cast to double. The original int/int division truncated
            // every value to 0, collapsing all target inputs to 0.0.
            inputs[i] = alphabet.indexOf(targetChars[i]) / (double) alphabet.length();
        return inputs;
    }

    /** Decodes the last outputs back into a string for display. */
    @Override
    public String displayFitness(Individual i) {
        // Guard: may be called before getFitness() has populated outputs.
        if (outputs == null || outputs.length == 0)
            return "";
        StringBuilder fitness = new StringBuilder();
        for (int j = 0; j < outputs.length; j++) {
            // Clamp so out-of-range outputs cannot throw IndexOutOfBounds.
            int idx = (int) (outputs[j] * alphabet.length());
            idx = Math.max(0, Math.min(alphabet.length() - 1, idx));
            fitness.append(alphabet.charAt(idx));
        }
        return fitness.toString();
    }

    @Override
    public int getNumLayers() {
        // TODO Auto-generated method stub
        return 0;
    }

    @Override
    public int getLayerSize() {
        // TODO Auto-generated method stub
        return 0;
    }
}
|
{
"content_hash": "d5d7f5444f8c82d98a72d5e6395b7a39",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 79,
"avg_line_length": 22.5,
"alnum_prop": 0.6817204301075269,
"repo_name": "SpyGuyIan/Birdz",
"id": "4a167df53ea38e11bef6a6bc14dd77a3adae4036",
"size": "1395",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/birdz/lib/simulations/StringFitnessCalc.java",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "42609"
}
]
}
|
# Balena base image: Debian Stretch (run variant) for the jn30b-nano board.
FROM balenalib/jn30b-nano-debian:stretch-run

# Pinned toolchain versions installed below.
ENV NODE_VERSION 12.20.1
ENV YARN_VERSION 1.22.4

# Install Node.js from the official arm64 tarball (sha256-verified) and
# Yarn from its release tarball (GPG-verified), then clean apt caches.
RUN buildDeps='curl libatomic1' \
	&& set -x \
	&& for key in \
	6A010C5166006599AA17F08146C2130DFD2497F5 \
	; do \
	gpg --batch --keyserver pgp.mit.edu --recv-keys "$key" || \
	gpg --batch --keyserver keyserver.pgp.com --recv-keys "$key" || \
	gpg --batch --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; \
	done \
	&& apt-get update && apt-get install -y $buildDeps --no-install-recommends \
	&& rm -rf /var/lib/apt/lists/* \
	&& curl -SLO "http://nodejs.org/dist/v$NODE_VERSION/node-v$NODE_VERSION-linux-arm64.tar.gz" \
	&& echo "3154628c02f2c920fed77e8dce1a8ae32333260666ebaaa7a3cd230f45d13e42  node-v$NODE_VERSION-linux-arm64.tar.gz" | sha256sum -c - \
	&& tar -xzf "node-v$NODE_VERSION-linux-arm64.tar.gz" -C /usr/local --strip-components=1 \
	&& rm "node-v$NODE_VERSION-linux-arm64.tar.gz" \
	&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz" \
	&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz.asc" \
	&& gpg --batch --verify yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
	&& mkdir -p /opt/yarn \
	&& tar -xzf yarn-v$YARN_VERSION.tar.gz -C /opt/yarn --strip-components=1 \
	&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarn \
	&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarnpkg \
	&& rm yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
	&& npm config set unsafe-perm true -g --unsafe-perm \
	&& rm -rf /tmp/*

# No default command: images are expected to set their own CMD.
CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"]

# Smoke-test the installed Node stack with balena's pinned test script.
RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/test-stack@node.sh" \
	&& echo "Running test-stack@node" \
	&& chmod +x test-stack@node.sh \
	&& bash test-stack@node.sh \
	&& rm -rf test-stack@node.sh

# Record image details shown by the balena-info helper.
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo 'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v8 \nOS: Debian Stretch \nVariant: run variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nNode.js v12.20.1, Yarn v1.22.4 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info

# Shim /bin/sh so the first shell invocation prints image info once.
RUN echo '#!/bin/sh.real\nbalena-info\nrm -f /bin/sh\ncp /bin/sh.real /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \
	&& chmod +x /bin/sh-shim \
	&& cp /bin/sh /bin/sh.real \
	&& mv /bin/sh-shim /bin/sh
|
{
"content_hash": "badbcb7923568f4fa14ceb5b64ee11dd",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 693,
"avg_line_length": 64.86666666666666,
"alnum_prop": 0.7040082219938335,
"repo_name": "nghiant2710/base-images",
"id": "bc98c3b888a9425bd04be6a456311b918008d1a0",
"size": "2940",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "balena-base-images/node/jn30b-nano/debian/stretch/12.20.1/run/Dockerfile",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "144558581"
},
{
"name": "JavaScript",
"bytes": "16316"
},
{
"name": "Shell",
"bytes": "368690"
}
]
}
|
package com.google.cloud.memcache.v1beta2;
import com.google.api.core.BetaApi;
import com.google.cloud.memcache.v1beta2.CloudMemcacheGrpc.CloudMemcacheImplBase;
import com.google.longrunning.Operation;
import com.google.protobuf.AbstractMessage;
import io.grpc.stub.StreamObserver;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Queue;
import javax.annotation.Generated;
@BetaApi
@Generated("by gapic-generator-java")
/**
 * In-process mock of the CloudMemcache gRPC service for unit tests.
 *
 * <p>Tests queue canned responses (or exceptions) with {@link #addResponse} /
 * {@link #addException}; each service call consumes one queue entry and records the
 * incoming request, which tests can inspect via {@link #getRequests}.
 */
public class MockCloudMemcacheImpl extends CloudMemcacheImplBase {
  // Requests received so far, in call order; exposed for test verification.
  private List<AbstractMessage> requests;
  // Pending canned replies. Each entry is either a response message of the type the
  // next-called method expects, or an Exception to be delivered through onError.
  private Queue<Object> responses;

  public MockCloudMemcacheImpl() {
    requests = new ArrayList<>();
    responses = new LinkedList<>();
  }

  /** Returns the requests received so far, in the order they arrived. */
  public List<AbstractMessage> getRequests() {
    return requests;
  }

  /** Queues a response message to be returned by the next unanswered call. */
  public void addResponse(AbstractMessage response) {
    responses.add(response);
  }

  /** Replaces the whole response queue with the given responses, in order. */
  public void setResponses(List<AbstractMessage> responses) {
    this.responses = new LinkedList<Object>(responses);
  }

  /** Queues an exception to be delivered via onError by the next unanswered call. */
  public void addException(Exception exception) {
    responses.add(exception);
  }

  /** Clears all recorded requests and queued responses. */
  public void reset() {
    requests = new ArrayList<>();
    responses = new LinkedList<>();
  }

  /**
   * Shared dispatch used by every mocked method (the nine overrides below were
   * previously verbatim copies of this logic). Consumes the next queued entry and:
   * records the request and replies with the entry when it is of the expected type;
   * propagates it via onError when it is an Exception; otherwise reports an
   * IllegalArgumentException describing the mismatch, worded identically to the
   * original per-method messages.
   *
   * @param request the incoming request to record on success
   * @param responseObserver observer to complete or fail
   * @param expectedType the response message type this method expects
   * @param methodName RPC method name used in the mismatch error message
   */
  private <T> void respond(
      AbstractMessage request,
      StreamObserver<T> responseObserver,
      Class<T> expectedType,
      String methodName) {
    Object response = responses.poll();
    if (expectedType.isInstance(response)) {
      requests.add(request);
      responseObserver.onNext(expectedType.cast(response));
      responseObserver.onCompleted();
    } else if (response instanceof Exception) {
      responseObserver.onError((Exception) response);
    } else {
      responseObserver.onError(
          new IllegalArgumentException(
              String.format(
                  "Unrecognized response type %s for method %s, expected %s or %s",
                  response == null ? "null" : response.getClass().getName(),
                  methodName,
                  expectedType.getName(),
                  Exception.class.getName())));
    }
  }

  @Override
  public void listInstances(
      ListInstancesRequest request, StreamObserver<ListInstancesResponse> responseObserver) {
    respond(request, responseObserver, ListInstancesResponse.class, "ListInstances");
  }

  @Override
  public void getInstance(GetInstanceRequest request, StreamObserver<Instance> responseObserver) {
    respond(request, responseObserver, Instance.class, "GetInstance");
  }

  @Override
  public void createInstance(
      CreateInstanceRequest request, StreamObserver<Operation> responseObserver) {
    respond(request, responseObserver, Operation.class, "CreateInstance");
  }

  @Override
  public void updateInstance(
      UpdateInstanceRequest request, StreamObserver<Operation> responseObserver) {
    respond(request, responseObserver, Operation.class, "UpdateInstance");
  }

  @Override
  public void updateParameters(
      UpdateParametersRequest request, StreamObserver<Operation> responseObserver) {
    respond(request, responseObserver, Operation.class, "UpdateParameters");
  }

  @Override
  public void deleteInstance(
      DeleteInstanceRequest request, StreamObserver<Operation> responseObserver) {
    respond(request, responseObserver, Operation.class, "DeleteInstance");
  }

  @Override
  public void applyParameters(
      ApplyParametersRequest request, StreamObserver<Operation> responseObserver) {
    respond(request, responseObserver, Operation.class, "ApplyParameters");
  }

  @Override
  public void applySoftwareUpdate(
      ApplySoftwareUpdateRequest request, StreamObserver<Operation> responseObserver) {
    respond(request, responseObserver, Operation.class, "ApplySoftwareUpdate");
  }

  @Override
  public void rescheduleMaintenance(
      RescheduleMaintenanceRequest request, StreamObserver<Operation> responseObserver) {
    respond(request, responseObserver, Operation.class, "RescheduleMaintenance");
  }
}
|
{
"content_hash": "8cc465fde5b07eade13521c269bd0245",
"timestamp": "",
"source": "github",
"line_count": 235,
"max_line_length": 102,
"avg_line_length": 36.90638297872341,
"alnum_prop": 0.6516776202006226,
"repo_name": "googleapis/google-cloud-java",
"id": "8261ac32702681dc61ee817089a2a122fa1e2ca2",
"size": "9268",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "java-memcache/google-cloud-memcache/src/test/java/com/google/cloud/memcache/v1beta2/MockCloudMemcacheImpl.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2614"
},
{
"name": "HCL",
"bytes": "28592"
},
{
"name": "Java",
"bytes": "826434232"
},
{
"name": "Jinja",
"bytes": "2292"
},
{
"name": "Python",
"bytes": "200408"
},
{
"name": "Shell",
"bytes": "97954"
}
]
}
|
# This file is copied to spec/ when you run 'rails generate rspec:install'
# Force the test environment so the dummy app boots with test config/DB.
ENV["RAILS_ENV"] ||= 'test'
# Boot the dummy Rails application that hosts this engine during specs.
require File.expand_path("../dummy/config/environment", __FILE__)
require 'rspec/rails'
require 'capybara/rspec'
# Requires supporting ruby files with custom matchers and macros, etc,
# in spec/support/ and its subdirectories.
# NOTE(review): Dir[] load order is filesystem-dependent; support files should
# not rely on being required in a particular order.
Dir[Rails.root.join("spec/support/**/*.rb")].each {|f| require f}
RSpec.configure do |config|
  # == Mock Framework
  #
  # If you prefer to use mocha, flexmock or RR, uncomment the appropriate line:
  #
  # config.mock_with :mocha
  # config.mock_with :flexmock
  # config.mock_with :rr
  config.mock_with :rspec
  # Remove this line if you're not using ActiveRecord or ActiveRecord fixtures
  config.fixture_path = "#{::Rails.root}/spec/fixtures"
  # If you're not using ActiveRecord, or you'd prefer not to run each of your
  # examples within a transaction, remove the following line or assign false
  # instead of true.
  config.use_transactional_fixtures = true
  # Make the Capybara DSL (visit, page, ...) available, but only in specs that
  # live under spec/requests/, matched by file path.
  config.include Capybara::DSL, :example_group => { :file_path => /\bspec\/requests\// }
end
|
{
"content_hash": "3423f1db14c7072eeeb2ef8b2acfd439",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 88,
"avg_line_length": 35.45161290322581,
"alnum_prop": 0.7142857142857143,
"repo_name": "volontarian/Import",
"id": "7ab06abe977c879cfafa76f5459dcee9448db5bb",
"size": "1099",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spec/spec_helper.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "421"
},
{
"name": "Ruby",
"bytes": "27248"
}
]
}
|
End of preview.
No dataset card yet
- Downloads last month
- 3