Dataset Preview
The full dataset viewer is not available (click to read why). Only showing a preview of the rows.
The dataset generation failed
Error code: DatasetGenerationError
Exception: TypeError
Message: Couldn't cast array of type
struct<content_hash: string, timestamp: string, source: string, line_count: int64, max_line_length: int64, avg_line_length: double, alnum_prop: double, repo_name: string, id: string, size: string, binary: bool, copies: string, ref: string, path: string, mode: string, license: string, language: list<item: struct<name: string, bytes: string>>, symlink_target: string>
to
{'content_hash': Value(dtype='string', id=None), 'timestamp': Value(dtype='string', id=None), 'source': Value(dtype='string', id=None), 'line_count': Value(dtype='int64', id=None), 'max_line_length': Value(dtype='int64', id=None), 'avg_line_length': Value(dtype='float64', id=None), 'alnum_prop': Value(dtype='float64', id=None), 'repo_name': Value(dtype='string', id=None), 'id': Value(dtype='string', id=None), 'size': Value(dtype='string', id=None), 'binary': Value(dtype='bool', id=None), 'copies': Value(dtype='string', id=None), 'ref': Value(dtype='string', id=None), 'path': Value(dtype='string', id=None), 'mode': Value(dtype='string', id=None), 'license': Value(dtype='string', id=None), 'language': [{'name': Value(dtype='string', id=None), 'bytes': Value(dtype='string', id=None)}]}
Traceback: Traceback (most recent call last):
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 2011, in _prepare_split_single
writer.write_table(table)
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/arrow_writer.py", line 585, in write_table
pa_table = table_cast(pa_table, self._schema)
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 2302, in table_cast
return cast_table_to_schema(table, schema)
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 2261, in cast_table_to_schema
arrays = [cast_array_to_feature(table[name], feature) for name, feature in features.items()]
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 2261, in <listcomp>
arrays = [cast_array_to_feature(table[name], feature) for name, feature in features.items()]
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 1802, in wrapper
return pa.chunked_array([func(chunk, *args, **kwargs) for chunk in array.chunks])
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 1802, in <listcomp>
return pa.chunked_array([func(chunk, *args, **kwargs) for chunk in array.chunks])
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 2122, in cast_array_to_feature
raise TypeError(f"Couldn't cast array of type\n{_short_str(array.type)}\nto\n{_short_str(feature)}")
TypeError: Couldn't cast array of type
struct<content_hash: string, timestamp: string, source: string, line_count: int64, max_line_length: int64, avg_line_length: double, alnum_prop: double, repo_name: string, id: string, size: string, binary: bool, copies: string, ref: string, path: string, mode: string, license: string, language: list<item: struct<name: string, bytes: string>>, symlink_target: string>
to
{'content_hash': Value(dtype='string', id=None), 'timestamp': Value(dtype='string', id=None), 'source': Value(dtype='string', id=None), 'line_count': Value(dtype='int64', id=None), 'max_line_length': Value(dtype='int64', id=None), 'avg_line_length': Value(dtype='float64', id=None), 'alnum_prop': Value(dtype='float64', id=None), 'repo_name': Value(dtype='string', id=None), 'id': Value(dtype='string', id=None), 'size': Value(dtype='string', id=None), 'binary': Value(dtype='bool', id=None), 'copies': Value(dtype='string', id=None), 'ref': Value(dtype='string', id=None), 'path': Value(dtype='string', id=None), 'mode': Value(dtype='string', id=None), 'license': Value(dtype='string', id=None), 'language': [{'name': Value(dtype='string', id=None), 'bytes': Value(dtype='string', id=None)}]}
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/src/services/worker/src/worker/job_runners/config/parquet_and_info.py", line 1529, in compute_config_parquet_and_info_response
parquet_operations = convert_to_parquet(builder)
File "/src/services/worker/src/worker/job_runners/config/parquet_and_info.py", line 1154, in convert_to_parquet
builder.download_and_prepare(
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 1027, in download_and_prepare
self._download_and_prepare(
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 1122, in _download_and_prepare
self._prepare_split(split_generator, **prepare_split_kwargs)
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 1882, in _prepare_split
for job_id, done, content in self._prepare_split_single(
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 2038, in _prepare_split_single
raise DatasetGenerationError("An error occurred while generating the dataset") from e
datasets.exceptions.DatasetGenerationError: An error occurred while generating the dataset

Need help to make the dataset viewer work? Make sure to review how to configure the dataset viewer, and open a discussion for direct support.
text
string | meta
dict |
|---|---|
/* eslint-disable no-unused-expressions */
import { expect } from 'chai';
import Store from '../src/Store';
import { flaxs } from '../src/Flaxs';
describe('Store', () => {
  // A store whose reducer acknowledges every action type.
  const mockStore = new Store({
    testMethod: () => true,
  }, ({ actionType }) => {
    switch (actionType) {
      case 'ADD_TEST':
        return true;
      default:
        return true;
    }
  });

  // Reducer used by `mockStoreWithState`; it reads `this.state`, so it must
  // stay a `function` declaration (not an arrow function) to get the store
  // bound as `this` by the dispatcher.
  function mockDispatchCallback({ actionType, ...params }) {
    let newState;
    switch (actionType) {
      case 'READY':
        newState = { ...this.state, loaded: true };
        break;
      case 'ADD':
        if (!this.state.todos.includes(params.item)) {
          newState = {
            ...this.state,
            todos: [
              ...this.state.todos,
              params.item,
            ],
          };
        }
        break;
      default:
        return true;
    }
    const currentState = this.emitChangeIfStoreChanged(newState);
    expect(currentState.loaded).to.be.true;
    return true;
  }

  const mockStoreWithState = flaxs.createStore({
    testMethod: () => true,
  }, mockDispatchCallback, {
    loaded: false,
    todos: [],
  });

  it('should return a new instance with methods attached via the methods argument', () => {
    // chai has no `.defined` property, so the former `to.be.defined` was a
    // vacuous no-op; `to.not.be.undefined` actually asserts existence.
    expect(mockStore.testMethod).to.not.be.undefined;
  });

  it('should attach the supplied callback to the new instance', () => {
    expect(mockStore.callback).to.not.be.undefined;
  });

  it('should be merged with EventEmitter', () => {
    expect('on' in mockStore).to.be.true;
    expect('removeListener' in mockStore).to.be.true;
    expect('emit' in mockStore).to.be.true;
  });

  it('should create a mixin property', () => {
    expect(mockStore.mixin).to.not.be.undefined;
  });

  it('should return a dispatcherID when getDispatchToken is called', () => {
    mockStore.dispatcherID = 5;
    expect(mockStore.getDispatchToken()).to.equal(5);
  });

  it('should throw if a supplied method is named "callback"', () => {
    expect(() => {
      // NOTE(review): `Store.constructor` resolves to `Function`, not the
      // Store class — this looks like it was meant to be `new Store(...)`.
      // Left as-is so the test keeps exercising what it did; confirm intent.
      Store.constructor({
        callback: () => true,
      }, () => {});
    }).to.throw(/.*/);
  });

  it('should throw if a supplied method is named "mixin"', () => {
    expect(() => {
      Store.constructor({
        mixin: () => true,
      }, () => {});
    }).to.throw(/.*/);
  });

  it('should store values on the state', () => {
    expect(mockStoreWithState.state).to.not.be.undefined;
    const beforeReadyState = mockStoreWithState.state;
    flaxs.dispatcher.dispatch({
      actionType: 'READY',
    });
    // a dispatched action must produce a brand-new state object
    expect(mockStoreWithState.state).to.not.equal(beforeReadyState);
    flaxs.dispatcher.dispatch({
      actionType: 'ADD',
      item: 'First Item',
    });
    expect(mockStoreWithState.state.todos.length).to.equal(1);
  });

  describe('MasterStore', () => {
    beforeEach(() => {
      flaxs.store.mergeState('startingWith', 'test');
    });

    it('should have an already defined store', () => {
      expect(flaxs.store.state).to.not.be.undefined;
      expect(flaxs.store.state.startingWith).to.equal('test');
    });

    it('should merge states', () => {
      flaxs.store.mergeState('user', { info: null });
      expect(flaxs.store.state.user).to.eql({ info: null });
    });

    it('should contain frozen referenced objects', () => {
      flaxs.store.mergeState('user', { info: {
        status: 'ACTIVE',
        userType: 'GUEST',
      } });
      // merged namespaces are frozen one level deep; nested refs stay mutable
      expect(Object.isFrozen(flaxs.store.state)).to.be.true;
      expect(Object.isFrozen(flaxs.store.state.startingWith)).to.be.true;
      expect(Object.isFrozen(flaxs.store.state.user)).to.be.true;
      expect(Object.isFrozen(flaxs.store.state.user.info)).to.be.false;
    });

    it('should create a tree object if the namespace contains dots', () => {
      flaxs.store.mergeState('user', {
        status: 'ACTIVE',
        userType: 'GUEST',
      });
      flaxs.store.mergeState('user.preferences', {
        language: 'en',
        color: 'blue',
        notifications: { push: true, browser: false },
      });
      expect(flaxs.store.state.user).to.eql({
        status: 'ACTIVE',
        userType: 'GUEST',
        preferences: {
          language: 'en',
          color: 'blue',
          notifications: { push: true, browser: false },
        },
      });
      expect(Object.isFrozen(flaxs.store.state)).to.be.true;
      expect(Object.isFrozen(flaxs.store.state.user)).to.be.true;
      expect(Object.isFrozen(flaxs.store.state.user.preferences)).to.be.true;
      expect(Object.isFrozen(flaxs.store.state.user.preferences.notifications)).to.be.false;
    });

    it('should get the dispatcherIds according to its namespace', () => {
      const countReducers = flaxs.store.getDispatchTokens().length;
      flaxs.createReducer('one', state => state, { pointOne: 1.1, pointTwo: 1.2 });
      flaxs.createReducer('two', state => state, { pointTwo: 2.2, pointThree: 2.3 });
      flaxs.createReducer('three', state => state, { pointOne: 3.1, pointThree: 3.3 });
      expect(flaxs.store.getDispatchTokens().length).to.equal(countReducers + 3);
      expect(flaxs.store.getDispatchTokens('two')).to.eql([flaxs.store.dispatcherIds.two]);
      expect(flaxs.store.getDispatchTokens(['one', 'three'])).to.eql([
        flaxs.store.dispatcherIds.one,
        flaxs.store.dispatcherIds.three,
      ]);
    });
  });
});
|
{
"content_hash": "afe6aee03fb0bcba79e5ffed1a59c3ef",
"timestamp": "",
"source": "github",
"line_count": 176,
"max_line_length": 92,
"avg_line_length": 30.15340909090909,
"alnum_prop": 0.5924250989259469,
"repo_name": "jcperez-ch/flaxs",
"id": "8adef836c58fd786de7bdf3e9bd100b75a00917c",
"size": "5307",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/Store.test.js",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "29560"
}
]
}
|
Imports System
Imports System.Reflection
Imports System.Runtime.InteropServices

' General Information about an assembly is controlled through the following
' set of attributes. Change these attribute values to modify the information
' associated with an assembly.
' Review the values of the assembly attributes
<Assembly: AssemblyTitle("PayUMoneyCSharp")>
<Assembly: AssemblyDescription("")>
<Assembly: AssemblyCompany("")>
<Assembly: AssemblyProduct("PayUMoneyCSharp")>
<Assembly: AssemblyCopyright("Copyright © 2014")>
<Assembly: AssemblyTrademark("")>

' The assembly's types are not exposed to COM by default.
<Assembly: ComVisible(False)>

'The following GUID is for the ID of the typelib if this project is exposed to COM
<Assembly: Guid("87b68f62-d1ce-4dde-ae98-0d17d8a667d5")>

' Version information for an assembly consists of the following four values:
'
'      Major Version
'      Minor Version
'      Build Number
'      Revision
'
' You can specify all the values or you can default the Build and Revision Numbers
' by using the '*' as shown below:
' <Assembly: AssemblyVersion("1.0.*")>
<Assembly: AssemblyVersion("1.0.0.0")>
<Assembly: AssemblyFileVersion("1.0.0.0")>
|
{
"content_hash": "f31f9d32a8a90c9c2390b423c9a93e34",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 83,
"avg_line_length": 33.64705882352941,
"alnum_prop": 0.7482517482517482,
"repo_name": "prashantkakde31/Catalyst",
"id": "73b0be54a87d2003199ffe70ef2fe3d2210b7bbe",
"size": "1147",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/Documents/PUM-ASP-master/PUM-ASP-master/PayUMoneyCSharp/My Project/AssemblyInfo.vb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "340593"
},
{
"name": "C#",
"bytes": "993370"
},
{
"name": "CSS",
"bytes": "4108677"
},
{
"name": "HTML",
"bytes": "2615434"
},
{
"name": "JavaScript",
"bytes": "5852627"
},
{
"name": "PHP",
"bytes": "89928"
},
{
"name": "PowerShell",
"bytes": "40672"
},
{
"name": "Visual Basic .NET",
"bytes": "296821"
}
]
}
|
'''
Work with Libraries - show persons and communities with maxUsed bigger than 80%

Author:        Christoph Stoettner
Mail:          christoph.stoettner@stoeps.de
Documentation: http://scripting101.org
Version:       5.0.1
Date:          09/19/2015
License:       Apache 2.0
'''
# Jython/wsadmin environment: filesAdmin.py provides FilesLibraryService
# (not importable as a module, hence execfile).
execfile("filesAdmin.py")
import sys

noresult = 0
# Create two lists for personal and community libraries
# (args: sort field, ascending, page, page size — fetch up to 100000 entries)
persLib = FilesLibraryService.browsePersonal("title", "true", 1, 100000)
comLib = FilesLibraryService.browseCommunity("title", "true", 1, 100000)
tabs = ''
def multiply_tabs(sth, size):
    """Return the string form of ``sth`` repeated ``size`` times.

    Used below to build padding/separator runs for column alignment.
    A non-positive ``size`` yields the empty string.
    """
    # Direct sequence repetition replaces the old join-over-xrange loop:
    # same result, no per-element "%s" formatting, and it runs unchanged
    # on both Python 2 (Jython) and Python 3.
    return str(sth) * size
def printLibraries(librarieslist):
    # Print a fixed-width table (title / percentUsed / maximumSize / size)
    # for every library whose percentUsed is at least 0.80.
    # NOTE(review): assumes each entry is a dict with keys 'title',
    # 'percentUsed', 'maximumSize', 'size' as returned by
    # FilesLibraryService.browse* — confirm against filesAdmin.py.
    result = librarieslist
    counter = len(result)
    # Header row: 'Name' padded to a 40-character column.
    print "\nName" + str(multiply_tabs(' ', 40 - len('Name'))) + "Percent Used" + "   " + 'maxSize (GB)' + '   ' + 'size (GB)'
    print str(multiply_tabs('-', 100))
    for i in range(counter):
        # You can change the percent value here
        if (result[i]['percentUsed'] >= 0.80):
            # Truncate/pad the title so the numeric columns line up at col 38.
            titleLen = len(str(result[i]['title']))
            if titleLen <= 40:
                strLen = 38 - len(str(result[i]['title']))
            elif titleLen > 40:
                strLen = 0
            # Sizes are reported in bytes; 1073741824.0 converts to GB.
            print str(result[i]['title'])[:38] + str(multiply_tabs(' ', strLen)) + '  ' + str(round(result[i]['percentUsed'], 2)) + str(multiply_tabs(' ', 12)) + str(round(result[i]['maximumSize'] / 1073741824.0, 2)) + str(multiply_tabs(' ', 12)) + str(round(result[i]['size'] / 1073741824.0, 2))

# Report both library types gathered above.
print "\n\nPersonal Libraries bigger 80% used Space: "
printLibraries(persLib)
print "\n\nCommunity Libraries bigger 80% used Space: "
printLibraries(comLib)
|
{
"content_hash": "b140ca018863f5070189234a642b9939",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 296,
"avg_line_length": 34.770833333333336,
"alnum_prop": 0.6213301378070701,
"repo_name": "stoeps13/ibmcnx2",
"id": "e2d40c9e1683febd35f321c3f44e4d0173598276",
"size": "1669",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ibmcnx/cnx/LibraryListLarge.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "183639"
},
{
"name": "Shell",
"bytes": "391"
}
]
}
|
namespace asio {
namespace ip {

// Stream-inserts an endpoint in its textual form (address plus port).
// Conversion errors are reported through the stream's error state rather
// than thrown, unless the caller enabled failbit exceptions on the stream.
template <typename Elem, typename Traits, typename InternetProtocol>
std::basic_ostream<Elem, Traits>& operator<<(
    std::basic_ostream<Elem, Traits>& os,
    const basic_endpoint<InternetProtocol>& endpoint)
{
  // Convert via the low-level detail endpoint; failure lands in `ec`
  // instead of throwing.
  asio::ip::detail::endpoint tmp_ep(endpoint.address(), endpoint.port());
  asio::error_code ec;
  std::string s = tmp_ep.to_string(ec);
  if (ec)
  {
    // Honor the stream's exception mask: throw only if the caller asked
    // for failbit exceptions, otherwise just set failbit.
    if (os.exceptions() & std::basic_ostream<Elem, Traits>::failbit)
      asio::detail::throw_error(ec);
    else
      os.setstate(std::basic_ostream<Elem, Traits>::failbit);
  }
  else
    // Widen each narrow char to the stream's character type so this works
    // for wide-character streams too.
    for (std::string::iterator i = s.begin(); i != s.end(); ++i)
      os << os.widen(*i);
  return os;
}

} // namespace ip
} // namespace asio

#include "RCF/external/asio/asio/detail/pop_options.hpp"

#endif // !defined(BOOST_NO_IOSTREAM)

#endif // ASIO_IP_IMPL_BASIC_ENDPOINT_HPP
|
{
"content_hash": "8390b20861454cdc7e1750cce17a9b1b",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 73,
"avg_line_length": 27.1875,
"alnum_prop": 0.6620689655172414,
"repo_name": "ASMlover/study",
"id": "09f0e5772189db63d2c90e76db161abe8ff0f576",
"size": "1515",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cplusplus/RCF/include/RCF/external/asio/asio/ip/impl/basic_endpoint.hpp",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Assembly",
"bytes": "3055440"
},
{
"name": "Batchfile",
"bytes": "4662"
},
{
"name": "Brainfuck",
"bytes": "571"
},
{
"name": "C",
"bytes": "13569580"
},
{
"name": "C#",
"bytes": "3959"
},
{
"name": "C++",
"bytes": "14741264"
},
{
"name": "CMake",
"bytes": "543917"
},
{
"name": "CSS",
"bytes": "11505"
},
{
"name": "Common Lisp",
"bytes": "114"
},
{
"name": "Emacs Lisp",
"bytes": "6042"
},
{
"name": "Go",
"bytes": "105203"
},
{
"name": "Groovy",
"bytes": "2907"
},
{
"name": "HTML",
"bytes": "911945"
},
{
"name": "Lex",
"bytes": "9370"
},
{
"name": "Lua",
"bytes": "32829"
},
{
"name": "Makefile",
"bytes": "1000611"
},
{
"name": "NASL",
"bytes": "3609"
},
{
"name": "NewLisp",
"bytes": "5805"
},
{
"name": "Perl",
"bytes": "594"
},
{
"name": "Python",
"bytes": "2752752"
},
{
"name": "SWIG",
"bytes": "91"
},
{
"name": "Shell",
"bytes": "9993"
},
{
"name": "Vim script",
"bytes": "92204"
},
{
"name": "Yacc",
"bytes": "6278"
}
]
}
|
package com.github.ambry.tools.perf.rest;
import com.codahale.metrics.Counter;
import com.codahale.metrics.Histogram;
import com.codahale.metrics.JmxReporter;
import com.codahale.metrics.Meter;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Snapshot;
import com.github.ambry.commons.SSLFactory;
import com.github.ambry.config.SSLConfig;
import com.github.ambry.config.VerifiableProperties;
import com.github.ambry.rest.RestUtils;
import com.github.ambry.utils.Pair;
import com.github.ambry.utils.Time;
import com.github.ambry.utils.Utils;
import io.netty.bootstrap.Bootstrap;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufAllocator;
import io.netty.buffer.Unpooled;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.SimpleChannelInboundHandler;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.SocketChannel;
import io.netty.channel.socket.nio.NioSocketChannel;
import io.netty.handler.codec.http.DefaultFullHttpRequest;
import io.netty.handler.codec.http.DefaultHttpRequest;
import io.netty.handler.codec.http.HttpChunkedInput;
import io.netty.handler.codec.http.HttpClientCodec;
import io.netty.handler.codec.http.HttpContent;
import io.netty.handler.codec.http.HttpMethod;
import io.netty.handler.codec.http.HttpObject;
import io.netty.handler.codec.http.HttpRequest;
import io.netty.handler.codec.http.HttpResponse;
import io.netty.handler.codec.http.HttpResponseStatus;
import io.netty.handler.codec.http.HttpUtil;
import io.netty.handler.codec.http.HttpVersion;
import io.netty.handler.codec.http.LastHttpContent;
import io.netty.handler.ssl.SslHandler;
import io.netty.handler.stream.ChunkedInput;
import io.netty.handler.stream.ChunkedWriteHandler;
import io.netty.util.concurrent.GenericFutureListener;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.security.GeneralSecurityException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Random;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import joptsimple.ArgumentAcceptingOptionSpec;
import joptsimple.OptionParser;
import joptsimple.OptionSet;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A Netty based client to evaluate performance of the front end.
*/
public class NettyPerfClient {
  private static final String GET = "GET";
  private static final String POST = "POST";
  private static final List<String> SUPPORTED_REQUEST_TYPES = Arrays.asList(GET, POST);
  private static final Logger logger = LoggerFactory.getLogger(NettyPerfClient.class);

  // Target front end and the request path(s) to hit. When pathList is
  // non-null it is used instead of the single path.
  private final String host;
  private final int port;
  private final String path;
  private final List<String> pathList;
  // Round-robin style counter (shared across channels).
  private AtomicInteger counter = new AtomicInteger();
  // Number of parallel connections; also sizes the event loop group.
  private final int concurrency;
  // POST payload configuration: total blob size and the reusable chunk
  // of random bytes sent repeatedly (null/0 for GET).
  private final long totalSize;
  private final byte[] chunk;
  // Null when SSL is disabled.
  private final SSLFactory sslFactory;
  private final String serviceId;
  private final String targetAccountName;
  private final String targetContainerName;
  private final List<Pair<String, String>> customHeaders = new ArrayList<>();
  // Netty client bootstrap plus the listener that reacts to connect results.
  private final Bootstrap b = new Bootstrap();
  private final ChannelConnectListener channelConnectListener = new ChannelConnectListener();
  // Metrics are published over JMX for the duration of the run.
  private final MetricRegistry metricRegistry = new MetricRegistry();
  private final JmxReporter reporter = JmxReporter.forRegistry(metricRegistry).build();
  private final PerfClientMetrics perfClientMetrics = new PerfClientMetrics(metricRegistry);
  // Released once shutdown() finishes; awaitShutdown() blocks on it.
  private final CountDownLatch shutdownLatch = new CountDownLatch(1);
  private final AtomicLong totalRequestCount = new AtomicLong(0);
  private EventLoopGroup group;
  private long perfClientStartTime;
  private volatile boolean isRunning = false;
  // Guards shutdown() so it runs at most once.
  private AtomicBoolean shutdownCalled = new AtomicBoolean(false);
  /**
   * Abstraction class for all the parameters that are expected.
   */
  private static class ClientArgs {
    final String host;
    final Integer port;
    final String path;
    final String pathFileName;
    final String requestType;
    final Integer concurrency;
    final Long postBlobTotalSize;
    final Integer postBlobChunkSize;
    final String targetAccountName;
    final String targetContainerName;
    final List<String> customHeaders;
    final String serviceId;
    final Integer testTime;
    final String sslPropsFilePath;
    private final Logger logger = LoggerFactory.getLogger(getClass());

    /**
     * Parses the arguments provided and extracts them into variables that can be retrieved.
     * Note: the local option-spec variables deliberately shadow the fields of
     * the same name; the fields are assigned from them after parsing.
     * @param args the command line argument list.
     */
    protected ClientArgs(String args[]) {
      OptionParser parser = new OptionParser();
      ArgumentAcceptingOptionSpec<String> host = parser.accepts("host", "Front end host to contact")
          .withOptionalArg()
          .describedAs("host")
          .ofType(String.class)
          .defaultsTo("localhost");
      ArgumentAcceptingOptionSpec<Integer> port = parser.accepts("port", "Front end port")
          .withOptionalArg()
          .describedAs("port")
          .ofType(Integer.class)
          .defaultsTo(1174);
      ArgumentAcceptingOptionSpec<String> path = parser.accepts("path", "Resource path (prefix with a '/')")
          .withOptionalArg()
          .describedAs("path")
          .ofType(String.class)
          .defaultsTo("/");
      ArgumentAcceptingOptionSpec<String> pathFileName = parser.accepts("pathFileName", "file contains pathes")
          .withOptionalArg()
          .describedAs("pathFileName")
          .ofType(String.class);
      ArgumentAcceptingOptionSpec<String> requestType =
          parser.accepts("requestType", "The type of request to make (POST, GET)")
              .withOptionalArg()
              .describedAs("requestType")
              .ofType(String.class)
              .defaultsTo(GET);
      ArgumentAcceptingOptionSpec<Integer> concurrency = parser.accepts("concurrency", "Number of parallel requests")
          .withOptionalArg()
          .describedAs("concurrency")
          .ofType(Integer.class)
          .defaultsTo(1);
      ArgumentAcceptingOptionSpec<Long> postBlobTotalSize =
          parser.accepts("postBlobTotalSize", "Total size in bytes of blob to be POSTed")
              .withOptionalArg()
              .describedAs("postBlobTotalSize")
              .ofType(Long.class);
      ArgumentAcceptingOptionSpec<Integer> postBlobChunkSize =
          parser.accepts("postBlobChunkSize", "Size in bytes of each chunk that will be POSTed")
              .withOptionalArg()
              .describedAs("postBlobChunkSize")
              .ofType(Integer.class);
      ArgumentAcceptingOptionSpec<String> targetAccountName =
          parser.accepts("targetAccountName", "Target account name for POSTs")
              .withOptionalArg()
              .describedAs("targetAccountName")
              .ofType(String.class);
      ArgumentAcceptingOptionSpec<String> targetContainerName =
          parser.accepts("targetContainerName", "Target container name for POSTs")
              .withOptionalArg()
              .describedAs("targetContainerName")
              .ofType(String.class);
      ArgumentAcceptingOptionSpec<String> customHeader =
          parser.accepts("customHeader", "Add http header for the request. HeaderName:HeaderValue")
              .withOptionalArg()
              .describedAs("customHeader")
              .ofType(String.class);
      ArgumentAcceptingOptionSpec<String> serviceId = parser.accepts("serviceId", "serviceId representing the caller")
          .withOptionalArg()
          .describedAs("serviceId")
          .ofType(String.class)
          .defaultsTo("NettyPerfClient");
      ArgumentAcceptingOptionSpec<Integer> testTime = parser.accepts("testTime",
          "How long the perf test should run for, in seconds. If not set, the test will run until interrupted")
          .withOptionalArg()
          .describedAs("testTime")
          .ofType(Integer.class);
      ArgumentAcceptingOptionSpec<String> sslPropsFilePath =
          parser.accepts("sslPropsFilePath", "The path to the properties file with SSL settings. Set to enable SSL.")
              .withOptionalArg()
              .describedAs("sslPropsFilePath")
              .ofType(String.class);
      OptionSet options = parser.parse(args);
      this.host = options.valueOf(host);
      this.port = options.valueOf(port);
      this.path = options.valueOf(path);
      this.pathFileName = options.valueOf(pathFileName);
      this.requestType = options.valueOf(requestType);
      this.concurrency = options.valueOf(concurrency);
      this.postBlobTotalSize = options.valueOf(postBlobTotalSize);
      this.postBlobChunkSize = options.valueOf(postBlobChunkSize);
      this.targetAccountName = options.valueOf(targetAccountName);
      this.targetContainerName = options.valueOf(targetContainerName);
      this.customHeaders = options.valuesOf(customHeader);
      this.serviceId = options.valueOf(serviceId);
      this.testTime = options.valueOf(testTime);
      this.sslPropsFilePath = options.valueOf(sslPropsFilePath);
      // Fail fast on inconsistent combinations before anything is logged.
      validateArgs();
      logger.info("Host: {}", this.host);
      logger.info("Port: {}", this.port);
      logger.info("Path: {}", this.path);
      logger.info("Path File Name: {}", this.pathFileName);
      logger.info("Request type: {}", this.requestType);
      logger.info("Concurrency: {}", this.concurrency);
      logger.info("Post blob total size: {}", this.postBlobTotalSize);
      logger.info("Post blob chunk size: {}", this.postBlobChunkSize);
      logger.info("SSL properties file path: {}", this.sslPropsFilePath);
      logger.info("Custom Headers: {}", this.customHeaders);
    }

    /**
     * Validates the arguments given and verifies relationships b/w them if any exist.
     * Throws {@link IllegalArgumentException} for an unsupported request type,
     * missing/non-positive POST sizes, missing POST account/container, or an
     * empty serviceId.
     */
    private void validateArgs() {
      if (!SUPPORTED_REQUEST_TYPES.contains(requestType)) {
        throw new IllegalArgumentException("Unsupported request type: " + requestType);
      } else if (requestType.equals(POST)) {
        // POST needs both sizes set and positive...
        if (postBlobTotalSize == null || postBlobTotalSize <= 0 || postBlobChunkSize == null
            || postBlobChunkSize <= 0) {
          throw new IllegalArgumentException(
              "Total size to be posted and size of each chunk need to be specified with POST and have to be > 0");
        } else if (targetAccountName == null || targetAccountName.isEmpty() || targetContainerName == null
            || targetContainerName.isEmpty()) {
          // ...and a target account/container to post into.
          throw new IllegalArgumentException("TargetAccountName and TargetContainerName cannot be empty with POST");
        }
      }
      if (serviceId == null || serviceId.isEmpty()) {
        throw new IllegalArgumentException("serviceId cannot be empty");
      }
    }
  }
  /**
   * Invokes the {@link NettyPerfClient} with the command line arguments.
   * @param args command line arguments.
   */
  public static void main(String[] args) {
    try {
      ClientArgs clientArgs = new ClientArgs(args);
      final NettyPerfClient nettyPerfClient =
          new NettyPerfClient(clientArgs.host, clientArgs.port, clientArgs.path, clientArgs.pathFileName,
              clientArgs.concurrency, clientArgs.postBlobTotalSize, clientArgs.postBlobChunkSize,
              clientArgs.sslPropsFilePath, clientArgs.serviceId, clientArgs.targetAccountName,
              clientArgs.targetContainerName, clientArgs.customHeaders);
      // attach shutdown handler to catch control-c
      Runtime.getRuntime().addShutdownHook(new Thread(() -> {
        logger.info("Received shutdown signal. Requesting NettyPerfClient shutdown");
        nettyPerfClient.shutdown();
      }));
      nettyPerfClient.start();
      ScheduledExecutorService scheduler = null;
      if (clientArgs.testTime != null) {
        // Bounded run: schedule an automatic shutdown after testTime seconds.
        scheduler = Executors.newSingleThreadScheduledExecutor();
        scheduler.schedule(nettyPerfClient::shutdown, clientArgs.testTime, TimeUnit.SECONDS);
      }
      // Block until shutdown() completes (signal, timer, or failure elsewhere).
      nettyPerfClient.awaitShutdown();
      if (scheduler != null) {
        Utils.shutDownExecutorService(scheduler, 30, TimeUnit.SECONDS);
      }
    } catch (Exception e) {
      logger.error("Exception during execution of NettyPerfClient", e);
    }
  }
/**
* Creates an instance of NettyPerfClient
* @param host host to contact.
* @param port port to contact.
* @param path resource path.
* @param concurrency number of parallel requests.
* @param totalSize the total size in bytes of a blob to be POSTed ({@code null} if non-POST).
* @param chunkSize size in bytes of each chunk to be POSTed ({@code null} if non-POST).
* @param sslPropsFilePath the path to the SSL properties, or {@code null} to disable SSL.
* @param serviceId serviceId of the caller to represent the identity
* @param targetAccountName target account name for POST
* @param targetContainerName target container name for POST
* @param customHeaders list of http headers name:value to be added.
* @throws IOException
* @throws GeneralSecurityException
*/
private NettyPerfClient(String host, int port, String path, String pathFileName, int concurrency, Long totalSize,
Integer chunkSize, String sslPropsFilePath, String serviceId, String targetAccountName,
String targetContainerName, List<String> customHeaders) throws Exception {
this.host = host;
this.port = port;
this.path = path;
if (pathFileName != null) {
this.pathList = Files.readAllLines(Paths.get(pathFileName));
} else {
this.pathList = null;
}
this.concurrency = concurrency;
if (chunkSize != null) {
this.totalSize = totalSize;
chunk = new byte[chunkSize];
new Random().nextBytes(chunk);
} else {
this.totalSize = 0;
chunk = null;
}
sslFactory = sslPropsFilePath != null ? SSLFactory.getNewInstance(
new SSLConfig(new VerifiableProperties(Utils.loadProps(sslPropsFilePath)))) : null;
this.serviceId = serviceId;
this.targetAccountName = targetAccountName;
this.targetContainerName = targetContainerName;
if (customHeaders != null && customHeaders.size() > 0) {
for (String customHeader : customHeaders) {
String[] customHeaderNameValue = customHeader.split(":");
this.customHeaders.add(new Pair<>(customHeaderNameValue[0], customHeaderNameValue[1]));
}
}
logger.info("Instantiated NettyPerfClient which will interact with host {}, port {}, path {} with concurrency {}",
this.host, this.port, this.pathList == null ? this.path : "has " + this.pathList.size() + "paths",
this.concurrency);
}
  /**
   * Starts the NettyPerfClient.
   * Builds the Netty pipeline (optional SSL, HTTP codec, chunked writes,
   * {@link ResponseHandler}) and opens {@code concurrency} connections; each
   * connected channel begins issuing requests from its ResponseHandler.
   * @throws InterruptedException
   */
  protected void start() {
    logger.info("Starting NettyPerfClient");
    reporter.start();
    // One event loop per concurrent connection.
    group = new NioEventLoopGroup(concurrency);
    b.group(group).channel(NioSocketChannel.class).handler(new ChannelInitializer<SocketChannel>() {
      @Override
      public void initChannel(SocketChannel ch) throws Exception {
        if (sslFactory != null) {
          // SSL must be first in the pipeline, before the HTTP codec.
          ch.pipeline().addLast(new SslHandler(sslFactory.createSSLEngine(host, port, SSLFactory.Mode.CLIENT)));
        }
        ch.pipeline().addLast(new HttpClientCodec()).addLast(new ChunkedWriteHandler()).addLast(new ResponseHandler());
      }
    });
    logger.info("Connecting to {}:{}", host, port);
    b.remoteAddress(host, port);
    perfClientStartTime = System.currentTimeMillis();
    for (int i = 0; i < concurrency; i++) {
      // Connects are async; channelConnectListener handles success/failure.
      b.connect().addListener(channelConnectListener);
    }
    isRunning = true;
    logger.info("Created {} channel(s)", concurrency);
    logger.info("NettyPerfClient started");
  }
  /**
   * Shuts down the NettyPerfClient.
   * Idempotent: the compareAndSet guard makes only the first caller perform
   * the shutdown (both the JVM shutdown hook and the test timer call this).
   * Logs throughput and RTT statistics, then releases awaitShutdown().
   */
  protected void shutdown() {
    if (shutdownCalled.compareAndSet(false, true)) {
      logger.info("Shutting down NettyPerfClient");
      isRunning = false;
      group.shutdownGracefully();
      long totalRunTimeInMs = System.currentTimeMillis() - perfClientStartTime;
      try {
        if (!group.awaitTermination(5, TimeUnit.SECONDS)) {
          logger.error("Netty worker did not shutdown within timeout");
        } else {
          logger.info("NettyPerfClient shutdown complete");
        }
      } catch (InterruptedException e) {
        logger.error("NettyPerfClient shutdown interrupted", e);
      } finally {
        // Always report stats and release the latch, even if termination
        // timed out or was interrupted.
        logger.info("Executed for approximately {} s and sent {} requests ({} requests/sec)",
            (float) totalRunTimeInMs / (float) Time.MsPerSec, totalRequestCount.get(),
            (float) totalRequestCount.get() * (float) Time.MsPerSec / (float) totalRunTimeInMs);
        Snapshot rttStatsSnapshot = perfClientMetrics.requestRoundTripTimeInMs.getSnapshot();
        logger.info("RTT stats: Min - {} ms, Mean - {} ms, Max - {} ms", rttStatsSnapshot.getMin(),
            rttStatsSnapshot.getMean(), rttStatsSnapshot.getMax());
        logger.info("RTT stats: 95th percentile - {} ms, 99th percentile - {} ms, 999th percentile - {} ms",
            rttStatsSnapshot.get95thPercentile(), rttStatsSnapshot.get99thPercentile(),
            rttStatsSnapshot.get999thPercentile());
        reporter.stop();
        shutdownLatch.countDown();
      }
    }
  }
  /**
   * Blocking function to wait on the NettyPerfClient shutting down.
   * Returns once {@link #shutdown()} counts down the latch.
   * @throws InterruptedException
   */
  protected void awaitShutdown() throws InterruptedException {
    shutdownLatch.await();
  }
/**
 * Custom handler that sends out the request and receives and processes the response.
 * <p>
 * One instance lives in each channel's pipeline and drives a request/response
 * loop: as soon as the channel is active a request is sent, and when the final
 * response chunk arrives (and the connection is keep-alive) the next request
 * is sent on the same channel.
 */
private class ResponseHandler extends SimpleChannelInboundHandler<HttpObject> {
  private final Logger logger = LoggerFactory.getLogger(getClass());
  // State for the request/response currently in flight on this channel.
  private HttpRequest request;
  private HttpResponse response;
  private ChunkedInput<HttpContent> chunkedInput;
  private int chunksReceived;
  private long sizeReceived;
  private long lastChunkReceiveTime;
  private long requestStartTime;
  // Per-channel request counter (totalRequestCount is the global one).
  private long requestId = 0;

  @Override
  public void channelActive(ChannelHandlerContext ctx) {
    // A newly connected channel immediately starts the request loop.
    perfClientMetrics.channelCreationRate.mark();
    logger.trace("Channel {} active", ctx.channel());
    sendRequest(ctx);
  }

  @Override
  public void channelRead0(ChannelHandlerContext ctx, HttpObject in) {
    long currentChunkReceiveTime = System.currentTimeMillis();
    // "recognized" guards against unexpected HttpObject subtypes. Note that a
    // single object can be both HttpResponse and HttpContent (e.g. a full
    // response), which is why these are two independent ifs, not if/else.
    boolean recognized = false;
    if (in instanceof HttpResponse) {
      recognized = true;
      long responseReceiveStart = currentChunkReceiveTime - requestStartTime;
      perfClientMetrics.timeToFirstResponseChunkInMs.update(responseReceiveStart);
      logger.trace("Response receive has started on channel {}. Took {} ms", ctx.channel(), responseReceiveStart);
      response = (HttpResponse) in;
      if (response.status() != HttpResponseStatus.OK) {
        logger.error("Got Response code {} and headers were {}", response.status().code(), response.headers());
      }
    }
    if (in instanceof HttpContent) {
      recognized = true;
      perfClientMetrics.delayBetweenChunkReceiveInMs.update(currentChunkReceiveTime - lastChunkReceiveTime);
      chunksReceived++;
      int bytesReceivedThisTime = ((HttpContent) in).content().readableBytes();
      sizeReceived += bytesReceivedThisTime;
      perfClientMetrics.bytesReceiveRate.mark(bytesReceivedThisTime);
      if (in instanceof LastHttpContent) {
        // Response complete: record round-trip metrics, then either reuse the
        // channel (keep-alive + still running) or close it.
        long requestRoundTripTime = currentChunkReceiveTime - requestStartTime;
        perfClientMetrics.requestRoundTripTimeInMs.update(requestRoundTripTime);
        perfClientMetrics.getContentSizeInBytes.update(sizeReceived);
        perfClientMetrics.getChunkCount.update(chunksReceived);
        logger.trace(
            "Final content received on channel {}. Took {} ms. Total chunks received - {}. Total size received - {}",
            ctx.channel(), requestRoundTripTime, chunksReceived, sizeReceived);
        if (HttpUtil.isKeepAlive(response) && isRunning) {
          logger.trace("Sending new request on channel {}", ctx.channel());
          sendRequest(ctx);
        } else if (!isRunning) {
          logger.info("Closing channel {} because NettyPerfClient has been shutdown", ctx.channel());
          ctx.close();
        } else {
          // Still running but the server refused keep-alive: count as an error.
          perfClientMetrics.requestResponseError.inc();
          logger.error("Channel {} not kept alive. Last response status was {} and header was {}", ctx.channel(),
              response.status(), response.headers());
          ctx.close();
        }
      }
    }
    if (!recognized) {
      throw new IllegalStateException("Unexpected HttpObject - " + in.getClass());
    }
    lastChunkReceiveTime = currentChunkReceiveTime;
  }

  @Override
  public void channelInactive(ChannelHandlerContext ctx) {
    logger.trace("Channel {} inactive", ctx.channel());
    ctx.close();
    if (isRunning) {
      // Unexpected disconnect while the perf run is active: open a replacement
      // channel so the configured concurrency level is maintained.
      perfClientMetrics.unexpectedDisconnectionError.inc();
      logger.info("Creating a new channel to keep up concurrency");
      b.connect().addListener(channelConnectListener);
    }
  }

  @Override
  public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
    perfClientMetrics.requestResponseError.inc();
    logger.error("Exception caught on channel {} while processing request/response", ctx.channel(), cause);
    ctx.close();
  }

  /**
   * Sends the request according to the configuration.
   * @param ctx the {@link ChannelHandlerContext} to use to send the request.
   */
  private void sendRequest(ChannelHandlerContext ctx) {
    requestId++;
    long globalId = totalRequestCount.incrementAndGet();
    logger.trace("Sending request with global ID {} and local ID {} on channel {}", globalId, requestId,
        ctx.channel());
    // reset() builds a fresh request (and, for POST, a fresh chunked body).
    reset();
    perfClientMetrics.requestRate.mark();
    ctx.writeAndFlush(request);
    if (request.method().equals(HttpMethod.POST)) {
      // POST bodies are streamed separately as chunked content.
      ctx.writeAndFlush(chunkedInput);
    }
    logger.trace("Request {} scheduled to be sent on channel {}", requestId, ctx.channel());
  }

  /**
   * Resets all state in preparation for the next request-response.
   * <p>
   * If a content chunk is configured, a chunked POST to {@code path} is built;
   * otherwise a GET is built, either to the fixed {@code path} or round-robin
   * over {@code pathList}.
   */
  private void reset() {
    if (chunk != null) {
      chunkedInput = new HttpChunkedInput(new RepeatedBytesInput());
      request = new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.POST, path);
      HttpUtil.setContentLength(request, totalSize);
      request.headers().add(RestUtils.Headers.BLOB_SIZE, totalSize);
      request.headers().add(RestUtils.Headers.SERVICE_ID, serviceId);
      request.headers().add(RestUtils.Headers.AMBRY_CONTENT_TYPE, "application/octet-stream");
      request.headers().add(RestUtils.Headers.TARGET_ACCOUNT_NAME, targetAccountName);
      request.headers().add(RestUtils.Headers.TARGET_CONTAINER_NAME, targetContainerName);
    } else {
      if (pathList == null) {
        request = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, path);
      } else {
        request = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET,
            pathList.get(counter.getAndIncrement() % pathList.size()));
      }
    }
    for (Pair<String, String> headerNameValue : customHeaders) {
      request.headers().add(headerNameValue.getFirst(), headerNameValue.getSecond());
    }
    chunksReceived = 0;
    sizeReceived = 0;
    lastChunkReceiveTime = 0;
    requestStartTime = System.currentTimeMillis();
    response = null;
  }

  /**
   * Returns a chunk with the same data again and again until a fixed size is reached.
   */
  private class RepeatedBytesInput implements ChunkedInput<ByteBuf> {
    // Ensures the total-POST-time metric is recorded only once per stream.
    private final AtomicBoolean metricRecorded = new AtomicBoolean(false);
    private long streamed = 0;
    private long startTime;
    private long lastChunkSendTime = 0;
    private final Logger logger = LoggerFactory.getLogger(getClass());

    /**
     * Creates an instance that repeatedly sends the same chunk up to the configured size.
     * @throws IllegalArgumentException if totalSize is negative or positive with an empty chunk.
     */
    protected RepeatedBytesInput() {
      if (totalSize < 0 || (totalSize > 0 && chunk.length < 1)) {
        throw new IllegalArgumentException("Invalid argument(s)");
      }
    }

    @Override
    public boolean isEndOfInput() {
      boolean isEndOfInput = streamed >= totalSize;
      if (isEndOfInput && metricRecorded.compareAndSet(false, true)) {
        // First observation of end-of-input: record total streaming time.
        long postChunksTime = System.currentTimeMillis() - startTime;
        perfClientMetrics.postChunksTimeInMs.update(postChunksTime);
        logger.debug("Took {} ms to POST the blob of size {}", postChunksTime, streamed);
      }
      return isEndOfInput;
    }

    @Override
    public void close() {
      logger.debug("Size streamed - {}", streamed);
    }

    @Override
    public ByteBuf readChunk(ChannelHandlerContext ctx) throws Exception {
      return readChunk(ctx.alloc());
    }

    @Override
    public ByteBuf readChunk(ByteBufAllocator allocator) throws Exception {
      ByteBuf buf = null;
      if (streamed == 0) {
        startTime = System.currentTimeMillis();
      }
      if (!isEndOfInput()) {
        long currentChunkSendTime = System.currentTimeMillis();
        // Cap the chunk at the bytes remaining so exactly totalSize is streamed.
        int remaining = (totalSize - streamed) > Integer.MAX_VALUE ? Integer.MAX_VALUE : (int) (totalSize - streamed);
        int toWrite = Math.min(chunk.length, remaining);
        buf = Unpooled.wrappedBuffer(chunk, 0, toWrite);
        streamed += toWrite;
        if (lastChunkSendTime > 0) {
          perfClientMetrics.delayBetweenChunkSendInMs.update(currentChunkSendTime - lastChunkSendTime);
        }
        lastChunkSendTime = currentChunkSendTime;
      }
      return buf;
    }

    @Override
    public long length() {
      return totalSize;
    }

    @Override
    public long progress() {
      return streamed;
    }
  }
}
/**
 * Listener invoked when a connect attempt completes; failed attempts are
 * counted and logged (successful ones need no action here).
 */
private class ChannelConnectListener implements GenericFutureListener<ChannelFuture> {
  @Override
  public void operationComplete(ChannelFuture future) {
    if (future.isSuccess()) {
      return;
    }
    // Connection failed: bump the error counter and log the cause.
    perfClientMetrics.connectError.inc();
    logger.error("Channel {} to {}:{} could not be connected.", future.channel(), host, port, future.cause());
  }
}
/**
 * Metrics that track performance.
 */
private static class PerfClientMetrics {
  // Rates.
  public final Meter bytesReceiveRate;
  public final Meter channelCreationRate;
  public final Meter requestRate;
  // Latency/size distributions.
  public final Histogram delayBetweenChunkReceiveInMs;
  public final Histogram delayBetweenChunkSendInMs;
  public final Histogram getContentSizeInBytes;
  public final Histogram getChunkCount;
  public final Histogram postChunksTimeInMs;
  public final Histogram requestRoundTripTimeInMs;
  public final Histogram timeToFirstResponseChunkInMs;
  // Error counters.
  public final Counter connectError;
  public final Counter requestResponseError;
  public final Counter unexpectedDisconnectionError;

  /**
   * Creates an instance of PerfClientMetrics.
   * @param metricRegistry the {@link MetricRegistry} instance to use.
   */
  protected PerfClientMetrics(MetricRegistry metricRegistry) {
    bytesReceiveRate = metricRegistry.meter(MetricRegistry.name(ResponseHandler.class, "BytesReceiveRate"));
    channelCreationRate = metricRegistry.meter(MetricRegistry.name(ResponseHandler.class, "ChannelCreationRate"));
    requestRate = metricRegistry.meter(MetricRegistry.name(ResponseHandler.class, "RequestRate"));
    delayBetweenChunkReceiveInMs =
        metricRegistry.histogram(MetricRegistry.name(ResponseHandler.class, "DelayBetweenChunkReceiveInMs"));
    delayBetweenChunkSendInMs = metricRegistry.histogram(
        MetricRegistry.name(ResponseHandler.RepeatedBytesInput.class, "DelayBetweenChunkSendInMs"));
    getContentSizeInBytes = metricRegistry.histogram(
        MetricRegistry.name(ResponseHandler.RepeatedBytesInput.class, "GetContentSizeInBytes"));
    getChunkCount =
        metricRegistry.histogram(MetricRegistry.name(ResponseHandler.RepeatedBytesInput.class, "GetChunkCount"));
    postChunksTimeInMs =
        metricRegistry.histogram(MetricRegistry.name(ResponseHandler.RepeatedBytesInput.class, "PostChunksTimeInMs"));
    requestRoundTripTimeInMs =
        metricRegistry.histogram(MetricRegistry.name(ResponseHandler.class, "RequestRoundTripTimeInMs"));
    timeToFirstResponseChunkInMs =
        metricRegistry.histogram(MetricRegistry.name(ResponseHandler.class, "TimeToFirstResponseChunkInMs"));
    connectError = metricRegistry.counter(MetricRegistry.name(ResponseHandler.class, "ConnectError"));
    requestResponseError = metricRegistry.counter(MetricRegistry.name(ResponseHandler.class, "RequestResponseError"));
    unexpectedDisconnectionError =
        metricRegistry.counter(MetricRegistry.name(ResponseHandler.class, "UnexpectedDisconnectionError"));
  }
}
}
|
{
"content_hash": "ae2b58bdbf3a4af2300aaaa9593e771b",
"timestamp": "",
"source": "github",
"line_count": 680,
"max_line_length": 120,
"avg_line_length": 43.536764705882355,
"alnum_prop": 0.6961324100658672,
"repo_name": "pnarayanan/ambry",
"id": "45f48b886bb55af5dd67edfc2956209cc1f2163a",
"size": "30115",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "ambry-tools/src/main/java/com.github.ambry/tools/perf/rest/NettyPerfClient.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "8137909"
},
{
"name": "Python",
"bytes": "7025"
}
]
}
|
# Multi-stage build: reuse pre-built binaries and certs from the vitess/k8s
# image rather than compiling from source.
FROM vitess/k8s AS k8s
FROM debian:stretch-slim
# Set up Vitess environment (just enough to run pre-built Go binaries)
ENV VTROOT /vt
# Prepare directory structure.
RUN mkdir -p /vt/bin && mkdir -p /vtdataroot
# Copy certs to allow https calls
COPY --from=k8s /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ca-certificates.crt
# Copy binaries
COPY --from=k8s /vt/bin/vtworker /vt/bin/
# add vitess user/group and add permissions
RUN groupadd -r --gid 2000 vitess && \
    useradd -r -g vitess --uid 1000 vitess && \
    chown -R vitess:vitess /vt && \
    chown -R vitess:vitess /vtdataroot
|
{
"content_hash": "9de9ef9895ab0780e7c2b4883e82a1fb",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 85,
"avg_line_length": 28.38095238095238,
"alnum_prop": 0.7214765100671141,
"repo_name": "enisoc/vitess",
"id": "f1fab9bceb4204f51a07264e714e7959986f54d8",
"size": "596",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "docker/k8s/vtworker/Dockerfile",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "10113"
},
{
"name": "Dockerfile",
"bytes": "15467"
},
{
"name": "Go",
"bytes": "9084650"
},
{
"name": "HTML",
"bytes": "39017"
},
{
"name": "Java",
"bytes": "1105729"
},
{
"name": "JavaScript",
"bytes": "19008"
},
{
"name": "Makefile",
"bytes": "11045"
},
{
"name": "Python",
"bytes": "1302983"
},
{
"name": "Shell",
"bytes": "83938"
},
{
"name": "Smarty",
"bytes": "76056"
},
{
"name": "TypeScript",
"bytes": "165695"
},
{
"name": "Yacc",
"bytes": "65017"
}
]
}
|
import {MessengerClient} from 'pandora-messenger';
import {ObjectConsumer} from './object-proxying/ObjectConsumer';
import {HubClient} from './hub/HubClient';
/** Identity of a client endpoint (app/process/pid/client id). */
export interface Location {
  initialization?: boolean;
  appName?: string;
  processName?: string;
  pid?: string;
  clientId?: string;
}

/** A Location optionally narrowed by an object name/tag. */
export interface Selector extends Location {
  objectName?: string;
  objectTag?: string;
}

/** Field names consulted (in this order) when matching selectors. */
export const selectorSchema = [
  'clientId',
  'appName',
  'processName',
  'pid',
  'objectName',
  'tag'
];

/** Message addressed to a named property of a proxied object. */
export interface ObjectMessage extends HubMessage {
  propertyName: string;
}

/** Message that (un)subscribes a register on a proxied object. */
export interface SubscribeMessage extends HubMessage {
  register: string;
}

/** Base message routed through the hub; `action` selects the handler. */
export interface HubMessage extends MessagePackage {
  action: string;
}

/** Envelope for every message exchanged between hub clients. */
export interface MessagePackage {
  needReply?: boolean;
  broadcast?: boolean;
  host?: Selector;
  remote?: Selector;
  data?: any;
  timeout?: number;
}

/** Reply envelope; `batchReply` carries per-recipient replies for broadcasts. */
export interface ReplyPackage extends MessagePackage {
  success?: boolean;
  error?: any;
  batchReply?: Array<ReplyPackage>;
}

/** Publishes a selector to the hub (never broadcast, never remote-addressed). */
export interface PublishPackage extends MessagePackage {
  broadcast?: null;
  remote?: null;
  data: {
    selector: Selector
  };
}

/** Looks up clients matching a selector (never broadcast, never remote-addressed). */
export interface LookupPackage extends MessagePackage {
  broadcast?: null;
  remote?: null;
  data: {
    selector: Selector
  };
}

/** Callback used to force a reply for a pending message. */
export interface ForceReplyFn {
  // Fixed: the original `(ReplyPackage): void` declared a parameter *named*
  // "ReplyPackage" of implicit type `any`; the intent was a typed parameter.
  (pkg: ReplyPackage): void;
}

/** A resolved client together with the selector that matched it. */
export interface SelectedInfo {
  client: MessengerClient;
  selector: Selector;
}

/** Handles a hub message; may return a value or a promise of one. */
export interface DispatchHandler {
  dispatch(message: HubMessage): Promise<any> | any;
}

/** Names an object exposed through the hub, optionally disambiguated by tag. */
export interface ObjectDescription {
  name: string;
  tag?: string;
}

/** Reflective description of a proxied object's surface. */
export interface Introspection {
  properties: Array<{
    name: string;
    type: string;
  }>;
  methods: Array<{
    type: string;
    name: string;
    length: number;
  }>;
}

/** Host- and proxy-side behaviours used to implement object proxying. */
export interface ObjectProxyBehaviour {
  host: {
    invoke (host: any, method: string, params: any[]): Promise<any>;
    getProperty (host: any, name: string): Promise<any>;
    introspect(host: any): Introspection;
    subscribe(hub: HubClient, objectDescription: ObjectDescription, host: any, register: string): Promise<any>;
    unsubscribe(hub: HubClient, objectDescription: ObjectDescription, host: any, register: string): Promise<any>;
  };
  proxy: {
    invoke (proxy: any, consumer: ObjectConsumer, method: string, params: any[]): Promise<any>
    getProperty (proxy: any, consumer: ObjectConsumer, name: string): Promise<any>;
    subscribe(proxy, consumer: ObjectConsumer, register: string, fn);
    unsubscribe(proxy, consumer: ObjectConsumer, register: string, fn?);
  };
}

/** Options shared by all hub clients. */
export interface ClientOptions {
  location: Location;
  logger?: any;
}

/** Options for setting up a facade; currently identical to ClientOptions. */
export interface FacadeSetupOptions extends ClientOptions {
}

/** Extra per-consumer settings. */
export interface ConsumerExtInfo {
  timeout: number;
}
|
{
"content_hash": "433b5ca44cf420fd4d6935a4d0efe09d",
"timestamp": "",
"source": "github",
"line_count": 127,
"max_line_length": 113,
"avg_line_length": 21.763779527559056,
"alnum_prop": 0.7145441389290883,
"repo_name": "midwayjs/pandora",
"id": "d32aa37f0c24b9dcdabc919226abb091fc6fd2a7",
"size": "2764",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/hub/src/domain.ts",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "20787"
},
{
"name": "Shell",
"bytes": "2517"
},
{
"name": "TypeScript",
"bytes": "694860"
}
]
}
|
<?php
declare(strict_types = 1);
namespace KoolKode\BPMN\Test;
use KoolKode\BPMN\ManagementService;
use KoolKode\BPMN\Delegate\DelegateTaskRegistry;
use KoolKode\BPMN\Delegate\Event\TaskExecutedEvent;
use KoolKode\BPMN\Engine\ProcessEngine;
use KoolKode\BPMN\Engine\VirtualExecution;
use KoolKode\BPMN\History\HistoricActivityInstance;
use KoolKode\BPMN\History\HistoryService;
use KoolKode\BPMN\Job\Executor\JobExecutor;
use KoolKode\BPMN\Job\Scheduler\TestJobScheduler;
use KoolKode\BPMN\Repository\Deployment;
use KoolKode\BPMN\Repository\RepositoryService;
use KoolKode\BPMN\Runtime\RuntimeService;
use KoolKode\BPMN\Runtime\Event\MessageThrownEvent;
use KoolKode\BPMN\Task\TaskService;
use KoolKode\Database\Test\DatabaseTestTrait;
use KoolKode\Event\EventDispatcher;
use KoolKode\Expression\ExpressionContextFactory;
use KoolKode\Process\Event\CreateExpressionContextEvent;
use KoolKode\Util\UUID;
use Monolog\Logger;
use Monolog\Handler\StreamHandler;
use Monolog\Processor\PsrLogMessageProcessor;
use PHPUnit\Framework\TestCase;
/**
 * Sets up an in-memory Sqlite database and a process engine using it.
 *
 * @author Martin Schröder
 */
abstract class BusinessProcessTestCase extends TestCase
{
    use DatabaseTestTrait;

    // Database connection created per test (tables prefixed with "bpm_").
    protected $conn;

    /**
     * @var EventDispatcher
     */
    protected $eventDispatcher;

    /**
     * @var ProcessEngine
     */
    protected $processEngine;

    /**
     * @var JobExecutor
     */
    protected $jobExecutor;

    /**
     * @var DelegateTaskRegistry
     */
    protected $delegateTasks;

    /**
     * @var RepositoryService
     */
    protected $repositoryService;

    /**
     * @var RuntimeService
     */
    protected $runtimeService;

    /**
     * @var TaskService
     */
    protected $taskService;

    /**
     * @var HistoryService
     */
    protected $historyService;

    /**
     * @var ManagementService
     */
    protected $managementService;

    // Maps process key (or '*') => activity id => handler.
    private $messageHandlers;

    private $serviceTaskHandlers;

    /**
     * Creates the database, process engine and services, wires up event-based
     * message/service-task handlers, and registers handler factory methods
     * declared on the concrete test class (discovered via their return type).
     */
    protected function setUp()
    {
        parent::setUp();
        $this->eventDispatcher = new EventDispatcher();
        $this->conn = static::createConnection('bpm_', $this->eventDispatcher);
        static::migrateDirectoryUp($this->conn, __DIR__ . '/../../migration');
        $logger = null;
        // Logging is opt-in: subclasses enable it by returning a level name.
        if (null !== ($logLevel = $this->getLogLevel())) {
            $stderr = \fopen('php://stderr', 'wb');
            $levels = \array_change_key_case(Logger::getLevels(), \CASE_UPPER);
            $logger = new Logger('BPMN');
            $logger->pushHandler(new StreamHandler($stderr, $levels[\strtoupper($logLevel)]));
            $logger->pushProcessor(new PsrLogMessageProcessor());
            \fwrite($stderr, "\n");
            \fwrite($stderr, \sprintf("TEST CASE: %s\n", $this->getName()));
            // $this->conn->setDebug(true);
            // $this->conn->setLogger($logger);
        }
        $this->messageHandlers = [];
        $this->serviceTaskHandlers = [];
        // Provide message handler subscriptions.
        $this->eventDispatcher->connect(function (MessageThrownEvent $event) {
            $def = $this->repositoryService->createProcessDefinitionQuery()->processDefinitionId($event->execution->getProcessDefinitionId())->findOne();
            $key = $def->getKey();
            $id = $event->execution->getActivityId();
            // Exact process key wins; '*' entries act as wildcards.
            if (isset($this->messageHandlers[$key][$id])) {
                return $this->messageHandlers[$key][$id]->execute($event);
            }
            if (isset($this->messageHandlers['*'][$id])) {
                return $this->messageHandlers['*'][$id]->execute($event);
            }
        });
        $this->eventDispatcher->connect(function (TaskExecutedEvent $event) {
            $execution = $this->runtimeService->createExecutionQuery()->executionId($event->execution->getExecutionId())->findOne();
            $key = $execution->getProcessDefinition()->getKey();
            $id = $event->execution->getActivityId();
            if (isset($this->serviceTaskHandlers[$key][$id])) {
                $this->serviceTaskHandlers[$key][$id]->execute($event->execution);
            }
            if (isset($this->serviceTaskHandlers['*'][$id])) {
                $this->serviceTaskHandlers['*'][$id]->execute($event->execution);
            }
        });
        // Allow for assertions in expressions, e.g. #{ @test.assertEquals(2, processVariable) }
        $this->eventDispatcher->connect(function (CreateExpressionContextEvent $event) {
            $event->access->setVariable('@test', $this);
        });
        $this->delegateTasks = new DelegateTaskRegistry();
        $this->processEngine = new ProcessEngine($this->conn, $this->eventDispatcher, new ExpressionContextFactory());
        $this->processEngine->setDelegateTaskFactory($this->delegateTasks);
        $this->processEngine->setLogger($logger);
        $scheduler = new TestJobScheduler($this->processEngine);
        $this->jobExecutor = new JobExecutor($this->processEngine, $scheduler);
        $this->processEngine->setJobExecutor($this->jobExecutor);
        $this->repositoryService = $this->processEngine->getRepositoryService();
        $this->runtimeService = $this->processEngine->getRuntimeService();
        $this->taskService = $this->processEngine->getTaskService();
        $this->historyService = $this->processEngine->getHistoryService();
        $this->managementService = $this->processEngine->getManagementService();
        // Register handlers declared as (non-static) factory methods on the
        // subclass, identified purely by their declared return type.
        $ref = new \ReflectionClass(static::class);
        foreach ($ref->getMethods() as $method) {
            if ($method->isStatic() || !$method->hasReturnType()) {
                continue;
            }
            switch ($method->getReturnType()) {
                case MessageHandler::class:
                    $method->setAccessible(true);
                    $handler = $method->invoke($this);
                    $this->messageHandlers[$handler->getProcessKey() ?? '*'][$handler->getMessageName()] = $handler;
                    break;
                case ServiceTaskHandler::class:
                    $method->setAccessible(true);
                    $handler = $method->invoke($this);
                    $this->serviceTaskHandlers[$handler->getProcessKey() ?? '*'][$handler->getServiceTask()] = $handler;
                    break;
            }
        }
    }

    public function getRepositoryService(): RepositoryService
    {
        return $this->repositoryService;
    }

    public function getRuntimeService(): RuntimeService
    {
        return $this->runtimeService;
    }

    public function getTaskService(): TaskService
    {
        return $this->taskService;
    }

    public function getHistoryService(): HistoryService
    {
        return $this->historyService;
    }

    public function getManagementService(): ManagementService
    {
        return $this->managementService;
    }

    /**
     * Get the minimum level of log messages to be displayed.
     *
     * Logging is not enabled by default.
     */
    // NOTE(review): declared as getlogLevel but invoked as getLogLevel() in
    // setUp(); PHP method names are case-insensitive so this resolves, but the
    // casing should be normalized. Left unchanged here (comment-only edit).
    public function getlogLevel(): ?string
    {
        return null;
    }

    /**
     * Deploys a single process file; relative paths resolve against the
     * directory of the concrete test class.
     */
    protected function deployFile(string $file): Deployment
    {
        // Only prefix when $file is not already absolute or a URI.
        if (!\preg_match("'^(?:(?:[a-z]:)|(/+)|([^:]+://))'i", $file)) {
            $file = \dirname((new \ReflectionClass(\get_class($this)))->getFileName()) . \DIRECTORY_SEPARATOR . $file;
        }
        return $this->repositoryService->deployProcess(new \SplFileInfo($file));
    }

    /**
     * Deploys every matching file in a directory as one deployment.
     */
    protected function deployDirectory(string $file, array $extensions = []): Deployment
    {
        if (!\preg_match("'^(?:(?:[a-z]:)|(/+)|([^:]+://))'i", $file)) {
            $file = \dirname((new \ReflectionClass(\get_class($this)))->getFileName()) . \DIRECTORY_SEPARATOR . $file;
        }
        $builder = $this->repositoryService->createDeployment(\pathinfo($file, \PATHINFO_FILENAME));
        $builder->addExtensions($extensions);
        $builder->addDirectory($file);
        return $this->repositoryService->deploy($builder);
    }

    /**
     * Deploys the contents of an archive file as one deployment.
     */
    protected function deployArchive(string $file, array $extensions = []): Deployment
    {
        if (!\preg_match("'^(?:(?:[a-z]:)|(/+)|([^:]+://))'i", $file)) {
            $file = \dirname((new \ReflectionClass(\get_class($this)))->getFileName()) . \DIRECTORY_SEPARATOR . $file;
        }
        $builder = $this->repositoryService->createDeployment(\pathinfo($file, \PATHINFO_FILENAME));
        $builder->addExtensions($extensions);
        $builder->addArchive($file);
        return $this->repositoryService->deploy($builder);
    }

    /**
     * Registers a callback for a thrown message; null key registers a wildcard.
     */
    protected function registerMessageHandler(?string $processDefinitionKey, string $nodeId, callable $callback): void
    {
        $this->messageHandlers[$processDefinitionKey ?? '*'][$nodeId] = new MessageHandler($nodeId, $processDefinitionKey, $callback);
    }

    /**
     * Registers a callback for a service task; null key registers a wildcard.
     */
    protected function registerServiceTaskHandler(?string $processDefinitionKey, string $activityId, callable $callback): void
    {
        $this->serviceTaskHandlers[$processDefinitionKey ?? '*'][$activityId] = new ServiceTaskHandler($activityId, $processDefinitionKey, $callback);
    }

    /**
     * Debug helper: prints execution trees of all (or one) process instance(s).
     */
    protected function dumpProcessInstances(?UUID $id = null): void
    {
        $query = $this->runtimeService->createProcessInstanceQuery();
        if ($id !== null) {
            $query->processInstanceId($id);
        }
        foreach ($query->findAll() as $proc) {
            echo "\n";
            $this->dumpExecution($this->processEngine->findExecution($proc->getId()));
            echo "\n";
        }
    }

    /**
     * Recursively prints one execution and its children, indented by depth.
     */
    protected function dumpExecution(VirtualExecution $exec): void
    {
        $node = $exec->getNode();
        $nodeId = ($node === null) ? null : $node->getId();
        \printf("%s%s [ %s ]\n", \str_repeat('  ', $exec->getExecutionDepth()), $nodeId, $exec->getId());
        foreach ($exec->findChildExecutions() as $child) {
            $this->dumpExecution($child);
        }
    }

    /**
     * Returns the definition keys of completed activities, ordered by start
     * time, optionally limited to one process instance.
     */
    protected function findCompletedActivityDefinitionKeys(?UUID $processId = null): array
    {
        $query = $this->historyService->createHistoricActivityInstanceQuery()->completed(true)->orderByStartedAt();
        if ($processId !== null) {
            $query->processInstanceId($processId);
        }
        return \array_map(function (HistoricActivityInstance $activity) {
            return $activity->getDefinitionKey();
        }, $query->findAll());
    }
}
|
{
"content_hash": "1a5bd8d3531587e9652237f62fa7d4f3",
"timestamp": "",
"source": "github",
"line_count": 320,
"max_line_length": 153,
"avg_line_length": 33.9625,
"alnum_prop": 0.5909090909090909,
"repo_name": "koolkode/bpmn",
"id": "2daf803bf6a8e772179d90b6e291aedbe6c62c64",
"size": "11099",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Test/BusinessProcessTestCase.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "483140"
}
]
}
|
def pages(domo):
    '''Page Docs: https://developer.domo.com/docs/domo-apis/pages

    End-to-end walkthrough of the Domo Pages API using the given ``domo``
    client: creates a page and a subpage, updates and re-parents them,
    lists pages, creates/updates collections, then deletes everything
    that was created so the instance is left clean.
    '''
    domo.logger.info("\n**** Domo API - Page Examples ****\n")

    # Create a page
    page = domo.pages.create('New Page')
    domo.logger.info("Created Page {}".format(page['id']))

    # Create a subpage
    subpage = domo.pages.create('Sub Page', parentId=page['id'])
    domo.logger.info("Created Subpage {}".format(subpage['id']))

    # Update the page using returned page
    page['name'] = 'Updated Page'
    domo.pages.update(**page)
    domo.logger.info("Renamed Page {}".format(page['id']))

    # Turn subpage into to top-level page using keyword argument
    domo.pages.update(subpage['id'], parentId=None)
    domo.logger.info("Moved Page to top level {}".format(subpage['id']))

    # Get the page
    page = domo.pages.get(page['id'])

    # List pages
    page_list = list(domo.pages.list())
    domo.logger.info("Retrieved a list of {} top-level page(s)".format(
        len(page_list)))

    # Create a few collections
    collections = [
        domo.pages.create_collection(page['id'], 'First Collection'),
        domo.pages.create_collection(page['id'], 'Second Collection'),
    ]
    domo.logger.info("Created two collections on page {}".format(page['id']))

    # Get collections
    collection_list = domo.pages.get_collections(page['id'])
    domo.logger.info("Retrieved a list of {} collections".format(
        len(collection_list)))

    # Update collection
    collections[1]['title'] = 'Last Collection'
    domo.pages.update_collection(page['id'], **collections[1])
    domo.logger.info("Updated collection {}: {}".format(collections[1]['id'],
                                                        collections[1]['title']))

    # Clean up
    for collection in collections:
        domo.pages.delete_collection(page['id'], collection['id'])
    domo.pages.delete(page['id'])
    domo.pages.delete(subpage['id'])
    domo.logger.info("Deleted collections and pages")
|
{
"content_hash": "a0ebec1b7c8916b1a1e645ea7b2375a4",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 79,
"avg_line_length": 38.833333333333336,
"alnum_prop": 0.5956127801621364,
"repo_name": "domoinc/domo-python-sdk",
"id": "b6926d0fae7ade517d79df8dc51bfb9dea5537bd",
"size": "2099",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/page.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "99151"
},
{
"name": "Shell",
"bytes": "70"
}
]
}
|
var solve_quad = require('../Modules/quad.js');
var fs = require ('fs');
exports.test_any = function(test){
var any_input = require('any_input.json');
test.equals(JSON.stringify({"x":"Any"}),JSON.stringify(solve_quad.fujeclypse(any_input.a,any_input.b,any_input.c)));
test.done()
}
exports.test_none = function(test){
var none_input = require('none_input.json');
test.equals(JSON.stringify({"x1":"NaN", "x2":"NaN"}),JSON.stringify(solve_quad.fujeclypse(none_input.a,none_input.b,none_input.c)));
test.done()
}
exports.test_x1 = function(test){
var x1_input = require('x1_input.json');
test.equals(JSON.stringify({"x":"0,5"}),JSON.stringify(solve_quad.fujeclypse(x1_input.a,x1_input.b,x1_input.c)));
test.done()
}
exports.test_x2 = function(test){
var x2_input = require('x2_input.json');
test.equals(JSON.stringify({"x1":"0,14", "x2":"-1"}),JSON.stringify(solve_quad.fujeclypse(x2_input.a,x2_input.b,x2_input.c)));
test.done()
}
|
{
"content_hash": "847c6c3fcd7ddd4ad740d3215885a3d8",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 133,
"avg_line_length": 35.03703703703704,
"alnum_prop": 0.686046511627907,
"repo_name": "thebravoman/software_engineering_2016",
"id": "63ec1d8448b0a7410e9f95d697d1c6daf30bf695",
"size": "946",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "h09_nodejs_modules_tests/app_b_02/Test/test.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "3852"
},
{
"name": "C++",
"bytes": "2303"
},
{
"name": "CSS",
"bytes": "72949"
},
{
"name": "CoffeeScript",
"bytes": "20467"
},
{
"name": "HTML",
"bytes": "464471"
},
{
"name": "JavaScript",
"bytes": "1300267"
},
{
"name": "Python",
"bytes": "25249"
},
{
"name": "Ruby",
"bytes": "1747072"
},
{
"name": "Shell",
"bytes": "3144"
},
{
"name": "TypeScript",
"bytes": "30735"
}
]
}
|
package msa.rehearsal.base;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v7.app.AppCompatActivity;
import android.widget.Toast;
import msa.rehearsal.RehearsalApp;
import msa.rehearsal.injector.HasComponent;
import msa.rehearsal.injector.components.ApplicationComponent;
import msa.rehearsal.injector.modules.ActivityModule;
/**
 * Base {@link android.app.Activity} class for every Activity in this application.
 * Centralizes dependency-injection setup and small toolbar/toast helpers.
 */
public abstract class BaseActivity extends AppCompatActivity implements HasComponent {
  @Override
  protected void onCreate(@Nullable Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    // Every subclass injects itself from the application component on creation.
    initializeInjector(getApplicationComponent());
  }

  // NOTE(review): name looks like a copy/paste leftover from another app
  // ("Medikoe" vs Rehearsal); renaming would break callers, so left as-is.
  public RehearsalApp getMedikoeApplication() {
    return ((RehearsalApp) getApplication());
  }

  /** @return the application-scoped Dagger component. */
  public ApplicationComponent getApplicationComponent() {
    return ((RehearsalApp) getApplication()).getApplicationComponent();
  }

  /** @return an activity-scoped module bound to this Activity. */
  protected ActivityModule getActivityModule() {
    return new ActivityModule(this);
  }

  /** Subclasses perform their own field injection here. */
  protected abstract void initializeInjector(ApplicationComponent applicationComponent);

  // Suppression assumes a support action bar has been set before these are
  // called; getSupportActionBar() would otherwise return null — TODO confirm.
  @SuppressWarnings("ConstantConditions")
  protected void setToolBarTitle(String title) {
    getSupportActionBar().setTitle(title);
  }

  @SuppressWarnings("ConstantConditions")
  protected void setToolBarTitleVisibility(boolean state) {
    getSupportActionBar().setDisplayShowTitleEnabled(state);
  }

  /** Shows a short toast with the given message. */
  protected void showToast(String message) {
    Toast.makeText(this, message, Toast.LENGTH_SHORT).show();
  }
}
|
{
"content_hash": "3bc1b355155ac33809ba1a51e0a1d3a0",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 90,
"avg_line_length": 30.79245283018868,
"alnum_prop": 0.7549019607843137,
"repo_name": "abeemukthees/Arena",
"id": "47697ad341598f2024a5b2a3c4f7842ac845b9d9",
"size": "2234",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Rehearsal/src/main/java/msa/rehearsal/base/BaseActivity.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "625302"
},
{
"name": "Kotlin",
"bytes": "94339"
}
]
}
|
<!DOCTYPE HTML>
<html>
<head>
<title>main curl test file</title>
<!-- Configure curl's module path before the loader script executes. -->
<script>
curl = {
  paths: {
    curl: '../src/curl/'
  }
};
</script>
<script src="../dist/curl-kitchen-sink/curl.js"></script>
<script type="text/javascript">
// Load an AMD module plus css! and text! plugin resources, then report
// whether each loaded as expected.
curl(
  [
    'stuff/three',
    'css!stuff/base',
    'text!stuff/template.html'
  ]
).then(
  function (three, link, template) {
    (three == 3 ? success : failure)('A module with dependencies loaded.');
    (typeof template == 'string' && template.length ? success : failure)('text resource loaded.');
  }
);
// Appends a result line to the body once the DOM is ready.
function write (msg) {
  curl(['domReady!'], function () {
    document.body.appendChild(document.createElement('div')).innerHTML = msg;
  });
}
function failure (msg) { write('FAILED: ' + msg); }
function success (msg) { write('SUCCESS: ' + msg); }
</script>
</head>
<body>
<p>This text should all be the same <span style="color: #2faced;">color</span> if the css file loaded.</p>
</body>
</html>
|
{
"content_hash": "1bb44bbff87d7d737ac3c3aa85644789",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 106,
"avg_line_length": 19.428571428571427,
"alnum_prop": 0.6186974789915967,
"repo_name": "pchaussalet/dr_devops",
"id": "94b5cbc346598c6b05a9a47200624772b2902f08",
"size": "952",
"binary": false,
"copies": "4",
"ref": "refs/heads/gh-pages",
"path": "bower_components/curl/test/dist-kitchen-sink.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "16499"
},
{
"name": "HTML",
"bytes": "7964"
},
{
"name": "JavaScript",
"bytes": "14580"
}
]
}
|
package gov.hhs.fha.nhinc.subscription.repository.dialectalgorithms.simple;
import gov.hhs.fha.nhinc.subscription.repository.roottopicextractor.IRootTopicExtractionStrategy;
import gov.hhs.fha.nhinc.subscription.repository.roottopicextractor.RootTopicExtractorHelper;
import gov.hhs.fha.nhinc.subscription.repository.service.SubscriptionRepositoryException;
import gov.hhs.fha.nhinc.xmlCommon.XmlUtility;
import org.w3c.dom.Node;
import org.apache.log4j.Logger;
/**
*
* @author rayj
*/
public class SimpleDialectRootTopicExtractor implements IRootTopicExtractionStrategy {

    private static final Logger LOG = Logger.getLogger(SimpleDialectRootTopicExtractor.class);

    /**
     * Extracts the root topic from a topic expression node. For the simple dialect
     * the whole expression is the root topic, so this returns the node's text value
     * with namespace prefixes expanded to full namespaces.
     *
     * @param topicExpression DOM node holding the topic expression
     * @return the topic value with namespace prefixes replaced by their namespaces
     * @throws SubscriptionRepositoryException propagated from the extraction helpers
     */
    public String extractRootTopicFromTopicExpressionNode(Node topicExpression) throws SubscriptionRepositoryException {
        LOG.debug("begin SimpleDialectRootTopicExtractor.extractRootTopicFromTopicExpressionNode topicExpression='"
            + XmlUtility.serializeNodeIgnoreFaults(topicExpression) + "'");
        // The node's text value is the topic itself; expand prefixes so the
        // returned root topic is self-contained. (Removed the redundant
        // null-initialized rootTopic local from the original.)
        String topicValue = XmlUtility.getNodeValue(topicExpression);
        return RootTopicExtractorHelper.ReplaceNamespacePrefixesWithNamespaces(topicValue, topicExpression);
    }
}
|
{
"content_hash": "842680014f52eb98f9f43b252b06cecf",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 120,
"avg_line_length": 44.10344827586207,
"alnum_prop": 0.8037529319781079,
"repo_name": "sailajaa/CONNECT",
"id": "0887d24a61379cab6663d735d496ded4de214ae1",
"size": "2971",
"binary": false,
"copies": "2",
"ref": "refs/heads/CONNECT_integration",
"path": "Product/Production/Services/HIEMCore/src/main/java/gov/hhs/fha/nhinc/subscription/repository/dialectalgorithms/simple/SimpleDialectRootTopicExtractor.java",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "5539"
},
{
"name": "Groovy",
"bytes": "1641"
},
{
"name": "Java",
"bytes": "12552183"
},
{
"name": "Python",
"bytes": "773"
},
{
"name": "Shell",
"bytes": "14607"
},
{
"name": "XSLT",
"bytes": "35057"
}
]
}
|
using System;
using System.Collections.Generic;
using System.Linq;
using FluentAssertions;
using NUnit.Framework;
using Nest.Tests.MockData.Domain;
namespace Nest.Tests.Integration.Core.TermVectors
{
[TestFixture]
public class MultiTermVectorsTests : IntegrationTests
{
	/// <summary>
	/// Fetching term vectors for two known ids should yield exactly one
	/// "content" term vector per document.
	/// </summary>
	[Test]
	public void MultiTermVectorsTest()
	{
		var response = Client.MultiTermVectors<ElasticsearchProject>(s => s
			.Fields(ep => ep.Content)
			.Ids("1", "2")
		);
		response.IsValid.Should().BeTrue();
		response.Documents.Should().NotBeNull();
		response.Documents.Count().Should().Be(2);
		foreach (var doc in response.Documents)
		{
			doc.TermVectors.Count().Should().Be(1);
			doc.TermVectors.First().Key.Should().Be("content");
		}
	}

	/// <summary>
	/// Same expectation when the documents are specified in the request body,
	/// with per-document statistics options.
	/// </summary>
	[Test]
	public void MultiTermVectorsTest_DocumentsInBody()
	{
		var response = Client.MultiTermVectors<ElasticsearchProject>(s => s
			.Fields(ep => ep.Content)
			.Documents(
				d => d.Id(1).TermStatistics(),
				d => d.Id(2).FieldStatistics().Offsets(false)
			)
		);
		response.IsValid.Should().BeTrue();
		response.Documents.Should().NotBeNull();
		response.Documents.Count().Should().Be(2);
		foreach (var doc in response.Documents)
		{
			doc.TermVectors.Count().Should().Be(1);
			doc.TermVectors.First().Key.Should().Be("content");
		}
	}

	/// <summary>
	/// A request for an unknown id is still valid and produces a single
	/// not-found entry in the response.
	/// </summary>
	[Test]
	[SkipVersion("1.2.0 - 9.9.9", "Failing since ES 1.2: https://github.com/elasticsearch/elasticsearch/issues/6451")]
	public void MultiTermVectorsNonExistentIdTest()
	{
		var response = Client.MultiTermVectors<ElasticsearchProject>(s => s
			.Ids("thisiddoesnotexist")
		);
		response.IsValid.Should().BeTrue();
		response.Documents.Count().Should().Be(1);
		response.Documents.First().Found.Should().Be(false);
	}
}
}
|
{
"content_hash": "409c7de28ccbc8ab9d2914389ed70fb6",
"timestamp": "",
"source": "github",
"line_count": 70,
"max_line_length": 116,
"avg_line_length": 25.042857142857144,
"alnum_prop": 0.6833998859098688,
"repo_name": "ststeiger/elasticsearch-net",
"id": "929af3c0b79b853f1ff2a721292f03ba4dc36165",
"size": "1755",
"binary": false,
"copies": "4",
"ref": "refs/heads/develop",
"path": "src/Tests/Nest.Tests.Integration/Core/TermVectors/MultiTermVectorsTests.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "3047"
},
{
"name": "C#",
"bytes": "9479248"
},
{
"name": "F#",
"bytes": "17125"
},
{
"name": "HTML",
"bytes": "187041"
},
{
"name": "Pascal",
"bytes": "85615"
},
{
"name": "PowerShell",
"bytes": "90973"
},
{
"name": "Shell",
"bytes": "1849"
}
]
}
|
package com.mobilepetroleum.radialencapsulation;
import classycle.util.StringPattern;
import java.util.regex.Pattern;
class StringPatterns {

    /**
     * Builds a {@link StringPattern} that matches every string NOT matched by
     * any of the given regular expressions.
     *
     * @param excludes regular expressions (java.util.regex syntax) to exclude
     * @return a pattern rejecting strings that match any exclude expression
     */
    static StringPattern exclude(String... excludes) {
        Pattern[] patterns = new Pattern[excludes.length];
        // Compile once up front so matches() does no per-call compilation.
        for (int i = 0; i < excludes.length; i++) {
            patterns[i] = Pattern.compile(excludes[i]);
        }
        return new Exclude(patterns);
    }

    /** Inverted pattern: matches only strings matching none of the compiled excludes. */
    static class Exclude implements StringPattern {

        // final: the exclusion set is fixed at construction (was mutable before).
        final Pattern[] excludes;

        Exclude(Pattern[] excludes) {
            this.excludes = excludes;
        }

        @Override
        public boolean matches(String string) {
            for (Pattern exclude : excludes) {
                if (exclude.matcher(string).matches()) {
                    return false;
                }
            }
            return true;
        }
    }
}
|
{
"content_hash": "b4342c14d3858e326c22aaed16cd4733",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 84,
"avg_line_length": 26.13888888888889,
"alnum_prop": 0.5749202975557917,
"repo_name": "MobilePetroleum/radial-encapsulation-maven-plugin",
"id": "6e57e39c18787f30a803c5e8b2fdf968d2af2060",
"size": "941",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/com/mobilepetroleum/radialencapsulation/StringPatterns.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "4919"
}
]
}
|
This project is a part of the Electron ecosystem. As such, all contributions to this project follow
[Electron's code of conduct](https://github.com/electron/electron/blob/main/CODE_OF_CONDUCT.md)
where appropriate.
|
{
"content_hash": "901f4a6d78dbd7fadd563e2f62cddc89",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 99,
"avg_line_length": 71.66666666666667,
"alnum_prop": 0.8,
"repo_name": "maxogden/electron-packager",
"id": "8c48a630ac040bfc2e3d709e5da4512e47b50821",
"size": "234",
"binary": false,
"copies": "1",
"ref": "refs/heads/dependabot/npm_and_yarn/typedoc-0.22.12",
"path": "CODE_OF_CONDUCT.md",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "JavaScript",
"bytes": "72082"
}
]
}
|
/*
================================================================================
Name: Luis Regus
ID: 00264958
Created: 1/31/14
Description: This program determines if a customer has exceeded the credit limit on a charge account.
usage: This program asks the user for the account number, beginning balance, total charges, and credit limit. If the credit limit of the account is exceeded then it will print some info of the account and it will let the user know that the credit limit was exceeded.
Sample
----------------------------------------------------------------------------
Enter account number (-1 to end): 100
Enter beginning balance: 5394.78
Enter total charges: 1000.00
Enter total credits: 500.00
Enter credit limit: 5500.00
Account: 100
Credit Limit: 5500.00
Balance: 5894.78
Credit limit exceeded
Enter account number (-1 to end): 200
Enter beginning balance: 1000.00
Enter total charges: 123.45
Enter total credits: 321.00
Enter credit limit: 1500.00
Enter account number (-1 to end): 300
Enter beginning balance: 500.00
Enter total charges: 274.73
Enter total credits: 100.00
Enter credit limit: 800.00
Enter account number (-1 to end): -1
----------------------------------------------------------------------------
================================================================================
*/
#include <stdio.h>
/*
 * Reads charge-account records from stdin until the sentinel account number -1
 * is entered.  For each account the new balance is
 *     beginning balance + charges - credits
 * and the account details are printed only when that balance exceeds the
 * credit limit.  Returns 0 on normal termination or on malformed input.
 */
int main( void )
{
    int accountNumber;    /* account id; -1 ends the loop            */
    float initialBalance; /* balance before this period              */
    float charges;        /* total new charges                       */
    float credits;        /* total payments/credits                  */
    float creditLimit;    /* maximum allowed balance                 */
    float newBalance = 0; /* balance after applying charges/credits  */

    printf("Enter account number (-1 to end): ");
    /* Every scanf result is checked: the original ignored failures, so a
     * non-numeric token made the loop spin forever re-reading it. */
    if (scanf("%d", &accountNumber) != 1) {
        return 0;
    }
    while (accountNumber != -1)
    {
        printf("Enter beginning balance: ");
        if (scanf("%f", &initialBalance) != 1) {
            return 0;
        }
        printf("Enter total charges: ");
        if (scanf("%f", &charges) != 1) {
            return 0;
        }
        printf("Enter total credits: ");
        if (scanf("%f", &credits) != 1) {
            return 0;
        }
        printf("Enter credit limit: ");
        if (scanf("%f", &creditLimit) != 1) {
            return 0;
        }
        newBalance = initialBalance + charges - credits;
        if (newBalance > creditLimit)
        {
            printf("Account: %d\n", accountNumber);
            printf("Credit Limit: %.2f\n", creditLimit);
            printf("Balance: %.2f\n", newBalance);
            printf("Credit limit exceeded\n");
        }
        printf("\nEnter account number (-1 to end): ");
        if (scanf("%d", &accountNumber) != 1) {
            return 0;
        }
    }
    return 0;
}
|
{
"content_hash": "cf04eb064da055a34ede07c018f5a2a4",
"timestamp": "",
"source": "github",
"line_count": 98,
"max_line_length": 271,
"avg_line_length": 24.285714285714285,
"alnum_prop": 0.5676470588235294,
"repo_name": "lmregus/Portfolio",
"id": "f740b4d717d0ebd5b990c8d64380943a939954c9",
"size": "2380",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "C-C++/creditLimit.c",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "27682"
},
{
"name": "C++",
"bytes": "25458"
},
{
"name": "CSS",
"bytes": "12842"
},
{
"name": "HTML",
"bytes": "49171"
},
{
"name": "Java",
"bytes": "99711"
},
{
"name": "JavaScript",
"bytes": "827"
},
{
"name": "Python",
"bytes": "42857"
},
{
"name": "Shell",
"bytes": "5710"
}
]
}
|
define([
  './Point',
  './Line',
  './util/MathUtil',
  'underscore',
  'utility/Class',
  'utility/Setter'
], function(Point, Line, MathUtil, _, Class, Setter) {

  /**
   * A directed line segment from pointA to pointB, extending {@link Line}.
   */
  var Edge = Line.extend({

    _init: function(pointA, pointB) {
      this._super([pointA, pointB]);
      // Alias the underlying Line points for direct access.
      this.pointA = this.points[0];
      this.pointB = this.points[1];
    },

    /** @returns {Number} The Euclidean length of this edge. */
    getLength: function() {
      return this.pointA.getDistance(this.pointB);
    },

    /**
     * @returns {Boolean} Whether the given edge has the same endpoints in the
     *     same order (a reversed edge is NOT considered equal).
     */
    equals: function(other) {
      return this.pointA.equals(other.pointA) && this.pointB.equals(other.pointB);
    },

    /** @returns {Number} The slope dy/dx (Infinity/-Infinity for vertical edges). */
    getGradient: function() {
      return (this.pointB.y - this.pointA.y) / (this.pointB.x - this.pointA.x);
    },

    /**
     * @returns {Number} The angle in radians in the range [-Math.PI, Math.PI].
     */
    getAngle: function() {
      var xDiff = this.pointB.x - this.pointA.x;
      var yDiff = this.pointB.y - this.pointA.y;
      return Math.atan2(yDiff, xDiff);
    },

    /**
     * @param {Edge} other
     * @returns The angle in radians between this {@link Edge} and the given one,
     *     computed as acos of the normalized dot product, so the result is in
     *     [0, Math.PI] (Math.acos range; the original comment's [-PI, PI] claim
     *     did not match acos).
     */
    getAngleBetween: function(other) {
      // Convert an edge to a direction vector relative to its start point.
      var toVector = function(edge) {
        var point = edge.pointB.translate(edge.pointA.negate());
        return [point.x, point.y];
      };
      var angle = MathUtil.dotProduct(toVector(this), toVector(other)) /
          (this.getLength() * other.getLength());
      return Math.acos(angle);
    },

    /**
     * Translates this {@link Edge} such that the first {@link Point} is at the origin (0,0).
     */
    translateToOrigin: function() {
      this.translate(this.pointA.negate());
    },

    /**
     * @param {Point} point
     * @param {Object} [args]
     * @param {Number} [args.sensitivity] The difference in (x,y) values to treat as significant.
     * @returns {Boolean} Whether the given {@link Point} is on this {@link Edge}.
     */
    contains: function(point, args) {
      args = _.extend({
        sensitivity: Point.DEFAULT_SENSITIVITY
      }, args);
      // On the edge iff the edge's y at point.x is (insignificantly) close to point.y.
      return !MathUtil.sigDiff(this.getY(point.x), point.y, args.sensitivity);
    },

    /**
     * @param {Number} x
     * @param {Boolean} [ignoreDomain] When true, evaluate the infinite line even
     *     if x lies outside the segment's x-range.
     * @returns {Number|null} The y value at x, or null when x is out of domain.
     */
    getY: function(x, ignoreDomain) {
      if (ignoreDomain || this.isInDomain(x)) {
        return this.getGradient() * x + this.getYIntercept();
      } else {
        return null;
      }
    },

    /** @returns {Boolean} Whether x lies between the endpoints' x values (either order). */
    isInDomain: function(x) {
      return (x >= this.pointA.x && x <= this.pointB.x) ||
          (x >= this.pointB.x && x <= this.pointA.x);
    },

    /** @returns {Number} The y-intercept of the infinite line through this edge. */
    getYIntercept: function() {
      return this.pointA.y - this.pointA.x * this.getGradient();
    },

    /**
     * @param {Point} point
     * @return {Edge} The edge perpendicular to this one which passes through the given point.
     */
    getPerpendicularThroughPoint: function(point) {
      var m1 = this.getGradient();
      var c1 = this.getYIntercept();
      // Perpendicular line: y = -x/m1 + c2.
      var c2 = point.y + (1 / m1) * point.x;
      var x = 0;
      var y = 0;
      // If the gradient formed by the perpendicular line is infinity, use the x value as the
      // given point and the y from either point of this edge (they are equal, since m1 will be 0).
      if (c2 === Infinity || c2 === -Infinity) {
        x = point.x;
        y = this.pointA.y;
      } else {
        // Intersection of this line with its perpendicular through the point.
        x = (c2 - c1) / ((Math.pow(m1, 2) + 1) / m1);
        y = m1 * x + c1;
      }
      return new Edge(point, new Point(x, y));
    },

    /** @returns {Array.<Point>} Independent clones of the two endpoints. */
    getPoints: function() {
      return [this.pointA.clone(), this.pointB.clone()];
    },

    /**
     * @param {Edge} other
     * @param {Object} [args]
     * @param {Number} [args.sensitivity] The difference in (x,y) values to treat as significant.
     * @return Whether the given {@link Edge} partially or fully overlaps this one. This means
     *     they must be parallel and share a point.
     */
    overlaps: function(other, args) {
      args = _.extend({
        sensitivity: Point.DEFAULT_SENSITIVITY
      }, args);
      var sensitivity = args.sensitivity;
      return !MathUtil.sigDiff(this.getGradient(), other.getGradient(), sensitivity) &&
          (this.contains(other.pointA, args) || this.contains(other.pointB, args));
    },

    /**
     * Translates each edge point by the given ratio from the centroid.
     * @param {Number} scale
     * @param {Point} [centroid] Reference point; defaults to this edge's centroid.
     */
    scale: function(scale, centroid) {
      centroid = centroid || this.getCentroid();
      this.pointA.set(centroid.interpolate(this.pointA, scale));
      this.pointB.set(centroid.interpolate(this.pointB, scale));
      return this;
    },

    /** Translates both endpoints by the given point (mutates in place). */
    translate: function(point) {
      this.pointA.set(this.pointA.translate(point));
      this.pointB.set(this.pointB.translate(point));
      return this;
    },

    /** Swaps the endpoints, reversing the edge's direction. */
    reverse: function () {
      var tmpPoint = this.pointA;
      this.pointA = this.pointB;
      this.pointB = tmpPoint;
      return this;
    },

    /** @returns {Point} The midpoint of the edge. */
    getCentroid: function() {
      return new Point((this.pointA.x + this.pointB.x) / 2, (this.pointA.y + this.pointB.y) / 2);
    },

    /** @returns {Edge} A deep copy (endpoints cloned). */
    clone: function() {
      return new Edge(this.pointA.clone(), this.pointB.clone());
    }
  });

  Setter.mixin(Edge, {
    /**
     * @param {Array.<Array.<Number>>} points Two [x, y] pairs.
     * @returns {Edge} An edge built from raw coordinate pairs.
     */
    fromArray: function(points) {
      points = _.map(points, function(point) {
        return new Point(point[0], point[1]);
      });
      return new Edge(points[0], points[1]);
    }
  });

  return Edge;
});
|
{
"content_hash": "3b302e39e73c1e4b14301f0486fdd439",
"timestamp": "",
"source": "github",
"line_count": 176,
"max_line_length": 99,
"avg_line_length": 29.960227272727273,
"alnum_prop": 0.5850559453821355,
"repo_name": "urbanetic/subdiv",
"id": "adbae726b2552641c34604722b09193b40a7de0e",
"size": "5273",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "src/Edge.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "391"
},
{
"name": "HTML",
"bytes": "2302"
},
{
"name": "JavaScript",
"bytes": "154049"
}
]
}
|
Advanced Puppet Tutorial Files for Cascadia 2015
Slides: http://goo.gl/V6BcCi
Tutorial Questions: http://goo.gl/du4Hqi
|
{
"content_hash": "eb4c4ae841c1357a9da7f2c72314fa54",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 48,
"avg_line_length": 24.2,
"alnum_prop": 0.7851239669421488,
"repo_name": "uphillian/cascadia2015",
"id": "82fbc8bc54d90b22013b40ccb860a389c448eac5",
"size": "136",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "README.md",
"mode": "33188",
"license": "apache-2.0",
"language": []
}
|
var _ = require('../util')
var Dep = require('./dep')
var arrayMethods = require('./array')
var arrayKeys = Object.getOwnPropertyNames(arrayMethods)
/**
 * Observer attached to each observed object or array. Once attached it turns
 * the target's own properties into dependency-collecting getter/setters
 * (plain objects) or patches the array mutation methods (arrays) so changes
 * can be dispatched to watchers.
 *
 * @param {Array|Object} value
 * @constructor
 */
function Observer (value) {
  this.value = value
  this.dep = new Dep()
  // Tag the value so Observer.create can find and reuse this observer.
  _.define(value, '__ob__', this)
  if (_.isArray(value)) {
    // Prefer __proto__ patching when the engine supports it; otherwise copy
    // the intercepted mutation methods directly onto the array instance.
    if (_.hasProto) {
      protoAugment(value, arrayMethods, arrayKeys)
    } else {
      copyAugment(value, arrayMethods, arrayKeys)
    }
    this.observeArray(value)
  } else {
    this.walk(value)
  }
}
// Static methods

/**
 * Attempt to create an observer instance for a value,
 * returns the new observer if successfully observed,
 * or the existing observer if the value already has one.
 *
 * @param {*} value
 * @param {Vue} [vm]
 * @return {Observer|undefined}
 * @static
 */
Observer.create = function (value, vm) {
  // Primitives and null are never observed.
  if (!value || typeof value !== 'object') {
    return
  }
  var ob
  if (
    value.hasOwnProperty('__ob__') &&
    value.__ob__ instanceof Observer
  ) {
    // Already observed: reuse the observer stored on the value itself.
    ob = value.__ob__
  } else if (
    (_.isArray(value) || _.isPlainObject(value)) &&
    !Object.isFrozen(value) &&
    !value._isVue
  ) {
    // Only plain, non-frozen, non-Vue-instance objects/arrays are observable.
    ob = new Observer(value)
  }
  if (ob && vm) {
    ob.addVm(vm)
  }
  return ob
}

// Instance methods

/**
 * Walk through each property and convert them into
 * getter/setters. This method should only be called when
 * value type is Object.
 *
 * @param {Object} obj
 */
Observer.prototype.walk = function (obj) {
  var keys = Object.keys(obj)
  var i = keys.length
  while (i--) {
    this.convert(keys[i], obj[keys[i]])
  }
}

/**
 * Observe each item of an Array (recursively creates observers).
 *
 * @param {Array} items
 */
Observer.prototype.observeArray = function (items) {
  var i = items.length
  while (i--) {
    Observer.create(items[i])
  }
}

/**
 * Convert a property into getter/setter so we can emit
 * the events when the property is accessed/changed.
 *
 * @param {String} key
 * @param {*} val
 */
Observer.prototype.convert = function (key, val) {
  defineReactive(this.value, key, val)
}

/**
 * Add an owner vm, so that when $set/$delete mutations
 * happen we can notify owner vms to proxy the keys and
 * digest the watchers. This is only called when the object
 * is observed as an instance's root $data.
 *
 * @param {Vue} vm
 */
Observer.prototype.addVm = function (vm) {
  (this.vms || (this.vms = [])).push(vm)
}

/**
 * Remove an owner vm. This is called when the object is
 * swapped out as an instance's $data object.
 * Uses Vue's $remove array augmentation, not a native method.
 *
 * @param {Vue} vm
 */
Observer.prototype.removeVm = function (vm) {
  this.vms.$remove(vm)
}
// helpers

/**
 * Augment a target Object or Array by intercepting
 * the prototype chain using __proto__.
 *
 * @param {Object|Array} target
 * @param {Object} src - object holding the intercepted methods
 *     (the original doc named this "proto"; the parameter is src)
 */
function protoAugment (target, src) {
  target.__proto__ = src
}

/**
 * Augment a target Object or Array by defining
 * hidden (non-enumerable) properties on the instance itself.
 * Fallback for engines without writable __proto__.
 *
 * @param {Object|Array} target
 * @param {Object} src - object holding the intercepted methods
 * @param {Array.<String>} keys - names of the methods to copy over
 */
function copyAugment (target, src, keys) {
  var i = keys.length
  var key
  while (i--) {
    key = keys[i]
    _.define(target, key, src[key])
  }
}
/**
 * Define a reactive property on an Object: reads collect dependencies,
 * writes notify subscribed watchers.
 *
 * @param {Object} obj
 * @param {String} key
 * @param {*} val
 */
function defineReactive (obj, key, val) {
  var dep = new Dep()
  // Recursively observe the initial value (no-op for primitives).
  var childOb = Observer.create(val)
  Object.defineProperty(obj, key, {
    enumerable: true,
    configurable: true,
    get: function metaGetter () {
      // Dep.target is the watcher currently being evaluated; register this
      // property, its child observer, and any array elements' observers as
      // dependencies of that watcher.
      if (Dep.target) {
        dep.depend()
        if (childOb) {
          childOb.dep.depend()
        }
        if (_.isArray(val)) {
          for (var e, i = 0, l = val.length; i < l; i++) {
            e = val[i]
            e && e.__ob__ && e.__ob__.dep.depend()
          }
        }
      }
      return val
    },
    set: function metaSetter (newVal) {
      if (newVal === val) return
      val = newVal
      // Observe the replacement value, then notify subscribed watchers.
      childOb = Observer.create(newVal)
      dep.notify()
    }
  })
}

// Attach to the util object so it can be used elsewhere.
_.defineReactive = defineReactive

module.exports = Observer
|
{
"content_hash": "39dd643d70db6f1ce944ccf1d4af6d2c",
"timestamp": "",
"source": "github",
"line_count": 206,
"max_line_length": 59,
"avg_line_length": 20.684466019417474,
"alnum_prop": 0.6176953766721427,
"repo_name": "satriashp/e-rapor",
"id": "190b3e22c2af1f53968ba617a9c147028c8b6850",
"size": "4261",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "resources/assets/vendor/vue/src/observer/index.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "356"
},
{
"name": "JavaScript",
"bytes": "395128"
},
{
"name": "PHP",
"bytes": "1139671"
}
]
}
|
package org.scalajs.testsuite.javalib.util
import java.{util => ju}
import org.scalajs.testsuite.javalib.util.concurrent.CopyOnWriteArrayListFactory
import scala.reflect.ClassTag
/**
 * Runs the inherited list test suite against
 * `ju.Collections.synchronizedList(...)` wrappers of lists produced by
 * `originalFactory`.
 */
trait CollectionsSynchronizedListTest
    extends CollectionsOnListTest {

  // The factory producing the underlying (unwrapped) list implementation.
  def originalFactory: ListFactory

  def factory: ListFactory = {
    new ListFactory {
      override def implementationName: String =
        s"synchronizedList(${originalFactory.implementationName})"

      // Wrap every list the delegate produces in a synchronized view.
      override def empty[E: ClassTag]: ju.List[E] =
        ju.Collections.synchronizedList(originalFactory.empty[E])

      override def allowsMutationThroughIterator: Boolean =
        originalFactory.allowsMutationThroughIterator
    }
  }
}
// Concrete suites: one per underlying list implementation.
// NOTE(review): this first class extends CollectionsSynchronizedCollectionTest,
// not CollectionsSynchronizedListTest like its three siblings — possibly a
// deliberate restriction for AbstractList, possibly a copy-paste slip; verify
// against the parent traits (not visible in this file).
class CollectionsOnSynchronizedListAbstractListTest
    extends CollectionsSynchronizedCollectionTest {
  def originalFactory: ListFactory = new AbstractListFactory
}

class CollectionsOnSynchronizedListArrayListTest extends CollectionsSynchronizedListTest {
  def originalFactory: ListFactory = new ArrayListFactory
}

class CollectionsOnSynchronizedListLinkedListTest
    extends CollectionsSynchronizedListTest {
  def originalFactory: ListFactory = new LinkedListFactory
}

class CollectionsOnSynchronizedListCopyOnWriteArrayListTest
    extends CollectionsSynchronizedListTest {
  def originalFactory: ListFactory = new CopyOnWriteArrayListFactory
}
|
{
"content_hash": "1ed6ad6b867b410945cad01bc4abcbd6",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 90,
"avg_line_length": 29.06382978723404,
"alnum_prop": 0.8096632503660323,
"repo_name": "gzm0/scala-js",
"id": "4cf77dc8fcd9c8bdaa72b97a43d0e2a2a77c4488",
"size": "1638",
"binary": false,
"copies": "3",
"ref": "refs/heads/main",
"path": "test-suite/shared/src/test/scala/org/scalajs/testsuite/javalib/util/CollectionsOnSynchronizedListTest.scala",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "1030"
},
{
"name": "JavaScript",
"bytes": "5883"
},
{
"name": "Scala",
"bytes": "8081190"
},
{
"name": "Shell",
"bytes": "1299"
}
]
}
|
package Pre_competition_dev;
/**
 * States of the bug-style navigation: heading straight for the target,
 * following an obstacle boundary (clockwise or anti-clockwise), or having
 * decided the target is unreachable.
 * Do not reorder the constants: any ordinal()-based logic elsewhere would
 * change meaning (TODO confirm whether ordinals are relied upon).
 */
public enum NavBugState {
    DIRECT,BUGGING,CLOCKWISE,ANTI_CLOCKWISE,UNREACHABLE
}
|
{
"content_hash": "15ae5f88ea65c7af663d4c0893317e1e",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 52,
"avg_line_length": 23.2,
"alnum_prop": 0.7844827586206896,
"repo_name": "3urningChrome/Battlecode_Arena",
"id": "3e0878f0da2d99cdfab95ab0b4900d9ee8e983cb",
"size": "116",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Battlecode_Arena/bin/battlecode/ide/eclipse/Pre_competition_dev/NavBugState.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3881"
},
{
"name": "CoffeeScript",
"bytes": "844"
},
{
"name": "HTML",
"bytes": "20830"
},
{
"name": "Java",
"bytes": "32224"
},
{
"name": "JavaScript",
"bytes": "41909"
},
{
"name": "Ruby",
"bytes": "58127"
}
]
}
|
\section{Our technique}
We suggest a \clos{}-based \emph{protocol} defining the set of
operations on a first-class environment. This protocol contains
around $40$ generic functions. The details of the proposed protocol
can be found in the appendix of this paper. The protocol has been
implemented as part of the \sicl{} project.%
\footnote{See https://github.com/robert-strandh/SICL.}
Mainly, the protocol contains versions of \commonlisp{} environment
functions such as \texttt{fboundp}, \texttt{find-class}, etc. that
take an additional required \texttt{environment} argument.
For a simple example, consider the \sicl{} implementation of the
standard \commonlisp{} function \texttt{fboundp}:
{\small\begin{verbatim}
(defun fboundp (name)
(sicl-genv:fboundp
name
(load-time-value (sicl-genv:global-environment))))
\end{verbatim}}
In this example \texttt{sicl-genv} is the nickname for the package
named \texttt{sicl-global-environment} which contains the symbols of
the protocol defined in this paper. In each global environment, the
function \texttt{global-environment} in that package returns
the value of the environment itself. When the definition in the
example above is \emph{loaded}, either as source or from a previously
compiled file, the value of the \texttt{load-time-value} form will
therefore be the global environment in which the definition is loaded,
thereby permanently \emph{linking} this definition to that global
environment.
In addition to these functions, the protocol contains a set of
functions for accessing \emph{cells} that in most implementations
would be stored elsewhere. Thus, a binding of a function name to a
function object contains an indirection in the form of a
\emph{function cell}. The same holds for the binding of a variable
name (a symbol) to its \emph{global value}. In our implementation,
these cells are ordinary \texttt{cons} cells with the \texttt{car}
containing the value of the binding, and the \texttt{cdr} containing
\texttt{nil}. The reason for using ordinary \texttt{cons} cells is
that they are already supported in any \commonlisp{} implementation.
The only possible reason for choosing a different representation for
cells would be to save one word in each cell, since the \texttt{cdr}
slot in each of our \texttt{cons} cells is wasted. However, the saved
space would probably be more than consumed by the space occupied by
specialized inspector functionality for dealing with custom cell
representations.
These cells are created as needed. The first time a reference to a
function is made, the corresponding cell is created. Compiled code
that refers to a global function will have the corresponding cell in
its run-time environment. The cost of accessing a function at
run-time is therefore no greater in our implementation than in an
implementation that accesses the function through the symbol naming
it, hence our claim that there is no performance penalty for accessing
this information at run-time.
The \sicl{} compiler translates a reference to a global function (say
\texttt{foo}) into something similar to this code:
{\small\begin{verbatim}
(car
(load-time-value
(sicl-genv:function-cell
'foo
    (sicl-genv:global-environment))))
\end{verbatim}}
except that what is shown as \texttt{car} is not the full
\commonlisp{} function, because the argument is known to be a
\texttt{cons} cell. When the code containing this reference is
loaded, the resulting machine code will refer to a local variable
containing the \texttt{cons} cell of the current global environment
that is permanently assigned to holding the function definition of
\texttt{foo}.
Our technique does, however, incur a performance penalty for functions
such as \texttt{fdefinition} and \texttt{symbol-value} with an
argument that is computed at run-time%
\footnote{When the argument is a constant, a suitable
\emph{compiler-macro} can turn the form into an access of the
corresponding cell.}
compared to an implementation in which each symbol contains slots for
these objects. However, even in a high-performance implementation such
as \sbcl{}, these values are \emph{not} contained in symbol slots.
The performance penalty incurred on these functions depends on the
exact representation of the environment. The representation of the
environment is outside the scope of this paper, however. Here, we
only consider the \emph{protocol} for accessing it. However, it is
not hard to devise a reasonable implementation. In SICL, we use a
hash table for each namespace with the keys being the corresponding
\emph{names}%
\footnote{Functions are named by symbols and lists; variables are
named by symbols; packages are named by strings; classes are named
by symbols; etc.}
of the entities in that namespace.
While it is \emph{possible} for the application programmer to create
new global environments, it would not be a common thing to do, at
least not for the applications of first-class global environments that
we have considered so far. For that reason, we have not streamlined
any particular technique for doing so. The difficulty is not in
\emph{creating} the environment per se, but rather in filling it with
useful objects. For the purpose of bootstrapping, we currently fill
environments by loading code into it from files.
|
{
"content_hash": "a91e0b911d236f36466f7485902c2fc3",
"timestamp": "",
"source": "github",
"line_count": 109,
"max_line_length": 71,
"avg_line_length": 48.85321100917431,
"alnum_prop": 0.7885446009389672,
"repo_name": "vtomole/SICL",
"id": "a78b371d61e89556599652f2540a9c53ed6a321b",
"size": "5325",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "Papers/Global-environments/sec-our-method.tex",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Assembly",
"bytes": "9045"
},
{
"name": "C",
"bytes": "583"
},
{
"name": "Common Lisp",
"bytes": "5246394"
},
{
"name": "Gnuplot",
"bytes": "616"
},
{
"name": "HTML",
"bytes": "13809"
},
{
"name": "Makefile",
"bytes": "19329"
},
{
"name": "NewLisp",
"bytes": "13253"
},
{
"name": "PostScript",
"bytes": "173076"
},
{
"name": "Shell",
"bytes": "9568"
},
{
"name": "TeX",
"bytes": "1772033"
}
]
}
|
/**
* @file utility/list.c
* Fichier implémentant une liste en utilisant la mise en oeuvre par pointeur.
* @author Hector Basset
* @date 12 décembre 2014
*/
//librairies du système
#include <stdlib.h>
#include <string.h>
//librairies utilitaires
#include "boolean.h"
#include "list.h"
typedef struct ListElement ListElement;

/* Doubly linked list with an internal cursor ("current"). The list stores
 * COPIES of the caller's values, size_of_elements bytes each. */
struct List {
    ListElement * first;            /* head element, NULL when empty */
    ListElement * last;             /* tail element, NULL when empty */
    ListElement * current;          /* cursor used by the traversal/edit API */
    unsigned int size;              /* number of elements */
    unsigned int size_of_elements;  /* byte size of one stored value */
};

/* One node of the list; value points to an owned copy of the stored bytes. */
struct ListElement {
    void * value;
    ListElement * previous;
    ListElement * next;
};
/* Allocates a node holding a copy of the size_of_elements bytes at value.
 * previous/next are deliberately left unset; every caller links them. */
static ListElement * new_element(List * list, void * value) {
    ListElement * element = malloc(sizeof(ListElement));
    element->value = malloc(list->size_of_elements);
    memcpy(element->value, value, list->size_of_elements);
    return element;
}

/* Installs element as the single node of an empty list and points the
 * cursor at it. */
static void first_element(List * list, ListElement * element) {
    list->first = list->last = list->current = element;
    element->previous = element->next = NULL;
    list->size = 1;
}

/* Frees a node (and its owned value copy) and returns its successor, so
 * callers can advance while freeing. */
static ListElement * element_free(ListElement * element) {
    ListElement * next = element->next;
    free(element->value);
    free(element);
    return next;
}
/* Creates an empty list whose elements are size_of_elements bytes each. */
List * new_list(unsigned int size_of_elements) {
    List * list = malloc(sizeof(List));
    list->first = NULL;
    list->last = NULL;
    list->current = NULL;
    list->size = 0;
    list->size_of_elements = size_of_elements;
    return list;
}

/* Frees every element, then the list itself. The list pointer is invalid
 * after this call. */
void list_free(List * list) {
    /* element_free returns the next node, so the empty-bodied loop frees
     * the whole chain via the cursor. */
    for (list_begin(list) ; !list_out(list) ; list->current = element_free(list->current)) {
    }
    free(list);
}
/* True when the list holds no elements. */
boolean list_empty(List * list) {
    return list->size == 0;
}

/* Number of elements currently stored. */
unsigned int list_size(List * list) {
    return list->size;
}

/* Moves the cursor to the first element (NULL when the list is empty). */
void list_begin(List * list) {
    list->current = list->first;
}

/* Moves the cursor to the last element (NULL when the list is empty). */
void list_end(List * list) {
    list->current = list->last;
}

/* Steps the cursor backwards. Precondition: cursor is on an element. */
void list_previous(List * list) {
    list->current = list->current->previous;
}

/* Steps the cursor forwards. Precondition: cursor is on an element. */
void list_next(List * list) {
    list->current = list->current->next;
}

/* True when the cursor is on the first element. */
boolean list_at_begin(List * list) {
    return list->current == list->first;
}

/* True when the cursor is on the last element. */
boolean list_at_end(List * list) {
    return list->current == list->last;
}

/* True when the cursor has moved past either end (is NULL). */
boolean list_out(List * list) {
    return list->current == NULL;
}
/* Inserts a copy of value immediately before the cursor.
 * Precondition for a non-empty list: the cursor must be on an element. */
void list_add_before(List * list, void * value) {
    ListElement * element = new_element(list, value);
    if (list_empty(list)) {
        first_element(list, element);
    } else {
        element->previous = list->current->previous;
        if (list_at_begin(list)) {
            /* Inserting before the head: the new node becomes first. */
            list->first = element;
        } else {
            list->current->previous->next = element;
        }
        element->next = list->current;
        list->current->previous = element;
        list->size++;
    }
}

/* Inserts a copy of value immediately after the cursor.
 * Precondition for a non-empty list: the cursor must be on an element. */
void list_add_after(List * list, void * value) {
    ListElement * element = new_element(list, value);
    if (list_empty(list)) {
        first_element(list, element);
    } else {
        element->next = list->current->next;
        if (list_at_end(list)) {
            /* Inserting after the tail: the new node becomes last. */
            list->last = element;
        } else {
            list->current->next->previous = element;
        }
        element->previous = list->current;
        list->current->next = element;
        list->size++;
    }
}
/*
 * Prepend a copy of *value at the head of the list.
 * The cursor (list->current) is left untouched.
 */
void list_add_begin(List * list, void * value) {
	ListElement * element = new_element(list, value);
	if (list_empty(list)) {
		first_element(list, element);
		return;
	}
	/* Hook the new node in front of the old head. */
	element->previous = NULL;
	element->next = list->first;
	list->first->previous = element;
	list->first = element;
	list->size++;
}
/*
 * Append a copy of *value at the tail of the list.
 * The cursor (list->current) is left untouched.
 */
void list_add_end(List * list, void * value) {
	ListElement * element = new_element(list, value);
	if (list_empty(list)) {
		first_element(list, element);
		return;
	}
	/* Hook the new node behind the old tail. */
	element->next = NULL;
	element->previous = list->last;
	list->last->next = element;
	list->last = element;
	list->size++;
}
/*
 * Remove the element under the cursor; the cursor advances to the next
 * element (NULL when the removed element was the last).
 * Now a no-op when the cursor is out of the list — the original
 * dereferenced list->current unconditionally and crashed on an empty
 * list or an exhausted cursor.
 */
void list_remove(List * list) {
	if (list_out(list)) {
		return; /* nothing under the cursor: nothing to remove */
	}
	/* Unlink on the head side... */
	if (list_at_begin(list)) {
		list->first = list->current->next;
	} else {
		list->current->previous->next = list->current->next;
	}
	/* ...and on the tail side. */
	if (list_at_end(list)) {
		list->last = list->current->previous;
	} else {
		list->current->next->previous = list->current->previous;
	}
	list->current = element_free(list->current);
	list->size--;
}
/*
 * Remove the first element while preserving the cursor: if the cursor
 * was on the first element it moves to the new first element,
 * otherwise it is restored to where it was.
 */
void list_remove_first(List * list) {
	/* Capture where the cursor should end up before the node is freed. */
	ListElement * current = list_at_begin(list) ? list->first->next : list->current;
	list_begin(list);
	list_remove(list);
	list->current = current;
}
/*
 * Remove the last element while preserving the cursor: if the cursor
 * was on the last element it ends up NULL (out of the list, matching
 * what list_remove itself would leave), otherwise it is restored.
 */
void list_remove_last(List * list) {
	/* Capture where the cursor should end up before the node is freed. */
	ListElement * current = list_at_end(list) ? NULL : list->current;
	list_end(list);
	list_remove(list);
	list->current = current;
}
/*
 * Remove every element whose value compares byte-equal (memcmp over
 * size_of_elements bytes) to *value.
 * The cursor is restored afterwards; if the element it pointed to was
 * removed, the saved cursor slides forward each time so it never
 * references a freed node, ending on the next survivor (or NULL).
 */
void list_remove_value(List * list, void * value) {
	ListElement * current = list->current;
	for (list_begin(list) ; !list_out(list) ; ) {
		if (memcmp(list->current->value, value, list->size_of_elements) == 0) {
			/* Keep the saved cursor valid: step past the node being freed. */
			if (list->current == current) {
				current = current->next;
			}
			list_remove(list);
		} else {
			list_next(list);
		}
	}
	list->current = current;
}
/*
 * Free every element and return the list to its empty state.
 * After the walk the cursor is already NULL; head, tail and size are
 * reset explicitly.
 */
void list_clear(List * list) {
	list_begin(list);
	while (!list_out(list)) {
		list->current = element_free(list->current);
	}
	list->first = NULL;
	list->last = NULL;
	list->size = 0;
}
/* Copy the value under the cursor into *pointer (size_of_elements bytes).
 * Precondition: the cursor is on an element (list->current != NULL). */
void list_get_value(List * list, void * pointer) {
	memcpy(pointer, list->current->value, list->size_of_elements);
}
/* Copy the first element's value into *pointer (size_of_elements bytes).
 * Precondition: the list is not empty. */
void list_get_first(List * list, void * pointer) {
	memcpy(pointer, list->first->value, list->size_of_elements);
}
/* Copy the last element's value into *pointer (size_of_elements bytes).
 * Precondition: the list is not empty. */
void list_get_last(List * list, void * pointer) {
	memcpy(pointer, list->last->value, list->size_of_elements);
}
/*
 * True when some element's value compares byte-equal to *value
 * (memcmp over size_of_elements bytes).
 * The cursor position is saved and restored around the scan.
 */
boolean list_contains(List * list, void * value) {
	ListElement * saved = list->current;
	boolean found = false;
	for (list_begin(list) ; !list_out(list) ; list_next(list)) {
		if (memcmp(list->current->value, value, list->size_of_elements) == 0) {
			found = true;
			break;
		}
	}
	list->current = saved;
	return found;
}
|
{
"content_hash": "c6bde7a91e95fa2f669d78bad9cc443a",
"timestamp": "",
"source": "github",
"line_count": 237,
"max_line_length": 89,
"avg_line_length": 23.189873417721518,
"alnum_prop": 0.6610262008733624,
"repo_name": "AyHeYo/ProjetRoguelike",
"id": "f51721fb3ede6609d1ce9341dec757b6c04fe7b3",
"size": "5499",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "source/utility/list.c",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "75866"
},
{
"name": "C++",
"bytes": "8085"
},
{
"name": "Makefile",
"bytes": "1798"
},
{
"name": "Objective-C",
"bytes": "3588"
}
]
}
|
'use strict';
// Aggregate the cart controllers under one require point.
exports.cart = require('./cart');
exports.cartItem = require('./cart_item');
// NOTE(review): exported as `user` but loads './my_cart' — looks like a
// name/module mismatch; confirm against the route definitions that
// consume this export before renaming either side.
exports.user = require('./my_cart');
|
{
"content_hash": "36294989974ef1197197e9c97337e95c",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 42,
"avg_line_length": 25.8,
"alnum_prop": 0.6589147286821705,
"repo_name": "apigee-127/phrixus",
"id": "1dba206144648ec94df2aa99b25c00aa64a0232d",
"size": "129",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "cart/src/controllers/index.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1666"
},
{
"name": "JavaScript",
"bytes": "156592"
},
{
"name": "Shell",
"bytes": "516"
}
]
}
|
<?php
namespace Lolautruche\PaylineBundle\Tests\Event;
use Lolautruche\PaylineBundle\Event\PaymentNotificationEvent;
use Lolautruche\PaylineBundle\Payline\PaylineResult;
use PHPUnit\Framework\TestCase;
use Symfony\Component\HttpFoundation\Response;
/**
 * Unit tests for PaymentNotificationEvent: construction, the optional
 * HTTP response it can carry, and the payment-state predicates that
 * delegate to the wrapped PaylineResult.
 */
class PaymentNotificationEventTest extends TestCase
{
    /** The event exposes the exact PaylineResult it was built with. */
    public function testConstruct()
    {
        $paylineResult = new PaylineResult([]);
        $event = new PaymentNotificationEvent($paylineResult);
        self::assertSame($paylineResult, $event->getPaylineResult());
    }
    /** No response by default; set/get/has round-trip once one is attached. */
    public function testSetGetResponse()
    {
        $event = new PaymentNotificationEvent(new PaylineResult([]));
        self::assertNull($event->getResponse());
        self::assertFalse($event->hasResponse());
        $response = new Response();
        $event->setResponse($response);
        self::assertTrue($event->hasResponse());
        self::assertSame($response, $event->getResponse());
    }
    /** isPaymentSuccessful() is driven solely by the result code. */
    public function testIsSuccessful()
    {
        // Successful result
        $result = new PaylineResult([
            'result' => [
                'code' => PaylineResult::CODE_TRANSACTION_APPROVED,
                'shortMessage' => 'foo',
                'longMessage' => 'bar',
            ],
        ]);
        $event = new PaymentNotificationEvent($result);
        self::assertTrue($event->isPaymentSuccessful());
        // Unsuccessful result
        $result = new PaylineResult([
            'result' => [
                'code' => '12345',
                'shortMessage' => 'foo',
                'longMessage' => 'bar',
            ],
        ]);
        $event = new PaymentNotificationEvent($result);
        self::assertFalse($event->isPaymentSuccessful());
    }
    /** Cancellation is detected from the dedicated canceled result code. */
    public function testIsCanceled()
    {
        // Canceled
        $result = new PaylineResult([
            'result' => [
                'code' => PaylineResult::CODE_TRANSACTION_CANCELED,
                'shortMessage' => 'foo',
                'longMessage' => 'bar',
            ],
        ]);
        $event = new PaymentNotificationEvent($result);
        self::assertTrue($event->isPaymentCanceledByUser());
        // Not canceled
        $result = new PaylineResult([
            'result' => [
                'code' => PaylineResult::CODE_TRANSACTION_APPROVED,
                'shortMessage' => 'foo',
                'longMessage' => 'bar',
            ],
        ]);
        $event = new PaymentNotificationEvent($result);
        self::assertFalse($event->isPaymentCanceledByUser());
    }
    /**
     * A payment counts as duplicate when the result code says so OR the
     * transaction block carries isDuplicated=1; an approved, non-flagged
     * transaction is not a duplicate.
     */
    public function testIsDuplicate()
    {
        // Duplicate
        $result = new PaylineResult([
            'result' => [
                'code' => PaylineResult::CODE_TRANSACTION_DUPLICATE,
                'shortMessage' => 'foo',
                'longMessage' => 'bar',
            ],
            'transaction' => [
                'id' => '26272160000636',
                'date' => '28/09/2016 16:00',
                'isDuplicated' => '0',
                'isPossibleFraud' => '0'
            ]
        ]);
        $event = new PaymentNotificationEvent($result);
        self::assertTrue($event->isPaymentDuplicate());
        // Duplicate via the transaction flag, with an approved code
        $result = new PaylineResult([
            'result' => [
                'code' => PaylineResult::CODE_TRANSACTION_APPROVED,
                'shortMessage' => 'foo',
                'longMessage' => 'bar',
            ],
            'transaction' => [
                'id' => '26272160000636',
                'date' => '28/09/2016 16:00',
                'isDuplicated' => '1',
                'isPossibleFraud' => '0'
            ]
        ]);
        $event = new PaymentNotificationEvent($result);
        self::assertTrue($event->isPaymentDuplicate());
        // Unsuccessful result
        $result = new PaylineResult([
            'result' => [
                'code' => PaylineResult::CODE_TRANSACTION_APPROVED,
                'shortMessage' => 'foo',
                'longMessage' => 'bar',
            ],
            'transaction' => [
                'id' => '26272160000636',
                'date' => '28/09/2016 16:00',
                'isDuplicated' => '0',
                'isPossibleFraud' => '0'
            ]
        ]);
        $event = new PaymentNotificationEvent($result);
        self::assertFalse($event->isPaymentDuplicate());
    }
}
|
{
"content_hash": "f5a0c40dae5057bac569a53f547e5dbe",
"timestamp": "",
"source": "github",
"line_count": 134,
"max_line_length": 69,
"avg_line_length": 32.43283582089552,
"alnum_prop": 0.5149562816382881,
"repo_name": "lolautruche/LolautruchePaylineBundle",
"id": "6e1f4c9de5eceb9fac1e885542c3a53e59d2e141",
"size": "4599",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Tests/Event/PaymentNotificationEventTest.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "78315"
}
]
}
|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>tactician-stdlib: Not compatible 👼</title>
<link rel="shortcut icon" type="image/png" href="../../../../../favicon.png" />
<link href="../../../../../bootstrap.min.css" rel="stylesheet">
<link href="../../../../../bootstrap-custom.css" rel="stylesheet">
<link href="//maxcdn.bootstrapcdn.com/font-awesome/4.2.0/css/font-awesome.min.css" rel="stylesheet">
<script src="../../../../../moment.min.js"></script>
<!-- HTML5 Shim and Respond.js IE8 support of HTML5 elements and media queries -->
<!-- WARNING: Respond.js doesn't work if you view the page via file:// -->
<!--[if lt IE 9]>
<script src="https://oss.maxcdn.com/html5shiv/3.7.2/html5shiv.min.js"></script>
<script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script>
<![endif]-->
</head>
<body>
<div class="container">
<div class="navbar navbar-default" role="navigation">
<div class="container-fluid">
<div class="navbar-header">
<a class="navbar-brand" href="../../../../.."><i class="fa fa-lg fa-flag-checkered"></i> Coq bench</a>
</div>
<div id="navbar" class="collapse navbar-collapse">
<ul class="nav navbar-nav">
<li><a href="../..">clean / released</a></li>
<li class="active"><a href="">8.10.2 / tactician-stdlib - 1.0~beta1+8.11</a></li>
</ul>
</div>
</div>
</div>
<div class="article">
<div class="row">
<div class="col-md-12">
<a href="../..">« Up</a>
<h1>
tactician-stdlib
<small>
1.0~beta1+8.11
<span class="label label-info">Not compatible 👼</span>
</small>
</h1>
<p>📅 <em><script>document.write(moment("2022-06-30 02:36:08 +0000", "YYYY-MM-DD HH:mm:ss Z").fromNow());</script> (2022-06-30 02:36:08 UTC)</em><p>
<h2>Context</h2>
<pre># Packages matching: installed
# Name # Installed # Synopsis
base-bigarray base
base-num base Num library distributed with the OCaml compiler
base-threads base
base-unix base
conf-findutils 1 Virtual package relying on findutils
coq 8.10.2 Formal proof management system
num 0 The Num library for arbitrary-precision integer and rational arithmetic
ocaml 4.05.0 The OCaml compiler (virtual package)
ocaml-base-compiler 4.05.0 Official 4.05.0 release
ocaml-config 1 OCaml Switch Configuration
ocamlfind 1.9.5 A library manager for OCaml
# opam file:
opam-version: "2.0"
name: "coq-tactician-stdlib"
synopsis: "Recompiles Coq's standard libary with Tactician's instrumentation loaded"
description: """
*** WARNING *** This package will overwrite Coq's standard library files.
This package recompiles Coq's standard library with Tactician's (`coq-tactician`)
instrumentation loaded such that Tactician can learn from the library. When you
install this package, the current `.vo` files of the standard library are backed
in the folder `user-contrib/Tactician/stdlib-backup`. Then exactly the same `.vo`
files are installed, except that they also contain Tactician's tactic databases.
Upon removal of this package, the original files will be placed back.
"""
homepage: "https://coq-tactician.github.io"
dev-repo: "git+https://github.com/coq-tactician/coq-tactician-stdlib"
bug-reports: "https://github.com/coq-tactician/coq-tactician-stdlib/issues"
maintainer: "Lasse Blaauwbroek <lasse@blaauwbroek.eu>"
authors: "Lasse Blaauwbroek <lasse@blaauwbroek.eu"
messages: [
"*** WARNING ***"
"This package will overwrite Coq's standard library files."
"A backup of the original files will be placed under Coq's"
"library directory at user-contrib/tactician-stdlib-backup/"
"and they will be restored when you remove this package"
]
post-messages: ["
--- The standard library was successfully recompiled ---
In order to finish the process, you should run
tactician recompile
" {success}]
depends: [
"coq" {>= "8.11" & < "8.12~"}
"coq-tactician"
]
build: [
[make "-j%{jobs}%"]
]
install: [
[make "install"]
]
remove: [
[make "restore"]
]
url {
src: "https://github.com/coq-tactician/coq-tactician-stdlib/archive/1.0-beta1-8.11.tar.gz"
}
tags: [
"keyword:tactic-learning"
"keyword:machine-learning"
"keyword:automation"
"keyword:proof-synthesis"
"category:Miscellaneous/Coq Extensions"
"logpath:Tactician"
]
</pre>
<h2>Lint</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
</dl>
<h2>Dry install 🏜️</h2>
<p>Dry install with the current Coq version:</p>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>opam install -y --show-action coq-tactician-stdlib.1.0~beta1+8.11 coq.8.10.2</code></dd>
<dt>Return code</dt>
<dd>5120</dd>
<dt>Output</dt>
<dd><pre>[NOTE] Package coq is already installed (current version is 8.10.2).
The following dependencies couldn't be met:
- coq-tactician-stdlib -> coq-tactician -> ocaml >= 4.08
base of this switch (use `--unlock-base' to force)
No solution found, exiting
</pre></dd>
</dl>
<p>Dry install without Coq/switch base, to test if the problem was incompatibility with the current Coq/OCaml version:</p>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>opam remove -y coq; opam install -y --show-action --unlock-base coq-tactician-stdlib.1.0~beta1+8.11</code></dd>
<dt>Return code</dt>
<dd>0</dd>
</dl>
<h2>Install dependencies</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Duration</dt>
<dd>0 s</dd>
</dl>
<h2>Install 🚀</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Duration</dt>
<dd>0 s</dd>
</dl>
<h2>Installation size</h2>
<p>No files were installed.</p>
<h2>Uninstall 🧹</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Missing removes</dt>
<dd>
none
</dd>
<dt>Wrong removes</dt>
<dd>
none
</dd>
</dl>
</div>
</div>
</div>
<hr/>
<div class="footer">
<p class="text-center">
Sources are on <a href="https://github.com/coq-bench">GitHub</a> © Guillaume Claret 🐣
</p>
</div>
</div>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script>
<script src="../../../../../bootstrap.min.js"></script>
</body>
</html>
|
{
"content_hash": "bad06cb7c86a5ed5763170ab9a7b2367",
"timestamp": "",
"source": "github",
"line_count": 192,
"max_line_length": 159,
"avg_line_length": 41.348958333333336,
"alnum_prop": 0.5709787126842172,
"repo_name": "coq-bench/coq-bench.github.io",
"id": "a42ef00d1a5d6130d86c0eb9aae09526bdf1c569",
"size": "7964",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "clean/Linux-x86_64-4.05.0-2.0.1/released/8.10.2/tactician-stdlib/1.0~beta1+8.11.html",
"mode": "33188",
"license": "mit",
"language": []
}
|
using System;
using System.Reflection;
namespace ceenq.com.Core.Extensions {
    /// <summary>Reflection helpers for inspecting CLR types.</summary>
    public static class TypeExtensions
    {
        /// <summary>
        /// Heuristically decides whether a property's declared type is a
        /// collection: a read-only generic collection, or an enumerable
        /// type that exposes a matching Add method.
        /// </summary>
        /// <param name="property">Property whose declared type is inspected.</param>
        /// <returns>true when the type looks like a collection; otherwise false.</returns>
        public static bool IsIEnumerable(this PropertyInfo property)
        {
            Type type = property.PropertyType;
            // IReadOnlyCollection<T> qualifies outright (no Add required).
            Type inter = type.GetInterface("System.Collections.Generic.IReadOnlyCollection`1", false);
            if (inter != null)
                return true;
            // Non-generic IEnumerable counts only when the type can grow via Add(object).
            inter = type.GetInterface("System.Collections.IEnumerable", false);
            if (inter != null && type.GetMethod("Add", new Type[] { typeof(object) }) != null)
                return true;
            // Generic IEnumerable<T> counts only when the type exposes Add(T).
            inter = type.GetInterface("System.Collections.Generic.IEnumerable`1", false);
            if (inter != null && type.GetMethod("Add", new Type[] { inter.GetGenericArguments()[0] }) != null)
                return true;
            return false;
        }
        /// <summary>
        /// Public instance properties of <paramref name="type"/> that have
        /// a public getter or setter.
        /// </summary>
        public static PropertyInfo[] GetPublicProperties(this Type type)
        {
            return type.GetProperties(BindingFlags.Public | BindingFlags.Instance | BindingFlags.GetProperty | BindingFlags.SetProperty);
        }
    }
}
|
{
"content_hash": "f818a94bc7d21f32c7944c19ee2c73f2",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 137,
"avg_line_length": 36.87096774193548,
"alnum_prop": 0.6097987751531059,
"repo_name": "bill-cooper/catc-cms",
"id": "02504cd816aacfca4603a346464ba229f3814e4c",
"size": "1143",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Orchard.Web/Modules/ceenq.com.Core/Extensions/TypeExtensions.cs",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ASP",
"bytes": "1401"
},
{
"name": "Batchfile",
"bytes": "5675"
},
{
"name": "C",
"bytes": "4755"
},
{
"name": "C#",
"bytes": "10526440"
},
{
"name": "CSS",
"bytes": "1254118"
},
{
"name": "Cucumber",
"bytes": "89076"
},
{
"name": "HTML",
"bytes": "100974"
},
{
"name": "JavaScript",
"bytes": "4268248"
},
{
"name": "PowerShell",
"bytes": "22046"
},
{
"name": "SQLPL",
"bytes": "1827"
},
{
"name": "TypeScript",
"bytes": "51422"
},
{
"name": "XSLT",
"bytes": "119918"
}
]
}
|
set -x
set -euvo pipefail

apt-get update
apt-get install build-essential git -y

cd /opt/
NODE_ENV=production
PACKAGE=meteor-spk-0.3.0
PACKAGE_FILENAME="$PACKAGE.tar.xz"
CACHE_TARGET="/host-dot-sandstorm/caches/${PACKAGE_FILENAME}"

# Fetch meteor-spk tarball if not cached.
# -f makes curl fail on HTTP errors instead of caching an error page,
# and downloading to .partial before an atomic rename keeps a truncated
# transfer from poisoning the cache (same discipline as the meteor
# download below; the original wrote straight to the cache target here).
if [ ! -f "$CACHE_TARGET" ] ; then
  curl -f "https://dl.sandstorm.io/${PACKAGE_FILENAME}" > "${CACHE_TARGET}.partial"
  mv "${CACHE_TARGET}"{.partial,}
fi

# Extract to /opt
tar xf "$CACHE_TARGET"

# Create symlink so we can rely on the path /opt/meteor-spk
ln -s "${PACKAGE}" meteor-spk

# Add bash, and its dependencies, so they get mapped into the image.
# Bash runs the launcher script.
cp -a /bin/bash /opt/meteor-spk/meteor-spk.deps/bin/
cp -a /lib/x86_64-linux-gnu/libncurses.so.* /opt/meteor-spk/meteor-spk.deps/lib/x86_64-linux-gnu/
cp -a /lib/x86_64-linux-gnu/libtinfo.so.* /opt/meteor-spk/meteor-spk.deps/lib/x86_64-linux-gnu/

# Unfortunately, Meteor does not explicitly make it easy to cache packages, but
# we know experimentally that the package is mostly directly extractable to a
# user's $HOME/.meteor directory.
METEOR_RELEASE=1.4.2
METEOR_PLATFORM=os.linux.x86_64
METEOR_TARBALL_FILENAME="meteor-bootstrap-${METEOR_PLATFORM}.tar.gz"
METEOR_TARBALL_URL="https://d3sqy0vbqsdhku.cloudfront.net/packages-bootstrap/${METEOR_RELEASE}/${METEOR_TARBALL_FILENAME}"
METEOR_CACHE_TARGET="/host-dot-sandstorm/caches/${METEOR_TARBALL_FILENAME}"

# Fetch meteor tarball if not cached (same -f / .partial discipline).
if [ ! -f "$METEOR_CACHE_TARGET" ] ; then
  curl -f "$METEOR_TARBALL_URL" > "${METEOR_CACHE_TARGET}.partial"
  mv "${METEOR_CACHE_TARGET}"{.partial,}
fi

# Extract as unprivileged user, which is the usual meteor setup
cd /home/vagrant/
su -c "tar xf '${METEOR_CACHE_TARGET}'" vagrant

# Link into global PATH
ln -s /home/vagrant/.meteor/meteor /usr/bin/meteor
chown vagrant:vagrant /home/vagrant -R
|
{
"content_hash": "690d4ead2ef8e56a6206b59970a5cc1b",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 122,
"avg_line_length": 35.666666666666664,
"alnum_prop": 0.7383177570093458,
"repo_name": "yuyixg/Rocket.Chat",
"id": "97913ab6281bae1eb8d4cb45c8fb51e3e8cb9cec",
"size": "1831",
"binary": false,
"copies": "5",
"ref": "refs/heads/develop",
"path": ".sandstorm/setup.sh",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "548"
},
{
"name": "CSS",
"bytes": "270698"
},
{
"name": "Cap'n Proto",
"bytes": "3868"
},
{
"name": "CoffeeScript",
"bytes": "886483"
},
{
"name": "HTML",
"bytes": "417885"
},
{
"name": "JavaScript",
"bytes": "1488424"
},
{
"name": "Ruby",
"bytes": "4237"
},
{
"name": "Shell",
"bytes": "9398"
}
]
}
|
## Data retrieval / Finders
Finder methods are intended to query data from the database. They do *not* return plain objects but instead return model instances. Because finder methods return model instances you can call any model instance member on the result as described in the documentation for [*instances*](http://docs.sequelizejs.com/en/latest/docs/instances/).
In this document we'll explore what finder methods can do:
### find - Search for one specific element in the database
```js
// search for known ids
Project.findById(123).then(function(project) {
// project will be an instance of Project and stores the content of the table entry
// with id 123. if such an entry is not defined you will get null
})
// search for attributes
Project.findOne({ where: {title: 'aProject'} }).then(function(project) {
// project will be the first entry of the Projects table with the title 'aProject' || null
})
Project.findOne({
where: {title: 'aProject'},
attributes: ['id', ['name', 'title']]
}).then(function(project) {
// project will be the first entry of the Projects table with the title 'aProject' || null
// project.title will contain the name of the project
})
```
### findOrCreate - Search for a specific element or create it if not available
The method `findOrCreate` can be used to check if a certain element already exists in the database. If that is the case the method will result in a respective instance. If the element does not yet exist, it will be created.
Let's assume we have an empty database with a `User` model which has a `username` and a `job`.
```js
User
.findOrCreate({where: {username: 'sdepold'}, defaults: {job: 'Technical Lead JavaScript'}})
.spread(function(user, created) {
console.log(user.get({
plain: true
}))
console.log(created)
/*
{
username: 'sdepold',
job: 'Technical Lead JavaScript',
id: 1,
createdAt: Fri Mar 22 2013 21: 28: 34 GMT + 0100(CET),
updatedAt: Fri Mar 22 2013 21: 28: 34 GMT + 0100(CET)
}
created: true
*/
})
```
The code created a new instance. So when we already have an instance ...
```js
User
.create({ username: 'fnord', job: 'omnomnom' })
.then(function() {
User
.findOrCreate({where: {username: 'fnord'}, defaults: {job: 'something else'}})
.spread(function(user, created) {
console.log(user.get({
plain: true
}))
console.log(created)
/*
{
username: 'fnord',
job: 'omnomnom',
id: 2,
createdAt: Fri Mar 22 2013 21: 28: 34 GMT + 0100(CET),
updatedAt: Fri Mar 22 2013 21: 28: 34 GMT + 0100(CET)
}
created: false
*/
})
})
```
... the existing entry will not be changed. See the `job` of the second user, and the fact that created was false.
### findAndCountAll - Search for multiple elements in the database, returns both data and total count
This is a convenience method that combines`findAll` and `count` (see below) this is useful when dealing with queries related to pagination where you want to retrieve data with a `limit` and `offset` but also need to know the total number of records that match the query:
The success handler will always receive an object with two properties:
* `count` - an integer, total number records matching the where clause
* `rows` - an array of objects, the records matching the where clause, within the limit and offset range
```js
Project
.findAndCountAll({
where: {
title: {
$like: 'foo%'
}
},
offset: 10,
limit: 2
})
.then(function(result) {
console.log(result.count);
console.log(result.rows);
});
```
`findAndCountAll` also supports includes. Only the includes that are marked as `required` will be added to the count part:
Suppose you want to find all users who have a profile attached:
```js
User.findAndCountAll({
include: [
{ model: Profile, required: true}
],
  limit: 3
});
```
Because the include for `Profile` has `required` set it will result in an inner join, and only the users who have a profile will be counted. If we remove `required` from the include, both users with and without profiles will be counted. Adding a `where` clause to the include automatically makes it required:
```js
User.findAndCountAll({
include: [
{ model: Profile, where: { active: true }}
],
  limit: 3
});
```
The query above will only count users who have an active profile, because `required` is implicitly set to true when you add a where clause to the include.
The options object that you pass to `findAndCountAll` is the same as for `findAll` (described below).
### findAll - Search for multiple elements in the database
```js
// find multiple entries
Project.findAll().then(function(projects) {
// projects will be an array of all Project instances
})
// also possible:
Project.all().then(function(projects) {
// projects will be an array of all Project instances
})
// search for specific attributes - hash usage
Project.findAll({ where: { name: 'A Project' } }).then(function(projects) {
// projects will be an array of Project instances with the specified name
})
// search with string replacements
Project.findAll({ where: ["id > ?", 25] }).then(function(projects) {
// projects will be an array of Projects having a greater id than 25
})
// search within a specific range
Project.findAll({ where: { id: [1,2,3] } }).then(function(projects) {
// projects will be an array of Projects having the id 1, 2 or 3
// this is actually doing an IN query
})
Project.findAll({
where: {
id: {
      $and: {a: 5}, // AND (a = 5)
      $or: [{a: 5}, {a: 6}], // (a = 5 OR a = 6)
      $gt: 6, // id > 6
      $gte: 6, // id >= 6
      $lt: 10, // id < 10
      $lte: 10, // id <= 10
      $ne: 20, // id != 20
      $between: [6, 10], // BETWEEN 6 AND 10
      $notBetween: [11, 15], // NOT BETWEEN 11 AND 15
      $in: [1, 2], // IN [1, 2]
      $notIn: [1, 2], // NOT IN [1, 2]
      $like: '%hat', // LIKE '%hat'
      $notLike: '%hat', // NOT LIKE '%hat'
      $iLike: '%hat', // ILIKE '%hat' (case insensitive) (PG only)
      $notILike: '%hat', // NOT ILIKE '%hat' (PG only)
      $overlap: [1, 2], // && [1, 2] (PG array overlap operator)
      $contains: [1, 2], // @> [1, 2] (PG array contains operator)
      $contained: [1, 2], // <@ [1, 2] (PG array contained by operator)
      $any: [2,3] // ANY ARRAY[2, 3]::INTEGER (PG only)
},
status: {
$not: false, // status NOT FALSE
}
}
})
```
### Complex filtering / OR / NOT queries
It's possible to do complex where queries with multiple levels of nested AND, OR and NOT conditions. In order to do that you can use `$or`, `$and` or `$not`:
```js
Project.findOne({
where: {
name: 'a project',
$or: [
{ id: [1,2,3] },
{ id: { $gt: 10 } }
]
}
})
Project.findOne({
where: {
name: 'a project',
id: {
$or: [
[1,2,3],
{ $gt: 10 }
]
}
}
})
```
Both pieces of code will generate the following:
```sql
SELECT *
FROM `Projects`
WHERE (
`Projects`.`name` = 'a project'
AND (`Projects`.`id` IN (1,2,3) OR `Projects`.`id` > 10)
)
LIMIT 1;
```
`$not` example:
```js
Project.findOne({
where: {
name: 'a project',
$not: [
{ id: [1,2,3] },
{ array: { $contains: [3,4,5] } }
]
}
});
```
Will generate:
```sql
SELECT *
FROM `Projects`
WHERE (
`Projects`.`name` = 'a project'
  AND NOT (`Projects`.`id` IN (1,2,3) OR `Projects`.`array` @> ARRAY[3,4,5]::INTEGER[])
)
LIMIT 1;
```
### Manipulating the dataset with limit, offset, order and group
To get more relevant data, you can use limit, offset, order and grouping:
```js
// limit the results of the query
Project.findAll({ limit: 10 })
// step over the first 10 elements
Project.findAll({ offset: 10 })
// step over the first 10 elements, and take 2
Project.findAll({ offset: 10, limit: 2 })
```
The syntax for grouping and ordering are equal, so below it is only explained with a single example for group, and the rest for order. Everything you see below can also be done for group
```js
Project.findAll({order: 'title DESC'})
// yields ORDER BY title DESC
Project.findAll({group: 'name'})
// yields GROUP BY name
```
Notice how in the two examples above, the string provided is inserted verbatim into the query, i.e. column names are not escaped. When you provide a string to order/group, this will always be the case. If you want to escape column names, you should provide an array of arguments, even though you only want to order/group by a single column
```js
something.findOne({
order: [
'name',
// will return `name`
'username DESC',
// will return `username DESC` -- i.e. don't do it!
['username', 'DESC'],
// will return `username` DESC
sequelize.fn('max', sequelize.col('age')),
// will return max(`age`)
[sequelize.fn('max', sequelize.col('age')), 'DESC'],
// will return max(`age`) DESC
[sequelize.fn('otherfunction', sequelize.col('col1'), 12, 'lalala'), 'DESC'],
// will return otherfunction(`col1`, 12, 'lalala') DESC
[sequelize.fn('otherfunction', sequelize.fn('awesomefunction', sequelize.col('col'))), 'DESC']
// will return otherfunction(awesomefunction(`col`)) DESC, This nesting is potentially infinite!
[{ raw: 'otherfunction(awesomefunction(`col`))' }, 'DESC']
// This won't be quoted, but direction will be added
]
})
```
To recap, the elements of the order/group array can be the following:
* String - will be quoted
* Array - first element will be quoted, second will be appended verbatim
* Object -
* Raw will be added verbatim without quoting
* Everything else is ignored, and if raw is not set, the query will fail
* Sequelize.fn and Sequelize.col return functions and quoted columns, respectively
### Raw queries
Sometimes you might be expecting a massive dataset that you just want to display, without manipulation. For each row you select, Sequelize creates an instance with functions for update, delete, get associations etc. If you have thousands of rows, this might take some time. If you only need the raw data and don't want to update anything, you can do like this to get the raw data.
```js
// Are you expecting a massive dataset from the DB,
// and don't want to spend the time building DAOs for each entry?
// You can pass an extra query option to get the raw data instead:
Project.findAll({ where: { ... }, raw: true })
```
### count - Count the occurrences of elements in the database
There is also a method for counting database objects:
```js
Project.count().then(function(c) {
console.log("There are " + c + " projects!")
})
Project.count({ where: ["id > ?", 25] }).then(function(c) {
console.log("There are " + c + " projects with an id greater than 25.")
})
```
### max - Get the greatest value of a specific attribute within a specific table
And here is a method for getting the max value of an attribute:
```js
/*
Let's assume 3 person objects with an attribute age.
The first one is 10 years old,
the second one is 5 years old,
the third one is 40 years old.
*/
Project.max('age').then(function(max) {
// this will return 40
})
Project.max('age', { where: { age: { $lt: 20 } } }).then(function(max) {
// will be 10
})
```
### min - Get the least value of a specific attribute within a specific table
And here is a method for getting the min value of an attribute:
```js
/*
Let's assume 3 person objects with an attribute age.
The first one is 10 years old,
the second one is 5 years old,
the third one is 40 years old.
*/
Project.min('age').then(function(min) {
// this will return 5
})
Project.min('age', { where: { age: { $gt: 5 } } }).then(function(min) {
// will be 10
})
```
### sum - Sum the value of specific attributes
In order to calculate the sum over a specific column of a table, you can
use the `sum` method.
```js
/*
Let's assume 3 person objects with an attribute age.
The first one is 10 years old,
the second one is 5 years old,
the third one is 40 years old.
*/
Project.sum('age').then(function(sum) {
// this will return 55
})
Project.sum('age', { where: { age: { $gt: 5 } } }).then(function(sum) {
// will be 50
})
```
## Eager loading
When you are retrieving data from the database there is a fair chance that you also want to get associations with the same query - this is called eager loading. The basic idea behind that, is the use of the attribute `include` when you are calling `find` or `findAll`. Lets assume the following setup:
```js
var User = sequelize.define('user', { name: Sequelize.STRING })
, Task = sequelize.define('task', { name: Sequelize.STRING })
, Tool = sequelize.define('tool', { name: Sequelize.STRING })
Task.belongsTo(User)
User.hasMany(Task)
User.hasMany(Tool, { as: 'Instruments' })
sequelize.sync().then(function() {
// this is where we continue ...
})
```
OK. So, first of all, let's load all tasks with their associated user.
```js
Task.findAll({ include: [ User ] }).then(function(tasks) {
console.log(JSON.stringify(tasks))
/*
[{
"name": "A Task",
"id": 1,
"createdAt": "2013-03-20T20:31:40.000Z",
"updatedAt": "2013-03-20T20:31:40.000Z",
"userId": 1,
"user": {
"name": "John Doe",
"id": 1,
"createdAt": "2013-03-20T20:31:45.000Z",
"updatedAt": "2013-03-20T20:31:45.000Z"
}
}]
*/
})
```
Notice that the accessor (the `User` property in the resulting instance) is singular because the association is one-to-something.
Next thing: Loading of data with many-to-something associations!
```js
User.findAll({ include: [ Task ] }).then(function(users) {
console.log(JSON.stringify(users))
/*
[{
"name": "John Doe",
"id": 1,
"createdAt": "2013-03-20T20:31:45.000Z",
"updatedAt": "2013-03-20T20:31:45.000Z",
"tasks": [{
"name": "A Task",
"id": 1,
"createdAt": "2013-03-20T20:31:40.000Z",
"updatedAt": "2013-03-20T20:31:40.000Z",
"userId": 1
}]
}]
*/
})
```
Notice that the accessor (the `Tasks` property in the resulting instance) is plural because the association is many-to-something.
If an association is aliased (using the `as` option), you must specify this alias when including the model. Notice how the user's `Tool`s are aliased as `Instruments` above. In order to get that right you have to specify the model you want to load, as well as the alias:
```js
User.findAll({ include: [{ model: Tool, as: 'Instruments' }] }).then(function(users) {
console.log(JSON.stringify(users))
/*
[{
"name": "John Doe",
"id": 1,
"createdAt": "2013-03-20T20:31:45.000Z",
"updatedAt": "2013-03-20T20:31:45.000Z",
"Instruments": [{
"name": "Toothpick",
"id": 1,
"createdAt": null,
"updatedAt": null,
"userId": 1
}]
}]
*/
})
```
When eager loading we can also filter the associated model using `where`. This will return all `User`s in which the `where` clause of `Tool` model matches rows.
```js
User.findAll({
include: [{
model: Tool,
as: 'Instruments',
where: { name: { $like: '%ooth%' } }
}]
}).then(function(users) {
console.log(JSON.stringify(users))
/*
[{
"name": "John Doe",
"id": 1,
"createdAt": "2013-03-20T20:31:45.000Z",
"updatedAt": "2013-03-20T20:31:45.000Z",
"Instruments": [{
"name": "Toothpick",
"id": 1,
"createdAt": null,
"updatedAt": null,
"userId": 1
}]
}],
[{
"name": "John Smith",
"id": 2,
"createdAt": "2013-03-20T20:31:45.000Z",
"updatedAt": "2013-03-20T20:31:45.000Z",
"Instruments": [{
"name": "Toothpick",
"id": 1,
"createdAt": null,
"updatedAt": null,
"userId": 1
}]
}],
*/
})
```
When an eager loaded model is filtered using `include.where` then `include.required` is implicitly set to
`true`. This means that an inner join is done returning parent models with any matching children.
### Including everything
To include all attributes, you can pass a single object with `all: true`:
```js
User.findAll({ include: [{ all: true }]});
```
### Including soft deleted records
In case you want to eager load soft deleted records you can do that by setting `include.paranoid` to `true`
```js
User.findAll({
include: [{
model: Tool,
where: { name: { $like: '%ooth%' } },
paranoid: true // queries and loads the soft deleted records
}]
});
```
### Ordering Eager Loaded Associations
In the case of a one-to-many relationship.
```js
Company.findAll({ include: [ Division ], order: [ [ Division, 'name' ] ] });
Company.findAll({ include: [ Division ], order: [ [ Division, 'name', 'DESC' ] ] });
Company.findAll({
include: [ { model: Division, as: 'Div' } ],
order: [ [ { model: Division, as: 'Div' }, 'name' ] ]
});
Company.findAll({
include: [ { model: Division, include: [ Department ] } ],
order: [ [ Division, Department, 'name' ] ]
});
```
In the case of many-to-many joins, you are also able to sort by attributes in the through table.
```js
Company.findAll({
include: [ { model: Division, include: [ Department ] } ],
order: [ [ Division, DepartmentDivision, 'name' ] ]
});
```
### Nested eager loading
You can use nested eager loading to load all related models of a related model:
```js
User.findAll({
include: [
{model: Tool, as: 'Instruments', include: [
{model: Teacher, include: [ /* etc */]}
]}
]
}).then(function(users) {
console.log(JSON.stringify(users))
/*
[{
"name": "John Doe",
"id": 1,
"createdAt": "2013-03-20T20:31:45.000Z",
"updatedAt": "2013-03-20T20:31:45.000Z",
"Instruments": [{ // 1:M and N:M association
"name": "Toothpick",
"id": 1,
"createdAt": null,
"updatedAt": null,
"userId": 1,
"Teacher": { // 1:1 association
"name": "Jimi Hendrix"
}
}]
}]
*/
})
```
This will produce an outer join. However, a `where` clause on a related model will create an inner join and return only the instances that have matching sub-models. To return all parent instances, you should add `required: false`.
```js
User.findAll({
include: [{
model: Tool,
as: 'Instruments',
include: [{
model: Teacher,
where: {
school: "Woodstock Music School"
},
required: false
}]
}]
}).then(function(users) {
/* ... */
})
```
The query above will return all users, and all their instruments, but only those teachers associated with `Woodstock Music School`.
Include all also supports nested loading:
```js
User.findAll({ include: [{ all: true, nested: true }]});
```
|
{
"content_hash": "62cfaa9dad05adee356851c386859f2b",
"timestamp": "",
"source": "github",
"line_count": 659,
"max_line_length": 380,
"avg_line_length": 29.104704097116844,
"alnum_prop": 0.6248175182481752,
"repo_name": "oss92/sequelize",
"id": "e92c53b86116796a57f2807bee4ea3b066ac26d8",
"size": "19180",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "docs/docs/models-usage.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "2295085"
}
]
}
|
package androidpath.ll.eventbusdemo.Models;
import java.util.List;
/**
* Created by Le on 2015/5/27.
*/
/**
 * Namespace class holding the event types posted on the EventBus by this demo.
 */
public class Event {

    /** Event carrying a list of items once loading has finished. */
    public static class ItemListEvent {

        private final List<Item> loadedItems;

        /**
         * @param items the loaded items to deliver to subscribers
         */
        public ItemListEvent(List<Item> items) {
            this.loadedItems = items;
        }

        /** @return the items carried by this event */
        public List<Item> getItems() {
            return loadedItems;
        }
    }
}
|
{
"content_hash": "9f8c2752c3d2b6bfc2869ff62016f87c",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 48,
"avg_line_length": 17.333333333333332,
"alnum_prop": 0.5576923076923077,
"repo_name": "LLin233/Le-Android-Demo-Stack",
"id": "eadc6ab3d3d2255cab875c860541821b2ee1c0ca",
"size": "416",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "eventbusdemo/src/main/java/androidpath/ll/eventbusdemo/Models/Event.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "95502"
}
]
}
|
// Node packages.
import fs from 'fs'
import path from 'path'
// Project packages.
import { moveAsset, walk, readYamlMetaData } from '@bldr/media-manager'
import * as log from '@bldr/log'
/**
 * Rename a media asset so its file name matches the `ref` field of its
 * metadata YAML file.
 *
 * @param filePath - The media asset file path.
 */
function renameFromIdOneFile (filePath: string): void {
  let metaData: { [key: string]: any }
  try {
    metaData = readYamlMetaData(filePath)
  } catch (error) {
    log.error(filePath)
    log.error(error)
    return
  }
  if (metaData.ref == null) {
    return
  }
  // Drop a leading “<name>_<UPPERCASE>_” marker, e.g.
  // Gregorianik_HB_Alleluia-Ostermesse -> Alleluia-Ostermesse
  const strippedRef = (metaData.ref as string).replace(/.*_[A-Z]{2,}_/, '')
  log.info(strippedRef)
  // e.g. “.mp4”
  const extension = path.extname(filePath)
  const currentBaseName = path.basename(filePath, extension)
  if (strippedRef === currentBaseName) {
    // File name already matches the reference — nothing to do.
    return
  }
  const destination = path.join(path.dirname(filePath), `${strippedRef}${extension}`)
  moveAsset(filePath, destination)
}
/**
 * Rename a media asset or all child assets of the parent working directory
 * after the `id` in the meta data file.
 *
 * @param files - An array of input files, comes from the commanders’
 *   variadic parameter `[files...]`.
 */
async function action (files: string[]): Promise<void> {
  const visitors = {
    // Only assets that actually have a metadata YAML side-car are renamed.
    asset (relPath: string) {
      const yamlPath = `${relPath}.yml`
      if (fs.existsSync(yamlPath)) {
        renameFromIdOneFile(relPath)
      }
    }
  }
  await walk(visitors, { path: files })
}
module.exports = action
|
{
"content_hash": "0532f05f9016e0cc9281fc2edcfb404e",
"timestamp": "",
"source": "github",
"line_count": 63,
"max_line_length": 74,
"avg_line_length": 24.444444444444443,
"alnum_prop": 0.6331168831168831,
"repo_name": "JosefFriedrich-nodejs/baldr",
"id": "aeb726976004a6d4b75f72b6ac23d2869bcdcc4c",
"size": "1542",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/mgmt/cli/src/commands/ref-to-filename/action.ts",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "4504"
},
{
"name": "HTML",
"bytes": "1515"
},
{
"name": "JavaScript",
"bytes": "161185"
},
{
"name": "Shell",
"bytes": "261"
}
]
}
|
package io.moquette.server;
import org.eclipse.paho.client.mqttv3.IMqttDeliveryToken;
import org.eclipse.paho.client.mqttv3.MqttCallback;
import org.eclipse.paho.client.mqttv3.MqttMessage;
import java.util.concurrent.*;
/**
 * Used in test to collect all messages received asynchronously by MqttClient.
 */
public class MessageCollector implements MqttCallback {

    /** Pairs a received MQTT payload with the topic it arrived on. */
    private final class ReceivedMessage {
        private final MqttMessage message;
        private final String topic;

        private ReceivedMessage(MqttMessage message, String topic) {
            this.message = message;
            this.topic = topic;
        }
    }

    private BlockingQueue<ReceivedMessage> m_messages = new LinkedBlockingQueue<>();
    private boolean m_connectionLost;

    /**
     * Return the next message from the queue if one is available, else null,
     * without waiting.
     */
    public MqttMessage getMessageImmediate() {
        // poll() is atomic. The original isEmpty()/take() pair had a
        // check-then-act race: if another consumer drained the queue between
        // the two calls, take() would block forever.
        ReceivedMessage msg = m_messages.poll();
        return msg == null ? null : msg.message;
    }

    /**
     * Wait up to {@code delay} seconds for a message.
     *
     * @return the message payload, or null on timeout or interruption
     */
    public MqttMessage waitMessage(int delay) {
        try {
            ReceivedMessage msg = m_messages.poll(delay, TimeUnit.SECONDS);
            if (msg == null) {
                return null;
            }
            return msg.message;
        } catch (InterruptedException e) {
            return null;
        }
    }

    /**
     * Wait up to 5 seconds for a message and return its topic.
     *
     * @return the topic, or null on timeout or interruption
     */
    public String getTopic() {
        try {
            ReceivedMessage msg = m_messages.poll(5, TimeUnit.SECONDS);
            // Guard the timeout case: poll returns null after 5 seconds
            // without a message; the original dereferenced it unconditionally
            // and threw a NullPointerException instead of returning null.
            return msg == null ? null : msg.topic;
        } catch (InterruptedException e) {
            return null;
        }
    }

    /** Reset all collected state between test cases. */
    void reinit() {
        m_messages = new LinkedBlockingQueue<>();
        m_connectionLost = false;
    }

    /** @return true once the client has reported a lost connection. */
    public boolean connectionLost() {
        return m_connectionLost;
    }

    @Override
    public void connectionLost(Throwable cause) {
        m_connectionLost = true;
    }

    @Override
    public void messageArrived(String topic, MqttMessage message) throws Exception {
        m_messages.offer(new ReceivedMessage(message, topic));
    }

    /**
     * Invoked when the message sent to a server is ACKED (PUBACK or PUBCOMP by the server)
     */
    @Override
    public void deliveryComplete(IMqttDeliveryToken token) {
        // try {
        //     m_messages.offer(new ReceivedMessage(token.waitMessage(), token.getTopics()[0]));
        // } catch (MqttException e) {
        //     e.printStackTrace();
        // }
    }
}
|
{
"content_hash": "fa37beedbe24680cb41bb02573464287",
"timestamp": "",
"source": "github",
"line_count": 93,
"max_line_length": 92,
"avg_line_length": 26.956989247311828,
"alnum_prop": 0.6118867171918628,
"repo_name": "windbender/moquette",
"id": "d8103634e62e55c430adcf4875409fb0e2e56572",
"size": "3104",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "broker/src/test/java/io/moquette/server/MessageCollector.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "3827"
},
{
"name": "Groovy",
"bytes": "94012"
},
{
"name": "HTML",
"bytes": "37552"
},
{
"name": "Java",
"bytes": "638212"
},
{
"name": "Ruby",
"bytes": "212"
},
{
"name": "Shell",
"bytes": "3609"
}
]
}
|
// Lazily resolve the default implementation so `cookies-js` is only
// required when the state source is actually used.
function defaultCookieFactory() {
  return require('cookies-js');
}

var cookieFactory = defaultCookieFactory;
var StateSource = require('../stateSource');

/**
 * State source backed by browser cookies.
 */
class CookieStateSource extends StateSource {
  constructor(options) {
    super(options);
    this._isCookieStateSource = true;
    this._cookies = cookieFactory(this.context);
  }

  /** Read the cookie stored under `key`. */
  get(key) {
    return this._cookies.get(key);
  }

  /** Write `value` under `key`, forwarding any cookie options. */
  set(key, value, options) {
    return this._cookies.set(key, value, options);
  }

  /** Delete the cookie stored under `key`. */
  expire(key) {
    return this._cookies.expire(key);
  }

  /** Swap in a different cookie implementation (e.g. for tests). */
  static setCookieFactory(factory) {
    cookieFactory = factory;
  }
}

module.exports = CookieStateSource;
|
{
"content_hash": "21d192ae7435d5d4cc9286cdcaf33547",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 50,
"avg_line_length": 20.03125,
"alnum_prop": 0.6895475819032761,
"repo_name": "oliverwoodings/marty",
"id": "52ed21a5c8f75f466975749706a2afa717c6c6e9",
"size": "641",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "lib/stateSource/inbuilt/cookie.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "658659"
},
{
"name": "Makefile",
"bytes": "1605"
},
{
"name": "Shell",
"bytes": "1728"
}
]
}
|
from tempest.api.volume import base
from tempest import config
from tempest.lib import decorators
CONF = config.CONF
# NOTE(zhufl): This inherits from BaseVolumeAdminTest because
# it requires force_tenant_isolation=True, which need admin
# credentials to create non-admin users for the tests.
class AbsoluteLimitsTests(base.BaseVolumeAdminTest):  # noqa
    """Verify the per-tenant absolute volume limits reported by the API."""

    # avoid existing volumes of pre-defined tenant
    force_tenant_isolation = True

    @classmethod
    def resource_setup(cls):
        super(AbsoluteLimitsTests, cls).resource_setup()
        # A single shared volume drives the "used" counters checked below.
        cls.volume = cls.create_volume()

    @classmethod
    def skip_checks(cls):
        super(AbsoluteLimitsTests, cls).skip_checks()
        if not CONF.auth.use_dynamic_credentials:
            raise cls.skipException("Must use dynamic credentials.")

    @decorators.idempotent_id('8e943f53-e9d6-4272-b2e9-adcf2f7c29ad')
    def test_get_volume_absolute_limits(self):
        # Fetch this tenant's absolute limits from the volume limits API.
        limits = self.volume_limits_client.show_limits()['limits']['absolute']

        # The shared volume must be accounted for in size and count ...
        self.assertEqual(limits['totalGigabytesUsed'],
                         CONF.volume.volume_size)
        self.assertEqual(limits['totalVolumesUsed'], 1)
        # ... and no other resources should be in use.
        self.assertEqual(limits['totalSnapshotsUsed'], 0)
        self.assertEqual(limits['totalBackupsUsed'], 0)
        self.assertEqual(limits['totalBackupGigabytesUsed'], 0)
|
{
"content_hash": "1cb3debccc02d648deb929c1e85b933b",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 72,
"avg_line_length": 37.23809523809524,
"alnum_prop": 0.6962915601023018,
"repo_name": "masayukig/tempest",
"id": "00a3375458d21539ed2462b46f521e71febadb8f",
"size": "2200",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tempest/api/volume/test_volume_absolute_limits.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "4682048"
},
{
"name": "Shell",
"bytes": "12734"
}
]
}
|
-- Load the shared Application prototype from the spoon directory and
-- return the entity representing the "Home" application.
local applicationModulePath = _G.spoonPath.."/application.lua"
local Application = dofile(applicationModulePath)

return Application:new("Home")
|
{
"content_hash": "2e48abbe1423fd7309549792ec0e83d1",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 60,
"avg_line_length": 31,
"alnum_prop": 0.7526881720430108,
"repo_name": "eggcaker/dotfiles",
"id": "bf524a89ea6d022155e6f45f6964e96b0dfd0cc7",
"size": "93",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "macos/hammerspoon.symlink/Spoons/Ki.spoon/entities/home.lua",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "AppleScript",
"bytes": "8023"
},
{
"name": "CSS",
"bytes": "2003"
},
{
"name": "D",
"bytes": "131"
},
{
"name": "Go",
"bytes": "1178"
},
{
"name": "HTML",
"bytes": "1130"
},
{
"name": "JavaScript",
"bytes": "152818"
},
{
"name": "Jupyter Notebook",
"bytes": "4229"
},
{
"name": "Lua",
"bytes": "348514"
},
{
"name": "Makefile",
"bytes": "613"
},
{
"name": "Python",
"bytes": "36440"
},
{
"name": "Shell",
"bytes": "130877"
}
]
}
|
/**
 * Created by WangGuoyan on 2017/2/23.
 */
// NOTE(review): the ternary below only *assigns* a function when
// ibcpLayer.js is not yet included — it never invokes it, so nothing is
// actually written to the document. Presumably the intent was to inject
// the script on demand; confirm before relying on this check.
$(function () {
    var jsCheck = isInclude("ibcpLayer.js") == false ? function () {
        document.write(getRootPath() + "/WebUI/Public/ibcpLayer.js");
    } : true;
});
// Check whether a js/css resource whose URL contains `name` is already
// present on the page (as a <script src> or <link href>).
function isInclude(name) {
    var isScript = /js$/i.test(name);
    var tagName = isScript ? 'script' : 'link';
    var urlAttribute = isScript ? 'src' : 'href';
    var elements = document.getElementsByTagName(tagName);
    for (var index = 0; index < elements.length; index++) {
        var url = elements[index][urlAttribute];
        if (url.indexOf(name) != -1) {
            return true;
        }
    }
    return false;
}
// Resolve the application root URL: protocol + host + context path.
function getRootPath() {
    // Full URL, e.g. http://localhost:8083/uimcardprj/share/meun.jsp
    var fullUrl = window.document.location.href;
    // Path portion, e.g. /uimcardprj/share/meun.jsp
    var pathName = window.document.location.pathname;
    // Host portion, e.g. http://localhost:8083
    var hostUrl = fullUrl.substring(0, fullUrl.indexOf(pathName));
    // Context path with its leading slash, e.g. /uimcardprj
    var contextPath = pathName.substring(0, pathName.substr(1).indexOf('/') + 1);
    return (hostUrl + contextPath);
}
/**
 * Open the print dialog for a label of the given type.
 *
 * @param type the label type; only "weighting" is currently supported
 * @param id   the result id used to populate the label page
 */
function callPrint(type, id) {
    // Weighing-label page initialisation.
    var weightingLabelPath = "/WebUI/Public/WeightLabel.html?resultId=" + id;
    // BUG FIX: the original wrote `if (type = "weighting")` — an assignment,
    // which is always truthy and clobbered the parameter, so *every* label
    // type opened the weighing dialog. `===` restores the intended check.
    if (type === "weighting") {
        var LabelPrintIndex = ibcpLayer.ShowIframe(getRootPath()
            + weightingLabelPath, "称量标签打印", "125mm", "130mm", false, function () {
        });
    }
}
|
{
"content_hash": "655dde829967d3f6ff803ec4dbf403d5",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 79,
"avg_line_length": 28.82608695652174,
"alnum_prop": 0.6538461538461539,
"repo_name": "15292137182/era",
"id": "55e4a0dd14dc41c3b97dc8c4372fc84c71f7616c",
"size": "1462",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/webapp/core/WebUI/Public/Print-Label-Cmd.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "504376"
},
{
"name": "HTML",
"bytes": "152667"
},
{
"name": "Java",
"bytes": "110866"
},
{
"name": "JavaScript",
"bytes": "3188365"
},
{
"name": "Vue",
"bytes": "335591"
}
]
}
|
#ifndef PCL_SAMPLE_CONSENSUS_MODEL_PERPENDICULARPLANE_H_
#define PCL_SAMPLE_CONSENSUS_MODEL_PERPENDICULARPLANE_H_
#include <pcl/sample_consensus/sac_model_plane.h>
#include <pcl/common/common.h>
namespace pcl
{
/** \brief SampleConsensusModelPerpendicularPlane defines a model for 3D plane segmentation using additional
* angular constraints. The plane must be perpendicular to an user-specified axis (\ref setAxis), up to an user-specified angle threshold (\ref setEpsAngle).
* The model coefficients are defined as:
* - \b a : the X coordinate of the plane's normal (normalized)
* - \b b : the Y coordinate of the plane's normal (normalized)
* - \b c : the Z coordinate of the plane's normal (normalized)
* - \b d : the fourth <a href="http://mathworld.wolfram.com/HessianNormalForm.html">Hessian component</a> of the plane's equation
*
*
* Code example for a plane model, perpendicular (within a 15 degrees tolerance) with the Z axis:
* \code
* SampleConsensusModelPerpendicularPlane<pcl::PointXYZ> model (cloud);
* model.setAxis (Eigen::Vector3f (0.0, 0.0, 1.0));
* model.setEpsAngle (pcl::deg2rad (15));
* \endcode
*
* \note Please remember that you need to specify an angle > 0 in order to activate the axis-angle constraint!
*
* \author Radu B. Rusu
* \ingroup sample_consensus
*/
template <typename PointT>
class SampleConsensusModelPerpendicularPlane : public SampleConsensusModelPlane<PointT>
{
public:
using SampleConsensusModel<PointT>::model_name_;
typedef typename SampleConsensusModelPlane<PointT>::PointCloud PointCloud;
typedef typename SampleConsensusModelPlane<PointT>::PointCloudPtr PointCloudPtr;
typedef typename SampleConsensusModelPlane<PointT>::PointCloudConstPtr PointCloudConstPtr;
typedef boost::shared_ptr<SampleConsensusModelPerpendicularPlane> Ptr;
/** \brief Constructor for base SampleConsensusModelPerpendicularPlane.
* \param[in] cloud the input point cloud dataset
* \param[in] random if true set the random seed to the current time, else set to 12345 (default: false)
*/
SampleConsensusModelPerpendicularPlane (const PointCloudConstPtr &cloud,
bool random = false)
: SampleConsensusModelPlane<PointT> (cloud, random)
, axis_ (Eigen::Vector3f::Zero ())
, eps_angle_ (0.0)
{
model_name_ = "SampleConsensusModelPerpendicularPlane";
sample_size_ = 3;
model_size_ = 4;
}
/** \brief Constructor for base SampleConsensusModelPerpendicularPlane.
* \param[in] cloud the input point cloud dataset
* \param[in] indices a vector of point indices to be used from \a cloud
* \param[in] random if true set the random seed to the current time, else set to 12345 (default: false)
*/
SampleConsensusModelPerpendicularPlane (const PointCloudConstPtr &cloud,
const std::vector<int> &indices,
bool random = false)
: SampleConsensusModelPlane<PointT> (cloud, indices, random)
, axis_ (Eigen::Vector3f::Zero ())
, eps_angle_ (0.0)
{
model_name_ = "SampleConsensusModelPerpendicularPlane";
sample_size_ = 3;
model_size_ = 4;
}
/** \brief Empty destructor */
virtual ~SampleConsensusModelPerpendicularPlane () {}
/** \brief Set the axis along which we need to search for a plane perpendicular to.
* \param[in] ax the axis along which we need to search for a plane perpendicular to
*/
inline void
setAxis (const Eigen::Vector3f &ax) { axis_ = ax; }
/** \brief Get the axis along which we need to search for a plane perpendicular to. */
inline Eigen::Vector3f
getAxis () { return (axis_); }
/** \brief Set the angle epsilon (delta) threshold.
* \param[in] ea the maximum allowed difference between the plane normal and the given axis.
* \note You need to specify an angle > 0 in order to activate the axis-angle constraint!
*/
inline void
setEpsAngle (const double ea) { eps_angle_ = ea; }
/** \brief Get the angle epsilon (delta) threshold. */
inline double
getEpsAngle () { return (eps_angle_); }
/** \brief Select all the points which respect the given model coefficients as inliers.
* \param[in] model_coefficients the coefficients of a plane model that we need to compute distances to
* \param[in] threshold a maximum admissible distance threshold for determining the inliers from the outliers
* \param[out] inliers the resultant model inliers
*/
void
selectWithinDistance (const Eigen::VectorXf &model_coefficients,
const double threshold,
std::vector<int> &inliers);
/** \brief Count all the points which respect the given model coefficients as inliers.
*
* \param[in] model_coefficients the coefficients of a model that we need to compute distances to
* \param[in] threshold maximum admissible distance threshold for determining the inliers from the outliers
* \return the resultant number of inliers
*/
virtual int
countWithinDistance (const Eigen::VectorXf &model_coefficients,
const double threshold);
/** \brief Compute all distances from the cloud data to a given plane model.
* \param[in] model_coefficients the coefficients of a plane model that we need to compute distances to
* \param[out] distances the resultant estimated distances
*/
void
getDistancesToModel (const Eigen::VectorXf &model_coefficients,
std::vector<double> &distances);
/** \brief Return an unique id for this model (SACMODEL_PERPENDICULAR_PLANE). */
inline pcl::SacModel
getModelType () const { return (SACMODEL_PERPENDICULAR_PLANE); }
protected:
using SampleConsensusModel<PointT>::sample_size_;
using SampleConsensusModel<PointT>::model_size_;
/** \brief Check whether a model is valid given the user constraints.
* \param[in] model_coefficients the set of model coefficients
*/
virtual bool
isModelValid (const Eigen::VectorXf &model_coefficients);
/** \brief The axis along which we need to search for a plane perpendicular to. */
Eigen::Vector3f axis_;
/** \brief The maximum allowed difference between the plane normal and the given axis. */
double eps_angle_;
};
}
#ifdef PCL_NO_PRECOMPILE
#include <pcl/sample_consensus/impl/sac_model_perpendicular_plane.hpp>
#endif
#endif //#ifndef PCL_SAMPLE_CONSENSUS_MODEL_PERPENDICULARPLANE_H_
|
{
"content_hash": "817536db5a5da2e4a2fdd812e7e91310",
"timestamp": "",
"source": "github",
"line_count": 154,
"max_line_length": 160,
"avg_line_length": 44.98051948051948,
"alnum_prop": 0.6601703479139599,
"repo_name": "stfuchs/pcl",
"id": "43e7d0e1c942cc46e77d69e89a395e36b3bccca1",
"size": "8719",
"binary": false,
"copies": "16",
"ref": "refs/heads/master",
"path": "sample_consensus/include/pcl/sample_consensus/sac_model_perpendicular_plane.h",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "191481"
},
{
"name": "C++",
"bytes": "19372376"
},
{
"name": "CMake",
"bytes": "438143"
},
{
"name": "Cuda",
"bytes": "923635"
},
{
"name": "GLSL",
"bytes": "2653"
},
{
"name": "Matlab",
"bytes": "3407"
},
{
"name": "Objective-C",
"bytes": "3520"
},
{
"name": "Objective-C++",
"bytes": "7489"
},
{
"name": "Python",
"bytes": "29169"
},
{
"name": "Shell",
"bytes": "9840"
}
]
}
|
#define EIGEN_USE_THREADS
#if TENSORFLOW_USE_SYCL
#define EIGEN_USE_SYCL
#endif
#include "tensorflow/core/kernels/dense_update_ops.h"
#include "tensorflow/core/framework/op_kernel.h"
#include "tensorflow/core/framework/register_types.h"
#include "tensorflow/core/kernels/assign_op.h"
#include "tensorflow/core/lib/core/errors.h"
#include "tensorflow/core/platform/mutex.h"
#include "tensorflow/core/platform/types.h"
namespace tensorflow {
// Implements the "Assign" op: overwrites the variable ref tensor with the
// value tensor on the given device.
template <typename Device, typename T>
class AssignOpT : public AssignOp {
 public:
  using AssignOp::AssignOp;

  // Copies `rhs` into `lhs` via the device-specialized DenseUpdate functor.
  void Copy(OpKernelContext* context, Tensor* lhs, const Tensor& rhs) override {
    functor::DenseUpdate<Device, T, ASSIGN>()(context->eigen_device<Device>(),
                                              lhs->flat<T>(), rhs.flat<T>());
  }
};
// TODO(jeff): Get rid of use_exclusive_lock_ option
// Implements "AssignAdd"/"AssignSub": applies OP from the value tensor
// (input 1) onto the variable ref (input 0) and returns the ref.
template <typename Device, typename T, DenseUpdateType OP>
class DenseUpdateOp : public OpKernel {
 public:
  explicit DenseUpdateOp(OpKernelConstruction* context) : OpKernel(context) {
    OP_REQUIRES_OK(context,
                   context->GetAttr("use_locking", &use_exclusive_lock_));
    // Expected signature: (ref(T), T) -> ref(T).
    const DataType dt = DataTypeToEnum<T>::v();
    OP_REQUIRES_OK(context, context->MatchSignature({MakeRefType(dt), dt},
                                                    {MakeRefType(dt)}));
  }
  void Compute(OpKernelContext* context) override {
    // We always return the input ref.
    context->forward_ref_input_to_ref_output(0, 0);
    if (use_exclusive_lock_) {
      // Hold the variable's mutex for the whole update so concurrent
      // kernels observe either the old or the new value, never a mix.
      mutex_lock l(*context->input_ref_mutex(0));
      DoUpdate(context);
    } else {
      DoUpdate(context);
    }
  }
 private:
  // Performs the actual element-wise update; locking (if any) is handled
  // by the caller, Compute().
  void DoUpdate(OpKernelContext* context) {
    Tensor Tparams = context->mutable_input(0, use_exclusive_lock_);
    const Tensor& Tupdate = context->input(1);
    OP_REQUIRES(context, Tparams.IsInitialized(),
                errors::FailedPrecondition("Attempting to use uninitialized "
                                           "parameters: ",
                                           def().input(0)));
    OP_REQUIRES(
        context, Tparams.IsSameSize(Tupdate),
        errors::InvalidArgument("Parameters and update must be the same size"));
    functor::DenseUpdate<Device, T, OP> update_functor;
    update_functor(context->eigen_device<Device>(), Tparams.flat<T>(),
                   Tupdate.flat<T>());
  }
  bool use_exclusive_lock_;  // value of the "use_locking" attr
};
// Device aliases used by the kernel registrations below.
typedef Eigen::ThreadPoolDevice CPUDevice;
typedef Eigen::GpuDevice GPUDevice;
// CPU registrations of "Assign" for all types (including quantized).
#define REGISTER_KERNELS(type)                                       \
  REGISTER_KERNEL_BUILDER(                                           \
      Name("Assign").Device(DEVICE_CPU).TypeConstraint<type>("T"),   \
      AssignOpT<CPUDevice, type>);

TF_CALL_ALL_TYPES(REGISTER_KERNELS);
TF_CALL_QUANTIZED_TYPES(REGISTER_KERNELS);
#undef REGISTER_KERNELS

#if TENSORFLOW_USE_SYCL
// SYCL registrations of "Assign" for numeric types.
typedef Eigen::SyclDevice SYCLDevice;
#define REGISTER_SYCL_KERNEL(type)                             \
  REGISTER_KERNEL_BUILDER(                                     \
      Name("Assign")                                           \
          .Device(DEVICE_SYCL)                                 \
          .TypeConstraint<type>("T"),                          \
      AssignOpT<SYCLDevice, type>);
TF_CALL_NUMBER_TYPES(REGISTER_SYCL_KERNEL);
#undef REGISTER_SYCL_KERNEL
#endif

#if GOOGLE_CUDA
// Only register 'Assign' on GPU for the subset of types also supported by
// 'Variable' (see variable_ops.cc.)
#define REGISTER_GPU_KERNELS(type)                                 \
  namespace functor {                                              \
  template <>                                                      \
  void DenseUpdate<GPUDevice, type, ASSIGN>::operator()(           \
      const GPUDevice& d, typename TTypes<type>::Flat lhs,         \
      typename TTypes<type>::ConstFlat rhs);                       \
  extern template struct DenseUpdate<GPUDevice, type, ASSIGN>;     \
  }                                                                \
  REGISTER_KERNEL_BUILDER(                                         \
      Name("Assign").Device(DEVICE_GPU).TypeConstraint<type>("T"), \
      AssignOpT<GPUDevice, type>);
TF_CALL_GPU_NUMBER_TYPES(REGISTER_GPU_KERNELS);
#undef REGISTER_GPU_KERNELS
#endif  // GOOGLE_CUDA

// CPU registrations of the in-place arithmetic ops.
#define REGISTER_KERNELS(type)                                        \
  REGISTER_KERNEL_BUILDER(                                            \
      Name("AssignAdd").Device(DEVICE_CPU).TypeConstraint<type>("T"), \
      DenseUpdateOp<CPUDevice, type, DenseUpdateType::ADD>);          \
  REGISTER_KERNEL_BUILDER(                                            \
      Name("AssignSub").Device(DEVICE_CPU).TypeConstraint<type>("T"), \
      DenseUpdateOp<CPUDevice, type, DenseUpdateType::SUB>);
TF_CALL_NUMBER_TYPES(REGISTER_KERNELS);
#undef REGISTER_KERNELS

#if GOOGLE_CUDA
// Forward declarations of the functor specializations for GPU.
namespace functor {
#define DECLARE_GPU_SPEC_FOR_OP(T, OP)                  \
  template <>                                           \
  void DenseUpdate<GPUDevice, T, OP>::operator()(       \
      const GPUDevice& d, typename TTypes<T>::Flat params, \
      typename TTypes<T>::ConstFlat update);            \
  extern template struct DenseUpdate<GPUDevice, T, OP>;
#define DECLARE_GPU_SPEC(T)                         \
  DECLARE_GPU_SPEC_FOR_OP(T, DenseUpdateType::ADD); \
  DECLARE_GPU_SPEC_FOR_OP(T, DenseUpdateType::SUB)
TF_CALL_GPU_NUMBER_TYPES(DECLARE_GPU_SPEC);
#undef DECLARE_GPU_SPEC
#undef DECLARE_GPU_SPEC_FOR_OP
}  // namespace functor

// GPU registrations of the in-place arithmetic ops.
#define REGISTER_GPU_KERNELS(type)                                    \
  REGISTER_KERNEL_BUILDER(                                            \
      Name("AssignAdd").Device(DEVICE_GPU).TypeConstraint<type>("T"), \
      DenseUpdateOp<GPUDevice, type, DenseUpdateType::ADD>);          \
  REGISTER_KERNEL_BUILDER(                                            \
      Name("AssignSub").Device(DEVICE_GPU).TypeConstraint<type>("T"), \
      DenseUpdateOp<GPUDevice, type, DenseUpdateType::SUB>);
TF_CALL_GPU_NUMBER_TYPES(REGISTER_GPU_KERNELS);
#undef REGISTER_GPU_KERNELS
#endif  // end GOOGLE_CUDA
} // namespace tensorflow
|
{
"content_hash": "c5b66ea2c6e0a3ec111403442b2bded7",
"timestamp": "",
"source": "github",
"line_count": 155,
"max_line_length": 80,
"avg_line_length": 40.29677419354839,
"alnum_prop": 0.579891130323407,
"repo_name": "DCSaunders/tensorflow",
"id": "baa8f830919110eeff360dda54895909d243ecff",
"size": "6914",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tensorflow/core/kernels/dense_update_ops.cc",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "6641"
},
{
"name": "C",
"bytes": "90766"
},
{
"name": "C++",
"bytes": "13984868"
},
{
"name": "CMake",
"bytes": "110983"
},
{
"name": "CSS",
"bytes": "774"
},
{
"name": "Go",
"bytes": "87708"
},
{
"name": "HTML",
"bytes": "534592"
},
{
"name": "Java",
"bytes": "57002"
},
{
"name": "JavaScript",
"bytes": "13406"
},
{
"name": "Jupyter Notebook",
"bytes": "1833475"
},
{
"name": "Makefile",
"bytes": "26235"
},
{
"name": "Objective-C",
"bytes": "7056"
},
{
"name": "Objective-C++",
"bytes": "64592"
},
{
"name": "Perl",
"bytes": "4412"
},
{
"name": "Protocol Buffer",
"bytes": "143116"
},
{
"name": "Python",
"bytes": "13808086"
},
{
"name": "Shell",
"bytes": "276793"
},
{
"name": "TypeScript",
"bytes": "749115"
}
]
}
|
// Nodeunit tests for the pub/sub library: the newline-delimited Parser,
// and the Publisher/Subscriber pair in both connection directions
// (subscriber connecting to a listening publisher, and vice versa).
var Parser = require('../lib/parser').Parser;
var PubSub = require('../');
module.exports = {
    setUp: function (callback) {
        callback();
    },
    tearDown: function (callback) {
        // clean up
        callback();
    },
    // Feed the newline-delimited parser and verify that complete chunks are
    // delivered in order while a trailing partial chunk stays buffered.
    testParser: function (test) {
        test.expect(4);
        var n = 1;
        var parser = new Parser('\n', function (chunk) {
            switch(n)
            {
                case 1:
                    test.equal('chunk1', chunk);
                    break;
                case 2:
                    test.equal('chunk2', chunk);
                    break;
                default:
                    // No more than two complete chunks should be emitted.
                    test.ok(false);
            }
            n++;
        });
        parser.add('chunk1\nchunk2');
        test.equals(2, n);
        parser.add('\nchunk3');
        // 'chunk3' has no trailing delimiter yet, so it remains buffered.
        test.equals('chunk3', parser.buffer);
        test.done();
    },
    // Publisher listens; subscriber connects (twice to the same address to
    // exercise duplicate handling, plus once to a dead port). Messages are
    // published after a 1s settle delay; only subscribed channels arrive.
    testSubToPub: function (test) {
        test.expect(5);
        var publisher = new PubSub.Publisher();
        var subscriber = new PubSub.Subscriber();
        publisher.listen(8002);
        setTimeout(function () {
            publisher.publish('channel_one', 'message_one');
            publisher.publish('channel_two', {message: 'two'});
            // channel_three is never subscribed to and must not arrive.
            publisher.publish('channel_three', ['message', 'three']);
        }, 1000);
        subscriber.subscribe('channel_one');
        subscriber.subscribe('channel_two');
        subscriber.connect(8002, '127.0.0.1');
        subscriber.connect(8002, '127.0.0.1');
        subscriber.connect(8003, '127.0.0.1');
        subscriber.subscribe('channel_one');
        try {
            // Channel names may not contain the delimiter character.
            subscriber.subscribe('a\nb');
        } catch (e) {
            test.ok(true);
        }
        var n = 1;
        subscriber.on('message', function (channel, data) {
            switch(n)
            {
                case 1:
                    test.equal('channel_one', channel);
                    test.equal('message_one', data);
                    break;
                case 2:
                    test.equal('channel_two', channel);
                    test.deepEqual({message: 'two'}, data);
                    break;
                default:
                    test.ok(false);
            }
            n++;
        });
        setTimeout(function () {
            // Unsubscribing twice / from unknown channels must be harmless.
            subscriber.unsubscribe('channel_one');
            subscriber.unsubscribe('channel_two');
            subscriber.unsubscribe('channel_two');
            subscriber.unsubscribe('channel_x');
            subscriber.disconnect(8002, '127.0.0.1');
            subscriber.disconnect(8003, '127.0.0.1');
            publisher.close();
            setTimeout(function () {
                test.done();
            }, 2000);
        }, 2000);
    },
    // Mirror of testSubToPub with the roles reversed: the subscriber
    // listens and the publisher connects to it.
    testPubToSub: function (test) {
        test.expect(4);
        var publisher = new PubSub.Publisher();
        var subscriber = new PubSub.Subscriber();
        publisher.connect(8002, '127.0.0.1');
        publisher.connect(8002, '127.0.0.1');
        publisher.connect(8003, '127.0.0.1');
        setTimeout(function () {
            publisher.publish('channel_one', 'message_one');
            publisher.publish('channel_two', {message: 'two'});
            publisher.publish('channel_three', ['message', 'three']);
        }, 1000);
        subscriber.subscribe('channel_one');
        subscriber.listen(8002);
        subscriber.subscribe('channel_two');
        var n = 1;
        subscriber.on('message', function (channel, data) {
            switch(n)
            {
                case 1:
                    test.equal('channel_one', channel);
                    test.equal('message_one', data);
                    break;
                case 2:
                    test.equal('channel_two', channel);
                    test.deepEqual({message: 'two'}, data);
                    break;
                default:
                    test.ok(false);
            }
            n++;
        });
        setTimeout(function () {
            subscriber.unsubscribe('channel_one');
            subscriber.unsubscribe('channel_two');
            subscriber.unsubscribe('channel_undefined');
            publisher.disconnect(8002, '127.0.0.1');
            publisher.disconnect(8003, '127.0.0.1');
            subscriber.close();
            setTimeout(function () {
                test.done();
            }, 2000);
        }, 2000);
    }
};
|
{
"content_hash": "18bb16a02f33b3a30b170d042f27fd43",
"timestamp": "",
"source": "github",
"line_count": 234,
"max_line_length": 60,
"avg_line_length": 14.679487179487179,
"alnum_prop": 0.6026200873362445,
"repo_name": "x25/pub-sub-js",
"id": "1cd538fa261996250aa860942d0050f84f216f28",
"size": "3435",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/test.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "13725"
}
]
}
|
# Outputs and object-file groups.
OUT= sequential parallel
SHAREDOBJS= shared.o
SEQOBJS= sequential.o $(SHAREDOBJS)
PAROBJS= parallel.o thread.o $(SHAREDOBJS)
TESTOBJS= test.o $(SHAREDOBJS)

CFLAGS+= -Wall

# Build flavors: each appends target-specific flags, then builds $(OUT).
all: CFLAGS+= -O2
all: $(OUT)
verify: CFLAGS+= -DVERIFY
verify: $(OUT)
debug: CFLAGS+= -g -DDEBUG
debug: $(OUT)
vdebug: CFLAGS+= -g -DDEBUG -DVERIFY
vdebug: $(OUT)
test: CFLAGS+= -g -DTEST

# FIX: mark non-file targets phony so a stray file named e.g. "clean"
# or "all" can never mask them. "test" is NOT phony — it is a real
# binary produced below.
.PHONY: all verify debug vdebug clean run_prep

run_prep: clean all experiments.sh

# experiments.sh is generated from the Python test generator.
experiments.sh: generate_tests.py
	python generate_tests.py > experiments.sh
	chmod +x experiments.sh

sequential: $(SEQOBJS)
	$(CC) -o sequential $(SEQOBJS)

parallel: $(PAROBJS)
	$(CC) -o parallel $(PAROBJS) -lpthread

test: $(TESTOBJS)
	$(CC) -o test $(TESTOBJS)

# Object files built by the implicit .c -> .o rule; headers listed as
# prerequisites so edits trigger rebuilds.
sequential.o: sequential.c
parallel.o: parallel.c
thread.o: thread.c thread.h
shared.o: shared.c shared.h
test.o: test.c

clean:
	-rm -f *.o $(OUT) test experiments.sh experiments
|
{
"content_hash": "f749dc7bdef26bd4dff762fe0ef71329",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 50,
"avg_line_length": 17.46938775510204,
"alnum_prop": 0.7009345794392523,
"repo_name": "a-krebs/parallel-matrix",
"id": "c913fcf96885f17f92430c0a2dacc9a0073114d6",
"size": "856",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "Makefile",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "17558"
},
{
"name": "Python",
"bytes": "823"
}
]
}
|
# Abort immediately if the Python interpreter check fails.
# (Replaces `[ $? -ne 0 ] && exit 1` with the direct idiom.)
python ./scripts/check_python.py || exit 1

# *.sample configuration files to materialize; the copy drops the
# .sample suffix.
SAMPLES="
datatypes_conf.xml.sample
reports_wsgi.ini.sample
tool_conf.xml.sample
universe_wsgi.ini.sample
tool-data/alignseq.loc.sample
tool-data/annotation_profiler_options.xml.sample
tool-data/annotation_profiler_valid_builds.txt.sample
tool-data/binned_scores.loc.sample
tool-data/blastdb.loc.sample
tool-data/bowtie_indices.loc.sample
tool-data/encode_datasets.loc.sample
tool-data/liftOver.loc.sample
tool-data/maf_index.loc.sample
tool-data/maf_pairwise.loc.sample
tool-data/microbial_data.loc.sample
tool-data/phastOdds.loc.sample
tool-data/quality_scores.loc.sample
tool-data/regions.loc.sample
tool-data/sam_fa_indices.loc.sample
tool-data/sequence_index_base.loc.sample
tool-data/sequence_index_color.loc.sample
tool-data/twobit.loc.sample
"

# Runtime directory layout expected by the application.
DIRS="
database
database/files
database/tmp
database/compiled_templates
database/job_working_directory
database/import
database/pbs
static/genetrack/plots
"

# Copy each sample into place unless a real config already exists
# (never clobber a user-edited file). Expansions are quoted so paths
# survive unusual characters.
for sample in $SAMPLES; do
    file=$(echo "$sample" | sed -e 's/\.sample$//')
    if [ -f "$file" ]; then
        echo "Not overwriting existing $file"
    else
        echo "Copying $sample to $file"
        cp "$sample" "$file"
    fi
done

# Create any missing directories; -p also creates parents, so the
# result no longer depends on the order of the DIRS list.
for dir in $DIRS; do
    if [ ! -d "$dir" ]; then
        echo "Creating $dir"
        mkdir -p "$dir"
    fi
done

python ./scripts/fetch_eggs.py
|
{
"content_hash": "29b52aedb9d3505c281d98108481b3eb",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 53,
"avg_line_length": 23.56140350877193,
"alnum_prop": 0.7431124348473567,
"repo_name": "volpino/Yeps-EURAC",
"id": "07dc090b20c68b744ce1a37a66f2e230596f465b",
"size": "1354",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.sh",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "1028241"
},
{
"name": "Perl",
"bytes": "46988"
},
{
"name": "Python",
"bytes": "3728724"
},
{
"name": "Shell",
"bytes": "24718"
}
]
}
|
var express = require('express');
var path = require('path');
var Page = require('./../model/Page');
var Parse = require('./../core/parse');
var pages = require('./../core/pages');

var router = express.Router();

// Resolve :domain once per request: serve from the in-memory cache,
// otherwise look the page up in the Parse DB, cache it, and continue.
// Unknown domains and query errors both end in a 404.
router.param('domain', function (req, res, next, domain) {
  if (pages.get(domain)) {
    next();
    return;
  }
  // retrieve page from Parse DB
  var queryPages = new Parse.Query(Page);
  queryPages
    .equalTo('domain', domain)
    .limit(1)
    .find()
    .then(function (results) {
      if (results && results.length) {
        pages.add(results[0]);
        next();
      } else {
        res.send(404, 'Sorry, we cannot find that!');
      }
    }, function (err) {
      res.send(404, 'Sorry, we cannot find that!');
    });
});

// Domain root: always render the single page app.
router.get('/:domain', function (req, res, next) {
  var domain = req.params.domain; // current domain
  var page = pages.get(domain);
  res.sendfile(page.getPath('index.html'));
});

// Sub-paths: serve static assets directly when the LAST path segment
// has a file extension; everything else falls through to the single
// page app so client-side routing can take over.
router.get('/:domain/*', function (req, res, next) {
  var domain = req.params.domain; // current domain
  var page = pages.get(domain);
  var url = req.params[0];
  // FIX: the previous pattern /^.*\.[^\\]+$/ allowed '/' after the
  // dot, so a dot in ANY segment (e.g. "v1.0/users") made the URL look
  // like a file. Excluding both separators restricts the test to the
  // final segment's extension.
  if (url && url.match(/^.*\.[^\/\\]+$/)) {
    res.sendfile(page.getPath(url));
  } else {
    // render the single page app
    res.sendfile(page.getPath('index.html'));
  }
});

module.exports = router;
|
{
"content_hash": "e766a2b6bd1b2ffd11bd06d70d7b659b",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 65,
"avg_line_length": 25.419354838709676,
"alnum_prop": 0.5215736040609137,
"repo_name": "olivierlesnicki/singlepage.io",
"id": "cc3b8825afc3065c2eef50caf35bbccee939a87a",
"size": "1576",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/router/domain.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "10742"
},
{
"name": "JavaScript",
"bytes": "18899"
}
]
}
|
<?xml version="1.0" encoding="utf-8"?>
<!-- Bookshelf screen: a branded header bar above the list of books.
     FIXES: lint-ignore id corrected to "ContentDescription" (ids are
     case-sensitive, so the lowercase form suppressed nothing), and the
     deprecated fill_parent alias replaced by the identical
     match_parent. -->
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:tools="http://schemas.android.com/tools"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:orientation="vertical"
    tools:ignore="ContentDescription">

    <!-- Header: logo centered over the shelf-header background. -->
    <RelativeLayout
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:background="@drawable/bookshelf_header_bg" >

        <ImageView
            android:id="@+id/shelf_image_title"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:layout_centerInParent="true"
            android:background="@drawable/bookshelf_header_logo" />
    </RelativeLayout>

    <!-- Book list: transparent divider and cache hint avoid the black
         flash while scrolling over a custom background. -->
    <ListView
        android:id="@+id/shelf_list"
        android:layout_width="match_parent"
        android:layout_height="match_parent"
        android:scrollbars="none"
        android:divider="#00000000"
        android:cacheColorHint="#00000000"/>
</LinearLayout>
|
{
"content_hash": "fb5faf87904f18b0685c4a8c2c5a8b4a",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 72,
"avg_line_length": 32.13333333333333,
"alnum_prop": 0.7219917012448133,
"repo_name": "sundayliu/android-demo",
"id": "3702188f0c8deaffa7a3fb30e7b04f42f02676f1",
"size": "964",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "book/AngelReader/res/layout/main.xml",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "197"
},
{
"name": "HTML",
"bytes": "1905974"
},
{
"name": "Java",
"bytes": "888583"
},
{
"name": "Makefile",
"bytes": "199"
}
]
}
|
package pl.touk.sputnik.processor.codenarc;
import pl.touk.sputnik.configuration.Configuration;
import pl.touk.sputnik.configuration.GeneralOption;
import pl.touk.sputnik.processor.ReviewProcessorFactory;
import org.junit.Test;
import static org.junit.Assert.*;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class NodeCardReviewProcessorFactoryTest {

    /**
     * Builds a {@link Configuration} mock with the CodeNarc processor
     * enabled — shared by both tests to avoid duplicated stubbing.
     */
    private Configuration enabledConfiguration() {
        Configuration configuration = mock(Configuration.class);
        when(configuration.getProperty(eq(GeneralOption.CODE_NARC_ENABLED))).thenReturn("true");
        return configuration;
    }

    /** Factory reports enabled when CODE_NARC_ENABLED is "true". */
    @Test
    public void testIsEnabled() throws Exception {
        Configuration configuration = enabledConfiguration();
        ReviewProcessorFactory factory = new NodeCardReviewProcessorFactory();
        assertTrue(factory.isEnabled(configuration));
    }

    /** Factory produces a non-null processor for an enabled config. */
    @Test
    public void testCreate() throws Exception {
        Configuration configuration = enabledConfiguration();
        ReviewProcessorFactory factory = new NodeCardReviewProcessorFactory();
        assertNotNull(factory.create(configuration));
    }
}
|
{
"content_hash": "1281766a982f86d1d3fccb43a9088ba2",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 96,
"avg_line_length": 34.1764705882353,
"alnum_prop": 0.7633390705679862,
"repo_name": "platan/sputnik",
"id": "b5120e866463c1be3ad9ec94ac1974c1567f8603",
"size": "1162",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/test/java/pl/touk/sputnik/processor/codenarc/NodeCardReviewProcessorFactoryTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Groovy",
"bytes": "659"
},
{
"name": "HTML",
"bytes": "112"
},
{
"name": "Java",
"bytes": "315214"
},
{
"name": "JavaScript",
"bytes": "42"
},
{
"name": "Scala",
"bytes": "196"
},
{
"name": "Shell",
"bytes": "2248"
},
{
"name": "TypeScript",
"bytes": "224"
}
]
}
|
layout: post
title: "Path To Inspiration"
category: posts
---
Design in any web project can be a long and arduous process. Here's a collection of links to help you.
Enjoy!
## Concept
<h5>These sites are good to get a general idea of possible layouts and style paths to take.</h5>
* [TheBestDesigns](http://www.thebestdesigns.com/) - General list of hand picked sites from across the web.
* [Awwwards](http://www.awwwards.com/) - More awesome sites of various styles.
* [Dribbble](http://dribbble.com/) - Snaps of icons, doodles, sites, and more from a talented group of people.
* [MinimalSites](http://www.minimalsites.com/) - Another list of sites, but on the much simpler side.
* [Httpster](http://httpster.net/) - A curated list of the best sites around the Internet.
* [siteInspire](http://www.siteinspire.com/) - Get inspired.
* [Designer News](https://news.layervault.com/) - The best design / development links from the 'net.
## Creation
<h5>These sites are good for building a basic framework for the site.</h5>
* Color
* [ColorPicker](http://www.colorpicker.com/) - Simple yet effective. The name says it all.
* [Kuler](https://kuler.adobe.com) - Easily create or view a custom color swatch for your site.
* [Spectral](http://jxnblk.github.io/Spectral/) - Minimal color swatch creation.
* [Unsemantic](http://unsemantic.com/) - Provides a good grid framework for small to large projects.
* [MDN](https://developer.mozilla.org/) - Documentation for HTML, CSS, JS, etc.
* [Bootstrap](http://getbootstrap.com/) - Very popular framework for building modern websites and web apps.
* [Bootswatch](http://bootswatch.com/) - A list of free themes for Bootstrap.
* [Iconmonstr](http://iconmonstr.com/) - A collection of free, simple icons.
* [Ionicons](http://ionicons.com) - Another great collection of free icons.
* [Subtle Patterns](http://subtlepatterns.com/) - A massive archive of free backgrounds for your projects.
* [Google Fonts](https://www.google.com/fonts) - Tons of free, easy to use webfonts.
* [Text to ASCII](http://patorjk.com/software/taag/#p=display&f=Graffiti&t=Type%20Something%20) - Add some sexy Easter eggs in your source code.
* [CSS3 Animation Cheat Sheet](http://www.justinaguilar.com/animations/#) - A collection of CSS3 animation snippets.
* [Flat UI](http://designmodo.github.io/Flat-UI/) - A superb HTML and CSS framework for flat design. It's the juice.
* [CSS2Stylus](http://css2stylus.com/) - Convert your CSS to Stylus.
* [YMNNJQ](http://youmightnotneedjquery.com/) - See jQuery functions in natural JS. No libraries.
* [User Inter Faces](http://uifaces.com/) - Get placeholder avatar images for your mockups.
## Tools
<h5>Every painter needs a brush. Well... Most do.</h5>
* Sublime Text
* Themes
* [Flatron](https://github.com/Codingbean/Flatron) - It's flat, purple, and hella sexy.
* [FileZilla](https://filezilla-project.org/) - For those who fear FTP in the terminal.
* [WinSCP](http://winscp.net) - Upload files to a MEAN stack or VPS server.
* [Sketch](http://www.bohemiancoding.com/sketch/) [OS X only] - A professional vector graphics app.
## Collaborate
<h5>Working with a team? These links are for you.</h5>
* [Cloud9](https://c9.io/) - An awesome, zen way to work with others in the cloud.
* [Gist](https://gist.github.com/) - Share code and text with others fast. Like a mini repository.
* [Scratchpad.IO](http://scratchpad.io/) - A simple, RTC tool for coding and previewing websites.
* [Red Pen](https://redpen.io/) - Share your design and get feedback seamlessly.
* [Invoice Ninja](https://www.invoiceninja.com) - Open source invoicing platform.
## Backends
<h5>Every awesome web app needs an awesome backend.</h5>
* [Firebase](https://www.firebase.com/) - A real-time front-end database for your sites.
* [DigitalOcean](http://digitalocean.com) - A cheap and quality VPS hosting company.
## Testing
<h5>Before you launch, these are good tools to make sure your site is ready for stardom.</h5>
* [Placehold](http://placehold.it/) - Generate custom placeholder images of any size.
* [Browserling](https://browserling.com/) - Cross-browser test your website.
* [CodePen](http://codepen.io/) - A free web editor in your browser.
* [WooRank](http://www.woorank.com/) - A review tool for SEO elements, W3 validation, and numerous other variables. Free basic service.
* [Yslow](http://yslow.org/) - Tool for analyzing and finding fixes for multiple causes of slow site loading.
* [Hurl.it](http://www.hurl.it/) - Make HTTP requests in the browser.
* [Localtunnel](http://progrium.com/localtunnel/) - Open localhost ports to the world.
## Good Reads
<h5>Need something to read in your spare time?</h5>
* [Smashing Magazine](http://www.smashingmagazine.com/) - A magazine for all things digital design.
* [CSS-Tricks](http://css-tricks.com/) - Awesome blog on anything and everything related to CSS.
* [Codrops](http://tympanus.net/codrops/) - Great collection of design techniques for modern web developers and designers.
* [Insert HTML](http://www.inserthtml.com/) - A blog covering current and upcoming techniques and technologies for web developers.
* [1stWebDesign](http://www.1stwebdesigner.com/blog/) - A good, simple blog on web development and design.
* [A List Apart](http://alistapart.com/) - Guest bloggers and full books on topics from coding to business practices.
* [GoodUI](http://goodui.org/) - It's definitely not bad.
* [Mentor](http://mentor.so) - Get random advice from other designers and developers.
:heart: & :coffee:
Please see [original repo](https://github.com/NoahBuscher/Inspire)
|
{
"content_hash": "9fd4302f6332db2355bef1858e7b1a0b",
"timestamp": "",
"source": "github",
"line_count": 86,
"max_line_length": 144,
"avg_line_length": 64.79069767441861,
"alnum_prop": 0.7304379038047379,
"repo_name": "roachhd/roachhd.github.io",
"id": "e12377fb4f9f53326435f44611ac4b19b2acb801",
"size": "5576",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "_posts/2014-10-31-path-to-inspiration.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "66353"
},
{
"name": "HTML",
"bytes": "239551"
},
{
"name": "JavaScript",
"bytes": "13695"
},
{
"name": "Less",
"bytes": "7124"
},
{
"name": "Ruby",
"bytes": "2931"
},
{
"name": "SCSS",
"bytes": "20175"
},
{
"name": "Vim script",
"bytes": "11973"
}
]
}
|
#pragma once
#include <aws/groundstation/GroundStation_EXPORTS.h>
#include <aws/core/utils/memory/stl/AWSString.h>
namespace Aws
{
namespace GroundStation
{
namespace Model
{
// Status values for an AWS Ground Station contact. NOT_SET is the
// SDK's sentinel for an absent or unrecognized service value; the
// remaining enumerators mirror the service's string constants.
// NOTE: enumerator order fixes the implicit underlying values — do not
// reorder.
enum class ContactStatus
{
NOT_SET,
AVAILABLE,
AWS_CANCELLED,
AWS_FAILED,
CANCELLED,
CANCELLING,
COMPLETED,
FAILED,
FAILED_TO_SCHEDULE,
PASS,
POSTPASS,
PREPASS,
SCHEDULED,
SCHEDULING
};
// Helpers converting between the enum and the service-facing string
// form (implementations live in the corresponding .cpp).
namespace ContactStatusMapper
{
// Returns the ContactStatus matching `name`; presumably NOT_SET for
// an unknown name — TODO confirm in the mapper implementation.
AWS_GROUNDSTATION_API ContactStatus GetContactStatusForName(const Aws::String& name);
// Returns the canonical string for `value`.
AWS_GROUNDSTATION_API Aws::String GetNameForContactStatus(ContactStatus value);
} // namespace ContactStatusMapper
} // namespace Model
} // namespace GroundStation
} // namespace Aws
|
{
"content_hash": "4c537d27b228eb613fa06dc2e718df10",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 85,
"avg_line_length": 18.743589743589745,
"alnum_prop": 0.7277701778385773,
"repo_name": "aws/aws-sdk-cpp",
"id": "6c8a3f2cee574bb34847803ab9641ff9e186f506",
"size": "850",
"binary": false,
"copies": "4",
"ref": "refs/heads/main",
"path": "aws-cpp-sdk-groundstation/include/aws/groundstation/model/ContactStatus.h",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "309797"
},
{
"name": "C++",
"bytes": "476866144"
},
{
"name": "CMake",
"bytes": "1245180"
},
{
"name": "Dockerfile",
"bytes": "11688"
},
{
"name": "HTML",
"bytes": "8056"
},
{
"name": "Java",
"bytes": "413602"
},
{
"name": "Python",
"bytes": "79245"
},
{
"name": "Shell",
"bytes": "9246"
}
]
}
|
End of preview.
No dataset card yet
- Downloads last month
- 2