Dataset Preview
The full dataset viewer is not available (click to read why). Only showing a preview of the rows.
The dataset generation failed
Error code: DatasetGenerationError
Exception: TypeError
Message: Couldn't cast array of type
struct<content_hash: string, timestamp: string, source: string, line_count: int64, max_line_length: int64, avg_line_length: double, alnum_prop: double, repo_name: string, id: string, size: string, binary: bool, copies: string, ref: string, path: string, mode: string, license: string, language: list<item: struct<name: string, bytes: string>>, symlink_target: string>
to
{'content_hash': Value(dtype='string', id=None), 'timestamp': Value(dtype='string', id=None), 'source': Value(dtype='string', id=None), 'line_count': Value(dtype='int64', id=None), 'max_line_length': Value(dtype='int64', id=None), 'avg_line_length': Value(dtype='float64', id=None), 'alnum_prop': Value(dtype='float64', id=None), 'repo_name': Value(dtype='string', id=None), 'id': Value(dtype='string', id=None), 'size': Value(dtype='string', id=None), 'binary': Value(dtype='bool', id=None), 'copies': Value(dtype='string', id=None), 'ref': Value(dtype='string', id=None), 'path': Value(dtype='string', id=None), 'mode': Value(dtype='string', id=None), 'license': Value(dtype='string', id=None), 'language': [{'name': Value(dtype='string', id=None), 'bytes': Value(dtype='string', id=None)}]}
Traceback: Traceback (most recent call last):
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 2011, in _prepare_split_single
writer.write_table(table)
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/arrow_writer.py", line 585, in write_table
pa_table = table_cast(pa_table, self._schema)
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 2302, in table_cast
return cast_table_to_schema(table, schema)
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 2261, in cast_table_to_schema
arrays = [cast_array_to_feature(table[name], feature) for name, feature in features.items()]
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 2261, in <listcomp>
arrays = [cast_array_to_feature(table[name], feature) for name, feature in features.items()]
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 1802, in wrapper
return pa.chunked_array([func(chunk, *args, **kwargs) for chunk in array.chunks])
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 1802, in <listcomp>
return pa.chunked_array([func(chunk, *args, **kwargs) for chunk in array.chunks])
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 2122, in cast_array_to_feature
raise TypeError(f"Couldn't cast array of type\n{_short_str(array.type)}\nto\n{_short_str(feature)}")
TypeError: Couldn't cast array of type
struct<content_hash: string, timestamp: string, source: string, line_count: int64, max_line_length: int64, avg_line_length: double, alnum_prop: double, repo_name: string, id: string, size: string, binary: bool, copies: string, ref: string, path: string, mode: string, license: string, language: list<item: struct<name: string, bytes: string>>, symlink_target: string>
to
{'content_hash': Value(dtype='string', id=None), 'timestamp': Value(dtype='string', id=None), 'source': Value(dtype='string', id=None), 'line_count': Value(dtype='int64', id=None), 'max_line_length': Value(dtype='int64', id=None), 'avg_line_length': Value(dtype='float64', id=None), 'alnum_prop': Value(dtype='float64', id=None), 'repo_name': Value(dtype='string', id=None), 'id': Value(dtype='string', id=None), 'size': Value(dtype='string', id=None), 'binary': Value(dtype='bool', id=None), 'copies': Value(dtype='string', id=None), 'ref': Value(dtype='string', id=None), 'path': Value(dtype='string', id=None), 'mode': Value(dtype='string', id=None), 'license': Value(dtype='string', id=None), 'language': [{'name': Value(dtype='string', id=None), 'bytes': Value(dtype='string', id=None)}]}
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/src/services/worker/src/worker/job_runners/config/parquet_and_info.py", line 1529, in compute_config_parquet_and_info_response
parquet_operations = convert_to_parquet(builder)
File "/src/services/worker/src/worker/job_runners/config/parquet_and_info.py", line 1154, in convert_to_parquet
builder.download_and_prepare(
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 1027, in download_and_prepare
self._download_and_prepare(
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 1122, in _download_and_prepare
self._prepare_split(split_generator, **prepare_split_kwargs)
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 1882, in _prepare_split
for job_id, done, content in self._prepare_split_single(
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 2038, in _prepare_split_single
raise DatasetGenerationError("An error occurred while generating the dataset") from e
datasets.exceptions.DatasetGenerationError: An error occurred while generating the dataset

Need help to make the dataset viewer work? Make sure to review how to configure the dataset viewer, and open a discussion for direct support.
text
string | meta
dict |
|---|---|
// Intercept the popup form submission: swap the submit button for a
// progress indicator, then hand off to post() to send the link to Slack.
$("#popup-form").submit(function (event) {
    // Keep the browser from performing a real form submission.
    event.preventDefault();
    // Replace the submit button with the progress bar while we work.
    $('#submit').hide();
    $('#progress').show();
    // Send the active tab's link to Slack.
    post();
});
// Submit the form when the user presses Cmd+Enter (macOS) or Ctrl+Enter.
$('#context').keydown(function (event) {
    var hasModifier = event.ctrlKey || event.metaKey;
    if (hasModifier && event.keyCode === 13) { // 13 == Enter
        $('#popup-form').submit();
    }
});
// On popup load: show or hide the channel-override column according to the
// saved preference, and surface the saved default channel as a placeholder.
(function () {
    'use strict';
    var overrideEnabled = localStorage.getItem('channel_override') === 'yes';
    if (overrideEnabled) {
        $('#channel-col').show();
        $('#button-col').addClass('col-xs-6').removeClass('col-xs-12');
    } else {
        $('#channel-col').hide();
        $('#button-col').addClass('col-xs-12').removeClass('col-xs-6');
    }
    // Hint the default channel in the (possibly hidden) channel input.
    $('#channel').attr("placeholder", localStorage.getItem('channel'));
})();
// Post the active tab's URL (plus optional context text) to the Slack
// endpoint configured in the extension options. Shows in-page visual
// feedback while the request is in flight and closes the popup when done.
function post() {
    chrome.tabs.query({
        active: true,
        'lastFocusedWindow': true
    }, function (tabs) {
        // active tab URL
        var url = tabs[0].url;
        // grab config values from local storage
        // required variables
        var domain = localStorage.getItem('domain');
        // NOTE(review): read but never used -- the required-variables check
        // below only tests `domain`; confirm whether the token should be
        // validated here as well.
        var apitoken = localStorage.getItem('token');
        if (!domain) { // Required variables are missing so show alert
            chrome.tabs.create({ url: chrome.extension.getURL("options.html#required") });
        } else { // all required variables are set
            // optional variables (will use default values if not set)
            // get value of channel sent by input
            var channel = $('#channel').val();
            // if this was blank - set channel to value from localstorage
            channel = (channel) ? channel : localStorage.getItem('channel');
            // if channel is still empty - set to #random
            channel = (channel) ? channel : '#random';
            // add hash to channel if it's not there, unless it's being sent to a user like @user
            if (channel.substring(0, 1) !== '#' && channel.substring(0, 1) !== '@') {
                channel = '#' + channel;
            }
            var username = localStorage.getItem('username');
            username = (username) ? username : 'slacklink';
            var icon_emoji = localStorage.getItem('emoji');
            icon_emoji = (icon_emoji) ? icon_emoji : ':slack:';
            // do we need to add a comment to the URL
            var postcontext = $('#context').val();
            // FIX: declare posttext locally; it was previously assigned
            // without `var`, creating an implicit global (and a
            // ReferenceError under strict mode).
            var posttext;
            if (postcontext) {
                posttext = (postcontext + "\n" + url);
            } else {
                posttext = url;
            }
            // payload for slack API request
            var payload = {
                channel: channel,
                username: username,
                text: posttext,
                icon_emoji: icon_emoji,
                unfurl_links: 'true'
            };
            // Insert our visual feedback dependencies
            chrome.tabs.insertCSS(null, { file: 'assets/css/visualfeedback.css' });
            chrome.tabs.executeScript(null, { file: 'assets/js/visualfeedback.js' });
            // make API request to slack
            $.ajax({
                type: 'POST',
                url: domain,
                data: JSON.stringify(payload)
            }).always(function () {
                // Tidy up our visual feedback dependencies
                chrome.tabs.executeScript(null, { file: 'assets/js/tidy.js' });
                // close popup
                window.close();
            }).fail(function () {
                // Log error if post fails
                console.error('Uh oh, something\'s wrong. Check your Slacklink options');
            });
        }
    });
}
|
{
"content_hash": "ab946e6b218d4bdd7b31e9fd029f492c",
"timestamp": "",
"source": "github",
"line_count": 132,
"max_line_length": 88,
"avg_line_length": 23.598484848484848,
"alnum_prop": 0.6221508828250402,
"repo_name": "robjmills/slacklink",
"id": "caa24221cefb514fdc99a65091812eef17992986",
"size": "3166",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/assets/js/slacklink.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1909"
},
{
"name": "HTML",
"bytes": "4070"
},
{
"name": "JavaScript",
"bytes": "7224"
}
]
}
|
package lang.roma.core;
import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.Collection;
import lang.roma.engine.annotation.Optional;
import lang.roma.engine.annotation.Property;
import lang.roma.error.RomaException;
import lang.roma.type.Statement;
/**
 * Describes a single parameter of a command method: how its value is
 * resolved at invocation time (from a statement token, a context property,
 * the scope itself, or a trailing collection of tokens) and how many
 * command-line tokens it consumes.
 */
public class ParameterInfo {

    /** How this parameter's value is sourced when the command runs. */
    public enum State {
        NORMAL, OPTIONAL, CONTEXT_PROPERTY, SCOPE, COLLECTION
    };

    private Type type;
    private State state = State.NORMAL;
    // represents the amount that we should update the command line index
    private int increment = 1;
    // for CONTEXT_PROPERTY this holds the property name from @Property
    private Object parameter;

    /**
     * Classifies the parameter from its declared type and annotations.
     *
     * @param param       declared generic type of the parameter
     * @param annotations annotations present on the parameter declaration
     */
    public ParameterInfo(Type param, Annotation[] annotations) {
        this.type = param;
        for (Annotation annotation : annotations) {
            if (annotation instanceof Optional) {
                state = State.OPTIONAL;
            } else if (annotation instanceof Property) {
                // injected from context, so it consumes no command token
                increment = 0;
                state = State.CONTEXT_PROPERTY;
                this.parameter = ((Property) annotation).value();
            }
        }
        if (param == Scope.class) {
            // the scope itself is injected; consumes no command token
            state = State.SCOPE;
            increment = 0;
        }
    }

    /** @return number of command-line tokens this parameter consumes */
    public int incr() {
        return increment;
    }

    /** @return true when the parameter was annotated as {@code @Optional} */
    public boolean isOptional() {
        return state == State.OPTIONAL;
    }

    /**
     * Resolves the runtime argument value for this parameter.
     *
     * @param scope      scope used to convert tokens and look up properties
     * @param statement  parsed statement supplying the raw tokens
     * @param tokenIndex index of the token backing this parameter
     * @return the resolved argument value, or null for an unknown state
     * @throws RomaException if the COLLECTION case fails reflectively
     */
    public Object render(Scope scope, Statement statement, int tokenIndex)
            throws RomaException {
        switch (state) {
        case NORMAL:
        case OPTIONAL:
            return scope.get(type, statement.get(tokenIndex));
        case CONTEXT_PROPERTY:
            return scope.get((String) parameter);
        case SCOPE:
            return scope;
        // Collection is a special case which takes a Collection
        // object and
        // adds all the remaining arguments into that collection
        case COLLECTION:
            try {
                // NOTE(review): type.getClass().newInstance() instantiates
                // the runtime class of the Type object itself, not the
                // declared collection type -- verify this is intended.
                Object list = type.getClass().newInstance();
                ParameterizedType parameterizedType = (ParameterizedType) type
                        .getClass().getGenericSuperclass();
                Type generic = parameterizedType.getActualTypeArguments()[0];
                Method add = Collection.class.getDeclaredMethod("add",
                        Object.class);
                for (int index = tokenIndex; index < statement.size(); ++index) {
                    add.invoke(list, scope.get(generic, statement.get(index)));
                }
                // FIX: previously the populated collection was discarded and
                // the method fell through to `return null`.
                return list;
            } catch (Exception e) {
                throw new RomaException(statement.startPos(),
                        "failed to get COLLECTION");
            }
        }
        return null;
    }
}
|
{
"content_hash": "ff43290828e5223297a2a74bd6bcf4ce",
"timestamp": "",
"source": "github",
"line_count": 87,
"max_line_length": 71,
"avg_line_length": 27.149425287356323,
"alnum_prop": 0.6871295512277731,
"repo_name": "JEBailey/roma",
"id": "b5ad478838ae20de82fc917b04a3dbead5c54933",
"size": "2362",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/lang/roma/core/ParameterInfo.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "77323"
}
]
}
|
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE web-app
PUBLIC "-//Sun Microsystems, Inc.//DTD Web Application 2.3//EN"
"http://java.sun.com/dtd/web-app_2_3.dtd">
<web-app>
<!-- Servlets -->
<servlet>
<servlet-name>SimpleServlet</servlet-name>
<servlet-class>org.spiffyui.hellospiffymaven.server.SimpleServlet</servlet-class>
</servlet>
<servlet>
<servlet-name>JSLocaleServlet</servlet-name>
<servlet-class>org.spiffyui.server.JSLocaleServlet</servlet-class>
</servlet>
<servlet-mapping>
<servlet-name>SimpleServlet</servlet-name>
<url-pattern>/simple/*</url-pattern>
</servlet-mapping>
<servlet-mapping>
<servlet-name>JSLocaleServlet</servlet-name>
<url-pattern>/js/lib/i18n/date</url-pattern>
</servlet-mapping>
<servlet-mapping>
<servlet-name>JSLocaleServlet</servlet-name>
<url-pattern>/js/lib/i18n/jquery.ui.datepicker.js</url-pattern>
</servlet-mapping>
<!-- Default page to serve -->
<welcome-file-list>
<welcome-file>index.html</welcome-file>
</welcome-file-list>
</web-app>
|
{
"content_hash": "c2065158a66d40df5d87a3ab2cfc3034",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 85,
"avg_line_length": 28.025,
"alnum_prop": 0.656556645851918,
"repo_name": "spiffyui/spiffyui",
"id": "748fc3a9b8dab05031b410addb27564ea09b1898",
"size": "1121",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spiffyui/build/maven/example-war/src/main/webapp/WEB-INF/web.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "67800"
},
{
"name": "HTML",
"bytes": "218605"
},
{
"name": "Java",
"bytes": "966893"
},
{
"name": "JavaScript",
"bytes": "483659"
}
]
}
|
import base64
from flask.ext.oauthlib import client
from werkzeug import urls
import flask
import auth
import config
import model
import util
from main import app
# OAuth2 client configuration for reddit. Client id/secret are read from the
# master Config record at import time.
# NOTE(review): request_token_params['state'] is evaluated once at import,
# so every authorization request reuses the same state value -- confirm this
# is intended (a fresh per-request state is the usual CSRF protection).
reddit_config = dict(
    access_token_method='POST',
    access_token_params={'grant_type': 'authorization_code'},
    access_token_url='https://ssl.reddit.com/api/v1/access_token',
    authorize_url='https://ssl.reddit.com/api/v1/authorize',
    base_url='https://oauth.reddit.com/api/v1/',
    consumer_key=model.Config.get_master_db().reddit_client_id,
    consumer_secret=model.Config.get_master_db().reddit_client_secret,
    request_token_params={'scope': 'identity', 'state': util.uuid()},
)
# Flask-OAuthlib remote app wired up with the config above.
reddit = auth.create_oauth_app(reddit_config, 'reddit')
def reddit_handle_oauth2_response():
    """Exchange the OAuth2 authorization code for a reddit access token.

    Replaces the remote app's default handler because reddit's token
    endpoint requires HTTP Basic auth with the client credentials.

    Raises:
        client.OAuthException: if the token endpoint does not answer
            with HTTP 200 or 201.
    """
    token_args = {
        'client_id': reddit.consumer_key,
        'code': flask.request.args.get('code'),
        'redirect_uri': flask.session.get('%s_oauthredir' % reddit.name),
    }
    token_args.update(reddit.access_token_params)
    # reddit wants "Basic base64(client_id:client_secret)" on this request.
    credentials = '%s:%s' % (reddit.consumer_key, reddit.consumer_secret)
    auth_header = 'Basic %s' % base64.b64encode(
        credentials.encode('latin1')
    ).strip().decode('latin1')
    response, content = reddit.http_request(
        reddit.expand_url(reddit.access_token_url),
        method=reddit.access_token_method,
        data=urls.url_encode(token_args),
        headers={
            'Authorization': auth_header,
            'User-Agent': config.USER_AGENT,
        },
    )
    data = client.parse_response(response, content)
    if response.code not in (200, 201):
        raise client.OAuthException(
            'Invalid response from %s' % reddit.name,
            type='invalid_response', data=data,
        )
    return data
reddit.handle_oauth2_response = reddit_handle_oauth2_response
@app.route('/api/auth/callback/reddit/')
def reddit_authorized():
    """OAuth2 callback: finish the reddit sign-in flow.

    Flashes a message and redirects back if the user denied access;
    otherwise stores the token, fetches the reddit profile, and signs
    the matching local user in.
    """
    response = reddit.authorized_response()
    denied = response is None or flask.request.args.get('error')
    if denied:
        flask.flash('You denied the request to sign in.')
        return flask.redirect(util.get_next_url())
    # Flask-OAuthlib expects a (token, secret) tuple; reddit has no secret.
    flask.session['oauth_token'] = (response['access_token'], '')
    profile = reddit.request('me')
    user_db = retrieve_user_from_reddit(profile.data)
    return auth.signin_user_db(user_db)
@reddit.tokengetter
def get_reddit_oauth_token():
    """Return the OAuth token tuple stored in the session, if any."""
    token = flask.session.get('oauth_token')
    return token
@app.route('/signin/reddit/')
def signin_reddit():
    """Start the reddit OAuth sign-in dance."""
    return auth.signin_oauth(reddit)
def retrieve_user_from_reddit(response):
    """Look up (or lazily create) the local user for a reddit profile.

    Args:
        response: dict-like reddit profile with at least 'id' and 'name'.

    Returns:
        The existing or newly created user entity.
    """
    auth_id = 'reddit_%s' % response['id']
    existing = model.User.get_by('auth_ids', auth_id)
    if existing:
        return existing
    reddit_name = response['name']
    return auth.create_user_db(
        auth_id=auth_id,
        name=reddit_name,
        username=reddit_name,
    )
|
{
"content_hash": "a311e4f7d5a84fc0380f6e52fa238926",
"timestamp": "",
"source": "github",
"line_count": 92,
"max_line_length": 78,
"avg_line_length": 28.26086956521739,
"alnum_prop": 0.6953846153846154,
"repo_name": "lipis/electron-crash-reporter",
"id": "369ad031f027b2ac680272debd6d21dd8527359a",
"size": "2617",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "main/auth/reddit.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "5399"
},
{
"name": "CoffeeScript",
"bytes": "16008"
},
{
"name": "HTML",
"bytes": "82075"
},
{
"name": "JavaScript",
"bytes": "65"
},
{
"name": "Python",
"bytes": "129250"
}
]
}
|
FromFailuresBack
================
A Symfony project created on March 2, 2017, at 11:09 am.
|
{
"content_hash": "4114260f2854b7d562688388698f921b",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 53,
"avg_line_length": 22.25,
"alnum_prop": 0.6292134831460674,
"repo_name": "ducdeswin7/FromFailuresBack",
"id": "821ce3acd969df18c8136f7ce171a0c7dbca38bb",
"size": "89",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "341172"
},
{
"name": "HTML",
"bytes": "133930"
},
{
"name": "JavaScript",
"bytes": "87363"
},
{
"name": "PHP",
"bytes": "166719"
}
]
}
|
// Copyright (c) 2011-present, Facebook, Inc. All rights reserved.
// This source code is licensed under the BSD-style license found in the
// LICENSE file in the root directory of this source tree. An additional grant
// of patent rights can be found in the PATENTS file in the same directory.
//
// This file implements the "bridge" between Java and C++ and enables
// calling c++ rocksdb::DB methods from Java side.
#include <jni.h>
#include <stdio.h>
#include <stdlib.h>
#include <algorithm>
#include <functional>
#include <string.h>
#include <memory>
#include <string>
#include <tuple>
#include <vector>
#include "include/org_rocksdb_RocksDB.h"
#include "include/org_rocksdb_ByteArray.h"
#include "rocksdb/cache.h"
#include "rocksdb/db.h"
#include "rocksdb/options.h"
#include "rocksdb/types.h"
#include "rocksjni/portal.h"
#include "util/compression.h"
// Cached jfieldIDs for org.rocksdb.ByteArray; populated from the Java side
// via Java_org_rocksdb_ByteArray_initFieldIDs below.
jfieldID rocksdb::JniUtil::ByteArray_buffer;
jfieldID rocksdb::JniUtil::ByteArray_length;

// JNI hook letting Java trigger caching of the ByteArray field IDs.
void Java_org_rocksdb_ByteArray_initFieldIDs(JNIEnv *env, jclass cls) {
  rocksdb::JniUtil::initByteArrayFieldIDs(env, cls);
}
#ifdef min
#undef min
#endif
//////////////////////////////////////////////////////////////////////////////
// rocksdb::DB::Open
// Shared implementation for the single-column-family open entry points.
// Invokes open_fn (DB::Open or DB::OpenForReadOnly) and returns the native
// DB pointer as a jlong; on failure throws a Java RocksDBException and
// returns 0.
jlong rocksdb_open_helper(JNIEnv* env, jlong jopt_handle, jstring jdb_path,
    std::function<rocksdb::Status(
        const rocksdb::Options&, const std::string&, rocksdb::DB**)> open_fn
    ) {
  auto* opt = reinterpret_cast<rocksdb::Options*>(jopt_handle);
  rocksdb::DB* db = nullptr;
  const char* db_path = env->GetStringUTFChars(jdb_path, NULL);
  rocksdb::Status s = open_fn(*opt, db_path, &db);
  // done with the UTF-8 copy of the path
  env->ReleaseStringUTFChars(jdb_path, db_path);
  if (s.ok()) {
    return reinterpret_cast<jlong>(db);
  } else {
    rocksdb::RocksDBExceptionJni::ThrowNew(env, s);
    return 0;
  }
}
/*
 * Class:     org_rocksdb_RocksDB
 * Method:    open
 * Signature: (JLjava/lang/String;)J
 */
// Read-write open: the function-pointer cast selects the two-argument
// DB::Open overload before delegating to the helper above.
jlong Java_org_rocksdb_RocksDB_open__JLjava_lang_String_2(
    JNIEnv* env, jclass jcls, jlong jopt_handle, jstring jdb_path) {
  return rocksdb_open_helper(env, jopt_handle, jdb_path,
      (rocksdb::Status(*)
          (const rocksdb::Options&, const std::string&, rocksdb::DB**)
      )&rocksdb::DB::Open
  );
}
/*
 * Class:     org_rocksdb_RocksDB
 * Method:    openROnly
 * Signature: (JLjava/lang/String;)J
 */
// Read-only open: wraps DB::OpenForReadOnly in a lambda so it matches the
// helper's open_fn signature.
jlong Java_org_rocksdb_RocksDB_openROnly__JLjava_lang_String_2(
    JNIEnv* env, jclass jcls, jlong jopt_handle, jstring jdb_path) {
  return rocksdb_open_helper(env, jopt_handle, jdb_path, [](
      const rocksdb::Options& options,
      const std::string& db_path, rocksdb::DB** db) {
    return rocksdb::DB::OpenForReadOnly(options, db_path, db);
  });
}
// Shared implementation for the column-family-aware open entry points.
// Builds the ColumnFamilyDescriptor list from the Java byte[][] names and
// per-family option handles, invokes open_fn, and on success returns a
// jlongArray of [db handle, cf handle...]; on failure throws a Java
// RocksDBException and returns NULL.
jlongArray rocksdb_open_helper(JNIEnv* env, jlong jopt_handle,
    jstring jdb_path, jobjectArray jcolumn_names, jlongArray jcolumn_options,
    std::function<rocksdb::Status(
        const rocksdb::DBOptions&, const std::string&,
        const std::vector<rocksdb::ColumnFamilyDescriptor>&,
        std::vector<rocksdb::ColumnFamilyHandle*>*,
        rocksdb::DB**)> open_fn
    ) {
  auto* opt = reinterpret_cast<rocksdb::DBOptions*>(jopt_handle);
  const char* db_path = env->GetStringUTFChars(jdb_path, NULL);
  std::vector<rocksdb::ColumnFamilyDescriptor> column_families;
  jsize len_cols = env->GetArrayLength(jcolumn_names);
  jlong* jco = env->GetLongArrayElements(jcolumn_options, NULL);
  for (int i = 0; i < len_cols; i++) {
    jobject jcn = env->GetObjectArrayElement(jcolumn_names, i);
    jbyteArray jcn_ba = reinterpret_cast<jbyteArray>(jcn);
    jbyte* jcf_name = env->GetByteArrayElements(jcn_ba, NULL);
    const int jcf_name_len = env->GetArrayLength(jcn_ba);
    //TODO(AR) do I need to make a copy of jco[i] ?
    std::string cf_name(reinterpret_cast<char *>(jcf_name), jcf_name_len);
    rocksdb::ColumnFamilyOptions* cf_options =
        reinterpret_cast<rocksdb::ColumnFamilyOptions*>(jco[i]);
    column_families.push_back(
        rocksdb::ColumnFamilyDescriptor(cf_name, *cf_options));
    // JNI_ABORT: name bytes were only read, no need to copy back
    env->ReleaseByteArrayElements(jcn_ba, jcf_name, JNI_ABORT);
    env->DeleteLocalRef(jcn);
  }
  env->ReleaseLongArrayElements(jcolumn_options, jco, JNI_ABORT);
  std::vector<rocksdb::ColumnFamilyHandle*> handles;
  rocksdb::DB* db = nullptr;
  rocksdb::Status s = open_fn(*opt, db_path, column_families,
      &handles, &db);
  // FIX: release the UTF-8 path copy obtained from GetStringUTFChars;
  // previously it was leaked on both the success and failure paths.
  env->ReleaseStringUTFChars(jdb_path, db_path);
  // check if open operation was successful
  if (s.ok()) {
    jsize resultsLen = 1 + len_cols; //db handle + column family handles
    std::unique_ptr<jlong[]> results =
        std::unique_ptr<jlong[]>(new jlong[resultsLen]);
    results[0] = reinterpret_cast<jlong>(db);
    for (int i = 1; i <= len_cols; i++) {
      results[i] = reinterpret_cast<jlong>(handles[i - 1]);
    }
    jlongArray jresults = env->NewLongArray(resultsLen);
    env->SetLongArrayRegion(jresults, 0, resultsLen, results.get());
    return jresults;
  } else {
    rocksdb::RocksDBExceptionJni::ThrowNew(env, s);
    return NULL;
  }
}
/*
 * Class:     org_rocksdb_RocksDB
 * Method:    openROnly
 * Signature: (JLjava/lang/String;[[B[J)[J
 */
// Read-only open with explicit column families; delegates to the
// column-family helper with a lambda around DB::OpenForReadOnly.
jlongArray Java_org_rocksdb_RocksDB_openROnly__JLjava_lang_String_2_3_3B_3J(
    JNIEnv* env, jclass jcls, jlong jopt_handle, jstring jdb_path,
    jobjectArray jcolumn_names, jlongArray jcolumn_options) {
  return rocksdb_open_helper(env, jopt_handle, jdb_path, jcolumn_names,
      jcolumn_options, [](
      const rocksdb::DBOptions& options, const std::string& db_path,
      const std::vector<rocksdb::ColumnFamilyDescriptor>& column_families,
      std::vector<rocksdb::ColumnFamilyHandle*>* handles, rocksdb::DB** db) {
    return rocksdb::DB::OpenForReadOnly(options, db_path, column_families,
        handles, db);
  });
}
/*
 * Class:     org_rocksdb_RocksDB
 * Method:    open
 * Signature: (JLjava/lang/String;[[B[J)[J
 */
// Read-write open with explicit column families; the cast selects the
// column-family DB::Open overload for the helper.
jlongArray Java_org_rocksdb_RocksDB_open__JLjava_lang_String_2_3_3B_3J(
    JNIEnv* env, jclass jcls, jlong jopt_handle, jstring jdb_path,
    jobjectArray jcolumn_names, jlongArray jcolumn_options) {
  return rocksdb_open_helper(env, jopt_handle, jdb_path, jcolumn_names,
      jcolumn_options, (rocksdb::Status(*)
          (const rocksdb::DBOptions&, const std::string&,
           const std::vector<rocksdb::ColumnFamilyDescriptor>&,
           std::vector<rocksdb::ColumnFamilyHandle*>*, rocksdb::DB**)
      )&rocksdb::DB::Open
  );
}
//////////////////////////////////////////////////////////////////////////////
// rocksdb::DB::ListColumnFamilies
/*
 * Class:     org_rocksdb_RocksDB
 * Method:    listColumnFamilies
 * Signature: (JLjava/lang/String;)[[B
 */
// Returns the column family names of the DB at jdb_path as byte[][].
// On failure the (already allocated) array is returned without being
// filled -- no Java exception is thrown here.
jobjectArray Java_org_rocksdb_RocksDB_listColumnFamilies(
    JNIEnv* env, jclass jclazz, jlong jopt_handle, jstring jdb_path) {
  std::vector<std::string> column_family_names;
  auto* opt = reinterpret_cast<rocksdb::Options*>(jopt_handle);
  const char* db_path = env->GetStringUTFChars(jdb_path, 0);
  rocksdb::Status s = rocksdb::DB::ListColumnFamilies(*opt, db_path,
      &column_family_names);
  env->ReleaseStringUTFChars(jdb_path, db_path);
  jclass jcls_ba = env->FindClass("[B");
  jobjectArray jresults = env->NewObjectArray(
      static_cast<jsize>(column_family_names.size()), jcls_ba, NULL);
  if (s.ok()) {
    for (std::vector<std::string>::size_type i = 0;
        i < column_family_names.size(); i++) {
      jbyteArray jcf_value =
          env->NewByteArray(static_cast<jsize>(column_family_names[i].size()));
      env->SetByteArrayRegion(
          jcf_value, 0, static_cast<jsize>(column_family_names[i].size()),
          reinterpret_cast<const jbyte*>(column_family_names[i].data()));
      env->SetObjectArrayElement(jresults, static_cast<jsize>(i), jcf_value);
      // drop the local ref so long name lists don't exhaust the local table
      env->DeleteLocalRef(jcf_value);
    }
  }
  return jresults;
}
//////////////////////////////////////////////////////////////////////////////
// rocksdb::DB::Put
// Shared implementation for the Put entry points: copies key/value out of
// the Java byte arrays, writes through the given (possibly null) column
// family handle, and throws a Java RocksDBException on failure.
void rocksdb_put_helper(JNIEnv* env, rocksdb::DB* db,
    const rocksdb::WriteOptions& write_options,
    rocksdb::ColumnFamilyHandle* cf_handle, jbyteArray jkey,
    jint jkey_off, jint jkey_len, jbyteArray jval,
    jint jval_off, jint jval_len) {
  jbyte* key = new jbyte[jkey_len];
  env->GetByteArrayRegion(jkey, jkey_off, jkey_len, key);
  jbyte* value = new jbyte[jval_len];
  env->GetByteArrayRegion(jval, jval_off, jval_len, value);
  rocksdb::Slice key_slice(reinterpret_cast<char*>(key), jkey_len);
  rocksdb::Slice value_slice(reinterpret_cast<char*>(value), jval_len);
  rocksdb::Status s;
  if (cf_handle != nullptr) {
    s = db->Put(write_options, cf_handle, key_slice, value_slice);
  } else {
    // backwards compatibility
    s = db->Put(write_options, key_slice, value_slice);
  }
  // cleanup
  delete [] value;
  delete [] key;
  if (s.ok()) {
    return;
  }
  rocksdb::RocksDBExceptionJni::ThrowNew(env, s);
}
/*
 * Class:     org_rocksdb_RocksDB
 * Method:    put
 * Signature: (J[BII[BII)V
 */
// Put with default write options and the default column family.
void Java_org_rocksdb_RocksDB_put__J_3BII_3BII(JNIEnv* env, jobject jdb,
    jlong jdb_handle,
    jbyteArray jkey, jint jkey_off,
    jint jkey_len, jbyteArray jval,
    jint jval_off, jint jval_len) {
  auto db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  // shared default options; constructed once on first call
  static const rocksdb::WriteOptions default_write_options =
      rocksdb::WriteOptions();
  rocksdb_put_helper(env, db, default_write_options, nullptr, jkey, jkey_off,
      jkey_len, jval, jval_off, jval_len);
}
/*
 * Class:     org_rocksdb_RocksDB
 * Method:    put
 * Signature: (J[BII[BIIJ)V
 */
// Put with default write options into an explicit column family; throws
// InvalidArgument if the handle is null.
void Java_org_rocksdb_RocksDB_put__J_3BII_3BIIJ(JNIEnv* env, jobject jdb,
    jlong jdb_handle,
    jbyteArray jkey, jint jkey_off,
    jint jkey_len, jbyteArray jval,
    jint jval_off, jint jval_len,
    jlong jcf_handle) {
  auto db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  static const rocksdb::WriteOptions default_write_options =
      rocksdb::WriteOptions();
  auto cf_handle = reinterpret_cast<rocksdb::ColumnFamilyHandle*>(jcf_handle);
  if (cf_handle != nullptr) {
    rocksdb_put_helper(env, db, default_write_options, cf_handle, jkey,
        jkey_off, jkey_len, jval, jval_off, jval_len);
  } else {
    rocksdb::RocksDBExceptionJni::ThrowNew(env,
        rocksdb::Status::InvalidArgument("Invalid ColumnFamilyHandle."));
  }
}
/*
 * Class:     org_rocksdb_RocksDB
 * Method:    put
 * Signature: (JJ[BII[BII)V
 */
// Put with caller-supplied write options, default column family.
void Java_org_rocksdb_RocksDB_put__JJ_3BII_3BII(JNIEnv* env, jobject jdb,
    jlong jdb_handle,
    jlong jwrite_options_handle,
    jbyteArray jkey, jint jkey_off,
    jint jkey_len, jbyteArray jval,
    jint jval_off, jint jval_len) {
  auto db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  auto write_options = reinterpret_cast<rocksdb::WriteOptions*>(
      jwrite_options_handle);
  rocksdb_put_helper(env, db, *write_options, nullptr, jkey, jkey_off, jkey_len,
      jval, jval_off, jval_len);
}
/*
 * Class:     org_rocksdb_RocksDB
 * Method:    put
 * Signature: (JJ[BII[BIIJ)V
 */
// Put with caller-supplied write options into an explicit column family;
// throws InvalidArgument if the handle is null.
void Java_org_rocksdb_RocksDB_put__JJ_3BII_3BIIJ(
    JNIEnv* env, jobject jdb, jlong jdb_handle, jlong jwrite_options_handle,
    jbyteArray jkey, jint jkey_off, jint jkey_len, jbyteArray jval,
    jint jval_off, jint jval_len, jlong jcf_handle) {
  auto db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  auto write_options = reinterpret_cast<rocksdb::WriteOptions*>(
      jwrite_options_handle);
  auto cf_handle = reinterpret_cast<rocksdb::ColumnFamilyHandle*>(jcf_handle);
  if (cf_handle != nullptr) {
    rocksdb_put_helper(env, db, *write_options, cf_handle, jkey, jkey_off,
        jkey_len, jval, jval_off, jval_len);
  } else {
    rocksdb::RocksDBExceptionJni::ThrowNew(env,
        rocksdb::Status::InvalidArgument("Invalid ColumnFamilyHandle."));
  }
}
//////////////////////////////////////////////////////////////////////////////
// rocksdb::DB::Write
/*
 * Class:     org_rocksdb_RocksDB
 * Method:    write0
 * Signature: (JJJ)V
 */
// Applies a WriteBatch; throws a Java RocksDBException on failure.
void Java_org_rocksdb_RocksDB_write0(
    JNIEnv* env, jobject jdb, jlong jdb_handle,
    jlong jwrite_options_handle, jlong jwb_handle) {
  auto* db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  auto* write_options = reinterpret_cast<rocksdb::WriteOptions*>(
      jwrite_options_handle);
  auto* wb = reinterpret_cast<rocksdb::WriteBatch*>(jwb_handle);
  rocksdb::Status s = db->Write(*write_options, wb);
  if (!s.ok()) {
    rocksdb::RocksDBExceptionJni::ThrowNew(env, s);
  }
}
/*
 * Class:     org_rocksdb_RocksDB
 * Method:    write1
 * Signature: (JJJ)V
 */
// Applies the WriteBatch underlying a WriteBatchWithIndex; throws a Java
// RocksDBException on failure.
void Java_org_rocksdb_RocksDB_write1(
    JNIEnv* env, jobject jdb, jlong jdb_handle,
    jlong jwrite_options_handle, jlong jwbwi_handle) {
  auto* db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  auto* write_options = reinterpret_cast<rocksdb::WriteOptions*>(
      jwrite_options_handle);
  auto* wbwi = reinterpret_cast<rocksdb::WriteBatchWithIndex*>(jwbwi_handle);
  auto* wb = wbwi->GetWriteBatch();
  rocksdb::Status s = db->Write(*write_options, wb);
  if (!s.ok()) {
    rocksdb::RocksDBExceptionJni::ThrowNew(env, s);
  }
}
//////////////////////////////////////////////////////////////////////////////
// rocksdb::DB::KeyMayExist
// Shared implementation for the keyMayExist entry points. Copies the key
// out of the Java byte[], probes the DB (optionally through a column
// family), and when a value was actually found appends it to the Java
// StringBuffer via reflection.
jboolean key_may_exist_helper(JNIEnv* env, rocksdb::DB* db,
    const rocksdb::ReadOptions& read_opt,
    rocksdb::ColumnFamilyHandle* cf_handle, jbyteArray jkey, jint jkey_off,
    jint jkey_len, jobject jstring_buffer) {
  std::string value;
  bool value_found = false;
  jbyte* key = new jbyte[jkey_len];
  env->GetByteArrayRegion(jkey, jkey_off, jkey_len, key);
  rocksdb::Slice key_slice(reinterpret_cast<char*>(key), jkey_len);
  bool keyMayExist;
  if (cf_handle != nullptr) {
    keyMayExist = db->KeyMayExist(read_opt, cf_handle, key_slice,
        &value, &value_found);
  } else {
    keyMayExist = db->KeyMayExist(read_opt, key_slice,
        &value, &value_found);
  }
  // cleanup
  delete [] key;
  // extract the value
  if (value_found && !value.empty()) {
    jclass clazz = env->GetObjectClass(jstring_buffer);
    jmethodID mid = env->GetMethodID(clazz, "append",
        "(Ljava/lang/String;)Ljava/lang/StringBuffer;");
    // NOTE(review): NewStringUTF expects modified UTF-8; binary values may
    // not round-trip through this -- confirm values here are always text.
    jstring new_value_str = env->NewStringUTF(value.c_str());
    env->CallObjectMethod(jstring_buffer, mid, new_value_str);
  }
  return static_cast<jboolean>(keyMayExist);
}
/*
 * Class:     org_rocksdb_RocksDB
 * Method:    keyMayExist
 * Signature: (J[BIILjava/lang/StringBuffer;)Z
 */
// keyMayExist with default read options and default column family.
jboolean Java_org_rocksdb_RocksDB_keyMayExist__J_3BIILjava_lang_StringBuffer_2(
    JNIEnv* env, jobject jdb, jlong jdb_handle, jbyteArray jkey, jint jkey_off,
    jint jkey_len, jobject jstring_buffer) {
  auto* db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  return key_may_exist_helper(env, db, rocksdb::ReadOptions(),
      nullptr, jkey, jkey_off, jkey_len, jstring_buffer);
}
/*
 * Class:     org_rocksdb_RocksDB
 * Method:    keyMayExist
 * Signature: (J[BIIJLjava/lang/StringBuffer;)Z
 */
// keyMayExist with default read options and explicit column family;
// throws InvalidArgument and returns true if the handle is null.
jboolean Java_org_rocksdb_RocksDB_keyMayExist__J_3BIIJLjava_lang_StringBuffer_2(
    JNIEnv* env, jobject jdb, jlong jdb_handle, jbyteArray jkey, jint jkey_off,
    jint jkey_len, jlong jcf_handle, jobject jstring_buffer) {
  auto* db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  auto* cf_handle = reinterpret_cast<rocksdb::ColumnFamilyHandle*>(
      jcf_handle);
  if (cf_handle != nullptr) {
    return key_may_exist_helper(env, db, rocksdb::ReadOptions(),
        cf_handle, jkey, jkey_off, jkey_len, jstring_buffer);
  } else {
    rocksdb::RocksDBExceptionJni::ThrowNew(env,
        rocksdb::Status::InvalidArgument("Invalid ColumnFamilyHandle."));
    // "may exist" is the safe answer when we cannot actually check
    return true;
  }
}
/*
 * Class:     org_rocksdb_RocksDB
 * Method:    keyMayExist
 * Signature: (JJ[BIILjava/lang/StringBuffer;)Z
 */
// keyMayExist with caller-supplied read options, default column family.
jboolean Java_org_rocksdb_RocksDB_keyMayExist__JJ_3BIILjava_lang_StringBuffer_2(
    JNIEnv* env, jobject jdb, jlong jdb_handle, jlong jread_options_handle,
    jbyteArray jkey, jint jkey_off, jint jkey_len, jobject jstring_buffer) {
  auto* db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  auto& read_options = *reinterpret_cast<rocksdb::ReadOptions*>(
      jread_options_handle);
  return key_may_exist_helper(env, db, read_options,
      nullptr, jkey, jkey_off, jkey_len, jstring_buffer);
}
/*
 * Class:     org_rocksdb_RocksDB
 * Method:    keyMayExist
 * Signature: (JJ[BIIJLjava/lang/StringBuffer;)Z
 */
// keyMayExist with caller-supplied read options and explicit column
// family; throws InvalidArgument and returns true if the handle is null.
jboolean Java_org_rocksdb_RocksDB_keyMayExist__JJ_3BIIJLjava_lang_StringBuffer_2(
    JNIEnv* env, jobject jdb, jlong jdb_handle, jlong jread_options_handle,
    jbyteArray jkey, jint jkey_off, jint jkey_len, jlong jcf_handle,
    jobject jstring_buffer) {
  auto* db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  auto& read_options = *reinterpret_cast<rocksdb::ReadOptions*>(
      jread_options_handle);
  auto* cf_handle = reinterpret_cast<rocksdb::ColumnFamilyHandle*>(
      jcf_handle);
  if (cf_handle != nullptr) {
    return key_may_exist_helper(env, db, read_options, cf_handle,
        jkey, jkey_off, jkey_len, jstring_buffer);
  } else {
    rocksdb::RocksDBExceptionJni::ThrowNew(env,
        rocksdb::Status::InvalidArgument("Invalid ColumnFamilyHandle."));
    // "may exist" is the safe answer when we cannot actually check
    return true;
  }
}
//////////////////////////////////////////////////////////////////////////////
// rocksdb::DB::Get
// Looks up jkey and copies the stored value into the Java `target` object
// via JniUtil::k_op_bytes_into.  A null column_family_handle selects the
// default column family.
void rocksdb_get_helper(
    JNIEnv* env, rocksdb::DB* db, const rocksdb::ReadOptions& read_opt,
    rocksdb::ColumnFamilyHandle* column_family_handle, jbyteArray jkey,
    jint jkey_len, jobject target) {
  auto do_get = [&](const rocksdb::Slice k,
                    std::string* v) -> rocksdb::Status {
    // backwards compatibility: no CF handle means default column family
    return column_family_handle == nullptr
        ? db->Get(read_opt, k, v)
        : db->Get(read_opt, column_family_handle, k, v);
  };
  rocksdb::JniUtil::k_op_bytes_into(do_get, env, nullptr, jkey, jkey_len,
                                    target);
}
// Looks up jkey and returns the stored value as a freshly allocated Java
// byte[].  Returns nullptr when the key is absent, or after throwing a
// Java RocksDBException on any other error.  A null column_family_handle
// selects the default column family.
jbyteArray rocksdb_get_helper(
    JNIEnv* env, rocksdb::DB* db, const rocksdb::ReadOptions& read_opt,
    rocksdb::ColumnFamilyHandle* column_family_handle, jbyteArray jkey,
    jint jkey_off, jint jkey_len) {
  // Copy the key region out of the Java array into a native buffer.
  jbyte* key_buf = new jbyte[jkey_len];
  env->GetByteArrayRegion(jkey, jkey_off, jkey_len, key_buf);
  rocksdb::Slice key_slice(reinterpret_cast<char*>(key_buf), jkey_len);
  std::string value;
  rocksdb::Status s;
  if (column_family_handle == nullptr) {
    // backwards compatibility: default column family
    s = db->Get(read_opt, key_slice, &value);
  } else {
    s = db->Get(read_opt, column_family_handle, key_slice, &value);
  }
  delete [] key_buf;
  if (s.ok()) {
    const jsize value_len = static_cast<jsize>(value.size());
    jbyteArray jret_value = env->NewByteArray(value_len);
    env->SetByteArrayRegion(jret_value, 0, value_len,
        reinterpret_cast<const jbyte*>(value.c_str()));
    return jret_value;
  }
  if (s.IsNotFound()) {
    return nullptr;
  }
  rocksdb::RocksDBExceptionJni::ThrowNew(env, s);
  return nullptr;
}
// Reads the raw stored bytes for jkey (expected to be snappy-compressed),
// uncompresses them and returns the result as a new Java byte[].
// Returns nullptr when the key is absent, or after throwing a Java
// exception on any error.  A null column_family_handle selects the
// default column family.
jbyteArray rocksdb_get_snappy_compressed_bytes_helper(
    JNIEnv* env, rocksdb::DB* db, const rocksdb::ReadOptions& read_opt,
    rocksdb::ColumnFamilyHandle* column_family_handle, jbyteArray jkey,
    jint jkey_len) {
  if (!rocksdb::Snappy_Supported()) {
    rocksdb::RocksDBExceptionJni::ThrowNew(env, rocksdb::Status::Corruption("Snappy compression not supported"));
    return nullptr;
  }
  jboolean isCopy;
  jbyte* key = env->GetByteArrayElements(jkey, &isCopy);
  rocksdb::Slice key_slice(
      reinterpret_cast<char*>(key), jkey_len);
  std::string value;
  rocksdb::Status s;
  if (column_family_handle != nullptr) {
    s = db->Get(read_opt, column_family_handle, key_slice, &value);
  } else {
    // backwards compatibility
    s = db->Get(read_opt, key_slice, &value);
  }
  // trigger java unref on key.
  // by passing JNI_ABORT, it will simply release the reference without
  // copying the result back to the java byte array.
  env->ReleaseByteArrayElements(jkey, key, JNI_ABORT);
  if (s.IsNotFound()) {
    return nullptr;
  }
  if (s.ok()) {
    // The snappy framing stores the uncompressed size up front; use it to
    // size the Java output array exactly.
    size_t uncompressed_length = 0;
    if (!rocksdb::Snappy_GetUncompressedLength(value.c_str(), value.size(), &uncompressed_length)) {
      rocksdb::RocksDBExceptionJni::ThrowNew(env, rocksdb::Status::Corruption("Unable to get uncompressed length"));
      return nullptr;
    }
    jsize items = static_cast<jsize>(uncompressed_length);
    jbyteArray jret_value = env->NewByteArray(items);
    // GetPrimitiveArrayCritical pins (or copies) the array elements;
    // no other JNI calls may happen before the matching Release below.
    char *uncompressed = (char *) env->GetPrimitiveArrayCritical((jarray) jret_value, 0);
    if (uncompressed == 0) {
      rocksdb::RocksDBExceptionJni::ThrowNew(env, rocksdb::Status::Corruption("Unable to allocate output buffer"));
      return nullptr;
    }
    bool uncompress_ok = rocksdb::Snappy_Uncompress(value.c_str(), value.size(), uncompressed);
    // Mode 0: copy back the (possibly copied) buffer and release the pin.
    env->ReleasePrimitiveArrayCritical((jarray) jret_value, uncompressed, 0);
    if (!uncompress_ok) {
      rocksdb::RocksDBExceptionJni::ThrowNew(env, rocksdb::Status::Corruption("Unable to uncompress value"));
      return nullptr;
    }
    return jret_value;
  }
  rocksdb::RocksDBExceptionJni::ThrowNew(env, s);
  return nullptr;
}
// Looks up jkey and hands the (snappy-compressed) stored bytes to
// JniUtil::k_op_snappy_compressed_bytes_into, which uncompresses them
// into the Java `target` object.  A null column_family_handle selects
// the default column family.
void rocksdb_get_snappy_compressed_bytes_into_helper(
    JNIEnv* env, rocksdb::DB* db, const rocksdb::ReadOptions& read_opt,
    rocksdb::ColumnFamilyHandle* column_family_handle, jbyteArray jkey,
    jint jkey_len, jobject target) {
  auto do_get = [&](const rocksdb::Slice k,
                    std::string* v) -> rocksdb::Status {
    // backwards compatibility: no CF handle means default column family
    return column_family_handle == nullptr
        ? db->Get(read_opt, k, v)
        : db->Get(read_opt, column_family_handle, k, v);
  };
  rocksdb::JniUtil::k_op_snappy_compressed_bytes_into(do_get, env, nullptr,
                                                      jkey, jkey_len, target);
}
/*
* Class: org_rocksdb_RocksDB
* Method: get
* Signature: (J[BII)[B
*/
jbyteArray Java_org_rocksdb_RocksDB_get__J_3BII(
    JNIEnv* env, jobject jdb, jlong jdb_handle,
    jbyteArray jkey, jint jkey_off, jint jkey_len) {
  // Default ReadOptions, default column family.
  auto* db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  return rocksdb_get_helper(env, db, rocksdb::ReadOptions(), nullptr,
                            jkey, jkey_off, jkey_len);
}
/*
* Class: org_rocksdb_RocksDB
* Method: get
* Signature: (J[BIIJ)[B
*/
jbyteArray Java_org_rocksdb_RocksDB_get__J_3BIIJ(
    JNIEnv* env, jobject jdb, jlong jdb_handle,
    jbyteArray jkey, jint jkey_off, jint jkey_len, jlong jcf_handle) {
  auto* cf = reinterpret_cast<rocksdb::ColumnFamilyHandle*>(jcf_handle);
  if (cf == nullptr) {
    rocksdb::RocksDBExceptionJni::ThrowNew(env,
        rocksdb::Status::InvalidArgument("Invalid ColumnFamilyHandle."));
    // dummy return; the pending Java exception takes precedence
    return env->NewByteArray(0);
  }
  return rocksdb_get_helper(env, reinterpret_cast<rocksdb::DB*>(jdb_handle),
                            rocksdb::ReadOptions(), cf,
                            jkey, jkey_off, jkey_len);
}
/*
* Class: org_rocksdb_RocksDB
* Method: get
* Signature: (JJ[BII)[B
*/
jbyteArray Java_org_rocksdb_RocksDB_get__JJ_3BII(
    JNIEnv* env, jobject jdb, jlong jdb_handle, jlong jropt_handle,
    jbyteArray jkey, jint jkey_off, jint jkey_len) {
  // Explicit ReadOptions handle, default column family.
  auto* db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  auto* ropt = reinterpret_cast<rocksdb::ReadOptions*>(jropt_handle);
  return rocksdb_get_helper(env, db, *ropt, nullptr, jkey, jkey_off,
                            jkey_len);
}
/*
* Class: org_rocksdb_RocksDB
* Method: get
* Signature: (JJ[BIIJ)[B
*/
jbyteArray Java_org_rocksdb_RocksDB_get__JJ_3BIIJ(
    JNIEnv* env, jobject jdb, jlong jdb_handle, jlong jropt_handle,
    jbyteArray jkey, jint jkey_off, jint jkey_len, jlong jcf_handle) {
  auto* cf = reinterpret_cast<rocksdb::ColumnFamilyHandle*>(jcf_handle);
  if (cf == nullptr) {
    rocksdb::RocksDBExceptionJni::ThrowNew(env,
        rocksdb::Status::InvalidArgument("Invalid ColumnFamilyHandle."));
    // dummy return; the pending Java exception takes precedence
    return env->NewByteArray(0);
  }
  auto* db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  auto* ropt = reinterpret_cast<rocksdb::ReadOptions*>(jropt_handle);
  return rocksdb_get_helper(env, db, *ropt, cf, jkey, jkey_off, jkey_len);
}
jbyteArray Java_org_rocksdb_RocksDB_getSnappyCompressedBytes__JJ_3BI(
    JNIEnv* env, jobject jdb, jlong jdb_handle, jlong jropt_handle,
    jbyteArray jkey, jint jkey_len) {
  // Explicit ReadOptions handle, default column family.
  auto* db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  auto* ropt = reinterpret_cast<rocksdb::ReadOptions*>(jropt_handle);
  return rocksdb_get_snappy_compressed_bytes_helper(env, db, *ropt, nullptr,
                                                    jkey, jkey_len);
}
jbyteArray Java_org_rocksdb_RocksDB_getSnappyCompressedBytes__JJ_3BIJ(
    JNIEnv* env, jobject jdb, jlong jdb_handle, jlong jropt_handle,
    jbyteArray jkey, jint jkey_len, jlong jcf_handle) {
  auto* cf = reinterpret_cast<rocksdb::ColumnFamilyHandle*>(jcf_handle);
  if (cf == nullptr) {
    rocksdb::RocksDBExceptionJni::ThrowNew(env,
        rocksdb::Status::InvalidArgument("Invalid ColumnFamilyHandle."));
    // dummy return; the pending Java exception takes precedence
    return env->NewByteArray(0);
  }
  auto* db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  auto* ropt = reinterpret_cast<rocksdb::ReadOptions*>(jropt_handle);
  return rocksdb_get_snappy_compressed_bytes_helper(env, db, *ropt, cf,
                                                    jkey, jkey_len);
}
void Java_org_rocksdb_RocksDB_getSnappyCompressedBytesInto__JJ_3BILorg_rocksdb_ByteArray_2(
    JNIEnv* env, jobject jdb, jlong jdb_handle, jlong jropt_handle,
    jbyteArray jkey, jint jkey_len, jobject target) {
  // Explicit ReadOptions handle, default column family.
  auto* db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  auto* ropt = reinterpret_cast<rocksdb::ReadOptions*>(jropt_handle);
  rocksdb_get_snappy_compressed_bytes_into_helper(env, db, *ropt, nullptr,
                                                  jkey, jkey_len, target);
}
void Java_org_rocksdb_RocksDB_getSnappyCompressedBytesInto__JJ_3BILorg_rocksdb_ByteArray_2J(
    JNIEnv* env, jobject jdb, jlong jdb_handle, jlong jropt_handle,
    jbyteArray jkey, jint jkey_len, jobject target, jlong jcf_handle) {
  auto* cf = reinterpret_cast<rocksdb::ColumnFamilyHandle*>(jcf_handle);
  if (cf == nullptr) {
    rocksdb::RocksDBExceptionJni::ThrowNew(env,
        rocksdb::Status::InvalidArgument("Invalid ColumnFamilyHandle."));
    return;
  }
  auto* db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  auto* ropt = reinterpret_cast<rocksdb::ReadOptions*>(jropt_handle);
  rocksdb_get_snappy_compressed_bytes_into_helper(env, db, *ropt, cf,
                                                  jkey, jkey_len, target);
}
void Java_org_rocksdb_RocksDB_get__JJ_3BILorg_rocksdb_ByteArray_2(
    JNIEnv* env, jobject jdb, jlong jdb_handle, jlong jropt_handle,
    jbyteArray jkey, jint jkey_len, jobject target) {
  // Explicit ReadOptions handle, default column family.
  auto* db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  auto* ropt = reinterpret_cast<rocksdb::ReadOptions*>(jropt_handle);
  rocksdb_get_helper(env, db, *ropt, nullptr, jkey, jkey_len, target);
}
void Java_org_rocksdb_RocksDB_get__JJ_3BILorg_rocksdb_ByteArray_2J(
    JNIEnv* env, jobject jdb, jlong jdb_handle, jlong jropt_handle,
    jbyteArray jkey, jint jkey_len, jobject target, jlong jcf_handle) {
  auto* cf = reinterpret_cast<rocksdb::ColumnFamilyHandle*>(jcf_handle);
  if (cf == nullptr) {
    rocksdb::RocksDBExceptionJni::ThrowNew(env,
        rocksdb::Status::InvalidArgument("Invalid ColumnFamilyHandle."));
    return;
  }
  auto* db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  auto* ropt = reinterpret_cast<rocksdb::ReadOptions*>(jropt_handle);
  rocksdb_get_helper(env, db, *ropt, cf, jkey, jkey_len, target);
}
// Reads the value for jkey into the caller-supplied Java byte array jval
// (at jval_off, capacity jval_len).  Returns the FULL length of the stored
// value — which may exceed jval_len, in which case only a truncated prefix
// was copied — kNotFound (-1) when the key is absent, or kStatusError (-2)
// after throwing a Java exception on any other error.
jint rocksdb_get_helper(JNIEnv* env, rocksdb::DB* db,
    const rocksdb::ReadOptions& read_options,
    rocksdb::ColumnFamilyHandle* column_family_handle,
    jbyteArray jkey, jint jkey_off, jint jkey_len,
    jbyteArray jval, jint jval_off, jint jval_len) {
  static const int kNotFound = -1;
  static const int kStatusError = -2;
  // Copy the key region out of the Java array into a native buffer.
  jbyte* key = new jbyte[jkey_len];
  env->GetByteArrayRegion(jkey, jkey_off, jkey_len, key);
  rocksdb::Slice key_slice(
      reinterpret_cast<char*>(key), jkey_len);
  // TODO(yhchiang): we might save one memory allocation here by adding
  // a DB::Get() function which takes preallocated jbyte* as input.
  std::string cvalue;
  rocksdb::Status s;
  if (column_family_handle != nullptr) {
    s = db->Get(read_options, column_family_handle, key_slice, &cvalue);
  } else {
    // backwards compatibility
    s = db->Get(read_options, key_slice, &cvalue);
  }
  // cleanup
  delete [] key;
  if (s.IsNotFound()) {
    return kNotFound;
  } else if (!s.ok()) {
    // Here since we are throwing a Java exception from c++ side.
    // As a result, c++ does not know calling this function will in fact
    // throwing an exception. As a result, the execution flow will
    // not stop here, and codes after this throw will still be
    // executed.
    rocksdb::RocksDBExceptionJni::ThrowNew(env, s);
    // Return a dummy const value to avoid compilation error, although
    // java side might not have a chance to get the return value :)
    return kStatusError;
  }
  // Copy at most jval_len bytes; the return value tells Java the true size
  // so it can detect truncation.
  jint cvalue_len = static_cast<jint>(cvalue.size());
  jint length = std::min(jval_len, cvalue_len);
  env->SetByteArrayRegion(jval, jval_off, length,
      reinterpret_cast<const jbyte*>(cvalue.c_str()));
  return cvalue_len;
}
// cf multi get
// Performs a MultiGet for the given keys and returns a Java byte[][]
// with one slot per key.  Slots whose status is not OK (including
// not-found) are left as null — callers cannot distinguish "absent"
// from "error" here.  If jcolumn_family_handles is null every lookup
// uses the default column family; otherwise the handles are paired with
// the keys positionally.
jobjectArray multi_get_helper(JNIEnv* env, jobject jdb, rocksdb::DB* db,
    const rocksdb::ReadOptions& rOpt, jobjectArray jkeys,
    jintArray jkey_offs, jintArray jkey_lens,
    jlongArray jcolumn_family_handles) {
  std::vector<rocksdb::ColumnFamilyHandle*> cf_handles;
  if (jcolumn_family_handles != nullptr) {
    jsize len_cols = env->GetArrayLength(jcolumn_family_handles);
    jlong* jcfh = env->GetLongArrayElements(jcolumn_family_handles, NULL);
    for (int i = 0; i < len_cols; i++) {
      auto* cf_handle =
          reinterpret_cast<rocksdb::ColumnFamilyHandle*>(jcfh[i]);
      cf_handles.push_back(cf_handle);
    }
    // JNI_ABORT: release without copying back (handles were only read).
    env->ReleaseLongArrayElements(jcolumn_family_handles, jcfh, JNI_ABORT);
  }
  std::vector<rocksdb::Slice> keys;
  std::vector<std::pair<jbyte*, jobject>> keys_to_free;
  jsize len_keys = env->GetArrayLength(jkeys);
  // One local ref per key is held below; make sure the JVM allows that many.
  if (env->EnsureLocalCapacity(len_keys) != 0) {
    // out of memory
    return NULL;
  }
  jint* jkey_off = env->GetIntArrayElements(jkey_offs, NULL);
  jint* jkey_len = env->GetIntArrayElements(jkey_lens, NULL);
  for (int i = 0; i < len_keys; i++) {
    jobject jkey = env->GetObjectArrayElement(jkeys, i);
    jbyteArray jkey_ba = reinterpret_cast<jbyteArray>(jkey);
    jint len_key = jkey_len[i];
    // Copy each key into a native buffer; both the buffer and the local
    // ref are retained until after MultiGet (the Slices point into them).
    jbyte* key = new jbyte[len_key];
    env->GetByteArrayRegion(jkey_ba, jkey_off[i], len_key, key);
    rocksdb::Slice key_slice(reinterpret_cast<char*>(key), len_key);
    keys.push_back(key_slice);
    keys_to_free.push_back(std::pair<jbyte*, jobject>(key, jkey));
  }
  // cleanup jkey_off and jkey_len
  env->ReleaseIntArrayElements(jkey_lens, jkey_len, JNI_ABORT);
  env->ReleaseIntArrayElements(jkey_offs, jkey_off, JNI_ABORT);
  std::vector<std::string> values;
  std::vector<rocksdb::Status> s;
  if (cf_handles.size() == 0) {
    s = db->MultiGet(rOpt, keys, &values);
  } else {
    s = db->MultiGet(rOpt, cf_handles, keys, &values);
  }
  // free up allocated byte arrays
  for (auto it = keys_to_free.begin(); it != keys_to_free.end(); ++it) {
    delete [] it->first;
    env->DeleteLocalRef(it->second);
  }
  keys_to_free.clear();
  // prepare the results
  jclass jcls_ba = env->FindClass("[B");
  jobjectArray jresults =
      env->NewObjectArray(static_cast<jsize>(s.size()), jcls_ba, NULL);
  // add to the jresults
  for (std::vector<rocksdb::Status>::size_type i = 0; i != s.size(); i++) {
    if (s[i].ok()) {
      jbyteArray jentry_value =
          env->NewByteArray(static_cast<jsize>(values[i].size()));
      env->SetByteArrayRegion(
          jentry_value, 0, static_cast<jsize>(values[i].size()),
          reinterpret_cast<const jbyte*>(values[i].c_str()));
      env->SetObjectArrayElement(jresults, static_cast<jsize>(i), jentry_value);
      // Drop the local ref promptly; the array element keeps it alive.
      env->DeleteLocalRef(jentry_value);
    }
  }
  return jresults;
}
/*
* Class: org_rocksdb_RocksDB
* Method: multiGet
* Signature: (J[[B[I[I)[[B
*/
jobjectArray Java_org_rocksdb_RocksDB_multiGet__J_3_3B_3I_3I(
    JNIEnv* env, jobject jdb, jlong jdb_handle, jobjectArray jkeys,
    jintArray jkey_offs, jintArray jkey_lens) {
  // Default ReadOptions, default column family for every key.
  auto* db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  return multi_get_helper(env, jdb, db, rocksdb::ReadOptions(), jkeys,
                          jkey_offs, jkey_lens, nullptr);
}
/*
* Class: org_rocksdb_RocksDB
* Method: multiGet
* Signature: (J[[B[I[I[J)[[B
*/
jobjectArray Java_org_rocksdb_RocksDB_multiGet__J_3_3B_3I_3I_3J(
    JNIEnv* env, jobject jdb, jlong jdb_handle, jobjectArray jkeys,
    jintArray jkey_offs, jintArray jkey_lens,
    jlongArray jcolumn_family_handles) {
  // Default ReadOptions; per-key column family handles supplied by Java.
  auto* db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  return multi_get_helper(env, jdb, db, rocksdb::ReadOptions(), jkeys,
                          jkey_offs, jkey_lens, jcolumn_family_handles);
}
/*
* Class: org_rocksdb_RocksDB
* Method: multiGet
* Signature: (JJ[[B[I[I)[[B
*/
jobjectArray Java_org_rocksdb_RocksDB_multiGet__JJ_3_3B_3I_3I(
    JNIEnv* env, jobject jdb, jlong jdb_handle, jlong jropt_handle,
    jobjectArray jkeys, jintArray jkey_offs, jintArray jkey_lens) {
  // Explicit ReadOptions handle, default column family for every key.
  auto* db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  auto* ropt = reinterpret_cast<rocksdb::ReadOptions*>(jropt_handle);
  return multi_get_helper(env, jdb, db, *ropt, jkeys, jkey_offs, jkey_lens,
                          nullptr);
}
/*
* Class: org_rocksdb_RocksDB
* Method: multiGet
* Signature: (JJ[[B[I[I[J)[[B
*/
jobjectArray Java_org_rocksdb_RocksDB_multiGet__JJ_3_3B_3I_3I_3J(
    JNIEnv* env, jobject jdb, jlong jdb_handle, jlong jropt_handle,
    jobjectArray jkeys, jintArray jkey_offs, jintArray jkey_lens,
    jlongArray jcolumn_family_handles) {
  // Explicit ReadOptions handle; per-key column family handles from Java.
  auto* db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  auto* ropt = reinterpret_cast<rocksdb::ReadOptions*>(jropt_handle);
  return multi_get_helper(env, jdb, db, *ropt, jkeys, jkey_offs, jkey_lens,
                          jcolumn_family_handles);
}
/*
* Class: org_rocksdb_RocksDB
* Method: get
* Signature: (J[BII[BII)I
*/
jint Java_org_rocksdb_RocksDB_get__J_3BII_3BII(JNIEnv* env, jobject jdb,
                                               jlong jdb_handle,
                                               jbyteArray jkey, jint jkey_off,
                                               jint jkey_len, jbyteArray jval,
                                               jint jval_off, jint jval_len) {
  // Default ReadOptions, default column family; value copied into jval.
  auto* db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  return rocksdb_get_helper(env, db, rocksdb::ReadOptions(), nullptr,
                            jkey, jkey_off, jkey_len,
                            jval, jval_off, jval_len);
}
/*
* Class: org_rocksdb_RocksDB
* Method: get
* Signature: (J[BII[BIIJ)I
*/
jint Java_org_rocksdb_RocksDB_get__J_3BII_3BIIJ(JNIEnv* env, jobject jdb,
                                                jlong jdb_handle,
                                                jbyteArray jkey, jint jkey_off,
                                                jint jkey_len, jbyteArray jval,
                                                jint jval_off, jint jval_len,
                                                jlong jcf_handle) {
  auto* cf = reinterpret_cast<rocksdb::ColumnFamilyHandle*>(jcf_handle);
  if (cf == nullptr) {
    rocksdb::RocksDBExceptionJni::ThrowNew(env,
        rocksdb::Status::InvalidArgument("Invalid ColumnFamilyHandle."));
    // dummy return; the pending Java exception takes precedence
    return 0;
  }
  auto* db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  return rocksdb_get_helper(env, db, rocksdb::ReadOptions(), cf,
                            jkey, jkey_off, jkey_len,
                            jval, jval_off, jval_len);
}
/*
* Class: org_rocksdb_RocksDB
* Method: get
* Signature: (JJ[BII[BII)I
*/
jint Java_org_rocksdb_RocksDB_get__JJ_3BII_3BII(JNIEnv* env, jobject jdb,
                                                jlong jdb_handle,
                                                jlong jropt_handle,
                                                jbyteArray jkey, jint jkey_off,
                                                jint jkey_len, jbyteArray jval,
                                                jint jval_off, jint jval_len) {
  // Explicit ReadOptions handle, default column family.
  auto* db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  auto* ropt = reinterpret_cast<rocksdb::ReadOptions*>(jropt_handle);
  return rocksdb_get_helper(env, db, *ropt, nullptr, jkey, jkey_off,
                            jkey_len, jval, jval_off, jval_len);
}
/*
* Class: org_rocksdb_RocksDB
* Method: get
* Signature: (JJ[BII[BIIJ)I
*/
jint Java_org_rocksdb_RocksDB_get__JJ_3BII_3BIIJ(
    JNIEnv* env, jobject jdb, jlong jdb_handle, jlong jropt_handle,
    jbyteArray jkey, jint jkey_off, jint jkey_len, jbyteArray jval,
    jint jval_off, jint jval_len, jlong jcf_handle) {
  auto* cf = reinterpret_cast<rocksdb::ColumnFamilyHandle*>(jcf_handle);
  if (cf == nullptr) {
    rocksdb::RocksDBExceptionJni::ThrowNew(env,
        rocksdb::Status::InvalidArgument("Invalid ColumnFamilyHandle."));
    // dummy return; the pending Java exception takes precedence
    return 0;
  }
  auto* db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  auto* ropt = reinterpret_cast<rocksdb::ReadOptions*>(jropt_handle);
  return rocksdb_get_helper(env, db, *ropt, cf, jkey, jkey_off, jkey_len,
                            jval, jval_off, jval_len);
}
//////////////////////////////////////////////////////////////////////////////
// rocksdb::DB::Delete()
// Deletes jkey from the given column family (default column family when
// cf_handle is null).  Throws a Java RocksDBException on failure.
void rocksdb_delete_helper(
    JNIEnv* env, rocksdb::DB* db, const rocksdb::WriteOptions& write_options,
    rocksdb::ColumnFamilyHandle* cf_handle, jbyteArray jkey, jint jkey_off,
    jint jkey_len) {
  // Copy the key region out of the Java array into a native buffer.
  jbyte* key_buf = new jbyte[jkey_len];
  env->GetByteArrayRegion(jkey, jkey_off, jkey_len, key_buf);
  rocksdb::Slice key_slice(reinterpret_cast<char*>(key_buf), jkey_len);
  const rocksdb::Status s =
      (cf_handle != nullptr)
          ? db->Delete(write_options, cf_handle, key_slice)
          : db->Delete(write_options, key_slice);  // backwards compatibility
  delete [] key_buf;
  if (!s.ok()) {
    rocksdb::RocksDBExceptionJni::ThrowNew(env, s);
  }
}
/*
* Class: org_rocksdb_RocksDB
* Method: delete
* Signature: (J[BII)V
*/
void Java_org_rocksdb_RocksDB_delete__J_3BII(
    JNIEnv* env, jobject jdb, jlong jdb_handle,
    jbyteArray jkey, jint jkey_off, jint jkey_len) {
  // Default WriteOptions (constructed once), default column family.
  static const rocksdb::WriteOptions default_write_options =
      rocksdb::WriteOptions();
  auto* db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  rocksdb_delete_helper(env, db, default_write_options, nullptr,
                        jkey, jkey_off, jkey_len);
}
/*
* Class: org_rocksdb_RocksDB
* Method: delete
* Signature: (J[BIIJ)V
*/
void Java_org_rocksdb_RocksDB_delete__J_3BIIJ(
    JNIEnv* env, jobject jdb, jlong jdb_handle,
    jbyteArray jkey, jint jkey_off, jint jkey_len, jlong jcf_handle) {
  auto* cf = reinterpret_cast<rocksdb::ColumnFamilyHandle*>(jcf_handle);
  if (cf == nullptr) {
    rocksdb::RocksDBExceptionJni::ThrowNew(env,
        rocksdb::Status::InvalidArgument("Invalid ColumnFamilyHandle."));
    return;
  }
  // Default WriteOptions (constructed once).
  static const rocksdb::WriteOptions default_write_options =
      rocksdb::WriteOptions();
  auto* db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  rocksdb_delete_helper(env, db, default_write_options, cf,
                        jkey, jkey_off, jkey_len);
}
/*
* Class: org_rocksdb_RocksDB
* Method: delete
* Signature: (JJ[BII)V
*/
void Java_org_rocksdb_RocksDB_delete__JJ_3BII(
    JNIEnv* env, jobject jdb, jlong jdb_handle,
    jlong jwrite_options, jbyteArray jkey, jint jkey_off, jint jkey_len) {
  // Explicit WriteOptions handle, default column family.
  auto* db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  auto* wopt = reinterpret_cast<rocksdb::WriteOptions*>(jwrite_options);
  rocksdb_delete_helper(env, db, *wopt, nullptr, jkey, jkey_off, jkey_len);
}
/*
* Class: org_rocksdb_RocksDB
* Method: delete
* Signature: (JJ[BIIJ)V
*/
void Java_org_rocksdb_RocksDB_delete__JJ_3BIIJ(
    JNIEnv* env, jobject jdb, jlong jdb_handle,
    jlong jwrite_options, jbyteArray jkey, jint jkey_off, jint jkey_len,
    jlong jcf_handle) {
  auto* cf = reinterpret_cast<rocksdb::ColumnFamilyHandle*>(jcf_handle);
  if (cf == nullptr) {
    rocksdb::RocksDBExceptionJni::ThrowNew(env,
        rocksdb::Status::InvalidArgument("Invalid ColumnFamilyHandle."));
    return;
  }
  auto* db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  auto* wopt = reinterpret_cast<rocksdb::WriteOptions*>(jwrite_options);
  rocksdb_delete_helper(env, db, *wopt, cf, jkey, jkey_off, jkey_len);
}
//////////////////////////////////////////////////////////////////////////////
// rocksdb::DB::SingleDelete()
// Issues a SingleDelete for jkey against the given column family (default
// column family when cf_handle is null).  Throws a Java RocksDBException
// on failure.
void rocksdb_single_delete_helper(
    JNIEnv* env, rocksdb::DB* db, const rocksdb::WriteOptions& write_options,
    rocksdb::ColumnFamilyHandle* cf_handle, jbyteArray jkey, jint jkey_len) {
  jbyte* key = env->GetByteArrayElements(jkey, 0);
  rocksdb::Slice key_slice(reinterpret_cast<char*>(key), jkey_len);
  const rocksdb::Status s =
      (cf_handle != nullptr)
          ? db->SingleDelete(write_options, cf_handle, key_slice)
          : db->SingleDelete(write_options, key_slice);  // backwards compat
  // JNI_ABORT: release the key elements without copying anything back,
  // since the key was only read.
  env->ReleaseByteArrayElements(jkey, key, JNI_ABORT);
  if (!s.ok()) {
    rocksdb::RocksDBExceptionJni::ThrowNew(env, s);
  }
}
/*
* Class: org_rocksdb_RocksDB
* Method: singleDelete
* Signature: (J[BI)V
*/
void Java_org_rocksdb_RocksDB_singleDelete__J_3BI(
    JNIEnv* env, jobject jdb, jlong jdb_handle,
    jbyteArray jkey, jint jkey_len) {
  // Default WriteOptions (constructed once), default column family.
  static const rocksdb::WriteOptions default_write_options =
      rocksdb::WriteOptions();
  auto* db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  rocksdb_single_delete_helper(env, db, default_write_options, nullptr,
                               jkey, jkey_len);
}
/*
* Class: org_rocksdb_RocksDB
* Method: singleDelete
* Signature: (J[BIJ)V
*/
void Java_org_rocksdb_RocksDB_singleDelete__J_3BIJ(
    JNIEnv* env, jobject jdb, jlong jdb_handle,
    jbyteArray jkey, jint jkey_len, jlong jcf_handle) {
  auto* cf = reinterpret_cast<rocksdb::ColumnFamilyHandle*>(jcf_handle);
  if (cf == nullptr) {
    rocksdb::RocksDBExceptionJni::ThrowNew(env,
        rocksdb::Status::InvalidArgument("Invalid ColumnFamilyHandle."));
    return;
  }
  // Default WriteOptions (constructed once).
  static const rocksdb::WriteOptions default_write_options =
      rocksdb::WriteOptions();
  auto* db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  rocksdb_single_delete_helper(env, db, default_write_options, cf,
                               jkey, jkey_len);
}
/*
* Class: org_rocksdb_RocksDB
* Method: singleDelete
 * Signature: (JJ[BI)V
*/
void Java_org_rocksdb_RocksDB_singleDelete__JJ_3BI(
    JNIEnv* env, jobject jdb, jlong jdb_handle,
    jlong jwrite_options, jbyteArray jkey, jint jkey_len) {
  // Explicit WriteOptions handle, default column family.
  auto* db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  auto* wopt = reinterpret_cast<rocksdb::WriteOptions*>(jwrite_options);
  rocksdb_single_delete_helper(env, db, *wopt, nullptr, jkey, jkey_len);
}
/*
* Class: org_rocksdb_RocksDB
* Method: singleDelete
* Signature: (JJ[BIJ)V
*/
void Java_org_rocksdb_RocksDB_singleDelete__JJ_3BIJ(
    JNIEnv* env, jobject jdb, jlong jdb_handle,
    jlong jwrite_options, jbyteArray jkey, jint jkey_len,
    jlong jcf_handle) {
  auto* cf = reinterpret_cast<rocksdb::ColumnFamilyHandle*>(jcf_handle);
  if (cf == nullptr) {
    rocksdb::RocksDBExceptionJni::ThrowNew(env,
        rocksdb::Status::InvalidArgument("Invalid ColumnFamilyHandle."));
    return;
  }
  auto* db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  auto* wopt = reinterpret_cast<rocksdb::WriteOptions*>(jwrite_options);
  rocksdb_single_delete_helper(env, db, *wopt, cf, jkey, jkey_len);
}
//////////////////////////////////////////////////////////////////////////////
// rocksdb::DB::Merge
// Copies the key and value regions out of the Java byte arrays and issues
// a Merge against the given column family (default column family when
// cf_handle is null).  Throws a Java RocksDBException if the merge fails.
void rocksdb_merge_helper(JNIEnv* env, rocksdb::DB* db,
                          const rocksdb::WriteOptions& write_options,
                          rocksdb::ColumnFamilyHandle* cf_handle,
                          jbyteArray jkey, jint jkey_off, jint jkey_len,
                          jbyteArray jval, jint jval_off, jint jval_len) {
  jbyte* key = new jbyte[jkey_len];
  env->GetByteArrayRegion(jkey, jkey_off, jkey_len, key);
  // BUGFIX: the value buffer must be sized by jval_len, not jkey_len.
  // The previous `new jbyte[jkey_len]` overflowed the heap whenever the
  // value was longer than the key.
  jbyte* value = new jbyte[jval_len];
  env->GetByteArrayRegion(jval, jval_off, jval_len, value);
  rocksdb::Slice key_slice(reinterpret_cast<char*>(key), jkey_len);
  rocksdb::Slice value_slice(reinterpret_cast<char*>(value), jval_len);
  rocksdb::Status s;
  if (cf_handle != nullptr) {
    s = db->Merge(write_options, cf_handle, key_slice, value_slice);
  } else {
    s = db->Merge(write_options, key_slice, value_slice);
  }
  // cleanup
  delete [] value;
  delete [] key;
  if (s.ok()) {
    return;
  }
  rocksdb::RocksDBExceptionJni::ThrowNew(env, s);
}
/*
* Class: org_rocksdb_RocksDB
* Method: merge
* Signature: (J[BII[BII)V
*/
void Java_org_rocksdb_RocksDB_merge__J_3BII_3BII(JNIEnv* env, jobject jdb,
                                                 jlong jdb_handle,
                                                 jbyteArray jkey, jint jkey_off,
                                                 jint jkey_len, jbyteArray jval,
                                                 jint jval_off, jint jval_len) {
  // Default WriteOptions (constructed once), default column family.
  static const rocksdb::WriteOptions default_write_options =
      rocksdb::WriteOptions();
  auto* db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  rocksdb_merge_helper(env, db, default_write_options, nullptr,
                       jkey, jkey_off, jkey_len, jval, jval_off, jval_len);
}
/*
* Class: org_rocksdb_RocksDB
* Method: merge
* Signature: (J[BII[BIIJ)V
*/
void Java_org_rocksdb_RocksDB_merge__J_3BII_3BIIJ(
    JNIEnv* env, jobject jdb, jlong jdb_handle, jbyteArray jkey, jint jkey_off,
    jint jkey_len, jbyteArray jval, jint jval_off, jint jval_len,
    jlong jcf_handle) {
  auto* cf = reinterpret_cast<rocksdb::ColumnFamilyHandle*>(jcf_handle);
  if (cf == nullptr) {
    rocksdb::RocksDBExceptionJni::ThrowNew(env,
        rocksdb::Status::InvalidArgument("Invalid ColumnFamilyHandle."));
    return;
  }
  // Default WriteOptions (constructed once).
  static const rocksdb::WriteOptions default_write_options =
      rocksdb::WriteOptions();
  auto* db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  rocksdb_merge_helper(env, db, default_write_options, cf,
                       jkey, jkey_off, jkey_len, jval, jval_off, jval_len);
}
/*
* Class: org_rocksdb_RocksDB
* Method: merge
* Signature: (JJ[BII[BII)V
*/
void Java_org_rocksdb_RocksDB_merge__JJ_3BII_3BII(
    JNIEnv* env, jobject jdb, jlong jdb_handle, jlong jwrite_options_handle,
    jbyteArray jkey, jint jkey_off, jint jkey_len, jbyteArray jval,
    jint jval_off, jint jval_len) {
  // Explicit WriteOptions handle, default column family.
  auto* db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  auto* wopt =
      reinterpret_cast<rocksdb::WriteOptions*>(jwrite_options_handle);
  rocksdb_merge_helper(env, db, *wopt, nullptr,
                       jkey, jkey_off, jkey_len, jval, jval_off, jval_len);
}
/*
* Class: org_rocksdb_RocksDB
* Method: merge
* Signature: (JJ[BII[BIIJ)V
*/
void Java_org_rocksdb_RocksDB_merge__JJ_3BII_3BIIJ(
    JNIEnv* env, jobject jdb, jlong jdb_handle, jlong jwrite_options_handle,
    jbyteArray jkey, jint jkey_off, jint jkey_len, jbyteArray jval,
    jint jval_off, jint jval_len, jlong jcf_handle) {
  auto* cf = reinterpret_cast<rocksdb::ColumnFamilyHandle*>(jcf_handle);
  if (cf == nullptr) {
    rocksdb::RocksDBExceptionJni::ThrowNew(env,
        rocksdb::Status::InvalidArgument("Invalid ColumnFamilyHandle."));
    return;
  }
  auto* db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  auto* wopt =
      reinterpret_cast<rocksdb::WriteOptions*>(jwrite_options_handle);
  rocksdb_merge_helper(env, db, *wopt, cf,
                       jkey, jkey_off, jkey_len, jval, jval_off, jval_len);
}
//////////////////////////////////////////////////////////////////////////////
// rocksdb::DB::~DB()
/*
* Class: org_rocksdb_RocksDB
* Method: disposeInternal
* Signature: (J)V
*/
void Java_org_rocksdb_RocksDB_disposeInternal(
    JNIEnv* env, jobject java_db, jlong jhandle) {
  // Reclaim the native DB object owned by the Java wrapper.
  auto* db = reinterpret_cast<rocksdb::DB*>(jhandle);
  delete db;
}
// Creates a new iterator over the given column family (default column
// family when cf_handle is null) and returns it as a jlong handle.
// Ownership of the iterator passes to the Java side.
jlong rocksdb_iterator_helper(
    rocksdb::DB* db, rocksdb::ReadOptions read_options,
    rocksdb::ColumnFamilyHandle* cf_handle) {
  rocksdb::Iterator* it =
      (cf_handle != nullptr) ? db->NewIterator(read_options, cf_handle)
                             : db->NewIterator(read_options);
  return reinterpret_cast<jlong>(it);
}
/*
* Class: org_rocksdb_RocksDB
* Method: iterator
* Signature: (J)J
*/
jlong Java_org_rocksdb_RocksDB_iterator__J(
    JNIEnv* env, jobject jdb, jlong db_handle) {
  // Default ReadOptions, default column family.
  return rocksdb_iterator_helper(reinterpret_cast<rocksdb::DB*>(db_handle),
                                 rocksdb::ReadOptions(), nullptr);
}
/*
* Class: org_rocksdb_RocksDB
* Method: iterator
* Signature: (JJ)J
*/
jlong Java_org_rocksdb_RocksDB_iterator__JJ(
    JNIEnv* env, jobject jdb, jlong db_handle,
    jlong jread_options_handle) {
  // Explicit ReadOptions handle, default column family.
  auto* db = reinterpret_cast<rocksdb::DB*>(db_handle);
  auto* ropt =
      reinterpret_cast<rocksdb::ReadOptions*>(jread_options_handle);
  return rocksdb_iterator_helper(db, *ropt, nullptr);
}
/*
* Class: org_rocksdb_RocksDB
* Method: iteratorCF
* Signature: (JJ)J
*/
jlong Java_org_rocksdb_RocksDB_iteratorCF__JJ(
    JNIEnv* env, jobject jdb, jlong db_handle, jlong jcf_handle) {
  // Default ReadOptions over an explicit column family.
  auto* db = reinterpret_cast<rocksdb::DB*>(db_handle);
  auto* cf = reinterpret_cast<rocksdb::ColumnFamilyHandle*>(jcf_handle);
  return rocksdb_iterator_helper(db, rocksdb::ReadOptions(), cf);
}
/*
* Class: org_rocksdb_RocksDB
* Method: iteratorCF
* Signature: (JJJ)J
*/
jlong Java_org_rocksdb_RocksDB_iteratorCF__JJJ(
    JNIEnv* env, jobject jdb, jlong db_handle, jlong jcf_handle,
    jlong jread_options_handle) {
  // Explicit ReadOptions over an explicit column family.
  auto* db = reinterpret_cast<rocksdb::DB*>(db_handle);
  auto* cf = reinterpret_cast<rocksdb::ColumnFamilyHandle*>(jcf_handle);
  auto* ropt =
      reinterpret_cast<rocksdb::ReadOptions*>(jread_options_handle);
  return rocksdb_iterator_helper(db, *ropt, cf);
}
/*
* Class: org_rocksdb_RocksDB
* Method: iterators
* Signature: (J[JJ)[J
*/
// Creates one iterator per column family handle and returns their native
// pointers to Java as a long[].  Throws a Java exception and returns NULL
// on failure.  Ownership of the iterators passes to the Java side.
jlongArray Java_org_rocksdb_RocksDB_iterators(
    JNIEnv* env, jobject jdb, jlong db_handle,
    jlongArray jcolumn_family_handles, jlong jread_options_handle) {
  auto* db = reinterpret_cast<rocksdb::DB*>(db_handle);
  auto& read_options = *reinterpret_cast<rocksdb::ReadOptions*>(
      jread_options_handle);
  std::vector<rocksdb::ColumnFamilyHandle*> cf_handles;
  if (jcolumn_family_handles != nullptr) {
    jsize len_cols = env->GetArrayLength(jcolumn_family_handles);
    jlong* jcfh = env->GetLongArrayElements(jcolumn_family_handles, NULL);
    for (int i = 0; i < len_cols; i++) {
      auto* cf_handle =
          reinterpret_cast<rocksdb::ColumnFamilyHandle*>(jcfh[i]);
      cf_handles.push_back(cf_handle);
    }
    // JNI_ABORT: release without copying back (handles were only read).
    env->ReleaseLongArrayElements(jcolumn_family_handles, jcfh, JNI_ABORT);
  }
  std::vector<rocksdb::Iterator*> iterators;
  rocksdb::Status s = db->NewIterators(read_options,
      cf_handles, &iterators);
  if (s.ok()) {
    jlongArray jLongArray =
        env->NewLongArray(static_cast<jsize>(iterators.size()));
    for (std::vector<rocksdb::Iterator*>::size_type i = 0;
        i < iterators.size(); i++) {
      // Store the pointer value itself: &iterators[i] points at the
      // Iterator* element, which is reinterpreted as a jlong.
      env->SetLongArrayRegion(jLongArray, static_cast<jsize>(i), 1,
          reinterpret_cast<const jlong*>(&iterators[i]));
    }
    return jLongArray;
  } else {
    rocksdb::RocksDBExceptionJni::ThrowNew(env, s);
    return NULL;
  }
}
/*
* Class: org_rocksdb_RocksDB
* Method: getDefaultColumnFamily
* Signature: (J)J
*/
jlong Java_org_rocksdb_RocksDB_getDefaultColumnFamily(
    JNIEnv* env, jobject jobj, jlong jdb_handle) {
  // The default column family handle is owned by the DB; Java must not
  // dispose of it.
  auto* db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  return reinterpret_cast<jlong>(db->DefaultColumnFamily());
}
/*
* Class: org_rocksdb_RocksDB
* Method: createColumnFamily
* Signature: (J[BJ)J
*/
jlong Java_org_rocksdb_RocksDB_createColumnFamily(
    JNIEnv* env, jobject jdb, jlong jdb_handle,
    jbyteArray jcolumn_name, jlong jcolumn_options) {
  auto* db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  auto* cf_options =
      reinterpret_cast<rocksdb::ColumnFamilyOptions*>(jcolumn_options);
  // Copy the column family name out of the Java byte array.
  jbyte* name_bytes = env->GetByteArrayElements(jcolumn_name, 0);
  const int name_len = env->GetArrayLength(jcolumn_name);
  std::string cf_name(reinterpret_cast<char*>(name_bytes), name_len);
  env->ReleaseByteArrayElements(jcolumn_name, name_bytes, 0);
  rocksdb::ColumnFamilyHandle* handle;
  rocksdb::Status s = db->CreateColumnFamily(*cf_options, cf_name, &handle);
  if (!s.ok()) {
    rocksdb::RocksDBExceptionJni::ThrowNew(env, s);
    return 0;
  }
  return reinterpret_cast<jlong>(handle);
}
/*
* Class: org_rocksdb_RocksDB
* Method: dropColumnFamily
 * Signature: (JJ)V
*/
void Java_org_rocksdb_RocksDB_dropColumnFamily(
    JNIEnv* env, jobject jdb, jlong jdb_handle, jlong jcf_handle) {
  // Drop the column family behind the native handle; surface any failure
  // as a RocksDBException on the Java side.
  auto* db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  auto* cf = reinterpret_cast<rocksdb::ColumnFamilyHandle*>(jcf_handle);
  const rocksdb::Status status = db->DropColumnFamily(cf);
  if (!status.ok()) {
    rocksdb::RocksDBExceptionJni::ThrowNew(env, status);
  }
}
/*
* Method: getSnapshot
* Signature: (J)J
*/
jlong Java_org_rocksdb_RocksDB_getSnapshot(
    JNIEnv* env, jobject jdb, jlong db_handle) {
  // Acquire a snapshot and return it as an opaque handle; it must later
  // be released via releaseSnapshot.
  auto* db = reinterpret_cast<rocksdb::DB*>(db_handle);
  return reinterpret_cast<jlong>(db->GetSnapshot());
}
/*
* Method: releaseSnapshot
* Signature: (JJ)V
*/
void Java_org_rocksdb_RocksDB_releaseSnapshot(
    JNIEnv* env, jobject jdb, jlong db_handle, jlong snapshot_handle) {
  // Return a snapshot previously obtained via getSnapshot back to the DB.
  auto* db = reinterpret_cast<rocksdb::DB*>(db_handle);
  auto* snapshot = reinterpret_cast<rocksdb::Snapshot*>(snapshot_handle);
  db->ReleaseSnapshot(snapshot);
}
/*
* Class: org_rocksdb_RocksDB
* Method: getProperty0
* Signature: (JLjava/lang/String;I)Ljava/lang/String;
*/
jstring Java_org_rocksdb_RocksDB_getProperty0__JLjava_lang_String_2I(
    JNIEnv* env, jobject jdb, jlong db_handle, jstring jproperty,
    jint jproperty_len) {
  auto db = reinterpret_cast<rocksdb::DB*>(db_handle);

  const char* property = env->GetStringUTFChars(jproperty, 0);
  rocksdb::Slice property_slice(property, jproperty_len);

  std::string property_value;
  bool retCode = db->GetProperty(property_slice, &property_value);
  env->ReleaseStringUTFChars(jproperty, property);

  if (!retCode) {
    rocksdb::RocksDBExceptionJni::ThrowNew(env, rocksdb::Status::NotFound());
    // A Java exception is now pending; most JNI calls (incl. NewStringUTF)
    // are not allowed until it is handled, so bail out immediately.
    return nullptr;
  }
  return env->NewStringUTF(property_value.c_str());
}
/*
* Class: org_rocksdb_RocksDB
* Method: getProperty0
* Signature: (JJLjava/lang/String;I)Ljava/lang/String;
*/
jstring Java_org_rocksdb_RocksDB_getProperty0__JJLjava_lang_String_2I(
    JNIEnv* env, jobject jdb, jlong db_handle, jlong jcf_handle,
    jstring jproperty, jint jproperty_len) {
  auto db = reinterpret_cast<rocksdb::DB*>(db_handle);
  auto cf_handle = reinterpret_cast<rocksdb::ColumnFamilyHandle*>(jcf_handle);

  const char* property = env->GetStringUTFChars(jproperty, 0);
  rocksdb::Slice property_slice(property, jproperty_len);

  std::string property_value;
  bool retCode = db->GetProperty(cf_handle, property_slice, &property_value);
  env->ReleaseStringUTFChars(jproperty, property);

  if (!retCode) {
    rocksdb::RocksDBExceptionJni::ThrowNew(env, rocksdb::Status::NotFound());
    // A Java exception is now pending; most JNI calls (incl. NewStringUTF)
    // are not allowed until it is handled, so bail out immediately.
    return nullptr;
  }
  return env->NewStringUTF(property_value.c_str());
}
/*
* Class: org_rocksdb_RocksDB
* Method: getLongProperty
 * Signature: (JLjava/lang/String;I)J
*/
jlong Java_org_rocksdb_RocksDB_getLongProperty__JLjava_lang_String_2I(
    JNIEnv* env, jobject jdb, jlong db_handle, jstring jproperty,
    jint jproperty_len) {
  auto db = reinterpret_cast<rocksdb::DB*>(db_handle);

  const char* property = env->GetStringUTFChars(jproperty, 0);
  rocksdb::Slice property_slice(property, jproperty_len);

  uint64_t property_value = 0;
  bool retCode = db->GetIntProperty(property_slice, &property_value);
  env->ReleaseStringUTFChars(jproperty, property);

  if (!retCode) {
    rocksdb::RocksDBExceptionJni::ThrowNew(env, rocksdb::Status::NotFound());
    // Exception pending on the Java side; the returned value is unused.
    return 0;
  }
  return property_value;
}
/*
* Class: org_rocksdb_RocksDB
* Method: getLongProperty
 * Signature: (JJLjava/lang/String;I)J
*/
jlong Java_org_rocksdb_RocksDB_getLongProperty__JJLjava_lang_String_2I(
    JNIEnv* env, jobject jdb, jlong db_handle, jlong jcf_handle,
    jstring jproperty, jint jproperty_len) {
  auto db = reinterpret_cast<rocksdb::DB*>(db_handle);
  auto cf_handle = reinterpret_cast<rocksdb::ColumnFamilyHandle*>(jcf_handle);

  const char* property = env->GetStringUTFChars(jproperty, 0);
  rocksdb::Slice property_slice(property, jproperty_len);

  // Initialize so a failed lookup cannot return garbage (the sibling
  // single-arg overload already does this).
  uint64_t property_value = 0;
  bool retCode = db->GetIntProperty(cf_handle, property_slice, &property_value);
  env->ReleaseStringUTFChars(jproperty, property);

  if (!retCode) {
    rocksdb::RocksDBExceptionJni::ThrowNew(env, rocksdb::Status::NotFound());
    // Exception pending on the Java side; the returned value is unused.
    return 0;
  }
  return property_value;
}
//////////////////////////////////////////////////////////////////////////////
// rocksdb::DB::Flush
void rocksdb_flush_helper(
    JNIEnv* env, rocksdb::DB* db, const rocksdb::FlushOptions& flush_options,
    rocksdb::ColumnFamilyHandle* column_family_handle) {
  // Flush either a specific column family or, when no handle is given,
  // the default one; a failed status becomes a Java exception.
  const rocksdb::Status status = (column_family_handle != nullptr)
      ? db->Flush(flush_options, column_family_handle)
      : db->Flush(flush_options);
  if (!status.ok()) {
    rocksdb::RocksDBExceptionJni::ThrowNew(env, status);
  }
}
/*
* Class: org_rocksdb_RocksDB
* Method: flush
* Signature: (JJ)V
*/
void Java_org_rocksdb_RocksDB_flush__JJ(
    JNIEnv* env, jobject jdb, jlong jdb_handle,
    jlong jflush_options) {
  // Flush the default column family.
  auto* opts = reinterpret_cast<rocksdb::FlushOptions*>(jflush_options);
  rocksdb_flush_helper(env, reinterpret_cast<rocksdb::DB*>(jdb_handle),
      *opts, nullptr);
}
/*
* Class: org_rocksdb_RocksDB
* Method: flush
* Signature: (JJJ)V
*/
void Java_org_rocksdb_RocksDB_flush__JJJ(
    JNIEnv* env, jobject jdb, jlong jdb_handle,
    jlong jflush_options, jlong jcf_handle) {
  // Flush the given column family.
  auto* opts = reinterpret_cast<rocksdb::FlushOptions*>(jflush_options);
  auto* cf = reinterpret_cast<rocksdb::ColumnFamilyHandle*>(jcf_handle);
  rocksdb_flush_helper(env, reinterpret_cast<rocksdb::DB*>(jdb_handle),
      *opts, cf);
}
//////////////////////////////////////////////////////////////////////////////
// rocksdb::DB::CompactRange - Full
void rocksdb_compactrange_helper(JNIEnv* env, rocksdb::DB* db,
    rocksdb::ColumnFamilyHandle* cf_handle, jboolean jreduce_level,
    jint jtarget_level, jint jtarget_path_id) {
  // Compact the entire key range of one column family (or the default CF
  // when no handle is supplied, for backwards compatibility).
  rocksdb::CompactRangeOptions options;
  options.change_level = jreduce_level;
  options.target_level = jtarget_level;
  options.target_path_id = static_cast<uint32_t>(jtarget_path_id);

  const rocksdb::Status status = (cf_handle != nullptr)
      ? db->CompactRange(options, cf_handle, nullptr, nullptr)
      : db->CompactRange(options, nullptr, nullptr);
  if (!status.ok()) {
    rocksdb::RocksDBExceptionJni::ThrowNew(env, status);
  }
}
/*
* Class: org_rocksdb_RocksDB
* Method: compactRange0
* Signature: (JZII)V
*/
void Java_org_rocksdb_RocksDB_compactRange0__JZII(JNIEnv* env,
    jobject jdb, jlong jdb_handle, jboolean jreduce_level,
    jint jtarget_level, jint jtarget_path_id) {
  // Full-range compaction on the default column family.
  rocksdb_compactrange_helper(env, reinterpret_cast<rocksdb::DB*>(jdb_handle),
      nullptr, jreduce_level, jtarget_level, jtarget_path_id);
}
/*
* Class: org_rocksdb_RocksDB
* Method: compactRange
* Signature: (JZIIJ)V
*/
void Java_org_rocksdb_RocksDB_compactRange__JZIIJ(
    JNIEnv* env, jobject jdb, jlong jdb_handle,
    jboolean jreduce_level, jint jtarget_level,
    jint jtarget_path_id, jlong jcf_handle) {
  // Full-range compaction on the given column family.
  auto* cf = reinterpret_cast<rocksdb::ColumnFamilyHandle*>(jcf_handle);
  rocksdb_compactrange_helper(env, reinterpret_cast<rocksdb::DB*>(jdb_handle),
      cf, jreduce_level, jtarget_level, jtarget_path_id);
}
//////////////////////////////////////////////////////////////////////////////
// rocksdb::DB::CompactRange - Range
void rocksdb_compactrange_helper(JNIEnv* env, rocksdb::DB* db,
    rocksdb::ColumnFamilyHandle* cf_handle, jbyteArray jbegin, jint jbegin_len,
    jbyteArray jend, jint jend_len, jboolean jreduce_level, jint jtarget_level,
    jint jtarget_path_id) {
  // Compact the [begin, end) key range of one column family (default CF
  // when no handle is supplied, for backwards compatibility).
  jbyte* begin = env->GetByteArrayElements(jbegin, 0);
  if (begin == nullptr) {
    // Pin failed: an OutOfMemoryError is already pending.
    return;
  }
  jbyte* end = env->GetByteArrayElements(jend, 0);
  if (end == nullptr) {
    env->ReleaseByteArrayElements(jbegin, begin, JNI_ABORT);
    return;
  }
  const rocksdb::Slice begin_slice(reinterpret_cast<char*>(begin), jbegin_len);
  const rocksdb::Slice end_slice(reinterpret_cast<char*>(end), jend_len);

  rocksdb::CompactRangeOptions compact_options;
  compact_options.change_level = jreduce_level;
  compact_options.target_level = jtarget_level;
  compact_options.target_path_id = static_cast<uint32_t>(jtarget_path_id);

  rocksdb::Status s;
  if (cf_handle != nullptr) {
    s = db->CompactRange(compact_options, cf_handle, &begin_slice, &end_slice);
  } else {
    // backwards compatibility
    s = db->CompactRange(compact_options, &begin_slice, &end_slice);
  }

  // JNI_ABORT: the key buffers were only read, no copy-back needed.
  env->ReleaseByteArrayElements(jbegin, begin, JNI_ABORT);
  env->ReleaseByteArrayElements(jend, end, JNI_ABORT);

  if (!s.ok()) {
    rocksdb::RocksDBExceptionJni::ThrowNew(env, s);
  }
}
/*
* Class: org_rocksdb_RocksDB
* Method: compactRange0
* Signature: (J[BI[BIZII)V
*/
void Java_org_rocksdb_RocksDB_compactRange0__J_3BI_3BIZII(JNIEnv* env,
    jobject jdb, jlong jdb_handle, jbyteArray jbegin, jint jbegin_len,
    jbyteArray jend, jint jend_len, jboolean jreduce_level,
    jint jtarget_level, jint jtarget_path_id) {
  // Range compaction on the default column family.
  rocksdb_compactrange_helper(env, reinterpret_cast<rocksdb::DB*>(jdb_handle),
      nullptr, jbegin, jbegin_len, jend, jend_len, jreduce_level,
      jtarget_level, jtarget_path_id);
}
/*
* Class: org_rocksdb_RocksDB
* Method: compactRange
 * Signature: (J[BI[BIZIIJ)V
*/
void Java_org_rocksdb_RocksDB_compactRange__J_3BI_3BIZIIJ(
    JNIEnv* env, jobject jdb, jlong jdb_handle, jbyteArray jbegin,
    jint jbegin_len, jbyteArray jend, jint jend_len,
    jboolean jreduce_level, jint jtarget_level,
    jint jtarget_path_id, jlong jcf_handle) {
  // Range compaction on the given column family.
  auto* cf = reinterpret_cast<rocksdb::ColumnFamilyHandle*>(jcf_handle);
  rocksdb_compactrange_helper(env, reinterpret_cast<rocksdb::DB*>(jdb_handle),
      cf, jbegin, jbegin_len, jend, jend_len, jreduce_level,
      jtarget_level, jtarget_path_id);
}
//////////////////////////////////////////////////////////////////////////////
// rocksdb::DB::PauseBackgroundWork
/*
* Class: org_rocksdb_RocksDB
* Method: pauseBackgroundWork
* Signature: (J)V
*/
void Java_org_rocksdb_RocksDB_pauseBackgroundWork(
    JNIEnv* env, jobject jobj, jlong jdb_handle) {
  // Suspend background compactions/flushes; failures are rethrown in Java.
  auto* db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  const rocksdb::Status status = db->PauseBackgroundWork();
  if (!status.ok()) {
    rocksdb::RocksDBExceptionJni::ThrowNew(env, status);
  }
}
//////////////////////////////////////////////////////////////////////////////
// rocksdb::DB::ContinueBackgroundWork
/*
* Class: org_rocksdb_RocksDB
* Method: continueBackgroundWork
* Signature: (J)V
*/
void Java_org_rocksdb_RocksDB_continueBackgroundWork(
    JNIEnv* env, jobject jobj, jlong jdb_handle) {
  // Resume background work previously paused via pauseBackgroundWork.
  auto* db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  const rocksdb::Status status = db->ContinueBackgroundWork();
  if (!status.ok()) {
    rocksdb::RocksDBExceptionJni::ThrowNew(env, status);
  }
}
//////////////////////////////////////////////////////////////////////////////
// rocksdb::DB::GetLatestSequenceNumber
/*
* Class: org_rocksdb_RocksDB
* Method: getLatestSequenceNumber
 * Signature: (J)J
*/
jlong Java_org_rocksdb_RocksDB_getLatestSequenceNumber(JNIEnv* env,
    jobject jdb, jlong jdb_handle) {
  // SequenceNumber is an unsigned 64-bit value; cast explicitly to jlong.
  auto* db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  return static_cast<jlong>(db->GetLatestSequenceNumber());
}
//////////////////////////////////////////////////////////////////////////////
// rocksdb::DB enable/disable file deletions
/*
* Class: org_rocksdb_RocksDB
 * Method:    disableFileDeletions
* Signature: (J)V
*/
void Java_org_rocksdb_RocksDB_disableFileDeletions(JNIEnv* env,
    jobject jdb, jlong jdb_handle) {
  // Block deletion of obsolete files (e.g. while taking a backup).
  auto* db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  const rocksdb::Status status = db->DisableFileDeletions();
  if (!status.ok()) {
    rocksdb::RocksDBExceptionJni::ThrowNew(env, status);
  }
}
/*
* Class: org_rocksdb_RocksDB
* Method: enableFileDeletions
* Signature: (JZ)V
*/
void Java_org_rocksdb_RocksDB_enableFileDeletions(JNIEnv* env,
    jobject jdb, jlong jdb_handle, jboolean jforce) {
  // Re-enable deletion of obsolete files; `jforce` is forwarded to
  // DB::EnableFileDeletions unchanged.
  auto* db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  const rocksdb::Status status = db->EnableFileDeletions(jforce);
  if (!status.ok()) {
    rocksdb::RocksDBExceptionJni::ThrowNew(env, status);
  }
}
//////////////////////////////////////////////////////////////////////////////
// rocksdb::DB::GetUpdatesSince
/*
* Class: org_rocksdb_RocksDB
* Method: getUpdatesSince
* Signature: (JJ)J
*/
jlong Java_org_rocksdb_RocksDB_getUpdatesSince(JNIEnv* env,
    jobject jdb, jlong jdb_handle, jlong jsequence_number) {
  // Obtain a transaction-log iterator starting at the given sequence
  // number; on success its ownership transfers to the Java side.
  auto* db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  auto sequence_number =
      static_cast<rocksdb::SequenceNumber>(jsequence_number);

  std::unique_ptr<rocksdb::TransactionLogIterator> iter;
  const rocksdb::Status status = db->GetUpdatesSince(sequence_number, &iter);
  if (!status.ok()) {
    rocksdb::RocksDBExceptionJni::ThrowNew(env, status);
    return 0;
  }
  return reinterpret_cast<jlong>(iter.release());
}
/*
* Class: org_rocksdb_RocksDB
* Method: setOptions
* Signature: (JJ[Ljava/lang/String;[Ljava/lang/String;)V
*/
void Java_org_rocksdb_RocksDB_setOptions(JNIEnv* env, jobject jdb,
    jlong jdb_handle, jlong jcf_handle, jobjectArray jkeys,
    jobjectArray jvalues) {
  // Build a native option map from the parallel key/value String arrays
  // and apply it to the column family via DB::SetOptions.
  std::unordered_map<std::string, std::string> options_map;
  const jsize len = env->GetArrayLength(jkeys);
  assert(len == env->GetArrayLength(jvalues));
  for (int i = 0; i < len; i++) {
    jobject jobj_key = env->GetObjectArrayElement(jkeys, i);
    jobject jobj_value = env->GetObjectArrayElement(jvalues, i);
    jstring jkey = reinterpret_cast<jstring>(jobj_key);
    jstring jval = reinterpret_cast<jstring>(jobj_value);
    const char* key = env->GetStringUTFChars(jkey, NULL);
    const char* value = env->GetStringUTFChars(jval, NULL);
    std::string s_key(key);
    std::string s_value(value);
    env->ReleaseStringUTFChars(jkey, key);
    env->ReleaseStringUTFChars(jval, value);
    // Drop local refs eagerly so long arrays cannot overflow the
    // local-reference table.
    env->DeleteLocalRef(jobj_key);
    env->DeleteLocalRef(jobj_value);
    options_map[s_key] = s_value;
  }
  auto* db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  auto* cf_handle = reinterpret_cast<rocksdb::ColumnFamilyHandle*>(jcf_handle);
  rocksdb::Status s = db->SetOptions(cf_handle, options_map);
  if (!s.ok()) {
    // Previously the status was silently dropped; surface failures
    // (e.g. unknown or immutable option names) to the Java caller.
    rocksdb::RocksDBExceptionJni::ThrowNew(env, s);
  }
}
//////////////////////////////////////////////////////////////////////////////
// rocksdb::DB::AddFile
void add_file_helper(JNIEnv* env, const jobjectArray& jfile_path_list,
    int file_path_list_len,
    std::vector<std::string>* file_path_list) {
  // Copy each Java String path into the native vector, releasing every
  // UTF buffer and local reference as soon as it has been converted.
  for (int idx = 0; idx < file_path_list_len; idx++) {
    jstring jpath =
        static_cast<jstring>(env->GetObjectArrayElement(jfile_path_list, idx));
    const char* utf_path = env->GetStringUTFChars(jpath, NULL);
    file_path_list->push_back(std::string(utf_path));
    env->ReleaseStringUTFChars(jpath, utf_path);
    env->DeleteLocalRef(jpath);
  }
}
/*
* Class: org_rocksdb_RocksDB
* Method: addFile
* Signature: (JJ[Ljava/lang/String;IZ)V
*/
void Java_org_rocksdb_RocksDB_addFile__JJ_3Ljava_lang_String_2IZ(
    JNIEnv* env, jobject jdb, jlong jdb_handle, jlong jcf_handle,
    jobjectArray jfile_path_list, jint jfile_path_list_len,
    jboolean jmove_file) {
  // Ingest externally-built SST files into the given column family.
  auto* db = reinterpret_cast<rocksdb::DB*>(jdb_handle);
  auto* column_family =
      reinterpret_cast<rocksdb::ColumnFamilyHandle*>(jcf_handle);

  std::vector<std::string> file_path_list;
  add_file_helper(env, jfile_path_list, static_cast<int>(jfile_path_list_len),
      &file_path_list);

  rocksdb::IngestExternalFileOptions ifo;
  ifo.move_files = static_cast<bool>(jmove_file);
  ifo.snapshot_consistency = true;
  ifo.allow_global_seqno = false;
  ifo.allow_blocking_flush = false;

  const rocksdb::Status status =
      db->IngestExternalFile(column_family, file_path_list, ifo);
  if (!status.ok()) {
    rocksdb::RocksDBExceptionJni::ThrowNew(env, status);
  }
}
|
{
"content_hash": "73d8739c58ae7633eb8ebfc54e106463",
"timestamp": "",
"source": "github",
"line_count": 1972,
"max_line_length": 120,
"avg_line_length": 35.551217038539555,
"alnum_prop": 0.6520318941047256,
"repo_name": "tsheasha/rocksdb",
"id": "4690b5c5df97fdfb488422aabfcd07701a67d04c",
"size": "70107",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "java/rocksjni/rocksjni.cc",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "81273"
},
{
"name": "C++",
"bytes": "7334953"
},
{
"name": "CMake",
"bytes": "32091"
},
{
"name": "Java",
"bytes": "919137"
},
{
"name": "Makefile",
"bytes": "82180"
},
{
"name": "PHP",
"bytes": "48509"
},
{
"name": "Perl",
"bytes": "244299"
},
{
"name": "PowerShell",
"bytes": "9458"
},
{
"name": "Python",
"bytes": "20674"
},
{
"name": "Ruby",
"bytes": "700"
},
{
"name": "Shell",
"bytes": "82760"
}
]
}
|
<?php
defined('BASEPATH') OR exit('No direct script access allowed');
class Food extends CI_Controller {

	/**
	 * Loads the form/security helpers, form validation library and the
	 * Answer model used by the survey pages.
	 */
	public function __construct() {
		parent::__construct();
		$this->load->helper(array('form', 'security'));
		$this->load->library('form_validation');
		$this->load->model('Answer_model');
		// Removed an unused local ($user = 1) that had no effect here.
	}

	/**
	 * Renders the food-survey answers of the logged-in user.
	 */
	public function index() {
		$data['title'] = 'Food';
		$argPost['userid'] = $this->session->userdata('userid');
		// Type 4 selects the food survey — presumably an answer-type id
		// defined in Answer_model; verify against that model.
		$argPost['type'] = 4;
		$data['data'] = $this->Answer_model->getAnswers($argPost);
		$this->load->view('admin/survey/food', $data);
	}
}
|
{
"content_hash": "97990e6fccbba3bfdc7d15febd4e302a",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 70,
"avg_line_length": 26.40740740740741,
"alnum_prop": 0.5035063113604488,
"repo_name": "animesh21/gsp-portal-new",
"id": "e4d522aea74eca2da50a6a84cf1d90ea2f5f868d",
"size": "713",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "application/controllers/admin/survey/Food.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1326049"
},
{
"name": "HTML",
"bytes": "28341964"
},
{
"name": "Hack",
"bytes": "12578"
},
{
"name": "JavaScript",
"bytes": "3238193"
},
{
"name": "PHP",
"bytes": "41793583"
},
{
"name": "Redcode",
"bytes": "6105"
},
{
"name": "Shell",
"bytes": "383"
}
]
}
|
module Journeyapi
  # Gem version string (MAJOR.MINOR.PATCH), referenced by the gemspec.
  VERSION = "0.0.1"
end
|
{
"content_hash": "c8095a2cdade06993cb4f66f4bee882a",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 19,
"avg_line_length": 14,
"alnum_prop": 0.6904761904761905,
"repo_name": "fireworksinnovation/journeyapi",
"id": "f64c5187657e9f31a8956e24629bd596513e34d0",
"size": "42",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/journeyapi/version.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "4345"
}
]
}
|
using System.Collections.Generic;
using log4net.Appender;
using log4net.Util;
namespace Swartz.Logging
{
public class SwartzFileAppender : RollingFileAppender
{
/// <summary>
/// The number of suffix attempts that will be made on each OpenFile method call.
/// </summary>
private const int Retries = 50;
/// <summary>
/// Maximum number of suffixes recorded before a cleanup happens to recycle memory.
/// </summary>
private const int MaxSuffixes = 100;
/// <summary>
/// Dictionary of already known suffixes (based on previous attempts) for a given filename.
/// </summary>
private static readonly Dictionary<string, int> Suffixes = new Dictionary<string, int>();
/// <summary>
/// Opens the log file adding an incremental suffix to the filename if required due to an openning failure (usually,
/// locking).
/// </summary>
/// <param name="fileName">The filename as specified in the configuration file.</param>
/// <param name="append">Boolean flag indicating weather the log file should be appended if it already exists.</param>
protected override void OpenFile(string fileName, bool append)
{
lock (this)
{
var fileOpened = false;
var completeFilename = GetNextOutputFileName(fileName);
var currentFilename = fileName;
if (Suffixes.Count > MaxSuffixes)
{
Suffixes.Clear();
}
if (!Suffixes.ContainsKey(completeFilename))
{
Suffixes[completeFilename] = 0;
}
var newSuffix = Suffixes[completeFilename];
for (var i = 1; !fileOpened && i <= Retries; i++)
{
try
{
if (newSuffix > 0)
{
currentFilename = $"{fileName}-{newSuffix}";
}
BaseOpenFile(currentFilename, append);
fileOpened = true;
}
catch
{
newSuffix = Suffixes[completeFilename] + 1;
LogLog.Error(typeof(SwartzFileAppender),
$"SwartzFileAppender: Failed to open [{fileName}]. Attempting [{fileName}-{newSuffix}] instead.");
}
}
Suffixes[completeFilename] = newSuffix;
}
}
/// <summary>
/// Calls the base class OpenFile method. Allows this method to be mocked.
/// </summary>
/// <param name="fileName">The filename as specified in the configuration file.</param>
/// <param name="append">Boolean flag indicating weather the log file should be appended if it already exists.</param>
protected virtual void BaseOpenFile(string fileName, bool append)
{
base.OpenFile(fileName, append);
}
}
}
|
{
"content_hash": "9bb44a16382ecf69b109725ea9768e0a",
"timestamp": "",
"source": "github",
"line_count": 86,
"max_line_length": 128,
"avg_line_length": 37.03488372093023,
"alnum_prop": 0.5249607535321821,
"repo_name": "tuizhis/Swartz",
"id": "fd617246e974d8245a59ac4943ec926db123c194",
"size": "3187",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Swartz.Framework/Logging/SwartzFileAppender.cs",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C#",
"bytes": "388081"
}
]
}
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Restrict ``Registry.visibility`` to a Published/Unpublished choice set."""

    # Must run after the app's initial schema migration.
    dependencies = [
        ('registry', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='registry',
            name='visibility',
            # b'' literals: this file was generated under Python 2 (note the
            # `unicode_literals` import at the top of the module).
            field=models.CharField(default=b'Unpublished', max_length=20, choices=[(b'Published', b'Published'), (b'Unpublished', b'Unpublished')]),
        ),
    ]
|
{
"content_hash": "14d5b6ed8fe6943d56bbe21b665761e3",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 148,
"avg_line_length": 26.22222222222222,
"alnum_prop": 0.6165254237288136,
"repo_name": "zacherytapp/wedding",
"id": "e90b2c93d74380e60d29ccf7c0dbcf23f2b6d50b",
"size": "496",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "weddingapp/apps/registry/migrations/0002_auto_20151102_0334.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "5256"
},
{
"name": "HTML",
"bytes": "30171786"
},
{
"name": "JavaScript",
"bytes": "15796"
},
{
"name": "Python",
"bytes": "48894"
}
]
}
|
<?php
/**
* CustomView Record Model Class
*/
class CustomView_Record_Model extends Vtiger_Base_Model {
// Constants to identify different status of the custom view
const CV_STATUS_DEFAULT = 0;
const CV_STATUS_PRIVATE = 1;
const CV_STATUS_PENDING = 2;
const CV_STATUS_PUBLIC = 3;
/**
* Function to get the Id
* @return <Number> Custom View Id
*/
public function getId() {
return $this->get('cvid');
}
/**
* Function to get the Owner Id
* @return <Number> Id of the User who created the Custom View
*/
public function getOwnerId() {
return $this->get('userid');
}
/**
* Function to get the Owner Name
* @return <String> Custom View creator User Name
*/
public function getOwnerName() {
$ownerId = $this->getOwnerId();
$entityNames = getEntityName('Users', array($ownerId));
return $entityNames[$ownerId];
}
/**
* Function to get the Module to which the record belongs
* @return Vtiger_Module_Model
*/
public function getModule() {
return $this->module;
}
/**
* Function to set the Module to which the record belongs
* @param <String> $moduleName
* @return Vtiger_Record_Model or Module Specific Record Model instance
*/
public function setModule($moduleName) {
$this->module = Vtiger_Module_Model::getInstance($moduleName);
return $this;
}
/**
* Function to set the Module to which the record belongs from the Module model instance
* @param <Vtiger_Module_Model> $module
* @return Vtiger_Record_Model or Module Specific Record Model instance
*/
public function setModuleFromInstance($module) {
$this->module = $module;
return $this;
}
/**
* Function to check if the view is marked as default
* @return <Boolean> true/false
*/
	public function isDefault() {
		// A view is "default" when the current user explicitly chose it as the
		// module's default filter (vtiger_user_module_preferences), falling
		// back to this view's own `setdefault` flag when no preference exists.
		$db = PearDatabase::getInstance();
		$userPrivilegeModel = Users_Privileges_Model::getCurrentUserPrivilegesModel();
		$result = $db->pquery('SELECT default_cvid FROM vtiger_user_module_preferences WHERE userid = ? AND tabid = ?',
				array($userPrivilegeModel->getId(), $this->getModule()->getId()));
		if($db->num_rows($result)) {
			$cvId = $db->query_result($result, 0, 'default_cvid');
			// NOTE(review): strict `===` — query_result() typically yields a
			// string while getId() may hold an int; if the types differ this
			// never matches. Confirm intended semantics before changing.
			if($cvId === $this->getId()) {
				return true;
			} else {
				return false;
			}
		}
		return ($this->get('setdefault') == 1);
	}
/**
* Function to check if the view is created by the current user or is default view
* @return <Boolean> true/false
*/
public function isMine() {
$userPrivilegeModel = Users_Privileges_Model::getCurrentUserPrivilegesModel();
return ($this->get('status') == self::CV_STATUS_DEFAULT || $this->get('userid') == $userPrivilegeModel->getId());
}
/**
* Function to check if the view is approved to be Public
* @return <Boolean> true/false
*/
public function isPublic() {
return (!$this->isMine() && $this->get('status') == self::CV_STATUS_PUBLIC);
}
/**
* Function to check if the view is marked as Private
* @return <Boolean> true/false
*/
public function isPrivate() {
return ($this->get('status') == self::CV_STATUS_PRIVATE);
}
/**
* Function to check if the view is requested to be Public and is awaiting for Approval
* @return <Boolean> true/false
*/
public function isPending() {
return (!$this->isMine() && $this->get('status') == self::CV_STATUS_PENDING);
}
/**
* Function to check if the view is created by one of the users, who is below the current user in the role hierarchy
* @return <Boolean> true/false
*/
public function isOthers() {
return (!$this->isMine() && $this->get('status') != self::CV_STATUS_PUBLIC);
}
/**
* Function which checks if a view is set to Public by the user which may/may not be approved.
* @return <Boolean> true/false
*/
public function isSetPublic() {
return ($this->get('status') == self::CV_STATUS_PUBLIC || $this->get('status') == self::CV_STATUS_PENDING);
}
public function isEditable() {
if($this->get('viewname') == 'All') {
return false;
}
$currentUser = Users_Record_Model::getCurrentUserModel();
if($currentUser->isAdminUser()) {
return true;
}
if($this->isMine() || $this->isOthers()) {
return true;
}
return false;
}
public function isDeletable() {
return $this->isEditable();
}
/**
* Function which provides the records for the current view
* @param <Boolean> $skipRecords - List of the RecordIds to be skipped
* @return <Array> List of RecordsIds
*/
	public function getRecordIds($skipRecords=false, $module= false) {
		// Build and run the list-view query for this filter and return the
		// matching record ids. $skipRecords optionally excludes a set of ids;
		// $module == 'RecycleBin' flips the query to deleted records.
		$db = PearDatabase::getInstance();
		$cvId = $this->getId();
		$moduleModel = $this->getModule();
		$moduleName = $moduleModel->get('name');
		$baseTableName = $moduleModel->get('basetable');
		$baseTableId = $moduleModel->get('basetableid');
		$listViewModel = Vtiger_ListView_Model::getInstance($moduleName, $cvId);
		$queryGenerator = $listViewModel->get('query_generator');
		// Apply the quick-search condition carried on this model, if any.
		$searchKey = $this->get('search_key');
		$searchValue = $this->get('search_value');
		$operator = $this->get('operator');
		if(!empty($searchValue)) {
			$queryGenerator->addUserSearchConditions(array('search_field' => $searchKey, 'search_text' => $searchValue, 'operator' => $operator));
		}
		// Merge advanced list-search conditions; glue with AND only when both
		// quick-search fields and transformed conditions are present.
		$searchParams = $this->get('search_params');
		if(empty($searchParams)) {
			$searchParams = array();
		}
		$transformedSearchParams = Vtiger_Util_Helper::transferListSearchParamsToFilterCondition($searchParams, $moduleModel);
		$glue = "";
		if(count($queryGenerator->getWhereFields()) > 0 && (count($transformedSearchParams)) > 0) {
			$glue = QueryGenerator::$AND;
		}
		$queryGenerator->parseAdvFilterList($transformedSearchParams, $glue);
		$listQuery = $queryGenerator->getQuery();
		// Recycle bin shows deleted rows, so invert the deleted flag filter.
		if($module == 'RecycleBin'){
			$listQuery = preg_replace("/vtiger_crmentity.deleted\s*=\s*0/i", 'vtiger_crmentity.deleted = 1', $listQuery);
		}
		// NOTE(review): $skipRecords values are interpolated directly into
		// SQL — callers are expected to pass internal numeric ids only.
		if($skipRecords && !empty($skipRecords) && is_array($skipRecords) && count($skipRecords) > 0) {
			$listQuery .= ' AND '.$baseTableName.'.'.$baseTableId.' NOT IN ('. implode(',', $skipRecords) .')';
		}
		$result = $db->query($listQuery);
		$noOfRecords = $db->num_rows($result);
		$recordIds = array();
		for($i=0; $i<$noOfRecords; ++$i) {
			$recordIds[] = $db->query_result($result, $i, $baseTableId);
		}
		return $recordIds;
	}
/**
* Function to save the custom view record
*/
public function save() {
$db = PearDatabase::getInstance();
$currentUserModel = Users_Record_Model::getCurrentUserModel();
$cvId = $this->getId();
$moduleModel = $this->getModule();
$moduleName = $moduleModel->get('name');
$viewName = $this->get('viewname');
$setDefault = intval($this->get('setdefault'));
$setMetrics = intval($this->get('setmetrics'));
$status = $this->get('status');
if($status == self::CV_STATUS_PENDING) {
if($currentUserModel->isAdminUser()) {
$status = self::CV_STATUS_PUBLIC;
}
}
if(!$cvId) {
$cvId = $db->getUniqueID("vtiger_customview");
$this->set('cvid', $cvId);
$sql = 'INSERT INTO vtiger_customview(cvid, viewname, setdefault, setmetrics, entitytype, status, userid) VALUES (?,?,?,?,?,?,?)';
$params = array($cvId, $viewName, $setDefault, $setMetrics, $moduleName, $status, $currentUserModel->getId());
$db->pquery($sql, $params);
} else {
$sql = 'UPDATE vtiger_customview SET viewname=?, setdefault=?, setmetrics=?, status=? WHERE cvid=?';
$params = array($viewName, $setDefault, $setMetrics, $status, $cvId);
$db->pquery($sql, $params);
$db->pquery('DELETE FROM vtiger_cvcolumnlist WHERE cvid = ?', array($cvId));
$db->pquery('DELETE FROM vtiger_cvstdfilter WHERE cvid = ?', array($cvId));
$db->pquery('DELETE FROM vtiger_cvadvfilter WHERE cvid = ?', array($cvId));
$db->pquery('DELETE FROM vtiger_cvadvfilter_grouping WHERE cvid = ?', array($cvId));
}
if($setDefault == 1) {
$query = 'SELECT 1 FROM vtiger_user_module_preferences WHERE userid = ? AND tabid = ?';
$queryParams = array($currentUserModel->getId(), $moduleModel->getId());
$queryResult = $db->pquery($query, $queryParams);
if($db->num_rows($queryResult) > 0) {
$updateSql = 'UPDATE vtiger_user_module_preferences SET default_cvid = ? WHERE userid = ? AND tabid = ?';
$updateParams = array($cvId, $currentUserModel->getId(), $moduleModel->getId());
$db->pquery($updateSql, $updateParams);
} else {
$insertSql = 'INSERT INTO vtiger_user_module_preferences(userid, tabid, default_cvid) VALUES (?,?,?)';
$insertParams = array($currentUserModel->getId(), $moduleModel->getId(), $cvId);
$db->pquery($insertSql, $insertParams);
}
} else {
$deleteSql = 'DELETE FROM vtiger_user_module_preferences WHERE userid = ? AND tabid = ? AND default_cvid = ?';
$deleteParams = array($currentUserModel->getId(), $moduleModel->getId(), $cvId);
$db->pquery($deleteSql, $deleteParams);
}
$selectedColumnsList = $this->get('columnslist');
if(!empty($selectedColumnsList)) {
$noOfColumns = count($selectedColumnsList);
for($i=0; $i<$noOfColumns; $i++) {
$columnSql = 'INSERT INTO vtiger_cvcolumnlist (cvid, columnindex, columnname) VALUES (?,?,?)';
$columnParams = array($cvId, $i, $selectedColumnsList[$i]);
$db->pquery($columnSql, $columnParams);
}
} else {
//no fields were sent so add default All filter columns
$defaultModuleFilter = $db->pquery('SELECT cvid FROM vtiger_customview WHERE setdefault = 1 AND entitytype = ?',
array($moduleName));
$defaultViewId = $db->query_result($defaultModuleFilter, 0, 'cvid');
//User Specific filterId
if(empty($defaultViewId)) {
$userDefaultModuleFilter = $db->pquery('SELECT default_cvid FROM vtiger_user_module_preferences WHERE
userid = ? AND tabid = ?', array($currentUserModel->id, $moduleModel->getId()));
$defaultViewId = $db->query_result($userDefaultModuleFilter, 0, 'default_cvid');
}
//First filterid of module
if(empty($defaultViewId)) {
$firstDefaultFilter = $db->pquery('SELECT cvid FROM vtiger_customview WHERE entitytype = ?', array($moduleName));
$defaultViewId = $db->query_result($firstDefaultFilter, 0, 'cvid');
}
// Get the defaults filters columnlist
$columnSql = "INSERT INTO vtiger_cvcolumnlist (cvid, columnindex, columnname)
SELECT ?, columnindex, columnname FROM vtiger_cvcolumnlist WHERE cvid = ?";
$db->pquery($columnSql, array($cvId, $defaultViewId));
}
$stdFilterList = $this->get('stdfilterlist');
if(!empty($stdFilterList) && !empty($stdFilterList['columnname'])) {
$stdFilterSql = 'INSERT INTO vtiger_cvstdfilter(cvid,columnname,stdfilter,startdate,enddate) VALUES (?,?,?,?,?)';
$stdFilterParams = array($cvId, $stdFilterList['columnname'], $stdFilterList['stdfilter'],
$db->formatDate($stdFilterList['startdate'], true),
$db->formatDate($stdFilterList['enddate'], true));
$db->pquery($stdFilterSql, $stdFilterParams);
}
$advFilterList = $this->get('advfilterlist');
if(!empty($advFilterList)) {
foreach($advFilterList as $groupIndex => $groupInfo) {
if(empty($groupInfo)) continue;
$groupColumns = $groupInfo['columns'];
$groupCondition = $groupInfo['condition'];
foreach($groupColumns as $columnIndex => $columnCondition) {
if(empty($columnCondition)) continue;
$advFilterColumn = $columnCondition['columnname'];
$advFilterComparator = $columnCondition['comparator'];
$advFitlerValue = $columnCondition['value'];
$advFilterColumnCondition = $columnCondition['column_condition'];
$columnInfo = explode(":",$advFilterColumn);
$fieldName = $columnInfo[2];
$fieldModel = $moduleModel->getField($fieldName);
//Required if Events module fields are selected for the condition
if(!$fieldModel) {
$modulename = $moduleModel->get('name');
if($modulename == 'Calendar') {
$eventModuleModel = Vtiger_Module_model::getInstance('Events');
$fieldModel = $eventModuleModel->getField($fieldName);
}
}
$fieldType = $fieldModel->getFieldDataType();
if($fieldType == 'currency') {
if($fieldModel->get('uitype') == '72') {
// Some of the currency fields like Unit Price, Totoal , Sub-total - doesn't need currency conversion during save
$advFitlerValue = CurrencyField::convertToDBFormat($advFitlerValue, null, true);
} else {
$advFitlerValue = CurrencyField::convertToDBFormat($advFitlerValue);
}
}
$temp_val = explode(",",$advFitlerValue);
if(($fieldType == 'date' || ($fieldType == 'time' && $fieldName != 'time_start' && $fieldName != 'time_end') || ($fieldType == 'datetime')) && ($fieldType != '' && $advFitlerValue != '' )) {
$val = Array();
for($x=0;$x<count($temp_val);$x++) {
//if date and time given then we have to convert the date and
//leave the time as it is, if date only given then temp_time
//value will be empty
if(trim($temp_val[$x]) != '') {
$date = new DateTimeField(trim($temp_val[$x]));
if($fieldType == 'date') {
$val[$x] = DateTimeField::convertToDBFormat(
trim($temp_val[$x]));
} elseif($fieldType == 'datetime') {
$val[$x] = $date->getDBInsertDateTimeValue();
} else {
$val[$x] = $date->getDBInsertTimeValue();
}
}
}
$advFitlerValue = implode(",",$val);
}
$advCriteriaSql = 'INSERT INTO vtiger_cvadvfilter(cvid,columnindex,columnname,comparator,value,groupid,column_condition)
values (?,?,?,?,?,?,?)';
$advCriteriaParams = array($cvId, $columnIndex, $advFilterColumn, $advFilterComparator, $advFitlerValue, $groupIndex, $advFilterColumnCondition);
$db->pquery($advCriteriaSql, $advCriteriaParams);
// Update the condition expression for the group to which the condition column belongs
$groupConditionExpression = '';
if(!empty($advFilterList[$groupIndex]["conditionexpression"])) {
$groupConditionExpression = $advFilterList[$groupIndex]["conditionexpression"];
}
$groupConditionExpression = $groupConditionExpression .' '. $columnIndex .' '. $advFilterColumnCondition;
$advFilterList[$groupIndex]["conditionexpression"] = $groupConditionExpression;
}
$groupConditionExpression = $advFilterList[$groupIndex]["conditionexpression"];
if(empty($groupConditionExpression)) continue; // Case when the group doesn't have any column criteria
$advGroupSql = 'INSERT INTO vtiger_cvadvfilter_grouping(groupid,cvid,group_condition,condition_expression) VALUES (?,?,?,?)';
$advGroupParams = array($groupIndex, $cvId, $groupCondition, $groupConditionExpression);
$db->pquery($advGroupSql, $advGroupParams);
}
}
}
/**
* Function to delete the custom view record
*/
/**
 * Function to delete the custom view record.
 * Removes the filter definition plus every dependent row keyed by cvid,
 * and the mini-list dashboard widgets that reference the filter.
 */
public function delete() {
	$db = PearDatabase::getInstance();
	$cvId = $this->getId();
	// All of these tables reference the filter through a cvid column.
	$dependentTables = array(
		'vtiger_customview',
		'vtiger_cvcolumnlist',
		'vtiger_cvstdfilter',
		'vtiger_cvadvfilter',
		'vtiger_cvadvfilter_grouping',
	);
	foreach ($dependentTables as $tableName) {
		$db->pquery("DELETE FROM $tableName WHERE cvid = ?", array($cvId));
	}
	// Dashboard widgets reference the filter through filterid instead of cvid.
	$db->pquery('DELETE FROM vtiger_module_dashboard_widgets WHERE filterid = ?', array($cvId));
}
/**
* Function to get the list of selected fields for the current custom view
* @return <Array> List of Field Column Names
*/
/**
 * Function to get the list of selected fields for the current custom view.
 * @return <Array> columnindex => columnname, ordered by columnindex
 */
public function getSelectedFields() {
	$db = PearDatabase::getInstance();
	$sql = 'SELECT vtiger_cvcolumnlist.* FROM vtiger_cvcolumnlist
			INNER JOIN vtiger_customview ON vtiger_customview.cvid = vtiger_cvcolumnlist.cvid
			WHERE vtiger_customview.cvid = ? ORDER BY vtiger_cvcolumnlist.columnindex';
	$result = $db->pquery($sql, array($this->getId()));
	$selectedFields = array();
	// Fetch row by row instead of indexing; keyed by the stored column index.
	while ($row = $db->fetch_array($result)) {
		$selectedFields[$row['columnindex']] = $row['columnname'];
	}
	return $selectedFields;
}
/**
* Function to get the Standard filter condition for the current custom view
* @return <Array> Standard filter condition
*/
/**
 * Function to get the Standard filter condition for the current custom view.
 * @return <Array> Standard filter condition; empty array when the view has
 *                 no id or no standard-filter row.
 */
public function getStandardCriteria() {
	$db = PearDatabase::getInstance();
	$cvId = $this->getId();
	// Fix: initialize the result so an array is always returned. Previously,
	// when no vtiger_cvstdfilter row existed, an undefined variable (null)
	// was returned at the end, contradicting the documented <Array> return.
	$stdfilterlist = array();
	if (empty($cvId)) {
		return $stdfilterlist;
	}
	$query = 'SELECT vtiger_cvstdfilter.* FROM vtiger_cvstdfilter
			INNER JOIN vtiger_customview ON vtiger_customview.cvid = vtiger_cvstdfilter.cvid
			WHERE vtiger_cvstdfilter.cvid = ?';
	$result = $db->pquery($query, array($cvId));
	$stdfilterrow = $db->fetch_array($result);
	if (!empty($stdfilterrow)) {
		$stdfilterlist["columnname"] = $stdfilterrow["columnname"];
		$stdfilterlist["stdfilter"] = $stdfilterrow["stdfilter"];
		if ($stdfilterrow["stdfilter"] == "custom" || $stdfilterrow["stdfilter"] == "") {
			// Custom range: expose the stored dates (when valid) in display format.
			if ($stdfilterrow["startdate"] != "0000-00-00" && $stdfilterrow["startdate"] != "") {
				$startDateTime = new DateTimeField($stdfilterrow["startdate"] . ' ' . date('H:i:s'));
				$stdfilterlist["startdate"] = $startDateTime->getDisplayDate();
			}
			if ($stdfilterrow["enddate"] != "0000-00-00" && $stdfilterrow["enddate"] != "") {
				$endDateTime = new DateTimeField($stdfilterrow["enddate"] . ' ' . date('H:i:s'));
				$stdfilterlist["enddate"] = $endDateTime->getDisplayDate();
			}
		} else {
			// Named duration (today, thisweek, ...): derive the range now.
			$datefilter = self::getDateForStdFilterBytype($stdfilterrow["stdfilter"]);
			$startDateTime = new DateTimeField($datefilter[0] . ' ' . date('H:i:s'));
			$stdfilterlist["startdate"] = $startDateTime->getDisplayDate();
			$endDateTime = new DateTimeField($datefilter[1] . ' ' . date('H:i:s'));
			$stdfilterlist["enddate"] = $endDateTime->getDisplayDate();
		}
	}
	return $stdfilterlist;
}
/**
* Function to get the list of advanced filter conditions for the current custom view
* @return <Array> - All the advanced filter conditions for the custom view, grouped by the condition grouping
*/
/**
 * Function to get the list of advanced filter conditions for the current custom view.
 * @return <Array> groupid => array('columns' => list of criteria rows,
 *                 'condition' => and/or joining this group to the next).
 *                 Values are converted from DB to display format for date-like fields.
 */
public function getAdvancedCriteria() {
	$db = PearDatabase::getInstance();
	$default_charset = vglobal('default_charset');
	$cvId = $this->getId();
	$advft_criteria = array();
	if(empty($cvId)) {
		return $advft_criteria;
	}
	// Iterate condition groups in order; each group row carries the and/or
	// condition that links it to the following group.
	$sql = 'SELECT * FROM vtiger_cvadvfilter_grouping WHERE cvid = ? ORDER BY groupid';
	$groupsresult = $db->pquery($sql, array($this->getId()));
	$i = 1; // number of groups processed so far (used to blank the last group's condition)
	$j = 0; // running column counter across ALL groups (keys the 'columns' arrays)
	while ($relcriteriagroup = $db->fetch_array($groupsresult)) {
		$groupId = $relcriteriagroup["groupid"];
		$groupCondition = $relcriteriagroup["group_condition"];
		$ssql = 'select vtiger_cvadvfilter.* from vtiger_customview
				inner join vtiger_cvadvfilter on vtiger_cvadvfilter.cvid = vtiger_customview.cvid
				left join vtiger_cvadvfilter_grouping on vtiger_cvadvfilter.cvid = vtiger_cvadvfilter_grouping.cvid
				and vtiger_cvadvfilter.groupid = vtiger_cvadvfilter_grouping.groupid';
		$ssql.= " where vtiger_customview.cvid = ? AND vtiger_cvadvfilter.groupid = ? order by vtiger_cvadvfilter.columnindex";
		$result = $db->pquery($ssql, array($this->getId(), $groupId));
		$noOfColumns = $db->num_rows($result);
		if ($noOfColumns <= 0)
			continue; // group has no criteria rows — skip it entirely
		while ($relcriteriarow = $db->fetch_array($result)) {
			$criteria = array();
			$criteria['columnname'] = html_entity_decode($relcriteriarow["columnname"], ENT_QUOTES, $default_charset);
			$criteria['comparator'] = $relcriteriarow["comparator"];
			$advfilterval = html_entity_decode($relcriteriarow["value"], ENT_QUOTES, $default_charset);
			// columnname is colon-separated: [0]=table, [1]=column, ..., [4]=type letter
			// (D=date, T=time, DT=datetime — other parts presumably field/module; not used here).
			$col = explode(":", $relcriteriarow["columnname"]);
			$temp_val = explode(",", $relcriteriarow["value"]);
			// Date/time/datetime values are stored in DB format and need conversion
			// for display; event start/end times (time_start/time_end) are exempt.
			if ($col[4] == 'D' || ($col[4] == 'T' && $col[1] != 'time_start' && $col[1] != 'time_end') || ($col[4] == 'DT')) {
				$val = Array();
				for ($x = 0; $x < count($temp_val); $x++) {
					if ($col[4] == 'D') {
						/** while inserting in db for due_date it was taking date and time values also as it is
						 * date time field. We only need to take date from that value
						 */
						if($col[0] == 'vtiger_activity' && $col[1] == 'due_date'){
							$originalValue = $temp_val[$x];
							$dateTime = explode(' ',$originalValue);
							$temp_val[$x] = $dateTime[0];
						}
						$date = new DateTimeField(trim($temp_val[$x]));
						$val[$x] = $date->getDisplayDate();
					} elseif ($col[4] == 'DT') {
						// For these comparators only the date part of the stored
						// datetime is kept before conversion.
						$comparator = array('e','n','b','a');
						if(in_array($criteria['comparator'], $comparator)) {
							$originalValue = $temp_val[$x];
							$dateTime = explode(' ',$originalValue);
							$temp_val[$x] = $dateTime[0];
						}
						$date = new DateTimeField(trim($temp_val[$x]));
						$val[$x] = $date->getDisplayDateTimeValue();
					} else {
						$date = new DateTimeField(trim($temp_val[$x]));
						$val[$x] = $date->getDisplayTime();
					}
				}
				$advfilterval = implode(",", $val);
			}
			$criteria['value'] = Vtiger_Util_Helper::toSafeHTML(decode_html($advfilterval));
			$criteria['column_condition'] = $relcriteriarow["column_condition"];
			$groupId = $relcriteriarow['groupid'];
			$advft_criteria[$groupId]['columns'][$j] = $criteria;
			$advft_criteria[$groupId]['condition'] = $groupCondition;
			$j++;
		}
		// The last column of a group carries no trailing and/or condition.
		if (!empty($advft_criteria[$groupId]['columns'][$j - 1]['column_condition'])) {
			$advft_criteria[$groupId]['columns'][$j - 1]['column_condition'] = '';
		}
		$i++;
	}
	// Clear the condition (and/or) for last group, if any.
	if (!empty($advft_criteria[$i - 1]['condition']))
		$advft_criteria[$i - 1]['condition'] = '';
	return $advft_criteria;
}
/**
* Function returns standard filter sql
* @return <String>
*/
/**
 * Function returns standard filter sql, delegating to the legacy CustomView helper.
 * @return <String>
 */
public function getCVStdFilterSQL() {
	$legacyCustomView = new CustomView();
	$stdFilterSql = $legacyCustomView->getCVStdFilterSQL($this->getId());
	return $stdFilterSql;
}
/**
* Function returns Advanced filter sql
* @return <String>
*/
/**
 * Function returns Advanced filter sql, delegating to the legacy CustomView helper.
 * @return <String>
 */
public function getCVAdvFilterSQL() {
	$legacyCustomView = new CustomView();
	$advFilterSql = $legacyCustomView->getCVAdvFilterSQL($this->getId());
	return $advFilterSql;
}
/**
* Function returns approve url
* @return String - approve url
*/
/**
 * Function returns the url to create a filter for this view's module.
 * @return String - create url
 */
public function getCreateUrl() {
	$moduleName = $this->getModule()->get('name');
	return sprintf('index.php?module=CustomView&view=EditAjax&source_module=%s', $moduleName);
}
/**
* Function returns approve url
* @return String - approve url
*/
/**
 * Function returns the edit url for this filter.
 * NOTE(review): unlike the sibling url getters this one carries no
 * 'index.php?' prefix — preserved as-is since callers may append it.
 * @return String - edit url
 */
public function getEditUrl() {
	$moduleName = $this->getModule()->get('name');
	return sprintf('module=CustomView&view=EditAjax&source_module=%s&record=%s', $moduleName, $this->getId());
}
/**
* Function returns approve url
* @return String - approve url
*/
/**
 * Function returns approve url for this filter.
 * @return String - approve url
 */
public function getApproveUrl() {
	$moduleName = $this->getModule()->get('name');
	return sprintf('index.php?module=CustomView&action=Approve&sourceModule=%s&record=%s', $moduleName, $this->getId());
}
/**
* Function returns deny url
* @return String - deny url
*/
/**
 * Function returns deny url for this filter.
 * @return String - deny url
 */
public function getDenyUrl() {
	$moduleName = $this->getModule()->get('name');
	return sprintf('index.php?module=CustomView&action=Deny&sourceModule=%s&record=%s', $moduleName, $this->getId());
}
/**
* Functions returns delete url
* @return String - delete url
*/
/**
 * Functions returns delete url for this filter.
 * @return String - delete url
 */
public function getDeleteUrl() {
	$moduleName = $this->getModule()->get('name');
	return sprintf('index.php?module=CustomView&action=Delete&sourceModule=%s&record=%s', $moduleName, $this->getId());
}
/**
 * Marks this filter as approved by setting its status to public.
 */
public function approve() {
	$db = PearDatabase::getInstance();
	$sql = 'UPDATE vtiger_customview SET status = ? WHERE cvid = ?';
	$db->pquery($sql, array(self::CV_STATUS_PUBLIC, $this->getId()));
}
/**
 * Denies a pending filter by reverting its status to private.
 */
public function deny() {
	$db = PearDatabase::getInstance();
	$sql = 'UPDATE vtiger_customview SET status = ? WHERE cvid = ?';
	$db->pquery($sql, array(self::CV_STATUS_PRIVATE, $this->getId()));
}
/**
* Function to get the date values for the given type of Standard filter
* @param <String> $type
* @return <Array> - 2 date values representing the range for the given type of Standard filter
*/
/**
 * Function to get the date values for the given type of Standard filter.
 * @param <String> $type - standard filter key (e.g. 'today', 'thisfq', 'last30days')
 * @return <Array> - array(startDate, endDate) in Y-m-d format for the given
 *                   filter type; array("", "") for an unknown type.
 */
protected static function getDateForStdFilterBytype($type) {
	$today = date("Y-m-d", mktime(0, 0, 0, date("m"), date("d"), date("Y")));
	$tomorrow = date("Y-m-d", mktime(0, 0, 0, date("m"), date("d") + 1, date("Y")));
	$yesterday = date("Y-m-d", mktime(0, 0, 0, date("m"), date("d") - 1, date("Y")));
	// Month boundaries; "Y-m-t" yields the last day of the month.
	$currentmonth0 = date("Y-m-d", mktime(0, 0, 0, date("m"), "01", date("Y")));
	$currentmonth1 = date("Y-m-t");
	$lastmonth0 = date("Y-m-d", mktime(0, 0, 0, date("m") - 1, "01", date("Y")));
	$lastmonth1 = date("Y-m-t", strtotime("-1 Month"));
	$nextmonth0 = date("Y-m-d", mktime(0, 0, 0, date("m") + 1, "01", date("Y")));
	$nextmonth1 = date("Y-m-t", strtotime("+1 Month"));
	// Weeks run Sunday through Saturday.
	$lastweek0 = date("Y-m-d", strtotime("-2 week Sunday"));
	$lastweek1 = date("Y-m-d", strtotime("-1 week Saturday"));
	$thisweek0 = date("Y-m-d", strtotime("-1 week Sunday"));
	$thisweek1 = date("Y-m-d", strtotime("this Saturday"));
	$nextweek0 = date("Y-m-d", strtotime("this Sunday"));
	$nextweek1 = date("Y-m-d", strtotime("+1 week Saturday"));
	// Rolling windows are inclusive of today (hence N-1 day offsets).
	$next7days = date("Y-m-d", mktime(0, 0, 0, date("m"), date("d") + 6, date("Y")));
	$next30days = date("Y-m-d", mktime(0, 0, 0, date("m"), date("d") + 29, date("Y")));
	$next60days = date("Y-m-d", mktime(0, 0, 0, date("m"), date("d") + 59, date("Y")));
	$next90days = date("Y-m-d", mktime(0, 0, 0, date("m"), date("d") + 89, date("Y")));
	$next120days = date("Y-m-d", mktime(0, 0, 0, date("m"), date("d") + 119, date("Y")));
	$last7days = date("Y-m-d", mktime(0, 0, 0, date("m"), date("d") - 6, date("Y")));
	$last30days = date("Y-m-d", mktime(0, 0, 0, date("m"), date("d") - 29, date("Y")));
	$last60days = date("Y-m-d", mktime(0, 0, 0, date("m"), date("d") - 59, date("Y")));
	$last90days = date("Y-m-d", mktime(0, 0, 0, date("m"), date("d") - 89, date("Y")));
	$last120days = date("Y-m-d", mktime(0, 0, 0, date("m"), date("d") - 119, date("Y")));
	// Fiscal years here follow the calendar year.
	$currentFY0 = date("Y-m-d", mktime(0, 0, 0, "01", "01", date("Y")));
	$currentFY1 = date("Y-m-t", mktime(0, 0, 0, "12", date("d"), date("Y")));
	$lastFY0 = date("Y-m-d", mktime(0, 0, 0, "01", "01", date("Y") - 1));
	$lastFY1 = date("Y-m-t", mktime(0, 0, 0, "12", date("d"), date("Y") - 1));
	$nextFY0 = date("Y-m-d", mktime(0, 0, 0, "01", "01", date("Y") + 1));
	$nextFY1 = date("Y-m-t", mktime(0, 0, 0, "12", date("d"), date("Y") + 1));
	// Fiscal quarters are 4-month thirds of the year: Jan-Apr, May-Aug, Sep-Dec.
	if (date("m") <= 4) {
		$cFq = date("Y-m-d", mktime(0, 0, 0, "01", "01", date("Y")));
		$cFq1 = date("Y-m-d", mktime(0, 0, 0, "04", "30", date("Y")));
		$nFq = date("Y-m-d", mktime(0, 0, 0, "05", "01", date("Y")));
		$nFq1 = date("Y-m-d", mktime(0, 0, 0, "08", "31", date("Y")));
		$pFq = date("Y-m-d", mktime(0, 0, 0, "09", "01", date("Y") - 1));
		$pFq1 = date("Y-m-d", mktime(0, 0, 0, "12", "31", date("Y") - 1));
	} else if (date("m") > 4 and date("m") <= 8) {
		$pFq = date("Y-m-d", mktime(0, 0, 0, "01", "01", date("Y")));
		$pFq1 = date("Y-m-d", mktime(0, 0, 0, "04", "30", date("Y")));
		$cFq = date("Y-m-d", mktime(0, 0, 0, "05", "01", date("Y")));
		$cFq1 = date("Y-m-d", mktime(0, 0, 0, "08", "31", date("Y")));
		$nFq = date("Y-m-d", mktime(0, 0, 0, "09", "01", date("Y")));
		$nFq1 = date("Y-m-d", mktime(0, 0, 0, "12", "31", date("Y")));
	} else {
		$nFq = date("Y-m-d", mktime(0, 0, 0, "01", "01", date("Y") + 1));
		$nFq1 = date("Y-m-d", mktime(0, 0, 0, "04", "30", date("Y") + 1));
		$pFq = date("Y-m-d", mktime(0, 0, 0, "05", "01", date("Y")));
		$pFq1 = date("Y-m-d", mktime(0, 0, 0, "08", "31", date("Y")));
		$cFq = date("Y-m-d", mktime(0, 0, 0, "09", "01", date("Y")));
		$cFq1 = date("Y-m-d", mktime(0, 0, 0, "12", "31", date("Y")));
	}
	// Single lookup table in place of the long elseif chain.
	$rangeByType = array(
		'today'       => array($today, $today),
		'yesterday'   => array($yesterday, $yesterday),
		'tomorrow'    => array($tomorrow, $tomorrow),
		'thisweek'    => array($thisweek0, $thisweek1),
		'lastweek'    => array($lastweek0, $lastweek1),
		'nextweek'    => array($nextweek0, $nextweek1),
		'thismonth'   => array($currentmonth0, $currentmonth1),
		'lastmonth'   => array($lastmonth0, $lastmonth1),
		'nextmonth'   => array($nextmonth0, $nextmonth1),
		'next7days'   => array($today, $next7days),
		'next30days'  => array($today, $next30days),
		'next60days'  => array($today, $next60days),
		'next90days'  => array($today, $next90days),
		'next120days' => array($today, $next120days),
		'last7days'   => array($last7days, $today),
		'last30days'  => array($last30days, $today),
		'last60days'  => array($last60days, $today),
		'last90days'  => array($last90days, $today),
		'last120days' => array($last120days, $today),
		'thisfy'      => array($currentFY0, $currentFY1),
		'prevfy'      => array($lastFY0, $lastFY1),
		'nextfy'      => array($nextFY0, $nextFY1),
		'nextfq'      => array($nFq, $nFq1),
		'prevfq'      => array($pFq, $pFq1),
		'thisfq'      => array($cFq, $cFq1),
	);
	if (isset($rangeByType[$type])) {
		return $rangeByType[$type];
	}
	// Unknown type: empty boundaries, matching the original fallback.
	return array("", "");
}
/**
* Function to get all the date filter type informations
* @return <Array>
*/
/**
 * Function to get all the date filter type informations.
 * @return <Array> filter key => array('label' => i18n key,
 *                 'startdate' => Y-m-d, 'enddate' => Y-m-d)
 */
public static function getDateFilterTypes() {
	// i18n label per filter key; order here defines display order.
	$labelByType = array(
		'custom' => 'LBL_CUSTOM',
		'prevfy' => 'LBL_PREVIOUS_FY',
		'thisfy' => 'LBL_CURRENT_FY',
		'nextfy' => 'LBL_NEXT_FY',
		'prevfq' => 'LBL_PREVIOUS_FQ',
		'thisfq' => 'LBL_CURRENT_FQ',
		'nextfq' => 'LBL_NEXT_FQ',
		'yesterday' => 'LBL_YESTERDAY',
		'today' => 'LBL_TODAY',
		'tomorrow' => 'LBL_TOMORROW',
		'lastweek' => 'LBL_LAST_WEEK',
		'thisweek' => 'LBL_CURRENT_WEEK',
		'nextweek' => 'LBL_NEXT_WEEK',
		'lastmonth' => 'LBL_LAST_MONTH',
		'thismonth' => 'LBL_CURRENT_MONTH',
		'nextmonth' => 'LBL_NEXT_MONTH',
		'last7days' => 'LBL_LAST_7_DAYS',
		'last30days' => 'LBL_LAST_30_DAYS',
		'last60days' => 'LBL_LAST_60_DAYS',
		'last90days' => 'LBL_LAST_90_DAYS',
		'last120days' => 'LBL_LAST_120_DAYS',
		'next30days' => 'LBL_NEXT_30_DAYS',
		'next60days' => 'LBL_NEXT_60_DAYS',
		'next90days' => 'LBL_NEXT_90_DAYS',
		'next120days' => 'LBL_NEXT_120_DAYS',
	);
	$dateFilters = array();
	foreach ($labelByType as $filterType => $label) {
		// 'custom' (and any unknown key) yields empty boundary dates.
		$range = self::getDateForStdFilterBytype($filterType);
		$dateFilters[$filterType] = array(
			'label' => $label,
			'startdate' => $range[0],
			'enddate' => $range[1],
		);
	}
	return $dateFilters;
}
/**
* Function to get all the supported advanced filter operations
* @return <Array>
*/
/**
 * Function to get all the supported advanced filter operations.
 * @return <Array> comparator code => i18n label key
 */
public static function getAdvancedFilterOptions() {
	$comparatorLabels = array(
		'e' => 'LBL_EQUALS',
		'n' => 'LBL_NOT_EQUAL_TO',
		's' => 'LBL_STARTS_WITH',
		'ew' => 'LBL_ENDS_WITH',
		'c' => 'LBL_CONTAINS',
		'k' => 'LBL_DOES_NOT_CONTAIN',
		'l' => 'LBL_LESS_THAN',
		'g' => 'LBL_GREATER_THAN',
		'm' => 'LBL_LESS_THAN_OR_EQUAL',
		'h' => 'LBL_GREATER_OR_EQUAL',
		'b' => 'LBL_BEFORE',
		'a' => 'LBL_AFTER',
		'bw' => 'LBL_BETWEEN',
	);
	return $comparatorLabels;
}
/**
* Function to get the advanced filter option names by Field type
* @return <Array>
*/
/**
 * Function to get the advanced filter option names by Field type.
 * @return <Array> field type letter => list of comparator codes allowed for it
 *                 (codes are keys of getAdvancedFilterOptions()).
 */
public static function getAdvancedFilterOpsByFieldType() {
	$opsByType = array(
		'V'  => array('e','n','s','ew','c','k'),
		'N'  => array('e','n','l','g','m','h'),
		'T'  => array('e','n','l','g','m','h','bw','b','a'),
		'I'  => array('e','n','l','g','m','h'),
		'C'  => array('e','n'),
		'D'  => array('e','n','bw','b','a'),
		'DT' => array('e','n','bw','b','a'),
		'NN' => array('e','n','l','g','m','h'),
		'E'  => array('e','n','s','ew','c','k'),
	);
	return $opsByType;
}
/**
* Function to get all the accessible Custom Views, for a given module if specified
* @param <String> $moduleName
* @return <Array> - Array of Vtiger_CustomView_Record models
*/
/**
 * Function to get all the accessible Custom Views, for a given module if specified.
 * @param <String> $moduleName - restrict to this entity type; all modules when empty
 * @return <Array> - Array of CustomView record models
 */
public static function getAll($moduleName='') {
	$db = PearDatabase::getInstance();
	$userPrivilegeModel = Users_Privileges_Model::getCurrentUserPrivilegesModel();
	$currentUser = Users_Record_Model::getCurrentUserModel();
	// Build WHERE conditions incrementally. Fix: the original appended
	// " AND (...)" for non-admin users even when no WHERE clause had been
	// started (empty $moduleName), producing invalid SQL.
	$conditions = array();
	$params = array();
	if(!empty($moduleName)) {
		$conditions[] = 'entitytype = ?';
		$params[] = $moduleName;
	}
	if(!$userPrivilegeModel->isAdminUser()) {
		// Non-admins see their own filters, public (0) / default (3) filters,
		// and filters owned by users below them in the role hierarchy.
		// Fix: bind the role sequence as a parameter instead of concatenating
		// it into the SQL string.
		$conditions[] = "( vtiger_customview.userid = ? OR vtiger_customview.status = 0 OR vtiger_customview.status = 3
						OR vtiger_customview.userid IN (
							SELECT vtiger_user2role.userid FROM vtiger_user2role
							INNER JOIN vtiger_users ON vtiger_users.id = vtiger_user2role.userid
							INNER JOIN vtiger_role ON vtiger_role.roleid = vtiger_user2role.roleid
							WHERE vtiger_role.parentrole LIKE ?)
						)";
		$params[] = $currentUser->getId();
		$params[] = $userPrivilegeModel->get('parent_role_seq') . '::%';
	}
	$sql = 'SELECT * FROM vtiger_customview';
	if(!empty($conditions)) {
		$sql .= ' WHERE ' . implode(' AND ', $conditions);
	}
	$result = $db->pquery($sql, $params);
	$noOfCVs = $db->num_rows($result);
	$customViews = array();
	for ($i=0; $i<$noOfCVs; ++$i) {
		$row = $db->query_result_rowdata($result, $i);
		$customView = new self();
		// Truncate long view names for display (40-character limit).
		if(strlen(decode_html($row['viewname'])) > 40) {
			$row['viewname'] = substr(decode_html($row['viewname']), 0, 36).'...';
		}
		$customViews[] = $customView->setData($row)->setModule($row['entitytype']);
	}
	return $customViews;
}
/**
* Function to get the instance of Custom View module, given custom view id
* @param <Integer> $cvId
* @return CustomView_Record_Model instance, if exists. Null otherwise
*/
/**
 * Function to get the instance of Custom View model, given custom view id.
 * @param <Integer> $cvId
 * @return self instance if the id exists, null otherwise
 */
public static function getInstanceById($cvId) {
	$db = PearDatabase::getInstance();
	$result = $db->pquery('SELECT * FROM vtiger_customview WHERE cvid = ?', array($cvId));
	if($db->num_rows($result) < 1) {
		return null;
	}
	$row = $db->query_result_rowdata($result, 0);
	$instance = new self();
	return $instance->setData($row)->setModule($row['entitytype']);
}
/**
* Function to get all the custom views, of a given module if specified, grouped by their status
* @param <String> $moduleName
* @return <Array> - Associative array of Status label to an array of Vtiger_CustomView_Record models
*/
/**
 * Function to get all the custom views, of a given module if specified, grouped by their status.
 * @param <String> $moduleName
 * @return <Array> - 'Mine' / 'Public' / 'Pending' / 'Others' => list of view models
 */
public static function getAllByGroup($moduleName='') {
	$groupedCustomViews = array();
	foreach (self::getAll($moduleName) as $customView) {
		// First matching status wins, in the same precedence as before.
		if ($customView->isMine()) {
			$groupKey = 'Mine';
		} elseif ($customView->isPublic()) {
			$groupKey = 'Public';
		} elseif ($customView->isPending()) {
			$groupKey = 'Pending';
		} else {
			$groupKey = 'Others';
		}
		$groupedCustomViews[$groupKey][] = $customView;
	}
	return $groupedCustomViews;
}
/**
* Function to get Clean instance of this record
* @return self
*/
/**
 * Function to get a clean (empty) instance of this record model.
 * @return self
 */
public static function getCleanInstance() {
	$cleanInstance = new self();
	return $cleanInstance;
}
/**
* function to check duplicates from database
* @param <type> $viewName
* @param <type> module name entity type in database
* @return <boolean> true/false
*/
/**
 * Checks whether another filter with this view name already exists for the module.
 * The current record (when it has an id) is excluded from the check.
 * @return <boolean> true when a duplicate view name exists, false otherwise
 */
public function checkDuplicate() {
	$db = PearDatabase::getInstance();
	$sql = "SELECT 1 FROM vtiger_customview WHERE viewname = ? AND entitytype = ?";
	$params = array($this->get('viewname'), $this->getModule()->getName());
	$recordId = $this->getId();
	if ($recordId) {
		// Editing an existing filter: don't count the record itself.
		$sql .= " AND cvid != ?";
		$params[] = $recordId;
	}
	$result = $db->pquery($sql, $params);
	return $db->num_rows($result) > 0;
}
/**
* Function used to transform the older filter condition to suit newer filters.
* The newer filters have only two groups one with ALL(AND) condition between each
* filter and other with ANY(OR) condition, this functions tranforms the older
* filter with 'AND' condition between filters of a group and will be placed under
* match ALL conditions group and the rest of it will be placed under match Any group.
* @return <Array>
*/
/**
 * Function used to transform the older filter condition to suit newer filters.
 * The newer filters have only two groups: one with ALL(AND) condition between
 * each condition, and one with ANY(OR). A legacy group whose internal
 * conditions are all 'and' lands in the ALL group; everything else in ANY.
 * The (transformed) standard filter is merged into the ALL group.
 * @return <Array> - index 1 => ALL group, index 2 => ANY group;
 *                   each is array('columns' => ..., 'condition' => ...)
 */
function transformToNewAdvancedFilter() {
	$standardFilter = $this->transformStandardFilter();
	$advancedFilter = $this->getAdvancedCriteria();
	$allGroupColumns = $anyGroupColumns = array();
	foreach($advancedFilter as $index=>$group) {
		$columns = $group['columns'];
		// Count the and/or joiners inside this legacy group.
		$and = $or = 0;
		$block = $group['condition'];
		if(count($columns) != 1) {
			foreach($columns as $column) {
				if($column['column_condition'] == 'and') {
					++$and;
				} else {
					++$or;
				}
			}
			// All joiners are 'and' (the last column's joiner is blank,
			// hence count-1) => the whole group is an ALL group.
			if($and == count($columns)-1 && count($columns) != 1) {
				$allGroupColumns = array_merge($allGroupColumns, $group['columns']);
			} else {
				$anyGroupColumns = array_merge($anyGroupColumns, $group['columns']);
			}
		} else if($block == 'and' || $index == 1) {
			// Single-column group: ALL when it joins with 'and' or is the first group.
			$allGroupColumns = array_merge($allGroupColumns, $group['columns']);
		} else {
			$anyGroupColumns = array_merge($anyGroupColumns, $group['columns']);
		}
	}
	// Standard (date-range) filter, if present, always belongs to the ALL group.
	if($standardFilter){
		$allGroupColumns = array_merge($allGroupColumns,$standardFilter);
	}
	$transformedAdvancedCondition = array();
	$transformedAdvancedCondition[1] = array('columns' => $allGroupColumns, 'condition' => 'and');
	$transformedAdvancedCondition[2] = array('columns' => $anyGroupColumns, 'condition' => '');
	return $transformedAdvancedCondition;
}
/*
* Function used to tranform the standard filter as like as advanced filter format
* @returns array of tranformed standard filter
*/
/**
 * Function used to transform the standard filter into the advanced filter format.
 * Created/modified time and activity start-date columns become datetime (:DT)
 * ranges; all other columns become plain date (:D) ranges.
 * @return <Array> single-element array with the transformed condition,
 *                 or false when the view has no standard filter.
 */
public function transformStandardFilter(){
	$standardFilter = $this->getStandardCriteria();
	if(empty($standardFilter)){
		return false;
	}
	$tranformedStandardFilter = array();
	// Standard filters are always a between ('bw') range.
	$tranformedStandardFilter['comparator'] = 'bw';
	$fields = explode(':',$standardFilter['columnname']);
	if($fields[1] == 'createdtime' || $fields[1] == 'modifiedtime' ||($fields[0] == 'vtiger_activity' && $fields[1] == 'date_start')){
		$tranformedStandardFilter['columnname'] = $standardFilter['columnname'].':DT';
		// Fix: initialize $date before appending (was an undefined-variable
		// append, raising a PHP notice).
		$date = array();
		$date[] = $standardFilter['startdate'].' 00:00:00';
		$date[] = $standardFilter['enddate'].' 00:00:00';
		$tranformedStandardFilter['value'] = implode(',',$date);
	} else{
		$tranformedStandardFilter['columnname'] = $standardFilter['columnname'].':D';
		$tranformedStandardFilter['value'] = $standardFilter['startdate'].','.$standardFilter['enddate'];
	}
	return array($tranformedStandardFilter);
}
/**
* Function gives default custom view for a module
* @param <String> $module
* @return <CustomView_Record_Model>
*/
/**
 * Function gives the default "All" custom view for a module.
 * Falls back to the legacy CustomView helper's default view id when no
 * filter literally named 'All' exists.
 * @param <String> $module
 * @return self instance for the resolved view id
 */
public static function getAllFilterByModule($module) {
	$db = PearDatabase::getInstance();
	$result = $db->pquery("SELECT cvid FROM vtiger_customview WHERE viewname='All' AND entitytype = ?", array($module));
	$viewId = $db->query_result($result, 0, 'cvid');
	if (!$viewId) {
		$legacyCustomView = new CustomView($module);
		$viewId = $legacyCustomView->getViewId($module);
	}
	return self::getInstanceById($viewId);
}
}
|
{
"content_hash": "6cf40cf2b389b8d464710d8be4305a8d",
"timestamp": "",
"source": "github",
"line_count": 1074,
"max_line_length": 195,
"avg_line_length": 38.01117318435754,
"alnum_prop": 0.6197334901038605,
"repo_name": "basiljose1/byjcrm",
"id": "bb697942eff1d8b4865283749d435a1b6122a9cb",
"size": "41350",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "modules/CustomView/models/Record.php",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "ASP",
"bytes": "30817"
},
{
"name": "ApacheConf",
"bytes": "1321"
},
{
"name": "Batchfile",
"bytes": "20387"
},
{
"name": "C",
"bytes": "492107"
},
{
"name": "C++",
"bytes": "18023"
},
{
"name": "CSS",
"bytes": "1199491"
},
{
"name": "CoffeeScript",
"bytes": "1232"
},
{
"name": "Groff",
"bytes": "60690"
},
{
"name": "HTML",
"bytes": "1498811"
},
{
"name": "JavaScript",
"bytes": "4770826"
},
{
"name": "Makefile",
"bytes": "8221"
},
{
"name": "PHP",
"bytes": "39287363"
},
{
"name": "Perl",
"bytes": "50950"
},
{
"name": "Ruby",
"bytes": "1074"
},
{
"name": "Shell",
"bytes": "53700"
},
{
"name": "Smarty",
"bytes": "1908263"
},
{
"name": "XSLT",
"bytes": "27654"
},
{
"name": "Yacc",
"bytes": "14820"
}
]
}
|
/**
 * threadChecker jQuery plugin.
 *
 * Periodically polls the server for new answers in a discussion thread and
 * replaces the element's HTML with the returned fragment. Polling uses
 * visibility.js (Visibility.every) so hidden tabs poll 5x slower, and Pace
 * (when present) is told to ignore the background requests.
 */
(function ($, window, document, undefined) {
    var pluginName = "threadChecker",
        defaults = {
            // Poll period in milliseconds while the tab is visible.
            'queryInterval': 30 * 1000,
            'threadId': undefined,
            'lastAnswerId': undefined,
            // Extra options merged into the $.ajax call (e.g. url).
            'ajax': {}
        };

    function Plugin(element, options) {
        this.element = $(element);
        this.settings = $.extend({}, defaults, options);
        this._defaults = defaults;
        this._name = pluginName;
        this.intervalId = null;
        this.init();
    }

    Plugin.prototype = {
        init: function () {
            this.bindListeners();
        },
        bindListeners: function () {
            var _this = this;
            var interval = this.settings.queryInterval;
            // Visibility.every(visible, hidden, cb): run cb every `interval` ms
            // while the page is visible, every 5*interval while hidden.
            this.intervalId = Visibility.every(interval, 5 * interval, function () {
                if (window.Pace !== undefined) {
                    Pace.ignore(_this.query.bind(_this));
                } else {
                    _this.query();
                }
            });
        },
        query: function () {
            var _this = this;
            $.ajax($.extend({}, {
                type: 'GET',
                dataType: 'json',
                data: this.prepareQueryData(),
                success: this.processQuery.bind(this),
                statusCode: {
                    403: function () {
                        // Once '403 Forbidden' response received,
                        // user is deauthenticated. Stop flooding server.
                        // BUGFIX: the id returned by Visibility.every() is a
                        // visibility.js timer id, not a native setInterval id —
                        // it must be cancelled with Visibility.stop(), so the
                        // previous clearInterval() call never stopped the timer.
                        Visibility.stop(_this.intervalId);
                    }
                }
            }, this.settings.ajax));
        },
        prepareQueryData: function () {
            return {
                id: this.settings.threadId,
                answer_id: this.settings.lastAnswerId
            };
        },
        processQuery: function (data) {
            if (data.html) {
                this.element.html(data.html);
            }
        }
    };

    // Standard jQuery plugin bridge; at most one Plugin instance per element.
    $.fn[pluginName] = function (options) {
        this.each(function () {
            if (!$.data(this, 'plugin_' + pluginName)) {
                $.data(this, 'plugin_' + pluginName, new Plugin(this, options));
            }
        });
        return this;
    };
})(jQuery, window, document);
|
{
"content_hash": "3e536e8877a11fcf12e5136a8374b4d9",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 84,
"avg_line_length": 31.15068493150685,
"alnum_prop": 0.449868073878628,
"repo_name": "hiqdev/hipanel-module-ticket",
"id": "f6b49872966c751cd977f6de8a4039ff5df65422",
"size": "2274",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/assets/js/threadChecker.js",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "5286"
},
{
"name": "PHP",
"bytes": "174461"
}
]
}
|
#include "qemu-common.h"
#include "qemu-timer.h"
#include "monitor.h"
#include "hw/usb.h"
/* Dummy host-USB backend: opening a host device always fails (returns NULL),
 * since no host USB pass-through is available in this build. */
USBDevice *usb_host_device_open(const char *devname)
{
    return NULL;
}
/* Dummy host-USB backend: closing is a no-op that reports success (0). */
int usb_host_device_close(const char *devname)
{
    return 0;
}
/* Dummy host-USB backend: the monitor 'info usbhost' listing is always empty. */
void usb_host_info(Monitor *mon)
{
    monitor_printf(mon, "  No devices\n");
}
|
{
"content_hash": "533228c39df92fc15eef1d393f4c88f7",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 52,
"avg_line_length": 14.761904761904763,
"alnum_prop": 0.6774193548387096,
"repo_name": "indashnet/InDashNet.Open.UN2000",
"id": "ed032193d799f11cc4c1520c2dc0778aece71a39",
"size": "1477",
"binary": false,
"copies": "80",
"ref": "refs/heads/master",
"path": "android/external/qemu/usb-dummy-android.c",
"mode": "33261",
"license": "apache-2.0",
"language": []
}
|
# Tightens the devices table: both foreign-key columns become NOT NULL.
class UpdateDevicesRelation < ActiveRecord::Migration
  def change
    %i[user_id token_id].each do |column|
      change_column :devices, column, :integer, null: false
    end
  end
end
|
{
"content_hash": "2817fea6db22eae6c133cc362fbd2e03",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 60,
"avg_line_length": 33,
"alnum_prop": 0.7323232323232324,
"repo_name": "czajkovsky/sailhero-api",
"id": "6a28b94e418be87b25176ed32670e189f412c339",
"size": "198",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "db/migrate/20150117143645_update_devices_relation.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "145615"
}
]
}
|
/**
 * Smoke test for DSN::intercept_stream.
 *
 * Sets up a log4cpp category writing to std::cerr, installs an interceptor
 * that redirects std::cout into that category, writes through it, removes
 * the interceptor, and verifies (by eye, via the printed lines) that stdout
 * is restored. Returns EXIT_FAILURE only if the appender cannot be allocated.
 */
int main(int argc, char** argv) {
    log4cpp::Category& log(log4cpp::Category::getInstance("test-intercept_stream"));
    log.setPriority(log4cpp::Priority::DEBUG);
    try {
        log4cpp::Appender* a = new log4cpp::OstreamAppender("test-intercept_stream/stdout", &std::cerr);
        a->setLayout(new log4cpp::BasicLayout());
        log.addAppender(a);
    }
    catch (std::bad_alloc& ex) {
        std::cerr << "Unable to initialize log4cpp: " << ex.what() << std::endl;
        return EXIT_FAILURE;
    }
    log << log4cpp::Priority::DEBUG << "log4cpp target initialized!";
    std::cout << "this will go to normal stdout" << std::endl;
    log << log4cpp::Priority::INFO << "Installing stream interceptor for std::cout";
    // std::auto_ptr was deprecated in C++11 and removed in C++17; unique_ptr
    // gives the same scoped ownership and the same reset() semantics.
    std::unique_ptr<DSN::intercept_stream> interceptor(new DSN::intercept_stream(std::cout,
                                                                                 log4cpp::Priority::DEBUG, log,
                                                                                 "cout"));
    std::cout << "this goes to intercepted cout" << std::endl;
    interceptor.reset();  // uninstall the interceptor early, before shutdown
    log << log4cpp::Priority::INFO << "Removed stream interceptor for std::cout";
    std::cout << "this goes to normal stdout again" << std::endl;
    log4cpp::Category::shutdown();
    return EXIT_SUCCESS;
}
|
{
"content_hash": "4808777d75be8aa3b0dc22d96c718210",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 100,
"avg_line_length": 36.87096774193548,
"alnum_prop": 0.6570428696412949,
"repo_name": "png85/dsnutil",
"id": "71715a0387a858e0f275f9f3a71f808a6db0bc32",
"size": "1234",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test-intercept_stream.cc",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "2556"
},
{
"name": "C++",
"bytes": "33083"
},
{
"name": "CMake",
"bytes": "27925"
}
]
}
|
Craft field type for text fields that match a regex pattern.
## Installation
To install Match Input, follow these steps:
1. Install with Composer via `composer require marionnewlevant/match-input` from your project directory
2. Install the plugin in the Craft Control Panel under Settings > Plugins
or
1. Install via the Plugin Store
Match Input works on Craft 3.x.
## Match Input Overview
A _Match Input_ field is
a _Plain Text_ field with the addition of a regex pattern that the field must match to
be valid.
## Using Match Input
When you create the field, you specify the `Input Mask`.
This is the [PCRE Pattern](http://php.net/manual/en/pcre.pattern.php) which the
input is required to match.
You also specify an `Error Message` to display when the field does not match the
pattern.
If you need to translate the `Error Message` (for a multi-language control panel), those translations
will be in the `site` translation category.
## Sample Input Masks
- `https://` - not a valid pattern (no delimiters)
- `/https:\/\//` - valid pattern, will match string with `https://` in it anywhere
- `#https://#` - valid pattern, will match string with `https://` in it anywhere (sometimes / isn't the best delimiter)
- `#^https://#` - will match string that begins with `https://`
- `/^\d{5}(-\d{4})?$/` - will match 5 digits, optionally followed by `-` and 4 digits (uses ^ and $ to match the entire string)
## Acknowledgements
Brought to you by [Marion Newlevant](http://marion.newlevant.com).
Icon interior by SlideGenius from the Noun Project
|
{
"content_hash": "f5b874ab7f237b321eb9ae33285b49c1",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 127,
"avg_line_length": 36.88095238095238,
"alnum_prop": 0.7353131052291801,
"repo_name": "marionnewlevant/craft3-match_input",
"id": "2af02119174d6aabc584a67b4fe02abc57b5625e",
"size": "1589",
"binary": false,
"copies": "1",
"ref": "refs/heads/v2",
"path": "README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "2705"
},
{
"name": "PHP",
"bytes": "7527"
}
]
}
|
namespace lava {
// Allocates the symbol table, mapping def'd identifier names to futures
// that resolve to their ObjTree definitions (populated by DefTraverse).
FirstPass::FirstPass() {
    symtable = new std::unordered_map<std::string, std::future<ObjTree *> >();
}
// Tears down the symbol table.
//
// The previous implementation iterated the map and did
// `delete &it->first; delete &it->second;`. Those keys and values are
// owned by the map itself (they were never individually heap-allocated),
// so deleting their addresses was undefined behavior. Deleting the map
// runs every element's destructor correctly.
// NOTE(review): ~std::future from std::async blocks until the task
// finishes — pre-existing behavior, unchanged here.
FirstPass::~FirstPass() {
    delete symtable;
}
// Entry point of the first pass; currently just the def-hoisting traversal.
ObjTree *FirstPass::Pass(ObjTree *obj) {
    return DefTraverse(obj);
}
// Builds the symbol table from def'd ids, mitigating side effects of def.
//
// Recursively walks the tree. When a node's head is the `def` id, its first
// child (which must itself be an id) is registered in `symtable` keyed by
// name, as a std::async future that currently just prints the subtree and
// yields it. On a duplicate definition the `def` node is dismantled and the
// first child is returned in its place.
// NOTE(review): the duplicate branch deletes obj->self, the trailing
// children, and obj->children, but not obj itself — presumably intentional;
// confirm against ObjTree's ownership model.
ObjTree *FirstPass::DefTraverse(ObjTree *obj) {
    // Is this node a `def` form? (head exists, is an id, and spells KEYWORD_DEF)
    if (obj->self != NULL && obj->self->type == TYPE_ID && obj->self->str->compare(KEYWORD_DEF) == 0) {
        // type checking
        if (obj->children->size() > 1) {
            if (obj->children->at(0)->self->type == TYPE_ID) {
                // Add tree as arg to async
                if (!symtable->count(std::string(*obj->children->at(0)->self->str))) {
                    symtable->insert(std::make_pair<std::string, std::future<ObjTree *> >(std::string(*obj->children->at(0)->self->str), std::future<ObjTree *>(std::async(std::launch::async, [](ObjTree* obj){
                        // lambda function
                        std::cout << *obj << std::endl;
                        return obj;
                    }, obj))));
                    // replace the def form by the defined id
                    return obj->children->at(0);
                } else {
                    // memory management...
                    delete obj->self;
                    ObjTree *ret = obj->children->at(0);
                    for (std::vector<ObjTree *>::iterator it = obj->children->begin() + 1; it != obj->children->end(); it++) {
                        delete *it;
                    }
                    delete obj->children;
                    return ret;
                }
            } else {
                err << "'def': must have id literal, found '" << *obj->children->at(0) << "'" << "\n";
                return obj;
            }
        } else {
            err << "'def': nothing to define" << "\n";
            return obj;
        }
    }
    // Not a def form: recurse into every child in place.
    if (obj->children->size() != 0) {
        for (std::vector<ObjTree *>::iterator it = obj->children->begin(); it != obj->children->end(); it++) {
            *it = DefTraverse(*it);
        }
    }
    return obj;
}
}
#endif // LAVA_FIRSTPASS_FIRSTPASS_H
|
{
"content_hash": "e421d426dd87602d6a6ddec4f6e6f6f4",
"timestamp": "",
"source": "github",
"line_count": 63,
"max_line_length": 208,
"avg_line_length": 36,
"alnum_prop": 0.4739858906525573,
"repo_name": "cptaffe/lava",
"id": "ee144ed8ceb1ed1161409b7479b14d1511220a70",
"size": "2482",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/firstpass/firstpass.cc",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "261"
},
{
"name": "C++",
"bytes": "19738"
},
{
"name": "Common Lisp",
"bytes": "72"
}
]
}
|
namespace ash {
namespace {
// Self-deleting helper that watches the wallpaper show animation and notifies
// the RootWindowController when it starts and finishes. Deletes itself either
// when the implicit animation completes or when the observed widget is
// destroyed, whichever happens first.
class ShowWallpaperAnimationObserver : public ui::ImplicitAnimationObserver,
                                       public views::WidgetObserver {
 public:
  ShowWallpaperAnimationObserver(RootWindowController* root_window_controller,
                                 views::Widget* wallpaper_widget,
                                 bool is_initial_animation)
      : root_window_controller_(root_window_controller),
        wallpaper_widget_(wallpaper_widget),
        is_initial_animation_(is_initial_animation) {
    DCHECK(wallpaper_widget_);
    wallpaper_widget_->AddObserver(this);
  }

  ~ShowWallpaperAnimationObserver() override {
    StopObservingImplicitAnimations();
    if (wallpaper_widget_)
      wallpaper_widget_->RemoveObserver(this);
  }

 private:
  // Overridden from ui::ImplicitAnimationObserver:
  void OnImplicitAnimationsScheduled() override {
    if (is_initial_animation_)
      root_window_controller_->OnInitialWallpaperAnimationStarted();
  }

  void OnImplicitAnimationsCompleted() override {
    root_window_controller_->OnWallpaperAnimationFinished(wallpaper_widget_);
    delete this;  // animation done; this observer's job is finished
  }

  // Overridden from views::WidgetObserver.
  void OnWidgetDestroying(views::Widget* widget) override { delete this; }

  RootWindowController* root_window_controller_;  // not owned
  views::Widget* wallpaper_widget_;               // not owned

  // Is this object observing the initial brightness/grayscale animation?
  const bool is_initial_animation_;

  DISALLOW_COPY_AND_ASSIGN(ShowWallpaperAnimationObserver);
};
} // namespace
// Observes both the wallpaper widget and its parent window, so the wallpaper
// can follow parent bounds changes and react to widget destruction.
WallpaperWidgetController::WallpaperWidgetController(views::Widget* widget)
    : widget_(widget),
      widget_parent_(WmLookup::Get()->GetWindowForWidget(widget)->GetParent()) {
  DCHECK(widget_);
  widget_->AddObserver(this);
  widget_parent_->AddObserver(this);
}
WallpaperWidgetController::~WallpaperWidgetController() {
  if (widget_) {
    // Copy the pointer first: RemoveObservers() clears |widget_|, and we must
    // stop observing before CloseNow() so the close does not re-enter
    // OnWidgetDestroying().
    views::Widget* widget = widget_;
    RemoveObservers();
    widget->CloseNow();
  }
}
// The widget is going away on its own: drop our observers and clear |widget_|
// so the destructor does not try to close it a second time.
void WallpaperWidgetController::OnWidgetDestroying(views::Widget* widget) {
  RemoveObservers();
}
// Forwards the new bounds to the wallpaper widget, if it still exists.
void WallpaperWidgetController::SetBounds(const gfx::Rect& bounds) {
  if (!widget_)
    return;
  widget_->SetBounds(bounds);
}
// Moves the wallpaper widget's window under |container| in |root_window| and
// re-attaches the parent observer to the new parent. Returns false when
// there is no widget left to reparent.
bool WallpaperWidgetController::Reparent(WmWindow* root_window, int container) {
  if (!widget_)
    return false;  // Nothing to reparent.
  widget_parent_->RemoveObserver(this);
  WmWindow* window = WmLookup::Get()->GetWindowForWidget(widget_);
  root_window->GetChildByShellWindowId(container)->AddChild(window);
  widget_parent_ = WmLookup::Get()->GetWindowForWidget(widget_)->GetParent();
  widget_parent_->AddObserver(this);
  return true;
}
// Detaches from both observed objects and forgets the widget. Order matters:
// |widget_| is dereferenced before being cleared.
void WallpaperWidgetController::RemoveObservers() {
  widget_parent_->RemoveObserver(this);
  widget_->RemoveObserver(this);
  widget_ = nullptr;
}
// Keeps the wallpaper sized to its parent window.
void WallpaperWidgetController::OnWindowBoundsChanged(
    WmWindow* window,
    const gfx::Rect& old_bounds,
    const gfx::Rect& new_bounds) {
  SetBounds(new_bounds);
}
// Kicks off the wallpaper show animation. The observer registered here is
// self-deleting, so no ownership is kept by this controller.
void WallpaperWidgetController::StartAnimating(
    RootWindowController* root_window_controller) {
  if (widget_) {
    ui::ScopedLayerAnimationSettings settings(
        widget_->GetLayer()->GetAnimator());
    settings.AddObserver(new ShowWallpaperAnimationObserver(
        root_window_controller, widget_,
        WmShell::Get()->wallpaper_delegate()->ShouldShowInitialAnimation()));
    // When |widget_| shows, AnimateShowWindowCommon() is called to do the
    // animation. Sets transition duration to 0 to avoid animating to the
    // show animation's initial values.
    settings.SetTransitionDuration(base::TimeDelta());
    widget_->Show();
  }
}
// Wraps |controller| while its wallpaper is still animating in; takes
// ownership (released again via GetController(true)).
AnimatingWallpaperWidgetController::AnimatingWallpaperWidgetController(
    WallpaperWidgetController* controller)
    : controller_(controller) {}
// Default destruction; |controller_| cleans itself up if never released.
AnimatingWallpaperWidgetController::~AnimatingWallpaperWidgetController() {}
// Halts any in-flight wallpaper layer animation.
void AnimatingWallpaperWidgetController::StopAnimating() {
  if (!controller_)
    return;
  controller_->widget()->GetLayer()->GetAnimator()->StopAnimating();
}
// Hands out the wrapped controller, transferring ownership to the caller
// when |pass_ownership| is true.
WallpaperWidgetController* AnimatingWallpaperWidgetController::GetController(
    bool pass_ownership) {
  return pass_ownership ? controller_.release() : controller_.get();
}
} // namespace ash
|
{
"content_hash": "c5344efabce793b44c484086c6ca5830",
"timestamp": "",
"source": "github",
"line_count": 134,
"max_line_length": 80,
"avg_line_length": 31.69402985074627,
"alnum_prop": 0.7205085943018601,
"repo_name": "google-ar/WebARonARCore",
"id": "b86bf614994b390ab847f70f4b1c4f1b5b7d94d5",
"size": "4850",
"binary": false,
"copies": "1",
"ref": "refs/heads/webarcore_57.0.2987.5",
"path": "ash/common/wallpaper/wallpaper_widget_controller.cc",
"mode": "33188",
"license": "apache-2.0",
"language": []
}
|
// Assembly metadata for the ASC.ActiveDirectory test project.
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;

// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("ASC.ActiveDirectory.Tests")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("Ascensio System SIA")]
[assembly: AssemblyProduct("ASC.ActiveDirectory.Tests")]
[assembly: AssemblyCopyright("(c) Ascensio System SIA. All rights reserved")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]

// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]

// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("c0cdef45-20f4-49b0-8ba0-f2a3626ad193")]

// Version information for an assembly consists of the following four values:
//
//      Major Version
//      Minor Version
//      Build Number
//      Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
|
{
"content_hash": "27a88b5b4047b5efcce3bf233838feea",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 84,
"avg_line_length": 40.77777777777778,
"alnum_prop": 0.7506811989100818,
"repo_name": "ONLYOFFICE/CommunityServer",
"id": "9e8d0a7dd7f7e61ec26a32d3130b42217b472b73",
"size": "1468",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "common/ASC.ActiveDirectory/ASC.ActiveDirectory.Tests/Properties/AssemblyInfo.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "1C Enterprise",
"bytes": "9505"
},
{
"name": "ASP.NET",
"bytes": "1543455"
},
{
"name": "Batchfile",
"bytes": "11616"
},
{
"name": "C",
"bytes": "261"
},
{
"name": "C#",
"bytes": "37439795"
},
{
"name": "C++",
"bytes": "10200"
},
{
"name": "CSS",
"bytes": "632783"
},
{
"name": "Classic ASP",
"bytes": "43003"
},
{
"name": "F#",
"bytes": "9164"
},
{
"name": "HTML",
"bytes": "1350144"
},
{
"name": "Handlebars",
"bytes": "942"
},
{
"name": "JavaScript",
"bytes": "10480939"
},
{
"name": "Less",
"bytes": "1348959"
},
{
"name": "Makefile",
"bytes": "3611"
},
{
"name": "Perl",
"bytes": "6729"
},
{
"name": "PowerShell",
"bytes": "21562"
},
{
"name": "Procfile",
"bytes": "15"
},
{
"name": "Ruby",
"bytes": "30941"
},
{
"name": "Shell",
"bytes": "36554"
},
{
"name": "VBScript",
"bytes": "1225"
},
{
"name": "Visual Basic .NET",
"bytes": "13062"
},
{
"name": "XSLT",
"bytes": "105207"
}
]
}
|
/**
* Autogenerated by renren-api2-generator 2013-07-05 11:01:59
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package com.renren.api.service;
import java.util.*;
/**
 * A comment (value object for the Renren comment API).
 */
public class Comment {
    /**
     * ID of this comment.
     */
    private long id;
    /**
     * Type of this comment.
     */
    private CommentType commentType;
    /**
     * ID of the entry being commented on.
     */
    private long entryId;
    /**
     * ID of the owner of the commented entry.
     */
    private long entryOwnerId;
    /**
     * ID of the comment's author.
     */
    private long authorId;
    /**
     * Text content of the comment.
     */
    private String content;
    /**
     * Time the comment was posted.
     */
    private String time;

    public void setId(long id) {
        this.id = id;
    }
    public long getId() {
        return this.id;
    }
    public void setCommentType(CommentType commentType) {
        this.commentType = commentType;
    }
    public CommentType getCommentType() {
        return this.commentType;
    }
    public void setEntryId(long entryId) {
        this.entryId = entryId;
    }
    public long getEntryId() {
        return this.entryId;
    }
    public void setEntryOwnerId(long entryOwnerId) {
        this.entryOwnerId = entryOwnerId;
    }
    public long getEntryOwnerId() {
        return this.entryOwnerId;
    }
    public void setAuthorId(long authorId) {
        this.authorId = authorId;
    }
    public long getAuthorId() {
        return this.authorId;
    }
    public void setContent(String content) {
        this.content = content;
    }
    public String getContent() {
        return this.content;
    }
    public void setTime(String time) {
        this.time = time;
    }
    public String getTime() {
        return this.time;
    }
}
|
{
"content_hash": "f0ddd5708d40c9f028c0dbc493385be2",
"timestamp": "",
"source": "github",
"line_count": 85,
"max_line_length": 67,
"avg_line_length": 17.294117647058822,
"alnum_prop": 0.6714285714285714,
"repo_name": "sibojia/ihomepage",
"id": "2c1967602e4eb603d9a696345a9e46dedfda5652",
"size": "1546",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "infohub-yt/src/com/renren/api/service/Comment.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "48386"
},
{
"name": "Java",
"bytes": "562148"
},
{
"name": "JavaScript",
"bytes": "24610"
},
{
"name": "Python",
"bytes": "32790"
}
]
}
|
import os
import shutil
import unittest
from css import css
from css import selector
class TestStyle(unittest.TestCase):
def setUp(self):
self.original_path = os.getcwd()
self.dump_folder = '.\\dump_folder'
os.chdir(os.path.dirname(os.path.abspath(__file__)))
if os.path.isdir(self.dump_folder):
shutil.rmtree(self.dump_folder, ignore_errors=True)
os.mkdir(self.dump_folder)
else:
os.mkdir(self.dump_folder)
def tearDown(self):
os.chdir(self.original_path)
def test_css_1(self):
s1 = selector.create_selector('.myclass')
s2 = selector.create_selector('p')
s3 = selector.create_selector('#myid')
c1 = css.CSS(selectors=[s1, s2, s3])
c2 = css.create_css(selectors=[s1, s2, s3])
expected_output = '.myclass {\n}\n' \
'p {\n}\n' \
'#myid {\n}'
self.assertEqual(str(c1), expected_output)
self.assertEqual(str(c2), expected_output)
def test_css_2(self):
s1 = selector.create_selector('.myclass')
s2 = selector.create_selector('p')
s3 = selector.create_selector('#myid')
c1 = css.create_css()
c1.append_selector(s1)
c1.append_selector(s2)
c1.append_selector(s3)
c2 = css.create_css()
c2.append_selectors([s1, s2, s3])
expected_output = '.myclass {\n}\n' \
'p {\n}\n' \
'#myid {\n}'
self.assertEqual(str(c1), expected_output)
self.assertEqual(str(c2), expected_output)
def test_css_3(self):
c = css.create_css()
c.add_selector('p.aclass', style=[('color', 'red')])
c.add_selector('.myclass', style=[('color', 'blue')])
c.add_selector('#myid', style=[('color', 'black')])
expected_output = 'p.aclass {\n color: red;\n}\n' \
'.myclass {\n color: blue;\n}\n' \
'#myid {\n color: black;\n}'
self.assertEqual(str(c), expected_output)
def test_css_4(self):
c = css.create_css()
c.add_selector(selectors=['p.aclass', '.myclass', '#myid'], style=[('color', 'red')])
expected_output = 'p.aclass, .myclass, #myid {\n color: red;\n}'
self.assertEqual(str(c), expected_output)
def test_css_5(self):
c = css.create_css()
s1 = c.add_selector('p.aclass', style=[('color', 'red')])
s2 = c.add_selector('.myclass', style=[('color', 'blue')])
s3 = c.add_selector('#myid', style=[('color', 'black')])
self.assertEqual(str(s1), 'p.aclass {\n color: red;\n}')
self.assertEqual(str(s2), '.myclass {\n color: blue;\n}')
self.assertEqual(str(s3), '#myid {\n color: black;\n}')
# Bug fix: the guard compared against 'main' instead of '__main__', so the
# test suite never ran when this file was executed directly.
if __name__ == '__main__':
    unittest.main()
|
{
"content_hash": "3fc15b8ff3a2add24c3a66857c4a1ab1",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 93,
"avg_line_length": 34.80722891566265,
"alnum_prop": 0.5351332641052268,
"repo_name": "PeterPyPan/css",
"id": "4ac64988143e966ea9925cea1866ba4f242614d7",
"size": "2889",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_css.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "27"
},
{
"name": "CSS",
"bytes": "2495"
},
{
"name": "Python",
"bytes": "23132"
}
]
}
|
using Lucene.Net.Analysis;
using Lucene.Net.Documents;
using Lucene.Net.Randomized.Generators;
using Lucene.Net.Search;
using Lucene.Net.Store;
using Lucene.Net.Support;
using Lucene.Net.Util;
using NUnit.Framework;
using System;
using System.Collections.Generic;
namespace Lucene.Net.Index.Sorter
{
    /// <summary>
    /// Verifies that <c>EarlyTerminatingSortingCollector</c> over an index whose
    /// segments were sorted by <c>SortingMergePolicy</c> returns the same top docs
    /// as an unterminated search, while visiting no more hits.
    /// </summary>
    [SuppressCodecs("Lucene3x")]
    public class TestEarlyTermination : LuceneTestCase
    {
        private int numDocs;
        private List<string> terms;
        private Directory dir;
        private Sort sort;
        private RandomIndexWriter iw;
        private IndexReader reader;

        public override void SetUp()
        {
            base.SetUp();
            sort = new Sort(new SortField("ndv1", SortFieldType.INT64));
        }

        // One document with two numeric doc-values fields and a random stored term.
        private Document RandomDocument()
        {
            Document doc = new Document();
            doc.Add(new NumericDocValuesField("ndv1", Random().nextInt(10)));
            doc.Add(new NumericDocValuesField("ndv2", Random().nextInt(10)));
            doc.Add(new StringField("s", RandomInts.RandomFrom(Random(), terms), Field.Store.YES));
            return doc;
        }

        // Builds a random index whose segments are sorted by |sort| via
        // SortingMergePolicy, committing and deleting documents at random points.
        private void CreateRandomIndexes(int maxSegments)
        {
            dir = NewDirectory();
            numDocs = AtLeast(150);
            int numTerms = TestUtil.NextInt(Random(), 1, numDocs / 5);
            ISet<string> randomTerms = new HashSet<string>();
            while (randomTerms.size() < numTerms)
            {
                randomTerms.add(TestUtil.RandomSimpleString(Random()));
            }
            terms = new List<string>(randomTerms);
            int seed = Random().Next();
            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(new Random(seed)));
            iwc.SetMergePolicy(TestSortingMergePolicy.NewSortingMergePolicy(sort));
            iw = new RandomIndexWriter(new Random(seed), dir, iwc);
            for (int i = 0; i < numDocs; ++i)
            {
                Document doc = RandomDocument();
                iw.AddDocument(doc);
                if (i == numDocs / 2 || (i != numDocs - 1 && Random().nextInt(8) == 0))
                {
                    iw.Commit();
                }
                if (Random().nextInt(15) == 0)
                {
                    string term = RandomInts.RandomFrom(Random(), terms);
                    iw.DeleteDocuments(new Term("s", term));
                }
            }
            reader = iw.Reader;
        }

        public override void TearDown()
        {
            reader.Dispose();
            iw.Dispose();
            dir.Dispose();
            base.TearDown();
        }

        [Test]
        public void TestEarlyTermination_()
        {
            CreateRandomIndexes(5);
            int numHits = TestUtil.NextInt(Random(), 1, numDocs / 10);
            Sort sort = new Sort(new SortField("ndv1", SortFieldType.INT64, false));
            bool fillFields = Random().nextBoolean();
            bool trackDocScores = Random().nextBoolean();
            bool trackMaxScore = Random().nextBoolean();
            bool inOrder = Random().nextBoolean();
            TopFieldCollector collector1 = Search.TopFieldCollector.Create(sort, numHits, fillFields, trackDocScores, trackMaxScore, inOrder);
            TopFieldCollector collector2 = Search.TopFieldCollector.Create(sort, numHits, fillFields, trackDocScores, trackMaxScore, inOrder);
            IndexSearcher searcher = NewSearcher(reader);
            int iters = AtLeast(5);
            for (int i = 0; i < iters; ++i)
            {
                TermQuery query = new TermQuery(new Term("s", RandomInts.RandomFrom(Random(), terms)));
                searcher.Search(query, collector1);
                searcher.Search(query, new EarlyTerminatingSortingCollector(collector2, sort, numHits));
            }
            // Early termination must never visit more hits than the full search.
            assertTrue(collector1.TotalHits >= collector2.TotalHits);
            AssertTopDocsEquals(collector1.GetTopDocs().ScoreDocs, collector2.GetTopDocs().ScoreDocs);
        }

        [Test]
        public void TestEarlyTerminationDifferentSorter()
        {
            // test that the collector works correctly when the index was sorted by a
            // different sorter than the one specified in the ctor.
            CreateRandomIndexes(5);
            int numHits = TestUtil.NextInt(Random(), 1, numDocs / 10);
            Sort sort = new Sort(new SortField("ndv2", SortFieldType.INT64, false));
            bool fillFields = Random().nextBoolean();
            bool trackDocScores = Random().nextBoolean();
            bool trackMaxScore = Random().nextBoolean();
            bool inOrder = Random().nextBoolean();
            // LUCENENET specific:
            // we are changing this test to use Lucene.Net 4.9-like behavior rather than going through all of the effort to
            // fix a hard-to-find null reference exception problem.
            // https://github.com/apache/lucene-solr/commit/c59f13f9918faeeb4e69acd41731e674ce88f912
            //TopFieldCollector collector1 = TopFieldCollector.Create(sort, numHits, fillFields, trackDocScores, trackMaxScore, inOrder);
            //TopFieldCollector collector2 = TopFieldCollector.Create(sort, numHits, fillFields, trackDocScores, trackMaxScore, inOrder);
            IndexSearcher searcher = NewSearcher(reader);
            int iters = AtLeast(5);
            for (int i = 0; i < iters; ++i)
            {
                // LUCENENET specific:
                // we are changing this test to use Lucene.Net 4.9-like behavior rather than going through all of the effort to
                // fix a hard-to-find null reference exception problem.
                // https://github.com/apache/lucene-solr/commit/c59f13f9918faeeb4e69acd41731e674ce88f912
                TopFieldCollector collector1 = TopFieldCollector.Create(sort, numHits, fillFields, trackDocScores, trackMaxScore, inOrder);
                TopFieldCollector collector2 = TopFieldCollector.Create(sort, numHits, fillFields, trackDocScores, trackMaxScore, inOrder);
                TermQuery query = new TermQuery(new Term("s", RandomInts.RandomFrom(Random(), terms)));
                searcher.Search(query, collector1);
                Sort different = new Sort(new SortField("ndv2", SortFieldType.INT64));
                searcher.Search(query, new EarlyTerminatingSortingCollectorHelper(collector2, different, numHits));
                assertTrue(collector1.TotalHits >= collector2.TotalHits);
                AssertTopDocsEquals(collector1.GetTopDocs().ScoreDocs, collector2.GetTopDocs().ScoreDocs);
            }
        }

        // Subclass that asserts segments are NOT treated as pre-sorted when the
        // collector was constructed with a different sort than the index's.
        internal class EarlyTerminatingSortingCollectorHelper : EarlyTerminatingSortingCollector
        {
            public EarlyTerminatingSortingCollectorHelper(ICollector @in, Sort sort, int numDocsToCollect)
                : base(@in, sort, numDocsToCollect)
            {
            }
            public override void SetNextReader(AtomicReaderContext context)
            {
                base.SetNextReader(context);
                assertFalse("segment should not be recognized as sorted as different sorter was used", m_segmentSorted);
            }
        }

        // Asserts both result sets contain the same docs with (approximately) equal scores.
        private static void AssertTopDocsEquals(ScoreDoc[] scoreDocs1, ScoreDoc[] scoreDocs2)
        {
            assertEquals(scoreDocs1.Length, scoreDocs2.Length);
            for (int i = 0; i < scoreDocs1.Length; ++i)
            {
                ScoreDoc scoreDoc1 = scoreDocs1[i];
                ScoreDoc scoreDoc2 = scoreDocs2[i];
                assertEquals(scoreDoc1.Doc, scoreDoc2.Doc);
                assertEquals(scoreDoc1.Score, scoreDoc2.Score, 0.001f);
            }
        }
    }
}
|
{
"content_hash": "8ba44576bb093ea2a5ee7e18bf0829fa",
"timestamp": "",
"source": "github",
"line_count": 173,
"max_line_length": 142,
"avg_line_length": 44.69942196531792,
"alnum_prop": 0.6024828656407604,
"repo_name": "laimis/lucenenet",
"id": "bd44cb93247106919f1db8a361ad87e67599bdd8",
"size": "8596",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Lucene.Net.Tests.Misc/Index/Sorter/TestEarlyTermination.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "4805"
},
{
"name": "C#",
"bytes": "41089688"
},
{
"name": "Gnuplot",
"bytes": "2444"
},
{
"name": "HTML",
"bytes": "79746"
},
{
"name": "PowerShell",
"bytes": "73932"
},
{
"name": "XSLT",
"bytes": "21773"
}
]
}
|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>monae: Not compatible 👼</title>
<link rel="shortcut icon" type="image/png" href="../../../../../favicon.png" />
<link href="../../../../../bootstrap.min.css" rel="stylesheet">
<link href="../../../../../bootstrap-custom.css" rel="stylesheet">
<link href="//maxcdn.bootstrapcdn.com/font-awesome/4.2.0/css/font-awesome.min.css" rel="stylesheet">
<script src="../../../../../moment.min.js"></script>
<!-- HTML5 Shim and Respond.js IE8 support of HTML5 elements and media queries -->
<!-- WARNING: Respond.js doesn't work if you view the page via file:// -->
<!--[if lt IE 9]>
<script src="https://oss.maxcdn.com/html5shiv/3.7.2/html5shiv.min.js"></script>
<script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script>
<![endif]-->
</head>
<body>
<div class="container">
<div class="navbar navbar-default" role="navigation">
<div class="container-fluid">
<div class="navbar-header">
<a class="navbar-brand" href="../../../../.."><i class="fa fa-lg fa-flag-checkered"></i> Coq bench</a>
</div>
<div id="navbar" class="collapse navbar-collapse">
<ul class="nav navbar-nav">
<li><a href="../..">clean / released</a></li>
<li class="active"><a href="">8.8.0 / monae - 0.0.6</a></li>
</ul>
</div>
</div>
</div>
<div class="article">
<div class="row">
<div class="col-md-12">
<a href="../..">« Up</a>
<h1>
monae
<small>
0.0.6
<span class="label label-info">Not compatible 👼</span>
</small>
</h1>
<p>📅 <em><script>document.write(moment("2022-07-12 09:05:10 +0000", "YYYY-MM-DD HH:mm:ss Z").fromNow());</script> (2022-07-12 09:05:10 UTC)</em><p>
<h2>Context</h2>
<pre># Packages matching: installed
# Name # Installed # Synopsis
base-bigarray base
base-num base Num library distributed with the OCaml compiler
base-threads base
base-unix base
camlp5 7.14 Preprocessor-pretty-printer of OCaml
conf-findutils 1 Virtual package relying on findutils
conf-perl 2 Virtual package relying on perl
coq 8.8.0 Formal proof management system
num 0 The Num library for arbitrary-precision integer and rational arithmetic
ocaml 4.04.2 The OCaml compiler (virtual package)
ocaml-base-compiler 4.04.2 Official 4.04.2 release
ocaml-config 1 OCaml Switch Configuration
ocamlfind 1.9.5 A library manager for OCaml
# opam file:
opam-version: "2.0"
maintainer: "reynald.affeldt@aist.go.jp"
homepage: "https://github.com/affeldt-aist/monae"
bug-reports: "https://github.com/affeldt-aist/monae/issues"
dev-repo: "git+https://github.com/affeldt-aist/monae.git"
license: "GPL-3.0-or-later"
authors: [
"Reynald Affeldt"
"David Nowak"
"Takafumi Saikawa"
"Jacques Garrigue"
"Celestine Sauvage"
"Kazunari Tanaka"
]
build: [
[make "-j%{jobs}%"]
]
install: [
[make "install"]
]
depends: [
"coq" { >= "8.10" & < "8.12~" }
"coq-infotheo" { >= "0.0.7" & < "0.1" }
]
synopsis: "Monae"
description: """
This repository contains a formalization of monads including several
models, examples of monadic equational reasoning, and an application
to program semantics.
"""
tags: [
"category:Computer Science/Semantics and Compilation/Semantics"
"keyword: monads"
"keyword: effects"
"keyword: probability"
"keyword: nondeterminism"
"logpath:monae"
"date:2019-12-06"
]
url {
http: "https://github.com/affeldt-aist/monae/archive/0.0.6.tar.gz"
checksum: "sha512=6b4d32e7fe1833c4b481135e2a8e2c70c290d182e9c63fe42ca499f16647a6b24746f246edec136692aa82a2d37e4d3df58440d116022b29cae459413ed7ceb3"
}
</pre>
<h2>Lint</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
</dl>
<h2>Dry install 🏜️</h2>
<p>Dry install with the current Coq version:</p>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>opam install -y --show-action coq-monae.0.0.6 coq.8.8.0</code></dd>
<dt>Return code</dt>
<dd>5120</dd>
<dt>Output</dt>
<dd><pre>[NOTE] Package coq is already installed (current version is 8.8.0).
The following dependencies couldn't be met:
- coq-monae -> coq >= 8.10 -> ocaml >= 4.05.0
base of this switch (use `--unlock-base' to force)
No solution found, exiting
</pre></dd>
</dl>
<p>Dry install without Coq/switch base, to test if the problem was incompatibility with the current Coq/OCaml version:</p>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>opam remove -y coq; opam install -y --show-action --unlock-base coq-monae.0.0.6</code></dd>
<dt>Return code</dt>
<dd>0</dd>
</dl>
<h2>Install dependencies</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Duration</dt>
<dd>0 s</dd>
</dl>
<h2>Install 🚀</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Duration</dt>
<dd>0 s</dd>
</dl>
<h2>Installation size</h2>
<p>No files were installed.</p>
<h2>Uninstall 🧹</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Missing removes</dt>
<dd>
none
</dd>
<dt>Wrong removes</dt>
<dd>
none
</dd>
</dl>
</div>
</div>
</div>
<hr/>
<div class="footer">
<p class="text-center">
Sources are on <a href="https://github.com/coq-bench">GitHub</a> © Guillaume Claret 🐣
</p>
</div>
</div>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script>
<script src="../../../../../bootstrap.min.js"></script>
</body>
</html>
|
{
"content_hash": "ec7c8abe7d1db1567cc10f9e68998da7",
"timestamp": "",
"source": "github",
"line_count": 184,
"max_line_length": 159,
"avg_line_length": 39.375,
"alnum_prop": 0.5483781918564528,
"repo_name": "coq-bench/coq-bench.github.io",
"id": "fb21bde32e6f80fec210736f4df2f06ba29a6fce",
"size": "7270",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "clean/Linux-x86_64-4.04.2-2.0.5/released/8.8.0/monae/0.0.6.html",
"mode": "33188",
"license": "mit",
"language": []
}
|
#pragma once
#include <aws/rds/RDS_EXPORTS.h>
#include <aws/core/utils/memory/stl/AWSString.h>
#include <aws/rds/model/ResponseMetadata.h>
#include <utility>
namespace Aws
{
template<typename RESULT_TYPE>
class AmazonWebServiceResult;
namespace Utils
{
namespace Xml
{
class XmlDocument;
} // namespace Xml
} // namespace Utils
namespace RDS
{
namespace Model
{
/**
 * Result of a ResetDBClusterParameterGroup call. Carries the name of the DB
 * cluster parameter group that was reset, plus the standard response
 * metadata.<p><h3>See Also:</h3> <a
 * href="http://docs.aws.amazon.com/goto/WebAPI/rds-2014-10-31/DBClusterParameterGroupNameMessage">AWS
 * API Reference</a></p>
 */
class AWS_RDS_API ResetDBClusterParameterGroupResult
{
public:
  ResetDBClusterParameterGroupResult();
  ResetDBClusterParameterGroupResult(const Aws::AmazonWebServiceResult<Aws::Utils::Xml::XmlDocument>& result);
  ResetDBClusterParameterGroupResult& operator=(const Aws::AmazonWebServiceResult<Aws::Utils::Xml::XmlDocument>& result);

  /**
   * <p>The name of the DB cluster parameter group.</p> <p>Constraints:</p> <ul>
   * <li> <p>Must be 1 to 255 letters or numbers.</p> </li> <li> <p>First
   * character must be a letter</p> </li> <li> <p>Can't end with a hyphen or
   * contain two consecutive hyphens</p> </li> </ul> <note> <p>This value is
   * stored as a lowercase string.</p> </note>
   */
  inline const Aws::String& GetDBClusterParameterGroupName() const
  {
    return m_dBClusterParameterGroupName;
  }

  // Copy-assign the group name.
  inline void SetDBClusterParameterGroupName(const Aws::String& value)
  {
    m_dBClusterParameterGroupName = value;
  }

  // Move-assign the group name.
  inline void SetDBClusterParameterGroupName(Aws::String&& value)
  {
    m_dBClusterParameterGroupName = std::move(value);
  }

  // Assign the group name from a C string.
  inline void SetDBClusterParameterGroupName(const char* value)
  {
    m_dBClusterParameterGroupName.assign(value);
  }

  // Fluent setter (copy); returns *this for chaining.
  inline ResetDBClusterParameterGroupResult& WithDBClusterParameterGroupName(const Aws::String& value)
  {
    SetDBClusterParameterGroupName(value);
    return *this;
  }

  // Fluent setter (move); returns *this for chaining.
  inline ResetDBClusterParameterGroupResult& WithDBClusterParameterGroupName(Aws::String&& value)
  {
    SetDBClusterParameterGroupName(std::move(value));
    return *this;
  }

  // Fluent setter (C string); returns *this for chaining.
  inline ResetDBClusterParameterGroupResult& WithDBClusterParameterGroupName(const char* value)
  {
    SetDBClusterParameterGroupName(value);
    return *this;
  }

  // Response metadata accessors (request id etc.), same copy/move/fluent shape.
  inline const ResponseMetadata& GetResponseMetadata() const
  {
    return m_responseMetadata;
  }
  inline void SetResponseMetadata(const ResponseMetadata& value)
  {
    m_responseMetadata = value;
  }
  inline void SetResponseMetadata(ResponseMetadata&& value)
  {
    m_responseMetadata = std::move(value);
  }
  inline ResetDBClusterParameterGroupResult& WithResponseMetadata(const ResponseMetadata& value)
  {
    SetResponseMetadata(value);
    return *this;
  }
  inline ResetDBClusterParameterGroupResult& WithResponseMetadata(ResponseMetadata&& value)
  {
    SetResponseMetadata(std::move(value));
    return *this;
  }

private:
  Aws::String m_dBClusterParameterGroupName;
  ResponseMetadata m_responseMetadata;
};
} // namespace Model
} // namespace RDS
} // namespace Aws
|
{
"content_hash": "b9cac0459195e18b7d25348a0600996b",
"timestamp": "",
"source": "github",
"line_count": 126,
"max_line_length": 166,
"avg_line_length": 42.166666666666664,
"alnum_prop": 0.6713721061547149,
"repo_name": "JoyIfBam5/aws-sdk-cpp",
"id": "ebb812f83819011c32eb655d72534d31e782aff2",
"size": "5886",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "aws-cpp-sdk-rds/include/aws/rds/model/ResetDBClusterParameterGroupResult.h",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "11868"
},
{
"name": "C++",
"bytes": "167818064"
},
{
"name": "CMake",
"bytes": "591577"
},
{
"name": "HTML",
"bytes": "4471"
},
{
"name": "Java",
"bytes": "271801"
},
{
"name": "Python",
"bytes": "85650"
},
{
"name": "Shell",
"bytes": "5277"
}
]
}
|
module.exports = (sequelize, DataTypes) => {
let Image = sequelize.define('Image', {
fileName: DataTypes.STRING,
width: DataTypes.SMALLINT.UNSIGNED,
height: DataTypes.SMALLINT.UNSIGNED,
type: DataTypes.STRING,
extension: DataTypes.STRING
});
return Image;
};
|
{
"content_hash": "806714e147556c6f17dcb971cb06c4a4",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 44,
"avg_line_length": 28.363636363636363,
"alnum_prop": 0.6314102564102564,
"repo_name": "nathanhood/mmdb",
"id": "17a3e9aa69f0afc6a725abfd5df225aace5fb698",
"size": "312",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "server/models/image.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "14004"
},
{
"name": "JavaScript",
"bytes": "267957"
}
]
}
|
from google.cloud import dlp_v2
async def sample_delete_dlp_job():
    """Delete a DLP job via the async DLP client.

    Builds a ``DeleteDlpJobRequest`` with a placeholder resource name and
    awaits the delete RPC. Replace ``"name_value"`` with the full job
    resource name before running.
    """
    # Create an async client for the DLP service.
    client = dlp_v2.DlpServiceAsyncClient()

    # "name_value" is a placeholder; callers supply the real job name.
    request = dlp_v2.DeleteDlpJobRequest(name="name_value")

    # Awaiting the call surfaces any RPC error to the caller.
    await client.delete_dlp_job(request=request)
# [END dlp_v2_generated_DlpService_DeleteDlpJob_async]
|
{
"content_hash": "95065f3e5ae7fb48946a35270ab834ae",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 54,
"avg_line_length": 22.235294117647058,
"alnum_prop": 0.6931216931216931,
"repo_name": "googleapis/python-dlp",
"id": "8737125bd35bcbdcdee1dfd4cd082cdd9d244113",
"size": "1751",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "samples/generated_samples/dlp_v2_generated_dlp_service_delete_dlp_job_async.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2050"
},
{
"name": "Python",
"bytes": "1279108"
},
{
"name": "Shell",
"bytes": "30651"
}
]
}
|
End of preview.
No dataset card yet
- Downloads last month
- 1