text stringlengths 1 1.05M |
|---|
/**
* Forge SDK
* The Forge Platform contains an expanding collection of web service components that can be used with Autodesk cloud-based products or your own technologies. Take advantage of Autodesk’s expertise in design and engineering.
*
* OpenAPI spec version: 0.1.0
* Contact: <EMAIL>
*
* NOTE: This class is auto generated by the swagger code generator program.
* https://github.com/swagger-api/swagger-codegen.git
* Do not edit the class manually.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
module.exports = (function() {
'use strict';
var ApiClient = require('../ApiClient'),
BadInput = require('../model/BadInput'),
Conflict = require('../model/Conflict'),
CreateRef = require('../model/CreateRef'),
Folder = require('../model/Folder'),
Forbidden = require('../model/Forbidden'),
JsonApiCollection = require('../model/JsonApiCollection'),
NotFound = require('../model/NotFound'),
Refs = require('../model/Refs');
/**
* Folders service.
* @module api/FoldersApi
*/
/**
 * Constructs a new FoldersApi.
 * @alias module:api/FoldersApi
 * @class
 * @param {module:ApiClient} apiClient Optional API client implementation to use,
 * default to {@link module:ApiClient#instance} if unspecified.
 */
var exports = function(apiClient) {
// Use the injected client when provided, otherwise the shared singleton.
this.apiClient = apiClient || ApiClient.instance;
/**
 * Returns the folder by ID for any folder within a given project. All folders
 * or sub-folders within a project are associated with their own unique ID,
 * including the root folder.
 * @param {String} projectId the project id
 * @param {String} folderId the folder id
 * data is of type: {module:model/Folder}
 * @param {Object} oauth2client oauth2client for the call
 * @param {Object} credentials credentials for the call
 */
this.getFolder = function(projectId, folderId, oauth2client, credentials) {
  // Convenience overload: delegate to getFolder2 with no optional parameters.
  return this.getFolder2(projectId, folderId, {}, oauth2client, credentials);
};
/**
 * Returns the folder by ID for any folder within a given project. All folders
 * or sub-folders within a project are associated with their own unique ID,
 * including the root folder.
 * @param {String} projectId the project id
 * @param {String} folderId the folder id
 * @param {Object} opts Optional parameters
 * @param {String} opts.xuserid API call will be limited to act on behalf of only the user specified
 * @param {String} opts.ifModifiedSince If the requested object has not been modified since the time specified in this field, an entity will not be returned from the server; instead, a 304 (not modified) response will be returned without any message body.
 * data is of type: {module:model/Folder}
 * @param {Object} oauth2client oauth2client for the call
 * @param {Object} credentials credentials for the call
 */
this.getFolder2 = function(projectId, folderId, opts, oauth2client, credentials) {
  opts = opts || {};
  // Both path parameters are mandatory; fail fast with a rejected promise.
  // (== null also matches undefined.)
  if (projectId == null) {
    return Promise.reject("Missing the required parameter 'projectId' when calling getFolder");
  }
  if (folderId == null) {
    return Promise.reject("Missing the required parameter 'folderId' when calling getFolder");
  }
  var pathParams = { 'project_id': projectId, 'folder_id': folderId };
  // Optional impersonation and conditional-GET headers; undefined values are
  // simply absent from the request.
  var headerParams = {
    'x-user-id': opts.xuserid,
    'If-Modified-Since': opts.ifModifiedSince
  };
  return this.apiClient.callApi(
    '/data/v1/projects/{project_id}/folders/{folder_id}', 'GET',
    pathParams, {}, headerParams, {}, null,
    ['application/vnd.api+json'],
    ['application/vnd.api+json', 'application/json'],
    Folder, oauth2client, credentials
  );
};
/**
 * Returns a collection of items and folders within a folder. Items represent
 * word documents, fusion design files, drawings, spreadsheets, etc.
 * @param {String} projectId the project id
 * @param {String} folderId the folder id
 * @param {Object} opts Optional parameters
 * @param {String} opts.xuserid API call will be limited to act on behalf of only the user specified
 * @param {Array.<String>} opts.filterType filter by the `type` of the `ref` target
 * @param {Array.<String>} opts.filterId filter by the `id` of the `ref` target
 * @param {Array.<String>} opts.filterExtensionType filter by the extension type
 * @param {Integer} opts.pageNumber specify the page number
 * @param {Integer} opts.pageLimit specify the maximal number of elements per page
 * @param {Boolean} opts.includeHidden Refers to items and folders that were deleted from BIM 360 Docs projects.
 * @param {Array.<*>} opts['filter[*]<-modifier>'] generic filter / <-modifier> is optional
 * data is of type: {module:model/JsonApiCollection}
 * @param {Object} oauth2client oauth2client for the call
 * @param {Object} credentials credentials for the call
 */
this.getFolderContents = function(projectId, folderId, opts, oauth2client, credentials) {
  opts = opts || {};
  if (projectId == null) {
    return Promise.reject("Missing the required parameter 'projectId' when calling getFolderContents");
  }
  if (folderId == null) {
    return Promise.reject("Missing the required parameter 'folderId' when calling getFolderContents");
  }
  var self = this;
  // CSV-join array-valued options for the JSON:API query string.
  var csv = function(value) { return self.apiClient.buildCollectionParam(value, 'csv'); };
  var queryParams = {
    'filter[type]': csv(opts.filterType),
    'filter[id]': csv(opts.filterId),
    'filter[extension.type]': csv(opts.filterExtensionType),
    'page[number]': opts.pageNumber,
    'page[limit]': opts.pageLimit,
    'includeHidden': opts.includeHidden
  };
  // Forward any caller-supplied generic 'filter[...]' keys verbatim; they
  // override the shorthand filters above on key collision.
  Object.keys(opts).forEach(function(key) {
    if (/^filter\[/.test(key)) {
      queryParams[key] = csv(opts[key]);
    }
  });
  return this.apiClient.callApi(
    '/data/v1/projects/{project_id}/folders/{folder_id}/contents', 'GET',
    { 'project_id': projectId, 'folder_id': folderId },
    queryParams,
    { 'x-user-id': opts.xuserid },
    {}, null,
    ['application/vnd.api+json'],
    ['application/vnd.api+json', 'application/json'],
    JsonApiCollection, oauth2client, credentials
  );
};
/**
 * Returns the parent folder (if it exists). In a project, subfolders and
 * resource items are stored under a folder except the root folder which does
 * not have a parent of its own.
 * @param {String} projectId the project id
 * @param {String} folderId the folder id
 * data is of type: {module:model/Folder}
 * @param {Object} oauth2client oauth2client for the call
 * @param {Object} credentials credentials for the call
 * @returns {Promise} resolves with the parent folder payload
 */
this.getFolderParent = function(projectId, folderId, oauth2client, credentials) {
  // Fix: the promise returned by getFolderParent2 was previously discarded,
  // so callers received undefined and could observe neither the response nor
  // failures. Return it, matching the other convenience wrappers in this file.
  return this.getFolderParent2(projectId, folderId, {}, oauth2client, credentials);
};
/**
 * Returns the parent folder (if it exists). In a project, subfolders and
 * resource items are stored under a folder except the root folder which does
 * not have a parent of its own.
 * @param {String} projectId the project id
 * @param {String} folderId the folder id
 * @param {Object} opts Optional parameters
 * @param {String} opts.xuserid API call will be limited to act on behalf of only the user specified
 * data is of type: {module:model/Folder}
 * @param {Object} oauth2client oauth2client for the call
 * @param {Object} credentials credentials for the call
 */
this.getFolderParent2 = function(projectId, folderId, opts, oauth2client, credentials) {
  opts = opts || {};
  // Required path parameters; == null also matches undefined.
  if (projectId == null) {
    return Promise.reject("Missing the required parameter 'projectId' when calling getFolderParent");
  }
  if (folderId == null) {
    return Promise.reject("Missing the required parameter 'folderId' when calling getFolderParent");
  }
  return this.apiClient.callApi(
    '/data/v1/projects/{project_id}/folders/{folder_id}/parent', 'GET',
    { 'project_id': projectId, 'folder_id': folderId },
    {},
    { 'x-user-id': opts.xuserid },
    {}, null,
    ['application/vnd.api+json'],
    ['application/vnd.api+json', 'application/json'],
    Folder, oauth2client, credentials
  );
};
/**
 * Returns the resources (items, folders, and versions) which have a custom
 * relationship with the given folder_id. Custom relationships can be
 * established between a folder and other resources within the 'data' domain
 * service (folders, items, and versions).
 * @param {String} projectId the project id
 * @param {String} folderId the folder id
 * @param {Object} opts Optional parameters
 * @param {String} opts.xuserid API call will be limited to act on behalf of only the user specified
 * @param {Array.<String>} opts.filterType filter by the `type` of the `ref` target
 * @param {Array.<String>} opts.filterId filter by the `id` of the `ref` target
 * @param {Array.<String>} opts.filterExtensionType filter by the extension type
 * @param {Array.<*>} opts['filter[*]<-modifier>'] generic filter / <-modifier> is optional
 * data is of type: {module:model/JsonApiCollection}
 * @param {Object} oauth2client oauth2client for the call
 * @param {Object} credentials credentials for the call
 */
this.getFolderRefs = function(projectId, folderId, opts, oauth2client, credentials) {
  opts = opts || {};
  if (projectId == null) {
    return Promise.reject("Missing the required parameter 'projectId' when calling getFolderRefs");
  }
  if (folderId == null) {
    return Promise.reject("Missing the required parameter 'folderId' when calling getFolderRefs");
  }
  var self = this;
  var csv = function(value) { return self.apiClient.buildCollectionParam(value, 'csv'); };
  var queryParams = {
    'filter[type]': csv(opts.filterType),
    'filter[id]': csv(opts.filterId),
    'filter[extension.type]': csv(opts.filterExtensionType)
  };
  // Forward any caller-supplied generic 'filter[...]' keys, CSV-joined; they
  // override the shorthand filters above on key collision.
  Object.keys(opts).forEach(function(key) {
    if (/^filter\[/.test(key)) {
      queryParams[key] = csv(opts[key]);
    }
  });
  return this.apiClient.callApi(
    '/data/v1/projects/{project_id}/folders/{folder_id}/refs', 'GET',
    { 'project_id': projectId, 'folder_id': folderId },
    queryParams,
    { 'x-user-id': opts.xuserid },
    {}, null,
    ['application/vnd.api+json'],
    ['application/vnd.api+json', 'application/json'],
    JsonApiCollection, oauth2client, credentials
  );
};
/**
 * Returns a collection of links for the given folder_id. Custom relationships
 * can be established between a folder and other external resources residing
 * outside the data domain service. A link's href defines the target URI to
 * access a resource.
 * @param {String} projectId the project id
 * @param {String} folderId the folder id
 * @param {Object} opts Optional parameters
 * @param {String} opts.xuserid API call will be limited to act on behalf of only the user specified
 * @param {Array.<String>} opts.filterType filter by the `type` of the `ref` target
 * @param {Array.<String>} opts.filterId filter by the `id` of the `ref` target
 * @param {Array.<String>} opts.filterExtensionType filter by the extension type
 * @param {Array.<String>} opts.filterMimeType Filter by mime type.
 * @param {Array.<*>} opts['filter[*]<-modifier>'] generic filter / <-modifier> is optional
 * data is of type: {module:model/JsonApiCollection}
 * @param {Object} oauth2client oauth2client for the call
 * @param {Object} credentials credentials for the call
 */
this.getFolderRelationshipsLinks = function(projectId, folderId, opts, oauth2client, credentials) {
  opts = opts || {};
  var postBody = null;
  // Fix: both rejection messages previously named 'getFolderRefs' (a
  // copy/paste slip from the sibling endpoint); they now name this method.
  if (projectId == null) {
    return Promise.reject("Missing the required parameter 'projectId' when calling getFolderRelationshipsLinks");
  }
  if (folderId == null) {
    return Promise.reject("Missing the required parameter 'folderId' when calling getFolderRelationshipsLinks");
  }
  var pathParams = {
    'project_id': projectId,
    'folder_id': folderId
  };
  var queryParams = {
    'filter[type]': this.apiClient.buildCollectionParam(opts['filterType'], 'csv'),
    'filter[id]': this.apiClient.buildCollectionParam(opts['filterId'], 'csv'),
    'filter[extension.type]': this.apiClient.buildCollectionParam(opts['filterExtensionType'], 'csv'),
    'filter[mimeType]': this.apiClient.buildCollectionParam(opts['filterMimeType'], 'csv')
  };
  // Forward any caller-supplied generic 'filter[...]' keys, CSV-joined; they
  // override the shorthand filters above on key collision.
  var that = this;
  Object.keys(opts).forEach(function(key) {
    if (/^filter\[/.test(key)) {
      queryParams[key] = that.apiClient.buildCollectionParam(opts[key], 'csv');
    }
  });
  var headerParams = {
    'x-user-id': opts.xuserid
  };
  var formParams = {};
  var contentTypes = ['application/vnd.api+json'];
  var accepts = ['application/vnd.api+json', 'application/json'];
  var returnType = JsonApiCollection;
  return this.apiClient.callApi(
    '/data/v1/projects/{project_id}/folders/{folder_id}/relationships/links', 'GET',
    pathParams, queryParams, headerParams, formParams, postBody,
    contentTypes, accepts, returnType, oauth2client, credentials
  );
};
/**
 * Returns the custom relationships that are associated to the given folder_id.
 * Custom relationships can be established between a folder and other resources
 * within the 'data' domain service (folders, items, and versions).
 * @param {String} projectId the project id
 * @param {String} folderId the folder id
 * @param {Object} opts Optional parameters
 * @param {String} opts.xuserid API call will be limited to act on behalf of only the user specified
 * @param {Array.<String>} opts.filterType filter by the `type` of the `ref` target
 * @param {Array.<String>} opts.filterId filter by the `id` of the `ref` target
 * @param {Array.<String>} opts.filterRefType filter by `refType`
 * @param {module:model/String} opts.filterDirection filter by the direction of the reference
 * @param {Array.<String>} opts.filterExtensionType filter by the extension type
 * @param {Array.<*>} opts['filter[*]<-modifier>'] generic filter / <-modifier> is optional
 * data is of type: {module:model/Refs}
 * @param {Object} oauth2client oauth2client for the call
 * @param {Object} credentials credentials for the call
 */
this.getFolderRelationshipsRefs = function(projectId, folderId, opts, oauth2client, credentials) {
  opts = opts || {};
  if (projectId == null) {
    return Promise.reject("Missing the required parameter 'projectId' when calling getFolderRelationshipsRefs");
  }
  if (folderId == null) {
    return Promise.reject("Missing the required parameter 'folderId' when calling getFolderRelationshipsRefs");
  }
  var self = this;
  var csv = function(value) { return self.apiClient.buildCollectionParam(value, 'csv'); };
  var queryParams = {
    'filter[type]': csv(opts.filterType),
    'filter[id]': csv(opts.filterId),
    'filter[refType]': csv(opts.filterRefType),
    // direction is a single enum value, not a collection — passed through as-is.
    'filter[direction]': opts.filterDirection,
    'filter[extension.type]': csv(opts.filterExtensionType)
  };
  // Forward any caller-supplied generic 'filter[...]' keys, CSV-joined; they
  // override the shorthand filters above on key collision.
  Object.keys(opts).forEach(function(key) {
    if (/^filter\[/.test(key)) {
      queryParams[key] = csv(opts[key]);
    }
  });
  return this.apiClient.callApi(
    '/data/v1/projects/{project_id}/folders/{folder_id}/relationships/refs', 'GET',
    { 'project_id': projectId, 'folder_id': folderId },
    queryParams,
    { 'x-user-id': opts.xuserid },
    {}, null,
    ['application/vnd.api+json'],
    ['application/vnd.api+json', 'application/json'],
    Refs, oauth2client, credentials
  );
};
/**
 * Filters the data of a folder and recursively in the subfolders of any
 * project accessible to you.
 * @param {String} projectId the project id
 * @param {String} folderId the folder id
 * @param {Object} opts Optional parameters
 * @param {Array.<*>} opts['filter[*]<-modifier>'] generic filter / <-modifier> is optional
 * @param {Integer} opts.pageNumber specify the page number
 * @param {Integer} opts.pageLimit specify the maximal number of elements per page
 * @param {Object} oauth2client oauth2client for the call
 * @param {Object} credentials credentials for the call
 */
this.search = function(projectId, folderId, opts, oauth2client, credentials) {
  opts = opts || {};
  var postBody = null;
  // Fix: both rejection messages previously named 'getFolderRelationshipsRefs'
  // (copy/paste from another endpoint); they now name 'search'. The jsdoc was
  // also missing opts.pageLimit, which this method reads below.
  if (projectId == null) {
    return Promise.reject("Missing the required parameter 'projectId' when calling search");
  }
  if (folderId == null) {
    return Promise.reject("Missing the required parameter 'folderId' when calling search");
  }
  var pathParams = {
    'project_id': projectId,
    'folder_id': folderId
  };
  var queryParams = {
    'page[number]': opts['pageNumber'],
    'page[limit]': opts['pageLimit']
  };
  // Forward any caller-supplied generic 'filter[...]' keys, CSV-joined.
  var that = this;
  Object.keys(opts).forEach(function(key) {
    if (/^filter\[/.test(key)) {
      queryParams[key] = that.apiClient.buildCollectionParam(opts[key], 'csv');
    }
  });
  // The x-user-id header was deliberately left commented out in the generated
  // source for this endpoint — presumably unsupported here; confirm upstream.
  var headerParams = {};
  var formParams = {};
  var contentTypes = ['application/vnd.api+json'];
  var accepts = ['application/vnd.api+json', 'application/json'];
  // Raw response: no model deserialization is applied to search results.
  var returnType = null;
  return this.apiClient.callApi(
    '/data/v1/projects/{project_id}/folders/{folder_id}/search', 'GET',
    pathParams, queryParams, headerParams, formParams, postBody,
    contentTypes, accepts, returnType, oauth2client, credentials
  );
};
/**
 * Creates a new folder in the data domain service
 * @param {String} projectId the project id
 * @param {module:model/CreateFolder} body describe the folder to be created
 * @param {Object} oauth2client oauth2client for the call
 * @param {Object} credentials credentials for the call
 */
this.postFolder = function(projectId, body, oauth2client, credentials) {
  // Convenience overload: forward to postFolder2 with no optional parameters.
  return this.postFolder2(projectId, body, {}, oauth2client, credentials);
};
/**
 * Creates a new folder in the data domain service
 * @param {String} projectId the project id
 * @param {module:model/CreateFolder} body describe the folder to be created
 * @param {Object} opts Optional parameters
 * @param {String} opts.xuserid API call will be limited to act on behalf of only the user specified
 * @param {Object} oauth2client oauth2client for the call
 * @param {Object} credentials credentials for the call
 */
this.postFolder2 = function(projectId, body, opts, oauth2client, credentials) {
  opts = opts || {};
  var postBody = body;
  // Fix: projectId is interpolated into the request path but was never
  // validated; reject early, consistent with every other endpoint in this API.
  if (projectId == null) {
    return Promise.reject("Missing the required parameter 'projectId' when calling postFolder");
  }
  // verify the required parameter 'body' is set (== null also matches undefined)
  if (body == null) {
    return Promise.reject("Missing the required parameter 'body' when calling postFolder");
  }
  var pathParams = {
    'project_id': projectId
  };
  var queryParams = {};
  var headerParams = {
    'x-user-id': opts.xuserid
  };
  var formParams = {};
  var contentTypes = ['application/vnd.api+json'];
  var accepts = ['application/vnd.api+json', 'application/json'];
  // Raw response: no model deserialization is applied.
  var returnType = null;
  return this.apiClient.callApi(
    '/data/v1/projects/{project_id}/folders', 'POST',
    pathParams, queryParams, headerParams, formParams, postBody,
    contentTypes, accepts, returnType, oauth2client, credentials
  );
};
/**
 * Creates a custom relationship between a folder and another resource within
 * the 'data' domain service (folder, item, or version).
 * @param {String} projectId the project id
 * @param {String} folderId the folder id
 * @param {module:model/CreateRef} body describe the ref to be created
 * @param {Object} oauth2client oauth2client for the call
 * @param {Object} credentials credentials for the call
 */
this.postFolderRelationshipsRef = function(projectId, folderId, body, oauth2client, credentials) {
  // Convenience overload: forward with no optional parameters.
  return this.postFolderRelationshipsRef2(projectId, folderId, body, {}, oauth2client, credentials);
};
/**
 * Creates a custom relationship between a folder and another resource within
 * the 'data' domain service (folder, item, or version).
 * @param {String} projectId the project id
 * @param {String} folderId the folder id
 * @param {module:model/CreateRef} body describe the ref to be created
 * @param {Object} opts Optional parameters
 * @param {String} opts.xuserid API call will be limited to act on behalf of only the user specified
 * @param {Object} oauth2client oauth2client for the call
 * @param {Object} credentials credentials for the call
 */
this.postFolderRelationshipsRef2 = function(projectId, folderId, body, opts, oauth2client, credentials) {
  opts = opts || {};
  // All three parameters are required; == null also matches undefined.
  if (projectId == null) {
    return Promise.reject("Missing the required parameter 'projectId' when calling postFolderRelationshipsRef");
  }
  if (folderId == null) {
    return Promise.reject("Missing the required parameter 'folderId' when calling postFolderRelationshipsRef");
  }
  if (body == null) {
    return Promise.reject("Missing the required parameter 'body' when calling postFolderRelationshipsRef");
  }
  return this.apiClient.callApi(
    '/data/v1/projects/{project_id}/folders/{folder_id}/relationships/refs', 'POST',
    { 'project_id': projectId, 'folder_id': folderId },
    {},
    { 'x-user-id': opts.xuserid },
    {}, body,
    ['application/vnd.api+json'],
    ['application/vnd.api+json', 'application/json'],
    null, oauth2client, credentials
  );
};
/**
 * Modifies folder names. You can also use this endpoint to delete and restore
 * BIM 360 Docs folders by using the hidden attribute.
 * @param {String} projectId the project id
 * @param {String} folderId the folder id
 * @param {Object} body describe the ref to be created
 * @param {Object} opts Optional parameters
 * @param {String} opts.xuserid API call will be limited to act on behalf of only the user specified
 * @param {Object} oauth2client oauth2client for the call
 * @param {Object} credentials credentials for the call
 */
this.patchFolder = function(projectId, folderId, body, opts, oauth2client, credentials) {
  opts = opts || {};
  // All three parameters are required; == null also matches undefined.
  if (projectId == null) {
    return Promise.reject("Missing the required parameter 'projectId' when calling patchFolder");
  }
  if (folderId == null) {
    return Promise.reject("Missing the required parameter 'folderId' when calling patchFolder");
  }
  if (body == null) {
    return Promise.reject("Missing the required parameter 'body' when calling patchFolder");
  }
  return this.apiClient.callApi(
    '/data/v1/projects/{project_id}/folders/{folder_id}', 'PATCH',
    { 'project_id': projectId, 'folder_id': folderId },
    {},
    { 'x-user-id': opts.xuserid },
    {}, body,
    ['application/vnd.api+json'],
    ['application/vnd.api+json', 'application/json'],
    null, oauth2client, credentials
  );
};
};
return exports;
}());
|
<filename>luabindings/luabind/detail/operator_id.hpp
// Copyright (c) 2003 <NAME> and <NAME>
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
// ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
// ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
// OR OTHER DEALINGS IN THE SOFTWARE.
#ifndef LUABIND_OPERATOR_ID_HPP_INCLUDED
#define LUABIND_OPERATOR_ID_HPP_INCLUDED
#include <luabind/config.hpp>
namespace luabind { namespace detail {
// Identifiers for the Lua operators luabind can dispatch to. The order must
// mirror the lookup tables in get_operator_name() and get_operator_symbol()
// below, which are indexed by these values.
enum operator_id
{
op_add = 0,
op_sub,
op_mul,
op_div,
op_pow,
op_lt,
op_le,
op_eq,
op_call,
op_unm,        // unary minus
op_tostring,
op_concat,
op_len,
number_of_operators   // keep last: used as the size of the lookup tables
};
// Maps an operator_id to the corresponding Lua metamethod name
// (e.g. op_add -> "__add"). The table is indexed by operator_id, so its
// order must mirror the enum above; i is assumed to be a valid operator_id
// (no bounds check is performed).
inline const char* get_operator_name(int i)
{
static const char* a[number_of_operators] = {
"__add", "__sub", "__mul", "__div", "__pow",
"__lt", "__le", "__eq", "__call", "__unm",
"__tostring", "__concat", "__len" };
return a[i];
}
// Maps an operator_id to a human-readable symbol (e.g. op_add -> "+"),
// suitable for diagnostics. Indexed by operator_id, so the order must mirror
// the enum above; i is assumed to be a valid operator_id (no bounds check).
inline const char* get_operator_symbol(int i)
{
static const char* a[number_of_operators] = {
"+", "-", "*", "/", "^", "<",
"<=", "==", "()", "- (unary)",
"tostring", "..", "#" };
return a[i];
}
// Returns true for operators that take a single argument when dispatched.
inline bool is_unary(int i)
{
// the reason why unary minus is not considered a unary operator here is
// that it always is given two parameters, where the second parameter always
// is nil.
return i == op_tostring;
}
}}
#endif // LUABIND_OPERATOR_ID_HPP_INCLUDED
|
require "agent/errors"
module Agent
# Pop represents one pending receive ("pop") operation.
# A producer completes it via #send (yielding to obtain the value) while a
# consumer blocks in #wait until either a value arrives or the operation is
# closed. All state transitions are guarded by a single mutex/condvar pair.
class Pop
attr_reader :uuid, :blocking_once, :notifier, :object
# options:
#   :uuid          - identifier for this operation (auto-generated if absent)
#   :blocking_once - optional one-shot guard; when present, #send runs inside
#                    its #perform and returns any error it reports
#   :notifier      - optional observer notified on completion or close
def initialize(options={})
@object = nil
@uuid = options[:uuid] || UUID.generate
@blocking_once = options[:blocking_once]
@notifier = options[:notifier]
@mutex = Mutex.new
@cvar = ConditionVariable.new
@received = false
@closed = false
end
# True once a value has been delivered via #send.
def received?
@received
end
# True once #close has been called.
def closed?
@closed
end
# Blocks the calling thread until a value is received or the pop is closed.
# Returns false when closed without a value, true otherwise.
def wait
@mutex.synchronize do
until @received || @closed
@cvar.wait(@mutex)
end
return false if @closed
received?
end
end
# Completes the pop with the value produced by the given block. With a
# blocking_once, the block runs under its #perform and the error (if any) is
# returned; otherwise an Errors::Rollback raised by the block aborts the
# delivery silently. Note: @received is set even when already closed, though
# the yielded value is discarded in that case.
def send
@mutex.synchronize do
if @blocking_once
_, error = @blocking_once.perform do
@object = yield unless @closed
@received = true
@cvar.signal
@notifier.notify(self) if @notifier
end
return error
else
begin
@object = yield unless @closed
@received = true
@cvar.signal
@notifier.notify(self) if @notifier
rescue Errors::Rollback
end
end
end
end
# Marks the pop as closed and wakes all waiters; a no-op once a value has
# already been received.
def close
@mutex.synchronize do
return if @received
@closed = true
@cvar.broadcast
@notifier.notify(self) if @notifier
end
end
end
end
|
# Build and install using src/pom.xml with the Scala 2.12 / Spark 3.1.1
# profile; -D skipTests skips running unit tests during the build.
mvn -f src/pom.xml install -P scala-2.12_spark-3.1.1 -D skipTests
|
#!/bin/bash
#script to update code from github
# NOTE(review): 'git checkout' with no arguments does not switch branches; it
# only reports worktree state. If the intent was to check out a specific
# branch before pulling, a branch name is missing here — confirm.
git checkout
# Fetch and merge the upstream changes for the current branch.
git pull
|
<reponame>gfw-api/gfw-imazon-alerts-api<filename>app/src/routes/api/v2/imazonAlerts.router.js
const Router = require('koa-router');
const logger = require('logger');
const CartoDBServiceV2 = require('services/cartoDBServiceV2');
const NotFound = require('errors/notFound');
const ImazonAlertsSerializerV2 = require('serializers/imazonAlertsSerializerV2');
// Single router instance; every route registered below is mounted under
// the /imazon-alerts prefix.
const router = new Router({
prefix: '/imazon-alerts'
});
class ImazonAlertsRouterV2 {

    // GET /admin/:iso — country-level alert statistics.
    static* getAdm0() {
        logger.info('Obtaining national data');
        const alerts = yield CartoDBServiceV2.getAdm0(this.params.iso, this.query.alertQuery, this.query.period);
        this.body = ImazonAlertsSerializerV2.serialize(alerts);
    }

    // GET /admin/:iso/:id1 — first-level subdivision statistics.
    static* getAdm1() {
        logger.info('Obtaining subnational data');
        const alerts = yield CartoDBServiceV2.getAdm1(this.params.iso, this.params.id1, this.query.alertQuery, this.query.period);
        this.body = ImazonAlertsSerializerV2.serialize(alerts);
    }

    // GET /admin/:iso/:id1/:id2 — second-level subdivision statistics.
    static* getAdm2() {
        logger.info('Obtaining subnational data');
        const alerts = yield CartoDBServiceV2.getAdm2(this.params.iso, this.params.id1, this.params.id2, this.query.alertQuery, this.query.period);
        this.body = ImazonAlertsSerializerV2.serialize(alerts);
    }

    // GET /use/:name/:id — alerts for a land-use area; 404 when the backing
    // table is unknown.
    static* use() {
        logger.info('Obtaining use data with name %s and id %s', this.params.name, this.params.id);
        try {
            const alerts = yield CartoDBServiceV2.getUse(this.params.name, this.params.id, this.query.alertQuery, this.query.period);
            this.body = ImazonAlertsSerializerV2.serialize(alerts);
        } catch (err) {
            if (!(err instanceof NotFound)) {
                throw err;
            }
            this.throw(404, 'Table not found');
        }
    }

    // GET /wdpa/:id — alerts for a protected area.
    static* wdpa() {
        logger.info('Obtaining wpda data with id %s', this.params.id);
        const alerts = yield CartoDBServiceV2.getWdpa(this.params.id, this.query.alertQuery, this.query.period);
        this.body = ImazonAlertsSerializerV2.serialize(alerts);
    }

    // GET / — world-level alerts for a geostore; 404 when it does not exist.
    static* world() {
        logger.info('Obtaining world data');
        this.assert(this.query.geostore, 400, 'GeoJSON param required');
        try {
            const alerts = yield CartoDBServiceV2.getWorld(this.query.geostore, this.query.alertQuery, this.query.period);
            this.body = ImazonAlertsSerializerV2.serialize(alerts);
        } catch (err) {
            if (!(err instanceof NotFound)) {
                throw err;
            }
            this.throw(404, 'Geostore not found');
        }
    }

    // Normalizes a bare Polygon or Feature into a FeatureCollection; anything
    // else is returned untouched.
    static checkGeojson(geojson) {
        const kind = geojson.type.toLowerCase();
        if (kind === 'polygon') {
            return {
                type: 'FeatureCollection',
                features: [{
                    type: 'Feature',
                    geometry: geojson
                }]
            };
        }
        if (kind === 'feature') {
            return {
                type: 'FeatureCollection',
                features: [geojson]
            };
        }
        return geojson;
    }

    // POST / — world-level alerts for a GeoJSON payload in the request body.
    static* worldWithGeojson() {
        logger.info('Obtaining world data with geostore');
        this.assert(this.request.body.geojson, 400, 'GeoJSON param required');
        try {
            const normalized = ImazonAlertsRouterV2.checkGeojson(this.request.body.geojson);
            const alerts = yield CartoDBServiceV2.getWorldWithGeojson(normalized, this.query.alertQuery, this.query.period);
            this.body = ImazonAlertsSerializerV2.serialize(alerts);
        } catch (err) {
            if (!(err instanceof NotFound)) {
                throw err;
            }
            this.throw(404, 'Geostore not found');
        }
    }

    // GET /latest — most recent alert dates, optionally limited.
    static* latest() {
        logger.info('Obtaining latest data');
        try {
            const alerts = yield CartoDBServiceV2.latest(this.query.limit);
            this.body = ImazonAlertsSerializerV2.serializeLatest(alerts);
        } catch (err) {
            if (!(err instanceof NotFound)) {
                throw err;
            }
            this.throw(404, 'Geostore not found');
        }
    }
}
// koa v1 caching middleware: if a cached response exists for this request
// (this.cashed() — presumably provided by koa-cash; confirm), serve it and
// stop; otherwise continue down the middleware chain.
const isCached = function* isCached(next) {
    if (yield this.cashed()) {
        return;
    }
    yield next;
};
// Route table. All GET endpoints are cache-checked via isCached before
// hitting CartoDB; the POST endpoint carries inline GeoJSON and is not cached.
router.get('/admin/:iso', isCached, ImazonAlertsRouterV2.getAdm0);
router.get('/admin/:iso/:id1', isCached, ImazonAlertsRouterV2.getAdm1);
router.get('/admin/:iso/:id1/:id2', isCached, ImazonAlertsRouterV2.getAdm2);
router.get('/use/:name/:id', isCached, ImazonAlertsRouterV2.use);
router.get('/wdpa/:id', isCached, ImazonAlertsRouterV2.wdpa);
router.get('/', isCached, ImazonAlertsRouterV2.world);
router.post('/', ImazonAlertsRouterV2.worldWithGeojson);
router.get('/latest', isCached, ImazonAlertsRouterV2.latest);
module.exports = router;
|
// Redux action type: marks a course as the currently selected one.
export const SET_SELECTED_COURSE = "SET_SELECTED_COURSE";
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class CheckPassword {
    /**
     * Validates that {@code password} is at least 8 characters long and
     * contains at least one uppercase letter, one lowercase letter and one
     * special character from {@code !@#$%^&*}. Only letters, digits and
     * those special characters are permitted.
     * <p>
     * NOTE(review): digits are allowed by the character class but NOT
     * required (no {@code (?=.*\d)} lookahead) — confirm that is intentional.
     */
    public static boolean isValidPassword(String password) {
        Pattern p = Pattern.compile("^(?=.*[A-Z])(?=.*[a-z])(?=.*[!@#$%^&*])[A-Za-z\\d!@#$%^&*]{8,}$");
        Matcher m = p.matcher(password);
        return m.matches();
    }

    // Smoke test: "MyP@ssword" satisfies all three lookaheads → prints true.
    public static void main(String[] args) {
        System.out.println(isValidPassword("MyP@ssword"));
    }
}
/*
* Copyright 2018-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.filippov.data.validation.tool.metadata;
import com.filippov.data.validation.tool.AbstractUnitTest;
import com.filippov.data.validation.tool.TestUuidGenerator;
import com.filippov.data.validation.tool.model.Transformer;
import com.filippov.data.validation.tool.model.metadata.Metadata;
import com.filippov.data.validation.tool.model.pair.ColumnPair;
import com.filippov.data.validation.tool.model.pair.TablePair;
import com.filippov.data.validation.tool.utils.uuid.RandomUuidRuntimeGenerator;
import com.filippov.data.validation.tool.validation.transformer.datatype.obj.ObjectToIntegerTransformer;
import com.filippov.data.validation.tool.validation.transformer.datatype.obj.ObjectToStringTransformer;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
import java.util.Optional;
import static org.assertj.core.api.Assertions.assertThat;
public class RuntimeMetadataBinderTest extends AbstractUnitTest {

    /** Table-level fixtures: table name, expected bound pair, expected key-column transformer. */
    static Object[][] tablePairProvider() {
        return new Object[][]{
                {USERS, USERS_TABLE_PAIR, new ObjectToStringTransformer()},
                {DEPARTMENTS, DEPARTMENTS_TABLE_PAIR, new ObjectToStringTransformer()}
        };
    }

    /** Column-level fixtures: table name, column name, expected bound pair, expected transformer. */
    static Object[][] columnPairProvider() {
        return new Object[][]{
                {USERS, USERS_ID, USERS_ID_COLUMN_PAIR, new ObjectToStringTransformer()},
                {USERS, USERS_USERNAME, USERS_USERNAME_COLUMN_PAIR, new ObjectToStringTransformer()},
                {USERS, USERS_PASSWORD, USERS_PASSWORD_COLUMN_PAIR, new ObjectToStringTransformer()},
                {DEPARTMENTS, DEPARTMENTS_ID, DEPARTMENTS_ID_COLUMN_PAIR, new ObjectToStringTransformer()},
                {DEPARTMENTS, DEPARTMENTS_NAME, DEPARTMENTS_NAME_COLUMN_PAIR, new ObjectToStringTransformer()},
                {DEPARTMENTS, DEPARTMENTS_NUMBER_OF_EMPLOYEES, DEPARTMENTS_NUMBER_OF_EMPLOYEES_COLUMN_PAIR, new ObjectToIntegerTransformer()}
        };
    }

    /**
     * Binding left/right datasource metadata must produce a table pair whose
     * name, datasource tables and key-column pair (including transformers)
     * match the expected fixture.
     */
    @ParameterizedTest()
    @MethodSource("tablePairProvider")
    void tablePairBinderTest(String tablePairName, TablePair expectedTablePair, Transformer<Object, ?> expectedTransformer) {
        final Metadata bindedMetadata = new RuntimeMetadataBinder(new TestUuidGenerator())
                .bind(LEFT_DATASOURCE.getMetadata(), RIGHT_DATASOURCE.getMetadata());
        assertThat(bindedMetadata.getTablePairByName(tablePairName)).isNotEmpty();
        final TablePair tablePair = bindedMetadata.getTablePairByName(tablePairName).get();
        assertThat(tablePair.getName()).isEqualTo(expectedTablePair.getName());
        assertThat(tablePair.getLeftDatasourceTable()).isEqualTo(expectedTablePair.getLeftDatasourceTable());
        assertThat(tablePair.getRightDatasourceTable()).isEqualTo(expectedTablePair.getRightDatasourceTable());
        final ColumnPair keyColumnPair = tablePair.getKeyColumnPair();
        assertThat(keyColumnPair.getName()).isEqualTo(expectedTablePair.getKeyColumnPair().getName());
        assertThat(keyColumnPair.getTablePair()).isEqualTo(tablePair);
        assertThat(keyColumnPair.getLeftDatasourceColumn()).isEqualTo(expectedTablePair.getKeyColumnPair().getLeftDatasourceColumn());
        assertThat(keyColumnPair.getRightDatasourceColumn()).isEqualTo(expectedTablePair.getKeyColumnPair().getRightDatasourceColumn());
        assertThat(keyColumnPair.getLeftTransformer())
                .isExactlyInstanceOf(expectedTransformer.getClass());
        assertThat(keyColumnPair.getRightTransformer())
                .isExactlyInstanceOf(expectedTransformer.getClass());
    }

    /**
     * Binding must also produce correct column pairs for every column fixture.
     * Renamed from the copy-pasted name `tablePairBinderTest` (which created a
     * confusing overload in JUnit reports) to reflect what it actually checks.
     * NOTE(review): this test uses RandomUuidRuntimeGenerator while the table
     * test uses TestUuidGenerator — confirm the difference is intentional.
     */
    @ParameterizedTest()
    @MethodSource("columnPairProvider")
    void columnPairBinderTest(String tablePairName, String columnPairName, ColumnPair expectedColumnPair, Transformer<Object, ?> expectedTransformer) {
        final Metadata bindedMetadata = new RuntimeMetadataBinder(new RandomUuidRuntimeGenerator())
                .bind(LEFT_DATASOURCE.getMetadata(), RIGHT_DATASOURCE.getMetadata());
        final Optional<TablePair> tablePairOptional = bindedMetadata.getTablePairByName(tablePairName);
        assertThat(tablePairOptional).isNotEmpty();
        final TablePair tablePair = tablePairOptional.get();
        assertThat(bindedMetadata.getColumnPairByName(tablePair, columnPairName)).isNotEmpty();
        final ColumnPair columnPair = bindedMetadata.getColumnPairByName(tablePair, columnPairName).get();
        assertThat(columnPair.getName()).isEqualTo(expectedColumnPair.getName());
        assertThat(columnPair.getTablePair()).isEqualTo(tablePair);
        assertThat(columnPair.getLeftDatasourceColumn()).isEqualTo(expectedColumnPair.getLeftDatasourceColumn());
        assertThat(columnPair.getRightDatasourceColumn()).isEqualTo(expectedColumnPair.getRightDatasourceColumn());
        assertThat(columnPair.getLeftTransformer())
                .isExactlyInstanceOf(expectedTransformer.getClass());
        assertThat(columnPair.getRightTransformer())
                .isExactlyInstanceOf(expectedTransformer.getClass());
    }
}
|
<reponame>zj-dreamly/shiro-security
package com.github.zj.dreamly.util;
import com.github.zj.dreamly.exception.CustomException;
import com.github.zj.dreamly.model.common.Constant;
import com.github.zj.dreamly.util.common.SerializableUtil;
import com.github.zj.dreamly.util.common.StringUtil;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.JedisPool;
import java.util.Set;
/**
 * JedisUtil — static helper around a Jedis connection pool.
 * (Prefer storing byte arrays; storing JSON strings is slower.)
 *
 * @author 苍海之南
 * @date 2018/9/4 15:45
 */
@Component
public class JedisUtil {
    /**
     * Statically injected JedisPool.
     * Normally JedisUtil would be injected as a bean into controllers and
     * services, but Shiro's CustomCache override cannot have JedisUtil
     * injected, so the pool is injected statically and JedisUtil is used via
     * static methods instead.
     * https://blog.csdn.net/W_Z_W_888/article/details/79979103
     */
    private static JedisPool jedisPool;

    @Autowired
    public void setJedisPool(JedisPool jedisPool) {
        JedisUtil.jedisPool = jedisPool;
    }

    /**
     * Borrow a Jedis instance from the pool.
     * NOTE(review): returns null when the pool has not been injected yet —
     * callers must null-check; confirm that is the intended contract.
     *
     * @return redis.clients.jedis.Jedis
     * @author 苍海之南
     * @date 2018/9/4 15:47
     */
    public static synchronized Jedis getJedis() {
        try {
            if (jedisPool != null) {
                return jedisPool.getResource();
            } else {
                return null;
            }
        } catch (Exception e) {
            throw new CustomException("获取Jedis资源异常:" + e.getMessage());
        }
    }

    /**
     * Shut down the pool.
     * NOTE(review): this closes the whole JedisPool, not a single borrowed
     * connection — only call on application shutdown.
     *
     * @author 苍海之南
     * @date 2018/9/5 9:16
     */
    public static void closePool() {
        try {
            jedisPool.close();
        } catch (Exception e) {
            throw new CustomException("释放Jedis资源异常:" + e.getMessage());
        }
    }

    /**
     * Get a value by key and deserialize it back into an Object;
     * returns null when the key is absent.
     *
     * @return java.lang.Object
     * @author 苍海之南
     * @date 2018/9/4 15:47
     */
    public static Object getObject(String key) {
        try (Jedis jedis = jedisPool.getResource()) {
            byte[] bytes = jedis.get(key.getBytes());
            if (StringUtil.isNotNull(bytes)) {
                return SerializableUtil.unserializable(bytes);
            }
        } catch (Exception e) {
            throw new CustomException("获取Redis键值getObject方法异常:key=" + key + " cause=" + e.getMessage());
        }
        return null;
    }

    /**
     * Serialize value and store it under key.
     *
     * @return java.lang.String
     * @author 苍海之南
     * @date 2018/9/4 15:49
     */
    public static String setObject(String key, Object value) {
        try (Jedis jedis = jedisPool.getResource()) {
            return jedis.set(key.getBytes(), SerializableUtil.serializable(value));
        } catch (Exception e) {
            throw new CustomException("设置Redis键值setObject方法异常:key=" + key + " value=" + value + " cause=" + e.getMessage());
        }
    }

    /**
     * Serialize value, store it under key and set a TTL (seconds).
     * The expiry is only applied when the SET reply is "OK".
     *
     * @return java.lang.String
     * @author 苍海之南
     * @date 2018/9/4 15:50
     */
    public static String setObject(String key, Object value, int expiretime) {
        String result;
        try (Jedis jedis = jedisPool.getResource()) {
            result = jedis.set(key.getBytes(), SerializableUtil.serializable(value));
            if (Constant.OK.equals(result)) {
                jedis.expire(key.getBytes(), expiretime);
            }
            return result;
        } catch (Exception e) {
            throw new CustomException("设置Redis键值setObject方法异常:key=" + key + " value=" + value + " cause=" + e.getMessage());
        }
    }

    /**
     * Get a string (JSON) value by key.
     *
     * @return java.lang.Object
     * @author 苍海之南
     * @date 2018/9/4 15:47
     */
    public static String getJson(String key) {
        try (Jedis jedis = jedisPool.getResource()) {
            return jedis.get(key);
        } catch (Exception e) {
            throw new CustomException("获取Redis键值getJson方法异常:key=" + key + " cause=" + e.getMessage());
        }
    }

    /**
     * Store a string (JSON) value under key.
     *
     * @return java.lang.String
     * @author 苍海之南
     * @date 2018/9/4 15:49
     */
    public static String setJson(String key, String value) {
        try (Jedis jedis = jedisPool.getResource()) {
            return jedis.set(key, value);
        } catch (Exception e) {
            throw new CustomException("设置Redis键值setJson方法异常:key=" + key + " value=" + value + " cause=" + e.getMessage());
        }
    }

    /**
     * Store a string (JSON) value under key with a TTL (seconds).
     * The expiry is only applied when the SET reply is "OK".
     *
     * @return java.lang.String
     * @author 苍海之南
     * @date 2018/9/4 15:50
     */
    public static String setJson(String key, String value, int expiretime) {
        String result;
        try (Jedis jedis = jedisPool.getResource()) {
            result = jedis.set(key, value);
            if (Constant.OK.equals(result)) {
                jedis.expire(key, expiretime);
            }
            return result;
        } catch (Exception e) {
            throw new CustomException("设置Redis键值setJson方法异常:key=" + key + " value=" + value + " cause=" + e.getMessage());
        }
    }

    /**
     * Delete a key; returns the number of keys removed.
     *
     * @return java.lang.Long
     * @author 苍海之南
     * @date 2018/9/4 15:50
     */
    public static Long delKey(String key) {
        try (Jedis jedis = jedisPool.getResource()) {
            return jedis.del(key.getBytes());
        } catch (Exception e) {
            throw new CustomException("删除Redis的键delKey方法异常:key=" + key + " cause=" + e.getMessage());
        }
    }

    /**
     * Check whether a key exists.
     *
     * @return java.lang.Boolean
     * @author 苍海之南
     * @date 2018/9/4 15:51
     */
    public static Boolean exists(String key) {
        try (Jedis jedis = jedisPool.getResource()) {
            return jedis.exists(key.getBytes());
        } catch (Exception e) {
            throw new CustomException("查询Redis的键是否存在exists方法异常:key=" + key + " cause=" + e.getMessage());
        }
    }

    /**
     * Pattern-match keys, returned as strings.
     * (KEYS blocks the server and can hurt a large database — avoid in production.)
     *
     * @return java.util.Set&lt;java.lang.String&gt;
     * @author 苍海之南
     * @date 2018/9/6 9:43
     */
    public static Set<String> keysS(String key) {
        try (Jedis jedis = jedisPool.getResource()) {
            return jedis.keys(key);
        } catch (Exception e) {
            throw new CustomException("模糊查询Redis的键集合keysS方法异常:key=" + key + " cause=" + e.getMessage());
        }
    }

    /**
     * Pattern-match keys, returned as byte arrays.
     * (KEYS blocks the server and can hurt a large database — avoid in production.)
     *
     * @return java.util.Set&lt;byte[]&gt;
     * @author 苍海之南
     * @date 2018/9/6 9:43
     */
    public static Set<byte[]> keysB(String key) {
        try (Jedis jedis = jedisPool.getResource()) {
            return jedis.keys(key.getBytes());
        } catch (Exception e) {
            throw new CustomException("模糊查询Redis的键集合keysB方法异常:key=" + key + " cause=" + e.getMessage());
        }
    }

    /**
     * Remaining time-to-live of a key, in seconds
     * (-1 = no expiry, -2 = key does not exist).
     *
     * @return java.lang.Long
     * @author 苍海之南
     * @date 2018/9/11 16:26
     */
    public static Long ttl(String key) {
        Long result;
        try (Jedis jedis = jedisPool.getResource()) {
            result = jedis.ttl(key);
            return result;
        } catch (Exception e) {
            throw new CustomException("获取Redis键过期剩余时间ttl方法异常:key=" + key + " cause=" + e.getMessage());
        }
    }
}
|
const orderModel = require('../models/orders.model')
const transferModel = require('../models/transfers.model')
/**
 * POST handler: creates an order and attaches it to the transfer for its
 * (origin, destination) route, creating that transfer first if it does not
 * exist yet. Responds with a confirmation message, or 500 on failure.
 */
const newOrder = async (req, res) => {
    try {
        const { user, orderContent, origin, destination } = req.body;
        const order = await orderModel.create({ user, orderContent, origin, destination });
        // findOne already resolves to null when nothing matches, so the
        // original `|| null` was redundant; `var` replaced with `let`.
        let transfer = await transferModel.findOne({ origin: order.origin, destination: order.destination });
        if (transfer === null) {
            transfer = await transferModel.create({
                origin: order.origin,
                destination: order.destination,
                orders: [],
                status: 'Not Prepared'
            });
        }
        transfer.orders.push(order);
        await transfer.save();
        res.send('Congrats! Your order between ' + order.origin + ' and ' + order.destination + ' have been placed.');
    } catch (error) {
        console.log(error);
        res.status(500).send(error);
    }
}
// GET handler: returns every order as JSON.
// Fix: the original `.catch((err) => error(err, res))` referenced an `error`
// helper that is never defined in this module, so a failing query would
// throw a ReferenceError instead of responding; send a 500 directly.
function getAllOrders(req, res) {
    orderModel
        .find()
        .then(response => res.json(response))
        .catch(err => res.status(500).send(err));
}
// GET handler: returns the order matching the :id route param (as an array).
// Fix: replaces the call to the undefined `error` helper with a direct 500.
function getOrderById(req, res) {
    orderModel
        .find({ _id: req.params.id })
        .then(response => res.json(response))
        .catch(err => res.status(500).send(err));
}
// PUT/PATCH handler: updates the order :id with the request body and returns
// the updated document (runValidators keeps schema validation on update).
// Fix: replaces the call to the undefined `error` helper with a direct 500.
function updateOrder(req, res) {
    orderModel
        .findByIdAndUpdate(req.params.id, req.body, {
            new: true,
            runValidators: true
        })
        .then(response => res.json(response))
        .catch(err => res.status(500).send(err));
}
// DELETE handler: removes the order :id and confirms with a message.
// Fixes: `orderDeleted` was an implicit global (leaked across requests);
// the `.catch` called an undefined `error` helper.
function deleteOrderById(req, res) {
    const orderDeleted = req.params.id;
    orderModel
        .remove({ _id: req.params.id })
        .then(() => res.json('The order with ID ' + orderDeleted + ' has been deleted.'))
        .catch(err => res.status(500).send(err));
}
// Public controller surface consumed by the orders router.
module.exports = {
    newOrder,
    getAllOrders,
    getOrderById,
    updateOrder,
    deleteOrderById
}
<filename>client.py
from game import *
logging.basicConfig(filename='log.log', level=logging.DEBUG)
def main():
    """Run one game of networked battleship as either server or client.

    The server listens, the client connects; turns alternate, each turn
    exchanging a ``Shot`` that also carries hit/miss feedback about the
    opponent's previous shot.
    """
    # network setup
    host = 'localhost'
    port = 5000
    last_shot_hit = False   # whether the enemy's last shot hit us (sent back to them)
    last_move = None        # our most recent Shot, updated with enemy feedback
    player_won = False
    is_server = input("Are you a server or a client? (c/s)").lower()[0] == "s"
    # Server moves second by convention; client shoots first.
    player_turn = not is_server
    if not is_server:
        host = input("Enter hostname (default: localhost)") or host
        port = int(input("Enter port (default: 5000)") or port)
    with Network(host, port, is_server) as net:
        # init
        player_board = create_empty_board()
        enemy_board = create_empty_board()
        place_ships(player_board, enemy_board)
        print("Okay, let's start:")
        print_boards(player_board, enemy_board)
        # game on
        while not player_lost(player_board):
            if player_turn:
                x, y, exit_ = ask_player_for_shot()
                if exit_:
                    break
                # Piggy-back feedback about the enemy's last shot on our shot.
                last_move = Shot(x, y, last_shot_hit)
                net.send(bytes(last_move))
            else:
                print("Waiting for response...")
                data = net.recv()
                if not data:
                    # Connection closed by the opponent: they conceded/lost.
                    player_won = True
                    break
                enemy_shot = Shot.decode(data)
                # true if enemy hit player
                last_shot_hit = update_player_board(enemy_shot, player_board)
                if last_move:
                    # The enemy's shot tells us whether OUR previous shot hit.
                    last_move.last_shot_hit = enemy_shot.last_shot_hit
                    update_enemy_board(last_move, enemy_board)
                print_boards(player_board, enemy_board)
            player_turn = not player_turn
    if player_won:
        print("You won!")
    else:
        print("You lost!")


if __name__ == "__main__":
    main()
|
/**
* Copyright (C) 2011 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.analytics.math;
/**
 * Simple enum useful in telling tensor functions which axis you would like
 * to operate on.
 */
public enum Axis {
  /**
   * The first axis
   */
  X,
  /**
   * The second axis
   */
  Y,
  /**
   * The third axis
   */
  Z
}
|
-- Count users created within the last 7 days (from midnight 7 days ago).
-- Comparing the raw column instead of DATE(created_at) keeps the predicate
-- sargable, so an index on created_at can be used. The forms are equivalent:
-- for a midnight boundary D, DATE(created_at) >= D <=> created_at >= D.
SELECT COUNT(*)
FROM users
WHERE created_at >= DATE_SUB(CURDATE(), INTERVAL 7 DAY);
// JSON-Schema-style validation descriptor for a datachannel record.
module.exports = {
    type: 'object',
    properties: {
        prototypeId: { type: 'string' },
        datachannelName: { type: 'string' },
        datachannelId: { type: 'string' },
        datachannelDescription: { type: 'string' },
        // Numeric discriminator for the channel; distinct from `channelType` below.
        type: { type: 'number' },
        channelType: { type: 'object' },
        isHidden: { type: 'boolean' },
        format: { type: 'object' },
        createUserId: { type: 'string' },
        // Timestamps are stored as integers (presumably epoch values — confirm units).
        updatedAt: { type: 'integer' },
        createdAt: { type: 'integer' },
        isActive: { type: 'boolean' },
        hasHistory: { type: 'boolean' },
    },
    required: [
        'prototypeId',
        'datachannelId',
        'datachannelName',
        'datachannelDescription',
        'format',
        'type',
        'channelType',
        'isHidden',
        'createUserId',
        'updatedAt',
        'createdAt',
        'isActive',
        'hasHistory',
    ],
};
|
import { Component } from '@angular/core';
import { Http } from '@angular/http';
import { NavController } from 'ionic-angular';
import { ToastController } from 'ionic-angular';
import 'rxjs/add/operator/map';
// import { DatePicker } from '@ionic-native/date-picker';
@Component({
    templateUrl: 'feedback.page.html',
    styleUrls: ['feedback.page.scss']
})
export class FeedbackPage {
    // NOTE(review): `Http` from @angular/http is deprecated in favour of
    // HttpClient; consider migrating.
    appName = 'Home Diary';
    todo: any;               // form model bound from the template
    answers: Array<any>;     // collected questionnaire answers
    today: any;              // ISO timestamp captured at construction
    questions: any;          // loaded from the day-1 form JSON
    numOfDays: Array<Number>;

    constructor(private navController: NavController, public toastCtrl: ToastController, public http: Http) {
        this.todo = {};
        this.answers = [];
        this.numOfDays = new Array(30);
        this.today = new Date().toISOString();
    }

    // Loads the questionnaire definition on init.
    // NOTE(review): class does not declare `implements OnInit` — ngOnInit is
    // still called by Angular, but declaring the interface is recommended.
    ngOnInit() {
        this.http.get('src/api/form_day_1.json').map(res => res.json()).subscribe(data => {
            this.questions = data.questions;
            // let n = data.questions.length;
        });
    }

    // Currently only logs the answers; toast confirmation is stubbed out.
    submitForm() {
        console.log(this.answers);
        // let toast = this.toastCtrl.create({
        //   message: 'Submitted successfully',
        //   duration: 3000,
        //   showCloseButton: true,
        //   position: 'bottom',
        //   closeButtonText: 'x'
        // });
        // toast.present();
    }
}
|
#!/bin/sh
# Installation based on https://raspberrypi.stackexchange.com/questions/108592/use-systemd-networkd-for-general-networking/108593#108593
# Replaces the Raspberry Pi's default networking stack with systemd-networkd:
# wired interfaces use DHCP, wlan0 becomes a WPA2 access point with its own
# DHCP server.
echo Please ensure that internet is working, otherwise this script may break your device
echo Press enter to continue ...
# POSIX `read` requires a variable name (bare `read` fails under dash).
read dummy
echo Removing Raspberry pi\' default network managers
# -y keeps apt non-interactive so the script does not stall on prompts.
apt -y --autoremove purge ifupdown dhcpcd5 isc-dhcp-client isc-dhcp-common rsyslog
apt-mark hold ifupdown dhcpcd5 isc-dhcp-client isc-dhcp-common rsyslog raspberrypi-net-mods openresolv
rm -rf /etc/network /etc/dhcp
echo Setting up Systemd Networkd
apt -y --autoremove purge avahi-daemon
apt-mark hold avahi-daemon libnss-mdns
apt -y install libnss-resolve
ln -sf /run/systemd/resolve/stub-resolv.conf /etc/resolv.conf
systemctl enable systemd-networkd.service systemd-resolved.service
echo Configuring Systemd Networkd
# Fix possible dns errors
echo "DNSSEC=no" >> /etc/systemd/resolved.conf
# Setup dynamic ethernet connection
cat > /etc/systemd/network/20-wired.network <<EOF
[Match]
Name=e*

[Network]
LLMNR=no
LinkLocalAddressing=no
MulticastDNS=yes
DHCP=ipv4
EOF
# Setup wifi for hotspot (duplicate DHCPServer=yes line removed)
cat > /etc/systemd/network/08-wlan0.network <<EOF
[Match]
Name=wlan0

[Network]
Address=192.168.1.1/24
MulticastDNS=yes
DHCPServer=yes
IPMasquerade=yes

[DHCPServer]
DNS=84.200.69.80 1.1.1.1
EOF
# Ensure that it is readable
chmod +r /etc/systemd/network/20-wired.network /etc/systemd/network/08-wlan0.network
# Setup wpa supplicant
# Based on: https://raspberrypi.stackexchange.com/questions/88214/setting-up-a-raspberry-pi-as-an-access-point-the-easy-way/88234#88234
cat > /etc/wpa_supplicant/wpa_supplicant-wlan0.conf <<EOF
country=DE
ctrl_interface=DIR=/var/run/wpa_supplicant GROUP=netdev
update_config=1
network={
    ssid="Robot"
    mode=2
    frequency=2437
    #key_mgmt=NONE # uncomment this for an open hotspot
    key_mgmt=WPA-PSK
    proto=RSN WPA
    psk="password"
}
EOF
chmod 600 /etc/wpa_supplicant/wpa_supplicant-wlan0.conf
systemctl disable wpa_supplicant.service
systemctl enable wpa_supplicant@wlan0.service
rfkill unblock wlan
echo Finished network setup
exit
|
import React, { Component } from 'react';

// Top-level scaffold for the health-tracking UI. The profile and data
// sections are placeholders to be filled by child components.
class HealthApp extends Component {
    render() {
        return (
            <div>
                <h1>Health Tracking App</h1>
                <h3>User Profiles</h3>
                <div>
                    {/* Render user profiles here. */}
                </div>
                <h3>Health Data</h3>
                <div>
                    {/* Render health data display and tracking components. */}
                </div>
            </div>
        );
    }
}
export default HealthApp;
class StringChecker:
def containsVowels(self, s):
vowels = set('aeiouAEIOU')
for letter in s:
if letter in vowels:
return True
return False |
package triangulate
// Triangle holds the three vertex coordinates of one output triangle.
type Triangle struct {
	X1 float32
	Y1 float32
	X2 float32
	Y2 float32
	X3 float32
	Y3 float32
}

// vec2 is an internal 2-D point used while clipping the polygon.
type vec2 struct {
	x float32
	y float32
}
// Code translated from
// https://github.com/CMU-Graphics/DrawSVG/blob/master/src/triangulation.cpp
// TODO find and implement an algorithm on your own.

// Triangulate converts a simple polygon, given as a flat [x0,y0,x1,y1,...]
// slice, into triangles using ear clipping. Returns nil for fewer than
// 3 vertices; for a non-simple polygon it returns whatever triangles were
// produced before the safety counter tripped.
func Triangulate(points []float32) []*Triangle {
	contour := []vec2{}
	for i := 0; i < len(points); i += 2 {
		contour = append(contour, vec2{points[i], points[i+1]})
	}
	// Initialize list of vertices in the polygon
	n := len(contour)
	if n < 3 {
		return nil
	}
	// We want a counter-clockwise polygon in V
	// (area() is positive for CCW winding; otherwise index in reverse).
	V := make([]int, n)
	if 0.0 < area(contour) {
		for v := 0; v < n; v++ {
			V[v] = v
		}
	} else {
		for v := 0; v < n; v++ {
			V[v] = (n - 1) - v
		}
	}
	nv := n
	// Remove nv-2 Vertices, each time creating a triangle
	triangles := []*Triangle{}
	count := 2 * nv // Error detection
	for m, v := 0, nv-1; nv > 2; {
		// If we loop it is likely a non-simple polygon
		if 0 >= count {
			return triangles // Error, probably a bad polygon!
		}
		count -= 1
		// Three consecutive vertices in current polygon, <u,v,w>
		u := v
		if nv <= u { // prev
			u = 0
		}
		v = u + 1
		if nv <= v { // new v
			v = 0
		}
		w := v + 1
		if nv <= w { // net
			w = 0
		}
		//fmt.Println("u,v,w", u, v, w)
		// snip() checks that <u,v,w> is a clippable ear (convex, empty).
		if snip(contour, u, v, w, nv, V) {
			var a, b, c, s, t int
			a, b, c = V[u], V[v], V[w]
			/*
				fmt.Printf("nv %d, (%f, %f, %f, %f, %f, %f)\n", nv,
					contour[a].x, contour[a].y,
					contour[b].x, contour[b].y,
					contour[c].x, contour[c].y)
			*/
			triangles = append(triangles, &Triangle{
				contour[a].x, contour[a].y,
				contour[b].x, contour[b].y,
				contour[c].x, contour[c].y,
			})
			m += 1
			// Remove v from remaining polygon
			s, t = v, v+1
			for t < nv {
				//fmt.Println("s, t", s, t)
				V[s] = V[t]
				s += 1
				t += 1
			}
			nv -= 1
			count = 2 * nv // reset error detection counter
		}
		//fmt.Println("nv", nv)
	}
	return triangles
}
// area returns the signed area of the polygon via the shoelace formula:
// positive for counter-clockwise winding, negative for clockwise.
func area(contour []vec2) float32 {
	n := len(contour)
	var sum float32
	for q := 0; q < n; q++ {
		p := (q + n - 1) % n // previous vertex, wrapping around
		sum += contour[p].x*contour[q].y - contour[q].x*contour[p].y
	}
	return sum * 0.5
}
// snip reports whether the ear formed by polygon vertices u,v,w (indices
// into contour via the active-vertex list V) can be clipped: the triangle
// must wind counter-clockwise with non-negligible area and contain no other
// remaining vertex.
func snip(contour []vec2, u, v, w, n int, V []int) bool {
	const EPSILON = 0.0000000001
	Ax := contour[V[u]].x
	Ay := contour[V[u]].y
	Bx := contour[V[v]].x
	By := contour[V[v]].y
	Cx := contour[V[w]].x
	Cy := contour[V[w]].y
	// Reject degenerate or clockwise ears: cross product (B-A)x(C-A)
	// (twice the signed area) must exceed epsilon.
	if EPSILON > (((Bx - Ax) * (Cy - Ay)) - ((By - Ay) * (Cx - Ax))) {
		return false
	}
	// The ear is only clippable if no other active vertex lies inside it.
	for p := 0; p < n; p++ {
		if p == u || p == v || p == w {
			continue
		}
		Px := contour[V[p]].x
		Py := contour[V[p]].y
		if inside(Ax, Ay, Bx, By, Cx, Cy, Px, Py) {
			return false
		}
	}
	return true
}
// inside reports whether point P lies inside (or on the boundary of) the
// counter-clockwise triangle A,B,C by checking that P is on the left of
// each directed edge.
func inside(Ax, Ay, Bx, By, Cx, Cy, Px, Py float32) bool {
	// cross returns the z-component of (e-o) x (p-o): >= 0 means p is on
	// or to the left of the directed edge o->e.
	cross := func(ox, oy, ex, ey, px, py float32) float32 {
		return (ex-ox)*(py-oy) - (ey-oy)*(px-ox)
	}
	return cross(Bx, By, Cx, Cy, Px, Py) >= 0.0 &&
		cross(Cx, Cy, Ax, Ay, Px, Py) >= 0.0 &&
		cross(Ax, Ay, Bx, By, Px, Py) >= 0.0
}
|
#!/bin/bash
# Validates a Cloudera CSD service descriptor and packages the CSD source
# directory into a jar under the build directory.
set -e
if [[ $# -ne 0 ]]; then
    echo "Usage: $0"
    exit 1
fi
# All locations are overridable via environment variables.
VALIDATOR_DIR=${VALIDATOR_DIR:-~/trash/cm_ext}
POINT_VERSION=${POINT_VERSION:-1}
SRC_DIR=${SRC_DIR:-csd-src}
BUILD_DIR=${BUILD_DIR:-build-csd}
#
# validate directory
java -jar $VALIDATOR_DIR/validator/target/validator.jar \
    -s $SRC_DIR/descriptor/service.sdl
# Make Build Directory (start from a clean slate)
if [ -d $BUILD_DIR ];
then
    rm -rf $BUILD_DIR
fi
# Make directory
mkdir $BUILD_DIR
cd $SRC_DIR
# Package everything in the source dir; jar name is currently hard-coded
# (POINT_VERSION is defined above but unused — confirm intent).
jar -cvf NIFI-1.2.jar *
cd ..
mv $SRC_DIR/NIFI-1.2.jar $BUILD_DIR/.
|
#!/bin/bash
# CI entry point: runs the CouchDB-flavoured Gradle test build, but only when
# the current change set actually touches test-relevant files.
source ./ci/functions.sh
runBuild=false
echo "Reviewing changes that might affect the Gradle build..."
currentChangeSetAffectsTests
retval=$?
# NOTE(review): string comparison of a numeric exit code; works because $? is
# a bare integer, but `-eq` would be the conventional test.
if [ "$retval" == 0 ]
then
    echo "Found changes that require the build to run test cases."
    runBuild=true
else
    echo "Changes do NOT affect project test cases."
    runBuild=false
fi
if [ "$runBuild" = false ]; then
    exit 0
fi
prepCommand="echo 'Running command...'; "
gradle="./gradlew $@"
gradleBuild=""
gradleBuildOptions="--stacktrace --build-cache --configure-on-demand --no-daemon -DtestCategoryType=COUCHDB "
echo -e "***********************************************"
echo -e "Gradle build started at `date`"
echo -e "***********************************************"
# A CouchDB server must be up before the tests run.
./ci/tests/couchdb/run-couchdb-server.sh
gradleBuild="$gradleBuild testCouchDb coveralls -x test -x javadoc -x check \
    -DskipNpmLint=true -DskipGradleLint=true -DskipSass=true -DskipNpmLint=true --parallel \
    -DskipNodeModulesCleanUp=true -DskipNpmCache=true -DskipNestedConfigMetadataGen=true "
# Commit-message flags toggle extra Gradle behaviour.
if [[ "${TRAVIS_COMMIT_MESSAGE}" == *"[show streams]"* ]]; then
    gradleBuild="$gradleBuild -DshowStandardStreams=true "
fi
if [[ "${TRAVIS_COMMIT_MESSAGE}" == *"[rerun tasks]"* ]]; then
    gradleBuild="$gradleBuild --rerun-tasks "
fi
if [[ "${TRAVIS_COMMIT_MESSAGE}" == *"[refresh dependencies]"* ]]; then
    gradleBuild="$gradleBuild --refresh-dependencies "
fi
if [ -z "$gradleBuild" ]; then
    echo "Gradle build will be ignored since no commands are specified to run."
else
    tasks="$gradle $gradleBuildOptions $gradleBuild"
    echo -e "***************************************************************************************"
    echo $prepCommand
    echo $tasks
    echo -e "***************************************************************************************"
    # Background keep-alive so CI does not kill the job for silence.
    # NOTE(review): the loop is never explicitly killed when the build ends.
    waitloop="while sleep 9m; do echo -e '\n=====[ Gradle build is still running ]====='; done &"
    eval $waitloop
    waitRetVal=$?
    eval $prepCommand
    eval $tasks
    retVal=$?
    echo -e "***************************************************************************************"
    echo -e "Gradle build finished at `date` with exit code $retVal"
    echo -e "***************************************************************************************"
    if [ $retVal == 0 ]; then
        echo "Gradle build finished successfully."
    else
        echo "Gradle build did NOT finish successfully."
        exit $retVal
    fi
fi
|
# NOTE(review): string1 and string2 are not defined in this snippet —
# presumably assigned earlier in the original program; confirm before use.
if string1 == string2:
    print("Equal")
else:
    print("Not Equal")
<reponame>leoj3n/gatsby-starter-personal-blog
import React from "react";
import { Provider } from "react-redux";
// import PropTypes from "prop-types";
// import createStore from "./src/state/store";
// remove the JSS style tag generated on the server to avoid conflicts with the one added on the client
// exports.onInitialClientRender = function() {
// // eslint-disable-next-line no-undef
// var ssStyles = window.document.getElementById("server-side-jss");
// ssStyles && ssStyles.parentNode.removeChild(ssStyles);
// };
export const wrapRootElement = ({ element }) => {
// const store = createStore();
const ConnectedRootElement = (
<Provider store={store}>
{element}
</Provider>
);
// ConnectedRootElement.propTypes = {
// children: PropTypes.object.isRequired
// };
return ConnectedRootElement;
};
|
export default class TransactionCount {
id: string;
length: number;
constructor(id: string, length: number) {
this.id = id;
this.length = length;
}
incrementCount() {
this.length++;
}
getCount() {
return this.length;
}
} |
#!/bin/bash
# Builds the rest-query service with Maven, prunes stale local images and
# publishes versioned + latest Docker images to DockerHub.
set -ex
VERSION=$1
IMAGE_NAME="networknt/com.networknt.rest-query-1.0.0"

showHelp() {
    echo " "
    echo "Error: $1"
    echo " "
    echo "    build.sh [VERSION]"
    echo " "
    echo "    where [VERSION] version of the docker image that you want to publish (example: 0.0.1)"
    echo " "
    echo "    example: ./build.sh 0.0.1"
    echo " "
}

build() {
    echo "Building ..."
    mvn clean install
    echo "Successfully built!"
}

cleanup() {
    # Only prune when at least one matching image exists.
    if [[ "$(docker images -q $IMAGE_NAME 2> /dev/null)" != "" ]]; then
        echo "Removing old $IMAGE_NAME images"
        docker images | grep $IMAGE_NAME | awk '{print $3}' | xargs docker rmi -f
        echo "Cleanup completed!"
    fi
}

publish() {
    echo "Building Docker image with version $VERSION"
    docker build -t $IMAGE_NAME:$VERSION -t $IMAGE_NAME:latest -f ./docker/Dockerfile . --no-cache=true
    docker build -t $IMAGE_NAME:$VERSION-redhat -f ./docker/Dockerfile-Redhat . --no-cache=true
    echo "Images built with version $VERSION"
    echo "Pushing image to DockerHub"
    docker push $IMAGE_NAME
    echo "Image successfully published!"
}

# Quote the expansion: unquoted, an empty $VERSION collapses to `[ -z ]` and a
# value containing spaces breaks the test. Exit non-zero so CI flags the error.
if [ -z "$VERSION" ]; then
    showHelp "[VERSION] parameter is missing"
    exit 1
fi
build;
cleanup;
publish;
|
// Top-level game loop / rules coordinator (mostly stubbed).
class Game {
    constructor() {
        // https://catanshop.com/images/thumbs/0000276_reversible-game-board.jpeg
        // Board layout key: rows of tiles (terrain letter + dice number, see
        // RESOURCES), with port annotations in parentheses.
        // NOTE(review): mapkey is built but never used — presumably meant to
        // be passed to a Board; confirm.
        var mapkey = `\
T10(l-S2).F11(tl-L2).M11(tr-*3)
-M6.P9.P4.C8(r-G2)
-P3(tl-B2).D0.F2.M5.P9
-T8(l-*3).C5.C6.M3.T10.F3(tr-*3)
+F4.T6.F12.T12.D0(r-R2)
+P8(bl-*3).C10.M11.C6
+F3.P5(bl-S2).T4(br-*3)\
`
        this.start()
    }
    start() {
        // each turn, check player for actions (after roll)
        // 1st,2nd turn, they can place house and road free
        // if their total points == victory, End
    }
    place(player, item, location) {
        //
    }
    roll(player) {
        // disribute resources to all players
    }
    end() {
        //
    }
}
// Holds the hex-grid data structure parsed from a mapkey string (stub).
class Board {
    constructor(mapkey, playerCount) {
        // generate the map DS
    }
    place(item, location) {
        //
    }
}
// A single player's hand of resource and development cards (partially stubbed).
class Player {
    constructor() {
        // Resource counts keyed by resource letter (see RESOURCES).
        this.hand_resources = {
            // todo - get from the const
            S: 0, L: 0, B: 0, R: 0, G: 0,
        }
        this.hand_development_cards = {}
    }
    // Add `count` cards of `resource` to the hand.
    // Fix: the original did `this[resource] += count`, which read an
    // undefined property (yielding NaN) and never touched hand_resources.
    addResource(resource, count = 1) {
        this.hand_resources[resource] += count
    }
    hasResource(resource, count = 1) {}
    removeResource(resource, count = 1) {}
    addPiece(type) {}
    hasPiece(type) {}
    removePiece(type) {}
}
// Not necessary, can get away with string literals
class Resource {
    constructor(type) {
        this.type = type
    }
}

// Terrain letter → terrain name, interleaved with the resource letter → name
// it produces (e.g. Pasture tiles yield Sheep).
const RESOURCES = {
    P: 'Pasture', S: 'Sheep',
    T: 'Trees', L: 'Lumber',
    C: 'Clay', B: 'Brick',
    M: 'Mountain', R: 'Rock',
    F: 'Fields', G: 'Grain',
}
// Development card letter → card name.
const DEVELOPMENT_CARDS = {
    K: 'Knight',
    R: 'Road building', Y: 'Year of plenty', M: 'Monopoly',
    V: 'Victory point',
}
// Buildable piece letter → piece name.
const PIECES = {
    S: 'Settlement', C: 'City', R: 'Road',
}
|
using System;
namespace FizzBuzz
{
    class Program
    {
        /// <summary>
        /// Prints 1..100, substituting "Fizz" for multiples of 3, "Buzz" for
        /// multiples of 5 and "FizzBuzz" for multiples of both.
        /// </summary>
        static void Main(string[] args)
        {
            for (var n = 1; n <= 100; n++)
            {
                var label = string.Empty;
                if (n % 3 == 0)
                {
                    label += "Fizz";
                }
                if (n % 5 == 0)
                {
                    label += "Buzz";
                }
                if (label.Length > 0)
                {
                    Console.WriteLine(label);
                }
                else
                {
                    Console.WriteLine(n);
                }
            }
        }
    }
}
<reponame>masatake/sourcegraph
import { getFileInfoWithoutCommitIDsFromMultiFileDiffCodeView } from './scrape'
import * as testCodeViews from './test-code-views'
describe('Bitbucket scrape.ts', () => {
    describe('getDiffFileInfoFromMultiFileDiffCodeView()', () => {
        /**
         * Points jsdom at the given PR URL, mounts the given code-view HTML
         * fixture into a detached element and returns the scraped FileInfo.
         */
        const scrapeFixture = (url: string, html: string) => {
            jsdom.reconfigure({ url })
            const codeView = document.createElement('div')
            codeView.innerHTML = html
            return getFileInfoWithoutCommitIDsFromMultiFileDiffCodeView(codeView)
        }

        it('should get the FileInfo for an added file', () => {
            expect(
                scrapeFixture(
                    'https://bitbucket.test/projects/SOURCEGRAPH/repos/mux/pull-requests/1/diff#dir/new_file.go',
                    testCodeViews.pr.added
                )
            ).toStrictEqual({
                baseFilePath: undefined,
                baseRepoName: undefined,
                filePath: 'dir/new_file.go',
                project: 'SOURCEGRAPH',
                repoSlug: 'mux',
                repoName: 'bitbucket.test/SOURCEGRAPH/mux',
            })
        })
        it('should get the FileInfo for a modified file', () => {
            expect(
                scrapeFixture(
                    'https://bitbucket.test/projects/SOURCEGRAPH/repos/mux/pull-requests/1/diff#dir/mux.go',
                    testCodeViews.pr.modified
                )
            ).toStrictEqual({
                baseFilePath: 'dir/mux.go',
                baseRepoName: 'bitbucket.test/SOURCEGRAPH/mux',
                filePath: 'dir/mux.go',
                project: 'SOURCEGRAPH',
                repoSlug: 'mux',
                repoName: 'bitbucket.test/SOURCEGRAPH/mux',
            })
        })
        it('should get the FileInfo for a deleted file', () => {
            expect(
                scrapeFixture(
                    'https://bitbucket.test/projects/SOURCEGRAPH/repos/mux/pull-requests/1/diff#dir/old_test.go',
                    testCodeViews.pr.deleted
                )
            ).toStrictEqual({
                baseFilePath: 'dir/old_test.go',
                baseRepoName: 'bitbucket.test/SOURCEGRAPH/mux',
                filePath: 'dir/old_test.go', // TODO should really be undefined?
                project: 'SOURCEGRAPH',
                repoSlug: 'mux',
                repoName: 'bitbucket.test/SOURCEGRAPH/mux',
            })
        })
        it('should get the FileInfo for a copied file', () => {
            expect(
                scrapeFixture(
                    'https://bitbucket.test/projects/SOURCEGRAPH/repos/mux/pull-requests/1/diff#dir/mux.1.go',
                    testCodeViews.pr.copied
                )
            ).toStrictEqual({
                baseFilePath: 'dir/mux.go',
                baseRepoName: 'bitbucket.test/SOURCEGRAPH/mux',
                filePath: 'dir/mux.1.go',
                project: 'SOURCEGRAPH',
                repoSlug: 'mux',
                repoName: 'bitbucket.test/SOURCEGRAPH/mux',
            })
        })
        it('should get the FileInfo for a renamed file', () => {
            expect(
                scrapeFixture(
                    'https://bitbucket.test/projects/SOURCEGRAPH/repos/mux/pull-requests/1/diff#dir/mux_test_moved.go',
                    testCodeViews.pr.renamed
                )
            ).toStrictEqual({
                baseFilePath: 'dir/mux_test.go',
                baseRepoName: 'bitbucket.test/SOURCEGRAPH/mux',
                filePath: 'dir/mux_test_moved.go',
                project: 'SOURCEGRAPH',
                repoSlug: 'mux',
                repoName: 'bitbucket.test/SOURCEGRAPH/mux',
            })
        })
        it('should get the FileInfo for a moved file', () => {
            expect(
                scrapeFixture(
                    'https://bitbucket.test/projects/SOURCEGRAPH/repos/mux/pull-requests/1/diff#dir/test-dir/route.go',
                    testCodeViews.pr.moved
                )
            ).toStrictEqual({
                baseFilePath: 'dir/route.go',
                baseRepoName: 'bitbucket.test/SOURCEGRAPH/mux',
                filePath: 'dir/test-dir/route.go',
                project: 'SOURCEGRAPH',
                repoSlug: 'mux',
                repoName: 'bitbucket.test/SOURCEGRAPH/mux',
            })
        })
    })
})
|
import React from 'react'
import PropTypes from 'prop-types'
import Link from 'gatsby-link'
import Helmet from 'react-helmet'
import './icons.scss';
import './icons/scss/font-awesome.scss';
import resume from '../../docs/resume.pdf';
class Icons extends React.Component {
render() {
let icon;
if (typeof window !== 'undefined' && window.location.pathname != "/decal") {
icon = <div className="icons">
<a href="https://github.com/stylate"><i className="fa fa-github fa-fw"></i></a>
<a href="https://instagram.com/alanstheory"><i className="fa fa-instagram fa-fw"></i></a>
<a href="https://letterboxd.com/carbine"><i className="fa fa-film fa-fw"></i></a>
<a href="https://open.spotify.com/user/flexcy_?si=K2yrBitRSP-l0F8WphXsUQ"><i className="fa fa-spotify fa-fw"></i></a>
<a href={resume}><i className="fa fa-file-o fa-fw"></i></a>
</div>
} else {
icon = <div/>
}
return (
icon
)
}
}
export default Icons;
|
class StudentInfo:
    """Simple record holding a student's name and age."""

    def __init__(self, name: str, age: int) -> None:
        """Store the given name and age on the instance.

        :param name: the student's name
        :param age: the student's age
        """
        self.name = name
        self.age = age
#!/usr/bin/env bash
#
# Copyright (c) 2019-2020 The Adinkracoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# CI environment definition for the native ThreadSanitizer (TSan) build.
# These variables are consumed by the shared CI scripts.

export LC_ALL=C.UTF-8
# Container name and the Docker image the build runs in.
export CONTAINER_NAME=ci_native_tsan
export DOCKER_NAME_TAG=ubuntu:hirsute
# Toolchain and build dependencies; libc++ is used because the build compiles
# with clang (see DEP_OPTS / ADINKRACOIN_CONFIG below).
export PACKAGES="clang llvm libc++abi-dev libc++-dev python3-zmq"
export DEP_OPTS="CC=clang CXX='clang++ -stdlib=libc++'"
export GOAL="install"
# --with-sanitizers=thread enables TSan; -DDEBUG_LOCKORDER adds lock-order
# assertions, which complement the data-race detection.
export ADINKRACOIN_CONFIG="--enable-zmq --with-gui=no CPPFLAGS='-DARENA_DEBUG -DDEBUG_LOCKORDER' CXXFLAGS='-g' --with-sanitizers=thread CC=clang CXX='clang++ -stdlib=libc++'"
|
def find_first_params(evaluator, current_module, func_name):
    """Return the params of the first module containing ``func_name`` that
    yields a non-empty result, searching modules one by one.

    NOTE(review): ``imports``, ``get_params_for_module``, ``func`` and
    ``listener`` are not defined anywhere in this snippet — they must come
    from the enclosing scope/module, otherwise the ``finally`` clause raises
    ``NameError`` before ``result`` can be returned. Confirm against the
    full file before modifying.
    """
    result = None
    try:
        # This is like backtracking: Get the first possible result.
        for mod in imports.get_modules_containing_name(evaluator, [current_module], func_name):
            result = get_params_for_module(mod)
            if result:
                break
    finally:
        # cleanup: remove the listener; important: should not stick.
        func.listeners.remove(listener)
    return result
#!/bin/bash
# Spins up two local RabbitMQ nodes on distinct ports and joins them into a
# cluster.
# please stop all rabbitmq node is running
# disable all plugins for each node
# you can use reset node through rabbitmqctl or rabbitmqadmin or rm mnesia db from $HOME_RABBITMQ/var/lib/mnesia
#ensure that no one use port, and use -detached to start nodes without save pid
RABBITMQ_NODE_PORT=5675 RABBITMQ_NODENAME=master rabbitmq-server -detached
RABBITMQ_NODE_PORT=5676 RABBITMQ_NODENAME=slave rabbitmq-server -detached
# Reset both nodes to a clean state before clustering.
rabbitmqctl -n master@HOST_NAME stop_app
rabbitmqctl -n master@HOST_NAME reset
rabbitmqctl -n master@HOST_NAME start_app
rabbitmqctl -n slave@HOST_NAME stop_app
rabbitmqctl -n slave@HOST_NAME reset
#reset node for node that become master
# NOTE(review): the nodes started above are named master/slave, but the
# join/start commands below address rabbit1/rabbit2 and join them to
# rabbit@HOST_NAME — these node names look inconsistent; confirm the
# intended topology before running.
rabbitmqctl -n rabbit1@HOST_NAME join_cluster rabbit@HOST_NAME
rabbitmqctl -n rabbit2@HOST_NAME join_cluster rabbit@HOST_NAME
rabbitmqctl -n rabbit1@HOST_NAME start_app
rabbitmqctl -n rabbit2@HOST_NAME start_app
|
#!/bin/bash
# Read input line by line from stdin.
while IFS= read -r line; do
    # Extract repository name and test description using regex
    # NOTE(review): the literal "dean-s-oliver" embedded in the pattern looks
    # like a corrupted/injected placeholder separating the two capture groups;
    # the intended input format cannot be determined from this script alone —
    # confirm the real separator before relying on this match.
    if [[ $line =~ ^([a-zA-Z0-9_-]+)dean-s-oliver/([a-zA-Z0-9_-]+)$ ]]; then
        reponame=${BASH_REMATCH[1]}
        # Pull the value of a test_description='...' field from the same line,
        # if present (empty otherwise).
        description=$(echo "$line" | grep -oP "(?<=test_description=')[^']+(?=')")
        # Print formatted output
        echo "Repository: $reponame"
        echo "Test Description: $description"
    fi
done
def classify_by_length(items):
    """Split a list of strings into ('short', 'long') buckets.

    A string of five characters or fewer counts as short; anything longer is
    long. Returns a tuple ``(short_items, long_items)``, each preserving the
    original input order.
    """
    # Two comprehensions replace the original manual append loop.
    short_items = [text for text in items if len(text) <= 5]
    long_items = [text for text in items if len(text) > 5]
    return short_items, long_items
package examples.collections;
/**
 * A laptop with a name, RAM size and price, ordered by ascending RAM.
 */
public class Laptop implements Comparable<Laptop> {

    private String name;
    private int ram;
    private double price;

    /**
     * Creates a laptop.
     *
     * @param name  model name
     * @param ram   RAM size (unit as used by callers)
     * @param price price
     */
    public Laptop(String name, int ram, double price) {
        super();
        this.name = name;
        this.ram = ram;
        this.price = price;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public int getRam() {
        return ram;
    }

    public void setRam(int ram) {
        this.ram = ram;
    }

    public double getPrice() {
        return price;
    }

    public void setPrice(double price) {
        this.price = price;
    }

    @Override
    public String toString() {
        return "Laptop{" +
                "name='" + name + '\'' +
                ", ram=" + ram +
                ", price=" + price +
                '}';
    }

    /**
     * Orders laptops by ascending RAM.
     *
     * Fixed: the original never returned 0 — two laptops with equal RAM each
     * compared as "less than" the other, which violates the antisymmetry
     * required by the {@link Comparable} contract and can corrupt sorted
     * collections. {@link Integer#compare(int, int)} handles all three cases.
     *
     * @param lap2 the laptop to compare against
     * @return a negative value, zero, or a positive value as this laptop's
     *         RAM is less than, equal to, or greater than {@code lap2}'s
     */
    @Override
    public int compareTo(Laptop lap2) {
        return Integer.compare(this.ram, lap2.ram);
    }
}
|
// Size of one checker cell in pixels along each axis.
#define CHECKER_ROWS 10
#define CHECKER_COLS 10

// Renders a black/white checkerboard: a cell is black when its row and
// column parities match, white otherwise.
void mainImage( out vec4 fragColor, in vec2 fragCoord )
{
    bool isOn = false;
    // NOTE(review): `row` is derived from fragCoord.x (horizontal) and `col`
    // from fragCoord.y (vertical) — the names look swapped relative to the
    // usual row/column convention, though the rendered pattern is still a
    // checkerboard. Also relies on implicit int->float promotion of the `10`
    // literals in the division (not valid in GLSL ES 1.0).
    int row = int(floor(fragCoord.x / CHECKER_COLS));
    int col = int(floor(fragCoord.y / CHECKER_ROWS));
    if (row % 2 == 0)
        isOn = col % 2 == 0;
    else
        isOn = col % 2 == 1;
    if (isOn)
        fragColor = vec4(0.0, 0.0, 0.0, 1.0); // black
    else
        fragColor = vec4(1.0, 1.0, 1.0, 1.0); // white
}
#!/usr/bin/env bash
# Builds the Angular packages into dist/packages-dist, optionally bundling
# (rollup + uglify) and preparing them for publishing.
# Flags: --packages=a,b,c  --bundle=true|false  --publish

set -e -o pipefail

cd `dirname $0`

# Default set of packages to build; --packages=... overrides this list.
PACKAGES=(core
  compiler
  common
  forms
  platform-browser
  platform-browser-dynamic
  platform-server
  platform-webworker
  platform-webworker-dynamic
  http
  upgrade
  router
  compiler-cli
  language-service
  benchpress)
BUILD_ALL=true
BUNDLE=true
# Version is package.json's version plus "-<short HEAD hash>" (suffix is
# cleared for --publish builds).
VERSION_PREFIX=$(node -p "require('./package.json').version")
VERSION_SUFFIX="-$(git log --oneline -1 | awk '{print $1}')"
# The router is versioned one major ahead of core (2.x -> 3.x).
ROUTER_VERSION_PREFIX=$(node -p "require('./package.json').version.replace(/^2/, '3')")
REMOVE_BENCHPRESS=false

for ARG in "$@"; do
  case "$ARG" in
    --packages=*)
      PACKAGES_STR=${ARG#--packages=}
      PACKAGES=( ${PACKAGES_STR//,/ } )
      BUILD_ALL=false
      ;;
    --bundle=*)
      BUNDLE=( "${ARG#--bundle=}" )
      ;;
    --publish)
      VERSION_SUFFIX=""
      REMOVE_BENCHPRESS=true
      ;;
    *)
      echo "Unknown option $ARG."
      exit 1
      ;;
  esac
done

VERSION="${VERSION_PREFIX}${VERSION_SUFFIX}"
ROUTER_VERSION="${ROUTER_VERSION_PREFIX}${VERSION_SUFFIX}"
echo "====== BUILDING: Version ${VERSION} (Router ${ROUTER_VERSION})"

export NODE_PATH=${NODE_PATH}:$(pwd)/dist/all:$(pwd)/dist/tools
TSC="node --max-old-space-size=3000 dist/tools/@angular/tsc-wrapped/src/main"
UGLIFYJS=`pwd`/node_modules/.bin/uglifyjs
TSCONFIG=./tools/tsconfig.json

# Build the tools (tsc-wrapped) first; everything else is compiled with them.
echo "====== (tools)COMPILING: \$(npm bin)/tsc -p ${TSCONFIG} ====="
rm -rf ./dist/tools/
mkdir -p ./dist/tools/
$(npm bin)/tsc -p ${TSCONFIG}
cp ./tools/@angular/tsc-wrapped/package.json ./dist/tools/@angular/tsc-wrapped

if [[ ${BUILD_ALL} == true ]]; then
  rm -rf ./dist/all/
  mkdir -p ./dist/all/

  echo "====== Copying files needed for e2e tests ====="
  cp -r ./modules/playground ./dist/all/
  cp -r ./modules/playground/favicon.ico ./dist/
  #rsync -aP ./modules/playground/* ./dist/all/playground/
  mkdir ./dist/all/playground/vendor
  cd ./dist/all/playground/vendor
  # Symlink the third-party scripts the playground pages load directly.
  ln -s ../../../../node_modules/core-js/client/core.js .
  ln -s ../../../../node_modules/zone.js/dist/zone.js .
  ln -s ../../../../node_modules/zone.js/dist/long-stack-trace-zone.js .
  ln -s ../../../../node_modules/systemjs/dist/system.src.js .
  ln -s ../../../../node_modules/base64-js .
  ln -s ../../../../node_modules/reflect-metadata/Reflect.js .
  ln -s ../../../../node_modules/rxjs .
  ln -s ../../../../node_modules/angular/angular.js .
  ln -s ../../../../node_modules/hammerjs/hammer.js .
  cd -

  echo "====== Copying files needed for benchmarks ====="
  cp -r ./modules/benchmarks ./dist/all/
  cp -r ./modules/benchmarks/favicon.ico ./dist/
  mkdir ./dist/all/benchmarks/vendor
  cd ./dist/all/benchmarks/vendor
  ln -s ../../../../node_modules/core-js/client/core.js .
  ln -s ../../../../node_modules/zone.js/dist/zone.js .
  ln -s ../../../../node_modules/zone.js/dist/long-stack-trace-zone.js .
  ln -s ../../../../node_modules/systemjs/dist/system.src.js .
  ln -s ../../../../node_modules/reflect-metadata/Reflect.js .
  ln -s ../../../../node_modules/rxjs .
  ln -s ../../../../node_modules/angular/angular.js .
  ln -s ../../../../bower_components/polymer .
  ln -s ../../../../node_modules/incremental-dom/dist/incremental-dom-cjs.js
  cd -

  TSCONFIG=./modules/tsconfig.json
  echo "====== (all)COMPILING: \$(npm bin)/tsc -p ${TSCONFIG} ====="
  # compile ts code
  $TSC -p modules/tsconfig.json

  rm -rf ./dist/packages-dist
fi

# Build each requested package into dist/packages-dist/<package>.
for PACKAGE in ${PACKAGES[@]}
do
  PWD=`pwd`
  SRCDIR=${PWD}/modules/@angular/${PACKAGE}
  DESTDIR=${PWD}/dist/packages-dist/${PACKAGE}
  # Output paths for the UMD bundles (main/testing/static/upgrade + minified).
  UMD_ES5_PATH=${DESTDIR}/bundles/${PACKAGE}.umd.js
  UMD_TESTING_ES5_PATH=${DESTDIR}/bundles/${PACKAGE}-testing.umd.js
  UMD_STATIC_ES5_PATH=${DESTDIR}/bundles/${PACKAGE}-static.umd.js
  UMD_UPGRADE_ES5_PATH=${DESTDIR}/bundles/${PACKAGE}-upgrade.umd.js
  UMD_ES5_MIN_PATH=${DESTDIR}/bundles/${PACKAGE}.umd.min.js
  UMD_STATIC_ES5_MIN_PATH=${DESTDIR}/bundles/${PACKAGE}-static.umd.min.js
  UMD_UPGRADE_ES5_MIN_PATH=${DESTDIR}/bundles/${PACKAGE}-upgrade.umd.min.js
  # The router ships under a different license banner than the other packages.
  if [[ ${PACKAGE} != router ]]; then
    LICENSE_BANNER=${PWD}/modules/@angular/license-banner.txt
  fi
  if [[ ${PACKAGE} == router ]]; then
    LICENSE_BANNER=${PWD}/modules/@angular/router-license-banner.txt
  fi

  rm -rf ${DESTDIR}

  echo "====== COMPILING: ${TSC} -p ${SRCDIR}/tsconfig-build.json ====="
  $TSC -p ${SRCDIR}/tsconfig-build.json
  if [[ -e ${SRCDIR}/tsconfig-upgrade.json ]]; then
    echo "====== COMPILING: ${TSC} -p ${SRCDIR}/tsconfig-upgrade.json ====="
    $TSC -p ${SRCDIR}/tsconfig-upgrade.json
  fi

  cp ${SRCDIR}/package.json ${DESTDIR}/
  cp ${PWD}/modules/@angular/README.md ${DESTDIR}/

  if [[ -e ${SRCDIR}/tsconfig-testing.json ]]; then
    echo "====== COMPILING TESTING: ${TSC} -p ${SRCDIR}/tsconfig-testing.json"
    $TSC -p ${SRCDIR}/tsconfig-testing.json
  fi

  if [[ -e ${SRCDIR}/tsconfig-2015.json ]]; then
    echo "====== COMPILING ESM: ${TSC} -p ${SRCDIR}/tsconfig-2015.json"
    ${TSC} -p ${SRCDIR}/tsconfig-2015.json
  fi

  echo "====== TSC 1.8 d.ts compat for ${DESTDIR} ====="
  # safely strips 'readonly' specifier from d.ts files to make them compatible with tsc 1.8
  # (BSD sed on Darwin needs -i '' while GNU sed takes -i with no argument)
  if [ "$(uname)" == "Darwin" ]; then
    find ${DESTDIR} -type f -name '*.d.ts' -print0 | xargs -0 sed -i '' -e 's/\(^ *(static |private )*\)*readonly */\1/g'
    find ${DESTDIR} -type f -name '*.d.ts' -print0 | xargs -0 sed -i '' -e 's/\/\/\/ <reference types="node" \/>//g'
    find ${DESTDIR} -type f -name '*.d.ts' -print0 | xargs -0 sed -i '' -E 's/^( +)abstract ([[:alnum:]]+\:)/\1\2/g'
  else
    find ${DESTDIR} -type f -name '*.d.ts' -print0 | xargs -0 sed -i -e 's/\(^ *(static |private )*\)*readonly */\1/g'
    find ${DESTDIR} -type f -name '*.d.ts' -print0 | xargs -0 sed -i -e 's/\/\/\/ <reference types="node" \/>//g'
    find ${DESTDIR} -type f -name '*.d.ts' -print0 | xargs -0 sed -i -E 's/^( +)abstract ([[:alnum:]]+\:)/\1\2/g'
  fi

  if [[ ${PACKAGE} == benchpress ]]; then
    cp ${SRCDIR}/*.md ${DESTDIR}
    cp -r ${SRCDIR}/docs ${DESTDIR}
  fi

  if [[ ${BUNDLE} == true && ${PACKAGE} != compiler-cli && ${PACKAGE} != benchpress ]]; then
    echo "====== BUNDLING: ${SRCDIR} ====="
    mkdir ${DESTDIR}/bundles
    (
      cd ${SRCDIR}
      echo "====== Rollup ${PACKAGE} index"
      ../../../node_modules/.bin/rollup -c rollup.config.js
      # Prepend the license banner to the rolled-up bundle, then minify.
      cat ${LICENSE_BANNER} > ${UMD_ES5_PATH}.tmp
      cat ${UMD_ES5_PATH} >> ${UMD_ES5_PATH}.tmp
      mv ${UMD_ES5_PATH}.tmp ${UMD_ES5_PATH}
      $UGLIFYJS -c --screw-ie8 --comments -o ${UMD_ES5_MIN_PATH} ${UMD_ES5_PATH}

      if [[ -e rollup-testing.config.js ]]; then
        echo "====== Rollup ${PACKAGE} testing"
        ../../../node_modules/.bin/rollup -c rollup-testing.config.js
        echo "{\"main\": \"../bundles/${PACKAGE}-testing.umd.js\"}" > ${DESTDIR}/testing/package.json
        cat ${LICENSE_BANNER} > ${UMD_TESTING_ES5_PATH}.tmp
        cat ${UMD_TESTING_ES5_PATH} >> ${UMD_TESTING_ES5_PATH}.tmp
        mv ${UMD_TESTING_ES5_PATH}.tmp ${UMD_TESTING_ES5_PATH}
      fi
      if [[ -e rollup-static.config.js ]]; then
        echo "====== Rollup ${PACKAGE} static"
        ../../../node_modules/.bin/rollup -c rollup-static.config.js
        # create dir because it doesn't exist yet, we should move the src code here and remove this line
        mkdir ${DESTDIR}/static
        echo "{\"main\": \"../bundles/${PACKAGE}-static.umd.js\"}" > ${DESTDIR}/static/package.json
        cat ${LICENSE_BANNER} > ${UMD_STATIC_ES5_PATH}.tmp
        cat ${UMD_STATIC_ES5_PATH} >> ${UMD_STATIC_ES5_PATH}.tmp
        mv ${UMD_STATIC_ES5_PATH}.tmp ${UMD_STATIC_ES5_PATH}
        $UGLIFYJS -c --screw-ie8 --comments -o ${UMD_STATIC_ES5_MIN_PATH} ${UMD_STATIC_ES5_PATH}
      fi
      if [[ -e rollup-upgrade.config.js ]]; then
        echo "====== Rollup ${PACKAGE} upgrade"
        ../../../node_modules/.bin/rollup -c rollup-upgrade.config.js
        # create dir because it doesn't exist yet, we should move the src code here and remove this line
        mkdir ${DESTDIR}/upgrade
        echo "{\"main\": \"../bundles/${PACKAGE}-upgrade.umd.js\"}" > ${DESTDIR}/upgrade/package.json
        cat ${LICENSE_BANNER} > ${UMD_UPGRADE_ES5_PATH}.tmp
        cat ${UMD_UPGRADE_ES5_PATH} >> ${UMD_UPGRADE_ES5_PATH}.tmp
        mv ${UMD_UPGRADE_ES5_PATH}.tmp ${UMD_UPGRADE_ES5_PATH}
        $UGLIFYJS -c --screw-ie8 --comments -o ${UMD_UPGRADE_ES5_MIN_PATH} ${UMD_UPGRADE_ES5_PATH}
      fi
    ) 2>&1 | grep -v "as external dependency"
  fi

  (
    echo "====== VERSION: Updating version references"
    cd ${DESTDIR}
    # Replace the 0.0.0-PLACEHOLDER / 0.0.0-ROUTERPLACEHOLDER markers in every
    # file that contains them with the computed versions.
    echo "====== EXECUTE: perl -p -i -e \"s/0\.0\.0\-PLACEHOLDER/${VERSION}/g\" $""(grep -ril 0\.0\.0\-PLACEHOLDER .)"
    perl -p -i -e "s/0\.0\.0\-PLACEHOLDER/${VERSION}/g" $(grep -ril 0\.0\.0\-PLACEHOLDER .) < /dev/null 2> /dev/null
    echo "====== EXECUTE: perl -p -i -e \"s/0\.0\.0\-ROUTERPLACEHOLDER/${ROUTER_VERSION}/g\" $""(grep -ril 0\.0\.0\-ROUTERPLACEHOLDER .)"
    perl -p -i -e "s/0\.0\.0\-ROUTERPLACEHOLDER/${ROUTER_VERSION}/g" $(grep -ril 0\.0\.0\-ROUTERPLACEHOLDER .) < /dev/null 2> /dev/null
  )
done

echo ""
echo "====== Building examples: ./modules/@angular/examples/build.sh ====="
./modules/@angular/examples/build.sh

if [[ ${REMOVE_BENCHPRESS} == true ]]; then
  echo ""
  echo "==== Removing benchpress from publication"
  rm -r dist/packages-dist/benchpress
fi
|
import axios from 'axios';
import cli from 'cli-ux';
import { ApiResponse, Client } from '@elastic/elasticsearch';
import { Config, ESSearchResponse, ESIndexSources } from '../../../../global';
const fetchRemoteLinks = async (
userConfig: Config,
serverName: string | undefined,
issueKey: string,
eClient: Client,
issuesIndex: string,
) => {
const jiraServer = userConfig.jira.find(j => j.name === serverName);
if (jiraServer !== undefined) {
cli.action.start('Fetching remote link for issue ' + issueKey);
const response = await axios({
method: 'get',
url:
jiraServer.config.host +
'/rest/api/latest/issue/' +
issueKey +
'/remotelink',
auth: {
username: jiraServer.config.username,
password: <PASSWORD>,
},
validateStatus: function(status) {
return status >= 200 && status < 500; // default
},
});
if (response.data !== undefined) {
cli.action.stop(' - ' + response.data.length + ' links found');
if (response.data.length > 0) {
// We then add metadata about the linked issue
const esQuery = {
bool: {
// eslint-disable-next-line @typescript-eslint/camelcase
must: [{ match_all: {} }],
filter: [
{
bool: {
// eslint-disable-next-line @typescript-eslint/camelcase
minimum_should_match: 1,
should: response.data
.filter((l: any) => !l.object.title.includes('//'))
.map((l: any) => {
return {
// eslint-disable-next-line @typescript-eslint/camelcase
match_phrase: {
key: l.object.title,
},
};
}),
},
},
],
should: [],
// eslint-disable-next-line @typescript-eslint/camelcase
must_not: [],
},
};
const esIssues: ApiResponse<ESSearchResponse<
ESIndexSources
>> = await eClient.search({
index: issuesIndex,
body: {
from: 0,
size: 10000,
query: esQuery,
},
});
// eslint-disable-next-line @typescript-eslint/camelcase
const foundLinks = esIssues.body.hits.hits.map((i: any) => i._source);
return {
key: issueKey,
remoteLinks: response.data
.filter((l: any) => !l.object.title.includes('//'))
.map((l: any) => {
const foundLinkedIssue = foundLinks.find(
(i: any) => i.key === l.object.title,
);
if (foundLinkedIssue === undefined) {
return {
key: l.object.title,
remoteLink: l,
};
} else {
return {
...foundLinkedIssue,
key: l.object.title,
remoteLink: l,
};
}
}),
};
}
}
cli.action.stop(' - unable to find issues');
}
return { key: issueKey, remoteLinks: [] };
};
export default fetchRemoteLinks;
|
<reponame>wise-team/node-docker-watchdog<filename>src/Response.ts
import ow from "ow";
/** A single entry of a watchdog response. */
export interface ResponseEntity {
    /** Name of the strategy that produced this entry. */
    strategy: string;
    /** Arbitrary strategy-specific metadata. */
    metadata: object;
}
/** A response is a list of entities; validate() requires it be non-empty. */
export type Response = ResponseEntity[];
export namespace Response {
    /**
     * Throws (via ow) unless `o` is a non-empty array whose elements all
     * carry the `strategy` and `metadata` keys.
     */
    export function validate(o: Response) {
        ow(o, ow.array.minLength(1).ofType(ow.object.hasKeys("strategy", "metadata")));
    }
}
|
import * as React from "react";
import StudentProjects from "components/demoday/past/Spring21StudentProjects";
import { makePastProjectsPage } from "components/demoday/past/PastProjectsPage";
export { getStaticProps } from "components/Layout";
// Demo Day archive page for Spring 2021, produced by the shared
// past-projects page factory with this term's student project list.
export default makePastProjectsPage({
  period: "Spring 2021",
  projects: <StudentProjects />,
});
|
<gh_stars>0
import { BrowserRouter as Router, Routes, Route } from 'react-router-dom';
import { useState } from 'react';
import { UserContext } from './context/UserContext';
import { BalanceContext } from './context/BalanceContext';
import { ShareContext } from './context/ShareContext';
import { PricesContext } from './context/PricesContext';
import LandingPage from './pages/LandingPage';
import PortfolioPage from './pages/PortfolioPage';
import NewsPage from './pages/NewsPage';
import AboutPage from './pages/AboutPage';
import LoginPage from './pages/LoginPage';
import TermPage from './pages/TermPage';
import SignupPage from './pages/SignupPage';
import PrivacyPolicyPage from './pages/PrivacyPolicyPage';
function App() {
const [user, setUser] = useState(null);
const [balance, setBalance] = useState(500000);
const [shares, setShares] = useState([]);
const [prices, setPrices] = useState([]);
return (
<Router>
<UserContext.Provider value={{ user, setUser }}>
<BalanceContext.Provider value={{ balance, setBalance }}>
<ShareContext.Provider value={{ shares, setShares }}>
<PricesContext.Provider value={{ prices, setPrices }}>
<div className="App bg-gray-100 min-h-screen">
<Routes>
<Route exact path="/" element={<LandingPage />} />
<Route path="/portfolio" element={<PortfolioPage />} />
<Route path="/news" element={<NewsPage />} />
<Route path="/about" element={<AboutPage />} />
<Route path="/login" element={<LoginPage />} />
<Route path="/terms" element={<TermPage />} />
<Route path="/signup" element={<SignupPage />} />
<Route
path="/privacy-policy"
element={<PrivacyPolicyPage />}
/>
</Routes>
</div>
</PricesContext.Provider>
</ShareContext.Provider>
</BalanceContext.Provider>
</UserContext.Provider>
</Router>
);
}
export default App;
|
<filename>public/js/burgers.js
// Wire up the burger UI once the DOM is ready.
$(() => {
  // Mark a burger as devoured: PUT the updated record, then refresh the list.
  $(".change-status").on("click", function (event) {
    const burgerId = $(this).data("id");
    const devourBurger = {
      id: burgerId,
      burger_name: $(this).data("burger_name"),
      devoured: true, // was: $(this).data("newdevour")
    };
    console.log(devourBurger);
    // Send the PUT request to update the devoured state.
    $.ajax("/api/burgers/" + burgerId, {
      type: "PUT",
      data: devourBurger,
    }).then(() => {
      console.log("yummy!" + burgerId);
      // Reload the page to get the updated list.
      location.reload();
    });
  });

  // Create a new burger from the form; preventDefault stops the native submit.
  $(".createNEW-form").on("submit", (event) => {
    event.preventDefault();
    const newBurger = {
      burger_name: $("#hambrg").val().trim(),
      devoured: false,
    };
    // Send the POST request.
    $.ajax("/api/burgers", {
      type: "POST",
      data: newBurger,
    }).then(() => {
      console.log("created new burger");
      // Reload the page to get the updated list.
      location.reload();
    });
  });
});
|
// REST routes for "svida" policies, keyed by DNI.
const router = require('express').Router()
// NOTE(review): the Svida model is imported but never referenced here —
// confirm whether it is needed or can be dropped.
const Svida = require('../models/svida')
const SvidaController = require('../controllers/svidaController')

router.get('/', SvidaController.getEverything)        // list all records
router.get('/:dni', SvidaController.getByDNI)         // fetch one by DNI
router.post('/', SvidaController.crearPoliza)         // create a policy
router.put('/:dni', SvidaController.updateByDNI)      // update by DNI
router.delete('/:dni', SvidaController.borrarPoliza)  // delete a policy

module.exports = router
#!/usr/bin/env bash
# Publishes the contents of $deploy_directory to the $deploy_branch branch of
# $repo (gh-pages style deploy).

set -o errexit #abort if any command fails

deploy_directory=dist
deploy_branch=gh-pages

#if no user identity is already set in the current git environment, use this:
default_username=deploy.sh
default_email=

#repository to deploy to. must be readable and writable.
repo=origin

# Parse arg flags
# -v/--verbose: echo commands; -s/--setup: first-time branch creation;
# -e/--allow-empty: permit deploying an empty tree.
while : ; do
  if [[ $1 = "-v" || $1 = "--verbose" ]]; then
    verbose=true
    shift
  elif [[ $1 = "-s" || $1 = "--setup" ]]; then
    setup=true
    shift
  elif [[ $1 = "-e" || $1 = "--allow-empty" ]]; then
    allow_empty=true
    shift
  else
    break
  fi
done
#echo expanded commands as they are executed (for debugging)
function enable_expanded_output {
  if [ $verbose ]; then
    set -o xtrace
    set +o verbose
  fi
}

#this is used to avoid outputting the repo URL, which may contain a secret token
function disable_expanded_output {
  if [ $verbose ]; then
    set +o xtrace
    set -o verbose
  fi
}

# Tracing is on by default when --verbose was passed.
enable_expanded_output
# Ensure a git identity exists so the deploy commit can be created.
function set_user_id {
  if [[ -z `git config user.name` ]]; then
    git config user.name "$default_username"
  fi
  if [[ -z `git config user.email` ]]; then
    git config user.email "$default_email"
  fi
}

# Return HEAD to whatever branch (or detached commit) was checked out before
# the deploy, then reset the index to match it.
function restore_head {
  if [[ $previous_branch = "HEAD" ]]; then
    #we weren't on any branch before, so just set HEAD back to the commit it was on
    git update-ref --no-deref HEAD $commit_hash $deploy_branch
  else
    git symbolic-ref HEAD refs/heads/$previous_branch
  fi

  git reset --mixed
}
# Refuse to run with staged-but-uncommitted changes, which would otherwise be
# mixed into the deploy commit.
if ! git diff --exit-code --quiet --cached; then
  echo Aborting due to uncommitted changes in the index >&2
  exit 1
fi

commit_title=`git log -n 1 --format="%s" HEAD`
commit_hash=`git log -n 1 --format="%H" HEAD`
previous_branch=`git rev-parse --abbrev-ref HEAD`

# --setup: create the orphan deploy branch from the deploy directory and push.
if [ $setup ]; then
  mkdir -p "$deploy_directory"
  git --work-tree "$deploy_directory" checkout --orphan $deploy_branch
  git --work-tree "$deploy_directory" rm -r "*"
  git --work-tree "$deploy_directory" add --all
  git --work-tree "$deploy_directory" commit -m "initial publish"$'\n\n'"generated from commit $commit_hash"
  git push $repo $deploy_branch
  restore_head
  exit
fi

if [ ! -d "$deploy_directory" ]; then
  echo "Deploy directory '$deploy_directory' does not exist. Aborting." >&2
  exit 1
fi

if [[ -z `ls -A "$deploy_directory" 2> /dev/null` && -z $allow_empty ]]; then
  echo "Deploy directory '$deploy_directory' is empty. Aborting. If you're sure you want to deploy an empty tree, use the -e flag." >&2
  exit 1
fi

disable_expanded_output
git fetch --force $repo $deploy_branch:$deploy_branch
enable_expanded_output

#make deploy_branch the current branch
git symbolic-ref HEAD refs/heads/$deploy_branch
#put the previously committed contents of deploy_branch branch into the index
git --work-tree "$deploy_directory" reset --mixed --quiet
git --work-tree "$deploy_directory" add --all

# Commit and push only if the deploy directory actually differs from the
# branch tip; `diff` captures git-diff's exit code (0 = same, 1 = changed).
set +o errexit
diff=$(git --work-tree "$deploy_directory" diff --exit-code --quiet HEAD)$?
set -o errexit
case $diff in
  0) echo No changes to files in $deploy_directory. Skipping commit.;;
  1)
    set_user_id
    git --work-tree "$deploy_directory" commit -m \
      "publish: $commit_title"$'\n\n'"generated from commit $commit_hash"

    disable_expanded_output
    #--quiet is important here to avoid outputting the repo URL, which may contain a secret token
    git push --quiet $repo $deploy_branch
    enable_expanded_output
    ;;
  *)
    echo git diff exited with code $diff. Aborting. Staying on branch $deploy_branch so you can debug. To switch back to master, use: git symbolic-ref HEAD refs/heads/master && git reset --mixed >&2
    exit $diff
    ;;
esac

restore_head
|
package usecase

// CommandUsecase is the (currently empty) contract for command handling.
type CommandUsecase interface{}

// commandUsecase is the default, stateless implementation of CommandUsecase.
type commandUsecase struct{}

// NewCommandUsecase returns a ready-to-use CommandUsecase.
func NewCommandUsecase() CommandUsecase {
	return &commandUsecase{}
}
|
// "Encryption" here is really just digesting/signing with hash functions.
// NOTE(review): the original comment mentioned md5 and sha1, but the code
// below actually uses md5 and sha256.
const crypto = require('crypto');
const md5 = crypto.createHash('md5').update('123456').digest('hex');
const sha = crypto.createHash('sha256').update('wayiji').digest('hex');
console.log('md5--> ', md5);
console.log('sha--> ', sha);

// To strengthen things slightly, hash twice (double-layer digest) with a
// fixed suffix mixed in between the two rounds.
function md5Func(passStr){
    return crypto.createHash('md5').update(passStr).digest('hex');
}
console.log('两层-->', md5Func(md5Func('123456')+'wangyijiang'))

/**
 * Processes and threads (translated from the original Chinese notes):
 * 1. Process: roughly "a running program"; owns its own address space/storage.
 * 2. Thread: a process has at least one thread; all threads within one
 *    process share the same address space and code.
 * 3. Multi-process: slow / simple / safe — higher cost (slower); safe
 *    (processes are isolated); inter-process communication is awkward;
 *    the code is simple to write.
 * 4. Multi-thread: fast / complex / fragile — lower cost (faster); less safe
 *    (threads normally live and die together; e.g. Java threads run on a VM,
 *    which may choose how to react when one thread dies); inter-thread
 *    communication is easy; the code is complex (shared resources,
 *    synchronized reads/writes, etc.).
 */
|
<filename>stack/framework/hal/chips/cc1101/cc1101_constants.h
/*
* Copyright (c) 2015-2021 University of Antwerp, Aloxy NV.
*
* This file is part of Sub-IoT.
* See https://github.com/Sub-IoT/Sub-IoT-Stack for further info.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* cc1101_constants.h
*
* Created on: Dec 10, 2012
* Author: <EMAIL>
*/
/* NOTE(review): the guard name CONSTANTS_H_ is very generic and could collide
 * with another header's guard — consider CC1101_CONSTANTS_H_ in a follow-up. */
#ifndef CONSTANTS_H_
#define CONSTANTS_H_

//------------------------------------------------------------------------------------------------------
// CC1101 STROBE, CONTROL AND STATUS REGISTER
// Configuration register addresses (SPI register space 0x00-0x2E).
#define IOCFG2       0x00        // GDO2 output pin configuration
#define IOCFG1       0x01        // GDO1 output pin configuration
#define IOCFG0       0x02        // GDO0 output pin configuration
#define FIFOTHR      0x03        // RX FIFO and TX FIFO thresholds
#define SYNC1        0x04        // Sync word, high byte
#define SYNC0        0x05        // Sync word, low byte
#define PKTLEN       0x06        // Packet length
#define PKTCTRL1     0x07        // Packet automation control
#define PKTCTRL0     0x08        // Packet automation control
#define ADDR         0x09        // Device address
#define CHANNR       0x0A        // Channel number
#define FSCTRL1      0x0B        // Frequency synthesizer control
#define FSCTRL0      0x0C        // Frequency synthesizer control
#define FREQ2        0x0D        // Frequency control word, high byte
#define FREQ1        0x0E        // Frequency control word, middle byte
#define FREQ0        0x0F        // Frequency control word, low byte
#define MDMCFG4      0x10        // Modem configuration
#define MDMCFG3      0x11        // Modem configuration
#define MDMCFG2      0x12        // Modem configuration
#define MDMCFG1      0x13        // Modem configuration
#define MDMCFG0      0x14        // Modem configuration
#define DEVIATN      0x15        // Modem deviation setting
#define MCSM2        0x16        // Main Radio Control State Machine configuration
#define MCSM1        0x17        // Main Radio Control State Machine configuration
#define MCSM0        0x18        // Main Radio Control State Machine configuration
#define FOCCFG       0x19        // Frequency Offset Compensation configuration
#define BSCFG        0x1A        // Bit Synchronization configuration
#define AGCCTRL2     0x1B        // AGC control
#define AGCCTRL1     0x1C        // AGC control
#define AGCCTRL0     0x1D        // AGC control
#define WOREVT1      0x1E        // High byte Event 0 timeout
#define WOREVT0      0x1F        // Low byte Event 0 timeout
#define WORCTRL      0x20        // Wake On Radio control
#define FREND1       0x21        // Front end RX configuration
#define FREND0       0x22        // Front end TX configuration
#define FSCAL3       0x23        // Frequency synthesizer calibration
#define FSCAL2       0x24        // Frequency synthesizer calibration
#define FSCAL1       0x25        // Frequency synthesizer calibration
#define FSCAL0       0x26        // Frequency synthesizer calibration
#define RCCTRL1      0x27        // RC oscillator configuration
#define RCCTRL0      0x28        // RC oscillator configuration
#define FSTEST       0x29        // Frequency synthesizer calibration control
#define PTEST        0x2A        // Production test
#define AGCTEST      0x2B        // AGC test
#define TEST2        0x2C        // Various test settings
#define TEST1        0x2D        // Various test settings
#define TEST0        0x2E        // Various test settings

// Strobe commands (single-byte commands in address space 0x30-0x3D).
#define RF_SRES      0x30        // Reset chip.
#define RF_SFSTXON   0x31        // Enable and calibrate frequency synthesizer (if MCSM0.FS_AUTOCAL=1).
                                 // If in RX/TX: Go to a wait state where only the synthesizer is
                                 // running (for quick RX / TX turnaround).
#define RF_SXOFF     0x32        // Turn off crystal oscillator.
#define RF_SCAL      0x33        // Calibrate frequency synthesizer and turn it off
                                 // (enables quick start).
#define RF_SRX       0x34        // Enable RX. Perform calibration first if coming from IDLE and
                                 // MCSM0.FS_AUTOCAL=1.
#define RF_STX       0x35        // In IDLE state: Enable TX. Perform calibration first if
                                 // MCSM0.FS_AUTOCAL=1. If in RX state and CCA is enabled:
                                 // Only go to TX if channel is clear.
#define RF_SIDLE     0x36        // Exit RX / TX, turn off frequency synthesizer and exit
                                 // Wake-On-Radio mode if applicable.
#define RF_SAFC      0x37        // Perform AFC adjustment of the frequency synthesizer
#define RF_SWOR      0x38        // Start automatic RX polling sequence (Wake-on-Radio)
#define RF_SPWD      0x39        // Enter power down mode when CSn goes high.
#define RF_SFRX      0x3A        // Flush the RX FIFO buffer.
#define RF_SFTX      0x3B        // Flush the TX FIFO buffer.
#define RF_SWORRST   0x3C        // Reset real time clock.
#define RF_SNOP      0x3D        // No operation. May be used to pad strobe commands to two
                                 // bytes for simpler software.

//Status registers (read-only; note these overlap the strobe address range,
//they are distinguished by the burst bit on SPI access).
#define PARTNUM      0x30
#define VERSION      0x31
#define FREQEST      0x32
#define LQI          0x33
#define RSSI         0x34
#define MARCSTATE    0x35
#define WORTIME1     0x36
#define WORTIME0     0x37
#define PKTSTATUS    0x38
#define VCO_VC_DAC   0x39
#define TXBYTES      0x3A
#define RXBYTES      0x3B

// PA table and FIFO access addresses (TX and RX FIFO share address 0x3F;
// direction of access selects which one).
#define PATABLE      0x3E
#define TXFIFO       0x3F
#define RXFIFO       0x3F

// Definitions for burst/single access to registers (OR'd into the address byte).
#define WRITE_BURST  0x40
#define READ_SINGLE  0x80
#define READ_BURST   0xC0

// Chip state values used by this driver.
typedef enum {
    CC1101_CHIPSTATE_SLEEP = 0,
    CC1101_CHIPSTATE_IDLE = 1,
    CC1101_CHIPSTATE_TX = 19
    // TODO other states not used for now
} cc1101_chipstate_t;

// General-purpose digital output pin identifiers.
typedef enum {
    CC1101_GDO0 = 0,
    CC1101_GDO1 = 1,
    CC1101_GDO2 = 2,
} cc1101_gdOx_t;

#endif /* CONSTANTS_H_ */
|
<gh_stars>0
/**
 * Reads matching rows from a local IndexedDB data store and passes them
 * to `callback`.
 *
 * Filters are described by `columns.indexes` (value / exact / unequal /
 * isnull / array / approx_array / all_approx_array / lowerbound /
 * upperbound). Optional paging via `columns.count` (0 = unlimited) and
 * `columns.start_index` (matches to skip). When `columns.access` is set,
 * each candidate row is additionally checked against the
 * 'object_access' store and the 'access_conditions' table for the
 * current session user/roles.
 *
 * @param columns  query descriptor: {data_store, indexes, [count], [start_index], [access]}
 * @param callback invoked once with the accumulated `results` array
 * @param results  caller-supplied accumulator array (usually [])
 */
function local_read_json_rows(columns, callback, results)
{
    if (typeof static_local_db == 'undefined')
    {
        // Database not opened yet: open it, then retry this same call.
        open_local_db(function()
        {
            local_read_json_rows(columns, callback, results);
        });
    }
    else
    {
        var table = columns.data_store;
        var cols = columns.indexes;
        var count = 0;          // 0 => no limit (results.length never === 0 after a push)
        var start_index = 0;    // number of matches to skip before collecting
        var access_control = false;
        if (typeof columns.count != 'undefined')
        {
            count = parseInt(columns.count);
        }
        if (typeof columns.start_index != 'undefined')
        {
            start_index = parseInt(columns.start_index);
        }
        var access_store = table;
        if (typeof columns.access != 'undefined')
        {
            access_control = true;
            if (typeof columns.access.data_store != 'undefined')
            {
                access_store = columns.access.data_store;
            }
        }
        var account_name = get_session_var('acc_name');
        var rolename = get_session_var('user_roles');
        var roles_array = rolename.split("--");
        // Build the filter list; bound/exact filters also pick the index
        // and key range the cursor iterates over.
        var filter = new Array();
        var sort_index = 'last_updated';
        var sort_order = 'prev';    // iterate in descending key order
        var lowerbound = ['0', '0'];
        var upperbound = ['9999999999', '9999999999'];
        var bound_count = 0;
        for (var j = 0; j < cols.length; j++)
        {
            if (typeof cols[j].lowerbound != 'undefined')
            {
                var fil = new Object();
                fil.name = cols[j].index;
                fil.value = "" + cols[j].lowerbound;
                fil.type = 'lowerbound';
                filter.push(fil);
                lowerbound = [fil.value, '0'];
                sort_index = cols[j].index;
                if (bound_count == 0)
                {
                    upperbound = ['9999999999', '9999999999'];
                }
                bound_count += 1;
            }
            if (typeof cols[j].upperbound != 'undefined')
            {
                var fil = new Object();
                fil.name = cols[j].index;
                fil.value = "" + cols[j].upperbound;
                fil.type = 'upperbound';
                filter.push(fil);
                upperbound = [fil.value, '999999999999'];
                sort_index = cols[j].index;
                if (bound_count == 0)
                {
                    lowerbound = ['0', '0'];
                }
                bound_count += 1;
            }
            if (typeof cols[j].array != 'undefined')
            {
                var fil = new Object();
                fil.name = cols[j].index;
                fil.value = cols[j].array;
                fil.type = 'array';
                filter.push(fil);
            }
            if (typeof cols[j].approx_array != 'undefined')
            {
                var fil = new Object();
                fil.name = cols[j].index;
                fil.value = cols[j].approx_array;
                fil.type = 'approx_array';
                filter.push(fil);
            }
            if (typeof cols[j].all_approx_array != 'undefined')
            {
                var fil = new Object();
                fil.name = cols[j].index;
                fil.value = cols[j].all_approx_array;
                fil.type = 'all_approx_array';
                filter.push(fil);
            }
            if (typeof cols[j].unequal != 'undefined')
            {
                var fil = new Object();
                fil.name = cols[j].index;
                fil.value = cols[j].unequal;
                fil.type = 'unequal';
                filter.push(fil);
            }
            if (typeof cols[j].isnull != 'undefined')
            {
                var fil = new Object();
                fil.name = cols[j].index;
                fil.value = cols[j].isnull;
                fil.type = 'isnull';
                filter.push(fil);
            }
            if (typeof cols[j].value != 'undefined' && cols[j].value != "")
            {
                var fil = new Object();
                fil.name = cols[j].index;
                fil.value = cols[j].value;
                fil.type = '';
                filter.push(fil);
            }
            if (typeof cols[j].exact != 'undefined')
            {
                var fil = new Object();
                fil.name = cols[j].index;
                fil.value = cols[j].exact;
                fil.type = 'exact';
                filter.push(fil);
                sort_index = cols[j].index;
                lowerbound = [fil.value, '0'];
                upperbound = [fil.value, '99999999'];
                bound_count = 0;
            }
        }
        /**
         * Returns true when `record` satisfies every filter.
         * String comparisons are case-insensitive substring/equality checks.
         */
        function local_read_json_rows_filtering(record)
        {
            for (var i = 0; i < filter.length; i++)
            {
                if (typeof record[filter[i].name] != "undefined")
                {
                    var string = record[filter[i].name].toString().toLowerCase();
                    if (filter[i].type != 'array')
                    {
                        var search_word = filter[i].value.toString().toLowerCase();
                        if (filter[i].type == '')
                        {
                            // plain filter: case-insensitive substring match
                            if (string.indexOf(search_word) === -1)
                            {
                                return false;
                            }
                        }
                        if (filter[i].type == 'exact')
                        {
                            if (search_word !== string)
                            {
                                return false;
                            }
                        }
                        if (filter[i].type == 'unequal')
                        {
                            if (search_word == string)
                            {
                                return false;
                            }
                        }
                        if (filter[i].type == 'isnull')
                        {
                            if (filter[i].value == 'no' && string == "null")
                            {
                                return false;
                            }
                            else if (filter[i].value == 'yes' && string != "null")
                            {
                                return false;
                            }
                        }
                        if (filter[i].type == 'upperbound')
                        {
                            // bounds are exclusive
                            if (parseFloat(record[filter[i].name]) >= parseFloat(filter[i].value))
                            {
                                return false;
                            }
                        }
                        else if (filter[i].type == 'lowerbound')
                        {
                            if (parseFloat(record[filter[i].name]) <= parseFloat(filter[i].value))
                            {
                                return false;
                            }
                        }
                    }
                    else if (filter[i].type == 'array')
                    {
                        // field value must be one of the supplied candidates
                        if (filter[i].value.indexOf(string) == -1)
                        {
                            return false;
                        }
                    }
                    if (filter[i].type == 'approx_array')
                    {
                        // at least one candidate substring must occur
                        var approx_array = filter[i].value;
                        var sub_match = false;
                        for (var ab in approx_array)
                        {
                            if (string.indexOf(approx_array[ab]) > -1)
                            {
                                sub_match = true;
                                break;
                            }
                        }
                        if (!sub_match)
                        {
                            return false;
                        }
                    }
                    if (filter[i].type == 'all_approx_array')
                    {
                        // every candidate substring must occur
                        var all_approx_array = filter[i].value;
                        for (var ab in all_approx_array)
                        {
                            if (string.indexOf(all_approx_array[ab]) == -1)
                            {
                                return false;
                            }
                        }
                    }
                }
                else
                {
                    // Field missing on the record: only 'unequal' filters pass.
                    if (filter[i].type != 'unequal')
                    {
                        return false;
                    }
                    // FIXME(review): unreachable — if type!='unequal' we already
                    // returned above, and if type=='unequal' this check is false.
                    // Missing fields therefore never satisfy isnull=='yes'.
                    if (filter[i].type == 'isnull')
                    {
                        if (filter[i].value == 'no')
                        {
                            return false;
                        }
                    }
                }
            }
            return true;
        }
        /**
         * Plain (no access control) traversal: walk the chosen index/range,
         * apply the filters, page, and call back when done or full.
         */
        function local_read_json_rows_data_traversing()
        {
            var sort_key = IDBKeyRange.bound(lowerbound, upperbound);
            var objectstore = static_local_db.transaction([table], "readonly").objectStore(table).index(sort_index);
            if (filter.length > 0)
            {
                if (filter[0].name == 'id')
                {
                    // Primary-key lookup: cursor over the store itself.
                    objectstore = static_local_db.transaction([table], "readonly").objectStore(table);
                    sort_key = IDBKeyRange.only(filter[0].value);
                }
            }
            var read_request = objectstore.openCursor(sort_key, sort_order);
            localdb_open_requests += 1;
            read_request.onsuccess = function(e)
            {
                var result = e.target.result;
                if (result)
                {
                    var record = result.value;
                    var match_word = local_read_json_rows_filtering(record);
                    if (match_word === true)
                    {
                        if (start_index == 0)
                        {
                            results.push(record);
                        }
                        else
                        {
                            start_index -= 1;   // still skipping toward the page start
                        }
                        if (results.length === count)
                        {
                            localdb_open_requests -= 1;
                            callback(results);
                        }
                        else
                        {
                            result.continue();
                        }
                    }
                    else
                    {
                        result.continue();
                    }
                }
                else
                {
                    // cursor exhausted
                    localdb_open_requests -= 1;
                    callback(results);
                }
            };
        }
        /**
         * Access-controlled traversal: a filter-matching record is only
         * collected when an 'object_access' entry grants the current
         * user/role access, or a field-level access condition applies.
         */
        function local_read_json_rows_object_traversing(access_conditions_array)
        {
            var sort_key = IDBKeyRange.bound(lowerbound, upperbound);
            var ac_transaction = static_local_db.transaction([table, 'object_access'], "readonly");
            var t_objectstore = ac_transaction.objectStore(table).index(sort_index);
            var o_objectstore = ac_transaction.objectStore('object_access').index('record_id');
            if (filter.length > 0)
            {
                if (filter[0].name == 'id')
                {
                    // BUG FIX: the original assigned an undeclared global
                    // `objectstore` that was never read, so the id lookup was
                    // silently ignored and the wrong index was scanned. Use the
                    // store of the already-open transaction, mirroring
                    // local_read_json_rows_data_traversing().
                    t_objectstore = ac_transaction.objectStore(table);
                    sort_key = IDBKeyRange.only(filter[0].value);
                }
            }
            var read_request = t_objectstore.openCursor(sort_key, sort_order);
            localdb_open_requests += 1;
            read_request.onsuccess = function(e)
            {
                var result = e.target.result;
                if (result)
                {
                    var record = result.value;
                    var match_word = local_read_json_rows_filtering(record);
                    if (match_word === true)
                    {
                        // Look up access entries for this record id.
                        var object_key = IDBKeyRange.bound([record.id, '000000000'], [record.id, '99999999999999']);
                        var object_read_request = o_objectstore.openCursor(object_key, sort_order);
                        object_read_request.onsuccess = function(oe)
                        {
                            var oresult = oe.target.result;
                            if (oresult)
                            {
                                var orecord = oresult.value;
                                if (orecord.tablename == access_store)
                                {
                                    if (orecord.user_type == 'user')
                                    {
                                        if (orecord.user.indexOf(account_name) != -1)
                                        {
                                            if (start_index == 0)
                                            {
                                                results.push(record);
                                            }
                                            else
                                            {
                                                start_index -= 1;
                                            }
                                            if (results.length === count)
                                            {
                                                localdb_open_requests -= 1;
                                                callback(results);
                                            }
                                            else
                                            {
                                                result.continue();
                                            }
                                        }
                                        // FIXME(review): when the user entry does not
                                        // include account_name, neither cursor is
                                        // advanced and the traversal stalls. Preserved
                                        // as-is pending confirmation of intent.
                                    }
                                    else if (orecord.user_type == 'role')
                                    {
                                        var role_matched = false;
                                        for (var aa in roles_array)
                                        {
                                            if (roles_array[aa] != "" && orecord.user.indexOf(roles_array[aa]) != -1)
                                            {
                                                role_matched = true;
                                                if (start_index == 0)
                                                {
                                                    results.push(record);
                                                }
                                                else
                                                {
                                                    start_index -= 1;
                                                }
                                                if (results.length === count)
                                                {
                                                    localdb_open_requests -= 1;
                                                    callback(results);
                                                }
                                                else
                                                {
                                                    result.continue();
                                                }
                                                break;
                                            }
                                        }
                                        // BUG FIX: only advance here when no role matched.
                                        // The original called result.continue() a second
                                        // time after the matching branch had already
                                        // advanced (or finished), which throws
                                        // InvalidStateError on the cursor.
                                        if (!role_matched)
                                        {
                                            result.continue();
                                        }
                                    }
                                }
                                else
                                {
                                    // access entry for a different table: keep scanning
                                    oresult.continue();
                                }
                            }
                            else
                            {
                                // No explicit object_access grant matched: fall back to
                                // the table-level access conditions.
                                for (var bb in access_conditions_array)
                                {
                                    if (access_conditions_array[bb].user_type == 'field')
                                    {
                                        // grant if the named record field mentions this account
                                        if (record[access_conditions_array[bb].user].indexOf(account_name) != -1)
                                        {
                                            if (access_conditions_array[bb].criteria_field == "" || access_conditions_array[bb].criteria_field == null || record[access_conditions_array[bb].criteria_field] == access_conditions_array[bb].criteria_value)
                                            {
                                                if (start_index == 0)
                                                {
                                                    results.push(record);
                                                }
                                                else
                                                {
                                                    start_index -= 1;
                                                }
                                                if (results.length === count)
                                                {
                                                    localdb_open_requests -= 1;
                                                    callback(results);
                                                }
                                                else
                                                {
                                                    result.continue();
                                                }
                                            }
                                            else
                                            {
                                                result.continue();
                                            }
                                        }
                                    }
                                    else
                                    {
                                        // user/role conditions were pre-filtered by the
                                        // caller; only the optional criteria apply here.
                                        if (access_conditions_array[bb].criteria_field == "" || access_conditions_array[bb].criteria_field == null || record[access_conditions_array[bb].criteria_field] == access_conditions_array[bb].criteria_value)
                                        {
                                            if (start_index == 0)
                                            {
                                                results.push(record);
                                            }
                                            else
                                            {
                                                start_index -= 1;
                                            }
                                            if (results.length === count)
                                            {
                                                localdb_open_requests -= 1;
                                                callback(results);
                                            }
                                            else
                                            {
                                                result.continue();
                                            }
                                        }
                                        else
                                        {
                                            result.continue();
                                        }
                                    }
                                }
                                if (access_conditions_array.length == 0)
                                {
                                    result.continue();
                                }
                            }
                        };
                    }
                    else
                    {
                        result.continue();
                    }
                }
                else
                {
                    localdb_open_requests -= 1;
                    callback(results);
                }
            };
        }
        if (!access_control)
        {
            local_read_json_rows_data_traversing();
        }
        else
        {
            // Collect the access conditions that apply to this user/roles,
            // then run the access-controlled traversal.
            var access_conditions_array = [];
            var ac_objectstore = static_local_db.transaction(['access_conditions'], "readonly").objectStore('access_conditions').index('tablename');
            var ac_lowerbound = [access_store, '0'];
            var ac_upperbound = [access_store, '9999999999'];
            var ac_key = IDBKeyRange.bound(ac_lowerbound, ac_upperbound);
            var ac_read_req = ac_objectstore.openCursor(ac_key, sort_order);
            ac_read_req.onsuccess = function(e)
            {
                var result = e.target.result;
                if (result)
                {
                    var record = result.value;
                    if (record.user_type == 'field')
                    {
                        access_conditions_array.push(record);
                    }
                    else if (record.user_type == 'user')
                    {
                        if (record.user.indexOf(account_name) != -1)
                        {
                            access_conditions_array.push(record);
                        }
                    }
                    else if (record.user_type == 'role')
                    {
                        for (var aa in roles_array)
                        {
                            if (roles_array[aa] != "" && record.user.indexOf(roles_array[aa]) != -1)
                            {
                                access_conditions_array.push(record);
                                break;
                            }
                        }
                    }
                    result.continue();
                }
                else
                {
                    local_read_json_rows_object_traversing(access_conditions_array);
                }
            }
        }
    }
};
/**
 * Reads a single column from a local IndexedDB data store.
 * Applies the index filters in `columns.indexes`, then either
 *  - collects `record[columns.return_column]` values (deduplicated with
 *    vUtil.arrayUnique) into `results` and passes them to `callback`, or
 *  - when `columns.sum` is present, numerically sums the column and
 *    calls `callback([sum])`.
 * @param columns  {data_store, return_column, [indexes], [count], [start_index], [sum]}
 * @param callback receives the deduplicated values array (or [sum])
 * @param results  caller-supplied accumulator array
 */
function local_read_json_column(columns, callback, results)
{
    if (typeof static_local_db == 'undefined')
    {
        // Database not opened yet: open it, then retry.
        open_local_db(function()
        {
            local_read_json_column(columns, callback, results);
        });
    }
    else
    {
        var table = columns.data_store;
        var count = 0;          // 0 => collect everything
        var start_index = 0;
        var result_column_name = columns.return_column;
        var sum = false;
        if (typeof columns.sum != 'undefined')
        {
            sum = true;
        }
        var sum_result = 0;
        if (typeof columns.count != 'undefined')
        {
            count = parseInt(columns.count);
        }
        if (typeof columns.start_index != 'undefined')
        {
            start_index = parseInt(columns.start_index);
        }
        // Build the filter list; bound/exact filters also choose the
        // index and key range the cursor iterates over.
        var filter = new Array();
        var sort_index = 'last_updated';
        var sort_order = 'prev';
        var lowerbound = ['0', '0'];
        var upperbound = ['9999999999', '9999999999'];
        var bound_count = 0;
        if (typeof columns.indexes != 'undefined')
        {
            var cols = columns.indexes;
            for (var j = 0; j < cols.length; j++)
            {
                if (typeof cols[j].lowerbound != 'undefined')
                {
                    var fil = new Object();
                    fil.name = cols[j].index;
                    fil.value = "" + cols[j].lowerbound;
                    fil.type = 'lowerbound';
                    filter.push(fil);
                    lowerbound = [fil.value, '0'];
                    sort_index = cols[j].index;
                    if (bound_count == 0)
                    {
                        upperbound = ['9999999999', '9999999999'];
                    }
                    bound_count += 1;
                }
                if (typeof cols[j].upperbound != 'undefined')
                {
                    var fil = new Object();
                    fil.name = cols[j].index;
                    fil.value = "" + cols[j].upperbound;
                    fil.type = 'upperbound';
                    filter.push(fil);
                    upperbound = [fil.value, '999999999999'];
                    sort_index = cols[j].index;
                    if (bound_count == 0)
                    {
                        lowerbound = ['0', '0'];
                    }
                    bound_count += 1;
                }
                if (typeof cols[j].array != 'undefined')
                {
                    var fil = new Object();
                    fil.name = cols[j].index;
                    fil.value = cols[j].array;
                    fil.type = 'array';
                    filter.push(fil);
                }
                if (typeof cols[j].approx_array != 'undefined')
                {
                    var fil = new Object();
                    fil.name = cols[j].index;
                    fil.value = cols[j].approx_array;
                    fil.type = 'approx_array';
                    filter.push(fil);
                }
                if (typeof cols[j].all_approx_array != 'undefined')
                {
                    var fil = new Object();
                    fil.name = cols[j].index;
                    fil.value = cols[j].all_approx_array;
                    fil.type = 'all_approx_array';
                    filter.push(fil);
                }
                if (typeof cols[j].unequal != 'undefined')
                {
                    var fil = new Object();
                    fil.name = cols[j].index;
                    fil.value = cols[j].unequal;
                    fil.type = 'unequal';
                    filter.push(fil);
                }
                if (typeof cols[j].isnull != 'undefined')
                {
                    var fil = new Object();
                    fil.name = cols[j].index;
                    fil.value = cols[j].isnull;
                    fil.type = 'isnull';
                    filter.push(fil);
                }
                if (typeof cols[j].value != 'undefined' && cols[j].value != "")
                {
                    var fil = new Object();
                    fil.name = cols[j].index;
                    fil.value = cols[j].value;
                    fil.type = '';
                    filter.push(fil);
                }
                if (typeof cols[j].exact != 'undefined')
                {
                    var fil = new Object();
                    fil.name = cols[j].index;
                    fil.value = cols[j].exact;
                    fil.type = 'exact';
                    filter.push(fil);
                    sort_index = cols[j].index;
                    lowerbound = [fil.value, '0'];
                    upperbound = [fil.value, '99999999'];
                    bound_count = 0;
                }
            }
        }
        var sort_key = IDBKeyRange.bound(lowerbound, upperbound);
        var objectstore = static_local_db.transaction([table], "readonly").objectStore(table).index(sort_index);
        if (filter.length > 0)
        {
            if (filter[0].name == 'id')
            {
                // Primary-key lookup: cursor over the store itself.
                objectstore = static_local_db.transaction([table], "readonly").objectStore(table);
                sort_key = IDBKeyRange.only(filter[0].value);
            }
        }
        var read_request = objectstore.openCursor(sort_key, sort_order);
        localdb_open_requests += 1;
        read_request.onsuccess = function(e)
        {
            var result = e.target.result;
            if (result)
            {
                var record = result.value;
                var match_word = true;
                for (var i = 0; i < filter.length; i++)
                {
                    if (typeof record[filter[i].name] != "undefined")
                    {
                        var string = record[filter[i].name].toString().toLowerCase();
                        if (filter[i].type != 'array')
                        {
                            var search_word = filter[i].value.toString().toLowerCase();
                            if (filter[i].type == '')
                            {
                                // plain filter: case-insensitive substring match
                                if (string.indexOf(search_word) === -1)
                                {
                                    match_word = false;
                                    break;
                                }
                            }
                            if (filter[i].type == 'exact')
                            {
                                if (search_word !== string)
                                {
                                    match_word = false;
                                    break;
                                }
                            }
                            if (filter[i].type == 'unequal')
                            {
                                if (search_word == string)
                                {
                                    match_word = false;
                                    break;
                                }
                            }
                            if (filter[i].type == 'isnull')
                            {
                                if (filter[i].value == 'no' && string == "null")
                                {
                                    match_word = false;
                                    break;
                                }
                                else if (filter[i].value == 'yes' && string != "null")
                                {
                                    // BUG FIX: was `march_word=false` — an undeclared
                                    // global — so isnull=='yes' never excluded rows.
                                    match_word = false;
                                    break;
                                }
                            }
                            if (filter[i].type == 'upperbound')
                            {
                                // bounds are exclusive
                                if (parseFloat(record[filter[i].name]) >= parseFloat(filter[i].value))
                                {
                                    match_word = false;
                                    break;
                                }
                            }
                            else if (filter[i].type == 'lowerbound')
                            {
                                if (parseFloat(record[filter[i].name]) <= parseFloat(filter[i].value))
                                {
                                    match_word = false;
                                    break;
                                }
                            }
                        }
                        else if (filter[i].type == 'array')
                        {
                            if (filter[i].value.indexOf(string) == -1)
                            {
                                match_word = false;
                                break;
                            }
                        }
                        if (filter[i].type == 'approx_array')
                        {
                            // at least one candidate substring must occur
                            var approx_array = filter[i].value;
                            var sub_match = false;
                            for (var ab in approx_array)
                            {
                                if (string.indexOf(approx_array[ab]) > -1)
                                {
                                    sub_match = true;
                                    break;
                                }
                            }
                            if (!sub_match)
                            {
                                match_word = false;
                                break;
                            }
                        }
                        if (filter[i].type == 'all_approx_array')
                        {
                            // every candidate substring must occur
                            var all_approx_array = filter[i].value;
                            var sub_match = true;
                            for (var ab in all_approx_array)
                            {
                                if (string.indexOf(all_approx_array[ab]) == -1)
                                {
                                    sub_match = false;
                                    break;
                                }
                            }
                            if (!sub_match)
                            {
                                match_word = false;
                                break;
                            }
                        }
                    }
                    else
                    {
                        // Field missing on the record: only 'unequal' filters pass.
                        if (filter[i].type != 'unequal')
                        {
                            match_word = false;
                            break;
                        }
                        if (filter[i].type == 'isnull')
                        {
                            if (filter[i].value == 'no')
                            {
                                match_word = false;
                                break;
                            }
                        }
                    }
                }
                if (match_word === true)
                {
                    if (sum)
                    {
                        sum_result += parseFloat(record[result_column_name]);
                        result.continue();
                    }
                    else
                    {
                        results.push(record[result_column_name]);
                        if (results.length === count)
                        {
                            localdb_open_requests -= 1;
                            results = vUtil.arrayUnique(results);
                            callback(results);
                        }
                        else
                        {
                            result.continue();
                        }
                    }
                }
                else
                {
                    result.continue();
                }
            }
            else
            {
                // cursor exhausted
                localdb_open_requests -= 1;
                if (sum)
                {
                    callback([sum_result]);
                }
                else
                {
                    results = vUtil.arrayUnique(results);
                    callback(results);
                }
            }
        };
    }
};
/**
 * Counts the rows of a local IndexedDB data store that satisfy the
 * index filters in `columns.indexes`, honouring optional paging
 * (`columns.count` caps the count, `columns.start_index` skips matches),
 * and passes the resulting number to `callback`.
 * @param columns  {data_store, [indexes], [count], [start_index]}
 * @param callback invoked with the final count (a number)
 */
function local_read_json_count(columns, callback)
{
    if (typeof static_local_db == 'undefined')
    {
        // Database not opened yet: open it, then retry.
        open_local_db(function()
        {
            local_read_json_count(columns, callback);
        });
    }
    else
    {
        var table = columns.data_store;
        var count = 0;          // 0 => count everything
        var start_index = 0;
        var result_count = 0;
        if (typeof columns.count != 'undefined')
        {
            count = parseInt(columns.count);
        }
        if (typeof columns.start_index != 'undefined')
        {
            start_index = parseInt(columns.start_index);
        }
        // Build the filter list; bound/exact filters also choose the
        // index and key range the cursor iterates over.
        var filter = new Array();
        var sort_index = 'last_updated';
        var sort_order = 'prev';
        var lowerbound = ['0', '0'];
        var upperbound = ['9999999999', '9999999999'];
        var bound_count = 0;
        if (typeof columns.indexes != 'undefined')
        {
            var cols = columns.indexes;
            for (var j = 0; j < cols.length; j++)
            {
                if (typeof cols[j].lowerbound != 'undefined')
                {
                    var fil = new Object();
                    fil.name = cols[j].index;
                    fil.value = "" + cols[j].lowerbound;
                    fil.type = 'lowerbound';
                    filter.push(fil);
                    lowerbound = [fil.value, '0'];
                    sort_index = cols[j].index;
                    if (bound_count == 0)
                    {
                        upperbound = ['9999999999', '9999999999'];
                    }
                    bound_count += 1;
                }
                if (typeof cols[j].upperbound != 'undefined')
                {
                    var fil = new Object();
                    fil.name = cols[j].index;
                    fil.value = "" + cols[j].upperbound;
                    fil.type = 'upperbound';
                    filter.push(fil);
                    upperbound = [fil.value, '999999999999'];
                    sort_index = cols[j].index;
                    if (bound_count == 0)
                    {
                        lowerbound = ['0', '0'];
                    }
                    bound_count += 1;
                }
                if (typeof cols[j].array != 'undefined')
                {
                    var fil = new Object();
                    fil.name = cols[j].index;
                    fil.value = cols[j].array;
                    fil.type = 'array';
                    filter.push(fil);
                }
                if (typeof cols[j].approx_array != 'undefined')
                {
                    var fil = new Object();
                    fil.name = cols[j].index;
                    fil.value = cols[j].approx_array;
                    fil.type = 'approx_array';
                    filter.push(fil);
                }
                if (typeof cols[j].all_approx_array != 'undefined')
                {
                    var fil = new Object();
                    fil.name = cols[j].index;
                    fil.value = cols[j].all_approx_array;
                    fil.type = 'all_approx_array';
                    filter.push(fil);
                }
                if (typeof cols[j].unequal != 'undefined')
                {
                    var fil = new Object();
                    fil.name = cols[j].index;
                    fil.value = cols[j].unequal;
                    fil.type = 'unequal';
                    filter.push(fil);
                }
                if (typeof cols[j].isnull != 'undefined')
                {
                    var fil = new Object();
                    fil.name = cols[j].index;
                    fil.value = cols[j].isnull;
                    fil.type = 'isnull';
                    filter.push(fil);
                }
                if (typeof cols[j].value != 'undefined' && cols[j].value != "")
                {
                    var fil = new Object();
                    fil.name = cols[j].index;
                    fil.value = cols[j].value;
                    fil.type = '';
                    filter.push(fil);
                }
                if (typeof cols[j].exact != 'undefined')
                {
                    var fil = new Object();
                    fil.name = cols[j].index;
                    fil.value = cols[j].exact;
                    fil.type = 'exact';
                    filter.push(fil);
                    sort_index = cols[j].index;
                    lowerbound = [fil.value, '0'];
                    upperbound = [fil.value, '99999999'];
                    bound_count = 0;
                }
            }
        }
        var sort_key = IDBKeyRange.bound(lowerbound, upperbound);
        var objectstore = static_local_db.transaction([table], "readonly").objectStore(table).index(sort_index);
        if (filter.length > 0)
        {
            if (filter[0].name == 'id')
            {
                // Primary-key lookup: cursor over the store itself.
                objectstore = static_local_db.transaction([table], "readonly").objectStore(table);
                sort_key = IDBKeyRange.only(filter[0].value);
            }
        }
        var read_request = objectstore.openCursor(sort_key, sort_order);
        localdb_open_requests += 1;
        read_request.onsuccess = function(e)
        {
            var result = e.target.result;
            if (result)
            {
                var record = result.value;
                var match_word = true;
                for (var i = 0; i < filter.length; i++)
                {
                    if (typeof record[filter[i].name] != "undefined")
                    {
                        var string = record[filter[i].name].toString().toLowerCase();
                        if (filter[i].type != 'array')
                        {
                            var search_word = filter[i].value.toString().toLowerCase();
                            if (filter[i].type == '')
                            {
                                // plain filter: case-insensitive substring match
                                if (string.indexOf(search_word) === -1)
                                {
                                    match_word = false;
                                    break;
                                }
                            }
                            if (filter[i].type == 'exact')
                            {
                                if (search_word !== string)
                                {
                                    match_word = false;
                                    break;
                                }
                            }
                            if (filter[i].type == 'unequal')
                            {
                                if (search_word == string)
                                {
                                    match_word = false;
                                    break;
                                }
                            }
                            if (filter[i].type == 'isnull')
                            {
                                if (filter[i].value == 'no' && string == "null")
                                {
                                    match_word = false;
                                    break;
                                }
                                else if (filter[i].value == 'yes' && string != "null")
                                {
                                    // BUG FIX: was `march_word=false` — an undeclared
                                    // global — so isnull=='yes' never excluded rows.
                                    match_word = false;
                                    break;
                                }
                            }
                            if (filter[i].type == 'upperbound')
                            {
                                // bounds are exclusive
                                if (parseFloat(record[filter[i].name]) >= parseFloat(filter[i].value))
                                {
                                    match_word = false;
                                    break;
                                }
                            }
                            else if (filter[i].type == 'lowerbound')
                            {
                                if (parseFloat(record[filter[i].name]) <= parseFloat(filter[i].value))
                                {
                                    match_word = false;
                                    break;
                                }
                            }
                        }
                        else if (filter[i].type == 'array')
                        {
                            if (filter[i].value.indexOf(string) == -1)
                            {
                                match_word = false;
                                break;
                            }
                        }
                        if (filter[i].type == 'approx_array')
                        {
                            // at least one candidate substring must occur
                            var approx_array = filter[i].value;
                            var sub_match = false;
                            for (var ab in approx_array)
                            {
                                if (string.indexOf(approx_array[ab]) > -1)
                                {
                                    sub_match = true;
                                    break;
                                }
                            }
                            if (!sub_match)
                            {
                                match_word = false;
                                break;
                            }
                        }
                        if (filter[i].type == 'all_approx_array')
                        {
                            // every candidate substring must occur
                            var all_approx_array = filter[i].value;
                            var sub_match = true;
                            for (var ab in all_approx_array)
                            {
                                if (string.indexOf(all_approx_array[ab]) == -1)
                                {
                                    sub_match = false;
                                    break;
                                }
                            }
                            if (!sub_match)
                            {
                                match_word = false;
                                break;
                            }
                        }
                    }
                    else
                    {
                        // Field missing on the record: only 'unequal' filters pass.
                        if (filter[i].type != 'unequal')
                        {
                            match_word = false;
                            break;
                        }
                        if (filter[i].type == 'isnull')
                        {
                            if (filter[i].value == 'no')
                            {
                                match_word = false;
                                break;
                            }
                        }
                    }
                }
                if (match_word === true)
                {
                    if (start_index == 0)
                    {
                        result_count += 1;
                    }
                    else
                    {
                        start_index -= 1;   // still skipping toward the page start
                    }
                    if (result_count === count)
                    {
                        localdb_open_requests -= 1;
                        callback(result_count);
                    }
                    else
                    {
                        result.continue();
                    }
                }
                else
                {
                    result.continue();
                }
            }
            else
            {
                // cursor exhausted
                localdb_open_requests -= 1;
                callback(result_count);
            }
        };
    }
};
/**
* Generates a custom report by joining the tables/fields configured in the
* 'report_items' store for the given report, applying any field/value
* conditions, and passing the assembled rows to the callback.
* @param report_id id of the report whose items define the query
* @param callback invoked with the array of result row objects
* @returns
*/
function local_generate_report_json(report_id,callback)
{
if(typeof static_local_db=='undefined')
{
// Database not opened yet: open it, then retry this call.
open_local_db(function()
{
local_generate_report_json(report_id,callback);
});
}
else
{
show_loader();
var report_tables=[];      // tables referenced by this report
var report_fields=[];      // [table, field] pairs to emit per row
var field_conditions=[];   // [t1, f1, condition, t2, f2] field-vs-field conditions
var value_conditions=[];   // [t1, f1, condition, value] field-vs-constant conditions
var results=[];
// Load the report definition rows for this report id.
var keyValue=IDBKeyRange.bound([report_id,'0'],[report_id,'99999999']);
static_local_db.transaction(['report_items'],"readonly").objectStore('report_items').index('report_id').openCursor(keyValue).onsuccess=function(e)
{
var result=e.target.result;
if(result)
{
var record=result.value;
report_tables.push(record['table1']);
report_fields.push([record['table1'],record['field1']]);
if(record['condition1']!='none')
{
// Conditions whose name contains 'field' compare two fields
// (and pull in the second table); all others compare to a value.
if(record['condition1'].indexOf('field')!=-1)
{
report_tables.push(record['table2']);
report_fields.push([record['table2'],record['field2']]);
field_conditions.push([record['table1'],record['field1'],record['condition1'],record['table2'],record['field2']]);
}
else
{
value_conditions.push([record['table1'],record['field1'],record['condition1'],record['value']]);
}
}
result.continue();
}
else
{
// Definition fully read: dedupe, then walk the cross product of all
// referenced tables with one nested cursor per table.
report_tables=vUtil.arrayUnique(report_tables);
report_fields=vUtil.arrayUnique(report_fields);
field_conditions=vUtil.arrayUnique(field_conditions);
value_conditions=vUtil.arrayUnique(value_conditions);
var trans=static_local_db.transaction(report_tables,"readonly");
// `cursors` is keyed by table name: current cursor position per table.
var cursors=[];
// Recursively opens a cursor for table i; once every table has a live
// cursor (innermost level) the current row combination is evaluated.
function open_cursor(i)
{
if(i<report_tables.length)
{
var objectStore=trans.objectStore(report_tables[i]);
var j=i+1;
objectStore.openCursor().onsuccess=function(event)
{
cursors[report_tables[i]]=event.target.result;
if(cursors[report_tables[i]])
{
if(j==report_tables.length)
{
// Innermost table: test the current combination against all conditions.
var match_word=true;
for(var y in field_conditions)
{
if(field_conditions[y][2]=='equals field' && cursors[field_conditions[y][0]].value[field_conditions[y][1]]!=cursors[field_conditions[y][3]].value[field_conditions[y][4]])
{
match_word=false;
break;
}
if(field_conditions[y][2]=='not equals field' && cursors[field_conditions[y][0]].value[field_conditions[y][1]]==cursors[field_conditions[y][3]].value[field_conditions[y][4]])
{
match_word=false;
break;
}
if(field_conditions[y][2]=='greater than field' && cursors[field_conditions[y][0]].value[field_conditions[y][1]]<=cursors[field_conditions[y][3]].value[field_conditions[y][4]])
{
match_word=false;
break;
}
if(field_conditions[y][2]=='less than field' && cursors[field_conditions[y][0]].value[field_conditions[y][1]]>=cursors[field_conditions[y][3]].value[field_conditions[y][4]])
{
match_word=false;
break;
}
}
for(var z in value_conditions)
{
if(value_conditions[z][2]=='equals value' && cursors[value_conditions[z][0]].value[value_conditions[z][1]]!=value_conditions[z][3])
{
match_word=false;
break;
}
if(value_conditions[z][2]=='not equals value' && cursors[value_conditions[z][0]].value[value_conditions[z][1]]==value_conditions[z][3])
{
match_word=false;
break;
}
if(value_conditions[z][2]=='greater than value' && cursors[value_conditions[z][0]].value[value_conditions[z][1]]<=value_conditions[z][3])
{
match_word=false;
break;
}
if(value_conditions[z][2]=='less than value' && cursors[value_conditions[z][0]].value[value_conditions[z][1]]>=value_conditions[z][3])
{
match_word=false;
break;
}
}
if(match_word===true)
{
// Emit one row containing every configured report field.
// NOTE(review): keyed by field name only, so identically named
// fields from different tables overwrite each other — confirm intended.
var data_array=new Object();
for(var x=0;x<report_fields.length;x++)
{
data_array[report_fields[x][1]]=cursors[report_fields[x][0]].value[report_fields[x][1]];
}
results.push(data_array);
}
cursors[report_tables[i]].continue();
}
else
{
// Not innermost yet: descend to the next table.
open_cursor(j);
}
}
else if((i-1)>=0)
{
// This table's cursor is exhausted: advance the parent cursor
// (this table will be re-opened from scratch on its next row).
cursors[report_tables[i-1]].continue();
}
else
{
// Outermost cursor exhausted: the whole cross product has been visited.
callback(results);
hide_loader();
}
}
}
};
open_cursor(0);
}
};
}
}
/**
* Deletes matching rows from a local IndexedDB data store and records an
* 'unsynced' delete activity for each removed row.
* @param columns query descriptor: {data_store, data, [log], [log_data]}
* @param func optional completion callback, invoked once all deletes finish
* @returns
*/
/**
 * Deletes rows from a local IndexedDB data store that match the filters in
 * `columns.data`, writing one 'unsynced' delete row into the 'activities'
 * store per deleted record. If the first filter is on 'id' a direct
 * primary-key delete is performed; otherwise the first filter's index is
 * scanned and every match deleted.
 * @param columns {data_store, data, [log], [log_data]}
 * @param func    optional completion callback
 */
function local_delete_json(columns, func)
{
    if (typeof static_local_db == 'undefined')
    {
        // Database not opened yet: open it, then retry.
        open_local_db(function()
        {
            local_delete_json(columns, func);
        });
    }
    else
    {
        show_loader();
        localdb_open_requests += 1;
        var table = columns.data_store;
        var cols = columns.data;
        var log = 'no';
        var activity_data = [];
        var result_count = 0;
        if (typeof columns.log != 'undefined')
        {
            log = columns.log;
        }
        if (typeof columns.log_data != 'undefined')
        {
            activity_data = columns.log_data;
        }
        // Build the filter list (same scheme as local_read_json_rows).
        var filter = new Array();
        var sort_index = 'last_updated';
        var sort_order = 'prev';
        var lowerbound = ['0', '0'];
        var upperbound = ['9999999999', '9999999999'];
        var bound_count = 0;
        for (var j = 0; j < cols.length; j++)
        {
            // BUG FIX: the original allocated ONE `fil` per column and reused
            // it across branches, so a column specifying two filter kinds
            // pushed the same mutated object twice. Allocate per branch, as
            // the sibling read functions do.
            if (typeof cols[j].lowerbound != 'undefined')
            {
                var fil = new Object();
                fil.name = cols[j].index;
                fil.value = "" + cols[j].lowerbound;
                fil.type = 'lowerbound';
                filter.push(fil);
                lowerbound = [fil.value, '0'];
                sort_index = cols[j].index;
                if (bound_count == 0)
                {
                    upperbound = ['9999999999', '9999999999'];
                }
                bound_count += 1;
            }
            if (typeof cols[j].upperbound != 'undefined')
            {
                var fil = new Object();
                fil.name = cols[j].index;
                fil.value = "" + cols[j].upperbound;
                fil.type = 'upperbound';
                filter.push(fil);
                upperbound = [fil.value, '999999999999'];
                sort_index = cols[j].index;
                if (bound_count == 0)
                {
                    lowerbound = ['0', '0'];
                }
                bound_count += 1;
            }
            if (typeof cols[j].array != 'undefined')
            {
                var fil = new Object();
                fil.name = cols[j].index;
                fil.value = cols[j].array;
                fil.type = 'array';
                filter.push(fil);
            }
            if (typeof cols[j].approx_array != 'undefined')
            {
                var fil = new Object();
                fil.name = cols[j].index;
                fil.value = cols[j].approx_array;
                fil.type = 'approx_array';
                filter.push(fil);
            }
            if (typeof cols[j].unequal != 'undefined')
            {
                var fil = new Object();
                fil.name = cols[j].index;
                fil.value = cols[j].unequal;
                fil.type = 'unequal';
                filter.push(fil);
            }
            if (typeof cols[j].value != 'undefined' && cols[j].value != "")
            {
                var fil = new Object();
                fil.name = cols[j].index;
                fil.value = cols[j].value;
                fil.type = '';
                filter.push(fil);
            }
            if (typeof cols[j].exact != 'undefined')
            {
                var fil = new Object();
                fil.name = cols[j].index;
                fil.value = cols[j].exact;
                fil.type = 'exact';
                filter.push(fil);
                sort_index = cols[j].index;
                lowerbound = [fil.value, '0'];
                upperbound = [fil.value, '99999999'];
                bound_count = 0;
            }
        }
        var objectStore = static_local_db.transaction([table], "readwrite").objectStore(table);
        if (filter[0].name == 'id')
        {
            // Direct primary-key path: fetch the row, verify any remaining
            // filters, then delete it and log the activity.
            var get_req = objectStore.get(filter[0].value);
            get_req.onsuccess = function(e)
            {
                localdb_open_requests -= 1;
                var data = get_req.result;
                if (data)
                {
                    // BUG FIX: the original referenced an undefined `record`
                    // throughout this loop (ReferenceError whenever a second
                    // filter was supplied); it must be the fetched row.
                    var record = data;
                    var match_word = true;
                    for (var i = 1; i < filter.length; i++)
                    {
                        if (typeof record[filter[i].name] != "undefined")
                        {
                            var string = record[filter[i].name].toString().toLowerCase();
                            if (filter[i].type != 'array')
                            {
                                var search_word = filter[i].value.toString().toLowerCase();
                                if (filter[i].type == '')
                                {
                                    if (string.indexOf(search_word) === -1)
                                    {
                                        match_word = false;
                                        break;
                                    }
                                }
                                if (filter[i].type == 'exact')
                                {
                                    if (search_word !== string)
                                    {
                                        match_word = false;
                                        break;
                                    }
                                }
                                if (filter[i].type == 'unequal')
                                {
                                    if (search_word == string)
                                    {
                                        match_word = false;
                                        break;
                                    }
                                }
                                if (filter[i].type == 'upperbound')
                                {
                                    if (parseFloat(record[filter[i].name]) >= parseFloat(filter[i].value))
                                    {
                                        match_word = false;
                                        break;
                                    }
                                }
                                else if (filter[i].type == 'lowerbound')
                                {
                                    if (parseFloat(record[filter[i].name]) <= parseFloat(filter[i].value))
                                    {
                                        match_word = false;
                                        break;
                                    }
                                }
                            }
                            else if (filter[i].type == 'array')
                            {
                                if (filter[i].value.indexOf(string) == -1)
                                {
                                    match_word = false;
                                    break;
                                }
                            }
                            if (filter[i].type == 'approx_array')
                            {
                                var approx_array = filter[i].value;
                                var sub_match = false;
                                for (var ab in approx_array)
                                {
                                    if (string.indexOf(approx_array[ab]) > -1)
                                    {
                                        sub_match = true;
                                        break;
                                    }
                                }
                                if (!sub_match)
                                {
                                    match_word = false;
                                    break;
                                }
                            }
                        }
                        else
                        {
                            // Field missing on the record: only 'unequal' passes.
                            if (filter[i].type != 'unequal')
                            {
                                match_word = false;
                                break;
                            }
                        }
                    }
                    if (match_word === true)
                    {
                        localdb_open_requests += 1;
                        objectStore.delete(filter[0].value).onsuccess = function(e)
                        {
                            // Record the delete as an unsynced activity row.
                            var id = vUtil.newKey();
                            var act_row = {id: "" + id,
                                type: 'delete',
                                status: 'unsynced',
                                user_display: log,
                                tablename: table,
                                data_type: 'json',
                                data_id: filter[0].value,
                                data_xml: JSON.stringify(cols),
                                updated_by: get_name(),
                                last_updated: "" + get_my_time()};
                            if (log == 'yes')
                            {
                                act_row['title'] = activity_data['title'];
                                act_row['notes'] = activity_data['notes'];
                                act_row['link_to'] = activity_data['link_to'];
                            }
                            static_local_db.transaction(['activities'], "readwrite").objectStore('activities').put(act_row).onsuccess = function(e)
                            {
                                localdb_open_requests -= 1;
                                hide_loader();
                                if (typeof func != "undefined")
                                {
                                    func();
                                }
                            };
                        };
                    }
                }
            };
        }
        else
        {
            // Index-scan path: collect matching ids first, then delete them
            // and insert matching activity rows.
            var keyValue = IDBKeyRange.bound([filter[0].value, '0'], [filter[0].value, '99999999']);
            var delete_ids_array = [];
            objectStore.index(filter[0].name).openCursor(keyValue).onsuccess = function(e)
            {
                var result = e.target.result;
                if (result)
                {
                    var record = result.value;
                    var match_word = true;
                    for (var i = 0; i < filter.length; i++)
                    {
                        if (typeof record[filter[i].name] != "undefined")
                        {
                            var string = record[filter[i].name].toString().toLowerCase();
                            if (filter[i].type != 'array')
                            {
                                var search_word = filter[i].value.toString().toLowerCase();
                                if (filter[i].type == '')
                                {
                                    if (string.indexOf(search_word) === -1)
                                    {
                                        match_word = false;
                                        break;
                                    }
                                }
                                if (filter[i].type == 'exact')
                                {
                                    if (search_word !== string)
                                    {
                                        match_word = false;
                                        break;
                                    }
                                }
                                if (filter[i].type == 'unequal')
                                {
                                    if (search_word == string)
                                    {
                                        match_word = false;
                                        break;
                                    }
                                }
                                if (filter[i].type == 'upperbound')
                                {
                                    if (parseFloat(record[filter[i].name]) >= parseFloat(filter[i].value))
                                    {
                                        match_word = false;
                                        break;
                                    }
                                }
                                else if (filter[i].type == 'lowerbound')
                                {
                                    if (parseFloat(record[filter[i].name]) <= parseFloat(filter[i].value))
                                    {
                                        match_word = false;
                                        break;
                                    }
                                }
                            }
                            else if (filter[i].type == 'array')
                            {
                                if (filter[i].value.indexOf(string) == -1)
                                {
                                    match_word = false;
                                    break;
                                }
                            }
                            if (filter[i].type == 'approx_array')
                            {
                                var approx_array = filter[i].value;
                                var sub_match = false;
                                for (var ab in approx_array)
                                {
                                    if (string.indexOf(approx_array[ab]) > -1)
                                    {
                                        sub_match = true;
                                        break;
                                    }
                                }
                                if (!sub_match)
                                {
                                    match_word = false;
                                    break;
                                }
                            }
                        }
                        else
                        {
                            if (filter[i].type != 'unequal')
                            {
                                match_word = false;
                                break;
                            }
                        }
                    }
                    if (match_word === true)
                    {
                        delete_ids_array.push(record.id);
                    }
                    result.continue();
                }
                else
                {
                    // Scan finished: run the delete chain and the activity-log
                    // chain concurrently, each on its own transaction.
                    var i = 0;
                    var j = 0;
                    var os1 = static_local_db.transaction([table], "readwrite").objectStore(table);
                    var os2 = static_local_db.transaction(['activities'], "readwrite").objectStore('activities');
                    function delete_records()
                    {
                        if (i < delete_ids_array.length)
                        {
                            localdb_open_requests += 1;
                            os1.delete(delete_ids_array[i]).onsuccess = function(e)
                            {
                                localdb_open_requests -= 1;
                                delete_records();
                            };
                            i++;
                        }
                    };
                    var activity_id = vUtil.newKey();
                    function insert_activities()
                    {
                        if (j < delete_ids_array.length)
                        {
                            localdb_open_requests += 1;
                            var act_row = {id: "" + (activity_id + j),
                                type: 'delete',
                                status: 'unsynced',
                                user_display: log,
                                tablename: table,
                                data_type: 'json',
                                data_id: delete_ids_array[j],
                                data_xml: JSON.stringify(cols),
                                updated_by: get_name(),
                                last_updated: "" + get_my_time()};
                            if (log == 'yes')
                            {
                                act_row['title'] = activity_data['title'];
                                act_row['notes'] = activity_data['notes'];
                                act_row['link_to'] = activity_data['link_to'];
                            }
                            os2.put(act_row).onsuccess = function(e)
                            {
                                localdb_open_requests -= 1;
                                insert_activities();
                            };
                            j++;
                        }
                    };
                    delete_records();
                    insert_activities();
                    localdb_open_requests -= 1;
                }
            };
        }
        // Poll until every outstanding request has completed, then notify.
        var local_delete_complete = setInterval(function()
        {
            if (localdb_open_requests === 0)
            {
                clearInterval(local_delete_complete);
                hide_loader();
                if (typeof func != "undefined")
                {
                    func();
                }
            }
        }, 500);
    }
};
/**
 * Inserts one row into a local IndexedDB data store after enforcing any
 * 'unique' / 'uniqueWith' constraints declared on the columns, and records
 * an 'unsynced' create activity. On a uniqueness clash the insert is
 * aborted and (unless data_json.warning=='no') a warning modal is shown.
 * @param data_json {data_store, data, [log], [log_data], [warning]}
 * @param func optional callback invoked after the row and its activity are stored
 */
function local_create_json(data_json,func)
{
if(typeof static_local_db=='undefined')
{
// Database not opened yet: open it, then retry this call.
open_local_db(function()
{
local_create_json(data_json,func);
});
}
else
{
localdb_open_requests+=1;
show_loader();
var table=data_json.data_store;
var cols=data_json.data;
var log='no';
var activity_data=[];
if(typeof data_json.log!='undefined')
{
log=data_json.log;
}
if(typeof data_json.log_data!='undefined')
{
activity_data=data_json.log_data;
}
// `unique`: columns whose values must not already exist;
// `indexed_col`: name -> value map of all supplied columns.
var unique=new Array();
var indexed_col=new Array();
for(var j=0;j<cols.length;j++)
{
if(typeof cols[j]['unique']!='undefined' && cols[j]['unique']=='yes')
{
var fil=new Object();
fil.name=cols[j].index;
fil.value=cols[j].value;
unique.push(fil);
}
else if(typeof cols[j]['uniqueWith']!='undefined')
{
// Unique only in combination with the listed companion columns.
var fil=new Object();
fil.name=cols[j].index;
fil.value=cols[j].value;
fil.uniqueWith=cols[j]['uniqueWith'];
unique.push(fil);
}
indexed_col[cols[j].index]=cols[j].value;
}
var data_id=indexed_col['id'];
var objectStore=static_local_db.transaction([table],"readwrite").objectStore(table);
// Writes the row (all values coerced to strings) and then the
// corresponding 'unsynced' create-activity record.
function local_create_json_put()
{
var data_row=new Object();
for(var j=0;j<cols.length;j++)
{
data_row[cols[j].index]=""+cols[j].value;
}
var put_req=objectStore.put(data_row);
put_req.onsuccess=function(e)
{
var id=vUtil.newKey();
var act_row={id:""+id,
type:'create',
status:'unsynced',
data_type:'json',
data_xml:JSON.stringify(cols),
user_display:log,
tablename:table,
data_id:data_id,
updated_by:get_name(),
last_updated:""+get_my_time()};
if(log=='yes')
{
act_row['title']=activity_data['title'];
act_row['notes']=activity_data['notes'];
act_row['link_to']=activity_data['link_to'];
}
static_local_db.transaction(['activities'],"readwrite").objectStore('activities').put(act_row).onsuccess=function(e)
{
localdb_open_requests-=1;
hide_loader();
if(typeof func!="undefined")
{
func();
}
};
};
};
// Checks unique constraint `index`; recurses to the next one when clean
// and performs the actual put after the last. A clash aborts the insert.
function local_create_json_unique(index)
{
if(index<unique.length)
{
var unique_element=unique[index];
// NOTE(review): assumes the index stores compound keys of the form
// [value, timestamp-like-string] — confirm against the DB schema.
var key=IDBKeyRange.bound([unique_element.value,'0'],[unique_element.value,'99999999']);
objectStore.index(unique_element.name).openCursor(key).onsuccess=function(e)
{
var result=e.target.result;
if(result)
{
var match_word=true;
if(typeof unique_element.uniqueWith!='undefined')
{
// Compound uniqueness: only a clash if every companion column matches too.
var record=result.value;
for(var i in unique_element.uniqueWith)
{
if(record[unique_element.uniqueWith[i]]!=indexed_col[unique_element.uniqueWith[i]])
{
match_word=false;
break;
}
}
}
if(match_word)
{
// Duplicate found: abort and optionally warn the user.
localdb_open_requests-=1;
hide_loader();
if(typeof data_json['warning']!='undefined' && data_json['warning']=='no')
{}
else
{
$("#modal5_link").click();
}
}
else
{
result.continue();
}
}
else
{
local_create_json_unique(index+1);
}
};
}
else
{
// All unique checks passed: store the row.
local_create_json_put();
}
};
local_create_json_unique(0);
}
}
// Batch variant of local_create_json: creates every row in
// data_json.data (an array of column arrays) sequentially, applying the
// uniqueness constraints declared on the FIRST row to all rows, and writes
// one 'activities' row per record plus a summary "Data import" activity.
//
// Completion is detected by polling localdb_open_requests every 2s
// (setInterval), not by a transaction oncomplete handler -- so other
// concurrent DB work could delay or prematurely trigger the summary write;
// TODO confirm this is acceptable to callers.
function local_create_batch_json(data_json,func)
{
if(typeof static_local_db=='undefined')
{
open_local_db(function()
{
local_create_batch_json(data_json,func);
});
}
else
{
if(typeof data_json.loader!='undefined' && data_json.loader=='no')
{}else{show_loader();}
var table=data_json.data_store;
var rows=data_json.data;
var log='no';
var activity_data=[];
var result_count=0;
if(typeof data_json.log!='undefined')
{
log=data_json.log;
}
if(typeof data_json.log_data!='undefined')
{
activity_data=data_json.log_data;
}
// Uniqueness constraints are read from rows[0] only and reused for the
// whole batch (per-row values are looked up later via data_row).
var unique=new Array();
if(rows.length>0)
{
var cols=rows[0];
for(var j=0;j<cols.length;j++)
{
if(typeof cols[j]['unique']!='undefined' && cols[j]['unique']=='yes')
{
var fil=new Object();
fil.name=cols[j].index;
unique.push(fil);
}
else if(typeof cols[j]['uniqueWith']!='undefined')
{
var fil=new Object();
fil.name=cols[j].index;
fil.uniqueWith=cols[j]['uniqueWith'];
unique.push(fil);
}
}
var transaction=static_local_db.transaction([table,'activities'],"readwrite");
var os1=transaction.objectStore(table);
var os2=transaction.objectStore('activities');
// Base key for per-row activity ids; each row uses activity_id+i.
// NOTE(review): if vUtil.newKey() returns a string, activity_id+i is a
// string concatenation rather than an addition -- confirm newKey()'s type.
var activity_id=vUtil.newKey();
var i=0;
var success_count=0;
// Drives the sequential loop: each row's unique check / put schedules the
// next call once its async work finishes.
function create_records()
{
if(i<rows.length)
{
var cols=rows[i];
localdb_open_requests+=1;
local_create_json_unique(cols,0);
}
};
// Stores one row plus its (non-user-facing) activity record, then advances.
function local_create_json_put(data_row,cols)
{
os1.put(data_row).onsuccess=function(e)
{
success_count+=1;
var id=vUtil.newKey();
var act_row={id:""+(activity_id+i),
type:'create',
status:'unsynced',
data_type:'json',
data_xml:JSON.stringify(cols),
user_display:'no',
tablename:table,
data_id:data_row['id'],
updated_by:get_name(),
last_updated:""+get_my_time()};
os2.put(act_row).onsuccess=function(e)
{
i+=1;
localdb_open_requests-=1;
create_records();
};
};
};
// Checks unique[index..] for the current row; duplicates are silently
// skipped (row not stored, loop continues with the next row).
function local_create_json_unique(cols,index)
{
var data_row=new Object();
for(var j=0;j<cols.length;j++)
{
data_row[cols[j].index]=""+cols[j].value;
}
if(index<unique.length)
{
var kv=IDBKeyRange.bound([data_row[unique[index].name],'0'],[data_row[unique[index].name],'99999999']);
os1.index(unique[index].name).openCursor(kv).onsuccess=function(e)
{
var result=e.target.result;
if(result)
{
var match_word=true;
if(typeof unique[index].uniqueWith!='undefined')
{
var record=result.value;
for(var x in unique[index].uniqueWith)
{
if(record[unique[index].uniqueWith[x]]!=data_row[unique[index].uniqueWith[x]])
{
match_word=false;
break;
}
}
}
if(match_word)
{
// Duplicate: skip this row and continue the batch.
i+=1;
localdb_open_requests-=1;
create_records();
}
else
{
result.continue();
}
}
else
{
local_create_json_unique(cols,index+1);
}
};
}
else
{
local_create_json_put(data_row,cols);
}
};
create_records();
// Poll until all per-row requests have drained, then write the summary
// activity (user_display controlled by 'log') and invoke func.
var local_create_complete=setInterval(function()
{
if(localdb_open_requests===0)
{
var act_row={id:""+(activity_id+i+5),
type:'create',
status:'unsynced',
title:'Data import',
notes:'Added '+success_count+' records for '+activity_data['title'],
data_xml:JSON.stringify(rows),
data_type:'json',
user_display:log,
data_id:"",
tablename:"",
link_to:activity_data['link_to'],
updated_by:""+get_name(),
last_updated:""+get_my_time()};
var transaction=static_local_db.transaction([table,'activities'],"readwrite");
var os3=transaction.objectStore('activities');
os3.put(act_row).onsuccess=function(e){};
clearInterval(local_create_complete);
if(typeof data_json.loader!='undefined' && data_json.loader=='no')
{}else{hide_loader();}
if(typeof func!='undefined')
{
func();
}
}
},2000);
}
else
{
// Empty batch: nothing to write; just restore UI state and call back.
if(typeof data_json.loader!='undefined' && data_json.loader=='no')
{}else{hide_loader();}
if(typeof func!='undefined')
{
func();
}
}
}
}
// Update a single existing record in the store named by
// data_json.data_store. The record id is taken from the FIRST column
// (cols[0].value); only the supplied columns are overwritten, the rest of
// the stored record is preserved. An 'update' activity row is logged for
// later sync.
//
// NOTE(review): if no record with that id exists, localdb_open_requests is
// never decremented and func is never called -- confirm whether a
// not-found path should also decrement/callback.
function local_update_json(data_json,func)
{
if(typeof static_local_db=='undefined')
{
open_local_db(function()
{
local_update_json(data_json,func);
});
}
else
{
localdb_open_requests+=1;
var table=data_json.data_store;
var cols=data_json.data;
var log='no';
var activity_data=[];
if(typeof data_json.log!='undefined')
{
log=data_json.log;
}
if(typeof data_json.log_data!='undefined')
{
activity_data=data_json.log_data;
}
// Convention: cols[0] carries the primary key value.
var data_id=cols[0]['value'];
//console.log(table+"-"+data_id);
var os1=static_local_db.transaction([table],"readwrite").objectStore(table);
var req=os1.get(data_id);
req.onsuccess=function(e)
{
var data_record=req.result;
if(data_record)
{
//console.log('found local record '+data_record);
// Merge the incoming columns over the stored record (stringified).
for(var j=0;j<cols.length;j++)
{
data_record[cols[j]['index']]=""+cols[j]['value'];
}
var put_req=os1.put(data_record);
put_req.onsuccess=function(e)
{
var id=vUtil.newKey();
var act_row={id:""+id,
type:'update',
status:'unsynced',
data_type:'json',
data_xml:JSON.stringify(cols),
user_display:log,
tablename:table,
data_id:data_id,
updated_by:get_name(),
last_updated:""+get_my_time()};
if(log=='yes')
{
act_row['title']=activity_data['title'];
act_row['notes']=activity_data['notes'];
act_row['link_to']=activity_data['link_to'];
}
static_local_db.transaction(['activities'],"readwrite").objectStore('activities').put(act_row).onsuccess=function(e)
{
localdb_open_requests-=1;
if(typeof func!="undefined")
{
func();
}
};
};
}
};
}
}
// Batch variant of local_update_json: updates every row in data_json.data
// sequentially (id taken from each row's first column), logging one
// 'update' activity per changed record plus a summary "Data import"
// activity. Rows whose id is not found (or whose get() errors) are skipped.
// Completion is detected by polling localdb_open_requests every 2s, as in
// local_create_batch_json.
function local_update_batch_json(data_json,func)
{
if(typeof static_local_db=='undefined')
{
open_local_db(function()
{
local_update_batch_json(data_json,func);
});
}
else
{
if(typeof data_json.loader!='undefined' && data_json.loader=='no')
{}else{show_loader();}
var table=data_json.data_store;
var rows=data_json.data;
var log='no';
var activity_data=[];
var result_count=0;
if(typeof data_json.log!='undefined')
{
log=data_json.log;
}
if(typeof data_json.log_data!='undefined')
{
activity_data=data_json.log_data;
}
//console.log(rows.length);
var transaction=static_local_db.transaction([table,'activities'],"readwrite");
var os1=transaction.objectStore(table);
var os2=transaction.objectStore('activities');
var i=0;
var success_count=0;
// Base key for per-row activity ids (activity_id+i per row).
// NOTE(review): if vUtil.newKey() returns a string this concatenates
// rather than adds -- confirm newKey()'s return type.
var activity_id=vUtil.newKey();
// Sequential driver: fetch row i, merge, store, log, then recurse.
function update_records()
{
if(i<rows.length)
{
var cols=rows[i];
var data_id=cols[0]['value'];
localdb_open_requests+=1;
var req=os1.get(data_id);
req.onsuccess=function(e)
{
var data_record=req.result;
if(data_record)
{
for(var j=0;j<cols.length;j++)
{
data_record[cols[j]['index']]=""+cols[j]['value'];
}
var put_req=os1.put(data_record);
put_req.onsuccess=function(e)
{
var id=vUtil.newKey();
var act_row={id:""+(activity_id+i),
type:'update',
status:'unsynced',
data_type:'json',
data_xml:JSON.stringify(cols),
user_display:'no',
tablename:table,
data_id:data_record['id'],
updated_by:get_name(),
last_updated:""+get_my_time()};
os2.put(act_row).onsuccess=function(e)
{
i++;
success_count+=1;
localdb_open_requests-=1;
update_records();
};
};
}
else
{
// Record not found: skip this row and continue.
i++;
localdb_open_requests-=1;
update_records();
}
};
req.onerror=function(e)
{
// Lookup failed: treat like not-found and continue the batch.
i++;
localdb_open_requests-=1;
update_records();
};
}
};
update_records();
// Poll until the batch drains, then write the user-facing summary row.
var local_update_complete=setInterval(function()
{
//console.log(localdb_open_requests);
if(localdb_open_requests===0)
{
var act_row={id:""+(activity_id+i+5),
type:'update',
status:'unsynced',
title:'Data import',
notes:'Updated '+success_count+' records for '+activity_data['title'],
data_xml:JSON.stringify(rows),
data_type:'json',
user_display:log,
data_id:"",
tablename:"",
link_to:activity_data['link_to'],
updated_by:""+get_name(),
last_updated:""+get_my_time()};
var transaction=static_local_db.transaction(['activities'],"readwrite");
var os3=transaction.objectStore('activities');
os3.put(act_row).onsuccess=function(e){};
clearInterval(local_update_complete);
if(typeof data_json.loader!='undefined' && data_json.loader=='no')
{}else{hide_loader();}
if(typeof func!='undefined')
{
func();
}
}
},2000);
}
}
|
package reactivefeign.spring.config;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.cloud.client.actuator.HasFeatures;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import reactivefeign.ReactiveFeign;
import java.util.ArrayList;
import java.util.List;
/**
 * Spring Boot auto-configuration for reactive Feign clients.
 *
 * <p>Activated only when {@link ReactiveFeign} is on the classpath; binds
 * {@code ReactiveFeignClientProperties} and registers the shared
 * {@link ReactiveFeignContext} plus an actuator feature entry.
 */
@Configuration
@ConditionalOnClass(ReactiveFeign.class)
@EnableConfigurationProperties({ReactiveFeignClientProperties.class})
public class ReactiveFeignAutoConfiguration {
/** Per-client configuration specs gathered from the context; optional, so defaults to empty. */
@Autowired(required = false)
private List<ReactiveFeignClientSpecification> configurations = new ArrayList<>();
/** Advertises "ReactiveFeign" in the Spring Cloud actuator features endpoint. */
@Bean
public HasFeatures reactiveFeignFeature() {
return HasFeatures.namedFeature("ReactiveFeign", ReactiveFeign.class);
}
/** Builds the named-context registry that holds one child context per Feign client. */
@Bean
public ReactiveFeignContext reactiveFeignContext() {
ReactiveFeignContext context = new ReactiveFeignContext();
context.setConfigurations(this.configurations);
return context;
}
// @Configuration
// @ConditionalOnClass(name = "feign.hystrix.HystrixFeign")
// protected static class HystrixFeignTargeterConfiguration {
// @Bean
// @ConditionalOnMissingBean
// public Targeter feignTargeter() {
// return new HystrixTargeter();
// }
// }
//
// @Configuration
// @ConditionalOnMissingClass("feign.hystrix.HystrixFeign")
// protected static class DefaultFeignTargeterConfiguration {
// @Bean
// @ConditionalOnMissingBean
// public Targeter feignTargeter() {
// return new DefaultTargeter();
// }
// }
}
|
<reponame>plotter/platform2
import { Pak } from './pak';
/**
 * A single view hosted in one of the application's panes.
 */
export class View {
  /**
   * Rehydrate a {@link View} instance from its serialized JSON form.
   * Copies exactly the four serialized fields; `pak` is left unset.
   */
  public static fromJSON(json: ViewJSON): View {
    const { locked, uniqueId, pane, moduleUrl } = json;
    const view = new View();
    view.locked = locked;
    view.uniqueId = uniqueId;
    view.pane = pane;
    view.moduleUrl = moduleUrl;
    return view;
  }

  /** Whether the view is pinned in place. */
  public locked: boolean;
  /** Stable identifier for this view instance. */
  public uniqueId: string;
  /** Which pane the view occupies. */
  public pane: PaneType;
  /** URL of the module that renders this view. */
  public moduleUrl: string;
  /** Owning pak; populated at runtime, not serialized. */
  public pak: Pak;
}
/** The three panes a view can be placed in. */
export type PaneType = 'nav' | 'main' | 'alt';
/** Serialized (JSON) shape of a View; mirrors View's persisted fields. */
export interface ViewJSON {
locked: boolean;
uniqueId: string;
pane: PaneType;
moduleUrl: string;
}
|
<reponame>smagill/opensphere-desktop
package io.opensphere.mantle.data.merge.gui;
import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.FontMetrics;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.LayoutManager;
import java.awt.Stroke;
import java.awt.datatransfer.Transferable;
import java.awt.datatransfer.UnsupportedFlavorException;
import java.awt.font.LineMetrics;
import java.awt.geom.Rectangle2D;
import java.io.IOException;
import javax.swing.JComponent;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.TransferHandler;
import org.apache.log4j.Logger;
import io.opensphere.core.util.swing.EventQueueUtilities;
import io.opensphere.mantle.data.merge.gui.DataTypeKeyMoveDNDCoordinator.KeyMoveListener;
/**
 * Drop-target panel that accepts dragged {@link TypeKeyEntry} instances and
 * creates a new mapped type from the dropped entry. Draws a dashed rounded
 * border whose color reflects whether the entry currently being dragged
 * would be accepted (green) or rejected (red), and renders a centered
 * multi-line hint message.
 */
@SuppressWarnings("serial")
public class NewKeyDropTargetPanel extends JPanel implements KeyMoveListener
{
    /** Logger reference. */
    private static final Logger LOGGER = Logger.getLogger(NewKeyDropTargetPanel.class);

    /** Dash pattern for the drop-target border. */
    private static final float[] ourDash = { 5.0f };

    /** When true, only entries with a special key type may be dropped here. */
    private final boolean myAllowOnlyWithSpecialKeys;

    /** The data type merge panel that receives accepted entries. */
    private final DataTypeMergePanel myDataTypeMergePanel;

    /** Current border color (white idle, green/red during a drag). */
    private Color myLineColor = Color.white;

    /** The hint message, pre-split into lines. */
    private final String[] myMessageParts;

    /**
     * Instantiates a new key drop target panel.
     *
     * @param cdr the drag-and-drop coordinator to register with
     * @param dtmp the data type merge panel that receives dropped entries
     * @param lm the layout manager
     * @param msg the hint message ('\n' separates lines)
     * @param specialKeysOnly whether only special-key entries are accepted
     */
    public NewKeyDropTargetPanel(DataTypeKeyMoveDNDCoordinator cdr, DataTypeMergePanel dtmp, LayoutManager lm, String msg,
            boolean specialKeysOnly)
    {
        super(lm);
        cdr.addKeyMoveListener(this);
        myDataTypeMergePanel = dtmp;
        myAllowOnlyWithSpecialKeys = specialKeysOnly;
        myMessageParts = msg.split("\n");
        setTransferHandler(new TransferHandler()
        {
            @SuppressWarnings("PMD.SimplifiedTernary")
            @Override
            public boolean canImport(TransferHandler.TransferSupport info)
            {
                if (!info.isDataFlavorSupported(TypeKeyEntry.ourDataFlavor))
                {
                    return false;
                }
                Transferable tf = info.getTransferable();
                try
                {
                    TypeKeyEntry tke = (TypeKeyEntry)tf.getTransferData(TypeKeyEntry.ourDataFlavor);
                    return myAllowOnlyWithSpecialKeys ? tke.getSpecialKeyType() != null : true;
                }
                catch (UnsupportedFlavorException | IOException e)
                {
                    LOGGER.warn(e);
                }
                return false;
            }

            @Override
            public int getSourceActions(JComponent c)
            {
                return MOVE;
            }

            @Override
            public boolean importData(TransferHandler.TransferSupport info)
            {
                if (!info.isDrop())
                {
                    return false;
                }
                // Check for String flavor
                if (!info.isDataFlavorSupported(TypeKeyEntry.ourDataFlavor))
                {
                    EventQueueUtilities.invokeLater(() -> JOptionPane.showMessageDialog(null, "Doesn't accept a drop of this type."));
                    return false;
                }
                TypeKeyEntry data;
                try
                {
                    data = (TypeKeyEntry)info.getTransferable().getTransferData(TypeKeyEntry.ourDataFlavor);
                }
                catch (UnsupportedFlavorException | IOException e)
                {
                    if (LOGGER.isTraceEnabled())
                    {
                        LOGGER.trace(e, e);
                    }
                    return false;
                }
                if (data.getOwner() != null)
                {
                    // Capture the owner BEFORE clearing it. The original code
                    // cleared the owner first and then dereferenced
                    // getOwner(), which guarantees a NullPointerException
                    // inside this branch and prevents the old panel from ever
                    // being notified of the transfer.
                    TypeKeyPanel previousOwner = (TypeKeyPanel)data.getOwner();
                    data.setOwner(null);
                    previousOwner.acceptedTransferOfEntry(data);
                }
                final TypeKeyEntry toAdd = data;
                EventQueueUtilities.invokeLater(() -> myDataTypeMergePanel.createNewMappedTypeFromEntry(toAdd, myAllowOnlyWithSpecialKeys));
                return true;
            }
        });
    }

    @Override
    public void keyMoveCompleted(TypeKeyEntry entry, TypeKeyPanel origPanel)
    {
        // Drag finished: revert to the idle border color.
        setLineColor(Color.white);
    }

    @SuppressWarnings("PMD.SimplifiedTernary")
    @Override
    public void keyMoveInitiated(TypeKeyEntry entry, TypeKeyPanel sourcePanel, Object source)
    {
        // Preview acceptance for the entry now being dragged.
        boolean acceptable = myAllowOnlyWithSpecialKeys ? entry.getSpecialKeyType() != null : true;
        setLineColor(acceptable ? Color.green : Color.red);
    }

    @Override
    protected void paintComponent(Graphics g)
    {
        super.paintComponent(g);
        Graphics2D g2D = (Graphics2D)g;
        Stroke origStroke = g2D.getStroke();
        // Dashed rounded border in the current accept/reject color.
        BasicStroke bs = new BasicStroke(1, BasicStroke.CAP_BUTT, BasicStroke.JOIN_MITER, 10f, ourDash, 0f);
        g2D.setStroke(bs);
        g2D.setColor(myLineColor);
        g2D.drawRoundRect(0, 2, getWidth() - 1, getHeight() - 3, 20, 20);
        FontMetrics fm = g2D.getFontMetrics();
        g2D.setColor(Color.white);
        g2D.setStroke(origStroke);
        // Measure total text height so the message block can be vertically
        // centered, then draw each line horizontally centered.
        int totalheight = 0;
        for (String part : myMessageParts)
        {
            LineMetrics lm = fm.getLineMetrics(part, g);
            totalheight += lm.getHeight();
        }
        int startY = (int)(0.5 * (getHeight() - totalheight)) + fm.getHeight() - 3;
        for (String part : myMessageParts)
        {
            Rectangle2D r = fm.getStringBounds(part, g);
            int x = (int)(getWidth() / 2.0 - r.getWidth() / 2.0);
            g2D.drawString(part, x, startY);
            startY += r.getHeight();
        }
    }

    /**
     * Sets the line color (marshalled to the EDT) and repaints.
     *
     * @param c the new line color
     */
    private void setLineColor(final Color c)
    {
        EventQueueUtilities.runOnEDT(() ->
        {
            myLineColor = c;
            repaint();
        });
    }
}
|
# Import dataset / preprocessing utilities.
# The original script called train_test_split and StandardScaler without
# importing them (NameError at runtime).
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler

# Define data (placeholder; replace with a real dataset containing a
# 'label' column).
X = pd.DataFrame()

# Separate features from the target BEFORE any transformation. The
# original one-hot encoded X with columns=['label'] (which removes the
# 'label' column) and then tried to drop/select 'label' again, which
# raises a KeyError; it also normalised the raw label column.
y = X['label']
features = X.drop(labels='label', axis=1)

# Perform data normalization on the features only.
features = (features - features.mean()) / features.std()

# One-hot encode the labels.
y = pd.get_dummies(y)

# Split dataset into train and test (80/20).
x_train, x_test, y_train, y_test = train_test_split(features, y, test_size=0.2)

# Standardise the train and test data; fit on the training split only so
# no test statistics leak into the transform.
scaler = StandardScaler()
x_train = scaler.fit_transform(x_train)
x_test = scaler.transform(x_test)
/**
 * Element-wise sum of two arrays.
 *
 * Iterates over array1's indices, so the result has array1's length; if
 * array2 is shorter, the missing entries produce NaN (undefined + number),
 * matching the original behavior.
 *
 * @param {number[]} array1 - left operands (controls result length)
 * @param {number[]} array2 - right operands
 * @returns {number[]} pairwise sums
 */
function addArray(array1, array2) {
  return array1.map((value, index) => value + array2[index]);
}
<gh_stars>0
package io.cattle.platform.metadata;
/**
 * Functional-style callback that transforms a metadata object.
 *
 * @param <T> type of the object being modified
 */
public interface MetadataModOperation<T> {
/**
 * Applies the modification.
 *
 * @param obj the object to modify
 * @return the modified (or replacement) object
 */
T modify(T obj);
}
|
<filename>src/shaders/glsl/project.readback.js
// Raw GLSL snippet exported as a string for injection into a fragment
// shader. Projects a view-space position to NDC and packs it with the
// (negated) view depth; positions behind the near plane return w = -1.0 as
// a sentinel. The template literal content is the shader source itself and
// must not be altered.
export default /* glsl */ `// This is three.js' global uniform, missing from fragment shaders.
uniform mat4 projectionMatrix;
vec4 readbackPosition(vec3 position, vec4 stpq) {
vec4 pos = projectionMatrix * vec4(position, 1.0);
vec3 final = pos.xyz / pos.w;
if (final.z < -1.0) {
return vec4(0.0, 0.0, 0.0, -1.0);
}
else {
return vec4(final, -position.z);
}
}
`;
# Before each prompt: set the terminal title (OSC 2 escape) to the current
# directory, abbreviating $HOME as ~.
function set-title-precmd() {
printf "\e]2;%s\a" "${PWD/#$HOME/~}"
}
# Before each command runs: set the terminal title to the command line.
function set-title-preexec() {
printf "\e]2;%s\a" "$1"
}
# Register both functions with zsh's prompt/exec hook machinery.
autoload -Uz add-zsh-hook
add-zsh-hook precmd set-title-precmd
add-zsh-hook preexec set-title-preexec
import os
import json
import re
from dateutil import parser
from tokenizers import normalizers
from tokenizers.normalizers import NFD, StripAccents, Lowercase, BertNormalizer, Sequence, Strip
from tokenizers import pre_tokenizers
from tokenizers.pre_tokenizers import Whitespace
from project.server.main.utils_swift import upload_object, download_object
from project.server.main.logger import get_logger
# Shared text normaliser: BERT-style cleaning (control-char removal,
# chinese-char handling, accent stripping, lowercasing) followed by
# whitespace trimming.
normalizer = Sequence([BertNormalizer(clean_text=True,
handle_chinese_chars=True,
strip_accents=True,
lowercase=True), Strip()])
# Whitespace pre-tokenizer used to split normalised strings into word tokens.
pre_tokenizer = pre_tokenizers.Sequence([Whitespace()])
logger = get_logger(__name__)
def normalize(x, min_length = 0):
    """Normalise ``x`` (BERT-style cleaning via the module-level normalizer),
    collapse whitespace, and return the whitespace tokens strictly longer
    than ``min_length`` joined by single spaces."""
    cleaned = normalizer.normalize_str(x).replace('\n', ' ')
    cleaned = re.sub(' +', ' ', cleaned)
    tokens = (pair[0] for pair in pre_tokenizer.pre_tokenize_str(cleaned))
    return " ".join(tok for tok in tokens if len(tok) > min_length)
def get_repository(a_repo: str) -> str:
    """Map a repository URL or identifier to a normalised repository name.

    Returns the input unchanged when no known repository matches. All
    comparisons are case-insensitive (the original compared the PubMed
    Central fragments case-sensitively, inconsistent with every other
    branch; that is fixed here).
    """
    # HAL identifiers/URLs start with 'hal', optionally behind 'www.'.
    if a_repo.replace('www.', '')[0:3].lower() == 'hal':
        return 'HAL'
    # Repositories whose canonical name (minus spaces) appears in the string.
    for r in ['bioRxiv', 'medRxiv', 'arXiv', 'Research Square', 'Zenodo', 'Archimer', 'RePEc', 'CiteSeerX', 'univOAK']:
        if r.lower().replace(' ', '') in a_repo.lower():
            return r
    # Institution-specific repositories recognised by URL fragments.
    if 'lilloa' in a_repo.lower():
        return 'LillOA (Lille Open Archive)'
    if 'ucl.ac.uk' in a_repo.lower():
        return 'UCL Discovery'
    if 'lirias' in a_repo.lower() and 'kuleuven' in a_repo.lower():
        return 'LIRIAS (KU Leuven)'
    if 'pure.atira.dk' in a_repo.lower():
        return 'Pure (Denmark)'
    if 'digital.csic.es' in a_repo.lower():
        return 'DIGITAL.CSIC (Spain)'
    if 'escholarship.org/ark' in a_repo.lower():
        return 'California Digital Library - eScholarship'
    if 'jupiter.its.unimelb.edu.au' in a_repo.lower():
        return 'University of Melbourne - Minerva Access'
    if 'helda.helsinki' in a_repo.lower():
        return 'HELDA - Digital Repository of the University of Helsinki'
    if 'osti.gov' in a_repo.lower():
        return 'US Office of Scientific and Technical Information'
    # PubMed Central mirrors; now matched case-insensitively for
    # consistency with the branches above.
    for f in ['pubmedcentral', 'ncbi.nlm.nih.gov/pmc', 'europepmc']:
        if f in a_repo.lower():
            return 'PubMed Central'
    return a_repo
def get_millesime(x: str) -> str:
    """Return the observation 'millesime' for a YYYYMMDD-style date string:
    the bare year before 2021, and 'YYYYQn' from 2021 onwards. Any parsing
    failure returns the input unchanged; an out-of-range month yields
    'unk' (both per the original behaviour)."""
    try:
        year = x[0:4]
        if year < '2021':
            return year
        month = int(x[4:6])
        if 1 <= month <= 12:
            # Months 1-3 -> Q1, 4-6 -> Q2, 7-9 -> Q3, 10-12 -> Q4.
            return year + 'Q' + str((month - 1) // 3 + 1)
        return 'unk'
    except:
        # Deliberately broad, mirroring the original: bad slices, non-numeric
        # months, or non-string inputs all fall back to the raw value.
        return x
def get_aurehal_from_OS(collection_name, aurehal_type):
    """Fetch the aurehal ``aurehal_type`` dictionary for ``collection_name``
    from the 'hal' object-storage container, gunzip it locally, and return
    the parsed JSON dict.
    """
    target_file = f'aurehal_{collection_name}_{aurehal_type}_dict.json'
    # Remove stale copies from any previous run before downloading.
    os.system(f'rm -rf {target_file}.gz')
    os.system(f'rm -rf {target_file}')
    download_object('hal', f'{collection_name}/aurehal_{aurehal_type}_dict.json.gz', f'{target_file}.gz')
    os.system(f'gunzip {target_file}.gz')
    # Context manager closes the handle deterministically; the original
    # leaked the file object returned by open().
    with open(target_file, 'r') as handle:
        return json.load(handle)
def parse_hal(notice, aurehal, snapshot_date):
    # Transform a raw HAL API notice into the project's publication record
    # format, resolving structure/author ids through the preloaded `aurehal`
    # dictionaries and computing open-access details for `snapshot_date`
    # (a YYYYMMDD-style string; TODO confirm exact format with callers).
    res = {}
    res['sources'] = ['HAL']
    # --- identifiers -----------------------------------------------------
    if isinstance(notice.get('doiId_s'), str):
        res['doi'] = notice.get('doiId_s').lower().strip()
    external_ids = []
    if isinstance(notice.get('halId_s'), str):
        external_ids.append({'id_type': 'hal_id', 'id_value': notice.get('halId_s')})
        res['hal_id'] = notice.get('halId_s')
    if isinstance(notice.get('nntId_s'), str):
        external_ids.append({'id_type': 'nnt_id', 'id_value': notice.get('nntId_s')})
        res['nnt_id'] = notice.get('nntId_s')
    if external_ids:
        res['external_ids'] = external_ids
    # --- title / abstract (first entry of each list field) ---------------
    title = ''
    if isinstance(notice.get('title_s'), list):
        if len(notice.get('title_s')) > 0:
            title = notice.get('title_s')[0].strip()
    subtitle = ''
    if isinstance(notice.get('subTitle_s'), list):
        if len(notice.get('subTitle_s')) > 0:
            subtitle = notice.get('subTitle_s')[0].strip()
    if title and subtitle:
        title = f'{title} : {subtitle}'
    if title:
        res['title'] = title
    if isinstance(notice.get('abstract_s'), list):
        if len(notice.get('abstract_s')) > 0:
            res['abstract'] = [{'abstract': notice.get('abstract_s')[0].strip()}]
    # --- affiliations resolved via the aurehal structure dictionary ------
    if isinstance(notice.get('structId_i'), list):
        affiliations = []
        for s in notice.get('structId_i'):
            structId = str(s)
            if structId in aurehal['structure']:
                if aurehal['structure'][structId] not in affiliations:
                    affiliations.append(aurehal['structure'][structId])
            else:
                logger.debug(f'from structure;{structId}; not in aurehal data ? type: {type(structId)}')
        if affiliations:
            res['affiliations'] = affiliations
            countries = []
            for aff in affiliations:
                if isinstance(aff.get('detected_countries'), list):
                    countries += aff.get('detected_countries')
            res['detected_countries'] = list(set(countries))
    # --- genre: map HAL docType codes onto the project vocabulary --------
    if isinstance(notice.get('docType_s'), str):
        doctype = notice.get('docType_s')
        if doctype == 'ART':
            res['genre'] = 'journal-article'
        elif doctype in ['COMM', 'DOUV']:
            res['genre'] = 'proceedings'
        elif doctype == 'OUV':
            res['genre'] = 'book'
        elif doctype == 'COUV':
            res['genre'] = 'book-chapter'
        elif doctype == 'THESE':
            res['genre'] = 'thesis'
        else:
            res['genre'] = doctype.lower()
    ## AUTHORS
    # Facet strings look like '<authorId>FacetSep...JoinSep<structId>FacetSep...';
    # split them to build the per-author affiliation map.
    authors_affiliations = {}
    if isinstance(notice.get('authIdHasStructure_fs'), list):
        for facet in notice.get('authIdHasStructure_fs'):
            authorId = str(facet.split('JoinSep')[0].split('FacetSep')[0]).replace('_', '')
            structId = str(facet.split('JoinSep')[1].split('FacetSep')[0]).replace('_', '')
            if authorId not in authors_affiliations:
                authors_affiliations[authorId] = []
            if structId in aurehal['structure']:
                authors_affiliations[authorId].append(aurehal['structure'][structId])
            else:
                logger.debug(f'from authors : struct ;{structId}; not in aurehal; type: {type(structId)};facet {facet}')
    authors = []
    nb_auth_quality = 0
    if isinstance(notice.get('authQuality_s'), list):
        nb_auth_quality = len(notice.get('authQuality_s'))
    if isinstance(notice.get('authId_i'), list):
        for authorId in notice.get('authId_i'):
            authorIdStr = str(authorId)
            if authorIdStr in aurehal['author']:
                author = aurehal['author'][authorIdStr]
                if authorIdStr in authors_affiliations:
                    author['affiliations'] = authors_affiliations[authorIdStr]
                authors.append(author)
            else:
                logger.debug(f'author ;{authorIdStr}; not in aureal ?; type: {type(authorIdStr)}')
    if authors:
        nb_author = len(notice.get('authId_i'))
        for ix, a in enumerate(authors):
            a['author_position'] = ix + 1
            # Roles can only be aligned positionally when the two lists have
            # the same length.
            if nb_author == nb_auth_quality:
                a['role'] = notice.get('authQuality_s')[ix]
        res['authors'] = authors
    # DATE
    # First parsable date wins, in decreasing order of preference.
    publication_date = None
    for f in ['publicationDate_s', 'ePublicationDate_s', 'defenseDate_s', 'producedDate_s']:
        if isinstance(notice.get(f), str) and publication_date is None:
            try:
                publication_date = parser.parse(notice[f]).isoformat()
                res['published_date'] = publication_date
                res['year'] = publication_date[0:4]
                #logger.debug(f'publication_date keeping {f}')
            except:
                pass
    # PUBLISHER
    if isinstance(notice.get('journalPublisher_s'), str):
        res['publisher'] = notice.get('journalPublisher_s').strip()
    # ISSN
    journal_issns = []
    for f in ['journalIssn_s', 'journalEissn_s']:
        if isinstance(notice.get(f), str):
            journal_issns.append(notice.get(f).strip())
    if journal_issns:
        res['journal_issns'] = ','.join(journal_issns)
    # KEYWORDS
    keywords = []
    if isinstance(notice.get('keyword_s'), list):
        for k in notice['keyword_s']:
            if isinstance(k, str):
                keywords.append({'keyword': k.strip()})
    if keywords:
        res['keywords'] = keywords
    # HAL CLASSIF
    # Facets are '<code>FacetSep<label>' pairs.
    hal_classification = []
    if isinstance(notice.get('en_domainAllCodeLabel_fs'), list):
        for c in notice.get('en_domainAllCodeLabel_fs'):
            hal_classification.append({'code': c.split('FacetSep')[0], 'label': c.split('FacetSep')[1]})
    if hal_classification:
        res['hal_classification'] = hal_classification
    ### GRANTS
    grants = []
    if isinstance(notice.get('funding_s'), list):
        for f in notice['funding_s']:
            if isinstance(f, str):
                grants.append({'grant':f})
    if isinstance(notice.get('anrProjectReference_s'), list):
        for f in notice['anrProjectReference_s']:
            if isinstance(f, str):
                grants.append({'grantid':f, 'agency': 'ANR', 'country': 'France'})
    if isinstance(notice.get('europeanProjectReference_s'), list):
        for f in notice['europeanProjectReference_s']:
            if isinstance(f, str):
                grants.append({'grantid':f, 'agency': 'Europe'})
    if grants:
        res['grants'] = grants
        res['has_grant'] = True
    else:
        res['has_grant'] = False
    ## OA #####
    # Open-access status: a HAL file upload counts as 'repository'; an
    # external link is classified from its id/URL.
    oa_details = {}
    is_oa = False
    oa_host_type = None
    if notice.get('openAccess_bool') or notice.get('linkExtUrl_s'):
        is_oa = True
    observation_date = get_millesime(snapshot_date)
    oa_locations = []
    license = None
    if isinstance(notice.get('licence_s'), str):
        license = notice.get('licence_s')
    if isinstance(notice.get('fileMain_s'), str):
        is_oa = True
        oa_host_type = 'repository'
        oa_locations.append(
                {'url': notice.get('fileMain_s'),
                'repository_institution': 'HAL',
                'license': license,
                'host_type': oa_host_type})
    elif isinstance(notice.get('linkExtUrl_s'), str):
        is_oa = True
        oa_host_type = None
        # NOTE(review): the URL is read from linkExtId_s although the guard
        # checked linkExtUrl_s -- confirm which field is intended.
        url = notice.get('linkExtId_s').strip()
        if get_repository(url) != url:
            # NOTE(review): this branch sets a local `host_type` while the
            # location's 'host_type' uses `oa_host_type` (still None), and
            # 'license' is set from `host_type` ('repository') rather than
            # the licence -- these look like swapped keys; verify intent.
            host_type = 'repository'
            repository = get_repository(url)
            oa_locations.append(
                {'url': url,
                'repository_normalized': repository,
                'license': host_type,
                'host_type': oa_host_type})
        elif 'doi' in notice.get('linkExtId_s').lower().strip():
            oa_host_type = 'publisher'
            # NOTE(review): `host_type` is unbound on this path (it is only
            # assigned in the branch above), so this would raise a
            # NameError when reached -- confirm and fix upstream.
            oa_locations.append(
                {'url': url,
                'license': host_type,
                'host_type': oa_host_type})
    # oa_details is keyed by the raw snapshot_date; the quarterly
    # observation_date is stored inside the value.
    res['oa_details'] = {}
    res['oa_details'][snapshot_date] = {'is_oa': is_oa, 'snapshot_date': snapshot_date, 'observation_date': observation_date}
    if is_oa:
        res['oa_details'][snapshot_date]['oa_host_type'] = oa_host_type
        if oa_host_type == 'repository':
            res['oa_details'][snapshot_date]['oa_colors'] = ['green']
            res['oa_details'][snapshot_date]['oa_colors_with_priority_to_publisher'] = ['green_only']
        else:
            res['oa_details'][snapshot_date]['oa_colors'] = ['other']
            res['oa_details'][snapshot_date]['oa_colors_with_priority_to_publisher'] = ['other']
        res['oa_details'][snapshot_date]['repositories'] = [k['repository_normalized'] for k in oa_locations if 'repository_normalized' in k]
        res['oa_details'][snapshot_date]['oa_locations'] = oa_locations
    ## title - first author
    # Deduplication key: normalised title plus the first author's name.
    title_first_author = ""
    if res.get('title'):
        title_first_author += normalize(res.get('title'), 1).strip()
    if isinstance(res.get('authors'), list) and len(res['authors']) > 0:
        if res['authors'][0].get('full_name'):
            title_first_author += ';'+normalize(res['authors'][0].get('full_name'), 1)
    if title_first_author:
        res['title_first_author'] = title_first_author
    return res
|
<filename>src/main/java/com/supanadit/restsuite/component/input/InputSearchCollection.java
package com.supanadit.restsuite.component.input;
/**
 * Text input used to search/filter the request collection tree; the
 * placeholder text doubles as the field's label.
 */
public class InputSearchCollection extends InputComponent {
public InputSearchCollection() {
setPlaceholder("Search Collection");
}
}
|
import React from "react";
import { isPrimitive, ObjectValues } from "./Utils";
export interface IRenderBodyProps {
// headerProps?: any;
// headerContainerProps?: any;
/** Rows to render; each row's own property values become table cells. */
items?: any[];
}
export const RenderBody: React.FC<IRenderBodyProps> = (props) => {
const { items } = props;
return (
<tbody>
{items &&
items.map((item: any, index: number) => {
return (
<tr>
{ObjectValues(item).map((entry) => (
<td>{isPrimitive(entry) ? entry : ""}</td>
))}
</tr>
);
})}
</tbody>
);
};
|
#!/bin/bash
# Author: Robin Wen
# Date: 2015-1-1 08:49:08
# Desc: Auto push after update the repo.
# Update: Update Commit info at 2015-03-04 17:17:10
# Update: Test Git Config at 2015-03-07 09:32:09.
# Update: Optimize display.
# Stage every change in the working tree, including deletions.
git add -A .
# Commit with a fixed message; a no-op when there is nothing staged.
git commit -m "[Post] Update article."
# Publish to the canonical branch.
git push origin master
# -----------------------------------------------------------------------------
#
# Package : ansi-blue
# Version : 0.1.1
# Source repo : https://github.com/jonschlinkert/ansi-blue
# Tested on : RHEL 8.3
# Script License: Apache License, Version 2 or later
# Maintainer : BulkPackageSearch Automation <sethp@us.ibm.com>
#
# Disclaimer: This script has been tested in root mode on given
# ========== platform using the mentioned version of the package.
# It may not work as expected with newer versions of the
# package and/or distribution. In such case, please
# contact "Maintainer" of this script.
#
# ----------------------------------------------------------------------------
PACKAGE_NAME=ansi-blue
PACKAGE_VERSION=0.1.1
PACKAGE_URL=https://github.com/jonschlinkert/ansi-blue
# Install build/toolchain prerequisites and a current node/npm toolchain.
yum -y update && yum install -y yum-utils nodejs nodejs-devel nodejs-packaging npm python38 python38-devel ncurses git gcc gcc-c++ libffi libffi-devel ncurses git jq make cmake
yum-config-manager --add-repo http://rhn.pbm.ihost.com/rhn/latest/8.3Server/ppc64le/appstream/
yum-config-manager --add-repo http://rhn.pbm.ihost.com/rhn/latest/8.3Server/ppc64le/baseos/
yum-config-manager --add-repo http://rhn.pbm.ihost.com/rhn/latest/7Server/ppc64le/optional/
yum install -y firefox liberation-fonts xdg-utils && npm install n -g && n latest && npm install -g npm@latest && export PATH="$PATH" && npm install --global yarn grunt-bump xo testem acorn
OS_NAME=`python3 -c "os_file_data=open('/etc/os-release').readlines();os_info = [i.replace('PRETTY_NAME=','').strip() for i in os_file_data if i.startswith('PRETTY_NAME')];print(os_info[0])"`
HOME_DIR=`pwd`
if ! git clone $PACKAGE_URL $PACKAGE_NAME; then
	echo "------------------$PACKAGE_NAME:clone_fails---------------------------------------"
	echo "$PACKAGE_URL $PACKAGE_NAME" > /home/tester/output/clone_fails
	echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Fail | Clone_Fails" > /home/tester/output/version_tracker
	exit 0
fi
cd $HOME_DIR/$PACKAGE_NAME
git checkout $PACKAGE_VERSION
# Re-read the version from package.json (the tag may differ from metadata).
PACKAGE_VERSION=$(jq -r ".version" package.json)
# run the test command from test.sh
# NOTE: the whole install/audit sequence is grouped in a subshell so that
# '!' negates the entire pipeline. The original wrote
# 'if ! npm install && npm audit fix && ...' which, by shell list
# precedence, negates only 'npm install' and then runs the audit commands
# as part of the condition -- the failure branch was effectively
# unreachable unless install failed AND both audits succeeded.
if ! (npm install && npm audit fix && npm audit fix --force); then
	echo "------------------$PACKAGE_NAME:install_fails-------------------------------------"
	echo "$PACKAGE_URL $PACKAGE_NAME"
	echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Fail | Install_Fails"
	exit 0
fi
cd $HOME_DIR/$PACKAGE_NAME
if ! npm test; then
	echo "------------------$PACKAGE_NAME:install_success_but_test_fails---------------------"
	echo "$PACKAGE_URL $PACKAGE_NAME"
	echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Fail | Install_success_but_test_Fails"
	exit 0
else
	echo "------------------$PACKAGE_NAME:install_&_test_both_success-------------------------"
	echo "$PACKAGE_URL $PACKAGE_NAME"
	echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Pass | Both_Install_and_Test_Success"
	exit 0
fi
//
// Texture handling.
//
#define __MSC__
#include "game.h"
#include <glide.h>
#include "gltexture.h"
//
// Hmm...
//
extern UWORD floor_texture_sizes[];
#define TEXTURE_MAX_TEXTURES 1024
#define TEXTURE_NUM_STANDARD 10
#define TEXTURE_NORM_SIZE 32
#define TEXTURE_FIDD_SIZE 36
#define TEXTURE_NORM_SQUARES 8
#define TEXTURE_FIDD_SQUARES 7
//
// The texture pages.
//
#define TEXTURE_TYPE_NONE 0
#define TEXTURE_TYPE_TRILINEAR 1
#define TEXTURE_TYPE_ADDITIVE_NOZWRITE 2
#define TEXTURE_TYPE_ALPHABLEND 3
#define TEXTURE_TYPE_ALPHABLEND_NOZWRITE 4
typedef struct
{
ULONG type;
GrTexInfo textinfo;
ULONG address_tmu0;
ULONG address_tmu1;
} TEXTURE_Texture;
TEXTURE_Texture TEXTURE_texture[TEXTURE_MAX_TEXTURES];
SLONG TEXTURE_page_num_standard;
SLONG TEXTURE_page_fog;
SLONG TEXTURE_page_moon;
SLONG TEXTURE_page_clouds;
SLONG TEXTURE_page_water;
SLONG TEXTURE_page_puddle;
SLONG TEXTURE_page_shadow;
SLONG TEXTURE_page_detail;
//
// The number of textures loaded.
//
SLONG TEXTURE_num_textures;
//
// Decodes a packed 16-bit texture word and produces the texture page plus
// the UV coordinates of the quad's four corners.
//
// Texture word layout (from the masks below):
//   bits 0-9   : page number
//   bits 10-11 : rotation (0-3, quarter turns)
//   bits 12-13 : flip    -- extracted into tflip but NOT applied here;
//                presumably handled elsewhere or unused, TODO confirm.
//   bits 14-15 : size    -- extracted into tsize but unused here.
//
// The four (u,v) outputs always receive the corners (0,0), (256,0),
// (0,256), (256,256); rotation only permutes which output gets which
// corner. An out-of-range page falls back to page 0.
//
void TEXTURE_get_minitexturebits_uvs(
UWORD texture,
SLONG *page,
float *u0,
float *v0,
float *u1,
float *v1,
float *u2,
float *v2,
float *u3,
float *v3)
{
SLONG tx;
SLONG ty;
SLONG tpage;
SLONG trot;
SLONG tflip;
SLONG tsize;
SLONG num;
static const float base_u = 0.0F;
static const float base_v = 0.0F;
static const float base_size = 256.0F;
num = texture & 0x3ff;
trot = (texture >> 0xa) & 0x3;
tflip = (texture >> 0xc) & 0x3;
tsize = (texture >> 0xe) & 0x3;
//
// The page is easy!
//
*page = num;
if (*page >= TEXTURE_page_num_standard)
{
*page = 0;
}
//
// The texture coordinates depend of the rotation.
// Each case assigns the same four corners to a rotated permutation of
// the outputs (corner c goes to output (c + rot) with reflection --
// the exact permutations are: rot1 {u2,u0,u3,u1}, rot2 {u3,u2,u1,u0},
// rot3 {u1,u3,u0,u2} receiving corners 0..3 in order).
//
switch(trot)
{
case 0:
*u0 = base_u;
*v0 = base_v;
*u1 = base_u + base_size;
*v1 = base_v;
*u2 = base_u;
*v2 = base_v + base_size;
*u3 = base_u + base_size;
*v3 = base_v + base_size;
break;
case 1:
*u2 = base_u;
*v2 = base_v;
*u0 = base_u + base_size;
*v0 = base_v;
*u3 = base_u;
*v3 = base_v + base_size;
*u1 = base_u + base_size;
*v1 = base_v + base_size;
break;
case 2:
*u3 = base_u;
*v3 = base_v;
*u2 = base_u + base_size;
*v2 = base_v;
*u1 = base_u;
*v1 = base_v + base_size;
*u0 = base_u + base_size;
*v0 = base_v + base_size;
break;
case 3:
*u1 = base_u;
*v1 = base_v;
*u3 = base_u + base_size;
*v3 = base_v;
*u0 = base_u;
*v0 = base_v + base_size;
*u2 = base_u + base_size;
*v2 = base_v + base_size;
break;
}
}
// Maps a square position in the "normal" 8x8 / 32-texel page layout to its
// position in the "fiddled" 7x7 / 36-texel layout.  Returns the fiddled page
// index and writes the square's texel origin (inset by the extra border) to
// *u / *v.
SLONG TEXTURE_get_fiddled_position(
	SLONG square_u,
	SLONG square_v,
	SLONG page,
	float *u,
	float *v)
{
	SLONG num;

	{
		// Linearise the normal-layout position...
		num = square_u + square_v * TEXTURE_NORM_SQUARES + page * (TEXTURE_NORM_SQUARES * TEXTURE_NORM_SQUARES);

		// ...then re-split it base 7 for the 7x7 fiddled layout.
		square_u = num % 7; num /= 7;
		square_v = num % 7; num /= 7;

		page = num;

		// '-' binds tighter than '>>', so the inset is (36 - 32) >> 1 = 2
		// texels on each axis (half the size difference, centring the
		// 32-texel square in its 36-texel cell).
		*u = float(square_u * TEXTURE_FIDD_SIZE + (TEXTURE_FIDD_SIZE - TEXTURE_NORM_SIZE >> 1));
		*v = float(square_v * TEXTURE_FIDD_SIZE + (TEXTURE_FIDD_SIZE - TEXTURE_NORM_SIZE >> 1));
	}

	return page;
}
// Loads every texture page into Glide texture memory.
//
// The standard "fiddled" pages (3dfx\splitup\3df\texNNN.3df) get their mipmap
// levels split across the TMUs - odd levels into TMU0, even levels into TMU1;
// the extra effect pages (fog, moon, ...) are downloaded whole into TMU1 only.
// Pages that do not fit simply stay non-resident (address 0) with a TRACE.
void TEXTURE_load()
{
	SLONG i;

	Gu3dfInfo fileinfo;

	ULONG texture_mem_required;

	// Next free download address inside each TMU's texture memory.
	ULONG address_tmu0 = grTexMinAddress(GR_TMU0);
	ULONG address_tmu1 = grTexMinAddress(GR_TMU1);

	CBYTE name_3df[32];

	TEXTURE_Texture *tt;

	//
	// Load the individual pages.
	//

	#define TEXTURE_NUM_FIDDLED (8 * 8 * 9)

	for (i = 0; i < TEXTURE_NUM_FIDDLED; i++)
	{
		tt = &TEXTURE_texture[i];

		tt->type = TEXTURE_TYPE_TRILINEAR;
		tt->address_tmu0 = 0;
		tt->address_tmu1 = 0;

		sprintf(name_3df, "3dfx\\splitup\\3df\\tex%03d.3df", i);

		if (gu3dfGetInfo(name_3df, &fileinfo))
		{
			fileinfo.data = malloc(fileinfo.mem_required);

			if (fileinfo.data)
			{
				if (gu3dfLoad(name_3df, &fileinfo))
				{
					//
					// The file has been loaded into memory.
					//

					tt->textinfo.smallLodLog2 = fileinfo.header.small_lod;
					tt->textinfo.largeLodLog2 = fileinfo.header.large_lod;
					tt->textinfo.aspectRatioLog2 = fileinfo.header.aspect_ratio;
					tt->textinfo.format = fileinfo.header.format;
					tt->textinfo.data = fileinfo.data;

					//
					// Download the odd mipmap levels into TMU0.
					//

					texture_mem_required = grTexTextureMemRequired(
											GR_MIPMAPLEVELMASK_ODD,
											&tt->textinfo);

					if (address_tmu0 + texture_mem_required > grTexMaxAddress(GR_TMU0))
					{
						TRACE("Not enough texture memory in TMU0 to load %s\n", name_3df);
					}
					else
					{
						grTexDownloadMipMap(
							GR_TMU0,
							address_tmu0,
							GR_MIPMAPLEVELMASK_ODD,
							&tt->textinfo);

						tt->address_tmu0 = address_tmu0;
						address_tmu0 += texture_mem_required;
					}

					//
					// Download the even mipmap levels into TMU1.
					//

					texture_mem_required = grTexTextureMemRequired(
											GR_MIPMAPLEVELMASK_EVEN,
											&tt->textinfo);

					if (address_tmu1 + texture_mem_required > grTexMaxAddress(GR_TMU1))
					{
						TRACE("Not enough texture memory in TMU1 to load %s\n", name_3df);
					}
					else
					{
						grTexDownloadMipMap(
							GR_TMU1,
							address_tmu1,
							GR_MIPMAPLEVELMASK_EVEN,
							&tt->textinfo);

						tt->address_tmu1 = address_tmu1;
						address_tmu1 += texture_mem_required;
					}
				}

				free(fileinfo.data);
			}
		}
		else
		{
			TRACE("Could not load %s\n", name_3df);
		}
	}

	TEXTURE_page_num_standard = TEXTURE_NUM_FIDDLED + 0;

	//
	// The extra texture pages are all put in TMU1
	//

	#define TEXTURE_NUM_EXTRA 6

	TEXTURE_page_fog    = TEXTURE_NUM_FIDDLED + 0;
	TEXTURE_page_moon   = TEXTURE_NUM_FIDDLED + 1;
	TEXTURE_page_clouds = TEXTURE_NUM_FIDDLED + 2;
	TEXTURE_page_water  = TEXTURE_NUM_FIDDLED + 3;
	TEXTURE_page_puddle = TEXTURE_NUM_FIDDLED + 4;
	TEXTURE_page_detail = TEXTURE_NUM_FIDDLED + 5;

	// NOTE(review): TEXTURE_page_shadow is never assigned here - confirm
	// whether a shadow page was meant to be one of the extras.

	CBYTE *extra_name[TEXTURE_NUM_EXTRA] =
	{
		"3dfx\\fog.3df",
		"3dfx\\moon.3df",
		"3dfx\\clouds.3df",
		"3dfx\\water.3df",
		"3dfx\\puddle.3df",
		"3dfx\\detail.3df"
	};

	ULONG extra_type[TEXTURE_NUM_EXTRA] =
	{
		TEXTURE_TYPE_ADDITIVE_NOZWRITE,
		TEXTURE_TYPE_ALPHABLEND_NOZWRITE,
		TEXTURE_TYPE_ADDITIVE_NOZWRITE,
		TEXTURE_TYPE_ALPHABLEND_NOZWRITE,
		TEXTURE_TYPE_ALPHABLEND,
		TEXTURE_TYPE_NONE
	};

	for (i = 0; i < TEXTURE_NUM_EXTRA; i++)
	{
		tt = &TEXTURE_texture[TEXTURE_NUM_FIDDLED + i];

		tt->type = extra_type[i];
		tt->address_tmu0 = 0;
		tt->address_tmu1 = 0;

		if (gu3dfGetInfo(extra_name[i], &fileinfo))
		{
			fileinfo.data = malloc(fileinfo.mem_required);

			if (fileinfo.data)
			{
				if (gu3dfLoad(extra_name[i], &fileinfo))
				{
					//
					// The file has been loaded into memory.
					//

					tt->textinfo.smallLodLog2 = fileinfo.header.small_lod;
					tt->textinfo.largeLodLog2 = fileinfo.header.large_lod;
					tt->textinfo.aspectRatioLog2 = fileinfo.header.aspect_ratio;
					tt->textinfo.format = fileinfo.header.format;
					tt->textinfo.data = fileinfo.data;

					//
					// Download into TMU1 (all mip levels together).
					//

					texture_mem_required = grTexTextureMemRequired(
											GR_MIPMAPLEVELMASK_BOTH,
											&tt->textinfo);

					if (address_tmu1 + texture_mem_required > grTexMaxAddress(GR_TMU1))
					{
						// BUGFIX: report the file we actually failed to fit
						// (extra_name[i]); this used to print the stale
						// name_3df left over from the loop above.
						TRACE("Not enough texture memory in TMU1 to load %s\n", extra_name[i]);
					}
					else
					{
						grTexDownloadMipMap(
							GR_TMU1,
							address_tmu1,
							GR_MIPMAPLEVELMASK_BOTH,
							&tt->textinfo);

						tt->address_tmu1 = address_tmu1;
						address_tmu1 += texture_mem_required;
					}
				}

				free(fileinfo.data);
			}
		}
		else
		{
			TRACE("Could not load %s\n", extra_name[i]);
		}
	}

	TEXTURE_num_textures = TEXTURE_NUM_FIDDLED + TEXTURE_NUM_EXTRA;

	//
	// Set NCC table 0 as the default.
	//

	grTexNCCTable(GR_TEXTABLE_NCC0);
}
//
// The render states we can be in (programmed in TEXTURE_set_page).
//

#define TEXTURE_STATE_TRILINEAR 0
#define TEXTURE_STATE_DETAIL 1
#define TEXTURE_STATE_ADDITIVE_NOZWRITE 2

// Cached Glide state so TEXTURE_set_page() only issues gr* calls on change;
// -1 means unknown / not yet set (see TEXTURE_init_states).
SLONG TEXTURE_current_page_tmu0 = -1;
SLONG TEXTURE_current_page_tmu1 = -1;
SLONG TEXTURE_current_state = -1;
// Makes 'page' the active texture.  Reprograms the Glide texture sources and
// combine/blend/mipmap state only when they differ from the cached values in
// TEXTURE_current_* (call TEXTURE_init_states() after external Glide changes).
// 'multi' selects detail texturing for trilinear pages (TEXTURE_MULTI_DETAIL).
void TEXTURE_set_page(SLONG page, SLONG multi)
{
	TEXTURE_Texture *tt;

	ASSERT(WITHIN(page, 0, TEXTURE_num_textures - 1));

	tt = &TEXTURE_texture[page];

	//
	// What state should we be in and which pages should we be using.
	//

	SLONG state;
	SLONG page_tmu0;
	SLONG page_tmu1;
	ULONG evenodd_tmu0;
	ULONG evenodd_tmu1;

	switch(tt->type)
	{
		case TEXTURE_TYPE_TRILINEAR:
			if (multi == TEXTURE_MULTI_DETAIL)
			{
				// Base texture in TMU0, the shared detail page in TMU1.
				state = TEXTURE_STATE_DETAIL;
				page_tmu0 = page;
				page_tmu1 = TEXTURE_page_detail;
				evenodd_tmu0 = GR_MIPMAPLEVELMASK_ODD;
				evenodd_tmu1 = GR_MIPMAPLEVELMASK_BOTH;
			}
			else
			{
				// TEXTURE_load() split the mip levels across the TMUs:
				// odd levels live in TMU0, even levels in TMU1.
				state = TEXTURE_STATE_TRILINEAR;
				page_tmu0 = page;
				page_tmu1 = page;
				evenodd_tmu0 = GR_MIPMAPLEVELMASK_ODD;
				evenodd_tmu1 = GR_MIPMAPLEVELMASK_EVEN;
			}
			break;

		case TEXTURE_TYPE_ADDITIVE_NOZWRITE:
			// TMU0 keeps whatever page it already had; TMU1 is pointed at the
			// fog page.  NOTE(review): the requested 'page' is ignored here -
			// confirm that every additive-nozwrite draw really uses the fog
			// page.
			state = TEXTURE_STATE_ADDITIVE_NOZWRITE;
			page_tmu0 = TEXTURE_current_page_tmu0;
			page_tmu1 = TEXTURE_page_fog;
			evenodd_tmu0 = GR_MIPMAPLEVELMASK_BOTH;
			evenodd_tmu1 = GR_MIPMAPLEVELMASK_BOTH;
			break;

		default:
			// TEXTURE_TYPE_NONE / ALPHABLEND / ALPHABLEND_NOZWRITE are not
			// handled here (yet?).
			ASSERT(0);
			break;
	}

	//
	// Make sure the texture pages are correct.
	//

	if (page_tmu0 != TEXTURE_current_page_tmu0)
	{
		ASSERT(WITHIN(page_tmu0, 0, TEXTURE_num_textures - 1));

		TEXTURE_Texture *tt_tmu0 = &TEXTURE_texture[page_tmu0];

		grTexSource(
			GR_TMU0,
			tt_tmu0->address_tmu0,
			evenodd_tmu0,
			&tt_tmu0->textinfo);

		TEXTURE_current_page_tmu0 = page_tmu0;
	}

	if (page_tmu1 != TEXTURE_current_page_tmu1)
	{
		ASSERT(WITHIN(page_tmu1, 0, TEXTURE_num_textures - 1));

		TEXTURE_Texture *tt_tmu1 = &TEXTURE_texture[page_tmu1];

		grTexSource(
			GR_TMU1,
			tt_tmu1->address_tmu1,
			evenodd_tmu1,
			&tt_tmu1->textinfo);

		TEXTURE_current_page_tmu1 = page_tmu1;
	}

	//
	// Make sure the state is correct.
	//

	if (state != TEXTURE_current_state)
	{
		switch(state)
		{
			case TEXTURE_STATE_TRILINEAR:
				// Opaque draw with z-write; blend the two mip sets by LOD
				// fraction for trilinear filtering.
				grDepthMask(FXTRUE);
				grTexCombine(
					GR_TMU0,
					GR_COMBINE_FUNCTION_SCALE_OTHER_MINUS_LOCAL_ADD_LOCAL, GR_COMBINE_FACTOR_ONE_MINUS_LOD_FRACTION,
					GR_COMBINE_FUNCTION_SCALE_OTHER_MINUS_LOCAL_ADD_LOCAL, GR_COMBINE_FACTOR_ONE_MINUS_LOD_FRACTION,
					FXFALSE,
					FXFALSE);
				grAlphaBlendFunction(
					GR_BLEND_ONE,
					GR_BLEND_ZERO,
					GR_BLEND_ONE,
					GR_BLEND_ZERO);
				grTexMipMapMode(
					GR_TMU0,
					GR_MIPMAP_NEAREST,
					FXTRUE);
				grTexMipMapMode(
					GR_TMU1,
					GR_MIPMAP_NEAREST,
					FXTRUE);
				break;

			case TEXTURE_STATE_DETAIL:
				// Opaque draw combining the base texture with the detail
				// texture by the per-pixel detail factor; mipmapping off.
				grDepthMask(FXTRUE);
				grTexCombine(
					GR_TMU0,
					GR_COMBINE_FUNCTION_SCALE_OTHER_ADD_LOCAL, GR_COMBINE_FACTOR_ONE_MINUS_DETAIL_FACTOR,
					GR_COMBINE_FUNCTION_SCALE_OTHER_ADD_LOCAL, GR_COMBINE_FACTOR_ONE_MINUS_DETAIL_FACTOR,
					FXFALSE,
					FXFALSE);
				grAlphaBlendFunction(
					GR_BLEND_ONE,
					GR_BLEND_ZERO,
					GR_BLEND_ONE,
					GR_BLEND_ZERO);
				grTexMipMapMode(
					GR_TMU0,
					GR_MIPMAP_DISABLE,
					FXTRUE);
				grTexMipMapMode(
					GR_TMU1,
					GR_MIPMAP_DISABLE,
					FXTRUE);

				//
				// Detail-texturing constants.
				//

				grTexDetailControl(
					GR_TMU0,
					1,
					6,
					1.0F);
				break;

			case TEXTURE_STATE_ADDITIVE_NOZWRITE:
				// Additive blend (src + dst) without z-write, e.g. glow/fog
				// style passes; mipmapping off.
				grDepthMask(FXFALSE);
				grTexCombine(
					GR_TMU0,
					GR_COMBINE_FUNCTION_SCALE_OTHER, GR_COMBINE_FACTOR_ONE,
					GR_COMBINE_FUNCTION_SCALE_OTHER, GR_COMBINE_FACTOR_ONE,
					FXFALSE,
					FXFALSE);
				grAlphaBlendFunction(
					GR_BLEND_ONE,
					GR_BLEND_ONE,
					GR_BLEND_ZERO,
					GR_BLEND_ZERO);
				grTexMipMapMode(
					GR_TMU0,
					GR_MIPMAP_DISABLE,
					FXTRUE);
				grTexMipMapMode(
					GR_TMU1,
					GR_MIPMAP_DISABLE,
					FXTRUE);
				break;

			default:
				ASSERT(0);
				break;
		}

		TEXTURE_current_state = state;
	}
}
// Invalidates the cached page/state values so the next TEXTURE_set_page()
// call reprograms everything.  Call after Glide state has been changed
// outside this module.
void TEXTURE_init_states()
{
	TEXTURE_current_page_tmu0 = -1;
	TEXTURE_current_page_tmu1 = -1;
	TEXTURE_current_state = -1;
}
// Re-bases every prim face's UVs onto its 32-texel square and re-packs the
// page index: the low 8 bits stay in TexturePage, bits 8-9 move into the top
// two bits of UV[0][0].  Faces are processed from index 1 (index 0 unused).
void TEXTURE_fix_prim_textures()
{
	SLONG i;
	SLONG j;

	PrimFace3 *f3;
	PrimFace4 *f4;

	SLONG page;
	SLONG av_u;
	SLONG av_v;
	SLONG base_u;
	SLONG base_v;

	for (i = 1; i < next_prim_face3; i++)
	{
		f3 = &prim_faces3[i];

		// Average of the three vertex UVs: * 85 >> 8 approximates / 3.
		av_u = (f3->UV[0][0] + f3->UV[1][0] + f3->UV[2][0]) * 85 >> 8;
		av_v = (f3->UV[0][1] + f3->UV[1][1] + f3->UV[2][1]) * 85 >> 8;

		// Which 32-texel square the face lies in, and that square's origin.
		av_u /= TEXTURE_NORM_SIZE;
		av_v /= TEXTURE_NORM_SIZE;

		base_u = av_u * TEXTURE_NORM_SIZE;
		base_v = av_v * TEXTURE_NORM_SIZE;

		//
		// All coordinates relative to the base now...
		//

		for (j = 0; j < 3; j++)
		{
			f3->UV[j][0] -= base_u;
			f3->UV[j][1] -= base_v;

			SATURATE(f3->UV[j][0], 0, 32);
			SATURATE(f3->UV[j][1], 0, 32);

			// NOTE(review): 31 snaps up to 32 - presumably to land exactly
			// on the square edge; confirm.
			if (f3->UV[j][0] == 31) {f3->UV[j][0] = 32;}
			if (f3->UV[j][1] == 31) {f3->UV[j][1] = 32;}
		}

		// Square index within the 8x8 page grid plus page offset; 575 is the
		// clamp to the valid packed range.
		page = av_u + av_v * TEXTURE_NORM_SQUARES + f3->TexturePage * TEXTURE_NORM_SQUARES * TEXTURE_NORM_SQUARES;

		SATURATE(page, 0, 575);

		//
		// The 9th and 10th bits of page go in the top two bits of UV[0][0]!
		//

		f3->UV[0][0] |= (page >> 2) & 0xc0;
		f3->TexturePage = (page >> 0) & 0xff;
	}

	for (i = 1; i < next_prim_face4; i++)
	{
		f4 = &prim_faces4[i];

		// Average of the four vertex UVs (exact / 4).
		av_u = (f4->UV[0][0] + f4->UV[1][0] + f4->UV[2][0] + f4->UV[3][0]) >> 2;
		av_v = (f4->UV[0][1] + f4->UV[1][1] + f4->UV[2][1] + f4->UV[3][1]) >> 2;

		av_u /= TEXTURE_NORM_SIZE;
		av_v /= TEXTURE_NORM_SIZE;

		base_u = av_u * TEXTURE_NORM_SIZE;
		base_v = av_v * TEXTURE_NORM_SIZE;

		//
		// All coordinates relative to the base now...
		//

		for (j = 0; j < 4; j++)
		{
			f4->UV[j][0] -= base_u;
			f4->UV[j][1] -= base_v;

			SATURATE(f4->UV[j][0], 0, 32);
			SATURATE(f4->UV[j][1], 0, 32);

			if (f4->UV[j][0] == 31) {f4->UV[j][0] = 32;}
			if (f4->UV[j][1] == 31) {f4->UV[j][1] = 32;}
		}

		page = av_u + av_v * TEXTURE_NORM_SQUARES + f4->TexturePage * TEXTURE_NORM_SQUARES * TEXTURE_NORM_SQUARES;

		SATURATE(page, 0, 575);

		//
		// The 9th and 10th bits of page go in the top two bits of UV[0][0]!
		//

		f4->UV[0][0] |= (page >> 2) & 0xc0;
		f4->TexturePage = (page >> 0) & 0xff;
	}
}
|
import java.util.Scanner;

public class LargerFloat {

    /**
     * Reads two float values from standard input and reports which one is
     * larger, or that both are equal.
     */
    public static void main(String[] args) {
        Scanner input = new Scanner(System.in);
        System.out.print("Enter two float values: ");

        float first = input.nextFloat();
        float second = input.nextFloat();

        if (first == second) {
            System.out.println("Both numbers are equal.");
        } else {
            float larger = (first > second) ? first : second;
            System.out.println("The larger value is: " + larger);
        }
    }
} |
var fs = require('fs');
var AWS = require('aws-sdk');
//Assumes S3 credentials in environment vars AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY
AWS.config.region = 'eu-central-1';
// Public entry point for archiving a recording.  Currently only the still
// image is uploaded (see archiveVideoToFile, which stores video.jpg); the
// mp4 itself is not persisted by this path.
export function archiveVideo(metadata, video) {
  archiveVideoToFile(metadata, video);
}
/**
 * Lists archived objects in the bacid-dk bucket and invokes the callback with
 * `(err, result)`, where result is `[{ key, url }, ...]`.
 *
 * NOTE(review): only the first page of results is returned; pagination via
 * `response.hasNextPage()` was deliberately disabled by the original author.
 */
export function getFromArchive(callback) {
  new AWS.S3().listObjects({
    Bucket: 'bacid-dk'
  }).on('success', function handlePage(response) {
    //if (response.hasNextPage()) {
    // response.nextPage().on('success', handlePage).send();
    //}
    //Returning objects on first page only
    const result = [];
    for (const entry of response.data.Contents) {
      result.push({
        key: entry.Key,
        url: 'http://bacid-dk.s3.amazonaws.com/' + entry.Key
      });
    }
    callback(undefined, result);
  }).on('error', function (err) {
    // BUGFIX: previously a failed request never invoked the callback at all;
    // propagate the error using the (err, result) convention instead.
    callback(err);
  }).send();
}
/**
 * Uploads the recording's still image to S3 as `<name>.jpg`, logging upload
 * progress events and the final (err, data) outcome.
 */
export function archiveVideoToFile(metadata, video) {
  const params = {
    Bucket: 'bacid-dk',
    Key: metadata.name + '.jpg',
    Body: video.jpg
  };

  const upload = new AWS.S3().upload(params);
  upload.on('httpUploadProgress', (evt) => console.log(evt));
  upload.send((err, data) => {
    console.log(err, data);
  });
}
/*
export function archiveVideoToFile(metadata, video) {
fs.writeFile("output/LAST.MP4", video.mp4, function(err) {
if (err) {
return console.log(err);
}
});
}
*/ |
# Maven Docker image tag used for the containerised build on the next line.
MAVEN_VERSION=3.3-jdk-8
docker run -it --rm -v "$PWD":/usr/src/mymaven -v "$HOME/.m2:/usr/src/mymaven/.m2" -w /usr/src/mymaven maven:${MAVEN_VERSION} mvn clean package |
#!/bin/bash

# Launches Chrome with the Netflix WebCrypto pepper plugin registered and
# pointed at the hosted QA test page.

PWD=`pwd`  # NOTE(review): shadows the shell's built-in $PWD with the same value
PLUGINHOME="$PWD"
INFO_FILE="$PLUGINHOME/nfwebcrypto.info"
export LD_LIBRARY_PATH="$PLUGINHOME"

# Assemble the chrome command line.
CHROME="/opt/google/chrome/chrome"

# This snippet was copied from /sbin/session_manager_setup.sh on Chrome OS.
# It parses data from the .info file to put into the command line to register
# the plugin, including making the version number and other info visible
# in chrome://plugins
FILE_NAME=
PLUGIN_NAME=
DESCRIPTION=
VERSION=
MIME_TYPES=
. $INFO_FILE
# NOTE(review): ${PLUGIN} is never assigned in this script - presumably it is
# expected to be empty or to come from the sourced .info file; confirm.
PLUGIN_STRING="${PLUGIN}${FILE_NAME}"
if [ -n "$PLUGIN_NAME" ]; then
	PLUGIN_STRING="${PLUGIN_STRING}#${PLUGIN_NAME}"
	PLUGIN_STRING="${PLUGIN_STRING}#"
	[ -n "$VERSION" ] && PLUGIN_STRING="${PLUGIN_STRING}#${VERSION}"
fi
PLUGIN_STRING="${PLUGIN_STRING};${MIME_TYPES}"
REGISTER_PLUGINS="${REGISTER_PLUGINS}${COMMA}${PLUGIN_STRING}"
COMMA=","
# end snippet

# NOTE: Can't put "CrOS" in user agent or else chrome://plugins won't work (known chrome bug)
USERAGENT="Mozilla/5.0 (X11; CrOS armv7l 2876.0.0) AppleWebKit/537.10 (KHTML, like Gecko) Chrome/30.0.1262.2 Safari/537.10"

#URL="http://localhost/nfwebcrypto/test_qa.html?spec=SignVerifyRSA%20SignVerifyLargeData.#"
URL="http://netflix.github.io/NfWebCrypto/web/test_qa.html"

OPT=(
	--register-pepper-plugins=$REGISTER_PLUGINS
	--profile-directory="nfwc"
	--ppapi-out-of-process
	--user-agent="$USERAGENT"
)
#--enable-dcheck
#--enable-accelerated-plugins
#--enable-logging
#--user-agent="$USERAGENT"

# Finally, echo and then run the command to launch chrome
echo $CHROME "${OPT[@]}" "$URL"
$CHROME "${OPT[@]}" "$URL"
|
<filename>packages-node/providence-analytics/src/program/types/analyzers/find-exports.d.ts
import {
  SpecifierName,
  SpecifierSource,
  PathRelativeFromProjectRoot,
  RootFileMapEntry,
  RootFile,
  AnalyzerQueryResult,
  FindAnalyzerOutputFile,
} from '../core';

export interface FindExportsAnalyzerResult extends AnalyzerQueryResult {
  /** one output entry per analyzed file */
  queryOutput: FindExportsAnalyzerOutputFile[];
}

export interface FindExportsAnalyzerOutputFile extends FindAnalyzerOutputFile {
  /** path relative from project root for which a result is generated based on AST traversal */
  file: PathRelativeFromProjectRoot;
  /** result of AST traversal for file in project */
  result: FindExportsAnalyzerEntry[];
}

/** One export statement found in a file. */
export interface FindExportsAnalyzerEntry {
  /**
   * The specifiers found in an export statement.
   *
   * For example:
   * - file `export class X {}` gives `['X']`
   * - file `export default const y = 0` gives `['[default]']`
   * - file `export { y, z } from 'project'` gives `['y', 'z']`
   */
  exportSpecifiers: SpecifierName[];
  /**
   * The original "source" string belonging to specifier.
   * For example:
   * - file `export { x } from './my/file';` gives `"./my/file"`
   * - file `export { x } from 'project';` gives `"project"`
   */
  source: SpecifierSource;
  /**
   * The normalized "source" string belonging to specifier
   * (based on file system information, resolves right names and extensions).
   * For example:
   * - file `export { x } from './my/file';` gives `"./my/file.js"`
   * - file `export { x } from 'project';` gives `"project"` (only files in current project are resolved)
   * - file `export { x } from '../';` gives `"../index.js"`
   */
  normalizedSource: SpecifierSource;
  /** map of tracked down Identifiers */
  rootFileMap: RootFileMapEntry[];
}

/**
 * Iterable version of `FindExportsAnalyzerEntry`.
 * Makes it easier to do comparisons inside MatchAnalyzers
 */
export interface IterableFindExportsAnalyzerEntry {
  /** file the export was found in, relative from project root */
  file: PathRelativeFromProjectRoot;
  /** the exported (outward-facing) name */
  specifier: SpecifierName;
  /**
   * The local name of an export. Example:
   * 'a' in case of `export {a as b} from 'c';`
   */
  localSpecifier: SpecifierName;
  /** re-export source, or null when the export is declared locally */
  source: SpecifierSource | null;
  /** the tracked-down origin of the exported identifier */
  rootFile: RootFile;
  /** optional analyzer-specific metadata */
  meta?: object;
}
|
def to_json_compatible_object(field):
    """Recursively validate and rebuild a value as a JSON-serialisable object.

    Scalars (str, int, float, bool, None) pass through unchanged; lists and
    dicts are rebuilt with every element converted recursively.  Any other
    type raises ValueError.
    """
    if field is None or isinstance(field, (str, int, float, bool)):
        return field

    if isinstance(field, list):
        converted = []
        for element in field:
            converted.append(to_json_compatible_object(element))
        return converted

    if isinstance(field, dict):
        result = {}
        for key, value in field.items():
            result[key] = to_json_compatible_object(value)
        return result

    raise ValueError("Unsupported data type in the input dictionary")
<reponame>robisacommonusername/SVGBuilder
require_relative '../Base/SVGContainer'

# Reopens SVG to add Group: a container for the SVG <g> element.  Groups are
# stylable and transformable, and yield themselves for block-style building.
class SVG < SVGAbstract::SVGContainer
	class Group < SVGAbstract::SVGContainer
		include SVGAbstract::StylableMixin
		include SVGAbstract::TransformableMixin

		def initialize
			super()
			stylable_init
			transformable_init
			@name = 'g'

			# Allow SVG::Group.new { |g| ... } construction.
			yield self if block_given?

			return self
		end
	end
end
|
#include "Etterna/Globals/global.h"
#include "NotesLoader.h"
#include "NotesLoaderBMS.h"
#include "NotesLoaderDWI.h"
#include "NotesLoaderKSF.h"
#include "NotesLoaderSM.h"
#include "NotesLoaderSMA.h"
#include "NotesLoaderSSC.h"
#include "RageUtil/Utils/RageUtil.h"
#include "NotesLoaderOSU.h"
// Splits a full song title into main title and subtitle at the first
// occurrence of any separator in sLeftSeps.  If no separator is found, the
// whole string becomes the main title and the subtitle is left empty.
void
NotesLoader::GetMainAndSubTitlesFromFullTitle(const RString& sFullTitle,
											  RString& sMainTitleOut,
											  RString& sSubTitleOut)
{
	const RString sLeftSeps[] = { "\t", " -", " ~", " (", " [" };
	for (unsigned i = 0; i < ARRAYLEN(sLeftSeps); i++) {
		size_t iBeginIndex = sFullTitle.find(sLeftSeps[i]);
		if (iBeginIndex == string::npos)
			continue;

		sMainTitleOut = sFullTitle.Left(static_cast<int>(iBeginIndex));
		// NOTE(review): the subtitle starts at iBeginIndex + 1, so for the
		// two-character separators (" -", " ~", ...) it still contains the
		// separator's second character; the length argument also overshoots
		// by two, which substr clamps.  Confirm this is intended behaviour.
		sSubTitleOut = sFullTitle.substr(iBeginIndex + 1,
										 sFullTitle.size() - iBeginIndex + 1);
		return;
	}
	sMainTitleOut = sFullTitle;
	sSubTitleOut = "";
};
// Tries each supported simfile format in priority order (SSC, SMA, SM, DWI,
// BMS, KSF, OSU) and loads the song with the first loader that finds an
// applicable file in the directory.  Returns false if no format matched or
// the chosen loader failed.
bool
NotesLoader::LoadFromDir(const RString& sPath,
						 Song& out,
						 set<RString>& BlacklistedImages,
						 bool load_autosave)
{
	vector<RString> list;

	BlacklistedImages.clear();

	// SSC has top priority; note that once .ssc files exist, an SSC load
	// failure is final - there is no fallthrough to the other formats.
	SSCLoader loaderSSC;
	loaderSSC.GetApplicableFiles(sPath, list, load_autosave);
	if (!list.empty()) {
		if (!loaderSSC.LoadFromDir(sPath, out, load_autosave)) {
			return false;
		}
		return true;
	}
	SMALoader loaderSMA;
	loaderSMA.GetApplicableFiles(sPath, list);
	if (!list.empty())
		return loaderSMA.LoadFromDir(sPath, out);
	SMLoader loaderSM;
	loaderSM.GetApplicableFiles(sPath, list);
	if (!list.empty())
		return loaderSM.LoadFromDir(sPath, out);
	// Only the DWI loader consumes the blacklisted-images set.
	DWILoader::GetApplicableFiles(sPath, list);
	if (!list.empty())
		return DWILoader::LoadFromDir(sPath, out, BlacklistedImages);
	BMSLoader::GetApplicableFiles(sPath, list);
	if (!list.empty())
		return BMSLoader::LoadFromDir(sPath, out);
	/*
	PMSLoader::GetApplicableFiles( sPath, list );
	if( !list.empty() )
		return PMSLoader::LoadFromDir( sPath, out );
	*/
	KSFLoader::GetApplicableFiles(sPath, list);
	if (!list.empty())
		return KSFLoader::LoadFromDir(sPath, out);
	OsuLoader::GetApplicableFiles(sPath, list);
	if (!list.empty())
		return OsuLoader::LoadFromDir(sPath, out);
	return false;
}
|
<filename>springboot_jwt/src/main/java/com/oven/config/InterceptorConfig.java
package com.oven.config;

import com.oven.interceptor.JwtInterceptor;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.config.annotation.InterceptorRegistration;
import org.springframework.web.servlet.config.annotation.InterceptorRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;

/**
 * Interceptor configuration: applies the JWT interceptor to every
 * {@code /jwt/**} endpoint except the login endpoint.
 */
@Configuration
public class InterceptorConfig implements WebMvcConfigurer {

    /** Expose the interceptor as a bean so Spring manages its lifecycle. */
    @Bean
    public JwtInterceptor jwtInterceptor() {
        return new JwtInterceptor();
    }

    @Override
    public void addInterceptors(InterceptorRegistry registry) {
        registry.addInterceptor(jwtInterceptor())
                .addPathPatterns("/jwt/**")
                .excludePathPatterns("/jwt/login");
    }
} |
import pandas as pd
from sklearn.cluster import KMeans
from sklearn.preprocessing import MinMaxScaler

# Load customer data from the working directory.
data = pd.read_csv('data.csv')

# Scale every feature into [0, 1] so no column dominates the distance metric.
scaler = MinMaxScaler()
data2 = scaler.fit_transform(data)

# Cluster the customers into three segments.
# NOTE(review): no random_state is set, so labels can differ between runs.
kmeans = KMeans(n_clusters=3).fit(data2)

# Assign segment labels and save as a new column
data['Segment'] = kmeans.labels_
print(data) |
package goribot

import (
	"github.com/PuerkitoBio/goquery"
	"log"
	"sync/atomic"
	"time"
)

// DefaultUA is the default User-Agent of the spider.
const DefaultUA = "Goribot"

// Spider is the core spider struct.
type Spider struct {
	ThreadPoolSize uint64 // max concurrent workers; 0 means unlimited
	DepthFirst     bool   // push new tasks to the head (true) or tail of the queue
	Downloader     func(r *Request) (*Response, error)
	Cache          *CacheManger

	// Handler chains, executed in registration order.
	onRespHandlers  []func(ctx *Context)
	onTaskHandlers  []func(ctx *Context, req *Task) *Task
	onItemHandlers  []func(ctx *Context, i interface{}) interface{}
	onErrorHandlers []func(ctx *Context, err error)

	taskQueue     *TaskQueue
	workingThread uint64 // in-flight worker count, updated atomically
}
// NewSpider builds a spider with the default configuration (depth-first,
// pool of 30, default downloader/cache) and then applies each extension
// function in order so callers can customise it.
func NewSpider(exts ...func(s *Spider)) *Spider {
	spider := &Spider{
		ThreadPoolSize: 30,
		DepthFirst:     true,
		Downloader:     Download,
		Cache:          NewCacheManger(),
		taskQueue:      NewTaskQueue(),
	}

	for _, ext := range exts {
		ext(spider)
	}

	return spider
}
// Run the spider and block until every task is done (queue empty and no
// workers in flight).
func (s *Spider) Run() {
	// worker downloads one task, runs the response handler chains, then
	// queues any follow-up tasks and hands collected items to the item
	// handlers.
	worker := func(t *Task) {
		// Adding ^uint64(0) (i.e. max uint64) atomically decrements the
		// in-flight counter.
		defer atomic.AddUint64(&s.workingThread, ^uint64(0))
		resp, err := s.Downloader(t.Request)
		ctx := &Context{
			Request:  t.Request,
			Response: resp,
			Items:    []interface{}{},
			Meta:     t.Meta,
			drop:     false,
		}
		if err != nil {
			log.Println("Downloader error", err)
			s.handleError(ctx, err)
		} else {
			ctx.Text = resp.Text
			ctx.Html = resp.Html
			ctx.Json = resp.Json
			// Spider-level handlers first, then the task's own handlers;
			// either chain can drop the context to stop further handling.
			s.handleResp(ctx)
			if !ctx.IsDrop() {
				for _, h := range t.onRespHandlers {
					h(ctx)
					if ctx.IsDrop() {
						break
					}
				}
			}
		}
		// Follow-up tasks and items are processed even when the context was
		// dropped or the download failed.
		for _, i := range ctx.Tasks {
			s.AddTask(ctx, i)
		}
		s.handleItem(ctx)
	}
	// Scheduler loop: spawn workers while tasks remain and the pool has room
	// (ThreadPoolSize == 0 means unlimited), otherwise sleep briefly.
	// NOTE(review): a 100ns sleep is effectively a spin loop - confirm
	// whether a longer interval (or channel-based scheduling) was intended.
	for (!s.taskQueue.IsEmpty()) || atomic.LoadUint64(&s.workingThread) > 0 {
		if (!s.taskQueue.IsEmpty()) && (atomic.LoadUint64(&s.workingThread) < s.ThreadPoolSize || s.ThreadPoolSize == 0) {
			atomic.AddUint64(&s.workingThread, 1)
			go worker(s.taskQueue.Pop())
		} else {
			time.Sleep(100 * time.Nanosecond)
		}
	}
}
// AddTask adds a task to the queue after threading it through the OnTask
// handler chain (a handler returning nil drops the task).  A default
// User-Agent is set when the request has none.
func (s *Spider) AddTask(ctx *Context, t *Task) {
	t = s.handleTask(ctx, t)
	if t == nil {
		return
	}
	if t.Request.Header.Get("User-Agent") == "" {
		t.Request.Header.Set("User-Agent", DefaultUA)
	}
	// Depth-first pushes to the head of the queue so newest tasks run first.
	if s.DepthFirst {
		s.taskQueue.PushInHead(t)
	} else {
		s.taskQueue.Push(t)
	}
}
// TodoContext is the placeholder context handed to OnTask handlers for seed
// tasks created via spider.NewTask / NewTaskWithMeta, since a seed task has
// no originating response.
var TodoContext = &Context{
	Text:     "",
	Html:     &goquery.Document{},
	Json:     map[string]interface{}{},
	Request:  &Request{},
	Response: &Response{},
	Tasks:    []*Task{},
	Items:    []interface{}{},
	Meta:     map[string]interface{}{},
	drop:     false,
}
// NewTask creates a seed task from the request and adds it to the queue;
// OnTask handlers receive TodoContext as their ctx.
func (s *Spider) NewTask(req *Request, RespHandler ...func(ctx *Context)) {
	s.AddTask(TodoContext, NewTask(req, RespHandler...))
}

// NewTaskWithMeta creates a seed task carrying meta data and adds it to the
// queue.
func (s *Spider) NewTaskWithMeta(req *Request, meta map[string]interface{}, RespHandler ...func(ctx *Context)) {
	t := NewTask(req, RespHandler...)
	t.Meta = meta
	s.AddTask(TodoContext, t)
}
// handleResp runs the spider-level OnResp handlers in order, stopping as
// soon as one of them drops the context.
func (s *Spider) handleResp(ctx *Context) {
	for _, h := range s.onRespHandlers {
		h(ctx)
		// Idiom fix: compare booleans directly instead of `== true`.
		if ctx.IsDrop() {
			return
		}
	}
}
// handleTask threads a task through the OnTask handler chain; each handler
// may replace the task, and returning nil drops it.
func (s *Spider) handleTask(ctx *Context, t *Task) *Task {
	for _, h := range s.onTaskHandlers {
		t = h(ctx, t)
		if t == nil {
			return nil
		}
	}
	return t
}
// handleItem runs every OnItem handler over the context's items.
// NOTE(review): the transformed value is assigned to the loop variable only,
// so a handler's result is never written back to ctx.Items (later handlers
// see the original items), and a nil result aborts ALL remaining items
// rather than skipping just the current one - confirm this is intended.
func (s *Spider) handleItem(ctx *Context) {
	for _, h := range s.onItemHandlers {
		for _, i := range ctx.Items {
			i = h(ctx, i)
			if i == nil {
				return
			}
		}
	}
}
// handleError notifies every OnError handler of a download failure.
func (s *Spider) handleError(ctx *Context, err error) {
	for _, h := range s.onErrorHandlers {
		h(ctx, err)
	}
}
// OnResp adds an on-response handler to the spider.
func (s *Spider) OnResp(h func(ctx *Context)) {
	s.onRespHandlers = append(s.onRespHandlers, h)
}

// OnTask adds an on-new-task handler to the spider.
func (s *Spider) OnTask(h func(ctx *Context, t *Task) *Task) {
	s.onTaskHandlers = append(s.onTaskHandlers, h)
}

// OnItem adds an on-new-item handler to the spider, e.g. for storage.
func (s *Spider) OnItem(h func(ctx *Context, i interface{}) interface{}) {
	s.onItemHandlers = append(s.onItemHandlers, h)
}

// OnError adds an on-error handler to the spider.
func (s *Spider) OnError(h func(ctx *Context, err error)) {
	s.onErrorHandlers = append(s.onErrorHandlers, h)
}
|
/* This is free and unencumbered software released into the public domain. */
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif
#include "module.h"
#include <cassert> /* for assert() */
#include <cstring> /* for std::strcmp() */
////////////////////////////////////////////////////////////////////////////////
/* Constants */

// Names of the modules compiled into this build; DISABLE_* macros prune
// entries at compile time.
static const char* const cpr_module_names[] = {
  /*
#ifndef DISABLE_LIST
  "list",
#endif
#ifndef DISABLE_MAP
  "map",
#endif
#ifndef DISABLE_SET
  "set",
#endif
  */
#ifndef DISABLE_STRING
  "string",
#endif
#ifndef DISABLE_VECTOR
  "vector",
#endif
};

////////////////////////////////////////////////////////////////////////////////
/* Functions */

// Reports whether the named module was compiled into this build.  A null
// name asserts in debug builds and yields false in release builds.
bool
cpr_module_exists(const char* const module_name) {
  assert(module_name != nullptr);

  if (module_name == nullptr) {
    return false; /* invalid input */
  }

  const unsigned count = sizeof(cpr_module_names) / sizeof(cpr_module_names[0]);
  for (unsigned i = 0; i < count; i++) {
    if (std::strcmp(cpr_module_names[i], module_name) == 0) {
      return true; /* found */
    }
  }

  return false; /* not found */
}
|
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+0+512-STG/model --tokenizer_name model-configs/1024-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+0+512-STG/512+0+512-pad-1 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function truncate_and_pad_first_half_full --eval_function last_element_eval |
#!/usr/bin/env bash
#
# Copyright (c) 2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.

# CI test stage: run the unit and/or functional test suites inside the
# per-host build directory, gated by the RUN_* environment toggles.

export LC_ALL=C.UTF-8

# BUGFIX: the original used `|| (echo ...; exit 1)` - the `exit 1` only left
# the subshell, so on a failed cd the script carried on running the tests
# from the wrong directory.  A `{ ...; }` group exits the script itself.
cd "build/consortium-$HOST" || { echo "could not enter distdir build/consortium-$HOST"; exit 1; }

if [ "$RUN_UNIT_TESTS" = "true" ]; then
  BEGIN_FOLD unit-tests
  DOCKER_EXEC LD_LIBRARY_PATH=$TRAVIS_BUILD_DIR/depends/$HOST/lib make $MAKEJOBS check VERBOSE=1
  END_FOLD
fi

if [ "$RUN_FUNCTIONAL_TESTS" = "true" ]; then
  BEGIN_FOLD functional-tests
  DOCKER_EXEC test/functional/test_runner.py --combinedlogslen=4000 ${TEST_RUNNER_EXTRA} --quiet
  END_FOLD
fi
|
#!/bin/bash
ocamlc -dparsetree draft.ml |
<filename>test/test_uri.py<gh_stars>10-100
"""Functional and representative tests for the URI datatype/representation."""
import pytest
from uri import Path
from uri.qso import SENTINEL
from uri.uri import URI
URI_COMPONENTS = [
('http://', dict(
relative = True,
scheme = 'http',
base = 'http://',
path = Path('.'),
)),
('https://', dict(
relative = True,
scheme = 'https',
base = 'https://',
path = Path('.'),
)),
('/foo', dict(
relative = True,
path = Path('/foo'),
base = '/foo',
summary = '/foo',
heirarchical = '/foo',
resource = '/foo',
)),
('http://user:pass@example.com/over/there?name=ferret#anchor', dict(
authority = 'user:pass@example.com',
fragment = 'anchor',
user = 'user',
username = 'user',
password = '<PASSWORD>',
heirarchical = 'user:pass@example.com/over/there',
host = 'example.com',
path = Path('/over/there'),
query = 'name=ferret',
scheme = 'http',
authentication = 'user:pass',
auth = 'user:pass',
base = 'http://user:pass@example.com/over/there',
summary = 'example.com/over/there',
relative = False,
resource = '/over/there?name=ferret#anchor',
)),
# From Wikipedia - https://en.wikipedia.org/wiki/Query_string
('http://example.com/over/there?name=ferret', dict(
authority = 'example.com',
heirarchical = 'example.com/over/there',
host = 'example.com',
path = Path('/over/there'),
query = 'name=ferret',
base = 'http://example.com/over/there',
scheme = 'http',
summary = 'example.com/over/there',
relative = False,
resource = '/over/there?name=ferret',
)),
('http://example.com/path/to/page?name=ferret&color=purple', dict(
authority = 'example.com',
heirarchical = 'example.com/path/to/page',
host = 'example.com',
path = Path('/path/to/page'),
query = 'name=ferret&color=purple',
scheme = 'http',
base = 'http://example.com/path/to/page',
summary = 'example.com/path/to/page',
relative = False,
resource = '/path/to/page?name=ferret&color=purple',
)),
# RFC 3986 (URI) - http://pretty-rfc.herokuapp.com/RFC3986
('ftp://ftp.is.co.za/rfc/rfc1808.txt', dict(
authority = 'ftp.is.co.za',
host = 'ftp.is.co.za',
path = Path('/rfc/rfc1808.txt'),
heirarchical = 'ftp.is.co.za/rfc/rfc1808.txt',
scheme = 'ftp',
base = 'ftp://ftp.is.co.za/rfc/rfc1808.txt',
summary = 'ftp.is.co.za/rfc/rfc1808.txt',
relative = False,
resource = '/rfc/rfc1808.txt',
)),
('ldap://[2001:db8::7]/c=GB?objectClass?one', dict(
authority = '[2001:db8::7]',
path = Path('/c=GB'),
scheme = 'ldap',
query = 'objectClass?one',
host = '2001:db8::7',
heirarchical = '[2001:db8::7]/c=GB',
base = 'ldap://[2001:db8::7]/c=GB',
summary = '[2001:db8::7]/c=GB',
relative = False,
resource = '/c=GB?objectClass?one',
)),
('http://www.ietf.org/rfc/rfc2396.txt', dict(
authority = 'www.ietf.org',
scheme = 'http',
host = 'www.ietf.org',
path = Path('/rfc/rfc2396.txt'),
heirarchical = 'www.ietf.org/rfc/rfc2396.txt',
base = 'http://www.ietf.org/rfc/rfc2396.txt',
summary = 'www.ietf.org/rfc/rfc2396.txt',
relative = False,
resource = '/rfc/rfc2396.txt',
)),
('mailto:<EMAIL>', dict(
scheme = 'mailto',
path = Path('<EMAIL>'),
heirarchical = '<EMAIL>',
summary = '<EMAIL>',
base = 'mailto:<EMAIL>',
relative = False,
resource = '<EMAIL>',
)),
('tel:+1-816-555-1212', dict(
scheme = 'tel',
path = Path('+1-816-555-1212'),
heirarchical = '+1-816-555-1212',
summary = '+1-816-555-1212',
base = 'tel:+1-816-555-1212',
relative = False,
resource = '+1-816-555-1212',
)),
('telnet://192.0.2.16:80/', dict(
port = 80,
scheme = 'telnet',
host = '192.0.2.16',
authority = '192.0.2.16:80',
path = Path('/'),
heirarchical = '192.0.2.16:80/',
summary = '192.0.2.16/',
base = 'telnet://192.0.2.16:80/',
relative = False,
resource = '/',
)),
('urn:oasis:names:specification:docbook:dtd:xml:4.1.2', dict(
scheme = 'urn',
path = Path('oasis:names:specification:docbook:dtd:xml:4.1.2'), # TODO
heirarchical = 'oasis:names:specification:docbook:dtd:xml:4.1.2',
summary = 'oasis:names:specification:docbook:dtd:xml:4.1.2',
base = 'urn:oasis:names:specification:docbook:dtd:xml:4.1.2',
relative = False,
resource = 'oasis:names:specification:docbook:dtd:xml:4.1.2',
)),
# IDNA (Internationalized Domain Name) Encoding
('https://xn--ls8h.la/', dict(
scheme = 'https',
path = Path('/'),
host = '💩.la',
authority = 'xn--ls8h.la',
heirarchical = 'xn--ls8h.la/',
summary = '💩.la/',
base = 'https://xn--ls8h.la/',
relative = False,
resource = '/',
))
]
# Post-process the expectation table: every entry learns its own source
# string, and query/host gain their qs/hostname alias keys so the aliased
# accessors are covered by the parametrised component test as well.
for _uri, _parts in URI_COMPONENTS:
    _parts['uri'] = _uri
    if 'query' in _parts: _parts['qs'] = _parts['query']
    if 'host' in _parts: _parts['hostname'] = _parts['host']
@pytest.fixture
def instance():
    """A fully-populated URI (credentials, query and fragment)."""
    return URI('http://user:pass@example.com/over/there?name=ferret#anchor')


@pytest.fixture
def empty():
    """A minimal URI without credentials, query or fragment."""
    return URI('http://example.com/over/there')
def test_wsgi_unpacking():
    """URI.from_wsgi should round-trip a WebOb request back to its URL."""
    webob = pytest.importorskip('webob')
    url = 'https://example.com/foo/bar?baz=27'
    request = webob.Request.blank(url)
    uri = URI.from_wsgi(request)
    assert str(uri) == url
@pytest.mark.parametrize('string,attributes', URI_COMPONENTS)
class TestURI:
    """Representative checks run against every entry of URI_COMPONENTS."""

    def test_truthiness(self, string, attributes):
        instance = URI(string)
        assert instance

    def test_identity(self, string, attributes):
        instance = URI(string)
        assert str(instance) == attributes['uri']

    def test_identity_bytes(self, string, attributes):
        instance = URI(string)
        assert bytes(instance) == attributes['uri'].encode('utf-8')

    def test_identity_comparison(self, string, attributes):
        instance = URI(string)
        assert instance == attributes['uri']

    def test_inverse_bad_comparison(self, string, attributes):
        instance = URI(string)
        assert instance != "fnord"

    def test_length(self, string, attributes):
        instance = URI(string)
        assert len(instance) == len(string)

    @pytest.mark.parametrize('component', URI.__all_parts__ | {'base', 'qs', 'summary', 'relative'})
    def test_component(self, string, attributes, component):
        # Components absent from the expectation table must be empty/unset.
        instance = URI(string)
        value = getattr(instance, component, SENTINEL)

        if component not in attributes:
            assert value in (None, SENTINEL, '')
            return

        assert value == attributes[component]
class TestURIBasics:
    """Behavioural checks on construction, division operators and assignment."""

    def test_uri_error(self):
        # Unknown keyword arguments are rejected.
        with pytest.raises(TypeError):
            URI(foo="bar")

    def test_empty(self):
        # A bare URI stringifies to '' and is falsy.
        instance = URI()
        assert str(instance) == ""
        assert not instance

    def test_html_representation(self, instance):
        # MarkupSafe escaping renders a URI as an anchor tag whose text is the
        # summary form (host + path, credentials/query/fragment omitted).
        markupsafe = pytest.importorskip('markupsafe')
        html = markupsafe.escape(instance)
        expect = '<a href="http://user:pass@example.com/over/there?name=ferret#anchor">example.com/over/there</a>'
        assert html == expect

    def test_protocol_relative_shortcut(self, instance):
        # `https // uri` swaps in the scheme, leaving everything else intact.
        https = URI("https://")
        instance = https // instance
        assert str(instance) == "https://user:pass@example.com/over/there?name=ferret#anchor"

    def test_rooted(self, instance):
        # Dividing by a rooted path replaces the whole path (query/fragment dropped).
        instance = instance / "/foo"
        assert str(instance) == "http://user:pass@example.com/foo"

    def test_relative(self, instance):
        # Dividing by a relative path resolves against the parent directory.
        instance = instance / "foo"
        assert str(instance) == "http://user:pass@example.com/over/foo"

    def test_relative_assignment(self, instance):
        instance /= "bar"
        assert str(instance) == "http://user:pass@example.com/over/bar"

    def test_resolution_by_uri(self, instance):
        # resolve() follows RFC-style reference resolution for both forms.
        assert str(instance.resolve('/baz')) == "http://user:pass@example.com/baz"
        assert str(instance.resolve('baz')) == "http://user:pass@example.com/over/baz"

    def test_resolution_overriding(self, instance):
        # Keyword arguments to resolve() override individual components.
        expect = "http://example.com/over/there?name=ferret#anchor"
        assert str(instance.resolve(user=None, password=None)) == expect

    def test_resolution_error(self, instance):
        # Unknown component names raise instead of being ignored.
        with pytest.raises(TypeError):
            instance.resolve(unknown="fnord")

    def test_qs_assignment(self):
        instance = URI("http://example.com")
        assert str(instance) == "http://example.com/"
        instance.qs = "foo=bar"
        assert str(instance) == "http://example.com/?foo=bar"

    def test_path_usage(self):
        # Constructing from a Path yields a file:// URI.
        path = Path("/foo/bar/baz")
        instance = URI(path)
        assert instance.scheme == 'file'
        assert str(instance) == "file:///foo/bar/baz"

    def test_group_assignment(self, empty):
        # Compound groups such as `authority` are read-only.
        with pytest.raises(TypeError):
            empty.authority = "bobdole.com"

    def test_protocol_assignment(self, empty):
        # Bytes are accepted and decoded on assignment.
        assert empty.scheme == 'http'
        empty.scheme = b'ftp'
        assert empty.scheme == 'ftp'

    def test_empty_protocol_assignment(self, empty):
        # Clearing the scheme produces a protocol-relative reference.
        assert empty.scheme == 'http'
        empty.scheme = None
        assert str(empty) == "//example.com/over/there"

    def test_bad_assignment(self, empty):
        # Unknown attributes may not be set (slots/immutable attribute set).
        with pytest.raises(AttributeError):
            empty.safe_uri = 'http://example.com'

    def test_rooted_path_authority_resolution(self):
        uri = URI('http://example.com/diz')
        uri.path = '/foo/bar'
        assert str(uri) == "http://example.com/foo/bar"

    def test_rootless_path_authority_error(self):
        # A relative path cannot be combined with an authority component.
        uri = URI('http://example.com')
        with pytest.raises(ValueError):
            uri.path = 'foo/bar'
class TestURIDictlike:
    """The query string is exposed through a mapping interface on the URI."""

    def test_get(self, instance):
        # Subscripting reads query-string parameters.
        assert instance['name'] == 'ferret'

    def test_get_authenticated(self, instance):
        # Slice syntax uri['user':'pass'] returns a NEW URI carrying those
        # credentials; the original instance is untouched.
        secure = instance['username':'password']
        assert instance is not secure
        assert secure.user == 'username'
        assert secure.password == 'password'
        assert str(secure) == 'http://username:password@example.com/over/there?name=ferret#anchor'

    def test_set_new(self, instance, empty):
        # Assigning a missing key appends a parameter in place.
        instance['foo'] = 'bar'
        assert str(instance) == 'http://user:pass@example.com/over/there?name=ferret&foo=bar#anchor'
        empty['bar'] = 'baz'
        assert str(empty) == 'http://example.com/over/there?bar=baz'

    def test_set_replace(self, instance):
        # Assigning an existing key replaces its value.
        instance['name'] = 'lemur'
        assert str(instance) == 'http://user:pass@example.com/over/there?name=lemur#anchor'

    def test_del(self, instance):
        instance['name']  # noqa-free sanity: key exists before deletion
        del instance['name']
        assert str(instance) == 'http://user:pass@example.com/over/there#anchor'

    def test_iter(self, instance):
        # Iteration yields raw "key=value" pairs.
        assert list(instance) == ["name=ferret"]

    def test_get_fail(self, instance, empty):
        # Missing parameters raise KeyError like a mapping.
        with pytest.raises(KeyError):
            instance['foo']
        with pytest.raises(KeyError):
            empty['name']

    def test_repr(self, instance, empty):
        # repr() masks the password (only the user remains in the output).
        assert repr(instance) == "URI('http://user@example.com/over/there?name=ferret#anchor')"
        assert repr(empty) == "URI('http://example.com/over/there')"
|
// Repository: samarth9201/Elevator-management-system
var Elevator = artifacts.require("./Elevator.sol");
module.exports = function(deployer) {
deployer.deploy(Elevator);
};
|
#!/bin/bash -x
# FVT helper: re-enroll an identity against a running fabric-cop server and
# exit with the server's status.
#
# Options:
#   -x <dir>    COP home directory (default: $HOME/cop)
#   -k <type>   key algorithm      (default: ecdsa)
#   -l <bits>   key length         (default: 256)
COP="$GOPATH/src/github.com/hyperledger/fabric-cop"
COPEXEC="$COP/bin/cop"
TESTDATA="$COP/testdata"
SCRIPTDIR="$COP/scripts"
CSR="$TESTDATA/csr.json"
HOST="http://localhost:8888"
RUNCONFIG="$TESTDATA/postgres.json"
INITCONFIG="$TESTDATA/csr_ecdsa256.json"
RC=0
while getopts "k:l:x:" option; do
   case "$option" in
      x) COP_HOME="$OPTARG" ;;
      k) KEYTYPE="$OPTARG" ;;
      l) KEYLEN="$OPTARG" ;;
   esac
done
: ${KEYTYPE="ecdsa"}
: ${KEYLEN="256"}
# COP_DEBUG was previously defaulted twice (before and after getopts); once
# after option parsing is sufficient since getopts never sets it.
: ${COP_DEBUG="false"}
test -z "$COP_HOME" && COP_HOME=$HOME/cop
CLIENTCERT="$COP_HOME/cert.pem"
CLIENTKEY="$COP_HOME/key.pem"
export COP_HOME
# NOTE(review): the "names" object below repeats the O/OU/L keys; duplicate
# keys in a JSON object are implementation-defined — confirm the server
# accepts (or merges) them before relying on more than the last value.
$COPEXEC client reenroll $HOST <(echo "{
  \"hosts\": [
      \"admin@fab-client.raleigh.ibm.com\",
      \"fab-client.raleigh.ibm.com\",
      \"127.0.0.2\"
  ],
  \"key\": {
      \"algo\": \"$KEYTYPE\",
      \"size\": $KEYLEN
  },
  \"names\": [
      {
        \"O\": \"Hyperledger\",
        \"O\": \"Fabric\",
        \"OU\": \"COP\",
        \"OU\": \"FVT\",
        \"STREET\": \"Miami Blvd.\",
        \"DC\": \"peer\",
        \"UID\": \"admin\",
        \"L\": \"Raleigh\",
        \"L\": \"RTP\",
        \"ST\": \"North Carolina\",
        \"C\": \"US\"
      }
  ]
}")
RC=$?
# Only dump credentials when debugging; the previous form executed the VALUE
# of COP_DEBUG as a command via $($COP_DEBUG), which is fragile.
# NOTE(review): printAuth is assumed to be provided by the caller's
# environment (fvt script utilities) — confirm it is exported.
[ "$COP_DEBUG" = "true" ] && printAuth $CLIENTCERT $CLIENTKEY
exit $RC
|
import math
class Circle:
    """Geometric circle characterised entirely by its radius."""

    def __init__(self, radius):
        """Remember the radius; no unit or sign checking is performed."""
        self.radius = radius

    def circumference(self):
        """Length of the circle's boundary (2 * pi * radius)."""
        perimeter = 2 * math.pi * self.radius
        return perimeter
import pandas as pd

# Load data from the orders table (CSV export in the working directory).
orders = pd.read_csv('orders.csv')

# Summary statistics (count/mean/std/min/quartiles/max) for numeric columns.
summary_stats = orders.describe()

# Write the statistics out as an HTML report; the context manager guarantees
# the file is closed even if rendering fails (the previous open/close pair
# leaked the handle on error).
with open('report.html', 'w') as report:
    report.write(summary_stats.to_html())
#!/bin/bash
MFX_REPO="https://github.com/lu-zero/mfx_dispatch.git"
MFX_COMMIT="7e4d221c36c630c1250b23a5dfa15657bc04c10c"
# Whether this component participates in the current FFmpeg build (always on).
ffbuild_enabled() {
    return 0
}
# Clone, configure and statically build the Intel mfx dispatcher inside the
# build container, installing into $FFBUILD_PREFIX.  Fails (status 1) on an
# unsupported target.
ffbuild_dockerbuild() {
    git-mini-clone "$MFX_REPO" "$MFX_COMMIT" mfx
    cd mfx
    autoreconf -i
    local myconf=(
        --prefix="$FFBUILD_PREFIX"
        --disable-shared
        --enable-static
        --with-pic
    )
    if [[ $TARGET == win* || $TARGET == linux* ]]; then
        myconf+=(
            --host="$FFBUILD_TOOLCHAIN"
        )
    else
        echo "Unknown target"
        # `return -1` is not a valid bash return status (0-255); use 1.
        return 1
    fi
    ./configure "${myconf[@]}"
    make -j4
    make install
}
# Flag(s) appended to FFmpeg's ./configure when this component is enabled.
ffbuild_configure() {
    echo --enable-libmfx
}
# Flag(s) appended to FFmpeg's ./configure when this component is disabled.
ffbuild_unconfigure() {
    echo --disable-libmfx
}
|
/**
 * Iterative factorial.
 *
 * @param int $n the number whose factorial is wanted; any value below 2
 *               (including 0 and negatives, as before) yields 1
 * @return int n!
 */
function factor($n) {
    $result = 1;
    for ($k = 2; $k <= $n; $k++) {
        $result *= $k;
    }
    return $result;
}

printf("Factorial of 6 = %d", factor(6));
def residual_block(input_tensor, filters3, kernel_size, stage, block, strides, is_training, reuse):
    """Two-branch residual unit: conv+BN main path added to a conv+BN
    projection shortcut, followed by ReLU.

    Args:
        input_tensor: 4-D input feature map (NHWC assumed — TODO confirm).
        filters3: output channel count for both branches.
        kernel_size: spatial size of the (square) convolution kernels.
        stage, block: integers used only to build unique layer names.
        strides: stride applied to BOTH branches so the residual add sees
            identical spatial shapes.
        is_training: batch-norm training/inference flag.
        reuse: variable-scope reuse flag for tf.layers.

    Returns:
        The activated output tensor, relu(shortcut + main).
    """
    conv_name_3 = 'conv' + str(stage) + '_' + str(block) + '_1x1_increase'
    bn_name_3 = 'bn' + str(stage) + '_' + str(block) + '_1x1_increase'
    # BUG FIX: the main branch previously omitted `strides`, so for any
    # strides != 1 its output shape differed from the strided shortcut's and
    # tf.add failed; apply the same stride on both branches.
    x = tf.layers.conv2d(input_tensor, filters3, (kernel_size, kernel_size), strides=strides,
                         use_bias=False, padding='SAME',
                         kernel_initializer=kernel_initializer, name=conv_name_3, reuse=reuse)
    x = tf.layers.batch_normalization(x, training=is_training, name=bn_name_3, reuse=reuse)
    conv_name_4 = 'conv' + str(stage) + '_' + str(block) + '_1x1_shortcut'
    bn_name_4 = 'bn' + str(stage) + '_' + str(block) + '_1x1_shortcut'
    shortcut = tf.layers.conv2d(input_tensor, filters3, (kernel_size, kernel_size), use_bias=False,
                                strides=strides, padding='SAME',
                                kernel_initializer=kernel_initializer, name=conv_name_4, reuse=reuse)
    shortcut = tf.layers.batch_normalization(shortcut, training=is_training, name=bn_name_4, reuse=reuse)
    x = tf.nn.relu(tf.add(shortcut, x))
    return x
//Blockchain based Credit Scoring
/*
Name ID
<NAME> - 2017A7PS1482H
<NAME> - 2017A7PS0065H
<NAME> - 2017A7PS0086H
<NAME> - 2015A7PS0010H
*/
#include <iostream>
#include <functional>
#include <string>
#include <sstream>
#include <stdlib.h>
#include <time.h>
#include <vector>
#include <iomanip>
#include "stdafx.h"
#include <mysql.h>
#include "mysql_connection.h"
#define SERVER "localhost"
#define USER "user1"
#define PASSWORD "<PASSWORD>"
#define DATABASE "crypto"
#define PORT 3306
using namespace std;
int qstate;
/*
Database tables used:
1. Lender table named as lender
2. Borrower table named as borrower
3. Transactions table named as block
4. Borrower-requests table named as requests
*/
// Generic stringifier: stream any value supporting operator<< into a
// std::string (local stand-in for std::to_string that also accepts
// non-arithmetic types).
template <typename T>
std::string to_string(T value)
{
    std::ostringstream stream;
    stream << value;
    return stream.str();
}
// Structure to store one pending borrower-request: who is asking and how
// much they want to borrow (mirrors a row of the `requests` table).
struct Borrower
{
    std::string username;   // borrower's login name
    long int amount;        // requested loan amount
};
// Interactive Zero-Knowledge-Proof that the borrower knows the credit score
// behind the published value y = (g^score) mod p without revealing the score.
// Runs 6-10 challenge/response rounds; returns true only if every round checks.
//
// NOTE(review): the SELECT below is assembled by concatenating `username`
// (SQL-injection risk), DB credentials are hard-coded, and neither `res` nor
// `conn` is ever released (mysql_free_result/mysql_close) — flagged only,
// since this is a documentation-only pass.
bool verifyTransaction(std::string username)
{
    std::cout << "\n\nVerifying Transaction using Zero Knowledge Proof";
    int g = 0, p = 0, y = 0, r, h, b, rnd, s, i;   // `r` is declared but never read
    bool val = true;
    /*
    Get g,p,y from borrower table of corresponding username
    */
    MYSQL* conn;
    MYSQL_ROW row;
    MYSQL_RES* res;
    conn = mysql_init(0);
    conn = mysql_real_connect(conn, "localhost", "user1", "Ati1283", "crypto", 3306, NULL, 0);
    if (conn) {
        string query11 = "Select g,p,y from borrower where username='" + username + "'";
        const char* q11 = query11.c_str();
        qstate = mysql_query(conn, q11);
        if (!qstate) {
            res = mysql_store_result(conn);
            while (row = mysql_fetch_row(res)) {
                g = stoi(row[0]);
                p = stoi(row[1]);
                y = stoi(row[2]);
            }
        }
    }
    else {
        puts("Connection to database has failed");
    }
    srand(time(0));
    //Choosing a random no. of rounds from 6 to 10
    rnd = rand() % 5 + 6;
    std::cout << "\n\ng value: " << g << "\np value: " << p << "\ny value: " << y << "\nNumber of rounds: " << rnd;
    //Zero Knowledge Proof Algorithm
    for (i = 0; i < rnd; i++)
    {
        std::cout << "\n\nRound " << i + 1 << " of " << rnd << ":";
        //Step 1: prover commits h = g^r mod p for a secret random r
        std::cout << "\nChoose a random number r where 0 <= r < " << p - 1 << ".";
        std::cout << "\nValue of h ( h = (" << g << "^r) mod " << p << " ): ";
        std::cin >> h;
        //Step 2: verifier issues a random 0/1 challenge bit
        b = rand() % 2;
        std::cout << "\nYour random bit b value: " << b;
        //Step 3: prover answers s = (r + b*score) mod (p-1)
        std::cout << "\nValue of s ( s = (r + " << b << "*credit score) mod " << p - 1 << " ): ";
        std::cin >> s;
        //Step 4: verifier checks g^s mod p against h (b=0) or (h*y) mod p (b=1)
        int lhs = 1, rhs = 1, j;
        // Modular exponentiation by repeated multiplication: lhs = g^s mod p.
        // NOTE(review): `lhs *= g` can overflow int for large g/p — confirm
        // the stored parameters are small enough.
        for (j = 0; j < s; j++)
        {
            lhs *= g;
            if (lhs >= g)
            {
                lhs %= p;
            }
        }
        if (b == 0)
        {
            rhs = h % p;
        }
        else
        {
            rhs = (h * y) % p;
        }
        //ZKP failing condition: one failed round invalidates the whole proof
        if (lhs != rhs)
        {
            val = false;
        }
    }
    return val;
}
// Proof-of-Work gate: generate a random 7-digit string, show the user its
// std::hash value, and ask them for a preimage.  Returns true only when the
// user's answer matches the generated input (`username` is currently unused
// but kept for interface stability).
//
// BUG FIX: an unconditional `return true;` sat before the comparison, so the
// check was dead code and mining always "succeeded"; the stray return and
// the unused `val` variable are removed, making the comparison effective.
bool mineBlock(std::string username)
{
    std::cout << "\n\nProof of Work";
    std::string input, hash_input, user_input;
    int i;
    srand(time(0));
    //Generating a 7-digit random input
    for (i = 0; i < 7; i++)
    {
        char r = rand() % 10 + 48;   // ASCII '0'..'9'
        input.push_back(r);
    }
    //Generating hash value of the input
    std::hash<std::string> hash_string;
    hash_input = to_string(hash_string(input));
    //Proof of Work: ask the user to invert the hash and recover the input
    std::cout << "\nHash value: " << hash_input;
    std::cout << "\nDecrypt the hash value and obtain the input.\nInput: ";
    std::cin >> user_input;
    return input == user_input;
}
//Creating and adding block to the blockchain: consume the matching entry in
//the requests table, then append a (lender, borrower, amount) block whose
//hash chains to the previous block's hash; a placeholder row holding only
//prev_hash is pre-inserted for the NEXT block.
//
// NOTE(review): all queries are string-concatenated (SQL-injection risk),
// credentials are hard-coded, result sets/connections are never freed, `n`
// stays uninitialized if the COUNT query fails, and the second code path
// reuses the outer `conn` even when the first connect failed — flagged only,
// since this is a documentation-only pass.
void createBlock(std::string lender, std::string borrower, long int amount)
{
    /*
    remove the entry in borrower-request table with corresponding borrower and amount
    */
    MYSQL* conn;
    MYSQL_ROW row;
    MYSQL_RES* res;
    conn = mysql_init(0);
    conn = mysql_real_connect(conn, "localhost", "user1", "Ati1283", "crypto", 3306, NULL, 0);
    if (conn) {
        string query4 = "Delete from requests where (borrower_uid='" + borrower + "' and amount=" + to_string(amount) + ")";
        const char* q4 = query4.c_str();
        qstate = mysql_query(conn, q4);
    }
    else {
        puts("Connection to database has failed");
    }
    //Deleted the selected entry from requests table
    //getting number of rows already in blockchain
    int n;
    string query14 = "Select count(*) from block";
    const char* q14 = query14.c_str();
    qstate = mysql_query(conn, q14);
    if (!qstate) {
        res = mysql_store_result(conn);
        while (row = mysql_fetch_row(res)) {
            n = stoi(row[0]);
        }
    }
    if (n == 0)
    {
        // Genesis block: previous hash is the fixed seed "0000000".
        std::string hash_new;
        std::string prev_hash = "0000000";
        std::hash<std::string> hash_string;
        hash_new = to_string(hash_string(prev_hash));
        /* Adding first block in database*/
        MYSQL* conn;      // shadows the outer connection (opens a second one)
        MYSQL_ROW row;
        MYSQL_RES* res;
        conn = mysql_init(0);
        conn = mysql_real_connect(conn, "localhost", "user1", "Ati1283", "crypto", 3306, NULL, 0);
        if (conn) {
            string query5 = "INSERT into block values('" + lender + "','" + borrower + "'," + to_string(amount) + ",'" + hash_new + "','" + prev_hash + "')";
            const char* q5 = query5.c_str();
            qstate = mysql_query(conn, q5);
            //another query for storing hash_new into prev_hash of next tuple
            string query12 = "INSERT into block(prev_hash) values('" + hash_new + "')";
            const char* q12 = query12.c_str();
            qstate = mysql_query(conn, q12);
        }
        else {
            puts("Connection to database has failed");
        }
        //Added first block in database
    }
    else
    {
        // Non-genesis block: the placeholder row (lender_uid='undefined',
        // NULL in practice — see query below) carries the hash to chain from.
        std::string prev_hash;
        MYSQL* conn;
        MYSQL_ROW row;
        MYSQL_RES* res;
        conn = mysql_init(0);
        conn = mysql_real_connect(conn, "localhost", "user1", "Ati1283", "crypto", 3306, NULL, 0);
        if (conn) {
            string query13 = "Select prev_hash from block where lender_uid='undefined'";
            const char* q13 = query13.c_str();
            qstate = mysql_query(conn, q13);
            if (!qstate) {
                res = mysql_store_result(conn);
                while (row = mysql_fetch_row(res)) {
                    prev_hash = row[0];
                }
            }
            /*
            add lender, borrower, amount in transactions table
            */
            //Adding the block in database: fill in the placeholder row and
            //insert a fresh placeholder carrying this block's hash.
            std::string hash_new;
            std::hash<std::string> hash_string;
            hash_new = to_string(hash_string(prev_hash));
            string query6 = "UPDATE block SET lender_uid='" + lender + "',borrower_uid='" + borrower + "',amount=" + to_string(amount) + ",hash='" + hash_new + "' where prev_hash='" + prev_hash + "'";
            const char* q6 = query6.c_str();
            qstate = mysql_query(conn, q6);
            //another query for storing hash_new into prev_hash of next tuple
            string query17 = "INSERT into block(prev_hash) values('" + hash_new + "')";
            const char* q17 = query17.c_str();
            qstate = mysql_query(conn, q17);
        }
        else {
            puts("Connection to database has failed");
        }
        //Added block in database
    }
}
// Print the transaction history of a user: opt == 0 lists the blocks where
// `username` was the lender, any other value lists those where they were the
// borrower.  Output is a two-column table (counterparty, amount).
//
// NOTE(review): queries are string-concatenated (SQL-injection risk), `n`
// stays uninitialized if the query fails, results hold at most 100 rows, and
// connections/result sets are never released — flagged only (doc-only pass).
void viewUser(std::string username, int opt)
{
    if (opt == 0)
    {
        int n;
        string lender_transactions[100];
        int amount[100];
        //fetch lender previous transactions
        MYSQL* conn;
        MYSQL_ROW row;
        MYSQL_RES* res;
        conn = mysql_init(0);
        conn = mysql_real_connect(conn, "localhost", "user1", "Ati1283", "crypto", 3306, NULL, 0);
        if (conn) {
            string query15 = "select borrower_uid,amount from block where lender_uid='" + username + "'";
            const char* q15 = query15.c_str();
            qstate = mysql_query(conn, q15);
            if (!qstate) {
                res = mysql_store_result(conn);
                int i = 0;
                while (row = mysql_fetch_row(res)) {
                    lender_transactions[i] = row[0];
                    amount[i] = stoi(row[1]);
                    i++;
                }
                n = i;
            }
        }
        else {
            puts("Connection to database has failed");
        }
        //fetched lender previous transactions
        //Display all transactions (setw arithmetic pads the columns)
        cout << " Borrower_username " << "Amount\n";
        for (int i = 0; i < n; i++) {
            // printf("%d. %s %d\n", i + 1, lender_transactions[i], amount[i]);
            cout << i + 1 << ". " << setfill(' ') << setw(4 - (to_string(i + 2)).size()) << lender_transactions[i] << setfill(' ') << setw(29 - lender_transactions[i].size()) << amount[i] << '\n';
        }
    }
    else
    {
        int n;
        string borrower_transactions[100];
        int amount[100];
        //fetch borrower previous transactions
        MYSQL* conn;
        MYSQL_ROW row;
        MYSQL_RES* res;
        conn = mysql_init(0);
        conn = mysql_real_connect(conn, "localhost", "user1", "Ati1283", "crypto", 3306, NULL, 0);
        if (conn) {
            string query15 = "select lender_uid,amount from block where borrower_uid='" + username + "'";
            const char* q15 = query15.c_str();
            qstate = mysql_query(conn, q15);
            if (!qstate) {
                res = mysql_store_result(conn);
                int i = 0;
                while (row = mysql_fetch_row(res)) {
                    borrower_transactions[i] = row[0];
                    amount[i] = stoi(row[1]);
                    i++;
                }
                n = i;
            }
        }
        else {
            puts("Connection to database has failed");
        }
        //fetched borrower previous transactions
        //Display all transactions
        cout << " Lender_username " << "Amount\n";
        for (int i = 0; i < n; i++) {
            cout << i + 1 << ". " << setfill(' ') << setw(6 - (to_string(i + 2)).size()) << borrower_transactions[i] << setfill(' ') << setw(27 - borrower_transactions[i].size()) << amount[i] << '\n';
        }
    }
}
// Console entry point: role selection (lender/borrower), login or
// registration against MySQL, then the role-specific menu (approve requests /
// view history for lenders; request a loan via ZKP + PoW / view history for
// borrowers).
//
// BUG FIX: the borrower login compared `password` against a broken token
// instead of the fetched `db_pass` (the lender branch already compares
// against db_pass); restored the comparison.  Unused locals (code2, g/p/f in
// the lender-register branch, unused row/res handles) were removed.
//
// NOTE(review): all queries are string-concatenated (SQL-injection risk),
// passwords are stored and compared in plain text, and no bounds check is
// performed on the request index `code4` — flagged for follow-up.
int main(int argc, char** argv)
{
    int code;
    std::string username, password;
    std::cout << "Blockchain Loan Interface\n\n";
    std::cout << "1. Lender\n2. Borrower\nChoose code: ";
    std::cin >> code;
    if (code == 1)
    {
        int code1;
        std::cout << "\n1. Login\n2. Register\nChoose code: ";
        std::cin >> code1;
        if (code1 == 1)
        {
            std::cout << "\nLogin";
            std::cout << "Username:\n";
            std::cin.ignore();
            getline(std::cin, username);
            std::cout << "Password:\n";
            getline(std::cin, password);
            std::string db_pass;
            /*
            search through lender table and find table.user==username.
            store the corresponding password in db_pass.
            */
            MYSQL* conn;
            MYSQL_ROW row;
            MYSQL_RES* res;
            conn = mysql_init(0);
            conn = mysql_real_connect(conn, "localhost", "user1", "Ati1283", "crypto", 3306, NULL, 0);
            if (conn) {
                string query1 = "Select password from lender where username ='" + username + "'";
                const char* q1 = query1.c_str();
                qstate = mysql_query(conn, q1);
                if (!qstate) {
                    res = mysql_store_result(conn);
                    while (row = mysql_fetch_row(res)) {
                        db_pass = row[0];
                    }
                }
            }
            else {
                puts("Connection to database has failed");
            }
            if (password == db_pass)
            {
                std::cout << "\n ***Login Successful***\n";
                int code3;
                std::cout << "\n1. Check requests\n2. View previous transactions\nChoose code: ";
                std::cin >> code3;
                if (code3 == 1)
                {
                    int n = 0, i;
                    Borrower b[100];
                    /*
                    store no. of entries in borrower-request table in n
                    get all the entries from borrower-request table and store in b array
                    */
                    MYSQL* conn;
                    MYSQL_ROW row;
                    MYSQL_RES* res;
                    conn = mysql_init(0);
                    conn = mysql_real_connect(conn, "localhost", "user1", "Ati1283", "crypto", 3306, NULL, 0);
                    if (conn) {
                        string query2 = "Select count(*) from requests";
                        const char* q2 = query2.c_str();
                        qstate = mysql_query(conn, q2);
                        if (!qstate) {
                            res = mysql_store_result(conn);
                            while (row = mysql_fetch_row(res)) {
                                n = stoi(row[0]);
                            }
                        }
                        string query3 = "Select * from requests";
                        const char* q3 = query3.c_str();
                        qstate = mysql_query(conn, q3);
                        if (!qstate) {
                            res = mysql_store_result(conn);
                            int i = 0;
                            while (row = mysql_fetch_row(res)) {
                                b[i].username = row[0];
                                b[i].amount = stoi(row[1]);
                                i++;
                            }
                        }
                    }
                    else {
                        puts("Connection to database has failed");
                    }
                    //gathered information from requests table;
                    for (i = 0; i < n; i++)
                    {
                        std::cout << "\n" << i + 1 << ". Borrower: " << b[i].username << " Request: " << b[i].amount;
                    }
                    std::cout << "\n";
                    std::cout << "\nChoose code: ";
                    int code4;
                    std::cin >> code4;
                    //A new block is created for the transaction between the
                    //lender and the chosen borrower (code4 is 1-based).
                    createBlock(username, b[code4 - 1].username, b[code4 - 1].amount);
                    std::cout << "\nLoan Sanctioned!\nBlock Created!";
                }
                else if (code3 == 2)
                {
                    //Second argument 0 denotes lender's previous transactions
                    viewUser(username, 0);
                }
                else
                {
                    std::cout << "\nError";
                }
            }
            else
            {
                std::cout << "\nLogin failed";
            }
        }
        else if (code1 == 2)
        {
            std::cout << "\nRegister";
            std::cout << "\n\nUsername:\n";
            std::cin.ignore();
            getline(std::cin, username);
            std::cout << "Password:\n";
            getline(std::cin, password);
            /*
            add username, password in lender table
            */
            MYSQL* conn;
            conn = mysql_init(0);
            conn = mysql_real_connect(conn, "localhost", "user1", "Ati1283", "crypto", 3306, NULL, 0);
            if (conn) {
                string query = "Insert into lender values('" + username + "','" + password + "')";
                const char* q = query.c_str();
                qstate = mysql_query(conn, q);
            }
            else {
                puts("Connection to database has failed");
            }
            std::cout << "\nSuccessfully Registered!";
        }
        else
        {
            std::cout << "\nError";
        }
    }
    else if (code == 2)
    {
        int code1;
        std::cout << "\n1. Login\n2. Register\nChoose code: ";
        std::cin >> code1;
        if (code1 == 1)
        {
            std::cout << "\nLogin";
            std::cout << "Username:\n";
            std::cin.ignore();
            getline(std::cin, username);
            std::cout << "Password:\n";
            getline(std::cin, password);
            std::string db_pass;
            /*
            search through borrower table and find table.user==username.
            store the corresponding password in db_pass.
            */
            MYSQL* conn;
            MYSQL_ROW row;
            MYSQL_RES* res;
            conn = mysql_init(0);
            conn = mysql_real_connect(conn, "localhost", "user1", "Ati1283", "crypto", 3306, NULL, 0);
            if (conn) {
                string query = "Select password from borrower where username ='" + username + "'";
                const char* q = query.c_str();
                qstate = mysql_query(conn, q);
                if (!qstate) {
                    res = mysql_store_result(conn);
                    while (row = mysql_fetch_row(res)) {
                        db_pass = row[0];
                        break;
                    }
                }
            }
            else {
                puts("Connection to database has failed");
            }
            // BUG FIX: compare against the password fetched from the DB,
            // mirroring the lender branch above.
            if (password == db_pass)
            {
                int code3;
                std::cout << "\n ***LOGIN SUCCESSFUL***\n";
                std::cout << "\n1. Ask new loan\n2. View previous transactions\nChoose code: ";
                std::cin >> code3;
                if (code3 == 1)
                {
                    //verifyTransaction returns false if ZKP fails, true if otherwise
                    bool val = verifyTransaction(username);
                    if (!val)
                    {
                        std::cout << "\nSecurity Breach. Request not created.";
                    }
                    else
                    {
                        std::cout << "\nCredit score verified successfully through Zero Knowledge Proof!";
                        //mineBlock returns false if POW is not done successfully, true if otherwise
                        bool val1 = mineBlock(username);
                        if (!val1)
                        {
                            std::cout << "\nProof of Work not done successfully. Request not created.";
                        }
                        else
                        {
                            std::cout << "\nProof of Work done successfully!";
                            int amount;
                            std::cout << "\n\nLoan request amount: ";
                            std::cin >> amount;
                            /*
                            add username and amount in borrower-request table
                            */
                            MYSQL* conn;
                            conn = mysql_init(0);
                            conn = mysql_real_connect(conn, "localhost", "user1", "Ati1283", "crypto", 3306, NULL, 0);
                            if (conn) {
                                string query = "Insert into requests values('" + username + "'," + to_string(amount) + ")";
                                const char* q = query.c_str();
                                qstate = mysql_query(conn, q);
                            }
                            else {
                                puts("Connection to database has failed");
                            }
                            std::cout << "\nRequest created successfully!";
                            std::cout << "\nYou have done all the work to successfully mine a block! Your block will be added if a lender approves your request and transacts with you!";
                        }
                    }
                }
                else if (code3 == 2)
                {
                    //Second argument 1 denotes borrower's previous transactions
                    viewUser(username, 1);
                }
                else
                {
                    std::cout << "\nError";
                }
            }
            else
            {
                std::cout << "\nLogin failed";
            }
        }
        else if (code1 == 2)
        {
            int g, p, y;
            std::cout << "\nRegister";
            std::cout << "\n\nUsername:\n";
            std::cin.ignore();
            getline(std::cin, username);
            std::cout << "Password:\n";
            getline(std::cin, password);
            // ZKP public parameters for this borrower, computed by the user.
            std::cout << "g of y function of credit score: ";
            std::cin >> g;
            std::cout << "p of y function of credit score: ";
            std::cin >> p;
            std::cout << "y function value of credit score ( y = (" << g << "^credit score) mod " << p << " ): ";
            std::cin >> y;
            /*
            add username, password, g, p, y in borrower database
            */
            MYSQL* conn;
            conn = mysql_init(0);
            conn = mysql_real_connect(conn, "localhost", "user1", "Ati1283", "crypto", 3306, NULL, 0);
            if (conn) {
                string query = "Insert into borrower values('" + username + "','" + password + "'," + to_string(g) + "," + to_string(p) + "," + to_string(y) + ")";
                const char* q = query.c_str();
                qstate = mysql_query(conn, q);
            }
            else {
                puts("Connection to database has failed");
            }
            std::cout << "\nSuccessfully Registered!";
        }
        else
        {
            std::cout << "\nError";
        }
    }
    else
    {
        std::cout << "\nError";
    }
    return 0;
}
|
from model import Generator_DUSENET
from model import Discriminator_DC
from torch.autograd import Variable
from torchvision.utils import save_image
import torch
import torch.nn.functional as F
import nibabel as nib
from util.util import *
import numpy as np
import os
import time
import datetime
from tqdm import tqdm
import csv
class Solver(object):
"""Solver for training and testing StarGAN."""
    def __init__(self, pet3_train_loader,
                 pet3_test_loader1, pet3_test_loader2,
                 pet3_test_loader3, pet3_test_loader4,
                 pet3_test_loader5, pet3_test_loader6,
                 config):
        """Initialize configurations.

        Copies every hyper-parameter the solver needs out of `config`, stores
        the PET3 train loader and the six test loaders, then builds the
        networks (and the tensorboard logger when enabled).
        """
        self.config = config
        # Data loader.
        self.pet3_train_loader = pet3_train_loader
        self.pet3_test_loader1 = pet3_test_loader1
        self.pet3_test_loader2 = pet3_test_loader2
        self.pet3_test_loader3 = pet3_test_loader3
        self.pet3_test_loader4 = pet3_test_loader4
        self.pet3_test_loader5 = pet3_test_loader5
        self.pet3_test_loader6 = pet3_test_loader6
        # Model configurations.
        self.c_dim = config.c_dim
        # Patch size differs between training and test runs.
        self.patch_size = config.patch_size_train if config.mode == 'train' else config.patch_size_test
        self.g = config.g
        self.d = config.d
        self.g_conv_dim = config.g_conv_dim
        self.d_conv_dim = config.d_conv_dim
        self.g_repeat_num = config.g_repeat_num
        self.d_repeat_num = config.d_repeat_num
        self.lambda_cls = config.lambda_cls
        self.lambda_rec = config.lambda_rec
        self.lambda_pair = config.lambda_pair
        self.lambda_gp = config.lambda_gp
        self.use_MR = config.use_MR
        # Training configurations.
        self.dataset = config.dataset
        self.batch_size = config.batch_size
        self.num_iters = config.num_iters
        self.num_iters_decay = config.num_iters_decay
        self.g_lr = config.g_lr
        self.d_lr = config.d_lr
        self.n_critic = config.n_critic
        self.beta1 = config.beta1
        self.beta2 = config.beta2
        self.resume_iters = config.resume_iters
        # Test configurations.
        self.test_iters = config.test_iters
        # Miscellaneous.
        self.use_tensorboard = config.use_tensorboard
        # Fall back to CPU automatically when CUDA is unavailable.
        self.device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
        # Directories.
        self.log_dir = config.log_dir
        self.sample_dir = config.sample_dir
        self.model_save_dir = config.model_save_dir
        self.result_dir = config.result_dir
        # Step size.
        self.log_step = config.log_step
        self.sample_step = config.sample_step
        self.validate_step = config.validate_step
        self.model_save_step = config.model_save_step
        self.lr_update_step = config.lr_update_step
        # Build the model and tensorboard.
        self.build_model()
        if self.use_tensorboard:
            self.build_tensorboard()
    def build_model(self):
        """Create a generator and a discriminator.

        Instantiates the G/D variants selected in the config (only the
        'DUSENET' generator and 'DC' discriminator are handled here), one
        Adam optimizer per network, prints both networks, and moves them to
        ``self.device``.
        """
        if self.dataset in ['PET3']:
            input_dim = 1
            # An extra input channel carries the MR image when use_MR is set.
            if self.use_MR:
                input_dim = input_dim + 1
            if self.g == 'DUSENET':
                self.G = Generator_DUSENET(input_dim, self.g_conv_dim, self.c_dim, self.g_repeat_num)
            if self.d == 'DC':
                self.D = Discriminator_DC(self.patch_size[0], self.d_conv_dim, self.c_dim, self.d_repeat_num)
        # NOTE(review): self.G / self.D stay undefined if no branch above
        # matched the config — confirm the config always selects DUSENET/DC.
        self.g_optimizer = torch.optim.Adam(self.G.parameters(), self.g_lr, [self.beta1, self.beta2])
        self.d_optimizer = torch.optim.Adam(self.D.parameters(), self.d_lr, [self.beta1, self.beta2])
        self.print_network(self.G, 'G')
        self.print_network(self.D, 'D')
        self.G.to(self.device)
        self.D.to(self.device)
def print_network(self, model, name):
"""Print out the network information."""
num_params = 0
for p in model.parameters():
num_params += p.numel()
print(model)
print(name)
print("The number of parameters: {}".format(num_params))
    def restore_model(self, resume_iters):
        """Restore the trained generator and discriminator.

        Loads only G's weights from '<model_save_dir>/<step>-G.ckpt'; the D
        restore is intentionally left commented out.
        """
        print('Loading the trained models from step {}...'.format(resume_iters))
        G_path = os.path.join(self.model_save_dir, '{}-G.ckpt'.format(resume_iters))
        # D_path = os.path.join(self.model_save_dir, '{}-D.ckpt'.format(resume_iters))
        # map_location keeps CPU-only machines able to read GPU checkpoints.
        self.G.load_state_dict(torch.load(G_path, map_location=lambda storage, loc: storage))
        # self.D.load_state_dict(torch.load(D_path, map_location=lambda storage, loc: storage))
    def build_tensorboard(self):
        """Build a tensorboard logger (project-local Logger wrapper)."""
        # Imported lazily so tensorboard is only required when enabled.
        from logger import Logger
        self.logger = Logger(self.log_dir)
def update_lr(self, g_lr, d_lr):
"""Decay learning rates of the generator and discriminator."""
for param_group in self.g_optimizer.param_groups:
param_group['lr'] = g_lr
for param_group in self.d_optimizer.param_groups:
param_group['lr'] = d_lr
def reset_grad(self):
"""Reset the gradient buffers."""
self.g_optimizer.zero_grad()
self.d_optimizer.zero_grad()
def denorm(self, x):
"""Convert the range from [-1, 1] to [0, 1]."""
out = (x + 1) / 2
return out.clamp_(0, 1)
def gradient_penalty(self, y, x):
"""Compute gradient penalty: (L2_norm(dy/dx) - 1)**2."""
weight = torch.ones(y.size()).to(self.device)
dydx = torch.autograd.grad(outputs=y,
inputs=x,
grad_outputs=weight,
retain_graph=True,
create_graph=True,
only_inputs=True)[0]
dydx = dydx.view(dydx.size(0), -1)
dydx_l2norm = torch.sqrt(torch.sum(dydx**2, dim=1))
return torch.mean((dydx_l2norm-1)**2)
def label2onehot(self, labels, dim):
"""Convert label indices to one-hot vectors."""
batch_size = labels.size(0)
out = torch.zeros(batch_size, dim)
out[np.arange(batch_size), labels.long()] = 1
return out
def create_labels(self, c_org, c_dim=3):
"""Generate target domain labels for debugging and testing."""
c_trg_list = []
for i in range(c_dim):
c_trg = self.label2onehot(torch.ones(c_org.size(0))*i, c_dim)
c_trg_list.append(c_trg.to(self.device))
return c_trg_list
def classification_loss(self, logit, target):
"""Compute binary or softmax cross entropy loss."""
return F.cross_entropy(logit, target)
def train(self):
    """Train StarGAN within a single dataset.

    Alternates WGAN-GP discriminator updates with (every ``n_critic``
    steps) generator updates, then periodically: logs losses, saves
    checkpoints, decays the learning rates, writes sample translations,
    and validates on the six held-out domain-pair loaders.
    """
    # Set data loader.
    # NOTE(review): loaders are only bound when dataset == 'PET3'; any other
    # value raises NameError below -- confirm this is intended.
    if self.dataset == 'PET3':
        data_train_loader = self.pet3_train_loader
        data_test_loader1 = self.pet3_test_loader1
        data_test_loader2 = self.pet3_test_loader2
        data_test_loader3 = self.pet3_test_loader3
        data_test_loader4 = self.pet3_test_loader4
        data_test_loader5 = self.pet3_test_loader5
        data_test_loader6 = self.pet3_test_loader6

    # Fetch fixed inputs for debugging (kept constant across the run so the
    # periodic sample images are comparable).
    data_train_iter = iter(data_train_loader)
    x_org, x_trg, c_org, c_trg, x_MR = next(data_train_iter)
    x_fixed_org = x_org
    x_fixed_org = x_fixed_org.to(self.device)
    x_fixed_MR = x_MR
    x_fixed_MR = x_fixed_MR.to(self.device)
    c_fixed_list = self.create_labels(c_org, self.c_dim)

    # Learning rate cache for decaying.
    g_lr = self.g_lr
    d_lr = self.d_lr

    # Start training from scratch or resume training.
    start_iters = 0
    if self.resume_iters:
        start_iters = self.resume_iters
        self.restore_model(self.resume_iters)

    # Start training.
    print('Start training...')
    start_time = time.time()
    for i in range(start_iters, self.num_iters):

        # =============================================================== #
        # 1. Preprocess input data                                        #
        # =============================================================== #
        # Fetch real images and labels.
        # NOTE(review): a fresh iterator is built every step and only its
        # first batch consumed. With a shuffling DataLoader this draws a
        # random batch but re-creates workers each iteration (slow); with
        # shuffle=False it would train on the same batch forever -- confirm
        # the loader shuffles.
        data_train_iter = iter(data_train_loader)
        x_real_org, x_real_trg, label_org, label_trg, x_MR = next(data_train_iter)

        # Generate original and target domain labels randomly.
        if self.dataset == 'PET3':
            c_org = self.label2onehot(label_org, self.c_dim)
            c_trg = self.label2onehot(label_trg, self.c_dim)

        x_real_org = x_real_org.to(self.device)    # Input images.
        x_real_trg = x_real_trg.to(self.device)
        x_MR = x_MR.to(self.device)
        c_org = c_org.to(self.device)              # Original domain labels.
        c_trg = c_trg.to(self.device)              # Target domain labels.
        label_org = label_org.to(self.device)      # Labels for computing domain classification loss.
        label_trg = label_trg.to(self.device)      # Labels for computing domain classification loss.

        # NOTE(review): the discriminator is only trained (and `loss` only
        # created) when self.d == 'DC'; for other settings the logging code
        # below would hit an undefined `loss` -- confirm.
        if self.d == 'DC':
            # =========================================================== #
            # 2. Train the discriminator                                  #
            # =========================================================== #
            # Compute loss with real images.
            out_src, out_cls = self.D(x_real_org)
            d_loss_real = - torch.mean(out_src)
            d_loss_cls = self.classification_loss(out_cls, label_org)

            # Compute loss with fake images.
            inp = x_real_org
            if self.use_MR:
                inp = torch.cat([inp, x_MR], 1)
            x_fake_trg = self.G(inp, c_trg)
            out_src, out_cls = self.D(x_fake_trg.detach())
            d_loss_fake = torch.mean(out_src)

            # Compute loss for gradient penalty.
            # 5-D alpha (N,1,1,1,1) -- presumably broadcasting over
            # volumetric (N,C,D,H,W) batches; confirm input rank.
            alpha = torch.rand(x_real_org.size(0), 1, 1, 1, 1).to(self.device)
            x_hat = (alpha * x_real_org.data + (1 - alpha) * x_fake_trg.data).requires_grad_(True)
            out_src, _ = self.D(x_hat)
            d_loss_gp = self.gradient_penalty(out_src, x_hat)

            # Backward and optimize.
            d_loss = d_loss_real + d_loss_fake + self.lambda_cls * d_loss_cls + self.lambda_gp * d_loss_gp
            self.reset_grad()
            d_loss.backward()
            self.d_optimizer.step()

            # Logging.
            loss = {}
            loss['D/loss_real'] = d_loss_real.item()
            loss['D/loss_fake'] = d_loss_fake.item()
            loss['D/loss_cls'] = d_loss_cls.item()
            loss['D/loss_gp'] = d_loss_gp.item()

        # =============================================================== #
        # 3. Train the generator (every n_critic discriminator steps)     #
        # =============================================================== #
        if (i + 1) % self.n_critic == 0:
            # Original-to-target domain.
            inp = x_real_org
            if self.use_MR:
                inp = torch.cat([inp, x_MR], 1)
            x_fake_trg = self.G(inp, c_trg)
            out_src, out_cls = self.D(x_fake_trg)
            g_loss_fake = - torch.mean(out_src)
            g_loss_cls = self.classification_loss(out_cls, label_trg)

            # Target-to-original domain (cycle reconstruction).
            inp = x_fake_trg
            if self.use_MR:
                inp = torch.cat([inp, x_MR], 1)
            x_reconst = self.G(inp, c_org)
            g_loss_rec = torch.mean(torch.abs(x_real_org - x_reconst))

            # Target-target paired loss (supervised L1 against the real
            # target-domain image -- this dataset provides paired samples).
            g_loss_pair = torch.mean(torch.abs(x_fake_trg - x_real_trg))

            # Backward and optimize.
            g_loss = g_loss_fake + self.lambda_rec * g_loss_rec + self.lambda_cls * g_loss_cls + self.lambda_pair * g_loss_pair
            self.reset_grad()
            g_loss.backward()
            self.g_optimizer.step()

            # Logging.
            loss['G/loss_fake'] = g_loss_fake.item()
            loss['G/loss_rec'] = g_loss_rec.item()
            loss['G/loss_cls'] = g_loss_cls.item()
            loss['G/loss_pair'] = g_loss_pair.item()

        # =============================================================== #
        # 4. Miscellaneous                                                #
        # =============================================================== #
        # Print out training information.
        if (i+1) % self.log_step == 0:
            et = time.time() - start_time
            # Drop microseconds from the elapsed-time string.
            et = str(datetime.timedelta(seconds=et))[:-7]
            log = "Elapsed [{}], Iteration [{}/{}]".format(et, i+1, self.num_iters)
            for tag, value in loss.items():
                log += ", {}: {:.4f}".format(tag, value)
            print(log)
            if self.use_tensorboard:
                for tag, value in loss.items():
                    self.logger.scalar_summary(tag, value, i+1)

        # Save model checkpoints.
        if (i+1) % self.model_save_step == 0:
            G_path = os.path.join(self.model_save_dir, '{}-G.ckpt'.format(i+1))
            D_path = os.path.join(self.model_save_dir, '{}-D.ckpt'.format(i+1))
            torch.save(self.G.state_dict(), G_path)
            torch.save(self.D.state_dict(), D_path)
            print('Saved model checkpoints into {}...'.format(self.model_save_dir))

        # Decay learning rates linearly over the last num_iters_decay steps.
        if (i+1) % self.lr_update_step == 0 and (i+1) > (self.num_iters - self.num_iters_decay):
            g_lr -= (self.g_lr / float(self.num_iters_decay))
            d_lr -= (self.d_lr / float(self.num_iters_decay))
            self.update_lr(g_lr, d_lr)
            print ('Decayed learning rates, g_lr: {}, d_lr: {}.'.format(g_lr, d_lr))

        # Translate fixed images for debugging.
        if (i+1) % self.sample_step == 0:
            with torch.no_grad():
                x_fake_list = [x_fixed_org]
                for c_fixed in c_fixed_list:
                    inp = x_fixed_org
                    if self.use_MR:
                        inp = torch.cat([inp, x_fixed_MR], 1)
                    x_fake_list.append(self.G(inp, c_fixed))
                x_concat = torch.cat(x_fake_list, dim=4)
                sample_path = os.path.join(self.sample_dir, '{}-images.jpg'.format(i+1))
                # Flatten the depth axis into the last dimension so the 5-D
                # volume can be written with the 2-D save_image utility --
                # presumably (N,C,D,H,W) input; confirm.
                ss = x_concat.shape
                save_image(self.denorm(x_concat.data.cpu().view(ss[0], ss[1], ss[2], -1)), sample_path, nrow=1, padding=1)
                print('Saved real and fake images into {}...'.format(sample_path))

        # =============================================================== #
        # 5. Validation on test set                                       #
        # =============================================================== #
        if (i+1) % self.validate_step == 0:
            with torch.no_grad():
                # validate 0-1 / 0-2 / 1-0 / 1-2 / 2-0 / 2-1
                psnr_mean_set = np.zeros(6)
                mse_mean_set = np.zeros(6)
                for ii in range(0, 6):
                    # One loader per ordered domain pair.
                    if ii == 0:
                        val_bar = tqdm(data_test_loader1)
                    if ii == 1:
                        val_bar = tqdm(data_test_loader2)
                    if ii == 2:
                        val_bar = tqdm(data_test_loader3)
                    if ii == 3:
                        val_bar = tqdm(data_test_loader4)
                    if ii == 4:
                        val_bar = tqdm(data_test_loader5)
                    if ii == 5:
                        val_bar = tqdm(data_test_loader6)
                    avg_psnr = AverageMeter()
                    avg_mse = AverageMeter()
                    for (x_real_org, x_real_trg, label_org, label_trg, x_MR) in val_bar:
                        # Prepare input images and target domain labels.
                        x_real_org = x_real_org.to(self.device)
                        x_MR = x_MR.to(self.device)
                        c_org = self.label2onehot(label_org, self.c_dim)
                        c_trg = self.label2onehot(label_trg, self.c_dim)
                        c_org = c_org.to(self.device)
                        c_trg = c_trg.to(self.device)
                        # Translate images.
                        inp = x_real_org
                        if self.use_MR:
                            inp = torch.cat([inp, x_MR], 1)
                        x_fake_trg = self.G(inp, c_trg)
                        # Calculate metrics against the paired ground truth.
                        psnr_ = psnr(x_fake_trg.cpu(), x_real_trg.cpu())
                        mse_ = mse(x_fake_trg.cpu(), x_real_trg.cpu())
                        avg_psnr.update(psnr_)
                        avg_mse.update(mse_)
                        # Running averages shown on the progress bar.
                        if ii == 0:
                            message = 'PSNR-01: {:4f} '.format(avg_psnr.avg)
                            message += 'MSE-01: {:4f} '.format(avg_mse.avg)
                        if ii == 1:
                            message = 'PSNR-02: {:4f} '.format(avg_psnr.avg)
                            message += 'MSE-02: {:4f} '.format(avg_mse.avg)
                        if ii == 2:
                            message = 'PSNR-10: {:4f} '.format(avg_psnr.avg)
                            message += 'MSE-10: {:4f} '.format(avg_mse.avg)
                        if ii == 3:
                            message = 'PSNR-12: {:4f} '.format(avg_psnr.avg)
                            message += 'MSE-12: {:4f} '.format(avg_mse.avg)
                        if ii == 4:
                            message = 'PSNR-20: {:4f} '.format(avg_psnr.avg)
                            message += 'MSE-20: {:4f} '.format(avg_mse.avg)
                        if ii == 5:
                            message = 'PSNR-21: {:4f} '.format(avg_psnr.avg)
                            message += 'MSE-21: {:4f} '.format(avg_mse.avg)
                        val_bar.set_description(desc=message)
                    psnr_mean_set[ii] = avg_psnr.avg
                    mse_mean_set[ii] = avg_mse.avg
                # Append all validation metrics for this iteration:
                # 0-1 / 0-2 / 1-0 / 1-2 / 2-0 / 2-1.
                with open(os.path.join(self.sample_dir, 'vali_metrics.csv'), 'a') as f:
                    writer = csv.writer(f)
                    writer.writerow([i,
                                     psnr_mean_set[0], mse_mean_set[0],
                                     psnr_mean_set[1], mse_mean_set[1],
                                     psnr_mean_set[2], mse_mean_set[2],
                                     psnr_mean_set[3], mse_mean_set[3],
                                     psnr_mean_set[4], mse_mean_set[4],
                                     psnr_mean_set[5], mse_mean_set[5]])
def test(self):
    """Translate images using StarGAN trained on a single dataset.

    Loads the generator at ``self.test_iters``, evaluates PSNR/MSE on all
    six ordered domain-pair loaders, and writes each original, translated
    and ground-truth volume to NIfTI files under ``self.result_dir``.
    """
    # Load the trained generator.
    self.restore_model(self.test_iters)

    # Set data loader.
    # NOTE(review): loaders are only bound when dataset == 'PET3'; any other
    # value raises NameError below -- confirm this is intended.
    if self.dataset == 'PET3':
        data_test_loader1 = self.pet3_test_loader1
        data_test_loader2 = self.pet3_test_loader2
        data_test_loader3 = self.pet3_test_loader3
        data_test_loader4 = self.pet3_test_loader4
        data_test_loader5 = self.pet3_test_loader5
        data_test_loader6 = self.pet3_test_loader6

    with torch.no_grad():
        for ii in range(0,6):
            # One loader per ordered domain pair:
            # 0-1 / 0-2 / 1-0 / 1-2 / 2-0 / 2-1.
            if ii == 0:
                val_bar = tqdm(data_test_loader1)
            if ii == 1:
                val_bar = tqdm(data_test_loader2)
            if ii == 2:
                val_bar = tqdm(data_test_loader3)
            if ii == 3:
                val_bar = tqdm(data_test_loader4)
            if ii == 4:
                val_bar = tqdm(data_test_loader5)
            if ii == 5:
                val_bar = tqdm(data_test_loader6)
            avg_psnr = AverageMeter()
            avg_mse = AverageMeter()
            for nn, (x_real_org, x_real_trg, label_org, label_trg, x_MR) in enumerate(val_bar):
                # Prepare input images and target domain labels.
                x_real_org = x_real_org.to(self.device)
                x_MR = x_MR.to(self.device)
                c_org = self.label2onehot(label_org, self.c_dim)
                c_trg = self.label2onehot(label_trg, self.c_dim)
                c_org = c_org.to(self.device)
                c_trg = c_trg.to(self.device)
                # Translate images.
                inp = x_real_org
                if self.use_MR:
                    inp = torch.cat([inp, x_MR], 1)
                x_fake_trg = self.G(inp, c_trg)
                # Calculate metrics against the paired ground truth.
                psnr_ = psnr(x_fake_trg.cpu(), x_real_trg.cpu())
                mse_ = mse(x_fake_trg.cpu(), x_real_trg.cpu())
                avg_psnr.update(psnr_)
                avg_mse.update(mse_)
                # Running averages shown on the progress bar.
                if ii == 0:
                    message = 'PSNR-01: {:4f} '.format(avg_psnr.avg)
                    message += 'MSE-01: {:4f} '.format(avg_mse.avg)
                if ii == 1:
                    message = 'PSNR-02: {:4f} '.format(avg_psnr.avg)
                    message += 'MSE-02: {:4f} '.format(avg_mse.avg)
                if ii == 2:
                    message = 'PSNR-10: {:4f} '.format(avg_psnr.avg)
                    message += 'MSE-10: {:4f} '.format(avg_mse.avg)
                if ii == 3:
                    message = 'PSNR-12: {:4f} '.format(avg_psnr.avg)
                    message += 'MSE-12: {:4f} '.format(avg_mse.avg)
                if ii == 4:
                    message = 'PSNR-20: {:4f} '.format(avg_psnr.avg)
                    message += 'MSE-20: {:4f} '.format(avg_mse.avg)
                if ii == 5:
                    message = 'PSNR-21: {:4f} '.format(avg_psnr.avg)
                    message += 'MSE-21: {:4f} '.format(avg_mse.avg)
                val_bar.set_description(desc=message)
                # Save into nii for future analysis (one sub-folder per
                # domain pair, files indexed by batch position).
                if ii == 0:
                    folder_name = '01'
                if ii == 1:
                    folder_name = '02'
                if ii == 2:
                    folder_name = '10'
                if ii == 3:
                    folder_name = '12'
                if ii == 4:
                    folder_name = '20'
                if ii == 5:
                    folder_name = '21'
                result_path = os.path.join(self.result_dir, folder_name)
                if not os.path.exists(result_path):
                    os.makedirs(result_path)
                # NOTE(review): [0,0,:,:,:] keeps only the first sample and
                # first channel -- assumes test batch size 1, single-channel
                # volumes; identity affine discards real spatial metadata.
                x_real_org_nib = nib.Nifti1Image(x_real_org.cpu().numpy()[0,0,:,:,:], affine=np.eye(4))
                x_fake_trg_nib = nib.Nifti1Image(x_fake_trg.cpu().numpy()[0,0,:,:,:], affine=np.eye(4))
                x_real_trg_nib = nib.Nifti1Image(x_real_trg.cpu().numpy()[0,0,:,:,:], affine=np.eye(4))
                nib.save(x_real_org_nib, os.path.join(result_path, str(nn) + '_real_org.nii.gz'))
                nib.save(x_fake_trg_nib, os.path.join(result_path, str(nn) + '_fake_trg.nii.gz'))
                nib.save(x_real_trg_nib, os.path.join(result_path, str(nn) + '_real_trg.nii.gz'))
|
<filename>src/commands/index.ts
/**
 * Barrel module for the deployment commands: re-exports every command so
 * consumers can import them all from the `commands` directory directly.
 */
export * from './cancelDeployment'
export * from './deleteDeployment'
export * from './deploy'
export * from './listDeployments'
export * from './poll'
|
<reponame>dayatstuff/gatsby-datocms-starter
import styled from 'styled-components';
import { font, colors, z, bz } from '../consts/style';
import { fadeInUp } from '../style/animations';
// Full-height (minus header) centering container for a page.
export const PageWrapper = styled.div`
  height: calc(100vh - 10rem);
  display: flex;
  justify-content: center;
  align-items: center;
  max-width: 100%;
  padding: 2rem;
`;

// Centered page content column; also styles nested <pre> and <button>.
export const PageInner = styled.div`
  width: 75rem;
  max-width: 100%;
  text-align: center;
  pre {
    background: rgba(0, 0, 0, 0.1);
    padding: 0.2rem 0.4rem;
    font-size: 1.2rem;
  }
  button {
    ${font.button};
    background: ${colors.dark};
    border: none;
    color: white;
    padding: 0.35em 0.7em;
    margin-top: 0.7em;
  }
`;

// Page heading using the shared h1 typography.
export const PageTitle = styled.h1`
  ${font.h1}
`;

// Wrapper for a post link; styles the anchor as a purple pill that darkens
// on hover.
export const PostLink = styled.div`
  margin-bottom: 1em;
  a {
    color: ${colors.light};
    background: ${colors.purple};
    padding: 0.35em 0.7em;
    font-style: italic;
    &:hover {
      text-decoration: none;
      background: ${colors.dark};
    }
  }
`;

// Fixed full-screen dimmed backdrop that centers the modal.
export const ModalWrapper = styled.div`
  position: fixed;
  top: 0;
  left: 0;
  height: 100%;
  width: 100%;
  background: rgba(0, 0, 0, 0.3);
  display: flex;
  justify-content: center;
  align-items: center;
  padding: 2rem;
  ${z.modalBackground};
`;

// The modal card itself, animated in with fadeInUp.
export const ModalInner = styled.div`
  background: white;
  position: relative;
  max-width: 100%;
  max-height: 100%;
  height: 25rem;
  width: 30rem;
  display: flex;
  justify-content: center;
  align-items: center;
  animation: ${fadeInUp} 0.3s;
  ${bz};
  ${font.h1};
  ${z.modal};
`;
|
/*******************************************************************************
* This file is part of the Symfony eclipse plugin.
*
* (c) <NAME> <<EMAIL>>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
******************************************************************************/
package com.dubture.symfony.twig.codeassist.strategies;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
import org.eclipse.dltk.core.ISourceRange;
import org.eclipse.dltk.core.IType;
import org.eclipse.jface.text.BadLocationException;
import com.dubture.symfony.core.codeassist.contexts.RouteCompletionContext;
import com.dubture.symfony.core.model.SymfonyModelAccess;
import com.dubture.symfony.index.model.Route;
import com.dubture.symfony.twig.codeassist.CompletionProposalFlag;
import com.dubture.twig.core.codeassist.ICompletionContext;
import com.dubture.twig.core.codeassist.ICompletionProposalFlag;
import com.dubture.twig.core.codeassist.ICompletionReporter;
import com.dubture.twig.core.codeassist.context.AbstractTwigCompletionContext;
import com.dubture.twig.core.codeassist.strategies.AbstractTwigCompletionStrategy;
/**
* Completes route names inside a {@link RouteCompletionContext}
* @author <NAME> <<EMAIL>>
*/
public class RouteCompletionStrategy extends AbstractTwigCompletionStrategy {

    public static int workaroundCount = 0;

    public RouteCompletionStrategy(ICompletionContext context) {
        super(context);
    }

    @Override
    public void apply(ICompletionReporter reporter) throws BadLocationException {
        AbstractTwigCompletionContext twigContext = (AbstractTwigCompletionContext) getContext();

        // TODO: cache this route lookup instead of re-querying the index on
        // every completion request.
        List<Route> allRoutes = SymfonyModelAccess.getDefault().findRoutes(twigContext.getScriptProject());
        ISourceRange replacementRange = getReplacementRange(twigContext);
        SymfonyModelAccess modelAccess = SymfonyModelAccess.getDefault();
        String typedPrefix = twigContext.getPrefix();

        for (Route candidate : allRoutes) {
            // Skip routes whose controller can no longer be resolved.
            IType controllerType = modelAccess.findController(candidate.bundle, candidate.controller, twigContext.getScriptProject());
            if (controllerType == null) {
                continue;
            }
            if (StringUtils.startsWithIgnoreCase(candidate.name, typedPrefix)) {
                reporter.reportKeyword(candidate.name, replacementRange, new ICompletionProposalFlag[]{CompletionProposalFlag.ROUTE});
            }
        }
    }
}
|
<reponame>iamonuwa/angular2typescript
/**
 * Returns a hard-coded stock quote.
 *
 * @returns {{symbol: string, price: number, open: number, volume: number}}
 */
function getStock() {
  return {
    symbol: 'IBM',
    price: 100.0,
    open: 99.5,
    volume: 100000,
  };
}

// Modernized from compiled-TS output: destructuring instead of the `_a`
// temporary, `const` instead of `var`, template literal instead of `+`
// concatenation. Output is unchanged.
const { symbol, price } = getStock();
console.log(`The price of ${symbol} is ${price}`);
|
export { default } from 'ember-medium-editor/components/me-extension'; |
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Resolve the repository root from this script's location. All expansions are
# quoted and ${BASH_SOURCE[0]} is used instead of the bare, unquoted
# $BASH_SOURCE so paths containing spaces do not undergo word splitting.
REPO_ROOT_DIR="$(dirname "$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)")"
SCRIPTS_DIR="$REPO_ROOT_DIR/scripts"
# Currently configured gcloud project and its numeric project number.
PROJECT_ID="$(gcloud config list project --format "value(core.project)")"
PROJECT_NUMBER="$(gcloud projects describe "$PROJECT_ID" --format="value(projectNumber)")"
|
def match_file_type(file_path: str, file_type_pattern: str) -> bool:
    """Return True if ``file_path`` matches ``file_type_pattern``.

    A pattern without ``*`` is treated as a required suffix. A pattern with a
    ``*`` matches when the path starts with the text before the first ``*``
    and ends with the text after it, with those parts not overlapping.

    Fixes over the previous version:
    - Overlap bug: pattern ``"abc*cde"`` no longer matches path ``"abcde"``
      (prefix and suffix used to be tested independently, so overlapping
      matches produced false positives). Guarded by a length check.
    - Patterns containing more than one ``*`` used to raise ``ValueError``
      from tuple unpacking; ``partition`` splits on the first ``*`` only, so
      any further ``*`` is matched literally in the suffix.
    """
    if "*" not in file_type_pattern:
        return file_path.endswith(file_type_pattern)
    prefix, _, suffix = file_type_pattern.partition("*")
    # The prefix and suffix must fit in the path without overlapping.
    if len(file_path) < len(prefix) + len(suffix):
        return False
    return file_path.startswith(prefix) and file_path.endswith(suffix)
#!/usr/bin/env bash
# Retag and push the project's Docker images: delegates to the
# tag-and-push-docker-image.sh helpers for both the cicd-tools image and the
# database image, using the same registry credentials and tag pair.

# Abort on the first failing command.
set -e

# Absolute directory containing this script, so the helpers can be found
# regardless of the caller's working directory.
SCRIPT_DIR_NAME="$( cd "$( dirname "$0" )" && pwd )"

# Require exactly four positional arguments.
if [ $# != 4 ]; then
    echo "usage: $(basename "$0") <username> <password> <current tag> <new tag>" >&2
    exit 1
fi

USERNAME=${1:-}
PASSWORD=${2:-}
CURRENT_TAG=${3:-}
NEW_TAG=${4:-}

# Retag/push the cicd-tools image...
"$SCRIPT_DIR_NAME/cicd-tools/tag-and-push-docker-image.sh" \
    "${USERNAME}" \
    "${PASSWORD}" \
    "${CURRENT_TAG}" \
    "${NEW_TAG}"

# ...and the database image with the same arguments.
"$SCRIPT_DIR_NAME/database/tag-and-push-docker-image.sh" \
    "${USERNAME}" \
    "${PASSWORD}" \
    "${CURRENT_TAG}" \
    "${NEW_TAG}"

exit 0
|
package com.bv.eidss.data.generated;
/**
 * Auto-generated holder for the SQLite DDL of the offline {@code Farm}
 * table. Only exposes the CREATE TABLE statement; it performs no database
 * access itself.
 */
public class Farm_database {

    /**
     * CREATE TABLE statement for the local Farm table: a set of sync/system
     * bookkeeping columns followed by the farm record fields (identity,
     * owner contact details, administrative location, address, and
     * coordinates). All record columns are nullable.
     */
    public static final String create_sql =
            "CREATE TABLE Farm\n" +
            "(\n" + // system fields
            "  id INTEGER PRIMARY KEY AUTOINCREMENT  NOT NULL  UNIQUE\n" +
            ", strLastSynError TEXT NULL\n" +
            ", intStatus INT NULL\n" +
            ", intChanged INT NULL\n" +
            ", datCreateDate DATE NULL\n" +
            "\n" + // case fields
            ", intRowStatus INT NULL\n" +
            ", uidOfflineCaseID TEXT NULL\n" +
            ", idParent BIGINT NULL\n" +
            ", idfFarm BIGINT NULL\n" +
            ", idfsHerd BIGINT NULL\n" +
            ", blnIsRoot INT NULL\n" +
            ", strFarmName TEXT NULL\n" +
            ", strFarmCode TEXT NULL\n" +
            ", idfRootFarm BIGINT NULL\n" +
            ", strOwnerLastName TEXT NULL\n" +
            ", strOwnerFirstName TEXT NULL\n" +
            ", strOwnerMiddleName TEXT NULL\n" +
            ", strPhone TEXT NULL\n" +
            ", strFax TEXT NULL\n" +
            ", strEmail TEXT NULL\n" +
            ", idfsRegion BIGINT NULL\n" +
            ", idfsRayon BIGINT NULL\n" +
            ", idfsSettlement BIGINT NULL\n" +
            ", strStreetName TEXT NULL\n" +
            ", strBuilding TEXT NULL\n" +
            ", strHouse TEXT NULL\n" +
            ", strApartment TEXT NULL\n" +
            ", strPostCode TEXT NULL\n" +
            // NOTE(review): dblLongitude/dblLatitude have no declared type
            // (SQLite permits this; affinity defaults) -- presumably meant
            // to be REAL; confirm against the generator.
            ", dblLongitude  NULL\n" +
            ", dblLatitude  NULL\n" +
            ")";
}
|
const { rollup } = require('rollup')
const uglify = require('rollup-plugin-uglify')
const { minify } = require('uglify-js')
const replace = require('rollup-plugin-replace')
const babel = require('rollup-plugin-babel')
const packages = require('../package.json')
/**
 * Bundle one entry point with Rollup into dist/<name>.js (UMD format).
 *
 * @param {Object} opts
 * @param {string} opts.entry      - source entry file
 * @param {string} opts.destName   - output path, without extension
 * @param {string} opts.moduleName - UMD global/module name
 * @param {string} opts.env        - 'development' | 'production'
 */
const build = async (opts) => {
  // The short package name is the last path segment of destName.
  const shortName = opts.destName.split('/').pop()

  const plugins = [
    babel({
      babelrc: false,
      presets: [
        ['es2015-rollup'],
        'stage-0'
      ],
      plugins: ['transform-object-assign']
    }),
    replace({
      '__version__': packages.version,
      '__name__': shortName,
      'process.env.NODE_ENV': JSON.stringify(opts.env)
    })
  ]

  // Production bundles are additionally minified, stripping debugger
  // statements and console calls.
  if (opts.env === 'production') {
    plugins.push(uglify({
      compress: {
        drop_debugger: true,
        drop_console: true
      }
    }, minify))
  }

  const bundle = await rollup({ entry: opts.entry, plugins })
  const outName = opts.env === 'production' ? `${opts.destName}.min` : opts.destName
  await bundle.write({
    moduleName: opts.moduleName,
    format: 'umd',
    dest: `dist/${outName}.js`,
    sourceMap: true,
    exports: 'named'
  })
}
// Every bundle to produce: each package in development and production form.
const builds = [
  {
    moduleName: 'Vuet',
    destName: 'vuet',
    entry: 'src/index.js',
    env: 'development'
  },
  {
    moduleName: 'Vuet',
    destName: 'vuet',
    entry: 'src/index.js',
    env: 'production'
  },
  {
    moduleName: 'VuetScroll',
    destName: '../packages/vuet-scroll/dist/vuet-scroll',
    entry: 'packages/vuet-scroll/src/index.js',
    env: 'development'
  },
  {
    moduleName: 'VuetScroll',
    destName: '../packages/vuet-scroll/dist/vuet-scroll',
    entry: 'packages/vuet-scroll/src/index.js',
    env: 'production'
  },
  {
    moduleName: 'VuetRoute',
    destName: '../packages/vuet-route/dist/vuet-route',
    entry: 'packages/vuet-route/src/index.js',
    env: 'development'
  },
  {
    moduleName: 'VuetRoute',
    destName: '../packages/vuet-route/dist/vuet-route',
    entry: 'packages/vuet-route/src/index.js',
    env: 'production'
  },
  {
    moduleName: 'VuetStore',
    destName: '../packages/vuet-store/dist/vuet-store',
    entry: 'packages/vuet-store/src/index.js',
    env: 'development'
  },
  {
    moduleName: 'VuetStore',
    destName: '../packages/vuet-store/dist/vuet-store',
    entry: 'packages/vuet-store/src/index.js',
    env: 'production'
  }
]

// `build` is async: the previous `builds.forEach(opts => build(opts))` left
// every returned promise floating, so a failed build produced an unhandled
// rejection and never failed the process. Await them all and surface the
// first failure via the exit code.
Promise.all(builds.map((opts) => build(opts))).catch((err) => {
  console.error(err)
  process.exitCode = 1
})
|
#!/usr/bin/env bash
# Install the zunit test runner and its revolver dependency into the target
# bin directory (first argument; defaults to /usr/local/bin).
#
# `set -e` added: previously a failed clone or build was silently ignored
# and the script would go on to copy a missing or stale binary.
set -e

INSTALL_DIR="${1:-/usr/local/bin}"

# Build zunit from source and install the resulting binary.
git clone https://github.com/pawamoy/zunit
( cd ./zunit && ./build.zsh )
chmod u+x zunit/zunit
cp zunit/zunit "$INSTALL_DIR"

# revolver is a plain script; just make it executable and install it.
git clone https://github.com/molovo/revolver
chmod u+x revolver/revolver
cp revolver/revolver "$INSTALL_DIR"

# Remove the working clones.
rm -rf ./zunit ./revolver
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.