Dataset Preview
The full dataset viewer is not available (click to read why). Only showing a preview of the rows.
The dataset generation failed
Error code: DatasetGenerationError
Exception: TypeError
Message: Couldn't cast array of type
struct<content_hash: string, timestamp: string, source: string, line_count: int64, max_line_length: int64, avg_line_length: double, alnum_prop: double, repo_name: string, id: string, size: string, binary: bool, copies: string, ref: string, path: string, mode: string, license: string, language: list<item: struct<name: string, bytes: string>>, symlink_target: string>
to
{'content_hash': Value(dtype='string', id=None), 'timestamp': Value(dtype='string', id=None), 'source': Value(dtype='string', id=None), 'line_count': Value(dtype='int64', id=None), 'max_line_length': Value(dtype='int64', id=None), 'avg_line_length': Value(dtype='float64', id=None), 'alnum_prop': Value(dtype='float64', id=None), 'repo_name': Value(dtype='string', id=None), 'id': Value(dtype='string', id=None), 'size': Value(dtype='string', id=None), 'binary': Value(dtype='bool', id=None), 'copies': Value(dtype='string', id=None), 'ref': Value(dtype='string', id=None), 'path': Value(dtype='string', id=None), 'mode': Value(dtype='string', id=None), 'license': Value(dtype='string', id=None), 'language': [{'name': Value(dtype='string', id=None), 'bytes': Value(dtype='string', id=None)}]}
Traceback: Traceback (most recent call last):
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 2011, in _prepare_split_single
writer.write_table(table)
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/arrow_writer.py", line 585, in write_table
pa_table = table_cast(pa_table, self._schema)
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 2302, in table_cast
return cast_table_to_schema(table, schema)
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 2261, in cast_table_to_schema
arrays = [cast_array_to_feature(table[name], feature) for name, feature in features.items()]
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 2261, in <listcomp>
arrays = [cast_array_to_feature(table[name], feature) for name, feature in features.items()]
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 1802, in wrapper
return pa.chunked_array([func(chunk, *args, **kwargs) for chunk in array.chunks])
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 1802, in <listcomp>
return pa.chunked_array([func(chunk, *args, **kwargs) for chunk in array.chunks])
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 2122, in cast_array_to_feature
raise TypeError(f"Couldn't cast array of type\n{_short_str(array.type)}\nto\n{_short_str(feature)}")
TypeError: Couldn't cast array of type
struct<content_hash: string, timestamp: string, source: string, line_count: int64, max_line_length: int64, avg_line_length: double, alnum_prop: double, repo_name: string, id: string, size: string, binary: bool, copies: string, ref: string, path: string, mode: string, license: string, language: list<item: struct<name: string, bytes: string>>, symlink_target: string>
to
{'content_hash': Value(dtype='string', id=None), 'timestamp': Value(dtype='string', id=None), 'source': Value(dtype='string', id=None), 'line_count': Value(dtype='int64', id=None), 'max_line_length': Value(dtype='int64', id=None), 'avg_line_length': Value(dtype='float64', id=None), 'alnum_prop': Value(dtype='float64', id=None), 'repo_name': Value(dtype='string', id=None), 'id': Value(dtype='string', id=None), 'size': Value(dtype='string', id=None), 'binary': Value(dtype='bool', id=None), 'copies': Value(dtype='string', id=None), 'ref': Value(dtype='string', id=None), 'path': Value(dtype='string', id=None), 'mode': Value(dtype='string', id=None), 'license': Value(dtype='string', id=None), 'language': [{'name': Value(dtype='string', id=None), 'bytes': Value(dtype='string', id=None)}]}
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/src/services/worker/src/worker/job_runners/config/parquet_and_info.py", line 1529, in compute_config_parquet_and_info_response
parquet_operations = convert_to_parquet(builder)
File "/src/services/worker/src/worker/job_runners/config/parquet_and_info.py", line 1154, in convert_to_parquet
builder.download_and_prepare(
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 1027, in download_and_prepare
self._download_and_prepare(
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 1122, in _download_and_prepare
self._prepare_split(split_generator, **prepare_split_kwargs)
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 1882, in _prepare_split
for job_id, done, content in self._prepare_split_single(
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 2038, in _prepare_split_single
raise DatasetGenerationError("An error occurred while generating the dataset") from e
datasets.exceptions.DatasetGenerationError: An error occurred while generating the dataset
Need help to make the dataset viewer work? Make sure to review how to configure the dataset viewer, and open a discussion for direct support.
text
string | meta
dict |
|---|---|
package com.liferay.tasks.service.impl;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import com.liferay.asset.kernel.service.AssetEntryLocalServiceUtil;
import com.liferay.counter.kernel.service.CounterLocalServiceUtil;
import com.liferay.message.boards.kernel.service.MBMessageLocalServiceUtil;
import com.liferay.portal.kernel.exception.PortalException;
import com.liferay.portal.kernel.exception.SystemException;
import com.liferay.portal.kernel.json.JSONFactoryUtil;
import com.liferay.portal.kernel.json.JSONObject;
import com.liferay.portal.kernel.model.User;
import com.liferay.portal.kernel.model.UserNotificationDeliveryConstants;
import com.liferay.portal.kernel.notifications.NotificationEvent;
import com.liferay.portal.kernel.notifications.NotificationEventFactoryUtil;
import com.liferay.portal.kernel.notifications.UserNotificationManagerUtil;
import com.liferay.portal.kernel.service.ServiceContext;
import com.liferay.portal.kernel.service.UserLocalServiceUtil;
import com.liferay.portal.kernel.service.UserNotificationEventLocalServiceUtil;
import com.liferay.portal.kernel.util.PortalUtil;
import com.liferay.portal.kernel.util.StringPool;
import com.liferay.portal.kernel.util.Validator;
import com.liferay.social.kernel.service.SocialActivityLocalServiceUtil;
import com.liferay.tasks.exception.TasksEntryDueDateException;
import com.liferay.tasks.exception.TasksEntryTitleException;
import com.liferay.tasks.model.TasksEntry;
import com.liferay.tasks.model.TasksEntryConstants;
import com.liferay.tasks.service.base.TasksEntryLocalServiceBaseImpl;
import com.liferay.tasks.social.TasksActivityKeys;
import com.liferay.tasks.util.TasksPortletKeys;
/**
* @author Ryan Park
* @author Jonathan Lee
*/
public class TasksEntryLocalServiceImpl extends TasksEntryLocalServiceBaseImpl {

    /**
     * Adds a new tasks entry and wires up every related service: model
     * resources, the asset entry, a social "add entry" activity, and website
     * notifications for the interested users.
     *
     * @param userId the user creating the task
     * @param title the task title; must be non-blank (see {@link #validate})
     * @param priority the task priority
     * @param assigneeUserId the user the task is assigned to
     * @param addDueDate whether the dueDate* components should be combined
     *        into a due date (interpreted in the creator's time zone)
     * @param serviceContext supplies the scope group and the asset
     *        categories/tag names
     * @return the persisted tasks entry
     * @throws PortalException if the title is blank
     *         (TasksEntryTitleException) or the due date components are
     *         invalid (TasksEntryDueDateException)
     */
    public TasksEntry addTasksEntry(
            long userId, String title, int priority, long assigneeUserId,
            int dueDateMonth, int dueDateDay, int dueDateYear, int dueDateHour,
            int dueDateMinute, boolean addDueDate,
            ServiceContext serviceContext)
        throws PortalException, SystemException {

        // Tasks entry

        User user = UserLocalServiceUtil.getUserById(userId);
        long groupId = serviceContext.getScopeGroupId();
        Date now = new Date();

        validate(title);

        Date dueDate = null;

        if (addDueDate) {
            // PortalUtil.getDate validates the components and throws
            // TasksEntryDueDateException when they do not form a valid date.
            dueDate = PortalUtil.getDate(
                dueDateMonth, dueDateDay, dueDateYear, dueDateHour,
                dueDateMinute, user.getTimeZone(),
                TasksEntryDueDateException.class);
        }

        long tasksEntryId = CounterLocalServiceUtil.increment();

        TasksEntry tasksEntry = tasksEntryPersistence.create(tasksEntryId);

        tasksEntry.setGroupId(groupId);
        tasksEntry.setCompanyId(user.getCompanyId());
        tasksEntry.setUserId(user.getUserId());
        tasksEntry.setUserName(user.getFullName());
        tasksEntry.setCreateDate(now);
        tasksEntry.setModifiedDate(now);
        tasksEntry.setTitle(title);
        tasksEntry.setPriority(priority);
        tasksEntry.setAssigneeUserId(assigneeUserId);
        tasksEntry.setDueDate(dueDate);
        tasksEntry.setStatus(TasksEntryConstants.STATUS_OPEN);

        tasksEntryPersistence.update(tasksEntry);

        // Resources

        resourceLocalService.addModelResources(tasksEntry, serviceContext);

        // Asset

        updateAsset(
            userId, tasksEntry, serviceContext.getAssetCategoryIds(),
            serviceContext.getAssetTagNames());

        // Social

        JSONObject extraDataJSONObject = JSONFactoryUtil.createJSONObject();

        extraDataJSONObject.put("title", tasksEntry.getTitle());

        SocialActivityLocalServiceUtil.addActivity(
            userId, groupId, TasksEntry.class.getName(), tasksEntryId,
            TasksActivityKeys.ADD_ENTRY, extraDataJSONObject.toString(),
            assigneeUserId);

        // Notifications

        // STATUS_ALL is used as the "old status" sentinel meaning "newly
        // created"; sendNotificationEvent picks the assignment title for it.
        sendNotificationEvent(
            tasksEntry, TasksEntryConstants.STATUS_ALL, assigneeUserId,
            serviceContext);

        return tasksEntry;
    }

    /**
     * Deletes the tasks entry with the given primary key, including its
     * related asset, discussion messages, and social activities.
     *
     * @throws PortalException if no tasks entry exists with the given id
     */
    @Override
    public TasksEntry deleteTasksEntry(long tasksEntryId)
        throws PortalException, SystemException {

        TasksEntry tasksEntry = tasksEntryPersistence.findByPrimaryKey(
            tasksEntryId);

        return deleteTasksEntry(tasksEntry);
    }

    /**
     * Deletes the given tasks entry and cleans up its related records: the
     * asset entry, message board discussion messages, and social activities.
     *
     * @return the deleted tasks entry
     */
    @Override
    public TasksEntry deleteTasksEntry(TasksEntry tasksEntry)
        throws PortalException, SystemException {

        // Tasks entry

        tasksEntryPersistence.remove(tasksEntry);

        // Asset

        AssetEntryLocalServiceUtil.deleteEntry(
            TasksEntry.class.getName(), tasksEntry.getTasksEntryId());

        // Message boards

        MBMessageLocalServiceUtil.deleteDiscussionMessages(
            TasksEntry.class.getName(), tasksEntry.getTasksEntryId());

        // Social

        SocialActivityLocalServiceUtil.deleteActivities(
            TasksEntry.class.getName(), tasksEntry.getTasksEntryId());

        return tasksEntry;
    }

    /** Returns the range of tasks entries assigned to the given user. */
    public List<TasksEntry> getAssigneeTasksEntries(
            long userId, int start, int end)
        throws SystemException {

        return tasksEntryPersistence.findByAssigneeUserId(userId, start, end);
    }

    /** Returns the number of tasks entries assigned to the given user. */
    public int getAssigneeTasksEntriesCount(long userId)
        throws SystemException {

        return tasksEntryPersistence.countByAssigneeUserId(userId);
    }

    /** Returns the range of the group's tasks entries assigned to the user. */
    public List<TasksEntry> getGroupAssigneeTasksEntries(
            long groupId, long userId, int start, int end)
        throws SystemException {

        return tasksEntryPersistence.findByG_A(groupId, userId, start, end);
    }

    /** Returns the number of the group's tasks entries assigned to the user. */
    public int getGroupAssigneeTasksEntriesCount(long groupId, long userId)
        throws SystemException {

        return tasksEntryPersistence.countByG_A(groupId, userId);
    }

    /** Returns the range of the group's tasks entries resolved by the user. */
    public List<TasksEntry> getGroupResolverTasksEntries(
            long groupId, long userId, int start, int end)
        throws SystemException {

        return tasksEntryPersistence.findByG_R(groupId, userId, start, end);
    }

    /** Returns the number of the group's tasks entries resolved by the user. */
    public int getGroupResolverTasksEntriesCount(long groupId, long userId)
        throws SystemException {

        return tasksEntryPersistence.countByG_R(groupId, userId);
    }

    /** Returns the range of the group's tasks entries created by the user. */
    public List<TasksEntry> getGroupUserTasksEntries(
            long groupId, long userId, int start, int end)
        throws SystemException {

        return tasksEntryPersistence.findByG_U(groupId, userId, start, end);
    }

    /** Returns the number of the group's tasks entries created by the user. */
    public int getGroupUserTasksEntriesCount(long groupId, long userId)
        throws SystemException {

        return tasksEntryPersistence.countByG_U(groupId, userId);
    }

    /** Returns the range of tasks entries resolved by the given user. */
    public List<TasksEntry> getResolverTasksEntries(
            long userId, int start, int end)
        throws SystemException {

        return tasksEntryPersistence.findByResolverUserId(userId, start, end);
    }

    /** Returns the number of tasks entries resolved by the given user. */
    public int getResolverTasksEntriesCount(long userId)
        throws SystemException {

        return tasksEntryPersistence.countByResolverUserId(userId);
    }

    /** Returns the range of tasks entries in the given group. */
    public List<TasksEntry> getTasksEntries(long groupId, int start, int end)
        throws SystemException {

        return tasksEntryPersistence.findByGroupId(groupId, start, end);
    }

    /**
     * Returns the range of tasks entries matching the given filters via the
     * custom finder (group, priority, assignee, reporter, status, and asset
     * tag inclusion/exclusion lists).
     */
    public List<TasksEntry> getTasksEntries(
            long groupId, int priority, long assigneeUserId,
            long reporterUserId, int status, long[] assetTagIds,
            long[] notAssetTagIds, int start, int end)
        throws SystemException {

        return tasksEntryFinder.findByG_P_A_R_S_T_N(
            groupId, priority, assigneeUserId, reporterUserId, status,
            assetTagIds, notAssetTagIds, start, end);
    }

    /** Returns the number of tasks entries in the given group. */
    public int getTasksEntriesCount(long groupId) throws SystemException {
        return tasksEntryPersistence.countByGroupId(groupId);
    }

    /** Returns the number of tasks entries matching the given filters. */
    public int getTasksEntriesCount(
            long groupId, int priority, long assigneeUserId,
            long reporterUserId, int status, long[] tagsEntryIds,
            long[] notTagsEntryIds)
        throws SystemException {

        return tasksEntryFinder.countByG_P_A_R_S_T_N(
            groupId, priority, assigneeUserId, reporterUserId, status,
            tagsEntryIds, notTagsEntryIds);
    }

    /**
     * Returns the tasks entry with the given primary key.
     *
     * @throws PortalException if no tasks entry exists with the given id
     */
    @Override
    public TasksEntry getTasksEntry(long tasksEntryId)
        throws PortalException, SystemException {

        return tasksEntryPersistence.findByPrimaryKey(tasksEntryId);
    }

    /** Returns the range of tasks entries created by the given user. */
    public List<TasksEntry> getUserTasksEntries(long userId, int start, int end)
        throws SystemException {

        return tasksEntryPersistence.findByUserId(userId, start, end);
    }

    /** Returns the number of tasks entries created by the given user. */
    public int getUserTasksEntriesCount(long userId) throws SystemException {
        return tasksEntryPersistence.countByUserId(userId);
    }

    /**
     * Creates or updates the asset entry associated with the tasks entry,
     * applying the given categories and tag names.
     */
    public void updateAsset(
            long userId, TasksEntry tasksEntry, long[] assetCategoryIds,
            String[] assetTagNames)
        throws PortalException, SystemException {

        AssetEntryLocalServiceUtil.updateEntry(
            userId, tasksEntry.getGroupId(), TasksEntry.class.getName(),
            tasksEntry.getTasksEntryId(), assetCategoryIds, assetTagNames);
    }

    /**
     * Updates an existing tasks entry and propagates the change to the
     * asset, social activity, and notification services.
     *
     * <p>When status is STATUS_RESOLVED the resolver user and finish date
     * are recorded; for any other status they are cleared.</p>
     *
     * @return the updated tasks entry
     * @throws PortalException if the entry is missing, the title is blank,
     *         or the due date components are invalid
     */
    public TasksEntry updateTasksEntry(
            long tasksEntryId, String title, int priority, long assigneeUserId,
            long resolverUserId, int dueDateMonth, int dueDateDay,
            int dueDateYear, int dueDateHour, int dueDateMinute,
            boolean addDueDate, int status, ServiceContext serviceContext)
        throws PortalException, SystemException {

        // Tasks entry

        Date now = new Date();

        TasksEntry tasksEntry = tasksEntryPersistence.findByPrimaryKey(
            tasksEntryId);

        // The due date is interpreted in the original creator's time zone,
        // not the updater's.
        User user = UserLocalServiceUtil.getUserById(tasksEntry.getUserId());

        validate(title);

        Date dueDate = null;

        if (addDueDate) {
            dueDate = PortalUtil.getDate(
                dueDateMonth, dueDateDay, dueDateYear, dueDateHour,
                dueDateMinute, user.getTimeZone(),
                TasksEntryDueDateException.class);
        }

        // Capture pre-update values for the notification logic below.
        long oldAssigneeUserId = tasksEntry.getAssigneeUserId();
        int oldStatus = tasksEntry.getStatus();

        tasksEntry.setModifiedDate(now);
        tasksEntry.setTitle(title);
        tasksEntry.setPriority(priority);
        tasksEntry.setAssigneeUserId(assigneeUserId);
        tasksEntry.setDueDate(dueDate);

        if (status == TasksEntryConstants.STATUS_RESOLVED) {
            tasksEntry.setResolverUserId(resolverUserId);
            tasksEntry.setFinishDate(now);
        }
        else {
            tasksEntry.setResolverUserId(0);
            tasksEntry.setFinishDate(null);
        }

        tasksEntry.setStatus(status);

        tasksEntryPersistence.update(tasksEntry);

        // Asset

        updateAsset(
            tasksEntry.getUserId(), tasksEntry,
            serviceContext.getAssetCategoryIds(),
            serviceContext.getAssetTagNames());

        // Social

        addSocialActivity(status, tasksEntry, serviceContext);

        // Notifications

        sendNotificationEvent(
            tasksEntry, oldStatus, oldAssigneeUserId, serviceContext);

        return tasksEntry;
    }

    /**
     * Updates only the status of a tasks entry (resolve/reopen), recording
     * or clearing the resolver and finish date, then records the social
     * activity and sends notifications.
     *
     * @return the updated tasks entry
     */
    public TasksEntry updateTasksEntryStatus(
            long tasksEntryId, long resolverUserId, int status,
            ServiceContext serviceContext)
        throws PortalException, SystemException {

        // Tasks entry

        Date now = new Date();

        TasksEntry tasksEntry = tasksEntryPersistence.findByPrimaryKey(
            tasksEntryId);

        tasksEntry.setModifiedDate(now);

        if (status == TasksEntryConstants.STATUS_RESOLVED) {
            tasksEntry.setResolverUserId(resolverUserId);
            tasksEntry.setFinishDate(now);
        }
        else {
            tasksEntry.setResolverUserId(0);
            tasksEntry.setFinishDate(null);
        }

        // Read the previous status before overwriting it; the notification
        // text depends on the old/new status transition.
        int oldStatus = tasksEntry.getStatus();

        tasksEntry.setStatus(status);

        tasksEntryPersistence.update(tasksEntry);

        // Social

        addSocialActivity(status, tasksEntry, serviceContext);

        // Notifications

        sendNotificationEvent(
            tasksEntry, oldStatus, tasksEntry.getAssigneeUserId(),
            serviceContext);

        return tasksEntry;
    }

    /**
     * Records a social activity for the update: REOPEN_ENTRY or
     * RESOLVE_ENTRY for those status transitions, UPDATE_ENTRY otherwise.
     * The entry title is stored as extra activity data.
     */
    protected void addSocialActivity(
            int status, TasksEntry tasksEntry, ServiceContext serviceContext)
        throws PortalException, SystemException {

        int activity = TasksActivityKeys.UPDATE_ENTRY;

        if (status == TasksEntryConstants.STATUS_REOPENED) {
            activity = TasksActivityKeys.REOPEN_ENTRY;
        }
        else if (status == TasksEntryConstants.STATUS_RESOLVED) {
            activity = TasksActivityKeys.RESOLVE_ENTRY;
        }

        JSONObject extraDataJSONObject = JSONFactoryUtil.createJSONObject();

        extraDataJSONObject.put("title", tasksEntry.getTitle());

        SocialActivityLocalServiceUtil.addActivity(
            serviceContext.getUserId(), tasksEntry.getGroupId(),
            TasksEntry.class.getName(), tasksEntry.getTasksEntryId(), activity,
            extraDataJSONObject.toString(), tasksEntry.getAssigneeUserId());
    }

    /**
     * Sends a website notification about a task change to the previous
     * assignee, the creator, and the current assignee (minus the acting
     * user). The notification title key depends on what changed:
     * assignment, reassignment, a status transition, or a plain
     * modification.
     *
     * @param oldStatus the entry's status before the change, or STATUS_ALL
     *        as a sentinel meaning the entry was just created
     * @param oldAssigneeUserId the assignee before the change
     */
    protected void sendNotificationEvent(
            TasksEntry tasksEntry, int oldStatus, long oldAssigneeUserId,
            ServiceContext serviceContext)
        throws PortalException, SystemException {

        // Collect the distinct interested users; a HashSet removes
        // duplicates (e.g. creator == assignee).
        HashSet<Long> receiverUserIds = new HashSet<Long>(3);

        receiverUserIds.add(oldAssigneeUserId);
        receiverUserIds.add(tasksEntry.getUserId());
        receiverUserIds.add(tasksEntry.getAssigneeUserId());

        // The acting user does not notify themselves.
        receiverUserIds.remove(serviceContext.getUserId());

        JSONObject notificationEventJSONObject =
            JSONFactoryUtil.createJSONObject();

        notificationEventJSONObject.put(
            "classPK", tasksEntry.getTasksEntryId());
        notificationEventJSONObject.put("userId", serviceContext.getUserId());

        for (long receiverUserId : receiverUserIds) {

            // Skip the 0 placeholder id and users who opted out of website
            // delivery for the Tasks portlet.
            if ((receiverUserId == 0) ||
                !UserNotificationManagerUtil.isDeliver(
                    receiverUserId, TasksPortletKeys.TASKS, 0,
                    TasksEntryConstants.STATUS_ALL,
                    UserNotificationDeliveryConstants.TYPE_WEBSITE)) {

                continue;
            }

            String title = StringPool.BLANK;

            if (oldStatus == TasksEntryConstants.STATUS_ALL) {

                // New entry (see the STATUS_ALL sentinel above).
                title = "x-assigned-you-a-task";
            }
            else if (tasksEntry.getAssigneeUserId() != oldAssigneeUserId) {
                if (receiverUserId == oldAssigneeUserId) {
                    title = "x-reassigned-your-task";
                }
                else {
                    title = "x-assigned-you-a-task";
                }
            }
            else if (tasksEntry.getStatus() != oldStatus) {

                // Only open/reopened/resolved transitions are announced;
                // note this returns (not continues), so remaining receivers
                // are skipped as well.
                if ((tasksEntry.getStatus() !=
                        TasksEntryConstants.STATUS_OPEN) &&
                    (tasksEntry.getStatus() !=
                        TasksEntryConstants.STATUS_REOPENED) &&
                    (tasksEntry.getStatus() !=
                        TasksEntryConstants.STATUS_RESOLVED)) {

                    return;
                }

                String statusLabel = TasksEntryConstants.getStatusLabel(
                    tasksEntry.getStatus());

                title = "x-" + statusLabel + "-the-task";
            }
            else {
                title = "x-modified-the-task";
            }

            notificationEventJSONObject.put("title", title);

            NotificationEvent notificationEvent =
                NotificationEventFactoryUtil.createNotificationEvent(
                    System.currentTimeMillis(), TasksPortletKeys.TASKS,
                    notificationEventJSONObject);

            notificationEvent.setDeliveryRequired(0);

            UserNotificationEventLocalServiceUtil.addUserNotificationEvent(
                receiverUserId, notificationEvent);
        }
    }

    /**
     * Ensures the title is non-blank.
     *
     * @throws PortalException (TasksEntryTitleException) if the title is
     *         null or empty
     */
    protected void validate(String title) throws PortalException {
        if (Validator.isNull(title)) {
            throw new TasksEntryTitleException();
        }
    }
}
|
{
"content_hash": "01bb909c727e952074e7b9bcc9951185",
"timestamp": "",
"source": "github",
"line_count": 490,
"max_line_length": 80,
"avg_line_length": 28.76530612244898,
"alnum_prop": 0.7779354380986165,
"repo_name": "gamerson/liferay-blade-cli",
"id": "a10efaf0bd8562fd7bd22d7216074398fa5e60c6",
"size": "14910",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "cli/test-resources/projects/tasks-plugins-sdk/portlets/tasks-portlet/docroot/WEB-INF/src/com/liferay/tasks/service/impl/TasksEntryLocalServiceImpl.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "851"
},
{
"name": "CSS",
"bytes": "14549"
},
{
"name": "Dockerfile",
"bytes": "286"
},
{
"name": "FreeMarker",
"bytes": "726"
},
{
"name": "Groovy",
"bytes": "8672"
},
{
"name": "Java",
"bytes": "1755046"
},
{
"name": "JavaScript",
"bytes": "5761"
},
{
"name": "SCSS",
"bytes": "23"
},
{
"name": "Shell",
"bytes": "11643"
}
]
}
|
package org.elasticsearch.action.fieldcaps;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.IndicesRequest;
import org.elasticsearch.action.ValidateActions;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import static org.elasticsearch.common.xcontent.ObjectParser.fromList;
/**
 * Request for the field capabilities API: asks one or more indices which
 * capabilities (searchable, aggregatable, ...) the given field names have.
 * Carries the list of field names, the target indices, and the indices
 * options used to resolve them.
 */
public class FieldCapabilitiesRequest extends ActionRequest
        implements IndicesRequest.Replaceable {

    public static final ParseField FIELDS_FIELD = new ParseField("fields");
    public static final String NAME = "field_caps_request";

    private String[] indices = Strings.EMPTY_ARRAY;
    private IndicesOptions indicesOptions = IndicesOptions.strictExpandOpen();
    private String[] fields = Strings.EMPTY_ARRAY;

    // Shared, effectively-constant parser: declared final so it cannot be
    // reassigned after class initialization (it was previously non-final).
    private static final ObjectParser<FieldCapabilitiesRequest, Void> PARSER =
        new ObjectParser<>(NAME, FieldCapabilitiesRequest::new);

    static {
        PARSER.declareStringArray(fromList(String.class, FieldCapabilitiesRequest::fields),
            FIELDS_FIELD);
    }

    public FieldCapabilitiesRequest() {}

    @Override
    public void readFrom(StreamInput in) throws IOException {
        super.readFrom(in);
        fields = in.readStringArray();
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        out.writeStringArray(fields);
    }

    /**
     * Parses a request body of the form {@code {"fields": [...]}} into a new
     * request instance.
     */
    public static FieldCapabilitiesRequest parseFields(XContentParser parser) throws IOException {
        return PARSER.parse(parser, null);
    }

    /**
     * Sets the list of field names to retrieve; duplicates are removed.
     *
     * @throws IllegalArgumentException if {@code fields} is null or empty
     */
    public FieldCapabilitiesRequest fields(String... fields) {
        if (fields == null || fields.length == 0) {
            throw new IllegalArgumentException("specified fields can't be null or empty");
        }
        Set<String> fieldSet = new HashSet<>(Arrays.asList(fields));
        this.fields = fieldSet.toArray(new String[0]);
        return this;
    }

    /**
     * The field names to retrieve. NOTE(review): returns the internal array
     * without a defensive copy; callers must not mutate it.
     */
    public String[] fields() {
        return fields;
    }

    /**
     * Sets the indices to look up.
     */
    public FieldCapabilitiesRequest indices(String... indices) {
        this.indices = indices;
        return this;
    }

    /** Sets the options controlling how index expressions are resolved. */
    public FieldCapabilitiesRequest indicesOptions(IndicesOptions indicesOptions) {
        this.indicesOptions = indicesOptions;
        return this;
    }

    @Override
    public String[] indices() {
        return indices;
    }

    @Override
    public IndicesOptions indicesOptions() {
        return indicesOptions;
    }

    /** The request is valid only when at least one field name is present. */
    @Override
    public ActionRequestValidationException validate() {
        ActionRequestValidationException validationException = null;
        if (fields == null || fields.length == 0) {
            validationException =
                ValidateActions.addValidationError("no fields specified", validationException);
        }
        return validationException;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        FieldCapabilitiesRequest that = (FieldCapabilitiesRequest) o;

        if (!Arrays.equals(indices, that.indices)) return false;
        if (!indicesOptions.equals(that.indicesOptions)) return false;
        return Arrays.equals(fields, that.fields);
    }

    @Override
    public int hashCode() {
        int result = Arrays.hashCode(indices);
        result = 31 * result + indicesOptions.hashCode();
        result = 31 * result + Arrays.hashCode(fields);
        return result;
    }
}
|
{
"content_hash": "a398275ec3bdcdc9c5d9a4945b2cbce0",
"timestamp": "",
"source": "github",
"line_count": 127,
"max_line_length": 98,
"avg_line_length": 31.677165354330707,
"alnum_prop": 0.694009445687298,
"repo_name": "jprante/elasticsearch",
"id": "7eab9112162821555a9c68993a40e900902fae7e",
"size": "4811",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "core/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "11081"
},
{
"name": "Batchfile",
"bytes": "13877"
},
{
"name": "Emacs Lisp",
"bytes": "3341"
},
{
"name": "FreeMarker",
"bytes": "45"
},
{
"name": "Groovy",
"bytes": "312099"
},
{
"name": "HTML",
"bytes": "3399"
},
{
"name": "Java",
"bytes": "39576139"
},
{
"name": "Perl",
"bytes": "7271"
},
{
"name": "Python",
"bytes": "54851"
},
{
"name": "Shell",
"bytes": "108816"
}
]
}
|
#ifndef QISCIICODEC_P_H
#define QISCIICODEC_P_H
//
// W A R N I N G
// -------------
//
// This file is not part of the Qt API. It exists purely as an
// implementation detail. This header file may change from version to
// version without notice, or even be removed.
//
// We mean it.
//
#include "QtCore/qtextcodec.h"
QT_BEGIN_NAMESPACE
#ifndef QT_NO_CODECS
// QTextCodec implementation for the ISCII encodings (name suggests Indian
// Script Code for Information Interchange — confirm against the .cpp file).
class QIsciiCodec : public QTextCodec {
public:
    // idx selects which codec variant this instance represents; the exact
    // mapping is defined in the implementation file — TODO confirm.
    explicit QIsciiCodec(int i) : idx(i) {}
    ~QIsciiCodec();

    // QTextCodec interface: codec identity.
    QByteArray name() const;
    int mibEnum() const;

    // QTextCodec interface: conversion between the 8-bit encoding and
    // Unicode, with ConverterState carrying partial-input state.
    QString convertToUnicode(const char *, int, ConverterState *) const;
    QByteArray convertFromUnicode(const QChar *, int, ConverterState *) const;

private:
    int idx;
};
#endif // QT_NO_CODECS
QT_END_NAMESPACE
#endif // QISCIICODEC_P_H
|
{
"content_hash": "0e86d0701fcd09412e717fa2505550ee",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 78,
"avg_line_length": 18.714285714285715,
"alnum_prop": 0.6730279898218829,
"repo_name": "mediathread/mdtprint",
"id": "8db8ed73ac403fcf8d9bf5a09c56e9c69f93c96b",
"size": "2753",
"binary": false,
"copies": "19",
"ref": "refs/heads/master",
"path": "app/bower_components/phantom/src/qt/src/corelib/codecs/qisciicodec_p.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2377"
},
{
"name": "HTML",
"bytes": "17662"
},
{
"name": "JavaScript",
"bytes": "97267"
},
{
"name": "PHP",
"bytes": "1090"
}
]
}
|
<?xml version="1.0" encoding="UTF-8"?>
<!-- IntelliJ IDEA project settings (.idea/misc.xml). -->
<project version="4">
  <component name="EntryPointsManager">
    <entry_points version="2.0" />
  </component>
  <!-- Tells IDEA this is a Maven project rooted at pom.xml. -->
  <component name="MavenProjectsManager">
    <option name="originalFiles">
      <list>
        <option value="$PROJECT_DIR$/pom.xml" />
      </list>
    </option>
  </component>
  <!-- JDK 1.7 language level; compiler output goes to $PROJECT_DIR$/out. -->
  <component name="ProjectRootManager" version="2" languageLevel="JDK_1_7" assert-keyword="true" jdk-15="true" project-jdk-name="1.7" project-jdk-type="JavaSDK">
    <output url="file://$PROJECT_DIR$/out" />
  </component>
</project>
|
{
"content_hash": "deaeb2ebbcb366d402cce5e210dd8bd9",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 161,
"avg_line_length": 33.35294117647059,
"alnum_prop": 0.6472663139329806,
"repo_name": "damhonglinh/StockMonitor",
"id": "4c4c1de6ff627dacaafbcbaf0f6d0369d68188cb",
"size": "567",
"binary": false,
"copies": "10",
"ref": "refs/heads/master",
"path": ".idea/misc.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "84943"
}
]
}
|
package slogo_commands;
/**
* @author Larissa
* purpose: implements XCOORDINATE command
 * dependencies: abstract class TurtleCommand and interface Command
*/
import java.util.List;
import slogo_logic.Turtle;
import slogo_logic.TurtleCommand;
/**
 * SLogo XCOORDINATE command: reports the turtle's current x position as the
 * command result, without consuming any parameters.
 */
public class XCOORDINATE extends TurtleCommand {

    @Override
    protected String execute(Turtle turtle, List<String> params) {
        // Query the turtle and render its x coordinate as the return value.
        return String.valueOf(turtle.getX());
    }
}
|
{
"content_hash": "c3b2afc560903aab04fe16f39db700b3",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 69,
"avg_line_length": 20.666666666666668,
"alnum_prop": 0.7258064516129032,
"repo_name": "C9Chrispy/SLogo",
"id": "e850664eb43762f305db90ed051e0dcd24b4adce",
"size": "434",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "slogo_commands/XCOORDINATE.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "147438"
}
]
}
|
/**
* This is the Karma configuration file. It contains information about this skeleton
* that provides the test runner with instructions on how to run the tests and
* generate the code coverage report.
*
* For more info, see: http://karma-runner.github.io/0.12/config/configuration-file.html
*/
module.exports = function(config) {
config.set({
/**
* These are the files required to run the tests.
*
* The `Function.prototype.bind` polyfill is required by PhantomJS
* because it uses an older version of JavaScript.
*/
files: [
'./test/polyfill.js',
'./test/main.js'
],
/**
* The actual tests are preprocessed by the karma-webpack plugin, so that
* their source can be properly transpiled.
*/
preprocessors: {
'./test/main.js': ['webpack']
},
/**
* We want to run the tests using the PhantomJS headless browser.
* This is especially useful for continuous integration.
*/
browsers: ['PhantomJS'],
/**
* Use Mocha as the test framework, Sinon for mocking, and
* Chai for assertions.
*/
frameworks: ['mocha', 'sinon-chai'],
/**
* After running the tests, return the results and generate a
* code coverage report.
*/
reporters: ['mocha', 'coverage'],
/**
* When generating a code coverage report, use `lcov` format and
* place the result in coverage/lcov.info
*
* This file will be sent to Coveralls by the `coveralls` npm script.
*/
coverageReporter: {
dir: 'coverage/',
reporters: [
{ type: 'lcovonly', subdir: '.', file: 'lcov.info' },
{ type: 'html', subdir: 'html' }
]
},
/**
* The configuration for the karma-webpack plugin.
*
* This is very similar to the main webpack.local.config.js, with the
* exception of specifying an istanbul-transformer post loader so
* that we can generate an accurate code coverage report.
*/
webpack: {
module: {
loaders: [
{ test: /\.jsx?$/, exclude: /node_modules/, loaders: ['webpack-espower', 'babel'] },
],
postLoaders: [{
test: /\.jsx?$/,
exclude: /(test|node_modules)\//,
loader: 'istanbul-instrumenter'
}]
},
resolve: {
extensions: ['', '.js', '.jsx']
}
},
/**
* Configuration option to turn off verbose logging of webpack compilation.
*/
webpackMiddleware: {
noInfo: true
},
/**
* Once the mocha test suite returns, we want to exit from the test runner as well.
*/
singleRun: true,
/**
* List of plugins
*/
plugins: [
'karma-*'
],
});
}
|
{
"content_hash": "773a13220beb6c50f5938f836e70d8b8",
"timestamp": "",
"source": "github",
"line_count": 104,
"max_line_length": 94,
"avg_line_length": 26.33653846153846,
"alnum_prop": 0.5797736400146039,
"repo_name": "boneyao/react-emoji",
"id": "5aa123069aa635d1d84718f219d667f65bb46cd4",
"size": "2739",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "karma.config.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "314"
},
{
"name": "JavaScript",
"bytes": "7224"
},
{
"name": "Shell",
"bytes": "660"
}
]
}
|
// Barrel re-export: expose the Angulartics2Piwik provider from the piwik
// entry point.
export { Angulartics2Piwik } from "./piwik";
|
{
"content_hash": "dd8942ce917a7fbb80186c431d32a092",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 39,
"avg_line_length": 39,
"alnum_prop": 0.8205128205128205,
"repo_name": "cdnjs/cdnjs",
"id": "609140f29bcaaa9b25671a6b5a59491459f9ec37",
"size": "39",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "ajax/libs/angulartics2/7.4.0/piwik/esm5/angulartics2-piwik.min.js",
"mode": "33188",
"license": "mit",
"language": []
}
|
"""
Helpers to interact with the ERFA library, in particular for leap seconds.
"""
from datetime import datetime, timedelta
from warnings import warn
import numpy as np
from astropy.utils.decorators import classproperty
from astropy.utils.exceptions import ErfaWarning
from .ufunc import get_leap_seconds, set_leap_seconds, dt_eraLEAPSECOND
class leap_seconds:
    """Leap second management.
    This singleton class allows access to ERFA's leap second table,
    using the methods 'get', 'set', and 'update'.
    One can also check expiration with 'expires' and 'expired'.
    Note that usage of the class is similar to a ``ScienceState`` class,
    but it cannot be used as a context manager.
    """
    # Cached explicit expiration date; filled in by ``set``/``update`` when
    # the supplied table carried an 'expires' attribute, otherwise left None
    # so the ``expires`` property falls back to a heuristic.
    _expires = None
    """Explicit expiration date inferred from leap-second table."""
    _expiration_days = 180
    """Number of days beyond last leap second at which table expires."""
    def __init__(self):
        raise RuntimeError("This class is a singleton. Do not instantiate.")
    @classmethod
    def get(cls):
        """Get the current leap-second table used internally."""
        return get_leap_seconds()
    @classmethod
    def validate(cls, table):
        """Validate a leap-second table.
        Parameters
        ----------
        table : array-like
            Must have 'year', 'month', and 'tai_utc' entries. If a 'day'
            entry is present, it will be checked that it is always 1.
            If ``table`` has an 'expires' attribute, it will be interpreted
            as an expiration date.
        Returns
        -------
        array : `~numpy.ndarray`
            Structures array with 'year', 'month', 'tai_utc'.
        expires: `~datetime.datetime` or None
            Possible expiration date inferred from the table. `None` if not
            present or if not a `~datetime.datetime` or `~astropy.time.Time`
            instance and not parsable as a 'dd month yyyy' string.
        Raises
        ------
        ValueError
            If the leap seconds in the table are not on the 1st of January or
            July, or if the sorted TAI-UTC do not increase in increments of 1.
        """
        try:
            day = table['day']
        except Exception:
            # No 'day' column: treat every entry as being on the 1st, which
            # the sanity check further down then accepts unconditionally.
            day = 1
        expires = getattr(table, 'expires', None)
        if expires is not None and not isinstance(expires, datetime):
            # Maybe astropy Time? Cannot go via strftime, since that
            # might need leap-seconds. If not, try standard string
            # format from leap_seconds.dat and leap_seconds.list
            isot = getattr(expires, 'isot', None)
            try:
                if isot is not None:
                    # Time-like object: parse only the date part of the ISO
                    # string (everything before the 'T').
                    expires = datetime.strptime(isot.partition('T')[0],
                                                '%Y-%m-%d')
                else:
                    expires = datetime.strptime(expires, '%d %B %Y')
            except Exception as exc:
                # Unparsable expiration is not fatal; warn and drop it.
                warn(f"ignoring non-datetime expiration {expires}; "
                     f"parsing it raised {exc!r}", ErfaWarning)
                expires = None
        # Take care of astropy Table.
        if hasattr(table, '__array__'):
            table = table.__array__()[list(dt_eraLEAPSECOND.names)]
        table = np.array(table, dtype=dt_eraLEAPSECOND, copy=False,
                         ndmin=1)
        # Simple sanity checks.
        if table.ndim > 1:
            raise ValueError("can only pass in one-dimensional tables.")
        # Pre-1972 entries are exempt: TAI-UTC changed at irregular dates
        # before leap seconds were introduced.
        if not np.all(((day == 1) &
                       (table['month'] == 1) | (table['month'] == 7)) |
                      (table['year'] < 1972)):
            raise ValueError("leap seconds inferred that are not on "
                             "1st of January or 1st of July.")
        if np.any((table['year'][:-1] > 1970) &
                  (np.diff(table['tai_utc']) != 1)):
            raise ValueError("jump in TAI-UTC by something else than one.")
        return table, expires
    @classmethod
    def set(cls, table=None):
        """Set the ERFA leap second table.
        Note that it is generally safer to update the leap-second table than
        to set it directly, since most tables do not have the pre-1970 changes
        in TAI-UTC that are part of the built-in ERFA table.
        Parameters
        ----------
        table : array-like or `None`
            Leap-second table that should at least hold columns of 'year',
            'month', and 'tai_utc'. Only simple validation is done before it
            is being used, so care need to be taken that entries are correct.
            If `None`, reset the ERFA table to its built-in values.
        Raises
        ------
        ValueError
            If the leap seconds in the table are not on the 1st of January or
            July, or if the sorted TAI-UTC do not increase in increments of 1.
        """
        if table is None:
            expires = None
        else:
            table, expires = cls.validate(table)
        set_leap_seconds(table)
        cls._expires = expires
    @classproperty
    def expires(cls):
        """The expiration date of the current ERFA table.
        This is either a date inferred from the last table used to update or
        set the leap-second array, or a number of days beyond the last leap
        second.
        """
        if cls._expires is None:
            # No explicit expiration recorded: assume validity for
            # ``_expiration_days`` past the date of the last leap second.
            last = cls.get()[-1]
            return (datetime(last['year'], last['month'], 1) +
                    timedelta(cls._expiration_days))
        else:
            return cls._expires
    @classproperty
    def expired(cls):
        """Whether the leap second table is valid beyond the present."""
        return cls.expires < datetime.now()
    @classmethod
    def update(cls, table):
        """Add any leap seconds not already present to the ERFA table.
        This method matches leap seconds with those present in the ERFA table,
        and extends the latter as necessary.
        If the ERFA leap seconds file was corrupted, it will be reset.
        If the table is corrupted, the ERFA file will be unchanged.
        Parameters
        ----------
        table : array-like or `~astropy.utils.iers.LeapSeconds`
            Array or table with TAI-UTC from leap seconds. Should have
            'year', 'month', and 'tai_utc' columns.
        Returns
        -------
        n_update : int
            Number of items updated.
        Raises
        ------
        ValueError
            If the leap seconds in the table are not on the 1st of January or
            July, or if the sorted TAI-UTC do not increase in increments of 1.
        """
        # Validate the incoming table FIRST, so a corrupt input cannot
        # modify the ERFA state below.
        table, expires = cls.validate(table)
        # Get erfa table and check it is OK; if not, reset it.
        try:
            erfa_ls, _ = cls.validate(cls.get())
        except Exception:
            cls.set()
            erfa_ls = cls.get()
        # Create the combined array and use it (validating the combination).
        ls = np.union1d(erfa_ls, table)
        cls.set(ls)
        # If the update table has an expiration beyond that inferred from
        # the new leap second second array, use it (but, now that the new
        # array is set, do not allow exceptions due to misformed expires).
        try:
            if expires is not None and expires > cls.expires:
                cls._expires = expires
        except Exception as exc:
            warn("table 'expires' attribute ignored as comparing it "
                 "with a datetime raised an error:\n" + str(exc),
                 ErfaWarning)
        return len(ls) - len(erfa_ls)
|
{
"content_hash": "4fa16c594c373720e1eb6a2110668156",
"timestamp": "",
"source": "github",
"line_count": 215,
"max_line_length": 78,
"avg_line_length": 35.46046511627907,
"alnum_prop": 0.5750262329485835,
"repo_name": "MSeifert04/astropy",
"id": "0c1333b7b8e8d80604654af9f314185d677b6b50",
"size": "7688",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "astropy/_erfa/helpers.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "444651"
},
{
"name": "C++",
"bytes": "1057"
},
{
"name": "HTML",
"bytes": "1172"
},
{
"name": "Objective-C",
"bytes": "615"
},
{
"name": "Python",
"bytes": "9891588"
},
{
"name": "TeX",
"bytes": "853"
}
]
}
|
This is the public Twilio REST API.
## Overview
This API client was generated by the [OpenAPI Generator](https://openapi-generator.tech) project from the OpenAPI specs located at [twilio/twilio-oai](https://github.com/twilio/twilio-oai/tree/main/spec). By using the [OpenAPI-spec](https://www.openapis.org/) from a remote server, you can easily generate an API client.
- API version: 1.37.3
- Package version: 1.0.0
- Build package: com.twilio.oai.TwilioGoGenerator
For more information, please visit [https://support.twilio.com](https://support.twilio.com)
## Installation
Install the following dependencies:
```shell
go get github.com/stretchr/testify/assert
go get golang.org/x/net/context
```
Put the package under your project folder and add the following to your imports:
```golang
import "./openapi"
```
## Documentation for API Endpoints
All URIs are relative to *https://monitor.twilio.com*
Class | Method | HTTP request | Description
------------ | ------------- | ------------- | -------------
*AlertsApi* | [**FetchAlert**](docs/AlertsApi.md#fetchalert) | **Get** /v1/Alerts/{Sid} |
*AlertsApi* | [**ListAlert**](docs/AlertsApi.md#listalert) | **Get** /v1/Alerts |
*EventsApi* | [**FetchEvent**](docs/EventsApi.md#fetchevent) | **Get** /v1/Events/{Sid} |
*EventsApi* | [**ListEvent**](docs/EventsApi.md#listevent) | **Get** /v1/Events |
## Documentation For Models
- [ListAlertResponse](docs/ListAlertResponse.md)
- [MonitorV1AlertInstance](docs/MonitorV1AlertInstance.md)
- [MonitorV1Event](docs/MonitorV1Event.md)
- [MonitorV1Alert](docs/MonitorV1Alert.md)
- [ListEventResponse](docs/ListEventResponse.md)
- [ListAlertResponseMeta](docs/ListAlertResponseMeta.md)
## Documentation For Authorization
## accountSid_authToken
- **Type**: HTTP basic authentication
Example
```golang
auth := context.WithValue(context.Background(), sw.ContextBasicAuth, sw.BasicAuth{
UserName: "username",
Password: "password",
})
r, err := client.Service.Operation(auth, args)
```
|
{
"content_hash": "504b7122ab7d4fd3e84923b1b4e56fbc",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 321,
"avg_line_length": 30.892307692307693,
"alnum_prop": 0.7116533864541833,
"repo_name": "twilio/twilio-go",
"id": "3ba1f82933782cc6f6bbafc7e6b79e70e0590ee2",
"size": "2037",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "rest/monitor/v1/README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "200"
},
{
"name": "Go",
"bytes": "4840470"
},
{
"name": "Makefile",
"bytes": "1414"
}
]
}
|
# Boolean parameter: when set to true, the command runs only when triggered
# by a refresh event (from `subscribe` or `notify`), not on every catalog run.
newparam(:refreshonly) do
  desc <<-'EOT'
    The command should only be run as a
    refresh mechanism for when a dependent object is changed. It only
    makes sense to use this option when this command depends on some
    other object; it is useful for triggering an action:
    Note that only `subscribe` and `notify` can trigger actions, not `require`,
    so it only makes sense to use `refreshonly` with `subscribe` or `notify`.
  EOT
  # Only the two boolean symbols are accepted as values.
  newvalues(:true, :false)
end
|
{
"content_hash": "64ff637d320c588a20492660d3a631f7",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 79,
"avg_line_length": 34.07142857142857,
"alnum_prop": 0.7148846960167715,
"repo_name": "Dlozitskiy/orawls",
"id": "b4885ff60041ab09cbe6c5a47771d05607bf650d",
"size": "477",
"binary": false,
"copies": "13",
"ref": "refs/heads/master",
"path": "lib/puppet/type/wls_exec/refreshonly.rb",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "544507"
},
{
"name": "Puppet",
"bytes": "175647"
},
{
"name": "Python",
"bytes": "586"
},
{
"name": "Ruby",
"bytes": "389001"
},
{
"name": "Shell",
"bytes": "1636"
}
]
}
|
<?php
namespace Symfony\Bundle\FrameworkBundle\Command;
use Symfony\Bundle\FrameworkBundle\Secrets\AbstractVault;
use Symfony\Component\Console\Command\Command;
use Symfony\Component\Console\Input\InputArgument;
use Symfony\Component\Console\Input\InputInterface;
use Symfony\Component\Console\Input\InputOption;
use Symfony\Component\Console\Output\ConsoleOutputInterface;
use Symfony\Component\Console\Output\OutputInterface;
use Symfony\Component\Console\Style\SymfonyStyle;
/**
* @author Tobias Schultze <http://tobion.de>
* @author Jérémy Derussé <jeremy@derusse.com>
* @author Nicolas Grekas <p@tchwork.com>
*
* @internal
*/
final class SecretsSetCommand extends Command
{
    protected static $defaultName = 'secrets:set';
    protected static $defaultDescription = 'Set a secret in the vault';

    /** @var AbstractVault The vault secrets are written to by default. */
    private $vault;

    /** @var AbstractVault|null Optional vault holding local overrides; may be null. */
    private $localVault;

    public function __construct(AbstractVault $vault, AbstractVault $localVault = null)
    {
        $this->vault = $vault;
        $this->localVault = $localVault;

        parent::__construct();
    }

    /**
     * Declares the command's arguments, options and interactive help text.
     */
    protected function configure()
    {
        $this
            ->setDescription(self::$defaultDescription)
            ->addArgument('name', InputArgument::REQUIRED, 'The name of the secret')
            ->addArgument('file', InputArgument::OPTIONAL, 'A file where to read the secret from or "-" for reading from STDIN')
            ->addOption('local', 'l', InputOption::VALUE_NONE, 'Update the local vault.')
            ->addOption('random', 'r', InputOption::VALUE_OPTIONAL, 'Generate a random value.', false)
            ->setHelp(<<<'EOF'
The <info>%command.name%</info> command stores a secret in the vault.
<info>%command.full_name% <name></info>
To reference secrets in services.yaml or any other config
files, use <info>"%env(<name>)%"</info>.
By default, the secret value should be entered interactively.
Alternatively, provide a file where to read the secret from:
    <info>php %command.full_name% <name> filename</info>
Use "-" as a file name to read from STDIN:
    <info>cat filename | php %command.full_name% <name> -</info>
Use <info>--local</info> to override secrets for local needs.
EOF
            )
        ;
    }

    /**
     * Reads the secret value (random, file, STDIN, or interactive prompt)
     * and seals it into the selected vault.
     *
     * @return int 0 on success, 1 when the target vault is unusable
     *
     * @throws \InvalidArgumentException when the given file is missing or unreadable
     */
    protected function execute(InputInterface $input, OutputInterface $output): int
    {
        // Status messages go to stderr so that stdout stays clean (e.g. for piping).
        $errOutput = $output instanceof ConsoleOutputInterface ? $output->getErrorOutput() : $output;
        $io = new SymfonyStyle($input, $errOutput);
        $name = $input->getArgument('name');
        $vault = $input->getOption('local') ? $this->localVault : $this->vault;

        if (null === $vault) {
            $io->error('The local vault is disabled.');

            return 1;
        }

        // A local override only makes sense for a secret that exists in the real vault.
        if ($this->localVault === $vault && !\array_key_exists($name, $this->vault->list())) {
            $io->error(sprintf('Secret "%s" does not exist in the vault, you cannot override it locally.', $name));

            return 1;
        }

        // --random without a value yields null, defaulted to 16 bytes; when the
        // option is absent its default (false) makes the comparison fail.
        if (0 < $random = $input->getOption('random') ?? 16) {
            // URL-safe base64: substitute '+' and '/' so the value is shell/URL friendly.
            $value = strtr(substr(base64_encode(random_bytes($random)), 0, $random), '+/', '-_');
        } elseif (!$file = $input->getArgument('file')) {
            $value = $io->askHidden('Please type the secret value');

            if (null === $value) {
                $io->warning('No value provided: using empty string');
                $value = '';
            }
        } elseif ('-' === $file) {
            $value = file_get_contents('php://stdin');
        } elseif (is_file($file) && is_readable($file)) {
            $value = file_get_contents($file);
        } elseif (!is_file($file)) {
            throw new \InvalidArgumentException(sprintf('File not found: "%s".', $file));
        } elseif (!is_readable($file)) {
            throw new \InvalidArgumentException(sprintf('File is not readable: "%s".', $file));
        }

        if ($vault->generateKeys()) {
            $io->success($vault->getLastMessage());

            if ($this->vault === $vault) {
                $io->caution('DO NOT COMMIT THE DECRYPTION KEY FOR THE PROD ENVIRONMENT⚠️');
            }
        }

        $vault->seal($name, $value);

        $io->success($vault->getLastMessage() ?? 'Secret was successfully stored in the vault.');

        if (0 < $random) {
            // Only the value itself goes to stdout; the surrounding text goes to stderr.
            $errOutput->write(' // The generated random value is: <comment>');
            $output->write($value);
            $errOutput->writeln('</comment>');
            $io->newLine();
        }

        // Bug fix: guard against a missing local vault before calling reveal();
        // previously this fatalled ("Call to a member function reveal() on null")
        // whenever the command was constructed without a local vault.
        if ($this->vault === $vault && null !== $this->localVault && null !== $this->localVault->reveal($name)) {
            $io->comment('Note that this secret is overridden in the local vault.');
        }

        return 0;
    }
}
|
{
"content_hash": "7617d28689a5b9ea0c973baaf8701a9a",
"timestamp": "",
"source": "github",
"line_count": 133,
"max_line_length": 128,
"avg_line_length": 35.233082706766915,
"alnum_prop": 0.6032863849765259,
"repo_name": "Nyholm/symfony",
"id": "20b898f073cbc3e1b23b2ab6646fe01b6f09bd67",
"size": "4922",
"binary": false,
"copies": "5",
"ref": "refs/heads/5.x",
"path": "src/Symfony/Bundle/FrameworkBundle/Command/SecretsSetCommand.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "49627"
},
{
"name": "HTML",
"bytes": "16735"
},
{
"name": "Hack",
"bytes": "26"
},
{
"name": "JavaScript",
"bytes": "27689"
},
{
"name": "Lua",
"bytes": "1846"
},
{
"name": "PHP",
"bytes": "22653869"
},
{
"name": "Shell",
"bytes": "3153"
},
{
"name": "Twig",
"bytes": "361226"
}
]
}
|
title: asn0
type: products
image: /img/Screen Shot 2017-05-09 at 11.56.54 AM.png
heading: n0
description: lksadjf lkasdjf lksajdf lksdaj flksadj flksa fdj
main:
heading: Foo Bar BAz
description: |-
***This is i a thing***kjh hjk kj
# Blah Blah
## Blah
### Baah
image1:
alt: kkkk
---
|
{
"content_hash": "d12cb1f33daa60db1e658e363e0998b7",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 61,
"avg_line_length": 22.2,
"alnum_prop": 0.6636636636636637,
"repo_name": "pblack/kaldi-hugo-cms-template",
"id": "a1d507204a2506988a4c830c275dd05830baff2b",
"size": "337",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "site/content/pages2/asn0.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "94394"
},
{
"name": "HTML",
"bytes": "18889"
},
{
"name": "JavaScript",
"bytes": "10014"
}
]
}
|
"""
Tests for netapi, i.e. the interface native modules will be developed against
"""
import pytest
from micropsi_core import runtime as micropsi
from micropsi_core.nodenet.node import Nodetype
from micropsi_core.tests import test_node_logic
def prepare(fixed_nodenet):
    """Return (nodenet, netapi, source) with one self-linked, active Source register."""
    net = micropsi.get_nodenet(fixed_nodenet)
    api = net.netapi
    src = api.create_node("Register", None, "Source")
    api.link(src, "gen", src, "gen")
    src.activation = 1
    net.step()
    return net, api, src
def add_dummyworld(fixed_nodenet):
    # Detach the nodenet from its current world (if any), create a fresh
    # DummyWorld, attach the nodenet via the DummyWorldAdapter, and return
    # the newly created world.
    nodenet = micropsi.get_nodenet(fixed_nodenet)
    if nodenet.world:
        nodenet.world.unregister_nodenet(nodenet.uid)
    # new_world returns a tuple; index 1 is the new world's uid.
    worlduid = micropsi.new_world("DummyWorld", "DummyWorld", "DummyOwner")[1]
    nodenet.world = micropsi.worlds[worlduid]
    nodenet.worldadapter = "DummyWorldAdapter"
    # NOTE(review): the worldadapter name is passed as the first argument
    # here; other call sites presumably pass the nodenet uid -- confirm
    # against World.register_nodenet's signature.
    nodenet.world.register_nodenet("DummyWorldAdapter", nodenet)
    return nodenet.world
def test_node_netapi_create_register_node(fixed_nodenet):
    """A freshly created Register node has sane defaults and a matching data dict."""
    net, netapi, source = prepare(fixed_nodenet)
    created = netapi.create_node("Register", None, "TestName")
    assert created is not None
    root_ns = netapi.get_nodespace(None)
    assert created.parent_nodespace == root_ns.uid
    assert created.type == "Register"
    assert created.uid is not None
    gen_gate = created.get_gate('gen')
    assert len(gen_gate.get_links()) == 0
    assert len(gen_gate.activations) == 1
    # the persistency-facing data dict mirrors the live attributes
    assert created.data['uid'] == created.uid
    assert created.data['name'] == created.name
    assert created.data['type'] == created.type
    created = netapi.create_node("Register", None)
    #TODO: teh weirdness, server-internally, we return uids as names, clients don't see this, confusion ensues
    #assert created.data['name'] == created.name
def test_node_netapi_create_pipe_node(fixed_nodenet):
    """A fresh Pipe node exposes all pipe gates, unlinked and with one activation each."""
    net, netapi, source = prepare(fixed_nodenet)
    node = netapi.create_node("Pipe", None, "TestName")
    assert node is not None
    assert node.parent_nodespace == netapi.get_nodespace(None).uid
    assert node.type == "Pipe"
    assert node.uid is not None
    # identical expectations for every pipe gate, in declaration order
    for gate_name in ('gen', 'sub', 'sur', 'por', 'ret', 'cat', 'exp'):
        gate = node.get_gate(gate_name)
        assert len(gate.get_links()) == 0
        assert len(gate.activations) == 1
    # frontend/persistency-oriented data dictionary test
    assert node.data['uid'] == node.uid
    for key in node.get_gate_types():
        # no explicit per-gate parameters stored; nodetype defaults apply
        assert key not in node.data['gate_parameters']
        for parameter, value in node.nodetype.gate_defaults[key].items():
            assert node.get_gate(key).get_parameter(parameter) == value
    assert node.data['name'] == node.name
    assert node.data['type'] == node.type
    node = netapi.create_node("Pipe", None)
    #TODO: teh weirdness, server-internally, we return uids as names, clients don't see this, confusion ensues
    #assert node.data['name'] == node.name
@pytest.mark.engine("dict_engine")
def test_node_netapi_create_concept_node(fixed_nodenet):
    """A fresh Concept node (dict engine only) exposes all concept gates."""
    net, netapi, source = prepare(fixed_nodenet)
    node = netapi.create_node("Concept", None, "TestName")
    assert node is not None
    assert node.parent_nodespace == netapi.get_nodespace(None).uid
    assert node.type == "Concept"
    assert node.uid is not None
    # identical expectations for every concept gate, in declaration order
    for gate_name in ('gen', 'sub', 'sur', 'por', 'ret', 'cat', 'exp', 'sym', 'ref'):
        gate = node.get_gate(gate_name)
        assert len(gate.get_links()) == 0
        assert len(gate.activations) == 1
    # frontend/persistency-oriented data dictionary test
    assert node.data['uid'] == node.uid
    assert node.data['name'] == node.name
    assert node.data['type'] == node.type
    node = netapi.create_node("Pipe", None)
    #TODO: teh weirdness, server-internally, we return uids as names, clients don't see this, confusion ensues
    #assert node.data['name'] == node.name
def test_node_netapi_create_node_in_nodespace(fixed_nodenet):
    """Nodes created inside a nodespace report that nodespace as their parent."""
    net, netapi, source = prepare(fixed_nodenet)
    ns = netapi.create_nodespace(None, "NestedNodespace")
    node = netapi.create_node("Register", ns.uid, "TestName")
    assert node.parent_nodespace == ns.uid
    assert node.data['parent_nodespace'] == ns.uid
def test_node_netapi_get_nodespace_one(fixed_nodenet):
    """get_nodespace returns the nodespace matching the given uid."""
    net, netapi, source = prepare(fixed_nodenet)
    created = netapi.create_nodespace(None, "TestName")
    fetched = netapi.get_nodespace(created.uid)
    assert fetched.uid == created.uid
    assert fetched.name == created.name
def test_node_netapi_get_nodespace_multi(fixed_nodenet):
    """get_nodespaces lists only direct children of the given nodespace."""
    net, netapi, source = prepare(fixed_nodenet)
    ns1 = netapi.create_nodespace(None, "TestName1")
    ns2 = netapi.create_nodespace(None, "TestName2")
    ns3 = netapi.create_nodespace(ns2.uid, "TestName3")
    root = netapi.get_nodespace(None)
    children = netapi.get_nodespaces(root.uid)
    child_uids = [x.uid for x in children]
    assert len(children) == 2
    assert ns1.uid in child_uids
    assert ns2.uid in child_uids
    # ns3 is nested below ns2, so it is not a direct child of the root
    assert ns3.uid not in child_uids
def test_node_netapi_get_node(fixed_nodenet):
    """get_node returns a node equivalent to the one just created."""
    net, netapi, source = prepare(fixed_nodenet)
    created = netapi.create_node("Register", None, "TestName")
    fetched = netapi.get_node(created.uid)
    assert fetched.uid == created.uid
    assert fetched.name == created.name
    assert fetched.data == created.data
    assert fetched.type == created.type
def test_node_netapi_get_nodes(fixed_nodenet):
    """get_nodes on the root nodespace includes newly created nodes."""
    net, netapi, source = prepare(fixed_nodenet)
    first = netapi.create_node("Register", None, "TestName1")
    second = netapi.create_node("Register", None, "TestName2")
    found = netapi.get_nodes(netapi.get_nodespace(None).uid)
    found_uids = [n.uid for n in found]
    assert first.uid in found_uids
    assert second.uid in found_uids
def test_node_netapi_get_nodes_by_name(fixed_nodenet):
    """get_nodes with a name prefix returns exactly the matching nodes."""
    net, netapi, source = prepare(fixed_nodenet)
    first = netapi.create_node("Register", None, "TestName1")
    second = netapi.create_node("Register", None, "TestName2")
    found = netapi.get_nodes(netapi.get_nodespace(None).uid, node_name_prefix="TestName")
    found_uids = [n.uid for n in found]
    assert len(found) == 2
    assert first.uid in found_uids
    assert second.uid in found_uids
def test_node_netapi_get_nodes_by_nodespace(fixed_nodenet):
    """get_nodes scoped to a nodespace returns the nodes created inside it."""
    net, netapi, source = prepare(fixed_nodenet)
    ns = netapi.create_nodespace(None, "NestedNodespace")
    first = netapi.create_node("Register", ns.uid, "TestName1")
    second = netapi.create_node("Register", ns.uid, "TestName2")
    found = netapi.get_nodes(ns.uid)
    found_uids = [n.uid for n in found]
    assert len(found) == 2
    assert first.uid in found_uids
    assert second.uid in found_uids
def test_node_netapi_get_nodes_by_nodetype(fixed_nodenet):
    """get_nodes filtered by nodetype finds Registers across all nodespaces."""
    net, netapi, source = prepare(fixed_nodenet)
    ns = netapi.create_nodespace(None, "NestedNodespace")
    pipe_node = netapi.create_node("Pipe", ns.uid, "TestName1")
    reg_node = netapi.create_node("Register", ns.uid, "TestName2")
    registers = netapi.get_nodes(nodetype="Register")
    assert len(registers) == 2
    register_uids = [n.uid for n in registers]
    # the Pipe is excluded; the nested Register and the fixture's Source are found
    assert pipe_node.uid not in register_uids
    assert reg_node.uid in register_uids
    assert source.uid in register_uids
def test_node_netapi_get_nodes_by_name_and_nodespace(fixed_nodenet):
    """Combining name prefix and nodespace yields only nodes matching both."""
    net, netapi, source = prepare(fixed_nodenet)
    ns = netapi.create_nodespace(None, "NestedNodespace")
    outside = netapi.create_node("Register", None, "TestName1")
    inside = netapi.create_node("Register", ns.uid, "TestName2")
    found = netapi.get_nodes(ns.uid, "TestName")
    assert len(found) == 1
    assert inside.uid in [n.uid for n in found]
def test_node_netapi_get_nodes_in_gate_field(fixed_nodenet):
    """get_nodes_in_gate_field returns every node linked from the given gate."""
    net, netapi, source = prepare(fixed_nodenet)
    parent = netapi.create_node("Pipe", None, "TestName1")
    child_a = netapi.create_node("Pipe", None, "TestName2")
    child_b = netapi.create_node("Pipe", None, "TestName3")
    child_c = netapi.create_node("Pipe", None, "TestName4")
    netapi.link_with_reciprocal(parent, child_a, "subsur")
    netapi.link_with_reciprocal(parent, child_b, "subsur")
    netapi.link_with_reciprocal(parent, child_c, "subsur")
    netapi.link_with_reciprocal(child_a, child_b, "porret")
    linked = netapi.get_nodes_in_gate_field(parent, "sub")
    linked_uids = [n.uid for n in linked]
    assert len(linked) == 3
    assert child_a.uid in linked_uids
    assert child_b.uid in linked_uids
    assert child_c.uid in linked_uids
def test_node_netapi_get_nodes_in_gate_field_all_links(fixed_nodenet):
    """Without a gate argument, nodes reachable via any gate are returned."""
    net, netapi, source = prepare(fixed_nodenet)
    parent = netapi.create_node("Pipe", None, "TestName1")
    child_a = netapi.create_node("Pipe", None, "TestName2")
    child_b = netapi.create_node("Pipe", None, "TestName3")
    child_c = netapi.create_node("Pipe", None, "TestName4")
    netapi.link_with_reciprocal(parent, child_a, "subsur")
    netapi.link_with_reciprocal(parent, child_b, "subsur")
    netapi.link_with_reciprocal(parent, child_c, "subsur")
    netapi.link_with_reciprocal(child_a, child_b, "porret")
    # child_a links out via 'sur' (to parent) and 'por' (to child_b)
    linked = netapi.get_nodes_in_gate_field(child_a)
    linked_uids = [n.uid for n in linked]
    assert len(linked) == 2
    assert parent.uid in linked_uids
    assert child_b.uid in linked_uids
def test_node_netapi_get_nodes_in_gate_field_with_limitations(fixed_nodenet):
    """Nodes reachable via 'sub' that carry 'por' links are filtered out."""
    net, netapi, source = prepare(fixed_nodenet)
    parent = netapi.create_node("Pipe", None, "TestName1")
    child_a = netapi.create_node("Pipe", None, "TestName2")
    child_b = netapi.create_node("Pipe", None, "TestName3")
    child_c = netapi.create_node("Pipe", None, "TestName4")
    netapi.link_with_reciprocal(parent, child_a, "subsur")
    netapi.link_with_reciprocal(parent, child_b, "subsur")
    netapi.link_with_reciprocal(parent, child_c, "subsur")
    netapi.link_with_reciprocal(child_a, child_b, "porret")
    remaining = netapi.get_nodes_in_gate_field(parent, "sub", ["por"])
    remaining_uids = [n.uid for n in remaining]
    assert len(remaining) == 2
    assert child_b.uid in remaining_uids
    assert child_c.uid in remaining_uids
def test_node_netapi_get_nodes_in_gate_field_with_limitations_and_nodespace(fixed_nodenet):
    """The 'por'-exclusion plus a nodespace filter leaves a single node."""
    net, netapi, source = prepare(fixed_nodenet)
    ns = netapi.create_nodespace(None, "NestedNodespace")
    parent = netapi.create_node("Pipe", None, "TestName1")
    child_a = netapi.create_node("Pipe", None, "TestName2")
    child_b = netapi.create_node("Pipe", None, "TestName3")
    child_c = netapi.create_node("Pipe", ns.uid, "TestName4")
    netapi.link_with_reciprocal(parent, child_a, "subsur")
    netapi.link_with_reciprocal(parent, child_b, "subsur")
    netapi.link_with_reciprocal(parent, child_c, "subsur")
    netapi.link_with_reciprocal(child_a, child_b, "porret")
    # child_a is excluded by the 'por' limitation, child_c by the nodespace
    remaining = netapi.get_nodes_in_gate_field(parent, "sub", ["por"], netapi.get_nodespace(None).uid)
    assert len(remaining) == 1
    assert child_b.uid in [n.uid for n in remaining]
def test_node_netapi_get_nodes_in_slot_field(fixed_nodenet):
    # test get_nodes_in_slot_field
    # Link three distinct nodes into node1's 'gen' slot. node3 is linked
    # twice -- presumably deliberate, so that re-linking an existing
    # gate/slot pair does not create a duplicate (expected count is 3).
    net, netapi, source = prepare(fixed_nodenet)
    node1 = netapi.create_node("Register", None, "TestName1")
    node2 = netapi.create_node("Register", None, "TestName2")
    node3 = netapi.create_node("Register", None, "TestName3")
    node4 = netapi.create_node("Register", None, "TestName4")
    netapi.link(node2, "gen", node1, "gen")
    netapi.link(node3, "gen", node1, "gen")
    netapi.link(node3, "gen", node1, "gen")
    netapi.link(node4, "gen", node1, "gen")
    nodes = netapi.get_nodes_in_slot_field(node1, "gen")
    # one entry per distinct linking node
    assert len(nodes) == 3
    assert node2.uid in [n.uid for n in nodes]
    assert node3.uid in [n.uid for n in nodes]
    assert node4.uid in [n.uid for n in nodes]
def test_node_netapi_get_nodes_in_slot_field_all_links(fixed_nodenet):
    """get_nodes_in_slot_field without a slot argument returns nodes linking
    into any slot of the given node: node1 receives 'sur' links from all
    three children.
    """
    # Fix: the original called prepare() twice in a row; the second call
    # created an extra "Source" node as an unintended side effect.
    net, netapi, source = prepare(fixed_nodenet)
    node1 = netapi.create_node("Pipe", None, "TestName1")
    node2 = netapi.create_node("Pipe", None, "TestName2")
    node3 = netapi.create_node("Pipe", None, "TestName3")
    node4 = netapi.create_node("Pipe", None, "TestName4")
    netapi.link_with_reciprocal(node1, node2, "subsur")
    netapi.link_with_reciprocal(node1, node3, "subsur")
    netapi.link_with_reciprocal(node1, node4, "subsur")
    netapi.link_with_reciprocal(node2, node3, "porret")
    nodes = netapi.get_nodes_in_slot_field(node1)
    assert len(nodes) == 3
    assert node2.uid in [n.uid for n in nodes]
    assert node3.uid in [n.uid for n in nodes]
    assert node4.uid in [n.uid for n in nodes]
def test_node_netapi_get_nodes_with_nodespace_limitation(fixed_nodenet):
    # test get_nodes_feed with nodespace limitation
    net, netapi, source = prepare(fixed_nodenet)
    nodespace = netapi.create_nodespace(None, "NestedNodespace")
    node1 = netapi.create_node("Register", None, "TestName1")
    node2 = netapi.create_node("Register", None, "TestName2")
    node3 = netapi.create_node("Register", None, "TestName3")
    node4 = netapi.create_node("Register", nodespace.uid, "TestName4")
    netapi.link(node2, "gen", node1, "gen")
    netapi.link(node3, "gen", node1, "gen")
    # node3 is linked twice, presumably to check that re-linking the same
    # gate/slot pair does not create a duplicate entry.
    netapi.link(node3, "gen", node1, "gen")
    netapi.link(node4, "gen", node1, "gen")
    # restricting to the root nodespace filters out node4 (nested nodespace)
    nodes = netapi.get_nodes_in_slot_field(node1, "gen", None, netapi.get_nodespace(None).uid)
    assert len(nodes) == 2
    assert node2.uid in [n.uid for n in nodes]
    assert node3.uid in [n.uid for n in nodes]
def test_node_netapi_get_nodes_in_slot_field_with_limitations_and_nodespace(fixed_nodenet):
    """Slot-field lookup honors both the 'por' exclusion and the nodespace filter."""
    net, netapi, source = prepare(fixed_nodenet)
    ns = netapi.create_nodespace(None, "NestedNodespace")
    parent = netapi.create_node("Pipe", None, "TestName1")
    child_a = netapi.create_node("Pipe", None, "TestName2")
    child_b = netapi.create_node("Pipe", None, "TestName3")
    child_c = netapi.create_node("Pipe", ns.uid, "TestName4")
    netapi.link_with_reciprocal(parent, child_a, "subsur")
    netapi.link_with_reciprocal(parent, child_b, "subsur")
    netapi.link_with_reciprocal(parent, child_c, "subsur")
    netapi.link_with_reciprocal(child_a, child_b, "porret")
    # child_a is excluded by the 'por' limitation, child_c by the nodespace
    found = netapi.get_nodes_in_slot_field(parent, "sur", ["por"], netapi.get_nodespace(None).uid)
    assert len(found) == 1
    assert child_b.uid in [n.uid for n in found]
def test_node_netapi_get_nodes_active(fixed_nodenet):
    # test get_nodes_active
    net, netapi, source = prepare(fixed_nodenet)
    nodespace = netapi.create_nodespace(None, "NestedNodespace")
    node1 = netapi.create_node("Register", None, "TestName1")
    node2 = netapi.create_node("Register", None, "TestName2")
    node3 = netapi.create_node("Register", None, "TestName3")
    node4 = netapi.create_node("Register", nodespace.uid, "TestName4")
    netapi.link(node2, "gen", node1, "gen")
    netapi.link(node3, "gen", node1, "gen")
    netapi.link(node3, "gen", node1, "gen")
    netapi.link(node4, "gen", node1, "gen")
    # feed node2 and node4 from the always-active source at half weight
    netapi.link(source, "gen", node2, "gen", 0.5)
    netapi.link(source, "gen", node4, "gen", 0.5)
    # two steps so activation can propagate source -> node2/node4 -> node1
    net.step()
    net.step()
    nodes = netapi.get_nodes_active(netapi.get_nodespace(None).uid, "Register", 0.7, "gen")
    # with threshold 0.7, only source and node1 qualify
    assert len(nodes) == 2
    assert node1.uid in [n.uid for n in nodes]
    assert source.uid in [n.uid for n in nodes]
    # the default threshold yields the same result
    nodes = netapi.get_nodes_active(netapi.get_nodespace(None).uid, "Register")
    assert len(nodes) == 2
    assert node1.uid in [n.uid for n in nodes]
    assert source.uid in [n.uid for n in nodes]
def test_node_netapi_get_nodes_active_with_nodespace_limitation(fixed_nodenet):
    # test get_nodes_active with nodespace filtering
    net, netapi, source = prepare(fixed_nodenet)
    nodespace = netapi.create_nodespace(None, "NestedNodespace")
    node1 = netapi.create_node("Register", None, "TestName1")
    node2 = netapi.create_node("Register", None, "TestName2")
    node3 = netapi.create_node("Register", None, "TestName3")
    node4 = netapi.create_node("Register", nodespace.uid, "TestName4")
    netapi.link(node2, "gen", node1, "gen")
    netapi.link(node3, "gen", node1, "gen")
    netapi.link(node3, "gen", node1, "gen")
    netapi.link(node4, "gen", node1, "gen")
    # feed node2 and node4 from the always-active source at half weight
    netapi.link(source, "gen", node2, "gen", 0.5)
    netapi.link(source, "gen", node4, "gen", 0.5)
    net.step()
    net.step()
    # scoping the query to the nested nodespace leaves only node4
    nodes = netapi.get_nodes_active(nodespace.uid, "Register", 0.4)
    assert len(nodes) == 1
    assert node4.uid in [n.uid for n in nodes]
def test_node_netapi_delete_node(fixed_nodenet):
    """Deleting a node removes it from the net and drops links pointing at it."""
    # test simple delete node case
    net, netapi, source = prepare(fixed_nodenet)
    node1 = netapi.create_node("Register", None, "TestName1")
    node2 = netapi.create_node("Register", None, "TestName2")
    node3 = netapi.create_node("Register", None, "TestName3")
    netapi.link(node2, "gen", node1, "gen")
    netapi.link(node3, "gen", node1, "gen")
    netapi.link(node3, "gen", node1, "gen")
    olduid = node1.uid
    netapi.delete_node(node1)
    # looking up the deleted uid must fail
    with pytest.raises(KeyError):
        netapi.get_node(olduid)
    # node2's outgoing link to the deleted node is gone as well
    assert len(node2.get_gate("gen").get_links()) == 0
def test_node_netapi_delete_nodespace(fixed_nodenet):
    """Deleting a nodespace also deletes the nodes it contains."""
    # test delete node case deleting a nodespace
    net, netapi, source = prepare(fixed_nodenet)
    nodespace = netapi.create_nodespace(None, "NestedNodespace")
    node1 = netapi.create_node("Register", None, "TestName1")
    node2 = netapi.create_node("Register", None, "TestName2")
    node3 = netapi.create_node("Register", None, "TestName3")
    node4 = netapi.create_node("Register", nodespace.uid, "TestName4")
    netapi.link(node2, "gen", node1, "gen")
    netapi.link(node3, "gen", node1, "gen")
    netapi.link(node3, "gen", node1, "gen")
    netapi.link(node4, "gen", node1, "gen")
    node4uid = node4.uid
    netapi.delete_nodespace(nodespace)
    # node4 lived in the deleted nodespace and must be gone with it
    with pytest.raises(KeyError):
        netapi.get_node(node4uid)
def test_node_netapi_link(fixed_nodenet):
    """Linking two nodes creates one link visible from gate, slot and data dict."""
    # test linking nodes
    net, netapi, source = prepare(fixed_nodenet)
    node1 = netapi.create_node("Register", None, "TestName1")
    node2 = netapi.create_node("Register", None, "TestName2")
    netapi.link(node2, "gen", node1, "gen")
    assert len(node2.get_gate("gen").get_links()) == 1
    for link in node2.get_gate("gen").get_links():
        # basic internal logic
        assert link.source_node.uid == node2.uid
        assert link.target_node.uid == node1.uid
        assert link.weight == 1  # default link weight
        # the same link object must also be reachable from the target's slot
        found = False
        for otherside_link in node1.get_slot("gen").get_links():
            if otherside_link.uid == link.uid:
                found = True
        assert found
        # frontend/persistency-facing
        assert link.data['weight'] == link.weight
        assert link.data['uid'] == link.uid
        assert link.data['source_node_uid'] == node2.uid
        assert link.data['target_node_uid'] == node1.uid
def test_node_netapi_link_change_weight(fixed_nodenet):
    """Re-linking an existing pair updates the weight instead of duplicating."""
    # test linking nodes, the changing weights
    net, netapi, source = prepare(fixed_nodenet)
    node1 = netapi.create_node("Register", None, "TestName1")
    node2 = netapi.create_node("Register", None, "TestName2")
    netapi.link(node2, "gen", node1, "gen")
    net.step()
    # second link call on the same gate/slot pair changes the weight to 0.8
    netapi.link(node2, "gen", node1, "gen", 0.8)
    assert len(node2.get_gate("gen").get_links()) == 1
    for link in node2.get_gate("gen").get_links():
        # basic internal logic
        assert link.source_node.uid == node2.uid
        assert link.target_node.uid == node1.uid
        assert round(link.weight, 5) == 0.8
        found = False
        for otherside_link in node1.get_slot("gen").get_links():
            if otherside_link.uid == link.uid:
                found = True
        assert found
        # frontend/persistency-facing
        assert link.data['weight'] == link.weight
        assert link.data['uid'] == link.uid
        assert link.data['source_node_uid'] == node2.uid
        assert link.data['target_node_uid'] == node1.uid
def test_node_netapi_link_with_reciprocal(fixed_nodenet):
    """link_with_reciprocal creates both directions (sub/sur resp. por/ret)."""
    # test linking pipe and concept nodes with reciprocal links
    net, netapi, source = prepare(fixed_nodenet)
    n_head = netapi.create_node("Pipe", None, "Head")
    n_a = netapi.create_node("Pipe", None, "A")
    n_b = netapi.create_node("Pipe", None, "B")
    n_c = netapi.create_node("Pipe", None, "C")
    netapi.link_with_reciprocal(n_head, n_a, "subsur")
    netapi.link_with_reciprocal(n_head, n_b, "subsur")
    netapi.link_with_reciprocal(n_head, n_c, "subsur")
    # chain a-b-c with por/ret links of weight 0.5
    netapi.link_with_reciprocal(n_a, n_b, "porret", 0.5)
    netapi.link_with_reciprocal(n_b, n_c, "porret", 0.5)
    assert len(n_head.get_gate("sub").get_links()) == 3
    assert len(n_head.get_slot("sur").get_links()) == 3
    assert len(n_a.get_gate("sur").get_links()) == 1
    assert len(n_a.get_slot("sub").get_links()) == 1
    assert len(n_b.get_gate("sur").get_links()) == 1
    assert len(n_b.get_slot("sub").get_links()) == 1
    assert len(n_c.get_gate("sur").get_links()) == 1
    assert len(n_c.get_slot("sub").get_links()) == 1
    # a is head of the chain, c is its tail
    assert len(n_a.get_gate("por").get_links()) == 1
    assert len(n_a.get_slot("ret").get_links()) == 1
    assert len(n_a.get_slot("por").get_links()) == 0
    assert len(n_b.get_gate("por").get_links()) == 1
    assert len(n_b.get_slot("ret").get_links()) == 1
    assert len(n_b.get_gate("ret").get_links()) == 1
    assert len(n_b.get_slot("por").get_links()) == 1
    assert len(n_c.get_gate("por").get_links()) == 0
    assert len(n_c.get_slot("ret").get_links()) == 0
    # the explicit weight is applied to the created links
    for link in n_b.get_gate("por").get_links():
        assert link.weight == 0.5
@pytest.mark.engine("dict_engine")
def test_node_netapi_link_with_reciprocal_and_concepts(fixed_nodenet):
    """catexp/symref reciprocal linking between Pipe and Concept nodes
    (dict engine only, per the marker)."""
    # test linking pipe and concept nodes with reciprocal links
    net, netapi, source = prepare(fixed_nodenet)
    n_head = netapi.create_node("Pipe", None, "Head")
    n_d = netapi.create_node("Concept", None, "D")
    n_e = netapi.create_node("Concept", None, "E")
    netapi.link_with_reciprocal(n_head, n_d, "catexp")
    netapi.link_with_reciprocal(n_d, n_e, "symref")
    assert len(n_d.get_gate("sym").get_links()) == 1
    # n_d receives the exp back-link and the ref back-link on its gen slot
    assert len(n_d.get_slot("gen").get_links()) == 2
    assert len(n_head.get_gate("cat").get_links()) == 1
    assert len(n_head.get_slot("exp").get_links()) == 1
def test_node_netapi_unlink(fixed_nodenet):
    """Unlinking a node with no further arguments drops all its links."""
    # test completely unlinking a node
    net, netapi, source = prepare(fixed_nodenet)
    n_head = netapi.create_node("Pipe", None, "Head")
    n_a = netapi.create_node("Pipe", None, "A")
    n_b = netapi.create_node("Pipe", None, "B")
    n_c = netapi.create_node("Pipe", None, "C")
    n_d = netapi.create_node("Pipe", None, "D")
    nodes = [n_a, n_b, n_c, n_d]
    # fully cross-link the four nodes; loop variables renamed so they no
    # longer shadow the `source` node returned by prepare() above
    for from_node in nodes:
        for to_node in nodes:
            netapi.link_with_reciprocal(from_node, to_node, "porret")
    netapi.unlink(n_b)
    # each node had 4 incoming por links; after unlinking n_b every node
    # keeps 3 of them
    assert len(n_a.get_slot('por').get_links()) == 3
    assert len(n_b.get_slot('por').get_links()) == 3
    assert len(n_c.get_slot('por').get_links()) == 3
    assert len(n_d.get_slot('por').get_links()) == 3
def test_node_netapi_unlink_specific_link(fixed_nodenet):
    """Unlinking with gate, target and slot removes exactly that one link."""
    # test removing a specific link
    net, netapi, source = prepare(fixed_nodenet)
    n_head = netapi.create_node("Pipe", None, "Head")
    n_a = netapi.create_node("Pipe", None, "A")
    n_b = netapi.create_node("Pipe", None, "B")
    n_c = netapi.create_node("Pipe", None, "C")
    n_d = netapi.create_node("Pipe", None, "D")
    nodes = [n_a, n_b, n_c, n_d]
    # loop variables renamed to avoid shadowing `source` from prepare()
    for from_node in nodes:
        for to_node in nodes:
            netapi.link_with_reciprocal(from_node, to_node, "porret")
    netapi.unlink(n_b, "por", n_c, "por")
    # only the n_b:por -> n_c:por link is gone; everything else is intact
    assert len(n_a.get_slot('por').get_links()) == 4
    assert len(n_b.get_slot('por').get_links()) == 4
    assert len(n_c.get_slot('por').get_links()) == 3
    assert len(n_d.get_slot('por').get_links()) == 4
def test_node_netapi_unlink_gate(fixed_nodenet):
    """Unlinking a node plus gate name removes all links from that gate."""
    # test unlinking a gate
    net, netapi, source = prepare(fixed_nodenet)
    n_head = netapi.create_node("Pipe", None, "Head")
    n_a = netapi.create_node("Pipe", None, "A")
    n_b = netapi.create_node("Pipe", None, "B")
    n_c = netapi.create_node("Pipe", None, "C")
    n_d = netapi.create_node("Pipe", None, "D")
    nodes = [n_a, n_b, n_c, n_d]
    # loop variables renamed to avoid shadowing `source` from prepare()
    for from_node in nodes:
        for to_node in nodes:
            netapi.link_with_reciprocal(from_node, to_node, "porret")
    netapi.unlink(n_b, "por")
    # n_b's outgoing por links are gone, so each node lost one incoming link
    assert len(n_a.get_slot('por').get_links()) == 3
    assert len(n_b.get_slot('por').get_links()) == 3
    assert len(n_c.get_slot('por').get_links()) == 3
    assert len(n_d.get_slot('por').get_links()) == 3
def test_node_netapi_unlink_direction(fixed_nodenet):
    """unlink_direction removes all links of one link type touching a node."""
    # test unlinking a direction
    net, netapi, source = prepare(fixed_nodenet)
    n_head = netapi.create_node("Pipe", None, "Head")
    n_a = netapi.create_node("Pipe", None, "A")
    n_b = netapi.create_node("Pipe", None, "B")
    n_c = netapi.create_node("Pipe", None, "C")
    netapi.link_with_reciprocal(n_head, n_a, "subsur")
    netapi.link_with_reciprocal(n_head, n_b, "subsur")
    netapi.link_with_reciprocal(n_head, n_c, "subsur")
    nodes = [n_a, n_b, n_c]
    # loop variables renamed to avoid shadowing `source` from prepare()
    for from_node in nodes:
        for to_node in nodes:
            netapi.link_with_reciprocal(from_node, to_node, "porret")
    netapi.unlink_direction(n_b, "por")
    # the sub/sur structure is untouched by removing por links
    assert len(n_head.get_gate('sub').get_links()) == 3
    assert len(n_head.get_slot('sur').get_links()) == 3
    assert len(n_a.get_slot('por').get_links()) == 2
    assert len(n_b.get_slot('por').get_links()) == 0
    assert len(n_c.get_slot('por').get_links()) == 2
    netapi.unlink_direction(n_head, "sub")
    assert len(n_head.get_gate('sub').get_links()) == 0
    # incoming sur links remain; only the sub direction was removed
    assert len(n_head.get_slot('sur').get_links()) == 3
    assert len(n_a.get_slot('sub').get_links()) == 0
    assert len(n_b.get_slot('sub').get_links()) == 0
    assert len(n_c.get_slot('sub').get_links()) == 0
def test_node_netapi_import_actors(fixed_nodenet):
    """import_actors creates one actor per matching datatarget, idempotently."""
    # test importing data targets as actors
    net, netapi, source = prepare(fixed_nodenet)
    world = test_node_logic.add_dummyworld(fixed_nodenet)
    root_ns = netapi.get_nodespace(None)
    netapi.import_actors(root_ns.uid, "test_")
    actors = netapi.get_nodes(root_ns.uid, "test_")
    assert len(actors) == 1
    assert actors[0].get_parameter('datatarget') == "test_target"
    # do it again, make sure we can call import multiple times
    netapi.import_actors(root_ns.uid, "test_")
    actors = netapi.get_nodes(root_ns.uid, "test_")
    assert len(actors) == 1
def test_node_netapi_import_sensors(fixed_nodenet):
    """import_sensors creates one sensor per matching datasource, idempotently."""
    # test importing data sources as sensors
    net, netapi, source = prepare(fixed_nodenet)
    world = test_node_logic.add_dummyworld(fixed_nodenet)
    root_ns = netapi.get_nodespace(None)
    netapi.import_sensors(root_ns.uid, "test_")
    sensors = netapi.get_nodes(root_ns.uid, "test_")
    assert len(sensors) == 1
    assert sensors[0].get_parameter('datasource') == "test_source"
    # do it again, make sure we can call import multiple times
    netapi.import_sensors(root_ns.uid, "test_")
    sensors = netapi.get_nodes(root_ns.uid, "test_")
    assert len(sensors) == 1
def test_set_gate_function(fixed_nodenet):
    """set_gatefunction applies per nodespace+nodetype+gate, not globally."""
    # test setting a custom gate function
    from micropsi_core.nodenet.gatefunctions import sigmoid
    net, netapi, source = prepare(fixed_nodenet)
    some_other_node_type = netapi.create_node("Pipe", None)
    netapi.unlink(source, "gen")
    net.step()
    assert source.get_gate("gen").activation == 0
    # sigmoid is set for Register/gen only; the Pipe node must be unaffected
    netapi.set_gatefunction(netapi.get_nodespace(None).uid, "Register", "gen", "sigmoid")
    source.set_gate_parameter('gen', 'theta', 1)
    net.step()
    assert round(source.get_gate("gen").activation, 5) == round(sigmoid(0, 0, 1), 5)
    assert some_other_node_type.get_gate("gen").activation == 0
def test_autoalign(fixed_nodenet):
    """autoalign_nodespace moves nodes; an invalid nodespace uid is a no-op."""
    net, netapi, source = prepare(fixed_nodenet)
    for uid in net.get_node_uids():
        net.get_node(uid).position = (12, 13)
    netapi.autoalign_nodespace(netapi.get_nodespace(None).uid)
    positions = []
    for uid in net.get_node_uids():
        if net.get_node(uid).parent_nodespace == netapi.get_nodespace(None).uid:
            positions.extend(net.get_node(uid).position)
    # at least one node in the root nodespace got a new position
    assert set(positions) != set((12, 13))
    for uid in net.get_node_uids():
        net.get_node(uid).position = (12, 13)
    # aligning an unknown nodespace must leave all positions untouched
    netapi.autoalign_nodespace('InVaLiD')
    positions = []
    for uid in net.get_node_uids():
        positions.extend(net.get_node(uid).position)
    assert set(positions) == set((12, 13))
def test_copy_nodes(fixed_nodenet):
    """copy_nodes clones nodes into a nodespace, keeping internal links and
    parameters but not links that cross the copied set's boundary."""
    net, netapi, source = prepare(fixed_nodenet)
    nodespace = netapi.create_nodespace(None, name='copy')
    # n0001/n0002 are nodes from the fixed_nodenet fixture
    a1 = netapi.get_node('n0001')
    a2 = netapi.get_node('n0002')
    mapping = netapi.copy_nodes([a1, a2], nodespace.uid)
    assert a1 in mapping
    assert a2 in mapping
    assert a1.name == mapping[a1].name
    assert mapping[a1].parent_nodespace == nodespace.uid
    assert mapping[a2].parent_nodespace == nodespace.uid
    assert set(nodespace.get_known_ids()) == set([mapping[a1].uid, mapping[a2].uid])
    assert len(mapping[a1].get_slot('gen').get_links()) == 0  # incoming link from outside not copied
    # the internal por link between the two originals is preserved in the copies
    assert mapping[a1].get_gate('por').get_links()[0].target_node.uid == mapping[a2].uid
    assert a1.clone_parameters() == mapping[a1].clone_parameters()
    assert a1.get_gate_parameters() == mapping[a1].get_gate_parameters()
def test_group_nodes_by_names(fixed_nodenet):
    """Grouping by name prefix captures all matching nodes."""
    net, netapi, source = prepare(fixed_nodenet)
    sepp1 = netapi.create_node("Register", None, "sepp1")
    sepp2 = netapi.create_node("Register", None, "sepp2")
    sepp3 = netapi.create_node("Register", None, "sepp3")
    netapi.group_nodes_by_names(None, node_name_prefix="sepp")
    seppen_act = netapi.get_activations(None, "sepp")
    assert len(seppen_act) == 3
def test_group_nodes_by_ids(fixed_nodenet):
    """Grouping by explicit uid list captures exactly those nodes."""
    net, netapi, source = prepare(fixed_nodenet)
    ids = ["n0001", "n0002"]
    netapi.group_nodes_by_ids(None, ids, "some")
    some_act = netapi.get_activations(None, "some")
    assert len(some_act) == 2
def test_ungroup_nodes(fixed_nodenet):
    """ungroup_nodes on an existing group completes without error."""
    net, netapi, source = prepare(fixed_nodenet)
    ids = ["n0001", "n0002"]
    netapi.group_nodes_by_ids(None, ids, "some")
    netapi.ungroup_nodes(None, "some")
def test_get_activations(fixed_nodenet):
    """get_activations returns per-node activation in group order."""
    net, netapi, source = prepare(fixed_nodenet)
    sepp1 = netapi.create_node("Register", None, "sepp1")
    sepp2 = netapi.create_node("Register", None, "sepp2")
    sepp3 = netapi.create_node("Register", None, "sepp3")
    netapi.group_nodes_by_names(None, node_name_prefix="sepp")
    seppen_act = netapi.get_activations(None, "sepp")
    assert len(seppen_act) == 3
    assert seppen_act[0] == 0
    assert seppen_act[1] == 0
    assert seppen_act[2] == 0
    # after one step only sepp2 (linked from source) is active
    netapi.link(source, "gen", sepp2, "gen")
    net.step()
    seppen_act = netapi.get_activations(None, "sepp")
    assert seppen_act[0] == 0
    assert seppen_act[1] == 1
    assert seppen_act[2] == 0
def test_substitute_activations(fixed_nodenet):
    """substitute_activations overwrites group activations, and those values
    propagate through links on the next step."""
    net, netapi, source = prepare(fixed_nodenet)
    sepp1 = netapi.create_node("Register", None, "sepp1").uid
    sepp2 = netapi.create_node("Register", None, "sepp2").uid
    sepp3 = netapi.create_node("Register", None, "sepp3").uid
    netapi.group_nodes_by_names(None, node_name_prefix="sepp")
    netapi.link(source, "gen", netapi.get_node(sepp2), "gen")
    net.step()
    suddenly_a_wild_activation_appears = [0.2, -1, 42]
    netapi.substitute_activations(None, "sepp", suddenly_a_wild_activation_appears)
    assert round(netapi.get_node(sepp1).get_gate('gen').activation, 2) == 0.2
    assert round(netapi.get_node(sepp2).get_gate('gen').activation, 2) == -1
    assert round(netapi.get_node(sepp3).get_gate('gen').activation, 2) == 42
    # after stepping, sepp2 = source input (1), sepp3 = sepp2's substituted -1
    netapi.link(netapi.get_node(sepp2), "gen", netapi.get_node(sepp3), "gen")
    net.step()
    seppen_act = netapi.get_activations(None, "sepp")
    assert round(seppen_act[0], 2) == 0
    assert round(seppen_act[1], 2) == 1
    assert round(seppen_act[2], 2) == -1
def test_get_thetas(fixed_nodenet):
    """get_thetas returns one (default 0) theta per grouped node."""
    net, netapi, source = prepare(fixed_nodenet)
    sepp1 = netapi.create_node("Register", None, "sepp1")
    sepp2 = netapi.create_node("Register", None, "sepp2")
    sepp3 = netapi.create_node("Register", None, "sepp3")
    netapi.group_nodes_by_names(None, node_name_prefix="sepp")
    seppen_theta = netapi.get_thetas(None, "sepp")
    assert len(seppen_theta) == 3
    assert seppen_theta[0] == 0
    assert seppen_theta[1] == 0
    assert seppen_theta[2] == 0
def test_set_thetas(fixed_nodenet):
    """set_thetas applies the given values in group order."""
    net, netapi, source = prepare(fixed_nodenet)
    sepp1 = netapi.create_node("Register", None, "sepp1")
    sepp2 = netapi.create_node("Register", None, "sepp2")
    sepp3 = netapi.create_node("Register", None, "sepp3")
    netapi.group_nodes_by_names(None, node_name_prefix="sepp")
    some_thetas = [1, 2, 3]
    netapi.set_thetas(None, "sepp", some_thetas)
    net.step()
    seppen_theta = netapi.get_thetas(None, "sepp")
    assert round(seppen_theta[0], 2) == 1
    assert round(seppen_theta[1], 2) == 2
    assert round(seppen_theta[2], 2) == 3
def test_get_link_weights(fixed_nodenet):
    """get_link_weights returns a 2d weight structure whose indexing style
    depends on the engine (nested lists vs. numpy-style tuples)."""
    net, netapi, source = prepare(fixed_nodenet)
    sepp1 = netapi.create_node("Register", None, "sepp1")
    sepp2 = netapi.create_node("Register", None, "sepp2")
    sepp3 = netapi.create_node("Register", None, "sepp3")
    netapi.group_nodes_by_names(None, node_name_prefix="sepp")
    hugo1 = netapi.create_node("Register", None, "hugo1")
    hugo2 = netapi.create_node("Register", None, "hugo2")
    netapi.group_nodes_by_names(None, node_name_prefix="hugo")
    netapi.link(sepp2, "gen", hugo1, "gen", 0.4)
    w = netapi.get_link_weights(None, "sepp", None, "hugo")
    value = None
    # list style indexing; `except Exception` instead of a bare `except:`
    # so KeyboardInterrupt/SystemExit are not swallowed
    try:
        value = round(float(w[0][1]), 2)
    except Exception:
        pass
    # numpy style indexing
    try:
        value = round(float(w[0, 1]), 2)
    except Exception:
        pass
    assert value == 0.4
def test_set_link_weights(fixed_nodenet):
    """set_link_weights updates (non-zero), removes (zero) and creates links
    from the weight matrix returned by get_link_weights."""
    def set_weight(matrix, row, col, weight):
        # engine-agnostic setter: try list-style indexing first, fall back
        # to numpy-style; `except Exception` (not bare `except:`) keeps
        # KeyboardInterrupt/SystemExit deliverable
        try:
            matrix[row][col] = weight
        except Exception:
            matrix[row, col] = weight
    net, netapi, source = prepare(fixed_nodenet)
    sepp1 = netapi.create_node("Register", None, "sepp1")
    sepp2 = netapi.create_node("Register", None, "sepp2")
    sepp3 = netapi.create_node("Register", None, "sepp3")
    netapi.group_nodes_by_names(None, node_name_prefix="sepp")
    hugo1 = netapi.create_node("Register", None, "hugo1")
    hugo2 = netapi.create_node("Register", None, "hugo2")
    netapi.group_nodes_by_names(None, node_name_prefix="hugo")
    netapi.link(sepp2, "gen", hugo1, "gen", 0.4)
    w = netapi.get_link_weights(None, "sepp", None, "hugo")
    # change value
    set_weight(w, 0, 1, 0.6)
    netapi.set_link_weights(None, "sepp", None, "hugo", w)
    assert round(float(netapi.get_node(sepp2.uid).get_gate('gen').get_links()[0].weight), 2) == 0.6
    # remove link by setting its weight to zero
    set_weight(w, 0, 1, 0)
    netapi.set_link_weights(None, "sepp", None, "hugo", w)
    assert len(netapi.get_node(sepp2.uid).get_gate('gen').get_links()) == 0
    # create a new link (sepp2 -> hugo2)
    set_weight(w, 1, 1, 0.5)
    netapi.set_link_weights(None, "sepp", None, "hugo", w)
    assert len(netapi.get_node(sepp2.uid).get_gate('gen').get_links()) == 1
def test_get_node_ids(fixed_nodenet):
    """get_node_ids returns the grouped uids in group order."""
    net, netapi, source = prepare(fixed_nodenet)
    sepp1 = netapi.create_node("Register", None, "sepp1")
    sepp2 = netapi.create_node("Register", None, "sepp2")
    sepp3 = netapi.create_node("Register", None, "sepp3")
    netapi.group_nodes_by_names(None, node_name_prefix="sepp")
    seppen_ids = netapi.get_node_ids(None, "sepp")
    assert len(seppen_ids) == 3
    assert seppen_ids[0] == sepp1.uid
    assert seppen_ids[1] == sepp2.uid
    assert seppen_ids[2] == sepp3.uid
|
{
"content_hash": "26ab494f8bd2904b8aad142b5eaf0b52",
"timestamp": "",
"source": "github",
"line_count": 992,
"max_line_length": 110,
"avg_line_length": 38.83568548387097,
"alnum_prop": 0.6566904607397793,
"repo_name": "printedheart/micropsi2",
"id": "2b9a89c33941afb00eea6b5dfbabcd760556f284",
"size": "38574",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "micropsi_core/tests/test_node_netapi.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "12960"
},
{
"name": "JavaScript",
"bytes": "646150"
},
{
"name": "Makefile",
"bytes": "383"
},
{
"name": "Python",
"bytes": "1139277"
},
{
"name": "Shell",
"bytes": "153"
},
{
"name": "Smarty",
"bytes": "96103"
}
]
}
|
(function(angular) {
  'use strict';

  var mod = angular.module('ngFormLib.controls.formReset', []);

  // form-reset directive: on click, restores the bound model to a snapshot
  // taken at link time, clears field state and marks the form pristine.
  mod.directive('formReset', ['$parse', function($parse) {

    function resetFieldState(controlMap) {
      // Loops through the controlMap and reset's each field's state
      for (var item in controlMap) {
        var controlList = controlMap[item];
        for (var j = 0, jLen = controlList.length; j < jLen; j++) {
          var control = controlList[j].controller;
          control.fieldState = '';
        }
      }
    }

    return {
      restrict: 'A',
      require: '^form',
      link: function(scope, element, attr, controller) {
        var ngModelGet = $parse(attr.formReset),
          ngModelSet = ngModelGet.assign;

        if (!ngModelSet) {
          throw Error('formReset requires an assignable scope-expression. "' + attr.formReset + '" is un-assignable.');
        }

        // Get a copy of the data as soon as the directive is created, which is after the scope/controller has been initialised (safe)
        var originalData = angular.copy(ngModelGet(scope));

        element.on('click', function() {
          // setSubmitted is optional on the form controller
          if (typeof controller.setSubmitted === 'function') {
            controller.setSubmitted(false);
          }
          // Use a *copy* of the original data, as we don't want originalData to be modified by subsequent changes to the model by the form controls
          ngModelSet(scope, angular.copy(originalData));
          resetFieldState(controller._controls || {});
          controller.$setPristine();
          scope.$emit('event:FormReset');
          // click fires outside Angular's digest cycle, so trigger one manually
          scope.$digest();
        });
      }
    };
  }]);
})(window.angular);
|
{
"content_hash": "1d03aa5a67df1fea24c8fec4fd193615",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 148,
"avg_line_length": 33.6,
"alnum_prop": 0.6011904761904762,
"repo_name": "uglow/grunt-modular-project",
"id": "56a6b021c023134de1ef90af26c45175a938a567",
"size": "1680",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "testsrc/modules/ngFormLib/controls/formReset/FormReset.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "18429"
},
{
"name": "HTML",
"bytes": "80509"
},
{
"name": "JavaScript",
"bytes": "189395"
}
]
}
|
var mod = angular.module('user');

// <login-form> component: two-way binds email/password on its scope and
// submits the credentials through the LoginForm service.
mod.component('loginForm', {
  bindings: {
    email: '=',
    password: '=',
  },
  require: {
    //resCtrl: '^restaurantDetail'
  },
  transclude: true,
  templateUrl: 'user/login.html',
  controller: ['$scope', 'LoginForm', function LoginFormController($scope, LoginForm) {
    // NOTE(review): placeholder default shown in the form; confirm this is intended
    $scope.email = 'Email@address.com';
    $scope.password = "";
    $scope.submit = function() {
      LoginForm.login($scope.email, $scope.password);
    }
  }]
});
mod.service('LoginForm', ['$http', 'authManager', 'User', function($http, authManager, User) {
  // Thin wrapper around the login endpoint; the User service handles the
  // successful response, failures are surfaced via alert().
  var submitCredentials = function(email, pass) {
    // separate into user model?
    var payload = {'email': email, 'password': pass };
    $http.post('/api/users/login', payload)
      .then(User.processLoginResult, function(err){alert(err);});
  };
  return { login: submitCredentials };
}]);
|
{
"content_hash": "c865cbfed7b678b6e81161d084e3bc74",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 94,
"avg_line_length": 25.736842105263158,
"alnum_prop": 0.5051124744376279,
"repo_name": "grantrules/pizzarank",
"id": "8ef41677a712afc93a4cc421835837e1ac030c69",
"size": "978",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pizzarank-client/user/user-login-form.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3392"
},
{
"name": "HTML",
"bytes": "17292"
},
{
"name": "JavaScript",
"bytes": "43407"
},
{
"name": "Nginx",
"bytes": "460"
},
{
"name": "Shell",
"bytes": "264"
}
]
}
|
<polymer-element name="iris-rpc" attributes="path timeout trace">
<script>
// Socket.io-backed RPC element. Registers itself in the global BRT.rpc
// registry under its element id, dispatches rpc::request messages with
// per-request callbacks, and times out stale requests.
Polymer('iris-rpc', {
    // id : 'main',
    path : '/rpc',
    online : false,
    timeout : 30,
    created : function() {
        this.subscriptions = { };
        this.pending = { };   // in-flight rpc::request callbacks keyed by _req id
        this._req = 1;        // monotonically increasing request id
        this.events = new BRT.types.Events();
    },
    ready : function() {
        var self = this;
        if(!BRT.rpc)
            BRT.rpc = { }
        if(BRT.rpc[this.id])
            throw new Error("BRT RPC with id "+this.id+" already exists");
        BRT.rpc[this.id] = this;
        self.timeout = this.timeout || 30;
        self.connected = false;
        self.events.emitAsync('rpc-connecting');
        self.socket = io(window.location.origin+this.path);
        self.socket.on('ready', function(){
            self.online = true;
        })
        self.socket.on('connect', function() {
            console.log("BRT RPC connected");
            self.events.emit('rpc-connect');
        })
        self.socket.on('error', function() {
            console.log("BRT RPC error", arguments);
            self.events.emit('rpc-error');
        })
        self.socket.on('disconnect', function() {
            self.online = false;
            console.log("BRT RPC disconnected",arguments);
            self.events.emit('rpc-disconnect');
            // fail all in-flight requests so callers are not left hanging
            _.each(self.pending, function(info, id) {
                info.callback({ error : "Connection Closed"});
            })
            self.pending = { }
        })
        self.socket.on('message', function(msg) {
            if(self.trace) {
                if(self.trace === 1 || self.trace === true)
                    console.log('RPC ['+self.id+']:',msg.op);
                else
                if(self.trace === 2)
                    console.log('RPC ['+self.id+']:',msg.op,msg);
            }
            self.events.emit(msg.op, msg);
        })
        self.socket.on('rpc::response', function(msg) {
            if(msg._resp && self.pending[msg._resp])
                self.pending[msg._resp].callback.call(this, msg.err, msg.resp);
            else
            if(!self.pending[msg._resp]) {
                console.log("BRT RPC received unknown rpc callback (strange server-side retransmit?)");
            }
            delete self.pending[msg._resp];
        })
        function timeoutMonitor() {
            var ts = Date.now();
            var purge = [ ]
            // FIX: this function is (re)invoked via BRT.dpc(), so `this` is
            // not the element when it runs — use the captured `self`,
            // consistent with the rest of ready().
            _.each(self.pending, function(info, id) {
                if(ts - info.ts > self.timeout * 1000) {
                    info.callback({ error : "Timeout "});
                    purge.push(id);
                }
            })
            _.each(purge, function(id) {
                // FIX: `pending` alone was an undefined identifier and would
                // throw a ReferenceError on the first purged entry.
                delete self.pending[id];
            })
            BRT.dpc(1000, timeoutMonitor);
        }
        BRT.dpc(1000, timeoutMonitor);
    },
    on : function(op, callback) {
        this.events.on(op, callback);
    },
    dispatch : function(msg, callback) {
        // fire-and-forget when no callback is supplied
        if(!callback)
            return this.socket.emit('message', msg);
        this.pending[this._req] = {
            ts : Date.now(),
            callback : function(err, resp) {
                callback(err, resp);
            }
        }
        this.socket.emit('rpc::request', {
            req : msg,
            _req : this._req,
        });
        this._req++;
    },
    // timeoutMonitor();
});
</script>
</polymer-element>
<polymer-element name="iris-rpc-client" attributes="pair">
<script>
// Placeholder element: the former BRT.types.RPC.Client implementation is
// kept below as commented-out reference code; the element currently has
// no behavior of its own.
Polymer('iris-rpc-client', {
    //BRT.define('BRT.types.RPC.Client', function() {
    /* var self = this;
    //	BRT.types.Events.apply(self, arguments);
    BRT.inherit(self, 'BRT.types.Events');
    //	BRT.rpc.addListener(self);
    self.rpc = {
        handlers : [ ]
    }
    self.rpc.on = function(op,fn) {
        self.rpc.handlers.push(BRT.rpc.on(op, fn));
    }
    self.on('destroy', function() {
        _.each(self.rpc.handlers, function(uuid) {
            BRT.rpc.off(uuid);
        })
    })
    */
})
</script>
</polymer-element>
<script>
// Registers a subscription handler and forwards the subscribe request to
// the server through the main RPC element; incoming events for the
// realm:op:ident key are sunk into `fn` bound to `owner`.
BRT.define('BRT.subscribe', function(owner, options, fn) {
    var op = options.realm+':'+options.op+':'+options.ident;
    var sub = BRT.subscriptions[op];
    if(!sub)
        sub = BRT.subscriptions[op] = []
    sub.push({
        owner : owner,
        options : options,
        fn : fn
    })
    //var rpc = BRT.find('iris-rpc#main');
    BRT.rpc.main.dispatch({
        op : 'subscribe',
        info : options
    }, function(err, resp) {
        // if(err)
        // NOTE(review): subscribe errors are currently ignored
    });
    BRT.types.Events.Sink(owner, BRT.rpc.main.events, op, function(args) {
        fn.apply(owner, arguments);
    })
    //	BRT.inherit(target, 'BRT.types.RPC.Client');
    /*	target.rpc.on(options.op, function(args) {
        fn.apply(target, arguments);
    })
    */
    //	target.EventSink()
})

// Tells the server to drop a subscription; local handler bookkeeping is
// not cleaned up here.
BRT.define('BRT.unsubscribe', function(owner, options) {
    //var rpc = BRT.find('iris-rpc#main');
    BRT.rpc.main.dispatch({
        op : 'unsubscribe',
        info : options
    }, function(err, resp) {
    })
})
</script>
|
{
"content_hash": "8cc5f9b8d010fe292161fd2b76bcfd4d",
"timestamp": "",
"source": "github",
"line_count": 209,
"max_line_length": 103,
"avg_line_length": 25.473684210526315,
"alnum_prop": 0.4793388429752066,
"repo_name": "aspectron/iris-app",
"id": "7014106fabbaa38b6fecaedb4b3393325eafd461",
"size": "5324",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "http/rpc.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "6425"
},
{
"name": "JavaScript",
"bytes": "642219"
}
]
}
|
@class DateBlock, Lecture;

// Core Data entity for a single calendar date entry, with its time range
// and relations to the owning DateBlock and Lecture.
// NOTE(review): day/month/year and the times are stored as strings —
// presumably pre-formatted; confirm the expected formats with the callers.
@interface Date : NSManagedObject

@property (nonatomic, retain) NSNumber * active;  // enabled/disabled flag
@property (nonatomic, retain) NSString * day;
@property (nonatomic, retain) NSString * month;
@property (nonatomic, retain) NSString * note;
@property (nonatomic, retain) NSString * startTime;
@property (nonatomic, retain) NSString * stopTime;
@property (nonatomic, retain) NSString * year;
@property (nonatomic, retain) DateBlock *dateBlock;  // to-one relationship
@property (nonatomic, retain) Lecture *lecture;      // to-one relationship

@end
|
{
"content_hash": "272468d31d7bb8e394c76bce1b6f5a86",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 51,
"avg_line_length": 33.93333333333333,
"alnum_prop": 0.7583497053045186,
"repo_name": "chrisrathjen/eStudentV2",
"id": "962839d47aa353f9a936cbc1da21076cb958d2ab",
"size": "707",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "eStudent/Date.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "2600"
},
{
"name": "Objective-C",
"bytes": "1018479"
}
]
}
|
<?php

namespace phpcassa\UUID;

/**
 * Exception type thrown for UUID-related failures.
 *
 * @package phpcassa\Util
 */
class UUIDException extends \Exception
{
}
|
{
"content_hash": "c560995bed87ef3eb0fd2a80726dc603",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 40,
"avg_line_length": 13.625,
"alnum_prop": 0.7064220183486238,
"repo_name": "profire-co/Uyghur",
"id": "065c08d915b0e480b7ee3093e42561010bd547ec",
"size": "109",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "src/libs/phpcassa/phpcassa/UUID/UUIDException.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Awk",
"bytes": "2682"
},
{
"name": "C",
"bytes": "1656"
},
{
"name": "C++",
"bytes": "33330"
},
{
"name": "CSS",
"bytes": "5452"
},
{
"name": "JavaScript",
"bytes": "4277"
},
{
"name": "PHP",
"bytes": "503607"
},
{
"name": "Shell",
"bytes": "199728"
}
]
}
|
define("dojox/charting/themes/IndigoNation", ["../Theme", "./common"], function(Theme, themes){
	// Palette built by stepping the hue circle in 30 degree increments,
	// at 90% saturation with a brightness value of 75 (HSB model).
	var palette = [
		"#93a4d0",
		"#3b4152",
		"#687291",
		"#9faed9",
		"#8290b8"
	];

	themes.IndigoNation = new Theme({ colors: palette });

	return themes.IndigoNation;
});
|
{
"content_hash": "b01822f171f63a760b8bc2b5b711e0fe",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 95,
"avg_line_length": 25,
"alnum_prop": 0.6425,
"repo_name": "difio/difio",
"id": "c34e083cfbd40230c59b37889a0197146dcd9bd0",
"size": "410",
"binary": false,
"copies": "48",
"ref": "refs/heads/master",
"path": "static/js/dojo-1.7.2/dojox/charting/themes/IndigoNation.js.uncompressed.js",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ActionScript",
"bytes": "19954"
},
{
"name": "CSS",
"bytes": "2233793"
},
{
"name": "JavaScript",
"bytes": "18591955"
},
{
"name": "PHP",
"bytes": "38090"
},
{
"name": "Python",
"bytes": "389889"
},
{
"name": "Shell",
"bytes": "9076"
},
{
"name": "XSLT",
"bytes": "47380"
}
]
}
|
import { RowNode } from "../../entities/rowNode";
import { BeanStub } from "../../context/beanStub";
import { Logger } from "../../logger";
import { RowNodeBlockLoader } from "./rowNodeBlockLoader";
import { AgEvent } from "../../events";
import { NumberSequence } from "../../utils";
import { IRowNodeBlock } from "../../interfaces/iRowNodeBlock";
import { RowRenderer } from "../../rendering/rowRenderer";
// Configuration for a RowNodeCache instance; optional fields fall back to
// engine defaults where the implementation provides them.
export interface RowNodeCacheParams {
    initialRowCount: number;
    blockSize?: number;
    overflowSize: number;
    sortModel: any;
    filterModel: any;
    maxBlocksInCache?: number;
    rowHeight: number;
    lastAccessedSequence: NumberSequence;
    rowNodeBlockLoader?: RowNodeBlockLoader;
    dynamicRowHeight: boolean;
}
// Event fired via EVENT_CACHE_UPDATED when the cache contents change.
export interface CacheUpdatedEvent extends AgEvent {
}
// Declarations for the abstract block-based cache of RowNodes; blocks are
// keyed by block number and purged/loaded on demand.
export declare abstract class RowNodeCache<T extends IRowNodeBlock, P extends RowNodeCacheParams> extends BeanStub {
    static EVENT_CACHE_UPDATED: string;
    private static MAX_EMPTY_BLOCKS_TO_KEEP;
    private virtualRowCount;
    private maxRowFound;
    protected rowRenderer: RowRenderer;
    protected cacheParams: P;
    private active;
    blocks: {
        [blockNumber: string]: T;
    };
    private blockCount;
    protected logger: Logger;
    abstract getRow(rowIndex: number): RowNode | null;
    protected constructor(cacheParams: P);
    private destroyAllBlocks;
    protected init(): void;
    isActive(): boolean;
    getVirtualRowCount(): number;
    hack_setVirtualRowCount(virtualRowCount: number): void;
    isMaxRowFound(): boolean;
    protected onPageLoaded(event: any): void;
    private purgeBlocksIfNeeded;
    private isBlockCurrentlyDisplayed;
    protected postCreateBlock(newBlock: T): void;
    protected removeBlockFromCache(blockToRemove: T): void;
    protected checkBlockToLoad(): void;
    protected checkVirtualRowCount(block: T, lastRow?: number): void;
    setVirtualRowCount(rowCount: number, maxRowFound?: boolean): void;
    forEachNodeDeep(callback: (rowNode: RowNode, index: number) => void, sequence?: NumberSequence): void;
    forEachBlockInOrder(callback: (block: T, id: number) => void): void;
    protected forEachBlockInReverseOrder(callback: (block: T, id: number) => void): void;
    private forEachBlockId;
    protected getBlockIdsSorted(): number[];
    protected getBlock(blockId: string | number): T;
    protected setBlock(id: number, block: T): void;
    protected destroyBlock(block: T): void;
    protected onCacheUpdated(): void;
    private destroyAllBlocksPastVirtualRowCount;
    purgeCache(): void;
    getRowNodesInRange(firstInRange: RowNode, lastInRange: RowNode): RowNode[];
}
|
{
"content_hash": "43de96f11b53ae1dec1bf9591c726af5",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 116,
"avg_line_length": 41.40625,
"alnum_prop": 0.720754716981132,
"repo_name": "ceolter/angular-grid",
"id": "d0c124c919e1bb1dfb56a03e5962ce3eedbf6476",
"size": "2804",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "community-modules/core/dist/es6/modules/rowNodeCache/rowNodeCache.d.ts",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "67272"
},
{
"name": "JavaScript",
"bytes": "2291855"
},
{
"name": "TypeScript",
"bytes": "671875"
}
]
}
|
/*
* Bearer Authentication Protocol
*
* Bearer Authentication is for authorizing API requests. Once
* a user is created, a token is also generated for that user
* in its passport. This token can be used to authenticate
* API requests.
*
*/
exports.authorize = function(token, done) {
Passport.findOne({ accessToken: token }, function(err, passport) {
if (err) { return done(err); }
if (!passport) { return done(null, false); }
User.findById(passport.user, function(err, user) {
if (err) { return done(err); }
if (!user) { return done(null, false); }
return done(null, user, { scope: 'all' });
});
});
};
|
{
"content_hash": "58eaa84632bb4a35d18402e6179caf86",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 68,
"avg_line_length": 28.565217391304348,
"alnum_prop": 0.6377473363774734,
"repo_name": "Esya/crumblr",
"id": "14f4f4889e60bd3e40a5825829558d0a6ac49f3a",
"size": "657",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "api/services/protocols/bearer.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3510"
},
{
"name": "HTML",
"bytes": "7259"
},
{
"name": "JavaScript",
"bytes": "139082"
}
]
}
|
// .NAME vtkResliceCursorActor - Represent a reslice cursor
// .SECTION Description
// A reslice cursor consists of a pair of lines (cross hairs), thin or thick,
// that may be interactively manipulated for thin/thick reformats through the
// data.
// .SECTION See Also
// vtkResliceCursor vtkResliceCursorPolyDataAlgorithm vtkResliceCursorWidget
// vtkResliceCursorRepresentation vtkResliceCursorLineRepresentation
#ifndef vtkResliceCursorActor_h
#define vtkResliceCursorActor_h
#include "vtkInteractionWidgetsModule.h" // For export macro
#include "vtkProp3D.h"
class vtkResliceCursor;
class vtkResliceCursorPolyDataAlgorithm;
class vtkPolyDataMapper;
class vtkActor;
class vtkProperty;
class vtkBoundingBox;
// Prop that renders a reslice cursor: per-axis centerline and thick-slab
// actors driven by a vtkResliceCursorPolyDataAlgorithm.
class VTKINTERACTIONWIDGETS_EXPORT vtkResliceCursorActor : public vtkProp3D
{
public:
// Description:
// Standard VTK methods
static vtkResliceCursorActor *New();
vtkTypeMacro(vtkResliceCursorActor,vtkProp3D);
void PrintSelf(ostream& os, vtkIndent indent);
// Description:
// Get the cursor algorithm. The cursor must be set on the algorithm.
vtkGetObjectMacro( CursorAlgorithm, vtkResliceCursorPolyDataAlgorithm );
// Description:
// Support the standard render methods.
virtual int RenderOpaqueGeometry(vtkViewport *viewport);
// Description:
// Does this prop have some translucent polygonal geometry? No.
virtual int HasTranslucentPolygonalGeometry();
// Description:
// Release any graphics resources that are being consumed by this actor.
// The parameter window could be used to determine which graphic
// resources to release.
void ReleaseGraphicsResources(vtkWindow *);
// Description:
// Get the bounds for this Actor as (Xmin,Xmax,Ymin,Ymax,Zmin,Zmax).
double *GetBounds();
// Description:
// Get the actor's mtime, also considering its algorithm's mtime.
unsigned long int GetMTime();
// Description:
// Get property of the internal actor for axis i (0..2).
vtkProperty *GetCenterlineProperty( int i );
vtkProperty *GetThickSlabProperty( int i );
// Description:
// Get the centerline actor along a particular axis (0..2).
vtkActor * GetCenterlineActor(int axis);
// Description:
// Set the user matrix on all the internal actors.
virtual void SetUserMatrix( vtkMatrix4x4 *matrix);
protected:
vtkResliceCursorActor();
~vtkResliceCursorActor();
void UpdateViewProps( vtkViewport * v = NULL );
void UpdateHoleSize( vtkViewport * v );
vtkResliceCursorPolyDataAlgorithm * CursorAlgorithm;
// One mapper/actor/property per axis; index presumably corresponds to
// the x/y/z reslice axes — confirm against the algorithm's output ports.
vtkPolyDataMapper * CursorCenterlineMapper[3];
vtkActor * CursorCenterlineActor[3];
vtkPolyDataMapper * CursorThickSlabMapper[3];
vtkActor * CursorThickSlabActor[3];
vtkProperty * CenterlineProperty[3];
vtkProperty * ThickSlabProperty[3];
private:
vtkResliceCursorActor(const vtkResliceCursorActor&); // Not implemented.
void operator=(const vtkResliceCursorActor&); // Not implemented.
};
#endif
|
{
"content_hash": "f40cdbd5357780c60676a51283941490",
"timestamp": "",
"source": "github",
"line_count": 94,
"max_line_length": 77,
"avg_line_length": 32.02127659574468,
"alnum_prop": 0.739202657807309,
"repo_name": "hendradarwin/VTK",
"id": "73ea235386a34b4ea23800987f7e85a055c9bb38",
"size": "3601",
"binary": false,
"copies": "12",
"ref": "refs/heads/master",
"path": "Interaction/Widgets/vtkResliceCursorActor.h",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "37444"
},
{
"name": "Bison",
"bytes": "174503"
},
{
"name": "C",
"bytes": "51910235"
},
{
"name": "C++",
"bytes": "67775294"
},
{
"name": "CSS",
"bytes": "186729"
},
{
"name": "Cuda",
"bytes": "29062"
},
{
"name": "GAP",
"bytes": "14120"
},
{
"name": "Java",
"bytes": "196895"
},
{
"name": "JavaScript",
"bytes": "1111664"
},
{
"name": "Objective-C",
"bytes": "93926"
},
{
"name": "Objective-C++",
"bytes": "257535"
},
{
"name": "Pascal",
"bytes": "3255"
},
{
"name": "Perl",
"bytes": "173168"
},
{
"name": "Prolog",
"bytes": "4406"
},
{
"name": "Python",
"bytes": "15934475"
},
{
"name": "Shell",
"bytes": "61955"
},
{
"name": "Tcl",
"bytes": "1887699"
}
]
}
|
<?xml version="1.0" encoding="utf-8"?>
<?xml-stylesheet type="text/xsl" href="#xslt"?>
<!DOCTYPE xsl:stylesheet [<!ATTLIST xsl:stylesheet id ID #IMPLIED>]>
<xsl:stylesheet id="xslt" xmlns:xsl="http://www.w3.org/1999/XSL/Transform" version="1.0">
<xsl:template match="/">
<result wild="yes"><xsl:copy-of select="'PASS'"/></result>
</xsl:template>
</xsl:stylesheet>
|
{
"content_hash": "a2266aee464362ff64fc1677a36522e8",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 89,
"avg_line_length": 46.375,
"alnum_prop": 0.6711590296495957,
"repo_name": "gsnedders/presto-testo",
"id": "62a9b7a2d665dbb0de57b91fdf62821d681b58c8",
"size": "373",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "core/standards/xslt-bootstrap/reftests/185-ref.xml",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ASP",
"bytes": "2312"
},
{
"name": "ActionScript",
"bytes": "23470"
},
{
"name": "AutoHotkey",
"bytes": "8832"
},
{
"name": "Batchfile",
"bytes": "5001"
},
{
"name": "C",
"bytes": "116512"
},
{
"name": "C++",
"bytes": "219486"
},
{
"name": "CSS",
"bytes": "207914"
},
{
"name": "Erlang",
"bytes": "18523"
},
{
"name": "Groff",
"bytes": "674"
},
{
"name": "HTML",
"bytes": "103359143"
},
{
"name": "Haxe",
"bytes": "3874"
},
{
"name": "Java",
"bytes": "125658"
},
{
"name": "JavaScript",
"bytes": "22516843"
},
{
"name": "Makefile",
"bytes": "13409"
},
{
"name": "PHP",
"bytes": "531453"
},
{
"name": "Perl",
"bytes": "321672"
},
{
"name": "Python",
"bytes": "948191"
},
{
"name": "Ruby",
"bytes": "1006850"
},
{
"name": "Shell",
"bytes": "12140"
},
{
"name": "Smarty",
"bytes": "1860"
},
{
"name": "XSLT",
"bytes": "2567445"
}
]
}
|
package org.apache.commons.scxml2.model;
import org.apache.commons.scxml2.ActionExecutionContext;
import org.apache.commons.scxml2.SCXMLExpressionException;
/**
 * The class in this SCXML object model that corresponds to the
 * &lt;elseif&gt; SCXML element.
 *
 */
public class ElseIf extends Action {
/**
 * Serial version UID.
 */
private static final long serialVersionUID = 1L;
/**
 * A conditional expression which can be evaluated to true or false.
 */
private String cond;
/**
 * Constructor.
 */
public ElseIf() {
}
/**
 * Get the conditional expression.
 *
 * @return Returns the cond.
 */
public final String getCond() {
return cond;
}
/**
 * Set the conditional expression.
 *
 * @param cond The cond to set.
 */
public final void setCond(final String cond) {
this.cond = cond;
}
/**
 * {@inheritDoc}
 */
@Override
public void execute(final ActionExecutionContext exctx) throws ModelException, SCXMLExpressionException {
// Intentionally empty: the enclosing <if> container evaluates this
// element's condition and drives execution.
}
}
|
{
"content_hash": "77f948cdc68da75659dc111c69483e58",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 109,
"avg_line_length": 20.42105263157895,
"alnum_prop": 0.6194158075601375,
"repo_name": "apache/commons-scxml",
"id": "834c5966b61cedd56bf22c69d19c1dd4d895e20e",
"size": "1965",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/main/java/org/apache/commons/scxml2/model/ElseIf.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "11240"
},
{
"name": "Java",
"bytes": "844669"
},
{
"name": "Shell",
"bytes": "157"
},
{
"name": "XSLT",
"bytes": "68539"
}
]
}
|
End of preview.
No dataset card yet
- Downloads last month
- 5