text stringlengths 1 1.05M |
|---|
package com.jgabrielfreitas.blurimageview;
import android.content.Intent;
import android.database.Cursor;
import android.graphics.BitmapFactory;
import android.net.Uri;
import android.support.v4.content.ContextCompat;
import android.util.Log;
import android.widget.SeekBar;
import android.widget.SeekBar.OnSeekBarChangeListener;
import butterknife.Bind;
import butterknife.OnClick;
import com.jgabrielfreitas.core.BlurImageView;
import com.jgabrielfreitas.layoutid.annotations.InjectLayout;
import com.jgabrielfreitas.permissions.core.interfaces.OnPermissionRequest;
import com.jgabrielfreitas.permissions.core.managers.StoragePermissionManager;
import com.karumi.dexter.PermissionToken;
import static android.content.Intent.ACTION_PICK;
import static android.provider.MediaStore.Images.Media.EXTERNAL_CONTENT_URI;
import static android.provider.MediaStore.MediaColumns.DATA;
@InjectLayout(layout = R.layout.activity_main)
public class MainActivity extends BaseActivity implements OnSeekBarChangeListener, OnPermissionRequest {

    private static final int RESULT_LOAD_IMAGE = 100;

    @Bind(R.id.dogBlurImageView) BlurImageView dogBlurImageView;
    @Bind(R.id.blurSeekBar) SeekBar blurSeekBar;

    /** Wires the seek bar so dragging it re-blurs the image. */
    protected void modifyViews() {
        blurSeekBar.setOnSeekBarChangeListener(this);
    }

    /** Requests storage permission; picking happens in the permission callback. */
    @OnClick(R.id.chooseFromGalleryButton)
    public void getImageFromGallery() {
        StoragePermissionManager storagePermissionManager = new StoragePermissionManager(this, this);
        storagePermissionManager.requestPermission();
    }

    @Override
    public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
        // Log.e was used for a non-error message; demoted to debug level.
        Log.d("seekbar", "Progress --> " + progress);
        dogBlurImageView.setBlur(progress);
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        if (requestCode == RESULT_LOAD_IMAGE && resultCode == RESULT_OK && data != null) {
            Uri selectedImage = data.getData();
            String[] filePathColumn = { DATA };
            // query() may return null (e.g. provider unavailable); the original
            // would NPE here and also leaked the cursor if reading it threw.
            Cursor cursor = getContentResolver().query(selectedImage, filePathColumn, null, null, null);
            if (cursor == null) {
                return;
            }
            try {
                if (cursor.moveToFirst()) {
                    int columnIndex = cursor.getColumnIndex(filePathColumn[0]);
                    if (columnIndex >= 0) {
                        String picturePath = cursor.getString(columnIndex);
                        dogBlurImageView.setImageBitmap(BitmapFactory.decodeFile(picturePath));
                    }
                }
            } finally {
                cursor.close();
            }
        }
    }

    @Override
    public void onStartTrackingTouch(SeekBar seekBar) {}

    @Override
    public void onStopTrackingTouch(SeekBar seekBar) {}

    /**
     * Permission granted.
     * NOTE(review): the gallery picker intent is commented out and a bundled
     * drawable is shown instead — looks like leftover debug code; the
     * "choose from gallery" button therefore never opens the gallery. Confirm
     * before restoring the commented-out intent.
     */
    @Override
    public void onPermissionAllowed() {
        //Intent i = new Intent(ACTION_PICK, EXTERNAL_CONTENT_URI);
        //startActivityForResult(i, RESULT_LOAD_IMAGE);
        dogBlurImageView.setImageDrawable(ContextCompat.getDrawable(this, R.drawable.code));
    }

    @Override
    public void onPermissionDenied() {
        toast("Permission denied");
    }

    @Override
    public void onPermissionDeniedButAskAgain(PermissionToken permissionToken) {
        permissionToken.continuePermissionRequest();
    }
}
|
<filename>src/app/app.component.ts
import { AfterViewChecked, Component } from '@angular/core';

@Component({
  selector: 'app-root',
  templateUrl: './app.component.html',
  styleUrls: ['./app.component.css'],
})
export class AppComponent implements AfterViewChecked {
  /** Mirrors the persisted login flag so the template can react to it. */
  isLoggedIn: boolean = false;
  title: string = "Revature Community";

  constructor() {}

  // Declaring AfterViewChecked lets the compiler verify the hook's signature
  // (the original implemented the hook without the interface).
  // This runs after EVERY change-detection pass, so keep it cheap.
  // NOTE(review): polling localStorage this often is wasteful; an auth service
  // with an observable would be the usual design — TODO confirm.
  ngAfterViewChecked(): void {
    this.isLoggedIn = (localStorage.getItem("isLoggedIn") === "true");
  }
}
|
// {namespace name=backend/swag_import_export/view/session}
// {block name="backend/swag_import_export/view/manager/session"}
Ext.define('Shopware.apps.SwagImportExport.view.manager.Session', {
extend: 'Ext.grid.Panel',
/**
* List of short aliases for class names. Most useful for defining xtypes for widgets.
* @string
*/
alias: 'widget.swag-import-export-manager-session',
title: '{s name=swag_import_export/manager/log/title}Protocol{/s}',
initComponent: function() {
var me = this;
me.store = me.buildStore();
me.selModel = me.buildSelectionModel();
me.columns = me.buildColumns();
me.dockedItems = me.buildDockedItems();
me.on('selectionchange', function(selModel, selected) {
me.down('#deletebutton').setDisabled(selected.length === 0);
});
me.callParent(arguments);
},
buildStore: function() {
return Ext.create('Shopware.apps.SwagImportExport.store.SessionList', {
sorters: [
{ property: 'createdAt', direction: 'DESC' }
]
});
},
buildSelectionModel: function() {
return {
selType: 'checkboxmodel',
allowDeselect: true,
mode: 'SIMPLE'
};
},
buildColumns: function() {
var me = this;
return {
defaults: {
menuDisabled: true,
draggable: false
},
items: [{
xtype: 'datecolumn',
header: '{s name=swag_import_export/manager/log/header_date}Date{/s}',
dataIndex: 'createdAt',
format: 'd.m.Y H:i:s',
flex: 1
}, {
header: '{s name=swag_import_export/manager/log/header_file}File{/s}',
dataIndex: 'fileName',
renderer: function(value, view, record) {
return '<a href={url action="downloadFile"}' + '?type=' + record.get('type') + '&fileName=' + record.get('fileUrl') + ' >' + value + '</a>';
},
flex: 2
}, {
header: '{s name=swag_import_export/manager/log/header_status}Status{/s}',
dataIndex: 'state',
width: 60,
renderer: me.renderStatus
}, {
header: '{s name=swag_import_export/manager/log/header_type}Type{/s}',
dataIndex: 'type',
width: 60,
renderer: me.renderType
}, {
header: '{s name=swag_import_export/manager/log/header_profile}Profile{/s}',
dataIndex: 'profileName',
flex: 1
}, {
header: '{s name=swag_import_export/manager/log/header_user}User{/s}',
dataIndex: 'username',
width: 80
}, {
xtype: 'actioncolumn',
header: '{s name=swag_import_export/manager/log/header_actions}Actions{/s}',
width: 80,
items: [{
iconCls: 'sprite-magnifier',
handler: function(view, rowIndex, colIndex, item, opts, record) {
me.fireEvent('showSessionDetails', view, record);
}
}, {
iconCls: 'sprite-arrow-circle-315',
handler: function(view, rowIndex, colIndex, item, opts, record) {
me.fireEvent('resumeSession', view, record);
},
getClass: function(value, meta, record) {
if (record.get('type') == 'import' && record.get('state') == 'closed') {
return 'x-hide-display';
}
}
}, {
iconCls: 'sprite-minus-circle-frame',
handler: function (view, rowIndex, colIndex, item, opts, record) {
me.fireEvent('deleteSession', view, [record]);
}
}]
}]
};
},
buildDockedItems: function() {
var me = this;
return [
me.buildMenuBar(),
me.buildPagingBar()
];
},
buildMenuBar: function() {
var me = this;
return {
xtype: 'toolbar',
ui: 'shopware-ui',
dock: 'top',
items: [{
text: '{s name=swag_import_export/manager/log/button_delete_operations}Delete selected operation(s){/s}',
iconCls: 'sprite-minus-circle-frame',
itemId: 'deletebutton',
disabled: true,
handler: function() {
var selectionModel = me.getSelectionModel(),
records = selectionModel.getSelection();
if (records.length > 0) {
me.fireEvent('deleteSession', me, records);
}
}
}]
};
},
buildPagingBar: function() {
var me = this;
return {
xtype: 'pagingtoolbar',
dock: 'bottom',
displayInfo: true,
store: me.getStore()
};
},
renderStatus: function(value) {
var cls = 'sprite-cross';
if (value == 'closed') {
cls = 'sprite-tick';
}
return Ext.String.format(
'<div class="[0]" style="width: 13px; height: 13px;"> </div>',
cls
);
},
renderType: function(value) {
if (value == 'export') {
return '{s name=swag_import_export/manager/log/export}export{/s}';
}
return '{s name=swag_import_export/manager/log/import}import{/s}';
}
});
// {/block}
|
package pulse.io.readers;
import java.io.File;
import pulse.util.Reflexive;
/**
* There are two types of {@code AbstractHandler}s, which are used to either
* update/populate existing objects or convert data into new objects of a given
* type. The superclass contains basic methods of checking compliance to a
* pre-set extension.
*
*/
public interface AbstractHandler extends Reflexive {

    /**
     * Retrieves the supported extension of files, which this
     * {@code AbstractHandler} is able to process.
     *
     * @return a {@code String} (usually, lower-case) containing the supported
     *         extension.
     */
    public String getSupportedExtension();

    /**
     * Checks if the file suffix for {@code file} matches the {@code extension}.
     *
     * @param file the {@code File} to process
     * @param extension a String, which needs to be checked against the suffix
     *        of {@code File}
     * @return {@code false} if {@code file} is a directory or if it has a
     *         suffix different from {@code extension}. True otherwise.
     */
    public static boolean extensionsMatch(File file, String extension) {
        if (file.isDirectory()) {
            return false;
        }
        final String fileName = file.getName();
        final int extLength = extension.length();
        final int suffixStart = fileName.length() - extLength;
        // Case-insensitive suffix comparison. A name shorter than the
        // extension yields a negative offset, for which regionMatches
        // returns false, as desired.
        return fileName.regionMatches(true, suffixStart, extension, 0, extLength);
    }

    /**
     * Invokes {@code extensionsMatch} with the second argument set as
     * {@code getSupportedExtension()}.
     *
     * @param file the file to be checked
     * @return {@code true} if extensions match, false otherwise.
     * @see extensionsMatch
     * @see getSupportedExtension
     */
    public default boolean isExtensionSupported(File file) {
        return extensionsMatch(file, getSupportedExtension());
    }
}
|
#include <cmath>     // std::sqrt — the C++ header, not the C <math.h>
#include <iostream>

// Prints the square root of a sample value.
// Fixes: <math.h> replaced by <cmath>; sqrt called on a double (sqrt(int) was
// ambiguous before C++11); `using namespace std` dropped; newline emitted so
// the output line is properly terminated.
int main()
{
    const int n = 16;
    std::cout << std::sqrt(static_cast<double>(n)) << '\n';
    return 0;
}
<filename>src/codersrank-activity.js
import { fetchData } from './shared/fetch-data';
import { renderChart } from './shared/render-chart';
import { renderError } from './shared/render-error';
import { renderLoading } from './shared/render-loading';
import { icons } from './shared/icons';
// eslint-disable-next-line
const COMPONENT_TAG = 'codersrank-activity';
const STATE_IDLE = 0;
const STATE_LOADING = 1;
const STATE_ERROR = 2;
const STATE_SUCCESS = 3;
// eslint-disable-next-line
const STYLES = `$_STYLES_$`;
// eslint-disable-next-line
// Custom element that renders a CodersRank activity heatmap inside a closed
// shadow root, loading data via fetchData and re-rendering on state changes.
// NOTE(review): 'tooltip', 'step' and 'branding' have accessors below but are
// not listed in observedAttributes, so changing those attributes after mount
// does not trigger a re-render — confirm this is intentional.
class CodersRankActivity extends HTMLElement {
  constructor() {
    super();
    // Scratch container used to build widget/tooltip markup before it is
    // moved into the shadow root.
    this.tempDiv = document.createElement('div');
    this.shadowEl = this.attachShadow({ mode: 'closed' });
    this.stylesEl = document.createElement('style');
    this.stylesEl.textContent = STYLES;
    this.shadowEl.appendChild(this.stylesEl);
    // Bind once so add/removeEventListener see the same function references.
    this.onMouseEnter = this.onMouseEnter.bind(this);
    this.onMouseLeave = this.onMouseLeave.bind(this);
    this.mounted = false;
    this.state = STATE_IDLE;
    this.data = null;
  }
  // Sums every activity count across all dates and sources in `data`.
  // eslint-disable-next-line
  getTotalActivities(data = {}) {
    let total = 0;
    Object.keys(data).forEach((date) => {
      Object.keys(data[date]).forEach((source) => {
        total += data[date][source] || 0;
      });
    });
    return total;
  }
  // Dispatches a 'data' CustomEvent carrying the raw data and activity total.
  emitData(data = {}) {
    const event = new CustomEvent('data', {
      detail: { data, total: this.getTotalActivities(data) },
    });
    this.dispatchEvent(event);
  }
  // Dispatches an 'error' CustomEvent with the failure as detail.
  emitError(err) {
    const event = new CustomEvent('error', { detail: err });
    this.dispatchEvent(event);
  }
  static get observedAttributes() {
    return ['username', 'weeks', 'svg-width', 'legend', 'labels', 'id'];
  }
  // Boolean-ish attribute: present-but-empty or "true" both enable it.
  get tooltip() {
    const tooltip = this.getAttribute('tooltip');
    if (tooltip === '' || tooltip === 'true') return true;
    return false;
  }
  set tooltip(value) {
    this.setAttribute('tooltip', value);
  }
  get id() {
    return this.getAttribute('id');
  }
  set id(value) {
    this.setAttribute('id', value);
  }
  get username() {
    return this.getAttribute('username');
  }
  set username(value) {
    this.setAttribute('username', value);
  }
  // Number of weeks shown, capped at 52 (default 52).
  get weeks() {
    return Math.min(parseInt(this.getAttribute('weeks') || 52, 10), 52);
  }
  set weeks(value) {
    this.setAttribute('weeks', value);
  }
  get svgWidth() {
    // Default width (800) scales down proportionally when fewer than 52
    // weeks are displayed and no explicit svg-width is set.
    const svgWidth = parseInt(this.getAttribute('svg-width') || 0, 10);
    if (!svgWidth && this.weeks < 52) {
      return 800 / (52 / this.weeks);
    }
    return svgWidth || 800;
  }
  set svgWidth(value) {
    this.setAttribute('svg-width', value);
  }
  // Mirror setter so the kebab-case property name also works.
  set ['svg-width'](value) {
    this.setAttribute('svg-width', value);
  }
  get legend() {
    const legend = this.getAttribute('legend');
    if (legend === '' || legend === 'true') return true;
    return false;
  }
  set legend(value) {
    this.setAttribute('legend', value);
  }
  get labels() {
    const labels = this.getAttribute('labels');
    if (labels === '' || labels === 'true') return true;
    return false;
  }
  set labels(value) {
    this.setAttribute('labels', value);
  }
  get step() {
    return parseInt(this.getAttribute('step') || 10, 10);
  }
  set step(value) {
    this.setAttribute('step', value);
  }
  // Branding is shown unless the attribute is explicitly "false".
  get branding() {
    return this.getAttribute('branding') !== 'false';
  }
  set branding(value) {
    this.setAttribute('branding', value);
  }
  // Rebuilds the widget markup for the current state and swaps it into the
  // shadow root, re-attaching mouse listeners on the new element.
  render() {
    const {
      username,
      id,
      mounted,
      state,
      shadowEl,
      data,
      weeks,
      svgWidth,
      legend,
      labels,
      step,
      branding,
      tempDiv,
    } = this;
    const ctx = {
      data,
      weeks,
      svgWidth,
      legend,
      labels,
      step,
      branding,
    };
    // Nothing to render until we're connected and identified.
    if ((!username && !id) || !mounted) return;
    if (state === STATE_SUCCESS) {
      tempDiv.innerHTML = renderChart(ctx);
    } else if (state === STATE_ERROR) {
      tempDiv.innerHTML = renderError(ctx);
    } else if (state === STATE_IDLE || state === STATE_LOADING) {
      tempDiv.innerHTML = renderLoading(ctx);
    }
    // Replace any previously rendered widget wholesale.
    let widgetEl = shadowEl.querySelector('.codersrank-activity');
    if (widgetEl) {
      widgetEl.parentNode.removeChild(widgetEl);
    }
    widgetEl = tempDiv.querySelector('.codersrank-activity');
    if (!widgetEl) return;
    this.widgetEl = widgetEl;
    this.detachEvents();
    this.attachEvents();
    shadowEl.appendChild(widgetEl);
  }
  // Shows the loading state, fetches data, then renders success or error.
  loadAndRender() {
    const { username, id } = this;
    this.state = STATE_LOADING;
    this.render();
    fetchData(username, id)
      .then((data) => {
        this.emitData(data);
        this.data = data;
        this.state = STATE_SUCCESS;
        this.render();
      })
      .catch((err) => {
        this.emitError(err);
        this.state = STATE_ERROR;
        this.render();
      });
  }
  // Total activity count for a single date key (0 when unknown).
  activitiesInDay(date) {
    let activities = 0;
    if (!this.data || !date) return activities;
    const dayData = this.data[date];
    if (dayData) {
      Object.keys(dayData).forEach((key) => {
        // @ts-ignore
        activities += dayData[key];
      });
    }
    return activities;
  }
  // Builds the tooltip's inner HTML for a date.
  // Assumes this.data[date] exists — showTooltip checks before calling.
  tooltipText(date) {
    const data = this.data[date];
    const activities = this.activitiesInDay(date);
    const formatter = Intl.DateTimeFormat();
    // prettier-ignore
    return `
      <div class="codersrank-activity-tooltip-header">
      ${formatter.format(new Date(date))} - <b>${activities} activities</b>
      </div>
      <ul class="codersrank-activity-tooltip-list">
      ${data.github ? `
        <li><i>${icons.github}</i>${data.github} activities</li>
      ` : ''}
      ${data.gitlab ? `
        <li><i>${icons.gitlab}</i>${data.gitlab} activities</li>
      ` : ''}
      ${data.private ? `
        <li><i>${icons.folder}</i>${data.private} activities</li>
      ` : ''}
      ${data.stackoverflow ? `
        <li><i>${icons.stackoverflow}</i>${data.stackoverflow} activities</li>
      ` : ''}
      </ul>
    `;
  }
  // Positions and shows the tooltip above the hovered day cell, clamping it
  // so it stays inside the widget (110px appears to be half the tooltip
  // width; `diff` offsets the pointer angle to keep it over the cell).
  showTooltip(date) {
    if (!this.data || !date || !this.tooltip || !this.widgetEl) return;
    const data = this.data[date];
    if (!data) return;
    const rectEl = this.shadowEl.querySelector(`[data-date="${date}"]`);
    if (!rectEl) return;
    this.tempDiv.innerHTML = `
      <div class="codersrank-activity-tooltip">
      ${this.tooltipText(date)}
      <div class="codersrank-activity-tooltip-angle"></div>
      </div>
    `;
    const widgetElRect = this.getBoundingClientRect();
    const rectElRect = rectEl.getBoundingClientRect();
    const tooltipEl = this.tempDiv.querySelector('.codersrank-activity-tooltip');
    let left = rectElRect.left - widgetElRect.left;
    let diff = -5;
    if (left < 110) {
      diff = -5 - (110 - left);
      left = 110;
    }
    if (left + 110 > widgetElRect.width) {
      diff = -5 + 110 - (widgetElRect.width - left);
      left = widgetElRect.width - 110;
    }
    diff = Math.max(Math.min(diff, 105), -105);
    tooltipEl.style.left = `${left}px`;
    tooltipEl.style.top = `${rectElRect.top - widgetElRect.top}px`;
    tooltipEl.querySelector(
      '.codersrank-activity-tooltip-angle',
    ).style.marginLeft = `${diff}px`;
    this.shadowEl.appendChild(tooltipEl);
  }
  hideTooltip() {
    if (!this.tooltip || !this.widgetEl) return;
    const tooltipEl = this.shadowEl.querySelector('.codersrank-activity-tooltip');
    if (!tooltipEl) return;
    this.shadowEl.removeChild(tooltipEl);
  }
  // Capture-phase mouseenter on the widget; only SVG <rect> day cells react.
  onMouseEnter(e) {
    if (e.target.tagName !== 'rect') return;
    const el = e.target;
    const date = el.getAttribute('data-date');
    this.showTooltip(date);
  }
  onMouseLeave() {
    this.hideTooltip();
  }
  // Any observed attribute change triggers a full reload + re-render.
  attributeChangedCallback() {
    if (!this.mounted) return;
    this.loadAndRender();
  }
  attachEvents() {
    if (!this.widgetEl) return;
    this.widgetEl.addEventListener('mouseenter', this.onMouseEnter, true);
    this.widgetEl.addEventListener('mouseleave', this.onMouseLeave, true);
  }
  detachEvents() {
    if (!this.widgetEl) return;
    this.widgetEl.removeEventListener('mouseenter', this.onMouseEnter, true);
    this.widgetEl.removeEventListener('mouseleave', this.onMouseLeave, true);
  }
  connectedCallback() {
    this.width = this.offsetWidth;
    this.mounted = true;
    this.loadAndRender();
  }
  disconnectedCallback() {
    this.mounted = false;
    this.detachEvents();
  }
}
// EXPORT
|
import { useSelector } from "react-redux";
/**
* @typedef {import("../store/initialState").DefaultState} DefaultStateType
* @typedef {import("../store/initialState").DefaultUIAlertsObject} DefaultUIAlertsObject
*/
/**
* Select Redux store ui alerts data.
*
* @returns {DefaultUIAlertsObject} Store ui alerts state.
*/
const useStoreUIAlertsSelector = () =>
  // Pluck the alerts slice straight off the UI branch of the store.
  useSelector(
    /**
     * @param {DefaultStateType} state Application store data.
     * @returns {DefaultUIAlertsObject} Store ui alerts data.
     */
    (state) => state.ui.alerts,
  );
export default useStoreUIAlertsSelector;
|
package fontysmultipurposelibrary.communication.messaging;
import fontysmultipurposelibrary.serialization.ISerializer;
import fontysmultipurposelibrary.serialization.SerializationProvider;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
/**
 * Base class for typed message handlers: deserializes the raw string payload
 * into {@code T} and hands it to the concrete handler implementation.
 */
public abstract class MessageHandlerBase<T> implements IMessageHandler {

    /**
     * Deserializes {@code data} into {@code T} and dispatches it.
     *
     * @param data      raw serialized message payload
     * @param sessionId identifier of the originating session
     */
    public void handleMessage(String data, String sessionId) {
        ISerializer<String> ser = SerializationProvider.getSerializer();
        // Recovers T from the generic superclass declaration. This assumes the
        // concrete handler extends MessageHandlerBase<T> DIRECTLY with a
        // concrete type argument; an indirect or raw subclass would make the
        // cast to ParameterizedType fail — TODO confirm subclassing convention.
        Type type = ((ParameterizedType) getClass().getGenericSuperclass()).getActualTypeArguments()[0];
        T msg = ser.deserialize(data, type);
        handleMessageInternal(msg, sessionId);
    }

    /** Handles the already-deserialized message. */
    public abstract void handleMessageInternal(T message, String sessionId);
}
|
#!/bin/bash
# This symlinks all dotfiles to ~/. It's safe to run multiple times and will prompt you about
# anything unclear.
#
# NOTE(review): commands are handed to `execute` as whitespace-split strings,
# so paths containing spaces are not supported by this script.

# Succeeds when the last prompt (stored in $REPLY) was answered y/Y.
answer_is_yes() {
  [[ "$REPLY" =~ ^[Yy]$ ]] \
    && return 0 \
    || return 1
}

# Asks a yes/no question and reads a single keystroke into $REPLY.
ask_for_confirmation() {
  print_question "$1 (y/n) "
  read -r -n 1   # -r: don't treat backslash as an escape character
  printf "\n"
}

# Runs a command silently and reports success/failure.
#   $1: command string, $2: optional human-readable label (defaults to $1).
execute() {
  $1 &> /dev/null
  print_result $? "${2:-$1}"
}

# Print output in red
print_error() {
  printf "\e[0;31m [✖] $1 $2\e[0m\n"
}

# Print output in purple
print_info() {
  printf "\n\e[0;35m $1\e[0m\n\n"
}

# Print output in yellow
print_question() {
  printf "\e[0;33m [?] $1\e[0m"
}

# Reports exit status $1 for label $2; exits when $3 == "true" and $1 != 0.
print_result() {
  [ "$1" -eq 0 ] \
    && print_success "$2" \
    || print_error "$2"
  [ "$3" == "true" ] && [ "$1" -ne 0 ] \
    && exit
}

# Print output in green
print_success() {
  printf "\e[0;32m [✔] $1\e[0m\n"
}

#
# Actual symlink stuff
#

# Finds all .dotfiles in this folder
# (-maxdepth must precede -type, otherwise GNU find prints a warning)
declare -a FILES_TO_SYMLINK=$(find . -maxdepth 1 -type f -name ".*" -not -name .DS_Store | sed -e 's|//|/|' | sed -e 's|./.|.|')

# Finds all files in these extra config dirs (the useless `$(echo ...)` wrapper
# around the dir list was removed)
for dir in .config .emacs.d .vim bin; do
  FILES_TO_SYMLINK="$FILES_TO_SYMLINK $(find "$dir" -type f -not -name .DS_Store)"
done

# Symlinks every collected file into $HOME, prompting before replacing
# anything that exists and is not already the correct link.
main() {
  local file=""
  local sourcePath=""
  local targetPath=""
  for file in ${FILES_TO_SYMLINK[@]}; do
    sourcePath="$(pwd)/$file"
    targetPath="$HOME/$file"
    mkdir -p "$(dirname "$targetPath")"
    if [ -e "$targetPath" ]; then
      if [ "$(readlink "$targetPath")" != "$sourcePath" ]; then
        ask_for_confirmation "'$targetPath' already exists, do you want to overwrite it?"
        if answer_is_yes; then
          rm "$targetPath"
          execute "ln -fs $sourcePath $targetPath" "$targetPath → $sourcePath"
        else
          print_error "$targetPath → $sourcePath"
        fi
      else
        print_success "$targetPath → $sourcePath"
      fi
    else
      execute "ln -fs $sourcePath $targetPath" "$targetPath → $sourcePath"
    fi
  done
}

main
|
<gh_stars>0
package engine.events;
import engine.collisions.HitBox;
import engine.entities.Entity;
/**
 * Event fired when a collision occurs, bundling the hitbox and the entity
 * that were collided with.
 * @author Albert
 */
public class CollisionEvent extends Event {

    private HitBox hitBox;
    private Entity entity;

    /**
     * Creates a new CollisionEvent.
     * @param hitBox hitbox collided with
     * @param collidedWith entity collided with
     */
    public CollisionEvent(HitBox hitBox, Entity collidedWith) {
        super(EventType.COLLISION.getType());
        this.hitBox = hitBox;
        this.entity = collidedWith;
    }

    /** @return hitbox collided with */
    public HitBox getCollidedHitBox() {
        return hitBox;
    }

    /** @return entity collided with */
    public Entity getCollidedWith() {
        return entity;
    }
}
|
/*
* Copyright 2014-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package org.dbflute.dbmeta.name;
/**
 * Value object wrapping the SQL-level name of a table, optionally run through
 * a one-shot {@link SqlNameFilter} when rendered via {@link #toString()}.
 * Equality and hashing are based on the raw table SQL name only.
 * @author jflute
 */
public class TableSqlName {

    // ===================================================================================
    //                                                                           Attribute
    //                                                                           =========
    protected final String _tableSqlName;
    protected final String _correspondingDbName;
    protected SqlNameFilter _sqlNameFilter;
    protected boolean _locked;

    // ===================================================================================
    //                                                                         Constructor
    //                                                                         ===========
    public TableSqlName(String tableSqlName, String correspondingDbName) {
        _tableSqlName = tableSqlName;
        _correspondingDbName = correspondingDbName;
    }

    /**
     * Accepts the SQL-name filter exactly once; the object locks itself after
     * the first acceptance and rejects any later call.
     */
    public synchronized void xacceptFilter(SqlNameFilter sqlNameFilter) { // called only once
        if (_locked) {
            throw new IllegalStateException(
                    "The object has been locked so your setting is invalid: " + sqlNameFilter);
        }
        _sqlNameFilter = sqlNameFilter;
        _locked = true;
    }

    // ===================================================================================
    //                                                                      Basic Override
    //                                                                      ==============
    @Override
    public int hashCode() {
        return _tableSqlName.hashCode();
    }

    @Override
    public boolean equals(Object obj) {
        // instanceof is false for null, so no separate null check is needed
        if (!(obj instanceof TableSqlName)) {
            return false;
        }
        return _tableSqlName.equals(((TableSqlName) obj)._tableSqlName);
    }

    @Override
    public String toString() {
        return _sqlNameFilter != null
                ? _sqlNameFilter.filter(_tableSqlName, _correspondingDbName)
                : _tableSqlName;
    }

    // ===================================================================================
    //                                                                            Accessor
    //                                                                            ========
    public String getCorrespondingDbName() {
        return _correspondingDbName;
    }
}
|
import React from 'react'
import { NavLink } from "react-router-dom";
const Sidebar = ({links}) => {
const navigation = links
.filter(link => link.visible)
.map((link) =>
<NavLink key={link.title} to={link.path} activeClassName="sidebar-active" strict>
<span className={'sidebar-icon ' + link.icon}></span>
<span className="sidebar-title">{link.title}</span>
</NavLink>
);
return (
<div className="side-nav">
<div className="logo">
<span className="icon-logo" />
<span className="title">React</span>
</div>
<div className="sidebar">
{navigation}
</div>
</div>
)
}
export default Sidebar |
#!/bin/sh
# AmpFuzz pre-fuzz config script
# Use this script to create/modify
# config files for the fuzz target

# Register the built-in UDP "time" service with inetd so the target is reachable.
printf '%s\n' "time dgram udp wait root internal" >> /etc/inetd.conf
|
#!/usr/bin/env bash
# This file is only necessary due to https://github.com/hashicorp/terraform/issues/4149

# Apply the dependency modules one at a time (same order as before), then
# apply everything else.
for target in module.cloud module.galaxy module.admin_user; do
  terraform apply -target="${target}" -auto-approve
done
terraform apply -auto-approve

echo "Run destroy.sh to shutdown and delete everything"
<filename>sentinel-pigeon-adapter/src/test/java/com/alibaba/csp/sentinel/adapter/pigeon/SentinelPigeonProviderInterceptorTest.java
/*
* Copyright 1999-2018 Alibaba Group Holding Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.csp.sentinel.adapter.pigeon;
import com.alibaba.csp.sentinel.BaseTest;
import com.alibaba.csp.sentinel.EntryType;
import com.alibaba.csp.sentinel.SphU;
import com.alibaba.csp.sentinel.adapter.pigeon.provider.DemoService;
import com.alibaba.csp.sentinel.context.Context;
import com.alibaba.csp.sentinel.context.ContextUtil;
import com.alibaba.csp.sentinel.slots.block.BlockException;
import com.dianping.pigeon.remoting.common.domain.InvocationRequest;
import com.dianping.pigeon.remoting.provider.domain.ProviderContext;
import com.dianping.pigeon.remoting.provider.service.method.ServiceMethod;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.lang.reflect.Method;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
/**
* @author <NAME>
*/
public class SentinelPigeonProviderInterceptorTest extends BaseTest {

    // Interceptor under test; shared across test methods, reset via cleanUpAll().
    private SentinelPigeonProviderInterceptor interceptor = new SentinelPigeonProviderInterceptor();

    @Before
    public void setUp() {
        cleanUpAll();
    }

    @After
    public void cleanUp() {
        cleanUpAll();
    }

    /** preInvoke should enter a Sentinel context for the invoked service method. */
    @Test
    public void testPreInvoke() {
        final InvocationRequest invocationRequest = mock(InvocationRequest.class);
        final ProviderContext providerContext = mock(ProviderContext.class);
        final ServiceMethod serviceMethod = mock(ServiceMethod.class);
        Method method = DemoService.class.getMethods()[0];
        when(providerContext.getRequest()).thenReturn(invocationRequest);
        when(providerContext.getServiceMethod()).thenReturn(serviceMethod);
        when(serviceMethod.getMethod()).thenReturn(method);
        when(invocationRequest.getServiceName()).thenReturn(DemoService.class.getName());
        when(invocationRequest.getMethodName()).thenReturn(method.getName());
        // NOTE(review): assumes the demo method has at most two parameters;
        // a third would throw ArrayIndexOutOfBoundsException — confirm.
        String[] paramClazzName = new String[2];
        int i = 0;
        for (Class<?> clazz : method.getParameterTypes()) {
            paramClazzName[i] = clazz.getName();
            i ++;
        }
        when(invocationRequest.getParamClassName()).thenReturn(paramClazzName);
        interceptor.preInvoke(providerContext);
        Context context = ContextUtil.getContext();
        assertNotNull(context);
    }

    /**
     * postInvoke should complete the entry even when the provider reports an error.
     * NOTE(review): the exit-side assertions are commented out, so this test only
     * verifies that postInvoke does not throw — confirm whether they should be restored.
     */
    @Test
    public void testPostInvoke() throws BlockException {
        final ProviderContext providerContext = mock(ProviderContext.class);
        final ServiceMethod serviceMethod = mock(ServiceMethod.class);
        Method method = DemoService.class.getMethods()[0];
        String resourceName = MethodUtils.buildResource(method);
        // Simulate an in-flight entry that postInvoke is expected to complete.
        ContextUtil.enter(resourceName);
        SphU.entry(resourceName, EntryType.IN, 1, method.getParameters());
        Throwable ex = new Throwable("service error!");
        when(providerContext.getServiceError()).thenReturn(ex);
        when(providerContext.getServiceMethod()).thenReturn(serviceMethod);
        when(serviceMethod.getMethod()).thenReturn(method);
        interceptor.postInvoke(providerContext);
        // Context context = ContextUtil.getContext();
        // assertNull(context);
    }
}
|
def computeFibonacci(n):
    """Return the n-th Fibonacci number, 1-indexed: F(1)=0, F(2)=1, F(3)=1, ...

    Keeps the original contract for bad input (prints a message and returns
    None), but fixes two defects:
    - n == 0 previously fell through to the recursive branch and recursed
      forever on negative arguments; it is now treated as invalid input.
    - the naive double recursion was exponential; this iterative form is O(n).
    """
    if n < 1:
        print("Incorrect input")
        return None
    a, b = 0, 1  # F(1), F(2)
    for _ in range(n - 1):
        a, b = b, a + b
    return a
# Collects per-model median pmap values into medians_roi{1,2}.txt, grouped by
# ROI and prostate zone (az/pz/cz), using the dwilib print_all.py tool.
cmd=/home/jussi/src/dwilib/print_all.py
models="MonoN KurtN StretchedN BiexpN"
# Earlier flat version, kept for reference (superseded by the loops below):
#for m in ${models}; do
# outfile=medians_roi1.txt
# ${cmd} -v -r 1 -s scans_az.txt -g 3+3 3+4 -m pmap/*_$m.txt >> ${outfile}
# ${cmd} -v -r 1 -s scans_pz.txt -g 3+3 3+4 -m pmap/*_$m.txt >> ${outfile}
# ${cmd} -v -r 1 -s scans_cz.txt -g 3+3 3+4 -m pmap/*_$m.txt >> ${outfile}
# outfile=medians_roi2.txt
# ${cmd} -v -r 2 -s scans_az.txt -g 3+3 3+4 -m pmap/*_$m.txt >> ${outfile}
# ${cmd} -v -r 2 -s scans_pz.txt -g 3+3 3+4 -m pmap/*_$m.txt >> ${outfile}
# ${cmd} -v -r 2 -s scans_cz.txt -g 3+3 3+4 -m pmap/*_$m.txt >> ${outfile}
#done
# One output file per ROI: first line is the ROI header (truncates the file),
# then results are appended per zone and per model.
for roi in 1 2; do
  outfile=medians_roi${roi}.txt
  echo ROI${roi} > ${outfile}
  for area in az pz cz; do
    echo ${area} >> ${outfile}
    for m in ${models}; do
      echo ${m} >> ${outfile}
      ${cmd} -r ${roi} -s scans_${area}.txt -g 3+3 3+4 -m pmap/*_${m}.txt >> ${outfile}
    done
  done
done
|
# Build the MAME C64 subtarget to JavaScript via Emscripten (emmake) and stage
# the resulting driver where the web app expects it.
cd ../vendor/mame/
emmake make REGENIE=1 -j5 SUBTARGET=c64 SOURCES=src/mame/drivers/c64.cpp
cp mamec64.js ../../drivers
|
<reponame>ted80810/drb-estuary-salinity-ml
import torch
import torch.nn as nn
import time
# Simple LSTM made from scratch
class LSTMDA(nn.Module):
def __init__(self, input_dim, hidden_dim, recur_dropout = 0, dropout = 0):
super().__init__()
self.input_dim = input_dim
self.hidden_size = hidden_dim
self.weight_ih = nn.Parameter(torch.Tensor(input_dim, hidden_dim * 4))
self.weight_hh = nn.Parameter(torch.Tensor(hidden_dim, hidden_dim * 4))
self.bias = nn.Parameter(torch.Tensor(hidden_dim * 4))
self.init_weights()
self.dropout = nn.Dropout(dropout)
self.recur_dropout = nn.Dropout(recur_dropout)
self.dense = nn.Linear(hidden_dim, 1)
def init_weights(self):
for p in self.parameters():
if p.data.ndimension() >= 2:
nn.init.xavier_uniform_(p.data)
else:
nn.init.zeros_(p.data)
def forward(self, x, init_states = None):
"""Assumes x is of shape (batch, sequence, feature)"""
bs, seq_sz, _ = x.size()
hidden_seq = []
if init_states is None:
h_t, c_t = (torch.zeros(bs, self.hidden_size).to(x.device),
torch.zeros(bs, self.hidden_size).to(x.device))
else:
h_t, c_t = init_states
x = self.dropout(x)
HS = self.hidden_size
for t in range(seq_sz):
x_t = x[:, t, :]
# batch the computations into a single matrix multiplication
gates = x_t @ self.weight_ih + h_t @ self.weight_hh + self.bias
i_t, f_t, g_t, o_t = (
torch.sigmoid(gates[:, :HS]), # input
torch.sigmoid(gates[:, HS:HS*2]), # forget
torch.tanh(gates[:, HS*2:HS*3]),
torch.sigmoid(gates[:, HS*3:]), # output
)
c_t = f_t * c_t + i_t * self.recur_dropout(g_t)
h_t = o_t * torch.tanh(c_t)
hidden_seq.append(h_t.unsqueeze(1))
hidden_seq = torch.cat(hidden_seq, dim= 1)
out = self.dense(hidden_seq)
return out, (h_t, c_t)
def evaluate(self, x_val, y_val):
# return predictions and loss for dataset
# load all the data at the same time
# data_loader = DataLoader(dataset = dataset, batch_size = len(dataset),
# shuffle = False, drop_last = False, pin_memory = False)
for i, data in enumerate(x_val):
# input: tensor of shape (batch_size, window_size, input_size)
input = x_val
target = y_val
#input = input.to(device)
#target = target.to(device)
with torch.no_grad():
prediction, _ = self(input)
loss = rmse_masked(target, prediction)
return prediction, loss
def rmse_masked(y_true, y_pred):
    """Root-mean-square error over only the non-NaN entries of ``y_true``.

    NaN targets contribute a zero error and are excluded from the count, so
    the mean is taken over valid observations only.
    """
    valid = ~torch.isnan(y_true)
    num_valid = torch.count_nonzero(valid)
    residual = torch.where(valid, y_pred - y_true, torch.zeros_like(y_true))
    return torch.sqrt(torch.sum(torch.square(residual)) / num_valid)
# def rmse_weighted(y_true, y_pred): # weighted by covariance matrix from DA; weights are concatonated onto y_true and need to separate out within function
# raise(NotImplementedError)
# return rmse_loss
def fit_torch_model(model, x, y, x_val, y_val, epochs, loss_fn, optimizer):
    """Train a stateful LSTM for ``epochs`` passes over the full tensors.

    Hidden/cell states are carried across epochs (detached each time), so the
    model is trained statefully rather than reset per epoch. Returns the model,
    the last forward outputs, and per-epoch train/validation loss histories.
    """
    running_loss_train = []
    running_loss_val = []
    for i in range(epochs):
        start_time = time.time()
        if i == 0:
            out, (h, c) = model(x)
        else:
            out, (h, c) = model(x, (h.detach(), c.detach()))  # stateful lstm
            # .detach() because prev h/c are tied to gradients/weights of
            # a different iteration
        loss = loss_fn(y, out)
        running_loss_train.append(loss.item())
        # NOTE(review): validation loss is computed BEFORE optimizer.step(),
        # i.e. against the pre-update weights for this epoch — confirm intended.
        val_preds, val_loss = model.evaluate(x_val, y_val)
        running_loss_val.append(val_loss.item())
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
        end_time = time.time()
        loop_time = end_time - start_time
        # Keras-style progress line, one per epoch.
        print('Epoch %i/' % (i + 1) + str(epochs), flush=True)
        print('[==============================]',
              '{0:.2f}'.format(loop_time) + 's/step',
              '- loss: ' + '{0:.4f}'.format(loss.item()),
              flush=True)
    return model, out, running_loss_train, running_loss_val
|
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/cloud/bigquery/storage/v1beta1/table_reference.proto
require 'google/api/resource_pb'
require 'google/protobuf/timestamp_pb'
require 'google/protobuf'
Google::Protobuf::DescriptorPool.generated_pool.build do
add_file("google/cloud/bigquery/storage/v1beta1/table_reference.proto", :syntax => :proto3) do
add_message "google.cloud.bigquery.storage.v1beta1.TableReference" do
optional :project_id, :string, 1
optional :dataset_id, :string, 2
optional :table_id, :string, 3
end
add_message "google.cloud.bigquery.storage.v1beta1.TableModifiers" do
optional :snapshot_time, :message, 1, "google.protobuf.Timestamp"
end
end
end
# Ruby constants bound to the generated message classes, resolved from the
# descriptor pool populated earlier in this file.
module Google
  module Cloud
    module Bigquery
      module Storage
        module V1beta1
          TableReference = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1beta1.TableReference").msgclass
          TableModifiers = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1beta1.TableModifiers").msgclass
        end
      end
    end
  end
end
|
<reponame>jasmaa/throw-trash-in-the-ocean
import os
import psycopg2
from dotenv import load_dotenv
def main():
    """Clears all db tables.

    Reads Postgres connection parameters from the environment (loaded via
    ``.env``) and deletes every row from the rooms/users/players/events
    tables in a single committed transaction.
    """
    load_dotenv()
    POSTGRES_HOST = os.getenv('POSTGRES_HOST')
    POSTGRES_USER = os.getenv('POSTGRES_USER')
    POSTGRES_PASSWORD = os.getenv('POSTGRES_PASSWORD')
    POSTGRES_DB = os.getenv('POSTGRES_DB')
    conn = psycopg2.connect(
        host=POSTGRES_HOST,
        database=POSTGRES_DB,
        user=POSTGRES_USER,
        # FIX: was a redacted "<PASSWORD>" placeholder (a syntax error);
        # use the env var read above, mirroring the other parameters.
        password=POSTGRES_PASSWORD,
    )
    try:
        cur = conn.cursor()
        # NOTE(review): if foreign keys exist between these tables, the
        # deletion order may matter — confirm against the schema.
        cur.execute('DELETE FROM rooms')
        cur.execute('DELETE FROM users')
        cur.execute('DELETE FROM players')
        cur.execute('DELETE FROM events')
        conn.commit()
        cur.close()
    finally:
        # FIX: the connection was previously never closed.
        conn.close()


if __name__ == '__main__':
    main()
|
import React from "react";
import {
Document,
Page,
Text,
View,
PDFViewer,
Image,
} from "@react-pdf/renderer";
import { InvoiceLogo } from "Assets";
import EntryLine from "Components/EntryLine";
import Summary from "Components/Summary";
import Checkout from "Components/Checkout";
// Create Document Component
const InvoiceTemplate = () => (
<PDFViewer
style={{
width: "100%",
height: "95%",
border: "none",
borderRadius: 10,
marginTop: 35,
}}
>
<Document>
<Page size="A4" style={{ backgroundColor: "#F8F8F8" }}>
<View style={{ paddingHorizontal: 30 }}>
<View
style={{
display: "flex",
justifyContent: "center",
alignItems: "center",
marginTop: 10,
}}
>
<View
style={{
width: "100%",
display: "flex",
flexDirection: "row",
width: 100,
height: 30,
}}
>
<Image source={InvoiceLogo} />
</View>
</View>
<View
style={{
display: "flex",
flexDirection: "row",
justifyContent: "space-between",
width: "100%",
marginTop: 40,
}}
>
<View
style={{
display: "flex",
flexDirection: "column",
width: "40%",
}}
>
<EntryLine Heading="Company Name" subHeading="DSME Global" />
<EntryLine Heading="Person Name" subHeading="<NAME>" />
<EntryLine Heading="Phone" subHeading="92 545 6565565" />
<EntryLine Heading="CNIC" subHeading="334305 4565 75677" />
<EntryLine Heading="Sales Person" subHeading="<NAME>" />
</View>
<View
style={{
display: "flex",
flexDirection: "column",
width: "40%",
}}
>
<EntryLine Heading="Date:" subHeading="15th July 2021" />
<EntryLine Heading="Order Id:" subHeading="23204972318" />
</View>
</View>
<View
style={{
width: "100%",
height: 150,
backgroundColor: "#fff",
borderRadius: 10,
marginTop: 20,
padding: 20,
}}
>
<View
style={{
display: "flex",
flexDirection: "row",
alignItems: "center",
width: "100%",
}}
>
<Text
style={{
fontSize: 11,
color: "#0DCBA0",
fontWeight: "bold",
width: "40%",
}}
>
Product Name
</Text>
<View
style={{
display: "flex",
flexDirection: "row",
justifyContent: "space-between",
alignItems: "center",
}}
>
<Text
style={{
fontSize: 11,
color: "#0DCBA0",
fontWeight: "bold",
width: "20%",
}}
>
Qty
</Text>
<Text
style={{
fontSize: 11,
color: "#0DCBA0",
fontWeight: "bold",
width: "20%",
}}
>
Unit Price
</Text>
<Text
style={{
fontSize: 11,
color: "#0DCBA0",
fontWeight: "bold",
width: "20%",
}}
>
Total
</Text>
</View>
</View>
<View
style={{
width: "100%",
height: 1,
backgroundColor: "#2C4C4C",
borderRadius: 2,
marginVertical: 6,
}}
></View>
<Summary />
<Summary />
<Summary />
<View style={{ marginTop: 10 }}>
<Checkout />
<Checkout />
<Checkout />
</View>
</View>
<Text style={{ color: "#2C4C4C", fontSize: 10, marginTop: 8 }}>
This is a system generated purchase order and does not require any
signature / stamps.
</Text>
</View>
<View
style={{
width: "100%",
borderTopLeftRadius: 40,
borderTopRightRadius: 40,
height: 230,
marginTop: 30,
backgroundColor: "#ECECEC",
display: "flex",
flexDirection: "column",
paddingHorizontal: 30,
paddingVertical: 20,
}}
>
<Text
style={{
textDecoration: "underline",
fontSize: 11,
color: "#2C4C4C",
marginBottom: 8,
}}
>
Payment Policy
</Text>
<Text style={{ color: "#2C4C4C", fontSize: 10 }}>
Lorem Ipsum is simply dummy text of the printing and typesetting
industry. Lorem Ipsum has been the industry's standard dummy text
ever since the 1500s, when an unknown printer took a galley of type
and scrambled it to make a type specimen book. It has survived not
only five centuries, but also the leap into electronic typesetting,
remaining essentially unchanged. It was popularised in the 1960s
with the release of Letraset sheets containing Lorem Ipsum passages,
and more recently with desktop publishing software like Aldus
PageMaker including versions of Lorem Ipsum
</Text>
<Text
style={{
textDecoration: "underline",
fontSize: 11,
color: "#2C4C4C",
marginBottom: 6,
marginTop: 18,
}}
>
Payment Office
</Text>
<View
style={{
display: "flex",
flexDirection: "row",
alignItems: "center",
marginBottom: 4,
}}
>
<Text
style={{
marginRight: 20,
width: "15%",
fontSize: 11,
color: "#2C4C4C",
fontWeight: "bold",
}}
>
Arnhem Office
</Text>
<Text
style={{
marginRight: 10,
fontSize: 10,
width: "80%",
color: "#2C4C4C",
}}
>
Meander 251, 6825MC Arnhem, Netherlands
</Text>
</View>
<View
style={{
display: "flex",
flexDirection: "row",
alignItems: "center",
marginBottom: 5,
width: "100%",
}}
>
<Text
style={{
marginRight: 20,
width: "15%",
fontSize: 11,
color: "#2C4C4C",
fontWeight: "bold",
}}
>
Phone #
</Text>
<Text
style={{
marginRight: 10,
fontSize: 10,
width: "20%",
color: "#2C4C4C",
}}
>
+92 545 6565565
</Text>
<Text
style={{
marginRight: 20,
width: "10%",
fontSize: 11,
color: "#2C4C4C",
fontWeight: "bold",
}}
>
Tel #
</Text>
<Text
style={{
marginRight: 10,
fontSize: 10,
width: "22%",
color: "#2C4C4C",
}}
>
+92 545 6565565
</Text>
<Text
style={{
marginRight: 20,
width: "10%",
fontSize: 11,
color: "#2C4C4C",
fontWeight: "bold",
}}
>
Email
</Text>
<Text
style={{
marginRight: 10,
fontSize: 10,
width: "25%",
color: "#2C4C4C",
}}
>
<EMAIL>
</Text>
</View>
</View>
</Page>
</Document>
</PDFViewer>
);
export default InvoiceTemplate;
|
<gh_stars>0
import { mkdirSync, writeFileSync } from "node:fs";
import os from "node:os";
import path from "node:path";
/**
 * Persists the wrangler auth tokens to `~/.wrangler/config/default.toml`.
 *
 * Only the arguments that are provided (truthy) are written; the directory
 * is created on demand and the file is overwritten each call.
 */
export function writeUserConfig(
  oauth_token?: string,
  refresh_token?: string,
  expiration_time?: string
) {
  const entries: Array<[string, string | undefined]> = [
    ["oauth_token", oauth_token],
    ["refresh_token", refresh_token],
    ["expiration_time", expiration_time],
  ];
  const body = entries
    .filter(([, value]) => Boolean(value))
    .map(([key, value]) => `${key} = "${value}"`)
    .join("\n");
  const configDir = path.join(os.homedir(), ".wrangler/config");
  mkdirSync(configDir, { recursive: true });
  writeFileSync(path.join(configDir, "default.toml"), body, "utf-8");
}
|
#include "DirectInput.h"
// Static member definitions for the DirectInput wrapper (declared in
// DirectInput.h).

// Object/camera transform state driven by input events.
float DirectInput::rotx = 0;
float DirectInput::rotz = 0;
float DirectInput::scaleX = 1.0f;
float DirectInput::scaleY = 1.0f;
float DirectInput::moveLeftRight = 0.0f;
float DirectInput::moveBackForward = 0.0f;
float DirectInput::camYaw = 0.0f;
float DirectInput::camPitch = 0.0f;
float DirectInput::camPitch2 = 0.0f;
// Mouse sensitivity factor — how it scales deltas is applied elsewhere; confirm in usage.
int DirectInput::sens = 10;
// Previous and current sampled mouse state (presumably for computing deltas — confirm).
DIMOUSESTATE DirectInput::mouseLastState;
DIMOUSESTATE DirectInput::mouseCurrState;
// DirectInput8 root interface and the two device interfaces created in InitDirectInput.
LPDIRECTINPUT8 DirectInput::directInput;
IDirectInputDevice8* DirectInput::DIKeyboard;
IDirectInputDevice8* DirectInput::DIMouse;
// Creates the DirectInput8 interface plus the keyboard and mouse devices,
// sets their data formats and cooperative levels.
// FIX: every HRESULT is now checked; previously all results were ignored
// and the function returned true unconditionally, so callers could never
// detect a failed initialization.
bool DirectInput::InitDirectInput(HWND hwnd, HINSTANCE hInstance)
{
    cout << "DIRECT INPUT STARTED\n";
    HRESULT hr = DirectInput8Create(hInstance,
        DIRECTINPUT_VERSION,
        IID_IDirectInput8,
        (void**)&directInput,
        NULL);
    if (FAILED(hr))
        return false;
    hr = directInput->CreateDevice(GUID_SysKeyboard,
        &DIKeyboard,
        NULL);
    if (FAILED(hr))
        return false;
    hr = directInput->CreateDevice(GUID_SysMouse,
        &DIMouse,
        NULL);
    if (FAILED(hr))
        return false;
    hr = DIKeyboard->SetDataFormat(&c_dfDIKeyboard);
    if (FAILED(hr))
        return false;
    hr = DIKeyboard->SetCooperativeLevel(hwnd, DISCL_FOREGROUND | DISCL_NONEXCLUSIVE);
    if (FAILED(hr))
        return false;
    hr = DIMouse->SetDataFormat(&c_dfDIMouse);
    if (FAILED(hr))
        return false;
    hr = DIMouse->SetCooperativeLevel(hwnd, DISCL_EXCLUSIVE | DISCL_NOWINKEY | DISCL_FOREGROUND);
    if (FAILED(hr))
        return false;
    cout << "DIRECT INPUT LOADED\n";
    return true;
}
void DirectInput::Release()
{
DIKeyboard->Unacquire();
DIMouse->Unacquire();
directInput->Release();
}
|
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.ic_perm_camera_mic = void 0;
var ic_perm_camera_mic = {
"viewBox": "0 0 24 24",
"children": [{
"name": "path",
"attribs": {
"d": "M0 0h24v24H0z",
"fill": "none"
},
"children": []
}, {
"name": "path",
"attribs": {
"d": "M20 5h-3.17L15 3H9L7.17 5H4c-1.1 0-2 .9-2 2v12c0 1.1.9 2 2 2h7v-2.09c-2.83-.48-5-2.94-5-5.91h2c0 2.21 1.79 4 4 4s4-1.79 4-4h2c0 2.97-2.17 5.43-5 5.91V21h7c1.1 0 2-.9 2-2V7c0-1.1-.9-2-2-2zm-6 8c0 1.1-.9 2-2 2s-2-.9-2-2V9c0-1.1.9-2 2-2s2 .9 2 2v4z"
},
"children": []
}]
};
exports.ic_perm_camera_mic = ic_perm_camera_mic; |
<gh_stars>1-10
from . import DirProvider
class NullDirProvider(DirProvider):
    """A directory provider that performs no actual discovery.

    ``search`` simply hands back the repository list already present in the
    provider's configuration.
    """

    def search(self):
        # Nothing to look up — the configured repos are the whole answer.
        return self.config["repos"]
|
// Cloud function entry file
const cloud = require("wx-server-sdk");
cloud.init();
const db = cloud.database();
const nodemailer = require("nodemailer");
// SMTP transport via the "163" mail service over port 465.
// NOTE(review): user/pass are empty here — credentials are presumably
// filled in at deploy time; confirm before shipping.
const transporter = nodemailer.createTransport({
  service: "163",
  port: 465,
  secureConnection: true,
  auth: {
    user: "",
    pass: "",
  },
});
// Default mail envelope; `to`, `subject` and `html` are overwritten per send
// (see sendSubMsg below in this file).
const mailOptions = {
  from: "塔内PC小程序 <<EMAIL>>",
  to: "<EMAIL>, <EMAIL>",
  subject: "【塔内PC小程序】", // Subject line
  // Send either text or html format
  // text: 'Hello world?', // plain text body
  html: "<p></p>", // html body
  // cc: "<EMAIL>,<EMAIL>"
};
// 云函数入口函数
exports.main = async (event, context) => {
const wxContext = cloud.getWXContext();
if (event.type === "sendSubMsg") {
return await sendSubMsg(event.form);
}
};
// Looks up the super user named in `form.value` and emails them the problem
// details. Resolves to `false` when no matching user exists, to the
// nodemailer send result on success, or to the caught error on failure.
async function sendSubMsg(form) {
  try {
    const lookup = await db
      .collection("superUser")
      .where({ name: form.value })
      .get();
    if (lookup.data.length === 0) return false;
    const [superUser] = lookup.data;
    mailOptions.to = superUser.email;
    mailOptions.subject = `【塔内PC小程序】${form.name}需要您的帮助`;
    mailOptions.html = `<p>${form.wrongDetail}</p>`;
    return await transporter.sendMail(mailOptions);
  } catch (e) {
    cloud.logger().error({ e });
    return e;
  }
}
|
package io.opensphere.core.model;
/**
 * The complete set of relationships that can hold between two
 * one-dimensional intervals, ordered from "entirely before" through
 * equality to "entirely after".
 */
public enum RangeRelationType
{
    /** Lies entirely before the other interval, with a gap between them. */
    BEFORE,

    /** Lies before the other interval, with the two ends touching. */
    BORDERS_BEFORE,

    /**
     * Covers the other interval's front edge without reaching its back
     * edge.
     */
    OVERLAPS_FRONT_EDGE,

    /** Is contained within the other interval, overlapping neither edge. */
    SUBSET,

    /** Contains the other interval, extending past both of its edges. */
    SUPERSET,

    /** Coincides exactly with the other interval. */
    EQUAL,

    /**
     * Covers the other interval's back edge without reaching its front
     * edge.
     */
    OVERLAPS_BACK_EDGE,

    /** Lies after the other interval, with the two ends touching. */
    BORDERS_AFTER,

    /** Lies entirely after the other interval, with a gap between them. */
    AFTER
}
|
/*************************************************************************
*
* ADOBE CONFIDENTIAL
* __________________
*
* Copyright 2016 Adobe Systems Incorporated
* All Rights Reserved.
*
* NOTICE: All information contained herein is, and remains
* the property of Adobe Systems Incorporated and its suppliers,
* if any. The intellectual and technical concepts contained
* herein are proprietary to Adobe Systems Incorporated and its
* suppliers and are protected by trade secret or copyright law.
* Dissemination of this information or reproduction of this material
* is strictly forbidden unless prior written permission is obtained
* from Adobe Systems Incorporated.
**************************************************************************/
package com.adobe.demo.wetelco.mobile.dps.mobileclient;
import com.adobe.cq.mobile.dps.DPSException;
import com.adobe.cq.mobile.dps.DPSProject;
import com.adobe.cq.mobile.dps.ui.PublishDataSource;
import com.adobe.demo.wetelco.mobile.dps.mobileclient.requestprocessing.FakeRequest;
import com.adobe.demo.wetelco.mobile.dps.mobileclient.requestprocessing.FakeResponse;
import org.apache.commons.httpclient.HttpStatus;
import org.apache.commons.httpclient.NameValuePair;
import org.apache.commons.io.output.ByteArrayOutputStream;
import org.apache.sling.api.adapter.AdapterManager;
import org.apache.sling.api.resource.ResourceResolver;
import org.apache.sling.commons.json.JSONArray;
import org.apache.sling.commons.json.JSONException;
import org.apache.sling.commons.json.JSONObject;
import org.apache.sling.engine.SlingRequestProcessor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.servlet.ServletException;
import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Client helper for making AEM Mobile (DPS) web requests by dispatching
 * synthetic Sling POST requests through the {@link SlingRequestProcessor}.
 */
public class AEMMobileClient {
    private static final Logger LOGGER = LoggerFactory.getLogger(AEMMobileClient.class);

    // OPERATIONS
    public static final String PARAM_OPERATION = ":operation";
    public static final String OPERATION_PREFIX = "dpsapps:";
    public static final String OPERATION_UPLOAD = OPERATION_PREFIX + "dpsUpload";
    public static final String OPERATION_PREVIEW = OPERATION_PREFIX + "preview";
    public static final String OPERATION_IMPORT = OPERATION_PREFIX + "dpsImport";

    // PARAMS
    public static final String PARAM_INCLUDE_CONTENT = "includeContent";
    public static final String PARAM_CREATE_IF_MISSING = "createIfMissing";
    public static final String PARAM_TARGET_COLLECTION = "targetCollection";
    public static final String PARAM_ENTITY_TYPE = "entityType";

    private ResourceResolver resourceResolver = null;
    private SlingRequestProcessor slingRequestProcessor = null;
    private AdapterManager adapterManager = null;

    public AEMMobileClient(ResourceResolver resourceResolver, SlingRequestProcessor slingRequestProcessor, AdapterManager adapterManager){
        this.resourceResolver = resourceResolver;
        this.slingRequestProcessor = slingRequestProcessor;
        this.adapterManager = adapterManager;
    }

    /**
     * Dispatches a synthetic POST carrying {@code operation} and the given
     * parameters to {@code path}.
     *
     * @throws RequestException when the response status is not 200 OK
     */
    public void performOperation(String path, String operation, NameValuePair[] params) throws RequestException, ServletException, IOException {
        String label = "POST:" + operation + ":" + path;
        LOGGER.info(label);
        OutputStream out = new ByteArrayOutputStream();
        Map<String, Object> mapparams = new HashMap<>();
        // Enhanced-for instead of an index loop; ordering is irrelevant for a map.
        for (NameValuePair nvp : params) {
            mapparams.put(nvp.getName(), nvp.getValue());
        }
        FakeRequest request = new FakeRequest("POST", path, mapparams);
        FakeResponse response = new FakeResponse(out);
        slingRequestProcessor.processRequest(request, response, resourceResolver);
        response.getWriter().flush();
        String responseAsString = out.toString();
        if (response.getStatusCode() == HttpStatus.SC_OK) {
            // Parameterized SLF4J logging avoids concatenation when trace is disabled.
            LOGGER.trace("DPS operation {}, path: {}: [success]", operation, path);
        } else {
            String errorMsg = response.getStatusMsg();
            LOGGER.trace("DPS operation {}, path: {}, code: {}: [error:{}]", operation, path, response.getStatusCode(), errorMsg);
            LOGGER.warn("{} >>> RESPONSE >>> {}", label, responseAsString);
            throw new RequestException(response.getStatusCode(), errorMsg == null ? "Failed request" : errorMsg);
        }
    }

    /**
     * Uploads the entity at {@code path}, creating it if missing and
     * optionally placing it in {@code targetCollection} (may be null).
     */
    public void upload(String path, String targetCollection) throws ServletException, IOException, RequestException {
        LOGGER.info("Upload " + path + (targetCollection == null ? "" : " with target collection " + targetCollection));
        List<NameValuePair> list = new ArrayList<>();
        list.add(new NameValuePair(PARAM_OPERATION, OPERATION_UPLOAD));
        list.add(new NameValuePair(PARAM_CREATE_IF_MISSING, "true"));
        list.add(new NameValuePair(PARAM_INCLUDE_CONTENT, "true"));
        if (targetCollection != null) {
            list.add(new NameValuePair(PARAM_TARGET_COLLECTION, targetCollection));
        }
        performOperation(path, OPERATION_UPLOAD, list.toArray(new NameValuePair[0]));
    }

    /** Triggers an on-demand DPS import of the given entity type at {@code path}. */
    public void importOnDemandContent(String path, String importType) throws ServletException, IOException, RequestException {
        //curl -u admin:admin -X POST -F ":operation=dpsapps:dpsImport" -F "entityType=ALL" http://localhost:4502/content/mobileapps/adobe-cares
        List<NameValuePair> list = new ArrayList<>();
        list.add(new NameValuePair(PARAM_OPERATION, OPERATION_IMPORT));
        list.add(new NameValuePair(PARAM_ENTITY_TYPE, importType));
        performOperation(path, OPERATION_IMPORT, list.toArray(new NameValuePair[0]));
    }

    /** Triggers a DPS preview of the content at {@code path}. */
    public void preview(String path) throws ServletException, IOException, RequestException {
        // curl -u admin:admin -X POST -F ':operation=dpsapps:preview' http://localhost:4502/content/mobileapps/adobe-cares
        List<NameValuePair> list = new ArrayList<>();
        list.add(new NameValuePair(PARAM_OPERATION, OPERATION_PREVIEW));
        performOperation(path, OPERATION_PREVIEW, list.toArray(new NameValuePair[0]));
    }

    /**
     * Returns the entity URL of the project layout whose title equals
     * {@code layoutTitle}, or {@code null} when no layout matches.
     */
    public String getLayoutURI(DPSProject project, String layoutTitle) throws DPSException, JSONException {
        PublishDataSource publishDataSource = adapterManager.getAdapter(project, PublishDataSource.class);
        JSONObject jsonObject = publishDataSource.getLayouts(null);
        JSONArray layoutArray = jsonObject.getJSONArray("data");
        // JSONArray exposes no iterator, so an index loop is required here.
        for (int i = 0; i < layoutArray.length(); i++) {
            JSONObject layout = layoutArray.getJSONObject(i);
            if (layoutTitle.equals(layout.get("title"))) {
                return layout.getString("entityURL");
            }
        }
        return null;
    }
}
|
<filename>tapestry-core/src/main/java/org/apache/tapestry5/services/ApplicationStateCreator.java<gh_stars>0
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.tapestry5.services;
/**
 * Used by {@link ApplicationStateManager} and {@link ApplicationStatePersistenceStrategy} to create
 * a Session State Object (SSO) on demand.
 * <p>
 * <em>NOTE: The term "Application" here is a hold-over from Tapestry 5.0, which used
 * the @ApplicationState (deprecated and deleted) annotation, and called them "ASOs"
 * (Application State Objects). This service would be better named "SessionStateCreator"
 * (but renaming it would cause backwards compatibility issues).</em>
 *
 * @param <T> the type of session state object this creator produces
 */
@FunctionalInterface
public interface ApplicationStateCreator<T>
{
    /**
     * Create a new instance of a session state object.
     *
     * @return a freshly created session state object
     */
    T create();
}
|
<filename>src/components/Button/styles.ts
import styled, { css } from 'styled-components'
import theme from '../../styles/theme'
// Props consumed by the styled wrapper below to pick its visual variant.
interface WrapperProps {
  btnType: string
  size: string
}

// Button shell: `btnType` selects the colour scheme ('primary' = filled with
// a hover tint, 'secondary' = outlined, anything else = unstyled) and `size`
// selects the height/padding tier ('small', 'default', or the large
// fallback). The inner <span> is expected to carry the label text.
export const Wrapper = styled.button<WrapperProps>`
  ${(props) =>
    props.btnType === 'primary'
      ? css`
          background-color: ${theme.colors.primary};
          color: ${theme.colors.white};
          border: none;
          :hover {
            background-color: ${theme.colors.lightblue};
            transition: all 0.2s ease 0s;
          }
        `
      : null};
  ${(props) =>
    props.btnType === 'secondary'
      ? css`
          background-color: transparent;
          color: ${theme.colors.primary};
          border: 2px solid ${theme.colors.primary};
        `
      : null};
  border-radius: ${theme.border.radius};
  display: inline-flex;
  align-items: center;
  justify-content: center;
  cursor: pointer;
  text-decoration: none;
  ${(props) => {
    // NOTE(review): unlike 'default' and the fallback, the 'small' tier does
    // not set `user-select: none` — confirm whether that is intentional.
    switch (props.size) {
      case 'small':
        return css`
          height: 30px;
          min-width: 72px;
          padding: 0 17px;
          span {
            font-size: 14px;
            font-weight: bold;
            letter-spacing: 1px;
          }
        `
      case 'default':
        return css`
          height: 36px;
          min-width: 84px;
          padding: 0 23px;
          user-select: none;
          span {
            font-size: 16px;
            font-weight: bold;
            letter-spacing: 1px;
          }
        `
      default:
        return css`
          height: 42px;
          padding: 0 29px;
          min-width: 102px;
          user-select: none;
          span {
            font-size: 16px;
            font-weight: bold;
            letter-spacing: 1px;
          }
        `
    }
  }}
`
|
package com.infamous.framework.http.factory;
import com.infamous.framework.http.Async;
import com.infamous.framework.http.Body;
import com.infamous.framework.http.Header;
import com.infamous.framework.http.Headers;
import com.infamous.framework.http.HttpMethod;
import com.infamous.framework.http.MultiPart;
import com.infamous.framework.http.Part;
import com.infamous.framework.http.PathParam;
import com.infamous.framework.http.QueryParam;
import com.infamous.framework.http.Rest;
import com.infamous.framework.http.Url;
import java.util.List;
import java.util.concurrent.CompletableFuture;
/**
 * Fixture interface for exercising the REST-client factory's annotation
 * processing. Each method covers one annotation (or deliberately invalid
 * combination) that the factory under test must handle; the methods are
 * never implemented directly.
 */
interface RestClientTest {
    // Dynamic header, bound via the fully-qualified annotation name on purpose.
    int testHeader(@com.infamous.framework.http.Header("Dynamic-Header") String dynamicHeader);

    // Pre-encoded path parameter substitution.
    int testPath(@PathParam(value = "fileId", encoded = true) String fileId);

    // Pre-encoded query parameter.
    int testQuery(@QueryParam(value = "id", encoded = true) String id);

    // Relative vs. absolute URL binding.
    int testUrl(@Url String url);
    int testUrlWithFullUrl(@Url(fullUrl = true) String url);

    // Request body and multipart part binding.
    int testWithBody(@Body String object);
    int testWithMultipartBody(@Part("files") String file);

    // Static headers declared on the method.
    @Headers({
        "Content-Type: application/xml",
        "Static-Header-Name: Static-Header-Value"
    })
    int testStaticHeader();

    // Edge case: an empty @Headers declaration.
    @Headers({})
    int testEmptyHeader();

    // Invalid case: header string missing the "Name: Value" separator.
    @Headers({
        "ContentType/application/xml"
    })
    int testHeadersInvalid();

    @Rest(method = HttpMethod.POST)
    int testRest();

    // Combination: static + dynamic headers, path/query params and a body.
    @Headers({
        "Content-Type: application/xml",
        "Static-Header-Name: Static-Header-Value"
    })
    @Rest(method = HttpMethod.POST, url = "/find/{fileId}", contentType = "application/xml")
    String blockingRestWithBody(@Url String url, @Body String body, @PathParam("fileId") String fileId,
        @QueryParam("group") Integer group,
        @Header("Dynamic-Header-Name") String dynamicHeaders);

    // Invalid case: two @Body parameters on one method.
    @Rest(method = HttpMethod.POST, url = "/find", contentType = "application/xml")
    String blockingRestWith2Body(@Body String body1, @Body String body2);

    // Invalid case: @Body mixed with a multipart @Part.
    @MultiPart
    @Rest(method = HttpMethod.POST, url = "/find", contentType = "application/xml")
    String blockingRestWithBodyAndMultiPart(@Body String body1, @Part("name") String name);

    // Edge case: @MultiPart declared but no @Part parameters present.
    @MultiPart
    @Rest(method = HttpMethod.POST, url = "/find", contentType = "application/xml")
    String blockingRestWithMultipartButNotHaveAnyPart();

    @Rest(method = HttpMethod.POST, url = "/find", contentType = "application/xml")
    void returnVoidMethod();

    // Async methods: only CompletableFuture return types are valid.
    @Async
    @Rest(method = HttpMethod.POST, url = "/find", contentType = "application/xml")
    CompletableFuture<String> nonBlockingMethod();

    @Async
    @Rest(method = HttpMethod.POST, url = "/find", contentType = "application/xml")
    String nonBlockingButNotReturnCompletableFuture();

    @Async
    @Rest(method = HttpMethod.POST, url = "/find", contentType = "application/xml")
    List<String> nonBlockingButNotReturnCompletableFuture2();
}
|
#!/usr/bin/env bash
# Applies every macOS settings script under $GROK_TOPICS/osx/settings/, then
# restarts the affected apps so the changes take effect.
# Inspired by
# ~/.osx — https://mths.be/osx

#
# Get Admin Upfront
#
sudo -v

# Keep-alive: update existing `sudo` time stamp until we're finished
while true; do sudo -n true; sleep 60; kill -0 "$$" || exit; done 2>/dev/null &

#
# Run settings
#
# FIX: expansions are now quoted so paths containing spaces survive word
# splitting ($GROK_TOPICS and $settings were previously unquoted).
for settings in "$GROK_TOPICS"/osx/settings/*; do
  echo "... running: $(basename "$settings")"
  source "$settings";
done

#
# Restart Apps
#
for app in "Activity Monitor" "Address Book" "Calendar" "Contacts" "cfprefsd" \
  "Dock" "Finder" "Mail" "Messages" "Safari" "SizeUp" "SystemUIServer" \
  "Transmission" "iCal"; do
  echo "... restarting $app"
  killall "${app}" > /dev/null 2>&1
done

echo "... Done. Kill Terminal and logout/restart to finish."
|
<filename>mp_sort/virtenv/lib/python3.6/site-packages/transcrypt/modules/org/transcrypt/__envir__.js
// Transcrypt-generated environment descriptor. `__envir__` is defined by the
// Transcrypt runtime before this module executes — do not edit by hand.
__envir__.interpreter_name = 'python';
__envir__.transpiler_name = 'transcrypt';
// The executor is the transpiler itself (code runs as transpiled JS).
__envir__.executor_name = __envir__.transpiler_name;
__envir__.transpiler_version = '3.7.16';
|
<reponame>updog123/EconomicGames_TEST<gh_stars>1-10
var iframe = document.getElementById("myFrame");
iframe.style.position = "relative";
set_element_size_style();
function set_element_size_style() {
iframe.style.width = iframe.parentElement.clientWidth + "px";
iframe.style.height = "600px"
}
window.addEventListener("resize", set_element_size_style); |
#!/bin/bash
#######################################################################################
# Compiles MUSEN locally.
# Requires common_config.sh in the same directory to run.
#
# To select version of MUSEN to build, run the script as
# $ ./make_musen_host.sh [--target=cli] [--target=gui] [--target=matlab]
# or
# $ ./make_musen_host.sh [-t=c] [-t=g] [-t=m]
# where
# cli/c - command line version (cmusen)
# gui/g - GUI version (musen)
# matlab/m - matlab library version (mmusen)
# Running the script without target parameters will build all versions.
# Running the script with at least one target will disable the rest not mentioned targets.
#
# Before running the script for the first time, run
# $ sudo ./install_prerequisites_host.sh
# to install all required libs and tools. Or run separate scripts as described further.
#
# Build requirements:
# gcc 10 - run './install_gcc.sh -l=host' to install into default path
# cmake v3.18 or newer - run './install_cmake.sh -l=host' to install into default path
# zlib v1.2.11 - run './install_zlib.sh -l=host' to install into ${MUSEN_EXTERNAL_LIBS_PATH}/zlib
# protobuf v3.14.0 - run './install_protobuf.sh -l=host' to install into ${MUSEN_EXTERNAL_LIBS_PATH}/protobuf
# Qt v5.15.2 - run './install_qt.sh -l=host' to install into ${MUSEN_EXTERNAL_LIBS_PATH}/qt
# CUDA v11.2 - run './install_cuda.sh -l=host' to install into default path
# MATLAB 2019b (optional)
#######################################################################################
# make the config file visible regardless of calling location and load it
PATH_PREFIX="${BASH_SOURCE%/*}"
if [[ ! -d "${PATH_PREFIX}" ]]; then PATH_PREFIX="${PWD}"; fi
. "${PATH_PREFIX}/common_config.sh"

# parse arguments: each -t/--target enables exactly one build flavour
BUILD_CLI=no
BUILD_GUI=no
BUILD_MAT=no
for i in "$@"
do
  case $i in
  -t=*|--target=*)
    TARGET="${i#*=}"
    case $TARGET in
    c|cli)
      BUILD_CLI=yes
      ;;
    g|gui)
      BUILD_GUI=yes
      ;;
    m|matlab)
      BUILD_MAT=yes
      ;;
    *)
      echo "Error! Unknown target: " "${TARGET}"
      exit 1
      ;;
    esac
    shift
    ;;
  *)
    echo "Error! Unknown option: " "${i}"
    exit 1
    ;;
  esac
done
# if no targets defined explicitly, build all
if [[ ${BUILD_CLI} == "no" && ${BUILD_GUI} == "no" && ${BUILD_MAT} == "no" ]]; then
  BUILD_CLI=yes
  BUILD_GUI=yes
  BUILD_MAT=yes
fi
#######################################################################################
# PATHS TO EXTERNAL LIBRARIES
export MUSEN_ZLIB_PATH=${MUSEN_EXTERNAL_LIBS_PATH}/zlib
export MUSEN_PROTO_PATH=${MUSEN_EXTERNAL_LIBS_PATH}/protobuf
export MUSEN_QT_PATH=${MUSEN_EXTERNAL_LIBS_PATH}/qt
export MUSEN_CUDA_PATH=/usr/local/cuda-${CUDA_VER}
export OPENGL_PATH=/usr/lib/x86_64-linux-gnu
#######################################################################################
# update build time
./generate_time_header.sh
mv -f ./BuildTime.h ${MUSEN_SRC_PATH}/MUSEN/BuildVersion/
# copy cmake script
cp CMakeLists.txt ${MUSEN_SRC_PATH}/
# png fix
#find ${MUSEN_SRC_PATH}/ -type f -iname '*.png' -exec pngcrush -ow -rem allb -reduce {} \; > png_cleanup_log 2>&1
# make the CUDA and protobuf toolchains reachable for the build
# cuda
PATH="$PATH":${MUSEN_CUDA_PATH}/bin/
# protobuf
PATH="$PATH":${MUSEN_PROTO_PATH}/bin/
LD_LIBRARY_PATH="${LD_LIBRARY_PATH}":${MUSEN_PROTO_PATH}/lib/
export CPLUS_INCLUDE_PATH=${MUSEN_PROTO_PATH}/include/
# qt
export CMAKE_PREFIX_PATH=${MUSEN_QT_PATH}/${QT_VER}/gcc_64:${CMAKE_PREFIX_PATH}
# create build directory
if [ ! -d ${MUSEN_BUILD_PATH} ]; then
  mkdir ${MUSEN_BUILD_PATH}
fi
#######################################################################################
# start build: first generate + build the protobuf target, then configure and
# build each requested MUSEN flavour.
# NOTE(review): "cmake protobuf -S ..." passes an extra positional argument to
# cmake alongside -S; the protobuf target itself is built by the following
# make call — confirm this first invocation is intentional.
cmake protobuf -S ${MUSEN_SRC_PATH} -B ${MUSEN_BUILD_PATH}
make protobuf --directory ${MUSEN_BUILD_PATH} --silent --jobs=8
cmake -S ${MUSEN_SRC_PATH} -B ${MUSEN_BUILD_PATH}
if [[ ${BUILD_CLI} == "yes" ]]; then
  make cmusen --directory ${MUSEN_BUILD_PATH} --keep-going --silent --jobs=8
fi
if [[ ${BUILD_GUI} == "yes" ]]; then
  make musen --directory ${MUSEN_BUILD_PATH} --keep-going --silent --jobs=8
fi
if [[ ${BUILD_MAT} == "yes" ]]; then
  make mmusen --directory ${MUSEN_BUILD_PATH} --keep-going --silent --jobs=8
fi
<reponame>kneczaj/youtube-viewer
import { Component, OnInit } from '@angular/core';
import {AuthenticationService} from '../services/authentication.service';
import {Router} from '@angular/router';
// Top-bar profile menu: shows a login link (preserving the return URL) for
// anonymous visitors, and a "Welcome" dropdown with a logout action for
// authenticated users.
@Component({
  selector: 'yv-profile-menu-page',
  template: `
    <div class="nav-item dropdown">
      <ng-template [ngIf]="!authService.user">
        <a class="nav-link" [routerLink]="['/login']" [queryParams]="{ returnUrl: router.url }">
          Login
        </a>
      </ng-template>
      <ng-template [ngIf]="authService.user">
        <a class="nav-link dropdown-toggle" href="#" id="navbarDropdown" role="button" data-toggle="dropdown">
          Welcome {{authService.user}}!
        </a>
        <div class="dropdown-menu" aria-labelledby="navbarDropdown">
          <a class="dropdown-item" (click)="authService.logout()">Logout</a>
        </div>
      </ng-template>
    </div>
  `,
  styles: []
})
export class ProfileMenuPageComponent implements OnInit {
  constructor(
    protected authService: AuthenticationService,
    protected router: Router
  ) { }

  // No initialisation needed; present to satisfy the OnInit interface.
  ngOnInit() {
  }

  // NOTE(review): the template calls authService.logout() directly, so this
  // wrapper appears to be unused — confirm before removing or relying on it.
  logout() {
    this.authService.logout();
  }
}
|
#!/bin/bash
# Removes cached "unprovisioned state" data (if present) and restarts the
# AOS service. Must be run as root; exits non-zero on any failure.

# Check if the script is being run as root
if [ "$EUID" -ne 0 ]; then
  echo "Please run this script as root"
  exit 1
fi

# Check if the unprovisioned_state directory exists
if [ -d "/var/aos/unprovisioned_state" ]; then
  # Remove the unprovisioned_state directory and all its contents.
  # Idiom fix: test the command's exit status directly instead of
  # inspecting $? afterwards; behavior is identical.
  if rm -rf /var/aos/unprovisioned_state; then
    echo "Unprovisioned state data removed successfully"
  else
    echo "Failed to remove unprovisioned state data"
    exit 1
  fi
else
  echo "Unprovisioned state directory not found"
fi

# Restart the AOS service
if systemctl restart aos.target; then
  echo "AOS service restarted successfully"
else
  echo "Failed to restart AOS service"
  exit 1
fi

exit 0
<filename>src/rules-configurations/eslint/comma-style.d.ts
import { RuleConfiguration } from '../../../support/Rule'

// Options for ESLint's `comma-style` rule: the comma position ("first" or
// "last"), optionally followed by an object of per-node-type exceptions.
type Options = (("first" | "last") | {
  exceptions?: {
    // Keyed by AST node type; `true` exempts that node type from the rule.
    [key: string]: boolean
  }
})[]

// Fully-typed configuration entry for this rule under the eslint plugin.
type Configuration = RuleConfiguration<'comma-style', 'eslint', Options>

export default Configuration
# models/customer.rb
# Plain ActiveRecord model backed by the `customers` table.
class Customer < ApplicationRecord
end

# routes.rb
# Standard RESTful routes (index/show/new/edit/create/update/destroy).
Rails.application.routes.draw do
  resources :customers
end

# customers_controller.rb
# CRUD controller for Customer records.
# NOTE(review): create/update/destroy neither redirect nor report validation
# failures — each renders its default template; confirm that is intended.
class CustomersController < ApplicationController
  def index
    @customers = Customer.all
  end

  def create
    @customer = Customer.create(customer_params)
  end

  def update
    # `find` raises ActiveRecord::RecordNotFound for an unknown id.
    @customer = Customer.find(params[:id])
    @customer.update(customer_params)
  end

  def destroy
    @customer = Customer.find(params[:id])
    @customer.destroy
  end

  private

  # Strong parameters: only name/address/phone may be mass-assigned.
  def customer_params
    params.require(:customer).permit(:name, :address, :phone)
  end
end
GBP="Group-Based Policy"
[[ $ENABLE_NFP = True ]] && NFP="Network Function Plugin"
function gbp_configure_nova {
iniset $NOVA_CONF neutron allow_duplicate_networks "True"
}
function gbp_configure_heat {
local HEAT_PLUGINS_DIR="/opt/stack/gbpautomation/gbpautomation/heat"
iniset $HEAT_CONF DEFAULT plugin_dirs "$HEAT_PLUGINS_DIR"
}
function gbp_configure_neutron {
iniset $NEUTRON_CONF group_policy policy_drivers "implicit_policy,resource_mapping,chain_mapping"
iniset $NEUTRON_CONF group_policy extension_drivers "proxy_group"
iniset $NEUTRON_CONF servicechain servicechain_drivers "simplechain_driver"
iniset $NEUTRON_CONF node_composition_plugin node_plumber "stitching_plumber"
iniset $NEUTRON_CONF node_composition_plugin node_drivers "heat_node_driver"
iniset $NEUTRON_CONF quotas default_quota "-1"
iniset $NEUTRON_CONF quotas quota_network "-1"
iniset $NEUTRON_CONF quotas quota_subnet "-1"
iniset $NEUTRON_CONF quotas quota_port "-1"
iniset $NEUTRON_CONF quotas quota_security_group "-1"
iniset $NEUTRON_CONF quotas quota_security_group_rule "-1"
iniset $NEUTRON_CONF quotas quota_router "-1"
iniset $NEUTRON_CONF quotas quota_floatingip "-1"
}
# Configure Neutron for the Network Function Plugin: service credentials,
# NFP node driver, admin-owned plumbing resources and default IP pools.
function nfp_configure_neutron {
    iniset $NEUTRON_CONF keystone_authtoken admin_tenant_name "service"
    iniset $NEUTRON_CONF keystone_authtoken admin_user "neutron"
    iniset $NEUTRON_CONF keystone_authtoken admin_password $ADMIN_PASSWORD
    iniset $NEUTRON_CONF group_policy policy_drivers "implicit_policy,resource_mapping,chain_mapping"
    iniset $NEUTRON_CONF node_composition_plugin node_plumber "admin_owned_resources_apic_plumber"
    iniset $NEUTRON_CONF node_composition_plugin node_drivers "nfp_node_driver"
    iniset $NEUTRON_CONF admin_owned_resources_apic_tscp plumbing_resource_owner_user "neutron"
    iniset $NEUTRON_CONF admin_owned_resources_apic_tscp plumbing_resource_owner_password $ADMIN_PASSWORD
    iniset $NEUTRON_CONF admin_owned_resources_apic_tscp plumbing_resource_owner_tenant_name "service"
    iniset $NEUTRON_CONF group_policy_implicit_policy default_ip_pool "11.0.0.0/8"
    iniset $NEUTRON_CONF group_policy_implicit_policy default_proxy_ip_pool "192.169.0.0/16"
    iniset $NEUTRON_CONF group_policy_implicit_policy default_external_segment_name "default"
    iniset $NEUTRON_CONF nfp_node_driver is_service_admin_owned "True"
    iniset $NEUTRON_CONF nfp_node_driver svc_management_ptg_name "svc_management_ptg"
}
# Process contract
# devstack phase dispatcher: $1 is the operation (stack/unstack/clean) and
# $2 the stack sub-phase (pre-install/install/post-config/extra).
if is_service_enabled group-policy; then
    if [[ "$1" == "stack" && "$2" == "pre-install" ]]; then
        echo_summary "Preparing $GBP"
    elif [[ "$1" == "stack" && "$2" == "install" ]]; then
        echo_summary "Installing $GBP"
        [[ $ENABLE_APIC_AIM = True ]] && install_apic_aim
        if [[ $ENABLE_NFP = True ]]; then
            echo_summary "Installing $NFP"
            # Image building can be skipped explicitly via DISABLE_BUILD_IMAGE.
            [[ $DISABLE_BUILD_IMAGE = False ]] && prepare_nfp_image_builder
        fi
    elif [[ "$1" == "stack" && "$2" == "post-config" ]]; then
        echo_summary "Configuring $GBP"
        gbp_configure_nova
        gbp_configure_heat
        gbp_configure_neutron
        [[ $ENABLE_NFP = True ]] && echo_summary "Configuring $NFP"
        [[ $ENABLE_NFP = True ]] && nfp_configure_neutron
        # REVISIT move installs to install phase?
        # install_apic_ml2
        install_gbpclient
        install_gbpservice
        [[ $ENABLE_NFP = True ]] && install_nfpgbpservice
        init_gbpservice
        [[ $ENABLE_NFP = True ]] && init_nfpgbpservice
        install_gbpheat
        install_gbpui
        [[ $ENABLE_APIC_AIM = True ]] && configure_apic_aim
        # Restart Apache so the UI/service changes take effect.
        stop_apache_server
        start_apache_server
    elif [[ "$1" == "stack" && "$2" == "extra" ]]; then
        echo_summary "Initializing $GBP"
        if [[ $ENABLE_NFP = True ]]; then
            echo_summary "Initializing $NFP"
            [[ $DISABLE_BUILD_IMAGE = False ]] && create_nfp_image
            assign_user_role_credential
            create_nfp_gbp_resources
            get_router_namespace
            copy_nfp_files_and_start_process
        fi
    fi

    if [[ "$1" == "unstack" ]]; then
        echo_summary "Removing $GBP"
    fi

    if [[ "$1" == "clean" ]]; then
        echo_summary "Cleaning $GBP"
    fi
fi
|
// Pull in the shared helpers from the project's utils module.
const utils = require('../utils');

// Script steps: initialise shared state, then report that no tests exist yet.
utils.init();
utils.logError('No tests written just yet...\n');
|
//
// mulle_objc_kvccache.h
// mulle-objc-runtime
//
// Created by Nat! on 18.07.16.
// Copyright (c) 2016 Nat! - <NAME>.
// Copyright (c) 2016 Codeon GmbH.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// Redistributions of source code must retain the above copyright notice, this
// list of conditions and the following disclaimer.
//
// Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// Neither the name of Mulle kybernetiK nor the names of its contributors
// may be used to endorse or promote products derived from this software
// without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
//
#ifndef mulle_objc_kvccache_h__
#define mulle_objc_kvccache_h__
#include "mulle-objc-cache.h"
#include "mulle-objc-method.h"
#include <string.h>
//
// storage mechanism for KVC caches
//
// 0 : get          1: take
// 2 : storedGet    3: storedTake
//
// Valuetype can be different for get/stored if ivar is
// declared as short, and method is declared as int
//
struct _mulle_objc_kvcinfo
{
   // One slot per accessor kind (see index table above).
   mulle_objc_implementation_t   implementation[ 4];
   mulle_objc_methodid_t         methodid[ 4];
   ptrdiff_t                     offset;       // ivar offset within the instance
   char                          valueType[ 4];
   char                          cKey[ 1];     // flexible
};

// Allocates a kvcinfo sized to hold a copy of cKey (flexible array member).
struct _mulle_objc_kvcinfo   *_mulle_objc_kvcinfo_new( char *cKey,
                                                       struct mulle_allocator *allocator);

// Releases a kvcinfo previously obtained from _mulle_objc_kvcinfo_new.
static inline void   _mulle_objc_kvcinfo_free( struct _mulle_objc_kvcinfo *entry,
                                               struct mulle_allocator *allocator)
{
   mulle_allocator_free( allocator, entry);
}

// Two kvcinfos are equal when their keys compare equal as C strings.
static inline int   _mulle_objc_kvcinfo_equals( struct _mulle_objc_kvcinfo *entry,
                                                struct _mulle_objc_kvcinfo *other)
{
   return( ! strcmp( entry->cKey, other->cKey));
}
#pragma mark - _mulle_objc_kvccache

// A KVC cache is layout-compatible with the generic _mulle_objc_cache;
// the inline wrappers below just cast and delegate.
struct _mulle_objc_kvccache
{
   struct _mulle_objc_cache   base;
};

// Sentinel pointer returned by lookups — presumably marks a conflicting
// cache entry for the key; confirm against the .c implementation.
#define MULLE_OBJC_KVCINFO_CONFLICT   ((struct _mulle_objc_kvcinfo *) -1)

// Allocates a cache with `size` entries (delegates to the generic cache).
static inline struct _mulle_objc_kvccache *
   mulle_objc_kvccache_new( mulle_objc_cache_uint_t size,
                            struct mulle_allocator *allocator)
{
   return( (struct _mulle_objc_kvccache *) mulle_objc_cache_new( size, allocator));
}

// Adds `info` under `keyid`; semantics are those of
// _mulle_objc_cache_add_pointer_entry.
static inline struct _mulle_objc_cacheentry *
   _mulle_objc_kvccache_add_entry( struct _mulle_objc_kvccache *cache,
                                   struct _mulle_objc_kvcinfo *info,
                                   mulle_objc_uniqueid_t keyid)
{
   return( _mulle_objc_cache_add_pointer_entry( (struct _mulle_objc_cache *) cache, info, keyid));
}

// Same as above but for a cache that is still being filled ("inactive");
// delegates to the generic inactivecache variant.
static inline struct _mulle_objc_cacheentry *
   _mulle_objc_kvccache_inactivecache_add_entry( struct _mulle_objc_kvccache *cache,
                                                 struct _mulle_objc_kvcinfo *info,
                                                 mulle_objc_uniqueid_t keyid)
{
   return( _mulle_objc_cache_inactivecache_add_pointer_entry( (struct _mulle_objc_cache *) cache, info, keyid));
}

// Looks up the kvcinfo stored for `key` (implemented in the .c file).
struct _mulle_objc_kvcinfo   *_mulle_objc_kvccache_lookup_kvcinfo( struct _mulle_objc_kvccache *cache,
                                                                   char *key);
#pragma mark - _mulle_objc_kvccachepivot

struct _mulle_objc_kvccachepivot
{
   mulle_atomic_pointer_t   entries; // for atomic XCHG with pointer indirection
};

// Atomically reads the cache currently installed in the pivot.
static inline struct _mulle_objc_kvccache   *_mulle_objc_kvccachepivot_atomicget_cache( struct _mulle_objc_kvccachepivot *pivot)
{
   return( (struct _mulle_objc_kvccache *) _mulle_objc_cachepivot_atomicget_cache( (struct _mulle_objc_cachepivot *) pivot));
}

// Atomically reads the raw entries pointer of the installed cache.
MULLE_C_ALWAYS_INLINE
static inline struct _mulle_objc_cacheentry   *_mulle_objc_kvccachepivot_atomicget_entries( struct _mulle_objc_kvccachepivot *p)
{
   return( _mulle_objc_cachepivot_atomicget_entries( (struct _mulle_objc_cachepivot *) p));
}

// Compare-and-swap of the entries pointer; the return convention is that of
// the generic _mulle_objc_cachepivot_atomiccas_entries it delegates to.
static inline int
   _mulle_objc_kvccachepivot_atomiccas_entries( struct _mulle_objc_kvccachepivot *p,
                                                struct _mulle_objc_cacheentry *new_entries,
                                                struct _mulle_objc_cacheentry *old_entries)
{
   return( _mulle_objc_cachepivot_atomiccas_entries( (struct _mulle_objc_cachepivot *) p, new_entries, old_entries));
}

// Replaces the current cache with `empty_cache`, freeing the old one via
// `allocator` — presumably used to invalidate all cached kvcinfo; confirm
// with the .c implementation.
int   _mulle_objc_kvccachepivot_invalidate( struct _mulle_objc_kvccachepivot *pivot,
                                            struct _mulle_objc_kvccache *empty_cache,
                                            struct mulle_allocator *allocator);

// Publishes `info` into the pivot's cache (implemented in the .c file).
int   _mulle_objc_kvccachepivot_set_kvcinfo( struct _mulle_objc_kvccachepivot *pivot,
                                             struct _mulle_objc_kvcinfo *info,
                                             struct _mulle_objc_kvccache *empty_cache,
                                             struct mulle_allocator *allocator);

#endif /* mulle_objc_kvccache_h__ */
|
/**
 * Route a simple HTTP-like request.
 *
 * @param method - HTTP verb (case-insensitive); only POST is accepted.
 * @param path - endpoint name to look up.
 * @returns a human-readable result string.
 */
function handleRequest(method: string, path: string): string {
  const predefinedEndpoints = ['endpoint1', 'endpoint2', 'endpoint3']; // Example predefined endpoints

  // Unknown endpoint: short-circuit before inspecting the method.
  if (!predefinedEndpoints.includes(path)) {
    return 'Endpoint not found';
  }

  // Only POST is supported on known endpoints.
  return method.toUpperCase() === 'POST'
    ? `Handling POST request for ${path}`
    : `Invalid method for ${path}`;
}

// Example usage:
console.log(handleRequest('POST', 'endpoint1')); // Output: Handling POST request for endpoint1
console.log(handleRequest('GET', 'endpoint2')); // Output: Invalid method for endpoint2
console.log(handleRequest('PUT', 'endpoint4')); // Output: Endpoint not found
class DataProcessor:
    """Applies a transformation to dict entries whose keys share a prefix."""

    def __init__(self, prefix: str):
        # Keys starting with this prefix are the ones to transform.
        self.prefix = prefix

    def apply_operation(self, data: dict, operation: callable) -> dict:
        """
        Apply the given operation to data attributes with names starting with the prefix.

        Args:
            data (dict): A dictionary of data attributes.
            operation (callable): A function that represents the operation to be applied.

        Returns:
            dict: A dictionary containing the modified data attributes.
        """
        # Transform matching entries; everything else passes through unchanged.
        return {
            key: operation(value) if key.startswith(self.prefix) else value
            for key, value in data.items()
        }
import random
import string

# Alphanumeric alphabet: a-z, A-Z, 0-9 — identical character set (and order)
# to the original hand-typed literal, but built from stdlib constants.
characters = string.ascii_lowercase + string.ascii_uppercase + string.digits
length = 8
# Build a random 8-character alphanumeric string; `_` marks the unused index.
# NOTE: random is not cryptographically secure — use `secrets` for tokens.
randstring = ''.join(random.choice(characters) for _ in range(length))
#!/bin/bash
# Test case: --set on a section that holds multiple keys must leave exactly
# one value ("onlyone") for that key in the output ini.
source "common.sh"

# $Multini, $InIni, $OutIni and `compare` are provided by common.sh.
$Multini -o "$OutIni" --set "$InIni" MultipleKeySection Key onlyone
compare
|
from typing import List


def bubble_sort(arr: List[int]) -> List[int]:
    """Sort ``arr`` in place with early-exit bubble sort and return it."""
    n = len(arr)
    for pass_num in range(n):
        made_swap = False
        # Each pass floats the largest remaining value to the end of the
        # unsorted region, which shrinks by one element per pass.
        for idx in range(n - pass_num - 1):
            if arr[idx] > arr[idx + 1]:
                arr[idx], arr[idx + 1] = arr[idx + 1], arr[idx]
                made_swap = True
        # A pass with no swaps means the list is already sorted.
        if not made_swap:
            break
    return arr
<reponame>lananh265/social-network
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.ic_stop_circle = void 0;

// Icon descriptor for the Material "stop circle" glyph — a plain-object
// rendition of its SVG tree (viewBox plus nested element nodes). Looks
// machine-generated; edit the source SVG rather than this data.
var ic_stop_circle = {
  "viewBox": "0 0 24 24",
  "children": [{
    "name": "g",
    "attribs": {},
    "children": [{
      "name": "rect",
      "attribs": {
        "fill": "none",
        "height": "24",
        "width": "24"
      },
      "children": [{
        "name": "rect",
        "attribs": {
          "fill": "none",
          "height": "24",
          "width": "24"
        },
        "children": []
      }]
    }]
  }, {
    "name": "g",
    "attribs": {},
    "children": [{
      "name": "path",
      "attribs": {
        "d": "M8,16h8V8H8V16z M12,2C6.48,2,2,6.48,2,12s4.48,10,10,10s10-4.48,10-10 S17.52,2,12,2L12,2z",
        "fill-rule": "evenodd"
      },
      "children": [{
        "name": "path",
        "attribs": {
          "d": "M8,16h8V8H8V16z M12,2C6.48,2,2,6.48,2,12s4.48,10,10,10s10-4.48,10-10 S17.52,2,12,2L12,2z",
          "fill-rule": "evenodd"
        },
        "children": []
      }]
    }]
  }]
};
exports.ic_stop_circle = ic_stop_circle;
# Generate a concourse pipeline file for the current git branch and upload it.
SCRIPT=$(readlink -f "$0")
SCRIPTPATH=$(dirname "$SCRIPT")

# Branch names look like "dev-123-something"; the tag is the "dev-123" prefix.
BRANCH=$(git symbolic-ref --short HEAD)
TAG=$(echo "$BRANCH" | sed 's/\(dev-[0-9]*\).*/\1/')
FILENAME="pipeline-$TAG.yml"
echo "branch=$BRANCH"
echo "   tag=$TAG"

# Render the pipeline from the template, substituting branch and tag.
# (Expansions are quoted so branch names with unusual characters don't
# word-split; the useless `cat | sed | sed` chain became one sed call.)
echo "# Created by script $0" > "$FILENAME"
echo "# From pipeline_template.yml" >> "$FILENAME"
echo "" >> "$FILENAME"
TEMPLATE="$SCRIPTPATH/pipeline_template.yml"
sed -e "s/___BRANCH___/$BRANCH/g" -e "s/___TAG___/$TAG/g" "$TEMPLATE" >> "$FILENAME"

# Upload the rendered pipeline to the "tutorial" concourse target.
fly -t tutorial set-pipeline -c "$FILENAME" -p "$BRANCH" -l ~/concourse-credentials.yml
echo "Created concourse pipeline $TAG"
|
import * as types from '../actionTypes';

/** Action creator: register a new player by name. */
export function addPlayer(name) {
  return { type: types.ADD_PLAYER, name };
}

/** Action creator: record a vote cast by `idFrom` for `idFor`. */
export function sendVote(idFrom, idFor) {
  return {
    type: types.SET_VOTE,
    vote: { from: idFrom, for: idFor }
  };
}

/** Action creator: mark the player with `id` as ready. */
export function setReady(id) {
  return { type: types.SET_READY, id };
}
#!/bin/sh
# Copyright (c) 2013-2016 The bitphantom Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.

# Bootstraps the autotools build system (generates ./configure).
export LC_ALL=C
set -e

# Always run from the directory this script lives in.
srcdir="$(dirname "$0")"
cd "$srcdir"

# macOS installs libtoolize as "glibtoolize"; honour it unless the caller
# already selected a LIBTOOLIZE binary.
if [ -z "${LIBTOOLIZE}" ] && GLIBTOOLIZE="$(command -v glibtoolize)"; then
  LIBTOOLIZE="${GLIBTOOLIZE}"
  export LIBTOOLIZE
fi

command -v autoreconf >/dev/null || \
  (echo "configuration failed, please install autoconf first" && exit 1)
autoreconf --install --force --warnings=all
|
<reponame>fernandobritto/CedroTech-API
import { Router } from 'express'

import usersRouter from '@app/users/routes/users.routes'
import sessionsRouter from '@app/users/routes/sessions.routes'
import documentsRouter from '@app/documents/routes/documents.routes'

// Top-level API router: mounts each feature module under its URL prefix.
const routes = Router()

routes.use('/users', usersRouter)
routes.use('/sessions', sessionsRouter)
routes.use('/documents', documentsRouter)

export default routes
|
#!/bin/bash
#
# Copyright (c) 2019-2020 P3TERX <https://p3terx.com>
#
# This is free software, licensed under the MIT License.
# See /LICENSE for more information.
#
# https://github.com/P3TERX/Actions-OpenWrt
# File name: diy-part2.sh
# Description: OpenWrt DIY script part 2 (After Update feeds)
#

# Modify default IP
sed -i 's/192.168.1.1/192.168.31.1/g' package/base-files/files/bin/config_generate

# Change the default hostname to AX3600 (the echoed strings are user-facing
# build log output and are deliberately left untouched).
echo '修改主机名'
sed -i "s/hostname='OpenWrt'/hostname='AX3600'/g" package/base-files/files/bin/config_generate
cat package/base-files/files/bin/config_generate |grep hostname=
echo '=========Alert hostname OK!========='

# Raise the conntrack connection limit. Older approach, kept for reference:
#sed -i 's/net.netfilter.nf_conntrack_max=.*/net.netfilter.nf_conntrack_max=65535/g' package/kernel/linux/files/sysctl-nf-conntrack.conf
# Corrected conntrack limit (credit: ベ七秒鱼ベ) — append to sysctl.conf instead:
sed -i '/customized in this file/a net.netfilter.nf_conntrack_max=65535' package/base-files/files/etc/sysctl.conf

# Add themes ("svn co" could pin a specific revision, e.g. from https://github)
git clone https://github.com/xiaoqingfengATGH/luci-theme-infinityfreedom package/luci-theme-infinityfreedom
git clone https://github.com/Leo-Jo-My/luci-theme-opentomcat.git package/luci-theme-opentomcat
git clone https://github.com/openwrt-develop/luci-theme-atmaterial.git package/luci-theme-atmaterial
git clone https://github.com/jerrykuku/luci-theme-argon.git package/luci-theme-argon
git clone https://github.com/sirpdboy/luci-app-netdata.git package/luci-app-netdata

# Add extra, optional packages
git clone https://github.com/rufengsuixing/luci-app-adguardhome.git package/luci-app-adguardhome
git clone https://github.com/vernesong/OpenClash.git package/OpenClash
git clone https://github.com/destan19/OpenAppFilter.git package/OpenAppFilter
git clone https://github.com/zzsj0928/luci-app-pushbot.git package/luci-app-pushbot
git clone https://github.com/riverscn/openwrt-iptvhelper.git package/openwrt-iptvhelper
git clone https://github.com/jerrykuku/luci-app-jd-dailybonus.git package/luci-app-jd-dailybonus

# Add smartdns and DNS filtering
git clone https://github.com/kiddin9/luci-app-dnsfilter.git package/luci-app-dnsfilter
git clone https://github.com/pymumu/openwrt-smartdns package/smartdns
git clone -b lede https://github.com/pymumu/luci-app-smartdns.git package/luci-app-smartdns

# Add luci-theme-argon (alternative pinned checkout, kept for reference)
#git clone --depth=1 -b 18.06 https://github.com/jerrykuku/luci-theme-argon
#git clone --depth=1 https://github.com/jerrykuku/luci-app-argon-config
#rm -rf ../lean/luci-theme-argon
|
/**
 * Classic iterative binary search.
 *
 * @param {number[]} arr - array sorted in ascending order.
 * @param {*} val - value to locate.
 * @returns {number} index of `val` in `arr`, or -1 when absent.
 */
function binarySearch(arr, val) {
  let lo = 0;
  let hi = arr.length - 1;

  while (lo <= hi) {
    const mid = Math.floor((lo + hi) / 2);
    const probe = arr[mid];

    if (probe === val) {
      return mid;
    }

    // Discard the half that cannot contain the value.
    if (probe < val) {
      lo = mid + 1;
    } else {
      hi = mid - 1;
    }
  }

  return -1;
}
// Example usage — the original called binarySearch(arr, val) with `arr` and
// `val` never defined, which throws a ReferenceError at runtime; pass
// concrete sample values instead.
console.log(binarySearch([1, 3, 5, 7], 5)); // output: 2
"""
Declares a @builtin decorator class for tagging php built-in functions, as well as implements and exports most if not all php built-in functions.
"""
import pyphp.scope as scope

import builtin
import string
import regex
import constants
import datetime
import lang


def gen_builtins():
    # Yield (name, value) pairs for everything exported by the builtin modules.
    modules=[constants, datetime, lang, string, regex]
    for module in modules:
        for member_name in dir(module):
            member = getattr(module, member_name)
            if member_name == 'CONSTANTS':
                # NOTE(review): if CONSTANTS is a dict, iterating it yields
                # keys only and this unpacking fails; likely needs
                # member.items() — confirm against the constants module.
                for k,v in member:
                    yield (k, v)
            elif isinstance(member, builtin.builtin):
                yield(member_name, member)


# Module-level scope exposing every collected builtin under its PHP name.
builtins = scope.scope(
    dict(x for x in gen_builtins()),
    name='phpbuiltins'
)
|
<reponame>akelly1954/Samples
#pragma once
/////////////////////////////////////////////////////////////////////////////////
// MIT License
//
// Copyright (c) 2022 <NAME>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
/////////////////////////////////////////////////////////////////////////////////
#include <Utility.hpp>
#include <string>
#include <map>
#include <utility>
#include <MainLogger.hpp>
namespace EnetUtil
{
    // Used to get a list of IP addresses belonging to this system.
    // The map key is the string ip address; in the mapped pair, .first
    // is the string interface name and .second is the address family.
    // The first overload sets up a logger and calls the second overload.
    // Or, use your own.
    // Returns an empty map if not found or any errors occurred.
    std::map<std::string, std::pair<std::string, std::string>> get_all_self_ip_addresses();
    std::map<std::string, std::pair<std::string, std::string>> get_all_self_ip_addresses(Log::Logger logger);

    // Used to get a single IP address belonging to this system
    // which is actually connected to the internet.
    // This is done by attempting a connection to '8.8.8.8' (google dns).
    // The ip address parameter is an address to be used instead of 8.8.8.8.
    // Returns an empty string if not found or any errors occurred.
    std::string get_primary_self_ip_address(Log::Logger logger,
                                            std::string useIpAddress = "8.8.8.8",
                                            uint16_t udpDnsPort = 53);

    // This is an overloaded version of the function for callers that do not
    // have a logger configured.
    std::string get_primary_self_ip_address( std::string useIpAddress = "8.8.8.8",
                                             uint16_t udpDnsPort = 53);

    // For any IP address belonging to this system which is actually connected
    // to the internet, get the MAC address for that interface. The goal is
    // to get the primary IP address' MAC address using the interface name for
    // that ip address.
    // Returns an empty string if not found or any errors occurred.
    std::string get_self_mac_address(Log::Logger logger, std::string interfaceName);

    // This is an overloaded version of the function for callers that do not
    // have a logger configured.
    std::string get_self_mac_address(std::string interfaceName);

} // namespace EnetUtil
|
# Emit RouterOS commands that rebuild an IPv4 address-list.
#   $1 file : text file with one CIDR entry per line
#   $2 name : address-list name
#   $3 lan  : when non-empty, also add the RFC1918 private ranges
make_ros() {
    local file=$1
    local name=$2
    local lan=$3
    echo "/ip firewall address-list remove [/ip firewall address-list find list=$name]"
    echo "/ip firewall address-list"
    if [[ -n "$lan" ]]; then
        echo "add address=10.0.0.0/8 disabled=no list=$name"
        echo "add address=172.16.0.0/12 disabled=no list=$name"
        echo "add address=192.168.0.0/16 disabled=no list=$name"
    fi
    # IFS= and -r keep leading whitespace and backslashes in entries intact
    # (plain `read line` would mangle backslashes); $file is quoted so
    # paths with spaces work.
    while IFS= read -r line; do
        echo "add address=$line disabled=no list=$name"
    done < "$file"
}
# Emit RouterOS commands that rebuild an IPv6 address-list.
#   $1 file : text file with one IPv6 CIDR entry per line
#   $2 name : address-list name
#   $3 lan  : when non-empty, also add the ULA range fd00::/8
make_ros_ipv6() {
    local file=$1
    local name=$2
    local lan=$3
    echo "/ipv6 firewall address-list remove [/ipv6 firewall address-list find list=$name]"
    echo "/ipv6 firewall address-list"
    if [[ -n "$lan" ]]; then
        echo "add address=fd00::/8 disabled=no list=$name"
    fi
    # IFS= and -r keep leading whitespace and backslashes in entries intact;
    # $file is quoted so paths with spaces work.
    while IFS= read -r line; do
        echo "add address=$line disabled=no list=$name"
    done < "$file"
}
# Convert every *.txt routes file in the current directory into a RouterOS
# .rsc script. Expansions are quoted so filenames with spaces don't break,
# and the [ -z ... ] test no longer fails on an unquoted empty expansion.
for file in *.txt; do
    name=${file%.*}
    # Files named *.ipv6.txt get the IPv6 generator; the parameter expansion
    # strips the suffix, so an empty result means the name matched.
    if [ -z "${file##*.ipv6.txt}" ]; then
        make_ros_ipv6 "$file" "$name" > "$name.rsc"
    else
        make_ros "$file" "$name" > "$name.rsc"
    fi
done

# Generate backward Compatibility address-list.rsc
make_ros chnroutes.txt novpn yes > address-list.rsc
make_ros_ipv6 chnroutes.ipv6.txt novpn.ipv6 yes > address-list.ipv6.rsc
|
<filename>Intro to JS Unit testing/test/ship_test.js<gh_stars>1-10
var expect = require('chai').expect;

// Mocha/chai unit tests for the battleship helpers in game_logic/ship_methods.
describe('SHIP METHODS', () => {
  describe('Check for ship', () => {
    var checkForShip = require('../game_logic/ship_methods').checkForShip;
    var player;

    // Hook to reuse the player object
    before(() => {
      player = {
        ships: [
          {
            locations: [
              [0, 0],
              [0, 1],
            ],
          },
          {
            locations: [
              [8, 8],
              [8, 9],
            ],
          },
          {
            locations: [
              [6, 6],
              [6, 7],
              [6, 8],
            ],
          },
        ],
      };
    });

    it('Should correctly report no ship at a given player coordinate', () => {
      expect(checkForShip(player, [9, 9])).to.be.false;
    });

    it('Should correctly report hit at a given player coordinate', () => {
      expect(checkForShip(player, [0, 0])).to.deep.equal(player.ships[0]);
    });

    it('Should handle ships located at more than one coordinate', () => {
      expect(checkForShip(player, [9, 9])).to.be.false;
      expect(checkForShip(player, [0, 0])).to.deep.equal(player.ships[0]);
    });

    it('Should be able to handle multiple ships', () => {
      expect(checkForShip(player, [0, 1])).to.deep.equal(player.ships[0]);
      expect(checkForShip(player, [0, 0])).to.deep.equal(player.ships[0]);
      expect(checkForShip(player, [9, 9])).to.be.false;
      expect(checkForShip(player, [8, 8])).to.deep.equal(player.ships[1]);
      expect(checkForShip(player, [6, 7])).to.deep.equal(player.ships[2]);
      expect(checkForShip(player, [6, 8])).to.deep.equal(player.ships[2]);
    });
  });

  describe('Damage Ship', () => {
    var damageShip = require('../game_logic/ship_methods').damageShip;

    it('Should register damage on a given ship at a given location', () => {
      var ship = {
        locations: [[0, 0]],
        damage: [],
      };
      damageShip(ship, [0, 0]);
      expect(ship.damage).to.not.be.empty;
      expect(ship.damage[0]).to.deep.equal([0, 0]);
    });
  });

  describe('fire a shot at an opposing player', function () {
    var fireAtLocation = require('../game_logic/ship_methods').fireAtLocation;
    var player2;

    // Build a fresh player before every test: fireAtLocation mutates damage.
    beforeEach(function () {
      player2 = {
        ships: [
          {
            locations: [[0, 0]],
            damage: [],
          },
        ],
      };
    });

    // reference for teardown, to be used with databases
    // after(function() {
    //   console.log('entire test suite completed');
    // });
    //
    // afterEach(function() {
    //   console.log('one unit test completed');
    // });

    it('should add to ship damage array at guessed coordinates on confirmed hit', function () {
      var player1Guess = [0, 0];
      fireAtLocation(player2, player1Guess);
      expect(player2.ships[0].damage[0]).to.deep.equal(player1Guess);
    });

    it('should confirm if the guess is a miss', function () {
      var player1Guess = [0, 1];
      expect(fireAtLocation(player2, player1Guess)).to.be.false;
    });

    it('should NOT record damage if there is no ship at the guessed coordinate', function () {
      var player1Guess = [1, 1];
      fireAtLocation(player2, player1Guess);
      expect(player2.ships[0].damage).to.be.empty;
    });
  });
});
|
# Minimal Tkinter demo: a 200x100 window titled "My GUI Application"
# containing a single greeting label.
import tkinter as tk

# create the application window (root of the widget tree)
app = tk.Tk()

# set the title and size (width x height in pixels) of the window
app.title('My GUI Application')
app.geometry("200x100")

# create a label attached to the window
label = tk.Label(app, text = "Hello, world!")

# pack the label in the window (geometry manager sizes/places it)
label.pack()

# always be sure to ensure the main window loop runs — blocks until the
# window is closed, dispatching GUI events
app.mainloop()
#!/usr/bin/env bash
# Convert office documents (docx/pptx) to PDF by POSTing them to a remote
# unoconv "microwave" service. Supports single-file mode and -d directory mode.
set -e

# SETTINGS
################################################################################
MICROWAVE_HOST="http://35.185.105.222"
MICROWAVE_PORT="8989"
MICROWAVE_PATH="/unoconv/pdf"
MICROWAVE_URL="${MICROWAVE_HOST}:${MICROWAVE_PORT}${MICROWAVE_PATH}"

# INPUT PROCESSING
################################################################################
if [ -z "$1" ]; then
    echo -e "Missing command args. You must provide file path use the -d option "
    echo -e "Usage:"
    echo -e "    ./topdf.sh DomeDir/somefile.docx   # --> SomeDir/somefile.pdf"
    echo -e " OR "
    echo -e "    ./topdf.sh -d   # convert all docx/pptx files in current dir and subdirs"
    exit 1
fi

if [ "$1" == "-d" ]
then #### DIRECTORY MODE ######################################################
    echo "Processing all office files in current dir " `pwd`

    echo "A. Converting all DOCXs via microwave )))))))))))))))))))))))))))))))))"
    # -print0 with `read -d $'\0'` keeps filenames containing spaces intact.
    find . -name '*.docx' -print0 |
        while IFS= read -r -d $'\0' line;
        do
            echo " - converting $line";
            # Re-invoke this script in single-file mode for each match.
            $0 "$line"
        done

    echo "B. Converting all PPTX [ ] [ ] [ ] [ ] [ ] [ ] [ ] [ ] "
    find . -name '*.pptx' -print0 |
        while IFS= read -r -d $'\0' line;
        do
            echo " - converting $line";
            $0 "$line"
        done
else #### FILE MODE ###########################################################
    FILENAME=$1 # path to file we want to process
    filename_noext="${FILENAME%.*}"
    ext="${FILENAME##*.}"
    OUTFILENAME="${filename_noext}.pdf"
    echo "$OUTFILENAME"
    #
    # Skip conversion when the target PDF already exists next to the source.
    if [ ! -f "$OUTFILENAME" ]
    then
        echo "Running:"
        echo "curl --form file=@\"${FILENAME}\" ${MICROWAVE_URL} > ${OUTFILENAME}"
        curl --form file=@"${FILENAME}" ${MICROWAVE_URL} > "$OUTFILENAME"
    fi
fi
|
<gh_stars>1-10
import { Component } from '@angular/core';
import { NavController } from 'ionic-angular';
import { Events } from 'ionic-angular';
import { BluetoothlocationProvider } from '../../providers/btlocationprovider';
@Component({
  selector: 'page-about',
  templateUrl: 'about.html'
})
export class AboutPage {

  // Discovered BLE devices keyed by device id.
  devices: any = {};
  devicesKeys: any = [];
  // Id / rssi of the sensor with the strongest signal seen so far.
  currBLESensorId: string = '';
  currBLESensorRssi: any;

  constructor(
    public navCtrl: NavController,
    private btlocation: BluetoothlocationProvider,
    public events: Events
  ) {
    this.devices = {};
    events.subscribe('btloc:device', (locX, locY, device) => {
      // Track the strongest RSSI while scanning the already-known devices.
      var highestRssi = -200;
      var highestRssiId = '';
      var loopedTillIndex = 0;
      var found = false;
      this.devicesKeys = Object.keys(this.devices);
      // BUG FIX: the original used `for (var k in this.devicesKeys)`, which
      // makes `k` the array INDEX ("0", "1", ...) rather than the device id.
      // `this.devices[k]` was therefore always undefined and `k == device.id`
      // never matched, so known devices were re-added on every event instead
      // of updated. Iterate the actual key values instead.
      for (var idx = 0; idx < this.devicesKeys.length; idx++) {
        var k = this.devicesKeys[idx];
        loopedTillIndex = loopedTillIndex + 1;
        if (this.devices[k] != null && this.devices[k].rssi > highestRssi) {
          highestRssi = this.devices[k].rssi;
          highestRssiId = k;
        }
        if (k == device.id) {
          // Known device: refresh its signal strength and stop scanning.
          this.devices[k].rssi = device.rssi;
          found = true;
          break;
        }
      }
      if (!found) {
        // First sighting: remember the device.
        this.devices[device.id] = device;
        if (device.rssi > highestRssi) {
          highestRssi = device.rssi;
          highestRssiId = device.id;
        }
      } else {
        // We broke out early; the remaining devices still need to be checked
        // for the strongest RSSI.
        for (var i = loopedTillIndex; i < this.devicesKeys.length; i++) {
          if (this.devices[this.devicesKeys[i]].rssi > highestRssi) {
            highestRssi = this.devices[this.devicesKeys[i]].rssi;
            highestRssiId = this.devicesKeys[i];
          }
        }
      }
      this.currBLESensorId = highestRssiId;
      this.currBLESensorRssi = highestRssi;
    });
  }

  // Start scanning when the view becomes active; stop when it is left.
  ionViewWillEnter() {
    this.btlocation.start(false);
  }

  ionViewDidLeave() {
    this.btlocation.stop();
  }
}
|
/*
* This work is licensed under a Creative Commons Attribution-NonCommercial 3.0 United States License.
* For more information go to http://creativecommons.org/licenses/by-nc/3.0/us/
*/
package org.eastsideprep.spacecritters.alieninterfaces;
/**
*
* @author gmein
*/
public class AlienSpecies {

    public String domainName;
    public String packageName;
    public String className;
    private String speciesName; // lazily-built "domain:package:class" cache
    public int speciesID;
    public IntegerPosition position; // for view purposes
    public boolean isResident;

    /**
     * Creates a species record without a grid position.
     *
     * @param domainName  domain part of the species identity
     * @param packageName package part of the species identity
     * @param className   class part; names ending in "resident" mark residents
     * @param id          numeric species id
     */
    public AlienSpecies(String domainName, String packageName, String className, int id) {
        this.domainName = domainName;
        this.packageName = packageName;
        this.className = className;
        this.speciesID = id;
        // Residency is derived purely from the class-name suffix convention.
        this.isResident = className.toLowerCase().endsWith("resident");
    }

    /**
     * Creates a species record positioned at grid cell (x, y). //[Q]
     * Delegates to the base constructor so the initialization logic
     * (including the resident check) lives in exactly one place.
     */
    public AlienSpecies(String domainName, String packageName, String className, int id, int x, int y) {
        this(domainName, packageName, className, id);
        this.position = new IntegerPosition(x, y);
    }

    /** Returns "domain:package:class", built on first use and cached. */
    public String getFullSpeciesName() {
        if (speciesName == null) {
            speciesName = domainName + ":" + packageName + ":" + className;
        }
        return speciesName;
    }
}
|
import re


def spell_correction(word):
    """Filter the words of `word` against the words.txt dictionary.

    Known words pass through unchanged; for unknown words a single-edit
    correction is attempted (but see the NOTE below — the attempt is a no-op).
    Returns the surviving words joined with single spaces.
    """
    # Remove punctuation
    word = re.sub(r'[^\w\s]', '', word)

    # Separate words using whitespace
    word_list = word.split()

    # Create a set to store the dictionary words
    dict_words = set()

    # Open the words.txt dictionary (one word per line, in the CWD)
    with open('words.txt', 'r') as file:
        # Read each line and add to the dictionary
        for line in file:
            dict_words.add(line.rstrip('\n'))

    # Create a correction list
    corrections = []

    # Iterate over the word list
    for word in word_list:
        # If word is in the dictionary, add to correction list
        if word in dict_words:
            corrections.append(word)
        # Otherwise, search for a correction
        else:
            # Connect character to the ones before and after
            for pos in range(1,len(word)):
                # Store prefix and suffix
                prefix = word[:pos]
                suffix = word[pos+1:]
                # Store possible correction
                # NOTE(review): prefix + word[pos] + suffix reconstructs the
                # original word unchanged (suffix starts at pos+1), so no new
                # candidate is ever produced and unknown words are silently
                # dropped. Probably intended `prefix + suffix` (deletion) or
                # a substitution over the alphabet — confirm intent before
                # changing behavior.
                correction = prefix + word[pos] + suffix
                # Check if the correction is in the dictionary
                if correction in dict_words:
                    corrections.append(correction)
                    break

    # Return the corrected words
    return ' '.join(corrections)
package models;
import java.util.Date;
import java.util.concurrent.Callable;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.bson.types.ObjectId;
import org.jongo.MongoCollection;
import org.jongo.marshall.jackson.oid.Id;
//@lombok.Getter
// Mongo document recording a single coin credit/debit for a user,
// persisted via jongo into the "coin" collection.
public class Coin extends Model {

    @Id
    private ObjectId _id;
    private ObjectId payId;  // presumably the related payment document — confirm
    private ObjectId userId; // owner of the coins
    private int amount;      // coin amount for this transaction
    // NOTE(review): field name has a typo ("transationDate"); renaming it
    // would change the persisted MongoDB field name, so it is flagged only.
    private Date transationDate;

    public Coin() {
    }

    // Convenience constructor that also assigns a fresh ObjectId.
    public Coin(ObjectId userId, ObjectId payId, int amount, Date transactionDate){
        this._id = new ObjectId();
        this.userId = userId;
        this.payId = payId;
        this.amount = amount;
        this.transationDate = transactionDate;
    }

    // Saves (inserts or updates) this document in the "coin" collection.
    public void save() {
        MongoCollection coinCol = jongo.getCollection("coin");
        coinCol.save(this);
    }

    /*public static Coin get(ObjectId obj_id) {
        String key = "payment:" + obj_id;
        Pay pay = cache(key, Pay.class, new Callable<Pay>() {
            public Pay call() {
                MongoCollection payCol = jongo.getCollection("pay");
                Pay pay = payCol.findOne(obj_id).as(Pay.class);
                return pay;
            }
        });
        return pay;
    }*/
}
|
<gh_stars>0
import java.util.regex.Pattern;
public class Abbreviator {

    // Compiled once: Pattern.compile is comparatively expensive and the regex
    // is constant, so per-call compilation was wasted work.
    private static final Pattern LONG_WORD = Pattern.compile("[a-zA-Z]{4,}");

    /**
     * Replaces every alphabetic word of four or more letters with its
     * numeronym: first letter + count of interior letters + last letter
     * (e.g. "internationalization" becomes "i18n").
     *
     * @param string text to abbreviate; non-letter characters are untouched
     * @return the text with all long words abbreviated
     */
    public String abbreviate(String string) {
        return LONG_WORD.matcher(string).replaceAll(m -> {
            var s = m.group();
            var l = s.length();
            return s.charAt(0) + Integer.toString(l - 2) + s.charAt(l - 1);
        });
    }
}
|
#!/bin/bash
# Create a Python venv with the requested dbt adapter installed, then run the
# integration test suite against target "$1" (e.g. databricks, azuresql,
# athena, or any dbt-<name> adapter).
echo "Setting up virtual environment"

VENV="venv/bin/activate"

# First run only: build the venv and install the adapter for target "$1".
if [[ ! -f $VENV ]]; then
    python3.8 -m venv venv
    . $VENV
    pip install --upgrade pip setuptools
    # "$1" is quoted throughout: unquoted, an empty/missing argument made
    # `[ $1 == ... ]` fail with "unary operator expected".
    if [ "$1" == 'databricks' ]
    then
        echo "Installing dbt-spark"
        pip install dbt-spark[ODBC] --upgrade --pre
    elif [ "$1" == 'azuresql' ]
    then
        echo "Installing dbt-sqlserver"
        pip install dbt-sqlserver --upgrade --pre
    elif [ "$1" == 'athena' ]
    then
        echo "Installing dbt-athena"
        pip install git+https://github.com/Tomme/dbt-athena.git --upgrade --pre
    else
        echo "Installing dbt-$1"
        pip install "dbt-$1" --upgrade --pre
    fi
fi

. $VENV

echo "Changing working directory: integration_tests"
cd integration_tests

# Provide a default dbt profile if the user has none.
if [[ ! -e ~/.dbt/profiles.yml ]]; then
    echo "Copying sample profile"
    mkdir -p ~/.dbt
    cp ci/sample.profiles.yml ~/.dbt/profiles.yml
fi

echo "Starting integration tests"
dbt deps --target "$1"
dbt seed --full-refresh --target "$1"
dbt run-operation prep_external --target "$1"
dbt run-operation stage_external_sources --var 'ext_full_refresh: true' --target "$1"
dbt run-operation stage_external_sources --target "$1"
dbt test --target "$1"
# Simple value object holding a customer's contact details.
class Customer
  attr_accessor :name, :address

  # @param name [String] the customer's display name
  # @param address [String] the customer's mailing address
  def initialize(name, address)
    @name, @address = name, address
  end
end
-- NOTE(review): "#{param}" is interpolated directly into the statement text,
-- which is a SQL injection vector; bind it as a placeholder instead
-- (e.g. VALUES (?) with a prepared statement).
INSERT INTO login_records (login_info_column) VALUES (#{param})
<reponame>vishaltanwar96/iTrack<filename>src/task/models.py
from django.db import models
from user.models import User
from project.models import Project
from shared.models import AbstractEntity, AbstractRemarksHistory
from itrack.model_field_validation import past_date_validator
class Task(AbstractEntity):
    """A unit of work on a project, assigned between users.

    Common audit/metadata fields come from ``AbstractEntity``.
    """

    # Label consumed by the AbstractEntity machinery — presumably a
    # display/type name; confirm against AbstractEntity's definition.
    entity = "Task"

    # User responsible for completing the task.
    assigned_to = models.ForeignKey(
        User, on_delete=models.CASCADE, related_name="responsible"
    )
    # User who created/assigned the task.
    assigned_by = models.ForeignKey(
        User, on_delete=models.CASCADE, related_name="creator"
    )
    # Optional reviewer; a task may exist without one.
    reviewed_by = models.ForeignKey(
        User, on_delete=models.CASCADE, related_name="reviewer", null=True
    )
    # Deleting the project cascades to its tasks.
    project = models.ForeignKey(Project, on_delete=models.CASCADE)
    # dependency limited to one person for now
    # NOTE(review): CharField with null=True but no blank=True is unusual in
    # Django (empty strings vs NULL get mixed) — confirm this is intended.
    dependency = models.CharField(max_length=255, null=True)
    # Planned completion date; past_date_validator's exact rule is defined in
    # itrack.model_field_validation — confirm its semantics there.
    expected_completion_date = models.DateField(validators=[past_date_validator])
    # Filled in once the task is actually finished.
    actual_completion_date = models.DateField(
        null=True, validators=[past_date_validator]
    )

    class Meta:
        db_table = "task"
class TaskRemarksHistory(AbstractRemarksHistory):
    """Remark/comment history entries attached to a single Task."""

    # Each remark row belongs to exactly one task; deleting the task
    # cascades to its remark history.
    task = models.ForeignKey(Task, on_delete=models.CASCADE)

    class Meta:
        db_table = "task_remarks_history"
|
<reponame>samuelgrigolato/tafeito<filename>src/components/App/index.tsx
import React, { useEffect } from 'react';
import { Routes, Route } from "react-router-dom";
import Login from '../../screens/Login';
import Tasks from '../../screens/Tasks';
import { useLocalStorage } from '../../hooks/useLocalStorage'
import { useNavigate } from 'react-router-dom';
import { TokenProps } from '../../hooks/useAxios';
// Root component: wires up routing and keeps the auth token in localStorage,
// redirecting between /login and /tasks as the token appears/disappears.
const App = () => {
  // Token persisted under the "token" key; defaults to an empty token object.
  const [tokenObj, setTokenObj] = useLocalStorage<TokenProps>("token", {token:null});
  let navigate = useNavigate();

  // React to the token changing in another tab/window via the "storage" event
  // (presumably cross-tab sync — the writing tab is handled by the effect below).
  useEffect(() => {
    function checkTokenData() {
      const localToken = localStorage.getItem('token');
      if (localToken) {
        const localTokenObj:TokenProps = JSON.parse(localToken);
        if(localTokenObj.token){
          navigate('/tasks');
        } else {
          navigate('/login');
        }
      } else {
        navigate('/login');
      }
    }
    window.addEventListener('storage', checkTokenData)
    // Cleanup: deregister on unmount so the listener is not leaked.
    return () => {
      window.removeEventListener('storage', checkTokenData)
    }
    // NOTE(review): `navigate` is omitted from the deps array so the listener
    // registers once — confirm this matches the project's lint config.
  }, [])

  // Redirect in THIS tab whenever the in-memory token state changes
  // (also runs once on mount, sending the user to the correct screen).
  useEffect(() => {
    if (tokenObj.token !== null) {
      navigate('/tasks');
    } else {
      navigate('/login');
    }
  }, [tokenObj]);

  return (
    <Routes>
      <Route path="/login" element={<Login updateToken={(token) => setTokenObj({token})} />} />
      <Route path="/tasks" element={<Tasks updateToken={(token) => setTokenObj({token})}/>} />
      <Route path="*" element={<div>not found</div>} />
    </Routes>
  );
}
export default App; |
<!-- Static restaurant menu: item name in bold followed by its price. -->
<div>
  <h1>Restaurant Menu</h1>
  <ul>
    <li><strong>Burger</strong> - $5.00</li>
    <li><strong>Fries</strong> - $2.50</li>
    <li><strong>Soda</strong> - $1.00</li>
  </ul>
</div>
// IDs of the cues currently loaded (reserved for future cue deletion).
var activeCueIds = [];
// Number of times scrubbed. Could put in a function to reset after x number
// of scrubs, and then instead load new subs in the same genre.
var nScrubs = -1;

$( document ).ready(function() {
    //for when DOM is ready..
    //initialise with subs:
    scrubTrack(nScrubs + 1);

    // Show the loader until the active video iframe finishes loading.
    var iframe_active = $(".activevideo:first").data("zid");
    if (iframe_active == null || iframe_active <= 0) $('#ajax-loader-video').toggle();
    $('#'+iframe_active).load(function(){
        $('#ajax-loader-video').toggle();
    });

    // Once metadata is available, skip the first 3 seconds of the video.
    document.getElementById('video').addEventListener('loadedmetadata', function() {
        this.currentTime = 3;
    }, false);

    //Enable swiping... (jQuery swipe plugin; only "up" triggers a scrub,
    // the other directions just log for now).
    $(".swipescrub").swipe( {
        //Generic swipe handler for all directions
        swipe: function(event, direction, distance, duration, fingerCount, fingerData) {
            if (direction == null) {return false;}
            if (direction == 'up') {
                console.log("scrub up " + nScrubs);
                scrubTrack(nScrubs);
            }
            if (direction == 'down') {console.log("down");} //return false;
            if (direction == 'right') console.log("right"); //return false;
            if (direction == 'left') {console.log("left");} //return false;
            //if (direction == 'left') rotatelement(".info-panels",180); //return false;
        },
        //Default is 75px
        threshold:15
    });
});
/**
 * Loads the next batch of captions for the #video element.
 * Reads cue data (.start/.end/.text) from the #comedy list item at the given
 * offset, creates a fresh "captions" text track and appends one VTTCue per
 * entry found.
 *
 * @param {number} offsetIndex - index into the "#comedy li" list to load from.
 */
function scrubTrack(offsetIndex) {
    // `video` resolves to the element with id="video" (implicit DOM global).
    var track = video.addTextTrack("captions", "comedy", "en");
    track.mode = "showing";

    // Parallel arrays buffering the upcoming cues.
    var starttime = [], endtime = [], text = [];

    // FIX: declared with `var` — the original assigned $selector without a
    // declaration, leaking an implicit global.
    var $selector = $("#comedy li").eq(offsetIndex);

    // Extract start/end/text from the selected list item(s).
    $selector.each(function (i) {
        starttime[i] = $selector.eq(i).find(".start").text();
        endtime[i] = $selector.eq(i).find(".end").text();
        text[i] = $selector.eq(i).find(".text").text();
    });

    for (var i = 0; i < starttime.length; i++) {
        console.log(i, starttime[i], endtime[i], text[i]);
        // start/end come in as strings; VTTCue coerces them to numbers —
        // assumes the markup contains plain numeric seconds (TODO confirm).
        track.addCue(new VTTCue(starttime[i], endtime[i], text[i]));
    }
    // (Removed the dead trailing `offsetIndex++`: it mutated only the local
    // parameter immediately before returning, so it had no effect; the large
    // commented-out sample-cue dump was also deleted.)
}
/*laravel specifics? don't know where these came from */
/**
 * Records an upvote for the currently active todo card and advances the UI
 * to the next card. Near-duplicate of pinRemove() except for the endpoint —
 * NOTE(review): consider extracting a shared helper parameterized by URL.
 */
function pinClone() {
    balancelement(".info-panels");
    // The ZID key of the card being voted on.
    var ZIDkey_val = $("#info:first").data("todo");
    //$('#ajax-loader-pindu').toggle();
    // Hide the current card, drop it, then reveal and activate the next one.
    $('.active').slideToggle();
    var nextactivetodo = $('.active').nextAll('.col-lg-4:first');
    $( ".active" ).remove();
    nextactivetodo.slideToggle().addClass( "active" );
    //Get value and make sure it is not null
    // NOTE(review): reload() is asynchronous, so execution continues into the
    // $.ajax call below even when ZIDkey_val is empty — confirm a `return`
    // was not intended here.
    if (ZIDkey_val.length == 0) window.location.reload();
    $.ajax({
        url: "pages/functions/votes_addupvote.php",
        type: "post",
        dataType:"json",
        data: { ZID_key : ZIDkey_val },
        statusCode: {
            404: function() {
                alert("Big problem! Page not found");
            }
        },
        success: function(data){
            // bool_flag true => vote stored; otherwise log the server message.
            if (data.bool_flag) {
                return;
            } else {
                console.log(data.message + ' ' + data.dbconn);
                return;
                //$('#ajax-loader-pindu').toggle();
                // window.location.reload();
            }
        },
        error:function(data){
            console.log(' Function error ');
            $('#ajax-loader-pindu').toggle();
            //window.location.reload();
        }
    });
    return;
}
/**
 * Records a downvote for the currently active todo card and advances the UI
 * to the next card. Near-duplicate of pinClone() except for the endpoint.
 */
function pinRemove() {
    balancelement(".info-panels");
    // The ZID key of the card being voted on.
    var ZIDkey_val = $("#info:first").data("todo");
    //$('#ajax-loader-pindu').toggle();
    // Hide the current card, drop it, then reveal and activate the next one.
    $('.active').slideToggle();
    var nextactivetodo = $('.active').nextAll('.col-lg-4:first');
    $( ".active" ).remove();
    nextactivetodo.slideToggle().addClass( "active" );
    //Get value and make sure it is not null
    // NOTE(review): reload() is asynchronous; the $.ajax call below still runs
    // when ZIDkey_val is empty — confirm a `return` was not intended.
    if (ZIDkey_val.length == 0) window.location.reload();
    $.ajax({
        url: "pages/functions/votes_adddownvote.php",
        type: "post",
        dataType:"json",
        data: { ZID_key : ZIDkey_val },
        statusCode: {
            404: function() {
                alert("Big problem! Page not found");
            }
        },
        success: function(data){
            // bool_flag true => vote stored; otherwise log the server message.
            if (data.bool_flag) {
                return;
            } else {
                console.log(data.message + ' ' + data.dbconn);
                return;
                //$('#ajax-loader-pindu').toggle();
                // window.location.reload();
            }
        },
        error:function(data){
            console.log(' Function error ');
            $('#ajax-loader-pindu').toggle();
            //window.location.reload();
        }
    });
    return;
}
/**
 * Submits the visitor-entered task text (#visitorTask) to the server.
 * On success (bool_flag) redirects to the login page; on a handled failure
 * shows the server-supplied message and reloads.
 */
function visitorSubmit() {
    $('#ajax-loader-visitorSubmit').toggle();
    var ZIDkey_val = $("#visitorTask").val();
    // NOTE(review): reload() is asynchronous; the $.ajax call below still runs
    // when the field is empty — confirm a `return` was not intended.
    if (ZIDkey_val.length == 0) window.location.reload();
    $.ajax({
        url: "pages/functions/tasks_visitors_add.php",
        type: "post",
        dataType:"json",
        data: { body_tasks : ZIDkey_val },
        statusCode: {
            404: function() {
                alert("Big problem! Page not found");
            }
        },
        success: function(data){
            if (data.bool_flag) {
                // Task accepted — send the visitor to the login page.
                window.location.assign("./login.html");
                return;
            } else {
                $('#ajax-loader-pindu').toggle();
                console.log(data.message + ' ' + data.dbconn);
                alert(data.message + ' ' + data.dbconn);
                window.location.reload();
                return;
            }
        },
        error:function(data){
            console.log(' Function error ');
            $('#ajax-loader-pindu').toggle();
            //window.location.reload();
        }
    });
}
// Defers the current item: shows the spinner and reloads the page.
// (Presumably the server decides what to show next — no request is made here.)
function pinLater() {
    $('#ajax-loader-pindu').toggle();
    //$('#ajax-loader-later').toggle();
    window.location.reload();
}
|
package com.example.masato.weatherforecast.model.weekweather;
import com.google.gson.annotations.Expose;
import com.google.gson.annotations.SerializedName;
import java.util.List;
/**
* Created by masato on 17/03/20.
*/
/**
 * GSON model for one item of the weekly weather feed.
 * Field names map 1:1 to JSON keys via {@code @SerializedName}; instances are
 * populated by GSON, so there are only getters.
 */
public class Item {

    @Expose
    @SerializedName("title")
    private String title;
    @Expose
    @SerializedName("pubDate")
    private String pubDate;
    @Expose
    @SerializedName("link")
    private String link;
    @Expose
    @SerializedName("guid")
    private String guid;
    @Expose
    @SerializedName("author")
    private String author;
    @Expose
    @SerializedName("thumbnail")
    private String thumbnail;
    @Expose
    @SerializedName("description")
    private String description;
    @Expose
    @SerializedName("content")
    private String content;
    @Expose
    @SerializedName("enclosure")
    private List<Object> enclosure;
    @Expose
    @SerializedName("categories")
    private List<String> categories;

    public String getTitle() {
        return title;
    }

    public String getPubDate() {
        return pubDate;
    }

    public String getLink() {
        return link;
    }

    public String getGuid() {
        return guid;
    }

    public String getAuthor() {
        return author;
    }

    public String getThumbnail() {
        return thumbnail;
    }

    public String getDescription() {
        return description;
    }

    public String getContent() {
        return content;
    }

    /**
     * FIX: previously declared to return {@code Object} although the backing
     * field is {@code List<Object>} (inconsistent with {@link #getCategories()}).
     * The covariant narrowing is source-compatible for existing callers.
     */
    public List<Object> getEnclosure() {
        return enclosure;
    }

    public List<String> getCategories() {
        return categories;
    }

    /**
     * Extracts the date portion of the feed title.
     * Assumes the title has at least four " - "-separated fields with the date
     * at index 3 — TODO confirm against the actual feed format.
     */
    public String getDateData() {
        return title.split(" - ", 0)[3];
    }

    /**
     * Extracts the weather description (telop) from the feed title
     * (field at index 1 of the " - "-separated title — same assumption as above).
     */
    public String getTelop() {
        return title.split(" - ", 0)[1];
    }
}
|
<filename>plejd/types/Mqtt.d.ts
/* eslint-disable no-use-before-define */

/** MQTT topic categories used by this integration. */
export type TopicType = 'config' | 'state' | 'availability' | 'set';
/** String-keyed lookup of TopicType values. */
export type TOPIC_TYPES = { [key: string]: TopicType };

/** MQTT entity kinds published for outputs. */
export type MqttType = 'light' | 'scene' | 'switch' | 'device_automation';
/** String-keyed lookup of MqttType values. */
export type MQTT_TYPES = { [key: string]: MqttType };

/** A controllable output device as exposed over MQTT. */
export interface OutputDevice {
  // Presumably the device's BLE output address on the mesh — confirm.
  bleOutputAddress: number;
  deviceId: string;
  // Current dim level, when the device is dimmable.
  dim?: number;
  dimmable: boolean;
  hiddenFromRoomList?: boolean;
  hiddenFromIntegrations?: boolean;
  hiddenFromSceneList?: boolean;
  name: string;
  output: number;
  roomId: string;
  // On/off state; undefined until first known.
  state: boolean | undefined;
  type: string;
  typeName: string;
  version: string;
  uniqueId: string;
}
|
<reponame>opentaps/opentaps-1
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.ofbiz.ebaystore;
import org.ofbiz.base.util.UtilDateTime;
import org.ofbiz.base.util.UtilMisc;
import org.ofbiz.base.util.UtilValidate;
import org.ofbiz.entity.Delegator;
import org.ofbiz.entity.GenericValue;
import java.util.*;
import org.ofbiz.service.DispatchContext;
import org.ofbiz.service.LocalDispatcher;
import org.ofbiz.service.ServiceUtil;
import com.ebay.sdk.ApiContext;
import com.ebay.sdk.ApiException;
import com.ebay.sdk.SdkException;
import com.ebay.sdk.call.GetFeedbackCall;
import com.ebay.sdk.call.GetItemsAwaitingFeedbackCall;
import com.ebay.sdk.call.GetUserCall;
import com.ebay.sdk.call.LeaveFeedbackCall;
import com.ebay.soap.eBLBaseComponents.CommentTypeCodeType;
import com.ebay.soap.eBLBaseComponents.DetailLevelCodeType;
import com.ebay.soap.eBLBaseComponents.FeedbackDetailType;
import com.ebay.soap.eBLBaseComponents.FeedbackRatingDetailCodeType;
import com.ebay.soap.eBLBaseComponents.ItemRatingDetailArrayType;
import com.ebay.soap.eBLBaseComponents.ItemRatingDetailsType;
import com.ebay.soap.eBLBaseComponents.PaginatedTransactionArrayType;
import com.ebay.soap.eBLBaseComponents.SiteCodeType;
import com.ebay.soap.eBLBaseComponents.TransactionArrayType;
import com.ebay.soap.eBLBaseComponents.TransactionType;
import javolution.util.FastList;
import javolution.util.FastMap;
/**
 * OFBiz services for working with eBay feedback through the eBay SDK:
 * importing received feedback, listing transactions awaiting feedback,
 * and leaving feedback for a transaction.
 */
public class EbayFeedback {

    public static final String resource = "EbayUiLabels";
    private static final String module = EbayFeedback.class.getName();

    /**
     * Downloads the feedback left for the store's eBay user and mirrors each
     * entry into OFBiz entities (Party, UserLogin, PartyRole, DataResource,
     * ElectronicText, Content, ContentPurpose, ContentRole). Feedback already
     * imported (a Content row with contentId == feedbackID exists) is skipped.
     *
     * @param dctx    service dispatch context
     * @param context expects "userLogin", "locale", "productStoreId"
     * @return service success result (errors are only printed, see NOTE below)
     */
    public static Map<String, Object> loadFeedback(DispatchContext dctx, Map<String, ? extends Object> context) {
        Map<String, Object> result = FastMap.newInstance();
        LocalDispatcher dispatcher = dctx.getDispatcher();
        GenericValue userLogin = (GenericValue) context.get("userLogin");
        Delegator delegator = dctx.getDelegator();
        Locale locale = (Locale) context.get("locale");
        String productStoreId = (String) context.get("productStoreId");
        ApiContext apiContext = EbayStoreHelper.getApiContext(productStoreId, locale, delegator);
        try {
            // Resolve the eBay user id registered for this product store.
            Map<String, Object> inMap = FastMap.newInstance();
            inMap.put("productStoreId", productStoreId);
            inMap.put("userLogin", userLogin);
            Map<String, Object> resultUser = dispatcher.runSync("getEbayStoreUser", inMap);
            String userID = (String)resultUser.get("userLoginId");
            GetFeedbackCall feedbackCall = new GetFeedbackCall();
            feedbackCall.setApiContext(apiContext);
            SiteCodeType SiteCodeType = EbayStoreHelper.getSiteCodeType(productStoreId,locale, delegator);
            // NOTE(review): the local variable above shadows the SiteCodeType
            // type, so "SiteCodeType.US" below is a static access *through the
            // variable* and ALWAYS passes US — the site looked up on the
            // previous line is effectively ignored. Confirm whether
            // setSite(<the looked-up value>) was intended.
            feedbackCall.setSite(SiteCodeType.US);
            feedbackCall.setUserID(userID);
            DetailLevelCodeType[] detailLevelCodeType = {DetailLevelCodeType.RETURN_ALL};
            feedbackCall.setDetailLevel(detailLevelCodeType);
            FeedbackDetailType[] feedback = feedbackCall.getFeedback();
            if (feedback != null) {
                String partyId = null;
                // Ensure a Party + UserLogin exist for the store's eBay user.
                GenericValue userLoginEx = delegator.findByPrimaryKey("UserLogin", UtilMisc.toMap("userLoginId", userID));
                if (userLoginEx == null) {
                    //Party
                    GenericValue party = delegator.makeValue("Party");
                    partyId = delegator.getNextSeqId("Party");
                    party.put("partyId", partyId);
                    party.put("partyTypeId", "PERSON");
                    party.create();
                    //UserLogin
                    userLoginEx = delegator.makeValue("UserLogin");
                    userLoginEx.put("userLoginId", userID);
                    userLoginEx.put("partyId", partyId);
                    userLoginEx.create();
                } else {
                    partyId = userLoginEx.getString("partyId");
                }
                //PartyRole For eBay User
                List partyRoles = delegator.findByAnd("PartyRole", UtilMisc.toMap("partyId", partyId, "roleTypeId", "OWNER"));
                if (partyRoles.size() == 0) {
                    GenericValue partyRole = delegator.makeValue("PartyRole");
                    partyRole.put("partyId", partyId);
                    partyRole.put("roleTypeId", "OWNER");
                    partyRole.create();
                }
                int feedbackLength = feedback.length;
                for (int i = 0; i < feedbackLength; i++) {
                    //convert to ofbiz
                    String contentId = feedback[i].getFeedbackID();
                    Date eBayDateTime = feedback[i].getCommentTime().getTime();
                    // Skip feedback already imported on a previous run.
                    GenericValue contentCheck = delegator.findByPrimaryKey("Content", UtilMisc.toMap("contentId", contentId));
                    if (contentCheck != null) {
                        continue;
                    }
                    String textData = feedback[i].getCommentText();
                    String commentingUserId= feedback[i].getCommentingUser();
                    String commentingPartyId = null;
                    // Ensure a Party + UserLogin exist for the commenting user.
                    List CommentingUserLogins = delegator.findByAnd("UserLogin", UtilMisc.toMap("userLoginId", commentingUserId));
                    if (CommentingUserLogins.size() == 0) {
                        //Party
                        GenericValue party = delegator.makeValue("Party");
                        commentingPartyId = delegator.getNextSeqId("Party");
                        party.put("partyId", commentingPartyId);
                        party.put("partyTypeId", "PERSON");
                        party.create();
                        //UserLogin
                        userLoginEx = delegator.makeValue("UserLogin");
                        userLoginEx.put("userLoginId", commentingUserId);
                        userLoginEx.put("partyId", commentingPartyId);
                        userLoginEx.create();
                    } else {
                        userLoginEx = (GenericValue)CommentingUserLogins.get(0);
                        commentingPartyId = userLoginEx.getString("partyId");
                    }
                    // Store the comment text as DataResource + ElectronicText,
                    // referenced by a Content row keyed by the eBay feedback id.
                    //DataResource
                    GenericValue dataResource = delegator.makeValue("DataResource");
                    String dataResourceId = delegator.getNextSeqId("DataResource");
                    dataResource.put("dataResourceId", dataResourceId);
                    dataResource.put("dataResourceTypeId", "ELECTRONIC_TEXT");
                    dataResource.put("mimeTypeId", "text/html");
                    dataResource.create();
                    //ElectronicText
                    GenericValue electronicText = delegator.makeValue("ElectronicText");
                    electronicText.put("dataResourceId", dataResourceId);
                    electronicText.put("textData", textData);
                    electronicText.create();
                    //Content
                    GenericValue content = delegator.makeValue("Content");
                    content.put("contentId", contentId);
                    content.put("contentTypeId", "DOCUMENT");
                    content.put("dataResourceId", dataResourceId);
                    content.put("createdDate", UtilDateTime.toTimestamp(eBayDateTime));
                    content.create();
                    //ContentPurpose
                    GenericValue contentPurpose = delegator.makeValue("ContentPurpose");
                    contentPurpose.put("contentId", contentId);
                    contentPurpose.put("contentPurposeTypeId", "FEEDBACK");
                    contentPurpose.create();
                    //PartyRole For eBay Commentator
                    List commentingPartyRoles = delegator.findByAnd("PartyRole", UtilMisc.toMap("partyId", commentingPartyId, "roleTypeId", "COMMENTATOR"));
                    if (commentingPartyRoles.size() == 0) {
                        GenericValue partyRole = delegator.makeValue("PartyRole");
                        partyRole.put("partyId", commentingPartyId);
                        partyRole.put("roleTypeId", "COMMENTATOR");
                        partyRole.create();
                    }
                    //ContentRole for eBay User
                    List contentRoles = delegator.findByAnd("ContentRole", UtilMisc.toMap("partyId", partyId, "roleTypeId", "OWNER", "contentId", contentId));
                    if (contentRoles.size() == 0) {
                        GenericValue contentRole = delegator.makeValue("ContentRole");
                        contentRole.put("contentId", contentId);
                        contentRole.put("partyId", partyId);
                        contentRole.put("roleTypeId", "OWNER");
                        contentRole.put("fromDate", UtilDateTime.nowTimestamp());
                        contentRole.create();
                    }
                    //ContentRole for Commentator
                    List commentingContentRoles = delegator.findByAnd("ContentRole", UtilMisc.toMap("partyId", commentingPartyId, "roleTypeId", "COMMENTATOR", "contentId", contentId));
                    if (commentingContentRoles.size() == 0) {
                        GenericValue contentRole = delegator.makeValue("ContentRole");
                        contentRole.put("contentId", contentId);
                        contentRole.put("partyId", commentingPartyId);
                        contentRole.put("roleTypeId", "COMMENTATOR");
                        contentRole.put("fromDate", UtilDateTime.nowTimestamp());
                        contentRole.create();
                    }
                }
            }
        // NOTE(review): all three catches only print the stack trace and still
        // return success — failures are silently swallowed. Confirm whether
        // ServiceUtil.returnError was intended on failure.
        } catch (ApiException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        } catch (SdkException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        } catch (Exception e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
        String successMsg = "Load eBay Feedback Successfull.";
        result = ServiceUtil.returnSuccess(successMsg);
        return result;
    }

    /**
     * Lists the caller's eBay transactions that still await feedback.
     * Each entry carries itemID, title, transactionID, the counterpart's
     * userID/role (buyer or seller), optional shipping cost/currency and the
     * comment type of any feedback already left.
     *
     * @param dctx    service dispatch context
     * @param context expects "userLogin", "locale", "productStoreId"
     * @return result map with "itemsAwaitingFeedback" -> List of entry maps
     */
    public static Map<String, Object> getItemsAwaitingFeedback(DispatchContext dctx, Map<String, ? extends Object> context) {
        Map<String, Object> result = FastMap.newInstance();
        LocalDispatcher dispatcher = dctx.getDispatcher();
        GenericValue userLogin = (GenericValue) context.get("userLogin");
        Delegator delegator = dctx.getDelegator();
        Locale locale = (Locale) context.get("locale");
        String productStoreId = (String) context.get("productStoreId");
        ApiContext apiContext = EbayStoreHelper.getApiContext(productStoreId, locale, delegator);
        List itemsResult = FastList.newInstance();
        try {
            GetItemsAwaitingFeedbackCall awaitingFeedbackCall = new GetItemsAwaitingFeedbackCall();
            awaitingFeedbackCall.setApiContext(apiContext);
            awaitingFeedbackCall.getItemsAwaitingFeedback();
            PaginatedTransactionArrayType itemsAwaitingFeedback = awaitingFeedbackCall.getReturnedItemsAwaitingFeedback();
            TransactionArrayType items = itemsAwaitingFeedback.getTransactionArray();
            GetUserCall getUserCall = new GetUserCall(apiContext);
            String commentingUser = getUserCall.getUser().getUserID();
            for (int i = 0;i < items.getTransactionLength(); i++) {
                Map<String, Object> entry = FastMap.newInstance();
                TransactionType transection = items.getTransaction(i);
                entry.put("itemID", transection.getItem().getItemID());
                entry.put("commentingUser", commentingUser);
                entry.put("title", transection.getItem().getTitle());
                entry.put("transactionID", transection.getTransactionID());
                // Counterpart: buyer if present; a seller entry (if present)
                // overwrites userID/role afterwards.
                if (transection.getBuyer() != null) {
                    entry.put("userID", transection.getBuyer().getUserID());
                    entry.put("role", "buyer");
                }
                if (transection.getItem().getSeller() != null) {
                    entry.put("userID", transection.getItem().getSeller().getUserID());
                    entry.put("role", "seller");
                }
                if (transection.getShippingDetails()!=null) {
                    entry.put("shippingCost", transection.getShippingDetails().getDefaultShippingCost().getValue());
                    entry.put("shippingCurrency", transection.getShippingDetails().getDefaultShippingCost().getCurrencyID().name());
                }
                if (transection.getFeedbackLeft() != null) {
                    entry.put("commentType", transection.getFeedbackLeft().getCommentType().name());
                }
                itemsResult.add(entry);
            }
            result.put("itemsAwaitingFeedback", itemsResult);
        // NOTE(review): failures are only printed; the service still returns
        // the (possibly empty) result map.
        } catch (ApiException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        } catch (SdkException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        } catch (Exception e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
        return result;
    }

    /**
     * Leaves feedback (positive/neutral/negative) for a transaction. When the
     * counterpart's role is "seller", also attaches the four detailed seller
     * ratings (item as described, communication, shipping time, charges).
     *
     * @param dctx    service dispatch context
     * @param context expects "locale", "productStoreId", "itemId", "targetUser",
     *                "transactionId", "commentingUser", "role", "commentType",
     *                "commentText", the rating* fields and (for low item
     *                ratings) "AqItemAsDescribedId"
     * @return service success result (errors only printed, see NOTE above)
     */
    public static Map<String, Object> leaveFeedback(DispatchContext dctx, Map<String, ? extends Object> context) {
        Map<String, Object>result = FastMap.newInstance();
        Delegator delegator = dctx.getDelegator();
        Locale locale = (Locale) context.get("locale");
        String productStoreId = (String) context.get("productStoreId");
        ApiContext apiContext = EbayStoreHelper.getApiContext(productStoreId, locale, delegator);
        String itemId = (String) context.get("itemId");
        String targetUser = (String) context.get("targetUser");
        String transactionId = (String) context.get("transactionId");
        String commentingUser = (String) context.get("commentingUser");
        String role = (String) context.get("role");
        String commentType = (String) context.get("commentType");
        String commentText = (String) context.get("commentText");
        String AqItemAsDescribed = null;
        String ratingItem = (String) context.get("ratingItem");
        String ratingComm = (String) context.get("ratingComm");
        String ratingShip = (String) context.get("ratingShip");
        String ratingShipHand = (String) context.get("ratingShipHand");
        try {
            if (commentType != null) {
                LeaveFeedbackCall leaveFeedbackCall = new LeaveFeedbackCall();
                leaveFeedbackCall.setApiContext(apiContext);
                leaveFeedbackCall.setTargetUser(targetUser);
                leaveFeedbackCall.setTransactionID(transactionId);
                if (role.equals("seller")) {
                    ItemRatingDetailArrayType sellerItemRatingDetailArray = new ItemRatingDetailArrayType();
                    //The item description
                    ItemRatingDetailsType itemRatingDetailsType1 = new ItemRatingDetailsType();
                    int ratingItemValue = 0;
                    if (UtilValidate.isInteger(ratingItem)) {
                        ratingItemValue = Integer.parseInt(ratingItem);
                    }
                    // Low item rating: map the questionnaire answer id to a
                    // reason string.
                    if (ratingItemValue < 3) {
                        int AqItemAsDescribedId = Integer.parseInt((String) context.get("AqItemAsDescribedId"));
                        switch (AqItemAsDescribedId) {
                            case 5:
                                AqItemAsDescribed = "ItemNotReceived";
                                break;
                            case 6:
                                AqItemAsDescribed = "ItemBadQuality";
                                break;
                            case 2:
                                AqItemAsDescribed = "ItemDamaged";
                                break;
                            case 1:
                                AqItemAsDescribed = "ItemIneligible";
                                break;
                            case 3:
                                AqItemAsDescribed = "ItemLost";
                                break;
                            default:
                                AqItemAsDescribed = "Other";
                                break;
                        }
                    }
                    // NOTE(review): AqItemAsDescribed is computed above but
                    // never passed to the eBay call — confirm whether it should
                    // be attached to the feedback detail.
                    itemRatingDetailsType1.setRating(ratingItemValue);
                    itemRatingDetailsType1.setRatingDetail(FeedbackRatingDetailCodeType.ITEM_AS_DESCRIBED);
                    //The seller's communication
                    ItemRatingDetailsType itemRatingDetailsType2 = new ItemRatingDetailsType();
                    int ratingCommValue = 0;
                    if (UtilValidate.isInteger(ratingComm)) {
                        ratingCommValue = Integer.parseInt(ratingComm);
                    }
                    itemRatingDetailsType2.setRating(ratingCommValue);
                    itemRatingDetailsType2.setRatingDetail(FeedbackRatingDetailCodeType.COMMUNICATION);
                    //the seller ship the item
                    ItemRatingDetailsType itemRatingDetailsType3 = new ItemRatingDetailsType();
                    int ratingShipValue = 0;
                    if (UtilValidate.isInteger(ratingShip)) {
                        ratingShipValue = Integer.parseInt(ratingShip);
                    }
                    itemRatingDetailsType3.setRating(ratingShipValue);
                    itemRatingDetailsType3.setRatingDetail(FeedbackRatingDetailCodeType.SHIPPING_TIME);
                    //the shipping and handling charges
                    ItemRatingDetailsType itemRatingDetailsType4 = new ItemRatingDetailsType();
                    int ratingShipHandValue = 0;
                    if (UtilValidate.isInteger(ratingShipHand)) {
                        ratingShipHandValue = Integer.parseInt(ratingShipHand);
                    }
                    itemRatingDetailsType4.setRating(ratingShipHandValue);
                    itemRatingDetailsType4.setRatingDetail(FeedbackRatingDetailCodeType.SHIPPING_AND_HANDLING_CHARGES);
                    //Rating Summary
                    ItemRatingDetailsType[] itemRatingDetailsType = {itemRatingDetailsType1, itemRatingDetailsType2, itemRatingDetailsType3, itemRatingDetailsType4};
                    sellerItemRatingDetailArray.setItemRatingDetails(itemRatingDetailsType);
                    leaveFeedbackCall.setSellerItemRatingDetailArray(sellerItemRatingDetailArray);
                }
                // Assemble the feedback comment itself.
                FeedbackDetailType feedbackDetail = new FeedbackDetailType();
                feedbackDetail.setItemID(itemId);
                feedbackDetail.setCommentingUser(commentingUser);
                feedbackDetail.setCommentText(commentText);
                feedbackDetail.setCommentTime(Calendar.getInstance());
                if (commentType.equals("positive")) {
                    feedbackDetail.setCommentType(CommentTypeCodeType.POSITIVE);
                } else if (commentType.equals("neutral")) {
                    feedbackDetail.setCommentType(CommentTypeCodeType.NEUTRAL);
                } else if (commentType.equals("negative")) {
                    feedbackDetail.setCommentType(CommentTypeCodeType.NEGATIVE);
                }
                leaveFeedbackCall.setFeedbackDetail(feedbackDetail);
                leaveFeedbackCall.leaveFeedback();
            }
        } catch (ApiException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        } catch (SdkException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        } catch (Exception e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
        result = ServiceUtil.returnSuccess();
        return result;
    }
}
<filename>src/modules/chooseGame/view/pages/Profile/components/ImageInput/styles.ts
import styled from 'styled-components/native';
// Circular 120px profile image; the bottom margin comes from the
// theme's spacing scale.
export const Image = styled.Image`
  width: 120px;
  height: 120px;
  border-radius: 60px;
  margin-bottom: ${({ theme }) => theme.layout.spacing(4)};
`;
|
#pragma once
// This file was created automatically, do not modify the contents of this file.
PRAGMA_DISABLE_DEPRECATION_WARNINGS
#include "CoreMinimal.h"
#include "ManageEventSender.h"
#include "Generate/Manage/ManagePawnAction_Repeat.h"
#include "Runtime/AIModule/Classes/Actions/PawnAction_Repeat.h"
// Source file C:\Program Files\Epic Games\UE_4.22\Engine\Source\Runtime\AIModule\Classes\Actions\PawnAction_Repeat.h:11
// Auto-generated wrapper that exposes UPawnAction_Repeat's protected
// PushSubAction() to the C ABI functions below (the extern "C" surface casts
// UPawnAction_Repeat* to this type to reach the protected member).
class E_PROTECTED_WRAP_UPawnAction_Repeat : protected UPawnAction_Repeat
{
public:
    // Forwards to the protected UPawnAction_Repeat::PushSubAction().
    bool PushSubAction_WRAP()
    {
        return PushSubAction();
    }
};
// Auto-generated C ABI surface for UPawnAction_Repeat, consumed by the managed
// (.NET) side. Naming scheme:
//   E_PROP_*                  property getter/setter pairs
//   E_NewObject_*             UObject construction
//   E_UPawnAction_Repeat_*    instance method calls
//   E__Supper__*              base-class ("super") dispatch for the managed
//                             subclass UManagePawnAction_Repeat
extern "C"
{
    // --- Property accessors ---
    DOTNET_EXPORT auto E_PROP_UPawnAction_Repeat_ActionToRepeat_GET(UPawnAction_Repeat* Ptr) { return ConvertToManage_ObjectPointerDescription(Ptr->ActionToRepeat); }
    DOTNET_EXPORT void E_PROP_UPawnAction_Repeat_ActionToRepeat_SET(UPawnAction_Repeat* Ptr, UPawnAction* Value) { Ptr->ActionToRepeat = Value; }
    DOTNET_EXPORT auto E_PROP_UPawnAction_Repeat_RecentActionCopy_GET(UPawnAction_Repeat* Ptr) { return ConvertToManage_ObjectPointerDescription(Ptr->RecentActionCopy); }
    DOTNET_EXPORT void E_PROP_UPawnAction_Repeat_RecentActionCopy_SET(UPawnAction_Repeat* Ptr, UPawnAction* Value) { Ptr->RecentActionCopy = Value; }
    DOTNET_EXPORT auto E_PROP_UPawnAction_Repeat_RepeatsLeft_GET(UPawnAction_Repeat* Ptr) { return Ptr->RepeatsLeft; }
    DOTNET_EXPORT void E_PROP_UPawnAction_Repeat_RepeatsLeft_SET(UPawnAction_Repeat* Ptr, int32 Value) { Ptr->RepeatsLeft = Value; }

    // --- Construction ---
    DOTNET_EXPORT INT_PTR E_NewObject_UPawnAction_Repeat(UObject* Parent, char* Name)
    {
        return (INT_PTR)NewObject<UPawnAction_Repeat>(Parent, FName(UTF8_TO_TCHAR(Name)));
    }

    // --- Instance methods ---
    DOTNET_EXPORT auto E_UPawnAction_Repeat_CreateAction(UPawnAction_Repeat* Self, UWorld& World, UPawnAction* ActionToRepeat, int32 NumberOfRepeats)
    {
        auto& _p0 = World;
        auto _p1 = ActionToRepeat;
        auto _p2 = NumberOfRepeats;
        return ConvertToManage_ObjectPointerDescription(Self->CreateAction(_p0, _p1, _p2));
    }
    // Reaches the protected PushSubAction() via the wrapper class above.
    DOTNET_EXPORT auto E_UPawnAction_Repeat_PushSubAction(UPawnAction_Repeat* Self)
    {
        return ((E_PROTECTED_WRAP_UPawnAction_Repeat*)Self)->PushSubAction_WRAP();
    }

    // --- Super-call dispatch for the managed subclass ---
    DOTNET_EXPORT auto E__Supper__UPawnAction_Repeat_Tick(UPawnAction* Self, float DeltaTime)
    {
        auto _p0 = DeltaTime;
        ((UManagePawnAction_Repeat*)Self)->_Supper__Tick(_p0);
    }
    DOTNET_EXPORT auto E__Supper__UPawnAction_Repeat_BeginDestroy(UObject* Self)
    {
        ((UManagePawnAction_Repeat*)Self)->_Supper__BeginDestroy();
    }
    DOTNET_EXPORT auto E__Supper__UPawnAction_Repeat_FinishDestroy(UObject* Self)
    {
        ((UManagePawnAction_Repeat*)Self)->_Supper__FinishDestroy();
    }
    DOTNET_EXPORT auto E__Supper__UPawnAction_Repeat_MarkAsEditorOnlySubobject(UObject* Self)
    {
        ((UManagePawnAction_Repeat*)Self)->_Supper__MarkAsEditorOnlySubobject();
    }
    DOTNET_EXPORT auto E__Supper__UPawnAction_Repeat_PostCDOContruct(UObject* Self)
    {
        ((UManagePawnAction_Repeat*)Self)->_Supper__PostCDOContruct();
    }
    DOTNET_EXPORT auto E__Supper__UPawnAction_Repeat_PostEditImport(UObject* Self)
    {
        ((UManagePawnAction_Repeat*)Self)->_Supper__PostEditImport();
    }
    DOTNET_EXPORT auto E__Supper__UPawnAction_Repeat_PostInitProperties(UObject* Self)
    {
        ((UManagePawnAction_Repeat*)Self)->_Supper__PostInitProperties();
    }
    DOTNET_EXPORT auto E__Supper__UPawnAction_Repeat_PostLoad(UObject* Self)
    {
        ((UManagePawnAction_Repeat*)Self)->_Supper__PostLoad();
    }
    DOTNET_EXPORT auto E__Supper__UPawnAction_Repeat_PostNetReceive(UObject* Self)
    {
        ((UManagePawnAction_Repeat*)Self)->_Supper__PostNetReceive();
    }
    DOTNET_EXPORT auto E__Supper__UPawnAction_Repeat_PostRepNotifies(UObject* Self)
    {
        ((UManagePawnAction_Repeat*)Self)->_Supper__PostRepNotifies();
    }
    DOTNET_EXPORT auto E__Supper__UPawnAction_Repeat_PostSaveRoot(UObject* Self, bool bCleanupIsRequired)
    {
        auto _p0 = bCleanupIsRequired;
        ((UManagePawnAction_Repeat*)Self)->_Supper__PostSaveRoot(_p0);
    }
    DOTNET_EXPORT auto E__Supper__UPawnAction_Repeat_PreDestroyFromReplication(UObject* Self)
    {
        ((UManagePawnAction_Repeat*)Self)->_Supper__PreDestroyFromReplication();
    }
    DOTNET_EXPORT auto E__Supper__UPawnAction_Repeat_PreNetReceive(UObject* Self)
    {
        ((UManagePawnAction_Repeat*)Self)->_Supper__PreNetReceive();
    }
    DOTNET_EXPORT auto E__Supper__UPawnAction_Repeat_ShutdownAfterError(UObject* Self)
    {
        ((UManagePawnAction_Repeat*)Self)->_Supper__ShutdownAfterError();
    }
    DOTNET_EXPORT auto E__Supper__UPawnAction_Repeat_CreateCluster(UObjectBaseUtility* Self)
    {
        ((UManagePawnAction_Repeat*)Self)->_Supper__CreateCluster();
    }
    DOTNET_EXPORT auto E__Supper__UPawnAction_Repeat_OnClusterMarkedAsPendingKill(UObjectBaseUtility* Self)
    {
        ((UManagePawnAction_Repeat*)Self)->_Supper__OnClusterMarkedAsPendingKill();
    }
}
PRAGMA_ENABLE_DEPRECATION_WARNINGS
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
""" Support level6 operator test cases.
"""
import math
import numpy as np
import tvm
from tvm import relay
from tvm.relay.testing import ctx_list
import topi.testing
def test_argsort():
    def verify_argsort(shape, axis, is_ascend):
        """Check relay.argsort against numpy's argsort for one configuration."""
        x = relay.var("x", relay.TensorType(shape, "float32"))
        z = relay.argsort(x, axis=axis, is_ascend=is_ascend)
        # Run type inference as a sanity check on the op's type relation.
        zz = relay.ir_pass.infer_type(z)
        func = relay.Function([x], z)
        x_data = np.random.uniform(size=shape).astype("float32")
        # numpy only sorts ascending; negate the input to emulate descending.
        ref_res = np.argsort(x_data if is_ascend else -x_data, axis=axis)
        for target, ctx in ctx_list():
            for kind in ["graph", "debug"]:
                intrp = relay.create_executor(kind, ctx=ctx, target=target)
                op_res = intrp.evaluate(func)(x_data)
                tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.astype("float"), rtol=1e-5)

    verify_argsort((2, 3, 4), axis=0, is_ascend=False)
    verify_argsort((1, 4, 6), axis=1, is_ascend=True)
    verify_argsort((3, 5, 6), axis=-1, is_ascend=False)
# Allow running this test module directly as a script.
if __name__ == "__main__":
    test_argsort()
|
# Keep only the strings that start with "hello" and end with "world".
filtered_strings = list(filter(lambda s: re.match(r"^hello.*world$", s), strings))
# filtered_strings = ["hello_world"]
#!/bin/bash
# SLURM batch script: preprocess one raw ROM camera clip on a single-GPU node.
#SBATCH --account=def-dkulic
#SBATCH --gres=gpu:1 # request GPU generic resource
#SBATCH --cpus-per-task=2 #Maximum of CPU cores per GPU request: 6 on Cedar, 16 on Graham.
#SBATCH --mem=8000M # memory per node
#SBATCH --time=0-01:30 # time (DD-HH:MM)
#SBATCH --output=./job_script_output/Camera2_Oct_05_1300_1400_Prescribed_behavior_1_%N-%j.out # %N for node name, %j for jobID
## Main processing command
## -v: path to the raw video file
## -o: directory to save processed video
python ./process_video_low_frequent_frame.py -v ../ROM_raw_videos_clips/Oct_05/Camera2_Oct_05_1300_1400_Prescribed_behavior_1.mp4 -o ../ROM_raw_videos_clips_processed_camera2/Oct_05
|
#!/bin/sh -e

# Build the FreeBSD package for builder-web.
#
# Only run when either:
# - running under orb with package = builder-web
# - not running under opam at all
# (Replaced the obsolescent `[ ... -a ... ]` with `&&` per POSIX guidance.)
if [ "$ORB_BUILDING_PACKAGE" != "builder-web" ] && [ "$OPAM_PACKAGE_NAME" != "" ]; then
    exit 0;
fi

basedir=$(realpath "$(dirname "$0")"/../..)
pdir=$basedir/packaging/FreeBSD
bdir=$basedir/_build/install/default/bin

#tmptmpl=$(basename "$0")
#tmpd=$(mktemp -t "$tmptmpl")
tmpd=$basedir/_build/stage

manifest=$tmpd/+MANIFEST
rootdir=$tmpd/rootdir
sbindir=$rootdir/usr/local/sbin
rcdir=$rootdir/usr/local/etc/rc.d
libexecdir=$rootdir/usr/local/libexec

# Remove the staging area on exit. FIX: the trap previously listed both "0"
# and EXIT, which are the same condition; EXIT alone already covers normal
# termination, and INT is kept for interactive interrupts.
trap 'rm -rf "$tmpd"' INT EXIT

mkdir -p "$sbindir" "$libexecdir" "$rcdir"

# stage service scripts
install -U "$pdir/rc.d/builder_web" "$rcdir/builder_web"

# stage app binaries
install -U "$bdir/builder-web" "$libexecdir/builder-web"
install -U "$bdir/builder-migrations" "$sbindir/builder-migrations"
install -U "$bdir/builder-db" "$sbindir/builder-db"

# create +MANIFEST: total on-disk size of all staged regular files
flatsize=$(find "$rootdir" -type f -exec stat -f %z {} + |
    awk 'BEGIN {s=0} {s+=$1} END {print s}')

sed -e "s:%%FLATSIZE%%:${flatsize}:" "$pdir/MANIFEST" > "$manifest"

# Append the file list: regular files with sha256 checksums, symlinks with "-".
{
    printf '\nfiles {\n'
    find "$rootdir" -type f -exec sha256 -r {} + | sort |
        awk '{print " " $2 ": \"" $1 "\"," }'
    find "$rootdir" -type l | sort |
        awk "{print \" \"\$1 \": -,\"}"
    printf '}\n'
} | sed -e "s:${rootdir}::" >> "$manifest"

# Reproducible build timestamp taken from the last git commit.
export SOURCE_DATE_EPOCH=$(git log -1 --pretty=format:%ct)

pkg create -r "$rootdir" -M "$manifest" -o "$basedir/"
mv "$basedir"/builder_web-*.pkg "$basedir/builder_web.pkg"

echo 'bin: [ "builder_web.pkg" ]' > "$basedir/builder-web.install"
echo 'doc: [ "README.md" ]' >> "$basedir/builder-web.install"
|
import os
def get_recent_commits(directory_path):
    """
    Return the commit archive files (``*.zip``) in ``directory_path``,
    sorted oldest-to-newest by modification time.

    Args:
        directory_path (str): The path to the directory containing commit files.

    Returns:
        list: Commit file names (not full paths) sorted by ascending mtime,
        so the most recent commit file is last.
    """
    # FIX: the original implementation os.chdir()-ed into the directory and
    # back; any exception in between left the process in the wrong working
    # directory. Joining paths for the mtime lookup avoids chdir entirely.
    commit_files = [f for f in os.listdir(directory_path) if f.endswith('.zip')]
    return sorted(
        commit_files,
        key=lambda f: os.path.getmtime(os.path.join(directory_path, f)),
    )
#include <stdio.h>
/* Global (static-storage) initializers exercised by main() below. */
int g_a = 100;
int g_arr[100][3];
int (*g_parr)[100][3] = &g_arr;                                   /* pointer to the whole 2-D array */
union { int a; unsigned short b; } g_foo = { 1 };                 /* brace init of first union member */
union { short a; char b[10]; } g_bar = { .b = "Hello" };          /* designated union member init */
struct { int a; unsigned short b; } g_x = { 2, 3 };
struct { short a; char b[10]; int c; } g_y = { .b = "Hi", .a = 11 };
struct { int a[3], b; } v[] = { [0].a = {1}, [1].a[0] = 2 };      /* designated array-of-struct init */
struct { int a[3], b; } w[] = { {{1000, 0, 0}}, {{2000, 0, 0}} };
/* Prints every initialized object so the initializer semantics of the
 * globals (and the local duplicates below) can be compared against the
 * expected output. */
int main() {
    int a = 100;
    /* Local (automatic-storage) counterparts of the global initializers. */
    union { int a; unsigned short b; } foo = { 1 };
    union { short a; char b[10]; } bar = { .b = "Hello" };
    struct { int a; unsigned short b; } x = { 2, 3 };
    struct { short a; int b; char c[10]; } y = { .c = "Hi", .a = 11 };
    printf("a = %d, g_a = %d\n", a, g_a);
    /* Union members alias the same storage; the value foo.b reads is
     * representation/endianness dependent. */
    printf("foo.a = %d, foo.b = %d\n", foo.a, foo.b);
    printf("bar.a = %d, bar.b = %s\n", bar.a, bar.b);
    printf("g_foo.a = %d, g_foo.b = %d\n", g_foo.a, g_foo.b);
    printf("g_bar.a = %d, g_bar.b = %s\n", g_bar.a, g_bar.b);
    printf("x.a = %d, x.b = %d\n", x.a, x.b);
    printf("y.a = %d, y.c = %s\n", y.a, y.c);
    printf("g_x.a = %d, g_x.b = %d\n", g_x.a, g_x.b);
    printf("g_y.a = %d, g_y.b = %s\n", g_y.a, g_y.b);
    printf("w[0].a[0] = %d, w[1].a[0] = %d\n", w[0].a[0], w[1].a[0]);
    return 0;
}
|
import { Component, OnInit, ViewEncapsulation } from '@angular/core';

/**
 * Scaffolded shell component for the "preferred smartcard" screen.
 * `ViewEncapsulation.None` disables Angular's style scoping so this
 * component's stylesheet applies globally — NOTE(review): confirm intentional.
 */
@Component({
  selector: 'app-preferred-smartcard',
  templateUrl: './preferred-smartcard.component.html',
  styleUrls: ['./preferred-smartcard.component.css'],
  encapsulation: ViewEncapsulation.None,
})
export class PreferredSmartcardComponent implements OnInit {

  constructor() { }

  // Intentionally empty: no initialization logic yet.
  ngOnInit() {
  }

}
|
<reponame>Anerco/SimpleWebAuthn
import {
PublicKeyCredentialCreationOptionsJSON,
RegistrationCredential,
RegistrationCredentialJSON,
} from '@simplewebauthn/typescript-types';
import utf8StringToBuffer from '../helpers/utf8StringToBuffer';
import bufferToBase64URLString from '../helpers/bufferToBase64URLString';
import base64URLStringToBuffer from '../helpers/base64URLStringToBuffer';
import { browserSupportsWebauthn } from '../helpers/browserSupportsWebauthn';
import toPublicKeyCredentialDescriptor from '../helpers/toPublicKeyCredentialDescriptor';
/**
 * Begin authenticator "registration" via WebAuthn attestation
 *
 * @param creationOptionsJSON Output from @simplewebauthn/server's generateRegistrationOptions(...)
 */
export default async function startRegistration(
  creationOptionsJSON: PublicKeyCredentialCreationOptionsJSON,
): Promise<RegistrationCredentialJSON> {
  if (!browserSupportsWebauthn()) {
    throw new Error('WebAuthn is not supported in this browser');
  }

  // The server hands us base64url strings; navigator.credentials.create()
  // wants raw ArrayBuffers for challenge, user id and credential ids.
  const { challenge, user, excludeCredentials } = creationOptionsJSON;
  const publicKey: PublicKeyCredentialCreationOptions = {
    ...creationOptionsJSON,
    challenge: base64URLStringToBuffer(challenge),
    user: {
      ...user,
      id: utf8StringToBuffer(user.id),
    },
    excludeCredentials: excludeCredentials.map(toPublicKeyCredentialDescriptor),
  };

  // Wait for the user to complete attestation
  const credential = (await navigator.credentials.create({ publicKey })) as RegistrationCredential;
  if (!credential) {
    throw new Error('Registration was not completed');
  }

  // Convert buffers back to base64url so the payload is JSON-serializable
  // for the trip back to the server.
  const { id, rawId, response, type } = credential;
  const credentialJSON: RegistrationCredentialJSON = {
    id,
    rawId: bufferToBase64URLString(rawId),
    response: {
      attestationObject: bufferToBase64URLString(response.attestationObject),
      clientDataJSON: bufferToBase64URLString(response.clientDataJSON),
    },
    type,
    clientExtensionResults: credential.getClientExtensionResults(),
  };

  /**
   * Include the authenticator's transports if the browser supports querying for them
   */
  if (typeof response.getTransports === 'function') {
    credentialJSON.transports = response.getTransports();
  }

  return credentialJSON;
}
|
#!/bin/bash
set -euo pipefail

# Deploy OAS Specification to Apigee

# Fetch an OAuth access token for the management API.
# SECURITY FIX: keep the token in a shell variable instead of writing it to
# the world-readable file /tmp/access_token, where any local user could read it.
# (The Basic header decodes to "edgecli:edgeclisecret" — presumably the standard
# public Apigee CLI client credential, not a secret; TODO confirm.)
APIGEE_ACCESS_TOKEN=$(curl --fail -H "Content-Type: application/x-www-form-urlencoded;charset=utf-8" -H "Accept: application/json;charset=utf-8" -H "Authorization: Basic ZWRnZWNsaTplZGdlY2xpc2VjcmV0" -X POST https://login.apigee.com/oauth/token -d "username=$APIGEE_USERNAME&password=$APIGEE_PASSWORD&grant_type=password" | jq -r .access_token)

# Upload the spec content, refresh the portal snapshot, then publish it.
curl --fail -X PUT "https://apigee.com/dapi/api/organizations/emea-demo8/specs/doc/$APIGEE_SPEC_ID/content" -H "Authorization: Bearer $APIGEE_ACCESS_TOKEN" -H 'Content-Type: text/plain' --data '@dist/hello-world-api.json'
curl --fail -X PUT "https://apigee.com/portals/api/sites/emea-demo8-nhsdportal/apidocs/$APIGEE_PORTAL_API_ID/snapshot" -H "Authorization: Bearer $APIGEE_ACCESS_TOKEN"
curl --fail -X POST "https://apigee.com/portals/api/sites/emea-demo8-nhsdportal/resource-entitlements/apis/$APIGEE_PORTAL_API_ID" -H "Authorization: Bearer $APIGEE_ACCESS_TOKEN" -H 'Content-Type: application/json' --data $'{"isPublic": true, "authEntitled": false, "explicitAudiences": [], "orgname": "emea-demo8"}'
|
var proRubApp = angular.module('proRubApp', ['ngRoute'])
.config(['$interpolateProvider', '$routeProvider', '$locationProvider', function($interpolateProvider, $routeProvider, $locationProvider){
    // Handlebars owns {{ }}, so Angular interpolation is remapped to (( )).
    $interpolateProvider.startSymbol('((');
    $interpolateProvider.endSymbol('))');

    // Route table: URL path -> view/controller pair.
    var routes = {
        '/':                           { templateUrl: '/views/home.html',      controller: 'homeCtrl' },
        '/addDegree':                  { templateUrl: '/views/adddegree.html', controller: 'addDegreeCtrl' },
        '/degree/:degree':             { templateUrl: '/views/degree.html',    controller: 'degreeCtrl' },
        // 'enter new course' needs to be dynamic
        '/degree/:degree/addCourse':   { templateUrl: '/views/newcourse.html', controller: 'newCourseCtrl' },
        '/:degree/:course/addRubric':  { templateUrl: '/views/addrubric.html', controller: 'addrubricCtrl' },
        '/degree/:degree/:course/:rubricTitle/audit':            { templateUrl: '/views/audit.html',       controller: 'auditCtrl' },
        '/degree/:degree/:course/:rubricTitle/audit/editMode':   { templateUrl: '/views/editMode.html',    controller: 'editModeCtrl' },
        '/degree/:degree/:course/:rubricTitle/history':          { templateUrl: '/views/history.html',     controller: 'historyCtrl' },
        '/degree/:degree/:course/:rubricTitle/history/view/:id': { templateUrl: '/views/historyView.html', controller: 'historyViewCtrl' }
    };
    Object.keys(routes).forEach(function(path){
        $routeProvider.when(path, routes[path]);
    });
    $routeProvider.otherwise({ redirectTo: '/' });
}]);
proRubApp.filter('calcGrade', function() {
    // Weighted final grade: each section contributes (average item grade) * weight.
    // Also stores the per-section average on section.grade as a side effect.
    return function(rubric) {
        var finalGrade = 0;
        if (rubric) {
            rubric.sections.forEach(function(section){
                var itemSum = section.items.reduce(function(sum, item){
                    return sum + item.grade;
                }, 0);
                // FIXME: This doesn't need to grab the average, needs to multiply by 1 / section.items.length
                section.grade = itemSum / section.items.length;
                finalGrade += section.grade * section.weight;
            });
        }
        return finalGrade.toFixed();
    }
});
proRubApp.filter('formatDate', function(){
    // Render any Date-parsable input as the default locale Date string.
    return function(input) {
        var parsed = new Date(input);
        return parsed.toString();
    }
});
proRubApp.controller('homeCtrl', ['$scope', '$http', '$routeParams', '$location',
function ($scope, $http, $routeParams, $location) {
    // BUG FIX: the DI annotation array previously listed only '$scope' and
    // '$http', so $routeParams and $location were injected as undefined and
    // removeDegree() crashed at runtime (and would silently break under
    // minification). They are now declared explicitly.

    // Fetches all of the degrees
    $http.get('/api/fetchDegrees')
    .success(function(data){
        // Make the data available to the DOM
        $scope.data = data;
    }).error(function(){
        // TODO: Add error handling
    });

    // Remove a degree
    $scope.removeDegree = function(){
        //Send a GET Request to the API with the degree abbreviation
        $http.get('/api/deleteDegree/'+ $routeParams.degree)
        // Once we catch a response run this code
        .then(function(result){
            // Forward the user to the home page
            $location.path('/#/');
        }, function(){
            // TODO: Add error handling
        });
    }
}]);
// reading one degrees from DB
proRubApp.controller('degreeCtrl', ['$scope', '$http','$routeParams','$location',
function ($scope, $http, $routeParams, $location) {
    // Fetch the degree, its courses, and each course's rubrics into scope.
    // Extracted into a helper: the identical sequence previously appeared
    // twice (initial load and after deleting a course).
    function loadDegreeData() {
        // Fetch the degree record itself
        $http.get('/api/fetchDegree/' + $routeParams.degree)
        .success(function(data){
            // Make the data available to the DOM
            $scope.degreeData = data;
        }).error(function(){
            // TODO: Add error handling
        });
        // Grab all of the courses from the database
        $http.get('/api/fetchCourses/' + $routeParams.degree)
        .success(function(data){
            $scope.coursesData = data;
            // For every course, loop through and grab all of the rubrics
            $scope.coursesData.forEach(function(course){
                $http.get('/api/fetchRubrics/' + $routeParams.degree + '/' + course.abbr)
                .success(function(data){
                    // creates an array of the rubrics associated with the course
                    course.rubrics = data;
                }).error(function(){
                    // TODO: Add error handling
                });
            });
        }).error(function(){
            // TODO: Add error handling
        });
    }

    // Remove the current degree, then forward the user to the home page.
    $scope.removeDegree = function(){
        $http.get('/api/deleteDegree/' + $routeParams.degree)
        .then(function(result){
            $location.path('/#/');
        }, function(){
            // TODO: Add error handling
        });
    }

    // Remove a course, then reload everything so the view reflects the deletion.
    $scope.removeCourse = function(courseAbbr){
        $http.get('/api/deleteCourse/' + courseAbbr)
        .then(function(result){
            loadDegreeData();
        }, function(){
            // TODO: Add error handling
        });
    }

    // Initial load.
    loadDegreeData();
}]);
// Insert a new degree
proRubApp.controller('addDegreeCtrl', ['$scope', '$http',
function ($scope, $http) {
    // Handler for the "Save Degree Program" button: POST the new degree,
    // then send the user straight to its add-course page.
    $scope.insertDegree = function(){
        $http.post('/api/newDegree', $scope.degree)
        .then(function(result){
            var nextPage = '/#/degree/' + result.data.abbr + "/addCourse";
            window.location.href = nextPage;
        }, function(){
            // TODO: Add error handling
        });
    }
}]);
proRubApp.controller('newCourseCtrl', ['$scope', '$http', '$routeParams',
function ($scope, $http, $routeParams) {
    // Expose the owning degree (taken from the URL) to the view.
    $scope.degreeName = $routeParams.degree;

    // Handler for "Save Course": tag the course with its degree, POST it,
    // then jump back to the degree page.
    $scope.insertCourse = function(){
        $scope.course.degreeAbbr = $routeParams.degree;
        $http.post('/api/newCourse', $scope.course)
        .then(function(result){
            window.location.href = '/#/degree/' + result.data.degreeAbbr;
        }, function(){
            // TODO: Add error handling
        });
    }
}]);
proRubApp.controller('auditCtrl', ['$scope', '$http', '$routeParams', '$filter',
function ($scope, $http, $routeParams, $filter) {
    // Remove a course
    // NOTE(review): despite the comment above, this deletes a *rubric*, then
    // navigates back to the rubric's degree page.
    $scope.removeRubric = function(rubric){
        //Send a GET Request to the API with the degree title and degree abbreviation
        $http.get('/api/deleteRubric/'+ rubric._id)
        // Once we catch a response run this code
        .then(function(result){
            window.location.href = '/#/degree/' + rubric.degreeAbbr;
        }, function(){
            // TODO: Add error handling
        });
    }
    // Load the rubric for this degree/course/title and wire up grading.
    $http.get('/api/fetchRubric/' + $routeParams.degree + '/' + $routeParams.course + '/' + $routeParams.rubricTitle)
    .success(function(data){
        $scope.rubric = data;
        // FIXME: Implement rendering HTML output
        $scope.output = JSON.stringify($scope.rubric);
        // Watch for changes to the scope to update the grade and output the new scope data
        // NOTE(review): $watch is given only a watch expression (no listener),
        // so this runs on every digest cycle; `~~` truncates the filter's
        // string result to an integer.
        $scope.$watch(function(){
            $scope.rubric.grade = ~~$filter('calcGrade')($scope.rubric);
        });
        // Save the audit
        $scope.exportAudit = function() {
            $http.post('/api/newAudit', $scope.rubric)
            // Once we catch a response run this code
            .then(function(result){
                $scope.rubric = result.data;
            }, function(){
                // TODO: Add error handling
            });
        };
        // creates an array of the rubrics associated with the course
    }).error(function(){
        // TODO: Add error handling
    });
}]);
proRubApp.controller('editModeCtrl', ['$scope', '$http', '$routeParams',
function ($scope, $http, $routeParams) {
    // Load the rubric being edited, then expose the edit operations on scope.
    $http.get('/api/fetchRubric/' + $routeParams.degree + '/' + $routeParams.course + '/' + $routeParams.rubricTitle)
    .success(function(data){
        $scope.rubric = data;

        // Append a blank item to the section at sectionIndex.
        $scope.newItem = function(sectionIndex) {
            var blankItem = {
                title: "",
                desc: "",
                link: "",
                comment: "",
                grade: 0
            };
            $scope.rubric.sections[sectionIndex].items.push(blankItem);
        };

        // Remove the item at itemIndex from the section at sectionIndex.
        $scope.delItem = function(itemIndex, sectionIndex) {
            $scope.rubric.sections[sectionIndex].items.splice(itemIndex, 1);
        };

        // Persist the edited rubric, then return to its audit page.
        $scope.updateRubric = function(){
            $http.put('/api/updateRubric', $scope.rubric)
            .then(function(data){
                var auditRoute = '/#/degree/' + $scope.rubric.degreeAbbr + '/' + $scope.rubric.courseAbbr + '/' + $scope.rubric.title + '/audit';
                window.location.href = auditRoute;
            },
            function(err){
                // TODO: Add error handling
            });
        }
    }).error(function(){
        // TODO: Add error handling
    });
}]);
proRubApp.controller('addrubricCtrl', ['$scope', '$http', '$routeParams', '$location',
function ($scope, $http, $routeParams, $location) {
    // Expose the current degree/course (from the URL) to the view.
    $scope.degree = $routeParams.degree;
    $scope.course = $routeParams.course;

    // POST the new rubric, then route the user to its audit page.
    $scope.insertRubric = function() {
        $scope.rubric.degreeAbbr = $routeParams.degree;
        $scope.rubric.courseAbbr = $routeParams.course;
        $http.post('/api/newRubric', $scope.rubric)
        .then(function(result){
            var auditPath = ['/degree', $scope.rubric.degreeAbbr, $scope.rubric.courseAbbr, $scope.rubric.title, 'audit'].join('/');
            $location.path(auditPath);
        }, function(){
            // TODO: Add error handling
        });
    }
}]);
// Lists the saved audits for one rubric.
proRubApp.controller('historyCtrl', ['$scope', '$http', '$routeParams',
function ($scope, $http, $routeParams) {
    var historyUrl = ['/api/fetchHistory', $routeParams.degree, $routeParams.course, $routeParams.rubricTitle].join('/');
    $http.get(historyUrl)
    .success(function(data){
        // Expose the audits and the current route params to the view.
        $scope.history = data;
        $scope.loc = $routeParams;
    }).error(function(){
        // TODO: Add error handling
    });
}]);
// Shows a single saved audit, looked up by its id.
proRubApp.controller('historyViewCtrl', ['$scope', '$http', '$routeParams',
function ($scope, $http, $routeParams) {
    var auditUrl = '/api/fetchHistory/' + $routeParams.id;
    $http.get(auditUrl)
    .success(function(data){
        // Expose the audit to the view.
        $scope.history = data;
    }).error(function(){
        // TODO: Add error handling
    });
}]);
|
#!/bin/bash
set -euo pipefail

# Add or remove a host route to the docker-machine network so containers are
# reachable from the host. Usage: ./script [add|rm] [machine-name]
addOrDelete=${1:-add}
MACHINE_NAME=${2:-default}

gateway=$(docker-machine ip "$MACHINE_NAME")
# Network name per docker-compose convention: <cwd-basename, no punctuation>_default.
networkname=$(echo $(basename $(pwd)) | tr -d '[:punct:]')_default
# BUG FIX: the subnet lookup previously hard-coded "mesosactor_default",
# ignoring the $networkname computed above (which was otherwise unused).
subnet=$(docker network inspect "$networkname" -f '{{(index .IPAM.Config 0).Subnet}}')

if [ "$addOrDelete" = "add" ]; then
    sudo route -n add -net ${subnet} ${gateway}
    exit 0
fi

if [ "$addOrDelete" = "rm" ]; then
    sudo route -n delete -net ${subnet} ${gateway}
    exit 0
fi

# FIX: the old message only mentioned "rm" even though "add" is also valid.
echo "Unknown command, please only use add or rm"
exit 1
|
/* eslint-disable */
// Registers the "nodejs" icon with vue-svgicon. The `data` string is
// generated SVG path geometry and must not be edited by hand.
var icon = require('vue-svgicon')
icon.register({
  'nodejs': {
    width: 16,
    height: 16,
    viewBox: '0 0 24 24',
    data: `<path pid="0" d="M11.435.153l-9.37 5.43c-.35.203-.564.578-.563.983V17.43c0 .404.215.78.564.982l9.37 5.435c.35.203.78.203 1.13 0l9.366-5.433a1.14 1.14 0 0 0 .565-.982V6.566c0-.404-.216-.78-.566-.984L12.567.152a1.124 1.124 0 0 0-1.13 0"/>`
  }
})
|
package main

import "fmt"

// Demonstrates defer ordering: a single deferred call runs after the rest of
// main's body, and multiple deferred calls run in LIFO (stack) order.
func main() {
	// 2. printed second: deferred until the surrounding function returns
	defer fmt.Println("world")

	// 1. printed first
	fmt.Println("hello")

	// Output:
	// =======
	// hello
	// world

	fmt.Println("counting")

	// Each defer pushes a call onto a stack; the argument i is evaluated
	// now, so the values 0..9 print in reverse when main returns.
	for i := 0; i < 10; i++ {
		defer fmt.Println(i)
	}

	fmt.Println("done")

	// Output:
	// =======
	// counting
	// done
	// 9
	// 8
	// 7
	// 6
	// 5
	// 4
	// 3
	// 2
	// 1
	// 0
}
#!/usr/bin/env bash
#
# install sidecar in kubernetes kind
#
set -o errexit
set -o pipefail
set -o nounset

# NOTE(review): KIND_VERSION and K8S_VERSION are referenced below but never
# set in this script — with `nounset` they must come from the CI environment;
# confirm.
CLUSTER_NAME="sidecar-testing"
BIN_DIR="$(mktemp -d)"
KIND="${BIN_DIR}/kind"
CWD="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)"
KIND_CONFIG="${CWD}/kind-config.yaml"
SIDECAR_MANIFEST="${CWD}/test/sidecar.yaml"

# Timestamped log line to stdout.
log() {
  echo "[$(date --rfc-3339=seconds -u)] $1"
}

# Build the dummy HTTP server image used inside the test cluster.
build_dummy_server() {
  docker build -t dummy-server:1.0.0 -f "${CWD}/server/Dockerfile" .
}

# Install the latest stable kubectl into /usr/local/bin (needs sudo).
install_kubectl() {
  log 'Installing kubectl...'
  curl -LO https://storage.googleapis.com/kubernetes-release/release/$(curl -s https://storage.googleapis.com/kubernetes-release/release/stable.txt)/bin/linux/amd64/kubectl
  chmod +x ./kubectl
  sudo mv ./kubectl /usr/local/bin/kubectl
}

# Download the pinned kind release into the temporary BIN_DIR.
install_kind_release() {
  log 'Installing kind...'
  KIND_BINARY_URL="https://github.com/kubernetes-sigs/kind/releases/download/${KIND_VERSION}/kind-linux-amd64"
  wget -O "${KIND}" "${KIND_BINARY_URL}"
  chmod +x "${KIND}"
}
# Create the kind cluster, wait for the node to be Ready, and preload the
# dummy-server image into it.
create_kind_cluster() {
  log "Creating cluster with kind config from ${KIND_CONFIG}"
  "${KIND}" create cluster --name "${CLUSTER_NAME}" --loglevel=debug --config "${KIND_CONFIG}" --image "kindest/node:${K8S_VERSION}"
  kubectl cluster-info
  echo

  log 'Waiting for cluster to be ready...'
  # Poll until no node reports NotReady.
  until ! grep --quiet 'NotReady' <(kubectl get nodes --no-headers); do
    printf '.'
    sleep 1
  done

  echo '✔︎'
  echo
  kubectl get nodes
  echo
  log 'Cluster ready!'
  echo

  "${KIND}" load docker-image dummy-server:1.0.0 --name "${CLUSTER_NAME}"
}

# Block until the named pod reports the Ready condition as "True".
wait_for_pod_ready() {
  while [[ $(kubectl get pods $1 -o 'jsonpath={..status.conditions[?(@.type=="Ready")].status}') != "True" ]]; do log "waiting for pod '$1' to become ready..." && sleep 1; done
  log "Pod '$1' ready."
}

# Deploy the sidecar manifest and wait for all three test pods.
install_sidecar() {
  log "Installing sidecar..."
  kubectl apply -f "${SIDECAR_MANIFEST}"
  wait_for_pod_ready "sidecar"
  wait_for_pod_ready "sidecar-5xx"
  wait_for_pod_ready "dummy-server-pod"
  # because the sidecar pods signal ready state before we actually opened up all watching subprocesses, we wait some more time
  sleep 15
}

# Apply the configmaps/secrets the sidecar is expected to sync.
install_resources() {
  log "Installing resources..."
  kubectl apply -f "${CWD}"/test/resources.yaml
}

# Dump pod specs/statuses for debugging.
list_pods() {
  log "Retrieving pods..."
  kubectl get pods -oyaml
}

# Dump the logs of all three pods for debugging.
pod_logs() {
  log "Retrieving logs of 'sidecar'..."
  kubectl logs sidecar
  log "Retrieving logs of 'sidecar-5xx'..."
  kubectl logs sidecar-5xx
  log "Retrieving logs of 'dummy-server-pod'..."
  kubectl logs dummy-server-pod
}
# Copy the synced files out of both sidecar pods and diff them against the
# expected content. The final &&-chain must stay the last statement so it
# defines the function's (and the script's) exit code.
verify_resources_read() {
  log "Downloading resource files from sidecar..."
  kubectl cp sidecar:/tmp/hello.world /tmp/hello.world
  kubectl cp sidecar:/tmp/cm-kubelogo.png /tmp/cm-kubelogo.png
  kubectl cp sidecar:/tmp/secret-kubelogo.png /tmp/secret-kubelogo.png
  kubectl cp sidecar:/tmp/script_result /tmp/script_result
  kubectl cp sidecar:/tmp/absolute/absolute.txt /tmp/absolute.txt
  kubectl cp sidecar:/tmp/relative/relative.txt /tmp/relative.txt
  kubectl cp sidecar:/tmp/500.txt /tmp/500.txt || true

  log "Downloading resource files from sidecar-5xx..."
  # BUG FIX: kubectl cp does not create missing parent directories on the
  # local side; without /tmp/5xx existing, the copies below fail and
  # `errexit` aborts the script before any verification runs.
  mkdir -p /tmp/5xx
  kubectl cp sidecar-5xx:/tmp-5xx/hello.world /tmp/5xx/hello.world
  kubectl cp sidecar-5xx:/tmp-5xx/cm-kubelogo.png /tmp/5xx/cm-kubelogo.png
  kubectl cp sidecar-5xx:/tmp-5xx/secret-kubelogo.png /tmp/5xx/secret-kubelogo.png
  # script also generates into '/tmp'
  kubectl cp sidecar-5xx:/tmp/script_result /tmp/5xx/script_result
  # absolute path in configmap points to /tmp in 'absolute-configmap'
  kubectl cp sidecar-5xx:/tmp/absolute/absolute.txt /tmp/5xx/absolute.txt
  kubectl cp sidecar-5xx:/tmp-5xx/relative/relative.txt /tmp/5xx/relative.txt
  kubectl cp sidecar-5xx:/tmp-5xx/500.txt /tmp/5xx/500.txt

  log "Verifying file content from sidecar and sidecar-5xx ..."
  # this needs to be the last statement so that it defines the script exit code
  echo -n "Hello World!" | diff - /tmp/hello.world &&
  diff ${CWD}/kubelogo.png /tmp/cm-kubelogo.png &&
  diff ${CWD}/kubelogo.png /tmp/secret-kubelogo.png &&
  echo -n "This absolutely exists" | diff - /tmp/absolute.txt &&
  echo -n "This relatively exists" | diff - /tmp/relative.txt &&
  [ ! -f /tmp/500.txt ] && echo "No 5xx file created" &&
  ls /tmp/script_result &&
  echo -n "Hello World!" | diff - /tmp/5xx/hello.world &&
  diff ${CWD}/kubelogo.png /tmp/5xx/cm-kubelogo.png &&
  diff ${CWD}/kubelogo.png /tmp/5xx/secret-kubelogo.png &&
  echo -n "This absolutely exists" | diff - /tmp/5xx/absolute.txt &&
  echo -n "This relatively exists" | diff - /tmp/5xx/relative.txt &&
  echo -n "500" | diff - /tmp/5xx/500.txt &&
  ls /tmp/5xx/script_result
}
# cleanup on exit (useful for running locally)
cleanup() {
  "${KIND}" delete cluster || true
  rm -rf "${BIN_DIR}"
}
trap cleanup EXIT

# Orchestrates the whole test: toolchain install, cluster up, deploy, verify.
main() {
  install_kubectl
  install_kind_release
  build_dummy_server
  create_kind_cluster
  install_sidecar
  install_resources
  # give the sidecar time to sync the freshly applied resources
  sleep 15
  list_pods
  pod_logs
  # important: this is needs to be the last function in the script so that the response code is picked up by CI
  verify_resources_read
}
main
|
<gh_stars>0
const previous = document.getElementById('previous');
const next = document.getElementById('next');

// Navigate with the arrow keys. FIX: `KeyboardEvent.keyCode` is deprecated;
// prefer the standard `key` property and keep the numeric code as a fallback
// for older browsers (37 = ArrowLeft, 39 = ArrowRight).
document.addEventListener('keydown', e => {
    if (previous && (e.key === 'ArrowLeft' || e.keyCode === 37)) {
        previous.click();
    }
    if (next && (e.key === 'ArrowRight' || e.keyCode === 39)) {
        next.click();
    }
});
|
#! /bin/bash
# Openrtist screensaver script for GHC 9th floor Demo. This script launches a media player which displays a stream from
# a linux pipe.
# This script should be used together with ../stream_for_screensaver.sh

# Verbosity flag: set to 1 by -v/--verbose to enable file logging.
_V=0

# Parse command-line options.
while [[ $# -gt 0 ]]
do
key="$1"
case $key in
    -v|--verbose)
    _V=1
    ;;
    -h|--help)
    printf "Usage: openrtist_screensaver.sh [-vh]\n"
    exit
esac
shift
done

# Append a message to the log file, but only in verbose mode.
function log() {
    if [[ $_V -eq 1 ]]; then
        printf "$@" >> /tmp/openrtist_screensaver.log
    fi
}

log "$(date): invoking openrist screensaver\n"

# Kill any previously running player before starting a new one.
log "killing running mpv\n"
pkill -f -9 ^mpv

log "xscreensaver window id ${XSCREENSAVER_WINDOW} \n"
log "launching mpv \n"

# xscreensaver needs sometime to create a screensaver window. Hence, a short sleep here is needed to make sure the
# xscreensaver window is created before mpv is launched
sleep 5

# Replace this shell with mpv, rendering the raw rgb24 stream from
# /tmp/rgbpipe into the window xscreensaver provides.
exec mpv --no-audio --no-cache \
    --no-cache-pause --untimed \
    --no-correct-pts --really-quiet \
    --wid ${XSCREENSAVER_WINDOW} \
    --demuxer rawvideo \
    --demuxer-rawvideo-w 960 \
    --demuxer-rawvideo-h 540 \
    --demuxer-rawvideo-mp-format rgb24 \
    /tmp/rgbpipe
|
package test161
import (
"crypto/md5"
"encoding/hex"
"encoding/json"
"errors"
"fmt"
"github.com/kevinburke/go.uuid"
"io/ioutil"
"os"
"os/exec"
"path"
"sort"
"strings"
"sync"
"time"
)
// SubmissionUserInfo identifies one submitting user. KeyHash is computed
// locally and excluded from YAML (de)serialization.
type SubmissionUserInfo struct {
	Email   string `yaml:"email"`
	Token   string `yaml:"token"`
	KeyHash string `yaml:"-"`
}

// SubmissionRequests are created by clients and used to generate Submissions.
// A SubmissionRequest represents the data required to run a test161 target
// for evaluation by the test161 server.
type SubmissionRequest struct {
	Target          string                // Name of the target
	Users           []*SubmissionUserInfo // Email addresses of users
	Repository      string                // Git repository to clone
	CommitID        string                // Git commit id to checkout after cloning
	CommitRef       string                // The ref they're submitting with, if one is set
	ClientVersion   ProgramVersion        // The version of test161 the client is running
	EstimatedScores map[string]uint       // The local score test161 computed
}

// UploadRequests are created by clients and provide the form fields for
// file uploads. Currently, we only support stats file uploads, but this
// could change.
type UploadRequest struct {
	UploadType int
	Users      []*SubmissionUserInfo
}

// Upload types (only usage-stats uploads exist today).
const (
	UPLOAD_TYPE_USAGE = iota
)

// Submission lifecycle states, stored in Submission.Status.
const (
	SUBMISSION_SUBMITTED = "submitted" // Submitted and queued
	SUBMISSION_BUILDING  = "building"  // Building the kernel
	SUBMISSION_RUNNING   = "running"   // The tests started running
	SUBMISSION_ABORTED   = "aborted"   // Aborted because one or more tests failed to error
	SUBMISSION_COMPLETED = "completed" // Completed
)

// Submission is the persisted record of one evaluation run (bson tags =>
// mongo persistence; fields tagged `bson:"-"` and unexported fields are
// runtime-only state).
type Submission struct {
	// Configuration
	ID            string   `bson:"_id,omitempty"`
	Users         []string `bson:"users"`
	Repository    string   `bson:"repository"`
	CommitID      string   `bson:"commit_id"`
	CommitRef     string   `bson:"commit_ref"`     // Just informational
	ClientVersion string   `bson:"client_version"` // Just informational

	// From the environment
	OverlayCommitID string `bson:"overlay_commit_id"` // Just informational
	IsStaff         bool   `bson:"is_staff"`

	// Target details
	TargetID      string `bson:"target_id"`
	TargetName    string `bson:"target_name"`
	TargetVersion uint   `bson:"target_version"`
	IsMetaTarget  bool   `bson:"is_meta_target"`

	// Submitted target, which is different from target details if submitting
	// to a subtarget of a metatarget.
	SubmittedTargetID      string `bson:"submitted_target_id"`
	SubmittedTargetName    string `bson:"submitted_target_name"`
	SubmittedTargetVersion uint   `bson:"submitted_target_version"`

	PointsAvailable uint   `bson:"max_score"`
	TargetType      string `bson:"target_type"`

	// Results
	Status         string    `bson:"status"`
	Score          uint      `bson:"score"`
	Performance    float64   `bson:"performance"`
	TestIDs        []string  `bson:"tests"`
	Errors         []string  `bson:"errors"`
	EstimatedScore uint      `bson:"estimated_score"`
	SubmissionTime time.Time `bson:"submission_time"`
	CompletionTime time.Time `bson:"completion_time"`

	Env       *TestEnvironment `bson:"-" json:"-"`
	BuildTest *BuildTest       `bson:"-" json:"-"`
	Tests     *TestGroup       `bson:"-" json:"-"`

	students []*Student

	// Split information for meta/sub-targets. We store IDs for
	// mongo/persistence, and keep references around in case we need them,
	// and for testing.
	OrigSubmissionID string `bson:"orig_submission_id"`
	origTarget       *Target
	SubSubmissionIDs []string `bson:"sub_submission_ids"`
	subSubmissions   map[string]*Submission

	// From the request, but we need it in case we split the submission.
	estimatedScores map[string]uint
}

// TargetStats aggregates per-target submission statistics for one student.
type TargetStats struct {
	TargetName    string `bson:"target_name"`
	TargetVersion uint   `bson:"target_version"`
	TargetType    string `bson:"target_type"`

	MaxScore uint `bson:"max_score"`

	TotalSubmissions uint `bson:"total_submissions"`
	TotalComplete    uint `bson:"total_complete"`

	HighScore uint    `bson:"high_score"`
	LowScore  uint    `bson:"low_score"`
	AvgScore  float64 `bson:"avg_score"`

	BestPerf  float64 `bson:"best_perf"`
	WorstPerf float64 `bson:"worst_perf"`
	AvgPerf   float64 `bson:"avg_perf"`

	BestSubmission string `bson:"best_submission_id"`
}

// Student is the persisted record of one course participant, including the
// credentials used to authenticate submissions.
type Student struct {
	ID        string `bson:"_id"`
	Email     string `bson:"email"`
	Token     string `bson:"token"`
	PublicKey string `bson:"key"`

	// Stats
	TotalSubmissions uint           `bson:"total_submissions"`
	Stats            []*TargetStats `bson:"target_stats"`

	// Computed, cached.
	// 0 == uncached, 1 == false, 2 == true
	isStaff int
}

// Target stats sorting
// StatsByName implements sort.Interface, ordering TargetStats by target name.
type StatsByName []*TargetStats

func (a StatsByName) Len() int           { return len(a) }
func (a StatsByName) Swap(i, j int)      { a[i], a[j] = a[j], a[i] }
func (a StatsByName) Less(i, j int) bool { return a[i].TargetName < a[j].TargetName }

// Keep track of pending submissions. Keep this out of the database in case there are
// communication issues so that we don't need to manually reset things in the DB.
var userLock = &sync.Mutex{}
var pendingSubmissions = make(map[string]bool)
// Check users against users database. Don't lock them until we run though
// validateUserRecords resolves each (email, token) pair to its Student
// record, failing fast on the first lookup/authentication error.
func validateUserRecords(users []*SubmissionUserInfo, env *TestEnvironment) ([]*Student, error) {
	allStudents := make([]*Student, 0, len(users))
	for _, user := range users {
		students, err := getStudents(user.Email, user.Token, env)
		if err != nil {
			return nil, err
		}
		allStudents = append(allStudents, students[0])
	}
	return allStudents, nil
}
// getStudents retrieves exactly one student from persistence by email+token
// and verifies that the returned record matches the supplied credentials.
// On any mismatch (no record, multiple records, or differing fields) an
// authentication error is returned.
func getStudents(email, token string, env *TestEnvironment) ([]*Student, error) {
	query := map[string]interface{}{
		"email": email,
		"token": token,
	}
	students := []*Student{}
	err := env.Persistence.Retrieve(PERSIST_TYPE_STUDENTS, query, nil, &students)
	if err != nil {
		return nil, err
	}
	authenticated := len(students) == 1 &&
		students[0].Email == email &&
		students[0].Token == token
	if !authenticated {
		return nil, errors.New("Unable to authenticate student: " + email)
	}
	return students, nil
}
// validateUsers authenticates the submission's users (when persistence is
// available) and enforces that staff and non-staff never submit together.
// Returns the authenticated Student records; the slice may be empty when
// persistence cannot retrieve (e.g. client-side runs).
func validateUsers(users []*SubmissionUserInfo, env *TestEnvironment) ([]*Student, error) {
	students := []*Student{}
	var err error
	if len(users) == 0 {
		return students, errors.New("No usernames specified")
	}
	// Only authenticate when we actually have a readable students DB.
	if env.Persistence != nil && env.Persistence.CanRetrieve() {
		if students, err = validateUserRecords(users, env); err != nil {
			return students, err
		}
	}
	// Staff flag needs to be all or nothing
	staffCnt := 0
	for _, student := range students {
		if staff, err := student.IsStaff(env); err != nil {
			return students, err
		} else if staff {
			staffCnt += 1
		}
	}
	if staffCnt > 0 && staffCnt < len(students) {
		return students, errors.New("Cannot have staff working with students!")
	}
	return students, nil
}
// Validate checks a SubmissionRequest for well-formedness (SSH repository
// URL, known target, non-empty repo/commit) and authenticates its users.
// Returns the authenticated students on success.
func (req *SubmissionRequest) Validate(env *TestEnvironment) ([]*Student, error) {
	students := []*Student{}
	var err error
	// Non-user checks
	if strings.HasPrefix(req.Repository, "http") {
		return students, errors.New("test161 must access your repository via SSH and does not accept submissions from http/https URLs")
	}
	if _, ok := env.Targets[req.Target]; !ok {
		return students, errors.New("Invalid target: " + req.Target)
	}
	if len(req.Repository) == 0 || len(req.CommitID) == 0 {
		return students, errors.New("Must specify a Git repository and commit id")
	}
	// User checks (shared)
	students, err = validateUsers(req.Users, env)
	return students, err
}
// Validate authenticates an UploadRequest's users; uploads have no
// repository/target fields, so user validation is the only check.
func (req *UploadRequest) Validate(env *TestEnvironment) ([]*Student, error) {
	return validateUsers(req.Users, env)
}
// Create a new Submission that can be evaluated by the test161 server or client.
//
// This submission has a copy of the test environment, so it's safe to pass the
// same environment for multiple submissions. Local fields will be set accordingly.
//
// On success the students involved are marked pending (they cannot submit
// again until this submission finishes) and the submission has been persisted.
func NewSubmission(request *SubmissionRequest, origenv *TestEnvironment) (*Submission, []error) {
	var students []*Student
	var err error
	env := origenv.CopyEnvironment()
	// Validate the request details and get the list of students for which
	// this submission applies. We'll use this list later when we
	// actually run the submission.
	if students, err = request.Validate(env); err != nil {
		return nil, []error{err}
	}
	// (The target was validated in the previous step)
	target := env.Targets[request.Target]
	// Create the build configuration. This is a combination of
	// the environment, target, and request.
	conf := &BuildConf{}
	conf.Repo = request.Repository
	conf.CommitID = request.CommitID
	conf.CacheDir = env.CacheDir
	conf.KConfig = target.KConfig
	conf.RequiredCommit = target.RequiredCommit
	conf.RequiresUserland = target.RequiresUserland
	conf.Overlay = target.Name
	conf.Users = make([]string, 0, len(request.Users))
	for _, u := range request.Users {
		conf.Users = append(conf.Users, u.Email)
	}
	// Add first 'test' (build)
	buildTest, err := conf.ToBuildTest(env)
	if err != nil {
		return nil, []error{err}
	}
	// Get the TestGroup. The root dir won't be set yet, but that's OK. We'll
	// change it after the build
	tg, errs := target.Instance(env)
	if len(errs) > 0 {
		// this should work unless the server is broken
		env.Log.Printf("Errors loading target: %v\n", errs)
		return nil, []error{errors.New("Errors loading target on the server")}
	}
	id := uuid.NewV4().String()
	s := &Submission{
		ID:                     id,
		Repository:             request.Repository,
		CommitID:               request.CommitID,
		CommitRef:              request.CommitRef,
		ClientVersion:          request.ClientVersion.String(),
		EstimatedScore:         uint(0),
		TargetID:               target.ID,
		TargetName:             target.Name,
		TargetVersion:          target.Version,
		SubmittedTargetID:      target.ID,
		SubmittedTargetName:    target.Name,
		SubmittedTargetVersion: target.Version,
		PointsAvailable:        target.Points,
		TargetType:             target.Type,
		IsMetaTarget:           target.IsMetaTarget,
		Status:                 SUBMISSION_SUBMITTED,
		Score:                  uint(0),
		Performance:            float64(0.0),
		TestIDs:                []string{buildTest.ID},
		Errors:                 []string{},
		SubmissionTime:         time.Now(),
		Env:                    env,
		BuildTest:              buildTest,
		Tests:                  tg,
		origTarget:             target,
		OrigSubmissionID:       id,
		SubSubmissionIDs:       []string{},
		subSubmissions:         make(map[string]*Submission),
		estimatedScores:        request.EstimatedScores,
	}
	// If this is a subtarget, change the details and "submit to the metatarget".
	if target.metaTarget != nil && !target.IsMetaTarget {
		s.TargetID = target.metaTarget.ID
		s.TargetName = target.metaTarget.Name
		s.TargetVersion = target.metaTarget.Version
		s.PointsAvailable = target.metaTarget.Points
		s.IsMetaTarget = true
	}
	// This needs to come after the target name is adjusted for metatargets.
	if est, ok := request.EstimatedScores[s.TargetName]; ok {
		s.EstimatedScore = est
	}
	// We need the students to later update the students collection. But,
	// the submission only cares about user email addresses.
	s.students = students
	s.Users = make([]string, 0, len(request.Users))
	for _, u := range request.Users {
		s.Users = append(s.Users, u.Email)
	}
	// Also, we record whether the students were staff or not so we can
	// easily filter out staff submissions. The validate step checks that
	// students and staff are not working together
	if len(students) > 0 {
		// This has already been set during validate.
		s.IsStaff, _ = students[0].IsStaff(env)
	}
	// Try and lock students now so we don't allow multiple submissions.
	// This enforces NewSubmission() can only return successfully if none
	// of the students has a pending submission. We need to do this
	// before we persist the submission.
	userLock.Lock()
	defer userLock.Unlock()
	// First pass - just check
	for _, student := range students {
		if running := pendingSubmissions[student.Email]; running {
			msg := fmt.Sprintf("Cannot submit at this time: User %v has a submission pending.", student.Email)
			env.Log.Println(msg)
			return nil, []error{errors.New(msg)}
		}
	}
	// Now lock
	for _, student := range students {
		pendingSubmissions[student.Email] = true
	}
	if env.Persistence != nil {
		if buildTest != nil {
			// If we get an error here, we can still hopefully recover. Though,
			// build updates won't be seen by the user.
			env.notifyAndLogErr("Create Build Test", buildTest, MSG_PERSIST_CREATE, 0)
		}
		// This we can't recover from
		err = env.Persistence.Notify(s, MSG_PERSIST_CREATE, 0)
	}
	// Unlock so they can resubmit
	if err != nil {
		for _, student := range students {
			delete(pendingSubmissions, student.Email)
		}
		return nil, []error{err}
	}
	return s, nil
}
// TargetStats builds a fresh stats record seeded with this submission's
// target identity and maximum score; all counters start at zero.
func (s *Submission) TargetStats() (result *TargetStats) {
	return &TargetStats{
		TargetName:    s.TargetName,
		TargetVersion: s.TargetVersion,
		TargetType:    s.TargetType,
		MaxScore:      s.PointsAvailable,
	}
}
// validResult reports whether the submission results are valid from the
// perspective of updating statistics. We only count submissions that complete
// successfully for assignments. For perf targets, the score also has to be
// perfect (all points earned).
func (s *Submission) validResult() bool {
	// Idiomatic form of the original if/else-return-bool.
	return s.Status == SUBMISSION_COMPLETED &&
		(s.TargetType == TARGET_ASST || s.Score == s.PointsAvailable)
}
// getStat returns the student's stats entry for the named target, or nil
// when the student has no stats recorded for it yet.
func (student *Student) getStat(targetName string) *TargetStats {
	for i := range student.Stats {
		if student.Stats[i].TargetName == targetName {
			return student.Stats[i]
		}
	}
	return nil
}
// updateStats folds one finished submission into the student's per-target
// statistics, creating the TargetStats entry on first submission to a target.
// Only submissions passing validResult() affect score/perf aggregates.
func (student *Student) updateStats(submission *Submission) {
	// This might be nil coming out of Mongo
	if student.Stats == nil {
		student.Stats = make([]*TargetStats, 0)
	}
	// Only update the student submission count for the original submission.
	// This way, we won't increase it 4 times when they submit ASST3.
	if submission.ID == submission.OrigSubmissionID {
		student.TotalSubmissions += 1
	}
	// Find the TargetStats to update, or create a new one
	stat := student.getStat(submission.TargetName)
	if stat == nil {
		stat = submission.TargetStats()
		student.Stats = append(student.Stats, stat)
		// Sort this so it looks right coming out of mongo
		sort.Sort(StatsByName(student.Stats))
	}
	// Always increment submission count, but everything else depends on the
	// submission result
	stat.TotalSubmissions += 1
	// If the target changed, like in ASST3 where we're incrementally building it,
	// update the max score so the front-end displays it correctly.
	// TODO: We might want to keep multiple version in the stats collection, but
	// that would require aggregation (slow) on the front-end.
	if stat.TargetVersion < submission.TargetVersion {
		stat.MaxScore = submission.PointsAvailable
		stat.TargetVersion = submission.TargetVersion
	}
	if submission.validResult() {
		if stat.TargetType == TARGET_ASST {
			// High score
			if stat.HighScore < submission.Score {
				stat.HighScore = submission.Score
				stat.BestSubmission = submission.ID
			}
			// Low score (0 means "unset", so the first valid score seeds it)
			if stat.LowScore == 0 || stat.LowScore > submission.Score {
				stat.LowScore = submission.Score
			}
			// Average: recover the previous sum, then recompute the mean.
			prevTotal := float64(stat.TotalComplete) * stat.AvgScore
			stat.TotalComplete += 1
			// TotalComplete == 0 after incrementing can only happen on uint
			// wraparound; reset defensively so we never divide by zero.
			if stat.TotalComplete == 0 {
				stat.TotalComplete = 1
				prevTotal = 0
			}
			stat.AvgScore = (prevTotal + float64(submission.Score)) / float64(stat.TotalComplete)
		} else if stat.TargetType == TARGET_PERF {
			// Best Perf (lower is better; 0.0 means "unset")
			if submission.Performance < stat.BestPerf || stat.BestPerf == 0.0 {
				stat.BestPerf = submission.Performance
				stat.BestSubmission = submission.ID
			}
			// Worst Perf
			if stat.WorstPerf < submission.Performance {
				stat.WorstPerf = submission.Performance
			}
			// Average perf: same running-mean recompute as scores above.
			prevPerfTotal := float64(stat.TotalComplete) * stat.AvgPerf
			stat.TotalComplete += 1
			// Same defensive uint-wraparound guard as the score branch.
			if stat.TotalComplete == 0 {
				stat.TotalComplete = 1
				prevPerfTotal = 0.0
			}
			stat.AvgPerf = (prevPerfTotal + submission.Performance) / float64(stat.TotalComplete)
		}
	}
}
// IsStaff reports whether the student is course staff, consulting the users
// collection on first call and caching the answer in student.isStaff
// (0 == uncached, 1 == false, 2 == true). Returns an error if persistence
// is unavailable and the flag has not been cached yet.
func (student *Student) IsStaff(env *TestEnvironment) (bool, error) {
	// Dotted Mongo-style field paths into the users collection.
	const staff = "services.auth0.user_metadata.staff"
	const email = "services.auth0.email"
	if student.isStaff == 0 {
		if env.Persistence != nil && env.Persistence.CanRetrieve() {
			// Query: a user with this email AND the staff flag set true.
			who := map[string]interface{}{
				email: student.Email,
				staff: true,
			}
			// Projection: only pull back the staff field.
			filter := map[string]interface{}{staff: 1}
			res := make([]interface{}, 0)
			if err := env.Persistence.Retrieve(PERSIST_TYPE_USERS, who, filter, &res); err != nil {
				return false, err
			} else {
				// Set flag: any match means staff.
				if len(res) > 0 {
					student.isStaff = 2
				} else {
					student.isStaff = 1
				}
			}
		} else {
			return false, errors.New("Unable to detect if student is staff")
		}
	}
	return student.isStaff == 2, nil
}
// Update students. We copy metadata to make this quick and store the
// submission id to look up the full details.
// Persistence failures are logged (with the full student record when it can
// be marshaled) but do not abort the loop — remaining students still update.
func (s *Submission) updateStudents() {
	for _, student := range s.students {
		// Update stats
		student.updateStats(s)
		if s.Env.Persistence != nil {
			if err := s.Env.Persistence.Notify(student, MSG_PERSIST_UPDATE, 0); err != nil {
				// Prefer logging the full marshaled record; fall back to the
				// email address if marshaling itself fails.
				if sbytes, jerr := json.Marshal(student); jerr != nil {
					s.Env.Log.Printf("Error updating student: %v (%v)\n", student.Email, err)
				} else {
					s.Env.Log.Printf("Error updating student: %v (%v)\n", string(sbytes), err)
				}
			}
		}
	}
}
// unlockStudents clears the pending-submission flags for every student on
// this submission, allowing them to submit again.
func (s *Submission) unlockStudents() {
	userLock.Lock()
	// Unblock the students from resubmitting.
	for _, student := range s.students {
		delete(pendingSubmissions, student.Email)
	}
	userLock.Unlock()
}
// finish stamps the completion time, promotes RUNNING to COMPLETED (other
// terminal statuses, e.g. ABORTED, are preserved), persists the final state,
// and folds the result into the students' statistics.
func (s *Submission) finish() {
	s.CompletionTime = time.Now()
	if s.Status == SUBMISSION_RUNNING {
		s.Status = SUBMISSION_COMPLETED
	}
	// Send the final submission update to the db
	s.Env.notifyAndLogErr("Finish Submission", s, MSG_PERSIST_COMPLETE, 0)
	if len(s.students) > 0 {
		s.updateStudents()
	}
}
// Clone the submission and update its details for the given target.
// This requires us to copy the existing object and modify its tests list
// and points to only include those tests that were requried for this
// particular target.
func (s *Submission) cloneAndUpdate(target *Target) *Submission {
var copy Submission = *s
copy.ID = uuid.NewV4().String()
// Target details
copy.TargetID = target.ID
copy.TargetName = target.Name
copy.TargetVersion = target.Version
copy.IsMetaTarget = false
copy.PointsAvailable = target.Points
// Results/tests
copy.Score = uint(0)
copy.EstimatedScore = uint(0)
copy.TestIDs = make([]string, 0)
copy.SubSubmissionIDs = make([]string, 0)
copy.subSubmissions = make(map[string]*Submission)
// Add build test
copy.TestIDs = append(copy.TestIDs, s.TestIDs[0])
// Find all tests that were needed for this sub-part
for _, test := range s.Tests.Tests {
if test.requiredBy[target.Name] {
copy.TestIDs = append(copy.TestIDs, test.ID)
}
}
if est, ok := s.estimatedScores[target.Name]; ok {
copy.EstimatedScore = est
}
return ©
}
// Split the submission into multiple submissions based on the metatarget.
// Returns nil for single-target (non-meta) submissions.
func (s *Submission) split() []*Submission {
	// For single target assignments, we skip this step.
	if !s.IsMetaTarget {
		return nil
	}
	// We need to create a submission for the original target, as well as
	// all previous subtargets.
	submissions := make([]*Submission, 0)
	// Two cases:
	// 1) The original target was a metatarget. In this case, all of the targets
	//    are stored in the previousSubtargets.
	// 2) We upgraded to a metatarget. In that case, we need to add all of the
	//    previous subtargets AND the original target.
	if !s.origTarget.IsMetaTarget {
		submissions = append(submissions, s.cloneAndUpdate(s.origTarget))
	}
	for _, target := range s.origTarget.previousSubTargets {
		submissions = append(submissions, s.cloneAndUpdate(target))
	}
	return submissions
}
// abort marks the submission aborted and zeroes its score and performance so
// the run never counts toward statistics (see validResult).
func (s *Submission) abort() {
	s.Status = SUBMISSION_ABORTED
	s.Score = 0
	s.Performance = float64(0)
}
// updateScore adds a finished test's earned points to the submission total
// and pushes the new score to persistence.
// NOTE(review): the Notify error is silently ignored here — confirm that a
// failed score update is acceptable to lose (the final total is re-sent by
// finish()).
func (s *Submission) updateScore(test *Test) {
	s.Score += test.PointsEarned
	s.Env.Persistence.Notify(s, MSG_PERSIST_UPDATE, MSG_FIELD_SCORE)
}
// Synchronous submission runner.
// Builds os161, registers the remaining tests, optionally splits a metatarget
// into sub-submissions, then runs the dependency graph and accumulates scores.
// Students are unlocked and the submission finalized via deferred calls even
// on early-error returns.
func (s *Submission) Run() error {
	// Run the build first. Right now this is the only thing the front-end sees.
	// We'll add the rest of the tests if this passes, otherwise we don't waste the
	// disk space.
	var err error
	// So we know it's not nil
	if s.Env.Persistence == nil {
		s.Env.Persistence = &DoNothingPersistence{}
	}
	// LIFO defers: finish() runs before unlockStudents().
	defer s.unlockStudents()
	defer s.finish()
	// Build os161
	if s.BuildTest != nil {
		s.Status = SUBMISSION_BUILDING
		s.Env.notifyAndLogErr("Submission Status Building", s, MSG_PERSIST_UPDATE, MSG_FIELD_STATUS)
		s.BuildTest.SubmissionID = s.ID
		res, err := s.BuildTest.Run(s.Env)
		if err != nil {
			s.Status = SUBMISSION_ABORTED
			s.Env.notifyAndLogErr("Submission Complete (Aborted)", s, MSG_PERSIST_COMPLETE, 0)
			s.Errors = append(s.Errors, fmt.Sprintf("%v", err))
			return err
		}
		// Build output
		s.Env.RootDir = res.RootDir
		// Clean up temp build directory
		if len(res.TempDir) > 0 {
			defer os.RemoveAll(res.TempDir)
		}
		s.OverlayCommitID = s.BuildTest.overlayCommitID
	}
	// Build succeeded, update things accordingly
	for _, test := range s.Tests.Tests {
		// Link the test to this submission.
		test.SubmissionID = s.ID
		// Add test IDs to DB
		s.TestIDs = append(s.TestIDs, test.ID)
		// Create the test object in the DB
		// If this fails, we abort the submission because we can't verify the results
		err = s.Env.Persistence.Notify(test, MSG_PERSIST_CREATE, 0)
		if err != nil {
			s.Status = SUBMISSION_ABORTED
			s.Errors = append(s.Errors, fmt.Sprintf("%v", err))
			return err
		}
	}
	// Run it
	s.Status = SUBMISSION_RUNNING
	s.Env.notifyAndLogErr("Submission Status (Running) ", s, MSG_PERSIST_UPDATE, MSG_FIELD_TESTS|MSG_FIELD_STATUS)
	runner := NewDependencyRunner(s.Tests)
	done := runner.Run()
	// Split up the target into multiple sub-targets. If splits is non-empty,
	// we are now running the metatarget up to and including the original target.
	// We want to do this *before* running the target so that the front-end can
	// display each submission as running simultaneously.
	splits := s.split()
	for _, newSubmission := range splits {
		s.Env.notifyAndLogErr("Create Split Submission", newSubmission, MSG_PERSIST_CREATE, 0)
		defer newSubmission.finish()
		s.SubSubmissionIDs = append(s.SubSubmissionIDs, newSubmission.ID)
		s.subSubmissions[newSubmission.TargetName] = newSubmission
	}
	// Update the score unless a test aborts, then it's 0 and we abort (eventually)
	// Drain the runner's result channel to completion even after an abort so
	// all errors are collected.
	for r := range done {
		if s.Status == SUBMISSION_RUNNING {
			if r.Test.Result == TEST_RESULT_ABORT {
				s.abort()
				for _, other := range splits {
					// Abort all
					other.abort()
				}
			} else {
				// Always update the metasubmission, and possibly subtarget submissions.
				s.updateScore(r.Test)
				for _, other := range splits {
					if r.Test.TargetName == other.TargetName {
						other.updateScore(r.Test)
					}
				}
			}
		}
		if r.Err != nil {
			s.Errors = append(s.Errors, fmt.Sprintf("%v", r.Err))
		}
	}
	return err
}
// On success, KeyGen returns the public key of the newly generated
// public/private key pair. Any existing pair for the student is replaced, and
// the new public key is saved to the student's record when persistence is
// available.
func KeyGen(email, token string, env *TestEnvironment) (string, error) {
	if len(env.KeyDir) == 0 {
		return "", errors.New("No key directory specified")
	} else if _, err := os.Stat(env.KeyDir); err != nil {
		return "", errors.New("Key directory not found")
	}
	// Find and authenticate the user before touching the filesystem.
	students, err := getStudents(email, token, env)
	if err != nil {
		return "", err
	}
	studentDir := path.Join(env.KeyDir, email)
	privkey := path.Join(studentDir, "id_rsa")
	pubkey := privkey + ".pub"
	if _, err = os.Stat(studentDir); err == nil {
		// Directory exists: remove any previous pair so ssh-keygen won't prompt.
		os.Remove(privkey)
		os.Remove(pubkey)
	} else {
		err = os.Mkdir(studentDir, 0770)
		if err != nil {
			return "", err
		}
	}
	// Generate the key pair with an empty passphrase (-N ""). The key comment
	// (-C) is the student's email so keys are identifiable server-side.
	// (Original source had a redaction placeholder here — confirm the comment
	// value against the deployment.)
	cmd := exec.Command("ssh-keygen", "-C", email, "-N", "", "-f", privkey)
	cmd.Dir = env.KeyDir
	err = cmd.Run()
	if err != nil {
		return "", err
	}
	data, err := ioutil.ReadFile(pubkey)
	if err != nil {
		return "", err
	}
	keytext := string(data)
	// Update the student record. The original swallowed the Notify error and
	// returned nil; propagate it instead so callers know the DB may be out of
	// sync with the on-disk key (the key text is still returned).
	students[0].PublicKey = keytext
	if env.Persistence != nil {
		err = env.Persistence.Notify(students[0], MSG_PERSIST_UPDATE, 0)
	}
	return keytext, err
}
// RequestKeyResonse is the response we send back during validation if the keys
// aren't up-to-date.
// (Note: the misspelled name — "Resonse" — is the exported identifier that
// clients already depend on, so it is kept as-is.)
type RequestKeyResonse struct {
	// User is the email address the key belongs to.
	User string
	// Key is the new private key text, or "" if no key exists yet.
	Key string
}
// Check if the local copy of the key is up-to-date.
// Return an empty key if the user's key has not been created, or the
// new key if the hash is different.
// Users whose stored hash already matches are omitted from the result.
func (req *SubmissionRequest) CheckUserKeys(env *TestEnvironment) []*RequestKeyResonse {
	res := []*RequestKeyResonse{}
	for _, user := range req.Users {
		studentDir := path.Join(env.KeyDir, user.Email)
		privkey := path.Join(studentDir, "id_rsa")
		if _, err := os.Stat(privkey); err != nil {
			// No key, inform (empty Key signals "not created yet").
			res = append(res, &RequestKeyResonse{
				User: user.Email,
				Key:  "",
			})
			continue
		}
		// Get hash. Read errors are logged and the user skipped (best-effort).
		data, err := ioutil.ReadFile(privkey)
		if err != nil {
			env.Log.Printf("Error reading private key (%v): %v", privkey, err)
			continue
		}
		// MD5 is used purely as a change-detection fingerprint against the
		// client-supplied KeyHash, not for any security property.
		raw := md5.Sum(data)
		hash := strings.ToLower(hex.EncodeToString(raw[:]))
		if hash != user.KeyHash {
			res = append(res, &RequestKeyResonse{
				User: user.Email,
				Key:  string(data),
			})
		}
	}
	return res
}
|
<filename>biodwh2-reactome/src/main/java/de/unibi/agbi/biodwh2/reactome/etl/Neo4jSessionFactory.java
package de.unibi.agbi.biodwh2.reactome.etl;
import org.neo4j.ogm.config.Configuration;
import org.neo4j.ogm.session.Session;
import org.neo4j.ogm.session.SessionFactory;
/**
 * Created by manuel on 13.01.20.
 * <p>
 * Eagerly-initialized singleton wrapping a Neo4j OGM {@link SessionFactory}
 * for the Reactome entity package.
 * <p>
 * NOTE(review): the connection URI and credentials are hard-coded (the
 * password value is a redaction placeholder) and use plain http — these
 * should come from external configuration rather than source code.
 */
public class Neo4jSessionFactory {
    private final static Configuration configuration = new Configuration.Builder().uri("http://localhost:7474")
                                                                                  .credentials("neo4j", "<PASSWORD>wort")
                                                                                  .build();
    // Scans the entities package once; SessionFactory is thread-safe and shared.
    private final static SessionFactory sessionFactory = new SessionFactory(configuration, "de.unibi.agbi.biodwh2.reactome.entities");
    private static Neo4jSessionFactory factory = new Neo4jSessionFactory();
    /** Returns the process-wide singleton instance. */
    public static Neo4jSessionFactory getInstance() {
        return factory;
    }
    // prevent external instantiation
    private Neo4jSessionFactory() {
    }
    /** Opens a new OGM session; callers manage the session's lifecycle. */
    public Session getNeo4jSession() {
        return sessionFactory.openSession();
    }
}
|
import TimeSegment from '../../../shared/modules/DataRender/vos/TimeSegment';
import Dates from '../../../shared/modules/FormatDatesNombres/Dates/Dates';
import ModuleMaintenance from '../../../shared/modules/Maintenance/ModuleMaintenance';
import MaintenanceVO from '../../../shared/modules/Maintenance/vos/MaintenanceVO';
import ModuleParams from '../../../shared/modules/Params/ModuleParams';
import ForkedTasksController from '../Fork/ForkedTasksController';
import PushDataServerController from '../PushData/PushDataServerController';
export default class MaintenanceServerController {
public static TASK_NAME_set_planned_maintenance_vo = 'MaintenanceServerController.set_planned_maintenance_vo';
public static TASK_NAME_handleTriggerPreC_MaintenanceVO = 'MaintenanceServerController.handleTriggerPreC_MaintenanceVO';
public static TASK_NAME_end_maintenance = 'MaintenanceServerController.end_maintenance';
public static TASK_NAME_start_maintenance = 'ModuleMaintenanceServer.start_maintenance';
public static TASK_NAME_end_planned_maintenance = 'ModuleMaintenanceServer.end_planned_maintenance';
public static getInstance() {
if (!MaintenanceServerController.instance) {
MaintenanceServerController.instance = new MaintenanceServerController();
}
return MaintenanceServerController.instance;
}
private static instance: MaintenanceServerController = null;
/**
* Global application cache - Handled by Main process -----
*/
public planned_maintenance: MaintenanceVO = null;
/**
* ----- Global application cache - Handled by Main process
*/
/**
* Local thread cache -----
* - Monothread car un seul thread, le main, peut et doit l'utiliser en CRUD
*/
private informed_users_tstzs: { [user_id: number]: number } = {};
/**
* ----- Local thread cache
*/
protected constructor() {
ForkedTasksController.getInstance().register_task(MaintenanceServerController.TASK_NAME_set_planned_maintenance_vo, this.set_planned_maintenance_vo.bind(this));
}
public async set_planned_maintenance_vo(maintenance: MaintenanceVO): Promise<void> {
if (!await ForkedTasksController.getInstance().exec_self_on_main_process(MaintenanceServerController.TASK_NAME_set_planned_maintenance_vo, maintenance)) {
return;
}
this.planned_maintenance = maintenance;
}
/**
* WARN : Should only be used on the main process (express)
*/
get has_planned_maintenance() {
ForkedTasksController.getInstance().assert_is_main_process();
return !!this.planned_maintenance;
}
/**
* WARN : only on main thread (express) since called only when on request
* @param user_id
*/
public async inform_user_on_request(user_id: number): Promise<void> {
ForkedTasksController.getInstance().assert_is_main_process();
if (!(this.planned_maintenance && (!this.planned_maintenance.maintenance_over))) {
return;
}
let timeout_info: number = await ModuleParams.getInstance().getParamValueAsInt(ModuleMaintenance.PARAM_NAME_INFORM_EVERY_MINUTES, 1);
if ((!!this.informed_users_tstzs[user_id]) && (Dates.add(this.informed_users_tstzs[user_id], timeout_info, TimeSegment.TYPE_MINUTE) > Dates.now())) {
return;
}
let timeout_minutes_msg1: number = await ModuleParams.getInstance().getParamValueAsInt(ModuleMaintenance.PARAM_NAME_SEND_MSG1_WHEN_SHORTER_THAN_MINUTES);
let timeout_minutes_msg2: number = await ModuleParams.getInstance().getParamValueAsInt(ModuleMaintenance.PARAM_NAME_SEND_MSG2_WHEN_SHORTER_THAN_MINUTES);
let timeout_minutes_msg3: number = await ModuleParams.getInstance().getParamValueAsInt(ModuleMaintenance.PARAM_NAME_SEND_MSG3_WHEN_SHORTER_THAN_MINUTES);
if (Dates.add(this.planned_maintenance.start_ts, -timeout_minutes_msg3, TimeSegment.TYPE_MINUTE) <= Dates.now()) {
await PushDataServerController.getInstance().notifySimpleERROR(user_id, null, ModuleMaintenance.MSG3_code_text);
} else if (Dates.add(this.planned_maintenance.start_ts, -timeout_minutes_msg2, TimeSegment.TYPE_MINUTE) <= Dates.now()) {
await PushDataServerController.getInstance().notifySimpleWARN(user_id, null, ModuleMaintenance.MSG2_code_text);
} else if (Dates.add(this.planned_maintenance.start_ts, -timeout_minutes_msg1, TimeSegment.TYPE_MINUTE) <= Dates.now()) {
await PushDataServerController.getInstance().notifySimpleINFO(user_id, null, ModuleMaintenance.MSG1_code_text);
}
this.informed_users_tstzs[user_id] = Dates.now();
}
} |
<filename>test/controller/HomeControllerTest.java
package controller;
import akka.actor.ActorSystem;
import controllers.AssetsFinder;
import controllers.routes;
import org.junit.BeforeClass;
import org.junit.Test;
import play.Application;
import play.mvc.Call;
import play.mvc.Http;
import play.mvc.Result;
import play.shaded.ahc.org.asynchttpclient.AsyncHttpClient;
import play.shaded.ahc.org.asynchttpclient.AsyncHttpClientConfig;
import play.shaded.ahc.org.asynchttpclient.DefaultAsyncHttpClient;
import play.shaded.ahc.org.asynchttpclient.DefaultAsyncHttpClientConfig;
import play.shaded.ahc.org.asynchttpclient.netty.ws.NettyWebSocket;
import play.shaded.ahc.org.asynchttpclient.ws.WebSocket;
import play.test.Helpers;
import play.test.TestServer;
import play.test.WithApplication;
import play.twirl.api.Content;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.CompletableFuture;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static play.mvc.Http.Status.OK;
import static org.mockito.Mockito.mock;
import static play.test.Helpers.*;
/**
* Test HomeController
*
* @author <NAME>, <NAME>, <NAME>
*/
public class HomeControllerTest extends WithApplication {
    /**
     * Validates that the index route responds with a redirect (303 SEE_OTHER).
     */
    @Test
    public void testIndex() {
        running(fakeApplication(), () -> {
            Call action = routes.HomeController.index();
            Http.RequestBuilder request = Helpers.fakeRequest(action);
            Result result = route(fakeApplication(), request);
            assertEquals(303, result.status());});
    }
    /**
     * Validates a call on the router for the
     * {@code controllers.HomeController.search(searchkey: String,request:Request)} action and validates that the
     * returned result corresponds to {@code OK}.
     */
    @Test
    public void search(){
        running(fakeApplication(), () -> {
            Call action = routes.HomeController.search("java");
            Http.RequestBuilder request = Helpers.fakeRequest(action);
            Result result = route(fakeApplication(), request);
            assertEquals(OK, result.status());});
        //
    }
    /**
     * Validates a call on the router for the
     * {@code controllers.HomeController.similar(searchkey: String)} action and validates that the
     * returned result corresponds to {@code OK}.
     */
    @Test
    public void similar(){
        running(fakeApplication(), () -> {
            Call action = routes.HomeController.similar("ON1tzYUkiuE");
            Http.RequestBuilder request = Helpers.fakeRequest(action);
            Result result = route(fakeApplication(), request);
            assertEquals(OK, result.status());});
    }
    /**
     * Validates a call on the router for the
     * {@code controllers.HomeController.CVideos(channelID: String,keyword :String)} action and validates that the
     * returned result corresponds to {@code OK}.
     */
    @Test
    public void videos(){
        running(fakeApplication(), () -> {
            Call action = routes.HomeController.CVideos("UCiAuybSv94YrdXhrGECtAxQ", "java");
            Http.RequestBuilder request = Helpers.fakeRequest(action);
            Result result = route(fakeApplication(), request);
            assertEquals(OK, result.status());});
    }
    /**
     * Validates a call on the router for the
     * {@code controllers.routes.profile(channelID: String)} action and validates that the
     * returned result corresponds to {@code OK}.
     */
    @Test
    public void profile() {
        running(fakeApplication(), () -> {
            Call action = routes.HomeController.profile("UCiAuybSv94YrdXhrGECtAxQ");
            Http.RequestBuilder request = Helpers.fakeRequest(action);
            Result result = route(fakeApplication(), request);
            assertEquals(OK, result.status());});
        //
        //        Http.RequestBuilder request = new Http.RequestBuilder()
        //                .method(GET)
        //                .uri("/search");
        //
        //        Result result = route(app, request);
        //        assertEquals(OK, result.status());
    }
    /**
     * Validates the web socket connection by sending a request and awaiting for a response.
     * Starts a real test server on port 19001, connects with an async HTTP
     * client, and asserts the WebSocket handshake completed.
     */
    @Test
    public void ws(){
        TestServer server = testServer(19001);
        running(server, () -> {
            try {
                AsyncHttpClientConfig config = new DefaultAsyncHttpClientConfig.Builder().setMaxRequestRetry(0).build();
                AsyncHttpClient client = new DefaultAsyncHttpClient(config);
                WebSocketClient webSocketClient = new WebSocketClient(client);
                try {
                    String serverURL = "ws://localhost:19001/ws";
                    // Queue collects messages from the socket listener (capacity 10).
                    ArrayBlockingQueue<String> queue = new ArrayBlockingQueue<String>(10);
                    WebSocketClient.LoggingListener listener = new WebSocketClient.LoggingListener((message) -> {
                        try {
                            queue.put(message);
                        } catch (InterruptedException e) {
                            e.printStackTrace();
                        }
                    });
                    CompletableFuture<NettyWebSocket> completionStage = webSocketClient.call(serverURL, serverURL, listener);
                    WebSocket searchResult = completionStage.get();
                    assertTrue(searchResult != null);
                } finally {
                    client.close();
                }
            } catch (Exception e) {
                System.out.println(e);
            }
        });
    }
}
|
from argparse import ArgumentParser, Namespace
from blok.http_server import get_app
def parse_args(argv=None) -> Namespace:
    """Parse command-line options for the blok HTTP server.

    Args:
        argv: Optional argument list (defaults to ``sys.argv[1:]``); accepting
            it explicitly keeps the function testable. Backward compatible —
            existing ``parse_args()`` callers are unaffected.

    Returns:
        Namespace with ``bind`` (host), ``debug`` (bool flag), ``port`` (int).
    """
    parser = ArgumentParser()
    parser.add_argument("-b", "--bind", default="0.0.0.0")
    # Fixed: the original used `type=bool`, which treats ANY non-empty string
    # (including "-d False") as True. A store_true flag is the correct idiom.
    parser.add_argument("-d", "--debug", default=False, action="store_true")
    parser.add_argument("-p", "--port", default=8080, type=int)
    return parser.parse_args(argv)
# Script entry point: parse CLI options and serve the blok HTTP app.
if __name__ == "__main__":
    args = parse_args()
    app = get_app()
    # NOTE(review): run(host=..., port=..., debug=...) is a Flask-style
    # signature — confirm against blok.http_server.get_app.
    app.run(host=args.bind, port=args.port, debug=args.debug)
|
#! /bin/sh
# Realize a Nix derivation inside a chroot.
#
# Usage: $0 <target-dir> <derivation> [nix-store-path] [extra nix-store args...]
#   $1 tgt - chroot directory to prepare and enter
#   $2 drv - store path / derivation to realize inside the chroot
#   $3 nix - Nix installation to use; defaults to building nixUnstable
#
# NOTE(review): `&> /dev/null` below is a bashism; under a strict POSIX
# /bin/sh it parses as "background + redirect". Confirm the target shell.
tgt="$1"
drv="$2"
nix="${3:-$(nix-build --no-out-link '<nixpkgs>' -A nixUnstable)}"
# Drop the three positional args so "$@" holds only pass-through arguments.
shift ; shift; shift
# Copy the Nix installation and the derivation's closure into the chroot.
"$(dirname "$0")/deploy-nix-to-chroot.sh" "$tgt" "$nix" "$drv" &> /dev/null
# Mount the standard pseudo-filesystems (each guarded so reruns are no-ops).
mkdir -p "$tgt"/{dev,sys,proc}
test -e "$tgt/dev/null" || mount --bind /dev "$tgt/dev"
test -e "$tgt/sys/class" || mount sys -t sysfs "$tgt/sys"
test -e "$tgt/proc/self" || mount proc -t proc "$tgt/proc"
# Seed minimal network configuration from the host if the chroot lacks it.
test -e "$tgt/etc/resolv.conf" || cat /etc/resolv.conf >> "$tgt/etc/resolv.conf"
test -e "$tgt/etc/services" ||
test -L "$tgt/etc/services" ||
cat /etc/services >> "$tgt/etc/services"
test -e "$tgt/etc/protocols" ||
test -L "$tgt/etc/protocols" ||
cat /etc/protocols >> "$tgt/etc/protocols"
# Ensure /bin/sh exists inside the chroot, deploying nixpkgs bash if needed.
test -e "$tgt/bin/sh" ||
test -L "$tgt/bin/sh" ||
{
bash="$(nix-build --no-out-link '<nixpkgs>' -A bash)"
"$(dirname "$0")/deploy-nix-to-chroot.sh" "$tgt" "$nix" "$bash" &> /dev/null
mkdir -p "$tgt/bin"
ln -sf "$bash/bin/sh" "$tgt/bin/sh"
}
# Provide CA certificates at $SSL_CERT_FILE so HTTPS fetches work inside.
# NOTE(review): assumes SSL_CERT_FILE is set in the caller's environment.
test -e "$tgt/$SSL_CERT_FILE" ||
test -L "$tgt/$SSL_CERT_FILE" ||
{
cas="$(nix-build --no-out-link '<nixpkgs>' -A cacert)"
"$(dirname "$0")/deploy-nix-to-chroot.sh" "$tgt" "$nix" "$cas" &> /dev/null
mkdir -p "$(dirname "$tgt/$SSL_CERT_FILE")"
ln -sf "$cas/etc/ca-bundle.crt" "$tgt/$SSL_CERT_FILE"
}
# Finally realize the derivation inside the prepared chroot.
chroot "$tgt" "$nix/bin/nix-store" -r "$drv" "$@"
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.