text stringlengths 1 1.05M |
|---|
<filename>server.js
// Express + Socket.IO entry point: serves static assets and REST routes,
// attaches a CORS-guarded Socket.IO server, then starts the bot process.
const express = require('express');
const app = express();
require('dotenv').config();
const routes = require('./routes');
const Messages = require('./lib/messages');
const botInit = require('./lib/bot-utils');
const PORT = process.env.PORT || 80;

// App-wide shared state: the spawned bot child process, the Socket.IO
// server instance, and the in-memory message store.
app.locals.botProcess = null;
app.locals.io = null;
app.locals.messages = new Messages();

// parse incoming string or array data
app.use(express.urlencoded({ extended: true }));
app.use(express.json());
app.use(express.static('public'));
app.use('/', routes);

// Catch-all 404 for anything the routers above did not handle.
app.use(function (req, res) {
  res.writeHead(404, { 'Content-Type': 'text/html' });
  res.end('Not found.');
});

// Allowed CORS origins for Socket.IO: ORIGINS (comma-separated) wins over
// ORIGIN (single value); defaults to this host when neither is set.
let allowedOrigins;
if (process.env.ORIGINS) {
  allowedOrigins = process.env.ORIGINS.split(',');
} else if (process.env.ORIGIN) {
  allowedOrigins = [process.env.ORIGIN];
} else {
  allowedOrigins = [`http://localhost:${PORT}`];
}

const server = require('http').createServer(app);
app.locals.io = require('socket.io')(server, {
  cors: {
    origin: function (origin, callback) {
      // bypass the requests with no origin (like curl requests, mobile apps, etc )
      if (!origin) return callback(null, true);
      if (allowedOrigins.indexOf(origin) === -1) {
        var msg = `This site ${origin} does not have an access. Only specific domains are allowed to access it.`;
        return callback(new Error(msg), false);
      }
      return callback(null, true);
    },
    methods: ['GET'],
  }
});

// Earlier HTTP-level CORS middleware, kept for reference (note it used an
// undefined `allowedDomains`; the Socket.IO cors block above superseded it):
// app.use(cors({
//   origin: function (origin, callback) {
//     // bypass the requests with no origin (like curl requests, mobile apps, etc )
//     if (!origin) return callback(null, true);
//     if (allowedDomains.indexOf(origin) === -1) {
//       var msg = `This site ${origin} does not have an access. Only specific domains are allowed to access it.`;
//       return callback(new Error(msg), false);
//     }
//     return callback(null, true);
//   }
// }));

// Launch the bot with access to the shared locals above.
botInit(app.locals);

server.listen(PORT, () => {
  console.log(`API server listening on port ${PORT}`);
});
|
#!/bin/bash
# Switch this Mageia system to the cauldron (rolling development) repos
# and upgrade everything in place.

# Pull in the config-manager dnf subcommand plus the cauldron repo files;
# weak deps and docs are skipped to keep the footprint small.
dnf install -v -y \
    --setopt=install_weak_deps=False --nodocs \
    'dnf-command(config-manager)' mageia-repos-cauldron

# Batch the repo switches in a single dnf shell transaction: disable the
# stable release/updates repos, enable cauldron (+nonfree/tainted), and
# add a kernel.org mirror for cauldron core.
dnf shell -v -y <<EOF
repo disable mageia-x86_64
repo disable updates-x86_64
repo enable cauldron-x86_64
repo enable cauldron-x86_64-nonfree
repo enable cauldron-x86_64-tainted
config-manager --add-repo http://mirrors.kernel.org/mageia/distrib/cauldron/x86_64/media/core/release cauldron
run
EOF

# Full distribution upgrade against the newly enabled repos.
dnf upgrade -y -v --allowerasing --best --nodocs --setopt=install_weak_deps=False
|
<reponame>xjieinfo/xjgo<filename>xjcore/xjcrypto/uuid.go<gh_stars>1-10
package xjcrypto
import "github.com/google/uuid"
// Uuid returns a freshly generated random UUID rendered in its canonical
// 36-character string form.
func Uuid() string {
	return uuid.New().String()
}
|
#ifndef SPIN_DET_UTIL_H_
#define SPIN_DET_UTIL_H_

#include <vector>

#include "../data.pb.h"

// Static helpers for inspecting and mutating the occupied-orbital data of
// a protobuf SpinDeterminant (message definitions live in data.pb.h).
class SpinDetUtil {
 public:
  // Orbital indices currently occupied in spin_det.
  static std::vector<int> get_occupied_orbitals(
      const data::SpinDeterminant& spin_det);

  // True when orbital `orb` is occupied in spin_det.
  static bool is_occupied(const data::SpinDeterminant& spin_det, const int orb);

  // Count of occupied orbitals below `orb`.
  // NOTE(review): inferred from the name -- confirm against the .cc file.
  static int get_n_lower_elecs(
      const data::SpinDeterminant& spin_det, const int orb);

  // Highest occupied orbital index in spin_det.
  static int get_highest_orbital(const data::SpinDeterminant& spin_det);

  // Number of orbitals the determinant spans.
  // NOTE(review): semantics inferred from the name -- confirm in the .cc file.
  static int get_n_orbs_used(const data::Determinant& det);

  // Sets (occ == true) or clears (occ == false) the occupation of `orb`.
  static void set_occupation(
      data::SpinDeterminant* spin_det, const int orb, const bool occ);

  // Orbitals occupied in exactly one of lhs/rhs.
  // NOTE(review): "eor" suggests exclusive-or / symmetric difference --
  // confirm against the implementation.
  static std::vector<int> get_eor(
      const data::SpinDeterminant& lhs, const data::SpinDeterminant& rhs);

 private:
  // Inserts `orb` into the repeated field `orbs`.
  static void insert_orbital(
      google::protobuf::RepeatedField<google::protobuf::int32>* orbs,
      const int orb);

  // Removes `orb` from the repeated field `orbs`.
  static void remove_orbital(
      google::protobuf::RepeatedField<google::protobuf::int32>* orbs,
      const int orb);
};

#endif
|
<filename>RRTS/HttpUnit/httpunit-1.7/src/com/meterware/servletunit/ServletUnitHttpSession.java<gh_stars>0
package com.meterware.servletunit;
/********************************************************************************************************************
* $Id: ServletUnitHttpSession.java 678 2004-09-23 22:48:20Z russgold $
*
* Copyright (c) 2000-2004 <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
* documentation files (the "Software"), to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
* to permit persons to whom the Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial portions
* of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
* THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
* CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*
*******************************************************************************************************************/
import java.util.Date;
import java.util.Enumeration;
import java.util.Hashtable;
import java.net.URL;
import javax.servlet.http.HttpSession;
import javax.servlet.http.HttpSessionContext;
import javax.servlet.ServletContext;
/**
 * In-memory {@link HttpSession} implementation used by ServletUnit to
 * simulate a servlet container's session handling during tests.
 **/
class ServletUnitHttpSession implements HttpSession {

    /** Name of the cookie that carries the simulated session ID. **/
    final static public String SESSION_COOKIE_NAME = "JSESSION";

    private ServletContext _servletContext;
    private SessionListenerDispatcher _listenerDispatcher;

    /**
     * Creates a session belonging to the given servlet context. Session
     * lifecycle and attribute events are reported through the supplied
     * dispatcher.
     **/
    ServletUnitHttpSession( ServletContext servletContext, SessionListenerDispatcher listenerDispatcher ) {
        _servletContext = servletContext;
        _listenerDispatcher = listenerDispatcher;
    }

    /**
     * Returns the maximum time interval, in seconds, that the servlet engine will keep this session open
     * between client requests. You can set the maximum time interval with the setMaxInactiveInterval method.
     **/
    public int getMaxInactiveInterval() {
        if (_invalid) throw new IllegalStateException();
        return _maxInactiveInterval;
    }

    /**
     * Specifies the maximum length of time, in seconds, that the servlet engine keeps this session
     * if no user requests have been made of the session.
     **/
    public void setMaxInactiveInterval( int interval ) {
        if (_invalid) throw new IllegalStateException();
        _maxInactiveInterval = interval;
    }

    /**
     * Returns a string containing the unique identifier assigned to this session.
     * The identifier is assigned by the servlet engine and is implementation dependent.
     **/
    public String getId() {
        if (_invalid) throw new IllegalStateException();
        return _id;
    }

    /**
     * Returns the time when this session was created, measured
     * in milliseconds since midnight January 1, 1970 GMT.
     *
     * @exception IllegalStateException if you attempt to get the session's
     *                                  creation time after the session has
     *                                  been invalidated
     **/
    public long getCreationTime() {
        if (_invalid) throw new IllegalStateException();
        return _creationTime;
    }

    /**
     * Returns the last time the client sent a request associated with this session,
     * as the number of milliseconds since midnight January 1, 1970 GMT.
     **/
    public long getLastAccessedTime() {
        if (_invalid) throw new IllegalStateException();
        return _lastAccessedTime;
    }

    /**
     * Returns true if the Web server has created a session but the client
     * has not yet joined. For example, if the server used only
     * cookie-based sessions, and the client had disabled the use of cookies,
     * then a session would be new.
     **/
    public boolean isNew() {
        return _isNew;
    }

    /**
     * Invalidates this session and unbinds any objects bound to it.
     *
     * NOTE(review): the Servlet spec has invalidate() throw
     * IllegalStateException when called on an already-invalidated session;
     * this implementation silently allows repeated calls -- confirm that
     * is intended for the test harness.
     **/
    public void invalidate() {
        _listenerDispatcher.sendSessionDestroyed( this );
        _invalid = true;
        _values.clear();
    }

    /**
     * @deprecated no replacement.
     **/
    public HttpSessionContext getSessionContext() {
        return null;
    }

    /**
     * @deprecated as of JSDK 2.2, use getAttribute
     **/
    public Object getValue( String name ) {
        return getAttribute( name );
    }

    /**
     * @deprecated as of JSDK 2.2, use setAttribute
     **/
    public void putValue( String name, Object value ) {
        setAttribute( name, value );
    }

    /**
     * @deprecated as of JSDK 2.2, use removeAttribute
     **/
    public void removeValue( String name ) {
        removeAttribute( name );
    }

    /**
     * @deprecated as of JSDK 2.2, use getAttributeNames.
     **/
    public String[] getValueNames() {
        if (_invalid) throw new IllegalStateException();
        return (String[]) _values.keySet().toArray( new String[ _values.size() ]);
    }

    /**
     * Returns the object bound with the specified name in this session or null if no object of that name exists.
     **/
    public Object getAttribute( String name ) {
        if (_invalid) throw new IllegalStateException();
        return _values.get( name );
    }

    /**
     * Binds an object to this session, using the name specified. If an object of the same name
     * is already bound to the session, the object is replaced.
     * A null value is treated as removal; listeners are notified of the
     * add or replace accordingly.
     **/
    public void setAttribute( String name, Object value ) {
        if (_invalid) throw new IllegalStateException();
        if (value == null) {
            removeAttribute( name );
        } else if (!_values.containsKey( name )) {
            _values.put( name, value );
            _listenerDispatcher.sendAttributeAdded( this, name, value );
        } else {
            Object oldValue = _values.get( name );
            _values.put( name, value );
            _listenerDispatcher.sendAttributeReplaced( this, name, oldValue );
        }
    }

    /**
     * Removes the object bound with the specified name from this session. If the session does not
     * have an object bound with the specified name, this method does nothing.
     **/
    public void removeAttribute( String name ) {
        if (_invalid) throw new IllegalStateException();
        if (_values.containsKey( name )) {
            Object oldValue = _values.get( name );
            _values.remove( name );
            _listenerDispatcher.sendAttributeRemoved( this, name, oldValue );
        }
    }

    /**
     * Returns an array containing the names of all the objects bound to this session.
     * This method is useful, for example, when you want to delete all the objects bound to this session.
     **/
    public Enumeration getAttributeNames() {
        if (_invalid) throw new IllegalStateException();
        return _values.keys();
    }

    //---------------------------- methods added to HttpSession in JSDK 2.3 ----------------------------------------

    /**
     * Returns the ServletContext to which this session belongs.
     *
     * @since 1.3
     **/
    public ServletContext getServletContext() {
        return _servletContext;
    }

    //-------------------------------------------- package members -------------------------------------------------

    /**
     * This method should be invoked when a servlet joins an existing session. It will update the last access time
     * and mark the session as no longer new.
     **/
    void access() {
        _lastAccessedTime = new Date().getTime();
        _isNew = false;
    }

    /** Returns the URL recorded via {@link #setOriginalURL}. **/
    URL getOriginalURL() {
        return _originalURL;
    }

    /** Records a URL associated with this session (see getOriginalURL). **/
    void setOriginalURL( URL originalURL ) {
        _originalURL = originalURL;
    }

    /**
     * Sets the authenticated user information for a session.
     *
     * @param userName the name the user supplied when logging in
     * @param roles an array of role names assigned to the user
     **/
    void setUserInformation( String userName, String[] roles ) {
        _userName = userName;
        _roles = roles;
    }

    /** Name supplied via setUserInformation, or null when unauthenticated. **/
    String getUserName() {
        return _userName;
    }

    /** Roles supplied via setUserInformation, or null when unauthenticated. **/
    String[] getRoles() {
        return _roles;
    }

    /** True once invalidate() has been called on this session. **/
    boolean isInvalid() {
        return _invalid;
    }

    //------------------------------------- private members ---------------------------------------

    // Class-wide counter used to hand out sequential session IDs.
    private static int _NextID = 1;

    // Wall-clock time at which this session object was constructed.
    private final long _creationTime = new Date().getTime();

    private final String _id = Integer.toString( _NextID++ );

    private int _maxInactiveInterval;

    private long _lastAccessedTime = new Date().getTime();

    // Set once invalidate() has been called.
    private boolean _invalid;

    // Session attributes keyed by name.
    private Hashtable _values = new Hashtable();

    // Cleared by access() on the first request that joins this session.
    private boolean _isNew = true;

    private String _userName;

    private String[] _roles;

    private URL _originalURL;
}
|
-- Student roster: one row per student; every column is required.
CREATE TABLE student (
    -- application-supplied identifier
    id INT NOT NULL PRIMARY KEY,
    first_name VARCHAR(255) NOT NULL,
    last_name VARCHAR(255) NOT NULL,
    age INT NOT NULL,
    school VARCHAR(255) NOT NULL
);
// Route module: re-exports the shared FAQPage page-component as this
// route's default export (no route-specific logic needed).
import FAQPage from '@page-components/FAQPage';

export default FAQPage;
|
<gh_stars>1-10
/* ----------------------- UTILITIES ----------------------- */
/**
 * Exit fullscreen mode by invoking the first exit method the current
 * browser exposes: the standard API, then the legacy vendor-prefixed
 * variants.
 */
function closeFullscreen() {
  const exitMethods = [
    'exitFullscreen',       // standard
    'mozCancelFullScreen',  // Firefox
    'webkitExitFullscreen', // Chrome, Safari and Opera
    'msExitFullscreen',     // IE/Edge
  ];
  for (const method of exitMethods) {
    if (document[method]) {
      document[method]();
      return;
    }
  }
}
/**
 * Toggle fullscreen for the whole page: exit if any (standard or
 * vendor-prefixed) fullscreen element is active, otherwise request
 * fullscreen on the document element.
 *
 * Fix: the original only probed the legacy `requestFullScreen` spelling
 * and the webkit/moz prefixed entry points, never the standard
 * `requestFullscreen`, so browsers exposing only the standard API could
 * never enter fullscreen. The standard method is now tried first.
 */
function toggleFullscreen() {
  if (
    document.fullscreenElement ||
    document.mozFullScreenElement ||
    document.webkitFullscreenElement ||
    document.msFullscreenElement
  ) {
    closeFullscreen();
  } else {
    const element = document.documentElement;
    if (element.requestFullscreen) {
      element.requestFullscreen(); // standard API
    } else if (element.requestFullScreen) {
      element.requestFullScreen(); // legacy spelling
    } else if (element.webkitRequestFullScreen) {
      element.webkitRequestFullScreen(); // Chrome, Safari and Opera
    } else if (element.mozRequestFullScreen) {
      element.mozRequestFullScreen(); // Firefox
    }
  }
}
|
class BankAccount:
    """Simple in-memory bank account tracking a balance and its history."""

    def __init__(self):
        self.balance = 0
        # Chronological records: {'type', 'amount', 'balance'} per accepted
        # transaction. Fix: entries used to be meaningless
        # {'output_hidden_states': True} placeholders (pasted-in junk) that
        # carried no information about the transaction.
        self.transaction_history = []

    def deposit(self, amount):
        """Add ``amount`` to the balance; non-positive amounts are ignored."""
        if amount > 0:
            self.balance += amount
            self.transaction_history.append(
                {'type': 'deposit', 'amount': amount, 'balance': self.balance})

    def withdraw(self, amount):
        """Subtract ``amount`` when positive and covered by the balance;
        otherwise the request is silently ignored."""
        if amount > 0 and self.balance >= amount:
            self.balance -= amount
            self.transaction_history.append(
                {'type': 'withdraw', 'amount': amount, 'balance': self.balance})

    def check_balance(self):
        """Return the current balance."""
        return self.balance

    def get_transaction_history(self):
        """Return the list of accepted transactions, oldest first."""
        return self.transaction_history
def execute_with_db(db_node, df):
    """Bind the module-global ``db`` to a fresh connection, then execute
    ``db_node`` against ``df`` via ``run``.

    NOTE(review): ``connect_to_database`` is not defined in this file --
    presumably supplied elsewhere; confirm before relying on this helper.
    """
    global db  # Access the global db object
    # Set db to a valid database interface (e.g., connecting to the database)
    db = connect_to_database()  # Replace with actual database connection logic
    # Call the run function with the provided db_node and return the result
    return run(db_node, df)
def run(db_node, df):
    """Look up the executable registered under ``db_node`` in the global
    ``db`` and apply it to a copy of ``df`` (the original is not mutated).

    NOTE(review): ``cp`` is not defined in this file -- it appears to be a
    copy helper; confirm which copy semantics (shallow/deep) it provides.
    """
    func = db.get_executable(db_node)  # Retrieve executable function from the database
    cp_df = cp(df)  # Create a copy of the data frame
    return func(cp_df)  # Apply the retrieved function to the copied data frame
import { useCallback } from "react";
import { shallowEqual } from "react-redux";
import { createStyles, makeStyles } from "@material-ui/core";
import { useSelector, State } from "../../../store";
import { BeatId, NoteId } from "../../../store/types/pattern";
import useFirebaseDispatch from "../../../firebase/use-firebase-dispatch";
import GridTableBody from "./grid-table-body";
import GridTableHead from "./grid-table-head";
import { Table, TableContainer } from "./styles";
// Grid-cell styling: every barLen-th cell gets a thicker right border and
// the first cell of each bar (except the very first cell) a thicker left
// border, visually separating bars in the grid.
const useStyles = makeStyles((_theme) =>
  createStyles({
    cell: ({ barLen }: { barLen: number }) => ({
      [`&:nth-of-type(${barLen}n)`]: {
        borderRightWidth: 2,
      },
      [`&:nth-of-type(${barLen}n+1):not(:nth-of-type(1))`]: {
        borderLeftWidth: 2,
      },
    }),
  }),
);
// Selects the store slice the grid renders from: the track config, the
// selected track/pattern ids, and the selected pattern itself (if any).
const selector = (state: State) => ({
  trackConfig: state.track.track?.config,
  trackId: state.track.selectedTrackId,
  patternId: state.track.selectedPatternId,
  pattern: state.track.selectedPatternId
    ? state.track.patterns[state.track.selectedPatternId]
    : undefined,
});

type SelectorReturn = ReturnType<typeof selector>;

// Custom equality for useSelector: only re-render when the pattern id, the
// pattern's note ids, the notes' order values, or the track config change.
// Other note/beat details are deliberately ignored here (presumably
// subscribed to further down the tree -- confirm).
const selectorEqual = (a: SelectorReturn, b: SelectorReturn) =>
  a.patternId === b.patternId &&
  shallowEqual(
    a.pattern && Object.keys(a.pattern.notes),
    b.pattern && Object.keys(b.pattern.notes),
  ) &&
  shallowEqual(
    a.pattern &&
      Object.keys(a.pattern.notes).map(
        (note) => a.pattern?.notes[note as NoteId]!.order,
      ),
    b.pattern &&
      Object.keys(b.pattern.notes).map(
        (note) => b.pattern?.notes[note as NoteId]!.order,
      ),
  ) &&
  shallowEqual(a.trackConfig, b.trackConfig);
/**
 * Sequencer grid for the currently selected pattern: a table of notes
 * (rows) by beats (columns) whose cells toggle beat state in Firebase.
 * Renders nothing until a track config and a pattern are selected.
 */
export default function TrackerGrid() {
  const { trackConfig, trackId, patternId, pattern } = useSelector(
    selector,
    selectorEqual,
  );
  const styles = useStyles({ barLen: trackConfig?.barLen ?? 4 });
  const firebaseDispatch = useFirebaseDispatch();

  // Persist a beat toggle. Logs instead of throwing when no track/pattern
  // is selected so the grid cannot crash the page in that state.
  const toggleBeat = useCallback(
    ({
      note,
      beat,
      isActive,
    }: {
      note: NoteId;
      beat: BeatId;
      isActive: boolean;
    }) => {
      if (trackId !== undefined && patternId !== undefined) {
        firebaseDispatch
          .setBeat({
            id: beat,
            trackId,
            patternId,
            noteId: note,
            doc: {
              isActive,
            },
          })
          .catch(console.error);
      } else {
        console.error(
          "Can't update firebase beat because trackId or patternId aren't set",
        );
      }
    },
    // Fix: firebaseDispatch was missing from the dependency list, so the
    // callback could close over a stale dispatcher instance.
    [trackId, patternId, firebaseDispatch],
  );

  if (!trackConfig || !pattern) {
    return null;
  }

  return (
    <TableContainer>
      <Table>
        <GridTableHead
          patternLen={trackConfig.patternLen}
          cellClassName={styles.cell}
        />
        <GridTableBody
          key={patternId}
          pattern={pattern}
          patternLen={trackConfig.patternLen}
          toggleBeat={toggleBeat}
          cellClassName={styles.cell}
        />
      </Table>
    </TableContainer>
  );
}
|
def process_list(list):
    """Return a new list with every string in ``list`` lower-cased.

    The input sequence itself is left untouched.
    """
    lowered = []
    for item in list:
        lowered.append(item.lower())
    return lowered
def sort_by_frequency(list_of_strings):
    """Return the distinct items of ``list_of_strings`` ordered from most
    to least frequent.

    Ties keep first-seen order, matching the original implementation
    (a stable sort over dict-insertion-ordered keys).
    """
    # Local import keeps this block self-contained.
    from collections import Counter
    # Counter.most_common sorts by count descending and, for equal counts,
    # preserves the order elements were first encountered -- exactly the
    # behavior of the original hand-rolled count-dict + stable sort.
    return [item for item, _count in Counter(list_of_strings).most_common()]
#!/usr/bin/env sh
# Generated-style setup shim: sources each ament package's local_setup.sh
# (in dependency order) and then extends CMAKE_PREFIX_PATH.

_ament_share="/Users/lbajo/ros2_mod_ws/install/share"
for _ament_pkg in ament_copyright ament_lint_cmake ament_package ament_cmake_core ament_cmake_test ament_cmake_lint_cmake; do
  _ament_setup="$_ament_share/$_ament_pkg/local_setup.sh"
  # When AMENT_TRACE_SETUP_FILES is set, print each file before sourcing it.
  if [ -n "$AMENT_TRACE_SETUP_FILES" ]; then
    echo ". \"$_ament_setup\""
  fi
  # Source the package's setup hook only if it actually exists.
  if [ -f "$_ament_setup" ]; then
    . "$_ament_setup"
  fi
done
unset _ament_pkg _ament_setup _ament_share

export CMAKE_PREFIX_PATH="$AMENT_PREFIX_PATH:$CMAKE_PREFIX_PATH"
|
/* Copyright 2020 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/compiler/xla/pjrt/tpu_client.h"
#include <memory>
#include <vector>
#include "absl/container/inlined_vector.h"
#include "absl/memory/memory.h"
#include "absl/status/status.h"
#include "tensorflow/compiler/xla/client/client_library.h"
#include "tensorflow/compiler/xla/pjrt/local_device_state.h"
#include "tensorflow/compiler/xla/pjrt/pjrt_stream_executor_client.h"
#include "tensorflow/compiler/xla/pjrt/tracked_device_buffer.h"
#include "tensorflow/compiler/xla/service/shaped_buffer.h"
#include "tensorflow/compiler/xla/service/tpu_computation_placer.h"
#include "tensorflow/compiler/xla/shape.h"
#include "tensorflow/compiler/xla/shape_util.h"
#include "tensorflow/compiler/xla/status.h"
#include "tensorflow/compiler/xla/util.h"
#include "tensorflow/core/platform/casts.h"
#include "tensorflow/core/platform/errors.h"
#include "tensorflow/stream_executor/device_memory.h"
#include "tensorflow/stream_executor/lib/statusor.h"
#include "tensorflow/stream_executor/stream.h"
#include "tensorflow/stream_executor/tpu/tpu_executable_interface.h"
#include "tensorflow/stream_executor/tpu/tpu_executor_interface.h"
#include "tensorflow/stream_executor/tpu/tpu_platform_interface.h"
#include "tensorflow/stream_executor/tpu/tpu_stream.h"
namespace tf_tpu = tensorflow::tpu;
namespace xla {
namespace {
// LocalDeviceState specialization for TPU: device-to-device copies are
// routed through the TPU driver's local send/recv path instead of the
// generic stream memcpy.
class TpuDeviceState : public LocalDeviceState {
 public:
  TpuDeviceState(se::StreamExecutor* executor, LocalClient* client,
                 bool asynchronous);

  Status ThenMemcpyDeviceToDevice(se::Stream* transfer_stream,
                                  se::Stream* dst_stream,
                                  se::DeviceMemoryBase src_buffer,
                                  se::DeviceMemoryBase dst_buffer) override;
};

TpuDeviceState::TpuDeviceState(se::StreamExecutor* executor,
                               LocalClient* client, bool asynchronous)
    // Always constructed in kAsynchronous mode with event reuse disabled;
    // `asynchronous` is forwarded as the base class's own async flag.
    : LocalDeviceState(executor, client, LocalDeviceState::kAsynchronous,
                       asynchronous,
                       /*allow_event_reuse=*/false) {}

Status TpuDeviceState::ThenMemcpyDeviceToDevice(
    se::Stream* transfer_stream, se::Stream* dst_stream,
    se::DeviceMemoryBase src_buffer, se::DeviceMemoryBase dst_buffer) {
  // Enqueue the copy on the transfer stream's underlying TPU stream.
  // Note dst_stream is intentionally unused here: the TPU driver performs
  // the send/recv locally on the transfer stream.
  auto* transfer_tpu_stream = tensorflow::down_cast<tf_tpu::TpuStream*>(
      transfer_stream->implementation());
  TF_RETURN_IF_ERROR(transfer_tpu_stream->EnqueueOnTpuDeviceSendRecvLocal(
      src_buffer, dst_buffer));
  return Status::OK();
}
// PjRtClient implementation backed by the TPU platform.
class PjRtTpuClient : public PjRtStreamExecutorClient {
 public:
  PjRtTpuClient(LocalClient* client,
                std::vector<std::unique_ptr<PjRtStreamExecutorDevice>> devices,
                int task_id);

  StatusOr<DeviceAssignment> GetDefaultDeviceAssignment(
      int num_replicas, int num_partitions) const override;

  // TPU device-to-device transfers are not enqueued on the source stream.
  bool EnqueueD2DTransfersOnSrcStream() const override { return false; }

  StatusOr<absl::optional<std::string>> ExecutableFingerprint(
      const PjRtExecutable& executable) const override;
};

PjRtTpuClient::PjRtTpuClient(
    LocalClient* client,
    std::vector<std::unique_ptr<PjRtStreamExecutorDevice>> devices, int task_id)
    // No custom device/host allocators, no host-to-device staging, and no
    // GPU run options apply on TPU.
    : PjRtStreamExecutorClient(kTpuName, client, std::move(devices), task_id,
                               /*allocator=*/nullptr,
                               /*host_memory_allocator=*/nullptr,
                               /*should_stage_host_to_device_transfers=*/false,
                               /*gpu_run_options=*/nullptr) {}
StatusOr<DeviceAssignment> PjRtTpuClient::GetDefaultDeviceAssignment(
    int num_replicas, int num_partitions) const {
  tf_tpu::TpuPlatformInterface* platform =
      tf_tpu::TpuPlatformInterface::GetRegisteredPlatform();
  tf_tpu::TpuHostLocationExternal host = platform->GetTpuHostLocation();
  int num_local_devices = host.Cores(kTensorCore).size();
  // Prefer a host-local assignment when the computation fits on this
  // host's TensorCores.
  if (num_replicas * num_partitions <= num_local_devices) {
    return tf_tpu::TpuComputationPlacer::AssignLocalDevices(host, num_replicas,
                                                            num_partitions);
  }
  // Fallback to default global device assignment if we can't run locally.
  return PjRtStreamExecutorClient::GetDefaultDeviceAssignment(num_replicas,
                                                              num_partitions);
}
// Returns the TPU executable's fingerprint, used e.g. to detect identical
// compilations. Rejects executables that belong to a different client.
StatusOr<absl::optional<std::string>> PjRtTpuClient::ExecutableFingerprint(
    const PjRtExecutable& executable) const {
  if (executable.client() != this) {
    return InvalidArgument(
        "Passed executable from different client (platform '%s') to "
        "PjRtTpuClient::ExecutableFingerprint",
        executable.client()->platform_name());
  }
  if (executable.num_partitions() > 1) {
    // Only the first partition's executable is fingerprinted below.
    LOG(INFO) << "ExecutableFingerprint not fully implemented for MPMD "
                 "executables, fingerprint may not be unique.";
  }
  xla::TpuExecutableInterface* tpu_executable =
      tensorflow::down_cast<xla::TpuExecutableInterface*>(
          tensorflow::down_cast<const PjRtStreamExecutorExecutable*>(
              &executable)
              ->executables()[0]
              ->executable());
  return absl::optional<std::string>(tpu_executable->fingerprint());
}
// Builds one PjRtTpuDevice per TensorCore in the TPU topology. Cores that
// are local to this host get a LocalDeviceState (moved out of
// local_device_states); remote cores get device_ordinal -1 and no state.
StatusOr<std::vector<std::unique_ptr<PjRtStreamExecutorDevice>>> GetTpuDevices(
    LocalClient* client,
    std::vector<std::unique_ptr<LocalDeviceState>> local_device_states) {
  std::vector<std::unique_ptr<PjRtStreamExecutorDevice>> devices;
  tf_tpu::TpuTopologyExternal topology =
      tf_tpu::TpuPlatformInterface::GetRegisteredPlatform()->topology();

  // Map each local core's ID to its stream-executor device ordinal.
  std::map<int, int> core_id_to_device_ordinal;
  for (int i = 0; i < client->device_count(); ++i) {
    se::StreamExecutor* executor =
        client->backend().stream_executor(i).ValueOrDie();
    tf_tpu::TpuExecutorInterface* tpu_executor =
        tensorflow::down_cast<tf_tpu::TpuExecutorInterface*>(
            executor->implementation());
    core_id_to_device_ordinal[tpu_executor->GetCoreLocationExternal().Id()] = i;
  }

  for (const tf_tpu::TpuCoreLocationExternal& core :
       topology.cores(TpuCoreTypeEnum::kTensorCore)) {
    auto it = core_id_to_device_ordinal.find(core.Id());
    // -1 marks a core that is not attached to this host.
    int device_ordinal =
        (it != core_id_to_device_ordinal.end()) ? it->second : -1;
    int task_id = topology.IdForHost(core.host_coordinates());
    const tf_tpu::TpuDimensionsExternal coords = core.chip_coordinates();
    std::array<int, 3> coords_array = {coords.x, coords.y, coords.z};
    std::unique_ptr<LocalDeviceState> local_device_state;
    if (device_ordinal >= 0) {
      local_device_state = std::move(local_device_states[device_ordinal]);
    }
    auto device = absl::make_unique<PjRtTpuDevice>(
        core, std::move(local_device_state), task_id, coords_array,
        std::string(tf_tpu::TpuVersionEnumToString(topology.version())));
    devices.push_back(std::move(device));
  }
  return devices;
}
} // namespace
// Creates a PjRtClient for the locally attached TPU platform, retrying
// platform initialization until init_retry_timeout elapses.
StatusOr<std::shared_ptr<PjRtClient>> GetTpuClient(
    bool asynchronous, absl::Duration init_retry_timeout) {
  tf_tpu::TpuPlatformInterface* platform =
      tf_tpu::TpuPlatformInterface::GetRegisteredPlatform(
          /*initialize_platform=*/true, /*num_tries=*/1);
  if (platform == nullptr) {
    return InvalidArgument("TpuPlatform is not available.");
  }
  // NOTE: We retry in a loop since some pod failures are transient (e.g. some
  // RPCs may timeout waiting for other hosts to come up, but will succeed
  // at a later point if retried).
  auto start = absl::Now();
  while (true) {
    Status status = platform->Initialize({});
    if (status.ok()) {
      break;
    }
    LOG(INFO) << "TPU platform initialization failed: " << status;
    if ((absl::Now() - start) >= init_retry_timeout) {
      return status;
    }
    // NOTE(review): only 10 microseconds between attempts -- if failures
    // are RPC timeouts this retries very hot; confirm the interval.
    absl::SleepFor(absl::Microseconds(10));
  }
  CHECK(platform->Initialized());
  if (platform->VisibleDeviceCount() <= 0) {
    return InvalidArgument("No TPU devices found.");
  }
  LocalClientOptions options;
  options.set_platform(platform);
  TF_ASSIGN_OR_RETURN(LocalClient * client,
                      ClientLibrary::GetOrCreateLocalClient(options));

  // One TpuDeviceState per local stream executor.
  std::vector<std::unique_ptr<LocalDeviceState>> local_device_states;
  local_device_states.reserve(client->device_count());
  for (int i = 0; i < client->device_count(); ++i) {
    se::StreamExecutor* executor =
        client->backend().stream_executor(i).ValueOrDie();
    local_device_states.push_back(
        absl::make_unique<TpuDeviceState>(executor, client, asynchronous));
  }
  TF_ASSIGN_OR_RETURN(auto devices,
                      GetTpuDevices(client, std::move(local_device_states)));
  int task_id = platform->GetTpuHostLocation().Id();
  return std::shared_ptr<PjRtClient>(
      absl::make_unique<PjRtTpuClient>(client, std::move(devices), task_id));
}
} // namespace xla
|
#!/bin/bash
# Launch single-node, 8-device training of configs/ms1mv3_r50.py.

# Stream python output immediately (no stdout buffering).
export PYTHONUNBUFFERED=1
echo PYTHONUNBUFFERED=$PYTHONUNBUFFERED

# Allow NCCL to launch kernels from parallel threads.
export NCCL_LAUNCH_MODE=PARALLEL
echo NCCL_LAUNCH_MODE=$NCCL_LAUNCH_MODE

# Debug switches off.
# NOTE(review): NCCL_DEBUG normally takes a level such as INFO/WARN; the
# literal "False" works only because it matches no level -- confirm.
export NCCL_DEBUG=False
export ONEFLOW_DEBUG_MODE=False

#CUDA_VISIBLE_DEVICES='1'
python train.py configs/ms1mv3_r50.py --device_num_per_node 8
<filename>frontend/src/js/external-forms/form-components/DropzoneList.tsx<gh_stars>10-100
import styled from "@emotion/styled";
import React, { ReactNode } from "react";
import { DropTargetMonitor } from "react-dnd";
import IconButton from "../../button/IconButton";
import InfoTooltip from "../../tooltip/InfoTooltip";
import Dropzone, {
ChildArgs,
PossibleDroppableObject,
} from "../../ui-components/Dropzone";
import DropzoneWithFileInput, {
DragItemFile,
} from "../../ui-components/DropzoneWithFileInput";
import Label from "../../ui-components/Label";
import Optional from "../../ui-components/Optional";
// Card-style wrapper around one dropped item.
const ListItem = styled("div")`
  position: relative;
  padding: 5px;
  box-shadow: 0 0 3px 0 rgba(0, 0, 0, 0.1);
  background-color: white;
  border-radius: ${({ theme }) => theme.borderRadius};
  margin-bottom: 5px;
`;

// Delete button pinned to the item's top-right corner.
const StyledIconButton = styled(IconButton)`
  position: absolute;
  top: 0;
  right: 0;
`;

// Horizontal layout for the label and its info tooltip.
const Row = styled("div")`
  display: flex;
  align-items: center;
`;
interface PropsT<DroppableObject> {
  className?: string;
  /** Heading shown above the list. */
  label?: ReactNode;
  /** Text for the info tooltip rendered next to the label. */
  tooltip?: string;
  /** Renders an "optional" marker in the label row. */
  optional?: boolean;
  /** Render prop for the dropzone's interior. */
  dropzoneChildren: (args: ChildArgs) => ReactNode;
  /** Already-dropped entries; each is rendered in its own ListItem. */
  items: ReactNode[];
  /** Drag item types the dropzone accepts. */
  acceptedDropTypes: string[];
  onDrop: (
    props: DroppableObject | DragItemFile,
    monitor: DropTargetMonitor,
  ) => void;
  /** When set, native files may also be dropped or picked via file input. */
  onDropFile?: (file: File) => void;
  /** Called with the index of the item whose delete button was clicked. */
  onDelete: (idx: number) => void;
  /** When true, hide the dropzone as soon as one item exists. */
  disallowMultipleColumns?: boolean;
}
/**
 * Renders an optional label row, the list of already-dropped items (each
 * with a delete button), and -- unless multiple columns are disallowed
 * and one item already exists -- a dropzone for adding more.
 */
const DropzoneList = <DroppableObject extends PossibleDroppableObject>(
  props: PropsT<DroppableObject>,
) => {
  // allow at least one column
  const showDropzone =
    (props.items && props.items.length === 0) || !props.disallowMultipleColumns;

  // With an onDropFile handler we also accept native file drops/selection.
  // NOTE(review): the non-null assertion on props.onDropFile below relies
  // on plain Dropzone ignoring its onSelectFile prop -- confirm.
  const DropzoneClass = props.onDropFile ? DropzoneWithFileInput : Dropzone;

  return (
    <div className={props.className}>
      <Row>
        {props.label && (
          <Label>
            {props.optional && <Optional />}
            {props.label}
          </Label>
        )}
        {props.tooltip && <InfoTooltip text={props.tooltip} />}
      </Row>
      {props.items && props.items.length > 0 && (
        <div>
          {props.items.map((item, i) => (
            <ListItem key={i}>
              <StyledIconButton
                icon="times"
                onClick={() => props.onDelete(i)}
              />
              {item}
            </ListItem>
          ))}
        </div>
      )}
      {showDropzone && (
        <DropzoneClass
          acceptedDropTypes={props.acceptedDropTypes}
          onDrop={props.onDrop}
          onSelectFile={props.onDropFile!}
        >
          {props.dropzoneChildren}
        </DropzoneClass>
      )}
    </div>
  );
};

export default DropzoneList;
|
class Node:
    """Singly linked chain entry holding one key/value pair."""

    def __init__(self, key, value):
        self.key = key      # lookup key (expected to be a string)
        self.value = value  # payload stored under the key
        self.next = None    # next entry in the same bucket chain


class HashTable:
    """Fixed-size hash table with separate chaining for collisions."""

    def __init__(self, size):
        self.size = size
        # One bucket per slot; each bucket is the head Node of a chain.
        self.table = [None] * size

    def hash_function(self, key):
        """Map a string key to a bucket index (sum of char codes mod size)."""
        total = 0
        for ch in key:
            total += ord(ch)
        return total % self.size

    def insert(self, key, value):
        """Store value under key, overwriting any existing value for key."""
        bucket = self.hash_function(key)
        node = self.table[bucket]
        if node is None:
            self.table[bucket] = Node(key, value)
            return
        while True:
            if node.key == key:
                node.value = value  # key already present: update in place
                return
            if node.next is None:
                node.next = Node(key, value)  # append at chain tail
                return
            node = node.next

    def get(self, key):
        """Return the value stored under key, or None when absent."""
        node = self.table[self.hash_function(key)]
        while node is not None:
            if node.key == key:
                return node.value
            node = node.next
        return None

    def remove(self, key):
        """Unlink the chain entry matching key; no-op when absent."""
        bucket = self.hash_function(key)
        node = self.table[bucket]
        trailer = None
        while node is not None:
            if node.key == key:
                if trailer is None:
                    self.table[bucket] = node.next  # removing chain head
                else:
                    trailer.next = node.next
                return
            trailer = node
            node = node.next
#!/usr/bin/env bash
# Compile the sample app and build a static GraalVM native image from it.
set -ex

# Plain javac compile into ./classes.
javac -d classes src/main/java/com/github/demomon/jpc/graal/App.java

# Ahead-of-time compile to a static binary named jpc-graal; unsupported
# constructs are deferred to runtime errors instead of failing the build.
native-image -ea --no-server -H:Name=jpc-graal -H:Class=com.github.demomon.jpc.graal.App -H:+ReportUnsupportedElementsAtRuntime --static -cp classes/
|
# If GFTP is selected for inclusion, force GTK2 in as well.
# Fix: '[' with '==' is a bashism; use POSIX '=' so this also works when
# the script runs under plain /bin/sh (dash, busybox, etc.).
if [ "${INCGFTP}" = "Y" ]; then
    INCGTKTWO="Y"
fi
|
<reponame>vmware-tanzu/watch-proxy
// Copyright 2018-2019 VMware, Inc.
// SPDX-License-Identifier: Apache-2.0
package config
import (
"io/ioutil"
"os"
"sort"
"testing"
"time"
)
// TestReadConfig checks that test_config.yaml parses into the expected
// endpoint URL and the expected set of watched resource names (resource
// order is normalized by sorting before comparison).
func TestReadConfig(t *testing.T) {
	// The configuration test_config.yaml is expected to contain.
	goodConfig := Config{
		Endpoints: []RemoteEndpoint{
			RemoteEndpoint{
				Type: "http",
				Url:  "http://test.default.svc.cluster.local",
			},
		},
		ResourcesWatch: []Resource{
			Resource{
				Name: "namespaces",
			},
			Resource{
				Name: "pods",
			},
			Resource{
				Name: "deployments",
			},
		},
	}
	testConf, err := ReadConfig("test_config.yaml")
	if err != nil {
		t.Errorf(err.Error())
	}
	if goodConfig.Endpoints[0].Url != testConf.Endpoints[0].Url {
		t.Errorf("RemoteEndpoint Configurations do not match, got: %v, want: %v.",
			testConf.Endpoints[0].Url, goodConfig.Endpoints[0].Url)
	}
	// Compare resource names irrespective of declaration order.
	goodConfigResources := []string{}
	testConfigResources := []string{}
	for _, r := range goodConfig.ResourcesWatch {
		goodConfigResources = append(goodConfigResources, r.Name)
	}
	for _, r := range testConf.ResourcesWatch {
		testConfigResources = append(testConfigResources, r.Name)
	}
	sort.Strings(goodConfigResources)
	sort.Strings(testConfigResources)
	for i := 0; i < len(goodConfigResources); i++ {
		if goodConfigResources[i] != testConfigResources[i] {
			t.Errorf("ResourcesWatch Configurations do not match, got: %v, want: %v.",
				testConfigResources[i], goodConfigResources[i])
		}
	}
}
// TestDiffConfig checks that DiffConfig classifies resources into new,
// stale, and currently-watched sets when the resource list changes.
func TestDiffConfig(t *testing.T) {
	previous := []Resource{
		{Name: "namespaces"},
		{Name: "pods"},
	}
	updated := []Resource{
		{Name: "deployments"},
		{Name: "pods"},
	}
	wantWatch := []string{"deployments", "pods"}
	cfg := Config{}
	cfg.DiffConfig(previous, updated)
	for i := 0; i < len(cfg.NewResources); i++ {
		if cfg.NewResources[i].Name != updated[i].Name {
			t.Errorf("NewResources Configurations do not match, got: %v, want: %v.",
				cfg.NewResources[i].Name, updated[i].Name)
		}
	}
	// Only "namespaces" disappeared, so it is the sole expected stale entry.
	for i := 0; i < len(cfg.StaleResources); i++ {
		if cfg.StaleResources[i].Name != "namespaces" {
			t.Errorf("StaleResources Configurations do not match, got: %v, want: %v.",
				cfg.StaleResources[i].Name, "namespaces")
		}
	}
	gotWatch := []string{}
	for _, r := range cfg.ResourcesWatch {
		gotWatch = append(gotWatch, r.Name)
	}
	sort.Strings(gotWatch)
	for i := 0; i < len(gotWatch); i++ {
		if gotWatch[i] != wantWatch[i] {
			t.Errorf("ResourcesWatch Configurations do not match, got: %v, want: %v.",
				gotWatch[i], wantWatch[i])
		}
	}
}
// TestFileWatcher exercises fileWatcher: it creates a file, starts the
// watcher, rewrites the file, and drains the change notification so the
// watcher goroutine is not left blocked on its channel send.
func TestFileWatcher(t *testing.T) {
	fileChange := make(chan bool)
	if err := ioutil.WriteFile("./file.test", []byte("foo"), 0644); err != nil {
		t.Fatalf("could not create test file: %v", err)
	}
	// Register cleanup immediately after creation, not after the second write.
	defer os.Remove("./file.test")
	fileWatcher(fileChange, "./file.test")
	// Give the watcher time to establish its watch before the file changes.
	time.Sleep(time.Second * 3)
	// Drain a single change event. (Idiomatic receive replaces the previous
	// labeled for/select around `case _ = <-fileChange` with a trailing return.)
	go func() {
		<-fileChange
	}()
	if err := ioutil.WriteFile("./file.test", []byte("test"), 0644); err != nil {
		t.Fatalf("could not rewrite test file: %v", err)
	}
}
|
def resolve_packages(commands):
    """Replay shell-like commands and report directories where `npm install` ran.

    Tracks a current path starting at '/': a ``cd ../../`` resets to the root,
    any other ``cd <arg>`` appends its argument verbatim, and each
    ``npm install`` records the directory at that moment. Returns the recorded
    directories as a list (backed by a set, so order is unspecified).
    """
    cwd = '/'
    seen = set()
    for cmd in commands:
        if cmd.startswith('cd'):
            target = cmd.split(' ')[1]
            cwd = '/' if target == '../../' else cwd + target
        elif cmd == 'npm install':
            seen.add(cwd)
    return list(seen)
var express = require('express');
var router = express.Router();
var passport = require('passport');
var LocalStrategy = require('passport-local').Strategy;
var mongoq = require('mongoq');
// The mongo handle is created by the parent module before this file is required.
var db = module.parent.exports.db;
// Username/password login: match a user document by the exact pair.
// SECURITY NOTE(review): the query matches the password in plaintext, which
// implies passwords are stored unhashed; they should be hashed (e.g. bcrypt)
// and verified in code — confirm the users schema before changing.
passport.use(new LocalStrategy({
    usernameField: 'username',
    passwordField: 'password'
  },
  function(username, password, done) {
    db.collection('users')
      .find({username:username,password:password})
      .toArray()
      .done(function(data){
        if(!data[0]){
          // No matching user: report a generic authentication failure.
          return done({
            'errors': 'Authentication Failed.'
          });
        }
        else{
          return done(null,data[0]);
        }
      })
      .fail( function( err ) {
        return done(err);
      });
  }
));
// Store only the user id in the session...
passport.serializeUser(function(user, done) {
  done(null, user._id);
});
// ...and re-load the full user document from mongo on each request.
passport.deserializeUser(function(id, done) {
  id = mongoq.mongodb.BSONPure.ObjectID.createFromHexString(id);
  db.collection('users')
    .find({_id:id})
    .toArray()
    .done(function(data){
      done(null,data[0]);
    })
    .fail( function( err ) {
      done(err);
    });
});
|
<reponame>feeedback/hexlet_professions_backend<gh_stars>0
// sc: https://ru.hexlet.io/courses/js-data-abstraction/lessons/interface/exercise_unit
// В этой задаче, тесты написаны для отрезков, которые в свою очередь используют точки.
// Ваша задача, реализовать интерфейсные функции для работы с точками. Внутреннее
// представление точек должно быть основано на полярной системе координат, хотя интерфейс
// предполагает работу с декартовой системой (снаружи).
// points.js
// Реализуйте и экспортируйте интерфейсные функции точек:
// makeDecartPoint. Принимает на вход координаты и возвращает точку. Уже реализован.
// getX
// getY
// const x = 4;
// const y = 8;
// // point хранит в себе данные в полярной системе координат
// const point = makeDecartPoint(x, y);
// // Здесь происходит преобразование из полярной в декартову
// getX(point); // 4
// getY(point); // 8
// Подсказки
// Трансляция декартовых координат в полярные была описана в теории
// Получить x можно по формуле radius * cos(angle)
// Получить y можно по формуле radius * sin(angle)
// Interface functions over a polar-coordinate point representation:
// makeDecartPoint stores Cartesian (x, y) internally as {angle, radius}.
const makeDecartPoint = (x, y) => {
  const point = {
    angle: Math.atan2(y, x),
    radius: Math.sqrt(x ** 2 + y ** 2),
  };
  return point;
};
// BEGIN (write your solution here)
const getAngle = (point) => point.angle;
const getRadius = (point) => point.radius;
// Convert one polar axis back to Cartesian: x = r*cos(a), y = r*sin(a).
// BUG FIX: use Math.round, not Math.floor, to recover the original integer
// coordinate. Floating-point error can yield e.g. 3.9999999999999996 for an
// exact 4 (floor -> 3), and for negative coordinates -4.0000000000000001
// would floor to -5. Rounding to the nearest integer is robust both ways.
const polarToDecart = (radius, angle, axis) => {
  const fn = { x: 'cos', y: 'sin' };
  return Math.round(radius * Math[fn[axis]](angle));
};
const getX = (point) => polarToDecart(getRadius(point), getAngle(point), 'x');
const getY = (point) => polarToDecart(getRadius(point), getAngle(point), 'y');
// END
export { makeDecartPoint, getX, getY };
|
<reponame>nabeelkhan/Oracle-DBA-Life
set echo on
-- Enable SQL trace for this session so the work below lands in a server
-- trace file under udump.
alter session set sql_trace=true;
-- Traceable workload: fetch 10000 rows and discard them; the inline comment
-- makes the statement easy to spot in the trace output.
begin
for x in ( select /* this is my very nice Comment */ *
from big_table.big_table
where rownum <= 10000 )
loop
null;
end loop;
end;
/
pause
connect /
-- Format the newest trace file with tkprof and open the report.
host tkprof `ls -t $ORACLE_HOME/admin/$ORACLE_SID/udump/*ora_*.trc | head -1` ./tk.prf sys=no
edit tk.prf
|
#!/usr/bin/env bash
# Mason package recipe for Google Benchmark 1.0.0, built as a static library.
MASON_NAME=benchmark
MASON_VERSION=1.0.0-1
MASON_LIB_FILE=lib/libbenchmark.a
# Pull in the shared mason build framework (defines mason_download etc.).
. ${MASON_DIR}/mason.sh
function mason_load_source {
    # Fetch the pinned release tarball; the hash verifies the download.
    mason_download \
        https://github.com/google/benchmark/archive/v1.0.0.tar.gz \
        dcf87e5faead951fd1e9ab103cb36a7c8ebe4837
    mason_extract_tar_gz
    export MASON_BUILD_PATH=${MASON_ROOT}/.build/benchmark-1.0.0
}
# Configure and build with CMake, then install into the mason prefix.
function mason_compile {
    rm -rf build
    mkdir -p build
    cd build
    # BUG FIX: quote (and default) the platform variable — when MASON_PLATFORM
    # is unset the unquoted expansion made `[` fail with a syntax error.
    if [ "${MASON_PLATFORM:-}" = 'ios' ] ; then
        # Make sure CMake thinks we're cross-compiling and manually set the exit codes
        # because CMake can't run the test programs
        echo "set (CMAKE_SYSTEM_NAME Darwin)" > toolchain.cmake
        cmake \
            -DCMAKE_TOOLCHAIN_FILE=toolchain.cmake \
            -DRUN_HAVE_STD_REGEX=1 \
            -DRUN_HAVE_POSIX_REGEX=0 \
            -DRUN_HAVE_STEADY_CLOCK=0 \
            -DCMAKE_CXX_FLAGS="${CFLAGS:-}" \
            -DCMAKE_BUILD_TYPE=Release \
            -DCMAKE_INSTALL_PREFIX="${MASON_PREFIX}" \
            -DBENCHMARK_ENABLE_LTO=ON \
            -DBENCHMARK_ENABLE_TESTING=OFF \
            ..
    else
        cmake \
            ${MASON_CMAKE_TOOLCHAIN} \
            -DCMAKE_BUILD_TYPE=Release \
            -DCMAKE_INSTALL_PREFIX="${MASON_PREFIX}" \
            -DBENCHMARK_ENABLE_LTO=ON \
            -DBENCHMARK_ENABLE_TESTING=OFF \
            ..
    fi
    make install -j${MASON_CONCURRENCY}
}
# Compiler flags consumers need to build against the installed headers.
function mason_cflags {
    echo -isystem ${MASON_PREFIX}/include
}
# benchmark uses threads internally, so linkers need pthread.
function mason_ldflags {
    echo -lpthread
}
# Path of the static archive produced by this recipe.
function mason_static_libs {
    echo ${MASON_PREFIX}/${MASON_LIB_FILE}
}
mason_run "$@"
|
//Copyright 2019 <NAME>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
// documentation files (the "Software"), to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the
// Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
// WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
// OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
package vrows
import "github.com/stretchr/testify/mock"
// RowserMock mocks a Rowser (an iterator over result rows) via testify's mock.
type RowserMock struct {
	mock.Mock
}
// Next returns the next mocked Rower, or nil when the expectation was
// configured to yield nil.
func (m *RowserMock) Next() Rower {
	args := m.Called()
	if v := args.Get(0); v != nil {
		return v.(Rower)
	}
	return nil
}
// Close reports whatever error the mock expectation was configured with.
func (m *RowserMock) Close() error {
	return m.Called().Error(0)
}
// RowerMock mocks a single result row.
type RowerMock struct {
	mock.Mock
	// ScanMock allows you to edit the values of the scan to more adequately mock the request
	ScanMock func(values ...interface{})
}
// Scan records the call with testify, then lets the optional ScanMock hook
// write values into the provided scan targets; returns the configured error.
func (m *RowerMock) Scan(values ...interface{}) error {
	args := m.Called(values)
	if hook := m.ScanMock; hook != nil {
		hook(values...)
	}
	return args.Error(0)
}
// Columns returns the mocked column-name list.
func (m *RowerMock) Columns() (columnNames []string) {
	return m.Called().Get(0).([]string)
}
|
<gh_stars>0
// Storybook stories (compiled JS output) for the ImageStamp control.
import React from 'react';
import { ImageStamp } from '../../../index';
export default {
    title: 'controls/ImageStamp',
    component: ImageStamp,
};
// Default story: large, fully rounded stamp.
export var main = function () { return (React.createElement(ImageStamp, { src: '//v2.grommet.io/assets/Wilderpeople_Ricky.jpg', size: 'large', round: 'full' })); };
// Size variant: medium.
export var size = function () { return (React.createElement(ImageStamp, { src: '//v2.grommet.io/assets/Wilderpeople_Ricky.jpg', size: 'medium', round: 'full' })); };
|
<reponame>soupwork/Cisco7to8converter
#Dougs Cisco Password 7 to Secret 8 Converter
# copyright 2019 <NAME>
# This is my Main Program
"""
This program will convert a cisco password 7 (insecure) into a cisco secret 8 (sha256)
This program will input a csv (data: hostname, IP Address, Username, password7)
This program will SSH to a router, and use that to generate the SHA256 secret8
The csv will have a space res'd for the secret 8
The program will add to the csv, the plaintext password and secret8. This will be turned off in production.
The program can also be used to only input a password 7 and display a plaintext word.
"""
#
#
#Sample input lines
#
# python main728.py -f "e:\dougsprogs\Convert728\convert7to8PKG\\testdata728.csv"
# python main728.py -f test.csv -ip 192.168.0.1 -ip 192.168.1.1 -ip 192.168.20.1
# python main728.py -f test.csv -ip 192.168.0.1 -tr 192.168.20.1
# python main728.py -help
# python main728.py -p7 13351601181B0B382F747B
#
import argparse
import getpass
import datetime
import netmiko
#timestamp=datetime.datetime.now()
from convert7to8PKG.cisco7decrypt import decode
import convert7to8PKG.model728 as model
import convert7to8PKG.view728CLI as viewCLI
import convert7to8PKG.controller728 as controller
class CLIparams:
    """Parse command-line options and expose them as attributes plus a dict.

    Attributes:
        filename: value of -f (input/append CSV), or None.
        pass7:    value of -p7 (a type-7 password to decrypt), or None.
        tr:       value of -tr (test/sha router IP), or None.
        iplist:   list of -ip addresses (argparse 'append'), or None.
        cliDict:  all options keyed by upper-case names, for the model layer.
    """

    def __init__(self):
        inputargs = argparse.ArgumentParser()
        inputargs.add_argument('-p7',
            help='put in a password 7 after -p7 to have program decrypt and exit')
        inputargs.add_argument('-gui', action='store_true',
            help='this flag will launch the GUI, if I have finished it')
        inputargs.add_argument('-tr',
            help='this is a testrouter or sharouter to generate/verify the sha256 secret works')
        inputargs.add_argument('-log', action='store_true',
            help='this will create a log file "Convert7to8_Log_datetime"')
        inputargs.add_argument('-logfile', help='same as log, but allows user to set filename')
        inputargs.add_argument('-verbose', action='store_true',
            help='this will store extra detail in log and plaintext passwords \
            in datafile as well as hashes')
        inputargs.add_argument('-f', action='store',
            help='This option allowers user to specify a file for input or append.')
        inputargs.add_argument('-change', action='store_true',
            help='when true(default), this flag will attempt to apply the change to router "')
        inputargs.add_argument('-verify', action='store_true',
            help='when true(default), this flag will attempt to ssh into the router to verify \
            the changed password. It probably won\'t work with Tacacs or Radius')
        inputargs.add_argument('-ip', action='append',
            help='use this option to specify one or more ip addresses to change')
        cliargs = inputargs.parse_args()
        self.filename = cliargs.f
        # BUG FIX: these attributes are read by main() and showPW7() but were
        # never stored, causing AttributeError (e.g. `options.pass7`).
        self.pass7 = cliargs.p7
        self.tr = cliargs.tr
        self.iplist = cliargs.ip
        # NOTE: the 'VERIFIY' key spelling is preserved intentionally — other
        # modules may already read it.
        self.cliDict = {'IPADDRESS': cliargs.ip, 'TESTROUTER': cliargs.tr, 'LOG': cliargs.log,
                        'LOGFILE': cliargs.logfile, 'VERBOSE': cliargs.verbose,
                        'CHANGE': cliargs.change, 'VERIFIY': cliargs.verify,
                        'FILENAME': cliargs.f, 'GUI': cliargs.gui}

    def showPW7(self):
        """Decrypt the stored type-7 password, print it, and return the plaintext."""
        print("show PW7")
        plaintext = decode(self.pass7)
        print("plaintext is ", plaintext)
        return plaintext
#End CLIparams
def main():
    """Entry point: either decrypt a single -p7 password, or run the
    model/view flow for converting passwords on one or more routers."""
    print("inside main fn")
    options = CLIparams()
    #print("manually setting options for testing")
    #options.setTestOptions()
    if options.pass7:
        # -p7 given: just decrypt and display, nothing else.
        print("Program is decrypting")
        plaintext = options.showPW7()
    else:  # password7 option does not have a value
        mainmodel = model.InitializeModel(options.cliDict)
        view = viewCLI.UserPrompts()  # login details is part of "view"
        loginID = view.getLoginID()
        print("login id is ", loginID)
        # BUG FIX: the key must be the string 'IPADDRESS'; the bare name
        # IPADDRESS raised NameError at runtime.
        if options.cliDict['IPADDRESS']:
            initialDict = mainmodel.objdict
            print("initial dictionary is ", initialDict)
            if not options.tr:
                # Default the test router to the first -ip address.
                options.tr = options.iplist[0]
            if not options.filename:  # IP but no filename
                testfilename = view.suggestFilename()
                print("suggested filename ", testfilename)
                createYN = view.createFileYN(newfile=testfilename)
                # BUG FIX: `.uppper()` was a typo for `.upper()`.
                if createYN.upper() == "Y":
                    print("program will create the file ", testfilename)
                    mainmodel.filename = testfilename
                    mainmodel.createFile()
                else:
                    print("alrighty then. The program will go on without creating the file")
        elif options.filename:  # filename but no IP Addresses
            pass
        else:  # no IP and no filename - prompt user for router IP
            pass
#END MAIN
if __name__ == "__main__":
print("startiing from __main__")
main()
# testcontroller=controller.RemoteRouter(loginID,options.tr)
# hostname = testcontroller.getHostname(options.tr)
#print("plaintext is ",checkpw) #this only applies for -p7
#end main program
|
boolean isLowerCase(char character) {
return (character >= 'a' && character <= 'z');
} |
package epizza.delivery.order;
import org.springframework.hateoas.PagedResources;
/** Client abstraction for the remote order service. */
public interface OrderServiceClient {
    /** Assign the given delivery job to the order with the given id. */
    void selectOrder(Integer orderId, DeliveryJob job);
    /** Fetch the (paged) collection of orders from the order service. */
    PagedResources<Order> getOrders();
}
/*
* CPAchecker is a tool for configurable software verification.
* This file is part of CPAchecker.
*
* Copyright (C) 2007-2014 <NAME>
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
* CPAchecker web page:
* http://cpachecker.sosy-lab.org
*/
package org.sosy_lab.cpachecker.cfa.types.java;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import com.google.common.base.Joiner;
import com.google.common.base.Strings;
/**
* Description of a simple Java structure's type.
*
* These descriptions are mostly merely primitive types, but include special cases like
* <code>null</code> either. Actually, possible concrete types are all enum constants of
* {@link JBasicType}.
*/
public class JSimpleType implements JType {

  private static final long serialVersionUID = 7153757299840260748L;

  /** The basic (primitive or special) type this instance stands for. */
  private final JBasicType type;

  /** Whether {@link #type} is one of the eight Java primitive types. */
  private final boolean isPrimitive;

  // One shared immutable instance per basic type, handed out by the getters below.
  private final static JSimpleType SINGLETON_BOOL = new JSimpleType(JBasicType.BOOLEAN);
  private final static JSimpleType SINGLETON_BYTE = new JSimpleType(JBasicType.BYTE);
  private final static JSimpleType SINGLETON_SHORT = new JSimpleType(JBasicType.SHORT);
  private final static JSimpleType SINGLETON_CHAR = new JSimpleType(JBasicType.CHAR);
  private final static JSimpleType SINGLETON_INT = new JSimpleType(JBasicType.INT);
  private final static JSimpleType SINGLETON_LONG = new JSimpleType(JBasicType.LONG);
  private final static JSimpleType SINGLETON_FLOAT = new JSimpleType(JBasicType.FLOAT);
  private final static JSimpleType SINGLETON_DOUBLE = new JSimpleType(JBasicType.DOUBLE);
  private final static JSimpleType SINGLETON_NULL = new JSimpleType(JBasicType.NULL);
  private final static JSimpleType SINGLETON_UNSPECIFIED = new JSimpleType(JBasicType.UNSPECIFIED);
  private final static JSimpleType SINGLETON_VOID = new JSimpleType(JBasicType.VOID);

  public static JSimpleType getBoolean() {
    return SINGLETON_BOOL;
  }

  public static JSimpleType getByte() {
    return SINGLETON_BYTE;
  }

  public static JSimpleType getShort() {
    return SINGLETON_SHORT;
  }

  public static JSimpleType getChar() {
    return SINGLETON_CHAR;
  }

  public static JSimpleType getInt() {
    return SINGLETON_INT;
  }

  public static JSimpleType getLong() {
    return SINGLETON_LONG;
  }

  public static JSimpleType getFloat() {
    return SINGLETON_FLOAT;
  }

  public static JSimpleType getDouble() {
    return SINGLETON_DOUBLE;
  }

  public static JSimpleType getNull() {
    return SINGLETON_NULL;
  }

  public static JSimpleType getUnspecified() {
    return SINGLETON_UNSPECIFIED;
  }

  public static JSimpleType getVoid() {
    return SINGLETON_VOID;
  }

  /**
   * Creates a new <code>JSimpleType</code> object that represents the given
   * basic type.
   *
   * @param pType the concrete primitive type to represent
   */
  private JSimpleType(JBasicType pType) {
    type = pType;
    switch (type) {
      case BOOLEAN:
        //$FALL-THROUGH$
      case BYTE:
        //$FALL-THROUGH$
      case CHAR: // BUG FIX: char is a Java primitive type (JLS §4.2) but was missing here
        //$FALL-THROUGH$
      case SHORT:
        //$FALL-THROUGH$
      case INT:
        //$FALL-THROUGH$
      case LONG: // BUG FIX: long is a Java primitive type (JLS §4.2) but was missing here
        //$FALL-THROUGH$
      case FLOAT:
        //$FALL-THROUGH$
      case DOUBLE:
        isPrimitive = true;
        break;
      default:
        // NULL, UNSPECIFIED, VOID (and anything else) are not primitive.
        isPrimitive = false;
    }
  }

  /**
   * Returns the concrete primitive type this class represents.
   *
   * @return the concrete primitive type this class represents
   */
  public JBasicType getType() {
    return type;
  }

  @Override
  public String toASTString(String pDeclarator) {
    List<String> parts = new ArrayList<>();

    // skipNulls drops empty type/declarator pieces so no stray spaces appear.
    parts.add(Strings.emptyToNull(type.toASTString()));
    parts.add(Strings.emptyToNull(pDeclarator));

    return Joiner.on(' ').skipNulls().join(parts);
  }

  public boolean isPrimitive() {
    return isPrimitive;
  }

  @Override
  public int hashCode() {
    final int prime = 31;
    int result = 7;
    result = prime * result + Objects.hashCode(type);
    result = prime * result + Boolean.hashCode(isPrimitive);
    return result;
  }

  /**
   * Returns whether the given object equals this object.
   *
   * <p>Two <code>JSimpleType</code> objects equal each other if their stored primitive types equal.</p>
   *
   * @param obj the object to compare to this object
   * @return <code>true</code> if the given object equals this object, <code>false</code> otherwise
   */
  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }

    if (!(obj instanceof JSimpleType)) {
      return false;
    }

    JSimpleType other = (JSimpleType) obj;
    return type == other.type && isPrimitive == other.isPrimitive;
  }

  @Override
  public String toString() {
    switch (type) {
      case UNSPECIFIED:
        return "unspecified";
      default:
        return type.toASTString();
    }
  }
}
|
# configure nagios
/sbin/chkconfig nagios on
# Configure asm-deployer service
# The legacy SysV init script is removed in favour of the systemd unit.
rm /etc/rc.d/init.d/asm-deployer
/usr/bin/systemctl daemon-reload
/usr/bin/systemctl enable asm-deployer.service
# Quiet nagios for this appliance: no notifications/flap detection, drop the
# sample localhost config, remove the check-concurrency cap and conf.d dir,
# and move the status file into the writable spool directory.
/bin/sed -i 's:enable_notifications=1:enable_notifications=0:' /etc/nagios/nagios.cfg
/bin/sed -i 's:enable_flap_detection=1:enable_flap_detection=0:' /etc/nagios/nagios.cfg
/bin/sed -i 's:cfg_file=/etc/nagios/objects/localhost.cfg:#cfg_file=/etc/nagios/objects/localhost.cfg:' /etc/nagios/nagios.cfg
/bin/sed -i 's:max_concurrent_checks=4:max_concurrent_checks=0:' /etc/nagios/nagios.cfg
/bin/sed -i 's/cfg_dir=\/etc\/nagios\/conf.d/ /' /etc/nagios/nagios.cfg
/bin/sed -i 's/status_file=\/var\/log\/nagios\/status.dat/status_file=\/var\/spool\/nagios\/status.dat/' /etc/nagios/nagios.cfg
# Grant sudo access to script to build iPXE UEFi floppy
# (appended only when the line is not already present, so reruns are idempotent)
if [ $(grep -c "%razor ALL=NOPASSWD:/opt/asm-deployer/scripts/build_bootable_floppy.sh" /etc/sudoers) -eq 0 ]; then
echo "%razor ALL=NOPASSWD:/opt/asm-deployer/scripts/build_bootable_floppy.sh" >> /etc/sudoers
fi
# Restore SELinux contexts on the nagios plugins.
/sbin/restorecon -v /usr/lib64/nagios/plugins/*
# configure graphite
touch /etc/carbon/storage-aggregation.conf
/bin/sed -i 's:LOG_LISTENER_CONNECTIONS = True:LOG_LISTENER_CONNECTIONS = False:' /etc/carbon/carbon.conf
/bin/sed -i 's:LOG_CACHE_QUEUE_SORTS = True:LOG_CACHE_QUEUE_SORTS = False:' /etc/carbon/carbon.conf
/bin/sed -i 's:ENABLE_LOGROTATION = True:ENABLE_LOGROTATION = False:' /etc/carbon/carbon.conf
# First-boot only: generate a SECRET_KEY, initialise the graphite DB and
# retention schemas (grep exit status 1 means the key is not yet present).
grep -q ^SECRET_KEY /etc/graphite-web/local_settings.py
if [ $? -eq 1 ]
then
echo "SECRET_KEY = '$(openssl rand 32 -hex)'" >> /etc/graphite-web/local_settings.py
echo "TIME_ZONE = 'UTC'" >> /etc/graphite-web/local_settings.py
python /usr/lib/python2.7/site-packages/graphite/manage.py syncdb --noinput
cat << EOF > /etc/carbon/storage-schemas.conf
[carbon]
pattern = ^carbon\.
retentions = 60:90d
[asm_thresholds]
pattern = ^asm\..+Threshold
retentions = 1h:30d, 1d:5y
[default]
pattern = .*
retentions = 5m:30d, 1h:1y, 1d:5y
EOF
/bin/sed -i 's:^:#:' /etc/httpd/conf.d/graphite-web.conf
chkconfig carbon-cache on
fi
# Update asm-deployer database
psql -U orion asm_dev < /opt/asm-deployer/db/schema.sql > /dev/null
# executing by default below permissions without validating SECRET_KEY
chown -R csadmin:csadmin /var/lib/graphite-web/
chown -R csadmin:csadmin /var/log/graphite-web/
# Updates for nagios
grep -q asm.cfg /etc/nagios/nagios.cfg || echo "cfg_file=/etc/nagios/objects/asm.cfg" >> /etc/nagios/nagios.cfg
chown -R nagios:nagios /var/log/nagios/
chown -R nagios:nagios /var/spool/nagios/
|
package org.fluentlenium.it;
import org.fluentlenium.adapter.FluentTest;
import org.fluentlenium.adapter.util.SharedDriver;
import org.junit.Assert;
import org.junit.Test;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.htmlunit.HtmlUnitDriver;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.fail;
/**
 * Integration tests for input fields; the four cases only differ in the
 * value typed and the artificial wait, so the shared flow lives in one
 * private helper.
 */
public class Test2 extends FluentIntegTest {

    /**
     * Navigates to the inputs page, types {@code value} into the first input,
     * waits the given time (simulating long-running tests), then asserts the
     * value stuck.
     */
    private void fillFirstInputAndCheck(String value, long sleepMillis) {
        goTo(UrlUtil.getAbsoluteUrlFromFile("inputs.html"));
        findFirst("input").fill().with(value);
        try {
            Thread.sleep(sleepMillis);
        } catch (InterruptedException e) {
            // Restore the interrupt flag instead of swallowing it silently.
            Thread.currentThread().interrupt();
        }
        assertThat(findFirst("input").getValue()).isEqualTo(value);
    }

    @Test
    public void test5() {
        fillFirstInputAndCheck("5", 5000L);
    }

    @Test
    public void test6() {
        fillFirstInputAndCheck("6", 4000L);
    }

    @Test
    public void test7() {
        fillFirstInputAndCheck("7", 3000L);
    }

    @Test
    public void test8() {
        fillFirstInputAndCheck("8", 2000L);
    }
}
|
#!/bin/bash
# Per-chromosome variant calling: fetch an aligned BAM from S3, compute a
# coverage BED, run GATK HaplotypeCaller over the covered intervals, and
# upload the raw VCF back to S3.
fileName=$1
chromosome=$2
workspace=/scratch/workspace/$fileName
#workspace=/shared/shuling/data
#gatk=/shared/workspace/software/gatk/GenomeAnalysisTK-3.3-0/GenomeAnalysisTK.jar
gatk=/shared/shuling/GenomeAnalysisTK-2.4-9-g532efad/GenomeAnalysisTK.jar
# Pull the chromosome-split BAM and its index into the workspace.
aws s3 cp s3://analysis-data-by-ccbb/20170320_Shuling_Califano_dnaseq/analysis_results/$fileName/$fileName.final.$chromosome.bam $workspace/
aws s3 cp s3://analysis-data-by-ccbb/20170320_Shuling_Califano_dnaseq/analysis_results/$fileName/$fileName.final.$chromosome.bai $workspace/
mkdir -p $workspace/temp
# redirecting all output to a file
exec 1>>$workspace/"variant_call.o"
exec 2>>$workspace/"variant_call.e"
# Per-base coverage intervals; used via -L below to restrict calling to
# covered regions.
/shared/workspace/software/bedtools2/bin/bedtools genomecov -split -ibam $workspace/$fileName.final.$chromosome.bam -bga -g /shared/shuling/resource/bwa/human_g1k_v37.fasta.fai -max 70001 > $workspace/$fileName.final.$chromosome.bed
java -Xms454m -Xmx8g -XX:+UseSerialGC -Djava.io.tmpdir=$workspace/temp \
-jar $gatk \
-T HaplotypeCaller \
-R /shared/shuling/resource/bwa/human_g1k_v37.fasta \
-I $workspace/$fileName.final.$chromosome.bam \
-L $workspace/$fileName.final.$chromosome.bed \
--out $workspace/$fileName.raw.$chromosome.vcf.gz \
--annotation BaseQualityRankSumTest \
--annotation FisherStrand \
--annotation GCContent \
--annotation HaplotypeScore \
--annotation HomopolymerRun \
--annotation MappingQualityRankSumTest \
--annotation MappingQualityZero \
--annotation QualByDepth \
--annotation ReadPosRankSumTest \
--annotation RMSMappingQuality \
--annotation DepthPerAlleleBySample \
--annotation Coverage \
--annotation ClippingRankSumTest \
--standard_min_confidence_threshold_for_calling 30.0 \
--standard_min_confidence_threshold_for_emitting 30.0 \
--dbsnp /shared/shuling/resource/dbsnp_132_b37.leftAligned.vcf
# --annotation DepthPerSampleHC \
# Publish the raw per-chromosome VCF.
aws s3 cp $workspace/$fileName.raw.$chromosome.vcf.gz s3://analysis-data-by-ccbb/20170320_Shuling_Califano_dnaseq/analysis_results/variants/$fileName/
|
#include <iostream>

// Demonstrates pointer aliasing: two pointers to the same int, the addresses
// involved, and mutation through the second alias.
int main()
{
    int Numero{ 4096 };

    // Both pointers alias the same variable.
    int* ptr = &Numero;
    int* ptr2 = ptr;

    std::cout << "\nEndereco ptr: " << ptr << "\n";
    std::cout << "\nEndereco contido dentro de ptr2: " << ptr2 << "\n";
    std::cout << "\nEndereco de ptr na memoria RAM: " << &ptr << "\n";
    std::cout << "\nEndereco de ptr2 na memoria RAM: " << &ptr2 << "\n";

    // Writing through ptr2 updates Numero (read here through ptr).
    *ptr2 = *ptr + 10;
    std::cout << "\nValor de numero: " << Numero << "\n";

    system("PAUSE"); // NOTE(review): Windows-only; std::cin.get() is the portable wait.
    return 0;
}
<reponame>bpinhosilva/MyApp
package controllers;
import models.Task;
import play.Logger;
import play.libs.Json;
import play.mvc.BodyParser;
import play.mvc.Controller;
import play.mvc.Http;
import play.mvc.Result;
import utils.TaskBodyParser;
/** REST endpoints for listing and creating {@link Task} entities. */
public class TaskController extends Controller {
    /** Returns all persisted tasks as a JSON array. */
    public Result getTasks() {
        return ok(Json.toJson(Task.find.all()));
    }
    /**
     * Parses the request body into a Task (via TaskBodyParser) and persists
     * it; responds 400 when the required name field is missing.
     */
    @BodyParser.Of(TaskBodyParser.class)
    public Result insertTask() {
        Http.RequestBody body = request().body();
        Task task = body.as(Task.class);
        if (task.getName() == null) {
            return badRequest(Json.newObject().put("error", "Name cannot be null"));
        }
        task.save();
        Logger.info("Got body: " + Json.toJson(task));
        return ok();
    }
}
// This file is part of SWGANH which is released under the MIT license.
// See file LICENSE or go to http://swganh.com/LICENSE
#pragma once
#include <cstdint>
#include <string>
#include "swganh/byte_buffer.h"
#include "base_swg_message.h"
namespace swganh {
namespace messages {
// Tells clients to play a visual effect file attached to a world object.
struct PlayClientEffectObjectMessage : public BaseSwgMessage
{
    // Fixed protocol identifiers for this packet type.
    uint16_t Opcount() const { return 3; }
    uint32_t Opcode() const { return 0x8855434A; }
    std::string client_effect_file; // e.g. "clienteffect/frs_dark_envy.cef"
    std::string auxiliary_string; // Place from where to start the animation. See wiki for examples.
    uint64_t object_id; // id of the object the effect is attached to
    // Field order below defines the wire format; it must match on both paths.
    void OnSerialize(swganh::ByteBuffer& buffer) const
    {
        buffer.write(client_effect_file);
        buffer.write(auxiliary_string);
        buffer.write(object_id);
    }
    void OnDeserialize(swganh::ByteBuffer& buffer)
    {
        client_effect_file = buffer.read<std::string>();
        auxiliary_string = buffer.read<std::string>();
        object_id = buffer.read<uint64_t>();
    }
};
}} // namespace swganh::messages
|
<gh_stars>1-10
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Script for intercomparison of optical properties between models and
"""
from warnings import filterwarnings
from helpers.model_list import get_model_ids #list of model IDs
import pyaerocom as pya
### TODOs
# 1. include untis and automatic unit conversion (e.g. extinction coeffs
# sometimes in Mm-1 and sometimes in m-1)
# 2. flexible
### Analysis options
# if True, existing output files will be overwritten
REANALYSE_EXISTING = False
# if False, no analysis is performed
RUN_ANALYSIS = True
# if True, only the first model / obsnetwork is analysed
ONLY_FIRST = False
# if True, the analysis will stop whenever an error occurs (else, errors that
# occurred will be written into the logfiles)
RAISE_EXCEPTIONS = False
### Setup of TS_TYPES for colocation
# NOTE: THIS WILL CHANGE SOON as this is too complicated and redundant, that is,
# the analysis should be performed at the highest possible resolution (if not
# other specified) and then downscaling can be done in post based on colocated
# data files
# keys are model source ts_types (i.e. the original model resolution, values
# are corresponding ts_types used for analysis)
# Sampling frequencies used for gridded vs. ungridded observation networks.
TS_TYPES_ANA_OBS_GRIDDED = ['monthly', 'yearly']
TS_TYPES_ANA_OBS_UNGRIDDED = ['daily', 'monthly', 'yearly']
# Leave read ts_type of obsdata flexible, that is, if the analysis ts_type
# is e.g., monthly and the read ts_type for model is e.g., daily, then the
# observation data ts_type can be anything that is in higher resolution or
# equal resolution monthly. If False, it is required to be the same ts_type
# as the model read ts_type (i.e. daily in this example)
TS_TYPE_OBS_FLEX = True
from pyaerocom.analysis import AnalysisSetup as STP
# specify here information about the observation networks and variables (and
# sample frequencies). The script below iterates over all analysis setup
# instances created here (they are dictionaries !), respecively for each
# time interval specified above
# One AnalysisSetup per observation network; each lists the model variables
# to compare (with per-network alternatives / vertical schemes where needed).
ANALYSIS_SETUP = [
    # EBAS multicolumn
    STP(obs_id='EBASMC',
        vars_to_analyse=['ec550aer'],  # model domain
        alt_vars={'ec550aer': 'scatc550aer'},  # observation
        ts_types_ana=TS_TYPES_ANA_OBS_UNGRIDDED,
        vert_scheme='surface'),
    # Aeronet Sun v3, level 2
    STP(obs_id='AeronetSunV3Lev2.daily',
        vars_to_analyse=['ang4487aer', 'od550aer'],
        ts_types_ana=TS_TYPES_ANA_OBS_UNGRIDDED),
    # Aeronet SDA v3, level 2
    STP(obs_id='AeronetSDAV3Lev2.daily',
        vars_to_analyse=['od550lt1aer', 'od550gt1aer'],
        ts_types_ana=TS_TYPES_ANA_OBS_UNGRIDDED),
    # Aeronet INV v3, level2
    STP(obs_id='AeronetInvV3Lev2.daily',
        vars_to_analyse=['abs550aer'],
        ts_types_ana=TS_TYPES_ANA_OBS_UNGRIDDED),
    # Caliop v3
    STP(obs_id='CALIOP3',
        vars_to_analyse=['od550aer'],
        ts_types_ana=TS_TYPES_ANA_OBS_GRIDDED),
    # MISR v3.1
    STP(obs_id='MISR_V31',
        vars_to_analyse=['od550aer', 'ang4487aer'],
        ts_types_ana=TS_TYPES_ANA_OBS_GRIDDED),
    # AATSR v4.3
    STP(obs_id='AATSR_SU_v4.3',
        vars_to_analyse= ['abs550aer', 'ang4487aer', 'od550aer',
                          'od550dust', 'od550gt1aer', 'od550lt1aer'],
        ts_types_ana=TS_TYPES_ANA_OBS_GRIDDED),
    # MODIS 6 aqua
    STP(obs_id='MODIS6.aqua',
        vars_to_analyse= ['od550aer'],
        ts_types_ana=TS_TYPES_ANA_OBS_GRIDDED),
    # MODIS 6 terra
    STP(obs_id='MODIS6.terra',
        vars_to_analyse= ['od550aer'],
        ts_types_ana=TS_TYPES_ANA_OBS_GRIDDED)
]
# Time intervals to be analysed: (start, stop) -> for single years use year
# number at start and None at stop)
TIME_IVALS = [(2008, None),
              (2010, None)]
# Regional filter for analysis
FILTER_NAME = 'WORLD-noMOUNTAINS'
# Main loop: run every analysis setup over every time interval, timing the run.
if __name__ == '__main__':
    from time import time
    filterwarnings('ignore')
    t0 = time()
    models = get_model_ids()
    num = len(ANALYSIS_SETUP)
    for i, stp in enumerate(ANALYSIS_SETUP):
        # NOTE(review): this stops after the first setup has been processed
        # (breaking on i==1 after printing the second) — confirm that matches
        # the ONLY_FIRST intent.
        if i==1 and ONLY_FIRST:
            print(stp)
            break
        for (START, STOP) in TIME_IVALS:
            # Merge run-wide options into the per-network setup dict.
            stp.update(start=START,
                       stop=STOP,
                       filter_name=FILTER_NAME,
                       RAISE_EXCEPTIONS=RAISE_EXCEPTIONS,
                       TS_TYPE_OBS_FLEX=TS_TYPE_OBS_FLEX,
                       REANALYSE_EXISTING=REANALYSE_EXISTING)
            ana = pya.analysis.Analyser(stp)
            if RUN_ANALYSIS:
                pya.print_log.info('At: {}, start time = {} ({} of {})'
                                   .format(stp.obs_id, stp.start, i, num))
                ana.run(models)
    dt = (time()-t0)/60
    print('Analysis finished. Total time: {} min'.format(dt))
|
<gh_stars>1-10
// eslint-disable-next-line @typescript-eslint/explicit-function-return-type
export const getGreeting = () => {
  // Page-object accessor for the top-level <h1> greeting element.
  return cy.get('h1');
};
|
//#####################################################################
// Copyright 2004-2009, <NAME>, <NAME>, <NAME>.
// This file is part of PhysBAM whose distribution is governed by the license contained in the accompanying file PHYSBAM_COPYRIGHT.txt.
//#####################################################################
#include <PhysBAM_Tools/Grids_Uniform/UNIFORM_GRID_ITERATOR_CELL.h>
#include <PhysBAM_Tools/Grids_Uniform_Arrays/ARRAYS_ND.h>
#include <PhysBAM_Tools/Grids_Uniform_Interpolation/LINEAR_INTERPOLATION_UNIFORM.h>
#include <PhysBAM_Tools/Read_Write/Grids_Uniform_Arrays/READ_WRITE_ARRAYS.h>
#include <PhysBAM_Tools/Read_Write/Utilities/FILE_UTILITIES.h>
#include <PhysBAM_Rendering/PhysBAM_OpenGL/OpenGL_Components/OPENGL_COMPONENT_SCALAR_FIELD_3D.h>
using namespace PhysBAM;
// Constructor without a data file: values are filled in externally, so the
// value array is allocated over the grid domain up front.
template<class T,class T2,class RW> OPENGL_COMPONENT_SCALAR_FIELD_3D<T,T2,RW>::
OPENGL_COMPONENT_SCALAR_FIELD_3D(const GRID<TV> &grid_input,OPENGL_COLOR_MAP<T2>* color_map_input)
: OPENGL_COMPONENT("Scalar Field 3D"), opengl_scalar_field(grid_input,*new ARRAY<T2,VECTOR<int,3> >,color_map_input),
scalar_field_filename(""), frame_loaded(-1), valid(false)
{
opengl_scalar_field.values.Resize(grid_input.Domain_Indices());
is_animation = true;
}
// Constructor reading values per frame from scalar_field_filename; the
// filename pattern decides whether the component is animated.
template<class T,class T2,class RW> OPENGL_COMPONENT_SCALAR_FIELD_3D<T,T2,RW>::
OPENGL_COMPONENT_SCALAR_FIELD_3D(const GRID<TV> &grid_input, const std::string &scalar_field_filename_input,OPENGL_COLOR_MAP<T2>* color_map_input)
: OPENGL_COMPONENT("Scalar Field 3D"), opengl_scalar_field(grid_input,*new ARRAY<T2,VECTOR<int,3> >,color_map_input),
scalar_field_filename(scalar_field_filename_input), frame_loaded(-1), valid(false)
{
is_animation = FILE_UTILITIES::Is_Animated(scalar_field_filename);
}
// Same as above, but with an explicit draw mode for the scalar field.
template<class T,class T2,class RW> OPENGL_COMPONENT_SCALAR_FIELD_3D<T,T2,RW>::
OPENGL_COMPONENT_SCALAR_FIELD_3D(const GRID<TV> &grid_input, const std::string &scalar_field_filename_input,OPENGL_COLOR_MAP<T2>* color_map_input,
typename OPENGL_SCALAR_FIELD_3D<T,T2>::DRAW_MODE draw_mode_input)
: OPENGL_COMPONENT("Scalar Field 3D"), opengl_scalar_field(grid_input,*new ARRAY<T2,VECTOR<int,3> >,color_map_input,draw_mode_input),
scalar_field_filename(scalar_field_filename_input), frame_loaded(-1), valid(false)
{
is_animation = FILE_UTILITIES::Is_Animated(scalar_field_filename);
}
// Destructor: releases the value array that the constructors heap-allocated
// and handed to opengl_scalar_field by reference.
template<class T,class T2,class RW> OPENGL_COMPONENT_SCALAR_FIELD_3D<T,T2,RW>::
~OPENGL_COMPONENT_SCALAR_FIELD_3D()
{
    delete &opengl_scalar_field.values;
}
// A frame is valid iff its per-frame data file exists on disk.
template<class T,class T2,class RW> bool OPENGL_COMPONENT_SCALAR_FIELD_3D<T,T2,RW>::
Valid_Frame(int frame_input) const
{
    return FILE_UTILITIES::Frame_File_Exists(scalar_field_filename, frame_input);
}
// Advance to a new frame and reload the field data for it.
template<class T,class T2,class RW> void OPENGL_COMPONENT_SCALAR_FIELD_3D<T,T2,RW>::
Set_Frame(int frame_input)
{
    OPENGL_COMPONENT::Set_Frame(frame_input);
    Reinitialize();
}
// Toggle visibility; when enabling, re-sync the slice and reload data.
template<class T,class T2,class RW> void OPENGL_COMPONENT_SCALAR_FIELD_3D<T,T2,RW>::
Set_Draw(bool draw_input)
{
    OPENGL_COMPONENT::Set_Draw(draw_input);
    if (draw_input) opengl_scalar_field.Set_Slice(slice);
    Reinitialize();
}
// Render the field, but only when visible and the current frame loaded cleanly.
template<class T,class T2,class RW> void OPENGL_COMPONENT_SCALAR_FIELD_3D<T,T2,RW>::
Display(const int in_color) const
{
    if (valid && draw) opengl_scalar_field.Display(in_color);
}
// Bounding box of the drawn field; a degenerate centered box when the
// component is hidden or the current frame failed to load.
template<class T,class T2,class RW> RANGE<VECTOR<float,3> > OPENGL_COMPONENT_SCALAR_FIELD_3D<T,T2,RW>::
Bounding_Box() const
{
    if (valid && draw) return opengl_scalar_field.Bounding_Box();
    else return RANGE<VECTOR<float,3> >::Centered_Box();
}
// Print info about the current selection, prefixed by this component's name,
// but only when the displayed data matches the current frame.
template<class T,class T2,class RW> void OPENGL_COMPONENT_SCALAR_FIELD_3D<T,T2,RW>::
Print_Selection_Info(std::ostream& output_stream,OPENGL_SELECTION* current_selection) const
{
    if(Is_Up_To_Date(frame)){
        output_stream<<component_name<<": ";
        opengl_scalar_field.Print_Selection_Info(output_stream,current_selection);}
}
// Upsampling from a coarse simulation grid is only defined for floating-point
// fields; the int and bool overloads exist to satisfy the instantiations but
// abort if ever reached (only called when upsample_scale>1, see below).
template<class T> void
Reinitialize_From_Simulation_Helper(const GRID<VECTOR<T,3> >& grid,const GRID<VECTOR<T,3> >& coarse_grid,ARRAY<int,VECTOR<int,3> > &values,const ARRAY<int,VECTOR<int,3> > &simulated_values)
{
    PHYSBAM_FATAL_ERROR();
}
template<class T> void
Reinitialize_From_Simulation_Helper(const GRID<VECTOR<T,3> >& grid,const GRID<VECTOR<T,3> >& coarse_grid,ARRAY<bool,VECTOR<int,3> > &values,const ARRAY<bool,VECTOR<int,3> > &simulated_values)
{
    PHYSBAM_FATAL_ERROR();
}
// Scalar case: fill the fine display grid by linearly interpolating the
// coarse simulated values at each fine cell center (clamped at boundaries).
template<class T> void
Reinitialize_From_Simulation_Helper(const GRID<VECTOR<T,3> >& grid,const GRID<VECTOR<T,3> >& coarse_grid,ARRAY<T,VECTOR<int,3> > &values,const ARRAY<T,VECTOR<int,3> > &simulated_values)
{
    typedef VECTOR<T,3> TV;
    LINEAR_INTERPOLATION_UNIFORM<GRID<TV>,T> interpolation;
    for(typename GRID<TV>::CELL_ITERATOR iterator(grid);iterator.Valid();iterator.Next())
        values(iterator.Cell_Index())=interpolation.Clamped_To_Array(coarse_grid,simulated_values,iterator.Location());
}
// Refresh display values straight from the live simulation arrays (no file
// I/O). Upsamples via the helper when the display grid is finer than the
// simulation grid, otherwise copies cell-by-cell.
template<class T,class T2,class RW> void OPENGL_COMPONENT_SCALAR_FIELD_3D<T,T2,RW>::
Reinitialize_From_Simulation()
{
    if(draw){
        // Reload only when the frame changed (or never loaded for static data).
        if((is_animation && frame_loaded != frame) || (!is_animation && frame_loaded < 0)){
            valid = false;
            if(opengl_scalar_field.values_simulated){
                if(opengl_scalar_field.upsample_scale>1) Reinitialize_From_Simulation_Helper(opengl_scalar_field.grid,*opengl_scalar_field.coarse_grid,opengl_scalar_field.values,*opengl_scalar_field.values_simulated);
                else for(typename GRID<TV>::CELL_ITERATOR iterator(opengl_scalar_field.grid);iterator.Valid();iterator.Next()) opengl_scalar_field.values(iterator.Cell_Index())=(*opengl_scalar_field.values_simulated)(iterator.Cell_Index());}
            opengl_scalar_field.Update();
            frame_loaded = frame;
            valid = true;}}
}
// Refresh display values for the current frame: from the simulation when
// interactive, otherwise by reading the per-frame file from disk. Leaves
// 'valid' false (stale data shown as invalid) if the file is missing.
template<class T,class T2,class RW> void OPENGL_COMPONENT_SCALAR_FIELD_3D<T,T2,RW>::
Reinitialize()
{
    if(is_interactive) Reinitialize_From_Simulation();
    else if(draw){
        if((is_animation && frame_loaded != frame) || (!is_animation && frame_loaded < 0)){
            valid = false;
            std::string filename=FILE_UTILITIES::Get_Frame_Filename(scalar_field_filename,frame);
            if (FILE_UTILITIES::File_Exists(filename)) FILE_UTILITIES::Read_From_File<RW>(filename,opengl_scalar_field.values);
            else return;
            opengl_scalar_field.Update();
            frame_loaded = frame;
            valid = true;}}
}
//#####################################################################
// Thin UI-callback forwarders to the underlying OPENGL_SCALAR_FIELD_3D.
//#####################################################################
// Toggle smoothed texture rendering of the 2D slice view.
template<class T,class T2,class RW> void OPENGL_COMPONENT_SCALAR_FIELD_3D<T,T2,RW>::
Toggle_Smooth_Slice()
{
    opengl_scalar_field.Toggle_Smooth_Slice_Texture();
}
// Cycle through the available draw modes.
template<class T,class T2,class RW> void OPENGL_COMPONENT_SCALAR_FIELD_3D<T,T2,RW>::
Toggle_Draw_Mode()
{
    opengl_scalar_field.Toggle_Draw_Mode();
}
// Cycle through the available color maps.
template<class T,class T2,class RW> void OPENGL_COMPONENT_SCALAR_FIELD_3D<T,T2,RW>::
Toggle_Color_Map()
{
    opengl_scalar_field.Toggle_Color_Map();
}
// Widen the color-map value range.
template<class T,class T2,class RW> void OPENGL_COMPONENT_SCALAR_FIELD_3D<T,T2,RW>::
Toggle_Increase_Color_Map_Range()
{
    opengl_scalar_field.Increase_Scale_Range();
}
// Narrow the color-map value range.
template<class T,class T2,class RW> void OPENGL_COMPONENT_SCALAR_FIELD_3D<T,T2,RW>::
Toggle_Decrease_Color_Map_Range()
{
    opengl_scalar_field.Decrease_Scale_Range();
}
template class OPENGL_COMPONENT_SCALAR_FIELD_3D<float,int,float>;
template class OPENGL_COMPONENT_SCALAR_FIELD_3D<float,bool,float>;
template class OPENGL_COMPONENT_SCALAR_FIELD_3D<float,float,float>;
#ifndef COMPILE_WITHOUT_DOUBLE_SUPPORT
template class OPENGL_COMPONENT_SCALAR_FIELD_3D<double,int,double>;
template class OPENGL_COMPONENT_SCALAR_FIELD_3D<double,bool,double>;
template class OPENGL_COMPONENT_SCALAR_FIELD_3D<double,double,double>;
#endif
|
# Build a RISC-V GNU toolchain with the Zcee code-size extension, using
# development forks of gcc, binutils and qemu, then run the newlib test reports.
git clone https://github.com/pz9115/riscv-gnu-toolchain.git
cd riscv-gnu-toolchain
git submodule update --init
# Swap riscv-gcc to the corev development branch.
cd riscv-gcc
git remote add syl https://github.com/pz9115/corev-gcc.git
git fetch syl
git checkout syl/development
# Swap riscv-binutils to the corev development branch.
cd ../riscv-binutils
git remote add syl https://github.com/pz9115/corev-binutils-gdb.git
git fetch syl
git checkout syl/development
# Swap qemu to the PLCT Zce development branch.
cd ../qemu
git remote add plctlab https://github.com/plctlab/plct-qemu.git
git fetch plctlab
git checkout plctlab/plct-zce-dev
cd ..
# Replace line 15 of the qemu run wrapper so tests execute with the
# experimental x-zcee CPU flag and the matching kernel ABI version.
sed -i '15c qemu-riscv$xlen -cpu rv64,x-zcee=true -r 5.10 "${qemu_args[@]}" -L ${RISC_V_SYSROOT} "$@"' scripts/wrapper/qemu/riscv64-unknown-elf-run
./configure --prefix="$PWD/opt-riscv-rv64zcee" --with-arch=rv64gc_zcee --with-abi=lp64d --with-multilib-generator="rv64gc_zcee-lp64d--"
# Build toolchain pieces and run the newlib regression reports in parallel.
make report-gcc-newlib -j $(nproc)
make report-binutils-newlib -j $(nproc)
|
<reponame>manjitborah2710/redis_data_viewer
//= link_directory ../javascripts/redis_data_viewer .js
//= link_directory ../stylesheets/redis_data_viewer .css
|
public class BubbleSort {
/**
 * Sorts the given array in ascending order using bubble sort, in place.
 * Each pass bubbles the largest remaining element to the end; the pass
 * loop exits early once a full pass performs no swaps (array sorted).
 *
 * @param input array to sort; may be empty (no-op)
 */
public static void sort(int[] input) {
    int n = input.length;
    for (int i = 0; i < n; i++) {
        boolean swapped = false;
        // Elements beyond index n-i-1 are already in final position.
        for (int j = 1; j < (n - i); j++) {
            if (input[j - 1] > input[j]) {
                int temp = input[j - 1];
                input[j - 1] = input[j];
                input[j] = temp;
                swapped = true;
            }
        }
        if (!swapped) {
            break; // no swaps this pass: already sorted, stop early
        }
    }
}
/** Demo entry point: sorts a sample array and prints it space-separated. */
public static void main(String[] args) {
    int[] numbers = {64, 34, 25, 12, 22, 11, 90};
    BubbleSort.sort(numbers);
    for (int value : numbers) {
        System.out.print(value);
        System.out.print(" ");
    }
}
} |
<reponame>MartinNeupauer/mongo<gh_stars>1-10
#!/usr/bin/env python
#
# Public Domain 2014-2017 MongoDB, Inc.
# Public Domain 2008-2014 WiredTiger, Inc.
#
# This is free and unencumbered software released into the public domain.
#
# Anyone is free to copy, modify, publish, use, compile, sell, or
# distribute this software, either in source code form or as a compiled
# binary, for any purpose, commercial or non-commercial, and by any
# means.
#
# In jurisdictions that recognize copyright laws, the author or authors
# of this software dedicate any and all copyright interest in the
# software to the public domain. We make this dedication for the benefit
# of the public at large and to the detriment of our heirs and
# successors. We intend this dedication to be an overt act of
# relinquishment in perpetuity of all present and future rights to this
# software under copyright law.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
# runner/__init__.py
# Used as a first import by runners, does any common initialization.
from __future__ import print_function
import os, shutil, sys
# Directory containing this runner package.
thisdir = os.path.dirname(os.path.abspath(__file__))
# Root of the workgen sources (two levels above runner/).
workgen_src = os.path.dirname(os.path.dirname(thisdir))
# WiredTiger source tree root and its autotools build directory.
wt_dir = os.path.dirname(os.path.dirname(workgen_src))
wt_builddir = os.path.join(wt_dir, 'build_posix')
def _prepend_env_path(pathvar, s):
last = ''
try:
last = ':' + os.environ[pathvar]
except:
pass
os.environ[pathvar] = s + last
# Initialize the python path so needed modules can be imported.
# If the path already works, don't change it.
try:
    import wiredtiger
except ImportError:
    # We'll try hard to make the importing work, we'd like to runners
    # to be executable directly without having to set environment variables.
    sys.path.insert(0, os.path.join(wt_dir, 'lang', 'python'))
    sys.path.insert(0, os.path.join(wt_builddir, 'lang', 'python'))
    try:
        import wiredtiger
    except ImportError:
        # If the .libs directory is not in our library search path,
        # we need to set it and retry. However, the dynamic link
        # library has already cached its value, our only option is
        # to restart the Python interpreter.
        if '_workgen_init' not in os.environ:
            os.environ['_workgen_init'] = 'true'
            dotlibs = os.path.join(wt_builddir, '.libs')
            _prepend_env_path('LD_LIBRARY_PATH', dotlibs)
            _prepend_env_path('DYLD_LIBRARY_PATH', dotlibs)
            py_args = sys.argv
            py_args.insert(0, sys.executable)
            try:
                os.execv(sys.executable, py_args)
            # 'except Exception, exception' is Python-2-only syntax; the
            # 'as' form below is accepted by both Python 2.6+ and Python 3.
            except Exception as exception:
                print('re-exec failed: ' + str(exception), file=sys.stderr)
                print(' exec(' + sys.executable + ', ' + str(py_args) + ')')
                print('Try adding "' + dotlibs + '" to the', file=sys.stderr)
                print('LD_LIBRARY_PATH environment variable before running ' +
                      'this program again.', file=sys.stderr)
        sys.exit(1)
try:
    import workgen
except ImportError:
    # Fall back to the in-tree source and build locations.
    sys.path.insert(0, os.path.join(workgen_src, 'workgen'))
    sys.path.insert(0, os.path.join(wt_builddir, 'bench', 'workgen'))
    import workgen
# Clear out the WT_TEST directory.
shutil.rmtree('WT_TEST', True)
os.mkdir('WT_TEST')
from .core import txn, extensions_config, op_group_transaction, op_log_like, op_multi_table
from .latency import workload_latency
|
using System;
using System.Threading;
public static class NeonHelper
{
/// <summary>
/// Polls <paramref name="condition"/> until it returns true or <paramref name="timeout"/> elapses.
/// </summary>
/// <param name="condition">Predicate to poll; evaluated at least once, even for a zero timeout.</param>
/// <param name="timeout">Maximum time to keep polling.</param>
/// <param name="pollInterval">Delay between successive polls.</param>
/// <exception cref="TimeoutException">Thrown when the condition never became true in time.</exception>
public static void WaitFor(Func<bool> condition, TimeSpan timeout, TimeSpan pollInterval)
{
    // UtcNow is immune to DST/wall-clock adjustments that can stretch or cut
    // the wait short (DateTime.Now is not monotonic).
    DateTime deadline = DateTime.UtcNow + timeout;
    while (true)
    {
        // Check the condition before the deadline so a zero/negative timeout
        // still gives the condition one chance (the original loop could throw
        // without ever evaluating it).
        if (condition())
        {
            return;
        }
        if (DateTime.UtcNow >= deadline)
        {
            break;
        }
        Thread.Sleep(pollInterval);
    }
    throw new TimeoutException("The condition did not become true within the specified timeout period.");
}
} |
/**
* Created by jakeforaker on 11/19/15.
*/
var React = require('react/addons');
var _ = require('lodash');
var Link = require('./Link');
var Filter = require('./Filter');
var NotificationBar = require('./NotificationBar');
// List view: renders an optional "new links" notification bar, the filter
// controls, and one <Link> per entry in props.links.
// Parent component is App (see todo below).
var LinkList = React.createClass({
  render: function () {
    var props = this.props;
    var users = this.props.users;
    // Map each link record to a <Link> element.
    // NOTE(review): appending _.uniqueId() to the key produces a new key on
    // every render, which forces React to remount each <Link>; confirm
    // whether objectId alone is unique before relying on reconciliation.
    var links = _.map(this.props.links, function (linkdata) {
      return (
        <Link
          doSomething={props.something}
          handleUpvote={props.handleUpvote}
          handleRemoveUpvote={props.handleRemoveUpvote}
          key={linkdata.objectId + _.uniqueId()}
          link={linkdata}
          users={users}
          currentUser={props.currentUser}
        />
      )
    });
    // Notification bar shown only while props.updatedLink is set;
    // returns undefined otherwise so nothing renders.
    var nots = function () {
      if (this.props.updatedLink) {
        return (
          <NotificationBar
            updatedLink={this.props.updatedLink}
            count={props.count}
            onDismissNew={props.dismissNew}
          />
        )
      }
    }.bind(this);
    return (
      <div className="container">
        <ul className="links">
          {nots()}
          <Filter
            sortFilter={props.sortFilter}
            clearFilter={props.clearFilter}
            doFilterByVotes={props.doFilterByVotes}
            doFilterByDate={props.doFilterByDate}
            channels={props.channels}
          />
          {links}
        </ul>
      </div>
    );
    //todo -- remember - parent component is App ^^
    //todo - sort by date or upvotes count
  }
});
module.exports = LinkList; |
// Code generated by protoc-gen-grpc-gateway. DO NOT EDIT.
// source: sku/skupb/sku.proto
/*
Package skupb is a reverse proxy.
It translates gRPC into RESTful JSON APIs.
*/
package skupb
import (
"context"
"io"
"net/http"
"github.com/golang/protobuf/descriptor"
"github.com/golang/protobuf/proto"
"github.com/grpc-ecosystem/grpc-gateway/runtime"
"github.com/grpc-ecosystem/grpc-gateway/utilities"
"google.golang.org/grpc"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/grpclog"
"google.golang.org/grpc/status"
)
// Suppress "imported and not used" errors
var _ codes.Code
var _ io.Reader
var _ status.Status
var _ = runtime.String
var _ = utilities.NewDoubleArray
var _ = descriptor.ForMessage
// request_SkuService_New_0 decodes the JSON body into an UpsertRequest and
// forwards it to the remote gRPC client, capturing header/trailer metadata.
// (Generated code: structure must not be hand-modified.)
func request_SkuService_New_0(ctx context.Context, marshaler runtime.Marshaler, client SkuServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
	var protoReq UpsertRequest
	var metadata runtime.ServerMetadata

	newReader, berr := utilities.IOReaderFactory(req.Body)
	if berr != nil {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr)
	}
	// io.EOF is tolerated: an empty body decodes to the zero-valued request.
	if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
	}

	msg, err := client.New(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
	return msg, metadata, err
}

// local_request_SkuService_New_0 is the in-process variant: it invokes the
// server implementation directly instead of dialing through a client conn.
func local_request_SkuService_New_0(ctx context.Context, marshaler runtime.Marshaler, server SkuServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
	var protoReq UpsertRequest
	var metadata runtime.ServerMetadata

	newReader, berr := utilities.IOReaderFactory(req.Body)
	if berr != nil {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr)
	}
	if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
	}

	msg, err := server.New(ctx, &protoReq)
	return msg, metadata, err
}
var (
	// Get takes no request body: all fields arrive as URL query parameters,
	// with no path-bound fields to exclude (empty filter).
	filter_SkuService_Get_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)}
)

// request_SkuService_Get_0 populates a GetRequest from the query string and
// forwards it to the remote gRPC client.
func request_SkuService_Get_0(ctx context.Context, marshaler runtime.Marshaler, client SkuServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
	var protoReq GetRequest
	var metadata runtime.ServerMetadata

	if err := req.ParseForm(); err != nil {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
	}
	if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_SkuService_Get_0); err != nil {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
	}

	msg, err := client.Get(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
	return msg, metadata, err
}

// local_request_SkuService_Get_0 is the in-process variant of the above.
func local_request_SkuService_Get_0(ctx context.Context, marshaler runtime.Marshaler, server SkuServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
	var protoReq GetRequest
	var metadata runtime.ServerMetadata

	if err := req.ParseForm(); err != nil {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
	}
	if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_SkuService_Get_0); err != nil {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
	}

	msg, err := server.Get(ctx, &protoReq)
	return msg, metadata, err
}
var (
	// Delete also carries all fields as query parameters (empty filter).
	filter_SkuService_Delete_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)}
)

// request_SkuService_Delete_0 populates a DeleteRequest from the query string
// and forwards it to the remote gRPC client.
func request_SkuService_Delete_0(ctx context.Context, marshaler runtime.Marshaler, client SkuServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
	var protoReq DeleteRequest
	var metadata runtime.ServerMetadata

	if err := req.ParseForm(); err != nil {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
	}
	if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_SkuService_Delete_0); err != nil {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
	}

	msg, err := client.Delete(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
	return msg, metadata, err
}

// local_request_SkuService_Delete_0 is the in-process variant of the above.
func local_request_SkuService_Delete_0(ctx context.Context, marshaler runtime.Marshaler, server SkuServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
	var protoReq DeleteRequest
	var metadata runtime.ServerMetadata

	if err := req.ParseForm(); err != nil {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
	}
	if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_SkuService_Delete_0); err != nil {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
	}

	msg, err := server.Delete(ctx, &protoReq)
	return msg, metadata, err
}
// request_SkuService_GetProductSkus_0 decodes the JSON body into a
// GetProductSkusRequest and forwards it to the remote gRPC client.
func request_SkuService_GetProductSkus_0(ctx context.Context, marshaler runtime.Marshaler, client SkuServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
	var protoReq GetProductSkusRequest
	var metadata runtime.ServerMetadata

	newReader, berr := utilities.IOReaderFactory(req.Body)
	if berr != nil {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr)
	}
	// io.EOF is tolerated: an empty body decodes to the zero-valued request.
	if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
	}

	msg, err := client.GetProductSkus(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
	return msg, metadata, err
}

// local_request_SkuService_GetProductSkus_0 is the in-process variant.
func local_request_SkuService_GetProductSkus_0(ctx context.Context, marshaler runtime.Marshaler, server SkuServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
	var protoReq GetProductSkusRequest
	var metadata runtime.ServerMetadata

	newReader, berr := utilities.IOReaderFactory(req.Body)
	if berr != nil {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr)
	}
	if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
	}

	msg, err := server.GetProductSkus(ctx, &protoReq)
	return msg, metadata, err
}
// RegisterSkuServiceHandlerServer registers the http handlers for service SkuService to "mux".
// UnaryRPC :call SkuServiceServer directly.
// StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906.
func RegisterSkuServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, server SkuServiceServer) error {
	// POST /sku -> SkuService.New (in-process).
	mux.Handle("POST", pattern_SkuService_New_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
		ctx, cancel := context.WithCancel(req.Context())
		defer cancel()
		inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
		rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)
		if err != nil {
			runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
			return
		}
		resp, md, err := local_request_SkuService_New_0(rctx, inboundMarshaler, server, req, pathParams)
		ctx = runtime.NewServerMetadataContext(ctx, md)
		if err != nil {
			runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
			return
		}

		forward_SkuService_New_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)

	})

	// GET /sku -> SkuService.Get (in-process).
	mux.Handle("GET", pattern_SkuService_Get_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
		ctx, cancel := context.WithCancel(req.Context())
		defer cancel()
		inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
		rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)
		if err != nil {
			runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
			return
		}
		resp, md, err := local_request_SkuService_Get_0(rctx, inboundMarshaler, server, req, pathParams)
		ctx = runtime.NewServerMetadataContext(ctx, md)
		if err != nil {
			runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
			return
		}

		forward_SkuService_Get_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)

	})

	// DELETE /sku -> SkuService.Delete (in-process).
	mux.Handle("DELETE", pattern_SkuService_Delete_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
		ctx, cancel := context.WithCancel(req.Context())
		defer cancel()
		inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
		rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)
		if err != nil {
			runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
			return
		}
		resp, md, err := local_request_SkuService_Delete_0(rctx, inboundMarshaler, server, req, pathParams)
		ctx = runtime.NewServerMetadataContext(ctx, md)
		if err != nil {
			runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
			return
		}

		forward_SkuService_Delete_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)

	})

	// POST /sku -> SkuService.GetProductSkus (in-process).
	mux.Handle("POST", pattern_SkuService_GetProductSkus_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
		ctx, cancel := context.WithCancel(req.Context())
		defer cancel()
		inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
		rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)
		if err != nil {
			runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
			return
		}
		resp, md, err := local_request_SkuService_GetProductSkus_0(rctx, inboundMarshaler, server, req, pathParams)
		ctx = runtime.NewServerMetadataContext(ctx, md)
		if err != nil {
			runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
			return
		}

		forward_SkuService_GetProductSkus_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)

	})

	return nil
}
// RegisterSkuServiceHandlerFromEndpoint is same as RegisterSkuServiceHandler but
// automatically dials to "endpoint" and closes the connection when "ctx" gets done.
func RegisterSkuServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) {
	conn, err := grpc.Dial(endpoint, opts...)
	if err != nil {
		return err
	}
	// The deferred closure inspects the named return 'err': close immediately
	// on registration failure, otherwise close when ctx is done.
	defer func() {
		if err != nil {
			if cerr := conn.Close(); cerr != nil {
				grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr)
			}
			return
		}
		go func() {
			<-ctx.Done()
			if cerr := conn.Close(); cerr != nil {
				grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr)
			}
		}()
	}()

	return RegisterSkuServiceHandler(ctx, mux, conn)
}
// RegisterSkuServiceHandler registers the http handlers for service SkuService to "mux".
// The handlers forward requests to the grpc endpoint over "conn".
func RegisterSkuServiceHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error {
	// Wrap the connection in a generated client and delegate registration.
	return RegisterSkuServiceHandlerClient(ctx, mux, NewSkuServiceClient(conn))
}
// RegisterSkuServiceHandlerClient registers the http handlers for service SkuService
// to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "SkuServiceClient".
// Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "SkuServiceClient"
// doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in
// "SkuServiceClient" to call the correct interceptors.
func RegisterSkuServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client SkuServiceClient) error {
	// POST /sku -> SkuService.New (via client conn).
	mux.Handle("POST", pattern_SkuService_New_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
		ctx, cancel := context.WithCancel(req.Context())
		defer cancel()
		inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
		rctx, err := runtime.AnnotateContext(ctx, mux, req)
		if err != nil {
			runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
			return
		}
		resp, md, err := request_SkuService_New_0(rctx, inboundMarshaler, client, req, pathParams)
		ctx = runtime.NewServerMetadataContext(ctx, md)
		if err != nil {
			runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
			return
		}

		forward_SkuService_New_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)

	})

	// GET /sku -> SkuService.Get (via client conn).
	mux.Handle("GET", pattern_SkuService_Get_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
		ctx, cancel := context.WithCancel(req.Context())
		defer cancel()
		inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
		rctx, err := runtime.AnnotateContext(ctx, mux, req)
		if err != nil {
			runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
			return
		}
		resp, md, err := request_SkuService_Get_0(rctx, inboundMarshaler, client, req, pathParams)
		ctx = runtime.NewServerMetadataContext(ctx, md)
		if err != nil {
			runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
			return
		}

		forward_SkuService_Get_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)

	})

	// DELETE /sku -> SkuService.Delete (via client conn).
	mux.Handle("DELETE", pattern_SkuService_Delete_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
		ctx, cancel := context.WithCancel(req.Context())
		defer cancel()
		inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
		rctx, err := runtime.AnnotateContext(ctx, mux, req)
		if err != nil {
			runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
			return
		}
		resp, md, err := request_SkuService_Delete_0(rctx, inboundMarshaler, client, req, pathParams)
		ctx = runtime.NewServerMetadataContext(ctx, md)
		if err != nil {
			runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
			return
		}

		forward_SkuService_Delete_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)

	})

	// POST /sku -> SkuService.GetProductSkus (via client conn).
	mux.Handle("POST", pattern_SkuService_GetProductSkus_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
		ctx, cancel := context.WithCancel(req.Context())
		defer cancel()
		inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
		rctx, err := runtime.AnnotateContext(ctx, mux, req)
		if err != nil {
			runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
			return
		}
		resp, md, err := request_SkuService_GetProductSkus_0(rctx, inboundMarshaler, client, req, pathParams)
		ctx = runtime.NewServerMetadataContext(ctx, md)
		if err != nil {
			runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
			return
		}

		forward_SkuService_GetProductSkus_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)

	})

	return nil
}
var (
	// All four RPCs map to the single path "/sku"; they are distinguished
	// only by HTTP method in the mux.Handle registrations above.
	pattern_SkuService_New_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0}, []string{"sku"}, "", runtime.AssumeColonVerbOpt(true)))

	pattern_SkuService_Get_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0}, []string{"sku"}, "", runtime.AssumeColonVerbOpt(true)))

	pattern_SkuService_Delete_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0}, []string{"sku"}, "", runtime.AssumeColonVerbOpt(true)))

	pattern_SkuService_GetProductSkus_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0}, []string{"sku"}, "", runtime.AssumeColonVerbOpt(true)))
)

var (
	// Responses are forwarded with the standard unary message forwarder.
	forward_SkuService_New_0 = runtime.ForwardResponseMessage

	forward_SkuService_Get_0 = runtime.ForwardResponseMessage

	forward_SkuService_Delete_0 = runtime.ForwardResponseMessage

	forward_SkuService_GetProductSkus_0 = runtime.ForwardResponseMessage
)
|
/** Attachment point of a tooltip relative to its target. */
export declare type TooltipAnchor = 'top' | 'right' | 'bottom' | 'left' | 'center';
/** Placement/orientation variants for a chart crosshair. */
export declare type CrosshairType = 'x' | 'y' | 'top-left' | 'top' | 'top-right' | 'right' | 'bottom-right' | 'bottom' | 'bottom-left' | 'left' | 'cross';
//# sourceMappingURL=types.d.ts.map |
package parser;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.sql.Date;
import java.util.ArrayList;
import java.util.Calendar;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.nodes.Node;
import org.jsoup.select.Elements;
import org.openqa.selenium.TimeoutException;
import model.CollectedInfo;
import utility.IOHandler;
/**
 * Base class for shopping-site parsers (e.g. Danawa / Naver Shopping).
 * Drives a Selenium crawl for a search term and delegates site-specific
 * extraction to abstract hooks implemented by subclasses.
 */
public abstract class Parser {

    /**
     * Searches the site for the given string and returns the collected
     * product list, or null when nothing was found / an error occurred.
     */
    public ArrayList<CollectedInfo> parse(String searchStr, SeleniumManager sm) {
        String orgHtml = null;
        try {
            // Korean text can break if embedded directly in a URL, so
            // percent-encode it (UTF-8) first.
            String encoded = toUTF8(searchStr);

            // Crawl the search-result page with Selenium, waiting for the
            // subclass-specified element class up to the subclass timeout.
            String targetUrl = getBaseUrl() + encoded;
            orgHtml = sm.explicitCrawl(targetUrl, getExplicitClassName(), getTimeout());
            // System.out.println(orgHtml);
            Document doc = Jsoup.parse(orgHtml);

            // Extract the needed fields from the HTML.
            return parseProduct(doc);
        }
        catch(TimeoutException te) {
            // No search results, or the page load timed out.
            String parserName = this.getClass().getName();
            System.out.println(parserName + " : 검색 결과가 없거나 타임아웃 발생");
        }
        catch(Exception e) {
            IOHandler.getInstance().log("Parser.parse", e);
        }
        return null;
    }

    // Extract the needed product information from the parsed HTML document.
    protected abstract ArrayList<CollectedInfo> parseProduct(Document doc);

    // Percent-encode Korean (or any) text as UTF-8 for use in a URL.
    protected String toUTF8(String str) throws UnsupportedEncodingException {
        return URLEncoder.encode(str, "UTF-8");
    }

    // Hooks implemented by concrete site parsers:
    protected abstract String getBaseUrl();                // base search URL of the target site (Danawa / Naver)
    protected abstract String getProductClassName();       // HTML class name identifying a product element
    protected abstract String getExplicitClassName();      // class name Selenium explicitly waits for
    protected abstract String getLowAccuracyClassName();   // class name used for low-accuracy matches
    protected abstract int getTimeout();                   // crawl timeout in the unit SeleniumManager expects

    // Per-product HTML field extractors:
    protected abstract String getHref(Element product);
    protected abstract String getThumbnailUrl(Element product);
    protected abstract String getProductName(Element product);
    protected abstract String getPrice(Element product);
}
|
#!/bin/bash
# Bump NuGet package references for the Appointment plugin: for each tracked
# package, add the latest version, open a GitHub issue, commit with a message
# that closes it, then tag/push a new version and regenerate the changelog.
GIT_STATUS=`git status | grep "nothing to commit, working tree clean" | wc -l`
if (( "$GIT_STATUS" > 0 )); then
    git pull
    cd eFormAPI/Plugins/Appointment.Pn/Appointment.Pn
    CURRENT_NUMBER_OF_COMMITS=`git log --oneline | wc -l`
    PACKAGES=('Microting.eForm' 'Microting.eFormApi.BasePn' 'Microting.AppointmentBase')
    PROJECT_NAME='Appointment.Pn.csproj'
    REPOSITORY='eform-angular-appointment-plugin'
    # Quote the array expansion so element values with spaces stay intact.
    for PACKAGE_NAME in "${PACKAGES[@]}"; do
        OLD_VERSION=`dotnet list package | grep "$PACKAGE_NAME " | grep -oP ' \d\.\d+\.\d.*' | grep -oP ' \d.* \b' | xargs`
        dotnet add $PROJECT_NAME package $PACKAGE_NAME
        NEW_VERSION=`dotnet list package | grep "$PACKAGE_NAME " | grep -oP ' \d\.\d+\.\d.*$' | grep -oP '\d\.\d+\.\d.*$' | grep -oP ' \d\.\d+\.\d.*$' | xargs`
        # Quote both sides: an empty/odd grep result would otherwise break
        # the [ ] test with a syntax error instead of comparing.
        if [ "$NEW_VERSION" != "$OLD_VERSION" ]; then
            echo "We have a new version of $PACKAGE_NAME, so creating github issue and do a commit message to close that said issue"
            RESULT=`curl -X "POST" "https://api.github.com/repos/microting/$REPOSITORY/issues?state=all" \
 -H "Cookie: logged_in=no" \
 -H "Authorization: token $CHANGELOG_GITHUB_TOKEN" \
 -H "Content-Type: text/plain; charset=utf-8" \
 -d $'{
 "title": "Bump '$PACKAGE_NAME' from '$OLD_VERSION' to '$NEW_VERSION'",
 "body": "TBD",
 "assignees": [
 "renemadsen"
 ],
 "labels": [
 ".NET",
 "backend",
 "enhancement"
 ]
}'`
            ISSUE_NUMBER=`echo $RESULT | grep -oP 'number": \d+,' | grep -oP '\d+'`
            git add .
            git commit -a -m "closes #$ISSUE_NUMBER"
        fi
    done
    NEW_NUMBER_OF_COMMITS=`git log --oneline | wc -l`
    # Any new commits mean at least one package was bumped: cut a new tag.
    if (( $NEW_NUMBER_OF_COMMITS > $CURRENT_NUMBER_OF_COMMITS )); then
        CURRENT_GITVERSION=`git tag --sort=-creatordate | cut -d "v" -f 2 | sed -n 1p`
        MAJOR_VERSION=`echo $CURRENT_GITVERSION | cut -d "." -f 1`
        MINOR_VERSION=`echo $CURRENT_GITVERSION | cut -d "." -f 2`
        BUILD_VERSION=`echo $CURRENT_GITVERSION | cut -d "." -f 3`
        BUILD_VERSION=$(($BUILD_VERSION + 1))
        NEW_GIT_VERSION="v$MAJOR_VERSION.$MINOR_VERSION.$BUILD_VERSION"
        git tag "$NEW_GIT_VERSION"
        git push --tags
        git push
        # NOTE(review): EFORM_VERSION is never set in this script — confirm it
        # is exported by the caller, otherwise this prints an empty value.
        echo "Updated Microting eForm to ${EFORM_VERSION} and pushed new version ${NEW_GIT_VERSION}"
        cd ../../../..
        github_changelog_generator -u microting -p $REPOSITORY -t $CHANGELOG_GITHUB_TOKEN
        git add CHANGELOG.md
        git commit -m "Updating changelog"
        git push
    else
        echo "nothing to do, everything is up to date."
    fi
else
    echo "Working tree is not clean, so we are not going to upgrade. Clean, before doing upgrade!"
fi
|
import BaseValidator from '../validators/-base';
/**
 * Validator implementing a logical OR over a set of wrapped validators:
 * a value passes when at least one child validator accepts it.
 */
export class OrValidator extends BaseValidator {
  constructor(...validators) {
    super();
    // Preserve the validators in the order they were supplied.
    this.validators = validators;
  }

  /**
   * @param value candidate value to validate
   * @returns true if any wrapped validator accepts the value
   */
  check(value) {
    for (const validator of this.validators) {
      if (validator.check(value)) {
        return true;
      }
    }
    return false;
  }
}
/**
 * Convenience factory: `or(a, b, ...)` builds an OrValidator that passes
 * when any of the given validators passes.
 */
export default function or(...validators) {
  const combined = new OrValidator(...validators);
  return combined;
}
|
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap; // this is thread-safe!
import java.util.concurrent.ConcurrentMap; // this is thread-safe!
// is this class thread-safe?
// Delegating location tracker.  Thread safety comes from the underlying
// ConcurrentHashMap; the `synchronized` modifiers below additionally
// serialize whole method calls on each tracker instance.
public class PublishingTracker {
    /** Current location per id; concurrent map shared across threads. */
    private final ConcurrentMap<String, Point> locations;

    /**
     * Copies the supplied map into a fresh ConcurrentHashMap so later
     * mutations by the caller are not observed through this tracker.
     */
    public PublishingTracker(Map<String, Point> locations) {
        this.locations = new ConcurrentHashMap<String, Point>(locations);
    }

    /**
     * Returns an unmodifiable snapshot of the current locations.  Because a
     * HashMap copy is taken first, the returned view does NOT reflect
     * updates made after this call.
     */
    public synchronized Map<String, Point> getLocations() {
        return Collections.unmodifiableMap(new HashMap<String, Point>(locations));
    }

    /** Returns the location for {@code id}, or null when the id is unknown. */
    public synchronized Point getLocation(String id) {
        return locations.get(id);
    }

    /**
     * Replaces the location of an existing id.
     *
     * @throws IllegalArgumentException if {@code id} has no current entry
     *         ({@code replace} returns null when the key is absent)
     */
    public synchronized void setLocation(String id, int x, int y) {
        if (locations.replace(id, new Point(x, y)) == null) {
            throw new IllegalArgumentException("No such ID: " + id);
        }
    }

    // Point is immutable: both fields are final and there are no mutators,
    // so instances may be freely shared between threads.
    class Point {
        public final int x, y;

        public Point(int x, int y) {
            this.x = x;
            this.y = y;
        }
    }
}
|
#!/bin/bash
# Bootstrap a development machine: install base packages, vim/neovim plus
# their plugin manager, fzf, and the dotfiles from ~/configuration.

# install packages — a single transaction instead of three separate installs
sudo apt-get update
sudo apt-get install -y tmux zsh xclip

# vim plug (vim's plugin manager)
curl -fLo ~/.vim/autoload/plug.vim --create-dirs \
    https://raw.githubusercontent.com/junegunn/vim-plug/master/plug.vim

# install neovim (latest AppImage release) and vim-plug for neovim
curl -LO https://github.com/neovim/neovim/releases/latest/download/nvim.appimage
chmod u+x nvim.appimage
mv nvim.appimage ~/
sh -c 'curl -fLo "${XDG_DATA_HOME:-$HOME/.local/share}"/nvim/site/autoload/plug.vim --create-dirs \
    https://raw.githubusercontent.com/junegunn/vim-plug/master/plug.vim'

# fzf fuzzy finder
git clone --depth 1 https://github.com/junegunn/fzf.git ~/.fzf
~/.fzf/install

# copy config files from the checked-out configuration repo
cp ~/configuration/.zshrc ~/
cp ~/configuration/.bashrc ~/
sudo cp ~/configuration/etc/vim/vimrc.* /etc/vim/
mkdir -p ~/.config/nvim && cp ~/configuration/init.vim ~/.config/nvim/init.vim
cp ~/configuration/.tmux.conf ~/.tmux.conf
cp ~/configuration/.vimrc ~/.vimrc
|
// Doxygen-generated navigation metadata for struct ARM_USART_MODEM_STATUS:
// each entry is [member name, HTML anchor of its documentation, child list].
var group__usart__interface__gr_structARM__USART__MODEM__STATUS =
[
    [ "cts", "group__usart__interface__gr.html#a0a4ccfb729b3a40a5fd611021268c262", null ],
    [ "dsr", "group__usart__interface__gr.html#a437895b17519a16f920ae07461dd67d2", null ],
    [ "dcd", "group__usart__interface__gr.html#aa56a9ad6e266df78157f0e04feb4b78c", null ],
    [ "ri", "group__usart__interface__gr.html#aa6cf03b82235bedc0acf00acb46130fb", null ],
    [ "reserved", "group__usart__interface__gr.html#aa43c4c21b173ada1b6b7568956f0d650", null ]
];
package com.yunusseker.mvvmarchitecture.ui.main;
import android.arch.lifecycle.LiveData;
import android.arch.lifecycle.MutableLiveData;
import com.yunusseker.mvvmarchitecture.base.BaseViewModel;
import com.yunusseker.mvvmarchitecture.data.repository.post.PostDataSource;
import com.yunusseker.mvvmarchitecture.data.model.PostResponse;
import com.yunusseker.mvvmarchitecture.util.schedulers.BaseSchedulerProvider;
import javax.inject.Inject;
/**
 * Created by yunus.seker on 12.4.2018
 *
 * ViewModel for the main screen: exposes the loaded post payload and any
 * load failure as separate LiveData streams for the view to observe.
 */
public class MainViewModel extends BaseViewModel {
    // Emits the most recent load failure, if any.
    private MutableLiveData<Throwable> error = new MutableLiveData<>();
    // Emits the loaded post payload.
    private MutableLiveData<PostResponse> livedata = new MutableLiveData<>();

    @Inject
    public MainViewModel(BaseSchedulerProvider schedulerProvider) {
        super(schedulerProvider);
    }

    /**
     * Returns the post LiveData.  NOTE(review): the repository fetch below
     * is commented out, so observers only ever see values set elsewhere —
     * confirm whether the Rx chain should be re-enabled.
     */
    public LiveData<PostResponse> getLiveData() {
        // getCompositeDisposable().add(getDataRepository().getPostModel()
        // .observeOn(getSchedulerProvider().ui())
        // .subscribeOn(getSchedulerProvider().io())
        // .subscribe(livedata::setValue, error::setValue)
        // );
        return livedata;
    }

    // Package-private accessor for the error stream.
    LiveData<Throwable> getError() {
        return error;
    }
}
|
<gh_stars>0
package biz.franch.protoi2.carousel;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentPagerAdapter;
import android.support.v4.view.ViewPager;
import java.util.ArrayList;
import biz.franch.protoi2.main_menu.Banner;
import biz.franch.protoi2.main_menu.MainActivity;
import biz.franch.protoi2.R;
/**
 * "Infinite" banner carousel adapter: reports bannerList.size() * 1000 pages
 * and folds every virtual page index back onto the banner list via modulo.
 * While scrolling, the outgoing page is shrunk and the incoming page grown
 * in lockstep to create a center-focus effect.
 */
public class MyPagerAdapter extends FragmentPagerAdapter implements
        ViewPager.OnPageChangeListener {
    private MyLinearLayout cur = null;  // layout of the page scrolling out
    private MyLinearLayout next = null; // layout of the page scrolling in
    private MainActivity context;
    private FragmentManager fm;
    private float scale;
    ArrayList<Banner> bannerList;

    public MyPagerAdapter(MainActivity context, FragmentManager fm, ArrayList<Banner> bannerList) {
        super(fm);
        this.fm = fm;
        this.context = context;
        this.bannerList = bannerList;
    }

    @Override
    public Fragment getItem(int position) {
        // make the first pager bigger than others
        // (the carousel is expected to start at the virtual midpoint, size*1000/2)
        if (position == bannerList.size() * 1000 / 2)
            scale = MainActivity.BIG_SCALE;
        else
            scale = MainActivity.SMALL_SCALE;
        // Fold the virtual position back onto a real banner index.
        position = position % bannerList.size();
        return MyFragment.newInstance(context, position, scale, bannerList);
    }

    @Override
    public int getCount() {
        // Large multiplier fakes an infinite carousel.
        return bannerList.size() * 1000;
    }

    @Override
    public void onPageScrolled(int position, float positionOffset,
                               int positionOffsetPixels) {
        if (positionOffset >= 0f && positionOffset <= 1f) {
            // Shrink the outgoing page and grow the incoming one proportionally.
            cur = getRootView(position);
            next = getRootView(position + 1);
            cur.setScaleBoth(MainActivity.BIG_SCALE
                    - MainActivity.DIFF_SCALE * positionOffset);
            next.setScaleBoth(MainActivity.SMALL_SCALE
                    + MainActivity.DIFF_SCALE * positionOffset);
        }
    }

    @Override
    public void onPageSelected(int position) {
    }

    @Override
    public void onPageScrollStateChanged(int state) {
    }

    // NOTE(review): assumes the fragment for `position` is already attached
    // and has a view — findFragmentByTag(...).getView() will NPE otherwise;
    // confirm the ViewPager offscreen page limit guarantees this.
    private MyLinearLayout getRootView(int position) {
        return (MyLinearLayout)
                fm.findFragmentByTag(this.getFragmentTag(position))
                        .getView().findViewById(R.id.root);
    }

    // Reconstructs the tag FragmentPagerAdapter assigns to its fragments.
    private String getFragmentTag(int position) {
        return "android:switcher:" + context.pager.getId() + ":" + position;
    }
}
|
<reponame>Springest/bookboon<filename>test/bookboon/config_test.rb
require "test_helper"
# Unit tests for Bookboon::Config: default language and the HTTP headers it
# derives from the language / branding / rotation settings.
#
# FIX: Minitest's assert_equal signature is (expected, actual); the original
# tests passed the arguments reversed, which produces misleading "expected X
# got Y" failure messages. Argument order is corrected below.
class Bookboon::ConfigTest < Minitest::Test
  def setup
    @config = Bookboon::Config.new
  end

  def test_default_language
    assert_equal "en", @config.language
  end

  def test_headers_without_branding_rotation
    assert_equal({ "Accept-Language" => "en" }, @config.headers)
  end

  def test_headers_with_different_language
    @config.language = "nl"
    assert_equal({ "Accept-Language" => "nl" }, @config.headers)
  end

  def test_headers_with_branding
    @config.branding_id = "abc"
    assert_equal({ "Accept-Language" => "en", "X-Bookboon-Branding" => "abc" }, @config.headers)
  end

  def test_headers_with_rotation
    @config.rotation_id = "abc"
    assert_equal({ "Accept-Language" => "en", "X-Bookboon-Rotation" => "abc" }, @config.headers)
  end
end
|
package seoul.democracy.issue.domain;
import lombok.Getter;
import lombok.NoArgsConstructor;
import org.hibernate.annotations.GenericGenerator;
import seoul.democracy.issue.dto.CategoryCreateDto;
import seoul.democracy.issue.dto.CategoryUpdateDto;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
/**
 * Issue category entity (이슈범주 — "issue category"), mapped to table
 * TB_ISSUE_CATEGORY.  Instances are built via {@link #create} and mutated
 * through {@link #update}.
 */
@Getter
@NoArgsConstructor
@Entity(name = "TB_ISSUE_CATEGORY")
public class Category {
    // Surrogate primary key (CATE_ID), generated with the DB-native strategy.
    @Id
    @GeneratedValue(generator = "native")
    @GenericGenerator(name = "native", strategy = "native")
    @Column(name = "CATE_ID")
    private Long id;

    /**
     * Category name.
     */
    @Column(name = "CATE_NAME")
    private String name;

    /**
     * Whether the category is in use (USE_YN flag).
     */
    @Column(name = "USE_YN")
    private Boolean enabled;

    /**
     * Ordering sequence of the category.
     */
    @Column(name = "CATE_SEQ")
    private Integer sequence;

    // Private: external code must go through the create() factory.
    private Category(String name, Boolean enabled, Integer sequence) {
        this.name = name;
        this.enabled = enabled;
        this.sequence = sequence;
    }

    /** Factory building a Category from the creation DTO. */
    public static Category create(CategoryCreateDto createDto) {
        return new Category(createDto.getName(), createDto.getEnabled(), createDto.getSequence());
    }

    /** Applies the update DTO in place and returns this entity for chaining. */
    public Category update(CategoryUpdateDto updateDto) {
        this.name = updateDto.getName();
        this.enabled = updateDto.getEnabled();
        this.sequence = updateDto.getSequence();
        return this;
    }
}
|
#!/usr/bin/env bash
# Clean all generated artifacts: schema output, CMake state, fetched
# dependencies and built binaries. Runs from the script's own directory.
set -e -x

cd "$(dirname "$0")"
BUILD_DIR="$(pwd)"
DOWNLOAD_DIR="$(pwd)/build"

# Remove generated schema files, CMake caches/state, deps and binaries in one pass.
rm -rf generated cmake_build CMakeFiles CMakeCache.txt dependencies bin
|
<filename>app/src/main/java/com/example/michael/statstracker/MainActivity.java
package com.example.michael.statstracker;
import android.content.Context;
import android.content.Intent;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.support.v7.app.ActionBarActivity;
import android.support.v7.app.ActionBar;
import android.support.v4.app.Fragment;
import android.os.Bundle;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.os.Build;
import android.widget.Button;
import android.widget.Toast;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.FileInputStream;
import java.io.IOException;
/**
 * Entry screen of the stats tracker: wires three buttons to start/stop the
 * background recording service and to dump recorded events from the SQLite
 * database to logcat.
 */
public class MainActivity extends ActionBarActivity {
    protected final String TAG = "MainActivity";
    SQLiteDatabase db;
    // Reused between rows to avoid re-allocating a builder per event.
    StringBuilder builder = new StringBuilder("");

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        Button start_recording = (Button)findViewById(R.id.begin_button);
        Button end_recording = (Button)findViewById(R.id.end_button);
        Button view_stats = (Button)findViewById(R.id.stats_button);
        StatsDbHelper dbHelper = new StatsDbHelper(getApplicationContext());
        db = dbHelper.getReadableDatabase();
        Log.i(TAG, start_recording.toString());
        // "Begin": launch the background recording service.
        start_recording.setOnClickListener(new View.OnClickListener(){
            @Override
            public void onClick(View v){
                startService(new Intent(v.getContext(), CreeperService.class));
            }
        });
        // "End": stop the background recording service.
        end_recording.setOnClickListener(new View.OnClickListener(){
            @Override
            public void onClick(View v){
                stopService(new Intent(v.getContext(), CreeperService.class));
            }
        });
        // "Stats": dump every recorded (type, time) row to logcat.
        view_stats.setOnClickListener(new View.OnClickListener(){
            @Override
            public void onClick(View v){
                String query = "SELECT * FROM " + StatsContract.StatsEntry.TABLE_NAME;
                Cursor cursor = db.rawQuery(query, null);
                try {
                    if (cursor.moveToFirst()) {
                        do {
                            String event_info = builder.append(cursor.getString(1)).append(" ").append(cursor.getString(2)).toString();
                            builder.setLength(0); // reset the shared builder between rows
                            Log.i(TAG, event_info);
                        } while (cursor.moveToNext());
                    }
                } finally {
                    // FIX: the cursor was previously never closed (resource leak).
                    cursor.close();
                }
                //Intent statsIntent = new Intent(MainActivity.this, StatisticsActivity.class);
                //MainActivity.this.startActivity(statsIntent);
            }
        });
    }

    @Override
    protected void onDestroy() {
        Toast.makeText(this, "OHHH YOU KILLED ME", Toast.LENGTH_SHORT).show();
        if (db != null) {
            db.close(); // FIX: release the database handle on teardown
        }
        // FIX: super.onDestroy() was missing; Android throws
        // SuperNotCalledException when the lifecycle call is not forwarded.
        super.onDestroy();
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.menu_main, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        int id = item.getItemId();
        //noinspection SimplifiableIfStatement
        if (id == R.id.action_settings) {
            return true;
        }
        return super.onOptionsItemSelected(item);
    }
}
|
#!/bin/sh
#
# run-dev: A quick and dirty script to run a testing setup of local nodes.
set -eu

# Build the contracts
make build-contracts-rs

# Build the node first, so that `sleep` in the loop has an effect.
cargo build -p casper-node

BASEDIR=$(readlink -f $(dirname $0))
CHAINSPEC=$(mktemp -t chainspec_XXXXXXXX --suffix .toml)
# Optional trusted block hash for joining an existing network; empty by default.
TRUSTED_HASH="${TRUSTED_HASH:-}"

# Generate a genesis timestamp 30 seconds into the future, unless explicitly given a different one.
TIMESTAMP=$(python3 -c 'from datetime import datetime, timedelta; print((datetime.utcnow() + timedelta(seconds=30)).isoformat("T") + "Z")')
TIMESTAMP=${GENESIS_TIMESTAMP:-$TIMESTAMP}
echo "GENESIS_TIMESTAMP=${TIMESTAMP}"

# Update the chainspec to use the current time as the genesis timestamp.
cp ${BASEDIR}/resources/local/chainspec.toml ${CHAINSPEC}
sed -i "s/^\([[:alnum:]_]*timestamp\) = .*/\1 = \"${TIMESTAMP}\"/" ${CHAINSPEC}
# Rewrite relative ../.. references and the accounts.csv path to absolute paths.
sed -i 's|\.\./\.\.|'"$BASEDIR"'|' ${CHAINSPEC}
sed -i 's|accounts\.csv|'"$BASEDIR"'/resources/local/accounts.csv|' ${CHAINSPEC}

# If no nodes defined, start all.
NODES="${@:-1 2 3 4 5}"
# Launch node $1 as a transient systemd user unit with fresh storage/logs.
# Node 1 is the bootstrap node: it keeps the default bind address, while the
# other nodes bind to a random port and declare a dependency on node 1.
run_node() {
    ID=$1
    STORAGE_DIR=/tmp/node-${ID}-storage
    LOGFILE=/tmp/node-${ID}.log
    # Start from a clean slate: wipe any previous storage and log files.
    rm -rf ${STORAGE_DIR}
    rm -f ${LOGFILE}
    rm -f ${LOGFILE}.stderr
    mkdir -p ${STORAGE_DIR}
    if [ $1 -ne 1 ]
    then
        BIND_ADDRESS_ARG=--config-ext=network.bind_address='0.0.0.0:0'
        DEPS="--property=After=node-1.service --property=Requires=node-1.service"
    else
        BIND_ADDRESS_ARG=
        DEPS=
    fi
    # Only pass a trusted hash when one was provided via the environment.
    if ! [ -z "$TRUSTED_HASH" ]
    then
        TRUSTED_HASH_ARG=--config-ext=node.trusted_hash="${TRUSTED_HASH}"
    else
        TRUSTED_HASH_ARG=
    fi
    echo "$TRUSTED_HASH_ARG"
    # We run with a 10 minute timeout, to allow for compilation and loading.
    systemd-run \
        --user \
        --unit node-$ID \
        --description "Casper Dev Node ${ID}" \
        --collect \
        --no-block \
        --property=Type=notify \
        --property=TimeoutSec=600 \
        --property=WorkingDirectory=${BASEDIR} \
        $DEPS \
        --setenv=RUST_LOG=trace \
        --property=StandardOutput=file:${LOGFILE} \
        --property=StandardError=file:${LOGFILE}.stderr \
        -- \
        cargo run -p casper-node \
        validator \
        resources/local/config.toml \
        --config-ext=network.systemd_support=true \
        --config-ext=consensus.secret_key_path=secret_keys/node-${ID}.pem \
        --config-ext=storage.path=${STORAGE_DIR} \
        --config-ext=network.gossip_interval=1000 \
        --config-ext=node.chainspec_config_path=${CHAINSPEC} \
        ${BIND_ADDRESS_ARG} \
        ${TRUSTED_HASH_ARG}
    echo "Started node $ID, logfile: ${LOGFILE}"
    # Sleep so that nodes are actually started in sequence.
    # Hopefully, fixes some of the race condition issues during startup.
    sleep 1;
}
# Start each requested node in order (run_node sleeps between launches).
for i in $NODES; do
    run_node $i
done;

echo "Test network starting."
echo
echo "To stop all nodes, run stop-dev.sh"
|
<reponame>Csalex01/NISZ<filename>nisz/src/router/index.js<gh_stars>1-10
import { createRouter, createWebHistory } from 'vue-router'
import Index from '../views/Index.vue'
import Login from "../views/Login.vue"
import Signup from "../views/Signup.vue"
import Templates from "../views/Templates.vue"
// Application route table: one entry per top-level view.
const routes = [
  { path: '/', name: 'Index', component: Index },
  { path: '/login', name: 'Login', component: Login },
  { path: '/signup', name: 'Signup', component: Signup },
  { path: '/templates', name: 'Templates', component: Templates }
]

// HTML5 history-mode router rooted at the app's base URL.
const router = createRouter({
  history: createWebHistory(process.env.BASE_URL),
  routes
})

export default router
|
#!/usr/bin/env bash
# Starting of Gunicorn server with Legion's HTTP handler
# All {{ ... }} placeholders are filled in by the templating engine before
# this script is rendered; PATH and MODEL_LOCATION are set only for the
# gunicorn process started below (single command via line continuations).
PATH={{ path_docker }}:$PATH \
MODEL_LOCATION={{ model_location }} \
{{ gunicorn_bin_docker }} \
--pythonpath /app/ \
--timeout {{ timeout }} \
-b {{ host }}:{{ port }} \
-w {{ workers }} \
--threads {{ threads }} \
{{ wsgi_handler }}
|
#!/bin/bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Derive the property file path: svn_ignore.txt next to this script.
PROPFILE=`echo $0 | sed -e 's/svn_ignore.sh/svn_ignore.txt/'`
# Apply svn:ignore to every version-controlled Maven module directory.
# FIX: the original `for dir in \`find ...\`` word-split on whitespace, so
# directory names containing spaces broke; -print0 with `read -d ''` is safe.
find . -type d -print0 | while IFS= read -r -d '' dir; do
    if [[ -f "$dir/pom.xml" && -d "$dir/.svn" ]]; then
        svn propset svn:ignore -F "$PROPFILE" "$dir"
    fi
done
|
<reponame>mentix02/bog
import React from "react";
import Home from "./pages/Home";
import Write from "./pages/Write";
import SignIn from "./pages/SignIn";
import SignUp from "./pages/SignUp";
import Profile from "./pages/Profile";
import NotFound from "./pages/NotFound";
import Dashboard from "./pages/Dashboard";
import PostDetail from "./pages/PostDetail";
import { Route, Switch } from "react-router-dom";
import PrivateRoute from "./components/PrivateRoute";
// Top-level route table. <Switch> renders only the first matching child.
//
// NOTE(review): react-router's <Switch> matches among its *direct* children;
// the <Route>s nested inside <PrivateRoute> rely on PrivateRoute rendering
// its children for /write and /dashboard to resolve — confirm it does,
// otherwise those paths fall through to the catch-all NotFound route.
const BaseRouter = () => (
  <Switch>
    <Route exact path="/" component={Home} />
    <Route path="/sign-in" component={SignIn} />
    <Route path="/sign-up" component={SignUp} />
    <Route path="/post/:slug" component={PostDetail} />
    <Route path="/profile/:slug" component={Profile} />
    <Route path="/not-found" component={NotFound} />
    <PrivateRoute>
      <Route path="/write" component={Write} />
      <Route path="/dashboard" component={Dashboard} />
    </PrivateRoute>
    <Route component={NotFound} />
  </Switch>
);
|
package seedu.planner.storage;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static seedu.planner.testutil.TypicalModules.CS2040;
import static seedu.planner.testutil.TypicalModules.CS2103T;
import static seedu.planner.testutil.TypicalModules.getTypicalModulePlanner;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Set;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.rules.TemporaryFolder;
import seedu.planner.commons.exceptions.DataConversionException;
import seedu.planner.model.ModulePlanner;
import seedu.planner.model.ReadOnlyModulePlanner;
import seedu.planner.model.course.Major;
import seedu.planner.model.user.UserProfile;
/**
 * Integration tests for {@code JsonModulePlannerStorage}: read/save round
 * trips, missing and malformed files, and null-argument contracts.
 */
public class JsonModulePlannerStorageTest {
    private static final Path TEST_DATA_FOLDER = Paths.get("src", "test", "data", "JsonModulePlannerStorageTest");

    @Rule
    public ExpectedException thrown = ExpectedException.none();
    @Rule
    public TemporaryFolder testFolder = new TemporaryFolder();

    @Test
    public void readModulePlanner_nullFilePath_throwsNullPointerException() throws Exception {
        thrown.expect(NullPointerException.class);
        readModulePlanner(null);
    }

    /** Reads a planner from {@code filePath}, resolved against the test data folder. */
    private java.util.Optional<ReadOnlyModulePlanner> readModulePlanner(String filePath) throws Exception {
        return new JsonModulePlannerStorage(Paths.get(filePath))
                .readModulePlanner(addToTestDataPathIfNotNull(filePath));
    }

    /** Resolves a file name against the test data folder; passes null through. */
    private Path addToTestDataPathIfNotNull(String prefsFileInTestDataFolder) {
        return prefsFileInTestDataFolder != null
                ? TEST_DATA_FOLDER.resolve(prefsFileInTestDataFolder)
                : null;
    }

    @Test
    public void read_missingFile_emptyResult() throws Exception {
        assertFalse(readModulePlanner("NonExistentFile.json").isPresent());
    }

    // NOTE(review): the method name says "Xml" but the storage under test is
    // JSON — likely a leftover from the XML storage this test was adapted from.
    @Test
    public void read_notXmlFormat_exceptionThrown() throws Exception {
        thrown.expect(DataConversionException.class);
        readModulePlanner("NotJsonFormatModulePlanner.json");
        /* IMPORTANT: Any code below an exception-throwing line (like the one above) will be ignored.
         * That means you should not have more than one exception test in one method
         */
    }

    @Test
    public void readModulePlanner_invalidModuleModulePlanner_throwDataConversionException() throws Exception {
        thrown.expect(DataConversionException.class);
        readModulePlanner("invalidModuleModulePlanner.json");
    }

    @Test
    public void readModulePlanner_invalidAndValidModuleModulePlanner_throwDataConversionException() throws Exception {
        thrown.expect(DataConversionException.class);
        readModulePlanner("invalidAndValidModuleModulePlanner.json");
    }

    @Test
    public void readAndSaveModulePlanner_allInOrder_success() throws Exception {
        // NOTE(review): the ".xml" extension appears to be a leftover; the
        // storage writes JSON regardless of the file name — confirm and rename.
        Path filePath = testFolder.getRoot().toPath().resolve("TempModulePlanner.xml");
        ModulePlanner original = getTypicalModulePlanner();
        JsonModulePlannerStorage jsonModulePlannerStorage = new JsonModulePlannerStorage(filePath);

        //Save in new file and read back
        jsonModulePlannerStorage.saveModulePlanner(original, filePath);
        ReadOnlyModulePlanner readBack = jsonModulePlannerStorage.readModulePlanner(filePath).get();
        assertEquals(original, new ModulePlanner(readBack));

        //Modify data, overwrite exiting file, and read back
        original.deleteModules(Set.of(CS2040));
        original.addModules(Set.of(CS2103T), 2);
        jsonModulePlannerStorage.saveModulePlanner(original, filePath);
        readBack = jsonModulePlannerStorage.readModulePlanner(filePath).get();
        assertEquals(original, new ModulePlanner(readBack));

        //Save and read without specifying file path
        original.setUserProfile(new UserProfile(Major.COMPUTER_ENGINEERING, new ArrayList<>()));
        jsonModulePlannerStorage.saveModulePlanner(original); //file path not specified
        readBack = jsonModulePlannerStorage.readModulePlanner().get(); //file path not specified
        assertEquals(original, new ModulePlanner(readBack));
    }

    @Test
    public void saveModulePlanner_nullModulePlanner_throwsNullPointerException() {
        thrown.expect(NullPointerException.class);
        saveModulePlanner(null, "SomeFile.xml");
    }

    /**
     * Saves {@code ModulePlanner} at the specified {@code filePath}.
     */
    private void saveModulePlanner(ReadOnlyModulePlanner modulePlanner, String filePath) {
        try {
            new JsonModulePlannerStorage(Paths.get(filePath))
                    .saveModulePlanner(modulePlanner, addToTestDataPathIfNotNull(filePath));
        } catch (IOException ioe) {
            throw new AssertionError("There should not be an error writing to the file.", ioe);
        }
    }

    @Test
    public void saveModulePlanner_nullFilePath_throwsNullPointerException() {
        thrown.expect(NullPointerException.class);
        saveModulePlanner(new ModulePlanner(), null);
    }
}
|
#! /bin/bash
# Benchmark the pairwise-distance scripts (L1 variant and default metric) on
# the three Zeisel dataset representations (UMI gene counts, kallisto TPM,
# TCC), each with 32 worker processes.  Per run, stdout is captured in a
# temp_op_*.txt file and the `time` measurement (stderr) in a time_*.txt file.
(time python get_pairwise_distances_l1.py Zeisel_UMI_gene_distribution_subsample100.dat Zeisel_UMI_test_l1.dat 32) > temp_op_UMI_l1.txt 2> time_UMI_l1.txt
(time python get_pairwise_distances_l1.py Zeisel_kallisto_TPM_distribution_subsample100.dat Zeisel_kallisto_test_l1.dat 32) > temp_op_kall_l1.txt 2> time_kallisto_l1.txt
(time python get_pairwise_distances_l1.py Zeisel_TCC_distribution_subsample100.dat Zeisel_TCC_test_l1.dat 32) > temp_op_TCC_l1.txt 2> time_TCC_l1.txt
(time python get_pairwise_distances.py Zeisel_UMI_gene_distribution_subsample100.dat Zeisel_UMI_test.dat 32) > temp_op_UMI.txt 2> time_UMI.txt
(time python get_pairwise_distances.py Zeisel_kallisto_TPM_distribution_subsample100.dat Zeisel_kallisto_test.dat 32) > temp_op_kall.txt 2> time_kallisto.txt
(time python get_pairwise_distances.py Zeisel_TCC_distribution_subsample100.dat Zeisel_TCC_test.dat 32) > temp_op_TCC.txt 2> time_TCC.txt
|
#!/bin/bash
# Copies the xcodeproject from the bazel output directory to the BAZEL_WORKSPACE directory when ran
set -euo pipefail

# Source project under bazel-out and destination inside the user's workspace.
readonly project_path="${PWD}/$(project_short_path)"
readonly dest="${BUILD_WORKSPACE_DIRECTORY}/$(project_short_path)/"
readonly tmp_dest=$(mktemp -d)/$(project_full_path)/
readonly stubs_dir="${dest}/bazelstubs"
mkdir -p "${stubs_dir}"

# Install the bazel tool stubs the generated project invokes in place of the
# real Xcode toolchain (compiler/linker wrappers, plist stub, helpers).
readonly installer="$(installer_short_path)"
installer_dir=$(dirname "${stubs_dir}/${installer}")
mkdir -p "${installer_dir}"
cp "${installer}" "${stubs_dir}/${installer}"
cp "$(clang_stub_short_path)" "${stubs_dir}/clang-stub"
cp "$(clang_stub_ld_path)" "${stubs_dir}/ld-stub"
cp "$(clang_stub_swiftc_path)" "${stubs_dir}/swiftc-stub"
cp "$(print_json_leaf_nodes_path)" "${stubs_dir}/print_json_leaf_nodes"
cp "$(infoplist_stub)" "${stubs_dir}/Info-stub.plist"
cp "$(build_wrapper_path)" "${stubs_dir}/build-wrapper"
cp "$(output_processor_path)" "${stubs_dir}/output-processor.rb"

# Stage the project in a temp dir so paths can be rewritten before rsyncing.
rm -fr "${tmp_dest}"
mkdir -p "$(dirname $tmp_dest)"
cp -r "${project_path}" "$tmp_dest"
chmod -R +w "${tmp_dest}"

# always trim three ../ from path, since that's "bazel-out/darwin-fastbuild/bin"
sed -i.bak -E -e 's|([ "])../../../|\1|g' "${tmp_dest}/project.pbxproj"
rm "${tmp_dest}/project.pbxproj.bak"
rsync --recursive --quiet --copy-links "${tmp_dest}" "${dest}"

# The new build system leaves a subdirectory called XCBuildData in the DerivedData directory which causes incremental build and test attempts to fail at launch time.
# The error message says "Cannot attach to pid." This error seems to happen in the Xcode IDE, not when the project is tested from the xcodebuild command.
# Therefore, we force xcode to use the legacy build system by adding the contents of WorkspaceSettings.xcsettings to the generated project.
mkdir -p "$dest/project.xcworkspace/xcshareddata/"
cp "$(workspacesettings_xcsettings_short_path)" "$dest/project.xcworkspace/xcshareddata/"
#
|
#
#/**
# * Licensed to the Apache Software Foundation (ASF) under one
# * or more contributor license agreements. See the NOTICE file
# * distributed with this work for additional information
# * regarding copyright ownership. The ASF licenses this file
# * to you under the Apache License, Version 2.0 (the
# * "License"); you may not use this file except in compliance
# * with the License. You may obtain a copy of the License at
# *
# * http://www.apache.org/licenses/LICENSE-2.0
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# */
# Set environment variables here.
# This script sets variables multiple times over the course of starting an hbase process,
# so try to keep things idempotent unless you want to take an even deeper look
# into the startup scripts (bin/hbase, etc.)
# The java implementation to use. Java 1.7+ required.
export JAVA_HOME=/usr/lib/jvm/java-7-oracle-amd64
# Extra Java CLASSPATH elements. Optional.
# export HBASE_CLASSPATH=
# The maximum amount of heap to use. Default is left to JVM default.
# export HBASE_HEAPSIZE=1G
# Uncomment below if you intend to use off heap cache. For example, to allocate 8G of
# offheap, set the value to "8G".
# export HBASE_OFFHEAPSIZE=1G
# Extra Java runtime options.
# Below are what we set by default. May only work with SUN JVM.
# For more on why as well as other possible settings,
# see http://wiki.apache.org/hadoop/PerformanceTuning
export HBASE_OPTS="-XX:+UseConcMarkSweepGC"
# Configure PermSize. Only needed in JDK7. You can safely remove it for JDK8+
export HBASE_MASTER_OPTS="$HBASE_MASTER_OPTS -XX:PermSize=128m -XX:MaxPermSize=128m"
export HBASE_REGIONSERVER_OPTS="$HBASE_REGIONSERVER_OPTS -XX:PermSize=128m -XX:MaxPermSize=128m"
# Uncomment one of the below three options to enable java garbage collection logging for the server-side processes.
# This enables basic gc logging to the .out file.
# export SERVER_GC_OPTS="-verbose:gc -XX:+PrintGCDetails -XX:+PrintGCDateStamps"
# This enables basic gc logging to its own file.
# If FILE-PATH is not replaced, the log file(.gc) would still be generated in the HBASE_LOG_DIR .
# export SERVER_GC_OPTS="-verbose:gc -XX:+PrintGCDetails -XX:+PrintGCDateStamps -Xloggc:<FILE-PATH>"
# This enables basic GC logging to its own file with automatic log rolling. Only applies to jdk 1.6.0_34+ and 1.7.0_2+.
# If FILE-PATH is not replaced, the log file(.gc) would still be generated in the HBASE_LOG_DIR .
# export SERVER_GC_OPTS="-verbose:gc -XX:+PrintGCDetails -XX:+PrintGCDateStamps -Xloggc:<FILE-PATH> -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=1 -XX:GCLogFileSize=512M"
# Uncomment one of the below three options to enable java garbage collection logging for the client processes.
# This enables basic gc logging to the .out file.
# export CLIENT_GC_OPTS="-verbose:gc -XX:+PrintGCDetails -XX:+PrintGCDateStamps"
# This enables basic gc logging to its own file.
# If FILE-PATH is not replaced, the log file(.gc) would still be generated in the HBASE_LOG_DIR .
# export CLIENT_GC_OPTS="-verbose:gc -XX:+PrintGCDetails -XX:+PrintGCDateStamps -Xloggc:<FILE-PATH>"
# This enables basic GC logging to its own file with automatic log rolling. Only applies to jdk 1.6.0_34+ and 1.7.0_2+.
# If FILE-PATH is not replaced, the log file(.gc) would still be generated in the HBASE_LOG_DIR .
# export CLIENT_GC_OPTS="-verbose:gc -XX:+PrintGCDetails -XX:+PrintGCDateStamps -Xloggc:<FILE-PATH> -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=1 -XX:GCLogFileSize=512M"
# See the package documentation for org.apache.hadoop.hbase.io.hfile for other configurations
# needed setting up off-heap block caching.
# Uncomment and adjust to enable JMX exporting
# See jmxremote.password and jmxremote.access in $JRE_HOME/lib/management to configure remote password access.
# More details at: http://java.sun.com/javase/6/docs/technotes/guides/management/agent.html
# NOTE: HBase provides an alternative JMX implementation to fix the random ports issue, please see JMX
# section in HBase Reference Guide for instructions.
# export HBASE_JMX_BASE="-Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.authenticate=false"
# export HBASE_MASTER_OPTS="$HBASE_MASTER_OPTS $HBASE_JMX_BASE -Dcom.sun.management.jmxremote.port=10101"
# export HBASE_REGIONSERVER_OPTS="$HBASE_REGIONSERVER_OPTS $HBASE_JMX_BASE -Dcom.sun.management.jmxremote.port=10102"
# export HBASE_THRIFT_OPTS="$HBASE_THRIFT_OPTS $HBASE_JMX_BASE -Dcom.sun.management.jmxremote.port=10103"
# export HBASE_ZOOKEEPER_OPTS="$HBASE_ZOOKEEPER_OPTS $HBASE_JMX_BASE -Dcom.sun.management.jmxremote.port=10104"
# export HBASE_REST_OPTS="$HBASE_REST_OPTS $HBASE_JMX_BASE -Dcom.sun.management.jmxremote.port=10105"
# File naming hosts on which HRegionServers will run. $HBASE_HOME/conf/regionservers by default.
# export HBASE_REGIONSERVERS=${HBASE_HOME}/conf/regionservers
# Uncomment and adjust to keep all the Region Server pages mapped to be memory resident
#HBASE_REGIONSERVER_MLOCK=true
#HBASE_REGIONSERVER_UID="hbase"
# File naming hosts on which backup HMaster will run. $HBASE_HOME/conf/backup-masters by default.
# export HBASE_BACKUP_MASTERS=${HBASE_HOME}/conf/backup-masters
# Extra ssh options. Empty by default.
# export HBASE_SSH_OPTS="-o ConnectTimeout=1 -o SendEnv=HBASE_CONF_DIR"
# Where log files are stored. $HBASE_HOME/logs by default.
# export HBASE_LOG_DIR=${HBASE_HOME}/logs
# Enable remote JDWP debugging of major HBase processes. Meant for Core Developers
# export HBASE_MASTER_OPTS="$HBASE_MASTER_OPTS -Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=8070"
# export HBASE_REGIONSERVER_OPTS="$HBASE_REGIONSERVER_OPTS -Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=8071"
# export HBASE_THRIFT_OPTS="$HBASE_THRIFT_OPTS -Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=8072"
# export HBASE_ZOOKEEPER_OPTS="$HBASE_ZOOKEEPER_OPTS -Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=8073"
# A string representing this instance of hbase. $USER by default.
# export HBASE_IDENT_STRING=$USER
# The scheduling priority for daemon processes. See 'man nice'.
# export HBASE_NICENESS=10
# The directory where pid files are stored. /tmp by default.
# export HBASE_PID_DIR=/var/hadoop/pids
# Seconds to sleep between slave commands. Unset by default. This
# can be useful in large clusters, where, e.g., slave rsyncs can
# otherwise arrive faster than the master can service them.
# export HBASE_SLAVE_SLEEP=0.1
# Tell HBase whether it should manage its own instance of ZooKeeper or not.
# export HBASE_MANAGES_ZK=true
# The default log rolling policy is RFA, where the log file is rolled as per the size defined for the
# RFA appender. Please refer to the log4j.properties file to see more details on this appender.
# In case one needs to do log rolling on a date change, one should set the environment property
# HBASE_ROOT_LOGGER to "<DESIRED_LOG LEVEL>,DRFA".
# For example:
# HBASE_ROOT_LOGGER=INFO,DRFA
# The reason for changing default to RFA is to avoid the boundary case of filling out disk space as
# DRFA doesn't put any cap on the log size. Please refer to HBase-5655 for more context.
|
/**
 * Immutable-ish record describing a single employee.
 * Exposes read accessors for the three fields supplied at construction.
 */
class Employee {
  /**
   * @param {string} name   employee's display name
   * @param {number} age    age in years
   * @param {number} salary salary amount
   */
  constructor(name, age, salary) {
    this.name = name;
    this.age = age;
    this.salary = salary;
  }

  /** @returns {string} the name given at construction */
  getName() {
    return this.name;
  }

  /** @returns {number} the age given at construction */
  getAge() {
    return this.age;
  }

  /** @returns {number} the salary given at construction */
  getSalary() {
    return this.salary;
  }
}
#!/usr/bin/env bash
set -x
# Registry coordinates and image names for the launcher documentation build.
GENERATOR_DOCKER_HUB_USERNAME=openshiftioadmin
REGISTRY_URI="quay.io"
REGISTRY_NS="fabric8"
REGISTRY_IMAGE="launcher-documentation"
BUILDER_IMAGE="launcher-documentation-builder"
BUILDER_CONT="launcher-documentation-builder-container"
DEPLOY_IMAGE="launcher-documentation-deploy"
# TARGET=rhel switches to the RHEL-based image name and Dockerfile; anything
# else (including unset) builds the community image.
if [ "$TARGET" = "rhel" ]; then
REGISTRY_URL=${REGISTRY_URI}/openshiftio/rhel-${REGISTRY_NS}-${REGISTRY_IMAGE}
DOCKERFILE="Dockerfile.deploy.rhel"
else
REGISTRY_URL=${REGISTRY_URI}/openshiftio/${REGISTRY_NS}-${REGISTRY_IMAGE}
DOCKERFILE="Dockerfile.deploy"
fi
# Directory where the builder container writes the generated HTML guides.
TARGET_DIR="html"
# Log in to a container registry, but only when credentials were supplied.
# $1 = username, $2 = password, $3 = registry host. A missing username or
# password makes this a silent no-op (useful for local/unauthenticated runs).
# Fix: the expansions are now quoted so passwords containing spaces or shell
# metacharacters are passed to `docker login` intact.
function docker_login() {
    local USERNAME=$1
    local PASSWORD=$2
    local REGISTRY=$3
    if [ -n "${USERNAME}" ] && [ -n "${PASSWORD}" ]; then
        docker login -u "${USERNAME}" -p "${PASSWORD}" "${REGISTRY}"
    fi
}
# Tag the freshly built deploy image as $1 and push it to the registry.
# Fix: expansions are quoted to survive unexpected whitespace in image names.
function tag_push() {
    local TARGET_IMAGE=$1
    docker tag "${DEPLOY_IMAGE}" "${TARGET_IMAGE}"
    docker push "${TARGET_IMAGE}"
}
# Exit on error
set -e
# CICO_LOCAL unset means we are on CI infrastructure: import credentials from
# the jenkins-env file and prepare the host (selinux off, docker running).
if [ -z "$CICO_LOCAL" ]; then
[ -f jenkins-env ] && cat jenkins-env | grep -e PASS -e USER -e GIT -e DEVSHIFT > inherit-env
[ -f inherit-env ] && . inherit-env
# We need to disable selinux for now, XXX
/usr/sbin/setenforce 0 || :
# Get all the deps in
yum -y install docker make git
service docker start
fi
#CLEAN
# Stop/remove any builder container left over from a previous run.
docker ps | grep -q ${BUILDER_CONT} && docker stop ${BUILDER_CONT}
docker ps -a | grep -q ${BUILDER_CONT} && docker rm ${BUILDER_CONT}
rm -rf ${TARGET_DIR}/
#BUILD
docker build -t ${BUILDER_IMAGE} -f Dockerfile.build .
# World-writable so the container user can write generated files into the
# bind-mounted host directory.
mkdir -pm 777 ${TARGET_DIR}/
mkdir -pm 777 ${TARGET_DIR}/docs
mkdir -pm 777 ${TARGET_DIR}/docs/images
# Keep the builder container alive with tail so we can docker-exec into it.
docker run --detach=true --name ${BUILDER_CONT} -t -v $(pwd)/${TARGET_DIR}:/${TARGET_DIR}:Z ${BUILDER_IMAGE} /bin/tail -f /dev/null #FIXME
docker exec ${BUILDER_CONT} sh scripts/build_guides.sh
#Need to do this again to set permission of images and html files
chmod -R 0777 ${TARGET_DIR}/
#LOGIN
docker_login "${QUAY_USERNAME}" "${QUAY_PASSWORD}" "${REGISTRY_URI}"
#BUILD DEPLOY IMAGE
docker build -t ${DEPLOY_IMAGE} -f "${DOCKERFILE}" .
#PUSH
# Only push from CI: tag with a truncated git commit hash plus "latest".
if [ -z "$CICO_LOCAL" ]; then
TAG=$(echo $GIT_COMMIT | cut -c1-${DEVSHIFT_TAG_LEN})
tag_push "${REGISTRY_URL}:${TAG}"
tag_push "${REGISTRY_URL}:latest"
fi
|
<filename>src/framework/theme/components/calendar-kit/components/calendar-navigation/calendar-navigation.component.ts
/**
 * @license
 * Copyright Akveo. All Rights Reserved.
 * Licensed under the MIT License. See License.txt in the project root for license information.
 */
import { ChangeDetectionStrategy, Component, EventEmitter, Input, Output } from '@angular/core';
@Component({
selector: 'nb-calendar-navigation',
styles: [`
:host {
display: flex;
justify-content: center;
}
:host button {
height: 3.125rem;
}
`],
template: `
<button class="btn btn-primary" (click)="changeMode.emit()">
{{ date | nbCalendarDate }}
</button>
`,
changeDetection: ChangeDetectionStrategy.OnPush,
})
/**
 * Calendar header button: renders the given date (formatted by the
 * `nbCalendarDate` pipe) and emits `changeMode` when clicked so the parent
 * calendar can switch its view mode.
 */
export class NbCalendarNavigationComponent {
// Date displayed on the navigation button.
@Input() date: Date;
// Fired on button click; carries no payload.
@Output() changeMode = new EventEmitter<void>();
}
|
<gh_stars>0
// Barrel file: re-exports the public API of the 3D-object package so
// consumers can import everything from a single path.
export * from './objects-3d.module';
export * from './abstract-object-3d.directive';
export * from './group';
export * from './mesh';
export * from './points';
export * from './scene';
<html>
<head>
<title>Star Wars API Table</title>
</head>
<body>
<div id="app">
<h1>Star Wars Movies</h1>
<table>
<thead>
<tr>
<th>Title</th>
<th>Release Date</th>
</tr>
</thead>
<tbody>
<tr v-for="movie in movies" :key="movie.title">
<td>{{movie.title}}</td>
<td>{{movie.release_date}}</td>
</tr>
</tbody>
</table>
</div>
<script src="https://cdn.jsdelivr.net/npm/vue/dist/vue.js"></script>
<script>
// Vue instance backing the #app table; `movies` is populated once from SWAPI.
let app = new Vue({
el: "#app",
data: {
movies: []
},
created() {
// Fix: fetch over https — the plain-http URL is blocked as mixed content
// when this page is served over https, and swapi.dev redirects to https
// anyway.
fetch('https://swapi.dev/api/films/')
.then(response => response.json())
.then(data => {
this.movies = data.results;
})
}
});
</script>
</body>
</html>
#!/usr/bin/env bash
# Copyright 2019 Wirepas Ltd licensed under Apache License, Version 2.0
#globals
# Sentinel defaults for the CLI option variables consumed by _parse_long /
# parse_args. Fixes: the duplicate `f="foo"` assignment has been removed, and
# s / h (set by --settings / --help and string-compared in parse_args) are now
# initialized explicitly so the comparisons never read an unset variable.
c="foo"   # command: flash | erase | list
d="foo"   # device id
f="foo"   # firmware image file
l="foo"   # placeholder (not read elsewhere in this script)
r="foo"   # rtt sub-command: start | kill | list
s=""      # becomes "settings" when -s/--settings is given
h=""      # becomes "help" when -h/--help is given
# Parse long/short CLI options into the global option variables (c, d, f, r,
# s, h). Options that take a value consume two positional slots (double
# shift); flags consume one. Unknown parameters abort the script.
function _parse_long
{
# Gather commands
POSITIONAL=()
while [[ $# -gt 0 ]]
do
key="$1"
case $key in
"-c" | "--command")
#echo $1 $2
# The command value is mandatory; bail out with usage if it is missing.
c=$2
if [[ -z "${c}" ]]
then
echo "invalid command"
usage
exit
fi
shift # past argument
shift
;;
"-l" | "--list")
# --list is shorthand for the "list" command; it takes no value.
c="list"
#echo $1 $2
shift # past argument
#shift
;;
"-s" | "--settings")
#echo $1 $2
s="settings"
shift # past argument
shift
;;
"-r" | "--rtt")
#echo $1 $2
# RTT sub-command (start/kill/list); value is mandatory.
r=$2
if [[ -z "${r}" ]]
then
echo "invalid command"
usage
exit
fi
shift # past argument
shift
;;
"-d" | "--device")
#echo $1 $2
d="$2"
shift # past argument
shift
;;
"-h" | "--help")
#echo $1 $2
h="help"
shift # past argument
#shift
;;
"-f" | "--file")
#echo $1 $2
f="$2"
shift # past argument
shift
;;
"--debug" )
# Turn on shell tracing for the rest of the run.
set -x
shift
;;
*)
echo "unknown parameter $1"
exit 1
esac
done
}
# Print command-line help to stderr. Called both for --help and whenever a
# required value is missing.
function usage
{
echo "Usage: $0 COMMAND <OPTIONS>" 1>&2;
echo " --command <options> : flash commands " 1>&2;
echo " erase --device [nodeid] : erase flash" 1>&2;
echo " flash --device [nodeid] --file [filename] : flash firmware image" 1>&2;
echo " " 1>&2;
echo " --list t : list connected devices" 1>&2;
echo " " 1>&2;
echo " --settings : show settings" 1>&2;
echo " --rtt <options> : RTT log commands " 1>&2;
echo " start --device [deviceid] : start logging session" 1>&2;
echo " kill --device [session id] : kill session" 1>&2;
echo " list : list sessions" 1>&2;
}
# Print the current tool configuration (taken from WM_* environment
# variables) followed by the connected-device list.
# NOTE(review): device_list_devices is defined elsewhere (presumably in a
# sourced helper) — confirm it is in scope before calling this standalone.
function show_settings
{
echo "Settings"
echo "Device family: ${WM_DEFAULT_JLINK_DEVICE}"
echo "SDK projects directory: ${WM_UTS_SDK_PATH}"
echo "Firmware images directory: ${WM_UTS_SDK_IMAGES_PATH}"
echo "Connected Devices"
device_list_devices
}
# Flash a firmware image onto a device.
# $1 = device id, $2 = firmware image path; both are required, otherwise the
# usage text is shown instead.
function flash_device
{
if [[ -n "$1" && -n "$2" ]]
then
jlink_flash_menu "$1" "$2"
else
usage
fi
}
# Erase the flash of the device whose id is $1; without an id, show usage.
function erase_device
{
if [[ -n "$1" ]]
then
jlink_erase_menu "$1"
else
usage
fi
}
# Top-level CLI dispatcher: parse the arguments into globals via _parse_long,
# then run exactly one action based on which global was set (help wins, then
# command c, then settings s, then rtt r). Always exits.
function parse_args
{
if [[ $# -eq "0" ]]
then
exit 0
fi
_parse_long "$@"
if [[ ${h} == "help" ]]
then
usage
exit 0
fi
if [[ ${c} == "flash" ]]
then
# NOTE(review): $d and $f are intentionally(?) unquoted here — paths with
# spaces would word-split; confirm before quoting.
flash_device $d $f
elif [[ ${c} == "erase" ]]
then
erase_device $d
elif [[ ${c} == "list" ]]
then
device_list_devices
elif [[ ${s} == "settings" ]]
then
show_settings
elif [[ ${r} == "start" ]]
then
# Allocate a free TCP port for the RTT logging session.
sessionport=$(find_free_port)
rtt_start_session ${d} ${sessionport}
elif [[ ${r} == "kill" ]]
then
rtt_delete_session ${d}
elif [[ ${r} == "list" ]]
then
rtt_find_sessions
else
echo "Invalid command"
usage
fi
exit 0
}
# Run the CLI only when this file is executed directly (not sourced as a
# library of helper functions).
if [[ "$BASH_SOURCE" == "$0" ]]
then
parse_args "$@"
fi
|
#!/usr/bin/env sh
# Clean previous artifacts, compile via the hxml build description, run the
# Neko test binary, then open the generated SWF in the default browser.
rm -f ./build/*
haxe build.hxml
neko ./build/TestMain.n
open http://rehx.dev/swf.swf
|
<filename>snabbDom/src/use-h-mine.js
// Demo for a hand-rolled snabbdom clone: render a keyed list, then on button
// click patch it against a new list that inserts key 'Q' at the head
// (exercises the keyed-diff path of patch()).
import h from "./mySnabbDom/h"
import patch from "./mySnabbDom/patch"
//var myNode1 = h("li", {}, "栗子")
// Initial virtual tree: <ul> with five keyed <li> children.
var myNode1 = h("ul",{}, [
h("li",{key:'A'},"A"),
h("li",{key:'B'},"B"),
h("li",{key:'C'},"C"),
h("li",{key:'D'},"D"),
h("li",{key:'E'},"E"),
])
// First patch mounts the vnode onto the real #container element.
const container = document.getElementById('container')
patch(container, myNode1)
document.getElementById("btn").addEventListener("click", function(){
// New tree: same keys A-E plus a new 'Q' prepended.
var myNode = h("ul",{}, [
h("li",{key:'Q'},"Q"),
h("li",{key:'A'},"A"),
h("li",{key:'B'},"B"),
h("li",{key:'C'},"C"),
h("li",{key:'D'},"D"),
h("li",{key:'E'},"E"),
])
//var myNode = h("ul",{}, "文本节点")
// Diff old vnode against new vnode and apply minimal DOM updates.
patch(myNode1, myNode)
})
#!/bin/bash
# Run exec-test.sh for one app, teeing its combined output into a
# timestamped log file under ./log.
# Fixes: modern $(...) command substitution instead of backticks, and quoted
# expansions so a timestamp or app name containing unexpected characters
# cannot word-split.
DATE=$(date +%Y%m%d-%H%M%S-%3N)
APP_NAME='tmp1'
mkdir -p log
bash exec-test.sh "$APP_NAME" 2>&1 | tee "log/test-$DATE.txt"
#!/usr/bin/env bash
# Locate this script's directory and pull in the shared helper libraries
# (register_log, run_junit_test, ...) relative to it.
BASEDIR=$(dirname "$0")
source "$BASEDIR"/functions.sh
source "$BASEDIR"/unit_test.sh
# Derive and export the image build variables (IMAGE_NAME, IMAGE_TAG,
# VERSION, BUILD_ID) from the Maven-generated project.properties file.
# On the develop branch the tag gets a timestamp suffix so every build is
# uniquely tagged.
function build_env() {
branch=$1
register_log "validating branch $branch"
if [ -f ./target/classes/project.properties ]
then
mvn resources:resources
BUILD_ID=$(date +'%y%m%d%H%M%S')
# NOTE(review): the `|| exit` runs inside the $() subshell, so a failed
# grep/cut does NOT abort this script — confirm whether that is intended.
VERSION=$(cat ./target/classes/project.properties | grep "version" | cut -d'=' -f2 || exit)
IMAGE_NAME=$(cat ./target/classes/project.properties | grep "artifactId" | cut -d'=' -f2 || exit)
else
register_log "File './target/classes/project.properties' not exists"
exit 1
fi
IMAGE_TAG="$VERSION"
if [ "$branch" == "develop" ]
then
# Develop builds get a per-build timestamp suffix.
IMAGE_TAG="$VERSION.$BUILD_ID"
fi
export IMAGE_TAG
export IMAGE_NAME
export BUILD_ID
export VERSION
register_log "Environments: "
register_log "IMAGE_NAME=$IMAGE_NAME"
register_log "IMAGE_TAG=$IMAGE_TAG"
register_log "VERSION=$VERSION"
register_log "BUILD_ID=$BUILD_ID"
}
# Pipeline entry point: only the develop branch is built and unit-tested;
# any other branch name is a no-op.
function steps() {
branch=$1
if [ "$branch" = "develop" ]; then
build_env "$branch"
run_junit_test "$branch"
fi
}

steps "develop"
#!/usr/bin/env python
# encoding: utf-8
#
# Copyright (c) 2008 <NAME> All rights reserved.
#
"""Write a small bz2-compressed example file, then inspect it with file(1)."""
__version__ = "$Id$"
#end_pymotw_header
import bz2
import contextlib
import os

# Fix: BZ2File opened in binary mode requires bytes — the original passed a
# str, which raises TypeError on Python 3. contextlib.closing guarantees the
# compressed stream is flushed and closed.
with contextlib.closing(bz2.BZ2File('example.bz2', 'wb')) as output:
    output.write(b'Contents of the example file go here.\n')

# Best-effort: show the detected file type; harmless if file(1) is absent.
os.system('file example.bz2')
|
<filename>src/cadenza/data/DataTypes.java
package cadenza.data;
import com.oracle.truffle.api.CompilerDirectives;
import com.oracle.truffle.api.dsl.ImplicitCast;
import com.oracle.truffle.api.dsl.TypeCast;
import com.oracle.truffle.api.dsl.TypeCheck;
import com.oracle.truffle.api.dsl.TypeSystem;
import kotlin.Unit;
// Interesting runtime types
/**
 * Truffle type system declaration for the interpreter's runtime values.
 * The DSL uses this ordering for specialization; {@code int} values may be
 * implicitly widened to {@link BigInt} via {@link #castBigNumber}.
 */
@TypeSystem({
Closure.class,
boolean.class,
int.class,
BigInt.class,
NeutralValue.class,
Unit.class
})
public abstract class DataTypes {
// Implicit int -> BigInt promotion; @TruffleBoundary keeps the BigInt
// allocation out of partially-evaluated code.
@ImplicitCast
@CompilerDirectives.TruffleBoundary
public static BigInt castBigNumber(int value) { return new BigInt(value); }
// Unit is a singleton, so an identity check suffices.
@TypeCheck(Unit.class)
public static boolean isUnit(Object value) { return value == Unit.INSTANCE; }
@SuppressWarnings("SameReturnValue")
@TypeCast(Unit.class)
public static Unit asUnit(Object value) {
assert value == Unit.INSTANCE;
return Unit.INSTANCE;
}
}
<filename>src/styles/component-styles/index.ts
// Barrel file for the component-style model classes (Apple News format).
export { Border } from "./border";
export { ColorStop } from "./color-stop";
export { ComponentStyle } from "./component-style";
export { ConditionalComponentStyle } from "./conditional-component-style";
export { ConditionalTableCellStyle } from "./conditional-table-cell-style";
export { ConditionalTableColumnStyle } from "./conditional-table-column-style";
export { ConditionalTableRowStyle } from "./conditional-table-row-style";
export { CornerMask } from "./corner-mask";
export { Fill } from "./fill";
export { GradientFill } from "./gradient-fill";
export { ImageFill } from "./image-fill";
export { VideoFill } from "./video-fill";
export { TableBorder } from "./table-border";
export { TableCellSelector } from "./table-cell-selector";
export { TableCellStyle } from "./table-cell-style";
export { TableColumnSelector } from "./table-column-selector";
export { TableColumnStyle } from "./table-column-style";
export { TableRowSelector } from "./table-row-selector";
export { TableRowStyle } from "./table-row-style";
export { TableStrokeStyle } from "./table-stroke-style";
export { TableStyle } from "./table-style";
import { ComponentStyle } from "./component-style";
/**
 * Signature/interface for the object containing named
 * `ComponentStyle` properties
 * @see https://developer.apple.com/documentation/apple_news/articledocument/componentstyles
 */
export interface ComponentStyles {
[key: string]: ComponentStyle;
}
#!/usr/bin/env bash
# Resolve the directory containing this script so the environment setup can
# be sourced regardless of the caller's working directory.
THIS_DIR=$( (cd "$(dirname -- "${BASH_SOURCE[0]}")" && pwd -P) )
set -eo pipefail
# shellcheck source=init-env.sh
source "$THIS_DIR/init-env.sh"
# Delegate to the ansible-inventory helper, forwarding all CLI arguments.
run-ansible-inventory "$@"
#!/bin/bash
# TODO: use docker
# Download (once) and start a local Gremlin server for integration tests.
# NOTE(review): this pins the old 3.1.0-incubating release on apache.org/dist;
# old releases are moved to archive.apache.org — confirm the URL still works.
cd ~
if [ ! -e "gremlin-server" ]; then
curl -L -O https://www.apache.org/dist/incubator/tinkerpop/3.1.0-incubating/apache-gremlin-server-3.1.0-incubating-bin.zip
unzip apache-gremlin-server-3.1.0-incubating-bin.zip
mv apache-gremlin-server-3.1.0-incubating gremlin-server
fi
cd gremlin-server
# Run in the background; the sleep gives the server a moment to bind its port.
nohup ./bin/gremlin-server.sh ./conf/gremlin-server-modern.yaml &
sleep 3
# Serve the current directory at http://localhost:5000 (requires the
# `http-server` npm package to be installed globally).
http-server -a localhost -p 5000
<filename>sample/src/main/java/com/fueled/snippety/sample/client/PageClient.java
package com.fueled.snippety.sample.client;
import android.app.Application;
import com.fueled.snippety.sample.entity.TextPage;
import com.google.gson.Gson;
import java.io.IOException;
import okio.BufferedSource;
import okio.Okio;
import okio.Source;
/**
* Created by <EMAIL> on 16/05/2017.
* Copyright (c) 2017 Fueled. All rights reserved.
*/
public class PageClient {
private final Application application;
private final Gson gson;
public PageClient(Application application, Gson gson) {
this.application = application;
this.gson = gson;
}
public TextPage getTextPage(int resourceId) {
try {
return gson.fromJson(openRawResource(resourceId), TextPage.class);
} catch (Exception e) {
e.printStackTrace();
return null;
}
}
/**
* Reads the specified raw resource, decode it as UTF-8, and returns the string.
*
* @param resourceId the raw resource id for the page to be retrieved.
* @return the raw resource as a UTF-8 string.
* @throws IOException if an error occurs while reading the specified raw resource.
*/
private String openRawResource(int resourceId) throws IOException {
Source source = Okio.source(application.getResources().openRawResource(resourceId));
BufferedSource buffer = Okio.buffer(source);
try {
return buffer.readUtf8();
} finally {
try {
buffer.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
|
#!/usr/bin/env sh
# Abort on the first command that fails
set -e
# Build the static site
npm run build
# Enter the generated output folder
cd docs/.vuepress/dist
# deploy to github pages
# echo 'b.xugaoyi.com' > CNAME
# Without GITHUB_TOKEN (local run) push over SSH; with it (GitHub Actions)
# push over HTTPS using the token for authentication.
if [ -z "$GITHUB_TOKEN" ]; then
msg='deploy'
githubUrl=git@github.com:Ldi123/MyNet.git
git config --global user.name "di"
git config --global user.email "abc@hidi.uu.me"
else
msg='来自github actions的自动部署'
githubUrl=https://di:${GITHUB_TOKEN}@github.com/Ldi123/MyNet.git
git config --global user.name "di"
git config --global user.email "abc@hidi.uu.me"
fi
# Fresh repo in dist/, commit everything, force-push to the gh-pages branch.
git init
git add -A
git commit -m "${msg}"
git push -f $githubUrl master:gh-pages # push to the github gh-pages branch
# deploy to coding pages
# echo 'www.xugaoyi.com\nxugaoyi.com' > CNAME # custom domain
# echo 'google.com, pub-7828333725993554, DIRECT, f08c47fec0942fa0' > ads.txt # Google Ads file
# if [ -z "$CODING_TOKEN" ]; then # -z: true when the string is empty; $CODING_TOKEN comes from the repo's Settings/Secrets environment variables
# codingUrl=git@e.coding.net:xgy/xgy.git
# else
# codingUrl=https://HmuzsGrGQX:${CODING_TOKEN}@e.coding.net/xgy/xgy.git
# fi
# git add -A
# git commit -m "${msg}"
# git push -f $codingUrl master # push to coding
cd -
rm -rf docs/.vuepress/dist
<reponame>Betterton-Lab/C-GLASS
#ifndef _CGLASS_BR_BEAD_H_
#define _CGLASS_BR_BEAD_H_
#include "object.hpp"
// Brownian bead object: a simulated particle with translational/rotational
// diffusion, optional driving force/torque and alignment interaction.
// NOTE(review): parameter semantics below are inferred from names where not
// shown in this header — confirm against the .cpp implementation.
class BrBead : public Object {
protected:
br_bead_parameters *sparams_; // species parameters (owned elsewhere)
bool zero_temperature_ = false;
bool draw_arrow_ = false;
double gamma_trans_ = 0; // translational drag coefficient
double gamma_rot_ = 0; // rotational drag coefficient
double diffusion_ = 0;
double diffusion_rot_ = 0;
double driving_factor_ = 0;
double driving_torque_ = 0;
int chiral_handedness_ = 0;
double noise_tr_ = 1;
double noise_rot_ = 1;
bool alignment_interaction_ = false;
double alignment_torque_ = 0;
void ApplyForcesTorques();
void ApplyBoundaryForces();
void InsertBrBead();
void SetDiffusion();
void Translate();
void Rotate();
void Integrate();
graph_struct g2_; // extra graphics handle (e.g. for the optional arrow)
public:
BrBead(unsigned long seed);
void Init(br_bead_parameters *sparams);
void UpdatePosition();
virtual void GetInteractors(std::vector<Object *> &ix);
virtual int GetCount();
virtual void Draw(std::vector<graph_struct *> &graph_array);
virtual void ZeroForce();
virtual void ReadSpec(std::fstream &ip);
virtual void WriteSpec(std::fstream &op);
// Convert binary data to text. Static to avoid needing to istantiate
// species members.
static void ConvertSpec(std::fstream &ispec, std::fstream &otext);
static void WriteSpecTextHeader(std::fstream &otext);
};
#endif // _CGLASS_BR_BEAD_H_
|
# Build and install pciutils 3.5.5 as a static library.
# set -e: abort on error; -o nounset: treat unset variables as errors.
set -eo nounset
cd /sources
# Download the tarball only if it is not already cached in /sources.
test -f pciutils-3.5.5.tar.xz || \
wget --no-check-certificate \
https://www.kernel.org/pub/software/utils/pciutils/pciutils-3.5.5.tar.xz
rm -rf pciutils-3.5.5
tar xf pciutils-3.5.5.tar.xz
pushd pciutils-3.5.5
# Static-only build (SHARED=no STATIC=yes), hwdata share dir per LFS layout.
make PREFIX=/usr \
SHAREDIR=/usr/share/hwdata \
SHARED=no \
STATIC=yes
make PREFIX=/usr \
SHAREDIR=/usr/share/hwdata \
SHARED=no \
STATIC=yes \
install install-lib &&
chmod -v 755 /usr/lib/libpci.a
popd
# Remove the build tree; the tarball stays cached for future runs.
rm -rf pciutils-3.5.5
def print_numbers(n):
    """Print the integers 0 through n-1, one per line.

    Args:
        n: upper bound (exclusive); values <= 0 print nothing.
    """
    # Idiomatic range-based loop instead of a manual while-counter.
    for i in range(n):
        print(i)
package scenarios
import (
. "github.com/onsi/ginkgo"
)
// Pending (PDescribe/PIt) Ginkgo specs: these describe planned cross-cluster
// network-policy scenarios but are skipped at runtime until implemented —
// the By(...) steps serve as an executable outline.
var _ = PDescribe("[Dataplane] Pod and Namespace selector based Network Policies for ingress between clusters", func() {
PIt("Should allow communication between selected pods in selected namespace", func() {
By("creating listener pod 1 with label 1 in cluster 1 in namespace 1")
By("creating connecting pod 2 with label 2 in cluster 2 in namespace 2")
By("creating listener pod 3 with label 1 in cluster 1 in namespace 1")
By("creating connecting pod 4 with label 1 in cluster 2 in namespace 1")
By("creating network policy in cluster 1 that allows communication to pod 1 from any pod with label 2 and in namespace 2 in cluster 2")
By("testing connectivity between pods 1 and 2")
By("testing non connectivity between pods 3 and 4")
})
})
var _ = PDescribe("[Ctlplane] Pod and Namespace selector based Network Policies for ingress between clusters", func() {
PIt("Should allow communication between selected pods in selected namespace", func() {
By("creating listener pod 1 with label 1 in cluster 1 in namespace 1")
By("creating listener pod 2 with label 2 in cluster 2 in namespace 2")
By("creating listener pod 3 with label 2 in cluster 2 in namespace 2")
By("creating network policy in cluster 1 that allows communication to pod 1 from any pod with label 2 and in namespace 2 in cluster 2")
By("Waiting for a NetworkPolicy to appear in cluster 1 containing IPs of pods with label 2 and in namespace 2 in cluster 2 in ipBlocks")
})
})
# Recreate ./secrets and write one file per SECRET_* environment variable,
# each named after the variable with the SECRET_ prefix stripped.
mkdir -p ./secrets && rm -rf ./secrets/*
# NOTE(review): `eval echo \$$line` expands arbitrary variable contents
# through the shell — confirm the environment is trusted, since values with
# shell metacharacters could be mangled or interpreted.
printenv | sed 's;=.*;;' | uniq | grep SECRET_ | while read line; do eval echo \$$line > ./secrets/${line/SECRET_/}; done
# Serve the secrets directory over HTTP on port 8000 (busybox-style httpd,
# foreground mode).
httpd -f -p 8000 -h ./secrets
import tkinter as tk

# Minimal tkinter app: two entry fields, a Sum button, and a label that shows
# the sum (or an error message for non-integer input).
root = tk.Tk()
root.title('Sum Application')
# StringVars bound to the two entry widgets below.
num1 = tk.StringVar()
num2 = tk.StringVar()
def calculate():
    """Parse both entries as ints and show their sum in sumLabel.

    Non-integer input is reported in the label instead of raising.
    """
    try:
        result = int(num1.get()) + int(num2.get())
        sumLabel.configure(text="The sum is %d" % result)
    except ValueError:
        sumLabel.configure(text="Please enter numbers")
num1Field = tk.Entry(root, width=10, textvariable=num1)
num2Field = tk.Entry(root, width=10, textvariable=num2)
sumButton = tk.Button(root, text="Sum", command=calculate)
sumLabel = tk.Label(root, text="")
# Grid layout: entries side by side, button and result label spanning both
# columns beneath them.
num1Field.grid(row=0, column=0)
num2Field.grid(row=0, column=1)
sumButton.grid(row=1, column=0, columnspan=2)
sumLabel.grid(row=2, column=0, columnspan=2)
# Blocks until the window is closed.
root.mainloop()
package md.rainlox.weaponryPlugin;
import org.bukkit.ChatColor;
import org.bukkit.command.Command;
import org.bukkit.command.CommandExecutor;
import org.bukkit.command.CommandSender;
import org.bukkit.entity.Player;
import org.bukkit.inventory.ItemStack;
/**
 * /hands command: tells the player what item types and display names are in
 * their main and off hands. Message prefixes come from the plugin config.
 */
public class Hands implements CommandExecutor {
    private final Main plugin;

    public Hands(Main plugin) {
        this.plugin = plugin;
    }

    @Override
    public boolean onCommand(CommandSender sender, Command cmd, String label, String[] args) {
        // NOTE(review): these config paths may return null if missing from
        // config.yml — confirm defaults exist, otherwise "null" is printed.
        String mainHand = plugin.config.getString("messages.commands.mainHand");
        String offHand = plugin.config.getString("messages.commands.offHand");
        if (!(sender instanceof Player)) {
            sender.sendMessage(ChatColor.RED + "Command sender must be player!");
            return true;
        }
        Player p = (Player) sender;
        ItemStack itemM = p.getInventory().getItemInMainHand();
        ItemStack itemS = p.getInventory().getItemInOffHand();
        sender.sendMessage(ChatColor.YELLOW + mainHand + itemM.getType() + " " + offHand + itemS.getType());
        // Fix: getItemMeta() returns null for items without metadata (e.g. an
        // empty hand yields AIR), so the original second message could throw a
        // NullPointerException. Guard with hasItemMeta() and fall back to "".
        String mainName = itemM.hasItemMeta() ? itemM.getItemMeta().getDisplayName() : "";
        String offName = itemS.hasItemMeta() ? itemS.getItemMeta().getDisplayName() : "";
        sender.sendMessage(mainHand + mainName + " " + offHand + offName);
        return true;
    }
}
|
#!/bin/bash
# Sync Backups to S3
#
# Sync local backups to an Amazon S3 bucket
#
# @author Connor Bär
# @copyright Copyright (c) 2017 Connor Bär
# @link https://madebyconnor.co/
# @package server-scripts
# @since 1.0.0
# @license MIT

# Get the directory of the currently executing script
DIR="$(dirname "${BASH_SOURCE[0]}")"

# Include file with LOCAL_BACKUPS_PATH / REMOTE_S3_BUCKET definitions.
if [ -f "${DIR}/.env" ]
then
    source "${DIR}/.env"
else
    echo "File .env is missing, aborting."
    exit 1
fi

# Make sure the local backup directory exists
if [[ ! -d "${LOCAL_BACKUPS_PATH}" ]] ; then
    echo "Creating asset directory ${LOCAL_BACKUPS_PATH}"
    mkdir -p "${LOCAL_BACKUPS_PATH}"
fi

# Sync the local backups to the Amazon S3 bucket.
# Fix: quote both expansions so a backup path containing spaces (or an unset
# variable under future `set -u`) cannot word-split the aws arguments.
aws s3 sync "${LOCAL_BACKUPS_PATH}" "s3://${REMOTE_S3_BUCKET}"

echo "*** Synced backups to ${REMOTE_S3_BUCKET}"

# Normal exit
exit 0
<reponame>rohitkishnan/pf9-ui-plugin<filename>src/stories/common/Alert.stories.js
// Storybook stories for the Alert component: one story per variant
// (info / success / warning / error), all sharing the same lorem-ipsum body.
import React from 'react'
import { addStoriesFromModule } from '../helpers'
import Alert from 'core/components/Alert'
const addStories = addStoriesFromModule(module)
// Shared filler content rendered inside every Alert variant.
const content = <>
<p>
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Fusce auctor, lorem in faucibus
finibus, odio sapien lobortis eros, a auctor massa risus in odio. Suspendisse tempus lorem
vel sapien pretium, sed varius sapien ornare. In at auctor tellus. Morbi tempor efficitur
risus a volutpat. Nullam vel neque aliquam, eleifend nulla et, aliquet sapien. Nulla
fermentum posuere lorem, sit amet ultricies sem eleifend vel. Sed libero tellus,
pellentesque at leo quis, fermentum congue est. Proin gravida consequat neque, eu molestie
risus facilisis non.
</p>
<p>
Fusce feugiat massa mauris, eu mollis leo rhoncus quis. Curabitur efficitur ligula quis
tellus rutrum elementum. Phasellus sit amet leo ut diam ornare laoreet. Aenean euismod metus
justo, et commodo lorem scelerisque eu. Vivamus nec velit at arcu convallis lacinia. In hac
habitasse platea dictumst. Mauris elit turpis, elementum a luctus faucibus, consectetur id
arcu.
</p>
<p>
Ut vitae lobortis lectus. Nulla consectetur egestas libero, at sollicitudin mi tincidunt
non. Aenean condimentum placerat nisi. Curabitur consequat quam quis purus finibus gravida.
Cras dui mauris, dignissim id enim vel, sagittis finibus velit. Curabitur non lectus
sagittis, rutrum nisl sed, congue tortor. Vestibulum semper imperdiet ipsum, at consectetur
lectus hendrerit ut.
</p>
</>
addStories('Common Components/Alert', {
info: () => (
<Alert variant="info">
{content}
</Alert>
),
success: () => (
<Alert variant="success">
{content}
</Alert>
),
warning: () => (
<Alert variant="warning">
{content}
</Alert>
),
error: () => (
<Alert variant="error">
{content}
</Alert>
),
})
|
<gh_stars>0
import { Test, TestingModule } from '@nestjs/testing';
import { CatedraticoController } from './catedratico.controller';
// Smoke test: builds a Nest testing module containing only the controller
// and verifies it can be instantiated by the DI container.
describe('Catedratico Controller', () => {
let controller: CatedraticoController;
beforeEach(async () => {
const module: TestingModule = await Test.createTestingModule({
controllers: [CatedraticoController],
}).compile();
controller = module.get<CatedraticoController>(CatedraticoController);
});
it('should be defined', () => {
expect(controller).toBeDefined();
});
});
|
#!/bin/sh
# Build the Go toolchain from ../src, then run the experiment's main.go with
# the freshly built go binary from ../bin.
cd ../src
sh make.bash
cd ../experiment
../bin/go run main.go
<!-- Fix: "<!DOCTYPEhtml>" (missing space) is an invalid doctype and drops
     browsers into quirks mode; the correct form is "<!DOCTYPE html>". -->
<!DOCTYPE html>
<html>
<head>
<title>Hello World Page</title>
<style type="text/css">
h1 {
font-family: Helvetica, sans-serif;
text-align: center;
color: #800072;
}
</style>
</head>
<body>
<h1>Hello World</h1>
</body>
</html>
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.