text
stringlengths 1
1.05M
|
|---|
def sort_addresses_by_length(addresses):
    """Sort addresses by length, longest first; ties broken alphabetically.

    :param addresses: iterable of address strings
    :return: new list ordered by descending length, then ascending
        lexicographic order within each length
    """
    # A single sort with a composite key replaces the original's manual
    # group-by-length dict plus per-bucket sort; output is identical.
    return sorted(addresses, key=lambda address: (-len(address), address))
|
/*
* options.h -- compiler configuration options set at compile time
* Copyright (C) Acorn Computers Ltd. 1988
* SPDX-License-Identifier: Apache-2.0
*/
/*
* RCS $Revision$
* Checkin $Date$
* Revising $Author$
*/
#ifndef _options_LOADED
#define _options_LOADED
/* Compile-time configuration for the armcpp compiler build: selects the
 * target OS flavour (Newton OS / RISC OS / RISCiX) and host workarounds. */
/*
* The following conditional settings allow the produced compiler (TARGET)
* to depend on the HOST (COMPILING_ON) environment.
* Note that we choose to treat this independently of the target-machine /
* host-machine issue.
*/
#define CPLUSPLUS 1
#define TARGET_HAS_DWARF 1
#define TARGET_VTAB_ELTSIZE 4
/* for indirect VTABLEs optimised for single inheritance */
/* Expire this version at 00:00:01 on Saturday 01 Oct 94 */
/*#define UNIX_TIME_LIMIT 780969601 */
#define TARGET_ENDIANNESS_CONFIGURABLE 1
/* #define TARGET_DEFAULT_BIGENDIAN 0 */ /* 1 => bigendian default */
/* 0 => littleendian default */
/* unset => defaults to host */
#define DISABLE_ERRORS 1 /* -- to enable -Exyz... error suppression */
#define EXTENSION_SYSV 1 /* -- to allow #ident ... */
#define TARGET_HAS_INLINE_ASSEMBLER 1
#define ARM_INLINE_ASSEMBLER 1
#define PROFILE_COUNTS_INLINE 1
#ifndef __acorn
# ifdef TARGET_IS_NEWTON
# define TARGET_MACHINE "Newton"
# define TARGET_SYSTEM "Newton OS"
# define TARGET_IS_NEWTONOS 1
# define TARGET_DEFAULT_BIGENDIAN 1
# define STRUCT_PTR_ALIGN_DEFAULT 0 /* do not assume struct ptrs are 4-byte aligned */
# define NO_INSTORE_FILES 1 /* no in-store headers for Newton. */
# define PCS_DEFAULTS (PCS_CALLCHANGESPSR | PCS_FPE3 | PCS_NOSTACKCHECK)
/* Exploiting registers preserved by callee saves only 200 bytes in the */
/* Newton ROM and makes patching more difficult */
# define DRIVER_OPTIONS {"-zpu1", NULL} /* pc-rel vtables */
# define DO_NOT_EXPLOIT_REGISTERS_PRESERVED_BY_CALLEE 1
# define CFRONT_COMPATIBLE_DESTRUCTORS 1
# define HOST_OBJECT_INCLUDES_SOURCE_EXTN 1 /* .c -> .c.o */
# define EXTENSION_COUNTED_STRINGS 1 /* to enable Pascal-style strings */
# define EXTENSION_UNSIGNED_STRINGS 1 /* and they are unsigned char[] */
# define ALLOW_WHITESPACE_IN_FILENAMES 1 /* to allow as it says... */
# define ONLY_WARN_ON_NONPRINTING_CHAR 1 /* to do as it says... */
# define HOST_DOES_NOT_FORCE_TRAILING_NL 1
# define HOST_WANTS_NO_BANNER 1 /* no routine banner output */
# define DISABLE_ERRORS 1
# define TARGET_WANTS_LINKER_TO_RESOLVE_FUNCTION_REFERENCES 1
# define HOST_CANNOT_INVOKE_ASSEMBLER 1
# define HOST_CANNOT_INVOKE_LINKER 1
# define PUT_FILE_NAME_IN_AREA_NAME 1
# define CHAR_NL 13 /* as MPW C '\n' */
# define CHAR_CR 10 /* as MPW C '\r' */
# define CFRONT_MODE_WARN_LACKS_STORAGE_TYPE 0
# define D_SUPPRESSED (D_SHORTWARN | D_STRUCTPADDING | D_PPNOSYSINCLUDECHECK | \
D_IMPLICITCTOR | D_LOWERINWIDER | D_CFRONTCALLER | \
D_MULTICHAR | D_STRUCTASSIGN)
# define HOST_DOESNT_WANT_FP_OFFSET_TABLES 1
# ifndef COMPILING_ON_MACINTOSH /* ifdef HOST_FILENAMES_ARE_CASE_SENSITIVE */
# define RETRY_INCLUDE_LOWERCASE
# endif
# ifdef MAKE_WCHAR_T_UNSIGNED_SHORT
/* make wchar_t be unsigned short */
/* maybe this should be cfe somewhere */
# define sizeof_wchar sizeof_short
# define wchar_typespec (bitoftype_(unsigned)|bitoftype_(short)) /* for sem.c */
# define NUM_WCHAR (NUM_INT|NUM_SHORT|NUM_UNSIGN) /* for lex.c */
# endif
# else /* NOT TARGET_IS_NEWTON */
/* usual cross-development case... */
# define TARGET_SYSTEM ""
# define TARGET_IS_RISC_OS 1
# define TARGET_DEFAULT_BIGENDIAN 0
# define PCS_DEFAULTS (PCS_CALLCHANGESPSR | PCS_FPE3 | PCS_SOFTFP)
/* | PCS_NOSTACKCHECK */
/* | PCS_REENTRANT */
/* | PCS_FPREGARGS */
# endif
# define HOST_WANTS_NO_BANNER 1
# define TARGET_HAS_DIVREM_FUNCTION 1 /* divide fn also returns remainder.*/
# define TARGET_HAS_DIV_10_FUNCTION 1 /* fast divide by 10 */
/* the last two would be in target.h*/
/* but are OS-dependent too. */
# ifndef DRIVER_OPTIONS
/* -D__arm done by TARGET_PREDEFINES */
# define DRIVER_OPTIONS {NULL}
# endif
#else /* __acorn is defined */
# ifdef __unix
# define TARGET_SYSTEM "RISCiX"
# define TARGET_IS_UNIX 1
# define NO_INSTORE_FILES 1 /* no in-store headers under Unix. */
# define HOST_WANTS_NO_BANNER 1
/* #define TARGET_HAS_DIVREM_FUNCTION 1 -- divide fn also returns remainder.*/
/* #define TARGET_HAS_DIV_10_FUNCTION 1 -- fast divide by 10 */
/* but not under ARM Unix... */
# ifndef DRIVER_OPTIONS
/* -D__arm done by TARGET_PREDEFINES */
# define DRIVER_OPTIONS {"-zps1", "-D__unix", "-D__acorn", NULL}
# endif
#endif
# ifdef __riscos
# define TARGET_SYSTEM "RISC OS"
# define TARGET_IS_RISC_OS 1
# define TARGET_HAS_DIVREM_FUNCTION 1 /* divide fn also returns remainder.*/
# define TARGET_HAS_DIV_10_FUNCTION 1 /* fast divide by 10 */
/* the last two would be in target.h */
/* but are OS-dependent too. */
# ifndef DRIVER_OPTIONS
/* -D__arm done by TARGET_PREDEFINES */
# define DRIVER_OPTIONS {"-D__riscos", "-D__acorn", NULL}
# endif
# endif
#endif /* defined(__acorn) */
#include "toolver.h"
#define NON_RELEASE_VSN TOOLVER_ARMCPP
#ifndef TARGET_IS_NEWTON
# define PROFILE_COUNTS_INLINE 1
/* to avoid conflict with host compilers */
# define C_INC_VAR "ARMINC"
# define C_LIB_VAR "ARMLIB"
#else
# define PROFILE_DISABLES_TAILCALL 1
# define C_INC_VAR "ARMCIncludes"
# define C_LIB_VAR NULL
#endif
/* #define DO_NOT_EXPLOIT_REGISTERS_PRESERVED_BY_CALLEE 1 */
/* #define MOVC_KILLS_REGISTER_PRESERVED_BY_CALLEE_EXPLOITATION 1 */
/* #define TARGET_STACK_MOVES_ONCE 1 / * Experimental option */
#ifndef RELEASE_VSN
# define ENABLE_ALL 1 /* -- to enable all debugging options */
#endif
/* mac-specific options - find a better home for these sometime! */
#ifdef macintosh
/* The origin of time is 0th Jan 1904... */
# ifdef UNIX_TIME_LIMIT
# define TIME_LIMIT (UNIX_TIME_LIMIT+(66*365+16)*24*3600)
# endif
# ifdef applec
/* work-around for MPW C */
# define NO_STATIC_BANNER 1
# endif
pascal void SpinCursor(short increment); /* copied from CursorCtl.h */
# define ExecuteOnSourceBufferFill() SpinCursor(1)
# ifdef __MWERKS__
# define REBUFFERSTDOUT 1
# endif
#else /* NOT macintosh */
# ifdef UNIX_TIME_LIMIT
# define TIME_LIMIT UNIX_TIME_LIMIT
# endif
#endif
#ifdef TARGET_IS_NEWTON
/* NOTE(review): empty conditional -- dead block or placeholder; confirm. */
#endif
#ifdef TIME_LIMIT
# define VENDOR_NAME "Advanced RISC Machines Limited"
#endif
#ifdef CPLUSPLUS
# ifndef CFRONT_MODE_WARN_LACKS_STORAGE_TYPE
# define CFRONT_MODE_WARN_LACKS_STORAGE_TYPE 1
# endif
#endif
#define MSG_TOOL_NAME "armcpp" /* used to load correct NLS message file */
#define TARGET_HAS_ASD
#endif
/* end of cpparm/options.h */
|
#!/usr/bin/env python
# coding: utf8
#
# Copyright (c) 2022 Centre National d'Etudes Spatiales (CNES).
#
# This file is part of Shareloc
# (see https://github.com/CNES/shareloc).
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
This module contains the LOS class to handle line of sights
for geometric models.
"""
# Third party imports
import numpy as np
# Shareloc imports
from shareloc.proj_utils import coordinates_conversion
class LOS:
    """Line of sight class.

    Each line of sight is stored as a starting point ``sis`` (ECEF, EPSG:4978)
    and a normalized viewing vector ``vis``.
    """

    def __init__(self, sensor_positions, geometrical_model, alt_min_max=None, fill_nan=False):
        """
        LOS Constructor

        :param sensor_positions: sensor positions
        :type sensor_positions: numpy array (Nx2)
        :param geometrical_model: geometrical model
        :type geometrical_model: shareloc.grid or shareloc.rpc.rpc
        :param alt_min_max: min/max altitude to compute los, if None model min/max will be used
        :type alt_min_max: list
        :param fill_nan: fill numpy.nan values with lon and lat offset if true (same as OTB/OSSIM),
            nan is returned otherwise
        :type fill_nan: boolean
        """
        self.geometrical_model = geometrical_model
        self.sensors_positions = sensor_positions
        self.los_creation(alt_min_max, fill_nan)

    def los_creation(self, alt_min_max, fill_nan=False):
        """
        Create los from extrema: los starting point and normalized viewing vector.

        :param alt_min_max: min/max altitude to compute los, if None model min/max will be used
        :type alt_min_max: list
        :param fill_nan: forwarded to the model's direct_loc_h
        :type fill_nan: boolean
        """
        self.los_nb = self.sensors_positions.shape[0]
        if alt_min_max is None:
            alt_min, alt_max = self.geometrical_model.get_alt_min_max()
        else:
            # BUGFIX: alt_min/alt_max were previously left unbound whenever an
            # explicit altitude range was given, raising NameError below.
            alt_min, alt_max = alt_min_max
        # Interleaved layout: even rows hold the point at alt_max (los start),
        # odd rows the point at alt_min, for each sensor position.
        los_extrema = np.zeros([2 * self.los_nb, 3])
        list_col, list_row = (self.sensors_positions[:, 0], self.sensors_positions[:, 1])
        los_extrema[np.arange(0, 2 * self.los_nb, 2), :] = self.geometrical_model.direct_loc_h(
            list_row, list_col, alt_max, fill_nan
        )
        los_extrema[np.arange(1, 2 * self.los_nb, 2), :] = self.geometrical_model.direct_loc_h(
            list_row, list_col, alt_min, fill_nan
        )
        # Convert geographic coordinates (EPSG:4326) to ECEF (EPSG:4978).
        in_crs = 4326
        out_crs = 4978
        ecef_coord = coordinates_conversion(los_extrema, in_crs, out_crs)
        self.sis = ecef_coord[0::2, :]
        vis = self.sis - ecef_coord[1::2, :]
        # Normalize each viewing vector (row-wise Euclidean norm).
        vis_norm = np.linalg.norm(vis, axis=1)
        rep_vis_norm = np.tile(vis_norm, (3, 1)).transpose()
        self.vis = vis / rep_vis_norm

    def get_sis(self):
        """
        Returns los starting points (ECEF).

        TODO: not used. Use python property instead
        :return: sis
        :rtype: numpy array
        """
        return self.sis

    def get_vis(self):
        """
        Returns los normalized viewing vectors.

        TODO: not used. Use python property instead
        :return: vis
        :rtype: numpy array
        """
        return self.vis
|
<gh_stars>1-10
// Copyright 2016-present Province of British Columbia
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import {authenticate} from '@loopback/authentication';
import {inject} from '@loopback/context';
import {ApplicationConfig, CoreBindings} from '@loopback/core';
import {repository} from '@loopback/repository';
import axios from 'axios';
import Bottleneck from 'bottleneck';
import dns from 'dns';
import _ from 'lodash';
import net from 'net';
import util from 'util';
import {Configuration, Subscription} from '../models';
import {ConfigurationRepository} from '../repositories';
const toSentence = require('underscore.string/toSentence');
const pluralize = require('pluralize');
// Request payload for the Swift SMS gateway: MessageBody is mandatory,
// any extra provider-specific fields (e.g. Reference) are free-form strings.
interface SMSBody {
MessageBody: string;
[key: string]: string;
}
@authenticate(
'ipWhitelist',
'clientCertificate',
'accessToken',
'oidc',
'siteMinder',
'anonymous',
)
// Base class for the service's controllers. Provides SMS and email delivery
// (with optional Bottleneck throttling), template token substitution
// (mailMerge) and per-service configuration merging (getMergedConfig).
export class BaseController {
constructor(
@inject(CoreBindings.APPLICATION_CONFIG)
protected appConfig: ApplicationConfig,
@repository(ConfigurationRepository)
public configurationRepository: ConfigurationRepository,
) {}
// Lazily created Twilio REST client, shared by all controller instances.
static smsClient: any;
// Shared SMS rate limiter; created on first send when sms.throttle.enabled.
static smsLimiter: Bottleneck;
// Send `textBody` to number `to` via the configured provider:
// 'swift' POSTs to its HTTP API; any other value falls through to Twilio.
// Returns the provider call's promise.
async sendSMS(
to: string,
textBody: string,
subscription: Partial<Subscription>,
) {
if (!BaseController.smsLimiter && this.appConfig?.sms?.throttle?.enabled) {
// Bottleneck options are the throttle config minus our own 'enabled' flag.
const smsThrottleCfg = Object.assign({}, this.appConfig.sms.throttle);
delete smsThrottleCfg.enabled;
BaseController.smsLimiter = new Bottleneck(smsThrottleCfg);
}
const smsProvider = this.appConfig.sms.provider;
const smsConfig = this.appConfig.sms.providerSettings[smsProvider];
switch (smsProvider) {
case 'swift': {
const url = `${smsConfig['apiUrlPrefix']}${
smsConfig['accountKey']
}/${encodeURIComponent(to)}`;
const body: SMSBody = {
MessageBody: textBody,
};
if (subscription?.id) {
// Pass the subscription id through so delivery callbacks can correlate.
body.Reference = subscription.id;
}
let req: any = axios.post;
if (BaseController.smsLimiter) {
// Wrap the request so it is scheduled through the rate limiter.
req = BaseController.smsLimiter.wrap(req);
}
return req(url, body, {
headers: {
'Content-Type': 'application/json;charset=UTF-8',
},
});
}
default: {
// Twilio Credentials
const accountSid = smsConfig.accountSid;
const authToken = smsConfig.authToken;
//require the Twilio module and create a REST client
BaseController.smsClient =
BaseController.smsClient || require('twilio')(accountSid, authToken);
let req = BaseController.smsClient.messages.create;
if (BaseController.smsLimiter) {
req = BaseController.smsLimiter.wrap(req);
}
return req.call(BaseController.smsClient.messages, {
to: to,
from: smsConfig.fromNumber,
body: textBody,
});
}
}
}
// Mail transport dependencies held as instance properties (overridable).
nodemailer = require('nodemailer');
directTransport = require('nodemailer-direct-transport');
// Cached nodemailer transport, created on first sendEmail call.
transport: any;
// Shared email rate limiter; created when email.throttle.enabled.
static emailLimiter: Bottleneck;
// Send `mailOptions` through the configured SMTP transport. On a connection
// failure (CONN + ECONNECTION/ETIMEDOUT) against a DNS-named host, resolves
// the host and retries each address once. Returns nodemailer's info object;
// throws if every attempt fails or nothing was accepted.
async sendEmail(mailOptions: any) {
const smtpCfg =
this.appConfig.email.smtp || this.appConfig.email.defaultSmtp;
if (!this.transport) {
if (smtpCfg.direct) {
this.transport = this.nodemailer.createTransport(
this.directTransport(smtpCfg),
);
} else {
this.transport = this.nodemailer.createTransport(smtpCfg);
}
}
if (
!BaseController.emailLimiter &&
this.appConfig?.email?.throttle?.enabled
) {
// As for SMS: throttle config minus the 'enabled' flag feeds Bottleneck.
const emailThrottleCfg = Object.assign({}, this.appConfig.email.throttle);
delete emailThrottleCfg.enabled;
BaseController.emailLimiter = new Bottleneck(emailThrottleCfg);
}
let info;
try {
let sendMail = this.transport.sendMail;
if (BaseController.emailLimiter) {
sendMail = BaseController.emailLimiter.wrap(sendMail);
}
info = await sendMail.call(this.transport, mailOptions);
if (info?.accepted?.length < 1) {
throw new Error('delivery failed');
}
} catch (ex: any) {
// Only fall through to the DNS retry when this looks like a transient
// connection error to a hostname (not a literal IP / direct transport).
if (
smtpCfg.direct ||
net.isIP(smtpCfg.host) ||
ex.command !== 'CONN' ||
['ECONNECTION', 'ETIMEDOUT'].indexOf(ex.code) === -1
) {
throw ex;
}
const dnsLookupAsync = util.promisify(dns.lookup);
const addresses = await dnsLookupAsync(smtpCfg.host, {all: true});
if (!(addresses instanceof Array)) {
throw ex;
}
// do client retry if there are multiple addresses
for (const [index, address] of addresses.entries()) {
const newSmtpCfg = Object.assign({}, smtpCfg, {host: address.address});
const transport = this.nodemailer.createTransport(newSmtpCfg);
let sendMail = transport.sendMail;
if (BaseController.emailLimiter) {
sendMail = BaseController.emailLimiter.wrap(sendMail);
}
try {
info = await sendMail.call(transport, mailOptions);
if (info?.accepted?.length < 1) {
throw new Error('delivery failed');
}
} catch (newEx: any) {
// Try the next resolved address only for the same transient errors;
// on the last address (or any other error) give up.
if (
index < addresses.length - 1 &&
newEx.command === 'CONN' &&
['ECONNECTION', 'ETIMEDOUT'].indexOf(newEx.code) >= 0
) {
continue;
}
throw newEx;
}
break;
}
}
return info;
}
// Substitute {token} placeholders in `srcTxt` with subscription /
// notification data and request-derived URLs. Escaped tokens (\{...\}) are
// left alone and unescaped at the end. Each substitution is wrapped in its
// own try/catch so a missing field skips that token instead of failing the
// whole merge.
// NOTE(review): `Notification` is not among the models imported above
// ({Configuration, Subscription}); confirm the intended type resolves here.
mailMerge(
srcTxt: any,
subscription: Partial<Subscription>,
notification: Partial<Notification>,
httpCtx: any,
) {
let output = srcTxt;
try {
output = output.replace(
/(?<!\\){subscription_confirmation_code(?<!\\)}/gi,
subscription.confirmationRequest?.confirmationCode,
);
} catch (ex) {}
try {
output = output.replace(
/(?<!\\){service_name(?<!\\)}/gi,
subscription.serviceName,
);
} catch (ex) {}
try {
if (output.match(/(?<!\\){unsubscription_service_names(?<!\\)}/i)) {
// e.g. "services foo, bar and baz" -- pluralized, sentence-joined.
const serviceNames = _.union(
[subscription.serviceName],
subscription.unsubscribedAdditionalServices
? subscription.unsubscribedAdditionalServices.names
: [],
);
output = output.replace(
/(?<!\\){unsubscription_service_names(?<!\\)}/gi,
pluralize('service', serviceNames.length) +
' ' +
toSentence(serviceNames),
);
}
} catch (ex) {}
let httpHost;
try {
// Resolve the external base URL, lowest to highest precedence:
// request headers < appConfig.httpHost < args.data < ctx instance < subscription.
const req = httpCtx.req || httpCtx.request;
if (req) {
httpHost = req.protocol + '://' + req.get('host');
}
if (this.appConfig.httpHost) {
httpHost = this.appConfig.httpHost;
}
let args: any;
try {
args = httpCtx.getSync('args');
} catch (ex) {}
if (args?.data?.httpHost) {
httpHost = args.data.httpHost;
} else if (httpCtx.instance?.httpHost) {
httpHost = httpCtx.instance.httpHost;
} else if (subscription?.httpHost) {
httpHost = subscription.httpHost;
}
output = output.replace(/(?<!\\){http_host(?<!\\)}/gi, httpHost);
} catch (ex) {}
try {
output = output.replace(
/(?<!\\){rest_api_root(?<!\\)}/gi,
this.appConfig.restApiRoot,
);
} catch (ex) {}
try {
output = output.replace(
/(?<!\\){subscription_id(?<!\\)}/gi,
subscription.id,
);
} catch (ex) {}
try {
output = output.replace(
/(?<!\\){unsubscription_code(?<!\\)}/gi,
subscription.unsubscriptionCode,
);
} catch (ex) {}
try {
output = output.replace(
/(?<!\\){unsubscription_url(?<!\\)}/gi,
httpHost +
this.appConfig.restApiRoot +
'/subscriptions/' +
subscription.id +
'/unsubscribe?unsubscriptionCode=' +
subscription.unsubscriptionCode,
);
} catch (ex) {}
try {
output = output.replace(
/(?<!\\){unsubscription_all_url(?<!\\)}/gi,
httpHost +
this.appConfig.restApiRoot +
'/subscriptions/' +
subscription.id +
'/unsubscribe?unsubscriptionCode=' +
subscription.unsubscriptionCode +
'&additionalServices=_all',
);
} catch (ex) {}
try {
output = output.replace(
/(?<!\\){subscription_confirmation_url(?<!\\)}/gi,
httpHost +
this.appConfig.restApiRoot +
'/subscriptions/' +
subscription.id +
'/verify?confirmationCode=' +
subscription.confirmationRequest?.confirmationCode,
);
} catch (ex) {}
try {
output = output.replace(
/(?<!\\){unsubscription_reversion_url(?<!\\)}/gi,
httpHost +
this.appConfig.restApiRoot +
'/subscriptions/' +
subscription.id +
'/unsubscribe/undo?unsubscriptionCode=' +
subscription.unsubscriptionCode,
);
} catch (ex) {}
// for backward compatibilities
try {
output = output.replace(
/(?<!\\){confirmation_code(?<!\\)}/gi,
subscription.confirmationRequest?.confirmationCode,
);
} catch (ex) {}
try {
output = output.replace(
/(?<!\\){serviceName(?<!\\)}/gi,
subscription.serviceName,
);
} catch (ex) {}
try {
output = output.replace(
/(?<!\\){restApiRoot(?<!\\)}/gi,
this.appConfig.restApiRoot,
);
} catch (ex) {}
try {
output = output.replace(
/(?<!\\){subscriptionId(?<!\\)}/gi,
subscription.id,
);
} catch (ex) {}
try {
output = output.replace(
/(?<!\\){unsubscriptionCode(?<!\\)}/gi,
subscription.unsubscriptionCode,
);
} catch (ex) {}
// substitute all other tokens with matching data.data properties
const matches = output.match(/(?<!\\){.+?(?<!\\)}/g);
if (matches) {
matches.forEach(function (e: string) {
try {
// Tokens may be prefixed 'subscription::path' or 'notification::path';
// unprefixed tokens look up notification.data first, then subscription.data.
const token = (e.match(/(?<!\\){(.+)(?<!\\)}/) ?? [])[1];
const tokenParts = token.split('::');
let val: string;
switch (tokenParts[0]) {
case 'subscription':
val = _.get(subscription.data ?? {}, tokenParts[1]);
break;
case 'notification':
val = _.get(notification.data ?? {}, tokenParts[1]);
break;
default:
val = _.get(notification.data ?? subscription.data ?? {}, token);
}
if (val) {
output = output.replace(e, val);
}
} catch (ex) {}
});
}
// unescape delimiter
output = output.replace(/\\{(.+?)\\}/g, '{$1}');
return output;
}
// Merge (lowest to highest precedence) appConfig[configName] with the stored
// Configuration record for `serviceName`. Supports both callback style
// (`next(err, res)`) and promise style (returns the merged object).
async getMergedConfig(
configName: string,
serviceName: string,
next?: Function,
) {
let data;
try {
data = await this.configurationRepository.findOne({
where: {
name: configName,
serviceName: serviceName,
},
});
} catch (ex) {
if (next) {
return next(ex, null);
} else {
throw ex;
}
}
let res;
try {
res = _.merge({}, this.appConfig[configName]);
} catch (ex) {}
try {
res = _.merge({}, res, (data as Configuration).value);
} catch (ex) {}
next?.(null, res);
return res;
}
}
export {axios};
|
<gh_stars>1-10
/**
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at the
* <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache homepage</a>
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Initial code contributed and copyrighted by<br>
* frentix GmbH, http://www.frentix.com
* <p>
*/
package org.olat.course.nodes.gta.ui;
import java.util.ArrayList;
import java.util.List;
import org.olat.core.gui.UserRequest;
import org.olat.core.gui.components.Component;
import org.olat.core.gui.components.link.Link;
import org.olat.core.gui.components.link.LinkFactory;
import org.olat.core.gui.components.velocity.VelocityContainer;
import org.olat.core.gui.control.Event;
import org.olat.core.gui.control.WindowControl;
import org.olat.core.gui.control.controller.BasicController;
import org.olat.core.util.StringHelper;
import org.olat.group.BusinessGroup;
/**
* Chooser for the available groups.
*
* Initial date: 06.03.2015<br>
* @author srosse, <EMAIL>, http://www.frentix.com
*
*/
/**
 * Lets the user pick one of the available business groups: renders one link
 * per group and fires {@link Event#DONE_EVENT} once a group link is clicked.
 * The chosen group is then available via {@link #getSelectGroup()}.
 */
public class BusinessGroupChooserController extends BasicController {

    private BusinessGroup selectGroup;

    public BusinessGroupChooserController(UserRequest ureq, WindowControl wControl,
            List<BusinessGroup> myGroups) {
        super(ureq, wControl);

        VelocityContainer mainVC = createVelocityContainer("groups_chooser");

        // One link component per group; the template iterates "links" by name.
        List<String> linkNames = new ArrayList<>();
        for (BusinessGroup group : myGroups) {
            String componentName = "gp-" + group.getKey();
            Link groupLink = LinkFactory.createLink(componentName, group.getName(),
                    getTranslator(), mainVC, this, Link.LINK | Link.NONTRANSLATED);
            // Escape the group name: it is user-provided and rendered as-is.
            groupLink.setCustomDisplayText(StringHelper.escapeHtml(group.getName()));
            groupLink.setUserObject(group);
            mainVC.put(componentName, groupLink);
            linkNames.add(componentName);
        }
        mainVC.contextPut("links", linkNames);
        putInitialPanel(mainVC);
    }

    /**
     * @return the group whose link was clicked, or null if none was yet
     */
    public BusinessGroup getSelectGroup() {
        return selectGroup;
    }

    @Override
    protected void event(UserRequest ureq, Component source, Event event) {
        if (!(source instanceof Link)) {
            return;
        }
        selectGroup = (BusinessGroup) ((Link) source).getUserObject();
        fireEvent(ureq, Event.DONE_EVENT);
    }
}
|
# Subpackage metadata for TeX Live's collection-latexrecommended.
TERMUX_SUBPKG_DESCRIPTION="Texlive's collection-latexrecommended"
# Sibling texlive subpackages this collection pulls in.
TERMUX_SUBPKG_DEPENDS="texlive-fontsrecommended, texlive-latexextra, texlive-pictures, texlive-plaingeneric"
# File list is computed at build time from the TeX Live package database.
TERMUX_SUBPKG_INCLUDE=$($TERMUX_PKG_BUILDER_DIR/parse_tlpdb.py collection-latexrecommended $TERMUX_PKG_TMPDIR/texlive.tlpdb)
TERMUX_SUBPKG_CONFLICTS="texlive-bin (<< 20190410), texlive (<< 20190410)"
# Emit a postinst maintainer script that refreshes the ls-R file databases.
termux_step_create_subpkg_debscripts() {
echo "#!$TERMUX_PREFIX/bin/sh" > postinst
# NOTE(review): this appends $PREFIX/bin/texlive to PATH and runs mktexlsr
# -- presumably the directory holding the texlive wrapper scripts; confirm.
echo 'PATH=$PATH:$PREFIX/bin/texlive mktexlsr' >> postinst
}
|
<reponame>pmashchak/config-parser
# Specs for ValueType boolean coercion: the literal strings
# true/on/yes and false/off/no must all parse to Ruby booleans.
describe ValueType do
# Default value is random hex; each shared example overrides it.
let(:value) { SecureRandom.hex }
subject { described_class.new(value) }
# Shared example: build a ValueType from +input+ and assert that
# #to_value coerces it to +output+.
shared_examples :to_value do |input, output|
let(:value) { input }
it "parses #{input} as #{output}" do
expect(subject.to_value).to eq(output)
end
end
it_behaves_like :to_value, 'true', true
it_behaves_like :to_value, 'on', true
it_behaves_like :to_value, 'yes', true
it_behaves_like :to_value, 'false', false
it_behaves_like :to_value, 'off', false
it_behaves_like :to_value, 'no', false
end
|
<gh_stars>0
#include <cstdio>
#include <windows.h>
#include <string>
#include <time.h>
#include <fstream>
#include <cstdlib>
#include <mmsystem.h>
#include <assert.h> // debugging
#include <cstdlib>
// define to enable flac (if you're crazy enough)
// #define __USE_FLAC__
#ifdef __USE_FLAC__
#include "FLAC++/encoder.h"
#include "FLAC/stream_encoder.h"
#endif
/* audio settings */
// from https://msdn.microsoft.com/en-us/library/aa908934.aspx
int const NUM_CHANNELS = 1; // mono audio (stereo would need 2 channels)
int const SAMPLES_PER_SEC = 16000; // 16 kHz sample rate
int const BITS_PER_SAMPLE = 16; // 16-bit PCM
int const BYTE_PER_SAMPLE = BITS_PER_SAMPLE / 8;
int const BLOCK_ALIGN = NUM_CHANNELS * BYTE_PER_SAMPLE; // bytes per sample frame
int const BYTE_RATE = SAMPLES_PER_SEC * BLOCK_ALIGN; // bytes per second of audio
void CaptureSoundFor(int secs, std::string destfile);
void SaveWavFile(std::string filename, PWAVEHDR pWaveHdr);
// NOTE(review): declared here taking std::string, but the definition below is
// ReadWavFile(char*) -- the signatures disagree, so this declaration refers to
// a function that is never defined. Confirm which signature is intended.
void ReadWavFile(std::string filename);
// FLAC
void ConvertWavToFlac(std::string wavfile, std::string flacfile);
#ifdef __USE_FLAC__
static void FlacProgressCallback(const FLAC__StreamEncoder* encoder, FLAC__uint64 bytes_written, FLAC__uint64 samples_written, unsigned frames_written, unsigned total_frames_estimate, void* client_data);
#endif
// Entry point: capture `period`-second sound clips forever, cycling through
// at most 10 numbered output files, and print each finished filename to
// stdout for the downstream program in the pipe.
int main(int argc, char** argv)
{
    if (argc < 2)
    {
        // BUGFIX: "Synthax" typo in the usage message.
        fprintf(stderr, "Missing args. Syntax: cap_wordcloud.exe $period");
        return 1;
    }
    int period = atoi(argv[1]);
    // atoi returns 0 on garbage input; CaptureSoundFor asserts secs > 0,
    // so reject non-positive periods up front with a clear message.
    if (period <= 0)
    {
        fprintf(stderr, "Invalid period '%s': expected a positive number of seconds", argv[1]);
        return 1;
    }
    int fileID = 0;
    for (;;)
    {
        std::string filename = "sound_capture" + std::to_string(fileID) + ".wav";
        CaptureSoundFor(period, filename);
        printf("%s\n", filename.c_str()); // print the filename out for the next program
        fflush(stdout); // needs forced stdout flush, is part of a pipe
        // BUGFIX: was `fileID = ++fileID % 10;` -- fileID modified twice in
        // one expression (undefined behavior before C++17).
        fileID = (fileID + 1) % 10; // no more than 10 sound files
    }
    return 0;
}
// Record `secs` seconds of mono 16-bit PCM from the default wave-input
// device into one buffer and write it to `destfile` (WAV, or FLAC via a
// temporary WAV when __USE_FLAC__ is defined).
void CaptureSoundFor(int secs, std::string destfile)
{
    assert(secs > 0);
    // Single buffer sized for the whole capture.
    int bufferSize = SAMPLES_PER_SEC * BYTE_PER_SAMPLE * NUM_CHANNELS * secs;
    WAVEHDR waveHdr;
    PBYTE buffer;
    HWAVEIN hWaveIn;
    /* begin sound capture */
    buffer = (PBYTE)malloc(bufferSize);
    if (!buffer)
    {
        printf("Failed to allocate buffers\n");
        assert(false);
        return; // BUGFIX: assert is a no-op under NDEBUG; don't use a null buffer
    }
    // Open waveform audio for input
    WAVEFORMATEX waveform;
    waveform.wFormatTag = WAVE_FORMAT_PCM;
    waveform.nChannels = NUM_CHANNELS;
    waveform.nSamplesPerSec = SAMPLES_PER_SEC;
    waveform.nAvgBytesPerSec = BYTE_RATE;
    waveform.nBlockAlign = BLOCK_ALIGN;
    waveform.wBitsPerSample = BITS_PER_SAMPLE;
    waveform.cbSize = 0;
    MMRESULT result = waveInOpen(&hWaveIn, WAVE_MAPPER, &waveform, NULL, NULL, CALLBACK_NULL);
    if (result)
    {
        // BUGFIX: format string had no conversion for `result`.
        printf("Failed to open waveform input device (error %u)\n", result);
        free(buffer); // BUGFIX: don't leak the capture buffer on failure
        assert(false);
        return;
    }
    // Set up headers and prepare them
    waveHdr.lpData = reinterpret_cast<CHAR*>(buffer);
    waveHdr.dwBufferLength = bufferSize;
    waveHdr.dwBytesRecorded = 0;
    waveHdr.dwUser = 0;
    waveHdr.dwFlags = 0;
    waveHdr.dwLoops = 1;
    waveHdr.lpNext = NULL;
    waveHdr.reserved = 0;
    waveInPrepareHeader(hWaveIn, &waveHdr, sizeof(WAVEHDR));
    // Insert a wave input buffer
    result = waveInAddBuffer(hWaveIn, &waveHdr, sizeof(WAVEHDR));
    if (result)
    {
        printf("Failed to read block from device\n");
        assert(false);
    }
    // Commence sampling input
    result = waveInStart(hWaveIn);
    if (result)
    {
        printf("Failed to start recording\n");
        assert(false);
    }
    time_t startTime = time(NULL);
    // Busy-wait until the buffer is full (header no longer "still playing").
    while (waveInUnprepareHeader(hWaveIn, &waveHdr, sizeof(WAVEHDR)) == WAVERR_STILLPLAYING)
        ;
    time_t endTime = time(NULL);
    int timeDiff = int(endTime - startTime);
    if (secs != timeDiff)
        printf("WARNING: requested capture time (%d) is different from real capture time (%d)", secs, timeDiff);
    waveInClose(hWaveIn);
#ifndef __USE_FLAC__
    SaveWavFile(destfile, &waveHdr);
#else // #ifdef __USE_FLAC__
    SaveWavFile("temp.wav", &waveHdr);
    ConvertWavToFlac("temp.wav", destfile);
#endif
    free(buffer); // BUGFIX: the capture buffer was leaked on every call
}
// Read the temporary wav file
void ReadWavFile(char* filename)
{
// random variables used throughout the function
int length, byte_samp, byte_sec;
FILE* file;
// open filepointer readonly
fopen_s(&file, filename, "rb");
if (file == NULL)
{
printf("Wav:: Could not open file: %s", filename);
return;
}
// declare a char buff to store some values in
char buff[5];
buff[4] = '\0';
// read the first 4 bytes
fread((void*)buff, 1, 4, file);
// the first four bytes should be 'RIFF'
if (strcmp((char*)buff, "RIFF") != 0)
{
printf("ReadWavFile:: incorrect file format? First four bytes not 'RIFF'");
return;
}
// read byte 8,9,10 and 11
fseek(file, 4, SEEK_CUR);
fread((void *)buff, 1, 4, file);
// this should read "WAVE"
if (strcmp((char *)buff, "WAVE") != 0)
{
printf("ReadWavFile:: incorrect file format? Could not read 'WAVE'");
return;
}
// read byte 12,13,14,15
fread((void *)buff, 1, 4, file);
// this should read "fmt "
if (strcmp((char *)buff, "fmt ") != 0)
{
printf("ReadWavFile:: incorrect file format? Could not read 'fmt '");
return;
}
fseek(file, 20, SEEK_CUR);
// final one read byte 36,37,38,39
fread((void *)buff, 1, 4, file);
if (strcmp((char *)buff, "data") != 0)
{
printf("ReadWavFile:: incorrect file format? Could not read 'data'");
return;
}
// Now we know it is a wav file, rewind the stream
rewind(file);
// now is it mono or stereo ?
fseek(file, 22, SEEK_CUR);
fread((void *)buff, 1, 2, file);
// bool isMono = (buff[0] & 0x02 == 0);
// read the sample rate
fread((void *)&SAMPLES_PER_SEC, 1, 4, file);
fread((void *)&byte_sec, 1, 4, file);
fread((void *)&byte_samp, 1, 2, file);
fread((void *)&BITS_PER_SAMPLE, 1, 2, file);
fseek(file, 4, SEEK_CUR);
fread((void *)&length, 1, 4, file);
}
// Write the captured PCM data in `pWaveHdr` to `filename` as a canonical
// 44-byte-header RIFF/WAVE file. Field widths and order follow the WAV spec;
// multi-byte integers are written in host (little-endian x86) byte order.
void SaveWavFile(std::string filename, PWAVEHDR pWaveHdr)
{
std::fstream file(filename, std::fstream::out | std::fstream::binary);
int pcmsize = sizeof(PCMWAVEFORMAT); // NOTE(review): unused -- remove?
int audioFormat = WAVE_FORMAT_PCM;
int subchunk1size = 16;
int subchunk2size = pWaveHdr->dwBufferLength * NUM_CHANNELS;
int chunksize = (36 + subchunk2size);
// write the wav file per the wav file format
file.seekp(0, std::ios::beg);
file.write("RIFF", 4); // chunk id
file.write((char*)&chunksize, 4); // chunk size (36 + SubChunk2Size))
file.write("WAVE", 4); // format
file.write("fmt ", 4); // subchunk1ID
file.write((char*)&subchunk1size, 4); // subchunk1size (16 for PCM)
file.write((char*)&audioFormat, 2); // AudioFormat (1 for PCM)
file.write((char*)&NUM_CHANNELS, 2); // NumChannels
file.write((char*)&SAMPLES_PER_SEC, 4); // sample rate
file.write((char*)&BYTE_RATE, 4); // byte rate (SampleRate * NumChannels * BitsPerSample/8)
file.write((char*)&BLOCK_ALIGN, 2); // block align (NumChannels * BitsPerSample/8)
file.write((char*)&BITS_PER_SAMPLE, 2); // bits per sample
file.write("data", 4); // subchunk2ID
file.write((char*)&subchunk2size, 4); // subchunk2size (NumSamples * NumChannels * BitsPerSample/8)
file.write(pWaveHdr->lpData, pWaveHdr->dwBufferLength); // data
file.close();
}
#ifdef __USE_FLAC__
static unsigned totalSamples = 0; /* can use a 32-bit number due to WAVE size limitations */
/**
 * Convert a canonical PCM WAVE file to FLAC using libFLAC's stream encoder.
 *
 * Reads the 44-byte WAVE header, sanity-checks the RIFF/WAVE/fmt/data
 * markers, then feeds 16-bit little-endian PCM to the encoder in
 * READ_BUFFER_SIZE-frame chunks. Progress is reported through
 * FlacProgressCallback; errors are printed to stdout and abort conversion.
 *
 * @param wavfile  path of the input WAVE file
 * @param flacfile path of the FLAC file to create
 */
void ConvertWavToFlac(std::string wavfile, std::string flacfile)
{
    FLAC__StreamEncoder* encoder = 0;
    FLAC__StreamEncoderInitStatus initStatus;
    FILE* file;
    fopen_s(&file, wavfile.c_str(), "rb");
    if (file == NULL)
    {
        // BUG FIX: the format string had no %s, and a std::string was passed
        // through printf's varargs (undefined behavior) -- pass c_str().
        printf("ConvertWavToFlac:: couldn't open input file %s\n", wavfile.c_str());
        return;
    }
    int const READ_BUFFER_SIZE = 4096;
    FLAC__byte buffer[READ_BUFFER_SIZE * BYTE_PER_SAMPLE * NUM_CHANNELS]; /* we read the WAVE data into here */
    FLAC__int32 pcm[READ_BUFFER_SIZE * NUM_CHANNELS];
    // some consistency checks on the string fields of the file
    if (fread(buffer, 1, 44, file) != 44 ||
        memcmp(buffer, "RIFF", 4) ||
        memcmp(buffer + 8, "WAVE", 4) ||
        memcmp(buffer + 12, "fmt ", 4) ||
        memcmp(buffer + 36, "data", 4))
    {
        // BUG FIX: pass the C string, not the std::string object, to printf.
        printf("ConvertWavToFlac:: file %s doesn't look like a wav file", wavfile.c_str());
        fclose(file);
        return;
    }
    // Subchunk2Size (bytes 40..43, little endian) is the data size in bytes;
    // dividing by the frame size yields inter-channel samples (frames), which
    // is what both the encoder estimate and the read loop below expect.
    // BUG FIX: the divisor must include NUM_CHANNELS; the original overstated
    // the sample count for multi-channel audio (identical when mono).
    totalSamples = (((((((unsigned)buffer[43] << 8) | buffer[42]) << 8) | buffer[41]) << 8) | buffer[40]) / (BYTE_PER_SAMPLE * NUM_CHANNELS);
    /* allocate the encoder */
    encoder = FLAC__stream_encoder_new();
    if (encoder == NULL)
    {
        printf("ConvertWavToFlac:: couldn't allocate encoder\n");
        fclose(file);
        return;
    }
    FLAC__bool ok = true;
    ok &= FLAC__stream_encoder_set_verify(encoder, true);
    ok &= FLAC__stream_encoder_set_compression_level(encoder, 5);
    ok &= FLAC__stream_encoder_set_channels(encoder, NUM_CHANNELS);
    ok &= FLAC__stream_encoder_set_bits_per_sample(encoder, BITS_PER_SAMPLE);
    ok &= FLAC__stream_encoder_set_sample_rate(encoder, SAMPLES_PER_SEC);
    ok &= FLAC__stream_encoder_set_total_samples_estimate(encoder, totalSamples);
    /* initialize encoder */
    if (ok)
    {
        initStatus = FLAC__stream_encoder_init_file(encoder, flacfile.c_str(), FlacProgressCallback, nullptr);
        if (initStatus != FLAC__STREAM_ENCODER_INIT_STATUS_OK)
        {
            printf("ConvertWavToFlac:: error while initializing encoder: %s\n", FLAC__StreamEncoderInitStatusString[initStatus]);
            ok = false;
        }
    }
    /* read blocks of samples from WAVE file and feed to encoder */
    if (ok)
    {
        size_t left = (size_t)totalSamples;
        while (ok && left)
        {
            size_t need = (left > READ_BUFFER_SIZE ? (size_t)READ_BUFFER_SIZE : (size_t)left);
            if (fread(buffer, NUM_CHANNELS * (BITS_PER_SAMPLE / 8), need, file) != need)
            {
                printf("ConvertWavToFlac:: error reading from WAVE file\n");
                ok = false;
            }
            else
            {
                /* convert the packed little-endian 16-bit PCM samples from WAVE into an interleaved FLAC__int32 buffer for libFLAC */
                size_t i;
                for (i = 0; i < need * NUM_CHANNELS; i++)
                    /* inefficient but simple and works on big- or little-endian machines */
                    pcm[i] = (FLAC__int32)(((FLAC__int16)(FLAC__int8)buffer[2 * i + 1] << 8) | (FLAC__int16)buffer[2 * i]);
                /* feed samples to encoder */
                ok = FLAC__stream_encoder_process_interleaved(encoder, pcm, need);
            }
            left -= need;
        }
    }
    ok &= FLAC__stream_encoder_finish(encoder);
    printf("ConvertWavToFlac:: %s\n", ok ? "Success" : "FAILED");
    if (!ok)
        printf("* State: %s\n", FLAC__StreamEncoderStateString[FLAC__stream_encoder_get_state(encoder)]);
    FLAC__stream_encoder_delete(encoder);
    fclose(file);
}
/**
 * libFLAC encoder progress callback: logs bytes/samples/frames written so
 * far against the total estimate registered in ConvertWavToFlac.
 * Reads the file-static totalSamples for the "samples total" figure.
 */
void FlacProgressCallback(FLAC__StreamEncoder const* /*encoder*/, FLAC__uint64 bytes_written,
    FLAC__uint64 samples_written, unsigned frames_written, unsigned total_frames_estimate, void* /*client_data*/)
{
    printf("FlacProgressCallback:: Wrote %llu bytes, %llu/%u samples, %u/%u frames\n",
        bytes_written, samples_written, totalSamples, frames_written, total_frames_estimate);
}
#endif // __USE_FLAC__
|
<filename>src/automated-scripts/exportDBsToCloud.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Grab the list of databases to be exported in the MSG configuration file and
export them to cloud storage.
Files beyond a maximum limit are split according to the number of chunks set
in the config file.
Usage:
python exportDBsToCloud.py
"""
__author__ = '<NAME> (張道博)'
__copyright__ = 'Copyright (c) 2014, University of Hawaii Smart Energy Project'
__license__ = 'https://raw.github' \
'.com/Hawaii-Smart-Energy-Project/Maui-Smart-Grid/master/BSD' \
'-LICENSE.txt'
from sek.logger import SEKLogger
from msg_notifier import MSGNotifier
from msg_db_exporter import MSGDBExporter
import argparse
import time
COMMAND_LINE_ARGS = None
def processCommandLineArguments():
    """
    Parse command-line arguments and store them in the module-level
    COMMAND_LINE_ARGS variable.

    Options:
        --dbname   Database file to be uploaded.
        --fullpath Full path to the database file to be uploaded.
        --testing  Flag enabling testing mode (default False).

    :returns: argparse.Namespace of the parsed arguments (also stored in
        COMMAND_LINE_ARGS for existing callers).
    """
    global COMMAND_LINE_ARGS
    parser = argparse.ArgumentParser(description = '')
    parser.add_argument('--dbname', help = 'Database file to be uploaded.')
    parser.add_argument('--fullpath',
                        help = 'Full path to database file to be uploaded.')
    parser.add_argument('--testing', action = 'store_true', default = False)
    COMMAND_LINE_ARGS = parser.parse_args()
    # Returning the namespace is backward-compatible (previous callers
    # ignored the implicit None) and makes the function testable.
    return COMMAND_LINE_ARGS
if __name__ == '__main__':
    logger = SEKLogger(__name__, 'INFO')
    logger.log("Exporting DBs to cloud.")
    processCommandLineArguments()
    exporter = MSGDBExporter()
    # NOTE(review): notifier is never referenced below -- presumably kept for
    # its construction side effects; confirm before removing.
    notifier = MSGNotifier()
    # Record log output so it can be written to the export report below.
    exporter.logger.shouldRecord = True
    startTime = time.time()
    dbs = exporter.configer.configOptionValue('Export', 'dbs_to_export').split(
        ',')
    fileIDs = exporter.exportDBs(databases = dbs, toCloud = True,
                                 testing = COMMAND_LINE_ARGS.testing,
                                 deleteOutdated = True)
    wallTime = time.time() - startTime
    wallTimeMin = int(wallTime / 60.0)
    # Remaining seconds after whole minutes are removed.
    wallTimeSec = wallTime - wallTimeMin * 60.0
    # One file ID per exported DB indicates every export succeeded.
    if len(fileIDs) == len(dbs):
        exporter.logger.log('No errors occurred during export.', 'info')
    else:
        exporter.logger.log('ERRORS occurred during export.', 'warning')
    exporter.logger.log('Free space remaining: %d' % exporter.freeSpace(),
                        'info')
    # BUG FIX: the minutes were subtracted a second time inside the format
    # call, producing a negative seconds figure; wallTimeSec is already the
    # remainder.
    exporter.logger.log(
        'Wall time: {:d} min {:.2f} s.'.format(wallTimeMin, wallTimeSec),
        'info')
    # Send the available file list by POST.
    exporter.sendDownloadableFiles()
    # Write the recorded log output to the export report.
    myPath = '{}/{}'.format(exporter.exportTempWorkPath, 'export-report.txt')
    # NOTE(review): 'wb' assumes logger.recording is byte-compatible
    # (Python 2 str) -- confirm if this script is migrated to Python 3.
    with open(myPath, 'wb') as fp:
        fp.write(exporter.logger.recording)
|
import styled from 'styled-components'
import React from "react"
import LargeBtn from '../components/largebtn'
// SmallBtn: a compact variant of LargeBtn (30px tall, capped at 100px wide),
// right-aligned via margin-left: auto. Accepts an optional `color` prop for
// the background; falls back to lightgreen when the prop is absent.
const SmallBtn = styled(LargeBtn)`
    height: 30px;
    max-width: 100px;
    background-color: ${props => props.color ? props.color : 'lightgreen'};
    margin-left: auto;
    font-size: 16px;
    padding-top: 4px;
    color: black;
    margin-bottom: 10px;
`
export default SmallBtn
|
#perl adjust_checksum.pl mcmthesis.dtx
# Build the mcmthesis class documentation: one xetex pass to extract the
# files, then repeated xelatex passes so cross-references settle.
xetex mcmthesis.dtx
xelatex mcmthesis.dtx
xelatex mcmthesis.dtx
#xelatex -shell-escape mcmthesis.dtx
#xelatex -shell-escape mcmthesis.dtx
# Build the demo document (two passes for references).
xelatex mcmthesis-demo.tex
xelatex mcmthesis-demo.tex
# The .dtx extraction produces LICENSE/README with a .tex suffix; rename.
mv LICENSE.tex LICENSE
mv README.tex README
# Clean up auxiliary build files and the minted cache.
rm *.log *.out *.aux *.glo *.idx
rm -rf _minted-mcmthesis
#open .
|
#!/bin/bash
# Resolve the directory holding the source files.
# $1 - TEST_SRCDIR (optional): Bazel-style test source root.
# $2 - TEST_WORKSPACE: workspace name appended under TEST_SRCDIR.
# Falls back to the parent directory of this script when TEST_SRCDIR is
# empty or unset. Prints the resolved directory on stdout.
find_source_files() {
  local TEST_SRCDIR=$1
  local TEST_WORKSPACE=$2
  local SRCDIR
  if [ -n "${TEST_SRCDIR}" ]; then
    SRCDIR="${TEST_SRCDIR}/${TEST_WORKSPACE}"
  else
    # BUG FIX: quote $0 and the substitution so paths containing spaces
    # do not undergo word splitting.
    SRCDIR="$(dirname "$0")/.."
  fi
  echo "${SRCDIR}"
}
# Example usage
# TEST_SRCDIR="/path/to/source"
# TEST_WORKSPACE="workspace"
# find_source_files "${TEST_SRCDIR}" "${TEST_WORKSPACE}"
|
<reponame>Blockception/BC-Minecraft-Molang
/** Container for a list of explicitly defined items of type T. */
export interface Defined<T> {
  /** The items that have been defined. */
  defined: T[];
}
/** Constructors and a type guard for the Defined<T> interface. */
export namespace Defined {
  /**
   * Create a Defined<T> container.
   * @param items Initial items; when omitted or undefined, an empty list is used.
   * @returns A Defined<T> wrapping the given items.
   */
  export function create<T>(items: T[] | undefined = undefined): Defined<T> {
    if (!items) {
      items = [];
    }
    // BUG FIX: the original discarded `items` and always returned an
    // empty list, making the parameter dead.
    return { defined: items };
  }
  /**
   * @returns A Defined<T> containing no items.
   */
  export function empty<T>(): Defined<T> {
    return { defined: [] };
  }
  /** Check whether the given value at least implements the Defined interface.
   * @param value The object to examine
   * @returns True when `value` is an object whose `defined` property is an array. */
  export function is<T>(value: any): value is Defined<T> {
    if (typeof value === "object") {
      return Array.isArray(value.defined);
    }
    return false;
  }
}
|
from ._ESS import essLocalDimEst as ess_py
from ._mada import mada as mada_py
from ._corint import corint as corint_py
# Assuming the dataset is defined as 'data'
# NOTE(review): `data` is never defined in this module -- running it as-is
# raises NameError. It must be assigned (or injected) before these calls.
# Step 1: Estimate local dimension using ess_py function
local_dimension = ess_py(data)
# Step 2: Perform statistical analysis using mada_py function
statistical_analysis_result = mada_py(data)
# Step 3: Interpret correlation using corint_py function
correlation_interpretation = corint_py(data)
# Display the results
print("Local Dimension:", local_dimension)
print("Statistical Analysis Result:", statistical_analysis_result)
print("Correlation Interpretation:", correlation_interpretation)
|
<gh_stars>1-10
import React from "react";
import style from "styled-components";
import { Link } from "react-router-dom";
import { connect } from "react-redux";
import { withRouter } from "react-router-dom";
import { logout } from "../store/actions";
// Layout: full-width header bar with a bottom rule.
const StyleHeader = style.header`
  width: 100%;
  display: flex;
  padding: 10px 30px;
  border-bottom: 1px solid #d8d8d8;
`;
// Left half: logo, title and the "add property" link.
const HeaderLeft = style.div`
  display: flex;
  flex-direction: columns;
  align-items: center;
  width: 50%;
`;
// Right side: pushed to the far edge via margin-left: auto.
const HeaderRight = style.div`
  margin-left: auto;
  display: flex;
  align-items: center;
`;
const Logo = style.img`
  margin-right: 30px;
`;
const HeaderText = style.h2`
  font-size: 18px;
  font-weight: 900;
  color: #363131;
  margin-right: 30px;
`;
const LogoutText = style.h2`
  font-size: 18px;
  color: #363131;
  margin-right: 30px;
  cursor: pointer;
  opacity: .7;
`;
// Styled react-router Link rendered as a button.
// BUG FIX: removed a stray closing brace that sat between the &:hover rule
// and the end of the template, producing invalid CSS.
const LinkButton = style(Link)`
  background-color: #2281bf;
  color: #fff;
  padding: 10px;
  border: none;
  border-radius: 4px;
  font-size: 14px;
  text-decoration: none;
  font-weight: bold;
  cursor: pointer;
  &:disabled {
    opacity: .5;
  }
  &:hover {
    background-color: #249EEE;
  }
`;
const Header = props => {
const logoutButton = (e, property) => {
e.stopPropagation();
props.logout(props.history);
};
return (
<>
<StyleHeader>
<HeaderLeft>
<Logo src="images/Logo.svg" />
<HeaderText>Airbnb Pricer</HeaderText>
<LinkButton to={`/add`}>Add Property +</LinkButton>
</HeaderLeft>
<HeaderRight>
<LogoutText onClick={e => logoutButton(e)}>Logout</LogoutText>
</HeaderRight>
</StyleHeader>
</>
);
}
// No state is mapped into Header yet; connect is used only to bind the
// logout action creator. withRouter supplies props.history for redirects.
const mapStateToProps = state => ({
});
export default connect(mapStateToProps, { logout })(withRouter(Header));
|
#include <vector>
#include <cmath>
/**
 * Compute a 12-bin Harmonic Pitch Class Profile.
 *
 * Each input partial is mapped to the nearest semitone relative to A4
 * (440 Hz) and its magnitude is accumulated into the corresponding pitch
 * class bin (bin 0 = A).
 *
 * @param frequencies partial frequencies in Hz (non-positive entries are skipped)
 * @param magnitudes  magnitude of each partial (parallel to frequencies)
 * @return 12-element vector of accumulated magnitudes per pitch class
 */
std::vector<double> HPCP(const std::vector<double>& frequencies, const std::vector<double>& magnitudes) {
    std::vector<double> hpcp(12, 0.0); // Initialize HPCP vector with zeros
    for (size_t i = 0; i < frequencies.size(); ++i) {
        double frequency = frequencies[i];
        double magnitude = magnitudes[i];
        // BUG FIX: log2 is undefined for non-positive frequencies; skip them.
        if (frequency <= 0.0)
            continue;
        // Semitone distance from A4, rounded to nearest (std::lround rounds
        // correctly for negative values; the original int(x + 0.5) truncated).
        long semitones = std::lround(12.0 * std::log2(frequency / 440.0));
        // BUG FIX: C++ % yields a negative remainder for negative operands,
        // which indexed hpcp out of bounds for frequencies below A4; use a
        // proper mathematical modulo instead.
        int pitch_class_index = static_cast<int>(((semitones % 12) + 12) % 12);
        hpcp[pitch_class_index] += magnitude;
    }
    return hpcp;
}
int main() {
    // Test the HPCP function
    // NOTE(review): value-initialized vectors are all zeros, so every test
    // frequency is 0 Hz; std::log2(0/440) is -inf and the cast inside HPCP
    // is undefined behavior. Populate these with real frequencies and
    // magnitudes before relying on this harness.
    std::vector<double> frequencies(10); // Input frequencies
    std::vector<double> magnitudes(10); // Input magnitudes
    std::vector<double> actual_hpcp = HPCP(frequencies, magnitudes); // Calculate HPCP
    std::vector<double> expected_hpcp(12); // Expected HPCP vector
    // Compare actual_hpcp with expected_hpcp using a function like EXPECT_VEC_EQ
    return 0;
}
|
<reponame>zhangliangInfo/husky
const fs = require('fs');
const path = require('path');
const { resolve } = require('path');
const execa = require('execa');
const cwd = process.cwd();
// Directories whose files are required to ship with test cases.
const includesDir = ['src/pages'];
// A parsed file path: base name, containing directory, and (relative) path.
interface CHECKRST {
  filename: string;
  filedir: string;
  path: string;
}
// Files staged for the current commit.
let commitList: CHECKRST[] = [];
// Test files discovered under ./test.
const testList: CHECKRST[] = [];
// Staged files found to be missing a matching test file.
const errorList: CHECKRST[] = [];
/**
 * Recursively walk a directory tree.
 * @param dir root directory to walk
 * @param callback invoked with the full path of every regular file found
 */
const travel = (dir: any, callback: (pathname: string) => void) => {
  // BUG FIX: readdirSync yields file-name strings, not numbers; the
  // previous `file: number` annotation forced a redundant toString().
  fs.readdirSync(dir).forEach((file: string) => {
    const pathname: string = path.join(dir, file);
    if(fs.statSync(pathname).isDirectory()) {
      // Recurse into subdirectories.
      travel(pathname, callback)
    } else {
      callback(pathname);
    }
  })
}
/**
 * Split a path into its base name and containing directory.
 * @param pathname full path to parse ('/'-separated)
 * @param suffix substring the file name must contain to be reported;
 *               when absent, `filename` is returned as null
 * @param splitStr optional prefix to strip: the returned `path` is the
 *                 portion after the first occurrence of this string
 * @returns an object with `filename`, `filedir` and `path`
 */
const getPathnameAndDir = (pathname: string, suffix: string, splitStr: string | null) => {
  const segments: string[] = pathname.split('/');
  const count: number = segments?.length;
  const baseName = segments?.[count - 1];
  const parentDir = segments?.[count - 2];
  const matchesSuffix = baseName.indexOf(suffix) > -1;
  return {
    filename: matchesSuffix ? baseName : null,
    filedir: parentDir,
    path: splitStr ? pathname.split(splitStr)[1] : pathname
  }
}
/**
 * Report missing test cases and block the commit.
 * Exits with status 1 (aborting the git hook) when `errorList` is
 * non-empty; exits 0 otherwise.
 * @param errorList staged files that lack a matching test file
 */
const throwErrorBeforeCommit = (errorList: CHECKRST[]) => {
  // console.log('errorList')
  // console.log(errorList)
  if(errorList?.length) {
    // Collect the page directories whose tests are missing for the message.
    const unCreateTestComponent = errorList?.map((error: CHECKRST) => error.filedir);
    console.log(`您提交的以下 ${unCreateTestComponent} 页面未检测到您提交的代码中包含测试用例o(╥﹏╥)o, 请完善后提交.`);
    process.exit(1);
  } else {
    console.log('很开心您本次提交的代码中都包含了测试用例^_^');
    process.exit(0);
  }
}
/**
 * Read the list of files staged for commit
 * (`git diff --cached --name-only`) and populate `commitList` with
 * parsed path records.
 */
const gitStage = () => {
  const { stdout } = execa.commandSync('git diff --cached --name-only', { cwd });
  // console.log('git commit list')
  // console.log(stdout)
  commitList = stdout.split('\n').map((pathname: string) => {
    return getPathnameAndDir(pathname, '', null) as CHECKRST;
  });
}
/**
 * Validate only the files that require test cases.
 * A staged file under one of `includesDir` must have a test file in
 * `testList` whose directory matches and whose name (before ".test")
 * matches the source name (before its .js/.jsx/.ts/.tsx extension);
 * otherwise it is recorded in `errorList`. Finishes by calling
 * throwErrorBeforeCommit, which exits the process.
 */
const checkOnlyNeedTest = () => {
  // Keep only staged files whose path (with the file's directory segment
  // stripped) falls under a directory that requires tests.
  const needTestFiles = commitList.filter((commit: CHECKRST) => includesDir.includes(commit.path.split('/' + commit.filedir)[0]));
  // console.log('needTestFiles')
  // console.log(needTestFiles)
  if(!needTestFiles.length) {
    return;
  }
  // if(testList)
  needTestFiles.forEach((commit: CHECKRST) => {
    // No matching test file found for this staged source file.
    if(testList.findIndex((test: CHECKRST) => test.filedir == commit.filedir && test.filename.split('.test')[0] == commit.filename.split(/\.(js|jsx|ts|tsx)/)[0]) == -1) {
      errorList.push({
        filename: commit.filename,
        filedir: commit.filedir,
        path: commit.path
      })
    }
  });
  throwErrorBeforeCommit(errorList);
}
// Entry point: index the test files under ./test, read the staged file
// list, then verify every staged page file ships with a matching test.
const callTheCheckHandler = async () => {
  await travel(resolve(cwd, './test'), (pathname) => {
    testList.push(getPathnameAndDir(pathname, 'test.', cwd) as CHECKRST);
  });
  // Fetch the list of files staged for commit.
  await gitStage();
  // Check whether the staged code includes its test cases.
  checkOnlyNeedTest();
}
callTheCheckHandler();
|
#!/bin/bash
# Experiment pipeline: preprocess -> 5-fold training -> validation ->
# test preprocessing -> test -> auto-commit of the experiment results.
echo "START PREPROCESS --->"
python run_preprocess.py --config_name config.yaml
echo "<--- END PREPROCESS"
echo "START TRAIN --->"
for i in `seq 0 4`
do
    echo "START - FOLD: $i"
    python run_train.py --config_name config.yaml --fold $i
    # Abort the whole pipeline if any fold fails.
    ret=$?
    if [ $ret -ne 0 ]; then
        echo "RAISED EXCEPTION"
        exit 1
    fi
    echo "END - FOLD: $i"
done
echo "<-- END TRAIN"
echo "START VALID --->"
# Validate both the best-loss and best-score checkpoints, with CAM output.
python run_valid.py --ckpt loss --cam
python run_valid.py --ckpt score --cam
echo "<--- END VALID"
echo "START TEST PREPROCESS --->"
python run_test_preprocess.py --config_name config.yaml
echo "<--- END TEST PREPROCESS"
echo "START TEST"
python run_test.py --ckpt loss
python run_test.py --ckpt score
echo "<--- END TEST"
# Commit the results under the experiment id parsed from config.yaml.
exp_dir=`cat config.yaml | grep -E -o "exp_[0-9]+"`
git add -A .
git commit -m "feat: $exp_dir"
git push
|
/**************************************************************
 * DROP FUNCTIONS
 **************************************************************/
-- Remove the diff/revert helper functions. Note: DROP FUNCTION without
-- IF EXISTS raises an error when a function is absent, and the argument
-- signatures must match the original definitions exactly.
DROP FUNCTION jsonb_diff_val(JSONB, JSONB);
DROP FUNCTION revert_row_event(INTEGER, INTEGER);
DROP FUNCTION revert_transaction(INTEGER);
DROP FUNCTION revert_transaction_group(INTEGER[]);
DROP FUNCTION revert_block(VARCHAR);
|
// Completion handlers for an in-flight tunnel start/stop request, if any.
var pendingStartCompletion: ErrorHandler?
var pendingStopCompletion: CompletionHandler?
// The active tunnel, if any.
var tunnel: Tunnel?
/// Resolve whichever tunnel operation is outstanding and release the tunnel.
/// - If a start completion is supplied, it is invoked with the tunnel's last
///   error and the pending start handler is cleared.
/// - Otherwise, if a stop completion is supplied, it is invoked and the
///   pending stop handler is cleared.
/// - Otherwise the proxy is cancelled with the tunnel's last error.
/// In every case the tunnel reference is dropped afterwards.
func manageTunnelOperations(startCompletion: ErrorHandler?, stopCompletion: CompletionHandler?) {
    if let startCompletion = startCompletion {
        startCompletion(tunnel?.lastError)
        pendingStartCompletion = nil
    } else if let stopCompletion = stopCompletion {
        stopCompletion()
        pendingStopCompletion = nil
    } else {
        // Neither a start nor a stop is pending: abort the proxy, passing
        // along the tunnel's last error (if any).
        cancelProxyWithError(tunnel?.lastError)
    }
    tunnel = nil
}
|
# Base constants
# ${0:a:h} is the zsh idiom for this script's absolute directory.
SCRIPT_DIR=${0:a:h}
# Cache files: the Magento root path and the cached autocomplete options.
MAGE_ROOT_FILE=${SCRIPT_DIR}/mage_root.txt
MAGE_AUTOCOMPLETE_FILE=${SCRIPT_DIR}/mage_autocomplete.txt
# Print the plugin's usage summary.
function m2:help() {
    HELP_MSG="
Description:
    Magento 2 zsh autocomplete plugin
Author:
    Dominic Dambrogia <domdambrogia+mage-2-plugin@gmail.com>
Functions:
    m2                      Runs <mage_root>/bin/magento
    m2:help                 View this help message
    m2:set_root             Set root project directory for Magento
    m2:get_root             View current root project directory for Magento
    m2:set_autocomplete     Set/overwrite the autocomplete options
    m2:get_autocomplete     View current available/cached autocomplete options
For more information see:
    ${SCRIPT_DIR}/mage-2.plugin.zsh
";
    echo ${HELP_MSG}
}
# Main function that calls the magento binary under the configured root,
# forwarding all arguments.
function m2() {
    php $(m2:get_root)/bin/magento "$@"
}
# Set the root of your Magento directory (persisted to MAGE_ROOT_FILE).
# This will need to run before any other commands can execute.
# Prints a usage hint when called without an argument.
function m2:set_root() {
    [ "$1" != "" ] && echo "$1" > ${MAGE_ROOT_FILE}
    [ "$1" = "" ] && echo "Usage: m2:set_root <m2_root>\n"
}
# Retrieve the m2 root set by m2:set_root; prints a hint when unset.
function m2:get_root() {
    # BUG FIX: the original line lacked `echo`, so the error string itself
    # was executed as a command ("command not found").
    [ ! -f ${MAGE_ROOT_FILE} ] && echo "Missing m2 root. Use m2:set_root"
    [ -f ${MAGE_ROOT_FILE} ] && cat ${MAGE_ROOT_FILE};
}
# Sets / overwrites autocomplete options.
# The autocomplete options are written to a file for quick retrieval.
# If you update Magento or add a new CLI option, you will need to run this
# to update your autocomplete options.
# The sed strips everything after the command name on each `list` line.
function m2:set_autocomplete() {
    autocomplete=$(m2 --raw --no-ansi list | sed "s/[[:space:]].*//g")
    echo $autocomplete > ${MAGE_AUTOCOMPLETE_FILE}
}
# Gets the autocomplete options.
# If none are set/cached, we will do that action and then echo the results.
function m2:get_autocomplete() {
    [ ! -f ${MAGE_AUTOCOMPLETE_FILE} ] && m2:set_autocomplete
    cat ${MAGE_AUTOCOMPLETE_FILE};
}
# Run on the zsh compdef function.
# This adds our autocomplete options.
# This should not be called outside this file.
_m2:add_autocomplete () {
    compadd $(m2:get_autocomplete)
}
# Zsh default command
compdef _m2:add_autocomplete m2
# Aliases
# BUG FIX: single quotes defer evaluation, so the Magento root is resolved
# each time the alias runs instead of once at plugin load (where an unset
# root would bake an error message into the alias).
alias m2:home='cd "$(m2:get_root)"'
|
<reponame>fjruizruano/TEmin
#!/usr/bin/python
import sys, os
from subprocess import call, Popen
from os import listdir
from os.path import isfile, join
# Python 2 script (print statements, raw_input).
print "Usage: deconseq_run.py ListOfFiles Reference Threads"
# Read the three positional arguments, prompting interactively when missing.
try:
    files = sys.argv[1]
except:
    files = raw_input("Introduce list of files: ")
try:
    ref = sys.argv[2]
except:
    ref = raw_input("Introduce FASTA reference: ")
try:
    thr = sys.argv[3]
except:
    thr = raw_input("Introduce number of threads")
# Reference name = reference file name with its extension stripped.
refp = ref.split(".")
refpoints = refp[0:-1]
refname = ".".join(refpoints)
files = open(files).readlines()
dsdir = "deconseq-standalone-0.4.3"
elements = os.listdir(".")
# Copy a local DeconSeq installation into the working directory on first run.
if dsdir not in elements:
    call("cp -r /usr/local/lib/%s ." % dsdir, shell=True)
    call("mkdir %s/db" % dsdir, shell=True)
# Link the reference into DeconSeq's db dir and build its bwa index.
os.chdir(dsdir+"/db")
call("ln -sf ../../%s" % ref, shell=True)
call("../bwa64 index -p %s -a is %s" % (refname,ref), shell=True)
os.chdir("../")
# Register the reference database in DeconSeq's Perl configuration
# (\047 is the octal escape for a single quote; inserted after line 20).
conf = open("DeconSeqConfig.pm").readlines()
conf_out = open("tmp_conf.txt", "w")
line1 = " "*21+"%s => {name => \047%s\047,\n" % (refname,refname)
line2 = " "*30+"db => \047%s\047},\n" % (refname)
conf.insert(20, line1+line2)
conf_out.write("".join(conf))
conf_out.close()
call("mv tmp_conf.txt DeconSeqConfig.pm", shell=True)
os.chdir("../")
# Process the read files as consecutive pairs (paired-end libraries).
for n in range(0,len(files)/2):
    file1 = files[n*2][:-1]
    file2 = files[(n*2)+1][:-1]
    # Decompress gzipped inputs with seqtk before processing.
    ext1 = file1.split(".")
    if ext1[-1] == "gz":
        file1_n = ext1[0:-1]
        file1_n = ".".join(file1_n)
        print "Uncompressing file %s" % file1
        call("seqtk seq %s > %s" % (file1, file1_n), shell=True)
        file1 = file1_n
    ext2 = file2.split(".")
    if ext2[-1] == "gz":
        file2_n = ext2[0:-1]
        file2_n = ".".join(file2_n)
        print "Uncompressing file %s" % file2
        call("seqtk seq %s > %s" % (file2, file2_n), shell=True)
        file2 = file2_n
    filename = file1.split(".")
    filename = filename[0]
    # Work inside a per-sample subdirectory of the DeconSeq install.
    call("mkdir %s/%s" % (dsdir,filename), shell=True)
    os.chdir("%s/%s" % (dsdir,filename))
    call("ln -sf ../../%s ." % file1 , shell=True)
    call("ln -sf ../../%s ." % file2 , shell=True)
    # Split each read file into `thr` chunks to parallelize DeconSeq.
    call("FastQ.split.pl %s tmp_queries_1 %s" % (file1, thr), shell=True)
    call("FastQ.split.pl %s tmp_queries_2 %s" % (file2, thr), shell=True)
    onlyfiles = [f for f in listdir(".") if isfile(join(".",f))]
    splits = []
    for f in onlyfiles:
        # NOTE(review): `and` binds tighter than `or`, so the .fastq suffix
        # check only applies to tmp_queries_2 files -- confirm intent.
        if f.startswith("tmp_queries_1") or f.startswith("tmp_queries_2") and f.endswith(".fastq"):
            splits.append(f)
    splits.sort()
    # Group the chunk commands into batches of `thr` concurrent processes.
    commands = []
    for round in range(0,2):
        commands.append([])
    for n in range(0,len(splits)):
        fq = splits[n]
        com = "perl deconseq.pl -f ./%s -out_dir %s.dir -dbs %s" % (fq, fq, refname)
        rr = n/int(thr)
        commands[rr].append(com)
    print "Running DeconSeq"
    call("ln -s ../db", shell=True)
    call("ln -s ../deconseq.pl", shell=True)
    call("ln -s ../bwa64", shell=True)
    call("ln -s ../DeconSeqConfig.pm", shell=True)
    # Run each batch in parallel and wait for it to finish.
    for command in commands:
        processes = [Popen(cmd, shell=True) for cmd in command]
        for p in processes:
            p.wait()
    # Concatenate the per-chunk "clean" outputs back into one file per mate.
    for n in range(1,3):
        concat = ["cat"]
        for fq in splits:
            if fq.startswith("tmp_queries_%s" % (str(n))):
                concat.append("%s.dir/*clean*" % (fq))
        call("%s > %s_clean_%s.fastq" % (" ".join(concat), filename[:-2], str(n)), shell=True)
    # Clean up intermediate links, chunks and inputs for this sample.
    call("rm db deconseq.pl bwa64 DeconSeqConfig.pm", shell=True)
    call("rm tmp_queries*.fastq", shell=True)
    call("rm -r tmp_queries*.fastq.dir", shell=True)
    call("rm %s" % (file1), shell=True)
    call("rm %s" % (file2), shell=True)
    os.chdir("../../")
    call("mv %s/%s ." % (dsdir,filename), shell=True)
    if ext1[-1] == "gz":
        call("rm %s" % (file1), shell=True)
    if ext2[-1] == "gz":
        call("rm %s" % (file2), shell=True)
# Remove the local DeconSeq copy once all samples are processed.
call("rm -r %s" % dsdir, shell=True)
|
<gh_stars>1-10
package main
import "github.com/hyrut/go-tkgtools"
import "fmt"
import "time"
// _visitBytes prints the bytes of b as a comma-separated list of hex
// literals (e.g. "0x01,0x02,") followed by a newline.
func _visitBytes(b []byte) {
	for i := 0; i < len(b); i++ {
		fmt.Printf("0x%02x,", b[i])
	}
	fmt.Println()
}
// main benchmarks the Milenage F1 and F2345 functions from go-tkgtools
// against test set 2 of 3GPP TS 35.208 (vectors reproduced below), printing
// the average execution time over 10000 iterations.
func main(){
	tkg := tkgtools.NewTKGTOOLS()
	// NOTE(review): "<KEY>" is a redacted placeholder (likely from secret
	// scrubbing) -- this file does not compile until the 16 key bytes from
	// TS 35.208 test set 2 are restored.
	key := [16]byte{<KEY>}
	op := [16]byte{0xcd,0xc2,0x02,0xd5,0x12,0x3e,0x20,0xf6,0x2b,0x6d,0x67,0x6a,0xc7,0x2c,0xb3,0x18}
	opc := [16]byte{0xcd,0x63,0xcb,0x71,0x95,0x4a,0x9f,0x4e,0x48,0xa5,0x99,0x4e,0x37,0xa0,0x2b,0xaf}
	sqn := [6]byte{0xff,0x9b,0xb4,0xd0,0xb6,0x07}
	rand := [16]byte{0x23,0x55,0x3c,0xbe,0x96,0x37,0xa8,0x9d,0x21,0x8a,0xe6,0x4d,0xae,0x47,0xbf,0x35}
	amf := [2]byte{0xb9,0xb9}
	/*
	key := [16]byte{0xfe,0xc8,0x6b,0xa6,0xeb,0x70,0x7e,0xd0,0x89,0x05,0x75,0x7b,0x1b,0xb4,0x4b,0x8f}
	op := [16]byte{0xdb,0xc5,0x9a,0xdc,0xb6,0xf9,0xa0,0xef,0x73,0x54,0x77,0xb7,0xfa,0xdf,0x83,0x74}
	opc := [16]byte{0x10,0x06,0x02,0x0f,0x0a,0x47,0x8b,0xf6,0xb6,0x99,0xf1,0x5c,0x06,0x2e,0x42,0xb3}
	sqn := [6]byte{0x9d,0x02,0x77,0x59,0x5f,0xfc}
	rand := [16]byte{0x9f,0x7c,0x8d,0x02,0x1a,0xcc,0xf4,0xdb,0x21,0x3c,0xcf,0xf0,0xc7,0xf7,0x1a,0x6a}
	amf := [2]byte{0x72,0x5c}
	*/
	// Output parameters for F1 (network auth code) and F2345 (res/keys).
	var mac_a [8]byte
	var res [8]byte
	var ck [16]byte
	var ik [16]byte
	var ak [6]byte
	testCount := 10000
	// Time F1 with the default R and C constants.
	start := time.Now()
	for i:=0; i<testCount; i++{
		tkg.F1(&key, &rand, &sqn, &amf, &mac_a, nil, &opc)
	}
	dur := time.Since(start)
	avgExecTime1 := float64(dur)/float64(testCount)
	fmt.Println("Average for F1 execuation with default R and C value:", avgExecTime1, "ns.")
	// Time F2345 with the default R and C constants.
	start = time.Now()
	for i:=0; i<testCount; i++{
		tkg.F2345(&key, &rand, &res, &ck, &ik, &ak, &op, nil)
	}
	dur = time.Since(start)
	avgExecTime2345 := float64(dur)/float64(testCount)
	fmt.Println("Average for F2345 execuation with default R and C value:", avgExecTime2345, "ns.")
}
/*
Test set 2 in 3GPP TS35.208
Variable Value
K 465b5ce8 b199b49f aa5f0a2e e238a6bc
RAND 23553cbe 9637a89d 218ae64d ae47bf35
SQN ff9bb4d0 b607
AMF b9b9
OP cdc202d5 123e20f6 2b6d676a c72cb318
OPC cd63cb71 954a9f4e 48a5994e 37a02baf
f1 4a9ffac3 54dfafb3
f1* 01cfaf9e c4e871e9
f2 a54211d5 e3ba50bf
f5 aa689c64 8370
f3 b40ba9a3 c58b2a05 bbf0d987 b21bf8cb
f4 f769bcd7 51044604 12767271 1c6d3441
f5* 451e8bec a43b
*/
/*
Test set 3 in 3GPP TS35.208
Variable Value
K fec86ba6 eb707ed0 8905757b 1bb44b8f
RAND 9f7c8d02 1accf4db 213ccff0 c7f71a6a
SQN 9d027759 5ffc
AMF 725c
OP dbc59adc b6f9a0ef 735477b7 fadf8374
OPC 1006020f 0a478bf6 b699f15c 062e42b3
f1 9cabc3e9 9baf7281
f1* 95814ba2 b3044324
f2 8011c48c 0c214ed2
f5 33484dc2 136b
f3 5dbdbb29 54e8f3cd e665b046 179a5098
f4 59a92d3b 476a0443 487055cf 88b2307b
f5* deacdd84 8cc6
*/
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.chain2.web.servlet;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Map;
import javax.servlet.ServletConfig;
import javax.servlet.ServletContext;
import org.apache.commons.chain2.web.MockEnumeration;
/**
 * Mock {@link ServletConfig} implementation backed by an in-memory
 * parameter map, for use in unit tests.
 */
public class MockServletConfig implements ServletConfig {

    private final String servletName;
    private final ServletContext servletContext;
    // Init parameters are String-to-String per the Servlet API; typing the
    // map accordingly removes the unchecked cast the previous
    // Map<String, Object> declaration forced in getInitParameter.
    private final Map<String, String> parameters = new HashMap<String, String>();

    /**
     * Default Constructor.
     */
    public MockServletConfig() {
        this("unspecified", new MockServletContext());
    }

    /**
     * Construct an instance with the specified name.
     *
     * @param servletName the servlet name
     */
    public MockServletConfig(String servletName) {
        this(servletName, new MockServletContext());
    }

    /**
     * Construct an instance with the specified name and context.
     *
     * @param servletName the servlet name
     * @param servletContext the servlet context
     */
    public MockServletConfig(String servletName, ServletContext servletContext) {
        this.servletName = servletName;
        this.servletContext = servletContext;
    }

    /**
     * Get a specified init parameter.
     *
     * @param name parameter name
     * @return the parameter value, or null if not set
     */
    public String getInitParameter(String name) {
        return parameters.get(name);
    }

    /**
     * Get the init parameter names.
     *
     * @return the set of parameter names
     */
    public Enumeration<String> getInitParameterNames() {
        return (new MockEnumeration<String>(parameters.keySet().iterator()));
    }

    /**
     * Get the servlet context.
     *
     * @return the servlet context
     */
    public ServletContext getServletContext() {
        return servletContext;
    }

    /**
     * Return the servlet name.
     *
     * @return The servlet name
     */
    public String getServletName() {
        return servletName;
    }

    /**
     * Set a specified init parameter.
     *
     * @param name parameter name
     * @param value the parameter value
     */
    public void setInitParameter(String name, String value) {
        parameters.put(name, value);
    }
}
|
# Map of Intel CPU code names to perf PMU identifier strings
# (vendor-family-model[-stepping]).
declare -A cpus
cpus[nhm-ex]=GenuineIntel-6-2E
cpus[nhm-ep]=GenuineIntel-6-1E
# NOTE(review): duplicate key -- this assignment overwrites the 6-1E entry
# above; one of the two lines presumably needs a different code name.
# Confirm against the intended model list.
cpus[nhm-ep]=GenuineIntel-6-1A
cpus[wsm-ex]=GenuineIntel-6-2F
cpus[wsm-sp]=GenuineIntel-6-25
cpus[wsm-dp]=GenuineIntel-6-2C
cpus[snb]=GenuineIntel-6-2A
cpus[jkt]=GenuineIntel-6-2D
cpus[ivt]=GenuineIntel-6-3E
cpus[ivb]=GenuineIntel-6-3A
cpus[hsw]=GenuineIntel-6-45
cpus[slm]=GenuineIntel-6-37
cpus[bnl]=GenuineIntel-6-35
cpus[bdw]=GenuineIntel-6-3D
cpus[hsx]=GenuineIntel-6-3F
cpus[skl]=GenuineIntel-6-5E
cpus[bdw-de]=GenuineIntel-6-56
cpus[bdx]=GenuineIntel-6-4F
cpus[knl]=GenuineIntel-6-57
cpus[skx]=GenuineIntel-6-55-4
cpus[clx]=GenuineIntel-6-55-5
cpus[icl]=GenuineIntel-6-7E
|
<filename>src/server/actions.js
var _ = require('lodash');
// Action: register a socket connection under a freshly generated id.
function storeSocket(socket) {
    const payload = {
        socket,
        id: _.uniqueId('socket_'),
    };
    return { type: 'STORE_SOCKET', payload };
}
// Action: forget a previously stored socket by its id.
function removeSocket(socketId) {
    return { type: 'REMOVE_SOCKET', payload: { socketId } };
}
// Action: create a session owned by `owner`; generates an id with lodash
// when none is supplied.
function createSession(sessionId, owner) {
    const id = sessionId || _.uniqueId('SESSION_');
    return {
        type: 'CREATE_SESSION',
        payload: { sessionId: id, owner },
    };
}
// Action: attach a stored socket to an existing session.
function joinSession(sessionId, socketId) {
    return {
        type: 'JOIN_SESSION',
        payload: { sessionId, socketId },
    };
}
// Action: close a session by id.
function closeSession(sessionId) {
    return { type: 'CLOSE_SESSION', payload: { sessionId } };
}
// Action: add a named clock to a session; the clock id is generated here.
function addClock(sessionId, clockName) {
    const payload = {
        sessionId,
        clockId: _.uniqueId('clock_'),
        clockName,
    };
    return { type: 'ADD_CLOCK', payload };
}
// Action: remove a clock by id.
function deleteClock(clockId) {
    return {
        type: 'DELETE_CLOCK',
        payload: {
            clockId,
        },
    };
}
// Action: start a countdown on a clock. The current server time is captured
// so clients can compensate for transmission latency.
function countDown(data) {
    const { clockId, seconds } = data;
    return {
        type: 'COUNTDOWN',
        payload: {
            id: clockId,
            initialServerDate: new Date(),
            countdown: seconds,
        },
    };
}
// Public action-creator API for the socket/session/clock store.
module.exports = {
    storeSocket,
    removeSocket,
    createSession,
    joinSession,
    closeSession,
    addClock,
    deleteClock,
    countDown,
};
|
package com.huatuo.activity.personal;
import android.content.Context;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.text.Editable;
import android.text.TextWatcher;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.EditText;
import android.widget.TextView;
import android.widget.Toast;
import com.huatuo.R;
import com.huatuo.base.BaseActivity;
import com.huatuo.dictionary.MsgId;
import com.huatuo.net.thread.Feedback;
import com.huatuo.util.DialogUtils;
import com.huatuo.util.Toast_Util;
/**
 * "Usage feedback" screen: lets the user enter up to 500 characters of
 * feedback (with a live character counter) and submits it to the server on
 * a background thread, reporting the outcome via a Handler.
 */
public class ShiYongFanKuiActivity extends BaseActivity {
    private Context mContext;
    private Feedback feedback;            // background submission task
    private Handler feedback_handler;     // receives results from the task
    private EditText et_content;          // feedback text input
    private TextView tv_zishu, tv_commit; // character counter / submit button
    private int size;                     // current input length
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        mContext = ShiYongFanKuiActivity.this;
        setContentView(R.layout.activity_shiyongfankui);
        bindListener();
        et_content = (EditText) findViewById(R.id.et_content);
        tv_zishu = (TextView) findViewById(R.id.tv_zishu);
        tv_commit = (TextView) findViewById(R.id.tv_commit);
        feedback_handler = new feedback_Handler();
        et_content.addTextChangedListener(new TextWatcher() {
            private CharSequence temp = "";
            @Override
            public void onTextChanged(CharSequence s, int start, int before, int count) {
                temp = s;
            }
            @Override
            public void beforeTextChanged(CharSequence s, int start, int count, int after) {
                // no-op
            }
            @Override
            public void afterTextChanged(Editable s) {
                // Update the "n/500" character counter after every edit.
                size = temp.length();
                if (size >= 0) {
                    tv_zishu.setText(size + "/500");
                }
            }
        });
        tv_commit.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View arg0) {
                feedback(et_content.getText().toString());
            }
        });
    }
    /**
     * Submit the feedback text on a worker thread, showing a progress
     * dialog while the request is in flight.
     *
     * @param context the feedback text to submit
     */
    private void feedback(String context) {
        showCustomCircleProgressDialog(null, getString(R.string.common_toast_net_prompt_submit));
        feedback = new Feedback(mContext, feedback_handler, context);
        Thread thread = new Thread(feedback);
        thread.start();
    }
    /** Dispatches results posted by the Feedback task back to the UI thread. */
    class feedback_Handler extends Handler {
        String OutMsg;
        @Override
        public void handleMessage(Message msg) {
            switch (msg.what) {
            case MsgId.DOWN_DATA_S:
                // Success: dismiss progress, toast, and close the screen.
                closeCustomCircleProgressDialog();
                Toast_Util.showToast(mContext, "提交成功");
                finish();
                break;
            case MsgId.DOWN_DATA_F:
                // Failure: surface the server-provided error message.
                closeCustomCircleProgressDialog();
                OutMsg = feedback.getOutMsg();
                DialogUtils.showToastMsg(mContext, OutMsg, Toast.LENGTH_SHORT);
                break;
            case MsgId.NET_NOT_CONNECT:
                // No network connectivity: show the standard dialog.
                setCustomDialog(getString(R.string.common_toast_net_not_connect), true);
                break;
            default:
                break;
            }
        }
    }
}
|
package fastly
import (
"fmt"
"net/url"
"sort"
"time"
)
// DigitalOcean represents a DigitalOcean response from the Fastly API.
// Fields mirror the logging/digitalocean endpoint's attributes and are
// decoded via mapstructure tags.
type DigitalOcean struct {
	ServiceID         string     `mapstructure:"service_id"`
	ServiceVersion    int        `mapstructure:"version"`
	Name              string     `mapstructure:"name"`
	BucketName        string     `mapstructure:"bucket_name"`
	Domain            string     `mapstructure:"domain"`
	AccessKey         string     `mapstructure:"access_key"`
	SecretKey         string     `mapstructure:"secret_key"`
	Path              string     `mapstructure:"path"`
	Period            uint       `mapstructure:"period"`
	GzipLevel         uint       `mapstructure:"gzip_level"`
	Format            string     `mapstructure:"format"`
	FormatVersion     uint       `mapstructure:"format_version"`
	ResponseCondition string     `mapstructure:"response_condition"`
	MessageType       string     `mapstructure:"message_type"`
	TimestampFormat   string     `mapstructure:"timestamp_format"`
	Placement         string     `mapstructure:"placement"`
	PublicKey         string     `mapstructure:"public_key"`
	// Lifecycle timestamps; nil when the event has not occurred.
	CreatedAt *time.Time `mapstructure:"created_at"`
	UpdatedAt *time.Time `mapstructure:"updated_at"`
	DeletedAt *time.Time `mapstructure:"deleted_at"`
}
// digitaloceansByName is a sortable list of DigitalOceans, ordered
// ascending by Name.
type digitaloceansByName []*DigitalOcean

// Len, Swap, and Less implement the sortable interface.
func (d digitaloceansByName) Len() int      { return len(d) }
func (d digitaloceansByName) Swap(i, j int) { d[i], d[j] = d[j], d[i] }
func (d digitaloceansByName) Less(i, j int) bool {
	return d[i].Name < d[j].Name
}
// ListDigitalOceansInput is used as input to the ListDigitalOceans function.
// Both fields are required; the call fails with a sentinel error otherwise.
type ListDigitalOceansInput struct {
	// ServiceID is the ID of the service (required).
	ServiceID string

	// ServiceVersion is the specific configuration version (required).
	ServiceVersion int
}
// ListDigitalOceans returns the list of DigitalOceans for the configuration
// version, sorted by name. It validates the required input fields before
// issuing the GET request and decodes the response body via mapstructure.
func (c *Client) ListDigitalOceans(i *ListDigitalOceansInput) ([]*DigitalOcean, error) {
	if i.ServiceID == "" {
		return nil, ErrMissingServiceID
	}

	if i.ServiceVersion == 0 {
		return nil, ErrMissingServiceVersion
	}

	path := fmt.Sprintf("/service/%s/version/%d/logging/digitalocean", i.ServiceID, i.ServiceVersion)
	resp, err := c.Get(path, nil)
	if err != nil {
		return nil, err
	}

	var digitaloceans []*DigitalOcean
	if err := decodeBodyMap(resp.Body, &digitaloceans); err != nil {
		return nil, err
	}
	// Stable sort keeps equal-named entries in API order.
	sort.Stable(digitaloceansByName(digitaloceans))
	return digitaloceans, nil
}
// CreateDigitalOceanInput is used as input to the CreateDigitalOcean function.
type CreateDigitalOceanInput struct {
	// ServiceID is the ID of the service (required).
	ServiceID string
	// ServiceVersion is the specific configuration version (required).
	ServiceVersion int

	Name              string `form:"name,omitempty"`
	BucketName        string `form:"bucket_name,omitempty"`
	Domain            string `form:"domain,omitempty"`
	AccessKey         string `form:"access_key,omitempty"`
	SecretKey         string `form:"secret_key,omitempty"`
	Path              string `form:"path,omitempty"`
	Period            uint   `form:"period,omitempty"`
	GzipLevel         uint   `form:"gzip_level,omitempty"`
	Format            string `form:"format,omitempty"`
	MessageType       string `form:"message_type,omitempty"`
	FormatVersion     uint   `form:"format_version,omitempty"`
	ResponseCondition string `form:"response_condition,omitempty"`
	TimestampFormat   string `form:"timestamp_format,omitempty"`
	Placement         string `form:"placement,omitempty"`
	PublicKey         string `form:"public_key,omitempty"`
}

// CreateDigitalOcean creates a new Fastly DigitalOcean logging endpoint.
func (c *Client) CreateDigitalOcean(i *CreateDigitalOceanInput) (*DigitalOcean, error) {
	// Validate required input before touching the network.
	switch {
	case i.ServiceID == "":
		return nil, ErrMissingServiceID
	case i.ServiceVersion == 0:
		return nil, ErrMissingServiceVersion
	}

	endpoint := fmt.Sprintf("/service/%s/version/%d/logging/digitalocean", i.ServiceID, i.ServiceVersion)
	resp, err := c.PostForm(endpoint, i, nil)
	if err != nil {
		return nil, err
	}

	var created *DigitalOcean
	if err := decodeBodyMap(resp.Body, &created); err != nil {
		return nil, err
	}
	return created, nil
}
// GetDigitalOceanInput is used as input to the GetDigitalOcean function.
type GetDigitalOceanInput struct {
	// ServiceID is the ID of the service (required).
	ServiceID string
	// ServiceVersion is the specific configuration version (required).
	ServiceVersion int
	// Name is the name of the DigitalOcean to fetch.
	Name string
}

// GetDigitalOcean gets the DigitalOcean configuration with the given parameters.
func (c *Client) GetDigitalOcean(i *GetDigitalOceanInput) (*DigitalOcean, error) {
	// Validate required input before touching the network.
	switch {
	case i.ServiceID == "":
		return nil, ErrMissingServiceID
	case i.ServiceVersion == 0:
		return nil, ErrMissingServiceVersion
	case i.Name == "":
		return nil, ErrMissingName
	}

	// The name is user-supplied, so escape it for safe use in the path.
	endpoint := fmt.Sprintf("/service/%s/version/%d/logging/digitalocean/%s", i.ServiceID, i.ServiceVersion, url.PathEscape(i.Name))
	resp, err := c.Get(endpoint, nil)
	if err != nil {
		return nil, err
	}

	var fetched *DigitalOcean
	if err := decodeBodyMap(resp.Body, &fetched); err != nil {
		return nil, err
	}
	return fetched, nil
}
// UpdateDigitalOceanInput is used as input to the UpdateDigitalOcean function.
type UpdateDigitalOceanInput struct {
	// ServiceID is the ID of the service (required).
	ServiceID string
	// ServiceVersion is the specific configuration version (required).
	ServiceVersion int
	// Name is the name of the DigitalOcean to update.
	Name string

	NewName           *string `form:"name,omitempty"`
	BucketName        *string `form:"bucket_name,omitempty"`
	Domain            *string `form:"domain,omitempty"`
	AccessKey         *string `form:"access_key,omitempty"`
	SecretKey         *string `form:"secret_key,omitempty"`
	Path              *string `form:"path,omitempty"`
	Period            *uint   `form:"period,omitempty"`
	GzipLevel         *uint   `form:"gzip_level,omitempty"`
	Format            *string `form:"format,omitempty"`
	FormatVersion     *uint   `form:"format_version,omitempty"`
	ResponseCondition *string `form:"response_condition,omitempty"`
	MessageType       *string `form:"message_type,omitempty"`
	TimestampFormat   *string `form:"timestamp_format,omitempty"`
	Placement         *string `form:"placement,omitempty"`
	PublicKey         *string `form:"public_key,omitempty"`
}

// UpdateDigitalOcean updates a specific DigitalOcean logging endpoint.
// Only non-nil pointer fields are sent, so unset fields stay unchanged.
func (c *Client) UpdateDigitalOcean(i *UpdateDigitalOceanInput) (*DigitalOcean, error) {
	// Validate required input before touching the network.
	switch {
	case i.ServiceID == "":
		return nil, ErrMissingServiceID
	case i.ServiceVersion == 0:
		return nil, ErrMissingServiceVersion
	case i.Name == "":
		return nil, ErrMissingName
	}

	// The name is user-supplied, so escape it for safe use in the path.
	endpoint := fmt.Sprintf("/service/%s/version/%d/logging/digitalocean/%s", i.ServiceID, i.ServiceVersion, url.PathEscape(i.Name))
	resp, err := c.PutForm(endpoint, i, nil)
	if err != nil {
		return nil, err
	}

	var updated *DigitalOcean
	if err := decodeBodyMap(resp.Body, &updated); err != nil {
		return nil, err
	}
	return updated, nil
}
// DeleteDigitalOceanInput is the input parameter to DeleteDigitalOcean.
type DeleteDigitalOceanInput struct {
	// ServiceID is the ID of the service (required).
	ServiceID string
	// ServiceVersion is the specific configuration version (required).
	ServiceVersion int
	// Name is the name of the DigitalOcean to delete (required).
	Name string
}

// DeleteDigitalOcean deletes the given DigitalOcean logging endpoint.
func (c *Client) DeleteDigitalOcean(i *DeleteDigitalOceanInput) error {
	// Validate required input before touching the network.
	switch {
	case i.ServiceID == "":
		return ErrMissingServiceID
	case i.ServiceVersion == 0:
		return ErrMissingServiceVersion
	case i.Name == "":
		return ErrMissingName
	}

	// The name is user-supplied, so escape it for safe use in the path.
	endpoint := fmt.Sprintf("/service/%s/version/%d/logging/digitalocean/%s", i.ServiceID, i.ServiceVersion, url.PathEscape(i.Name))
	resp, err := c.Delete(endpoint, nil)
	if err != nil {
		return err
	}

	// The delete API returns a status document rather than the deleted object.
	var status *statusResp
	if err := decodeBodyMap(resp.Body, &status); err != nil {
		return err
	}
	if !status.Ok() {
		return ErrStatusNotOk
	}
	return nil
}
|
<reponame>krpharr/verbose-dollop
import axios from "axios";

// Thin client for the backend's Google Books search and saved-books API.
export default {
  // Proxy a Google Books query through the backend.
  search(term, start, max) {
    return axios.get(`api/googlebooks/${term}/${start}/${max}`);
  },
  // Fetch all saved books.
  getBooks() {
    return axios.get("api/books");
  },
  // Persist a book object.
  saveBook(bookObj) {
    return axios.post("api/books", bookObj);
  },
  // Remove a saved book by id.
  deleteBook(id) {
    return axios.delete(`api/books/${id}`);
  }
};
|
import fluidsynth
def play_music(midi_file_path, soundfont_path):
    """Play a MIDI file through FluidSynth using the given SoundFont.

    Blocks until playback finishes. Errors are reported on stdout rather
    than raised, matching the original behavior.

    Args:
        midi_file_path: Path to the MIDI file to play.
        soundfont_path: Path to the SoundFont (.sf2) file.
    """
    import time  # local import: only needed for the polling sleep below

    try:
        fs = fluidsynth.Synth()
        fs.start(driver='alsa')  # ALSA audio driver — assumes Linux; TODO confirm
        try:
            sfid = fs.sfload(soundfont_path)
            fs.program_select(0, sfid, 0, 0)

            # Load and play the MIDI file
            fs.midifile_load(midi_file_path)
            fs.play()

            # Wait for playback to finish. Sleep between polls instead of the
            # original `pass` busy-wait, which pinned a CPU core.
            while fs.get_note_count() > 0:
                time.sleep(0.05)
        finally:
            # Release synth resources even on error; the original only
            # called delete() on the success path and leaked otherwise.
            fs.delete()
    except FileNotFoundError:
        print("File not found. Please check the file paths.")
    except fluidsynth.FluidError as e:
        print(f"FluidSynth error: {e}")
# Example usage: play a sample MIDI file with a piano SoundFont.
play_music('example.mid', 'Nice-Keys-Ultimate-V2.3.sf2')
|
class ChessPosition:
    """Tracks the sequence of moves made from a chess position."""

    def __init__(self):
        # Move history, oldest move first.
        self.moves = []

    def set_moves(self, moves):
        """Replace the entire move history with *moves* (kept by reference)."""
        self.moves = moves

    def add_move(self, move):
        """Append a single move to the end of the history."""
        self.moves.append(move)

    def get_moves(self):
        """Return the move history list itself (not a copy)."""
        return self.moves
|
#!/usr/bin/env bash
# Start streaming an audio file into an in-progress Nexmo/Vonage voice call.
# config.sh provides $UUID (the call leg); jwt.sh provides $JWT (auth token).
source "../../config.sh"
source "../../jwt.sh"

# Quote "$UUID" and "$JWT" so word splitting/globbing cannot corrupt the
# request URL or the Authorization header (the original left them unquoted).
curl -X PUT "https://api.nexmo.com/v1/calls/$UUID/stream" \
  -H "Authorization: Bearer $JWT" \
  -H "Content-Type: application/json" \
  -d '{"stream_url": ["https://raw.githubusercontent.com/nexmo-community/ncco-examples/gh-pages/assets/welcome_to_nexmo.mp3"]}'
|
#include <iostream>
#include <array>
#include <numeric>
using namespace std;
int main(){
array<int, 10> arr{1, 2, 3, 4, 5, 6, 7, 8, 9, 10};
for (auto a: arr) cout << a << " " ; // 1 2 3 4 5 6 7 8 9 10
cout << "\n";
double sum= accumulate(arr.begin(), arr.end(), 0);
cout << sum << std::endl; // 55
double mean= sum / arr.size();
cout << mean << endl; // 5.5
cout << (arr[0] == get<0>(arr)); // 1 (1 represents true)
return 0;
}
// Notes — three equivalent ways to access an element of std::array:
//   arr[n], arr.at(n), get<n>(arr)
// To value-initialize all elements to zero:
//   array<int, 10> arr{};
|
package com.qtimes.pavilion.base.rx;
import android.content.Context;
import androidx.annotation.NonNull;
import android.util.AttributeSet;
import android.widget.FrameLayout;
import com.qtimes.pavilion.base.rx.lifecycle.LayoutEvent;
import com.qtimes.pavilion.base.rx.lifecycle.LayoutLifecycleProvider;
import com.trello.rxlifecycle.LifecycleTransformer;
import com.trello.rxlifecycle.RxLifecycle;
import rx.subjects.BehaviorSubject;
/**
 * A {@link FrameLayout} whose attach/detach lifecycle is exposed as an Rx
 * stream, letting subscriptions be bound to (and disposed at) layout
 * lifecycle events via {@link RxLifecycle}.
 *
 * Created by gufei on 2016/9/6 0006.
 */
public abstract class RxFrameLayout extends FrameLayout implements LayoutLifecycleProvider {

    // BehaviorSubject replays the most recent lifecycle event to new
    // subscribers, so late binders still see the current state.
    private final BehaviorSubject<LayoutEvent> lifecycleSubject = BehaviorSubject.create();

    public RxFrameLayout(Context context) {
        super(context);
    }

    public RxFrameLayout(Context context, AttributeSet attrs) {
        super(context, attrs);
    }

    public RxFrameLayout(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
    }

    /**
     * Returns a transformer that completes the bound sequence when the given
     * layout lifecycle {@code event} is emitted.
     */
    @NonNull
    @Override
    public <T> LifecycleTransformer<T> bindUntilEvent(@NonNull LayoutEvent event) {
        return RxLifecycle.bindUntilEvent(lifecycleSubject, event);
    }

    @Override
    protected void onAttachedToWindow() {
        // Emit after super: the view is fully attached when observers react.
        super.onAttachedToWindow();
        lifecycleSubject.onNext(LayoutEvent.ONATTACHEDTOWINDOW);
    }

    @Override
    protected void onDetachedFromWindow() {
        // Emit before super so observers can clean up while still attached.
        lifecycleSubject.onNext(LayoutEvent.ONDETACHEDFROMWINDOW);
        super.onDetachedFromWindow();
    }
}
|
import json
class STATUS:
    """Namespace of plain-string status codes used by the demo."""
    OK = "OK"
    ERROR = "ERROR"
class Status:
    """An operation outcome: a status code plus a human-readable reason."""

    def __init__(self, status, reason):
        # Status code (see STATUS) and the explanatory reason text.
        self.status = status
        self.reason = reason

    def eq(self, other_status):
        """Return True when this status code equals *other_status*."""
        return other_status == self.status

    def brief(self):
        """Return the short human-readable reason string."""
        return self.reason
class Result:
    """Wraps the payload produced by an operation."""

    def __init__(self, data):
        # Raw payload; may be None on failure.
        self.data = data

    def get_data(self):
        """Return the wrapped payload unchanged."""
        return self.data
def process_status(nodeName: str, status, result) -> None:
    """Print a node's configuration on success, or a failure notice.

    Args:
        nodeName: Display name of the node being reported.
        status: Either a plain status string (see ``STATUS``) or a ``Status``
            instance. On the failure path it must be a ``Status`` so that
            ``brief()`` is available. (The original annotation claimed
            ``str``, which the error branch contradicted.)
        result: A ``Result`` whose payload is a JSON document string on the
            success path.
    """
    # Normalize the two accepted shapes of `status` into one boolean,
    # using the otherwise-unused Status.eq helper for consistency.
    if isinstance(status, Status):
        is_ok = status.eq(STATUS.OK)
    else:
        is_ok = status == STATUS.OK

    if is_ok:
        print("'%s' configuration:" % nodeName)
        # Pretty-print the JSON configuration carried in the result.
        cfg = result.get_data()
        data = json.loads(cfg)
        print(json.dumps(data, indent=4))
    else:
        print("\n")
        print("!!!Demo terminated, reason: %s" % status.brief().lower())
# Example usage
# Success path: a plain status string plus a JSON payload.
nodeName = "Node1"
status = STATUS.OK
result = Result('{"key1": "value1", "key2": "value2"}')
process_status(nodeName, status, result)
# Failure path: a Status object carrying the error reason (its brief() is
# printed by process_status).
nodeName = "Node2"
status = Status(STATUS.ERROR, "Connection error")
result = Result(None)
process_status(nodeName, status, result)
|
<reponame>lananh265/social-network
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.u1F4BF = void 0;
var u1F4BF = {
"viewBox": "0 0 2600 2760.837",
"children": [{
"name": "path",
"attribs": {
"d": "M2057.5 1870.5Q1938 2075 1735 2191t-435 116-435-116-322.5-320.5T423 1430q0-232 117.5-436T862 672t438-118 438 118 321.5 322 117.5 436q0 236-119.5 440.5zM2055 1335q-3-36-10-71.5t-18-69.5l-472 143q7 23 12 48zm-619.5 213.5Q1493 1492 1493 1412q0-78-57.5-135t-135.5-57q-79 0-136 57.5t-57 134.5q0 78 57 135.5t136 57.5q78 0 135.5-56.5zM548 1322l486 56q18-93 93-155.5t173-62.5q67 0 123 29l244-440q-82-45-174.5-70T1300 654q-287 0-502.5 192.5T548 1322zm5 215q5 36 14.5 70.5T589 1675l451-166-6-25q-3-13-4-25zm1492 15l-476-88q-12 100-89.5 169t-179.5 69q-33 0-63.5-8t-58.5-22l-201 426q74 35 155 54t168 19q275 0 486-177t259-442zm-807-204q28-26 62-26 38 0 64 26.5t26 60.5q0 42-24 67.5t-66 25.5q-36 0-63-25.5t-27-64.5q0-38 28-64z"
},
"children": []
}]
};
exports.u1F4BF = u1F4BF;
|
<reponame>smagill/opensphere-desktop
package io.opensphere.merge.model;
import static org.junit.Assert.assertEquals;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.Map;
import org.easymock.EasyMockSupport;
import org.junit.Test;
import io.opensphere.core.model.LatLonAlt;
import io.opensphere.core.model.time.TimeSpan;
import io.opensphere.core.util.collections.New;
import io.opensphere.mantle.data.geom.MapGeometrySupport;
import io.opensphere.mantle.data.geom.impl.SimpleMapPointGeometrySupport;
/**
 * Unit test for {@link MergedDataRow}.
 */
public class MergedDataRowTest
{
    /**
     * Verifies the constructor stores and exposes the data map, geometry and
     * time span unchanged.
     */
    @Test
    public void test()
    {
        EasyMockSupport support = new EasyMockSupport();

        Map<String, Serializable> data = New.map();
        MapGeometrySupport geometry = support.createMock(MapGeometrySupport.class);
        TimeSpan span = TimeSpan.get();

        support.replayAll();

        MergedDataRow row = new MergedDataRow(data, geometry, span);

        assertEquals(data, row.getData());
        assertEquals(geometry, row.getGeometry());
        assertEquals(span, row.getTimespan());

        support.verifyAll();
    }

    /**
     * Tests serializing the data.
     *
     * BUG FIX: this method was missing the {@code @Test} annotation, so JUnit
     * silently never executed it.
     *
     * @throws IOException if the test fails.
     * @throws ClassNotFoundException if the test fails.
     */
    @Test
    public void testSerialization() throws IOException, ClassNotFoundException
    {
        Map<String, Serializable> data = New.map();
        data.put("column1", "value1");

        // Use a concrete (serializable) geometry rather than a mock here.
        MapGeometrySupport geometry = new SimpleMapPointGeometrySupport(LatLonAlt.createFromDegrees(10, 11));
        TimeSpan span = TimeSpan.get();

        MergedDataRow row = new MergedDataRow(data, geometry, span);

        // Round-trip through Java serialization.
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        ObjectOutputStream objectOut = new ObjectOutputStream(out);
        objectOut.writeObject(row);

        ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
        ObjectInputStream objectIn = new ObjectInputStream(in);
        MergedDataRow serialized = (MergedDataRow)objectIn.readObject();

        assertEquals(data, serialized.getData());
        assertEquals(geometry, serialized.getGeometry());
        assertEquals(span, serialized.getTimespan());
    }
}
|
Proof:
Let S be the sum of the first n odd numbers, so
    S = 1 + 3 + 5 + ... + (2n-1) = sum_{k=1}^{n} (2k - 1).
Splitting each term 2k - 1 into 2k and -1 gives
    S = 2(1 + 2 + 3 + ... + n) - n
      = 2 * (n(n+1)/2) - n
      = n^2 + n - n
      = n^2.
|
<reponame>xbabka01/yaramod
/**
* @file src/examples/dump_rules_ast/dumper.h
* @brief Implementation of main for AST dumper.
* @copyright (c) 2017 Avast Software, licensed under the MIT license
*/
#include <string>
#include <vector>
#include <yaramod/yaramod.h>
#include "dumper.h"
int main(int argc, char* argv[])
{
    // Expect exactly one argument: the YARA file to dump.
    const std::vector<std::string> args(argv + 1, argv + argc);
    if (args.size() != 1)
    {
        std::cout << "Usage: dump-rules-ast YARA_FILE" << std::endl;
        return 1;
    }

    Dumper dumper;
    yaramod::Yaramod yaramod;

    // Parse the file and dump the condition AST of every rule it contains.
    auto yaraFile = yaramod.parseFile(args[0]);
    for (const auto& rule : yaraFile->getRules())
    {
        std::cout << "==== RULE: " << rule->getName() << std::endl;
        dumper.observe(rule->getCondition());
    }
    return 0;
}
|
from pypy.rlib import jit
from pypy.jit.metainterp.test.support import LLJitMixin, OOJitMixin
@jit.dont_look_inside
def escape(x):
    # Identity helper the JIT is told not to trace into; presumably used so
    # the argument "escapes" the traced code and is really allocated — the
    # tests below route every object under test through it.
    return x
class ImmutableFieldsTests:
    """Shared tests (run via the LL/OO mixin classes below) checking that
    reads of ``_immutable_fields_`` attributes are recorded as pure getfield
    operations: ``getfield_gc`` must be 0, with only ``getfield_gc_pure``
    remaining in the operation history.
    """

    def test_fields(self):
        # A single immutable field on one class.
        class X(object):
            _immutable_fields_ = ["x"]
            def __init__(self, x):
                self.x = x

        def f(x):
            y = escape(X(x))
            return y.x + 5

        res = self.interp_operations(f, [23])
        assert res == 28
        # The read of y.x must appear only as a pure getfield.
        self.check_operations_history(getfield_gc=0, getfield_gc_pure=1, int_add=1)

    def test_fields_subclass(self):
        # Immutable fields split across a base class and a subclass.
        class X(object):
            _immutable_fields_ = ["x"]
            def __init__(self, x):
                self.x = x

        class Y(X):
            _immutable_fields_ = ["y"]
            def __init__(self, x, y):
                X.__init__(self, x)
                self.y = y

        def f(x, y):
            X(x)   # force the field 'x' to be on class 'X'
            z = escape(Y(x, y))
            return z.x + z.y + 5

        res = self.interp_operations(f, [23, 11])
        assert res == 39
        self.check_operations_history(getfield_gc=0, getfield_gc_pure=2,
                                      int_add=2)

        # Intentional redefinition of f: same computation, but without the
        # X(x) call, so the field 'x' only shows up on subclass 'Y'.
        def f(x, y):
            # this time, the field 'x' only shows up on subclass 'Y'
            z = escape(Y(x, y))
            return z.x + z.y + 5

        res = self.interp_operations(f, [23, 11])
        assert res == 39
        self.check_operations_history(getfield_gc=0, getfield_gc_pure=2,
                                      int_add=2)

    def test_array(self):
        # An immutable array field ("y[*]"): element reads must also be pure.
        class X(object):
            _immutable_fields_ = ["y[*]"]
            def __init__(self, x):
                self.y = x

        def f(index):
            l = [1, 2, 3, 4]
            l[2] = 30
            a = escape(X(l))
            return a.y[index]

        res = self.interp_operations(f, [2], listops=True)
        assert res == 30
        self.check_operations_history(getfield_gc=0, getfield_gc_pure=1,
                                      getarrayitem_gc=0, getarrayitem_gc_pure=1)

    def test_array_in_immutable(self):
        # An immutable array field on a fully-immutable class: both the
        # field reads and the array element read must be pure.
        class X(object):
            _immutable_ = True
            _immutable_fields_ = ["lst[*]"]
            def __init__(self, lst, y):
                self.lst = lst
                self.y = y

        def f(x, index):
            y = escape(X([x], x+1))
            return y.lst[index] + y.y + 5

        res = self.interp_operations(f, [23, 0], listops=True)
        assert res == 23 + 24 + 5
        self.check_operations_history(getfield_gc=0, getfield_gc_pure=2,
                                      getarrayitem_gc=0, getarrayitem_gc_pure=1,
                                      int_add=3)
class TestLLtypeImmutableFieldsTests(ImmutableFieldsTests, LLJitMixin):
    # Runs the shared immutable-field tests against the lltype backend.
    pass
class TestOOtypeImmutableFieldsTests(ImmutableFieldsTests, OOJitMixin):
    # Runs the shared immutable-field tests against the ootype backend.
    pass
|
<reponame>xfys/lovetao<gh_stars>10-100
package com.inner.lovetao.settings.di.module;
import com.inner.lovetao.settings.mvp.contract.ContactServiceContract;
import com.inner.lovetao.settings.mvp.model.ContactServiceModel;
import dagger.Binds;
import dagger.Module;
/**
 * Dagger module binding the contact-service MVP model implementation
 * ({@link ContactServiceModel}) to its contract interface.
 *
 * Created by xcz
 */
@Module
public abstract class ContactServiceModule {

    /** Binds the concrete model so it can be injected as the contract type. */
    @Binds
    abstract ContactServiceContract.Model bindContactServiceModel(ContactServiceModel model);
}
|
<reponame>leongaban/redux-saga-exchange
import * as NS from '../../namespace';
import { initial } from '../initial';
// Reducer for the liquidity-pool "data" slice. Each success action merges
// its payload into the slice; unknown actions return the state unchanged.
export function dataReducer(state: NS.IReduxState['data'] = initial.data, action: NS.Action): NS.IReduxState['data'] {
  switch (action.type) {
    // Locked-TIO balance loaded.
    case 'LIQUIDITY-POOL:GET_TIO_LOCKED_BALANCE_SUCCESS': {
      return { ...state, tioLocked: action.payload };
    }

    // Total-TIO figure loaded.
    case 'LIQUIDITY-POOL:GET_TOTAL_TIO_SUCCESS': {
      return { ...state, totalTio: action.payload };
    }

    // LP assets arrive as a partial slice; spread them over the state.
    case 'LIQUIDITY-POOL:GET_LP_ASSETS_SUCCESS': {
      return { ...state, ...action.payload };
    }

    // Reading and writing the use-LP flag both carry the same payload shape.
    case 'LIQUIDITY-POOL:GET_USE_LP_SUCCESS':
    case 'LIQUIDITY-POOL:SET_USE_LP_SUCCESS': {
      return { ...state, useLiquidityPool: action.payload };
    }

    // A loan agreement was created: remember its PandaDoc url and id.
    case 'LIQUIDITY-POOL:POST_LOAN_AGREEMENT_SUCCESS': {
      return {
        ...state,
        pandaDocUrl: action.payload.pandaDocUrl,
        pandaDocId: action.payload.pandaDocId
      };
    }

    // Clearing the agreement only resets the url (id is left as-is,
    // matching the original behavior).
    case 'LIQUIDITY-POOL:REMOVE_LOAN_AGREEMENT': {
      return { ...state, pandaDocUrl: undefined };
    }

    default: return state;
  }
}
|
def Fibonacci(limit):
    """Return a list of the first *limit* Fibonacci numbers (starting 0, 1).

    BUG FIX: the original unconditionally seeded the list with [0, 1], so it
    returned two numbers even for limit 0 or 1. Now limit <= 0 yields [] and
    limit == 1 yields [0].

    Args:
        limit: How many Fibonacci numbers to produce.

    Returns:
        A list of the first *limit* Fibonacci numbers.
    """
    if limit <= 0:
        return []
    if limit == 1:
        return [0]

    # Seed with the first two numbers, then extend by summing the last pair.
    fibonacci_numbers = [0, 1]
    for _ in range(2, limit):
        fibonacci_numbers.append(fibonacci_numbers[-1] + fibonacci_numbers[-2])
    return fibonacci_numbers
# Driver code: prints [0, 1, 1, 2, 3, 5, 8, 13, 21, 34]
limit = 10
print(Fibonacci(limit))
|
package mezz.jei.api.ingredients;
import net.minecraft.client.renderer.block.model.IBakedModel;
import net.minecraft.item.Item;
/**
 * Marker interface (no methods): put this interface on your {@link Item} to
 * skip JEI's render optimizations.
 *
 * This is useful for baked models that use ASM and do not use {@link IBakedModel#isBuiltInRenderer}.
 * If your model does not use ASM it should work fine, please report a bug instead of using this interface.
 *
 * @since JEI 4.7.11
 */
public interface ISlowRenderItem {

}
|
<filename>algorand-spring-starter-demo/src/main/java/com/algorand/starter/demo/controller/CircleController.java
package com.algorand.starter.demo.controller;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.client.RestClientException;
import org.springframework.web.client.RestTemplate;
import com.algorand.starter.demo.cryptopayment.model.AuthorisationRequest;
import com.algorand.starter.demo.cryptopayment.model.Card;
import com.algorand.starter.demo.cryptopayment.model.CryptoWallet;
import com.algorand.starter.demo.cryptopayment.model.Instruction;
import com.algorand.starter.demo.cryptopayment.model.PaymentInstrument;
import com.algorand.starter.demo.cryptopayment.model.Value;
import com.algorand.starter.demo.helper.CryptoPaymentMapper;
import com.algorand.starter.demo.model.BankAccount;
import com.algorand.starter.demo.model.Payout;
import com.algorand.starter.demo.model.WireTransfer;
/**
 * REST endpoints that proxy Circle sandbox operations (bank accounts, wire
 * transfers, payouts) and demonstrate crypto-payment request mapping.
 */
@RestController
public class CircleController {

    @Autowired
    private RestTemplate restTemplate;

    /**
     * Builds the JSON request headers shared by every Circle API call.
     * Extracted because the original duplicated this block in four methods.
     *
     * SECURITY NOTE(review): the bearer tokens are hard-coded at the call
     * sites below; they should be moved to external configuration or a
     * secret store, not kept in source code.
     *
     * @param bearerToken full "Bearer ..." Authorization header value
     * @return headers carrying Authorization, JSON content type and accept
     */
    private static HttpHeaders buildJsonHeaders(String bearerToken) {
        HttpHeaders headers = new HttpHeaders();
        headers.set("Authorization", bearerToken);
        headers.setContentType(MediaType.APPLICATION_JSON);
        List<MediaType> mediaTypes = new ArrayList<>();
        mediaTypes.add(MediaType.APPLICATION_JSON);
        headers.setAccept(mediaTypes);
        return headers;
    }

    /** Creates a wire (bank) account on the Circle sandbox. */
    @PostMapping("bankAccount")
    public String createBankAccount(@RequestBody BankAccount bankAccount) throws RestClientException, URISyntaxException {
        HttpEntity<BankAccount> entity = new HttpEntity<>(bankAccount,
                buildJsonHeaders("Bearer QVBJX0tFWTo2M2UxYjI2YmQxMDA5MjE3ZjFlMTVkZjk4OTk1OTA0NTo0OD<KEY>"));
        ResponseEntity<String> exchange = restTemplate.exchange(new URI("https://api-sandbox.circle.com/v1/businessAccount/banks/wires"), HttpMethod.POST, entity, String.class);
        return "Bank Account created successfully "+exchange;
    }

    /** Fetches a previously created wire (bank) account by id. */
    @GetMapping("bankAccount/{id}")
    public String getBankAccount(@PathVariable String id) throws RestClientException, URISyntaxException {
        HttpEntity<HttpHeaders> entity = new HttpEntity<>(buildJsonHeaders("Bearer <KEY>"));
        ResponseEntity<String> exchange = restTemplate.exchange(new URI("https://api-sandbox.circle.com/v1/businessAccount/banks/wires/"+id), HttpMethod.GET, entity, String.class);
        return "Bank Account "+exchange;
    }

    /** Triggers a mock inbound wire transfer on the sandbox. */
    @PostMapping("wireTransfer")
    public String wireTransfer(@RequestBody WireTransfer wireTransfer) throws RestClientException, URISyntaxException {
        HttpEntity<WireTransfer> entity = new HttpEntity<>(wireTransfer, buildJsonHeaders("Bearer <KEY>"));
        ResponseEntity<String> exchange = restTemplate.exchange(new URI("https://api-sandbox.circle.com/v1/mocks/payments/wire"), HttpMethod.POST, entity, String.class);
        return "Bank Transfer done successfully "+exchange;
    }

    /** Requests a payout from the Circle business account. */
    @PostMapping("payout")
    public String payout(@RequestBody Payout payout) throws RestClientException, URISyntaxException {
        HttpEntity<Payout> entity = new HttpEntity<>(payout, buildJsonHeaders("Bearer <KEY>"));
        ResponseEntity<String> exchange = restTemplate.exchange(new URI("https://api-sandbox.circle.com/v1/businessAccount/payouts"), HttpMethod.POST, entity, String.class);
        System.out.println("Payout response "+exchange);
        return "Bank Transfer done successfully "+exchange;
    }

    /**
     * Maps a raw payment request and logs the recognized instrument details.
     *
     * NOTE(review): this method takes {@code @RequestBody} but has no request
     * mapping annotation, so it is not exposed as an endpoint — confirm
     * whether that is intentional before adding one.
     *
     * @param paymentRequest raw payment request payload
     * @return "SUCCESS" when the request carried a value block, else "ERROR"
     * @throws Exception propagated from the mapper
     */
    public String mapPayment(@RequestBody String paymentRequest) throws Exception {
        String result = "ERROR";
        if (paymentRequest != null && paymentRequest.length() > 0) {
            CryptoPaymentMapper cryptoPaymentMapper = new CryptoPaymentMapper();
            AuthorisationRequest authRequest = cryptoPaymentMapper.mapPaymentRequest(paymentRequest);
            if (authRequest != null) {
                Instruction instruction = authRequest.getInstruction();
                if (instruction != null) {
                    PaymentInstrument paymentInstrument = instruction.getPaymentInstrument();
                    if (paymentInstrument != null) {
                        System.out.println("Payment Instrument Class Name: " + paymentInstrument.getClass().getName());
                        // Branch on the concrete instrument type to log its details.
                        if (paymentInstrument instanceof Card) {
                            Card card = (Card)paymentInstrument;
                            String pan = card.getPrimaryAccountNumber();
                            System.out.println("PAN = " + pan);
                        } else if (paymentInstrument instanceof CryptoWallet) {
                            CryptoWallet cryptoWallet = (CryptoWallet)paymentInstrument;
                            String tokenId = cryptoWallet.getTokenId();
                            String provider = cryptoWallet.getProvider();
                            String blockChain = cryptoWallet.getBlockchain();
                            System.out.println("TokenId = " + tokenId);
                            System.out.println("Provider = " + provider);
                            System.out.println("BlockChain = " + blockChain);
                        }
                    }
                    Value value = instruction.getValue();
                    if (value != null) {
                        Integer amount = value.getAmount();
                        String currency = value.getCurrency();
                        System.out.println("amount = " + amount);
                        System.out.println("currency = " + currency);
                        result = "SUCCESS";
                    }
                }
            }
        }
        return result;
    }
}
|
var ADLbrowserDataTable = (function () {
    var self = {};

    /**
     * Renders a query result into a DataTable shown in the main dialog.
     *
     * @param data    result object; data.data is an array of row objects whose
     *                values have a `.value` property
     * @param options options.selectVars lists selected variable names
     *                (each starts with a one-character prefix that is stripped)
     */
    self.showQueryResult = function (data, options) {
        var dataSet = [];
        var cols = [];
        var keys = {};

        // One column per selected variable, plus a companion "<var>Label".
        options.selectVars.forEach(function (varName) {
            var key = varName.substring(1);
            cols.push({title: key});
            cols.push({title: key + "Label"});
            keys[key] = 1;
            keys[key + "Label"] = 1;
        });

        // Flatten each result row into an array ordered like `keys`;
        // missing cells become empty strings.
        data.data.forEach(function (item) {
            var line = [];
            for (var key in keys) {
                line.push(item[key] ? item[key].value : "");
            }
            dataSet.push(line);
        });

        // $("#ADLquery_tabs").tabs("option", "active", 1);
        $('#mainDialogDiv').dialog("open");
        $('#mainDialogDiv').html("<table id='dataTableDiv'></table>");

        // BUG FIX: the 500ms delay was accidentally placed inside the callback
        // body (as a stray comma expression), so setTimeout ran with no delay.
        // It now goes where it belongs: as setTimeout's second argument.
        setTimeout(function () {
            $('#dataTableDiv').DataTable({
                data: dataSet,
                columns: cols,
                "pageLength": 15,
                dom: 'Bfrtip',
                buttons: [
                    'copy', 'csv', 'excel', 'pdf', 'print'
                ]
            });
        }, 500);
    };

    return self;
})();
|
# Evaluate the 512+512+512-N-VB/13 checkpoint on the WikiText-103 validation
# set, applying the remove_all_but_named_entities_first_third_full
# augmentation and the last_element_eval evaluation function.
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+512+512-N-VB/13-model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+512+512-N-VB/13-512+512+512-NER-1 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function remove_all_but_named_entities_first_third_full --eval_function last_element_eval
|
package com.example.co4sat;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import androidx.appcompat.widget.SwitchCompat;
import android.Manifest;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.pm.PackageManager;
import android.location.Location;
import android.location.LocationListener;
import android.location.LocationManager;
import android.os.Build;
import android.os.Bundle;
import android.widget.CompoundButton;
import android.widget.TextView;
import android.widget.Toast;
import java.util.Formatter;
import java.util.Locale;
/**
 * Activity that displays the current GPS speed, switchable between metric
 * (km/h) and imperial (miles/h) units via a toggle.
 */
public class SPEED extends AppCompatActivity implements LocationListener {

    SwitchCompat sw_metric;
    TextView textView3;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_speed);
        sw_metric = findViewById(R.id.sw_metric);
        textView3 = findViewById(R.id.textView3);

        // Request the fine-location permission at runtime (API 23+); start
        // location updates immediately if it is already granted.
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && checkSelfPermission(Manifest.permission.ACCESS_FINE_LOCATION) != PackageManager.PERMISSION_GRANTED) {
            requestPermissions(new String[]{Manifest.permission.ACCESS_FINE_LOCATION}, 1000);
        } else {
            // start
            doStuff();
        }

        // Show "000.0" until the first fix arrives.
        this.updateSpeed(null);

        // Re-render the last speed whenever the unit toggle changes.
        sw_metric.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
            @Override
            public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
                SPEED.this.updateSpeed(null);
            }
        });
    }

    @Override
    public void onLocationChanged(@NonNull Location location) {
        // Defensive null check kept even though the parameter is @NonNull.
        if (location != null) {
            CLocation myLocation = new CLocation(location, this.useMetricUnits());
            this.updateSpeed(myLocation);
        }
    }

    @Override
    public void onStatusChanged(String provider, int status, Bundle extras) {
    }

    @Override
    public void onProviderEnabled(@NonNull String provider) {
    }

    @Override
    public void onProviderDisabled(@NonNull String provider) {
    }

    /** Starts GPS location updates; permission is checked by the caller. */
    @SuppressLint("MissingPermission")
    private void doStuff() {
        LocationManager locationManager = (LocationManager) this.getSystemService(Context.LOCATION_SERVICE);
        if (locationManager != null) {
            // FIX: GPS_PROVIDER is a static constant — reference it through
            // the class, not through the instance.
            locationManager.requestLocationUpdates(LocationManager.GPS_PROVIDER, 0, 0, this);
        }
        Toast.makeText(this, "Waiting for GPS connection", Toast.LENGTH_SHORT).show();
    }

    /**
     * Formats and displays the current speed; a null location renders 0 in
     * the currently selected unit.
     */
    private void updateSpeed(CLocation location) {
        float nCurrentSpeed = 0;
        if (location != null) {
            location.setUseMetricUnits(this.useMetricUnits());
            nCurrentSpeed = location.getSpeed();
        }

        // Fixed-width "%5.1f" with leading spaces replaced by zeros,
        // e.g. "  3.4" -> "003.4".
        Formatter fmt = new Formatter(new StringBuilder());
        fmt.format(Locale.FRANCE, "%5.1f", nCurrentSpeed);
        String strCurrentSpeed = fmt.toString();
        strCurrentSpeed = strCurrentSpeed.replace(" ", "0");

        if (this.useMetricUnits()) {
            textView3.setText(strCurrentSpeed + "Km/h");
        } else {
            textView3.setText(strCurrentSpeed + "miles/h");
        }
    }

    /** Returns true when the unit toggle selects metric (km/h). */
    private boolean useMetricUnits() {
        return sw_metric.isChecked();
    }

    @Override
    public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        if (requestCode == 1000) {
            if (grantResults[0] == PackageManager.PERMISSION_GRANTED) {
                doStuff();
            } else {
                // Without location permission the activity cannot function.
                finish();
            }
        }
    }
}
|
#!/bin/sh
# Build the client and server binaries.
# ae.c (event loop) is compiled once and linked only into the server;
# the client is linked from client.o alone.
gcc -c ae.c -o ae.o
gcc -c client.c -o client.o -lpthread -std=c11
gcc client.o -o client -lpthread -std=c11
gcc -c server.c -o server.o -lpthread -std=c11
gcc ae.o server.o -o server -lpthread -std=c11
|
#!/bin/bash
# Launch the mini-cluster via the Maven "cluster" profile, forwarding all
# script arguments to the exec plugin.
echo "Start the mini-cluster with the following arguments : $*"
mvn exec:exec -Dexec.arguments="$*" -Pcluster
|
#!/bin/bash
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.

# Runs the ONNX Runtime C# end-to-end NuGet tests.
#   $1 - path to a local NuGet repository holding the package under test
#   $2 - ONNX Runtime package version to test
#   $3 - "true"/"True" when running on macOS (defaults to false)
LocalNuGetRepo=$1
export CurrentOnnxRuntimeVersion=$2
IsMacOS=${3:-false}
PACKAGENAME=${PACKAGENAME:-Microsoft.ML.OnnxRuntime}
RunTestCsharp=${RunTestCsharp:-true}
RunTestNative=${RunTestNative:-true}

# -e aborts on the first failing command; -x traces each command.
# NOTE(review): because of `set -e`, the `if [ $? -ne 0 ]` checks below are
# unreachable — a failing dotnet command exits the script before they run.
set -x -e

pushd .
cd $BUILD_SOURCESDIRECTORY

echo "Current NuGet package version is $CurrentOnnxRuntimeVersion"

if [ $RunTestCsharp = "true" ]; then
  if [[ $IsMacOS == "True" || $IsMacOS == "true" ]]; then
    # On macOS the ONNX node test data must be linked into the models dir.
    mkdir -p $BUILD_BINARIESDIRECTORY/models
    ln -s $BUILD_SOURCESDIRECTORY/cmake/external/onnx/onnx/backend/test/data/node $BUILD_BINARIESDIRECTORY/models/opset16
  fi
  # Run C# tests
  dotnet restore $BUILD_SOURCESDIRECTORY/csharp/test/Microsoft.ML.OnnxRuntime.EndToEndTests/Microsoft.ML.OnnxRuntime.EndToEndTests.csproj -s $LocalNuGetRepo -s https://api.nuget.org/v3/index.json
  if [ $? -ne 0 ]; then
    echo "Failed to restore nuget packages for the test project"
    exit 1
  fi
  if [ $PACKAGENAME = "Microsoft.ML.OnnxRuntime.Gpu" ]; then
    # GPU package: exercise both the CUDA and TensorRT execution providers.
    export TESTONGPU=ON
    dotnet test -p:DefineConstants=USE_CUDA $BUILD_SOURCESDIRECTORY/csharp/test/Microsoft.ML.OnnxRuntime.EndToEndTests/Microsoft.ML.OnnxRuntime.EndToEndTests.csproj --no-restore --verbosity detailed
    if [ $? -ne 0 ]; then
      echo "Failed to build or execute the end-to-end test"
      exit 1
    fi
    dotnet test -p:DefineConstants=USE_TENSORRT $BUILD_SOURCESDIRECTORY/csharp/test/Microsoft.ML.OnnxRuntime.EndToEndTests/Microsoft.ML.OnnxRuntime.EndToEndTests.csproj --no-restore --verbosity detailed
  else
    dotnet test $BUILD_SOURCESDIRECTORY/csharp/test/Microsoft.ML.OnnxRuntime.EndToEndTests/Microsoft.ML.OnnxRuntime.EndToEndTests.csproj --no-restore --verbosity detailed
  fi
  if [ $? -ne 0 ]; then
    echo "Failed to build or execute the end-to-end test"
    exit 1
  fi
fi

# NOTE(review): $OldDir is never assigned in this script, so this cd is a
# no-op ("cd" with an empty argument goes to $HOME); popd restores the
# original directory anyway.
cd $OldDir
popd
|
const { Product } = require("../models");
const createProduct = async (_, { input }) => {
const newProduct = new Product(input);
await newProduct.save();
return newProduct;
};
module.exports = createProduct;
|
<reponame>mason-fish/brim<filename>zealot/api/archive.ts
import {FetchArgs} from "../fetcher/fetcher"
export type IndexSearchArgs = {
spaceId: string
patterns: string[]
index_name?: string
signal?: AbortSignal
}
// Build the FetchArgs for an ndjson-formatted index search within a space.
export function search(args: IndexSearchArgs): FetchArgs {
  const {spaceId, index_name, patterns, signal} = args
  const body = JSON.stringify({index_name, patterns})
  return {
    method: "POST",
    path: `/space/${spaceId}/indexsearch?format=ndjson`,
    body,
    signal
  }
}
|
<reponame>FreDP47/WashBuddiez
import { Component, OnInit } from '@angular/core';
import { OrderService } from '../services/order.service';
import { Order } from 'app/models/model.interface';
import {environment} from '../../environments/environment.prod';
// Checkout page: shows the current order, applies an optional coupon, and
// submits the order to the mail endpoint via OrderService.
@Component({
  selector: 'app-checkout',
  templateUrl: './checkout.component.html',
  styleUrls: ['./checkout.component.scss']
})
export class CheckoutComponent {
  // The order being checked out, obtained from the shared OrderService.
  order: Order;

  constructor(private orderService: OrderService) {
    this.order = orderService.getOrder();
  }

  // Validates the cart, applies the 'Wash@10' coupon (10% discount) when
  // present, then posts the order to `${api_url}/sendmail`. Shows a blocking
  // overlay during the request and a success/error banner afterwards.
  submitOrder() {
    const overlayElement = document.getElementById('overlay');
    const alertelement = document.getElementById('alert');
    if (this.order.finalPrice > 0) {
      // Coupon comparison is case-insensitive.
      if (this.order.couponCode != null && this.order.couponCode !== ''
        && this.order.couponCode.toLocaleLowerCase() === 'Wash@10'.toLocaleLowerCase()) {
        this.order.finalPrice = this.order.finalPrice * 0.9;
        // NOTE(review): AddCheckoutDetails is only invoked when a valid coupon
        // was entered — confirm whether it should also run for coupon-less orders.
        this.orderService.AddCheckoutDetails(this.order);
      }
      // Sending mail to user and to Admin
      overlayElement.style.display = 'block';
      this.orderService.sendEmail(`${environment.api_url}/sendmail`, this.order).subscribe(
        data => {
          const res: any = data;
          alertelement.innerText = 'Order submitted successfully.';
          alertelement.classList.add('alert-success');
          alertelement.style.display = 'block';
          // Clear the cart after a successful submission.
          this.orderService.resetOrder(new Order());
          overlayElement.style.display = 'none';
        },
        err => {
          console.log(err);
          alertelement.innerText = 'Some error occurred. Please try again later.';
          alertelement.classList.add('alert-danger');
          alertelement.style.display = 'block';
          // NOTE(review): the order is also reset on failure, so the user
          // cannot simply retry — confirm this is intended.
          this.orderService.resetOrder(new Order());
          overlayElement.style.display = 'none';
        });
    } else {
      // Empty cart: show an inline error instead of submitting.
      alertelement.innerText = 'There are no items in your cart. Please add some items from the pricing page.';
      alertelement.classList.add('alert-danger');
      alertelement.style.display = 'block';
    }
  }
}
|
<filename>src/test/java/net/andreaskluth/elefantenstark/TestData.java<gh_stars>1-10
package net.andreaskluth.elefantenstark;
import java.sql.Connection;
import net.andreaskluth.elefantenstark.producer.Producer;
import net.andreaskluth.elefantenstark.work.WorkItem;
/** Test fixture helpers for seeding the work queue. */
public class TestData {

  /**
   * Seeds the queue with three work items: two sharing the "a"/"b" key
   * (versions 23 and 24) and one with the "c"/"d" key (version 12).
   */
  public static void scheduleThreeWorkItems(Connection connection) {
    Producer producer = new Producer();
    WorkItem[] items = {
        WorkItem.hashedOnKey("a", "b", 23),
        WorkItem.hashedOnKey("a", "b", 24),
        WorkItem.hashedOnKey("c", "d", 12)
    };
    for (WorkItem item : items) {
      producer.produce(connection, item);
    }
  }
}
|
def fahrenheit_to_celsius(temperature):
    """Convert a temperature from degrees Fahrenheit to degrees Celsius.

    Args:
        temperature: Temperature in degrees Fahrenheit.

    Returns:
        The equivalent temperature in degrees Celsius.
    """
    # Remove the 32 degree freezing-point offset, then rescale
    # (1.8 Fahrenheit degrees per Celsius degree).
    return (temperature - 32) / 1.8
|
package cn.finalteam.rxgalleryfinalprovider.ui.activity;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.support.annotation.Nullable;
import android.support.v7.app.AppCompatActivity;
import cn.finalteam.rxgalleryfinalprovider.RxGalleryFinal;
import cn.finalteam.rxgalleryfinalprovider.di.component.RxGalleryFinalComponent;
/**
 * Base activity for RxGalleryFinal screens: injects the shared Dagger
 * component in onCreate, or schedules the activity to finish shortly after
 * when no component is available (e.g. the process was recreated).
 *
 * Author: pengjianbo
 * Date: 2016/5/16 7:36 PM
 */
public abstract class BaseActivity extends AppCompatActivity {

    @Override
    protected void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        RxGalleryFinalComponent rxGalleryFinalComponent = RxGalleryFinal.getRxGalleryFinalComponent();
        if(rxGalleryFinalComponent == null) {
            // No DI component available: close this screen after a 500 ms
            // delay instead of continuing with an unusable state.
            mFinishHanlder.sendEmptyMessageDelayed(0, 500);
            return;
        }
        setupComponent(rxGalleryFinalComponent);
    }

    public abstract void findViews();

    // Finishes the activity when the delayed message arrives (see onCreate).
    // NOTE(review): a non-static Handler holds an implicit reference to the
    // Activity and can leak it if the message outlives the screen — consider
    // a static handler with a WeakReference.
    protected Handler mFinishHanlder = new Handler() {
        @Override
        public void handleMessage(Message msg) {
            super.handleMessage(msg);
            finish();
        }
    };

    protected abstract void setTheme();

    protected abstract void setupComponent(RxGalleryFinalComponent rxGalleryFinalComponent);
}
|
# Solution for posFloatOrZeroValidator function
def posFloatOrZeroValidator(value: float) -> None:
    """Validate that *value* is a non-negative float or int.

    Args:
        value: The number to validate.

    Raises:
        ValueError: If *value* is not a float/int (bools are rejected too),
            or if it is negative.
    """
    # bool is a subclass of int, so an explicit check is required to reject
    # True/False — the test suite below lists them among the invalid inputs.
    if isinstance(value, bool) or not isinstance(value, (float, int)):
        raise ValueError("Input must be a float or an integer")
    if value < 0:
        raise ValueError("Input must be a positive float or zero")
# Test cases using pytest
import pytest

# Valid inputs: non-negative floats, including the zero boundary.
positive_float_values = [0.5, 1.0, 3.14, 100.0, 0.0]
# Invalid inputs: negative numbers.
negative_float_values = [-1.0, -3.14, -100.0]
# Invalid inputs: wrong types. Note True/False are listed here, so the
# validator must reject bools even though bool is a subclass of int.
not_float_type = [None, "string", True, False, [], {}]


def test_PosFloatOrZeroValidator_proper_values():
    """Check if proper positive float values or zero are validated without exceptions."""
    for value in positive_float_values:
        # no exception is expected
        posFloatOrZeroValidator(value)


def test_PosFloatOrZeroValidator_wrong_values():
    """Check if improper positive float values are not validated."""
    for value in negative_float_values:
        # exception is expected
        with pytest.raises(ValueError):
            posFloatOrZeroValidator(value)


def test_PosFloatOrZeroValidator_non_float_values():
    """Check if non-float values are not validated."""
    for value in not_float_type:
        # exception is expected
        with pytest.raises(ValueError):
            posFloatOrZeroValidator(value)
|
# frozen_string_literal: true
require_relative "../../spec_helper"
# "summary": {
# "duration": 0.02296628,
# "example_count": 4,
# "failure_count": 1,
# "pending_count": 2,
# "errors_outside_of_examples_count": 0
# },
# "summary_line": "4 examples, 1 failure, 2 pending"
RSpec.describe RspecConsolidator::Summary do
  let(:json_string) { file_fixture("simple.json").read }
  let(:hash) { JSON.parse(json_string)["summary"] }

  describe "#initialize" do
    it "works with a hash" do
      summary = RspecConsolidator::Summary.new(hash)
      expect(summary).to_not be_nil
    end
  end

  describe "methods" do
    let(:summary) { RspecConsolidator::Summary.new(hash) }

    # One reader per summary field; expected values come from the
    # simple.json fixture documented at the top of this file.
    {
      duration: 0.02296628,
      example_count: 4,
      failure_count: 1,
      pending_count: 2,
      errors_outside_of_examples_count: 0
    }.each do |reader, expected|
      describe ".#{reader}" do
        it "returns the value" do
          expect(summary.public_send(reader)).to eq(expected)
        end
      end
    end
  end
end
|
% Simple linear regression of y on x via the least-squares backslash operator.
x = [1 2 3 4 5 6];
y = [2 4 6 8 10 12];

% Build the design matrix [1 x] with one row per observation.
% Fixed: x is a 1x6 ROW vector, so size(x,1) is 1 and the original
% ones(size(x,1),1) produced a 1x1 block that cannot be horizontally
% concatenated with the 6x1 column x'. Use the observation count instead.
X = [ones(length(x),1) x'];

% Solve X*b ~= y' in the least-squares sense: b = [intercept; slope].
b = X\y';
yCalc = X*b;

disp('Slope:')
disp(b(2))
disp('Y intercept:')
disp(b(1))
|
describe("IfDirective", function () {
it("for true literal", function () {
var MyComponent = san.defineComponent({
template: '<div><span san-if="true" title="errorrik">errorrik</span></div>'
});
var myComponent = new MyComponent();
var wrap = document.createElement('div');
document.body.appendChild(wrap);
myComponent.attach(wrap);
var spans = wrap.getElementsByTagName('span');
expect(spans.length).toBe(1);
myComponent.dispose();
document.body.removeChild(wrap);
});
it("for false literal", function () {
var MyComponent = san.defineComponent({
template: '<div><span san-if="false" title="errorrik">errorrik</span></div>'
});
var myComponent = new MyComponent();
var wrap = document.createElement('div');
document.body.appendChild(wrap);
myComponent.attach(wrap);
var spans = wrap.getElementsByTagName('span');
expect(spans.length).toBe(0);
myComponent.dispose();
document.body.removeChild(wrap);
});
it("for false literal use s-", function () {
var MyComponent = san.defineComponent({
template: '<div><span s-if="false" title="errorrik">errorrik</span></div>'
});
var myComponent = new MyComponent();
var wrap = document.createElement('div');
document.body.appendChild(wrap);
myComponent.attach(wrap);
var spans = wrap.getElementsByTagName('span');
expect(spans.length).toBe(0);
myComponent.dispose();
document.body.removeChild(wrap);
});
it("render when true, and update soon", function (done) {
var MyComponent = san.defineComponent({
template: '<div><span san-if="cond" title="errorrik">errorrik</span></div>'
});
var myComponent = new MyComponent();
myComponent.data.set('cond', true);
var wrap = document.createElement('div');
document.body.appendChild(wrap);
myComponent.attach(wrap);
var span = wrap.firstChild.firstChild;
expect(span.title).toBe('errorrik');
myComponent.data.set('cond', false);
san.nextTick(function () {
var spans = wrap.getElementsByTagName('span');
expect(spans.length).toBe(0);
myComponent.dispose();
document.body.removeChild(wrap);
done();
});
});
it("render when false, and update soon", function (done) {
var MyComponent = san.defineComponent({
template: '<div><span san-if="!cond" title="errorrik">errorrik</span></div>'
});
var myComponent = new MyComponent();
myComponent.data.set('cond', true);
var wrap = document.createElement('div');
document.body.appendChild(wrap);
myComponent.attach(wrap);
var spans = wrap.getElementsByTagName('span');
expect(spans.length).toBe(0);
myComponent.data.set('cond', false);
san.nextTick(function () {
var span = wrap.getElementsByTagName('span')[0];
expect(span.title).toBe('errorrik');
myComponent.dispose();
document.body.removeChild(wrap);
done();
});
});
it("render when false, and update soon, interp compat", function (done) {
var MyComponent = san.defineComponent({
template: '<div><span san-if="{{!cond}}" title="errorrik">errorrik</span></div>'
});
var myComponent = new MyComponent();
myComponent.data.set('cond', true);
var wrap = document.createElement('div');
document.body.appendChild(wrap);
myComponent.attach(wrap);
var spans = wrap.getElementsByTagName('span');
expect(spans.length).toBe(0);
myComponent.data.set('cond', false);
san.nextTick(function () {
var span = wrap.getElementsByTagName('span')[0];
expect(span.title).toBe('errorrik');
myComponent.dispose();
document.body.removeChild(wrap);
done();
});
});
it("and else", function (done) {
var MyComponent = san.defineComponent({
template: '<div><span san-if="!cond" title="errorrik">errorrik</span> <span san-else title="varsha">varsha</span></div>'
});
var myComponent = new MyComponent();
myComponent.data.set('cond', true);
var wrap = document.createElement('div');
document.body.appendChild(wrap);
myComponent.attach(wrap);
var spans = wrap.getElementsByTagName('span');
expect(spans.length).toBe(1);
expect(spans[0].title).toBe('varsha');
myComponent.data.set('cond', false);
san.nextTick(function () {
var spans = wrap.getElementsByTagName('span');
expect(spans.length).toBe(1);
expect(spans[0].title).toBe('errorrik');
myComponent.dispose();
document.body.removeChild(wrap);
done();
});
});
it("render list, init false, update soon", function (done) {
var MyComponent = san.defineComponent({
template: '<ul><li>name - email</li><li san-if="cond" san-for="p,i in persons" title="{{p.name}}">{{p.name}} - {{p.email}}</li><li>name - email</li></ul>'
});
var myComponent = new MyComponent();
myComponent.data.set('cond', false);
myComponent.data.set('persons', [
{name: 'errorrik', email: '<EMAIL>'},
{name: 'varsha', email: '<EMAIL>'}
]);
var wrap = document.createElement('div');
document.body.appendChild(wrap);
myComponent.attach(wrap);
var lis = wrap.getElementsByTagName('li');
expect(lis.length).toBe(2);
myComponent.data.set('cond', true);
san.nextTick(function () {
var lis = wrap.getElementsByTagName('li');
expect(lis.length).toBe(4);
expect(lis[2].getAttribute('title')).toBe('varsha');
expect(lis[2].innerHTML.indexOf('varsha - <EMAIL>')).toBe(0);
expect(lis[1].getAttribute('title')).toBe('errorrik');
expect(lis[1].innerHTML.indexOf('errorrik - <EMAIL>')).toBe(0);
myComponent.dispose();
document.body.removeChild(wrap);
done();
});
});
it("render list, init true, update soon", function (done) {
var MyComponent = san.defineComponent({
template: '<ul><li>name - email</li><li san-if="cond" san-for="p,i in persons" title="{{p.name}}">{{p.name}} - {{p.email}}</li><li>name - email</li></ul>'
});
var myComponent = new MyComponent();
myComponent.data.set('cond', true);
myComponent.data.set('persons', [
{name: 'errorrik', email: '<EMAIL>'},
{name: 'varsha', email: '<EMAIL>'}
]);
var wrap = document.createElement('div');
document.body.appendChild(wrap);
myComponent.attach(wrap);
var lis = wrap.getElementsByTagName('li');
expect(lis.length).toBe(4);
expect(lis[2].getAttribute('title')).toBe('varsha');
expect(lis[2].innerHTML.indexOf('varsha - <EMAIL>')).toBe(0);
expect(lis[1].getAttribute('title')).toBe('errorrik');
expect(lis[1].innerHTML.indexOf('errorrik - <EMAIL>')).toBe(0);
myComponent.data.set('cond', false);
san.nextTick(function () {
var lis = wrap.getElementsByTagName('li');
expect(lis.length).toBe(2);
myComponent.data.unshift('persons',
{name: 'otakustay', email: '<EMAIL>'}
);
myComponent.data.set('cond', true);
san.nextTick(function () {
var lis = wrap.getElementsByTagName('li');
expect(lis.length).toBe(5);
expect(lis[3].getAttribute('title')).toBe('varsha');
expect(lis[3].innerHTML.indexOf('varsha - <EMAIL>')).toBe(0);
expect(lis[1].getAttribute('title')).toBe('otakustay');
expect(lis[1].innerHTML.indexOf('otakustay - <EMAIL>')).toBe(0);
myComponent.dispose();
document.body.removeChild(wrap);
done();
});
});
});
it("render list, init true, render data use as condition", function (done) {
var MyComponent = san.defineComponent({
template: '<div><ul san-if="persons"><li san-for="p,i in persons" title="{{p.name}}">{{p.name}} - {{p.email}}</li></ul></div>'
});
var myComponent = new MyComponent();
myComponent.data.set('persons', [
{name: 'errorrik', email: '<EMAIL>'},
{name: 'varsha', email: '<EMAIL>'}
]);
var wrap = document.createElement('div');
document.body.appendChild(wrap);
myComponent.attach(wrap);
var lis = wrap.getElementsByTagName('li');
expect(lis.length).toBe(2);
expect(lis[1].getAttribute('title')).toBe('varsha');
expect(lis[1].innerHTML.indexOf('varsha - <EMAIL>')).toBe(0);
expect(lis[0].getAttribute('title')).toBe('errorrik');
expect(lis[0].innerHTML.indexOf('errorrik - <EMAIL>')).toBe(0);
myComponent.data.set('persons', [
{name: 'otakustay', email: '<EMAIL>'},
{name: 'errorrik', email: '<EMAIL>'},
{name: 'varsha', email: '<EMAIL>'}
]);
san.nextTick(function () {
var lis = wrap.getElementsByTagName('li');
expect(lis.length).toBe(3);
expect(lis[2].getAttribute('title')).toBe('varsha');
expect(lis[2].innerHTML.indexOf('varsha - <EMAIL>')).toBe(0);
expect(lis[1].getAttribute('title')).toBe('errorrik');
expect(lis[1].innerHTML.indexOf('errorrik - <EMAIL>')).toBe(0);
expect(lis[0].getAttribute('title')).toBe('otakustay');
expect(lis[0].innerHTML.indexOf('otakustay - <EMAIL>')).toBe(0);
myComponent.dispose();
document.body.removeChild(wrap);
done();
});
});
// Helper component shared by the component-rendering specs below:
// renders a flat list of phone numbers.
var TelList = san.defineComponent({
    template: '<ul><li san-for="item in list" title="{{item}}">{{item}}</li></ul>'
});

// Renders a list of persons, delegating each person's phone numbers
// to a nested TelList.
var PersonList = san.defineComponent({
    components: {
        'ui-tel': TelList
    },
    template: '<div><dl san-for="item in list"><dt title="{{item.name}}">{{item.name}}</dt><dd><ui-tel list="{{item.tels}}"></ui-tel></dd></dl></div>'
});
it("render component, init false, update soon", function (done) {
var MyComponent = san.defineComponent({
components: {
'ui-person': PersonList
},
template: '<div><ui-person list="{{persons}}" san-if="cond"></ui-person></div>'
});
var myComponent = new MyComponent();
myComponent.data.set('cond', false);
myComponent.data.set('persons', [
{
name: 'erik',
tels: [
'12345678',
'123456789',
]
},
{
name: 'firede',
tels: [
'2345678',
'23456789',
]
}
]);
var wrap = document.createElement('div');
document.body.appendChild(wrap);
myComponent.attach(wrap);
var dts = wrap.getElementsByTagName('dt');
var dds = wrap.getElementsByTagName('dd');
expect(dts.length).toBe(0);
expect(dds.length).toBe(0);
myComponent.data.set('cond', true);
myComponent.data.set('persons[1].name', 'leeight');
myComponent.data.set('persons[1].tels', ['12121212', '16161616', '18181818']);
san.nextTick(function () {
var dts = wrap.getElementsByTagName('dt');
expect(dts[0].title).toBe('erik');
expect(dts[1].title).toBe('leeight');
var dds = wrap.getElementsByTagName('dd');
var p1lis = dds[1].getElementsByTagName('li');
expect(p1lis[0].title).toBe('12121212');
expect(p1lis[1].title).toBe('16161616');
expect(p1lis[2].title).toBe('18181818');
myComponent.dispose();
document.body.removeChild(wrap);
done();
});
});
it("render component, init true, update soon", function (done) {
var MyComponent = san.defineComponent({
components: {
'ui-person': PersonList
},
template: '<div><ui-person list="{{persons}}" san-if="cond"></ui-person></div>'
});
var myComponent = new MyComponent();
myComponent.data.set('cond', true);
myComponent.data.set('persons', [
{
name: 'erik',
tels: [
'12345678',
'123456789',
]
},
{
name: 'firede',
tels: [
'2345678',
'23456789',
]
}
]);
var wrap = document.createElement('div');
document.body.appendChild(wrap);
myComponent.attach(wrap);
var dts = wrap.getElementsByTagName('dt');
expect(dts[0].title).toBe('erik');
expect(dts[1].title).toBe('firede');
var dds = wrap.getElementsByTagName('dd');
var p1lis = dds[1].getElementsByTagName('li');
expect(p1lis[0].title).toBe('2345678');
expect(p1lis[1].title).toBe('23456789');
myComponent.data.set('cond', false);
myComponent.data.set('persons[1].name', 'leeight');
myComponent.data.set('persons[1].tels', ['12121212', '16161616', '18181818']);
san.nextTick(function () {
var dts = wrap.getElementsByTagName('dt');
var dds = wrap.getElementsByTagName('dd');
expect(dts.length).toBe(0);
expect(dds.length).toBe(0);
myComponent.data.set('cond', true);
san.nextTick(function () {
var dts = wrap.getElementsByTagName('dt');
expect(dts[0].title).toBe('erik');
expect(dts[1].title).toBe('leeight');
var dds = wrap.getElementsByTagName('dd');
var p1lis = dds[1].getElementsByTagName('li');
expect(p1lis[0].title).toBe('12121212');
expect(p1lis[1].title).toBe('16161616');
expect(p1lis[2].title).toBe('18181818');
myComponent.dispose();
document.body.removeChild(wrap);
done();
});
});
});
it("change condition expr data twice, first time diffent and second time same", function (done) {
var MyComponent = san.defineComponent({
initData: function () {
return {
totalPage: 5,
current: 5
};
},
template: '<div><span san-if="current - 1 < totalPage">{{ current - 1 }}</span></div>'
});
var myComponent = new MyComponent();
var wrap = document.createElement('div');
document.body.appendChild(wrap);
myComponent.attach(wrap);
var spans = wrap.getElementsByTagName('span');
expect(spans.length).toBe(1);
myComponent.data.set('current', 6);
myComponent.data.set('totalPage', 6);
san.nextTick(function () {
var spans = wrap.getElementsByTagName('span');
expect(spans.length).toBe(1);
myComponent.dispose();
document.body.removeChild(wrap);
done();
});
});
it("change condition expr data twice, first time same and second time different", function (done) {
var MyComponent = san.defineComponent({
initData: function () {
return {
totalPage: 5,
current: 5
};
},
template: '<div><span san-if="current - 1 < totalPage">{{ current - 1 }}</span></div>'
});
var myComponent = new MyComponent();
var wrap = document.createElement('div');
document.body.appendChild(wrap);
myComponent.attach(wrap);
var spans = wrap.getElementsByTagName('span');
expect(spans.length).toBe(1);
myComponent.data.set('totalPage', 6);
myComponent.data.set('current', 6);
san.nextTick(function () {
var spans = wrap.getElementsByTagName('span');
expect(spans.length).toBe(1);
myComponent.dispose();
document.body.removeChild(wrap);
done();
});
});
it("change condition expr data many times", function (done) {
var MyComponent = san.defineComponent({
initData: function () {
return {
totalPage: 5,
current: 5
};
},
template: '<div><span san-if="current - 1 < totalPage">{{ current - 1 }}</span></div>'
});
var myComponent = new MyComponent();
var wrap = document.createElement('div');
document.body.appendChild(wrap);
myComponent.attach(wrap);
var spans = wrap.getElementsByTagName('span');
expect(spans.length).toBe(1);
myComponent.data.set('totalPage', 6);
myComponent.data.set('current', 6);
myComponent.data.set('current', 7);
myComponent.data.set('totalPage', 8);
myComponent.data.set('current', 9);
myComponent.data.set('totalPage', 9);
san.nextTick(function () {
var spans = wrap.getElementsByTagName('span');
expect(spans.length).toBe(1);
myComponent.dispose();
document.body.removeChild(wrap);
done();
});
});
it("condition expr data not be changed, inner element should update view", function (done) {
    var MyComponent = san.defineComponent({
        initData: function () {
            return {
                condition: true,
                list: ['one', 'two']
            };
        },
        // Fixed: the inner wrapper opened with <div> but was closed with a
        // stray </span>; close it with </div> so the template is balanced.
        template: '<div><div san-if="condition"><u san-for="item,index in list" title="{{index}}{{item}}">{{index}}{{item}}</u></div></div>'
    });

    var myComponent = new MyComponent();

    var wrap = document.createElement('div');
    document.body.appendChild(wrap);
    myComponent.attach(wrap);

    // Initial render: two <u> items from the initData list.
    var us = wrap.getElementsByTagName('u');
    expect(us.length).toBe(2);
    expect(us[0].title).toBe('0one');
    expect(us[0].innerHTML.indexOf('0one')).toBe(0);

    // The san-if condition stays true; only the inner list changes.
    myComponent.data.set('list', ['three']);

    san.nextTick(function () {
        var us = wrap.getElementsByTagName('u');
        expect(us.length).toBe(1);
        expect(us[0].title).toBe('0three');
        expect(us[0].innerHTML.indexOf('0three')).toBe(0);

        myComponent.dispose();
        document.body.removeChild(wrap);
        done();
    });
});
});
|
module Boxroom
  # Rails engine entry point: namespaces the gem's classes under Boxroom and
  # registers the engine's image assets for precompilation.
  class Engine < ::Rails::Engine
    isolate_namespace Boxroom

    # Host apps only precompile their own assets by default; add the engine's
    # images so they are available in production builds.
    initializer 'boxroom.assets.precompile' do |app|
      app.config.assets.precompile += %w( boxroom/*.png boxroom/*.jpg boxroom/*.gif )
    end
  end
end
|
# fdr - fuzzy-pick a directory (fd + fzf) and cd into it.
fdr() {
  local dir prevcmd
  # Prefer a tree preview; fall back to ls when tree is not installed.
  # Fixed: redirect stderr as well, so a missing tree does not print
  # "tree: not found" noise every time the function runs.
  if ! type tree > /dev/null 2>&1; then
    prevcmd='echo "To see perfect preview, install tree" && ls {}'
  else
    prevcmd='tree -C {} | head -200'
  fi
  dir=$(fd --hidden --follow --exclude ".git" --exclude "Library" --max-depth 5 | fzf +m --reverse --preview "$prevcmd") &&
  cd "$dir"
}
|
#!/usr/bin/env python
""" Problem 64 daily-coding-problem.com """
def is_valid_move(board, move, n):
    """Return True if *move* = (row, col) lands on the n x n board
    on a square that has not been visited yet (marked None)."""
    row, col = move
    # Bounds check first so the board indexing below is always safe.
    if not (0 <= row < n and 0 <= col < n):
        return False
    return board[row][col] is None
def valid_moves(board, r, c, n):
    """Return every knight move from (r, c) that lands on a free square."""
    # The eight knight offsets, kept in the same order as the original
    # implementation so tours are explored identically.
    offsets = (
        (2, 1), (1, 2), (1, -2), (-2, 1),
        (-1, 2), (2, -1), (-1, -2), (-2, -1),
    )
    return [
        (r + dr, c + dc)
        for dr, dc in offsets
        if is_valid_move(board, (r + dr, c + dc), n)
    ]
def knights_tours(n):
    """Count all open knight's tours on an n x n board, summed over
    every possible starting square."""
    total = 0
    for row in range(n):
        for col in range(n):
            # Fresh board per starting square; 0 marks the first visit.
            board = [[None] * n for _ in range(n)]
            board[row][col] = 0
            total += knights_tours_helper(board, [(row, col)], n)
    return total
def knights_tours_helper(board, tour, n):
    """Backtracking step: count the ways to extend the partial *tour*
    (a list of visited squares, most recent last) into a full tour."""
    if len(tour) == n * n:
        # Every square visited: one complete tour found.
        return 1
    count = 0
    last_r, last_c = tour[-1]
    for move in valid_moves(board, last_r, last_c, n):
        # Tentatively take the move, recurse, then undo it.
        tour.append(move)
        board[move[0]][move[1]] = len(tour)
        count += knights_tours_helper(board, tour, n)
        tour.pop()
        board[move[0]][move[1]] = None
    return count
if __name__ == "__main__":
    # Smoke test: a 5x5 board admits exactly 1728 open knight's tours
    # summed over all 25 starting squares.
    # NOTE: this brute-force check takes a noticeable amount of time to run.
    assert knights_tours(5) == 1728
|
<gh_stars>0
//fix:
// ctx.contextPath().toString() in login, logout in UserController
// see ctx.fullUrl() in register in AdminController
// try query, bound, rs, data, s -> toString() in DAO layer
// try -> .getQuery()
// uncomment authorization validation in controllers
package edu.mdamle;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import edu.mdamle.controllers.AdminController;
import edu.mdamle.controllers.BenefitsCoordinatorController;
import edu.mdamle.controllers.DirectSupervisorController;
import edu.mdamle.controllers.EmployeeController;
import edu.mdamle.controllers.ManagerController;
import edu.mdamle.controllers.UserController;
import edu.mdamle.services.DriverService;
import edu.mdamle.services.DriverServiceImpl;
import edu.mdamle.utils.CassandraUtil;
import io.javalin.Javalin;
/**
 * Application entry point: pre-initializes the persistence layer, then
 * starts an embedded Javalin server on port 8080 and registers every REST
 * route against its controller.
 */
public class Driver {

    private static final Logger log = LogManager.getLogger(Driver.class);

    // Shared Javalin instance; javalin() reuses it if already started.
    public static Javalin app = null;

    public static void main(String[] args) {
        preinitialize();
        javalin();
    }

    /** Initializes backing services (e.g. the database); returns success. */
    public static boolean preinitialize() {
        DriverService driverSvc = new DriverServiceImpl();
        return driverSvc.preinitialize();
    }

    /** Starts the HTTP server (if not already running) and wires all routes. */
    public static void javalin() {
        if(app == null) {
            app = Javalin.create().start(8080);
        }
        //Admin Controller:
        app.put("/accounts", AdminController :: register); //WORKS //LOGGED
        app.delete("/accounts/:username", AdminController :: unregister); //WORKS //LOGGED
        app.put("/dirsups/:username/supervisor", AdminController :: assignDeptHead); //WORKS //LOGGED
        app.put("/employees/:username/dirsup", AdminController :: assignDirSup); //WORKS //LOGGED
        app.put("/employees/:username/benco", AdminController :: assignBenCo); //WORKS //LOGGED
        //User controller:
        app.post("/accounts", UserController :: login); //WORKS //LOGGED
        app.delete("/accounts", UserController :: logout); //WORKS //LOGGED
        app.get("/trrs/:username", UserController :: viewTrrs); //incompl
        app.get("/trrs/:username/:id", UserController :: viewTrr); //incompl
        app.get("/inbox/", UserController :: viewMessages); //incompl
        app.get("/inbox/:id", UserController :: viewMessage); //incompl
        //Manager controller:
        app.put("/trrs/:username/:id/approve", ManagerController :: approveTrr); //incompl
        app.put("/trrs/:username/:id/requestinfo", ManagerController :: requestInfo); //incompl
        app.put("/trrs/:username/:id/confirm", ManagerController :: confirmPassing); //incompl
        app.get("/trrs/:username/:id/additionalmaterial", ManagerController :: viewAdditionalMaterial); //incompl
        //Employee Controller:
        app.put("/trrs/:username", EmployeeController :: createTrr); //--------------------------***************
        app.put("/trrs/:username/:id/approvalemail", EmployeeController :: uploadApprovalEmail); //incompl
        app.put("/trrs/:username/:id/additionalmaterial", EmployeeController :: uploadAdditionalMaterial); //incompl
        app.put("/trrs/:username/:id/presentation", EmployeeController :: uploadPresentation); //incompl
        app.put("/trrs/:username/:id/grade", EmployeeController :: uploadGrade); //incompl
        app.delete("/trrs/:username/:id", EmployeeController :: cancelTrr); //incoml
        //Benefits Coordinator Controller:
        app.get("/trrs/:username/:id/approvalemail", BenefitsCoordinatorController :: viewApprovalEmail); //incompl
        app.put("/trrs/:username/:id/validateemail", BenefitsCoordinatorController :: validateApprovalEmail); //incompl
        app.get("/trrs/:username/:id/grade", BenefitsCoordinatorController :: viewGrade); //incompl
        app.put("/trrs/:username/:id", BenefitsCoordinatorController :: changeReimbursementAmount); //incompl
        //Direct Supervisor Controller:
        app.get("/trrs/:username/:id/presentation", DirectSupervisorController :: viewPresentation); //incompl
    }
}
|
#! /bin/sh
set -x
# Install Cloud Custodian, then run every policy file against the account,
# writing results to ./out.
pip3 install c7n
for policy in policies/*
do
  # Fixed: quote the path so policy files with spaces in their names work.
  custodian run -s out -c "$policy"
done
|
package com.darian.spring5testdemo;
import com.darian.spring5testdemo.domain.User;
import com.darian.spring5testdemo.service.UserRemoteService;
import com.darian.spring5testdemo.service.UserServiceJUnit5Test;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.test.context.junit4.SpringRunner;
import java.util.Arrays;
@RunWith(SpringRunner.class)
@SpringBootTest
public class Spring5TestDemoApplicationTests {

    /** Smoke test: passes as long as the Spring application context starts. */
    @Test
    public void contextLoads() {
    }

    @Configuration
    public static class MockConfiguration {

        // Register a Mockito mock of UserRemoteService as a Spring bean, so the
        // context can start without the real remote service; findAll() is
        // stubbed to return two fixed users.
        @Bean
        public UserRemoteService userRemoteService() {
            UserRemoteService userRemoteService = Mockito.mock(UserRemoteService.class);
            Mockito.when(userRemoteService.findAll()).thenReturn(
                    Arrays.asList(new User(1L, "darian"), new User(2L, "dairna2")));
            return userRemoteService;
        }
    }
}
|
#!/usr/bin/env bash

test_description="Test sharness tests are correctly written"

. lib/test-lib.sh

# Lint every sibling t*.sh sharness test for structural requirements.
for file in $(find .. -maxdepth 1 -name 't*.sh' -type f); do
  # Every test must call test_done, or its results are never reported.
  test_expect_success "test in $file finishes" '
    grep -q "^test_done\b" "$file"
  '

  # Every test must set test_description.
  # NOTE(review): grep -L lists files WITHOUT the match; wrapped in
  # test_must_fail this asserts the file does contain a description —
  # confirm grep -L exit-status semantics on all supported platforms.
  test_expect_success "test in $file has a description" '
    test_must_fail grep -L "^test_description=" "$file"
  '

  # We have some tests that manually kill.
  case "$(basename "$file")" in
  t0060-daemon.sh|t0023-shutdown.sh) continue ;;
  esac

  # The awk program tracks launch/kill pairing of the ipfs daemon:
  # count must go 0 -> 1 on launch and 1 -> 0 on kill; any nesting,
  # double-kill, or leftover daemon at EOF makes awk exit non-zero.
  test_expect_success "test in $file has matching ipfs start/stop" '
    awk "/^ *[^#]*test_launch_ipfs_daemon/ { if (count != 0) { exit(1) }; count++ } /^ *[^#]*test_kill_ipfs_daemon/ { if (count != 1) { exit(1) }; count-- } END { exit(count) }" "$file"
  '
done

test_done
|
import styled from "styled-components";
// Full-width banner fixed to the top of the viewport and layered above
// normal page content (z-index 10); used for app-wide notifications.
export const NotificationContainer = styled.div`
  position: fixed;
  top: 0;
  left: 0;
  background-color: blue;
  color: white;
  width: 100vw;
  padding: 3px 20px;
  z-index: 10;
`;
|
#!/bin/bash
# shellcheck disable=SC2155,SC2153,SC2038,SC1091,SC2116
################################################################################
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
################################################################################
#
# Build OpenJDK - can be called directly but is typically called by
# docker-build.sh or native-build.sh.
#
# See bottom of the script for the call order and each function for further
# details.
#
# Calls 'configure' then 'make' in order to build OpenJDK
#
################################################################################
set -eu
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# shellcheck source=sbin/prepareWorkspace.sh
source "$SCRIPT_DIR/prepareWorkspace.sh"
# shellcheck source=sbin/common/config_init.sh
source "$SCRIPT_DIR/common/config_init.sh"
# shellcheck source=sbin/common/constants.sh
source "$SCRIPT_DIR/common/constants.sh"
# shellcheck source=sbin/common/common.sh
source "$SCRIPT_DIR/common/common.sh"
export LIB_DIR=$(crossPlatformRealPath "${SCRIPT_DIR}/../pipelines/")
export jreTargetPath
export CONFIGURE_ARGS=""
export ADDITIONAL_MAKE_TARGETS=""
export GIT_CLONE_ARGUMENTS=()
# Parse the CL arguments, defers to the shared function in common-functions.sh
parseArguments() {
  # Delegate straight to the shared configuration-argument parser.
  parseConfigurationArguments "$@"
}
# Add an argument to the configure call
addConfigureArg() {
  # Append "${1}${2}" to CONFIGURE_ARGS unless the user already supplied an
  # argument containing "$1" on the command line - user args always win.
  case "${BUILD_CONFIG[USER_SUPPLIED_CONFIGURE_ARGS]}" in
    *"$1"*)
      # Overridden by a user-specified arg: do not add it a second time.
      ;;
    *)
      CONFIGURE_ARGS="${CONFIGURE_ARGS} ${1}${2}"
      ;;
  esac
}
# Add an argument to the configure call (if it's not empty)
addConfigureArgIfValueIsNotEmpty() {
  # Guard clause: an empty value means there is nothing to configure.
  if [ -z "$2" ]; then
    return
  fi
  addConfigureArg "$1" "$2"
}
# Configure the boot JDK
configureBootJDKConfigureParameter() {
  # Pass the boot JDK location to configure, but only when
  # BUILD_CONFIG[JDK_BOOT_DIR] has actually been set to a non-empty value.
  addConfigureArgIfValueIsNotEmpty "--with-boot-jdk=" "${BUILD_CONFIG[JDK_BOOT_DIR]}"
}
# Shenandoah was backported to Java 11 as of 11.0.9 but requires this build
# parameter to ensure its inclusion. For Java 12+ this is automatically set
configureShenandoahBuildParameter() {
  # Shenandoah only needs an explicit request on JDK 11; bail out otherwise.
  if [ "${BUILD_CONFIG[OPENJDK_CORE_VERSION]}" != "${JDK11_CORE_VERSION}" ]; then
    return
  fi
  # Only the hotspot and corretto variants take the extra JVM feature flag.
  case "${BUILD_CONFIG[BUILD_VARIANT]}" in
    "${BUILD_VARIANT_HOTSPOT}" | "${BUILD_VARIANT_CORRETTO}")
      addConfigureArg "--with-jvm-features=" "shenandoahgc"
      ;;
  esac
}
# Configure the macOS codesign identity parameter
configureMacOSCodesignParameter() {
  # Forward the macOS codesign identity to configure when one is set.
  # The escaped double quotes keep any spaces in the certificate name intact.
  local signingIdentity="${BUILD_CONFIG[MACOSX_CODESIGN_IDENTITY]}"
  if [ -n "${signingIdentity}" ]; then
    addConfigureArg "--with-macosx-codesign-identity=" "\"${signingIdentity}\""
  fi
}
# Get the OpenJDK update version and build version
getOpenJDKUpdateAndBuildVersion() {
  # Refresh the git tags of the local OpenJDK checkout and report the update
  # version (parsed from the latest tag) plus the configured build number.
  cd "${BUILD_CONFIG[WORKSPACE_DIR]}/${BUILD_CONFIG[WORKING_DIR]}"
  if [ -d "${BUILD_CONFIG[OPENJDK_SOURCE_DIR]}/.git" ]; then
    # It does exist and it's a repo other than the Temurin one
    cd "${BUILD_CONFIG[OPENJDK_SOURCE_DIR]}" || return
    # A leftover shallow.lock would make every subsequent git command fail,
    # so treat it as stale and remove it.
    if [ -f ".git/shallow.lock" ]; then
      echo "Detected lock file, assuming this is an error, removing"
      rm ".git/shallow.lock"
    fi
    # shellcheck disable=SC2154
    echo "Pulling latest tags and getting the latest update version using git fetch -q --tags ${BUILD_CONFIG[SHALLOW_CLONE_OPTION]}"
    # shellcheck disable=SC2154
    echo "NOTE: This can take quite some time! Please be patient"
    # SHALLOW_CLONE_OPTION is deliberately unquoted so an empty value
    # disappears instead of becoming an empty argument.
    # shellcheck disable=SC2086
    git fetch -q --tags ${BUILD_CONFIG[SHALLOW_CLONE_OPTION]}
    local openJdkVersion=$(getOpenJdkVersion)
    if [[ "${openJdkVersion}" == "" ]]; then
      # shellcheck disable=SC2154
      echo "Unable to detect git tag, exiting..."
      exit 1
    else
      echo "OpenJDK repo tag is $openJdkVersion"
    fi
    local openjdk_update_version
    # For a jdk8u-style tag (e.g. jdk8u302-b08) this extracts the number
    # between the "u" and the "-" (e.g. 302).
    openjdk_update_version=$(echo "${openJdkVersion}" | cut -d'u' -f 2 | cut -d'-' -f 1)
    # TODO don't modify config in build script
    echo "Version: ${openjdk_update_version} ${BUILD_CONFIG[OPENJDK_BUILD_NUMBER]}"
  fi
  cd "${BUILD_CONFIG[WORKSPACE_DIR]}"
}
getOpenJdkVersion() {
  # Determine the OpenJDK version string for the current checkout.
  # Corretto, Dragonwell and Bisheng ship a dot-separated version.txt whose
  # fields are picked out by position below; everything else falls back to
  # the configured TAG or the latest upstream git tag.
  local version
  if [ "${BUILD_CONFIG[BUILD_VARIANT]}" == "${BUILD_VARIANT_CORRETTO}" ]; then
    local corrVerFile=${BUILD_CONFIG[WORKSPACE_DIR]}/${BUILD_CONFIG[WORKING_DIR]}/${BUILD_CONFIG[OPENJDK_SOURCE_DIR]}/version.txt
    local corrVersion="$(cut -d'.' -f 1 <"${corrVerFile}")"
    if [ "${corrVersion}" == "8" ]; then
      # Corretto 8: 8.<update>.<build>.<fix> -> jdk8u<update>-b<build>.<fix>
      local updateNum="$(cut -d'.' -f 2 <"${corrVerFile}")"
      local buildNum="$(cut -d'.' -f 3 <"${corrVerFile}")"
      local fixNum="$(cut -d'.' -f 4 <"${corrVerFile}")"
      version="jdk8u${updateNum}-b${buildNum}.${fixNum}"
    else
      # Corretto 11+: <major>.<minor>.<update>.<build>.<fix>
      #            -> jdk-<major>.<minor>.<update>+<build>.<fix>
      local minorNum="$(cut -d'.' -f 2 <"${corrVerFile}")"
      local updateNum="$(cut -d'.' -f 3 <"${corrVerFile}")"
      local buildNum="$(cut -d'.' -f 4 <"${corrVerFile}")"
      local fixNum="$(cut -d'.' -f 5 <"${corrVerFile}")"
      version="jdk-${corrVersion}.${minorNum}.${updateNum}+${buildNum}.${fixNum}"
    fi
  elif [ "${BUILD_CONFIG[BUILD_VARIANT]}" == "${BUILD_VARIANT_DRAGONWELL}" ]; then
    local dragonwellVerFile=${BUILD_CONFIG[WORKSPACE_DIR]}/${BUILD_CONFIG[WORKING_DIR]}/${BUILD_CONFIG[OPENJDK_SOURCE_DIR]}/version.txt
    if [ -r "${dragonwellVerFile}" ]; then
      if [ "${BUILD_CONFIG[OPENJDK_CORE_VERSION]}" == "${JDK8_CORE_VERSION}" ]; then
        # Note: Dragonwell 8 takes the build number from field 6
        local updateNum="$(cut -d'.' -f 2 <"${dragonwellVerFile}")"
        local buildNum="$(cut -d'.' -f 6 <"${dragonwellVerFile}")"
        version="jdk8u${updateNum}-b${buildNum}"
      else
        local minorNum="$(cut -d'.' -f 2 <"${dragonwellVerFile}")"
        local updateNum="$(cut -d'.' -f 3 <"${dragonwellVerFile}")"
        local buildNum="$(cut -d'.' -f 5 <"${dragonwellVerFile}")"
        version="jdk-11.${minorNum}.${updateNum}+${buildNum}"
      fi
    else
      # No version.txt: fall back to TAG / latest git tag and keep the part
      # after the "_" separator of the Dragonwell tag format.
      version=${BUILD_CONFIG[TAG]:-$(getFirstTagFromOpenJDKGitRepo)}
      version=$(echo "$version" | cut -d'_' -f 2)
    fi
  elif [ "${BUILD_CONFIG[BUILD_VARIANT]}" == "${BUILD_VARIANT_BISHENG}" ]; then
    local bishengVerFile=${BUILD_CONFIG[WORKSPACE_DIR]}/${BUILD_CONFIG[WORKING_DIR]}/${BUILD_CONFIG[OPENJDK_SOURCE_DIR]}/version.txt
    if [ -r "${bishengVerFile}" ]; then
      if [ "${BUILD_CONFIG[OPENJDK_CORE_VERSION]}" == "${JDK8_CORE_VERSION}" ]; then
        local updateNum="$(cut -d'.' -f 2 <"${bishengVerFile}")"
        local buildNum="$(cut -d'.' -f 5 <"${bishengVerFile}")"
        version="jdk8u${updateNum}-b${buildNum}"
      else
        local minorNum="$(cut -d'.' -f 2 <"${bishengVerFile}")"
        local updateNum="$(cut -d'.' -f 3 <"${bishengVerFile}")"
        local buildNum="$(cut -d'.' -f 5 <"${bishengVerFile}")"
        version="jdk-11.${minorNum}.${updateNum}+${buildNum}"
      fi
    else
      # No version.txt: fall back to TAG / latest git tag and strip the
      # Bisheng tag's surrounding "-"/"_" decorations.
      version=${BUILD_CONFIG[TAG]:-$(getFirstTagFromOpenJDKGitRepo)}
      version=$(echo "$version" | cut -d'-' -f 2 | cut -d'_' -f 1)
    fi
  else
    # Plain upstream: use the configured TAG or the repo's latest tag.
    version=${BUILD_CONFIG[TAG]:-$(getFirstTagFromOpenJDKGitRepo)}
    # TODO remove pending #1016
    # Strip the "_adopt" suffix and the aarch64 Shenandoah backport prefix.
    version=${version%_adopt}
    version=${version#aarch64-shenandoah-}
  fi
  echo "${version}"
}
# Ensure that we produce builds with versions strings something like:
#
# openjdk version "1.8.0_131"
# OpenJDK Runtime Environment (build 1.8.0-temurin-<user>_2017_04_17_17_21-b00)
# OpenJDK 64-Bit Server VM (build 25.71-b00, mixed mode)
configureVersionStringParameter() {
  # Build all of the version-related configure arguments: vendor identity,
  # milestone/pre/opt flags, update and build numbers, and (for 9+) the
  # vendor version string derived from the upstream tag.
  stepIntoTheWorkingDirectory
  local openJdkVersion=$(getOpenJdkVersion)
  echo "OpenJDK repo tag is ${openJdkVersion}"
  # --with-milestone=fcs deprecated at jdk12+ and not used for jdk11- (we use --without-version-pre/opt)
  if [ "${BUILD_CONFIG[OPENJDK_FEATURE_NUMBER]}" == 8 ] && [ "${BUILD_CONFIG[RELEASE]}" == "true" ]; then
    addConfigureArg "--with-milestone=" "fcs"
  fi
  # UTC timestamp used as the version-opt / release suffix on nightly builds
  local dateSuffix=$(date -u +%Y%m%d%H%M)
  # Configures "vendor" jdk properties.
  # Temurin default values are set after this code block
  # TODO 1. We should probably look at having these values passed through a config
  # file as opposed to hardcoding in shell
  # TODO 2. This highlights us conflating variant with vendor. e.g. OpenJ9 is really
  # a technical variant with Eclipse as the vendor
  if [[ "${BUILD_CONFIG[BUILD_VARIANT]}" == "${BUILD_VARIANT_DRAGONWELL}" ]]; then
    BUILD_CONFIG[VENDOR]="Alibaba"
    BUILD_CONFIG[VENDOR_VERSION]="\"(Alibaba Dragonwell)\""
    BUILD_CONFIG[VENDOR_URL]="http://www.alibabagroup.com"
    BUILD_CONFIG[VENDOR_BUG_URL]="mailto:dragonwell_use@googlegroups.com"
    BUILD_CONFIG[VENDOR_VM_BUG_URL]="mailto:dragonwell_use@googlegroups.com"
  elif [[ "${BUILD_CONFIG[BUILD_VARIANT]}" == "${BUILD_VARIANT_OPENJ9}" ]]; then
    BUILD_CONFIG[VENDOR_VM_BUG_URL]="https://github.com/eclipse/openj9/issues"
  elif [[ "${BUILD_CONFIG[BUILD_VARIANT]}" == "${BUILD_VARIANT_BISHENG}" ]]; then
    BUILD_CONFIG[VENDOR]="Huawei"
    BUILD_CONFIG[VENDOR_VERSION]="Bisheng"
    BUILD_CONFIG[VENDOR_BUG_URL]="https://gitee.com/openeuler/bishengjdk-11/issues"
    BUILD_CONFIG[VENDOR_VM_BUG_URL]="https://gitee.com/openeuler/bishengjdk-11/issues"
  fi
  # Fall back to the Adoptium/Temurin defaults for anything a variant did not set
  addConfigureArg "--with-vendor-name=" "\"${BUILD_CONFIG[VENDOR]}\""
  addConfigureArg "--with-vendor-url=" "${BUILD_CONFIG[VENDOR_URL]:-"https://adoptium.net/"}"
  addConfigureArg "--with-vendor-bug-url=" "${BUILD_CONFIG[VENDOR_BUG_URL]:-"https://github.com/adoptium/adoptium-support/issues"}"
  addConfigureArg "--with-vendor-vm-bug-url=" "${BUILD_CONFIG[VENDOR_VM_BUG_URL]:-"https://github.com/adoptium/adoptium-support/issues"}"
  local buildNumber
  if [ "${BUILD_CONFIG[OPENJDK_CORE_VERSION]}" == "${JDK8_CORE_VERSION}" ]; then
    if [ "${BUILD_CONFIG[RELEASE]}" == "false" ]; then
      addConfigureArg "--with-user-release-suffix=" "${dateSuffix}"
    fi
    if [ "${BUILD_CONFIG[BUILD_VARIANT]}" == "${BUILD_VARIANT_HOTSPOT}" ]; then
      # No JFR support in AIX or zero builds (s390 or armv7l)
      if [ "${BUILD_CONFIG[OS_ARCHITECTURE]}" != "s390x" ] && [ "${BUILD_CONFIG[OS_KERNEL_NAME]}" != "aix" ] && [ "${BUILD_CONFIG[OS_ARCHITECTURE]}" != "armv7l" ]; then
        addConfigureArg "--enable-jfr" ""
      fi
    fi
    # Set the update version (e.g. 131), this gets passed in from the calling script
    local updateNumber=${BUILD_CONFIG[OPENJDK_UPDATE_VERSION]}
    if [ -z "${updateNumber}" ]; then
      # Not supplied: parse it out of a jdk8uNNN-bBB style tag
      updateNumber=$(echo "${openJdkVersion}" | cut -f1 -d"-" | cut -f2 -d"u")
    fi
    addConfigureArgIfValueIsNotEmpty "--with-update-version=" "${updateNumber}"
    # Set the build number (e.g. b04), this gets passed in from the calling script
    buildNumber=${BUILD_CONFIG[OPENJDK_BUILD_NUMBER]}
    if [ -z "${buildNumber}" ]; then
      buildNumber=$(echo "${openJdkVersion}" | cut -f2 -d"-")
    fi
    # "ga" tags carry no usable build number, so skip the arg in that case
    if [ "${buildNumber}" ] && [ "${buildNumber}" != "ga" ]; then
      addConfigureArgIfValueIsNotEmpty "--with-build-number=" "${buildNumber}"
    fi
  elif [ "${BUILD_CONFIG[OPENJDK_CORE_VERSION]}" == "${JDK9_CORE_VERSION}" ]; then
    buildNumber=${BUILD_CONFIG[OPENJDK_BUILD_NUMBER]}
    if [ -z "${buildNumber}" ]; then
      # Parse the build number from a jdk-9+NNN style tag
      buildNumber=$(echo "${openJdkVersion}" | cut -f2 -d"+")
    fi
    # Nightly builds carry the timestamp as version-opt; releases drop it
    if [ "${BUILD_CONFIG[RELEASE]}" == "false" ]; then
      addConfigureArg "--with-version-opt=" "${dateSuffix}"
    else
      addConfigureArg "--without-version-opt" ""
    fi
    addConfigureArg "--without-version-pre" ""
    addConfigureArgIfValueIsNotEmpty "--with-version-build=" "${buildNumber}"
  else
    # > JDK 9
    # Set the build number (e.g. b04), this gets passed in from the calling script
    buildNumber=${BUILD_CONFIG[OPENJDK_BUILD_NUMBER]}
    if [ -z "${buildNumber}" ]; then
      # Get build number (eg.10) from tag of potential format "jdk-11.0.4+10_adopt"
      buildNumber=$(echo "${openJdkVersion}" | cut -d_ -f1 | cut -f2 -d"+")
    fi
    if [ "${BUILD_CONFIG[RELEASE]}" == "false" ]; then
      addConfigureArg "--with-version-opt=" "${dateSuffix}"
    else
      addConfigureArg "--without-version-opt" ""
    fi
    addConfigureArg "--without-version-pre" ""
    addConfigureArgIfValueIsNotEmpty "--with-version-build=" "${buildNumber}"
  fi
  if [ "${BUILD_CONFIG[OPENJDK_FEATURE_NUMBER]}" -gt 8 ]; then
    # Derive Adoptium metadata "version" string to use as vendor.version string
    # Take openJdkVersion, remove jdk- prefix and build suffix, replace with specified buildNumber
    # eg.:
    # openJdkVersion = jdk-11.0.7+<build>
    # vendor.version = Adoptium-11.0.7+<buildNumber>
    #
    # Remove "jdk-" prefix from openJdkVersion tag
    local derivedOpenJdkMetadataVersion=${openJdkVersion#"jdk-"}
    # Remove "+<build>" suffix
    derivedOpenJdkMetadataVersion=$(echo "${derivedOpenJdkMetadataVersion}" | cut -f1 -d"+")
    # Add "+<buildNumber>" being used
    derivedOpenJdkMetadataVersion="${derivedOpenJdkMetadataVersion}+${buildNumber}"
    if [ "${BUILD_CONFIG[RELEASE]}" == "false" ]; then
      # Not a release build so add date suffix
      derivedOpenJdkMetadataVersion="${derivedOpenJdkMetadataVersion}-${dateSuffix}"
    fi
    addConfigureArg "--with-vendor-version-string=" "${BUILD_CONFIG[VENDOR_VERSION]:-"Temurin"}-${derivedOpenJdkMetadataVersion}"
  fi
  echo "Completed configuring the version string parameter, config args are now: ${CONFIGURE_ARGS}"
}
# Construct the remaining 'configure' parameters not handled by the dedicated configure* helpers
buildingTheRestOfTheConfigParameters() {
  # Add the platform/version specific configure args that only apply on
  # non-Windows builds: ccache, the OpenJ9 freemarker jar, and the JDK8
  # X11/ALSA locations.
  #
  # Use the shell builtin 'command -v' instead of the external, non-portable
  # 'which' (shellcheck SC2230) to detect ccache.
  if [ -n "$(command -v ccache)" ]; then
    addConfigureArg "--enable-ccache" ""
  fi
  # Point-in-time dependency for openj9 only
  if [[ "${BUILD_CONFIG[BUILD_VARIANT]}" == "${BUILD_VARIANT_OPENJ9}" ]]; then
    addConfigureArg "--with-freemarker-jar=" "${BUILD_CONFIG[WORKSPACE_DIR]}/${BUILD_CONFIG[WORKING_DIR]}/freemarker-${FREEMARKER_LIB_VERSION}/freemarker.jar"
  fi
  # JDK8's configure cannot auto-detect X11 headers or the workspace-local ALSA
  if [ "${BUILD_CONFIG[OPENJDK_CORE_VERSION]}" == "${JDK8_CORE_VERSION}" ]; then
    addConfigureArg "--with-x=" "/usr/include/X11"
    addConfigureArg "--with-alsa=" "${BUILD_CONFIG[WORKSPACE_DIR]}/${BUILD_CONFIG[WORKING_DIR]}/installedalsa"
  fi
}
configureDebugParameters() {
  # We don't want any extra debug symbols - ensure it's set to release;
  # other options include fastdebug and slowdebug.
  addConfigureArg "--with-debug-level=" "release"
  # If debug symbols package is requested, generate them separately.
  # Fix: quote the expansion - unquoted, an empty CREATE_DEBUG_IMAGE value
  # turned this test into the syntax error "[ == true ]" under 'set -eu'.
  if [ "${BUILD_CONFIG[CREATE_DEBUG_IMAGE]}" == "true" ]; then
    addConfigureArg "--with-native-debug-symbols=" "external"
  else
    if [ "${BUILD_CONFIG[OPENJDK_CORE_VERSION]}" == "${JDK8_CORE_VERSION}" ]; then
      # JDK8 configure uses its own pair of flags to suppress debug info
      addConfigureArg "--disable-zip-debug-info" ""
      if [[ "${BUILD_CONFIG[BUILD_VARIANT]}" != "${BUILD_VARIANT_OPENJ9}" ]]; then
        addConfigureArg "--disable-debug-symbols" ""
      fi
    else
      if [[ "${BUILD_CONFIG[BUILD_VARIANT]}" != "${BUILD_VARIANT_OPENJ9}" ]]; then
        addConfigureArg "--with-native-debug-symbols=" "none"
      fi
    fi
  fi
}
configureFreetypeLocation() {
  # Decide how 'configure' should locate freetype, unless the user already
  # supplied their own --with-freetype* argument on the command line.
  if [[ ! "${CONFIGURE_ARGS}" =~ "--with-freetype" ]]; then
    if [[ "${BUILD_CONFIG[FREETYPE]}" == "true" ]]; then
      if [[ "$OSTYPE" == "cygwin" ]] || [[ "$OSTYPE" == "msys" ]]; then
        # On Windows, jdk8-10 build freetype from the checked-out source.
        # NOTE(review): in the default case freetypeDir is computed but never
        # passed to configure - presumably the bundled freetype is intended
        # to take effect by omission; confirm before relying on it.
        case "${BUILD_CONFIG[OPENJDK_CORE_VERSION]}" in
          jdk8* | jdk9* | jdk10*) addConfigureArg "--with-freetype-src=" "${BUILD_CONFIG[WORKSPACE_DIR]}/libs/freetype" ;;
          *) freetypeDir=${BUILD_CONFIG[FREETYPE_DIRECTORY]:-bundled} ;;
        esac
      else
        local freetypeDir="${BUILD_CONFIG[FREETYPE_DIRECTORY]}"
        # jdk8-10 default to the freetype installed into the workspace;
        # jdk11+ default to the JDK's bundled freetype.
        case "${BUILD_CONFIG[OPENJDK_CORE_VERSION]}" in
          jdk8* | jdk9* | jdk10*) freetypeDir=${BUILD_CONFIG[FREETYPE_DIRECTORY]:-"${BUILD_CONFIG[WORKSPACE_DIR]}/${BUILD_CONFIG[WORKING_DIR]}/installedfreetype"} ;;
          *) freetypeDir=${BUILD_CONFIG[FREETYPE_DIRECTORY]:-bundled} ;;
        esac
        echo "setting freetype dir to ${freetypeDir}"
        addConfigureArg "--with-freetype=" "${freetypeDir}"
      fi
    fi
  fi
}
# Configure the command parameters
configureCommandParameters() {
  # Assemble the complete CONFIGURE_ARGS string: version/vendor parameters,
  # boot JDK, platform specifics, and finally the user-supplied overrides.
  configureVersionStringParameter
  configureBootJDKConfigureParameter
  configureShenandoahBuildParameter
  configureMacOSCodesignParameter
  configureDebugParameters
  if [[ "$OSTYPE" == "cygwin" ]] || [[ "$OSTYPE" == "msys" ]]; then
    echo "Windows or Windows-like environment detected, skipping configuring environment for custom Boot JDK and other 'configure' settings."
    if [[ "${BUILD_CONFIG[BUILD_VARIANT]}" == "${BUILD_VARIANT_OPENJ9}" ]] && [ "${BUILD_CONFIG[OPENJDK_CORE_VERSION]}" == "${JDK8_CORE_VERSION}" ]; then
      local addsDir="${BUILD_CONFIG[WORKSPACE_DIR]}/${BUILD_CONFIG[WORKING_DIR]}/${BUILD_CONFIG[OPENJDK_SOURCE_DIR]}/closed/adds"
      # This is unfortunately required as if the path does not start with "/cygdrive" the make scripts are unable to find the "/closed/adds" directory.
      if ! echo "$addsDir" | grep -E -q "^/cygdrive/"; then
        # BUILD_CONFIG[WORKSPACE_DIR] does not seem to be an absolute path, prepend /cygdrive/c/cygwin64/"
        echo "Prepending /cygdrive/c/cygwin64/ to BUILD_CONFIG[WORKSPACE_DIR]"
        addsDir="/cygdrive/c/cygwin64/$addsDir"
      fi
      echo "adding source route -with-add-source-root=${addsDir}"
      addConfigureArg "--with-add-source-root=" "${addsDir}"
    fi
  else
    echo "Building up the configure command..."
    buildingTheRestOfTheConfigParameters
  fi
  echo "Configuring jvm variants if provided"
  addConfigureArgIfValueIsNotEmpty "--with-jvm-variants=" "${BUILD_CONFIG[JVM_VARIANT]}"
  if [ "${BUILD_CONFIG[CUSTOM_CACERTS]}" = "true" ] ; then
    echo "Configure custom cacerts file security/cacerts"
    addConfigureArgIfValueIsNotEmpty "--with-cacerts-file=" "$SCRIPT_DIR/../security/cacerts"
  fi
  # Finally, we add any configure arguments the user has specified on the command line.
  # This is done last, to ensure the user can override any args they need to.
  # The substitution allows the user to pass in speech marks without having to guess
  # at the number of escapes needed to ensure that they persist up to this point.
  CONFIGURE_ARGS="${CONFIGURE_ARGS} ${BUILD_CONFIG[USER_SUPPLIED_CONFIGURE_ARGS]//temporary_speech_mark_placeholder/\"}"
  configureFreetypeLocation
  # Fix: previous message was copy-pasted from configureVersionStringParameter
  # and misleadingly claimed only the version string had been configured.
  echo "Completed configuring all command parameters, config args are now: ${CONFIGURE_ARGS}"
}
# Make sure we're in the source directory for OpenJDK now
stepIntoTheWorkingDirectory() {
  # Move into the OpenJDK source checkout, aborting the script if it is missing.
  local sourceRoot="${BUILD_CONFIG[WORKSPACE_DIR]}/${BUILD_CONFIG[WORKING_DIR]}/${BUILD_CONFIG[OPENJDK_SOURCE_DIR]}"
  cd "${sourceRoot}" || exit
  # corretto/corretto-8 (jdk-8 only) nests its source one level down in /src
  if [ "${BUILD_CONFIG[BUILD_VARIANT]}" == "${BUILD_VARIANT_CORRETTO}" ] && [ "${BUILD_CONFIG[OPENJDK_FEATURE_NUMBER]}" == "8" ]; then
    cd "src"
  fi
  echo "Should have the source, I'm at $PWD"
}
buildTemplatedFile() {
  # Render build.template into config/configure-and-build.sh, substituting in
  # the fully assembled configure and make command lines.
  echo "Configuring command and using the pre-built config params..."
  stepIntoTheWorkingDirectory
  echo "Currently at '${PWD}'"
  FULL_CONFIGURE="bash ./configure --verbose ${CONFIGURE_ARGS}"
  echo "Running ./configure with arguments '${FULL_CONFIGURE}'"
  # If it's Java 9+ then we also make test-image to build the native test libraries,
  # For openj9 add debug-image
  JDK_VERSION_NUMBER="${BUILD_CONFIG[OPENJDK_FEATURE_NUMBER]}"
  if [[ "${BUILD_CONFIG[BUILD_VARIANT]}" == "${BUILD_VARIANT_OPENJ9}" ]]; then
    ADDITIONAL_MAKE_TARGETS=" test-image debug-image"
  elif [ "$JDK_VERSION_NUMBER" -gt 8 ] || [ "${BUILD_CONFIG[OPENJDK_CORE_VERSION]}" == "${JDKHEAD_VERSION}" ]; then
    ADDITIONAL_MAKE_TARGETS=" test-image"
  fi
  if [[ "${BUILD_CONFIG[MAKE_EXPLODED]}" == "true" ]]; then
    # In order to make an exploded image we cannot have any additional targets
    ADDITIONAL_MAKE_TARGETS=""
  fi
  FULL_MAKE_COMMAND="${BUILD_CONFIG[MAKE_COMMAND_NAME]} ${BUILD_CONFIG[MAKE_ARGS_FOR_ANY_PLATFORM]} ${BUILD_CONFIG[USER_SUPPLIED_MAKE_ARGS]} ${ADDITIONAL_MAKE_TARGETS}"
  # Fix: pass the template directly to sed instead of piping it through a
  # useless cat (shellcheck SC2002).
  sed -e "s|{configureArg}|${FULL_CONFIGURE}|" \
    -e "s|{makeCommandArg}|${FULL_MAKE_COMMAND}|" \
    "$SCRIPT_DIR/build.template" >"${BUILD_CONFIG[WORKSPACE_DIR]}/config/configure-and-build.sh"
}
executeTemplatedFile() {
  # Run the generated configure-and-build.sh and translate its exit code:
  # 3 -> make failed (archive logs, exit 1); 2 -> configure failed (exit 2).
  stepIntoTheWorkingDirectory
  echo "Currently at '${PWD}'"
  # We need the exitcode from the configure-and-build.sh script, so suspend
  # errexit/nounset around the invocation.
  set +eu
  # Execute the build passing the workspace dir and target dir as params for configure.txt.
  # Fix: quote both paths so directories containing spaces arrive as single arguments.
  bash "${BUILD_CONFIG[WORKSPACE_DIR]}/config/configure-and-build.sh" "${BUILD_CONFIG[WORKSPACE_DIR]}" "${BUILD_CONFIG[TARGET_DIR]}"
  exitCode=$?
  if [ "${exitCode}" -eq 3 ]; then
    createOpenJDKFailureLogsArchive
    echo "Failed to make the JDK, exiting"
    exit 1
  elif [ "${exitCode}" -eq 2 ]; then
    echo "Failed to configure the JDK, exiting"
    echo "Did you set the JDK boot directory correctly? Override by exporting JDK_BOOT_DIR"
    echo "For example, on RHEL you would do export JDK_BOOT_DIR=/usr/lib/jvm/java-1.7.0-openjdk-1.7.0.131-2.6.9.0.el7_3.x86_64"
    echo "Current JDK_BOOT_DIR value: ${BUILD_CONFIG[JDK_BOOT_DIR]}"
    exit 2
  fi
  # Restore exit behavior
  set -eu
}
createOpenJDKFailureLogsArchive() {
  # Collect build.log, make failure logs and any crash dumps from the build
  # output directory into an archive so failures can be inspected later.
  echo "OpenJDK make failed, archiving make failed logs"
  # Step into the (single) build output directory build/<spec>
  cd build/*
  local adoptLogArchiveDir="TemurinLogsArchive"
  # Create new folder for failure logs, replacing any previous attempt
  rm -rf ${adoptLogArchiveDir}
  mkdir ${adoptLogArchiveDir}
  # Copy build and failure logs
  if [[ -f "build.log" ]]; then
    echo "Copying build.log to ${adoptLogArchiveDir}"
    cp build.log ${adoptLogArchiveDir}
  fi
  if [[ -d "make-support/failure-logs" ]]; then
    echo "Copying make-support/failure-logs to ${adoptLogArchiveDir}"
    mkdir -p "${adoptLogArchiveDir}/make-support"
    cp -r "make-support/failure-logs" "${adoptLogArchiveDir}/make-support"
  fi
  # Find any cores, dumps, .. (native cores plus javacore/Snap/jitdump files)
  # and copy each into the archive, preserving its relative directory.
  find . -name 'core.*' -o -name 'core.*.dmp' -o -name 'javacore.*.txt' -o -name 'Snap.*.trc' -o -name 'jitdump.*.dmp' | sed 's#^./##' | while read -r dump ; do
    filedir=$(dirname "${dump}")
    echo "Copying ${dump} to ${adoptLogArchiveDir}/${filedir}"
    mkdir -p "${adoptLogArchiveDir}/${filedir}"
    cp "${dump}" "${adoptLogArchiveDir}/${filedir}"
  done
  # Archive logs under a name derived from the target file name (-jdk -> -makefailurelogs)
  local makeFailureLogsName=$(echo "${BUILD_CONFIG[TARGET_FILE_NAME]//-jdk/-makefailurelogs}")
  createArchive "${adoptLogArchiveDir}" "${makeFailureLogsName}"
}
getGradleJavaHome() {
  # Choose the JDK used to run gradle. Candidates are checked in order of
  # increasing priority, so the last usable one wins:
  #   JAVA_HOME < JDK8_BOOT_DIR < JDK11_BOOT_DIR
  local chosen=""
  if [[ -n "${JAVA_HOME+x}" && -d "${JAVA_HOME}" ]]; then
    chosen="${JAVA_HOME}"
  fi
  if [[ -n "${JDK8_BOOT_DIR+x}" && -d "${JDK8_BOOT_DIR}" ]]; then
    chosen="${JDK8_BOOT_DIR}"
  fi
  # Special case arm because for some unknown reason the JDK11_BOOT_DIR that
  # arm downloads is unable to form a connection to services.gradle.org
  if [[ -n "${JDK11_BOOT_DIR+x}" && -d "${JDK11_BOOT_DIR}" && "${ARCHITECTURE}" != "arm" ]]; then
    chosen="${JDK11_BOOT_DIR}"
  fi
  if [ ! -d "$chosen" ]; then
    echo "[WARNING] Unable to find java to run gradle with, this build may fail with /bin/java: No such file or directory. Set JAVA_HOME, JDK8_BOOT_DIR or JDK11_BOOT_DIR to squash this warning: $chosen" >&2
  fi
  echo "$chosen"
}
getGradleUserHome() {
  # Return the gradle user home: the user-configured directory when set,
  # otherwise a .gradle directory inside the build workspace.
  local gradleUserHome=""
  if [ -n "${BUILD_CONFIG[GRADLE_USER_HOME_DIR]}" ]; then
    gradleUserHome="${BUILD_CONFIG[GRADLE_USER_HOME_DIR]}"
  else
    gradleUserHome="${BUILD_CONFIG[WORKSPACE_DIR]}/.gradle"
  fi
  # Fix: quote the expansion so paths containing whitespace are not
  # word-split/globbed by the unquoted echo (shellcheck SC2086).
  echo "$gradleUserHome"
}
parseJavaVersionString() {
  # Convert the raw `java -version` banner of the freshly built JDK (at
  # PRODUCT_HOME) into an openjdk-semver string via the ParseVersion helper
  # in adopt-shared-lib.jar, run with the gradle JDK.
  ADOPT_BUILD_NUMBER="${ADOPT_BUILD_NUMBER:-1}"
  local javaVersion=$(JAVA_HOME="$PRODUCT_HOME" "$PRODUCT_HOME"/bin/java -version 2>&1)
  # LIB_DIR (exported at the top of this script) holds the pipelines library
  cd "${LIB_DIR}"
  local gradleJavaHome=$(getGradleJavaHome)
  # Strip the trailing newline so callers can embed the result in file names
  local version=$(echo "$javaVersion" | JAVA_HOME="$gradleJavaHome" "$gradleJavaHome"/bin/java -cp "target/libs/adopt-shared-lib.jar" ParseVersion -s -f openjdk-semver "$ADOPT_BUILD_NUMBER" | tr -d '\n')
  echo "$version"
}
# Print the version string so we know what we've produced
printJavaVersionString() {
  # Locate the built JDK image and print its `java -version` banner between
  # sentinel markers that downstream jenkins jobs search for.
  stepIntoTheWorkingDirectory
  case "${BUILD_CONFIG[OS_KERNEL_NAME]}" in
    "darwin")
      # On macOS the usable home directory is nested inside the .jdk bundle
      # shellcheck disable=SC2086
      PRODUCT_HOME=$(ls -d ${PWD}/build/*/images/${BUILD_CONFIG[JDK_PATH]}/Contents/Home)
      ;;
    *)
      # shellcheck disable=SC2086
      PRODUCT_HOME=$(ls -d ${PWD}/build/*/images/${BUILD_CONFIG[JDK_PATH]})
      ;;
  esac
  if [[ -d "$PRODUCT_HOME" ]]; then
    echo "'$PRODUCT_HOME' found"
    if [ ! -r "$PRODUCT_HOME/bin/java" ]; then
      # Dump directory listings to help diagnose the broken image
      echo "===$PRODUCT_HOME===="
      ls -alh "$PRODUCT_HOME"
      echo "===$PRODUCT_HOME/bin/===="
      ls -alh "$PRODUCT_HOME/bin/"
      echo "Error 'java' does not exist in '$PRODUCT_HOME'."
      exit 3
    elif [ "${BUILD_CONFIG[CROSSCOMPILE]}" == "true" ]; then
      # job is cross compiled, so we cannot run it on the build system
      # So we leave it for now and retrieve the version from a downstream job after the build
      echo "Warning: java version can't be run on cross compiled build system. Faking version for now..."
    else
      # print version string around easy to find output
      # do not modify these strings as jenkins looks for them
      echo "=JAVA VERSION OUTPUT="
      "$PRODUCT_HOME"/bin/java -version 2>&1
      echo "=/JAVA VERSION OUTPUT="
      # Also persist the banner into the build metadata directory
      "$PRODUCT_HOME"/bin/java -version > "${BUILD_CONFIG[WORKSPACE_DIR]}/${BUILD_CONFIG[TARGET_DIR]}/metadata/version.txt" 2>&1
    fi
  else
    echo "'$PRODUCT_HOME' does not exist, build might have not been successful or not produced the expected JDK image at this location."
    exit 3
  fi
}
getJdkArchivePath() {
  # Todo: Set this to the outcome of https://github.com/adoptium/temurin-build/issues/1016
  # local version="$(parseJavaVersionString)
  # echo "jdk-${version}"
  #
  # For now the archive directory is simply named after the upstream version tag.
  echo "$(getOpenJdkVersion)"
}
getJreArchivePath() {
  # The JRE archive name is the JDK archive name with a "-jre" suffix.
  echo "$(getJdkArchivePath)-jre"
}
getTestImageArchivePath() {
  # The test-image archive name is the JDK archive name plus "-test-image".
  echo "$(getJdkArchivePath)-test-image"
}
getDebugImageArchivePath() {
  # The debug-image archive name is the JDK archive name plus "-debug-image".
  echo "$(getJdkArchivePath)-debug-image"
}
# Clean up
removingUnnecessaryFiles() {
  # Rename the freshly built images to their archive names and strip files we
  # do not ship (demo directories, and debug symbols where appropriate).
  local jdkTargetPath=$(getJdkArchivePath)
  local jreTargetPath=$(getJreArchivePath)
  local testImageTargetPath=$(getTestImageArchivePath)
  local debugImageTargetPath=$(getDebugImageArchivePath)
  echo "Removing unnecessary files now..."
  stepIntoTheWorkingDirectory
  cd build/*/images || return
  echo "Currently at '${PWD}'"
  # Move the jdk image directory to its target archive name
  local jdkPath=$(ls -d ${BUILD_CONFIG[JDK_PATH]})
  echo "moving ${jdkPath} to ${jdkTargetPath}"
  rm -rf "${jdkTargetPath}" || true
  mv "${jdkPath}" "${jdkTargetPath}"
  # Do the same for the JRE image, when one was produced
  if [ -d "$(ls -d ${BUILD_CONFIG[JRE_PATH]})" ]; then
    echo "moving $(ls -d ${BUILD_CONFIG[JRE_PATH]}) to ${jreTargetPath}"
    rm -rf "${jreTargetPath}" || true
    mv "$(ls -d ${BUILD_CONFIG[JRE_PATH]})" "${jreTargetPath}"
    # On macOS the removable content lives under Contents/Home in the bundle
    case "${BUILD_CONFIG[OS_KERNEL_NAME]}" in
      "darwin") dirToRemove="${jreTargetPath}/Contents/Home" ;;
      *) dirToRemove="${jreTargetPath}" ;;
    esac
    rm -rf "${dirToRemove}"/demo || true
  fi
  # Test image - check if the config is set and directory exists
  local testImagePath="${BUILD_CONFIG[TEST_IMAGE_PATH]}"
  if [ -n "${testImagePath}" ] && [ -d "${testImagePath}" ]; then
    echo "moving ${testImagePath} to ${testImageTargetPath}"
    rm -rf "${testImageTargetPath}" || true
    mv "${testImagePath}" "${testImageTargetPath}"
  fi
  # Debug image - check if the config is set and directory exists
  local debugImagePath="${BUILD_CONFIG[DEBUG_IMAGE_PATH]}"
  if [ -n "${debugImagePath}" ] && [ -d "${debugImagePath}" ]; then
    echo "moving ${debugImagePath} to ${debugImageTargetPath}"
    rm -rf "${debugImageTargetPath}" || true
    mv "${debugImagePath}" "${debugImageTargetPath}"
  fi
  # Remove files we don't need
  case "${BUILD_CONFIG[OS_KERNEL_NAME]}" in
    "darwin") dirToRemove="${jdkTargetPath}/Contents/Home" ;;
    *) dirToRemove="${jdkTargetPath}" ;;
  esac
  rm -rf "${dirToRemove}"/demo || true
  # In OpenJ9 builds, debug symbols are captured in the debug image:
  # we don't want another copy of them in the main JDK or JRE archives.
  # Builds for other variants don't normally include debug symbols,
  # but if they were explicitly requested via the configure option
  # '--with-native-debug-symbols=(external|zipped)' leave them alone.
  if [[ "${BUILD_CONFIG[BUILD_VARIANT]}" == "${BUILD_VARIANT_OPENJ9}" ]]; then
    deleteDebugSymbols
  fi
  if [ ${BUILD_CONFIG[CREATE_DEBUG_IMAGE]} == true ] && [ "${BUILD_CONFIG[BUILD_VARIANT]}" != "${BUILD_VARIANT_OPENJ9}" ]; then
    # Collect the platform-specific debug symbol files from the JDK image
    case "${BUILD_CONFIG[OS_KERNEL_NAME]}" in
      *cygwin*)
        # on Windows, we want to take .pdb and .map files
        debugSymbols=$(find "${jdkTargetPath}" -type f -name "*.pdb" -o -name "*.map")
        ;;
      darwin)
        # on MacOSX, we want to take the files within the .dSYM folders
        debugSymbols=$(find "${jdkTargetPath}" -type d -name "*.dSYM" | xargs -I {} find "{}" -type f)
        ;;
      *)
        # on other platforms, we want to take .debuginfo files
        debugSymbols=$(find "${jdkTargetPath}" -type f -name "*.debuginfo")
        ;;
    esac
    # if debug symbols were found, copy them to a different folder
    if [ -n "${debugSymbols}" ]; then
      echo "Copying found debug symbols to ${debugImageTargetPath}"
      mkdir -p "${debugImageTargetPath}"
      # cpio -pdm recreates each file's relative directory structure
      echo "${debugSymbols}" | cpio -pdm "${debugImageTargetPath}"
    fi
    deleteDebugSymbols
  fi
  echo "Finished removing unnecessary files from ${jdkTargetPath}"
}
deleteDebugSymbols() {
  # Strip debug-symbol artefacts from the JDK and JRE images.
  # NOTE(review): relies on ${jdkTargetPath} and ${jreTargetPath} being set by
  # the caller via bash dynamic scoping (see removingUnnecessaryFiles) -
  # confirm before calling this from anywhere else.
  #
  # .diz files may be present on any platform
  # Note that on AIX, find does not support the '-delete' option.
  find "${jdkTargetPath}" "${jreTargetPath}" -type f -name "*.diz" | xargs rm -f || true
  case "${BUILD_CONFIG[OS_KERNEL_NAME]}" in
    *cygwin*)
      # on Windows, we want to remove .map and .pdb files
      find "${jdkTargetPath}" "${jreTargetPath}" -type f -name "*.map" -delete || true
      find "${jdkTargetPath}" "${jreTargetPath}" -type f -name "*.pdb" -delete || true
      ;;
    darwin)
      # on MacOSX, we want to remove .dSYM folders
      find "${jdkTargetPath}" "${jreTargetPath}" -type d -name "*.dSYM" | xargs -I "{}" rm -rf "{}"
      ;;
    *)
      # on other platforms, we want to remove .debuginfo files
      find "${jdkTargetPath}" "${jreTargetPath}" -name "*.debuginfo" | xargs rm -f || true
      ;;
  esac
}
moveFreetypeLib() {
  # macOS workaround: copy libfreetype.dylib.6 (or libfreetype.dylib) to the
  # libfreetype.6.dylib name that libfontmanager.dylib looks for, codesigning
  # the library first when an identity is configured.
  local LIB_DIRECTORY="${1}"
  if [ ! -d "${LIB_DIRECTORY}" ]; then
    echo "Could not find dir: ${LIB_DIRECTORY}"
    return
  fi
  echo " Performing copying of the free font library to ${LIB_DIRECTORY}, applicable for this version of the JDK. "
  local SOURCE_LIB_NAME="${LIB_DIRECTORY}/libfreetype.dylib.6"
  # Fall back to the unversioned name when the .6-suffixed file is absent
  if [ ! -f "${SOURCE_LIB_NAME}" ]; then
    SOURCE_LIB_NAME="${LIB_DIRECTORY}/libfreetype.dylib"
  fi
  if [ ! -f "${SOURCE_LIB_NAME}" ]; then
    echo "[Error] ${SOURCE_LIB_NAME} does not exist in the ${LIB_DIRECTORY} folder, please check if this is the right folder to refer to, aborting copy process..."
    return
  fi
  local TARGET_LIB_NAME="${LIB_DIRECTORY}/libfreetype.6.dylib"
  local INVOKED_BY_FONT_MANAGER="${LIB_DIRECTORY}/libfontmanager.dylib"
  echo "Currently at '${PWD}'"
  echo "Copying ${SOURCE_LIB_NAME} to ${TARGET_LIB_NAME}"
  echo " *** Workaround to fix the MacOSX issue where invocation to ${INVOKED_BY_FONT_MANAGER} fails to find ${TARGET_LIB_NAME} ***"
  # codesign freetype before it is bundled
  if [ -n "${BUILD_CONFIG[MACOSX_CODESIGN_IDENTITY]}" ]; then
    # test if codesign certificate is usable by signing a scratch file first
    if touch test && codesign --sign "Developer ID Application: London Jamocha Community CIC" test && rm -rf test; then
      ENTITLEMENTS="$WORKSPACE/entitlements.plist"
      codesign --entitlements "$ENTITLEMENTS" --options runtime --timestamp --sign "${BUILD_CONFIG[MACOSX_CODESIGN_IDENTITY]}" "${SOURCE_LIB_NAME}"
    else
      echo "skipping codesign as certificate cannot be found"
    fi
  fi
  cp "${SOURCE_LIB_NAME}" "${TARGET_LIB_NAME}"
  # Show the dynamic library dependencies for diagnostics
  if [ -f "${INVOKED_BY_FONT_MANAGER}" ]; then
    otool -L "${INVOKED_BY_FONT_MANAGER}"
  else
    # shellcheck disable=SC2154
    echo "[Warning] ${INVOKED_BY_FONT_MANAGER} does not exist in the ${LIB_DIRECTORY} folder, please check if this is the right folder to refer to, this may cause runtime issues, please beware..."
  fi
  otool -L "${TARGET_LIB_NAME}"
  echo "Finished copying ${SOURCE_LIB_NAME} to ${TARGET_LIB_NAME}"
}
# If on a Mac, make a copy of the font lib as required
makeACopyOfLibFreeFontForMacOSX() {
  # On darwin, duplicate the freetype library into the given image directory
  # (JDK lib plus the nested JRE lib) when this JDK version requires it.
  local imageDir="${1}"
  local shouldCopy=$2
  echo "PERFORM_COPYING=${shouldCopy}"
  if [ "${shouldCopy}" == "false" ]; then
    echo " Skipping copying of the free font library to ${imageDir}, does not apply for this version of the JDK. "
    return
  fi
  if [[ "${BUILD_CONFIG[OS_KERNEL_NAME]}" == "darwin" ]]; then
    moveFreetypeLib "${imageDir}/Contents/Home/lib"
    moveFreetypeLib "${imageDir}/Contents/Home/jre/lib"
  fi
}
# Get the tags from the git repo and choose the latest chronologically ordered tag for the given JDK version.
#
# Note, we have to chronologically order, as with a Shallow cloned (depth=1) git repo there is no "topo-order"
# for tags, also commit date order cannot be used either as the commit dates do not necessarily follow chronologically.
#
# Excluding "openj9" tag names as they have other ones for milestones etc. that get in the way
getFirstTagFromOpenJDKGitRepo() {
  # Return the latest chronologically ordered version tag for the configured
  # JDK version/variant (see the block comment above this function).
  #
  # Save current directory of caller so we can return to that directory at the end of this function
  # Some caller's are not in the git repo root, but instead build/*/images directory like the archive functions
  # and any function called after removingUnnecessaryFiles()
  local savePwd="${PWD}"
  # Change to openjdk git repo root to find build tag
  cd "${BUILD_CONFIG[WORKSPACE_DIR]}/${BUILD_CONFIG[WORKING_DIR]}/${BUILD_CONFIG[OPENJDK_SOURCE_DIR]}"
  # JDK8 tag sorting:
  # Tag Format "jdk8uLLL-bBB"
  # cut chars 1-5 => LLL-bBB
  # awk "-b" separator into a single "-" => LLL-BB
  # prefix "-" to allow line numbering stable sorting using nl => -LLL-BB
  # Sort by build level BB first
  # Then do "stable" sort (keeping BB order) by build level LLL
  local jdk8_tag_sort1="sort -t- -k3,3n"
  local jdk8_tag_sort2="sort -t- -k2,2n"
  local jdk8_get_tag_cmd="grep -v _openj9 | grep -v _adopt | cut -c6- | awk -F'[\-b]+' '{print \$1\"-\"\$2}' | sed 's/^/-/' | $jdk8_tag_sort1 | nl | $jdk8_tag_sort2 | cut -f2- | sed 's/^-/jdk8u/' | sed 's/-/-b/' | tail -1"
  # JDK11+ tag sorting:
  # We use sort and tail to choose the latest tag in case more than one refers the same commit.
  # Versions tags are formatted: jdk-V[.W[.X[.P]]]+B; with V, W, X, P, B being numeric.
  # Transform "-" to "." in tag so we can sort as: "jdk.V[.W[.X[.P]]]+B"
  # Transform "+" to ".0.+" during the sort so that .P (patch) is defaulted to "0" for those
  # that don't have one, and the trailing "." to terminate the 5th field from the +
  # First, sort on build number (B):
  local jdk11plus_tag_sort1="sort -t+ -k2,2n"
  # Second, (stable) sort on (V), (W), (X), (P): P(Patch) is optional and defaulted to "0"
  local jdk11plus_tag_sort2="sort -t. -k2,2n -k3,3n -k4,4n -k5,5n"
  jdk11plus_get_tag_cmd="grep -v _openj9 | grep -v _adopt | sed 's/jdk-/jdk./g' | sed 's/+/.0.+/g' | $jdk11plus_tag_sort1 | nl | $jdk11plus_tag_sort2 | sed 's/\.0\.+/+/g' | cut -f2- | sed 's/jdk./jdk-/g' | tail -1"
  # Choose tag search keyword and get cmd based on version
  local TAG_SEARCH="jdk-${BUILD_CONFIG[OPENJDK_FEATURE_NUMBER]}*+*"
  local get_tag_cmd=$jdk11plus_get_tag_cmd
  if [ "${BUILD_CONFIG[OPENJDK_FEATURE_NUMBER]}" == "8" ]; then
    TAG_SEARCH="jdk8u*-b*"
    get_tag_cmd=$jdk8_get_tag_cmd
  fi
  # Variant-specific tag naming conventions override the default search glob
  if [ "${BUILD_CONFIG[BUILD_VARIANT]}" == "${BUILD_VARIANT_DRAGONWELL}" ]; then
    TAG_SEARCH="dragonwell-*_jdk*"
  fi
  if [ "${BUILD_CONFIG[BUILD_VARIANT]}" == "${BUILD_VARIANT_BISHENG}" ] && [ "${BUILD_CONFIG[OS_ARCHITECTURE]}" == "riscv64" ]; then
    TAG_SEARCH="jdk-*+*bisheng_riscv"
  elif [ "${BUILD_CONFIG[BUILD_VARIANT]}" == "${BUILD_VARIANT_BISHENG}" ] && [ "${BUILD_CONFIG[OPENJDK_FEATURE_NUMBER]}" == "8" ]; then
    # Bisheng's JDK8 tags follow the aarch64 convention
    TAG_SEARCH="aarch64-shenandoah-jdk8u*-b*"
  fi
  # If openj9 and the closed/openjdk-tag.gmk file exists which specifies what level the openj9 jdk code is based upon...
  # Read OPENJDK_TAG value from that file..
  local openj9_openjdk_tag_file="${BUILD_CONFIG[WORKSPACE_DIR]}/${BUILD_CONFIG[WORKING_DIR]}/${BUILD_CONFIG[OPENJDK_SOURCE_DIR]}/closed/openjdk-tag.gmk"
  if [[ "${BUILD_CONFIG[BUILD_VARIANT]}" == "${BUILD_VARIANT_OPENJ9}" ]] && [[ -f "${openj9_openjdk_tag_file}" ]]; then
    firstMatchingNameFromRepo=$(grep OPENJDK_TAG ${openj9_openjdk_tag_file} | awk 'BEGIN {FS = "[ :=]+"} {print $2}')
  else
    # Refresh tags, then list those matching the glob and pipe through the
    # version-aware sort to pick the latest
    git fetch --tags "${BUILD_CONFIG[WORKSPACE_DIR]}/${BUILD_CONFIG[WORKING_DIR]}/${BUILD_CONFIG[OPENJDK_SOURCE_DIR]}"
    firstMatchingNameFromRepo=$(eval "git tag -l $TAG_SEARCH | $get_tag_cmd")
  fi
  if [ -z "$firstMatchingNameFromRepo" ]; then
    echo "WARNING: Failed to identify latest tag in the repository" 1>&2
  else
    echo "$firstMatchingNameFromRepo"
  fi
  # Restore pwd
  cd "$savePwd"
}
createArchive() {
  # Archive one built image directory and move the result into the target dir.
  # $1 - path of the image directory to archive
  # $2 - final artifact file name (placed under WORKSPACE_DIR/TARGET_DIR)
  repoLocation=$1
  targetName=$2
  # Extension (.tar.gz / .zip) depends on the target platform.
  archiveExtension=$(getArchiveExtension)
  # createOpenJDKArchive writes "OpenJDK<ext>" into the current directory.
  createOpenJDKArchive "${repoLocation}" "OpenJDK"
  archive="${PWD}/OpenJDK${archiveExtension}"
  echo "Your final archive was created at ${archive}"
  echo "Moving the artifact to ${BUILD_CONFIG[WORKSPACE_DIR]}/${BUILD_CONFIG[TARGET_DIR]}"
  mv "${archive}" "${BUILD_CONFIG[WORKSPACE_DIR]}/${BUILD_CONFIG[TARGET_DIR]}/${targetName}"
}
# Create the distributable archives (JDK, and JRE/test-image/debug-image when
# those image directories exist). Derived artifact names are produced by
# substituting "-jdk" in TARGET_FILE_NAME.
createOpenJDKTarArchive() {
  local jdkTargetPath=$(getJdkArchivePath)
  local jreTargetPath=$(getJreArchivePath)
  local testImageTargetPath=$(getTestImageArchivePath)
  local debugImageTargetPath=$(getDebugImageArchivePath)
  echo "OpenJDK JDK path will be ${jdkTargetPath}. JRE path will be ${jreTargetPath}"
  if [ -d "${jreTargetPath}" ]; then
    # shellcheck disable=SC2001
    local jreName=$(echo "${BUILD_CONFIG[TARGET_FILE_NAME]}" | sed 's/-jdk/-jre/')
    createArchive "${jreTargetPath}" "${jreName}"
  fi
  if [ -d "${testImageTargetPath}" ]; then
    echo "OpenJDK test image path will be ${testImageTargetPath}."
    local testImageName=$(echo "${BUILD_CONFIG[TARGET_FILE_NAME]//-jdk/-testimage}")
    createArchive "${testImageTargetPath}" "${testImageName}"
  fi
  if [ -d "${debugImageTargetPath}" ]; then
    echo "OpenJDK debug image path will be ${debugImageTargetPath}."
    local debugImageName=$(echo "${BUILD_CONFIG[TARGET_FILE_NAME]//-jdk/-debugimage}")
    createArchive "${debugImageTargetPath}" "${debugImageName}"
  fi
  # The JDK archive is always produced, last.
  createArchive "${jdkTargetPath}" "${BUILD_CONFIG[TARGET_FILE_NAME]}"
}
copyFreeFontForMacOS() {
  # On macOS, copy libfreefont into both the JDK and JRE images when the
  # corresponding COPY_MACOSX_FREE_FONT_LIB_* flags are set.
  local jdkTargetPath=$(getJdkArchivePath)
  local jreTargetPath=$(getJreArchivePath)
  makeACopyOfLibFreeFontForMacOSX "${jdkTargetPath}" "${BUILD_CONFIG[COPY_MACOSX_FREE_FONT_LIB_FOR_JDK_FLAG]}"
  makeACopyOfLibFreeFontForMacOSX "${jreTargetPath}" "${BUILD_CONFIG[COPY_MACOSX_FREE_FONT_LIB_FOR_JRE_FLAG]}"
}
wipeOutOldTargetDir() {
  # Remove any previous target directory; ':?' guards against an empty
  # WORKSPACE_DIR expanding to "rm -r /...", and '|| true' makes a missing
  # directory non-fatal.
  rm -r "${BUILD_CONFIG[WORKSPACE_DIR]:?}/${BUILD_CONFIG[TARGET_DIR]}" || true
}
createTargetDir() {
  # Create a fresh target directory tree; abort the build if creation fails.
  # clean out old builds
  mkdir -p "${BUILD_CONFIG[WORKSPACE_DIR]}/${BUILD_CONFIG[TARGET_DIR]}" || exit
  mkdir "${BUILD_CONFIG[WORKSPACE_DIR]}/${BUILD_CONFIG[TARGET_DIR]}/metadata" || exit
  # OpenJ9 additionally gets a variant_version metadata subdirectory
  # (populated later by addVariantVersionToJson).
  if [ "${BUILD_CONFIG[BUILD_VARIANT]}" == "${BUILD_VARIANT_OPENJ9}" ]; then
    mkdir "${BUILD_CONFIG[WORKSPACE_DIR]}/${BUILD_CONFIG[TARGET_DIR]}/metadata/variant_version" || exit
  fi
}
fixJavaHomeUnderDocker() {
  # If we are inside docker we cannot trust the JDK_BOOT_DIR that was detected on the host system
  if [[ "${BUILD_CONFIG[USE_DOCKER]}" == "true" ]]; then
    # clear BUILD_CONFIG[JDK_BOOT_DIR] and re set it (setBootJdk re-detects
    # a boot JDK inside the container)
    BUILD_CONFIG[JDK_BOOT_DIR]=""
    setBootJdk
  fi
}
addInfoToReleaseFile() {
  # Append vendor/build metadata to the image's "release" file, then mirror
  # the finished file into the JRE image. Relies on PRODUCT_HOME being set.
  # Extra information is added to the release file here
  echo "===GENERATING RELEASE FILE==="
  cd "$PRODUCT_HOME"
  # JAVA_LOC is used by the add* helpers below to query runtime properties.
  JAVA_LOC="$PRODUCT_HOME/bin/java"
  echo "ADDING IMPLEMENTOR"
  addImplementor
  echo "ADDING BUILD SHA"
  addBuildSHA
  echo "ADDING FULL VER"
  addFullVersion
  echo "ADDING SEM VER"
  addSemVer
  echo "ADDING BUILD OS"
  addBuildOS
  echo "ADDING VARIANT"
  addJVMVariant
  echo "ADDING JVM VERSION"
  addJVMVersion
  # OpenJ9 specific options
  if [ "${BUILD_CONFIG[BUILD_VARIANT]}" == "${BUILD_VARIANT_OPENJ9}" ]; then
    echo "ADDING HEAP SIZE"
    addHeapSize
    echo "ADDING J9 TAG"
    addJ9Tag
  fi
  # mirrorToJRE copies the completed release file; addImageType then appends
  # a different IMAGE_TYPE to each copy, so it must run AFTER the mirror.
  echo "MIRRORING TO JRE"
  mirrorToJRE
  echo "ADDING IMAGE TYPE"
  addImageType
  echo "===RELEASE FILE GENERATED==="
}
# shellcheck disable=SC2143
addHeapSize() { # Adds an identifier for heap size on OpenJ9 builds
  local heapSize=""
  # Query os.arch from the built java itself; tr -d '\r' handles Windows output.
  local architecture=$($JAVA_LOC -XshowSettings:properties -version 2>&1 | grep 'os.arch' | sed 's/^.*= //' | tr -d '\r') # Heap size must be standard for x86 builds (openjdk-build/2412)
  # "Compressed References" in the version banner, or 32-bit x86, => Standard heap.
  if [[ $($JAVA_LOC -version 2>&1 | grep 'Compressed References') ]] || [[ "$architecture" == "x86" ]]; then
    heapSize="Standard"
  else
    heapSize="Large"
  fi
  echo -e HEAP_SIZE=\"$heapSize\" >>release
}
addImplementor() {
  # JDK8 release files do not carry IMPLEMENTOR by default; add the vendor.
  # (Later JDKs already include it from the OpenJDK build itself.)
  if [ "${BUILD_CONFIG[OPENJDK_CORE_VERSION]}" == "${JDK8_CORE_VERSION}" ]; then
    # shellcheck disable=SC2086
    echo -e IMPLEMENTOR=\"${BUILD_CONFIG[VENDOR]}\" >>release
  fi
}
addJVMVersion() { # Adds the JVM version i.e. openj9-0.21.0
  # java.vm.version is read from the built runtime's system properties.
  local jvmVersion=$($JAVA_LOC -XshowSettings:properties -version 2>&1 | grep 'java.vm.version' | sed 's/^.*= //' | tr -d '\r')
  # shellcheck disable=SC2086
  echo -e JVM_VERSION=\"$jvmVersion\" >>release
}
addFullVersion() { # Adds the full version including build number i.e. 11.0.9+5-202009040847
  # java.runtime.version is read from the built runtime's system properties.
  local fullVer=$($JAVA_LOC -XshowSettings:properties -version 2>&1 | grep 'java.runtime.version' | sed 's/^.*= //' | tr -d '\r')
  # shellcheck disable=SC2086
  echo -e FULL_VERSION=\"$fullVer\" >>release
}
addJVMVariant() {
  # Record the build variant; ^ capitalises the first letter (e.g. "Openj9").
  # shellcheck disable=SC2086
  echo -e JVM_VARIANT=\"${BUILD_CONFIG[BUILD_VARIANT]^}\" >>release
}
addBuildSHA() { # git SHA of the build repository i.e. openjdk-build
  local buildSHA=$(git -C "${BUILD_CONFIG[WORKSPACE_DIR]}" rev-parse --short HEAD 2>/dev/null)
  # Best effort: skip (with a warning) when WORKSPACE_DIR is not a git work tree.
  if [[ $buildSHA ]]; then
    # shellcheck disable=SC2086
    echo -e BUILD_SOURCE=\"git:$buildSHA\" >>release
  else
    echo "Unable to fetch build SHA, does a work tree exist?..."
  fi
}
addBuildOS() {
  # Record the OS name/version of the BUILD machine (not the target platform).
  local buildOS="Unknown"
  local buildVer="Unknown"
  if [ "${BUILD_CONFIG[OS_KERNEL_NAME]}" == "darwin" ]; then
    # macOS: sw_vers gives ProductName / ProductVersion / BuildVersion.
    buildOS=$(sw_vers | sed -n 's/^ProductName:[[:blank:]]*//p')
    buildVer=$(sw_vers | tail -n 2 | awk '{print $2}')
  elif [ "${BUILD_CONFIG[OS_KERNEL_NAME]}" == "linux" ]; then
    buildOS=$(uname -s)
    buildVer=$(uname -r)
  else # Fall back to java properties OS/Version info
    buildOS=$($JAVA_LOC -XshowSettings:properties -version 2>&1 | grep 'os.name' | sed 's/^.*= //' | tr -d '\r')
    buildVer=$($JAVA_LOC -XshowSettings:properties -version 2>&1 | grep 'os.version' | sed 's/^.*= //' | tr -d '\r')
  fi
  echo -e BUILD_INFO=\"OS: "$buildOS" Version: "$buildVer"\" >>release
}
addJ9Tag() {
  # java.vm.version varies or for OpenJ9 depending on if it is a release build i.e. master-*gitSha* or 0.21.0
  # This code makes sure that a version number is always present in the release file i.e. openj9-0.21.0
  local j9Location="${BUILD_CONFIG[WORKSPACE_DIR]}/${BUILD_CONFIG[WORKING_DIR]}/${BUILD_CONFIG[OPENJDK_SOURCE_DIR]}/openj9"
  # Pull the tag associated with the J9 commit being used
  # NOTE: J9_TAG is intentionally a global; addVariantVersionToJson reads it later.
  J9_TAG=$(git -C $j9Location describe --abbrev=0)
  # shellcheck disable=SC2086
  # Only non-release builds get the OPENJ9_TAG line appended to the release file.
  if [ ${BUILD_CONFIG[RELEASE]} = false ]; then
    echo -e OPENJ9_TAG=\"$J9_TAG\" >> release
  fi
}
addSemVer() { # Pulls the semantic version from the tag associated with the openjdk repo
  local fullVer=$(getOpenJdkVersion)
  # NOTE: SEM_VER is intentionally global (read elsewhere after this runs).
  SEM_VER="$fullVer"
  if [ "${BUILD_CONFIG[OPENJDK_CORE_VERSION]}" == "${JDK8_CORE_VERSION}" ]; then
    # Translate jdk8uNNN-bBB to 8.0.NNN.BB, where NNN is any number and BB is 2 digits removing leading 0 if present
    # eg. jdk8u302-b00 => 8.0.302.0
    # eg. jdk8u292-b10 => 8.0.292.10
    SEM_VER=$(echo "$SEM_VER" | cut -c4- | awk -F'[-b]+' '{print $1"+"$2}' | sed 's/u/.0./' | sed 's/\+0/\+/')
  else
    # JDK11+: just strip the "jdk-" prefix.
    SEM_VER=$(echo "$SEM_VER" | cut -c5-) # i.e. 11.0.2+12
  fi
  # shellcheck disable=SC2086
  echo -e SEMANTIC_VERSION=\"$SEM_VER\" >> release
}
# Disable shellcheck in here as it causes issues with ls on mac
# Copy the JDK's finished release file into the JRE image.
mirrorToJRE() {
  stepIntoTheWorkingDirectory
  # Resolve the JRE image directory; on macOS the usable home is nested
  # under Contents/Home inside the .app-style bundle.
  # NOTE: JRE_HOME is intentionally global; addImageType reads it afterwards.
  # shellcheck disable=SC2086
  case "${BUILD_CONFIG[OS_KERNEL_NAME]}" in
  "darwin")
    JRE_HOME=$(ls -d ${PWD}/build/*/images/${BUILD_CONFIG[JRE_PATH]}/Contents/Home)
    ;;
  *)
    JRE_HOME=$(ls -d ${PWD}/build/*/images/${BUILD_CONFIG[JRE_PATH]})
    ;;
  esac
  # shellcheck disable=SC2086
  cp -f $PRODUCT_HOME/release $JRE_HOME/release
}
addImageType() {
  # Tag each image's release file with its type; must run after mirrorToJRE
  # so the two copies diverge only in this line.
  echo -e IMAGE_TYPE=\"JDK\" >>"$PRODUCT_HOME/release"
  echo -e IMAGE_TYPE=\"JRE\" >>"$JRE_HOME/release"
}
addInfoToJson(){
  # Assemble the metadata directory: move the configure log in, then write
  # variant-version, vendor and source-SHA files.
  mv "${BUILD_CONFIG[WORKSPACE_DIR]}/${BUILD_CONFIG[TARGET_DIR]}/configure.txt" "${BUILD_CONFIG[WORKSPACE_DIR]}/${BUILD_CONFIG[TARGET_DIR]}/metadata/"
  addVariantVersionToJson
  addVendorToJson
  addSourceToJson # Build repository commit SHA
}
addVariantVersionToJson(){
  # For OpenJ9 builds, split the J9 tag (global J9_TAG, set by addJ9Tag) into
  # major/minor/security/tags components and write one metadata file per part.
  if [ "${BUILD_CONFIG[BUILD_VARIANT]}" == "${BUILD_VARIANT_OPENJ9}" ]; then
    # Drop the "openj9-" prefix (7 chars) and normalise "-" to "." so the
    # tag splits uniformly on dots.
    local variantJson=$(echo "$J9_TAG" | cut -c8- | tr "-" ".") # i.e. 0.22.0.m2
    local major=$(echo "$variantJson" | awk -F[.] '{print $1}')
    local minor=$(echo "$variantJson" | awk -F[.] '{print $2}')
    local security=$(echo "$variantJson" | awk -F[.] '{print $3}')
    local tags=$(echo "$variantJson" | awk -F[.] '{print $4}')
    # NOTE(review): with fewer than 3 dots this shifts minor into tags and
    # blanks minor — confirm that is the intended handling for short tags.
    if [[ $(echo "$variantJson" | tr -cd '.' | wc -c) -lt 3 ]]; then # Precaution for when OpenJ9 releases a 1.0.0 version
      tags="$minor"
      minor=""
    fi
    # Empty components default to "0" (tags defaults to empty).
    echo -n "${major:-"0"}" > "${BUILD_CONFIG[WORKSPACE_DIR]}/${BUILD_CONFIG[TARGET_DIR]}/metadata/variant_version/major.txt"
    echo -n "${minor:-"0"}" > "${BUILD_CONFIG[WORKSPACE_DIR]}/${BUILD_CONFIG[TARGET_DIR]}/metadata/variant_version/minor.txt"
    echo -n "${security:-"0"}" > "${BUILD_CONFIG[WORKSPACE_DIR]}/${BUILD_CONFIG[TARGET_DIR]}/metadata/variant_version/security.txt"
    echo -n "${tags:-""}" > "${BUILD_CONFIG[WORKSPACE_DIR]}/${BUILD_CONFIG[TARGET_DIR]}/metadata/variant_version/tags.txt"
  fi
}
addVendorToJson(){
  # Write the vendor name as a bare string (no trailing newline) for metadata.
  echo -n "${BUILD_CONFIG[VENDOR]}" > "${BUILD_CONFIG[WORKSPACE_DIR]}/${BUILD_CONFIG[TARGET_DIR]}/metadata/vendor.txt"
}
addSourceToJson(){ # Pulls the basename of the origin repo, or uses 'openjdk-build' in rare cases of failure
  # repoName: basename of the origin remote URL with any ".git" suffix handling;
  # buildSHA: short HEAD SHA of the build repo. Best effort — warn on failure.
  local repoName=$(basename "$(cd "${BUILD_CONFIG[WORKSPACE_DIR]%.git}" && git config --get remote.origin.url 2>/dev/null)")
  local buildSHA=$(git -C "${BUILD_CONFIG[WORKSPACE_DIR]}" rev-parse --short HEAD 2>/dev/null)
  if [[ $buildSHA ]]; then
    echo -n "${repoName:-"openjdk-build"}/$buildSHA" > "${BUILD_CONFIG[WORKSPACE_DIR]}/${BUILD_CONFIG[TARGET_DIR]}/metadata/buildSource.txt"
  else
    echo "Unable to fetch build SHA, does a work tree exist?..."
  fi
}
################################################################################

# Main script flow: load config, normalise the environment, then either
# assemble a previously-built exploded image (fast path) or run a full build.

loadConfigFromFile
fixJavaHomeUnderDocker
cd "${BUILD_CONFIG[WORKSPACE_DIR]}"

parseArguments "$@"

# Fast path: package an already-built exploded image and exit.
if [[ "${BUILD_CONFIG[ASSEMBLE_EXPLODED_IMAGE]}" == "true" ]]; then
  buildTemplatedFile
  executeTemplatedFile
  removingUnnecessaryFiles
  copyFreeFontForMacOS
  createOpenJDKTarArchive
  showCompletionMessage
  exit 0
fi

echo "build.sh : $(date +%T) : Clearing out target dir ..."
wipeOutOldTargetDir
createTargetDir
echo "build.sh : $(date +%T) : Configuring workspace inc. clone and cacerts generation ..."
configureWorkspace
echo "build.sh : $(date +%T) : Initiating build ..."
getOpenJDKUpdateAndBuildVersion
configureCommandParameters
buildTemplatedFile
executeTemplatedFile
echo "build.sh : $(date +%T) : Build complete ..."

# Packaging is skipped when only an exploded (un-linked) image was requested.
if [[ "${BUILD_CONFIG[MAKE_EXPLODED]}" != "true" ]]; then
  printJavaVersionString
  addInfoToReleaseFile
  addInfoToJson
  removingUnnecessaryFiles
  copyFreeFontForMacOS
  createOpenJDKTarArchive
fi

echo "build.sh : $(date +%T) : All done!"

# ccache is not detected properly TODO
# change grep to something like $GREP -e '^1.*' -e '^2.*' -e '^3\.0.*' -e '^3\.1\.[0123]$'`]
# See https://github.com/adoptium/openjdk-jdk8u/blob/dev/common/autoconf/build-performance.m4
|
import React, { DependencyList, ReactElement, useCallback, useState } from 'react';
import Form from '../components/Form';
import { FieldPack, Fields, FormValues } from '../utils/helperTypes';
import { ValidationMode } from '../utils/validationTypes';
import { Config as RecaptchaConfig } from './useRecaptcha';
export interface Config<T extends Fields> {
  /** Required, the name of this form. Necessary for the use of IDs */
  name: string;
  /** Required, declares the fields of the form */
  fields: T;
  /** Optional, the pack of field components used to render this form's fields */
  fieldPack?: FieldPack;
  /** Optional, defines the global form validation mode. Defaults to `ValidationModes.AFTER_BLUR` */
  validationMode?: ValidationMode;
  /** Optional, adds a recaptcha check to the form */
  recaptcha?: RecaptchaConfig;
  /**
   * Optional, the void function that fires on a form submission event.
   * Receives the current form values and, when recaptcha is configured,
   * the recaptcha token via `helpers`.
   */
  onSubmit?: (
    formValues: FormValues<T>,
    helpers: { recaptchaToken?: string },
  ) => void | Promise<void>;
}
export interface State {
  /** The form component generated by the hook; render it as `<Form />` */
  Form: () => ReactElement;
}
/**
 * Build a memoized form component from the given config.
 *
 * @param config form definition (name, fields, validation, submit handler)
 * @param deps   optional extra dependencies that should re-create the form
 * @returns a `State` whose `Form` component renders the configured form
 */
export default function useForm<T extends Fields>(config: Config<T>, deps?: DependencyList): State {
  const [isSubmitting, setIsSubmitting] = useState(false);

  // BUGFIX: the deps list is spread so each entry is compared individually
  // (passing the array itself compared only its identity, which changes on
  // every render for inline literals), and `isSubmitting` is included so the
  // rendered form never closes over a stale submission flag.
  const FormComponent = useCallback(
    () => <Form {...config} {...{ isSubmitting, setIsSubmitting }} />,
    // eslint-disable-next-line react-hooks/exhaustive-deps
    [config, isSubmitting, ...(deps ?? [])],
  );

  return { Form: FormComponent };
}
|
#!/usr/bin/env bash
# Download Flink 1.10.0 plus the connector/format jars needed for the demo.
# BUGFIX: the downloads were previously chained with `|` (a shell pipeline),
# which launches all wget processes concurrently and wires unrelated
# stdout/stdin streams together. The downloads are independent, so run them
# sequentially and abort on the first failure.
set -euo pipefail

wget https://www.apache.org/dist/flink/flink-1.10.0/flink-1.10.0-bin-scala_2.11.tgz
wget -P ./lib/ https://repo1.maven.org/maven2/org/apache/flink/flink-json/1.10.0/flink-json-1.10.0.jar
wget -P ./lib/ https://repo1.maven.org/maven2/org/apache/flink/flink-sql-connector-kafka_2.11/1.10.0/flink-sql-connector-kafka_2.11-1.10.0.jar
wget -P ./lib/ https://repo1.maven.org/maven2/org/apache/flink/flink-sql-connector-elasticsearch6_2.11/1.10.0/flink-sql-connector-elasticsearch6_2.11-1.10.0.jar
wget -P ./lib/ https://repo1.maven.org/maven2/org/apache/flink/flink-jdbc_2.11/1.10.0/flink-jdbc_2.11-1.10.0.jar
wget -P ./lib/ https://repo1.maven.org/maven2/mysql/mysql-connector-java/5.1.48/mysql-connector-java-5.1.48.jar
|
#!/bin/bash
# Integration test: replication into a downstream table with MORE columns
# than the upstream table ("downstream_more_column").
set -eu
cur=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
source $cur/../_utils/test_prepare
WORK_DIR=$TEST_DIR/$TEST_NAME
# Upstream (MySQL) db/table and downstream (TiDB) db/table names.
db1="downstream_more_column1"
tb1="t1"
db="downstream_more_column"
tb="t"
function run() {
  # End-to-end check that DM surfaces the "more columns downstream" schema
  # mismatch and that operate-schema + resume-task recovers replication,
  # in both full ("all") mode and incremental mode.
  run_sql_file $cur/data/db1.prepare.sql $MYSQL_HOST1 $MYSQL_PORT1 $MYSQL_PASSWORD1
  # create table in tidb with AUTO_INCREMENT
  run_sql_file $cur/data/tidb.prepare.sql $TIDB_HOST $TIDB_PORT $TIDB_PASSWORD

  # start DM worker and master
  run_dm_master $WORK_DIR/master $MASTER_PORT $cur/conf/dm-master.toml
  check_rpc_alive $cur/../bin/check_master_online 127.0.0.1:$MASTER_PORT
  run_dm_worker $WORK_DIR/worker1 $WORKER1_PORT $cur/conf/dm-worker1.toml
  check_rpc_alive $cur/../bin/check_worker_online 127.0.0.1:$WORKER1_PORT

  # operate mysql config to worker
  cp $cur/conf/source1.yaml $WORK_DIR/source1.yaml
  dmctl_operate_source create $WORK_DIR/source1.yaml $SOURCE_ID1

  # start DM task in all mode
  # schemaTracker create table from dump data
  dmctl_start_task_standalone "$cur/conf/dm-task.yaml" "--remove-meta"
  # check full load data
  run_sql_tidb_with_retry "select count(1) from ${db}.${tb} where c1<100;" "count(1): 2"

  run_sql_file $cur/data/db1.increment.sql $MYSQL_HOST1 $MYSQL_PORT1 $MYSQL_PASSWORD1
  run_dm_ctl_with_retry $WORK_DIR "127.0.0.1:$MASTER_PORT" \
    "query-status test" \
    "originTable: \`${db1}\`.\`${tb1}\`" 1 \
    "targetTable: \`${db}\`.\`${tb}\`" 1 \
    "Column count doesn't match value count" 1
  run_dm_ctl $WORK_DIR "127.0.0.1:$MASTER_PORT" \
    "operate-schema set -s mysql-replica-01 test -d ${db1} -t ${tb1} $cur/data/schema.sql" \
    "\"result\": true" 2
  run_dm_ctl $WORK_DIR "127.0.0.1:$MASTER_PORT" \
    "resume-task test"
  # check incremental data
  run_sql_tidb_with_retry "select count(1) from ${db}.${tb} where c1>100 and c1<1000;" "count(1): 2"

  run_dm_ctl $WORK_DIR "127.0.0.1:$MASTER_PORT" \
    "stop-task test" \
    "\"result\": true" 2

  # start DM task in incremental mode
  # schemaTracker create table from downstream
  master_status=($(get_master_status))
  cp $cur/conf/dm-task-incremental.yaml $WORK_DIR/dm-task-incremental.yaml
  sed -i "s/binlog-gtid-placeholder/${master_status[2]}/g" $WORK_DIR/dm-task-incremental.yaml
  run_dm_ctl $WORK_DIR "127.0.0.1:$MASTER_PORT" \
    "start-task $WORK_DIR/dm-task-incremental.yaml --remove-meta" \
    "\"result\": true" 2

  run_sql_file $cur/data/db1.increment2.sql $MYSQL_HOST1 $MYSQL_PORT1 $MYSQL_PASSWORD1
  # Column count doesn't match value count
  # BUGFIX: these expected-output patterns previously used $(...) command
  # substitution (which tried to EXECUTE the db/table names as commands);
  # use escaped backticks, matching the identical check in "all" mode above.
  run_dm_ctl_with_retry $WORK_DIR "127.0.0.1:$MASTER_PORT" \
    "query-status test" \
    "originTable: \`${db1}\`.\`${tb1}\`" 1 \
    "targetTable: \`${db}\`.\`${tb}\`" 1 \
    "Column count doesn't match value count" 1

  # operate-schema: flush checkpoint default
  run_dm_ctl $WORK_DIR "127.0.0.1:$MASTER_PORT" \
    "operate-schema set -s mysql-replica-01 test -d ${db1} -t ${tb1} $cur/data/schema.sql" \
    "\"result\": true" 2
  check_log_contain_with_retry 'flush table info' $WORK_DIR/worker1/log/dm-worker.log
  run_dm_ctl $WORK_DIR "127.0.0.1:$MASTER_PORT" \
    "resume-task test"
  run_dm_ctl_with_retry $WORK_DIR "127.0.0.1:$MASTER_PORT" \
    "query-status test" \
    "\"result\": true" 2
  # check incremental data
  run_sql_tidb_with_retry "select count(1) from ${db}.${tb} where c1>1000 and c1<10000;" "count(1): 2"
}
# Clean downstream data and any stray DM processes, run the test, then clean up.
cleanup_data downstream_more_column
# also cleanup dm processes in case of last run failed
cleanup_process $*
run $*
cleanup_process $*

echo "[$(date)] <<<<<< test case $TEST_NAME success! >>>>>>"
|
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.Scanner;
// Define the HttpRequest class representing an HTTP request.
// Immutable value object: currently only carries the target URL.
class HttpRequest {
    private final String url;

    /** @param url absolute URL this request targets */
    public HttpRequest(String url) {
        this.url = url;
    }

    /** Returns the URL this request targets. */
    public String getUrl() {
        return url;
    }
}
// Define the HttpResponseHandler interface for handling HTTP responses.
// Implementations convert the raw response body into a value of type T.
interface HttpResponseHandler<T> {
    /** @param response raw response body; @return the processed result */
    T handleResponse(String response);
}
// Implement a concrete class for executing HTTP requests.
// Performs a blocking GET and delegates body processing to the handler.
class SimpleHttpExecutor implements HttpExecutor {
    /**
     * Execute a GET request and convert the body with {@code responseHandler}.
     *
     * @throws IOException on connection failure or any non-200 response code
     */
    @Override
    public <T> T execute(HttpRequest request, HttpResponseHandler<T> responseHandler) throws IOException {
        URL url = new URL(request.getUrl());
        HttpURLConnection connection = (HttpURLConnection) url.openConnection();
        try {
            connection.setRequestMethod("GET");
            int responseCode = connection.getResponseCode();
            if (responseCode != HttpURLConnection.HTTP_OK) {
                throw new IOException("HTTP request failed with response code: " + responseCode);
            }
            // BUGFIX: try-with-resources closes the Scanner (and its stream)
            // even when reading or the handler throws; the original leaked
            // both on those paths.
            try (Scanner scanner = new Scanner(connection.getInputStream())) {
                scanner.useDelimiter("\\A"); // read the whole body as one token
                String responseBody = scanner.hasNext() ? scanner.next() : "";
                return responseHandler.handleResponse(responseBody);
            }
        } finally {
            connection.disconnect();
        }
    }
}
// Sample HttpResponseHandler that post-processes plain-text responses.
class StringResponseHandler implements HttpResponseHandler<String> {
    /** Example processing step: normalise the body to upper case. */
    @Override
    public String handleResponse(String response) {
        String processed = response.toUpperCase();
        return processed;
    }
}
// Usage example: wire an executor, a request and a handler together and
// print the processed body (or the error) for a sample endpoint.
public class Main {
    public static void main(String[] args) {
        HttpExecutor httpExecutor = new SimpleHttpExecutor();
        HttpRequest httpRequest = new HttpRequest("https://example.com/api/data");
        HttpResponseHandler<String> responseHandler = new StringResponseHandler();

        try {
            String result = httpExecutor.execute(httpRequest, responseHandler);
            System.out.println("Processed response: " + result);
        } catch (IOException e) {
            // Network failures and non-200 responses both surface here.
            System.err.println("Error executing HTTP request: " + e.getMessage());
        }
    }
}
|
#!/bin/bash
# Export the TRE core terraform outputs into ../private.env for local debugging.
set -e

# Only (re-)query terraform when a cached output file does not already exist.
if [ ! -f ../tre_output.json ]; then
  # Connect to the remote backend of Terraform
  export TF_LOG=""
  terraform init -input=false -backend=true -reconfigure -upgrade \
    -backend-config="resource_group_name=$TF_VAR_mgmt_resource_group_name" \
    -backend-config="storage_account_name=$TF_VAR_mgmt_storage_account_name" \
    -backend-config="container_name=$TF_VAR_terraform_state_container_name" \
    -backend-config="key=${TRE_ID}"

  # Convert the output to json
  terraform output -json > ../tre_output.json
fi

# Now create an .env file
./json-to-env.sh < ../tre_output.json > ../private.env

# Pull in the core templates environment variables so we can build up new key/value pairs
source ../.env

# Add a few extra values to the file to help us (i.e. for local debugging api_app and resource processor)
echo "SERVICE_BUS_FULLY_QUALIFIED_NAMESPACE='sb-${TRE_ID}.servicebus.windows.net'" >> ../private.env
echo "TEST_WORKSPACE_APP_ID='${WORKSPACE_API_CLIENT_ID}'" >> ../private.env

# These next ones from Check Dependencies
echo "SUBSCRIPTION_ID='${SUB_ID}'" >> ../private.env
echo "AZURE_SUBSCRIPTION_ID='${SUB_ID}'" >> ../private.env
echo "AZURE_TENANT_ID='${TENANT_ID}'" >> ../private.env
|
// A single node of a red-black tree.
// BUGFIX: members were implicitly private (class default), which made them
// inaccessible to RedBlackTree's out-of-line methods; they are now public.
// `color` was declared int but assigned the char 'r' — it is now char,
// holding 'r' (red) or 'b' (black).
class Node {
public:
    int value;     // key stored in this node
    Node* left;    // left child, nullptr if absent
    Node* right;   // right child, nullptr if absent
    Node* parent;  // parent node, nullptr for the root
    char color;    // 'r' or 'b'; new nodes start red

    Node(int val)
        : value(val), left(nullptr), right(nullptr), parent(nullptr), color('r')
    {
    }
};
// Red-black tree interface; method bodies are defined elsewhere.
class RedBlackTree {
    Node *root;  // tree root, nullptr when the tree is empty

public:
    RedBlackTree(){ root = nullptr; }

    // Insertion
    void Insertion(int data);
    // Deletion of a node
    void Deletion(int data);
    // Searching
    bool Search(int data);
    // Rotate Left (takes the pivot node by reference so it can be relinked)
    void RotateLeft(Node *&);
    // Rotate Right (takes the pivot node by reference so it can be relinked)
    void RotateRight(Node *&);
};
|
<filename>webauthn4j-core/src/main/java/com/webauthn4j/data/MessageDigestAlgorithm.java
/*
* Copyright 2002-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.webauthn4j.data;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonValue;
import com.fasterxml.jackson.databind.exc.InvalidFormatException;
import com.webauthn4j.util.AssertUtil;
import com.webauthn4j.util.MessageDigestUtil;
import org.checkerframework.checker.nullness.qual.NonNull;
import org.checkerframework.checker.nullness.qual.Nullable;
import java.io.Serializable;
import java.security.MessageDigest;
import java.util.Objects;
/**
 * Value object describing a message digest algorithm by its JCA name.
 * Serialized to/from JSON as the bare JCA name string.
 */
public class MessageDigestAlgorithm implements Serializable {

    public static final MessageDigestAlgorithm SHA1 = new MessageDigestAlgorithm("SHA-1");
    public static final MessageDigestAlgorithm SHA256 = new MessageDigestAlgorithm("SHA-256");
    public static final MessageDigestAlgorithm SHA384 = new MessageDigestAlgorithm("SHA-384");
    public static final MessageDigestAlgorithm SHA512 = new MessageDigestAlgorithm("SHA-512");

    private final String jcaName;

    private MessageDigestAlgorithm(@NonNull String algorithmName) {
        this.jcaName = algorithmName;
    }

    /** Factory method; rejects a null JCA name. */
    public static @NonNull MessageDigestAlgorithm create(@NonNull String jcaName) {
        AssertUtil.notNull(jcaName, "jcaName must not be null");
        return new MessageDigestAlgorithm(jcaName);
    }

    /** Jackson entry point: build the instance from its JSON string form. */
    @SuppressWarnings("unused")
    @JsonCreator
    private static @NonNull MessageDigestAlgorithm deserialize(String value) throws InvalidFormatException {
        try {
            return create(value);
        } catch (IllegalArgumentException e) {
            throw new InvalidFormatException(null, "value is out of range", value, MessageDigestAlgorithm.class);
        }
    }

    /** The JCA standard name, also used as the JSON representation. */
    @JsonValue
    public @NonNull String getJcaName() {
        return jcaName;
    }

    /** Create a fresh {@link MessageDigest} for this algorithm. */
    public @NonNull MessageDigest createMessageDigestObject() {
        return MessageDigestUtil.createMessageDigest(jcaName);
    }

    @Override
    public boolean equals(@Nullable Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        MessageDigestAlgorithm other = (MessageDigestAlgorithm) o;
        return Objects.equals(this.jcaName, other.jcaName);
    }

    @Override
    public int hashCode() {
        return Objects.hash(jcaName);
    }
}
|
"""
PhenoCam Raw Data Processing
This is the code for the processing of the raw PhenoCam data, downloaded from both the PhenoCam dataset and the images hosted by UNH.
"""
import os
import csv
import json
def delete_tif(directory="./phenocam_data/"):
    """Delete every ``.tif`` file directly inside *directory*.

    Args:
        directory: data directory to clean; defaults to the original
            hard-coded path so existing callers are unchanged.
    """
    for file in os.listdir(directory):
        if file.endswith(".tif"):
            os.remove(os.path.join(directory, file))
def delete_meta_txt(directory="./phenocam_data/"):
    """Delete every ``.txt`` (metadata) file directly inside *directory*.

    Args:
        directory: data directory to clean; defaults to the original
            hard-coded path so existing callers are unchanged.
    """
    for file in os.listdir(directory):
        if file.endswith(".txt"):
            os.remove(os.path.join(directory, file))
def delete_non_deciduous(directory="./phenocam_data/"):
    """Delete CSV data files that are not for Deciduous Broadleaf (DB) sites.

    File names follow ``<sitename>_<veg_type>...csv``; the vegetation type is
    the two characters after the first underscore.

    Args:
        directory: data directory to clean; defaults to the original path.
    """
    for file in os.listdir(directory):
        if not file.endswith(".csv"):
            continue
        underscore = file.find("_")
        # Same slice as the original code: works even when no "_" is present
        # (find() returns -1 and the slice degrades gracefully).
        veg_type = file[underscore + 1:underscore + 3]
        if veg_type != "DB":
            os.remove(os.path.join(directory, file))
def delete_non_1day_transitions(directory="./phenocam_data/"):
    """Delete every CSV that is not a ``1day_transition_dates`` product.

    Args:
        directory: data directory to clean; defaults to the original path.
    """
    for file in os.listdir(directory):
        if file.endswith(".csv") and "1day_transition_dates" not in file:
            os.remove(os.path.join(directory, file))
def delete_non_1000_roi(directory="./phenocam_data/"):
    """Delete every CSV whose name does not contain the ROI id ``1000``.

    Args:
        directory: data directory to clean; defaults to the original path.
    """
    for file in os.listdir(directory):
        if file.endswith(".csv") and "1000" not in file:
            os.remove(os.path.join(directory, file))
def delete_non_site_type_I(directory="./phenocam_data/"):
    """Keep only type "I" sites; purge every file belonging to other sites.

    For each ``*.json`` metadata file, reads the sitename and site_type; when
    the type is not "I", removes all files whose name contains the sitename.
    Type I sites are kept because they have high data quality; long historical
    data is not as important for phenophase classification.

    Args:
        directory: data directory to clean; defaults to the original path.
    """
    files_in_directory = os.listdir(directory)
    for file in files_in_directory:
        if not file.endswith(".json"):
            continue
        path_to_file = os.path.join(directory, file)
        # BUGFIX: a metadata file may already have been deleted while purging
        # an earlier non-type-I site whose name is contained in this one;
        # the original crashed with FileNotFoundError in that case.
        if not os.path.exists(path_to_file):
            continue
        with open(path_to_file) as f:
            data = json.load(f)
        sitename = data["phenocam_site"]["sitename"]
        site_type = data["phenocam_site"]["site_type"]
        if site_type != "I":
            for sibling in files_in_directory:
                if sitename in sibling:
                    sibling_path = os.path.join(directory, sibling)
                    if os.path.exists(sibling_path):
                        os.remove(sibling_path)
def delete_unneeded_sites(directory="./phenocam_data/", remove_these=None):
    """Delete every file whose name contains one of the unwanted sitenames.

    Args:
        directory: data directory to clean; defaults to the original path.
        remove_these: iterable of sitename substrings to purge; defaults to
            the curated exclusion list used by the study.
    """
    if remove_these is None:
        remove_these = ['ashbottoms', 'barrocolorado', 'groundhog', 'harvardbarn', 'harvardhemlock', 'hawbeckereddy', 'huyckpreserveny', 'kingmanfarm', 'NEON.D03.JERK.DP1.00033',
                        'NEON.D05.TREE.DP1.00033', 'ninemileprairie', 'northattleboroma', 'proctor', 'sweetbriar', 'sylvania', 'ugseros', 'uwmfieldsta', 'woodstockvt', 'worcester',
                        'caryinstitute', 'donanapajarera', 'NEON.D05.STEI.DP1', 'NEON.D05.UNDE.DP1']
    files_in_directory = os.listdir(directory)
    for sitename in remove_these:
        for f in files_in_directory:
            if sitename not in f:
                continue
            path_to_file = os.path.join(directory, f)
            # A file can match two sitenames; skip if already removed.
            if os.path.exists(path_to_file):
                os.remove(path_to_file)
def delete_redundant_metas(directory="./phenocam_data/"):
    """Delete ``<sitename>_*.json`` files whose site has no CSV data left.

    Args:
        directory: data directory to clean; defaults to the original path.
    """
    files_in_directory = os.listdir(directory)
    for file in files_in_directory:
        if not file.endswith(".json"):
            continue
        sitename = file[:file.find('_')]
        has_csv = any(f.endswith('.csv') and sitename in f for f in files_in_directory)
        if not has_csv:
            os.remove(os.path.join(directory, file))
def remove_csv_headers(directory="./phenocam_data/", header_rows=16):
    """Strip the comment header (first *header_rows* rows) from every CSV.

    Args:
        directory: data directory to process; defaults to the original path.
        header_rows: number of leading rows to drop (PhenoCam files carry a
            16-line comment preamble).
    """
    for file in os.listdir(directory):
        if not file.endswith(".csv"):
            continue
        path_to_file = os.path.join(directory, file)
        # newline="" is the documented way to open files for the csv module.
        with open(path_to_file, 'r', newline='') as read_file:
            rows = list(csv.reader(read_file))
        with open(path_to_file, 'w', newline='') as write_file:
            csv.writer(write_file).writerows(rows[header_rows:])
def print_all_sites_used(directory="./phenocam_data/"):
    """Print the count and names of sites that still have a ``*meta.json``.

    Args:
        directory: data directory to inspect; defaults to the original path.

    Returns:
        The list of sitenames, so callers can use the result programmatically
        (backward compatible: the original returned None, which callers ignored).
    """
    meta_files = [f for f in os.listdir(directory) if f.endswith("meta.json")]
    print('[{}]'.format(len(meta_files)))
    sitenames = []
    for file in meta_files:
        sitename = file[:file.find('_')]
        print(sitename)
        sitenames.append(sitename)
    return sitenames
def process_image_data():
    # Process image data, saving a list for each site containing only one image
    # per day: the image whose HHMMSS timestamp is closest to noon.
    # Expects the layout ../PhenoCam_v2/images/<site>_*/phenocamdata/<site>/<year>/<month>/*.jpg
    import json  # shadows the module-level import; kept as-is
    img_directory = '../PhenoCam_v2/images/'
    site_folders = [sitefldr for sitefldr in os.listdir(img_directory)]
    for site in site_folders:
        sitename = site[:site.find('_')]
        site_directory = img_directory + site + '/phenocamdata/' + sitename + '/'
        site_imgs = []
        # Year directories are taken to be the dot-free entries (skips stray files).
        for year in [yr for yr in os.listdir(site_directory) if (yr.find('.')==-1)]:
            year_directory = site_directory + year + '/'
            for month in os.listdir(year_directory):
                month_directory = year_directory + month + '/'
                imgs = [(month_directory + img) for img in os.listdir(month_directory) if img.endswith('.jpg')]
                # Collect the distinct date prefixes seen this month. Assumes the
                # file name ends with a 17-char <date>_<HHMMSS> stamp before
                # ".jpg", with the date in the first 10 chars -- TODO confirm
                # against the actual image naming scheme.
                dates_in_month = []
                for img_filename in imgs:
                    date_str = img_filename[-21:-4]
                    date_str_no_time = date_str[:10]
                    if not date_str_no_time in dates_in_month:
                        dates_in_month.append(date_str_no_time)
                # For each day keep the single image closest to noon.
                for day in dates_in_month:
                    imgs_in_day = [img_filename for img_filename in imgs if img_filename.find(day) != -1]
                    distance_to_noon = {}
                    for img_filename in imgs_in_day:
                        time = int(img_filename[-10:-4])
                        # NOTE(review): 116000 looks like a typo for 120000 (noon
                        # as HHMMSS); it biases pre-noon distances -- confirm intent.
                        if time / 120000 < 1:
                            distance = 116000 - time
                        else:
                            distance = time - 120000
                        distance_to_noon[img_filename] = distance
                    closest_img = min(distance_to_noon.items(), key=lambda x: x[1])[0]
                    site_imgs.append(closest_img)
        # save into json in phenocam_data folder
        directory = './phenocam_data/'
        imgs_json = {'img_file_names' : site_imgs}
        with open(directory + sitename + '_imgs.json', 'w') as file:
            json.dump(imgs_json, file, indent=4)
if __name__=='__main__':
    # The cleanup steps are order-dependent: each filter operates on whatever
    # files the previous steps left behind in ./phenocam_data/.
    delete_tif()
    delete_meta_txt()
    delete_non_deciduous()
    delete_non_1day_transitions()
    delete_non_1000_roi()
    delete_non_site_type_I()
    delete_unneeded_sites()
    delete_redundant_metas()
    remove_csv_headers()
    print_all_sites_used()
    process_image_data()
|
// Code generated by protoc-gen-go-json. DO NOT EDIT.
// versions:
// - protoc-gen-go-json v1.3.1
// - protoc v3.9.1
// source: lorawan-stack/api/lorawan.proto
package ttnpb
import (
gogo "github.com/TheThingsIndustries/protoc-gen-go-json/gogo"
jsonplugin "github.com/TheThingsIndustries/protoc-gen-go-json/jsonplugin"
strconv "strconv"
)
// MarshalProtoJSON marshals the MType to JSON.
// The enum is written as its name string looked up in MType_name.
func (x MType) MarshalProtoJSON(s *jsonplugin.MarshalState) {
	s.WriteEnumString(int32(x), MType_name)
}
// MarshalText marshals the MType to text (the enum's name string).
func (x MType) MarshalText() ([]byte, error) {
	return []byte(jsonplugin.GetEnumString(int32(x), MType_name)), nil
}
// MarshalJSON marshals the MType to JSON using the default marshaler config.
func (x MType) MarshalJSON() ([]byte, error) {
	return jsonplugin.DefaultMarshalerConfig.Marshal(x)
}
// UnmarshalProtoJSON unmarshals the MType from JSON.
// On failure the error is recorded on the unmarshal state and x is untouched.
func (x *MType) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
	v := s.ReadEnum(MType_value)
	if err := s.Err(); err != nil {
		s.SetErrorf("could not read MType enum: %v", err)
		return
	}
	*x = MType(v)
}
// UnmarshalText unmarshals the MType from text.
func (x *MType) UnmarshalText(b []byte) error {
i, err := jsonplugin.ParseEnumString(string(b), MType_value)
if err != nil {
return err
}
*x = MType(i)
return nil
}
// UnmarshalJSON unmarshals the MType from JSON.
func (x *MType) UnmarshalJSON(b []byte) error {
return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// NOTE(review): machine-generated by protoc-gen-go-json ("DO NOT EDIT") —
// regenerate rather than hand-edit.
// MarshalProtoJSON marshals the Major to JSON.
func (x Major) MarshalProtoJSON(s *jsonplugin.MarshalState) {
s.WriteEnumString(int32(x), Major_name)
}
// MarshalText marshals the Major to text.
func (x Major) MarshalText() ([]byte, error) {
return []byte(jsonplugin.GetEnumString(int32(x), Major_name)), nil
}
// MarshalJSON marshals the Major to JSON.
func (x Major) MarshalJSON() ([]byte, error) {
return jsonplugin.DefaultMarshalerConfig.Marshal(x)
}
// Major_customvalue contains custom string values that extend Major_value.
var Major_customvalue = map[string]int32{
"R1": 0,
}
// UnmarshalProtoJSON unmarshals the Major from JSON.
func (x *Major) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
v := s.ReadEnum(Major_value, Major_customvalue)
if err := s.Err(); err != nil {
s.SetErrorf("could not read Major enum: %v", err)
return
}
*x = Major(v)
}
// UnmarshalText unmarshals the Major from text.
// NOTE(review): parses against Major_value only, so the "R1" alias in
// Major_customvalue is accepted in JSON but not in text — presumably the
// generator's intent; confirm against other enums (MACVersion passes both).
func (x *Major) UnmarshalText(b []byte) error {
i, err := jsonplugin.ParseEnumString(string(b), Major_value)
if err != nil {
return err
}
*x = Major(i)
return nil
}
// UnmarshalJSON unmarshals the Major from JSON.
func (x *Major) UnmarshalJSON(b []byte) error {
return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// NOTE(review): machine-generated by protoc-gen-go-json ("DO NOT EDIT") —
// regenerate rather than hand-edit.
// MarshalProtoJSON marshals the MACVersion to JSON.
func (x MACVersion) MarshalProtoJSON(s *jsonplugin.MarshalState) {
s.WriteEnumString(int32(x), MACVersion_name)
}
// MarshalText marshals the MACVersion to text.
func (x MACVersion) MarshalText() ([]byte, error) {
return []byte(jsonplugin.GetEnumString(int32(x), MACVersion_name)), nil
}
// MarshalJSON marshals the MACVersion to JSON.
func (x MACVersion) MarshalJSON() ([]byte, error) {
return jsonplugin.DefaultMarshalerConfig.Marshal(x)
}
// MACVersion_customvalue contains custom string values that extend MACVersion_value.
// Accepts human-friendly dotted versions ("1.0.2") alongside proto names.
var MACVersion_customvalue = map[string]int32{
"UNKNOWN": 0,
"unknown": 0,
"V1_0": 1,
"1.0": 1,
"1.0.0": 1,
"V1_0_1": 2,
"1.0.1": 2,
"V1_0_2": 3,
"1.0.2": 3,
"V1_1": 4,
"1.1": 4,
"1.1.0": 4,
"V1_0_3": 5,
"1.0.3": 5,
"V1_0_4": 6,
"1.0.4": 6,
}
// UnmarshalProtoJSON unmarshals the MACVersion from JSON.
func (x *MACVersion) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
v := s.ReadEnum(MACVersion_value, MACVersion_customvalue)
if err := s.Err(); err != nil {
s.SetErrorf("could not read MACVersion enum: %v", err)
return
}
*x = MACVersion(v)
}
// UnmarshalText unmarshals the MACVersion from text.
// NOTE(review): unlike most enums in this file, text parsing here also
// accepts the customvalue aliases (map passed first, so aliases win).
func (x *MACVersion) UnmarshalText(b []byte) error {
i, err := jsonplugin.ParseEnumString(string(b), MACVersion_customvalue, MACVersion_value)
if err != nil {
return err
}
*x = MACVersion(i)
return nil
}
// UnmarshalJSON unmarshals the MACVersion from JSON.
func (x *MACVersion) UnmarshalJSON(b []byte) error {
return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// NOTE(review): machine-generated by protoc-gen-go-json ("DO NOT EDIT") —
// regenerate rather than hand-edit.
// MarshalProtoJSON marshals the PHYVersion to JSON.
func (x PHYVersion) MarshalProtoJSON(s *jsonplugin.MarshalState) {
s.WriteEnumString(int32(x), PHYVersion_name)
}
// MarshalText marshals the PHYVersion to text.
func (x PHYVersion) MarshalText() ([]byte, error) {
return []byte(jsonplugin.GetEnumString(int32(x), PHYVersion_name)), nil
}
// MarshalJSON marshals the PHYVersion to JSON.
func (x PHYVersion) MarshalJSON() ([]byte, error) {
return jsonplugin.DefaultMarshalerConfig.Marshal(x)
}
// PHYVersion_customvalue contains custom string values that extend PHYVersion_value.
// Accepts dotted regional-parameters revisions ("1.0.2-a") alongside proto names.
var PHYVersion_customvalue = map[string]int32{
"unknown": 0,
"1.0": 1,
"1.0.0": 1,
"V1_0": 1,
"1.0.1": 2,
"V1_0_1": 2,
"1.0.2": 3,
"1.0.2-a": 3,
"V1_0_2": 3,
"V1_0_2_REV_A": 3,
"1.0.2-b": 4,
"V1_0_2_REV_B": 4,
"1.1-a": 5,
"1.1.0-a": 5,
"V1_1_REV_A": 5,
"1.1-b": 6,
"1.1.0-b": 6,
"V1_1_REV_B": 6,
"1.0.3-a": 7,
"V1_0_3_REV_A": 7,
}
// UnmarshalProtoJSON unmarshals the PHYVersion from JSON.
func (x *PHYVersion) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
v := s.ReadEnum(PHYVersion_value, PHYVersion_customvalue)
if err := s.Err(); err != nil {
s.SetErrorf("could not read PHYVersion enum: %v", err)
return
}
*x = PHYVersion(v)
}
// UnmarshalText unmarshals the PHYVersion from text.
// NOTE(review): like MACVersion, text parsing here also accepts the
// customvalue aliases (map passed first).
func (x *PHYVersion) UnmarshalText(b []byte) error {
i, err := jsonplugin.ParseEnumString(string(b), PHYVersion_customvalue, PHYVersion_value)
if err != nil {
return err
}
*x = PHYVersion(i)
return nil
}
// UnmarshalJSON unmarshals the PHYVersion from JSON.
func (x *PHYVersion) UnmarshalJSON(b []byte) error {
return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// NOTE(review): machine-generated by protoc-gen-go-json ("DO NOT EDIT") —
// regenerate rather than hand-edit. This enum marshals as a NUMBER (not a
// name string), so the customvalue map below lets JSON also supply the
// bare digits as strings.
// MarshalProtoJSON marshals the DataRateIndex to JSON.
func (x DataRateIndex) MarshalProtoJSON(s *jsonplugin.MarshalState) {
s.WriteEnumNumber(int32(x))
}
// MarshalText marshals the DataRateIndex to text.
func (x DataRateIndex) MarshalText() ([]byte, error) {
return []byte(strconv.FormatInt(int64(x), 10)), nil
}
// MarshalJSON marshals the DataRateIndex to JSON.
func (x DataRateIndex) MarshalJSON() ([]byte, error) {
return jsonplugin.DefaultMarshalerConfig.Marshal(x)
}
// DataRateIndex_customvalue contains custom string values that extend DataRateIndex_value.
var DataRateIndex_customvalue = map[string]int32{
"0": 0,
"1": 1,
"2": 2,
"3": 3,
"4": 4,
"5": 5,
"6": 6,
"7": 7,
"8": 8,
"9": 9,
"10": 10,
"11": 11,
"12": 12,
"13": 13,
"14": 14,
"15": 15,
}
// UnmarshalProtoJSON unmarshals the DataRateIndex from JSON.
func (x *DataRateIndex) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
v := s.ReadEnum(DataRateIndex_value, DataRateIndex_customvalue)
if err := s.Err(); err != nil {
s.SetErrorf("could not read DataRateIndex enum: %v", err)
return
}
*x = DataRateIndex(v)
}
// UnmarshalText unmarshals the DataRateIndex from text.
// NOTE(review): parses DataRateIndex_value only — digit strings from the
// customvalue map are not accepted here, even though MarshalText emits
// digits; confirm round-trip behavior is intended.
func (x *DataRateIndex) UnmarshalText(b []byte) error {
i, err := jsonplugin.ParseEnumString(string(b), DataRateIndex_value)
if err != nil {
return err
}
*x = DataRateIndex(i)
return nil
}
// UnmarshalJSON unmarshals the DataRateIndex from JSON.
func (x *DataRateIndex) UnmarshalJSON(b []byte) error {
return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// NOTE(review): machine-generated by protoc-gen-go-json ("DO NOT EDIT") —
// regenerate rather than hand-edit. Marshals as a number, like DataRateIndex.
// MarshalProtoJSON marshals the DataRateOffset to JSON.
func (x DataRateOffset) MarshalProtoJSON(s *jsonplugin.MarshalState) {
s.WriteEnumNumber(int32(x))
}
// MarshalText marshals the DataRateOffset to text.
func (x DataRateOffset) MarshalText() ([]byte, error) {
return []byte(strconv.FormatInt(int64(x), 10)), nil
}
// MarshalJSON marshals the DataRateOffset to JSON.
func (x DataRateOffset) MarshalJSON() ([]byte, error) {
return jsonplugin.DefaultMarshalerConfig.Marshal(x)
}
// DataRateOffset_customvalue contains custom string values that extend DataRateOffset_value.
var DataRateOffset_customvalue = map[string]int32{
"0": 0,
"1": 1,
"2": 2,
"3": 3,
"4": 4,
"5": 5,
"6": 6,
"7": 7,
}
// UnmarshalProtoJSON unmarshals the DataRateOffset from JSON.
func (x *DataRateOffset) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
v := s.ReadEnum(DataRateOffset_value, DataRateOffset_customvalue)
if err := s.Err(); err != nil {
s.SetErrorf("could not read DataRateOffset enum: %v", err)
return
}
*x = DataRateOffset(v)
}
// UnmarshalText unmarshals the DataRateOffset from text.
// NOTE(review): parses DataRateOffset_value only; digit aliases are
// JSON-only — confirm round-trip with MarshalText is intended.
func (x *DataRateOffset) UnmarshalText(b []byte) error {
i, err := jsonplugin.ParseEnumString(string(b), DataRateOffset_value)
if err != nil {
return err
}
*x = DataRateOffset(i)
return nil
}
// UnmarshalJSON unmarshals the DataRateOffset from JSON.
func (x *DataRateOffset) UnmarshalJSON(b []byte) error {
return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// NOTE(review): machine-generated by protoc-gen-go-json ("DO NOT EDIT") —
// regenerate rather than hand-edit. No custom aliases for this enum.
// MarshalProtoJSON marshals the JoinRequestType to JSON.
func (x JoinRequestType) MarshalProtoJSON(s *jsonplugin.MarshalState) {
s.WriteEnumString(int32(x), JoinRequestType_name)
}
// MarshalText marshals the JoinRequestType to text.
func (x JoinRequestType) MarshalText() ([]byte, error) {
return []byte(jsonplugin.GetEnumString(int32(x), JoinRequestType_name)), nil
}
// MarshalJSON marshals the JoinRequestType to JSON.
func (x JoinRequestType) MarshalJSON() ([]byte, error) {
return jsonplugin.DefaultMarshalerConfig.Marshal(x)
}
// UnmarshalProtoJSON unmarshals the JoinRequestType from JSON.
func (x *JoinRequestType) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
v := s.ReadEnum(JoinRequestType_value)
if err := s.Err(); err != nil {
s.SetErrorf("could not read JoinRequestType enum: %v", err)
return
}
*x = JoinRequestType(v)
}
// UnmarshalText unmarshals the JoinRequestType from text.
func (x *JoinRequestType) UnmarshalText(b []byte) error {
i, err := jsonplugin.ParseEnumString(string(b), JoinRequestType_value)
if err != nil {
return err
}
*x = JoinRequestType(i)
return nil
}
// UnmarshalJSON unmarshals the JoinRequestType from JSON.
func (x *JoinRequestType) UnmarshalJSON(b []byte) error {
return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// NOTE(review): machine-generated by protoc-gen-go-json ("DO NOT EDIT") —
// regenerate rather than hand-edit. No custom aliases for this enum.
// MarshalProtoJSON marshals the RejoinRequestType to JSON.
func (x RejoinRequestType) MarshalProtoJSON(s *jsonplugin.MarshalState) {
s.WriteEnumString(int32(x), RejoinRequestType_name)
}
// MarshalText marshals the RejoinRequestType to text.
func (x RejoinRequestType) MarshalText() ([]byte, error) {
return []byte(jsonplugin.GetEnumString(int32(x), RejoinRequestType_name)), nil
}
// MarshalJSON marshals the RejoinRequestType to JSON.
func (x RejoinRequestType) MarshalJSON() ([]byte, error) {
return jsonplugin.DefaultMarshalerConfig.Marshal(x)
}
// UnmarshalProtoJSON unmarshals the RejoinRequestType from JSON.
func (x *RejoinRequestType) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
v := s.ReadEnum(RejoinRequestType_value)
if err := s.Err(); err != nil {
s.SetErrorf("could not read RejoinRequestType enum: %v", err)
return
}
*x = RejoinRequestType(v)
}
// UnmarshalText unmarshals the RejoinRequestType from text.
func (x *RejoinRequestType) UnmarshalText(b []byte) error {
i, err := jsonplugin.ParseEnumString(string(b), RejoinRequestType_value)
if err != nil {
return err
}
*x = RejoinRequestType(i)
return nil
}
// UnmarshalJSON unmarshals the RejoinRequestType from JSON.
func (x *RejoinRequestType) UnmarshalJSON(b []byte) error {
return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// NOTE(review): machine-generated by protoc-gen-go-json ("DO NOT EDIT") —
// regenerate rather than hand-edit. No custom aliases for this enum.
// MarshalProtoJSON marshals the CFListType to JSON.
func (x CFListType) MarshalProtoJSON(s *jsonplugin.MarshalState) {
s.WriteEnumString(int32(x), CFListType_name)
}
// MarshalText marshals the CFListType to text.
func (x CFListType) MarshalText() ([]byte, error) {
return []byte(jsonplugin.GetEnumString(int32(x), CFListType_name)), nil
}
// MarshalJSON marshals the CFListType to JSON.
func (x CFListType) MarshalJSON() ([]byte, error) {
return jsonplugin.DefaultMarshalerConfig.Marshal(x)
}
// UnmarshalProtoJSON unmarshals the CFListType from JSON.
func (x *CFListType) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
v := s.ReadEnum(CFListType_value)
if err := s.Err(); err != nil {
s.SetErrorf("could not read CFListType enum: %v", err)
return
}
*x = CFListType(v)
}
// UnmarshalText unmarshals the CFListType from text.
func (x *CFListType) UnmarshalText(b []byte) error {
i, err := jsonplugin.ParseEnumString(string(b), CFListType_value)
if err != nil {
return err
}
*x = CFListType(i)
return nil
}
// UnmarshalJSON unmarshals the CFListType from JSON.
func (x *CFListType) UnmarshalJSON(b []byte) error {
return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// NOTE(review): machine-generated by protoc-gen-go-json ("DO NOT EDIT") —
// regenerate rather than hand-edit.
// MarshalProtoJSON marshals the Class to JSON.
func (x Class) MarshalProtoJSON(s *jsonplugin.MarshalState) {
s.WriteEnumString(int32(x), Class_name)
}
// MarshalText marshals the Class to text.
func (x Class) MarshalText() ([]byte, error) {
return []byte(jsonplugin.GetEnumString(int32(x), Class_name)), nil
}
// MarshalJSON marshals the Class to JSON.
func (x Class) MarshalJSON() ([]byte, error) {
return jsonplugin.DefaultMarshalerConfig.Marshal(x)
}
// Class_customvalue contains custom string values that extend Class_value.
var Class_customvalue = map[string]int32{
"A": 0,
"B": 1,
"C": 2,
}
// UnmarshalProtoJSON unmarshals the Class from JSON.
func (x *Class) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
v := s.ReadEnum(Class_value, Class_customvalue)
if err := s.Err(); err != nil {
s.SetErrorf("could not read Class enum: %v", err)
return
}
*x = Class(v)
}
// UnmarshalText unmarshals the Class from text.
// NOTE(review): bare "A"/"B"/"C" aliases are JSON-only; text parsing uses
// Class_value names — confirm intended.
func (x *Class) UnmarshalText(b []byte) error {
i, err := jsonplugin.ParseEnumString(string(b), Class_value)
if err != nil {
return err
}
*x = Class(i)
return nil
}
// UnmarshalJSON unmarshals the Class from JSON.
func (x *Class) UnmarshalJSON(b []byte) error {
return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// NOTE(review): machine-generated by protoc-gen-go-json ("DO NOT EDIT") —
// regenerate rather than hand-edit. No custom aliases for this enum.
// MarshalProtoJSON marshals the TxSchedulePriority to JSON.
func (x TxSchedulePriority) MarshalProtoJSON(s *jsonplugin.MarshalState) {
s.WriteEnumString(int32(x), TxSchedulePriority_name)
}
// MarshalText marshals the TxSchedulePriority to text.
func (x TxSchedulePriority) MarshalText() ([]byte, error) {
return []byte(jsonplugin.GetEnumString(int32(x), TxSchedulePriority_name)), nil
}
// MarshalJSON marshals the TxSchedulePriority to JSON.
func (x TxSchedulePriority) MarshalJSON() ([]byte, error) {
return jsonplugin.DefaultMarshalerConfig.Marshal(x)
}
// UnmarshalProtoJSON unmarshals the TxSchedulePriority from JSON.
func (x *TxSchedulePriority) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
v := s.ReadEnum(TxSchedulePriority_value)
if err := s.Err(); err != nil {
s.SetErrorf("could not read TxSchedulePriority enum: %v", err)
return
}
*x = TxSchedulePriority(v)
}
// UnmarshalText unmarshals the TxSchedulePriority from text.
func (x *TxSchedulePriority) UnmarshalText(b []byte) error {
i, err := jsonplugin.ParseEnumString(string(b), TxSchedulePriority_value)
if err != nil {
return err
}
*x = TxSchedulePriority(i)
return nil
}
// UnmarshalJSON unmarshals the TxSchedulePriority from JSON.
func (x *TxSchedulePriority) UnmarshalJSON(b []byte) error {
return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// NOTE(review): machine-generated by protoc-gen-go-json ("DO NOT EDIT") —
// regenerate rather than hand-edit.
// MarshalProtoJSON marshals the MACCommandIdentifier to JSON.
func (x MACCommandIdentifier) MarshalProtoJSON(s *jsonplugin.MarshalState) {
s.WriteEnumString(int32(x), MACCommandIdentifier_name)
}
// MarshalText marshals the MACCommandIdentifier to text.
func (x MACCommandIdentifier) MarshalText() ([]byte, error) {
return []byte(jsonplugin.GetEnumString(int32(x), MACCommandIdentifier_name)), nil
}
// MarshalJSON marshals the MACCommandIdentifier to JSON.
func (x MACCommandIdentifier) MarshalJSON() ([]byte, error) {
return jsonplugin.DefaultMarshalerConfig.Marshal(x)
}
// MACCommandIdentifier_customvalue contains custom string values that extend MACCommandIdentifier_value.
// Short CID aliases without the proto prefix (e.g. "LINK_ADR" for CID_LINK_ADR).
var MACCommandIdentifier_customvalue = map[string]int32{
"RFU_0": 0,
"RESET": 1,
"LINK_CHECK": 2,
"LINK_ADR": 3,
"DUTY_CYCLE": 4,
"RX_PARAM_SETUP": 5,
"DEV_STATUS": 6,
"NEW_CHANNEL": 7,
"RX_TIMING_SETUP": 8,
"TX_PARAM_SETUP": 9,
"DL_CHANNEL": 10,
"REKEY": 11,
"ADR_PARAM_SETUP": 12,
"DEVICE_TIME": 13,
"FORCE_REJOIN": 14,
"REJOIN_PARAM_SETUP": 15,
"PING_SLOT_INFO": 16,
"PING_SLOT_CHANNEL": 17,
"BEACON_TIMING": 18,
"BEACON_FREQ": 19,
"DEVICE_MODE": 32,
}
// UnmarshalProtoJSON unmarshals the MACCommandIdentifier from JSON.
func (x *MACCommandIdentifier) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
v := s.ReadEnum(MACCommandIdentifier_value, MACCommandIdentifier_customvalue)
if err := s.Err(); err != nil {
s.SetErrorf("could not read MACCommandIdentifier enum: %v", err)
return
}
*x = MACCommandIdentifier(v)
}
// UnmarshalText unmarshals the MACCommandIdentifier from text.
// NOTE(review): short aliases are JSON-only; text uses proto names only.
func (x *MACCommandIdentifier) UnmarshalText(b []byte) error {
i, err := jsonplugin.ParseEnumString(string(b), MACCommandIdentifier_value)
if err != nil {
return err
}
*x = MACCommandIdentifier(i)
return nil
}
// UnmarshalJSON unmarshals the MACCommandIdentifier from JSON.
func (x *MACCommandIdentifier) UnmarshalJSON(b []byte) error {
return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// NOTE(review): machine-generated by protoc-gen-go-json ("DO NOT EDIT") —
// regenerate rather than hand-edit.
// MarshalProtoJSON marshals the AggregatedDutyCycle to JSON.
func (x AggregatedDutyCycle) MarshalProtoJSON(s *jsonplugin.MarshalState) {
s.WriteEnumString(int32(x), AggregatedDutyCycle_name)
}
// MarshalText marshals the AggregatedDutyCycle to text.
func (x AggregatedDutyCycle) MarshalText() ([]byte, error) {
return []byte(jsonplugin.GetEnumString(int32(x), AggregatedDutyCycle_name)), nil
}
// MarshalJSON marshals the AggregatedDutyCycle to JSON.
func (x AggregatedDutyCycle) MarshalJSON() ([]byte, error) {
return jsonplugin.DefaultMarshalerConfig.Marshal(x)
}
// AggregatedDutyCycle_customvalue contains custom string values that extend AggregatedDutyCycle_value.
// Aliases are the duty-cycle divisor 2^n as a decimal string.
var AggregatedDutyCycle_customvalue = map[string]int32{
"1": 0,
"2": 1,
"4": 2,
"8": 3,
"16": 4,
"32": 5,
"64": 6,
"128": 7,
"256": 8,
"512": 9,
"1024": 10,
"2048": 11,
"4096": 12,
"8192": 13,
"16384": 14,
"32768": 15,
}
// UnmarshalProtoJSON unmarshals the AggregatedDutyCycle from JSON.
func (x *AggregatedDutyCycle) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
v := s.ReadEnum(AggregatedDutyCycle_value, AggregatedDutyCycle_customvalue)
if err := s.Err(); err != nil {
s.SetErrorf("could not read AggregatedDutyCycle enum: %v", err)
return
}
*x = AggregatedDutyCycle(v)
}
// UnmarshalText unmarshals the AggregatedDutyCycle from text.
// NOTE(review): numeric aliases are JSON-only; text uses proto names only.
func (x *AggregatedDutyCycle) UnmarshalText(b []byte) error {
i, err := jsonplugin.ParseEnumString(string(b), AggregatedDutyCycle_value)
if err != nil {
return err
}
*x = AggregatedDutyCycle(i)
return nil
}
// UnmarshalJSON unmarshals the AggregatedDutyCycle from JSON.
func (x *AggregatedDutyCycle) UnmarshalJSON(b []byte) error {
return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// NOTE(review): machine-generated by protoc-gen-go-json ("DO NOT EDIT") —
// regenerate rather than hand-edit.
// MarshalProtoJSON marshals the PingSlotPeriod to JSON.
func (x PingSlotPeriod) MarshalProtoJSON(s *jsonplugin.MarshalState) {
s.WriteEnumString(int32(x), PingSlotPeriod_name)
}
// MarshalText marshals the PingSlotPeriod to text.
func (x PingSlotPeriod) MarshalText() ([]byte, error) {
return []byte(jsonplugin.GetEnumString(int32(x), PingSlotPeriod_name)), nil
}
// MarshalJSON marshals the PingSlotPeriod to JSON.
func (x PingSlotPeriod) MarshalJSON() ([]byte, error) {
return jsonplugin.DefaultMarshalerConfig.Marshal(x)
}
// PingSlotPeriod_customvalue contains custom string values that extend PingSlotPeriod_value.
// Aliases are the period in seconds, e.g. "4S" for PING_EVERY_4S.
var PingSlotPeriod_customvalue = map[string]int32{
"1S": 0,
"2S": 1,
"4S": 2,
"8S": 3,
"16S": 4,
"32S": 5,
"64S": 6,
"128S": 7,
}
// UnmarshalProtoJSON unmarshals the PingSlotPeriod from JSON.
func (x *PingSlotPeriod) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
v := s.ReadEnum(PingSlotPeriod_value, PingSlotPeriod_customvalue)
if err := s.Err(); err != nil {
s.SetErrorf("could not read PingSlotPeriod enum: %v", err)
return
}
*x = PingSlotPeriod(v)
}
// UnmarshalText unmarshals the PingSlotPeriod from text.
// NOTE(review): "NS" aliases are JSON-only; text uses proto names only.
func (x *PingSlotPeriod) UnmarshalText(b []byte) error {
i, err := jsonplugin.ParseEnumString(string(b), PingSlotPeriod_value)
if err != nil {
return err
}
*x = PingSlotPeriod(i)
return nil
}
// UnmarshalJSON unmarshals the PingSlotPeriod from JSON.
func (x *PingSlotPeriod) UnmarshalJSON(b []byte) error {
return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// NOTE(review): machine-generated by protoc-gen-go-json ("DO NOT EDIT") —
// regenerate rather than hand-edit.
// MarshalProtoJSON marshals the RejoinCountExponent to JSON.
func (x RejoinCountExponent) MarshalProtoJSON(s *jsonplugin.MarshalState) {
s.WriteEnumString(int32(x), RejoinCountExponent_name)
}
// MarshalText marshals the RejoinCountExponent to text.
func (x RejoinCountExponent) MarshalText() ([]byte, error) {
return []byte(jsonplugin.GetEnumString(int32(x), RejoinCountExponent_name)), nil
}
// MarshalJSON marshals the RejoinCountExponent to JSON.
func (x RejoinCountExponent) MarshalJSON() ([]byte, error) {
return jsonplugin.DefaultMarshalerConfig.Marshal(x)
}
// RejoinCountExponent_customvalue contains custom string values that extend RejoinCountExponent_value.
// Aliases are the rejoin count 2^(n+4) as a decimal string.
var RejoinCountExponent_customvalue = map[string]int32{
"16": 0,
"32": 1,
"64": 2,
"128": 3,
"256": 4,
"512": 5,
"1024": 6,
"2048": 7,
"4096": 8,
"8192": 9,
"16384": 10,
"32768": 11,
"65536": 12,
"131072": 13,
"262144": 14,
"524288": 15,
}
// UnmarshalProtoJSON unmarshals the RejoinCountExponent from JSON.
func (x *RejoinCountExponent) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
v := s.ReadEnum(RejoinCountExponent_value, RejoinCountExponent_customvalue)
if err := s.Err(); err != nil {
s.SetErrorf("could not read RejoinCountExponent enum: %v", err)
return
}
*x = RejoinCountExponent(v)
}
// UnmarshalText unmarshals the RejoinCountExponent from text.
// NOTE(review): numeric aliases are JSON-only; text uses proto names only.
func (x *RejoinCountExponent) UnmarshalText(b []byte) error {
i, err := jsonplugin.ParseEnumString(string(b), RejoinCountExponent_value)
if err != nil {
return err
}
*x = RejoinCountExponent(i)
return nil
}
// UnmarshalJSON unmarshals the RejoinCountExponent from JSON.
func (x *RejoinCountExponent) UnmarshalJSON(b []byte) error {
return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// NOTE(review): machine-generated by protoc-gen-go-json ("DO NOT EDIT") —
// regenerate rather than hand-edit.
// MarshalProtoJSON marshals the RejoinTimeExponent to JSON.
func (x RejoinTimeExponent) MarshalProtoJSON(s *jsonplugin.MarshalState) {
s.WriteEnumString(int32(x), RejoinTimeExponent_name)
}
// MarshalText marshals the RejoinTimeExponent to text.
func (x RejoinTimeExponent) MarshalText() ([]byte, error) {
return []byte(jsonplugin.GetEnumString(int32(x), RejoinTimeExponent_name)), nil
}
// MarshalJSON marshals the RejoinTimeExponent to JSON.
func (x RejoinTimeExponent) MarshalJSON() ([]byte, error) {
return jsonplugin.DefaultMarshalerConfig.Marshal(x)
}
// RejoinTimeExponent_customvalue contains custom string values that extend RejoinTimeExponent_value.
// Aliases are the bare exponent digits.
var RejoinTimeExponent_customvalue = map[string]int32{
"0": 0,
"1": 1,
"2": 2,
"3": 3,
"4": 4,
"5": 5,
"6": 6,
"7": 7,
"8": 8,
"9": 9,
"10": 10,
"11": 11,
"12": 12,
"13": 13,
"14": 14,
"15": 15,
}
// UnmarshalProtoJSON unmarshals the RejoinTimeExponent from JSON.
func (x *RejoinTimeExponent) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
v := s.ReadEnum(RejoinTimeExponent_value, RejoinTimeExponent_customvalue)
if err := s.Err(); err != nil {
s.SetErrorf("could not read RejoinTimeExponent enum: %v", err)
return
}
*x = RejoinTimeExponent(v)
}
// UnmarshalText unmarshals the RejoinTimeExponent from text.
// NOTE(review): digit aliases are JSON-only; text uses proto names only.
func (x *RejoinTimeExponent) UnmarshalText(b []byte) error {
i, err := jsonplugin.ParseEnumString(string(b), RejoinTimeExponent_value)
if err != nil {
return err
}
*x = RejoinTimeExponent(i)
return nil
}
// UnmarshalJSON unmarshals the RejoinTimeExponent from JSON.
func (x *RejoinTimeExponent) UnmarshalJSON(b []byte) error {
return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// NOTE(review): machine-generated by protoc-gen-go-json ("DO NOT EDIT") —
// regenerate rather than hand-edit.
// MarshalProtoJSON marshals the RejoinPeriodExponent to JSON.
func (x RejoinPeriodExponent) MarshalProtoJSON(s *jsonplugin.MarshalState) {
s.WriteEnumString(int32(x), RejoinPeriodExponent_name)
}
// MarshalText marshals the RejoinPeriodExponent to text.
func (x RejoinPeriodExponent) MarshalText() ([]byte, error) {
return []byte(jsonplugin.GetEnumString(int32(x), RejoinPeriodExponent_name)), nil
}
// MarshalJSON marshals the RejoinPeriodExponent to JSON.
func (x RejoinPeriodExponent) MarshalJSON() ([]byte, error) {
return jsonplugin.DefaultMarshalerConfig.Marshal(x)
}
// RejoinPeriodExponent_customvalue contains custom string values that extend RejoinPeriodExponent_value.
// Aliases are the bare exponent digits.
var RejoinPeriodExponent_customvalue = map[string]int32{
"0": 0,
"1": 1,
"2": 2,
"3": 3,
"4": 4,
"5": 5,
"6": 6,
"7": 7,
}
// UnmarshalProtoJSON unmarshals the RejoinPeriodExponent from JSON.
func (x *RejoinPeriodExponent) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
v := s.ReadEnum(RejoinPeriodExponent_value, RejoinPeriodExponent_customvalue)
if err := s.Err(); err != nil {
s.SetErrorf("could not read RejoinPeriodExponent enum: %v", err)
return
}
*x = RejoinPeriodExponent(v)
}
// UnmarshalText unmarshals the RejoinPeriodExponent from text.
// NOTE(review): digit aliases are JSON-only; text uses proto names only.
func (x *RejoinPeriodExponent) UnmarshalText(b []byte) error {
i, err := jsonplugin.ParseEnumString(string(b), RejoinPeriodExponent_value)
if err != nil {
return err
}
*x = RejoinPeriodExponent(i)
return nil
}
// UnmarshalJSON unmarshals the RejoinPeriodExponent from JSON.
func (x *RejoinPeriodExponent) UnmarshalJSON(b []byte) error {
return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// NOTE(review): machine-generated by protoc-gen-go-json ("DO NOT EDIT") —
// regenerate rather than hand-edit.
// MarshalProtoJSON marshals the DeviceEIRP to JSON.
func (x DeviceEIRP) MarshalProtoJSON(s *jsonplugin.MarshalState) {
s.WriteEnumString(int32(x), DeviceEIRP_name)
}
// MarshalText marshals the DeviceEIRP to text.
func (x DeviceEIRP) MarshalText() ([]byte, error) {
return []byte(jsonplugin.GetEnumString(int32(x), DeviceEIRP_name)), nil
}
// MarshalJSON marshals the DeviceEIRP to JSON.
func (x DeviceEIRP) MarshalJSON() ([]byte, error) {
return jsonplugin.DefaultMarshalerConfig.Marshal(x)
}
// DeviceEIRP_customvalue contains custom string values that extend DeviceEIRP_value.
// Aliases are the EIRP in dBm as a decimal string.
var DeviceEIRP_customvalue = map[string]int32{
"8": 0,
"10": 1,
"12": 2,
"13": 3,
"14": 4,
"16": 5,
"18": 6,
"20": 7,
"21": 8,
"24": 9,
"26": 10,
"27": 11,
"29": 12,
"30": 13,
"33": 14,
"36": 15,
}
// UnmarshalProtoJSON unmarshals the DeviceEIRP from JSON.
func (x *DeviceEIRP) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
v := s.ReadEnum(DeviceEIRP_value, DeviceEIRP_customvalue)
if err := s.Err(); err != nil {
s.SetErrorf("could not read DeviceEIRP enum: %v", err)
return
}
*x = DeviceEIRP(v)
}
// UnmarshalText unmarshals the DeviceEIRP from text.
// NOTE(review): numeric aliases are JSON-only; text uses proto names only.
func (x *DeviceEIRP) UnmarshalText(b []byte) error {
i, err := jsonplugin.ParseEnumString(string(b), DeviceEIRP_value)
if err != nil {
return err
}
*x = DeviceEIRP(i)
return nil
}
// UnmarshalJSON unmarshals the DeviceEIRP from JSON.
func (x *DeviceEIRP) UnmarshalJSON(b []byte) error {
return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// NOTE(review): machine-generated by protoc-gen-go-json ("DO NOT EDIT") —
// regenerate rather than hand-edit.
// MarshalProtoJSON marshals the ADRAckLimitExponent to JSON.
func (x ADRAckLimitExponent) MarshalProtoJSON(s *jsonplugin.MarshalState) {
s.WriteEnumString(int32(x), ADRAckLimitExponent_name)
}
// MarshalText marshals the ADRAckLimitExponent to text.
func (x ADRAckLimitExponent) MarshalText() ([]byte, error) {
return []byte(jsonplugin.GetEnumString(int32(x), ADRAckLimitExponent_name)), nil
}
// MarshalJSON marshals the ADRAckLimitExponent to JSON.
func (x ADRAckLimitExponent) MarshalJSON() ([]byte, error) {
return jsonplugin.DefaultMarshalerConfig.Marshal(x)
}
// ADRAckLimitExponent_customvalue contains custom string values that extend ADRAckLimitExponent_value.
// Aliases are the limit 2^n as a decimal string.
var ADRAckLimitExponent_customvalue = map[string]int32{
"1": 0,
"2": 1,
"4": 2,
"8": 3,
"16": 4,
"32": 5,
"64": 6,
"128": 7,
"256": 8,
"512": 9,
"1024": 10,
"2048": 11,
"4096": 12,
"8192": 13,
"16384": 14,
"32768": 15,
}
// UnmarshalProtoJSON unmarshals the ADRAckLimitExponent from JSON.
func (x *ADRAckLimitExponent) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
v := s.ReadEnum(ADRAckLimitExponent_value, ADRAckLimitExponent_customvalue)
if err := s.Err(); err != nil {
s.SetErrorf("could not read ADRAckLimitExponent enum: %v", err)
return
}
*x = ADRAckLimitExponent(v)
}
// UnmarshalText unmarshals the ADRAckLimitExponent from text.
// NOTE(review): numeric aliases are JSON-only; text uses proto names only.
func (x *ADRAckLimitExponent) UnmarshalText(b []byte) error {
i, err := jsonplugin.ParseEnumString(string(b), ADRAckLimitExponent_value)
if err != nil {
return err
}
*x = ADRAckLimitExponent(i)
return nil
}
// UnmarshalJSON unmarshals the ADRAckLimitExponent from JSON.
func (x *ADRAckLimitExponent) UnmarshalJSON(b []byte) error {
return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// NOTE(review): machine-generated by protoc-gen-go-json ("DO NOT EDIT") —
// regenerate rather than hand-edit.
// MarshalProtoJSON marshals the ADRAckDelayExponent to JSON.
func (x ADRAckDelayExponent) MarshalProtoJSON(s *jsonplugin.MarshalState) {
s.WriteEnumString(int32(x), ADRAckDelayExponent_name)
}
// MarshalText marshals the ADRAckDelayExponent to text.
func (x ADRAckDelayExponent) MarshalText() ([]byte, error) {
return []byte(jsonplugin.GetEnumString(int32(x), ADRAckDelayExponent_name)), nil
}
// MarshalJSON marshals the ADRAckDelayExponent to JSON.
func (x ADRAckDelayExponent) MarshalJSON() ([]byte, error) {
return jsonplugin.DefaultMarshalerConfig.Marshal(x)
}
// ADRAckDelayExponent_customvalue contains custom string values that extend ADRAckDelayExponent_value.
// Aliases are the delay 2^n as a decimal string.
var ADRAckDelayExponent_customvalue = map[string]int32{
"1": 0,
"2": 1,
"4": 2,
"8": 3,
"16": 4,
"32": 5,
"64": 6,
"128": 7,
"256": 8,
"512": 9,
"1024": 10,
"2048": 11,
"4096": 12,
"8192": 13,
"16384": 14,
"32768": 15,
}
// UnmarshalProtoJSON unmarshals the ADRAckDelayExponent from JSON.
func (x *ADRAckDelayExponent) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
v := s.ReadEnum(ADRAckDelayExponent_value, ADRAckDelayExponent_customvalue)
if err := s.Err(); err != nil {
s.SetErrorf("could not read ADRAckDelayExponent enum: %v", err)
return
}
*x = ADRAckDelayExponent(v)
}
// UnmarshalText unmarshals the ADRAckDelayExponent from text.
// NOTE(review): numeric aliases are JSON-only; text uses proto names only.
func (x *ADRAckDelayExponent) UnmarshalText(b []byte) error {
i, err := jsonplugin.ParseEnumString(string(b), ADRAckDelayExponent_value)
if err != nil {
return err
}
*x = ADRAckDelayExponent(i)
return nil
}
// UnmarshalJSON unmarshals the ADRAckDelayExponent from JSON.
func (x *ADRAckDelayExponent) UnmarshalJSON(b []byte) error {
return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// NOTE(review): machine-generated by protoc-gen-go-json ("DO NOT EDIT") —
// regenerate rather than hand-edit. Marshals as a NUMBER, like DataRateIndex.
// MarshalProtoJSON marshals the RxDelay to JSON.
func (x RxDelay) MarshalProtoJSON(s *jsonplugin.MarshalState) {
s.WriteEnumNumber(int32(x))
}
// MarshalText marshals the RxDelay to text.
func (x RxDelay) MarshalText() ([]byte, error) {
return []byte(strconv.FormatInt(int64(x), 10)), nil
}
// MarshalJSON marshals the RxDelay to JSON.
func (x RxDelay) MarshalJSON() ([]byte, error) {
return jsonplugin.DefaultMarshalerConfig.Marshal(x)
}
// RxDelay_customvalue contains custom string values that extend RxDelay_value.
var RxDelay_customvalue = map[string]int32{
"0": 0,
"1": 1,
"2": 2,
"3": 3,
"4": 4,
"5": 5,
"6": 6,
"7": 7,
"8": 8,
"9": 9,
"10": 10,
"11": 11,
"12": 12,
"13": 13,
"14": 14,
"15": 15,
}
// UnmarshalProtoJSON unmarshals the RxDelay from JSON.
func (x *RxDelay) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
v := s.ReadEnum(RxDelay_value, RxDelay_customvalue)
if err := s.Err(); err != nil {
s.SetErrorf("could not read RxDelay enum: %v", err)
return
}
*x = RxDelay(v)
}
// UnmarshalText unmarshals the RxDelay from text.
// NOTE(review): parses RxDelay_value only, yet MarshalText emits digits —
// digit strings are accepted in JSON but not here; confirm round-trip is intended.
func (x *RxDelay) UnmarshalText(b []byte) error {
i, err := jsonplugin.ParseEnumString(string(b), RxDelay_value)
if err != nil {
return err
}
*x = RxDelay(i)
return nil
}
// UnmarshalJSON unmarshals the RxDelay from JSON.
func (x *RxDelay) UnmarshalJSON(b []byte) error {
return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// MarshalProtoJSON marshals the Minor to JSON.
func (x Minor) MarshalProtoJSON(s *jsonplugin.MarshalState) {
s.WriteEnumString(int32(x), Minor_name)
}
// MarshalText marshals the Minor to text.
func (x Minor) MarshalText() ([]byte, error) {
return []byte(jsonplugin.GetEnumString(int32(x), Minor_name)), nil
}
// MarshalJSON marshals the Minor to JSON.
func (x Minor) MarshalJSON() ([]byte, error) {
return jsonplugin.DefaultMarshalerConfig.Marshal(x)
}
// Minor_customvalue contains custom string values that extend Minor_value.
var Minor_customvalue = map[string]int32{
"RFU_0": 0,
"1": 1,
"RFU_2": 2,
"RFU_3": 3,
"RFU_4": 4,
"RFU_5": 5,
"RFU_6": 6,
"RFU_7": 7,
"RFU_8": 8,
"RFU_9": 9,
"RFU_10": 10,
"RFU_11": 11,
"RFU_12": 12,
"RFU_13": 13,
"RFU_14": 14,
"RFU_15": 15,
}
// UnmarshalProtoJSON unmarshals the Minor from JSON.
func (x *Minor) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
v := s.ReadEnum(Minor_value, Minor_customvalue)
if err := s.Err(); err != nil {
s.SetErrorf("could not read Minor enum: %v", err)
return
}
*x = Minor(v)
}
// UnmarshalText unmarshals the Minor from text, delegating the string
// parsing to jsonplugin.ParseEnumString with the canonical Minor_value
// name table. On a parse error, *x is left unmodified.
func (x *Minor) UnmarshalText(b []byte) error {
	parsed, err := jsonplugin.ParseEnumString(string(b), Minor_value)
	if err == nil {
		*x = Minor(parsed)
	}
	return err
}
// UnmarshalJSON unmarshals the Minor from JSON using the default
// unmarshaler configuration; this makes Minor satisfy json.Unmarshaler.
func (x *Minor) UnmarshalJSON(b []byte) error {
	return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// MarshalProtoJSON marshals the Message message to JSON.
// Each field is emitted only when it is set/non-empty or explicitly
// requested via s.HasField (presumably a field-mask check — confirm against
// the jsonplugin API). The payload oneof is flattened: at most one of the
// variant keys below is written, named after the active variant.
func (x *Message) MarshalProtoJSON(s *jsonplugin.MarshalState) {
	// A nil message serializes as JSON null.
	if x == nil {
		s.WriteNil()
		return
	}
	s.WriteObjectStart()
	var wroteField bool // tracks whether a field separator is needed
	if x.MHdr != nil || s.HasField("m_hdr") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("m_hdr")
		x.MHdr.MarshalProtoJSON(s.WithField("m_hdr"))
	}
	if len(x.Mic) > 0 || s.HasField("mic") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("mic")
		s.WriteBytes(x.Mic)
	}
	if x.Payload != nil {
		switch ov := x.Payload.(type) {
		case *Message_MacPayload:
			s.WriteMoreIf(&wroteField)
			s.WriteObjectField("mac_payload")
			// NOTE: MACPayload does not seem to implement MarshalProtoJSON.
			gogo.MarshalMessage(s, ov.MacPayload)
		case *Message_JoinRequestPayload:
			s.WriteMoreIf(&wroteField)
			s.WriteObjectField("join_request_payload")
			// NOTE: JoinRequestPayload does not seem to implement MarshalProtoJSON.
			gogo.MarshalMessage(s, ov.JoinRequestPayload)
		case *Message_JoinAcceptPayload:
			s.WriteMoreIf(&wroteField)
			s.WriteObjectField("join_accept_payload")
			ov.JoinAcceptPayload.MarshalProtoJSON(s.WithField("join_accept_payload"))
		case *Message_RejoinRequestPayload:
			s.WriteMoreIf(&wroteField)
			s.WriteObjectField("rejoin_request_payload")
			ov.RejoinRequestPayload.MarshalProtoJSON(s.WithField("rejoin_request_payload"))
		}
	}
	s.WriteObjectEnd()
}

// MarshalJSON marshals the Message to JSON using the default marshaler
// configuration; this makes Message satisfy json.Marshaler.
func (x Message) MarshalJSON() ([]byte, error) {
	return jsonplugin.DefaultMarshalerConfig.Marshal(&x)
}
// UnmarshalProtoJSON unmarshals the Message message from JSON.
// Both snake_case and camelCase keys are accepted; unknown keys are skipped.
// Reading any of the oneof variant keys replaces x.Payload with the matching
// wrapper, so the last variant present in the JSON object wins.
func (x *Message) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
	if s.ReadNil() {
		return
	}
	s.ReadObject(func(key string) {
		// NOTE: a `return` below exits only this per-key callback,
		// not the surrounding method.
		switch key {
		default:
			s.ReadAny() // ignore unknown field
		case "m_hdr", "mHdr":
			if s.ReadNil() {
				x.MHdr = nil
				return
			}
			x.MHdr = &MHDR{}
			x.MHdr.UnmarshalProtoJSON(s.WithField("m_hdr", true))
		case "mic":
			s.AddField("mic")
			x.Mic = s.ReadBytes()
		case "mac_payload", "macPayload":
			s.AddField("mac_payload")
			ov := &Message_MacPayload{}
			x.Payload = ov
			if s.ReadNil() {
				ov.MacPayload = nil
				return
			}
			// NOTE: MACPayload does not seem to implement UnmarshalProtoJSON.
			var v MACPayload
			gogo.UnmarshalMessage(s, &v)
			ov.MacPayload = &v
		case "join_request_payload", "joinRequestPayload":
			s.AddField("join_request_payload")
			ov := &Message_JoinRequestPayload{}
			x.Payload = ov
			if s.ReadNil() {
				ov.JoinRequestPayload = nil
				return
			}
			// NOTE: JoinRequestPayload does not seem to implement UnmarshalProtoJSON.
			var v JoinRequestPayload
			gogo.UnmarshalMessage(s, &v)
			ov.JoinRequestPayload = &v
		case "join_accept_payload", "joinAcceptPayload":
			ov := &Message_JoinAcceptPayload{}
			x.Payload = ov
			if s.ReadNil() {
				ov.JoinAcceptPayload = nil
				return
			}
			ov.JoinAcceptPayload = &JoinAcceptPayload{}
			ov.JoinAcceptPayload.UnmarshalProtoJSON(s.WithField("join_accept_payload", true))
		case "rejoin_request_payload", "rejoinRequestPayload":
			ov := &Message_RejoinRequestPayload{}
			x.Payload = ov
			if s.ReadNil() {
				ov.RejoinRequestPayload = nil
				return
			}
			ov.RejoinRequestPayload = &RejoinRequestPayload{}
			ov.RejoinRequestPayload.UnmarshalProtoJSON(s.WithField("rejoin_request_payload", true))
		}
	})
}

// UnmarshalJSON unmarshals the Message from JSON using the default
// unmarshaler configuration; this makes Message satisfy json.Unmarshaler.
func (x *Message) UnmarshalJSON(b []byte) error {
	return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// MarshalProtoJSON marshals the MHDR message to JSON.
// Each enum field is emitted only when non-zero or explicitly requested via
// s.HasField; a nil receiver serializes as JSON null.
func (x *MHDR) MarshalProtoJSON(s *jsonplugin.MarshalState) {
	if x == nil {
		s.WriteNil()
		return
	}
	s.WriteObjectStart()
	var wroteField bool // tracks whether a field separator is needed
	if x.MType != 0 || s.HasField("m_type") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("m_type")
		x.MType.MarshalProtoJSON(s)
	}
	if x.Major != 0 || s.HasField("major") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("major")
		x.Major.MarshalProtoJSON(s)
	}
	s.WriteObjectEnd()
}

// MarshalJSON marshals the MHDR to JSON using the default marshaler
// configuration; this makes MHDR satisfy json.Marshaler.
func (x MHDR) MarshalJSON() ([]byte, error) {
	return jsonplugin.DefaultMarshalerConfig.Marshal(&x)
}

// UnmarshalProtoJSON unmarshals the MHDR message from JSON.
// Both snake_case and camelCase keys are accepted; unknown keys are skipped.
func (x *MHDR) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
	if s.ReadNil() {
		return
	}
	s.ReadObject(func(key string) {
		switch key {
		default:
			s.ReadAny() // ignore unknown field
		case "m_type", "mType":
			s.AddField("m_type")
			x.MType.UnmarshalProtoJSON(s)
		case "major":
			s.AddField("major")
			x.Major.UnmarshalProtoJSON(s)
		}
	})
}

// UnmarshalJSON unmarshals the MHDR from JSON using the default
// unmarshaler configuration; this makes MHDR satisfy json.Unmarshaler.
func (x *MHDR) UnmarshalJSON(b []byte) error {
	return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// MarshalProtoJSON marshals the RejoinRequestPayload message to JSON.
// Fields are emitted only when non-zero/non-empty or explicitly requested
// via s.HasField; a nil receiver serializes as JSON null.
func (x *RejoinRequestPayload) MarshalProtoJSON(s *jsonplugin.MarshalState) {
	if x == nil {
		s.WriteNil()
		return
	}
	s.WriteObjectStart()
	var wroteField bool // tracks whether a field separator is needed
	if x.RejoinType != 0 || s.HasField("rejoin_type") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("rejoin_type")
		x.RejoinType.MarshalProtoJSON(s)
	}
	// NetId, JoinEui and DevEui are byte-slice-backed types with their own
	// JSON representation, marshaled via their MarshalProtoJSON methods.
	if len(x.NetId) > 0 || s.HasField("net_id") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("net_id")
		x.NetId.MarshalProtoJSON(s.WithField("net_id"))
	}
	if len(x.JoinEui) > 0 || s.HasField("join_eui") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("join_eui")
		x.JoinEui.MarshalProtoJSON(s.WithField("join_eui"))
	}
	if len(x.DevEui) > 0 || s.HasField("dev_eui") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("dev_eui")
		x.DevEui.MarshalProtoJSON(s.WithField("dev_eui"))
	}
	if x.RejoinCnt != 0 || s.HasField("rejoin_cnt") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("rejoin_cnt")
		s.WriteUint32(x.RejoinCnt)
	}
	s.WriteObjectEnd()
}

// MarshalJSON marshals the RejoinRequestPayload to JSON using the default
// marshaler configuration; this makes it satisfy json.Marshaler.
func (x RejoinRequestPayload) MarshalJSON() ([]byte, error) {
	return jsonplugin.DefaultMarshalerConfig.Marshal(&x)
}

// UnmarshalProtoJSON unmarshals the RejoinRequestPayload message from JSON.
// Both snake_case and camelCase keys are accepted; unknown keys are skipped.
func (x *RejoinRequestPayload) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
	if s.ReadNil() {
		return
	}
	s.ReadObject(func(key string) {
		switch key {
		default:
			s.ReadAny() // ignore unknown field
		case "rejoin_type", "rejoinType":
			s.AddField("rejoin_type")
			x.RejoinType.UnmarshalProtoJSON(s)
		case "net_id", "netId":
			s.AddField("net_id")
			x.NetId.UnmarshalProtoJSON(s.WithField("net_id", false))
		case "join_eui", "joinEui":
			s.AddField("join_eui")
			x.JoinEui.UnmarshalProtoJSON(s.WithField("join_eui", false))
		case "dev_eui", "devEui":
			s.AddField("dev_eui")
			x.DevEui.UnmarshalProtoJSON(s.WithField("dev_eui", false))
		case "rejoin_cnt", "rejoinCnt":
			s.AddField("rejoin_cnt")
			x.RejoinCnt = s.ReadUint32()
		}
	})
}

// UnmarshalJSON unmarshals the RejoinRequestPayload from JSON using the
// default unmarshaler configuration; this makes it satisfy json.Unmarshaler.
func (x *RejoinRequestPayload) UnmarshalJSON(b []byte) error {
	return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// MarshalProtoJSON marshals the JoinAcceptPayload message to JSON.
// Fields are emitted only when non-zero/non-empty/non-nil or explicitly
// requested via s.HasField; a nil receiver serializes as JSON null.
func (x *JoinAcceptPayload) MarshalProtoJSON(s *jsonplugin.MarshalState) {
	if x == nil {
		s.WriteNil()
		return
	}
	s.WriteObjectStart()
	var wroteField bool // tracks whether a field separator is needed
	if len(x.Encrypted) > 0 || s.HasField("encrypted") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("encrypted")
		s.WriteBytes(x.Encrypted)
	}
	// JoinNonce, NetId and DevAddr are byte-slice-backed types with their
	// own JSON representation, marshaled via their MarshalProtoJSON methods.
	if len(x.JoinNonce) > 0 || s.HasField("join_nonce") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("join_nonce")
		x.JoinNonce.MarshalProtoJSON(s.WithField("join_nonce"))
	}
	if len(x.NetId) > 0 || s.HasField("net_id") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("net_id")
		x.NetId.MarshalProtoJSON(s.WithField("net_id"))
	}
	if len(x.DevAddr) > 0 || s.HasField("dev_addr") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("dev_addr")
		x.DevAddr.MarshalProtoJSON(s.WithField("dev_addr"))
	}
	if x.DlSettings != nil || s.HasField("dl_settings") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("dl_settings")
		x.DlSettings.MarshalProtoJSON(s.WithField("dl_settings"))
	}
	if x.RxDelay != 0 || s.HasField("rx_delay") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("rx_delay")
		x.RxDelay.MarshalProtoJSON(s)
	}
	if x.CfList != nil || s.HasField("cf_list") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("cf_list")
		x.CfList.MarshalProtoJSON(s.WithField("cf_list"))
	}
	s.WriteObjectEnd()
}

// MarshalJSON marshals the JoinAcceptPayload to JSON using the default
// marshaler configuration; this makes it satisfy json.Marshaler.
func (x JoinAcceptPayload) MarshalJSON() ([]byte, error) {
	return jsonplugin.DefaultMarshalerConfig.Marshal(&x)
}

// UnmarshalProtoJSON unmarshals the JoinAcceptPayload message from JSON.
// Both snake_case and camelCase keys are accepted; unknown keys are skipped.
// A `return` inside the callback exits only that per-key callback.
func (x *JoinAcceptPayload) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
	if s.ReadNil() {
		return
	}
	s.ReadObject(func(key string) {
		switch key {
		default:
			s.ReadAny() // ignore unknown field
		case "encrypted":
			s.AddField("encrypted")
			x.Encrypted = s.ReadBytes()
		case "join_nonce", "joinNonce":
			s.AddField("join_nonce")
			x.JoinNonce.UnmarshalProtoJSON(s.WithField("join_nonce", false))
		case "net_id", "netId":
			s.AddField("net_id")
			x.NetId.UnmarshalProtoJSON(s.WithField("net_id", false))
		case "dev_addr", "devAddr":
			s.AddField("dev_addr")
			x.DevAddr.UnmarshalProtoJSON(s.WithField("dev_addr", false))
		case "dl_settings", "dlSettings":
			if s.ReadNil() {
				x.DlSettings = nil
				return
			}
			x.DlSettings = &DLSettings{}
			x.DlSettings.UnmarshalProtoJSON(s.WithField("dl_settings", true))
		case "rx_delay", "rxDelay":
			s.AddField("rx_delay")
			x.RxDelay.UnmarshalProtoJSON(s)
		case "cf_list", "cfList":
			if s.ReadNil() {
				x.CfList = nil
				return
			}
			x.CfList = &CFList{}
			x.CfList.UnmarshalProtoJSON(s.WithField("cf_list", true))
		}
	})
}

// UnmarshalJSON unmarshals the JoinAcceptPayload from JSON using the
// default unmarshaler configuration; this makes it satisfy json.Unmarshaler.
func (x *JoinAcceptPayload) UnmarshalJSON(b []byte) error {
	return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// MarshalProtoJSON marshals the DLSettings message to JSON.
// Fields are emitted only when non-zero/true or explicitly requested via
// s.HasField; a nil receiver serializes as JSON null.
func (x *DLSettings) MarshalProtoJSON(s *jsonplugin.MarshalState) {
	if x == nil {
		s.WriteNil()
		return
	}
	s.WriteObjectStart()
	var wroteField bool // tracks whether a field separator is needed
	if x.Rx1DrOffset != 0 || s.HasField("rx1_dr_offset") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("rx1_dr_offset")
		x.Rx1DrOffset.MarshalProtoJSON(s)
	}
	if x.Rx2Dr != 0 || s.HasField("rx2_dr") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("rx2_dr")
		x.Rx2Dr.MarshalProtoJSON(s)
	}
	if x.OptNeg || s.HasField("opt_neg") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("opt_neg")
		s.WriteBool(x.OptNeg)
	}
	s.WriteObjectEnd()
}

// MarshalJSON marshals the DLSettings to JSON using the default marshaler
// configuration; this makes DLSettings satisfy json.Marshaler.
func (x DLSettings) MarshalJSON() ([]byte, error) {
	return jsonplugin.DefaultMarshalerConfig.Marshal(&x)
}

// UnmarshalProtoJSON unmarshals the DLSettings message from JSON.
// Both snake_case and camelCase keys are accepted; unknown keys are skipped.
func (x *DLSettings) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
	if s.ReadNil() {
		return
	}
	s.ReadObject(func(key string) {
		switch key {
		default:
			s.ReadAny() // ignore unknown field
		case "rx1_dr_offset", "rx1DrOffset":
			s.AddField("rx1_dr_offset")
			x.Rx1DrOffset.UnmarshalProtoJSON(s)
		case "rx2_dr", "rx2Dr":
			s.AddField("rx2_dr")
			x.Rx2Dr.UnmarshalProtoJSON(s)
		case "opt_neg", "optNeg":
			s.AddField("opt_neg")
			x.OptNeg = s.ReadBool()
		}
	})
}

// UnmarshalJSON unmarshals the DLSettings from JSON using the default
// unmarshaler configuration; this makes DLSettings satisfy json.Unmarshaler.
func (x *DLSettings) UnmarshalJSON(b []byte) error {
	return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// MarshalProtoJSON marshals the CFList message to JSON.
// Fields are emitted only when non-zero/non-empty or explicitly requested
// via s.HasField; a nil receiver serializes as JSON null.
func (x *CFList) MarshalProtoJSON(s *jsonplugin.MarshalState) {
	if x == nil {
		s.WriteNil()
		return
	}
	s.WriteObjectStart()
	var wroteField bool // tracks whether a field separator is needed
	if x.Type != 0 || s.HasField("type") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("type")
		x.Type.MarshalProtoJSON(s)
	}
	if len(x.Freq) > 0 || s.HasField("freq") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("freq")
		s.WriteUint32Array(x.Freq)
	}
	if len(x.ChMasks) > 0 || s.HasField("ch_masks") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("ch_masks")
		s.WriteBoolArray(x.ChMasks)
	}
	s.WriteObjectEnd()
}

// MarshalJSON marshals the CFList to JSON using the default marshaler
// configuration; this makes CFList satisfy json.Marshaler.
func (x CFList) MarshalJSON() ([]byte, error) {
	return jsonplugin.DefaultMarshalerConfig.Marshal(&x)
}

// UnmarshalProtoJSON unmarshals the CFList message from JSON.
// Both snake_case and camelCase keys are accepted; unknown keys are skipped.
// A JSON null for an array field resets the slice to nil.
func (x *CFList) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
	if s.ReadNil() {
		return
	}
	s.ReadObject(func(key string) {
		switch key {
		default:
			s.ReadAny() // ignore unknown field
		case "type":
			s.AddField("type")
			x.Type.UnmarshalProtoJSON(s)
		case "freq":
			s.AddField("freq")
			if s.ReadNil() {
				x.Freq = nil
				return
			}
			x.Freq = s.ReadUint32Array()
		case "ch_masks", "chMasks":
			s.AddField("ch_masks")
			if s.ReadNil() {
				x.ChMasks = nil
				return
			}
			x.ChMasks = s.ReadBoolArray()
		}
	})
}

// UnmarshalJSON unmarshals the CFList from JSON using the default
// unmarshaler configuration; this makes CFList satisfy json.Unmarshaler.
func (x *CFList) UnmarshalJSON(b []byte) error {
	return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// MarshalProtoJSON marshals the TxRequest message to JSON.
// Fields are emitted only when non-zero/non-empty/non-nil or explicitly
// requested via s.HasField; a nil receiver serializes as JSON null.
// Message-typed fields without their own MarshalProtoJSON are delegated to
// gogo.MarshalMessage; the timestamp and struct fields use gogo helpers.
func (x *TxRequest) MarshalProtoJSON(s *jsonplugin.MarshalState) {
	if x == nil {
		s.WriteNil()
		return
	}
	s.WriteObjectStart()
	var wroteField bool // tracks whether a field separator is needed
	if x.Class != 0 || s.HasField("class") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("class")
		x.Class.MarshalProtoJSON(s)
	}
	if len(x.DownlinkPaths) > 0 || s.HasField("downlink_paths") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("downlink_paths")
		s.WriteArrayStart()
		var wroteElement bool // tracks whether an element separator is needed
		for _, element := range x.DownlinkPaths {
			s.WriteMoreIf(&wroteElement)
			// NOTE: DownlinkPath does not seem to implement MarshalProtoJSON.
			gogo.MarshalMessage(s, element)
		}
		s.WriteArrayEnd()
	}
	if x.Rx1Delay != 0 || s.HasField("rx1_delay") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("rx1_delay")
		x.Rx1Delay.MarshalProtoJSON(s)
	}
	if x.Rx1DataRate != nil || s.HasField("rx1_data_rate") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("rx1_data_rate")
		// NOTE: DataRate does not seem to implement MarshalProtoJSON.
		gogo.MarshalMessage(s, x.Rx1DataRate)
	}
	if x.Rx1Frequency != 0 || s.HasField("rx1_frequency") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("rx1_frequency")
		s.WriteUint64(x.Rx1Frequency)
	}
	if x.Rx2DataRate != nil || s.HasField("rx2_data_rate") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("rx2_data_rate")
		// NOTE: DataRate does not seem to implement MarshalProtoJSON.
		gogo.MarshalMessage(s, x.Rx2DataRate)
	}
	if x.Rx2Frequency != 0 || s.HasField("rx2_frequency") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("rx2_frequency")
		s.WriteUint64(x.Rx2Frequency)
	}
	if x.Priority != 0 || s.HasField("priority") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("priority")
		x.Priority.MarshalProtoJSON(s)
	}
	if x.AbsoluteTime != nil || s.HasField("absolute_time") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("absolute_time")
		// The nil check below handles the field-mask-requested case where
		// the field itself is unset: it is then written as null.
		if x.AbsoluteTime == nil {
			s.WriteNil()
		} else {
			gogo.MarshalTimestamp(s, x.AbsoluteTime)
		}
	}
	if x.FrequencyPlanId != "" || s.HasField("frequency_plan_id") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("frequency_plan_id")
		s.WriteString(x.FrequencyPlanId)
	}
	if x.Advanced != nil || s.HasField("advanced") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("advanced")
		if x.Advanced == nil {
			s.WriteNil()
		} else {
			gogo.MarshalStruct(s, x.Advanced)
		}
	}
	s.WriteObjectEnd()
}

// MarshalJSON marshals the TxRequest to JSON using the default marshaler
// configuration; this makes TxRequest satisfy json.Marshaler.
func (x TxRequest) MarshalJSON() ([]byte, error) {
	return jsonplugin.DefaultMarshalerConfig.Marshal(&x)
}
// UnmarshalProtoJSON unmarshals the TxRequest message from JSON.
// Both snake_case and camelCase keys are accepted; unknown keys are skipped.
// A `return` inside the callback exits only that per-key callback.
func (x *TxRequest) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
	if s.ReadNil() {
		return
	}
	s.ReadObject(func(key string) {
		switch key {
		default:
			s.ReadAny() // ignore unknown field
		case "class":
			s.AddField("class")
			x.Class.UnmarshalProtoJSON(s)
		case "downlink_paths", "downlinkPaths":
			s.AddField("downlink_paths")
			if s.ReadNil() {
				x.DownlinkPaths = nil
				return
			}
			// NOTE: elements are appended, so repeated keys accumulate paths.
			s.ReadArray(func() {
				// NOTE: DownlinkPath does not seem to implement UnmarshalProtoJSON.
				var v DownlinkPath
				gogo.UnmarshalMessage(s, &v)
				x.DownlinkPaths = append(x.DownlinkPaths, &v)
			})
		case "rx1_delay", "rx1Delay":
			s.AddField("rx1_delay")
			x.Rx1Delay.UnmarshalProtoJSON(s)
		case "rx1_data_rate", "rx1DataRate":
			s.AddField("rx1_data_rate")
			if s.ReadNil() {
				x.Rx1DataRate = nil
				return
			}
			// NOTE: DataRate does not seem to implement UnmarshalProtoJSON.
			var v DataRate
			gogo.UnmarshalMessage(s, &v)
			x.Rx1DataRate = &v
		case "rx1_frequency", "rx1Frequency":
			s.AddField("rx1_frequency")
			x.Rx1Frequency = s.ReadUint64()
		case "rx2_data_rate", "rx2DataRate":
			s.AddField("rx2_data_rate")
			if s.ReadNil() {
				x.Rx2DataRate = nil
				return
			}
			// NOTE: DataRate does not seem to implement UnmarshalProtoJSON.
			var v DataRate
			gogo.UnmarshalMessage(s, &v)
			x.Rx2DataRate = &v
		case "rx2_frequency", "rx2Frequency":
			s.AddField("rx2_frequency")
			x.Rx2Frequency = s.ReadUint64()
		case "priority":
			s.AddField("priority")
			x.Priority.UnmarshalProtoJSON(s)
		case "absolute_time", "absoluteTime":
			s.AddField("absolute_time")
			if s.ReadNil() {
				x.AbsoluteTime = nil
				return
			}
			v := gogo.UnmarshalTimestamp(s)
			// On a timestamp parse error, leave the previous value in place.
			if s.Err() != nil {
				return
			}
			x.AbsoluteTime = v
		case "frequency_plan_id", "frequencyPlanId":
			s.AddField("frequency_plan_id")
			x.FrequencyPlanId = s.ReadString()
		case "advanced":
			s.AddField("advanced")
			if s.ReadNil() {
				x.Advanced = nil
				return
			}
			v := gogo.UnmarshalStruct(s)
			// On a struct parse error, leave the previous value in place.
			if s.Err() != nil {
				return
			}
			x.Advanced = v
		}
	})
}

// UnmarshalJSON unmarshals the TxRequest from JSON using the default
// unmarshaler configuration; this makes TxRequest satisfy json.Unmarshaler.
func (x *TxRequest) UnmarshalJSON(b []byte) error {
	return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// MarshalProtoJSON marshals the MACCommand_ResetInd message to JSON.
// The single enum field is emitted only when non-zero or explicitly
// requested via s.HasField; a nil receiver serializes as JSON null.
func (x *MACCommand_ResetInd) MarshalProtoJSON(s *jsonplugin.MarshalState) {
	if x == nil {
		s.WriteNil()
		return
	}
	s.WriteObjectStart()
	var wroteField bool // tracks whether a field separator is needed
	if x.MinorVersion != 0 || s.HasField("minor_version") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("minor_version")
		x.MinorVersion.MarshalProtoJSON(s)
	}
	s.WriteObjectEnd()
}

// MarshalJSON marshals the MACCommand_ResetInd to JSON using the default
// marshaler configuration; this makes it satisfy json.Marshaler.
func (x MACCommand_ResetInd) MarshalJSON() ([]byte, error) {
	return jsonplugin.DefaultMarshalerConfig.Marshal(&x)
}

// UnmarshalProtoJSON unmarshals the MACCommand_ResetInd message from JSON.
// Both snake_case and camelCase keys are accepted; unknown keys are skipped.
func (x *MACCommand_ResetInd) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
	if s.ReadNil() {
		return
	}
	s.ReadObject(func(key string) {
		switch key {
		default:
			s.ReadAny() // ignore unknown field
		case "minor_version", "minorVersion":
			s.AddField("minor_version")
			x.MinorVersion.UnmarshalProtoJSON(s)
		}
	})
}

// UnmarshalJSON unmarshals the MACCommand_ResetInd from JSON using the
// default unmarshaler configuration; this makes it satisfy json.Unmarshaler.
func (x *MACCommand_ResetInd) UnmarshalJSON(b []byte) error {
	return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// MarshalProtoJSON marshals the MACCommand_ResetConf message to JSON.
// The single enum field is emitted only when non-zero or explicitly
// requested via s.HasField; a nil receiver serializes as JSON null.
func (x *MACCommand_ResetConf) MarshalProtoJSON(s *jsonplugin.MarshalState) {
	if x == nil {
		s.WriteNil()
		return
	}
	s.WriteObjectStart()
	var wroteField bool // tracks whether a field separator is needed
	if x.MinorVersion != 0 || s.HasField("minor_version") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("minor_version")
		x.MinorVersion.MarshalProtoJSON(s)
	}
	s.WriteObjectEnd()
}

// MarshalJSON marshals the MACCommand_ResetConf to JSON using the default
// marshaler configuration; this makes it satisfy json.Marshaler.
func (x MACCommand_ResetConf) MarshalJSON() ([]byte, error) {
	return jsonplugin.DefaultMarshalerConfig.Marshal(&x)
}

// UnmarshalProtoJSON unmarshals the MACCommand_ResetConf message from JSON.
// Both snake_case and camelCase keys are accepted; unknown keys are skipped.
func (x *MACCommand_ResetConf) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
	if s.ReadNil() {
		return
	}
	s.ReadObject(func(key string) {
		switch key {
		default:
			s.ReadAny() // ignore unknown field
		case "minor_version", "minorVersion":
			s.AddField("minor_version")
			x.MinorVersion.UnmarshalProtoJSON(s)
		}
	})
}

// UnmarshalJSON unmarshals the MACCommand_ResetConf from JSON using the
// default unmarshaler configuration; this makes it satisfy json.Unmarshaler.
func (x *MACCommand_ResetConf) UnmarshalJSON(b []byte) error {
	return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// MarshalProtoJSON marshals the MACCommand_LinkADRReq message to JSON.
// Fields are emitted only when non-zero/non-empty or explicitly requested
// via s.HasField; a nil receiver serializes as JSON null.
func (x *MACCommand_LinkADRReq) MarshalProtoJSON(s *jsonplugin.MarshalState) {
	if x == nil {
		s.WriteNil()
		return
	}
	s.WriteObjectStart()
	var wroteField bool // tracks whether a field separator is needed
	if x.DataRateIndex != 0 || s.HasField("data_rate_index") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("data_rate_index")
		x.DataRateIndex.MarshalProtoJSON(s)
	}
	if x.TxPowerIndex != 0 || s.HasField("tx_power_index") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("tx_power_index")
		s.WriteUint32(x.TxPowerIndex)
	}
	if len(x.ChannelMask) > 0 || s.HasField("channel_mask") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("channel_mask")
		s.WriteBoolArray(x.ChannelMask)
	}
	if x.ChannelMaskControl != 0 || s.HasField("channel_mask_control") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("channel_mask_control")
		s.WriteUint32(x.ChannelMaskControl)
	}
	if x.NbTrans != 0 || s.HasField("nb_trans") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("nb_trans")
		s.WriteUint32(x.NbTrans)
	}
	s.WriteObjectEnd()
}

// MarshalJSON marshals the MACCommand_LinkADRReq to JSON using the default
// marshaler configuration; this makes it satisfy json.Marshaler.
func (x MACCommand_LinkADRReq) MarshalJSON() ([]byte, error) {
	return jsonplugin.DefaultMarshalerConfig.Marshal(&x)
}

// UnmarshalProtoJSON unmarshals the MACCommand_LinkADRReq message from JSON.
// Both snake_case and camelCase keys are accepted; unknown keys are skipped.
// A JSON null for channel_mask resets the slice to nil.
func (x *MACCommand_LinkADRReq) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
	if s.ReadNil() {
		return
	}
	s.ReadObject(func(key string) {
		switch key {
		default:
			s.ReadAny() // ignore unknown field
		case "data_rate_index", "dataRateIndex":
			s.AddField("data_rate_index")
			x.DataRateIndex.UnmarshalProtoJSON(s)
		case "tx_power_index", "txPowerIndex":
			s.AddField("tx_power_index")
			x.TxPowerIndex = s.ReadUint32()
		case "channel_mask", "channelMask":
			s.AddField("channel_mask")
			if s.ReadNil() {
				x.ChannelMask = nil
				return
			}
			x.ChannelMask = s.ReadBoolArray()
		case "channel_mask_control", "channelMaskControl":
			s.AddField("channel_mask_control")
			x.ChannelMaskControl = s.ReadUint32()
		case "nb_trans", "nbTrans":
			s.AddField("nb_trans")
			x.NbTrans = s.ReadUint32()
		}
	})
}

// UnmarshalJSON unmarshals the MACCommand_LinkADRReq from JSON using the
// default unmarshaler configuration; this makes it satisfy json.Unmarshaler.
func (x *MACCommand_LinkADRReq) UnmarshalJSON(b []byte) error {
	return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// MarshalProtoJSON marshals the MACCommand_DutyCycleReq message to JSON.
// The single enum field is emitted only when non-zero or explicitly
// requested via s.HasField; a nil receiver serializes as JSON null.
func (x *MACCommand_DutyCycleReq) MarshalProtoJSON(s *jsonplugin.MarshalState) {
	if x == nil {
		s.WriteNil()
		return
	}
	s.WriteObjectStart()
	var wroteField bool // tracks whether a field separator is needed
	if x.MaxDutyCycle != 0 || s.HasField("max_duty_cycle") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("max_duty_cycle")
		x.MaxDutyCycle.MarshalProtoJSON(s)
	}
	s.WriteObjectEnd()
}

// MarshalJSON marshals the MACCommand_DutyCycleReq to JSON using the default
// marshaler configuration; this makes it satisfy json.Marshaler.
func (x MACCommand_DutyCycleReq) MarshalJSON() ([]byte, error) {
	return jsonplugin.DefaultMarshalerConfig.Marshal(&x)
}

// UnmarshalProtoJSON unmarshals the MACCommand_DutyCycleReq message from JSON.
// Both snake_case and camelCase keys are accepted; unknown keys are skipped.
func (x *MACCommand_DutyCycleReq) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
	if s.ReadNil() {
		return
	}
	s.ReadObject(func(key string) {
		switch key {
		default:
			s.ReadAny() // ignore unknown field
		case "max_duty_cycle", "maxDutyCycle":
			s.AddField("max_duty_cycle")
			x.MaxDutyCycle.UnmarshalProtoJSON(s)
		}
	})
}

// UnmarshalJSON unmarshals the MACCommand_DutyCycleReq from JSON using the
// default unmarshaler configuration; this makes it satisfy json.Unmarshaler.
func (x *MACCommand_DutyCycleReq) UnmarshalJSON(b []byte) error {
	return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// MarshalProtoJSON marshals the MACCommand_RxParamSetupReq message to JSON.
// Fields are emitted only when non-zero or explicitly requested via
// s.HasField; a nil receiver serializes as JSON null.
func (x *MACCommand_RxParamSetupReq) MarshalProtoJSON(s *jsonplugin.MarshalState) {
	if x == nil {
		s.WriteNil()
		return
	}
	s.WriteObjectStart()
	var wroteField bool // tracks whether a field separator is needed
	if x.Rx2DataRateIndex != 0 || s.HasField("rx2_data_rate_index") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("rx2_data_rate_index")
		x.Rx2DataRateIndex.MarshalProtoJSON(s)
	}
	if x.Rx1DataRateOffset != 0 || s.HasField("rx1_data_rate_offset") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("rx1_data_rate_offset")
		x.Rx1DataRateOffset.MarshalProtoJSON(s)
	}
	if x.Rx2Frequency != 0 || s.HasField("rx2_frequency") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("rx2_frequency")
		s.WriteUint64(x.Rx2Frequency)
	}
	s.WriteObjectEnd()
}

// MarshalJSON marshals the MACCommand_RxParamSetupReq to JSON using the
// default marshaler configuration; this makes it satisfy json.Marshaler.
func (x MACCommand_RxParamSetupReq) MarshalJSON() ([]byte, error) {
	return jsonplugin.DefaultMarshalerConfig.Marshal(&x)
}

// UnmarshalProtoJSON unmarshals the MACCommand_RxParamSetupReq message from JSON.
// Both snake_case and camelCase keys are accepted; unknown keys are skipped.
func (x *MACCommand_RxParamSetupReq) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
	if s.ReadNil() {
		return
	}
	s.ReadObject(func(key string) {
		switch key {
		default:
			s.ReadAny() // ignore unknown field
		case "rx2_data_rate_index", "rx2DataRateIndex":
			s.AddField("rx2_data_rate_index")
			x.Rx2DataRateIndex.UnmarshalProtoJSON(s)
		case "rx1_data_rate_offset", "rx1DataRateOffset":
			s.AddField("rx1_data_rate_offset")
			x.Rx1DataRateOffset.UnmarshalProtoJSON(s)
		case "rx2_frequency", "rx2Frequency":
			s.AddField("rx2_frequency")
			x.Rx2Frequency = s.ReadUint64()
		}
	})
}

// UnmarshalJSON unmarshals the MACCommand_RxParamSetupReq from JSON using the
// default unmarshaler configuration; this makes it satisfy json.Unmarshaler.
func (x *MACCommand_RxParamSetupReq) UnmarshalJSON(b []byte) error {
	return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// MarshalProtoJSON marshals the MACCommand_NewChannelReq message to JSON.
// Fields are emitted only when non-zero or explicitly requested via
// s.HasField; a nil receiver serializes as JSON null.
func (x *MACCommand_NewChannelReq) MarshalProtoJSON(s *jsonplugin.MarshalState) {
	if x == nil {
		s.WriteNil()
		return
	}
	s.WriteObjectStart()
	var wroteField bool // tracks whether a field separator is needed
	if x.ChannelIndex != 0 || s.HasField("channel_index") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("channel_index")
		s.WriteUint32(x.ChannelIndex)
	}
	if x.Frequency != 0 || s.HasField("frequency") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("frequency")
		s.WriteUint64(x.Frequency)
	}
	if x.MinDataRateIndex != 0 || s.HasField("min_data_rate_index") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("min_data_rate_index")
		x.MinDataRateIndex.MarshalProtoJSON(s)
	}
	if x.MaxDataRateIndex != 0 || s.HasField("max_data_rate_index") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("max_data_rate_index")
		x.MaxDataRateIndex.MarshalProtoJSON(s)
	}
	s.WriteObjectEnd()
}

// MarshalJSON marshals the MACCommand_NewChannelReq to JSON using the
// default marshaler configuration; this makes it satisfy json.Marshaler.
func (x MACCommand_NewChannelReq) MarshalJSON() ([]byte, error) {
	return jsonplugin.DefaultMarshalerConfig.Marshal(&x)
}

// UnmarshalProtoJSON unmarshals the MACCommand_NewChannelReq message from JSON.
// Both snake_case and camelCase keys are accepted; unknown keys are skipped.
func (x *MACCommand_NewChannelReq) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
	if s.ReadNil() {
		return
	}
	s.ReadObject(func(key string) {
		switch key {
		default:
			s.ReadAny() // ignore unknown field
		case "channel_index", "channelIndex":
			s.AddField("channel_index")
			x.ChannelIndex = s.ReadUint32()
		case "frequency":
			s.AddField("frequency")
			x.Frequency = s.ReadUint64()
		case "min_data_rate_index", "minDataRateIndex":
			s.AddField("min_data_rate_index")
			x.MinDataRateIndex.UnmarshalProtoJSON(s)
		case "max_data_rate_index", "maxDataRateIndex":
			s.AddField("max_data_rate_index")
			x.MaxDataRateIndex.UnmarshalProtoJSON(s)
		}
	})
}

// UnmarshalJSON unmarshals the MACCommand_NewChannelReq from JSON using the
// default unmarshaler configuration; this makes it satisfy json.Unmarshaler.
func (x *MACCommand_NewChannelReq) UnmarshalJSON(b []byte) error {
	return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// MarshalProtoJSON marshals the MACCommand_RxTimingSetupReq message to JSON.
// The single enum field is emitted only when non-zero or explicitly
// requested via s.HasField; a nil receiver serializes as JSON null.
func (x *MACCommand_RxTimingSetupReq) MarshalProtoJSON(s *jsonplugin.MarshalState) {
	if x == nil {
		s.WriteNil()
		return
	}
	s.WriteObjectStart()
	var wroteField bool // tracks whether a field separator is needed
	if x.Delay != 0 || s.HasField("delay") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("delay")
		x.Delay.MarshalProtoJSON(s)
	}
	s.WriteObjectEnd()
}

// MarshalJSON marshals the MACCommand_RxTimingSetupReq to JSON using the
// default marshaler configuration; this makes it satisfy json.Marshaler.
func (x MACCommand_RxTimingSetupReq) MarshalJSON() ([]byte, error) {
	return jsonplugin.DefaultMarshalerConfig.Marshal(&x)
}

// UnmarshalProtoJSON unmarshals the MACCommand_RxTimingSetupReq message from JSON.
// Unknown keys are skipped.
func (x *MACCommand_RxTimingSetupReq) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
	if s.ReadNil() {
		return
	}
	s.ReadObject(func(key string) {
		switch key {
		default:
			s.ReadAny() // ignore unknown field
		case "delay":
			s.AddField("delay")
			x.Delay.UnmarshalProtoJSON(s)
		}
	})
}

// UnmarshalJSON unmarshals the MACCommand_RxTimingSetupReq from JSON using the
// default unmarshaler configuration; this makes it satisfy json.Unmarshaler.
func (x *MACCommand_RxTimingSetupReq) UnmarshalJSON(b []byte) error {
	return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// MarshalProtoJSON marshals the MACCommand_TxParamSetupReq message to JSON.
// Fields are emitted only when non-zero/true or explicitly requested via
// s.HasField; a nil receiver serializes as JSON null.
func (x *MACCommand_TxParamSetupReq) MarshalProtoJSON(s *jsonplugin.MarshalState) {
	if x == nil {
		s.WriteNil()
		return
	}
	s.WriteObjectStart()
	var wroteField bool // tracks whether a field separator is needed
	if x.MaxEirpIndex != 0 || s.HasField("max_eirp_index") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("max_eirp_index")
		x.MaxEirpIndex.MarshalProtoJSON(s)
	}
	if x.UplinkDwellTime || s.HasField("uplink_dwell_time") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("uplink_dwell_time")
		s.WriteBool(x.UplinkDwellTime)
	}
	if x.DownlinkDwellTime || s.HasField("downlink_dwell_time") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("downlink_dwell_time")
		s.WriteBool(x.DownlinkDwellTime)
	}
	s.WriteObjectEnd()
}

// MarshalJSON marshals the MACCommand_TxParamSetupReq to JSON using the
// default marshaler configuration; this makes it satisfy json.Marshaler.
func (x MACCommand_TxParamSetupReq) MarshalJSON() ([]byte, error) {
	return jsonplugin.DefaultMarshalerConfig.Marshal(&x)
}

// UnmarshalProtoJSON unmarshals the MACCommand_TxParamSetupReq message from JSON.
// Both snake_case and camelCase keys are accepted; unknown keys are skipped.
func (x *MACCommand_TxParamSetupReq) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
	if s.ReadNil() {
		return
	}
	s.ReadObject(func(key string) {
		switch key {
		default:
			s.ReadAny() // ignore unknown field
		case "max_eirp_index", "maxEirpIndex":
			s.AddField("max_eirp_index")
			x.MaxEirpIndex.UnmarshalProtoJSON(s)
		case "uplink_dwell_time", "uplinkDwellTime":
			s.AddField("uplink_dwell_time")
			x.UplinkDwellTime = s.ReadBool()
		case "downlink_dwell_time", "downlinkDwellTime":
			s.AddField("downlink_dwell_time")
			x.DownlinkDwellTime = s.ReadBool()
		}
	})
}

// UnmarshalJSON unmarshals the MACCommand_TxParamSetupReq from JSON using the
// default unmarshaler configuration; this makes it satisfy json.Unmarshaler.
func (x *MACCommand_TxParamSetupReq) UnmarshalJSON(b []byte) error {
	return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// MarshalProtoJSON marshals the MACCommand_RekeyInd message to JSON.
// The single enum field is emitted only when non-zero or explicitly
// requested via s.HasField; a nil receiver serializes as JSON null.
func (x *MACCommand_RekeyInd) MarshalProtoJSON(s *jsonplugin.MarshalState) {
	if x == nil {
		s.WriteNil()
		return
	}
	s.WriteObjectStart()
	var wroteField bool // tracks whether a field separator is needed
	if x.MinorVersion != 0 || s.HasField("minor_version") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("minor_version")
		x.MinorVersion.MarshalProtoJSON(s)
	}
	s.WriteObjectEnd()
}

// MarshalJSON marshals the MACCommand_RekeyInd to JSON using the default
// marshaler configuration; this makes it satisfy json.Marshaler.
func (x MACCommand_RekeyInd) MarshalJSON() ([]byte, error) {
	return jsonplugin.DefaultMarshalerConfig.Marshal(&x)
}

// UnmarshalProtoJSON unmarshals the MACCommand_RekeyInd message from JSON.
// Both snake_case and camelCase keys are accepted; unknown keys are skipped.
func (x *MACCommand_RekeyInd) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
	if s.ReadNil() {
		return
	}
	s.ReadObject(func(key string) {
		switch key {
		default:
			s.ReadAny() // ignore unknown field
		case "minor_version", "minorVersion":
			s.AddField("minor_version")
			x.MinorVersion.UnmarshalProtoJSON(s)
		}
	})
}

// UnmarshalJSON unmarshals the MACCommand_RekeyInd from JSON using the
// default unmarshaler configuration; this makes it satisfy json.Unmarshaler.
func (x *MACCommand_RekeyInd) UnmarshalJSON(b []byte) error {
	return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// MarshalProtoJSON marshals the MACCommand_RekeyConf message to JSON.
// The single enum field is emitted only when non-zero or explicitly
// requested via s.HasField; a nil receiver serializes as JSON null.
func (x *MACCommand_RekeyConf) MarshalProtoJSON(s *jsonplugin.MarshalState) {
	if x == nil {
		s.WriteNil()
		return
	}
	s.WriteObjectStart()
	var wroteField bool // tracks whether a field separator is needed
	if x.MinorVersion != 0 || s.HasField("minor_version") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("minor_version")
		x.MinorVersion.MarshalProtoJSON(s)
	}
	s.WriteObjectEnd()
}

// MarshalJSON marshals the MACCommand_RekeyConf to JSON using the default
// marshaler configuration; this makes it satisfy json.Marshaler.
func (x MACCommand_RekeyConf) MarshalJSON() ([]byte, error) {
	return jsonplugin.DefaultMarshalerConfig.Marshal(&x)
}

// UnmarshalProtoJSON unmarshals the MACCommand_RekeyConf message from JSON.
// Both snake_case and camelCase keys are accepted; unknown keys are skipped.
func (x *MACCommand_RekeyConf) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
	if s.ReadNil() {
		return
	}
	s.ReadObject(func(key string) {
		switch key {
		default:
			s.ReadAny() // ignore unknown field
		case "minor_version", "minorVersion":
			s.AddField("minor_version")
			x.MinorVersion.UnmarshalProtoJSON(s)
		}
	})
}

// UnmarshalJSON unmarshals the MACCommand_RekeyConf from JSON using the
// default unmarshaler configuration; this makes it satisfy json.Unmarshaler.
func (x *MACCommand_RekeyConf) UnmarshalJSON(b []byte) error {
	return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// MarshalProtoJSON marshals the MACCommand_ADRParamSetupReq message to JSON.
// Fields are emitted only when non-zero or explicitly requested via
// s.HasField; a nil receiver serializes as JSON null.
func (x *MACCommand_ADRParamSetupReq) MarshalProtoJSON(s *jsonplugin.MarshalState) {
	if x == nil {
		s.WriteNil()
		return
	}
	s.WriteObjectStart()
	var wroteField bool // tracks whether a field separator is needed
	if x.AdrAckLimitExponent != 0 || s.HasField("adr_ack_limit_exponent") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("adr_ack_limit_exponent")
		x.AdrAckLimitExponent.MarshalProtoJSON(s)
	}
	if x.AdrAckDelayExponent != 0 || s.HasField("adr_ack_delay_exponent") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("adr_ack_delay_exponent")
		x.AdrAckDelayExponent.MarshalProtoJSON(s)
	}
	s.WriteObjectEnd()
}

// MarshalJSON marshals the MACCommand_ADRParamSetupReq to JSON using the
// default marshaler configuration; this makes it satisfy json.Marshaler.
func (x MACCommand_ADRParamSetupReq) MarshalJSON() ([]byte, error) {
	return jsonplugin.DefaultMarshalerConfig.Marshal(&x)
}

// UnmarshalProtoJSON unmarshals the MACCommand_ADRParamSetupReq message from JSON.
// Both snake_case and camelCase keys are accepted; unknown keys are skipped.
func (x *MACCommand_ADRParamSetupReq) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
	if s.ReadNil() {
		return
	}
	s.ReadObject(func(key string) {
		switch key {
		default:
			s.ReadAny() // ignore unknown field
		case "adr_ack_limit_exponent", "adrAckLimitExponent":
			s.AddField("adr_ack_limit_exponent")
			x.AdrAckLimitExponent.UnmarshalProtoJSON(s)
		case "adr_ack_delay_exponent", "adrAckDelayExponent":
			s.AddField("adr_ack_delay_exponent")
			x.AdrAckDelayExponent.UnmarshalProtoJSON(s)
		}
	})
}

// UnmarshalJSON unmarshals the MACCommand_ADRParamSetupReq from JSON using the
// default unmarshaler configuration; this makes it satisfy json.Unmarshaler.
func (x *MACCommand_ADRParamSetupReq) UnmarshalJSON(b []byte) error {
	return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// MarshalProtoJSON marshals the MACCommand_ForceRejoinReq message to JSON.
func (x *MACCommand_ForceRejoinReq) MarshalProtoJSON(s *jsonplugin.MarshalState) {
	if x == nil {
		s.WriteNil()
		return
	}
	s.WriteObjectStart()
	var wroteField bool
	// Fields are written only when non-zero or explicitly requested by a field mask.
	if x.RejoinType != 0 || s.HasField("rejoin_type") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("rejoin_type")
		x.RejoinType.MarshalProtoJSON(s)
	}
	if x.DataRateIndex != 0 || s.HasField("data_rate_index") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("data_rate_index")
		x.DataRateIndex.MarshalProtoJSON(s)
	}
	if x.MaxRetries != 0 || s.HasField("max_retries") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("max_retries")
		s.WriteUint32(x.MaxRetries) // plain scalar: written directly, no custom marshaler
	}
	if x.PeriodExponent != 0 || s.HasField("period_exponent") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("period_exponent")
		x.PeriodExponent.MarshalProtoJSON(s)
	}
	s.WriteObjectEnd()
}

// MarshalJSON marshals the MACCommand_ForceRejoinReq to JSON.
func (x MACCommand_ForceRejoinReq) MarshalJSON() ([]byte, error) {
	return jsonplugin.DefaultMarshalerConfig.Marshal(&x)
}

// UnmarshalProtoJSON unmarshals the MACCommand_ForceRejoinReq message from JSON.
func (x *MACCommand_ForceRejoinReq) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
	if s.ReadNil() {
		return
	}
	s.ReadObject(func(key string) {
		switch key {
		default:
			s.ReadAny() // ignore unknown field
		case "rejoin_type", "rejoinType":
			s.AddField("rejoin_type")
			x.RejoinType.UnmarshalProtoJSON(s)
		case "data_rate_index", "dataRateIndex":
			s.AddField("data_rate_index")
			x.DataRateIndex.UnmarshalProtoJSON(s)
		case "max_retries", "maxRetries":
			s.AddField("max_retries")
			x.MaxRetries = s.ReadUint32()
		case "period_exponent", "periodExponent":
			s.AddField("period_exponent")
			x.PeriodExponent.UnmarshalProtoJSON(s)
		}
	})
}

// UnmarshalJSON unmarshals the MACCommand_ForceRejoinReq from JSON.
func (x *MACCommand_ForceRejoinReq) UnmarshalJSON(b []byte) error {
	return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// MarshalProtoJSON marshals the MACCommand_RejoinParamSetupReq message to JSON.
func (x *MACCommand_RejoinParamSetupReq) MarshalProtoJSON(s *jsonplugin.MarshalState) {
	if x == nil {
		s.WriteNil()
		return
	}
	s.WriteObjectStart()
	var wroteField bool
	// Fields are written only when non-zero or explicitly requested by a field mask.
	if x.MaxCountExponent != 0 || s.HasField("max_count_exponent") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("max_count_exponent")
		x.MaxCountExponent.MarshalProtoJSON(s)
	}
	if x.MaxTimeExponent != 0 || s.HasField("max_time_exponent") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("max_time_exponent")
		x.MaxTimeExponent.MarshalProtoJSON(s)
	}
	s.WriteObjectEnd()
}

// MarshalJSON marshals the MACCommand_RejoinParamSetupReq to JSON.
func (x MACCommand_RejoinParamSetupReq) MarshalJSON() ([]byte, error) {
	return jsonplugin.DefaultMarshalerConfig.Marshal(&x)
}

// UnmarshalProtoJSON unmarshals the MACCommand_RejoinParamSetupReq message from JSON.
func (x *MACCommand_RejoinParamSetupReq) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
	if s.ReadNil() {
		return
	}
	s.ReadObject(func(key string) {
		switch key {
		default:
			s.ReadAny() // ignore unknown field
		case "max_count_exponent", "maxCountExponent":
			s.AddField("max_count_exponent")
			x.MaxCountExponent.UnmarshalProtoJSON(s)
		case "max_time_exponent", "maxTimeExponent":
			s.AddField("max_time_exponent")
			x.MaxTimeExponent.UnmarshalProtoJSON(s)
		}
	})
}

// UnmarshalJSON unmarshals the MACCommand_RejoinParamSetupReq from JSON.
func (x *MACCommand_RejoinParamSetupReq) UnmarshalJSON(b []byte) error {
	return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// MarshalProtoJSON marshals the MACCommand_PingSlotInfoReq message to JSON.
func (x *MACCommand_PingSlotInfoReq) MarshalProtoJSON(s *jsonplugin.MarshalState) {
	if x == nil {
		s.WriteNil()
		return
	}
	s.WriteObjectStart()
	var wroteField bool
	// Emit the field when it is non-zero, or when the caller explicitly selected it.
	if x.Period != 0 || s.HasField("period") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("period")
		x.Period.MarshalProtoJSON(s)
	}
	s.WriteObjectEnd()
}

// MarshalJSON marshals the MACCommand_PingSlotInfoReq to JSON.
func (x MACCommand_PingSlotInfoReq) MarshalJSON() ([]byte, error) {
	return jsonplugin.DefaultMarshalerConfig.Marshal(&x)
}

// UnmarshalProtoJSON unmarshals the MACCommand_PingSlotInfoReq message from JSON.
func (x *MACCommand_PingSlotInfoReq) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
	if s.ReadNil() {
		return
	}
	s.ReadObject(func(key string) {
		switch key {
		default:
			s.ReadAny() // ignore unknown field
		case "period": // snake_case and camelCase coincide for single-word fields
			s.AddField("period")
			x.Period.UnmarshalProtoJSON(s)
		}
	})
}

// UnmarshalJSON unmarshals the MACCommand_PingSlotInfoReq from JSON.
func (x *MACCommand_PingSlotInfoReq) UnmarshalJSON(b []byte) error {
	return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// MarshalProtoJSON marshals the MACCommand_PingSlotChannelReq message to JSON.
func (x *MACCommand_PingSlotChannelReq) MarshalProtoJSON(s *jsonplugin.MarshalState) {
	if x == nil {
		s.WriteNil()
		return
	}
	s.WriteObjectStart()
	var wroteField bool
	// Fields are written only when non-zero or explicitly requested by a field mask.
	if x.Frequency != 0 || s.HasField("frequency") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("frequency")
		s.WriteUint64(x.Frequency) // plain scalar: written directly, no custom marshaler
	}
	if x.DataRateIndex != 0 || s.HasField("data_rate_index") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("data_rate_index")
		x.DataRateIndex.MarshalProtoJSON(s)
	}
	s.WriteObjectEnd()
}

// MarshalJSON marshals the MACCommand_PingSlotChannelReq to JSON.
func (x MACCommand_PingSlotChannelReq) MarshalJSON() ([]byte, error) {
	return jsonplugin.DefaultMarshalerConfig.Marshal(&x)
}

// UnmarshalProtoJSON unmarshals the MACCommand_PingSlotChannelReq message from JSON.
func (x *MACCommand_PingSlotChannelReq) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
	if s.ReadNil() {
		return
	}
	s.ReadObject(func(key string) {
		switch key {
		default:
			s.ReadAny() // ignore unknown field
		case "frequency":
			s.AddField("frequency")
			x.Frequency = s.ReadUint64()
		case "data_rate_index", "dataRateIndex":
			s.AddField("data_rate_index")
			x.DataRateIndex.UnmarshalProtoJSON(s)
		}
	})
}

// UnmarshalJSON unmarshals the MACCommand_PingSlotChannelReq from JSON.
func (x *MACCommand_PingSlotChannelReq) UnmarshalJSON(b []byte) error {
	return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// MarshalProtoJSON marshals the MACCommand_DeviceModeInd message to JSON.
func (x *MACCommand_DeviceModeInd) MarshalProtoJSON(s *jsonplugin.MarshalState) {
	if x == nil {
		s.WriteNil()
		return
	}
	s.WriteObjectStart()
	var wroteField bool
	// Emit the field when it is non-zero, or when the caller explicitly selected it.
	if x.Class != 0 || s.HasField("class") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("class")
		x.Class.MarshalProtoJSON(s)
	}
	s.WriteObjectEnd()
}

// MarshalJSON marshals the MACCommand_DeviceModeInd to JSON.
func (x MACCommand_DeviceModeInd) MarshalJSON() ([]byte, error) {
	return jsonplugin.DefaultMarshalerConfig.Marshal(&x)
}

// UnmarshalProtoJSON unmarshals the MACCommand_DeviceModeInd message from JSON.
func (x *MACCommand_DeviceModeInd) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
	if s.ReadNil() {
		return
	}
	s.ReadObject(func(key string) {
		switch key {
		default:
			s.ReadAny() // ignore unknown field
		case "class": // snake_case and camelCase coincide for single-word fields
			s.AddField("class")
			x.Class.UnmarshalProtoJSON(s)
		}
	})
}

// UnmarshalJSON unmarshals the MACCommand_DeviceModeInd from JSON.
func (x *MACCommand_DeviceModeInd) UnmarshalJSON(b []byte) error {
	return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// MarshalProtoJSON marshals the MACCommand_DeviceModeConf message to JSON.
func (x *MACCommand_DeviceModeConf) MarshalProtoJSON(s *jsonplugin.MarshalState) {
	if x == nil {
		s.WriteNil()
		return
	}
	s.WriteObjectStart()
	var wroteField bool
	// Emit the field when it is non-zero, or when the caller explicitly selected it.
	if x.Class != 0 || s.HasField("class") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("class")
		x.Class.MarshalProtoJSON(s)
	}
	s.WriteObjectEnd()
}

// MarshalJSON marshals the MACCommand_DeviceModeConf to JSON.
func (x MACCommand_DeviceModeConf) MarshalJSON() ([]byte, error) {
	return jsonplugin.DefaultMarshalerConfig.Marshal(&x)
}

// UnmarshalProtoJSON unmarshals the MACCommand_DeviceModeConf message from JSON.
func (x *MACCommand_DeviceModeConf) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
	if s.ReadNil() {
		return
	}
	s.ReadObject(func(key string) {
		switch key {
		default:
			s.ReadAny() // ignore unknown field
		case "class": // snake_case and camelCase coincide for single-word fields
			s.AddField("class")
			x.Class.UnmarshalProtoJSON(s)
		}
	})
}

// UnmarshalJSON unmarshals the MACCommand_DeviceModeConf from JSON.
func (x *MACCommand_DeviceModeConf) UnmarshalJSON(b []byte) error {
	return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// MarshalProtoJSON marshals the MACCommand message to JSON.
func (x *MACCommand) MarshalProtoJSON(s *jsonplugin.MarshalState) {
	if x == nil {
		s.WriteNil()
		return
	}
	s.WriteObjectStart()
	var wroteField bool
	if x.Cid != 0 || s.HasField("cid") {
		s.WriteMoreIf(&wroteField)
		s.WriteObjectField("cid")
		x.Cid.MarshalProtoJSON(s)
	}
	// Payload is a oneof: exactly one case is set, and it is flattened into the
	// enclosing object under its own field name. Message types that implement
	// MarshalProtoJSON are delegated to with a field-scoped state; the rest are
	// handled by the generic gogo marshaler.
	if x.Payload != nil {
		switch ov := x.Payload.(type) {
		case *MACCommand_RawPayload:
			s.WriteMoreIf(&wroteField)
			s.WriteObjectField("raw_payload")
			s.WriteBytes(ov.RawPayload)
		case *MACCommand_ResetInd_:
			s.WriteMoreIf(&wroteField)
			s.WriteObjectField("reset_ind")
			ov.ResetInd.MarshalProtoJSON(s.WithField("reset_ind"))
		case *MACCommand_ResetConf_:
			s.WriteMoreIf(&wroteField)
			s.WriteObjectField("reset_conf")
			ov.ResetConf.MarshalProtoJSON(s.WithField("reset_conf"))
		case *MACCommand_LinkCheckAns_:
			s.WriteMoreIf(&wroteField)
			s.WriteObjectField("link_check_ans")
			// NOTE: MACCommand_LinkCheckAns does not seem to implement MarshalProtoJSON.
			gogo.MarshalMessage(s, ov.LinkCheckAns)
		case *MACCommand_LinkAdrReq:
			s.WriteMoreIf(&wroteField)
			s.WriteObjectField("link_adr_req")
			ov.LinkAdrReq.MarshalProtoJSON(s.WithField("link_adr_req"))
		case *MACCommand_LinkAdrAns:
			s.WriteMoreIf(&wroteField)
			s.WriteObjectField("link_adr_ans")
			// NOTE: MACCommand_LinkADRAns does not seem to implement MarshalProtoJSON.
			gogo.MarshalMessage(s, ov.LinkAdrAns)
		case *MACCommand_DutyCycleReq_:
			s.WriteMoreIf(&wroteField)
			s.WriteObjectField("duty_cycle_req")
			ov.DutyCycleReq.MarshalProtoJSON(s.WithField("duty_cycle_req"))
		case *MACCommand_RxParamSetupReq_:
			s.WriteMoreIf(&wroteField)
			s.WriteObjectField("rx_param_setup_req")
			ov.RxParamSetupReq.MarshalProtoJSON(s.WithField("rx_param_setup_req"))
		case *MACCommand_RxParamSetupAns_:
			s.WriteMoreIf(&wroteField)
			s.WriteObjectField("rx_param_setup_ans")
			// NOTE: MACCommand_RxParamSetupAns does not seem to implement MarshalProtoJSON.
			gogo.MarshalMessage(s, ov.RxParamSetupAns)
		case *MACCommand_DevStatusAns_:
			s.WriteMoreIf(&wroteField)
			s.WriteObjectField("dev_status_ans")
			// NOTE: MACCommand_DevStatusAns does not seem to implement MarshalProtoJSON.
			gogo.MarshalMessage(s, ov.DevStatusAns)
		case *MACCommand_NewChannelReq_:
			s.WriteMoreIf(&wroteField)
			s.WriteObjectField("new_channel_req")
			ov.NewChannelReq.MarshalProtoJSON(s.WithField("new_channel_req"))
		case *MACCommand_NewChannelAns_:
			s.WriteMoreIf(&wroteField)
			s.WriteObjectField("new_channel_ans")
			// NOTE: MACCommand_NewChannelAns does not seem to implement MarshalProtoJSON.
			gogo.MarshalMessage(s, ov.NewChannelAns)
		case *MACCommand_DlChannelReq:
			s.WriteMoreIf(&wroteField)
			s.WriteObjectField("dl_channel_req")
			// NOTE: MACCommand_DLChannelReq does not seem to implement MarshalProtoJSON.
			gogo.MarshalMessage(s, ov.DlChannelReq)
		case *MACCommand_DlChannelAns:
			s.WriteMoreIf(&wroteField)
			s.WriteObjectField("dl_channel_ans")
			// NOTE: MACCommand_DLChannelAns does not seem to implement MarshalProtoJSON.
			gogo.MarshalMessage(s, ov.DlChannelAns)
		case *MACCommand_RxTimingSetupReq_:
			s.WriteMoreIf(&wroteField)
			s.WriteObjectField("rx_timing_setup_req")
			ov.RxTimingSetupReq.MarshalProtoJSON(s.WithField("rx_timing_setup_req"))
		case *MACCommand_TxParamSetupReq_:
			s.WriteMoreIf(&wroteField)
			s.WriteObjectField("tx_param_setup_req")
			ov.TxParamSetupReq.MarshalProtoJSON(s.WithField("tx_param_setup_req"))
		case *MACCommand_RekeyInd_:
			s.WriteMoreIf(&wroteField)
			s.WriteObjectField("rekey_ind")
			ov.RekeyInd.MarshalProtoJSON(s.WithField("rekey_ind"))
		case *MACCommand_RekeyConf_:
			s.WriteMoreIf(&wroteField)
			s.WriteObjectField("rekey_conf")
			ov.RekeyConf.MarshalProtoJSON(s.WithField("rekey_conf"))
		case *MACCommand_AdrParamSetupReq:
			s.WriteMoreIf(&wroteField)
			s.WriteObjectField("adr_param_setup_req")
			ov.AdrParamSetupReq.MarshalProtoJSON(s.WithField("adr_param_setup_req"))
		case *MACCommand_DeviceTimeAns_:
			s.WriteMoreIf(&wroteField)
			s.WriteObjectField("device_time_ans")
			// NOTE: MACCommand_DeviceTimeAns does not seem to implement MarshalProtoJSON.
			gogo.MarshalMessage(s, ov.DeviceTimeAns)
		case *MACCommand_ForceRejoinReq_:
			s.WriteMoreIf(&wroteField)
			s.WriteObjectField("force_rejoin_req")
			ov.ForceRejoinReq.MarshalProtoJSON(s.WithField("force_rejoin_req"))
		case *MACCommand_RejoinParamSetupReq_:
			s.WriteMoreIf(&wroteField)
			s.WriteObjectField("rejoin_param_setup_req")
			ov.RejoinParamSetupReq.MarshalProtoJSON(s.WithField("rejoin_param_setup_req"))
		case *MACCommand_RejoinParamSetupAns_:
			s.WriteMoreIf(&wroteField)
			s.WriteObjectField("rejoin_param_setup_ans")
			// NOTE: MACCommand_RejoinParamSetupAns does not seem to implement MarshalProtoJSON.
			gogo.MarshalMessage(s, ov.RejoinParamSetupAns)
		case *MACCommand_PingSlotInfoReq_:
			s.WriteMoreIf(&wroteField)
			s.WriteObjectField("ping_slot_info_req")
			ov.PingSlotInfoReq.MarshalProtoJSON(s.WithField("ping_slot_info_req"))
		case *MACCommand_PingSlotChannelReq_:
			s.WriteMoreIf(&wroteField)
			s.WriteObjectField("ping_slot_channel_req")
			ov.PingSlotChannelReq.MarshalProtoJSON(s.WithField("ping_slot_channel_req"))
		case *MACCommand_PingSlotChannelAns_:
			s.WriteMoreIf(&wroteField)
			s.WriteObjectField("ping_slot_channel_ans")
			// NOTE: MACCommand_PingSlotChannelAns does not seem to implement MarshalProtoJSON.
			gogo.MarshalMessage(s, ov.PingSlotChannelAns)
		case *MACCommand_BeaconTimingAns_:
			s.WriteMoreIf(&wroteField)
			s.WriteObjectField("beacon_timing_ans")
			// NOTE: MACCommand_BeaconTimingAns does not seem to implement MarshalProtoJSON.
			gogo.MarshalMessage(s, ov.BeaconTimingAns)
		case *MACCommand_BeaconFreqReq_:
			s.WriteMoreIf(&wroteField)
			s.WriteObjectField("beacon_freq_req")
			// NOTE: MACCommand_BeaconFreqReq does not seem to implement MarshalProtoJSON.
			gogo.MarshalMessage(s, ov.BeaconFreqReq)
		case *MACCommand_BeaconFreqAns_:
			s.WriteMoreIf(&wroteField)
			s.WriteObjectField("beacon_freq_ans")
			// NOTE: MACCommand_BeaconFreqAns does not seem to implement MarshalProtoJSON.
			gogo.MarshalMessage(s, ov.BeaconFreqAns)
		case *MACCommand_DeviceModeInd_:
			s.WriteMoreIf(&wroteField)
			s.WriteObjectField("device_mode_ind")
			ov.DeviceModeInd.MarshalProtoJSON(s.WithField("device_mode_ind"))
		case *MACCommand_DeviceModeConf_:
			s.WriteMoreIf(&wroteField)
			s.WriteObjectField("device_mode_conf")
			ov.DeviceModeConf.MarshalProtoJSON(s.WithField("device_mode_conf"))
		}
	}
	s.WriteObjectEnd()
}

// MarshalJSON marshals the MACCommand to JSON.
func (x MACCommand) MarshalJSON() ([]byte, error) {
	return jsonplugin.DefaultMarshalerConfig.Marshal(&x)
}
// UnmarshalProtoJSON unmarshals the MACCommand message from JSON.
func (x *MACCommand) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
	if s.ReadNil() {
		return
	}
	s.ReadObject(func(key string) {
		switch key {
		default:
			s.ReadAny() // ignore unknown field
		case "cid":
			s.AddField("cid")
			x.Cid.UnmarshalProtoJSON(s)
		// Each oneof case below selects the Payload variant, assigning the wrapper
		// first so a nil JSON value still records which variant was chosen. Message
		// types with their own UnmarshalProtoJSON are delegated to with a
		// field-scoped state; the rest go through the generic gogo unmarshaler.
		case "raw_payload", "rawPayload":
			s.AddField("raw_payload")
			ov := &MACCommand_RawPayload{}
			x.Payload = ov
			ov.RawPayload = s.ReadBytes()
		case "reset_ind", "resetInd":
			ov := &MACCommand_ResetInd_{}
			x.Payload = ov
			if s.ReadNil() {
				ov.ResetInd = nil
				return
			}
			ov.ResetInd = &MACCommand_ResetInd{}
			ov.ResetInd.UnmarshalProtoJSON(s.WithField("reset_ind", true))
		case "reset_conf", "resetConf":
			ov := &MACCommand_ResetConf_{}
			x.Payload = ov
			if s.ReadNil() {
				ov.ResetConf = nil
				return
			}
			ov.ResetConf = &MACCommand_ResetConf{}
			ov.ResetConf.UnmarshalProtoJSON(s.WithField("reset_conf", true))
		case "link_check_ans", "linkCheckAns":
			s.AddField("link_check_ans")
			ov := &MACCommand_LinkCheckAns_{}
			x.Payload = ov
			if s.ReadNil() {
				ov.LinkCheckAns = nil
				return
			}
			// NOTE: MACCommand_LinkCheckAns does not seem to implement UnmarshalProtoJSON.
			var v MACCommand_LinkCheckAns
			gogo.UnmarshalMessage(s, &v)
			ov.LinkCheckAns = &v
		case "link_adr_req", "linkAdrReq":
			ov := &MACCommand_LinkAdrReq{}
			x.Payload = ov
			if s.ReadNil() {
				ov.LinkAdrReq = nil
				return
			}
			ov.LinkAdrReq = &MACCommand_LinkADRReq{}
			ov.LinkAdrReq.UnmarshalProtoJSON(s.WithField("link_adr_req", true))
		case "link_adr_ans", "linkAdrAns":
			s.AddField("link_adr_ans")
			ov := &MACCommand_LinkAdrAns{}
			x.Payload = ov
			if s.ReadNil() {
				ov.LinkAdrAns = nil
				return
			}
			// NOTE: MACCommand_LinkADRAns does not seem to implement UnmarshalProtoJSON.
			var v MACCommand_LinkADRAns
			gogo.UnmarshalMessage(s, &v)
			ov.LinkAdrAns = &v
		case "duty_cycle_req", "dutyCycleReq":
			ov := &MACCommand_DutyCycleReq_{}
			x.Payload = ov
			if s.ReadNil() {
				ov.DutyCycleReq = nil
				return
			}
			ov.DutyCycleReq = &MACCommand_DutyCycleReq{}
			ov.DutyCycleReq.UnmarshalProtoJSON(s.WithField("duty_cycle_req", true))
		case "rx_param_setup_req", "rxParamSetupReq":
			ov := &MACCommand_RxParamSetupReq_{}
			x.Payload = ov
			if s.ReadNil() {
				ov.RxParamSetupReq = nil
				return
			}
			ov.RxParamSetupReq = &MACCommand_RxParamSetupReq{}
			ov.RxParamSetupReq.UnmarshalProtoJSON(s.WithField("rx_param_setup_req", true))
		case "rx_param_setup_ans", "rxParamSetupAns":
			s.AddField("rx_param_setup_ans")
			ov := &MACCommand_RxParamSetupAns_{}
			x.Payload = ov
			if s.ReadNil() {
				ov.RxParamSetupAns = nil
				return
			}
			// NOTE: MACCommand_RxParamSetupAns does not seem to implement UnmarshalProtoJSON.
			var v MACCommand_RxParamSetupAns
			gogo.UnmarshalMessage(s, &v)
			ov.RxParamSetupAns = &v
		case "dev_status_ans", "devStatusAns":
			s.AddField("dev_status_ans")
			ov := &MACCommand_DevStatusAns_{}
			x.Payload = ov
			if s.ReadNil() {
				ov.DevStatusAns = nil
				return
			}
			// NOTE: MACCommand_DevStatusAns does not seem to implement UnmarshalProtoJSON.
			var v MACCommand_DevStatusAns
			gogo.UnmarshalMessage(s, &v)
			ov.DevStatusAns = &v
		case "new_channel_req", "newChannelReq":
			ov := &MACCommand_NewChannelReq_{}
			x.Payload = ov
			if s.ReadNil() {
				ov.NewChannelReq = nil
				return
			}
			ov.NewChannelReq = &MACCommand_NewChannelReq{}
			ov.NewChannelReq.UnmarshalProtoJSON(s.WithField("new_channel_req", true))
		case "new_channel_ans", "newChannelAns":
			s.AddField("new_channel_ans")
			ov := &MACCommand_NewChannelAns_{}
			x.Payload = ov
			if s.ReadNil() {
				ov.NewChannelAns = nil
				return
			}
			// NOTE: MACCommand_NewChannelAns does not seem to implement UnmarshalProtoJSON.
			var v MACCommand_NewChannelAns
			gogo.UnmarshalMessage(s, &v)
			ov.NewChannelAns = &v
		case "dl_channel_req", "dlChannelReq":
			s.AddField("dl_channel_req")
			ov := &MACCommand_DlChannelReq{}
			x.Payload = ov
			if s.ReadNil() {
				ov.DlChannelReq = nil
				return
			}
			// NOTE: MACCommand_DLChannelReq does not seem to implement UnmarshalProtoJSON.
			var v MACCommand_DLChannelReq
			gogo.UnmarshalMessage(s, &v)
			ov.DlChannelReq = &v
		case "dl_channel_ans", "dlChannelAns":
			s.AddField("dl_channel_ans")
			ov := &MACCommand_DlChannelAns{}
			x.Payload = ov
			if s.ReadNil() {
				ov.DlChannelAns = nil
				return
			}
			// NOTE: MACCommand_DLChannelAns does not seem to implement UnmarshalProtoJSON.
			var v MACCommand_DLChannelAns
			gogo.UnmarshalMessage(s, &v)
			ov.DlChannelAns = &v
		case "rx_timing_setup_req", "rxTimingSetupReq":
			ov := &MACCommand_RxTimingSetupReq_{}
			x.Payload = ov
			if s.ReadNil() {
				ov.RxTimingSetupReq = nil
				return
			}
			ov.RxTimingSetupReq = &MACCommand_RxTimingSetupReq{}
			ov.RxTimingSetupReq.UnmarshalProtoJSON(s.WithField("rx_timing_setup_req", true))
		case "tx_param_setup_req", "txParamSetupReq":
			ov := &MACCommand_TxParamSetupReq_{}
			x.Payload = ov
			if s.ReadNil() {
				ov.TxParamSetupReq = nil
				return
			}
			ov.TxParamSetupReq = &MACCommand_TxParamSetupReq{}
			ov.TxParamSetupReq.UnmarshalProtoJSON(s.WithField("tx_param_setup_req", true))
		case "rekey_ind", "rekeyInd":
			ov := &MACCommand_RekeyInd_{}
			x.Payload = ov
			if s.ReadNil() {
				ov.RekeyInd = nil
				return
			}
			ov.RekeyInd = &MACCommand_RekeyInd{}
			ov.RekeyInd.UnmarshalProtoJSON(s.WithField("rekey_ind", true))
		case "rekey_conf", "rekeyConf":
			ov := &MACCommand_RekeyConf_{}
			x.Payload = ov
			if s.ReadNil() {
				ov.RekeyConf = nil
				return
			}
			ov.RekeyConf = &MACCommand_RekeyConf{}
			ov.RekeyConf.UnmarshalProtoJSON(s.WithField("rekey_conf", true))
		case "adr_param_setup_req", "adrParamSetupReq":
			ov := &MACCommand_AdrParamSetupReq{}
			x.Payload = ov
			if s.ReadNil() {
				ov.AdrParamSetupReq = nil
				return
			}
			ov.AdrParamSetupReq = &MACCommand_ADRParamSetupReq{}
			ov.AdrParamSetupReq.UnmarshalProtoJSON(s.WithField("adr_param_setup_req", true))
		case "device_time_ans", "deviceTimeAns":
			s.AddField("device_time_ans")
			ov := &MACCommand_DeviceTimeAns_{}
			x.Payload = ov
			if s.ReadNil() {
				ov.DeviceTimeAns = nil
				return
			}
			// NOTE: MACCommand_DeviceTimeAns does not seem to implement UnmarshalProtoJSON.
			var v MACCommand_DeviceTimeAns
			gogo.UnmarshalMessage(s, &v)
			ov.DeviceTimeAns = &v
		case "force_rejoin_req", "forceRejoinReq":
			ov := &MACCommand_ForceRejoinReq_{}
			x.Payload = ov
			if s.ReadNil() {
				ov.ForceRejoinReq = nil
				return
			}
			ov.ForceRejoinReq = &MACCommand_ForceRejoinReq{}
			ov.ForceRejoinReq.UnmarshalProtoJSON(s.WithField("force_rejoin_req", true))
		case "rejoin_param_setup_req", "rejoinParamSetupReq":
			ov := &MACCommand_RejoinParamSetupReq_{}
			x.Payload = ov
			if s.ReadNil() {
				ov.RejoinParamSetupReq = nil
				return
			}
			ov.RejoinParamSetupReq = &MACCommand_RejoinParamSetupReq{}
			ov.RejoinParamSetupReq.UnmarshalProtoJSON(s.WithField("rejoin_param_setup_req", true))
		case "rejoin_param_setup_ans", "rejoinParamSetupAns":
			s.AddField("rejoin_param_setup_ans")
			ov := &MACCommand_RejoinParamSetupAns_{}
			x.Payload = ov
			if s.ReadNil() {
				ov.RejoinParamSetupAns = nil
				return
			}
			// NOTE: MACCommand_RejoinParamSetupAns does not seem to implement UnmarshalProtoJSON.
			var v MACCommand_RejoinParamSetupAns
			gogo.UnmarshalMessage(s, &v)
			ov.RejoinParamSetupAns = &v
		case "ping_slot_info_req", "pingSlotInfoReq":
			ov := &MACCommand_PingSlotInfoReq_{}
			x.Payload = ov
			if s.ReadNil() {
				ov.PingSlotInfoReq = nil
				return
			}
			ov.PingSlotInfoReq = &MACCommand_PingSlotInfoReq{}
			ov.PingSlotInfoReq.UnmarshalProtoJSON(s.WithField("ping_slot_info_req", true))
		case "ping_slot_channel_req", "pingSlotChannelReq":
			ov := &MACCommand_PingSlotChannelReq_{}
			x.Payload = ov
			if s.ReadNil() {
				ov.PingSlotChannelReq = nil
				return
			}
			ov.PingSlotChannelReq = &MACCommand_PingSlotChannelReq{}
			ov.PingSlotChannelReq.UnmarshalProtoJSON(s.WithField("ping_slot_channel_req", true))
		case "ping_slot_channel_ans", "pingSlotChannelAns":
			s.AddField("ping_slot_channel_ans")
			ov := &MACCommand_PingSlotChannelAns_{}
			x.Payload = ov
			if s.ReadNil() {
				ov.PingSlotChannelAns = nil
				return
			}
			// NOTE: MACCommand_PingSlotChannelAns does not seem to implement UnmarshalProtoJSON.
			var v MACCommand_PingSlotChannelAns
			gogo.UnmarshalMessage(s, &v)
			ov.PingSlotChannelAns = &v
		case "beacon_timing_ans", "beaconTimingAns":
			s.AddField("beacon_timing_ans")
			ov := &MACCommand_BeaconTimingAns_{}
			x.Payload = ov
			if s.ReadNil() {
				ov.BeaconTimingAns = nil
				return
			}
			// NOTE: MACCommand_BeaconTimingAns does not seem to implement UnmarshalProtoJSON.
			var v MACCommand_BeaconTimingAns
			gogo.UnmarshalMessage(s, &v)
			ov.BeaconTimingAns = &v
		case "beacon_freq_req", "beaconFreqReq":
			s.AddField("beacon_freq_req")
			ov := &MACCommand_BeaconFreqReq_{}
			x.Payload = ov
			if s.ReadNil() {
				ov.BeaconFreqReq = nil
				return
			}
			// NOTE: MACCommand_BeaconFreqReq does not seem to implement UnmarshalProtoJSON.
			var v MACCommand_BeaconFreqReq
			gogo.UnmarshalMessage(s, &v)
			ov.BeaconFreqReq = &v
		case "beacon_freq_ans", "beaconFreqAns":
			s.AddField("beacon_freq_ans")
			ov := &MACCommand_BeaconFreqAns_{}
			x.Payload = ov
			if s.ReadNil() {
				ov.BeaconFreqAns = nil
				return
			}
			// NOTE: MACCommand_BeaconFreqAns does not seem to implement UnmarshalProtoJSON.
			var v MACCommand_BeaconFreqAns
			gogo.UnmarshalMessage(s, &v)
			ov.BeaconFreqAns = &v
		case "device_mode_ind", "deviceModeInd":
			ov := &MACCommand_DeviceModeInd_{}
			x.Payload = ov
			if s.ReadNil() {
				ov.DeviceModeInd = nil
				return
			}
			ov.DeviceModeInd = &MACCommand_DeviceModeInd{}
			ov.DeviceModeInd.UnmarshalProtoJSON(s.WithField("device_mode_ind", true))
		case "device_mode_conf", "deviceModeConf":
			ov := &MACCommand_DeviceModeConf_{}
			x.Payload = ov
			if s.ReadNil() {
				ov.DeviceModeConf = nil
				return
			}
			ov.DeviceModeConf = &MACCommand_DeviceModeConf{}
			ov.DeviceModeConf.UnmarshalProtoJSON(s.WithField("device_mode_conf", true))
		}
	})
}

// UnmarshalJSON unmarshals the MACCommand from JSON.
func (x *MACCommand) UnmarshalJSON(b []byte) error {
	return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// MarshalProtoJSON marshals the FrequencyValue message to JSON.
//
// The wrapper is marshaled as its bare numeric value, not as an object with
// a "value" field. Redundant trailing returns removed (staticcheck S1023).
func (x *FrequencyValue) MarshalProtoJSON(s *jsonplugin.MarshalState) {
	if x == nil {
		s.WriteNil()
		return
	}
	s.WriteUint64(x.Value)
}

// MarshalJSON marshals the FrequencyValue to JSON.
func (x FrequencyValue) MarshalJSON() ([]byte, error) {
	return jsonplugin.DefaultMarshalerConfig.Marshal(&x)
}

// UnmarshalProtoJSON unmarshals the FrequencyValue message from JSON.
// A JSON null leaves the receiver unchanged.
func (x *FrequencyValue) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
	if s.ReadNil() {
		return
	}
	x.Value = s.ReadUint64()
}

// UnmarshalJSON unmarshals the FrequencyValue from JSON.
func (x *FrequencyValue) UnmarshalJSON(b []byte) error {
	return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// MarshalProtoJSON marshals the DataRateOffsetValue message to JSON.
//
// The wrapper is marshaled as its bare inner value, not as an object.
// Redundant trailing returns removed (staticcheck S1023).
func (x *DataRateOffsetValue) MarshalProtoJSON(s *jsonplugin.MarshalState) {
	if x == nil {
		s.WriteNil()
		return
	}
	x.Value.MarshalProtoJSON(s)
}

// MarshalJSON marshals the DataRateOffsetValue to JSON.
func (x DataRateOffsetValue) MarshalJSON() ([]byte, error) {
	return jsonplugin.DefaultMarshalerConfig.Marshal(&x)
}

// UnmarshalProtoJSON unmarshals the DataRateOffsetValue message from JSON.
// A JSON null leaves the receiver unchanged.
func (x *DataRateOffsetValue) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
	if s.ReadNil() {
		return
	}
	x.Value.UnmarshalProtoJSON(s)
}

// UnmarshalJSON unmarshals the DataRateOffsetValue from JSON.
func (x *DataRateOffsetValue) UnmarshalJSON(b []byte) error {
	return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// MarshalProtoJSON marshals the DataRateIndexValue message to JSON.
//
// The wrapper is marshaled as its bare inner value, not as an object.
// Redundant trailing returns removed (staticcheck S1023).
func (x *DataRateIndexValue) MarshalProtoJSON(s *jsonplugin.MarshalState) {
	if x == nil {
		s.WriteNil()
		return
	}
	x.Value.MarshalProtoJSON(s)
}

// MarshalJSON marshals the DataRateIndexValue to JSON.
func (x DataRateIndexValue) MarshalJSON() ([]byte, error) {
	return jsonplugin.DefaultMarshalerConfig.Marshal(&x)
}

// UnmarshalProtoJSON unmarshals the DataRateIndexValue message from JSON.
// A JSON null leaves the receiver unchanged.
func (x *DataRateIndexValue) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
	if s.ReadNil() {
		return
	}
	x.Value.UnmarshalProtoJSON(s)
}

// UnmarshalJSON unmarshals the DataRateIndexValue from JSON.
func (x *DataRateIndexValue) UnmarshalJSON(b []byte) error {
	return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// MarshalProtoJSON marshals the PingSlotPeriodValue message to JSON.
//
// The wrapper is marshaled as its bare inner value, not as an object.
// Redundant trailing returns removed (staticcheck S1023).
func (x *PingSlotPeriodValue) MarshalProtoJSON(s *jsonplugin.MarshalState) {
	if x == nil {
		s.WriteNil()
		return
	}
	x.Value.MarshalProtoJSON(s)
}

// MarshalJSON marshals the PingSlotPeriodValue to JSON.
func (x PingSlotPeriodValue) MarshalJSON() ([]byte, error) {
	return jsonplugin.DefaultMarshalerConfig.Marshal(&x)
}

// UnmarshalProtoJSON unmarshals the PingSlotPeriodValue message from JSON.
// A JSON null leaves the receiver unchanged.
func (x *PingSlotPeriodValue) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
	if s.ReadNil() {
		return
	}
	x.Value.UnmarshalProtoJSON(s)
}

// UnmarshalJSON unmarshals the PingSlotPeriodValue from JSON.
func (x *PingSlotPeriodValue) UnmarshalJSON(b []byte) error {
	return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// MarshalProtoJSON marshals the AggregatedDutyCycleValue message to JSON.
//
// The wrapper is marshaled as its bare inner value, not as an object.
// Redundant trailing returns removed (staticcheck S1023).
func (x *AggregatedDutyCycleValue) MarshalProtoJSON(s *jsonplugin.MarshalState) {
	if x == nil {
		s.WriteNil()
		return
	}
	x.Value.MarshalProtoJSON(s)
}

// MarshalJSON marshals the AggregatedDutyCycleValue to JSON.
func (x AggregatedDutyCycleValue) MarshalJSON() ([]byte, error) {
	return jsonplugin.DefaultMarshalerConfig.Marshal(&x)
}

// UnmarshalProtoJSON unmarshals the AggregatedDutyCycleValue message from JSON.
// A JSON null leaves the receiver unchanged.
func (x *AggregatedDutyCycleValue) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
	if s.ReadNil() {
		return
	}
	x.Value.UnmarshalProtoJSON(s)
}

// UnmarshalJSON unmarshals the AggregatedDutyCycleValue from JSON.
func (x *AggregatedDutyCycleValue) UnmarshalJSON(b []byte) error {
	return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// MarshalProtoJSON marshals the RxDelayValue message to JSON.
//
// The wrapper is marshaled as its bare inner value, not as an object.
// Redundant trailing returns removed (staticcheck S1023).
func (x *RxDelayValue) MarshalProtoJSON(s *jsonplugin.MarshalState) {
	if x == nil {
		s.WriteNil()
		return
	}
	x.Value.MarshalProtoJSON(s)
}

// MarshalJSON marshals the RxDelayValue to JSON.
func (x RxDelayValue) MarshalJSON() ([]byte, error) {
	return jsonplugin.DefaultMarshalerConfig.Marshal(&x)
}

// UnmarshalProtoJSON unmarshals the RxDelayValue message from JSON.
// A JSON null leaves the receiver unchanged.
func (x *RxDelayValue) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
	if s.ReadNil() {
		return
	}
	x.Value.UnmarshalProtoJSON(s)
}

// UnmarshalJSON unmarshals the RxDelayValue from JSON.
func (x *RxDelayValue) UnmarshalJSON(b []byte) error {
	return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// MarshalProtoJSON marshals the ADRAckLimitExponentValue message to JSON.
//
// The wrapper is marshaled as its bare inner value, not as an object.
// Redundant trailing returns removed (staticcheck S1023).
func (x *ADRAckLimitExponentValue) MarshalProtoJSON(s *jsonplugin.MarshalState) {
	if x == nil {
		s.WriteNil()
		return
	}
	x.Value.MarshalProtoJSON(s)
}

// MarshalJSON marshals the ADRAckLimitExponentValue to JSON.
func (x ADRAckLimitExponentValue) MarshalJSON() ([]byte, error) {
	return jsonplugin.DefaultMarshalerConfig.Marshal(&x)
}

// UnmarshalProtoJSON unmarshals the ADRAckLimitExponentValue message from JSON.
// A JSON null leaves the receiver unchanged.
func (x *ADRAckLimitExponentValue) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
	if s.ReadNil() {
		return
	}
	x.Value.UnmarshalProtoJSON(s)
}

// UnmarshalJSON unmarshals the ADRAckLimitExponentValue from JSON.
func (x *ADRAckLimitExponentValue) UnmarshalJSON(b []byte) error {
	return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// MarshalProtoJSON marshals the ADRAckDelayExponentValue message to JSON.
//
// The wrapper is marshaled as its bare inner value, not as an object.
// Redundant trailing returns removed (staticcheck S1023).
func (x *ADRAckDelayExponentValue) MarshalProtoJSON(s *jsonplugin.MarshalState) {
	if x == nil {
		s.WriteNil()
		return
	}
	x.Value.MarshalProtoJSON(s)
}

// MarshalJSON marshals the ADRAckDelayExponentValue to JSON.
func (x ADRAckDelayExponentValue) MarshalJSON() ([]byte, error) {
	return jsonplugin.DefaultMarshalerConfig.Marshal(&x)
}

// UnmarshalProtoJSON unmarshals the ADRAckDelayExponentValue message from JSON.
// A JSON null leaves the receiver unchanged.
func (x *ADRAckDelayExponentValue) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
	if s.ReadNil() {
		return
	}
	x.Value.UnmarshalProtoJSON(s)
}

// UnmarshalJSON unmarshals the ADRAckDelayExponentValue from JSON.
func (x *ADRAckDelayExponentValue) UnmarshalJSON(b []byte) error {
	return jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
}
// MarshalProtoJSON writes the DeviceEIRPValue message to the JSON stream.
func (x *DeviceEIRPValue) MarshalProtoJSON(s *jsonplugin.MarshalState) {
	if x != nil {
		x.Value.MarshalProtoJSON(s)
		return
	}
	// A nil message is encoded as JSON null.
	s.WriteNil()
}
// MarshalJSON implements json.Marshaler for DeviceEIRPValue by delegating
// to the plugin's default marshaler configuration.
func (x DeviceEIRPValue) MarshalJSON() ([]byte, error) {
	data, err := jsonplugin.DefaultMarshalerConfig.Marshal(&x)
	return data, err
}
// UnmarshalProtoJSON reads the DeviceEIRPValue message from the JSON stream.
func (x *DeviceEIRPValue) UnmarshalProtoJSON(s *jsonplugin.UnmarshalState) {
	// A JSON null leaves the receiver untouched.
	if !s.ReadNil() {
		x.Value.UnmarshalProtoJSON(s)
	}
}
// UnmarshalJSON implements json.Unmarshaler for DeviceEIRPValue by delegating
// to the plugin's default unmarshaler configuration.
func (x *DeviceEIRPValue) UnmarshalJSON(b []byte) error {
	err := jsonplugin.DefaultUnmarshalerConfig.Unmarshal(b, x)
	return err
}
|
#!/bin/bash
## @author Jay Goldberg
## @email jaymgoldberg@gmail.com
## @description appends little text lines using a popup window
## just tie it to a keybinding in your window manager
## @license Apache 2.0
## @usage guinote.sh <filename>
## @requires zenity
#=======================================================================
# Note file lives in the user's home directory, named by the first argument.
progfile=~/${1}
timestamp=$(date +%Y-%m-%d_%H:%M:%S)
title="Enter note"
# Prompt for the note text. zenity --entry exits non-zero when the dialog is
# cancelled or closed; only append when the user actually confirmed, so a
# cancelled dialog no longer writes an empty timestamped line.
# (Also removed the unused `output=0` variable from the original.)
if query=$(zenity --entry --title="$title" --text="Note text for $progfile:"); then
    echo "$timestamp $query" >> "$progfile"
fi
exit 0
|
<filename>Scripts/skracivanje.py<gh_stars>0
def print_reads_without_n(path='SRR1031159_1_full.fasta'):
    """Print header/sequence pairs from a 2-line-per-record FASTA file,
    skipping every record whose sequence contains an 'N' base.

    Generalized from the original flat script: the input filename is now a
    parameter whose default is the original hard-coded file, so running the
    module behaves exactly as before.

    Parameters
    ----------
    path : str
        Path to the FASTA file. Each record is assumed to occupy exactly
        two lines (header then sequence), as the original read loop did.
    """
    with open(path, 'r') as f:
        head = f.readline().strip()
        while head:
            seq = f.readline().strip()
            # Idiomatic membership test instead of seq.find('N') == -1.
            if 'N' not in seq:
                print(head)
                print(seq)
            head = f.readline().strip()


if __name__ == '__main__':
    print_reads_without_n()
|
<reponame>neoguru/axboot-origin
package com.chequer.axboot.core.model.extract.metadata;
import lombok.Data;
/**
 * Metadata describing one column of a table's primary key, as extracted
 * from database metadata. Lombok's {@code @Data} generates the getters,
 * setters, equals/hashCode and toString.
 */
@Data
public class PrimaryKey {
    // Name of the column participating in the primary key.
    private String columnName;
    // Position of the column within the key (presumably JDBC KEY_SEQ,
    // 1-based — verify against the metadata extractor that fills this in).
    private Integer keySeq;
}
|
#!/bin/bash
# Simpler entrypoint script for awe client
# Positional arguments: $1 = client group name, $2 = VM hostname.
clientgroup=$1
vmhostname=$2
echo clientgroup is $clientgroup
echo vmhostname is $vmhostname
# it would be nice to clean this up
# Derive this container's name from `docker inspect` of the container's own
# hostname: pick the line mentioning "aweworker" with a Name field and strip
# the leading slash and trailing quote.
containername=$(docker inspect $(hostname)|grep aweworker|grep Name|cut -f2 -d '/'|cut -f1 -d '"')
# Compose a unique client name: <group>_<vmhost>_<container>.
clientname=${clientgroup}_${vmhostname}_${containername}
# for njsw reporting
export AWE_CLIENTNAME=$clientname
export AWE_CLIENTGROUP=$clientgroup
# Debug output: dump the environment and the per-group token file being used.
env
cat /config/config/$clientgroup.token
echo /config/config/$clientgroup.token
# Start the AWE client with per-group/per-client data, log and work
# directories under /mnt/awe, authenticating with the group's token file.
/kb/deployment/bin/awe-client --conf /kb/deployment/awe-client.cfg --name=$clientname --data=/mnt/awe/$clientgroup/$clientname/data --logs=/mnt/awe/$clientgroup/$clientname/logs --workpath=/mnt/awe/$clientgroup/$clientname/work --group=$clientgroup --clientgroup_token=$(cat /config/config/$clientgroup.token)
|
use serde::{Deserialize, Deserializer};
use serde_json::from_str;
/// Top-level SCML document: a character name plus its list of strokes.
#[derive(Deserialize, Debug)]
pub struct Scml {
    pub name: String,
    pub strokes: Vec<Stroke>,
}
impl Scml {
    /// Deserialize an SCML JSON document.
    ///
    /// Panics with "Scml parse error" if the input is not valid JSON for
    /// this structure.
    fn parse(scml_json: &str) -> Scml {
        from_str(scml_json).expect("Scml parse error")
    }
}
/// One stroke of an SCML document.
///
/// NOTE(review): this struct currently has no fields, so serde accepts any
/// JSON object here and discards its contents (unknown fields are ignored
/// by default). The fields below must be filled in for the stroke data in
/// main()'s sample JSON to actually be captured.
#[derive(Deserialize, Debug)]
pub struct Stroke {
    // Define the fields of the Stroke struct based on the actual SCML format
    // For example:
    // pub name: String,
    // pub duration: u32,
    // pub frames: Vec<Frame>,
}
impl Stroke {
    /// Deserialize a single stroke from JSON.
    ///
    /// Panics with "Stroke parse error" on malformed input. Currently
    /// unused by main(), which parses whole documents via Scml::parse.
    fn parse(stroke_json: &str) -> Stroke {
        from_str(stroke_json).expect("Stroke parse error")
    }
}
// Demo entry point: parse an embedded sample SCML document and debug-print
// the result. Because Stroke has no fields yet, the per-stroke data in the
// sample below is parsed but dropped.
fn main() {
let scml_json = r#"
{
"name": "character_name",
"strokes": [
{
"name": "stroke1",
"duration": 100,
"frames": [
{"frame_num": 1, "image": "image1.png"},
{"frame_num": 2, "image": "image2.png"}
]
},
{
"name": "stroke2",
"duration": 150,
"frames": [
{"frame_num": 1, "image": "image3.png"},
{"frame_num": 2, "image": "image4.png"}
]
}
]
}
"#;
let scml = Scml::parse(scml_json);
println!("{:?}", scml);
}
|
#!/bin/sh
# Usage: ./deploy.sh APP_NAME
# Deploys the CloudFormation stack for APP_NAME, builds the frontend, and
# syncs the build output into the bucket exported by the stack.
# Abort on the first failing command or unset variable; previously a failed
# deploy or describe-stacks would be silently ignored and the script would
# keep going with an empty bucket name.
set -eu
APP_NAME=$1
aws cloudformation deploy \
  --stack-name "${APP_NAME}" \
  --template-file ./ci/s3.yml \
  --parameter-overrides AppName="${APP_NAME}" \
  --no-fail-on-empty-changeset
# Pull the bucket name out of the stack's "BucketName" output.
BUCKET_NAME=$(aws cloudformation describe-stacks --stack-name "${APP_NAME}" | jq -r '.Stacks[0] | .Outputs[] | select(.OutputKey == "BucketName") | .OutputValue')
npm install && npm run build
# Quote the destination (previously unquoted) so unusual bucket names survive.
aws s3 sync ./dist "s3://${BUCKET_NAME}"
|
#!/usr/bin/env bash
#
# Copyright 2012 HellaSec, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ==== kill.sh for doorman test====
#
# pkill -f matches against the full command line, so these stop the python
# processes running the doorman legit-traffic and attack generators.
pkill -f "python.*doorman_legit"
pkill -f "python.*doorman_attack"
|
# Launch the Adept Imageloader application. The quoted "*" classpath lets the
# JVM (not the shell) expand it to every jar in /Applications/Adept/lib.
cd /Applications/Adept/lib
java -classpath "*" com.sri.tasklearning.adept.applications.Imageloader.Imageloader
|
#!/bin/sh
# This script will install a new BookStack instance on a fresh Ubuntu 16.04 server.
# This script is experimental and does not ensure any security.
echo ""
echo -n "Enter the domain you want to host BookStack and press [ENTER]: "
read DOMAIN
# Best-effort detection of this machine's IP: take the first address of the
# first interface reported as "state UP". NOTE(review): on multi-NIC hosts
# this may pick the wrong interface — confirm before relying on it.
myip=$(ip addr | grep 'state UP' -A2 | tail -n1 | awk '{print $2}' | cut -f1 -d'/')
export DEBIAN_FRONTEND=noninteractive
apt update
apt install -y git nginx curl php7.0-fpm php7.0-curl php7.0-mbstring php7.0-ldap php7.0-mcrypt \
php7.0-tidy php7.0-xml php7.0-zip php7.0-gd php7.0-mysql mysql-server-5.7 mcrypt
# Set up database
# Random 13-character alphanumeric password for the bookstack DB user.
DB_PASS="$(head /dev/urandom | tr -dc A-Za-z0-9 | head -c 13)"
mysql -u root --execute="CREATE DATABASE bookstack;"
mysql -u root --execute="CREATE USER 'bookstack'@'localhost' IDENTIFIED BY '$DB_PASS';"
mysql -u root --execute="GRANT ALL ON bookstack.* TO 'bookstack'@'localhost';FLUSH PRIVILEGES;"
# Download BookStack
cd /var/www
git clone https://github.com/ssddanbrown/BookStack.git --branch release --single-branch bookstack
BOOKSTACK_DIR="/var/www/bookstack"
cd $BOOKSTACK_DIR
# Install composer
# Verify the composer installer against the published SHA-384 signature and
# refuse to run it on mismatch.
EXPECTED_SIGNATURE=$(wget https://composer.github.io/installer.sig -O - -q)
curl -s https://getcomposer.org/installer > composer-setup.php
ACTUAL_SIGNATURE=$(php -r "echo hash_file('SHA384', 'composer-setup.php');")
if [ "$EXPECTED_SIGNATURE" = "$ACTUAL_SIGNATURE" ]
then
    php composer-setup.php --quiet
    RESULT=$?
    rm composer-setup.php
else
    >&2 echo 'ERROR: Invalid composer installer signature'
    rm composer-setup.php
    exit 1
fi
# Install BookStack composer dependancies
php composer.phar install
# Copy and update BookStack environment variables
# The sed -i.bak calls keep a .env.bak backup of each in-place edit.
cp .env.example .env
sed -i.bak 's/DB_DATABASE=.*$/DB_DATABASE=bookstack/' .env
sed -i.bak 's/DB_USERNAME=.*$/DB_USERNAME=bookstack/' .env
sed -i.bak "s/DB_PASSWORD=.*\$/DB_PASSWORD=$DB_PASS/" .env
# Generate the application key
php artisan key:generate --no-interaction --force
# Migrate the databases
php artisan migrate --no-interaction --force
# Set file and folder permissions
chown www-data:www-data -R bootstrap/cache public/uploads storage && chmod -R 755 bootstrap/cache public/uploads storage
# Add nginx configuration
curl -s https://raw.githubusercontent.com/BookStackApp/devops/master/config/nginx > /etc/nginx/sites-available/bookstack
sed -i.bak "s/bookstack.dev/$DOMAIN/" /etc/nginx/sites-available/bookstack
ln -s /etc/nginx/sites-available/bookstack /etc/nginx/sites-enabled/bookstack
# Remove the default nginx configuration
rm /etc/nginx/sites-enabled/default
# Restart nginx to load new config
service nginx restart
echo ""
echo "Setup Finished, Your BookStack instance should now be installed."
echo "You can login with the email 'admin@admin.com' and password of 'password'"
echo "MySQL was installed without a root password, It is recommended that you set a root MySQL password."
echo ""
echo "You can access your BookStack instance at: http://$myip/"
|
// Log the value of every enumerable property of `person`
// (for...in walks own and inherited enumerable keys alike).
for (const key in person) {
  console.log(person[key]);
}
|
#!/usr/bin/env bash
# https://github.com/BeSlower/Udacity_object_dataset
# Download and unpack the Udacity object-detection dataset into ../data/udacity.
readonly CURRENT_DIR=$(dirname $(realpath $0))
readonly DATA_PATH_BASE=$(realpath ${CURRENT_DIR}/../data)
readonly DATA_PATH=${DATA_PATH_BASE}/udacity
echo "start downloading udacity dataset"
if [ ! -d ${DATA_PATH} ]; then
    mkdir -p ${DATA_PATH}
fi
# wget -c resumes a partial download, so re-running the script is safe.
if [ ! -f ${DATA_PATH}/object-dataset.tar.gz ]; then
    wget -c https://s3.amazonaws.com/udacity-sdc/annotations/object-dataset.tar.gz -P ${DATA_PATH}
fi
tar -xvf ${DATA_PATH}/object-dataset.tar.gz -C ${DATA_PATH}
|
#!/bin/bash
## Adapted from code by Nadia Davidson: https://github.com/Oshlack/JAFFA/blob/master/install_linux64.sh
## This script installs the prerequisite software for the MINTIE pipeline
## It will fetch each tool from the web and place it into the tools/ subdirectory.
## Paths to all installed tools can be found in the file tools.groovy at the
## end of execution of this script. These paths can be changed if a different
## version of software is required. Note that R must be installed manually
# Everything is installed under tools/, with executables symlinked into tools/bin.
mkdir -p tools/bin
cd tools
#a list of which programs need to be installed
#commands="bpipe fastuniq dedupe trimmomatic fasta_formatter samtools bedtools soapdenovotrans salmon hisat gmap"
# Each name here must have a matching <name>_install function defined below;
# the driver loop calls ${c}_install for any tool missing from tools/bin.
commands="bpipe fastuniq dedupe trimmomatic fasta_formatter samtools bedtools jellyfish bowtie2 Trinity salmon hisat gmap"
#installation methods
# Fetch the bpipe 0.9.9.5 release and symlink its binaries into tools/bin.
function bpipe_install {
    wget -O bpipe-0.9.9.5.tar.gz https://github.com/ssadedin/bpipe/releases/download/0.9.9.5/bpipe-0.9.9.5.tar.gz
    tar -zxvf bpipe-0.9.9.5.tar.gz ; rm bpipe-0.9.9.5.tar.gz
    ln -s $PWD/bpipe-0.9.9.5/bin/* $PWD/bin/
}
# Download FastUniq 1.1 source, build it, and symlink the binary into tools/bin.
function fastuniq_install {
    wget --no-check-certificate https://sourceforge.net/projects/fastuniq/files/FastUniq-1.1.tar.gz
    tar -xvzf FastUniq-1.1.tar.gz
    rm FastUniq-1.1.tar.gz
    make -C FastUniq/source/
    ln -s $PWD/FastUniq/source/fastuniq $PWD/bin
}
# Fetch Trimmomatic 0.39 and wrap its jar in a shell launcher on tools/bin.
function trimmomatic_install {
    wget http://www.usadellab.org/cms/uploads/supplementary/Trimmomatic/Trimmomatic-0.39.zip
    unzip Trimmomatic-0.39.zip ; rm Trimmomatic-0.39.zip
    echo "java -jar $PWD/Trimmomatic-0.39/trimmomatic-0.39.jar \$*" > Trimmomatic-0.39/trimmomatic.sh
    chmod +x Trimmomatic-0.39/trimmomatic.sh
    ln -s $PWD/Trimmomatic-0.39/trimmomatic.sh $PWD/bin/trimmomatic
}
# Fetch the prebuilt SOAPdenovo-Trans v1.03 binaries (127mer variant is linked).
function soapdenovotrans_install {
    wget --no-check-certificate https://sourceforge.net/projects/soapdenovotrans/files/SOAPdenovo-Trans/bin/v1.03/SOAPdenovo-Trans-bin-v1.03.tar.gz
    mkdir -p SOAPdenovo-Trans-bin-v1.03
    tar -xvzf SOAPdenovo-Trans-bin-v1.03.tar.gz -C SOAPdenovo-Trans-bin-v1.03
    rm SOAPdenovo-Trans-bin-v1.03.tar.gz
    ln -s $PWD/SOAPdenovo-Trans-bin-v1.03/SOAPdenovo-Trans-127mer $PWD/bin/soapdenovotrans
}
# Fetch the prebuilt SPAdes 3.12.0 Linux bundle and expose rnaspades.py.
# NOTE(review): "rnaspades" is not in $commands, so this is never called by
# the driver loop — presumably kept for manual use; verify before removing.
function rnaspades_install {
    wget --no-check-certificate http://cab.spbu.ru/files/release3.12.0/SPAdes-3.12.0-Linux.tar.gz
    tar -xvzf SPAdes-3.12.0-Linux.tar.gz
    rm SPAdes-3.12.0-Linux.tar.gz
    ln -s $PWD/SPAdes-3.12.0-Linux/bin/rnaspades.py $PWD/bin/rnaspades
}
# Download and build Trinity v2.8.5 (including its plugins) from source.
function Trinity_install {
    wget --no-check-certificate https://github.com/trinityrnaseq/trinityrnaseq/archive/Trinity-v2.8.5.tar.gz
    tar -xvzf Trinity-v2.8.5.tar.gz
    rm Trinity-v2.8.5.tar.gz
    make -C trinityrnaseq-Trinity-v2.8.5
    make plugins -C trinityrnaseq-Trinity-v2.8.5
    ln -s $PWD/trinityrnaseq-Trinity-v2.8.5/Trinity $PWD/bin
}
# Fetch the statically built jellyfish 2.3.0 binary straight into tools/bin.
function jellyfish_install {
    wget --no-check-certificate https://github.com/gmarcais/Jellyfish/releases/download/v2.3.0/jellyfish-linux
    chmod u+x jellyfish-linux
    mv jellyfish-linux $PWD/bin/jellyfish
}
# Fetch the prebuilt bowtie2 2.3.5.1 bundle and symlink all bowtie* tools.
function bowtie2_install {
    wget --no-check-certificate https://sourceforge.net/projects/bowtie-bio/files/bowtie2/2.3.5.1/bowtie2-2.3.5.1-linux-x86_64.zip
    unzip bowtie2-2.3.5.1-linux-x86_64.zip
    rm bowtie2-2.3.5.1-linux-x86_64.zip
    ln -s $PWD/bowtie2-2.3.5.1-linux-x86_64/bowtie* $PWD/bin
}
# Fetch the FASTX-toolkit 0.0.13 binaries (provides fasta_formatter).
# NOTE(review): the archive is extracted in place with no symlink step —
# presumably it unpacks directly into bin/; verify the archive layout.
function fasta_formatter_install {
    wget http://hannonlab.cshl.edu/fastx_toolkit/fastx_toolkit_0.0.13_binaries_Linux_2.6_amd64.tar.bz2
    tar -jxvf fastx_toolkit_0.0.13_binaries_Linux_2.6_amd64.tar.bz2
    rm fastx_toolkit_0.0.13_binaries_Linux_2.6_amd64.tar.bz2
}
# Fetch BBMap 38.50b and generate a thin wrapper in tools/bin for every
# bbmap/*.sh script (wrapper name = script name without the .sh suffix).
function dedupe_install {
    wget --no-check-certificate https://sourceforge.net/projects/bbmap/files/BBMap_38.50b.tar.gz
    tar -zxvf BBMap_38.50b.tar.gz
    rm BBMap_38.50b.tar.gz
    for script in `ls $PWD/bbmap/*.sh` ; do
        s=`basename $script`
        s_pre=`echo $s | sed 's/.sh//g'`
        echo "$PWD/bbmap/$s \$@" > $PWD/bin/$s_pre
        chmod +x $PWD/bin/$s_pre
    done
}
# Download and build samtools 1.9, installing under the tools/ prefix.
function samtools_install {
    wget --no-check-certificate http://sourceforge.net/projects/samtools/files/samtools/1.9/samtools-1.9.tar.bz2
    tar -jxvf samtools-1.9.tar.bz2
    rm samtools-1.9.tar.bz2
    make prefix=$PWD install -C samtools-1.9/
}
# Fetch the prebuilt bedtools 2.28.0 single binary and symlink it into tools/bin.
function bedtools_install {
    wget --no-check-certificate https://github.com/arq5x/bedtools2/releases/download/v2.28.0/bedtools
    chmod u+x $PWD/bedtools
    ln -s $PWD/bedtools $PWD/bin
}
# Download, configure and build GMAP/GSNAP (2020-06-04 snapshot), installing
# under the tools/ prefix.
function gmap_install {
    wget --no-check-certificate http://research-pub.gene.com/gmap/src/gmap-gsnap-2020-06-04.tar.gz
    tar -xvzf gmap-gsnap-2020-06-04.tar.gz
    rm gmap-gsnap-2020-06-04.tar.gz
    cd gmap-2020-06-04 && ./configure --prefix=$PWD/../ ; cd ..
    make -C gmap-2020-06-04/
    make prefix=$PWD install -C gmap-2020-06-04/
}
# Fetch the prebuilt salmon 0.14.0 bundle and symlink the binary into tools/bin
# (the archive unpacks into salmon-latest_linux_x86_64 — presumably; the ln
# target below relies on that name, verify if the download version changes).
function salmon_install {
    wget --no-check-certificate https://github.com/COMBINE-lab/salmon/releases/download/v0.14.0/salmon-0.14.0_linux_x86_64.tar.gz
    tar -xvzf salmon-0.14.0_linux_x86_64.tar.gz
    rm salmon-0.14.0_linux_x86_64.tar.gz
    ln -s $PWD/salmon-latest_linux_x86_64/bin/salmon $PWD/bin
}
# Fetch the prebuilt HISAT2 2.1.0 bundle; expose hisat2 and hisat2-build
# under the shorter names "hisat" / "hisat-build" expected by the pipeline.
function hisat_install {
    wget --no-check-certificate http://ccb.jhu.edu/software/hisat2/dl/hisat2-2.1.0-Linux_x86_64.zip
    unzip hisat2-2.1.0-Linux_x86_64.zip
    rm hisat2-2.1.0-Linux_x86_64.zip
    ln -s $PWD/hisat2-2.1.0/hisat2 $PWD/bin/hisat
    ln -s $PWD/hisat2-2.1.0/hisat2-build $PWD/bin/hisat-build
}
# Driver: for each tool in $commands, install it if missing, then record its
# path in ../tools.groovy for the pipeline to read.
echo "// Path to tools used by the MINTIE pipeline" > ../tools.groovy
for c in $commands ; do
    c_path=`which $PWD/bin/$c 2>/dev/null`
    # NOTE(review): $c_path is unquoted in the -z tests throughout this file;
    # it happens to work because tools/bin paths contain no spaces — confirm
    # before moving the tools directory somewhere with spaces.
    if [ -z $c_path ] ; then
        echo "$c not found, fetching it"
        # Dynamic dispatch: e.g. c=bpipe calls bpipe_install defined above.
        ${c}_install
        c_path=`which $PWD/bin/$c 2>/dev/null`
    fi
    echo "$c=\"$c_path\"" >> ../tools.groovy
done
# check that R is installed
# install requirements if so
R_path=`which R 2>/dev/null`
if [ -z $R_path ] ; then
    echo "R not found!"
    echo "Please go to http://www.r-project.org/ and follow the installation instructions."
    echo "Then install requirements by running \"Rscript install_R_dependencies.R\""
    exit 1
else
    # A "--skip_R" first argument bypasses the R package installation.
    if [ $# -ne 0 ] && [ $1 = "--skip_R" ]; then
        echo "Skipping R requirements."
    else
        echo "Installing R requirements..."
        Rscript ../install_R_dependencies.R
        status=$?
        if [ ! $status -eq 0 ] ; then
            echo "Installing R requirements failed!"
            echo "Please install dependencies manually (https://github.com/Oshlack/MINTIE/wiki/Install#troubleshooting)."
            exit 1
        fi
    fi
fi
echo "R=\"$R_path\"" >> ../tools.groovy
# check that python is installed
# install requirements if so
python_path=`which python 2>/dev/null`
if [ -z $python_path ] ; then
    echo "Python not found!"
    echo "Please go to https://www.anaconda.com/distribution/#download-section,"
    echo "download the Python 3.7+ version and follow download instructions."
    echo "Then install requirements by running \"pip install -r requirements.txt\""
    exit 1
else
    echo "Installing python requirements..."
    # NOTE(review): the pip install is commented out, so $? below reflects the
    # preceding echo and this branch can never fail — confirm this is intended.
    #pip install -r ../requirements.txt
    status=$?
    if [ ! $status -eq 0 ]; then
        echo "Installing python requirements failed!"
        echo "Please install dependencies manually (https://github.com/Oshlack/MINTIE/wiki/Install#troubleshooting)."
        exit 1
    fi
fi
echo "python=\"$python_path\"" >> ../tools.groovy
#loop through commands to check they are all installed
echo "Checking that all required tools were installed:"
Final_message="All commands installed successfully!"
for c in $commands ; do
    c_path=`which $PWD/bin/$c 2>/dev/null`
    if [ -z $c_path ] ; then
        echo -n "WARNING: $c could not be found!!!! "
        echo "You will need to download and install $c manually, then add its path to tools.groovy"
        Final_message="WARNING: One or more command did not install successfully. See warning messages above. \
You will need to correct this before running MINTIE."
    else
        echo "$c looks like it has been installed"
    fi
done
echo "**********************************************************"
echo $Final_message
|
# Sync the PyTorch-hub-kr markdown sources and convert every top-level .md
# (except README) into a Jupyter notebook under assets/hub/.
set -ex
pushd _hub
git pull https://github.com/9bow/PyTorch-hub-kr
popd
cp _hub/images/* assets/images/
# Install converters only if they are not already importable.
python3 -c 'import notedown' || pip3 install notedown
python3 -c 'import yaml' || pip3 install pyyaml
mkdir -p assets/hub/
pushd _hub
# Pipe each markdown file through the project formatter, then notedown,
# writing <name>.ipynb alongside the original name.
find . -maxdepth 1 -name "*.md" | grep -v "README" | cut -f2- -d"/" |
 while read file; do
   cat "$file" | python3 ../_devel/formatter.py | notedown >"../assets/hub/${file%.md}.ipynb";
done
popd
|
import * as fs from "fs";
import * as path from "path";
import * as _ from "lodash";
const pjsonFileName = path.join(__dirname, "..", "..", "package.json");

// Monkey-patch fs.readFile so that when this package's own package.json is
// read in the (path, "utf8", callback) form, the manifest handed to the
// callback has oclif.commands rewritten to "./bin/commands". Every other
// call signature or path passes through to the real fs.readFile untouched.
// Presumably this redirects oclif's command discovery at runtime — confirm
// against the oclif loader that performs this exact read.
// @ts-expect-error: fs.readFile is not readonly property
fs.readFile = new Proxy(fs.readFile, {
  apply(target, thisArg, args) {
    // NOTE(review): loose equality (==) on the encoding also admits a String
    // object; anything else falls through to the passthrough branch.
    if (
      Array.isArray(args) &&
      args[0] === pjsonFileName &&
      args[1] == "utf8" &&
      typeof args[2] === "function"
    ) {
      target.call(thisArg, pjsonFileName, (error, data) => {
        const callback = args[2] as (
          error: Error | null,
          data?: string,
        ) => void;
        if (!_.isNil(error)) {
          // Propagate read errors unchanged.
          callback(error);
        } else {
          const pjson = JSON.parse(data.toString()) as {
            oclif: {commands: string};
          };
          // Point oclif at the built commands directory.
          pjson.oclif.commands = "./bin/commands";
          callback(error, JSON.stringify(pjson));
        }
      });
    } else {
      // Passthrough for every other readFile invocation.
      target.apply(thisArg, args);
    }
  },
});
|
/*****************************************************************************
* Copyright (C) NanoContainer Organization. All rights reserved. *
* ------------------------------------------------------------------------- *
* The software in this package is published under the terms of the BSD *
* style license a copy of which has been included with this distribution in *
* the LICENSE.txt file. *
* *
*****************************************************************************/
package org.nanocontainer.nanowar;
import org.picocontainer.defaults.ObjectReference;
import javax.servlet.http.HttpSession;
import java.io.Serializable;
/**
 * References an object that lives as an attribute of the
 * HttpSession.
 *
 * Reads and writes go straight through to
 * {@link javax.servlet.http.HttpSession#getAttribute(String)} /
 * {@link javax.servlet.http.HttpSession#setAttribute(String, Object)}
 * under the key supplied at construction.
 */
public class SessionScopeObjectReference implements ObjectReference, Serializable {
//The only reason this class is Serializable and the 'session' field is transient
//is so that if this class is used as a key in a PicoContainer (as it is in the
//nanocontainer servlet framework), it won't break serializability of the
//container. The deserialized class won't be reused for its actual purpose, but
//discarded. As such, there is no need to resurrect the transient session field
private transient HttpSession session;
private String key;
public SessionScopeObjectReference(HttpSession session, String key) {
this.session = session;
this.key = key;
}
/** Stores {@code item} in the session under this reference's key. */
public void set(Object item) {
session.setAttribute(key, item);
}
/** Returns the session attribute stored under this reference's key, or null if absent. */
public Object get() {
return session.getAttribute(key);
}
}
|
# Prefer US English and use UTF-8
export LC_ALL='en_US.UTF-8'
export LANG='en_US.UTF-8'
# Set default programs
export TERMINAL='alacritty'
export BROWSER='brave'
export PAGER='less'
export EDITOR='nvim'
export VISUAL="${EDITOR}"
# Set correct TTY for GPG
# https://www.gnupg.org/documentation/manuals/gnupg/Invoking-GPG_002dAGENT.html
export GPG_TTY="$(tty)"
# Set environment variables for Firefox
# (only when running under a Wayland session)
if [[ -n "${WAYLAND_DISPLAY}" ]]; then
    export MOZ_ENABLE_WAYLAND=1
    export XDG_CURRENT_DESKTOP=sway
fi
# NOTE(review): hard-coded per-user macOS path for the Java language server —
# will not resolve for other users/machines; consider $HOME/.local/bin.
export JDTLS_HOME=/Users/mathiskretz/.local/bin
|
<reponame>MagnoBelloni/ImpactaAngular4
// Shape of a course ("curso") record used by the application.
export interface ICurso{
    codigo: number;    // course code / identifier
    descricao: string; // course description
    ch: number;        // presumably "carga horária" (workload hours) — verify
}
|
import json
import numpy as np
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.metrics.pairwise import cosine_similarity
class SearchPlugin:
    def __init__(self, saved_model):
        """
        Initializes search plugin by loading the saved machine learning model.

        Parameters
        ----------
        saved_model: str
            String path of the saved machine learning model (a JSON file).
        """
        # Fix: use a context manager so the file handle opened here is
        # closed (the original `open(...).read()` leaked the handle).
        with open(saved_model, 'r') as f:
            self.model = json.load(f)

    def search(self, query):
        """
        Utilizes the saved machine learning model to improve user query search results.

        Parameters
        ----------
        query: str
            String representing user query.

        Returns
        -------
        results: list
            Search results ordered from most to least similar to the query.
        """
        # NOTE(review): the whole model mapping is passed as the vectorizer
        # vocabulary while self.model['vector'] / self.model['result'] are
        # also read below — confirm the saved-model schema supports both uses.
        # Fix: a TfidfVectorizer with a fixed vocabulary must still be fitted
        # before transform() (use_idf is enabled by default), so the original
        # transform-only call raised NotFittedError; fit on the query instead.
        query_vector = TfidfVectorizer(vocabulary=self.model).fit_transform([query]).toarray()
        # Fix: cosine_similarity returns a (1, n) matrix; take row 0 so the
        # scores are 1-D before sorting.
        sims = cosine_similarity(query_vector, self.model['vector'])[0]
        # Indices of the stored results, best match first.
        ranked = np.argsort(-sims)
        # Fix: a plain Python list cannot be indexed with an index array
        # (the original raised TypeError); select results one by one.
        return [self.model['result'][i] for i in ranked]
|
<reponame>1aurabrown/ervell<gh_stars>0
import gql from 'graphql-tag';
// GraphQL mutation document: creates a channel with the given title and
// PRIVATE visibility, returning the mutation id and the new channel's id.
export default gql`
mutation createPrivateChannelMutation($title: String!) {
create_channel(input: { title: $title, visibility: PRIVATE }) {
clientMutationId
channel {
id
}
}
}
`;
|
package pluto
import (
"golang.org/x/net/context"
"google.golang.org/grpc"
)
// serviceContextUnaryServerInterceptor Interceptor that adds service instance
// available in handlers context
func serviceContextUnaryServerInterceptor(s *Service) grpc.UnaryServerInterceptor {
	return func(ctx context.Context, req interface{}, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (interface{}, error) {
		// Note: service instance is always available in handlers context
		// under the general name > pluto
		ctx = s.WithContext(ctx)
		// Invoke the wrapped handler with the enriched context.
		return handler(ctx, req)
	}
}
|
#!/usr/bin/env bash
# Dev helper: overwrite the Avalonia assemblies inside the local NuGet cache
# (package version given as $1) with the locally built ControlCatalog output,
# so consuming projects pick up the freshly built binaries without repacking.
cp ../samples/ControlCatalog.NetCore/bin/Debug/netcoreapp3.1/Avalonia**.dll ~/.nuget/packages/avalonia/$1/lib/netcoreapp3.1/
cp ../samples/ControlCatalog.NetCore/bin/Debug/netcoreapp3.1/Avalonia**.dll ~/.nuget/packages/avalonia/$1/lib/netstandard2.0/
cp ../samples/ControlCatalog.NetCore/bin/Debug/netcoreapp3.1/Avalonia**.dll ~/.nuget/packages/avalonia.skia/$1/lib/netstandard2.0/
cp ../samples/ControlCatalog.NetCore/bin/Debug/netcoreapp3.1/Avalonia**.dll ~/.nuget/packages/avalonia.native/$1/lib/netstandard2.0/
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.