text stringlengths 1 1.05M |
|---|
<filename>core/locale.go
package core
import (
"fmt"
"github.com/nicksnyder/go-i18n/i18n"
)
var (
	// Langs holds the language codes (ISO 639-1) that the application
	// supports; populated in init() and consumed by LoadLocales.
	Langs []string
)
// P is a sugar type to write core.P{} instead of map[string]interface{}
// when passing template parameters to i18n translation calls.
type P map[string]interface{}

// TranslateFunc represents a translation function: it resolves a
// translation ID plus optional arguments into a localized string.
type TranslateFunc func(translationID string, args ...interface{}) string
// init registers the supported languages. Keep this list in sync with
// the JSON files shipped under locales/.
func init() {
	Langs = []string{"en", "fr"}
}
// LoadLocales loads the i18n translation files for every language in
// Langs from the embedded assets. It panics when a file is missing or
// cannot be parsed, since the application cannot run without locales.
func LoadLocales() {
	for _, lang := range Langs {
		filePath := fmt.Sprintf("locales/%s.json", lang)
		// fetch file from embedded assets
		data, err := Asset(filePath)
		if err != nil {
			panic("Failed to load translation files for language: " + lang)
		}
		// load translations; the original code ignored this error, which
		// let a malformed locale file pass silently and surface later as
		// missing translations at runtime.
		if err := i18n.ParseTranslationFileBytes(filePath, data); err != nil {
			panic("Failed to parse translation file " + filePath + ": " + err.Error())
		}
	}
}
// MustTfunc is our own wrapper around i18n.MustTfunc that converts core.P
// arguments into map[string]interface{} to please the i18n lib.
//
// The previous implementation duplicated the conversion for args[0] and
// args[1] only and silently dropped any argument past index 1; this
// version converts every argument, which is backward compatible for the
// zero/one/two argument calls and correct for longer ones.
func MustTfunc(lang string) TranslateFunc {
	f := i18n.MustTfunc(lang)
	return TranslateFunc(func(translationID string, args ...interface{}) string {
		convArgs := make([]interface{}, 0, len(args))
		for _, arg := range args {
			if p, isP := arg.(P); isP {
				// unwrap core.P into the plain map type i18n expects
				convArgs = append(convArgs, map[string]interface{}(p))
			} else {
				convArgs = append(convArgs, arg)
			}
		}
		return f(translationID, convArgs...)
	})
}
|
<reponame>dongxuny/rk-entry
// Copyright (c) 2021 rookie-ninja
//
// Use of this source code is governed by an Apache-style
// license that can be found in the LICENSE file.
package rkentry
import (
"context"
"encoding/json"
"net/http"
"path"
"runtime"
)
const (
	// CommonServiceEntryType type of entry, reported via GetType().
	CommonServiceEntryType = "CommonServiceEntry"
	// CommonServiceEntryNameDefault name of entry, used when no name option is given.
	CommonServiceEntryNameDefault = "CommonServiceDefault"
	// CommonServiceEntryDescription description of entry.
	CommonServiceEntryDescription = "Internal RK entry which implements commonly used API."
)
// @title RK Common Service
// @version 1.0
// @description This is builtin RK common service.
// @contact.name rk-dev
// @contact.url https://github.com/rookie-ninja/rk-entry
// @contact.email <EMAIL>
// @license.name Apache 2.0 License
// @license.url https://github.com/rookie-ninja/rk-entry/blob/master/LICENSE.txt
// @securityDefinitions.basic BasicAuth
// @securityDefinitions.apikey ApiKeyAuth
// @in header
// @name X-API-Key
// @securityDefinitions.apikey JWT
// @in header
// @name Authorization
// @schemes http https
// BootConfigCommonService Bootstrap config of common service.
// 1: Enabled: Enable common service; when false,
// RegisterCommonServiceEntryWithConfig returns nil.
type BootConfigCommonService struct {
	Enabled bool `yaml:"enabled" json:"enabled"`
}
// CommonServiceEntry RK common service which contains commonly used APIs.
// 1: Healthy Returns true if process is alive
// 2: Gc Trigger gc()
// 3: Info Returns entry basic information
// 4: Configs Returns viper configs in GlobalAppCtx
// 5: Apis Returns list of apis registered in gin router
// 6: Sys Returns CPU and Memory information
// 7: Req Returns request metrics
// 8: Certs Returns certificates
// 9: Entries Returns entries
// 10: Logs Returns log entries
// 11: Git Returns git metadata
// 12: Deps Returns dependency which is full go.mod file content
// 13: License Returns license file content
// 14: Readme Returns README file content
//
// Only EntryName and EntryType are serialized by the struct tags; the
// remaining fields are exposed through MarshalJSON instead.
type CommonServiceEntry struct {
	EntryName        string            `json:"entryName" yaml:"entryName"`
	EntryType        string            `json:"entryType" yaml:"entryType"`
	EntryDescription string            `json:"-" yaml:"-"`
	EventLoggerEntry *EventLoggerEntry `json:"-" yaml:"-"`
	ZapLoggerEntry   *ZapLoggerEntry   `json:"-" yaml:"-"`
	// The *Path fields below hold the fixed /rk/v1/... route of each API.
	HealthyPath        string `json:"-" yaml:"-"`
	GcPath             string `json:"-" yaml:"-"`
	InfoPath           string `json:"-" yaml:"-"`
	ConfigsPath        string `json:"-" yaml:"-"`
	SysPath            string `json:"-" yaml:"-"`
	EntriesPath        string `json:"-" yaml:"-"`
	CertsPath          string `json:"-" yaml:"-"`
	LogsPath           string `json:"-" yaml:"-"`
	DepsPath           string `json:"-" yaml:"-"`
	LicensePath        string `json:"-" yaml:"-"`
	ReadmePath         string `json:"-" yaml:"-"`
	GitPath            string `json:"-" yaml:"-"`
	ApisPath           string `json:"-" yaml:"-"`
	ReqPath            string `json:"-" yaml:"-"`
	GwErrorMappingPath string `json:"-" yaml:"-"`
}
// CommonServiceEntryOption Common service entry option, applied by
// RegisterCommonServiceEntry in the order given.
type CommonServiceEntryOption func(*CommonServiceEntry)
// WithNameCommonService Provide name.
func WithNameCommonService(name string) CommonServiceEntryOption {
	return func(e *CommonServiceEntry) {
		e.EntryName = name
	}
}

// WithEventLoggerEntryCommonService Provide rkentry.EventLoggerEntry.
func WithEventLoggerEntryCommonService(eventLoggerEntry *EventLoggerEntry) CommonServiceEntryOption {
	return func(e *CommonServiceEntry) {
		e.EventLoggerEntry = eventLoggerEntry
	}
}

// WithZapLoggerEntryCommonService Provide rkentry.ZapLoggerEntry.
func WithZapLoggerEntryCommonService(zapLoggerEntry *ZapLoggerEntry) CommonServiceEntryOption {
	return func(e *CommonServiceEntry) {
		e.ZapLoggerEntry = zapLoggerEntry
	}
}
// RegisterCommonServiceEntryWithConfig Create new common service entry with
// config. Returns nil when the service is disabled in the boot config.
func RegisterCommonServiceEntryWithConfig(config *BootConfigCommonService, name string, zap *ZapLoggerEntry, event *EventLoggerEntry) *CommonServiceEntry {
	if !config.Enabled {
		return nil
	}
	return RegisterCommonServiceEntry(
		WithNameCommonService(name),
		WithZapLoggerEntryCommonService(zap),
		WithEventLoggerEntryCommonService(event))
}
// RegisterCommonServiceEntry Create new common service entry with options.
// Defaults (name, loggers, API paths) are set first and may be overridden
// by options; nil loggers and an empty name fall back to the defaults.
func RegisterCommonServiceEntry(opts ...CommonServiceEntryOption) *CommonServiceEntry {
	entry := &CommonServiceEntry{
		EntryName:          CommonServiceEntryNameDefault,
		EntryType:          CommonServiceEntryType,
		EntryDescription:   CommonServiceEntryDescription,
		ZapLoggerEntry:     GlobalAppCtx.GetZapLoggerEntryDefault(),
		EventLoggerEntry:   GlobalAppCtx.GetEventLoggerEntryDefault(),
		HealthyPath:        "/rk/v1/healthy",
		GcPath:             "/rk/v1/gc",
		InfoPath:           "/rk/v1/info",
		ConfigsPath:        "/rk/v1/configs",
		SysPath:            "/rk/v1/sys",
		EntriesPath:        "/rk/v1/entries",
		CertsPath:          "/rk/v1/certs",
		LogsPath:           "/rk/v1/logs",
		DepsPath:           "/rk/v1/deps",
		LicensePath:        "/rk/v1/license",
		ReadmePath:         "/rk/v1/readme",
		GitPath:            "/rk/v1/git",
		ApisPath:           "/rk/v1/apis",
		ReqPath:            "/rk/v1/req",
		GwErrorMappingPath: "/rk/v1/gwErrorMapping",
	}

	// Apply user options in order; later options win.
	for _, opt := range opts {
		opt(entry)
	}

	// Guard against options that cleared required fields.
	if entry.ZapLoggerEntry == nil {
		entry.ZapLoggerEntry = GlobalAppCtx.GetZapLoggerEntryDefault()
	}
	if entry.EventLoggerEntry == nil {
		entry.EventLoggerEntry = GlobalAppCtx.GetEventLoggerEntryDefault()
	}
	if entry.EntryName == "" {
		entry.EntryName = CommonServiceEntryNameDefault
	}

	return entry
}
// Bootstrap common service entry. No startup work is needed; the handlers
// are plain functions registered by the hosting web framework.
func (entry *CommonServiceEntry) Bootstrap(context.Context) {
	// Noop
}

// Interrupt common service entry. Nothing to tear down.
func (entry *CommonServiceEntry) Interrupt(context.Context) {
	// Noop
}
// GetName Get name of entry.
func (entry *CommonServiceEntry) GetName() string {
	return entry.EntryName
}

// GetType Get entry type.
func (entry *CommonServiceEntry) GetType() string {
	return entry.EntryType
}

// String Stringfy entry as JSON via MarshalJSON; returns "" if marshaling
// fails (error deliberately ignored).
func (entry *CommonServiceEntry) String() string {
	bytes, _ := json.Marshal(entry)
	return string(bytes)
}

// GetDescription Get description of entry.
func (entry *CommonServiceEntry) GetDescription() string {
	return entry.EntryDescription
}
// MarshalJSON Marshal entry. Only names and description are exposed;
// logger entries are reduced to their names.
// NOTE(review): assumes ZapLoggerEntry/EventLoggerEntry are non-nil —
// RegisterCommonServiceEntry guarantees this, but a hand-built struct
// would panic here.
func (entry *CommonServiceEntry) MarshalJSON() ([]byte, error) {
	m := map[string]interface{}{
		"entryName":        entry.EntryName,
		"entryType":        entry.EntryType,
		"entryDescription": entry.EntryDescription,
		"zapLoggerEntry":   entry.ZapLoggerEntry.GetName(),
		"eventLoggerEntry": entry.EventLoggerEntry.GetName(),
	}
	return json.Marshal(&m)
}

// UnmarshalJSON Not supported; silently accepts any input unchanged.
func (entry *CommonServiceEntry) UnmarshalJSON([]byte) error {
	return nil
}
// doHealthy builds the payload returned by the Healthy handler.
func doHealthy() *HealthyResponse {
	res := &HealthyResponse{}
	res.Healthy = true
	return res
}

// Healthy handler
// @Summary Get application healthy status
// @Id 1
// @version 1.0
// @Security ApiKeyAuth
// @Security BasicAuth
// @Security JWT
// @produce application/json
// @Success 200 {object} HealthyResponse
// @Router /rk/v1/healthy [get]
func (entry *CommonServiceEntry) Healthy(writer http.ResponseWriter, request *http.Request) {
	writer.WriteHeader(http.StatusOK)
	raw, _ := json.MarshalIndent(doHealthy(), "", " ")
	writer.Write(raw)
}
// doGc snapshots memory stats, forces a garbage collection, then
// snapshots again so callers can compare before/after.
func doGc() *GcResponse {
	res := &GcResponse{MemStatBeforeGc: NewMemInfo()}
	runtime.GC()
	res.MemStatAfterGc = NewMemInfo()
	return res
}

// Gc handler
// @Summary Trigger Gc
// @Id 2
// @version 1.0
// @Security ApiKeyAuth
// @Security BasicAuth
// @Security JWT
// @produce application/json
// @Success 200 {object} GcResponse
// @Router /rk/v1/gc [get]
func (entry *CommonServiceEntry) Gc(writer http.ResponseWriter, request *http.Request) {
	writer.WriteHeader(http.StatusOK)
	raw, _ := json.MarshalIndent(doGc(), "", " ")
	writer.Write(raw)
}
// doInfo is a thin alias for NewProcessInfo, kept for symmetry with the
// other do* helpers.
func doInfo() *ProcessInfo {
	return NewProcessInfo()
}

// Info handler
// @Summary Get application and process info
// @Id 3
// @version 1.0
// @Security ApiKeyAuth
// @Security BasicAuth
// @Security JWT
// @produce application/json
// @Success 200 {object} ProcessInfo
// @Router /rk/v1/info [get]
func (entry *CommonServiceEntry) Info(writer http.ResponseWriter, request *http.Request) {
	writer.WriteHeader(http.StatusOK)
	raw, _ := json.MarshalIndent(doInfo(), "", " ")
	writer.Write(raw)
}
// doConfigs converts every registered ConfigEntry in GlobalAppCtx into a
// response element carrying its identity, viper content and file path.
func doConfigs() *ConfigsResponse {
	res := &ConfigsResponse{
		Entries: make([]*ConfigsResponseElement, 0),
	}
	for _, configEntry := range GlobalAppCtx.ListConfigEntries() {
		res.Entries = append(res.Entries, &ConfigsResponseElement{
			EntryName:        configEntry.GetName(),
			EntryType:        configEntry.GetType(),
			EntryDescription: configEntry.GetDescription(),
			EntryMeta:        configEntry.GetViperAsMap(),
			Path:             configEntry.Path,
		})
	}
	return res
}

// Configs handler
// @Summary List ConfigEntry
// @Id 4
// @version 1.0
// @Security ApiKeyAuth
// @Security BasicAuth
// @Security JWT
// @produce application/json
// @Success 200 {object} ConfigsResponse
// @Router /rk/v1/configs [get]
func (entry *CommonServiceEntry) Configs(writer http.ResponseWriter, request *http.Request) {
	writer.WriteHeader(http.StatusOK)
	raw, _ := json.MarshalIndent(doConfigs(), "", " ")
	writer.Write(raw)
}
// doSys gathers one snapshot each of CPU, memory, network, OS and Go
// environment information.
func doSys() *SysResponse {
	res := &SysResponse{}
	res.CpuInfo = NewCpuInfo()
	res.MemInfo = NewMemInfo()
	res.NetInfo = NewNetInfo()
	res.OsInfo = NewOsInfo()
	res.GoEnvInfo = NewGoEnvInfo()
	return res
}

// Sys handler
// @Summary Get OS Stat
// @Id 5
// @version 1.0
// @Security ApiKeyAuth
// @Security BasicAuth
// @Security JWT
// @produce application/json
// @Success 200 {object} SysResponse
// @Router /rk/v1/sys [get]
func (entry *CommonServiceEntry) Sys(writer http.ResponseWriter, request *http.Request) {
	writer.WriteHeader(http.StatusOK)
	raw, _ := json.MarshalIndent(doSys(), "", " ")
	writer.Write(raw)
}
// doEntries aggregates every category of entry registered in GlobalAppCtx
// into one response, keyed by entry type, and appends the app info entry.
func doEntries() *EntriesResponse {
	res := &EntriesResponse{
		Entries: map[string][]*EntriesResponseElement{},
	}

	// Iterate all internal and external entry categories in GlobalAppCtx.
	for _, raw := range []map[string]Entry{
		GlobalAppCtx.ListEntries(),
		GlobalAppCtx.ListEventLoggerEntriesRaw(),
		GlobalAppCtx.ListZapLoggerEntriesRaw(),
		GlobalAppCtx.ListConfigEntriesRaw(),
		GlobalAppCtx.ListCertEntriesRaw(),
		GlobalAppCtx.ListCredEntriesRaw(),
	} {
		entriesHelper(raw, res)
	}

	// App info entry is singular and added directly.
	appInfoEntry := GlobalAppCtx.GetAppInfoEntry()
	res.Entries[appInfoEntry.GetType()] = []*EntriesResponseElement{
		{
			EntryName:        appInfoEntry.GetName(),
			EntryType:        appInfoEntry.GetType(),
			EntryDescription: appInfoEntry.GetDescription(),
			EntryMeta:        appInfoEntry,
		},
	}

	return res
}
// entriesHelper is the helper function of /entries: it converts each Entry
// in m into an EntriesResponseElement and groups them by entry type in res.
//
// Bug fix: the previous version did `entries = append(entries, element)`
// on the local copy of the slice pulled out of the map and never wrote it
// back, so every type bucket kept only its first element and all later
// entries of the same type were silently dropped.
func entriesHelper(m map[string]Entry, res *EntriesResponse) {
	// Iterate entries and construct EntryElement.
	for i := range m {
		entry := m[i]
		element := &EntriesResponseElement{
			EntryName:        entry.GetName(),
			EntryType:        entry.GetType(),
			EntryDescription: entry.GetDescription(),
			EntryMeta:        entry,
		}
		// append works on a nil slice, so a missing bucket is created
		// and an existing one is extended — write the result back.
		res.Entries[entry.GetType()] = append(res.Entries[entry.GetType()], element)
	}
}
// Entries handler
// @Summary List all Entry
// @Id 6
// @version 1.0
// @Security ApiKeyAuth
// @Security BasicAuth
// @Security JWT
// @produce application/json
// @Success 200 {object} EntriesResponse
// @Router /rk/v1/entries [get]
func (entry *CommonServiceEntry) Entries(writer http.ResponseWriter, request *http.Request) {
	writer.WriteHeader(http.StatusOK)
	raw, _ := json.MarshalIndent(doEntries(), "", " ")
	writer.Write(raw)
}
// doCerts converts every registered CertEntry into a response element.
// Retriever- and Store-derived fields are only filled when the
// corresponding component is present.
func doCerts() *CertsResponse {
	res := &CertsResponse{
		Entries: make([]*CertsResponseElement, 0),
	}

	// Iterate cert entries and construct CertResponse.
	for _, certEntry := range GlobalAppCtx.ListCertEntries() {
		element := &CertsResponseElement{
			EntryName:        certEntry.GetName(),
			EntryType:        certEntry.GetType(),
			EntryDescription: certEntry.GetDescription(),
		}

		if certEntry.Retriever != nil {
			element.Endpoint = certEntry.Retriever.GetEndpoint()
			element.Locale = certEntry.Retriever.GetLocale()
			element.Provider = certEntry.Retriever.GetProvider()
			element.ServerCertPath = certEntry.ServerCertPath
			element.ServerKeyPath = certEntry.ServerKeyPath
			element.ClientCertPath = certEntry.ClientCertPath
			element.ClientKeyPath = certEntry.ClientKeyPath
		}

		if certEntry.Store != nil {
			element.ServerCert = certEntry.Store.SeverCertString()
			element.ClientCert = certEntry.Store.ClientCertString()
		}

		res.Entries = append(res.Entries, element)
	}

	return res
}

// Certs handler
// @Summary List CertEntry
// @Id 7
// @version 1.0
// @Security ApiKeyAuth
// @Security BasicAuth
// @Security JWT
// @produce application/json
// @Success 200 {object} CertsResponse
// @Router /rk/v1/certs [get]
func (entry *CommonServiceEntry) Certs(writer http.ResponseWriter, request *http.Request) {
	writer.WriteHeader(http.StatusOK)
	raw, _ := json.MarshalIndent(doCerts(), "", " ")
	writer.Write(raw)
}
// doLogs aggregates event-logger and zap-logger entries into one
// response, grouped by entry type via logsHelper.
func doLogs() *LogsResponse {
	res := &LogsResponse{
		Entries: map[string][]*LogsResponseElement{},
	}
	for _, raw := range []map[string]Entry{
		GlobalAppCtx.ListEventLoggerEntriesRaw(),
		GlobalAppCtx.ListZapLoggerEntriesRaw(),
	} {
		logsHelper(raw, res)
	}
	return res
}
// logsHelper is the helper function of /logs: it converts each logger
// entry in m into a LogsResponseElement (including output paths when the
// underlying zap config is available) and stores the batch in res keyed
// by the entries' type.
//
// Bug fix: when m was empty the previous version still executed
// `res.Entries[""] = entries`, polluting the response with an empty
// bucket under the zero-value key; we now return early instead.
func logsHelper(m map[string]Entry, res *LogsResponse) {
	entries := make([]*LogsResponseElement, 0)

	// Iterate logger related entries and construct LogEntryElement.
	for i := range m {
		entry := m[i]
		logEntry := &LogsResponseElement{
			EntryName:        entry.GetName(),
			EntryType:        entry.GetType(),
			EntryDescription: entry.GetDescription(),
			EntryMeta:        entry,
		}
		// Both logger flavors expose output paths through LoggerConfig.
		if val, ok := entry.(*ZapLoggerEntry); ok {
			if val.LoggerConfig != nil {
				logEntry.OutputPaths = val.LoggerConfig.OutputPaths
				logEntry.ErrorOutputPaths = val.LoggerConfig.ErrorOutputPaths
			}
		}
		if val, ok := entry.(*EventLoggerEntry); ok {
			if val.LoggerConfig != nil {
				logEntry.OutputPaths = val.LoggerConfig.OutputPaths
				logEntry.ErrorOutputPaths = val.LoggerConfig.ErrorOutputPaths
			}
		}
		entries = append(entries, logEntry)
	}

	if len(entries) < 1 {
		return
	}

	// All entries from one call share a type, so the first one's type
	// names the bucket.
	res.Entries[entries[0].EntryType] = entries
}
// Logs handler
// @Summary List logger related entries
// @Id 8
// @version 1.0
// @Security ApiKeyAuth
// @Security BasicAuth
// @Security JWT
// @produce application/json
// @Success 200 {object} LogsResponse
// @Router /rk/v1/logs [get]
func (entry *CommonServiceEntry) Logs(writer http.ResponseWriter, request *http.Request) {
	writer.WriteHeader(http.StatusOK)
	raw, _ := json.MarshalIndent(doLogs(), "", " ")
	writer.Write(raw)
}
// doDeps exposes the raw go.mod content captured in the app info entry.
func doDeps() *DepResponse {
	res := &DepResponse{}
	res.GoMod = GlobalAppCtx.GetAppInfoEntry().GoMod
	return res
}

// Deps handler
// @Summary List dependencies related application
// @Id 9
// @version 1.0
// @Security ApiKeyAuth
// @Security BasicAuth
// @Security JWT
// @produce application/json
// @Success 200 {object} DepResponse
// @Router /rk/v1/deps [get]
func (entry *CommonServiceEntry) Deps(writer http.ResponseWriter, request *http.Request) {
	writer.WriteHeader(http.StatusOK)
	raw, _ := json.MarshalIndent(doDeps(), "", " ")
	writer.Write(raw)
}
// doLicense exposes the license file content captured in the app info entry.
func doLicense() *LicenseResponse {
	res := &LicenseResponse{}
	res.License = GlobalAppCtx.GetAppInfoEntry().License
	return res
}

// License handler
// @Summary Get license related application
// @Id 10
// @version 1.0
// @Security ApiKeyAuth
// @Security BasicAuth
// @Security JWT
// @produce application/json
// @Success 200 {object} LicenseResponse
// @Router /rk/v1/license [get]
func (entry *CommonServiceEntry) License(writer http.ResponseWriter, request *http.Request) {
	writer.WriteHeader(http.StatusOK)
	raw, _ := json.MarshalIndent(doLicense(), "", " ")
	writer.Write(raw)
}
// doReadme exposes the README content captured in the app info entry.
func doReadme() *ReadmeResponse {
	res := &ReadmeResponse{}
	res.Readme = GlobalAppCtx.GetAppInfoEntry().Readme
	return res
}

// Readme handler
// @Summary Get README file.
// @Id 11
// @version 1.0
// @Security ApiKeyAuth
// @Security BasicAuth
// @Security JWT
// @produce application/json
// @Success 200 {object} ReadmeResponse
// @Router /rk/v1/readme [get]
func (entry *CommonServiceEntry) Readme(writer http.ResponseWriter, request *http.Request) {
	writer.WriteHeader(http.StatusOK)
	raw, _ := json.MarshalIndent(doReadme(), "", " ")
	writer.Write(raw)
}
// doGit flattens git metadata from the RK meta entry into a GitResponse;
// returns an empty response when no meta entry was registered.
func doGit() *GitResponse {
	res := &GitResponse{}

	rkMetaEntry := GlobalAppCtx.GetRkMetaEntry()
	if rkMetaEntry == nil {
		return res
	}

	git := rkMetaEntry.RkMeta.Git
	res.Package = path.Base(git.Url)
	res.Branch = git.Branch
	res.Tag = git.Tag
	res.Url = git.Url
	res.CommitId = git.Commit.Id
	res.CommitIdAbbr = git.Commit.IdAbbr
	res.CommitSub = git.Commit.Sub
	res.CommitterName = git.Commit.Committer.Name
	res.CommitterEmail = git.Commit.Committer.Email
	res.CommitDate = git.Commit.Date

	return res
}

// Git handler
// @Summary Get Git information.
// @Id 12
// @version 1.0
// @Security ApiKeyAuth
// @Security BasicAuth
// @Security JWT
// @produce application/json
// @Success 200 {object} GitResponse
// @Router /rk/v1/git [get]
func (entry *CommonServiceEntry) Git(writer http.ResponseWriter, request *http.Request) {
	writer.WriteHeader(http.StatusOK)
	raw, _ := json.MarshalIndent(doGit(), "", " ")
	writer.Write(raw)
}
// apisNoop, reqNoop and gwErrorMappingNoop below exist only to anchor the
// swagger annotations for routes implemented by framework-specific
// packages; the handler bodies are intentionally empty.

// Apis handler
// @Summary List API
// @Id 13
// @version 1.0
// @Security ApiKeyAuth
// @Security BasicAuth
// @Security JWT
// @produce application/json
// @Success 200 {object} ApisResponse
// @Router /rk/v1/apis [get]
func (entry *CommonServiceEntry) apisNoop(writer http.ResponseWriter, request *http.Request) {}

// Req handler
// @Summary List prometheus metrics of requests
// @Id 14
// @version 1.0
// @Security ApiKeyAuth
// @Security BasicAuth
// @Security JWT
// @produce application/json
// @success 200 {object} ReqResponse
// @Router /rk/v1/req [get]
func (entry *CommonServiceEntry) reqNoop(writer http.ResponseWriter, request *http.Request) {}

// gRPC gateway error mapping handler
// @Summary List error mapping between gRPC and grpc-gateway
// @Id 16
// @version 1.0
// @Security ApiKeyAuth
// @Security BasicAuth
// @Security JWT
// @produce application/json
// @success 200 {object} GwErrorMappingResponse
// @Router /rk/v1/gwErrorMapping [get]
func (entry *CommonServiceEntry) gwErrorMappingNoop(writer http.ResponseWriter, request *http.Request) {
}
|
<reponame>paullewallencom/grunt-978-1-7852-8161-7
// Grunt build pipeline: browserify (ES2015 via babelify) -> exorcise
// (extract source map) -> uglify (minify, keeping the map) -> clean
// (remove intermediate bundle files).
module.exports = function(grunt) {
    grunt.initConfig({
        browserify: {
            options: {
                // Transpile ES2015 sources during bundling.
                transform: [
                    ['babelify', {
                        'presets' : ['es2015']
                    }]
                ]
            },
            bundle: {
                files: {
                    'public/js/bundle.js': ['public/js/src/main.js']
                },
                options: {
                    browserifyOptions: {
                        // Emit an inline source map for exorcise to extract.
                        debug: true
                    }
                }
            }
        },
        exorcise: {
            options: {
                base: 'public'
            },
            bundle: {
                files: {
                    // Pull the inline map out into a standalone .map file.
                    'public/js/bundle.js.map': ['public/js/bundle.js']
                }
            }
        },
        uglify: {
            bundle: {
                options: {
                    sourceMap: true,
                    sourceMapIncludeSources: true,
                    // Chain from the map exorcise produced so the minified
                    // bundle still maps back to the original sources.
                    sourceMapIn: 'public/js/bundle.js.map'
                },
                files: {
                    'public/js/bundle.min.js': ['public/js/bundle.js']
                }
            }
        },
        clean: {
            bundle : {
                // Remove the unminified bundle and its map; only
                // bundle.min.js (and its map) are kept.
                files : [{
                    expand: true,
                    cwd: 'public/js',
                    src: ['bundle.js*']
                }]
            }
        }
    });
    grunt.loadNpmTasks('grunt-browserify');
    grunt.loadNpmTasks('grunt-exorcise');
    grunt.loadNpmTasks('grunt-contrib-uglify');
    grunt.loadNpmTasks('grunt-contrib-clean');
    grunt.registerTask('bundle', 'Generate client side bundles', ['browserify:bundle', 'exorcise:bundle', 'uglify:bundle', 'clean:bundle']);
};
|
/*
* The AnVIL
* https://www.anvilproject.org
*
* Basic navigation service.
* Filters navigation by document path.
*/
/**
 * Given a document path, return either its corresponding section or primary link.
 * Param x = 0 corresponds to section, x = 1 to primaryLink and so on.
 * @param docPath
 * @param x
 * @returns {*}
 */
export function getKeyOfPath(docPath, x) {
    const segments = docPath.split('/');
    return segments[x];
}
/**
 * Returns either the path (if there is one), otherwise the key,
 * otherwise the site root '/'.
 * @param link
 * @returns {*}
 */
export function getPath(link) {
    return link.path || link.key || '/';
}
/**
 * Given a path, return the navigation (primary links) for the section.
 * @param siteMap
 * @param docPath
 * @returns {Array}
 * @throws {Error} when the section exists but defines no primaryLinks
 */
export function getSectionNav(siteMap, docPath) {
    // Get section for the document path (segment 1 of the path).
    const section = siteMap.filter(n => n.key === getKeyOfPath(docPath, 1))[0];
    if (!section) {
        return [];
    }
    // Throw if there are no primary links for the section.
    // Bug fix: the old message concatenated the section object itself,
    // which stringifies to "[object Object]"; report the key instead.
    if (!section.primaryLinks) {
        throw new Error("No links for section: " + section.key);
    }
    // Return all primary links for the document section.
    return section.primaryLinks;
}
/**
 * Returns a filtered siteMap that will exclude any documents in draft mode.
 * Draft mode is indicated by the frontmatter where "draft" is true.
 *
 * Bug fix: the previous version mutated the caller's pages in place
 * (`page.secondaryLinks = ...`); this version builds new page objects so
 * the input siteMap is left untouched.
 *
 * @param siteMap
 * @param draftDocuments
 * @returns {*}
 */
export function removeDraftDocuments(siteMap, draftDocuments) {
    if ( !draftDocuments ) {
        // If there are no documents in draft mode, return the siteMap for all documents
        return siteMap;
    }
    const isDraft = key => draftDocuments.some(draftDoc => draftDoc.slug === key);
    return siteMap
        .filter(page => !isDraft(page.key))
        .map(page => {
            if (!page.secondaryLinks) {
                return page;
            }
            // Copy the page, filtering drafts out of its secondary links.
            return Object.assign({}, page, {
                secondaryLinks: page.secondaryLinks.filter(link => !isDraft(link.key)),
            });
        });
}
|
package com.globalcollect.gateway.sdk.java.gc.token.definitions;
import com.globalcollect.gateway.sdk.java.gc.fei.definitions.BankAccountIban;
/**
 * Bean holding the payment-product-707-specific data of a non-SEPA direct
 * debit token: four free-form address lines plus the IBAN bank account.
 * All fields default to null and are exposed through plain getters/setters.
 */
public class TokenNonSepaDirectDebitPaymentProduct707SpecificData {

    private String addressLine1 = null;
    private String addressLine2 = null;
    private String addressLine3 = null;
    private String addressLine4 = null;
    private BankAccountIban bankAccountIban = null;

    /** @return address line 1, or null if not set */
    public String getAddressLine1() {
        return addressLine1;
    }

    public void setAddressLine1(String value) {
        this.addressLine1 = value;
    }

    /** @return address line 2, or null if not set */
    public String getAddressLine2() {
        return addressLine2;
    }

    public void setAddressLine2(String value) {
        this.addressLine2 = value;
    }

    /** @return address line 3, or null if not set */
    public String getAddressLine3() {
        return addressLine3;
    }

    public void setAddressLine3(String value) {
        this.addressLine3 = value;
    }

    /** @return address line 4, or null if not set */
    public String getAddressLine4() {
        return addressLine4;
    }

    public void setAddressLine4(String value) {
        this.addressLine4 = value;
    }

    /** @return the IBAN bank account, or null if not set */
    public BankAccountIban getBankAccountIban() {
        return bankAccountIban;
    }

    public void setBankAccountIban(BankAccountIban value) {
        this.bankAccountIban = value;
    }
}
|
<filename>client/src/components/PostDetail/Participants/Card.js
import React from 'react';
import styled from 'styled-components/macro';
import { wideFont } from '../../shared/helpers';
// Bordered, centered card container for a single participant; text color
// comes from the active theme's mutedText.
const CardWrapper = styled.div`
  ${wideFont};
  border: 1px solid silver;
  margin-right: 10px;
  padding: 10px;
  min-width: 70px;
  text-align: center;
  color: ${props => props.theme.mutedText};
`
const Card = (props) => {
const { participant } = props;
return (
<CardWrapper>
<strong>{participant.name}</strong><br /><br />
<h5 style={{ textDecoration: 'underline' }}>Role</h5>
{participant ? participant.role : 'No role given.'}
</CardWrapper>
)
}
export default Card; |
<filename>src/main/java/net/avcompris/tools/diagrammer/AppInfo.java
package net.avcompris.tools.diagrammer;
import static org.apache.commons.lang3.StringUtils.isBlank;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
/**
 * Lazily-loaded, validated view of the Maven coordinates stored in
 * app.properties (project.artifactId / project.version / project.url).
 * Values containing "$" are rejected because they indicate the file was
 * not filtered by "mvn process-resources".
 */
abstract class AppInfo {

    public final String artifactId;
    public final String version;
    public final String url;

    private AppInfo(
            final String artifactId,
            final String version,
            final String url) {
        this.artifactId = validate(artifactId, "project.artifactId");
        this.version = validate(version, "project.version");
        this.url = validate(url, "project.url");
    }

    /**
     * Rejects null, blank, or unfiltered ("$"-containing) property values.
     *
     * @param value the property value to check
     * @param label the property name, used in error messages
     * @return the validated value, unchanged
     */
    private static String validate(final String value, final String label) {
        if (value == null) {
            throw new IllegalArgumentException(
                    "Illegal null value for property \"" + label + "\"");
        }
        if (isBlank(value)) {
            throw new IllegalArgumentException(
                    "Illegal empty value for property \"" + label + "\"");
        }
        if (value.contains("$")) {
            throw new IllegalArgumentException("Illegal value for property \""
                    + label + "\": " + value + "."
                    + " It seems the value in app.properties has not been processed."
                    + " Make sure you've run \"mvn process-resources\".");
        }
        return value;
    }

    private static AppInfo appInfo = null;

    /**
     * Returns the cached AppInfo, loading app.properties from the
     * classpath on first use.
     * NOTE(review): lazy init is not synchronized; under concurrent first
     * calls the file may be read twice (harmless but wasteful) — confirm
     * single-threaded startup or add synchronization if needed.
     */
    public static AppInfo get() {
        if (appInfo != null) {
            return appInfo;
        }

        final InputStream is = AppInfo.class.getClassLoader()
                .getResourceAsStream("app.properties");

        if (is == null) {
            throw new RuntimeException("Cannot find app.properties");
        }

        final Properties properties = new Properties();

        // try-with-resources replaces the hand-written nested try/finally
        // and guarantees the stream is closed even if load() throws.
        try (InputStream in = is) {
            properties.load(in);
        } catch (final IOException e) {
            throw new RuntimeException(e);
        }

        appInfo = new AppInfo(properties.getProperty("project.artifactId"),
                properties.getProperty("project.version"),
                properties.getProperty("project.url")) {
        };

        return appInfo;
    }
}
|
import React, { Fragment } from 'react';
import moment from 'moment';
import PropTypes from 'prop-types';
import { Squares } from 'react-activity';
import { compose } from 'redux';
import { connect } from 'react-redux';
import { createStructuredSelector } from 'reselect';
import { Table } from 'semantic-ui-react';
import { RefreshIcon } from 'components/RefreshIcon';
import {
UnitTableWrapper,
ScoreP,
ScoreDate,
TableCell,
UnitScoreWrapper,
UnitTable,
UnitRefreshWrapper,
NoneP,
} from './styles';
import {
makeSelectUnitProgress,
makeSelectUnitProgressLoading,
} from '../../../selectors';
import { LoadingWrapper } from '../../../styles';
// UnitProgress renders a per-unit progress table: a header row of unit
// titles and a body row where each cell shows either submission scores,
// a VOD watch percentage, or a "not studied" placeholder. A refresh
// control re-fetches the data via handleRefresh.
const UnitProgress = ({ unitProgress, unitProgressLoading, handleRefresh }) => {
  // Decide what to show inside a single unit cell.
  const renderUnitProgress = unit => {
    const submissionSize = unit.submissions.length;
    const vodProgress = unit.vod_progress.progress;
    // Submissions but no VOD progress: list each submission's score and date.
    if (submissionSize && !vodProgress) {
      return (
        <Fragment>
          {unit.submissions.map(submission => (
            <UnitScoreWrapper key={`unit-progress-submission-${submission.id}`}>
              <ScoreP final={submission.is_arch_final} score={submission.score}>
                {submission.score}점
              </ScoreP>
              <ScoreDate>
                {moment(submission.updated).format('YY/MM/DD')}
              </ScoreDate>
            </UnitScoreWrapper>
          ))}
        </Fragment>
      );
    }
    // VOD progress but no submissions: show the watch percentage.
    if (vodProgress && !submissionSize) {
      return <p>{vodProgress}%</p>;
    }
    // Neither (or both — NOTE(review): the mixed case also falls through
    // here; confirm that is intended): show the "not studied" placeholder.
    return <NoneP>미학습</NoneP>;
  };
  return (
    <Fragment>
      <UnitTableWrapper>
        {unitProgressLoading ? (
          // Loading spinner while progress data is being fetched.
          <LoadingWrapper full>
            <Squares color="#a9a9a9" size={25} speed={1} />
          </LoadingWrapper>
        ) : (
          <UnitTable>
            <Table.Header>
              <Table.Row>
                {unitProgress.map(data => (
                  <Table.HeaderCell key={`unit-title-${data.id}`}>
                    {data.unit.title}
                  </Table.HeaderCell>
                ))}
              </Table.Row>
            </Table.Header>
            <Table.Body>
              <Table.Row>
                {unitProgress.map(unit => (
                  <TableCell singleLine key={`unit-progress-${unit.id}`}>
                    {renderUnitProgress(unit)}
                  </TableCell>
                ))}
              </Table.Row>
            </Table.Body>
          </UnitTable>
        )}
        <UnitRefreshWrapper onClick={handleRefresh}>
          정보 새로고침
          <RefreshIcon refreshing={unitProgressLoading} />
        </UnitRefreshWrapper>
      </UnitTableWrapper>
    </Fragment>
  );
};
UnitProgress.propTypes = {
  unitProgress: PropTypes.array,
  unitProgressLoading: PropTypes.bool,
  handleRefresh: PropTypes.func,
};
const mapStateToProps = createStructuredSelector({
  unitProgress: makeSelectUnitProgress(),
  unitProgressLoading: makeSelectUnitProgressLoading(),
});
const withConnect = connect(mapStateToProps);
export default compose(withConnect)(UnitProgress);
|
package util;
import java.io.File;
import java.io.FilenameFilter;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.Queue;
import java.util.Random;
public class FileUtil {
    /***
     * Returns the list of files, with the given extension, in the given inDir.
     *
     * @param extension the file with given extension should be listed. If NULL returns all files in
     *        directory inDir.
     * @param inDir a File object to the input directory.
     * @return the list of files, with the given extension, in the given inDir.
     */
    public static File[] listFiles(final String extension, File inDir) {
        return listFiles(extension, inDir, false);
    }

    /***
     * Breadth-first listing of files with the given extension under inDir,
     * optionally descending into subdirectories.
     *
     * Bug fix: File.listFiles() returns null (not an empty array) when the
     * path is not a directory or an I/O error occurs; the previous version
     * then threw a NullPointerException in the for-each loops. Null results
     * are now treated as "no children".
     *
     * @param extension file extension filter; NULL matches all files.
     * @param inDir root directory to list.
     * @param recursive whether to descend into subdirectories.
     * @return all matching files found.
     */
    public static File[] listFiles(final String extension, File inDir, boolean recursive) {
        Queue<File> dirQ = new LinkedList<File>();
        LinkedList<File> fileList = new LinkedList<File>();
        dirQ.add(inDir);
        while (!dirQ.isEmpty()) {
            File head = dirQ.poll();
            // List the files in the current dir
            File[] files = head.listFiles(new FilenameFilter() {
                @Override
                public boolean accept(File current, String name) {
                    if (extension == null) {
                        return true;
                    }
                    return name.endsWith(extension) && new File(current, name).isFile();
                }
            });
            // Add the files to final list (listFiles returns null on I/O error)
            if (files != null) {
                for (File f : files) {
                    fileList.add(f);
                }
            }
            // If recursive list the Dirs in the current dir
            if (recursive) {
                File[] dirList = head.listFiles(new FilenameFilter() {
                    @Override
                    public boolean accept(File current, String name) {
                        return new File(current, name).isDirectory();
                    }
                });
                if (dirList != null) {
                    for (File d : dirList) {
                        dirQ.add(d);
                    }
                }
            }
        }
        File[] allFiles = new File[fileList.size()];
        return fileList.toArray(allFiles);
    }

    /***
     * Returns a random subset of files, with the given extension, in the inDir .
     *
     * @param extension the file with given extension should be listed. If NULL returns all files in
     *        directory inDir.
     * @param inDir a File object to the input directory.
     * @param selectedProbability each file is included in the return list with the given probability.
     *        selectedProbability=1.0 means all and selectedProbability=0 means none.
     * @return a random subset of files, with the given extension, in the inDir.
     */
    public static File[] listFileRandomSubset(final String extension, File inDir,
            double selectedProbability) {
        File[] files = listFiles(extension, inDir);
        if (Double.compare(selectedProbability, 1.0) == 0) {
            return files;
        }
        ArrayList<File> newFileList = new ArrayList<File>((int) (files.length * selectedProbability));
        // RNG seed comes from the "seed" system property when set, so runs
        // can be made reproducible; otherwise the current time is used.
        Random rng = new Random(Long.getLong("seed", System.currentTimeMillis()));
        for (File f : files) {
            if (rng.nextDouble() < selectedProbability) {
                newFileList.add(f);
            }
        }
        File[] newFiles = new File[newFileList.size()];
        return newFileList.toArray(newFiles);
    }
}
|
package gamesite.servlet;
import java.io.*;
import java.net.*;
import java.sql.SQLException;
import java.text.*;
import java.util.*;
import javax.servlet.*;
import javax.servlet.http.*;
import gamesite.utils.*;
import gamesite.utils.LoginHandler;
import gamesite.model.DashBoardCommands;
import gamesite.model.SQLExceptionHandler;
/**
 * Servlet exposing customer login as a small XML API. Responds to GET (and
 * POST, which delegates to GET) with a &lt;login_status&gt; document whose
 * status_code is 1 on success or a negative code with a message otherwise.
 */
public class LoginServletXml extends HttpServlet {
    public String getServletInfo() {
        return "Servlet for employee interface";
    }

    // Fixed XML envelope written around every response body.
    private static String xmlHeader="<?xml version=\"1.0\" encoding=\"UTF-8\"?><login_status>";
    private static String xmlFooter="</login_status>";

    // Writes the success fragment (status_code 1).
    public void writeSuccess(PrintWriter writer) {
        writer.println("<status>success</status>");
        writer.println("<status_code>1</status_code>");
    }

    // Writes a failure fragment with the given code and human-readable message.
    public void writeFailure(PrintWriter writer, String code, String msg) {
        writer.println("<status>failure</status>");
        writer.println("<status_code>"+code+"</status_code>");
        writer.println("<message>"+msg+"</message>");
    }

    /**
     * Handles login. Requires both "email" and "password" query parameters;
     * delegates credential checking to LoginHandler.loginNoCaptcha against
     * the "customers" table/realm. Exceptions are rendered as XML via
     * SQLExceptionFormat so the client always receives a parseable body.
     */
    public void doGet(HttpServletRequest request, HttpServletResponse response)
            throws IOException, ServletException {
        HashMap<String,String> params = ParameterParse.getQueryParameters(request.getQueryString());
        PrintWriter writer = null;
        try {
            writer = response.getWriter();
            writer.println(xmlHeader);
            if (params.containsKey("email") && params.containsKey("password")) {
                //writer = response.getWriter();
                int login = LoginHandler.loginNoCaptcha(request,response,"customers");
                // writer.println(xmlHeader);
                switch (login) {
                    case 1:
                        writeSuccess(writer);
                        break;
                    case -1:
                        //This case should never be reached, but included for
                        //completeness and error checking
                        writeFailure(writer,"-1","Please complete the ReCaptcha");
                        break;
                    case -2:
                        writeFailure(writer,"-2","Invalid email or password");
                        break;
                }
                // writer.println(xmlFooter);
            } else if (params.containsKey("email")) {
                writeFailure(writer,"-2","Password required as a parameter");
            } else if (params.containsKey("password")) {
                writeFailure(writer,"-2","Email required as a parameter");
            } else {
                writeFailure(writer,"-2","Email and password required");
            }
            writer.println(xmlFooter);
        } catch (SQLExceptionHandler ex) {
            // NOTE(review): the header has already been written at this
            // point, so error bodies are appended inside an unclosed
            // <login_status> element — confirm clients tolerate this.
            writer.println(SQLExceptionFormat.toXml(ex));
        } catch (SQLException ex) {
            writer.println(SQLExceptionFormat.toXml(ex));
        } catch (java.lang.Exception ex) {
            writer.println(SQLExceptionFormat.toXml(ex));
        } finally {
            if (writer != null) {
                writer.close();
            }
        }
    }

    // POST behaves identically to GET.
    public void doPost(HttpServletRequest request, HttpServletResponse response)
            throws IOException, ServletException
    {
        doGet(request, response);
    }
}
|
// Route handlers for the /users resource. Each handler simply terminates the
// response with a short plain-text marker naming the route it serves.
const reply = (res, text) => res.end(text);

class UsersController {
  // GET /users
  getAll(req, res) { reply(res, '/users GET'); }
  // GET /users/:id
  get(req, res) { reply(res, '/users/:id GET'); }
  // POST /users
  add(req, res) { reply(res, '/users POST'); }
  // PUT /users/:id
  update(req, res) { reply(res, '/users/:id PUT'); }
  // DELETE /users/:id
  remove(req, res) { reply(res, '/users/:id DELETE'); }
}

module.exports = UsersController;
|
#!/bin/bash
# Dump a PostgreSQL schema (structure and/or data) with pg_dump, rename the
# schema inside the resulting SQL file, and append statements that re-register
# the offer in the REFERENTIALS / REFERENTIAL_METADATA tables.
#
# Required options: -o (output file) and -r (new schema name).

WORK_DIR=`dirname $(readlink -f $0)`
VERBOSE=""
# Connection defaults, overridable via options.
database=chouette2
user=chouette
host=localhost
port=5432
# Default dump mode: full dump with INSERT statements listing column names.
datatype="--column-inserts"
schema_name=""

# Print command-line help.
function usage(){
  echo "Usage `basename $0` [-p port] [-d database] [-u user] [-t tables separated with space] [-o outputfile] [-n schema-name] [-r new-name] [-h host] -s [schema | data]"
  echo "   example: `basename $0` -p 5433 -d chouette -u chouette -t 'compliances* referentiel' -o toto.sql -h localhost -s "
}

while getopts p:d:u:t:o:h:s:n:r: option
do
  case $option in
    n) # source schema name, also used as the pg_dump --schema filter
      schema_name="$OPTARG"
      opt_schema="--schema=$schema_name"
      ;;
    p)
      port=$OPTARG
      ;;
    d)
      database=$OPTARG
      ;;
    u)
      user=$OPTARG
      ;;
    t) # space-separated list of table patterns
      tables=$OPTARG
      ;;
    o)
      output=$OPTARG
      ;;
    h)
      host=$OPTARG
      ;;
    r) # new schema name written into the dump
      new_name=$OPTARG
      ;;
    s) # dump mode: "data" (data only) or "schema" (structure only)
      if [ "$OPTARG" == "data" ]; then
        datatype="--data-only --column-inserts"
      elif [ "$OPTARG" == "schema" ]; then
        datatype="--schema-only --column-inserts"
      fi
      ;;
  esac
done

if [ "${output}" == "" ] || [ "${new_name}" == "" ]; then
  usage
  exit 1   # missing mandatory option: report failure to the caller
else
  # Turn the table list into repeated --table=... options.
  opt_tables="";
  for i in ${tables}; do
    opt_tables="${opt_tables} --table=${i}"
  done
  # datatype/opt_schema/opt_tables are intentionally unquoted: each may expand
  # to several options. File and connection values are quoted.
  pg_dump ${datatype} ${opt_schema} --format=plain --file="${output}" ${opt_tables} --username="${user}" --host="${host}" --port="${port}" "${database}"
  # Rename the schema inside the dump. Skip when -n was not given: an empty
  # pattern would make sed fail with "no previous regular expression".
  if [ -n "${schema_name}" ]; then
    sed -i "s/${schema_name}/${new_name}/g" "${output}"
  fi
  # Comment out SET row_security (not accepted by older servers on restore).
  sed -i "s/^SET row_security/-- SET row_security/g" "${output}";
  # Drop any previous copy of the schema before the dump recreates it.
  sed -i "1s/^/DROP SCHEMA IF EXISTS ${new_name} CASCADE;\n/" "${output}"
  echo -e "
-- Ajout de l'offre dans les tables REFERENTIALS & REFERENTIAL_METADATA
SET search_path = public, pg_catalog;
DELETE FROM referential_metadata WHERE referential_id IN (SELECT id FROM referentials as r WHERE r.name='${new_name}');
DELETE FROM referentials AS ref WHERE ref.name='${new_name}';
INSERT INTO referentials (name, slug, created_at, updated_at, time_zone, user_name, organisation_id, workbench_id, line_referential_id, stop_area_referential_id) VALUES ('${new_name}', '${new_name}', '2017-10-06', '2017-10-06', 'Paris' , '!AutoTest!', 1, 1,1,1); \n \
INSERT INTO referential_metadata (referential_id, line_ids, created_at, updated_at, periodes) VALUES ((SELECT id FROM referentials where name='${new_name}'), '{1,2,3,4,5,6,7,8}', '2017-10-06', '2017-10-06', '{\"[2017-12-03,2017-12-08)\",\"[2017-12-10,2017-12-15)\"}'); \n \
" >> "${output}"
fi
|
<filename>container/src/main/java/no/mnemonic/commons/container/ComponentContainer.java
package no.mnemonic.commons.container;
import no.mnemonic.commons.component.*;
import no.mnemonic.commons.container.plugins.ComponentContainerPlugin;
import no.mnemonic.commons.container.plugins.ComponentDependencyResolver;
import no.mnemonic.commons.container.plugins.ComponentLifecycleHandler;
import no.mnemonic.commons.container.plugins.ComponentValidator;
import no.mnemonic.commons.container.plugins.impl.*;
import no.mnemonic.commons.container.providers.BeanProvider;
import no.mnemonic.commons.container.providers.SimpleBeanProvider;
import no.mnemonic.commons.logging.Logger;
import no.mnemonic.commons.logging.Logging;
import no.mnemonic.commons.utilities.ObjectUtils;
import no.mnemonic.commons.utilities.collections.ListUtils;
import no.mnemonic.commons.utilities.collections.MapUtils;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import static no.mnemonic.commons.component.ComponentState.*;
import static no.mnemonic.commons.utilities.collections.ListUtils.list;
import static no.mnemonic.commons.utilities.lambda.LambdaUtils.tryTo;
/**
 * ComponentContainer administers the lifecycle of a set of beans supplied by a
 * {@link BeanProvider}: it discovers container plugins among the beans, builds
 * a dependency graph between components, validates them, and starts/stops them
 * in dependency order. Containers may be nested; a child registers with its
 * parent on construction and is destroyed when the parent is destroyed.
 */
public class ComponentContainer implements Component, ComponentListener, ComponentListenerAspect, ComponentStatusAspect, ComponentStateAspect {

  //nodes and state
  private final BeanProvider beans;
  // components whose lifecycle handler has run startComponent and not yet stopComponent
  private final Set<Object> initializedComponents = Collections.synchronizedSet(new HashSet<>());
  // bean id -> dependency-graph node
  private final Map<String, ComponentNode> nodes = new ConcurrentHashMap<>();
  // bean instance -> node, used to deduplicate beans registered under several ids
  private final Map<Object, ComponentNode> objectNodeMap = new ConcurrentHashMap<>();

  //parent and child containers
  private final ComponentContainer parent;
  private final AtomicReference<ComponentState> state = new AtomicReference<>(NOT_STARTED);
  private final Collection<ComponentContainer> childContainers = Collections.synchronizedCollection(new ArrayList<>());

  //timestamps and metrics
  private final AtomicLong lastStoppingNotificationTimestamp = new AtomicLong();

  //listeners
  private final Collection<ComponentListener> componentListeners = new HashSet<>();
  private final Collection<ContainerListener> containerListeners = new HashSet<>();
  // pluggable strategies collected from the beans plus built-in defaults
  private final Collection<ComponentDependencyResolver> dependencyResolvers = new HashSet<>();
  private final Collection<ComponentLifecycleHandler> lifecycleManagers = new HashSet<>();
  private final Collection<ComponentValidator> validators = new HashSet<>();

  // guards state transitions and the child-container collection
  private final Object STATE_LOCK = new Object();
  private final Logger LOGGER = Logging.getLogger(ComponentContainer.class.getName());

  //creators

  /**
   * @param beans beans which are administered by this container
   * @param parent component container, or null for a root container
   */
  private ComponentContainer(BeanProvider beans, ComponentContainer parent) {
    this.beans = beans;
    this.parent = parent;
    if (parent != null) {
      synchronized (parent.STATE_LOCK) {
        parent.childContainers.add(this);
      }
      // let the parent observe this child's lifecycle events
      this.addComponentListener(parent);
    }
  }

  /**
   * Creates a root container around the given beans.
   *
   * @param beans beans to administer
   * @return a new, not yet initialized container
   */
  public static ComponentContainer create(Object... beans) {
    return new ComponentContainer(new SimpleBeanProvider(list(beans)), null);
  }

  /**
   * Creates a root container around the given bean provider.
   *
   * @param provider source of the beans to administer
   * @return a new, not yet initialized container
   */
  public static ComponentContainer create(BeanProvider provider) {
    return new ComponentContainer(provider, null);
  }

  // interface methods

  @Override
  public void addComponentListener(ComponentListener listener) {
    componentListeners.add(listener);
  }

  @Override
  public Collection<ComponentListener> getComponentListeners() {
    return Collections.unmodifiableCollection(componentListeners);
  }

  @Override
  public void removeComponentListener(ComponentListener listener) {
    componentListeners.remove(listener);
  }

  public void addContainerListener(ContainerListener listener) {
    containerListeners.add(listener);
  }

  public void removeContainerListener(ContainerListener listener) {
    containerListeners.remove(listener);
  }

  @Override
  public ComponentStatus getComponentStatus() {
    return new ComponentStatus();
  }

  @Override
  public ComponentState getComponentState() {
    return state.get();
  }

  /**
   * Called by a managed component that is about to stop. Only logged; the
   * container reacts when the component has actually stopped.
   */
  @Override
  public void notifyComponentStopping(Object component) {
    if (getComponentState().isTerminal()) {
      if (getLogger().isDebug())
        getLogger().debug("Component " + component + " notified us of current shutdown");
    }
  }

  @Override
  public void notifyComponentStopped(Object component) {
    //if notifying component is part of this container, destroy this container now (unless already terminating)
    if (initializedComponents.contains(component) && !getComponentState().isTerminal()) {
      getLogger().warning("Component " + component + " stopped, destroying container " + this);
      try {
        this.destroy();
      } catch (Exception e) {
        getLogger().error("Error when calling destroy", e);
      }
    }
  }

  //public methods

  /**
   * Initialize this container, and any subcontainers it may have
   * @return The initialized container
   */
  public ComponentContainer initialize() {
    try {
      // idempotent: only the first call on a fresh container does any work
      if (getComponentState() != NOT_STARTED) return this;
      // update state
      setState(INITIALIZING);
      // initialize parent container if not already so
      if (parent != null) {
        // make sure parent is initialized first
        parent.initialize();
      } else {
        // create a thread that will shutdown the container
        // (registers a JVM shutdown hook; only the root container does this)
        new ShutdownTask(this);
      }
      // handle container plugins
      handleContainerPlugins();
      // create component nodes
      createNodes();
      // resolve dependencies between components
      resolveDependencies();
      // validate configuration
      validate();
      // activate components
      activate();
      // update state
      setState(STARTED);
      // notify listeners
      containerListeners.forEach(l -> tryTo(() -> l.notifyContainerStarted(this), e -> LOGGER.error(e, "Error calling notifyContainerStarted")));
    } catch (RuntimeException e) {
      getLogger().error("Error initializing container", e);
      //if startup fails, make sure to exit the container
      destroy();
      throw e;
    }
    return this;
  }

  /**
   * Destroy only the current container (along with any child containers which cannot survive without their parent)
   * Parent containers are untouched
   */
  public ComponentContainer destroy() {
    // only allow one thread to attempt shutdown for any container
    synchronized (STATE_LOCK) {
      if (getComponentState().isTerminal()) return this;
      setState(STOPPING);
      STATE_LOCK.notifyAll();
    }
    try {
      getLogger().warning("Shutting down...");
      fireContainerStopping();
      // shut down child containers first
      getChildContainers().forEach(ComponentContainer::destroy);
      // stop all nodes in this container
      nodes.values().forEach(this::stopNode);
      // stop nodes not registered in initial dependency graph
      initializedComponents.clear();
    } catch (RuntimeException e) {
      getLogger().error("Error in destroy()", e);
    } finally {
      // remove parent reference
      if (parent != null) {
        synchronized (parent.STATE_LOCK) {
          parent.childContainers.remove(this);
        }
      }
      getLogger().warning("Shutdown complete");
      setState(STOPPED);
      nodes.clear();
      //notify componentListeners that we are done
      fireContainerStopped();
    }
    return this;
  }

  // ***************************** private methods

  private Logger getLogger() {
    return LOGGER;
  }

  // Snapshot of bean id -> bean instance for the current dependency graph.
  Map<String, Object> getComponents() {
    synchronized (STATE_LOCK) {
      return MapUtils.map(nodes.entrySet(), e -> MapUtils.Pair.T(e.getKey(), e.getValue().getObject()));
    }
  }

  private Collection<ComponentContainer> getChildContainers() {
    synchronized (STATE_LOCK) {
      // defensive copy so destroy() can iterate without holding the lock
      return Collections.unmodifiableCollection(list(childContainers));
    }
  }

  private void fireContainerStopping() {
    //avoid excessive notifications
    if (System.currentTimeMillis() - lastStoppingNotificationTimestamp.get() < 1000) return;
    // NOTE(review): notifications fire only when the state is already terminal,
    // and the error message below says "notifyComponentStopped" although this
    // call notifies "stopping" - confirm both are intended.
    if (getComponentState().isTerminal()) {
      lastStoppingNotificationTimestamp.set(System.currentTimeMillis());
      componentListeners.forEach(l -> tryTo(() -> l.notifyComponentStopping(ComponentContainer.this), e -> LOGGER.error(e, "Error calling notifyComponentStopped")));
      containerListeners.forEach(l -> tryTo(() -> l.notifyContainerDestroying(ComponentContainer.this), e -> LOGGER.error(e, "Error calling notifyContainerDestroying")));
    }
  }

  private void fireContainerStopped() {
    componentListeners.forEach(l -> tryTo(() -> l.notifyComponentStopped(this), e -> LOGGER.error(e, "Error calling notifyComponentStopped")));
    containerListeners.forEach(l -> tryTo(() -> l.notifyContainerDestroyed(this), e -> LOGGER.error(e, "Error calling notifyContainerDestroyed")));
  }

  // Sets the state and wakes any thread waiting on STATE_LOCK.
  private void setState(ComponentState state) {
    synchronized (STATE_LOCK) {
      this.state.set(state);
      STATE_LOCK.notifyAll();
    }
  }

  /**
   * Set all special purpose objects to special interfaces
   */
  private void handleContainerPlugins() {
    //make all container aware beans aware of its parent container
    beans.getBeans(ContainerAware.class).forEach((k, v) -> v.registerContainerAware(this));
    //register all plugins
    beans.getBeans(ComponentContainerPlugin.class).forEach((k, v) -> registerPlugin(v));
    //add handler plugin to register all container listeners
    registerPlugin(new ContainerListenerHandler(this));
    //add handler plugin to register all component listeners
    registerPlugin(new ComponentListenerAspectHandler(this));
    // collect lifecycle/dependency/validation strategies from the beans,
    // then append the built-in defaults
    lifecycleManagers.addAll(beans.getBeans(ComponentLifecycleHandler.class).values());
    lifecycleManagers.add(new ComponentLifecycleAspectHandler());
    dependencyResolvers.addAll(beans.getBeans(ComponentDependencyResolver.class).values());
    dependencyResolvers.add(new MethodAnnotationDependencyResolver());
    dependencyResolvers.add(new FieldAnnotationDependencyResolver());
    validators.addAll(beans.getBeans(ComponentValidator.class).values());
    validators.add(new ComponentValidationAspectValidator());
  }

  /**
   * Validate all components that has a validator
   */
  private void validate() {
    ValidationContext validationContext = new ValidationContext();
    beans.getBeans().values().forEach(b -> validateBean(b, validationContext));
    for (String error : validationContext.getErrors()) {
      getLogger().error(error);
    }
    for (String warning : validationContext.getWarnings()) {
      getLogger().warning(warning);
    }
    if (!validationContext.isValid()) {
      throw new ComponentConfigurationException(validationContext);
    }
  }

  // Hands every bean the plugin applies to over to the plugin in one batch.
  private void registerPlugin(ComponentContainerPlugin plugin) {
    Map<String, Object> targets = new HashMap<>();
    beans.getBeans().forEach((k, v) -> {
      if (plugin.appliesTo(v)) targets.put(k, v);
    });
    plugin.registerBeans(targets);
  }

  // Only the first applicable validator runs for a given bean.
  private void validateBean(Object bean, ValidationContext validationContext) {
    for (ComponentValidator v : validators) {
      if (v.appliesTo(bean)) {
        v.validate(validationContext, bean);
        return;
      }
    }
  }

  /**
   * Activate active components
   */
  private void activate() {
    try {
      getLogger().info("Initializing " + this);
      nodes.values().forEach(this::startNode);
      if (getLogger().isInfo()) getLogger().info("Initialization complete");
    } catch (Exception e) {
      getLogger().error("Caught exception during initialization", e);
      destroy();
      throw new ComponentException(e);
    }
  }

  /**
   * Start this node component. Resolves dependencies, so any dependent objects
   * are started first, and any objects listed to be started afterwards is
   * started afterwards.
   *
   * @param n node to start
   */
  private void startNode(ComponentNode n) {
    if (n.isStarted()) return;
    // mark before recursing to break dependency cycles
    n.setStarted(true);
    // first start all components which we have an initialization dependency to
    n.getInitializationDependencies().forEach(this::startNode);
    // see if any lifecycle manager can start this component
    for (ComponentLifecycleHandler manager : lifecycleManagers) {
      if (!manager.appliesTo(n.getObject())) continue;
      getLogger().info("Starting " + n.getObjectName() + "/" + n.getObject());
      manager.startComponent(n.getObject());
      // mark component as initialized
      initializedComponents.add(n.getObject());
    }
  }

  /**
   * Stop the given node component. Resolves dependencies, to stop depending
   * components first, and successive components afterwards.
   *
   * @param n node to stop
   */
  private void stopNode(ComponentNode n) {
    if (!n.isStarted()) return;
    // mark before recursing to break dependency cycles
    n.setStarted(false);
    // first stop all components which we have a destruction dependency to
    n.getDestructionDependencies().forEach(this::stopNode);
    // see if any lifecycle manager can stop this component
    for (ComponentLifecycleHandler manager : lifecycleManagers) {
      if (!manager.appliesTo(n.getObject())) continue;
      try {
        getLogger().info("Destroying " + n.getObjectName() + "/" + n.getObject());
        manager.stopComponent(n.getObject());
        if (getLogger().isDebug()) getLogger().debug("Finished stopComponent for component " + n.getObjectName());
      } catch (Exception e) {
        // keep stopping the remaining components even if one fails
        getLogger().error("Error calling stopComponent on " + n.getObject(), e);
      }
      // remove initialization mark
      initializedComponents.remove(n.getObject());
    }
  }

  /**
   * Build dependency tree
   */
  @SuppressWarnings("unchecked")
  private void createNodes() {
    nodes.clear();
    objectNodeMap.clear();
    // make all nodes available
    beans.getBeans().forEach((oid, o) -> {
      //deduplicate objects, to avoid same object being registered in two different dependency nodes
      if (objectNodeMap.containsKey(o)) {
        ComponentNode n = objectNodeMap.get(o);
        nodes.put(oid, n);
        return;
      }
      ComponentNode n = new ComponentNode(oid, o);
      nodes.put(oid, n);
      objectNodeMap.put(o, n);
    });
  }

  private void resolveDependencies() {
    //make all dependency resolvers scan all objects
    dependencyResolvers.forEach(r->r.scan(ListUtils.list(nodes.values(), ComponentNode::getObject)));
    //then resolve dependencies for each node
    nodes.keySet().forEach(oid -> resolveDependsOn(nodes.get(oid)));
  }

  private void resolveDependsOn(ComponentNode node) {
    //check for Dependency annotations on getters
    getDependencies(node).stream()
        .filter(dep -> dep != null)
        .forEach(dep -> {
          if (dep instanceof Collection) {
            //add dependency to each member of collection
            ((Collection<?>) dep).stream()
                .map(objectNodeMap::get)
                .filter(o -> o != null)
                .forEach(depnode -> addDependency(node, depnode));
          } else {
            //add dependency to object
            ObjectUtils.ifNotNullDo(objectNodeMap.get(dep), depnode -> addDependency(node, depnode));
          }
        });
  }

  // Records a directed edge: node starts after dependencyNode, and
  // dependencyNode stops after node.
  private void addDependency(ComponentNode node, ComponentNode dependencyNode) {
    node.addInitializationDependency(dependencyNode);
    dependencyNode.addDestructionDependency(node);
  }

  // Union of the dependencies reported by every resolver for this node's bean.
  private Collection<?> getDependencies(ComponentNode node) {
    Collection<Object> dependencies = new HashSet<>();
    dependencyResolvers.forEach(r -> dependencies.addAll(
        ObjectUtils.ifNull(r.resolveDependencies(node.getObject()), new HashSet<>())
    ));
    return dependencies;
  }

  /**
   * JVM shutdown hook that destroys the root container when the VM exits,
   * unless the container has already been shut down.
   */
  private class ShutdownTask implements Runnable {

    private ComponentContainer rootContainer;

    ShutdownTask(ComponentContainer rootContainer) {
      this.rootContainer = rootContainer;
      Runtime.getRuntime().addShutdownHook(new Thread(this));
      if (getLogger().isInfo()) rootContainer.getLogger().info("Shutdownhook added");
    }

    public void run() {
      try {
        // usedShutdownThread = true;
        rootContainer.getLogger().warning("Shutdownhook triggered");
        // drop out of this shutdownhook if container is already shut down
        synchronized (STATE_LOCK) {
          if (getComponentState().isTerminal()) {
            rootContainer.getLogger().warning("Shutdownhook aborted, container already shut down");
            return;
          }
        }
        rootContainer.destroy();
      } finally {
        rootContainer.getLogger().warning("Shutdownhook done");
      }
    }
  }
}
|
<reponame>mouchtaris/jleon
package gv.jleon
package config
/** Convenience accessors layered on top of a wrapped Typesafe Config value. */
protected[config] trait ConfigDecorationOps extends Any {

  /** The underlying config instance being decorated. */
  def self: tsConfig

  /** The sub-config rooted at `path`. */
  final def apply(path: String): tsConfig =
    self.getConfig(path)

  /** The config object under the configured mirrors key. */
  final def mirrors: tsConfigObject =
    self.getObject(s"${config.key.mirrors}")

  /** The sub-config under the configured storage key. */
  final def storage: tsConfig =
    self.getConfig(s"${config.key.storage}")
}
|
#!/bin/sh
# Integration driver: runs each tutorial script in a freshly cleaned
# environment, captures its transcript, mails the transcript on failure and
# sends a single "github OK" mail when every script succeeded.
# Requires: $EMAIL set to the notification address.

# Start from a clean slate: wipe VirtualBox / Vagrant state and the scratch dir.
rm -f ~/.config/VirtualBox/VirtualBox.xml
rm -rf ~/VirtualBox\ VMs/*
rm -rf ~/.vagrant.d/boxes/*
rm -rf ~/github/test
mkdir ~/github/test
# Abort if the scratch directory is unusable - otherwise every test would run
# in the wrong working directory.
cd ~/github/test || exit 1

STATUS='OK'

## tests below

# run_test NAME: clean the environment, run ~/github/NAME.sh with its
# transcript in ~/github/NAME.txt, and mail the transcript on failure.
run_test() {
  name=$1
  sh -x ~/github/clean-each.sh
  sh -x ~/github/"${name}".sh > ~/github/"${name}".txt 2>&1
  if ! test $? -eq 0; then
    echo | mailx -a ~/github/"${name}".txt -s "github FAIL: ${name}" ${EMAIL}
    STATUS='FAIL'
  fi
}

run_test vagrant-opennebula-ha-tutorial-centos7
run_test vagrant-haproxy-pcs-ansible-tutorial-centos7
run_test install-guide-rdo-with-vagrant
run_test minishift-quickstart-with-vagrant-centos7
run_test VMware-vSphere-Perl-SDK-rpm-centos
run_test vagrant-CaltechDelftX-QuCryptox
run_test vagrant-mariadb-galera-tutorial-centos7
run_test vagrant-hadoop-hortonworks-tutorial-centos7
run_test vagrant-ceph-rbd-tutorial-centos7
run_test vagrant-opennebula-tutorial-centos7
run_test vagrant-systemimager-tutorial-centos6
run_test irods-tutorial-centos6
run_test vagrant-lustre-tutorial-centos6
run_test vagrant-xymon-tutorial
run_test vagrant-munin-tutorial
run_test puppet-nagios
run_test Clusters_From_Scratch-1.1-pcs-Fedora-21

# All green: send the single success notification.
if [ "${STATUS}" = "OK" ];
then
  echo | mailx -s 'github OK' ${EMAIL}
fi
|
/**
* MK & MK4due 3D Printer Firmware
*
* Based on Marlin, Sprinter and grbl
* Copyright (C) 2011 <NAME> / <NAME>
* Copyright (C) 2013 - 2016 <NAME> @MagoKimbra
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
#include "../../base.h"

#if ENABLED(ULTRA_LCD)
#include "ultralcd.h"

int8_t encoderDiff; // updated from interrupt context and added to encoderPosition every LCD update
bool encoderRateMultiplierEnabled;
int32_t lastEncoderMovementMillis;

#if !MECH(DELTA) && DISABLED(Z_SAFE_HOMING) && Z_HOME_DIR < 0
  // Current page of the manual bed-leveling info screen.
  int pageShowInfo = 0;
  void set_pageShowInfo(int value){ pageShowInfo = value; }
#endif

// Preheat presets (hotend temperature, heated-bed temperature, fan speed)
// for PLA, ABS and flexible ("gum") filament, editable from the menu.
int plaPreheatHotendTemp;
int plaPreheatHPBTemp;
int plaPreheatFanSpeed;
int absPreheatHotendTemp;
int absPreheatHPBTemp;
int absPreheatFanSpeed;
int gumPreheatHotendTemp;
int gumPreheatHPBTemp;
int gumPreheatFanSpeed;

#if HAS(LCD_FILAMENT_SENSOR) || HAS(LCD_POWER_SENSOR)
  // Timestamp of the last status-line refresh for sensor readouts.
  millis_t previous_lcd_status_ms = 0;
#endif
#if HAS(LCD_POWER_SENSOR)
  millis_t print_millis = 0;
#endif

// Function pointer to menu functions.
typedef void (*menuFunc_t)();

// Messages at a higher level replace lower-level ones on the status line.
uint8_t lcd_status_message_level;
char lcd_status_message[3 * LCD_WIDTH + 1] = WELCOME_MSG; // worst case is kana with up to 3*LCD_WIDTH+1

// Select the display backend: graphical (DOGM/u8g) or character (HD44780).
#if ENABLED(DOGLCD)
  #include "dogm_lcd_implementation.h"
  #define LCD_Printpos(x, y) u8g.setPrintPos(x + 5, (y + 1) * (DOG_CHAR_HEIGHT + 2))
#else
  #include "ultralcd_implementation_hitachi_HD44780.h"
  #define LCD_Printpos(x, y) lcd.setCursor(x, y)
#endif

// The main status screen
static void lcd_status_screen();
#if ENABLED(ULTIPANEL)

  #if HAS(POWER_SWITCH)
    extern bool powersupply;
  #endif

  // Feedrates used when jogging axes from the Move menu.
  static float manual_feedrate[] = MANUAL_FEEDRATE;

  // Forward declarations of the menu screens.
  static void lcd_main_menu();
  static void lcd_tune_menu();
  static void lcd_prepare_menu();
  static void lcd_move_menu();
  static void lcd_control_menu();
  static void lcd_stats_menu();
  static void lcd_control_temperature_menu();
  static void lcd_control_temperature_preheat_pla_settings_menu();
  static void lcd_control_temperature_preheat_abs_settings_menu();
  static void lcd_control_temperature_preheat_gum_settings_menu();
  static void lcd_control_motion_menu();
  static void lcd_control_volumetric_menu();

  #if HAS(LCD_CONTRAST)
    static void lcd_set_contrast();
  #endif

  #if ENABLED(FWRETRACT)
    static void lcd_control_retract_menu();
  #endif

  #if MECH(DELTA)
    static void lcd_delta_calibrate_menu();
  #elif !MECH(DELTA) && DISABLED(Z_SAFE_HOMING) && Z_HOME_DIR < 0
    static void lcd_level_bed();
    static void config_lcd_level_bed();
  #endif // DELTA

  /* Different types of actions that can be used in menu items. */
  static void menu_action_back(menuFunc_t data);
  static void menu_action_submenu(menuFunc_t data);
  static void menu_action_gcode(const char* pgcode);
  static void menu_action_function(menuFunc_t data);
  // Edit actions: the suffix encodes the value format, e.g. int3 = 3-digit
  // integer, float52 = 5 digits with 2 decimals, long5 = 5-digit unsigned long.
  static void menu_action_setting_edit_bool(const char* pstr, bool* ptr);
  static void menu_action_setting_edit_int3(const char* pstr, int* ptr, int minValue, int maxValue);
  static void menu_action_setting_edit_float3(const char* pstr, float* ptr, float minValue, float maxValue);
  static void menu_action_setting_edit_float32(const char* pstr, float* ptr, float minValue, float maxValue);
  static void menu_action_setting_edit_float43(const char* pstr, float* ptr, float minValue, float maxValue);
  static void menu_action_setting_edit_float5(const char* pstr, float* ptr, float minValue, float maxValue);
  static void menu_action_setting_edit_float51(const char* pstr, float* ptr, float minValue, float maxValue);
  static void menu_action_setting_edit_float52(const char* pstr, float* ptr, float minValue, float maxValue);
  static void menu_action_setting_edit_long5(const char* pstr, unsigned long* ptr, unsigned long minValue, unsigned long maxValue);
  // "callback" variants additionally invoke callbackFunc after the edit.
  static void menu_action_setting_edit_callback_bool(const char* pstr, bool* ptr, menuFunc_t callbackFunc);
  static void menu_action_setting_edit_callback_int3(const char* pstr, int* ptr, int minValue, int maxValue, menuFunc_t callbackFunc);
  static void menu_action_setting_edit_callback_float3(const char* pstr, float* ptr, float minValue, float maxValue, menuFunc_t callbackFunc);
  static void menu_action_setting_edit_callback_float32(const char* pstr, float* ptr, float minValue, float maxValue, menuFunc_t callbackFunc);
  static void menu_action_setting_edit_callback_float43(const char* pstr, float* ptr, float minValue, float maxValue, menuFunc_t callbackFunc);
  static void menu_action_setting_edit_callback_float5(const char* pstr, float* ptr, float minValue, float maxValue, menuFunc_t callbackFunc);
  static void menu_action_setting_edit_callback_float51(const char* pstr, float* ptr, float minValue, float maxValue, menuFunc_t callbackFunc);
  static void menu_action_setting_edit_callback_float52(const char* pstr, float* ptr, float minValue, float maxValue, menuFunc_t callbackFunc);
  static void menu_action_setting_edit_callback_long5(const char* pstr, unsigned long* ptr, unsigned long minValue, unsigned long maxValue, menuFunc_t callbackFunc);

  #if ENABLED(SDSUPPORT)
    static void lcd_sdcard_menu();
    static void menu_action_sdfile(const char* longFilename);
    static void menu_action_sddirectory(const char* longFilename);
  #endif

  // Encoder clicks below this rate are ignored when editing feedrate.
  #define ENCODER_FEEDRATE_DEADZONE 10

  // Encoder resolution defaults; the VIKI I2C panel uses a different number of
  // steps per rotation, so it gets its own defaults.
  #if DISABLED(LCD_I2C_VIKI)
    #if DISABLED(ENCODER_STEPS_PER_MENU_ITEM)
      #define ENCODER_STEPS_PER_MENU_ITEM 5
    #endif
    #if DISABLED(ENCODER_PULSES_PER_STEP)
      #define ENCODER_PULSES_PER_STEP 1
    #endif
  #else
    #if DISABLED(ENCODER_STEPS_PER_MENU_ITEM)
      #define ENCODER_STEPS_PER_MENU_ITEM 2 // VIKI LCD rotary encoder uses a different number of steps per rotation
    #endif
    #if DISABLED(ENCODER_PULSES_PER_STEP)
      #define ENCODER_PULSES_PER_STEP 1
    #endif
  #endif
  /* Helper macros for menus */

  /**
   * START_MENU generates the init code for a menu function.
   *
   * It derives the highlighted line from encoderPosition, clamps the view
   * offset, and opens the per-row drawing loop that MENU_ITEM entries populate
   * (the loop is closed by the matching END_MENU macro). The BTN_BACK variant
   * additionally leaves the menu via menu_action_back when the dedicated back
   * button was clicked. No // comments may be added inside the macro bodies:
   * backslash-newline splicing happens before comment removal, so a // line
   * would swallow the rest of the macro.
   */
  #if ENABLED(BTN_BACK) && BTN_BACK > 0
    #define START_MENU(last_menu) do { \
      encoderRateMultiplierEnabled = false; \
      if (encoderPosition > 0x8000) encoderPosition = 0; \
      uint8_t encoderLine = encoderPosition / ENCODER_STEPS_PER_MENU_ITEM; \
      NOMORE(currentMenuViewOffset, encoderLine); \
      uint8_t _lineNr = currentMenuViewOffset, _menuItemNr; \
      bool wasClicked = LCD_CLICKED, itemSelected; \
      bool wasBackClicked = LCD_BACK_CLICKED; \
      if (wasBackClicked) { \
        lcd_quick_feedback(); \
        menu_action_back( last_menu ); \
        return; } \
      for (uint8_t _drawLineNr = 0; _drawLineNr < LCD_HEIGHT; _drawLineNr++, _lineNr++) { \
        _menuItemNr = 0;
  #else
    #define START_MENU(last_menu) do { \
      encoderRateMultiplierEnabled = false; \
      if (encoderPosition > 0x8000) encoderPosition = 0; \
      uint8_t encoderLine = encoderPosition / ENCODER_STEPS_PER_MENU_ITEM; \
      NOMORE(currentMenuViewOffset, encoderLine); \
      uint8_t _lineNr = currentMenuViewOffset, _menuItemNr; \
      bool wasClicked = LCD_CLICKED, itemSelected; \
      for (uint8_t _drawLineNr = 0; _drawLineNr < LCD_HEIGHT; _drawLineNr++, _lineNr++) { \
        _menuItemNr = 0;
  #endif

  /**
   * MENU_ITEM generates draw & handler code for a menu item, potentially calling:
   *
   *   lcd_implementation_drawmenu_[type](sel, row, label, arg3...)
   *   menu_action_[type](arg3...)
   *
   * Examples:
   *   MENU_ITEM(back, MSG_WATCH, lcd_status_screen)
   *     lcd_implementation_drawmenu_back(sel, row, PSTR(MSG_WATCH), lcd_status_screen)
   *     menu_action_back(lcd_status_screen)
   *
   *   MENU_ITEM(function, MSG_PAUSE_PRINT, lcd_sdcard_pause)
   *     lcd_implementation_drawmenu_function(sel, row, PSTR(MSG_PAUSE_PRINT), lcd_sdcard_pause)
   *     menu_action_function(lcd_sdcard_pause)
   *
   *   MENU_ITEM_EDIT(int3, MSG_SPEED, &feedrate_multiplier, 10, 999)
   *     MENU_ITEM(setting_edit_int3, MSG_SPEED, PSTR(MSG_SPEED), &feedrate_multiplier, 10, 999)
   *     lcd_implementation_drawmenu_setting_edit_int3(sel, row, PSTR(MSG_SPEED), PSTR(MSG_SPEED), &feedrate_multiplier, 10, 999)
   *     menu_action_setting_edit_int3(PSTR(MSG_SPEED), &feedrate_multiplier, 10, 999)
   *
   * Relies on _menuItemNr/_lineNr/wasClicked from the enclosing START_MENU
   * expansion; only the item on the currently drawn row is rendered/handled.
   */
  #define MENU_ITEM(type, label, args...) do { \
    if (_menuItemNr == _lineNr) { \
      itemSelected = encoderLine == _menuItemNr; \
      if (lcdDrawUpdate) \
        lcd_implementation_drawmenu_ ## type(itemSelected, _drawLineNr, PSTR(label), ## args); \
      if (wasClicked && itemSelected) { \
        lcd_quick_feedback(); \
        menu_action_ ## type(args); \
        return; \
      } \
    } \
    _menuItemNr++; \
  } while(0)
#if ENABLED(ENCODER_RATE_MULTIPLIER)
//#define ENCODER_RATE_MULTIPLIER_DEBUG // If defined, output the encoder steps per second value
/**
* MENU_MULTIPLIER_ITEM generates drawing and handling code for a multiplier menu item
*/
#define MENU_MULTIPLIER_ITEM(type, label, args...) do { \
if (_menuItemNr == _lineNr) { \
itemSelected = encoderLine == _menuItemNr; \
if (lcdDrawUpdate) \
lcd_implementation_drawmenu_ ## type(itemSelected, _drawLineNr, PSTR(label), ## args); \
if (wasClicked && itemSelected) { \
lcd_quick_feedback(); \
encoderRateMultiplierEnabled = true; \
lastEncoderMovementMillis = 0; \
menu_action_ ## type(args); \
return; \
} \
} \
_menuItemNr++; \
} while(0)
#endif //ENCODER_RATE_MULTIPLIER
#define MENU_ITEM_DUMMY() do { _menuItemNr++; } while(0)
#define MENU_ITEM_EDIT(type, label, args...) MENU_ITEM(setting_edit_ ## type, label, PSTR(label), ## args)
#define MENU_ITEM_EDIT_CALLBACK(type, label, args...) MENU_ITEM(setting_edit_callback_ ## type, label, PSTR(label), ## args)
#if ENABLED(ENCODER_RATE_MULTIPLIER)
#define MENU_MULTIPLIER_ITEM_EDIT(type, label, args...) MENU_MULTIPLIER_ITEM(setting_edit_ ## type, label, PSTR(label), ## args)
#define MENU_MULTIPLIER_ITEM_EDIT_CALLBACK(type, label, args...) MENU_MULTIPLIER_ITEM(setting_edit_callback_ ## type, label, PSTR(label), ## args)
#else //!ENCODER_RATE_MULTIPLIER
#define MENU_MULTIPLIER_ITEM_EDIT(type, label, args...) MENU_ITEM(setting_edit_ ## type, label, PSTR(label), ## args)
#define MENU_MULTIPLIER_ITEM_EDIT_CALLBACK(type, label, args...) MENU_ITEM(setting_edit_callback_ ## type, label, PSTR(label), ## args)
#endif //!ENCODER_RATE_MULTIPLIER
#define END_MENU() \
if (encoderLine >= _menuItemNr) { encoderPosition = _menuItemNr * ENCODER_STEPS_PER_MENU_ITEM - 1; encoderLine = encoderPosition / ENCODER_STEPS_PER_MENU_ITEM; }\
if (encoderLine >= currentMenuViewOffset + LCD_HEIGHT) { currentMenuViewOffset = encoderLine - LCD_HEIGHT + 1; lcdDrawUpdate = 1; _lineNr = currentMenuViewOffset - 1; _drawLineNr = -1; } \
} } while(0)
/** Used variables to keep track of the menu */
volatile uint8_t buttons;  //the last checked buttons in a bit array.
#if ENABLED(REPRAPWORLD_KEYPAD)
volatile uint8_t buttons_reprapworld_keypad; // to store the keypad shift register values
#endif
#if ENABLED(LCD_HAS_SLOW_BUTTONS)
volatile uint8_t slow_buttons; // Bits of the pressed buttons.
#endif
uint8_t currentMenuViewOffset;              /* scroll offset in the current menu */
millis_t next_button_update_ms;
uint8_t lastEncoderBits;
uint32_t encoderPosition;
#if PIN_EXISTS(SD_DETECT)
uint8_t lcd_sd_status;
#endif
// Closes an #if ENABLED(ULTIPANEL) opened earlier in the file.
#endif // ULTIPANEL
menuFunc_t currentMenu = lcd_status_screen; /* function pointer to the currently active menu */
millis_t next_lcd_update_ms;
uint8_t lcd_status_update_delay;
bool ignore_click = false;
bool wait_for_unclick;
uint8_t lcdDrawUpdate = 2;                  /* Set to none-zero when the LCD needs to draw, decreased after every draw. Set to 2 in LCD routines so the LCD gets at least 1 full redraw (first redraw is partial) */
//prevMenu and prevEncoderPosition are used to store the previous menu location when editing settings.
menuFunc_t prevMenu = NULL;
uint16_t prevEncoderPosition;
//Variables used when editing values.
const char* editLabel;
void* editValue;
int32_t minEditValue, maxEditValue;
// Invoked after an edit-with-callback item is committed.
menuFunc_t callbackFunc;
// place-holders for Ki and Kd edits
float raw_Ki, raw_Kd;
/**
 * General function to go directly to a menu
 *
 * menu     - menu function to activate
 * feedback - if true, emit an audible/haptic click (NEWPANEL only)
 * encoder  - initial encoder position for the new menu (NEWPANEL only)
 *
 * No-op when the requested menu is already active.
 */
static void lcd_goto_menu(menuFunc_t menu, const bool feedback = false, const uint32_t encoder = 0) {
if (currentMenu != menu) {
currentMenu = menu;
#if ENABLED(NEWPANEL)
encoderPosition = encoder;
if (feedback) lcd_quick_feedback();
#endif
// For LCD_PROGRESS_BAR re-initialize the custom characters
#if ENABLED(LCD_PROGRESS_BAR)
lcd_set_custom_characters(menu == lcd_status_screen);
#endif
}
}
/**
 *
 * "Info Screen"
 *
 * This is very display-dependent, so the lcd implementation draws this.
 */
// Handles progress-bar/message alternation, click-to-main-menu, and the
// encoder-as-feedrate-override behavior while the status screen is shown.
static void lcd_status_screen() {
encoderRateMultiplierEnabled = false;
#if ENABLED(LCD_PROGRESS_BAR)
millis_t ms = millis();
#if DISABLED(PROGRESS_MSG_ONCE)
// Restart the bar/message alternation cycle once it has elapsed.
if (ms > progress_bar_ms + PROGRESS_BAR_MSG_TIME + PROGRESS_BAR_BAR_TIME) {
progress_bar_ms = ms;
}
#endif
#if PROGRESS_MSG_EXPIRE > 0
// Handle message expire
if (expire_status_ms > 0) {
#if ENABLED(SDSUPPORT)
if (card.isFileOpen()) {
// Expire the message when printing is active
if (IS_SD_PRINTING) {
if (ms >= expire_status_ms) {
lcd_status_message[0] = '\0';
expire_status_ms = 0;
}
}
else {
// Not printing: push the deadline forward so the message persists.
expire_status_ms += LCD_UPDATE_INTERVAL;
}
}
else {
expire_status_ms = 0;
}
#else
expire_status_ms = 0;
#endif // SDSUPPORT
}
#endif
#endif // LCD_PROGRESS_BAR
lcd_implementation_status_screen();
#if HAS(LCD_POWER_SENSOR)
if (millis() > print_millis + 2000) print_millis = millis();
#endif
#if HAS(LCD_FILAMENT_SENSOR) || HAS(LCD_POWER_SENSOR)
#if HAS(LCD_FILAMENT_SENSOR) && HAS(LCD_POWER_SENSOR)
if (millis() > previous_lcd_status_ms + 15000)
#else
if (millis() > previous_lcd_status_ms + 10000)
#endif
{
previous_lcd_status_ms = millis();
}
#endif
#if ENABLED(ULTIPANEL)
bool current_click = LCD_CLICKED;
// ignore_click swallows one click (and its release) after certain events.
if (ignore_click) {
if (wait_for_unclick) {
if (!current_click)
ignore_click = wait_for_unclick = false;
else
current_click = false;
}
else if (current_click) {
lcd_quick_feedback();
wait_for_unclick = true;
current_click = false;
}
}
if (current_click) {
lcd_goto_menu(lcd_main_menu, true);
lcd_implementation_init( // to maybe revive the LCD if static electricity killed it.
#if ENABLED(LCD_PROGRESS_BAR)
currentMenu == lcd_status_screen
#endif
);
#if HAS(LCD_FILAMENT_SENSOR) || HAS(LCD_POWER_SENSOR)
previous_lcd_status_ms = millis(); // get status message to show up for a while
#endif
}
#if ENABLED(ULTIPANEL_FEEDMULTIPLY)
// Dead zone at 100% feedrate
if ((feedrate_multiplier < 100 && (feedrate_multiplier + int(encoderPosition)) > 100) ||
(feedrate_multiplier > 100 && (feedrate_multiplier + int(encoderPosition)) < 100)) {
encoderPosition = 0;
feedrate_multiplier = 100;
}
if (feedrate_multiplier == 100) {
// Require the knob to travel past the dead zone before leaving 100%.
if (int(encoderPosition) > ENCODER_FEEDRATE_DEADZONE) {
feedrate_multiplier += int(encoderPosition) - ENCODER_FEEDRATE_DEADZONE;
encoderPosition = 0;
}
else if (int(encoderPosition) < -ENCODER_FEEDRATE_DEADZONE) {
feedrate_multiplier += int(encoderPosition) + ENCODER_FEEDRATE_DEADZONE;
encoderPosition = 0;
}
}
else {
feedrate_multiplier += int(encoderPosition);
encoderPosition = 0;
}
#endif // ULTIPANEL_FEEDMULTIPLY
feedrate_multiplier = constrain(feedrate_multiplier, 10, 999);
#endif // ULTIPANEL
}
#if ENABLED(ULTIPANEL)
// Jump back to the info/status screen from any menu.
static void lcd_return_to_status() { lcd_goto_menu(lcd_status_screen); }
#if ENABLED(SDSUPPORT)
static void lcd_sdcard_pause() { card.pausePrint(); }
static void lcd_sdcard_resume() { card.startPrint(); }
// Abort the running SD print: flush planned moves, close the file,
// stop autotemp, cancel any heat-up wait, and show a status message.
static void lcd_sdcard_stop() {
quickStop();
card.sdprinting = false;
card.closeFile();
autotempShutdown();
cancel_heatup = true;
lcd_setstatus(MSG_PRINT_ABORTED, true);
}
#endif // SDSUPPORT
/**
 *
 * "Main" menu
 *
 */
// Top-level menu: Tune while printing, Prepare (+ Delta calibrate) when
// idle, then Control, Stats and — with SDSUPPORT — the SD card actions.
static void lcd_main_menu() {
START_MENU(lcd_status_screen);
MENU_ITEM(back, MSG_WATCH, lcd_status_screen);
if (movesplanned() || IS_SD_PRINTING) {
MENU_ITEM(submenu, MSG_TUNE, lcd_tune_menu);
}
else {
MENU_ITEM(submenu, MSG_PREPARE, lcd_prepare_menu);
#if MECH(DELTA)
MENU_ITEM(submenu, MSG_DELTA_CALIBRATE, lcd_delta_calibrate_menu);
#endif
}
MENU_ITEM(submenu, MSG_CONTROL, lcd_control_menu);
MENU_ITEM(submenu, MSG_STATS, lcd_stats_menu);
#if ENABLED(SDSUPPORT)
if (card.cardOK) {
if (card.isFileOpen()) {
// A print job is loaded: offer pause/resume plus stop.
if (card.sdprinting)
MENU_ITEM(function, MSG_PAUSE_PRINT, lcd_sdcard_pause);
else
MENU_ITEM(function, MSG_RESUME_PRINT, lcd_sdcard_resume);
MENU_ITEM(function, MSG_STOP_PRINT, lcd_sdcard_stop);
}
else {
MENU_ITEM(submenu, MSG_CARD_MENU, lcd_sdcard_menu);
#if !PIN_EXISTS(SD_DETECT)
MENU_ITEM(gcode, MSG_CNG_SDCARD, PSTR("M21"));  // SD-card changed by user
#endif
}
}
else {
MENU_ITEM(submenu, MSG_NO_CARD, lcd_sdcard_menu);
#if !PIN_EXISTS(SD_DETECT)
MENU_ITEM(gcode, MSG_INIT_SDCARD, PSTR("M21")); // Manually initialize the SD-card via user interface
#endif
}
#endif // SDSUPPORT
END_MENU();
}
#if ENABLED(SDSUPPORT) && ENABLED(MENU_ADDAUTOSTART)
// Re-scan the SD card for auto0.g-style autostart files and run them.
static void lcd_autostart_sd() {
card.checkautostart(true);
}
#endif
/**
 * Set the home offset based on the current_position
 */
void lcd_set_home_offsets() {
// M428 Command
enqueue_and_echo_commands_P(PSTR("M428"));
lcd_return_to_status();
}
#if ENABLED(BABYSTEPPING)
// Accumulate encoder movement into the babystep queue for one axis and
// draw the edit screen; a click returns to the Tune menu.
static void _lcd_babystep(int axis, const char* msg) {
if (encoderPosition != 0) {
babystepsTodo[axis] += BABYSTEP_MULTIPLICATOR * (int)encoderPosition;
encoderPosition = 0;
lcdDrawUpdate = 1;
}
if (lcdDrawUpdate) lcd_implementation_drawedit(msg, (char*)"");
if (LCD_CLICKED) lcd_goto_menu(lcd_tune_menu);
}
static void lcd_babystep_x() { _lcd_babystep(X_AXIS, PSTR(MSG_BABYSTEPPING_X)); }
static void lcd_babystep_y() { _lcd_babystep(Y_AXIS, PSTR(MSG_BABYSTEPPING_Y)); }
static void lcd_babystep_z() { _lcd_babystep(Z_AXIS, PSTR(MSG_BABYSTEPPING_Z)); }
#endif // BABYSTEPPING
// Re-home to recover from lost steps ("B" flag appears to be a
// project-specific G28 option — confirm against the G-code handler).
static void lcd_tune_fixstep() {
#if MECH(DELTA)
enqueue_and_echo_commands_P(PSTR("G28 B"));
#else
enqueue_and_echo_commands_P(PSTR("G28 X Y B"));
#endif
}
/**
 * Watch temperature callbacks
 */
// With thermal protection enabled, editing a nozzle target arms the
// heater watchdog; otherwise the callbacks are empty stubs.
#if ENABLED(THERMAL_PROTECTION_HOTENDS)
#if TEMP_SENSOR_0 != 0
void watch_temp_callback_E0() { start_watching_heater(0); }
#endif
#if HOTENDS > 1 && TEMP_SENSOR_1 != 0
void watch_temp_callback_E1() { start_watching_heater(1); }
#endif
#if HOTENDS > 2 && TEMP_SENSOR_2 != 0
void watch_temp_callback_E2() { start_watching_heater(2); }
#endif
#if HOTENDS > 3 && TEMP_SENSOR_3 != 0
void watch_temp_callback_E3() { start_watching_heater(3); }
#endif
#else
#if TEMP_SENSOR_0 != 0
void watch_temp_callback_E0() {}
#endif
#if HOTENDS > 1 && TEMP_SENSOR_1 != 0
void watch_temp_callback_E1() {}
#endif
#if HOTENDS > 2 && TEMP_SENSOR_2 != 0
void watch_temp_callback_E2() {}
#endif
#if HOTENDS > 3 && TEMP_SENSOR_3 != 0
void watch_temp_callback_E3() {}
#endif
#endif // !THERMAL_PROTECTION_HOTENDS
/**
 *
 * "Tune" submenu
 *
 */
// Mid-print adjustments: feedrate, temperatures, fan, flow, babystep,
// lost-step recovery and filament change.
static void lcd_tune_menu() {
START_MENU(lcd_main_menu);
//
// ^ Main
//
MENU_ITEM(back, MSG_MAIN, lcd_main_menu);
//
// Speed:
//
MENU_ITEM_EDIT(int3, MSG_SPEED, &feedrate_multiplier, 10, 999);
//
// Nozzle:
//
#if HOTENDS == 1
#if TEMP_SENSOR_0 != 0
MENU_MULTIPLIER_ITEM_EDIT_CALLBACK(int3, MSG_NOZZLE, &target_temperature[0], 0, HEATER_0_MAXTEMP - 15, watch_temp_callback_E0);
#endif
#else // HOTENDS > 1
#if TEMP_SENSOR_0 != 0
MENU_MULTIPLIER_ITEM_EDIT_CALLBACK(int3, MSG_NOZZLE " 0", &target_temperature[0], 0, HEATER_0_MAXTEMP - 15, watch_temp_callback_E0);
#endif
#if TEMP_SENSOR_1 != 0
MENU_MULTIPLIER_ITEM_EDIT_CALLBACK(int3, MSG_NOZZLE " 1", &target_temperature[1], 0, HEATER_1_MAXTEMP - 15, watch_temp_callback_E1);
#endif
#if HOTENDS > 2
#if TEMP_SENSOR_2 != 0
MENU_MULTIPLIER_ITEM_EDIT_CALLBACK(int3, MSG_NOZZLE " 2", &target_temperature[2], 0, HEATER_2_MAXTEMP - 15, watch_temp_callback_E2);
#endif
#if HOTENDS > 3
#if TEMP_SENSOR_3 != 0
MENU_MULTIPLIER_ITEM_EDIT_CALLBACK(int3, MSG_NOZZLE " 3", &target_temperature[3], 0, HEATER_3_MAXTEMP - 15, watch_temp_callback_E3);
#endif
#endif // HOTENDS > 3
#endif // HOTENDS > 2
#endif // HOTENDS > 1
//
// Bed:
//
#if TEMP_SENSOR_BED != 0
MENU_MULTIPLIER_ITEM_EDIT(int3, MSG_BED, &target_temperature_bed, 0, BED_MAXTEMP - 15);
#endif
//
// Fan Speed:
//
MENU_MULTIPLIER_ITEM_EDIT(int3, MSG_FAN_SPEED, &fanSpeed, 0, 255);
//
// Flow:
// Flow 1:
// Flow 2:
// Flow 3:
// Flow 4:
//
#if EXTRUDERS == 1
MENU_ITEM_EDIT(int3, MSG_FLOW, &extruder_multiplier[0], 10, 999);
#else // EXTRUDERS > 1
// First entry edits the active extruder, then one entry per extruder.
MENU_ITEM_EDIT(int3, MSG_FLOW, &extruder_multiplier[active_extruder], 10, 999);
MENU_ITEM_EDIT(int3, MSG_FLOW " 0", &extruder_multiplier[0], 10, 999);
MENU_ITEM_EDIT(int3, MSG_FLOW " 1", &extruder_multiplier[1], 10, 999);
#if EXTRUDERS > 2
MENU_ITEM_EDIT(int3, MSG_FLOW " 2", &extruder_multiplier[2], 10, 999);
#if EXTRUDERS > 3
MENU_ITEM_EDIT(int3, MSG_FLOW " 3", &extruder_multiplier[3], 10, 999);
#endif // EXTRUDERS > 3
#endif // EXTRUDERS > 2
#endif // EXTRUDERS > 1
//
// Babystep X:
// Babystep Y:
// Babystep Z:
//
#if ENABLED(BABYSTEPPING)
#if ENABLED(BABYSTEP_XY)
MENU_ITEM(submenu, MSG_BABYSTEP_X, lcd_babystep_x);
MENU_ITEM(submenu, MSG_BABYSTEP_Y, lcd_babystep_y);
#endif // BABYSTEP_XY
MENU_ITEM(submenu, MSG_BABYSTEP_Z, lcd_babystep_z);
#endif
MENU_ITEM(function, MSG_FIX_LOSE_STEPS, lcd_tune_fixstep);
//
// Change filament
//
#if ENABLED(FILAMENTCHANGEENABLE)
MENU_ITEM(gcode, MSG_FILAMENTCHANGE, PSTR("M600"));
#endif
END_MENU();
}
#if ENABLED(EASY_LOAD)
// Queue a relative E move of `length` mm at `feedrate` through the
// planner (delta machines go through the kinematics transform first).
static void lcd_extrude(float length, float feedrate) {
current_position[E_AXIS] += length;
#if MECH(DELTA)
calculate_delta(current_position);
plan_buffer_line(delta[X_AXIS], delta[Y_AXIS], delta[Z_AXIS], current_position[E_AXIS], feedrate, active_extruder, active_driver);
#else
plan_buffer_line(current_position[X_AXIS], current_position[Y_AXIS], current_position[Z_AXIS], current_position[E_AXIS], feedrate, active_extruder, active_driver);
#endif
}
static void lcd_purge() { lcd_extrude(LCD_PURGE_LENGTH, LCD_PURGE_FEEDRATE); }
static void lcd_retract() { lcd_extrude(-LCD_RETRACT_LENGTH, LCD_RETRACT_FEEDRATE); }
// Full bowden-length load/unload; allow_lengthy_extrude_once bypasses
// the max-extrude-length safety check for this single move.
static void lcd_easy_load() {
allow_lengthy_extrude_once = true;
lcd_extrude(BOWDEN_LENGTH, LCD_LOAD_FEEDRATE);
lcd_return_to_status();
}
static void lcd_easy_unload() {
allow_lengthy_extrude_once = true;
lcd_extrude(-BOWDEN_LENGTH, LCD_UNLOAD_FEEDRATE);
lcd_return_to_status();
}
#endif // EASY_LOAD
// Apply a preheat preset: set hotend `endnum` (only if temph > 0), the
// bed (when present) and the fan, then return to the status screen.
void _lcd_preheat(int endnum, const float temph, const float tempb, const int fan) {
if (temph > 0) setTargetHotend(temph, endnum);
#if TEMP_SENSOR_BED != 0
setTargetBed(tempb);
#endif
fanSpeed = fan;
lcd_return_to_status();
}
#if TEMP_SENSOR_0 != 0
void lcd_preheat_pla0() { _lcd_preheat(0, plaPreheatHotendTemp, plaPreheatHPBTemp, plaPreheatFanSpeed); }
void lcd_preheat_abs0() { _lcd_preheat(0, absPreheatHotendTemp, absPreheatHPBTemp, absPreheatFanSpeed); }
void lcd_preheat_gum0() { _lcd_preheat(0, gumPreheatHotendTemp, gumPreheatHPBTemp, gumPreheatFanSpeed); }
#endif
#if HOTENDS > 1
void lcd_preheat_pla1() { _lcd_preheat(1, plaPreheatHotendTemp, plaPreheatHPBTemp, plaPreheatFanSpeed); }
void lcd_preheat_abs1() { _lcd_preheat(1, absPreheatHotendTemp, absPreheatHPBTemp, absPreheatFanSpeed); }
void lcd_preheat_gum1() { _lcd_preheat(1, gumPreheatHotendTemp, gumPreheatHPBTemp, gumPreheatFanSpeed); }
#if HOTENDS > 2
void lcd_preheat_pla2() { _lcd_preheat(2, plaPreheatHotendTemp, plaPreheatHPBTemp, plaPreheatFanSpeed); }
void lcd_preheat_abs2() { _lcd_preheat(2, absPreheatHotendTemp, absPreheatHPBTemp, absPreheatFanSpeed); }
void lcd_preheat_gum2() { _lcd_preheat(2, gumPreheatHotendTemp, gumPreheatHPBTemp, gumPreheatFanSpeed); }
#if HOTENDS > 3
void lcd_preheat_pla3() { _lcd_preheat(3, plaPreheatHotendTemp, plaPreheatHPBTemp, plaPreheatFanSpeed); }
void lcd_preheat_abs3() { _lcd_preheat(3, absPreheatHotendTemp, absPreheatHPBTemp, absPreheatFanSpeed); }
void lcd_preheat_gum3() { _lcd_preheat(3, gumPreheatHotendTemp, gumPreheatHPBTemp, gumPreheatFanSpeed); }
#endif
#endif
// "Preheat all" variants: heat hotends 0-2 directly, then hotend 3 (and
// bed/fan) via _lcd_preheat.
// NOTE(review): these are compiled for any HOTENDS > 1, yet they always
// touch hotends 0-3 — confirm setTargetHotend{1,2} and endnum 3 are safe
// no-ops when fewer hotends exist.
void lcd_preheat_pla0123() {
setTargetHotend0(plaPreheatHotendTemp);
setTargetHotend1(plaPreheatHotendTemp);
setTargetHotend2(plaPreheatHotendTemp);
_lcd_preheat(3, plaPreheatHotendTemp, plaPreheatHPBTemp, plaPreheatFanSpeed);
}
void lcd_preheat_abs0123() {
setTargetHotend0(absPreheatHotendTemp);
setTargetHotend1(absPreheatHotendTemp);
setTargetHotend2(absPreheatHotendTemp);
_lcd_preheat(3, absPreheatHotendTemp, absPreheatHPBTemp, absPreheatFanSpeed);
}
void lcd_preheat_gum0123() {
setTargetHotend0(gumPreheatHotendTemp);
setTargetHotend1(gumPreheatHotendTemp);
setTargetHotend2(gumPreheatHotendTemp);
_lcd_preheat(3, gumPreheatHotendTemp, gumPreheatHPBTemp, gumPreheatFanSpeed);
}
#endif // HOTENDS > 1
#if TEMP_SENSOR_BED != 0
// Bed-only presets: temph == 0 skips the hotend entirely.
void lcd_preheat_pla_bedonly() { _lcd_preheat(0, 0, plaPreheatHPBTemp, plaPreheatFanSpeed); }
void lcd_preheat_abs_bedonly() { _lcd_preheat(0, 0, absPreheatHPBTemp, absPreheatFanSpeed); }
void lcd_preheat_gum_bedonly() { _lcd_preheat(0, 0, gumPreheatHPBTemp, gumPreheatFanSpeed); }
#endif
#if TEMP_SENSOR_0 != 0 && (TEMP_SENSOR_1 != 0 || TEMP_SENSOR_2 != 0 || TEMP_SENSOR_3 != 0 || TEMP_SENSOR_BED != 0)
// Per-material preheat submenus, shown only when there is more than one
// heat target to choose from.
static void lcd_preheat_pla_menu() {
START_MENU(lcd_prepare_menu);
MENU_ITEM(back, MSG_PREPARE, lcd_prepare_menu);
#if HOTENDS == 1
MENU_ITEM(function, MSG_PREHEAT_PLA, lcd_preheat_pla0);
#else
MENU_ITEM(function, MSG_PREHEAT_PLA " 0", lcd_preheat_pla0);
MENU_ITEM(function, MSG_PREHEAT_PLA " 1", lcd_preheat_pla1);
#if HOTENDS > 2
MENU_ITEM(function, MSG_PREHEAT_PLA " 2", lcd_preheat_pla2);
#if HOTENDS > 3
MENU_ITEM(function, MSG_PREHEAT_PLA " 3", lcd_preheat_pla3);
#endif
#endif
MENU_ITEM(function, MSG_PREHEAT_PLA_ALL, lcd_preheat_pla0123);
#endif
#if TEMP_SENSOR_BED != 0
MENU_ITEM(function, MSG_PREHEAT_PLA_BEDONLY, lcd_preheat_pla_bedonly);
#endif
END_MENU();
}
static void lcd_preheat_abs_menu() {
START_MENU(lcd_prepare_menu);
MENU_ITEM(back, MSG_TEMPERATURE, lcd_prepare_menu);
#if HOTENDS == 1
MENU_ITEM(function, MSG_PREHEAT_ABS, lcd_preheat_abs0);
#else
MENU_ITEM(function, MSG_PREHEAT_ABS " 0", lcd_preheat_abs0);
MENU_ITEM(function, MSG_PREHEAT_ABS " 1", lcd_preheat_abs1);
#if HOTENDS > 2
MENU_ITEM(function, MSG_PREHEAT_ABS " 2", lcd_preheat_abs2);
#if HOTENDS > 3
MENU_ITEM(function, MSG_PREHEAT_ABS " 3", lcd_preheat_abs3);
#endif
#endif
MENU_ITEM(function, MSG_PREHEAT_ABS_ALL, lcd_preheat_abs0123);
#endif
#if TEMP_SENSOR_BED != 0
MENU_ITEM(function, MSG_PREHEAT_ABS_BEDONLY, lcd_preheat_abs_bedonly);
#endif
END_MENU();
}
static void lcd_preheat_gum_menu() {
START_MENU(lcd_prepare_menu);
MENU_ITEM(back, MSG_TEMPERATURE, lcd_prepare_menu);
#if HOTENDS == 1
MENU_ITEM(function, MSG_PREHEAT_GUM, lcd_preheat_gum0);
#else
MENU_ITEM(function, MSG_PREHEAT_GUM " 0", lcd_preheat_gum0);
MENU_ITEM(function, MSG_PREHEAT_GUM " 1", lcd_preheat_gum1);
#if HOTENDS > 2
MENU_ITEM(function, MSG_PREHEAT_GUM " 2", lcd_preheat_gum2);
#if HOTENDS > 3
MENU_ITEM(function, MSG_PREHEAT_GUM " 3", lcd_preheat_gum3);
#endif
#endif
MENU_ITEM(function, MSG_PREHEAT_GUM_ALL, lcd_preheat_gum0123);
#endif
#if TEMP_SENSOR_BED != 0
MENU_ITEM(function, MSG_PREHEAT_GUM_BEDONLY, lcd_preheat_gum_bedonly);
#endif
END_MENU();
}
#endif // TEMP_SENSOR_0 && (TEMP_SENSOR_1 || TEMP_SENSOR_2 || TEMP_SENSOR_3 || TEMP_SENSOR_BED)
// Turn off all heaters and the fan, then show the status screen.
void lcd_cooldown() {
disable_all_heaters();
fanSpeed = 0;
lcd_return_to_status();
}
/**
 *
 * "Prepare" submenu
 *
 * Idle-time machine preparation: homing, home offsets, bed leveling,
 * manual axis moves, stepper disable, preheat presets, filament
 * load/unload, laser TTL, cooldown, PSU switching and SD autostart.
 */
static void lcd_prepare_menu() {
START_MENU(lcd_main_menu);
//
// ^ Main
//
MENU_ITEM(back, MSG_MAIN, lcd_main_menu);
//
// Auto Home
//
MENU_ITEM(gcode, MSG_AUTO_HOME, PSTR("G28"));
//
// Set Home Offsets
//
MENU_ITEM(function, MSG_SET_HOME_OFFSETS, lcd_set_home_offsets);
//MENU_ITEM(gcode, MSG_SET_ORIGIN, PSTR("G92 X0 Y0 Z0"));
//
// Level Bed
//
#if ENABLED(AUTO_BED_LEVELING_FEATURE)
// BUGFIX: '==' binds tighter than '&', so the previous expression
// 'axis_known_position & MASK == MASK' evaluated as
// 'axis_known_position & 1' and only required the X axis to be homed.
// Parenthesize the AND so both X and Y must be homed before G29 is offered.
if ((axis_known_position & (_BV(X_AXIS)|_BV(Y_AXIS))) == (_BV(X_AXIS)|_BV(Y_AXIS)))
MENU_ITEM(gcode, MSG_LEVEL_BED, PSTR("G29"));
#elif !MECH(DELTA) && DISABLED(Z_SAFE_HOMING) && Z_HOME_DIR < 0
MENU_ITEM(submenu, MSG_MBL_SETTING, config_lcd_level_bed);
#endif
//
// Move Axis
//
MENU_ITEM(submenu, MSG_MOVE_AXIS, lcd_move_menu);
//
// Disable Steppers
//
MENU_ITEM(gcode, MSG_DISABLE_STEPPERS, PSTR("M84"));
//
// Preheat PLA
// Preheat ABS
// Preheat GUM
//
#if TEMP_SENSOR_0 != 0
#if TEMP_SENSOR_1 != 0 || TEMP_SENSOR_2 != 0 || TEMP_SENSOR_3 != 0 || TEMP_SENSOR_BED != 0
// Multiple heat targets: go through a per-material submenu.
MENU_ITEM(submenu, MSG_PREHEAT_PLA, lcd_preheat_pla_menu);
MENU_ITEM(submenu, MSG_PREHEAT_ABS, lcd_preheat_abs_menu);
MENU_ITEM(submenu, MSG_PREHEAT_GUM, lcd_preheat_gum_menu);
#else
// Single hotend, no bed: preheat directly.
MENU_ITEM(function, MSG_PREHEAT_PLA, lcd_preheat_pla0);
MENU_ITEM(function, MSG_PREHEAT_ABS, lcd_preheat_abs0);
MENU_ITEM(function, MSG_PREHEAT_GUM, lcd_preheat_gum0);
#endif
#endif
//
// Easy Load
//
#if ENABLED(EASY_LOAD)
MENU_ITEM(function, MSG_E_BOWDEN_LENGTH, lcd_easy_load);
MENU_ITEM(function, MSG_R_BOWDEN_LENGTH, lcd_easy_unload);
MENU_ITEM(function, MSG_PURGE_XMM, lcd_purge);
MENU_ITEM(function, MSG_RETRACT_XMM, lcd_retract);
#endif // EASY_LOAD
//
// LASER BEAM
//
#if ENABLED(LASERBEAM)
MENU_ITEM_EDIT(int3, MSG_LASER, &laser_ttl_modulation, 0, 255);
// Mirror the edited modulation value onto the laser power pin.
if(laser_ttl_modulation == 0) {
WRITE(LASER_PWR_PIN, LOW);
}
else {
WRITE(LASER_PWR_PIN, HIGH);
}
#endif
//
// Cooldown
//
MENU_ITEM(function, MSG_COOLDOWN, lcd_cooldown);
//
// Switch power on/off
//
#if HAS(POWER_SWITCH)
if (powersupply)
MENU_ITEM(gcode, MSG_SWITCH_PS_OFF, PSTR("M81"));
else
MENU_ITEM(gcode, MSG_SWITCH_PS_ON, PSTR("M80"));
#endif
//
// Autostart
//
#if ENABLED(SDSUPPORT) && ENABLED(MENU_ADDAUTOSTART)
MENU_ITEM(function, MSG_AUTOSTART, lcd_autostart_sd);
#endif
END_MENU();
}
#if MECH(DELTA)
// Move the nozzle to the three tower positions and the center so the
// user can manually check/adjust delta endstop calibration.
static void lcd_delta_calibrate_menu() {
START_MENU(lcd_main_menu);
MENU_ITEM(back, MSG_MAIN, lcd_main_menu);
MENU_ITEM(gcode, MSG_AUTO_HOME, PSTR("G28"));
MENU_ITEM(gcode, MSG_DELTA_CALIBRATE_X, PSTR("G0 F8000 X-77.94 Y-45 Z0"));
MENU_ITEM(gcode, MSG_DELTA_CALIBRATE_Y, PSTR("G0 F8000 X77.94 Y-45 Z0"));
MENU_ITEM(gcode, MSG_DELTA_CALIBRATE_Z, PSTR("G0 F8000 X0 Y90 Z0"));
MENU_ITEM(gcode, MSG_DELTA_CALIBRATE_CENTER, PSTR("G0 F8000 X0 Y0 Z0"));
END_MENU();
}
#endif // DELTA
// Plan a move to current_position at `feedrate` (given in mm/min, hence
// the /60 to mm/s); delta machines transform through the kinematics first.
inline void line_to_current(float feedrate) {
#if MECH(DELTA)
calculate_delta(current_position);
plan_buffer_line(delta[X_AXIS], delta[Y_AXIS], delta[Z_AXIS], current_position[E_AXIS], feedrate/60, active_extruder, active_driver);
#else
plan_buffer_line(current_position[X_AXIS], current_position[Y_AXIS], current_position[Z_AXIS], current_position[E_AXIS], feedrate/60, active_extruder, active_driver);
#endif
}
/**
 *
 * "Prepare" > "Move Axis" submenu
 *
 */
// Scale applied to each encoder step while jogging (10 / 1 / 0.1 mm).
float move_menu_scale;
static void lcd_move_menu_axis();
// Jog one axis by encoder input, clamped to [min, max] when software
// endstops are enabled; draws the position and exits on click.
static void _lcd_move(const char* name, AxisEnum axis, int min, int max) {
if (encoderPosition != 0) {
refresh_cmd_timeout();
current_position[axis] += float((int)encoderPosition) * move_menu_scale;
if (SOFTWARE_MIN_ENDSTOPS) NOLESS(current_position[axis], min);
if (SOFTWARE_MAX_ENDSTOPS) NOMORE(current_position[axis], max);
encoderPosition = 0;
line_to_current(manual_feedrate[axis]);
lcdDrawUpdate = 1;
}
if (lcdDrawUpdate) lcd_implementation_drawedit(name, ftostr31(current_position[axis]));
if (LCD_CLICKED) lcd_goto_menu(lcd_move_menu_axis);
}
#if MECH(DELTA)
// On delta machines the jog range is clipped to the circular printable
// area: for a given offset `a` on one axis, the other axis may travel
// +/- sqrt(R^2 - a^2).
static float delta_clip_radius_2 = DELTA_PRINTABLE_RADIUS * DELTA_PRINTABLE_RADIUS;
static int delta_clip( float a ) { return sqrt(delta_clip_radius_2 - a * a); }
static void lcd_move_x() { int clip = delta_clip(current_position[Y_AXIS]); _lcd_move(PSTR(MSG_MOVE_X), X_AXIS, max(X_MIN_POS, -clip), min(X_MAX_POS, clip)); }
// BUGFIX: this was a verbatim copy of lcd_move_x (it showed the X label
// and moved the X axis). Now it moves Y, labeled Y, clipped by the
// current X offset and bounded by the Y software limits.
static void lcd_move_y() { int clip = delta_clip(current_position[X_AXIS]); _lcd_move(PSTR(MSG_MOVE_Y), Y_AXIS, max(Y_MIN_POS, -clip), min(Y_MAX_POS, clip)); }
#else
static void lcd_move_x() { _lcd_move(PSTR(MSG_MOVE_X), X_AXIS, X_MIN_POS, X_MAX_POS); }
static void lcd_move_y() { _lcd_move(PSTR(MSG_MOVE_Y), Y_AXIS, Y_MIN_POS, Y_MAX_POS); }
#endif
// Z is not radius-limited on either mechanism.
static void lcd_move_z() { _lcd_move(PSTR(MSG_MOVE_Z), Z_AXIS, Z_MIN_POS, Z_MAX_POS); }
// Jog the extruder axis by encoder input. With multiple extruders the
// target extruder `e` is made active for the duration of the move and
// the previous active extruder is restored afterwards.
static void lcd_move_e(
#if EXTRUDERS > 1
uint8_t e
#endif
) {
#if EXTRUDERS > 1
unsigned short original_active_extruder = active_extruder;
active_extruder = e;
#endif
if (encoderPosition != 0) {
#if ENABLED(IDLE_OOZING_PREVENT)
IDLE_OOZING_retract(false);
#endif
current_position[E_AXIS] += float((int)encoderPosition) * move_menu_scale;
encoderPosition = 0;
line_to_current(manual_feedrate[E_AXIS]);
lcdDrawUpdate = 1;
}
if (lcdDrawUpdate) {
// Pick the PROGMEM label matching the extruder being moved.
PGM_P pos_label;
#if EXTRUDERS == 1
pos_label = PSTR(MSG_MOVE_E);
#else
switch (e) {
case 0: pos_label = PSTR(MSG_MOVE_E "0"); break;
case 1: pos_label = PSTR(MSG_MOVE_E "1"); break;
#if EXTRUDERS > 2
case 2: pos_label = PSTR(MSG_MOVE_E "2"); break;
#if EXTRUDERS > 3
case 3: pos_label = PSTR(MSG_MOVE_E "3"); break;
#endif // EXTRUDERS > 3
#endif // EXTRUDERS > 2
}
#endif // EXTRUDERS > 1
lcd_implementation_drawedit(pos_label, ftostr31(current_position[E_AXIS]));
}
if (LCD_CLICKED) lcd_goto_menu(lcd_move_menu_axis);
#if EXTRUDERS > 1
active_extruder = original_active_extruder;
#endif
}
#if EXTRUDERS > 1
static void lcd_move_e0() { lcd_move_e(0); }
static void lcd_move_e1() { lcd_move_e(1); }
#if EXTRUDERS > 2
static void lcd_move_e2() { lcd_move_e(2); }
#if EXTRUDERS > 3
static void lcd_move_e3() { lcd_move_e(3); }
#endif // EXTRUDERS > 3
#endif // EXTRUDERS > 2
#endif // EXTRUDERS > 1
/**
 *
 * "Prepare" > "Move Xmm" > "Move XYZ" submenu
 *
 */
// Axis selection; the extruder entries are hidden at the 10mm scale
// (too coarse for filament moves).
static void lcd_move_menu_axis() {
START_MENU(lcd_move_menu);
MENU_ITEM(back, MSG_MOVE_AXIS, lcd_move_menu);
MENU_ITEM(submenu, MSG_MOVE_X, lcd_move_x);
MENU_ITEM(submenu, MSG_MOVE_Y, lcd_move_y);
MENU_ITEM(submenu, MSG_MOVE_Z, lcd_move_z);
if (move_menu_scale < 10.0) {
#if EXTRUDERS == 1
MENU_ITEM(submenu, MSG_MOVE_E, lcd_move_e);
#else
MENU_ITEM(submenu, MSG_MOVE_E "0", lcd_move_e0);
MENU_ITEM(submenu, MSG_MOVE_E "1", lcd_move_e1);
#if EXTRUDERS > 2
MENU_ITEM(submenu, MSG_MOVE_E "2", lcd_move_e2);
#if EXTRUDERS > 3
MENU_ITEM(submenu, MSG_MOVE_E "3", lcd_move_e3);
#endif // EXTRUDERS > 3
#endif // EXTRUDERS > 2
#endif // EXTRUDERS > 1
}
END_MENU();
}
// Scale pre-sets: fix the per-encoder-step distance, then open the axis menu.
static void lcd_move_menu_10mm() {
move_menu_scale = 10.0;
lcd_move_menu_axis();
}
static void lcd_move_menu_1mm() {
move_menu_scale = 1.0;
lcd_move_menu_axis();
}
static void lcd_move_menu_01mm() {
move_menu_scale = 0.1;
lcd_move_menu_axis();
}
/**
 *
 * "Prepare" > "Move Axis" submenu
 *
 */
// Entry point: choose the jog increment first.
static void lcd_move_menu() {
START_MENU(lcd_prepare_menu);
MENU_ITEM(back, MSG_MOTION, lcd_prepare_menu);
MENU_ITEM(submenu, MSG_MOVE_10MM, lcd_move_menu_10mm);
MENU_ITEM(submenu, MSG_MOVE_1MM, lcd_move_menu_1mm);
MENU_ITEM(submenu, MSG_MOVE_01MM, lcd_move_menu_01mm);
// TODO:X,Y,Z,E
END_MENU();
}
/**
 *
 * "Control" submenu
 *
 */
static void lcd_control_menu() {
START_MENU(lcd_main_menu);
MENU_ITEM(back, MSG_MAIN, lcd_main_menu);
MENU_ITEM(submenu, MSG_TEMPERATURE, lcd_control_temperature_menu);
MENU_ITEM(submenu, MSG_MOTION, lcd_control_motion_menu);
MENU_ITEM(submenu, MSG_FILAMENT, lcd_control_volumetric_menu);
#if HAS(LCD_CONTRAST)
//MENU_ITEM_EDIT(int3, MSG_CONTRAST, &lcd_contrast, 0, 63);
MENU_ITEM(submenu, MSG_CONTRAST, lcd_set_contrast);
#endif
#if ENABLED(FWRETRACT)
MENU_ITEM(submenu, MSG_RETRACT, lcd_control_retract_menu);
#endif
#if ENABLED(EEPROM_SETTINGS)
MENU_ITEM(function, MSG_STORE_EPROM, Config_StoreSettings);
MENU_ITEM(function, MSG_LOAD_EPROM, Config_RetrieveSettings);
#endif
MENU_ITEM(function, MSG_RESTORE_FAILSAFE, Config_ResetDefault);
END_MENU();
}
/**
 *
 * "Statistics" submenu
 *
 */
// Shows total on-time, power consumed (if sensed) and filament used.
// NOTE(review): row[30]/lung[30] leave little headroom for the MSG_*
// prefixes plus formatted numbers — verify the translated strings fit.
static void lcd_stats_menu() {
char row[30];
int day = printer_usage_seconds / 60 / 60 / 24, hours = (printer_usage_seconds / 60 / 60) % 24, minutes = (printer_usage_seconds / 60) % 60;
sprintf_P(row, PSTR(MSG_ONFOR " %id %ih %im"), day, hours, minutes);
LCD_Printpos(0, 0); lcd_print(row);
#if HAS(POWER_CONSUMPTION_SENSOR)
sprintf_P(row, PSTR(MSG_PWRCONSUMED " %iWh"), power_consumption_hour);
LCD_Printpos(0, 1); lcd_print(row);
#endif
char lung[30];
unsigned int kmeter = (long)printer_usage_filament / 1000 / 1000,
meter = ((long)printer_usage_filament / 1000) % 1000,
centimeter = ((long)printer_usage_filament / 10) % 100,
millimeter = ((long)printer_usage_filament) % 10;
sprintf_P(lung, PSTR(MSG_FILCONSUMED "%i Km %i m %i cm %i mm"), kmeter, meter, centimeter, millimeter);
LCD_Printpos(0, 2); lcd_print(lung);
if (LCD_CLICKED) lcd_goto_menu(lcd_main_menu);
}
/**
 *
 * "Temperature" submenu
 *
 */
#if ENABLED(PIDTEMP)
// Helpers for editing PID Ki & Kd values
// grab the PID value out of the temp variable; scale it; then update the PID driver
// (the menu edits the human-readable raw_Ki/raw_Kd place-holders; these
// callbacks re-apply the internal scaling for hotend h).
void copy_and_scalePID_i(int h) {
PID_PARAM(Ki, h) = scalePID_i(raw_Ki);
updatePID();
}
void copy_and_scalePID_d(int h) {
PID_PARAM(Kd, h) = scalePID_d(raw_Kd);
updatePID();
}
// Generates the zero-argument per-hotend wrappers used as menu callbacks.
#define COPY_AND_SCALE(hindex) \
void copy_and_scalePID_i_H ## hindex() { copy_and_scalePID_i(hindex); } \
void copy_and_scalePID_d_H ## hindex() { copy_and_scalePID_d(hindex); }
COPY_AND_SCALE(0);
#if HOTENDS > 1
COPY_AND_SCALE(1);
#if HOTENDS > 2
COPY_AND_SCALE(2);
#if HOTENDS > 3
COPY_AND_SCALE(3);
#endif // HOTENDS > 3
#endif // HOTENDS > 2
#endif // HOTENDS > 1
#endif // PIDTEMP
/**
 *
 * "Control" > "Temperature" submenu
 *
 */
// Edits nozzle/bed/fan targets, autotemp, per-hotend PID and the
// preheat presets.
// NOTE(review): raw_Ki/raw_Kd are single shared place-holders loaded
// here for every hotend in turn — the value shown is whichever hotend's
// unscaled parameters were loaded last on this redraw.
static void lcd_control_temperature_menu() {
START_MENU(lcd_control_menu);
//
// ^ Control
//
MENU_ITEM(back, MSG_CONTROL, lcd_control_menu);
//
// Nozzle:
//
#if HOTENDS == 1
#if TEMP_SENSOR_0 != 0
MENU_MULTIPLIER_ITEM_EDIT_CALLBACK(int3, MSG_NOZZLE, &target_temperature[0], 0, HEATER_0_MAXTEMP - 15, watch_temp_callback_E0);
#endif
#else // HOTENDS > 1
#if TEMP_SENSOR_0 != 0
MENU_MULTIPLIER_ITEM_EDIT_CALLBACK(int3, MSG_NOZZLE "0", &target_temperature[0], 0, HEATER_0_MAXTEMP - 15, watch_temp_callback_E0);
#endif
#if TEMP_SENSOR_1 != 0
MENU_MULTIPLIER_ITEM_EDIT_CALLBACK(int3, MSG_NOZZLE "1", &target_temperature[1], 0, HEATER_1_MAXTEMP - 15, watch_temp_callback_E1);
#endif
#if HOTENDS > 2
#if TEMP_SENSOR_2 != 0
MENU_MULTIPLIER_ITEM_EDIT_CALLBACK(int3, MSG_NOZZLE "2", &target_temperature[2], 0, HEATER_2_MAXTEMP - 15, watch_temp_callback_E2);
#endif
#if HOTENDS > 3
#if TEMP_SENSOR_3 != 0
MENU_MULTIPLIER_ITEM_EDIT_CALLBACK(int3, MSG_NOZZLE "3", &target_temperature[3], 0, HEATER_3_MAXTEMP - 15, watch_temp_callback_E3);
#endif
#endif // HOTENDS > 3
#endif // HOTENDS > 2
#endif // HOTENDS > 1
//
// Bed:
//
#if TEMP_SENSOR_BED != 0
MENU_MULTIPLIER_ITEM_EDIT(int3, MSG_BED, &target_temperature_bed, 0, BED_MAXTEMP - 15);
#endif
//
// Fan Speed:
//
MENU_MULTIPLIER_ITEM_EDIT(int3, MSG_FAN_SPEED, &fanSpeed, 0, 255);
//
// Autotemp, Min, Max, Fact
//
#if ENABLED(AUTOTEMP) && (TEMP_SENSOR_0 != 0)
MENU_ITEM_EDIT(bool, MSG_AUTOTEMP, &autotemp_enabled);
MENU_ITEM_EDIT(float3, MSG_MIN, &autotemp_min, 0, HEATER_0_MAXTEMP - 15);
MENU_ITEM_EDIT(float3, MSG_MAX, &autotemp_max, 0, HEATER_0_MAXTEMP - 15);
MENU_ITEM_EDIT(float32, MSG_FACTOR, &autotemp_factor, 0.0, 1.0);
#endif
//
// PID-P, PID-I, PID-D
//
#if ENABLED(PIDTEMP)
// set up temp variables - undo the default scaling
raw_Ki = unscalePID_i(PID_PARAM(Ki, 0));
raw_Kd = unscalePID_d(PID_PARAM(Kd, 0));
MENU_ITEM_EDIT(float52, MSG_PID_P, &PID_PARAM(Kp,0), 1, 9990);
// i is typically a small value so allows values below 1
MENU_ITEM_EDIT_CALLBACK(float52, MSG_PID_I, &raw_Ki, 0.01, 9990, copy_and_scalePID_i_H0);
MENU_ITEM_EDIT_CALLBACK(float52, MSG_PID_D, &raw_Kd, 1, 9990, copy_and_scalePID_d_H0);
#if HOTENDS > 1
// set up temp variables - undo the default scaling
raw_Ki = unscalePID_i(PID_PARAM(Ki, 1));
raw_Kd = unscalePID_d(PID_PARAM(Kd, 1));
MENU_ITEM_EDIT(float52, MSG_PID_P MSG_H1, &PID_PARAM(Kp,1), 1, 9990);
// i is typically a small value so allows values below 1
MENU_ITEM_EDIT_CALLBACK(float52, MSG_PID_I MSG_H1, &raw_Ki, 0.01, 9990, copy_and_scalePID_i_H1);
MENU_ITEM_EDIT_CALLBACK(float52, MSG_PID_D MSG_H1, &raw_Kd, 1, 9990, copy_and_scalePID_d_H1);
#if HOTENDS > 2
// set up temp variables - undo the default scaling
raw_Ki = unscalePID_i(PID_PARAM(Ki, 2));
raw_Kd = unscalePID_d(PID_PARAM(Kd, 2));
MENU_ITEM_EDIT(float52, MSG_PID_P MSG_H2, &PID_PARAM(Kp,2), 1, 9990);
// i is typically a small value so allows values below 1
MENU_ITEM_EDIT_CALLBACK(float52, MSG_PID_I MSG_H2, &raw_Ki, 0.01, 9990, copy_and_scalePID_i_H2);
MENU_ITEM_EDIT_CALLBACK(float52, MSG_PID_D MSG_H2, &raw_Kd, 1, 9990, copy_and_scalePID_d_H2);
#if HOTENDS > 3
// set up temp variables - undo the default scaling
raw_Ki = unscalePID_i(PID_PARAM(Ki, 3));
raw_Kd = unscalePID_d(PID_PARAM(Kd, 3));
MENU_ITEM_EDIT(float52, MSG_PID_P MSG_H3, &PID_PARAM(Kp,3), 1, 9990);
// i is typically a small value so allows values below 1
MENU_ITEM_EDIT_CALLBACK(float52, MSG_PID_I MSG_H3, &raw_Ki, 0.01, 9990, copy_and_scalePID_i_H3);
MENU_ITEM_EDIT_CALLBACK(float52, MSG_PID_D MSG_H3, &raw_Kd, 1, 9990, copy_and_scalePID_d_H3);
#endif // HOTENDS > 3
#endif // HOTENDS > 2
#endif // HOTENDS > 1
#endif // PIDTEMP
//
// Idle oozing
//
#if ENABLED(IDLE_OOZING_PREVENT)
MENU_ITEM_EDIT(bool, MSG_IDLEOOZING, &IDLE_OOZING_enabled);
#endif
//
// Preheat PLA conf
//
MENU_ITEM(submenu, MSG_PREHEAT_PLA_SETTINGS, lcd_control_temperature_preheat_pla_settings_menu);
//
// Preheat ABS conf
//
MENU_ITEM(submenu, MSG_PREHEAT_ABS_SETTINGS, lcd_control_temperature_preheat_abs_settings_menu);
//
// Preheat GUM conf
//
MENU_ITEM(submenu, MSG_PREHEAT_GUM_SETTINGS, lcd_control_temperature_preheat_gum_settings_menu);
END_MENU();
}
/**
*
* "Temperature" > "Preheat PLA conf" submenu
*
*/
// Edit screen for the PLA preheat preset: fan speed, hotend and bed targets.
// Changes are stored to EEPROM only through the explicit "Store memory" item.
static void lcd_control_temperature_preheat_pla_settings_menu() {
START_MENU(lcd_control_temperature_menu);
MENU_ITEM(back, MSG_TEMPERATURE, lcd_control_temperature_menu);
MENU_ITEM_EDIT(int3, MSG_FAN_SPEED, &plaPreheatFanSpeed, 0, 255);
#if TEMP_SENSOR_0 != 0
// Hotend target is limited to 15 degrees below the hardware maximum.
MENU_ITEM_EDIT(int3, MSG_NOZZLE, &plaPreheatHotendTemp, HEATER_0_MINTEMP, HEATER_0_MAXTEMP - 15);
#endif
#if TEMP_SENSOR_BED != 0
MENU_ITEM_EDIT(int3, MSG_BED, &plaPreheatHPBTemp, BED_MINTEMP, BED_MAXTEMP - 15);
#endif
#if ENABLED(EEPROM_SETTINGS)
MENU_ITEM(function, MSG_STORE_EPROM, Config_StoreSettings);
#endif
END_MENU();
}
/**
*
* "Temperature" > "Preheat ABS conf" submenu
*
*/
// Edit screen for the ABS preheat preset (fan / hotend / bed targets).
static void lcd_control_temperature_preheat_abs_settings_menu() {
START_MENU(lcd_control_temperature_menu);
MENU_ITEM(back, MSG_TEMPERATURE, lcd_control_temperature_menu);
MENU_ITEM_EDIT(int3, MSG_FAN_SPEED, &absPreheatFanSpeed, 0, 255);
#if TEMP_SENSOR_0 != 0
MENU_ITEM_EDIT(int3, MSG_NOZZLE, &absPreheatHotendTemp, HEATER_0_MINTEMP, HEATER_0_MAXTEMP - 15);
#endif
#if TEMP_SENSOR_BED != 0
MENU_ITEM_EDIT(int3, MSG_BED, &absPreheatHPBTemp, BED_MINTEMP, BED_MAXTEMP - 15);
#endif
#if ENABLED(EEPROM_SETTINGS)
MENU_ITEM(function, MSG_STORE_EPROM, Config_StoreSettings);
#endif
END_MENU();
}
/**
*
* "Temperature" > "Preheat GUM conf" submenu
*
*/
// Edit screen for the GUM (flexible filament) preheat preset.
static void lcd_control_temperature_preheat_gum_settings_menu() {
START_MENU(lcd_control_temperature_menu);
MENU_ITEM(back, MSG_TEMPERATURE, lcd_control_temperature_menu);
MENU_ITEM_EDIT(int3, MSG_FAN_SPEED, &gumPreheatFanSpeed, 0, 255);
#if TEMP_SENSOR_0 != 0
MENU_ITEM_EDIT(int3, MSG_NOZZLE, &gumPreheatHotendTemp, HEATER_0_MINTEMP, HEATER_0_MAXTEMP - 15);
#endif
#if TEMP_SENSOR_BED != 0
MENU_ITEM_EDIT(int3, MSG_BED, &gumPreheatHPBTemp, BED_MINTEMP, BED_MAXTEMP - 15);
#endif
#if ENABLED(EEPROM_SETTINGS)
MENU_ITEM(function, MSG_STORE_EPROM, Config_StoreSettings);
#endif
END_MENU();
}
/**
*
* "Control" > "Motion" submenu
*
*/
// "Control" > "Motion": live editing of motion tuning values (acceleration,
// jerk, max feedrates, steps/mm). Edits take effect immediately in planner
// state; acceleration-rate edits go through reset_acceleration_rates so the
// planner's derived per-axis limits are recomputed.
static void lcd_control_motion_menu() {
START_MENU(lcd_control_menu);
MENU_ITEM(back, MSG_CONTROL, lcd_control_menu);
#if ENABLED(AUTO_BED_LEVELING_FEATURE)
MENU_ITEM_EDIT(float32, MSG_ZPROBE_ZOFFSET, &zprobe_zoffset, -50, 50);
#endif
MENU_ITEM_EDIT(float5, MSG_ACC, &acceleration, 10, 99000);
MENU_ITEM_EDIT(float3, MSG_VXY_JERK, &max_xy_jerk, 1, 990);
MENU_ITEM_EDIT(float52, MSG_VZ_JERK, &max_z_jerk, 0.1, 990);
MENU_ITEM_EDIT(float3, MSG_VMAX MSG_X, &max_feedrate[X_AXIS], 1, 999);
MENU_ITEM_EDIT(float3, MSG_VMAX MSG_Y, &max_feedrate[Y_AXIS], 1, 999);
MENU_ITEM_EDIT(float3, MSG_VMAX MSG_Z, &max_feedrate[Z_AXIS], 1, 999);
MENU_ITEM_EDIT(float3, MSG_VMIN, &minimumfeedrate, 0, 999);
MENU_ITEM_EDIT(float3, MSG_VTRAV_MIN, &mintravelfeedrate, 0, 999);
// Max acceleration edits need the callback to refresh planner-derived rates.
MENU_ITEM_EDIT_CALLBACK(long5, MSG_AMAX MSG_X, &max_acceleration_units_per_sq_second[X_AXIS], 100, 99000, reset_acceleration_rates);
MENU_ITEM_EDIT_CALLBACK(long5, MSG_AMAX MSG_Y, &max_acceleration_units_per_sq_second[Y_AXIS], 100, 99000, reset_acceleration_rates);
MENU_ITEM_EDIT_CALLBACK(long5, MSG_AMAX MSG_Z, &max_acceleration_units_per_sq_second[Z_AXIS], 10, 99000, reset_acceleration_rates);
MENU_ITEM_EDIT(float5, MSG_A_TRAVEL, &travel_acceleration, 100, 99000);
MENU_ITEM_EDIT(float52, MSG_XSTEPS, &axis_steps_per_unit[X_AXIS], 5, 9999);
MENU_ITEM_EDIT(float52, MSG_YSTEPS, &axis_steps_per_unit[Y_AXIS], 5, 9999);
MENU_ITEM_EDIT(float51, MSG_ZSTEPS, &axis_steps_per_unit[Z_AXIS], 5, 9999);
// Per-extruder items; extruder N uses planner index E_AXIS + N.
#if EXTRUDERS > 0
MENU_ITEM_EDIT(float3, MSG_VE_JERK MSG_E "0", &max_e_jerk[0], 1, 990);
MENU_ITEM_EDIT(float3, MSG_VMAX MSG_E "0", &max_feedrate[E_AXIS], 1, 999);
MENU_ITEM_EDIT(long5, MSG_AMAX MSG_E "0", &max_acceleration_units_per_sq_second[E_AXIS], 100, 99000);
MENU_ITEM_EDIT(float5, MSG_A_RETRACT MSG_E "0", &retract_acceleration[0], 100, 99000);
MENU_ITEM_EDIT(float51, MSG_E0STEPS, &axis_steps_per_unit[E_AXIS], 5, 9999);
#if EXTRUDERS > 1
MENU_ITEM_EDIT(float3, MSG_VE_JERK MSG_E "1", &max_e_jerk[1], 1, 990);
MENU_ITEM_EDIT(float3, MSG_VMAX MSG_E "1", &max_feedrate[E_AXIS + 1], 1, 999);
MENU_ITEM_EDIT(long5, MSG_AMAX MSG_E "1", &max_acceleration_units_per_sq_second[E_AXIS + 1], 100, 99000);
MENU_ITEM_EDIT(float5, MSG_A_RETRACT MSG_E "1", &retract_acceleration[1], 100, 99000);
MENU_ITEM_EDIT(float51, MSG_E1STEPS, &axis_steps_per_unit[E_AXIS + 1], 5, 9999);
#if EXTRUDERS > 2
MENU_ITEM_EDIT(float3, MSG_VE_JERK MSG_E "2", &max_e_jerk[2], 1, 990);
MENU_ITEM_EDIT(float3, MSG_VMAX MSG_E "2", &max_feedrate[E_AXIS + 2], 1, 999);
MENU_ITEM_EDIT(long5, MSG_AMAX MSG_E "2", &max_acceleration_units_per_sq_second[E_AXIS + 2], 100, 99000);
MENU_ITEM_EDIT(float5, MSG_A_RETRACT MSG_E "2", &retract_acceleration[2], 100, 99000);
MENU_ITEM_EDIT(float51, MSG_E2STEPS, &axis_steps_per_unit[E_AXIS + 2], 5, 9999);
#if EXTRUDERS > 3
MENU_ITEM_EDIT(float3, MSG_VE_JERK MSG_E "3", &max_e_jerk[3], 1, 990);
MENU_ITEM_EDIT(float3, MSG_VMAX MSG_E "3", &max_feedrate[E_AXIS + 3], 1, 999);
MENU_ITEM_EDIT(long5, MSG_AMAX MSG_E "3", &max_acceleration_units_per_sq_second[E_AXIS + 3], 100, 99000);
MENU_ITEM_EDIT(float5, MSG_A_RETRACT MSG_E "3", &retract_acceleration[3], 100, 99000);
MENU_ITEM_EDIT(float51, MSG_E3STEPS, &axis_steps_per_unit[E_AXIS + 3], 5, 9999);
#endif // EXTRUDERS > 3
#endif // EXTRUDERS > 2
#endif // EXTRUDERS > 1
#endif // EXTRUDERS > 0
#if ENABLED(ABORT_ON_ENDSTOP_HIT_FEATURE_ENABLED)
MENU_ITEM_EDIT(bool, MSG_ENDSTOP_ABORT, &abort_on_endstop_hit);
#endif
#if MECH(SCARA)
MENU_ITEM_EDIT(float74, MSG_XSCALE, &axis_scaling[X_AXIS], 0.5, 2);
MENU_ITEM_EDIT(float74, MSG_YSCALE, &axis_scaling[Y_AXIS], 0.5, 2);
#endif
END_MENU();
}
/**
*
* "Control" > "Filament" submenu
*
*/
// "Control" > "Filament": volumetric extrusion toggle plus per-extruder
// filament diameter. Every edit re-runs calculate_volumetric_multipliers so
// the E-steps scaling matches the new diameter immediately.
static void lcd_control_volumetric_menu() {
START_MENU(lcd_control_menu);
MENU_ITEM(back, MSG_CONTROL, lcd_control_menu);
MENU_ITEM_EDIT_CALLBACK(bool, MSG_VOLUMETRIC_ENABLED, &volumetric_enabled, calculate_volumetric_multipliers);
// Diameter items only make sense while volumetric mode is on.
if (volumetric_enabled) {
#if EXTRUDERS == 1
MENU_MULTIPLIER_ITEM_EDIT_CALLBACK(float43, MSG_FILAMENT_SIZE_EXTRUDER, &filament_size[0], DEFAULT_NOMINAL_FILAMENT_DIA - .5, DEFAULT_NOMINAL_FILAMENT_DIA + .5, calculate_volumetric_multipliers);
#else // EXTRUDERS > 1
MENU_MULTIPLIER_ITEM_EDIT_CALLBACK(float43, MSG_FILAMENT_SIZE_EXTRUDER " 0", &filament_size[0], DEFAULT_NOMINAL_FILAMENT_DIA - .5, DEFAULT_NOMINAL_FILAMENT_DIA + .5, calculate_volumetric_multipliers);
MENU_MULTIPLIER_ITEM_EDIT_CALLBACK(float43, MSG_FILAMENT_SIZE_EXTRUDER " 1", &filament_size[1], DEFAULT_NOMINAL_FILAMENT_DIA - .5, DEFAULT_NOMINAL_FILAMENT_DIA + .5, calculate_volumetric_multipliers);
#if EXTRUDERS > 2
MENU_MULTIPLIER_ITEM_EDIT_CALLBACK(float43, MSG_FILAMENT_SIZE_EXTRUDER " 2", &filament_size[2], DEFAULT_NOMINAL_FILAMENT_DIA - .5, DEFAULT_NOMINAL_FILAMENT_DIA + .5, calculate_volumetric_multipliers);
#if EXTRUDERS > 3
MENU_MULTIPLIER_ITEM_EDIT_CALLBACK(float43, MSG_FILAMENT_SIZE_EXTRUDER " 3", &filament_size[3], DEFAULT_NOMINAL_FILAMENT_DIA - .5, DEFAULT_NOMINAL_FILAMENT_DIA + .5, calculate_volumetric_multipliers);
#endif // EXTRUDERS > 3
#endif // EXTRUDERS > 2
#endif // EXTRUDERS > 1
}
END_MENU();
}
/**
*
* "Control" > "Contrast" submenu
*
*/
#if HAS(LCD_CONTRAST)
// Interactive contrast adjustment screen: turning the encoder changes the
// graphical LCD contrast directly; a click returns to the Control menu.
static void lcd_set_contrast() {
if (encoderPosition != 0) {
#if ENABLED(U8GLIB_LM6059_AF)
// This panel uses the full 8-bit contrast range and normal direction.
lcd_contrast += encoderPosition;
lcd_contrast &= 0xFF;
#else
// Other panels use a 6-bit range with inverted encoder direction.
lcd_contrast -= encoderPosition;
lcd_contrast &= 0x3F;
#endif
// Consume the encoder movement and request a redraw.
encoderPosition = 0;
lcdDrawUpdate = 1;
u8g.setContrast(lcd_contrast);
}
if (lcdDrawUpdate) {
#if ENABLED(U8GLIB_LM6059_AF)
lcd_implementation_drawedit(PSTR(MSG_CONTRAST), itostr3(lcd_contrast));
#else
lcd_implementation_drawedit(PSTR(MSG_CONTRAST), itostr2(lcd_contrast));
#endif
}
if (LCD_CLICKED) lcd_goto_menu(lcd_control_menu);
}
#endif // HAS(LCD_CONTRAST)
/**
*
* "Control" > "Retract" submenu
*
*/
#if ENABLED(FWRETRACT)
// "Control" > "Retract": firmware-retraction tuning (lengths, feedrates,
// Z-lift). Swap values only appear with more than one extruder.
static void lcd_control_retract_menu() {
START_MENU(lcd_control_menu);
MENU_ITEM(back, MSG_CONTROL, lcd_control_menu);
MENU_ITEM_EDIT(bool, MSG_AUTORETRACT, &autoretract_enabled);
MENU_ITEM_EDIT(float52, MSG_CONTROL_RETRACT, &retract_length, 0, 100);
#if EXTRUDERS > 1
MENU_ITEM_EDIT(float52, MSG_CONTROL_RETRACT_SWAP, &retract_length_swap, 0, 100);
#endif
MENU_ITEM_EDIT(float3, MSG_CONTROL_RETRACTF, &retract_feedrate, 1, 999);
MENU_ITEM_EDIT(float52, MSG_CONTROL_RETRACT_ZLIFT, &retract_zlift, 0, 999);
MENU_ITEM_EDIT(float52, MSG_CONTROL_RETRACT_RECOVER, &retract_recover_length, 0, 100);
#if EXTRUDERS > 1
MENU_ITEM_EDIT(float52, MSG_CONTROL_RETRACT_RECOVER_SWAP, &retract_recover_length_swap, 0, 100);
#endif
MENU_ITEM_EDIT(float3, MSG_CONTROL_RETRACT_RECOVERF, &retract_recover_feedrate, 1, 999);
END_MENU();
}
#endif // FWRETRACT
#if ENABLED(SDSUPPORT)
#if !PIN_EXISTS(SD_DETECT)
// Re-mount the SD card (used when there is no detect pin to do it
// automatically) and rewind the file-list scroll position.
static void lcd_sd_refresh() {
card.mount();
currentMenuViewOffset = 0;
}
#endif
// Go up one directory on the SD card and rewind the list scroll position.
static void lcd_sd_updir() {
card.updir();
currentMenuViewOffset = 0;
}
/**
*
* "Print from SD" submenu
*
*/
// "Print from SD" file browser. Rebuilds the visible menu from the current
// SD working directory: a refresh/parent entry at the top, then one entry
// per file or subdirectory.
void lcd_sdcard_menu() {
if (lcdDrawUpdate == 0 && LCD_CLICKED == 0) return; // nothing to do (so don't thrash the SD card)
uint16_t fileCnt = card.getnrfilenames();
START_MENU(lcd_main_menu);
MENU_ITEM(back, MSG_MAIN, lcd_main_menu);
card.getWorkDirName();
// At the root ('/') offer a manual refresh (only needed without a detect
// pin); inside a subdirectory offer ".." to go up instead.
if (fullName[0] == '/') {
#if !PIN_EXISTS(SD_DETECT)
MENU_ITEM(function, LCD_STR_REFRESH MSG_REFRESH, lcd_sd_refresh);
#endif
}
else {
MENU_ITEM(function, LCD_STR_FOLDER "..", lcd_sd_updir);
}
for (uint16_t i = 0; i < fileCnt; i++) {
// Only fetch the filename for the entry actually being drawn this pass;
// reading every name on every redraw would hammer the SD card.
if (_menuItemNr == _lineNr) {
card.getfilename(
#if ENABLED(SDCARD_RATHERRECENTFIRST)
fileCnt-1 -
#endif
i
);
if (card.filenameIsDir)
MENU_ITEM(sddirectory, MSG_CARD_MENU, fullName);
else
MENU_ITEM(sdfile, MSG_CARD_MENU, fullName);
}
else {
MENU_ITEM_DUMMY();
}
}
END_MENU();
}
#endif // SDSUPPORT
/**
*
* Functions for editing single values
*
*/
#define menu_edit_type(_type, _name, _strFunc, scale) \
bool _menu_edit_ ## _name () { \
bool isClicked = LCD_CLICKED; \
if ((int32_t)encoderPosition < 0) encoderPosition = 0; \
if ((int32_t)encoderPosition > maxEditValue) encoderPosition = maxEditValue; \
if (lcdDrawUpdate) \
lcd_implementation_drawedit(editLabel, _strFunc(((_type)((int32_t)encoderPosition + minEditValue)) / scale)); \
if (isClicked) { \
*((_type*)editValue) = ((_type)((int32_t)encoderPosition + minEditValue)) / scale; \
lcd_goto_menu(prevMenu, prevEncoderPosition); \
} \
return isClicked; \
} \
void menu_edit_ ## _name () { _menu_edit_ ## _name(); } \
void menu_edit_callback_ ## _name () { if (_menu_edit_ ## _name ()) (*callbackFunc)(); } \
static void _menu_action_setting_edit_ ## _name (const char* pstr, _type* ptr, _type minValue, _type maxValue) { \
prevMenu = currentMenu; \
prevEncoderPosition = encoderPosition; \
\
lcdDrawUpdate = 2; \
currentMenu = menu_edit_ ## _name; \
\
editLabel = pstr; \
editValue = ptr; \
minEditValue = minValue * scale; \
maxEditValue = maxValue * scale - minEditValue; \
encoderPosition = (*ptr) * scale - minEditValue; \
} \
static void menu_action_setting_edit_ ## _name (const char* pstr, _type* ptr, _type minValue, _type maxValue) { \
_menu_action_setting_edit_ ## _name(pstr, ptr, minValue, maxValue); \
currentMenu = menu_edit_ ## _name; \
}\
static void menu_action_setting_edit_callback_ ## _name (const char* pstr, _type* ptr, _type minValue, _type maxValue, menuFunc_t callback) { \
_menu_action_setting_edit_ ## _name(pstr, ptr, minValue, maxValue); \
currentMenu = menu_edit_callback_ ## _name; \
callbackFunc = callback; \
}
// Instantiate edit handlers: (C type, name suffix, display formatter, scale).
// The scale is the fixed-point multiplier used while editing; e.g. float52
// edits in hundredths, while float5/long5 use 0.01 so large values fit in
// the encoder's int32 range at a granularity of 100 units per step.
menu_edit_type(int, int3, itostr3, 1)
menu_edit_type(float, float3, ftostr3, 1)
menu_edit_type(float, float32, ftostr32, 100)
menu_edit_type(float, float43, ftostr43, 1000)
menu_edit_type(float, float5, ftostr5, 0.01)
menu_edit_type(float, float51, ftostr51, 10)
menu_edit_type(float, float52, ftostr52, 100)
menu_edit_type(unsigned long, long5, ftostr5, 0.01)
/**
*
* Handlers for RepRap World Keypad input
*
*/
#if ENABLED(REPRAPWORLD_KEYPAD)
// RepRapWorld keypad handlers: each press is translated into a one-step
// manual jog by seeding encoderPosition (+1/-1 sets direction) and calling
// the corresponding lcd_move_* screen handler. Note the Y up/down signs are
// inverted relative to Z/X because of the keypad's orientation.
static void reprapworld_keypad_move_z_up() {
encoderPosition = 1;
move_menu_scale = REPRAPWORLD_KEYPAD_MOVE_STEP;
lcd_move_z();
}
static void reprapworld_keypad_move_z_down() {
encoderPosition = -1;
move_menu_scale = REPRAPWORLD_KEYPAD_MOVE_STEP;
lcd_move_z();
}
static void reprapworld_keypad_move_x_left() {
encoderPosition = -1;
move_menu_scale = REPRAPWORLD_KEYPAD_MOVE_STEP;
lcd_move_x();
}
static void reprapworld_keypad_move_x_right() {
encoderPosition = 1;
move_menu_scale = REPRAPWORLD_KEYPAD_MOVE_STEP;
lcd_move_x();
}
static void reprapworld_keypad_move_y_down() {
encoderPosition = 1;
move_menu_scale = REPRAPWORLD_KEYPAD_MOVE_STEP;
lcd_move_y();
}
static void reprapworld_keypad_move_y_up() {
encoderPosition = -1;
move_menu_scale = REPRAPWORLD_KEYPAD_MOVE_STEP;
lcd_move_y();
}
static void reprapworld_keypad_move_home() {
enqueue_and_echo_commands_P((PSTR("G28"))); // move all axis home
}
#endif // REPRAPWORLD_KEYPAD
/**
*
* Audio feedback for controller clicks
*
*/
#if ENABLED(LCD_USE_I2C_BUZZER)
// Sound the I2C panel's buzzer; duration in ms, freq in Hz.
void lcd_buzz(long duration, uint16_t freq) { // called from buzz() in Marlin_main.cpp where lcd is unknown
lcd.buzz(duration, freq);
}
#endif
// Audible/visual click feedback: request a full redraw, debounce further
// button presses for 500 ms, then beep on whatever sound hardware exists
// (I2C panel buzzer, direct buzzer pin, or just a blocking delay so the
// click "feels" the same without sound).
void lcd_quick_feedback() {
lcdDrawUpdate = 2;
next_button_update_ms = millis() + 500;
#if ENABLED(LCD_USE_I2C_BUZZER)
#if DISABLED(LCD_FEEDBACK_FREQUENCY_HZ)
#define LCD_FEEDBACK_FREQUENCY_HZ 100
#endif
#if DISABLED(LCD_FEEDBACK_FREQUENCY_DURATION_MS)
#define LCD_FEEDBACK_FREQUENCY_DURATION_MS (1000/6)
#endif
lcd.buzz(LCD_FEEDBACK_FREQUENCY_DURATION_MS, LCD_FEEDBACK_FREQUENCY_HZ);
#elif HAS(BUZZER)
#if DISABLED(LCD_FEEDBACK_FREQUENCY_HZ)
#define LCD_FEEDBACK_FREQUENCY_HZ 5000
#endif
#if DISABLED(LCD_FEEDBACK_FREQUENCY_DURATION_MS)
#define LCD_FEEDBACK_FREQUENCY_DURATION_MS 2
#endif
buzz(LCD_FEEDBACK_FREQUENCY_DURATION_MS, LCD_FEEDBACK_FREQUENCY_HZ);
#else
#if DISABLED(LCD_FEEDBACK_FREQUENCY_DURATION_MS)
#define LCD_FEEDBACK_FREQUENCY_DURATION_MS 2
#endif
HAL::delayMilliseconds(LCD_FEEDBACK_FREQUENCY_DURATION_MS);
#endif
}
/**
*
* Menu actions
*
*/
// Basic menu item actions: 'back' and 'submenu' both just switch screens;
// 'gcode' queues a PROGMEM command string; 'function' invokes a callback.
static void menu_action_back(menuFunc_t func) { lcd_goto_menu(func); }
static void menu_action_submenu(menuFunc_t func) { lcd_goto_menu(func); }
static void menu_action_gcode(const char* pgcode) { enqueue_and_echo_commands_P(pgcode); }
static void menu_action_function(menuFunc_t func) { (*func)(); }
#if ENABLED(SDSUPPORT)
static void menu_action_sdfile(const char* longFilename) {
char cmd[30];
char* c;
sprintf_P(cmd, PSTR("M23 %s"), longFilename);
for (c = &cmd[4]; *c; c++) *c = tolower(*c);
enqueue_and_echo_command(cmd);
enqueue_and_echo_commands_P(PSTR("M24"));
lcd_return_to_status();
}
// Enter the selected SD subdirectory and reset the encoder so the listing
// starts at the top.
static void menu_action_sddirectory(const char* longFilename) {
card.chdir(longFilename);
encoderPosition = 0;
}
#endif // SDSUPPORT
// Toggle a boolean setting in place. 'pstr' is unused here but kept so the
// signature matches the other menu_action_setting_edit_* handlers.
static void menu_action_setting_edit_bool(const char* pstr, bool* ptr) { *ptr = !(*ptr); }
static void menu_action_setting_edit_callback_bool(const char* pstr, bool* ptr, menuFunc_t callback) {
menu_action_setting_edit_bool(pstr, ptr);
// Run the side-effect hook (e.g. recompute derived values) after toggling.
(*callback)();
}
#endif // ULTIPANEL
/** LCD API **/
// One-time LCD/panel initialization: display driver, input pins (encoder,
// click, keypad shift register or panel shift register), SD-detect pin, and
// the software input state.
void lcd_init() {
lcd_implementation_init();
#if ENABLED(NEWPANEL)
// Rotary encoder phases, with internal pull-ups (switches pull to GND).
SET_INPUT(BTN_EN1);
SET_INPUT(BTN_EN2);
PULLUP(BTN_EN1, HIGH);
PULLUP(BTN_EN2, HIGH);
#if BTN_ENC > 0
SET_INPUT(BTN_ENC);
PULLUP(BTN_ENC, HIGH);
#endif
#if ENABLED(REPRAPWORLD_KEYPAD)
// Keypad is read through a parallel-in shift register.
pinMode(SHIFT_CLK, OUTPUT);
pinMode(SHIFT_LD, OUTPUT);
pinMode(SHIFT_OUT, INPUT);
PULLUP(SHIFT_OUT, HIGH);
WRITE(SHIFT_LD, HIGH);
#endif
#else // Not NEWPANEL
#if ENABLED(SR_LCD_2W_NL) // Non latching 2 wire shift register
pinMode(SR_DATA_PIN, OUTPUT);
pinMode(SR_CLK_PIN, OUTPUT);
#elif ENABLED(SHIFT_CLK)
pinMode(SHIFT_CLK, OUTPUT);
pinMode(SHIFT_LD, OUTPUT);
pinMode(SHIFT_EN, OUTPUT);
pinMode(SHIFT_OUT, INPUT);
PULLUP(SHIFT_OUT, HIGH);
WRITE(SHIFT_LD, HIGH);
WRITE(SHIFT_EN, LOW);
#endif // SR_LCD_2W_NL
#endif // !NEWPANEL
#if ENABLED(SDSUPPORT) && PIN_EXISTS(SD_DETECT)
pinMode(SD_DETECT_PIN, INPUT);
PULLUP(SD_DETECT_PIN, HIGH);
// Start in the UNKNOWN state so the first lcd_update() pass mounts or
// unmounts according to the actual card presence.
lcd_sd_status = 2; // UNKNOWN
#endif
#if ENABLED(LCD_HAS_SLOW_BUTTONS)
slow_buttons = 0;
#endif
// Prime the button state and clear any stale encoder movement.
lcd_buttons_update();
#if ENABLED(ULTIPANEL)
encoderDiff = 0;
#endif
}
// Number of displayed characters in a UTF-8 string: every byte counts
// except continuation bytes (those matching 10xxxxxx).
int lcd_strlen(char* s) {
  int count = 0;
  for (char* p = s; *p; ++p) {
    if ((*p & 0xc0) != 0x80) count++;
  }
  return count;
}
// Same as lcd_strlen but for strings stored in program memory (PROGMEM);
// reads each byte once via pgm_read_byte and skips UTF-8 continuation bytes.
int lcd_strlen_P(const char* s) {
  int count = 0;
  unsigned char c;
  while ((c = pgm_read_byte(s)) != 0) {
    if ((c & 0xc0) != 0x80) count++;
    s++;
  }
  return count;
}
/**
* Update the LCD, read encoder buttons, etc.
* - Read button states
* - Check the SD Card slot state
* - Act on RepRap World keypad input
* - Update the encoder position
* - Apply acceleration to the encoder position
* - Reset the Info Screen timeout if there's any input
* - Update status indicators, if any
* - Clear the LCD if lcdDrawUpdate == 2
*
* Warning: This function is called from interrupt context!
*/
void lcd_update() {
#if ENABLED(ULTIPANEL)
// Deadline after which any non-status screen falls back to the status page.
static millis_t return_to_status_ms = 0;
#endif
lcd_buttons_update();
#if ENABLED(SDSUPPORT) && PIN_EXISTS(SD_DETECT)
// React to SD card insertion/removal via the detect pin. lcd_sd_status
// starts at 2 (UNKNOWN) so the very first pass syncs silently.
bool sd_status = IS_SD_INSERTED;
if (sd_status != lcd_sd_status && lcd_detected()) {
lcdDrawUpdate = 2;
lcd_implementation_init( // to maybe revive the LCD if static electricity killed it.
#if ENABLED(LCD_PROGRESS_BAR)
currentMenu == lcd_status_screen
#endif
);
if (sd_status) {
card.mount();
if (lcd_sd_status != 2) LCD_MESSAGEPGM(MSG_SD_INSERTED);
}
else {
card.unmount();
if (lcd_sd_status != 2) LCD_MESSAGEPGM(MSG_SD_REMOVED);
}
lcd_sd_status = sd_status;
}
#endif // SDSUPPORT && SD_DETECT_PIN
// Throttle the rest of the work to LCD_UPDATE_INTERVAL.
millis_t ms = millis();
if (ms > next_lcd_update_ms) {
#if ENABLED(LCD_HAS_SLOW_BUTTONS)
slow_buttons = lcd_implementation_read_slow_buttons(); // buttons which take too long to read in interrupt context
#endif
#if ENABLED(ULTIPANEL)
#if ENABLED(REPRAPWORLD_KEYPAD)
if (REPRAPWORLD_KEYPAD_MOVE_Z_UP) reprapworld_keypad_move_z_up();
if (REPRAPWORLD_KEYPAD_MOVE_Z_DOWN) reprapworld_keypad_move_z_down();
if (REPRAPWORLD_KEYPAD_MOVE_X_LEFT) reprapworld_keypad_move_x_left();
if (REPRAPWORLD_KEYPAD_MOVE_X_RIGHT) reprapworld_keypad_move_x_right();
if (REPRAPWORLD_KEYPAD_MOVE_Y_DOWN) reprapworld_keypad_move_y_down();
if (REPRAPWORLD_KEYPAD_MOVE_Y_UP) reprapworld_keypad_move_y_up();
if (REPRAPWORLD_KEYPAD_MOVE_HOME) reprapworld_keypad_move_home();
#endif
// Turn accumulated encoder pulses into whole menu steps; fast rotation
// can be multiplied 10x/100x when ENCODER_RATE_MULTIPLIER is enabled.
bool encoderPastThreshold = (abs(encoderDiff) >= ENCODER_PULSES_PER_STEP);
if (encoderPastThreshold || LCD_CLICKED) {
if (encoderPastThreshold) {
int32_t encoderMultiplier = 1;
#if ENABLED(ENCODER_RATE_MULTIPLIER)
if (encoderRateMultiplierEnabled) {
int32_t encoderMovementSteps = abs(encoderDiff) / ENCODER_PULSES_PER_STEP;
if (lastEncoderMovementMillis != 0) {
// Note that the rate is always calculated between two passes through the
// loop and that the abs of the encoderDiff value is tracked.
float encoderStepRate = (float)(encoderMovementSteps) / ((float)(ms - lastEncoderMovementMillis)) * 1000.0;
if (encoderStepRate >= ENCODER_100X_STEPS_PER_SEC) encoderMultiplier = 100;
else if (encoderStepRate >= ENCODER_10X_STEPS_PER_SEC) encoderMultiplier = 10;
#if ENABLED(ENCODER_RATE_MULTIPLIER_DEBUG)
ECHO_SMV(DB, "Enc Step Rate: ", encoderStepRate);
ECHO_MV(" Multiplier: ", encoderMultiplier);
ECHO_MV(" ENCODER_10X_STEPS_PER_SEC: ", ENCODER_10X_STEPS_PER_SEC);
ECHO_EMV(" ENCODER_100X_STEPS_PER_SEC: ", ENCODER_100X_STEPS_PER_SEC);
#endif
}
lastEncoderMovementMillis = ms;
} // encoderRateMultiplierEnabled
#endif // ENCODER_RATE_MULTIPLIER
encoderPosition += (encoderDiff * encoderMultiplier) / ENCODER_PULSES_PER_STEP;
encoderDiff = 0;
}
// Any input postpones the automatic return to the status screen.
return_to_status_ms = ms + LCD_TIMEOUT_TO_STATUS;
lcdDrawUpdate = 1;
}
#endif //ULTIPANEL
if (currentMenu == lcd_status_screen) {
if (!lcd_status_update_delay) {
lcdDrawUpdate = 1;
lcd_status_update_delay = 10; /* redraw the main screen every second. This is easier than trying to keep track of all things that change on the screen */
}
else {
lcd_status_update_delay--;
}
}
#if ENABLED(DOGLCD) // Changes due to different driver architecture of the DOGM display
// DOGM displays render page-by-page: the current screen handler is
// invoked once per page until the full frame has been emitted.
if (lcdDrawUpdate) {
blink++; // Variable for fan animation and alive dot
u8g.firstPage();
do {
lcd_setFont(FONT_MENU);
u8g.setPrintPos(125, 0);
if (blink % 2) u8g.setColorIndex(1); else u8g.setColorIndex(0); // Set color for the alive dot
u8g.drawPixel(127, 63); // draw alive dot
u8g.setColorIndex(1); // black on white
(*currentMenu)();
} while(u8g.nextPage());
}
#else
if (lcdDrawUpdate)
(*currentMenu)();
#endif
#if ENABLED(LCD_HAS_STATUS_INDICATORS)
lcd_implementation_update_indicators();
#endif
#if ENABLED(ULTIPANEL)
// Return to Status Screen after a timeout
if (currentMenu != lcd_status_screen &&
#if !MECH(DELTA) && DISABLED(Z_SAFE_HOMING) && Z_HOME_DIR < 0
currentMenu != lcd_level_bed &&
#endif
millis() > return_to_status_ms
) {
lcd_return_to_status();
lcdDrawUpdate = 2;
}
#endif // ULTIPANEL
// lcdDrawUpdate counts down: 2 = clear + redraw, 1 = redraw, 0 = idle.
if (lcdDrawUpdate == 2) lcd_implementation_clear();
if (lcdDrawUpdate) lcdDrawUpdate--;
next_lcd_update_ms = ms + LCD_UPDATE_INTERVAL;
}
}
// Enable/disable swallowing of the next encoder click (used while screens
// such as "wait for user" are active).
void lcd_ignore_click(bool b) {
ignore_click = b;
wait_for_unclick = false;
}
// Common tail for all status-message setters: restart the progress-bar /
// message timers and request a full redraw. 'persist' keeps the message from
// expiring (progress-bar builds only).
void lcd_finishstatus(bool persist = false) {
#if ENABLED(LCD_PROGRESS_BAR)
progress_bar_ms = millis();
#if PROGRESS_MSG_EXPIRE > 0
// 0 means "never expire".
expire_status_ms = persist ? 0 : progress_bar_ms + PROGRESS_MSG_EXPIRE;
#endif
#endif
lcdDrawUpdate = 2;
#if HAS(LCD_FILAMENT_SENSOR) || HAS(LCD_POWER_SENSOR)
previous_lcd_status_ms = millis(); //get status message to show up for a while
#endif
}
#if ENABLED(LCD_PROGRESS_BAR) && PROGRESS_MSG_EXPIRE > 0
// Keep the current status message on screen indefinitely (cancel expiry).
void dontExpireStatus() { expire_status_ms = 0; }
#endif
// Truncate or space-pad 's' in place so it shows exactly 'n' visible
// characters, counting UTF-8 lead/ASCII bytes as one glyph each and
// continuation bytes as zero. The caller's buffer must be large enough
// for the padded result plus the terminator.
void set_utf_strlen(char* s, uint8_t n) {
  uint8_t i = 0;
  uint8_t glyphs = 0;
  // Walk the string until it ends or n visible glyphs have been consumed.
  while (s[i] && glyphs < n) {
    if ((s[i] & 0xc0u) != 0x80u) glyphs++;
    i++;
  }
  // Pad with spaces up to exactly n visible characters.
  while (glyphs < n) {
    s[i++] = ' ';
    glyphs++;
  }
  s[i] = 0;
}
// A status message is currently set when the buffer is non-empty.
bool lcd_hasstatus() { return (lcd_status_message[0] != '\0'); }
// Set the status line from a RAM string unless an alert (level > 0) is
// showing. strncpy may leave the copy unterminated, but set_utf_strlen
// always pads/terminates at LCD_WIDTH visible characters afterwards.
void lcd_setstatus(const char* message, bool persist) {
if (lcd_status_message_level > 0) return;
strncpy(lcd_status_message, message, 3 * LCD_WIDTH);
set_utf_strlen(lcd_status_message, LCD_WIDTH);
lcd_finishstatus(persist);
}
// Set the status line from a PROGMEM string. Only messages at or above the
// current priority level replace the existing one; alerts persist.
void lcd_setstatuspgm(const char* message, uint8_t level) {
if (level >= lcd_status_message_level) {
strncpy_P(lcd_status_message, message, 3 * LCD_WIDTH);
set_utf_strlen(lcd_status_message, LCD_WIDTH);
lcd_status_message_level = level;
lcd_finishstatus(level > 0);
}
}
// Show a priority-1 alert message and jump back to the status screen so it
// is actually visible.
void lcd_setalertstatuspgm(const char* message) {
lcd_setstatuspgm(message, 1);
#if ENABLED(ULTIPANEL)
lcd_return_to_status();
#endif
}
// Allow normal-priority status messages to overwrite an alert again.
void lcd_reset_alert_level() { lcd_status_message_level = 0; }
#if HAS(LCD_CONTRAST)
// Apply a stored contrast value, masked to the panel's 6-bit range.
void lcd_setcontrast(uint8_t value) {
lcd_contrast = value & 0x3F;
u8g.setContrast(lcd_contrast);
}
#endif
#if ENABLED(ULTIPANEL)
/**
* Setup Rotary Encoder Bit Values (for two pin encoders to indicate movement)
* These values are independent of which pins are used for EN_A and EN_B indications
* The rotary encoder part is also independent to the chipset used for the LCD
*/
#if ENABLED(EN_A) && ENABLED(EN_B)
// Map the raw EN_A/EN_B phase bit patterns (0..3) onto their position in
// the quadrature rotation sequence, so lcd_buttons_update can compare
// consecutive states to derive direction.
#define encrot0 0
#define encrot1 2
#define encrot2 3
#define encrot3 1
#endif
/**
* Read encoder buttons from the hardware registers
* Warning: This function is called from interrupt context!
*/
void lcd_buttons_update() {
#if ENABLED(NEWPANEL)
// Sample the direct-wired encoder/click inputs (active low).
uint8_t newbutton = 0;
#if ENABLED(INVERT_ROTARY_SWITCH)
if (READ(BTN_EN1) == 0) newbutton |= EN_B;
if (READ(BTN_EN2) == 0) newbutton |= EN_A;
#else
if (READ(BTN_EN1) == 0) newbutton |= EN_A;
if (READ(BTN_EN2) == 0) newbutton |= EN_B;
#endif
#if BTN_ENC > 0
// Click and back buttons are debounced via next_button_update_ms.
millis_t ms = millis();
if (ms > next_button_update_ms && READ(BTN_ENC) == 0) newbutton |= EN_C;
#if ENABLED(BTN_BACK) && BTN_BACK > 0
if (ms > next_button_update_ms && READ(BTN_BACK) == 0) newbutton |= EN_D;
#endif
#endif
buttons = newbutton;
#if ENABLED(LCD_HAS_SLOW_BUTTONS)
// Merge in buttons that were read outside interrupt context.
buttons |= slow_buttons;
#endif
#if ENABLED(REPRAPWORLD_KEYPAD)
// for the reprapworld_keypad
// Latch then clock out the keypad's 8-bit shift register, LSB first.
uint8_t newbutton_reprapworld_keypad = 0;
WRITE(SHIFT_LD, LOW);
WRITE(SHIFT_LD, HIGH);
for (uint8_t i = 0; i < 8; i++) {
newbutton_reprapworld_keypad >>= 1;
if (READ(SHIFT_OUT)) BITSET(newbutton_reprapworld_keypad, 7);
WRITE(SHIFT_CLK, HIGH);
WRITE(SHIFT_CLK, LOW);
}
buttons_reprapworld_keypad = ~newbutton_reprapworld_keypad; //invert it, because a pressed switch produces a logical 0
#endif
#else //read it from the shift register
uint8_t newbutton = 0;
WRITE(SHIFT_LD, LOW);
WRITE(SHIFT_LD, HIGH);
unsigned char tmp_buttons = 0;
for (uint8_t i = 0; i < 8; i++) {
newbutton >>= 1;
if (READ(SHIFT_OUT)) BITSET(newbutton, 7);
WRITE(SHIFT_CLK, HIGH);
WRITE(SHIFT_CLK, LOW);
}
buttons = ~newbutton; //invert it, because a pressed switch produces a logical 0
#endif //!NEWPANEL
//manage encoder rotation
// Decode quadrature: compare the current phase pair against the previous
// one and step encoderDiff in the direction of the sequence.
uint8_t enc = 0;
if (buttons & EN_A) enc |= B01;
if (buttons & EN_B) enc |= B10;
if (enc != lastEncoderBits) {
switch (enc) {
case encrot0:
if (lastEncoderBits == encrot3) encoderDiff++;
else if (lastEncoderBits == encrot1) encoderDiff--;
break;
case encrot1:
if (lastEncoderBits == encrot0) encoderDiff++;
else if (lastEncoderBits == encrot2) encoderDiff--;
break;
case encrot2:
if (lastEncoderBits == encrot1) encoderDiff++;
else if (lastEncoderBits == encrot3) encoderDiff--;
break;
case encrot3:
if (lastEncoderBits == encrot2) encoderDiff++;
else if (lastEncoderBits == encrot0) encoderDiff--;
break;
}
}
lastEncoderBits = enc;
}
// Whether an LCD is physically present; only I2C port-expander panels can
// actually be probed, all other configurations are assumed present.
bool lcd_detected(void) {
#if (ENABLED(LCD_I2C_TYPE_MCP23017) || ENABLED(LCD_I2C_TYPE_MCP23008)) && ENABLED(DETECT_DEVICE)
return lcd.LcdDetected() == 1;
#else
return true;
#endif
}
// True while the encoder click (respecting debounce/ignore) is registered.
bool lcd_clicked() { return LCD_CLICKED; }
#endif // ULTIPANEL
/*********************************/
/** Number to string conversion **/
/*********************************/
// Shared scratch buffer for all number-to-string helpers below. Each call
// overwrites it, so callers must consume the result before converting again.
char conv[8];
// Convert float to rj string with 123 or -12 format
char *ftostr3(const float& x) { return itostr3((int)x); }
// Convert float to rj string with _123, -123, _-12, or __-1 format
char *ftostr4sign(const float& x) { return itostr4sign((int)x); }
// Convert int to string with 12 format
char* itostr2(const uint8_t& x) {
//sprintf(conv,"%5.1f",x);
int xx = x;
conv[0] = (xx / 10) % 10 + '0';
conv[1] = xx % 10 + '0';
conv[2] = 0;
return conv;
}
// Convert float to string with +123.4 format
char* ftostr31(const float& x) {
  // One decimal place held in fixed point (value * 10).
  int xx = abs(x * 10);
  conv[0] = (x >= 0) ? '+' : '-';
  int divisor = 1000;
  for (int i = 1; i <= 3; i++) {
    conv[i] = '0' + (xx / divisor) % 10;
    divisor /= 10;
  }
  conv[4] = '.';
  conv[5] = '0' + xx % 10;
  conv[6] = '\0';
  return conv;
}
// Convert float to string with 123.4 format, dropping sign
char* ftostr31ns(const float& x) {
  // Like ftostr31 but without the sign column.
  int xx = abs(x * 10);
  int divisor = 1000;
  for (int i = 0; i <= 2; i++) {
    conv[i] = '0' + (xx / divisor) % 10;
    divisor /= 10;
  }
  conv[3] = '.';
  conv[4] = '0' + xx % 10;
  conv[5] = '\0';
  return conv;
}
// Convert float to string with 123.45 format
char* ftostr32(const float& x) {
  // Two decimal places in fixed point; negatives replace the leading
  // hundreds digit with '-'.
  long xx = abs(x * 100);
  if (x >= 0)
    conv[0] = '0' + (xx / 10000) % 10;
  else
    conv[0] = '-';
  conv[1] = '0' + (xx / 1000) % 10;
  conv[2] = '0' + (xx / 100) % 10;
  conv[3] = '.';
  conv[4] = '0' + (xx / 10) % 10;
  conv[5] = '0' + xx % 10;
  conv[6] = '\0';
  return conv;
}
// Convert float to string with 1.234 format
char* ftostr43(const float& x) {
  // Three decimal places; a negative value shows '-' in place of the
  // single integer digit.
  long xx = x * 1000;
  conv[0] = (xx >= 0) ? ('0' + (xx / 1000) % 10) : '-';
  xx = abs(xx);
  conv[1] = '.';
  conv[2] = '0' + (xx / 100) % 10;
  conv[3] = '0' + (xx / 10) % 10;
  conv[4] = '0' + xx % 10;
  conv[5] = '\0';
  return conv;
}
// Convert float to string with 1.23 format
char* ftostr12ns(const float& x) {
  // Unsigned, one integer digit and two decimals.
  long xx = x * 100;
  xx = abs(xx);
  conv[0] = '0' + (xx / 100) % 10;
  conv[1] = '.';
  conv[2] = '0' + (xx / 10) % 10;
  conv[3] = '0' + xx % 10;
  conv[4] = '\0';
  return conv;
}
// Convert float to space-padded string with -_23.4_ format
char* ftostr32sp(const float& x) {
// Two decimals in fixed point; leading and trailing positions are
// space-padded and trailing zero decimals are suppressed.
long xx = abs(x * 100);
uint8_t dig;
if (x < 0) { // negative val = -_0
conv[0] = '-';
dig = (xx / 1000) % 10;
conv[1] = dig ? '0' + dig : ' ';
}
else { // positive val = __0
dig = (xx / 10000) % 10;
if (dig) {
conv[0] = '0' + dig;
conv[1] = '0' + (xx / 1000) % 10;
}
else {
conv[0] = ' ';
dig = (xx / 1000) % 10;
conv[1] = dig ? '0' + dig : ' ';
}
}
conv[2] = '0' + (xx / 100) % 10; // lsd always
// Decimal part: show two, one, or zero decimal places depending on
// which trailing digits are nonzero.
dig = xx % 10;
if (dig) { // 2 decimal places
conv[5] = '0' + dig;
conv[4] = '0' + (xx / 10) % 10;
conv[3] = '.';
}
else { // 1 or 0 decimal place
dig = (xx / 10) % 10;
if (dig) {
conv[4] = '0' + dig;
conv[3] = '.';
}
else {
conv[3] = conv[4] = ' ';
}
conv[5] = ' ';
}
conv[6] = '\0';
return conv;
}
// Convert int to lj string with +123.0 format
char* itostr31(const int& x) {
  // Integer rendered with a fake ".0" decimal and an explicit sign.
  conv[0] = (x >= 0) ? '+' : '-';
  int xx = abs(x);
  int divisor = 100;
  for (int i = 1; i <= 3; i++) {
    conv[i] = '0' + (xx / divisor) % 10;
    divisor /= 10;
  }
  conv[4] = '.';
  conv[5] = '0';
  conv[6] = '\0';
  return conv;
}
// Convert int to rj string with 123 or -12 format
char* itostr3(const int& x) {
  // Right-justified in three columns; '-' occupies the leading column for
  // negative values, leading zeros become spaces.
  int xx = x;
  const bool neg = xx < 0;
  if (neg) xx = -xx;
  conv[0] = neg ? '-' : (xx >= 100 ? ('0' + (xx / 100) % 10) : ' ');
  conv[1] = (xx >= 10) ? ('0' + (xx / 10) % 10) : ' ';
  conv[2] = '0' + xx % 10;
  conv[3] = '\0';
  return conv;
}
// Convert int to lj string with 123 format
char* itostr3left(const int& xx) {
  // Left-justified, emitting only as many digits as the value needs.
  uint8_t i = 0;
  if (xx >= 100) conv[i++] = '0' + (xx / 100) % 10;
  if (xx >= 10)  conv[i++] = '0' + (xx / 10) % 10;
  conv[i++] = '0' + xx % 10;
  conv[i] = '\0';
  return conv;
}
// Convert int to rj string with 1234 format
char* itostr4(const int& xx) {
  // Right-justified in four columns, leading zeros as spaces.
  int divisor = 1000;
  for (int i = 0; i < 3; i++) {
    conv[i] = (xx >= divisor) ? ('0' + (xx / divisor) % 10) : ' ';
    divisor /= 10;
  }
  conv[3] = '0' + xx % 10;
  conv[4] = '\0';
  return conv;
}
// Convert int to rj string with _123, -123, _-12, or __-1 format
char* itostr4sign(const int& x) {
// Right-justify |x| in up to three digits; 'sign' tracks the column where
// the sign character ('-' or a space for positives) must be written so it
// sits immediately left of the first digit.
int xx = abs(x);
int sign = 0;
if (xx >= 100) {
conv[1] = (xx / 100) % 10 + '0';
conv[2] = (xx / 10) % 10 + '0';
}
else if (xx >= 10) {
conv[0] = ' ';
sign = 1;
conv[2] = (xx / 10) % 10 + '0';
}
else {
conv[0] = ' ';
conv[1] = ' ';
sign = 2;
}
conv[sign] = x < 0 ? '-' : ' ';
conv[3] = xx % 10 + '0';
conv[4] = 0;
return conv;
}
// Right-justify a non-negative long in seven columns, leading zeros shown
// as spaces.
char* ltostr7(const long& xx) {
  long divisor = 1000000;
  for (int i = 0; i < 6; i++) {
    conv[i] = (xx >= divisor) ? ('0' + (xx / divisor) % 10) : ' ';
    divisor /= 10;
  }
  conv[6] = '0' + xx % 10;
  conv[7] = '\0';
  return conv;
}
// convert float to string with +123 format
char* ftostr30(const float& x) {
  // Truncate to integer, then render sign plus three zero-padded digits.
  int xx = x;
  conv[0] = (xx >= 0) ? '+' : '-';
  xx = abs(xx);
  int divisor = 100;
  for (int i = 1; i <= 3; i++) {
    conv[i] = '0' + (xx / divisor) % 10;
    divisor /= 10;
  }
  conv[4] = '\0';
  return conv;
}
// Convert float to rj string with 12345 format
char* ftostr5(const float& x) {
  // Truncate |x| to a long, right-justified in five columns with spaces
  // for leading zeros.
  long xx = abs(x);
  long divisor = 10000;
  for (int i = 0; i < 4; i++) {
    conv[i] = (xx >= divisor) ? ('0' + (xx / divisor) % 10) : ' ';
    divisor /= 10;
  }
  conv[4] = '0' + xx % 10;
  conv[5] = '\0';
  return conv;
}
// Convert float to string with +1234.5 format
// Scales by 10 so one fractional digit survives the integer truncation.
char* ftostr51(const float& x) {
long xx = abs(x * 10);
conv[0] = (x >= 0) ? '+' : '-'; // sign from the original value, digits from the magnitude
conv[1] = (xx / 10000) % 10 + '0';
conv[2] = (xx / 1000) % 10 + '0';
conv[3] = (xx / 100) % 10 + '0';
conv[4] = (xx / 10) % 10 + '0';
conv[5] = '.';
conv[6] = xx % 10 + '0';
conv[7] = 0; // NUL terminator
return conv;
}
// Convert float to string with +123.45 format
// Scales by 100 so two fractional digits survive the integer truncation.
char* ftostr52(const float& x) {
conv[0] = (x >= 0) ? '+' : '-';
long xx = abs(x * 100);
conv[1] = (xx / 10000) % 10 + '0';
conv[2] = (xx / 1000) % 10 + '0';
conv[3] = (xx / 100) % 10 + '0';
conv[4] = '.';
conv[5] = (xx / 10) % 10 + '0';
conv[6] = xx % 10 + '0';
conv[7] = 0; // NUL terminator
return conv;
}
#if !MECH(DELTA) && DISABLED(Z_SAFE_HOMING) && Z_HOME_DIR < 0
// Drives the interactive manual bed-leveling wizard. Each value of the
// global 'pageShowInfo' selects one instruction screen; the final page (6)
// pauses, re-homes with G28, and returns to the prepare menu.
static void lcd_level_bed() {
switch(pageShowInfo) {
case 0:
{
LCD_Printpos(0, 0); lcd_printPGM(PSTR(MSG_MBL_INTRO));
LCD_Printpos(0, 1); lcd_printPGM(PSTR(MSG_MBL_BUTTON));
}
break;
case 1:
{
LCD_Printpos(0, 0); lcd_printPGM(PSTR(MSG_MBL_1));
LCD_Printpos(0, 1); lcd_printPGM(PSTR(MSG_MBL_BUTTON));
}
break;
case 2:
{
LCD_Printpos(0, 0); lcd_printPGM(PSTR(MSG_MBL_2));
LCD_Printpos(0, 1); lcd_printPGM(PSTR(MSG_MBL_BUTTON));
}
break;
case 3:
{
LCD_Printpos(0, 0); lcd_printPGM(PSTR(MSG_MBL_3));
LCD_Printpos(0, 1); lcd_printPGM(PSTR(MSG_MBL_BUTTON));
}
break;
case 4:
{
LCD_Printpos(0, 0); lcd_printPGM(PSTR(MSG_MBL_4));
LCD_Printpos(0, 1); lcd_printPGM(PSTR(MSG_MBL_BUTTON));
}
break;
case 5:
{
LCD_Printpos(0, 0); lcd_printPGM(PSTR(MSG_MBL_5));
LCD_Printpos(0, 1); lcd_printPGM(PSTR(MSG_MBL_BUTTON));
}
break;
case 6:
{
// Final page: show the last message, blank the second row, wait so the
// user can read it, then home all axes and leave the wizard.
LCD_Printpos(0, 0); lcd_printPGM(PSTR(MSG_MBL_6));
LCD_Printpos(0, 1); lcd_printPGM(PSTR(" "));
HAL::delayMilliseconds(5000);
enqueue_and_echo_commands_P(PSTR("G28"));
lcd_goto_menu(lcd_prepare_menu);
}
break;
}
}
// Entry point of the manual bed-leveling wizard: announces the mode,
// queues a homing command in manual-leveling mode ("G28 M"), resets the
// wizard to its first page and opens the lcd_level_bed screen.
static void config_lcd_level_bed() {
ECHO_EM(MSG_MBL_SETTING);
enqueue_and_echo_commands_P(PSTR("G28 M"));
pageShowInfo = 0;
lcd_goto_menu(lcd_level_bed);
}
#endif
#endif //ULTRA_LCD
#if ENABLED(SDSUPPORT) && ENABLED(SD_SETTINGS)
// Draws the SD-activity indicator pixel at (0,0) on graphical LCDs and
// redraws the current menu on every display page. No-op without DOGLCD.
void set_sd_dot() {
#if ENABLED(DOGLCD)
u8g.firstPage();
do {
u8g.setColorIndex(1);
u8g.drawPixel(0, 0); // draw sd dot
u8g.setColorIndex(1); // black on white
(*currentMenu)();
} while( u8g.nextPage() );
#endif
}
// Clears the SD-activity indicator pixel at (0,0) (color index 0 erases)
// and redraws the current menu. No-op without DOGLCD.
void unset_sd_dot() {
#if ENABLED(DOGLCD)
u8g.firstPage();
do {
u8g.setColorIndex(0);
u8g.drawPixel(0, 0); // draw sd dot
u8g.setColorIndex(1); // black on white
(*currentMenu)();
} while( u8g.nextPage() );
#endif
}
#endif
|
<reponame>gfrntz/anycable-go
package mruby
import "unsafe"
// #cgo CFLAGS: -Ivendor/mruby/include
// #cgo darwin LDFLAGS: ${SRCDIR}/libmruby_darwin.a -lm
// #cgo linux,386 LDFLAGS: ${SRCDIR}/libmruby_linux386.a -lm
// #cgo linux,amd64 LDFLAGS: ${SRCDIR}/libmruby_linux_amd64.a -lm
// #cgo linux,arm64 LDFLAGS: ${SRCDIR}/libmruby_linux_arm64.a -lm
// #include <stdlib.h>
// #include "gomruby.h"
import "C"
// Mrb represents a single instance of mruby.
type Mrb struct {
state *C.mrb_state // owning handle to the underlying C VM; freed by Close
}
// ArenaIndex represents the index into the arena portion of the GC.
//
// See ArenaSave for more information.
type ArenaIndex int
// NewMrb creates a new instance of Mrb, representing the state of a single
// Ruby VM.
//
// When you're finished with the VM, clean up all resources it is using
// by calling the Close method.
//
// NOTE(review): mrb_open can return NULL on allocation failure; the
// resulting Mrb would carry a nil state — confirm whether this needs a check.
func NewMrb() *Mrb {
state := C.mrb_open()
return &Mrb{
state: state,
}
}
// ArenaRestore restores the arena index so the objects between the save and this point
// can be garbage collected in the future.
//
// See ArenaSave for more documentation.
func (m *Mrb) ArenaRestore(idx ArenaIndex) {
C.mrb_gc_arena_restore(m.state, C.int(idx))
}
// ArenaSave saves the index into the arena.
//
// Restore the arena index later by calling ArenaRestore.
//
// The arena is where objects returned by functions such as LoadString
// are stored. By saving the index and then later restoring it with
// ArenaRestore, these objects can be garbage collected. Otherwise, the
// objects will never be garbage collected.
//
// The recommended usage pattern for memory management is to save
// the arena index prior to any Ruby execution, to turn the resulting
// Ruby value into Go values as you see fit, then to restore the arena
// index so that GC can collect any values.
//
// Of course, when Close() is called, all objects in the arena are
// garbage collected anyways, so if you're only calling mruby for a short
// period of time, you might not have to worry about saving/restoring the
// arena.
func (m *Mrb) ArenaSave() ArenaIndex {
return ArenaIndex(C.mrb_gc_arena_save(m.state))
}
// EnableGC enables the garbage collector for this mruby instance.
// (An earlier comment claimed a boolean return; this function returns nothing.)
func (m *Mrb) EnableGC() {
C._go_enable_gc(m.state)
}
// DisableGC disables the garbage collector for this mruby instance.
// (An earlier comment claimed a boolean return; this function returns nothing.)
func (m *Mrb) DisableGC() {
C._go_disable_gc(m.state)
}
// LiveObjectCount returns the number of objects that have not been collected (aka, alive).
func (m *Mrb) LiveObjectCount() int {
return int(C._go_gc_live(m.state))
}
// Class returns the class with the given name and superclass. Note that
// if you call this with a class that doesn't exist, mruby will abort the
// application (like a panic, but not a Go panic).
//
// super can be nil, in which case the Object class will be used.
func (m *Mrb) Class(name string, super *Class) *Class {
cs := C.CString(name)
defer C.free(unsafe.Pointer(cs))
var class *C.struct_RClass
if super == nil {
class = C.mrb_class_get(m.state, cs)
} else {
class = C.mrb_class_get_under(m.state, super.class, cs)
}
return newClass(m, class)
}
// Module returns the named module as a *Class. If the module is invalid,
// NameError is triggered within your program and SIGABRT is sent to the
// application.
func (m *Mrb) Module(name string) *Class {
cs := C.CString(name)
defer C.free(unsafe.Pointer(cs))
class := C.mrb_module_get(m.state, cs)
return newClass(m, class)
}
// Close a Mrb, this must be called to properly free resources, and
// should only be called once.
func (m *Mrb) Close() {
// Delete all the methods from the state
stateMethodTable.Mutex.Lock()
delete(stateMethodTable.Map, m.state)
stateMethodTable.Mutex.Unlock()
// Close the state
C.mrb_close(m.state)
}
// ConstDefined checks if the given constant is defined in the scope.
//
// This should be used, for example, before a call to Class, because a
// failure in Class will crash your program (by design). You can retrieve
// the Value of a Class by calling Value().
func (m *Mrb) ConstDefined(name string, scope Value) bool {
cs := C.CString(name)
defer C.free(unsafe.Pointer(cs))
scopeV := scope.MrbValue(m).value
b := C.mrb_const_defined(
m.state, scopeV, C.mrb_intern_cstr(m.state, cs))
return C.ushort(b) != 0
}
// FullGC executes a complete GC cycle on the VM.
func (m *Mrb) FullGC() {
C.mrb_full_gc(m.state)
}
// GetArgs returns all the arguments that were given to the currently
// called function (currently on the stack).
func (m *Mrb) GetArgs() []*MrbValue {
// The package-level accumulator is shared, so serialize access to it.
getArgLock.Lock()
defer getArgLock.Unlock()
// Clear reset the accumulator to zero length
getArgAccumulator = make([]C.mrb_value, 0, C._go_get_max_funcall_args())
// Get all the arguments and put it into our accumulator
count := C._go_mrb_get_args_all(m.state)
// Convert those all to values
values := make([]*MrbValue, count)
for i := 0; i < int(count); i++ {
values[i] = newValue(m.state, getArgAccumulator[i])
}
return values
}
// IncrementalGC runs an incremental GC step. It is much less expensive
// than a FullGC, but must be called multiple times for GC to actually
// happen.
//
// This function is best called periodically when executing Ruby in
// the VM many times (thousands of times).
func (m *Mrb) IncrementalGC() {
C.mrb_incremental_gc(m.state)
}
// LoadString loads the given code, executes it, and returns its final
// value that it might return.
func (m *Mrb) LoadString(code string) (*MrbValue, error) {
cs := C.CString(code)
defer C.free(unsafe.Pointer(cs))
value := C._go_mrb_load_string(m.state, cs)
if exc := checkException(m.state); exc != nil {
return nil, exc
}
return newValue(m.state, value), nil
}
// Run executes the given value, which should be a proc type.
//
// If you're looking to execute code directly a string, look at LoadString.
//
// If self is nil, it is set to the top-level self.
func (m *Mrb) Run(v Value, self Value) (*MrbValue, error) {
if self == nil {
self = m.TopSelf()
}
mrbV := v.MrbValue(m)
mrbSelf := self.MrbValue(m)
proc := C._go_mrb_proc_ptr(mrbV.value)
value := C.mrb_run(m.state, proc, mrbSelf.value)
if exc := checkException(m.state); exc != nil {
return nil, exc
}
return newValue(m.state, value), nil
}
// RunWithContext is a context-aware parser (aka, it does not discard state
// between runs). It returns a magic integer that describes the stack in place,
// so that it can be re-used on the next call. This is how local variables can
// traverse ruby parse invocations.
//
// Otherwise, it is very similar in function to Run()
func (m *Mrb) RunWithContext(v Value, self Value, stackKeep int) (int, *MrbValue, error) {
if self == nil {
self = m.TopSelf()
}
mrbV := v.MrbValue(m)
mrbSelf := self.MrbValue(m)
proc := C._go_mrb_proc_ptr(mrbV.value)
i := C.int(stackKeep) // in/out: the C side updates this with the new stack-keep value
value := C._go_mrb_context_run(m.state, proc, mrbSelf.value, &i)
if exc := checkException(m.state); exc != nil {
// On error, hand back the caller's original stackKeep unchanged.
return stackKeep, nil, exc
}
return int(i), newValue(m.state, value), nil
}
// Yield yields to a block with the given arguments.
//
// This should be called within the context of a Func.
func (m *Mrb) Yield(block Value, args ...Value) (*MrbValue, error) {
mrbBlock := block.MrbValue(m)
var argv []C.mrb_value
var argvPtr *C.mrb_value // stays nil when there are no arguments
if len(args) > 0 {
// Make the raw byte slice to hold our arguments we'll pass to C
argv = make([]C.mrb_value, len(args))
for i, arg := range args {
argv[i] = arg.MrbValue(m).value
}
argvPtr = &argv[0]
}
result := C._go_mrb_yield_argv(
m.state,
mrbBlock.value,
C.mrb_int(len(argv)),
argvPtr)
if exc := checkException(m.state); exc != nil {
return nil, exc
}
return newValue(m.state, result), nil
}
//-------------------------------------------------------------------
// Functions handling defining new classes/modules in the VM
//-------------------------------------------------------------------
// DefineClass defines a new top-level class.
//
// If super is nil, the class will be defined under Object.
func (m *Mrb) DefineClass(name string, super *Class) *Class {
if super == nil {
super = m.ObjectClass()
}
cs := C.CString(name)
defer C.free(unsafe.Pointer(cs))
return newClass(
m, C.mrb_define_class(m.state, cs, super.class))
}
// DefineClassUnder defines a new class under another class.
//
// This is, for example, how you would define the World class in
// `Hello::World` where Hello is the "outer" class.
func (m *Mrb) DefineClassUnder(name string, super *Class, outer *Class) *Class {
if super == nil {
super = m.ObjectClass()
}
if outer == nil {
outer = m.ObjectClass()
}
cs := C.CString(name)
defer C.free(unsafe.Pointer(cs))
return newClass(m, C.mrb_define_class_under(
m.state, outer.class, cs, super.class))
}
// DefineModule defines a top-level module.
func (m *Mrb) DefineModule(name string) *Class {
cs := C.CString(name)
defer C.free(unsafe.Pointer(cs))
return newClass(m, C.mrb_define_module(m.state, cs))
}
// DefineModuleUnder defines a module under another class/module.
//
// If outer is nil, the module is defined under Object.
func (m *Mrb) DefineModuleUnder(name string, outer *Class) *Class {
if outer == nil {
outer = m.ObjectClass()
}
cs := C.CString(name)
defer C.free(unsafe.Pointer(cs))
return newClass(m,
C.mrb_define_module_under(m.state, outer.class, cs))
}
//-------------------------------------------------------------------
// Functions below return Values or constant Classes
//-------------------------------------------------------------------
// ObjectClass returns the Object top-level class.
func (m *Mrb) ObjectClass() *Class {
return newClass(m, m.state.object_class)
}
// KernelModule returns the Kernel top-level module.
func (m *Mrb) KernelModule() *Class {
return newClass(m, m.state.kernel_module)
}
// TopSelf returns the top-level `self` value.
func (m *Mrb) TopSelf() *MrbValue {
return newValue(m.state, C.mrb_obj_value(unsafe.Pointer(m.state.top_self)))
}
// FalseValue returns a Value for "false"
func (m *Mrb) FalseValue() *MrbValue {
return newValue(m.state, C.mrb_false_value())
}
// NilValue returns "nil"
func (m *Mrb) NilValue() *MrbValue {
return newValue(m.state, C.mrb_nil_value())
}
// TrueValue returns a Value for "true"
func (m *Mrb) TrueValue() *MrbValue {
return newValue(m.state, C.mrb_true_value())
}
// FixnumValue returns a Value for a fixed number.
func (m *Mrb) FixnumValue(v int) *MrbValue {
return newValue(m.state, C.mrb_fixnum_value(C.mrb_int(v)))
}
// StringValue returns a Value for a string.
func (m *Mrb) StringValue(s string) *MrbValue {
cs := C.CString(s)
defer C.free(unsafe.Pointer(cs))
return newValue(m.state, C.mrb_str_new_cstr(m.state, cs))
}
// checkException converts any pending mruby exception on the state into a
// Go error (or nil if none). The pending exception is cleared as a side
// effect so subsequent calls start clean.
func checkException(state *C.mrb_state) error {
if state.exc == nil {
return nil
}
err := newExceptionValue(state)
state.exc = nil
return err
}
|
<filename>common-utils/common-api/src/main/java/com/atjl/common/api/req/PageReqV1.java
package com.atjl.common.api.req;
import com.atjl.common.constant.CommonConstant;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
/**
 * Paging request, variant 2: cursor-style pagination keyed by the id of the
 * first item of the requested page plus a page size.
 *
 * @author jasondliu
 */
@ApiModel(value = "分页基础请求对象类型2")
public class PageReqV1 extends PageBaseReq {
// Id of the first item of the requested page; null means "first page".
// NOTE(review): the Swagger 'example' attributes below hold descriptive text,
// not example values — confirm whether that is intentional.
@ApiModelProperty(value = "起始对象主键", required = true, example = "查询需分页时传递")
private String startItemId;
// Requested page size; falls back to the shared default when unset/invalid.
@ApiModelProperty(value = "页大小", example = "查询需分页时传递,默认10")
private Long pageSize = CommonConstant.DFT_PAGE_SIZE;
public PageReqV1() {
super();
}
/** Builds a first-page request (no start item) with the given page size. */
public PageReqV1(Long pageSize) {
this.startItemId = null;
this.pageSize = pageSize;
}
/** Builds a request starting at the given item with the given page size. */
public PageReqV1(String startItemId, Long pageSize) {
this.startItemId = startItemId;
this.pageSize = pageSize;
}
public String getStartItemId() {
return startItemId;
}
public void setStartItemId(String startItemId) {
this.startItemId = startItemId;
}
/** Returns the page size, substituting the default for null or non-positive values. */
public Long getPageSize() {
return pageSize == null || pageSize <= 0 ? CommonConstant.DFT_PAGE_SIZE : pageSize;
}
public void setPageSize(Long pageSize) {
this.pageSize = pageSize;
}
}
|
#!/bin/bash
# Tables holding experiment/sample data, loaded only on a full data install.
TABLE_NAMES_DATA="Assay \
AssayPV \
AssayPVOntology \
AssaySample \
Experiment \
ExperimentAsset \
Sample \
SamplePV \
SamplePVOntology"
# Schema/reference tables, loaded in every install mode (including "Schema").
TABLE_NAMES_SCHEMA="ArrayDesign \
AnnotationSrc \
AnnSrc_BioEntityType \
BioEntity \
BioEntitybepv \
BioEntityproperty \
BioEntitypropertyvalue \
BioEntitytype \
BIOMART_ANNSRC \
DesignElement \
DesignEltBioentity \
EXTERNAL_ARRAYDESIGN \
EXTERNAL_BEPROPERTY \
Gene \
GeneGPV \
GeneProperty \
GenePropertyValue \
Ontology \
OntologyTerm \
Organism \
Property \
PropertyValue \
SchemaVersion \
Software"
# create_schema CONNECTION
#   Creates the Atlas DB schema by running the core SQL scripts, in strict
#   order, via sqlplus against the connection string given as $1.
#   Exits with status 1 on the first missing or failing script.
create_schema() {
  ATLAS_CONNECTION=$1
  # scripts which must be executed first, in given order
  CORE_SCRIPTS="Types.sql Tables.sql Views.sql list_to_table.sql list_to_table_str.sql PKG_ATLASMGR.sql \
PKG_ATLASLDR.sql CUR_AssayProperty.sql CUR_MergePropertyValue.sql CUR_PropertyValue.sql CUR_SampleProperty.sql \
CUR_AllPropertyID.sql CUR_TwoValues.sql CUR_TwoFactors.sql CUR_MergeFactors.sql \
TR_CUR_AssayProperty.sql TR_CUR_PropertyValue.sql TR_CUR_SampleProperty.sql CUR_OntologyMapping.sql \
TR_CUR_OntologyMapping.sql"
  SCHEMA_FOLDER=Schema
  for SCRIPT_NAME in $CORE_SCRIPTS
  do
    # Use the SCHEMA_FOLDER variable consistently (it was defined but unused).
    if [ ! -r "$SCHEMA_FOLDER/$SCRIPT_NAME" ]; then
      echo "required script not found in Schema folder:" "$SCRIPT_NAME"; exit 1
    fi
    echo "executing " "$SCRIPT_NAME"
    sqlplus -L -S "$ATLAS_CONNECTION" @"$SCHEMA_FOLDER/$SCRIPT_NAME"
    # exit 1 rather than the non-portable "exit -1" (exit takes 0-255).
    if [ "$?" -ne "0" ]; then
      echo "can not execute script" "$SCRIPT_NAME" ; exit 1
    fi
  done
}
# load_data CONNECTION DATA_FOLDER CTL_FOLDER INSTALL_MODE INDEX_TABLESPACE
#   Loads table data with sqlldr, creates read-only synonyms and grants,
#   then rebuilds indexes/constraints/sequences. In "Schema" install mode
#   only the schema tables are loaded. The target user name is derived from
#   the tablespace name (text before the first underscore).
load_data() {
ATLAS_CONNECTION=$1
DATA_FOLDER=$2
CTL_FOLDER=$3
INSTALL_MODE=$4
ATLAS_INDEX_TABLESPACE=$5
# Constraints must be off while bulk-loading, re-enabled at the end.
echo "call ATLASMGR.DisableConstraints();" | sqlplus -L -S $ATLAS_CONNECTION
TABLE_NAMES_SET="${TABLE_NAMES_SCHEMA} ${TABLE_NAMES_DATA}"
if [ "$INSTALL_MODE" == "Schema" ]; then
TABLE_NAMES_SET=$TABLE_NAMES_SCHEMA
fi
TARGET_ATLAS_USER=`echo ${ATLAS_INDEX_TABLESPACE} | awk -F'_' '{print $1}'`
echo "Creating table synonyms for user: ${TARGET_ATLAS_USER}_ro"
for TABLE in $TABLE_NAMES_SET
do
echo "create or replace synonym ${TARGET_ATLAS_USER}_ro.A2_${TABLE} for ${TARGET_ATLAS_USER}.A2_${TABLE};" | sqlplus -L -S $ATLAS_CONNECTION
echo "create or replace synonym ${TARGET_ATLAS_USER}_ro.A2_${TABLE} for ${TARGET_ATLAS_USER}.A2_${TABLE};"
done
echo "create or replace synonym ${TARGET_ATLAS_USER}_ro.A2_CONFIG_PROPERTY for ${TARGET_ATLAS_USER}.A2_CONFIG_PROPERTY;" | sqlplus -L -S $ATLAS_CONNECTION
echo "create or replace synonym ${TARGET_ATLAS_USER}_ro.A2_CONFIG_PROPERTY for ${TARGET_ATLAS_USER}.A2_CONFIG_PROPERTY;"
# Alas, these two CUR_ views are still used in the public Atlas web service (e.g. for generating experiment_properties.txt file)
echo "create or replace synonym ${TARGET_ATLAS_USER}_ro.CUR_OntologyMapping for ${TARGET_ATLAS_USER}.CUR_OntologyMapping;" | sqlplus -L -S $ATLAS_CONNECTION
echo "create or replace synonym ${TARGET_ATLAS_USER}_ro.CUR_OntologyMapping for ${TARGET_ATLAS_USER}.CUR_OntologyMapping;"
echo "create or replace synonym ${TARGET_ATLAS_USER}_ro.CUR_AssayProperty for ${TARGET_ATLAS_USER}.CUR_AssayProperty;" | sqlplus -L -S $ATLAS_CONNECTION
echo "create or replace synonym ${TARGET_ATLAS_USER}_ro.CUR_AssayProperty for ${TARGET_ATLAS_USER}.CUR_AssayProperty;"
echo "Done creating table synonyms for user: ${TARGET_ATLAS_USER}_ro"
echo "Granting permissions to ${TARGET_ATLAS_USER}_select_role"
for TABLE in $TABLE_NAMES_SET
do
echo "grant select on A2_${TABLE} to ${TARGET_ATLAS_USER}_select_role;" | sqlplus -L -S $ATLAS_CONNECTION
echo "grant select on A2_${TABLE} to ${TARGET_ATLAS_USER}_select_role"
done
echo "grant select on A2_CONFIG_PROPERTY to ${TARGET_ATLAS_USER}_select_role;" | sqlplus -L -S $ATLAS_CONNECTION
echo "grant select on A2_CONFIG_PROPERTY to ${TARGET_ATLAS_USER}_select_role"
# Alas, these two CUR_ views are still used in the public Atlas web service (e.g. for generating experiment_properties.txt file)
echo "grant select on CUR_OntologyMapping to ${TARGET_ATLAS_USER}_select_role;" | sqlplus -L -S $ATLAS_CONNECTION
echo "grant select on CUR_OntologyMapping to ${TARGET_ATLAS_USER}_select_role"
echo "grant select on CUR_AssayProperty to ${TARGET_ATLAS_USER}_select_role;" | sqlplus -L -S $ATLAS_CONNECTION
echo "grant select on CUR_AssayProperty to ${TARGET_ATLAS_USER}_select_role"
echo "Done granting select permissions to ${TARGET_ATLAS_USER}_select_role"
# Bulk-load each table; sqlldr failures are reported but do not abort the run.
for LDR_CTL in $TABLE_NAMES_SET
do
echo "... $LDR_CTL"
sqlldr $ATLAS_CONNECTION control=$CTL_FOLDER/$LDR_CTL.ctl data=$DATA_FOLDER/$LDR_CTL.dat
LDR_RESULT="$?"
if [ "$LDR_RESULT" -ne "0" ]; then
echo "can not execute sqlldr:" $LDR_CTL $LDR_RESULT ;
fi
cat $LDR_CTL.log >> install.log
rm $LDR_CTL.log
done
echo "Creating indexes and constraints..."
SCRIPT_NAME=Indexes.sql
# If a tablespace was supplied, rewrite the index script's placeholders to
# direct index storage into it.
if [ ! -z "${ATLAS_INDEX_TABLESPACE}" ]; then
sed "s/\/\*PK_TABLESPACE\*\//USING INDEX TABLESPACE ${ATLAS_INDEX_TABLESPACE}/" Schema/Indexes.sql | \
sed "s/\/\*INDEX_TABLESPACE\*\//TABLESPACE ${ATLAS_INDEX_TABLESPACE}/" > Schema/IndexesTablespace.sql
SCRIPT_NAME=IndexesTablespace.sql
fi
sqlplus -L -S $ATLAS_CONNECTION @Schema/${SCRIPT_NAME}
if [ "$?" -ne "0" ]; then
echo "can not execute script" Indexes.sql ; exit -1
fi
echo "Enabling constraints and rebuilding sequences..."
echo "call ATLASMGR.EnableConstraints();" | sqlplus -L -S $ATLAS_CONNECTION
echo "call ATLASMGR.RebuildSequences();" | sqlplus -L -S $ATLAS_CONNECTION
}
|
<reponame>smarulanda97/nextjs-spa5sentidos-v2
/** A navigational menu fetched from the CMS. */
type Menu = {
id: string;
name: string;
items: MenuItem[];
__typename?: string;
machine_name: string;
};
/** A single entry inside a Menu. */
type MenuItem = {
id: string;
link: string;
title: string;
icon?: StrapiImage;
__typename?: string;
};
/** Image metadata as returned by the Strapi media library. */
type StrapiImage = {
url: string;
name: string;
width: number;
height: number;
__typename?: string;
alternativeText: string;
provider_metadata?: string;
};
/** Per-breakpoint image variants. */
type ResponsiveImages = {
mobile: StrapiImage;
desktop: StrapiImage;
};
/** A purchasable service offering. */
type Service = {
id: string;
title: string;
summary: string;
slug: string;
price: number;
discount: number;
home_service_included: boolean;
images: {
thumbnail: StrapiImage;
};
};
/** Minimal URL shape (only the origin is needed). */
type Url = {
origin: string;
};
/** Dimensions for inline SVG rendering. */
type Svg = {
width: string;
height: string;
};
/** SEO meta tags for a page, grouped by tag family. */
type MetaTags = {
pathname: string;
basic_tags: {
[name: string]: string;
};
open_graph?: {
[name: string]: string;
};
twitter?: {
[name: string]: string;
};
};
export enum ButtonColors {
primary = 'primary',
secondary = 'secondary',
}
export enum Locale {
Colombia = 'es-CO',
Global = 'en-US',
}
export enum Currency {
Colombia = 'COP',
Global = 'USD',
}
export type {
Menu,
MenuItem,
StrapiImage,
ResponsiveImages,
Service,
Url,
Svg,
MetaTags,
};
|
def merge_sort(arr):
    """Sort ``arr`` in place using recursive top-down merge sort."""
    if len(arr) <= 1:
        return
    middle = len(arr) // 2
    lower = arr[:middle]
    upper = arr[middle:]
    merge_sort(lower)
    merge_sort(upper)
    # Merge the two sorted halves back into arr.
    li = ui = pos = 0
    while li < len(lower) and ui < len(upper):
        if lower[li] < upper[ui]:
            arr[pos] = lower[li]
            li += 1
        else:
            arr[pos] = upper[ui]
            ui += 1
        pos += 1
    # Exactly one half still has elements; splice its remainder onto the end.
    arr[pos:] = lower[li:] + upper[ui:]
# Demo: sort a sample list in place and print the result.
arr = [8, 4, 14, 1, 9]
merge_sort(arr)
print(arr) # prints [1, 4, 8, 9, 14]
package org.museautomation.ui.editors.suite;
import javafx.application.*;
import javafx.scene.*;
import javafx.scene.input.*;
import net.christophermerrill.testfx.*;
import org.junit.jupiter.api.*;
import org.museautomation.builtins.step.*;
import org.museautomation.core.*;
import org.museautomation.core.project.*;
import org.museautomation.core.step.*;
import org.museautomation.core.steptask.*;
import org.museautomation.core.suite.*;
import org.museautomation.ui.extend.components.*;
import java.io.*;
import java.util.*;
/**
* @author <NAME> (see LICENSE.txt for license details)
*/
// UI tests for the IdListTaskSuiteEditor: verifies display, add/remove with
// undo, and the computation of tests addable to a suite.
public class IdListTestSuiteEditorTests extends ComponentTest
{
@Test
void displayList() throws IOException
{
int num_tests = 3;
IdListTaskSuite suite = setupTests(num_tests);
Platform.runLater(() -> _editor.editResource(_project, suite));
waitForUiEvents();
for (int i = 0; i < num_tests; i++)
Assertions.assertTrue(exists(createTestId(i))); // all tests displayed
}
/*
@Test
public void okDisabledUntilSomethingSelected() throws IOException
{
IdListTestSuite suite = setupTests(3);
Platform.runLater(() -> _editor.editResource(_project, suite));
waitForUiEvents();
final String new_test_id = "new-test";
createTest(new_test_id);
clickOn(id(IdListTestSuiteEditor.ADD_BUTTON_ID));
Button ok_button = lookup(id(PopupDialog.OK_BUTTON_ID)).query();
Assert.assertTrue(ok_button.disabledProperty().getValue());
moveTo(new_test_id).moveBy(-100, 0).clickOn(); // check something
Assert.assertFalse(ok_button.disabledProperty().getValue()); // ok button enabled
moveTo(new_test_id).moveBy(-100, 0).clickOn(); // un-check it
Assert.assertTrue(ok_button.disabledProperty().getValue()); // ok button disabled
// cleanup
moveTo(new_test_id).moveBy(-100, 0).clickOn(); // re-check it
clickOn(id(PopupDialog.OK_BUTTON_ID)); // dismiss the pop-up - else, the popup interferes with next test
waitForUiEvents();
}
*/
@Test
void addTestToSuite() throws IOException
{
PopupDialog.makeFast();
IdListTaskSuite suite = setupTests(3);
Platform.runLater(() -> _editor.editResource(_project, suite));
waitForUiEvents();
final String new_test_id = "new-test";
createTest(new_test_id);
clickOn(id(IdListTaskSuiteEditor.ADD_BUTTON_ID));
// TODO: how to verify existing tests are not shown in suggestion list? Can't just check for exists(), because they are in the other list
moveTo(new_test_id).moveBy(-100, 0).clickOn();
clickOn(id(PopupDialog.OK_BUTTON_ID));
waitForUiEvents();
Assertions.assertTrue(exists(new_test_id)); // displayed in list
Assertions.assertTrue(suite.getTaskIds().contains(new_test_id)); // added to suite
// undo
_editor.getUndoStack().undoLastAction();
waitForUiEvents();
Assertions.assertFalse(exists(new_test_id)); // not displayed in list
Assertions.assertFalse(suite.getTaskIds().contains(new_test_id)); // not in suite
}
@Test
void removeTestsFromSuite() throws IOException
{
IdListTaskSuite suite = setupTests(5);
Platform.runLater(() -> _editor.editResource(_project, suite));
waitForUiEvents();
String removed1 = createTestId(1);
String removed2 = createTestId(3);
clickOn(removed1);
press(KeyCode.CONTROL).clickOn(removed2).release(KeyCode.CONTROL);
push(KeyCode.DELETE);
waitForUiEvents();
Assertions.assertFalse(exists(removed1)); // not displayed in list
Assertions.assertFalse(exists(removed2)); // not displayed in list
Assertions.assertFalse(suite.getTaskIds().contains(removed1)); // removed from suite
Assertions.assertFalse(suite.getTaskIds().contains(removed2)); // removed from suite
// undo
_editor.getUndoStack().undoLastAction();
waitForUiEvents();
Assertions.assertTrue(exists(removed1)); // returned to list
Assertions.assertTrue(exists(removed2)); // returned to list
Assertions.assertTrue(suite.getTaskIds().contains(removed1)); // exists in suite
Assertions.assertTrue(suite.getTaskIds().contains(removed2)); // exists in suite
}
@Test
void listOfAddableTests() throws IOException
{
IdListTaskSuite suite = setupTests(2);
final String new_test_1 = "new-test1";
createTest(new_test_1);
final String new_test_2 = "new-test2";
createTest(new_test_2);
List<String> unused_test_ids = new UnusedTests(_project, suite).getUnusedTestIds();
Assertions.assertTrue(unused_test_ids.contains(new_test_1)); // new tests should be in the list
Assertions.assertTrue(unused_test_ids.contains(new_test_2));
Assertions.assertFalse(unused_test_ids.contains(createTestId(0))); // previously existing tests should not
Assertions.assertFalse(unused_test_ids.contains(createTestId(1)));
}
// Creates num_tests tasks in the project and returns a suite referencing them.
private IdListTaskSuite setupTests(int num_tests) throws IOException
{
List<String> ids = new ArrayList<>();
for (int i = 0; i < num_tests; i++)
{
String id = createTestId(i);
ids.add(id);
createTest(id);
}
IdListTaskSuite suite = new IdListTaskSuite();
suite.setTestIds(ids);
return suite;
}
// Adds a minimal one-step task with the given id to the project.
private void createTest(String id) throws IOException
{
SteppedTask test = new SteppedTask(new StepConfiguration(LogMessage.TYPE_ID));
test.setId(id);
_project.getResourceStorage().addResource(test);
}
private String createTestId(int test_num)
{
return "test-" + test_num;
}
@Override
public Node createComponentNode()
{
_project = new SimpleProject();
_editor = new IdListTaskSuiteEditor();
return _editor.getNode();
}
private MuseProject _project;
private IdListTaskSuiteEditor _editor;
}
#!/bin/bash
# SLURM batch: sweep learning rates for finbert-bilstm-1.py, one GPU each.
#SBATCH --nodes=1
## NOTE: removed the invalid "#SBATCH --N=1" directive — "--N" is not a valid
## sbatch long option, and the node count is already set by --nodes above.
#SBATCH --gres=gpu:8
#SBATCH --exclusive
#SBATCH --mem=0
##SBATCH -p debug
#SBATCH --time=06:00:00
##SBATCH --time=06:00:00
srun --gres=gpu:1 -C cuda-mode-exclusive -t 360 -N 1 -n 1 python finbert-bilstm-1.py 4094 sec7 Z_score_c 5 1 2 6e-4 &
srun --gres=gpu:1 -C cuda-mode-exclusive -t 360 -N 1 -n 1 python finbert-bilstm-1.py 4094 sec7 Z_score_c 5 1 2 7e-4 &
srun --gres=gpu:1 -C cuda-mode-exclusive -t 360 -N 1 -n 1 python finbert-bilstm-1.py 4094 sec7 Z_score_c 5 1 2 8e-4 &
srun --gres=gpu:1 -C cuda-mode-exclusive -t 360 -N 1 -n 1 python finbert-bilstm-1.py 4094 sec7 Z_score_c 5 1 2 9e-4 &
srun --gres=gpu:1 -C cuda-mode-exclusive -t 360 -N 1 -n 1 python finbert-bilstm-1.py 4094 sec7 Z_score_c 5 1 2 10e-4 &
srun --gres=gpu:1 -C cuda-mode-exclusive -t 360 -N 1 -n 1 python finbert-bilstm-1.py 4094 sec7 Z_score_c 5 1 2 11e-4 &
srun --gres=gpu:1 -C cuda-mode-exclusive -t 360 -N 1 -n 1 python finbert-bilstm-1.py 4094 sec7 Z_score_c 5 1 2 12e-4 &
srun --gres=gpu:1 -C cuda-mode-exclusive -t 360 -N 1 -n 1 python finbert-bilstm-1.py 4094 sec7 Z_score_c 5 1 2 13e-4 &
# Wait for all eight background sweeps to finish before the job exits.
wait
<reponame>shawntoffel/atto
#ifndef _ATTO_H_
#define _ATTO_H_
typedef struct atto_server {
const char *port;
const int file_descriptor;
} atto_server_t;
atto_server_t atto_init_server(char *port);
int atto_handle_next_connection(atto_server_t *server, char *response);
int atto_close_server(atto_server_t *server);
#endif |
/**
 * Returns the decimal digits of {@code num}, least significant first.
 * <p>
 * Handles zero (returns {@code [0]}) and negative values (sign ignored).
 * The previous version returned an all-zero array for negatives, because
 * the '-' character inflated the computed length while the extraction
 * loop (guarded by {@code num > 0}) never ran.
 */
public static int[] getDigits(int num) {
    if (num == 0) {
        return new int[] { 0 };
    }
    // Widen to long before abs: Math.abs(Integer.MIN_VALUE) overflows int.
    long abs = Math.abs((long) num);
    int[] digits = new int[Long.toString(abs).length()];
    int i = 0;
    while (abs > 0) {
        digits[i] = (int) (abs % 10);
        abs /= 10;
        i++;
    }
    return digits;
}
// Example usage (in real code these statements must live inside a method):
int[] digits = getDigits(1975);
System.out.println(Arrays.toString(digits)); // outputs "[5, 9, 7, 1]"
#include "xr_dsa.h"
#include "crypto.h"
#include <openssl/dsa.h>
namespace crypto
{
// Builds a DSA context from the fixed domain parameters p, q, g.
// The priv_key/pub_key BIGNUMs are allocated here and filled later by
// sign()/verify() before use.
// NOTE(review): direct access to DSA struct fields — presumably requires a
// pre-1.1.0 OpenSSL; confirm against the project's OpenSSL version.
xr_dsa::xr_dsa(u8 const p[public_key_length],
u8 const q[private_key_length],
u8 const g[public_key_length])
{
m_dsa = DSA_new();
m_dsa->p = BN_new();
m_dsa->q = BN_new();
m_dsa->g = BN_new();
m_dsa->priv_key = BN_new();
m_dsa->pub_key = BN_new();
BN_bin2bn(p, public_key_length, m_dsa->p);
BN_bin2bn(q, private_key_length, m_dsa->q);
BN_bin2bn(g, public_key_length, m_dsa->g);
}
// DSA_free releases the context together with the BIGNUMs attached in the ctor.
xr_dsa::~xr_dsa()
{
DSA_free (m_dsa);
}
// Signs data with the given private key and returns the signature as an
// uppercase hex string.
shared_str const xr_dsa::sign (private_key_t const & priv_key,
                               u8 const* data,
                               u32 const data_size)
{
    // Install the caller's private key into the DSA context.
    BN_bin2bn(priv_key.m_value, sizeof(priv_key.m_value), m_dsa->priv_key);
    unsigned int sign_size = DSA_size(m_dsa);
    u8* sign_dest = static_cast<u8*>(
        _alloca(sign_size));
    BIGNUM tmp_sign_res_bn;
    BN_init (&tmp_sign_res_bn);
    DSA_sign (0, data, data_size, sign_dest, &sign_size, m_dsa);
    BN_bin2bn (sign_dest, sign_size, &tmp_sign_res_bn);
    // BN_bn2hex returns an OPENSSL-allocated string; the original version
    // leaked it (and the temporary BIGNUM's internals). Copy into the
    // shared_str first, then release both.
    char* hex_str = BN_bn2hex(&tmp_sign_res_bn);
    shared_str result(hex_str);
    OPENSSL_free(hex_str);
    BN_free(&tmp_sign_res_bn);
    return result;
}
// Verifies a hex-encoded signature (as produced by sign()) over data using
// the given public key. Returns true iff DSA_verify reports success.
bool xr_dsa::verify (public_key_t const & pub_key,
u8 const * data,
u32 const data_size,
shared_str const & dsign)
{
BN_bin2bn(pub_key.m_value, sizeof(pub_key.m_value), m_dsa->pub_key);
BIGNUM* tmp_bn = NULL;
BN_hex2bn (&tmp_bn, dsign.c_str());
// NOTE(review): size is taken from BIGNUM internals (top * word size), and
// BN_bn2bin writes fewer bytes if the value has leading zero bytes —
// confirm signatures with leading zeros round-trip correctly.
int sig_size = tmp_bn->top * sizeof(unsigned long);
u8* sig_buff = static_cast<u8*>(_alloca(sig_size));
VERIFY (sig_size == DSA_size(m_dsa));
BN_bn2bin (tmp_bn, sig_buff);
bool ret = DSA_verify (0, data, data_size, sig_buff, sig_size, m_dsa) == 1 ? true : false;
BN_free(tmp_bn);
return ret;
}
#ifdef DEBUG
// Progress callback passed to DSA_generate_parameters; just logs the phase.
static void dsa_genparams_cb(int p, int n, void *arg)
{
Msg("* dsa genparams cb(%d, %d)", p, n);
}
// Fixed entropy string mixed into the parameter-generation seed below.
static unsigned char rnd_seed[] = "S.T.A.L.K.E.R. 4ever Rulezz !!!";
// Dumps a BIGNUM to the log as C array-initializer text ("0x.., 0x.., ..."),
// max_columns bytes per line. Debug helper for pasting generated parameters
// into source code.
void print_big_number(BIGNUM* big_num, u32 max_columns = 8)
{
u8 bin_buff[xr_dsa::public_key_length];//public_key_length is the max
int bin_size = 0;
string4096 result_buffer;
string16 tmp_buff;
ZeroMemory (bin_buff, sizeof(bin_buff));
BN_bn2bin (big_num, bin_buff);
bin_size = big_num->top * sizeof(unsigned long); // size from BIGNUM word count
result_buffer[0] = 0;
strcat_s(result_buffer, "\t");
for (int i = 0; i < bin_size; ++i)
{
// Break the line after every max_columns bytes.
if (((i % max_columns) == 0) && (i > 0))
{
strcat_s(result_buffer, "\n\t");
}
sprintf(tmp_buff, "0x%02x, ", bin_buff[i]);
strcat_s(result_buffer, tmp_buff);
}
Msg(result_buffer);
};
// Debug-only: generates a fresh DSA parameter set + key pair, logs them as
// C source (via print_big_number), and self-tests sign/verify round-trips
// before freeing everything.
void xr_dsa::generate_params()
{
int counter;
unsigned long long_ret;
string256 random_string;
// Seed string: high-resolution timestamp + fixed entropy text.
sprintf_s (random_string, "%I64d_%s", CPU::QPC(), rnd_seed);
//sprintf_s (random_string, "%s", rnd_seed);
unsigned char* rnd_seed = static_cast<unsigned char*>((void*)random_string);
unsigned int rnd_ssize = xr_strlen(random_string);
DSA* tmp_dsa_params = DSA_generate_parameters(
key_bit_length,
rnd_seed,
rnd_ssize,
&counter,
&long_ret,
dsa_genparams_cb,
NULL
);
DSA_generate_key (tmp_dsa_params);
// Sanity-check that generated component sizes match the compiled-in lengths.
VERIFY (tmp_dsa_params->p->top * sizeof(u32) == public_key_length);
VERIFY (tmp_dsa_params->q->top * sizeof(u32) == private_key_length);
VERIFY (tmp_dsa_params->g->top * sizeof(u32) == public_key_length);
VERIFY (tmp_dsa_params->pub_key->top * sizeof(u32) == public_key_length);
VERIFY (tmp_dsa_params->priv_key->top * sizeof(u32)== private_key_length);
// Emit every component as paste-ready C source.
Msg("// DSA params ");
Msg("u8 const p_number[crypto::xr_dsa::public_key_length] = {");
print_big_number (tmp_dsa_params->p);
Msg("};//p_number");
Msg("u8 const q_number[crypto::xr_dsa::private_key_length] = {");
print_big_number (tmp_dsa_params->q);
Msg("};//q_number");
Msg("u8 const g_number[crypto::xr_dsa::public_key_length] = {");
print_big_number (tmp_dsa_params->g);
Msg("};//g_number");
Msg("u8 const public_key[crypto::xr_dsa::public_key_length] = {");
print_big_number (tmp_dsa_params->pub_key);
Msg("};//public_key");
u8 priv_bin[private_key_length];
BN_bn2bin (tmp_dsa_params->priv_key, priv_bin);
Msg("// Private key:");
for (int i = 0; i < private_key_length; ++i)
{
Msg(" m_private_key.m_value[%d] = 0x%02x;", i, priv_bin[i]);
}
// Self-test: sign a known digest, round-trip the signature through hex,
// then verify it succeeds for the digest and fails for a corrupted one.
u8 debug_digest[] = "this is a test";
u8 debug_bad_digest[] = "this as a test";
u32 siglen = DSA_size(tmp_dsa_params);
u8* sig = static_cast<u8*>(_alloca(siglen));
BIGNUM bn_sign;
BN_init (&bn_sign);
VERIFY (DSA_sign(0, debug_digest, sizeof(debug_digest), sig, &siglen, tmp_dsa_params) == 1);
BN_bin2bn (sig, siglen, &bn_sign);
shared_str sig_str = BN_bn2hex(&bn_sign);
BIGNUM* bn_rsing = NULL;
ZeroMemory (sig, siglen);
BN_hex2bn (&bn_rsing, sig_str.c_str());
BN_bn2bin (bn_rsing, sig);
BN_free (bn_rsing);
VERIFY (DSA_verify(0, debug_digest, sizeof(debug_digest), sig, siglen, tmp_dsa_params) == 1);
VERIFY (DSA_verify(0, debug_bad_digest, sizeof(debug_bad_digest), sig, siglen, tmp_dsa_params) == 0);
DSA_free(tmp_dsa_params);
}
#endif //#ifdef DEBUG
} //namespace crypto |
<reponame>netluxe/goss
package resource
import "github.com/aelsabbahy/goss/system"
// Package is a package resource: the expected installation state and,
// optionally, the expected versions of a named system package.
type Package struct {
	Name      string   `json:"-"`                  // package name; doubles as the resource ID
	Installed bool     `json:"installed"`          // expected installation state
	Versions  []string `json:"versions,omitempty"` // expected versions; empty means "don't check"
}
// ID returns the resource identifier, which for a package is its name.
func (p *Package) ID() string { return p.Name }

// SetID sets the resource identifier (the package name).
func (p *Package) SetID(id string) { p.Name = id }
// Validate compares the expected package state against the actual state on
// the system and returns one TestResult per checked property. Versions are
// only validated when the expectation lists at least one version.
func (p *Package) Validate(sys *system.System) []TestResult {
	sysPkg := sys.NewPackage(p.Name, sys)

	results := []TestResult{
		ValidateValue(p, "installed", p.Installed, sysPkg.Installed),
	}
	if len(p.Versions) != 0 {
		results = append(results, ValidateValues(p, "version", p.Versions, sysPkg.Versions))
	}
	return results
}
// NewPackage builds a Package resource expectation from the observed system
// package. Version collection is skipped when "versions" appears in
// ignoreList.
func NewPackage(sysPackage system.Package, ignoreList []string) *Package {
	name := sysPackage.Name()
	installed, _ := sysPackage.Installed()
	// Checked assertion: a non-bool value (or nil on error) yields false
	// instead of panicking like the previous unchecked installed.(bool).
	installedBool, _ := installed.(bool)
	p := &Package{
		Name:      name,
		Installed: installedBool,
	}
	if !contains(ignoreList, "versions") {
		versions, _ := sysPackage.Versions()
		p.Versions = versions
	}
	return p
}
|
<filename>scripts/generate-upload-merkle.js
#!/usr/bin/env node
require("dotenv").config();
const program = require("commander");
const axios = require("axios");
const loadJsonFile = require("load-json-file");
const MerkleTree = require("../scripts/merkle-tree");
// Parse CLI options, build the merkle tree from the user-set file, pin the
// tree's user set to IPFS, then report the merkle root and the IPFS response.
async function main() {
  program
    .description("merkle-drop")
    .option("-f, --file [value]", "json formatted file");
  program.parse(process.argv);

  const { file } = program.opts();
  const userSet = await loadJsonFile(file);

  const tree = MerkleTree.build(userSet);
  const uploaded = await upload(tree.userSet);

  //report
  const rootHex = "0x" + tree.getRoot().toString("hex");
  console.log("Merkle Root: ", rootHex);
  console.log("IPFS :", uploaded);
}
// POST the in-memory tree data to Pinata's pinJSONToIPFS endpoint and return
// the response payload. Credentials come from the PINATA_API_KEY /
// PINATA_SECRET_KEY environment variables; failures are logged and rethrown.
async function upload(memoryData) {
  const endpoint = "https://api.pinata.cloud/pinning/pinJSONToIPFS";
  const headers = {
    pinata_api_key: process.env.PINATA_API_KEY,
    pinata_secret_api_key: process.env.PINATA_SECRET_KEY,
  };

  try {
    const res = await axios.post(endpoint, memoryData, { headers });
    return res.data;
  } catch (error) {
    console.error(error);
    throw error;
  }
}
main(); |
<gh_stars>1-10
// Siesta UI test: types credentials into the login form, clicks the login
// button while already waiting for the resulting page reload, then checks
// the authentication result on the reloaded page.
StartTest(function(t) {
    // Running in the 'top' page scope. Get the local variables from the test.
    var Ext = t.Ext();
    var window = t.global;
    var document = window.document;

    t.chain(
        // Fill in the login form fields.
        { type : "CharlieJohnson", target : '>> #loginPanel textfield[fieldLabel=Login]' },
        { type : "secret", target : '>> #loginPanel textfield[fieldLabel=Password]' },

        // NOTE, that this code won't work (or will work unreliably, as it contains race condition):
        //         { click : '>> #loginPanel button' },
        //         { waitFor : 'PageLoad'}
        // It is because in Chrome page refresh may happen too fast (may be even synchronously),
        // and by the time the "waitForPageLoad" action will start, the page load event will already happen.
        // Because of that `waitForPageLoad` will wait indefinitely.
        // Need to start waiting first, and only then - click, we'll use "trigger" config of the `wait` action for that
        {
            waitFor : 'PageLoad',
            trigger : {
                click : '>> #loginPanel button'
            }
        },

        // The complex "waitFor" action above can be written with the function step as well:
        //         function (next) {
        //             t.waitForPageLoad(next)
        //
        //             t.click('>> #loginPanel button', function () {})
        //         },

        // Runs on the freshly loaded page; `window`/`Ext` are re-supplied by
        // the harness because the old page references are stale after reload.
        function (next, window, Ext) {
            var panel = Ext.getCmp('authResult')

            t.is(panel.authResult, 'success', 'Correct authentication result');
            t.done();
        }
    )
})
#!/usr/bin/env bash
# Exit immediately if a pipeline, which may consist of a single simple command,
# a list, or a compound command returns a non-zero status
set -e

# Installation constants: which yEd release to download, where to install it,
# and the MIME type registered for .graphml files.
readonly MONIKER=yed
readonly VERSION=3.21.1
readonly STUFF=yEd-$VERSION.zip
readonly TARGET_DIR=$HOME/programs/$MONIKER
readonly START_SCRIPT=$TARGET_DIR/start-$MONIKER.sh
readonly MIME_TYPE=application/graphml
# Write a small launcher script that forwards its arguments to the yEd jar.
# Paths are quoted so the launcher still works if $TARGET_DIR (derived from
# $HOME) contains spaces.
create_start_script() {
    echo "java -jar \"$TARGET_DIR/yed.jar\" \"\$@\"" > "$START_SCRIPT"
    chmod +x "$START_SCRIPT"
}
create_desktop_entry() { # https://specifications.freedesktop.org/desktop-entry-spec/desktop-entry-spec-latest.html
    # Ensure the per-user applications directory exists before writing, and
    # quote the target path in case $HOME contains spaces.
    mkdir --parents "$HOME/.local/share/applications"
    echo "[Desktop Entry]
Type=Application
Categories=Office;
Name=yEd
Comment=
Icon=$TARGET_DIR/icons/yed48.png
Exec=$START_SCRIPT %u
Terminal=false" > "$HOME/.local/share/applications/$MONIKER.desktop"
}
# Write a shared-mime-info definition for GraphML into the current directory
# (the temporary download dir at call time) and register it with xdg-mime.
install_mime_type() {
    echo '<?xml version="1.0"?>
<mime-info xmlns="http://www.freedesktop.org/standards/shared-mime-info">
  <mime-type type="'"$MIME_TYPE"'">
    <comment>GraphML</comment>
    <glob pattern="*.graphml"/>
    <sub-class-of type="application/xml"/>
    <icon name="'"$MONIKER"'"/>
  </mime-type>
</mime-info>' > "$MONIKER-mime.xml"
    xdg-mime install "$MONIKER-mime.xml"
}
# Register the yEd icon for the GraphML MIME type (quoted against spaces
# in $TARGET_DIR).
install_mime_icon() {
    xdg-icon-resource install --context mimetypes --size 48 "$TARGET_DIR/icons/yed48.png" "$MONIKER"
}
# Make the yEd desktop entry the default handler for the GraphML MIME type.
register_mime_handler() {
    xdg-mime default $MONIKER.desktop $MIME_TYPE
}
# Refuse to overwrite an existing installation.
if [ -d "$TARGET_DIR" ]; then
    echo "Directory exists: $TARGET_DIR" >&2
    exit 1
fi
mkdir --parents "$TARGET_DIR"
readonly TEMP_DIR=$(mktemp --directory -t delete-me-XXXXXXXXXX)
# Work in a subshell so the cd does not leak; set -e still aborts the whole
# script if anything inside fails. All expansions are quoted so paths with
# spaces (e.g. under $HOME) survive.
(
    cd "$TEMP_DIR"
    echo -n Downloading...
    wget --quiet "https://yworks.com/resources/yed/demo/$STUFF"
    echo done
    echo -n Extracting...
    unzip -qq "$STUFF"
    echo done
    echo -n Installing...
    mv --force "yed-$VERSION"/* "$TARGET_DIR"
    create_start_script
    create_desktop_entry
    install_mime_type
    install_mime_icon
    register_mime_handler
    echo done
)
rm --recursive --force "$TEMP_DIR"
|
# frozen_string_literal: true
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
require "helper"
require "gapic/grpc/service_stub"
require "google/cloud/data_labeling/v1beta1/data_labeling_service"
# Path-helper tests for the DataLabelingService client: each test constructs
# a client over a stubbed transport and asserts that a *_path helper renders
# the expected resource-name template. (File is gapic-generated; see the
# "DO NOT EDIT" header above.)
class ::Google::Cloud::DataLabeling::V1beta1::DataLabelingService::ClientPathsTest < Minitest::Test
  def test_annotated_dataset_path
    # A dummy insecure channel suffices: stubbing ServiceStub.new to nil
    # prevents any real RPC plumbing from being built.
    grpc_channel = ::GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
    ::Gapic::ServiceStub.stub :new, nil do
      client = ::Google::Cloud::DataLabeling::V1beta1::DataLabelingService::Client.new do |config|
        config.credentials = grpc_channel
      end

      path = client.annotated_dataset_path project: "value0", dataset: "value1", annotated_dataset: "value2"
      assert_equal "projects/value0/datasets/value1/annotatedDatasets/value2", path
    end
  end

  def test_annotation_spec_set_path
    grpc_channel = ::GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
    ::Gapic::ServiceStub.stub :new, nil do
      client = ::Google::Cloud::DataLabeling::V1beta1::DataLabelingService::Client.new do |config|
        config.credentials = grpc_channel
      end

      path = client.annotation_spec_set_path project: "value0", annotation_spec_set: "value1"
      assert_equal "projects/value0/annotationSpecSets/value1", path
    end
  end

  def test_data_item_path
    grpc_channel = ::GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
    ::Gapic::ServiceStub.stub :new, nil do
      client = ::Google::Cloud::DataLabeling::V1beta1::DataLabelingService::Client.new do |config|
        config.credentials = grpc_channel
      end

      path = client.data_item_path project: "value0", dataset: "value1", data_item: "value2"
      assert_equal "projects/value0/datasets/value1/dataItems/value2", path
    end
  end

  def test_dataset_path
    grpc_channel = ::GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
    ::Gapic::ServiceStub.stub :new, nil do
      client = ::Google::Cloud::DataLabeling::V1beta1::DataLabelingService::Client.new do |config|
        config.credentials = grpc_channel
      end

      path = client.dataset_path project: "value0", dataset: "value1"
      assert_equal "projects/value0/datasets/value1", path
    end
  end

  def test_evaluation_path
    grpc_channel = ::GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
    ::Gapic::ServiceStub.stub :new, nil do
      client = ::Google::Cloud::DataLabeling::V1beta1::DataLabelingService::Client.new do |config|
        config.credentials = grpc_channel
      end

      path = client.evaluation_path project: "value0", dataset: "value1", evaluation: "value2"
      assert_equal "projects/value0/datasets/value1/evaluations/value2", path
    end
  end

  def test_evaluation_job_path
    grpc_channel = ::GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
    ::Gapic::ServiceStub.stub :new, nil do
      client = ::Google::Cloud::DataLabeling::V1beta1::DataLabelingService::Client.new do |config|
        config.credentials = grpc_channel
      end

      path = client.evaluation_job_path project: "value0", evaluation_job: "value1"
      assert_equal "projects/value0/evaluationJobs/value1", path
    end
  end

  def test_example_path
    grpc_channel = ::GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
    ::Gapic::ServiceStub.stub :new, nil do
      client = ::Google::Cloud::DataLabeling::V1beta1::DataLabelingService::Client.new do |config|
        config.credentials = grpc_channel
      end

      path = client.example_path project: "value0", dataset: "value1", annotated_dataset: "value2", example: "value3"
      assert_equal "projects/value0/datasets/value1/annotatedDatasets/value2/examples/value3", path
    end
  end

  def test_instruction_path
    grpc_channel = ::GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
    ::Gapic::ServiceStub.stub :new, nil do
      client = ::Google::Cloud::DataLabeling::V1beta1::DataLabelingService::Client.new do |config|
        config.credentials = grpc_channel
      end

      path = client.instruction_path project: "value0", instruction: "value1"
      assert_equal "projects/value0/instructions/value1", path
    end
  end

  def test_project_path
    grpc_channel = ::GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
    ::Gapic::ServiceStub.stub :new, nil do
      client = ::Google::Cloud::DataLabeling::V1beta1::DataLabelingService::Client.new do |config|
        config.credentials = grpc_channel
      end

      path = client.project_path project: "value0"
      assert_equal "projects/value0", path
    end
  end
end
|
#!/bin/bash
set -e

# Replace the shell with the JVM so the server process receives signals
# (e.g. SIGTERM from a container runtime) directly and the script's exit
# status is the JVM's own. The option variables are intentionally unquoted
# so multiple flags in one variable word-split as separate arguments.
exec java ${JAVA_MEMORY_OPTIONS} ${JAVA_OPTIONS} -jar /opt/minecraft/minecraft_server.jar
|
<reponame>FAU-SWARM/website<filename>src/app/components/data/intelligent/intelligent.component.ts
import { Component, OnInit } from '@angular/core';
// Presentational component for the "intelligent" data section. The template
// and styles live alongside this file; no inputs, outputs, or injected
// dependencies are declared here.
@Component({
  selector: 'app-intelligent',
  templateUrl: './intelligent.component.html',
  styleUrls: ['./intelligent.component.scss']
})
export class IntelligentComponent implements OnInit {

  // No injected dependencies.
  constructor() { }

  // Angular lifecycle hook; intentionally empty.
  ngOnInit() {
  }

}
|
<reponame>alex-kar/andhow
package org.yarnandtail.andhow.property;
import org.yarnandtail.andhow.api.*;
import org.yarnandtail.andhow.valid.BigDecValidator;
import org.yarnandtail.andhow.valuetype.BigDecType;
import java.math.BigDecimal;
import java.util.List;
/**
* A Property that refers to a BigDecimal value.
*
* By default this uses the TrimToNullTrimmer, which removes all whitespace from
* the value and ultimately null if the value is all whitespace. The String
* constructor version is used when creating instances of BigDecimal.
*
* @author chace86
*/
public class BigDecProp extends PropertyBase<BigDecimal> {

    /**
     * Construct an instance of BigDecProp.
     *
     * @param defaultValue default value
     * @param required make the property required or not
     * @param shortDesc short description of the property
     * @param validators list of validators for the property
     * @param aliases aliases of the property
     * @param paramType property type
     * @param valueType property value type
     * @param trimmer trimmer associated with the property
     * @param helpText help text of the property
     */
    public BigDecProp(BigDecimal defaultValue, boolean required, String shortDesc, List<Validator<BigDecimal>> validators,
            List<Name> aliases, PropertyType paramType, ValueType<BigDecimal> valueType, Trimmer trimmer,
            String helpText) {
        super(defaultValue, required, shortDesc, validators, aliases, paramType, valueType, trimmer, helpText);
    }

    /**
     * Return an instance of BigDecBuilder.
     *
     * @return a new builder pre-configured for BigDecimal properties
     */
    public static BigDecBuilder builder() {
        return new BigDecBuilder();
    }

    /**
     * Build a BigDecProp.
     */
    public static class BigDecBuilder extends PropertyBuilderBase<BigDecBuilder, BigDecProp, BigDecimal> {

        /**
         * Construct an instance of BigDecBuilder.
         * Pre-configures the BigDecimal value type and the trim-to-null trimmer.
         */
        public BigDecBuilder() {
            instance = this;
            valueType(BigDecType.instance());
            trimmer(TrimToNullTrimmer.instance());
        }

        /**
         * Builds the property from the builder's accumulated state.
         */
        @Override
        public BigDecProp build() {
            return new BigDecProp(_defaultValue, _nonNull, _desc, _validators,
                    _aliases, PropertyType.SINGLE_NAME_VALUE, _valueType, _trimmer, _helpText);
        }

        /**
         * The property must be greater than the reference
         * @param reference value the property must be greater than
         * @return the builder instance
         */
        public BigDecBuilder mustBeGreaterThan(BigDecimal reference) {
            validation(new BigDecValidator.GreaterThan(reference));
            return instance;
        }

        /**
         * The property must be greater than or equal to the reference
         * @param reference value the property must be greater than or equal to
         * @return the builder instance
         */
        public BigDecBuilder mustBeGreaterThanOrEqualTo(BigDecimal reference) {
            validation(new BigDecValidator.GreaterThanOrEqualTo(reference));
            return instance;
        }

        /**
         * The property must be less than the reference
         * @param reference value the property must be less than
         * @return the builder instance
         */
        public BigDecBuilder mustBeLessThan(BigDecimal reference) {
            validation(new BigDecValidator.LessThan(reference));
            return instance;
        }

        /**
         * The property must be less than or equal to the reference
         * @param reference value the property must be less than or equal to
         * @return the builder instance
         */
        public BigDecBuilder mustBeLessThanOrEqualTo(BigDecimal reference) {
            validation(new BigDecValidator.LessThanOrEqualTo(reference));
            return instance;
        }
    }
}
|
package fr.syncrase.ecosyst.service.criteria;
import java.io.Serializable;
import java.util.Objects;
import tech.jhipster.service.Criteria;
import tech.jhipster.service.filter.BooleanFilter;
import tech.jhipster.service.filter.DoubleFilter;
import tech.jhipster.service.filter.Filter;
import tech.jhipster.service.filter.FloatFilter;
import tech.jhipster.service.filter.IntegerFilter;
import tech.jhipster.service.filter.LongFilter;
import tech.jhipster.service.filter.StringFilter;
/**
* Criteria class for the {@link fr.syncrase.ecosyst.domain.NomVernaculaire} entity. This class is used
* in {@link fr.syncrase.ecosyst.web.rest.NomVernaculaireResource} to receive all the possible filtering options from
* the Http GET request parameters.
* For example the following could be a valid request:
* {@code /nom-vernaculaires?id.greaterThan=5&attr1.contains=something&attr2.specified=false}
* As Spring is unable to properly convert the types, unless specific {@link Filter} class are used, we need to use
* fix type specific filters.
*/
public class NomVernaculaireCriteria implements Serializable, Criteria {

    private static final long serialVersionUID = 1L;

    // Per-attribute filters; each stays null until configured (or lazily
    // created by its fluent accessor below).
    private LongFilter id;

    private StringFilter nom;

    private StringFilter description;

    private LongFilter plantesId;

    // Presumably requests DISTINCT results from the query service to avoid
    // duplicates introduced by joins -- confirm in NomVernaculaireQueryService.
    private Boolean distinct;

    public NomVernaculaireCriteria() {}

    /**
     * Copy constructor: deep-copies every filter so the copy can be mutated
     * independently of the original.
     */
    public NomVernaculaireCriteria(NomVernaculaireCriteria other) {
        this.id = other.id == null ? null : other.id.copy();
        this.nom = other.nom == null ? null : other.nom.copy();
        this.description = other.description == null ? null : other.description.copy();
        this.plantesId = other.plantesId == null ? null : other.plantesId.copy();
        this.distinct = other.distinct;
    }

    @Override
    public NomVernaculaireCriteria copy() {
        return new NomVernaculaireCriteria(this);
    }

    public LongFilter getId() {
        return id;
    }

    // Fluent accessor: lazily creates the filter so callers can chain
    // criteria building without null checks.
    public LongFilter id() {
        if (id == null) {
            id = new LongFilter();
        }
        return id;
    }

    public void setId(LongFilter id) {
        this.id = id;
    }

    public StringFilter getNom() {
        return nom;
    }

    // Fluent accessor: lazily creates the filter.
    public StringFilter nom() {
        if (nom == null) {
            nom = new StringFilter();
        }
        return nom;
    }

    public void setNom(StringFilter nom) {
        this.nom = nom;
    }

    public StringFilter getDescription() {
        return description;
    }

    // Fluent accessor: lazily creates the filter.
    public StringFilter description() {
        if (description == null) {
            description = new StringFilter();
        }
        return description;
    }

    public void setDescription(StringFilter description) {
        this.description = description;
    }

    public LongFilter getPlantesId() {
        return plantesId;
    }

    // Fluent accessor: lazily creates the filter.
    public LongFilter plantesId() {
        if (plantesId == null) {
            plantesId = new LongFilter();
        }
        return plantesId;
    }

    public void setPlantesId(LongFilter plantesId) {
        this.plantesId = plantesId;
    }

    public Boolean getDistinct() {
        return distinct;
    }

    public void setDistinct(Boolean distinct) {
        this.distinct = distinct;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        final NomVernaculaireCriteria that = (NomVernaculaireCriteria) o;
        return (
            Objects.equals(id, that.id) &&
            Objects.equals(nom, that.nom) &&
            Objects.equals(description, that.description) &&
            Objects.equals(plantesId, that.plantesId) &&
            Objects.equals(distinct, that.distinct)
        );
    }

    @Override
    public int hashCode() {
        return Objects.hash(id, nom, description, plantesId, distinct);
    }

    // prettier-ignore
    @Override
    public String toString() {
        return "NomVernaculaireCriteria{" +
            (id != null ? "id=" + id + ", " : "") +
            (nom != null ? "nom=" + nom + ", " : "") +
            (description != null ? "description=" + description + ", " : "") +
            (plantesId != null ? "plantesId=" + plantesId + ", " : "") +
            (distinct != null ? "distinct=" + distinct + ", " : "") +
            "}";
    }
}
|
package httpinfo_test
import (
"fmt"
"net/http"
"net/http/httptest"
"github.com/krostar/httpinfo"
)
// myMiddleware wraps next and, once the request has been served, prints the
// recorded status code and number of bytes written -- but only when httpinfo
// recording is active for this request.
func myMiddleware(next http.Handler) http.HandlerFunc {
	return func(w http.ResponseWriter, req *http.Request) {
		next.ServeHTTP(w, req)

		if !httpinfo.IsUsed(req) {
			return
		}
		fmt.Printf("status = %d\n", httpinfo.Status(req))
		fmt.Printf("bytes wrote = %d\n", httpinfo.ContentLength(req))
	}
}
// myHandler writes a fixed body with a 208 Already Reported status.
// The status is set before the first Write: once Write is called the header
// is committed (implicitly as 200) and any later WriteHeader is a no-op.
func myHandler(rw http.ResponseWriter, _ *http.Request) {
	rw.WriteHeader(http.StatusAlreadyReported)
	rw.Write([]byte("Hello world")) // nolint: errcheck, gosec
}
// Example wires httpinfo.Record around a middleware chain so the middleware
// can read the recorded status and byte count after the handler runs.
// The "Output" comment below is checked by `go test` -- keep it in sync.
func Example() {
	var srv = httptest.NewServer(
		httpinfo.Record()(
			myMiddleware(http.HandlerFunc(myHandler)),
		),
	)
	defer srv.Close()

	resp, err := http.DefaultClient.Get(srv.URL)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close() // nolint: errcheck, gosec

	// Output:
	// status = 208
	// bytes wrote = 11
}
|
#!/bin/bash
# Configure server.properties from environment variables
# (KAFKA_ZOOKEEPER_CONNECT, KAFKA_BROKER_ID, KAFKA_ADVERTISED_LISTENERS)
# and then start the Kafka broker.
export SCALA_VERSION="2.13"
export KAFKA_VERSION="2.7.0"
export KAFKA_HOME=/opt/kafka_$SCALA_VERSION-$KAFKA_VERSION

# Kafka Environment Configuration ########
cd "$KAFKA_HOME/config"

# All substitutions use '|' as the sed delimiter so values containing '/'
# (listener URLs like PLAINTEXT://host:9092, or a zookeeper chroot path)
# need no escaping.

# set zookeeper.connect ===========
FIND="^zookeeper.connect=.*$"
REPLACE="zookeeper.connect=${KAFKA_ZOOKEEPER_CONNECT}"
sed -i "s|${FIND}|${REPLACE}|" server.properties

# set broker.id ===================
FIND="^broker.id=[[:digit:]]*$"
REPLACE="broker.id=${KAFKA_BROKER_ID}"
sed -i "s|${FIND}|${REPLACE}|" server.properties

# set advertised.listeners ========
FIND="^#\{0,1\}advertised.listeners=.*$"
REPLACE="advertised.listeners=${KAFKA_ADVERTISED_LISTENERS}"
sed -i "s|${FIND}|${REPLACE}|" server.properties

# Start Kafka ############################
"$KAFKA_HOME/bin/kafka-server-start.sh" "$KAFKA_HOME/config/server.properties"
|
def primeFactorization(m):
    """Return the prime factors of m in non-decreasing order.

    For m < 2 there are no prime factors, so an empty list is returned.
    This also guards against the infinite loop the 2-division loop would
    hit for m == 0 (0 % 2 == 0 and 0 // 2 == 0 forever).
    """
    if m < 2:
        return []
    factors = []
    # Divide out all factors of 2 first so only odd candidates remain.
    while m % 2 == 0:
        factors.append(2)
        m //= 2
    # Trial-divide by odd candidates. Re-testing i*i <= m (instead of a
    # fixed bound from float m**0.5) shrinks as m shrinks and stays exact
    # for arbitrarily large integers, where the float square root can be off.
    i = 3
    while i * i <= m:
        while m % i == 0:
            factors.append(i)
            m //= i
        i += 2
    # Whatever remains greater than 1 is itself prime.
    if m > 1:
        factors.append(m)
    return factors
package com.hookedroid.chromecastdemo.provider;
import android.content.Context;
import com.google.android.gms.cast.framework.CastOptions;
import com.google.android.gms.cast.framework.OptionsProvider;
import com.google.android.gms.cast.framework.SessionProvider;
import java.util.List;
/**
 * OptionsProvider implementation that supplies the Cast framework with this
 * app's cast configuration (a fixed receiver application id).
 */
public class CastOptionsProvider implements OptionsProvider {

    /**
     * Builds the cast options with the hard-coded receiver application id.
     */
    @Override
    public CastOptions getCastOptions(Context appContext) {
        return new CastOptions.Builder()
                .setReceiverApplicationId("72116364")
                .build();
    }

    /**
     * No additional session providers are supplied.
     */
    @Override
    public List<SessionProvider> getAdditionalSessionProviders(Context context) {
        return null;
    }
}
#!/bin/bash
# Run the Go test suite for every non-vendored package: a race pass on 1 and
# 4 CPUs, then a coverage pass producing cover.out per package.
shopt -s extglob
home=$(pwd)

# If we're not under a "src" directory we're (probably) on the CI server.
# export GOPATH and cd to the package directory derived from the git remote.
if [[ $home != *"src"* ]]; then
    export GOPATH=${home}
    dir=$(git config --get remote.origin.url)
    dir=${dir#http://}   # remove leading http://
    dir=${dir#https://}  # remove leading https://
    dir=${dir%.git}      # remove trailing .git
    dir="src/${dir}"     # add src/ prefix
    cd "${dir}"
fi

DIRS=$(go list ./... | grep -v '\/vendor\/')
# Pass the package list as data, not as part of the printf format string,
# so a stray '%' in a path cannot corrupt the output.
printf "\nGo dirs:\n%s\n\n" "${DIRS}"
if [[ -z $DIRS ]]; then
    echo "No Go dirs found."
    exit 255
fi

for dir in $DIRS; do
    cd "$GOPATH/src/${dir}"
    echo "Running tests for ${dir}..."
    # Remove any stale coverage profile (rm -f is a no-op when absent).
    rm -f cover.out

    echo "go test -v -timeout 3m -race -vet off -cpu 1"
    if ! go test -v -timeout 3m -race -vet off -cpu 1; then
        exit 255
    fi

    echo "go test -v -timeout 3m -race -vet off -cpu 4"
    if ! go test -v -timeout 3m -race -vet off -cpu 4; then
        exit 255
    fi

    echo "go test -v -timeout 3m -coverprofile cover.out"
    if ! go test -v -timeout 3m -coverprofile cover.out; then
        exit 255
    fi
    printf "\n"
done

echo "Success."
|
<gh_stars>0
package epizza.order;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.domain.EntityScan;
import org.springframework.boot.web.servlet.FilterRegistrationBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Primary;
import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
import org.springframework.data.web.config.EnableSpringDataWebSupport;
import org.springframework.hateoas.config.EnableHypermediaSupport;
import org.springframework.hateoas.config.EnableHypermediaSupport.HypermediaType;
import org.springframework.validation.Validator;
import org.springframework.validation.beanvalidation.LocalValidatorFactoryBean;
import org.springframework.web.cors.CorsConfiguration;
import org.springframework.web.cors.UrlBasedCorsConfigurationSource;
import org.springframework.web.filter.CorsFilter;
@EntityScan
@EnableJpaRepositories
@EnableSpringDataWebSupport
@SpringBootApplication
@EnableHypermediaSupport(type = HypermediaType.HAL)
public class OrderApplication {

    /**
     * Boots the Spring application context.
     */
    public static void main(String[] args) {
        SpringApplication.run(OrderApplication.class, args);
    }

    /**
     * Exposes the JSR-303 bean validator as the primary {@link Validator}.
     */
    @Bean
    @Primary
    public Validator validator() {
        return new LocalValidatorFactoryBean();
    }

    /**
     * Registers a permissive CORS filter for all paths, ordered ahead of the
     * other servlet filters so preflight requests are answered early.
     */
    @Bean
    public FilterRegistrationBean corsFilter() {
        // http://stackoverflow.com/questions/31724994/spring-data-rest-and-cors
        CorsConfiguration config = new CorsConfiguration();
        config.setAllowCredentials(true);
        // NOTE(review): wildcard origin combined with allowCredentials(true)
        // is rejected by newer Spring versions and is a very broad CORS
        // surface -- confirm this is intentional for this deployment.
        config.addAllowedOrigin("*");
        config.addAllowedHeader("*");
        config.addAllowedMethod("OPTIONS");
        config.addAllowedMethod("HEAD");
        config.addAllowedMethod("GET");
        config.addAllowedMethod("PUT");
        config.addAllowedMethod("POST");
        config.addAllowedMethod("DELETE");
        config.addAllowedMethod("PATCH");
        // Expose Location so browser clients can read created-resource URIs.
        config.addExposedHeader("Location");
        UrlBasedCorsConfigurationSource source = new UrlBasedCorsConfigurationSource();
        source.registerCorsConfiguration("/**", config);
        FilterRegistrationBean corsFilter = new FilterRegistrationBean(new CorsFilter(source));
        corsFilter.setOrder(0);
        return corsFilter;
    }
}
|
require 'thor'
require 'pathname'
require 'yaml'
# Praxis application generator
#
# Generates all files required to run a simple praxis app.
#
class PraxisAppGenerator < Thor
  include Thor::Actions

  # Sanitized application name (set by #generate).
  attr_reader :app_name

  namespace 'praxis'

  desc "generate 'app-name'", "Generates a new PRAXIS application"
  # Generates a new praxis app in the current directory
  #
  # @param [String] name
  #
  # @return [void]
  #
  # @example
  #   # Using thor task
  #   > bundle exec thor generate my_test_app
  #
  # @example
  #   # Using 'praxis' file saved into '/usr/bin'
  #   > praxis generate my_test_app
  #
  def generate(app_name)
    # Fix weird symbols in the app name (if they are)
    @app_name = app_name.downcase.gsub(/[^a-z0-9_\/.]/, '')
    # Generate a new app
    empty_directory path('app')
    empty_directory path('design')
    empty_directory path('lib')
    empty_directory path('spec')
    generate_config_environment_rb
    generate_gemfile
    generate_rakefile
    generate_config_ru
    generate_app_definitions_hello_world
    generate_app_controllers_hello_world
    #
    # Show the user how to boot and exercise the generated example app.
    puts
    puts "To run the example application:"
    puts
    puts " # terminal 1:"
    puts " cd #{app_name}"
    puts " bundle"
    puts " rackup -p 8888"
    puts
    puts " # terminal 2:"
    puts " curl -i http://localhost:8888/api/hello -H 'X-Api-Version: 1.0' -X GET # Index"
    puts " curl -i http://localhost:8888/api/hello/2 -H 'X-Api-Version: 1.0' -X GET # Show"
    puts " curl -i http://localhost:8888/api/hello/2 -H 'X-Api-Version: 2.0' -X GET # NotFound Error"
    nil
  end

  private

  # Returns relative path for the new application
  #
  # @return [String]
  #
  # @example
  #   # > /praxis generate My-test_praxisApp
  #   app_dir_pathname #=> 'mytest_praxisapp'
  #
  #
  def app_dir_pathname
    @app_dir_pathname ||= Pathname.new(app_name)
  end

  # Returns path string built from the set of the given strings
  #
  # @param [String,Array] strings
  #
  # @return [String]
  #
  # @example
  #   path('a', 'b', 'c') #=> 'my_test_app/a/b/c'
  #
  def path(*strings)
    app_dir_pathname.join(*strings).to_s
  end

  # Creates './config/environment.rb' file
  #
  # @return [void]
  #
  def generate_config_environment_rb
    create_file path('config/environment.rb') do
      <<-RUBY
# Main entry point - DO NOT MODIFY THIS FILE
ENV['RACK_ENV'] ||= 'development'
Bundler.require(:default, ENV['RACK_ENV'])
# Default application layout.
# NOTE: This layout need NOT be specified explicitly.
# It is provided just for illustration.
Praxis::Application.instance.layout do
map :initializers, 'config/initializers/**/*'
map :lib, 'lib/**/*'
map :design, 'design/' do
map :api, 'api.rb'
map :media_types, '**/media_types/**/*'
map :resources, '**/resources/**/*'
end
map :app, 'app/' do
map :models, 'models/**/*'
map :controllers, '**/controllers/**/*'
map :responses, '**/responses/**/*'
end
end
      RUBY
    end
    nil
  end

  # Creates './Gemfile' file
  #
  # @return [void]
  #
  def generate_gemfile
    create_file path('Gemfile') do
      <<-RUBY
source 'https://rubygems.org'
gem 'praxis'
gem 'rack', '~> 1.0'
gem 'rake'
group :development, :test do
gem 'rspec'
end
      RUBY
    end
    nil
  end

  # Creates './Rakefile' file
  #
  # @return [void]
  #
  def generate_rakefile
    create_file path('Rakefile') do
      <<-RUBY
require 'praxis'
require 'praxis/tasks'
      RUBY
    end
    nil
  end

  # Creates './config.ru' file
  #
  # @return [void]
  #
  def generate_config_ru
    create_file path('config.ru') do
      <<-RUBY
#\ -p 8888
require 'bundler/setup'
require 'praxis'
application = Praxis::Application.instance
application.logger = Logger.new(STDOUT)
application.setup
run application
      RUBY
    end
    nil
  end

  # Creates the example design files: './design/api.rb' plus the Hello
  # resource and media-type definitions.
  #
  # @return [void]
  #
  def generate_app_definitions_hello_world
    create_file path('design/api.rb') do
      <<-RUBY
# Use this file to define your response templates and traits.
#
# For example, to define a response template:
#   response_template :custom do |media_type:|
#     status 200
#     media_type media_type
#   end
#
# And to define a trait:
#   trait :versionable do
#     headers do
#       header :X_Api_Version, String, values: ['1.0'], required: true
#     end
#   end
Praxis::ApiDefinition.define do
end
      RUBY
    end

    create_file path('design/resources/hello.rb') do
      <<-RUBY
module V1
module ApiResources
class Hello
include Praxis::ResourceDefinition
media_type V1::MediaTypes::Hello
version '1.0'
routing do
prefix '/api/hello'
end
action :index do
use :versionable
routing do
get ''
end
response :ok
end
action :show do
use :versionable
routing do
get '/:id'
end
params do
attribute :id, Integer, required: true, min: 0
end
response :ok
end
end
end
end
      RUBY
    end

    create_file path('design/media_types/hello.rb') do
      <<-RUBY
module V1
module MediaTypes
class Hello < Praxis::MediaType
identifier 'application/json'
attributes do
attribute :string, String
end
view :default do
attribute :string
end
end
end
end
      RUBY
    end
  end

  # Creates './app/controllers/hello.rb' with the example Hello controller.
  #
  # @return [void]
  #
  def generate_app_controllers_hello_world
    create_file path('app/controllers/hello.rb') do
      <<-RUBY
module V1
class Hello
include Praxis::Controller
implements V1::ApiResources::Hello
HELLO_WORLD = [ 'Hello world!', 'Привет мир!', 'Hola mundo!', '你好世界!', 'こんにちは世界!' ]
def index(**params)
response.headers['Content-Type'] = 'application/json'
response.body = HELLO_WORLD.to_json
response
end
def show(id:, **other_params)
hello = HELLO_WORLD[id]
if hello
response.body = { id: id, data: hello }
else
response.status = 404
response.body = { error: '404: Not found' }
end
response.headers['Content-Type'] = 'application/json'
response
end
end
end
      RUBY
    end
  end
end
|
// Connect to the database once at startup; the handle is reused by the
// order handler below.
let db = connectToDB();

// Handler for incoming orders: parses the free-text order string into
// items and persists them using the shared database handle.
let handleOrder = (order) => {
  // parse user order (e.g. "2x Apple, 1x Banana")
  let items = parseOrder(order);
  // store user order in the database
  storeItems(items, db);
};

// Call the handler with a user-defined order
handleOrder('2x Apple, 1x Banana');
-- Recreate the student table from scratch with an auto-incrementing id,
-- then seed a single test account.
DROP TABLE IF EXISTS student;

CREATE TABLE student (
    student_id INTEGER NOT NULL PRIMARY KEY AUTO_INCREMENT,
    login VARCHAR(80) NOT NULL,
    -- NOTE(review): `pass` appears to hold the password in plaintext;
    -- confirm whether the application hashes it before insert.
    pass VARCHAR(80) NOT NULL
) ENGINE=INNODB;

INSERT INTO student(login, pass) VALUES('test','test');
|
<reponame>KnisterPeter/smaller-node-builder
package de.matrixweb.smaller.maven.plugin.node;
/**
* @author markusw
*/
/**
 * Simple logging abstraction with info and debug levels, so callers do not
 * depend on a concrete logging framework.
 *
 * @author markusw
 */
public interface Logger {

  /**
   * Logs a message at info level.
   *
   * @param message the text to log
   */
  void info(String message);

  /**
   * Logs a message at debug level.
   *
   * @param message the text to log
   */
  void debug(String message);

}
|
<filename>src/icons/legacy/PinterestSquare.tsx
// Generated by script, don't edit it please.
import createSvgIcon from '../../createSvgIcon';
import PinterestSquareSvg from '@rsuite/icon-font/lib/legacy/PinterestSquare';
// Icon component wrapping the legacy "pinterest square" SVG glyph.
// (File is script-generated -- see the header comment above.)
const PinterestSquare = createSvgIcon({
  as: PinterestSquareSvg,
  ariaLabel: 'pinterest square',
  category: 'legacy',
  displayName: 'PinterestSquare'
});

export default PinterestSquare;
|
<gh_stars>10-100
import {getInitials} from "constants/name";
// Unit tests for getInitials.
// NOTE(review): several inputs appear as "<NAME>" placeholders -- these look
// like redacted fixtures; restore real sample names before relying on the
// space/special-character cases.
describe("initials", () => {
  test("name without spaces", () => {
    expect(getInitials("andiKandi")).toEqual("an");
  });

  test("name with one space", () => {
    expect(getInitials("<NAME>")).toEqual("aK");
  });

  test("name with multiple spaces", () => {
    expect(getInitials("<NAME>")).toEqual("aK");
  });

  test("name with special characters", () => {
    expect(getInitials("<NAME>")).toEqual("JB");
  });
});
|
# Capture the output of `w` (logged-in users and load summary) and print
# only its first 80 characters. The expansion is quoted so embedded
# newlines and runs of spaces are preserved instead of being word-split.
result=$(w)
echo "${result:0:80}"
#pragma once
#include <afxwin.h>
#include <afxdialogex.h>
#include <functional>
// Dialog base class that owns a background simulation worker thread and
// marshals function objects onto the UI thread via a custom window message.
class CSimulationDialog : public CDialogEx
{
public:
    CSimulationDialog(UINT nIDTemplate, CWnd* pParent);
protected:
    // Handler for the cross-thread "invoke" message; runs the function
    // object posted by Invoke() on the UI thread.
    virtual afx_msg LRESULT OnInvoke(WPARAM wParam, LPARAM lParam);
    DECLARE_MESSAGE_MAP()
public:
    CWinThread * m_pWorkerThread;  // background simulation thread
    volatile BOOL m_bWorking;      // flag telling the worker loop to keep running
    // Queue fn for execution on the UI thread (presumably delivered through
    // OnInvoke -- confirm in the implementation file).
    virtual void Invoke(const std::function < void () > & fn);
    virtual void StartSimulationThread();
    virtual void StopSimulationThread();
    virtual void SuspendSimulationThread();
    virtual void ResumeSimulationThread();
    virtual BOOL DestroyWindow();
    // Worker-thread body; override to implement the simulation loop.
    virtual void OnSimulation();
};
|
<reponame>cugg/BusinessParameters<filename>parameters-backend/parameters-backend-inmemory/src/main/java/be/kwakeroni/evelyn/storage/StorageProvider.java
package be.kwakeroni.evelyn.storage;
/**
 * Factory and lookup facade for named {@link Storage} instances.
 * (Interface methods are implicitly public, so the redundant modifiers
 * were dropped.)
 */
public interface StorageProvider {

  /**
   * Creates a new storage under the given name.
   *
   * @param name the unique storage name
   * @return the newly created storage
   * @throws StorageExistsException if a storage with that name already exists
   */
  Storage create(String name) throws StorageExistsException;

  /**
   * Returns the storage registered under the given name.
   * Behaviour for an unknown name is implementation-defined -- confirm with
   * the concrete providers.
   *
   * @param name the storage name
   * @return the existing storage
   */
  Storage read(String name);

  /**
   * Indicates whether a storage with the given name exists.
   *
   * @param name the storage name
   * @return {@code true} if the storage exists
   */
  boolean exists(String name);

}
|
<filename>lib/systems/triphenylene.py
import pulsar as psr
def load_ref_system():
    """ Returns triphenylene as found in the IQMol fragment library.
        All credit to https://github.com/nutjunkie/IQmol

        The geometry below is an XYZ-style block (element symbol followed by
        Cartesian coordinates) passed verbatim to pulsar's system builder.
    """
    return psr.make_system("""
        C          1.23839        0.71468       -0.00000
        C          1.23839       -0.71468       -0.00000
        C         -0.00026        1.42982       -0.00000
        C          2.49296        1.39093        0.00000
        C         -0.00026       -1.42982       -0.00000
        C         -1.23813        0.71514       -0.00001
        C          2.49296       -1.39092        0.00000
        C         -0.04190        2.85443       -0.00000
        C          3.69209        0.69026        0.00000
        C          3.69209       -0.69025        0.00000
        C         -1.23812       -0.71513       -0.00001
        C         -0.04190       -2.85443       -0.00000
        C         -2.45105        1.46351       -0.00001
        C         -1.24826        3.54257       -0.00001
        C         -2.45105       -1.46350       -0.00001
        C         -2.44382        2.85232       -0.00001
        C         -1.24826       -3.54257       -0.00001
        C         -2.44382       -2.85231       -0.00001
        H          4.63206        1.22780        0.00000
        H          4.63206       -1.22779        0.00000
        H         -1.25273        4.62538       -0.00001
        H         -3.37933        3.39759       -0.00001
        H         -1.25273       -4.62538       -0.00001
        H         -3.37933       -3.39758       -0.00001
        H          2.58415        2.45972        0.00000
        H          2.58415       -2.45972        0.00000
        H          0.83811        3.46781       -0.00000
        H          0.83811       -3.46780       -0.00000
        H         -3.42226        1.00808       -0.00001
        H         -3.42226       -1.00808       -0.00001
        """)
|
<filename>vi/.vim/bundle/pencil/app/pencil-core/propertyType/color.js
/**
 * Simple RGBA color value. Channels r/g/b are 0-255 integers; a (alpha) is
 * a 0-1 float. A freshly constructed Color is opaque black.
 */
function Color() {
    this.r = 0;
    this.g = 0;
    this.b = 0;
    this.a = 1.0;
}
// "#RRGGBBAA" (alpha hex pair at the end)
Color.REG_EX = /^#([0-9A-F]{2,2})([0-9A-F]{2,2})([0-9A-F]{2,2})([0-9A-F]{2,2})$/i;
// "#RRGGBB"
Color.REG_EX_NO_ALPHA = /^#([0-9A-F]{2,2})([0-9A-F]{2,2})([0-9A-F]{2,2})$/i;
// "rgb(r, g, b)"
Color.REG_EX_RGB = /^rgb\(([0-9]+)\,[ ]*([0-9]+)\,[ ]*([0-9]+)\)$/i;
// "rgba(r, g, b, a)"
Color.REG_EX_RGBA = /^rgba\(([0-9]+)\,[ ]*([0-9]+)\,[ ]*([0-9]+)\,[ ]*([0-9\.]+)\)$/i;
Color.hexdig = '0123456789ABCDEF';
/**
 * Parses a color literal: "#RRGGBBAA", "#RRGGBB", "rgb(...)", "rgba(...)" or
 * "transparent". Falsy input defaults to opaque white ("#ffffffff");
 * unrecognized input yields opaque black (the Color() defaults).
 */
Color.fromString = function (literal) {
    var color = new Color();
    if (!literal) literal = "#ffffffff";
    if (literal.match(Color.REG_EX)) {
        color.r = parseInt(RegExp.$1, 16);
        color.g = parseInt(RegExp.$2, 16);
        color.b = parseInt(RegExp.$3, 16);
        color.a = parseInt(RegExp.$4, 16) / 255;
    } else if (literal.match(Color.REG_EX_NO_ALPHA)) {
        color.r = parseInt(RegExp.$1, 16);
        color.g = parseInt(RegExp.$2, 16);
        color.b = parseInt(RegExp.$3, 16);
        color.a = 1;
    } else if (literal.match(Color.REG_EX_RGBA)) {
        color.r = parseInt(RegExp.$1, 10);
        color.g = parseInt(RegExp.$2, 10);
        color.b = parseInt(RegExp.$3, 10);
        // FIX: parseFloat takes a single argument; stray radix arg and ";;" removed.
        color.a = parseFloat(RegExp.$4);
    } else if (literal.match(Color.REG_EX_RGB)) {
        color.r = parseInt(RegExp.$1, 10);
        color.g = parseInt(RegExp.$2, 10);
        color.b = parseInt(RegExp.$3, 10);
        color.a = 1;
    // FIX: chained with "else if" (was a separate "if" on the same line; no
    // literal can match both a regex above and equal "transparent", so
    // behavior is unchanged but the chain is now explicit).
    } else if (literal == "transparent") {
        color.r = 0;
        color.g = 0;
        color.b = 0;
        color.a = 0;
    }
    return color;
};
/**
 * Builds an opaque Color from HSV components (h: 0-360, s: 0-100, v: 0-100).
 */
Color.fromHSV = function (h, s, v) {
    var rgb = Color.HSV2RGB({hue: h, saturation: s, value: v});
    var color = new Color();
    color.r = rgb.r;
    color.g = rgb.g;
    color.b = rgb.b;
    color.a = 1;
    return color;
};
// 0-255 -> two uppercase hex digits.
Color.Dec2Hex = function(d) {
    return Color.hexdig.charAt((d-(d%16))/16)+Color.hexdig.charAt(d%16);
}
// Hex string -> integer.
Color.Hex2Dec = function(h) {
    return parseInt(h, 16);
}
// Three channels -> "RRGGBB" (no leading '#').
Color.RGB2Hex = function(r,g,b) {
    return Color.Dec2Hex(r) + Color.Dec2Hex(g) + Color.Dec2Hex(b);
}
// RGB2HSV and HSV2RGB are based on Color Match Remix [http://color.twysted.net/]
// which is based on or copied from ColorMatch 5K [http://colormatch.dk/]
/**
 * Converts {hue: 0-360, saturation: 0-100, value: 0-100} into an {r,g,b}
 * object with 0-255 channels.
 * FIX: no longer mutates the caller's hsv object - the original divided
 * hsv.hue/saturation/value in place; all normalization now uses locals.
 */
Color.HSV2RGB = function(hsv) {
    var rgb = new Object();
    if (hsv.saturation == 0) {
        rgb.r = rgb.g = rgb.b = Math.round(hsv.value * 2.55);
    } else {
        var h = hsv.hue / 60;
        var s = hsv.saturation / 100;
        var v = hsv.value / 100;
        var i = Math.floor(h);
        var f = h - i;
        var p = v * (1 - s);
        var q = v * (1 - s * f);
        var t = v * (1 - s * (1 - f));
        switch(i) {
            case 0: rgb.r=v; rgb.g=t; rgb.b=p; break;
            case 1: rgb.r=q; rgb.g=v; rgb.b=p; break;
            case 2: rgb.r=p; rgb.g=v; rgb.b=t; break;
            case 3: rgb.r=p; rgb.g=q; rgb.b=v; break;
            case 4: rgb.r=t; rgb.g=p; rgb.b=v; break;
            default: rgb.r=v; rgb.g=p; rgb.b=q;
        }
        rgb.r=Math.round(rgb.r*255);
        rgb.g=Math.round(rgb.g*255);
        rgb.b=Math.round(rgb.b*255);
    }
    return rgb;
}
Color.min3 = function(a,b,c) { return (a<b)?((a<c)?a:c):((b<c)?b:c); }
Color.max3 = function(a,b,c) { return (a>b)?((a>c)?a:c):((b>c)?b:c); }
/**
 * Converts an {r,g,b} object (0-255 channels) into {hue: 0-360,
 * saturation: 0-100, value: 0-100}; all components rounded to integers.
 */
Color.RGB2HSV = function(rgb) {
    var hsv = new Object();
    var max=Color.max3(rgb.r,rgb.g,rgb.b);
    var dif=max-Color.min3(rgb.r,rgb.g,rgb.b);
    hsv.saturation=(max==0.0)?0:(100*dif/max);
    if (hsv.saturation==0) hsv.hue=0;
    else if (rgb.r==max) hsv.hue=60.0*(rgb.g-rgb.b)/dif;
    else if (rgb.g==max) hsv.hue=120.0+60.0*(rgb.b-rgb.r)/dif;
    else if (rgb.b==max) hsv.hue=240.0+60.0*(rgb.r-rgb.g)/dif;
    if (hsv.hue<0.0) hsv.hue+=360.0;
    hsv.value=Math.round(max*100/255);
    hsv.hue=Math.round(hsv.hue);
    hsv.saturation=Math.round(hsv.saturation);
    return hsv;
}
// "#RRGGBBAA" - RGB part plus the alpha scaled to a 0-255 hex pair.
Color.prototype.toString = function () {
    return this.toRGBString() + Color.Dec2Hex(Math.min(255, Math.round(this.a * 255)));
};
// "#RRGGBB" (alpha dropped).
Color.prototype.toRGBString = function () {
    return "#" + Color.Dec2Hex(this.r) + Color.Dec2Hex(this.g) + Color.Dec2Hex(this.b);
};
// CSS "rgba(r, g, b, a)" with alpha rounded to two decimals.
Color.prototype.toRGBAString = function () {
    return "rgba(" + this.r + ", " + this.g + ", " + this.b + ", " + (Math.round(this.a * 100) / 100) + ")";
};
/**
 * Returns a copy darkened (positive percent) or lightened (negative percent)
 * by scaling the HSV value component; alpha preserved.
 */
Color.prototype.shaded = function (percent) {
    var hsv = Color.RGB2HSV(this);
    hsv.value = Math.max(Math.min(hsv.value * (1 - percent), 100), 0);
    var rgb = Color.HSV2RGB(hsv);
    var color = new Color();
    color.r = rgb.r;
    color.g = rgb.g;
    color.b = rgb.b;
    color.a = this.a;
    return color;
};
/**
 * Returns a copy with alpha scaled down by the given fraction, clamped to [0, 1].
 */
Color.prototype.hollowed = function (percent) {
    var color = new Color();
    color.r = this.r;
    color.g = this.g;
    color.b = this.b;
    color.a = Math.max(Math.min(this.a * (1 - percent), 1), 0);
    return color;
};
// Channel-wise RGB complement; alpha preserved.
Color.prototype.inverse = function () {
    var color = new Color();
    color.r = 255 - this.r;
    color.g = 255 - this.g;
    color.b = 255 - this.b;
    color.a = this.a;
    return color;
};
Color.prototype.getHSV = function () {
    return Color.RGB2HSV(this); //h: 0..359, s: 0..100, v: 0..100
};
// Same RGB with alpha forced to 0.
Color.prototype.transparent = function () {
    var color = new Color();
    color.r = this.r;
    color.g = this.g;
    color.b = this.b;
    color.a = 0;
    return color;
};
// Expose Color inside the pencil sandbox: a factory for new instances plus a
// copy of every static property/helper (fromString, HSV2RGB, ...), so
// sandboxed code gets the full API without touching the real constructor.
pencilSandbox.Color = {
    newColor: function () {
        return new Color();
    }
};
// Copy all enumerable static members of Color onto the sandbox facade.
for (var p in Color) {
    pencilSandbox.Color[p] = Color[p];
};
|
# Train VGG on GPU 0 with SGD (lr 0.1, momentum 0.9, lr decay at epoch 150),
# then, on success, archive the produced curve directory under curve/localtrain-basic/.
CUDA_VISIBLE_DEVICES=0 python main.py --optim sgd --lr 0.1 --momentum 0.9 --decay_epoch 150 --model vgg && mv curve/vgg-sgd-lr0.1-momentum0.9-wdecay0.0005-run0-resetFalse curve/localtrain-basic/vgg-sgd-lr0.1-momentum0.9-wdecay0.0005-run0-resetFalse
|
"""
Common resource for testing annotation terms.
"""
# convention: preferred name, preferred id, followed by any other ids and alternative names
brainstem_terms = [  # Landmarks and groups
    ("brainstem", "UBERON:0002298", "ILX:0101444"),
    ("central canal of spinal cord", "UBERON:0002291", "ILX:0724457"),
    ("cerebral aqueduct", "UBERON:0002289", "ILX:0101977"),
    ("diencephalon", "UBERON:0001894", "ILX:0103217"),
    ("foramen caecum of medulla oblongata", "ILX:0746371"),
    ("medulla oblongata", "UBERON:0001896", "ILX:0106736"),
    ("midbrain", "UBERON:0001891", "ILX:0106935"),
    ("middle cerebellar peduncle", "UBERON:0002152", "ILX:0106956"),
    ("obex", "ILX:0107862"),
    ("pons", "UBERON:0000988", "ILX:0109019"),
    # Geometric markers
    ("brainstem dorsal midline caudal point", "ILX:0778144"),
    ("brainstem ventral midline caudal point", "ILX:0778145"),
    # FIX: "ILX:0778146" had a trailing space, which made lookups by that
    # identifier fail in get_brainstem_term.
    ("brainstem dorsal midline cranial point", "ILX:0778146"),
    ("brainstem ventral midline cranial point", "ILX:0778147"),
    ("brainstem dorsal midline pons-medulla junction", "ILX:0778148"),
    ("brainstem ventral midline pons-medulla junction", "ILX:0778149"),
    ("brainstem dorsal midline midbrain-pons junction", "ILX:0778150"),
    ("brainstem ventral midline midbrain-pons junction", "ILX:0778151"),
    # Surface
    ("brainstem exterior", "ILX:0778157"),
    ("midbrain exterior", "ILX:0778158"),
    ("medulla oblongata exterior", "ILX:0778159"),
    ("pons exterior", "ILX:0778160"),
    ("brainstem-spinal cord interface", "ILX:0778162"),
    ("thalamus-brainstem interface", "ILX:0778163")
]


def get_brainstem_term(name: str):
    """
    Find a term by matching name against any identifier held for that term
    (preferred name, preferred id, or alternative id/name).

    :param name: name or identifier to look up.
    :return: tuple ( preferred name, preferred id ).
    :raises NameError: if name does not match any term.
    """
    for term in brainstem_terms:
        if name in term:
            return (term[0], term[1])
    raise NameError("Brainstem annotation term '" + name + "' not found.")
|
/* Fixed-size 300x200 box with a 5px solid blue (#3498db) border,
   applied to every <div>. */
div {
    width: 300px;
    height: 200px;
    border: 5px solid #3498db;
}
#!/bin/bash
# Print this machine's public IP address via the ipinfo.io service.
curl ipinfo.io/ip
|
<filename>src/Boj6444.java
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.HashMap;
import java.util.StringTokenizer;
/**
 * BOJ #6444 - spreadsheet evaluation.
 * Reads T test cases; each sheet has N rows of M cells, where a cell is either
 * an integer literal or a formula "=REF+REF+..." summing other cells. The
 * evaluated cells of all test cases are accumulated into one buffer and
 * printed once at the end.
 */
public class Boj6444 {
    // 18_278 = 26 + 26^2 + 26^3, the number of distinct 1-3 letter column
    // names; used as the row stride when linearizing (row, column) to one key.
    private static final int LAST = 18_278;
    private static final char EQUAL = '=';
    private static final String PLUS = "+";
    private static final String SPACE = " ";
    private static final String NEW_LINE = "\n";
    public static void main(String[] args) throws Exception{
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        StringBuilder sb = new StringBuilder();
        int T = Integer.parseInt(br.readLine());
        while(T-- > 0) {
            StringTokenizer st = new StringTokenizer(br.readLine());
            int M = Integer.parseInt(st.nextToken());
            int N = Integer.parseInt(st.nextToken());
            // Sparse sheet: linearized cell index -> value; zero cells are never stored.
            HashMap<Integer, Integer> sheets = new HashMap<>();
            for(int i = 0; i < N; i++) {
                st = new StringTokenizer(br.readLine());
                for(int j = 0; j < M; j++) {
                    String input = st.nextToken();
                    int val = 0;
                    int target = i * LAST + j;
                    if(input.charAt(0) == EQUAL) {
                        // Formula: sum referenced cells. Absent cells (unset,
                        // zero, or not yet evaluated) contribute 0.
                        StringTokenizer tokens = new StringTokenizer(input.substring(1), PLUS);
                        while(tokens.hasMoreTokens()) {
                            int idx = getIndex(tokens.nextToken());
                            if(!sheets.containsKey(idx)) continue;
                            val += sheets.get(idx);
                        }
                    }
                    else {
                        // Literal integer cell.
                        val = Integer.parseInt(input);
                    }
                    if(val != 0) sheets.put(target, val);
                    sb.append((sheets.containsKey(target) ? sheets.get(target): 0) + SPACE);
                }
                sb.append(NEW_LINE);
            }
        }
        System.out.println(sb.toString());
    }
    /**
     * Converts a cell reference such as "AB12" (letters = column, digits =
     * 1-based row) into the linearized index row*LAST + columnIndex.
     * NOTE(review): the weights treat c[0] as the least-significant letter,
     * i.e. column letters appear reversed - presumably matching the BOJ 6444
     * input format; confirm against the problem statement.
     */
    private static int getIndex(String spot) {
        char[] c = spot.toCharArray();
        // idx = position where the digit (row) part starts; build collects the digits.
        int idx = -1;
        StringBuilder build = new StringBuilder();
        for(int i = 0; i < c.length; i++) {
            if(c[i] >= '0' && c[i] <= '9' && idx == -1) {
                idx = i;
            }
            if(idx != -1) build.append(c[i]);
        }
        // fix = base offset of the referenced row.
        int fix = LAST * (Integer.parseInt(build.toString()) - 1);
        if(idx == 1) return fix + c[0] - 'A';
        else if(idx == 2) return fix + (c[1] - 'A' + 1) * 26 + (c[0] - 'A');
        else return fix + (c[2] - 'A' + 1) * 26 * 26 + (c[1] - 'A' + 1) * 26 + (c[0] - 'A');
    }
}
|
#!/bin/bash
#rename in=<infile> out=<outfile>
# Launcher for the BBTools driver.ReduceSilva Java class; resolves its own
# install directory, computes JVM memory settings, and forwards all arguments.
function usage(){
echo "
Written by Brian Bushnell
Last modified July 31, 2015
Description: Reduces Silva entries down to one entry per taxa.
Usage: reducesilva.sh in=<file> out=<file> column=<1>
Parameters:
column The taxonomic level. 0=species, 1=genus, etc.
ow=f (overwrite) Overwrites files that already exist.
zl=4 (ziplevel) Set compression level, 1 (low) to 9 (max).
fastawrap=70 Length of lines in fasta output.
Sampling parameters:
reads=-1 Set to a positive number to only process this many INPUT sequences, then quit.
Java Parameters:
-Xmx This will be passed to Java to set memory usage, overriding the program's automatic memory detection.
-Xmx20g will specify 20 gigs of RAM, and -Xmx200m will specify 200 megs. The max is typically 85% of physical memory.
To read from stdin, set 'in=stdin'. The format should be specified with an extension, like 'in=stdin.fq.gz'
To write to stdout, set 'out=stdout'. The format should be specified with an extension, like 'out=stdout.fasta'
Please contact Brian Bushnell at bbushnell@lbl.gov if you encounter any problems.
"
}
# Resolve the directory containing this script, following symlinks, without
# disturbing the caller's working directory.
pushd . > /dev/null
DIR="${BASH_SOURCE[0]}"
while [ -h "$DIR" ]; do
cd "$(dirname "$DIR")"
DIR="$(readlink "$(basename "$DIR")")"
done
cd "$(dirname "$DIR")"
DIR="$(pwd)/"
popd > /dev/null
#DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/"
# Java classpath and default JVM settings (may be overridden by calcXmx below).
CP="$DIR""current/"
z="-Xmx1g"
EA="-ea"
set=0
# With no arguments or -h/--help, print usage and exit.
if [ -z "$1" ] || [[ $1 == -h ]] || [[ $1 == --help ]]; then
usage
exit
fi
# Delegate -Xmx detection/parsing to the shared calcmem.sh helper.
calcXmx () {
source "$DIR""/calcmem.sh"
parseXmx "$@"
}
calcXmx "$@"
# Build and run the java command, echoing it to stderr first.
function reducesilva() {
#module load oracle-jdk/1.7_64bit
#module load pigz
local CMD="java $EA $z -cp $CP driver.ReduceSilva $@"
echo $CMD >&2
eval $CMD
}
reducesilva "$@"
|
<filename>backend/src/models/index.js
// Builds the shared Sequelize connection and registers every model with it.
const Sequelize = require('sequelize');

const env = process.env.NODE_ENV || 'development';
const config = require(__dirname + '/../../config/database.js')[env];

// Prefer a connection URL taken from the environment when the config names
// one; otherwise connect with explicit credentials from the config file.
let sequelize;
if (config.use_env_variable) {
  sequelize = new Sequelize(process.env[config.use_env_variable], config);
} else {
  sequelize = new Sequelize(
    config.database,
    config.username,
    config.password,
    config
  );
}

// Every model module exports a definer function taking the connection.
const definers = [
  require('./BalanceUpdate'),
  require('./Broker'),
  require('./Investment'),
  require('./Transaction'),
];
for (const define of definers) {
  define(sequelize);
}

// Once all models exist, run any per-model association hooks.
for (const model of Object.values(sequelize.models)) {
  if (model.associate) {
    model.associate(sequelize.models);
  }
}

// Export the connection instance for use around the app.
module.exports = sequelize;
|
def process_text(data):
    """Flatten tokenized text lines into one sentence-per-line string.

    Drops blank lines and heading lines (first non-space char is '='), splits
    each remaining line on the tokenized sentence boundary " . ", re-appends a
    period to every sentence, then undoes tokenization artifacts (" @.@ ",
    " @-@ ") and detaches spacing around punctuation.
    """
    # Keep only content lines and split each into its sentences.
    paragraphs = (
        line.split(" . ")
        for line in data
        if line.strip() and not line.strip().startswith("=")
    )
    sentences = [sentence + "." for paragraph in paragraphs for sentence in paragraph]
    text = "\n".join(sentences)
    # Replacement order matters and mirrors the original pipeline exactly.
    for old, new in (
        (" @.@ ", "."),
        (" @-@ ", "-"),
        (" ,", ","),
        (" '", "'"),
        (" )", ")"),
        ("( ", "("),
        (" ;", ";"),
    ):
        text = text.replace(old, new)
    return text
<filename>libs/sdk-ui-pivot/src/impl/agGridColumnSizing.ts
// (C) 2007-2020 GoodData Corporation
import invariant, { InvariantError } from "ts-invariant";
import omit from "lodash/omit";
import omitBy from "lodash/omitBy";
import {
getAttributeLocators,
getColumnIdentifier,
getColumnIdentifierFromDef,
getIdsFromUri,
getLastFieldId,
getLastFieldType,
getMappingHeaderMeasureItemLocalIdentifier,
getMeasureFormat,
getParsedFields,
getTreeLeaves,
isMeasureColumn,
isSomeTotal,
} from "./agGridUtils";
import {
DEFAULT_HEADER_FONT,
DEFAULT_ROW_FONT,
DEFAULT_SUBTOTAL_FONT,
DEFAULT_TOTAL_FONT,
FIELD_SEPARATOR,
FIELD_TYPE_ATTRIBUTE,
FIELD_TYPE_MEASURE,
HEADER_LABEL_CLASS,
ID_SEPARATOR,
ROW_SUBTOTAL_CLASS,
ROW_TOTAL_CLASS,
VALUE_CLASS,
} from "./agGridConst";
import { identifyResponseHeader } from "./agGridHeaders";
import { IGridHeader, IGridRow } from "./agGridTypes";
import { ColDef, Column, ColumnApi, GridApi } from "@ag-grid-community/all-modules";
import {
ColumnWidth,
ColumnWidthItem,
IAbsoluteColumnWidth,
IAllMeasureColumnWidthItem,
IAttributeColumnWidthItem,
IManuallyResizedColumnsItem,
IMeasureColumnLocator,
IMeasureColumnWidthItem,
IResizedColumns,
isAbsoluteColumnWidth,
isAllMeasureColumnWidthItem,
isAttributeColumnWidthItem,
isMeasureColumnLocator,
isMeasureColumnWidthItem,
isWeakMeasureColumnWidthItem,
IWeakMeasureColumnWidthItem,
} from "../columnWidths";
import { DataViewFacade } from "@gooddata/sdk-ui";
import { IAttributeDescriptor, IExecutionResult, IMeasureDescriptor } from "@gooddata/sdk-backend-spi";
import { getMeasureCellFormattedValue } from "./tableCell";
import isEmpty from "lodash/isEmpty";
// Hard floor (px) applied to any manually sized column.
export const MIN_WIDTH = 60;
// Upper bound (px) for manual resizes and grow-to-fit widths.
export const MANUALLY_SIZED_MAX_WIDTH = 2000;
// Upper bound (px) used during auto-sizing.
export const AUTO_SIZED_MAX_WIDTH = 500;
// Extra horizontal space (px) reserved for the sort icon when measuring header text.
export const SORT_ICON_WIDTH = 12;
//
//
//
// True when the stored width is the special "auto" marker rather than a pixel number.
function isColumnWidthAuto(columnWidth: ColumnWidth): boolean {
    return columnWidth.value === "auto";
}
// Map of column identifier -> stored width entry for manually resized columns.
export interface IResizedColumnsCollection {
    [columnIdentifier: string]: IResizedColumnsCollectionItem;
}
export interface IResizedColumnsCollectionItem {
    // Pixel width, or the special "auto" marker.
    width: ColumnWidth;
    // Local identifier of the measure this width belongs to, when applicable.
    measureIdentifier?: string;
}
// Map of measure localIdentifier -> weak (per-measure) width item.
export interface IWeakMeasureColumnWidthItemsMap {
    [measureIdentifier: string]: IWeakMeasureColumnWidthItem;
}
/**
 * Holds all column-width customizations for the pivot table:
 *  - per-column manual widths ("strong" items),
 *  - one width applied to all measure columns ("all measure" width),
 *  - per-measure widths applied to every column of one measure ("weak" items).
 * Lookup precedence (see getManuallyResizedColumn): strong > weak > all-measure.
 */
export class ResizedColumnsStore {
    private manuallyResizedColumns: IResizedColumnsCollection;
    private allMeasureColumnWidth: number | null;
    private weakMeasuresColumnWidths: IWeakMeasureColumnWidthItemsMap;
    public constructor(
        manuallyResizedColumns: IResizedColumnsCollection = {},
        allMeasureColumnWidth: number | null = null,
        weakMeasuresColumnWidths: IWeakMeasureColumnWidthItemsMap = {},
    ) {
        this.manuallyResizedColumns = manuallyResizedColumns;
        this.allMeasureColumnWidth = allMeasureColumnWidth;
        this.weakMeasuresColumnWidths = weakMeasuresColumnWidths;
    }
    // Returns the effective manual width for a column: strong item first, then
    // a matching weak per-measure width, then the all-measure width (measure
    // columns only); undefined when none applies.
    public getManuallyResizedColumn(item: Column | ColDef): IManuallyResizedColumnsItem | undefined {
        const colId = getColumnIdentifier(item);
        if (this.manuallyResizedColumns[colId]) {
            return this.convertItem(this.manuallyResizedColumns[colId]);
        }
        const weakColumnWidth = this.getMatchedWeakMeasuresColumnWidth(item);
        if (weakColumnWidth) {
            return this.getWeakMeasureColumMapItem(weakColumnWidth);
        }
        if (isMeasureColumn(item) && this.isAllMeasureColumWidthUsed()) {
            return this.getAllMeasureColumMapItem();
        }
    }
    public isColumnManuallyResized(item: Column | ColDef): boolean {
        return !!this.getManuallyResizedColumn(item);
    }
    // Records the column's current actual width as a strong item and disables
    // grow-to-fit unless explicitly allowed.
    public addToManuallyResizedColumn(column: Column, allowGrowToFit: boolean = false): void {
        this.manuallyResizedColumns[getColumnIdentifier(column)] = {
            width: {
                value: column.getActualWidth(),
                ...getAllowGrowToFitProp(allowGrowToFit),
            },
        };
        column.getColDef().suppressSizeToFit = !allowGrowToFit;
    }
    // Sets the all-measure width; strong items on measure columns and all weak
    // items are dropped because the all-measure width supersedes them.
    public addAllMeasureColumn(columnWidth: number, allColumns: Column[]): void {
        this.allMeasureColumnWidth = columnWidth;
        allColumns.forEach((col) => {
            if (isMeasureColumn(col)) {
                const colId = getColumnIdentifier(col);
                if (this.manuallyResizedColumns[colId]) {
                    this.manuallyResizedColumns = omit(this.manuallyResizedColumns, colId);
                }
                col.getColDef().suppressSizeToFit = true;
            }
        });
        this.weakMeasuresColumnWidths = {};
    }
    // Records the column's width as a weak item for its measure and removes the
    // strong items that belonged to that measure.
    // NOTE(review): "Week" is a typo for "Weak"; the name is kept because
    // callers (e.g. resizeWeakMeasureColumns) depend on it.
    public addWeekMeasureColumn(column: Column): void {
        const width = column.getActualWidth();
        const measureHeaderLocalIdentifier = getMappingHeaderMeasureItemLocalIdentifier(column);
        if (measureHeaderLocalIdentifier) {
            this.weakMeasuresColumnWidths[measureHeaderLocalIdentifier] = {
                measureColumnWidthItem: {
                    width: {
                        value: width,
                    },
                    locator: {
                        measureLocatorItem: {
                            measureIdentifier: measureHeaderLocalIdentifier,
                        },
                    },
                },
            };
            const shouldBeRemoved = (resizedColumnItem: IResizedColumnsCollectionItem) =>
                resizedColumnItem.measureIdentifier === measureHeaderLocalIdentifier;
            this.manuallyResizedColumns = omitBy(this.manuallyResizedColumns, shouldBeRemoved);
        }
    }
    // Clears the all-measure width together with the "auto" placeholders and
    // all weak items that only existed to override it.
    public removeAllMeasureColumns(): void {
        this.allMeasureColumnWidth = null;
        const shouldBeRemoved = (resizedColumnItem: IResizedColumnsCollectionItem) =>
            isColumnWidthAuto(resizedColumnItem.width);
        this.manuallyResizedColumns = omitBy(this.manuallyResizedColumns, shouldBeRemoved);
        this.weakMeasuresColumnWidths = {};
    }
    // Removes the weak item matching the column's measure plus any "auto"
    // placeholders created against it (unless the all-measure width still applies).
    public removeWeakMeasureColumn(column: Column): void {
        const weakColumnWidth = this.getMatchedWeakMeasuresColumnWidth(column);
        if (weakColumnWidth) {
            this.weakMeasuresColumnWidths = omit(
                this.weakMeasuresColumnWidths,
                weakColumnWidth.measureColumnWidthItem.locator.measureLocatorItem.measureIdentifier,
            );
            const shouldBeRemoved = (resizedColumnItem: IResizedColumnsCollectionItem) => {
                return (
                    isColumnWidthAuto(resizedColumnItem.width) &&
                    this.isMatchingWeakWidth(resizedColumnItem, weakColumnWidth) &&
                    !this.isAllMeasureColumWidthUsed()
                );
            };
            this.manuallyResizedColumns = omitBy(this.manuallyResizedColumns, shouldBeRemoved);
        }
    }
    // Removes the column's strong item; when an all-measure or weak width would
    // still cover the column, an "auto" placeholder is stored instead so the
    // column is auto-sized rather than falling back to that width.
    public removeFromManuallyResizedColumn(column: Column): void {
        const colId = getColumnIdentifier(column);
        const item = this.manuallyResizedColumns[colId];
        if (item) {
            this.manuallyResizedColumns = omit(this.manuallyResizedColumns, colId);
            if (!this.isAllMeasureColumWidthUsed() || !isMeasureColumn(column)) {
                column.getColDef().suppressSizeToFit = false;
            }
        }
        if (
            isMeasureColumn(column) &&
            (this.isAllMeasureColumWidthUsed() || this.getMatchedWeakMeasuresColumnWidth(column))
        ) {
            // TODO INE: consider creating weakItem with width: "auto" when alt+DC over allMeasure
            this.manuallyResizedColumns[colId] = this.getAutoSizeItem(column);
            column.getColDef().suppressSizeToFit = false;
        }
    }
    // Serializes the full store (strong + all-measure + weak) into public
    // ColumnWidthItem entries. Delegates to the module-level function of the
    // same name for the strong items.
    public getColumnWidthsFromMap(dv: DataViewFacade): ColumnWidthItem[] {
        const result = getColumnWidthsFromMap(this.manuallyResizedColumns, dv);
        if (this.isAllMeasureColumWidthUsed()) {
            result.push(this.getAllMeasureColumnWidth());
        }
        const weakColumnWidthItems: ColumnWidthItem[] = getWeakColumnWidthsFromMap(
            this.weakMeasuresColumnWidths,
        );
        return result.concat(weakColumnWidthItems);
    }
    // Rebuilds the whole store from a list of public width items, validating
    // (clamping) widths and splitting them into all-measure / weak / strong groups.
    public updateColumnWidths(columnWidths: ColumnWidthItem[] | undefined, dv: DataViewFacade): void {
        const allMeasureWidthItem = this.filterAllMeasureColumnWidthItem(columnWidths);
        if (allMeasureWidthItem && isAllMeasureColumnWidthItem(allMeasureWidthItem)) {
            const validatedAllMeasureColumnWidth = defaultWidthValidator(
                allMeasureWidthItem.measureColumnWidthItem.width,
            );
            this.allMeasureColumnWidth = isAbsoluteColumnWidth(validatedAllMeasureColumnWidth)
                ? validatedAllMeasureColumnWidth.value
                : null;
        } else {
            this.allMeasureColumnWidth = null;
        }
        this.weakMeasuresColumnWidths = this.filterWeakColumnWidthItems(columnWidths);
        const columnWidthItems = this.filterStrongColumnWidthItems(columnWidths);
        const columnWidthsByField = convertColumnWidthsToMap(columnWidthItems, dv);
        this.manuallyResizedColumns = columnWidthsByField;
    }
    // All columns that display the same measure as targetColumn (including it).
    public getMatchingColumnsByMeasure(targetColumn: Column, allColumns: Column[]): Column[] {
        const targetMeasureLocalIdentifier = getMappingHeaderMeasureItemLocalIdentifier(targetColumn);
        if (targetMeasureLocalIdentifier) {
            return allColumns.filter((col: Column) => {
                const measureLocalIdentifier = getMappingHeaderMeasureItemLocalIdentifier(col);
                return targetMeasureLocalIdentifier === measureLocalIdentifier;
            });
        }
        return [];
    }
    // Weak width item for the column's measure, if one was recorded.
    public getMatchedWeakMeasuresColumnWidth(item: Column | ColDef): IWeakMeasureColumnWidthItem | undefined {
        const measureHeaderLocalIdentifier = getMappingHeaderMeasureItemLocalIdentifier(item);
        if (measureHeaderLocalIdentifier) {
            return this.weakMeasuresColumnWidths[measureHeaderLocalIdentifier];
        }
    }
    private filterAllMeasureColumnWidthItem(
        columnWidths: ColumnWidthItem[] | undefined,
    ): IAllMeasureColumnWidthItem | undefined {
        if (columnWidths) {
            return columnWidths.filter(isAllMeasureColumnWidthItem)[0];
        }
    }
    private filterStrongColumnWidthItems(columnWidths: ColumnWidthItem[] | undefined) {
        if (columnWidths) {
            return columnWidths.filter(
                (item) => isAttributeColumnWidthItem(item) || isMeasureColumnWidthItem(item),
            );
        }
        return [];
    }
    // Keeps only weak items whose validated width is absolute, keyed by measure
    // localIdentifier, with the clamped width value substituted in.
    private filterWeakColumnWidthItems(
        columnWidths: ColumnWidthItem[] | undefined,
    ): IWeakMeasureColumnWidthItemsMap {
        if (columnWidths) {
            const onlyWeakWidthItems: IWeakMeasureColumnWidthItem[] = columnWidths.filter(
                isWeakMeasureColumnWidthItem,
            );
            return onlyWeakWidthItems.reduce(
                (map: IWeakMeasureColumnWidthItemsMap, weakWidthItem: IWeakMeasureColumnWidthItem) => {
                    const validatedWidth = defaultWidthValidator(weakWidthItem.measureColumnWidthItem.width);
                    if (isAbsoluteColumnWidth(validatedWidth)) {
                        return {
                            ...map,
                            [weakWidthItem.measureColumnWidthItem.locator.measureLocatorItem
                                .measureIdentifier]: {
                                measureColumnWidthItem: {
                                    ...weakWidthItem.measureColumnWidthItem,
                                    width: {
                                        ...weakWidthItem.measureColumnWidthItem.width,
                                        value: validatedWidth.value,
                                    },
                                },
                            },
                        };
                    }
                    return map;
                },
                {},
            );
        }
        return {};
    }
    private convertItem(item: IResizedColumnsCollectionItem): IManuallyResizedColumnsItem | undefined {
        // columns with width.value = auto are hidden
        if (isAbsoluteColumnWidth(item.width)) {
            const { width } = item;
            return {
                width: width.value,
                ...getAllowGrowToFitProp(width.allowGrowToFit),
            };
        }
    }
    private getWeakMeasureColumMapItem(item: IWeakMeasureColumnWidthItem): IManuallyResizedColumnsItem {
        return {
            width: item.measureColumnWidthItem.width.value,
        };
    }
    private isAllMeasureColumWidthUsed() {
        return this.allMeasureColumnWidth !== null;
    }
    // Placeholder entry meaning "auto-size this column", tagged with its
    // measure so it can be cleaned up when that measure's weak width changes.
    private getAutoSizeItem(column: Column): IResizedColumnsCollectionItem {
        const measureHeaderLocalIdentifier = getMappingHeaderMeasureItemLocalIdentifier(column);
        const result: IResizedColumnsCollectionItem = { width: { value: "auto" } };
        if (measureHeaderLocalIdentifier) {
            result.measureIdentifier = measureHeaderLocalIdentifier;
        }
        return result;
    }
    private getAllMeasureColumMapItem(): IManuallyResizedColumnsItem {
        return { width: this.allMeasureColumnWidth! };
    }
    private getAllMeasureColumnWidth(): IAllMeasureColumnWidthItem {
        return {
            measureColumnWidthItem: {
                width: {
                    value: this.allMeasureColumnWidth!,
                },
            },
        };
    }
    private isMatchingWeakWidth(
        item: IResizedColumnsCollectionItem,
        weakColumnWidth: IWeakMeasureColumnWidthItem,
    ) {
        return (
            item.measureIdentifier ===
            weakColumnWidth.measureColumnWidthItem.locator.measureLocatorItem.measureIdentifier
        );
    }
}
//
//
//
/**
 * Converts public width items (attribute + strong measure items only) into the
 * internal field-keyed map, validating each width via widthValidator.
 */
export const convertColumnWidthsToMap = (
    columnWidths: ColumnWidthItem[],
    dv: DataViewFacade,
    widthValidator: (width: ColumnWidth) => ColumnWidth = defaultWidthValidator,
): IResizedColumnsCollection => {
    if (!columnWidths) {
        return {};
    }
    const columnWidthsMap: IResizedColumnsCollection = {};
    const attributeDescriptors = dv.meta().attributeDescriptors();
    const measureDescriptors = dv.meta().measureDescriptors();
    columnWidths.forEach((columnWidth: ColumnWidthItem) => {
        if (isAttributeColumnWidthItem(columnWidth)) {
            const [field, width] = getAttributeColumnWidthItemFieldAndWidth(
                columnWidth,
                attributeDescriptors,
            );
            columnWidthsMap[field] = {
                width: widthValidator(width),
            };
        }
        if (isMeasureColumnWidthItem(columnWidth)) {
            const [field, width] = getMeasureColumnWidthItemFieldAndWidth(columnWidth, measureDescriptors);
            // Remember which measure the width belongs to (taken from the first
            // measure locator, if any) so weak-width bookkeeping can find it.
            const locator: IMeasureColumnLocator = columnWidth.measureColumnWidthItem.locators.filter(
                isMeasureColumnLocator,
            )[0];
            const measureIdentifier = locator ? locator.measureLocatorItem.measureIdentifier : undefined;
            columnWidthsMap[field] = {
                width: widthValidator(width),
                measureIdentifier,
            };
        }
    });
    return columnWidthsMap;
};
/**
 * Resolves an attribute width item into its ag-grid field id and width by
 * matching the attribute's localIdentifier against the execution's headers.
 * Invariant-fails when no header matches.
 */
const getAttributeColumnWidthItemFieldAndWidth = (
    columnWidthItem: IAttributeColumnWidthItem,
    attributeHeaders: IAttributeDescriptor[],
): [string, IAbsoluteColumnWidth] => {
    const { attributeIdentifier, width } = columnWidthItem.attributeColumnWidthItem;
    const matchingHeader = attributeHeaders.find(
        (header) => header.attributeHeader.localIdentifier === attributeIdentifier,
    );
    invariant(matchingHeader, `Could not find attributeHeader with localIdentifier "${attributeIdentifier}"`);
    return [identifyResponseHeader(matchingHeader!)!, width];
};
/**
 * Resolves a measure width item into its ag-grid field id and width. The field
 * is built by joining one key per locator: "m_<index>" for measure locators
 * (index of the measure in the execution headers) and "a_<ids>" for attribute
 * locators. Invariant-fails when a referenced measure does not exist.
 */
const getMeasureColumnWidthItemFieldAndWidth = (
    columnWidthItem: IMeasureColumnWidthItem,
    measureHeaderItems: IMeasureDescriptor[],
): [string, ColumnWidth] => {
    const keys: string[] = [];
    columnWidthItem.measureColumnWidthItem.locators.forEach((locator) => {
        if (isMeasureColumnLocator(locator)) {
            const measureColumnWidthHeaderIndex = measureHeaderItems.findIndex(
                (measureHeaderItem) =>
                    measureHeaderItem.measureHeaderItem.localIdentifier ===
                    locator.measureLocatorItem.measureIdentifier,
            );
            invariant(
                measureColumnWidthHeaderIndex !== -1,
                `Could not find measureHeader with localIdentifier "${locator.measureLocatorItem.measureIdentifier}"`,
            );
            keys.push(`m${ID_SEPARATOR}${measureColumnWidthHeaderIndex}`);
        } else {
            // Attribute locator: key is derived from the ids embedded in the
            // attribute element uri.
            const key = `a${ID_SEPARATOR}${getIdsFromUri(locator.attributeLocatorItem.element!).join(
                ID_SEPARATOR,
            )}`;
            keys.push(key);
        }
    });
    const field = keys.join(FIELD_SEPARATOR); // check if keys is empty than *
    return [field, columnWidthItem.measureColumnWidthItem.width];
};
/**
 * Reverses the field-id encoding: turns an internal column id plus width back
 * into a public ColumnWidthItem. Attribute fields become attribute width items
 * (rows) or measure width items with attribute locators only (column
 * attributes); measure fields become measure width items with full locators.
 * Throws InvariantError when the id cannot be matched to the execution.
 */
const getSizeItemByColId = (dv: DataViewFacade, colId: string, width: ColumnWidth): ColumnWidthItem => {
    const fields = getParsedFields(colId);
    const lastFieldType = getLastFieldType(fields);
    const lastFieldId = getLastFieldId(fields);
    if (lastFieldType === FIELD_TYPE_ATTRIBUTE) {
        // Try row attributes (dimension 0) first.
        for (const header of dv.meta().attributeDescriptorsForDim(0)) {
            if (getIdsFromUri(header.attributeHeader.uri)[0] === lastFieldId) {
                const attributeIdentifier = header.attributeHeader.localIdentifier;
                if (isAbsoluteColumnWidth(width)) {
                    return {
                        attributeColumnWidthItem: {
                            width,
                            attributeIdentifier,
                        },
                    };
                } else {
                    // "auto" widths are not representable for attribute columns.
                    throw new InvariantError(
                        `width value for attributeColumnWidthItem has to be number ${colId}`,
                    );
                }
            }
        }
        // check only column attribute without measure
        const colDescriptors = dv.meta().attributeDescriptorsForDim(1);
        const EMPTY_MEASURE_FIELD: string[] = [];
        const attributeLocators = getAttributeLocators([...fields, EMPTY_MEASURE_FIELD], colDescriptors);
        if (attributeLocators) {
            return {
                measureColumnWidthItem: {
                    width,
                    locators: [...attributeLocators],
                },
            };
        }
        throw new InvariantError(`could not find attribute header matching ${colId}`);
    } else if (lastFieldType === FIELD_TYPE_MEASURE) {
        const colDescriptors = dv.meta().attributeDescriptorsForDim(1);
        const measureDescriptors = dv.meta().measureDescriptors();
        // The measure field id is the index into the measure descriptors.
        const headerItem = measureDescriptors[parseInt(lastFieldId, 10)];
        const attributeLocators = getAttributeLocators(fields, colDescriptors);
        return {
            measureColumnWidthItem: {
                width,
                locators: [
                    ...attributeLocators,
                    {
                        measureLocatorItem: {
                            measureIdentifier: headerItem.measureHeaderItem.localIdentifier,
                        },
                    },
                ],
            },
        };
    }
    throw new InvariantError(`could not find header matching ${colId}`);
};
/**
 * Converts the internal resized-columns map back into public ColumnWidthItem
 * entries, one per stored column id.
 */
export const getColumnWidthsFromMap = (
    map: IResizedColumnsCollection,
    dv: DataViewFacade,
): ColumnWidthItem[] => {
    return Object.keys(map).map((colId: string) => {
        const { width } = map[colId];
        const sizeItem = getSizeItemByColId(dv, colId, width);
        // FIX: error message previously read "by filed" instead of "by field".
        invariant(sizeItem, `unable to find size item by field ${colId}`);
        return sizeItem;
    });
};
/**
 * Flattens the weak-measure width map into a plain list of width items.
 */
export const getWeakColumnWidthsFromMap = (map: IWeakMeasureColumnWidthItemsMap): ColumnWidthItem[] => {
    return Object.values(map);
};
/**
 * Clamps absolute widths into [MIN_WIDTH, MANUALLY_SIZED_MAX_WIDTH]; the
 * special "auto" width passes through untouched.
 */
const defaultWidthValidator = (width: ColumnWidth): ColumnWidth => {
    if (!isAbsoluteColumnWidth(width)) {
        return width;
    }
    const clamped = Math.min(Math.max(width.value, MIN_WIDTH), MANUALLY_SIZED_MAX_WIDTH);
    return { ...width, value: clamped };
};
/**
 * This function _mutates_ the incoming column defs according to the sizing rules.
 *
 * Per leaf column: a manual width (strong/weak/all-measure) wins and disables
 * grow-to-fit unless the item allows it; otherwise the auto-resized width or
 * the default is used, optionally overridden by a grow-to-fit width (which may
 * lift the max-width cap when it exceeds MANUALLY_SIZED_MAX_WIDTH).
 */
export const updateColumnDefinitionsWithWidths = (
    columnDefinitions: IGridHeader[],
    resizedColumnsStore: ResizedColumnsStore,
    autoResizedColumns: IResizedColumns,
    defaultColumnWidth: number,
    isGrowToFitEnabled: boolean,
    growToFittedColumns: IResizedColumns = {},
): void => {
    const leaves = getTreeLeaves(columnDefinitions);
    leaves.forEach((columnDefinition: IGridHeader) => {
        if (columnDefinition) {
            const columnId = getColumnIdentifierFromDef(columnDefinition);
            const manualSize = resizedColumnsStore.getManuallyResizedColumn(columnDefinition);
            const autoResizeSize = autoResizedColumns[columnId];
            columnDefinition.maxWidth = MANUALLY_SIZED_MAX_WIDTH;
            if (manualSize) {
                columnDefinition.width = manualSize.width;
                columnDefinition.suppressSizeToFit = !manualSize.allowGrowToFit;
            } else {
                columnDefinition.suppressSizeToFit = false;
                columnDefinition.width = autoResizeSize ? autoResizeSize.width : defaultColumnWidth;
                if (isGrowToFitEnabled) {
                    const growToFittedColumn =
                        growToFittedColumns[getColumnIdentifierFromDef(columnDefinition)];
                    if (growToFittedColumn) {
                        columnDefinition.width = growToFittedColumn.width;
                        // A fitted width beyond the manual cap removes the cap
                        // so ag-grid does not clamp it back down.
                        if (growToFittedColumn.width > MANUALLY_SIZED_MAX_WIDTH) {
                            columnDefinition.maxWidth = undefined;
                        }
                    }
                }
            }
        }
    });
};
/**
 * Aligns every column's suppressSizeToFit flag with the store: manually
 * resized columns suppress fitting unless their item allows grow-to-fit;
 * all other columns participate in size-to-fit. No-op without a column API.
 */
export const syncSuppressSizeToFitOnColumns = (
    resizedColumnsStore: ResizedColumnsStore,
    columnApi: ColumnApi,
): void => {
    if (!columnApi) {
        return;
    }
    for (const col of columnApi.getAllColumns()) {
        const resizedColumn = resizedColumnsStore.getManuallyResizedColumn(col);
        col.getColDef().suppressSizeToFit = resizedColumn ? !resizedColumn.allowGrowToFit : false;
    }
};
// True when a non-empty column id has a recorded auto-resize entry.
export const isColumnAutoResized = (autoResizedColumns: IResizedColumns, resizedColumnId: string): boolean =>
    !!resizedColumnId && !!autoResizedColumns[resizedColumnId];
/**
 * Restores each given column to its effective width: the manual width when the
 * store has one, else the recorded auto-resize width, else the default.
 */
export const resetColumnsWidthToDefault = (
    columnApi: ColumnApi,
    columns: Column[],
    resizedColumnsStore: ResizedColumnsStore,
    autoResizedColumns: IResizedColumns,
    defaultWidth: number,
): void => {
    columns.forEach((col) => {
        const colId = getColumnIdentifier(col);
        if (resizedColumnsStore.isColumnManuallyResized(col)) {
            const manual = resizedColumnsStore.getManuallyResizedColumn(col);
            if (manual) {
                columnApi.setColumnWidth(col, manual.width);
            }
            return;
        }
        if (isColumnAutoResized(autoResizedColumns, colId)) {
            columnApi.setColumnWidth(col, autoResizedColumns[colId].width);
            return;
        }
        columnApi.setColumnWidth(col, defaultWidth);
    });
};
/**
 * Applies the given column's current width to every measure column and records
 * it in the store as the all-measure width.
 */
export const resizeAllMeasuresColumns = (
    columnApi: ColumnApi,
    resizedColumnsStore: ResizedColumnsStore,
    column: Column,
): void => {
    const newWidth = column.getActualWidth();
    const allColumns = columnApi.getAllColumns();
    for (const col of allColumns) {
        if (isMeasureColumn(col)) {
            columnApi.setColumnWidth(col, newWidth);
        }
    }
    resizedColumnsStore.addAllMeasureColumn(newWidth, allColumns);
};
/**
 * Records the given column's width as a weak (per-measure) width, then applies
 * that width to every measure column of the same measure and excludes those
 * columns from size-to-fit.
 */
export const resizeWeakMeasureColumns = (
    columnApi: ColumnApi,
    resizedColumnsStore: ResizedColumnsStore,
    column: Column,
): void => {
    resizedColumnsStore.addWeekMeasureColumn(column);
    for (const col of columnApi.getAllColumns()) {
        if (!isMeasureColumn(col)) {
            continue;
        }
        const weakColumnWidth = resizedColumnsStore.getMatchedWeakMeasuresColumnWidth(col);
        if (weakColumnWidth) {
            columnApi.setColumnWidth(col, weakColumnWidth.measureColumnWidthItem.width.value);
            col.getColDef().suppressSizeToFit = true;
        }
    }
};
/** Wraps a truthy `allowGrowToFit` into a props object; otherwise returns {}. */
const getAllowGrowToFitProp = (allowGrowToFit: boolean | undefined): { allowGrowToFit?: boolean } => {
    if (allowGrowToFit) {
        return { allowGrowToFit };
    }
    return {};
};
// Bundle of everything needed to measure column content widths on a canvas.
interface CalculateColumnWidthsConfig {
    context: CanvasRenderingContext2D | null; // 2D canvas context used for text measurement
    columns: Column[];
    rowData: IGridRow[]; // visible body rows
    totalData: IGridRow[]; // pinned-bottom (total) rows
    execution: IExecutionResult;
    measureHeaders: boolean; // when true, header labels are measured as well
    headerFont: string;
    subtotalFont: string;
    totalFont: string;
    rowFont: string;
    padding: number; // extra pixels added to each measured width
    separators: any; // number-format separators forwarded to the value formatter
    cache: Map<string, number>; // memo of text -> measured width
}
/**
 * Measures `text` on the canvas (adding sort-icon space when `hasSort`) and
 * returns the width only when it exceeds the current `maxWidth`; otherwise
 * returns undefined so the caller keeps its previous maximum.
 */
export const getMaxWidth = (
    context: CanvasRenderingContext2D,
    text: string | undefined,
    hasSort: boolean,
    maxWidth: number | undefined,
): number | undefined => {
    if (!text) {
        return;
    }
    let width = context.measureText(text).width;
    if (hasSort) {
        width += SORT_ICON_WIDTH;
    }
    if (maxWidth === undefined || width > maxWidth) {
        return width;
    }
    return undefined;
};
/**
 * Cached variant of getMaxWidth: looks the text width up in `widthsCache`,
 * measuring (and memoizing) on a miss. Returns the width only when it beats
 * the current `maxWidth`, undefined otherwise.
 */
export const getMaxWidthCached = (
    context: CanvasRenderingContext2D,
    text: string,
    maxWidth: number | undefined,
    widthsCache: Map<string, number>,
): number | undefined => {
    let width = widthsCache.get(text);
    if (width === undefined) {
        width = context.measureText(text).width;
        widthsCache.set(text, width);
    }
    return maxWidth === undefined || width > maxWidth ? width : undefined;
};
// Formats a measure cell value using the column's measure format and the given
// separators; returns null for undefined values. (Despite the `string`
// annotation, `text` can be undefined at runtime — the guard relies on that.)
const valueFormatter = (text: string, colDef: IGridHeader, execution: IExecutionResult, separators: any) => {
    return text !== undefined
        ? getMeasureCellFormattedValue(text, getMeasureFormat(colDef, execution), separators)
        : null;
};
/**
 * Measures the rendered width of each column's value in `row` and records the
 * per-column maximum into `maxWidths`. Measure columns are formatted first so
 * the measured text matches what the grid actually renders.
 */
const collectWidths = (
    config: CalculateColumnWidthsConfig,
    row: IGridRow,
    maxWidths: Map<string, number>,
): void => {
    const { context } = config;
    config.columns.forEach((column: Column) => {
        const colDef: IGridHeader = column.getColDef() as IGridHeader;
        if (colDef.field && context) {
            const text = row[colDef.field];
            const formattedText =
                isMeasureColumn(column) && valueFormatter(text, colDef, config.execution, config.separators);
            const textForCalculation = formattedText || text;
            // colDef.field is already known truthy here, so look the current
            // maximum up directly (the original re-checked it redundantly).
            const maxWidth = maxWidths.get(colDef.field);
            let possibleMaxWidth;
            if (config.cache) {
                possibleMaxWidth = getMaxWidthCached(context, textForCalculation, maxWidth, config.cache);
            } else {
                possibleMaxWidth = getMaxWidth(context, textForCalculation, false, maxWidth);
            }
            if (possibleMaxWidth) {
                maxWidths.set(colDef.field, possibleMaxWidth);
            }
        }
    });
};
/**
 * Produces new column definitions with widths derived from the measured text
 * maxima: measured width + padding, clamped to [MIN_WIDTH, AUTO_SIZED_MAX_WIDTH].
 * Columns without a field are passed through unchanged.
 */
export const getUpdatedColumnDefs = (
    columns: Column[],
    maxWidths: Map<string, number>,
    padding: number,
): ColDef[] => {
    return columns.map((column: Column) => {
        const colDef: ColDef = column.getColDef();
        if (!colDef.field) {
            return colDef;
        }
        const measuredWidth = maxWidths.get(colDef.field);
        const paddedWidth = measuredWidth ? Math.ceil(measuredWidth + padding) : 0;
        const clampedWidth = Math.min(Math.max(MIN_WIDTH, paddedWidth), AUTO_SIZED_MAX_WIDTH);
        return { ...colDef, width: clampedWidth };
    });
};
// Computes updated column definitions whose widths fit the widest content.
// The canvas font is switched before each measurement pass (header, row vs
// subtotal, total) so measured widths match the rendered style.
const calculateColumnWidths = (config: CalculateColumnWidthsConfig) => {
    const { context } = config;
    const maxWidths = new Map<string, number>();
    if (config.measureHeaders && context) {
        // Pass 1: header labels (sorted columns reserve icon space).
        context.font = config.headerFont;
        config.columns.forEach((column: Column) => {
            const colDef: ColDef = column.getColDef();
            const maxWidth = colDef.field ? maxWidths.get(colDef.field) : undefined;
            const possibleMaxWidth = getMaxWidth(context, colDef.headerName, !!colDef.sort, maxWidth);
            if (colDef.field && possibleMaxWidth) {
                maxWidths.set(colDef.field, possibleMaxWidth);
            }
        });
    }
    // Pass 2: body rows — subtotal rows use a different font than plain rows.
    config.rowData.forEach((row: IGridRow) => {
        if (context) {
            context.font = isSomeTotal(row.type) ? config.subtotalFont : config.rowFont;
            collectWidths(config, row, maxWidths);
        }
    });
    // Pass 3: pinned-bottom (grand total) rows.
    config.totalData.forEach((row: IGridRow) => {
        if (context) {
            context.font = config.totalFont;
            collectWidths(config, row, maxWidths);
        }
    });
    return getUpdatedColumnDefs(config.columns, maxWidths, config.padding);
};
/** Collects the data object of every currently displayed row, skipping empties. */
const getDisplayedRowData = (gridApi: GridApi): IGridRow[] => {
    const result: IGridRow[] = [];
    const displayedCount = gridApi.getDisplayedRowCount();
    for (let rowIndex = 0; rowIndex < displayedCount; rowIndex += 1) {
        const data: IGridRow = gridApi.getDisplayedRowAtIndex(rowIndex).data;
        if (data) {
            result.push(data);
        }
    }
    return result;
};
/** Collects the data object of every pinned-bottom (total) row, skipping empties. */
const getDisplayedTotalData = (gridApi: GridApi): IGridRow[] => {
    const result: IGridRow[] = [];
    const pinnedCount = gridApi.getPinnedBottomRowCount();
    for (let rowIndex = 0; rowIndex < pinnedCount; rowIndex += 1) {
        const data: IGridRow = gridApi.getPinnedBottomRow(rowIndex).data;
        if (data) {
            result.push(data);
        }
    }
    return result;
};
// Reads the effective CSS font of the first element with `className` inside
// the table container; falls back to `defaultFont` when no such element exists.
const getTableFont = (containerRef: HTMLDivElement, className: string, defaultFont: string) => {
    const element = containerRef.getElementsByClassName(className)[0];
    if (!element) {
        return defaultFont;
    }
    const { font, fontWeight, fontSize, fontFamily } = window.getComputedStyle(element);
    // Some browsers report an empty `font` shorthand; compose it from the parts then.
    return isEmpty(font) ? `${fontWeight} ${fontSize} ${fontFamily}` : font;
};
// Resolves the fonts used for header, plain row, subtotal and total cells so
// canvas measurements match the rendered table.
const getTableFonts = (
    containerRef: HTMLDivElement,
): { headerFont: string; rowFont: string; subtotalFont: string; totalFont: string } => {
    /**
     * All fonts are gotten from first element with given class. Once we will have font different for each cell/header/row this will not work
     */
    const headerFont = getTableFont(containerRef, HEADER_LABEL_CLASS, DEFAULT_HEADER_FONT);
    const rowFont = getTableFont(containerRef, VALUE_CLASS, DEFAULT_ROW_FONT);
    const subtotalFont = getTableFont(containerRef, ROW_SUBTOTAL_CLASS, DEFAULT_SUBTOTAL_FONT);
    const totalFont = getTableFont(containerRef, ROW_TOTAL_CLASS, DEFAULT_TOTAL_FONT);
    return { headerFont, rowFont, subtotalFont, totalFont };
};
/**
 * Applies previously computed auto-resize widths through the ag-grid column API
 * (this *mutates* the pivot table column state).
 */
export const autoresizeAllColumns = (
    columnApi: ColumnApi | null,
    autoResizedColumns: IResizedColumns,
): void => {
    if (!columnApi) {
        return;
    }
    for (const column of columnApi.getPrimaryColumns()) {
        const columnDef = column.getColDef();
        const resizeEntry = autoResizedColumns[getColumnIdentifier(columnDef)];
        if (columnDef.field && resizeEntry && resizeEntry.width) {
            columnApi.setColumnWidth(columnDef.field, resizeEntry.width);
        }
    }
};
/**
 * Custom implementation of columns autoresizing according content: https://en.morzel.net/post/resizing-all-ag-gird-react-columns
 * Calculate the width of text for each grid cell and collect the minimum width needed for each of the gird columns.
 * Text width calculation is done efficiently with measureText method on Canvas.
 *
 * Returns a map of column identifier -> { width }; empty when any of the
 * required grid/execution handles is missing.
 */
export const getAutoResizedColumns = (
    gridApi: GridApi | null,
    columnApi: ColumnApi | null,
    execution: IExecutionResult | null,
    containerRef: HTMLDivElement,
    options: {
        measureHeaders: boolean;
        padding: number;
        separators: any;
    },
): IResizedColumns => {
    if (gridApi && columnApi && execution) {
        const columns = columnApi.getPrimaryColumns();
        // Fonts are read from the live DOM so canvas measurements match rendering.
        const { headerFont, rowFont, subtotalFont, totalFont } = getTableFonts(containerRef);
        // Off-screen canvas used purely for measureText.
        const canvas = document.createElement("canvas");
        const context = canvas.getContext("2d");
        const rowData = getDisplayedRowData(gridApi);
        const totalData = getDisplayedTotalData(gridApi);
        const autoResizedColumns = {};
        const updatedColumDefs = calculateColumnWidths({
            context,
            columns,
            rowData,
            totalData,
            execution,
            measureHeaders: options.measureHeaders,
            headerFont: headerFont,
            subtotalFont: subtotalFont,
            totalFont: totalFont,
            rowFont: rowFont,
            padding: options.padding,
            separators: options.separators,
            cache: new Map(),
        });
        // Keep only definitions that received a concrete width.
        updatedColumDefs.forEach((columnDef: ColDef) => {
            if (columnDef.field && columnDef.width !== undefined) {
                autoResizedColumns[getColumnIdentifier(columnDef)] = {
                    width: columnDef.width,
                };
            }
        });
        return autoResizedColumns;
    }
    return {};
};
|
/**
 * Returns an array of the object's own enumerable property values.
 * The original hand-rolled for...in + hasOwnProperty loop is exactly the
 * contract of the standard Object.values, so delegate to it.
 */
export default function values( obj ) {
	return Object.values( obj );
}
|
<gh_stars>0
import pickle
import tensorflow as tf
from sklearn.model_selection import train_test_split
from alexnet import AlexNet
from sklearn.utils import shuffle
# Feature extraction on German Traffic Signs with a pre-trained AlexNet.
#
# BUG FIXES vs. the original:
#  1. The resize op was built on the whole X_train array instead of the feed
#     placeholder `x`, so values passed via feed_dict never reached the
#     network. The graph is now built on `x`.
#  2. softmax_cross_entropy_with_logits was given `probs` (already softmaxed),
#     applying softmax twice; it now receives the raw `logits`.
#  3. A stray Session/init created mid-graph (and an unused keep_prob
#     placeholder) were removed.

# Pickle files produced by the traffic-sign project (already pre-split).
training_file = 'train.p'
validation_file = 'valid.p'
testing_file = 'test.p'

nb_classes = 43            # number of traffic-sign classes
n_channels_in_image = 3    # RGB input
EPOCHS = 1
BATCH_SIZE = 128
rate = 0.001               # Adam learning rate

# Load traffic signs data.
with open(training_file, mode='rb') as f:
    train = pickle.load(f)
with open(validation_file, mode='rb') as f:
    valid = pickle.load(f)
with open(testing_file, mode='rb') as f:
    test = pickle.load(f)

X_train, y_train = train['features'], train['labels']
X_validation, y_validation = valid['features'], valid['labels']
X_test, y_test = test['features'], test['labels']  # kept for a later test-set evaluation

# Placeholders and resize operation (AlexNet expects 227x227 input).
x = tf.placeholder(tf.float32, (None, 32, 32, n_channels_in_image))
y = tf.placeholder(tf.int32, (None))
one_hot_y = tf.one_hot(y, nb_classes)
resized = tf.image.resize_images(x, (227, 227))

# Reuse AlexNet up to fc7 as a fixed feature extractor.
fc7 = AlexNet(resized, feature_extract=True)
# `tf.stop_gradient` prevents the gradient from flowing backwards past this
# point, keeping the weights before and up to `fc7` frozen. This also makes
# training faster, less work to do!
fc7 = tf.stop_gradient(fc7)

# Final classification layer for the traffic-sign classes.
shape = (fc7.get_shape().as_list()[-1], nb_classes)  # weight matrix shape
fc8W = tf.Variable(tf.truncated_normal(shape, stddev=0.01))
fc8b = tf.Variable(tf.zeros(nb_classes))
logits = tf.matmul(fc7, fc8W) + fc8b
probs = tf.nn.softmax(logits)

# Loss, training and accuracy operations.
cross_entropy = tf.nn.softmax_cross_entropy_with_logits(labels=one_hot_y, logits=logits)
loss_operation = tf.reduce_mean(cross_entropy)
optimizer = tf.train.AdamOptimizer(learning_rate=rate)
training_operation = optimizer.minimize(loss_operation)

correct_prediction = tf.equal(tf.argmax(logits, 1), tf.argmax(one_hot_y, 1))
accuracy_operation = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
saver = tf.train.Saver()


def evaluate(X_data, y_data):
    """Return the mean accuracy of the current model over (X_data, y_data)."""
    num_examples = len(X_data)
    total_accuracy = 0
    sess = tf.get_default_session()
    for offset in range(0, num_examples, BATCH_SIZE):
        batch_x, batch_y = X_data[offset:offset + BATCH_SIZE], y_data[offset:offset + BATCH_SIZE]
        accuracy = sess.run(accuracy_operation, feed_dict={x: batch_x, y: batch_y})
        total_accuracy += (accuracy * len(batch_x))
    return total_accuracy / num_examples


# Train and evaluate the feature extraction model.
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    num_examples = len(X_train)
    print("Training...")
    print()
    for i in range(EPOCHS):
        X_train, y_train = shuffle(X_train, y_train)
        for offset in range(0, num_examples, BATCH_SIZE):
            end = offset + BATCH_SIZE
            batch_x, batch_y = X_train[offset:end], y_train[offset:end]
            sess.run(training_operation, feed_dict={x: batch_x, y: batch_y})
        validation_accuracy = evaluate(X_validation, y_validation)
        print("EPOCH {} ...".format(i + 1))
        print("Validation Accuracy = {:.3f}".format(validation_accuracy))
        print()
    saver.save(sess, './lenet')
    print("Model saved")
|
<reponame>mykaelandrade/fiscal4j<filename>src/main/java/br/indie/fiscal4j/nfe400/transformers/NFIdentificadorLocalDestinoOperacaoTransformer.java
package br.indie.fiscal4j.nfe400.transformers;
import br.indie.fiscal4j.nfe400.classes.nota.NFIdentificadorLocalDestinoOperacao;
import org.simpleframework.xml.transform.Transform;
/**
 * SimpleXML transformer that (de)serializes {@link NFIdentificadorLocalDestinoOperacao}
 * by its code (via valueOfCodigo/getCodigo) instead of the enum constant name.
 */
public class NFIdentificadorLocalDestinoOperacaoTransformer implements Transform<NFIdentificadorLocalDestinoOperacao> {
    /** Parses the code read from the XML into the corresponding enum value. */
    @Override
    public NFIdentificadorLocalDestinoOperacao read(final String codigoOperacao) {
        return NFIdentificadorLocalDestinoOperacao.valueOfCodigo(codigoOperacao);
    }
    /** Writes the enum value back out as its code. */
    @Override
    public String write(final NFIdentificadorLocalDestinoOperacao operacao) {
        return operacao.getCodigo();
    }
} |
import math
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.nn import Parameter
from .math import normalize
class AngleMultipleLinear(nn.Module):
    """Based on SoftTriplet loss: https://arxiv.org/pdf/1909.05235.pdf

    Cosine-similarity classification head with ``num_centers`` weight vectors
    per class. With multiple centers, per-center similarities are blended by a
    softmax with temperature ``scale``. An optional regularizer (see ``loss``)
    pushes centers of different classes apart (single-center case) or spreads
    a class's own centers (multi-center case).
    """
    def __init__(self, in_features, num_classes, num_centers=1, scale=10.0, reg_weight=0.2, reg_threshold=0.2):
        super(AngleMultipleLinear, self).__init__()
        self.in_features = in_features
        assert in_features > 0
        self.num_classes = num_classes
        assert num_classes >= 2
        self.num_centers = num_centers
        assert num_centers >= 1
        self.scale = scale
        assert scale > 0.0
        # One weight column per (class, center); stored unnormalized and
        # re-normalized on every forward pass.
        weight_shape = [in_features, num_classes, num_centers] if num_centers > 1 else [in_features, num_classes]
        self.weight = Parameter(torch.Tensor(*weight_shape))
        self.weight.data.normal_().renorm_(2, 1, 1e-5).mul_(1e5)
        self.enable_regularization = reg_weight is not None and reg_weight > 0.0
        if self.enable_regularization:
            self.reg_weight = reg_weight
            if num_centers == 1:
                # Upper-triangular mask selects each cross-class pair once.
                self.reg_threshold = reg_threshold
                assert self.reg_threshold >= 0.0
                reg_valid_mask = np.triu(np.ones((num_classes, num_classes), dtype=np.float32), k=1)
            else:
                # Scale the weight down by the number of class/center pairs.
                self.reg_weight /= num_classes
                if num_centers > 2:
                    self.reg_weight /= (num_centers - 1) * (num_centers - 2)
                # Mask selects pairs of centers *within* each class.
                reg_valid_mask = np.tile(np.triu(np.ones((1, num_centers, num_centers), dtype=np.float32), k=1),
                                         (num_classes, 1, 1))
            self.register_buffer('reg_mask', torch.from_numpy(reg_valid_mask))
        else:
            self.reg_weight = None
            self.reg_mask = None
    def forward(self, normalized_x):
        """Score already L2-normalized features against every class (and center)."""
        normalized_x = normalized_x.view(-1, self.in_features)
        normalized_weights = normalize(self.weight.view(self.in_features, -1), dim=0)
        prod = normalized_x.mm(normalized_weights)
        if not torch.onnx.is_in_onnx_export():
            # Guard against numerical drift outside the valid cosine range
            # (skipped during ONNX export).
            prod = prod.clamp(-1.0, 1.0)
        if self.num_centers > 1:
            prod = prod.view(-1, self.num_classes, self.num_centers)
            # Softmax-weighted blend of per-center similarities.
            prod_weights = F.softmax(self.scale * prod, dim=-1)
            scores = torch.sum(prod_weights * prod, dim=-1)
        else:
            scores = prod
        return scores
    def loss(self, name):
        """Return the center-regularization term(s) as a dict keyed by ``name``."""
        out_losses = dict()
        if self.enable_regularization:
            normalized_weights = F.normalize(self.weight, dim=0)
            if self.num_centers == 1:
                # Penalize cross-class center pairs whose cosine similarity
                # exceeds ``reg_threshold``.
                all_pairwise_scores = normalized_weights.permute(1, 0).matmul(normalized_weights)
                valid_pairwise_scores = all_pairwise_scores[self.reg_mask > 0.0]
                losses = valid_pairwise_scores[valid_pairwise_scores > self.reg_threshold] - self.reg_threshold
                out_losses['loss/cpush' + name] =\
                    self.reg_weight * losses.mean() if losses.numel() > 0 else losses.sum()
            else:
                # Encourage a class's own centers to stay similar (1 - cosine).
                all_pairwise_scores = normalized_weights.permute(1, 2, 0).matmul(normalized_weights.permute(1, 0, 2))
                valid_pairwise_scores = all_pairwise_scores[self.reg_mask > 0.0]
                losses = 1.0 - valid_pairwise_scores
                out_losses['loss/st_reg' + name] = self.reg_weight * losses.sum()
        return out_losses
class SymmetricalLayer(nn.Module):
    """
    Init version: https://github.com/IoannisKansizoglou/Symmetrical-Feature-Space

    Classification layer whose class centers are obtained by rotating one
    learned direction by equal angles (2*pi/num_classes) within the plane
    spanned by two learned vectors, placing the centers symmetrically.
    """
    def __init__(self, in_features, num_classes):
        super().__init__()
        self.in_features = in_features
        assert in_features > 0
        self.num_classes = num_classes
        assert num_classes > 1
        # Two learned vectors that span the rotation plane.
        self.weight = nn.Parameter(torch.FloatTensor(2, self.in_features))
        self.weight.data.normal_()
        # Evenly spaced rotation angles, one per class.
        steps = torch.arange(self.num_classes, dtype=torch.float32)
        thetas = 2.0 * math.pi / float(self.num_classes) * steps
        self.register_buffer('thetas', thetas)
        eye_matrix = torch.eye(self.in_features)
        self.register_buffer('eye_matrix', eye_matrix)
    def _generate_centers(self, v1, v2):
        """Rotate the orthonormalized v1 by each theta in the (v1, v2) plane."""
        # Gram-Schmidt: make n2 unit-length and orthogonal to n1.
        n1 = normalize(v1, dim=0, p=2)
        n2 = normalize(v2, dim=0, p=2)
        n2 = normalize(n2 - torch.dot(n1, n2) * n1, dim=0, p=2)
        # Planar rotation matrices for all angles, built from outer products.
        ger_sub = torch.outer(n2, n1) - torch.outer(n1, n2)
        ger_add = torch.outer(n1, n1) + torch.outer(n2, n2)
        sin_thetas = torch.unsqueeze(torch.unsqueeze(torch.sin(self.thetas), dim=-1), dim=-1)
        cos_thetas = torch.unsqueeze(torch.unsqueeze(torch.cos(self.thetas) - 1, dim=-1), dim=-1)
        R = self.eye_matrix + ger_sub * sin_thetas + ger_add * cos_thetas
        # Apply each rotation matrix to n1 -> one center per class.
        return torch.einsum('bij,j->bi', R, n1)
    def forward(self, normalized_x):
        """Score L2-normalized features against the generated class centers."""
        normalized_x = normalized_x.view(-1, self.in_features)
        centers = self._generate_centers(self.weight[0], self.weight[1])
        scores = F.linear(normalized_x, centers)
        return scores
|
const curry = require('lodash/curry');
const { withProps } = require('bottender');
const _ = require('./_');
/**
 * Builds a Bottender action that dispatches on `value` — or, when `value` is a
 * function, on the result of awaiting value(context, props) — against
 * `[pattern, Action]` pairs. The `_` pattern acts as the catch-all default.
 */
const match = (value, mapping) => {
  const fallbackEntry = mapping.find(([pattern]) => pattern === _);
  const concreteEntries = mapping.filter(([pattern]) => pattern !== _);
  const Fn = async (context, props) => {
    const resolved =
      typeof value === 'function' ? await value(context, props) : value;
    for (const [pattern, Action] of concreteEntries) {
      if (pattern === resolved) {
        return withProps(Action, props);
      }
    }
    if (fallbackEntry) {
      const [, DefaultAction] = fallbackEntry;
      return withProps(DefaultAction, props);
    }
  };
  const actionNames = mapping.map(([, Action]) => Action.name || 'Anonymous');
  Object.defineProperty(Fn, 'name', { value: `Match(${actionNames.join(', ')})` });
  return Fn;
};
module.exports = curry(match);
|
#!/bin/bash
# Provision the production Python environment (system libs + venv + deps).
# BUG FIX: `apt-get update` must run before `upgrade`/`install` so the package
# lists are current; the original ran upgrade first against stale lists.
sudo apt-get update -y
sudo apt-get upgrade -y
sudo apt-get install libsm6 -y
sudo apt-get install libgtk2.0-dev -y
sudo bash scripts/install_python36.sh
python3.6 -m venv env-prod
source env-prod/bin/activate
sudo $(which python) -m pip install --upgrade setuptools
sudo $(which python) -m pip install -r requirements.txt
|
#!/bin/bash
#SBATCH --job-name=/data/unibas/boittier/test-neighbours2
#SBATCH --nodes=1
#SBATCH --ntasks=1
#SBATCH --partition=short
#SBATCH --output=/data/unibas/boittier/test-neighbours2_%A-%a.out
# Refines a distributed-charge model for one frame: adjust the reference frame
# with ARS.py, run a gradient-descent fit (fdcm), re-adjust, then generate and
# analyse cube files with cubefit. Paths are site-specific (Unibas cluster).
hostname
# Path to scripts and executables
cubefit=/home/unibas/boittier/fdcm_project/mdcm_bin/cubefit.x
fdcm=/home/unibas/boittier/fdcm_project/fdcm.x
ars=/home/unibas/boittier/fdcm_project/ARS.py
# Variables for the job
n_steps=2
n_charges=24
scan_name=frame_
suffix=.chk
cubes_dir=/data/unibas/boittier/fdcm/amide_graph
output_dir=/data/unibas/boittier/test-neighbours2
frames=/home/unibas/boittier/fdcm_project/mdcms/amide/model1/frames.txt
initial_fit=/home/unibas/boittier/fdcm_project/mdcms/amide/model1/24_charges_refined.xyz
initial_fit_cube=/home/unibas/boittier/fdcm_project/mdcms/amide/model1/amide1.pdb.chk
prev_frame=0
start_frame=1
# NOTE(review): this is bash ARRAY syntax — the elements are "1," (comma
# included!) and "71", and "$next_frame" below expands only to the first
# element "1,". A scalar (e.g. next_frame=71) was probably intended — confirm.
next_frame=(1, 71)
acd=/home/unibas/boittier/fdcm_project/0_fit.xyz.acd
start=$start_frame
next=$next_frame
dir='frame_'$next
output_name=$output_dir/$dir/$dir'-'$start'-'$next'.xyz'
# NOTE(review): this reassigns initial_fit, discarding the model1 path set
# above — presumably chaining from the previous frame's result; verify.
initial_fit=$output_dir/"frame_"$start/"frame_"$start'-'$prev_frame'-'$start'.xyz'
# Go to the output directory
mkdir -p $output_dir
cd $output_dir
mkdir -p $dir
cd $dir
# Do Initial Fit
# for initial fit
esp1=$cubes_dir/$scan_name$start$suffix'.p.cube'
dens1=$cubes_dir/$scan_name$start$suffix'.d.cube'
esp=$cubes_dir/$scan_name$next$suffix'.p.cube'
dens=$cubes_dir/$scan_name$next$suffix'.d.cube'
# adjust reference frame
python $ars -charges $initial_fit -pcube $dens1 -pcube2 $dens -frames $frames -output $output_name -acd $acd > $output_name.ARS.log
# do gradient descent fit
$fdcm -xyz $output_name.global -dens $dens -esp $esp -stepsize 0.2 -n_steps $n_steps -learning_rate 0.5 -output $output_name > $output_name.GD.log
# adjust reference frame
python $ars -charges $output_name -pcube $esp -pcube2 $esp -frames $frames -output $output_name -acd $acd > $output_name.ARS-2.log
# make a cube file for the fit
$cubefit -v -generate -esp $esp -dens $dens -xyz refined.xyz > $output_name.cubemaking.log
# do analysis
$cubefit -v -analysis -esp $esp -esp2 $n_charges'charges.cube' -dens $dens > $output_name.analysis.log
echo $PWD
|
// Build a 5x5 matrix of random integers in [0, 100).
const matrix = Array.from({ length: 5 }, () =>
    Array.from({ length: 5 }, () => Math.floor(Math.random() * 100)),
);
console.table(matrix); |
import { IVariant } from "./IVariant";
import { Types } from "./Types";
import { SPIRType } from "./SPIRType";
/**
 * Scalar constant payload. All typed accessors read/write the same underlying
 * ArrayBuffer (presumably via the private DataView), so e.g. writing `u32`
 * and reading `f32` reinterprets the same bytes — TODO confirm in the impl.
 */
export declare class SPIRConstantConstant {
    value: ArrayBuffer;
    private _dataView;
    get u32(): number;
    set u32(value: number);
    get i32(): number;
    set i32(value: number);
    get f32(): number;
    set f32(value: number);
    get u64(): bigint;
    set u64(value: bigint);
    get i64(): bigint;
    set i64(value: bigint);
    get f64(): number;
    set f64(value: number);
    clone(): SPIRConstantConstant;
}
/**
 * Vector of scalar constants (`r`) with per-component ids and a component
 * count (`vecsize`). `ID` appears to be an ambient/global type — confirm.
 */
export declare class SPIRConstantConstantVector {
    r: SPIRConstantConstant[];
    id: ID[];
    vecsize: number;
    constructor();
    clone(): SPIRConstantConstantVector;
}
/** Matrix of constant vectors (`c` = columns) with per-column ids. */
export declare class SPIRConstantConstantMatrix {
    c: SPIRConstantConstantVector[];
    id: ID[];
    columns: number;
    constructor();
    clone(): SPIRConstantConstantMatrix;
}
/**
 * SPIR-V constant value (scalar, vector, matrix, or composite via
 * `subconstants`). Scalar accessors take optional (col, row) coordinates and
 * default to component (0, 0). `TypeID` appears to be an ambient/global type
 * — confirm where it is declared.
 */
export declare class SPIRConstant extends IVariant {
    static type: Types;
    constant_type: TypeID;
    m: SPIRConstantConstantMatrix;
    // True when this is a specialization constant (value may be overridden).
    specialization: boolean;
    is_used_as_array_length: boolean;
    is_used_as_lut: boolean;
    // Member ids for composite constants.
    subconstants: Uint32Array;
    specialization_constant_macro_name: string;
    f16_to_f32(u16_value: number): number;
    specialization_constant_id(col: number, row?: number): number;
    scalar(col?: number, row?: number): number;
    scalar_i16(col?: number, row?: number): number;
    scalar_u16(col?: number, row?: number): number;
    scalar_i8(col?: number, row?: number): number;
    scalar_u8(col?: number, row?: number): number;
    scalar_f16(col?: number, row?: number): number;
    scalar_f32(col?: number, row?: number): number;
    scalar_i32(col?: number, row?: number): number;
    scalar_f64(col?: number, row?: number): number;
    scalar_i64(col?: number, row?: number): bigint;
    scalar_u64(col?: number, row?: number): bigint;
    vector(): SPIRConstantConstantVector;
    vector_size(): number;
    columns(): number;
    make_null(constant_type: SPIRType): void;
    constant_is_null(): boolean;
    constructor();
    constructor(other: SPIRConstant);
    constructor(constant_type: TypeID);
    constructor(constant_type: TypeID, elements: number[], num_elements: number, specialized: boolean);
    constructor(constant_type: TypeID, v0: number, specialized: boolean);
    constructor(constant_type: TypeID, v0: bigint, specialized: boolean);
    constructor(constant_type: TypeID, vector_elements: SPIRConstant[], num_elements: number, specialized: boolean);
    _construct(constant_type: TypeID): void;
    _constructArray(constant_type: TypeID, elements: Uint32Array, num_elements: number, specialized: boolean): void;
    _constructScalar32(constant_type: TypeID, v0: number, specialized: boolean): void;
    _constructScalar64(constant_type: TypeID, v0: bigint, specialized: boolean): void;
    _constructVecMat(constant_type: TypeID, vector_elements: SPIRConstant[], num_elements: number, specialized: boolean): void;
}
|
<reponame>leomillon/try-jcv
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.webdemo.kotlin.impl.analyzer;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiElementVisitor;
import com.intellij.psi.PsiErrorElement;
import com.intellij.psi.PsiFile;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.kotlin.diagnostics.Diagnostic;
import org.jetbrains.kotlin.diagnostics.Errors;
import org.jetbrains.kotlin.diagnostics.Severity;
import org.jetbrains.kotlin.diagnostics.rendering.DefaultErrorMessages;
import org.jetbrains.kotlin.psi.KtFile;
import org.jetbrains.kotlin.resolve.BindingContext;
import org.jetbrains.webdemo.kotlin.datastructures.ErrorDescriptor;
import org.jetbrains.webdemo.kotlin.datastructures.TextInterval;
import org.jetbrains.webdemo.kotlin.datastructures.TextPosition;
import org.jetbrains.webdemo.kotlin.exceptions.KotlinCoreException;
import org.jetbrains.webdemo.kotlin.impl.ResolveUtils;
import org.jetbrains.webdemo.kotlin.impl.WrapperSettings;
import java.util.*;
/**
 * Collects parse errors and resolver diagnostics for a set of Kotlin files and
 * converts them into {@link ErrorDescriptor}s grouped by file name.
 */
public class ErrorAnalyzer {
    private final List<KtFile> currentPsiFiles;
    private final Project currentProject;

    public ErrorAnalyzer(List<KtFile> currentPsiFiles, Project currentProject) {
        this.currentPsiFiles = currentPsiFiles;
        this.currentProject = currentProject;
    }

    /**
     * Returns every error for the analyzed files: syntax errors found by a PSI
     * visitor plus diagnostics reported by the resolver.
     *
     * @param isJs whether resolution targets the JS backend
     */
    public Map<String, List<ErrorDescriptor>> getAllErrors(boolean isJs) {
        try {
            final Map<String, List<ErrorDescriptor>> errors = new HashMap<>();
            for (PsiFile psiFile : currentPsiFiles) {
                errors.put(psiFile.getName(), getErrorsByVisitor(psiFile));
            }
            BindingContext bindingContext = ResolveUtils.getBindingContext(currentPsiFiles, currentProject, isJs);
            getErrorsFromDiagnostics(bindingContext.getDiagnostics().all(), errors);
            return errors;
        } catch (Throwable e) {
            throw new KotlinCoreException(e);
        }
    }

    /**
     * Converts resolver diagnostics into {@link ErrorDescriptor}s and appends
     * them to {@code errors}; each file's list is then sorted by position.
     */
    public void getErrorsFromDiagnostics(Collection<Diagnostic> diagnostics, Map<String, List<ErrorDescriptor>> errors) {
        try {
            for (Diagnostic diagnostic : diagnostics) {
                // fix for errors in js library files
                VirtualFile virtualFile = diagnostic.getPsiFile().getVirtualFile();
                if (virtualFile == null || virtualFile.getPresentableUrl().startsWith(WrapperSettings.JS_LIB_ROOT.toString())) {
                    continue;
                }
                String render = DefaultErrorMessages.render(diagnostic);
                if (render.contains("This cast can never succeed")) {
                    continue;
                }
                if (diagnostic.getSeverity() != Severity.INFO) {
                    Iterator<TextRange> textRangeIterator = diagnostic.getTextRanges().iterator();
                    if (!textRangeIterator.hasNext()) {
                        continue;
                    }
                    TextRange firstRange = textRangeIterator.next();
                    String className = diagnostic.getSeverity().name();
                    // Errors other than unresolved references get the red wavy underline style.
                    if (!(diagnostic.getFactory() == Errors.UNRESOLVED_REFERENCE) && (diagnostic.getSeverity() == Severity.ERROR)) {
                        className = "red_wavy_line";
                    }
                    TextInterval interval = getInterval(firstRange.getStartOffset(), firstRange.getEndOffset(),
                            diagnostic.getPsiFile().getViewProvider().getDocument());
                    errors.get(diagnostic.getPsiFile().getName()).add(
                            new ErrorDescriptor(interval, render, convertSeverity(diagnostic.getSeverity()), className)
                    );
                }
            }
            // Sort each file's errors by start line, then by start column.
            for (String key : errors.keySet()) {
                Collections.sort(errors.get(key), new Comparator<ErrorDescriptor>() {
                    @Override
                    public int compare(ErrorDescriptor o1, ErrorDescriptor o2) {
                        if (o1.getInterval().getStart().getLine() > o2.getInterval().getStart().getLine()) {
                            return 1;
                        } else if (o1.getInterval().getStart().getLine() < o2.getInterval().getStart().getLine()) {
                            return -1;
                        } else if (o1.getInterval().getStart().getLine() == o2.getInterval().getStart().getLine()) {
                            if (o1.getInterval().getStart().getCh() > o2.getInterval().getStart().getCh()) {
                                return 1;
                            } else if (o1.getInterval().getStart().getCh() < o2.getInterval().getStart().getCh()) {
                                return -1;
                            } else if (o1.getInterval().getStart().getCh() == o2.getInterval().getStart().getCh()) {
                                return 0;
                            }
                        }
                        return -1;
                    }
                });
            }
        } catch (Throwable e) {
            throw new KotlinCoreException(e);
        }
    }

    /** Collects plain syntax (parser) errors by visiting the PSI tree. */
    private List<ErrorDescriptor> getErrorsByVisitor(PsiFile psiFile) {
        final List<PsiErrorElement> errorElements = new ArrayList<PsiErrorElement>();
        PsiElementVisitor visitor = new PsiElementVisitor() {
            @Override
            public void visitElement(PsiElement element) {
                element.acceptChildren(this);
            }
            @Override
            public void visitErrorElement(PsiErrorElement element) {
                errorElements.add(element);
            }
        };
        final List<ErrorDescriptor> errors = new ArrayList<>();
        visitor.visitFile(psiFile);
        for (PsiErrorElement errorElement : errorElements) {
            int start = errorElement.getTextRange().getStartOffset();
            int end = errorElement.getTextRange().getEndOffset();
            TextInterval interval = getInterval(start, end, psiFile.getViewProvider().getDocument());
            errors.add(new ErrorDescriptor(interval, errorElement.getErrorDescription(),
                    convertSeverity(Severity.ERROR), "red_wavy_line"));
        }
        return errors;
    }

    /** Maps the compiler severity onto the web-demo severity enum. */
    @NotNull
    private org.jetbrains.webdemo.kotlin.datastructures.Severity convertSeverity(Severity severity) {
        switch (severity) {
            case ERROR:
                return org.jetbrains.webdemo.kotlin.datastructures.Severity.ERROR;
            case INFO:
                return org.jetbrains.webdemo.kotlin.datastructures.Severity.INFO;
            case WARNING:
                return org.jetbrains.webdemo.kotlin.datastructures.Severity.WARNING;
            default:
                return null;
        }
    }

    /** Converts absolute document offsets into (line, column) text positions. */
    private TextInterval getInterval(int start, int end, Document currentDocument) {
        int lineNumberForElementStart = currentDocument.getLineNumber(start);
        int lineNumberForElementEnd = currentDocument.getLineNumber(end);
        int charNumberForElementStart = start - currentDocument.getLineStartOffset(lineNumberForElementStart);
        // BUG FIX: compute the end column relative to the line the range ENDS on.
        // The original subtracted the start line's offset, which yields a wrong
        // column for multi-line ranges.
        int charNumberForElementEnd = end - currentDocument.getLineStartOffset(lineNumberForElementEnd);
        if ((start == end) && (lineNumberForElementStart == lineNumberForElementEnd)) {
            // Widen zero-length ranges one character to the left so the editor
            // has something to underline (unless already at line start).
            charNumberForElementStart--;
            if (charNumberForElementStart < 0) {
                charNumberForElementStart++;
                charNumberForElementEnd++;
            }
        }
        TextPosition startPosition = new TextPosition(lineNumberForElementStart, charNumberForElementStart);
        TextPosition endPosition = new TextPosition(lineNumberForElementEnd, charNumberForElementEnd);
        return new TextInterval(startPosition, endPosition);
    }
}
|
<gh_stars>0
/*
* Copyright (c) 2015, 2016 Oracle and/or its affiliates. All rights reserved. This
* code is released under a tri EPL/GPL/LGPL license. You can use it,
* redistribute it and/or modify it under the terms of the:
*
* Eclipse Public License version 1.0
* GNU General Public License version 2
* GNU Lesser General Public License version 2.1
*/
package org.jruby.truffle.core.rubinius;
import com.oracle.truffle.api.object.DynamicObject;
import com.oracle.truffle.api.source.SourceSection;
import org.jruby.truffle.RubyContext;
import org.jruby.truffle.core.Layouts;
import org.jruby.truffle.core.method.MethodNodesFactory;
import org.jruby.truffle.language.NotProvided;
import org.jruby.truffle.language.RubyGuards;
import org.jruby.truffle.language.RubyNode;
import org.jruby.truffle.language.arguments.ObjectArrayNode;
import org.jruby.truffle.language.arguments.ReadAllArgumentsNode;
import org.jruby.truffle.language.arguments.ReadBlockNode;
import org.jruby.truffle.language.control.ReturnID;
import org.jruby.truffle.language.literal.ObjectLiteralNode;
/**
 * Rubinius primitive backed by an existing Ruby method: calls and invokes are
 * constructed as call nodes that dispatch to the wrapped method object.
 */
public class RubiniusPrimitiveCallConstructor implements RubiniusPrimitiveConstructor {
    private final DynamicObject method;
    public RubiniusPrimitiveCallConstructor(DynamicObject method) {
        assert RubyGuards.isRubyMethod(method);
        this.method = method;
    }
    @Override
    public int getPrimitiveArity() {
        // Arity is taken from the wrapped method's shared method info.
        return Layouts.METHOD.getMethod(method).getSharedMethodInfo().getArity().getPreRequired();
    }
    @Override
    public RubyNode createCallPrimitiveNode(RubyContext context, SourceSection sourceSection, ReturnID returnID) {
        // Forward all caller arguments (and no block) to the wrapped method.
        return new CallRubiniusPrimitiveNode(context, sourceSection,
                MethodNodesFactory.CallNodeFactory.create(context, sourceSection, new RubyNode[] {
                        new ObjectLiteralNode(context, sourceSection, method),
                        new ReadAllArgumentsNode(context, sourceSection),
                        new ReadBlockNode(context, sourceSection, NotProvided.INSTANCE)
                }), returnID);
    }
    @Override
    public RubyNode createInvokePrimitiveNode(RubyContext context, SourceSection sourceSection, RubyNode[] arguments) {
        // Same dispatch, but with an explicit argument array supplied by the caller.
        return MethodNodesFactory.CallNodeFactory.create(context, sourceSection, new RubyNode[] {
                new ObjectLiteralNode(context, sourceSection, method),
                new ObjectArrayNode(context, sourceSection, arguments),
                new ReadBlockNode(context, sourceSection, NotProvided.INSTANCE)
        });
    }
}
|
<gh_stars>0
import { Component, OnInit} from '@angular/core';
import { Router } from '@angular/router';
import { Project } from '../../../core/models/view-models/project.view.model';
import { UserModel } from '../../../core/models/input-models/user.model';
import { ProjectsService } from '../../../core/services/projects/projects.service';
import { ToastsManager } from 'ng2-toastr/ng2-toastr';
@Component({
templateUrl: './project-create.component.html'
})
export class ProjectCreateComponent implements OnInit {
public model : Project;
public createFail : boolean;
public errorMessage :string;
private users: UserModel
constructor(
private toastr: ToastsManager,
private route : Router,
private projectsService : ProjectsService
) {
this.model = new Project("", 0, localStorage.getItem('email'), localStorage.getItem('_id'), "");
}
async ngOnInit() {
const loadAllUsers = await this.projectsService
.createGet()
.subscribe(data => {
this.users = data.users;
})
return loadAllUsers;
}
createProject () : void {
this.projectsService
.create(this.model)
.subscribe(
data => {
if(data.success == true) {
this.createFail = false;
this.successfullCreateRequest(data);
} else {
this.errorMessage = data.errorMessage;
this.createFail = true;
}
},
err => {
this.createFail = true;
this.toastr.error('Unknown error occured. Please try again');
}
)
}
successfullCreateRequest(data) : void {
//sthis.toastr.success('A new project is successfully created!');
this.route.navigate(['/projects']);
}
} |
#!/bin/sh
# Regenerates config-local.sh next to this script, exporting any of the
# recognized environment variables that are currently set.
set -e
# Path of this script's directory (everything up to the last path component
# of $0) with the output file name appended.
CONFIG_LOCAL="$(echo "$0" | sed -e 's/[^\/]*$//')config-local.sh"
# Emit one `export NAME="value"` line per recognized variable that exists in
# the environment; unset variables are skipped entirely.
perl -e '
for(qw(HOME USER SOURCE CORE CONFIG DATA LOG TMP PORT UPLOAD NO_CLONETRACK))
{
if (exists $ENV{$_})
{
print "export $_=\"$ENV{$_}\"\n";
}
}
' >"$CONFIG_LOCAL"
# BUG FIX: the redirection target is now quoted so a script path containing
# whitespace does not undergo word splitting.
#!/bin/bash
# Provisioning script: restarts the local service stack (Nginx, PHP-FPM,
# Redis, MongoDB), installs and configures WordPress under /var/www/html,
# and copies helper pages into the web root.
# Terminal color and check-mark escape codes used by the progress messages.
CYN='\e[96m';
GRN='\e[92m';
NC='\e[0m';
CHK='\xE2\x9C\x94';
printf "\n\n${CYN}Setting up your environment...${NC}\n";
# Prime sudo credentials up front so later commands don't prompt mid-run.
sudo touch /dev/null;
# Stopping Apache and restarting services
printf "\nStopping Apache service in case it's running... ${NC}\n";
sudo systemctl stop apache2 &> /dev/null;
printf "${GRN}${CHK} Complete! ${NC}\n";
printf "\nRestarting Nginx... ${NC}\n";
sudo systemctl restart nginx &> /dev/null;
printf "${GRN}${CHK} Complete! ${NC}\n";
printf "\nRestarting PHP... ${NC}\n";
sudo systemctl restart php7.2-fpm &> /dev/null;
printf "${GRN}${CHK} Complete! ${NC}\n";
printf "\nRestarting Redis... ${NC}\n";
sudo systemctl restart redis &> /dev/null;
printf "${GRN}${CHK} Complete! ${NC}\n";
printf "\nRestarting MongoDB... ${NC}\n";
sudo systemctl restart mongod &> /dev/null;
printf "${GRN}${CHK} Complete! ${NC}\n";
printf "\nCreating directory for project files on host machine...${NC}\n";
sudo mkdir -p /var/www/html &> /dev/null;
printf "${GRN}${CHK} Complete! ${NC}\n";
printf "\nDownloading latest WordPress version... ${NC}\n";
sudo -u vagrant -i -- wp core download --path='/var/www/html' --version='latest' &> /dev/null;
printf "${GRN}${CHK} Complete! ${NC}\n";
printf "\nConfiguring WordPress database... ${NC}\n";
sudo -u vagrant -i -- wp core config --dbhost=localhost --path='/var/www/html' --dbname=wp_local --dbuser=root --dbpass=root &> /dev/null;
printf "${GRN}${CHK} Complete! ${NC}\n";
printf "\nModifying permissions... ${NC}\n";
# NOTE(review): this chmod runs without sudo unlike the surrounding commands;
# confirm the provisioning user owns wp-config.php.
chmod 644 /var/www/html/wp-config.php &> /dev/null;
# Install the site and open the uploads directory to the vagrant group.
sudo -u www-data -i -- wp core install --url=my.local.com --path='/var/www/html' --title="My local development environment." --admin_name=vagrant --admin_password=vagrant --admin_email=null &> /dev/null;
sudo mkdir -p /var/www/html/wp-content/uploads &> /dev/null;
sudo chgrp vagrant /var/www/html/wp-content/uploads &> /dev/null;
sudo chmod 775 /var/www/html/wp-content/uploads &> /dev/null;
printf "${GRN}${CHK} Complete! ${NC}\n";
printf "\nBox information ready... ${NC}\n";
cp /var/www/info.php /var/www/html/
cp /var/www/box.php /var/www/html/
printf "\n\n${GRN}Box is ready! Enjoy!${NC}\n\n\n";
printf "Use [vagrant ssh] to log in to your new box!\n\n";
printf "Visit ${CYN}my.local.com/info.php${NC} for server information. \n";
|
<gh_stars>1-10
package golastic
import (
"encoding/json"
"testing"
)
// TestResultItem verifies that a canned Elasticsearch get-response decodes
// into the expected ResultItem metadata fields.
func TestResultItem(t *testing.T) {
	resultItem, err := getResultItem()
	if err != nil {
		Error(t, err)
		// BUG FIX: bail out on error; continuing with a nil resultItem
		// would panic in the field accesses below.
		return
	}
	AssertEqualString(t, resultItem.Index, "test")
	AssertEqualString(t, resultItem.Type, "products")
	AssertEqualString(t, resultItem.Id, "1")
	AssertEqualInt(t, resultItem.Version, 1)
	AssertEqualBool(t, resultItem.Found, true)
}
// TestResultItem_Scan verifies that Scan unmarshals the _source document
// into a target struct.
func TestResultItem_Scan(t *testing.T) {
	resultItem, err := getResultItem()
	if err != nil {
		Error(t, err)
		// BUG FIX: bail out on error; resultItem is nil here and the
		// Scan call below would panic.
		return
	}
	product := &TestProduct{}
	// NOTE(review): product is already a pointer, so this passes a
	// **TestProduct; confirm Scan expects that (encoding/json tolerates
	// pointer chains, so it works if Scan unmarshals directly).
	resultItem.Scan(&product)
	AssertEqualInt(t, product.Id, 16)
	AssertEqualString(t, product.Name, "Spree Mug")
}
// getResultItem builds a ResultItem by decoding a canned Elasticsearch
// get-response JSON document.
func getResultItem() (*ResultItem, error) {
	payload := []byte(`{
"_index" : "test",
"_type" : "products",
"_id" : "1",
"_version" : 1,
"found" : true,
"_source":{"id":16,"name":"Spree Mug"}
}`)
	item := &ResultItem{}
	if err := json.Unmarshal(payload, item); err != nil {
		return nil, err
	}
	return item, nil
}
|
module.exports = async function (context, req) {
context.log('CreatePlayer triggered.');
if (req.body && req.body.username && req.body.game) {
return {
playerDocument: {
id: req.body.username,
game: req.body.game,
},
res: {
status: 204,
},
};
}
return {
res: {
status: 400,
body:
"Please pass a a request body containing a username and a game." & req,
},
};
} |
<reponame>glowroot/glowroot-instrumentation
/*
* Copyright 2017-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.glowroot.instrumentation.javahttpserver;
import java.util.HashMap;
import java.util.Map;
import org.glowroot.instrumentation.api.Message;
import org.glowroot.instrumentation.api.MessageSupplier;
import org.glowroot.instrumentation.api.checker.Nullable;
/**
 * Lazily builds the trace message for a handled HTTP exchange: the request
 * URI plus a detail map of method, query string, headers and remote peer
 * info. Response headers can be attached after the response is sent.
 */
class HttpHandlerMessageSupplier extends MessageSupplier {

    private final String requestMethod;
    private final String requestUri;
    private final @Nullable String requestQueryString;
    private final Map<String, Object> requestHeaders;

    // Populated later via setResponseHeaders(), once the response exists.
    private @Nullable Map<String, Object> responseHeaders;

    private final @Nullable String requestRemoteAddr;
    private final @Nullable String requestRemoteHost;

    HttpHandlerMessageSupplier(String requestMethod, String requestUri,
            @Nullable String requestQueryString, Map<String, Object> requestHeaders,
            @Nullable String requestRemoteAddr, @Nullable String requestRemoteHost) {
        this.requestMethod = requestMethod;
        this.requestUri = requestUri;
        this.requestQueryString = requestQueryString;
        this.requestHeaders = requestHeaders;
        this.requestRemoteAddr = requestRemoteAddr;
        this.requestRemoteHost = requestRemoteHost;
    }

    @Override
    public Message get() {
        // Only non-empty / non-null attributes are added to the detail map.
        Map<String, Object> messageDetail = new HashMap<String, Object>();
        messageDetail.put("Request http method", requestMethod);
        if (requestQueryString != null) {
            // including empty query string since that means request ended with ?
            messageDetail.put("Request query string", requestQueryString);
        }
        if (!requestHeaders.isEmpty()) {
            messageDetail.put("Request headers", requestHeaders);
        }
        if (requestRemoteAddr != null) {
            messageDetail.put("Request remote address", requestRemoteAddr);
        }
        if (requestRemoteHost != null) {
            messageDetail.put("Request remote host", requestRemoteHost);
        }
        if (responseHeaders != null && !responseHeaders.isEmpty()) {
            messageDetail.put("Response headers", responseHeaders);
        }
        return Message.create(requestUri, messageDetail);
    }

    void setResponseHeaders(Map<String, Object> responseHeaders) {
        this.responseHeaders = responseHeaders;
    }
}
|
#include <iostream>
using namespace std;
// Sorts arr[0..n-1] in ascending order using bubble sort.
// Improvement: an early-exit flag skips remaining passes once an inner
// pass completes with no swaps (the array is already sorted), turning the
// best case into O(n). std::swap is named explicitly instead of relying on
// the later file-local swap overload / `using namespace std`.
void BubbleSort(int arr[], int n)
{
    for (int i = 0; i < n - 1; i++) {
        bool swapped = false;
        // After pass i the largest i elements are in final position, so the
        // inner pass stops i elements short of the end.
        for (int j = 0; j < n - i - 1; j++) {
            if (arr[j] > arr[j + 1]) {
                std::swap(arr[j], arr[j + 1]);
                swapped = true;
            }
        }
        if (!swapped)
            break; // no swaps in a full pass: sorted, stop early
    }
}
// Exchanges the values of two integers in place.
// NOTE(review): declared after BubbleSort, so BubbleSort resolves `swap`
// via `using namespace std`; this overload only serves code that sees
// this declaration.
void swap(int &a, int &b)
{
    int previous = a;
    a = b;
    b = previous;
}
// Driver program
// NOTE(review): sorts the sample array but never prints or otherwise uses
// the result, so the program has no observable output.
int main()
{
int arr[] = {10, 7, 8, 9, 1, 5};
// Element count derived from the array's total byte size.
int n = sizeof(arr)/sizeof(arr[0]);
BubbleSort(arr, n);
return 0;
}
#!/bin/bash
#
# Copyright 2016 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Test that bazel can be compiled out of the distribution artifact.
#
set -u
DISTFILE=$(rlocation io_bazel/${1#./})
shift 1
# Bootstrapping is untested on Java 1.7, so skip (exit success) rather than fail.
if [ "${JAVA_VERSION:-}" == "1.7" ] ; then
echo "Warning: bootstrapping not tested for java 1.7"
exit 0
fi
# Load the test setup defined in the parent directory
source $(rlocation io_bazel/src/test/shell/integration_test_setup.sh) \
|| { echo "integration_test_setup.sh not found!" >&2; exit 1; }
# Unpacks the distribution artifact into a scratch directory, bootstraps
# bazel with compile.sh, and checks that the produced binary works and
# reports the pinned SOURCE_DATE_EPOCH (reproducible-build stamp).
function test_bootstrap() {
local olddir=$(pwd)
WRKDIR=$(mktemp -d ${TEST_TMPDIR}/bazelbootstrap.XXXXXXXX)
mkdir -p "${WRKDIR}" || fail "Could not create workdir"
# Clean up the scratch directory even if the test aborts.
trap "rm -rf \"$WRKDIR\"" EXIT
cd "${WRKDIR}" || fail "Could not change to work directory"
# Fixed epoch makes the build deterministic; verified via expect_log below.
export SOURCE_DATE_EPOCH=1501234567
unzip -q "${DISTFILE}"
find . -type f -exec chmod u+w {} \;
env EXTRA_BAZEL_ARGS="--curses=no --strategy=Javac=standalone" ./compile.sh \
|| fail "Expected to be able to bootstrap bazel"
./output/bazel version > "${TEST_log}" || fail "Generated bazel not working"
./output/bazel shutdown
expect_log "${SOURCE_DATE_EPOCH}"
cd "${olddir}"
}
run_suite "bootstrap test"
|
def sort(arr):
    """Sort ``arr`` in place in ascending order using bubble sort.

    Args:
        arr: Mutable sequence of mutually comparable items; modified in place.

    Improvement over the original: an early-exit flag stops as soon as a
    full pass performs no swaps, making the best case O(n).
    """
    n = len(arr)
    for i in range(n):
        swapped = False
        # After pass i the last i elements are in their final position, so
        # the inner pass can stop i elements short of the end.
        for j in range(0, n - i - 1):
            if arr[j] > arr[j + 1]:
                arr[j], arr[j + 1] = arr[j + 1], arr[j]
                swapped = True
        if not swapped:
            # No swaps in a full pass: already sorted.
            break
# Driver code to test above
arr = [7, 5, 2, 9, 6, 4, 1, 3, 8]
sort(arr)
print("Sorted array is:")
for i in range(len(arr)):
    # BUG FIX: the original `print (arr[i]),` carried a Python 2 trailing
    # comma; under Python 3 it built and discarded a 1-tuple each iteration.
    print(arr[i])
# Run the test suite; abort the release if it fails.
npm test
status=$?
if [[ $status -ne 0 ]] ; then
  echo "Tests failed."
  # BUG FIX: the original ran `exit $?` after the echo, at which point $?
  # held echo's exit status (0), so a failed test suite exited successfully
  # and the publish proceeded anyway.
  exit $status
fi
echo 'test complete'

# Upgrade patch version
npm version patch
echo 'updated patch version'

# Deploy to NPM
npm publish
echo 'publish complete'
#!/bin/bash
# BUG FIX: shebang was `#!bin/bash` (missing leading slash).

# Take the date from the last log entry.
# BUG FIX: the original also passed Foto.log as an argument to awk, which
# made awk ignore the tail pipe and print the first field of *every* line;
# and the result was stored in $logdate but read back from the unset $date.
logdate=$(tail -1 Foto.log | awk '{print $1}')
# NOTE(review): these offsets assume a fixed-width timestamp layout in
# Foto.log's first field — verify against an actual log line.
yr=${logdate:1:5}
mo=${logdate:6:3}
dy=${logdate:10:2}
#printf "%s%s%s\n" $dy $mo $yr
logfolder="$dy$mo$yr"

# Collect all folders named kucing_* and kelinci_*.
# BUG FIX: the original used command substitution $(kucing_*), which tried
# to *execute* the glob as a command; expand the globs into arrays instead.
folderkucing=(kucing_*)
folderkelinci=(kelinci_*)

# Set the password to today's date (MMDDYYYY).
password=$(date '+%m%d%Y')
echo $password

if [ ! -e ./Kumpulan.zip ]
then
    echo "zip file"
    # zip every folder with kumpulan name and exclude sh log and tab file
    zip -rm -P $password Kumpulan.zip ./$logfolder "${folderkucing[@]}" "${folderkelinci[@]}" -x *.sh* *.log* *.tab*
elif [ -e ./Kumpulan.zip ]
then
    echo "unzip and delete"
    unzip -P $password Kumpulan.zip
    rm -rf "Kumpulan.zip"
fi
|
# BUG FIX: the original first line was the incomplete statement
# `from IPython.utils.traitlets import` — a SyntaxError — and the module
# referenced IPython without importing it. The class only uses
# IPython.config.configurable and IPython.core.display; restore specific
# traitlets imports here if any were intended.
import IPython


class CustomMenu(IPython.config.configurable.Configurable):
    """A customizable menu container rendered as a row of HTML links."""

    def __init__(self):
        super(CustomMenu, self).__init__(config=None)
        # List of (label, action) pairs; action is a JavaScript snippet.
        self._buttons = []

    def add_button(self, label, action):
        """Add a new button to the menu.

        Args:
            label: Text shown for the link.
            action: JavaScript executed when the link is clicked.
        """
        self._buttons.append((label, action))

    def _render_menu(self):
        """Render the buttons as space-separated HTML anchor tags."""
        # NOTE(review): label/action are interpolated unescaped; do not feed
        # untrusted input without HTML/JS escaping.
        html = []
        for label, action in self._buttons:
            html.append('<a href="javascript:{}">{}</a>'.format(action, label))
        return ' '.join(html)

    def _ipython_display_(self):
        """Hook used by IPython's display machinery to render the menu.

        NOTE(review): relies on the notebook-global ``display`` function
        being in scope — confirm, or import it from IPython.display.
        """
        display(IPython.core.display.HTML(self._render_menu()))
def reverse_string(string):
    """Return a copy of ``string`` with its characters in reverse order."""
    return "".join(reversed(string))
# # Lines configured by zsh-newuser-install
# # Not sure If I banjqxed UZ
# Initialize zsh's completion system.
autoload -Uz compinit
compinit
#
# End of lines added by compinstall
#
# For Adding Tmuxinator Functionality
# source $HOME/.bin/tmuxinator.zsh
# Load the zplug plugin manager.
source $HOME/.zplug/init.zsh
# Load fzf keybindings/completion if installed.
# BUG FIX: the path was tested inside single quotes ('$HOME/.fzf.zsh'),
# which suppresses expansion, so the file-exists check could never succeed.
if [ -f "$HOME/.fzf.zsh" ]; then
  . "$HOME/.fzf.zsh"
fi
####################
# oq Sundry
####################
# https://github.com/robbyrussell/oh-my-zsh/tree/master/plugins/pj
########################################
# 10_Activity
########################################
#
# Sane bindings for zsh's vi mode so it behaves more vim like
# https://github.com/sharat87/zsh-vim-mode
####################
# rq Install
####################
# Install plugins if there are plugins that have not been installed
# Prompts interactively before cloning anything.
if ! zplug check --verbose; then
printf "Install? [y/N]: "
if read -q; then
echo; zplug install
fi
fi
# ZPLUG_CLONE_DEPTH=0
# Shallow-clone setting for zplug-managed repositories (0 = full clone).
zstyle ':zplug:tag' depth 0
# Source all managed plugins into the current shell.
zplug load --verbose
#####################
# Deleted file, to rebase
# $HOME/1q_ctrl-qiuy/10_Activity/rq_
# if [ -f '$HOME/10_Activity/10r_Initiatives/10r_Installing/60o_Shell_Qiuy/60o_ZSH/50j_Settings/50i_Production/sq50j-settings-install-zsh' ]; then
# . '$HOME/10_Activity/10r_Initiatives/10r_Installing/60o_Shell_Qiuy/60o_ZSH/50j_Settings/50i_Production/sq50j-settings-install-zsh'
# # break
# fi
########################################
# Requirement
#
# Some widgets requires external commands.
# anyframe-widget-cdr
#
# require cdr
#
# To use cdr, add the following line to your .zshrc:
# autoload -Uz chpwd_recent_dirs cdr add-zsh-hook
# add-zsh-hook chpwd chpwd_recent_dirs
# expressly specify to use fzf
# Debug
# zplug ":anyframe:selector:" use fzf
# specify path and options for peco, percol, or fzf
# Debug
# zplug ":anyframe:selector:fzf:" command 'fzf --extended'
|
#!/bin/bash
#
# Master build script
#
# This will:
# 1. Build OpenSSL libraries for macOS and iOS using the `build.sh`
# 2. Generate the `openssl.h` umbrella header for macOS and iOS based on the contents of
# the `include-macos` and `include-ios` directories.
#
# Levi Brown
# mailto:levigroker@gmail.com
# September 8, 2017
##
### Configuration
OPENSSL_VERSION="1.1.1a"
FRAMEWORK="openssl.framework"
FRAMEWORK_BIN="${FRAMEWORK}/openssl"
# macOS configuration
MAC_HEADER_DEST="OpenSSL-macOS/OpenSSL-macOS/openssl.h"
MAC_HEADER_TEMPLATE="OpenSSL-macOS/OpenSSL-macOS/openssl_umbrella_template.h"
MAC_INCLUDES_DIR="include-macos"
MAC_LIB_DIR="lib-macos"
MAC_BUILD_DIR="OpenSSL-macOS/bin"
# iOS configuration
IOS_HEADER_DEST="OpenSSL-iOS/OpenSSL-iOS/openssl.h"
IOS_HEADER_TEMPLATE="OpenSSL-iOS/OpenSSL-iOS/openssl_umbrella_template.h"
IOS_INCLUDES_DIR="include-ios"
IOS_LIB_DIR="lib-ios"
IOS_BUILD_DIR="OpenSSL-iOS/bin"
UMBRELLA_HEADER_SCRIPT="framework_scripts/create_umbrella_header.sh"
UMBRELLA_STATIC_INCLUDES="framework_scripts/static_includes.txt"
###
# Report a fatal error on stderr and abort the whole script.
function fail()
{
    printf 'Failed: %s\n' "$*" >&2
    exit 1
}
# Print usage help to stderr (optionally preceded by an error message
# passed as arguments) and exit with failure.
function usage()
{
[[ "$@" = "" ]] || echo "$@" >&2
echo "Usage:" >&2
echo "$0 build|valid|clean" >&2
echo " build Builds OpenSSL libraries from source." >&2
echo " header Generates macOS and iOS umbrella headers." >&2
echo " valid Validates the frameworks." >&2
echo " clean Removes all build artifacts." >&2
echo "" >&2
echo " ex.: $0 build" >&2
echo " ex.: $0 clean" >&2
echo "" >&2
exit 1
}
# Build the OpenSSL libraries via build.sh (sourced, so it shares this
# shell's configuration), then regenerate the umbrella header(s).
function build()
{
# Build OpenSSL
echo "Building OpenSSL ${OPENSSL_VERSION}..."
source ./build.sh
echo "Finished building OpenSSL ${OPENSSL_VERSION}"
header
echo "Build complete. Please follow the steps under \"Building\" in the README.md file to create the macOS and iOS frameworks."
}
# Generate the umbrella header(s) from the template plus the include
# directory contents. The macOS generation is currently disabled
# (commented out); only the iOS header is produced.
function header()
{
# CONTENT is consumed by the sourced umbrella-header script.
export CONTENT=$(<"${UMBRELLA_STATIC_INCLUDES}")
# Create the macOS umbrella header
# HEADER_DEST="${MAC_HEADER_DEST}"
# HEADER_TEMPLATE="${MAC_HEADER_TEMPLATE}"
# INCLUDES_DIR="${MAC_INCLUDES_DIR}"
# source "${UMBRELLA_HEADER_SCRIPT}"
# echo "Created $HEADER_DEST"
# Create the iOS umbrella header
HEADER_DEST="${IOS_HEADER_DEST}"
HEADER_TEMPLATE="${IOS_HEADER_TEMPLATE}"
INCLUDES_DIR="${IOS_INCLUDES_DIR}"
source "${UMBRELLA_HEADER_SCRIPT}"
echo "Created $HEADER_DEST"
}
# Validate the built iOS framework binary: expected fat-binary
# architectures, presence of bitcode in device slices (and absence in the
# i386 simulator slice), and required framework files. Calls fail() if any
# check does not hold.
function valid()
{
    local VALID=1
    local LIB_BIN="${IOS_BUILD_DIR}/${FRAMEWORK_BIN}"
    if [ -r "${LIB_BIN}" ]; then
        # Check expected architectures
        # NOTE(review): exact string match on lipo output is brittle across
        # lipo versions; consider parsing the architecture list instead.
        local REZ=$($LIPO_B -info "${LIB_BIN}")
        if [ "$REZ" != "Architectures in the fat file: OpenSSL-iOS/bin/openssl.framework/openssl are: i386 x86_64 armv7 armv7s arm64 " ]; then
            echo "ERROR: Unexpected result from $LIPO_B: \"${REZ}\""
            VALID=0
        else
            echo " GOOD: ${REZ}"
        fi
        # Check for bitcode where expected
        local ARCHS=("arm64" "armv7" "armv7s")
        for ARCH in ${ARCHS[*]}
        do
            local REZ=$($OTOOL_B -arch ${ARCH} -l "${LIB_BIN}" | $GREP_B LLVM)
            if [ "$REZ" == "" ]; then
                echo "ERROR: Did not find bitcode slice for ${ARCH}"
                VALID=0
            else
                echo " GOOD: Found bitcode slice for ${ARCH}"
            fi
        done
        # Check for bitcode where not expected
        local ARCHS=("i386")
        for ARCH in ${ARCHS[*]}
        do
            local REZ=$($OTOOL_B -arch ${ARCH} -l "${LIB_BIN}" | $GREP_B LLVM)
            if [ "$REZ" != "" ]; then
                echo "ERROR: Found bitcode slice for ${ARCH}"
                VALID=0
            else
                echo " GOOD: Did not find bitcode slice for ${ARCH}"
            fi
        done
        local EXPECTING=("${IOS_BUILD_DIR}/${FRAMEWORK}/Modules/module.modulemap")
        for EXPECT in ${EXPECTING[*]}
        do
            if [ -f "${EXPECT}" ]; then
                echo " GOOD: Found expected file: \"${EXPECT}\""
            else
                # BUG FIX: message read "Did not file expected file".
                echo "ERROR: Did not find expected file: \"${EXPECT}\""
                VALID=0
            fi
        done
    else
        echo "ERROR: \"${LIB_BIN}\" not found. Please be sure it has been built (see README.md)"
        VALID=0
    fi
    if [ $VALID -ne 1 ]; then
        fail "Invalid framework"
    fi
}
# Remove all iOS build artifacts (headers, include dirs, libs, build dir).
# The macOS cleanup is currently disabled (commented out). Command echoing
# is enabled around the removals unless DEBUG already keeps it on.
function clean()
{
# echo "Cleaning macOS..."
# set -x
# $RM_B "${MAC_HEADER_DEST}"
# $RM_B -rf "${MAC_INCLUDES_DIR}"
# $RM_B -rf "${MAC_LIB_DIR}"
# $RM_B -rf "${MAC_BUILD_DIR}"
# [ $DEBUG -ne 1 ] && set +x
echo "Cleaning iOS..."
set -x
$RM_B "${IOS_HEADER_DEST}"
$RM_B -rf "${IOS_INCLUDES_DIR}"
$RM_B -rf "${IOS_LIB_DIR}"
$RM_B -rf "${IOS_BUILD_DIR}"
[ $DEBUG -ne 1 ] && set +x
echo "Clean complete"
}
# Entry point: strict mode, fully-qualified tool paths, then dispatch the
# first argument to the matching sub-command; anything else prints usage.
DEBUG=${DEBUG:-0}
export DEBUG
set -eu
[ $DEBUG -ne 0 ] && set -x
# Fully qualified binaries (_B suffix to prevent collisions)
RM_B="/bin/rm"
GREP_B="/usr/bin/grep"
LIPO_B="/usr/bin/lipo"
OTOOL_B="/usr/bin/otool"
if [[ $# -eq 0 ]]; then
usage
fi
command="$1"
shift
# Each sub-command accepts no extra arguments; anything more shows usage.
case $command in
build)
if [[ $# -le 0 ]]; then
build
else
usage
fi
;;
header)
if [[ $# -le 0 ]]; then
header
else
usage
fi
;;
valid)
if [[ $# -le 0 ]]; then
valid
else
usage
fi
;;
clean)
if [[ $# -le 0 ]]; then
clean
else
usage
fi
;;
*)
# Unknown option
usage
;;
esac
echo "Script done"
<gh_stars>100-1000
package dev.webfx.kit.mapper.peers.javafxgraphics.markers;
import javafx.beans.property.Property;
import javafx.scene.layout.Border;
/**
 * Mixin interface for objects exposing a JavaFX {@link Border} as a
 * {@link Property}, with default accessor implementations delegating to it.
 *
 * @author <NAME>
 */
public interface HasBorderProperty {

    Property<Border> borderProperty();

    default Border getBorder() {
        return borderProperty().getValue();
    }

    default void setBorder(Border border) {
        borderProperty().setValue(border);
    }
}
|
<gh_stars>1-10
// Copyright (c) 2020, Battelle Memorial Institute
// All rights reserved.
// 1. Battelle Memorial Institute (hereinafter Battelle) hereby grants
// permission to any person or entity lawfully obtaining a copy of this
// software and associated documentation files (hereinafter "the Software")
// to redistribute and use the Software in source and binary forms, with or
// without modification. Such person or entity may use, copy, modify, merge,
// publish, distribute, sublicense, and/or sell copies of the Software, and
// may permit others to do so, subject to the following conditions:
// - Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimers.
// - Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// - Other than as used herein, neither the name Battelle Memorial Institute
// or Battelle may be used in any form whatsoever without the express
// written consent of Battelle.
// 2. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL BATTELLE OR CONTRIBUTORS BE LIABLE FOR
// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
// OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
// DAMAGE.
// The views and conclusions contained in the software and documentation are those
// of the authors and should not be interpreted as representing official policies,
// either expressed or implied, of the FreeBSD Project.
// This material was prepared as an account of work sponsored by an agency of the
// United States Government. Neither the United States Government nor the United
// States Department of Energy, nor Battelle, nor any of their employees, nor any
// jurisdiction or organization that has cooperated in the development of these
// materials, makes any warranty, express or implied, or assumes any legal
// liability or responsibility for the accuracy, completeness, or usefulness or
// any information, apparatus, product, software, or process disclosed, or
// represents that its use would not infringe privately owned rights.
// Reference herein to any specific commercial product, process, or service by
// trade name, trademark, manufacturer, or otherwise does not necessarily
// constitute or imply its endorsement, recommendation, or favoring by the
// United States Government or any agency thereof, or Battelle Memorial Institute.
// The views and opinions of authors expressed herein do not necessarily state or
// reflect those of the United States Government or any agency thereof.
// PACIFIC NORTHWEST NATIONAL LABORATORY
// operated by
// BATTELLE for the UNITED STATES DEPARTMENT OF ENERGY
// under Contract DE-AC05-76RL01830
import { cdf, pmf, quantile } from "@stdlib/stats/base/dists/binomial";
import _ from "lodash";
import {
all,
dark,
faults,
inconclusive,
lighter,
likely,
primary,
unitOff,
} from "./palette";
// State descriptor: analysis data entirely absent.
const noData = {
name: "no-data",
label: "No Analysis Only",
alt: "No Analysis",
single: "No Analysis",
abbr: "Empty",
color: primary,
isType: (v) => _.get(getType(v), "name") === "no-data",
show: (f) => _.includes(["no-data"], _.get(f, "name")),
};
// State descriptor: data falls outside the selected date range.
const outsideRange = {
name: "outside-range",
label: "Outside Date Range Only",
alt: "Outside Date Range",
single: "Outside Date Range",
abbr: "Outside",
color: lighter,
isType: (v) => _.get(getType(v), "name") === "outside-range",
show: (f) => _.includes(["outside-range"], _.get(f, "name")),
};
// State descriptor: a detected fault.
const fault = {
name: "faults",
label: "Faults Only",
alt: "Faults",
single: "Fault",
abbr: "Fault",
color: faults,
isType: (v) => _.get(getType(v), "name") === "faults",
show: (f) =>
_.includes(["faults", "no-data", "outside-range"], _.get(f, "name")),
};
// State descriptor: analysis was inconclusive.
const incon = {
name: "inconclusive",
label: "Inconclusive Only",
alt: "Inconclusive",
single: "Inconclusive",
abbr: "Incon",
color: inconclusive,
isType: (v) => _.get(getType(v), "name") === "inconclusive",
show: (f) =>
_.includes(["inconclusive", "no-data", "outside-range"], _.get(f, "name")),
};
// State descriptor: the unit was off during the analysis window.
const off = {
name: "unit-off",
label: "Unit Off Only",
alt: "Off Time",
single: "Unit Off",
abbr: "Unit Off",
color: unitOff,
isType: (v) => _.get(getType(v), "name") === "unit-off",
show: (f) =>
_.includes(["unit-off", "no-data", "outside-range"], _.get(f, "name")),
};
// State descriptor: analysis passed with no fault.
const okay = {
name: "okay",
label: "Okay Only",
alt: "Okay",
single: "Okay",
abbr: "Okay",
color: dark,
isType: (v) => _.get(getType(v), "name") === "okay",
show: (f) =>
_.includes(["okay", "no-data", "outside-range"], _.get(f, "name")),
};
// Selectable filter values: the individual states plus two combined views
// ("aggregate" = likely state, "all" = every state).
const values = [
fault,
incon,
off,
// uncomment to view and filter okay messages if available
okay,
// combined states
{
name: "aggregate",
label: "Likely State",
alt: "Likely",
single: "Likely",
abbr: "Likely",
color: likely,
isType: (v) => _.includes(["faults", "okay"], _.get(getType(v), "name")),
show: (f) =>
_.includes(
["faults", "inconclusive", "okay", "outside-range"],
_.get(f, "name")
),
},
{
name: "all",
label: "All States",
alt: "All",
single: "One",
abbr: "All",
color: all,
isType: (v) => getType(v) !== null,
show: (f) =>
_.includes(
[
"faults",
"inconclusive",
"okay",
"unit-off",
"no-data",
"outside-range",
],
_.get(f, "name")
),
},
];
// Map a raw status value to a state descriptor, or null when unrecognized.
// Per the switch below, the first fractional digit encodes the state
// (x.0 okay, x.1 fault, x.2 inconclusive, x.3 unit off) and -99.0 is a
// special inconclusive marker.
const getType = (value) => {
let s = value;
if (_.isString(value)) {
// NOTE(review): parseInt drops the fractional digit that the regex
// below relies on, and the "-99.0" case compares a number against a
// string — numeric *strings* likely fall through to null. Verify the
// intended input types (parseFloat may have been meant).
s = parseInt(value);
}
if (_.isNumber(value)) {
// Normalize numbers to one decimal place, e.g. -99 -> "-99.0".
s = value.toFixed(1);
}
switch (s) {
case "-99.0":
return incon;
default:
// continue
}
// Extract the first group of fractional digits, e.g. "12.3" -> "3".
const t = _.get(/-?\d+\.(\d+)/.exec(s), "1");
switch (t) {
case "0":
return okay;
case "1":
return fault;
case "2":
return incon;
case "3":
return off;
default:
return null;
}
};
// Resolve a state descriptor from a numeric index into `values`, or from
// any of its string aliases (name, label, alt, single, abbr, or color),
// matched case-insensitively.
const parse = function(value) {
  if (_.isNumber(value)) {
    return values[value];
  }
  const needle = _.isString(value) ? value.toLowerCase() : value;
  const candidates = _.concat(values, [okay, noData, outsideRange]);
  return candidates.find(
    (operation) =>
      operation.name === needle ||
      operation.label.toLowerCase() === needle ||
      operation.alt.toLowerCase() === needle ||
      operation.single.toLowerCase() === needle ||
      operation.abbr.toLowerCase() === needle ||
      operation.color.toLowerCase() === needle
  );
};
// Normalize a count that may arrive as an array (use its length), a
// numeric string (parse it), or a plain number (pass through).
const getCount = (v) => {
  if (Array.isArray(v)) {
    return v.length;
  }
  return _.isString(v) ? parseInt(v) : v;
};
// Two-sided binomial quantile interval at confidence level `a` for `n`
// trials with success probability `p`; returns [lower, upper].
const interval = (a, n, p) => {
  const lowerTail = (1.0 - a) / 2;
  const upperTail = (1.0 + a) / 2;
  return [quantile(lowerTail, n, p), quantile(upperTail, n, p)];
};
// Derive an aggregate state from error/passed counts using binomial
// statistics: with more than `min` samples, compare the CDF at the
// observed error count against the PMF at the lower bound of the 95%
// quantile interval. NOTE(review): the `y <= c` comparison is an unusual
// test construction — confirm the intended statistics before tightening.
const aggregate = (errors, passed) => {
const min = 5;
const p = 0.5;
const a = 0.95;
const x = getCount(errors);
const n = x + getCount(passed);
const c = cdf(x, n, p);
const i = interval(a, n, p)[0];
const y = pmf(i, n, p);
if (n > min) {
if (y <= c) {
return fault;
} else {
return okay;
}
} else {
// Too few samples to commit to a verdict.
return incon;
}
};
export default { values, okay, getType, parse, aggregate };
|
#!/bin/bash
# Launches the robot pipeline in three xterms: keyboard teleop, AMCL
# localization, then the pick_objects node, with initialization delays
# between stages. Blocks until all three terminate.
# Launch turtlebot_teleop node
xterm -hold -e "roslaunch turtlebot_teleop keyboard_teleop.launch" &
pid1=$!
# Wait for the teleop node to initialize
sleep 5
# Launch amcl node
xterm -hold -e "roslaunch my_robot amcl.launch" &
pid2=$!
# Wait for the amcl node to initialize
sleep 10
# Run pick_objects_node
xterm -hold -e "rosrun pick_objects pick_objects_node" &
pid3=$!
# Wait for all processes to complete
wait $pid1 $pid2 $pid3
#!/bin/bash
# author: Liang Gong
# Demonstrates a directory-traversal vulnerability in canvas-designer:
# starts the vulnerable server, runs the attack script against it, then
# kills the server.
# Pick the node binary name for the current platform.
if [ "$(uname)" == "Darwin" ]; then
# under Mac OS X platform
NODE='node'
elif [ "$(expr substr $(uname -s) 1 5)" == "Linux" ]; then
# under GNU/Linux platform
NODE='nodejs'
fi
cd directory-traversal/canvas-designer
RED='\033[0;31m'
BLUE='\033[0;34m'
GREEN='\033[0;32m'
NC='\033[0m' # No Color
# start the server
echo -e "\t[${GREEN}start vulnerable server${NC}]: ${BLUE}canvas-designer${NC}"
$NODE test.js >/dev/null 2>&1 &
vulnpid=$!
# wait for the server to get started
sleep 1.5s
echo -e "\t[${GREEN}server root directory${NC}]: `pwd`"
# utilize directory traversal to get files outside the working directory
# trigger directory traversal issues: send a request to retrieve the confidential file outside the working directory
$NODE attack.js
# kill the vulnerable npm package's process
kill -9 $vulnpid
package com.bebel.bdd.dao;
import com.bebel.bdd.dto.SamhainDto;
import com.bebel.soclews.util.Logger;
import org.springframework.stereotype.Repository;
import java.util.HashMap;
import java.util.Map;
/**
 * DAO for reading and writing Samhain game saves keyed by username.
 */
@Repository
public class SamhainDao extends AbstractDao {

    private final Logger log = new Logger(getClass());

    public SamhainDao() {
    }

    /** Fetches the stored save string for the given user. */
    public String getSave(final String username) {
        log.info("Recuperation de la sauvegarde pour : " + username);
        final Map<String, String> queryParams = new HashMap<>();
        queryParams.put("username", username);
        return unique("SELECT t.save FROM SamhainDto t WHERE t.username = :username", queryParams, String.class);
    }

    /** Persists the given save string for the user. */
    public void save(final String username, final String save) {
        log.info("Sauvegarde de : " + save + " pour : " + username);
        final SamhainDto saveDto = new SamhainDto();
        saveDto.setUsername(username);
        saveDto.setSave(save);
        save(saveDto);
    }
}
|
class ProjectMetadata:
    """Holds basic project metadata and can persist it to a file."""

    def __init__(self, thinker, category, name):
        # Author, subject area, and project title.
        self.thinker = thinker
        self.category = category
        self.name = name

    def write_metadata_to_file(self, file_name):
        """Write the metadata as KEY = "value" lines to ``file_name``."""
        lines = (
            'THINKER = "{}"'.format(self.thinker),
            'CATEGORY = "{}"'.format(self.category),
            'PROJECT_NAME = "{}"'.format(self.name),
        )
        with open(file_name, "w") as f:
            f.write("\n".join(lines))
# Example usage
# NOTE: runs at import time and writes project_metadata.txt into the
# current working directory.
metadata = ProjectMetadata("John Doe", "Data Science", "Data Analysis Project")
metadata.write_metadata_to_file("project_metadata.txt")
"""
Generate code to create a random 10x10 matrix of zeroes and ones
"""
import random
matrix_size = 10
matrix = [[random.choice([0, 1]) for i in range(matrix_size)] for j in range(matrix_size)]
print(matrix) |
<filename>UVa/uva 10550.cpp
#include <bits/stdc++.h>
#define endl '\n'
using namespace std;

// UVa 10550: for each input line of four dial positions (terminated by an
// all-zero line), print the total rotation in degrees: a fixed 1080
// (presumably three full 360-degree turns) plus 9 degrees per tick for
// each of the three moves, wrapping on a 40-tick dial.
int main() {
    ios::sync_with_stdio(false);
    cin.tie(0);
    int a, b, c, d;
    // Read cases until the 0 0 0 0 terminator.
    while (cin >> a >> b >> c >> d, a || b || c || d) {
        // FIX: removed the unused local `ans` from the original.
        cout << 1080 + 9 * (a - b < 0 ? a - b + 40 : a - b)
                     + 9 * (c - b < 0 ? c - b + 40 : c - b)
                     + 9 * (c - d < 0 ? c - d + 40 : c - d) << endl;
    }
}
|
const { GenericContainer } = require('testcontainers');
const MySqlDriver = require('../driver/MySqlDriver');
const version = process.env.TEST_MYSQL_VERSION || '5.7';
const startContainer = async () => {
const builder = new GenericContainer(`mysql:${version}`)
.withEnv('MYSQL_ROOT_PASSWORD', process.env.TEST_DB_PASSWORD || '<PASSWORD>')
.withExposedPorts(3306);
if (version.split('.')[0] === '8') {
/**
* workaround for MySQL 8 and unsupported auth in mysql package
* @link https://github.com/mysqljs/mysql/pull/2233
*/
builder.withCmd('--default-authentication-plugin=mysql_native_password');
}
return builder.start();
};
const createDriver = (c) => new MySqlDriver({
host: c.getHost(),
user: 'root',
password: process.env.TEST_DB_PASSWORD || '<PASSWORD>',
port: c.getMappedPort(3306),
database: 'mysql',
});
module.exports = {
startContainer,
createDriver,
};
|
#!/bin/sh -l
set -e
# Publishes built documentation to the gh-pages branch: always refreshes
# `latest/`, and for stable releases also snapshots under the version number.
echo "Stable: $STABLE"
VERSION=`python -c 'import fsps;print(fsps.__version__)'`
echo "Version: $VERSION"
# Build the docs
cd docs
make dirhtml
# Update the gh-pages branch
git clone --branch=gh-pages https://github.com/$GITHUB_REPOSITORY _output
cd _output
rm -rf latest
mkdir -p latest
cp -r ../_build/dirhtml/* latest/
git add latest
# Stable releases additionally get a versioned snapshot alongside `latest`.
if [ "$STABLE" = "true" ]; then
rm -rf $VERSION
cp -r latest $VERSION
git add $VERSION
fi
# Push the results to GitHub
# The commit only succeeds when something changed; push authenticates with
# the access token embedded in the remote URL.
if git -c user.name='gh-actions' -c user.email='gh-actions' commit -m "Updated docs [ci skip]"; then
git push --force https://x-access-token:$GITHUB_TOKEN@github.com/$GITHUB_REPOSITORY gh-pages
else
echo "No changes"
fi
| <-- | 9 2 7 4 1 |
|
| 2 | | | | | |
|
| 7 | | | | | |
|
| 4 | 2 | | | |
|
| 1 | 2 4 | | |
|
| | 1 2 4 7 | |
|
| | <-- | 1 2 4 7 9 | |
#!/bin/bash
# CI driver setup: strict mode, tool/location defaults overridable from the
# environment, and the ordered list of build targets.
set -eo pipefail
set +x
set -u
# Default to the help sub-command when invoked without arguments.
if [ $# -eq 0 ]; then
set -- "help"
fi
export BUILDIFIER_BIN="${BUILDIFIER_BIN:=/usr/local/bin/buildifier}"
export BUILDOZER_BIN="${BUILDOZER_BIN:=/usr/local/bin/buildozer}"
export NUM_CPUS=${NUM_CPUS:=$(grep -c ^processor /proc/cpuinfo)}
export CIRCLECI=${CIRCLECI:=""}
export BAZEL_EXTRA_TEST_OPTIONS=${BAZEL_EXTRA_TEST_OPTIONS:=""}
export BAZEL_OPTIONS=${BAZEL_OPTIONS:=""}
export BAZEL_BUILD_EXTRA_OPTIONS=${BAZEL_BUILD_EXTRA_OPTIONS:=""}
export SRCDIR=${SRCDIR:="${PWD}"}
export CLANG_FORMAT=clang-format
export NIGHTHAWK_BUILD_ARCH=$(uname -m)
# We build in steps to avoid running out of memory in CI.
# This list doesn't have to be complete, execution of bazel test will build any
# remaining targets.
# The order matters here, dependencies are placed before dependents.
BUILD_PARTS=(
"//api/..."
"//source/exe/..."
"//source/server/..."
"//source/request_source/..."
"//source/adaptive_load/..."
"//test/mocks/..."
"//test/..."
)
#######################################
# Runs the specified command on all the BUILD_PARTS.
# Arguments:
#   The command to execute, each part will be appended as the last argument to
#   this command.
# Returns:
#   0 on success, exits with return code 1 on failure.
#######################################
function run_on_build_parts() {
local command="$1"
# Quote the array expansion so each part stays a single word even if it
# ever contains whitespace (the unquoted form relies on word splitting).
for part in "${BUILD_PARTS[@]}"; do
echo "run_on_build_parts: running command $command $part"
eval "$command $part"
# NOTE(review): with `set -e` active a failing eval normally exits the
# script before this check; kept as a defensive fallback.
if (( $? != 0 )); then
echo "Error executing $command $part."
exit 1
fi
done
}
function do_build () {
# Builds the main nighthawk target and verifies that the CLI documentation
# checked into the repo is up to date.
bazel build $BAZEL_BUILD_OPTIONS //:nighthawk
tools/update_cli_readme_documentation.sh --mode check
}
function do_opt_build () {
# Builds optimized (-c opt) binaries, with gperftools tcmalloc, for the main
# nighthawk target and the benchmarking suite.
bazel build $BAZEL_BUILD_OPTIONS -c opt --define tcmalloc=gperftools //:nighthawk
bazel build $BAZEL_BUILD_OPTIONS -c opt --define tcmalloc=gperftools //benchmarks:benchmarks
}
function do_test() {
# Builds (in parts, to bound memory usage) and then runs all tests in debug
# mode, forwarding the CI environment variable into the bazel actions.
# The environment variable CI is used to determine if some expensive tests
# that cannot run locally should be executed.
# E.g. test_http_h1_mini_stress_test_open_loop.
run_on_build_parts "bazel build -c dbg $BAZEL_BUILD_OPTIONS --action_env=CI"
bazel test -c dbg $BAZEL_TEST_OPTIONS --test_output=all --action_env=CI //test/...
}
function do_clang_tidy() {
# Runs clang-tidy via the helper script with --config=clang prepended to the
# build options and a reduced parallelism of 4.
# clang-tidy will warn on standard library issues with libc++
# NOTE(review): BAZEL_BUILD_OPTIONS is a scalar string elsewhere in this
# script; expanding it with [@] here yields that string as one array element,
# and [*] below re-joins it — net effect is prepending "--config=clang ".
# Confirm this mixing of scalar/array usage is intentional.
BAZEL_BUILD_OPTIONS=("--config=clang" "${BAZEL_BUILD_OPTIONS[@]}")
BAZEL_BUILD_OPTIONS="${BAZEL_BUILD_OPTIONS[*]}" NUM_CPUS=4 ci/run_clang_tidy.sh
}
function do_unit_test_coverage() {
# Runs unit-test coverage (everything except the python integration tests)
# and enforces the threshold; exits the script when done.
export TEST_TARGETS="//test/... -//test:python_test"
# TODO(https://github.com/envoyproxy/nighthawk/issues/747): Increase back to 93.2 when coverage flakiness is addressed.
export COVERAGE_THRESHOLD=92
echo "bazel coverage build with tests ${TEST_TARGETS}"
test/run_nighthawk_bazel_coverage.sh ${TEST_TARGETS}
exit 0
}
function do_integration_test_coverage() {
# Runs coverage for the python integration tests only and enforces the
# threshold; exits the script when done.
export TEST_TARGETS="//test:python_test"
# TODO(#564): Revert this to 78.6
export COVERAGE_THRESHOLD=75.0
echo "bazel coverage build with tests ${TEST_TARGETS}"
test/run_nighthawk_bazel_coverage.sh ${TEST_TARGETS}
exit 0
}
function setup_gcc_toolchain() {
# Selects gcc/g++ as the compiler toolchain for bazel.
export CC=gcc
export CXX=g++
export BAZEL_COMPILER=gcc
# On aarch64 hosts, enable the crypto extensions for both builds and tests.
if [[ "${NIGHTHAWK_BUILD_ARCH}" == "aarch64" ]]; then
BAZEL_BUILD_OPTIONS="$BAZEL_BUILD_OPTIONS --copt -march=armv8-a+crypto"
BAZEL_TEST_OPTIONS="$BAZEL_TEST_OPTIONS --copt -march=armv8-a+crypto"
fi
echo "$CC/$CXX toolchain configured"
}
function setup_clang_toolchain() {
# Puts the LLVM tools first on PATH and selects clang/clang++ for bazel.
export PATH=/opt/llvm/bin:$PATH
export CC=clang
export CXX=clang++
# Needed for symbolized reports from sanitizer builds.
export ASAN_SYMBOLIZER_PATH=/opt/llvm/bin/llvm-symbolizer
export BAZEL_COMPILER=clang
echo "$CC/$CXX toolchain configured"
}
function run_bazel() {
# Runs bazel with the given arguments, teeing output to a log file so failed
# test logs can be collected afterwards; exits with bazel's status on failure.
declare -r BAZEL_OUTPUT="${SRCDIR}"/bazel.output.txt
declare BAZEL_STATUS=0
# Capture the pipeline status via `|| ...`: with `set -eo pipefail` in effect
# (set at the top of this script), a bare failing `bazel | tee` would abort
# the script before the log-collection block below ever ran. "$@" preserves
# the argument words exactly as passed, unlike the unquoted $*.
bazel "$@" | tee "${BAZEL_OUTPUT}" || BAZEL_STATUS="$?"
if [ "${BAZEL_STATUS}" != "0" ]
then
declare -r FAILED_TEST_LOGS="$(grep " /build.*test.log" "${BAZEL_OUTPUT}" | sed -e 's/ \/build.*\/testlogs\/\(.*\)/\1/')"
cd bazel-testlogs
for f in ${FAILED_TEST_LOGS}
do
echo "Failed test log ${f}"
# NOTE(review): ENVOY_FAILED_TEST_LOGS is not defined anywhere in this
# script; with `set -u` this line errors unless the CI environment
# provides it — confirm against the CI configuration.
cp --parents -f $f "${ENVOY_FAILED_TEST_LOGS}"
done
exit "${BAZEL_STATUS}"
fi
}
function do_sanitizer() {
# Builds and runs all tests in debug mode under the given sanitizer config
# (e.g. "clang-asan" or "clang-tsan").
CONFIG="$1"
echo "bazel $CONFIG debug build with tests"
echo "Building and testing Nighthawk tests..."
cd "${SRCDIR}"
# We build this in steps to avoid running out of memory in CI
run_on_build_parts "run_bazel build ${BAZEL_TEST_OPTIONS} -c dbg --config=$CONFIG --"
run_bazel test ${BAZEL_TEST_OPTIONS} -c dbg --config="$CONFIG" -- //test/...
}
function cleanup_benchmark_artifacts {
# Removes scratch tmp.* entries that other tooling dropped into TMPDIR.
# TODO(oschaaf): we clean the tmp dir above from uninteresting stuff
# that crept into the tmp/output directory. The cruft gets in there because
# other tooling also responds to the TMPDIR environment variable, which in retrospect
# was a bad choice.
# Consider using a different environment variable for the benchmark tooling
# to use for this.
# Guard: only delete when TMPDIR is an existing path longer than 4 chars, so
# a short or empty value can never expand into a destructive rm. Expansions
# are quoted so whitespace in the path cannot split the words.
size=${#TMPDIR}
if [ "$size" -gt 4 ] && [ -d "${TMPDIR}" ]; then
rm -rf "${TMPDIR}"/tmp.*
fi
}
function do_benchmark_with_own_binaries() {
# Runs the benchmark framework against locally built (opt, tcmalloc) binaries,
# dropping artifacts under ${SRCDIR}/generated and cleaning them up on exit.
echo "Running benchmark framework with own binaries"
cd "${SRCDIR}"
# Benchmark artifacts will be dropped into this directory:
export TMPDIR="${SRCDIR}/generated"
mkdir -p "${TMPDIR}"
trap cleanup_benchmark_artifacts EXIT
# Heap profiling/checking is disabled for benchmark runs.
run_bazel test ${BAZEL_TEST_OPTIONS} --test_summary=detailed \
--test_arg=--log-cli-level=info \
--test_env=HEAPPROFILE= \
--test_env=HEAPCHECK= \
--compilation_mode=opt \
--cxxopt=-g \
--cxxopt=-ggdb3 \
--define tcmalloc=gperftools \
//benchmarks:*
}
function do_check_format() {
# Verifies formatting (C++/bazel and python) without modifying anything.
echo "check_format..."
cd "${SRCDIR}"
./tools/check_format.sh check
./tools/format_python_tools.sh check
}
function do_docker() {
# Builds the opt binaries, then builds and pushes the docker images for the
# main binaries and the benchmark suite.
echo "docker..."
cd "${SRCDIR}"
# Note that we implicitly test the opt build in CI here
do_opt_build
./ci/docker/docker_build.sh
./ci/docker/docker_push.sh
./ci/docker/benchmark_build.sh
./ci/docker/benchmark_push.sh
}
function do_fix_format() {
# Rewrites files in place to fix formatting (C++/bazel and python).
echo "fix_format..."
cd "${SRCDIR}"
local mode="fix"
./tools/check_format.sh "${mode}"
./tools/format_python_tools.sh "${mode}"
}
# Container detection: set up a fake HOME and build directories when running
# inside docker/lxc.
# NOTE(review): grep without -q prints the matching cgroup lines to stdout;
# confirm whether that output is wanted, or add -q.
if grep 'docker\|lxc' /proc/1/cgroup; then
# Create a fake home. Python site libs tries to do getpwuid(3) if we don't and the CI
# Docker image gets confused as it has no passwd entry when running non-root
# unless we do this.
FAKE_HOME=/tmp/fake_home
mkdir -p "${FAKE_HOME}"
export HOME="${FAKE_HOME}"
export PYTHONUSERBASE="${FAKE_HOME}"
export BUILD_DIR=/build
if [[ ! -d "${BUILD_DIR}" ]]
then
echo "${BUILD_DIR} mount missing - did you forget -v <something>:${BUILD_DIR}? Creating."
mkdir -p "${BUILD_DIR}"
fi
# Environment setup.
export USER=bazel
export TEST_TMPDIR=/build/tmp
export BAZEL="bazel"
fi
# Restrict tests to IPv4 in addition to any caller-supplied test options.
export BAZEL_EXTRA_TEST_OPTIONS="--test_env=ENVOY_IP_TEST_VERSIONS=v4only ${BAZEL_EXTRA_TEST_OPTIONS}"
# Base bazel build flags; BAZEL_OPTIONS / BAZEL_BUILD_EXTRA_OPTIONS come from
# the environment (defaulted above).
export BAZEL_BUILD_OPTIONS=" \
--verbose_failures ${BAZEL_OPTIONS} --action_env=HOME --action_env=PYTHONUSERBASE \
--experimental_local_memory_estimate \
--show_task_finish --experimental_generate_json_trace_profile ${BAZEL_BUILD_EXTRA_OPTIONS}"
# CircleCI-specific tuning: stash any gitconfig, cap parallelism, and reduce
# bazel's memory footprint for the gcc job.
if [ -n "$CIRCLECI" ]; then
if [[ -f "${HOME:-/root}/.gitconfig" ]]; then
mv "${HOME:-/root}/.gitconfig" "${HOME:-/root}/.gitconfig_save"
echo 1
fi
NUM_CPUS=8
if [[ "$1" == "test_gcc" ]]; then
NUM_CPUS=2
BAZEL_BUILD_OPTIONS="${BAZEL_BUILD_OPTIONS} \
--discard_analysis_cache --notrack_incremental_state --nokeep_state_after_build"
fi
echo "Running with ${NUM_CPUS} cpus and BAZEL_BUILD_OPTIONS: ${BAZEL_BUILD_OPTIONS}"
BAZEL_BUILD_OPTIONS="${BAZEL_BUILD_OPTIONS} --jobs=${NUM_CPUS}"
fi
# Test flags build on the build flags; caching is disabled so every run is fresh.
export BAZEL_TEST_OPTIONS="${BAZEL_BUILD_OPTIONS} --test_env=HOME --test_env=PYTHONUSERBASE \
--test_env=UBSAN_OPTIONS=print_stacktrace=1 \
--cache_test_results=no --test_output=all ${BAZEL_EXTRA_TEST_OPTIONS}"
# Command dispatch: each branch configures a toolchain, runs one CI task, and
# exits; unknown commands print the supported list and fail.
case "$1" in
build)
setup_clang_toolchain
do_build
exit 0
;;
test)
setup_clang_toolchain
do_test
exit 0
;;
test_gcc)
setup_gcc_toolchain
do_test
exit 0
;;
clang_tidy)
setup_clang_toolchain
RUN_FULL_CLANG_TIDY=1 do_clang_tidy
exit 0
;;
coverage)
setup_clang_toolchain
do_unit_test_coverage
exit 0
;;
coverage_integration)
setup_clang_toolchain
do_integration_test_coverage
exit 0
;;
asan)
setup_clang_toolchain
do_sanitizer "clang-asan"
exit 0
;;
tsan)
setup_clang_toolchain
do_sanitizer "clang-tsan"
exit 0
;;
docker)
setup_clang_toolchain
do_docker
exit 0
;;
check_format)
setup_clang_toolchain
do_check_format
exit 0
;;
fix_format)
setup_clang_toolchain
do_fix_format
exit 0
;;
benchmark_with_own_binaries)
setup_clang_toolchain
do_benchmark_with_own_binaries
exit 0
;;
opt_build)
setup_clang_toolchain
do_opt_build
exit 0
;;
*)
# Unknown command (including the "help" default): print usage and fail.
echo "must be one of [opt_build, build,test,clang_tidy,coverage,coverage_integration,asan,tsan,benchmark_with_own_binaries,docker,check_format,fix_format,test_gcc]"
exit 1
;;
esac
|
#!/bin/sh
# Launches the service client with the current directory on PYTHONPATH so the
# svcClient package is importable.
# Quote $PWD: in POSIX sh, an unquoted expansion in an `export` argument is
# subject to field splitting, so a working directory containing spaces would
# break the assignment.
# NOTE(review): this overwrites any pre-existing PYTHONPATH — confirm that is
# intended rather than prepending to it.
export PYTHONPATH="$PWD"
python3 svcClient/run.py
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.