text stringlengths 1 1.05M |
|---|
/* Copyright (c) 1997-2004 <NAME>.
* For information on usage and redistribution, and for a DISCLAIMER OF ALL
* WARRANTIES, see the file, "LICENSE.txt," in this distribution. */
/*
* this file implements a mechanism for storing and retrieving preferences.
* Should later be renamed "preferences.c" or something.
*
* In unix this is handled by the "~/.pdsettings" file, in windows by
* the registry, and in MacOS by the Preferences system.
*/
#include "m_pd.h"
#include "s_stuff.h"
#include <string.h>
#include <stdlib.h>
#include <stdio.h>
#include <errno.h>
#ifdef HAVE_UNISTD_H
#include <sys/types.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <unistd.h>
#endif
#ifdef _WIN32
#include <windows.h>
#include <tchar.h>
#include <io.h>
#endif
#ifdef _MSC_VER /* This is only for Microsoft's compiler, not cygwin, e.g. */
#define snprintf _snprintf
#endif
#ifdef __APPLE__ /* needed for plist handling */
#include <CoreFoundation/CoreFoundation.h>
#endif
void sys_doflags(void);
static PERTHREAD char *sys_prefbuf;
static PERTHREAD int sys_prefbufsize;
static PERTHREAD FILE *sys_prefsavefp;
/* slurp the whole preferences file into sys_prefbuf for later key lookups.
   The buffer gets a leading '\n' so the first line can be found with the
   "\nkey:" search pattern, and a trailing 0 terminator.
   NOTE(review): the file is opened with mode 0 (read-only but without
   O_BINARY); on Windows, text-mode CRLF translation could make read()
   return fewer bytes than the lseek()-determined length -- verify. */
static void sys_initloadpreferences_file(const char *filename)
{
    int fd;
    long length;
    if ((fd = open(filename, 0)) < 0)
    {
        if (sys_verbose)
            perror(filename);
        return;
    }
        /* determine file size by seeking to the end (whence 2 = SEEK_END) */
    length = lseek(fd, 0, 2);
    if (length < 0)
    {
        if (sys_verbose)
            perror(filename);
        close(fd);
        return;
    }
    lseek(fd, 0, 0);    /* rewind (whence 0 = SEEK_SET) */
    if (!(sys_prefbuf = malloc(length + 2)))
    {
        pd_error(0, "couldn't allocate memory for preferences buffer");
        close(fd);
        return;
    }
    sys_prefbuf[0] = '\n';      /* sentinel so "\nkey:" matches line 1 too */
    if (read(fd, sys_prefbuf+1, length) < length)
    {
        perror(filename);
        sys_prefbuf[0] = 0;     /* empty the buffer; it is freed later */
        close(fd);
        return;
    }
    sys_prefbuf[length+1] = 0;  /* NUL-terminate the whole buffer */
    close(fd);
    verbose(PD_VERBOSE, "success reading preferences from: %s", filename);
}
/* look up 'key' in the loaded preferences buffer and copy its value
   (the rest of the line after "key:", with leading blanks skipped) into
   'value', truncated to at most size-1 characters plus a terminator.
   Returns 1 on success, 0 if the buffer isn't loaded or the key is absent.
   Fixes vs. the previous version: the search pattern is built with
   snprintf (a long key used to overflow searchfor[80]), and the copy is
   clamped so the terminating 0 always lands inside value[] (the old
   clamping logic could write value[size], one byte out of bounds). */
static int sys_getpreference_file(const char *key, char *value, int size)
{
    char searchfor[80], *where, *whereend;
    int len;
    if (!sys_prefbuf || size <= 0)
        return (0);
    if (snprintf(searchfor, sizeof(searchfor), "\n%s:", key) >=
        (int)sizeof(searchfor))
            return (0);     /* key too long to match reliably */
    where = strstr(sys_prefbuf, searchfor);
    if (!where)
        return (0);
    where += strlen(searchfor);
        /* skip leading blanks after the colon */
    while (*where == ' ' || *where == '\t')
        where++;
        /* the value runs to the end of the line (or of the buffer) */
    for (whereend = where; *whereend && *whereend != '\n'; whereend++)
        ;
    len = (int)(whereend - where);
    if (len > size - 1)
        len = size - 1;     /* truncate, leaving room for the terminator */
    memcpy(value, where, len);
    value[len] = 0;
    return (1);
}
static void sys_doneloadpreferences_file(void)
{
if (sys_prefbuf)
free(sys_prefbuf);
}
/* open the named settings file for (over)writing.  On failure the OS
   error is reported and sys_prefsavefp stays null, which makes all
   subsequent sys_putpreference_file() calls no-ops. */
static void sys_initsavepreferences_file(const char *filename)
{
    sys_prefsavefp = fopen(filename, "w");
    if (!sys_prefsavefp)
        pd_error(0, "%s: %s", filename, strerror(errno));
}
/* append one "key: value" line to the open settings file; silently
   ignored when no file is currently open for writing */
static void sys_putpreference_file(const char *key, const char *value)
{
    if (!sys_prefsavefp)
        return;
    fprintf(sys_prefsavefp, "%s: %s\n", key, value);
}
/* close the settings file (if one is open) and forget the handle */
static void sys_donesavepreferences_file(void)
{
    if (!sys_prefsavefp)
        return;
    fclose(sys_prefsavefp);
    sys_prefsavefp = 0;
}
#if defined(__APPLE__)
/***** macos: read and write to ~/Library/Preferences plist file ******/
static PERTHREAD CFMutableDictionaryRef sys_prefdict = NULL;
// get preferences file load path into dst, returns 1 if embedded.
// Prefers the per-user plist in ~/Library/Preferences; falls back to the
// copy embedded in the app bundle.  Fixes vs. the previous version:
// a NULL HOME environment no longer reaches "%s" (undefined behavior),
// and dst is always NUL-terminated (strncpy didn't guarantee that).
static int preferences_getloadpath(char *dst, size_t size)
{
    char embedded_prefs[MAXPDSTRING];
    char user_prefs[MAXPDSTRING];
    const char *homedir = getenv("HOME");
    struct stat statbuf;
    snprintf(embedded_prefs, MAXPDSTRING, "%s/../org.puredata.pd",
        sys_libdir->s_name);
    snprintf(user_prefs, MAXPDSTRING,
        "%s/Library/Preferences/org.puredata.pd.plist",
        (homedir ? homedir : "."));
    if (stat(user_prefs, &statbuf) == 0)
    {
        snprintf(dst, size, "%s", user_prefs);  /* always terminated */
        return 0;
    }
    else
    {
        snprintf(dst, size, "%s", embedded_prefs);
        return 1;
    }
}
// get preferences file save path (always the per-user plist).
// Fixes vs. the previous version: a NULL HOME environment no longer
// reaches "%s", and dst is always NUL-terminated.
static void preferences_getsavepath(char *dst, size_t size)
{
    char user_prefs[MAXPDSTRING];
    const char *homedir = getenv("HOME");
    snprintf(user_prefs, MAXPDSTRING,
        "%s/Library/Preferences/org.puredata.pd.plist",
        (homedir ? homedir : "."));
    snprintf(dst, size, "%s", user_prefs);
}
/* load the preferences plist into sys_prefdict for subsequent
   sys_getpreference() lookups; released again in sys_doneloadpreferences().
   Fixes vs. the previous version: the extra CFRetain on the freshly
   Created plist leaked it on every load (the Create rule already confers
   ownership), and a NULL result from CFStringGetCStringPtr is no longer
   passed to "%s". */
static void sys_initloadpreferences(void)
{
    char user_prefs[MAXPDSTRING];
    CFStringRef path = NULL;
    CFURLRef fileURL = NULL;
    CFReadStreamRef stream = NULL;
    CFErrorRef err = NULL;
    CFPropertyListRef plist = NULL;
    if (sys_prefbuf || sys_prefdict)
    {
        bug("sys_initloadpreferences");
        return;
    }
        // open read stream
    preferences_getloadpath(user_prefs, MAXPDSTRING);
    path = CFStringCreateWithCStringNoCopy(kCFAllocatorDefault, user_prefs,
        kCFStringEncodingUTF8, kCFAllocatorNull);
    fileURL = CFURLCreateWithFileSystemPath(kCFAllocatorDefault, path,
        kCFURLPOSIXPathStyle, false); // false -> not a directory
    stream = CFReadStreamCreateWithFile(kCFAllocatorDefault, fileURL);
    if (!stream || !CFReadStreamOpen(stream)) goto cleanup;
        // read plist
    plist = CFPropertyListCreateWithStream(kCFAllocatorDefault, stream, 0,
        kCFPropertyListImmutable, NULL, &err);
    if (!plist) {
        CFStringRef errString = CFErrorCopyDescription(err);
        const char *errCStr = errString ?
            CFStringGetCStringPtr(errString, kCFStringEncodingUTF8) : NULL;
        pd_error(0, "couldn't read preferences plist: %s",
            errCStr ? errCStr : "(unknown error)");
        if (errString) CFRelease(errString);
        goto cleanup;
    }
        /* we own 'plist' per the Create rule, so no CFRetain is needed;
           the immutable plist behind the mutable typedef is only ever
           read through sys_getpreference() */
    sys_prefdict = (CFMutableDictionaryRef)plist;
cleanup:
    if (stream) {
        if (CFReadStreamGetStatus(stream) == kCFStreamStatusOpen) {
            CFReadStreamClose(stream);
        }
        CFRelease(stream);
    }
    if (fileURL) {CFRelease(fileURL);}
    if (path) {CFRelease(path);}
    if (err) {CFRelease(err);}
}
/* finish loading preferences: free the file buffer and/or release the
   plist dictionary, whichever is active */
static void sys_doneloadpreferences(void)
{
    if (sys_prefbuf)
        sys_doneloadpreferences_file();
    if (sys_prefdict)
    {
        CFRelease(sys_prefdict);
        sys_prefdict = NULL;
    }
}
/* begin saving: subsequent sys_putpreference() calls collect entries in
   an in-memory dictionary that sys_donesavepreferences() writes out */
static void sys_initsavepreferences(void)
{
    if (sys_prefsavefp)
    {
        bug("sys_initsavepreferences");
        return;
    }
    sys_prefdict = CFDictionaryCreateMutable(kCFAllocatorDefault, 0,
        &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);
}
/* flush saved settings out: close the settings file if one is open, then
   serialize sys_prefdict (if any) as a binary plist and write it to the
   per-user preferences path.  The dictionary is always released at the
   end, whether or not the write succeeded. */
static void sys_donesavepreferences(void)
{
    char user_prefs[MAXPDSTRING];
    CFStringRef path = NULL;
    CFURLRef fileURL = NULL;
    CFWriteStreamRef stream = NULL;
    CFErrorRef err = NULL;
    CFDataRef data = NULL;
    if (sys_prefsavefp)
        sys_donesavepreferences_file();
    if (!sys_prefdict) return;
        // convert dict to plist data
    data = CFPropertyListCreateData(kCFAllocatorDefault,
        (CFPropertyListRef)sys_prefdict,
        kCFPropertyListBinaryFormat_v1_0, 0, &err);
    if (!data)
    {
        CFStringRef errString = CFErrorCopyDescription(err);
            /* NOTE(review): CFStringGetCStringPtr may legally return
               NULL, which shouldn't be passed to "%s" -- verify */
        pd_error(0, "couldn't write preferences plist: %s",
            CFStringGetCStringPtr(errString, kCFStringEncodingUTF8));
        CFRelease(errString);
        goto cleanup;
    }
        // open write stream
    preferences_getsavepath(user_prefs, MAXPDSTRING);
    path = CFStringCreateWithCStringNoCopy(kCFAllocatorDefault, user_prefs,
        kCFStringEncodingUTF8, kCFAllocatorNull);
    fileURL = CFURLCreateWithFileSystemPath(kCFAllocatorDefault, path,
        kCFURLPOSIXPathStyle, false); // false -> not a directory
    stream = CFWriteStreamCreateWithFile(kCFAllocatorDefault, fileURL);
    if (!stream || !CFWriteStreamOpen(stream)) goto cleanup;
        // write plist
    if (CFWriteStreamWrite(stream, CFDataGetBytePtr(data),
        CFDataGetLength(data)) < 0) {
        pd_error(0, "couldn't write preferences plist");
        goto cleanup;
    }
cleanup:
        /* release everything, closing the stream first if it opened */
    if (sys_prefdict)
    {
        CFRelease(sys_prefdict);
        sys_prefdict = NULL;
    }
    if (data) {CFRelease(data);}
    if (stream) {
        if(CFWriteStreamGetStatus(stream) == kCFStreamStatusOpen) {
            CFWriteStreamClose(stream);
        }
        CFRelease(stream);
    }
    if (fileURL) {CFRelease(fileURL);}
    if (path) {CFRelease(path);}
    if (err) {CFRelease(err);}
}
/* look up one preference value: from the loaded file buffer, from the
   loaded plist dictionary, or -- as a last resort -- by shelling out to
   the "defaults" command.  Returns 1 and fills 'value' on success.
   Fixes vs. the previous version: the popen() result is checked before
   use (it could be NULL and crash fread), and the dictionary value is no
   longer CFRelease'd -- values returned by CFDictionaryGetValueIfPresent
   follow the Get rule (owned by the dictionary), so releasing one was an
   over-release.  The unused 'nleft' local is gone. */
static int sys_getpreference(const char *key, char *value, int size)
{
    if (sys_prefbuf)
        return (sys_getpreference_file(key, value, size));
    if (sys_prefdict) {
        /* read from loaded plist dict */
        CFStringRef k = CFStringCreateWithCStringNoCopy(kCFAllocatorDefault,
            key, kCFStringEncodingUTF8, kCFAllocatorNull);
        const void *v = NULL;
        int ret = 0;
        if (CFDictionaryGetValueIfPresent(sys_prefdict, k, &v) && v) {
            ret = CFStringGetCString((CFStringRef)v, value, size,
                kCFStringEncodingUTF8);
#if 0
            if (ret) fprintf(stderr, "plist read %s = %s\n", key, value);
#endif
                /* do NOT release v: it is owned by the dictionary */
        }
        CFRelease(k);
        return (ret);
    }
    else {
        /* fallback to defaults command */
        char cmdbuf[256];
        int nread = 0;
        char path[MAXPDSTRING];
        FILE *fp;
        int embedded = preferences_getloadpath(path, MAXPDSTRING);
        if (embedded)
            snprintf(cmdbuf, 256, "defaults read %s %s 2> /dev/null\n",
                path, key);
        else
            snprintf(cmdbuf, 256, "defaults read org.puredata.pd %s 2> /dev/null\n",
                key);
        if (!(fp = popen(cmdbuf, "r")))     /* popen can fail */
            return (0);
        while (nread < size)
        {
            int newread = fread(value+nread, 1, size-nread, fp);
            if (newread <= 0)
                break;
            nread += newread;
        }
        pclose(fp);
        if (nread < 1)
            return (0);
        if (nread >= size)
            nread = size-1;
        value[nread] = 0;
        if (value[nread-1] == '\n') /* remove newline character at end */
            value[nread-1] = 0;
        return (1);
    }
}
/* store one key/value pair: into the open settings file, into the
   in-memory plist dictionary, or -- as a last resort -- via the
   "defaults" command-line tool */
static void sys_putpreference(const char *key, const char *value)
{
    if (sys_prefsavefp)
    {
        sys_putpreference_file(key, value);
        return;
    }
    if (sys_prefdict) {
        CFStringRef k = CFStringCreateWithCString(kCFAllocatorDefault, key,
            kCFStringEncodingUTF8);
        CFStringRef v = CFStringCreateWithCString(kCFAllocatorDefault, value,
            kCFStringEncodingUTF8);
        CFDictionarySetValue((CFMutableDictionaryRef)sys_prefdict, k, v);
        CFRelease(k);
        CFRelease(v);
#if 0
        fprintf(stderr, "plist write %s = %s\n", key, value);
#endif
    }
    else {
        /* fallback to defaults command.
           SECURITY NOTE(review): key and value are interpolated into a
           shell command line; a value containing a double quote or other
           shell metacharacters would escape the quoting.  The values come
           from Pd's own settings rather than remote input, but this
           should be escaped or replaced with a non-shell API -- verify. */
        char cmdbuf[MAXPDSTRING];
        snprintf(cmdbuf, MAXPDSTRING,
            "defaults write org.puredata.pd %s \"%s\" 2> /dev/null\n", key, value);
        system(cmdbuf);
    }
}
#elif defined(_WIN32)
/***** windows: read and write to registry ******/
/* windows: loading from the registry needs no setup; a leftover file
   buffer here means init/done calls were not balanced */
static void sys_initloadpreferences(void)
{
    if (sys_prefbuf)
        bug("sys_initloadpreferences");
}
/* windows: only the optional file buffer needs tearing down */
static void sys_doneloadpreferences(void)
{
    if (sys_prefbuf)
        sys_doneloadpreferences_file();
}
/* windows: saving to the registry needs no setup; an already-open
   settings file here means init/done calls were not balanced */
static void sys_initsavepreferences(void)
{
    if (sys_prefsavefp)
        bug("sys_initsavepreferences");
}
/* windows: only the optional settings file needs closing */
static void sys_donesavepreferences(void)
{
    if (sys_prefsavefp)
        sys_donesavepreferences_file();
}
static int sys_getpreference(const char *key, char *value, int size)
{
if (sys_prefbuf)
return (sys_getpreference_file(key, value, size));
else
{
HKEY hkey;
DWORD bigsize = size;
LONG err = RegOpenKeyEx(HKEY_CURRENT_USER,
"Software\\Pure-Data", 0, KEY_QUERY_VALUE, &hkey);
if (err != ERROR_SUCCESS)
return (0);
err = RegQueryValueEx(hkey, key, 0, 0, value, &bigsize);
if (err != ERROR_SUCCESS)
{
RegCloseKey(hkey);
return (0);
}
RegCloseKey(hkey);
return (1);
}
}
/* write one preference: to the open settings file if any, otherwise to
   HKEY_CURRENT_USER\Software\Pure-Data, creating the key if needed.
   NOTE(review): values are stored as REG_EXPAND_SZ (environment-variable
   expansion type) although nothing here expands them -- presumably
   REG_SZ was meant; verify before changing since existing registries
   already hold REG_EXPAND_SZ values. */
static void sys_putpreference(const char *key, const char *value)
{
    if (sys_prefsavefp)
        sys_putpreference_file(key, value);
    else
    {
        HKEY hkey;
        LONG err = RegCreateKeyEx(HKEY_CURRENT_USER,
            "Software\\Pure-Data", 0, NULL, REG_OPTION_NON_VOLATILE, KEY_SET_VALUE,
            NULL, &hkey, NULL);
        if (err != ERROR_SUCCESS)
        {
            pd_error(0, "unable to create registry entry: %s\n", key);
            return;
        }
            /* store the string including its terminating NUL */
        err = RegSetValueEx(hkey, key, 0, REG_EXPAND_SZ, value, strlen(value)+1);
        if (err != ERROR_SUCCESS)
            pd_error(0, "unable to set registry entry: %s\n", key);
        RegCloseKey(hkey);
    }
}
#else
/***** linux/android/BSD etc: read and write to ~/.pdsettings file ******/
/* pick the preferences file to load: the per-user ~/.pdsettings if it
   exists, else the default.pdsettings shipped with the distribution,
   and hand it to sys_initloadpreferences_file().  The unused fd/length
   locals from the previous version are removed. */
static void sys_initloadpreferences(void)
{
    char filenamebuf[MAXPDSTRING];
    const char *homedir = getenv("HOME");
    char user_prefs_file[MAXPDSTRING]; /* user prefs file */
        /* default prefs embedded in the package */
    char default_prefs_file[MAXPDSTRING];
    struct stat statbuf;
    snprintf(default_prefs_file, MAXPDSTRING, "%s/default.pdsettings",
        sys_libdir->s_name);
    snprintf(user_prefs_file, MAXPDSTRING, "%s/.pdsettings",
        (homedir ? homedir : "."));
    if (stat(user_prefs_file, &statbuf) == 0)
        strncpy(filenamebuf, user_prefs_file, MAXPDSTRING);
    else if (stat(default_prefs_file, &statbuf) == 0)
        strncpy(filenamebuf, default_prefs_file, MAXPDSTRING);
    else return;        /* no settings anywhere: keep compiled-in defaults */
    filenamebuf[MAXPDSTRING-1] = 0;     /* strncpy may not terminate */
    sys_initloadpreferences_file(filenamebuf);
}
/* on this platform preferences are only ever read from the .pdsettings
   buffer loaded by sys_initloadpreferences() */
static int sys_getpreference(const char *key, char *value, int size)
{
    return (sys_getpreference_file(key, value, size));
}
/* free the .pdsettings buffer when loading is finished */
static void sys_doneloadpreferences(void)
{
    sys_doneloadpreferences_file();
}
/* open ~/.pdsettings for writing; without a HOME directory there is
   nowhere to save, so this silently does nothing.  The unused FILE *fp
   local from the previous version is removed. */
static void sys_initsavepreferences(void)
{
    char filenamebuf[MAXPDSTRING];
    const char *homedir = getenv("HOME");
    if (!homedir)
        return;
    snprintf(filenamebuf, MAXPDSTRING, "%s/.pdsettings", homedir);
    filenamebuf[MAXPDSTRING-1] = 0;
    sys_initsavepreferences_file(filenamebuf);
}
/* on this platform preferences are only ever written to .pdsettings */
static void sys_putpreference(const char *key, const char *value)
{
    sys_putpreference_file(key, value);
}
/* close the .pdsettings file when saving is finished */
static void sys_donesavepreferences(void)
{
    sys_donesavepreferences_file();
}
#endif
/* read the saved preferences -- either from the named file or, if
   'filename' is empty, from the platform's default preference store --
   and apply them: audio devices and parameters, MIDI devices, search
   path, startup libraries and flags, and the zoom setting.  'startingup'
   nonzero means we are in initial startup, so saved command-line flags
   are also applied via sys_doflags(). */
void sys_loadpreferences(const char *filename, int startingup)
{
    t_audiosettings as;
    int nmidiindev, midiindev[MAXMIDIINDEV];
    int nmidioutdev, midioutdev[MAXMIDIOUTDEV];
    int midiapi, nolib, maxi, i;    /* NOTE(review): nolib is unused here */
    char prefbuf[MAXPDSTRING], keybuf[80];
    sys_get_audio_settings(&as);
    if (*filename)
        sys_initloadpreferences_file(filename);
    else sys_initloadpreferences();
        /* load audio preferences */
    if (!sys_externalschedlib
        && sys_getpreference("audioapi", prefbuf, MAXPDSTRING)
        && sscanf(prefbuf, "%d", &as.a_api) < 1)
            as.a_api = -1;
        /* JMZ/MB: brackets for initializing */
    if (sys_getpreference("noaudioin", prefbuf, MAXPDSTRING) &&
        (!strcmp(prefbuf, ".") || !strcmp(prefbuf, "True")))
            as.a_nindev = 0;
    else
    {
        for (as.a_nindev = 0; as.a_nindev < MAXAUDIOINDEV; as.a_nindev++)
        {
                /* first try to find a name - if that matches an existing
                   device use it. Otherwise fall back to device number. */
            int devn;
                /* read in device number and channel count */
            sprintf(keybuf, "audioindev%d", as.a_nindev+1);
            if (!sys_getpreference(keybuf, prefbuf, MAXPDSTRING))
                break;
            if (sscanf(prefbuf, "%d %d",
                &as.a_indevvec[as.a_nindev], &as.a_chindevvec[as.a_nindev]) < 2)
                    break;
                /* possibly override device number if the device name was
                   also saved and if it matches one we have now */
            sprintf(keybuf, "audioindevname%d", as.a_nindev+1);
            if (sys_getpreference(keybuf, prefbuf, MAXPDSTRING)
                && (devn = sys_audiodevnametonumber(0, prefbuf)) >= 0)
                    as.a_indevvec[as.a_nindev] = devn;
            as.a_nindev++;
        }
            /* if no preferences at all, set -1 for default behavior */
        if (as.a_nindev == 0)
            as.a_nindev = -1;
    }
        /* JMZ/MB: brackets for initializing */
    if (sys_getpreference("noaudioout", prefbuf, MAXPDSTRING) &&
        (!strcmp(prefbuf, ".") || !strcmp(prefbuf, "True")))
            as.a_noutdev = 0;
    else
    {
        for (as.a_noutdev = 0; as.a_noutdev < MAXAUDIOOUTDEV; as.a_noutdev++)
        {
            int devn;
            sprintf(keybuf, "audiooutdev%d", as.a_noutdev+1);
            if (!sys_getpreference(keybuf, prefbuf, MAXPDSTRING))
                break;
            if (sscanf(prefbuf, "%d %d",
                &as.a_outdevvec[as.a_noutdev],
                &as.a_choutdevvec[as.a_noutdev]) < 2)
                    break;
            sprintf(keybuf, "audiooutdevname%d", as.a_noutdev+1);
            if (sys_getpreference(keybuf, prefbuf, MAXPDSTRING)
                && (devn = sys_audiodevnametonumber(1, prefbuf)) >= 0)
                    as.a_outdevvec[as.a_noutdev] = devn;
            as.a_noutdev++;
        }
        if (as.a_noutdev == 0)
            as.a_noutdev = -1;
    }
    if (sys_getpreference("rate", prefbuf, MAXPDSTRING))
        sscanf(prefbuf, "%d", &as.a_srate);
    if (sys_getpreference("audiobuf", prefbuf, MAXPDSTRING))
        sscanf(prefbuf, "%d", &as.a_advance);
    if (sys_getpreference("callback", prefbuf, MAXPDSTRING))
        sscanf(prefbuf, "%d", &as.a_callback);
    if (sys_getpreference("audioblocksize", prefbuf, MAXPDSTRING))
        sscanf(prefbuf, "%d", &as.a_blocksize);
#ifndef _WIN32
        /* fall back to the older "blocksize" key */
    else if (sys_getpreference("blocksize", prefbuf, MAXPDSTRING))
        sscanf(prefbuf, "%d", &as.a_blocksize);
#endif
    sys_set_audio_settings(&as);
        /* load MIDI preferences */
    if (sys_getpreference("midiapi", prefbuf, MAXPDSTRING)
        && sscanf(prefbuf, "%d", &midiapi) > 0)
            sys_set_midi_api(midiapi);
        /* JMZ/MB: brackets for initializing */
    if (sys_getpreference("nomidiin", prefbuf, MAXPDSTRING) &&
        (!strcmp(prefbuf, ".") || !strcmp(prefbuf, "True")))
            nmidiindev = 0;
    else for (nmidiindev = 0; nmidiindev < MAXMIDIINDEV; nmidiindev++)
    {
            /* first try to find a name - if that matches an existing device
               use it. Otherwise fall back to device number. */
        int devn;
        sprintf(keybuf, "midiindevname%d", nmidiindev+1);
        if (sys_getpreference(keybuf, prefbuf, MAXPDSTRING)
            && (devn = sys_mididevnametonumber(0, prefbuf)) >= 0)
                midiindev[nmidiindev] = devn;
        else
        {
            sprintf(keybuf, "midiindev%d", nmidiindev+1);
            if (!sys_getpreference(keybuf, prefbuf, MAXPDSTRING))
                break;
            if (sscanf(prefbuf, "%d", &midiindev[nmidiindev]) < 1)
                break;
        }
        nmidiindev++;
    }
        /* JMZ/MB: brackets for initializing */
    if (sys_getpreference("nomidiout", prefbuf, MAXPDSTRING) &&
        (!strcmp(prefbuf, ".") || !strcmp(prefbuf, "True")))
            nmidioutdev = 0;
    else for (nmidioutdev = 0; nmidioutdev < MAXMIDIOUTDEV; nmidioutdev++)
    {
        int devn;
        sprintf(keybuf, "midioutdevname%d", nmidioutdev+1);
        if (sys_getpreference(keybuf, prefbuf, MAXPDSTRING)
            && (devn = sys_mididevnametonumber(1, prefbuf)) >= 0)
                midioutdev[nmidioutdev] = devn;
        else
        {
            sprintf(keybuf, "midioutdev%d", nmidioutdev+1);
            if (!sys_getpreference(keybuf, prefbuf, MAXPDSTRING))
                break;
            if (sscanf(prefbuf, "%d", &midioutdev[nmidioutdev]) < 1)
                break;
        }
        nmidioutdev++;
    }
    sys_open_midi(nmidiindev, midiindev, nmidioutdev, midioutdev, 0);
        /* search path */
    if (sys_getpreference("npath", prefbuf, MAXPDSTRING))
        sscanf(prefbuf, "%d", &maxi);
    else maxi = 0x7fffffff;     /* no saved count: read until a key is missing */
    for (i = 0; i < maxi; i++)
    {
        sprintf(keybuf, "path%d", i+1);
        if (!sys_getpreference(keybuf, prefbuf, MAXPDSTRING))
            break;
        STUFF->st_searchpath =
            namelist_append_files(STUFF->st_searchpath, prefbuf);
    }
    if (sys_getpreference("standardpath", prefbuf, MAXPDSTRING))
        sscanf(prefbuf, "%d", &sys_usestdpath);
    if (sys_getpreference("verbose", prefbuf, MAXPDSTRING))
        sscanf(prefbuf, "%d", &sys_verbose);
        /* startup settings */
    if (sys_getpreference("nloadlib", prefbuf, MAXPDSTRING))
        sscanf(prefbuf, "%d", &maxi);
    else maxi = 0x7fffffff;
    for (i = 0; i < maxi; i++)
    {
        sprintf(keybuf, "loadlib%d", i+1);
        if (!sys_getpreference(keybuf, prefbuf, MAXPDSTRING))
            break;
        STUFF->st_externlist = namelist_append_files(STUFF->st_externlist, prefbuf);
    }
    if (sys_getpreference("defeatrt", prefbuf, MAXPDSTRING))
        sscanf(prefbuf, "%d", &sys_defeatrt);
        /* "." is the saved placeholder for "no flags" */
    if (sys_getpreference("flags", prefbuf, MAXPDSTRING) &&
        strcmp(prefbuf, "."))
    {
        sys_flags = gensym(prefbuf);
        if (startingup)
            sys_doflags();
    }
    if (sys_defeatrt)
        sys_hipriority = 0;
    else
#if defined(ANDROID)
        sys_hipriority = 0;
#else
        sys_hipriority = 1;
#endif
    if (sys_getpreference("zoom", prefbuf, MAXPDSTRING))
        sscanf(prefbuf, "%d", &sys_zoom_open);
    sys_doneloadpreferences();
}
/* write all current settings out -- to the named file, or if 'filename'
   is null/empty, to the platform's preference store: audio and MIDI
   devices (numbers, channel counts, and names), search path, startup
   libraries, flags, and zoom setting */
void sys_savepreferences(const char *filename)
{
    t_audiosettings as;
    int i;
    char buf1[MAXPDSTRING], buf2[MAXPDSTRING];
    int nmidiindev, midiindev[MAXMIDIINDEV];
    int nmidioutdev, midioutdev[MAXMIDIOUTDEV];
    if (filename && *filename)
        sys_initsavepreferences_file(filename);
    else sys_initsavepreferences();
        /* audio settings */
    sys_get_audio_settings(&as);
    sprintf(buf1, "%d", as.a_api);
    sys_putpreference("audioapi", buf1);
    sys_putpreference("noaudioin", (as.a_nindev <= 0 ? "True":"False"));
    for (i = 0; i < as.a_nindev; i++)
    {
        sprintf(buf1, "audioindev%d", i+1);
        sprintf(buf2, "%d %d", as.a_indevvec[i], as.a_chindevvec[i]);
        sys_putpreference(buf1, buf2);
            /* also save the device name so it can be matched again
               later even if the numbering changes */
        sprintf(buf1, "audioindevname%d", i+1);
        sys_audiodevnumbertoname(0, as.a_indevvec[i], buf2, MAXPDSTRING);
        if (! *buf2)
            strcat(buf2, "?");      /* placeholder for an unknown name */
        sys_putpreference(buf1, buf2);
    }
    sys_putpreference("noaudioout", (as.a_noutdev <= 0 ? "True":"False"));
    for (i = 0; i < as.a_noutdev; i++)
    {
        sprintf(buf1, "audiooutdev%d", i+1);
        sprintf(buf2, "%d %d", as.a_outdevvec[i], as.a_choutdevvec[i]);
        sys_putpreference(buf1, buf2);
        sprintf(buf1, "audiooutdevname%d", i+1);
        sys_audiodevnumbertoname(1, as.a_outdevvec[i], buf2, MAXPDSTRING);
        if (! *buf2)
            strcat(buf2, "?");
        sys_putpreference(buf1, buf2);
    }
    sprintf(buf1, "%d", as.a_advance);
    sys_putpreference("audiobuf", buf1);
    sprintf(buf1, "%d", as.a_srate);
    sys_putpreference("rate", buf1);
    sprintf(buf1, "%d", as.a_callback);
    sys_putpreference("callback", buf1);
    sprintf(buf1, "%d", as.a_blocksize);
    sys_putpreference("audioblocksize", buf1);
        /* MIDI settings */
    sprintf(buf1, "%d", sys_midiapi);
    sys_putpreference("midiapi", buf1);
    sys_get_midi_params(&nmidiindev, midiindev, &nmidioutdev, midioutdev);
    sys_putpreference("nomidiin", (nmidiindev <= 0 ? "True" : "False"));
    for (i = 0; i < nmidiindev; i++)
    {
        sprintf(buf1, "midiindev%d", i+1);
        sprintf(buf2, "%d", midiindev[i]);
        sys_putpreference(buf1, buf2);
        sprintf(buf1, "midiindevname%d", i+1);
        sys_mididevnumbertoname(0, midiindev[i], buf2, MAXPDSTRING);
        if (! *buf2)
            strcat(buf2, "?");
        sys_putpreference(buf1, buf2);
    }
    sys_putpreference("nomidiout", (nmidioutdev <= 0 ? "True" : "False"));
    for (i = 0; i < nmidioutdev; i++)
    {
        sprintf(buf1, "midioutdev%d", i+1);
        sprintf(buf2, "%d", midioutdev[i]);
        sys_putpreference(buf1, buf2);
        sprintf(buf1, "midioutdevname%d", i+1);
        sys_mididevnumbertoname(1, midioutdev[i], buf2, MAXPDSTRING);
        if (! *buf2)
            strcat(buf2, "?");
        sys_putpreference(buf1, buf2);
    }
        /* file search path */
    for (i = 0; 1; i++)
    {
        const char *pathelem = namelist_get(STUFF->st_searchpath, i);
        if (!pathelem)
            break;
        sprintf(buf1, "path%d", i+1);
        sys_putpreference(buf1, pathelem);
    }
        /* i is now the number of path entries written */
    sprintf(buf1, "%d", i);
    sys_putpreference("npath", buf1);
    sprintf(buf1, "%d", sys_usestdpath);
    sys_putpreference("standardpath", buf1);
    sprintf(buf1, "%d", sys_verbose);
    sys_putpreference("verbose", buf1);
        /* startup */
    for (i = 0; 1; i++)
    {
        const char *pathelem = namelist_get(STUFF->st_externlist, i);
        if (!pathelem)
            break;
        sprintf(buf1, "loadlib%d", i+1);
        sys_putpreference(buf1, pathelem);
    }
    sprintf(buf1, "%d", i);
    sys_putpreference("nloadlib", buf1);
    sprintf(buf1, "%d", sys_defeatrt);
    sys_putpreference("defeatrt", buf1);
    sys_putpreference("flags",
        (sys_flags ? sys_flags->s_name : ""));
        /* misc */
    sprintf(buf1, "%d", sys_zoom_open);
    sys_putpreference("zoom", buf1);
        /* clear the crash-detection flag; see sys_oktoloadfiles() */
    sys_putpreference("loading", "no");
    sys_donesavepreferences();
}
/* calls from GUI to load/save from/to a file */
/* GUI callback: load preferences from a chosen file, then restart the
   audio and MIDI subsystems so any device changes take effect */
void glob_loadpreferences(t_pd *dummy, t_symbol *filesym)
{
    sys_loadpreferences(filesym->s_name, 0);
    sys_close_audio();
    sys_reopen_audio();
    sys_close_midi();
    sys_reopen_midi();
}
/* GUI callback: save current settings to a chosen file */
void glob_savepreferences(t_pd *dummy, t_symbol *filesym)
{
    sys_savepreferences(filesym->s_name);
}
/* GUI callback: delete all saved settings, using whichever mechanism
   this platform stores them in (file, defaults database, or registry) */
void glob_forgetpreferences(t_pd *dummy)
{
#if !defined(_WIN32) && !defined(__APPLE__)
    char user_prefs_file[MAXPDSTRING]; /* user prefs file */
    const char *homedir = getenv("HOME");
    struct stat statbuf;
    snprintf(user_prefs_file, MAXPDSTRING, "%s/.pdsettings",
        (homedir ? homedir : "."));
    user_prefs_file[MAXPDSTRING-1] = 0;
    if (stat(user_prefs_file, &statbuf) != 0) {
        post("no Pd settings to clear");
    } else if (!unlink(user_prefs_file)) {
        post("removed %s file", user_prefs_file);
    } else {
        post("couldn't delete %s file: %s", user_prefs_file, strerror(errno));
    }
#endif /* !defined(_WIN32) && !defined(__APPLE__) */
#ifdef __APPLE__
    char cmdbuf[MAXPDSTRING];
    int warn = 1;
        /* if there's no "audioapi" key we assume nothing was saved
           (note the comma operator keeping this a single statement) */
    if (!sys_getpreference("audioapi", cmdbuf, MAXPDSTRING))
        post("no Pd settings to clear"), warn = 0;
        /* do it anyhow, why not... */
    snprintf(cmdbuf, MAXPDSTRING,
        "defaults delete org.puredata.pd 2> /dev/null\n");
    if (system(cmdbuf) && warn)
        post("failed to erase Pd settings");
    else if(warn) post("erased Pd settings");
#endif /* __APPLE__ */
#ifdef _WIN32
    HKEY hkey;
    if (RegOpenKeyEx(HKEY_CURRENT_USER,
        "Software", 0, KEY_QUERY_VALUE, &hkey) != ERROR_SUCCESS)
            post("no Pd settings to erase");
    else
    {
        if (RegDeleteKey(hkey, "Pure-Data") != ERROR_SUCCESS)
            post("no Pd settings to erase");
        else post("erased Pd settings");
        RegCloseKey(hkey);
    }
#endif /* _WIN32 */
}
/* crash guard for Mac/Windows: a "loading" flag is written to the
   preference store before preference-driven files are loaded (done=0)
   and cleared afterwards (done=1).  If the flag is still set from a
   previous run, Pd presumably crashed during startup, so loading is
   skipped once.  Returns 1 if it's OK to proceed.  On other platforms
   this is a no-op that always allows loading. */
int sys_oktoloadfiles(int done)
{
#if defined(_WIN32) || defined(__APPLE__)
    if (done)
    {
        sys_putpreference("loading", "no");
        return (1);
    }
    else
    {
        char prefbuf[MAXPDSTRING];
        if (sys_getpreference("loading", prefbuf, MAXPDSTRING) &&
            strcmp(prefbuf, "no"))
        {
            post(
    "skipping loading preferences... Pd seems to have crashed on startup");
            post("(re-save preferences to reinstate them)");
            return (0);
        }
        else
        {
            sys_putpreference("loading", "yes");
            return (1);
        }
    }
#else
    return (1);
#endif
}
|
import math


def hypotenuse_length(a, b):
    """Return the hypotenuse of a right triangle with leg lengths a and b.

    Uses math.hypot, which avoids the overflow/underflow that
    sqrt(a**2 + b**2) can suffer for very large or very small inputs.
    """
    return math.hypot(a, b)


length = hypotenuse_length(3, 4)
print(length)
class RequestError(Exception):
    """Raised when requesting a page from the provider fails."""
    pass
class Timeout(Exception):
    """Raised when a provider request exceeds its allotted time."""
    pass
class TorrentDetailsHandler:
    """Fetches additional details for a torrent from its provider.

    Pairs a provider object (which knows how to talk to the source site)
    with the data initially scraped for the torrent.
    """

    def __init__(self, provider, data):
        self.provider = provider
        self.data = data

    def fetch_details_data(self, timeout):
        """Return the provider's details data for this torrent.

        Raises:
            RequestError: if the underlying page request failed.
            Timeout: if the request took longer than ``timeout``.
        """
        try:
            return self.provider.fetch_details_data(timeout)
        except RequestError as e:
            # chain the original exception ("from e") so the root cause
            # is preserved in tracebacks instead of being swallowed
            raise RequestError(
                "Something went wrong requesting the search page.") from e
        except Timeout as e:
            raise Timeout("The search lasted longer than timeout.") from e
class TorrentDetails:
    """Simple value object holding arbitrary torrent attributes as a dict."""

    def __init__(self, **kwargs):
        # keep every keyword argument in a plain dict for easy merging/printing
        self.details = kwargs
# Example usage
class TorrentProvider:
    """Stub provider used by the example below."""

    def fetch_details_data(self, timeout):
        # Simulated fetching of details data
        return {"additional_info": "Some additional information"}
# wire up a provider and the initially known data, then merge the fetched
# details into a combined TorrentDetails object
provider = TorrentProvider()
initial_data = {"name": "Example Torrent", "size": "1GB"}
handler = TorrentDetailsHandler(provider, initial_data)
try:
    details = handler.fetch_details_data(10)
    # fetched keys win over initial ones on collision
    combined_details = TorrentDetails(**{**initial_data, **details})
    print(combined_details.details)
except RequestError as e:
    print(f"RequestError: {e}")
except Timeout as e:
    print(f"Timeout: {e}")
#!/bin/bash
# Copyright 2017 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -o errexit
set -o nounset
set -o pipefail

# Skip duplicate build and test runs through the CI, that occur because we are now running on osx and linux.
# Skipping these steps saves time and travis-ci resources.
# (note: with `nounset` in effect, TRAVIS_OS_NAME and the QUAY_* /
# TRAVIS_TAG variables below must be set or the script aborts)
if [ "$TRAVIS_OS_NAME" = "osx" ]; then
    exit 0
fi

export REGISTRY=quay.io/external_storage/
docker login -u "${QUAY_USERNAME}" -p "${QUAY_PASSWORD}" quay.io

# provisioner images that may be released from this repository
provisioners=(
    efs-provisioner
    cephfs-provisioner
    flex-provisioner
    glusterblock-provisioner
    glusterfile-provisioner
    glusterfs-simple-provisioner
    iscsi-controller
    local-volume-provisioner-bootstrap
    local-volume-provisioner
    nfs-client-provisioner
    nfs-provisioner
    openebs-provisioner
    rbd-provisioner
)

# a release tag looks like "<provisioner>-vX.Y.Z"; capture group 1 is the
# provisioner name and group 2 the semver tag
regex="^($(IFS=\|; echo "${provisioners[*]}"))-(v[0-9]+\.[0-9]+\.[0-9]+)$"
if [[ "${TRAVIS_TAG}" =~ $regex ]]; then
    PROVISIONER="${BASH_REMATCH[1]}"
    export VERSION="${BASH_REMATCH[2]}"
    # nfs-provisioner historically lives under kubernetes_incubator
    if [[ "${PROVISIONER}" = nfs-provisioner ]]; then
        export REGISTRY=quay.io/kubernetes_incubator/
    fi
    echo "Pushing image '${PROVISIONER}' with tags '${VERSION}' and 'latest' to '${REGISTRY}'."
    # openebs has its own build/deploy entry points driven by env vars
    if [[ "${PROVISIONER}" = openebs-provisioner ]]; then
        export DIMAGE="${REGISTRY}openebs-provisioner"
        export DNAME="${QUAY_USERNAME}"
        export DPASS="${QUAY_PASSWORD}"
        pushd openebs; make; popd
        make deploy-openebs-provisioner
    else
        make push-"${PROVISIONER}"
    fi
else
    echo "Nothing to deploy"
fi
|
<filename>local-tasks/upgrade-tempdb-runner.js
// module dependencies.
// FIX: the previous version was missing the comma after the
// `{ config }` destructuring, which made the whole const chain a
// SyntaxError when the file was loaded.
const
    rimraf = requireModule("rimraf"),
    writeTextFile = requireModule("write-text-file"),
    { run } = require("./modules/run"),
    { NugetClient } = require("node-nuget-client"),
    { updatePackageFiles, tempDbPackageName } = require("./modules/update-package-files"),
    { config } = require("./modules/config"),
    path = require("path"),
    chalk = require("chalk"),
    gulp = requireModule("gulp-with-help");
// Replaces the local copy of the tempdb runner package with the latest
// version from nuget and records the new version in modules/config.json.
gulp.task("upgrade-tempdb-runner", async () => {
    const
        output = path.dirname(__dirname),
        nuget = new NugetClient();
    // drop any previously downloaded copy of the package
    await run(
        `remove any existing ${tempDbPackageName}`,
        () => rimraf(path.join(output, `${tempDbPackageName}*`))
    );
    const dlResult = await run(
        `download latest ${tempDbPackageName}`,
        () => nuget.downloadPackage({
            packageId: tempDbPackageName,
            output
        })
    );
    console.log(chalk.yellow(` -> ${dlResult.fullName} downloaded`));
    await run(`Update package files entry`, updatePackageFiles);
    // update postinstall script
    await run(`Update config.json`,
        async () => {
            const
                configFile = path.join(__dirname, "modules", "config.json");
            config.tempDb = config.tempDb || {};
            config.tempDb.version = dlResult.version;
            await writeTextFile(configFile, JSON.stringify(config, null, 2));
        });
});
|
package be.kwakeroni.parameters.client.api.model;
/**
 * Represents a Business Parameter.
 *
 * @param <T> the Java type of the parameter's value
 */
public interface Parameter<T> {

    /**
     * Returns the name identifying this parameter.
     *
     * @return the parameter name
     */
    String getName();

    /**
     * Parses the given string representation into a value.
     *
     * @param value the string form to parse
     * @return the parsed value
     */
    T fromString(String value);

    /**
     * Renders the given value as a string.
     *
     * @param value the value to render
     * @return the string form of the value
     */
    String toString(T value);
}
|
package com.ibm.socialcrm.notesintegration.core;
/****************************************************************
* IBM OpenSource
*
* (C) Copyright IBM Corp. 2012
*
* Licensed under the Apache License v2.0
* http://www.apache.org/licenses/LICENSE-2.0
*
***************************************************************/
/**
 * Holds metadata about a document retrieved from a Connections server,
 * plus the ID of its corresponding Sugar document (set after creation).
 */
public class DocumentInfo {
    /*
     * Information retrieved from the Connections server
     */
    String documentName = null;
    String version = "1"; //$NON-NLS-1$
    String connectionsUUID = null;
    // ID of the matching document on the Sugar side; assigned via setter
    String sugarDocumentID = null;

    public DocumentInfo(String documentName, String versionI, String connectionsUUID) {
        this.documentName = documentName;
        this.version = versionI;
        this.connectionsUUID = connectionsUUID;
    }

    /** @return the document's display name */
    public String getDocumentName() {
        return documentName;
    }

    /** @return the document version string */
    public String getVersion() {
        return version;
    }

    /** @return the document's UUID on the Connections server */
    public String getConnectionsUUID() {
        return connectionsUUID;
    }

    /** @return the Sugar-side document ID, or null if not yet set */
    public String getSugarDocumentID() {
        return sugarDocumentID;
    }

    public void setSugarDocumentID(String s) {
        sugarDocumentID = s;
    }
}
<filename>libs/Util.py<gh_stars>10-100
import os
import tkinter as tk
# 支持的图片格式后缀
IMG_EXT_LIST = ['bmp', 'dib', 'rle', 'emf', 'gif',
'jpg', 'jpeg', 'jpe', 'jif', 'pcx',
'dcx', 'pic', 'png', 'tga', 'tif',
'tiff', 'xif', 'wmf', 'jfif', 'ico']
# Count the number of files inside a folder (recursively)
def count_files(dir_path):
    """Return the total number of files under *dir_path*, recursively.

    A nonexistent path yields 0 (os.walk simply produces nothing).
    """
    return sum(len(files) for _path, _subdirs, files in os.walk(dir_path))
def set_combobox_item(combobox, text, fuzzy=False):
    """Select the combobox entry matching *text*.

    With fuzzy=True a substring match is accepted; otherwise the entry
    must equal *text* exactly.  If nothing matches, the first entry is
    selected (or, for an empty combobox, index -1 is passed through).
    """
    for index, value in enumerate(combobox.cget("values")):
        if (fuzzy and text in value) or (value == text):
            combobox.current(index)
            return
    # no match: fall back to the first entry if there is one
    combobox.current(0 if len(combobox.cget("values")) else -1)
def is_sub_path(output_path, input_path):
    """Return True if *output_path* is *input_path* or lies inside it.

    Paths are normalized with os.path.abspath and backslashes unified to
    forward slashes.  Comparison is done on whole path components: the
    previous raw substring test wrongly treated sibling directories such
    as '/a/bc' as being inside '/a/b'.
    """
    input_dir = os.path.abspath(input_path).replace('\\', '/')
    output_dir = os.path.abspath(output_path).replace('\\', '/')
    if output_dir == input_dir:
        return True
    return output_dir.startswith(input_dir.rstrip('/') + '/')
# Bind a widget to its horizontal and vertical scrollbars
def set_scrollbar(widget, scrollbar_x, scrollbar_y):
    """Wire up two-way scrolling between *widget* and the two scrollbars."""
    # scrollbar drags move the widget view...
    scrollbar_y["command"] = widget.yview
    scrollbar_x["command"] = widget.xview
    # ...and widget scrolling updates the scrollbar thumbs
    widget['xscrollcommand'] = scrollbar_x.set
    widget['yscrollcommand'] = scrollbar_y.set
# Check whether a file name refers to an image
def is_img(file_name):
    """Return True if *file_name* has a supported image extension.

    The check is now case-insensitive ('.PNG' matches like '.png');
    every entry in IMG_EXT_LIST is lowercase.
    """
    return os.path.splitext(file_name)[-1][1:].lower() in IMG_EXT_LIST
# Check whether a file name refers to a text file
def is_txt(file_name):
    """Return True if *file_name* has a text-like extension.

    The check is case-insensitive ('.TXT' matches like '.txt').
    """
    txt_ext_list = ('txt', 'py', 'json')
    return os.path.splitext(file_name)[-1][1:].lower() in txt_ext_list
# Check whether a file name refers to a video file
def is_video(file_name):
    """Return True if *file_name* has a known video extension.

    Case-insensitive; the duplicate 'vob' entry of the old list is gone.
    """
    video_ext_list = ('mp4', 'm4v', 'mov', 'qt', 'avi', 'flv', 'wmv', 'asf',
                      'mpeg', 'mpg', 'vob', 'mkv', 'rm', 'rmvb', 'ts', 'dat')
    return os.path.splitext(file_name)[-1][1:].lower() in video_ext_list
# Handling of text selection (copy / cut / paste) for a text widget
class TextSection(object):
    """Clipboard helpers for a text widget.

    *master_widget* supplies the clipboard (clipboard_get / clear /
    append); *text_area* is the widget whose selection is edited.
    """

    def __init__(self, master_widget, text_area):
        self.master_widget = master_widget
        self.text_area = text_area

    def on_paste(self):
        """Replace the current selection with the clipboard contents."""
        try:
            self.text = self.master_widget.clipboard_get()
        except tk.TclError:
            # Clipboard empty or unavailable: nothing to paste.  The old
            # code fell through here and then inserted a stale (or, on
            # first use, undefined) self.text -- an AttributeError.
            return
        try:
            self.text_area.delete('sel.first', 'sel.last')
        except tk.TclError:
            # no active selection; just insert at the cursor
            pass
        self.text_area.insert(tk.INSERT, self.text)

    def on_copy(self):
        """Copy the current selection onto the clipboard."""
        self.text = self.text_area.get('sel.first', 'sel.last')
        self.master_widget.clipboard_clear()
        self.master_widget.clipboard_append(self.text)

    def on_cut(self):
        """Copy the current selection, then delete it from the widget."""
        self.on_copy()
        try:
            self.text_area.delete('sel.first', 'sel.last')
        except tk.TclError:
            pass
|
/*
* Copyright(C) 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.javacard.keymaster;
import javacard.framework.ISO7816;
import javacard.framework.ISOException;
import javacard.framework.Util;
/**
 * KMByteTag represents BYTES Tag Type from android keymaster hal specifications. The tag value of
 * this tag is the KMByteBlob pointer i.e. offset of KMByteBlob in memory heap. struct{byte
 * TAG_TYPE; short length; struct{short BYTES_TAG; short tagKey; short blobPtr}}
 */
public class KMByteTag extends KMTag {

  // Shared singleton through which heap-resident instances are accessed;
  // proto() repoints it rather than allocating per tag (Java Card pattern).
  private static KMByteTag prototype;

  // The allowed tag keys of type byte tag
  private static final short[] tags = {
      APPLICATION_ID,
      APPLICATION_DATA,
      ROOT_OF_TRUST,
      UNIQUE_ID,
      ATTESTATION_CHALLENGE,
      ATTESTATION_APPLICATION_ID,
      ATTESTATION_ID_BRAND,
      ATTESTATION_ID_DEVICE,
      ATTESTATION_ID_PRODUCT,
      ATTESTATION_ID_SERIAL,
      ATTESTATION_ID_IMEI,
      ATTESTATION_ID_MEID,
      ATTESTATION_ID_MANUFACTURER,
      ATTESTATION_ID_MODEL,
      ASSOCIATED_DATA,
      NONCE,
      CONFIRMATION_TOKEN,
      VERIFIED_BOOT_KEY,
      VERIFIED_BOOT_HASH
  };

  // Private: instances are only reachable through proto().
  private KMByteTag() {
  }

  // Lazily create the singleton, record the current heap offset in the
  // instance table, and return the singleton for chained access.
  private static KMByteTag proto(short ptr) {
    if (prototype == null) {
      prototype = new KMByteTag();
    }
    instanceTable[KM_BYTE_TAG_OFFSET] = ptr;
    return prototype;
  }

  // pointer to an empty instance used as expression
  public static short exp() {
    short blobPtr = KMByteBlob.exp();
    short ptr = instance(TAG_TYPE, (short) 6);
    Util.setShort(heap, (short) (ptr + TLV_HEADER_SIZE), BYTES_TAG);
    // INVALID_TAG as the key marks this instance as a template expression.
    Util.setShort(heap, (short) (ptr + TLV_HEADER_SIZE + 2), INVALID_TAG);
    Util.setShort(heap, (short) (ptr + TLV_HEADER_SIZE + 4), blobPtr);
    return ptr;
  }

  // Build a byte tag for the given key with an empty (expression) blob.
  // Throws SW_DATA_INVALID for keys outside the allowed list above.
  public static short instance(short key) {
    if (!validateKey(key)) {
      ISOException.throwIt(ISO7816.SW_DATA_INVALID);
    }
    return instance(key, KMByteBlob.exp());
  }

  // Build a byte tag for the given key wrapping an existing KMByteBlob.
  // Validates both the key and that byteBlob really points at a byte blob.
  public static short instance(short key, short byteBlob) {
    if (!validateKey(key)) {
      ISOException.throwIt(ISO7816.SW_DATA_INVALID);
    }
    if (heap[byteBlob] != BYTE_BLOB_TYPE) {
      ISOException.throwIt(ISO7816.SW_DATA_INVALID);
    }
    short ptr = instance(TAG_TYPE, (short) 6);
    Util.setShort(heap, (short) (ptr + TLV_HEADER_SIZE), BYTES_TAG);
    Util.setShort(heap, (short) (ptr + TLV_HEADER_SIZE + 2), key);
    Util.setShort(heap, (short) (ptr + TLV_HEADER_SIZE + 4), byteBlob);
    return ptr;
  }

  // View an existing heap object as a KMByteTag after checking that it is
  // a tag of BYTES type; throws SW_CONDITIONS_NOT_SATISFIED otherwise.
  public static KMByteTag cast(short ptr) {
    if (heap[ptr] != TAG_TYPE) {
      ISOException.throwIt(ISO7816.SW_CONDITIONS_NOT_SATISFIED);
    }
    if (Util.getShort(heap, (short) (ptr + TLV_HEADER_SIZE)) != BYTES_TAG) {
      ISOException.throwIt(ISO7816.SW_CONDITIONS_NOT_SATISFIED);
    }
    return proto(ptr);
  }

  // Tag key stored at offset TLV_HEADER_SIZE + 2 of the current instance.
  public short getKey() {
    return Util.getShort(heap, (short) (instanceTable[KM_BYTE_TAG_OFFSET] + TLV_HEADER_SIZE + 2));
  }

  public short getTagType() {
    return KMType.BYTES_TAG;
  }

  // KMByteBlob pointer stored at offset TLV_HEADER_SIZE + 4.
  public short getValue() {
    return Util.getShort(heap, (short) (instanceTable[KM_BYTE_TAG_OFFSET] + TLV_HEADER_SIZE + 4));
  }

  // Length of the wrapped KMByteBlob (not of the tag structure itself).
  public short length() {
    short blobPtr = Util.getShort(heap, (short) (instanceTable[KM_BYTE_TAG_OFFSET] + TLV_HEADER_SIZE + 4));
    return KMByteBlob.cast(blobPtr).length();
  }

  // Linear scan of the allowed-keys table; returns true iff key is allowed.
  private static boolean validateKey(short key) {
    short index = (short) tags.length;
    while (--index >= 0) {
      if (tags[index] == key) {
        return true;
      }
    }
    return false;
  }
}
|
/* sound/soc/sunxi/sunxi_daudio.c
* (C) Copyright 2015-2017
* Allwinner Technology Co., Ltd. <www.allwinnertech.com>
* <NAME> <<EMAIL>>
*
* some simple description for this code
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License as
* published by the Free Software Foundation; either version 2 of
* the License, or (at your option) any later version.
*
*/
#include <linux/init.h>
#include <linux/module.h>
#include <linux/device.h>
#include <linux/clk.h>
#include <linux/of.h>
#include <linux/of_device.h>
#include <linux/of_address.h>
#include <linux/regmap.h>
#include <linux/dma/sunxi-dma.h>
#include <linux/pinctrl/consumer.h>
#include <sound/core.h>
#include <sound/pcm.h>
#include <sound/dmaengine_pcm.h>
#include <sound/pcm_params.h>
#include <sound/initval.h>
#include <sound/soc.h>
#include "sunxi_daudio.h"
#include "sunxi_dma.h"
#define DRV_NAME "sunxi_daudio"
#define SUNXI_DAUDIO_EXTERNAL_TYPE 1
#define SUNXI_DAUDIO_TDMHDMI_TYPE 2
/*
 * Per-instance configuration, selected via the OF match table and copied
 * into driver state at probe time (see sunxi_daudio / sunxi_tdmhdmi below).
 */
struct sunxi_daudio_platform_data {
	unsigned int daudio_type;	/* SUNXI_DAUDIO_EXTERNAL_TYPE or _TDMHDMI_TYPE */
	unsigned int external_type;	/* non-zero: external codec board, uses pinctrl */
	unsigned int daudio_master;	/* fed into SND_SOC_DAIFMT_MASTER_SHIFT in init */
	unsigned int pcm_lrck_period;	/* LRCK period; register gets period-1 */
	unsigned int pcm_lrckr_period;	/* NOTE(review): unused in this file — confirm */
	unsigned int slot_width_select;	/* slot width in bits; register gets (w>>2)-1 */
	unsigned int tx_data_mode;	/* NOTE(review): unused in this file — confirm */
	unsigned int rx_data_mode;	/* NOTE(review): unused in this file — confirm */
	unsigned int audio_format;	/* base SND_SOC_DAIFMT_* format value */
	unsigned int signal_inversion;	/* fed into SND_SOC_DAIFMT_SIG_SHIFT in init */
	unsigned int frame_type;	/* written to LRCK_WIDTH bit of FMT0 */
	unsigned int tdm_config;	/* non-zero: I2S/TDM 2-channel bclk accounting */
	unsigned int tdm_num;		/* NOTE(review): unused in this file — confirm */
	unsigned int mclk_div;		/* MCLK ratio; 0 keeps MCLK output disabled */
};
/* Runtime state for one daudio (I2S/TDM/PCM) controller instance. */
struct sunxi_daudio_info {
	struct device *dev;
	struct regmap *regmap;			/* MMIO register access */
	struct clk *pllclk;			/* audio PLL, parent of moduleclk */
	struct clk *moduleclk;
	struct mutex mutex;			/* serializes hub_mode / HDMI hand-off */
	struct sunxi_dma_params playback_dma_param;
	struct sunxi_dma_params capture_dma_param;
	struct pinctrl *pinctrl;		/* held only while active (see suspend/resume) */
	struct pinctrl_state *pinstate;
	struct pinctrl_state *pinstate_sleep;
	struct sunxi_daudio_platform_data *pdata;
	unsigned int hub_mode;			/* cached HUB_EN state read in hw_params */
	unsigned int hdmi_en;			/* non-zero: drive all four SDO lanes */
};
/* Debug knob: loop TX back to RX inside the controller (checked in trigger). */
static bool daudio_loop_en;
module_param(daudio_loop_en, bool, S_IRUGO | S_IWUSR);
MODULE_PARM_DESC(daudio_loop_en, "SUNXI Digital audio loopback debug(Y=enable, N=disable)");
/*
 * Read back the "sunxi daudio audio hub mode" control.
 * Reports 2 ("hub_enable") when HUB_EN is set in FIFOCTL, else 1
 * ("hub_disable") — indices into daudio_format_function[].
 */
static int sunxi_daudio_get_hub_mode(struct snd_kcontrol *kcontrol,
			struct snd_ctl_elem_value *ucontrol)
{
	struct snd_soc_card *card = snd_kcontrol_chip(kcontrol);
	struct snd_soc_dai *dai = card->rtd->cpu_dai;
	struct sunxi_daudio_info *sunxi_daudio = snd_soc_dai_get_drvdata(dai);
	unsigned int reg_val;

	/* Fix: "&reg_val" had been mangled into the HTML entity "®_val". */
	regmap_read(sunxi_daudio->regmap, SUNXI_DAUDIO_FIFOCTL, &reg_val);
	ucontrol->value.integer.value[0] = ((reg_val & (1<<HUB_EN)) ? 2 : 1);

	return 0;
}
/*
 * Write the "audio hub mode" control: values 0 and 1 clear HUB_EN and
 * CTL_TXEN, value 2 sets both; anything else is rejected with -EINVAL.
 */
static int sunxi_daudio_set_hub_mode(struct snd_kcontrol *kcontrol,
			struct snd_ctl_elem_value *ucontrol)
{
	struct snd_soc_card *card = snd_kcontrol_chip(kcontrol);
	struct snd_soc_dai *dai = card->rtd->cpu_dai;
	struct sunxi_daudio_info *sunxi_daudio = snd_soc_dai_get_drvdata(dai);
	long sel = ucontrol->value.integer.value[0];
	unsigned int on;

	if (sel == 0 || sel == 1)
		on = 0;		/* "hub_disable" */
	else if (sel == 2)
		on = 1;		/* "hub_enable" */
	else
		return -EINVAL;

	regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_FIFOCTL,
			(1<<HUB_EN), (on<<HUB_EN));
	regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_CTL,
			(1<<CTL_TXEN), (on<<CTL_TXEN));

	return 0;
}
/* Value names for the hub-mode enum control; index 0 ("null") is unused. */
static const char *daudio_format_function[] = {"null", "hub_disable", "hub_enable"};
static const struct soc_enum daudio_format_enum[] = {
	SOC_ENUM_SINGLE_EXT(ARRAY_SIZE(daudio_format_function),
			daudio_format_function),
};

/* dts pcm Audio Mode Select */
/* NOTE(review): array is named "spdif" but serves the daudio driver —
 * presumably copied from the spdif driver; confirm before renaming. */
static const struct snd_kcontrol_new sunxi_spdif_controls[] = {
	SOC_ENUM_EXT("sunxi daudio audio hub mode", daudio_format_enum[0],
			sunxi_daudio_get_hub_mode, sunxi_daudio_set_hub_mode),
};
/*
 * Gate the transmit path: the TX block enable (skipped for HDMI, whose TX
 * clock is kept on from startup to remove) and the TX DMA request line.
 */
static void sunxi_daudio_txctrl_enable(struct sunxi_daudio_info *sunxi_daudio,
				int enable)
{
	pr_debug("Enter %s, enable %d\n", __func__, enable);
	if (enable) {
		/* HDMI audio Transmit Clock just enable at startup */
		if (sunxi_daudio->pdata->daudio_type
				!= SUNXI_DAUDIO_TDMHDMI_TYPE)
			regmap_update_bits(sunxi_daudio->regmap,
					SUNXI_DAUDIO_CTL,
					(1<<CTL_TXEN), (1<<CTL_TXEN));
		regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_INTCTL,
				(1<<TXDRQEN), (1<<TXDRQEN));
	} else {
		/* stop DMA requests first, then (conditionally) the TX block */
		regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_INTCTL,
				(1<<TXDRQEN), (0<<TXDRQEN));
		if (sunxi_daudio->pdata->daudio_type
				!= SUNXI_DAUDIO_TDMHDMI_TYPE)
			regmap_update_bits(sunxi_daudio->regmap,
					SUNXI_DAUDIO_CTL,
					(1<<CTL_TXEN), (0<<CTL_TXEN));
	}
	pr_debug("End %s, enable %d\n", __func__, enable);
}
/*
 * Gate the receive path. On enable the RX block comes up before its DMA
 * request line; on disable the order is reversed, matching the original.
 */
static void sunxi_daudio_rxctrl_enable(struct sunxi_daudio_info *sunxi_daudio,
				int enable)
{
	unsigned int on = enable ? 1 : 0;

	if (enable) {
		regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_CTL,
				(1<<CTL_RXEN), (on<<CTL_RXEN));
		regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_INTCTL,
				(1<<RXDRQEN), (on<<RXDRQEN));
	} else {
		regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_INTCTL,
				(1<<RXDRQEN), (on<<RXDRQEN));
		regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_CTL,
				(1<<CTL_RXEN), (on<<CTL_RXEN));
	}
}
/*
 * Global controller enable/disable. SDO0 is always driven; the remaining
 * three data-out lanes (SDO1..SDO3) are only used for 8-channel HDMI.
 * Always returns 0.
 */
static int sunxi_daudio_global_enable(struct sunxi_daudio_info *sunxi_daudio,
					int enable)
{
	if (enable) {
		regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_CTL,
				(1<<SDO0_EN), (1<<SDO0_EN));
		if (sunxi_daudio->hdmi_en) {
			regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_CTL,
					(1<<SDO1_EN), (1<<SDO1_EN));
			regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_CTL,
					(1<<SDO2_EN), (1<<SDO2_EN));
			regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_CTL,
					(1<<SDO3_EN), (1<<SDO3_EN));
		}
		regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_CTL,
				(1<<GLOBAL_EN), (1<<GLOBAL_EN));
	} else {
		/* drop the global enable first, then the individual lanes */
		regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_CTL,
				(1<<GLOBAL_EN), (0<<GLOBAL_EN));
		regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_CTL,
				(1<<SDO0_EN), (0<<SDO0_EN));
		if (sunxi_daudio->hdmi_en) {
			regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_CTL,
					(1<<SDO1_EN), (0<<SDO1_EN));
			regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_CTL,
					(1<<SDO2_EN), (0<<SDO2_EN));
			regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_CTL,
					(1<<SDO3_EN), (0<<SDO3_EN));
		}
	}
	return 0;
}
/*
 * Program the MCLK output divider from pdata->mclk_div. A zero mclk_div
 * keeps the MCLK output gated; an unsupported ratio returns -EINVAL.
 */
static int sunxi_daudio_mclk_setting(struct sunxi_daudio_info *sunxi_daudio)
{
	/* supported divider ratio -> register encoding */
	static const struct {
		unsigned int ratio;
		unsigned int val;
	} mclk_map[] = {
		{ 1, SUNXI_DAUDIO_MCLK_DIV_1 },   { 2, SUNXI_DAUDIO_MCLK_DIV_2 },
		{ 4, SUNXI_DAUDIO_MCLK_DIV_3 },   { 6, SUNXI_DAUDIO_MCLK_DIV_4 },
		{ 8, SUNXI_DAUDIO_MCLK_DIV_5 },   { 12, SUNXI_DAUDIO_MCLK_DIV_6 },
		{ 16, SUNXI_DAUDIO_MCLK_DIV_7 },  { 24, SUNXI_DAUDIO_MCLK_DIV_8 },
		{ 32, SUNXI_DAUDIO_MCLK_DIV_9 },  { 48, SUNXI_DAUDIO_MCLK_DIV_10 },
		{ 64, SUNXI_DAUDIO_MCLK_DIV_11 }, { 96, SUNXI_DAUDIO_MCLK_DIV_12 },
		{ 128, SUNXI_DAUDIO_MCLK_DIV_13 }, { 176, SUNXI_DAUDIO_MCLK_DIV_14 },
		{ 192, SUNXI_DAUDIO_MCLK_DIV_15 },
	};
	unsigned int i;

	if (!sunxi_daudio->pdata->mclk_div) {
		/* no divider requested: keep the MCLK pin disabled */
		regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_CLKDIV,
				(1<<MCLKOUT_EN), (0<<MCLKOUT_EN));
		return 0;
	}

	for (i = 0; i < ARRAY_SIZE(mclk_map); i++) {
		if (mclk_map[i].ratio == sunxi_daudio->pdata->mclk_div)
			break;
	}
	if (i == ARRAY_SIZE(mclk_map)) {
		dev_err(sunxi_daudio->dev, "unsupport mclk_div\n");
		return -EINVAL;
	}

	/* setting Mclk as external codec input clk */
	regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_CLKDIV,
			(SUNXI_DAUDIO_MCLK_DIV_MASK<<MCLK_DIV),
			(mclk_map[i].val<<MCLK_DIV));
	regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_CLKDIV,
			(1<<MCLKOUT_EN), (1<<MCLKOUT_EN));

	return 0;
}
static int sunxi_daudio_init_fmt(struct sunxi_daudio_info *sunxi_daudio,
unsigned int fmt)
{
unsigned int offset, mode;
unsigned int lrck_polarity, brck_polarity;
switch (fmt & SND_SOC_DAIFMT_MASTER_MASK) {
case SND_SOC_DAIFMT_CBM_CFM:
regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_CTL,
(SUNXI_DAUDIO_LRCK_OUT_MASK<<LRCK_OUT),
(SUNXI_DAUDIO_LRCK_OUT_DISABLE<<LRCK_OUT));
break;
case SND_SOC_DAIFMT_CBS_CFS:
regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_CTL,
(SUNXI_DAUDIO_LRCK_OUT_MASK<<LRCK_OUT),
(SUNXI_DAUDIO_LRCK_OUT_ENABLE<<LRCK_OUT));
break;
default:
dev_err(sunxi_daudio->dev, "unknown maser/slave format\n");
return -EINVAL;
}
switch (fmt & SND_SOC_DAIFMT_FORMAT_MASK) {
case SND_SOC_DAIFMT_I2S:
offset = SUNXI_DAUDIO_TX_OFFSET_1;
mode = SUNXI_DAUDIO_MODE_CTL_I2S;
break;
case SND_SOC_DAIFMT_RIGHT_J:
offset = SUNXI_DAUDIO_TX_OFFSET_0;
mode = SUNXI_DAUDIO_MODE_CTL_RIGHT;
break;
case SND_SOC_DAIFMT_LEFT_J:
offset = SUNXI_DAUDIO_TX_OFFSET_0;
mode = SUNXI_DAUDIO_MODE_CTL_LEFT;
break;
case SND_SOC_DAIFMT_DSP_A:
offset = SUNXI_DAUDIO_TX_OFFSET_1;
mode = SUNXI_DAUDIO_MODE_CTL_PCM;
break;
case SND_SOC_DAIFMT_DSP_B:
offset = SUNXI_DAUDIO_TX_OFFSET_0;
mode = SUNXI_DAUDIO_MODE_CTL_PCM;
break;
default:
dev_err(sunxi_daudio->dev, "format setting failed\n");
return -EINVAL;
}
regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_CTL,
(SUNXI_DAUDIO_MODE_CTL_MASK<<MODE_SEL),
(mode<<MODE_SEL));
regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_TX0CHSEL,
(SUNXI_DAUDIO_TX_OFFSET_MASK<<TX_OFFSET),
(offset<<TX_OFFSET));
if (sunxi_daudio->hdmi_en) {
regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_TX1CHSEL,
(SUNXI_DAUDIO_TX_OFFSET_MASK<<TX_OFFSET),
(offset<<TX_OFFSET));
regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_TX2CHSEL,
(SUNXI_DAUDIO_TX_OFFSET_MASK<<TX_OFFSET),
(offset<<TX_OFFSET));
regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_TX3CHSEL,
(SUNXI_DAUDIO_TX_OFFSET_MASK<<TX_OFFSET),
(offset<<TX_OFFSET));
}
#ifdef CONFIG_ARCH_SUN8IW10
regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_RXCHSEL,
(SUNXI_DAUDIO_RX_OFFSET_MASK<<RX_OFFSET),
(offset<<RX_OFFSET));
#else
regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_RXCHSEL,
(SUNXI_DAUDIO_RX_OFFSET_MASK<<RX_OFFSET),
(offset<<RX_OFFSET));
#endif
switch (fmt & SND_SOC_DAIFMT_INV_MASK) {
case SND_SOC_DAIFMT_NB_NF:
lrck_polarity = SUNXI_DAUDIO_LRCK_POLARITY_NOR;
brck_polarity = SUNXI_DAUDIO_BCLK_POLARITY_NOR;
break;
case SND_SOC_DAIFMT_NB_IF:
lrck_polarity = SUNXI_DAUDIO_LRCK_POLARITY_INV;
brck_polarity = SUNXI_DAUDIO_BCLK_POLARITY_NOR;
break;
case SND_SOC_DAIFMT_IB_NF:
lrck_polarity = SUNXI_DAUDIO_LRCK_POLARITY_NOR;
brck_polarity = SUNXI_DAUDIO_BCLK_POLARITY_INV;
break;
case SND_SOC_DAIFMT_IB_IF:
lrck_polarity = SUNXI_DAUDIO_LRCK_POLARITY_INV;
brck_polarity = SUNXI_DAUDIO_BCLK_POLARITY_INV;
break;
default:
dev_err(sunxi_daudio->dev, "invert clk setting failed\n");
return -EINVAL;
}
regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_FMT0,
(1<<LRCK_POLARITY), (lrck_polarity<<LRCK_POLARITY));
regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_FMT0,
(1<<BRCK_POLARITY), (brck_polarity<<BRCK_POLARITY));
return 0;
}
/*
 * One-time register setup from pdata: frame type, LRCK period, slot width,
 * FMT1 defaults, DAI format, then MCLK divider (whose status is returned).
 */
static int sunxi_daudio_init(struct sunxi_daudio_info *sunxi_daudio)
{
	regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_FMT0,
			(1<<LRCK_WIDTH),
			(sunxi_daudio->pdata->frame_type<<LRCK_WIDTH));
	/* hardware field holds period-1 */
	regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_FMT0,
			(SUNXI_DAUDIO_LRCK_PERIOD_MASK)<<LRCK_PERIOD,
			((sunxi_daudio->pdata->pcm_lrck_period-1)<<LRCK_PERIOD));
	/* presumably encoded as (width/4)-1, per the >>2 below — TODO confirm */
	regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_FMT0,
			(SUNXI_DAUDIO_SLOT_WIDTH_MASK<<SLOT_WIDTH),
			(((sunxi_daudio->pdata->slot_width_select>>2) - 1)<<SLOT_WIDTH));

	/*
	 * MSB on the transmit format, always be first.
	 * Default is Linear-PCM with no companding;
	 * A-law <European standard> and U-law <US-Japan> do not work reliably.
	 */
	regmap_write(sunxi_daudio->regmap, SUNXI_DAUDIO_FMT1, SUNXI_DAUDIO_FMT1_DEF);
	sunxi_daudio_init_fmt(sunxi_daudio, (sunxi_daudio->pdata->audio_format
			| (sunxi_daudio->pdata->signal_inversion<<SND_SOC_DAIFMT_SIG_SHIFT)
			| (sunxi_daudio->pdata->daudio_master<<SND_SOC_DAIFMT_MASTER_SHIFT)));

	return sunxi_daudio_mclk_setting(sunxi_daudio);
}
/*
 * Configure sample resolution, FIFO packing and channel mapping for the
 * stream described by @params, plus the HDMI-hub hand-off.
 *
 * Fixes vs. the previous revision:
 *  - "&reg_val" had been mangled into the HTML entity "®_val" in the
 *    hub-mode read-back at the bottom;
 *  - the 6-channel/8-channel TX2CHMAP0 if/else wrote the same value
 *    (0x54) on both paths and has been collapsed into one write.
 */
static int sunxi_daudio_hw_params(struct snd_pcm_substream *substream,
		struct snd_pcm_hw_params *params, struct snd_soc_dai *dai)
{
	struct sunxi_daudio_info *sunxi_daudio = snd_soc_dai_get_drvdata(dai);
	struct snd_soc_pcm_runtime *rtd = substream->private_data;
	struct snd_soc_card *card = rtd->card;
	struct sunxi_hdmi_priv *sunxi_hdmi = snd_soc_card_get_drvdata(card);
#ifndef CONFIG_ARCH_SUN8IW10
	unsigned int reg_val;
#endif

	switch (params_format(params)) {
	case SNDRV_PCM_FORMAT_S16_LE:
		/*
		 * Special processing for hdmi, HDMI card name is
		 * "sndhdmi" or sndhdmiraw. if card not HDMI,
		 * strstr func just return NULL, jump to right section.
		 * Not HDMI card, sunxi_hdmi maybe a NULL pointer.
		 */
		if (sunxi_daudio->pdata->daudio_type
				== SUNXI_DAUDIO_TDMHDMI_TYPE
				&& (sunxi_hdmi->hdmi_format > 1)) {
			regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_FMT0,
					(SUNXI_DAUDIO_SR_MASK<<SAMPLE_RESOLUTION),
					(SUNXI_DAUDIO_SR_24BIT<<SAMPLE_RESOLUTION));
			regmap_update_bits(sunxi_daudio->regmap,
					SUNXI_DAUDIO_FIFOCTL,
					(SUNXI_DAUDIO_TXIM_MASK<<TXIM),
					(SUNXI_DAUDIO_TXIM_VALID_MSB<<TXIM));
		} else {
			regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_FMT0,
					(SUNXI_DAUDIO_SR_MASK<<SAMPLE_RESOLUTION),
					(SUNXI_DAUDIO_SR_16BIT<<SAMPLE_RESOLUTION));
			if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK)
				regmap_update_bits(sunxi_daudio->regmap,
						SUNXI_DAUDIO_FIFOCTL,
						(SUNXI_DAUDIO_TXIM_MASK<<TXIM),
						(SUNXI_DAUDIO_TXIM_VALID_LSB<<TXIM));
			else
				regmap_update_bits(sunxi_daudio->regmap,
						SUNXI_DAUDIO_FIFOCTL,
						(SUNXI_DAUDIO_RXOM_MASK<<RXOM),
						(SUNXI_DAUDIO_RXOM_EXPH<<RXOM));
		}
		break;
	case SNDRV_PCM_FORMAT_S20_3LE:
	case SNDRV_PCM_FORMAT_S24_LE:
		regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_FMT0,
				(SUNXI_DAUDIO_SR_MASK<<SAMPLE_RESOLUTION),
				(SUNXI_DAUDIO_SR_24BIT<<SAMPLE_RESOLUTION));
		if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK)
			regmap_update_bits(sunxi_daudio->regmap,
					SUNXI_DAUDIO_FIFOCTL,
					(SUNXI_DAUDIO_TXIM_MASK<<TXIM),
					(SUNXI_DAUDIO_TXIM_VALID_LSB<<TXIM));
		else
			regmap_update_bits(sunxi_daudio->regmap,
					SUNXI_DAUDIO_FIFOCTL,
					(SUNXI_DAUDIO_RXOM_MASK<<RXOM),
					(SUNXI_DAUDIO_RXOM_EXPH<<RXOM));
		break;
	case SNDRV_PCM_FORMAT_S32_LE:
		regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_FMT0,
				(SUNXI_DAUDIO_SR_MASK<<SAMPLE_RESOLUTION),
				(SUNXI_DAUDIO_SR_32BIT<<SAMPLE_RESOLUTION));
		if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK)
			regmap_update_bits(sunxi_daudio->regmap,
					SUNXI_DAUDIO_FIFOCTL,
					(SUNXI_DAUDIO_TXIM_MASK<<TXIM),
					(SUNXI_DAUDIO_TXIM_VALID_LSB<<TXIM));
		else
			regmap_update_bits(sunxi_daudio->regmap,
					SUNXI_DAUDIO_FIFOCTL,
					(SUNXI_DAUDIO_RXOM_MASK<<RXOM),
					(SUNXI_DAUDIO_RXOM_EXPH<<RXOM));
		break;
	default:
		dev_err(sunxi_daudio->dev, "unrecognized format\n");
		return -EINVAL;
	}

	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
		regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_CHCFG,
				(SUNXI_DAUDIO_TX_SLOT_MASK<<TX_SLOT_NUM),
				((params_channels(params)-1)<<TX_SLOT_NUM));
		if (sunxi_daudio->hdmi_en == 0) {
#ifdef CONFIG_ARCH_SUN8IW10
			regmap_write(sunxi_daudio->regmap, SUNXI_DAUDIO_TX0CHMAP0, SUNXI_DEFAULT_CHMAP1);
			regmap_write(sunxi_daudio->regmap, SUNXI_DAUDIO_TX0CHMAP1, SUNXI_DEFAULT_CHMAP0);
#else
			regmap_write(sunxi_daudio->regmap, SUNXI_DAUDIO_TX0CHMAP0, SUNXI_DEFAULT_CHMAP0);
#endif
			regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_TX0CHSEL,
					(SUNXI_DAUDIO_TX_CHSEL_MASK<<TX_CHSEL),
					((params_channels(params)-1)<<TX_CHSEL));
			regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_TX0CHSEL,
					(SUNXI_DAUDIO_TX_CHEN_MASK<<TX_CHEN),
					((1<<params_channels(params))-1)<<TX_CHEN);
		} else {
#ifndef CONFIG_ARCH_SUN8IW10
			/* HDMI: spread up to 8 channels over the four TX lanes, two each */
			pr_info("GYY:channel num is %d\n", params_channels(params));
			regmap_write(sunxi_daudio->regmap, SUNXI_DAUDIO_TX0CHMAP0, 0x10);
			if (params_channels(params) - 2 > 0)
				regmap_write(sunxi_daudio->regmap, SUNXI_DAUDIO_TX1CHMAP0, 0x23);
			if (params_channels(params) - 4 > 0)
				regmap_write(sunxi_daudio->regmap, SUNXI_DAUDIO_TX2CHMAP0, 0x54);
			if (params_channels(params) - 6 > 0)
				regmap_write(sunxi_daudio->regmap, SUNXI_DAUDIO_TX3CHMAP0, 0x76);
			regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_TX0CHSEL,
					0x01 << TX_CHSEL, 0x01 << TX_CHSEL);
			regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_TX0CHSEL,
					0x03 << TX_CHEN, 0x03 << TX_CHEN);
			regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_TX1CHSEL,
					0x01 << TX_CHSEL, 0x01 << TX_CHSEL);
			regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_TX1CHSEL,
					(0x03)<<TX_CHEN, 0x03 << TX_CHEN);
			regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_TX2CHSEL,
					0x01 << TX_CHSEL, 0x01 << TX_CHSEL);
			regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_TX2CHSEL,
					(0x03)<<TX_CHEN, 0x03 << TX_CHEN);
			regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_TX3CHSEL,
					0x01 << TX_CHSEL, 0x01 << TX_CHSEL);
			regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_TX3CHSEL,
					(0x03)<<TX_CHEN, 0x03 << TX_CHEN);
#endif
		}
	} else {
#ifdef CONFIG_ARCH_SUN8IW10
		regmap_write(sunxi_daudio->regmap, SUNXI_DAUDIO_RXCHMAP0, SUNXI_DEFAULT_CHMAP1);
		regmap_write(sunxi_daudio->regmap, SUNXI_DAUDIO_RXCHMAP1, SUNXI_DEFAULT_CHMAP0);
#else
		regmap_write(sunxi_daudio->regmap, SUNXI_DAUDIO_RXCHMAP, SUNXI_DEFAULT_CHMAP);
#endif
		regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_CHCFG,
				(SUNXI_DAUDIO_RX_SLOT_MASK<<RX_SLOT_NUM),
				((params_channels(params)-1)<<RX_SLOT_NUM));
		regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_RXCHSEL,
				(SUNXI_DAUDIO_RX_CHSEL_MASK<<RX_CHSEL),
				((params_channels(params)-1)<<RX_CHSEL));
	}
#ifndef CONFIG_ARCH_SUN8IW10
	/* Special processing for HDMI hub playback to enable hdmi module */
	if (sunxi_daudio->pdata->daudio_type == SUNXI_DAUDIO_TDMHDMI_TYPE) {
		mutex_lock(&sunxi_daudio->mutex);
		regmap_read(sunxi_daudio->regmap,
				SUNXI_DAUDIO_FIFOCTL, &reg_val);
		sunxi_daudio->hub_mode = (reg_val & (1<<HUB_EN));
		if (sunxi_daudio->hub_mode) {
			sndhdmi_hw_params(substream, params, NULL);
			sndhdmi_prepare(substream, NULL);
		}
		mutex_unlock(&sunxi_daudio->mutex);
	}
#endif
	return 0;
}
/*
 * set_fmt DAI callback: delegate to sunxi_daudio_init_fmt().
 * Fix: propagate its result (-EINVAL on unsupported format bits) instead
 * of silently discarding it and always returning 0.
 */
static int sunxi_daudio_set_fmt(struct snd_soc_dai *dai, unsigned int fmt)
{
	struct sunxi_daudio_info *sunxi_daudio = snd_soc_dai_get_drvdata(dai);

	return sunxi_daudio_init_fmt(sunxi_daudio, fmt);
}
/*
 * set_sysclk DAI callback: retune the audio PLL to @freq.
 * @clk_id and @dir are unused. Returns -EBUSY if the rate cannot be set.
 */
static int sunxi_daudio_set_sysclk(struct snd_soc_dai *dai,
			int clk_id, unsigned int freq, int dir)
{
	struct sunxi_daudio_info *sunxi_daudio = snd_soc_dai_get_drvdata(dai);

	if (clk_set_rate(sunxi_daudio->pllclk, freq)) {
		dev_err(sunxi_daudio->dev, "set pllclk rate failed\n");
		return -EBUSY;
	}
	return 0;
}
/*
 * set_clkdiv DAI callback: derive the BCLK divider from the requested
 * clock divider and the configured LRCK period, then program it.
 * Unsupported ratios return -EINVAL.
 */
static int sunxi_daudio_set_clkdiv(struct snd_soc_dai *dai,
				int clk_id, int clk_div)
{
	struct sunxi_daudio_info *sunxi_daudio = snd_soc_dai_get_drvdata(dai);
	/* supported divider ratio -> register encoding */
	static const struct {
		unsigned int ratio;
		unsigned int val;
	} bclk_map[] = {
		{ 1, SUNXI_DAUDIO_BCLK_DIV_1 },   { 2, SUNXI_DAUDIO_BCLK_DIV_2 },
		{ 4, SUNXI_DAUDIO_BCLK_DIV_3 },   { 6, SUNXI_DAUDIO_BCLK_DIV_4 },
		{ 8, SUNXI_DAUDIO_BCLK_DIV_5 },   { 12, SUNXI_DAUDIO_BCLK_DIV_6 },
		{ 16, SUNXI_DAUDIO_BCLK_DIV_7 },  { 24, SUNXI_DAUDIO_BCLK_DIV_8 },
		{ 32, SUNXI_DAUDIO_BCLK_DIV_9 },  { 48, SUNXI_DAUDIO_BCLK_DIV_10 },
		{ 64, SUNXI_DAUDIO_BCLK_DIV_11 }, { 96, SUNXI_DAUDIO_BCLK_DIV_12 },
		{ 128, SUNXI_DAUDIO_BCLK_DIV_13 }, { 176, SUNXI_DAUDIO_BCLK_DIV_14 },
		{ 192, SUNXI_DAUDIO_BCLK_DIV_15 },
	};
	unsigned int div_ratio;
	unsigned int i;

	if (sunxi_daudio->pdata->tdm_config)
		/* I2S/TDM two channel mode */
		div_ratio = clk_div / (2 * sunxi_daudio->pdata->pcm_lrck_period);
	else
		/* PCM mode */
		div_ratio = clk_div / sunxi_daudio->pdata->pcm_lrck_period;

	for (i = 0; i < ARRAY_SIZE(bclk_map); i++) {
		if (bclk_map[i].ratio == div_ratio)
			break;
	}
	if (i == ARRAY_SIZE(bclk_map)) {
		dev_err(sunxi_daudio->dev, "unsupport clk_div\n");
		return -EINVAL;
	}

	/* setting bclk to driver external codec bit clk */
	regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_CLKDIV,
			(SUNXI_DAUDIO_BCLK_DIV_MASK<<BCLK_DIV),
			(bclk_map[i].val<<BCLK_DIV));

	return 0;
}
/*
 * startup DAI callback: hand the direction-specific DMA parameters to the
 * PCM layer and, for HDMI, turn the TX clock on early (see FIXME below).
 */
static int sunxi_daudio_dai_startup(struct snd_pcm_substream *substream,
				struct snd_soc_dai *dai)
{
	struct sunxi_daudio_info *sunxi_daudio = snd_soc_dai_get_drvdata(dai);

	/* FIXME: As HDMI module to play audio, it need at least 1100ms to sync.
	 * if we not wait we lost audio data to playback, or we wait for 1100ms
	 * to playback, user experience worst than you can imagine. So we need
	 * to cutdown that sync time by keeping clock signal on. we just enable
	 * it at startup and resume, cutdown it at remove and suspend time.
	 */
	if (sunxi_daudio->pdata->daudio_type == SUNXI_DAUDIO_TDMHDMI_TYPE)
		regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_CTL,
				(1<<CTL_TXEN), (1<<CTL_TXEN));

	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK)
		snd_soc_dai_set_dma_data(dai, substream,
				&sunxi_daudio->playback_dma_param);
	else
		snd_soc_dai_set_dma_data(dai, substream,
				&sunxi_daudio->capture_dma_param);

	return 0;
}
/*
 * trigger DAI callback: start/stop the direction-specific data path.
 * On playback start, the daudio_loop_en module parameter additionally
 * selects whether the controller's internal TX->RX loopback is enabled.
 */
static int sunxi_daudio_trigger(struct snd_pcm_substream *substream,
				int cmd, struct snd_soc_dai *dai)
{
	struct sunxi_daudio_info *sunxi_daudio = snd_soc_dai_get_drvdata(dai);

	switch (cmd) {
	case SNDRV_PCM_TRIGGER_START:
	case SNDRV_PCM_TRIGGER_RESUME:
	case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
		if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
			/* loopback is a debug aid; normally LOOP_EN is cleared */
			if (daudio_loop_en)
				regmap_update_bits(sunxi_daudio->regmap,
						SUNXI_DAUDIO_CTL,
						(1<<LOOP_EN), (1<<LOOP_EN));
			else
				regmap_update_bits(sunxi_daudio->regmap,
						SUNXI_DAUDIO_CTL,
						(1<<LOOP_EN), (0<<LOOP_EN));
			sunxi_daudio_txctrl_enable(sunxi_daudio, 1);
		} else {
			sunxi_daudio_rxctrl_enable(sunxi_daudio, 1);
		}
		break;
	case SNDRV_PCM_TRIGGER_STOP:
	case SNDRV_PCM_TRIGGER_SUSPEND:
	case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
		if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK)
			sunxi_daudio_txctrl_enable(sunxi_daudio, 0);
		else
			sunxi_daudio_rxctrl_enable(sunxi_daudio, 0);
		break;
	default:
		return -EINVAL;
	}

	return 0;
}
/*
 * prepare DAI callback: flush the FIFO and clear the sample counter for
 * whichever direction is about to run.
 */
static int sunxi_daudio_prepare(struct snd_pcm_substream *substream,
				struct snd_soc_dai *dai)
{
	struct sunxi_daudio_info *sunxi_daudio = snd_soc_dai_get_drvdata(dai);
	int is_playback = (substream->stream == SNDRV_PCM_STREAM_PLAYBACK);
	unsigned int flush_bit = is_playback ? FIFO_CTL_FTX : FIFO_CTL_FRX;
	unsigned int cnt_reg = is_playback ?
			SUNXI_DAUDIO_TXCNT : SUNXI_DAUDIO_RXCNT;

	regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_FIFOCTL,
			(1<<flush_bit), (1<<flush_bit));
	regmap_write(sunxi_daudio->regmap, cnt_reg, 0);

	return 0;
}
/*
 * DAI probe: initialize the mutex, register the hub-mode control (failure
 * is non-fatal) and program the controller's static configuration.
 */
static int sunxi_daudio_probe(struct snd_soc_dai *dai)
{
	struct sunxi_daudio_info *sunxi_daudio = snd_soc_dai_get_drvdata(dai);
	int ret;

	mutex_init(&sunxi_daudio->mutex);
	ret = snd_soc_add_card_controls(dai->card, sunxi_spdif_controls,
			ARRAY_SIZE(sunxi_spdif_controls));
	if (ret)
		dev_warn(sunxi_daudio->dev, "Failed to register hub mode control, will continue without it.\n");
	sunxi_daudio_init(sunxi_daudio);
	return 0;
}
/*
 * shutdown DAI callback: when in HDMI hub mode, also shut down the HDMI
 * module; hub_mode is protected by the instance mutex (set in hw_params).
 */
static void sunxi_daudio_shutdown(struct snd_pcm_substream *substream,
				struct snd_soc_dai *dai)
{
	struct sunxi_daudio_info *sunxi_daudio = snd_soc_dai_get_drvdata(dai);

	/* Special processing for HDMI hub playback to shutdown hdmi module */
	if (sunxi_daudio->pdata->daudio_type == SUNXI_DAUDIO_TDMHDMI_TYPE) {
		mutex_lock(&sunxi_daudio->mutex);
		if (sunxi_daudio->hub_mode)
			sndhdmi_shutdown(substream, NULL);
		mutex_unlock(&sunxi_daudio->mutex);
	}
}
/*
 * DAI remove: drop the HDMI TX clock that startup/resume keep enabled
 * (see the FIXME in sunxi_daudio_dai_startup).
 */
static int sunxi_daudio_remove(struct snd_soc_dai *dai)
{
	struct sunxi_daudio_info *sunxi_daudio = snd_soc_dai_get_drvdata(dai);

	if (sunxi_daudio->pdata->daudio_type == SUNXI_DAUDIO_TDMHDMI_TYPE)
		regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_CTL,
				(1<<CTL_TXEN), (0<<CTL_TXEN));
	return 0;
}
/*
 * DAI suspend: globally disable the controller, gate the clocks and, for
 * external-codec boards, move the pins to their sleep state and release
 * the pinctrl handle (re-acquired in resume).
 */
static int sunxi_daudio_suspend(struct snd_soc_dai *dai)
{
	struct sunxi_daudio_info *sunxi_daudio = snd_soc_dai_get_drvdata(dai);
	int ret = 0;

	pr_debug("[daudio] suspend .%s\n", dev_name(sunxi_daudio->dev));

	/* Global disable I2S/TDM module */
	sunxi_daudio_global_enable(sunxi_daudio, 0);
	/* HDMI keeps TXEN on while active (see startup); drop it for suspend */
	if (sunxi_daudio->pdata->daudio_type == SUNXI_DAUDIO_TDMHDMI_TYPE)
		regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_CTL,
				(1<<CTL_TXEN), (0<<CTL_TXEN));
	clk_disable_unprepare(sunxi_daudio->moduleclk);
	clk_disable_unprepare(sunxi_daudio->pllclk);
	if (sunxi_daudio->pdata->external_type) {
		ret = pinctrl_select_state(sunxi_daudio->pinctrl,
				sunxi_daudio->pinstate_sleep);
		if (ret) {
			pr_warn("[daudio]select pin sleep state failed\n");
			return ret;
		}
		devm_pinctrl_put(sunxi_daudio->pinctrl);
	}
	return 0;
}
/*
 * DAI resume: re-enable the clocks, re-acquire pinctrl for external-codec
 * boards, re-program the controller and globally enable it again.
 *
 * Fix: the pinctrl acquisition used to be checked with
 * IS_ERR_OR_NULL(sunxi_daudio) — testing the driver-state pointer instead
 * of the just-returned handle — so a failed devm_pinctrl_get() was never
 * detected. Now the handle itself is validated.
 */
static int sunxi_daudio_resume(struct snd_soc_dai *dai)
{
	struct sunxi_daudio_info *sunxi_daudio = snd_soc_dai_get_drvdata(dai);
	int ret;

	pr_debug("[daudio] resume .%s\n", dev_name(sunxi_daudio->dev));

	if (clk_prepare_enable(sunxi_daudio->pllclk)) {
		dev_err(sunxi_daudio->dev, "pllclk resume failed\n");
		ret = -EBUSY;
		goto err_resume_out;
	}
	if (clk_prepare_enable(sunxi_daudio->moduleclk)) {
		dev_err(sunxi_daudio->dev, "moduleclk resume failed\n");
		ret = -EBUSY;
		goto err_pllclk_disable;
	}
	if (sunxi_daudio->pdata->external_type) {
		sunxi_daudio->pinctrl = devm_pinctrl_get(sunxi_daudio->dev);
		if (IS_ERR_OR_NULL(sunxi_daudio->pinctrl)) {
			dev_err(sunxi_daudio->dev, "pinctrl resume get failed\n");
			ret = -ENOMEM;
			goto err_moduleclk_disable;
		}
		sunxi_daudio->pinstate = pinctrl_lookup_state(sunxi_daudio->pinctrl,
				PINCTRL_STATE_DEFAULT);
		if (IS_ERR_OR_NULL(sunxi_daudio->pinstate)) {
			dev_err(sunxi_daudio->dev, "pinctrl default state get failed\n");
			ret = -EINVAL;
			goto err_pinctrl_put;
		}
		sunxi_daudio->pinstate_sleep = pinctrl_lookup_state(sunxi_daudio->pinctrl,
				PINCTRL_STATE_SLEEP);
		if (IS_ERR_OR_NULL(sunxi_daudio->pinstate_sleep)) {
			dev_err(sunxi_daudio->dev, "pinctrl sleep state get failed\n");
			ret = -EINVAL;
			goto err_pinctrl_put;
		}
		ret = pinctrl_select_state(sunxi_daudio->pinctrl, sunxi_daudio->pinstate);
		if (ret)
			dev_warn(sunxi_daudio->dev,
				"digital audio set pinctrl default state failed\n");
	}
	sunxi_daudio_init(sunxi_daudio);
	/* Global enable I2S/TDM module */
	sunxi_daudio_global_enable(sunxi_daudio, 1);
	/* HDMI keeps TXEN on outside of suspend (see startup FIXME) */
	if (sunxi_daudio->pdata->daudio_type == SUNXI_DAUDIO_TDMHDMI_TYPE)
		regmap_update_bits(sunxi_daudio->regmap, SUNXI_DAUDIO_CTL,
				(1<<CTL_TXEN), (1<<CTL_TXEN));
	return 0;

err_pinctrl_put:
	devm_pinctrl_put(sunxi_daudio->pinctrl);
err_moduleclk_disable:
	clk_disable_unprepare(sunxi_daudio->moduleclk);
err_pllclk_disable:
	clk_disable_unprepare(sunxi_daudio->pllclk);
err_resume_out:
	return ret;
}
/* All standard 8 kHz-192 kHz rates plus non-standard ("knot") rates. */
#define	SUNXI_DAUDIO_RATES	(SNDRV_PCM_RATE_8000_192000 \
		| SNDRV_PCM_RATE_KNOT)

/* DAI operations wired into the ASoC core. */
static struct snd_soc_dai_ops sunxi_daudio_dai_ops = {
	.hw_params = sunxi_daudio_hw_params,
	.set_sysclk = sunxi_daudio_set_sysclk,
	.set_clkdiv = sunxi_daudio_set_clkdiv,
	.set_fmt = sunxi_daudio_set_fmt,
	.startup = sunxi_daudio_dai_startup,
	.trigger = sunxi_daudio_trigger,
	.prepare = sunxi_daudio_prepare,
	.shutdown = sunxi_daudio_shutdown,
};
/*
 * CPU DAI description: up to 8 channels in each direction, 16/20/24/32-bit
 * little-endian formats (matching the hw_params switch above).
 */
static struct snd_soc_dai_driver sunxi_daudio_dai = {
	.probe = sunxi_daudio_probe,
	.suspend = sunxi_daudio_suspend,
	.resume = sunxi_daudio_resume,
	.remove = sunxi_daudio_remove,
	.playback = {
		.channels_min = 1,
		.channels_max = 8,
		.rates = SUNXI_DAUDIO_RATES,
		.formats = SNDRV_PCM_FMTBIT_S16_LE
			| SNDRV_PCM_FMTBIT_S20_3LE
			| SNDRV_PCM_FMTBIT_S24_LE
			| SNDRV_PCM_FMTBIT_S32_LE,
	},
	.capture = {
		.channels_min = 1,
		.channels_max = 8,
		.rates = SUNXI_DAUDIO_RATES,
		.formats = SNDRV_PCM_FMTBIT_S16_LE
			| SNDRV_PCM_FMTBIT_S20_3LE
			| SNDRV_PCM_FMTBIT_S24_LE
			| SNDRV_PCM_FMTBIT_S32_LE,
	},
	.ops = &sunxi_daudio_dai_ops,
};
/* Minimal ASoC component descriptor (name only). */
static const struct snd_soc_component_driver sunxi_daudio_component = {
	.name		= DRV_NAME,
};

/* Default pdata for the generic external-codec daudio instance. */
static struct sunxi_daudio_platform_data sunxi_daudio = {
	.daudio_type = SUNXI_DAUDIO_EXTERNAL_TYPE,
	.external_type = 1,
};

/* Default pdata for the HDMI TDM instance. */
static struct sunxi_daudio_platform_data sunxi_tdmhdmi = {
	.daudio_type = SUNXI_DAUDIO_TDMHDMI_TYPE,
	.external_type = 0,
	.audio_format = 1,
	.signal_inversion = 1,
	.daudio_master = 4,
	.pcm_lrck_period = 32,
	.pcm_lrckr_period = 1,
	.slot_width_select = 32,
	.tx_data_mode = 0,
	.rx_data_mode = 0,
	.tdm_config = 1,
	.mclk_div = 0,
};

/* DT compatibles -> per-instance default pdata (copied at probe time). */
static const struct of_device_id sunxi_daudio_of_match[] = {
	{
		.compatible = "allwinner,sunxi-daudio",
		.data = &sunxi_daudio,
	},
	{
		.compatible = "allwinner,sunxi-tdmhdmi",
		.data = &sunxi_tdmhdmi,
	},
	{},
};
MODULE_DEVICE_TABLE(of, sunxi_daudio_of_match);

/* 32-bit registers at stride 4, no register cache. */
static const struct regmap_config sunxi_daudio_regmap_config = {
	.reg_bits = 32,
	.reg_stride = 4,
	.val_bits = 32,
	.max_register = SUNXI_DAUDIO_DEBUG,
	.cache_type = REGCACHE_NONE,
};
/*
 * Probe one daudio (I2S/PCM/TDM) controller instance.
 *
 * Acquisition order: driver state (devm) -> MMIO region + regmap ->
 * pll/module clocks (enabled) -> optional pinctrl (external type only) ->
 * ASoC component -> DMA platform.  Error paths unwind in reverse order
 * through the labels at the bottom.
 */
static int sunxi_daudio_dev_probe(struct platform_device *pdev)
{
	struct resource res, *memregion;
	const struct of_device_id *match;
	void __iomem *sunxi_daudio_membase;
	struct sunxi_daudio_info *sunxi_daudio;
	struct device_node *np = pdev->dev.of_node;
	unsigned int temp_val;
	int ret;

	match = of_match_device(sunxi_daudio_of_match, &pdev->dev);
	if (match) {
		sunxi_daudio = devm_kzalloc(&pdev->dev,
				sizeof(struct sunxi_daudio_info),
				GFP_KERNEL);
		if (!sunxi_daudio) {
			dev_err(&pdev->dev, "alloc sunxi_daudio failed\n");
			ret = -ENOMEM;
			goto err_node_put;
		}
		dev_set_drvdata(&pdev->dev, sunxi_daudio);
		sunxi_daudio->dev = &pdev->dev;
		/* Copy the per-compatible template; DT properties override below. */
		sunxi_daudio->pdata = devm_kzalloc(&pdev->dev,
				sizeof(struct sunxi_daudio_platform_data),
				GFP_KERNEL);
		if (!sunxi_daudio->pdata) {
			dev_err(&pdev->dev, "alloc sunxi daudio platform data failed\n");
			ret = -ENOMEM;
			goto err_devm_kfree;
		}
		memcpy(sunxi_daudio->pdata, match->data,
			sizeof(struct sunxi_daudio_platform_data));
	} else {
		dev_err(&pdev->dev, "node match failed\n");
		return -EINVAL;
	}
	ret = of_address_to_resource(np, 0, &res);
	if (ret) {
		dev_err(&pdev->dev, "parse device node resource failed\n");
		ret = -EINVAL;
		goto err_devm_kfree;
	}
	memregion = devm_request_mem_region(&pdev->dev, res.start,
					resource_size(&res), DRV_NAME);
	if (!memregion) {
		dev_err(&pdev->dev, "Memory region already claimed\n");
		ret = -EBUSY;
		goto err_devm_kfree;
	}
	sunxi_daudio_membase = ioremap(res.start, resource_size(&res));
	if (!sunxi_daudio_membase) {
		dev_err(&pdev->dev, "ioremap failed\n");
		ret = -EBUSY;
		goto err_devm_kfree;
	}
	sunxi_daudio->regmap = devm_regmap_init_mmio(&pdev->dev,
					sunxi_daudio_membase,
					&sunxi_daudio_regmap_config);
	if (IS_ERR(sunxi_daudio->regmap)) {
		dev_err(&pdev->dev, "regmap init failed\n");
		ret = PTR_ERR(sunxi_daudio->regmap);
		goto err_iounmap;
	}
	sunxi_daudio->pllclk = of_clk_get(np, 0);
	if (IS_ERR_OR_NULL(sunxi_daudio->pllclk)) {
		dev_err(&pdev->dev, "pllclk get failed\n");
		/* PTR_ERR(NULL) would be 0 (success); map NULL to -ENOENT */
		ret = sunxi_daudio->pllclk ?
			PTR_ERR(sunxi_daudio->pllclk) : -ENOENT;
		goto err_iounmap;
	}
	sunxi_daudio->moduleclk = of_clk_get(np, 1);
	if (IS_ERR_OR_NULL(sunxi_daudio->moduleclk)) {
		dev_err(&pdev->dev, "moduleclk get failed\n");
		ret = sunxi_daudio->moduleclk ?
			PTR_ERR(sunxi_daudio->moduleclk) : -ENOENT;
		goto err_pllclk_put;
	}
	if (clk_set_parent(sunxi_daudio->moduleclk, sunxi_daudio->pllclk)) {
		dev_err(&pdev->dev, "set parent of moduleclk to pllclk failed\n");
		ret = -EBUSY;
		goto err_moduleclk_put;
	}
	/* Enable results were previously ignored; a failed enable must abort. */
	ret = clk_prepare_enable(sunxi_daudio->pllclk);
	if (ret) {
		dev_err(&pdev->dev, "pllclk enable failed\n");
		goto err_moduleclk_put;
	}
	ret = clk_prepare_enable(sunxi_daudio->moduleclk);
	if (ret) {
		dev_err(&pdev->dev, "moduleclk enable failed\n");
		clk_disable_unprepare(sunxi_daudio->pllclk);
		goto err_moduleclk_put;
	}
	if (sunxi_daudio->pdata->external_type) {
		sunxi_daudio->pinctrl = devm_pinctrl_get(&pdev->dev);
		if (IS_ERR_OR_NULL(sunxi_daudio->pinctrl)) {
			dev_err(&pdev->dev, "pinctrl get failed\n");
			ret = -EINVAL;
			goto err_clk_disable;
		}
		sunxi_daudio->pinstate = pinctrl_lookup_state(
				sunxi_daudio->pinctrl, PINCTRL_STATE_DEFAULT);
		if (IS_ERR_OR_NULL(sunxi_daudio->pinstate)) {
			dev_err(&pdev->dev, "pinctrl default state get failed\n");
			ret = -EINVAL;
			goto err_pinctrl_put;
		}
		sunxi_daudio->pinstate_sleep = pinctrl_lookup_state(
				sunxi_daudio->pinctrl, PINCTRL_STATE_SLEEP);
		if (IS_ERR_OR_NULL(sunxi_daudio->pinstate_sleep)) {
			dev_err(&pdev->dev, "pinctrl sleep state get failed\n");
			ret = -EINVAL;
			goto err_pinctrl_put;
		}
	}
	switch (sunxi_daudio->pdata->daudio_type) {
	case SUNXI_DAUDIO_EXTERNAL_TYPE:
		ret = of_property_read_u32(np, "tdm_num", &temp_val);
		if (ret < 0) {
			dev_warn(&pdev->dev, "tdm configuration missing or invalid\n");
			/* warning only: fall back to controller 0 */
			sunxi_daudio->pdata->tdm_num = 0;
		} else {
			/* only controllers 0..2 exist; clamp anything else to 0 */
			if (temp_val > 2)
				sunxi_daudio->pdata->tdm_num = 0;
			else
				sunxi_daudio->pdata->tdm_num = temp_val;
		}
		sunxi_daudio->playback_dma_param.dma_addr =
				res.start + SUNXI_DAUDIO_TXFIFO;
		if (sunxi_daudio->pdata->tdm_num)
			sunxi_daudio->playback_dma_param.dma_drq_type_num =
					DRQDST_DAUDIO_1_TX;
		else
			sunxi_daudio->playback_dma_param.dma_drq_type_num =
					DRQDST_DAUDIO_0_TX;
		sunxi_daudio->playback_dma_param.src_maxburst = 4;
		sunxi_daudio->playback_dma_param.dst_maxburst = 4;
		sunxi_daudio->capture_dma_param.dma_addr =
				res.start + SUNXI_DAUDIO_RXFIFO;
		if (sunxi_daudio->pdata->tdm_num)
			sunxi_daudio->capture_dma_param.dma_drq_type_num =
					DRQSRC_DAUDIO_1_RX;
		else
			sunxi_daudio->capture_dma_param.dma_drq_type_num =
					DRQSRC_DAUDIO_0_RX;
		sunxi_daudio->capture_dma_param.src_maxburst = 4;
		sunxi_daudio->capture_dma_param.dst_maxburst = 4;
		ret = of_property_read_u32(np, "daudio_master", &temp_val);
		if (ret < 0) {
			dev_warn(&pdev->dev, "daudio_master configuration missing or invalid\n");
			/*
			 * default SND_SOC_DAIFMT_CBS_CFS mode:
			 * codec clk & FRM slave
			 */
			sunxi_daudio->pdata->daudio_master = 4;
		} else {
			sunxi_daudio->pdata->daudio_master = temp_val;
		}
		ret = of_property_read_u32(np, "pcm_lrck_period", &temp_val);
		if (ret < 0) {
			dev_warn(&pdev->dev, "pcm_lrck_period configuration missing or invalid\n");
			sunxi_daudio->pdata->pcm_lrck_period = 0;
		} else {
			sunxi_daudio->pdata->pcm_lrck_period = temp_val;
		}
		ret = of_property_read_u32(np, "slot_width_select", &temp_val);
		if (ret < 0) {
			dev_warn(&pdev->dev, "slot_width_select configuration missing or invalid\n");
			sunxi_daudio->pdata->slot_width_select = 0;
		} else {
			sunxi_daudio->pdata->slot_width_select = temp_val;
		}
		ret = of_property_read_u32(np, "tx_data_mode", &temp_val);
		if (ret < 0) {
			dev_warn(&pdev->dev, "tx_data_mode configuration missing or invalid\n");
			sunxi_daudio->pdata->tx_data_mode = 0;
		} else {
			sunxi_daudio->pdata->tx_data_mode = temp_val;
		}
		ret = of_property_read_u32(np, "rx_data_mode", &temp_val);
		if (ret < 0) {
			dev_warn(&pdev->dev, "rx_data_mode configuration missing or invalid\n");
			sunxi_daudio->pdata->rx_data_mode = 0;
		} else {
			sunxi_daudio->pdata->rx_data_mode = temp_val;
		}
		ret = of_property_read_u32(np, "audio_format", &temp_val);
		if (ret < 0) {
			dev_warn(&pdev->dev, "audio_format configuration missing or invalid\n");
			sunxi_daudio->pdata->audio_format = 1;
		} else {
			sunxi_daudio->pdata->audio_format = temp_val;
		}
		ret = of_property_read_u32(np, "signal_inversion", &temp_val);
		if (ret < 0) {
			dev_warn(&pdev->dev, "signal_inversion configuration missing or invalid\n");
			sunxi_daudio->pdata->signal_inversion = 1;
		} else {
			sunxi_daudio->pdata->signal_inversion = temp_val;
		}
		ret = of_property_read_u32(np, "tdm_config", &temp_val);
		if (ret < 0) {
			dev_warn(&pdev->dev, "tdm_config configuration missing or invalid\n");
			sunxi_daudio->pdata->tdm_config = 1;
		} else {
			sunxi_daudio->pdata->tdm_config = temp_val;
		}
		ret = of_property_read_u32(np, "mclk_div", &temp_val);
		if (ret < 0)
			sunxi_daudio->pdata->mclk_div = 0;
		else
			sunxi_daudio->pdata->mclk_div = temp_val;
		break;
	case SUNXI_DAUDIO_TDMHDMI_TYPE:
#ifndef CONFIG_ARCH_SUN8IW10
		sunxi_daudio->playback_dma_param.dma_addr =
				res.start + SUNXI_DAUDIO_TXFIFO;
		sunxi_daudio->playback_dma_param.dma_drq_type_num =
				DRQDST_DAUDIO_2_TX;
		sunxi_daudio->playback_dma_param.src_maxburst = 8;
		sunxi_daudio->playback_dma_param.dst_maxburst = 8;
		sunxi_daudio->hdmi_en = 1;
#endif
		break;
	default:
		dev_err(&pdev->dev, "missing digital audio type\n");
		ret = -EINVAL;
		/* was err_devm_kfree: leaked enabled clocks and pinctrl */
		goto err_pinctrl_put;
	}
	ret = snd_soc_register_component(&pdev->dev, &sunxi_daudio_component,
					&sunxi_daudio_dai, 1);
	if (ret) {
		dev_err(&pdev->dev, "component register failed\n");
		/* keep the real error code instead of overwriting with -ENOMEM */
		goto err_pinctrl_put;
	}
	switch (sunxi_daudio->pdata->daudio_type) {
	case SUNXI_DAUDIO_EXTERNAL_TYPE:
		ret = asoc_dma_platform_register(&pdev->dev, 0);
		if (ret) {
			dev_err(&pdev->dev, "register ASoC platform failed\n");
			goto err_unregister_component;
		}
		break;
	case SUNXI_DAUDIO_TDMHDMI_TYPE:
		ret = asoc_dma_platform_register(&pdev->dev,
				SND_DMAENGINE_PCM_FLAG_NO_RESIDUE);
		if (ret) {
			dev_err(&pdev->dev, "register ASoC platform failed\n");
			goto err_unregister_component;
		}
		break;
	default:
		dev_err(&pdev->dev, "missing digital audio type\n");
		ret = -EINVAL;
		goto err_unregister_component;
	}
	sunxi_daudio_global_enable(sunxi_daudio, 1);
	return 0;

err_unregister_component:
	snd_soc_unregister_component(&pdev->dev);
err_pinctrl_put:
	/* pinctrl is only acquired for external instances; never put NULL/ERR */
	if (sunxi_daudio->pdata->external_type &&
	    !IS_ERR_OR_NULL(sunxi_daudio->pinctrl))
		devm_pinctrl_put(sunxi_daudio->pinctrl);
err_clk_disable:
	/* balance the clk_prepare_enable() calls above */
	clk_disable_unprepare(sunxi_daudio->moduleclk);
	clk_disable_unprepare(sunxi_daudio->pllclk);
err_moduleclk_put:
	clk_put(sunxi_daudio->moduleclk);
err_pllclk_put:
	clk_put(sunxi_daudio->pllclk);
err_iounmap:
	iounmap(sunxi_daudio_membase);
err_devm_kfree:
	devm_kfree(&pdev->dev, sunxi_daudio);
err_node_put:
	/*
	 * NOTE(review): np is pdev->dev.of_node, taken without of_node_get();
	 * this put looks unbalanced -- confirm against the platform code.
	 */
	of_node_put(np);
	return ret;
}
/*
 * Remove: tear down the ASoC component and release the clocks.
 * Probe leaves both clocks prepared+enabled, so they must be disabled
 * before being put (the original leaked the enable counts).
 */
static int __exit sunxi_daudio_dev_remove(struct platform_device *pdev)
{
	struct sunxi_daudio_info *sunxi_daudio = dev_get_drvdata(&pdev->dev);

	snd_soc_unregister_component(&pdev->dev);
	/* balance clk_prepare_enable() from probe before dropping the refs */
	clk_disable_unprepare(sunxi_daudio->moduleclk);
	clk_put(sunxi_daudio->moduleclk);
	clk_disable_unprepare(sunxi_daudio->pllclk);
	clk_put(sunxi_daudio->pllclk);
	devm_kfree(&pdev->dev, sunxi_daudio);
	return 0;
}
/* Platform driver glue: bound by DT compatibles in sunxi_daudio_of_match. */
static struct platform_driver sunxi_daudio_driver = {
	.probe = sunxi_daudio_dev_probe,
	.remove = __exit_p(sunxi_daudio_dev_remove),
	.driver = {
		.name = DRV_NAME,
		.owner = THIS_MODULE,
		.of_match_table = sunxi_daudio_of_match,
	},
};
module_platform_driver(sunxi_daudio_driver);
MODULE_AUTHOR("<NAME> <<EMAIL>>");
MODULE_DESCRIPTION("SUNXI DAI AUDIO ASoC Interface");
MODULE_LICENSE("GPL");
MODULE_ALIAS("platform:sunxi-daudio");
|
package com.linkedin.gms.factory.common;
import com.linkedin.datahub.graphql.generated.VisualConfiguration;
import javax.annotation.Nonnull;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
public class VisualConfigFactory {

    /** Logo URL injected from the {@code visualConfig.assets.logoUrl} property. */
    @Value("${visualConfig.assets.logoUrl}")
    private String logoUrl;

    /**
     * Builds the "visualConfig" bean carrying UI presentation settings.
     *
     * @return a {@link VisualConfiguration} populated with the configured logo URL
     */
    @Nonnull
    @Bean(name = "visualConfig")
    protected VisualConfiguration getInstance() {
        final VisualConfiguration visualConfig = new VisualConfiguration();
        visualConfig.setLogoUrl(logoUrl);
        return visualConfig;
    }
}
|
#!/bin/bash
# Render $1/$1.svg into PNG icons at a set of standard sizes using ImageMagick.
# Usage: ./script.sh <dir-and-basename>
DIR="$1"
InputFile="$(pwd)/$DIR/$DIR.svg"
OutputBaseFile="$(pwd)/$DIR/$DIR"
pngSizes=("128x128" "16x16" "192x192" "24x24" "256x256" "32x32" "48x48" "512x512" "64x64" "96x96")
# "${pngSizes[@]}" preserves each element; [*] would re-split on IFS.
for size in "${pngSizes[@]}"
do
	echo "Generating ${OutputBaseFile}_${size}.png"
	# Quote all paths so directories containing spaces work.
	/usr/bin/convert -density 1536 -background none -resize "$size" "$InputFile" "${OutputBaseFile}_${size}.png"
done
|
#!/bin/bash
# set -ex
## Sets the following environment variables:
##
## XBV_PROJECT_VERSION -> CFBundleShortVersionString
## XBV_PROJECT_BUILD -> CFBundleVersion
## Code based on
## https://github.com/denys-meloshyn/bitrise-step-git-tag-project-version-and-build-number
## by Denys Meloshyn.
# Minimal XML pull-parser: reads up to the next '<', splitting on '>' so that
# ENTITY receives the tag name and CONTENT the text that preceded it.
# Returns non-zero at EOF, which terminates the `while read_dom` loop below.
read_dom() {
    local IFS=\>
    read -d \< ENTITY CONTENT
}
find_info_plist() {
local result=$(grep -rl 'LaunchScreen' --include 'Info.plist' --exclude-dir 'Carthage' --exclude 'Pods' . | \
awk -F: '{"date -r \""$1"\" +\"%F %R\"" | getline d; print d,$0}' | \
sort -r | \
cut -c18- | \
sed -e "s/^.\///" -e "s/^\///" | \
head -n 1)
echo "$result"
}
# Locate the first *.xcodeproj in the current directory (dependencies excluded)
# and print its path relative to CWD.
find_xcodeproj() {
    # -maxdepth is a global option and must precede tests to avoid a find warning.
    local result=$(find . -maxdepth 1 -name '*.xcodeproj' -not -path "./.*" -not -path "./Carthage/*" -not -path "./Pods/*" | \
    sed -e "s/^.\///" -e "s/^\///" | \
    head -n 1)
    echo "$result"
}
CFBundleVersion=""
CFBundleVersionKey=false
CFBundleShortVersionString=""
CFBundleShortVersionStringKey=false
if [ -z "$info_plist_path" ]; then
    # If plist_path is not defined, it tries to find it before aborting
    info_plist_path=$(find_info_plist)
    if [ -z "$info_plist_path" ]; then
        echo "info_plist_path is empty"
        exit 1
    fi
    echo "Info.plist: $info_plist_path"
fi
# Walk the plist; a <key> sets a flag so the NEXT <string> is captured.
while read_dom; do
    if [[ $CFBundleShortVersionStringKey == true ]]; then
        # Quote $ENTITY: it may be empty or contain attributes/whitespace.
        if [ "$ENTITY" = "string" ]; then
            CFBundleShortVersionString=$CONTENT
            CFBundleShortVersionStringKey=false
        fi
    fi
    if [[ $CFBundleVersionKey == true ]]; then
        if [ "$ENTITY" = "string" ]; then
            CFBundleVersion=$CONTENT
            CFBundleVersionKey=false
        fi
    fi
    if [[ $CONTENT == "CFBundleShortVersionString" ]]; then
        CFBundleShortVersionStringKey=true
    fi
    if [[ $CONTENT == "CFBundleVersion" ]]; then
        CFBundleVersionKey=true
    fi
done <"$info_plist_path"
if [ -z "$CFBundleShortVersionString" ]; then
    echo "CFBundleShortVersionString is empty"
    exit 1
fi
if [ -z "$CFBundleVersion" ]; then
    echo "CFBundleVersion is empty"
    exit 1
fi
# Newer Xcode projects store $(CURRENT_PROJECT_VERSION) in the plist and the
# real value in project.pbxproj; resolve it from there.
if [[ $CFBundleVersion == *CURRENT_PROJECT_VERSION* ]]; then
    if [ -z "$project_path" ]; then
        # If xcodeproj_path is not defined, it tries to find it before aborting
        project_path=$(find_xcodeproj)
        if [ -z "$project_path" ]; then
            echo "project_path is empty"
            exit 1
        fi
        echo "XCODEPROJ: $project_path/project.pbxproj"
    fi
    CURRENT_PROJECT_VERSION=""
    LINES=$(sed -n '/CURRENT_PROJECT_VERSION/=' "$project_path/project.pbxproj")
    for LINE in $LINES; do
        # Strip "KEY = " prefix and trailing ";" from the pbxproj assignment.
        CURRENT_PROJECT_VERSION=$(sed -n "$LINE"p "$project_path"/project.pbxproj)
        CURRENT_PROJECT_VERSION="${CURRENT_PROJECT_VERSION#*= }"
        CURRENT_PROJECT_VERSION="${CURRENT_PROJECT_VERSION%;}"
    done
    if [ -z "$CURRENT_PROJECT_VERSION" ]; then
        echo "CURRENT_PROJECT_VERSION is empty"
        exit 1
    fi
    CFBundleVersion=$CURRENT_PROJECT_VERSION
fi
# Same indirection for $(MARKETING_VERSION).
if [[ $CFBundleShortVersionString == *MARKETING_VERSION* ]]; then
    if [ -z "$project_path" ]; then
        echo "project_path is empty"
        exit 1
    fi
    MARKETING_VERSION=""
    LINES=$(sed -n '/MARKETING_VERSION/=' "$project_path/project.pbxproj")
    for LINE in $LINES; do
        MARKETING_VERSION=$(sed -n "$LINE"p "$project_path"/project.pbxproj)
        MARKETING_VERSION="${MARKETING_VERSION#*= }"
        MARKETING_VERSION="${MARKETING_VERSION%;}"
    done
    if [ -z "$MARKETING_VERSION" ]; then
        echo "MARKETING_VERSION is empty"
        exit 1
    fi
    CFBundleShortVersionString=$MARKETING_VERSION
fi
echo "XBV_PROJECT_VERSION: ${CFBundleShortVersionString}"
echo "XBV_PROJECT_BUILD: ${CFBundleVersion}"
# Quote the values: unquoted they would word-split or vanish if empty.
envman add --key XBV_PROJECT_VERSION --value "$CFBundleShortVersionString"
envman add --key XBV_PROJECT_BUILD --value "$CFBundleVersion"
#
# --- Export Environment Variables for other Steps:
#  You can export Environment Variables for other Steps with
#  envman, which is automatically installed by `bitrise setup`.
#  A very simple example:
#  Envman can handle piped inputs, which is useful if the text you want to
#  share is complex and you don't want to deal with proper bash escaping:
#  cat file_with_complex_input | envman add --KEY EXAMPLE_STEP_OUTPUT
#  You can find more usage examples on envman's GitHub page
#  at: https://github.com/bitrise-io/envman
#
# --- Exit codes:
# The exit code of your Step is very important. If you return
#  with a 0 exit code `bitrise` will register your Step as "successful".
#  Any non zero exit code will be registered as "failed" by `bitrise`.
|
#!/bin/bash
# Run code-cracker.py inside a local virtualenv, creating and provisioning
# the environment (.env) on first use.
# Quote the paths and abort if cd fails, otherwise the script would run in
# the wrong directory.
cd "$(dirname "$0")" || exit 1
INSTALL=0
if [ ! -d .env ]; then
    INSTALL=1
    python3 -m venv .env
fi
. ./.env/bin/activate
# Install dependencies only on the run that created the environment.
if [ "${INSTALL}" = "1" ]; then
    pip install -r requirements.txt
fi
python3 ./code-cracker.py
deactivate
|
<reponame>bopopescu/drawquest-web
# Registry of all feature-flag functions declared in this module.
feature_funcs = set()


def feature(fn):
    """ Feature functions take a request as their single argument. """
    # Record the function in the registry, then hand it back unchanged
    # so it can be used as a plain decorator.
    feature_funcs.add(fn)
    return fn
@feature
def requirejs(request):
    """Enabled when the request opts in via a 'requirejs' query parameter."""
    params = request.GET
    return 'requirejs' in params
@feature
def thread_new(request):
    """Always enabled, regardless of the request."""
    return True
@feature
def lazy_content(request):
    """Always enabled, regardless of the request."""
    return True
|
#!/bin/bash
# Conda build script: point minimap2 and the flye binaries at the conda
# prefix's zlib, then build and install the Python package.
#zlib headers for minimap
sed -i.bak 's/CFLAGS=/CFLAGS+=/' lib/minimap2/Makefile
sed -i.bak 's/INCLUDES=/INCLUDES+=/' lib/minimap2/Makefile
export CFLAGS="-L$PREFIX/lib"
export INCLUDES="-I$PREFIX/include"
#zlib headers for flye binaries
export CXXFLAGS="-I$PREFIX/include"
export LDFLAGS="-L$PREFIX/lib"
#dynamic flag is needed for backtrace printing,
#but it seems it fails OSX build
sed -i.bak 's/-rdynamic//' src/Makefile
python setup.py build
# Fixed: the record file was "record.txt." (trailing dot typo).
python setup.py install --record record.txt
|
/***************************************************************************
*
* Project _____ __ ____ _ _
* ( _ ) /__\ (_ _)_| |_ _| |_
* )(_)( /(__)\ )( (_ _)(_ _)
* (_____)(__)(__)(__) |_| |_|
*
*
* Copyright 2018-present, <NAME> <<EMAIL>>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
***************************************************************************/
#ifndef oatpp_concurrency_Thread_hpp
#define oatpp_concurrency_Thread_hpp

// Thread-affinity and hardware-concurrency helpers for oatpp.
#include "oatpp/core/base/Environment.hpp"
#include <thread>

namespace oatpp { namespace concurrency {

/**
 * Set thread affinity to one CPU.
 * @param nativeHandle - `std::thread::native_handle_type`.
 * @param cpuIndex - index of CPU.
 * @return - zero on success. Negative value on failure.
 * -1 if the platform that runs the application does not support this call.
 */
v_int32 setThreadAffinityToOneCpu(std::thread::native_handle_type nativeHandle, v_int32 cpuIndex);

/**
 * Set thread affinity to the CPU range [firstCpuIndex..lastCpuIndex].
 * @param nativeHandle - `std::thread::native_handle_type`.
 * @param firstCpuIndex - from CPU-index.
 * @param lastCpuIndex - to CPU-index, inclusive.
 * @return - zero on success. Negative value on failure.
 * -1 if the platform that runs the application does not support this call.
 */
v_int32 setThreadAffinityToCpuRange(std::thread::native_handle_type nativeHandle, v_int32 firstCpuIndex, v_int32 lastCpuIndex);

/**
 * Get hardware concurrency.
 * @return - OATPP_THREAD_HARDWARE_CONCURRENCY config value if set <br>
 * else return std::thread::hardware_concurrency() <br>
 * else return 1. <br>
 */
v_int32 getHardwareConcurrency();

}}

#endif /* concurrency_Thread_hpp */
|
#!/bin/bash
# Download and install the latest stable Sublime Text .deb, then clean up.
# Abort on any failure so a bad download is never handed to dpkg.
set -e
wget -O sublime-text.deb https://download.sublimetext.com/latest/stable/linux/x64/deb
sudo dpkg -i sublime-text.deb
rm sublime-text.deb
|
import { Injectable } from '@nestjs/common';
import { CreateTestcaseDto } from './dto/create-testcase.dto';
import { UpdateTestcaseDto } from './dto/update-testcase.dto';
@Injectable()
export class TestcasesService {
  /** Placeholder for persisting a new testcase. */
  create(createTestcaseDto: CreateTestcaseDto) {
    const message = 'This action adds a new testcase';
    return message;
  }

  /** Placeholder for listing every testcase. */
  findAll() {
    const message = `This action returns all testcases`;
    return message;
  }

  /** Placeholder for fetching a single testcase by id. */
  findOne(id: number) {
    const message = `This action returns a #${id} testcase`;
    return message;
  }

  /** Placeholder for updating a testcase by id. */
  update(id: number, updateTestcaseDto: UpdateTestcaseDto) {
    const message = `This action updates a #${id} testcase`;
    return message;
  }

  /** Placeholder for deleting a testcase by id. */
  remove(id: number) {
    const message = `This action removes a #${id} testcase`;
    return message;
  }
}
|
# Generate a passphrase-protected 2048-bit RSA key (temporary passphrase "x").
openssl genrsa -des3 -passout pass:x -out server.pass.key 2048
# Strip the passphrase so servers can load the key unattended.
openssl rsa -passin pass:x -in server.pass.key -out server.key
rm server.pass.key
# Create a certificate signing request with a fixed subject (no prompts).
openssl req -new -key server.key -out server.csr -subj "/C=US/ST=State/L=City/O=Org/OU=Unit/CN=example.com"
# Self-sign the CSR for one year, producing server.crt.
openssl x509 -req -days 365 -in server.csr -signkey server.key -out server.crt
rm server.csr
|
/**
* Copyright 2018 Red Hat, Inc, and individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package test.org.wildfly.swarm.microprofile.openapi;
import java.io.File;
import java.io.FileWriter;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.io.IOUtils;
import org.eclipse.microprofile.config.Config;
import org.eclipse.microprofile.config.spi.ConfigSource;
import org.eclipse.microprofile.openapi.models.OpenAPI;
import org.jboss.arquillian.container.test.api.Deployment;
import org.jboss.arquillian.testng.Arquillian;
import org.jboss.shrinkwrap.api.Archive;
import org.junit.Assert;
import org.junit.Ignore;
import org.junit.runner.Description;
import org.junit.runner.notification.RunNotifier;
import org.junit.runners.ParentRunner;
import org.junit.runners.model.InitializationError;
import org.junit.runners.model.Statement;
import org.wildfly.microprofile.config.PropertiesConfigSourceProvider;
import org.wildfly.microprofile.config.WildFlyConfigBuilder;
import org.wildfly.swarm.microprofile.openapi.api.OpenApiConfig;
import org.wildfly.swarm.microprofile.openapi.api.OpenApiDocument;
import org.wildfly.swarm.microprofile.openapi.deployment.OpenApiServletContextListener;
import org.wildfly.swarm.microprofile.openapi.runtime.OpenApiDeploymentProcessor;
import org.wildfly.swarm.microprofile.openapi.runtime.io.OpenApiSerializer;
import org.wildfly.swarm.microprofile.openapi.runtime.io.OpenApiSerializer.Format;
/**
* A Junit 4 test runner used to quickly run the OpenAPI tck tests directly against the
* {@link OpenApiDeploymentProcessor} without spinning up Wildfly Swarm. This is not
* a replacement for running the full OpenAPI TCK using Arquillian. However, it runs
* much faster and does *most* of what we need for coverage.
*
* @author <EMAIL>
*/
@SuppressWarnings("rawtypes")
public class TckTestRunner extends ParentRunner<ProxiedTckTest> {

    // The wrapper test class annotated with @TckTest.
    private Class<?> testClass;
    // The actual TCK (TestNG/Arquillian) test class named by the annotation.
    private Class<? extends Arquillian> tckTestClass;

    // OpenAPI model produced per wrapper class, shared with the running tests.
    public static Map<Class, OpenAPI> OPEN_API_DOCS = new HashMap<>();

    /**
     * Constructor.  Builds the shrinkwrap archive declared by the TCK test's
     * @Deployment method, runs the OpenAPI deployment processor over it, and
     * caches the resulting model in OPEN_API_DOCS for the child tests.
     * @param testClass
     * @throws InitializationError
     */
    public TckTestRunner(Class<?> testClass) throws InitializationError {
        super(testClass);
        this.testClass = testClass;
        this.tckTestClass = determineTckTestClass(testClass);

        // The Archive (shrinkwrap deployment)
        Archive archive = archive();

        // MPConfig
        WildFlyConfigBuilder cfgBuilder = new WildFlyConfigBuilder();
        cfgBuilder.addDefaultSources();
        // Optionally layer test-specific properties on top of the defaults.
        TckTest anno = testClass.getAnnotation(TckTest.class);
        if (anno.configProperties() != null && anno.configProperties().trim().length() > 0) {
            List<ConfigSource> configSources = new PropertiesConfigSourceProvider(anno.configProperties(), true, tckTestClass.getClassLoader()).getConfigSources(tckTestClass.getClassLoader());
            configSources.forEach(source -> {
                cfgBuilder.withSources(source);
            });
        }
        Config mpConfig = cfgBuilder.build();
        OpenApiConfig config = new OpenApiConfig(mpConfig);

        // Reset shared state, then process the archive as deployment would.
        OpenApiDocument.INSTANCE.reset();
        OpenApiDeploymentProcessor processor = new OpenApiDeploymentProcessor(config, archive);
        try {
            processor.process();
            new OpenApiServletContextListener(mpConfig).contextInitialized(null);
            Assert.assertNotNull("Generated OAI document must not be null.", OpenApiDocument.INSTANCE.get());
            OPEN_API_DOCS.put(testClass, OpenApiDocument.INSTANCE.get());

            // Output the /openapi content to a file for debugging purposes
            File parent = new File("target", "TckTestRunner");
            if (!parent.exists()) {
                parent.mkdir();
            }
            File file = new File(parent, testClass.getName() + ".json");
            String content = OpenApiSerializer.serialize(OpenApiDocument.INSTANCE.get(), Format.JSON);
            try (FileWriter writer = new FileWriter(file)) {
                IOUtils.write(content, writer);
            }
        } catch (Exception e) {
            throw new InitializationError(e);
        }
    }

    /**
     * Creates and returns the shrinkwrap archive for this test by invoking
     * the TCK test's static @Deployment factory method reflectively.
     */
    private Archive archive() throws InitializationError {
        try {
            Method[] methods = tckTestClass.getMethods();
            for (Method method : methods) {
                if (method.isAnnotationPresent(Deployment.class)) {
                    Archive archive = (Archive) method.invoke(null);
                    return archive;
                }
            }
            throw new Exception("No @Deployment archive found for test.");
        } catch (Exception e) {
            throw new InitializationError(e);
        }
    }

    /**
     * Figures out what TCK test is being run (from the @TckTest annotation).
     * @throws InitializationError
     */
    private Class<? extends Arquillian> determineTckTestClass(Class<?> testClass) throws InitializationError {
        TckTest anno = testClass.getAnnotation(TckTest.class);
        if (anno == null) {
            throw new InitializationError("Missing annotation @TckTest");
        }
        return anno.test();
    }

    /**
     * Builds one child per @Test method on the TCK class, sorted by method
     * name for a deterministic run order.
     * @see org.junit.runners.ParentRunner#getChildren()
     */
    @Override
    protected List<ProxiedTckTest> getChildren() {
        List<ProxiedTckTest> children = new ArrayList<>();
        Method[] methods = tckTestClass.getMethods();
        for (Method method : methods) {
            if (method.isAnnotationPresent(org.testng.annotations.Test.class)) {
                try {
                    ProxiedTckTest test = new ProxiedTckTest();
                    Object theTestObj = this.testClass.newInstance();
                    test.setTest(theTestObj);
                    test.setTestMethod(method);
                    test.setDelegate(createDelegate(theTestObj));
                    children.add(test);
                } catch (Exception e) {
                    throw new RuntimeException(e);
                }
            }
        }
        children.sort(new Comparator<ProxiedTckTest>() {
            @Override
            public int compare(ProxiedTckTest o1, ProxiedTckTest o2) {
                return o1.getTestMethod().getName().compareTo(o2.getTestMethod().getName());
            }
        });
        return children;
    }

    /**
     * Creates the delegate test instance.  This is done by instantiating the test itself
     * and calling its "getDelegate()" method.  If no such method exists then an error
     * is thrown.
     */
    private Arquillian createDelegate(Object testObj) throws Exception {
        Object delegate = testObj.getClass().getMethod("getDelegate").invoke(testObj);
        return (Arquillian) delegate;
    }

    /**
     * @see org.junit.runners.ParentRunner#describeChild(java.lang.Object)
     */
    @Override
    protected Description describeChild(ProxiedTckTest child) {
        return Description.createTestDescription(tckTestClass, child.getTestMethod().getName());
    }

    /**
     * Runs a single TCK method against the cached OpenAPI model, translating
     * TestNG's expectedExceptions into a JUnit-style assertion.
     * @see org.junit.runners.ParentRunner#runChild(java.lang.Object, org.junit.runner.notification.RunNotifier)
     */
    @Override
    protected void runChild(final ProxiedTckTest child, final RunNotifier notifier) {
        // Expose this wrapper's document to the TCK test being run.
        OpenApiDocument.INSTANCE.set(TckTestRunner.OPEN_API_DOCS.get(child.getTest().getClass()));
        Description description = describeChild(child);
        if (isIgnored(child)) {
            notifier.fireTestIgnored(description);
        } else {
            Statement statement = new Statement() {
                @Override
                public void evaluate() throws Throwable {
                    try {
                        Object [] args = (Object[]) child.getTest().getClass().getMethod("getTestArguments").invoke(child.getTest());
                        child.getTestMethod().invoke(child.getDelegate(), args);
                    } catch (InvocationTargetException e) {
                        // Unwrap reflection failures; honor TestNG's declared
                        // expected exception (first entry only) if present.
                        Throwable cause = e.getCause();
                        org.testng.annotations.Test testAnno = child.getTestMethod().getAnnotation(org.testng.annotations.Test.class);
                        Class[] expectedExceptions = testAnno.expectedExceptions();
                        if (expectedExceptions != null && expectedExceptions.length > 0) {
                            Class expectedException = expectedExceptions[0];
                            Assert.assertEquals(expectedException, cause.getClass());
                        } else {
                            throw cause;
                        }
                    }
                }
            };
            runLeaf(statement, description, notifier);
        }
    }

    /**
     * @see org.junit.runners.ParentRunner#isIgnored(java.lang.Object)
     */
    @Override
    protected boolean isIgnored(ProxiedTckTest child) {
        return child.getTestMethod().isAnnotationPresent(Ignore.class);
    }

}
|
import numpy as np
from rdkit import Chem
from rdkit.Chem import AllChem
def optimize_molecular_structure(mol, angles, dihedrals):
    """Set selected dihedral angles on ``mol`` and relax it with MMFF.

    For each ``(dihedral_index, angle)`` pair the dihedral is set to
    ``angle`` degrees on the conformer, then constrained to stay within
    ``ANGLE_DELTA`` degrees during a short MMFF minimization.

    Args:
        mol: RDKit molecule with at least one conformer.
        angles: target dihedral angles, in degrees.
        dihedrals: indices into the molecule's dihedral list
            (assumes ``GetDihedrals()`` exists on the owning mol --
            TODO confirm against the RDKit version in use).

    Returns:
        The same ``mol`` object, mutated in place.
    """
    conformer = mol.GetConformer()
    torsions = [(d[0], d[1], d[2], d[3]) for d in conformer.GetOwningMol().GetDihedrals()]
    ffc = AllChem.MMFFGetMoleculeForceField(mol, AllChem.MMFFGetMoleculeProperties(mol))
    ANGLE_DELTA = 10  # half-width (degrees) of the torsion constraint window
    FF_RELAX_STEPS = 100  # maximum iterations for energy minimization
    for dih_id, angle in zip(dihedrals, angles):
        try:
            Chem.rdMolTransforms.SetDihedralDeg(conformer, *torsions[dih_id], float(angle))
        except Exception:
            # Setting can fail (e.g. ring torsions); keep going and let the
            # constraint below pull the geometry toward the target instead.
            # Narrowed from a bare `except:` which also swallowed
            # KeyboardInterrupt/SystemExit.
            pass
        ffc.MMFFAddTorsionConstraint(*torsions[dih_id], False, angle - ANGLE_DELTA, angle + ANGLE_DELTA, 1.0e10)
    ffc.Minimize(maxIts=FF_RELAX_STEPS, energyTol=1e-2, forceTol=1e-3)
    return mol
<gh_stars>10-100
// Persist pending changes via the object manager, notifying the user
// before and after.  NOTE(review): `user` and `objectManager` are defined
// elsewhere -- presumably a MUD/game session context; confirm at the caller.
user.send('Saving...');
objectManager.save();
user.send('Done.');
|
<filename>src/main/java/org/olat/modules/openmeetings/manager/OpenMeetingsLanguages.java<gh_stars>1-10
/**
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at the
* <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache homepage</a>
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Initial code contributed and copyrighted by<br>
* 12.10.2011 by frentix GmbH, http://www.frentix.com
* <p>
*/
package org.olat.modules.openmeetings.manager;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import javax.xml.XMLConstants;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import org.apache.logging.log4j.Logger;
import org.olat.core.logging.Tracing;
import org.olat.core.util.i18n.I18nModule;
import org.xml.sax.Attributes;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.DefaultHandler;
/**
* Map the id (int) to the locale from the languages file of OpenMeetings
*
*
* Initial date: 07.11.2012<br>
* @author srosse, <EMAIL>, http://www.frentix.com
*
*/
public class OpenMeetingsLanguages extends DefaultHandler {

	private static final Logger log = Tracing.createLoggerFor(OpenMeetingsLanguages.class);

	// Maps an ISO language code (e.g. "en") to its OpenMeetings numeric id.
	private final Map<String, Integer> languageToId = new HashMap<>();

	public OpenMeetingsLanguages() {
		//
	}

	/**
	 * Parse the bundled languages.xml and populate the code-to-id map.
	 * Failures are logged and leave the map in its current (possibly empty)
	 * state; getLanguageId() then falls back to 1 (English).
	 */
	public void read() {
		try(InputStream in = OpenMeetingsLanguages.class.getResourceAsStream("languages.xml")) {
			SAXParserFactory factory = SAXParserFactory.newInstance();
			factory.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true);
			SAXParser saxParser = factory.newSAXParser();
			saxParser.parse(in, this);
		} catch (Exception e) {
			log.error("", e);
		}
	}

	/**
	 * Resolve the OpenMeetings language id for the given locale, falling back
	 * first to the platform default locale and finally to 1 (English).
	 */
	public int getLanguageId(Locale locale) {
		int id = -1;
		if(locale != null) {
			String language = locale.getLanguage();
			if(languageToId.containsKey(language)) {
				id = languageToId.get(language).intValue();
			}
		}
		if(id < 1) {
			Locale defLocale = I18nModule.getDefaultLocale();
			String defLanguage = defLocale.getLanguage();
			if(languageToId.containsKey(defLanguage)) {
				id = languageToId.get(defLanguage).intValue();
			}
		}
		if(id < 1) {
			return 1;//en
		}
		return id;
	}

	@Override
	public void startElement(String uri, String localName, String qName, Attributes attributes)
	throws SAXException {
		if("lang".equals(qName)) {
			String id = attributes.getValue("id");
			String code = attributes.getValue("code");
			// Guard the parse: one malformed or missing "id" attribute must
			// not abort the whole SAX run with a NumberFormatException.
			if(id != null && code != null) {
				try {
					languageToId.put(code, Integer.valueOf(id));
				} catch (NumberFormatException e) {
					log.error("Invalid language id in languages.xml: " + id, e);
				}
			}
		}
	}
}
|
# Bash Script for Hide Phishing URL Created by KP
# Validate that $1 starts with an http or https scheme (the text before the
# first ':'); print an error and terminate the script otherwise.
url_checker() {
	local scheme="${1%%:*}"
	if [ "$scheme" != http ] && [ "$scheme" != https ]; then
		echo -e "\e[31m[!] Invalid URL. Please use http or https.\e[0m"
		exit 1
	fi
}
echo -e "\n\e[1;31;42m######┌──────────────────────────┐##### \e[0m"
echo -e "\e[1;31;42m######│▙▗▌ ▌ ▛▀▖▌ ▗ ▌ │##### \e[0m"
echo -e "\e[1;31;42m######│▌▘▌▝▀▖▞▀▘▌▗▘▙▄▘▛▀▖▄ ▞▀▘▛▀▖│##### \e[0m"
echo -e "\e[1;31;42m######│▌ ▌▞▀▌▝▀▖▛▚ ▌ ▌ ▌▐ ▝▀▖▌ ▌│##### \e[0m"
echo -e "\e[1;31;42m######│▘ ▘▝▀▘▀▀ ▘ ▘▘ ▘ ▘▀▘▀▀ ▘ ▘│##### \e[0m"
echo -e "\e[1;31;42m######└──────────────────────────┘##### \e[0m \n"
echo -e "\e[40;38;5;82m Please Visit \e[30;48;5;82m https://www.kalilinux.in \e[0m"
echo -e "\e[30;48;5;82m Copyright \e[40;38;5;82m JayKali \e[0m \n\n"
echo -e "\e[1;31;42m ### Phishing URL ###\e[0m \n"
echo -n "Paste Phishing URL here (with http or https): "
read phish
# Quote the argument: unquoted it would word-split on spaces.
url_checker "$phish"
sleep 1
echo "Processing and Modifing Phishing URL"
echo ""
# Quote the URL so curl sees it as a single argument.
short=$(curl -s "https://is.gd/create.php?format=simple&url=${phish}")
shorter=${short#https://}
echo -e "\n\e[1;31;42m ### Masking Domain ###\e[0m"
# Prompt was previously broken across two lines mid-word ("http\n://...").
echo 'Domain to mask the Phishing URL (with http or https), ex: https://google.com, http://anything.org) :'
echo -en "\e[32m=>\e[0m "
read mask
url_checker "$mask"
echo -e '\nType social engineering words:(like free-money, best-pubg-tricks)'
echo -e "\e[31mDon't use space just use '-' between social engineering words\e[0m"
echo -en "\e[32m=>\e[0m "
read words
if [[ -z "$words" ]]; then
	echo -e "\e[31m[!] No words.\e[0m"
	echo -e "\nGenerating MaskPhish Link...\n"
	final=$mask@$shorter
	echo -e "Here is the MaskPhish URL:\e[32m ${final} \e[0m\n"
	exit
fi
if [[ "$words" =~ " " ]]; then
	echo -e "\e[31m[!] Invalid words. Please avoid space.\e[0m"
	echo -e "\nGenerating MaskPhish Link...\n"
	final=$mask@$shorter
	echo -e "Here is the MaskPhish URL:\e[32m ${final} \e[0m\n"
	exit
fi
echo -e "\nGenerating MaskPhish Link...\n"
final=$mask-$words@$shorter
echo -e "Here is the MaskPhish URL:\e[32m ${final} \e[0m\n"
|
<gh_stars>10-100
package bridge
/*
Copyright 2018-2021 Crunchy Data Solutions, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import (
"crypto/md5"
"fmt"
"io"
"io/ioutil"
"log"
"reflect"
"testing"
"github.com/crunchydata/pgo-osb/pkg/broker"
"github.com/gofrs/uuid"
osb "github.com/pmorie/go-open-service-broker-client/v2"
osblib "github.com/pmorie/osb-broker-lib/pkg/broker"
)
// mockLogic builds a BusinessLogic wired to the simulated (in-memory)
// broker backend; it aborts the test immediately if construction fails.
func mockLogic(t *testing.T) *BusinessLogic {
	bl, err := NewBusinessLogic(Options{
		Simulated: true,
	})
	if err != nil {
		t.Fatalf("error creating BusinessLogic: %s", err)
	}
	return bl
}
// nuuid returns a fresh random (v4) UUID string, failing the test if
// generation errors (e.g. entropy exhaustion).
func nuuid(t *testing.T) string {
	id, err := uuid.NewV4()
	if err != nil {
		t.Fatalf("failed to generate UUID: %s\n", err)
	}
	return id.String()
}
// TestUnitCatalog verifies the catalog exposes exactly one bindable service
// ("pgo-osb-service") with the seven expected plans, and that every plan
// keeps its published name/ID pairing (stable plan identity matters to
// platforms such as PCF — see the note inside).
func TestUnitCatalog(t *testing.T) {
	log.SetOutput(ioutil.Discard)
	bl := mockLogic(t)
	req := &osblib.RequestContext{}
	resp, err := bl.GetCatalog(req)
	if err != nil {
		t.Errorf("error getting catalog: %s\n", err)
	}
	if l := len(resp.Services); l != 1 {
		t.Fatalf("expected one and only one service, found %d", l)
	}
	svc := resp.Services[0]
	if v := svc.Name; v != "pgo-osb-service" {
		t.Errorf("unexpected service name: %s", v)
	}
	if svc.Bindable != true {
		t.Error("expected service definition to be bindable")
	}
	// PlanUpdatable is a *bool; nil (undefined) is acceptable, true is not.
	if svc.PlanUpdatable != nil && *svc.PlanUpdatable == true {
		t.Errorf("Update function unimplemented, expected PlanUpdatable to be false or undefined")
	}
	if l := len(svc.Plans); l != 7 {
		t.Fatalf("expected seven plans, found %d", l)
	}
	// Some platforms (PCF) freak out if the plan name changes or goes away
	// Do not blindly update this test case without taking that into account
	for _, plan := range svc.Plans {
		switch plan.Name {
		case "default":
			if plan.ID != "86064792-7ea2-467b-af93-ac9694d96d5c" {
				t.Error("unexpected plan Name or ID change for default plan")
			}
		case "standalone_sm":
			if plan.ID != "885a1cb6-ca42-43e9-a725-8195918e1343" {
				t.Error("unexpected plan Name or ID change for standalone_sm plan")
			}
		case "standalone_md":
			if plan.ID != "dc951396-bb28-45a4-b040-cfe3bebc6121" {
				t.Error("unexpected plan Name or ID change for standalone_md plan")
			}
		case "standalone_lg":
			if plan.ID != "04349656-4dc9-4b67-9b15-52a93d64d566" {
				t.Error("unexpected plan Name or ID change for standalone_lg plan")
			}
		case "ha_sm":
			if plan.ID != "877432f8-07eb-4e57-b984-d025a71d2282" {
				t.Error("unexpected plan Name or ID change for ha_sm plan")
			}
		case "ha_md":
			if plan.ID != "89bcdf8a-e637-4bb3-b7ce-aca083cc1e69" {
				t.Error("unexpected plan Name or ID change for ha_md plan")
			}
		case "ha_lg":
			if plan.ID != "470ca1a0-2763-41f1-a4cf-985acdb549ab" {
				t.Error("unexpected plan Name or ID change for ha_lg plan")
			}
		default:
			t.Errorf("Unexpected plan name: %s", plan.Name)
		}
	}
}
// TestUnitProvisionBasic checks that a provision request carrying both
// required parameters (PGO_NAMESPACE and PGO_CLUSTERNAME) succeeds against
// the simulated backend.
func TestUnitProvisionBasic(t *testing.T) {
	log.SetOutput(ioutil.Discard)
	bl := mockLogic(t)
	req := &osb.ProvisionRequest{
		InstanceID: nuuid(t),
		PlanID:     "86064792-7ea2-467b-af93-ac9694d96d5c",
		ServiceID:  "4be12541-2945-4101-8a33-79ac0ad58750",
		Parameters: map[string]interface{}{
			"PGO_NAMESPACE":   "demo",
			"PGO_CLUSTERNAME": "unitinstance",
		},
	}
	_, err := bl.Provision(req, nil)
	if err != nil {
		t.Fatal(err)
	}
}
// TestUnitProvisionMissingClustername checks that provisioning fails when
// the PGO_CLUSTERNAME parameter is omitted from the request.
func TestUnitProvisionMissingClustername(t *testing.T) {
	log.SetOutput(ioutil.Discard)
	bl := mockLogic(t)
	req := &osb.ProvisionRequest{
		InstanceID: nuuid(t),
		PlanID:     "86064792-7ea2-467b-af93-ac9694d96d5c",
		ServiceID:  "4be12541-2945-4101-8a33-79ac0ad58750",
		Parameters: map[string]interface{}{
			"PGO_NAMESPACE": "demo",
		},
	}
	_, err := bl.Provision(req, nil)
	if err == nil {
		// Fixed: message previously said "Namespace" although this test
		// omits the cluster name (cf. TestUnitProvisionMissingNamespace).
		t.Fatal("expected provisioning error re: Clustername, but got none")
	}
}
// TestUnitProvisionMissingNamespace checks that provisioning fails when
// the PGO_NAMESPACE parameter is omitted from the request.
func TestUnitProvisionMissingNamespace(t *testing.T) {
	log.SetOutput(ioutil.Discard)
	bl := mockLogic(t)
	req := &osb.ProvisionRequest{
		InstanceID: nuuid(t),
		PlanID:     "86064792-7ea2-467b-af93-ac9694d96d5c",
		ServiceID:  "4be12541-2945-4101-8a33-79ac0ad58750",
		Parameters: map[string]interface{}{
			"PGO_CLUSTERNAME": "unitinstance",
		},
	}
	_, err := bl.Provision(req, nil)
	if err == nil {
		// Fixed: message previously said "Clustername" although this test
		// omits the namespace.
		t.Fatal("expected provisioning error re: Namespace, but got none")
	}
}
// TestUnitProvisionUndo provisions an instance and then deprovisions it,
// verifying the full round trip succeeds on the simulated backend.
func TestUnitProvisionUndo(t *testing.T) {
	log.SetOutput(ioutil.Discard)
	bl := mockLogic(t)
	preq := &osb.ProvisionRequest{
		InstanceID: nuuid(t),
		PlanID:     "86064792-7ea2-467b-af93-ac9694d96d5c",
		ServiceID:  "4be12541-2945-4101-8a33-79ac0ad58750",
		Parameters: map[string]interface{}{
			"PGO_NAMESPACE":   "demo",
			"PGO_CLUSTERNAME": "unitinstance",
		},
	}
	_, err := bl.Provision(preq, nil)
	if err != nil {
		t.Fatalf("error provisioning: %s", err)
	}
	// Deprovision the same instance that was just created.
	dreq := &osb.DeprovisionRequest{
		InstanceID: preq.InstanceID,
		PlanID:     "86064792-7ea2-467b-af93-ac9694d96d5c",
		ServiceID:  "4be12541-2945-4101-8a33-79ac0ad58750",
	}
	_, err = bl.Deprovision(dreq, nil)
	if err != nil {
		t.Fatalf("error deprovisioning: %s", err)
	}
}
// TestUnitBindingNoInstance provisions one instance but then binds against a
// different (random) InstanceID, expecting a broker.ErrNoInstance error.
func TestUnitBindingNoInstance(t *testing.T) {
	log.SetOutput(ioutil.Discard)
	bl := mockLogic(t)
	preq := &osb.ProvisionRequest{
		InstanceID: nuuid(t),
		PlanID:     "86064792-7ea2-467b-af93-ac9694d96d5c",
		ServiceID:  "4be12541-2945-4101-8a33-79ac0ad58750",
		Parameters: map[string]interface{}{
			"PGO_NAMESPACE":   "demo",
			"PGO_CLUSTERNAME": "unitinstance",
		},
	}
	_, err := bl.Provision(preq, nil)
	if err != nil {
		t.Fatalf("error provisioning: %s", err)
	}
	appID := nuuid(t)
	// Deliberately bind to a fresh UUID, not preq.InstanceID.
	breq := &osb.BindRequest{
		InstanceID: nuuid(t),
		BindingID:  nuuid(t),
		AppGUID:    &appID,
	}
	_, err = bl.Bind(breq, nil)
	if err == nil {
		t.Fatal("NoInstance error expected, got nil")
	}
	if _, ok := err.(broker.ErrNoInstance); !ok {
		t.Fatalf("NoInstance error expected, got: %T - %s", err, err)
	}
}
func TestUnitBindingBasic(t *testing.T) {
log.SetOutput(ioutil.Discard)
bl := mockLogic(t)
preq := &osb.ProvisionRequest{
InstanceID: nuuid(t),
PlanID: "86064792-7ea2-467b-af93-ac9694d96d5c",
ServiceID: "4be12541-2945-4101-8a33-79ac0ad58750",
AcceptsIncomplete: false,
Parameters: map[string]interface{}{
"PGO_NAMESPACE": "demo",
"PGO_CLUSTERNAME": "unitinstance",
},
}
_, err := bl.Provision(preq, nil)
if err != nil {
t.Fatalf("error provisioning: %s", err)
}
appID := nuuid(t)
breq := &osb.BindRequest{
InstanceID: preq.InstanceID,
BindingID: nuuid(t),
AppGUID: &appID,
}
bindResp, err := bl.Bind(breq, nil)
if err != nil {
t.Fatalf("error binding: %s", err)
}
h := md5.New()
io.WriteString(h, breq.BindingID)
expUser := fmt.Sprintf("user_%x", h.Sum(nil))
expect := &osblib.BindResponse{
BindResponse: osb.BindResponse{
Credentials: map[string]interface{}{
"username": expUser,
"password": <PASSWORD>.MockStatic.Password,
"db_port": 5432,
"db_name": "userdb",
"db_host": broker.MockStatic.ExternalIP,
"internal_host": broker.MockStatic.ClusterIP,
"uri": fmt.Sprintf("postgresql://%s:%s@%s:%d/%s",
expUser,
broker.MockStatic.Password,
broker.MockStatic.ExternalIP,
5432,
"userdb"),
},
},
}
if !reflect.DeepEqual(expect, bindResp) {
t.Logf("Expected: %+v", expect)
t.Logf("Received: %+v", bindResp)
t.FailNow()
}
}
|
<gh_stars>0
//
//  RMUtilsFramework.h
//  RMUtilsFramework — umbrella header for the framework.
//
//  Created by JianRongCao on 15/12/14.
//  Copyright © 2015 JianRongCao. All rights reserved.
//
#import <UIKit/UIKit.h>
//! Project version number for RMUtilsFramework.
FOUNDATION_EXPORT double RMUtilsFrameworkVersionNumber;
//! Project version string for RMUtilsFramework.
FOUNDATION_EXPORT const unsigned char RMUtilsFrameworkVersionString[];
// In this header, you should import all the public headers of your framework using statements like #import <RMUtilsFramework/PublicHeader.h>
// RMUtils.h is currently the framework's only public header.
#import <RMUtilsFramework/RMUtils.h>
|
from ..layers.Layer import *
class CustomLayer(Layer):
    """Layer that reduces its input to the sum of squared entries along one axis."""

    def __init__(self):
        super(CustomLayer, self).__init__()

    def forward(self, input_tensor, axis):
        # Square element-wise, then reduce along the requested axis.
        # Assumes input_tensor supports ** and .sum(axis=...) (numpy-like);
        # confirm against the base Layer contract.
        return (input_tensor ** 2).sum(axis=axis)
#!/usr/bin/env bash
# Installs the TrueType fonts listed in FONT_URLS, skipping headless systems
# and fonts that are already present.
MODULE_NAME='TTF fonts'

log_module_start

# Nothing to do without a GUI.
distro_has_gui || log_no_changes

typeset -a FONT_URLS
FONT_URLS=(
    'http://www.gringod.com/wp-upload/software/Fonts/Monaco_Linux.ttf'
)

did_install=false
# Fixed: quote the array and command substitutions so URLs containing
# spaces or glob characters are not word-split or expanded.
for url in "${FONT_URLS[@]}"
do
    font_name="$(basename "$url")"
    if ! font_ttf_is_installed "$font_name"
    then
        log_info "Installing $font_name"
        font_ttf_install "$url" && did_install=true
    fi
done

$did_install || log_no_changes
log_module_end
|
<filename>packages/store-path/__mocks__/can-link.js
// Mock implementation of can-link: a path pair is linkable only if it
// appears in the allow-list below (encoded as "existing=>new").
const LINKABLE_PAIRS = new Set([
  '/can-link-to-homedir/tmp=>/home/user/tmp',
  '/mnt/project/tmp=>/mnt/tmp/tmp',
])

function canLink (existingPath, newPath) {
  return LINKABLE_PAIRS.has(`${existingPath}=>${newPath}`)
}

module.exports = canLink
|
#!/bin/ash
# ss-merlin firewall bootstrap: builds ipsets (gfwlist/chinaips/localips/
# whitelist/user lists) and installs iptables NAT + TPROXY rules that route
# selected traffic through the local shadowsocks redirector.
# mode: 0 = proxy only GFW-listed destinations, 1 = bypass mainland-China IPs.
SS_MERLIN_HOME=/opt/share/ss-merlin
DNSMASQ_CONFIG_DIR=${SS_MERLIN_HOME}/etc/dnsmasq.d
# Seed editable config files from the shipped samples on first run.
if [[ ! -f ${SS_MERLIN_HOME}/etc/ss-merlin.conf ]]; then
cp ${SS_MERLIN_HOME}/etc/ss-merlin.sample.conf ${SS_MERLIN_HOME}/etc/ss-merlin.conf
fi
if [[ ! -f ${SS_MERLIN_HOME}/etc/shadowsocks/config.json ]]; then
cp ${SS_MERLIN_HOME}/etc/shadowsocks/config.sample.json ${SS_MERLIN_HOME}/etc/shadowsocks/config.json
fi
# Pull in user settings (mode, udp, lan_ips, china_dns_ip, ...).
. ${SS_MERLIN_HOME}/etc/ss-merlin.conf
# get_lan_ips: derive the LAN network in CIDR form (e.g. 192.168.1.0/24)
# from nvram's lan_ipaddr/lan_netmask by counting set bits in the mask.
get_lan_ips() {
lan_ipaddr=$(nvram get lan_ipaddr)
lan_netmask=$(nvram get lan_netmask)
# Assumes there's no "255." after a non-255 byte in the mask
local x=${lan_netmask##*255.}
set -- 0^^^128^192^224^240^248^252^254^ $(( (${#lan_netmask} - ${#x})*2 )) "${x%%.*}"
x=${1%%$3*}
cidr=$(($2 + (${#x}/4)))
echo "${lan_ipaddr%.*}".0/$cidr
}
# Kernel modules required for ipset matching in iptables.
modprobe ip_set
modprobe ip_set_hash_net
modprobe ip_set_hash_ip
modprobe xt_set
# Create ipset for user domain name whitelist and user domain name gfwlist
ipset create userwhitelist hash:net 2>/dev/null
ipset create usergfwlist hash:net 2>/dev/null
if [[ ${mode} -eq 0 ]]; then
# Add GFW list to gfwlist ipset for GFW list mode
if ipset create gfwlist hash:ip 2>/dev/null; then
if [[ -s ${DNSMASQ_CONFIG_DIR}/dnsmasq_gfwlist_ipset.conf.bak ]]; then
rm -f ${DNSMASQ_CONFIG_DIR}/dnsmasq_gfwlist_ipset.conf 2>/dev/null
cp ${DNSMASQ_CONFIG_DIR}/dnsmasq_gfwlist_ipset.conf.bak ${DNSMASQ_CONFIG_DIR}/dnsmasq_gfwlist_ipset.conf
fi
fi
# Add user_ip_proxylist.txt
if [[ -e ${SS_MERLIN_HOME}/rules/user_ip_proxylist.txt ]]; then
for ip in $(cat ${SS_MERLIN_HOME}/rules/user_ip_proxylist.txt | grep -v '^#'); do
ipset add gfwlist ${ip}
done
fi
elif [[ ${mode} -eq 1 ]]; then
# Add China IP to chinaips ipset for Bypass mainland China mode
if ipset create chinaips hash:net 2>/dev/null; then
OLDIFS="$IFS" && IFS=$'\n'
if ipset list chinaips &>/dev/null; then
# A well-populated set has thousands of entries; refill only if it looks empty.
count=$(ipset list chinaips | wc -l)
if [[ "$count" -lt "8000" ]]; then
echo "Applying China ipset rule, it maybe take several minute to finish..."
if [[ -s ${SS_MERLIN_HOME}/rules/chinadns_chnroute.txt.bak ]]; then
rm -f ${SS_MERLIN_HOME}/rules/chinadns_chnroute.txt 2>/dev/null
cp ${SS_MERLIN_HOME}/rules/chinadns_chnroute.txt.bak ${SS_MERLIN_HOME}/rules/chinadns_chnroute.txt
fi
for ip in $(cat ${SS_MERLIN_HOME}/rules/chinadns_chnroute.txt | grep -v '^#'); do
ipset add chinaips ${ip}
done
fi
fi
IFS=${OLDIFS}
fi
fi
# Add intranet IP to localips ipset for Bypass LAN
if ipset create localips hash:net 2>/dev/null; then
OLDIFS="$IFS" && IFS=$'\n'
if ipset list localips &>/dev/null; then
echo "Applying localips ipset rule..."
for ip in $(cat ${SS_MERLIN_HOME}/rules/localips | grep -v '^#'); do
ipset add localips ${ip}
done
fi
IFS=${OLDIFS}
fi
# Add whitelist
if ipset create whitelist hash:ip 2>/dev/null; then
if [[ ! ${china_dns_ip} ]]; then
china_dns_ip=119.29.29.29
fi
remote_server_address=$(cat ${SS_MERLIN_HOME}/etc/shadowsocks/config.json | grep 'server"' | cut -d ':' -f 2 | cut -d '"' -f 2)
remote_server_ip=${remote_server_address}
# Resolve the server hostname only when it is not already a literal IP.
ISIP=$(echo ${remote_server_address} | grep -E '([0-9]{1,3}[\.]){3}[0-9]{1,3}|:')
if [[ -z "$ISIP" ]]; then
echo "Resolving server IP address with DNS ${china_dns_ip}..."
remote_server_ip=$(nslookup ${remote_server_address} ${china_dns_ip} | sed '1,4d' | awk '{print $3}' | grep -v : | awk 'NR==1{print}')
echo "Server IP address is ${remote_server_ip}"
fi
OLDIFS="$IFS" && IFS=$'\n'
if ipset list whitelist &>/dev/null; then
# Add China default DNS server
ipset add whitelist ${china_dns_ip}
# Add shadowsocks server ip address
ipset add whitelist ${remote_server_ip}
# Add rubyfush DNS server
ipset add whitelist 118.89.110.78
ipset add whitelist 47.96.179.163
fi
IFS=${OLDIFS}
fi
# Add user_ip_whitelist.txt
if [[ -e ${SS_MERLIN_HOME}/rules/user_ip_whitelist.txt ]]; then
for ip in $(cat ${SS_MERLIN_HOME}/rules/user_ip_whitelist.txt | grep -v '^#'); do
ipset add userwhitelist ${ip}
done
fi
# Add user_ip_gfwlist.txt
if [[ -e ${SS_MERLIN_HOME}/rules/user_ip_gfwlist.txt ]]; then
for ip in $(cat ${SS_MERLIN_HOME}/rules/user_ip_gfwlist.txt | grep -v '^#'); do
ipset add usergfwlist ${ip}
done
fi
# Local port the shadowsocks redirector listens on.
local_redir_port=$(cat ${SS_MERLIN_HOME}/etc/shadowsocks/config.json | grep 'local_port' | cut -d ':' -f 2 | grep -o '[0-9]*')
if [[ ! ${lan_ips} || ${lan_ips} == '0.0.0.0/0' ]]; then
lan_ips=$(get_lan_ips)
fi
echo "LAN IPs are ${lan_ips}"
# TCP redirection via NAT REDIRECT; the create acting as an idempotency guard.
if iptables -t nat -N SHADOWSOCKS_TCP 2>/dev/null; then
# TCP rules
iptables -t nat -N SS_OUTPUT
iptables -t nat -N SS_PREROUTING
iptables -t nat -A OUTPUT -j SS_OUTPUT
iptables -t nat -A PREROUTING -j SS_PREROUTING
iptables -t nat -A SHADOWSOCKS_TCP -p tcp -m set --match-set localips dst -j RETURN
iptables -t nat -A SHADOWSOCKS_TCP -p tcp -m set --match-set whitelist dst -j RETURN
iptables -t nat -A SHADOWSOCKS_TCP -p tcp -m set --match-set userwhitelist dst -j RETURN
if [[ ${mode} -eq 1 ]]; then
iptables -t nat -A SHADOWSOCKS_TCP -p tcp -m set --match-set chinaips dst -j RETURN
fi
if [[ ${mode} -eq 0 ]]; then
iptables -t nat -A SHADOWSOCKS_TCP -p tcp -s ${lan_ips} -m set --match-set gfwlist dst -j REDIRECT --to-ports ${local_redir_port}
else
iptables -t nat -A SHADOWSOCKS_TCP -p tcp -s ${lan_ips} -j REDIRECT --to-ports ${local_redir_port}
fi
iptables -t nat -A SHADOWSOCKS_TCP -p tcp -s ${lan_ips} -m set --match-set usergfwlist dst -j REDIRECT --to-ports ${local_redir_port}
# Apply TCP rules
iptables -t nat -A SS_OUTPUT -p tcp -j SHADOWSOCKS_TCP
iptables -t nat -A SS_PREROUTING -p tcp -s ${lan_ips} -j SHADOWSOCKS_TCP
fi
# UDP relies on TPROXY (mangle table) and a policy-routing table (100).
if [[ ${udp} -eq 1 ]]; then
if iptables -t mangle -N SHADOWSOCKS_UDP 2>/dev/null; then
# UDP rules
modprobe xt_TPROXY
ip route add local 0.0.0.0/0 dev lo table 100
ip rule add fwmark 0x2333 table 100
iptables -t mangle -N SS_OUTPUT
iptables -t mangle -N SS_PREROUTING
iptables -t mangle -A OUTPUT -j SS_OUTPUT
iptables -t mangle -A PREROUTING -j SS_PREROUTING
iptables -t mangle -A SHADOWSOCKS_UDP -p udp -m set --match-set localips dst -j RETURN
iptables -t mangle -A SHADOWSOCKS_UDP -p udp -m set --match-set whitelist dst -j RETURN
iptables -t mangle -A SHADOWSOCKS_UDP -p udp -m set --match-set userwhitelist dst -j RETURN
if [[ ${mode} -eq 1 ]]; then
iptables -t mangle -A SHADOWSOCKS_UDP -p udp -m set --match-set chinaips dst -j RETURN
fi
if [[ ${mode} -eq 0 ]]; then
iptables -t mangle -A SHADOWSOCKS_UDP -p udp -s ${lan_ips} -m set --match-set gfwlist dst -j MARK --set-mark 0x2333
else
iptables -t mangle -A SHADOWSOCKS_UDP -p udp -s ${lan_ips} -j MARK --set-mark 0x2333
fi
iptables -t mangle -A SHADOWSOCKS_UDP -p udp -s ${lan_ips} -m set --match-set usergfwlist dst -j MARK --set-mark 0x2333
# Apply for udp
iptables -t mangle -A SS_OUTPUT -p udp -j SHADOWSOCKS_UDP
iptables -t mangle -A SS_PREROUTING -p udp -s ${lan_ips} --dport 53 -m mark ! --mark 0x2333 -j ACCEPT
iptables -t mangle -A SS_PREROUTING -p udp -s ${lan_ips} -m mark ! --mark 0x2333 -j SHADOWSOCKS_UDP
iptables -t mangle -A SS_PREROUTING -p udp -s ${lan_ips} -m mark --mark 0x2333 -j TPROXY --on-ip 127.0.0.1 --on-port ${local_redir_port}
fi
fi
echo "Apply iptables rule done."
|
package unstructured
import (
"github.com/ghodss/yaml"
"k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
)
// StrToUnstructuredUnsafe parses a JSON (or YAML — ghodss/yaml accepts both)
// document into an Unstructured object and panics on malformed input.
// Prefer StrToUnstructured when the caller can handle the error.
func StrToUnstructuredUnsafe(jsonStr string) *unstructured.Unstructured {
	// Delegate to the error-returning variant instead of duplicating it.
	obj, err := StrToUnstructured(jsonStr)
	if err != nil {
		panic(err)
	}
	return obj
}
// StrToUnstructured parses a JSON document (or YAML — ghodss/yaml accepts
// both) into an Unstructured object, returning any unmarshalling error.
func StrToUnstructured(jsonStr string) (*unstructured.Unstructured, error) {
	obj := make(map[string]interface{})
	err := yaml.Unmarshal([]byte(jsonStr), &obj)
	if err != nil {
		return nil, err
	}
	return &unstructured.Unstructured{Object: obj}, nil
}
|
<filename>file_reader.go
package goparquet
import (
"context"
"fmt"
"io"
"github.com/fraugster/parquet-go/parquet"
"github.com/fraugster/parquet-go/parquetschema"
)
// FileReader is used to read data from a parquet file. Always use NewFileReader or a related
// function to create such an object.
type FileReader struct {
	meta             *parquet.FileMetaData // file-level metadata (row groups, schema, key/value pairs)
	schemaReader     *schema               // decodes row groups and tracks selected columns
	reader           io.ReadSeeker         // underlying file handle
	rowGroupPosition int                   // 1-based position of the loaded row group; 0 = none loaded yet
	currentRecord    int64                 // records consumed from the current row group
	skipRowGroup     bool                  // forces the next read to advance to the next row group
	ctx              context.Context       // default context for the *WithContext-less methods
}
// NewFileReaderWithOptions creates a new FileReader. You can provide a list of FileReaderOptions to configure
// aspects of its behaviour, such as limiting the columns to read, the file metadata to use, or the
// context to use. For a full list of options, please see the type FileReaderOption.
func NewFileReaderWithOptions(r io.ReadSeeker, readerOptions ...FileReaderOption) (*FileReader, error) {
	opts := newFileReaderOptions()
	if err := opts.apply(readerOptions); err != nil {
		return nil, err
	}
	var err error
	// Read metadata from the file only if the caller didn't supply it.
	if opts.metaData == nil {
		opts.metaData, err = ReadFileMetaData(r, true)
		if err != nil {
			return nil, fmt.Errorf("reading file meta data failed: %w", err)
		}
	}
	schema, err := makeSchema(opts.metaData, opts.validateCRC)
	if err != nil {
		return nil, fmt.Errorf("creating schema failed: %w", err)
	}
	schema.SetSelectedColumns(opts.columns...)
	// Reset the reader to the beginning of the file
	// (offset 4 — presumably past the 4-byte "PAR1" magic; confirm against
	// the parquet format spec before changing).
	if _, err := r.Seek(4, io.SeekStart); err != nil {
		return nil, err
	}
	return &FileReader{
		meta:         opts.metaData,
		schemaReader: schema,
		reader:       r,
		ctx:          opts.ctx,
	}, nil
}
// FileReaderOption is an option that can be passed on to NewFileReaderWithOptions when
// creating a new parquet file reader. Options are applied in the order given.
type FileReaderOption func(*fileReaderOptions) error
// fileReaderOptions collects the settings configurable via FileReaderOption.
type fileReaderOptions struct {
	metaData    *parquet.FileMetaData // pre-loaded metadata; nil means read from the file
	ctx         context.Context       // default context for reader methods
	columns     []ColumnPath          // column selection; empty means all columns
	validateCRC bool                  // verify CRC32 page checksums while reading
}
// newFileReaderOptions returns the defaults: background context, all columns,
// no CRC validation, metadata read from the file.
func newFileReaderOptions() *fileReaderOptions {
	return &fileReaderOptions{ctx: context.Background()}
}
// apply runs each option against o in order, stopping at the first error.
func (o *fileReaderOptions) apply(opts []FileReaderOption) error {
	for _, f := range opts {
		if err := f(o); err != nil {
			return err
		}
	}
	return nil
}
// WithReaderContext configures a custom context for the file reader. If none
// is provided, context.Background() is used as a default. The context is used
// by the non-*WithContext method variants.
func WithReaderContext(ctx context.Context) FileReaderOption {
	return func(opts *fileReaderOptions) error {
		opts.ctx = ctx
		return nil
	}
}
// WithFileMetaData allows you to provide your own file metadata. If none
// is set with this option, the file reader will read it from the parquet
// file.
func WithFileMetaData(metaData *parquet.FileMetaData) FileReaderOption {
	return func(opts *fileReaderOptions) error {
		opts.metaData = metaData
		return nil
	}
}
// WithColumns limits the columns which are read. If none are set, then
// all columns will be read by the parquet file reader. Column names use
// dotted notation.
//
// Deprecated: use WithColumnPaths instead.
func WithColumns(columns ...string) FileReaderOption {
	return func(opts *fileReaderOptions) error {
		paths := make([]ColumnPath, 0, len(columns))
		for _, name := range columns {
			paths = append(paths, parseColumnPath(name))
		}
		opts.columns = paths
		return nil
	}
}
// WithColumnPaths limits the columns which are read. If none are set, then
// all columns will be read by the parquet file reader.
func WithColumnPaths(columns ...ColumnPath) FileReaderOption {
	return func(opts *fileReaderOptions) error {
		opts.columns = columns
		return nil
	}
}
// WithCRC32Validation allows you to configure whether CRC32 page checksums will
// be validated when they're read. By default, checksum validation is disabled.
func WithCRC32Validation(enable bool) FileReaderOption {
	return func(opts *fileReaderOptions) error {
		opts.validateCRC = enable
		return nil
	}
}
// NewFileReader creates a new FileReader. You can limit the columns that are read by providing
// the names of the specific columns to read using dotted notation. If no columns are provided,
// then all columns are read. It is a convenience wrapper over NewFileReaderWithOptions.
func NewFileReader(r io.ReadSeeker, columns ...string) (*FileReader, error) {
	return NewFileReaderWithOptions(r, WithColumns(columns...))
}
// NewFileReaderWithContext creates a new FileReader. You can limit the columns that are read by providing
// the names of the specific columns to read using dotted notation. If no columns are provided,
// then all columns are read. The provided context.Context overrides the default context (which is a context.Background())
// for use in other methods of the *FileReader type.
//
// Deprecated: use the function NewFileReaderWithOptions and the option WithReaderContext instead.
func NewFileReaderWithContext(ctx context.Context, r io.ReadSeeker, columns ...string) (*FileReader, error) {
	return NewFileReaderWithOptions(r, WithReaderContext(ctx), WithColumns(columns...))
}
// NewFileReaderWithMetaData creates a new FileReader with custom file meta data. You can limit the columns that
// are read by providing the names of the specific columns to read using dotted notation. If no columns are provided,
// then all columns are read.
//
// Deprecated: use the function NewFileReaderWithOptions and the option WithFileMetaData instead.
func NewFileReaderWithMetaData(r io.ReadSeeker, meta *parquet.FileMetaData, columns ...string) (*FileReader, error) {
	return NewFileReaderWithOptions(r, WithFileMetaData(meta), WithColumns(columns...))
}
// SeekToRowGroup seeks to a particular row group, identified by its index.
// NOTE(review): positions appear to be 1-based here — SeekToRowGroup(1) loads
// the first row group, and 0 is out of range; confirm before relying on it.
func (f *FileReader) SeekToRowGroup(rowGroupPosition int) error {
	return f.SeekToRowGroupWithContext(f.ctx, rowGroupPosition)
}
// SeekToRowGroupWithContext seeks to a particular row group, identified by its index,
// and loads it. See the 1-based position note on SeekToRowGroup.
func (f *FileReader) SeekToRowGroupWithContext(ctx context.Context, rowGroupPosition int) error {
	// Step back one so readRowGroup's increment lands on the requested group.
	f.rowGroupPosition = rowGroupPosition - 1
	f.currentRecord = 0
	return f.readRowGroup(ctx)
}
// readRowGroup reads the next row group into memory, returning io.EOF when
// all row groups have been consumed.
func (f *FileReader) readRowGroup(ctx context.Context) error {
	if len(f.meta.RowGroups) <= f.rowGroupPosition {
		return io.EOF
	}
	f.rowGroupPosition++
	return readRowGroup(ctx, f.reader, f.schemaReader, f.meta.RowGroups[f.rowGroupPosition-1])
}
// CurrentRowGroup returns information about the current row group, or nil if
// the receiver/metadata is nil or no row group has been loaded yet.
func (f *FileReader) CurrentRowGroup() *parquet.RowGroup {
	// Fixed: also guard rowGroupPosition < 1 — before the first row group is
	// read the position is 0 and the old code indexed RowGroups[-1] (panic).
	if f == nil || f.meta == nil || f.meta.RowGroups == nil ||
		f.rowGroupPosition < 1 || f.rowGroupPosition-1 >= len(f.meta.RowGroups) {
		return nil
	}
	return f.meta.RowGroups[f.rowGroupPosition-1]
}
// RowGroupCount returns the number of row groups in the parquet file,
// as recorded in the file metadata.
func (f *FileReader) RowGroupCount() int {
	return len(f.meta.RowGroups)
}
// NumRows returns the number of rows in the parquet file. This information is directly taken from
// the file's meta data; no row groups are read.
func (f *FileReader) NumRows() int64 {
	return f.meta.NumRows
}
// advanceIfNeeded loads the next row group when nothing is loaded yet, the
// current group is exhausted, or a skip was requested via SkipRowGroup.
// On failure skipRowGroup stays true so the next call retries the advance.
func (f *FileReader) advanceIfNeeded(ctx context.Context) error {
	if f.rowGroupPosition == 0 || f.currentRecord >= f.schemaReader.rowGroupNumRecords() || f.skipRowGroup {
		if err := f.readRowGroup(ctx); err != nil {
			f.skipRowGroup = true
			return err
		}
		f.currentRecord = 0
		f.skipRowGroup = false
	}
	return nil
}
// RowGroupNumRows returns the number of rows in the current RowGroup,
// loading the next row group first if required.
func (f *FileReader) RowGroupNumRows() (int64, error) {
	return f.RowGroupNumRowsWithContext(f.ctx)
}
// RowGroupNumRowsWithContext returns the number of rows in the current RowGroup,
// loading the next row group first if required.
func (f *FileReader) RowGroupNumRowsWithContext(ctx context.Context) (int64, error) {
	if err := f.advanceIfNeeded(ctx); err != nil {
		return 0, err
	}
	return f.schemaReader.rowGroupNumRecords(), nil
}
// NextRow reads the next row from the parquet file. If required, it will load the next row group.
// It returns io.EOF once all rows have been read.
func (f *FileReader) NextRow() (map[string]interface{}, error) {
	return f.NextRowWithContext(f.ctx)
}
// NextRowWithContext reads the next row from the parquet file. If required, it will load the next row group.
func (f *FileReader) NextRowWithContext(ctx context.Context) (map[string]interface{}, error) {
	if err := f.advanceIfNeeded(ctx); err != nil {
		return nil, err
	}
	f.currentRecord++
	return f.schemaReader.getData()
}
// SkipRowGroup skips the currently loaded row group and advances to the next row group.
// The advance is lazy: it happens on the next read/preload call.
func (f *FileReader) SkipRowGroup() {
	f.skipRowGroup = true
}
// PreLoad is used to load the row group if required. It does nothing if the row group is already loaded.
func (f *FileReader) PreLoad() error {
	return f.PreLoadWithContext(f.ctx)
}
// PreLoadWithContext is used to load the row group if required. It does nothing if the row group is already loaded.
func (f *FileReader) PreLoadWithContext(ctx context.Context) error {
	return f.advanceIfNeeded(ctx)
}
// MetaData returns a map of metadata key-value pairs stored in the parquet file.
// Entries whose value pointer is nil are omitted.
func (f *FileReader) MetaData() map[string]string {
	return keyValueMetaDataToMap(f.meta.KeyValueMetadata)
}
// ColumnMetaData returns a map of metadata key-value pairs for the provided column in the current
// row group. The column name has to be provided in its dotted notation.
//
// Deprecated: use ColumnMetaDataByPath instead.
func (f *FileReader) ColumnMetaData(colName string) (map[string]string, error) {
	return f.ColumnMetaDataByPath(parseColumnPath(colName))
}
// ColumnMetaDataByPath returns a map of metadata key-value pairs for the
// provided column in the current row group. The column is provided as ColumnPath.
func (f *FileReader) ColumnMetaDataByPath(path ColumnPath) (map[string]string, error) {
	// Fixed: CurrentRowGroup can return nil (e.g. before any row group is
	// loaded); the old code dereferenced it unconditionally and panicked.
	rg := f.CurrentRowGroup()
	if rg == nil {
		return nil, fmt.Errorf("no row group is currently loaded")
	}
	for _, col := range rg.Columns {
		if path.Equal(ColumnPath(col.MetaData.PathInSchema)) {
			return keyValueMetaDataToMap(col.MetaData.KeyValueMetadata), nil
		}
	}
	return nil, fmt.Errorf("column %q not found", path.flatName())
}
// SetSelectedColumns sets the columns which are read, given in dotted
// notation. By default, all columns will be read.
//
// Deprecated: use SetSelectedColumnsByPath instead.
func (f *FileReader) SetSelectedColumns(cols ...string) {
	paths := make([]ColumnPath, 0, len(cols))
	for _, name := range cols {
		paths = append(paths, parseColumnPath(name))
	}
	f.schemaReader.SetSelectedColumns(paths...)
}
// SetSelectedColumnsByPath sets the columns which are read, identified by
// their ColumnPath. By default, all columns will be read.
func (f *FileReader) SetSelectedColumnsByPath(cols ...ColumnPath) {
	f.schemaReader.SetSelectedColumns(cols...)
}
// Columns returns the list of columns as exposed by the schema reader.
func (f *FileReader) Columns() []*Column {
	return f.schemaReader.Columns()
}
// GetColumnByName returns a column identified by name. If the column doesn't exist,
// the method returns nil.
func (f *FileReader) GetColumnByName(name string) *Column {
	return f.schemaReader.GetColumnByName(name)
}
// GetColumnByPath returns a column identified by its path. If the column doesn't exist,
// nil is returned.
func (f *FileReader) GetColumnByPath(path ColumnPath) *Column {
	return f.schemaReader.GetColumnByPath(path)
}
// GetSchemaDefinition returns the current schema definition.
func (f *FileReader) GetSchemaDefinition() *parquetschema.SchemaDefinition {
	return f.schemaReader.GetSchemaDefinition()
}
// keyValueMetaDataToMap converts a parquet key/value list into a plain map,
// dropping entries with a nil value pointer.
func keyValueMetaDataToMap(kvMetaData []*parquet.KeyValue) map[string]string {
	data := make(map[string]string)
	for _, kv := range kvMetaData {
		if kv.Value != nil {
			data[kv.Key] = *kv.Value
		}
	}
	return data
}
|
#!/usr/bin/env bash
# Deploys the templated manifest to the staging Kubernetes cluster.
# exit script when any command ran here returns with non-zero exit code;
# pipefail added so failures inside pipelines (base64 decode, envsubst)
# are not masked by the final command's exit status.
set -eo pipefail

# Materialize the kubeconfig from the base64-encoded CI secret.
echo "$STAGING_KUBERNETES_KUBECONFIG" | base64 --decode > kubeconfig.yml

# Render the deployment manifest from its template using current env vars.
envsubst < deploy/deployment.yml.template | tee deploy/deployment.yml

# Sanity-check cluster connectivity before applying.
kubectl --kubeconfig=kubeconfig.yml get nodes
kubectl --kubeconfig=kubeconfig.yml apply -f deploy/deployment.yml
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by <NAME>, <EMAIL>, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Patch(AutotoolsPackage):
    """Patch takes a patch file containing a difference listing produced by
    the diff program and applies those differences to one or more
    original files, producing patched versions.
    """
    homepage = "http://savannah.gnu.org/projects/patch/"
    url = "http://ftp.gnu.org/gnu/patch/patch-2.7.6.tar.xz"

    # Checksums are MD5 digests of the release tarballs.
    version('2.7.6', '78ad9937e4caadcba1526ef1853730d5')
    version('2.7.5', 'e3da7940431633fb65a01b91d3b7a27a')

    # Build out-of-source in a dedicated directory.
    build_directory = 'spack-build'
|
<reponame>Jakobis/OrderedSequences
import pathlib
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.ticker as mtick
from matplotlib.ticker import ScalarFormatter
import math
import pandas as pd
markers=['o', '^', 's', 'D', 'x', '1', '|']
def createplotforoperation(op, data, inter):
    """Plot average per-call runtime of `op` for every data structure in `data`.

    Saves a log-log PNG under ../results/graphs_{inter}/{op}.png, where
    `inter` is the interpreter label (e.g. 'PyPy' or 'CPython').
    """
    plt.cla()
    plt.clf()
    plt.rcParams.update({'font.size': 12})
    plt.figure(figsize=(7.5,5))
    plt.yscale('log')  # TODO: decide whether the axes should stay logarithmic
    plt.xscale('log')
    plt.gca().yaxis.set_major_formatter(mtick.StrMethodFormatter('{x:,.0f}'))
    plt.gca().yaxis.set_minor_formatter(mtick.NullFormatter())
    df = pd.DataFrame(data[data["Op"] == op])
    df = df.astype({'OpCount': 'int64'})
    # Average microseconds per individual operation.
    df['Avg(micros)'] = (df['Time(s)'] / df['OpCount']) * 1000 * 1000
    print(df)
    texts = []
    i = 0
    for name in sorted(list(pd.unique(df['DS']))):
        dfn = df[df['DS'] == name]
        x = list(dfn['Size'])
        y = list(dfn['Avg(micros)'])
        p = plt.plot(x, y, label=name, marker=markers[i])
        i += 1
        texts.append(plt.annotate(f'{name}', (x[-1], y[-1] ), color = p[0].get_color()))
    plt.xlabel('Initial elements')
    plt.ylabel('Average time (µs)')
    plt.grid(True, which="both", linestyle='--')
    # Nudge end-of-line labels apart until no two overlap vertically.
    # NOTE: relies on private matplotlib APIs (_get_position_xy, _x/_y).
    size = texts[0].get_fontsize()
    overlapping = True
    while overlapping:
        overlap = set()
        overlapping = False
        for a in texts:
            for b in texts:
                if a == b: continue
                if a._get_position_xy(plt)[1] < b._get_position_xy(plt)[1] and a._get_position_xy(plt)[1] + size > b._get_position_xy(plt)[1]:
                    overlap.add(a)
                    overlap.add(b)
                    overlapping = True
        if len(overlap) == 0: break
        mi = min(overlap, key=lambda a: a.xy[1])
        ma = max(overlap, key=lambda a: a.xy[1])
        mi.set_y(mi.xy[1] - mi.xy[1] / 100)
        ma.set_y(ma.xy[1] + ma.xy[1] / 100)
        mi.xy = (mi._x, mi._y)
        ma.xy = (ma._x, ma._y)
    plt.legend(loc="upper left")
    plt.margins(x=0)
    # Fixed: title previously interpolated the global loop variable `l`
    # instead of the `inter` parameter, which only worked by accident.
    plt.title(f'Average runtime for N "{op}" operations in {inter}')
    plt.tight_layout()
    pathlib.Path(f'../results/graphs_{inter}').mkdir(parents=True, exist_ok=True)
    plt.savefig(f'../results/graphs_{inter}/{op}.png', bbox_inches='tight')
if __name__ == '__main__':
    # One pass per interpreter: load its timing CSV and render a graph
    # for every distinct operation found in it.
    for l in ['PyPy', 'CPython']:
        data = pd.read_csv(f"../results/timings/{l.lower()}_res.csv")
        ops = list(pd.unique(data["Op"]))
        for op in ops:
            createplotforoperation(op, data, l)
|
<gh_stars>1-10
import type { StaticTestProps } from '../types';

// Bundles the ambient test-runner globals so they can be injected into
// static tests as a single object.
export const staticTestProps: StaticTestProps = { expect, describe, it }
|
var express = require('express');
var router = express.Router();
var users = require('./users');

/* GET home page. */
router.get('/', function(req, res, next) {
  //res.render('index', { title: 'Express' });
  // NOTE(review): res.sendfile is deprecated in Express 4 in favour of
  // res.sendFile (which requires an absolute path or a root option) —
  // confirm the Express version before migrating.
  res.sendfile('views/index.html')
});

// Login: show the form, or a notice if a session user already exists.
router.get('/users/login', function(req, res, next) {
  //res.render('index', { title: 'Express' });
  if(req.session.user){// already logged in
    res.send(req.session.user.loginname+'已登录。')
  }else{
    res.sendfile('views/html/login.html')
  }
});
router.post('/users/login',users.login);

// Registration: form on GET, handler on POST.
router.get('/users/register', function(req, res, next) {
  //res.render('index', { title: 'Express' });
  res.sendfile('views/html/register.html')
});
router.post('/users/register',users.register);

// User routes (not wired up yet)
//router.use('/register',users);
// Article routes (not wired up yet)
//router.use('/article',article);

module.exports = router;
|
<filename>frontend/src/components/gamePlay/DrawerLeft.js
import React from 'react'
class DrawerLeft extends React.Component {
render() {
return (
<div className='wrapper'>
<h2>
Hi there, {this.props.currentUser.name}
</h2>
<h3>
The drawer left the game. So it has reset.
</h3>
{ !this.props.drawer_id && <button className='button, smaller' onClick={ event => this.props.handleDrawClick({type: 'drawer'}) }>I will draw</button> }
<button className='button, smaller' onClick={ event => this.props.handleGuessClick({type: 'guesser'}) }>I will guess</button>
</div>
)
}
}
export default DrawerLeft
|
// Luhn checksum validation (ISO/IEC 7812) for a credit-card number string.
// BUG FIX: the previous version summed the even-position digits without
// doubling them, so it computed a plain digit sum, not the Luhn checksum.
function isValid(card_number) {
    // 1. Reverse the order of the digits in the number.
    card_number = card_number.split("").reverse().join("");
    // 2. Sum the first, third, fifth, ... digits of the reversed number as-is.
    var sum_odd_digits = 0;
    for (var i = 0; i < card_number.length; i += 2) {
        sum_odd_digits += parseInt(card_number.charAt(i), 10);
    }
    // 3. Double the second, fourth, sixth, ... digits; if a doubled digit
    //    exceeds 9, subtract 9 (same as summing its two digits).
    var sum_even_digits = 0;
    for (var j = 1; j < card_number.length; j += 2) {
        var doubled = parseInt(card_number.charAt(j), 10) * 2;
        sum_even_digits += doubled > 9 ? doubled - 9 : doubled;
    }
    // 4./5. The number is valid iff the total is divisible by 10.
    var total_sum = sum_odd_digits + sum_even_digits;
    return (total_sum % 10 == 0);
}
// Authentication/post API wrapper. Every method returns a $q promise that
// resolves with the $http response on HTTP 200 and rejects otherwise.
app.factory("authFactory", function($http, $cookies, $q){
    // Build the request config carrying the stored auth token header.
    function authHeaders() {
        return { headers: { 'x-auth': $cookies.get('token') } };
    }
    // Resolve on HTTP 200, reject on any other status or transport error.
    function settle(request) {
        let defered = $q.defer();
        request.then(data => {
            if (data.status === 200) {
                defered.resolve(data);
            } else {
                defered.reject(data);
            }
        }, err => {
            defered.reject(err);
        });
        return defered.promise;
    }
    // Like settle(), but on success first persists the x-auth response header
    // as a 1-day cookie (shared by login and register).
    function settleAndStoreToken(request) {
        let defered = $q.defer();
        request.then(data => {
            if (data.status === 200) {
                let xAuth = data.headers()['x-auth'];
                let expireDate = new Date();
                expireDate.setDate(expireDate.getDate() + 1); // 1day cookie
                $cookies.put('token', xAuth, {'expires': expireDate, 'samesite': 'lax'});
                defered.resolve(data);
            } else {
                defered.reject(data);
            }
        }, err => {
            defered.reject(err);
        });
        return defered.promise;
    }
    const object = {
        // Authenticate and store the session token cookie.
        login (email, password) {
            return settleAndStoreToken($http.post('user/login', {
                "email": email,
                "password": password
            }));
        },
        // Create an account and store the session token cookie.
        register (email, password) {
            return settleAndStoreToken($http.post('user/', {
                "email": email,
                "password": password
            }));
        },
        // Invalidate the current token server-side.
        logout () {
            return settle($http.delete('user/me/token', authHeaders()));
        },
        // Verify the stored token is still valid.
        authCheck() {
            return settle($http.get('user/me', authHeaders()));
        },
        // Upload a post; keeps the original multipart enctype hint.
        addPost(img, title, description) {
            return settle($http.post('/post', {
                "myImage": img,
                "title": title,
                "description": description
            }, {
                headers: {
                    'x-auth': $cookies.get('token'),
                    enctype: 'multipart/form-data'
                }
            }));
        },
        // Fetch the post feed for the authenticated user.
        getPost() {
            return settle($http.get('/post', authHeaders()));
        }
    }
    return object;
})
package evilcraft.blocks;
import evilcraft.api.config.BlockConfig;
/**
* Config for the {@link DarkBlock}.
* @author rubensworks
*
*/
public class DarkBlockConfig extends BlockConfig {

    /**
     * The unique instance.
     */
    public static DarkBlockConfig _instance;

    /**
     * Create the config for {@link DarkBlock}, registered under the
     * name "darkBlock" with no extra comment.
     */
    public DarkBlockConfig() {
        super(true, "darkBlock", null, DarkBlock.class);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public boolean isMultipartEnabled() {
        return true;
    }
}
|
#!/bin/sh
#
# STIG URL: http://www.stigviewer.com/stig/red_hat_enterprise_linux_6/2014-06-11/finding/V-38575
# Finding ID: V-38575
# Version: RHEL-06-000200
# Finding Level: Low
#
# The audit system must be configured to audit user deletions of files
# and programs. Auditing file deletions will create an audit trail for
# files that are removed from the system. The audit trail could aid in
# system troubleshooting, as well as detecting malicious processes that
# that attempt to delete log files to conceal their presence.
#
# CCI: CCI-000172
# NIST SP 800-53 :: AU-12 c
# NIST SP 800-53A :: AU-12.1 (iv)
# NIST SP 800-53 Revision 4 :: AU-12 c
#
############################################################

# Print one diagnostic line; printf avoids echo's portability quirks.
diag_out() {
   printf '%s\n' "$1"
}

diag_out "----------------------------------"
diag_out "STIG Finding ID: V-38575"
diag_out " Audit system must log user"
diag_out " deletions of files and programs"
diag_out "----------------------------------"
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
// Compiled (tsc) output — edit the TypeScript source, not this file.
// True when the element is narrower than cfg.ratio (default 0.15) of the
// enclosing region's width; used as the "padding" comparison rule below.
function elementWidth(node, region, cfg) {
    if (cfg === void 0) { cfg = { ratio: 0.15 }; }
    return node.width < region.width * cfg.ratio;
}
// Rule descriptor consumed by the comparison engine — TODO confirm the
// expected shape against the rule registry.
exports.default = {
    type: 'padding',
    usage: 'compare',
    expression: elementWidth,
};
//# sourceMappingURL=element-width.js.map
#!/bin/bash
# Archived program command-line for experiment
# Copyright 2021 ServiceNow All Rights Reserved
#
# Usage: bash {this_file} [additional_options]
# NOTE(review): this is an archived, verbatim reproduction of an experiment's
# command line — the flag values encode the experiment configuration and must
# not be edited. Extra arguments are forwarded to the binary via "$@".
set -x;
set -e;
../bytesteady/bytesteady -driver_location models/index/doublefnvnllgram1a2a4a8a16a32a64a128a256a512a1024size16777216dimension16a0.1b0alpha0lambda0n0rho0 -driver_epoch_size 100 -data_file data/index/train.bytesteady -data_format kBytes,kIndex -model_input_size 16777216,1024 -model_output_size 3 -model_gram '{1,2,4,8,16,32,64,128,256,512,1024}' -model_dimension 16 -train_a 0.1 -train_b 0 -train_alpha 0 -train_lambda 0 -train_n 0 -train_rho 0 "$@";
|
#!/bin/bash -e
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
#
# Usage: base_box_test.sh <ZEPHYR_BOARD>
# Execute microTVM Zephyr tests.
#

set -e
set -x

if [ "$#" -lt 1 ]; then
    echo "Usage: base_box_test.sh <ZEPHYR_BOARD>"
    # Fix: exit status must be 0-255; "exit -1" is out of range/non-portable.
    exit 1
fi

board="$1"

# Quote the board name so values with spaces/globs pass through intact.
pytest tests/micro/zephyr/test_zephyr.py --zephyr-board="${board}"
pytest tests/micro/zephyr/test_zephyr_aot.py --zephyr-board="${board}"
|
#!/bin/bash
# Clear any inherited AMBER installation path so downstream tools start from a
# clean environment — presumably re-set later by the caller; verify.
unset AMBERHOME
#!/bin/bash
# [CTCGFW]Project-OpenWrt
# Use it under GPLv3, please.
# --------------------------------------------------------
# Convert translation files zh-cn to zh_Hans
# The script is still in testing, welcome to report bugs.
#
# Fixes: use `grep -q` instead of `[ -n "$(grep ...)" ]` (same truth value,
# no subshell/capture), and `printf '%s\n'` instead of `echo -e`, which would
# corrupt any path containing backslash sequences.

# *.zh-cn*.po files: fix the Language header, then rename to zh_Hans.
po_file="$({ find |grep -E "[a-z0-9]+\.zh\-cn.+po"; } 2>"/dev/null")"
for a in ${po_file}
do
	grep -q "Language: zh_CN" "$a" && sed -i "s/Language: zh_CN/Language: zh_Hans/g" "$a"
	po_new_file="$(printf '%s\n' "$a"|sed "s/zh-cn/zh_Hans/g")"
	mv "$a" "${po_new_file}" 2>"/dev/null"
done

# *.po files living under a zh-cn/ directory: same header fix and rename.
po_file2="$({ find |grep "/zh-cn/" |grep "\.po"; } 2>"/dev/null")"
for b in ${po_file2}
do
	grep -q "Language: zh_CN" "$b" && sed -i "s/Language: zh_CN/Language: zh_Hans/g" "$b"
	po_new_file2="$(printf '%s\n' "$b"|sed "s/zh-cn/zh_Hans/g")"
	mv "$b" "${po_new_file2}" 2>"/dev/null"
done

# Compiled .lmo files go the opposite way: zh_Hans back to zh-cn.
lmo_file="$({ find |grep -E "[a-z0-9]+\.zh_Hans.+lmo"; } 2>"/dev/null")"
for c in ${lmo_file}
do
	lmo_new_file="$(printf '%s\n' "$c"|sed "s/zh_Hans/zh-cn/g")"
	mv "$c" "${lmo_new_file}" 2>"/dev/null"
done

lmo_file2="$({ find |grep "/zh_Hans/" |grep "\.lmo"; } 2>"/dev/null")"
for d in ${lmo_file2}
do
	lmo_new_file2="$(printf '%s\n' "$d"|sed "s/zh_Hans/zh-cn/g")"
	mv "$d" "${lmo_new_file2}" 2>"/dev/null"
done

# Rename remaining zh-cn directories (anything that is not a .po/.lmo file).
po_dir="$({ find |grep "/zh-cn" |sed "/\.po/d" |sed "/\.lmo/d"; } 2>"/dev/null")"
for e in ${po_dir}
do
	po_new_dir="$(printf '%s\n' "$e"|sed "s/zh-cn/zh_Hans/g")"
	mv "$e" "${po_new_dir}" 2>"/dev/null"
done

# Makefiles: rewrite zh-cn references, but keep the compiled .lmo name zh-cn.
makefile_file="$({ find|grep Makefile |sed "/Makefile./d"; } 2>"/dev/null")"
for f in ${makefile_file}
do
	grep -q "zh-cn" "$f" && sed -i "s/zh-cn/zh_Hans/g" "$f"
	grep -q "zh_Hans.lmo" "$f" && sed -i "s/zh_Hans.lmo/zh-cn.lmo/g" "$f"
done
|
<reponame>xiuhuang/supermarket-admin
// Mock data for the application-list page: records plus pagination state.
// NOTE(review): `list` holds 4 rows but pagination.total is 3 — looks like a
// stale count; confirm whether the consumer derives paging from `total`.
export default {
  list: [
    {
      key: 0,
      no: 'DJ00000001',
      name: '上海高重信息科技有限公司',
      jg: '江苏银行股份有限公司',
      cp: '快易贷',
      status: 0,
      pushstatus: '0',
      sj: '2019-01-01 12:00:00',
    },
    {
      key: 1,
      no: 'DJ00000002',
      name: '上海高重信息科技有限公司',
      jg: '南京银行股份有限公司',
      cp: '快易贷',
      status: 1,
      pushstatus: '',
      sj: '2019-01-01 12:00:00',
    },
    {
      key: 2,
      no: 'DJ00000003',
      name: '上海高重信息科技有限公司',
      jg: '中国农业银行股份有限公司',
      cp: '快易贷',
      status: 2,
      pushstatus: '',
      sj: '2019-01-01 12:00:00',
    },
    {
      key: 3,
      no: 'DJ00000004',
      name: '上海高重信息科技有限公司',
      jg: '江苏银行股份有限公司',
      cp: '信保贷',
      status: 0,
      pushstatus: '1',
      sj: '2019-01-01 12:00:00',
    },
  ],
  pagination: {
    total: 3,
    pageSize: 10,
    current: 1,
  },
};
|
#!/bin/sh
# Download the netrace trace files
# Fetches the PARSEC 64-core benchmark traces into ./traces/ in one wget
# invocation; files remain bzip2-compressed after download.
mkdir -p traces
cd traces
URL=http://www.cs.utexas.edu/~netrace/download
wget ${URL}/blackscholes_64c_simlarge.tra.bz2 \
${URL}/blackscholes_64c_simmedium.tra.bz2 \
${URL}/blackscholes_64c_simsmall.tra.bz2 \
${URL}/bodytrack_64c_simlarge.tra.bz2 \
${URL}/canneal_64c_simmedium.tra.bz2 \
${URL}/dedup_64c_simmedium.tra.bz2 \
${URL}/ferret_64c_simmedium.tra.bz2 \
${URL}/fluidanimate_64c_simlarge.tra.bz2 \
${URL}/fluidanimate_64c_simmedium.tra.bz2 \
${URL}/fluidanimate_64c_simsmall.tra.bz2 \
${URL}/swaptions_64c_simlarge.tra.bz2 \
${URL}/vips_64c_simmedium.tra.bz2 \
${URL}/x264_64c_simmedium.tra.bz2 \
${URL}/x264_64c_simsmall.tra.bz2
|
package commit
import (
"fmt"
"io"
"sort"
"gitlab.com/gitlab-org/gitaly/v14/internal/command"
"gitlab.com/gitlab-org/gitaly/v14/internal/git"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/log"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/lstree"
"gitlab.com/gitlab-org/gitaly/v14/internal/helper"
"gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb"
)
const (
	// InvalidUTF8PathPlaceholder is a placeholder we return in the Path field since
	// returning non utf8 data will result in a marshalling error
	// Once we deprecate the Path field, we can remove this
	InvalidUTF8PathPlaceholder = "ENCODING ERROR gitaly#1547"
)

var (
	// maxNumStatBatchSize caps how many commit-for-tree entries are buffered
	// before a response message is flushed to the stream. Kept as a var (not
	// const) — presumably so tests can override it; verify before changing.
	maxNumStatBatchSize = 10
)
// ListLastCommitsForTree validates the request and streams, for each entry of
// the requested tree, the last commit that touched it. Validation failures
// map to InvalidArgument; any other failure maps to Internal.
func (s *server) ListLastCommitsForTree(in *gitalypb.ListLastCommitsForTreeRequest, stream gitalypb.CommitService_ListLastCommitsForTreeServer) error {
	if err := validateListLastCommitsForTreeRequest(in); err != nil {
		return helper.ErrInvalidArgument(err)
	}
	if err := s.listLastCommitsForTree(in, stream); err != nil {
		return helper.ErrInternal(err)
	}
	return nil
}
// listLastCommitsForTree lists the tree at the requested revision/path,
// applies offset/limit paging to the sorted entries, resolves the last commit
// for each remaining entry, and streams them in batches of
// maxNumStatBatchSize.
func (s *server) listLastCommitsForTree(in *gitalypb.ListLastCommitsForTreeRequest, stream gitalypb.CommitService_ListLastCommitsForTreeServer) error {
	cmd, parser, err := s.newLSTreeParser(in, stream)
	if err != nil {
		return err
	}

	ctx := stream.Context()
	repo := s.localrepo(in.GetRepository())

	c, err := s.catfileCache.BatchProcess(ctx, repo)
	if err != nil {
		return err
	}

	batch := make([]*gitalypb.ListLastCommitsForTreeResponse_CommitForTree, 0, maxNumStatBatchSize)
	entries, err := getLSTreeEntries(parser)
	if err != nil {
		return err
	}

	// An out-of-range offset yields an empty result rather than an error.
	offset := int(in.GetOffset())
	if offset >= len(entries) {
		offset = 0
		entries = lstree.Entries{}
	}

	// Clamp the page end to the number of available entries.
	limit := offset + int(in.GetLimit())
	if limit > len(entries) {
		limit = len(entries)
	}

	for _, entry := range entries[offset:limit] {
		commit, err := log.LastCommitForPath(ctx, s.gitCmdFactory, c, repo, git.Revision(in.GetRevision()), entry.Path, in.GetGlobalOptions())
		if err != nil {
			return err
		}

		commitForTree := &gitalypb.ListLastCommitsForTreeResponse_CommitForTree{
			PathBytes: []byte(entry.Path),
			Commit:    commit,
		}

		batch = append(batch, commitForTree)
		// Flush a full batch and reuse the backing array for the next one.
		if len(batch) == maxNumStatBatchSize {
			if err := sendCommitsForTree(batch, stream); err != nil {
				return err
			}

			batch = batch[0:0]
		}
	}

	// Ensure ls-tree exited cleanly before sending the final partial batch.
	if err := cmd.Wait(); err != nil {
		return err
	}

	return sendCommitsForTree(batch, stream)
}
// getLSTreeEntries drains the parser until EOF and returns all entries in
// stable sorted order.
func getLSTreeEntries(p *lstree.Parser) (lstree.Entries, error) {
	entries := lstree.Entries{}

	for {
		entry, err := p.NextEntry()
		if err == io.EOF {
			break
		}
		if err != nil {
			return nil, err
		}
		entries = append(entries, *entry)
	}

	sort.Stable(entries)

	return entries, nil
}
// newLSTreeParser spawns `git ls-tree -z --full-name <revision> <path>` for
// the request (an empty or "/" path means the repository root) and returns
// the running command together with a parser over its output.
func (s *server) newLSTreeParser(in *gitalypb.ListLastCommitsForTreeRequest, stream gitalypb.CommitService_ListLastCommitsForTreeServer) (*command.Command, *lstree.Parser, error) {
	path := string(in.GetPath())
	if path == "" || path == "/" {
		path = "."
	}

	opts := git.ConvertGlobalOptions(in.GetGlobalOptions())
	cmd, err := s.gitCmdFactory.New(stream.Context(), in.GetRepository(), git.SubCmd{
		Name:  "ls-tree",
		Flags: []git.Option{git.Flag{Name: "-z"}, git.Flag{Name: "--full-name"}},
		Args:  []string{in.GetRevision(), path},
	}, opts...)
	if err != nil {
		return nil, nil, err
	}

	return cmd, lstree.NewParser(cmd), nil
}
// sendCommitsForTree streams one response message containing the batch; an
// empty batch is a no-op.
func sendCommitsForTree(batch []*gitalypb.ListLastCommitsForTreeResponse_CommitForTree, stream gitalypb.CommitService_ListLastCommitsForTreeServer) error {
	if len(batch) == 0 {
		return nil
	}

	response := &gitalypb.ListLastCommitsForTreeResponse{Commits: batch}
	return stream.Send(response)
}
// validateListLastCommitsForTreeRequest rejects malformed revisions and
// negative offset/limit values.
func validateListLastCommitsForTreeRequest(in *gitalypb.ListLastCommitsForTreeRequest) error {
	if err := git.ValidateRevision([]byte(in.Revision)); err != nil {
		return err
	}

	switch {
	case in.GetOffset() < 0:
		return fmt.Errorf("offset negative")
	case in.GetLimit() < 0:
		return fmt.Errorf("limit negative")
	}

	return nil
}
|
# Implement Newton's method
import gurobipy as gp
import numpy as np
import matplotlib.pyplot as plt
from gurobipy import *
id = 903515184
# Invoke Newton's method to solve the system of equations
# Invoke Newton's method to solve the system of equations
def newton(id, epsilon_1 = 1e-10, secant=False):
    """Maximize the profit function via Newton's method on its derivative.

    The problem data (c, L, U, alpha, beta, a, b) comes from get_data(id).
    Starts at p = U, iterates up to 100 times, clamps the result to [L, U],
    and returns the profit at the final price.
    NOTE(review): the `secant` parameter is accepted but never used — confirm
    whether a secant-method fallback was intended.
    """
    profit = 0.0
    c, L, U, alpha, beta, a, b = get_data(id)
    # Profit as a function of price p.
    def function_(p):
        f = (p/2)*(a+b) - ((p**2)/2)*(alpha + beta) + (c**2/(2*p))*(b-a-beta*p + alpha*p) - c*(b-beta*p)
        return f
    # First derivative of the profit; the 1e-7 nudge guards against p == 0
    # in the 1/p**2 term (side effect: evaluates slightly off p).
    def function_d(p):
        p = p + 1e-7
        f = 0.5*(a+b) - p * (alpha+beta) + c * beta - (c**2/(2*p**2))*(b-a)
        return f
    # Question-2 variant: cleared-denominator form of the first-order condition.
    def q2_function(p, alpha, beta, c, a, b):
        f = 2 * (p ** 3) * (beta + alpha) - 2 * beta * c * (p ** 2) - a * (p ** 2 + c ** 2) - b * (p ** 2 - c ** 2)
        return f
    # Derivative of the question-2 variant.
    def q2_function_d(p, alpha, beta, c, a, b):
        f = 6 * (p ** 2) * (beta + alpha) - 4 * beta * c * p - 2 * a * p - 2 * b * p
        return f
    #print("check", function_(5), f_p(a - alpha * 5, b - beta * 5, c, 5))
    # Second derivative of the profit (used as Newton's slope).
    def function_dd(p):
        p = p + 1e-7
        f = -(alpha+beta) + (c**2/p**3) * (b-a)
        return f
    y_k = U
    epsilon_2 = 1e-5
    f_y_store = []
    question2_k = U
    for k in range(100):
        d_k = function_dd(y_k)
        #print("d_k", d_k)
        # A near-zero second derivative would blow up the Newton step.
        if np.abs(d_k) < epsilon_2:
            print("Derivative failure")
            raise AssertionError
        y_k = y_k - function_d(y_k) / d_k
        question2_k = question2_k - q2_function(question2_k, alpha, beta, c, a, b) / q2_function_d(question2_k, alpha,beta, c, a, b)
        #print("question2_k", question2_k)
        f_y_store.append(function_d(y_k))
        #print(f_y_store)
        # Divergence guard: |f'| failed to improve over the last two iterates.
        if k >= 2:
            if np.abs(f_y_store[k]) >= np.abs(f_y_store[k-1]) and np.abs(f_y_store[k]) >= np.abs(f_y_store[k-2]):
                print("algorithm is not converging")
                raise AssertionError
        if np.abs(f_y_store[k]) <= epsilon_1:
            print("success at {}".format(k))
            break
    print(y_k, function_(y_k))
    # Clamp the unconstrained optimum to the feasible price interval [L, U].
    if y_k >= U:
        y_k = U
    elif y_k <= L:
        y_k = L
    print("p should be {}".format(y_k))
    profit = function_(y_k)
    # Return the profit
    return(profit)
def check(id):
    """Visual sanity check: plot q2_function around p in [410, 420).

    Draws the function against a zero line so the root location can be eyeballed;
    intended for manual use only (see the commented-out call at the bottom).
    """
    c, L, U, alpha, beta, a, b = get_data(id)
    # Profit function (kept for the commented-out alternative plot below).
    def function_(p):
        f = (p/2)*(a+b) - (p**2/2)*(alpha + beta) + (c**2/(2*p))*(b-a-beta*p + alpha*p) - c*(b-beta*p)
        return f
    # First derivative of the profit (currently unused here).
    def function_d(p):
        p = p + 1e-7
        f = 0.5*(a+b) - p * (alpha+beta) + c * beta - (c**2/(2*p**2))*(b-a)
        return f
    # Question-2 polynomial and its derivative (same forms as in newton()).
    def q2_function_d(p, alpha, beta, c, a, b):
        f = 6 * (p ** 2) * (beta + alpha) - 4 * beta * c * p - 2 * a * p - 2 * b * p
        return f
    def q2_function(p, alpha, beta, c, a, b):
        f = 2 * (p ** 3) * (beta + alpha) - 2 * beta * c * (p ** 2) - a * (p ** 2 + c ** 2) - b * (p ** 2 - c ** 2)
        return f
    x = np.arange(410,420,1).tolist()
    y = [q2_function(xi, alpha, beta, c, a, b) for xi in x]
    #y = [function_(xi) for xi in x]
    plt.plot(x,[0 for xi in x])
    plt.plot(x,y)
    plt.xlabel("number of p")
    plt.ylabel("function value")
    plt.show()
# Generate the data (DO NOT MODIFY)
def get_data(id):
    """Derive the per-student problem parameters from a 9-digit GTID.

    Returns (c, L, U, alpha, beta, a, b); only a and b depend on the id
    (digits 6-9). Raises IndexError for ids that are not 9 digits long.
    """
    id_str = str(id)
    if len(id_str) != 9:
        raise IndexError('Input GTID is not 9 digits!')
    c = 10
    L = 10
    U = 60
    alpha = 0.5
    beta = 2.4
    a = 300 + 2 * int(id_str[5]) + int(id_str[6])
    b = 2100 - 3 * int(id_str[7]) - 5 * int(id_str[8])
    return(c, L, U, alpha, beta, a, b)
#check(id=id)
# Entry point: run Newton's method for the hard-coded GTID and print the profit.
print(newton(id=id))
import random
def shuffle_array(array):
    """Shuffle the list in place (Fisher-Yates) and return it for convenience."""
    n = len(array)
    for position in range(n):
        swap_with = random.randint(position, n - 1)
        array[position], array[swap_with] = array[swap_with], array[position]
    return array
print(shuffle_array([1,2,3,4,5])) |
<filename>elasta-orm/src/main/java/elasta/orm/query/iml/QueryExecutorImpl.java
package elasta.orm.query.iml;
import elasta.core.promise.intfs.Promise;
import elasta.criteria.json.mapping.GenericJsonToFuncConverter;
import elasta.criteria.json.mapping.JsonToFuncConverterMap;
import elasta.orm.entity.EntityMappingHelper;
import elasta.orm.query.QueryExecutor;
import elasta.orm.query.expression.FieldExpression;
import elasta.orm.query.expression.Query;
import elasta.orm.query.expression.builder.impl.QueryBuilderImpl;
import elasta.sql.SqlDB;
import io.vertx.core.json.JsonArray;
import io.vertx.core.json.JsonObject;
import lombok.Builder;
import lombok.Value;
import java.util.Collection;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
/**
* Created by sohan on 3/19/2017.
*/
/**
 * Executes entity queries by translating JSON criteria/selections into SQL
 * via {@link QueryBuilderImpl}. All collaborators are required (non-null).
 */
final public class QueryExecutorImpl implements QueryExecutor {
    final EntityMappingHelper helper;
    final JsonToFuncConverterMap jsonToFuncConverterMap;
    final GenericJsonToFuncConverter jsonToFuncConverter;
    final SqlDB sqlDB;

    public QueryExecutorImpl(EntityMappingHelper helper, JsonToFuncConverterMap jsonToFuncConverterMap, GenericJsonToFuncConverter jsonToFuncConverter, SqlDB sqlDB) {
        Objects.requireNonNull(helper);
        Objects.requireNonNull(jsonToFuncConverterMap);
        Objects.requireNonNull(jsonToFuncConverter);
        Objects.requireNonNull(sqlDB);
        this.helper = helper;
        this.jsonToFuncConverterMap = jsonToFuncConverterMap;
        this.jsonToFuncConverter = jsonToFuncConverter;
        this.sqlDB = sqlDB;
    }

    /**
     * Runs a field-selection query and returns the rows as JSON objects.
     */
    @Override
    public Promise<List<JsonObject>> query(QueryParams params) {
        return doQuery(params);
    }

    private Promise<List<JsonObject>> doQuery(QueryParams params) {
        // Idiom: Optional.orElse(null) replaces the isPresent()/get() ternary.
        QueryBuilderImpl qb = new QueryBuilderImpl(
            helper,
            sqlDB,
            params.getPagination().orElse(null)
        );

        params.getSelections()
            .forEach(
                field -> qb.selectBuilder().add(
                    qb.select(field)
                )
            );

        return prepareAndExecute(
            qb,
            jsonToFuncConverterMap,
            Params.builder()
                .entity(params.getEntity())
                .alias(params.getAlias())
                .joinParams(params.getJoinParams())
                .criteria(params.getCriteria())
                .groupBy(params.getGroupBy())
                .orderBy(params.getOrderBy())
                .having(params.getHaving())
                .build()
        ).execute();
    }

    /**
     * Runs an expression-selection query and returns the rows as JSON arrays.
     */
    @Override
    public Promise<List<JsonArray>> queryArray(QueryArrayParams params) {
        QueryBuilderImpl qb = new QueryBuilderImpl(
            helper,
            sqlDB,
            params.getPagination().orElse(null)
        );

        // Selections here are JSON expressions, so they run through the
        // converter map bound to qb::select.
        final CriteriaBuilderJsonToFuncConverterMap converterMap = new CriteriaBuilderJsonToFuncConverterMap(jsonToFuncConverterMap, qb::select);

        params.getSelections().forEach(
            jsonObject -> qb.selectBuilder().add(
                jsonToFuncConverter.convert(jsonObject, converterMap)
            )
        );

        return prepareAndExecute(
            qb,
            converterMap,
            Params.builder()
                .entity(params.getEntity())
                .alias(params.getAlias())
                .joinParams(params.getJoinParams())
                .criteria(params.getCriteria())
                .groupBy(params.getGroupBy())
                .orderBy(params.getOrderBy())
                .having(params.getHaving())
                .build()
        ).executeArray();
    }

    /**
     * Applies from/join/where/order/group/having clauses to the builder and
     * returns the built query. Criteria and having expressions use a
     * converter map bound to qb::field.
     */
    private Query prepareAndExecute(QueryBuilderImpl qb, JsonToFuncConverterMap jsonToFuncConverterMap, Params params) {
        final CriteriaBuilderJsonToFuncConverterMap converterMap = new CriteriaBuilderJsonToFuncConverterMap(jsonToFuncConverterMap, qb::field);

        qb.fromBuilder().root(params.getEntity(), params.getAlias());

        params.getJoinParams().forEach(joinParam -> {
            qb.fromBuilder().join(
                joinParam.getPath(),
                joinParam.getAlias(),
                joinParam.getJoinType()
            );
        });

        qb.whereBuilder().add(
            jsonToFuncConverter.convert(params.getCriteria(), converterMap)
        );

        params.getOrderBy().forEach(orderTpl -> qb.orderByBuilder().add(
            orderTpl.getField(), orderTpl.getOrder()
        ));

        params.getGroupBy().forEach(field -> qb.groupByBuilder().add(field));

        qb.havingBuilder().add(
            jsonToFuncConverter.convert(params.getHaving(), converterMap)
        );

        return qb.build();
    }

    /** Internal value object bundling the clause inputs shared by both query paths. */
    @Value
    @Builder
    private static final class Params {
        final String entity;
        final String alias;
        final Collection<JoinParam> joinParams;
        final JsonObject criteria;
        final Collection<OrderTpl> orderBy;
        final Collection<FieldExpression> groupBy;
        final JsonObject having;
    }
}
|
import { ParsedIR } from "./ParsedIR";
import { SPIRType } from "../common/SPIRType";
import { IVariant, IVariantType } from "../common/IVariant";

/**
 * Declaration file for the SPIR-V binary parser: consumes a SPIR-V word
 * stream and produces a ParsedIR.
 */
export declare class Parser {
    private ir;
    private current_function;
    private current_block;
    private global_struct_cache;
    private forward_pointer_fixups;
    constructor(spirv: Uint32Array);
    /** The IR built by parse(); call parse() first. */
    get_parsed_ir(): ParsedIR;
    /** Parse the whole module passed to the constructor. */
    parse(): void;
    private parseInstruction;
    private stream;
    private set;
    /** Fetch the variant with the given id, asserting its concrete type. */
    get<T extends IVariant>(classRef: IVariantType<T>, id: number): T;
    /** Like get(), but tolerates a missing/mismatched id — TODO confirm return on miss. */
    maybe_get<T extends IVariant>(classRef: IVariantType<T>, id: number): T;
    /** Structural equivalence check between two SPIR types. */
    types_are_logically_equivalent(a: SPIRType, b: SPIRType): boolean;
}
|
/*
 * Copyright 2018 Comcast Cable Communications Management, LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.comcast.ip4s

import java.net.{InetAddress, Inet4Address, Inet6Address}

/** JVM-specific mixin giving IP address types a `java.net` conversion. */
private[ip4s] trait IpAddressPlatform {

  /** Converts this address to a `java.net.InetAddress`. Note this method only exists on the JVM. */
  def toInetAddress: InetAddress
}

/** JVM conversion for IPv4 addresses; narrows the result to `Inet4Address`. */
private[ip4s] trait Ipv4AddressPlatform extends IpAddressPlatform {
  protected val bytes: Array[Byte]

  // getByAddress on a 4-byte array always yields an Inet4Address, so the cast is safe.
  override def toInetAddress: Inet4Address =
    InetAddress.getByAddress(bytes).asInstanceOf[Inet4Address]
}

/** JVM conversion for IPv6 addresses; narrows the result to `Inet6Address`. */
private[ip4s] trait Ipv6AddressPlatform extends IpAddressPlatform {
  protected val bytes: Array[Byte]

  // getByAddress on a 16-byte array always yields an Inet6Address, so the cast is safe.
  override def toInetAddress: Inet6Address =
    InetAddress.getByAddress(bytes).asInstanceOf[Inet6Address]
}
|
<filename>pake.go
package pake
import (
"crypto/elliptic"
"crypto/rand"
"crypto/sha256"
"encoding/json"
"errors"
"fmt"
"math/big"
"github.com/tscholl2/siec"
)
// EllipticCurve is a general curve which allows other
// elliptic curves to be used with PAKE.
type EllipticCurve interface {
	Add(x1, y1, x2, y2 *big.Int) (*big.Int, *big.Int)
	ScalarBaseMult(k []byte) (*big.Int, *big.Int)
	ScalarMult(Bx, By *big.Int, k []byte) (*big.Int, *big.Int)
	IsOnCurve(x, y *big.Int) bool
}

// Pake keeps public and private variables by
// only transmitting between parties after marshaling.
//
// This method follows
// https://crypto.stanford.edu/~dabo/cryptobook/BonehShoup_0_4.pdf
// Figure 21/15
// http://www.lothar.com/~warner/MagicWormhole-PyCon2016.pdf
// Slide 11
type Pake struct {
	// Public variables
	Role   int      // 0 = sender (A), 1 = recipient (B)
	Uᵤ, Uᵥ *big.Int // fixed curve point U (password blinding base)
	Vᵤ, Vᵥ *big.Int // fixed curve point V (password blinding base)
	Xᵤ, Xᵥ *big.Int // A's transmitted point
	Yᵤ, Yᵥ *big.Int // B's transmitted point

	// Private variables
	curve      EllipticCurve
	Pw         []byte   // weak shared passphrase
	Vpwᵤ, Vpwᵥ *big.Int // pw·V
	Upwᵤ, Upwᵥ *big.Int // pw·U
	Aα         []byte   // ephemeral random secret
	Aαᵤ, Aαᵥ   *big.Int // α·G
	Zᵤ, Zᵥ     *big.Int // shared point feeding the session-key hash
	K          []byte   // derived session key (nil until Update succeeds)
}
// Public returns the public variables of Pake — a copy stripped of the
// curve, passphrase, ephemeral secret, and session key, safe to marshal
// and transmit to the other party.
func (p *Pake) Public() *Pake {
	return &Pake{
		Role: p.Role,
		Uᵤ:   p.Uᵤ,
		Uᵥ:   p.Uᵥ,
		Vᵤ:   p.Vᵤ,
		Vᵥ:   p.Vᵥ,
		Xᵤ:   p.Xᵤ,
		Xᵥ:   p.Xᵥ,
		Yᵤ:   p.Yᵤ,
		Yᵥ:   p.Yᵥ,
	}
}
// AvailableCurves returns the names of the curves initCurve understands.
func AvailableCurves() []string {
	curves := []string{"p521", "p256", "p384", "siec"}
	return curves
}
// initCurve maps a curve name to its implementation plus the two fixed
// public points U and V used to blind the password. The point coordinates
// are protocol constants and must not be changed — both parties must agree
// on them.
// The curve can be siec, p521, p256, p384
func initCurve(curve string) (ellipticCurve EllipticCurve, Ux *big.Int, Uy *big.Int, Vx *big.Int, Vy *big.Int, err error) {
	switch curve {
	case "p521":
		ellipticCurve = elliptic.P521()
		Ux, _ = new(big.Int).SetString("793136080485469241208656611513609866400481671852", 10)
		Uy, _ = new(big.Int).SetString("4032821203812196944795502391345776760852202059010382256134592838722123385325802540879231526503456158741518531456199762365161310489884151533417829496019094620", 10)
		Vx, _ = new(big.Int).SetString("1086685267857089638167386722555472967068468061489", 10)
		Vy, _ = new(big.Int).SetString("5010916268086655347194655708160715195931018676225831839835602465999566066450501167246678404591906342753230577187831311039273858772817427392089150297708931207", 10)
	case "p256":
		ellipticCurve = elliptic.P256()
		Ux, _ = new(big.Int).SetString("793136080485469241208656611513609866400481671852", 10)
		Uy, _ = new(big.Int).SetString("59748757929350367369315811184980635230185250460108398961713395032485227207304", 10)
		Vx, _ = new(big.Int).SetString("1086685267857089638167386722555472967068468061489", 10)
		Vy, _ = new(big.Int).SetString("9157340230202296554417312816309453883742349874205386245733062928888341584123", 10)
	case "p384":
		ellipticCurve = elliptic.P384()
		Ux, _ = new(big.Int).SetString("793136080485469241208656611513609866400481671852", 10)
		Uy, _ = new(big.Int).SetString("7854890799382392388170852325516804266858248936799429260403044177981810983054351714387874260245230531084533936948596", 10)
		Vx, _ = new(big.Int).SetString("1086685267857089638167386722555472967068468061489", 10)
		Vy, _ = new(big.Int).SetString("21898206562669911998235297167979083576432197282633635629145270958059347586763418294901448537278960988843108277491616", 10)
	case "siec":
		ellipticCurve = siec.SIEC255()
		Ux, _ = new(big.Int).SetString("793136080485469241208656611513609866400481671853", 10)
		Uy, _ = new(big.Int).SetString("18458907634222644275952014841865282643645472623913459400556233196838128612339", 10)
		Vx, _ = new(big.Int).SetString("1086685267857089638167386722555472967068468061489", 10)
		Vy, _ = new(big.Int).SetString("19593504966619549205903364028255899745298716108914514072669075231742699650911", 10)
	default:
		err = errors.New("no such curve")
		return
	}
	return
}
// InitCurve will take the secret weak passphrase (pw) to initialize
// the points on the elliptic curve. The role is set to either
// 0 for the sender or 1 for the recipient.
// The curve can be any of the names accepted by initCurve.
// The sender (role 0) immediately computes X = pw·U + α·G; the recipient
// (role 1) only stores the passphrase and waits for Update.
func InitCurve(pw []byte, role int, curve string) (p *Pake, err error) {
	p = new(Pake)
	p.curve, p.Uᵤ, p.Uᵥ, p.Vᵤ, p.Vᵥ, err = initCurve(curve)
	if err != nil {
		return
	}
	p.Pw = pw
	if role == 1 {
		p.Role = 1
	} else {
		p.Role = 0
		// STEP: A computes X
		p.Vpwᵤ, p.Vpwᵥ = p.curve.ScalarMult(p.Vᵤ, p.Vᵥ, p.Pw)
		p.Upwᵤ, p.Upwᵥ = p.curve.ScalarMult(p.Uᵤ, p.Uᵥ, p.Pw)
		p.Aα = make([]byte, 32) // randomly generated secret
		_, err = rand.Read(p.Aα)
		if err != nil {
			return
		}
		p.Aαᵤ, p.Aαᵥ = p.curve.ScalarBaseMult(p.Aα)
		p.Xᵤ, p.Xᵥ = p.curve.Add(p.Upwᵤ, p.Upwᵥ, p.Aαᵤ, p.Aαᵥ) // "X"
		// now X should be sent to B
	}
	return
}
// Bytes just marshalls the PAKE structure so that
// private variables are hidden. Panics if p is nil or marshalling fails.
func (p *Pake) Bytes() (b []byte) {
	if p == nil {
		panic("pake is not initialized")
	}
	marshaled, err := json.Marshal(p.Public())
	if err != nil {
		panic(err)
	}
	return marshaled
}
// Update will update itself with the other parties
// PAKE and automatically determine what stage
// and what to generate. The recipient (role 1) consumes X and produces Y, Z
// and the session key; the sender consumes Y and produces Z and the key.
func (p *Pake) Update(qBytes []byte) (err error) {
	if p == nil {
		err = fmt.Errorf("pake is not initialized")
		return
	}
	var q *Pake
	err = json.Unmarshal(qBytes, &q)
	if err != nil {
		return
	}
	if p.Role == q.Role {
		err = errors.New("can't have its own role")
		return
	}

	if p.Role == 1 {
		// copy over public variables
		p.Xᵤ, p.Xᵥ = q.Xᵤ, q.Xᵥ

		// confirm that X is on curve
		if !p.curve.IsOnCurve(p.Xᵤ, p.Xᵥ) {
			err = errors.New("X values not on curve")
			return
		}

		// STEP: B computes Y
		p.Vpwᵤ, p.Vpwᵥ = p.curve.ScalarMult(p.Vᵤ, p.Vᵥ, p.Pw)
		p.Upwᵤ, p.Upwᵥ = p.curve.ScalarMult(p.Uᵤ, p.Uᵥ, p.Pw)
		p.Aα = make([]byte, 32) // randomly generated secret
		// BUG FIX: the rand.Read error was previously ignored; on failure the
		// ephemeral secret would silently stay all-zero (predictable key
		// material). InitCurve already checks the same call.
		if _, err = rand.Read(p.Aα); err != nil {
			return
		}
		p.Aαᵤ, p.Aαᵥ = p.curve.ScalarBaseMult(p.Aα)
		p.Yᵤ, p.Yᵥ = p.curve.Add(p.Vpwᵤ, p.Vpwᵥ, p.Aαᵤ, p.Aαᵥ) // "Y"
		// STEP: B computes Z
		p.Zᵤ, p.Zᵥ = p.curve.Add(p.Xᵤ, p.Xᵥ, p.Upwᵤ, new(big.Int).Neg(p.Upwᵥ))
		p.Zᵤ, p.Zᵥ = p.curve.ScalarMult(p.Zᵤ, p.Zᵥ, p.Aα)
		// STEP: B computes k
		// H(pw,id_P,id_Q,X,Y,Z)
		HB := sha256.New()
		HB.Write(p.Pw)
		HB.Write(p.Xᵤ.Bytes())
		HB.Write(p.Xᵥ.Bytes())
		HB.Write(p.Yᵤ.Bytes())
		HB.Write(p.Yᵥ.Bytes())
		HB.Write(p.Zᵤ.Bytes())
		HB.Write(p.Zᵥ.Bytes())
		// STEP: B computes k
		p.K = HB.Sum(nil)
	} else {
		p.Yᵤ, p.Yᵥ = q.Yᵤ, q.Yᵥ

		// confirm that Y is on curve
		if !p.curve.IsOnCurve(p.Yᵤ, p.Yᵥ) {
			err = errors.New("Y values not on curve")
			return
		}

		// STEP: A computes Z
		p.Zᵤ, p.Zᵥ = p.curve.Add(p.Yᵤ, p.Yᵥ, p.Vpwᵤ, new(big.Int).Neg(p.Vpwᵥ))
		p.Zᵤ, p.Zᵥ = p.curve.ScalarMult(p.Zᵤ, p.Zᵥ, p.Aα)
		// STEP: A computes k
		// H(pw,id_P,id_Q,X,Y,Z)
		HA := sha256.New()
		HA.Write(p.Pw)
		HA.Write(p.Xᵤ.Bytes())
		HA.Write(p.Xᵥ.Bytes())
		HA.Write(p.Yᵤ.Bytes())
		HA.Write(p.Yᵥ.Bytes())
		HA.Write(p.Zᵤ.Bytes())
		HA.Write(p.Zᵥ.Bytes())
		p.K = HA.Sum(nil)
	}
	return
}
// SessionKey is returned, unless it is not generated
// in which case it returns an error. This function does
// not check if the key is verified.
// BUG FIX: the previous version set err when p was nil but then still
// evaluated p.K, dereferencing the nil pointer and panicking. Return early
// instead.
func (p *Pake) SessionKey() ([]byte, error) {
	if p == nil {
		return nil, fmt.Errorf("pake is not initialized")
	}
	if p.K == nil {
		return nil, errors.New("session key not generated")
	}
	return p.K, nil
}
// HaveSessionKey reports whether a session key has been generated; safe to
// call on a nil receiver.
func (p *Pake) HaveSessionKey() bool {
	return p != nil && p.K != nil
}
|
#!/bin/sh
# ----------------------------------------------------------------------------
# Copyright 2001-2006 The Apache Software Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------
# Copyright (c) 2001-2002 The Apache Software Foundation. All rights
# reserved.
# Resolve the installation root relative to this script's location.
# NOTE: "$0" must be quoted — the original `dirname $0` broke when the
# script lived under a path containing spaces.
BASEDIR=`dirname "$0"`/..
BASEDIR=`(cd "$BASEDIR"; pwd)`

# OS specific support.  $var _must_ be set to either true or false.
cygwin=false;
darwin=false;
case "`uname`" in
  CYGWIN*) cygwin=true ;;
  Darwin*) darwin=true
           # On macOS, fall back to the framework JDK layout.
           if [ -z "$JAVA_VERSION" ] ; then
             JAVA_VERSION="CurrentJDK"
           else
             echo "Using Java version: $JAVA_VERSION"
           fi
           if [ -z "$JAVA_HOME" ] ; then
             JAVA_HOME=/System/Library/Frameworks/JavaVM.framework/Versions/${JAVA_VERSION}/Home
           fi
           ;;
esac

# Gentoo keeps the JRE location in java-config.
if [ -z "$JAVA_HOME" ] ; then
  if [ -r /etc/gentoo-release ] ; then
    JAVA_HOME=`java-config --jre-home`
  fi
fi

# For Cygwin, ensure paths are in UNIX format before anything is touched
if $cygwin ; then
  [ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
  [ -n "$CLASSPATH" ] && CLASSPATH=`cygpath --path --unix "$CLASSPATH"`
fi

# If a specific java binary isn't specified search for the standard 'java' binary
if [ -z "$JAVACMD" ] ; then
  if [ -n "$JAVA_HOME" ] ; then
    if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
      # IBM's JDK on AIX uses strange locations for the executables
      JAVACMD="$JAVA_HOME/jre/sh/java"
    else
      JAVACMD="$JAVA_HOME/bin/java"
    fi
  else
    # POSIX builtin; more portable than the external `which`.
    JAVACMD=`command -v java`
  fi
fi

if [ ! -x "$JAVACMD" ] ; then
  echo "Error: JAVA_HOME is not defined correctly."
  echo "  We cannot execute $JAVACMD"
  exit 1
fi

# Dependency jars live under $REPO (defaults to <install>/repo).
if [ -z "$REPO" ]
then
  REPO="$BASEDIR"/repo
fi

CLASSPATH=$CLASSPATH_PREFIX:"$BASEDIR"/conf:"$REPO"/log4j/log4j/1.2.15/log4j-1.2.15.jar:"$REPO"/org/apache/zookeeper/zookeeper/3.4.9/zookeeper-3.4.9.jar:"$REPO"/org/slf4j/slf4j-api/1.6.1/slf4j-api-1.6.1.jar:"$REPO"/org/slf4j/slf4j-log4j12/1.6.1/slf4j-log4j12-1.6.1.jar:"$REPO"/jline/jline/0.9.94/jline-0.9.94.jar:"$REPO"/io/netty/netty/3.10.5.Final/netty-3.10.5.Final.jar:"$REPO"/org/codehaus/jackson/jackson-core-asl/1.8.5/jackson-core-asl-1.8.5.jar:"$REPO"/org/codehaus/jackson/jackson-mapper-asl/1.8.5/jackson-mapper-asl-1.8.5.jar:"$REPO"/commons-io/commons-io/1.4/commons-io-1.4.jar:"$REPO"/commons-cli/commons-cli/1.2/commons-cli-1.2.jar:"$REPO"/com/101tec/zkclient/0.5/zkclient-0.5.jar:"$REPO"/org/apache/commons/commons-math/2.1/commons-math-2.1.jar:"$REPO"/commons-codec/commons-codec/1.6/commons-codec-1.6.jar:"$REPO"/com/google/guava/guava/15.0/guava-15.0.jar:"$REPO"/org/yaml/snakeyaml/1.12/snakeyaml-1.12.jar:"$REPO"/org/apache/helix/helix-core/0.6.7/helix-core-0.6.7.jar

EXTRA_JVM_ARGUMENTS="-Xms512m -Xmx512m"

# For Cygwin, switch paths to Windows format before running java
if $cygwin; then
  [ -n "$CLASSPATH" ] && CLASSPATH=`cygpath --path --windows "$CLASSPATH"`
  [ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"`
  [ -n "$HOME" ] && HOME=`cygpath --path --windows "$HOME"`
  [ -n "$BASEDIR" ] && BASEDIR=`cygpath --path --windows "$BASEDIR"`
  [ -n "$REPO" ] && REPO=`cygpath --path --windows "$REPO"`
fi

# Replace this shell with the JVM running the Helix admin tool.
exec "$JAVACMD" $JAVA_OPTS \
  $EXTRA_JVM_ARGUMENTS \
  -classpath "$CLASSPATH" \
  -Dapp.name="helix-admin" \
  -Dapp.pid="$$" \
  -Dapp.repo="$REPO" \
  -Dbasedir="$BASEDIR" \
  org.apache.helix.tools.ClusterSetup \
  "$@"
|
from typing import List, Dict, Set
def process_commands(commands: List[str]) -> Dict[str, Set[str]]:
    """Map each ``--config`` value to the union of its ``--tags`` values.

    Each command is a whitespace-separated string containing a
    ``--config <name>`` pair and a trailing ``--tags <t1> <t2> ...``;
    every token after ``--tags`` is treated as a tag. Tags from
    commands sharing the same config are merged into one set.
    """
    result: Dict[str, Set[str]] = {}
    for cmd in commands:
        tokens = cmd.split()
        config = tokens[tokens.index('--config') + 1]
        tags = set(tokens[tokens.index('--tags') + 1:])
        result.setdefault(config, set()).update(tags)
    return result
<reponame>joe-sky/flarej-jquery
/**********************************************************
*-----------------------------------*
* flareJ
*-----------------------------------*
* base on jQuery
* Copyright 2015 Joe_Sky
* Licensed under the MIT license
*-----*-----*-----*-----*-----*-----*
* author:Joe_Sky
* mail:<EMAIL>
* last update:2015-7-9
***********************************************************/
/*----------------------------------------------*
* flareJ.Menu
*-------*-------*-------*-------*-------*-------*
* 菜单
*-------*-------*-------*-------*-------*-------*
* [依赖]flareJ.Tooltip
* flareJ.SelectBox
*----------------------------------------------*/
FJ.define("widget.Menu", ["widget.Tooltip", "widget.SelectBox"], function() {
    var FJ,
        fj,
        flareJ = FJ = fj = this.flareJ,
        $ = this.jQuery;
    /************************************************************
     * Menu widget (MenuJ / MUJ) — a Tooltip-based drop-down /
     * cascading menu with optional checkboxes and sub-menus.
     ************************************************************/
    this.MenuJ = this.MUJ = FJ.MenuJ = FJ.MUJ = FJ.TooltipJ.extend({
        //#region constructor
        init: function (elemObj, settings) {
            // Merge caller settings over the menu defaults and delegate to Tooltip.
            this._super(elemObj, $.extend(true, {
                fjType: "MUJ",
                hoverDirect: "bottom",
                cMenuDirect: "right",
                autoDirect: true,
                shiftLeft: 50,
                shiftTop: 15,
                hoverType: "click",
                isRightClick: false,
                hoverSpeed: 100,
                closeMenuDelay: 100,
                closeDelay: 100, // close delay time
                radius: 2,
                radiusB: 2,
                speed: 1,
                showSpeed: FJ.isIElt9 ? 1 : 100,
                loadSpeed: 1,
                onlyFirstLoad: true,
                width: 100,
                height: 24,
                forecastMenuH: 24,
                maxHeight: 0,
                hasShadow: true,
                renderTo: "body",
                showRendorBody: false,
                isRenderHF: false,
                isAutoSetHeight: false,
                selectIndex: null, // after an item is selected, hide the other items' icons
                fnHtml: function () {
                    this.pjBody.addClass("muj_pjBody");
                    return this.menuContainer;
                },
                menus:
                [
                    /*{
                        text: 'Menu 1',
                        icon: FJ.imgPath + "Menu/menu-parent-l.gif",
                        hasChk: false,
                        menus: [],
                        click: function () {
                            MBJ.alert("Tip", "test menu 1");
                        }
                    }*/
                ],
                colorParams: {
                    //bgColorBody: "#fefee9",
                    fontColor: null
                },
                evts: {
                    afterClose: function () {
                        if (!this.parentMenu) { // when the root menu closes, close all its child menus too
                            this.cascade(this, function (menu) {
                                if (menu.divOut.is(":visible")) {
                                    menu.p.isHoverShow = true;
                                    menu.close();
                                }
                            }, true);
                        }
                        else {
                            this.p.isHoverShow = true;
                        }
                    },
                    afterItemRender: null // fired after each menu item has been rendered
                }
            }, settings));
            return this;
        },
        //#endregion

        //#region initialization
        initFn: function () {
            this.cMenus = []; // child menu collection
            this.chks = []; // child menu checkbox collection
            this.forecastH = this.p.forecastMenuH * this.p.menus.length; // estimated overall menu height
            this._super();
            this.divOut.attr("id", "MUJ_" + this.objId);
        },
        //#endregion

        //#region build the menu
        create: function () {
            var thiz = this;
            this._super();
            this.divOut.addClass("muj-ttj");
            // menu container
            this.menuContainer = $('<ul class="muj"></ul>');
            // item collection
            this.items = [];
            // create each menu item
            for (var i = 0; i < this.p.menus.length; i++) {
                this.createMenuItem(this.p.menus[i], i);
            }
            if (this.cMenus.length > 0) {
                this.bindEvt("onItemMouseover", function (e, p) { // on mouseover, first close all child menus
                    var k = 0;
                    this.cascade(this, function (menu) {
                        if (k != 0) {
                            if (menu == p.menu) { // do not close the currently expanded child menu
                                return false;
                            }
                            if (menu.divOut.is(":visible")) {
                                menu.p.isHoverShow = true;
                                menu.close();
                            }
                        }
                        k++;
                    });
                });
            }
            //if(fj.isMobile) {
            //  this.menuContainer.onGestureJ(fj.Evt.click, function(e, p) {
            //    var target = p.evt.originalEvent.target;
            //    if(target.tagName === "DIV") {
            //      var t = $(target);
            //      t.trigger("mouseover");
            //      fj.lazyDo(function() {
            //        t.trigger("click");
            //      }), 25;
            //    }
            //  });
            //}
            return this;
        },
        //#endregion

        //#region build a menu item
        createMenuItem: function (item, inx) {
            var thiz = this,
                oLi = $('<li></li>'),
                cMenuDirect = this.p.cMenuDirect,
                selectIndex = this.p.selectIndex;
            // add to item collection
            this.items.push(oLi);
            if (this.p.colorParams.fontColor) {
                oLi.css("color", this.p.colorParams.fontColor);
            }
            if (item.title) { // tooltip text
                oLi.attr("title", item.title);
            }
            // icon
            var oIcon = $('<div class="muj-icon"></div>');
            if (cMenuDirect && !item.icon && !item.hasChk) {
                if (cMenuDirect === "right") {
                    oIcon.html('<i class="icon-caret-right"></i>');
                }
                else {
                    oIcon.html('<i class="icon-caret-left"></i>');
                }
            }
            if (!item.hasChk) {
                var oIcoImg = null;
                if (item.icon) { // custom icon
                    oIcoImg = $(item.icon).addClass("muj-icon-custom");
                    oIcon.append(oIcoImg);
                    if (selectIndex != null && selectIndex !== inx) {
                        oIcoImg.hide();
                    }
                }
                // click event
                if (item.click) {
                    oLi.onGestureJ(fj.Evt.click, function (e, p) {
                        // switch the selected item's icon
                        thiz.changeSelectIcon(inx);
                        item.click.call(thiz, { e: p.evt, item: oLi, menuItem: item });
                        oLi.trigger("mouseleave");
                        thiz.setVisible(false);
                    }, {
                        tapHoverCls: "muj-hover"
                    });
                }
            }
            else { // item carries a checkbox
                var oChk = SBXJ.init({
                    inputId: (item.id ? 'mujChk_' + item.id + '_' + this.objId : null),
                    initChecked: !item.noCheck ? true : false,
                    canQuery: false,
                    evts: {
                        onChecked: function (e, p) {
                            item.click.call(thiz, { checked: p.checked, chk: this });
                        }
                    }
                });
                this.chks.push(oChk);
                oLi.click(function () {
                    oChk.setChecked(oChk.checked ? false : true, null, true);
                    item.click.call(thiz, { checked: oChk.checked, chk: oChk });
                });
                oIcon.append(oChk.divOut);
            }
            var itemContent = $("<div class='muj-item-content'></div>"); // the div needs an explicit width; otherwise, when content wraps, some browsers grow the td height and the whole menu gets taller
            this.menuContainer.append(oLi.append(oIcon).append(itemContent.text(item.text)));
            // child menu
            var oMenu;
            if (item.menus && item.menus.length > 0) {
                oMenu = oLi.MUJ($.extend(true, {
                    renderTo: this.p.renderTo,
                    showRendorBody: this.p.showRendorBody,
                    hoverType: "over",
                    hoverDirect: this.p.cMenuDirect,
                    cMenuDirect: this.p.cMenuDirect,
                    autoDirect: this.p.autoDirect,
                    shiftLeft: this.p.cMenuDirect == "left" ? -4 : 2,
                    shiftTop: thiz.p.forecastMenuH - 1,
                    maxHeight: item.maxHeight,
                    zIndex: this.p.zIndex,
                    tranOrigin: this.p.tranOrigin,
                    tranScaleShow: this.p.tranScaleShow,
                    tranScaleClose: this.p.tranScaleClose,
                    evts: {
                        afterRender: function () {
                            var thix = this;
                            this.parentMenu = thiz; // keep a reference to the parent menu
                            thiz.cMenus.push(this); // keep a reference to this child menu
                            this.bubble(this, function (menu) {
                                if (!menu.parentMenu) {
                                    thix.rootMenu = menu; // keep a reference to the root menu
                                }
                            });
                        },
                        preShow: function () {
                            var offsetH = this.currentObj.offset();
                            if (this.p.autoDirect) { // determine the horizontal display position
                                var w = this.currentObj[0].offsetWidth;
                                var s = this.p.shiftLeft;
                                var wd = this.divOut.width();
                                if (this.p.hoverDirect == "right") {
                                    var rightMax = document.documentElement.clientWidth + document.documentElement.scrollLeft + document.body.scrollLeft - (wd + parseInt(this.divOut.css("border-left-width"), 10) + parseInt(this.divOut.css("border-right-width"), 10)) - w - s;
                                    var left = offsetH.left + w + s;
                                    // if (left < 0) {
                                    //     this.p.hoverDirect = "right";
                                    //     this.p.shiftLeft = 2;
                                    // }
                                    if (left > rightMax) {
                                        this.p.hoverDirect = "left";
                                        this.p.shiftLeft = -4 + (w - wd);
                                    }
                                    else {
                                        this.p.hoverDirect = "right";
                                        this.p.shiftLeft = 2;
                                    }
                                }
                                else if (this.p.hoverDirect == "left") {
                                    var left = offsetH.left - w + s;
                                    if (left < 0) {
                                        this.p.hoverDirect = "right";
                                        this.p.shiftLeft = 2;
                                    }
                                    else {
                                        this.p.hoverDirect = "left";
                                        this.p.shiftLeft = -4 + (w - wd);
                                    }
                                }
                            }
                            // determine the vertical display position
                            var top = offsetH.top + (thiz.p.forecastMenuH * item.menus.length) - document.documentElement.clientHeight;
                            if (top > 0) { // if it would extend past the page bottom, shift it upward
                                this.p.shiftTop = top * -1;
                            }
                            else {
                                this.p.shiftTop = thiz.p.forecastMenuH - 1;
                            }
                        },
                        onMouseenter: function () {
                            this.rootMenu.overByFocus = true;
                            if (this.p.hoverType == "over") {
                                this.p.isHoverShow = false;
                            }
                        },
                        onMouseleave: function () {
                            this.rootMenu.overByFocus = false;
                            // NOTE(review): reads rootMenu.hoverType directly, not
                            // rootMenu.p.hoverType as elsewhere — confirm intended.
                            if (this.rootMenu.hoverType == "focus") {
                                this.rootMenu.currentObj.focus(); // so the root menu can be closed by losing focus
                            }
                        }
                    }
                }, item));
                oMenu.icon = oIcon;
            }
            // mouseover handling
            oLi.hover(function () {
                if (thiz.stoCloseMenu) {
                    clearTimeout(thiz.stoCloseMenu);
                }
                // fire the custom mouseover event after a short delay
                thiz.stoCloseMenu = setTimeout(function () {
                    thiz.fire("onItemMouseover", { item: oLi, menu: oMenu });
                }, thiz.p.closeMenuDelay);
            }, function () {
                if (thiz.stoCloseMenu) {
                    clearTimeout(thiz.stoCloseMenu);
                }
                thiz.fire("onItemMouseleave", { item: oLi });
            });
            this.fire("afterItemRender", { id: item.id, item: oLi, menu: oMenu });
            return this;
        },
        //#endregion

        //#region cascade over child menus
        cascade: function (oMenu, fn, noOwn) {
            var noStop = null;
            if (!noOwn) {
                noStop = fn.call(oMenu, oMenu);
            }
            if (noStop !== false && oMenu.cMenus) {
                for (var i = 0; i < oMenu.cMenus.length; i++) {
                    this.cascade(oMenu.cMenus[i], fn);
                }
            }
        },
        //#endregion

        //#region bubble up through parent menus
        bubble: function (oMenu, fn, noOwn) {
            var noStop = null;
            if (!noOwn) {
                noStop = fn.call(oMenu, oMenu);
            }
            if (noStop !== false && oMenu.parentMenu) {
                this.bubble(oMenu.parentMenu, fn);
            }
        },
        //#endregion

        //#region switch the selected item's icon
        changeSelectIcon: function (no) {
            // hide every custom icon, then reveal only the selected item's
            if (this.p.selectIndex != null) {
                this.menuContainer.find(".muj-icon-custom").hide();
                this.items[no].find(".muj-icon-custom").show();
            }
        }
        //#endregion
    });

    //#region image folder path
    FJ.MUJ.imgSrc = FJ.imgPath + "Menu/";
    //#endregion

    //#region preload images
    // BUG FIX: was bare `MUJ.imgSrc` — `MUJ` is never declared in this module
    // scope (only `this.MUJ`/`FJ.MUJ` are assigned), so use FJ.MUJ explicitly.
    FJ.Image.preLoad(FJ.MUJ.imgSrc + "copy.gif");
    //#endregion

    //#region bind to jQuery
    $.fn.extend({
        MenuJ: function (settings) {
            if (this && this.length > 0) {
                return new FJ.MenuJ(this, fj.MUJ_commonConfig ? $.extend(true, fj.clone(fj.MUJ_commonConfig), settings) : settings);
            }
        },
        MUJ: function (settings) {
            return $(this).MenuJ(settings);
        }
    });
    //#endregion

    /************************************************************
     * Context (right-click) menu — a Menu shown at the cursor on
     * the contextmenu event.
     ************************************************************/
    this.ContextMenuJ = this.CMJ = FJ.ContextMenuJ = FJ.CMJ = FJ.MenuJ.extend({
        //#region constructor
        init: function (elemObj, settings) {
            var thiz = this;
            // settings
            this._super(elemObj, $.extend(true, {
                fjType: "CMJ",
                isRightClick: true,
                showOnSelectText: true,
                eType: (function (j) {
                    return {
                        afterShow: j
                    };
                })("CMJ")
            }, settings));
            elemObj.bind("contextmenu", function (e) {
                var hasSelectText = false;
                if (!thiz.p.showOnSelectText) { // check whether any text is selected
                    var sel = "";
                    if (document.selection) {
                        sel = document.selection.createRange().text;
                    }
                    else if (window.getSelection) {
                        sel = window.getSelection().toString();
                    }
                    if (sel.length > 0) {
                        hasSelectText = true;
                    }
                }
                if (!hasSelectText) {
                    e.stopPropagation(); // stop bubbling
                    e.returnValue = false; // cancel the browser's default context menu
                    thiz.showAt(e, this); // show the menu
                    return false;
                }
            });
            return this;
        },
        //#endregion

        //#region initialization
        initFn: function () {
            this._super();
            this.divOut.attr("id", "CMJ_" + this.objId);
        },
        //#endregion

        //#region show at a given position
        showAt: function (e, currentObj) {
            this.currentObj = $(currentObj); // record the current target element
            var x = e.pageX,
                y = e.pageY;
            if (y > document.documentElement.clientHeight - this.forecastH) {
                y = y - this.forecastH;
            }
            this.setVisible(true, { left: x, top: y }, true); // show at the current mouse position
            // close all child menus
            this.cascade(this, function (menu) {
                if (menu != this && menu.divOut.is(":visible")) {
                    menu.p.isHoverShow = true;
                    menu.close();
                }
            });
            this.fire("afterShow", "CMJ");
        }
        //#endregion
    });

    //#region bind to jQuery
    $.fn.extend({
        ContextMenuJ: function (settings) {
            if (this && this.length > 0) {
                return new FJ.ContextMenuJ(this, fj.CMJ_commonConfig ? $.extend(true, fj.clone(fj.CMJ_commonConfig), settings) : settings);
            }
        },
        CMJ: function (settings) {
            return $(this).ContextMenuJ(settings);
        }
    });
    //#endregion
});
#!/bin/bash
set -e

# Destination for the fetched kubeconfig.
dest_path="$HOME/admin.conf"

# Only Kubernetes scenarios (DEPLOY_SCENARIO beginning with "k8") have a
# kubernetes-master unit to copy the config from.
case "${DEPLOY_SCENARIO}" in
    k8*)
        juju scp kubernetes-master/0:config "${dest_path}"
        ;;
esac
|
#!/bin/bash
# Run_Dhrystone.sh
# Runs the Dhrystone benchmark on the Imperas ISS (SiFive S76 RISC-V model).

# Check environment: IMPERAS_HOME must point at the Imperas installation.
if [ -z "${IMPERAS_HOME}" ]; then
    echo "IMPERAS_HOME not set. Please check environment setup."
    # BUG FIX: bare `exit` returned status 0, so callers could not detect
    # the failure; exit non-zero instead.
    exit 1
fi

# Extra ISS arguments may be appended by the caller via "$@".
${IMPERAS_ISS} --verbose --output imperas.log \
    --program ../../../Applications/dhrystone/dhrystone.RISCV64-O0-g.elf \
    --processorvendor sifive.ovpworld.org --processorname riscv --variant S76 \
    --numprocessors 1 --addressbits 37 \
    --override iss/cpu0/verbose=1 \
    "$@" \
    -argv 4000000
<filename>src/directives/index.ts<gh_stars>0
// Directives exported by this module for inclusion in an NgModule's
// declarations/exports list; currently an empty placeholder.
export const FLEX_LAYOUT_DIRECTIVES: any[] = [
];
#!/bin/bash
#SBATCH --job-name=rotXY
#SBATCH --output=job_%j.log    # Standard output and error log
#SBATCH --nodes=1              # Run all processes on a single node
#SBATCH --ntasks=1
#SBATCH --cpus-per-task=40
#SBATCH --time=10:00:00

module load gcc/devtoolset/9
module load cmake/3.18.0
module load anaconda/anaconda3.7

# Simulation parameters. BUG FIX: these were defined but the python call
# below duplicated them as hard-coded literals (17 and 3000), so editing
# L or ns had no effect; the variables are now actually used.
L=17
ns=3000

# Positional args: system size, $1 (submit-time arg), sweeps, coupling,
# samples, flag, $2 (submit-time arg).
python sim-rotXY.py $L $1 30 0.19 $ns 1 $2
import React, { useEffect, useState, Component } from 'react';
import { StatusBar } from 'expo-status-bar';
import { View } from 'react-native';
import { useSelector, useDispatch } from 'react-redux';
import { Header } from '../../components/blocks/Header/Header';
import { Loaction } from '../../components/blocks/Loaction/Loaction';
import { Weather } from '../../components/blocks/Weather/Weather';
import { Error } from '../../components/blocks/Error/Error'
import { Coordinates } from '../../components/blocks/Coordinates/Coordinates';
import { geolocationResponseAction, reverseGeocoding } from '../../action/index';
import styles from './MainScreenStyles';
export const MainScreen = ({}) => {
const dispatch = useDispatch();
const [where, setWhere] = useState();
const data = useSelector(state => state.data);
const isError = useSelector(state => state.isError);
useEffect(() => {
const geoOptions = {
enableHighAccuracy: true,
timeOut: 20000,
maximumAge: 60 * 60 * 24
};
navigator.geolocation.getCurrentPosition( geoSuccess, geoSuccess, geoOptions);
}, [])
const geoSuccess = (position) => {
setWhere({ lat: position.coords.latitude, lng: position.coords.longitude });
dispatch(geolocationResponseAction(where));
dispatch(reverseGeocoding());
}
return (
<View style={styles.container} >
<Header />
{ data && <Coordinates /> }
{ isError && <Error /> }
{ data && !isError &&
<React.Fragment>
<Loaction />
<Weather />
</React.Fragment>
}
<StatusBar style="auto" />
</View>
)
}
|
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+0+512-FW/model --tokenizer_name model-configs/1024-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+0+512-FW/512+0+512-N-VB-FILL-first-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function remove_all_but_nouns_and_verbs_fill_first_half_quarter --eval_function penultimate_quarter_eval |
<filename>src/page-js.js
/**
Extracted from [page.js]
Source: https://github.com/visionmedia/page.js
--------------------------------------------------------------------
(The MIT License)
Copyright (c) 2012 <NAME> <<EMAIL>>
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
'Software'), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
var base = '';
/**
* Initialize `Route` with the given HTTP `path`,
* and an array of `callbacks` and `options`.
*
* Options:
*
* - `sensitive` enable case-sensitive routes
* - `strict` enable strict matching for trailing slashes
*
* @param {String} path
* @param {Object} options.
* @api private
*/
/**
 * A single route: compiles the HTTP `path` pattern into a RegExp.
 * Options: `sensitive` (case-sensitive match), `strict` (strict
 * trailing-slash matching). Placeholder keys are collected on `this.keys`.
 */
function Route(path, options) {
  var opts = options || {};
  this.path = path;
  this.keys = [];
  this.regexp = pathtoRegexp(path, this.keys, opts.sensitive, opts.strict);
}
/**
* Check if this route matches `path`, if so
* populate `params`.
*
* @param {String} path
* @param {Array} params
* @return {Boolean}
* @api private
*/
/**
 * Test whether `path` matches this route; on success fill `params`:
 * named placeholder captures become properties (existing values are
 * preserved), unnamed splat captures are pushed onto the array.
 * Returns true on match, false otherwise.
 */
Route.prototype.match = function (path, params) {
  var qs = path.indexOf('?');
  var pathname = ~qs ? path.slice(0, qs) : path;
  var m = this.regexp.exec(decodeURIComponent(pathname));
  if (!m) return false;
  for (var i = 1; i < m.length; ++i) {
    var key = this.keys[i - 1];
    var val = typeof m[i] === 'string' ? decodeURIComponent(m[i]) : m[i];
    if (key) {
      // keep a previously-set param value; otherwise take the capture
      if (params[key.name] === undefined) {
        params[key.name] = val;
      }
    } else {
      params.push(val);
    }
  }
  return true;
};
/**
* Normalize the given path string,
* returning a regular expression.
*
* An empty array should be passed,
* which will contain the placeholder
* key names. For example "/user/:id" will
* then contain ["id"].
*
* @param {String|RegExp|Array} path
* @param {Array} keys
* @param {Boolean} sensitive
* @param {Boolean} strict
* @return {RegExp}
* @api private
*/
/**
 * Normalize a path pattern (string, RegExp, or array of patterns) into
 * a RegExp. For each `:key` placeholder a `{name, optional}` descriptor
 * is pushed onto `keys` (so "/user/:id" yields [{name:'id',...}]).
 * `sensitive` enables case-sensitive matching; `strict` disables the
 * implicit optional trailing slash.
 */
function pathtoRegexp(path, keys, sensitive, strict) {
  if (path instanceof RegExp) return path;
  if (path instanceof Array) path = '(' + path.join('|') + ')';

  var source = path
    .concat(strict ? '' : '/?')
    .replace(/\/\(/g, '(?:/')
    .replace(/(\/)?(\.)?:(\w+)(?:(\(.*?\)))?(\?)?/g, function (_, slash, format, key, capture, optional) {
      keys.push({ name: key, optional: !!optional });
      slash = slash || '';
      // optional placeholders absorb their leading slash into the group
      var group = '(?:'
        + (optional ? slash : '')
        + (format || '')
        + (capture || (format && '([^/.]+?)' || '([^/]+?)'))
        + ')';
      return (optional ? '' : slash) + group + (optional || '');
    })
    .replace(/([\/.])/g, '\\$1')
    .replace(/\*/g, '(.*)');

  return new RegExp('^' + source + '$', sensitive ? '' : 'i');
}
// -----------------------------------------------------------------------------------------------------
/**
* Initialize a new "request" `Context`
* with the given `path` and optional initial `state`.
*
* @param {String} path
* @param {Object} state
* @api public
*/
/**
 * A "request" Context for `path` with optional initial `state`.
 * Prefixes the module-level `base` when missing, then splits the path
 * into pathname / querystring and records everything on the instance.
 */
function Context(path, state) {
  if (path[0] === '/' && path.indexOf(base) !== 0) {
    path = base + path;
  }
  var q = path.indexOf('?');
  this.canonicalPath = path;
  this.path = path.replace(base, '') || '/';
  this.state = state || {};
  this.state.path = path;
  this.querystring = ~q ? path.slice(q + 1) : '';
  this.pathname = ~q ? path.slice(0, q) : path;
  this.params = [];
}
// Public surface of this module: the route matcher and request context.
export default {
  Context,
  Route
};
|
//
// Copyright (c) 2015-2020 Microsoft Corporation and Contributors.
// SPDX-License-Identifier: Apache-2.0
//
#ifndef STORAGEOBSERVER_HPP
#define STORAGEOBSERVER_HPP

#include "IOfflineStorage.hpp"
#include "system/Contexts.hpp"
#include "system/Route.hpp"
#include "system/ITelemetrySystem.hpp"

namespace MAT_NS_BEGIN {

    // Bridges offline-storage callbacks into the telemetry system's routing
    // graph: forwards storage lifecycle/record events as debug events and
    // exposes route endpoints that the rest of the pipeline plugs into.
    class StorageObserver :
        public IOfflineStorageObserver,
        public DebugEventDispatcher
    {

    public:
        // Does not take ownership: both the system and the storage must
        // outlive this observer (held by reference below).
        StorageObserver(ITelemetrySystem& system, IOfflineStorage& offlineStorage);

        ~StorageObserver();

        // DebugEventDispatcher: forward debug events to the owning system.
        virtual bool DispatchEvent(DebugEvent evt) override
        {
            return m_system.DispatchEvent(std::move(evt));
        }

    protected:
        // Route handlers, wired to the Route* members declared below.
        bool handleStart();
        bool handleStop();
        bool handleStoreRecord(IncomingEventContextPtr const& ctx);
        void handleRetrieveEvents(EventsUploadContextPtr const& ctx);

        bool handleDeleteRecords(EventsUploadContextPtr const& ctx);
        bool handleReleaseRecords(EventsUploadContextPtr const& ctx);
        bool handleReleaseRecordsIncRetryCount(EventsUploadContextPtr const& ctx);

    protected:
        // IOfflineStorageObserver callbacks, invoked by the storage layer.
        virtual void OnStorageOpened(std::string const& type) override;
        virtual void OnStorageFailed(std::string const& reason) override;
        virtual void OnStorageOpenFailed(std::string const &reason) override;
        virtual void OnStorageTrimmed(std::map<std::string, size_t> const& numRecords) override;
        virtual void OnStorageRecordsDropped(std::map<std::string, size_t> const& numRecords) override;
        virtual void OnStorageRecordsRejected(std::map<std::string, size_t> const& numRecords) override;
        virtual void OnStorageRecordsSaved(size_t numRecords) override;

    protected:
        ITelemetrySystem &    m_system;
        IOfflineStorage &     m_offlineStorage;

    public:
        // Pass-throughs to the underlying storage.
        size_t GetSize()
        {
            return m_offlineStorage.GetSize();
        }

        size_t GetRecordCount() const
        {
            return m_offlineStorage.GetRecordCount();
        }

        // Routing endpoints: PassThrough/Sink members are bound to the
        // handlers above; Source members are fired by this observer for
        // downstream consumers.
        RoutePassThrough<StorageObserver>                                        start{ this, &StorageObserver::handleStart };
        RoutePassThrough<StorageObserver>                                        stop{ this, &StorageObserver::handleStop };

        RouteSource<IncomingEventContextPtr const&>                              storeRecordFailed;
        RoutePassThrough<StorageObserver, IncomingEventContextPtr const&>        storeRecord{ this, &StorageObserver::handleStoreRecord };

        RouteSink<StorageObserver, EventsUploadContextPtr const&>                retrieveEvents{ this, &StorageObserver::handleRetrieveEvents };

        RouteSource<EventsUploadContextPtr const&, StorageRecord const&, bool&>  retrievedEvent;
        RouteSource<EventsUploadContextPtr const&>                               retrievalFinished;
        RouteSource<EventsUploadContextPtr const&>                               retrievalFailed;

        RoutePassThrough<StorageObserver, EventsUploadContextPtr const&>         deleteRecords{ this, &StorageObserver::handleDeleteRecords };
        RoutePassThrough<StorageObserver, EventsUploadContextPtr const&>         releaseRecords{ this, &StorageObserver::handleReleaseRecords };
        RoutePassThrough<StorageObserver, EventsUploadContextPtr const&>         releaseRecordsIncRetryCount{ this, &StorageObserver::handleReleaseRecordsIncRetryCount };

        RouteSource<StorageNotificationContext const*>                           opened;
        RouteSource<StorageNotificationContext const*>                           failed;
        RouteSource<StorageNotificationContext const*>                           trimmed;
        RouteSource<StorageNotificationContext const*>                           recordsDropped;
        RouteSource<StorageNotificationContext const*>                           recordsRejected;
    };

} MAT_NS_END
#endif
|
import matplotlib.pyplot as plt
# BUG FIX: pandas was used below (pd.DataFrame) but never imported,
# so the script crashed with NameError.
import pandas as pd

# Sample records: one row per person.
data = {'name': ['Sara', 'Mark', 'Simon', 'Kelly'],
        'age': [25, 22, 34, 21],
        'gender': ['F', 'M', 'M', 'F']}

df = pd.DataFrame(data)

# Bar chart of age by name; rot=0 keeps the x-axis labels horizontal.
ax = df.plot.bar(x='name', y='age', rot=0)
plt.show()
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package ed.biodare2.backend.util.concurrent.id;
import java.util.List;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import org.springframework.dao.ConcurrencyFailureException;
/**
*
* @author tzielins
*/
/**
 * Id generator backed by a named long record in the database. Ids are
 * reserved from the DB in batches of {@code increament} and handed out
 * from an in-memory queue, so concurrent generators on different nodes
 * never produce the same id.
 */
public class DBIdGenerator implements IdGenerator {

    /** How many times to retry a batch reservation on concurrent DB updates. */
    final int ATTEMPT_MAX = 25;

    /** Creates a generator starting at 1 with an effectively unbounded maximum. */
    public static IdGenerator newInstance(String name, int increament, LongRecordManager recordManager) {
        return newInstance(name, increament, 1, Long.MAX_VALUE, recordManager);
    }

    /**
     * Creates a generator for the named record, creating the record with
     * {@code START_VAL} if it does not exist yet.
     */
    public static IdGenerator newInstance(String name, int increament, long START_VAL, long MAX_VAL, LongRecordManager recordManager) {
        recordManager.createRecordIfNotExists(name, START_VAL);
        return new DBIdGenerator(name, increament, MAX_VAL, recordManager);
    }

    final String name;                     // name of the backing DB record
    final Queue<Long> ids;                 // locally reserved, not-yet-issued ids
    final int increament;                  // ids reserved per DB round-trip
    final long MAXID;                      // upper bound for generated ids
    final LongRecordManager recordManager; // persistence of the counter record

    protected DBIdGenerator(String name, int increament, long MAX_VAL, LongRecordManager recordManager) {
        if (name == null || name.trim().isEmpty()) throw new IllegalArgumentException("Name cannot be empty");
        // BUG FIX (message only): the guard rejects values below 5, i.e. 5 is
        // valid, so the message must say ">= 5" (it previously said "> 5").
        if (increament < 5) throw new IllegalArgumentException("Increament must be >= 5");

        this.recordManager = recordManager;
        this.name = name;
        this.ids = new ConcurrentLinkedQueue<>();
        this.increament = increament;
        this.MAXID = MAX_VAL;
        loadIds();
    }

    /**
     * Returns the next id, transparently reserving a fresh batch from the
     * DB when the local queue runs dry.
     *
     * @throws IllegalStateException if the id limit has been reached
     */
    @Override
    public long next() {
        Long id = ids.poll();
        while (id == null) {
            loadIds();
            id = ids.poll();
        }
        if (id >= MAXID) throw new IllegalStateException("The id limit: " + MAXID + " has been reached");
        return id;
    }

    /**
     * Reserves the next batch of ids from the DB, retrying up to
     * {@code ATTEMPT_MAX} times on optimistic-locking conflicts.
     */
    protected void loadIds() {
        for (int i = 0; i < ATTEMPT_MAX; i++) {
            try {
                List<Long> reserved = recordManager.reserveNextRecords(name, increament, MAXID);
                ids.addAll(reserved);
                return;
            } catch (ConcurrencyFailureException e) {
                // In case multiple threads try to update the DB there will be
                // a concurrency exception; retry the reservation.
            }
        }
        throw new RuntimeException("Cannot load ids due to concurrent db modifications");
    }

    /** Moves the counter forward to {@code newStart} and drops cached ids. */
    protected void updateStart(long newStart) {
        if (newStart >= MAXID) throw new IllegalStateException("The id limit: " + MAXID + " has been reached");
        recordManager.updateStart(name, newStart);
        ids.clear();
    }
}
|
'use strict';

const internals = {};

// Collects results ("notes") from a fixed number of asynchronous
// participants ("meetings") and resolves `this.work` once all have
// reported, or rejects on the first Error note.
exports.Team = internals.Team = class {

    #meetings = null;   // total number of notes expected
    #count = null;      // notes still outstanding
    #notes = null;      // notes received so far, in arrival order

    constructor(options) {

        this._init(options);
    }

    // (Re)arm the team: create a fresh `work` promise and reset counters.
    // options.meetings defaults to 1.
    _init(options = {}) {

        this.work = new Promise((resolve, reject) => {

            this._resolve = resolve;
            this._reject = reject;
        });

        const meetings = options.meetings || 1;
        this.#meetings = meetings;
        this.#count = meetings;
        this.#notes = [];
    }

    // Report one note. An Error note rejects `work` immediately; otherwise
    // the note is recorded and, when the last expected note arrives, `work`
    // resolves with the single note (meetings === 1) or the notes array.
    attend(note) {

        if (note instanceof Error) {
            return this._reject(note);
        }

        this.#notes.push(note);

        if (--this.#count) {
            return;
        }

        return this._resolve(this.#meetings === 1 ? this.#notes[0] : this.#notes);
    }

    // Wait for the current round to finish, then re-arm for another round.
    async regroup(options) {

        await this.work;

        this._init(options);
    }
};
|
// UI CONTROLLER
const UIController = (() => {
// DOM elements
const elements = {
colorControls: document.querySelector(".color-controls"),
colorInput: document.querySelector(".color-input"),
hslCopyValue: document.querySelector(".hsl-copy-text"),
hexCopyValue: document.querySelector(".hex-copy-text"),
rgbCopyValue: document.querySelector(".rgb-copy-text"),
hslCopyButton: document.querySelector(".button-default.-copy.-hsl"),
hexCopyButton: document.querySelector(".button-default.-copy.-hex"),
rgbCopyButton: document.querySelector(".button-default.-copy.-rgb"),
hslTooltip: document.querySelector(".hsl-tip"),
hexTooltip: document.querySelector(".hex-tip"),
rgbTooltip: document.querySelector(".rgb-tip"),
hInput: document.querySelector(".h-input"),
hValue: document.querySelector(".h-value"),
hSlider: document.querySelector(".h-slider"),
sInput: document.querySelector(".s-input"),
sValue: document.querySelector(".s-value"),
sSlider: document.querySelector(".s-slider"),
lInput: document.querySelector(".l-input"),
lValue: document.querySelector(".l-value"),
lSlider: document.querySelector(".l-slider"),
userColors: document.querySelector(".user-colors"),
saveSomeColors: document.querySelector(".message"),
savedColorContainer: document.querySelectorAll(".saved-color"),
saveColorButton: document.querySelector(".button-default.-copy.-save"),
smSaveColorButton: document.querySelector(".button-default.-copy.-sm"),
clearAllColorsButton: document.querySelector(".button-default.-clear"),
savedColorValue: document.querySelector(".saved-color-value"),
savedColorName: document.querySelectorAll(".saved-color-name"),
savedColorRemove: document.querySelectorAll(".button-default.-saved.-clear"),
savedColorCopyHsl: document.querySelectorAll(".button-default.-saved.-hsl"),
savedColorCopyHex: document.querySelectorAll(".button-default.-saved.-hex"),
savedColorCopyRgb: document.querySelectorAll(".button-default.-saved.-rgb")
}
// App functions
const getStartColor = () => {
const letters = '0123456789ABCDEF';
let color = '#';
for (var i = 0; i < 6; i++) {
color += letters[Math.floor(Math.random() * 16)];
}
return color;
};
const showTooltip = async (el) => {
// pass in tooltip element
// 'show' tooltip via class
el.classList.toggle('visible');
// wait 1 second and remove visible class
const delay = ms => new Promise(res => setTimeout(res, ms));
await delay(1000);
el.classList.toggle('visible');
};
// Create and Display Color Wheel
let colorWheel = new iro.ColorPicker("#color-wheel", {
width: 285,
height: 285,
color: getStartColor(),
markerRadius: 8,
padding: 4,
sliderMargin: 24,
sliderHeight: 36,
borderWidth: 3,
borderColor: "#1d1d1d",
anticlockwise: true,
css: {
"#swatch, .swatch-sm, .slider, .slider-controls.-saturation, .slider-controls.-lightness": {
"background-color": "$color"
}
}
});
// Start clipboard module
new ClipboardJS(elements.hslCopyButton);
new ClipboardJS(elements.hexCopyButton);
new ClipboardJS(elements.rgbCopyButton);
// App executions
return {
// Mirror every picker colour change into the copy buttons, the HSL
// inputs/labels and the free-text colour input.
colorUpdate: () => {
colorWheel.on("color:change", (color) => {
// get current color & channels
let hex = color.hexString,
rgb = color.rgbString,
hsl = color.hslString,
hue = color.hsl.h,
saturation = color.hsl.s,
lightness = color.hsl.l;
// set color values (visible text and clipboard payloads)
elements.hslCopyValue.textContent = hsl;
elements.hexCopyValue.textContent = hex;
elements.rgbCopyValue.textContent = rgb;
elements.hslCopyButton.dataset.clipboardText = hsl;
elements.hexCopyButton.dataset.clipboardText = hex;
elements.rgbCopyButton.dataset.clipboardText = rgb;
// single HSL inputs
elements.hValue.textContent = hue;
elements.hInput.value = hue;
elements.sValue.textContent = saturation + '%';
elements.sInput.value = saturation;
elements.lValue.textContent = lightness + '%';
elements.lInput.value = lightness;
// elements.colorInput.placeholder = hex;
elements.colorInput.value = hex;
});
},
// Two-way binding between the picker and the slider / input controls.
// NOTE(review): the listeners at the bottom are registered inside the
// "color:change" handler, so a new set is attached on every colour
// change — they stack up over time; consider moving registration out.
colorChange: () => {
colorWheel.on("color:change", (color, changes) => {
// get current color
let currentColor = color.hsl,
hslString = color.hslString,
hexString = color.hexString,
rgbString = color.rgbString;
// create new color object & string
// NOTE(review): newColor aliases color.hsl (same object reference),
// so the helpers below mutate it in place — confirm intended.
let newColor = currentColor,
newHslString = hslString,
newHexString = hexString,
newRgbString = rgbString;
// changes to color picker change sliders
if (changes.h) {
elements.hSlider.value = color.hsl.h;
}
if (changes.s) {
elements.sSlider.value = color.hsl.s;
}
if (changes.s || changes.v) {
elements.lSlider.value = color.hsl.l;
}
if (changes) {
// elements.colorInput.placeholder = color.hexString;
elements.colorInput.value = color.hexString;
}
///////// SLIDERS ////////////
// change slider values
// Push newColor into the wheel and refresh the H/S/L labels.
const sliderChange = () => {
// set new color
color.hsl = newColor;
elements.hValue.innerHTML = elements.hSlider.value;
elements.sValue.innerHTML = elements.sSlider.value + "%";
elements.lValue.innerHTML = elements.lSlider.value + "%";
};
// update slider UI
const sliderUIUpdate = () => {
// replace new colors with slider values (chg to #)
newColor.h = Number(elements.hSlider.value);
newColor.s = Number(elements.sSlider.value);
newColor.l = Number(elements.lSlider.value);
sliderChange(); // change slider values
};
//////// SINGLE HSL INPUTS ////////
// Apply the numeric H/S/L inputs to the wheel and labels.
const mainHslInputChange = () => {
// replace new colors with slider values (chg to #)
newColor.h = Number(elements.hInput.value);
newColor.s = Number(elements.sInput.value);
newColor.l = Number(elements.lInput.value);
// set new color
color.hsl = newColor;
// update UI
elements.hValue.textContent = elements.hInput.value;
elements.sValue.textContent = elements.sInput.value + "%";
elements.lValue.textContent = elements.lInput.value + "%";
};
// Parse free-text input as rgb(...), hex (with or without '#'), or hsl(...).
const colorInputChange = (color) => {
newColor = elements.colorInput.value.replace('%', '').replace('%', '');
if (newColor.includes('r')) {
//rgb
colorWheel.color.rgbString = newColor;
} else if (newColor.length >= 3 && newColor.length <= 7) {
//hex
newColor.includes('#') ? colorWheel.color.hexString = newColor : colorWheel.color.hexString = '#' + newColor;
elements.colorInput.value = newColor;
} else {
//hsl
colorWheel.color.hslString = newColor;
elements.colorInput.value = newColor;
}
}
// Handling color control changes
elements.hSlider.addEventListener('input', sliderUIUpdate);
elements.sSlider.addEventListener('input', sliderUIUpdate);
elements.lSlider.addEventListener('input', sliderUIUpdate);
elements.hInput.addEventListener('change', mainHslInputChange);
elements.sInput.addEventListener('change', mainHslInputChange);
elements.lInput.addEventListener('change', mainHslInputChange);
elements.colorInput.addEventListener('change', colorInputChange);
});
},
copyActions: () => {
elements.hslCopyButton.addEventListener('click', () => {
showTooltip(el = elements.hslTooltip);
});
elements.hexCopyButton.addEventListener('click', () => {
showTooltip(el = elements.hexTooltip);
});
elements.rgbCopyButton.addEventListener('click', () => {
showTooltip(el = elements.rgbTooltip);
});
},
saveActions: () => {
class Colors {
constructor(name, color) {
this.name = name;
this.color = color;
}
};
let colors = [];
createColors = (color) => {
colors.push(color);
// console.log(colors);
};
const saveColor = () => {
elements.saveSomeColors.classList.add('-d-none');
// Get saved color
let currentColor = elements.hexCopyValue.textContent,
hex = currentColor,
hsl = elements.hslCopyButton.dataset.clipboardText,
rgb = elements.rgbCopyButton.dataset.clipboardText;
// Create new saved
let savedColor = new Colors(currentColor, currentColor);
createColors(savedColor);
// log newly created object to console
// console.log(savedColor);
// Create & Update UI
const markup = `
<div class="saved-color" data-color="${currentColor}" data-color-name="" style="background-color: ${currentColor}">
<input type="text" class="saved-color-name" placeholder="color name" data-color="${currentColor}">
<!-- <p><span class="saved-color-value">${currentColor}</span></p> -->
<div class="saved-color-copy">
<button class="button-default -saved -hsl" data-clipboard-text="${hsl}">HSL</button>
<button class="button-default -saved -hex" data-clipboard-text="${hex}">HEX</button>
<button class="button-default -saved -rgb" data-clipboard-text="${rgb}">RGB</button>
</div>
<button class="button-default -saved -clear">
<span class="graphic">
<svg class="-plus icon -clear" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" width="24" height="24"><path fill-rule="evenodd" d="M17 11a1 1 0 0 1 0 2h-4v4a1 1 0 0 1-2 0v-4H7a1 1 0 0 1 0-2h4V7a1 1 0 0 1 2 0v4h4z"></path></svg>
</span>
</button>
</div>
`;
elements.userColors.insertAdjacentHTML('beforeend', markup);
// need to reassign elements again
elements.savedColorCopyHsl = document.querySelectorAll(".button-default.-saved.-hsl");
elements.savedColorCopyHex = document.querySelectorAll(".button-default.-saved.-hex");
elements.savedColorCopyRgb = document.querySelectorAll(".button-default.-saved.-rgb");
elements.savedColorName = document.querySelectorAll(".saved-color-name");
let savedColorHslButtons = new ClipboardJS(elements.savedColorCopyHsl);
let savedColorHexButtons = new ClipboardJS(elements.savedColorCopyHex);
let savedColorRgbButtons = new ClipboardJS(elements.savedColorCopyRgb);
elements.savedColorContainer = document.querySelectorAll(".saved-color");
elements.savedColorRemove = document.querySelectorAll(".button-default.-saved.-clear");
// Remove color on click
elements.savedColorRemove.forEach(el => {
el.addEventListener('click', () => {
return el.parentNode.remove();
});
});
// update color name of element (printing feature)
elements.savedColorName.forEach(el => {
el.addEventListener('change', () => {
el.parentNode.dataset.colorName = el.value;
});
});
};
elements.saveColorButton.addEventListener('click', saveColor);
elements.smSaveColorButton.addEventListener('click', saveColor);
},
// Clear every saved swatch, restoring the "Save some colors!" placeholder.
removeActions: () => {
const removeAllColors = () => {
const markup = `
<p class="message" aria-hidden="true">
Save some colors!
</p>
`;
// Only wipe the container when something besides the placeholder is present.
if (elements.userColors.lastElementChild != elements.saveSomeColors) {
elements.userColors.innerHTML = markup;
}
// innerHTML replaced the node, so refresh the cached reference.
elements.saveSomeColors = document.querySelector(".message");
};
elements.clearAllColorsButton.addEventListener('click', removeAllColors);
}
}
})();
// GLOBAL CONTROLLER
const controller = ((UICtrl) => {
    // Register every UI behaviour exposed by the UI controller.
    const bindHandlers = () => {
        UICtrl.colorUpdate();
        UICtrl.colorChange();
        UICtrl.copyActions();
        UICtrl.saveActions();
        UICtrl.removeActions();
    };
    return {
        init: () => {
            // Register the service worker after `load` so it never delays startup.
            if ('serviceWorker' in navigator) {
                window.addEventListener('load', () => {
                    navigator.serviceWorker.register('./sw.js');
                });
            }
            console.log('App has started.');
            bindHandlers();
        }
    };
})(UIController);
controller.init();
class BankAccount:
    """A minimal bank account supporting deposits and withdrawals."""

    def __init__(self, account_name, initial_balance):
        self.account_name = account_name  # display name of the account
        self.balance = initial_balance    # current funds

    def deposit(self, amount):
        """Add funds to the account."""
        self.balance = self.balance + amount

    def withdraw(self, amount):
        """Remove funds when covered by the balance; otherwise warn."""
        if amount > self.balance:
            print("Insufficient funds")
        else:
            self.balance = self.balance - amount

    def get_balance(self):
        """Return the current balance."""
        return self.balance

# Demonstration: open an account, move some money, show the result.
account = BankAccount('American RV', 1000)
account.deposit(500)
account.withdraw(200)
print(account.get_balance())  # Output: 1300
function isPalindrome(str) {
    // Walk inward from both ends; any mismatch means not a palindrome.
    for (let i = 0, j = str.length - 1; i < j; i += 1, j -= 1) {
        if (str[i] !== str[j]) {
            return false;
        }
    }
    return true;
}
const result = isPalindrome("racecar");
console.log(result);
package com.vaadin.fusion.parser.plugins.backbone.datetime;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.util.Date;
import java.util.List;
import java.util.Map;
@Endpoint
/**
 * Endpoint exposing echo methods for the JDK date/time types, used to
 * exercise parser/serialization handling of each type.
 */
public class DateTimeEndpoint {
/** Returns a freshly constructed {@link CustomDate} (a Date subclass). */
public CustomDate echoCustomDate() {
return new CustomDate();
}
/** Echoes a legacy {@link java.util.Date}. */
public Date echoDate(Date date) {
return date;
}
/** Echoes an {@link Instant}. */
public Instant echoInstant(Instant instant) {
return instant;
}
/** Echoes a list of {@link LocalDateTime} values. */
public List<LocalDateTime> echoListLocalDateTime(
List<LocalDateTime> localDateTimeList) {
return localDateTimeList;
}
/** Echoes a {@link LocalDate}. */
public LocalDate echoLocalDate(LocalDate localDate) {
return localDate;
}
/** Echoes a {@link LocalDateTime}. */
public LocalDateTime echoLocalDateTime(LocalDateTime localDateTime) {
return localDateTime;
}
/** Echoes a {@link LocalTime}. */
public LocalTime echoLocalTime(LocalTime localTime) {
return localTime;
}
/** Echoes a map of string keys to {@link Instant} values. */
public Map<String, Instant> echoMapInstant(
Map<String, Instant> mapInstant) {
return mapInstant;
}
/** Date subclass used to verify handling of user-defined date types. */
public static class CustomDate extends Date {
}
}
|
<reponame>laxika/mixnode-warcreader-java
package com.morethanheroic.warc.test;
import com.morethanheroic.warc.service.WarcParsingException;
import com.morethanheroic.warc.service.WarcReader;
import com.morethanheroic.warc.service.content.response.domain.ResponseContentBlock;
import com.morethanheroic.warc.service.record.domain.WarcRecord;
import java.io.File;
import java.io.FileInputStream;
import java.util.Optional;
/**
 * Manual test driver: reads a local WARC file and prints the payload of
 * every response record. Parse errors on individual records are logged
 * and skipped.
 */
public class TestFileWarcReader {

    public static void main(final String... arg) throws Exception {
        // Fix: the FileInputStream was never closed; try-with-resources
        // guarantees the stream is released even on exceptions.
        try (FileInputStream input = new FileInputStream(
                new File("C:\\warc-test\\CC-MAIN-20180716232549-20180717012549-00001.warc.gz"))) {
            final WarcReader warcReader = new WarcReader(input);
            boolean hasNext = true;
            while (hasNext) {
                try {
                    final Optional<WarcRecord> optionalWarcRecord = warcReader.readRecord();
                    optionalWarcRecord
                        .filter(WarcRecord::isResponse)
                        .map(warcRecord -> ((ResponseContentBlock) warcRecord.getWarcContentBlock())
                            .getPayloadAsString())
                        .ifPresent(System.out::println);
                    // An empty Optional signals end of input.
                    hasNext = optionalWarcRecord.isPresent();
                } catch (WarcParsingException e) {
                    // Skip malformed records rather than aborting the scan.
                    e.printStackTrace();
                }
            }
        }
    }
}
|
import {HttpClient} from '@angular/common/http';
import {Injectable} from '@angular/core';
import {map} from 'rxjs/operators';
import {Observable} from 'rxjs';
import {ILog} from '../../models';
@Injectable()
/** Service that fetches log entries from the backend log API. */
export class LogTailService {
// HTTP client injected by Angular's DI container.
http: HttpClient;
constructor(http: HttpClient) {
this.http = http;
}
/** Retrieves the current log entries from `/api/log`. */
getLogEntries(): Observable<Array<ILog>> {
return this.http.get<Array<ILog>>('/api/log');
}
}
|
<reponame>reiver/go-strlit<gh_stars>0
package strlit
import (
"fmt"
)
// SyntaxError represents a syntax error encountered in a string literal.
//
// It lets callers distinguish syntax problems from other errors returned
// by strlit.Compile(). For example:
//
//	compiled, err := strlit.Compile(runeReader)
//	if nil != err {
//		switch err.(type) {
//		case strlit.SyntaxError:
//			//@TODO
//		default:
//			//@TODO
//		}
//	}
//
// Error() yields a ready-made message, while Code() exposes the "code"
// from the string literal that caused the error, so callers can build
// their own message.
type SyntaxError interface {
	error

	// SyntaxError is a marker method used only for typing; calling it does nothing.
	SyntaxError()

	// Code returns "code" from the string literal that caused the error.
	Code() string
}

// internalSyntaxError is the unexported concrete implementation of SyntaxError.
type internalSyntaxError struct {
	code    string
	message string
}

// errSyntaxError builds a SyntaxError for the given offending code and message.
func errSyntaxError(code string, message string) error {
	return internalSyntaxError{
		code:    code,
		message: message,
	}
}

// Error implements the error interface.
func (e internalSyntaxError) Error() string {
	return fmt.Sprintf("strlit: Syntax Error: %s: %q", e.message, e.code)
}

// SyntaxError is a marker method; it intentionally does nothing.
func (internalSyntaxError) SyntaxError() {}

// Code returns the offending "code" from the string literal.
func (e internalSyntaxError) Code() string {
	return e.code
}
|
<filename>javascript/extractor/src/com/semmle/js/extractor/trapcache/CachingTrapWriter.java
package com.semmle.js.extractor.trapcache;
import com.semmle.util.exception.Exceptions;
import com.semmle.util.files.FileUtil;
import com.semmle.util.trap.TrapWriter;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
/**
* A trap writer for use with a trap cache.
*
* <p>A caching trap writer mainly works with two files: a result file to which tuples should be
* appended, and a cache file that should be atomically overwritten with the complete list of tuples
* produced by this writer once it is closed.
*
* <p>To achieve the latter, the trap writer writes all tuples to a temporary file first. When the
* writer is closed (and extraction has succeeded), it atomically moves the temporary file over the
* cache file.
*
* <p>This is similar to a concurrent trap writer, but a caching trap writer ensures that the trap
* file is overwritten atomically. If this is impossible, the cached file will not be updated at
* all.
*
* <p>In summary, a {@code CachingTrapWriter} keeps track of three files:
*
* <ul>
* <li>a temporary file to which tuples produced by this writer are written, referenced by field
* {@code tmpFile};
* <li>a cache file that is atomically overwritten with the contents of the temporary file when
* this writer is closed, referenced by field {@code trapFile};
* <li>a result file to which the contents of the temporary file are appended when this writer is
* closed, referenced by field {@code resultFile}.
* </ul>
*/
public class CachingTrapWriter extends TrapWriter {
// file to which tuples are appended on successful close
private final File resultFile;
// whether extraction was successful
private boolean successful = true;
/**
 * @param cacheFile the trap-cache file to be atomically overwritten on close
 * @param resultFile the file to which tuples are appended on close
 */
public CachingTrapWriter(File cacheFile, File resultFile) {
super(cacheFile, true);
this.resultFile = resultFile;
}
/**
 * Tell the writer that extraction was not successful; the cache file and the result file will not
 * be updated.
 */
public void discard() {
successful = false;
}
@Override
public void close() {
FileUtil.close(out);
try {
if (successful) {
// first append tuples from temporary file to result file (in case the move fails)
FileUtil.append(tmpFile, resultFile);
// then try to atomically move temporary file over cache file
Files.move(tmpFile.toPath(), trapFile.toPath(), StandardCopyOption.ATOMIC_MOVE);
}
} catch (IOException e) {
// NOTE: a failed move leaves the old cache intact, which is acceptable.
Exceptions.ignore(e, "Failed atomic moves are preferable to cache corruption.");
} finally {
// make sure to delete the temporary file in case the move failed
tmpFile.delete();
}
}
}
|
# -*- coding: utf-8 -*-
# Copyright: (c) 2021, <NAME>
# MIT License
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from remedy_py.RemedyAPIClient import RemedyClient
from mock import patch
import unittest
####
# Pass an obj argument to all the mocked methods
# python is implicitly passing self to each method call
# since we are testing in the context of a class
####
def mock_build_request_headers(obj):
    """Stand-in for RemedyClient.build_request_headers; returns canned headers."""
    token = "foo"
    return {
        "content-type": "application/json",
        "Authorization": "AR-JWT " + token,
    }
def mock_create_form_entry(obj, form_name, values, return_values=None):
    """Stand-in for RemedyClient.create_form_entry.

    Returns a canned (response, status) pair mimicking 201 Created.
    Fix: `return_values` previously defaulted to a mutable list literal
    (`[]`), the classic shared-default pitfall; it now defaults to None
    (the argument is unused by the mock either way).
    """
    mock_response = {
        "values": {
            'Incident Number': 'INC000000000301',
            'Request ID': '000000000000179'
        },
        "_links": {
            "self":
                [{
                    "href": "foo"
                }]
        }
    }
    return mock_response, 201
def mock_get_form_entry(obj, form_name, req_id):
    """Stand-in for RemedyClient.get_form_entry; canned 200 OK response."""
    mock_response = {
        "values": {
            'Incident Number': 'INC000000000301',
            'Request ID': '000000000000179',
        },
        "_links": {
            "self": [{"href": "foo"}],
        },
    }
    return mock_response, 200
def mock_update_form_entry(obj, form_name, req_id, values):
    """Stand-in for RemedyClient.update_form_entry; empty body, 204 No Content."""
    return ({}, 204)
def mock_delete_form_entry(obj, form_name, req_id):
    """Stand-in for RemedyClient.delete_form_entry; empty body, 204 No Content."""
    return ({}, 204)
class TestRemedyClient(unittest.TestCase):
    # Each test patches the corresponding RemedyClient method with the
    # module-level mock above, so no network access occurs.
    @patch('remedy_py.RemedyAPIClient.RemedyClient.build_request_headers', mock_build_request_headers)
    def setUp(self):
        # Patching build_request_headers lets the client construct without
        # performing a real login round-trip.
        self.form_name = "HPD:IncidentInterface_Create"
        self.client = RemedyClient("example.com", "foo", "bar")
    @patch('remedy_py.RemedyAPIClient.RemedyClient.create_form_entry', mock_create_form_entry)
    def test_create_form_entry(self):
        # A representative incident payload; contents are irrelevant to the mock.
        ENTRY_TEMPLATE = {
            "First_Name": "Allen",
            "Last_Name": "Allbrook",
            "Description": "REST API: Incident Creation",
            "Impact": "1-Extensive/Widespread",
            "Urgency": "1-Critical",
            "Status": "Assigned",
            "Reported Source": "Direct Input",
            "Service_Type": "User Service Restoration",
            "z1D_Action": "CREATE"
        }
        RETURN_VALUES = ["Incident Number", "Request ID"]
        response, status_code = self.client.create_form_entry(self.form_name, ENTRY_TEMPLATE)
        assert(status_code == 201)
        assert(response["values"])
    @patch('remedy_py.RemedyAPIClient.RemedyClient.get_form_entry', mock_get_form_entry)
    def test_get_form_entry(self):
        req_id = "INC0000000001"
        response, status_code = self.client.get_form_entry(self.form_name, req_id)
        assert(status_code == 200)
        assert(response["values"])
    @patch('remedy_py.RemedyAPIClient.RemedyClient.update_form_entry', mock_update_form_entry)
    def test_update_form_entry(self):
        req_id = "INC0000000001"
        entry = {
            "values": {
                "First Name": "Allen",
                "Last Name": "Allbrook"
            }
        }
        response, status_code = self.client.update_form_entry(self.form_name, req_id, entry)
        assert(status_code == 204)
    @patch('remedy_py.RemedyAPIClient.RemedyClient.delete_form_entry', mock_delete_form_entry)
    def test_delete_form_entry(self):
        req_id = "INC0000000001"
        response, status_code = self.client.delete_form_entry(self.form_name, req_id)
        assert(status_code == 204)
# Allow running this file directly as a test script.
if __name__ == '__main__':
    unittest.main()
<reponame>zhibirc/mtvplayer
'use strict';
// Third-party video player library, aliased for later use in the player module.
var videojsLib = require('video.js');
//https://www.youtube.com/watch?v=twSn58BPgWM
|
import { combineReducers } from 'redux'
import repos from './repos'
// Root reducer: combines all state slices (currently only `repos`).
export default combineReducers({
repos
})
|
"""
A genetic algorithm to find the maximum global maximum in a given two-dimensional array.
"""
import random
def fitness(arr):
    """Return the sum of every element in a two-dimensional array."""
    return sum(sum(row) for row in arr)
def max_global_maximum(arr):
    """Genetic search for the largest value in the 2-D array `arr`.

    Fixes over the original:
    - random.randint(0, len(arr)) could yield an out-of-range index
      (randint's upper bound is inclusive); random.randrange is used.
    - fitness() was called on a single scalar cell (arr[x][y]), which
      raised TypeError; a chromosome's fitness is simply the value at
      its (row, col) position.

    Returns the best (maximum) cell value discovered by the search.
    """
    population = 100
    generations = 1000
    mutation_rate = 0.01
    rows = len(arr)
    cols = len(arr[0])

    # Initialize population: each chromosome is a [row, col] position.
    population_arr = [[random.randrange(rows), random.randrange(cols)]
                      for _ in range(population)]
    best = max(arr[x][y] for x, y in population_arr)

    for _ in range(generations):
        new_population = []
        for chromosome in population_arr:
            x, y = chromosome
            # Crossover: average coordinates with a random mate.
            if random.random() <= 0.7:
                mate = population_arr[random.randrange(len(population_arr))]
                x = (x + mate[0]) // 2
                y = (y + mate[1]) // 2
            # Mutation: jump to a random position.
            if random.random() <= mutation_rate:
                x = random.randrange(rows)
                y = random.randrange(cols)
            new_population.append([x, y])

        # Selection: keep only the chromosomes with this generation's best fitness.
        fitness_values = [arr[x][y] for x, y in new_population]
        generation_best = max(fitness_values)
        best = max(best, generation_best)
        population_arr = [c for c, f in zip(new_population, fitness_values)
                          if f == generation_best]

    return best
if __name__ == '__main__':
    # Fix: the original referenced an undefined `arr`; provide a demo grid.
    arr = [
        [1, 4, 2],
        [9, 3, 5],
        [6, 8, 7],
    ]
    print(max_global_maximum(arr))
// VuePress site configuration for the Vuex Dispatcher documentation.
module.exports = {
title: '🌈 Vuex Dispatcher',
description: 'An easy-to-use payload builder for your dispatch actions',
themeConfig: {
// top navigation bar links
nav: [
{ text: 'Github Repo', link: 'https://github.com/undervane/vuex-dispatcher' },
{ text: 'Author', link: 'https://mipigu.com' }
],
// sidebar entries, in display order
sidebar: [
'/getting-started/',
'/example-usage/',
]
}
}
/**
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at the
* <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache homepage</a>
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Initial code contributed and copyrighted by<br>
* frentix GmbH, http://www.frentix.com
* <p>
*/
package org.olat.core.gui.components.form.flexible.impl.elements;
import java.util.Collections;
import javax.servlet.http.HttpServletRequest;
import org.json.JSONArray;
import org.olat.core.CoreSpringFactory;
import org.olat.core.dispatcher.mapper.Mapper;
import org.olat.core.dispatcher.mapper.MapperService;
import org.olat.core.dispatcher.mapper.manager.MapperKey;
import org.olat.core.gui.UserRequest;
import org.olat.core.gui.components.Component;
import org.olat.core.gui.components.form.flexible.elements.AutoCompleter;
import org.olat.core.gui.control.Disposable;
import org.olat.core.gui.control.generic.ajax.autocompletion.AutoCompleterListReceiver;
import org.olat.core.gui.control.generic.ajax.autocompletion.ListProvider;
import org.olat.core.gui.media.JSONMediaResource;
import org.olat.core.gui.media.MediaResource;
import org.olat.core.util.StringHelper;
import org.olat.core.util.UserSession;
/**
*
* Initial date: 20.11.2015<br>
* @author srosse, <EMAIL>, http://www.frentix.com
*
*/
public class AutoCompleterImpl extends AbstractTextElement implements AutoCompleter, Disposable {
// request parameter carrying the user's search term
private static final String PARAM_QUERY = "term";
private final AutoCompleterComponent component;
private AutoCompleterMapper mapper;
private MapperKey mapperKey;
// key of the currently selected suggestion
private String key;
// minimum number of typed characters before querying the list provider
private int minLength = 3;
public AutoCompleterImpl(String id, String name) {
super(id, name, false);
component = new AutoCompleterComponent(id, name, this);
}
@Override
protected Component getFormItemComponent() {
return component;
}
@Override
public void setListProvider(ListProvider provider, UserSession usess) {
// Register a mapper so the browser can query suggestions via a URL.
mapper = new AutoCompleterMapper(provider);
mapperKey = CoreSpringFactory.getImpl(MapperService.class).register(usess, mapper);
}
/** Maximum number of suggestions the underlying provider will return. */
public int getMaxEntries() {
return mapper.getMaxEntries();
}
@Override
public String getMapperUri() {
return mapperKey.getUrl();
}
@Override
public void setDomReplacementWrapperRequired(boolean required) {
//
}
@Override
public String getKey() {
return key;
}
@Override
public void setKey(String key) {
this.key = key;
// mark component dirty so the new key is rendered
if(component != null) {
component.setDirty(true);
}
}
@Override
public int getMinLength() {
return minLength;
}
@Override
public void setMinLength(int minLength) {
this.minLength = minLength;
}
@Override
public void dispatchFormRequest(UserRequest ureq) {
// Handle the "select" command fired when the user picks a suggestion.
String cmd = ureq.getParameter("cid");
if("select".equals(cmd)) {
String uKey = ureq.getParameter("key");
setKey(uKey);
setValue(ureq.getParameter("value"));
getRootForm().fireFormEvent(ureq, new AutoCompleteFormEvent(AutoCompleteEvent.SELECT_EVENT, this, uKey));
}
}
@Override
public void evalFormRequest(UserRequest ureq) {
String paramId = component.getFormDispatchId();
String paramValue = getRootForm().getRequestParameter(paramId);
// normalize the values
if(paramValue != null && paramValue.trim().length() == 0) {
paramValue = null;
}
String currentValue = getValue();
if(currentValue != null && currentValue.trim().length() == 0) {
currentValue = null;
}
// If the free-text value changed, the previously selected key no longer applies.
if ((paramValue == null && currentValue != null)
|| (paramValue != null && currentValue == null)
|| (paramValue != null && currentValue != null && !paramValue.equals(getValue()))) {
setKey(null);
setValue(paramValue);
}
}
@Override
public void dispose() {
// Unregister the suggestion mapper when this element is discarded.
if(mapperKey != null) {
CoreSpringFactory.getImpl(MapperService.class).cleanUp(Collections.singletonList(mapperKey));
}
}
/** Serves autocomplete suggestions as JSON for the browser-side widget. */
private static class AutoCompleterMapper implements Mapper {
private final ListProvider provider;
public AutoCompleterMapper(ListProvider provider) {
this.provider = provider;
}
public int getMaxEntries() {
return provider.getMaxEntries();
}
@Override
public MediaResource handle(String relPath, HttpServletRequest request) {
// Read query and generate JSON result
String lastN = request.getParameter(PARAM_QUERY);
JSONArray result;
if(StringHelper.containsNonWhitespace(lastN)) {
AutoCompleterListReceiver receiver = new AutoCompleterListReceiver(null, false);
provider.getResult(lastN, receiver);
result = receiver.getResult();
} else {
// no (usable) query: return an empty suggestion list
result = new JSONArray();
}
return new JSONMediaResource(result, "UTF-8");
}
}
}
use std::io::{self, Read, Write};
use std::time::Duration;
/// Errors surfaced by `CommunicationDevice` operations.
#[derive(Debug)]
enum CommunicationError {
/// The operation did not complete within the configured timeout.
/// NOTE(review): never constructed in this file — presumably used by
/// the real `set_timeout`/transport implementation; confirm.
Timeout,
/// An underlying I/O failure.
IoError(io::Error),
}
/// Handle to a communication channel.
struct CommunicationDevice {
// Add any necessary fields here
}
impl CommunicationDevice {
// NOTE(review): these methods call Read::read_exact / Write::write_all on
// `self`, so they assume `CommunicationDevice` implements `Read` and
// `Write` elsewhere — those impls are not visible here; confirm.
/// Drain and discard any pending input.
fn flush(&mut self) {
let mut buf = Vec::new();
// Errors are deliberately ignored: flushing is best-effort.
let _res = self.read_to_end(&mut buf);
}
/// Read exactly `data.len()` bytes, after arming a 100 ms timeout.
fn read(&mut self, data: &mut [u8]) -> Result<(), CommunicationError> {
self.set_timeout(Duration::new(0, 100000000))?;
match Read::read_exact(self, data) {
Ok(_) => Ok(()),
// The original io::Error is discarded and replaced with a generic one.
Err(_) => Err(CommunicationError::IoError(io::Error::new(io::ErrorKind::Other, "Read error"))),
}
}
/// Write all of `data`, mapping any failure to a generic IoError.
fn write(&mut self, data: &[u8]) -> Result<(), CommunicationError> {
match Write::write_all(self, data) {
Ok(_) => Ok(()),
Err(_) => Err(CommunicationError::IoError(io::Error::new(io::ErrorKind::Other, "Write error"))),
}
}
/// Configure the read timeout. Currently a stub that always succeeds.
fn set_timeout(&self, duration: Duration) -> Result<(), CommunicationError> {
// Implementation of setting timeout
Ok(())
}
}
<reponame>MauricioGR15/Topicos-Avanzados-de-Programcion<filename>src/AppContabilidad/App.java
package AppContabilidad;
/** Application entry point wiring up the MVC triad. */
public class App {
public static void main(String[] args) {
// Build model and view, hand both to the controller, then show the UI.
Modelo modelo = new Modelo();
Vista vista = new Vista();
Controlador controlador= new Controlador(modelo, vista);
controlador.vista.setVisible(true);
}
}
|
// Auto-generated Doxygen navigation data for Sub.cpp test cases;
// each entry is [symbol name, target page, anchor]. Do not edit by hand.
var armnn_tf_parser_2test_2_sub_8cpp =
[
[ "BOOST_FIXTURE_TEST_CASE", "armnn_tf_parser_2test_2_sub_8cpp.xhtml#a5412a63bc943a5f5357948cd8a6cc8c3", null ],
[ "BOOST_FIXTURE_TEST_CASE", "armnn_tf_parser_2test_2_sub_8cpp.xhtml#a44db723ca45599b52099e39ee997207e", null ],
[ "BOOST_FIXTURE_TEST_CASE", "armnn_tf_parser_2test_2_sub_8cpp.xhtml#a78d70093fc6c5ce323a0fc1af3b7fef2", null ]
];
<reponame>WSimacek/rubygrade<filename>db/migrate/20081109023740_create_assignments.rb
class CreateAssignments < ActiveRecord::Migration
  # Creates the assignments table: basic metadata plus seven grade
  # boundaries and eight grade level labels, each with its default.
  def self.up
    boundaries = %w[90 80 70 60 50 40 30]
    levels = ['A', 'B', 'C', 'D', 'E', 'F', 'Near miss', 'Fail']
    create_table :assignments do |t|
      t.string  :name
      t.integer :course_id
      t.integer :category_id
      t.float   :max_grade
      boundaries.each_with_index do |boundary, index|
        t.float :"grade_boundary#{index + 1}", :default => boundary
      end
      levels.each_with_index do |level, index|
        t.string :"grade_level#{index + 1}", :default => level
      end
      t.timestamps
    end
  end

  def self.down
    drop_table :assignments
  end
end
|
def remove_duplicates(lst):
    """Return a new list with duplicates removed, keeping first-seen order.

    Uses a set for O(1) membership checks (O(n) overall) instead of the
    original O(n^2) list scan. Assumes elements are hashable, which holds
    for the call site below (strings).
    """
    seen = set()
    unique = []
    for item in lst:
        if item not in seen:
            seen.add(item)
            unique.append(item)
    return unique

result = remove_duplicates(["a", "b", "c", "a", "b", "d"])
print(result)
<filename>src/include/vec_d4.hpp
//! 4次元ベクトル特有の関数など定義
//! (vector.hppからインクルード)
#pragma once
namespace frea {
// 4-component specialization of VecT; adds helpers specific to
// 4-dimensional vectors.
template <class W, class D>
struct VecT_spec<W, D, 4> : VecT<W,D, VecT_spec<W,D,4>> {
using base_t = VecT<W,D, VecT_spec<W,D,4>>;
using base_t::base_t;
using wrap_t = typename base_t::wrap_t;
using this_t = VecT_spec;
// View the first three components as a 3-D coordinate vector
// (delegates to the internal representation's asVec3Coord).
typename base_t::template type_cn<3>
asVec3Coord() const noexcept { return this->asInternal().asVec3Coord(); }
};
}
|
try:
import micropython
except:
pass
def mandelbrot():
    """Render the Mandelbrot set onto the global `lcd` display (91x31 grid).

    Fix: the original contained a stray "<EMAIL>" token (a redacted
    decorator, presumably @micropython.native) which made the function a
    syntax error; it now compiles as plain Python on both MicroPython
    and CPython.
    """
    def in_set(c):
        # True if complex c stays bounded through 40 iterations of z = z*z + c.
        z = 0
        for _ in range(40):
            z = z * z + c
            if abs(z) > 60:
                return False
        return True

    lcd.clear()
    # Map the pixel grid onto the complex plane: u -> [-2, 1], v -> [-1, 1].
    for u in range(91):
        for v in range(31):
            if in_set((u / 30 - 2) + (v / 15 - 1) * 1j):
                lcd.set(u, v)
    lcd.show()
# PC testing
# Instantiate a 128x32 software LCD and render the fractal onto it.
import lcd
lcd = lcd.LCD(128, 32)
mandelbrot()
|
#!/usr/bin/bash
# Termux/openpilot recovery script: restore the environment, roll the
# openpilot checkout back one reflog step, then reboot the device.
export LD_LIBRARY_PATH=/data/data/com.termux/files/usr/lib
export HOME=/data/data/com.termux/files/home
export PATH=/usr/local/bin:/data/data/com.termux/files/usr/bin:/data/data/com.termux/files/usr/sbin:/data/data/com.termux/files/usr/bin/applets:/bin:/sbin:/vendor/bin:/system/sbin:/system/bin:/system/xbin:/data/data/com.termux/files/usr/bin/python
export PYTHONPATH=/data/openpilot
cd /data/openpilot
# --keep preserves uncommitted changes; HEAD@{1} is the previous reflog entry.
/data/data/com.termux/files/usr/bin/git reset --keep HEAD@{1}
reboot
# test
<filename>KCenter-Core/src/main/java/org/nesc/ec/bigdata/controller/ClusterController.java
package org.nesc.ec.bigdata.controller;
import com.alibaba.fastjson.JSONObject;
import org.nesc.ec.bigdata.common.BaseController;
import org.nesc.ec.bigdata.common.RestResponse;
import org.nesc.ec.bigdata.config.InitConfig;
import org.nesc.ec.bigdata.model.ClusterInfo;
import org.nesc.ec.bigdata.service.ClusterService;
import org.nesc.ec.bigdata.service.KafkaAdminService;
import org.nesc.ec.bigdata.service.ZKService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.List;
/**
 * REST endpoints for managing Kafka cluster registrations: connectivity
 * health checks for ZooKeeper/Kafka addresses and CRUD operations on the
 * cluster table.
 *
 * @author lg99
 */
@RestController
@RequestMapping("/cluster")
public class ClusterController extends BaseController {

    private static final Logger LOG = LoggerFactory.getLogger(ClusterController.class);

    @Autowired
    ClusterService clusterService;
    @Autowired
    InitConfig config;
    @Autowired
    ZKService zkService;
    @Autowired
    KafkaAdminService kafkaAdminService;

    /**
     * Check whether the given ZooKeeper cluster address is healthy.
     *
     * @param zkAddress ZooKeeper connection string to probe
     * @return health-check result, or an error response on failure
     */
    @GetMapping("/validateZKAddress")
    @ResponseBody
    public RestResponse validateZKAddress(@RequestParam("zkAddress") String zkAddress) {
        try {
            return SUCCESS_DATA(zkService.checkZKAddressHealth(zkAddress));
        } catch (Exception e) {
            LOG.error("validateZKAddress has error", e);
            return ERROR("validateZKAddress failed");
        }
    }

    /**
     * Check whether the given Kafka broker address is reachable.
     *
     * @param kafkaAddress Kafka bootstrap address to probe
     * @return validation result, or an error response on failure
     */
    @GetMapping("/validateKafkaAddress")
    @ResponseBody
    public RestResponse validateKafkaAddress(@RequestParam("kafkaAddress") String kafkaAddress) {
        try {
            return SUCCESS_DATA(kafkaAdminService.validateKafkaAddress(kafkaAddress));
        } catch (Exception e) {
            LOG.error("validateKafkaAddress has error", e);
            return ERROR("validateKafkaAddress failed");
        }
    }

    /**
     * Fetch a single cluster's info by its id.
     *
     * @param id primary key of the cluster row
     */
    @GetMapping("/get")
    @ResponseBody
    public RestResponse getClusterById(@RequestParam Long id) {
        try {
            ClusterInfo cluster = clusterService.selectById(id);
            return SUCCESS_DATA(cluster);
        } catch (Exception e) {
            LOG.error("Find cluster by Id error.", e);
            return ERROR("GET CLUSTER DATA BY ID FAILED!");
        }
    }

    /** List all rows of the cluster table. */
    @GetMapping("")
    @ResponseBody
    public RestResponse getCluster() {
        try {
            List<ClusterInfo> clusters = clusterService.getTotalData();
            return SUCCESS_DATA(clusters);
        } catch (Exception e) {
            LOG.error("Find cluster List error.", e);
            return ERROR("GET DATA FAILED!");
        }
    }

    /** Return the cluster list together with each cluster's status. */
    @PostMapping("/status")
    @ResponseBody
    public RestResponse clusterStatus(@RequestBody ClusterInfo clusterInfo) {
        try {
            JSONObject clusters = clusterService.getClusterAndStatus(clusterInfo);
            return SUCCESS_DATA(clusters);
        } catch (Exception e) {
            LOG.error("Find cluster status List error.", e);
            return ERROR("GET DATA FAILED!");
        }
    }

    /**
     * Register a new cluster.
     * <ol>
     *   <li>If an equivalent cluster already exists, reject the request.</li>
     *   <li>Otherwise insert it into the cluster table.</li>
     * </ol>
     * Note: the service method name {@code clusterExits} (sic) is kept — it is
     * part of the service interface defined elsewhere.
     */
    @PostMapping("/add")
    @ResponseBody
    public RestResponse add(@RequestBody ClusterInfo cluster) {
        try {
            if (!clusterService.clusterExits(cluster, false)) {
                if (clusterService.insert(cluster)) {
                    return SUCCESS("ADD CLUSTER DATA SUCCESS");
                } else {
                    // Message typo fixed: "IS VALIDATE" -> "IS VALID".
                    return ERROR("ADD CLUSTER DATA FAILED,PLEASE MAKE SURE YOUR ZK AND BROKER ADDRESS IS VALID!");
                }
            } else {
                // Message typo fixed: "EXITS" -> "EXISTS".
                return ERROR("THIS CLUSTER ALREADY EXISTS!");
            }
        } catch (Exception e) {
            LOG.error("add cluster error.", e);
            return ERROR("ADD CLUSTER DATA FAILED!");
        }
    }

    /**
     * Update an existing cluster.
     * <ol>
     *   <li>If the change would collide with another existing cluster, reject it.</li>
     *   <li>Otherwise persist the update.</li>
     * </ol>
     */
    @PutMapping("/update")
    @ResponseBody
    public RestResponse update(@RequestBody ClusterInfo cluster) {
        try {
            if (!clusterService.clusterExits(cluster, true)) {
                if (clusterService.update(cluster)) {
                    return SUCCESS("UPDATE CLUSTER DATA SUCCESS");
                } else {
                    return ERROR("UPDATE CLUSTER DATA FAILED!");
                }
            } else {
                // Message typo fixed: "EXITS" -> "EXISTS".
                return ERROR("THIS CLUSTER ALREADY EXISTS!");
            }
        } catch (Exception e) {
            LOG.error("update cluster error.", e);
            return ERROR("UPDATE CLUSTER DATA FAILED!");
        }
    }

    /**
     * Delete a cluster by id.
     * <ol>
     *   <li>First delete all rows associated with the cluster in related tables
     *       (topic info, alert info, task info, ...).</li>
     *   <li>Only if that succeeds, delete the cluster row itself.</li>
     * </ol>
     */
    @DeleteMapping("/{id}")
    @ResponseBody
    public RestResponse delete(@PathVariable Long id) {
        try {
            if (clusterService.deleteAssociateTable(id)) {
                if (clusterService.delete(id)) {
                    return SUCCESS("DELETE CLUSTER DATA SUCCESS");
                } else {
                    return ERROR("DELETE CLUSTER DATA FAILED!");
                }
            } else {
                return ERROR("DELETE CLUSTER DATA FAILED,MAY BE DELETE ASSOCIATED TABLE DATA FAILED!");
            }
        } catch (Exception e) {
            LOG.error("delete cluster error.", e);
            return ERROR("DELETE CLUSTER DATA FAILED!");
        }
    }

    /**
     * Find clusters registered under the given location (data center).
     *
     * @param location location identifier to filter by
     * @return clusters matching the location
     */
    @GetMapping("by_location/{location}")
    @ResponseBody
    public RestResponse getClusterByLocation(@PathVariable String location) {
        try {
            List<ClusterInfo> clusters = clusterService.getClusterByLocation(location);
            return SUCCESS_DATA(clusters);
        } catch (Exception e) {
            LOG.error("Find cluster List error.", e);
            return ERROR("GET DATA FAILED!");
        }
    }
}
|
import {Component, OnInit, AfterViewInit, Input, ViewChild, ElementRef} from '@angular/core';
import {ActivatedRoute} from '@angular/router';
import {screen} from 'platform';
import {SwipeGestureEventData, SwipeDirection} from 'ui/gestures';
import {SlideContainer, Slide} from 'nativescript-slides';
import {VideoService} from './video.service';
import {IVideo, IVideoPlaybackInfo} from './video';
// NativeScript + Angular video detail view: renders a swipeable slide deck of
// videos (nativescript-slides) and tracks per-video playback state.
@Component({
    selector: 'ns-video-detail',
    moduleId: module.id,
    templateUrl: 'video-detail.component.html'
})
export class VideoDetailComponent implements OnInit, AfterViewInit {
    // Index of the currently shown video. NOTE(review): initialised from the
    // route param (a string) in ngOnInit, then overwritten with numeric slide
    // indices by changedScrollingHandler — used as an array index below, which
    // works via JS coercion, but worth normalising with Number(); confirm.
    private id: number;
    // Expected display size for each video, derived from screen dimensions.
    vidWidth: number;
    vidHeight: number;
    videoThmb: string;
    videoSrc: string;
    // Template reference to the <SlideContainer> element.
    @ViewChild('container') container: ElementRef;
    videos: IVideo[] = [];
    // One playback-state record per video, parallel to `videos`.
    videosPlayback: IVideoPlaybackInfo[] = [];
    private _currentId = 0;
    private previewDuration = 5000; // ms — NOTE(review): unused in visible code
    private playDuration = 0;       // NOTE(review): unused in visible code
    private slideContainer: SlideContainer;

    constructor(
        private activatedRoute: ActivatedRoute,
        private videoService: VideoService
    ) {
    }

    // Capture the route param and the slide container; runs before the view
    // callbacks below, so slideContainer is available in ngAfterViewInit.
    ngOnInit() {
        this.id = this.activatedRoute.snapshot.params["id"];
        this.slideContainer = <SlideContainer> this.container.nativeElement;
    }

    // Size the videos, load the video list, and build the slide views.
    ngAfterViewInit() {
        this.setExpectedSizes();
        this.videoService.getVideos().then((videos) => {
            this.videos = videos;
            // Seed a default playback record for each loaded video.
            this.videos.map(() => {
                this.videosPlayback.push({startTime: 0, duration: 0, isPlaying: false});
            });
        });
        // NOTE(review): runs before getVideos() resolves — presumably the
        // slide views are (re)bound by the template once data arrives; confirm.
        this.slideContainer.constructView();
    }

    // Half the screen's width, a quarter of its height, in DIPs.
    setExpectedSizes(): void {
        this.vidWidth = screen.mainScreen.widthDIPs / 2;
        this.vidHeight = screen.mainScreen.heightDIPs / 4;
    }

    // Map horizontal swipes to slide navigation (left -> next, otherwise previous).
    changed(event:SwipeGestureEventData) {
        if (event.direction === SwipeDirection.left) {
            this.slideContainer.nextSlide();
        } else {
            this.slideContainer.previousSlide();
        }
    }

    // Pause the current video as soon as the user starts scrolling away.
    startScrollingHandler() {
        this.stopVideo();
    }

    // Track which slide became active after a scroll/swipe completes.
    changedScrollingHandler(event) {
        this.id = event.eventData.newIndex;
    }

    // Mark the current video as playing (template binds to isPlaying).
    playVideo() {
        this.videosPlayback[this.id].isPlaying = true;
    }

    // Mark the current video as stopped.
    stopVideo() {
        this.videosPlayback[this.id].isPlaying = false;
    }

    // NOTE(review): intentionally empty — preview behavior not implemented yet.
    previewVideo(id: number) {
    }

    // Jump to the slide matching the current id once the slides have loaded.
    loadedHandler() {
        this.slideContainer.goToSlide(this.id);
    }
} |
#!/usr/bin/env bash
# ClickHouse functional test: verifies that query/thread/profile-event logging
# reaches the server logs, system.query_log and system.query_thread_log, and
# that per-thread ProfileEvents are consistent with per-query totals.
# NOTE(review): output is diffed against a .reference file — do not change
# any command or SQL text without updating the reference.
set -e
# Get all server logs
export CLICKHOUSE_CLIENT_SERVER_LOGS_LEVEL="trace"
CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
# NOTE(review): unquoted expansion — breaks if the path contains spaces.
. $CURDIR/../shell_config.sh
cur_name=${BASH_SOURCE[0]}
# Per-test file collecting the server-side log lines mirrored to the client.
server_logs_file=$cur_name"_server.logs"
server_logs="--server_logs_file=$server_logs_file"
rm -f "$server_logs_file"
# Enable query, per-thread, profile-event and settings logging for all queries below.
settings="$server_logs --log_queries=1 --log_query_threads=1 --log_profile_events=1 --log_query_settings=1"
# Test insert logging on each block and checkPacket() method
$CLICKHOUSE_CLIENT $settings -n -q "
DROP TABLE IF EXISTS test.null;
CREATE TABLE test.null (i UInt8) ENGINE = MergeTree PARTITION BY tuple() ORDER BY tuple();"
# Insert 1000 bytes with tiny block-size limits so the insert is split into many
# blocks, each of which should produce log lines.
head -c 1000 /dev/zero | $CLICKHOUSE_CLIENT $settings --max_insert_block_size=10 --min_insert_block_size_rows=1 --min_insert_block_size_bytes=1 -q "INSERT INTO test.null FORMAT RowBinary"
$CLICKHOUSE_CLIENT $settings -n -q "
SELECT count() FROM test.null;
DROP TABLE test.null;"
# Expect at least ~110 mirrored server-log lines from the blocks above.
(( `cat "$server_logs_file" | wc -l` >= 110 )) || echo Fail
# Check ProfileEvents in query_log
# A CPU-bound query so real time ≈ user+system time on a single thread.
heavy_cpu_query="SELECT ignore(sum(sipHash64(hex(sipHash64(hex(sipHash64(hex(number)))))))) FROM (SELECT * FROM system.numbers_mt LIMIT 1000000)"
$CLICKHOUSE_CLIENT $settings --max_threads=1 -q "$heavy_cpu_query"
$CLICKHOUSE_CLIENT $settings -q "SYSTEM FLUSH SYSTEM TABLES"
# type=2 is QueryFinish; the three checks below must all print 1.
$CLICKHOUSE_CLIENT $settings -q "
WITH
any(query_duration_ms*1000) AS duration,
sumIf(PV, PN = 'RealTimeMicroseconds') AS threads_realtime,
sumIf(PV, PN IN ('UserTimeMicroseconds', 'SystemTimeMicroseconds', 'OSIOWaitMicroseconds', 'OSCPUWaitMicroseconds')) AS threads_time_user_system_io
SELECT
-- duration, threads_realtime, threads_time_user_system_io,
threads_realtime >= 0.99 * duration,
threads_realtime >= threads_time_user_system_io,
any(length(thread_numbers)) >= 1
FROM
(SELECT * FROM system.query_log PREWHERE query='$heavy_cpu_query' WHERE event_date >= today()-1 AND type=2 ORDER BY event_time DESC LIMIT 1)
ARRAY JOIN ProfileEvents.Names AS PN, ProfileEvents.Values AS PV"
# Check ProfileEvents in query_thread_log
# Re-run with 3 threads and verify per-thread timings bracket the query elapsed time.
$CLICKHOUSE_CLIENT $settings --max_threads=3 -q "$heavy_cpu_query"
$CLICKHOUSE_CLIENT $settings -q "SYSTEM FLUSH SYSTEM TABLES"
query_id=`$CLICKHOUSE_CLIENT $settings -q "SELECT query_id FROM system.query_log WHERE event_date >= today()-1 AND type=2 AND query='$heavy_cpu_query' ORDER BY event_time DESC LIMIT 1"`
query_elapsed=`$CLICKHOUSE_CLIENT $settings -q "SELECT query_duration_ms*1000 FROM system.query_log WHERE event_date >= today()-1 AND type=2 AND query_id='$query_id' ORDER BY event_time DESC LIMIT 1"`
threads=`$CLICKHOUSE_CLIENT $settings -q "SELECT length(thread_numbers) FROM system.query_log WHERE event_date >= today()-1 AND type=2 AND query_id='$query_id' ORDER BY event_time DESC LIMIT 1"`
$CLICKHOUSE_CLIENT $settings -q "
SELECT
-- max(thread_realtime), $query_elapsed, max(thread_time_user_system_io),
0.9 * $query_elapsed <= max(thread_realtime) AND max(thread_realtime) <= 1.1 * $query_elapsed,
0.7 * $query_elapsed <= max(thread_time_user_system_io) AND max(thread_time_user_system_io) <= 1.3 * $query_elapsed,
uniqExact(thread_number) = $threads
FROM
(
SELECT
thread_number,
sumIf(PV, PN = 'RealTimeMicroseconds') AS thread_realtime,
sumIf(PV, PN IN ('UserTimeMicroseconds', 'SystemTimeMicroseconds', 'OSIOWaitMicroseconds', 'OSCPUWaitMicroseconds')) AS thread_time_user_system_io
FROM
(SELECT * FROM system.query_thread_log PREWHERE query_id='$query_id' WHERE event_date >= today()-1)
ARRAY JOIN ProfileEvents.Names AS PN, ProfileEvents.Values AS PV
GROUP BY thread_number
)
"
# Check per-thread and per-query ProfileEvents consistency
# For every counter except ContextLock, the query total must bracket the sum of
# per-thread values; the query should print no rows.
$CLICKHOUSE_CLIENT $settings -q "
SELECT PN, PVq, PVt FROM
(
SELECT PN, sum(PV) AS PVt
FROM system.query_thread_log
ARRAY JOIN ProfileEvents.Names AS PN, ProfileEvents.Values AS PV
WHERE event_date >= today()-1 AND query_id='$query_id'
GROUP BY PN
)
ANY INNER JOIN
(
SELECT PN, PV AS PVq
FROM system.query_log
ARRAY JOIN ProfileEvents.Names AS PN, ProfileEvents.Values AS PV
WHERE event_date >= today()-1 AND query_id='$query_id'
)
USING PN
WHERE
NOT PN IN ('ContextLock') AND
NOT (PVq <= PVt AND PVt <= 1.1 * PVq)
"
# Clean
rm "$server_logs_file"
|
def count_occurances(list, target):
    """Count how many elements of *list* are equal to *target*.

    Works for any iterable (lists, tuples, strings, generators).

    Note: the first parameter shadows the built-in ``list``; its name is kept
    unchanged for backward compatibility with keyword-argument callers.
    (The function name's spelling — "occurances" — is likewise part of the
    public interface and left as-is.)

    :param list: iterable to scan
    :param target: value to count (compared with ``==``)
    :return: number of elements equal to ``target``
    """
    # Idiomatic counting: sum 1 for each matching element.
    return sum(1 for element in list if element == target)
#!/bin/zsh
#
# Set macOS defaults. Tested on Monterey, i.e. macOS 12.0.
#
# Must logout (or restart) for some changes to take effect.
#
# Resources:
#
# - https://github.com/boochtek/mac_config
# - https://github.com/mathiasbynens/dotfiles
# - https://github.com/yannbertrand/macos-defaults
# ---------------------------------------------------------------------------- #
# Only run in macOS.
[[ $(uname) = Darwin ]] || return 1
# ---------------------------------------------------------------------------- #
# Hostname
# Set the hostname
sudo scutil --set HostName macbookpro
# ---------------------------------------------------------------------------- #
# Trackpad
# Enable tap to click.
defaults write com.apple.AppleMultitouchTrackpad Clicking -bool true
# Enable 3-finger drag.
defaults write com.apple.AppleMultitouchTrackpad TrackpadThreeFingerDrag -bool true
# Enable scroll-to-zoom with Ctrl (^) modifier key (and 2 fingers).
# NOTE(review): writing to com.apple.universalaccess may require granting the
# terminal Full Disk Access on recent macOS versions — confirm.
defaults write com.apple.universalaccess closeViewScrollWheelToggle -bool true
defaults write com.apple.AppleMultitouchTrackpad HIDScrollZoomModifierMask -int 262144
# ---------------------------------------------------------------------------- #
# Keyboard
# Keyboard backlight auto-dim setting.
# NOTE(review): the original comment said "automatically illuminate in low
# light", but this writes kDim=false — confirm the intended direction.
defaults write com.apple.BezelServices kDim -bool false
# Set a blazingly fast keyboard repeat rate.
# Disabling press-and-hold makes held keys repeat instead of showing the
# accent-character popup.
defaults write NSGlobalDomain ApplePressAndHoldEnabled -bool false
defaults write NSGlobalDomain InitialKeyRepeat -int 15 # opts: 120,94,68,35,25,15
defaults write NSGlobalDomain KeyRepeat -int 2 # opts: 120,90,60,30,12,6,2
# ---------------------------------------------------------------------------- #
# Dock
# Automatically hide and show the Dock.
defaults write com.apple.dock autohide -bool true
# Do not automatically rearrange spaces based on most recent use.
defaults write com.apple.dock mru-spaces -bool false
# Do not show recent applications
defaults write com.apple.dock show-recents -bool false
# ---------------------------------------------------------------------------- #
# Finder
# When opening a new window, start in the home directory.
defaults write com.apple.finder NewWindowTargetPath "file://$HOME"
# Show hidden files.
defaults write com.apple.finder AppleShowAllFiles -bool true
# Finder: show status bar.
defaults write com.apple.finder ShowStatusBar -bool true
# Finder: show path bar.
defaults write com.apple.finder ShowPathbar -bool true
# Warn before emptying the Trash.
defaults write com.apple.finder WarnOnEmptyTrash -bool true
# Empty Trash securely by default.
# NOTE(review): secure empty trash was removed in later macOS versions; this
# key may have no effect on Monterey — confirm.
defaults write com.apple.finder EmptyTrashSecurely -bool true
# Don't show any special icons on desktop.
defaults write com.apple.finder ShowExternalHardDrivesOnDesktop -bool false
defaults write com.apple.finder ShowHardDrivesOnDesktop -bool false
defaults write com.apple.finder ShowRemovableMediaOnDesktop -bool false
defaults write com.apple.finder ShowMountedServersOnDesktop -bool false
# Use list view in all Finder windows by default.
# Four-letter codes for the other view modes: `icnv`, `clmv`, `Flwv`.
defaults write com.apple.finder FXPreferredViewStyle -string "Nlsv"
# Finder: show all filename extensions.
defaults write NSGlobalDomain AppleShowAllExtensions -bool true
# Avoid creating .DS_Store files on network or USB volumes.
defaults write com.apple.desktopservices DSDontWriteNetworkStores -bool true
defaults write com.apple.desktopservices DSDontWriteUSBStores -bool true
# Show the ~/Library folder.
chflags nohidden ~/Library
# ---------------------------------------------------------------------------- #
# Screensaver
# Require password immediately after sleep or screen saver begins.
defaults write com.apple.screensaver askForPassword -int 1
defaults write com.apple.screensaver askForPasswordDelay -int 0
# ---------------------------------------------------------------------------- #
# Screenshots
# Save screenshots to the desktop.
defaults write com.apple.screencapture location -string "${HOME}/Desktop"
# Save screenshots in PNG format (other options: BMP, GIF, JPG, PDF, TIFF).
defaults write com.apple.screencapture type -string "png"
# Disable shadow in screenshots.
defaults write com.apple.screencapture disable-shadow -bool true
# ---------------------------------------------------------------------------- #
# Terminal
# Use Ctrl-Tab and Ctrl-Shift-Tab to cycle iTerm tabs in order.
defaults write com.googlecode.iterm2 GlobalKeyMap -dict-add 0x19-0x60000-0x30 '{ Action = 2; Label = ""; Text = ""; Version = 0; }'
defaults write com.googlecode.iterm2 GlobalKeyMap -dict-add 0x9-0x40000-0x30 '{ Action = 0; Label = ""; Text = ""; Version = 0; }'
# Minimal-style tabs in iTerm.
defaults write com.googlecode.iterm2 TabStyleWithAutomaticOption -int 5
|
#!/bin/zsh
# ------------------------------------------------------------------------------
# init params
# ------------------------------------------------------------------------------
# Resolve the repository root from this script's location (script lives one
# directory below the root). Expansions are quoted so paths with spaces work.
DOTFILES_ROOT="$(dirname "$(cd "$(dirname "$0")" >/dev/null 2>&1; pwd -P)")"
DOTFILES_SCRIPTS="$DOTFILES_ROOT/scripts"
DOTFILES_CONFIG="$DOTFILES_ROOT/config"
DOTFILES_HOME="$DOTFILES_ROOT/home"
OS_TYPE=$(uname)
# ------------------------------------------------------------------------------
# prerequisite for install script
# ------------------------------------------------------------------------------
if [[ $OS_TYPE = "Darwin" ]]; then # macos: xcode-select, homebrew, unzip and curl
    # xcode-select: only launch the installer when the CLI tools are missing —
    # running `xcode-select --install` with tools already present prints an error.
    xcode-select -p >/dev/null 2>&1 || xcode-select --install
    # homebrew: install it if the `brew` command is not on PATH.
    if ! command -v brew >/dev/null 2>&1; then
        /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
    fi
    brew update
    # unzip and curl
    brew install unzip curl
elif [[ $OS_TYPE = "Linux" ]]; then # linux: apt-get, unzip and curl
    # Use sudo when available; fall back to direct apt-get (e.g. running as
    # root inside a container without sudo installed).
    if command -v sudo >/dev/null 2>&1; then
        sudo apt-get update
        sudo apt-get install -y unzip curl
    else
        apt-get update
        apt-get install -y unzip curl
    fi
fi
|
// Menu edit page: wires up a layui form, a table-based route selector, and an
// icon picker, then submits menu changes via AJAX.
// Relies on globals defined by the hosting page: routeAssignedListUrl, menuUpdateUrl.
var form;
var tableSelect;
// Load layui extensions from the project plugin directory, then initialise.
layui.config({
    base: '/static/plugins/layui-extend'
}).extend({
    tableSelect:'/tableSelect/tableSelect',
    iconPicker: '/iconPicker/iconPicker',
}).use(['form','layer','tableSelect','iconPicker'],function(){
    form = layui.form;
    // NOTE(review): assigns the global `$` (no var) from layui's bundled jQuery.
    $ = layui.jquery;
    var iconPicker = layui.iconPicker,
        tableSelect = layui.tableSelect;
    // Prefer the outermost frame's layer so popups overlay the whole app when framed.
    var layer = parent.layer === undefined ? layui.layer : top.layer;
    // Route selector: pick a route from a paged table into #route-select.
    tableSelect.render({
        elem: '#route-select', // input element that opens the selection table
        checkedKey: 'name', // unique row key; required — drives the checked state
        // searchKey: 'name', // name attribute of the search input; defaults to "keyword"
        searchPlaceholder: '路由地址搜索', // placeholder text of the search box
        table: { // table options, identical to layui's table module (elem implied)
            url : routeAssignedListUrl,
            limit : 20,
            limits : [10,15,20,25],
            cols : [[
                { type: 'radio' },
                {field: 'name', title: '请求地址',minWidth:50},
                {field: 'route_name', title: '名称', align:'left',minWidth:50},
                {field: 'route_description', title: '简述', align:'left', minWidth:100}
            ]]
        },
        // Copy the selected row's route name back into the trigger input.
        done: function (elem, data) {
            elem.val(data.data[0].name)
        }
    })
    // Icon picker
    iconPicker.render({
        // selector; an input element is recommended
        elem: '#iconPicker',
        // data type: fontClass/unicode — fontClass recommended (unicode is buggy)
        type: 'fontClass',
        // enable search: true/false
        search:false,
        // enable pagination
        page: true,
        // items per page, default 24
        limit: 24,
        // click callback
        click: function (data) {
            console.log(data) // NOTE(review): leftover debug logging
        },
    });
    // Menu create/update: POST the form, reload the parent page on success.
    form.on("submit(createMenu)",function(data){
        var index = layer.msg('提交中,请稍候',{icon: 16,time:false,shade:0.8});
        // NOTE(review): the fixed 2s delay only keeps the "submitting" toast
        // visible; the POST itself is asynchronous and the loading layer is
        // closed right after the request is dispatched, not on completion.
        setTimeout(function(){
            $.post(menuUpdateUrl,{
                menu_id:data.field.menu_id,
                name:data.field.name,
                route:data.field.route,
                icon:data.field.icon,
                order:data.field.order,
            },function(res){
                if (res.status==1){
                    layer.msg(res.msg, {icon: 1});
                    parent.location.reload();
                }else {
                    layer.msg(res.msg, {icon: 5});
                }
            })
            layer.close(index);
        },2000);
        return false; // prevent default form navigation; remove this line to allow it
    })
}) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.