language stringclasses 15
values | src_encoding stringclasses 34
values | length_bytes int64 6 7.85M | score float64 1.5 5.69 | int_score int64 2 5 | detected_licenses listlengths 0 160 | license_type stringclasses 2
values | text stringlengths 9 7.85M |
|---|---|---|---|---|---|---|---|
C# | UTF-8 | 1,431 | 3.015625 | 3 | [
"MIT"
] | permissive | using System;
using System.Linq;
using System.Web;
namespace IPAddressFiltering
{
/// <summary>
/// MVC Roles linked to IPs attribute
/// </summary>
public class IPAddressRoleFilterAttribute : IPAddressFilterAttribute
{
/// <summary>
/// List of roles to get IPs from for this rule
/// </summary>
/// <param name="roles"></param>
/// <param name="filteringType"></param>
public IPAddressRoleFilterAttribute(string roles, IPAddressFilteringAction filteringType = IPAddressFilteringAction.Allow)
{
_core.IPAddressRanges = new IPAddressRange[] { };
_core.FilteringType = filteringType;
//set roles list
_core.IPRoles.AddRange(roles.Split(new [] {',',';'},StringSplitOptions.RemoveEmptyEntries).Select(x=>x.Trim()));
}
protected override bool AuthorizeCore(HttpContextBase context)
{
string ipAddressString = context.Request.UserHostName;
return IsIPAddressAllowed(ipAddressString);
}
protected override bool IsIPAddressAllowed(string ipAddressString)
{
bool isValid = _core.UpdateIPAddressesFromRoles(); //update IP from roles
if (isValid)
{
return true;
}
//validate IP
return _core.IsIPAddressAllowed(ipAddressString);
}
}
} |
Java | UTF-8 | 474 | 1.960938 | 2 | [] | no_license | package com.example.dell.univstarproject.start.presenter;
import com.example.dell.univstarproject.base.BasePresenter;
import com.example.dell.univstarproject.start.model.bean.ForeBean;
/**
* 作者:天南地北,旺哥最美!
* 微信:xw15227039901
*/
public interface ForeContrcat {
public interface ForeView {
void showbean(ForeBean foreBean);
}
public interface ForePre extends BasePresenter<ForeView> {
void loadforedata();
}
}
|
Python | UTF-8 | 1,051 | 3.125 | 3 | [] | no_license | #
# Controller untuk kind Anggota
#
# Kind Anggota tediri atas property berikut:
# + nama : untuk nama anggota
# + umut : untuk umur anggota
#
from google.cloud import datastore
# Method untuk menambah anggota baru
def tambah(nama, umur):
# Buat object yang mau disimpan
anggota = { 'nama':nama, 'umur':umur}
# Buka koneksi ke datastore
client = datastore.Client()
# Minta dibuatkan key baru
key_baru = client.key("Anggota")
# Buat entity baru memakai key yang baru dibuat
entity_baru = datastore.Entity(key=key_baru)
# Isi data untuk entity baru
entity_baru.update(anggota)
# Simpan perubaha data entity baru
client.put(entity_baru)
# Method untuk mengambil data semua anggota yang ada
def daftar():
# Buka koneksi ke datastore
client = datastore.Client()
# Buat query baru khusus untuk Anggota
query = client.query(kind="Anggota")
# Jalankan query, hasilnya berupa iterator
hasil = query.fetch()
# Ubah iterator ke list dan kembalikan hasilnya
return list(hasil)
|
C++ | UTF-8 | 928 | 3.484375 | 3 | [] | no_license | #include <atomic>
#include <iostream>
#include <thread>
std::atomic<int> msg1(0);
std::atomic<int> msg2(0);
int x, y;
void setter1() {
x = 1;
atomic_thread_fence(std::memory_order_release);
msg1.store(1, std::memory_order_relaxed);
}
void setter2() {
y = 1;
atomic_thread_fence(std::memory_order_release);
msg2.store(1, std::memory_order_relaxed);
}
void getter() {
while (!msg1.load(std::memory_order_relaxed))
std::this_thread::yield();
while (!msg2.load(std::memory_order_relaxed))
std::this_thread::yield();
atomic_thread_fence(std::memory_order_acquire);
// When 1 is read in msg1 and msg2, the acquire fence will sync with both
// setter1 and setter2.
std::cout << "x: " << x << ", y: " << y << std::endl;
}
int main(int argc, char **argv) {
std::thread set1(setter1);
std::thread set2(setter2);
std::thread get(getter);
set1.join();
set2.join();
get.join();
return 0;
}
|
JavaScript | UTF-8 | 423 | 3.296875 | 3 | [] | no_license | console.log('hfj');
// let u=prompt("Enter a number");
let i=0;
while(i<1000)
{
u=prompt("Enter a number greater than 100");
u=Number(u);
if(u>100 || u==0)
{
console.log(u);
alert("you have entered number greater than 100!! or cancelled a loop");
break;
}else{
console.log(i);
console.log(u);
i++;
}
}
|
Java | UTF-8 | 344 | 2.546875 | 3 | [] | no_license | package packageOne;
public class ArrayTest {
public static void main(String[] args) {
Array aTest = new Array(1987, 600);
aTest.addNode(1979, 650);
System.out.println(aTest.searchYr(1979).year + " " + aTest.searchYr(1979).water);
System.out.println(aTest.searchW(1979).year + " " + aTest.searchW(1979).water);
}
}
|
C++ | UTF-8 | 637 | 2.796875 | 3 | [] | no_license | #include <Servo.h> //используем библиотеку для работы с сервоприводом
Servo servo; //объявляем переменную servo типа Servo
int a;
int servPos=90;
void setup() {
Serial.begin(115200);
servo.attach(10);
}
void loop() {
a = 0;
while (Serial.available() > 0) {
a = Serial.read();
Serial.println(a);
//Serial.print("MESSAGE_OK");
Serial.print("\n");
if(a<255)
a -= 128;
}
if(a==255) servPos = 90;
else{
servPos += a;
}
if(servPos<0) servPos = 0;
if(servPos>180) servPos = 180;
servo.write(servPos);
delay(10);
}
|
C++ | UTF-8 | 1,232 | 2.71875 | 3 | [] | no_license | #ifndef TCPSOCKET_H_
#define TCPSOCKET_H_
#include <iostream>
#include <sys/types.h>
#include <netinet/in.h>
#include <inttypes.h>
#include <arpa/inet.h>
#include <netdb.h>
#include <sys/types.h>
#include <sys/socket.h>
#include <strings.h>
#include <unistd.h>
#include <stdio.h>
#include "Socket.h"
using namespace std;
class TCPSocket: public Socket{
private:
//struct sockaddr_in peerAddr;
//struct sockaddr_in serverAddr;
/**
* A constructor that creates socket to communicate with connected socket
*/
TCPSocket(int connected_sock,struct sockaddr_in serverAddr,struct sockaddr_in peerAddr);
public:
/**
* A Constructor to create a TCP server socket
*/
TCPSocket(int port);
/**
* Constructor that creates TCP client socket and connect it to the remote peer in the given ip and port
*/
TCPSocket(string peerIp, int port);
/**
* Perform listen and accept on server socket
* returns a new TCPSocket object to communicate over the incoming session
*/
TCPSocket* listenAndAccept();
/**
* return the address of the connected peer
*/
string fromAddr();
/*
* Returns the from port number
*/
int fromPort();
/*
* Returns the from addr:port number
*/
string fromAddrPort();
};
#endif
|
Java | UTF-8 | 939 | 2.28125 | 2 | [] | no_license | package com.myproject.technicaltest.annotation;
import com.myproject.technicaltest.annotationImpl.CheckUsernameValidator;
import javax.validation.Constraint;
import javax.validation.Payload;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
/**
* CheckUsernameFormat interface to help creating the annotation that check the username format.
* @author Yasmine MANSOURI
*
*/
@Target({ElementType.FIELD, ElementType.METHOD, ElementType.PARAMETER, ElementType.ANNOTATION_TYPE})
@Retention(RUNTIME)
@Constraint(validatedBy = CheckUsernameValidator.class)
@Documented
public @interface CheckUsernameFormat {
String message() default "The Username must have at least 3 characters";
Class<?>[] groups() default {};
Class<? extends Payload>[] payload() default {};
}
|
Java | UTF-8 | 936 | 2.15625 | 2 | [] | no_license | package com.level.clock.dto;
public class DataDTO extends ObjectDTO {
private int data_num;
private String data_title;
private String data_main_image;
private int hero_num;
private String data_file;
public int getData_num() {
return data_num;
}
public void setData_num(int data_num) {
this.data_num = data_num;
}
public String getData_title() {
return data_title;
}
public void setData_title(String data_title) {
this.data_title = data_title;
}
public String getData_main_image() {
return data_main_image;
}
public void setData_main_image(String data_main_image) {
this.data_main_image = data_main_image;
}
public int getHero_num() {
return hero_num;
}
public void setHero_num(int hero_num) {
this.hero_num = hero_num;
}
public String getData_file() {
return data_file;
}
public void setData_file(String data_file) {
this.data_file = data_file;
}
}
|
Java | UTF-8 | 414 | 3.203125 | 3 | [] | no_license | import java.util.Scanner;
public class Prime {
public static void main(String[] args)
{
Scanner s=new Scanner(System.in);
int n=s.nextInt();
int isprime[]=new int[n+1];
for(int i=2;i<n+1;i++)
isprime[i]=1;
for(int i=2;i*i<=n;i++)
{
for(int j=i;j*i<=n;j++)
{
isprime[j*i]=0;
}
}
for(int i=2;i<n+1;i++)
if(isprime[i]==1)
System.out.print(i+" ");
}
}
|
C++ | UTF-8 | 2,395 | 2.765625 | 3 | [] | no_license | /*
AsyncMelody.ino - Asynchronous melody playing example.
Created by LazyGalaxy - Evangelos Papakonstantis, November 22, 2018.
Released into the public domain.
*/
#include <LazyGalaxySpeaker.h>
#include <LazyGalaxyTimer.h>
// any pin is ok for a speaker/buzzer, digital(with and without PWM) or analog
Speaker speaker(D13);
Melody* santaClausMelody =
new Melody((int[]){TG4, TE4, TF4, TG4, TG4, TG4, TA4, TB4, TC5, TC5,
TC5, TE4, TF4, TG4, TG4, TG4, TA4, TG4, TF4, TF4,
TE4, TG4, TC4, TE4, TD4, TF4, TB3, TC4, TEND},
(int[]){2, 2, 2, 4, 4, 4, 2, 2, 4, 4, 4, 2, 2, 4,
4, 4, 2, 2, 4, 8, 4, 4, 4, 4, 4, 8, 4, 8},
100);
Melody* jingleBellsMelody =
new Melody((int[]){TE5, TE5, TE5, TE5, TE5, TE5, TE5, TG5, TC5,
TD5, TE5, TF5, TF5, TF5, TF5, TF5, TE5, TE5,
TE5, TE5, TE5, TD5, TD5, TE5, TD5, TG5, TEND},
(int[]){2, 2, 4, 2, 2, 4, 2, 2, 2, 2, 8, 2, 2,
2, 2, 2, 2, 2, 1, 1, 2, 2, 2, 2, 4, 4},
100);
Melody* merryChristmasMelody =
new Melody((int[]){TB3, TF4, TF4, TG4, TF4, TE4, TD4, TD4, TD4, TG4, TG4,
TA4, TG4, TF4, TE4, TE4, TE4, TA4, TA4, TB4, TA4, TG4,
TF4, TD4, TB3, TB3, TD4, TG4, TE4, TF4, TEND},
(int[]){4, 4, 2, 2, 2, 2, 4, 4, 4, 4, 2, 2, 2, 2, 4,
4, 4, 4, 2, 2, 2, 2, 4, 4, 2, 2, 4, 4, 4, 8},
100);
void noteCallback(unsigned long time, int note) {
Serial.println("played " + String(note) + " @" + String(time));
}
void playMelody3(unsigned long time) {
// play the third melody, provide a note callback
speaker.playMelody(merryChristmasMelody, noteCallback);
}
void playMelody2(unsigned long time) {
// stop playing the current melody, this will interrupt the melody playing
speaker.stopMelody();
// play the second melody, provide a note callback and final callback to play
// the third melody
speaker.playMelody(jingleBellsMelody, noteCallback, playMelody3);
}
void setup() {
Serial.begin(9600);
// play the frist melody
speaker.playMelody(santaClausMelody);
// schedule a task/function callback to play the second melody in 2 seconds
Timer::scheduleTask(2000, playMelody2);
}
void loop() {
// update all LazyGalaxy tasks
Timer::updateTasks();
}
|
Java | UTF-8 | 277 | 2.6875 | 3 | [] | no_license | package thisKeyword;
public class PlayingWithObjects {
public static void main(String[] args) {
Paper p =new Paper();
p.setText("Hello World");
System.out.println(p.getText());
Printer hp=new Printer();
hp.print(p);
System.out.println(p.getText());
}
}
|
C++ | UTF-8 | 4,031 | 2.578125 | 3 | [] | no_license | #include "LineEditPath.h"
#include <QMenu>
#include <QFileDialog>
#include <QFocusEvent>
#include <QValidator>
LineEditPath::LineEditPath(QWidget *parent) :
QLineEdit(parent)
{
this->setContextMenuPolicy(Qt::CustomContextMenu);
connect(this, SIGNAL(customContextMenuRequested(QPoint)), this, SLOT(menuRequestPath(QPoint)));
connect(this, SIGNAL(textChanged(const QString &)), this, SLOT(textChangedSlot(const QString &)));
m_pathType = FileForOpen;
this->setText("");
}
void LineEditPath::menuRequestPath(const QPoint &pos)
{
QMenu *menu = this->createStandardContextMenu();
menu->addSeparator();
switch (m_pathType)
{
case FileForOpen:
menu->addAction(tr("Select File to Open..."));
break;
case FileForSave:
menu->addAction(tr("Select File to Save..."));
break;
case Folder:
menu->addAction(tr("Select Folder..."));
break;
}
QPoint gp = this->mapToGlobal(pos);
QAction *action = menu->exec(gp);
if (action)
{
if (action->text() == tr("Select Folder..."))
{
QString dir = QFileDialog::getExistingDirectory(this, "Select required folder", this->text());
if (!dir.isEmpty()) this->setText(dir);
}
if (action->text() == tr("Select File to Open..."))
{
QString file = QFileDialog::getOpenFileName(this, "Select required file to open", this->text());
if (!file.isEmpty()) this->setText(file);
}
if (action->text() == tr("Select File to Save..."))
{
QString file = QFileDialog::getSaveFileName(this, "Select required file to save", this->text());
if (!file.isEmpty()) this->setText(file);
}
}
delete menu;
}
void LineEditPath::focusInEvent(QFocusEvent *e)
{
QLineEdit::focusInEvent(e);
emit(focussed(true));
}
void LineEditPath::focusOutEvent(QFocusEvent *e)
{
QLineEdit::focusOutEvent(e);
emit(focussed(false));
}
LineEditPath::PathType LineEditPath::pathType() const
{
return m_pathType;
}
void LineEditPath::setPathType(const PathType &pathType)
{
m_pathType = pathType;
}
void LineEditPath::setHighlighted(bool highlight)
{
if (highlight)
{
m_backgroundStyle = "rgb(0, 255, 0)"; // green
}
else
{
m_backgroundStyle = QString();
}
generateLocalStyleSheet();
}
void LineEditPath::textChangedSlot(const QString &text)
{
QFileInfo fileInfo(text);
QValidator::State state = QValidator::Invalid;
if (m_pathType == FileForOpen && fileInfo.isFile()) state = QValidator::Acceptable;
if (m_pathType == FileForSave)
{
if (!fileInfo.exists()) state = QValidator::Acceptable;
else if (fileInfo.isFile()) state = QValidator::Acceptable;
}
if (m_pathType == Folder && fileInfo.isDir()) state = QValidator::Acceptable;
switch (state)
{
case QValidator::Acceptable:
m_foregroundStyle = QString();
break;
case QValidator::Intermediate:
m_foregroundStyle = "rgb(255, 191, 0)"; // amber
break;
case QValidator::Invalid:
m_foregroundStyle = "rgb(255, 0, 0)"; // red
break;
}
generateLocalStyleSheet();
}
void LineEditPath::generateLocalStyleSheet()
{
if (m_foregroundStyle.size() && m_backgroundStyle.size())
this->setStyleSheet(QString("QLineEdit { background: %1 ; color: %2 }").arg(m_backgroundStyle).arg(m_foregroundStyle));
if (m_foregroundStyle.size() && !m_backgroundStyle.size())
this->setStyleSheet(QString("QLineEdit { color: %1 }").arg(m_foregroundStyle));
if (!m_foregroundStyle.size() && m_backgroundStyle.size())
this->setStyleSheet(QString("QLineEdit { background: %1 }").arg(m_backgroundStyle));
if (!m_foregroundStyle.size() && !m_backgroundStyle.size())
this->setStyleSheet(QString());
}
|
Markdown | UTF-8 | 1,608 | 2.8125 | 3 | [
"MIT"
] | permissive | ---
date: '2019-09-12T17:00:00.000Z'
link: 'https://www.meetup.com/dresdenjs-io-javascript-user-group/events/wwdfrqyzmbqb/'
location: 'Hermann-Mende-Straße 4, Dresden'
title: '48. JavaScript MeetUp: Reactivity is Fun! and Let''s Work out!'
locked: false
---
Diesmal mit den tollen \~reaktionären\~ reaktiven Themen:
* "A brief history of Reactivity on the Web" von Christian Ulbrich (Zalari GmbH)
* "Node worker threads" von Vilva Athiban
(scroll down for English version)
Christian wird uns auf eine kleine Reise zur Reaktivität im Web mitnehmen. Warum React und Angular schlechte Frameworks sind, Svelte auch nicht besser ist und wie man einen HyperApp-Klon ja auch in 48 Zeilen Code selber schreiben kann!
Darüberhinaus haben wir mit Vilva Athiban einen Freelancer aus Berlin zu Gast, der uns Spannendes zu Node Worker Threads erzählt!
*-
We are happy to invite you to an evening full of JavaScript. This time with quite a good mixture between frontend and backend topics:
* "A brief history of Reactivity on the Web" by Christian Ulbrich (Zalari GmbH)
* "Worker Threads: The Bigger, The Better" by Vilva Athiban (Omio)
Christian will talk about reactivity on the web and bets that he can write a HyperApp clone in 48 lines or sass.
Vilva is a JavaScript Developer and GraphQL enthusiast working for Omio and will explain about the existing Node JS structure, need for multiple process and how Worker threads comes to rescue.
He will show us the steps to implement worker threads. A tech talk without code is no fun and hence he will show a code demo and performance comparisons of using Worker threads after the explanations.
|
Java | UTF-8 | 2,267 | 3.234375 | 3 | [
"BSD-2-Clause",
"LGPL-2.0-or-later",
"LicenseRef-scancode-other-copyleft",
"LicenseRef-scancode-warranty-disclaimer",
"GPL-1.0-or-later",
"GPL-2.0-only",
"LGPL-2.0-only"
] | permissive | package salvo.jesus.graph.visual.layout;
import java.awt.*;
/**
* An extension of the GraphLayoutManager interface specifically
* for placing vertices in grids.
*
* @author Jesus M. Salvo Jr.
*/
public interface GraphGridLayoutManager extends GraphLayoutManager {
/**
* This method sets the point at which the grid starts.
*
* @param startat Point object indicating the upper left corner of the grid.
*/
public void setStartAt( Point startat );
/**
* This method sets the distance between vertical grids
*
* @param xgriddistance Distance between vertical grids
*/
public void setXGridDistance( int xgriddistance );
/**
* This method sets the distance between horizontal grids
*
* @param xgriddistance Distance between horizontal grids
*/
public void setYGridDistance( int ygriddistance );
/**
* This method sets or unsets the drawing of the grid
*
* @param isdrawgrid Boolean: If true, the grid will be drawn on the next
* paint operation. Otherwise, the grid will not be drawn.
*/
public void setDrawGrid( boolean isdrawgrid );
/**
* Returns the grid object where the visual vertices are laid out.
*
* @return A Grid object where the visual vertices are laid out.
*/
public Grid getGrid();
/**
* Returns the starting position where the grid will be drawn.
*
* @return Point object indicating the starting position where the grid
* will be drawn. By default, this is (0,0).
*/
public Point getStartAt();
/**
* Returns the distance between horizontal grid lines.
*
* @return An int indicating the uniform distance between horizontal grid lines.
* By default, this is 100.
*/
public int getXGridDistance();
/**
* Returns the distance between vertical grid lines.
*
* @return An int indicating the uniform distance between vertical grid lines.
* By default, this is 100.
*/
public int getYGridDistance();
/**
* Determines if the grid is to be drawn.
*
* @return A boolean indicating if the grid will be drawn on the next
* paint operation of the containers of the visual graph.
*/
public boolean getDrawgrid();
}
|
C++ | UTF-8 | 19,574 | 2.875 | 3 | [] | no_license | #include "lexer.h"
#include <fstream>
#include <vector>
#include <cctype>
namespace lexer {
int ncomment_count = 0;
std::string comment_data;
bool is_digit(char c)
{
return std::isdigit(c);
}
bool is_octit(char c)
{
return std::isdigit(c) && c < '8';
}
bool is_hexit(char c)
{
return std::isxdigit(c);
}
bool is_symbol(char c)
{
switch (c)
{
case '!':
case '#':
case '$':
case '%':
case '&':
case '*':
case '+':
case '.':
case '/':
case '<':
case '=':
case '>':
case '?':
case '@':
case '\\':
case '^':
case '|':
case '-':
case '~':
return true;
default:
return false;
}
}
bool is_large(char c)
{
return std::isupper(c);
}
bool is_small(char c)
{
return std::islower(c) || c == '_';
}
bool is_graphic(char c)
{
return is_small(c) || is_large(c) || is_symbol(c) || is_digit(c) || is_special(c) || c == ':' || c == '"' || c == '\'';
}
bool is_any(char c)
{
return is_graphic(c) || c == ' ' || c == '\t';
}
bool is_whitechar(char c)
{
return std::isspace(c);
}
bool is_special(char c)
{
switch (c) {
case '(':
case ')':
case ',':
case ';':
case '[':
case ']':
case '`':
case '{':
case '}':
return true;
default:
return false;
}
}
bool is_ANY(char c)
{
return is_graphic(c) || is_whitechar(c);
}
int add_to_symbol_table(std::vector<std::string> &symbol_table, std::string content)
{
symbol_table.push_back(content);
return symbol_table.size() - 1;
}
void token_error(Code_data &code_data, Token &token, std::vector<Token> &tokens)
{
code_data.broken_token = true;
token.type = Token_type::Token_error;
tokens.push_back(token);
}
void handle_comment(Code_data &code_data, std::vector<Token> &tokens, std::vector<std::string> &symbolTable)
{
Token token = Token(Token_type::Comment, code_data.line, code_data.position, RAND_MAX);
std::string content = "";
while (code_data.current_line.size() > code_data.position && code_data.current_line[code_data.position] == '-')
{
content.push_back('-');
code_data.position++;
}
bool first = true;
while (code_data.current_line.size() > code_data.position)
{
if (first)
{
first = false;
if (is_symbol(code_data.current_line[code_data.position]))
{
token_error(code_data, token, tokens);
return;
}
}
if (is_any(code_data.current_line[code_data.position]))
{
content.push_back(code_data.current_line[code_data.position]);
}
else
{
//token error
}
code_data.position++;
}
int id = add_to_symbol_table(symbolTable, content);
token.symbol_table_id = id;
tokens.push_back(token);
}
void handle_ncomment(Code_data &code_data, std::vector<Token> &tokens, std::vector<std::string> &symbolTable, std::string previous_content)
{
Token token = Token(Token_type::Ncomment, code_data.line, code_data.position, RAND_MAX);
std::string content = previous_content;
while (code_data.current_line.size() > code_data.position +1 && code_data.current_line[code_data.position] == '{' && code_data.current_line[code_data.position] == '-')
{
content.push_back('{');
content.push_back('-');
code_data.position += 2;
}
bool first = true;
while (code_data.current_line.size() > code_data.position)
{
if (is_ANY(code_data.current_line[code_data.position]))
{
content.push_back(code_data.current_line[code_data.position]);
}
else
{
if (code_data.current_line.size() > code_data.position + 1 && code_data.current_line[code_data.position] == '{' && code_data.current_line[code_data.position] == '-')
{
content.push_back('{');
content.push_back('-');
code_data.position += 2;
ncomment_count++;
}
if (code_data.current_line[code_data.position] == '}' && content.size() > 2 && content.back() == '-')
{
content.push_back('}');
ncomment_count--;
if (!ncomment_count)
{
int id = add_to_symbol_table(symbolTable, content);
token.symbol_table_id = id;
tokens.push_back(token);
return;
}
}
else
{
// token error
}
}
code_data.position++;
}
if (!ncomment_count)
{
int id = add_to_symbol_table(symbolTable, content);
token.symbol_table_id = id;
tokens.push_back(token);
}
else
{
comment_data = content;
}
}
void handle_ncomment(Code_data &code_data, std::vector<Token> &tokens, std::vector<std::string> &symbolTable)
{
handle_ncomment(code_data, tokens, symbolTable, "");
}
void handle_special(Code_data &code_data, std::vector<Token> &tokens, std::vector<std::string> &symbolTable)
{
Token token = Token(Token_type::Special_begin, code_data.line, code_data.position, RAND_MAX);
std::string content = "";
if (code_data.current_line.size() > code_data.position && is_special(code_data.current_line[code_data.position]))
{
switch (code_data.current_line[code_data.position])
{
case '(':
token.type = Token_type::Parenthese_left;
break;
case ')':
token.type = Token_type::Parenthese_right;
break;
case ',':
token.type = Token_type::Comma;
break;
case ';':
token.type = Token_type::Semicolon;
break;
case '[':
token.type = Token_type::Bracket_left;
break;
case ']':
token.type = Token_type::Bracket_right;
break;
case '`':
token.type = Token_type::Backtick;
break;
case '{':
token.type = Token_type::Brace_left;
break;
case '}':
token.type = Token_type::Brace_right;
break;
}
code_data.position++;
}
tokens.push_back(token);
}
void try_handle_reservedid(Code_data &code_data, std::vector<Token> &tokens, std::vector<std::string> &symbolTable, std::string &content, Token &token)
{
if (content == "case")
{
token.type = Token_type::Case;
}
else if (content == "class")
{
token.type = Token_type::Class;
}
else if (content == "data")
{
token.type = Token_type::Data;
}
else if (content == "default")
{
token.type = Token_type::Default;
}
else if (content == "deriving")
{
token.type = Token_type::Deriving;
}
else if (content == "do")
{
token.type = Token_type::Do;
}
else if (content == "else")
{
token.type = Token_type::Else;
}
else if (content == "if")
{
token.type = Token_type::If;
}
else if (content == "import")
{
token.type = Token_type::Import;
}
else if (content == "in")
{
token.type = Token_type::In;
}
else if (content == "infix")
{
token.type = Token_type::Infix;
}
else if (content == "infixl")
{
token.type = Token_type::Infixl;
}
else if (content == "infixr")
{
token.type = Token_type::Infixr;
}
else if (content == "instance")
{
token.type = Token_type::Instance;
}
else if (content == "let")
{
token.type = Token_type::Let;
}
else if (content == "module")
{
token.type = Token_type::Module;
}
else if (content == "newtype")
{
token.type = Token_type::Newtype;
}
else if (content == "of")
{
token.type = Token_type::Of;
}
else if (content == "then")
{
token.type = Token_type::Then;
}
else if (content == "type")
{
token.type = Token_type::Type;
}
else if (content == "where")
{
token.type = Token_type::Where;
}
else if (content == "_")
{
token.type = Token_type::Underscore;
}
}
void handle_qvarid(Code_data &code_data, std::vector<Token> &tokens, std::vector<std::string> &symbolTable)
{
Token token = Token(Token_type::Qvarid, code_data.line, code_data.position, RAND_MAX);
std::string content = "";
char c = code_data.current_line[code_data.position];
bool first = true;
while (is_small(c) || is_large(c) || is_digit(c) || c == '\'')
{
if (first)
{
first = false;
if (!is_small(c))
{
token_error(code_data, token, tokens);
return;
}
}
content += c;
if (code_data.current_line.size() > code_data.position)
c = code_data.current_line[++code_data.position];
else
break;
}
try_handle_reservedid(code_data, tokens, symbolTable, content, token);
if (!(token.type < Token_type::Reservedid_end && token.type > Token_type::Reservedop_begin))
{
int id = add_to_symbol_table(symbolTable, content);
token.symbol_table_id = id;
}
tokens.push_back(token);
}
void handle_qconid(Code_data &code_data, std::vector<Token> &tokens, std::vector<std::string> &symbolTable)
{
Token token = Token(Token_type::Qconid, code_data.line, code_data.position, RAND_MAX);
std::string content = "";
char c = code_data.current_line[code_data.position];
bool first = true;
while (is_small(c) || is_large(c) || is_digit(c) || c == '\'')
{
if (first)
{
first = false;
if (!is_large(c))
{
token_error(code_data, token, tokens);
return;
}
}
content += c;
if (code_data.current_line.size() > code_data.position)
c = code_data.current_line[++code_data.position];
else
break;
}
int id = add_to_symbol_table(symbolTable, content);
token.symbol_table_id = id;
tokens.push_back(token);
}
bool is_final_reservedop_dfa_state(int state)
{
return state == 2 || state == 3 || state == 4 || state == 5 || state == 7 || state == 9 || state == 10 || state == 11 || state == 12;
}
int next_state_reservedop_dfa(int state, char symbol)
{
switch (state)
{
case 0:
{
if (symbol == '.')
return 1;
if (symbol == '=')
return 3;
if (symbol == '\\')
return 4;
if (symbol == '|')
return 5;
if (symbol == '<')
return 6;
if (symbol == '-')
return 8;
if (symbol == '@')
return 10;
if (symbol == '~')
return 11;
}
case 1:
if (symbol == '.')
return 2;
else
return -1;
case 3:
if (symbol == '>')
return 12;
else
return -1;
case 6:
if (symbol == '-')
return 7;
else
return -1;
case 8:
if (symbol == '>')
return 9;
else
return -1;
default:
return -1;
}
}
Token_type get_token_type_by_final_reservedop_dfa_state(int state)
{
switch (state)
{
case 2:
return Token_type::Two_dots;
case 3:
return Token_type::Equals;
case 4:
return Token_type::Backtick;
case 5:
return Token_type::Vertical_bar;
case 7:
return Token_type::Arrow_left;
case 9:
return Token_type::Arrow_right;
case 10:
return Token_type::At;
case 11:
return Token_type::Tilde;
case 12:
return Token_type::Instance_arrow;
}
}
void handle_reservedop_dfa(int state, int i, const std::string &content, Token &token)
{
if (i > content.size())
{
return;
}
if (i == content.size())
{
if (is_final_reservedop_dfa_state(state))
{
token.type = get_token_type_by_final_reservedop_dfa_state(state);
}
return;
}
char c = content[i];
int new_state = next_state_reservedop_dfa(state, c);
if (new_state == -1)
return;
handle_reservedop_dfa(new_state, i + 1, content, token);
}
void check_var_reserverdop(std::string &content, Token &token)
{
handle_reservedop_dfa(0, 0, content, token);
/*if (content == "..")
{
token.type = Token_type::Two_dots;
}
else if (content == "=")
{
token.type = Token_type::Equals;
}
else if (content == "\\")
{
token.type = Token_type::Backtick;
}
else if (content == "|")
{
token.type = Token_type::Vertical_bar;
}
else if (content == "<-")
{
token.type = Token_type::Arrow_left;
}
else if (content == "->")
{
token.type = Token_type::Arrow_right;
}
else if (content == "@")
{
token.type = Token_type::At;
}
else if (content == "~")
{
token.type = Token_type::Tilde;
}
else if (content == "=>")
{
token.type = Token_type::Instance_arrow;
}*/
}
void check_con_reserverdid(std::string &content, Token &token)
{
if (content == ":")
{
token.type = Token_type::Colon;
}
else if (content == "::")
{
token.type = Token_type::Two_colons;
}
}
void handle_qvarsym(Code_data &code_data, std::vector<Token> &tokens, std::vector<std::string> &symbolTable)
{
Token token = Token(Token_type::Qvarsym, code_data.line, code_data.position, RAND_MAX);
std::string content = "";
char c = code_data.current_line[code_data.position];
if (c == ':')
{
handle_qconsym(code_data, tokens, symbolTable);
return;
}
while (is_symbol(c) || c == ':')
{
content += c;
if (code_data.current_line.size() > code_data.position)
c = code_data.current_line[++code_data.position];
else
break;
}
check_var_reserverdop(content, token);
if (!(token.type > Token_type::Reservedop_begin && token.type < Token_type::Reservedop_end))
{
int id = add_to_symbol_table(symbolTable, content);
token.symbol_table_id = id;
}
tokens.push_back(token);
}
void handle_qconsym(Code_data &code_data, std::vector<Token> &tokens, std::vector<std::string> &symbolTable)
{
Token token = Token(Token_type::Qconsym, code_data.line, code_data.position, RAND_MAX);
std::string content = "";
char c = code_data.current_line[code_data.position];
bool first = true;
while (is_symbol(c) || c == ':')
{
if (first)
{
first = false;
if (c != ':')
{
token_error(code_data, token, tokens);
return;
}
}
content += c;
if (code_data.current_line.size() > code_data.position)
c = code_data.current_line[++code_data.position];
else
break;
}
check_con_reserverdid(content, token);
if (!(token.type > Token_type::Reservedop_begin && token.type < Token_type::Reservedop_end))
{
int id = add_to_symbol_table(symbolTable, content);
token.symbol_table_id = id;
}
tokens.push_back(token);
}
void handle_numeric(Code_data &code_data, std::vector<Token> &tokens, std::vector<std::string> &symbolTable)
{
Token token = Token(Token_type::Integer, code_data.line, code_data.position, RAND_MAX);
std::string content = "";
char c = code_data.current_line[code_data.position];
bool dot = false;
while (is_digit(c))
{
content += c;
if (code_data.current_line.size() > code_data.position)
c = code_data.current_line[++code_data.position];
else
break;
}
if (c == '.' && code_data.current_line.size() > code_data.position + 1 && is_digit(code_data.current_line[code_data.position]))
{
token.type = Token_type::Float;
content += c;
if (code_data.current_line.size() > code_data.position)
c = code_data.current_line[++code_data.position];
else
{
token_error(code_data, token, tokens);
return;
}
while (is_digit(c))
{
content += c;
if (code_data.current_line.size() > code_data.position)
c = code_data.current_line[++code_data.position];
else
break;
}
}
int id = add_to_symbol_table(symbolTable, content);
token.symbol_table_id = id;
tokens.push_back(token);
}
// Lexes a character literal: an opening '\'', exactly one graphic
// character, and a closing '\''.  Any deviation emits a token_error.
// NOTE(review): escape sequences (e.g. '\n') are not handled here --
// confirm whether they are dealt with elsewhere.
void handle_char(Code_data &code_data, std::vector<Token> &tokens, std::vector<std::string> &symbolTable)
{
	Token token = Token(Token_type::Char, code_data.line, code_data.position, RAND_MAX);
	std::string content = "";
	char c = code_data.current_line[code_data.position];
	// Opening quote (error if missing or at end of line).
	if (c == '\'')
	{
		content += '\'';
		if (code_data.current_line.size() > code_data.position)
			c = code_data.current_line[++code_data.position];
		else
		{
			token_error(code_data, token, tokens);
			return;
		}
	}
	else
	{
		token_error(code_data, token, tokens);
		return;
	}
	// Exactly one graphic payload character.
	if (is_graphic(c))
	{
		content += c;
		if (code_data.current_line.size() > code_data.position)
			c = code_data.current_line[++code_data.position];
		else
		{
			token_error(code_data, token, tokens);
			return;
		}
	}
	else
	{
		token_error(code_data, token, tokens);
		return;
	}
	// Closing quote; reaching end of line right after it is fine.
	if (c == '\'')
	{
		content += '\'';
		if (code_data.current_line.size() > code_data.position)
			c = code_data.current_line[++code_data.position];
	}
	else
	{
		token_error(code_data, token, tokens);
		return;
	}
	int id = add_to_symbol_table(symbolTable, content);
	token.symbol_table_id = id;
	tokens.push_back(token);
}
// Lexes a string literal: '"', a run of graphic characters, '"'.  The
// lexeme (including quotes) is interned and the token appended to `tokens`.
// NOTE(review): assumes is_graphic() excludes '"'; otherwise the payload
// loop would swallow the closing quote -- confirm against its definition.
void handle_string(Code_data &code_data, std::vector<Token> &tokens, std::vector<std::string> &symbolTable)
{
	Token token = Token(Token_type::String, code_data.line, code_data.position, RAND_MAX);
	std::string content = "";
	char c = code_data.current_line[code_data.position];
	// Opening quote (error if missing or at end of line).
	if (c == '"')
	{
		content += '"';
		if (code_data.current_line.size() > code_data.position)
			c = code_data.current_line[++code_data.position];
		else
		{
			token_error(code_data, token, tokens);
			return;
		}
	}
	else
	{
		token_error(code_data, token, tokens);
		return;
	}
	// Payload: any run of graphic characters.  Hitting end of line here
	// means the string is unterminated -> error.
	while (is_graphic(c))
	{
		content += c;
		if (code_data.current_line.size() > code_data.position)
			c = code_data.current_line[++code_data.position];
		else
		{
			token_error(code_data, token, tokens);
			return;
		}
	}
	// Closing quote.
	if (c == '"')
	{
		content += '"';
		// BUG FIX: a string literal ending exactly at the end of the line
		// is complete; the old code emitted a token_error when it could
		// not advance past the closing quote, while handle_char treats
		// the analogous situation as success.
		if (code_data.current_line.size() > code_data.position)
			c = code_data.current_line[++code_data.position];
	}
	else
	{
		token_error(code_data, token, tokens);
		return;
	}
	int id = add_to_symbol_table(symbolTable, content);
	token.symbol_table_id = id;
	tokens.push_back(token);
}
// Tokenizes the file `file_name` line by line, dispatching on the first
// character of each lexeme to the handle_* helpers above.  Whitespace is
// emitted as Whitechar tokens and a synthetic "\n" token closes each line.
void lex(std::string file_name, std::vector<Token> &tokens, std::vector<std::string> &symbolTable)
{
	std::ifstream file_stream = std::ifstream(file_name);
	Code_data code_data;
	code_data.line = 1;
	code_data.position = 0;
	while (std::getline(file_stream, code_data.current_line))
	{
		while (code_data.current_line.size() > code_data.position)
		{
			// NOTE(review): `ncomment_count` and `comment_data` are not
			// declared in this view -- presumably globals tracking an open
			// nested {- -} comment; this 4-argument handle_ncomment call
			// also differs from the 3-argument call below (overloads?).
			// Confirm both against their definitions.
			if (ncomment_count)
			{
				handle_ncomment(code_data, tokens, symbolTable, comment_data);
			}
			char c = code_data.current_line[code_data.position];
			if (is_small(c))
			{
				handle_qvarid(code_data, tokens, symbolTable);
			}
			else if (is_large(c))
			{
				handle_qconid(code_data, tokens, symbolTable);
			}
			else if (is_symbol(c) || c == ':')
			{
				handle_qvarsym(code_data, tokens, symbolTable);
			}
			else if (c == '\'')
			{
				handle_char(code_data, tokens, symbolTable);
			}
			else if (c == '"')
			{
				handle_string(code_data, tokens, symbolTable);
			}
			else if (is_digit(c))
			{
				handle_numeric(code_data, tokens, symbolTable);
			}
			// NOTE(review): if '-' (or '{') is NOT followed by '-', this
			// branch advances nothing, which would loop forever unless
			// is_symbol()/is_special() already cover those characters
			// above -- confirm.
			else if (c == '-')
			{
				if (code_data.current_line.size() > code_data.position + 1 && code_data.current_line[code_data.position + 1] == '-')
				{
					handle_comment(code_data, tokens, symbolTable);
				}
			}
			else if (c == '{')
			{
				if (code_data.current_line.size() > code_data.position + 1 && code_data.current_line[code_data.position + 1] == '-')
				{
					handle_ncomment(code_data, tokens, symbolTable);
				}
			}
			else if (is_special(c))
			{
				handle_special(code_data, tokens, symbolTable);
			}
			else if (is_whitechar(c))
			{
				// Emit each whitespace character as its own token.
				code_data.position++;
				std::string s;
				s.push_back(c);
				tokens.push_back(Token(Token_type::Whitechar, code_data.line, code_data.position, add_to_symbol_table(symbolTable, s)));
			}
			else
			{
				//token error
			}
			// A handler may flag that the current token continues on the
			// next line; reset the flag and move on.
			if (code_data.broken_token)
			{
				code_data.broken_token = false;
				break;
			}
		}
		// Close every physical line with a newline Whitechar token.
		tokens.push_back(Token(Token_type::Whitechar, code_data.line, code_data.position, add_to_symbol_table(symbolTable, "\n")));
		code_data.line++;
		code_data.position = 0;
	}
}
}
|
Markdown | UTF-8 | 1,578 | 3.390625 | 3 | [] | no_license | # Note Taker
This Project called for the creation of a simple, but fully-functional Note Taker application. It should easily allow the User to create and save notes, so that they can be viewed at a later time, and deleted when no longer required. It required practice in creating files that allowed for data persistence via a 'db.json' file using the 'fs' module.
## Requirements
The application front end was supplied for this exercise; we were to create the back end and link the two for full functionality. Two HTML routes had to be created: GET /notes, which returns the notes.html file; and GET *, which returns the index.html file. The API routes to be created are: 1) GET /api/notes - reads the db.json file and returns all saved notes as JSON 2) POST /api/notes - receives a new note to save on the request body, adds it to the db.json file, and then returns the new note to the client 3) DELETE /api/notes/:id - receives a query parameter containing the id of a note to delete. This requires giving each note a unique ID when saved. To delete, all notes are read from the db.json file, the one with the given id is removed, and the remaining notes are rewritten to the db.json file. The application had to be deployed and fully functional on Heroku, since full deployment of server-side code is not possible on GitHub.
## Acknowledgement
I especially want to thank Kris Renaldi, my tutor, who was instrumental in helping me get my API routes working and showing me how to deploy on Heroku, and Steven Mccarther, who worked with me closely on this project.
## License
©MIT
|
C++ | UTF-8 | 1,091 | 2.828125 | 3 | [] | no_license | //category : graph
#include <stdio.h>
#include <vector>
#include <queue>
#include <iostream>
#include <algorithm>
using namespace std;
vector<int> a[10001];
bool check[10001];
// Resets the global visited markers before a fresh traversal.
void initCheck(){
	for (bool &visited : check)
		visited = false;
}
// Recursive preorder depth-first search from vertex x; prints each vertex
// the first time it is reached.
void dfs(int x){
	if (check[x]) return;
	printf("%d ", x);
	check[x] = true;
	for (int neighbour : a[x])
		dfs(neighbour);
}
// Breadth-first search from vertex s; prints vertices in visit order.
// Vertices are marked visited when enqueued so each enters the queue once.
void bfs(int s){
	std::queue<int> frontier;
	frontier.push(s);
	check[s] = true;
	while (!frontier.empty()){
		int current = frontier.front();
		frontier.pop();
		printf("%d ", current);
		for (int neighbour : a[current]){
			if (!check[neighbour]){
				check[neighbour] = true;
				frontier.push(neighbour);
			}
		}
	}
}
// Reads an undirected graph (n vertices, m edges, start vertex s), then
// prints the DFS order followed by the BFS order from s.
int main(){
	int n, m, s;
	scanf("%d %d %d", &n, &m, &s);

	// Build symmetric adjacency lists.
	for (int e = 0; e < m; e++){
		int u, v;
		scanf("%d %d", &u, &v);
		a[u].push_back(v);
		a[v].push_back(u);
	}

	// Sort neighbours so both traversals visit smaller labels first.
	for (int v = 0; v < n; v++){
		sort(a[v].begin(), a[v].end());
	}

	initCheck();
	dfs(s);
	printf("\n");
	initCheck();
	bfs(s);
	return 0;
}
Python | UTF-8 | 3,298 | 2.640625 | 3 | [] | no_license | import asyncio
import re
import sys
APPLICATION = None
SERVER_PORT = 8888
def wsgi_to_bytes(s):
    """Encode a WSGI "native" string to bytes using ISO-8859-1 (PEP 3333)."""
    return bytes(s, 'iso-8859-1')
async def _handle(reader, writer):
    """Serve one HTTP request over the asyncio stream pair via the WSGI app.

    Parses the request line and headers, builds a PEP 3333 environ, calls
    the module-level APPLICATION and streams its response back to `writer`.
    """
    # Request line: "<METHOD> <PATH> <HTTP-VERSION>".
    status_line = (await reader.readuntil(b'\r\n')).decode('ascii')[:-2]
    (method, path, http_version) = status_line.split(' ')

    # Header block: "Key: Value" lines, terminated by an empty line.
    # Keys are stored upper-cased with dashes kept (e.g. 'CONTENT-LENGTH').
    headers = {}
    header_line = (await reader.readuntil(b'\r\n')).decode('ascii')[:-2]
    while len(header_line) > 0:
        header_key, header_value = re.split(r'\s*:\s*', header_line, maxsplit=1)
        headers[header_key.upper()] = header_value
        # read up next
        header_line = (await reader.readuntil(b'\r\n')).decode('ascii')[:-2]

    print('[{0}] - {1}'.format(method, path))

    # Split off the query string, if any.
    if '?' in path:
        (path, query) = path.split('?')
    else:
        query = ''

    # WSGI work!  Build the environ dict (PEP 3333).
    environ = {'HTTP_{0}'.format(k.replace('-', '_')): v for k, v in headers.items()}
    # NOTE(review): wsgi.input should expose the request BODY stream;
    # passing `writer` here looks wrong (probably meant `reader`) -- left
    # unchanged because no caller in this file reads it.  Confirm.
    environ['wsgi.input'] = writer
    environ['wsgi.errors'] = sys.stderr
    environ['wsgi.version'] = (1, 0)
    environ['wsgi.multithread'] = True
    environ['wsgi.multiprocess'] = False
    environ['wsgi.url_scheme'] = 'http'
    environ['SERVER_NAME'] = 'Python/3.5.2'
    environ['SERVER_PORT'] = SERVER_PORT
    environ['REQUEST_METHOD'] = method
    environ['PATH_INFO'] = path
    environ['QUERY_STRING'] = query
    environ['CONTENT_TYPE'] = headers.get('CONTENT-TYPE') or ''
    # BUG FIX: headers are keyed with '-' ('CONTENT-LENGTH'); the old
    # lookup used 'CONTENT_LENGTH' and therefore always fell back to 0.
    environ['CONTENT_LENGTH'] = int(headers.get('CONTENT-LENGTH') or 0)
    environ['SERVER_PROTOCOL'] = http_version

    headers_set = []
    headers_sent = []

    def write(data):
        # First body write flushes the status line and headers.
        if not headers_set:
            raise AssertionError("write() before start_response()")
        elif not headers_sent:
            status, response_headers = headers_sent[:] = headers_set
            # BUG FIX: the status line must end with CRLF before the
            # header lines follow.
            writer.write(wsgi_to_bytes('HTTP/1.1 %s\r\n' % status))
            for header in response_headers:
                writer.write(wsgi_to_bytes('%s: %s\r\n' % header))
            writer.write(wsgi_to_bytes('\r\n'))
        writer.write(data)

    def start_response(status, response_headers, exc_info=None):
        # Standard WSGI start_response, including the exc_info re-raise
        # path once headers have already gone out.
        if exc_info:
            try:
                if headers_sent:
                    raise exc_info[1].with_traceback(exc_info[2])
            finally:
                exc_info = None
        elif headers_set:
            raise AssertionError("Headers already set!")
        headers_set[:] = [status, response_headers]
        return write

    result = APPLICATION(environ, start_response)
    try:
        for data in result:
            if data: # don't send headers until body appears
                write(data)
        if not headers_sent:
            # BUG FIX: StreamWriter.write() requires bytes, not str.
            write(b'') # send headers now if body was empty
    finally:
        if hasattr(result, 'close'):
            result.close()
    writer.close()
async def run_server(host="127.0.0.1", port=8888):
    # Start the asyncio TCP server dispatching to _handle, and record the
    # port (as a string) in the module global used by the WSGI environ.
    global SERVER_PORT
    SERVER_PORT = str(port)
    print("Run server {0}:{1}".format(host, port))
    await asyncio.start_server(_handle, host, port)
def start_server(_app):
    # Register `_app` as the WSGI application and serve it forever on the
    # current event loop (blocks the calling thread).
    global APPLICATION
    APPLICATION = _app
    loop = asyncio.get_event_loop()
    loop.create_task(run_server())
    loop.run_forever()
if __name__ == '__main__':
    # Standalone mode: run the HTTP server even though no WSGI app has
    # been registered (requests will fail until APPLICATION is set).
    loop = asyncio.get_event_loop()
    loop.create_task(run_server())
    loop.run_forever()
|
Java | UTF-8 | 653 | 2.234375 | 2 | [] | no_license | package org.jreflect.methods.instances;
import org.jreflect.methods.AbstractMethodTestCase;
import org.jreflect.methods.fixture.ClassWithInstanceMethods;
import org.junit.Before;
public class InstanceMethodTest extends
        AbstractMethodTestCase<ClassWithInstanceMethods> {
    // Fixture instance exercised by the inherited reflection tests.
    private ClassWithInstanceMethods fixture;

    @Before
    public void setUp() {
        fixture = new ClassWithInstanceMethods();
    }

    /** Supplies the object whose instance methods are reflected upon. */
    @Override
    protected ClassWithInstanceMethods target() {
        return fixture;
    }

    /** Name prefix shared by all reflected methods on the fixture. */
    @Override
    protected String methodPrefix() {
        return "method";
    }
}
|
Java | UTF-8 | 3,266 | 2.71875 | 3 | [
"MIT"
] | permissive | package mont.gonzalo.phiuba.model;
import java.io.Serializable;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Date;
/**
* Created by Gonzalo Montiel on 1/25/17.
*/
/**
 * One weekly time slot of a cathedra: day of week (in Spanish), start/end
 * times ("HH:mm"), lecture type and classroom code.  Implements
 * {@link Calendable}; several calendar accessors are intentionally stubbed.
 */
public class CathedraSchedule implements Serializable, Calendable {
    private String day;
    private String from;
    private String to;
    private String type;
    private String classroomCode;

    // Working week days, lowercase Spanish, used to map getDay() to 1..5.
    private static String[] daysOfWeek = {"lunes", "martes", "miércoles", "jueves", "viernes"};

    public CathedraSchedule() { }

    public String getDay() {
        return day;
    }

    public void setDay(String day) {
        this.day = day;
    }

    public String getFrom() {
        return from;
    }

    public void setFrom(String from) {
        this.from = from;
    }

    public String getTo() {
        return to;
    }

    public void setTo(String to) {
        this.to = to;
    }

    public String getType() {
        return type;
    }

    public void setType(String type) {
        this.type = type;
    }

    public String getClassroomCode() {
        return classroomCode;
    }

    public void setClassroomCode(String classroomCode) {
        this.classroomCode = classroomCode;
    }

    /**
     * Short label for the type: the two initials of a two-word type, or
     * first letter (upper) + last letter (lower) of a one-word type.
     * Returns "" for any other word count.
     */
    public String getShortType() {
        // Removed an unused local variable that the original declared here.
        String[] s = type.split(" ");
        if (s.length == 2) {
            return s[0].substring(0,1).toUpperCase() + s[1].substring(0,1).toUpperCase();
        } else if (s.length == 1) {
            return s[0].substring(0,1).toUpperCase() + s[0].substring(s[0].length() - 1).toLowerCase();
        }
        return "";
    }

    @Override
    public String toString() {
        return this.getDay() + " " + getTimeToString();
    }

    /** Human-readable time span, e.g. "10:00 to 12:00". */
    public String getTimeToString() {
        return this.getFrom() + " to " + this.getTo();
    }

    @Override
    public String getTitle() {
        return "";
    }

    @Override
    public String getDescription() {
        return null;
    }

    @Override
    public Date getStart() {
        return null;
    }

    @Override
    public Date getEnd() {
        return null;
    }

    @Override
    public String getLocation() {
        return null;
    }

    /**
     * Converts "HH:mm" on the given date (year/month/day as strings, the
     * latter two zero-padded if needed) into epoch milliseconds.  On a
     * parse failure the current time is returned and the stack trace
     * printed.
     */
    public long getHourToLong(String hour, String year, String month, String day) {
        // Removed an unused Calendar instance the original created here.
        day = day.length() == 1? ('0' + day) : day;
        month = month.length() == 1? ('0' + month) : month;
        SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd-HH:mm");
        Date date = new Date();
        try {
            date = sdf.parse(year + month + day + '-' + hour);
        } catch (ParseException e) {
            e.printStackTrace();
        }
        return date.getTime();
    }

    public long getToAsLong(String endYear, String endMonth, String endDate) {
        return getHourToLong(this.getTo(), endYear, endMonth, endDate);
    }

    public long getFromAsLong(String beginYear, String beginMonth, String beginDate) {
        return getHourToLong(this.getFrom(), beginYear, beginMonth, beginDate);
    }

    /** 1-based index of getDay() within the working week (0 if unknown). */
    public int getDayOfWeek() {
        return Arrays.asList(daysOfWeek).indexOf(getDay().toLowerCase()) + 1;
    }
}
|
Java | UTF-8 | 1,037 | 2.578125 | 3 | [] | no_license | package application;
import ui.StatisticsView;
public class ConsoleView implements StatisticsView {

    /** Renders one statistic as a single line on standard output. */
    private void show(String line) {
        System.out.println(line);
    }

    @Override
    public void showTime(String formattedTime) {
        show(formattedTime);
    }

    @Override
    public void showTotalSentMessages(String total) {
        show(total);
    }

    @Override
    public void showTotalSentMessagesLastSecond(String total) {
        show(total);
    }

    @Override
    public void showAverageRTTLastSecond(String average) {
        show(average);
    }

    @Override
    public void showMaxRTT(String max) {
        show(max);
    }

    @Override
    public void showAverageAtoBTimeLastSecond(String averageAtoB) {
        show(averageAtoB);
    }

    @Override
    public void showAverageBtoATimeLastSecond(String averageBtoA) {
        show(averageBtoA);
    }

    @Override
    public void showLostMessages(String lostMessages) {
        show(lostMessages);
    }
}
|
Java | UTF-8 | 3,709 | 2.84375 | 3 | [] | no_license | package com.example.android.mycursoradapter;
import android.content.ContentValues;
import android.content.Context;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
import android.provider.BaseColumns;
/**
* Created by Android on 06.11.2015.
*/
public class MySQLiteHelper extends SQLiteOpenHelper implements BaseColumns {

    // <<<< Create DB>>>>
    // Static final constants describing the database schema
    public static final int DATABASE_VERSION = 2;
    public static final String DATABASE_NAME = "My DB";
    public static final String NAME_COLUMN = "name";
    public static final String NUMBER_COLUMN = "number";
    public static final String TABLE_NAME = "table_name";

    // CREATE TABLE statement: the table name followed, in parentheses,
    // by the column names with their types/modifiers
    public static final String DATABASE_CREATE_SCRIPT = "CREATE TABLE " + TABLE_NAME + " ( "
            + _ID + " INTEGER primary key autoincrement, "
            + NAME_COLUMN + " TEXT, "
            + NUMBER_COLUMN + " TEXT);";

    // Constructor
    public MySQLiteHelper(Context context) {
        super(context, DATABASE_NAME, null, DATABASE_VERSION);
    }

    // Called when the database we want to connect to does not exist yet
    @Override
    public void onCreate(SQLiteDatabase db) {
        db.execSQL(DATABASE_CREATE_SCRIPT);
        addData(db);
    }

    // Called when connecting with a DB version newer than the existing one
    @Override
    public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
        db.execSQL("DROP TABLE IF EXISTS " + TABLE_NAME);
        onCreate(db);
    }

    // Inserts the initial demo rows into the table
    public void addData(SQLiteDatabase db) {
        // ContentValues maps table column names to the values to insert
        // Create the data holder object
        ContentValues cv = new ContentValues();
        // Fill cv with (column name, value) pairs
        cv.put(NAME_COLUMN, "Asus");
        cv.put(NUMBER_COLUMN, "This is Asus");
        // insert() takes the table name and the cv object with the values;
        // the second argument is only used when inserting an empty row
        db.insert(TABLE_NAME, null, cv);
        // Removes all values so cv can be reused
        cv.clear();
        cv.put(NAME_COLUMN, "LG");
        cv.put(NUMBER_COLUMN, "This is LG");
        db.insert(TABLE_NAME, null, cv);
        cv.clear();
        cv.put(NAME_COLUMN, "Huawei");
        cv.put(NUMBER_COLUMN, "This is Huawei 8815");
        db.insert(TABLE_NAME, null, cv);
        cv.clear();
        cv.put(NAME_COLUMN, "Samsung");
        cv.put(NUMBER_COLUMN, "This is Samsung S4");
        db.insert(TABLE_NAME, null, cv);
        cv.clear();
        cv.put(NAME_COLUMN, "Lenovo");
        cv.put(NUMBER_COLUMN, "This is Lenevo");
        db.insert(TABLE_NAME, null, cv);
    }
}
|
C++ | UTF-8 | 998 | 3.84375 | 4 | [] | no_license | // chrono를 이용해 ms(milli seconds) 단위로 시간을 측정한다.
#include <iostream>
#include <chrono>
using namespace std;
int main() {
    // Measure the wall-clock duration of a block of work in milliseconds
    // using std::chrono.  (Removed an unused local `double result`.)
    // Start time.
    chrono::system_clock::time_point start = chrono::system_clock::now();

    // Workload to be timed: print a 100x100 grid of messages.
    // At least one line of work must sit between the two time stamps.
    for(int i=0;i<100;i++)
        for(int j=0;j<100;j++)
            cout<<"Milli seconds 단위로 시간측정 합니다." << i+1 << "행" << j+1 << "열\n";

    // End time.
    chrono::system_clock::time_point end = chrono::system_clock::now();

    // Elapsed = end - start, truncated to whole milliseconds.
    chrono::milliseconds mill = chrono::duration_cast<chrono::milliseconds>(end - start);
    cout << "\n경과시간 : " << mill.count() << "ms";
    return 0;
}
|
Python | UTF-8 | 895 | 4.34375 | 4 | [] | no_license | """
Дано положительное целое число. Вам необходимо сконвертировать его в двоичную форму и подсчитать сколько единиц в написании данного числа. Для примера: 5 = 0b101 содержит две единицы, так что ответ 2.
Заметки: Эта задача легко решается с помощью двух функций" -- bin и count.
Вх. данные: Число как положительное целочисленное (int).
Вых. данные: Число единиц в двоичной форме данного числа (int).
"""
def checkio(number):
    """Return how many '1' digits appear in the binary form of `number`."""
    return bin(number).count('1')
# Smoke tests: each line prints True when checkio is correct.
print(checkio(4) == 1)
print(checkio(15) == 4)
print(checkio(1) == 1)
print(checkio(1022) == 9)
|
Ruby | UTF-8 | 606 | 4.65625 | 5 | [] | no_license | #Join elements of an array with .join to make a "string"
# .join concatenates array elements into one string using the separator.
puts ["This", "is", "a", "test"].join(' ')
puts ["This", "is", "a", "test"].join('_')
puts ["This", "is", "a", "test"].join('*')
message = ["I", "love", "Ruby"]
puts message.join('_')
# Array of words literal operator
%w[this is also a test]
#=> ["this", "is", "also", "a", "test"]
# For method-chaining fun:
%w[this is also a test].join(" ").capitalize
#=> "This is also a test"
#Split a string into an array of words with .split
"When in the course of human events".split(" ")
#=> ["When", "in", "the", "course", "of", "human", "events"]
|
C | WINDOWS-1250 | 1,253 | 2.90625 | 3 | [] | no_license | #include<stdio.h>
#include<string.h>
/*
 * Answers range queries over two strings of 'A'/'B'.  For each query
 * (a,b,c,d) it compares s[a..b] with t[c..d]: counting w 'A's and x 'B's
 * in the first range and y/z in the second, the two substrings are
 * considered equivalent iff (2*w+x) mod 3 == (2*y+z) mod 3.
 * Prefix-count arrays sa/sb (and ta/tb) make each query O(1).
 */
int main(void){
	char s[131072],t[131072];
	int q,a,b,c,d,i,sl,tl;
	int w,x,y,z;
	int sa[131072]={0},sb[131072]={0};
	int ta[131072]={0},tb[131072]={0};
	/* BUG FIX: scanf() return values were ignored (the compiler warned);
	 * bail out on malformed input instead of using garbage values. */
	if (scanf("%s%s",s,t) != 2) return 1;
	sl = strlen(s);tl = strlen(t);
	/* sa[i]/sb[i] = number of 'A'/'B' in s[0..i-1] (1-based prefixes). */
	for(i = 0;i < sl;i++){
		if(s[i] == 'A'){sa[i+1] = sa[i]+1;sb[i+1] = sb[i];}
		else{sb[i+1] = sb[i]+1;sa[i+1] = sa[i];}
	}
	for(i = 0;i < tl;i++){
		if(t[i] == 'A'){ta[i+1] = ta[i]+1;tb[i+1] = tb[i];}
		else{tb[i+1] = tb[i]+1;ta[i+1] = ta[i];}
	}
	if (scanf("%d",&q) != 1) return 1;
	for(i = 1;i <= q;i++){
		if (scanf("%d%d%d%d",&a,&b,&c,&d) != 4) return 1;
		w = sa[b]-sa[a-1];
		x = sb[b]-sb[a-1];
		y = ta[d]-ta[c-1];
		z = tb[d]-tb[c-1];
		if((w*2+x)%3 == (y*2+z)%3){printf("YES\n");}else{printf("NO\n");}
	}
	return 0;
}
./Main.c:10:2: warning: ignoring return value of scanf, declared with attribute warn_unused_result [-Wunused-result]
scanf("%s%s",s,t);
^
./Main.c:20:2: warning: ignoring return value of scanf, declared with attribute warn_unused_result [-Wunused-result]
scanf("%d",&q);
^
./Main.c:22:3: warning: ignoring return value of scanf, declared with attribute warn_unused_result [-Wunused-result]
scanf("%d%d%d%d",&a,&b,&c,&d);
^ |
Python | UTF-8 | 32,113 | 2.640625 | 3 | [] | no_license | # -*- coding: utf-8 -*-
from numpy import *
import matplotlib as mpl
from matplotlib import cm,colors
import matplotlib.pyplot as plt
from matplotlib.ticker import MaxNLocator
from scipy.optimize import leastsq
import os,time
# For full simulation which include dispersion, use PhaseMatchingBiphotonFWM.py
# This code is intended for design purpose (faster simulation) and assumes linear dispersion between pump, signal and idler.
# -----------------------------------------------------------------------------#
# Plot functions
# -----------------------------------------------------------------------------#
# Lattice: bidimensional numpy array, example : lattice = ones((size, size), dtype=float )
# extent: axis extent for each axis [begin_x,end_x,begin_y,end_y]
def plotcolormap(lattice,extent,fname = None):
    # Render `lattice` as a black->red->yellow heat map over `extent`
    # (nm axes).  Shows the figure when fname is None, else saves to fname.
    # NOTE: Python 2 print statements -- this module is Python 2 code.
    fig = plt.figure()
    print lattice
    print extent
    map1=colors.LinearSegmentedColormap.from_list('bla',['#000000','#FF0000','#FFFF00'])
    begin_x,end_x,begin_y,end_y = extent
    aspect = (end_x - begin_x )/(end_y - begin_y)  # computed but unused (aspect forced to 'equal' below)
    plt.imshow(lattice, map1, vmin = 0,interpolation='nearest',extent=extent, aspect = 'equal')
    #plt.imshow(lattice, map1,vmin = 0, interpolation='nearest',extent=extent,aspect = aspect)
    plt.gca().xaxis.set_major_locator( MaxNLocator(nbins = 7, prune = 'lower') )
    plt.gca().yaxis.set_major_locator( MaxNLocator(nbins = 6) )
    #cbar = plt.colorbar()
    #cbar.locator = MaxNLocator( nbins = 6)
    # vmin=0,vmax = 1,
    if fname is None:
        plt.show()
    else:
        plt.savefig(fname)
    plt.close()
def plot(plots):
    # Draw several curves on one grid; `plots` is an iterable of
    # (x, y, matplotlib-style-string) triples.
    for x,y,style in plots:
        plt.plot(x, y, style) # x, y, 'k--',
    plt.grid(True)
    plt.title('')
    plt.xlabel('')
    plt.ylabel('')
    plt.show()
def plotcolormapphase(lattice,extent):
    # Heat map for phase data in [-pi, pi]:
    # blue (negative) -> black (zero) -> red (positive).
    fig = plt.figure()
    map1=colors.LinearSegmentedColormap.from_list('bla',['#0000FF','#000000','#FF0000'])
    plt.imshow(lattice, map1,vmin = -pi,vmax = pi, interpolation='nearest',extent=extent)
    # vmin=0,vmax = 1,
    plt.show()
# -----------------------------------------------------------------------------#
# MISC FUNCTIONS (helpers for classes)
# -----------------------------------------------------------------------------#
def funcpeak(lbda,lbda0):
    # Square root of a normalized Gaussian line shape centred on lbda0 (um),
    # whose width follows from a transform-limited 1 ns pulse
    # (time-bandwidth product 0.441; c is the module-level speed of light).
    pulse_length = 1.*10**(-9)  # seconds
    signu = 0.441/pulse_length  # spectral width in Hz
    siglbda = signu/(c*10**6)*(lbda0)**2  # converted to um
    gaussian = 1./(sqrt(2*pi)*siglbda) * exp(-(lbda-lbda0)**2/(2*siglbda**2))
    return sqrt(gaussian)
"""
input state as a 2D matrix
!! the input state is not given as a density matrix
it's a pure state given in a matrix
"""
def schmidtnumber(state):
    # Schmidt number K = 1/Tr(rho_r**2) of a pure bipartite state whose
    # amplitudes are given as an N x M matrix; rho_r is the reduced
    # density matrix traced over the second subsystem.
    # NOTE(review): no complex conjugation is applied, so this assumes a
    # real-valued `state` -- confirm for complex inputs.
    N,M = state.shape
    ror=zeros((N,N)) # reduced density matrix
    for l in xrange(N):
        for n in xrange(N):
            for p in xrange(N):
                ror[l,n]+=state[p,l]*state[p,n]
    ror2 = dot(ror,ror)
    # compute the trace of ror2
    tmp = 0
    for k in xrange(N):
        tmp+= ror2[k,k]
    schn = 1.0/tmp
    return schn
def parse_extent(line):
    # Parse a header line of the form "label : <min>-<max>, ..." and
    # return the wavelength window as a (min, max) pair of floats.
    payload = line.split(":")[1].split(",")[0]
    raw_lo, raw_hi = payload.split("-")
    return float(raw_lo), float(raw_hi)
def parse_biphoton_data(line):
    # Parse one space-separated row of floats (trailing newline stripped)
    # into a 1-D numpy array.
    cleaned = line.replace("\n", "")
    return array([float(token) for token in cleaned.split(" ")])
# -----------------------------------------------------------------------------#
# CONSTANTS
# -----------------------------------------------------------------------------#
# Physical constants (SI units unless the trailing comment says otherwise).
I = 1.0j  # imaginary unit shorthand
HPLANCK = 6.626068*10**(-34) #m2 kg / s
HBAR = HPLANCK/(2*pi)
EPSILON0 = 8.85418782*10**(-12)#m-3 kg-1 s4 A2 or C.V-1.M-1
c = 299792458.0 # CLIGHT = 299792458. # m/s
n2_Si = 6.3* 10**(-18) # m2/W (Semicond. Sci. Technol. 23 (2008) 064007 (9pp))
# -----------------------------------------------------------------------------#
# CLASS FWM_Simu
# -----------------------------------------------------------------------------#
# This class calculates the joint spectral distribution obtained for a straight
# waveguide with a given set of parameters
# Init (
# * Waveguide cross section
# * Waveguide length (Meters)
# * Pump power (Watts)
# * Pump wavelength (um)
# * Pulse duration (Seconds)
# * Repetition rate (Hz)
# )
#
# computeJS: Does the simulation
#
class Simu(object):
    """Joint-spectrum simulation base class for FWM photon-pair sources.

    Wavelengths are in um, durations in seconds.  Subclasses (e.g.
    RingSimu) set self.ng and the JSA grid, override computeJS() and the
    filter_* hooks.  NOTE(review): several methods read self.ng, which
    this class never sets -- a subclass must assign it before __init__
    runs (RingSimu does).
    """
    def __init__(self,
                    pumpwl = 1.55,
                    pulseduration=1.*10**(-12),
                    nb_points_pump = 400
                    ):
        self.T = pulseduration # in seconds
        self.setPumpwl(pumpwl)
        self.pumpenvelop(pumpwl) # computes siglbda
        self.gamma = 200. # W^-1 m^-1 ; non linear coeff IEEE JOURNAL OF SELECTED TOPICS IN QUANTUM ELECTRONICS, VOL. 16, NO. 1, JANUARY/FEBRUARY 2010
        self.nb_points_pump = nb_points_pump
    def setPumpwl(self,x):
        self.lbda_p = x
    def setPulseDuration(self,x):
        # Changing the pulse length also refreshes siglbda via the envelope.
        self.T = x
        self.pumpenvelop(self.lbda_p)
    # **************
    # Pump envelopes
    # **************
    def pumpenvelop(self,lbda):
        # Active pump spectral envelope (Gaussian); alternatives kept below.
        return self.gaussppumpenvelop(lbda) #self.gaussppumpenvelop(lbda)
        #return self.rectpumpenvelop(lbda) #self.gaussppumpenvelop(lbda)
    def gausspulsedpumpenvelop(self,lbda,dlbda = 0.4*10**(-4)):
        return self.gaussppumpenvelop(lbda) *(sin(2*pi*(lbda)/dlbda))**2# From laser textbook
    def rectpumpenvelop(self,lbda):
        # Rectangular spectral envelope of total width sqrt(2*pi)*siglbda,
        # normalized so the integrated power is 1; also caches pumprange.
        signu = 0.441/self.T # self.linewidth #0.441/sigma_t # From laser book, in Hz
        lbda0 = self.lbda_p
        siglbda = signu/(c*10**6)*(lbda0)**2
        w = sqrt(2*pi)*siglbda
        self.siglbda = siglbda
        a = 1/sqrt(w)
        lbda_min = lbda0-w/2
        lbda_max = lbda0+w/2
        step = w / 400
        self.pumprange = arange(lbda_min,lbda_max,step)
        output = (lbda>=lbda_min)*(lbda<=lbda_max)*a
        return output
    def gaussppumpenvelop(self,lbda):
        # Gaussian spectral amplitude; width set by the transform limit of
        # a pulse of duration self.T (time-bandwidth product 0.441).
        lbda0 = self.lbda_p
        k0,k = map(lambda x : 2*pi*self.ng/x,[lbda0,lbda])
        signu = 0.441/self.T # self.linewidth #0.441/sigma_t # From laser book, in Hz
        siglbda = signu/(c*10**6)*(lbda0)**2
        ng = self.ng
        sigk = siglbda/(lbda0)**2*2*pi*ng
        self.siglbda = siglbda
        return sqrt(1./(sqrt(2*pi)*siglbda) * exp(-(lbda-lbda0)**2/(2*siglbda**2))) # this gauss envelop
    # Rectangular pulse in the temporal domain
    # lbda in um
    # T : pulse length [S]
    def sincpumpenvelop(self,lbda):
        T = self.T
        om = 2*pi*c/(lbda*10**-6)
        om0 = 2*pi*c/(self.lbda_p*10**(-6))
        dom = om - om0
        #return sinc(dom*T/2) * sqrt(T/(2*pi)) # this normalization works when integrating over omega
        # *** WARNING, in python, sinc(x) = sin(pi*x)/(pi*x) which is already normalized to one ! ***
        return sinc(dom*T/2) * sqrt(T*pi*c*10**6/(lbda**2)) # c in um/s, lbda in um, T in s; this normalization is for lambda
    # **************
    #
    # **************
    # This provides the range of lbdas which should be used to accurately span the pump
    def updatepumprange(self):
        # Doubles the half-window deltalbda until it captures >= 95% of the
        # pump power, then resamples that window with nb_points_pump points.
        print "Get pump range ..."
        lbda_p = self.lbda_p
        lbda_step= 0.00000001 # step for finding the pump range
        P = 0.
        targetfraction = 0.95
        deltalbda = 0.5*10**(-6) # initialize deltalbda at 1pm
        while (P<targetfraction):
            deltalbda = 2*deltalbda
            lbdas = arange(lbda_p-deltalbda,lbda_p+deltalbda,lbda_step)
            #print P
            P=(self.pumpenvelop(lbdas)*self.pumpenvelop(lbdas).conjugate()).sum()*lbda_step
            print P
        print P
        N = self.nb_points_pump
        step = (lbdas[-1]-lbdas[0])/N # Step for the returned pump range
        res = arange(lbdas[0],lbdas[-1],step)
        #print "Size of pump lbdas" ,lbdas.size
        #print self.pumpenvelop(lbda_p)
        print "Pump range : (um)",lbdas[0],lbdas[-1]
        self.pumprange = res
        return res
    def setRangeJS(self,lbda_s_min,lbda_s_max,lbda_i_min,lbda_i_max):
        # Stores the signal/idler wavelength windows for the JSA grid and
        # the matching plot extent.
        self.lbda_s_min = lbda_s_min
        self.lbda_s_max = lbda_s_max
        self.lbda_i_min = lbda_i_min
        self.lbda_i_max = lbda_i_max
        self.extent = [x*1000 for x in [self.lbda_i_min,self.lbda_i_max,self.lbda_s_min,self.lbda_s_max]] # um to nm
        print self.extent
    def setRangeScanResonance(self,lbda_s_min,lbda_s_max):
        # Get the range for signal centered on the resonance
        lsm,lsM = lbda_s_min,lbda_s_max
        # Get the range for idler using rough energy conservation
        # (2/lambda_p = 1/lambda_s + 1/lambda_i)
        lp = self.lbda_p
        lp_min = min(self.pumprange)
        lp_max = max(self.pumprange)
        lim = 1./(2./lp_min - 1./lsM)
        liM = 1./(2./lp_max - 1./lsm)
        print "avg_pumps", (lim+lsm)/2,(liM+lsM)/2
        #print "%.2f %.2f ; %.2f %.2f (pm)" % (lsm*10**6,lsM*10**6,lim*10**6,liM*10**6)
        print lsm,lsM,lim,liM
        self.setRangeJS(lsm,lsM,lim,liM)
    # Override these methods to add custom filters on signal and idler arm
    def filter_idler(self,lbda):
        return ones(lbda.size)
    def filter_signal(self,lbda):
        return ones(lbda.size)
    def getPurityAfterFilter(self):
        # Applies the signal/idler filter functions to the stored JSA,
        # renormalizes, and returns the heralded-photon purity of the
        # filtered state; the unfiltered JSA is restored afterwards.
        Ni = self.Ni
        Ns = self.Ns
        # Apply custom filters:
        m_filter_signal =zeros((Ni,Ns))
        m_filter_idler =zeros((Ni,Ns))
        for i in arange(Ni):
            m_filter_signal[i,:] = self.filter_signal(self.a_lbda_s)
        for j in arange(Ns):
            m_filter_idler[:,j] = self.filter_idler(self.a_lbda_i)
        lattice = self.normlattice*m_filter_signal*m_filter_idler
        # Multiply by the appropriate missing constants
        Z = lattice.sum()# sqrt(abs(lattice*conjugate(lattice)).sum())
        normlattice = sqrt(abs(lattice/Z))
        self.normlattice_unfiltered = self.normlattice[:,:] # Save the previous matrix
        self.normlattice = normlattice # assign the new filtered matrix
        purity = self.computeHeraldedPhotonPurity() # computes the purity after filtering
        self.normlattice = self.normlattice_unfiltered # restore the previous matrix
        return purity
    def computeJS(self):
        # Subclasses implement the actual joint-spectrum computation.
        pass
    def plotBiphoton(self,fname = None):
        plotcolormap(self.normlattice,self.extent,fname)
    def __g__(self,i,j):
        # First-order correlation between JSA rows i and j (with phases).
        #return (self.normlattice[i,:]*conjugate(self.normlattice[j,:])).sum()
        return (self.normlattice[i,:]*exp(I*self.phases[i,:])*conjugate(self.normlattice[j,:]*exp(I*self.phases[j,:]))).sum()
    def __g_nophase__(self,i,j):
        return (self.normlattice[i,:]*conjugate(self.normlattice[j,:])).sum()
    def __G_nophase__(self,i,j,k,l):
        return self.__g_nophase__(i,j)*self.__g_nophase__(k,l)
    vectg = vectorize(__g__)
    def __G__(self,i,j,k,l):
        return self.__g__(i,j)*self.__g__(k,l)
    vectG = vectorize(__G__)
    vectG_nophase = vectorize(__G_nophase__)
    # Purity = Tr(ro**2)
    def computenaivepurity(self):
        lattice = sqrt(self.normlattice)
        N = self.N
        P = 0
        for n in xrange(self.N):
            for m in xrange(self.N):
                P+= (lattice[:,n]*conjugate(lattice[:,m])).sum()*(lattice[:,m]*conjugate(lattice[:,n])).sum()
        self.purity = abs(P)
        self.schn = 1./P
        return P
    # Computes the probability of getting coincidences between two heralded photons from different sources
    def computePcoincfrom2photons(self):
        lattice = sqrt(self.normlattice)
        #print "State Norm:", abs(lattice*conjugate(lattice)).sum() # equivalent to the trace
        print "Computing proba coincidence"
        N = self.N
        omega1 = zeros((N,N),int)
        omega2 = zeros((N,N),int)
        for i in range(N):
            omega1[:,i]= arange(N)
            omega2[i,:]= arange(N)
        Gnnmm = self.vectG(self,omega1,omega1,omega2,omega2)
        Gnmmn = self.vectG(self,omega1,omega2,omega2,omega1)
        print "Gnnmm: ",Gnnmm.sum()
        print "Gnmmn: ",Gnmmn.sum()
        Pcoinc = 0.5*(Gnnmm.sum()-Gnmmn.sum()) # See proof in my labbook from 2012 (27/01/2012)
        print "Pcoinc: ",Pcoinc
        print "Visibility: ", 1.-Pcoinc/0.5
        self.visibility= 1.-Pcoinc/0.5
        return 1.-Pcoinc/0.5
    def computeHeraldedPhotonPurity(self):
        # Purity of the heralded single photon: sum over |g(i,j)|^2 terms;
        # also stores self.purity and the Schmidt number self.schn = 1/P.
        lattice = self.normlattice
        N = self.N
        omega1 = zeros((N,N),int)
        omega2 = zeros((N,N),int)
        for i in range(N):
            omega1[:,i]= arange(N)
            omega2[i,:]= arange(N)
        purity = self.vectG(self,omega1,omega2,omega2,omega1).sum()
        self.purity = abs(purity)
        self.schn = 1/purity
        return abs(purity)
###
# -----------------------------------------------------------------------------#
# CLASS RingSimu
# -----------------------------------------------------------------------------#
# This class calculates the joint spectral distribution obtained in a ring
# resonator for a given set of parameters
# Init (
# * Cavirt length (Meters)
# * Pump wavelength (um)
# * Pulse duration (Seconds)
# * Repetition rate (Hz)
# * N: grid sampling (the JSA is stored in a NxN grid)
# * r: ring coupling (r = 1 means no coupling, while r = 0 means full coupling)
# * tau: round trip transmission which accounts for the loss in the ring resonator
# * ng: group index
# )
#
# setPumpToClosestRes(lambda) : Sets the pump to the closest resonance to the given wavelength
# setRangeScanResonance(p) : Sets the resonance to be used for collecting the idler photon. p is the resonance number.
# p = 0 is the same as the pump resonance
# p = +1 or -1 are the next nearest resonance to the pump
# p = +M or -M ....
#
# plotcavityresponse() : Shows the transmission spectrum of the cavity
# computeJS() : Does the simulation
#
# __applycavity__(lambda) : This is the function which applies the cavity. By default, this function applies a ring resonator.
# Different cavities can however be used.
# save(filename) : Saves the result of the simulation including all the parameters, the full state, and the derived parameters such as the Schmidt number
#
class RingSimu(Simu):
    """Simulate the joint spectral amplitude (JSA) of photon pairs generated
    in a ring resonator.

    The biphoton amplitude is accumulated on an N x N grid (idler x signal).
    Wavelengths are in um throughout; times are in seconds unless a comment
    says otherwise. Results can be saved to / reloaded from a text file.
    """
    def __init__(self,
                 length = 80., # um
                 pumpwl = 1.55,
                 pulseduration=1.*10**(-12),
                 N = 200,
                 r = 0.98,
                 tau = 1.0,
                 ng = 4.2): # 300*10**3 -> 300 kHz linewidth
        # Group index of the ring waveguide.
        self.ng = ng
        Simu.__init__(self,
                      pumpwl = pumpwl,pulseduration=pulseduration)
        self.lbda_p = pumpwl # in um # We take the cavity resonance wavelength equal to the pump central wavelength
        self.mpump = -1 # resonance number closest to the pump
        # Ring parameters
        self.L = length # Length of the ring in um
        self.r = r
        self.tau = tau # tau = 1. -> No loss
        #self.tau = self.r # critical coupling
        self.N = N
        self.lattice = zeros((N,N))
        # For loading purpose : Params
        self.purity = -1
        self.schn = -1
        self.geneeff = -1
        # Maps the parameter labels found in saved files to their setter
        # methods; used by load() to restore a simulation from disk.
        self.setters = {"Purity" : self.__setPurity__,
                        "Schmidt number" : self.__setSchn__,
                        "r" : self.__setr__,
                        "Nb pairs per pulse" : self.__setgeneeff__,
                        "Pulse duration (ps)" : self.__setT__ ,
                        "N" : self.__setN__,
                        }
        self.resonancenumber = 0 # Resonance scanned for signal
    # Setters when loading
    def __setPurity__(self,x):
        self.purity = x
    def __setSchn__(self,x):
        self.schn = x
    def __setr__(self,x):
        self.r = x
    def __setgeneeff__(self,x):
        self.geneeff = x
    def __setT__(self,x):
        self.T = x
    def __setN__(self,x):
        # Changing N also resizes the amplitude and phase grids.
        self.N = x
        self.lattice = zeros((x,x))
        self.phases = zeros((x,x))
    ###
    def setTau(self,x):
        self.tau = x
    def setr(self,x):
        self.r = x
    def setL(self,L):
        self.L = L
    ###
    def ring(self,lbda):
        # Complex field response of the ring at wavelength lbda (um).
        k = 2*pi*self.ng/(lbda)
        t = sqrt(1-self.r**2)
        tau = self.tau
        r = self.r
        return I*t/(1-tau*r*exp(I*k*self.L))
    def cavityTransmission(self,lbda):
        # Complex transmission of the bus waveguide coupled to the ring.
        t = sqrt(1-self.r**2)
        return self.r+I*t*self.ring(lbda)
    # Override these methods to add custom filters on signal and idler arm
    def filter_idler(self,lbda):
        return ones(lbda.size)
    def filter_signal(self,lbda):
        return ones(lbda.size)
    def __applycavity__(self,lbda):
        # Cavity-response hook used by computeJS(); defaults to the ring
        # response. Subclasses may override to model a different cavity.
        return self.ring(lbda)
    # Returns the closest cavity resonance for a given lambda and the resonance number
    def getClosestCavityRes(self,lbda):
        m = round(self.L*self.ng/lbda)
        kp0 = m*2*pi/self.L # target pump propagation constant
        lbda0 = 2*pi*self.ng/kp0
        return lbda0,m
    # Centers the pump on the closest cavity resonance
    def setPumpToClosestRes(self,lbda):
        self.lbda_p,self.mpump = self.getClosestCavityRes(lbda)
        print "Pump is set at %.7f um" % self.lbda_p
    # Get the range to scan for signal for the nth resonance with respect to the pump
    # Rq : The pump should have been set such that mpump has a meaningful value
    def getSignalRange(self,n):
        # Returns (min, max) wavelengths (um) spanning +/- 5 FWHM around the
        # nth resonance away from the pump.
        FWHM = (1-self.r*self.tau)*self.lbda_p**2/(self.ng*sqrt(2)*pi*self.L)
        print "FWHM (um) : ",FWHM
        fullrange = 5*FWHM #
        wlFSR = self.lbda_p**2/(self.L*self.ng) # FSR in lambda
        print "FSR (um) : ",wlFSR
        lbda_s,m = self.getClosestCavityRes(self.lbda_p+n*wlFSR)
        print "Resonance (um) : ",lbda_s
        return lbda_s-fullrange,lbda_s+fullrange
    def plotCavityResponse(self,albda = None):
        # Plot the cavity intensity response together with the (rescaled)
        # pump envelope and markers at the signal/idler wavelengths.
        lbda_i, mi = self.getClosestCavityRes(0.5 * (self.lbda_i_min+self.lbda_i_max))
        lbda_s = 1./(2./self.lbda_p-1./lbda_i)
        if albda == None:
            albda = arange(min(self.lbda_s_min,self.lbda_i_min),
                           max(self.lbda_s_max,self.lbda_i_max),0.0000001)
        cavity = self.__applycavity__(albda)*self.__applycavity__(albda).conjugate()
        pump = self.pumpenvelop(albda)**2
        signal_wl = funcpeak(albda,lbda_s)
        idler_wl = funcpeak(albda,lbda_i)
        plot([(albda,cavity,"-"),
              (albda,pump/pump.max()*cavity.max(),"-"),
              (albda,signal_wl/signal_wl.max()*cavity.max(),"r-"),
              (albda,idler_wl/idler_wl.max()*cavity.max(),"r-")
              ]) # Plot the pump normalised wrt the biggest field enhancement
    def setRangeJS(self,lbda_s_min,lbda_s_max,lbda_i_min,lbda_i_max):
        # Set the signal/idler wavelength windows (um) scanned by computeJS().
        self.lbda_s_min = lbda_s_min
        self.lbda_s_max = lbda_s_max
        self.lbda_i_min = lbda_i_min
        self.lbda_i_max = lbda_i_max
    def setRangeScanResonance(self,m):
        # Get the range for signal centered on the resonance
        lsm,lsM = self.getSignalRange(m)
        self.resonancenumber = m
        # Get the range for idler using rough energy conservation
        lp = self.lbda_p
        lim = 1./(2./lp - 1./lsM)
        liM = 1./(2./lp - 1./lsm)
        #print "%.2f %.2f ; %.2f %.2f (pm)" % (lsm*10**6,lsM*10**6,lim*10**6,liM*10**6)
        print lsm,lsM,lim,liM
        self.setRangeJS(lsm,lsM,lim,liM)
    def updatePumpRange(self):
        # Build self.pumprange: a wavelength grid covering at least 95% of
        # the pump pulse energy, clipped to the cavity region around the
        # pump resonance. Must be called before computeJS().
        print "Get pump range ..."
        lbda_p = self.lbda_p
        print lbda_p
        lbda_step= 0.00000001 # step for finding the pump range
        P = 0.
        targetfraction = 0.95
        deltalbda = 0.5*10**(-6) # initialize deltalbda at 1pm
        # Double the window until it contains the target energy fraction.
        while (P<targetfraction):
            deltalbda = 2*deltalbda
            lbdas = arange(lbda_p-deltalbda,lbda_p+deltalbda,lbda_step)
            #print P
            P=(self.pumpenvelop(lbdas)*self.pumpenvelop(lbdas).conjugate()).sum()*lbda_step
            print P
        print P
        N = self.nb_points_pump
        # get cavity range
        # If the pump is broader than the cavity, then we should chop the pump to the cavity region such that the grid is fine enough in the cavity
        # If the pump is narrower than the cavity, then keep pump range
        lsm,lsM = self.getSignalRange(0)
        rl = lsM-lsm
        lsm = lsm-rl/2
        lsM = lsM+rl/2
        lbdamax = min(lbdas[-1],lsM)
        lbdamin = max(lbdas[0],lsm)
        step = (lbdamax-lbdamin)/N # Step for the returned pump range
        res = arange(lbdamin,lbdamax,step)
        #print "Size of pump lbdas" ,lbdas.size
        #print self.pumpenvelop(lbda_p)
        self.pumprange = res
        print "Pump range : (um)",lbdas[0],lbdas[-1]
        return res
    def getjointproba(self):
        # Normalised joint spectral amplitude (sqrt of the probability grid).
        return self.normlattice
    def getjointprobascaled(self):
        # Same as getjointproba() but rescaled so the maximum is 1.
        return self.normlattice/self.normlattice.max()
    def computeJS(self, target_proba = 0.1): # begin=1.545,end=1.555,step=0.0001
        # Main simulation: integrate over the pump grid for every
        # (signal, idler) wavelength pair to build the JSA. Populates
        # self.lattice, self.normlattice, self.phases, self.Epulse and
        # self.extent. Requires updatePumpRange() and setRangeJS() /
        # setRangeScanResonance() to have been called first.
        xi = (1. - sqrt(1-4*target_proba)) / 2
        self.target_proba = target_proba
        L = self.L # Cavity length
        N = self.N
        lbda_s_min = self.lbda_s_min
        lbda_s_max = self.lbda_s_max
        lbda_i_min = self.lbda_i_min
        lbda_i_max = self.lbda_i_max
        step_i = (lbda_i_max-lbda_i_min)/N
        step_s = (lbda_s_max-lbda_s_min)/N
        a_lbda_i = arange(lbda_i_min,lbda_i_max,step_i)[0:N]
        a_lbda_s = arange(lbda_s_min,lbda_s_max,step_s)[0:N]
        Ni = a_lbda_i.size
        Ns = a_lbda_s.size
        print Ni, Ns
        Ni = N
        Ns = N
        self.step_i = step_i
        self.step_s = step_s
        rangepump = self.pumprange
        M = rangepump.size
        dlbda_pump = (rangepump.max()-rangepump.min())/M
        lattice = zeros((Ni,Ns))
        a_lbda_p1 = rangepump
        cav_resp_p1 = self.__applycavity__(a_lbda_p1)
        a_p1 = self.pumpenvelop(a_lbda_p1) # pump amplitude 1
        ng = self.ng
        print "Steps" ,step_i,step_s
        #dbgpm = 0.
        pumpmax = self.pumpenvelop(self.lbda_p)
        phases = zeros((Ni,Ns))
        for j in xrange(Ns):
            print j
            lbda_s = a_lbda_s[j] # lbda_s_min+j*step_s
            cav_resp_s = self.__applycavity__(lbda_s)
            for i in xrange(Ni):
                lbda_i = a_lbda_i[i] # lbda_i_min+i*step_i
                # Second pump wavelength fixed by energy conservation.
                a_lbda_p2 = 1./(1./lbda_s+1./lbda_i-1./a_lbda_p1)
                a_p2 = self.pumpenvelop(a_lbda_p2) # pump amplitude 2
                a_res = a_p1*a_p2*cav_resp_p1*self.__applycavity__(a_lbda_p2)* self.__applycavity__(lbda_i)*cav_resp_s #
                a_res = a_res * a_lbda_p2/a_lbda_p1
                # Multiply by the dlambda;
                # The pump function is i um^(-1/2), dlbda_pump is in um
                a_res = a_res*dlbda_pump
                res = a_res.sum() # unitless
                # Multiply by the dlambda
                # Since the formula was derived for domega, we have to remember that domega = -2*pi*c/lbda**2 * dlbda
                lattice[i,Ns-1-j]= abs(res.real**2+res.imag**2)* (step_i/(lbda_i**2)) * (step_s/(lbda_s**2))
                phases[i,Ns-1-j] = angle(res)
        # Multiply by the appropriate missing constants
        lattice = lattice*(c*self.gamma*(self.L))**2/(2*pi**2)
        Z = lattice.sum()# sqrt(abs(lattice*conjugate(lattice)).sum())
        self.normlattice = sqrt(abs(lattice/Z))
        self.lattice = lattice
        self.phases = phases
        beta = 2*lattice.sum()
        # Compute the energy required per pulse
        Epulse = arctanh(sqrt(xi))/sqrt(beta) # Approximation valid in the case of two-mode squeezer
        self.Epulse = Epulse
        # Theory calculation for CW regime for comparison
        vg = c/self.ng
        print "Epulse (nJ) ", self.Epulse*10**9
        print "gamma W-1,m-1", self.gamma
        print "L (um)", L
        print "T (ps)", self.T*10**12
        print "vg %e" % vg
        print "r : %.4f" % self.r
        print "tau : %.4f" % self.tau
        print "Siglbda : %.5f" % (self.siglbda)
        print "Nb pairs per pulse:",target_proba
        lbda_i0 = (lbda_i_max+lbda_i_min)/2
        lbda_s0 = (lbda_s_max+lbda_s_min)/2
        self.extent = list(array([lbda_i_min-lbda_i0,lbda_i_max-lbda_i0,lbda_s_min-lbda_s0,lbda_s_max-lbda_s0])*1000) # Check where should go i and s
    def getPhases(self):
        # Phase of the JSA on the same N x N grid as the amplitude.
        return self.phases
    def getAverageSpectra(self):
        # Marginal spectra: sums of the normalised JSA along each axis.
        return self.normlattice.sum(axis = 0),self.normlattice.sum(axis = 1)
    def save(self,directory="resonances_toshiba"):
        # Write all parameters, derived quantities and the raw amplitude and
        # phase grids to data\<directory>\simu_<timestamp>_....txt.
        # Returns the file name. NOTE: uses Windows-style path separators.
        timestamp = time.strftime("%m%d_%H%M",time.localtime(time.time()))
        # Create repository if it does not exist
        if not os.path.exists("data\\%s" % directory):
            os.makedirs("data\\%s" % directory)
        fname = "data\\%s\\simu_%s_r=%.3f_tau=%.3f_%.2fps_res=%d.txt" % (directory,timestamp,self.r,self.tau,self.T * 10**12,self.resonancenumber)
        # Header
        fw = open(fname,"w")
        fw.write("#Laser parameters\n")
        fw.write("%s : %.3f\n" % ("Pulse duration (ps)",self.T*10**12))
        fw.write("%s : %.18e\n" % ("Energy per pulse (uJ)",self.Epulse*1000000))
        fw.write("%s : %.6f\n" % ("Pump wavelength (um)",self.lbda_p))
        fw.write("\n#Waveguide parameters\n")
        fw.write("%s : %.3f\n" % ("gamma (W-1 m-1)",self.gamma))
        fw.write("\n#Ring parameters\n")
        fw.write("%s : %.3f\n" % ("Cavity length (um)",self.L))
        fw.write("%s : %.5f\n" % ("Tau",self.tau))
        fw.write("%s : %.5f\n" % ("r",self.r))
        fw.write("\n#BiPhoton state properties\n")
        fw.write("%s : %.5f\n" % ("Nb pairs per pulse",self.target_proba))
        self.computeHeraldedPhotonPurity()
        #self.computePcoincfrom2photons()
        #fw.write("%s : %.6f\n" % ("Visibility from two heralded sources",self.visibility))
        fw.write("%s : %.6f\n" % ("Schmidt number",abs(self.schn)))
        fw.write("%s : %.6f\n" % ("Purity",abs(1/self.schn)))
        # Theory calculation for CW regime for comparison
        vg = c/self.ng
        beta2 = self.gamma**2*(self.Epulse/self.T)**2*(self.L*10**(-6))/8 * vg*self.T*(1-self.r**2)**4/(1-self.r)**7
        fw.write("%s : %.5f\n" % ("Nb pairs(analytical CW)",beta2))
        fw.write("\n")
        fw.write("N=%d\n" % self.N)
        fw.write("Resonance number : %d\n" % self.resonancenumber)
        fw.write("\n#Scan range\n")
        fw.write("%s : %.6e - %.6e, %.6e\n" % ("idl min, idl max, step (um)",self.lbda_i_min,self.lbda_i_max,self.step_i))
        fw.write("%s : %.6e - %.6e, %.6e\n" % ("sig min, sig max, step (um)",self.lbda_s_min,self.lbda_s_max,self.step_s))
        fw.write("\n#Raw data Biphoton distribution\n")
        # Saves the joint spectrum
        for j in xrange(self.N):
            line = " ".join(("%.18e" % x) for x in self.lattice[:,self.N-1-j])
            fw.write(line+"\n")
        fw.write("\n#Raw data Biphoton phase distribution\n")
        # Saves the joint spectrum
        for j in xrange(self.N):
            line = " ".join(("%.18e" % x) for x in self.phases[:,self.N-1-j])
            fw.write(line+"\n")
        fw.close()
        return fname
    def load(self,fname):
        # Restore a simulation saved by save(): scan ranges, the amplitude
        # and phase grids, and any "name : value" parameter line that has a
        # matching entry in self.setters.
        print "Loading %s ..." % fname
        f = open(fname,"r")
        line = f.readline()
        while (len(line)>0):
            if line.startswith("#Scan range"):
                # Load the extent of the wavelength for signal and idler
                line = f.readline() # Readline for the idler
                self.lbda_i_min,self.lbda_i_max = parse_extent(line)
                line = f.readline() # Readline for the signal
                self.lbda_s_min,self.lbda_s_max = parse_extent(line)
                self.extent = [self.lbda_i_min,self.lbda_i_max,self.lbda_s_min,self.lbda_s_max] # Check where should go i and s
            if line.startswith("#Raw data Biphoton distribution"):
                # Load the biphoton distribution
                for j in xrange(self.N):
                    line = f.readline()
                    self.lattice[:,self.N-1-j] = parse_biphoton_data(line)
            if line.startswith("#Raw data Biphoton phase distribution"):
                # Load the biphoton phase distribution
                for j in xrange(self.N):
                    line = f.readline()
                    self.phases[:,self.N-1-j] = parse_biphoton_data(line)
            if line.find("#")>=0:
                l1 = line.split("#")[0]
            if line.find(":")>=0:
                line = line.replace("\n","")
                name,value = line.split(" : ")
                if name in self.setters.keys():
                    self.setters[name](float(value))
            elif line.startswith("N="):
                name,value = line.split("=")
                self.setters[name](int(value))
            line = f.readline()
        Z = self.lattice.sum()# sqrt(abs(lattice*conjugate(lattice)).sum())
        self.normlattice = sqrt(abs(self.lattice/Z))
        f.close()
def main():
    """Example driver: sweep ring parameters, compute the joint spectrum for
    each configuration, save the result and plot the biphoton distribution.
    """
    T = 5.                # pump pulse duration (ps)
    N = 100               # JSA grid size (N x N)
    r = 0.93              # ring self-coupling coefficient
    tau = 1.-0.0198       # round-trip transmission (loss)
    radius = 10.          # ring bend radius (um)
    coupling_length = 5.  # straight coupling section (um)
    lbda0= 1.55           # target pump wavelength (um)
    res_number = 1 # resonance number (pump resonance is 0).
    for res_number in [1]:
        for r in [0.9]: # [0.95,0.96,0.97,0.98,0.99]: # 0.85,0.86,0.87,0.88,0.89,0.90,0.91,0.92,0.93,0.94,0.95,0.96
            for tau in [0.997]: # 0.76,0.96,0.98
                #for r2 in [0.9998,0.9997,0.9996,0.9995,0.9994]: #[1.0,0.9999,0.999,0.99]:
                mySim = RingSimu(length = 2*(radius*pi+coupling_length),
                                 pulseduration = T*10**(-12),
                                 N = N,
                                 r = r,
                                 tau = tau,
                                 pumpwl = lbda0) # 500
                mySim.setPumpToClosestRes(lbda0)
                mySim.setRangeScanResonance(+res_number)
                #mySim.plotCavityResponse()
                # BUG FIX: the method is named updatePumpRange; the previous
                # mis-cased call mySim.updatepumprange() raised AttributeError.
                mySim.updatePumpRange()
                mySim.computeJS()
                fname = mySim.save("Ring_pumpscan")
                mySim.plotBiphoton()
                #mySim.plotBiphoton(fname[:-3]+"png")
if __name__ == "__main__":
    main()
|
C# | UTF-8 | 853 | 2.90625 | 3 | [] | no_license | private void CopyFiles()
{
    // NOTE(review): despite the name, this MOVES the selected items — it does
    // not copy them. Consider renaming to MoveFilesToNewFolder.
    // Create "New Folder" next to the first selected item...
    var folder = Path.GetDirectoryName(SelectedItemPaths.First());
    var newFolder = Path.Combine(folder, "New Folder");
    // CreateDirectory is a no-op if "New Folder" already exists, so repeated
    // invocations merge items into the same folder.
    Directory.CreateDirectory(newFolder);
    // ...move every selected item into it (same file name, new directory).
    foreach (var path in SelectedItemPaths)
    {
        var newPath = Path.Combine(newFolder, Path.GetFileName(path));
        File.Move(path, newPath);
    }
    // Finally open Explorer with the new folder in rename mode.
    RenameInExplorer(newFolder);
}
/// <summary>
/// Opens an Explorer window on the item's parent folder and selects the item.
/// The flags value 1 passed to SHOpenFolderAndSelectItems appears to be
/// OFASI_EDIT (put the selection into rename mode) — confirm against the
/// Win32 declaration in this project.
/// </summary>
private static void RenameInExplorer(string itemPath)
{
    // Build shell item ID lists (PIDLs) for the parent folder and the item.
    IntPtr folder = Win32.ILCreateFromPath(Path.GetDirectoryName(itemPath));
    IntPtr file = Win32.ILCreateFromPath(itemPath);
    try
    {
        Win32.SHOpenFolderAndSelectItems(folder, 1, new[] { file }, 1);
    }
    finally
    {
        // PIDLs are unmanaged shell allocations; always release them,
        // even if SHOpenFolderAndSelectItems throws.
        Win32.ILFree(folder);
        Win32.ILFree(file);
    }
}
|
JavaScript | UTF-8 | 849 | 3.109375 | 3 | [] | no_license | var url = "https://randomuser.me/api/";
// randomuser.me returns one randomly generated user profile per request.
var btn = document.querySelector("#btn");
// On each click: fetch a fresh random user, validate the HTTP status,
// decode the JSON body and render it into the profile card; any failure
// along the chain is routed to printError.
btn.addEventListener("click", () => {
  fetch(url)
    .then(handleErrors)
    .then(parseJSON)
    .then(updateProfile)
    .catch(printError);
});
// Report a failure from anywhere in the fetch/render chain.
function printError(err) {
  const message = `ERROR: ${err}`;
  console.log(message);
}
// Promise-chain guard: pass successful responses through unchanged and turn
// HTTP error statuses into rejections.
// @param  {Response} res - fetch() response; res.ok is true for 2xx only.
// @return {Response} the same response, for the next .then() in the chain.
// @throws {Error} carrying the real HTTP status code when res.ok is false.
function handleErrors(res) {
  if (!res.ok) {
    // BUG FIX: previously threw a hard-coded Error(404) regardless of the
    // actual failure; surface the real status code instead.
    throw new Error(res.status);
  }
  return res;
}
// Decode the HTTP response body as JSON (returns the promise from res.json()).
function parseJSON(response) {
  const bodyPromise = response.json();
  return bodyPromise;
}
// Render one user record into the profile card in the DOM.
// `res` is the decoded randomuser.me payload; the user lives in results[0].
function updateProfile(res) {
  var user = res.results[0];
  document.querySelector("#avatar").src = user.picture.medium;
  document.querySelector("#fullname").innerHTML =
    user.name.first + " " + user.name.last;
  document.querySelector("#email").innerHTML = user.email;
  document.querySelector("#city").innerHTML = user.location.city;
  document.querySelector("#username").innerHTML = user.login.username;
}
|
C# | UTF-8 | 2,314 | 2.515625 | 3 | [] | no_license | using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.SceneManagement;
using UnityEngine.UI;
/// <summary>
/// Central game-state controller: tracks Playing/GameOver, the elapsed-time
/// HUD, the destroyed-tank counter and pause toggling.
/// </summary>
public class GameManager : MonoBehaviour
{
    // Global pause flag, readable from any script via GameManager.isPaused.
    public static bool isPaused;
    public MenuUI menu;             // pause-menu UI controller
    public Text tanksDestroyedTxt;  // HUD counter of destroyed enemy tanks
    public float GameTimer;         // elapsed play time in seconds
    public Text timeTxt;            // HUD "m : ss" timer display
    private int enemyTanksDestroyed = 0;
    public Text gameOverTxt;        // end-of-game message (empty while playing)
    public enum GameState
    {
        Playing,
        GameOver
    };
    private GameState gameState;
    // Reset all session state when the scene loads.
    void Awake()
    {
        isPaused = false;
        gameState = GameState.Playing;
        tanksDestroyedTxt.text = enemyTanksDestroyed.ToString();
        gameOverTxt.text = "";
    }
    // Per-frame state dispatch plus the Escape pause toggle.
    void Update()
    {
        switch (gameState)
        {
            case GameState.Playing:
                GSPlaying();
                break;
            case GameState.GameOver:
                GSGameOver();
                break;
        }
        if (Input.GetKeyUp(KeyCode.Escape))
        {
            //Application.Quit();
            TogglePause();
        }
    }
    /// <summary>
    /// Called when a tank is destroyed. If the player died the game ends and
    /// the final score message is shown; otherwise the enemy counter and HUD
    /// are updated.
    /// </summary>
    public void Destroyed(bool isPlayer)
    {
        bool gameOver = false;
        string message = "";
        if (isPlayer)
        {
            message = "Game Over, You Destroyed " + enemyTanksDestroyed + " Enemy Tanks";
            gameOver = true;
        }
        else
        {
            enemyTanksDestroyed ++;
            tanksDestroyedTxt.text = enemyTanksDestroyed.ToString();
            Debug.Log("Tanks Destroyed: " + enemyTanksDestroyed);
        }
        if (gameOver)
        {
            gameState = GameState.GameOver;
            gameOverTxt.text = message;
        }
    }
    // Playing state: advance the timer and refresh the "m : ss" display.
    // NOTE(review): Debug.Log here runs every frame — consider removing for
    // release builds, as per-frame logging is costly.
    void GSPlaying()
    {
        Debug.Log("Playing State");
        GameTimer += Time.deltaTime;
        int minutes = Mathf.FloorToInt(GameTimer / 60);
        int seconds = Mathf.FloorToInt(GameTimer % 60);
        timeTxt.text = string.Format("{0:0} : {1:00}", minutes, seconds);
    }
    // GameOver state: wait for Enter/Return, then reload the "Game" scene.
    void GSGameOver()
    {
        Debug.Log("Game Over State");
        if (Input.GetKeyUp(KeyCode.Return) == true)
        {
            Debug.Log("Restarting");
            SceneManager.LoadScene("Game");
        }
    }
    // Flip the pause flag and show/hide the pause menu accordingly.
    public void TogglePause()
    {
        isPaused = !isPaused;
        menu.PauseGame(isPaused);
    }
}
|
PHP | UTF-8 | 2,273 | 2.65625 | 3 | [] | no_license | <?php
namespace app\components;
use Yii;
use app\models\App;
use yii\base\Component;
/**
* Class AppService
*
* @package app\components
* @author Jerry Hsia<jerry9916@qq.com>
*/
class AppService extends Component
{
    // Cache key under which the full, id-indexed app list is stored.
    const CACHE_APPS = 'cache_apps';
    /**
     * Save an app (create or update) and invalidate the app cache on success.
     *
     * @param App $app
     * @param array $attributes attribute map; may contain 'role_ids'
     * @return bool
     */
    public function saveApp (App $app, array $attributes)
    {
        // Massive assignment without validation scenarios (second arg false).
        $app->setAttributes($attributes, false);
        $app->tempRoleIds = isset($attributes['role_ids']) ? $attributes['role_ids'] : [];
        $result = $app->save();
        if ($result) {
            $this->clearAppCache();
        }
        return $result;
    }
    /**
     * Get an app by id or name.
     *
     * Numeric arguments are treated as ids (direct index lookup); anything
     * else is matched against the 'name' attribute with a linear scan.
     *
     * @param $idOrName
     * @return null|\yii\db\ActiveRecord
     */
    public function getApp($idOrName)
    {
        $apps = $this->getApps();
        if (is_numeric($idOrName)) {
            return isset($apps[$idOrName]) ? $apps[$idOrName] : null;
        } else {
            foreach ($apps as $id => $app) {
                if ($app->getAttribute('name') == $idOrName) {
                    return $app;
                }
            }
        }
        return null;
    }
    /**
     * Get multiple apps, served from cache when available.
     *
     * @param bool $isIndexed whether indexed the result by id
     * @return array|mixed|\yii\db\ActiveRecord[]
     */
    public function getApps($isIndexed = true)
    {
        $apps = null;
        if (Yii::$app->cache->exists(self::CACHE_APPS)) {
            $apps = Yii::$app->cache->get(self::CACHE_APPS);
        } else {
            // Cache miss: load all apps indexed by id and prime the cache.
            $apps = App::find()->indexBy('id')->all();
            Yii::$app->cache->set(self::CACHE_APPS, $apps);
        }
        return $isIndexed ? $apps : Clever::removeIndex($apps);
    }
    /**
     * Clear cached app data
     */
    private function clearAppCache()
    {
        Yii::$app->cache->delete(self::CACHE_APPS);
    }
    /**
     * Delete an app and invalidate the app cache on success.
     *
     * @param App $app
     * @return bool
     * @throws \Exception
     */
    public function deleteApp(App $app)
    {
        // delete() returns the number of deleted rows or false on failure;
        // normalise that to a plain boolean.
        $result = $app->delete() === false ? false : true;
        if ($result) {
            $this->clearAppCache();
        }
        return $result;
    }
}
|
PHP | UTF-8 | 682 | 2.546875 | 3 | [] | no_license | <!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
<html xmlns="http://www.w3c.org/1999/xhtml" xml:lang="en" lang="en">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<title>Preselecting</title>
</head>
<body><!-- BUG FIX: the opening <body> tag was missing (the </body> below was unmatched) -->
<!-- BUG FIX: the form action evaluated PHP_SELF without echoing it, so the
     attribute was always empty; it is now echoed and escaped with
     htmlspecialchars() to block reflected XSS via the request URI. -->
<form action="<?php echo htmlspecialchars($_SERVER['PHP_SELF']); ?>" method="post">
Tempat Lahir :
<select name="menu">
<!-- BUG FIX: <option> is a container element — the previous self-closing
     form (<option ... />text) was invalid markup, and selected="yes" is not
     a valid value for the selected attribute. -->
<option value="Rumah sakit" selected="selected">Rumah sakit</option>
<option value="Rumah bersalin">Rumah bersalin</option>
<option value="Rumah bidan">Rumah bidan</option>
<option value="Rumah ibu">Rumah ibu</option>
</select>
<br />
<input type="submit" value="OK" />
</form>
<?php
// Echo the submitted choice back after the form posts to this same page.
// htmlspecialchars() guards against reflected XSS from the user value.
if (isset($_POST['menu'])){
	echo htmlspecialchars($_POST['menu']);
}
?>
</body>
</html> |
TypeScript | UTF-8 | 5,361 | 2.71875 | 3 | [] | no_license | // Steal from https://github.com/brunosimon/folio-2019
/**
 * Minimal namespaced event emitter.
 *
 * Event names may carry a namespace suffix ("resize.window"); events
 * registered without one live in the "base" namespace. trigger() returns the
 * first defined value returned by a matching callback.
 */
export default class {
  // callbacks[namespace][eventName] -> array of registered callbacks.
  callbacks: any;

  constructor() {
    this.callbacks = {}
    this.callbacks.base = {}
  }

  // event.on()
  // Register `callback` for one or more space/comma-separated event names.
  // Returns `this` for chaining, or false on invalid arguments.
  on(_names, callback) {
    // Scope
    const _this = this

    // Name error
    if (typeof _names === 'undefined' || _names === '') {
      console.error('[on] wrong event names')
      return false
    }

    // Callback error
    if (typeof callback === 'undefined') {
      console.error('[on] wrong event callback')
      return false
    }

    // Resolve names
    const names = this.resolveNames(_names)

    // Each name
    names.forEach(function (_name) {
      // Resolve name
      const name = _this.resolveName(_name)

      // Create namespace if not exist
      if (!(_this.callbacks[name.namespace] instanceof Object)) {
        _this.callbacks[name.namespace] = {}
      }

      // Create callback if not exist
      if (!(_this.callbacks[name.namespace][name.value] instanceof Array)) {
        _this.callbacks[name.namespace][name.value] = []
      }

      // Add callback
      _this.callbacks[name.namespace][name.value].push(callback)
    })

    return this
  }

  // event.off()
  // Unregister callbacks: "name" removes the event from every namespace,
  // ".namespace" drops a whole namespace, "name.namespace" removes one event
  // from one namespace. Returns `this`, or false on invalid arguments.
  off(_names) {
    // Scope
    const _this = this

    // Name error
    if (typeof _names === 'undefined' || _names === '') {
      console.error('[off] wrong event name')
      return false
    }

    // Resolve names
    const names = this.resolveNames(_names)

    // Each name
    names.forEach(function (_name) {
      // Resolve name
      const name = _this.resolveName(_name)

      // Remove namespace
      if (name.namespace !== 'base' && name.value === '') {
        delete _this.callbacks[name.namespace]
      }

      // Remove specific callback in namespace
      else {
        // Default
        if (name.namespace === 'base') {
          // Try to remove from each namespace
          for (const namespace in _this.callbacks) {
            if (
              _this.callbacks[namespace] instanceof Object &&
              _this.callbacks[namespace][name.value] instanceof Array
            ) {
              delete _this.callbacks[namespace][name.value]

              // Remove namespace if empty
              if (Object.keys(_this.callbacks[namespace]).length === 0) {
                delete _this.callbacks[namespace]
              }
            }
          }
        }

        // Specified namespace
        else if (
          _this.callbacks[name.namespace] instanceof Object &&
          _this.callbacks[name.namespace][name.value] instanceof Array
        ) {
          delete _this.callbacks[name.namespace][name.value]

          // Remove namespace if empty
          if (Object.keys(_this.callbacks[name.namespace]).length === 0) {
            delete _this.callbacks[name.namespace]
          }
        }
      }
    })

    return this
  }

  // event.trigger()
  // Invoke every callback registered for the (single) event name, passing
  // `_args` as the argument list. Returns the first defined value returned
  // by a callback, or undefined when none produced one.
  trigger(_name, _args?) {
    // Scope
    const _this = this

    // Errors
    if (typeof _name === 'undefined' || _name === '') {
      console.error('[trigger] wrong event name')
      return false
    }

    // BUG FIX: finalResult must start out undefined. It was initialised to
    // null, so the `typeof finalResult === 'undefined'` checks below could
    // never pass and trigger() always returned null instead of the first
    // callback's return value.
    let finalResult: any = undefined
    let result = null

    // Default args
    const args = !(_args instanceof Array) ? [] : _args

    // Resolve names (should on have one event)
    let name = this.resolveNames(_name)

    // Resolve name
    name = this.resolveName(name[0])

    // Default namespace
    if (name.namespace === 'base') {
      // Try to find callback in each namespace
      for (const namespace in _this.callbacks) {
        if (
          _this.callbacks[namespace] instanceof Object &&
          _this.callbacks[namespace][name.value] instanceof Array
        ) {
          _this.callbacks[namespace][name.value].forEach(function (callback) {
            result = callback.apply(_this, args)

            if (typeof finalResult === 'undefined') {
              finalResult = result
            }
          })
        }
      }
    }

    // Specified namespace
    else if (this.callbacks[name.namespace] instanceof Object) {
      if (name.value === '') {
        console.error('[trigger] wrong event name')
        return this
      }

      // BUG FIX: guard against triggering an event that has no callbacks
      // registered in this namespace — previously this crashed calling
      // forEach on undefined.
      if (_this.callbacks[name.namespace][name.value] instanceof Array) {
        _this.callbacks[name.namespace][name.value].forEach(function (callback) {
          result = callback.apply(_this, args)

          if (typeof finalResult === 'undefined') {
            finalResult = result
          }
        })
      }
    }

    return finalResult
  }

  // Resolve events / callbacks names
  // Normalises a raw names string into an array of "name.namespace" tokens.
  resolveNames(_names) {
    let names = _names
    names = names.replace(/[^a-zA-Z0-9 ,/.]/g, '')
    names = names.replace(/[,/]+/g, ' ')
    names = names.split(' ')

    return names
  }

  //Resolve event / callback name
  // Splits "value.namespace" into its parts; namespace defaults to "base".
  resolveName(name) {
    const newName: {original?: string, value?: string, namespace?: string} = {}
    const parts = name.split('.')

    newName.original = name
    newName.value = parts[0]
    newName.namespace = 'base' // Base namespace

    // Specified namespace
    if (parts.length > 1 && parts[1] !== '') {
      newName.namespace = parts[1]
    }

    return newName
  }
}
|
Markdown | UTF-8 | 4,639 | 3 | 3 | [] | no_license | # 201 Introduction
## Overview
In this lab we are going to productionise a k8s cluster using the kops deployment tool. kops is a tool to create production clusters and is a single binary, much like the minikube tool we used to build a local cluster. kops will provision a k8s cluster for us in AWS and set up the PKI between clients and nodes, security groups, EBS volumes and more; it can also create a VPC, subnets, etc. It'll also set up our local ```kubectl```. Most guides use kops in its default mode; in this guide we'll dig a little deeper and deploy our cluster into existing AWS infrastructure. The default behaviour for kops is to create a new VPC, subnets, security groups, NAT Gateways and the other resources required. However, in this lab we are going to use an existing VPC that has subnets configured and NAT Gateways deployed.
### kops
kops helps you create, destroy, upgrade and maintain production-grade, highly available, Kubernetes clusters from the command line. AWS (Amazon Web Services) is currently officially supported, with GCE and VMware vSphere in alpha and other platforms planned.
kops = Kluster Operations
#### Features
- Automates the provisioning of Kubernetes clusters in (AWS)
- Deploys Highly Available (HA) Kubernetes Masters
- Supports upgrading from kube-up
- Built on a state-sync model for dry-runs and automatic idempotency
- Ability to generate configuration files for AWS CloudFormation and Terraform Terraform configuration
- Supports custom Kubernetes add-ons
- Command line autocompletion
- Manifest Based API Configuration
- Community supported!
[Offical page here](https://github.com/kubernetes/kops)
#### Deployment Terminology
When a kops cluster is deployed in High Availibily mode we end up with three master servers and a set of nodes. In kops terms these are known as InstanceGroups (IG's). The IG's map onto amazon autoscale groups. When we spin our cluster up we will see 3 separate master IG's and a Nodes IG.
##### Master IG's/ASG's
Each master is deployed into a different AZ and has its own ASG of 1 server. This allows for you to easily replace a master if it crashes, think of the ASG more as auto healing rather than scaling in this case.
##### Nodes IG/ASG
Another default is to have an ASG for the nodes. This ASG spans all three AZ's, so it starts nodes and spreads them out across the three AZs, which is good for availability and stability.
##### Extra Nodes
You can of course tell kops to deploy N nodes in the main node IG, but can also use other IG's. The advantage of being able to create extra IG's comes into play when you label or taint your nodes. It's possible for example to create an extra set of IG's that are dedicated for database container use and use io1 EBS volumes by default for performance. Or you may want a set of nodes with GPU's attached for rendering or computational analytics.
##### Configuration
When you run the kops create command it stores its configuration and state in a named S3 bucket within AWS. A single S3 bucket can be used for multiple deployments, as the configuration is kept under the cluster name within the bucket. The directory/key structure of the bucket is as follows:
```
- clustername
-- addons # contains yaml configs for k8s components
-- instancegroup # instance group configs (ami, disk etc etc)
-- pki # ssh pub keys, client and node certs
-- secrets # k8s secret configs
-- cluster.spec # kops cluster config including docker settings etc
-- config # kops cluster config with main tunable options
```
### Reference Architecture
The diagrams below show a typical deployment of kops when using a prebuilt VPC. We stick to best practice and only deploy instances in private subnets, with ingress to the pods coming through ELB's deployed in the public subnets. This type of configuration does require you to have access to the private networking via either Direct Connect or a VPN in order to access the API server with ```kubectl```.

(Figure 1: AWS reference deployment Architecture)
## Exercises
- Lab 1: [Installing kops](/kubernetes-201/labs/00-install-kops.md)
- Lab 2: [Deploy a cluster](/kubernetes-201/labs/01-deploy-cluster.md)
- Lab 3: [Addons](/kubernetes-201/labs/02-addons.md)
- Lab 4: [Deploy a Stateless Application](/kubernetes-201/labs/03-deploy-service.md) | [Deploy a Stateful Application](/kubernetes-201/labs/03-deploy-stateful-service.md)
- Lab 5: [Upgrade a cluster](/kubernetes-201/labs/04-upgrading.md)
##### Labs : [kubernetes-101](/kubernetes-101/) | [kubernetes-201](/kubernetes-201/) | [kubernetes-301](/kubernetes-301/)
|
C++ | UTF-8 | 491 | 2.703125 | 3 | [] | no_license | #pragma once
#define DEBUG 1
// Solver for the N-queens counting problem: counts every valid placement of
// m_queen queens on an m_queen x m_queen board. Implementations live in the
// corresponding .cpp (not shown here).
class ChessBoard
{
public:
	ChessBoard(int queen);
	ChessBoard() { ; }
	~ChessBoard() { ; }
	// Print the board / solution count (format defined in the .cpp).
	void OutPut();
	// Place a queen on `row`; presumably recurses over rows (backtracking
	// entry point — confirm against the implementation).
	void PlaceQueen(int row);
	//how many solutions have been found
	const int GetSlnNum()const;
	//If (row,col) can place a queen
	bool Check(int row, int col);
	//If (row,col) is in the correct range
	bool ifInside(int row, int col);
private:
	int m_queen;//the number of the queens
	int m_sln;//the number of solutions found so far
	// NOTE(review): raw pointer to the board with an empty destructor and no
	// copy/move control (Rule of Three) — verify where this is allocated and
	// released in the .cpp; copying a ChessBoard would alias the buffer.
	bool* m_pChessboard;
};
|
Go | UTF-8 | 500 | 3.09375 | 3 | [] | no_license | package space
type Planet string
// Age converts an age measured in seconds into years on the given planet,
// using each planet's orbital period expressed in Earth years (one Earth
// year being 31557600 seconds). Unknown planets fall back to an Earth year,
// matching the original switch's default behaviour.
func Age(seconds float64, planet Planet) float64 {
	const earthYearSeconds = 31557600
	orbitalPeriods := map[Planet]float64{
		"Earth":   1,
		"Mercury": 0.2408467,
		"Venus":   0.61519726,
		"Mars":    1.8808158,
		"Jupiter": 11.862615,
		"Saturn":  29.447498,
		"Uranus":  84.016846,
		"Neptune": 164.79132,
	}
	period, known := orbitalPeriods[planet]
	if !known {
		period = 1
	}
	return seconds / (earthYearSeconds * period)
}
|
Markdown | WINDOWS-1251 | 3,448 | 2.640625 | 3 | [] | no_license | - (Homo sapiens)
': , , , , , , .
: , ., . ( ), ' , , .
: , , , , . , . .
: , , , ( ).
: 1) , .
2) , .
SOLID
S - , , - . .
O - , . , , .
L - , - , - (, , , ) ( ).
I - (, ), , , , .
D - : , , , . - .
|
Java | UTF-8 | 347 | 2.96875 | 3 | [] | no_license | class Solution {
public String reverseStr(String s, int k) {
StringBuilder res = new StringBuilder();
char[] str = s.toCharArray();
for(int i = 0; i < s.length(); i++) {
if((i / k) % 2 == 0) res.insert(i - i % k, str[i]);
else res.append(str[i]);
}
return res.toString();
}
} |
Java | UTF-8 | 1,172 | 2.171875 | 2 | [
"Apache-2.0"
] | permissive | package com.salesmanager.core.business.services.reference.country;
import java.util.List;
import java.util.Map;
import com.salesmanager.core.business.exception.ServiceException;
import com.salesmanager.core.business.services.common.generic.SalesManagerEntityService;
import com.salesmanager.core.model.reference.country.Country;
import com.salesmanager.core.model.reference.country.CountryDescription;
import com.salesmanager.core.model.reference.language.Language;
/**
 * Service layer for {@link Country} reference data: lookups by code, cached
 * lists per language, and country/zone aggregation.
 */
public interface CountryService extends SalesManagerEntityService<Integer, Country> {

	/** Returns the country matching the given ISO code. */
	Country getByCode(String code) throws ServiceException;

	/** Attaches a localized description to the given country. */
	void addCountryDescription(Country country, CountryDescription description) throws ServiceException;

	/** Returns all countries with descriptions in the given language. */
	List<Country> getCountries(Language language) throws ServiceException;

	/** Returns all countries for the given language, keyed by ISO code. */
	Map<String, Country> getCountriesMap(Language language)
			throws ServiceException;

	/** Returns the countries matching the given ISO codes, in the given language. */
	List<Country> getCountries(List<String> isoCodes, Language language)
			throws ServiceException;

	/**
	 * List country - zone objects by language
	 * @param language
	 * @return
	 * @throws ServiceException
	 */
	List<Country> listCountryZones(Language language) throws ServiceException;
}
|
PHP | UTF-8 | 2,817 | 2.703125 | 3 | [
"MIT"
] | permissive | <?php
/**
* Event List Widget: Standard List
*
* The template is used for displaying the [eo_event] shortcode *unless* it is wrapped around a placeholder: e.g. [eo_event] {placeholder} [/eo_event].
*
* You can use this to edit how the output of the eo_event shortcode. See http://docs.wp-event-organiser.com/shortcodes/events-list
* For the event list widget see widget-event-list.php
*
* For a list of available functions (outputting dates, venue details etc) see http://codex.wp-event-organiser.com/
*
***************** NOTICE: *****************
* Do not make changes to this file. Any changes made to this file
* will be overwritten if the plug-in is updated.
*
* To overwrite this template with your own, make a copy of it (with the same name)
* in your theme directory. See http://docs.wp-event-organiser.com/theme-integration for more information
*
* WordPress will automatically prioritise the template in your theme directory.
***************** NOTICE: *****************
*
* @package Event Organiser (plug-in)
* @since 1.7
*/
global $eo_event_loop,$eo_event_loop_args;

//The list ID / classes
$id = ( $eo_event_loop_args['id'] ? 'id="'.$eo_event_loop_args['id'].'"' : '' );
$classes = $eo_event_loop_args['class'];

// BUG FIX: initialise the date-header tracker. It was previously read before
// any assignment (PHP undefined-variable notice on the first iteration);
// null never equals a real 'Y-m-d' string, so the first event always gets a
// date header, exactly as intended.
$last_date = null;
?>
<?php if ( $eo_event_loop->have_posts() ) : ?>
	<table <?php echo $id; ?> class="hcalendar event-list <?php echo esc_attr( $classes );?>" >
	<?php while ( $eo_event_loop->have_posts() ) : $eo_event_loop->the_post(); ?>
		<?php /* Emit a date header whenever the event date changes. */ ?>
		<?php $current_date = eo_get_the_start('Y-m-d') ?>
		<?php if($current_date !== $last_date) : ?>
		<tr>
			<th colspan="2"><h3><?php print eo_get_the_start('M j, Y') ?></h3></th>
		</tr>
		<?php endif; ?>
		<?php
			//Generate HTML classes for this event
			$eo_event_classes = eo_get_event_classes();
			//For non-all-day events, include time format
			$format = eo_get_event_datetime_format();
		?>
		<?php // BUG FIX: the event classes were echoed outside the class
		      // attribute (class="event-row h-event" foo bar" >), producing
		      // invalid markup; they now live inside the attribute. ?>
		<tr class="event-row h-event <?php echo esc_attr( implode( ' ', $eo_event_classes ) ); ?>">
			<td class="time-col">
				<a class="u-url" href="<?php echo eo_get_permalink(); ?>"><time class="dt-start" datetime="<?php print eo_get_the_start( 'c' ) ?>"><?php print eo_get_the_start( 'g:iA' ) ?></time> -
				<time class="dt-end" datetime="<?php print eo_get_the_end( 'c' ) ?>"><?php print eo_get_the_end( 'g:iA' ) ?></time></a>
			</td>
			<td class="name-col">
				<a class="p-name" href="<?php echo eo_get_permalink(); ?>"><?php the_title(); ?></a>
			</td>
		</tr>
		<?php $last_date = $current_date; ?>
	<?php endwhile; ?>
	</table>
<?php elseif ( ! empty( $eo_event_loop_args['no_events'] ) ) : ?>
	<?php // BUG FIX: $id already contains the full id="..." fragment (see top
	      // of file), so wrapping it in another id="" attribute produced
	      // id="id=&quot;...&quot;". Echo it bare, as the <table> above does. ?>
	<div <?php echo $id; ?> class="<?php echo esc_attr( $classes );?>" >
		<span class="eo-no-events" > <?php echo $eo_event_loop_args['no_events']; ?> </span>
	</div>
<?php endif;
Swift | UTF-8 | 843 | 2.671875 | 3 | [] | no_license | //
// FeedPostRequest.swift
// PlayerAuthMe
//
// Created by Michael Green on 27/08/2015.
// Copyright (c) 2015 Michael Green. All rights reserved.
//
import Foundation
/// Request object for posting to the feed; exposes a chainable
/// success/failure callback API on top of PlayerMeRequest.
class FeedPostRequest: PlayerMeRequest {

    typealias FeedPostSuccessResponse = () -> ()

    /// Handler invoked when the request completes successfully.
    private var successClosure: FeedPostSuccessResponse?

    override init(URL: NSURL) {
        super.init(URL: URL)
    }

    required init(coder aDecoder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    /// Registers the success handler; returns self so calls can be chained.
    func onSuccess(response: FeedPostSuccessResponse) -> FeedPostRequest {
        successClosure = response
        return self
    }

    /// Invokes the registered success handler, if one was set.
    func performSuccess() {
        guard let success = successClosure else { return }
        success()
    }

    /// Registers the failure handler; returns self so calls can be chained.
    override func onFailure(response: RequestFailureResponse) -> FeedPostRequest {
        failureClosure = response
        return self
    }
}
Swift | UTF-8 | 1,122 | 2.859375 | 3 | [] | no_license | //
// Profile.swift
// Wimme
//
// Created by Młody-MBP on 18.11.2017.
// Copyright © 2017 doThisApp. All rights reserved.
//
import Foundation
/// Snapshot of a user profile. Every field is publicly readable but can
/// only be set from within the type itself.
struct Profile {

    private(set) public var username: String?
    private(set) public var firstName: String?
    private(set) public var lastName: String?
    private(set) public var email: String?
    private(set) public var numberOfVotes: Int?
    private(set) public var rating: Float?
    private(set) public var frequency: Int?
    private(set) public var photoUrl: String?
    private(set) public var description: String?

    /// All arguments default to empty strings / zeros, so `Profile()`
    /// yields a blank profile rather than nil fields.
    init(username: String? = "", firstName: String? = "", lastName: String? = "", email: String? = "", numberOfVotes: Int? = 0, rating: Float? = 0, frequency: Int? = 0, photoUrl: String? = "", description: String? = "") {
        // Identity fields
        self.username = username
        self.firstName = firstName
        self.lastName = lastName
        self.email = email
        // Activity / rating fields
        self.numberOfVotes = numberOfVotes
        self.rating = rating
        self.frequency = frequency
        // Presentation fields
        self.photoUrl = photoUrl
        self.description = description
    }
}
Java | UTF-8 | 3,360 | 2.0625 | 2 | [] | no_license | // Decompiled by Jad v1.5.8g. Copyright 2001 Pavel Kouznetsov.
// Jad home page: http://www.kpdus.com/jad.html
// Decompiler options: packimports(3)
// Source File Name: TXTBase.java
package org.xbill.DNS;
import java.io.IOException;
import java.util.*;
// Referenced classes of package org.xbill.DNS:
// Record, TextParseException, Tokenizer, DNSInput,
// DNSOutput, Name, Compression
/**
 * Common base for record types (TXT, SPF, ...) whose rdata is a sequence
 * of DNS "character-strings": length-prefixed byte arrays of at most 255
 * bytes each. Recovered from a decompiled class; decompiler artifacts
 * (redundant double casts, empty-body loops, StringBuffer) cleaned up.
 */
abstract class TXTBase extends Record
{

    private static final long serialVersionUID = 0x54043d75L;

    /** The character-strings, each stored as a raw byte array. Kept as a
     *  raw List for source compatibility with existing subclasses. */
    protected List strings;

    protected TXTBase()
    {
    }

    protected TXTBase(Name name, int type, int dclass, long ttl)
    {
        super(name, type, dclass, ttl);
    }

    /** Convenience constructor for a record holding a single string. */
    protected TXTBase(Name name, int type, int dclass, long ttl, String string)
    {
        this(name, type, dclass, ttl, Collections.singletonList(string));
    }

    /**
     * Builds a record from a list of Strings.
     * @throws IllegalArgumentException if the list is null or any element
     *         is not a valid character-string (too long / bad escape).
     */
    protected TXTBase(Name name, int type, int dclass, long ttl, List list)
    {
        super(name, type, dclass, ttl);
        if (list == null)
            throw new IllegalArgumentException("strings must not be null");
        strings = new ArrayList(list.size());
        try
        {
            for (Iterator it = list.iterator(); it.hasNext();)
                strings.add(byteArrayFromString((String) it.next()));
        }
        catch (TextParseException e)
        {
            // Surface parse problems as the documented unchecked exception.
            throw new IllegalArgumentException(e.getMessage());
        }
    }

    /** Returns the strings decoded for display (unquoted form). */
    public List getStrings()
    {
        List result = new ArrayList(strings.size());
        for (int i = 0; i < strings.size(); i++)
            result.add(byteArrayToString((byte[]) strings.get(i), false));
        return result;
    }

    /** Returns the raw backing byte arrays (not a defensive copy). */
    public List getStringsAsByteArrays()
    {
        return strings;
    }

    /** Parses rdata from master-file format: consumes string tokens until a
     *  non-string token appears, which is pushed back for the caller. */
    void rdataFromString(Tokenizer tokenizer, Name origin)
        throws IOException
    {
        strings = new ArrayList(2);
        while (true)
        {
            Tokenizer.Token token = tokenizer.get();
            if (!token.isString())
            {
                tokenizer.unget();
                return;
            }
            try
            {
                strings.add(byteArrayFromString(token.value));
            }
            catch (TextParseException e)
            {
                throw tokenizer.exception(e.getMessage());
            }
        }
    }

    /** Parses rdata from wire format: reads counted strings until the
     *  input is exhausted. */
    void rrFromWire(DNSInput in)
        throws IOException
    {
        strings = new ArrayList(2);
        while (in.remaining() > 0)
            strings.add(in.readCountedString());
    }

    /** Renders the strings quoted/escaped, separated by single spaces. */
    String rrToString()
    {
        // StringBuilder instead of the decompiled StringBuffer: the buffer
        // is method-local, so synchronization buys nothing.
        StringBuilder sb = new StringBuilder();
        for (Iterator it = strings.iterator(); it.hasNext();)
        {
            sb.append(byteArrayToString((byte[]) it.next(), true));
            if (it.hasNext())
                sb.append(" ");
        }
        return sb.toString();
    }

    /** Writes each string to the wire in counted-string form. */
    void rrToWire(DNSOutput out, Compression c, boolean canonical)
    {
        for (Iterator it = strings.iterator(); it.hasNext();)
            out.writeCountedString((byte[]) it.next());
    }
}
|
Markdown | UTF-8 | 15,528 | 2.671875 | 3 | [
"CC-BY-4.0",
"MIT",
"LicenseRef-scancode-generic-cla"
] | permissive | ---
title: "Creating, Altering, and Removing Indexes"
description: "Creating, Altering, and Removing Indexes"
author: "markingmyname"
ms.author: "maghan"
ms.date: 01/11/2019
ms.service: sql
ms.topic: "reference"
helpviewer_keywords:
- "indexes [SMO]"
monikerRange: "=azuresqldb-current||=azure-sqldw-latest||>=sql-server-2016||>=sql-server-linux-2017||=azuresqldb-mi-current"
---
# Creating, Altering, and Removing Indexes
[!INCLUDE [SQL Server ASDB, ASDBMI, ASDW](../../../includes/applies-to-version/sql-asdb-asdbmi-asa.md)]
In the [!INCLUDE[ssNoVersion](../../../includes/ssnoversion-md.md)] Management Objects (SMO) hierarchy, indexes are represented by the <xref:Microsoft.SqlServer.Management.Smo.Index> object. The indexed columns are represented by a collection of <xref:Microsoft.SqlServer.Management.Smo.IndexedColumn> objects represented by the <xref:Microsoft.SqlServer.Management.Smo.Index.IndexedColumns%2A> property.
You can create an index on a XML column by specifying the <xref:Microsoft.SqlServer.Management.Smo.Index.IsXmlIndex%2A> property of the <xref:Microsoft.SqlServer.Management.Smo.Index> object.
## Examples
To use any code example that is provided, you will have to choose the programming environment, the programming template, and the programming language in which to create your application. For more information, see [Create a Visual C# SMO Project in Visual Studio .NET](../../../relational-databases/server-management-objects-smo/how-to-create-a-visual-csharp-smo-project-in-visual-studio-net.md).
## Creating a Non-Clustered, Composite Index in Visual Basic
This code example demonstrates how to create a composite, nonclustered index. For a composite index, add more than one column to the index. Set the <xref:Microsoft.SqlServer.Management.Smo.Index.IsClustered%2A> property to **False** for a nonclustered index.
```
' /r:Microsoft.SqlServer.Smo.dll
' /r:Microsoft.SqlServer.ConnectionInfo.dll
' /r:Microsoft.SqlServer.SqlEnum.dll
' /r:Microsoft.SqlServer.Management.Sdk.Sfc.dll
Imports Microsoft.SqlServer.Management.Smo
Public Class A
Public Shared Sub Main()
' Connect to the local, default instance of SQL Server.
Dim srv As Server
srv = New Server()
' Reference the AdventureWorks2022 database.
Dim db As Database
db = srv.Databases("AdventureWorks2022")
' Declare a Table object and reference the HumanResources table.
Dim tb As Table
tb = db.Tables("Employee", "HumanResources")
' Define an Index object variable by providing the parent table and index name in the constructor.
Dim idx As Index
idx = New Index(tb, "TestIndex")
' Add indexed columns to the index.
Dim icol1 As IndexedColumn
icol1 = New IndexedColumn(idx, "BusinessEntityID", True)
idx.IndexedColumns.Add(icol1)
Dim icol2 As IndexedColumn
icol2 = New IndexedColumn(idx, "HireDate", True)
idx.IndexedColumns.Add(icol2)
' Set the index properties.
idx.IndexKeyType = IndexKeyType.DriUniqueKey
idx.IsClustered = False
idx.FillFactor = 50
' Create the index on the instance of SQL Server.
idx.Create()
' Modify the page locks property.
idx.DisallowPageLocks = True
' Run the Alter method to make the change on the instance of SQL Server.
idx.Alter()
' Remove the index from the table.
idx.Drop()
End Sub
End Class
```
## Creating a Non-Clustered, Composite Index in Visual C#
This code example demonstrates how to create a composite, nonclustered index. For a composite index, add more than one column to the index. Set the <xref:Microsoft.SqlServer.Management.Smo.Index.IsClustered%2A> property to **False** for a nonclustered index.
```
// /r:Microsoft.SqlServer.Smo.dll
// /r:Microsoft.SqlServer.ConnectionInfo.dll
// /r:Microsoft.SqlServer.SqlEnum.dll
// /r:Microsoft.SqlServer.Management.Sdk.Sfc.dll
using Microsoft.SqlServer.Management.Smo;
public class A {
public static void Main() {
// Connect to the local, default instance of SQL Server.
Server srv;
srv = new Server();
// Reference the AdventureWorks2022 database.
Database db;
db = srv.Databases["AdventureWorks2022"];
// Declare a Table object and reference the HumanResources table.
Table tb;
tb = db.Tables["Employee", "HumanResources"];
// Define an Index object variable by providing the parent table and index name in the constructor.
Index idx;
idx = new Index(tb, "TestIndex");
// Add indexed columns to the index.
IndexedColumn icol1;
icol1 = new IndexedColumn(idx, "BusinessEntityID", true);
idx.IndexedColumns.Add(icol1);
IndexedColumn icol2;
icol2 = new IndexedColumn(idx, "HireDate", true);
idx.IndexedColumns.Add(icol2);
// Set the index properties.
idx.IndexKeyType = IndexKeyType.DriUniqueKey;
idx.IsClustered = false;
idx.FillFactor = 50;
// Create the index on the instance of SQL Server.
idx.Create();
// Modify the page locks property.
idx.DisallowPageLocks = true;
// Run the Alter method to make the change on the instance of SQL Server.
idx.Alter();
// Remove the index from the table.
idx.Drop();
}
}
```
## Creating a Non-Clustered, Composite Index in PowerShell
This code example demonstrates how to create a composite, nonclustered index. For a composite index, add more than one column to the index. Set the <xref:Microsoft.SqlServer.Management.Smo.Index.IsClustered%2A> property to **False** for a nonclustered index.
```
# Set the path context to the local, default instance of SQL Server and to the
#database tables in AdventureWorks2022
CD \sql\localhost\default\databases\AdventureWorks2022\Tables\
#Get a reference to the table
$tb = get-item HumanResources.Employee
#Define an Index object variable by providing the parent table and index name in the constructor.
$idx = New-Object -TypeName Microsoft.SqlServer.Management.SMO.Index -argumentlist $tb, "TestIndex"
#Add indexed columns to the index.
$icol1 = New-Object -TypeName Microsoft.SqlServer.Management.SMO.IndexedColumn `
-argumentlist $idx, "BusinessEntityId", $true
$idx.IndexedColumns.Add($icol1)
$icol2 = New-Object -TypeName Microsoft.SqlServer.Management.SMO.IndexedColumn `
-argumentlist $idx, "HireDate", $true
$idx.IndexedColumns.Add($icol2)
#Set the index properties.
$idx.IndexKeyType = [Microsoft.SqlServer.Management.SMO.IndexKeyType]::DriUniqueKey
$idx.IsClustered = $false
$idx.FillFactor = 50
#Create the index on the instance of SQL Server.
$idx.Create()
#Modify the page locks property.
$idx.DisallowPageLocks = $true
#Run the Alter method to make the change on the instance of SQL Server.
$idx.Alter()
#Remove the index from the table.
$idx.Drop();
```
## Creating an XML Index in Visual Basic
This code example shows how to create an XML index on an XML data type. The XML data type is an XML schema collection called MySampleCollection, which is created in [Using XML Schemas](../../../relational-databases/server-management-objects-smo/tasks/using-xml-schemas.md). XML indexes have some restrictions, one of which is that it must be created on a table that already has a clustered, primary key.
```
' /r:Microsoft.SqlServer.Smo.dll
' /r:Microsoft.SqlServer.ConnectionInfo.dll
' /r:Microsoft.SqlServer.SqlEnum.dll
' /r:Microsoft.SqlServer.Management.Sdk.Sfc.dll
Imports Microsoft.SqlServer.Management.Smo
Public Class A
Public Shared Sub Main()
' Connect to the local, default instance of SQL Server.
Dim srv As Server
srv = New Server()
Dim db1 As Database = srv.Databases("TESTDB")
' Define a Table object variable and add an XML type column.
Dim tb As New Table(db1, "XmlTable3")
Dim mySample As New XmlSchemaCollection(db1, "Sample4", "dbo")
mySample.Text = "<xsd:schema xmlns:xsd=""http://www.w3.org/2001/XMLSchema"" targetNamespace=""NS2""> <xsd:element name=""elem1"" type=""xsd:integer""/></xsd:schema>"
mySample.Create()
Dim col11 As Column
' This sample requires that an XML schema type called MySampleCollection exists on the database.
col11 = New Column(tb, "XMLValue", DataType.Xml("Sample4"))
' Add another integer column that can be made into a unique, primary key.
tb.Columns.Add(col11)
Dim col21 As Column
col21 = New Column(tb, "Number", DataType.Int)
col21.Nullable = False
tb.Columns.Add(col21)
' Create the table of the instance of SQL Server.
tb.Create()
' Create a unique, clustered, primary key index on the integer column. This is required for an XML index.
Dim cp As Index
cp = New Index(tb, "clusprimindex3")
cp.IsClustered = True
cp.IndexKeyType = IndexKeyType.DriPrimaryKey
Dim cpcol As IndexedColumn
cpcol = New IndexedColumn(cp, "Number", True)
cp.IndexedColumns.Add(cpcol)
cp.Create()
' Define and XML Index object variable by supplying the parent table and the XML index name arguments in the constructor.
Dim i As Index
i = New Index(tb, "xmlindex")
Dim ic As IndexedColumn
ic = New IndexedColumn(i, "XMLValue", True)
i.IndexedColumns.Add(ic)
' Create the XML index on the instance of SQL Server.
i.Create()
End Sub
End Class
```
## Creating an XML Index in Visual C#
This code example shows how to create an XML index on an XML data type. The XML data type is an XML schema collection called MySampleCollection, which is created in [Using XML Schemas](../../../relational-databases/server-management-objects-smo/tasks/using-xml-schemas.md). XML indexes have some restrictions, one of which is that it must be created on a table that already has a clustered, primary key.
```
// /r:Microsoft.SqlServer.Smo.dll
// /r:Microsoft.SqlServer.ConnectionInfo.dll
// /r:Microsoft.SqlServer.SqlEnum.dll
// /r:Microsoft.SqlServer.Management.Sdk.Sfc.dll
using Microsoft.SqlServer.Management.Smo;
public class A {
public static void Main() {
// Connect to the local, default instance of SQL Server.
Server srv;
srv = new Server();
Database db1 = srv.Databases["TESTDB"];
// Define a Table object variable and add an XML type column.
Table tb = new Table(db1, "XmlTable3");
XmlSchemaCollection mySample = new XmlSchemaCollection(db1, "Sample4", "dbo");
mySample.Text = "<xsd:schema xmlns:xsd=\"http://www.w3.org/2001/XMLSchema\" targetNamespace=\"NS2\"> <xsd:element name=\"elem1\" type=\"xsd:integer\"/></xsd:schema>";
mySample.Create();
Column col11;
// This sample requires that an XML schema type called MySampleCollection exists on the database.
col11 = new Column(tb, "XMLValue", DataType.Xml("Sample4"));
// Add another integer column that can be made into a unique, primary key.
tb.Columns.Add(col11);
Column col21;
col21 = new Column(tb, "Number", DataType.Int);
col21.Nullable = false;
tb.Columns.Add(col21);
// Create the table of the instance of SQL Server.
tb.Create();
// Create a unique, clustered, primary key index on the integer column. This is required for an XML index.
Index cp;
cp = new Index(tb, "clusprimindex3");
cp.IsClustered = true;
cp.IndexKeyType = IndexKeyType.DriPrimaryKey;
IndexedColumn cpcol;
cpcol = new IndexedColumn(cp, "Number", true);
cp.IndexedColumns.Add(cpcol);
cp.Create();
// Define and XML Index object variable by supplying the parent table and the XML index name arguments in the constructor.
Index i;
i = new Index(tb, "xmlindex");
IndexedColumn ic;
ic = new IndexedColumn(i, "XMLValue", true);
i.IndexedColumns.Add(ic);
// Create the XML index on the instance of SQL Server.
i.Create();
}
}
```
## Creating an XML Index in PowerShell
This code example shows how to create an XML index on an XML data type. The XML data type is an XML schema collection called MySampleCollection, which is created in [Using XML Schemas](../../../relational-databases/server-management-objects-smo/tasks/using-xml-schemas.md). XML indexes have some restrictions, one of which is that it must be created on a table that already has a clustered, primary key.
```
# Set the path context to the local, default instance of SQL Server and get a reference to AdventureWorks2022
CD \sql\localhost\default\databases
$db = get-item AdventureWorks2022
#Define a Table object variable and add an XML type column.
#This sample requires that an XML schema type called MySampleCollection exists on the database.
#See sample on Creating an XML schema to do this
$tb = New-Object -TypeName Microsoft.SqlServer.Management.SMO.Table -argumentlist $db, "XmlTable"
$Type = [Microsoft.SqlServer.Management.SMO.DataType]::Xml("MySampleCollection")
$col1 = New-Object -TypeName Microsoft.SqlServer.Management.SMO.Column -argumentlist $tb,"XMLValue", $Type
$tb.Columns.Add($col1)
#Add another integer column that can be made into a unique, primary key.
$Type = [Microsoft.SqlServer.Management.SMO.DataType]::Int
$col2 = New-Object -TypeName Microsoft.SqlServer.Management.SMO.Column -argumentlist $tb,"Number", $Type
$col2.Nullable = $false
$tb.Columns.Add($col2)
#Create the table of the instance of SQL Server.
$tb.Create()
#Create a unique, clustered, primary key index on the integer column. This is required for an XML index.
#Define an Index object variable by providing the parent table and index name in the constructor.
$cp = New-Object -TypeName Microsoft.SqlServer.Management.SMO.Index -argumentlist $tb, "clusprimindex"
$cp.IsClustered = $true;
$cp.IndexKeyType = [Microsoft.SqlServer.Management.SMO.IndexKeyType]::DriPrimaryKey;
#Create and add an indexed column to the index.
$cpcol = New-Object -TypeName Microsoft.SqlServer.Management.SMO.IndexedColumn `
-argumentlist $cp, "Number", $true
$cp.IndexedColumns.Add($cpcol)
$cp.Create()
#Define and XML Index object variable by supplying the parent table and
# the XML index name arguments in the constructor.
$i = New-Object -TypeName Microsoft.SqlServer.Management.SMO.Index -argumentlist $tb, "xmlindex"
#Create and add an indexed column to the index.
$ic = New-Object -TypeName Microsoft.SqlServer.Management.SMO.IndexedColumn `
-argumentlist $i, "XMLValue", $true
$i.IndexedColumns.Add($ic)
#Create the XML index on the instance of SQL Server
$i.Create()
```
## See Also
<xref:Microsoft.SqlServer.Management.Smo.Index>
|
C | UTF-8 | 9,390 | 2.765625 | 3 | [] | no_license | #include <stdlib.h>
#include <string.h>
#include <ctype.h>
#include "parse_ops.h"
#include "constant_data.h"
#include "util_funcs.h"
#include "parse_directives.h"
#include "errors.h"
#include "parsing.h"
int find_op (char*);
int identify_operand (char* str);
void parse_operand (char* str, int type, state_t*, int combine, operand_type_e operand_type);
/*
 * Second pass over the current source file: for every line holding an
 * operation (not a directive), emits the op word and its operand word(s)
 * into state->code_table and records/resolves code labels.
 * On any error a message goes to stderr and state->failed is set; parsing
 * continues with the next line so all errors are reported in one run.
 * NOTE(review): label_name/op_name/code_contents appear to point into the
 * `line` buffer filled by clean_and_split_line() -- confirm in parsing.h.
 */
void parse_ops (state_t* state) {
  char* line = malloc(LINE_LENGTH + 1);
  char* label_name = NULL;
  char* op_name = NULL;
  char* code_contents = NULL;
  state->current_line_num = 0;
  /* Loop over every line of the source file */
  while (get_line(line, LINE_LENGTH, state->current_file_ptr, state->current_line_num)) {
    state->current_line_num++;
    /* Remove comment, and split label, directive/op and arguments/operands */
    clean_and_split_line(line, &label_name, &op_name, &code_contents, state, 0);
    /* Only process op lines (directives were handled in the first pass);
       a present label must also be syntactically valid */
    if (op_name && !ISDIRECTIVE(op_name) &&
        (!label_name || is_valid_label(label_name, state))) {
      int opcode = find_op(op_name);
      /* Put the address in the label allocated by find_directives_and_labels() */
      if (label_name)
        find_code_label(state, label_name)->address = state->code_counter;
      if (opcode != -1) {
        op_t my_op = OPS[opcode];
        /* Reserve the op word now; its encoding is filled in below once the
           operand addressing modes are known */
        unsigned op_word_index = add_word(&state->code_table, &state->code_counter, 0);
        int number_of_operands = my_op.number_of_operands;
        /* Split operands by '\0' instead of comma */
        char* second_operand_string = strchr(code_contents, ',');
        if (second_operand_string) {
          *second_operand_string = '\0';
          second_operand_string++;
        }
        second_operand_string = advance_whitespace(second_operand_string);
        /* Push the label to the table, with it's final address */
        if (label_name) {
          add_code_label(state, label_name);
        }
        /* NOTE(review): on the operand errors below the reserved op word
           stays in the table -- verify this is intended */
        if (number_of_operands >= 1) {
          /* Classify and validate the first (source) operand */
          int src_type = identify_operand(code_contents);
          if (src_type == -1) {
            fprintf(stderr, ERROR_MISSING_FIRST_OPERAND, state->current_line_num, state->current_file_name);
            state->failed = 1;
            continue;
          } else if (!(my_op.src_t & (1 << src_type))) {
            fprintf(stderr, ERROR_UNSUPPORTED_OPERAND_TYPE, op_name, code_contents, state->current_line_num, state->current_file_name);
            state->failed = 1;
            continue;
          }
          parse_operand(code_contents, src_type, state, 0, number_of_operands == 2 ? SRC : DST);
          if (number_of_operands == 2) {
            /* Classify and validate the second (destination) operand */
            int dst_type = identify_operand(second_operand_string);
            if (dst_type == -1) {
              fprintf(stderr, ERROR_MISSING_SECOND_OPERAND, state->current_line_num, state->current_file_name);
              state->failed = 1;
              continue;
            } else if (!(my_op.dst_t & (1 << dst_type))) {
              fprintf(stderr, ERROR_UNSUPPORTED_OPERAND_TYPE, op_name, second_operand_string, state->current_line_num, state->current_file_name);
              state->failed = 1;
              continue;
            }
            /* Encode opcode + both addressing modes into the op word */
            *(state->code_table + op_word_index) = (opcode << 6) | (src_type << 4) | (dst_type << 2);
            /* combine flag: two register operands share a single extra word */
            parse_operand(second_operand_string, dst_type, state, src_type == 3 && dst_type == 3, DST);
          } else {
            /* Single operand: it occupies the destination field */
            *(state->code_table + op_word_index) = (opcode << 6) | (src_type << 2);
            if (second_operand_string && *second_operand_string != '\0') {
              fprintf(stderr, ERROR_EXPECTED_EOL, second_operand_string, state->current_line_num, state->current_file_name);
              state->failed = 1;
              continue;
            }
          }
        } else {
          /* No operands: only the opcode is encoded */
          *(state->code_table + op_word_index) = opcode << 6;
          if (code_contents && *code_contents != '\0') {
            fprintf(stderr, ERROR_EXPECTED_EOL, code_contents, state->current_line_num, state->current_file_name);
            state->failed = 1;
            continue;
          }
        }
      } else {
        fprintf(stderr, ERROR_UNKNOWN_OP, op_name, state->current_line_num, state->current_file_name);
      }
    }
  }
  free(line);
}
/* Looks up an op mnemonic in the OPS table.
   Returns its opcode index, or -1 when the name is unknown. */
int find_op (char* directive_name) {
  int index = 0;
  while (index < OPS_LENGTH) {
    if (strcmp(directive_name, OPS[index].name) == 0)
      return index;
    index++;
  }
  return -1;
}
/*
 * Classifies an operand string into an addressing-mode code:
 *   0 = immediate  ('#' followed by a number)
 *   1 = label      (letter followed by alphanumerics)
 *   2 = matrix     (label followed by "[rX][rY]")
 *   3 = register   ('r' followed by a valid register number)
 * Returns -1 when the string matches none of the forms (or is NULL).
 * NOTE(review): MAYBE_NUMBER appears to parse a number at `ptr` and advance
 * it past the digits -- confirm the macro's contract in parsing.h.
 */
int identify_operand (char* str) {
  char* ptr;
  long tmp_number;
  /* NULL check */
  if (!str)
    return -1;
  /* Starts with a digit = fail */
  if (isdigit(str[0]))
    return -1;
  /* 'r' + number = register */
  ptr = str + 1;
  if (tolower(*str) == 'r' && MAYBE_NUMBER(tmp_number) && is_register_valid(tmp_number) && *ptr == '\0')
    return 3; /* 3 = REGISTER */
  ptr = str;
  /* alpha + alphanumeric[] = label */
  if (isalpha(*ptr)) {
    while (1) {
      if (isalnum(*ptr)) {
        ptr++;
      } else if (*ptr != '\0') {
        break;
      } else {
        return 1; /* 1 = LABEL */
      }
    }
  }
  ptr = str + 1;
  /* '#' + number = immediate */
  if (*str == '#' && MAYBE_NUMBER(tmp_number) && *ptr == '\0')
    return 0; /* 0 = IMMEDIATE */
  ptr = str;
  /* alpha + alphanumeric[] + '[' + number + ']' + '[' + number + ']' = matrix */
  if (isalpha(*ptr)) {
    while (1) {
      if (isalnum(*ptr)) {
        ptr++;
      } else if (*(ptr++) == '[' && tolower(*(ptr++)) == 'r' && MAYBE_NUMBER(tmp_number) && *(ptr++) == ']' &&
          *(ptr++) == '[' && tolower(*(ptr++)) == 'r' && MAYBE_NUMBER(tmp_number) && *(ptr++) == ']' &&
          *ptr == '\0') {
        return 2; /* 2 = MATRIX */
      } else {
        break;
      }
    }
  }
  /* Default = fail */
  return -1;
}
/*
 * Encodes one operand into machine word(s) appended to state->code_table.
 *
 * str          - operand text (already classified by identify_operand, so
 *                only value-range checks are repeated here)
 * type         - addressing mode: 0 immediate, 1 label, 2 matrix, 3 register
 * combine      - nonzero when both operands are registers, so the dst
 *                register is merged into the word already emitted for src
 * operand_type - SRC or DST; selects the bit position for register operands
 *
 * On any error an explanatory message is printed and state->failed is set.
 */
void parse_operand (char* str, int type, state_t* state, int combine, operand_type_e operand_type) {
  /* Practically no error checking is done because it was done in identify_operand */
  if (type == 0) { /* 0 = IMMEDIATE */
    char* ptr = str + 1;
    long num;
    EXPECT_NUMBER(num);
    if (num < MIN_VALUE_SIGNED_8bits || num > MAX_VALUE_SIGNED_8bits) {
      fprintf(stderr, ERROR_DATA_OUT_OF_BOUNDS, num, MIN_VALUE_SIGNED_8bits, MAX_VALUE_SIGNED_8bits,
          state->current_line_num, state->current_file_name);
      state->failed = 1;
      return;
    }
    add_word(&state->code_table, &state->code_counter, (cpu_word) num << 2 | ABSOLUTE);
  } else if (type == 1) { /* 1 = LABEL */
    if (!is_valid_label(str, state)) {
      return;
    }
    if (is_extern_label(state, str)) {
      /* Adds the extern word to the code table, and saves its index in the extern refs table */
      add_ref_in_code(&state->extern_refs_table, str, add_word(&state->code_table, &state->code_counter, EXTERNAL));
    } else if (find_data_label(state, str)) {
      int label_address = find_data_label(state, str)->address;
      add_word(&state->code_table, &state->code_counter, label_address << 2 | RELOCATABLE);
    } else if (find_code_label(state, str)) {
      /* Adds a "code label" marker to the code table, and saves its index in the code label refs table for later replacement */
      unsigned index = add_word(&state->code_table, &state->code_counter, CODE_LABEL);
      add_ref_in_code(&state->code_label_refs_table, str, index);
    } else {
      fprintf(stderr, ERROR_LABEL_DOESNT_EXIST, str, state->current_line_num, state->current_file_name);
      state->failed = 1;
      return;
    }
  } else if (type == 2) { /* 2 = MATRIX */
    char* ptr;
    int label_address;
    long mat_x;
    long mat_y;
    /* Split "LABEL[r1][r2]" */
    /*       ^ Here         */
    /* By replacing the '[' with '\0' */
    ptr = strchr(str, '[');
    *ptr = '\0';
    /* Jump to the first register number */
    ptr += 2; /* skip "[r" */
    EXPECT_NUMBER(mat_x);
    if (!is_register_valid(mat_x)) {
      /* BUGFIX: arguments now match the order used by every other
         ERROR_REGISTER_OUT_OF_BOUNDS call site: value, min, max, line, file
         (previously the line number was printed where min belongs). */
      fprintf(stderr, ERROR_REGISTER_OUT_OF_BOUNDS, mat_x, MINIMUM_REG, MAXIMUM_REG, state->current_line_num, state->current_file_name);
      state->failed = 1;
      return;
    }
    /* Jump to the second register number */
    ptr += 3; /* skip "][r" */
    EXPECT_NUMBER(mat_y);
    if (!is_register_valid(mat_y)) {
      fprintf(stderr, ERROR_REGISTER_OUT_OF_BOUNDS, mat_y, MINIMUM_REG, MAXIMUM_REG, state->current_line_num, state->current_file_name);
      state->failed = 1;
      return;
    }
    /* (dead re-check of mat_x/mat_y removed: both were validated above,
       and the invalid cases already returned) */
    if (is_extern_label(state, str)) {
      add_ref_in_code(&state->extern_refs_table, str, add_word(&state->code_table, &state->code_counter, EXTERNAL));
    } else if (find_code_label(state, str)) {
      fprintf(stderr, ERROR_CODE_LABEL_IN_MATRIX, str, state->current_line_num, state->current_file_name);
      state->failed = 1;
      return;
    } else {
      data_label* current;
      if ((current = find_data_label(state, str))) {
        label_address = current->address;
        add_word(&state->code_table, &state->code_counter, label_address << 2 | RELOCATABLE);
      } else {
        fprintf(stderr, ERROR_LABEL_DOESNT_EXIST, str, state->current_line_num, state->current_file_name);
        state->failed = 1;
        return;
      }
    }
    /* Second word: the two index registers */
    add_word(&state->code_table, &state->code_counter, (cpu_word) (mat_x << 6 | mat_y << 2) | ABSOLUTE);
  } else if (type == 3) { /* 3 = REGISTER */
    char* ptr = str + 1; /* skip 'r' */
    long reg_num;
    EXPECT_NUMBER(reg_num);
    if (!is_register_valid(reg_num)) {
      fprintf(stderr, ERROR_REGISTER_OUT_OF_BOUNDS, reg_num, MINIMUM_REG, MAXIMUM_REG, state->current_line_num, state->current_file_name);
      state->failed = 1;
      return;
    }
    if (!combine) {
      add_word(&state->code_table, &state->code_counter, (cpu_word) (reg_num << (operand_type == SRC ? 6 : 2)) | ABSOLUTE);
    } else {
      cpu_word word = state->code_table[state->code_counter - 1] - ABSOLUTE; /* Remove abs flag */
      word |= (cpu_word) reg_num << 2 | ABSOLUTE; /* Add dst register and abs flag back */
      state->code_table[state->code_counter - 1] = word;
    }
  }
}
Markdown | UTF-8 | 1,999 | 3.078125 | 3 | [
"MIT"
] | permissive | ---
layout: post
title: "Ejemplo sencillo de Fragment"
date: 2019-10-18 14:30:00 +0200
categories: primer_trimestre
order: 1
parent: Primer trimestre Android
---
# Ejemplo sencillo de Fragment
En este ejemplo vamos a cambiar dónde creamos la interfaz de usuario, pasando de la Actividad principal a un Fragmento.
* **Enlace al proyecto:** <https://github.com/Manuel-Ag/PMD_19-20/tree/master/EjemplosencilloFragment>
* **Enlace a la documentación:** <https://developer.android.com/guide/components/fragments?hl=es-419>
El código para crear un Fragmento es sencillo, únicamente debemos hacer que una clase herede de *Fragment*:
{% highlight java %}
public class Fragmento1 extends Fragment {
@Nullable
@Override
public View onCreateView(@NonNull LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
return inflater.inflate(R.layout.fragmento1, container, false);
}
}
{% endhighlight %}
Como se puede apreciar, se ha creado un fichero *fragmento1.xml*, pasándose como parámetro al método *inflate()* de la clase *LayoutInflater*. Será en este fichero donde se añadan las vistas.
{:refdef: style="text-align: center;"}

{: refdef}
Observa cómo en nuestro ejemplo en realidad hay 3 ficheros con el mismo nombre. En este caso se ha aplicado lo aprendido en el ejemplo de aplicación multi-lenguaje; simplemente se ha cambiado los criterios para la utilización de un fichero u otro, cambiando el idioma por la orientación del teléfono (*portrait* o *landscape*).
Por último, simplemente añade un contenedor de tipo *fragment* al fichero .xml correspondiente al *layout* principal. Esto hará que este fragmento se cargue dentro de este *layout* (puedes observarlo tanto en el diseñador como en el pripio fichero .xml).
{:refdef: style="text-align: center;"}

{: refdef}
|
Ruby | UTF-8 | 1,814 | 3.609375 | 4 | [] | no_license | require_relative '../lib/company/company'
require_relative '../lib/company/team'
require_relative '../lib/employees/developer'
require_relative '../lib/employees/tester'
require_relative '../lib/employees/employee'
require_relative '../lib/employees/manager'
# Interactive console menu for editing a single employee's attributes
# (name, wage, working hours, working days). Reads commands from STDIN
# and loops until the user enters an unrecognised command (e.g. "Exit").
class Menu

  private

  # Prints the available commands for the current employee (@emp), reads
  # one command from STDIN and dispatches it.
  def write_emp_menu
    puts "\n\nEmployee ( #{@emp.first_name} #{@emp.last_name} id: #{@emp.id} ) enter one of the commands: "
    puts "\"Change name\" - if you want to change emplyoees name "
    puts "\"Set wage\" if you want to set employees wage"
    puts "\"Set worktime \" - if you want to set employee's working hours"
    puts "\"Set days\" - if you want to set number of working days in a week"
    puts "\"Exit\" - if you want to exit the menu"
    print "Enter your command: "
    command = gets.chomp
    command.downcase!
    execute_emp_menu(command)
  end

  # Applies the given command to @emp. Any unrecognised command (including
  # "exit") clears @emp and leaves the menu; otherwise the menu is shown
  # again after the change.
  def execute_emp_menu(command)
    case command
    when "change name"
      print "Enter new first name of the employee: "
      first_name = gets.chomp
      print "Enter new last name of the emplyoee: "
      last_name = gets.chomp
      @emp.first_name = first_name
      @emp.last_name = last_name
      puts "Name changed to: #{@emp.first_name} #{@emp.last_name}"
    when "set wage"
      print "Enter the new hourly wage: "
      wage_s = gets.chomp
      wage = wage_s.to_i
      @emp.wage = wage
      # BUGFIX: was `emp.wage` (undefined local variable -> NameError);
      # the employee lives in the @emp instance variable.
      puts "Wage changed to: #{@emp.wage}"
    when "set worktime"
      print "Enter the new working hours: "
      time_s = gets.chomp
      time = time_s.to_i
      # BUGFIX: was `@emp.worktime = worktime` — `worktime` was never
      # defined; the parsed value is held in `time`.
      @emp.worktime = time
      puts "Worktime changed to: #{@emp.worktime}"
    when "set days"
      print "Enter number of working days: "
      days_s = gets.chomp
      days = days_s.to_i
      @emp.workdays = days
      puts "Workdays changed to: #{@emp.workdays}"
    else
      @emp = nil
      return
    end
    # BUGFIX: was `write_dev_menu`, which does not exist in this class —
    # redisplay this employee menu instead.
    write_emp_menu
  end

  public

  # Entry point: stores the employee to edit and shows the menu.
  def employee_menu(employee)
    @emp = employee
    write_emp_menu
  end
end
JavaScript | UTF-8 | 1,638 | 2.984375 | 3 | [
"MIT"
] | permissive | import { deburr, isString } from 'lodash'
const regexOperatorsRegex = /[|\\{}()[\]^$+*?.-]/g
const emailRegex = /^[^\s@]+@([^\s@.,]+\.)+[^\s@.,]{2,}$/
/**
 * Escapes every regex metacharacter in `str` so it can be embedded
 * literally inside a RegExp source.
 * @param {string} str
 * @returns {string} the escaped string
 * @throws {TypeError} when `str` is not a string
 */
export function escapeRegexp (str) {
  if (typeof str === 'string') {
    // '$&' re-inserts the matched metacharacter after the backslash.
    return str.replace(regexOperatorsRegex, '\\$&')
  }
  throw new TypeError('Expected a string')
}
// Options object format expected by url-safe-string
export function makeSafeForUrl (string, options) {
/*
Using deburr to keep basic letters without their accents rather than totally getting rid of them.
Turns (French) 'Conditions Générales' into 'conditions-generales', much better than 'conditions-gnrales'
deburr is very limited though and does not turn 'ü' into 'ue' as expected in German, just 'u'.
Unfortunately String.prototype.normalize is not included in babel
*/
return safePath(deburr(string))
}
// Normalises an arbitrary string into a URL-safe slug: whitespace becomes '-',
// anything outside [a-z0-9-] is dropped, the result is capped at 100 chars and
// runs of '-' are collapsed to a single one.
function safePath (str) {
  const joiner = '-'
  const maxLength = 100
  const disallowed = /((?!([a-z0-9-])).)/gi // matches opposite of [a-z0-9-]
  let slug = str.replace(/\s/g, joiner).toLowerCase()
  // Strip every unsafe character, but never the joiner itself
  slug = slug.replace(disallowed, match => (match === joiner ? match : ''))
  if (slug.length > maxLength) slug = slug.substring(0, maxLength)
  // Collapse consecutive joiners into one
  const repeatedJoiner = new RegExp(`${joiner}+`, 'g')
  return slug.replace(repeatedJoiner, joiner)
}
// Returns true when `value` is a string shaped like user@domain.tld
// (dot-separated domain, final label 2+ chars; intentionally not RFC-complete).
export function isEmail (value) {
  return isString(value) && emailRegex.test(value)
}
|
Ruby | UTF-8 | 1,890 | 2.640625 | 3 | [] | no_license | require 'benchmark'
require 'redis'
require 'yaml'
require 'json'
N = 10000
Benchmark.bm do |r|
@redis = Redis.new
@serialize_me = { :lol => 'test', "ohai" => 'llllll', :array => [12,321,231,23, { :hi => 'hej'}]}
r.report("YAML Save") do
puts 'Yaml'
puts YAML.dump(@serialize_me).length
N.times do |n|
@redis.set "yaml-#{n}", YAML.dump(@serialize_me)
end
end
r.report("YAML Load") do
N.times do |n|
YAML.load(@redis.get "yaml-#{n}")
end
end
r.report("Marshal Save") do
puts 'Marshal'
puts Marshal.dump(@serialize_me).length
N.times do |n|
@redis.set "marshal-#{n}", Marshal.dump(@serialize_me)
end
end
r.report("Marshal Load") do
N.times do |n|
Marshal.load(@redis.get "marshal-#{n}")
end
end
r.report("JSON Save") do
puts 'JSON'
puts JSON.dump(@serialize_me).length
N.times do |n|
@redis.set "json-#{n}", JSON.dump(@serialize_me)
end
end
r.report("JSON Load") do
N.times do |n|
JSON.load(@redis.get "json-#{n}")
end
end
end
# N = 10000
# user system total real
# YAML Save 1.170000 0.160000 1.330000 ( 1.444772)
# YAML Load 0.560000 0.130000 0.690000 ( 0.781064)
# Marshal Save 0.290000 0.140000 0.430000 ( 0.527284)
# Marshal Load 0.260000 0.160000 0.420000 ( 0.507964)
# JSON Save 0.670000 0.170000 0.840000 ( 0.970764)
# JSON Load 0.310000 0.190000 0.500000 ( 0.568105
#
# N = 100000
# user system total real
# YAML Save 1.170000 0.160000 1.330000 ( 1.444772)
# YAML Load 0.560000 0.130000 0.690000 ( 0.781064)
# Marshal Save 0.290000 0.140000 0.430000 ( 0.527284)
# Marshal Load 0.260000 0.160000 0.420000 ( 0.507964)
# JSON Save 0.670000 0.170000 0.840000 ( 0.970764)
# JSON Load 0.310000 0.190000 0.500000 ( 0.568105
|
C++ | UTF-8 | 2,324 | 2.859375 | 3 | [] | no_license | /*
* File: rhythm_automata.h
* Author: puppy
*
* Created on August 7, 2012, 5:07 PM
*/
#ifndef RHYTHM_AUTOMATA_H
#define RHYTHM_AUTOMATA_H
#include <iostream>
#include <time.h>
#include <stdlib.h>
#include <math.h>
#include <string>
#include "State.h"
#define MAX_ITERATIONS 600
using namespace std;
class Rhythm_automata {
public:
Rhythm_automata();
Rhythm_automata(vector<string> input_rhythm);
void rhythm_automata_random_rhythm(int drumvoices, int notes);
void define_instrument_rules(int i, string rule_1, string rule_2, string rule_3, string rule_4);
bool** get_rhythm(int hamming_distance);
bool* get_rhythm_single(int voice , int hamming_distance);
State bool_rhythm_to_state(bool **rhythm_printed);
void next_generation();
int calculate_sum_of_neighboors_single(int j, bool *old_rhythm);
bool* next_generation(int i); //returns generation for specific element
void rules_init();
bool have_same_onsets(bool **rhythm_1, bool **rhythm_2);
int calculate_hamming_distance(bool **rhythm_1, bool **rhythm_2);
bool have_same_onsets_single_row(bool rhythm_1[], bool rhythm_2[]);
int calculate_hamming_distance_single_row(bool rhythm_1[], bool rhythm_2[]);
void printout_rhythm(bool **rhythm_printed);
int calculate_sum_of_neighboors(int i, int j, bool **old_rhythm);
string trim_rhythm_string(string input_string);
/**
* @param rhythm, rows(5) represent drum voices,
* rhythm[0]-> hihat
* rhythm[1]-> ride
* rhythm[2]-> high tom
* rhythm[3]-> snare drum
* rhythm[4]-> bass drum
* columns(16) represent the 16th notes
*/
bool **rhythm, **initial_rhythm;
int drum_voices;
int notes;
//vector_vector_rules, contains
//a vector<string> for each drum voice.
//FX v_v_rules.at(0).contains the possible set of rules{vector}
//for the "crash"
vector< vector <string> > v_v_rules;
/*
* Every row will contain the rules for the corresponding voice
* fx if we would like to apply the
* Delayed Stability rule(0 -> R, 1 -> N, 2 -> F, 3 -> U) for the hh,
* then we would have: N,F,F,U as string elements in the
* first row
*
*/
string **rules;
~Rhythm_automata();
private:
};
#endif /* RHYTHM_AUTOMATA_H */
|
Python | UTF-8 | 3,400 | 2.6875 | 3 | [] | no_license | import sympy as sp
import matplotlib.pyplot as plt
import numpy as np
import constants as c
import pandas as pd
from matplotlib import patches
from tqdm import tqdm
import os
def plotSR(m_h1_0=c.m_h1_0, m_h2_0=c.m_h2_0, r=c.r, Xhh_cut=c.Xhh_cut,
m_h1_min=c.m_h1_min, m_h1_max=c.m_h1_max, m_h2_min=c.m_h2_min,
m_h2_max=c.m_h2_max, color=c.sr_color):
"""
Plots a given Xhh curve within m_h1_min,m_h1_max,m_h2_min,m_h2_max
default parameter values are from the big paper
"""
m_h1, m_h2 = sp.symbols('m_h1 m_h2')
sg_expr = ((m_h1-m_h1_0)/(r*m_h1))**2 + ((m_h2-m_h2_0)/(r*m_h2))**2
sg_eq = sp.Eq(sg_expr, Xhh_cut**2)
plot = sp.plot_implicit(sg_eq,
x_var = (m_h1, m_h1_min, m_h1_max),
y_var = (m_h2, m_h2_min, m_h2_max),
show = False,
axis_center = (m_h1_min,m_h2_min))
x,y = zip(*[(x_int.mid, y_int.mid)
for x_int, y_int in plot[0].get_points()[0]])
x,y = list(x),list(y)
plt.plot(x,y,'.',markersize=0.5,color=color)
def plotVR(m_h1_0=c.m_h1_0, m_h2_0=c.m_h2_0,
           r=30, color=c.vr_color, n=500):
    """Draw the circular validation-region (VR) outline.

    A circle of radius ``r`` centred on ``(m_h1_0, m_h2_0)`` is rendered
    as ``n`` small dots on the current matplotlib axes.
    """
    angles = np.linspace(0, 2 * np.pi, n)
    plt.plot(m_h1_0 + r * np.cos(angles),
             m_h2_0 + r * np.sin(angles),
             '.', markersize=0.5, color=color)
def plotCR(m_h1_0=c.m_h1_0, m_h2_0=c.m_h2_0,
           r=45, color=c.cr_color, n=500):
    """Plot the control-region circle: delegates to plotVR with a wider
    default radius (45 vs 30) and the CR colour."""
    plotVR(m_h1_0=m_h1_0, m_h2_0=m_h2_0,
           r=r, color=color, n=n)
def plot_fullmassplane_from_df(df, savename='fullmassplane.png',
save=True, show=False, vr=False):
"""plot the massplane for a given dataframe"""
# add up all mhh for each bin, store in a dataframe
assert all([x in df.keys() for x in ['pdf', 'm_h1', 'm_h2']])
row_list = []
for xi in tqdm(c.xbins):
for yi in c.ybins:
row_list.append({
"m_h1": xi,
"m_h2": yi,
"pdf": sum(df.loc[
(df["m_h1"]==xi) & (df["m_h2"]==yi), "pdf"])
})
plot_df = pd.DataFrame(row_list)
# cast m_h1, m_h2, pdf as arrays
shape = (len(c.xbins),len(c.ybins))
xmesh = np.array(plot_df["m_h1"]).reshape(shape).transpose()
ymesh = np.array(plot_df["m_h2"]).reshape(shape).transpose()
hmesh = np.array(plot_df["pdf"]).reshape(shape).transpose()
# basic plot set-up
fig, ax = plt.subplots()
plt.xlabel("$m_{h1}$")
plt.ylabel("$m_{h2}$")
# plot SR outline
plotSR()
if vr:
plotVR()
# use hmesh as colors
im = ax.pcolormesh(xmesh, ymesh, hmesh)
fig.colorbar(im, ax=ax)
# save figure if needed
if save:
plt.savefig(savename)
if show:
plt.show()
return xmesh, ymesh, hmesh
def plot_regions():
plotCR(n=2000)
plotVR(n=2000)
plotSR()
plt.xlim(50, 250)
plt.ylim(40, 200)
plt.text(110, 110, "SR", color=c.sr_color, fontsize=20)
plt.text(110, 82, "VR", color=c.vr_color, fontsize=20)
plt.text(110, 68, "CR", color=c.cr_color, fontsize=20)
plt.title("Control, Validation, and Signal Regions")
plt.xlabel("$m_{H_1}$", fontsize=12)
plt.ylabel("$m_{H_2}$", fontsize=12)
if not os.path.exists('figures'):
os.mkdir('figures')
plt.savefig("figures/regions.png")
if __name__ == "__main__":
plot_regions()
|
C# | UTF-8 | 900 | 3.1875 | 3 | [
"Apache-2.0"
] | permissive | using System;
using System.Collections.Generic;
using System.Text;
namespace SandwichSystem.BusinessLayer
{
public class Sandwish
{
public StringTranslated Name { get; set; }
public List<Ingredient> Ingredients { get; set; } = new List<Ingredient>();
public Sandwish(StringTranslated Name)
{
this.Name = Name;
}
public string ShowIngredients(Language Langue)
{
var result = "";
var index = 0;
foreach (Ingredient i in Ingredients)
{
if (index < Ingredients.Count-1)
{
result += i.ToString(Langue) + " - ";
}
else
{
result += i.ToString(Langue);
}
index++;
}
return result;
}
}
}
|
// Builds an arithmetic-progression generator. Falsy arguments fall back to
// start = 0 and step = 1 (the `||` defaulting of the original).
// (The original first line was corrupted by concatenated file metadata;
// the function header is restored here.)
function sequence(start, step) {
    var current = start || 0;
    var increment = step || 1;
    return function () {
        var value = current;
        current += increment;
        return value;
    };
}
// Demo: each call to sequence() yields an independent counter closure.
var generator = sequence(10, 3);
var generator2 = sequence(7, 1);
console.log(generator()); // 10
console.log(generator()); // 13
console.log(generator2()); // 7
console.log(generator()); // 16
console.log(generator2()); // 8
|
Python | UTF-8 | 382 | 2.671875 | 3 | [
"MIT"
] | permissive | import pygame, sys, math, random
class Title(pygame.sprite.Sprite):
def __init__(self, image, size):
#pygame.sprite.Sprite.__init__(self, self.containers)
self.size = size
self.image = pygame.image.load(image)
if size:
self.image = pygame.transform.scale(self.image, size)
self.rect = self.image.get_rect()
|
C++ | SHIFT_JIS | 2,231 | 2.828125 | 3 | [] | no_license | #pragma once
#include "Enemy.h"
class EffectManager;
class Stage;
class ProjectileManager;
class CollisionManager;
class Thrower : public Enemy
{
private:
int runH = -1;
int throwH_ = -1;
int frame_ = 0;
int animFrame_ = 0;
Enemy* MakeClone() override;
using Func_t = void (Thrower::*)();
int specialAttackTimer = 0;
float lockonAngle_ = 0.0f;
float addAngle_ = 0.0f;
void SpecialAttack();
//XV
void RunUpdate();
void RunawayUpdate();
void FallUpdate();
void JumpUpdate();
void ThrowUpdate();
void SpecialAttackUpdate();
Func_t updater_;
// `
void RunDraw();
void ThrowDraw();
Func_t drawer_;
std::shared_ptr<EffectManager>effectManager_;
std::shared_ptr<Stage>stage_;
ProjectileManager& projectileManager_;
std::shared_ptr<CollisionManager>collisionManager_;
public:
/// <summary>
/// RXgN^
/// </summary>
/// <param name="p">vC[</param>
/// <param name="effectManager">GtFNgǗpNX</param>
/// <param name="c">J</param>
/// <param name="stage">Xe[W</param>
Thrower(const std::shared_ptr<Player>& p,
std::shared_ptr<EffectManager>& effectManager,
std::shared_ptr<CollisionManager>colManager,
std::shared_ptr<Camera> c,
std::shared_ptr<Stage>stage,
ProjectileManager& pm);
~Thrower();
/// <summary>
/// _[W
/// </summary>
/// <param name="damage">_[Wl</param>
void OnDamage(int damage);
/// <summary>
///
/// </summary>
void OnDead();
/// <summary>
/// XV
/// </summary>
void Update();
/// <summary>
/// `
/// </summary>
void Draw();
/// <summary>
///
/// </summary>
/// <param name="c">̃RW</param>
void OnHit(CollisionInfo& mine, CollisionInfo& another)override;
/// <summary>
/// LĂ~ՓˏSĎ擾
/// </summary>
/// <returns></returns>
const std::vector<Circle>& GetCircles()const override;
};
|
Java | UTF-8 | 1,208 | 1.789063 | 2 | [
"BSD-2-Clause"
] | permissive | import net.runelite.mapping.ObfuscatedName;
@ObfuscatedName("en")
public class class130 {
@ObfuscatedName("i")
class131 field1910;
@ObfuscatedName("f")
class131 field1911 = new class131();
public class130() {
this.field1911.field1912 = this.field1911;
this.field1911.field1913 = this.field1911;
}
@ObfuscatedName("f")
public void method2498(class131 var1) {
if(var1.field1913 != null) {
var1.method2511();
}
var1.field1913 = this.field1911.field1913;
var1.field1912 = this.field1911;
var1.field1913.field1912 = var1;
var1.field1912.field1913 = var1;
}
@ObfuscatedName("i")
public class131 method2499() {
class131 var1 = this.field1911.field1912;
if(var1 == this.field1911) {
this.field1910 = null;
return null;
} else {
this.field1910 = var1.field1912;
return var1;
}
}
@ObfuscatedName("u")
public class131 method2500() {
class131 var1 = this.field1910;
if(var1 == this.field1911) {
this.field1910 = null;
return null;
} else {
this.field1910 = var1.field1912;
return var1;
}
}
}
|
Java | UTF-8 | 2,770 | 3.1875 | 3 | [] | no_license | import java.util.*;
class SceneCard {
private int sceneCost;
private String cardName;
private boolean active;
private String description;
private String picName;
private ArrayList<PlayerSpot> players;
int sceneNum;
boolean flipped;
//Returns true if card is active
public boolean isActive() {
return this.active;
}
public void AddPicName(String picName){
this.picName=picName;
}
public String getPicName(){
return this.picName;
}
public SceneCard(int cost, String name) {
this.cardName = name;
this.sceneCost = cost;
this.sceneCost = cost;
this.players = new ArrayList<PlayerSpot>();
this.active = false;
}
//Gets cost of card
public int getCost() {
return this.sceneCost;
}
//Sets flipped to true
public void flip(){
this.flipped=true;
}
//sets flipped to false
public void unFlip(){
this.flipped=false;
}
//returns flipped
public boolean isFlipped(){
return this.flipped;
}
public void resetCard() {//takes all players off of player spots, active is back to false
for (int i = 0; i < players.size(); i++) {
PlayerSpot current = this.players.get(i);
current.resetPlayerSpot();
}
this.active = false;
}
public ArrayList<PlayerSpot> getRanksOnCard() {//returns array of integers with each one being the rank on a player spot.
return this.players;
}
public void addDesc(String str) {//makes description str
this.description = description;
}
public HashMap<Player, Integer> getPlayers() {//returns players located on sceneCard in HashMap. Rank is the rank+10 to signal the player was on card.
HashMap<Player, Integer> toReturn = new HashMap<Player, Integer>();
for (int i = 0; i < players.size(); i++) {
PlayerSpot current = this.players.get(i);
ArrayList<Player> onSpot = current.getPlayers();
int fixedRank = current.getRank() + 10;
for (int j = 0; j < onSpot.size(); j++) {
toReturn.put(onSpot.get(j), fixedRank);
}
}
return toReturn;
}
//Returns name of card
public String getName() {
return this.cardName;
}
//Activates card
public void activate() {
this.active = true;
}
//Deactivates card
public void deActive() {
this.active = false;
}
//Adds a player spot to a card
public void addPSpot(PlayerSpot spot) {
boolean curr = this.players.add(spot);
}
//Adds a scene number to a card
public void addNum(int num) {
this.sceneNum = num;
}
} |
PHP | UTF-8 | 1,144 | 3.859375 | 4 | [] | no_license | <?php
/*
1- Créer une fonction qui retourne la conversion d'une date FR en date US ou inversement.
Cette fonction prend 2 paramètres : une date et le format de conversion "US" ou "FR"
2- Vous validez que le paramètre de format est bien "US" ou "FR". La fonction retourne un message si ce n'est pas le cas.
*/
/**
 * Converts a date string between FR (d-m-Y) and US (Y-m-d) layouts.
 *
 * @param string $date   any date string strtotime() understands
 * @param string $format target layout: 'FR' or 'US'
 * @return string|null   converted date, or null for an unknown format
 *                       (matching the original implicit return)
 */
function formatDate($date, $format){
    // strftime() is deprecated since PHP 8.1; date() renders these purely
    // numeric patterns identically and without locale dependence.
    if ($format == 'FR') {
        return date('d-m-Y', strtotime($date));
    } elseif ($format == 'US') {
        return date('Y-m-d', strtotime($date));
    }
    return null;
};
echo formatDate('1996-10-11', 'FR'). '<br>';
echo formatDate('11-10-1996', 'US'). '<br>';
// DateTime-based date conversion: 'FR' -> d-m-Y, 'US' -> Y-m-d; any other
// format keyword yields the (French) error message unchanged.
function conversion($date, $format){
    $dateObject = new DateTime($date);
    switch ($format) {
        case 'FR':
            return $dateObject->format('d-m-Y');
        case 'US':
            return $dateObject->format('Y-m-d');
        default:
            return 'Erreur sur le format demandé';
    }
}
echo conversion('1996-10-11', 'FR') . '<br>';
echo conversion('11-10-1996', 'US') . '<br>';
|
Java | UTF-8 | 239 | 2.015625 | 2 | [] | no_license | package com.afarok.test;
// Minimal smoke-test class: prints two fixed lines (used while trying out Git).
public class Test {
    public static void main(String[] ags) {
        final String[] messages = {
            "This is nothing but a simple test.",
            "Testing Git Version Control System.",
        };
        for (String message : messages) {
            System.out.println(message);
        }
    }
}
|
C++ | UTF-8 | 870 | 2.625 | 3 | [] | no_license | ///\file
/*!
* \details Implementation of Trump game
* \author Shubham Yadav (s.yadav@student.utwente.nl)
* \version 1.0
* \date 23 November 2017
*/
#ifndef MESSAGE_H
#define MESSAGE_H
#include <iostream>
#include <string>
#include "TrumpMisc.h"
#define BUFFERLENGTH 256
class Message
{
private:
ENTITY whichPlayer;
FRAMETYPE frameType;
DATATYPE dataType;
int msgLength;
char msgBuffer[BUFFERLENGTH];
bool available;
public:
Message();
void setMsgStatus(bool);
bool getMsgStatus(void);
void flushBuffer(void);
void flushall(void);
void setWhichPlayer(ENTITY);
ENTITY getWhichPlayer(void);
void setFrameType(FRAMETYPE);
FRAMETYPE getFrameType(void);
void setDataType(DATATYPE);
DATATYPE getDataType(void);
void setMsgLength(int);
int getMsgLength(void);
void setMsgBuffer(char*, int);
char* getMsgBuffer(void);
~Message();
};
#endif // !MESSAGE_H |
Markdown | UTF-8 | 810 | 3.1875 | 3 | [] | no_license | ### 不甘心的原因
#### 后悔
想象自己老去的样子,
想象自己无奈的时候,
想象自己还有那么多的事情没有做,
怎么能甘心啊!
现在的奋斗的汗水和以后后悔的泪水,到底哪一个更痛!
#### 想做的事情
自己想做的事情,那么的多,以后只能在做梦实现了吗?
当梦醒的时候,泪水沾湿枕头,你还会回忆起自己实现那么想法的乐趣吗?
#### 不得不面对的事情
当自己的儿女想要一个玩具的时候,你说的是什么?
看到别人的妻子,别人的父母的时候,你想的是什么?
水米油盐的不够的时候,你想的是什么?
你难道不想在面对不得不面对的事情的时候,多一种选择!
> 人生最痛苦的事情莫过于:我本可以!
|
Markdown | UTF-8 | 4,784 | 2.609375 | 3 | [] | no_license | ---
description: "Recipe of Ultimate Next level dumplings(danwake)"
title: "Recipe of Ultimate Next level dumplings(danwake)"
slug: 703-recipe-of-ultimate-next-level-dumplingsdanwake
date: 2020-12-11T05:07:26.956Z
image: https://img-global.cpcdn.com/recipes/65c4627c12620627/751x532cq70/next-level-dumplingsdanwake-recipe-main-photo.jpg
thumbnail: https://img-global.cpcdn.com/recipes/65c4627c12620627/751x532cq70/next-level-dumplingsdanwake-recipe-main-photo.jpg
cover: https://img-global.cpcdn.com/recipes/65c4627c12620627/751x532cq70/next-level-dumplingsdanwake-recipe-main-photo.jpg
author: Ida Brady
ratingvalue: 4.9
reviewcount: 6970
recipeingredient:
- "2 cups flour"
- "2 cups garin danwake"
- "4 tbspn baobab powderkuka"
- "1/2 tspn baking powder"
- " Water as required"
recipeinstructions:
- "In a bowl add the flour and the garin danwake"
- "Add the baking powder and baobab powder"
- "Mix until well corporated"
- "Add water as required to make a thick paste"
- "Then start scooping the paste with your hands and dropping into boiling water"
- "Do thesame to the rest of the paste until finished allow to cook for 10mins"
- "It's ready😋😋enjoy with the servings of your choice"
categories:
- Recipe
tags:
- next
- level
- dumplingsdanwake
katakunci: next level dumplingsdanwake
nutrition: 243 calories
recipecuisine: American
preptime: "PT27M"
cooktime: "PT32M"
recipeyield: "2"
recipecategory: Lunch
---

Hello everybody, it's Jim, welcome to our recipe site. Today, I'm gonna show you how to prepare a distinctive dish, next level dumplings(danwake). One of my favorites. This time, I am going to make it a little bit tasty. This will be really delicious.
For the first time I used baking powder in danwake instead of kanwaand it came out like the normal dumpling jux give it a try the taste is so good. Woke up today with the craving of danwake had to just make it since it is a simple food. Danwake; Hausa delicacy Easy and delicious #danwake #dumpling #hausa delicacy #surayyam Howto,easyrecipe,hausadelicay,danwake recipe,dumplings, dumplingsrecipe. Dan wake( son of beans ) Ingredients Flour Kuka Kanwa Yaji(pepper) Oil Salad Tomatoes Onion Egg.
Next level dumplings(danwake) is one of the most popular of recent trending meals on earth. It's enjoyed by millions daily. It is simple, it's quick, it tastes delicious. Next level dumplings(danwake) is something that I've loved my whole life. They're fine and they look fantastic.
To begin with this particular recipe, we must prepare a few components. You can cook next level dumplings(danwake) using 5 ingredients and 7 steps. Here is how you can achieve that.
<!--inarticleads1-->
##### The ingredients needed to make Next level dumplings(danwake):
1. Get 2 cups flour
1. Get 2 cups garin danwake
1. Take 4 tbspn baobab powder(kuka)
1. Take 1/2 tspn baking powder
1. Prepare Water as required
Extra tips to make your attempts to fold dumplings fail-proof. Danwake is Hausa dumpling made with beans. Dumpling is a broad classification for a dish that consists of pieces of dough (made from a variety of starch sources) wrapped around a filling, or of dough with no filling. Neneks Garin Danwake(danwake/beans dumplings flour) coming soon.
<!--inarticleads2-->
##### Instructions to make Next level dumplings(danwake):
1. In a bowl add the flour and the garin danwake
1. Add the baking powder and baobab powder
1. Mix until well corporated
1. Add water as required to make a thick paste
1. Then start scooping the paste with your hands and dropping into boiling water
1. Do thesame to the rest of the paste until finished allow to cook for 10mins
1. It's ready😋😋enjoy with the servings of your choice
Dumpling is a broad classification for a dish that consists of pieces of dough (made from a variety of starch sources) wrapped around a filling, or of dough with no filling. Neneks Garin Danwake(danwake/beans dumplings flour) coming soon. It's nothing like the rest, it's the best. Dan wake is one of the delicious meals made and enjoyed by the Hausa people of Learn how to make it plus all the ingredients used for it. can also be called beans dumplings. When you are done with making the paste, boil water in a pot and proceed to the next.
So that's going to wrap this up with this exceptional food next level dumplings(danwake) recipe. Thanks so much for your time. I'm sure you can make this at home. There is gonna be more interesting food at home recipes coming up. Remember to bookmark this page on your browser, and share it to your loved ones, colleague and friends. Thank you for reading. Go on get cooking!
|
Java | UTF-8 | 1,805 | 3.03125 | 3 | [] | no_license | package com.maestro.lib.calculations.js;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Date;
/**
 * Represents jscript object "console": routes console.log/warn/error calls
 * from scripts to the SLF4J logger at the matching severity.
 */
public class JSConsole {
    private static final Logger LOGGER = LoggerFactory.getLogger(JSConsole.class);

    /**
     * console.log
     * @param objects - the list of objects
     */
    public static void log(Object... objects) {
        LOGGER.info(processMessage(objects));
    }

    /**
     * console.warn
     * @param objects - the list of objects
     */
    public static void warn(Object... objects) {
        LOGGER.warn(processMessage(objects));
    }

    /**
     * console.error
     * @param objects - the list of objects
     */
    public static void error(Object... objects) {
        LOGGER.error(processMessage(objects));
    }

    /**
     * Joins the arguments into one comma-separated message.
     * Every branch of the old per-type switch appended the argument's string
     * form, so a single StringBuilder.append(Object) is equivalent — and it
     * is also null-safe (renders "null" instead of throwing NPE).
     */
    private static String processMessage(Object... objects) {
        StringBuilder sb = new StringBuilder();
        for (final Object o : objects) {
            if (sb.length() != 0) {
                sb.append(",");
            }
            sb.append(o);
        }
        return sb.toString();
    }
}
|
C++ | UHC | 681 | 2.546875 | 3 | [] | no_license | // Interface for the CApplication class.
//
//////////////////////////////////////////////////////////////////////
#ifndef _APPLICATION_H_
#define _APPLICATION_H_
#include <string>
class CApplication
{
private:
CApplication();
public:
std::string m_sCls ;
HINSTANCE m_hInst ;
HWND m_hWnd ;
DWORD m_dWinStyle ;
DWORD m_dScnX ; // Screen Width
DWORD m_dScnY ; // Screen Height
bool m_bShowCusor; // Show Cusor
public:
//Window
INT Create(HINSTANCE hInst);
INT Run();
void Cleanup();
public:
static CApplication* GetInstance(); // singleton instance
virtual LRESULT MsgProc(HWND, UINT, WPARAM, LPARAM); // óԼ
};
#endif
|
Java | UTF-8 | 1,463 | 2.796875 | 3 | [] | no_license | package com.team766.logging;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.Date;
public interface LogEntry {
public static LogEntry deserialize(ObjectInputStream objectStream) throws IOException {
StreamTags tag = StreamTags.fromInteger(objectStream.readByte());
switch(tag) {
case FORMATTED_LOG_ENTRY:
return FormattedLogEntry.deserialize(objectStream);
case LOG_ENTRY_WITH_FORMAT:
return LogEntryWithFormat.deserialize(objectStream);
case RAW_LOG_ENTRY:
return RawLogEntry.deserialize(objectStream);
default:
throw new RuntimeException("Unknown stream tag");
}
}
public void write(ObjectOutputStream objectStream);
public Severity getSeverity();
public Category getCategory();
public Date getTime();
public String format(LogReader reader);
}
enum StreamTags {
LOG_ENTRY_WITH_FORMAT,
// byte tag = LOG_ENTRY_WITH_FORMAT
// byte severity
// byte category
// long time_in_millis
// String format
// Object... args
FORMATTED_LOG_ENTRY,
// byte tag = FORMATTED_LOG_ENTRY
// byte severity
// byte category
// long time_in_millis
// int formatStringIndex
// Object... args
RAW_LOG_ENTRY;
// byte tag = RAW_LOG_ENTRY
// byte severity
// byte category
// long time_in_millis
// String message
private static final StreamTags[] VALUES = StreamTags.values();
public static StreamTags fromInteger(byte x) {
return VALUES[x];
}
} |
JavaScript | UTF-8 | 460 | 2.515625 | 3 | [] | no_license | $(document).ready(function(){
$('#logform').submit(function(e){
e.preventDefault();
var data = $("#logform").serialize()
$.ajax({
method:'POST',
url:`/ajax/user/login`,
data:data,
}).done(function(results){
if(results.length>3){
$('#login_message').html(results)
}else{
window.location.replace('/')
}
});
});
}); |
Java | UTF-8 | 2,441 | 2.265625 | 2 | [] | no_license | package com.example.cachedemo.controller;
import com.example.cachedemo.entity.User;
import com.example.cachedemo.service.UserService;
import com.example.cachedemo.service.cache.UserCacheService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;
@RequestMapping("/user")
@RestController
public class UserController {
private UserService userService;
private UserCacheService userCacheService;
// 全部用户
@GetMapping("/list")
public List<User> list() {
return userService.findAll();
}
// 全部缓存用户
@GetMapping("/cacheList")
public List<User> cacheUserList() {
return userCacheService.findAllCache();
}
// 用户详细
@GetMapping("/get")
public User get(Integer userId) {
return userService.get(userId);
}
// 缓存用户详情
@GetMapping("/getCache")
public User getCache(Integer userId) {
return userCacheService.getCache(userId);
}
// 新增用户
@GetMapping("/add")
public List<User> add(String userName) {
userService.add(userName);
return userService.findAll();
}
// 更新用户
@GetMapping("/update")
public List<User> update(Integer userId, String userName) {
userService.update(userId, userName);
return userService.findAll();
}
// 删除用户
@GetMapping("/delete")
public List<User> delete(Integer userId) {
userService.delete(userId);
return userService.findAll();
}
// 清除全部缓存用户
@GetMapping("/clearCacheList")
public List<User> clearCacheList() {
userCacheService.clearAllCache();
return userCacheService.findAllCache();
}
// 清除缓存用户
@GetMapping("/clearCache")
public User clearCache(Integer userId) {
userCacheService.clearCache(userId);
return userCacheService.getCache(userId);
}
/* Setters */
@Autowired
public void setUserService(UserService userService) {
this.userService = userService;
}
@Autowired
public void setUserCacheService(UserCacheService userCacheService) {
this.userCacheService = userCacheService;
}
/* Setters */
}
|
Python | UTF-8 | 1,370 | 3.390625 | 3 | [
"MIT"
] | permissive | # py-sorting <http://github.com/gwtw/py-sorting>
# Copyright 2016 Daniel Imms <http://www.growingwiththeweb.com>
# Released under the MIT license <http://github.com/gwtw/py-sorting/blob/master/LICENSE>
import math
def sort(array, radix=10):
    """LSD radix sort for a list of integers; returns the sorted list.

    Negative values are handled by offsetting every value by ``min(array)``.
    ``radix`` selects the digit base.

    Bug fix vs. the original: digits are extracted with exact integer
    arithmetic (``//`` and ``%``) instead of float division + ``math.floor``,
    which silently produced wrong digits for integers beyond 2**53.
    """
    if len(array) == 0:
        return array

    min_value = min(array)
    max_value = max(array)

    # One stable counting-sort pass per digit, least significant first.
    exponent = 1
    while (max_value - min_value) >= exponent:
        array = countingSortByDigit(array, radix, exponent, min_value)
        exponent *= radix
    return array


def countingSortByDigit(array, radix, exponent, minValue):
    """Stable counting sort of ``array`` keyed on the digit
    ``((value - minValue) // exponent) % radix``."""
    buckets = [0] * radix
    output = [None] * len(array)

    # Count frequencies of each digit.
    for value in array:
        buckets[(value - minValue) // exponent % radix] += 1

    # Cumulate: buckets[d] becomes one past the last slot for digit d.
    for digit in range(1, radix):
        buckets[digit] += buckets[digit - 1]

    # Fill output from the back so equal digits keep their relative order.
    for value in reversed(array):
        digit = (value - minValue) // exponent % radix
        buckets[digit] -= 1
        output[buckets[digit]] = value

    return output
|
C# | UTF-8 | 1,436 | 2.53125 | 3 | [] | no_license | using UnityEngine;
using System.Collections;
public class Music : MonoBehaviour
{
//public
// button to start music
public KeyCode play;
// button to change music track
public KeyCode changeTrack;
// value for a parameter in FMOD
public float paramValue;
//Private
// number of shells close to player
private float shellNum = 0f;
// FMOD declerations stuff
[FMODUnity.EventRef]
public string soundEvent;
FMOD.Studio.EventInstance sound;
// Use this for initialization
void Start ()
{
sound = FMODUnity.RuntimeManager.CreateInstance (soundEvent);
}
// add +1 to shellNum if a shell is close to player
void ShellNumPlus()
{
shellNum++;
//Debug.Log (shellNum);
}
// subtract -1 from shellNum when a shell is no longer considered close to player
void ShellNumNeg()
{
shellNum--;
//Debug.Log (shellNum);
}
// play sound function
void PlaySound(float value)
{
sound.setParameterValue ("Music", value);
sound.start ();
}
void ChangeParam()
{
paramValue = paramValue + 1f;
if (paramValue >= 8f)
{
paramValue = 0f;
}
}
// Update is called once per frame
void Update ()
{
// starts the ChangeParam function
if (Input.GetKeyDown (changeTrack) && changeTrack != null)
{ ChangeParam ();
}
// start the sound function and send along paramValue
if (Input.GetKeyDown (play) && play != null)
{
PlaySound (paramValue);
}
}
}
|
Go | UTF-8 | 434 | 2.578125 | 3 | [] | no_license | package dataframe
func (df *DataFrame) FindHeaderIndex(name string) int {
for i, v := range df.Headers {
if v == name {
return i
}
}
return -1
}
func (df *DataFrame) FindColumnIndex(name string) int {
for i, v := range df.Headers {
if v == name {
return i - 1
}
}
return -1
}
func (df *DataFrame) FindLabelIndex(name string) int {
for i, v := range df.Labels {
if v == name {
return i
}
}
return -1
}
|
Shell | UTF-8 | 2,082 | 4.40625 | 4 | [] | no_license | #!/bin/bash
set -eu
function print_usage_and_die
{
local errmsg
errmsg=${1:-}
cat >&2 << EOF
$1
usage: $0 SERVER NETDEF
Setup uniquely named xenserver network mapped to physical devices
positional arguments:
SERVER XenServer
NETDEF Network definition name:phy:vlan
Example:
Set up datacenter-pub as a non-tagged on eth1:
$0 xenserver1 datacenter-pub:eth1:-1
and datacenter-vm as vlan 16 on eth1:
$0 xenserver1 datacenter-vm:eth1:16
EOF
exit 1
}
function bash_on() {
local server
server="$1"
shift
ssh -q \
-o Batchmode=yes \
-o StrictHostKeyChecking=no \
-o UserKnownHostsFile=/dev/null \
"$server" bash -s -- "$@"
}
function assert_unique_network() {
local name
name="$1"
network=$(xe network-list name-label="$name" --minimal)
if [ -z "$network" ]; then
echo "The given network does not exist" >&2
exit 1
else
if echo "$network" | grep -q ","; then
echo "Multiple networks with the name $name" >&2
exit 1
fi
fi
}
function create_network() {
local name
local dev
local vlan
name=$(echo "$1" | cut -d":" -f 1)
dev=$(echo "$1" | cut -d":" -f 2)
vlan=$(echo "$1" | cut -d":" -f 3)
local network
local pif
network=$(xe pif-list VLAN="$vlan" device="$dev" params=network-uuid --minimal)
if [ -z "$network" ]; then
if [ "$vlan" = "-1" ]; then
echo "Not implemented" >&2
exit 1
fi
network=$(xe network-create name-label="$name")
pif=$(xe pif-list device="$dev" VLAN=-1 --minimal)
xe vlan-create network-uuid=$network pif-uuid=$pif vlan=$vlan
fi
xe network-param-set uuid=$network name-label="$name"
assert_unique_network "$name"
}
if [ "bash" == "$0" ]; then
set -eux
$@
else
SERVER="${1-$(print_usage_and_die "No XenServer specified")}"
NETDEF="${2-$(print_usage_and_die "No network definition given")}"
cat $0 | bash_on "$SERVER" create_network "$NETDEF"
fi
|
PHP | UTF-8 | 330 | 2.609375 | 3 | [] | no_license | <?php
class Conexao{
    /**
     * Opens a PDO connection to the local 'app_loja_virtual' MySQL database.
     * On failure the PDOException message is echoed and null is returned.
     */
    public function conexao(){
        try {
            return new PDO('mysql:host=localhost;dbname=app_loja_virtual', 'root', '');
        } catch (PDOException $erro) {
            echo '<p>' . $erro->getMessage() . '</p>';
        }
    }
}
?> |
C++ | UTF-8 | 5,971 | 3.03125 | 3 | [] | no_license | //
// SOM.h
// TinyBrain
//
// Created by Tim Omernick on 10/7/16.
// Copyright © 2016 Tim Omernick. All rights reserved.
//
#ifndef __SOM_H__
#define __SOM_H__
#include "Neuron.h"
#include <vector>
using namespace std;
// Input data is always a vector of floats.
// (vector/pair come from namespace std, pulled in by "using namespace std" above.)
typedef vector<float> Data;
// Label is a vector of arbitrary dimension and data type.
// It could be another float vector, or one or multiple strings, or anything else.
template <typename LabelType>
using Label = vector<LabelType>;
// A pair of data->label. This is the basic building block of a data set.
template <typename LabelType>
using DataLabelPair = pair<Data, Label<LabelType>>;
// A data set consists of multiple data->label pairs.
template <typename LabelType>
using DataSet = vector<DataLabelPair<LabelType>>;
template<typename LabelType>
class SOM {
public:
SOM(size_t inputSize, size_t outputSize, size_t numNeurons) : mIterationIndex(0)
{
size_t neuronsPerRow = (size_t)sqrtf((float)numNeurons);
size_t numRows = (size_t)ceilf((float)numNeurons / (float)neuronsPerRow);
for (uint32_t i = 0; i < numNeurons; i++) {
uint32_t row = i / neuronsPerRow;
uint32_t col = (i % neuronsPerRow);
float x = (float)col / (float)neuronsPerRow;
float y = (float)row / (float)numRows;
Neuron *neuron = new Neuron(inputSize, x, y);
mNeurons.push_back(neuron);
neuron->RandomizeWeights();
}
}
size_t GetNumNeurons() const
{
return mNeurons.size();
}
Data GetNeuronWeights(size_t neuronIndex) const
{
return mNeurons[neuronIndex]->GetWeights();
}
void SetNeuronWeights(uint32_t neuronIndex, const Data& data)
{
mNeurons[neuronIndex]->SetWeights(data);
}
void GetNeuronPosition(size_t neuronIndex, float& x, float& y) const
{
mNeurons[neuronIndex]->GetPosition(x, y);
}
float GetDistance(const Data& v0, const Data& v1)
{
float sumSq = 0.0f;
size_t dimension = v0.size();
for (uint32_t i = 0; i < dimension; i++) {
float d = v1[i] - v0[i];
sumSq += (d * d);
}
return sumSq;
}
uint32_t GetBestMatch(const Data& data)
{
uint32_t closestNeuronIndex = 0;
float closestDist = FLT_MAX;
size_t numNeurons = GetNumNeurons();
for (uint32_t i = 0; i < numNeurons; i++) {
Data neuronWeights = GetNeuronWeights(i);
float dist = GetDistance(neuronWeights, data);
if (dist < closestDist) {
closestDist = dist;
closestNeuronIndex = i;
}
}
return closestNeuronIndex;
}
vector<pair<uint32_t, float>> GetNeighborhood(uint32_t neuronIndex, float radiusSq)
{
vector<pair<uint32_t, float>> neighborhood;
float neuronX = 0.0f;
float neuronY = 0.0f;
GetNeuronPosition(neuronIndex, neuronX, neuronY);
size_t numNeurons = GetNumNeurons();
for (uint32_t i = 0; i < numNeurons; i++) {
float testX = 0.0f;
float testY = 0.0f;
GetNeuronPosition(i, testX, testY);
float dx = testX - neuronX;
float dy = testY - neuronY;
float distSq = (dx * dx) + (dy * dy);
if (distSq <= radiusSq) {
neighborhood.push_back(pair<uint32_t, float>(i, distSq));
}
}
return neighborhood;
}
Data AdjustWeights(const Data& v0, const Data& v1, float t)
{
size_t dimension = v0.size();
Data result;
result.resize(dimension);
for (uint32_t i = 0; i < dimension; i++) {
result[i] = v0[i] + ((v1[i] - v0[i]) * t);
}
return result;
}
// Train the map with the given label type.
void Train(const DataSet<LabelType>& dataSet)
{
const size_t maxIterations = 50000;
float maxRadius = 0.5f;
float neighborhoodRadius = maxRadius * exp(-(float)mIterationIndex / maxIterations);
float neighborhoodRadiusSq = neighborhoodRadius * neighborhoodRadius;
const float startLearningRate = 0.1f;
float learningRate = startLearningRate * exp(-(float)mIterationIndex / (float)maxIterations);
if ((mIterationIndex % 1000) == 0) {
printf("it=%d, radius=%f, learningRate=%f\n", (int)mIterationIndex, neighborhoodRadius, learningRate);
}
size_t dataSetSize = dataSet.size();
for (uint32_t i = 0; i < dataSetSize; i++) {
uint32_t dataSetIndex = (uint32_t)random() % (uint32_t)dataSetSize;
Data data = dataSet[dataSetIndex].first;
//Label<LabelType> label = dataSet[i].second;
uint32_t bestMatch = GetBestMatch(data);
vector<pair<uint32_t, float>> neighborhood = GetNeighborhood(bestMatch, neighborhoodRadiusSq);
for (const auto& neighborPair : neighborhood) {
uint32_t neuronIndex = neighborPair.first;
float distToBestSq = neighborPair.second;
float influence = exp(-distToBestSq / (2.0f * neighborhoodRadiusSq)) * learningRate;
Data oldWeights = GetNeuronWeights(neuronIndex);
Data newWeights = AdjustWeights(oldWeights, data, influence);
SetNeuronWeights(neuronIndex, newWeights);
}
}
mIterationIndex++;
}
// Classify the input, returning the given label type.
Label<LabelType> Map(const Data& input)
{
}
private:
vector<Neuron*> mNeurons;
size_t mIterationIndex;
};
#endif /* __SOM_H__ */
|
Java | UTF-8 | 1,736 | 2.265625 | 2 | [] | no_license | package com.diegovaldesjr.tennistats.model;
import android.content.ContentValues;
import com.diegovaldesjr.tennistats.data.TennistatsContract;
/**
* Created by diego on 27/11/2017.
*/
public class Saque {
int indice, idSet, zona;
String tipoSaque, tipoGolpe;
public Saque(int indice, int idSet, int zona, String tipoSaque, String tipoGolpe) {
this.indice = indice;
this.idSet = idSet;
this.zona = zona;
this.tipoSaque = tipoSaque;
this.tipoGolpe = tipoGolpe;
}
public Saque(){
}
public String getTipoSaque() {
return tipoSaque;
}
public void setTipoSaque(String tipoSaque) {
this.tipoSaque = tipoSaque;
}
public String getTipoGolpe() {
return tipoGolpe;
}
public void setTipoGolpe(String tipoGolpe) {
this.tipoGolpe = tipoGolpe;
}
public int getIndice() {
return indice;
}
public void setIndice(int indice) {
this.indice = indice;
}
public int getIdSet() {
return idSet;
}
public void setIdSet(int idSet) {
this.idSet = idSet;
}
public int getZona() {
return zona;
}
public void setZona(int zona) {
this.zona = zona;
}
public ContentValues saqueToContentValues() {
ContentValues values = new ContentValues();
values.put(TennistatsContract.SaqueEntry.INDICE, indice);
values.put(TennistatsContract.SaqueEntry.ID_SET, idSet);
values.put(TennistatsContract.SaqueEntry.TIPO_GOLPE, tipoGolpe);
values.put(TennistatsContract.SaqueEntry.TIPO_SAQUE, tipoSaque);
values.put(TennistatsContract.SaqueEntry.ZONA, zona);
return values;
}
}
|
PHP | UTF-8 | 1,820 | 3.15625 | 3 | [] | no_license | <?php
/**
* Created by PhpStorm.
* User: itily
* Date: 05.06.2023
* Time: 13:30
*/
namespace cryptoscan\entity;
use cryptoscan\exception\InvalidArgumentException;
/**
* Данные авторизации
*
* Class Authorize
* @package cryptoscan\entity
*/
class Authorize
{
    /**
     * Public key.
     *
     * @var string
     */
    private $publicKey;

    /**
     * Authorization credentials.
     *
     * @var string
     */
    private $credentials;

    /**
     * @param string $publicKey
     * @param string $credentials
     * @throws InvalidArgumentException when either value is empty or not a string
     */
    public function __construct($publicKey, $credentials)
    {
        // Deduplicated validation: the original repeated the exact same two
        // checks in assertPublicKey() and assertCredentials().
        self::assertNonEmptyString($publicKey, 'PublicKey');
        self::assertNonEmptyString($credentials, 'Credentials');
        $this->publicKey = $publicKey;
        $this->credentials = $credentials;
    }

    /**
     * Asserts that the value is a non-empty string. The exception messages
     * are byte-identical to the original per-field assertions.
     *
     * @param mixed $value
     * @param string $name field name used in the exception message
     * @return void
     */
    private static function assertNonEmptyString($value, $name)
    {
        if (empty($value) === true) {
            throw new InvalidArgumentException("$name can not to be empty");
        }
        if (is_string($value) === false) {
            throw new InvalidArgumentException("$name is not valid");
        }
    }

    /**
     * @return string
     */
    public function getPublicKey()
    {
        return $this->publicKey;
    }

    /**
     * @return string
     */
    public function getCredentials()
    {
        return $this->credentials;
    }
}
Python | UTF-8 | 16,425 | 2.90625 | 3 | [
"BSD-3-Clause"
] | permissive | # -*- coding: utf-8 -*-
#
# License: This module is released under the terms of the LICENSE file
# contained within this applications INSTALL directory
"""
Utility functions for model generation
"""
# -- Coding Conventions
# http://www.python.org/dev/peps/pep-0008/ - Use the Python style guide
# http://sphinx.pocoo.org/rest.html - Use Restructured Text for
# docstrings
# -- Public Imports
import logging
import math
import numpy as np
import pandas as pd
from datetime import datetime
# -- Private Imports
# -- Globals
logger = logging.getLogger(__name__)
# Maps datetime.weekday() values (0=Monday .. 6=Sunday) to the pandas weekly
# frequency alias anchored on that weekday, e.g. 0 -> 'W-MON'.
dict_wday_name = {
    0: 'W-MON',
    1: 'W-TUE',
    2: 'W-WED',
    3: 'W-THU',
    4: 'W-FRI',
    5: 'W-SAT',
    6: 'W-SUN',
}
# -- Exception classes
# -- Functions
def logger_info(msg, data):
    # Convenience wrapper: logs the message followed by the payload on its
    # own line. The payload is passed as a lazy %s argument so it is only
    # stringified when the INFO level is enabled.
    fmt = msg + '\n%s'
    logger.info(fmt, data)
def array_transpose(a):
    """
    Turn a 1-D numpy array of shape (n,) into a column vector of shape (n, 1).

    :param a: input array with shape (n,)
    :type a: numpy.Array
    :return: the same values as a column vector, shape (n, 1)
    :rtype: numpy.Array
    """
    return np.transpose(a[np.newaxis, :])
# TODO: rework to support model composition
def model_requires_scaling(model):
    """
    Tell whether a :py:class:`anticipy.forecast_models.ForecastModel`
    needs its a_x input scaled down before fitting.

    Only the 'logistic' and 'sigmoid' model families require scaling.

    :param model: a get_model_<modeltype> function from
        :py:mod:`anticipy.model.periodic_models` or
        :py:mod:`anticipy.model.aperiodic_models`, or None
    :type model: function or None
    :return: True if the model is logistic or sigmoidal
    :rtype: bool
    """
    if model is None:
        return False
    return model.name in ('logistic', 'sigmoid')
def apply_a_x_scaling(a_x, model=None, scaling_factor=100.0):
    """
    Scale down the x axis for forecast models that require it.

    :param a_x: x axis of time series
    :type a_x: numpy array
    :param model: a :py:class:`anticipy.forecast_models.ForecastModel`
    :type model: function or None
    :param scaling_factor: divisor applied to a_x for logistic-type models
    :type scaling_factor: float
    :return: a_x divided by scaling_factor when the model needs scaling,
        otherwise a_x unchanged
    :rtype: numpy array
    """
    if not model_requires_scaling(model):
        return a_x
    return a_x / scaling_factor
# Approximate number of samples per year for each supported frequency prefix;
# used to convert extrapolate_years into a number of extra samples.
dict_freq_units_per_year = dict(
    A=1.0,
    Y=1.0,
    D=365.0,
    W=52.0,
    M=12,
    Q=4,
    H=24 * 365.0
)
# Maps a frequency prefix to the keyword argument name expected by
# pandas.DateOffset, e.g. 'W' -> DateOffset(weeks=...).
# NOTE(review): there is no entry for 'Q' (quarterly), so get() would return
# None for it in get_s_x_extrapolate - confirm quarterly series are handled
# upstream.
dict_dateoffset_input = dict(
    Y='years',
    A='years',
    M='months',
    W='weeks',
    D='days',
    H='hours'
)
def get_normalized_x_from_date(s_date):
    """
    Return the number of days elapsed since the Monday of the first date.

    :param s_date: series of timestamps; the first element anchors the origin
    :type s_date: pandas.Series
    :return: integer day offsets relative to that Monday
    :rtype: pandas.Series
    """
    first_date = s_date.iloc[0]
    # Anchor the origin on the Monday of the week containing the first date
    monday = first_date - pd.Timedelta(days=first_date.weekday())
    return (s_date - monday).dt.days
def get_s_x_extrapolate(
        date_start_actuals,
        date_end_actuals,
        model=None,
        freq=None,
        extrapolate_years=2.5,
        scaling_factor=100.0,
        x_start_actuals=0.):
    """
    Return a_x series with DateTimeIndex, covering the date range for the
    actuals, plus a forecast period.

    :param date_start_actuals: date or numeric index for first actuals sample
    :type date_start_actuals: str, datetime, int or float
    :param date_end_actuals: date or numeric index for last actuals sample
    :type date_end_actuals: str, datetime, int or float
    :param extrapolate_years: length of the forecast period, in years
    :type extrapolate_years: float
    :param model: model used to decide whether a_x needs scaling
    :type model: function
    :param freq: Time unit between samples. Supported units are 'W' for weekly
        samples, or 'D' for daily samples. (untested) Any date unit or time
        unit accepted by numpy should also work, see
        https://docs.scipy.org/doc/numpy-1.13.0/reference/arrays.datetime.html#arrays-dtypes-dateunits # noqa
    :type freq: basestring
    :param scaling_factor: Value used for scaling a_x for certain model
        functions
    :type scaling_factor: float
    :param x_start_actuals: numeric index for the first actuals sample
    :type x_start_actuals: int
    :return: Series of floats with DateTimeIndex. To be used as (a_date, a_x)
        input for a model function.
    :rtype: pandas.Series

    The returned series covers the actuals time domain plus a forecast period
    lasting extrapolate_years, in years.
    The number of additional samples for the forecast period is
    time_resolution * extrapolate_years, rounded down
    """
    # Date-based branch: inputs are dates (strings or datetimes)
    if isinstance(date_start_actuals, str) or \
            isinstance(date_start_actuals, datetime):  # Use dates if available
        date_start_actuals = pd.to_datetime(date_start_actuals)
        date_end_actuals = pd.to_datetime(date_end_actuals)
        # For weekly series the frequency alias must be anchored on the same
        # weekday as the first sample (e.g. 'W-MON' for a Monday start)
        weekday_adjustment = date_start_actuals.weekday()
        expected_freq = dict_wday_name.get(weekday_adjustment)
        if freq is None:  # Default frequency
            freq = expected_freq
        else:
            if freq.startswith('W'):
                assert expected_freq == freq, \
                    'Error: with weekly frequency, freq ' \
                    'parameter must match weekday of date_start_actuals:' \
                    ' {} - {} , {}' \
                    .format(freq, expected_freq, date_start_actuals)
        freq_short = freq[0:1]  # Changes e.g. W-MON to W
        # freq_units_per_year = 52.0 if freq_short=='W' else 365.0
        # Todo: change to dict to support more frequencies
        freq_units_per_year = dict_freq_units_per_year.get(freq_short, 365.0)
        # Convert the horizon in years into a DateOffset for this frequency
        extrapolate_units = extrapolate_years * freq_units_per_year
        offset_input = {dict_dateoffset_input.get(freq_short):
                        extrapolate_units}
        date_end_forecast = date_end_actuals + \
            pd.DateOffset(**offset_input)
        i_date = pd.date_range(
            date_start_actuals,
            date_end_forecast,
            freq=freq,
            name='date')
        s_date = pd.Series(i_date)
        # Get days passed since date_start, then add x_start_actuals
        s_x = (s_date - date_start_actuals).dt.days + x_start_actuals
        s_x.index = i_date
    else:
        # Otherwise, use numeric index
        # we extrapolate future samples equal to 100*extrapolate_years
        index = pd.Index(
            np.arange(
                date_start_actuals,
                date_end_actuals +
                100 *
                extrapolate_years))
        # NOTE(review): x_start_actuals appears to be applied twice here -
        # once as the start of np.arange and again via the final '+' -
        # confirm whether that offset doubling is intended.
        s_x = pd.Series(
            index=index,
            data=np.arange(
                x_start_actuals,
                x_start_actuals + index.size)) + x_start_actuals
    # Logistic/sigmoid models operate on a compressed x axis
    if model_requires_scaling(model):
        s_x = s_x / scaling_factor
    return s_x
# Forecast Selection Functions
def get_aic_c(fit_error, n, n_params):
    """
    This function implements the corrected Akaike Information Criterion (AICc)
    taking as input a given fit error and data/model degrees of freedom.
    We assume that the residuals of the candidate model are distributed
    according to independent identical normal distributions with zero mean.
    Hence, we can define the AICc as

    .. math::

        AICc = AIC + \\frac{2k(k+1)}{n-k-1} =
        2k + n \\log\\left(\\frac{E}{n}\\right) + \\frac{2k(k+1)}{n-k-1},

    where :math:`k` and :math:`n` denote the model and data degrees of
    freedom respectively, and :math:`E`
    denotes the residual error of the fit.

    :param fit_error: Residual error of the fit
    :type fit_error: float
    :param n: Data degrees of freedom
    :type n: int
    :param n_params: Model degrees of freedom
    :type n_params: int
    :return: Corrected Akaike Information Criterion (AICc)
    :rtype: float
    :raises ValueError: when n <= n_params + 1 or n == 0 (the correction
        term would divide by zero or be negative)

    Note:

    - see AIC in `Wikipedia article on the AIC
      <https://en.wikipedia.org/wiki/Akaike_information_criterion>`_.
    """
    # Corner cases that would blow up with division by zero
    if (n <= n_params + 1) or (n == 0):
        aux = n - n_params - 1
        raise ValueError(
            'ERROR: Time series too short for AIC_C: (n = ' +
            str(n) +
            ', n - n_params - 1 = ' +
            str(aux) +
            ')')
    if fit_error == 0.0:
        if n_params == 1:
            return -float("inf")
        # With multiple perfect fits, -inf for all of them would make model
        # selection ambiguous; substitute a tiny error so that models with
        # fewer parameters still rank better.
        fit_error = 10 ** -320
    # Single AICc expression (the original duplicated this formula verbatim
    # in two branches).
    return (n * math.log(fit_error / n) + 2 * n_params +
            (2 * n_params * (n_params + 1) / (n - n_params - 1)))
def get_s_aic_c_best_result_key(s_aic_c):
    """
    Return the key of the best (lowest-AICc) entry in a series of AICc
    values, or None when the series is empty or all-null.

    Required because aic_c can be -inf, a value not compatible with
    pd.Series.argmin(); -inf entries are therefore handled explicitly by
    returning the index label of the first one.

    :param s_aic_c: AICc value per candidate model, indexed by model key
    :type s_aic_c: pandas.Series
    :return: key of the best result, or None
    """
    if s_aic_c.empty or s_aic_c.isnull().all():
        return None
    a_is_neg_inf = (s_aic_c == -np.inf).to_numpy()
    if a_is_neg_inf.any():
        # BUG FIX: the original used "(key,) = ...nonzero()[0]", a tuple
        # unpack that raised ValueError whenever MORE than one entry was
        # -inf - precisely the multiple-perfect-fits case this branch is
        # meant to handle. Take the first matching position instead.
        return s_aic_c.index[np.flatnonzero(a_is_neg_inf).min()]
    return s_aic_c.argmin()
def detect_freq(a_date):
    """
    Infer a pandas frequency alias from a series (or 'date' column) of
    timestamps, based on the smallest gap between consecutive sorted dates.

    Returns one of 'H', 'W-***', 'M'/'MS', 'Q', 'Y'/'YS', 'D', or None when
    the frequency cannot be determined (e.g. fewer than 2 distinct dates, or
    no 'date' column in the input dataframe).
    """
    if isinstance(a_date, pd.DataFrame):
        if 'date' not in a_date.columns:
            return None
        else:
            a_date = a_date.date
    # Smallest gap between consecutive distinct dates drives the decision
    s_date = pd.Series(a_date).sort_values().drop_duplicates()
    min_date_delta = s_date.diff().min()
    if pd.isnull(min_date_delta):
        # Fewer than 2 distinct dates - cannot infer a frequency
        return None
    elif min_date_delta == pd.Timedelta(1, unit='h'):
        return 'H'
    elif min_date_delta == pd.Timedelta(7, unit='D'):
        # Weekly seasonality - need to determine day of week
        min_date_wday = s_date.min().weekday()
        return dict_wday_name.get(min_date_wday, 'W')
    elif min_date_delta >= pd.Timedelta(28, unit='d') and \
            min_date_delta <= pd.Timedelta(31, unit='d'):
        # MS is month start, M is month end. We use MS if all dates match first
        # of month
        if s_date.dt.day.max() == 1:
            return 'MS'
        else:
            return 'M'
    elif min_date_delta >= pd.Timedelta(89, unit='d') and \
            min_date_delta <= pd.Timedelta(92, unit='d'):
        # Quarter lengths vary between 89 and 92 days
        return 'Q'
    elif min_date_delta >= pd.Timedelta(365, unit='d') and \
            min_date_delta <= pd.Timedelta(366, unit='d'):
        # YS is year start, Y is year end. We use YS if all dates fall on
        # January 1st
        if s_date.dt.day.max() == 1 and s_date.dt.month.max() == 1:
            return 'YS'
        else:
            return 'Y'
    elif min_date_delta >= pd.Timedelta(23, unit='h'):
        # and min_date_delta <= pd.Timedelta(1, unit='d')\
        return 'D'
    else:
        return None
def interpolate_df(df, include_mask=False, interpolate_y=True):
    """
    Fill gaps in a time series dataframe: reindex to a regular x (or date)
    axis and interpolate the missing rows.

    :param df: input dataframe with an 'x' column and optionally a 'date'
        column; when 'date' is present the gap detection works on dates
    :param include_mask: when True, add a boolean 'is_gap_filled' column
        marking the rows that were created by the gap filling
    :param interpolate_y: when True, interpolate values for the inserted
        rows; when False, leave them as NaN
    :return: dataframe with a regular axis (the input is returned unchanged
        when it already has regular intervals)
    :raises ValueError: when the series has multiple samples per x or per
        date (gap filling is not supported in that case)
    """
    # In a dataframe with date gaps, replace x-axis gaps with interpolation
    if 'date' not in df.columns:  # interpolate by x column
        if df.x.drop_duplicates().diff().nunique() <= 1:  # Regular intervals - no gaps
            return df
        else:
            # With duplicate samples, gap filling not supported
            if df.x.duplicated().any():
                raise ValueError(
                    'Cannot fill gaps on series with multiple '
                    'samples per x')
            # Reindex to a dense integer range and interpolate the new rows
            df_result = (
                df.set_index('x')
                .reindex(
                    pd.RangeIndex(df.x.min(), df.x.max() + 1, name='x'))
                .pipe(lambda x: x.interpolate() if interpolate_y else x)
                .reset_index()
            )
    else:  # df has date column - interpolate by date
        s_date_diff = df.date.drop_duplicates().diff()
        if s_date_diff.pipe(pd.isnull).all():
            s_date_diff_first = None
        else:
            s_date_diff_first = s_date_diff.loc[s_date_diff.first_valid_index(
            )]
        freq = detect_freq(df)
        # If space between samples is constant, no interpolation is required
        # Exception: in sparse series with date gaps, we can randomly get
        # gaps that are constant but don't match any real period, e.g. 8 days
        if s_date_diff.nunique() <= 1 and not \
                (freq == 'D' and
                 s_date_diff_first > pd.to_timedelta(1, 'day')):
            # TODO: Add additional check for
            # e.g. 2-sample series with 8-day gap
            return df
        # At this point, we know there are irregular intervals
        # We need to check if there are duplicate samples
        # - if that is the case, we crash b.c. scenario not supported by asfreq
        if df.date.duplicated().any():
            raise ValueError('Cannot fill gaps on series with multiple '
                             'samples per date')
        # asfreq inserts NaN rows for the missing dates
        df_result = (
            df.set_index('date')
            .asfreq(freq)
            .pipe(lambda x: x.interpolate() if interpolate_y else x)
            .reset_index()
        )
        if 'x' in df.columns:
            # x is always interpolated (even when interpolate_y is False) so
            # the axis stays usable; cast back to the original dtype
            df_result['x'] = df_result['x'].interpolate().astype(df.x.dtype)
    if include_mask:
        # Rows whose x was not present in the input were gap-filled
        df_result['is_gap_filled'] = ~df_result.x.isin(df.x)
    return df_result
# Functions to check for multiplicative/additive model composition
def _fit_linear(df):
    """Fit a linear model to df and return df with a 'y_pred' column of the
    fitted values merged in.

    Imports are local to avoid a circular dependency with anticipy.forecast.
    :raises AssertionError: when df has fewer than 4 rows
    """
    from anticipy.forecast import fit_model, extrapolate_model
    from anticipy.forecast_models import model_linear
    assert df.index.size >= 4, 'Linear model requires 4+ samples'
    dict_result = fit_model(model_linear, df)
    # First row of the fit metadata holds the fitted parameter vector
    params = dict_result.get('metadata').params.iloc[0]
    # Predict over the actuals' x range only (no forecast horizon)
    df_pred = extrapolate_model(
        model_linear, params, df.x.min(), df.x.max() + 1,
        extrapolate_years=0)
    df_out = (
        df.merge(
            df_pred
            .rename(columns=dict(y='y_pred'))
            # RENAME AXIS ONLY WORKS FOR NUMERIC SERIES
            # - TIME SERIES WOULD BE DATE
            .rename_axis('x')
            .reset_index(),
            how='left')
    )
    return df_out
def _get_mult_sum_stats(df, freq='M'):
    """
    - fit linear model
    - then look at stats of residuals, to check for multiplicative composition

    Returns one row per resampling period (freq, monthly by default) with the
    period's mean level ('y_mean'), residual variance ('res_var') and a
    'corr_mean_to_var' column holding the correlation between the two
    (constant across all rows).
    """
    # Fit linear model, get residuals
    df_pred = _fit_linear(df)
    df_res = df_pred.assign(res=df_pred.y_pred - df_pred.y)
    # Get statistics from residuals
    # Mean level of y per period
    df_mean = (
        df_res
        .set_index('date').y
        .resample(freq).agg([np.mean]).rename(columns=dict(mean='y_mean'))
    )
    # Variance of the linear-fit residuals per period
    df_res_var = (
        df_res
        .set_index('date').res
        .resample(freq).agg([np.var]).rename(columns=dict(var='res_var'))
    )
    df_out = df_mean.join(df_res_var)
    # A strong mean-to-variance correlation suggests multiplicative noise
    df_out['corr_mean_to_var'] = df_out.res_var.corr(df_out.y_mean)
    return df_out.reset_index()
def is_multiplicative(df, freq='M'):
    """
    For an input time series, check if model composition
    should be multiplicative.

    Return True if multiplicative is best - otherwise, use additive
    composition.

    We assume multiplicative composition is best if the residual variance
    correlates heavily (>=0.8) with the mean level, aggregating data per
    period (monthly by default). The following exceptions force additive:

    - any time series value is <= 0
    - no date information is available (only an x column)
    - less than ~2 months (60 days) of data is available
    - fewer than 4 usable samples (after dropping NaN y and zero weights)
    """
    # Multiplicative decomposition needs strictly positive values
    if (df.y <= 0).any():
        return False
    # Date information is required for period aggregation
    if 'date' not in df.columns:
        return False
    # Need a long enough history to aggregate over multiple periods
    if (df.date.max() - df.date.min()) < pd.Timedelta('60 days'):
        return False
    # Count only usable samples: non-null y, and positive weight if present
    df_valid = df.loc[~df.y.pipe(pd.isna)]
    if 'weight' in df_valid.columns:
        df_valid = df_valid.loc[df_valid.weight > 0]
    if df_valid.index.size < 4:
        # Not enough samples to fit a linear model
        return False
    df_stats = _get_mult_sum_stats(df, freq)
    # corr_mean_to_var is constant across rows; read it from the first one
    return df_stats.corr_mean_to_var.iloc[0] >= 0.8
|
Java | UTF-8 | 809 | 3.09375 | 3 | [] | no_license | package duration;
import java.time.Duration;
import java.time.Instant;
public class StringCreationStudies {

    /**
     * Measures how long it takes to create objectsCount strings via
     * {@code new String(...)}, i.e. always allocating on the heap, and
     * prints the duration.
     *
     * @param objectsCount number of strings to create
     * @return elapsed time in milliseconds
     */
    public long measureStringCreationTimeRequiredOnHeap(int objectsCount) {
        final Instant begin = Instant.now();
        int created = 0;
        while (created < objectsCount) {
            String ignored = new String("test");
            created++;
        }
        final long elapsed = Duration.between(begin, Instant.now()).toMillis();
        System.out.println("Heap:" + elapsed);
        return elapsed;
    }

    /**
     * Measures how long it takes to assign objectsCount string literals,
     * which all resolve to the same interned instance in the string pool,
     * and prints the duration.
     *
     * @param objectsCount number of iterations
     * @return elapsed time in milliseconds
     */
    public long measureStringCreationTimeRequiredInPool(int objectsCount) {
        final Instant begin = Instant.now();
        int created = 0;
        while (created < objectsCount) {
            String ignored = "test";
            created++;
        }
        final long elapsed = Duration.between(begin, Instant.now()).toMillis();
        System.out.println("Pool:" + elapsed);
        return elapsed;
    }
}
PHP | UTF-8 | 1,113 | 2.671875 | 3 | [
"MIT"
] | permissive | <?php
declare(strict_types=1);
namespace Ahc\StrapiClientBundle\Resource;
/**
 * Immutable view over the properties of a Strapi content type.
 */
class ContentProperties
{
    /** @var string unique identifier of the content type */
    private string $uid;

    /** @var string machine name of the content type */
    private string $name;

    /** @var string human readable label */
    private string $label;

    /** @var bool whether the content type is displayed */
    private bool $isDisplayed;

    /** @var Schema attribute schema of the content type */
    private Schema $schema;

    public function __construct(
        string $uid,
        string $name,
        string $label,
        bool $isDisplayed,
        Schema $schema
    ) {
        $this->uid = $uid;
        $this->name = $name;
        $this->label = $label;
        $this->isDisplayed = $isDisplayed;
        $this->schema = $schema;
    }

    public function getUid(): string
    {
        return $this->uid;
    }

    public function getName(): string
    {
        return $this->name;
    }

    public function getLabel(): string
    {
        return $this->label;
    }

    public function getIsDisplayed(): bool
    {
        return $this->isDisplayed;
    }

    public function getSchema(): Schema
    {
        return $this->schema;
    }

    /**
     * Convenience accessor: the declared type of a single schema field.
     */
    public function getFieldType(string $field): string
    {
        $attribute = $this->schema->getAttribute($field);
        return $attribute['type'];
    }
}
Python | UTF-8 | 3,145 | 3.109375 | 3 | [] | no_license | import numpy as np
from .bbo_agent import BBOAgent
from .utils import top_k_inds
from typing import Callable
from multiprocessing import Pool
from functools import partial
from itertools import product
class CEM(BBOAgent):
    """
    The cross-entropy method (CEM) for policy search is a black box optimization (BBO)
    algorithm. This implementation is based on Stulp and Sigaud (2012). Intuitively,
    CEM starts with a multivariate Gaussian distribution over policy parameter vectors.
    This distribution has mean theta and covariance matrix Sigma. It then samples some
    fixed number, K, of policy parameter vectors from this distribution. It evaluates
    these K sampled policies by running each one for N episodes and averaging the
    resulting returns. It then picks the K_e best performing policy parameter
    vectors and fits a multivariate Gaussian to these parameter vectors. The mean and
    covariance matrix for this fit are stored in theta and Sigma and this process
    is repeated.

    Parameters
    ----------
    sigma (float): exploration parameter
    theta (numpy.ndarray): initial mean policy parameter vector
    popSize (int): the population size
    numElite (int): the number of elite policies
    numEpisodes (int): the number of episodes to sample per policy
    evaluationFunction (function): evaluates the provided parameterized policy.
        input: theta_p (numpy.ndarray, a parameterized policy), numEpisodes
        output: the estimated return of the policy
    epsilon (float): small numerical stability parameter
    """

    def __init__(self, theta:np.ndarray, sigma:float, popSize:int, numElite:int, evaluationFunction:Callable, epsilon:float=0.0001):
        # Current sampling distribution: mean _theta, isotropic initial
        # covariance sigma * I
        self._theta = theta
        self._Sigma = np.eye(len(theta)) * sigma
        self.popSize = popSize
        self.numElite = numElite
        self.evaluationFunction = evaluationFunction
        self.epsilon = epsilon
        # Snapshots used by reset() to restore the initial distribution
        self._theta_initial = self._theta.copy()
        self._Sigma_initial = self._Sigma.copy()

    @property
    def name(self)->str:
        return 'CEM'

    @property
    def parameters(self)->np.ndarray:
        # Copy so callers cannot mutate the internal mean in place
        return self._theta.copy()

    def train(self)->np.ndarray:
        """Run one CEM generation: sample K candidates, evaluate them, and
        refit the Gaussian to the K_e elites. Returns the new mean."""
        K, K_e = self.popSize, self.numElite
        eps, dim = self.epsilon, self._theta.shape[0]
        # thetas has shape (K, dim): one candidate parameter vector per row
        thetas = np.random.multivariate_normal(self._theta, self._Sigma, size=K)
        # if log_evals:
        #     tmp = [self.evaluationFunction(theta, log_evals=log_evals) for theta in thetas]
        #     evals, log = zip(*tmp)
        # else:
        #     evals = [self.evaluationFunction(theta) for theta in thetas]
        evals = [self.evaluationFunction(theta) for theta in thetas]
        # NOTE(review): debug leftover - consider removing or routing
        # through a logger
        print(evals)
        top_thetas = thetas[top_k_inds(evals, K_e)]
        self._theta = top_thetas.mean(axis=0)
        # Refit covariance to the elites: einsum('ij,ik->jk') is the
        # (dim x dim) scatter matrix centered^T @ centered; eps * I keeps
        # the matrix positive definite
        centered = top_thetas - self._theta
        self._Sigma = (1/(eps + K_e)) * (np.eye(dim) * eps + np.einsum('ij,ik->jk', centered, centered))
        return self._theta

    def reset(self)->None:
        # Restore the initial sampling distribution
        self._theta = self._theta_initial.copy()
        self._Sigma = self._Sigma_initial.copy()
|
Java | UTF-8 | 10,896 | 2.140625 | 2 | [] | no_license | package edu.courseproject.client.controller.adminPageController;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import edu.courseproject.client.action.Action;
import edu.courseproject.client.connection.ConnectionServer;
import edu.courseproject.client.entity.Worker;
import edu.courseproject.client.generator.GeneratorLogin;
import edu.courseproject.client.generator.GeneratorPassword;
import edu.courseproject.client.validator.DataValidator;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.event.ActionEvent;
import javafx.fxml.FXML;
import javafx.fxml.FXMLLoader;
import javafx.fxml.Initializable;
import javafx.scene.Node;
import javafx.scene.Parent;
import javafx.scene.Scene;
import javafx.scene.control.*;
import javafx.scene.control.cell.PropertyValueFactory;
import javafx.scene.input.MouseEvent;
import javafx.stage.Stage;
import org.json.JSONObject;
import java.io.IOException;
import java.lang.reflect.Type;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.ResourceBundle;
import java.util.stream.Collectors;
public class AdminStaffPageController implements Initializable {
    // JSON (de)serializer for messages exchanged with the server.
    private Gson gson = new Gson();
    private Stage stage;

    public void setStage(Stage stage) {
        this.stage = stage;
    }

    // Singleton connection to the application server.
    private ConnectionServer datasource = ConnectionServer.getInstance();
    // Separator used to join/split the "country, town, address" string.
    private static final String SPLIT_ADDRESS = ",";
    // id of the worker currently loaded into the edit form (see fillFields).
    private long idWorkerChangeable;
    // Local cache of the workers currently shown in the table.
    private List<Worker> workers = new ArrayList<>();

    @FXML
    private Button stafButn;

    @FXML
    private TabPane tabPane;

    @FXML
    private TableView<Worker> workerTable;

    @FXML
    private TextField surnameField;

    @FXML
    private TextField nameField;

    @FXML
    private Spinner<Double> senioriteSpinner;

    @FXML
    private ComboBox<String> statusBox;

    @FXML
    private TextField phoneField;

    @FXML
    private TextField countryField;

    @FXML
    private TextField townField;

    @FXML
    private TextField addressField;
private Worker createValidWorker() {
if ((surnameField.getText().trim().isEmpty() || !DataValidator.getInstance().isNameValid(surnameField.getText().trim())) ||
(nameField.getText().trim().isEmpty() || !DataValidator.getInstance().isNameValid(nameField.getText().trim())) ||
!DataValidator.getInstance().isPhoneValid(phoneField.getText()) ||
countryField.getText().trim().isEmpty() ||
(townField.getText().trim().isEmpty() || (addressField.getText().isEmpty()))) {
return null;
} else {
Worker worker = new Worker(idWorkerChangeable, surnameField.getText(), nameField.getText(),
senioriteSpinner.getValue(), phoneField.getText(),
countryField.getText() + ", " + townField.getText() + ", " + addressField.getText());
return worker;
}
}
    /**
     * "Add" button handler: validates the form, generates credentials for
     * the new worker, sends an ADD_WORKER request to the server and adds the
     * worker to the table and the local cache. Shows an error alert when
     * validation fails.
     */
    @FXML
    void addToBD(ActionEvent event) {
        Worker worker = createValidWorker();
        if (worker != null) {
            // Login is made unique-ish by appending the worker's hash code.
            String newLogin = GeneratorLogin.generateLogin() + worker.hashCode();
            worker.setLogin(newLogin);
            String newPass = GeneratorPassword.generatePassword();
            worker.setPassword(newPass);
            worker.setRole("worker");
            worker.setStatus("active");
            JSONObject jsonObject = new JSONObject();
            jsonObject.put("action", Action.ADD_WORKER);
            jsonObject.put("worker", gson.toJson(worker));
            datasource.getWriter().println(jsonObject);
            // Optimistic update: no server acknowledgement is awaited here.
            workerTable.getItems().add(worker);
            workers.add(worker);
        } else {
            Alert alert = new Alert(Alert.AlertType.ERROR);
            alert.setTitle("Внимание!");
            alert.setContentText("Ошибка при добавлении в базу данных! Проверьте корректность введеных данных!");
            alert.showAndWait();
        }
    }
//TODO if selected item exists
@FXML
void changeInBD(ActionEvent event) {
JSONObject jsonObject = new JSONObject();
jsonObject.put("action", Action.UPDATE_WORKER);
Worker worker = createValidWorker();
if (worker != null) {
long idUser = workerTable.getSelectionModel().getSelectedItem().getIdUser();
worker.setIdUser(idUser);
worker.setStatus(statusBox.getValue());
jsonObject.put("worker", gson.toJson(worker));
datasource.getWriter().println(jsonObject.toString());
workerTable.getItems().removeAll(workers);
workers.clear();
loadWorkers();
} else {
Alert alert = new Alert(Alert.AlertType.ERROR);
alert.setTitle("Внимание!");
alert.setContentText("Ошибка при добавлении в базу данных! Проверьте корректность введеных данных!");
alert.showAndWait();
}
}
@FXML
void customerAction(ActionEvent event) throws IOException {
FXMLLoader loader = new FXMLLoader(getClass().getResource("/view/adminview/admin_customer_page.fxml"));
Parent staffViewParent = loader.load();
Scene staffViewScene = new Scene(staffViewParent);
Stage window = (Stage) ((Node) event.getSource()).getScene().getWindow();
AdminCustomerPageController adminCustomerPageController = loader.getController();
adminCustomerPageController.setStage(window);
window.setScene(staffViewScene);
window.show();
}
@FXML
void orderAction(ActionEvent event) throws IOException {
FXMLLoader loader = new FXMLLoader(getClass().getResource("/view/adminview/admin_adopt_order_page.fxml"));
Parent staffViewParent = loader.load();
Scene staffViewScene = new Scene(staffViewParent);
Stage window = (Stage) ((Node) event.getSource()).getScene().getWindow();
AdminOrderPageController adminOrderPageController = loader.getController();
adminOrderPageController.setStage(window);
window.setScene(staffViewScene);
window.show();
}
@FXML
void statisticsAction(ActionEvent event) throws IOException {
FXMLLoader loader = new FXMLLoader(getClass().getResource("/view/adminview/admin_statistic_page.fxml"));
Parent staffViewParent = loader.load();
Scene staffViewScene = new Scene(staffViewParent);
Stage window = (Stage) ((Node) event.getSource()).getScene().getWindow();
AdminStatisticsPageController adminStatisticsPageController = loader.getController();
adminStatisticsPageController.setStage(window);
window.setScene(staffViewScene);
window.show();
}
    /**
     * Called by JavaFX after FXML injection: configures the table, the
     * status combo box and the seniority spinner, then loads the workers
     * from the server.
     */
    @Override
    public void initialize(URL location, ResourceBundle resources) {
        // This page is already the staff page; disable its nav button.
        stafButn.setDisable(true);
        fillTabPane(workerTable);
        tabPane.getSelectionModel().select(0);
        ObservableList<String> combobox = FXCollections.observableArrayList();
        combobox.add("active");
        combobox.add("blocked");
        combobox.add("fired");
        statusBox.setItems(combobox);
        // Seniority: 0.5 .. 56 in steps of 0.5, starting at 0.5.
        SpinnerValueFactory<Double> valueFactory = new SpinnerValueFactory.DoubleSpinnerValueFactory(0.5, 56, 0.5, 0.5);
        senioriteSpinner.setValueFactory(valueFactory);
        senioriteSpinner.setEditable(true);
        // Hide columns 6 and 7 - NOTE(review): presumably login/password;
        // confirm against the FXML column order.
        workerTable.getColumns().get(6).setVisible(false);
        workerTable.getColumns().get(7).setVisible(false);
        loadWorkers();
    }
    //FIXME
    // Not yet implemented: should re-query the server and repopulate the table.
    // NOTE(review): wired from FXML; clicking the refresh control currently does nothing.
    @FXML
    void refreshRequest(ActionEvent event) {
    }
@FXML
void fillFields(MouseEvent event) {
idWorkerChangeable = workerTable.getSelectionModel().getSelectedItem().getIdWorker();
surnameField.setText(workerTable.getSelectionModel().getSelectedItem().getSurname());
nameField.setText(workerTable.getSelectionModel().getSelectedItem().getName());
senioriteSpinner.getValueFactory().setValue(workerTable.getSelectionModel().getSelectedItem().getSeniority());
statusBox.setValue(workerTable.getSelectionModel().getSelectedItem().getStatus());
phoneField.setText(workerTable.getSelectionModel().getSelectedItem().getPhone());
String addressWorker = workerTable.getSelectionModel().getSelectedItem().getRegionWorker();
List<String> elementsRegion = Arrays.asList(addressWorker.split(SPLIT_ADDRESS));
countryField.setText(elementsRegion.get(0));
townField.setText(elementsRegion.get(1));
addressField.setText(elementsRegion.get(2));
}
@FXML
void clearFields(MouseEvent event) {
surnameField.clear();
nameField.clear();
senioriteSpinner.getValueFactory().setValue(0.);
statusBox.setValue("active");
statusBox.setEditable(false);
phoneField.clear();
countryField.clear();
townField.clear();
addressField.clear();
}
    /**
     * Requests the full worker list from the server over the shared datasource
     * connection and shows everyone whose status is not "fired".
     */
    private void loadWorkers() {
        // Build the request envelope understood by the server.
        JSONObject jsonObject = new JSONObject();
        jsonObject.put("action", Action.FIND_ALL_WORKERS);
        datasource.getWriter().println(jsonObject);
        JSONObject inputObject;
        try {
            // The reply carries the workers as a JSON array under the "workers" key.
            inputObject = new JSONObject(datasource.getReader().readLine());
            Type workersList = new TypeToken<ArrayList<Worker>>() {
            }.getType();
            workers = gson.fromJson(inputObject.getString("workers"), workersList);
            // Fired workers stay in the model but are hidden from the table.
            workerTable.getItems().addAll(workers.stream().filter(item -> !item.getStatus().equals("fired")).collect(Collectors.toList()));
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
private void fillTabPane(TableView<Worker> tab) {
tab.getColumns().get(0).setCellValueFactory(new PropertyValueFactory<>("surname"));
tab.getColumns().get(1).setCellValueFactory(new PropertyValueFactory<>("name"));
tab.getColumns().get(2).setCellValueFactory(new PropertyValueFactory<>("status"));
tab.getColumns().get(3).setCellValueFactory(new PropertyValueFactory<>("seniority"));
tab.getColumns().get(4).setCellValueFactory(new PropertyValueFactory<>("phone"));
tab.getColumns().get(5).setCellValueFactory(new PropertyValueFactory<>("regionWorker"));
tab.getColumns().get(6).setCellValueFactory(new PropertyValueFactory<>("idWorker"));
tab.getColumns().get(7).setCellValueFactory(new PropertyValueFactory<>("idUser"));
}
}
|
Java | UTF-8 | 640 | 2.34375 | 2 | [] | no_license | package oob.lolprofile.DetailsComponent.Data.Mapper;
import java.util.ArrayList;
import io.realm.RealmResults;
import oob.lolprofile.HomeComponent.Data.Model.ChampionRow;
import oob.lolprofile.HomeComponent.Domain.GetAllChampions.Model.Champion;
public class ChampionCollectionMapper {
public static ArrayList<Champion> parseChampionsRealmResponse(RealmResults<ChampionRow> championRows) {
ArrayList<Champion> champions = new ArrayList<>();
for (ChampionRow championRow : championRows) {
champions.add(ChampionMapper.parseChampionRealmResponse(championRow));
}
return champions;
}
}
|
Markdown | UTF-8 | 3,731 | 2.65625 | 3 | [] | no_license | #### 目标
+ 采集信息
+ 主机信息
+ CPU信息
+ 内存信息
+ 硬盘信息
+ 网卡信息
+ 实现资产变更的自动汇报
+ 代码发布系统
+ 监控
+ 服务器统计
+ 实现自动化采集
#### 方案选择
共有三种方案可供选择:
+ agent
+ 在目标机器上安装agent脚本,定期任务执行后通过requests模块将得到的信息通过API存入数据库,再通过前端页面进行展示
+ 优点:速度快,适合数量多的场景
+ 缺点:需要事先安装agent
+ ssh类
+ 创建若干个中控机,通过ssh协议(paramiko模块实现,或者使用现成的工具如Ansible)等连接目标主机,执行命令后将得到的信息通过API存入数据库,再通过前端页面显示
+ 缺点:速度慢,适合数量少的场景,且需要若干台中控机,需要服务器成本
+ 优点:不需要安装agent,不需要考虑脚本的更新
+ salt类
+ salt-master和salt-minion
+ 中控机扮演master角色,目标主机扮演minion角色。利用salt-stack现成的工具来收集信息
+ 适合正在使用salt-stack或想要使用salt-stack的场景
#### 架构分工
+ 资产采集(1-2人)
+ 资产数据处理,入库(1人)
+ Web端数据的展示(1人)
#### 代码实现
##### 资产采集
+ 设计目录结构
+ bin 可执行文件
+ conf 配置文件
+ lib 库文件目录
+ src 源码目录
+ test 测试文件目录
+ log 日志文件(不要在代码中写日志文件的目录:防止容量越来越大导致整个项目大小过重)
+ 高级配置文件实现
+ 参考Django的配置源码
+ 目标:通过一个类对象,获取自定义配置和高级配置
+ 核心代码:
+ dir()
+ getattr()
+ setattr()
+ 采集信息实现
+ 传统的做法:根据mode来进行条件判断,分别实行不同的执行过程(面向过程)
+ 存在的问题:1.面向过程 2.代码过于冗余,不利于复写 3.优化比较麻烦
+ 高内聚低耦合原则:
+ 这是一种设计思想
+ 一个类或者一个函数中,不能有一行代码和此类或者此函数所阐述的功能不一致
+ 写代码的原则(review的要点):
+ 功能是否能实现
+ 代码的整洁度
+ 变量、方法和类的命名风格要一致
+ 大驼峰 GetUserInfo
+ 小驼峰 getUserInfo
+ 下划线 get_user_info
+ 变量名要有意义
+ 函数体内部的代码不能超过一定的行数
+ 改进方法:
+ 将采集的模块代码封装成一个插件文件
+ disk: disk.py --> Disk(): ----> process()
+ cpu: cpu.py --> Cpu(): ---->process()
+ 设置一个字典来保存各个类,用来决定初始化时是否导入(可插拔式)
+ 如果不想使用某个功能,可以将字典中的该项注释掉(类似Django中)
+ 配置文件:
+ PLUGIN_DICT = {
'basic': 'src.plugins.basic.Basic',
'cpu': 'src.plugins.cpu.Cpu',
'disk': 'src.plugins.disk.Disk',
'memory': 'src.plugins.memory.Memory',
'nic': 'src.plugins.nic.Nic',
}
+ 插件:
+ src:
+ plugins
+ \__init__.py:实现可插拔式功能的关键
+ execute()
+ basic.py --> Basic -->process()
+ cpu.py-->Cpu-->process()
+ memory-->Memory-->process()
+ disk-->Disk-->process()
+ nic-->Nic-->process()
+ 改进采集功能
+ 将共用的功能抽离出来,写成一个基类,后续如果还有其他的插件,则需要继承基类
|
PHP | UTF-8 | 1,435 | 2.640625 | 3 | [
"BSD-2-Clause"
] | permissive | <?php
/*
* Copyright (c) KUBO Atsuhiro <kubo@iteman.jp>,
* All rights reserved.
*
* This file is part of Stagehand_FSM.
*
* This program and the accompanying materials are made available under
* the terms of the BSD 2-Clause License which accompanies this
* distribution, and is available at http://opensource.org/licenses/BSD-2-Clause
*/
namespace Stagehand\FSM\State;
use Stagehand\FSM\Event\Event;
use Stagehand\FSM\Event\EventInterface;
/**
* @since Trait available since Release 3.0.0
*/
/**
 * Supplies the entry/exit/do pseudo-events shared by state implementations.
 *
 * @since Trait available since Release 3.0.0
 */
trait StateActionTrait
{
    /**
     * Event raised when the state is entered.
     *
     * @var EventInterface
     */
    protected $entryEvent;

    /**
     * Event raised when the state is left.
     *
     * @var EventInterface
     */
    protected $exitEvent;

    /**
     * Event raised while the state machine remains in the state.
     *
     * @var EventInterface
     */
    protected $doEvent;

    /**
     * @return EventInterface the entry pseudo-event
     */
    public function getEntryEvent(): EventInterface
    {
        return $this->entryEvent;
    }

    /**
     * @return EventInterface the exit pseudo-event
     */
    public function getExitEvent(): EventInterface
    {
        return $this->exitEvent;
    }

    /**
     * @return EventInterface the do pseudo-event
     */
    public function getDoEvent(): EventInterface
    {
        return $this->doEvent;
    }

    /**
     * Creates the three pseudo-events; call once from the using class.
     */
    protected function initializeStateActionEvents()
    {
        $this->entryEvent = new Event(StateActionInterface::EVENT_ENTRY);
        $this->exitEvent = new Event(StateActionInterface::EVENT_EXIT);
        $this->doEvent = new Event(StateActionInterface::EVENT_DO);
    }
}
|
JavaScript | UTF-8 | 10,468 | 2.765625 | 3 | [] | no_license | const API = 'https://raw.githubusercontent.com/ShutovAndrey/Study/master';
const image = 'https://placehold.it/200x150';
const cartImage = 'https://placehold.it/100x80';
//super for Catalog and Cart
// Base class for renderable product collections (catalog and cart).
class List {
    constructor(url, container) {
        this.container = container;
        this.url = url;
        this.items = [];
        this._init();
    }

    // Subclasses override this to fetch data and attach listeners.
    _init() {
        return false
        // a convenient place for subclasses to attach button listeners
    }

    // Fetches a URL and resolves with the parsed JSON payload.
    getJSON(url) {
        return fetch(url)
            .then(response => response.json())
    }

    // Renders every item into the container element using the item class
    // registered for this subclass in the `lists` map.
    _render() {
        const target = document.querySelector(this.container);
        target.innerHTML = this.items
            .map(entry => new lists[this.constructor.name](entry).render())
            .join('');
    }
}
//super for CatalogItem and CartItem
// Base renderable product entry; subclasses customize the thumbnail via `img`.
class Item {
    constructor(obj, img = image) {
        const {product_name, price, id_product} = obj;
        this.product_name = product_name;
        this.price = price;
        this.id_product = id_product;
        this.img = img;
    }

    // Returns the catalog-card markup for this product.
    render() {
        return `
            <div class="product-item" data-id="${this.id_product}">
                <img src="${this.img}" alt="Some img">
                <div class="desc">
                    <h3>${this.product_name}</h3>
                    <p>${this.price} $</p>
                    <button class="buy-btn"
                        data-id="${this.id_product}"
                        data-name="${this.product_name}"
                        data-image="${this.img}"
                        data-price="${this.price}">Купить</button>
                </div>
            </div>
        `
    }
}
// Product catalog: fetches the product list and forwards "buy" clicks to the cart.
class Catalog extends List {
    constructor(cart, url = '/StudyDB.json', container = '.products') {
        super(url, container);
        this.cart = cart;
    }

    // Delegated click handler: any "buy" button adds its product to the cart.
    _addListeners() {
        document.querySelector('.products').addEventListener('click', (clickEvent) => {
            if (clickEvent.target.classList.contains('buy-btn')) {
                this.cart.addProduct(clickEvent.target);
            }
        })
    }

    // Loads the catalog, renders it, then wires up the buy buttons.
    _init() {
        this.getJSON(API + this.url)
            .then(data => {
                this.items = data
            })
            .then(() => {
                this._render()
            })
            .finally(() => {
                this._addListeners()
            })
    }
}
// Catalog entries need no behavior beyond the shared Item rendering.
class CatalogItem extends Item {
}
// Shopping cart: renders its contents and syncs add/remove with the demo API.
class Cart extends List {
    constructor(url = '/getBasket.json', container = '.cart-block') {
        super(url, container)
    }

    _addListeners() {
        // Toggle cart visibility.
        document.querySelector('.btn-cart').addEventListener('click', () => {
            document.querySelector('.cart-block').classList.toggle('invisible');
        });
        // Remove an item from the cart.
        document.querySelector('.cart-block').addEventListener('click', (clickEvent) => {
            if (clickEvent.target.classList.contains('del-btn')) {
                this.removeProduct(clickEvent.target);
            }
        })
    }

    // Loads the saved basket contents, renders them, then wires up the buttons.
    _init() {
        this.getJSON(API + this.url)
            .then(data => {
                this.items = data.contents
            })
            .then(() => {
                this._render()
            })
            .finally(() => {
                this._addListeners()
            })
    }

    // Tells the server a product was added; the demo API just acknowledges.
    addProduct(prod) {
        let serverAccepted;
        this.getJSON(API + '/addToBasket.json')
            .then(payload => {
                serverAccepted = payload.result
            })
            .finally(() => {
                if (serverAccepted) {
                    // console.log(`Product ${prod.dataset.name} added to the cart`)
                }
            })
    }

    // Tells the server a product was removed from the cart.
    removeProduct(prod) {
        this.getJSON(API + '/deleteFromBasket.json')
            .then(payload => {
                if (payload.result) {
                    // console.log(`Product ${prod.dataset.id} removed from the cart`)
                }
            })
    }
}
// A product entry inside the cart; tracks how many units were added.
class CartItem extends Item {
    constructor(obj, img = cartImage) {
        super(obj, img);
        // Every cart line starts with a single unit.
        this.quantity = 1;
    }
    // Returns the cart-row markup, including the per-line total price.
    render() {
        return `
        <div class="cart-item" data-id="${this.id_product}">
            <div class="product-bio">
                <img src="${this.img}" alt="Some image">
                <div class="product-desc">
                    <p class="product-title">${this.product_name}</p>
                    <p class="product-quantity">Quantity: ${this.quantity}</p>
                    <p class="product-single-price">$${this.price} each</p>
                </div>
            </div>
            <div class="right-block">
                <p class="product-price">${this.quantity * this.price}</p>
                <button class="del-btn" data-id="${this.id_product}">×</button>
            </div>
        </div>
        `
    }
}
// Maps a List subclass name to the item class used to render its entries
// (looked up in List._render via this.constructor.name).
const lists = {
    Catalog: CatalogItem,
    Cart: CartItem
}
// Bootstrap: build the cart first so the catalog can add products to it.
let cart = new Cart();
let catalog = new Catalog(cart);
//----------------------------------------------------------------------------------------------------------------
/*class ProdList {
makeGETRequest(url) {
return fetch(url)
}
renderItem(product_name, price, id_product) {
const img = 'https://placehold.it/200x150';
return ` <div class="product-item" data-id="${id_product}">
<img src="${img}" alt="Some img">
<div class="desc">
<h3>${product_name}</h3>
<p>${price} $</p>
<button class="buy-btn"
data-id="${id_product}"
data-name="${product_name}"
data-image="${img}"
data-price="${price}">Купить</button>
</div>
</div>`;
}
renderProducts(dataArr) {
let str = '';
dataArr.forEach(good => {
str += this.renderItem(good.product_name, good.price, good.id_product);
});
document.querySelector('.products').innerHTML = str;
}
//кнопки покупки товара (добавляется один раз)
toBye(evt) {
if (evt.target.classList.contains('buy-btn')) {
goodsList.addProduct(evt.target);
}
}
}
const productList = new ProdList();
productList.makeGETRequest(dataCatalog)
.then(dJSON => dJSON.json())
.then(data => {
dataFromWeb = data
})
.then(() => {
productList.renderProducts(dataFromWeb)
})
.catch(error => {
console.log('error')
});
let toByeBtn = document.querySelector('.products');
toByeBtn.addEventListener('click', (evt) => productList.toBye(evt));
class Goodslist {
constructor(userCart) {
this.userCart = userCart;
}
//кнопка скрытия и показа корзины
showCart() {
document.querySelector('.btn-cart').addEventListener('click', () => {
document.querySelector('.cart-block').classList.toggle('invisible');
})
}
addProduct(product) {
let productId = +product.dataset['id']; //data-id="1"
let cartImage = 'https://placehold.it/100x80';
let find = this.userCart.find(element => element.id === productId); //товар или false
if (!find) {
this.userCart.push({
name: product.dataset['name'],
id: productId,
img: cartImage,
price: +product.dataset['price'],
quantity: 1
})
} else {
find.quantity++
}
this.renderCart()
}
//удаление товаров
removeProduct(product) {
let productId = +product.dataset['id'];
let find = this.userCart.find(element => element.id === productId);
if (find.quantity > 1) {
find.quantity--;
} else {
this.userCart.splice(this.userCart.indexOf(find), 1);
document.querySelector(`.cart-item[data-id="${productId}"]`).remove()
}
this.renderCart();
}
//перерендер корзины
renderCart() {
let allProducts = '';
for (let el of this.userCart) {
allProducts += `<div class="cart-item" data-id="${el.id}">
<div class="product-bio">
<img src="${el.img}" alt="Some image">
<div class="product-desc">
<p class="product-title">${el.name}</p>
<p class="product-quantity">Quantity: ${el.quantity}</p>
<p class="product-single-price">$${el.price} each</p>
</div>
</div>
<div class="right-block">
<p class="product-price">${el.quantity * el.price}</p>
<button class="del-btn" data-id="${el.id}">×</button>
</div>
</div>`
}
document.querySelector(`.cart-block`).innerHTML = allProducts + this.priceCount();
}
//подсчет стоимости корзины
priceCount() {
let priceCount = 0;
this.userCart.forEach(function (el) {
priceCount += (el.price * el.quantity);
});
let total = ` <div> <p class="product-title"> Total price: ${priceCount}</p></div> `;
return total;
}
}
let userCart = [];
const goodsList = new Goodslist(userCart);
goodsList.showCart();
goodsList.renderCart();
let toDelBtn = document.querySelector('.cart-block');
toDelBtn.addEventListener('click', (evt) => goodsList.removeProduct(evt.target));*/
|
Markdown | UTF-8 | 1,564 | 3.0625 | 3 | [] | no_license | ### 测试请求参数映射
1. `@PathVariable`方式
1. `@RequestParam`方式
1. `@RequestBody`方式
1. 无注解方式
1. `request.getParameter`方式,纯手工
#### @PathVariable
1. 参数必须是restful格式,作为请求的链接一部分
1. 不显示映射名称时,直接使用注解修饰的形参进行映射
1. 不显示设置修饰的字段的必要性时,默认必传
1. Date类型需要`@DateTimeFormat`注解
1. 正常映射boolean
#### @RequestParam
1. 既可以使用链接传参,亦可以使用body(form-data)进行传参
1. 不显示映射名称时,直接使用注解修饰的形参进行映射
1. 不显示设置修饰的字段的必要性时,默认必传
1. Date类型需要`@DateTimeFormat`注解
1. 正常映射boolean
#### @RequestBody
1. 发起请求需要添加header,<code>Content-Type=application/json</code>
1. 如果是使用postman发起请求,那么Body里的参数必须选择raw方式,否则报错,这也就限制了该方式只能传递一个参数大对象
```
org.springframework.web.HttpMediaTypeNotSupportedException: Content type 'multipart/form-data;...not supported
```
1. 具体可以用bean方式接收,或者map方式,二者均可;bean方式时需要注意,date类型即使不使用`@DateTimeFormat`也是可以成功注入的
#### 无注解方式
1. 日期类型字段需要添加<code>@DateTimeFormat(pattern = "yyyy-MM-dd")</code>
1. get请求,参数直接通过链接传递可以映射成功
1. 通过form-data也可以映射成功
1. 通过body里的raw方式传参时,参数是不能映射进来的
#### request.getParameter
|
Java | UTF-8 | 3,551 | 2.65625 | 3 | [] | no_license | package com.example.android.musicapp;
import android.graphics.drawable.Drawable;
import android.os.Parcel;
import android.os.Parcelable;
/**
 * Model of a single song. Implements {@link Parcelable} so instances can be
 * passed between Android components inside an Intent or Bundle.
 *
 * NOTE: the field order in {@link #writeToParcel} and in the Parcel
 * constructor must stay in sync, or unparceling will scramble the fields.
 */
public class Song implements Parcelable {
    private String genre;
    private String name;
    private double length;        // song duration, units as supplied by the caller
    private String mainArtist;
    private String featArtist;    // "" when there is no featured artist
    private String displayArtist; // derived: "main" or "main, feat"
    private String album;
    private int score = 10;       // like/dislike score; every song starts at 10
    private int pic;              // image resource id for the song artwork

    public Song(String genre, String name, double length, String mainArtist, String featArtist, String album, int img) {
        setGenre(genre);
        setName(name);
        setLength(length);
        setMainArtist(mainArtist);
        setFeatArtist(featArtist);
        setAlbum(album);
        setDisplayArtist();
        setPic(img);
    }

    /** Registers a "dislike" by decrementing the score. */
    public void noLikeSong(){
        score--;
    }

    /** Registers a "like" by incrementing the score. */
    public void likeSong(){
        score++;
    }

    /** @return a two-line label: song name over the display artist. */
    public String displaySongInfo(){
        return name + "\n" + displayArtist;
    }

    public String getDisplayArtist() {
        return displayArtist;
    }

    public String getGenre() {
        return genre;
    }

    public String getName() {
        return name;
    }

    public double getLength() {
        return length;
    }

    public String getMainArtist() {
        return mainArtist;
    }

    public String getFeatArtist() {
        return featArtist;
    }

    public String getAlbum() {
        return album;
    }

    /** @return the score formatted as a String (kept for UI binding compatibility). */
    public String getScore() {
        return score + "";
    }

    /** Rebuilds {@link #displayArtist} from mainArtist and the (possibly empty) featArtist. */
    public void setDisplayArtist(){
        if (featArtist.equals("")) {
            displayArtist = mainArtist;
        } else {
            displayArtist = mainArtist + ", " + featArtist;
        }
    }

    public void setGenre(String genre) {
        this.genre = genre;
    }

    public void setName(String name) {
        this.name = name;
    }

    public void setLength(double length) {
        this.length = length;
    }

    public void setMainArtist(String mainArtist) {
        this.mainArtist = mainArtist;
    }

    public void setFeatArtist(String featArtist) {
        this.featArtist = featArtist;
    }

    public void setAlbum(String album) {
        this.album = album;
    }

    /** Parcel constructor; read order must mirror {@link #writeToParcel}. */
    protected Song(Parcel in) {
        genre = in.readString();
        name = in.readString();
        length = in.readDouble();
        mainArtist = in.readString();
        featArtist = in.readString();
        displayArtist = in.readString();
        album = in.readString();
        score = in.readInt();
        pic = in.readInt();
    }

    public static final Creator<Song> CREATOR = new Creator<Song>() {
        @Override
        public Song createFromParcel(Parcel in) {
            return new Song(in);
        }

        @Override
        public Song[] newArray(int size) {
            return new Song[size];
        }
    };

    @Override
    public int describeContents() {
        return 0;
    }

    /** Write order must mirror the {@link #Song(Parcel)} constructor. */
    @Override
    public void writeToParcel(Parcel dest, int flags) {
        dest.writeString(genre);
        dest.writeString(name);
        dest.writeDouble(length);
        dest.writeString(mainArtist);
        dest.writeString(featArtist);
        dest.writeString(displayArtist);
        dest.writeString(album);
        dest.writeInt(score);
        dest.writeInt(pic);
    }

    public int getPic() {
        return pic;
    }

    public void setPic(int pic) {
        // The previous if/else assigned the same value on both branches
        // (dead conditional); a single assignment preserves the behavior.
        this.pic = pic;
    }
}
|
Python | UTF-8 | 243 | 3.046875 | 3 | [] | no_license |
class A:
d=100
def __init__(self):
print("This is Constructor")
def display(self):
print("This is Display method")
def sum(self,a,b):
print a+b
def sub(self,a,b):
c= a-b
return c |
Java | UTF-8 | 23,659 | 2.015625 | 2 | [] | no_license | package org.literacybridge.acm.tbloader;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.lang3.StringUtils;
import org.literacybridge.acm.cloud.Authenticator;
import org.literacybridge.acm.cloud.ProjectsHelper;
import org.literacybridge.acm.cloud.ProjectsHelper.DeploymentInfo;
import org.literacybridge.acm.config.ACMConfiguration;
import org.literacybridge.acm.config.DBConfiguration;
import org.literacybridge.acm.config.PathsProvider;
import org.literacybridge.acm.utils.IOUtils;
import org.literacybridge.core.fs.ZipUnzip;
import org.literacybridge.core.spec.ProgramSpec;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Properties;
import java.util.function.BiConsumer;
import java.util.function.Function;
import java.util.logging.Logger;
import java.util.regex.Matcher;
import java.util.stream.Collectors;
import static org.literacybridge.core.tbloader.TBLoaderConstants.DEPLOYMENT_REVISION_PATTERN;
import static org.literacybridge.core.tbloader.TBLoaderConstants.PROGRAM_DESCRIPTION_PROPERTY;
import static org.literacybridge.core.tbloader.TBLoaderConstants.UNPUBLISHED_DEPLOYMENT_PATTERN;
import static org.literacybridge.core.tbloader.TBLoaderConstants.UNPUBLISHED_REV;
class DeploymentsManager {
private static final Logger LOG = Logger.getLogger(DeploymentsManager.class.getName());
private final String project;
private final File localProjectDir;
private LocalDeployment localDeployment;
private AvailableDeployments availableDeployments;
private State state = State.Unset;
    /**
     * Creates a manager scoped to a single program/project.
     *
     * @param project the program id whose deployments are managed
     */
    DeploymentsManager(String project) {
        this.project = project;
        // ~/LiteracyBridge/TB-Loaders/{project}
        localProjectDir = ACMConfiguration.getInstance().getLocalTbLoaderDirFor(project);
    }
/**
* Gets the state of the local deployment, if any. One of the State enum values.
* @return the State
*/
    synchronized State getState() {
        // Computed lazily and cached; the ordering of the checks below defines
        // the precedence of the states (unpublished beats everything, then
        // cloud problems, then local problems, then freshness).
        if (state == State.Unset) {
            AvailableDeployments ad = getAvailableDeployments();
            LocalDeployment ld = getLocalDeployment();
            if (ld.isUnpublished) {
                state = State.OK_Unpublished;
            } else if (ad.isMissingLatest()) {
                state = State.Missing_Latest;
            } else if (ld.errorMessage != null) {
                state = State.Bad_Local;
            } else if (ld.localRevision == null) {
                state = State.No_Deployment;
//            } else if (ld.isUnpublished) {
//                state = State.OK_Unpublished;
            } else if (ad.isOffline()) {
                // Can't compare against the cloud; trust the local copy.
                state = State.OK_Cached;
            } else if (ld.localRevision.equalsIgnoreCase(ad.getCurrentRevId())) {
                state = State.OK_Latest;
            } else {
                state = State.Not_Latest;
            }
        }
        return state;
    }
/**
* Retrieves information about the local deployment, if any.
* @return a LocalDeployment object. If that object's errorMessage field is non-null,
* there is some problem with the local deployment. Otherwise, localRev contains the
* current local revision, if any (if null, there is no local revision). If the field
* isUnpublished is true, the local deployment is unpublished.
*/
    synchronized LocalDeployment getLocalDeployment() {
        // Lazily computed and cached; reset to null by clearLocalDeployments().
        if (localDeployment == null) {
            localDeployment = findLocalDeployment();
        }
        return localDeployment;
    }
/**
* Returns a list of the programs with locally cached deployments.
* @return the list.
*/
    public static Map<String, String> getLocalPrograms() {
        File localProgramsDir = ACMConfiguration.getInstance().getLocalTbLoadersDir();
        Map<String, String> result = new HashMap<>();
        File[] programDirs = localProgramsDir.listFiles(File::isDirectory);
        if (programDirs != null) {
            for (File programDir : programDirs) {
                // A program directory with a *.rev marker holds a cached deployment.
                File[] revFiles = programDir.listFiles(f -> f.isFile() && f.getName()
                    .toLowerCase()
                    .endsWith(".rev"));
                if (revFiles != null && revFiles.length > 0) {
                    // We've found a local deployment, try to get the program description.
                    String programid = programDir.getName();
                    String programDescription = null;
                    // Look in the deployment_info.properties file, if there is one.
                    LocalDeployment ld = findLocalDeploymentIn(programDir);
                    File contentDir = ld.localContent;
                    File programSpecDir = new File(contentDir, "programspec");
                    if (programSpecDir.isDirectory()) {
                        ProgramSpec ps = new ProgramSpec(programSpecDir);
                        Properties deploymentInfo = ps.getDeploymentProperties();
                        programDescription = deploymentInfo.getProperty(PROGRAM_DESCRIPTION_PROPERTY);
                    }
                    // If no description, look in the ACM database.
                    if (StringUtils.isBlank(programDescription)) {
                        DBConfiguration dbConfig = ACMConfiguration.getInstance().getDbConfiguration(programid);
                        if (dbConfig != null) {
                            programDescription = dbConfig.getFriendlyName();
                        }
                    }
                    // If still no description, fall back to the program id.
                    if (StringUtils.isBlank(programDescription)) {
                        programDescription = programid;
                    }
                    result.put(programid, programDescription);
                }
            }
        }
        return result;
    }
/**
* Get the LocalDeployment object that describes the current local deployment for the current program.
* @return the LocalDeployment object.
*/
    // Convenience wrapper: inspect this manager's own local project directory.
    private LocalDeployment findLocalDeployment() {
        return findLocalDeploymentIn(localProjectDir);
    }
/**
* Given a directory with at least one deployment for a program, including a .rev file, get information
* about the lcoal deployment.
* @param localProjectDir Directory with deployment(s), like ~/LiteracyBridge/TB-Loaders/TEST
* @return a LocalDepllyment object describing the deployment.
*/
    private static LocalDeployment findLocalDeploymentIn(File localProjectDir) {
        // Get *.rev files. Expect at most one.
        File[] revFiles = localProjectDir.listFiles(f ->
            f.isFile() && f.getName().toLowerCase().endsWith(".rev"));
        // Get content/* directories. Expect at most one.
        File[] contentDirs = new File(localProjectDir, "content").listFiles(File::isDirectory);
        if (revFiles==null || revFiles.length==0 || contentDirs==null || contentDirs.length==0) {
            // No local content; need to get from Dropbox.
            return new LocalDeployment();
        }
        if (revFiles.length==1) { // ) && (contentDirs != null && contentDirs.length==1)) {
            // There is exactly one .rev file, and at least one content file. See if
            // there is a match.
            Map<String,File> localFilesMap = Arrays.stream(contentDirs)
                .collect(Collectors.toMap(File::getName, Function.identity()));
            String localRevMarker = revFiles[0].getName();
            String localRev = FilenameUtils.removeExtension(localRevMarker);
            if (localRevMarker.startsWith(UNPUBLISHED_REV)) {
                // If it is a new-style unpublished marker, we know the deployment, and
                // can verify it matches some content we have. But if an old-style marker,
                // we can only accept it if there is exactly one Deployment in content.
                Matcher unpublishedMatcher = UNPUBLISHED_DEPLOYMENT_PATTERN.matcher(localRevMarker);
                if (unpublishedMatcher.matches()) {
                    String unpublishedDeployment = unpublishedMatcher.group(2);
                    if (localFilesMap.containsKey(unpublishedDeployment)) {
                        return new LocalDeployment(localRev, localFilesMap.get(unpublishedDeployment));
                    } else {
                        // Marker names a deployment we don't actually have locally.
                        return new LocalDeployment();
                    }
                } else {
                    // Old style. Is there a single content directory?
                    if (contentDirs.length==1) {
                        // Note that this is just an assumption, though highly probable.
                        return new LocalDeployment(localRev, contentDirs[0]);
                    } else {
                        return new LocalDeployment("Ambiguous unpublished deployment.");
                    }
                }
            } else {
                // There is one deployment; does it match the .rev file?
                Matcher publishedMatcher = DEPLOYMENT_REVISION_PATTERN.matcher(localRevMarker);
                if (publishedMatcher.matches()) {
                    String publishedDeployment = publishedMatcher.group(1);
                    if (localFilesMap.containsKey(publishedDeployment)) {
                        // The deployment matches the marker.
                        return new LocalDeployment(localRev, localFilesMap.get(publishedDeployment));
                    } else {
                        // No local content for the local .rev file.
                        return new LocalDeployment("Local content doesn't match local revision.");
                    }
                } else {
                    // Not a good marker file. Same as no marker.
                    return new LocalDeployment();
                }
            }
        }
        // There are too many .rev files locally.
        return new LocalDeployment("Extraneous local .rev files detected.");
    }
/**
* Returns a list of all available deployments, in a map of deployment name to deployment directory.
* Enumerating the map returns the names in newest to oldest order.
*/
    AvailableDeployments getAvailableDeployments() {
        // Lazily computed and cached for the lifetime of this manager.
        // NOTE(review): unlike getLocalDeployment(), this is not synchronized — confirm single-threaded use.
        if (availableDeployments == null) {
            availableDeployments = findAvailableDeployments();
        }
        return availableDeployments;
    }
/**
* Examines S3 to find the deployments there. For every
* deployment found, keeps track of the highest suffix (-a, -b, etc), and keeps track
* of any .rev files found. (There should be only one.)
* @return an AvailableDeployments object, with a map of deployment to highest revision, and
* the latest published deployment.
*/
    private AvailableDeployments findAvailableDeployments() {
        // Without a signed-in user there is no cloud access; use the null-object
        // implementation that reports "offline".
        if (!Authenticator.getInstance().isAuthenticated()) {
            return new NoAvailableOfflineDeployments();
        }
        AvailableCloudDeployments depls = new AvailableCloudDeployments();
        depls.findDeployments();
        return depls;
    }
/**
* Clears old content and *.rev files from the localProjectDir.
*/
    synchronized void clearLocalDeployments() {
        // Remove cached content, metadata, and the revision marker, then force
        // getLocalDeployment() to re-inspect the directory next time.
        File localContentDir = new File(localProjectDir, "content");
        File localMetadataDir = new File(localProjectDir, "metadata");
        IOUtils.deleteRecursive(localContentDir);
        IOUtils.deleteRecursive(localMetadataDir);
        deleteRevFiles(localProjectDir);
        localDeployment = null;
    }
/**
* Retrieves the Deployment from the given directory.
* @param desiredDeployment Name of desired Deployment
* @throws IOException if the Deployment can't be unzipped.
*/
    // NOTE(review): uses the availableDeployments field directly; assumes
    // getAvailableDeployments() has been called beforehand, else this NPEs — confirm callers.
    void getDeployment(String desiredDeployment, BiConsumer<Long, Long> progressHandler) throws IOException {
        clearLocalDeployments();
        availableDeployments.fetchDeployment(desiredDeployment, localProjectDir, progressHandler);
    }
/**
* Removes any *.rev files from the given directory. NOT recursive.
* @param directory to be cleaned.
*/
private static void deleteRevFiles(File directory) {
File[] files = directory.listFiles((dir1, name) -> name.toLowerCase().endsWith(".rev"));
assert files != null;
//noinspection ResultOfMethodCallIgnored
Arrays.stream(files).forEach(File::delete);
}
enum State {
Unset,
Bad_Local, // Needs to be fixed. Easiest is delete all, re-copy.
Missing_Latest, // Something is wrong with the Dropbox state.
No_Deployment, // No deployment, simply copy latest.
OK_Unpublished, // Unpublished deployment.
OK_Latest, // Local is latest, seems OK.
Not_Latest, // Local is not latest.
OK_Cached // We have a local that looks fine, but are offline.
}
/**
* Class to describe the local Deployment, if any.
*/
    static class LocalDeployment {
        String localDeployment; // Deployment name, e.g. "TEST-19-1"
        String localRevision; // Revision suffix, e.g. "a", or "UNPUBLISHED"
        boolean isUnpublished; // True if unpublished
        File localContent; // Local directory with deployment content, named like "TEST-19-1"
        String errorMessage; // If there is no usable local deployment, this explains why.
        // No local deployment at all.
        private LocalDeployment() {
            this(null, null);
        }
        /**
         * Constructor for the case when there is no error.
         * @param localRevMarkerName Local rev file name, without extension (but with -a, -b, etc).
         * @param localContent Local directory containing Deployment content. Named as the
         *                     Deployment name, without any -a, -b, ...
         *
         * NOTE: BOTH of localRevMarkerName and localContent can be null, if there is no local Deployment.
         */
        private LocalDeployment(String localRevMarkerName, File localContent) {
            this.localContent = localContent;
            if (localRevMarkerName != null) {
                // Unpublished markers start with the UNPUBLISHED prefix; published
                // markers encode "{deployment}-{revision}".
                this.isUnpublished = localRevMarkerName.toUpperCase().startsWith(UNPUBLISHED_REV);
                if (isUnpublished) {
                    Matcher unpubMatcher = UNPUBLISHED_DEPLOYMENT_PATTERN.matcher(localRevMarkerName);
                    if (unpubMatcher.matches()) {
                        this.localDeployment = unpubMatcher.group(2);
                    }
                    this.localRevision = UNPUBLISHED_REV;
                } else {
                    Matcher pubMatcher = DEPLOYMENT_REVISION_PATTERN.matcher(localRevMarkerName);
                    if (pubMatcher.matches()) {
                        this.localDeployment = pubMatcher.group(1);
                        this.localRevision = pubMatcher.group(2);
                    }
                }
            }
        }
        /**
         * Constructor for the case when there is an error.
         * @param errorMessage describing the error.
         */
        private LocalDeployment(String errorMessage) {
            this.errorMessage = errorMessage;
        }
    }
    /** Source of deployments that could be fetched (cloud, or a null "offline" stand-in). */
    public interface AvailableDeployments {
        /** @return the revision id for the named deployment, or null if unknown. */
        String getRevIdForDeployment(String desiredDeployment);
        /** @return true when running without cloud access. */
        boolean isOffline();
        /** @return true when no known deployment is marked current. */
        boolean isMissingLatest();
        /** @return revision id of the current deployment, or null. */
        String getCurrentRevId();
        /** @return name of the current deployment, or null. */
        String getCurrentDeployment();
        /** Downloads the named deployment into localProjectDir, reporting (done, total) progress. */
        void fetchDeployment(String desiredDeployment,
            File localProjectDir,
            BiConsumer<Long, Long> progressHandler) throws IOException;
        /**
         * @return Map of Deployment name to Deployment Details.
         */
        Map<String, String> getDeploymentDescriptions();
    }
/**
* Finds any published deployments that are local the user's computer. May be in !/Dropbox
* or in ~/Amplio/acm-dbs.
* @param programid for which local deployments are desired.
* @return a map {string, file} of the local published deployments to the directory with the latest
* revision of that deployment.
*/
static public Map<String, File> getLocalPublishedDeployments(String programid) {
Map<String, File> result = new HashMap<>();
// Is there a "published" directory in which to look?
PathsProvider pathsProvider = ACMConfiguration.getInstance().getPathProvider(programid);
if (pathsProvider == null) return result;
File tbLoadersDir = pathsProvider.getProgramTbLoadersDir();
if (!tbLoadersDir.isDirectory()) return result;
File publishedDir = new File(tbLoadersDir, "published");
if (!publishedDir.isDirectory()) return result;
// Get the directories that are deployments; they contain a same-named .zip file.
File[] publishedDeployments = publishedDir.listFiles(file -> file.isDirectory() &&
new File(file, "content-" + file.getName() + ".zip").exists() ||
new File(file, file.getName() + ".zip").exists());
// Find the highest revision of every deployment found.
Map<String,String> highestRevisions = new HashMap<>();
assert publishedDeployments != null;
for (File file : publishedDeployments) {
String fileName = file.getName();
// Extract the deployment name and revision string.
Matcher deplMatcher = DEPLOYMENT_REVISION_PATTERN.matcher(fileName);
if (deplMatcher.matches()) {
String deploymentName = deplMatcher.group(1);
String fileRevision = deplMatcher.group(2).toLowerCase();
String previousRevision = highestRevisions.get(deploymentName);
// If this is a new deployment or a higher revision, save it.
// A longer name is always greater. When the lengths are the same, then we need
// to compare the strings.
if (previousRevision == null
|| fileRevision.length() > previousRevision.length()
|| (fileRevision.length()==previousRevision.length() && fileRevision.compareTo(previousRevision) > 0) ) {
highestRevisions.put(deploymentName, fileRevision);
result.put(deploymentName, file);
}
}
}
return result;
}
/**
 * There aren't any available Deployments when we're offline. Of course.
 * Null-object implementation of AvailableDeployments: every query reports
 * "nothing known" and fetching is a no-op.
 */
static class NoAvailableOfflineDeployments implements AvailableDeployments {
    @Override
    public String getRevIdForDeployment(String desiredDeployment) {
        // No deployments are known while offline.
        return null;
    }

    @Override
    public boolean isOffline() {
        // This implementation exists only for the offline case.
        return true;
    }

    @Override
    public boolean isMissingLatest() {
        // Vacuously false: there is no "latest" to be missing.
        return false;
    }

    @Override
    public String getCurrentRevId() {
        return null;
    }

    @Override
    public String getCurrentDeployment() {
        return null;
    }

    @Override
    public void fetchDeployment(String desiredDeployment,
        File localProjectDir,
        BiConsumer<Long, Long> progressHandler)
    {
        // Intentionally empty; nothing can be fetched while offline.
    }

    @Override
    public Map<String, String> getDeploymentDescriptions() {
        // Nothing known: an empty map.
        return new HashMap<>();
    }
}
/**
 * Describes the deployments available in the cloud.
 */
class AvailableCloudDeployments implements AvailableDeployments {
    // Deployment name -> its info. After findDeployments() this is filtered
    // down to only the deployment(s) flagged "current".
    Map<String, DeploymentInfo> deploymentsInfo;
    // The DeploymentInfo flagged as current, or null when none is.
    DeploymentInfo currentDeploymentInfo;

    /**
     * Queries the server for this project's deployments, preferring the S3
     * listing and falling back to the other listing when S3 returns nothing.
     * Populates deploymentsInfo (current-only) and currentDeploymentInfo.
     */
    void findDeployments() {
        Authenticator authInstance = Authenticator.getInstance();
        ProjectsHelper projectsHelper = authInstance.getProjectsHelper();
        deploymentsInfo = projectsHelper.getS3DeploymentInfo(project);
        if (deploymentsInfo.size() == 0) {
            deploymentsInfo = projectsHelper.getDeploymentInfo(project);
        }
        // Keep only the current deployment. If we want to let users choose, stop doing this.
        deploymentsInfo = deploymentsInfo.entrySet().stream()
            .filter(e->e.getValue().isCurrent())
            .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
        currentDeploymentInfo = deploymentsInfo.values().stream().filter(DeploymentInfo::isCurrent).findAny().orElse(null);
    }

    @Override
    public String getRevIdForDeployment(String desiredDeployment) {
        DeploymentInfo di = deploymentsInfo.get(desiredDeployment);
        return di == null ? null : di.getRevId();
    }

    @Override
    public boolean isOffline() {
        return false;
    }

    @Override
    public boolean isMissingLatest() {
        // True when none of the known deployments is marked "current".
        return deploymentsInfo.values().stream().noneMatch(DeploymentInfo::isCurrent);
    }

    @Override
    public String getCurrentRevId() {
        return currentDeploymentInfo == null ? null : currentDeploymentInfo.getRevId();
    }

    @Override
    public String getCurrentDeployment() {
        return currentDeploymentInfo == null ? null : currentDeploymentInfo.getDeploymentName();
    }

    /**
     * Makes the desired deployment available in localProjectDir. Prefers an
     * already-downloaded local published copy; otherwise downloads the zip to
     * a temp file. Then unzips into localProjectDir and drops an empty
     * "<versionMarker>.rev" marker file recording what was extracted.
     */
    @Override
    public void fetchDeployment(String desiredDeployment,
        File localProjectDir,
        BiConsumer<Long, Long> progressHandler)
        throws IOException
    {
        DeploymentInfo di = deploymentsInfo.get(desiredDeployment);
        // If the deployment is already downloaded, with the other program data, use that.
        File zipFile = null;
        Map<String,File> localPublishedDeployments = getLocalPublishedDeployments(project);
        if (localPublishedDeployments.containsKey(desiredDeployment)) {
            File publishedDir = localPublishedDeployments.get(desiredDeployment);
            // Either "content-<name>.zip" or the older "<name>.zip" form.
            zipFile = new File(publishedDir, "content-"+publishedDir.getName()+".zip");
            if (!zipFile.exists()) zipFile = new File(publishedDir, publishedDir.getName()+".zip");
        }
        if (zipFile == null || !zipFile.exists()) {
            // No usable local copy; download into a fresh temp directory.
            File tempDir = Files.createTempDirectory("tbloader-tmp").toFile();
            File tempFile = new File(tempDir, di.getFileName());
            Authenticator authInstance = Authenticator.getInstance();
            ProjectsHelper projectsHelper = authInstance.getProjectsHelper();
            projectsHelper.downloadDeployment(di,
                tempFile,
                progressHandler/*(p,t)->{System.out.printf("%d/%d\n",p,t);}*/);
            zipFile = tempFile;
        }
        // 7z x -y -o"%userprofile%\LiteracyBridge\TB-Loaders\%project%" "..\ACM-%project%\TB-Loaders\published\%latestUpdate%\content-%latestUpdate%.zip"
        ZipUnzip.unzip(zipFile, localProjectDir);
        // Leave a marker to indicate what is here.
        String revFileName = di.getVersionMarker() + ".rev";
        File revFile = new File(localProjectDir, revFileName);
        if (!revFile.createNewFile()) {
            LOG.warning(String.format("Could not create file '%s'", revFile.getAbsolutePath()));
        }
    }

    @Override
    public Map<String, String> getDeploymentDescriptions() {
        // Insertion-ordered map of deployment name -> its version marker string.
        Map<String,String> result = new LinkedHashMap<>();
        for (Map.Entry<String,DeploymentInfo> e : deploymentsInfo.entrySet()) {
            result.put(e.getKey(), e.getValue().getVersionMarker());
        }
        return result;
    }
}
}
|
JavaScript | UTF-8 | 1,050 | 2.75 | 3 | [] | no_license | // 霸都丶傲天 2019年12月24日
var config = {
    // The list may be any length: ten sentences or twenty sentences are all fine.
    // Try to keep each sentence under 15 characters, or the display may look bad.
    texts: [
        "送给我", // Important: the trailing comma at the end of each line must be an ASCII (English) comma!!
        "心爱的小宝贝", // Same as above...
        "今天是我们重要的日子",
        "这是我们相识的",
        "第二个年头了哦",
        "去年的今天",
        "还记得在Holden干柴烈火呢",
        "今年要吃好的哦",
        "要把我家可爱宝贝喂饱饱",
        "然后抱走",
    ],
    wish: "小宝贝&&哈士奇相爱了", // Important: the trailing comma must be an ASCII (English) comma!!
    // The time format matters a lot; it must be exactly this shape:
    // YYYY-MM-DD HH:MM:SS
    time: "2019-10-05 0:0:0", // Important: the trailing comma must be an ASCII (English) comma!!
};
JavaScript | UTF-8 | 2,256 | 2.578125 | 3 | [] | no_license | /**
* Module dependencies.
*/
var request = require('superagent')
  , _ = require('underscore')
  // Root URL for all Coinbase v1 account endpoints used below.
  , BASE_URL = 'https://coinbase.com/api/v1/account'

/**
 * Module exports: the Account constructor.
 */
module.exports = Account;
/**
 * Account constructor.
 *
 * @param {Object} coinbase - client object holding the `api_key` used by every request
 * @api public
 */
function Account (coinbase) {
  this._coinbase = coinbase;
}
/**
 * Balance function.
 *
 * Fetches the account balance and yields `(err, body)` to the callback.
 *
 * @param {Function} callback
 * @api public
 */
Account.prototype.balance = function (callback) {
  request.get(BASE_URL + '/balance')
    .query({ api_key: this._coinbase.api_key })
    .end(function (res) {
      if (res.body) {
        // Consistency fix: like receiveAddress(), treat an API-reported
        // `error` field in the body as a failure instead of a success.
        if (_.has(res.body, 'error')) {
          return callback(new Error('Coinbase error: ' + res.body.error));
        }
        return callback(null, res.body)
      } else {
        callback(new Error('Error thrown by coinbase.js: Bad Coinbase response. Check to make sure you are passing valid parameters.'));
      }
    });
}
/**
 * Receive address function.
 *
 * Looks up the account's current receive address; yields `(err, body)`.
 *
 * @param {Function} callback
 * @api public
 */
Account.prototype.receiveAddress = function (callback) {
  var apiKey = this._coinbase.api_key;
  request.get(BASE_URL + '/receive_address')
    .query({ api_key: apiKey })
    .end(function (res) {
      var body = res.body;
      // A usable response carries a `success` flag; anything else is an error.
      if (!body || !_.has(body, 'success')) {
        if (body && _.has(body, 'error')) {
          return callback(new Error('Coinbase error: ' + body.error));
        }
        return callback(new Error('Error thrown by coinbase.js: Bad Coinbase response. Check to make sure you are passing valid parameters.'));
      }
      return callback(null, body)
    });
}
/**
 * Generate receive address function.
 *
 * Creates a fresh receive address, optionally registering a payment
 * callback URL; yields `(err, body)`.
 *
 * @param {String} callback_url (optional)
 * @param {Function} callback
 * @api public
 */
Account.prototype.generateReceiveAddress = function (callback_url, callback) {
  // Support the one-argument form: generateReceiveAddress(cb).
  if (typeof callback === 'undefined') {
    callback = callback_url;
    callback_url = null;
  }
  var params = { api_key: this._coinbase.api_key };
  var haveCallbackUrl = typeof callback_url !== 'undefined' && callback_url !== null;
  if (haveCallbackUrl) {
    params.address = { callback_url: callback_url };
  }
  request.post(BASE_URL + '/generate_receive_address')
    .query(params)
    .end(function (res) {
      if (!res.body) {
        return callback(new Error('Error thrown by coinbase.js: Bad Coinbase response. Check to make sure you are passing valid parameters.'));
      }
      return callback(null, res.body)
    });
}
Markdown | UTF-8 | 557 | 3.03125 | 3 | [] | no_license | # British Square
British Square is a simple strategy board game, written with AppGameKit.
I wrote this over a long summer as an introductory coding project. It's not pretty or well commented, but it functions, and it got me into the world of coding.
As I get ready to graduate with a degree in Computer Science, I wanted to share where I started. Feel free to poke around and take a look. If you want to try the game,
it is available on the Google Play Store or in your browser [here](https://gregorydwyer.github.io/HTML5/British_Squares.html).
|
Python | UTF-8 | 1,785 | 3.375 | 3 | [] | no_license | # -*- coding: utf-8 -*-
"""
Created on Sat Dec 23 14:12:50 2017
bias-variance
@author: pengxu
"""
import numpy as np
import scipy.optimize as opt
def cost(theta, X, y):
    """Unregularized linear-regression cost (sum of squared errors / 2m).

    theta : R(n), linear regression parameters
    X     : R(m*n), m records, n features
    y     : R(m), target vector
    """
    n_samples = X.shape[0]
    residuals = X @ theta - y  # shape (m,)
    # For a 1-D numpy array, ``residuals @ residuals`` is the inner product
    # with itself, i.e. the sum of squared residuals.
    return (residuals @ residuals) / (2 * n_samples)
def gradient(theta, X, y):
    """Gradient of the unregularized cost: X^T (X theta - y) / m, shape (n,)."""
    n_samples = X.shape[0]
    residuals = X @ theta - y
    return (X.T @ residuals) / n_samples
def regularized_gradient(theta, X, y, l=1):
    """Gradient of the L2-regularized cost; the intercept theta[0] is not penalized."""
    n_samples = X.shape[0]
    penalty = (l / n_samples) * theta  # new array; theta itself is untouched
    penalty[0] = 0  # never regularize the intercept term
    return gradient(theta, X, y) + penalty
def linear_regression_np(X, y, l=1):
    """Fit L2-regularized linear regression with scipy's TNC optimizer.

    args:
        X: feature matrix, (m, n+1)  # with intercept column x0=1
        y: target vector, (m, )
        l: lambda constant for regularization
    return: the scipy OptimizeResult; the trained parameters are in ``res.x``
    """
    # Start from all-ones parameters and let TNC minimize the regularized cost,
    # using the analytic gradient for speed.
    initial_theta = np.ones(X.shape[1])
    return opt.minimize(
        fun=regularized_cost,
        x0=initial_theta,
        args=(X, y, l),
        method='TNC',
        jac=regularized_gradient,
        options={'disp': True},
    )
def regularized_cost(theta, X, y, l=1):
    """cost() plus an L2 penalty on every parameter except the intercept theta[0]."""
    n_samples = X.shape[0]
    penalty = (l / (2 * n_samples)) * (theta[1:] ** 2).sum()
    return cost(theta, X, y) + penalty
|
Java | UTF-8 | 293 | 2.484375 | 2 | [] | no_license | package annotation;
public class AnnotationDemo {
    public static void main(String[] args) {
        A plainA = new A();
        B plainB = new B();
        A polymorphic = new B();  // B referenced through its supertype A

        plainA.show();
        plainB.show();
        polymorphic.show();

        // m1() is annotated @Deprecated: it still compiles and runs, but its
        // use is discouraged and the compiler emits a deprecation warning here.
        plainB.m1();
    }
}
|
Java | UTF-8 | 1,414 | 2.09375 | 2 | [] | no_license | package com.sindhu.myapplication.ui.home;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import androidx.annotation.NonNull;
import androidx.fragment.app.Fragment;
import androidx.recyclerview.widget.RecyclerView;
import com.sindhu.myapplication.R;
import com.sindhu.myapplication.ui.event.EventListRecylerAdapter;
public class HomeFragment extends Fragment {
private boolean mTwoPane;
public View onCreateView(@NonNull LayoutInflater inflater,
ViewGroup container, Bundle savedInstanceState) {
View root = inflater.inflate(R.layout.fragment_list_home_bookmarks, container, false);
if (root.findViewById(R.id.event_detail_container) != null) {
// The detail container view will be present only in the
// large-screen layouts (res/values-w900dp).
// If this view is present, then the
// activity should be in two-pane mode.
mTwoPane = true;
}
View recyclerView = root.findViewById(R.id.event_list);
assert recyclerView != null;
setupRecyclerView((RecyclerView) recyclerView);
return root;
}
private void setupRecyclerView(@NonNull RecyclerView recyclerView) {
recyclerView.setAdapter(new EventListRecylerAdapter(this, mTwoPane, R.layout.fragment_home_item));
}
} |
PHP | UTF-8 | 1,213 | 2.5625 | 3 | [
"MIT"
] | permissive | <?php
use Illuminate\Database\Migrations\Migration;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Support\Facades\Schema;
use App\Entities\Product;
use App\Entities\Customer;
class CreateRatingsTable extends Migration
{
    /**
     * Run the migrations: create the `ratings` table.
     *
     * @return void
     */
    public function up()
    {
        Schema::create('ratings', function (Blueprint $blueprint) {
            $blueprint->bigIncrements('id');
            $blueprint->text('review')->nullable(false);
            $blueprint->unsignedTinyInteger('rating')->nullable(false);
            $blueprint->unsignedBigInteger('customer_id')->nullable(false);
            $blueprint->unsignedBigInteger('product_id')->nullable(false);
            $blueprint->timestamps();

            // A rating disappears together with its customer.
            $blueprint->foreign('customer_id')
                ->references('id')->on((new Customer())->getTable())
                ->onDelete('cascade');

            // A rating disappears together with its product.
            $blueprint->foreign('product_id')
                ->references('id')->on((new Product())->getTable())
                ->onDelete('cascade');
        });
    }

    /**
     * Reverse the migrations: drop the `ratings` table.
     *
     * @return void
     */
    public function down()
    {
        Schema::dropIfExists('ratings');
    }
}
Python | UTF-8 | 6,139 | 2.75 | 3 | [] | no_license | # Small utility methods.
from collective.recipe.backup import config
import logging
import os
import shutil
import subprocess
import sys
# Python 2/3 compatibility: on Python 3, ``input`` takes the role of the old
# ``raw_input``; Python 2 already has ``raw_input`` as a builtin.
try:
    from builtins import input as raw_input
except ImportError:
    # Python 2 has raw_input available by default.
    pass


logger = logging.getLogger("utils")

# For zc.buildout's system() method:
# presumably because close_fds=True conflicts with redirected handles on
# Windows (see the subprocess docs) — TODO confirm.
MUST_CLOSE_FDS = not sys.platform.startswith("win")

# A tuple/type usable with isinstance() for "any string" on both majors.
try:
    # Python 2
    stringtypes = basestring
except NameError:
    # Python 3
    stringtypes = str
def system(command, input=""):
    """commands.getoutput() replacement that also works on windows

    This was copied from zest.releaser.  Returns ``(output, status)``
    where status 0 means the command succeeded.
    """
    process = subprocess.Popen(
        command,
        shell=True,
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        close_fds=MUST_CLOSE_FDS,
    )
    stdin, stdout, stderr = process.stdin, process.stdout, process.stderr
    if input:
        stdin.write(input)
    stdin.close()
    combined = stdout.read() + stderr.read()
    stdout.close()
    stderr.close()
    # Return the result plus a return value (0: all is fine)
    return combined, process.wait()
def ask(question, default=True, exact=False):
    """Ask the question in y/n form and return True/False.

    If you don't want a default 'yes', set default to None (or to False if you
    want a default 'no').

    With exact=True, we want to get a literal 'yes' or 'no', at least
    when it does not match the default.
    """
    while True:
        # Build the prompt suffix; the default answer is capitalized.
        if exact:
            yn = "yes/no"
        else:
            yn = "y/n"
        if default is True:
            yn = yn.replace("y", "Y")
        elif default is False:
            yn = yn.replace("n", "N")
        reply = raw_input(f"{question} ({yn})? ") or ""
        # An empty reply accepts the default, when there is one.
        if not reply and default is not None:
            return default
        if exact and reply.lower() not in ("yes", "no"):
            print("Please explicitly answer yes/no in full (or accept the default)")
            continue
        if reply:
            first = reply[0].lower()
            if first == "y":
                return True
            if first == "n":
                return False
        # We really want an answer.
        print("Please explicitly answer y/n")
def execute_or_fail(command):
    """Run ``command`` via system(); print its output and exit(1) on failure."""
    if not command:
        return
    out, status = system(command)
    logger.debug("command executed: %r", command)
    if out:
        print(out)
    if status:
        logger.error("command %r failed. See message above.", command)
        sys.exit(1)
def check_folders(
    storage, backup_blobs=True, only_blobs=False, backup_method=config.STANDARD_BACKUP
):
    """Check that folders exist, and create them if not."""
    is_full = backup_method == config.STANDARD_BACKUP
    is_snapshot = backup_method == config.SNAPSHOT_BACKUP
    is_zip = backup_method == config.ZIP_BACKUP
    wanted = []
    # Filestorage target directories.
    if not only_blobs:
        if is_full:
            wanted.append(storage.get("backup_location"))
        if is_snapshot:
            wanted.append(storage.get("snapshot_location"))
        if is_zip:
            wanted.append(storage.get("zip_location"))
    # Blobstorage target directories.
    if backup_blobs:
        if is_full:
            wanted.append(storage.get("blob_backup_location"))
        if is_snapshot:
            wanted.append(storage.get("blob_snapshot_location"))
        if is_zip:
            wanted.append(storage.get("blob_zip_location"))
    for target in wanted:
        if target and not os.path.isdir(target):
            os.makedirs(target)
            logger.info("Created %s", target)
def try_create_folder(pathdir):
    """Try to create a folder, but remove it again.

    >>> try_create_folder('mytest')
    >>> mkdir('mytest')
    >>> mkdir('mytest', 'keep')
    >>> write('mytest', 'myfile', 'I am a file.')
    >>> ls('mytest')
    d keep
    - myfile
    >>> try_create_folder('mytest')
    >>> ls('mytest')
    d keep
    - myfile
    >>> try_create_folder('mytest/folder')
    >>> ls('mytest')
    d keep
    - myfile
    >>> try_create_folder('mytest/keep')
    >>> ls('mytest')
    d keep
    - myfile
    >>> try_create_folder('mytest/folder/sub')
    >>> ls('mytest')
    d keep
    - myfile
    >>> try_create_folder('mytest/keep/sub')
    >>> ls('mytest')
    d keep
    - myfile
    >>> remove('mytest')
    """
    # Nothing to check for an empty/None path.
    if not pathdir:
        return
    if os.path.exists(pathdir):
        if not os.path.isdir(pathdir):
            logger.warning("WARNING: %s is a file, not a directory.", pathdir)
        # Already present (directory or file): nothing to create.
        return
    # Now the tricky thing is: if only a/ exists, without sub
    # directories, and we call this function with a/b/c, we do not
    # want to have a directory a/b/ left over at the end.
    if os.path.isabs(pathdir):
        newdir = os.path.sep
    else:
        newdir = os.getcwd()
    parts = pathdir.split(os.path.sep)
    # Find the first part that does not exist.
    for part in parts:
        newdir = os.path.join(newdir, part)
        if os.path.exists(newdir):
            if not os.path.isdir(newdir):
                logger.warning("WARNING: %s is a file, not a directory.", newdir)
                return
            continue
        # newdir does not exist. Try to create the full path, and the
        # remove newdir.
        # (Creating the full path proves it is creatable; removing newdir
        # — the first missing component — also removes everything below it,
        # so no partial directories are left behind.)
        try:
            os.makedirs(pathdir)
            shutil.rmtree(newdir)
        except OSError:
            logger.warning("WARNING: Not able to create %s", pathdir)
        return
def get_date_from_args():
    """Scan sys.argv for a date-like argument to pass to repozo.

    Returns the first argument that is neither a known flag nor contains
    'restore', or None when no such argument exists.
    """
    for candidate in sys.argv:
        # Skip known flags.
        if candidate in ("-q", "-n", "--quiet", "--no-prompt"):
            continue
        # Skip the script name / restore-related arguments.
        if "restore" in candidate:
            continue
        # We can assume this argument is a date.
        logger.debug(
            "Argument passed to bin/restore, we assume it is "
            "a date that we have to pass to repozo: %s.",
            candidate,
        )
        logger.info("Date restriction: restoring state at %s.", candidate)
        return candidate
    return None
|
Java | UTF-8 | 1,620 | 2.046875 | 2 | [] | no_license | package com.tf.persistance.util;
import java.io.Serializable;
import java.util.Date;
public class PaymentScheduleDTO implements Serializable {

    /**
     * Serialization version identifier for this DTO.
     */
    private static final long serialVersionUID = -3416290638302892477L;

    /**
     * Data carried for one payment-schedule entry. Field semantics are
     * inferred from names — confirm against the persistence layer.
     */
    private Long id;                  // record identifier
    private String sellerName;        // seller's display name
    private Date paymentDate;         // scheduled payment date
    private String invoiceNumber;     // invoice reference
    private Date dateestablished;     // date the schedule entry was established
    private String invoiceAmount;     // invoice amount (kept as a string)
    private int duration;             // duration — units not evident here; verify
    private Date financeDate;         // financing date

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public String getSellerName() {
        return sellerName;
    }

    public void setSellerName(String sellerName) {
        this.sellerName = sellerName;
    }

    public Date getPaymentDate() {
        return paymentDate;
    }

    public void setPaymentDate(Date paymentDate) {
        this.paymentDate = paymentDate;
    }

    public String getInvoiceNumber() {
        return invoiceNumber;
    }

    public void setInvoiceNumber(String invoiceNumber) {
        this.invoiceNumber = invoiceNumber;
    }

    public Date getDateestablished() {
        return dateestablished;
    }

    public void setDateestablished(Date dateestablished) {
        this.dateestablished = dateestablished;
    }

    public String getInvoiceAmount() {
        return invoiceAmount;
    }

    public void setInvoiceAmount(String invoiceAmount) {
        this.invoiceAmount = invoiceAmount;
    }

    public int getDuration() {
        return duration;
    }

    public void setDuration(int duration) {
        this.duration = duration;
    }

    public Date getFinanceDate() {
        return financeDate;
    }

    public void setFinanceDate(Date financeDate) {
        this.financeDate = financeDate;
    }
}
|
C++ | UTF-8 | 292 | 3.03125 | 3 | [] | no_license | #include <iostream>
#include "Box.hpp"
int main()
{
std::cout << "Hello world" << std::endl;
Box b1(30, 50);
std::cout << "Width = " << b1.getWidth() << std::endl;
std::cout << "Height = " << b1.getHeight() << std::endl;
std::cout << "Area = " << b1.area() << std::endl;
return 0;
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.