hexsha
stringlengths 40
40
| size
int64 5
1.05M
| ext
stringclasses 98
values | lang
stringclasses 21
values | max_stars_repo_path
stringlengths 3
945
| max_stars_repo_name
stringlengths 4
118
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
368k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
945
| max_issues_repo_name
stringlengths 4
118
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
134k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
945
| max_forks_repo_name
stringlengths 4
135
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 5
1.05M
| avg_line_length
float64 1
1.03M
| max_line_length
int64 2
1.03M
| alphanum_fraction
float64 0
1
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
8b49fbdf4eb45be37f3bd9bf31ef9d8554d1f113
| 875
|
rb
|
Ruby
|
lib/engine/action/buy_shares.rb
|
jemiahlee/18xx
|
e5eac2e1f6cb4db667ebb5050e1c66e0a065acea
|
[
"MIT"
] | 201
|
2019-05-30T23:04:54.000Z
|
2022-03-07T16:22:23.000Z
|
lib/engine/action/buy_shares.rb
|
jemiahlee/18xx
|
e5eac2e1f6cb4db667ebb5050e1c66e0a065acea
|
[
"MIT"
] | 4,753
|
2020-01-24T03:58:22.000Z
|
2022-03-31T20:55:53.000Z
|
lib/engine/action/buy_shares.rb
|
jemiahlee/18xx
|
e5eac2e1f6cb4db667ebb5050e1c66e0a065acea
|
[
"MIT"
] | 169
|
2020-01-20T08:28:18.000Z
|
2022-03-26T15:46:11.000Z
|
# frozen_string_literal: true
require_relative 'base'
module Engine
  module Action
    # Game action: an entity buys a bundle of shares, optionally handing a
    # share back as a swap. Round-trips to/from a plain hash for the game log.
    class BuyShares < Base
      attr_reader :entity, :bundle, :swap
      # entity       - the acting entity (forwarded to Action::Base).
      # shares:      - a share or collection of shares; wrapped in a ShareBundle.
      # share_price: - optional price override recorded on the bundle.
      # percent:     - optional bundle percent (forwarded to ShareBundle.new).
      # swap:        - optional share handed over as part of the purchase.
      def initialize(entity, shares:, share_price: nil, percent: nil, swap: nil)
        super(entity)
        @bundle = ShareBundle.new(Array(shares), percent)
        @bundle.share_price = share_price
        @swap = swap
      end
      # Rebuild keyword arguments from a serialized hash, resolving share ids
      # through the game object (inverse of #args_to_h).
      def self.h_to_args(h, game)
        {
          shares: h['shares'].map { |id| game.share_by_id(id) },
          share_price: h['share_price'],
          percent: h['percent'],
          swap: game.share_by_id(h['swap']),
        }
      end
      # Serialize to a plain hash of ids/values (inverse of .h_to_args).
      def args_to_h
        {
          'shares' => @bundle.shares.map(&:id),
          'percent' => @bundle.percent,
          'share_price' => @bundle.share_price,
          'swap' => @swap&.id,
        }
      end
    end
  end
end
| 23.648649
| 80
| 0.553143
|
ec08b1a8567a2ff2653577cdab2a9e0189c68e21
| 4,171
|
swift
|
Swift
|
Twitter/Twitter/TweetDetailViewController.swift
|
swappy208/Twitter
|
24f462001e1dd93bc05d7347ea7932fdb85cc82e
|
[
"Apache-2.0"
] | null | null | null |
Twitter/Twitter/TweetDetailViewController.swift
|
swappy208/Twitter
|
24f462001e1dd93bc05d7347ea7932fdb85cc82e
|
[
"Apache-2.0"
] | 1
|
2017-02-28T14:04:23.000Z
|
2017-03-08T08:27:00.000Z
|
Twitter/Twitter/TweetDetailViewController.swift
|
swappy208/Twitter
|
24f462001e1dd93bc05d7347ea7932fdb85cc82e
|
[
"Apache-2.0"
] | null | null | null |
//
// TweetDetailViewController.swift
// Twitter
//
// Created by Swapnil Tamrakar on 2/27/17.
// Copyright © 2017 Swapnil Tamrakar. All rights reserved.
//
import UIKit
class TweetDetailViewController: UIViewController {
    // MARK: - Outlets
    @IBOutlet weak var userImageView: UIImageView!
    @IBOutlet weak var nameLabel: UILabel!
    @IBOutlet weak var screenNameLabel: UILabel!
    @IBOutlet weak var timeStampLabel: UILabel!
    @IBOutlet weak var tweetTextLabel: UILabel!
    @IBOutlet weak var retweetButton: UIButton!
    @IBOutlet weak var retweetCountLabel: UILabel!
    @IBOutlet weak var favoriteButton: UIButton!
    @IBOutlet weak var favoriteCountLabel: UILabel!
    // The tweet to display. Must be set by the presenting controller before
    // the view loads; implicitly unwrapped, so nil here is a programmer error.
    var tweet: Tweet!
    override func viewDidLoad() {
        super.viewDidLoad()
        // Avatar: setImageWith presumably loads the URL asynchronously via a
        // UIImageView extension (e.g. AFNetworking) — confirm the provider.
        userImageView.setImageWith(tweet.author.userImageURL)
        userImageView.layer.cornerRadius = 5
        userImageView.clipsToBounds = true
        nameLabel.text = tweet.author.name
        screenNameLabel.text = "@\(tweet.author.screenName)"
        timeStampLabel.text = tweet.timeStampLongText
        tweetTextLabel.text = tweet.text
        retweetCountLabel.text = "\(tweet.retweetCount)"
        favoriteCountLabel.text = "\(tweet.favoriteCount)"
        // Reflect current retweet/favorite state in the button artwork.
        if tweet.retweeted == true {
            self.retweetButton.setImage(#imageLiteral(resourceName: "retweet-icon-green"), for: UIControlState.normal)
        } else {
            self.retweetButton.setImage(#imageLiteral(resourceName: "retweet-icon"), for: UIControlState.normal)
        }
        if tweet.favorited == true {
            self.favoriteButton.setImage(#imageLiteral(resourceName: "favor-icon-red"), for: UIControlState.normal)
        } else {
            self.favoriteButton.setImage(#imageLiteral(resourceName: "favor-icon"), for: UIControlState.normal)
        }
    }
    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }
    // MARK: - Actions
    // Retweet the displayed tweet; on success refresh the count/icon and post
    // the "reload" notification so the timeline list updates too.
    // NOTE(review): closures capture self strongly; acceptable for one-shot
    // network callbacks, but confirm the controller cannot outlive them badly.
    @IBAction func onRetweetButton(_ sender: Any) {
        TwitterClient.sharedInstance?.retweetTweet(success: { (tweet: Tweet) in
            self.retweetCountLabel.text = "\(tweet.retweetCount)"
            self.retweetButton.setImage(#imageLiteral(resourceName: "retweet-icon-green"), for: UIControlState.normal)
            NotificationCenter.default.post(name: NSNotification.Name(rawValue: "reload"), object: nil)
        }, failure: { (error: Error) in
            print("error: \(error.localizedDescription)")
            // self.unretweetTweet()
        }, tweetId: tweet.id)
    }
    // Favorite the displayed tweet. NOTE(review): the failure path calls
    // unfavoriteTweet() — presumably to toggle off a tweet that was already
    // favorited; confirm that is the intended fallback behavior.
    @IBAction func onFavoriteButton(_ sender: Any) {
        TwitterClient.sharedInstance?.favoriteTweet(success: { (tweet: Tweet) in
            self.favoriteCountLabel.text = "\(tweet.favoriteCount)"
            self.favoriteButton.setImage(#imageLiteral(resourceName: "favor-icon-red"), for: UIControlState.normal)
            NotificationCenter.default.post(name: NSNotification.Name(rawValue: "reload"), object: nil)
        }, failure: { (error: Error) in
            self.unfavoriteTweet()
        }, tweetId: tweet.id)
    }
    // Remove the favorite and restore the default icon and count.
    func unfavoriteTweet() {
        TwitterClient.sharedInstance?.unfavoriteTweet(success: { (tweet: Tweet) in
            self.favoriteCountLabel.text = "\(tweet.favoriteCount)"
            self.favoriteButton.setImage(#imageLiteral(resourceName: "favor-icon"), for: UIControlState.normal)
            NotificationCenter.default.post(name: NSNotification.Name(rawValue: "reload"), object: nil)
        }, failure: { (error: Error) in
            print("error: \(error.localizedDescription)")
        }, tweetId: tweet.id)
    }
    // MARK: - Navigation
    // NOTE(review): the force-casts assume only "ProfileSegue" and the compose
    // segue ever leave this controller — any other segue identifier crashes.
    override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
        print("preparing for segue \(segue.identifier)")
        if segue.identifier == "ProfileSegue" {
            let profileVC = segue.destination as! ProfileViewController
            profileVC.user = tweet.author
        } else {
            let composeVC = segue.destination as! ComposeViewController
            composeVC.startingText = "@\(tweet.author.screenName)"
        }
    }
}
| 40.105769
| 118
| 0.657876
|
72dbfebfcafff12ba517e44f27c8cfced2e31232
| 2,860
|
rs
|
Rust
|
crates/text/ngram.rs
|
OneToolsCollection/tangramdotdev-tangram
|
666343a87b88a1c1b34a4be2298f6aa54f0fc2eb
|
[
"MIT"
] | 957
|
2021-07-26T17:13:54.000Z
|
2022-03-30T21:38:05.000Z
|
crates/text/ngram.rs
|
OneToolsCollection/tangramdotdev-tangram
|
666343a87b88a1c1b34a4be2298f6aa54f0fc2eb
|
[
"MIT"
] | 83
|
2021-07-28T09:08:27.000Z
|
2022-03-13T16:36:49.000Z
|
crates/text/ngram.rs
|
OneToolsCollection/tangramdotdev-tangram
|
666343a87b88a1c1b34a4be2298f6aa54f0fc2eb
|
[
"MIT"
] | 46
|
2021-07-29T14:46:10.000Z
|
2022-03-31T08:43:20.000Z
|
use std::{borrow::Cow, fmt::Display, hash::Hash};
/// An owned n-gram: either a single token or an ordered pair of tokens.
/// `untagged` serde representation: the variant is inferred from the JSON
/// shape rather than an explicit tag.
#[derive(serde::Serialize, serde::Deserialize, Debug, Clone, Eq, PartialOrd, Ord)]
#[serde(untagged)]
pub enum NGram {
	/// A single token.
	Unigram(String),
	/// Two adjacent tokens, in order.
	Bigram(String, String),
}
impl PartialEq for NGram {
	/// Structural equality: same variant and same token text, position-wise.
	fn eq(&self, other: &Self) -> bool {
		match (self, other) {
			(NGram::Unigram(a), NGram::Unigram(b)) => a == b,
			(NGram::Bigram(a1, b1), NGram::Bigram(a2, b2)) => (a1, b1) == (a2, b2),
			_ => false,
		}
	}
}
impl Hash for NGram {
	/// Hash a variant discriminant first (0 for unigram, 1 for bigram) so the
	/// two shapes can never collide structurally, then the token(s) in order.
	fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
		match self {
			NGram::Unigram(token) => {
				0usize.hash(state);
				token.hash(state);
			}
			NGram::Bigram(first, second) => {
				1usize.hash(state);
				// Tuple hashing feeds each element in order — the same byte
				// stream as hashing `first` then `second` individually.
				(first, second).hash(state);
			}
		}
	}
}
impl Display for NGram {
	/// Render as plain text: the token itself, or both tokens joined by a
	/// single space.
	fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
		match self {
			NGram::Unigram(token) => write!(f, "{}", token),
			NGram::Bigram(token_a, token_b) => write!(f, "{} {}", token_a, token_b),
		}
	}
}
/// A possibly-borrowed n-gram, mirroring `NGram` but with `Cow<str>` tokens so
/// lookups can reuse borrowed text without allocating.
#[derive(serde::Serialize, serde::Deserialize, Debug, Clone, Eq, PartialOrd, Ord)]
pub enum NGramRef<'a> {
	/// A single token (borrowed or owned).
	Unigram(Cow<'a, str>),
	/// Two adjacent tokens, in order (each borrowed or owned).
	Bigram(Cow<'a, str>, Cow<'a, str>),
}
impl<'a> PartialEq for NGramRef<'a> {
	/// Structural equality: same variant and same token text, position-wise.
	fn eq(&self, other: &Self) -> bool {
		match (self, other) {
			(NGramRef::Unigram(a), NGramRef::Unigram(b)) => a == b,
			(NGramRef::Bigram(a1, b1), NGramRef::Bigram(a2, b2)) => {
				(a1, b1) == (a2, b2)
			}
			_ => false,
		}
	}
}
impl<'a> Hash for NGramRef<'a> {
	/// Same layout as `NGram`'s hash — discriminant, then tokens — which is
	/// what makes the `Equivalent<NGram>` lookup below sound (std hashes
	/// `Cow<str>` and `String` identically, as `str`).
	fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
		match self {
			NGramRef::Unigram(token) => {
				0usize.hash(state);
				token.hash(state);
			}
			NGramRef::Bigram(first, second) => {
				1usize.hash(state);
				// Tuple hashing feeds elements in order — equivalent to
				// hashing `first` then `second` individually.
				(first, second).hash(state);
			}
		}
	}
}
/// Lets an `NGramRef` be used directly as a lookup key in indexmap maps keyed
/// by owned `NGram`s, avoiding an allocation per lookup. Requires the two
/// types to hash identically (see the `Hash` impls above).
impl<'a> indexmap::Equivalent<NGram> for NGramRef<'a> {
	fn equivalent(&self, key: &NGram) -> bool {
		match (self, key) {
			(NGramRef::Unigram(unigram_ref), NGram::Unigram(unigram)) => unigram_ref == unigram,
			(NGramRef::Bigram(bigram_a_ref, bigram_b_ref), NGram::Bigram(bigram_a, bigram_b)) => {
				bigram_a_ref == bigram_a && bigram_b_ref == bigram_b
			}
			_ => false,
		}
	}
}
impl<'a> NGramRef<'a> {
	/// Materialize an owned `NGram` by copying the (possibly borrowed) token
	/// text into fresh `String`s.
	pub fn to_ngram(&self) -> NGram {
		match self {
			NGramRef::Unigram(token) => NGram::Unigram(token.to_string()),
			NGramRef::Bigram(token_a, token_b) => {
				NGram::Bigram(token_a.to_string(), token_b.to_string())
			}
		}
	}
}
/// Which n-gram sizes a featurizer should emit; carries no token data itself.
#[derive(Clone, Debug, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
pub enum NGramType {
	Unigram,
	Bigram,
}
| 24.444444
| 90
| 0.630769
|
6dee2f6030026a64b3ef15f3cc9480e0aa392177
| 5,251
|
ts
|
TypeScript
|
serbioneer-front/src/app/components/core/view-cultural-site/comments-list/add-comment/add-comment.component.spec.ts
|
ksenija10/KTSNVT_2020_T13
|
8dc52076fc37176b94b71fab2b3746e05bf84c4f
|
[
"Unlicense"
] | null | null | null |
serbioneer-front/src/app/components/core/view-cultural-site/comments-list/add-comment/add-comment.component.spec.ts
|
ksenija10/KTSNVT_2020_T13
|
8dc52076fc37176b94b71fab2b3746e05bf84c4f
|
[
"Unlicense"
] | 2
|
2021-01-21T08:51:48.000Z
|
2021-01-22T22:15:43.000Z
|
serbioneer-front/src/app/components/core/view-cultural-site/comments-list/add-comment/add-comment.component.spec.ts
|
ksenija10/KTSNVT_2020_T13
|
8dc52076fc37176b94b71fab2b3746e05bf84c4f
|
[
"Unlicense"
] | 1
|
2022-01-23T15:20:40.000Z
|
2022-01-23T15:20:40.000Z
|
import { HarnessLoader } from '@angular/cdk/testing';
import { TestbedHarnessEnvironment } from '@angular/cdk/testing/testbed';
import { ComponentFixture, TestBed } from '@angular/core/testing';
import { FormsModule, ReactiveFormsModule } from '@angular/forms';
import { MatFormFieldModule } from '@angular/material/form-field';
import { MatInputModule } from '@angular/material/input';
import { MatInputHarness } from '@angular/material/input/testing';
import { BrowserAnimationsModule } from '@angular/platform-browser/animations';
import { ToastrService } from 'ngx-toastr';
import { of } from 'rxjs';
import { CulturalSiteService } from 'src/app/services/cultural-site.service';
import { ImageService } from 'src/app/services/image.service';
import { AddCommentComponent } from './add-comment.component';
// Unit tests for AddCommentComponent: comment creation, image selection,
// form reset, and validation-message behavior, with all services mocked.
describe('AddCommentComponent', () => {
  let component: AddCommentComponent;
  let fixture: ComponentFixture<AddCommentComponent>;
  let culturalSiteService: CulturalSiteService;
  let imageService: ImageService;
  let toastr: ToastrService;
  let loader: HarnessLoader;
  beforeEach(async () => {
    // Stub: createComment resolves to a canned, not-yet-approved comment.
    const culturalSiteServiceMock = {
      createComment: jasmine.createSpy('createComment')
        .and.returnValue(of({
          id: 1,
          name: 'Mika',
          surname: 'Mikic',
          culturalSiteName: 'Kulturno dobro',
          text: 'Komentar Mikin',
          approved: false,
          images: []
        }))
    };
    // Stub: createForComment resolves to a canned uploaded image record.
    const imageServiceMock = {
      createForComment: jasmine.createSpy('createForComment')
        .and.returnValue(of({
          id: 1,
          name: 'slika'
        }))
    };
    const toastrMock = {
      success: jasmine.createSpy('success'),
      error: jasmine.createSpy('error'),
    };
    TestBed.configureTestingModule({
      declarations: [ AddCommentComponent ],
      providers: [
        { provide: CulturalSiteService, useValue: culturalSiteServiceMock },
        { provide: ImageService, useValue: imageServiceMock },
        { provide: ToastrService, useValue: toastrMock }
      ],
      imports: [
        ReactiveFormsModule,
        FormsModule,
        MatFormFieldModule,
        MatInputModule,
        BrowserAnimationsModule
      ]
    }).compileComponents();
    fixture = TestBed.createComponent(AddCommentComponent);
    component = fixture.componentInstance;
    culturalSiteService = TestBed.inject(CulturalSiteService);
    imageService = TestBed.inject(ImageService);
    toastr = TestBed.inject(ToastrService);
    loader = TestbedHarnessEnvironment.loader(fixture);
    // Supply the component's @Input values by hand (no host template here).
    component.culturalSiteId = 1;
    component.culturalSiteName = 'Kulturno dobro';
    component.userIsLogged = true;
  });
  it('should create', () => {
    expect(component).toBeTruthy();
  });
  // Happy path: submit() creates the comment, uploads its images, resets the
  // local image/file state, and shows the success toast.
  it('should create new comment', ( async () => {
    component.myForm.setValue({
      text: 'Sabacka Biblioteka was cool.',
      file: '',
      fileSource: {}
    });
    component.culturalSiteId = 1;
    component.files = [new Blob()];
    component.submit();
    expect(culturalSiteService.createComment).toHaveBeenCalledWith(1, 'Sabacka Biblioteka was cool.');
    expect(imageService.createForComment).toHaveBeenCalled();
    expect(component.images.length).toEqual(0);
    expect(component.files.length).toEqual(0);
    expect(component.addNewComment).toEqual(false);
    expect(toastr.success).toHaveBeenCalledOnceWith('Successfully reviewed cultural site!\n' +
      'Your review will be visible after approval.');
    expect(component.addNewComment).toEqual(false);
  }));
  it('should emit event for loading added images for comment', ( async () => {
    // Fake file-input change event carrying two selected files.
    const event = {
      target: {
        files: [
          new Blob([''], { type: 'text/html' }),
          new Blob([''], { type: 'text/html' })
        ]
      }
    };
    component.files = [];
    component.ngOnInit();
    component.onFileChange(event);
    expect(component.files.length).toEqual(2);
  }));
  // Opening the add-comment form should clear state and present an empty input.
  it('should add comments', (async () => {
    component.ngOnInit();
    component.addNewComment = false;
    component.addComments();
    const textInput = await loader.getHarness(
      MatInputHarness.with({ selector: '#new-comment-input' })
    );
    expect(component.addNewComment).toEqual(true);
    expect(component.images.length).toEqual(0);
    expect(await textInput.getValue()).toEqual('');
  }));
  it('should not get text error message', ( async () => {
    component.myForm.value.text = 'Sabacka Biblioteka was cool.';
    component.myForm.controls.text.setErrors(null);
    const message = component.getTextErrorMessage();
    expect(component.myForm.invalid).toBeFalsy();
    expect(component.myForm.controls.text.hasError('required')).toBeFalsy();
    expect(message).toEqual('');
  }));
  it('should get text error message', ( async () => {
    component.myForm.controls.text.markAsTouched();
    const message = component.getTextErrorMessage();
    expect(component.myForm.invalid).toBeTruthy();
    expect(component.myForm.controls.text.hasError('required')).toBeTruthy();
    expect(message).toEqual('Required field');
  }));
});
| 33.660256
| 102
| 0.657589
|
e00ec06a9926f950d9ec81867f7c2c57b792b49b
| 2,604
|
h
|
C
|
src/account/account.h
|
paulmillar/oidc-agent
|
034af4a62241204a68fe1cd3a361630b4f49cb59
|
[
"MIT"
] | null | null | null |
src/account/account.h
|
paulmillar/oidc-agent
|
034af4a62241204a68fe1cd3a361630b4f49cb59
|
[
"MIT"
] | null | null | null |
src/account/account.h
|
paulmillar/oidc-agent
|
034af4a62241204a68fe1cd3a361630b4f49cb59
|
[
"MIT"
] | null | null | null |
#ifndef ACCOUNT_H
#define ACCOUNT_H
#include "cJSON/cJSON.h"
#include "issuer.h"
#include "list/list.h"
#include "utils/file_io/promptCryptFileUtils.h"
#include <stdlib.h>
#include <time.h>
/* A cached access token together with its expiry.
 * NOTE(review): token_expires_at is presumably an absolute unix timestamp in
 * seconds — confirm at the call sites. */
struct token {
  char* access_token;
  unsigned long token_expires_at;
};
/* Full configuration and runtime state of one OIDC account.
 * Owned strings are heap-allocated; free the whole struct via secFreeAccount. */
struct oidc_account {
  struct oidc_issuer* issuer;       /* provider this account belongs to */
  char* shortname;                  /* local account identifier */
  char* clientname;
  char* client_id;
  char* client_secret;
  char* scope;
  char* audience;
  char* username;
  char* password;
  char* refresh_token;
  struct token token;               /* cached access token + expiry */
  char* cert_path;
  list_t* redirect_uris;
  char* usedState;                  /* state param of an in-flight auth flow */
  unsigned char usedStateChecked;
  time_t death;                     /* when this account should be evicted */
  char* code_challenge_method;      /* PKCE method, if any */
  unsigned char mode;               /* bitmask of ACCOUNT_MODE_* flags below */
};
/* Bit flags for oidc_account.mode */
#define ACCOUNT_MODE_CONFIRM 0x01
#define ACCOUNT_MODE_NO_WEBSERVER 0x02
#define ACCOUNT_MODE_NO_SCHEME 0x04
#define ACCOUNT_MODE_ALWAYSALLOWID 0x08
char* defineUsableScopes(const struct oidc_account* account);
/* JSON (de)serialization; *WithoutCredentials variants omit secret fields. */
struct oidc_account* getAccountFromJSON(const char* json);
cJSON* accountToJSON(const struct oidc_account* p);
char* accountToJSONString(const struct oidc_account* p);
cJSON* accountToJSONWithoutCredentials(const struct oidc_account* p);
char* accountToJSONStringWithoutCredentials(const struct oidc_account* p);
/* Secure-free helpers: release all owned memory (prefer the secFreeAccount
 * macro below, which also NULLs the caller's pointer). */
void _secFreeAccount(struct oidc_account* p);
void secFreeAccountContent(struct oidc_account* p);
struct oidc_account* updateAccountWithPublicClientInfo(struct oidc_account*);
int accountConfigExists(const char* accountname);
char* getAccountNameList(list_t* accounts);
int hasRedirectUris(const struct oidc_account* account);
/* Pairwise matchers, e.g. for list lookups. */
int account_matchByState(const struct oidc_account* p1,
                         const struct oidc_account* p2);
int account_matchByName(const struct oidc_account* p1,
                        const struct oidc_account* p2);
int account_matchByIssuerUrl(const struct oidc_account* p1,
                             const struct oidc_account* p2);
void stringifyIssuerUrl(struct oidc_account* account);
void account_setOSDefaultCertPath(struct oidc_account* account);
// make setters and getters available
#include "account/setandget.h"
#ifndef secFreeAccount
/* Free an account and null the pointer so it cannot be used after free. */
#define secFreeAccount(ptr) \
  do { \
    _secFreeAccount((ptr)); \
    (ptr) = NULL; \
  } while (0)
#endif // secFreeAccount
#endif // ACCOUNT_H
| 33.384615
| 77
| 0.658218
|
0d2c19b35dad2e5eb841ab1ce5e522305cf5fe0c
| 13,451
|
lua
|
Lua
|
engine/libs/LoveFrames/skins/Basic/icons.lua
|
VideahGams/VideahEngineRewrite
|
a6f4ee0bd0d93f1d65ac493c281837604bb5c039
|
[
"MIT"
] | 8
|
2015-04-28T10:54:52.000Z
|
2017-02-08T05:20:56.000Z
|
engine/libs/LoveFrames/skins/Basic/icons.lua
|
VideahGams/VideahEngineRewrite
|
a6f4ee0bd0d93f1d65ac493c281837604bb5c039
|
[
"MIT"
] | 1
|
2015-02-12T16:09:20.000Z
|
2015-02-12T16:09:20.000Z
|
engine/libs/LoveFrames/skins/Basic/icons.lua
|
VideahGams/VideahEngine
|
a6f4ee0bd0d93f1d65ac493c281837604bb5c039
|
[
"MIT"
] | null | null | null |
return {
["adjust"] = "",
["adn"] = "",
["align-center"] = "",
["align-justify"] = "",
["align-left"] = "",
["align-right"] = "",
["ambulance"] = "",
["anchor"] = "",
["android"] = "",
["angellist"] = "",
["angle-double-down"] = "",
["angle-double-left"] = "",
["angle-double-right"] = "",
["angle-double-up"] = "",
["angle-down"] = "",
["angle-left"] = "",
["angle-right"] = "",
["angle-up"] = "",
["apple"] = "",
["archive"] = "",
["area-chart"] = "",
["arrow-circle-down"] = "",
["arrow-circle-left"] = "",
["arrow-circle-o-down"] = "",
["arrow-circle-o-left"] = "",
["arrow-circle-o-right"] = "",
["arrow-circle-o-up"] = "",
["arrow-circle-right"] = "",
["arrow-circle-up"] = "",
["arrow-down"] = "",
["arrow-left"] = "",
["arrow-right"] = "",
["arrow-up"] = "",
["arrows"] = "",
["arrows-alt"] = "",
["arrows-h"] = "",
["arrows-v"] = "",
["asterisk"] = "",
["at"] = "",
["automobile"] = "",
["backward"] = "",
["ban"] = "",
["bank"] = "",
["bar-chart"] = "",
["bar-chart-o"] = "",
["barcode"] = "",
["bars"] = "",
["bed"] = "",
["beer"] = "",
["behance"] = "",
["behance-square"] = "",
["bell"] = "",
["bell-o"] = "",
["bell-slash"] = "",
["bell-slash-o"] = "",
["bicycle"] = "",
["binoculars"] = "",
["birthday-cake"] = "",
["bitbucket"] = "",
["bitbucket-square"] = "",
["bitcoin"] = "",
["bold"] = "",
["bolt"] = "",
["bomb"] = "",
["book"] = "",
["bookmark"] = "",
["bookmark-o"] = "",
["briefcase"] = "",
["btc"] = "",
["bug"] = "",
["building"] = "",
["building-o"] = "",
["bullhorn"] = "",
["bullseye"] = "",
["bus"] = "",
["buysellads"] = "",
["cab"] = "",
["calculator"] = "",
["calendar"] = "",
["calendar-o"] = "",
["camera"] = "",
["camera-retro"] = "",
["car"] = "",
["caret-down"] = "",
["caret-left"] = "",
["caret-right"] = "",
["caret-square-o-down"] = "",
["caret-square-o-left"] = "",
["caret-square-o-right"] = "",
["caret-square-o-up"] = "",
["caret-up"] = "",
["cart-arrow-down"] = "",
["cart-plus"] = "",
["cc"] = "",
["cc-amex"] = "",
["cc-discover"] = "",
["cc-mastercard"] = "",
["cc-paypal"] = "",
["cc-stripe"] = "",
["cc-visa"] = "",
["certificate"] = "",
["chain"] = "",
["chain-broken"] = "",
["check"] = "",
["check-circle"] = "",
["check-circle-o"] = "",
["check-square"] = "",
["check-square-o"] = "",
["chevron-circle-down"] = "",
["chevron-circle-left"] = "",
["chevron-circle-right"] = "",
["chevron-circle-up"] = "",
["chevron-down"] = "",
["chevron-left"] = "",
["chevron-right"] = "",
["chevron-up"] = "",
["child"] = "",
["circle"] = "",
["circle-o"] = "",
["circle-o-notch"] = "",
["circle-thin"] = "",
["clipboard"] = "",
["clock-o"] = "",
["close"] = "",
["cloud"] = "",
["cloud-download"] = "",
["cloud-upload"] = "",
["cny"] = "",
["code"] = "",
["code-fork"] = "",
["codepen"] = "",
["coffee"] = "",
["cog"] = "",
["cogs"] = "",
["columns"] = "",
["comment"] = "",
["comment-o"] = "",
["comments"] = "",
["comments-o"] = "",
["compass"] = "",
["compress"] = "",
["connectdevelop"] = "",
["copy"] = "",
["copyright"] = "",
["credit-card"] = "",
["crop"] = "",
["crosshairs"] = "",
["css3"] = "",
["cube"] = "",
["cubes"] = "",
["cut"] = "",
["cutlery"] = "",
["dashboard"] = "",
["dashcube"] = "",
["database"] = "",
["dedent"] = "",
["delicious"] = "",
["desktop"] = "",
["deviantart"] = "",
["diamond"] = "",
["digg"] = "",
["dollar"] = "",
["dot-circle-o"] = "",
["download"] = "",
["dribbble"] = "",
["dropbox"] = "",
["drupal"] = "",
["edit"] = "",
["eject"] = "",
["ellipsis-h"] = "",
["ellipsis-v"] = "",
["empire"] = "",
["envelope"] = "",
["envelope-o"] = "",
["envelope-square"] = "",
["eraser"] = "",
["eur"] = "",
["euro"] = "",
["exchange"] = "",
["exclamation"] = "",
["exclamation-circle"] = "",
["exclamation-triangle"] = "",
["expand"] = "",
["external-link"] = "",
["external-link-square"] = "",
["eye"] = "",
["eye-slash"] = "",
["eyedropper"] = "",
["facebook"] = "",
["facebook-f"] = "",
["facebook-official"] = "",
["facebook-square"] = "",
["fast-backward"] = "",
["fast-forward"] = "",
["fax"] = "",
["female"] = "",
["fighter-jet"] = "",
["file"] = "",
["file-archive-o"] = "",
["file-audio-o"] = "",
["file-code-o"] = "",
["file-excel-o"] = "",
["file-image-o"] = "",
["file-movie-o"] = "",
["file-o"] = "",
["file-pdf-o"] = "",
["file-photo-o"] = "",
["file-picture-o"] = "",
["file-powerpoint-o"] = "",
["file-sound-o"] = "",
["file-text"] = "",
["file-text-o"] = "",
["file-video-o"] = "",
["file-word-o"] = "",
["file-zip-o"] = "",
["files-o"] = "",
["film"] = "",
["filter"] = "",
["fire"] = "",
["fire-extinguisher"] = "",
["flag"] = "",
["flag-checkered"] = "",
["flag-o"] = "",
["flash"] = "",
["flask"] = "",
["flickr"] = "",
["floppy-o"] = "",
["folder"] = "",
["folder-o"] = "",
["folder-open"] = "",
["folder-open-o"] = "",
["font"] = "",
["forumbee"] = "",
["forward"] = "",
["foursquare"] = "",
["frown-o"] = "",
["futbol-o"] = "",
["gamepad"] = "",
["gavel"] = "",
["gbp"] = "",
["ge"] = "",
["gear"] = "",
["gears"] = "",
["genderless"] = "",
["gift"] = "",
["git"] = "",
["git-square"] = "",
["github"] = "",
["github-alt"] = "",
["github-square"] = "",
["gittip"] = "",
["glass"] = "",
["globe"] = "",
["google"] = "",
["google-plus"] = "",
["google-plus-square"] = "",
["google-wallet"] = "",
["graduation-cap"] = "",
["gratipay"] = "",
["group"] = "",
["h-square"] = "",
["hacker-news"] = "",
["hand-o-down"] = "",
["hand-o-left"] = "",
["hand-o-right"] = "",
["hand-o-up"] = "",
["hdd-o"] = "",
["header"] = "",
["headphones"] = "",
["heart"] = "",
["heart-o"] = "",
["heartbeat"] = "",
["history"] = "",
["home"] = "",
["hospital-o"] = "",
["hotel"] = "",
["html5"] = "",
["ils"] = "",
["image"] = "",
["inbox"] = "",
["indent"] = "",
["info"] = "",
["info-circle"] = "",
["inr"] = "",
["instagram"] = "",
["institution"] = "",
["ioxhost"] = "",
["italic"] = "",
["joomla"] = "",
["jpy"] = "",
["jsfiddle"] = "",
["key"] = "",
["keyboard-o"] = "",
["krw"] = "",
["language"] = "",
["laptop"] = "",
["lastfm"] = "",
["lastfm-square"] = "",
["leaf"] = "",
["leanpub"] = "",
["legal"] = "",
["lemon-o"] = "",
["level-down"] = "",
["level-up"] = "",
["life-bouy"] = "",
["life-buoy"] = "",
["life-ring"] = "",
["life-saver"] = "",
["lightbulb-o"] = "",
["line-chart"] = "",
["link"] = "",
["linkedin"] = "",
["linkedin-square"] = "",
["linux"] = "",
["list"] = "",
["list-alt"] = "",
["list-ol"] = "",
["list-ul"] = "",
["location-arrow"] = "",
["lock"] = "",
["long-arrow-down"] = "",
["long-arrow-left"] = "",
["long-arrow-right"] = "",
["long-arrow-up"] = "",
["magic"] = "",
["magnet"] = "",
["mail-forward"] = "",
["mail-reply"] = "",
["mail-reply-all"] = "",
["male"] = "",
["map-marker"] = "",
["mars"] = "",
["mars-double"] = "",
["mars-stroke"] = "",
["mars-stroke-h"] = "",
["mars-stroke-v"] = "",
["maxcdn"] = "",
["meanpath"] = "",
["medium"] = "",
["medkit"] = "",
["meh-o"] = "",
["mercury"] = "",
["microphone"] = "",
["microphone-slash"] = "",
["minus"] = "",
["minus-circle"] = "",
["minus-square"] = "",
["minus-square-o"] = "",
["mobile"] = "",
["mobile-phone"] = "",
["money"] = "",
["moon-o"] = "",
["mortar-board"] = "",
["motorcycle"] = "",
["music"] = "",
["navicon"] = "",
["neuter"] = "",
["newspaper-o"] = "",
["openid"] = "",
["outdent"] = "",
["pagelines"] = "",
["paint-brush"] = "",
["paper-plane"] = "",
["paper-plane-o"] = "",
["paperclip"] = "",
["paragraph"] = "",
["paste"] = "",
["pause"] = "",
["paw"] = "",
["paypal"] = "",
["pencil"] = "",
["pencil-square"] = "",
["pencil-square-o"] = "",
["phone"] = "",
["phone-square"] = "",
["photo"] = "",
["picture-o"] = "",
["pie-chart"] = "",
["pied-piper"] = "",
["pied-piper-alt"] = "",
["pinterest"] = "",
["pinterest-p"] = "",
["pinterest-square"] = "",
["plane"] = "",
["play"] = "",
["play-circle"] = "",
["play-circle-o"] = "",
["plug"] = "",
["plus"] = "",
["plus-circle"] = "",
["plus-square"] = "",
["plus-square-o"] = "",
["power-off"] = "",
["print"] = "",
["puzzle-piece"] = "",
["qq"] = "",
["qrcode"] = "",
["question"] = "",
["question-circle"] = "",
["quote-left"] = "",
["quote-right"] = "",
["ra"] = "",
["random"] = "",
["rebel"] = "",
["recycle"] = "",
["reddit"] = "",
["reddit-square"] = "",
["refresh"] = "",
["remove"] = "",
["renren"] = "",
["reorder"] = "",
["repeat"] = "",
["reply"] = "",
["reply-all"] = "",
["retweet"] = "",
["rmb"] = "",
["road"] = "",
["rocket"] = "",
["rotate-left"] = "",
["rotate-right"] = "",
["rouble"] = "",
["rss"] = "",
["rss-square"] = "",
["rub"] = "",
["ruble"] = "",
["rupee"] = "",
["save"] = "",
["scissors"] = "",
["search"] = "",
["search-minus"] = "",
["search-plus"] = "",
["sellsy"] = "",
["send"] = "",
["send-o"] = "",
["server"] = "",
["share"] = "",
["share-alt"] = "",
["share-alt-square"] = "",
["share-square"] = "",
["share-square-o"] = "",
["shekel"] = "",
["sheqel"] = "",
["shield"] = "",
["ship"] = "",
["shirtsinbulk"] = "",
["shopping-cart"] = "",
["sign-in"] = "",
["sign-out"] = "",
["signal"] = "",
["simplybuilt"] = "",
["sitemap"] = "",
["skyatlas"] = "",
["skype"] = "",
["slack"] = "",
["sliders"] = "",
["slideshare"] = "",
["smile-o"] = "",
["soccer-ball-o"] = "",
["sort"] = "",
["sort-alpha-asc"] = "",
["sort-alpha-desc"] = "",
["sort-amount-asc"] = "",
["sort-amount-desc"] = "",
["sort-asc"] = "",
["sort-desc"] = "",
["sort-down"] = "",
["sort-numeric-asc"] = "",
["sort-numeric-desc"] = "",
["sort-up"] = "",
["soundcloud"] = "",
["space-shuttle"] = "",
["spinner"] = "",
["spoon"] = "",
["spotify"] = "",
["square"] = "",
["square-o"] = "",
["stack-exchange"] = "",
["stack-overflow"] = "",
["star"] = "",
["star-half"] = "",
["star-half-empty"] = "",
["star-half-full"] = "",
["star-half-o"] = "",
["star-o"] = "",
["steam"] = "",
["steam-square"] = "",
["step-backward"] = "",
["step-forward"] = "",
["stethoscope"] = "",
["stop"] = "",
["street-view"] = "",
["strikethrough"] = "",
["stumbleupon"] = "",
["stumbleupon-circle"] = "",
["subscript"] = "",
["subway"] = "",
["suitcase"] = "",
["sun-o"] = "",
["superscript"] = "",
["support"] = "",
["table"] = "",
["tablet"] = "",
["tachometer"] = "",
["tag"] = "",
["tags"] = "",
["tasks"] = "",
["taxi"] = "",
["tencent-weibo"] = "",
["terminal"] = "",
["text-height"] = "",
["text-width"] = "",
["th"] = "",
["th-large"] = "",
["th-list"] = "",
["thumb-tack"] = "",
["thumbs-down"] = "",
["thumbs-o-down"] = "",
["thumbs-o-up"] = "",
["thumbs-up"] = "",
["ticket"] = "",
["times"] = "",
["times-circle"] = "",
["times-circle-o"] = "",
["tint"] = "",
["toggle-down"] = "",
["toggle-left"] = "",
["toggle-off"] = "",
["toggle-on"] = "",
["toggle-right"] = "",
["toggle-up"] = "",
["train"] = "",
["transgender"] = "",
["transgender-alt"] = "",
["trash"] = "",
["trash-o"] = "",
["tree"] = "",
["trello"] = "",
["trophy"] = "",
["truck"] = "",
["try"] = "",
["tty"] = "",
["tumblr"] = "",
["tumblr-square"] = "",
["turkish-lira"] = "",
["twitch"] = "",
["twitter"] = "",
["twitter-square"] = "",
["umbrella"] = "",
["underline"] = "",
["undo"] = "",
["university"] = "",
["unlink"] = "",
["unlock"] = "",
["unlock-alt"] = "",
["unsorted"] = "",
["upload"] = "",
["usd"] = "",
["user"] = "",
["user-md"] = "",
["user-plus"] = "",
["user-secret"] = "",
["user-times"] = "",
["users"] = "",
["venus"] = "",
["venus-double"] = "",
["venus-mars"] = "",
["viacoin"] = "",
["video-camera"] = "",
["vimeo-square"] = "",
["vine"] = "",
["vk"] = "",
["volume-down"] = "",
["volume-off"] = "",
["volume-up"] = "",
["warning"] = "",
["wechat"] = "",
["weibo"] = "",
["weixin"] = "",
["whatsapp"] = "",
["wheelchair"] = "",
["wifi"] = "",
["windows"] = "",
["won"] = "",
["wordpress"] = "",
["wrench"] = "",
["xing"] = "",
["xing-square"] = "",
["yahoo"] = "",
["yelp"] = "",
["yen"] = "",
["youtube"] = "",
["youtube-play"] = "",
["youtube-square"] = "",
}
| 22.568792
| 33
| 0.355438
|
a45e627aded3f250a12d57b6ef9b94b5dd436c91
| 463
|
php
|
PHP
|
src/Validation/Traits/Uuid.php
|
ssistemas/validation
|
cf04a45224aac4fa9e595781e6d4b7286393ac13
|
[
"MIT"
] | 28
|
2015-08-22T11:18:49.000Z
|
2022-03-10T19:27:04.000Z
|
src/Validation/Traits/Uuid.php
|
ssistemas/validation
|
cf04a45224aac4fa9e595781e6d4b7286393ac13
|
[
"MIT"
] | 7
|
2016-02-17T19:11:11.000Z
|
2018-12-14T12:15:29.000Z
|
src/Validation/Traits/Uuid.php
|
ssistemas/validation
|
cf04a45224aac4fa9e595781e6d4b7286393ac13
|
[
"MIT"
] | 17
|
2016-01-07T13:15:43.000Z
|
2020-10-15T13:41:45.000Z
|
<?php

namespace ResultSystems\Validation\Traits;

trait Uuid
{
    /**
     * Validate that an attribute holds a well-formed UUID string
     * (8-4-4-4-12 hexadecimal groups, case-insensitive).
     *
     * @param string $attribute name of the attribute under validation (unused)
     * @param mixed  $value     candidate value; non-strings are rejected
     *
     * @return bool
     */
    public function validateUuid($attribute, $value)
    {
        // Single boolean expression: reject non-strings up front, then match
        // the canonical UUID shape. /i = case-insensitive; /D makes "$" match
        // only at the very end (a trailing newline fails validation).
        return is_string($value)
            && preg_match('/^[\da-f]{8}-[\da-f]{4}-[\da-f]{4}-[\da-f]{4}-[\da-f]{12}$/iD', $value) > 0;
    }
}
| 20.130435
| 103
| 0.522678
|
8321d2533f335b7b521b64521ff90908c43f2b20
| 305
|
ts
|
TypeScript
|
components/ellipsis/index.ts
|
RiESAEX/fes-design
|
1fb3122f2fa612e2ba09b2e65b56fcf8246ca88d
|
[
"MIT"
] | 17
|
2021-12-02T11:27:14.000Z
|
2022-03-23T03:13:29.000Z
|
components/ellipsis/index.ts
|
RiESAEX/fes-design
|
1fb3122f2fa612e2ba09b2e65b56fcf8246ca88d
|
[
"MIT"
] | 5
|
2021-12-21T03:18:43.000Z
|
2022-03-24T08:51:13.000Z
|
components/ellipsis/index.ts
|
RiESAEX/fes-design
|
1fb3122f2fa612e2ba09b2e65b56fcf8246ca88d
|
[
"MIT"
] | 4
|
2021-12-16T11:16:30.000Z
|
2022-03-23T03:12:52.000Z
|
import { withInstall } from '../_util/withInstall';
import Ellipsis from './ellipsis';
import type { SFCWithInstall } from '../_util/interface';
// Wrap the Ellipsis component with the library's install helper so it can be
// registered globally via app.use(FEllipsis) as well as imported directly.
type EllipsisType = SFCWithInstall<typeof Ellipsis>;
export const FEllipsis = withInstall<EllipsisType>(Ellipsis as EllipsisType);
export default FEllipsis;
| 30.5
| 77
| 0.77377
|
74ee39145b6297fc4259af9a60d5867ecbd70bc3
| 1,849
|
go
|
Go
|
geo/bound_test.go
|
dadadamarine/orb
|
78b47f8fd56b4195ee2781b343060c0c9baabd80
|
[
"MIT"
] | null | null | null |
geo/bound_test.go
|
dadadamarine/orb
|
78b47f8fd56b4195ee2781b343060c0c9baabd80
|
[
"MIT"
] | null | null | null |
geo/bound_test.go
|
dadadamarine/orb
|
78b47f8fd56b4195ee2781b343060c0c9baabd80
|
[
"MIT"
] | null | null | null |
package geo
import (
"math"
"testing"
"github.com/dadadamarine/orb"
)
func TestBoundAroundPoint(t *testing.T) {
p := orb.Point{
5.42553,
50.0359,
}
b := NewBoundAroundPoint(p, 1000000)
if b.Center()[1] != p[1] {
t.Errorf("should have correct center lat point")
}
if b.Center()[0] != p[0] {
t.Errorf("should have correct center lon point")
}
//Given point is 968.9 km away from center
if !b.Contains(orb.Point{3.412, 58.3838}) {
t.Errorf("should have point included in bound")
}
b = NewBoundAroundPoint(p, 10000.0)
if b.Center()[1] != p[1] {
t.Errorf("should have correct center lat point")
}
if b.Center()[0] != p[0] {
t.Errorf("should have correct center lon point")
}
//Given point is 968.9 km away from center
if b.Contains(orb.Point{3.412, 58.3838}) {
t.Errorf("should not have point included in bound")
}
}
func TestBoundPad(t *testing.T) {
cases := []struct {
name string
bound orb.Bound
}{
{
name: "test bound",
bound: orb.MultiPoint{{-122.559, 37.887}, {-122.521, 37.911}}.Bound(),
},
{
name: "no height",
bound: orb.MultiPoint{{-122.559, 15}, {-122.521, 15}}.Bound(),
},
{
name: "no area",
bound: orb.Bound{Min: orb.Point{20, -15}, Max: orb.Point{20, -15}},
},
}
for _, tc := range cases {
t.Run(tc.name, func(t *testing.T) {
b2 := BoundPad(tc.bound, 100)
v1 := BoundHeight(tc.bound) + 200
v2 := BoundHeight(b2)
if math.Abs(v1-v2) > 1.0 {
t.Errorf("height incorrected: %f != %f", v1, v2)
}
v1 = BoundWidth(tc.bound) + 200
v2 = BoundWidth(b2)
if math.Abs(v1-v2) > 1.0 {
t.Errorf("height incorrected: %f != %f", v1, v2)
}
})
}
b1 := orb.Bound{Min: orb.Point{-180, -90}, Max: orb.Point{180, 90}}
b2 := BoundPad(b1, 100)
if !b1.Equal(b2) {
t.Errorf("should be extend bound around fill earth: %v", b2)
}
}
| 21.011364
| 73
| 0.603029
|
661185b14dc301ec18f277e398f4468249773b19
| 162
|
py
|
Python
|
wristband/common/views.py
|
hmrc/wristband
|
35648a15b91dea4a927e486bfe0ace5e00b44dcc
|
[
"Apache-2.0"
] | 1
|
2015-07-14T14:32:17.000Z
|
2015-07-14T14:32:17.000Z
|
wristband/common/views.py
|
hmrc/wristband
|
35648a15b91dea4a927e486bfe0ace5e00b44dcc
|
[
"Apache-2.0"
] | 4
|
2015-08-03T11:17:37.000Z
|
2015-09-24T10:06:02.000Z
|
wristband/common/views.py
|
hmrc/wristband
|
35648a15b91dea4a927e486bfe0ace5e00b44dcc
|
[
"Apache-2.0"
] | 2
|
2020-05-05T13:56:47.000Z
|
2021-04-10T23:51:52.000Z
|
from django.http import HttpResponse
from django.views.decorators.http import require_GET
@require_GET
def healthcheck_view(request):
return HttpResponse()
| 20.25
| 52
| 0.820988
|
15d22d1271e79a57c1b881233a843451f5300554
| 1,797
|
rs
|
Rust
|
server/src/db/lifecycle/error.rs
|
capkurmagati/influxdb_iox
|
6f268f82606bfc6ec28b1fa83815908010fbd933
|
[
"Apache-2.0",
"MIT"
] | 1,058
|
2020-11-10T18:10:53.000Z
|
2022-03-31T18:22:22.000Z
|
server/src/db/lifecycle/error.rs
|
influxdata/influxdb_iox
|
ee2ca8fc32ca6f45fa77516464822e990b05089b
|
[
"Apache-2.0",
"MIT"
] | 2,047
|
2020-11-10T20:13:37.000Z
|
2022-03-31T20:33:16.000Z
|
server/src/db/lifecycle/error.rs
|
capkurmagati/influxdb_iox
|
6f268f82606bfc6ec28b1fa83815908010fbd933
|
[
"Apache-2.0",
"MIT"
] | 116
|
2020-11-10T18:10:59.000Z
|
2022-03-28T19:31:11.000Z
|
//! Errors that can occur during lifecycle actions
use snafu::Snafu;
use data_types::chunk_metadata::ChunkAddr;
use crate::db::catalog;
#[allow(clippy::enum_variant_names)]
#[derive(Debug, Snafu)]
// Export the snafu "selectors" so they can be used in other modules
#[snafu(visibility = "pub")]
pub enum Error {
#[snafu(context(false))]
PartitionError { source: catalog::partition::Error },
#[snafu(context(false))]
ChunkError { source: catalog::chunk::Error },
#[snafu(context(false))]
PlannerError {
source: query::frontend::reorg::Error,
},
#[snafu(context(false))]
ArrowError { source: arrow::error::ArrowError },
#[snafu(context(false))]
DataFusionError {
source: datafusion::error::DataFusionError,
},
#[snafu(context(false))]
Aborted { source: futures::future::Aborted },
#[snafu(display("Read Buffer Error in chunk {}{} : {}", chunk_id, table_name, source))]
ReadBufferChunkError {
source: read_buffer::Error,
table_name: String,
chunk_id: u32,
},
#[snafu(display("Error writing to object store: {}", source))]
WritingToObjectStore {
source: parquet_file::storage::Error,
},
#[snafu(display("Error while creating parquet chunk: {}", source))]
ParquetChunkError { source: parquet_file::chunk::Error },
#[snafu(display("Error while commiting transaction on preserved catalog: {}", source))]
CommitError {
source: parquet_catalog::core::Error,
},
#[snafu(display("Cannot write chunk: {}", addr))]
CannotWriteChunk { addr: ChunkAddr },
#[snafu(display("Cannot drop unpersisted chunk: {}", addr))]
CannotDropUnpersistedChunk { addr: ChunkAddr },
}
pub type Result<T, E = Error> = std::result::Result<T, E>;
| 28.52381
| 91
| 0.650529
|
c6c8367ac4444b6d37311239e2cb319cf37c2360
| 211
|
py
|
Python
|
CursoEmVideo/Aula10/ex031.py
|
lucashsouza/Desafios-Python
|
abb5b11ebdfd4c232b4f0427ef41fd96013f2802
|
[
"MIT"
] | null | null | null |
CursoEmVideo/Aula10/ex031.py
|
lucashsouza/Desafios-Python
|
abb5b11ebdfd4c232b4f0427ef41fd96013f2802
|
[
"MIT"
] | null | null | null |
CursoEmVideo/Aula10/ex031.py
|
lucashsouza/Desafios-Python
|
abb5b11ebdfd4c232b4f0427ef41fd96013f2802
|
[
"MIT"
] | null | null | null |
km = float(input('Qual vai ser a distância da viagem: '))
if km <= 200:
print('O valor a ser pago é de: R${:.2f}'.format(km * 0.50))
else:
print('O valor a ser pago é de: R${:.2f}'.format(km * 0.45))
| 42.2
| 65
| 0.582938
|
56eac632adee6a9dc733e3a5a46441e893e01625
| 397
|
kt
|
Kotlin
|
app/src/main/kotlin/vergecurrency/vergewallet/view/ui/components/viewholder/TransactionHeaderViewHolder.kt
|
renovate-tests/vDroid
|
26afa4c6899b58cd2ce3282c66c625e5ec698d42
|
[
"MIT"
] | 17
|
2018-11-08T19:20:23.000Z
|
2021-08-10T12:05:52.000Z
|
app/src/main/kotlin/vergecurrency/vergewallet/view/ui/components/viewholder/TransactionHeaderViewHolder.kt
|
renovate-tests/vDroid
|
26afa4c6899b58cd2ce3282c66c625e5ec698d42
|
[
"MIT"
] | 45
|
2019-01-28T16:54:34.000Z
|
2021-07-20T07:17:46.000Z
|
app/src/main/kotlin/vergecurrency/vergewallet/view/ui/components/viewholder/TransactionHeaderViewHolder.kt
|
renovate-tests/vDroid
|
26afa4c6899b58cd2ce3282c66c625e5ec698d42
|
[
"MIT"
] | 15
|
2019-01-13T20:31:10.000Z
|
2021-07-14T18:26:51.000Z
|
package vergecurrency.vergewallet.view.ui.components.viewholder
import android.view.View
import android.widget.TextView
import androidx.recyclerview.widget.RecyclerView
import vergecurrency.vergewallet.R
class TransactionHeaderViewHolder(view: View) : RecyclerView.ViewHolder(view) {
internal var headerName: TextView = view.findViewById<TextView>(R.id.listview_transactions_header_title)
}
| 36.090909
| 108
| 0.846348
|
095898225140435e0b5f41ded07423100ccf480b
| 2,191
|
kt
|
Kotlin
|
app/src/main/java/com/danhdueexoictif/androidgenericadapter/data/bean/NewBieObject.kt
|
DanhDue/AndroidGenericAdapter
|
7a134aa75de04735f4dc8e2135883bcf3f649b51
|
[
"Apache-2.0"
] | 8
|
2020-09-04T04:59:56.000Z
|
2021-12-17T02:44:07.000Z
|
app/src/main/java/com/danhdueexoictif/androidgenericadapter/data/bean/NewBieObject.kt
|
DanhDue/AndroidGenericAdapter
|
7a134aa75de04735f4dc8e2135883bcf3f649b51
|
[
"Apache-2.0"
] | null | null | null |
app/src/main/java/com/danhdueexoictif/androidgenericadapter/data/bean/NewBieObject.kt
|
DanhDue/AndroidGenericAdapter
|
7a134aa75de04735f4dc8e2135883bcf3f649b51
|
[
"Apache-2.0"
] | 4
|
2020-09-04T04:36:43.000Z
|
2021-04-15T02:23:06.000Z
|
package com.danhdueexoictif.androidgenericadapter.data.bean
import android.os.Parcelable
import androidx.recyclerview.widget.DiffUtil
import com.danhdueexoictif.androidgenericadapter.R
import com.danhdueexoictif.androidgenericadapter.ui.widgets.recyclerview.BaseRecyclerViewModel
import com.google.gson.annotations.SerializedName
import kotlinx.android.parcel.Parcelize
@Parcelize
data class NewBieObject(
@SerializedName("id")
val id: Int? = null,
@SerializedName("fullname")
val fullname: String? = null,
@SerializedName("position")
val position: String? = null,
@SerializedName("location")
val location: String? = null,
@SerializedName("dob")
val dob: String? = null,
@SerializedName("quote")
val quote: String? = null,
@SerializedName("image_uuid")
val imageUuid: String? = null,
@SerializedName("join_at")
val joinAt: String? = null,
@SerializedName("created_at")
val createdAt: String? = null,
@SerializedName("avatarMember")
val avatarMember: AvatarMember? = null,
@SerializedName("hobby")
val hobby: String? = null,
@SerializedName("workspace")
val workspace: String? = null,
@SerializedName("dislike")
val dislike: String? = null,
@Transient
override var layoutId: Int = R.layout.item_newbie
) : BaseRecyclerViewModel() {
@Parcelize
data class AvatarMember(
@SerializedName("data") val data: Data? = null
) : Parcelable {
@Parcelize
data class Data(
@SerializedName("uuid") val uuid: String? = null,
@SerializedName("origin_path") val originPath: String? = null,
@SerializedName("thumbnail_path") val thumbnailPath: String? = null
) : Parcelable
}
companion object {
val COMPARATOR = object : DiffUtil.ItemCallback<BaseRecyclerViewModel>() {
override fun areItemsTheSame(
oldItem: BaseRecyclerViewModel, newItem: BaseRecyclerViewModel
): Boolean = false
override fun areContentsTheSame(
oldItem: BaseRecyclerViewModel, newItem: BaseRecyclerViewModel
): Boolean = false
}
}
}
| 32.220588
| 94
| 0.674578
|
20e3799a103413d1ebae2de8264413c623950634
| 17,497
|
py
|
Python
|
tsconto.py
|
chkastorm/tsconto
|
fd705fc5484a3c6ee3766b0e341c18ddc70bd089
|
[
"BSD-2-Clause"
] | null | null | null |
tsconto.py
|
chkastorm/tsconto
|
fd705fc5484a3c6ee3766b0e341c18ddc70bd089
|
[
"BSD-2-Clause"
] | null | null | null |
tsconto.py
|
chkastorm/tsconto
|
fd705fc5484a3c6ee3766b0e341c18ddc70bd089
|
[
"BSD-2-Clause"
] | null | null | null |
#!/usr/bin/python3
################################################################################
### ###
### Script : Telnet-Simulated-CONsole shortcut TO consoles of guests in GNS3 ###
### Abbreviation : tsconto ###
### Author : Kastor M. ###
### Modified : Kastor M. ###
### Version : 1.0.3 ###
### Date : Sun 10 Apr 2022 03:20:16 PM EDT ###
### ###
################################################################################
import os
import sys
import json
import datetime
import subprocess
from pprint import pprint
db_name_append_datetime = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
db_folder_at_home_directory = os.path.expanduser("~/.rhpn-gns3-stsc/")
db_at_user_home_directory = os.path.expanduser("~/.rhpn-gns3-stsc/dict-GNS3-NEs-list.json")
def message_user_manual():
print("\n#############\n### Usage ###\n#############\n")
print("=============================================================== Connect to console ===============================================================\n")
print("tsconto connect < Registered Hostname > ::: Connect to the Telnet-Simulated Console of a node in GNS3")
print("\n")
print("============================================================== Database Operations ===============================================================\n")
print("tsconto database list ::: List out contents of the existing Database")
print("tsconto database list checkpoint < Filename > ::: List out contents of the Database at a Specific Checkpoint")
print("tsconto database checkpoint ::: List out ALL Checkpoints of the Database")
print("tsconto database revert from < Filename > ::: Revert the Database status back to the Reference Point")
print("tsconto database clean [ --all ] ::: Clean ALL Database Checkpoints [ Deep Clean including the in-force Database ]")
print("tsconto database scan ::: Automatically scan over a specific .gns3 file for a project and import ALL nodes to the DB")
print("tsconto database add < Hostname > < Port Number > ::: To add a node manually")
print("tsconto database delete < Hostname > ::: To delete a node manually from the DB\n")
def message_database_empty():
print("\nIn-Force Database Empty.\nPlease register at least one node to the in-force Database.\n\nFormat : tsconto database add < Hostname > < Port Number >\n///OR/// tsconto database scan\n")
def message_database_checkpoint_empty():
print("\nCheckpoint Empty. Checkpoint will be generated automatically when registering node(s).\n\nFormat : tsconto database add < Hostname > < Port Number >\n///OR/// tsconto database scan\n")
def message_database_checkpoint_not_found():
print("\nCheckpoint < Filename > CANNOT be found from the Registry. Please check and try again.\n\nFormat : tsconto database list checkpoint < Filename >\n e.g. tsconto database list checkpoint dict-GNS3-NEs-list.json.20220116222106\n")
def message_hostname_not_found():
print("\nThe input < Hostname > CANNOT be found from the Registry. Please check and try again. Thank you.\n")
def message_unknown_cmd():
print("\n[ Unknown command ] Please check and try again. Thank you.\n")
if (len(sys.argv) >=3 and sys.argv[1] == "database" and os.path.isfile(db_at_user_home_directory) == True):
if len(sys.argv) == 3:
if sys.argv[2] == "list":
with open(db_at_user_home_directory, "r") as list_nodes:
list_registered_nodes = json.loads(list_nodes.read())
print("")
pprint(list_registered_nodes)
print("")
elif sys.argv[2] == "checkpoint":
print("")
os.system('ls -trl ' + db_folder_at_home_directory)
print("")
elif sys.argv[2] == "clean":
double_confirm_clean_db_backup = input("Do you really want to CLEAR ALL Backup of the Database? (yes/no) : ")
if double_confirm_clean_db_backup == "yes":
os.system('rm -rf ' + db_at_user_home_directory + '.*')
print("\nALL Checkpoints cleared.\nONLY the in-force DB remains.\n")
elif double_confirm_clean_db_backup == "no":
print("\nGood Choice ^^\" !!\n")
else:
print("\nPlease ONLY Enter \"yes\" or \"no\". Thank you.\n")
elif sys.argv[2] == "scan":
scan_target_project_id = input("Please Enter GNS3 Project ID : ")
scan_target_project_name = input("Please Enter GNS3 Project Name : ")
scan_target_gns3_file_path = "/opt/gns3/projects/" + scan_target_project_id + "/" + scan_target_project_name + ".gns3"
with open(scan_target_gns3_file_path, "r") as gns3_file:
list_all_gns3_parameters = json.loads(gns3_file.read())
list_all_gns3_nodes = ((list_all_gns3_parameters["topology"])["nodes"])
list_registered_nodes_duplicate_hostname_checked = []
list_registered_nodes_duplicate_checked = []
with open(db_at_user_home_directory, "r") as list_nodes:
list_registered_nodes = json.loads(list_nodes.read())
for each_node in list_all_gns3_nodes:
for entry_as_dict_hostname_check in list_registered_nodes:
for entry_of_list_registered_nodes_duplicate_hostname_checked in list_registered_nodes_duplicate_hostname_checked:
if (entry_as_dict_hostname_check["Hostname"] != each_node["name"] and entry_as_dict_hostname_check["Hostname"] != entry_of_list_registered_nodes_duplicate_hostname_checked["Hostname"]):
list_registered_nodes_duplicate_hostname_checked.append({"Hostname": entry_as_dict_hostname_check["Hostname"], "Port": entry_as_dict_hostname_check["Port"]})
for each_node in list_all_gns3_nodes:
for entry_as_dict in list_registered_nodes_duplicate_hostname_checked:
for entry_of_list_registered_nodes_duplicate_checked in list_registered_nodes_duplicate_checked:
if (entry_as_dict["Port"] != each_node["console"] and entry_as_dict["Port"] != entry_of_list_registered_nodes_duplicate_checked["Port"]):
list_registered_nodes_duplicate_checked.append({"Hostname": entry_as_dict["Hostname"], "Port": entry_as_dict["Port"]})
for each_node in list_all_gns3_nodes:
list_registered_nodes_duplicate_checked.append({"Hostname": each_node["name"], "Port": each_node["console"]})
os.system('mv ' + db_at_user_home_directory + ' ' + db_at_user_home_directory + '.' + db_name_append_datetime)
with open(db_at_user_home_directory, "w") as list_nodes:
list_nodes.write(json.dumps(list_registered_nodes_duplicate_checked))
print("\nBackup Complete.\nNew Database Created.\n")
else:
message_user_manual()
elif len(sys.argv) == 4:
if sys.argv[2] == "clean" and sys.argv[3] == "--all":
double_confirm_clean_db_backup = input("Do you really want to CLEAR the Entire Database? (yes/no) : ")
if double_confirm_clean_db_backup == "yes":
os.system('rm -rf ' + db_at_user_home_directory + '*')
print("\nDatabase Deep clean finished.\nDatabase is now Empty.\n")
elif double_confirm_clean_db_backup == "no":
print("\nGood Choice ^^\" !!\n")
elif sys.argv[2] == "delete":
input_node_hostname = sys.argv[3]
with open(db_at_user_home_directory, "r") as list_nodes:
list_registered_nodes = json.loads(list_nodes.read())
is_deletable_hostname = []
for deletable_hostname_entry in list_registered_nodes:
if deletable_hostname_entry["Hostname"] == input_node_hostname:
is_deletable_hostname.append(deletable_hostname_entry)
if not is_deletable_hostname:
print("\nNo matched entry in the in-force DB. Please check as below,\n\nFormat : tsconto database list\n")
else:
reduced_list_registered_nodes = []
for entry_as_dict in list_registered_nodes:
if not entry_as_dict["Hostname"] == input_node_hostname:
reduced_list_registered_nodes.append(entry_as_dict)
else:
pass
os.system('mv ' + db_at_user_home_directory + ' ' + db_at_user_home_directory + '.' + db_name_append_datetime)
with open(db_at_user_home_directory, "w") as list_nodes:
list_nodes.write(json.dumps(reduced_list_registered_nodes))
if not reduced_list_registered_nodes:
os.system('rm -rf ' + db_at_user_home_directory)
else:
pass
print("\nEntry Deleted.\n")
else:
message_user_manual()
elif len(sys.argv) == 5:
if (sys.argv[2] == "list" and sys.argv[3] == "checkpoint"):
try:
input_checkpoint_filename = sys.argv[4]
specific_checkpoint_file = (db_folder_at_home_directory + input_checkpoint_filename)
with open(specific_checkpoint_file, "r") as list_nodes:
list_registered_nodes = json.loads(list_nodes.read())
print("")
pprint(list_registered_nodes)
print("")
except:
message_database_checkpoint_not_found()
sys.exit(1)
elif (sys.argv[2] == "revert" and sys.argv[3] == "from"):
input_checkpoint_filename = sys.argv[4]
os.system('cp ' + db_at_user_home_directory + ' ' + db_at_user_home_directory + '.' + db_name_append_datetime)
revert_file = db_folder_at_home_directory + input_checkpoint_filename
revert_file_output = subprocess.run(['cp', revert_file, db_at_user_home_directory], capture_output = True, text = True)
if revert_file_output.returncode == 0:
print("\nReverted to " + sys.argv[4] + "\n")
else:
message_database_checkpoint_not_found()
elif sys.argv[2] == "add":
input_node_hostname = sys.argv[3]
input_node_port_number = sys.argv[4]
with open(db_at_user_home_directory, "r") as list_nodes:
list_registered_nodes = json.loads(list_nodes.read())
list_registered_nodes_duplicate_hostname_checked = []
for entry_as_dict_hostname_check in list_registered_nodes:
if entry_as_dict_hostname_check["Hostname"] != input_node_hostname:
list_registered_nodes_duplicate_hostname_checked.append({"Hostname": entry_as_dict_hostname_check["Hostname"], "Port": entry_as_dict_hostname_check["Port"]})
list_registered_nodes_duplicate_checked = []
for entry_as_dict in list_registered_nodes_duplicate_hostname_checked:
if entry_as_dict["Port"] != input_node_port_number:
list_registered_nodes_duplicate_checked.append({"Hostname": entry_as_dict["Hostname"], "Port": entry_as_dict["Port"]})
list_registered_nodes_duplicate_checked.append({"Hostname": input_node_hostname, "Port": input_node_port_number})
os.system('mv ' + db_at_user_home_directory + ' ' + db_at_user_home_directory + '.' + db_name_append_datetime)
with open(db_at_user_home_directory, "w") as list_nodes:
list_nodes.write(json.dumps(list_registered_nodes_duplicate_checked))
print("\nBackup Complete.\nNew Database Created.\n")
else:
message_user_manual()
else:
message_user_manual()
elif len(sys.argv) == 2:
message_user_manual()
elif (len(sys.argv) == 3 and sys.argv[1] == "database"):
if sys.argv[2] == "checkpoint":
if os.path.isdir(db_folder_at_home_directory):
if not len(os.listdir(db_folder_at_home_directory)) == 0:
print("")
os.system('ls -trl ' + db_folder_at_home_directory)
print("")
else:
message_database_checkpoint_empty()
else:
message_database_checkpoint_empty()
elif sys.argv[2] == "scan":
os.system('mkdir -p ' + db_folder_at_home_directory)
scan_target_project_id = input("Please Enter GNS3 Project ID : ")
scan_target_project_name = input("Please Enter GNS3 Project Name : ")
scan_target_gns3_file_path = "/opt/gns3/projects/" + scan_target_project_id + "/" + scan_target_project_name + ".gns3"
with open(scan_target_gns3_file_path, "r") as gns3_file:
list_all_gns3_parameters = json.loads(gns3_file.read())
list_all_gns3_nodes = ((list_all_gns3_parameters["topology"])["nodes"])
list_registered_nodes = []
for each_node in list_all_gns3_nodes:
list_registered_nodes.append({"Hostname": each_node["name"], "Port": each_node["console"]})
with open(db_at_user_home_directory, "w") as list_nodes:
list_nodes.write(json.dumps(list_registered_nodes))
print("\nCreated new Database.\n")
elif (sys.argv[2] == "list" or sys.argv[2] == "clean"):
message_database_empty()
else:
message_user_manual()
elif (len(sys.argv) == 4 and sys.argv[1] == "database"):
if (sys.argv[2] == "clean" and sys.argv[3] =="--all"):
double_confirm_clean_db_backup = input("Do you really want to CLEAR the Entire Database? (yes/no) : ")
if double_confirm_clean_db_backup == "yes":
os.system('rm -rf ' + db_at_user_home_directory + '*')
print("\nDatabase Deep clean finished.\nDatabase is now Empty.\n")
elif double_confirm_clean_db_backup == "no":
print("\nGood Choice ^^\" !!\n")
elif sys.argv[2] == "delete":
message_database_empty()
else:
message_user_manual()
elif (len(sys.argv) == 5 and sys.argv[1] == "database"):
if (sys.argv[2] == "list" and sys.argv[3] == "checkpoint"):
try:
input_checkpoint_filename = sys.argv[4]
specific_checkpoint_file = (db_folder_at_home_directory + input_checkpoint_filename)
with open(specific_checkpoint_file, "r") as list_nodes:
list_registered_nodes = json.loads(list_nodes.read())
print("")
pprint(list_registered_nodes)
print("")
except:
message_database_checkpoint_not_found()
sys.exit(1)
elif (sys.argv[2] == "revert" and sys.argv[3] == "from"):
if not len(os.listdir(db_folder_at_home_directory)) == 0:
input_checkpoint_filename = sys.argv[4]
os.system('cp ' + db_at_user_home_directory + ' ' + db_at_user_home_directory + '.' + db_name_append_datetime)
revert_file = db_folder_at_home_directory + input_checkpoint_filename
revert_file_output = subprocess.run(['cp', revert_file, db_at_user_home_directory], capture_output = True, text = True)
if revert_file_output.returncode == 0:
print("\nReverted to " + sys.argv[4] + "\n")
else:
message_database_checkpoint_not_found()
else:
message_database_checkpoint_empty()
elif sys.argv[2] == "add":
input_node_hostname = sys.argv[3]
input_node_port_number = sys.argv[4]
list_registered_nodes = [{"Hostname": input_node_hostname, "Port": input_node_port_number}]
os.system('mkdir -p ' + db_folder_at_home_directory + ' && touch ' + db_at_user_home_directory)
with open(db_at_user_home_directory, "a") as list_nodes:
list_nodes.write(json.dumps(list_registered_nodes))
print("\nCreated new Database.\n")
else:
message_user_manual()
elif (len(sys.argv) == 3 and sys.argv[1] == "connect"):
if os.path.isfile(db_at_user_home_directory) == True:
check_if_hostname_in_list_registered_nodes_exist = False
db_at_user_home_directory = os.path.expanduser("~/.rhpn-gns3-stsc/dict-GNS3-NEs-list.json")
with open(db_at_user_home_directory, "r") as list_nodes:
list_registered_nodes = json.loads(list_nodes.read())
for entry_as_dict in list_registered_nodes:
if entry_as_dict["Hostname"] == sys.argv[2]:
os.system('telnet 127.0.0.1 ' + str(entry_as_dict["Port"]))
check_if_hostname_in_list_registered_nodes_exist = True
else:
pass
if check_if_hostname_in_list_registered_nodes_exist == False:
message_hostname_not_found()
else:
pass
elif os.path.isfile(db_at_user_home_directory) == False:
message_database_empty()
else:
message_user_manual()
else:
message_user_manual()
| 54.170279
| 243
| 0.607818
|
e39b88bc491a257f71338aa52bf671b14324bad8
| 5,435
|
rb
|
Ruby
|
lib/kaminari/helpers/paginator.rb
|
l15n/kaminari
|
7d3dbd8e396ddc30454d1bf156a00f21ed9adaf6
|
[
"MIT"
] | null | null | null |
lib/kaminari/helpers/paginator.rb
|
l15n/kaminari
|
7d3dbd8e396ddc30454d1bf156a00f21ed9adaf6
|
[
"MIT"
] | null | null | null |
lib/kaminari/helpers/paginator.rb
|
l15n/kaminari
|
7d3dbd8e396ddc30454d1bf156a00f21ed9adaf6
|
[
"MIT"
] | null | null | null |
require 'active_support/inflector'
require 'kaminari/helpers/tags'
module Kaminari
module Helpers
# The main container tag
class Paginator < Tag
def initialize(template, options) #:nodoc:
@window_options = {}.tap do |h|
h[:window] = options.delete(:window) || options.delete(:inner_window) || Kaminari.config.window
outer_window = options.delete(:outer_window) || Kaminari.config.outer_window
h[:left] = options.delete(:left) || Kaminari.config.left
h[:left] = outer_window if h[:left] == 0
h[:right] = options.delete(:right) || Kaminari.config.right
h[:right] = outer_window if h[:right] == 0
end
@template, @options = template, options
@theme = @options[:theme]
@views_prefix = @options[:views_prefix]
@window_options.merge! @options
@window_options[:current_page] = @options[:current_page] = PageProxy.new(@window_options, @options[:current_page], nil)
@last = nil
# initialize the output_buffer for Context
@output_buffer = template.output_buffer.class.new
end
# render given block as a view template
def render(&block)
instance_eval(&block) if @options[:total_pages] > 1
@output_buffer
end
# enumerate each page providing PageProxy object as the block parameter
# Because of performance reason, this doesn't actually enumerate all pages but pages that are seemingly relevant to the paginator.
# "Relevant" pages are:
# * pages inside the left outer window plus one for showing the gap tag
# * pages inside the inner window plus one on the left plus one on the right for showing the gap tags
# * pages inside the right outer window plus one for showing the gap tag
def each_relevant_page
return to_enum(:each_relevant_page) unless block_given?
relevant_pages(@window_options).each do |page|
yield PageProxy.new(@window_options, page, @last)
end
end
alias each_page each_relevant_page
def relevant_pages(options)
left_window_plus_one = 1.upto(options[:left] + 1).to_a
right_window_plus_one = (options[:total_pages] - options[:right]).upto(options[:total_pages]).to_a
inside_window_plus_each_sides = (options[:current_page] - options[:window] - 1).upto(options[:current_page] + options[:window] + 1).to_a
(left_window_plus_one + inside_window_plus_each_sides + right_window_plus_one).uniq.sort.reject {|x| (x < 1) || (x > options[:total_pages])}
end
private :relevant_pages
def page_tag(page)
@last = Page.new @template, @options.merge(:page => page)
end
%w[first_page prev_page next_page last_page gap].each do |tag|
eval <<-DEF
def #{tag}_tag
@last = #{tag.classify}.new @template, @options
end
DEF
end
def to_s #:nodoc:
super @window_options.merge :paginator => self
end
# delegates view helper methods to @template
def method_missing(name, *args, &block)
@template.respond_to?(name) ? @template.send(name, *args, &block) : super
end
private :method_missing
# Wraps a "page number" and provides some utility methods
class PageProxy
include Comparable
def initialize(options, page, last) #:nodoc:
@options, @page, @last = options, page, last
end
# the page number
def number
@page
end
# current page or not
def current?
@page == @options[:current_page]
end
# the first page or not
def first?
@page == 1
end
# the last page or not
def last?
@page == @options[:total_pages]
end
# the previous page or not
def prev?
@page == @options[:current_page] - 1
end
# the next page or not
def next?
@page == @options[:current_page] + 1
end
# relationship with the current page
def rel
if next?
'next'
elsif prev?
'prev'
end
end
# within the left outer window or not
def left_outer?
@page <= @options[:left]
end
# within the right outer window or not
def right_outer?
@options[:total_pages] - @page < @options[:right]
end
# inside the inner window or not
def inside_window?
(@options[:current_page] - @page).abs <= @options[:window]
end
def single_gap?
(@page == @options[:current_page] - @options[:window] - 1) && (@page == @options[:left] + 1) ||
(@page == @options[:current_page] + @options[:window] + 1) && (@page == @options[:total_pages] - @options[:right])
end
def out_of_range?
@page > @options[:total_pages]
end
# The last rendered tag was "truncated" or not
def was_truncated?
@last.is_a? Gap
end
def to_i
number
end
def to_s
number.to_s
end
def +(other)
to_i + other.to_i
end
def -(other)
to_i - other.to_i
end
def <=>(other)
to_i <=> other.to_i
end
end
end
end
end
| 30.363128
| 148
| 0.585465
|
cd642c65839300ee266653c70c013d77c55bd6e8
| 1,019
|
cs
|
C#
|
AppServiceServerBgTask/AppServiceServerBackgroundTask.cs
|
DanieleScipioni/TestApp
|
d82145dbe68667c2dc56c97c9015e7a494e9a290
|
[
"MIT"
] | null | null | null |
AppServiceServerBgTask/AppServiceServerBackgroundTask.cs
|
DanieleScipioni/TestApp
|
d82145dbe68667c2dc56c97c9015e7a494e9a290
|
[
"MIT"
] | null | null | null |
AppServiceServerBgTask/AppServiceServerBackgroundTask.cs
|
DanieleScipioni/TestApp
|
d82145dbe68667c2dc56c97c9015e7a494e9a290
|
[
"MIT"
] | null | null | null |
using System;
using Windows.ApplicationModel.AppService;
using Windows.ApplicationModel.Background;
using Windows.Foundation.Collections;
namespace AppServiceServerBgTask
{
public sealed class AppServiceServerBackgroundTask : IBackgroundTask
{
public void Run(IBackgroundTaskInstance taskInstance)
{
if (!(taskInstance.TriggerDetails is AppServiceTriggerDetails appServiceTriggerDetails)) return;
AppServiceConnection appServiceConnection = appServiceTriggerDetails.AppServiceConnection;
appServiceConnection.RequestReceived += async (sender, args) =>
{
AppServiceDeferral appServiceDeferral = args.GetDeferral();
AppServiceRequest appServiceRequest = args.Request;
ValueSet request = appServiceRequest.Message;
await appServiceRequest.SendResponseAsync(new ValueSet {{"count", request.Count}});
appServiceDeferral.Complete();
};
}
}
}
| 33.966667
| 108
| 0.693817
|
9003ca73b64bb36dcbd8df5a779b5db9783cb45a
| 280
|
lua
|
Lua
|
neovim/.config/nvim/lua/core/utils/prequire.lua
|
batusai513/personal-dotfiles
|
5ef5a91f6b03c5ea1e761980ed748dd75e9853cf
|
[
"MIT"
] | 1
|
2022-03-03T16:08:46.000Z
|
2022-03-03T16:08:46.000Z
|
neovim/.config/nvim/lua/core/utils/prequire.lua
|
batusai513/personal-dotfiles
|
5ef5a91f6b03c5ea1e761980ed748dd75e9853cf
|
[
"MIT"
] | null | null | null |
neovim/.config/nvim/lua/core/utils/prequire.lua
|
batusai513/personal-dotfiles
|
5ef5a91f6b03c5ea1e761980ed748dd75e9853cf
|
[
"MIT"
] | null | null | null |
_G.prequire = function(plugin, verbose)
local present, plug = pcall(require, plugin)
if present then
return plug
end
local errmsg = string.format("Could not load %s", plugin)
if verbose then
errmsg = string.format("%s\nError:%s", plug)
end
print(errmsg)
end
| 23.333333
| 59
| 0.689286
|
f445da22b8787a3281eb2ce738a8d82fcfb84ede
| 2,272
|
ts
|
TypeScript
|
src/modules/dashboard/components/smartfactory-card/smartfactory-card.component.ts
|
Taesun0727/wisevill
|
4e5e001e74d9e05e76c7aeadf12ef4599f0e21bd
|
[
"MIT"
] | null | null | null |
src/modules/dashboard/components/smartfactory-card/smartfactory-card.component.ts
|
Taesun0727/wisevill
|
4e5e001e74d9e05e76c7aeadf12ef4599f0e21bd
|
[
"MIT"
] | 4
|
2021-06-29T05:38:33.000Z
|
2021-07-19T08:59:51.000Z
|
src/modules/dashboard/components/smartfactory-card/smartfactory-card.component.ts
|
Taesun0727/wisevill
|
4e5e001e74d9e05e76c7aeadf12ef4599f0e21bd
|
[
"MIT"
] | null | null | null |
import { state } from '@angular/animations';
import { Component, OnInit } from '@angular/core';
import { Router } from '@angular/router';
import { company, device } from '@modules/dashboard/models/dashcard.model';
// Placeholder dashboard data.
// NOTE(review): hard-coded sample entries (including two duplicate "헤이헤이"
// rows) — presumably stand-ins until real backend data is wired up; confirm.
let COMPANIES: company[] = [
  { name: "포제이", mserver_status: true, mserver_link: "", tserver_status: true, tserver_link: "", on_device: "", off_device: "", monitor_link: "" },
  { name: "지프라임", mserver_status: true, mserver_link: "", tserver_status: true, tserver_link: "", on_device: "", off_device: "", monitor_link: "" },
  { name: "MES", mserver_status: true, mserver_link: "", tserver_status: true, tserver_link: "", on_device: "", off_device: "", monitor_link: "" },
  { name: "헤이헤이", mserver_status: true, mserver_link: "", tserver_status: true, tserver_link: "", on_device: "", off_device: "", monitor_link: "" },
  { name: "헤이헤이", mserver_status: true, mserver_link: "", tserver_status: true, tserver_link: "", on_device: "", off_device: "", monitor_link: "" },
];
let DEVICES: device[] = [
  // `ex` holds a sample sensor reading emitted by the device; the original
  // (Korean) note says the field was named loosely and should be renamed later.
  { name: "1호기", img: "http://", ex: "온도 : 40", status: true },
  { name: "1호기", img: "http://", ex: "온도 : 40", status: true },
];
@Component({
  selector: 'sb-smartfactory-card',
  templateUrl: './smartfactory-card.component.html',
  styleUrls: ['./smartfactory-card.component.scss']
})
export class SmartfactoryCardComponent implements OnInit {
  devices = DEVICES;
  companies = COMPANIES;

  // pagination state
  page = 1;
  pageSize = 4;
  collectionSize = COMPANIES.length;

  constructor(public router: Router) {
    this.refreshCountries();
  }

  // Rebuild the visible page: tag every company with a 1-based id, then keep
  // only the entries falling inside the current page window.
  refreshCountries() {
    const start = (this.page - 1) * this.pageSize;
    this.companies = COMPANIES
      .map((entry, index) => ({ id: index + 1, ...entry }))
      .slice(start, start + this.pageSize);
  }

  ngOnInit(): void {
  }

  // Navigate to the detail view, handing the selected company over via
  // navigation state.
  gomonitor(company: any) {
    this.router.navigateByUrl('/smartfactory/detail', { state: { company } });
    console.log(company);
  }

  // Navigate to the server view for the selected server and type.
  goserver(server: any, type: number) {
    this.router.navigateByUrl('/smartfactory/server', { state: { server, type } });
  }
}
| 20.106195
| 95
| 0.616637
|
247fb30f7f5c72b584dbaafcd7a20c114b3a5712
| 2,168
|
swift
|
Swift
|
ClassCopyIvarList/ClassCopyIvarList/DetailViewController.swift
|
liuchangjun9343/Moots
|
9c004da865323d97a7ab929ca95e9881ade97804
|
[
"MIT"
] | 1
|
2021-01-19T01:52:50.000Z
|
2021-01-19T01:52:50.000Z
|
ClassCopyIvarList/ClassCopyIvarList/DetailViewController.swift
|
liuchangjun9343/Moots
|
9c004da865323d97a7ab929ca95e9881ade97804
|
[
"MIT"
] | null | null | null |
ClassCopyIvarList/ClassCopyIvarList/DetailViewController.swift
|
liuchangjun9343/Moots
|
9c004da865323d97a7ab929ca95e9881ade97804
|
[
"MIT"
] | null | null | null |
//
// DetailViewController.swift
// ClassCopyIvarList
//
// Created by star on 16/5/12.
// Copyright © 2016年 Zezefamily. All rights reserved.
//
import UIKit
/// Lists the (possibly private) instance variables of the Objective-C class
/// named by `className` in a table view.
class DetailViewController: UIViewController {

    @IBOutlet weak var tableView: UITableView!

    /// Name of the Objective-C class whose ivars are listed.
    var className: String?

    /// Ivar names of `className`; empty when `className` is unset or the
    /// class is unknown to the runtime.
    var classArr: [String] {
        guard let className = className else { return [] }
        return classCopyIvarList(className)
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        loadUI()
    }

    fileprivate func loadUI() {
        title = className
    }

    // NOTE(review): these two methods keep their pre-Swift-3 selector shapes
    // (cellForRowAtIndexPath / unlabeled numberOfRowsInSection), so
    // UITableView's data-source machinery will not call them under modern
    // UIKit. Left signature-compatible for existing callers — verify intent.
    func tableView(_ tableView: UITableView, cellForRowAtIndexPath indexPath: IndexPath) -> UITableViewCell {
        let cell = tableView.dequeueReusableCell(withIdentifier: "cell1", for: indexPath)
        cell.textLabel?.text = classArr[indexPath.row]
        return cell
    }

    func tableView(_: UITableView, numberOfRowsInSection _: Int) -> Int {
        // Previously force-unwrapped className; now returns 0 rows when unset.
        return classArr.count
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    // MARK: Inspect a class's hidden instance variables.
    /// Returns the ivar names of the Objective-C class named `className`,
    /// or an empty array when the class is unknown or has no ivars.
    fileprivate func classCopyIvarList(_ className: String) -> [String] {
        var names: [String] = []
        guard let cls: AnyClass = objc_getClass(className) as? AnyClass else {
            return names
        }
        var count: CUnsignedInt = 0
        guard let ivars = class_copyIvarList(cls, &count) else { return names }
        // class_copyIvarList transfers ownership of the buffer to the caller;
        // the original leaked it on every call.
        defer { free(ivars) }
        // `0..<Int(count)` avoids the unsigned-underflow trap the original
        // `0 ... (count - 1)` hit whenever the class had zero ivars.
        for i in 0..<Int(count) {
            if let cName = ivar_getName(ivars[i]) {
                names.append(String(cString: cName))
            }
        }
        return names
    }
}
| 27.443038
| 109
| 0.64714
|
2015eb63d731d66c551233ca0b96a2ad9327f837
| 376
|
swift
|
Swift
|
FingerRuler/Ruler/Ruler.swift
|
MintJian/FingerRuler
|
4fdab7b81798e99399dd0d5c89e3dca82f4e6802
|
[
"MIT"
] | null | null | null |
FingerRuler/Ruler/Ruler.swift
|
MintJian/FingerRuler
|
4fdab7b81798e99399dd0d5c89e3dca82f4e6802
|
[
"MIT"
] | null | null | null |
FingerRuler/Ruler/Ruler.swift
|
MintJian/FingerRuler
|
4fdab7b81798e99399dd0d5c89e3dca82f4e6802
|
[
"MIT"
] | null | null | null |
//
// Ruler.swift
// FingerRuler
//
// Created by Sumisora on 2020/08/31.
// Copyright © 2020 MintJian. All rights reserved.
//
import Foundation
import SceneKit
/// A SceneKit node that presents a ruler; conformers expose the ruler body
/// and its label as separate optional child nodes.
protocol Ruler: SCNNode {
    // node holding the ruler's 3D model (per the name; usage not visible here)
    var rulerModel: SCNNode? { get set }
    // node holding the ruler's text label
    var textModel: SCNNode? { get set }
}
/// Static metadata describing a ruler type.
protocol RulerData {
    // default measurement accuracy (units not shown in this file — verify)
    var defaultRulerAccuracy: Int { get }
    // display name of the ruler
    var name: String { get set }
}
| 17.904762
| 51
| 0.670213
|
ef79372800c0de623c1b5b50c5a3b7d55aafc299
| 5,787
|
rs
|
Rust
|
tests/lemire_tests.rs
|
FrozenDroid/minimal-lexical
|
608452a7c210d9205112b0e810bef8a20287a4f5
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
tests/lemire_tests.rs
|
FrozenDroid/minimal-lexical
|
608452a7c210d9205112b0e810bef8a20287a4f5
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
tests/lemire_tests.rs
|
FrozenDroid/minimal-lexical
|
608452a7c210d9205112b0e810bef8a20287a4f5
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
use minimal_lexical::lemire;
// Midpoint mantissas that must resolve to the smaller neighbouring f64.
// A `(0.0, false)` result appears to mean the Eisel-Lemire fast path could
// not prove its answer correct and defers to the slower path (TODO confirm
// against minimal_lexical's lemire docs).
#[test]
fn test_halfway_round_down() {
    // Check only Eisel-Lemire.
    assert_eq!((9007199254740992.0, true), lemire::eisel_lemire::<f64>(9007199254740992, 0));
    assert_eq!((0.0, false), lemire::eisel_lemire::<f64>(9007199254740993, 0));
    assert_eq!((9007199254740994.0, true), lemire::eisel_lemire::<f64>(9007199254740994, 0));
    assert_eq!((9223372036854775808.0, true), lemire::eisel_lemire::<f64>(9223372036854775808, 0));
    assert_eq!((0.0, false), lemire::eisel_lemire::<f64>(9223372036854776832, 0));
    assert_eq!((9223372036854777856.0, true), lemire::eisel_lemire::<f64>(9223372036854777856, 0));
    // We can't get an accurate representation here.
    assert_eq!((0.0, false), lemire::eisel_lemire::<f64>(9007199254740992000, -3));
    assert_eq!((0.0, false), lemire::eisel_lemire::<f64>(9007199254740993000, -3));
    assert_eq!((0.0, false), lemire::eisel_lemire::<f64>(9007199254740994000, -3));
    // Check with the extended-float backup.
    // Here the second tuple field distinguishes exact (true) from
    // best-effort (false) results rather than failing outright.
    assert_eq!(
        (9007199254740992.0, true),
        lemire::moderate_path::<f64>(9007199254740992, 0, false)
    );
    assert_eq!(
        (9007199254740992.0, false),
        lemire::moderate_path::<f64>(9007199254740993, 0, false)
    );
    assert_eq!(
        (9007199254740994.0, true),
        lemire::moderate_path::<f64>(9007199254740994, 0, false)
    );
    assert_eq!(
        (9223372036854775808.0, true),
        lemire::moderate_path::<f64>(9223372036854775808, 0, false)
    );
    assert_eq!(
        (9223372036854775808.0, false),
        lemire::moderate_path::<f64>(9223372036854776832, 0, false)
    );
    assert_eq!(
        (9223372036854777856.0, true),
        lemire::moderate_path::<f64>(9223372036854777856, 0, false)
    );
    // We can't get an accurate from Lemire representation here.
    assert_eq!(
        (9007199254740992.0, true),
        lemire::moderate_path::<f64>(9007199254740992000, -3, false)
    );
    assert_eq!(
        (9007199254740992.0, false),
        lemire::moderate_path::<f64>(9007199254740993000, -3, false)
    );
    assert_eq!(
        (9007199254740994.0, true),
        lemire::moderate_path::<f64>(9007199254740994000, -3, false)
    );
}
// Midpoint mantissas that must resolve to the larger neighbouring f64.
// As above, `(0.0, false)` from eisel_lemire appears to signal "cannot
// decide; fall back" (TODO confirm), while moderate_path always produces a
// value and flags exactness in the bool.
#[test]
fn test_halfway_round_up() {
    // Check only Eisel-Lemire.
    assert_eq!((9007199254740994.0, true), lemire::eisel_lemire::<f64>(9007199254740994, 0));
    assert_eq!((9007199254740996.0, true), lemire::eisel_lemire::<f64>(9007199254740995, 0));
    assert_eq!((9007199254740996.0, true), lemire::eisel_lemire::<f64>(9007199254740996, 0));
    assert_eq!((18014398509481988.0, true), lemire::eisel_lemire::<f64>(18014398509481988, 0));
    assert_eq!((18014398509481992.0, true), lemire::eisel_lemire::<f64>(18014398509481990, 0));
    assert_eq!((18014398509481992.0, true), lemire::eisel_lemire::<f64>(18014398509481992, 0));
    assert_eq!((9223372036854777856.0, true), lemire::eisel_lemire::<f64>(9223372036854777856, 0));
    assert_eq!((9223372036854779904.0, true), lemire::eisel_lemire::<f64>(9223372036854778880, 0));
    assert_eq!((9223372036854779904.0, true), lemire::eisel_lemire::<f64>(9223372036854779904, 0));
    // We can't get an accurate representation here.
    assert_eq!((0.0, false), lemire::eisel_lemire::<f64>(9007199254740994000, -3));
    assert_eq!((0.0, false), lemire::eisel_lemire::<f64>(9007199254740995000, -3));
    assert_eq!((0.0, false), lemire::eisel_lemire::<f64>(9007199254740996000, -3));
    // Check with the extended-float backup.
    assert_eq!(
        (9007199254740994.0, true),
        lemire::moderate_path::<f64>(9007199254740994, 0, false)
    );
    assert_eq!(
        (9007199254740996.0, true),
        lemire::moderate_path::<f64>(9007199254740995, 0, false)
    );
    assert_eq!(
        (9007199254740996.0, true),
        lemire::moderate_path::<f64>(9007199254740996, 0, false)
    );
    assert_eq!(
        (18014398509481988.0, true),
        lemire::moderate_path::<f64>(18014398509481988, 0, false)
    );
    assert_eq!(
        (18014398509481992.0, true),
        lemire::moderate_path::<f64>(18014398509481990, 0, false)
    );
    assert_eq!(
        (18014398509481992.0, true),
        lemire::moderate_path::<f64>(18014398509481992, 0, false)
    );
    assert_eq!(
        (9223372036854777856.0, true),
        lemire::moderate_path::<f64>(9223372036854777856, 0, false)
    );
    assert_eq!(
        (9223372036854779904.0, true),
        lemire::moderate_path::<f64>(9223372036854778880, 0, false)
    );
    assert_eq!(
        (9223372036854779904.0, true),
        lemire::moderate_path::<f64>(9223372036854779904, 0, false)
    );
    // We can't get an accurate from Lemire representation here.
    assert_eq!(
        (9007199254740994.0, true),
        lemire::moderate_path::<f64>(9007199254740994000, -3, false)
    );
    assert_eq!(
        (9007199254740994.0, false),
        lemire::moderate_path::<f64>(9007199254740995000, -3, false)
    );
    assert_eq!(
        (9007199254740996.0, true),
        lemire::moderate_path::<f64>(9007199254740996000, -3, false)
    );
}
#[test]
fn test_mul() {
    // Verify the 128-bit significand multiply returns the expected
    // (high, low) halves. Operands are normalized power-of-ten
    // significands; the power is noted per case.
    let cases: [(u64, u64, (u64, u64)); 4] = [
        // 1e1 * 1e10
        (11529215046068469760, 10737418240000000000, (0x5D21DBA000000000, 0x0000000000000000)),
        // 1e9 * 1e70
        (17179869184000000000, 13363823550460978230, (0xACB92ED9397BF995, 0xA23A700000000000)),
        // 1e9 * 1e280 (e289 combined)
        (17179869184000000000, 10162340898095201970, (0x83585D8FD9C25DB6, 0xFC31D00000000000)),
        // 1e0 * 1e290 (e290 combined)
        (9223372036854775808, 11830521861667747109, (0x52173A79E8197A92, 0x8000000000000000)),
    ];
    for &(x, y, expected) in &cases {
        assert_eq!(expected, lemire::mul(x, y));
    }
}
| 38.838926
| 99
| 0.658027
|
43227e5789a1c9d4e17d3f36dea2017c876a37ec
| 173
|
lua
|
Lua
|
misc/regcheck.lua
|
jameshegarty/rigel
|
63bae94d8f2df675e0e36d1416f3f10c0b607368
|
[
"MIT"
] | 56
|
2016-07-27T01:23:42.000Z
|
2022-03-20T08:22:54.000Z
|
misc/regcheck.lua
|
jameshegarty/rigel
|
63bae94d8f2df675e0e36d1416f3f10c0b607368
|
[
"MIT"
] | 87
|
2017-04-01T00:35:37.000Z
|
2019-09-17T19:11:00.000Z
|
misc/regcheck.lua
|
jameshegarty/rigel
|
63bae94d8f2df675e0e36d1416f3f10c0b607368
|
[
"MIT"
] | 7
|
2016-12-08T03:05:07.000Z
|
2019-11-08T04:29:14.000Z
|
-- Compare the tables produced by two Lua files (given as argv[1]/argv[2]);
-- exit with status 1 on any differing or missing key, else fall through
-- to status 0.
local left = dofile(arg[1])
local right = dofile(arg[2])

-- Every key of x must map to the same value in y. Keys present only in y
-- are caught by the reverse pass below.
local function subset_matches(x, y)
  for key in pairs(x) do
    if x[key] ~= y[key] then return false end
  end
  return true
end

if not (subset_matches(left, right) and subset_matches(right, left)) then
  os.exit(1)
end
| 28.833333
| 60
| 0.624277
|
09da2f0aeff534c5bc0d2526439882857102845c
| 638
|
lua
|
Lua
|
lpeg_patterns/http/origin.lua
|
daurnimator/lpeg_patterns
|
0da7cadb867858f8fdf2bfbfbc92a51a7c28c3d7
|
[
"MIT"
] | 117
|
2015-01-30T00:10:04.000Z
|
2022-03-29T05:57:17.000Z
|
lpeg_patterns/http/origin.lua
|
daurnimator/lpeg_patterns
|
0da7cadb867858f8fdf2bfbfbc92a51a7c28c3d7
|
[
"MIT"
] | 15
|
2015-06-20T23:51:38.000Z
|
2018-09-10T04:26:34.000Z
|
lpeg_patterns/http/origin.lua
|
daurnimator/lpeg_patterns
|
0da7cadb867858f8fdf2bfbfbc92a51a7c28c3d7
|
[
"MIT"
] | 14
|
2015-06-20T21:23:58.000Z
|
2021-09-11T10:01:31.000Z
|
-- RFC 6454
-- LPeg grammar for the HTTP `Origin` header (RFC 6454).
local lpeg = require "lpeg"
local core = require "lpeg_patterns.core"
local http_core = require "lpeg_patterns.http.core"
local uri = require "lpeg_patterns.uri"
local C = lpeg.C
local P = lpeg.P
-- discard captures from scheme, host, port and just get whole string
local serialized_origin = C(uri.scheme * P"://" * uri.host * (P":" * uri.port)^-1/function() end)
-- one or more origins separated by a single space (core.SP)
local origin_list = serialized_origin * (core.SP * serialized_origin)^0
-- the literal "null" is a valid header value per RFC 6454
local origin_list_or_null = P"null" + origin_list
-- full header value with optional surrounding whitespace (http_core.OWS)
local Origin = http_core.OWS * origin_list_or_null * http_core.OWS
return {
	serialized_origin = serialized_origin;
	Origin = Origin;
}
| 30.380952
| 97
| 0.739812
|
6407d9ed216cfc2cba8bb1cb102069aa61b02053
| 5,768
|
py
|
Python
|
Computer-Pointer-Controller/src/main.py
|
piyush0511/Intel-Edge-AI-Projects
|
88b21e21c827fc6cac1deb23d8f5b8867d16b57b
|
[
"MIT"
] | 1
|
2020-05-31T13:29:32.000Z
|
2020-05-31T13:29:32.000Z
|
Computer-Pointer-Controller/src/main.py
|
piyush0511/Intel-Edge-AI-Projects
|
88b21e21c827fc6cac1deb23d8f5b8867d16b57b
|
[
"MIT"
] | 4
|
2021-06-08T21:55:52.000Z
|
2022-03-12T00:38:36.000Z
|
Computer-Pointer-Controller/src/main.py
|
piyush0511/Intel-Edge-AI-Projects
|
88b21e21c827fc6cac1deb23d8f5b8867d16b57b
|
[
"MIT"
] | null | null | null |
import time
from openvino.inference_engine import IENetwork, IECore
import os
import cv2
import argparse
import sys
import numpy as np
from faced import Facedet
from input_feeder import InputFeeder
from posed import Posedet
from landed import Landet
from gazed import Gazedet
from mouse_controller import MouseController
import math
import logging
def draw_arrow(frame, cord, l, r, results):
    """Draw the gaze direction as an arrow from each eye.

    cord: presumably the face crop's top-left corner in frame coordinates
        (it offsets the eye landmarks) — confirm against the caller.
    l, r: left/right eye landmark offsets relative to the face crop.
    results: gaze vector; x/y components are scaled by 300 px.
    Returns the annotated frame.
    """
    left_eye = (cord[0] + l[0], cord[1] + l[1])
    right_eye = (cord[0] + r[0], cord[1] + r[1])
    dx = int(results[0] * 300)
    dy = int(-results[1] * 300)  # image y axis points down, hence the negation
    cv2.arrowedLine(frame, left_eye, (left_eye[0] + dx, left_eye[1] + dy), (155, 0, 0), 3)
    cv2.arrowedLine(frame, right_eye, (right_eye[0] + dx, right_eye[1] + dy), (255, 0, 0), 3)
    return frame
def build_camera_matrix(center_of_face, focal_length):
    """Build a 3x3 pinhole-camera intrinsics matrix.

    center_of_face: (x, y) principal point; coordinates are truncated to int.
    focal_length: focal length used for both fx and fy.
    Returns a float32 ndarray [[f, 0, cx], [0, f, cy], [0, 0, 1]].
    """
    cx = int(center_of_face[0])
    cy = int(center_of_face[1])
    return np.array(
        [[focal_length, 0, cx],
         [0, focal_length, cy],
         [0, 0, 1]],
        dtype='float32')
def draw_axes(frame, center_of_face, yaw, pitch, roll):
    """Draw the head-pose axes on `frame` and return it.

    yaw/pitch/roll are in degrees (converted to radians below) and combined
    into a rotation matrix that projects three scaled unit axes through a
    pinhole camera centred on the face. Axis colours (BGR): x red, y green,
    z blue.
    NOTE(review): also calls cv2.imshow("frame", ...) as a side effect.
    """
    focal_length = 950.0
    scale = 50
    # degrees -> radians
    yaw *= np.pi / 180.0
    pitch *= np.pi / 180.0
    roll *= np.pi / 180.0
    cx = int(center_of_face[0])
    cy = int(center_of_face[1])
    # per-angle rotation matrices (pitch about x, yaw about y, roll about z)
    Rx = np.array([[1, 0, 0],
                   [0, math.cos(pitch), -math.sin(pitch)],
                   [0, math.sin(pitch), math.cos(pitch)]])
    Ry = np.array([[math.cos(yaw), 0, -math.sin(yaw)],
                   [0, 1, 0],
                   [math.sin(yaw), 0, math.cos(yaw)]])
    Rz = np.array([[math.cos(roll), -math.sin(roll), 0],
                   [math.sin(roll), math.cos(roll), 0],
                   [0, 0, 1]])
    # ref: https://www.learnopencv.com/rotation-matrix-to-euler-angles/
    R = Rz @ Ry @ Rx
    camera_matrix = build_camera_matrix(center_of_face, focal_length)
    # axis endpoints (column vectors), scaled to `scale` px
    xaxis = np.array(([1 * scale, 0, 0]), dtype='float32').reshape(3, 1)
    yaxis = np.array(([0, -1 * scale, 0]), dtype='float32').reshape(3, 1)
    zaxis = np.array(([0, 0, -1 * scale]), dtype='float32').reshape(3, 1)
    zaxis1 = np.array(([0, 0, 1 * scale]), dtype='float32').reshape(3, 1)
    # origin pushed in front of the camera by the focal length
    # (camera_matrix[0][0] == focal_length per build_camera_matrix)
    o = np.array(([0, 0, 0]), dtype='float32').reshape(3, 1)
    o[2] = camera_matrix[0][0]
    # rotate then translate each axis endpoint
    xaxis = np.dot(R, xaxis) + o
    yaxis = np.dot(R, yaxis) + o
    zaxis = np.dot(R, zaxis) + o
    zaxis1 = np.dot(R, zaxis1) + o
    # perspective projection: p = (v / z) * f + principal point
    xp2 = (xaxis[0] / xaxis[2] * camera_matrix[0][0]) + cx
    yp2 = (xaxis[1] / xaxis[2] * camera_matrix[1][1]) + cy
    p2 = (int(xp2), int(yp2))
    cv2.line(frame, (cx, cy), p2, (0, 0, 255), 2)
    xp2 = (yaxis[0] / yaxis[2] * camera_matrix[0][0]) + cx
    yp2 = (yaxis[1] / yaxis[2] * camera_matrix[1][1]) + cy
    p2 = (int(xp2), int(yp2))
    cv2.line(frame, (cx, cy), p2, (0, 255, 0), 2)
    # z axis is drawn between its back (+scale) and front (-scale) endpoints
    xp1 = (zaxis1[0] / zaxis1[2] * camera_matrix[0][0]) + cx
    yp1 = (zaxis1[1] / zaxis1[2] * camera_matrix[1][1]) + cy
    p1 = (int(xp1), int(yp1))
    xp2 = (zaxis[0] / zaxis[2] * camera_matrix[0][0]) + cx
    yp2 = (zaxis[1] / zaxis[2] * camera_matrix[1][1]) + cy
    p2 = (int(xp2), int(yp2))
    cv2.line(frame, p1, p2, (255, 0, 0), 2)
    cv2.circle(frame, p2, 3, (255, 0, 0), 2)
    cv2.imshow("frame",frame)
    return frame
def main(args):
    """Run the gaze-driven mouse-pointer pipeline.

    Loads the four OpenVINO models (face detection, head pose, facial
    landmarks, gaze), reads frames from a video file or webcam, estimates
    the gaze vector per frame and moves the mouse accordingly.
    Press 'q' in the preview window to quit.
    """
    logging.basicConfig(level=logging.INFO,
                        format="%(asctime)s [%(levelname)s] %(message)s",
                        handlers=[logging.FileHandler("gaze.log"), logging.StreamHandler()])
    modelf=args.modelf
    modelp=args.modelp
    modell=args.modell
    modelg=args.modelg
    device=args.device
    threshold=args.threshold
    ### Load the models and report total load time ###
    logging.info("============== Models Load time ===============")
    start_time = time.time()
    fa= Facedet(modelf, device, threshold)
    fa.load_model()
    pa= Posedet(modelp, device)
    pa.load_model()
    la= Landet(modell, device)
    la.load_model()
    ga= Gazedet(modelg, device)
    ga.load_model()
    logging.info("Model: {:.1f}ms".format(1000 * (time.time() - start_time)) )
    logging.info("============== End =====================")
    m= MouseController("low", "fast")
    if args.video is not None:
        feed=InputFeeder(input_type='video', input_file=args.video)
    else:
        feed=InputFeeder(input_type='cam')
    # NOTE(review): initial_w/initial_h are never used below
    initial_w, initial_h = feed.load_data()
    time.sleep(3)
    try:
        for ret, frame in feed.next_batch():
            if not ret:
                break
            # mirror the frame so on-screen motion matches the user's
            frame = cv2.flip(frame,1)
            t=time.time()
            # get face
            image , c, cord = fa.predict(frame)
            # get head pose estimation (yaw, pitch, roll as a 1x3 row)
            val = pa.predict(image).reshape((1,3))
            # get landmarks
            li , ri, l, r = la.predict(image)
            # get gaze vector
            results = ga.predict(val, li, ri)
            logging.info("Total Inference Time:{:.1f}ms".format(1000*(time.time()-t)))
            # move mouse
            m.move(results[0], results[1])
            yaw = val[0][0]
            pitch = val[0][1]
            roll = val[0][2]
            # display results and frame (overlays only when --verbose yes)
            if args.verbose == "yes":
                #ref: https://knowledge.udacity.com/questions/171017
                frame = draw_axes(frame, c, yaw, pitch, roll)
                frame = draw_arrow(frame, cord, l, r,results)
            cv2.imshow("frame",frame)
            key = cv2.waitKey(1) & 0xFF
            # if the `q` key was pressed, break from the loop
            if key == ord("q"):
                break
        cv2.destroyAllWindows()
        feed.close()
    except Exception as ex:
        logging.exception("Error in inference:" + str(ex))
if __name__=='__main__':
    # Command-line entry point: the four model paths are required;
    # device, video source and threshold have defaults.
    parser=argparse.ArgumentParser()
    parser.add_argument('--modelf', required=True, help="Face Model" )
    parser.add_argument('--modelp', required=True, help="Head Pose Model")
    parser.add_argument('--modell', required=True, help="Landmarks Model")
    parser.add_argument('--modelg', required=True, help="Gaze Model")
    parser.add_argument('--verbose', default="None", help=" Display Face (if yes type 'yes')")
    parser.add_argument('--device', default='CPU', help="Device")
    parser.add_argument('--video', default=None, help="Path to the video file")
    # type=float so a user-supplied value arrives as a number like the
    # default does (previously CLI values came through as strings).
    parser.add_argument('--threshold', default=0.60, type=float, help="Threshold")
    args=parser.parse_args()
    main(args)
| 29.279188
| 121
| 0.64251
|
1acf653c94fb12fde24e3362f74a74327be62c7f
| 811
|
py
|
Python
|
venv/lib/python3.5/site-packages/airflow/contrib/hooks/vertica_hook.py
|
mesodiar/bello-airflow
|
afede57f214774b50e6a4c083ca096ca2c060d31
|
[
"MIT"
] | 1
|
2021-04-05T11:25:36.000Z
|
2021-04-05T11:25:36.000Z
|
airflow/contrib/hooks/vertica_hook.py
|
fvlankvelt/airflow
|
6cbe4a475f773bf32e1d7743718f7ae1a7dd9c91
|
[
"Apache-2.0"
] | null | null | null |
airflow/contrib/hooks/vertica_hook.py
|
fvlankvelt/airflow
|
6cbe4a475f773bf32e1d7743718f7ae1a7dd9c91
|
[
"Apache-2.0"
] | 1
|
2019-12-12T06:44:14.000Z
|
2019-12-12T06:44:14.000Z
|
from vertica_python import connect
from airflow.hooks.dbapi_hook import DbApiHook
class VerticaHook(DbApiHook):
    '''
    Interact with Vertica.
    '''

    conn_name_attr = 'vertica_conn_id'
    default_conn_name = 'vertica_default'
    supports_autocommit = True

    def get_conn(self):
        """
        Returns verticaql connection object
        """
        airflow_conn = self.get_connection(self.vertica_conn_id)
        # Map the Airflow connection onto vertica_python's keyword arguments,
        # falling back to localhost:5433 when host/port are unset.
        conn_config = {
            "user": airflow_conn.login,
            "password": airflow_conn.password or '',
            "database": airflow_conn.schema,
            "host": airflow_conn.host or 'localhost',
            "port": int(airflow_conn.port) if airflow_conn.port else 5433,
        }
        return connect(**conn_config)
| 24.575758
| 56
| 0.591862
|
55b238df67e1d3403800262e881373c19a727c6b
| 299
|
sh
|
Shell
|
generate_dockerfiles.sh
|
chickenzord/alpine-gradle
|
3147d6f9a543fc531f9f2435302720c038634a92
|
[
"MIT"
] | 6
|
2016-05-07T12:00:14.000Z
|
2019-09-10T09:48:13.000Z
|
generate_dockerfiles.sh
|
chickenzord/docker-alpine-gradle
|
3147d6f9a543fc531f9f2435302720c038634a92
|
[
"MIT"
] | null | null | null |
generate_dockerfiles.sh
|
chickenzord/docker-alpine-gradle
|
3147d6f9a543fc531f9f2435302720c038634a92
|
[
"MIT"
] | null | null | null |
#!/bin/sh
# Render Dockerfile.tpl once per version listed in ./versions (one per line,
# newest first), writing each result to dockerfiles/<tag>/Dockerfile.
# _gen VERSION [TAG] — TAG defaults to VERSION.
_gen () {
    version=$1
    tag=${2:-$version}
    mkdir -p "dockerfiles/$tag"
    sed "s/__VERSION__/$version/g" Dockerfile.tpl > "dockerfiles/$tag/Dockerfile"
}

# generate latest (quoted so the version can never be word-split/globbed)
_gen "$(head -n 1 versions)" latest

# generate all the rest; IFS= and -r keep whitespace and backslashes intact
while IFS= read -r version; do
    _gen "$version"
done <versions
| 17.588235
| 79
| 0.685619
|
643396d210afea6f4d0a557beafcca53eb48f6ef
| 861
|
py
|
Python
|
config/configuration_yaml.py
|
Surbeivol/PythonMatchingEngine
|
f94150294a85d7b415ca4518590b5a661d6f9958
|
[
"MIT"
] | 52
|
2019-05-30T20:02:05.000Z
|
2022-03-23T02:57:10.000Z
|
config/configuration_yaml.py
|
Surbeivol/PythonMatchingEngine
|
f94150294a85d7b415ca4518590b5a661d6f9958
|
[
"MIT"
] | 2
|
2021-12-24T16:43:46.000Z
|
2021-12-24T16:43:59.000Z
|
config/configuration_yaml.py
|
Surbeivol/PythonMatchingEngine
|
f94150294a85d7b415ca4518590b5a661d6f9958
|
[
"MIT"
] | 19
|
2019-06-18T14:35:22.000Z
|
2022-03-17T21:28:18.000Z
|
import os
import os.path
import yaml
class Configuration:
    """Loads the YAML configuration files that live next to this module."""

    def __init__(self):
        # Directory containing this module (and the *.yml files).
        self.path = os.path.dirname(__file__)

    def get_liq_bands(self):
        """Return the parsed contents of liq_bands.yml."""
        return self.__load_config(os.path.join(self.path, 'liq_bands.yml'))

    def get_trades_bands(self):
        """Return the parsed contents of trades_bands.yml."""
        return self.__load_config(os.path.join(self.path, 'trades_bands.yml'))

    def __load_config(self, file_path):
        """Parse one YAML file and return the resulting object.

        Raises:
            AttributeError: the file does not exist (kept as-is for
                backward compatibility with existing callers).
            Exception: the file exists but is not valid YAML; chained to
                the underlying YAMLError.
        """
        if not os.path.exists(file_path):
            raise AttributeError(f"Config file not found:{file_path}")
        # TODO: assert with a template
        with open(file_path, 'r') as stream:
            try:
                # safe_load is the idiomatic equivalent of
                # yaml.load(stream, Loader=yaml.SafeLoader)
                return yaml.safe_load(stream)
            except yaml.YAMLError as exc:
                raise Exception(f'Error loading '
                                f'configuration file {file_path}: {exc}') from exc
| 30.75
| 77
| 0.623693
|
e489b727f3c9d3a04173bd317fb5bfaecf528bdf
| 11,366
|
go
|
Go
|
gtk/tree_view_column.go
|
firelizzard18/gotk3
|
c6f9938302053cfb2d5fe780d49b626067fb226b
|
[
"0BSD"
] | 1,945
|
2015-08-14T08:21:13.000Z
|
2022-03-30T01:56:23.000Z
|
gtk/tree_view_column.go
|
hfmrow/gotk3
|
b8144e3926a03ad1ab87dcbd49c0c9c4053781cc
|
[
"0BSD"
] | 472
|
2015-10-16T04:06:09.000Z
|
2022-03-26T21:51:46.000Z
|
gtk/tree_view_column.go
|
hfmrow/gotk3
|
b8144e3926a03ad1ab87dcbd49c0c9c4053781cc
|
[
"0BSD"
] | 366
|
2015-09-19T00:13:12.000Z
|
2022-03-12T00:37:16.000Z
|
// Same copyright and license as the rest of the files in this project
// This file contains accelerator related functions and structures
package gtk
// #include <gtk/gtk.h>
// #include "gtk.go.h"
import "C"
import (
"unsafe"
"github.com/gotk3/gotk3/glib"
)
/*
* GtkTreeViewColumn
*/
// TreeViewColumns is a representation of GTK's GtkTreeViewColumn.
type TreeViewColumn struct {
glib.InitiallyUnowned
}
// native returns a pointer to the underlying GtkTreeViewColumn.
// It is nil-safe: a nil receiver or unset GObject yields a nil C pointer.
func (v *TreeViewColumn) native() *C.GtkTreeViewColumn {
	if v == nil || v.GObject == nil {
		return nil
	}
	p := unsafe.Pointer(v.GObject)
	return C.toGtkTreeViewColumn(p)
}
// marshalTreeViewColumn extracts the GObject stored in a GValue (passed as
// a uintptr by glib's marshalling machinery) and wraps it as a
// *TreeViewColumn with a new reference taken via glib.Take.
func marshalTreeViewColumn(p uintptr) (interface{}, error) {
	c := C.g_value_get_object((*C.GValue)(unsafe.Pointer(p)))
	obj := glib.Take(unsafe.Pointer(c))
	return wrapTreeViewColumn(obj), nil
}
// wrapTreeViewColumn wraps a *glib.Object as a *TreeViewColumn.
// Returns nil when obj is nil.
func wrapTreeViewColumn(obj *glib.Object) *TreeViewColumn {
	if obj == nil {
		return nil
	}
	return &TreeViewColumn{glib.InitiallyUnowned{obj}}
}
// TreeViewColumnNew() is a wrapper around gtk_tree_view_column_new().
func TreeViewColumnNew() (*TreeViewColumn, error) {
c := C.gtk_tree_view_column_new()
if c == nil {
return nil, nilPtrErr
}
return wrapTreeViewColumn(glib.Take(unsafe.Pointer(c))), nil
}
// TreeViewColumnNewWithArea is a wrapper around gtk_tree_view_column_new_with_area().
func TreeViewColumnNewWithArea(area ICellArea) (*TreeViewColumn, error) {
c := C.gtk_tree_view_column_new_with_area(area.toCellArea())
if c == nil {
return nil, nilPtrErr
}
return wrapTreeViewColumn(glib.Take(unsafe.Pointer(c))), nil
}
// TreeViewColumnNewWithAttribute() is a wrapper around
// gtk_tree_view_column_new_with_attributes() that only sets one
// attribute for one column.
func TreeViewColumnNewWithAttribute(title string, renderer ICellRenderer, attribute string, column int) (*TreeViewColumn, error) {
t_cstr := C.CString(title)
defer C.free(unsafe.Pointer(t_cstr))
a_cstr := C.CString(attribute)
defer C.free(unsafe.Pointer(a_cstr))
c := C._gtk_tree_view_column_new_with_attributes_one((*C.gchar)(t_cstr),
renderer.toCellRenderer(), (*C.gchar)(a_cstr), C.gint(column))
if c == nil {
return nil, nilPtrErr
}
return wrapTreeViewColumn(glib.Take(unsafe.Pointer(c))), nil
}
// AddAttribute() is a wrapper around gtk_tree_view_column_add_attribute().
func (v *TreeViewColumn) AddAttribute(renderer ICellRenderer, attribute string, column int) {
cstr := C.CString(attribute)
defer C.free(unsafe.Pointer(cstr))
C.gtk_tree_view_column_add_attribute(v.native(),
renderer.toCellRenderer(), (*C.gchar)(cstr), C.gint(column))
}
// SetExpand() is a wrapper around gtk_tree_view_column_set_expand().
func (v *TreeViewColumn) SetExpand(expand bool) {
C.gtk_tree_view_column_set_expand(v.native(), gbool(expand))
}
// GetExpand() is a wrapper around gtk_tree_view_column_get_expand().
func (v *TreeViewColumn) GetExpand() bool {
c := C.gtk_tree_view_column_get_expand(v.native())
return gobool(c)
}
// SetMinWidth() is a wrapper around gtk_tree_view_column_set_min_width().
func (v *TreeViewColumn) SetMinWidth(minWidth int) {
C.gtk_tree_view_column_set_min_width(v.native(), C.gint(minWidth))
}
// GetMinWidth() is a wrapper around gtk_tree_view_column_get_min_width().
func (v *TreeViewColumn) GetMinWidth() int {
c := C.gtk_tree_view_column_get_min_width(v.native())
return int(c)
}
// PackStart() is a wrapper around gtk_tree_view_column_pack_start().
func (v *TreeViewColumn) PackStart(cell ICellRenderer, expand bool) {
C.gtk_tree_view_column_pack_start(v.native(), cell.toCellRenderer(), gbool(expand))
}
// PackEnd() is a wrapper around gtk_tree_view_column_pack_end().
func (v *TreeViewColumn) PackEnd(cell ICellRenderer, expand bool) {
C.gtk_tree_view_column_pack_end(v.native(), cell.toCellRenderer(), gbool(expand))
}
// Clear() is a wrapper around gtk_tree_view_column_clear().
func (v *TreeViewColumn) Clear() {
C.gtk_tree_view_column_clear(v.native())
}
// ClearAttributes() is a wrapper around gtk_tree_view_column_clear_attributes().
func (v *TreeViewColumn) ClearAttributes(cell *CellRenderer) {
C.gtk_tree_view_column_clear_attributes(v.native(), cell.native())
}
// SetSpacing() is a wrapper around gtk_tree_view_column_set_spacing().
func (v *TreeViewColumn) SetSpacing(spacing int) {
C.gtk_tree_view_column_set_spacing(v.native(), C.gint(spacing))
}
// GetSpacing() is a wrapper around gtk_tree_view_column_get_spacing().
func (v *TreeViewColumn) GetSpacing() int {
return int(C.gtk_tree_view_column_get_spacing(v.native()))
}
// SetVisible() is a wrapper around gtk_tree_view_column_set_visible().
func (v *TreeViewColumn) SetVisible(visible bool) {
C.gtk_tree_view_column_set_visible(v.native(), gbool(visible))
}
// GetVisible() is a wrapper around gtk_tree_view_column_get_visible().
func (v *TreeViewColumn) GetVisible() bool {
return gobool(C.gtk_tree_view_column_get_visible(v.native()))
}
// SetResizable() is a wrapper around gtk_tree_view_column_set_resizable().
func (v *TreeViewColumn) SetResizable(resizable bool) {
C.gtk_tree_view_column_set_resizable(v.native(), gbool(resizable))
}
// GetResizable() is a wrapper around gtk_tree_view_column_get_resizable().
func (v *TreeViewColumn) GetResizable() bool {
return gobool(C.gtk_tree_view_column_get_resizable(v.native()))
}
// GetWidth() is a wrapper around gtk_tree_view_column_get_width().
func (v *TreeViewColumn) GetWidth() int {
return int(C.gtk_tree_view_column_get_width(v.native()))
}
// SetFixedWidth() is a wrapper around gtk_tree_view_column_set_fixed_width().
func (v *TreeViewColumn) SetFixedWidth(w int) {
C.gtk_tree_view_column_set_fixed_width(v.native(), C.gint(w))
}
// GetFixedWidth() is a wrapper around gtk_tree_view_column_get_fixed_width().
func (v *TreeViewColumn) GetFixedWidth() int {
return int(C.gtk_tree_view_column_get_fixed_width(v.native()))
}
// SetMaxWidth() is a wrapper around gtk_tree_view_column_set_max_width().
func (v *TreeViewColumn) SetMaxWidth(w int) {
C.gtk_tree_view_column_set_max_width(v.native(), C.gint(w))
}
// GetMaxWidth() is a wrapper around gtk_tree_view_column_get_max_width().
func (v *TreeViewColumn) GetMaxWidth() int {
return int(C.gtk_tree_view_column_get_max_width(v.native()))
}
// Clicked() is a wrapper around gtk_tree_view_column_clicked().
func (v *TreeViewColumn) Clicked() {
C.gtk_tree_view_column_clicked(v.native())
}
// SetTitle() is a wrapper around gtk_tree_view_column_set_title().
func (v *TreeViewColumn) SetTitle(t string) {
cstr := (*C.gchar)(C.CString(t))
defer C.free(unsafe.Pointer(cstr))
C.gtk_tree_view_column_set_title(v.native(), cstr)
}
// GetTitle() is a wrapper around gtk_tree_view_column_get_title().
func (v *TreeViewColumn) GetTitle() string {
return C.GoString((*C.char)(C.gtk_tree_view_column_get_title(v.native())))
}
// SetClickable() is a wrapper around gtk_tree_view_column_set_clickable().
func (v *TreeViewColumn) SetClickable(clickable bool) {
C.gtk_tree_view_column_set_clickable(v.native(), gbool(clickable))
}
// GetClickable() is a wrapper around gtk_tree_view_column_get_clickable().
func (v *TreeViewColumn) GetClickable() bool {
return gobool(C.gtk_tree_view_column_get_clickable(v.native()))
}
// SetReorderable() is a wrapper around gtk_tree_view_column_set_reorderable().
func (v *TreeViewColumn) SetReorderable(reorderable bool) {
C.gtk_tree_view_column_set_reorderable(v.native(), gbool(reorderable))
}
// GetReorderable() is a wrapper around gtk_tree_view_column_get_reorderable().
func (v *TreeViewColumn) GetReorderable() bool {
return gobool(C.gtk_tree_view_column_get_reorderable(v.native()))
}
// SetSortIndicator() is a wrapper around gtk_tree_view_column_set_sort_indicator().
func (v *TreeViewColumn) SetSortIndicator(reorderable bool) {
C.gtk_tree_view_column_set_sort_indicator(v.native(), gbool(reorderable))
}
// GetSortIndicator() is a wrapper around gtk_tree_view_column_get_sort_indicator().
func (v *TreeViewColumn) GetSortIndicator() bool {
return gobool(C.gtk_tree_view_column_get_sort_indicator(v.native()))
}
// SetSortColumnID() is a wrapper around gtk_tree_view_column_set_sort_column_id().
func (v *TreeViewColumn) SetSortColumnID(w int) {
C.gtk_tree_view_column_set_sort_column_id(v.native(), C.gint(w))
}
// GetSortColumnID() is a wrapper around gtk_tree_view_column_get_sort_column_id().
func (v *TreeViewColumn) GetSortColumnID() int {
return int(C.gtk_tree_view_column_get_sort_column_id(v.native()))
}
// CellIsVisible() is a wrapper around gtk_tree_view_column_cell_is_visible().
func (v *TreeViewColumn) CellIsVisible() bool {
return gobool(C.gtk_tree_view_column_cell_is_visible(v.native()))
}
// FocusCell() is a wrapper around gtk_tree_view_column_focus_cell().
func (v *TreeViewColumn) FocusCell(cell *CellRenderer) {
C.gtk_tree_view_column_focus_cell(v.native(), cell.native())
}
// QueueResize() is a wrapper around gtk_tree_view_column_queue_resize().
func (v *TreeViewColumn) QueueResize() {
C.gtk_tree_view_column_queue_resize(v.native())
}
// GetXOffset() is a wrapper around gtk_tree_view_column_get_x_offset().
func (v *TreeViewColumn) GetXOffset() int {
return int(C.gtk_tree_view_column_get_x_offset(v.native()))
}
// void gtk_tree_view_column_set_attributes ()
// void gtk_tree_view_column_set_cell_data_func ()
type TreeViewColumnSizing int
const (
TREE_VIEW_COLUMN_GROW_ONLY TreeViewColumnSizing = C.GTK_TREE_VIEW_COLUMN_GROW_ONLY
TREE_VIEW_COLUMN_AUTOSIZE = C.GTK_TREE_VIEW_COLUMN_AUTOSIZE
TREE_VIEW_COLUMN_FIXED = C.GTK_TREE_VIEW_COLUMN_FIXED
)
// void gtk_tree_view_column_set_sizing ()
func (v *TreeViewColumn) SetSizing(sizing TreeViewColumnSizing) {
C.gtk_tree_view_column_set_sizing(v.native(), C.GtkTreeViewColumnSizing(sizing))
}
// GtkTreeViewColumnSizing gtk_tree_view_column_get_sizing ()
func (v *TreeViewColumn) GetSizing() TreeViewColumnSizing {
return TreeViewColumnSizing(C.gtk_tree_view_column_get_sizing(v.native()))
}
// SetWidget() is a wrapper around gtk_tree_view_column_set_widget().
func (v *TreeViewColumn) SetWidget(widget IWidget) {
C.gtk_tree_view_column_set_widget(v.native(), widget.toWidget())
}
// GetButton() is a wrapper around gtk_tree_view_column_get_button().
func (v *TreeViewColumn) GetButton() (IWidget, error) {
widget := C.gtk_tree_view_column_get_button(v.native())
if widget == nil {
return nil, nilPtrErr
}
return castWidget(widget)
}
// GetWidget() is a wrapper around gtk_tree_view_column_get_widget().
func (v *TreeViewColumn) GetWidget() (IWidget, error) {
widget := C.gtk_tree_view_column_get_widget(v.native())
if widget == nil {
return nil, nil
}
return castWidget(widget)
}
// void gtk_tree_view_column_set_alignment ()
// gfloat gtk_tree_view_column_get_alignment ()
// void gtk_tree_view_column_set_sort_order ()
func (v *TreeViewColumn) SetSortOrder(order SortType) {
C.gtk_tree_view_column_set_sort_order(v.native(), C.GtkSortType(order))
}
// GtkSortType gtk_tree_view_column_get_sort_order ()
func (v *TreeViewColumn) GetSortOrder() SortType {
return SortType(C.gtk_tree_view_column_get_sort_order(v.native()))
}
// void gtk_tree_view_column_cell_set_cell_data ()
// void gtk_tree_view_column_cell_get_size ()
// gboolean gtk_tree_view_column_cell_get_position ()
// GtkWidget * gtk_tree_view_column_get_tree_view ()
| 35.080247
| 130
| 0.775383
|
25fac9c6122d34ad561e98bdcb532f4dd2d957e1
| 187
|
cshtml
|
C#
|
input/_Head.cshtml
|
isachpaz/mahapps.com
|
a3e447598304e31d7650565287bfcb6ee6eaa90a
|
[
"MIT"
] | 20
|
2020-03-14T10:11:23.000Z
|
2021-11-11T14:49:08.000Z
|
input/_Head.cshtml
|
isachpaz/mahapps.com
|
a3e447598304e31d7650565287bfcb6ee6eaa90a
|
[
"MIT"
] | 28
|
2020-04-28T08:26:19.000Z
|
2021-06-22T08:38:18.000Z
|
input/_Head.cshtml
|
isachpaz/mahapps.com
|
a3e447598304e31d7650565287bfcb6ee6eaa90a
|
[
"MIT"
] | 38
|
2020-04-29T23:09:36.000Z
|
2022-02-06T06:51:57.000Z
|
<script type="text/javascript" src="@Context.GetLink("/assets/js/anchor.min.js")"></script>
<script type="text/javascript" src="@Context.GetLink("/assets/js/clipboard.min.js")"></script>
| 62.333333
| 94
| 0.721925
|
e7445cd7284b2c8fbbba8c5311d4e43198bd50c2
| 1,126
|
php
|
PHP
|
app/Events/BookingCreated.php
|
mfpfakhri/pkl-v.1.2
|
396d1a9607112ce4b90d1f9924912c03095bed85
|
[
"MIT"
] | null | null | null |
app/Events/BookingCreated.php
|
mfpfakhri/pkl-v.1.2
|
396d1a9607112ce4b90d1f9924912c03095bed85
|
[
"MIT"
] | null | null | null |
app/Events/BookingCreated.php
|
mfpfakhri/pkl-v.1.2
|
396d1a9607112ce4b90d1f9924912c03095bed85
|
[
"MIT"
] | null | null | null |
<?php
namespace App\Events;
use Illuminate\Broadcasting\Channel;
use Illuminate\Queue\SerializesModels;
use Illuminate\Broadcasting\PrivateChannel;
use Illuminate\Broadcasting\PresenceChannel;
use Illuminate\Broadcasting\InteractsWithSockets;
use Illuminate\Contracts\Broadcasting\ShouldBroadcast;
//TAMBAHAN
use App\User;
use App\Models\Booking;
use App\Models\Customer;
use App\Models\Schedule;
use App\Models\Inf_lokasi;
class BookingCreated
{
use InteractsWithSockets, SerializesModels;
public $schedule;
public $lokasi;
public $booking;
public $user;
/**
* Create a new event instance.
*
* @return void
*/
public function __construct(Booking $booking, User $user, Inf_lokasi $lokasi, Schedule $schedule)
{
$this->user = $user;
$this->lokasi = $lokasi;
$this->booking = $booking;
$this->schedule = $schedule;
// dd($this);
}
/**
* Get the channels the event should broadcast on.
*
* @return Channel|array
*/
public function broadcastOn()
{
return new PrivateChannel('channel-name');
}
}
| 22.52
| 101
| 0.680284
|
115447e3605bb9c464904125070c0ad370dbfce5
| 3,441
|
dart
|
Dart
|
lib/src/array2d/dynamic/dynamic_mixin.dart
|
Grizzly-dart/grizzly_array
|
4d6c22432bea61c052232ada25d1b5cd1dbe0b22
|
[
"BSD-3-Clause"
] | 1
|
2018-03-20T14:49:19.000Z
|
2018-03-20T14:49:19.000Z
|
lib/src/array2d/dynamic/dynamic_mixin.dart
|
Grizzly-dart/grizzly_array
|
4d6c22432bea61c052232ada25d1b5cd1dbe0b22
|
[
"BSD-3-Clause"
] | 3
|
2018-02-06T20:38:55.000Z
|
2018-03-30T19:58:42.000Z
|
lib/src/array2d/dynamic/dynamic_mixin.dart
|
Grizzly-dart/grizzly_array
|
4d6c22432bea61c052232ada25d1b5cd1dbe0b22
|
[
"BSD-3-Clause"
] | null | null | null |
part of grizzly.array2d;
abstract class Dynamic2DMixin implements Array2D<dynamic> {
List<Dynamic1DView> get _data;
Dynamic2D make(Iterable<Iterable<dynamic>> newData) => Dynamic2D(newData);
@override
Array<dynamic> makeArray(Iterable<dynamic> newData) => Dynamic1D(newData);
int get numCols {
if (numRows == 0) return 0;
return _data.first.length;
}
int get numRows => _data.length;
Index2D get shape => Index2D(numRows, numCols);
bool get isSquare => numRows == numCols;
Dynamic2D slice(Index2D start, [Index2D end]) {
final Index2D myShape = shape;
if (end == null) {
end = myShape;
} else {
if (end < Index2D.zero)
throw ArgumentError.value(end, 'end', 'Index out of range!');
if (end >= myShape)
throw ArgumentError.value(end, 'end', 'Index out of range!');
if (start > end)
throw ArgumentError.value(end, 'end', 'Must be greater than start!');
}
if (start < Index2D.zero)
throw ArgumentError.value(start, 'start', 'Index out of range!');
if (start >= myShape)
throw ArgumentError.value(start, 'start', 'Index out of range!');
final list = <Dynamic1D>[];
for (int c = start.row; c < end.row; c++) {
list.add(_data[c].slice(start.col, end.col));
}
return Dynamic2D.own(list);
}
dynamic get min {
dynamic min;
for (int i = 0; i < numRows; i++) {
for (int j = 0; j < _data.first.length; j++) {
final dynamic d = _data[i][j];
if (d == null) continue;
if (min == null || d.compareTo(min) < 0) min = d; // TODO
}
}
return min;
}
dynamic get max {
dynamic max;
for (int i = 0; i < numRows; i++) {
for (int j = 0; j < _data.first.length; j++) {
final dynamic d = _data[i][j];
if (d == null) continue;
if (max == null || d.compareTo(max) > 0) max = d; // TODO
}
}
return max;
}
Index2D get argMin {
Index2D ret;
dynamic min;
for (int i = 0; i < numRows; i++) {
for (int j = 0; j < _data.first.length; j++) {
final dynamic d = _data[i][j];
if (d == null) continue;
if (min == null || d.compareTo(min) < 0) {
// TODO
min = d;
ret = Index2D(i, j);
}
}
}
return ret;
}
Index2D get argMax {
Index2D ret;
dynamic max;
for (int i = 0; i < numRows; i++) {
for (int j = 0; j < _data.first.length; j++) {
final dynamic d = _data[i][j];
if (d == null) continue;
if (max == null || d.compareTo(max) > 0) {
// TODO
max = d;
ret = Index2D(i, j);
}
}
}
return ret;
}
Dynamic2D get transpose {
final ret = Dynamic2D.sized(numCols, numRows);
for (int j = 0; j < _data.first.length; j++) {
for (int i = 0; i < numRows; i++) {
ret[j][i] = _data[i][j];
}
}
return ret;
}
Dynamic1D get diagonal {
int dim = numCols;
if (dim > numRows) dim = numRows;
final ret = Dynamic1D.sized(dim);
for (int i = 0; i < dim; i++) {
ret[i] = _data[i][i];
}
return ret;
}
Dynamic2D reshaped(Index2D newShape, {dynamic def}) =>
clone()..reshape(newShape, def: def);
Dynamic2D clone() => Dynamic2D(this);
}
| 26.068182
| 78
| 0.519907
|
6d6cbb6c5916cbeb98f955e2f900d2b0bba212f5
| 10,118
|
c
|
C
|
include_ui.c
|
MGProduction/StoryTllrC64
|
b843238abc3dde97439996ac84274608a049bb71
|
[
"MIT"
] | null | null | null |
include_ui.c
|
MGProduction/StoryTllrC64
|
b843238abc3dde97439996ac84274608a049bb71
|
[
"MIT"
] | null | null | null |
include_ui.c
|
MGProduction/StoryTllrC64
|
b843238abc3dde97439996ac84274608a049bb71
|
[
"MIT"
] | null | null | null |
// -----------------------------
// UI CODE
// -----------------------------
u8 x,y;
u8 blink;
void scrollup()
{
REFRESH
#if defined(WIN32)
memmove(video_ram+text_ty*40,video_ram+(text_ty+1)*40,9*40);
memset(video_ram+24*40,' ',40);
#else
__asm__("lda $1");
__asm__("sta %v",ch);
__asm__("sei");
__asm__("and $fc");
__asm__("sta $1");
__asm__("ldx #40");
__asm__("scrollloop:");
__asm__("dex");
__asm__("lda %w,x",TTVIDEOMEM+(text_ty+0+1)*40);
__asm__("sta %w,x",TTVIDEOMEM+(text_ty+0)*40);
__asm__("lda %w,x",TTVIDEOMEM+(text_ty+1+1)*40);
__asm__("sta %w,x",TTVIDEOMEM+(text_ty+1)*40);
__asm__("lda %w,x",TTVIDEOMEM+(text_ty+2+1)*40);
__asm__("sta %w,x",TTVIDEOMEM+(text_ty+2)*40);
__asm__("lda %w,x",TTVIDEOMEM+(text_ty+3+1)*40);
__asm__("sta %w,x",TTVIDEOMEM+(text_ty+3)*40);
__asm__("lda %w,x",TTVIDEOMEM+(text_ty+4+1)*40);
__asm__("sta %w,x",TTVIDEOMEM+(text_ty+4)*40);
__asm__("lda %w,x",TTVIDEOMEM+(text_ty+5+1)*40);
__asm__("sta %w,x",TTVIDEOMEM+(text_ty+5)*40);
__asm__("lda %w,x",TTVIDEOMEM+(text_ty+6+1)*40);
__asm__("sta %w,x",TTVIDEOMEM+(text_ty+6)*40);
__asm__("lda %w,x",TTVIDEOMEM+(text_ty+7+1)*40);
__asm__("sta %w,x",TTVIDEOMEM+(text_ty+7)*40);
__asm__("lda %w,x",TTVIDEOMEM+(text_ty+8+1)*40);
__asm__("sta %w,x",TTVIDEOMEM+(text_ty+8)*40);
//__asm__("inx");
__asm__("cpx #0");
__asm__("bne scrollloop");
__asm__("lda %v",ch);
__asm__("sta $1");
__asm__("cli");
__asm__("lda #32");
__asm__("ldx #39");
__asm__("scrollloop2:");
__asm__("dex");
__asm__("sta %w,x",TTVIDEOMEM+24*40);
__asm__("cpx #0");
__asm__("bne scrollloop2");
#endif
memmove(video_colorram+text_ty*40,video_colorram+(text_ty+1)*40,9*40);
}
u16 ii,ll,spl,align=0;
void ui_clear()
{
text_y=0,text_x=0;al=0;
if(clearfull)
{
memset(TVIDEOMEM+status_y*40,' ',SCREEN_W);
memset(video_colorram+status_y*40,0,SCREEN_W);
clearfull=0;
}
memset(TVIDEOMEM+TVIDEORAM_OFFSET,' ',TVIDEORAM_SIZE);
memset(video_colorram+TVIDEORAM_OFFSET,0,TVIDEORAM_SIZE);
}
#define ALIGN_LEFT 0
#define ALIGN_RIGHT 1
#define ALIGN_CENTER 2
u8 _pch,b_bch,_ech,_cpl[2],_buffer[SCREEN_W+2],_cbuffer[SCREEN_W+2];
u8*btxt;
void _savechpos()
{
btxt=txt;
b_bch=_bch;
}
void _restorechpos()
{
txt=btxt;
_bch=b_bch;
}
void _getnextch()
{
_pch=_ch;
#if defined(packed_strings)
if(_ech)
{
_ch=_ech;
_ech=0;
}
else
if(_bch)
{
_ch=_bch;
_bch=0;
}
else
if(txt==etxt)
_ch=0;
else
{
_ch=*txt++;
if(_ch&0x80)
{
_ch=_ch&0x7f;
memcpy(_cpl,packdata+(_ch<<1),2);
_ch=_cpl[0];
_bch=_cpl[1];
}
}
#else
if(txt==etxt)
_ch=0;
else
_ch=*txt++;
#endif
}
void cr()
{
REFRESH
text_x=0;
text_y++;
if(text_ty+text_y>=SCREEN_H)
{
scrollup();
text_y--;
}
}
void core_cr()
{
REFRESH
txt_x=0;
txt_y++;
if((_ch==' ')||(_ch==FAKE_CARRIAGECR))
_getnextch();
if(txt_y>=SCREEN_H)
{
scrollup();
txt_y--;
}
al++;
}
u8 v,u;
void core_drawtext()
{
_getnextch();
while(_ch)
{
if(al+1>=text_stoprange)
{
_ech=_ch;
return;
}
if(_ch==FAKE_CARRIAGECR)
{
core_cr();
_getnextch();
}
else
{
align=ALIGN_LEFT;spl=ll=0;
while(_ch&&(ll+txt_x<SCREEN_W)&&(_ch!=FAKE_CARRIAGECR))
{
if(_ch==ESCAPE_CHAR)
{
_getnextch();
switch(_ch)
{
case 'c'-'a'+1:
align=ALIGN_CENTER;
break;
case 'r'-'a'+1:
align=ALIGN_RIGHT;
break;
case 'l'-'a'+1:
align=ALIGN_LEFT;
break;
case 'g'-'a'+1:
txt_col=COLOR_GRAY2;
break;
case 'y'-'a'+1:
txt_col=COLOR_YELLOW;
break;
case 'w'-'a'+1:
txt_col=COLOR_WHITE;
break;
case 'V'-'A'+65:
u=1;
case 'v'-'a'+1:
v=0;
while(vrb[v])
{
_buffer[ll]=vrb[v]+txt_rev;
if(u)
{_buffer[ll]+=64;u=0;}
_cbuffer[ll]=txt_col;
ll++;v++;
}
break;
}
_getnextch();
}
else
{
if(_ch==' ')
{
spl=ll;
_savechpos();
}
_buffer[ll]=_ch+txt_rev;_cbuffer[ll]=txt_col;
ll++;
_getnextch();
}
}
if(ll+txt_x>=SCREEN_W)
{
_restorechpos();
ll=spl;
_getnextch();
}
switch(align)
{
case ALIGN_CENTER:
txt_x+=(SCREEN_W-ll)>>1;
break;
case ALIGN_RIGHT:
txt_x+=(SCREEN_W-ll);
break;
}
memcpy(video_ram+txt_y*40+txt_x,_buffer,ll);
memcpy(video_colorram+txt_y*40+txt_x,_cbuffer,ll);
txt_x+=ll;
if(_ch==0)
break;
else
core_cr();
}
}
}
void status_update()
{
strid=roomnameid[room];
if(strid!=255)
{str=advnames;_getstring();txt=ostr;}
else
txt="";
if(*txt)
{
memset(video_colorram+status_y*40,COLOR_YELLOW,40);
memset(video_ram+status_y*40,160,40);
al=0;txt_col=COLOR_YELLOW;txt_rev=128;txt_x=0;txt_y=status_y;
core_drawtext();
}
}
void hide_blink()
{
video_colorram[txt_y*40+(txt_x)]=COLOR_BLACK;
}
void do_blink()
{
blink++;
if(blink>90)
{
u8 ch=video_colorram[txt_y*40+(txt_x)];
if(ch==COLOR_BLACK)
ch=COLOR_GRAY2;
else
ch=COLOR_BLACK;
video_colorram[txt_y*40+(txt_x)]=ch;
video_ram[txt_y*40+(txt_x)]=108;
blink=0;
}
}
char charmap(char c)
{
if((c>='0')&&(c<='9'))
c=c-'0'+48;
else
if((c>='A')&&(c<='Z'))
c=c-'A'+65;
else
if((c>='a')&&(c<='z'))
c=c-'a'+1;
else
if(c==' ')
c=32;
else
if(c=='.')
c=46;
else
if(c==',')
c=44;
else
c=0;
return c;
}
void parser_update()
{
txt=">";
al=0;txt_col=COLOR_GRAY2;txt_rev=0;txt_x=0;txt_y=text_ty+text_y;
core_drawtext();
txt=strcmd;
txt_col=COLOR_GRAY2;
core_drawtext();
video_colorram[txt_y*40+txt_x]=COLOR_BLACK;
video_ram[txt_y*40+txt_x]=' ';
al=0;
REFRESH
}
#define SCNKEY 0xFF9F
#define GETIN 0xFFE4
void ui_getkey()
{
while(1)
{
#if defined(WIN32)
ch=cgetc();
#else
__asm__("JSR %w",SCNKEY);
__asm__("JSR %w",GETIN);
__asm__("STA %v",ch);
#endif
if(ch)
break;
REFRESH
}
#if defined(WIN32)
if((ch>='a')&&(ch<='z'))
ch=ch-'a'+'A';
#endif
}
void ui_waitkey()
{
ll=18;
while(ll<21)
{
video_colorram[24*40+ll]=COLOR_GRAY1;
video_ram[24*40+ll]=46;
ll++;
}
ui_getkey();
ll=18;
while(ll<21)
{
video_colorram[24*40+ll]=COLOR_GRAY1;
video_ram[24*40+ll]=' ';
ll++;
}
al=0;
}
void ui_text_write(u8*text)
{
txt=text;
txt_col=COLOR_WHITE;
if(text_attach)
text_attach=0;
else
{
txt_rev=0;txt_x=0;txt_y=text_ty+text_y;
}
while(1)
{
core_drawtext();
if(_ch==0)
{
if(txt[-1]=='+')
{text_attach=1;if(txt_x) txt_x--;}
else
{
text_y=txt_y-text_ty;
cr();
}
break;
}
else
ui_waitkey();
}
al++;
}
void room_load()
{
while(1)
{
cmd=vrb_onleave;obj1=255;adv_run();
room=newroom;
os_roomimage_load();
executed=0;
if((roomattr[room]&1)==0)
{
roomattr[room]|=1;
cmd=vrb_onfirst;
obj1=255;adv_run();
}
if(executed==0)
{
cmd=vrb_onenter;
obj1=255;
adv_run();
}
if(nextroom!=meta_nowhere)
{
newroom=nextroom;
nextroom=meta_nowhere;
}
else
break;
}
}
void execute()
{
cr();
str=strcmd;
adv_parse();
icmd=0;strcmd[icmd]=0;
parser_update();
}
void IMAGE_clear()
{
memset(TOPBITMAP,0,4*1000);
memset(video_colorram,0,status_y*40-1);
memset(VIDEOMEM,0,status_y*40-1);
ui_clear();
}
u8*t1,*t2,*t3,*ot1,*ot2,*ot3;
u16 wC,wwC,oxC,oxB;
void bytemem()
{
hunpack(t1,ADDR(0xC000));
t1=ADDR(0xC000);
if(oxC)
ot1+=oxC;
for(y=0;y<m_bitmap_h;y+=8)
{memcpy(ot1,t1,wC);t1+=wC;ot1+=wwC;}
}
void ui_image_draw()
{
if(m_bitmap_ox||m_bitmap_oy)
oxB=m_bitmap_ox+(m_bitmap_oy>>3)*320;
else
oxB=(320-m_bitmap_w)>>1;
oxC=oxB>>3;
wC=m_bitmap_w>>3;wwC=SCREEN_W;
t1=m_bitmapcol;
ot1=video_colorram;
bytemem();
t1=m_bitmapscrcol;
ot1=VIDEOMEM;
bytemem();
oxC=oxB;
wC=m_bitmap_w;wwC=320;
t1=m_bitmap;
ot1=bitmap_image;
bytemem();
t2=m_bitmapscrcol;
t3=m_bitmap;
ot1=video_colorram;
ot2=VIDEOMEM;
ot3=bitmap_image;
/*
if(oxC)
{
#if defined(ONTHEFLYCLEAN)
memset(ot1,0,oxC);
memset(ot2,0,oxC);
#endif
ot1+=oxC;
ot2+=oxC;
ot3+=oxB;
}
for(y=0;y<m_bitmap_h;y+=8)
{
memcpy(ot1,t1,wC);t1+=wC;ot1+=wC;
#if defined(ONTHEFLYCLEAN)
memset(ot1,0,oxC<<1);
#endif
ot1+=oxC<<1;
memcpy(ot2,t2,wC);t2+=wC;ot2+=wC;
#if defined(ONTHEFLYCLEAN)
memset(ot2,0,oxC<<1);
#endif
ot2+=oxC<<1;
memcpy(ot3,t3,m_bitmap_w);t3+=m_bitmap_w;ot3+=320;
}
*/
}
void ui_image_clean()
{
memset(VIDEOMEM,0,(96/8)*40);
memset(video_colorram,0,(96/8)*40);
memset(bitmap_image,0,4000);
}
void ui_room_update_start()
{
REFRESH
#if !defined(ONTHEFLYCLEAN)
ui_image_clean();
#endif
status_update();
}
void ui_room_update()
{
REFRESH
if(imagemem)
{
m_bitmap_w=*(u16*)(imagemem+0);
m_bitmap_h=*(u8*)(imagemem+2);
m_bitmap_oy=*(u8*)(imagemem+5);
m_bitmap_ox=*(u16*)(imagemem+3);
m_bitmapscrcol=imagemem+14+*(u16*)(imagemem+8);
m_bitmapcol=imagemem+14+*(u16*)(imagemem+10);
m_bitmap=imagemem+14+*(u16*)(imagemem+12);
ui_image_draw();
}
if(rightactorimg!=meta_none)
{
/*m_bitmapscrcol=itembitmap02_screencol;
m_bitmapcol=itembitmap02_col;
m_bitmap=itembitmap02_bitmap;
m_bitmap_h=itembitmap02_h;
m_bitmap_w=itembitmap02_w;
m_bitmap_ox=8;
m_bitmap_oy=8;*/
ui_image_draw();
}
if(leftactorimg!=meta_none)
{
/*m_bitmapscrcol=itembitmap01_screencol;
m_bitmapcol=itembitmap01_col;
m_bitmap=itembitmap01_bitmap;
m_bitmap_h=itembitmap01_h;
m_bitmap_w=itembitmap01_w;
m_bitmap_ox=320-itembitmap01_w-8;
m_bitmap_oy=8;*/
ui_image_draw();
}
}
| 17.03367
| 72
| 0.575114
|
20e2f6675f768e26b7817f0a0c86bd8f61cf7220
| 995
|
py
|
Python
|
farmers/api/market/models.py
|
BuildForSDG/Farmers-Edge-backend
|
4924c7f73f3e84698fde6a3d8a893c1ca282ed88
|
[
"MIT"
] | 2
|
2020-05-17T18:20:50.000Z
|
2021-04-20T21:42:43.000Z
|
farmers/api/market/models.py
|
BuildForSDG/Farmers-Edge-backend
|
4924c7f73f3e84698fde6a3d8a893c1ca282ed88
|
[
"MIT"
] | 19
|
2020-05-14T14:36:31.000Z
|
2022-03-12T00:34:40.000Z
|
farmers/api/market/models.py
|
BuildForSDG/Farmers-Edge-backend
|
4924c7f73f3e84698fde6a3d8a893c1ca282ed88
|
[
"MIT"
] | 1
|
2020-05-20T20:09:35.000Z
|
2020-05-20T20:09:35.000Z
|
from django.db import models
from django.utils import timezone
from django.conf import settings
from django.contrib.auth.models import User
class Product(models.Model):
# user = models.ForeignKey(settings.AUTH_USER_MODEL, default=None, on_delete=models.CASCADE)
retailerEmail = models.EmailField(max_length=254)
product = models.CharField(max_length=200)
quantity = models.CharField(max_length=200, null=True)
totalCost = models.CharField(max_length=100, null=True)
ready = models.BooleanField(default=False)
def __str__(self):
return self.product
class Order(models.Model):
# user = models.ForeignKey(settings.AUTH_USER_MODEL, default=None, on_delete=models.CASCADE)
productName = models.CharField(max_length=100, null=True)
totalCost = models.CharField(max_length=200)
quantity = models.CharField(max_length=100, null=True)
waitTime = models.CharField(max_length=100, null=True)
def __str__(self):
return self.productName
| 38.269231
| 96
| 0.756784
|
05c270954b98a4a49023d93278c8776d37bc0940
| 1,307
|
py
|
Python
|
ca-majority.py
|
RachidStat/PyCX
|
a1a597e61d03b25cf138dd11ab136db8202e1243
|
[
"BSD-2-Clause-FreeBSD"
] | 176
|
2019-12-18T11:44:28.000Z
|
2022-03-27T09:09:33.000Z
|
ca-majority.py
|
RachidStat/PyCX
|
a1a597e61d03b25cf138dd11ab136db8202e1243
|
[
"BSD-2-Clause-FreeBSD"
] | 2
|
2020-03-29T00:51:25.000Z
|
2020-07-19T11:08:32.000Z
|
ca-majority.py
|
RachidStat/PyCX
|
a1a597e61d03b25cf138dd11ab136db8202e1243
|
[
"BSD-2-Clause-FreeBSD"
] | 56
|
2019-12-18T19:04:12.000Z
|
2022-03-22T09:35:33.000Z
|
import pycxsimulator
from pylab import *
width = 50
height = 50
numberOfStates = 2
r = 1
def initialize():
global time, config, nextConfig
time = 0
config = zeros([height, width])
for x in range(width):
for y in range(height):
config[y, x] = randint(numberOfStates)
nextConfig = zeros([height, width])
def observe():
cla()
imshow(config, vmin = 0, vmax = numberOfStates - 1, cmap = cm.winter)
axis('image')
title('t = ' + str(time))
def update():
global time, config, nextConfig
time += 1
for x in range(width):
for y in range(height):
state = config[y, x]
counts = [0] * numberOfStates
for dx in range(- r, r + 1):
for dy in range(- r, r + 1):
s = int(config[(y+dy)%height, (x+dx)%width])
counts[s] += 1
maxCount = max(counts)
maxStates = []
for i in range(numberOfStates):
if counts[i] == maxCount:
maxStates.append(i)
state = choice(maxStates)
nextConfig[y, x] = state
config, nextConfig = nextConfig, config
pycxsimulator.GUI().start(func=[initialize, observe, update])
| 25.627451
| 74
| 0.517215
|
b8de5cd84f81973634748015d8f31df69107d806
| 116
|
h
|
C
|
learncpp_com/4-var_scopes/66-namespace_scope_resolution_operator/goo.h
|
mitsiu-carreno/cpp_tutorial
|
71f9083884ae6aa23774c044c3d08be273b6bb8e
|
[
"MIT"
] | null | null | null |
learncpp_com/4-var_scopes/66-namespace_scope_resolution_operator/goo.h
|
mitsiu-carreno/cpp_tutorial
|
71f9083884ae6aa23774c044c3d08be273b6bb8e
|
[
"MIT"
] | null | null | null |
learncpp_com/4-var_scopes/66-namespace_scope_resolution_operator/goo.h
|
mitsiu-carreno/cpp_tutorial
|
71f9083884ae6aa23774c044c3d08be273b6bb8e
|
[
"MIT"
] | null | null | null |
namespace Goo{
// This doSomething() belongs to namespace goo
int doSomething(int x, int y){
return x - y;
}
}
| 16.571429
| 47
| 0.672414
|
7f2b12a2d85852a56fdded60804db6bb3715cb4d
| 1,294
|
kt
|
Kotlin
|
app/src/main/java/fergaral/tidesapp/ui/fragment/PortsFragment.kt
|
fercarcedo/TidesApp
|
cfef9d372a3782035c4c5ee6c0b1bee98c58d170
|
[
"Apache-2.0"
] | null | null | null |
app/src/main/java/fergaral/tidesapp/ui/fragment/PortsFragment.kt
|
fercarcedo/TidesApp
|
cfef9d372a3782035c4c5ee6c0b1bee98c58d170
|
[
"Apache-2.0"
] | null | null | null |
app/src/main/java/fergaral/tidesapp/ui/fragment/PortsFragment.kt
|
fercarcedo/TidesApp
|
cfef9d372a3782035c4c5ee6c0b1bee98c58d170
|
[
"Apache-2.0"
] | null | null | null |
package fergaral.tidesapp.ui.fragment
import android.os.Bundle
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import androidx.fragment.app.Fragment
import androidx.fragment.app.viewModels
import androidx.lifecycle.observe
import androidx.recyclerview.widget.GridLayoutManager
import dagger.hilt.android.AndroidEntryPoint
import fergaral.tidesapp.databinding.FragmentPortsBinding
import fergaral.tidesapp.repository.TidesRepository
import fergaral.tidesapp.ui.adapter.PortsAdapter
import fergaral.tidesapp.viewmodel.PortsViewModel
import javax.inject.Inject
@AndroidEntryPoint
class PortsFragment : Fragment() {
private val portsViewModel: PortsViewModel by viewModels()
override fun onCreateView(
inflater: LayoutInflater,
container: ViewGroup?,
savedInstanceState: Bundle?
): View? {
val binding = FragmentPortsBinding.inflate(inflater, container, false)
val adapter = PortsAdapter()
val layoutManager = GridLayoutManager(activity, 2)
binding.portsRv.adapter = adapter
binding.portsRv.layoutManager = layoutManager
//portsViewModel.ports.observe(viewLifecycleOwner) { ports ->
// adapter.setPorts(ports)
//}
return binding.root
}
}
| 34.052632
| 78
| 0.765842
|
44529fa434c7b1d1fd866a3b1d70522e9063d5f0
| 4,952
|
py
|
Python
|
ucscsdk/mometa/compute/ComputeFactoryResetOp.py
|
parag-may4/ucscsdk
|
2ea762fa070330e3a4e2c21b46b157469555405b
|
[
"Apache-2.0"
] | 9
|
2016-12-22T08:39:25.000Z
|
2019-09-10T15:36:19.000Z
|
ucscsdk/mometa/compute/ComputeFactoryResetOp.py
|
parag-may4/ucscsdk
|
2ea762fa070330e3a4e2c21b46b157469555405b
|
[
"Apache-2.0"
] | 10
|
2017-01-31T06:59:56.000Z
|
2021-11-09T09:14:37.000Z
|
ucscsdk/mometa/compute/ComputeFactoryResetOp.py
|
parag-may4/ucscsdk
|
2ea762fa070330e3a4e2c21b46b157469555405b
|
[
"Apache-2.0"
] | 13
|
2016-11-14T07:42:58.000Z
|
2022-02-10T17:32:05.000Z
|
"""This module contains the general information for ComputeFactoryResetOp ManagedObject."""
from ...ucscmo import ManagedObject
from ...ucsccoremeta import UcscVersion, MoPropertyMeta, MoMeta
from ...ucscmeta import VersionMeta
class ComputeFactoryResetOpConsts():
CREATE_INITIAL_VOLUMES_CREATE_INITIAL_VOLUMES = "create-initial-volumes"
CREATE_INITIAL_VOLUMES_NO_INIT = "no-init"
CREATE_INITIAL_VOLUMES_UNKNOWN = "unknown"
FLEX_STORAGE_SCRUB_NO_SCRUB = "no-scrub"
FLEX_STORAGE_SCRUB_SCRUB = "scrub"
FLEX_STORAGE_SCRUB_UNKNOWN = "unknown"
RESET_TRIGGER_CANCELED = "canceled"
RESET_TRIGGER_IDLE = "idle"
RESET_TRIGGER_TRIGGERED = "triggered"
STORAGE_SCRUB_NO_SCRUB = "no-scrub"
STORAGE_SCRUB_SCRUB = "scrub"
STORAGE_SCRUB_UNKNOWN = "unknown"
TRIGGER_STATUS_TRIGGER_ACKED = "trigger-acked"
TRIGGER_STATUS_TRIGGER_FAILED = "trigger-failed"
TRIGGER_STATUS_TRIGGERED = "triggered"
TRIGGER_STATUS_UNKNOWN = "unknown"
class ComputeFactoryResetOp(ManagedObject):
"""This is ComputeFactoryResetOp class."""
consts = ComputeFactoryResetOpConsts()
naming_props = set([])
mo_meta = MoMeta("ComputeFactoryResetOp", "computeFactoryResetOp", "factory-reset-remote-oper", VersionMeta.Version201b, "InputOutput", 0x1ff, [], ["admin", "pn-equipment", "pn-maintenance", "pn-policy"], [u'computeBlade', u'computeRackUnit', u'computeServerUnit'], [u'faultInst'], ["get", "set"])
prop_meta = {
"child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version201b, MoPropertyMeta.INTERNAL, None, None, None, r"""((deleteAll|ignore|deleteNonPresent),){0,2}(deleteAll|ignore|deleteNonPresent){0,1}""", [], []),
"create_initial_volumes": MoPropertyMeta("create_initial_volumes", "createInitialVolumes", "string", VersionMeta.Version201b, MoPropertyMeta.READ_WRITE, 0x2, None, None, None, ["create-initial-volumes", "no-init", "unknown"], []),
"dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version201b, MoPropertyMeta.READ_ONLY, 0x4, 0, 256, None, [], []),
"flex_storage_scrub": MoPropertyMeta("flex_storage_scrub", "flexStorageScrub", "string", VersionMeta.Version201b, MoPropertyMeta.READ_WRITE, 0x8, None, None, None, ["no-scrub", "scrub", "unknown"], []),
"last_modified": MoPropertyMeta("last_modified", "lastModified", "string", VersionMeta.Version201b, MoPropertyMeta.READ_ONLY, None, None, None, r"""([0-9]){4}-([0-9]){2}-([0-9]){2}T([0-9]){2}:([0-9]){2}:([0-9]){2}((\.([0-9]){3})){0,1}""", [], []),
"remote_error_code": MoPropertyMeta("remote_error_code", "remoteErrorCode", "uint", VersionMeta.Version201b, MoPropertyMeta.READ_ONLY, None, None, None, None, [], []),
"remote_error_descr": MoPropertyMeta("remote_error_descr", "remoteErrorDescr", "string", VersionMeta.Version201b, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"reset_trigger": MoPropertyMeta("reset_trigger", "resetTrigger", "string", VersionMeta.Version201b, MoPropertyMeta.READ_WRITE, 0x10, None, None, None, ["canceled", "idle", "triggered"], []),
"rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version201b, MoPropertyMeta.READ_ONLY, 0x20, 0, 256, None, [], []),
"status": MoPropertyMeta("status", "status", "string", VersionMeta.Version201b, MoPropertyMeta.READ_WRITE, 0x40, None, None, r"""((removed|created|modified|deleted),){0,3}(removed|created|modified|deleted){0,1}""", [], []),
"storage_scrub": MoPropertyMeta("storage_scrub", "storageScrub", "string", VersionMeta.Version201b, MoPropertyMeta.READ_WRITE, 0x80, None, None, None, ["no-scrub", "scrub", "unknown"], []),
"trigger_status": MoPropertyMeta("trigger_status", "triggerStatus", "string", VersionMeta.Version201b, MoPropertyMeta.READ_WRITE, 0x100, None, None, None, ["trigger-acked", "trigger-failed", "triggered", "unknown"], []),
}
prop_map = {
"childAction": "child_action",
"createInitialVolumes": "create_initial_volumes",
"dn": "dn",
"flexStorageScrub": "flex_storage_scrub",
"lastModified": "last_modified",
"remoteErrorCode": "remote_error_code",
"remoteErrorDescr": "remote_error_descr",
"resetTrigger": "reset_trigger",
"rn": "rn",
"status": "status",
"storageScrub": "storage_scrub",
"triggerStatus": "trigger_status",
}
def __init__(self, parent_mo_or_dn, **kwargs):
self._dirty_mask = 0
self.child_action = None
self.create_initial_volumes = None
self.flex_storage_scrub = None
self.last_modified = None
self.remote_error_code = None
self.remote_error_descr = None
self.reset_trigger = None
self.status = None
self.storage_scrub = None
self.trigger_status = None
ManagedObject.__init__(self, "ComputeFactoryResetOp", parent_mo_or_dn, **kwargs)
| 61.9
| 301
| 0.690428
|
72d72139b3780564a11fdc3989bbaa0baa546d31
| 1,468
|
cs
|
C#
|
Pipeline/RoyalCode.PipelineFlow/Resolvers/IHandlerResolver.cs
|
Royal-Code/Pipeline
|
805ea5021ad0ac4e558c618d71de91d32c798803
|
[
"MIT"
] | null | null | null |
Pipeline/RoyalCode.PipelineFlow/Resolvers/IHandlerResolver.cs
|
Royal-Code/Pipeline
|
805ea5021ad0ac4e558c618d71de91d32c798803
|
[
"MIT"
] | null | null | null |
Pipeline/RoyalCode.PipelineFlow/Resolvers/IHandlerResolver.cs
|
Royal-Code/Pipeline
|
805ea5021ad0ac4e558c618d71de91d32c798803
|
[
"MIT"
] | 1
|
2022-03-11T01:11:40.000Z
|
2022-03-11T01:11:40.000Z
|
using RoyalCode.PipelineFlow.Descriptors;
using System;
namespace RoyalCode.PipelineFlow.Resolvers
{
/// <summary>
/// <para>
/// The resolver component is used to provide descriptors for handlers when they are able to process the pipeline request.
/// </para>
/// <para>
/// Resolvers that implement this interface are for input processing handlers, including bridge handlers.
/// </para>
/// </summary>
public interface IHandlerResolver
{
/// <summary>
/// Determines that this resolver is for fallback.
/// </summary>
bool IsFallback { get; }
/// <summary>
/// For the input type, try solving a handler that doesn't produce results.
/// </summary>
/// <param name="inputType">The pipeline input type.</param>
/// <returns>
/// A handler description, or null if can't be applied.
/// </returns>
HandlerDescriptor? TryResolve(Type inputType);
/// <summary>
/// For the input type, try solving a handler that produces a results of output type.
/// </summary>
/// <param name="inputType">The pipeline input type.</param>
/// <param name="outputType">The pipeline output type.</param>
/// <returns>
/// A handler description, or null if can't be applied.
/// </returns>
HandlerDescriptor? TryResolve(Type inputType, Type outputType);
}
}
| 35.804878
| 130
| 0.604223
|
d9a30ad48b77d4735f9377884181cc76621a706d
| 4,025
|
rs
|
Rust
|
example_raspberry/src/bin/simple_sided_rtt_responder.rs
|
SII-Public-Research/dw3000
|
3f3fde4180d52b5a84f60a6395de2201c32011e8
|
[
"0BSD"
] | null | null | null |
example_raspberry/src/bin/simple_sided_rtt_responder.rs
|
SII-Public-Research/dw3000
|
3f3fde4180d52b5a84f60a6395de2201c32011e8
|
[
"0BSD"
] | null | null | null |
example_raspberry/src/bin/simple_sided_rtt_responder.rs
|
SII-Public-Research/dw3000
|
3f3fde4180d52b5a84f60a6395de2201c32011e8
|
[
"0BSD"
] | 1
|
2022-01-11T14:49:29.000Z
|
2022-01-11T14:49:29.000Z
|
/*
simple RESPONDER example to be used with simple INITIATOR example to perform RTT measurements (simple sided method)
SIMPLE SIDED RTT MEASUREMENT TECHNIQUE :
INITIATOR RESPONDER
T1 |--------____ |
| -------> | T2
| |
| ____--------| T3
T4 | <-------
/!\ A speed difference between the clocks exists, which impacts the measures. The use of the Double Sided is recommended /!\
*/
use rppal::gpio::{Gpio};
use rppal::spi::{Bus, Mode, SlaveSelect, Spi};
use std::thread;
use std::time::Duration;
use dw3000::{
self,
hl,
Config,
time::Instant
};
use nb::block;
/// Responder side of the single-sided RTT measurement: waits for the
/// initiator's request, records the receive timestamp T2, then sends T2 and
/// the (precomputed) transmit timestamp T3 back at an exact delayed time.
fn main() -> ! {
    /******************************************************* */
    /************         BASIC CONFIGURATION     ********** */
    /******************************************************* */
    // SPI0 at 4.5 MHz, mode 0; GPIO8 is the chip-select line.
    let spi = Spi::new(Bus::Spi0, SlaveSelect::Ss0, 4_500_000, Mode::Mode0)
        .expect("Failed to configure the spi");
    let gpio = Gpio::new()
        .expect("Failed to configure GPIO");
    let cs = gpio
        .get(8)
        .expect("Failed to set up CS PIN")
        .into_output();
    /****************************************************** */
    /*****                DW3000 RESET            ******* */
    /****************************************************** */
    // Pulse the hardware reset line (GPIO4) low→high to reboot the chip.
    let mut reset = gpio
        .get(4)
        .expect("Failed to set up RESET PIN")
        .into_output();
    reset.set_low();
    reset.set_high();
    /****************************************************** */
    /*********        DW3000 CONFIGURATION      ******** */
    /****************************************************** */
    let mut dw3000 = hl::DW3000::new(spi, cs)
        .init()
        .expect("Failed init.")
        .config(Config::default())
        .expect("Failed config.");
    // Give the chip time to settle after configuration.
    thread::sleep(Duration::from_millis(500));
    println!("Init OK");
    loop {
        /****************************************************** */
        /*********    WAITING REQUEST FROM INITIATOR   ******** */
        /****************************************************** */
        println!("FIRST STEP : Waiting measurement request ...");
        let mut receiving = dw3000
            .receive(Config::default())
            .expect("Failed configure receiver.");
        let mut buffer = [0; 1024];
        // T2 = raw receive timestamp, in DW3000 time units (ticks).
        let t2: u64 = block!(receiving.r_wait(&mut buffer)).expect("error during the reception").rx_time.value();
        dw3000 = receiving
            .finish_receiving()
            .expect("Failed to finish receiving");
        /****************************************************** */
        /*********         COMPUTING T2 AND T3         ******** */
        /****************************************************** */
        println!("SECOND STEP : Computing timestamps...");
        // We need to calculate a time (in ticks) at which we want to send the response
        // (63898 is presumably ticks-per-microsecond of the DW3000 clock — TODO confirm)
        let delay_to_reply = t2 + (5000 * 63898); // T2(ticks) + (chosen_delay(µs) * clock_speed) % 1_0995_1162_7776
        // Delayed TX ignores the 9 LSBs of the programmed time; mask them off
        // and add the TX antenna delay so T3 is the actual emission instant.
        let t3: u64 = ((delay_to_reply >> 9) << 9) + dw3000.get_tx_antenna_delay().unwrap().value(); // T3(ticks) = delay(31 MSB) + sending_antenna_delay
        // Payload: T2 then T3, each as 5 big-endian bytes (40-bit timestamps).
        let response_tab = [
            ((t2 >> (8 * 4)) & 0xFF ) as u8,
            ((t2 >> (8 * 3)) & 0xFF ) as u8,
            ((t2 >> (8 * 2)) & 0xFF ) as u8,
            ((t2 >> 8) & 0xFF ) as u8,
            ( t2 & 0xFF ) as u8,
            ((t3 >> (8 * 4)) & 0xFF ) as u8,
            ((t3 >> (8 * 3)) & 0xFF ) as u8,
            ((t3 >> (8 * 2)) & 0xFF ) as u8,
            ((t3 >> 8) & 0xFF ) as u8,
            ( t3 & 0xFF ) as u8,
        ];
        /****************************************************** */
        /*********          SENDING T2 AND T3          ******** */
        /****************************************************** */
        println!("THIRD STEP : Offset response...");
        // Schedule the send at exactly delay_to_reply so T3 is deterministic.
        let mut sending = dw3000
            .send(
                &response_tab,
                hl::SendTime::Delayed(Instant::new(delay_to_reply).unwrap()),
                Config::default())
            .expect("Failed configure transmitter");
        let _result = block!(sending.s_wait()).expect("Error sending");
        dw3000 = sending.finish_sending().expect("Failed to finish sending");
        println!("--- RTT FINISHED ---\n");
    }
}
| 32.459677
| 149
| 0.451925
|
c686b3b8b539bfe7a8a88499b00e4c8543ecae97
| 642
|
py
|
Python
|
statsmodels/info.py
|
yarikoptic/statsmodels
|
f990cb1a1ef0c9883c9394444e6f9d027efabec6
|
[
"BSD-3-Clause"
] | 34
|
2018-07-13T11:30:46.000Z
|
2022-01-05T13:48:10.000Z
|
venv/lib/python3.6/site-packages/statsmodels/info.py
|
HeyWeiPan/vnpy_crypto
|
844381797a475a01c05a4e162592a5a6e3a48032
|
[
"MIT"
] | 6
|
2015-08-28T16:59:03.000Z
|
2019-04-12T22:29:01.000Z
|
venv/lib/python3.6/site-packages/statsmodels/info.py
|
HeyWeiPan/vnpy_crypto
|
844381797a475a01c05a4e162592a5a6e3a48032
|
[
"MIT"
] | 28
|
2015-04-01T20:02:25.000Z
|
2021-07-03T00:09:28.000Z
|
"""
Statistical models
- standard `regression` models
- `GLS` (generalized least squares regression)
- `OLS` (ordinary least square regression)
- `WLS` (weighted least square regression)
- `GLASAR` (GLS with autoregressive errors model)
- `GLM` (generalized linear models)
- robust statistical models
- `RLM` (robust linear models using M estimators)
- `robust.norms` estimates
- `robust.scale` estimates (MAD, Huber's proposal 2).
- sandbox models
- `mixed` effects models
- `gam` (generalized additive models)
"""
__docformat__ = 'restructuredtext en'
depends = ['numpy',
'scipy']
postpone_import = True
| 23.777778
| 55
| 0.699377
|
9c591e6352fcaeed365792220ce227b155e3eeb3
| 469
|
rs
|
Rust
|
examples/self_host.rs
|
nikita-skobov/aws-or-selfhost
|
31b3308a842304e4a1a882024efb89ea26789719
|
[
"MIT"
] | null | null | null |
examples/self_host.rs
|
nikita-skobov/aws-or-selfhost
|
31b3308a842304e4a1a882024efb89ea26789719
|
[
"MIT"
] | null | null | null |
examples/self_host.rs
|
nikita-skobov/aws-or-selfhost
|
31b3308a842304e4a1a882024efb89ea26789719
|
[
"MIT"
] | null | null | null |
use aws_or_selfhost::{ServerBuilder, ApiResponse, tokio_main, http_helper::FullRequest};
/// Handler for GET "/": always returns the same static HTML page.
/// `event` is accepted for the handler signature but not read.
pub async fn root_handler(event: FullRequest) -> ApiResponse {
    let mut resp = ApiResponse::default();
    resp.header("content-type", "text/html");
    resp.body("<html><body><h1>Hello</h1></body></html>");
    resp
}

fn main() {
    // Register the single route, then run the app on the self-host backend
    // (rather than the AWS backend) inside the library's tokio entry point.
    let app = ServerBuilder::default()
        .get("/", root_handler);
    tokio_main(app.start(aws_or_selfhost::self_host::selfhost_init));
}
| 31.266667
| 88
| 0.680171
|
474c6249f0232cfd137dbb3135d988c21bbc04b8
| 258
|
rb
|
Ruby
|
app/controllers/recording_dummy_controller.rb
|
DreamforceHackathon/repository-19
|
981ac4a733ddd60a4e14f13a94f594bd0531c7bc
|
[
"MIT"
] | null | null | null |
app/controllers/recording_dummy_controller.rb
|
DreamforceHackathon/repository-19
|
981ac4a733ddd60a4e14f13a94f594bd0531c7bc
|
[
"MIT"
] | null | null | null |
app/controllers/recording_dummy_controller.rb
|
DreamforceHackathon/repository-19
|
981ac4a733ddd60a4e14f13a94f594bd0531c7bc
|
[
"MIT"
] | 1
|
2019-07-29T21:46:31.000Z
|
2019-07-29T21:46:31.000Z
|
# Returns TwiML that keeps a call open silently: an empty <Say> followed by a
# 600-second <Pause>. Used as a dummy endpoint for call recording.
class RecordingDummyController < ApplicationController
  # Twilio posts here without a Rails CSRF token, so skip the check.
  skip_before_action :verify_authenticity_token

  def create
    twiml = Twilio::TwiML::Response.new do |r|
      r.Say ""
      r.Pause length: 600
    end
    render xml: twiml.text
  end
end
| 18.428571
| 54
| 0.713178
|
dbc802a60ac5e60fa517a9d1bbf7bd5d27236284
| 900
|
php
|
PHP
|
database/migrations/2017_12_12_094300_create_disccount_rules_table.php
|
barraroot/body_diet
|
1dd12f225f5395226ea2602c7336dbfe09c8018a
|
[
"MIT"
] | null | null | null |
database/migrations/2017_12_12_094300_create_disccount_rules_table.php
|
barraroot/body_diet
|
1dd12f225f5395226ea2602c7336dbfe09c8018a
|
[
"MIT"
] | null | null | null |
database/migrations/2017_12_12_094300_create_disccount_rules_table.php
|
barraroot/body_diet
|
1dd12f225f5395226ea2602c7336dbfe09c8018a
|
[
"MIT"
] | null | null | null |
<?php
use Illuminate\Support\Facades\Schema;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Migrations\Migration;
/**
 * Migration creating the `disccount_rules` table (discount rules with a
 * title, validity datetime, and optional value / freight / order discounts).
 *
 * NOTE(review): "disccount" / "diccount" look like typos for "discount", but
 * they are baked into the table, column, class and file names, so renaming
 * them here would be a breaking schema change — left as-is.
 */
class CreateDisccountRulesTable extends Migration
{
    /**
     * Run the migrations.
     *
     * @return void
     */
    public function up()
    {
        Schema::create('disccount_rules', function (Blueprint $table) {
            $table->increments('id');
            // NOTE(review): Blueprint columns are NOT NULL by default and there
            // is no notnull() modifier in Laravel's schema builder, so these
            // calls appear to be no-ops — confirm against the framework
            // version in use.
            $table->string('title')->notnull();
            $table->datetime('valido')->notnull();
            $table->decimal('valor', 5, 2)->nullable();
            $table->decimal('diccount_frete', 5, 2)->nullable();
            $table->decimal('diccount_order', 5, 2)->nullable();
            $table->timestamps();
        });
    }

    /**
     * Reverse the migrations.
     *
     * @return void
     */
    public function down()
    {
        Schema::dropIfExists('disccount_rules');
    }
}
| 24.324324
| 71
| 0.566667
|
3acbb47d832fbf4f6eda39f2adce199e88773267
| 1,860
|
lua
|
Lua
|
garrysmod/gamemodes/nutscript/gamemode/core/derma/cl_crafting.lua
|
Kek1ch/Kek1ch
|
fa545ac1c261c20639ba7a7119ccd7aa4aaacbdc
|
[
"Apache-2.0"
] | 7
|
2019-06-15T09:10:59.000Z
|
2021-11-21T18:15:03.000Z
|
garrysmod/gamemodes/nutscript/gamemode/core/derma/cl_crafting.lua
|
Kek1ch/Kek1ch
|
fa545ac1c261c20639ba7a7119ccd7aa4aaacbdc
|
[
"Apache-2.0"
] | null | null | null |
garrysmod/gamemodes/nutscript/gamemode/core/derma/cl_crafting.lua
|
Kek1ch/Kek1ch
|
fa545ac1c261c20639ba7a7119ccd7aa4aaacbdc
|
[
"Apache-2.0"
] | 6
|
2019-06-15T08:33:15.000Z
|
2020-10-25T07:50:32.000Z
|
-- nutCraftingBar: clickable numeric bar widget for the NutScript crafting UI.
-- Holds an integer `value` in [0, max] plus a smoothed `deltaValue` eased
-- toward it every frame (presumably for an animated fill — the Paint code is
-- not in this chunk).
local PANEL = {}
-- NOTE(review): these materials are not referenced in the code visible here;
-- presumably used by drawing code elsewhere — confirm before removing.
local gradient2 = Material("daui/droda_ui_3.png") -- background
local gradient = Material("daui/next_but.png") -- fill

-- Build the widget: a full-size invisible DButton for clicks and a left-docked
-- label for the caption.
function PANEL:Init()
    self:SetTall(20)
    self.value = 0
    self.deltaValue = self.value -- display value, eased toward `value` in Think
    self.max = 10
    self.click = self:Add("DButton")
    self.click:Dock(FILL)
    self.click:SetText("")
    --[[self.click.DoClick = function()
    self.selected = true
    netstream.Start("craftmaska")
    end]]
    self.label = self.click:Add("DLabel")
    self.label:Dock(LEFT)
    self.label:SetTextColor(Color(169, 169, 169)) --(Color(210, 180, 140))
    self.label:SetFont("ChatFontMisery32")
    self.label:SetSize(200, 0)
end

-- Install a click handler; also marks the bar as selected when clicked.
function PANEL:setCallback(callback)
    self.click.DoClick = function(this)
        callback()
        self.selected = true
    end
end

-- Per-frame: auto-repeat while a +/- press is held (see doChange) and ease the
-- displayed value toward the real one.
function PANEL:Think()
    if (self.pressing) then
        if ((self.nextPress or 0) < CurTime()) then
            self:doChange()
        end
    end
    self.deltaValue = math.Approach(self.deltaValue, self.value, FrameTime() * 15)
end

-- Apply one +/- step (self.pressing is -1 or 1), clamped to [0, max], with a
-- 0.2 s repeat delay. onChanged may veto the step by returning false.
function PANEL:doChange()
    if ((self.value == 0 and self.pressing == -1) or (self.value == self.max and self.pressing == 1)) then
        return
    end
    self.nextPress = CurTime() + 0.2
    if (self:onChanged(self.pressing) != false) then
        self.value = math.Clamp(self.value + self.pressing, 0, self.max)
    end
end

-- Hook for subclasses/users; default does nothing (nil ~= false, so the
-- change proceeds).
function PANEL:onChanged(difference)
end

-- Current integer value.
function PANEL:getValue()
    return self.value
end

-- Set the value directly (not clamped here).
function PANEL:setValue(value)
    self.value = value
end

-- Store an auxiliary boost value (not used in the code visible here).
function PANEL:setBoost(value)
    self.boostValue = value
end

-- Set the upper bound used by doChange's clamping.
function PANEL:setMax(max)
    self.max = max
end

-- Set the caption text.
function PANEL:setText(text)
    self.label:SetText(text)
end

-- Stub: originally removed the +/- buttons; now a no-op.
function PANEL:setReadOnly()
    --self.sub:Remove()
    --self.add:Remove()
end

vgui.Register("nutCraftingBar", PANEL, "DPanel")
| 22.142857
| 105
| 0.656452
|
744f42316f5d8567b4ff5a0e4c58c243cec49cfe
| 688
|
go
|
Go
|
fiber-bootstrap/bootstrap/bootstrap.go
|
oSethoum/recipes
|
c247393347b4cae29c090c699baf4751444e089e
|
[
"MIT"
] | 1
|
2020-02-05T21:47:47.000Z
|
2020-02-05T21:47:47.000Z
|
fiber-bootstrap/bootstrap/bootstrap.go
|
oSethoum/recipes
|
c247393347b4cae29c090c699baf4751444e089e
|
[
"MIT"
] | null | null | null |
fiber-bootstrap/bootstrap/bootstrap.go
|
oSethoum/recipes
|
c247393347b4cae29c090c699baf4751444e089e
|
[
"MIT"
] | null | null | null |
package bootstrap
import (
"github.com/gofiber/fiber/v2"
"github.com/gofiber/fiber/v2/middleware/logger"
"github.com/gofiber/fiber/v2/middleware/monitor"
"github.com/gofiber/fiber/v2/middleware/recover"
"github.com/gofiber/template/html"
"github.com/kooroshh/fiber-boostrap/pkg/database"
"github.com/kooroshh/fiber-boostrap/pkg/env"
"github.com/kooroshh/fiber-boostrap/pkg/router"
)
// NewApplication wires up the Fiber app: loads the env file and database
// first (routes may depend on them), installs the HTML template engine,
// recovery + request-logging middleware, a /dashboard metrics monitor, and
// finally the application routes.
// NOTE(review): the module path "fiber-boostrap" (sic) is the actual import
// path used above — do not "fix" the spelling without renaming the module.
func NewApplication() *fiber.App {
	env.SetupEnvFile()
	database.SetupDatabase()
	// Views are .html templates loaded from ./views.
	engine := html.New("./views", ".html")
	app := fiber.New(fiber.Config{Views: engine})
	app.Use(recover.New())
	app.Use(logger.New())
	app.Get("/dashboard", monitor.New())
	router.InstallRouter(app)
	return app
}
| 26.461538
| 50
| 0.744186
|
ef25ae9eb7a000ffe8cf2d201b1a249047a7a9ca
| 38,732
|
c
|
C
|
backend/db.c
|
Mvmo/acton
|
34f274cf8af04e6b1bd6a954207ef6aae0248a7d
|
[
"BSD-3-Clause"
] | 28
|
2021-06-09T14:30:35.000Z
|
2022-02-22T13:34:43.000Z
|
backend/db.c
|
Mvmo/acton
|
34f274cf8af04e6b1bd6a954207ef6aae0248a7d
|
[
"BSD-3-Clause"
] | 325
|
2021-06-01T21:32:36.000Z
|
2022-03-31T15:04:26.000Z
|
backend/db.c
|
Mvmo/acton
|
34f274cf8af04e6b1bd6a954207ef6aae0248a7d
|
[
"BSD-3-Clause"
] | 2
|
2021-07-28T13:44:35.000Z
|
2021-10-10T01:11:47.000Z
|
/*
* Copyright (C) 2019-2021 Deutsche Telekom AG
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/*
* db.c
*
* Author: aagapi
*/
#include <stdlib.h>
#include <stdio.h>
#include <limits.h>
#include <string.h>
#include <assert.h>
#include "db.h"
#include "skiplist.h"
// DB API:
/* Allocate a fresh, zeroed cell that holds only its key: no sub-cells, no
 * columns, no version, not linked anywhere. */
db_row_t * create_empty_row(WORD key)
{
    db_cell_t * cell = (db_cell_t *) calloc(1, sizeof(db_cell_t));
    cell->key = key;
    /* Set pointer/count members explicitly rather than relying on the
     * all-bits-zero representation from calloc: */
    cell->cells = NULL;
    cell->column_array = NULL;
    cell->no_columns = 0;
    cell->last_blob_size = 0;
    cell->version = NULL;
    cell->_next = NULL;
    cell->no_entries = 0;
    return cell;
}
/*
 * Build a (possibly nested) row from a flat value array.
 * column_values layout: primary key, then clustering keys, then regular
 * columns. Each clustering key adds one nesting level (a la super columns);
 * keys beyond no_schema_clustering_keys use position i directly as their
 * value index (schemaless extension). The deepest cell gets the column
 * array; when last_blob_size > 0 the final column is copied as a blob of
 * that many bytes instead of stored as a WORD.
 */
db_row_t * create_db_row_schemaless(WORD * column_values, int * primary_key_idxs, int no_primary_keys,
        int * clustering_key_idxs, int no_clustering_keys, int no_schema_clustering_keys,
        int no_cols, size_t last_blob_size, unsigned int * fastrandstate)
{
    assert(no_primary_keys == 1);
    assert(no_clustering_keys >= no_schema_clustering_keys);
    db_cell_t * row = create_empty_row(column_values[primary_key_idxs[0]]);
    // Several clustering keys mean several levels of depth (a la super columns):
    db_cell_t * crt_cell = row, * new_cell = NULL;
    for(int i=0; i<no_clustering_keys; i++, crt_cell = new_cell)
    {
        crt_cell->cells = create_skiplist_long();
        int col_index = (i<no_schema_clustering_keys)?(clustering_key_idxs[i]):(i);
        new_cell = create_empty_row(column_values[col_index]);
        if(i == no_clustering_keys - 1)
        {
            // Deepest level: attach the non-key columns here.
            new_cell->no_columns = no_cols - no_primary_keys - no_clustering_keys;
            new_cell->last_blob_size = last_blob_size;
            new_cell->column_array = (WORD *) malloc(new_cell->no_columns * sizeof(WORD));
            int j=0;
            for(;j<new_cell->no_columns - 1;j++)
            {
                new_cell->column_array[j] = column_values[no_primary_keys + no_clustering_keys + j];
            }
            if(last_blob_size <= 0) // last column is value
            {
                new_cell->column_array[j] = column_values[no_primary_keys + no_clustering_keys + j];
            }
            else // last column is blob
            {
                // Own a private copy of the blob bytes:
                new_cell->column_array[j] = malloc(last_blob_size);
                memcpy(new_cell->column_array[j], column_values[no_primary_keys + no_clustering_keys + j], last_blob_size);
            }
        }
        skiplist_insert(crt_cell->cells, column_values[col_index], (WORD) new_cell, fastrandstate);
    }
    return row;
}
// Assumes key indexes are in order (partition keys, followed by clustering keys, followed by columns). Also assumes a single partition key:
/*
 * Variant taking keys and regular columns as separate arrays: keys[0] is the
 * partition key, keys[1..] are clustering keys (one nesting level each).
 * An optional trailing blob (last_blob != NULL, last_blob_size > 0) is
 * deep-copied into one extra column slot at the deepest cell.
 */
db_row_t * create_db_row_schemaless2(WORD * keys, int no_keys, WORD * cols, int no_cols, WORD last_blob, size_t last_blob_size, unsigned int * fastrandstate)
{
    db_cell_t * row = create_empty_row(keys[0]);
    db_cell_t * crt_cell = row, * new_cell = NULL;
    for(int i=1; i<no_keys; i++, crt_cell = new_cell)
    {
        crt_cell->cells = create_skiplist_long();
        new_cell = create_empty_row(keys[i]);
        skiplist_insert(crt_cell->cells, keys[i], (WORD) new_cell, fastrandstate);
    }
    // crt_cell is now the deepest (leaf) cell; it must not already have sub-cells.
    assert(crt_cell != NULL && crt_cell->cells == NULL);
    assert(last_blob == NULL || last_blob_size > 0);
    int total_cols = no_cols + ((last_blob != NULL)?1:0);
    crt_cell->no_columns = total_cols;
    crt_cell->column_array = (WORD *) malloc(total_cols * sizeof(WORD));
    int j=0;
    for(;j<no_cols;j++)
    {
        crt_cell->column_array[j] = cols[j];
    }
    crt_cell->last_blob_size = last_blob_size;
    if(last_blob != NULL)
    {
        assert(total_cols == no_cols + 1);
        // Own a private copy of the blob in the final column slot:
        crt_cell->column_array[no_cols] = malloc(last_blob_size);
        memcpy(crt_cell->column_array[no_cols], last_blob, last_blob_size);
    }
    return row;
}
/*
db_row_t * create_db_row(WORD * column_values, db_schema_t * schema, size_t last_blob_size, unsigned int * fastrandstate)
{
return create_db_row_schemaless(column_values, schema->primary_key_idxs, schema->no_primary_keys,
schema->clustering_key_idxs, schema->no_clustering_keys, schema->no_clustering_keys,
schema->no_cols, last_blob_size, fastrandstate);
}
*/
/*
 * Schema-driven wrapper over create_db_row_schemaless: key layout comes from
 * `schema`, while no_clustering_keys/no_cols may exceed the schema minimums
 * (extra schemaless keys/columns).
 */
db_row_t * create_db_row_sf(WORD * column_values, db_schema_t * schema, int no_clustering_keys, int no_cols, size_t last_blob_size, unsigned int * fastrandstate)
{
    return create_db_row_schemaless(column_values, schema->primary_key_idxs, schema->no_primary_keys,
            schema->clustering_key_idxs, no_clustering_keys, schema->min_no_clustering_keys,
            no_cols, last_blob_size, fastrandstate);
}

/*
 * Recursively free a cell tree: all sub-cells (depth-first), the sub-cell
 * skiplist, the column array, and — when the last column is a blob
 * (last_blob_size > 0) — the separately malloc'ed blob copy.
 */
void free_db_cell(db_row_t * row)
{
    if(row->cells != NULL)
    {
        for(snode_t * cell=HEAD(row->cells);cell!=NULL;cell=NEXT(cell))
            if(cell->value != NULL)
                free_db_cell(cell->value);
        skiplist_free(row->cells);
    }
    if(row->column_array != NULL)
    {
        if(row->last_blob_size > 0 && row->no_columns > 0 && row->column_array[row->no_columns - 1] != NULL)
        {
            free(row->column_array[row->no_columns - 1]);
        }
        free(row->column_array);
    }
    free(row);
}

// Free a whole row; `schema` is unused here but kept for API symmetry.
void free_db_row(db_row_t * row, db_schema_t * schema)
{
    free_db_cell(row);
}
/*
 * Allocate the top-level database handle: a skiplist of tables keyed by
 * table key plus a transaction-state skiplist keyed by UUID.
 */
db_t * get_db()
{
    db_t * db = (db_t *) malloc(sizeof(db_t));
    db->tables = create_skiplist_long();
    db->txn_state = create_skiplist_uuid();
    return db;
}

/*
 * Free the db handle and its two top-level skiplists. Always returns 0.
 * NOTE(review): skiplist_free does not appear to descend into table contents
 * — tables should presumably be removed via db_delete_table first; confirm
 * with callers.
 */
int db_delete_db(db_t * db)
{
    skiplist_free(db->tables);
    skiplist_free(db->txn_state);
    free(db);
    return 0;
}

// Placeholder: dumping the DB is intentionally unimplemented (asserts).
int db_dump_db(db_t * db)
{
    assert(0 && "DB dump not implemented yet");
    return 0;
}
// Deep copy constructor (to allow caller to free his structs):
/*
 * Deep-copy constructor for a table schema (so the caller can free its own
 * arrays). col_types may be NULL. Currently exactly one primary key is
 * supported (asserted). Free the result with free_schema().
 */
db_schema_t* db_create_schema(int * col_types, int no_cols, int * primary_key_idxs, int no_primary_keys, int * clustering_key_idxs, int no_clustering_keys, int * index_key_idxs, int no_index_keys)
{
    assert(no_cols > 0 && "Schema must have at least 1 column");
    assert(no_primary_keys > 0 && "Schema must have at least 1 primary key");
    assert(no_primary_keys <= no_cols && "Schema must have less primary keys than columns");
    assert(no_primary_keys == 1 && "Schemas don't currently support compound primary keys");
    assert(primary_key_idxs[0] < no_cols && "Primary key index out of bounds");
    db_schema_t * schema = (db_schema_t *) malloc(sizeof(db_schema_t));
    schema->min_no_cols = no_cols;
    schema->col_types = NULL;
    if(col_types != NULL)
    {
        schema->col_types = (int *) malloc(no_cols * sizeof(int));
        for(int i=0;i<no_cols;i++)
            schema->col_types[i] = col_types[i];
    }
    schema->primary_key_idxs = (int *) malloc(no_primary_keys * sizeof(int));
    schema->no_primary_keys = no_primary_keys;
    for(int i=0;i<no_primary_keys;i++)
        schema->primary_key_idxs[i] = primary_key_idxs[i];
    schema->min_no_clustering_keys = no_clustering_keys;
    // BUGFIX: default to NULL. Previously this member was left uninitialized
    // when no_clustering_keys == 0, so free_schema()'s NULL check read an
    // indeterminate pointer and could free garbage (undefined behavior).
    schema->clustering_key_idxs = NULL;
    if(no_clustering_keys > 0)
    {
        schema->clustering_key_idxs = (int *) malloc(no_clustering_keys * sizeof(int));
        for(int i=0;i<no_clustering_keys;i++)
            schema->clustering_key_idxs[i] = clustering_key_idxs[i];
    }
    schema->no_index_keys = no_index_keys;
    // BUGFIX: same as above for the index-key array.
    schema->index_key_idxs = NULL;
    if(no_index_keys > 0)
    {
        schema->index_key_idxs = (int *) malloc(no_index_keys * sizeof(int));
        for(int i=0;i<no_index_keys;i++)
            schema->index_key_idxs[i] = index_key_idxs[i];
    }
    return schema;
}
/*
 * Free a schema created by db_create_schema. NULL-safe on the handle and on
 * each member array.
 * NOTE(review): correctness depends on unused member arrays being NULL —
 * historically db_create_schema left clustering_key_idxs/index_key_idxs
 * uninitialized when their counts were zero, making these checks UB.
 */
void free_schema(db_schema_t * schema)
{
    if(schema == NULL)
        return;
    if(schema->col_types != NULL)
        free(schema->col_types);
    if(schema->primary_key_idxs != NULL)
        free(schema->primary_key_idxs);
    if(schema->clustering_key_idxs != NULL)
        free(schema->clustering_key_idxs);
    if(schema->index_key_idxs != NULL)
        free(schema->index_key_idxs);
    free(schema);
}
/*
 * Create a table under `table_key`: deep-copies the schema, allocates the
 * row and tombstone skiplists, one index skiplist per index key, and the
 * table mutex. Returns the skiplist_insert result.
 */
int db_create_table(WORD table_key, db_schema_t* schema, db_t * db, unsigned int * fastrandstate)
{
    db_table_t * table = (db_table_t *) malloc(sizeof(db_table_t));
    table->table_key = table_key;
    // Deep copy of schema (to allow caller to free his copy):
    table->schema = db_create_schema(schema->col_types, schema->min_no_cols, schema->primary_key_idxs, schema->no_primary_keys, schema->clustering_key_idxs, schema->min_no_clustering_keys, schema->index_key_idxs, schema->no_index_keys);
    table->rows = create_skiplist_long();
    table->row_tombstones = create_skiplist_long();
    // BUGFIX: default to NULL so the pointer is never indeterminate when the
    // schema declares no index keys (previously it was left uninitialized).
    table->indexes = NULL;
    if(schema->no_index_keys > 0)
        table->indexes = (skiplist_t **) malloc(schema->no_index_keys * sizeof(skiplist_t *));
    for(int i=0;i<schema->no_index_keys;i++)
        table->indexes[i] = create_skiplist_long();
    table->lock = (pthread_mutex_t*) malloc(sizeof(pthread_mutex_t));
    pthread_mutex_init(table->lock, NULL);
    return skiplist_insert(db->tables, table_key, (WORD) table, fastrandstate);
}
// Adding an index to an existing table is not supported yet (asserts).
int db_create_index(int new_index, WORD table_key, db_t * db, unsigned int * fastrandstate)
{
    assert(0 && "Index creation post schema creation not supported yet");
    return 0;
}

/*
 * Remove `table_key` from the db and free the table and everything it owns.
 * Returns 1 if the table existed, 0 otherwise.
 */
int db_delete_table(WORD table_key, db_t * db)
{
    db_table_t * table = (db_table_t *) skiplist_delete(db->tables, table_key);
    int existed = (table != NULL);
    if(table != NULL)
    {
        skiplist_free(table->rows);
        // BUGFIX: row_tombstones is allocated in db_create_table but was never
        // freed here (memory leak).
        skiplist_free(table->row_tombstones);
        for(int i=0;i<table->schema->no_index_keys;i++)
            skiplist_free(table->indexes[i]);
        if(table->schema->no_index_keys > 0)
            free(table->indexes);
        free_schema(table->schema);
        // BUGFIX: the table mutex is allocated and initialized in
        // db_create_table but was leaked here.
        pthread_mutex_destroy(table->lock);
        free(table->lock);
        free(table);
    }
    // BUGFIX: previously `return table != NULL;` read the pointer after
    // free(table); return a flag captured before freeing instead.
    return existed;
}
// Table API:
/*
 * Insert (or upsert) a row. column_values layout: primary key, then
 * no_clustering_keys clustering keys, then the non-key columns (last one a
 * blob when last_blob_size > 0). If the primary key is new, the whole nested
 * row is built via create_db_row_sf; otherwise the clustering path is walked
 * (creating intermediate cells as needed) and the leaf cell's columns and
 * version are (re)populated. Also inserts the row into every applicable
 * secondary index. Always returns 0.
 */
int table_insert(WORD * column_values, int no_cols, int no_clustering_keys, size_t last_blob_size, vector_clock * version, db_table_t * table, unsigned int * fastrandstate)
{
    db_schema_t * schema = table->schema;
    assert(schema->no_primary_keys == 1 && "Compound primary keys unsupported for now");
    if(no_clustering_keys < schema->min_no_clustering_keys)
    {
        fprintf(stderr, "SERVER: Row insert must contain at least %d schema clustering keys, only has %d keys\n", schema->min_no_clustering_keys, no_clustering_keys);
        assert(0);
    }
    assert(no_cols > (schema->no_primary_keys + no_clustering_keys) && "Insert must contain at least 1 non-key column or blob");
    db_row_t * row = NULL;
    snode_t * row_node = skiplist_search(table->rows, column_values[schema->primary_key_idxs[0]]);
    if(row_node == NULL)
    {
        // New primary key: build the full nested row in one shot.
        row = create_db_row_sf(column_values, schema, no_clustering_keys, no_cols, last_blob_size, fastrandstate);
        row->version = (version != NULL)? copy_vc(version) : NULL;
        skiplist_insert(table->rows, column_values[schema->primary_key_idxs[0]], (WORD) row, fastrandstate);
    }
    else
    {
        // Existing row: walk/extend the clustering path to the leaf cell.
        row = (db_row_t *) row_node->value;
        db_row_t * cell = row, * new_cell = NULL;
        for(int i=0;i<no_clustering_keys;i++, cell = new_cell)
        {
            // Schema clustering keys use their declared value index; extra
            // (schemaless) keys use position i directly.
            int col_index = (i < schema->min_no_clustering_keys)?(schema->clustering_key_idxs[i]):(i);
            snode_t * new_cell_node = skiplist_search(cell->cells, column_values[col_index]);
            if(new_cell_node == NULL)
            {
                new_cell = create_empty_row(column_values[col_index]);
                if(i < no_clustering_keys - 1)
                {
                    new_cell->cells = create_skiplist_long();
                }
                // printf("Inserting into cell at level %d\n", i);
                skiplist_insert(cell->cells, column_values[col_index], (WORD) new_cell, fastrandstate);
            }
            else
            {
                new_cell = (db_row_t *) (new_cell_node->value);
            }
        }
        // Populate columns and set version for newly created cell:
        // NOTE(review): if the leaf cell already existed, its previous
        // column_array (and blob) is overwritten without being freed —
        // possible leak; confirm ownership semantics.
        assert(cell != NULL && cell->cells == NULL);
        cell->no_columns = no_cols - schema->no_primary_keys - no_clustering_keys;
        cell->last_blob_size = last_blob_size;
        cell->column_array = (WORD *) malloc(cell->no_columns * sizeof(WORD));
        int j=0;
        for(;j<cell->no_columns - 1;j++)
        {
            cell->column_array[j] = column_values[schema->no_primary_keys + no_clustering_keys + j];
        }
        if(last_blob_size <= 0) // last column is value
        {
            cell->column_array[j] = column_values[schema->no_primary_keys + no_clustering_keys + j];
        }
        else // last column is blob
        {
            cell->column_array[j] = malloc(last_blob_size);;
            memcpy(cell->column_array[j], column_values[schema->no_primary_keys + no_clustering_keys + j], last_blob_size);
        }
        if(version != NULL)
            update_or_replace_vc(&(cell->version), version);
    }
    // Maintain secondary indexes (skip index keys beyond this insert's columns):
    for(int i=0;i<schema->no_index_keys;i++)
    {
        if(schema->index_key_idxs[i] < no_cols)
            skiplist_insert(table->indexes[i], column_values[schema->index_key_idxs[i]], (WORD) row, fastrandstate);
    }
    return 0;
}
/*
 * Update non-key columns of an existing leaf cell.
 * column_values layout: primary key, clustering keys, then new column values;
 * col_idxs gives each entry's schema column index. Returns 0 on success, -1
 * when the row/cell path does not exist.
 */
int table_update(WORD * column_values, int no_cols, int no_clustering_keys, size_t last_blob_size, int * col_idxs, vector_clock * version, db_table_t * table)
{
    db_schema_t * schema = table->schema;
    assert(schema->no_primary_keys == 1 && "Compound primary keys unsupported for now");
    assert(no_clustering_keys >= schema->min_no_clustering_keys);
    assert(no_cols > schema->no_primary_keys + no_clustering_keys && "Empty update");
    assert(col_idxs[0] == schema->primary_key_idxs[0] && "Update must contain primary key as first element");
    for(int i=0;i<schema->min_no_clustering_keys;i++)
    {
        assert(col_idxs[i+1] == schema->clustering_key_idxs[i] && "Update must contain all minimal clustering keys in the right order, right after primary key");
    }
    db_row_t * row = NULL;
    snode_t * row_node = skiplist_search(table->rows, column_values[schema->primary_key_idxs[0]]);
    if(row_node == NULL)
        return -1;
    row = (db_row_t *) row_node->value;
    // NOTE(review): this loop reads schema->clustering_key_idxs[i] for all
    // no_clustering_keys entries, but that array only holds
    // min_no_clustering_keys elements (compare the col_index fallback in
    // table_insert) — looks out-of-bounds when extra clustering keys are
    // passed; confirm the intended layout.
    for(int i=0;i<no_clustering_keys;i++)
    {
        row_node = skiplist_search(row->cells, column_values[schema->clustering_key_idxs[i]]);
        if(row_node == NULL)
            return -1;
        row = (db_row_t *) (row_node->value);
    }
    // Write the new values into the leaf cell's column array; the slot for a
    // schema column idx is (idx - keys), since keys are not stored as columns.
    int i=schema->no_primary_keys + no_clustering_keys;
    for(;i<no_cols - 1;i++)
    {
        // printf("Updating col %d / %d to value %" PRId64 "\n", col_idxs[i], i, column_values[i]);
        row->column_array[col_idxs[i] - schema->no_primary_keys - no_clustering_keys] = column_values[i];
    }
    if(last_blob_size <= 0) // last column is value
    {
        row->column_array[col_idxs[i] - schema->no_primary_keys - no_clustering_keys] = column_values[i];
    }
    else // last column is blob
    {
        // NOTE(review): installs a fresh blob copy without freeing a previous
        // one — possible leak; confirm ownership.
        row->column_array[col_idxs[i] - schema->no_primary_keys - no_clustering_keys] = malloc(last_blob_size);
        memcpy(row->column_array[col_idxs[i] - schema->no_primary_keys - no_clustering_keys], column_values[i], last_blob_size);
    }
    if(version != NULL)
        update_or_replace_vc(&(row->version), version);
    return 0;
}
/* Point lookup of a top-level row by its (single) primary key.
 * Returns the row, or NULL when the key is absent. */
db_row_t* table_search(WORD* primary_keys, db_table_t * table)
{
    assert(table->schema->no_primary_keys == 1 && "Compound primary keys unsupported for now");
    snode_t * node = skiplist_search(table->rows, primary_keys[0]);
    return (node == NULL) ? NULL : (db_row_t *) node->value;
}
/*
 * Range scan over top-level rows by primary key: sets *start_row/*end_row to
 * the first and last matching skiplist nodes and returns the result count.
 * Passing NULL start keys, or the sentinel range [LONG_MIN, LONG_MAX-1],
 * selects the whole table.
 */
int table_range_search(WORD* start_primary_keys, WORD* end_primary_keys, snode_t** start_row, snode_t** end_row, db_table_t * table)
{
    db_schema_t * schema = table->schema;
    int no_results = 0;
    assert(schema->no_primary_keys == 1 && "Compound primary keys unsupported for now");
    if(start_primary_keys == NULL)
        assert(end_primary_keys == NULL);
    if(start_primary_keys == NULL || (start_primary_keys[0] == (WORD)LONG_MIN && end_primary_keys[0] == (WORD)(LONG_MAX - 1)))
    {
        // Full-table scan: start at the head, walk *end_row to the tail.
        // assert(end_primary_keys == NULL);
        *start_row = HEAD(table->rows);
        if(table->rows->no_items > 0 && (*start_row) != NULL)
            for(*end_row=*start_row; NEXT(*end_row) != NULL; *end_row = NEXT(*end_row));
        return table->rows->no_items;
    }
    *start_row = skiplist_search_higher(table->rows, start_primary_keys[0]);
    if(*start_row == NULL)
    {
        *end_row = NULL;
        return 0;
    }
    // Advance while the current node's key is below the (exclusive) end key;
    // no_results counts the hops, hence the +1 for the start node.
    for(*end_row = *start_row; NEXT(*end_row) != NULL && (int64_t) (*end_row)->key < (int64_t) end_primary_keys[0]; *end_row=NEXT(*end_row), no_results++);
    return no_results+1;
}

/*
 * Compare a cached range-query result (parallel key/version arrays) against
 * the live rows in [start_primary_keys[0], end_primary_keys[0]).
 * Returns 0 on exact match, 1 when the key set changed since the query, or
 * the first non-zero compare_vc() result on a version mismatch.
 */
int table_verify_row_range_version(WORD* start_primary_keys, WORD* end_primary_keys, int no_primary_keys,
        int64_t * range_result_keys, vector_clock ** range_result_versions, int no_range_results, db_table_t * table)
{
    int i = 0;
    assert(no_primary_keys == 1 && "Compound primary keys unsupported for now");
    snode_t * start_row = skiplist_search_higher(table->rows, start_primary_keys[0]);
    for(snode_t * cell_row_node = start_row; cell_row_node != NULL && (int64_t) cell_row_node->key < (int64_t) end_primary_keys[0]; cell_row_node=NEXT(cell_row_node), i++)
    {
        db_row_t* cell_row = (db_row_t *) cell_row_node->value;
        // Some keys were removed from the backend since the range query happened:
        if(i>(no_range_results - 1))
            return 1;
        if((int64_t) cell_row->key != range_result_keys[i])
            return 1;
        int cmp = compare_vc(cell_row->version, range_result_versions[i]);
        if(cmp != 0)
            return cmp;
    }
    // Some extra keys were added to the backend since the range query happened:
    if(i<no_range_results)
        return 1;
    return 0;
}
/*
 * Copy-based range scan over primary keys: delegates to skiplist_get_range,
 * which fills `rows` with the matching row pointers; returns its result.
 */
int table_range_search_copy(WORD* start_primary_keys, WORD* end_primary_keys, db_row_t** rows, db_table_t * table)
{
    db_schema_t * schema = table->schema;
    int no_results = 0;
    assert(schema->no_primary_keys == 1 && "Compound primary keys unsupported for now");
    return skiplist_get_range(table->rows, start_primary_keys[0], end_primary_keys[0], (WORD**) rows, &no_results);
}

/*
 * Point lookup descending through the nested clustering levels: find the
 * top-level row by primary key, then follow one skiplist hop per clustering
 * key. Returns the reached cell, or NULL when any step is missing.
 */
db_row_t* table_search_clustering(WORD* primary_keys, WORD* clustering_keys, int no_clustering_keys, db_table_t * table)
{
    db_schema_t * schema = table->schema;
    assert(no_clustering_keys > 0 && "No clustering keys given");
    // assert(no_clustering_keys <= schema->min_no_clustering_keys && "Too many clustering keys given");
    db_row_t* row = table_search(primary_keys, table);
    if(row == NULL)
        return NULL;
    // printf("Row not found by primary key %" PRId64 "!\n", (int64_t) primary_keys[0]);
    for(int i=0;i<no_clustering_keys;i++)
    {
        snode_t * row_node = skiplist_search(row->cells, clustering_keys[i]);
        if(row_node != NULL)
        {
            row = (db_row_t *) row_node->value;
        }
        else
        {
            // printf("Row not found by clustering key %d / %" PRId64 "!\n", i, (int64_t) clustering_keys[i]);
            return NULL;
        }
    }
    return row;
}
/*
 * Compare `version` against the version stored at (primary key, clustering
 * keys). Returns compare_vc()'s result, or -1 when the path does not exist.
 */
int table_verify_cell_version(WORD* primary_keys, int no_primary_keys, WORD* clustering_keys, int no_clustering_keys, vector_clock * version, db_table_t * table)
{
    assert(no_primary_keys == 1);
    snode_t * row_node = skiplist_search(table->rows, primary_keys[0]);
    if(row_node == NULL)
        return -1;
    db_row_t* row = (db_row_t *) row_node->value;
    for(int i=0;i<no_clustering_keys;i++)
    {
        snode_t * row_node = skiplist_search(row->cells, clustering_keys[i]);
        if(row_node == NULL)
            return -1;
        row = (db_row_t *) row_node->value;
    }
    return compare_vc(version, row->version);
}

/*
 * Range scan at the deepest clustering level: descend using the first N-1
 * clustering keys (start must equal end for those), then scan
 * [start_clustering_keys[N-1], end_clustering_keys[N-1]) at the final level,
 * setting *start_row/*end_row. Returns the result count, 0 for an empty
 * range or missing primary key, -1 when an intermediate level is missing.
 */
int table_range_search_clustering(WORD* primary_keys, WORD* start_clustering_keys, WORD* end_clustering_keys, int no_clustering_keys, snode_t** start_row, snode_t** end_row, db_table_t * table)
{
    db_schema_t * schema = table->schema;
    assert(no_clustering_keys > 0 && "No clustering keys given");
    // assert(no_clustering_keys <= schema->min_no_clustering_keys && "Too many clustering keys given");
    db_row_t* row = table_search(primary_keys, table);
    if(row == NULL)
        return 0;
    for(int i=0;i<no_clustering_keys-1;i++)
    {
        assert(start_clustering_keys[i] == end_clustering_keys[i] && "For first N-1 clustering keys, start key must be equal to end key");
        snode_t * row_node = skiplist_search(row->cells, start_clustering_keys[i]);
        if(row_node != NULL)
        {
            row = (db_row_t *) row_node->value;
        }
        else
        {
            return -1;
        }
    }
    *start_row = skiplist_search_higher(row->cells, start_clustering_keys[no_clustering_keys-1]);
    if(*start_row == NULL)
    {
        *end_row = NULL;
        return 0;
    }
    // Advance while the current key is below the (exclusive) end key; +1
    // accounts for the start node itself.
    int no_results = 0;
    for(*end_row = *start_row; NEXT(*end_row) != NULL && (int64_t) (*end_row)->key < (int64_t) end_clustering_keys[no_clustering_keys-1]; *end_row=NEXT(*end_row), no_results++);
    return no_results+1;
}
// Debug dump: whole DB -> each table -> each row.
void print_long_db(db_t * db)
{
    printf("DB: [%d tables]\n", db->tables->no_items);
    for(snode_t * node = HEAD(db->tables);node!=NULL;node=NEXT(node))
        print_long_table((db_table_t *) node->value);
}

// Debug dump of one table's key and all its rows.
void print_long_table(db_table_t * table)
{
    printf("DB_TABLE: %" PRId64 " [%d rows]\n", (int64_t) table->table_key, table->rows->no_items);
    for(snode_t * node = HEAD(table->rows);node!=NULL;node=NEXT(node))
        print_long_row((db_row_t*) node->value);
}

// Render one row into a MAX_PRINT_BUFF stack buffer and print it.
void print_long_row(db_row_t* row)
{
    char to_string[MAX_PRINT_BUFF];
    int len = 0;
    long_row_to_string(row, (char *) to_string, &len, (char *) to_string);
    printf("DB_ROW [%d cells]: %s\n", (row->cells != NULL)?(row->cells->no_items):(0), to_string);
}
/*
 * Recursively append a textual rendering of `row` at `to_string`:
 * "{ key, <subrows...> [ col, col, ... ]}, ". `orig_offset` is the start of
 * the whole output buffer; once the total output is within 10 bytes of
 * MAX_PRINT_BUFF, ".." is appended and the current loop stops (overflow
 * guard). *len is set to the string length written at this level's offset.
 */
void long_row_to_string(db_row_t* row, char * to_string, int * len, char * orig_offset)
{
// Compile-time toggles: whether to print a trailing blob column at all, and
// whether to print it word-by-word as longs rather than as a C string.
#define PRINT_BLOBS 1
#define PRINT_BLOBS_AS_LONG 1
    sprintf(to_string, "{ %" PRId64 ", ", (int64_t) row->key);
    if(row->cells != NULL)
    {
        // Inner cells and columns are mutually exclusive on a cell.
        assert(row->no_columns == 0);
        for(snode_t* node = HEAD(row->cells); node != NULL; node = NEXT(node))
        {
            if(to_string + strlen(to_string) - orig_offset > MAX_PRINT_BUFF - 10)
            {
                sprintf(to_string + strlen(to_string), "..");
                break;
            }
            db_row_t * subrow = (db_row_t *) node->value;
            long_row_to_string(subrow, to_string + strlen(to_string), len, orig_offset);
        }
    }
    if(row->no_columns > 0)
    {
        sprintf(to_string + strlen(to_string), "[ ");
        for(int i=0; i<row->no_columns; i++)
        {
            if(to_string + strlen(to_string) - orig_offset > MAX_PRINT_BUFF - 10)
            {
                sprintf(to_string + strlen(to_string), "..");
                break;
            }
#if (PRINT_BLOBS > 0)
            if(i<(row->no_columns - 1) || row->last_blob_size <= 0)
            {
                sprintf(to_string + strlen(to_string), "%" PRId64 ", ", (int64_t) row->column_array[i]);
            }
            else
            {
                // Last column is a blob: dump its contents.
#if (PRINT_BLOBS_AS_LONG > 0)
                for(int bi=0;bi < row->last_blob_size / sizeof(long);bi++)
                    sprintf(to_string + strlen(to_string), "%lu ", *((unsigned long *) row->column_array[i] + bi));
#else
                sprintf(to_string + strlen(to_string), "%s, ", (char *) row->column_array[i]);
#endif
            }
#else
            sprintf(to_string + strlen(to_string), "%" PRId64 ", ", (int64_t) row->column_array[i]);
#endif
        }
        sprintf(to_string + strlen(to_string), " ]");
    }
    sprintf(to_string + strlen(to_string), "}, ");
    *len = strlen(to_string);
}
int table_verify_cell_range_version(WORD* primary_keys, int no_primary_keys, WORD* start_clustering_keys, WORD* end_clustering_keys, int no_clustering_keys,
int64_t * range_result_keys, vector_clock ** range_result_versions, int no_range_results, db_table_t * table)
{
assert(no_primary_keys == 1);
snode_t * row_node = skiplist_search(table->rows, primary_keys[0]);
if(row_node == NULL)
return -1;
db_row_t* row = (db_row_t *) row_node->value;
for(int i=0;i<no_clustering_keys-1;i++)
{
assert(start_clustering_keys[i] == end_clustering_keys[i] && "For first N-1 clustering keys, start key must be equal to end key");
snode_t * row_node = skiplist_search(row->cells, start_clustering_keys[i]);
if(row_node == NULL)
return -1;
db_row_t* row = (db_row_t *) row_node->value;
}
snode_t * start_row = skiplist_search_higher(row->cells, start_clustering_keys[no_clustering_keys-1]);
int i = 0;
for(snode_t * cell_row_node = start_row; cell_row_node != NULL && (int64_t) cell_row_node->key < (int64_t) end_clustering_keys[no_clustering_keys-1]; cell_row_node=NEXT(cell_row_node), i++)
{
db_row_t* cell_row = (db_row_t *) cell_row_node->value;
// Some keys were removed from the backend since the range query happened:
if(i>(no_range_results - 1))
return 1;
if((int64_t) cell_row->key != range_result_keys[i])
return 1;
int cmp = compare_vc(cell_row->version, range_result_versions[i]);
if(cmp != 0)
return cmp;
}
// Some extra keys were added to the backend since the range query happened:
if(i<no_range_results)
return 1;
return 0;
}
WORD* table_search_columns(WORD* primary_keys, WORD* clustering_keys, int no_clustering_keys, int* column_idxs, int no_columns, db_table_t * table)
{
db_schema_t * schema = table->schema;
assert(no_columns > 0 && "No column indexes given");
assert(no_clustering_keys >= schema->min_no_clustering_keys && "Not enough clustering keys given");
db_row_t* row = table_search_clustering(primary_keys, clustering_keys, no_clustering_keys, table);
if(row == NULL)
return NULL;
assert(row->column_array != NULL && row->no_columns > 0);
WORD* results = (WORD*) malloc(no_columns * sizeof(WORD));
for(int i=0;i<no_columns;i++)
{
assert(column_idxs[i] <= row->no_columns + schema->no_primary_keys + no_clustering_keys && "Column index doesn't exist in backend (DB corrupted?)");
if(column_idxs[i] < schema->no_primary_keys)
results[i] = primary_keys[column_idxs[i]];
else if(column_idxs[i] < schema->no_primary_keys + no_clustering_keys)
results[i] = clustering_keys[column_idxs[i] - schema->no_primary_keys];
else
results[i] = row->column_array[column_idxs[i] - schema->no_primary_keys - no_clustering_keys];
}
return results;
}
db_row_t* table_search_index(WORD index_key, int idx_idx, db_table_t * table)
{
db_schema_t * schema = table->schema;
assert(idx_idx <= schema->no_index_keys == 1 && "Index index out of range");
snode_t * row_node = skiplist_search(table->indexes[idx_idx], index_key);
if(row_node != NULL)
{
return (db_row_t *) (row_node->value);
}
else
{
return NULL;
}
}
int table_verify_index_version(WORD index_key, int idx_idx, vector_clock * version, db_table_t * table)
{
db_schema_t * schema = table->schema;
assert(idx_idx <= schema->no_index_keys == 1 && "Index index out of range");
snode_t * row_node = skiplist_search(table->indexes[idx_idx], index_key);
if(row_node == NULL)
return 1;
return compare_vc(version, ((db_row_t *) (row_node->value))->version);
}
int table_range_search_index(int idx_idx, WORD start_idx_key, WORD end_idx_key, snode_t** start_row, snode_t** end_row, db_table_t * table)
{
db_schema_t * schema = table->schema;
int no_results = 0;
assert(idx_idx <= schema->no_index_keys == 1 && "Index index out of range");
*start_row = skiplist_search_higher(table->indexes[idx_idx], start_idx_key);
for(*end_row = *start_row; (*end_row != NULL) && ((int64_t) (*end_row)->key < (int64_t) end_idx_key); *end_row=NEXT(*end_row), no_results++);
return no_results+1;
}
int table_verify_index_range_version(int idx_idx, WORD start_idx_key, WORD end_idx_key,
int64_t * range_result_keys, vector_clock ** range_result_versions, int no_range_results, db_table_t * table)
{
db_schema_t * schema = table->schema;
int i = 0;
assert(idx_idx <= schema->no_index_keys == 1 && "Index index out of range");
snode_t * start_row = skiplist_search_higher(table->indexes[idx_idx], start_idx_key);
for(snode_t * cell_row_node = start_row; cell_row_node != NULL && (int64_t) cell_row_node->key < (int64_t) end_idx_key; cell_row_node=NEXT(cell_row_node), i++)
{
db_row_t* cell_row = (db_row_t *) cell_row_node->value;
// Some keys were removed from the backend since the range query happened:
if(i>(no_range_results - 1))
return 1;
if((int64_t) cell_row->key != range_result_keys[i])
return 1;
int cmp = compare_vc(cell_row->version, range_result_versions[i]);
if(cmp != 0)
return cmp;
}
// Some extra keys were added to the backend since the range query happened:
if(i<no_range_results)
return 1;
return 0;
}
int table_delete_row(WORD* primary_keys, vector_clock * version, db_table_t * table, unsigned int * fastrandstate)
{
db_row_t* row = (db_row_t *) (skiplist_delete(table->rows, primary_keys[0]));
snode_t * exists = skiplist_search(table->row_tombstones, primary_keys[0]);
if(exists != NULL)
skiplist_insert(table->row_tombstones, primary_keys[0], (version != NULL)? copy_vc(version) : NULL, fastrandstate);
if(row != NULL)
{
free_db_row(row, table->schema);
}
else
{
printf("table_delete_row(): Row with pk %" PRId64 " doesn't exist!\n", (int64_t) primary_keys[0]);
}
return row == NULL;
}
int table_delete_by_index(WORD index_key, int idx_idx, db_table_t * table)
{
db_schema_t * schema = table->schema;
assert(idx_idx <= schema->no_index_keys == 1 && "Index index out of range");
db_row_t* row = (db_row_t *) (skiplist_delete(table->indexes[idx_idx], index_key));
// TO DO: Re-enable this after enhancing indexes:
// if(row != NULL)
// free_db_row(row, table->schema);
return row == NULL;
}
// DB API:
int db_insert_transactional(WORD * column_values, int no_cols, int no_clustering_keys, size_t last_blob_size, vector_clock * version, WORD table_key, db_t * db, unsigned int * fastrandstate)
{
#if (VERBOSE_BACKEND > 0)
printf("BACKEND: db_insert_transactional: Attempting to insert %d total columns into backend:\n", min_no_cols);
for(int i=0;i<min_no_cols;i++)
printf("column_values[%d] = %" PRId64 "\n", i, (int64_t) column_values[i]);
#endif
snode_t * node = skiplist_search(db->tables, table_key);
if(node == NULL)
return -1;
db_table_t * table = (db_table_t *) (node->value);
return table_insert(column_values, no_cols, no_clustering_keys, last_blob_size, version, table, fastrandstate);
}
int db_insert(WORD * column_values, int no_cols, int no_clustering_keys, size_t last_blob_size, WORD table_key, db_t * db, unsigned int * fastrandstate)
{
return db_insert_transactional(column_values, no_cols, no_clustering_keys, last_blob_size, NULL, table_key, db, fastrandstate);
}
int db_update_transactional(WORD * column_values, int no_cols, int no_clustering_keys, size_t last_blob_size, int * col_idxs, vector_clock * version, WORD table_key, db_t * db)
{
snode_t * node = skiplist_search(db->tables, table_key);
if(node == NULL)
return -1;
db_table_t * table = (db_table_t *) (node->value);
return table_update(column_values, no_cols, no_clustering_keys, last_blob_size, col_idxs, version, table);
}
int db_update(WORD * column_values, int no_cols, int no_clustering_keys, size_t last_blob_size, int * col_idxs, WORD table_key, db_t * db)
{
return db_update_transactional(column_values, no_cols, no_clustering_keys, last_blob_size, col_idxs, NULL, table_key, db);
}
db_row_t* db_search(WORD* primary_keys, WORD table_key, db_t * db)
{
snode_t * node = skiplist_search(db->tables, table_key);
if(node == NULL)
return NULL;
db_table_t * table = (db_table_t *) (node->value);
return table_search(primary_keys, table);
}
int db_range_search(WORD* start_primary_keys, WORD* end_primary_keys, snode_t** start_row, snode_t** end_row, WORD table_key, db_t * db)
{
snode_t * node = skiplist_search(db->tables, table_key);
if(node == NULL)
return -1;
db_table_t * table = (db_table_t *) (node->value);
return table_range_search(start_primary_keys, end_primary_keys, start_row, end_row, table);
}
int db_verify_row_range_version(WORD* start_primary_keys, WORD* end_primary_keys, int no_primary_keys, WORD table_key,
int64_t * range_result_keys, vector_clock ** range_result_versions, int no_range_results, db_t * db)
{
snode_t * node = skiplist_search(db->tables, table_key);
if(node == NULL)
return -1;
db_table_t * table = (db_table_t *) (node->value);
return table_verify_row_range_version(start_primary_keys, end_primary_keys, no_primary_keys,
range_result_keys, range_result_versions, no_range_results, table);
}
int db_range_search_copy(WORD* start_primary_keys, WORD* end_primary_keys, db_row_t** rows, WORD table_key, db_t * db)
{
snode_t * node = skiplist_search(db->tables, table_key);
if(node == NULL)
return -1;
db_table_t * table = (db_table_t *) (node->value);
return table_range_search_copy(start_primary_keys, end_primary_keys, rows, table);
}
db_row_t* db_search_clustering(WORD* primary_keys, WORD* clustering_keys, int no_clustering_keys, WORD table_key, db_t * db)
{
snode_t * node = skiplist_search(db->tables, table_key);
if(node == NULL)
return NULL;
db_table_t * table = (db_table_t *) (node->value);
return table_search_clustering(primary_keys, clustering_keys, no_clustering_keys, table);
}
int db_verify_cell_version(WORD* primary_keys, int no_primary_keys, WORD* clustering_keys, int no_clustering_keys, WORD table_key, vector_clock * version, db_t * db)
{
snode_t * node = skiplist_search(db->tables, table_key);
if(node == NULL)
return -1;
db_table_t * table = (db_table_t *) (node->value);
return table_verify_cell_version(primary_keys, no_primary_keys, clustering_keys, no_clustering_keys, version, table);
}
int db_range_search_clustering(WORD* primary_keys, WORD* start_clustering_keys, WORD* end_clustering_keys, int no_clustering_keys, snode_t** start_row, snode_t** end_row, WORD table_key, db_t * db)
{
snode_t * node = skiplist_search(db->tables, table_key);
if(node == NULL)
return -1;
db_table_t * table = (db_table_t *) (node->value);
return table_range_search_clustering(primary_keys, start_clustering_keys, end_clustering_keys, no_clustering_keys, start_row, end_row, table);
}
int db_verify_cell_range_version(WORD* primary_keys, int no_primary_keys, WORD* start_clustering_keys, WORD* end_clustering_keys, int no_clustering_keys, WORD table_key,
int64_t * range_result_keys, vector_clock ** range_result_versions, int no_range_results, db_t * db)
{
snode_t * node = skiplist_search(db->tables, table_key);
if(node == NULL)
return -1;
db_table_t * table = (db_table_t *) (node->value);
return table_verify_cell_range_version(primary_keys, no_primary_keys, start_clustering_keys, end_clustering_keys, no_clustering_keys, range_result_keys, range_result_versions, no_range_results, table);
}
WORD* db_search_columns(WORD* primary_keys, WORD* clustering_keys, int no_clustering_keys, int* column_idxs, int no_columns, WORD table_key, db_t * db)
{
snode_t * node = skiplist_search(db->tables, table_key);
if(node == NULL)
return NULL;
db_table_t * table = (db_table_t *) (node->value);
return table_search_columns(primary_keys, clustering_keys, no_clustering_keys, column_idxs, no_columns, table);
}
db_row_t* db_search_index(WORD index_key, int idx_idx, WORD table_key, db_t * db)
{
snode_t * node = skiplist_search(db->tables, table_key);
if(node == NULL)
return NULL;
db_table_t * table = (db_table_t *) (node->value);
return table_search_index(index_key, idx_idx, table);
}
int db_verify_index_version(WORD index_key, int idx_idx, WORD table_key, vector_clock * version, db_t * db)
{
snode_t * node = skiplist_search(db->tables, table_key);
if(node == NULL)
return -2;
db_table_t * table = (db_table_t *) (node->value);
return table_verify_index_version(index_key, idx_idx, version, table);
}
int db_range_search_index(int idx_idx, WORD start_idx_key, WORD end_idx_key, snode_t** start_row, snode_t** end_row, WORD table_key, db_t * db)
{
snode_t * node = skiplist_search(db->tables, table_key);
if(node == NULL)
return -1;
db_table_t * table = (db_table_t *) (node->value);
return table_range_search_index(idx_idx, start_idx_key, end_idx_key, start_row, end_row, table);
}
int db_verify_index_range_version(int idx_idx, WORD start_idx_key, WORD end_idx_key,
int64_t * range_result_keys, vector_clock ** range_result_versions, int no_range_results, WORD table_key, db_t * db)
{
snode_t * node = skiplist_search(db->tables, table_key);
if(node == NULL)
return -1;
db_table_t * table = (db_table_t *) (node->value);
return table_verify_index_range_version(idx_idx, start_idx_key, end_idx_key, range_result_keys, range_result_versions, no_range_results, table);
}
int db_delete_row_transactional(WORD* primary_keys, vector_clock * version, WORD table_key, db_t * db, unsigned int * fastrandstate)
{
snode_t * node = skiplist_search(db->tables, table_key);
if(node == NULL)
{
printf("db_delete_row(): Table with pk %" PRId64 " doesn't exist!\n", (int64_t) table_key);
return -1;
}
db_table_t * table = (db_table_t *) (node->value);
return table_delete_row(primary_keys, version, table, fastrandstate);
}
int db_delete_row(WORD* primary_keys, WORD table_key, db_t * db, unsigned int * fastrandstate)
{
return db_delete_row_transactional(primary_keys, NULL, table_key, db, fastrandstate);
}
int db_delete_by_index(WORD index_key, int idx_idx, WORD table_key, db_t * db)
{
snode_t * node = skiplist_search(db->tables, table_key);
if(node == NULL)
return -1;
db_table_t * table = (db_table_t *) (node->value);
return table_delete_by_index(index_key, idx_idx, table);
}
#define DEBUG_QUEUE_CALLBACK 0
queue_callback_args * get_queue_callback_args(WORD table_key, WORD queue_id, WORD app_id, WORD shard_id, WORD consumer_id, int status)
{
queue_callback_args * qca = (queue_callback_args *) malloc(sizeof(queue_callback_args));
qca->table_key = table_key;
qca->queue_id = queue_id;
qca->app_id = app_id;
qca->shard_id = shard_id;
qca->consumer_id = consumer_id;
qca->status = status;
return qca;
}
void free_queue_callback_args(queue_callback_args * qca)
{
free(qca);
}
queue_callback * get_queue_callback(void (*callback)(queue_callback_args *))
{
queue_callback * qc = (queue_callback *) malloc(sizeof(queue_callback) + sizeof(pthread_mutex_t) + sizeof(pthread_cond_t));
qc->lock = (pthread_mutex_t *) ((char *)qc + sizeof(queue_callback));
qc->signal = (pthread_cond_t *) ((char *)qc + sizeof(queue_callback) + sizeof(pthread_mutex_t));
pthread_mutex_init(qc->lock, NULL);
pthread_cond_init(qc->signal, NULL);
qc->callback = callback;
return qc;
}
int wait_on_queue_callback(queue_callback * qc)
{
int ret = pthread_mutex_lock(qc->lock);
#if DEBUG_QUEUE_CALLBACK > 0
printf("Locked consumer lock %p/%p\n", qc, qc->lock);
#endif
struct timespec ts;
clock_gettime(CLOCK_REALTIME, &ts);
ts.tv_sec += 3;
ret = pthread_cond_timedwait(qc->signal, qc->lock, &ts);
pthread_mutex_unlock(qc->lock);
#if DEBUG_QUEUE_CALLBACK > 0
printf("Unlocked consumer lock %p/%p\n", qc, qc->lock);
#endif
return ret;
}
void free_queue_callback(queue_callback * qc)
{
free(qc);
}
| 30.642405
| 758
| 0.723691
|
a3bfa1643e67f0e65cd576526246413b81fc3d49
| 3,445
|
java
|
Java
|
pax-logging-samples/logger/src/main/java/org/ops4j/pax/logging/example/Activator.java
|
mattrpav/org.ops4j.pax.logging
|
a1e315f8a488f61442b95503c85553bcbdce015a
|
[
"Apache-2.0",
"MIT"
] | 21
|
2015-02-01T19:05:14.000Z
|
2022-03-03T07:04:42.000Z
|
pax-logging-samples/logger/src/main/java/org/ops4j/pax/logging/example/Activator.java
|
mattrpav/org.ops4j.pax.logging
|
a1e315f8a488f61442b95503c85553bcbdce015a
|
[
"Apache-2.0",
"MIT"
] | 153
|
2015-09-08T06:38:18.000Z
|
2022-03-11T05:22:10.000Z
|
pax-logging-samples/logger/src/main/java/org/ops4j/pax/logging/example/Activator.java
|
mattrpav/org.ops4j.pax.logging
|
a1e315f8a488f61442b95503c85553bcbdce015a
|
[
"Apache-2.0",
"MIT"
] | 69
|
2015-01-29T13:31:31.000Z
|
2022-03-03T10:51:53.000Z
|
/*
* Copyright 2005 Niclas Hedhman.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ops4j.pax.logging.example;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.mortbay.http.HttpContext;
import org.mortbay.http.HttpHandler;
import org.mortbay.http.HttpListener;
import org.mortbay.http.HttpServer;
import org.mortbay.http.SocketListener;
import org.mortbay.util.InetAddrPort;
import org.ops4j.pax.logging.avalon.AvalonLogFactory;
import org.osgi.framework.BundleActivator;
import org.osgi.framework.BundleContext;
import org.slf4j.LoggerFactory;
/**
* This Activator starts up the Jetty server and enables port 8080, which serves a Hello,World message.
*
* Jetty 5.1 uses Jakarta Commons Logging, and we are showing that those logging statements will be passed to
* the Pax Logging service, and ultimately output to the Log4J backend.
*/
public class Activator
implements BundleActivator
{
private HttpServer m_server;
private Log m_jclLogger;
private org.apache.juli.logging.Log m_juliLogger;
private org.apache.avalon.framework.logger.Logger m_avalonLogger;
private org.slf4j.Logger m_slf4jLogger;
private java.util.logging.Logger m_jdkLogger;
public void start( BundleContext bundleContext )
throws Exception
{
m_jclLogger = LogFactory.getLog( Activator.class );
m_juliLogger = org.apache.juli.logging.LogFactory.getLog( Activator.class );
m_avalonLogger = AvalonLogFactory.getLogger( Activator.class.getName() );
m_slf4jLogger = LoggerFactory.getLogger( Activator.class );
m_jdkLogger = java.util.logging.Logger.getLogger( Activator.class.getName() );
m_jclLogger.info( "Starting Example... (jcl)" );
m_avalonLogger.info( "Starting Example... (avalon)" );
m_slf4jLogger.info( "Starting Example... (slf4j)" );
m_jdkLogger.info( "Starting Example... (jdk)" );
m_juliLogger.info( "Starting Example... (juli)" );
HttpHandler handler = new TestHandler( "test" );
InetAddrPort port = new InetAddrPort( 8080 );
HttpListener listener = new SocketListener( port );
m_server = new HttpServer();
HttpContext context = new HttpContext();
context.setContextPath( "/" );
context.addHandler( handler );
m_server.addContext( context );
m_server.addListener( listener );
m_server.start();
}
public void stop( BundleContext bundleContext )
throws Exception
{
m_jclLogger.info( "Stopping Example... (jcl)" );
m_avalonLogger.info( "Stopping Example... (avalon)" );
m_slf4jLogger.info( "Stopping Example... (slf4j)");
m_jdkLogger.info( "Stopping Example... (jdk)");
m_juliLogger.info( "Stopping Example... (juli)");
m_server.stop();
}
}
| 39.597701
| 109
| 0.69492
|
8871cee942006c3c7baa9f64841cc821cdfce971
| 7,726
|
swift
|
Swift
|
GradientButton/GradientButton.swift
|
ChocolatesChen/GradientButton
|
0100da915f46f9431922599d6c0b599d86705898
|
[
"Apache-2.0"
] | 1
|
2020-04-10T04:25:03.000Z
|
2020-04-10T04:25:03.000Z
|
GradientButton/GradientButton.swift
|
ChocolatesChen/GradientButton
|
0100da915f46f9431922599d6c0b599d86705898
|
[
"Apache-2.0"
] | null | null | null |
GradientButton/GradientButton.swift
|
ChocolatesChen/GradientButton
|
0100da915f46f9431922599d6c0b599d86705898
|
[
"Apache-2.0"
] | null | null | null |
//
// GradientButton.swift
// GradientButton
//
// Created by cg on 2020/4/10.
// Copyright © 2020 df. All rights reserved.
//
import UIKit
let kScreenHeight = UIScreen.main.bounds.size.height
let kScreenWidth = UIScreen.main.bounds.size.width
func RGBA (red:CGFloat, green:CGFloat, blue:CGFloat, alpha:CGFloat)->UIColor {
return UIColor (red: red/255.0, green: green/255.0, blue: blue/255.0, alpha: alpha)
}
//十六进制色
func colorWithHexString(_ hexString:String)->UIColor {
var cString = hexString.trimmingCharacters(in:CharacterSet.whitespacesAndNewlines).uppercased()
if (cString.hasPrefix("#")) {
let index = cString.index(cString.startIndex, offsetBy:1)
cString = String(cString[index...])
}
if (cString.count != 6) {
return UIColor.red
}
let rIndex = cString.index(cString.startIndex, offsetBy: 2)
let rString = String(cString[..<rIndex])
let otherString = String(cString[rIndex...])
let gIndex = otherString.index(otherString.startIndex, offsetBy: 2)
let gString = String(otherString[..<gIndex])
let bIndex = cString.index(cString.endIndex, offsetBy: -2)
let bString = String(cString[bIndex...])
var red:CUnsignedInt = 0, green:CUnsignedInt = 0, blue:CUnsignedInt = 0;
Scanner(string: rString).scanHexInt32(&red)
Scanner(string: gString).scanHexInt32(&green)
Scanner(string: bString).scanHexInt32(&blue)
return RGBA(red:CGFloat(red), green: CGFloat(green), blue: CGFloat(blue), alpha: 1)
}
class GradientButton: UIButton {
typealias clickAction = (_ isSelected:Bool,_ btn:GradientButton) -> Void
private var click: clickAction?
private var buttonSelected:Bool = false
private var colors: [UIColor]!
private var pathLayer:CAGradientLayer!
convenience init(title:String,
fontSize:CGFloat = 10,
colors: [UIColor],
direction:GradientDirection,
withButtonHandler buttonHandler: @escaping (_ isSelected:Bool,_ btn:GradientButton)->Void) {
self.init(frame:.zero)
self.colors = colors
setTitle(title, for: UIControl.State())
setTitleColor(.black, for: UIControl.State())
titleLabel?.font = UIFont.systemFont(ofSize: fontSize, weight: .regular)
self.click = buttonHandler
addTarget(self, action: #selector(buttonAction(_:)), for: .touchUpInside)
}
override init(frame: CGRect) {
super.init(frame:frame)
}
override func layoutSubviews() {
super.layoutSubviews()
self.df_rounded(4, width: 1, color: buttonSelected ? .clear : colorWithHexString("#D0D0D0"))
}
func switchButton(isSelected:Bool) {
self.buttonSelected = isSelected
setTitleColor(isSelected ? .white : .black, for: UIControl.State())
titleLabel?.font = UIFont.systemFont(ofSize: 10, weight: isSelected ? .medium : .regular)
if isSelected {
addGradient()
if self.pathLayer != nil {
self.layer.insertSublayer(self.pathLayer, at: 0)
}
} else {
if ((self.layer.sublayers?[0] as? CAGradientLayer) != nil) {
self.layer.sublayers?.remove(at: 0)
}
self.pathLayer = nil
}
}
private func addGradient(){
self.pathLayer = self.setGradient(colors: self.colors, direction: .Horizontal)
}
@objc fileprivate func buttonAction(_ sender:UIButton) {
sender.isSelected.toggle()
self.switchButton(isSelected: sender.isSelected)
click?(self.buttonSelected,self)
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
}
private var gradientLayerStr: Void?
// MARK: - 渐变色
extension UIView {
// MARK: - layer
/// 添加圆角
///
/// - Parameter cornerRadius: 半径
func df_rounded(_ cornerRadius: CGFloat) {
df_rounded(cornerRadius, width: 0, color: nil)
}
/// 添加边框
///
/// - Parameters:
/// - borderWidth: 宽度
/// - borderColor: 边框颜色
func df_border(_ borderWidth: CGFloat, color borderColor: UIColor?) {
df_rounded(0, width: borderWidth, color: borderColor)
}
/// 添加圆角、边框
///
/// - Parameters:
/// - cornerRadius: 半径
/// - borderWidth: 宽度
/// - borderColor: 颜色
func df_rounded(_ cornerRadius: CGFloat, width borderWidth: CGFloat, color borderColor: UIColor?) {
layer.cornerRadius = cornerRadius
layer.borderWidth = borderWidth
layer.borderColor = borderColor?.cgColor
layer.masksToBounds = true
}
//枚举渐变色的方向
enum GradientDirection {
case Horizontal
case Vertical
case Right
case Left
case Bottom
case Top
case TopLeftToBottomRight
case TopRightToBottomLeft
case BottomLeftToTopRight
case BottomRightToTopLeft
}
@discardableResult
func setGradient(colors: [UIColor], direction:GradientDirection) -> CAGradientLayer {
func setGradient(_ layer: CAGradientLayer) {
self.layoutIfNeeded()
var colorArr = [CGColor]()
for color in colors {
colorArr.append(color.cgColor)
}
CATransaction.begin()
CATransaction.setDisableActions(true)
layer.frame = self.bounds
CATransaction.commit()
layer.colors = colorArr
switch direction {
case .Horizontal:
layer.startPoint = CGPoint(x:0.0, y:0.0)
layer.endPoint = CGPoint(x:1.0, y:0.0)
case .Vertical:
layer.startPoint = CGPoint(x:0.0, y:0.0)
layer.endPoint = CGPoint(x:0.0, y:1.0)
case .Right:
layer.startPoint = CGPoint(x:0.0, y:0.5)
layer.endPoint = CGPoint(x:1.0, y:0.5)
case .Left:
layer.startPoint = CGPoint(x:1.0, y:0.5)
layer.endPoint = CGPoint(x:0.0, y:0.5)
case .Bottom:
layer.startPoint = CGPoint(x:0.5, y:0.0)
layer.endPoint = CGPoint(x:0.5, y:1.0)
case .Top:
layer.startPoint = CGPoint(x:0.5, y:1.0)
layer.endPoint = CGPoint(x:0.5, y:0.0)
case .TopLeftToBottomRight:
layer.startPoint = CGPoint(x:0.0, y:0.0)
layer.endPoint = CGPoint(x:1.0, y:1.0)
case .TopRightToBottomLeft:
layer.startPoint = CGPoint(x:1.0, y:0.0)
layer.endPoint = CGPoint(x:0.0, y:1.0)
case .BottomLeftToTopRight:
layer.startPoint = CGPoint(x:0.0, y:1.0)
layer.endPoint = CGPoint(x:1.0, y:0.0)
default:
layer.startPoint = CGPoint(x:1.0, y:1.0)
layer.endPoint = CGPoint(x:0.0, y:0.0)
}
}
if let gradientLayer = objc_getAssociatedObject(self, &gradientLayerStr) as? CAGradientLayer {
setGradient(gradientLayer)
return gradientLayer
}else {
let gradientLayer = CAGradientLayer()
self.layer.insertSublayer(gradientLayer , at: 0)
setGradient(gradientLayer)
objc_setAssociatedObject(self, &gradientLayerStr, gradientLayer, objc_AssociationPolicy.OBJC_ASSOCIATION_RETAIN_NONATOMIC)
return gradientLayer
}
}
}
| 35.118182
| 134
| 0.583614
|
2caac19c533755d6759031c8afe6786d1d6ac5f0
| 117,253
|
py
|
Python
|
tests/connectors/kubernetes_test.py
|
opsani/ServoX
|
06c574e2c9cd15da819f8a81e928ac0e606f2298
|
[
"Apache-2.0"
] | null | null | null |
tests/connectors/kubernetes_test.py
|
opsani/ServoX
|
06c574e2c9cd15da819f8a81e928ac0e606f2298
|
[
"Apache-2.0"
] | null | null | null |
tests/connectors/kubernetes_test.py
|
opsani/ServoX
|
06c574e2c9cd15da819f8a81e928ac0e606f2298
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import annotations
from typing import Type
import httpx
import kubetest.client
from kubetest.objects import Deployment as KubetestDeployment
import kubernetes.client.models
import kubernetes.client.exceptions
import platform
import pydantic
import pytest
import pytest_mock
import re
import respx
import traceback
from kubernetes_asyncio import client
from pydantic import BaseModel
from pydantic.error_wrappers import ValidationError
import servo
import servo.connectors.kubernetes
from servo.connectors.kubernetes import (
CPU,
CanaryOptimization,
CanaryOptimizationStrategyConfiguration,
Container,
ContainerConfiguration,
ContainerTagName,
DefaultOptimizationStrategyConfiguration,
Deployment,
DeploymentConfiguration,
DNSLabelName,
DNSSubdomainName,
FailureMode,
KubernetesChecks,
KubernetesConfiguration,
KubernetesConnector,
Memory,
Core,
OptimizationStrategy,
Pod,
ResourceRequirement,
Rollout,
RolloutConfiguration,
)
import servo
from servo.errors import AdjustmentFailedError, AdjustmentRejectedError
import servo.runner
from servo.types.api import Adjustment, Component, Description
from servo.types.settings import Replicas, EnvironmentEnumSetting
from tests.helpers import *
class TestDNSSubdomainName:
@pytest.fixture
def model(self) -> Type[BaseModel]:
class Model(BaseModel):
name: DNSSubdomainName
return Model
def test_cannot_be_blank(self, model) -> None:
valid_name = "ab"
invalid_name = ""
assert model(name=valid_name)
with pytest.raises(ValidationError) as e:
model(name=invalid_name)
assert e
assert {
"loc": ("name",),
"msg": "ensure this value has at least 1 characters",
"type": "value_error.any_str.min_length",
"ctx": {
"limit_value": 1,
},
} in e.value.errors()
def test_handles_uppercase_chars(self, model) -> None:
valid_name = "ABCD"
assert model(name=valid_name)
def test_cannot_be_longer_than_253_chars(self, model) -> None:
valid_name = "a" * 253
invalid_name = valid_name + "b"
assert model(name=valid_name)
with pytest.raises(ValidationError) as e:
model(name=invalid_name)
assert e
assert {
"loc": ("name",),
"msg": "ensure this value has at most 253 characters",
"type": "value_error.any_str.max_length",
"ctx": {
"limit_value": 253,
},
} in e.value.errors()
def test_can_only_contain_alphanumerics_hyphens_and_dots(self, model) -> None:
valid_name = "abcd1234.-sss"
invalid_name = "abcd1234.-sss_$%!"
assert model(name=valid_name)
with pytest.raises(ValidationError) as e:
model(name=invalid_name)
assert e
assert {
"loc": ("name",),
"msg": f'string does not match regex "{DNSSubdomainName.regex.pattern}"',
"type": "value_error.str.regex",
"ctx": {
"pattern": DNSSubdomainName.regex.pattern,
},
} in e.value.errors()
def test_must_start_with_alphanumeric_character(self, model) -> None:
valid_name = "abcd"
invalid_name = "-abcd"
assert model(name=valid_name)
with pytest.raises(ValidationError) as e:
model(name=invalid_name)
assert e
assert {
"loc": ("name",),
"msg": f'string does not match regex "{DNSSubdomainName.regex.pattern}"',
"type": "value_error.str.regex",
"ctx": {
"pattern": DNSSubdomainName.regex.pattern,
},
} in e.value.errors()
def test_must_end_with_alphanumeric_character(self, model) -> None:
valid_name = "abcd"
invalid_name = "abcd-"
assert model(name=valid_name)
with pytest.raises(ValidationError) as e:
model(name=invalid_name)
assert e
assert {
"loc": ("name",),
"msg": f'string does not match regex "{DNSSubdomainName.regex.pattern}"',
"type": "value_error.str.regex",
"ctx": {
"pattern": DNSSubdomainName.regex.pattern,
},
} in e.value.errors()
class TestDNSLabelName:
@pytest.fixture
def model(self) -> Type[BaseModel]:
class Model(BaseModel):
name: DNSLabelName
return Model
def test_cannot_be_blank(self, model) -> None:
valid_name = "ab"
invalid_name = ""
assert model(name=valid_name)
with pytest.raises(ValidationError) as e:
model(name=invalid_name)
assert e
assert {
"loc": ("name",),
"msg": "ensure this value has at least 1 characters",
"type": "value_error.any_str.min_length",
"ctx": {
"limit_value": 1,
},
} in e.value.errors()
def test_handles_uppercase_chars(self, model) -> None:
valid_name = "ABCD"
assert model(name=valid_name)
def test_cannot_be_longer_than_63_chars(self, model) -> None:
valid_name = "a" * 63
invalid_name = valid_name + "b"
assert model(name=valid_name)
with pytest.raises(ValidationError) as e:
model(name=invalid_name)
assert e
assert {
"loc": ("name",),
"msg": "ensure this value has at most 63 characters",
"type": "value_error.any_str.max_length",
"ctx": {
"limit_value": 63,
},
} in e.value.errors()
def test_can_only_contain_alphanumerics_and_hyphens(self, model) -> None:
valid_name = "abcd1234-sss"
invalid_name = "abcd1234.-sss_$%!"
assert model(name=valid_name)
with pytest.raises(ValidationError) as e:
model(name=invalid_name)
assert e
assert {
"loc": ("name",),
"msg": f'string does not match regex "{DNSLabelName.regex.pattern}"',
"type": "value_error.str.regex",
"ctx": {
"pattern": DNSLabelName.regex.pattern,
},
} in e.value.errors()
def test_must_start_with_alphanumeric_character(self, model) -> None:
valid_name = "abcd"
invalid_name = "-abcd"
assert model(name=valid_name)
with pytest.raises(ValidationError) as e:
model(name=invalid_name)
assert e
assert {
"loc": ("name",),
"msg": f'string does not match regex "{DNSLabelName.regex.pattern}"',
"type": "value_error.str.regex",
"ctx": {
"pattern": DNSLabelName.regex.pattern,
},
} in e.value.errors()
def test_must_end_with_alphanumeric_character(self, model) -> None:
valid_name = "abcd"
invalid_name = "abcd-"
assert model(name=valid_name)
with pytest.raises(ValidationError) as e:
model(name=invalid_name)
assert e
assert {
"loc": ("name",),
"msg": f'string does not match regex "{DNSLabelName.regex.pattern}"',
"type": "value_error.str.regex",
"ctx": {
"pattern": DNSLabelName.regex.pattern,
},
} in e.value.errors()
class TestContainerTagName:
    """Validation tests for the `ContainerTagName` constrained string type."""

    @pytest.fixture
    def model(self) -> Type[BaseModel]:
        """Return a throwaway pydantic model exposing a single tag-name field."""

        class Model(BaseModel):
            name: ContainerTagName

        return Model

    def test_cant_be_more_than_128_characters(self, model) -> None:
        """A 128-character tag validates; 129 characters are rejected."""
        boundary_name = "a" * 128
        assert model(name=boundary_name)
        with pytest.raises(ValidationError) as exc_info:
            model(name=boundary_name + "b")
        assert exc_info
        expected_error = {
            "loc": ("name",),
            "msg": "ensure this value has at most 128 characters",
            "type": "value_error.any_str.max_length",
            "ctx": {"limit_value": 128},
        }
        assert expected_error in exc_info.value.errors()

    @pytest.mark.parametrize(
        "tag_name,valid",
        [
            ("image/tag:v1.0.0", True),
            ("123.123.123.123:123/image/tag:v1.0.0", True),
            ("your-domain.com/image/tag", True),
            ("your-domain.com/image/tag:v1.1.1-patch1", True),
            ("image/tag", True),
            ("image", True),
            ("image:v1.1.1-patch", True),
            (
                "ubuntu@sha256:45b23dee08af5e43a7fea6c4cf9c25ccf269ee113168c19722f87876677c5cb2",
                True,
            ),
            ("-", False),
            (".", False),
        ],
    )
    def test_tags(self, model, tag_name, valid) -> None:
        """Representative tag forms either validate or raise a regex error."""
        if not valid:
            with pytest.raises(ValidationError) as exc_info:
                model(name=tag_name)
            assert exc_info
            expected_error = {
                "loc": ("name",),
                "msg": f'string does not match regex "{ContainerTagName.regex.pattern}"',
                "type": "value_error.str.regex",
                "ctx": {"pattern": ContainerTagName.regex.pattern},
            }
            assert expected_error in exc_info.value.errors()
        else:
            assert model(name=tag_name)
class TestEnvironmentConfiguration:
    # Placeholder: no environment configuration tests implemented yet.
    pass
class TestCommandConfiguration:
    # Placeholder: no command configuration tests implemented yet.
    pass
class TestKubernetesConfiguration:
    """Tests for `KubernetesConfiguration`, focused on cascading common settings
    down to nested deployment configurations and legacy value migration."""

    @pytest.fixture
    def funkytown(self, config: KubernetesConfiguration) -> KubernetesConfiguration:
        # Copy of the base config with a distinct namespace for cascade tests.
        return config.copy(update={"namespace": "funkytown"})

    def test_cascading_defaults(self, config: KubernetesConfiguration) -> None:
        # Verify that by default we get a null namespace
        assert DeploymentConfiguration.__fields__["namespace"].default is None
        assert (
            DeploymentConfiguration(
                name="testing", containers=[], replicas=servo.Replicas(min=0, max=1)
            ).namespace
            is None
        )
        # Verify that we inherit when nested
        assert config.namespace == "default"
        assert config.deployments[0].namespace == "default"

    def test_explicit_cascade(self, config: KubernetesConfiguration) -> None:
        """cascade_common_settings(overwrite=True) pushes parent values onto children."""
        model = config.copy(update={"namespace": "funkytown"})
        assert model.namespace == "funkytown"
        assert model.deployments[0].namespace == "default"
        model.cascade_common_settings(overwrite=True)
        assert model.namespace == "funkytown"
        assert model.deployments[0].namespace == "funkytown"

    def test_respects_explicit_override(self, config: KubernetesConfiguration) -> None:
        # set the property explictly to value equal to default, then trigger
        model = config.copy(update={"namespace": "funkytown"})
        model.deployments[0].namespace = "default"
        assert model.namespace == "funkytown"
        assert model.deployments[0].namespace == "default"
        # Without overwrite=True the child's explicitly-set value is preserved.
        model.cascade_common_settings()
        assert model.namespace == "funkytown"
        assert model.deployments[0].namespace == "default"

    @pytest.mark.parametrize(
        "yaml_path, expected_value",
        [
            # CPU in millicores
            ("deployments[0].containers[0].cpu.min", "250m"),
            ("deployments[0].containers[0].cpu.max", "4"),
            ("deployments[0].containers[0].cpu.step", "125m"),
            # Memory
            ("deployments[0].containers[0].memory.min", "256.0Mi"),
            ("deployments[0].containers[0].memory.max", "4.0Gi"),
            ("deployments[0].containers[0].memory.step", "128.0Mi"),
        ],
    )
    def test_generate_emits_human_readable_values(
        self, yaml_path, expected_value
    ) -> None:
        """Generated config YAML serializes resources as human-readable quantities."""
        # import yamlpath
        config = KubernetesConfiguration.generate()
        # assert yaml_key_path(config.yaml(), key_path) == expected_value
        from yamlpath import Processor, YAMLPath
        from yamlpath.func import get_yaml_editor

        # Process command-line arguments and initialize the output writer
        # args = processcli()
        # log = ConsolePrinter(args)
        # Prep the YAML parser and round-trip editor (tweak to your needs)
        yaml = get_yaml_editor()
        # At this point, you'd load or parse your YAML file, stream, or string. When
        # loading from file, I typically follow this pattern:
        # yaml_data = get_yaml_data(yaml, logger, config.yaml())
        yaml_data = yaml.load(config.yaml())
        assert yaml_data
        # NOTE(review): `logger` is not defined in this method — presumably a
        # module-level import (e.g. loguru's logger); confirm it resolves at runtime.
        processor = Processor(logger, yaml_data)
        path = YAMLPath(yaml_path)
        matches = list(processor.get_nodes(path))
        assert len(matches) == 1, "expected only a single matching node"
        assert matches[0].node == expected_value

    def test_failure_mode_destroy(self) -> None:
        """test that the old 'destroy' setting is converted to 'shutdown'"""
        config = servo.connectors.kubernetes.KubernetesConfiguration(
            namespace="default",
            description="Update the namespace, deployment, etc. to match your Kubernetes cluster",
            on_failure=servo.connectors.kubernetes.FailureMode.destroy,
            deployments=[
                servo.connectors.kubernetes.DeploymentConfiguration(
                    name="fiber-http",
                    replicas=servo.Replicas(
                        min=1,
                        max=2,
                    ),
                    containers=[
                        servo.connectors.kubernetes.ContainerConfiguration(
                            name="fiber-http",
                            cpu=servo.connectors.kubernetes.CPU(
                                min="250m", max="4000m", step="125m"
                            ),
                            memory=servo.connectors.kubernetes.Memory(
                                min="128MiB", max="4.0GiB", step="128MiB"
                            ),
                        )
                    ],
                )
            ],
        )
        # The deprecated `destroy` mode must alias to `shutdown` at both levels.
        assert config.on_failure == FailureMode.shutdown
        assert config.deployments[0].on_failure == FailureMode.shutdown
class TestKubernetesConnector:
    # Placeholder: unit tests for the connector itself live in the integration class below.
    pass
class TestContainerConfiguration:
    # Placeholder: no container configuration tests implemented yet.
    pass
class TestDeploymentConfiguration:
    """Serialization and parsing tests for `DeploymentConfiguration`,
    covering the enum and object forms of the optimization strategy."""

    def test_inheritance_of_default_namespace(self) -> None:
        # TODO: unimplemented placeholder test — currently always passes.
        ...

    def test_strategy_enum(self) -> None:
        """A bare enum strategy serializes as a scalar YAML value."""
        config = DeploymentConfiguration(
            name="testing",
            containers=[],
            replicas=servo.Replicas(min=1, max=4),
            strategy=OptimizationStrategy.default,
        )
        assert config.yaml(exclude_unset=True) == (
            "name: testing\n"
            "containers: []\n"
            "strategy: default\n"
            "replicas:\n"
            "  min: 1\n"
            "  max: 4\n"
        )

    def test_strategy_object_default(self) -> None:
        """A default strategy object serializes as a nested mapping."""
        config = DeploymentConfiguration(
            name="testing",
            containers=[],
            replicas=servo.Replicas(min=1, max=4),
            strategy=DefaultOptimizationStrategyConfiguration(
                type=OptimizationStrategy.default
            ),
        )
        assert config.yaml(exclude_unset=True) == (
            "name: testing\n"
            "containers: []\n"
            "strategy:\n"
            "  type: default\n"
            "replicas:\n"
            "  min: 1\n"
            "  max: 4\n"
        )

    def test_strategy_object_canary(self) -> None:
        """A canary strategy object serializes including its alias."""
        config = DeploymentConfiguration(
            name="testing",
            containers=[],
            replicas=servo.Replicas(min=1, max=4),
            strategy=CanaryOptimizationStrategyConfiguration(
                type=OptimizationStrategy.canary, alias="tuning"
            ),
        )
        assert config.yaml(exclude_unset=True) == (
            "name: testing\n"
            "containers: []\n"
            "strategy:\n"
            "  type: canary\n"
            "  alias: tuning\n"
            "replicas:\n"
            "  min: 1\n"
            "  max: 4\n"
        )

    def test_strategy_object_default_parsing(self) -> None:
        """YAML with `type: default` parses to the default strategy object."""
        config_yaml = (
            "containers: []\n"
            "name: testing\n"
            "replicas:\n"
            "  max: 4\n"
            "  min: 1\n"
            "strategy:\n"
            "  type: default\n"
        )
        config_dict = yaml.load(config_yaml, Loader=yaml.FullLoader)
        config = DeploymentConfiguration.parse_obj(config_dict)
        assert isinstance(config.strategy, DefaultOptimizationStrategyConfiguration)
        assert config.strategy.type == OptimizationStrategy.default

    def test_strategy_object_tuning_parsing(self) -> None:
        """YAML with `type: canary` and no alias parses with alias None."""
        config_yaml = (
            "containers: []\n"
            "name: testing\n"
            "replicas:\n"
            "  max: 4\n"
            "  min: 1\n"
            "strategy:\n"
            "  type: canary\n"
        )
        config_dict = yaml.load(config_yaml, Loader=yaml.FullLoader)
        config = DeploymentConfiguration.parse_obj(config_dict)
        assert isinstance(config.strategy, CanaryOptimizationStrategyConfiguration)
        assert config.strategy.type == OptimizationStrategy.canary
        assert config.strategy.alias is None

    def test_strategy_object_tuning_parsing_with_alias(self) -> None:
        """YAML with `type: canary` and an alias parses the alias through."""
        config_yaml = (
            "containers: []\n"
            "name: testing\n"
            "replicas:\n"
            "  max: 4\n"
            "  min: 1\n"
            "strategy:\n"
            "  alias: tuning\n"
            "  type: canary\n"
        )
        config_dict = yaml.load(config_yaml, Loader=yaml.FullLoader)
        config = DeploymentConfiguration.parse_obj(config_dict)
        assert isinstance(config.strategy, CanaryOptimizationStrategyConfiguration)
        assert config.strategy.type == OptimizationStrategy.canary
        assert config.strategy.alias == "tuning"
class TestCanaryOptimization:
    """Naming tests for `CanaryOptimization` components.

    Both tests are marked xfail; they use `construct` to bypass validation.
    """

    @pytest.mark.xfail
    def test_to_components_default_name(self, config) -> None:
        # Without aliases, target/tuning names derive from deployment + container.
        config.deployments[0].strategy = OptimizationStrategy.canary
        optimization = CanaryOptimization.construct(
            name="fiber-http-deployment/opsani/fiber-http:latest-canary",
            target_deployment_config=config.deployments[0],
            target_container_config=config.deployments[0].containers[0],
        )
        assert (
            optimization.target_name == "fiber-http-deployment/opsani/fiber-http:latest"
        )
        assert (
            optimization.tuning_name
            == "fiber-http-deployment/opsani/fiber-http:latest-canary"
        )

    @pytest.mark.xfail
    def test_to_components_respects_aliases(self, config) -> None:
        # Aliases on the strategy and container override the derived names.
        config.deployments[0].strategy = CanaryOptimizationStrategyConfiguration(
            type=OptimizationStrategy.canary, alias="tuning"
        )
        config.deployments[0].containers[0].alias = "main"
        optimization = CanaryOptimization.construct(
            name="fiber-http-deployment/opsani/fiber-http:latest-canary",
            target_deployment_config=config.deployments[0],
            target_container_config=config.deployments[0].containers[0],
        )
        assert optimization.target_name == "main"
        assert optimization.tuning_name == "tuning"
def test_compare_strategy() -> None:
    """A canary strategy configuration compares equal to the bare enum member."""
    strategy_config = CanaryOptimizationStrategyConfiguration(
        type=OptimizationStrategy.canary, alias="tuning"
    )
    assert strategy_config == OptimizationStrategy.canary
class TestResourceRequirement:
    """Tests for the `ResourceRequirement` enumeration."""

    @pytest.mark.parametrize(
        "requirement, val",
        [
            (ResourceRequirement.limit, "limits"),
            (ResourceRequirement.request, "requests"),
        ],
    )
    def test_resource_key(self, requirement: ResourceRequirement, val) -> None:
        # Each requirement maps to its plural key inside a resources dict.
        assert requirement.resources_key == val
class TestContainer:
    """Tests for the `Container` wrapper around `client.V1Container`,
    covering resource requirement get/set and environment variable access."""

    @pytest.fixture
    def container(self, mocker) -> Container:
        # Container with cpu request+limit, memory request only, and one env var.
        stub_pod = mocker.stub(name="Pod")
        container = Container(client.V1Container(name="fiber-http"), stub_pod)
        resources = client.V1ResourceRequirements()
        resources.requests = {"cpu": "100m", "memory": "3G"}
        resources.limits = {"cpu": "15000m"}
        container.resources = resources
        container.obj.env = [
            client.V1EnvVar(name="TEST1", value="TEST2"),
        ]
        return container

    @pytest.mark.parametrize(
        "resource, requirement, value",
        [
            (
                "cpu",
                None,
                {
                    ResourceRequirement.request: "100m",
                    ResourceRequirement.limit: "15000m",
                },
            ),
            ("cpu", ResourceRequirement.request, "100m"),
            ("cpu", ResourceRequirement.limit, "15000m"),
            (
                "memory",
                None,
                {ResourceRequirement.request: "3G", ResourceRequirement.limit: None},
            ),
            ("memory", ResourceRequirement.request, "3G"),
            ("memory", ResourceRequirement.limit, None),
            (
                "invalid",
                None,
                # Unknown resources yield None for both requirements rather than raising.
                {ResourceRequirement.request: None, ResourceRequirement.limit: None},
            ),
        ],
    )
    def test_get_resource_requirements(
        self,
        container: Container,
        resource: str,
        requirement: ResourceRequirement,
        value,
    ) -> None:
        """get_resource_requirements returns a requirement->value mapping."""
        assert (
            all_requirements := container.get_resource_requirements(resource)
        ) is not None
        if requirement:
            # A specific requirement was requested; check just that entry.
            assert all_requirements.get(requirement) == value
        else:
            assert all_requirements == value

    @pytest.mark.parametrize(
        "resource, value, resources_dict",
        [
            (
                "cpu",
                {
                    ResourceRequirement.request: "100m",
                    ResourceRequirement.limit: "250m",
                },
                {
                    "limits": {"cpu": "250m"},
                    "requests": {"cpu": "100m", "memory": "3G"},
                },
            ),
            (
                "cpu",
                # Setting only the limit leaves the existing request untouched.
                {ResourceRequirement.limit: "500m"},
                {
                    "limits": {"cpu": "500m"},
                    "requests": {"cpu": "100m", "memory": "3G"},
                },
            ),
        ],
    )
    def test_set_resource_requirements(
        self,
        container: Container,
        resource: str,
        value: dict[ResourceRequirement, Optional[str]],
        resources_dict,
    ) -> None:
        """set_resource_requirements updates only the given requirement entries."""
        container.set_resource_requirements(resource, value)
        assert container.resources.to_dict() == resources_dict

    def test_set_resource_requirements_handles_null_requirements_dict(
        self, container: Container
    ):
        # Setting requirements on a container with empty resources must not fail.
        container.resources = client.V1ResourceRequirements()
        container.set_resource_requirements(
            "cpu",
            {ResourceRequirement.request: "1000m", ResourceRequirement.limit: "1000m"},
        )
        assert container.resources.to_dict() == {
            "limits": {"cpu": "1000m"},
            "requests": {"cpu": "1000m"},
        }

    def test_get_environment_variable(self, container: Container):
        # Lookup by name returns the variable's value.
        assert container.get_environment_variable("TEST1") == "TEST2"

    def test_set_environment_variable(self, container: Container):
        # Existing variables are overwritten; new variables are appended.
        container.set_environment_variable("TEST1", "TEST3")
        container.set_environment_variable("TEST4", "TEST5")
        assert container.env == [
            client.V1EnvVar(name="TEST1", value="TEST3"),
            client.V1EnvVar(name="TEST4", value="TEST5"),
        ]
class TestReplicas:
    """Tests for the `servo.Replicas` range setting."""

    @pytest.fixture
    def replicas(self) -> servo.Replicas:
        """A 1..4 replica range with default step."""
        return servo.Replicas(min=1, max=4)

    def test_parsing(self, replicas: servo.Replicas) -> None:
        expected_fields = {
            "name": "replicas",
            "type": "range",
            "min": 1,
            "max": 4,
            "step": 1,
            "unit": None,
            "value": None,
            "pinned": False,
        }
        assert replicas.dict() == expected_fields

    def test_to___opsani_repr__(self, replicas: servo.Replicas) -> None:
        replicas.value = 3
        expected_repr = {
            "replicas": {
                "max": 4,
                "min": 1,
                "step": 1,
                "value": 3,
                "type": "range",
                "pinned": False,
            }
        }
        assert replicas.__opsani_repr__() == expected_repr
class TestCPU:
    """Tests for the `CPU` range setting: parsing, opsani repr, unit
    resolution, JSON serialization, and step alignment validation."""

    @pytest.fixture
    def cpu(self) -> CPU:
        # 125m..4000m in 125m steps.
        return CPU(min="125m", max="4000m", step="125m")

    def test_parsing(self, cpu: CPU) -> None:
        """Field dump preserves millicore strings for min/step and cores for max."""
        assert {
            "name": "cpu",
            "type": "range",
            "min": "125m",
            "max": 4,
            "step": "125m",
            "value": None,
            "unit": "cores",
            "pinned": False,
            "request": None,
            "limit": None,
            "get": [
                ResourceRequirement.request,
                ResourceRequirement.limit,
            ],
            "set": [
                ResourceRequirement.request,
                ResourceRequirement.limit,
            ],
        } == cpu.dict()

    def test_to___opsani_repr__(self, cpu: CPU) -> None:
        """The opsani repr reports all quantities as floats in cores."""
        cpu.value = "3"
        assert cpu.__opsani_repr__() == {
            "cpu": {
                "max": 4.0,
                "min": 0.125,
                "step": 0.125,
                "value": 3.0,
                "unit": "cores",
                "type": "range",
                "pinned": False,
            }
        }

    def test_resolving_equivalent_units(self) -> None:
        # Millicore strings and float cores compare as the same quantity.
        cpu = CPU(min="125m", max=4.0, step=0.125)
        assert cpu.min == 0.125
        assert cpu.max == 4
        assert cpu.step.millicores == 125

    def test_resources_encode_to_json_human_readable(self, cpu) -> None:
        """JSON serialization emits human-readable millicore strings."""
        serialization = json.loads(cpu.json())
        assert serialization["min"] == "125m"
        assert serialization["max"] == "4"
        assert serialization["step"] == "125m"

    def test_cpu_must_be_step_aligned(self) -> None:
        # The max-min span must be an integer multiple of step.
        with pytest.raises(
            ValueError,
            match=re.escape(
                "min/max difference is not step aligned: 3.875 is not a multiple of 250m"
            ),
        ):
            CPU(min="125m", max=4.0, step=0.250)

    def test_min_can_be_less_than_step(self) -> None:
        # Only the span must be step aligned, not min itself.
        CPU(min="125m", max=4.125, step=0.250)
class TestCore:
    """Tests for the `Core` CPU quantity: parsing and string serialization."""

    @pytest.mark.parametrize(
        "input, cores",
        [
            ("100m", 0.1),
            ("1", 1),
            (1, 1),
            ("0.1", 0.1),
            ("0.1", 0.1),
            (2.0, 2),
            ("2.0", 2),
        ],
    )
    def test_parsing(
        self, input: Union[str, int, float], cores: Union[float, int]
    ) -> None:
        # Strings (with or without the 'm' suffix), ints, and floats all parse.
        assert Core.parse(input) == cores

    @pytest.mark.parametrize(
        "input, output",
        [
            ("100m", "100m"),
            ("1", "1"),
            ("1.0", "1"),
            (1, "1"),
            (100, "100"),
            ("0.1", "100m"),
            ("0.1", "100m"),
            (2.5, "2.5"),
            ("2500m", "2.5"),
            ("123m", "123m"),
            # Sub-millicore quantities serialize with micro/nano suffixes.
            ("100u", "100u"),
            ("0.0001", "100u"),
            ("100n", "100n"),
            ("0.0000001", "100n"),
        ],
    )
    def test_string_serialization(
        self, input: Union[str, int, float], output: str
    ) -> None:
        """str() produces the most compact human-readable representation."""
        cores = Core.parse(input)
        assert str(cores) == output
class TestMemory:
    """Tests for the `Memory` range setting: parsing, opsani repr, unit
    resolution, JSON serialization, and step alignment validation."""

    @pytest.fixture
    def memory(self) -> Memory:
        # 0.25..4 GiB in 128 MiB steps.
        return Memory(min="0.25 GiB", max="4.0 GiB", step="128 MiB")

    def test_parsing(self, memory: Memory) -> None:
        """Field dump normalizes all quantities to integer bytes."""
        assert {
            "name": "mem",
            "type": "range",
            "pinned": False,
            "value": None,
            "unit": "GiB",
            "min": 268435456,
            "max": 4294967296,
            "step": 134217728,
            "request": None,
            "limit": None,
            "get": [
                ResourceRequirement.request,
                ResourceRequirement.limit,
            ],
            "set": [
                ResourceRequirement.request,
                ResourceRequirement.limit,
            ],
        } == memory.dict()

    def test_to___opsani_repr__(self, memory: Memory) -> None:
        """The opsani repr reports all quantities as floats in GiB."""
        memory.value = "3.0 GiB"
        assert memory.__opsani_repr__() == {
            "mem": {
                "max": 4.0,
                "min": 0.25,
                "step": 0.125,
                "value": 3.0,
                "unit": "GiB",
                "type": "range",
                "pinned": False,
            }
        }

    def test_handling_float_input(self) -> None:
        # Bare floats are interpreted as GiB quantities.
        memory = Memory(min=0.5, max=4.0, step=0.125, value="3.0 GiB")
        assert memory.__opsani_repr__() == {
            "mem": {
                "max": 4.0,
                "min": 0.5,
                "step": 0.125,
                "value": 3.0,
                "unit": "GiB",
                "type": "range",
                "pinned": False,
            }
        }

    def test_resolving_equivalent_units(self) -> None:
        # Byte counts, floats, and suffixed strings resolve to the same values.
        memory = Memory(min=268435456, max=4.0, step="128 MiB")
        assert memory.min == 268435456
        assert memory.max == 4294967296
        assert memory.step == 134217728

    def test_resources_encode_to_json_human_readable(self, memory) -> None:
        """JSON serialization emits human-readable binary-suffix strings."""
        serialization = json.loads(memory.json())
        assert serialization["min"] == "256.0Mi"
        assert serialization["max"] == "4.0Gi"
        assert serialization["step"] == "128.0Mi"

    def test_mem_must_be_step_aligned(self) -> None:
        # The max-min span must be an integer multiple of step.
        with pytest.raises(
            ValueError,
            match=re.escape(
                "min/max difference is not step aligned: 3.96875Gi is not a multiple of 256Mi"
            ),
        ):
            Memory(min="32 MiB", max=4.0, step="256MiB")

    def test_min_can_be_less_than_step(self) -> None:
        # Only the span must be step aligned, not min itself.
        Memory(min="32 MiB", max=4.03125, step="256MiB")
def test_millicpu():
    """Exercise millicore parsing, formatting, and cross-representation equality."""

    class Model(pydantic.BaseModel):
        cpu: Core

    # Fractional/integer cores and millicore strings all convert to millicores.
    for raw, millicores in ((0.1, 100), (0.5, 500), (1, 1000), ("100m", 100)):
        assert Model(cpu=raw).cpu.millicores == millicores
    # Format-spec and float conversions round-trip.
    assert "{0:m}".format(Model(cpu=1.5).cpu) == "1500m"
    assert float(Model(cpu=1.5).cpu) == 1.5
    # Equality holds across representations in both directions.
    assert Model(cpu=0.1).cpu == "100m"
    assert Model(cpu="100m").cpu == 0.1
@pytest.fixture
def tuning_config(config) -> KubernetesConfiguration:
    """Clone the base config with every deployment switched to the canary strategy."""
    cloned_config = config.copy()
    for deployment in cloned_config.deployments:
        deployment.strategy = "canary"
    return cloned_config
@pytest.fixture
def namespace() -> str:
    """Kubernetes namespace targeted by the module-level config fixture."""
    return "default"
@pytest.fixture
def config(namespace: str) -> KubernetesConfiguration:
    """Base connector configuration: one fiber-http deployment with a single
    container exposing cpu, memory, and an INIT_MEMORY_SIZE enum setting."""
    return KubernetesConfiguration(
        namespace=namespace,
        deployments=[
            DeploymentConfiguration(
                name="fiber-http",
                replicas=servo.Replicas(
                    min=1,
                    max=4,
                ),
                containers=[
                    ContainerConfiguration(
                        name="fiber-http",
                        cpu=CPU(min="125m", max="875m", step="125m"),
                        memory=Memory(min="128MiB", max="0.75GiB", step="32MiB"),
                        env=[
                            EnvironmentEnumSetting(
                                name="INIT_MEMORY_SIZE",
                                values=["32MB", "64MB", "128MB"],
                            )
                        ],
                    )
                ],
            )
        ],
    )
@pytest.mark.integration
@pytest.mark.usefixtures("kubernetes_asyncio_config")
@pytest.mark.applymanifests("../manifests", files=["fiber-http-opsani-dev.yaml"])
class TestKubernetesConnectorIntegration:
    @pytest.fixture(autouse=True)
    async def _wait_for_manifests(self, kube, config):
        # Block until the applied manifests register with the cluster, then
        # stretch the timeout so slow cluster operations don't flake tests.
        kube.wait_for_registered()
        config.timeout = "5m"

    @pytest.fixture
    def namespace(self, kube: kubetest.client.TestClient) -> str:
        # Override the module fixture with the ephemeral kubetest namespace.
        return kube.namespace
    async def test_describe(self, config) -> None:
        """describe() reports the deployed container's initial cpu/mem/replicas."""
        connector = KubernetesConnector(config=config)
        description = await connector.describe()
        assert description.get_setting("fiber-http/fiber-http.cpu").value == 0.125
        assert (
            description.get_setting("fiber-http/fiber-http.mem").human_readable_value
            == "128.0Mi"
        )
        assert description.get_setting("fiber-http/fiber-http.replicas").value == 1

    async def test_adjust_cpu(self, config):
        """An in-range cpu adjustment is applied and visible on re-describe."""
        connector = KubernetesConnector(config=config)
        adjustment = Adjustment(
            component_name="fiber-http/fiber-http",
            setting_name="cpu",
            value=".150",
        )
        description = await connector.adjust([adjustment])
        assert description is not None
        setting = description.get_setting("fiber-http/fiber-http.cpu")
        assert setting
        assert setting.value == 0.15
        # Describe it again and make sure it matches
        description = await connector.describe()
        assert description.get_setting("fiber-http/fiber-http.cpu").value == 0.15

    async def test_adjust_cpu_out_of_range(self, config):
        """A below-min cpu adjustment (.100 < min 125m) is still applied as given."""
        connector = KubernetesConnector(config=config)
        adjustment = Adjustment(
            component_name="fiber-http/fiber-http",
            setting_name="cpu",
            value=".100",
        )
        description = await connector.adjust([adjustment])
        assert description is not None
        setting = description.get_setting("fiber-http/fiber-http.cpu")
        assert setting
        assert setting.value == 0.1
        # Describe it again and make sure it matches
        description = await connector.describe()
        assert description.get_setting("fiber-http/fiber-http.cpu").value == 0.1
    async def test_adjust_env(self, config: KubernetesConfiguration) -> None:
        """An environment enum adjustment round-trips through adjust()."""
        connector = KubernetesConnector(config=config)
        adjustment = Adjustment(
            component_name="fiber-http/fiber-http",
            setting_name="INIT_MEMORY_SIZE",
            value="64MB",
        )
        control = servo.Control(settlement="1s")
        description = await connector.adjust([adjustment], control)
        assert description is not None
        setting = description.get_setting("fiber-http/fiber-http.INIT_MEMORY_SIZE")
        assert setting
        assert setting.value == "64MB"

    async def test_adjust_cpu_with_settlement(self, config):
        """A cpu adjustment with a settlement period still reports the new value."""
        connector = KubernetesConnector(config=config)
        adjustment = Adjustment(
            component_name="fiber-http/fiber-http",
            setting_name="cpu",
            value=".250",
        )
        control = servo.Control(settlement="1s")
        description = await connector.adjust([adjustment], control)
        assert description is not None
        setting = description.get_setting("fiber-http/fiber-http.cpu")
        assert setting
        assert setting.value == 0.25

    async def test_adjust_cpu_at_non_zero_container_index(self, config):
        """Adjustment targets the named container even when it is not at index 0."""
        # Inject a sidecar at index zero
        deployment = await servo.connectors.kubernetes.Deployment.read(
            "fiber-http", config.namespace
        )
        assert (
            deployment
        ), f"failed loading deployment 'fiber-http' in namespace '{config.namespace}'"
        async with deployment.rollout(timeout=config.timeout) as deployment_update:
            await deployment_update.inject_sidecar(
                "opsani-envoy",
                "opsani/envoy-proxy:latest",
                port="8480",
                service_port=8091,
                index=0,
            )
        connector = KubernetesConnector(config=config)
        adjustment = Adjustment(
            component_name="fiber-http/fiber-http",
            setting_name="cpu",
            value=".250",
        )
        control = servo.Control(settlement="1s")
        description = await connector.adjust([adjustment], control)
        assert description is not None
        setting = description.get_setting("fiber-http/fiber-http.cpu")
        assert setting
        assert setting.value == 0.25
        # Describe it again and make sure it matches
        description = await connector.describe()
        assert description.get_setting("fiber-http/fiber-http.cpu").value == 0.25
    async def test_adjust_cpu_matchlabels_dont_match_metadata_labels(
        self, config, kube: kubetest.client.TestClient
    ):
        """Adjustment succeeds even when pod metadata labels diverge from the
        deployment's match_labels selector."""
        deployments = kube.get_deployments()
        target_deploy = deployments.get("fiber-http")
        assert target_deploy is not None
        # Update metadata labels so they don't match the match_labels selector
        target_deploy.obj.metadata.labels["app.kubernetes.io/name"] = "web"
        target_deploy.api_client.patch_namespaced_deployment(
            target_deploy.name, target_deploy.namespace, target_deploy.obj
        )
        kube.wait_for_registered()
        # Shorten the timeout; the adjustment should complete well within it.
        config.timeout = "15s"
        config.cascade_common_settings(overwrite=True)
        connector = KubernetesConnector(config=config)
        adjustment = Adjustment(
            component_name="fiber-http/fiber-http",
            setting_name="cpu",
            value=".150",
        )
        description = await connector.adjust([adjustment])
        assert description is not None
        setting = description.get_setting("fiber-http/fiber-http.cpu")
        assert setting
        assert setting.value == 0.15
        # Describe it again and make sure it matches
        description = await connector.describe()
        assert description.get_setting("fiber-http/fiber-http.cpu").value == 0.15

    async def test_adjust_memory(self, config):
        """An in-range memory adjustment is applied (700Mi == 734003200 bytes)."""
        connector = KubernetesConnector(config=config)
        adjustment = Adjustment(
            component_name="fiber-http/fiber-http",
            setting_name="mem",
            value="700Mi",
        )
        description = await connector.adjust([adjustment])
        assert description is not None
        setting = description.get_setting("fiber-http/fiber-http.mem")
        assert setting
        assert setting.value == 734003200
        # Get deployment and check the pods
        # deployment = await Deployment.read("web", "default")
        # debug(deployment)
        # debug(deployment.obj.spec.template.spec.containers)

    async def test_adjust_memory_out_of_range(self, config):
        """A below-min memory adjustment (64Mi < min 128MiB) is applied as given."""
        connector = KubernetesConnector(config=config)
        adjustment = Adjustment(
            component_name="fiber-http/fiber-http",
            setting_name="mem",
            value="64Mi",
        )
        description = await connector.adjust([adjustment])
        assert description is not None
        setting = description.get_setting("fiber-http/fiber-http.mem")
        assert setting
        assert setting.value == 67108864
        # Describe it again and make sure it matches
        description = await connector.describe()
        assert description.get_setting("fiber-http/fiber-http.mem").value == 67108864
    async def test_adjust_deployment_insufficient_resources(
        self, config: KubernetesConfiguration
    ):
        """An unschedulable memory request is rejected with a clear reason."""
        # Fail fast: the pod can never schedule, so don't wait the full timeout.
        config.timeout = "3s"
        config.cascade_common_settings(overwrite=True)
        config.deployments[0].containers[0].memory.max = "256Gi"
        connector = KubernetesConnector(config=config)
        adjustment = Adjustment(
            component_name="fiber-http/fiber-http",
            setting_name="mem",
            value="128Gi",
        )
        with pytest.raises(
            AdjustmentRejectedError,
            match=(
                re.escape(
                    "Requested adjustment(s) (fiber-http/fiber-http.mem=128Gi) cannot be scheduled due to "
                )
                + r"\"\d+/\d+ nodes are available: \d+ Insufficient memory\.\""
            ),
        ) as rejection_info:
            await connector.adjust([adjustment])
        # Validate the correct error was raised, re-raise if not for additional debugging context
        try:
            assert rejection_info.value.reason == "unschedulable"
        except AssertionError as e:
            raise e from rejection_info.value

    async def test_adjust_deployment_image_pull_backoff(
        self,
        config: KubernetesConfiguration,
        mocker: pytest_mock.MockerFixture,
    ) -> None:
        """A nonexistent container image surfaces as an AdjustmentFailedError."""
        servo.logging.set_level("TRACE")
        config.timeout = "10s"
        config.cascade_common_settings(overwrite=True)
        connector = KubernetesConnector(config=config)
        adjustment = Adjustment(
            component_name="fiber-http/fiber-http",
            setting_name="mem",
            value="256Mi",
        )
        # Patch the image property so every container references an unpullable tag.
        mocker.patch(
            "kubernetes_asyncio.client.models.v1_container.V1Container.image",
            new_callable=mocker.PropertyMock,
            return_value="opsani/bababooey:latest",
        )
        with pytest.raises(
            AdjustmentFailedError, match="Container image pull failure detected"
        ):
            await connector.adjust([adjustment])

    async def test_adjust_replicas(self, config):
        """A replicas adjustment is applied and reported back."""
        connector = KubernetesConnector(config=config)
        adjustment = Adjustment(
            component_name="fiber-http/fiber-http",
            setting_name="replicas",
            value="2",
        )
        description = await connector.adjust([adjustment])
        assert description is not None
        setting = description.get_setting("fiber-http/fiber-http.replicas")
        assert setting
        assert setting.value == 2

    async def test_read_pod(self, config, kube) -> None:
        """Pod.read loads an existing pod from the test namespace by name."""
        connector = KubernetesConnector(config=config)
        pods = kube.get_pods()
        pod_name = next(iter(pods.keys()))
        assert pod_name.startswith("fiber-http")
        pod = await Pod.read(pod_name, kube.namespace)
        assert pod

    ##
    # Canary Tests
    async def test_create_tuning(
        self, tuning_config: KubernetesConfiguration, kube: kubetest.client.TestClient
    ) -> None:
        """Describing under canary strategy creates the tuning pod and reports
        both the main (pinned) and tuning components, with config env vars
        overriding same-named deployment env vars."""
        # verify existing env vars are overriden by config var with same name
        main_dep = kube.get_deployments()["fiber-http"]
        main_dep.obj.spec.template.spec.containers[0].env = [
            kubernetes.client.models.V1EnvVar(name="FOO", value="BAZ")
        ]
        main_dep.api_client.patch_namespaced_deployment(
            main_dep.name, main_dep.namespace, main_dep.obj
        )
        tuning_config.deployments[0].containers[0].static_environment_variables = {
            "FOO": "BAR"
        }
        connector = KubernetesConnector(config=tuning_config)
        description = await connector.describe()
        # Main component settings are pinned; tuning component settings are not.
        assert description == Description(
            components=[
                Component(
                    name="fiber-http/fiber-http",
                    settings=[
                        CPU(
                            name="cpu",
                            type="range",
                            pinned=True,
                            value="125m",
                            min="125m",
                            max="875m",
                            step="125m",
                            request="125m",
                            limit="125m",
                            get=["request", "limit"],
                            set=["request", "limit"],
                        ),
                        Memory(
                            name="mem",
                            type="range",
                            pinned=True,
                            value=134217728,
                            min=134217728,
                            max=805306368,
                            step=33554432,
                            request=134217728,
                            limit=134217728,
                            get=["request", "limit"],
                            set=["request", "limit"],
                        ),
                        Replicas(
                            name="replicas",
                            type="range",
                            pinned=True,
                            value=1,
                            min=0,
                            max=99999,
                            step=1,
                        ),
                        EnvironmentEnumSetting(
                            name="INIT_MEMORY_SIZE",
                            type="enum",
                            pinned=True,
                            values=["32MB", "64MB", "128MB"],
                            value="32MB",
                        ),
                    ],
                ),
                Component(
                    name="fiber-http/fiber-http-tuning",
                    settings=[
                        CPU(
                            name="cpu",
                            type="range",
                            pinned=False,
                            value="125m",
                            min="125m",
                            max="875m",
                            step="125m",
                            request="125m",
                            limit="125m",
                            get=["request", "limit"],
                            set=["request", "limit"],
                        ),
                        Memory(
                            name="mem",
                            type="range",
                            pinned=False,
                            value=134217728,
                            min=134217728,
                            max=805306368,
                            step=33554432,
                            request=134217728,
                            limit=134217728,
                            get=["request", "limit"],
                            set=["request", "limit"],
                        ),
                        Replicas(
                            name="replicas",
                            type="range",
                            pinned=True,
                            value=1,
                            min=0,
                            max=1,
                            step=1,
                        ),
                        EnvironmentEnumSetting(
                            name="INIT_MEMORY_SIZE",
                            type="enum",
                            pinned=False,
                            values=["32MB", "64MB", "128MB"],
                            value="32MB",
                        ),
                    ],
                ),
            ]
        )
        # The tuning pod carries the opsani annotations/labels and mirrors the
        # main container's resources, with the config env override applied.
        tuning_pod = kube.get_pods()["fiber-http-tuning"]
        assert (
            tuning_pod.obj.metadata.annotations["opsani.com/opsani_tuning_for"]
            == "fiber-http/fiber-http-tuning"
        )
        assert tuning_pod.obj.metadata.labels["opsani_role"] == "tuning"
        target_container = next(
            filter(lambda c: c.name == "fiber-http", tuning_pod.obj.spec.containers)
        )
        assert target_container.resources.requests == {"cpu": "125m", "memory": "128Mi"}
        assert target_container.resources.limits == {"cpu": "125m", "memory": "128Mi"}
        assert target_container.env == [
            kubernetes.client.models.V1EnvVar(name="INIT_MEMORY_SIZE", value="32MB"),
            kubernetes.client.models.V1EnvVar(name="FOO", value="BAR"),
        ]
    async def test_adjust_tuning_insufficient_mem(
        self, tuning_config: KubernetesConfiguration
    ) -> None:
        """An unschedulable tuning-pod memory request is rejected with reason
        'unschedulable'; xfails when the condition is too slow to appear."""
        tuning_config.timeout = "10s"
        tuning_config.cascade_common_settings(overwrite=True)
        # Raise the configured max so the impossible value passes validation.
        tuning_config.deployments[0].containers[0].memory = Memory(
            min="128MiB", max="128GiB", step="32MiB"
        )
        connector = KubernetesConnector(config=tuning_config)
        adjustment = Adjustment(
            component_name="fiber-http/fiber-http-tuning",
            setting_name="mem",
            value="128Gi",  # impossible right?
        )
        try:
            with pytest.raises(
                AdjustmentRejectedError,
                match=(
                    re.escape(
                        "Requested adjustment(s) (fiber-http/fiber-http-tuning.mem=128Gi) cannot be scheduled due to "
                    )
                    + r"\"\d+/\d+ nodes are available: \d+ Insufficient memory\.\""
                ),
            ) as rejection_info:
                await connector.adjust([adjustment])
        except AssertionError as ae:
            # pytest.raises match failure: the pod reported ContainersNotReady
            # before the scheduler surfaced the Unschedulable condition.
            if "does not match '(reason ContainersNotReady)" in str(ae):
                pytest.xfail("Unschedulable condition took too long to show up")
        # Validate the correct error was raised, re-raise if not for additional debugging context
        try:
            assert rejection_info.value.reason == "unschedulable"
        except AssertionError as e:
            raise e from rejection_info.value

    async def test_adjust_tuning_insufficient_cpu_and_mem(
        self, tuning_config: KubernetesConfiguration
    ) -> None:
        """Combined unschedulable cpu+mem adjustments are rejected together."""
        tuning_config.timeout = "10s"
        tuning_config.cascade_common_settings(overwrite=True)
        # Raise the configured maxima so the impossible values pass validation.
        tuning_config.deployments[0].containers[0].memory = Memory(
            min="128MiB", max="128GiB", step="32MiB"
        )
        tuning_config.deployments[0].containers[0].cpu = CPU(
            min="125m", max="200", step="125m"
        )
        connector = KubernetesConnector(config=tuning_config)
        adjustments = [
            Adjustment(
                component_name="fiber-http/fiber-http-tuning",
                setting_name="mem",
                value="128Gi",  # impossible right?
            ),
            Adjustment(
                component_name="fiber-http/fiber-http-tuning",
                setting_name="cpu",
                value="100",  # impossible right?
            ),
        ]
        with pytest.raises(
            AdjustmentRejectedError,
            match=(
                re.escape(
                    "Requested adjustment(s) (fiber-http/fiber-http-tuning.mem=128Gi, fiber-http/fiber-http-tuning.cpu=100) cannot be scheduled due to "
                )
                + r"\"\d+/\d+ nodes are available: \d+ Insufficient cpu\, \d+ Insufficient memory\.\""
            ),
        ) as rejection_info:
            await connector.adjust(adjustments)
        # Validate the correct error was raised, re-raise if not for additional debugging context
        try:
            assert rejection_info.value.reason == "unschedulable"
        except AssertionError as e:
            raise e from rejection_info.value
async def test_create_tuning_image_pull_backoff(
self,
tuning_config: KubernetesConfiguration,
mocker: pytest_mock.MockerFixture,
kube,
) -> None:
tuning_config.timeout = "10s"
tuning_config.cascade_common_settings(overwrite=True)
connector = KubernetesConnector(config=tuning_config)
mocker.patch(
"kubernetes_asyncio.client.models.v1_container.V1Container.image",
new_callable=mocker.PropertyMock,
return_value="opsani/bababooey:latest",
)
# NOTE: describe logic currently invokes the same creation as adjust and allows for a faster test.
# If tuning creation is removed from describe this test will need to be refactored and have a longer timeout and runtime
try:
await connector.describe()
except AdjustmentFailedError as e:
if "Container image pull failure detected" in str(e):
pass
elif "Unknown Pod status for 'fiber-http-tuning'" in str(e):
# Catchall triggered
pytest.xfail("Pod status update took too long")
    async def test_bad_request_error_handled_gracefully(
        self, tuning_config: KubernetesConfiguration, mocker: pytest_mock.MockerFixture
    ) -> None:
        """Verify a failure to create a pod is not poorly handled in the handle_error destroy logic"""
        # Passing in an intentionally mangled memory setting to trigger an API error that prevents pod creation
        # (validate_assignment is disabled so the bogus quantity survives until the API rejects it)
        mocker.patch(
            "servo.connectors.kubernetes.Memory.__config__.validate_assignment",
            new_callable=mocker.PropertyMock(return_value=False),
        )
        mocker.patch(
            "servo.connectors.kubernetes._normalize_adjustment",
            return_value=("memory", "256.0MiBGiB"),
        )
        tuning_config.deployments[0].on_failure = FailureMode.rollback
        connector = KubernetesConnector(config=tuning_config)
        adjustment = Adjustment(
            component_name="fiber-http/fiber-http-tuning",
            setting_name="mem",
            value="256Mi",
        )
        # Catch info log messages (level=10 captures DEBUG and above)
        messages = []
        connector.logger.add(lambda m: messages.append(m.record["message"]), level=10)
        with pytest.raises(kubernetes_asyncio.client.exceptions.ApiException) as error:
            await connector.adjust([adjustment])
        # Check logs: destroy path must notice there is no tuning pod rather than crash
        assert "no tuning pod exists, ignoring destroy" in messages[-30:]
        # Check error: the original 400 from the API server is propagated
        assert "quantities must match the regular expression" in str(error.value)
        assert error.value.status == 400
async def test_adjust_tuning_cpu_with_settlement(
self, tuning_config, namespace, kube
):
connector = KubernetesConnector(config=tuning_config)
adjustment = Adjustment(
component_name="fiber-http/fiber-http-tuning",
setting_name="cpu",
value=".250",
)
control = servo.Control(settlement="50ms")
description = await connector.adjust([adjustment], control)
assert description is not None
setting = description.get_setting("fiber-http/fiber-http-tuning.cpu")
assert setting
assert setting.value == 0.25
    async def test_adjust_tuning_env(self, tuning_config: KubernetesConfiguration):
        # Adjust an environment-variable setting (INIT_MEMORY_SIZE) on the tuning
        # pod and verify the returned description reports the new value verbatim.
        connector = KubernetesConnector(config=tuning_config)
        adjustment = Adjustment(
            component_name="fiber-http/fiber-http-tuning",
            setting_name="INIT_MEMORY_SIZE",
            value="64MB",
        )
        control = servo.Control(settlement="50ms")
        description = await connector.adjust([adjustment], control)
        assert description is not None
        setting = description.get_setting(
            "fiber-http/fiber-http-tuning.INIT_MEMORY_SIZE"
        )
        assert setting
        assert setting.value == "64MB"
    async def test_adjust_handle_error_respects_nested_config(
        self, config: KubernetesConfiguration, kube: kubetest.client.TestClient
    ):
        # The deployment-level on_failure (exception) must override the top-level
        # on_failure (shutdown) when an adjustment is rejected.
        config.timeout = "3s"
        config.on_failure = FailureMode.shutdown
        config.cascade_common_settings(overwrite=True)
        # Deployment-level override set AFTER cascading so it survives
        config.deployments[0].on_failure = FailureMode.exception
        config.deployments[0].containers[0].memory.max = "256Gi"
        connector = KubernetesConnector(config=config)
        adjustment = Adjustment(
            component_name="fiber-http/fiber-http",
            setting_name="mem",
            value="128Gi",
        )
        with pytest.raises(
            AdjustmentRejectedError, match="Insufficient memory."
        ) as rejection_info:
            description = await connector.adjust([adjustment])
            debug(description)
        deployment = await Deployment.read("fiber-http", kube.namespace)
        # check deployment was not scaled to 0 replicas (i.e., the outer-level 'shutdown' was overridden)
        assert deployment.obj.spec.replicas != 0
    async def test_adjust_tuning_cpu_out_of_range(self, tuning_config):
        # Adjust tuning CPU to .100 (below the 125m configured minimum -- the
        # connector appears to accept out-of-range values; TODO confirm intended
        # semantics) and verify both adjust and a follow-up describe report it.
        connector = KubernetesConnector(config=tuning_config)
        adjustment = Adjustment(
            component_name="fiber-http/fiber-http-tuning",
            setting_name="cpu",
            value=".100",
        )
        description = await connector.adjust([adjustment])
        assert description is not None
        setting = description.get_setting("fiber-http/fiber-http-tuning.cpu")
        assert setting
        assert setting.value == 0.1
        # Describe it again and make sure it matches
        description = await connector.describe()
        assert description.get_setting("fiber-http/fiber-http-tuning.cpu").value == 0.1
    async def test_adjust_tuning_memory_out_of_range(self, tuning_config):
        # Adjust tuning memory to 64Mi (below the configured minimum) and verify
        # both the adjust result and a follow-up describe report 64Mi (67108864 bytes).
        connector = KubernetesConnector(config=tuning_config)
        adjustment = Adjustment(
            component_name="fiber-http/fiber-http-tuning",
            setting_name="mem",
            value="64Mi",
        )
        description = await connector.adjust([adjustment])
        assert description is not None
        setting = description.get_setting("fiber-http/fiber-http-tuning.mem")
        assert setting
        assert setting.value == 67108864
        # Describe it again and make sure it matches
        description = await connector.describe()
        assert (
            description.get_setting("fiber-http/fiber-http-tuning.mem").value
            == 67108864
        )
# async def test_apply_no_changes(self):
# # resource_version stays the same and early exits
# pass
#
#
# async def test_apply_metadata_changes(self):
# # Update labels or something that doesn't matter
# # Detect by never getting a progressing event
# pass
#
#
# async def test_apply_replica_change(self):
# # bump the count, observed_generation goes up
# # wait for the counts to settle
# ...
#
#
# async def test_apply_memory_change(self):
# # bump the count, observed_generation goes up
# # wait for the counts to settle
# ...
#
#
# async def test_apply_cpu_change(self):
# # bump the count, observed_generation goes up
# # wait for the counts to settle
# ...
#
#
# async def test_apply_unschedulable_memory_request(self):
# # bump the count, observed_generation goes up
# # wait for the counts to settle
# ...
#
#
# async def test_apply_restart_strategy(self):
# # Make sure we can watch a non-rolling update
# # .spec.strategy specifies the strategy used to replace old Pods by new ones. .spec.strategy.type can be "Recreate" or "RollingUpdate". "RollingUpdate" is the default value.
# # Recreate Deployment
# ...
# TODO: Put a fiber-http deployment live. Create a config and describe it.
# TODO: Test talking to multiple namespaces. Test kubeconfig file
# Test describe an empty config.
# Version ID checks
# Timeouts, Encoders, refresh, ready
# Add watch, test create, read, delete, patch
# TODO: settlement time, recovery behavior (rollback, delete), "adjust_on"?, restart detection
# TODO: wait/watch tests with conditionals...
# TODO: Test cases will be: change memory, change cpu, change replica count.
# Test setting limit and request independently
# Detect scheduling error
# TODO: We want to compute progress by looking at observed generation,
# then watching as all the replicas are updated until the counts match
# If we never see a progressing condition, then whatever we did
# did not affect the deployment
# Handle: CreateContainerError
    async def test_checks(self, config: KubernetesConfiguration):
        # Smoke test: the full KubernetesChecks suite runs without raising.
        await KubernetesChecks.run(config)
# Deployment readiness check was returning false positives, guard against regression
    async def test_check_deployment_readiness_failure(
        self, config: KubernetesConfiguration, kube: kubetest.client.TestClient
    ):
        # Force fiber-http into an unready state via an always-failing exec
        # readiness probe, then verify the readiness check reports failure.
        deployments = kube.get_deployments()
        target_deploy = deployments.get("fiber-http")
        assert target_deploy is not None
        target_container = next(
            filter(
                lambda c: c.name == "fiber-http",
                target_deploy.obj.spec.template.spec.containers,
            )
        )
        assert target_container is not None
        # Update to put deployment in unready state
        target_container.readiness_probe = kubernetes.client.models.V1Probe(
            _exec=kubernetes.client.models.V1ExecAction(command=["exit", "1"]),
            failure_threshold=1,
        )
        # max_surge 0% prevents a ready surge pod from masking the unready rollout
        target_deploy.obj.spec.strategy.rolling_update.max_surge = "0%"
        target_deploy.api_client.patch_namespaced_deployment(
            target_deploy.name, target_deploy.namespace, target_deploy.obj
        )
        # Poll until the rollout makes the deployment unready
        while target_deploy.is_ready():
            await asyncio.sleep(0.1)
        result = await KubernetesChecks(config).run_one(
            id="check_kubernetes_deployments_are_ready_item_0"
        )
        assert (
            result.success == False
            and result.message
            == 'caught exception (RuntimeError): Deployment "fiber-http" is not ready'
        )
##
# Rejection Tests using modified deployment, skips the standard manifest application
@pytest.mark.integration
@pytest.mark.usefixtures("kubernetes_asyncio_config")
class TestKubernetesConnectorIntegrationUnreadyCmd:
    @pytest.fixture
    def namespace(self, kube: kubetest.client.TestClient) -> str:
        # Name of the kubetest-managed test namespace.
        return kube.namespace
    @pytest.fixture
    def config(self, config: KubernetesConfiguration) -> KubernetesConfiguration:
        # Enrich rejection errors with container logs for easier debugging.
        config.container_logs_in_error_status = True
        return config
    @pytest.fixture
    def kubetest_deployment(
        self, kube: kubetest.client.TestClient, rootpath: pathlib.Path
    ) -> KubetestDeployment:
        # Base fiber-http deployment with pinned 256Mi memory and an HTTP readiness
        # probe; NOT created here -- derived fixtures mutate it and then create it.
        deployment = kube.load_deployment(
            rootpath.joinpath("tests/manifests/fiber-http-opsani-dev.yaml")
        )
        deployment.obj.spec.template.spec.termination_grace_period_seconds = 10
        fiber_container = deployment.obj.spec.template.spec.containers[0]
        fiber_container.resources.requests["memory"] = "256Mi"
        fiber_container.resources.limits["memory"] = "256Mi"
        fiber_container.readiness_probe = kubernetes.client.models.V1Probe(
            failure_threshold=3,
            http_get=kubernetes.client.models.V1HTTPGetAction(
                path="/",
                port=9980,
                scheme="HTTP",
            ),
            initial_delay_seconds=1,
            period_seconds=5,
            success_threshold=1,
            timeout_seconds=1,
        )
        return deployment
    @pytest.fixture
    def kubetest_deployment_never_ready(
        self, kubetest_deployment: KubetestDeployment
    ) -> KubetestDeployment:
        # Replace the entrypoint with a shell conditional: with the baseline 256Mi
        # limit the app runs; below ~192Mi it sleeps and never becomes ready.
        fiber_container = kubetest_deployment.obj.spec.template.spec.containers[0]
        fiber_container.command = ["/bin/sh"]
        # Simulate a deployment which fails to start when memory adjusted to < 192Mi
        fiber_container.args = [
            "-c",
            "if [ $(cat /sys/fs/cgroup/memory/memory.limit_in_bytes) -gt 201326592 ]; then /bin/fiber-http; else sleep 1d; fi",
        ]
        kubetest_deployment.create()
        kubetest_deployment.wait_until_ready(timeout=30)
        return kubetest_deployment
    @pytest.fixture
    def kubetest_deployemnt_oom_killed(
        self, kubetest_deployment: KubetestDeployment
    ) -> KubetestDeployment:
        # NOTE(review): fixture name misspells "deployment"; it is referenced by
        # multiple tests, so a rename must update all usages together.
        # `tail /dev/zero` grows memory without bound, guaranteeing an OOMKill
        # once the limit drops below ~192Mi.
        fiber_container = kubetest_deployment.obj.spec.template.spec.containers[0]
        fiber_container.command = ["/bin/sh"]
        # Simulate a deployment which will be OOMKilled when memory adjusted to < 192Mi
        fiber_container.args = [
            "-c",
            (
                "if [ $(cat /sys/fs/cgroup/memory/memory.limit_in_bytes) -gt 201326592 ]; "
                "then /bin/fiber-http; "
                "else tail /dev/zero; "
                "fi"
            ),
        ]
        kubetest_deployment.create()
        kubetest_deployment.wait_until_ready(timeout=30)
        return kubetest_deployment
    @pytest.fixture
    def kubetest_deployment_becomes_unready(
        self, kubetest_deployment: KubetestDeployment
    ) -> KubetestDeployment:
        # With a reduced memory limit the app starts (passing initial readiness)
        # and is killed ~10s later, so failures only surface during settlement.
        fiber_container = kubetest_deployment.obj.spec.template.spec.containers[0]
        fiber_container.command = ["/bin/sh"]
        # Simulate a deployment which passes initial readiness checks when memory adjusted to < 192Mi then fails them a short time later
        fiber_container.args = [
            "-c",
            (
                "if [ $(cat /sys/fs/cgroup/memory/memory.limit_in_bytes) -gt 201326592 ]; "
                "then /bin/fiber-http; "
                "else (/bin/fiber-http &); sleep 10s; kill $(jobs -p '%/bin/fiber-http'); "
                "fi"
            ),
        ]
        kubetest_deployment.create()
        kubetest_deployment.wait_until_ready(timeout=30)
        return kubetest_deployment
    async def test_adjust_deployment_never_ready(
        self,
        config: KubernetesConfiguration,
        kubetest_deployment_never_ready: KubetestDeployment,
    ) -> None:
        # An adjustment driving the deployment permanently unready must be
        # rejected with reason "start-failed".
        config.timeout = "5s"
        config.cascade_common_settings(overwrite=True)
        connector = KubernetesConnector(config=config)
        adjustment = Adjustment(
            component_name="fiber-http/fiber-http",
            setting_name="mem",
            value="128Mi",
        )
        with pytest.raises(AdjustmentRejectedError) as rejection_info:
            await connector.adjust([adjustment])
        # Validate the correct error was raised, re-raise if not for additional debugging context
        try:
            assert (
                "(reason ContainersNotReady) containers with unready status: [fiber-http"
                in str(rejection_info.value)
            )
            assert rejection_info.value.reason == "start-failed"
        except AssertionError as e:
            raise e from rejection_info.value
    async def test_adjust_deployment_oom_killed(
        self,
        config: KubernetesConfiguration,
        kubetest_deployemnt_oom_killed: KubetestDeployment,
    ) -> None:
        # An adjustment that causes the pods to be OOMKilled must be rejected
        # with reason "unstable" (crash restart detected).
        config.timeout = "10s"
        config.cascade_common_settings(overwrite=True)
        connector = KubernetesConnector(config=config)
        adjustment = Adjustment(
            component_name="fiber-http/fiber-http",
            setting_name="mem",
            value="128Mi",
        )
        with pytest.raises(AdjustmentRejectedError) as rejection_info:
            await connector.adjust([adjustment])
        # Validate the correct error was raised, re-raise if not for additional debugging context
        try:
            assert (
                "Deployment fiber-http pod(s) crash restart detected: fiber-http-"
                in str(rejection_info.value)
            )
            assert rejection_info.value.reason == "unstable"
        except AssertionError as e:
            # Restart counters can lag; treat the unready-pod message as a known flake
            if "Found 1 unready pod(s) for deployment fiber-http" in str(
                rejection_info.value
            ):
                pytest.xfail("Restart count update took too long")
            raise e from rejection_info.value
    async def test_adjust_deployment_settlement_failed(
        self,
        config: KubernetesConfiguration,
        kubetest_deployment_becomes_unready: KubetestDeployment,
    ) -> None:
        # A deployment that goes unready during the settlement window must be
        # rejected and, per on_failure=shutdown, scaled to zero replicas.
        config.timeout = "15s"
        config.settlement = "20s"
        config.on_failure = FailureMode.shutdown
        config.cascade_common_settings(overwrite=True)
        connector = KubernetesConnector(config=config)
        adjustment = Adjustment(
            component_name="fiber-http/fiber-http",
            setting_name="mem",
            value="128Mi",
        )
        with pytest.raises(AdjustmentRejectedError) as rejection_info:
            await connector.adjust([adjustment])
        # Validate the correct error was raised, re-raise if not for additional debugging context
        try:
            assert "(reason ContainersNotReady) containers with unready status: [fiber-http]" in str(
                rejection_info.value
            ) or "Deployment fiber-http pod(s) crash restart detected" in str(
                rejection_info.value
            ), str(
                rejection_info.value
            )
            assert rejection_info.value.reason == "unstable"
        except AssertionError as e:
            raise e from rejection_info.value
        # Validate deployment scaled down to 0 instances
        kubetest_deployment_becomes_unready.refresh()
        assert kubetest_deployment_becomes_unready.obj.spec.replicas == 0
async def test_adjust_tuning_never_ready(
self,
tuning_config: KubernetesConfiguration,
kubetest_deployment_never_ready: KubetestDeployment,
kube: kubetest.client.TestClient,
) -> None:
tuning_config.timeout = "30s"
tuning_config.on_failure = FailureMode.shutdown
tuning_config.cascade_common_settings(overwrite=True)
connector = KubernetesConnector(config=tuning_config)
adjustment = Adjustment(
component_name="fiber-http/fiber-http-tuning",
setting_name="mem",
value="128Mi",
)
try:
with pytest.raises(AdjustmentRejectedError) as rejection_info:
await connector.adjust([adjustment])
except RuntimeError as e:
if (
f"Time out after {tuning_config.timeout} waiting for tuning pod shutdown"
in str(e)
):
pytest.xfail("Tuning pod shutdown took over 30 seconds")
# Validate the correct error was raised, re-raise if not for additional debugging context
try:
assert (
"(reason ContainersNotReady) containers with unready status: [fiber-http"
in str(rejection_info.value)
)
assert rejection_info.value.reason == "start-failed"
except AssertionError as e:
raise e from rejection_info.value
# Validate baseline was restored during handle_error
tuning_pod = kube.get_pods()["fiber-http-tuning"]
fiber_container = next(
filter(
lambda cont: cont.name == "fiber-http", tuning_pod.obj.spec.containers
)
)
assert fiber_container.resources.requests["memory"] == "256Mi"
assert fiber_container.resources.limits["memory"] == "256Mi"
    async def test_adjust_tuning_oom_killed(
        self,
        tuning_config: KubernetesConfiguration,
        kubetest_deployemnt_oom_killed: KubetestDeployment,
        kube: kubetest.client.TestClient,
    ) -> None:
        # An adjustment causing the tuning container to be OOMKilled must be
        # rejected with reason "unstable" and the baseline resources restored.
        tuning_config.timeout = "25s"
        tuning_config.on_failure = FailureMode.shutdown
        tuning_config.cascade_common_settings(overwrite=True)
        connector = KubernetesConnector(config=tuning_config)
        adjustment = Adjustment(
            component_name="fiber-http/fiber-http-tuning",
            setting_name="mem",
            value="128Mi",
        )
        with pytest.raises(AdjustmentRejectedError) as rejection_info:
            await connector.adjust([adjustment])
        # Validate the correct error was raised, re-raise if not for additional debugging context
        try:
            assert (
                "Tuning optimization fiber-http-tuning crash restart detected on container(s): fiber-http"
                in str(rejection_info.value)
            )
            assert rejection_info.value.reason == "unstable"
        except AssertionError as e:
            raise e from rejection_info.value
        # Validate baseline was restored during handle_error
        tuning_pod = kube.get_pods()["fiber-http-tuning"]
        fiber_container = next(
            filter(
                lambda cont: cont.name == "fiber-http", tuning_pod.obj.spec.containers
            )
        )
        assert fiber_container.resources.requests["memory"] == "256Mi"
        assert fiber_container.resources.limits["memory"] == "256Mi"
async def test_adjust_tuning_settlement_failed(
self,
tuning_config: KubernetesConfiguration,
kubetest_deployment_becomes_unready: KubetestDeployment,
kube: kubetest.client.TestClient,
) -> None:
tuning_config.timeout = "25s"
tuning_config.settlement = "15s"
tuning_config.on_failure = FailureMode.shutdown
tuning_config.cascade_common_settings(overwrite=True)
connector = KubernetesConnector(config=tuning_config)
adjustment = Adjustment(
component_name="fiber-http/fiber-http-tuning",
setting_name="mem",
value="128Mi",
)
with pytest.raises(AdjustmentRejectedError) as rejection_info:
await connector.adjust([adjustment])
# Validate the correct error was raised, re-raise if not for additional debugging context
try:
assert "(reason ContainersNotReady) containers with unready status: [fiber-http]" in str(
rejection_info.value
) or "Tuning optimization fiber-http-tuning crash restart detected on container(s): fiber-http" in str(
rejection_info.value
)
rejection_info.value.reason == "unstable"
except AssertionError as e:
raise e from rejection_info.value
# Validate baseline was restored during handle_error
tuning_pod = kube.get_pods()["fiber-http-tuning"]
fiber_container = next(
filter(
lambda cont: cont.name == "fiber-http", tuning_pod.obj.spec.containers
)
)
assert fiber_container.resources.requests["memory"] == "256Mi"
assert fiber_container.resources.limits["memory"] == "256Mi"
@pytest.mark.integration
@pytest.mark.usefixtures("kubernetes_asyncio_config")
class TestKubernetesResourceRequirementsIntegration:
    @pytest.fixture(autouse=True)
    async def _wait_for_manifests(self, kube, config):
        # Block until kubetest-applied manifests are registered, then widen the
        # connector timeout for slow integration environments.
        kube.wait_for_registered()
        config.timeout = "5m"
    @pytest.fixture
    def namespace(self, kube: kubetest.client.TestClient) -> str:
        # Name of the kubetest-managed test namespace.
        return kube.namespace
    @pytest.mark.applymanifests(
        "../manifests/resource_requirements",
        files=["fiber-http_no_resource_limits.yaml"],
    )
    async def test_get_resource_requirements_no_limits(
        self, kube, tuning_config: KubernetesConfiguration
    ) -> None:
        # A container declaring only a CPU request must report limit=None.
        servo.logging.set_level("DEBUG")
        deployment = await Deployment.read("fiber-http", tuning_config.namespace)
        await deployment.wait_until_ready()
        pods = await deployment.get_pods()
        assert len(pods) == 1, "expected a fiber-http pod"
        pod = pods[0]
        container = pod.get_container("fiber-http")
        assert container.get_resource_requirements("cpu") == {
            servo.connectors.kubernetes.ResourceRequirement.request: "125m",
            servo.connectors.kubernetes.ResourceRequirement.limit: None,
        }
@pytest.mark.applymanifests(
"../manifests/resource_requirements",
files=["fiber-http_no_resource_limits.yaml"],
)
async def test_set_resource_requirements_no_limits(
self, kube, tuning_config: KubernetesConfiguration
) -> None:
servo.logging.set_level("DEBUG")
deployment = await Deployment.read("fiber-http", tuning_config.namespace)
await deployment.wait_until_ready()
pods = await deployment.get_pods()
assert len(pods) == 1, "expected a fiber-http pod"
pod = pods[0]
container = pod.get_container("fiber-http")
assert container.get_resource_requirements("cpu") == {
servo.connectors.kubernetes.ResourceRequirement.request: "125m",
servo.connectors.kubernetes.ResourceRequirement.limit: None,
}
# Set request and limit
container.set_resource_requirements(
"cpu",
{
servo.connectors.kubernetes.ResourceRequirement.request: "125m",
servo.connectors.kubernetes.ResourceRequirement.limit: "250m",
},
)
container.get_resource_requirements("cpu") == {
servo.connectors.kubernetes.ResourceRequirement.request: "125m",
servo.connectors.kubernetes.ResourceRequirement.limit: "250m",
}
# Set limit, leaving request alone
container.set_resource_requirements(
"cpu", {servo.connectors.kubernetes.ResourceRequirement.limit: "750m"}
)
assert container.get_resource_requirements("cpu") == {
servo.connectors.kubernetes.ResourceRequirement.request: "125m",
servo.connectors.kubernetes.ResourceRequirement.limit: "750m",
}
# Set request, clearing limit
container.set_resource_requirements(
"cpu",
{
servo.connectors.kubernetes.ResourceRequirement.request: "250m",
servo.connectors.kubernetes.ResourceRequirement.limit: None,
},
)
assert container.get_resource_requirements("cpu") == {
servo.connectors.kubernetes.ResourceRequirement.request: "250m",
servo.connectors.kubernetes.ResourceRequirement.limit: None,
}
# Clear request and limit
container.set_resource_requirements(
"cpu",
{
servo.connectors.kubernetes.ResourceRequirement.request: None,
servo.connectors.kubernetes.ResourceRequirement.limit: None,
},
)
assert container.get_resource_requirements("cpu") == {
servo.connectors.kubernetes.ResourceRequirement.request: None,
servo.connectors.kubernetes.ResourceRequirement.limit: None,
}
    @pytest.mark.applymanifests(
        "../manifests/resource_requirements",
        files=["fiber-http_no_resource_limits.yaml"],
    )
    async def test_initialize_tuning_pod_set_defaults_for_no_limits(
        self, kube, tuning_config: KubernetesConfiguration
    ) -> None:
        # When the source container has no limits, configured default limits must
        # be applied to the tuning pod (normalized: "1000m" -> "1", "1GiB" -> "1Gi").
        servo.logging.set_level("DEBUG")
        # Setup the config to set a default limit
        container_config = tuning_config.deployments[0].containers[0]
        container_config.cpu.limit = "1000m"
        container_config.memory.limit = "1GiB"
        # NOTE: Create the optimizations class to bring up the canary
        await servo.connectors.kubernetes.KubernetesOptimizations.create(tuning_config)
        # Read the Tuning Pod and check resources
        pod = await Pod.read("fiber-http-tuning", tuning_config.namespace)
        container = pod.get_container("fiber-http")
        cpu_requirements = container.get_resource_requirements("cpu")
        memory_requirements = container.get_resource_requirements("memory")
        assert (
            cpu_requirements[servo.connectors.kubernetes.ResourceRequirement.limit]
            == "1"
        )
        assert (
            memory_requirements[servo.connectors.kubernetes.ResourceRequirement.limit]
            == "1Gi"
        )
    @pytest.mark.applymanifests(
        "../manifests/resource_requirements", files=["fiber-http_no_cpu_limit.yaml"]
    )
    async def test_no_cpu_limit(
        self, kube, tuning_config: KubernetesConfiguration
    ) -> None:
        # With cpu.set restricted to ["request"], an adjustment changes only the
        # request while the configured default limit is applied once at creation.
        servo.logging.set_level("DEBUG")
        # Setup the config to set a default limit
        tuning_config.deployments[0].containers[0].cpu.limit = "1000m"
        tuning_config.deployments[0].containers[0].cpu.set = ["request"]
        connector = KubernetesConnector(config=tuning_config)
        adjustment = Adjustment(
            component_name="fiber-http/fiber-http-tuning",
            setting_name="cpu",
            value=".250",
        )
        description = await connector.adjust([adjustment])
        assert description is not None
        setting = description.get_setting("fiber-http/fiber-http-tuning.cpu")
        assert setting
        assert setting.value == 0.25
        # Read the Tuning Pod and check resources
        pod = await Pod.read("fiber-http-tuning", tuning_config.namespace)
        container = pod.get_container("fiber-http")
        # CPU picks up the 1000m default limit; only the request gets adjusted to 250m
        assert container.get_resource_requirements("cpu") == {
            servo.connectors.kubernetes.ResourceRequirement.request: "250m",
            servo.connectors.kubernetes.ResourceRequirement.limit: "1",
        }
        # Memory is untouched from the manifest
        assert container.get_resource_requirements("memory") == {
            servo.connectors.kubernetes.ResourceRequirement.request: "128Mi",
            servo.connectors.kubernetes.ResourceRequirement.limit: "128Mi",
        }
    @pytest.mark.applymanifests(
        "../manifests/resource_requirements",
        files=["fiber-http_no_resource_limits.yaml"],
    )
    async def test_reading_values_from_no_limits_optimization_class(
        self, kube, tuning_config: KubernetesConfiguration
    ) -> None:
        # With no limits in the manifest, values are read from the requests and
        # both tuning and main limits surface as None; main settings are pinned.
        servo.logging.set_level("DEBUG")
        # NOTE: Create the optimizations class to bring up the canary
        kubernetes_optimizations = (
            await servo.connectors.kubernetes.KubernetesOptimizations.create(
                tuning_config
            )
        )
        canary_optimization = kubernetes_optimizations.optimizations[0]
        # Validate Tuning
        assert canary_optimization.tuning_cpu, "Expected Tuning CPU"
        assert canary_optimization.tuning_cpu.value == 0.125
        assert canary_optimization.tuning_cpu.request == 0.125
        assert canary_optimization.tuning_cpu.limit is None
        assert canary_optimization.tuning_cpu.pinned is False
        assert canary_optimization.tuning_memory, "Expected Tuning Memory"
        assert canary_optimization.tuning_memory.value == 134217728
        assert canary_optimization.tuning_memory.value.human_readable() == "128.0Mi"
        assert canary_optimization.tuning_memory.request == 134217728
        assert canary_optimization.tuning_memory.limit is None
        assert canary_optimization.tuning_memory.pinned is False
        assert canary_optimization.tuning_replicas.value == 1
        assert canary_optimization.tuning_replicas.pinned is True
        # Validate Main
        assert canary_optimization.main_cpu, "Expected Main CPU"
        assert canary_optimization.main_cpu.value == 0.125
        assert canary_optimization.main_cpu.request == 0.125
        assert canary_optimization.main_cpu.limit is None
        assert canary_optimization.main_cpu.pinned is True
        assert canary_optimization.main_memory, "Expected Main Memory"
        assert canary_optimization.main_memory.value == 134217728
        assert canary_optimization.main_memory.value.human_readable() == "128.0Mi"
        assert canary_optimization.main_memory.request == 134217728
        assert canary_optimization.main_memory.limit is None
        assert canary_optimization.main_memory.pinned is True
        assert canary_optimization.main_replicas.value == 1
        assert canary_optimization.main_replicas.pinned is True
    @pytest.mark.applymanifests(
        "../manifests/resource_requirements", files=["fiber-http_bursty_memory.yaml"]
    )
    async def test_reading_values_from_bursty_memory_optimization_class(
        self, kube, tuning_config: KubernetesConfiguration
    ) -> None:
        # With get=["limit"], values are read from the limits (2Gi / 250m) even
        # though the requests are smaller (128Mi / 125m).
        servo.logging.set_level("DEBUG")
        # Setup the config to read limits instead of requests
        container_config = tuning_config.deployments[0].containers[0]
        container_config.cpu.get = ["limit"]
        container_config.memory.get = ["limit"]
        container_config.memory.max = "3.0GiB"  # Raise max so we validate
        # NOTE: Create the optimizations class to bring up the canary
        kubernetes_optimizations = (
            await servo.connectors.kubernetes.KubernetesOptimizations.create(
                tuning_config
            )
        )
        canary_optimization = kubernetes_optimizations.optimizations[0]
        # Validate Tuning
        assert canary_optimization.tuning_cpu, "Expected Tuning CPU"
        assert canary_optimization.tuning_cpu.value == 0.25
        assert canary_optimization.tuning_cpu.request == 0.125
        assert canary_optimization.tuning_cpu.limit == 0.25
        assert canary_optimization.tuning_cpu.pinned is False
        assert canary_optimization.tuning_memory, "Expected Tuning Memory"
        assert canary_optimization.tuning_memory.value == 2147483648
        assert canary_optimization.tuning_memory.value.human_readable() == "2.0Gi"
        assert canary_optimization.tuning_memory.request == 134217728
        assert canary_optimization.tuning_memory.limit == 2147483648
        assert canary_optimization.tuning_memory.pinned is False
        assert canary_optimization.tuning_replicas.value == 1
        assert canary_optimization.tuning_replicas.pinned is True
        # Validate Main
        assert canary_optimization.main_cpu, "Expected Main CPU"
        assert canary_optimization.main_cpu.value == 0.25
        assert canary_optimization.main_cpu.request == 0.125
        assert canary_optimization.main_cpu.limit == 0.25
        assert canary_optimization.main_cpu.pinned is True
        assert canary_optimization.main_memory, "Expected Main Memory"
        assert canary_optimization.main_memory.value == 2147483648
        assert canary_optimization.main_memory.value.human_readable() == "2.0Gi"
        assert canary_optimization.main_memory.request == 134217728
        assert canary_optimization.main_memory.limit == 2147483648
        assert canary_optimization.main_memory.pinned is True
        assert canary_optimization.main_replicas.value == 2
        assert canary_optimization.main_replicas.pinned is True
    @pytest.mark.applymanifests(
        "../manifests/resource_requirements", files=["fiber-http_bursty_memory.yaml"]
    )
    async def test_preflight_cycle(
        self, kube, tuning_config: KubernetesConfiguration
    ) -> None:
        # Full cycle: describe baseline -> adjust tuning CPU/memory -> verify via
        # describe and pod inspection -> adjust back to baseline -> verify again.
        servo.logging.set_level("DEBUG")
        # Setup the config to set a default limit
        tuning_config.deployments[0].containers[0].cpu.get = ["limit"]
        tuning_config.deployments[0].containers[0].memory.max = "2.0GiB"
        tuning_config.deployments[0].containers[0].memory.get = ["limit"]
        connector = KubernetesConnector(config=tuning_config)
        # Describe to get our baseline
        baseline_description = await connector.describe()
        baseline_main_cpu_setting = baseline_description.get_setting(
            "fiber-http/fiber-http.cpu"
        )
        assert baseline_main_cpu_setting
        assert baseline_main_cpu_setting.value == 0.25
        baseline_main_memory_setting = baseline_description.get_setting(
            "fiber-http/fiber-http.mem"
        )
        assert baseline_main_memory_setting
        assert baseline_main_memory_setting.value.human_readable() == "2.0Gi"
        ## Tuning settings
        baseline_tuning_cpu_setting = baseline_description.get_setting(
            "fiber-http/fiber-http-tuning.cpu"
        )
        assert baseline_tuning_cpu_setting
        assert baseline_tuning_cpu_setting.value == 0.25
        baseline_tuning_memory_setting = baseline_description.get_setting(
            "fiber-http/fiber-http-tuning.mem"
        )
        assert baseline_tuning_memory_setting
        assert baseline_tuning_memory_setting.value.human_readable() == "2.0Gi"
        ##
        # Adjust CPU and Memory
        cpu_adjustment = Adjustment(
            component_name="fiber-http/fiber-http-tuning",
            setting_name="cpu",
            value=".500",
        )
        # NOTE(review): setting_name is "memory" here while describe keys use
        # "mem" -- presumably the connector accepts both; confirm alias handling
        memory_adjustment = Adjustment(
            component_name="fiber-http/fiber-http-tuning",
            setting_name="memory",
            value="1.0",
        )
        adjusted_description = await connector.adjust(
            [cpu_adjustment, memory_adjustment]
        )
        assert adjusted_description is not None
        ## Main settings (unchanged by the tuning adjustment)
        adjusted_main_cpu_setting = adjusted_description.get_setting(
            "fiber-http/fiber-http.cpu"
        )
        assert adjusted_main_cpu_setting
        assert adjusted_main_cpu_setting.value == 0.25
        adjusted_main_mem_setting = adjusted_description.get_setting(
            "fiber-http/fiber-http.mem"
        )
        assert adjusted_main_mem_setting
        assert adjusted_main_mem_setting.value.human_readable() == "2.0Gi"
        ## Tuning settings (reflect the adjustment)
        adjusted_tuning_cpu_setting = adjusted_description.get_setting(
            "fiber-http/fiber-http-tuning.cpu"
        )
        assert adjusted_tuning_cpu_setting
        assert adjusted_tuning_cpu_setting.value == 0.5
        adjusted_tuning_mem_setting = adjusted_description.get_setting(
            "fiber-http/fiber-http-tuning.mem"
        )
        assert adjusted_tuning_mem_setting
        assert adjusted_tuning_mem_setting.value.human_readable() == "1.0Gi"
        ## Run another describe to confirm adjust and describe agree
        adjusted_description = await connector.describe()
        assert adjusted_description is not None
        ## Main settings
        adjusted_main_cpu_setting = adjusted_description.get_setting(
            "fiber-http/fiber-http.cpu"
        )
        assert adjusted_main_cpu_setting
        assert adjusted_main_cpu_setting.value == 0.25
        adjusted_main_mem_setting = adjusted_description.get_setting(
            "fiber-http/fiber-http.mem"
        )
        assert adjusted_main_mem_setting
        assert adjusted_main_mem_setting.value.human_readable() == "2.0Gi"
        ## Tuning settings
        adjusted_tuning_cpu_setting = adjusted_description.get_setting(
            "fiber-http/fiber-http-tuning.cpu"
        )
        assert adjusted_tuning_cpu_setting
        assert adjusted_tuning_cpu_setting.value == 0.5
        adjusted_tuning_mem_setting = adjusted_description.get_setting(
            "fiber-http/fiber-http-tuning.mem"
        )
        assert adjusted_tuning_mem_setting
        assert adjusted_tuning_mem_setting.value.human_readable() == "1.0Gi"
        ## Read the Main Pod and check resources
        main_deployment = await Deployment.read("fiber-http", tuning_config.namespace)
        main_pods = await main_deployment.get_pods()
        main_pod_container = main_pods[0].get_container("fiber-http")
        ## Main pod requirements are untouched by the tuning adjustment and keep
        ## the manifest values (the earlier "500m" comment was a copy-paste error)
        assert main_pod_container.get_resource_requirements("cpu") == {
            servo.connectors.kubernetes.ResourceRequirement.request: "125m",
            servo.connectors.kubernetes.ResourceRequirement.limit: "250m",
        }
        ## Read the Tuning Pod and check resources
        tuning_pod = await Pod.read("fiber-http-tuning", tuning_config.namespace)
        tuning_pod_container = tuning_pod.get_container("fiber-http")
        ## CPU is set to 500m on both requirements
        assert tuning_pod_container.get_resource_requirements("cpu") == {
            servo.connectors.kubernetes.ResourceRequirement.request: "500m",
            servo.connectors.kubernetes.ResourceRequirement.limit: "500m",
        }
        ## Memory is set to 1Gi on both requirements
        assert tuning_pod_container.get_resource_requirements("memory") == {
            servo.connectors.kubernetes.ResourceRequirement.request: "1Gi",
            servo.connectors.kubernetes.ResourceRequirement.limit: "1Gi",
        }
        ##
        # Adjust back to baseline
        cpu_adjustment = Adjustment(
            component_name="fiber-http/fiber-http-tuning",
            setting_name="cpu",
            value=".250",
        )
        memory_adjustment = Adjustment(
            component_name="fiber-http/fiber-http-tuning",
            setting_name="memory",
            value="2.0",
        )
        adjusted_description = await connector.adjust(
            [cpu_adjustment, memory_adjustment]
        )
        assert adjusted_description is not None
        adjusted_cpu_setting = adjusted_description.get_setting(
            "fiber-http/fiber-http-tuning.cpu"
        )
        assert adjusted_cpu_setting
        assert adjusted_cpu_setting.value == 0.25
        adjusted_mem_setting = adjusted_description.get_setting(
            "fiber-http/fiber-http-tuning.mem"
        )
        assert adjusted_mem_setting
        assert adjusted_mem_setting.value.human_readable() == "2.0Gi"
        ## Run another describe to confirm the rollback
        adjusted_description = await connector.describe()
        assert adjusted_description is not None
        adjusted_cpu_setting = adjusted_description.get_setting(
            "fiber-http/fiber-http-tuning.cpu"
        )
        assert adjusted_cpu_setting
        assert adjusted_cpu_setting.value == 0.25
        adjusted_mem_setting = adjusted_description.get_setting(
            "fiber-http/fiber-http-tuning.mem"
        )
        assert adjusted_mem_setting
        assert adjusted_mem_setting.value.human_readable() == "2.0Gi"
# TODO: test_inject_by_source_port_int, test_inject_by_source_port_name
##
# Sidecar injection tests
ENVOY_SIDECAR_IMAGE_TAG = "opsani/envoy-proxy:servox-v0.9.0"
@pytest.mark.integration
@pytest.mark.usefixtures("kubernetes_asyncio_config")
class TestSidecarInjection:
@pytest.fixture(autouse=True)
async def _wait_for_manifests(self, kube, config):
kube.wait_for_registered()
config.timeout = "5m"
@pytest.fixture
def namespace(self, kube: kubetest.client.TestClient) -> str:
return kube.namespace
@pytest.mark.applymanifests(
"../manifests/sidecar_injection", files=["fiber-http_single_port.yaml"]
)
@pytest.mark.parametrize(
"port, service",
[
(None, "fiber-http"),
(80, "fiber-http"),
("http", "fiber-http"),
],
)
async def test_inject_single_port_deployment(
self, namespace: str, service: str, port: Union[str, int]
) -> None:
deployment = await servo.connectors.kubernetes.Deployment.read(
"fiber-http", namespace
)
assert len(deployment.containers) == 1, "expected a single container"
service = await servo.connectors.kubernetes.Service.read(
"fiber-http", namespace
)
assert len(service.ports) == 1
port_obj = service.ports[0]
if isinstance(port, int):
assert port_obj.port == port
elif isinstance(port, str):
assert port_obj.name == port
assert port_obj.target_port == 8480
await deployment.inject_sidecar(
"opsani-envoy", ENVOY_SIDECAR_IMAGE_TAG, service="fiber-http", port=port
)
# Examine new sidecar
await deployment.refresh()
assert len(deployment.containers) == 2, "expected an injected container"
sidecar_container = deployment.containers[1]
assert sidecar_container.name == "opsani-envoy"
# Check ports and env
assert sidecar_container.ports == [
kubernetes_asyncio.client.V1ContainerPort(
container_port=9980,
host_ip=None,
host_port=None,
name="opsani-proxy",
protocol="TCP",
),
kubernetes_asyncio.client.V1ContainerPort(
container_port=9901,
host_ip=None,
host_port=None,
name="opsani-metrics",
protocol="TCP",
),
]
assert sidecar_container.obj.env == [
kubernetes_asyncio.client.V1EnvVar(
name="OPSANI_ENVOY_PROXY_SERVICE_PORT", value="9980", value_from=None
),
kubernetes_asyncio.client.V1EnvVar(
name="OPSANI_ENVOY_PROXIED_CONTAINER_PORT",
value="8480",
value_from=None,
),
kubernetes_asyncio.client.V1EnvVar(
name="OPSANI_ENVOY_PROXY_METRICS_PORT", value="9901", value_from=None
),
]
@pytest.mark.applymanifests(
"../manifests/sidecar_injection", files=["fiber-http_multiple_ports.yaml"]
)
@pytest.mark.parametrize(
"port, service, error",
[
(
None,
"fiber-http",
ValueError(
"Target Service 'fiber-http' exposes multiple ports -- target port must be specified"
),
),
(80, "fiber-http", None),
("http", "fiber-http", None),
],
)
async def test_inject_multiport_deployment(
self,
namespace: str,
service: str,
port: Union[str, int],
error: Optional[Exception],
) -> None:
deployment = await servo.connectors.kubernetes.Deployment.read(
"fiber-http", namespace
)
assert len(deployment.containers) == 1, "expected a single container"
service = await servo.connectors.kubernetes.Service.read(
"fiber-http", namespace
)
assert len(service.ports) == 2
port_obj = service.ports[0]
if isinstance(port, int):
assert port_obj.port == port
elif isinstance(port, str):
assert port_obj.name == port
assert port_obj.target_port == 8480
try:
await deployment.inject_sidecar(
"opsani-envoy", ENVOY_SIDECAR_IMAGE_TAG, service="fiber-http", port=port
)
except Exception as e:
assert repr(e) == repr(error)
# Examine new sidecar (if success is expected)
if error is None:
await deployment.refresh()
assert len(deployment.containers) == 2, "expected an injected container"
sidecar_container = deployment.containers[1]
assert sidecar_container.name == "opsani-envoy"
# Check ports and env
assert sidecar_container.ports == [
kubernetes_asyncio.client.V1ContainerPort(
container_port=9980,
host_ip=None,
host_port=None,
name="opsani-proxy",
protocol="TCP",
),
kubernetes_asyncio.client.V1ContainerPort(
container_port=9901,
host_ip=None,
host_port=None,
name="opsani-metrics",
protocol="TCP",
),
]
assert sidecar_container.obj.env == [
kubernetes_asyncio.client.V1EnvVar(
name="OPSANI_ENVOY_PROXY_SERVICE_PORT",
value="9980",
value_from=None,
),
kubernetes_asyncio.client.V1EnvVar(
name="OPSANI_ENVOY_PROXIED_CONTAINER_PORT",
value="8480",
value_from=None,
),
kubernetes_asyncio.client.V1EnvVar(
name="OPSANI_ENVOY_PROXY_METRICS_PORT",
value="9901",
value_from=None,
),
]
@pytest.mark.applymanifests(
"../manifests/sidecar_injection",
files=["fiber-http_multiple_ports_symbolic_targets.yaml"],
)
@pytest.mark.parametrize(
"port, service",
[
(None, "fiber-http"),
(80, "fiber-http"),
("http", "fiber-http"),
],
)
async def test_inject_symbolic_target_port(
self, namespace: str, service: str, port: Union[str, int]
) -> None:
"""test_inject_by_source_port_name_with_symbolic_target_port"""
deployment = await servo.connectors.kubernetes.Deployment.read(
"fiber-http", namespace
)
assert len(deployment.containers) == 1, "expected a single container"
service = await servo.connectors.kubernetes.Service.read(
"fiber-http", namespace
)
assert len(service.ports) == 1
port_obj = service.ports[0]
if isinstance(port, int):
assert port_obj.port == port
elif isinstance(port, str):
assert port_obj.name == port
assert port_obj.target_port == "collector"
await deployment.inject_sidecar(
"opsani-envoy", ENVOY_SIDECAR_IMAGE_TAG, service="fiber-http", port=port
)
# Examine new sidecar
await deployment.refresh()
assert len(deployment.containers) == 2, "expected an injected container"
sidecar_container = deployment.containers[1]
assert sidecar_container.name == "opsani-envoy"
# Check ports and env
assert sidecar_container.ports == [
kubernetes_asyncio.client.V1ContainerPort(
container_port=9980,
host_ip=None,
host_port=None,
name="opsani-proxy",
protocol="TCP",
),
kubernetes_asyncio.client.V1ContainerPort(
container_port=9901,
host_ip=None,
host_port=None,
name="opsani-metrics",
protocol="TCP",
),
]
assert sidecar_container.obj.env == [
kubernetes_asyncio.client.V1EnvVar(
name="OPSANI_ENVOY_PROXY_SERVICE_PORT", value="9980", value_from=None
),
kubernetes_asyncio.client.V1EnvVar(
name="OPSANI_ENVOY_PROXIED_CONTAINER_PORT",
value="8480",
value_from=None,
),
kubernetes_asyncio.client.V1EnvVar(
name="OPSANI_ENVOY_PROXY_METRICS_PORT", value="9901", value_from=None
),
]
@pytest.mark.integration
@pytest.mark.usefixtures("kubernetes_asyncio_config")
class TestKubernetesClusterConnectorIntegration:
"""Tests not requiring manifests setup, just an active cluster"""
@pytest.fixture
def namespace(self, kube: kubetest.client.TestClient) -> str:
return kube.namespace
@respx.mock
async def test_telemetry_hello(
self,
namespace: str,
config: KubernetesConfiguration,
servo_runner: servo.runner.Runner,
) -> None:
async with client.api_client.ApiClient() as api:
v1 = kubernetes_asyncio.client.VersionApi(api)
version_obj = await v1.get_code()
expected = (
f'"telemetry": {{"servox.version": "{servo.__version__}", "servox.platform": "{platform.platform()}", '
f'"kubernetes.namespace": "{namespace}", "kubernetes.version": "{version_obj.major}.{version_obj.minor}", "kubernetes.platform": "{version_obj.platform}"}}'
)
connector = KubernetesConnector(
config=config, telemetry=servo_runner.servo.telemetry
)
# attach connector
await servo_runner.servo.add_connector("kubernetes", connector)
request = respx.post(
"https://api.opsani.com/accounts/servox.opsani.com/applications/tests/servo"
).mock(
return_value=httpx.Response(
200, text=f'{{"status": "{servo.api.OptimizerStatuses.ok}"}}'
)
)
await servo_runner._post_event(
servo.api.Events.hello,
dict(
agent=servo.api.user_agent(),
telemetry=servo_runner.servo.telemetry.values,
),
)
assert request.called
print(request.calls.last.request.content.decode())
assert expected in request.calls.last.request.content.decode()
##
# Tests against an ArgoCD rollout
@pytest.mark.integration
@pytest.mark.usefixtures("kubernetes_asyncio_config", "manage_rollout")
@pytest.mark.parametrize(
(),
[
pytest.param(
marks=pytest.mark.rollout_manifest.with_args(
"tests/manifests/argo_rollouts/fiber-http-opsani-dev.yaml"
)
),
pytest.param(
marks=pytest.mark.rollout_manifest.with_args(
"tests/manifests/argo_rollouts/fiber-http-opsani-dev-workload-ref.yaml"
)
),
],
)
class TestKubernetesConnectorRolloutIntegration:
@pytest.fixture
def namespace(self, kube: kubetest.client.TestClient) -> str:
return kube.namespace
@pytest.fixture()
def _rollout_tuning_config(
self, tuning_config: KubernetesConfiguration
) -> KubernetesConfiguration:
tuning_config.rollouts = [
RolloutConfiguration.parse_obj(d) for d in tuning_config.deployments
]
tuning_config.deployments = None
return tuning_config
##
# Canary Tests
async def test_create_rollout_tuning(
self,
_rollout_tuning_config: KubernetesConfiguration,
kube: kubetest.client.TestClient,
namespace: str,
) -> None:
_rollout_tuning_config.rollouts[0].containers[
0
].static_environment_variables = {"FOO": "BAR"}
connector = KubernetesConnector(config=_rollout_tuning_config)
rol = await Rollout.read("fiber-http", namespace)
description = await connector.describe()
assert description == Description(
components=[
Component(
name="fiber-http/fiber-http",
settings=[
CPU(
name="cpu",
type="range",
pinned=True,
value="125m",
min="125m",
max="875m",
step="125m",
request="125m",
limit="125m",
get=["request", "limit"],
set=["request", "limit"],
),
Memory(
name="mem",
type="range",
pinned=True,
value=134217728,
min=134217728,
max=805306368,
step=33554432,
request=134217728,
limit=134217728,
get=["request", "limit"],
set=["request", "limit"],
),
Replicas(
name="replicas",
type="range",
pinned=True,
value=1,
min=0,
max=99999,
step=1,
),
EnvironmentEnumSetting(
name="INIT_MEMORY_SIZE",
type="enum",
pinned=True,
values=["32MB", "64MB", "128MB"],
),
],
),
Component(
name="fiber-http/fiber-http-tuning",
settings=[
CPU(
name="cpu",
type="range",
pinned=False,
value="125m",
min="125m",
max="875m",
step="125m",
request="125m",
limit="125m",
get=["request", "limit"],
set=["request", "limit"],
),
Memory(
name="mem",
type="range",
pinned=False,
value=134217728,
min=134217728,
max=805306368,
step=33554432,
request=134217728,
limit=134217728,
get=["request", "limit"],
set=["request", "limit"],
),
Replicas(
name="replicas",
type="range",
pinned=True,
value=1,
min=0,
max=1,
step=1,
),
EnvironmentEnumSetting(
name="INIT_MEMORY_SIZE",
type="enum",
pinned=True,
values=["32MB", "64MB", "128MB"],
),
],
),
]
)
tuning_pod = kube.get_pods()["fiber-http-tuning"]
assert (
tuning_pod.obj.metadata.annotations["opsani.com/opsani_tuning_for"]
== "fiber-http/fiber-http-tuning"
)
assert tuning_pod.obj.metadata.labels["opsani_role"] == "tuning"
target_container = next(
filter(lambda c: c.name == "fiber-http", tuning_pod.obj.spec.containers)
)
assert target_container.resources.requests == {"cpu": "125m", "memory": "128Mi"}
assert target_container.resources.limits == {"cpu": "125m", "memory": "128Mi"}
assert target_container.env == [
kubernetes.client.models.V1EnvVar(name="FOO", value="BAR")
]
# verify tuning pod is registered as service endpoint
service = await servo.connectors.kubernetes.Service.read(
"fiber-http", namespace
)
endpoints = await service.get_endpoints()
tuning_name = f"{_rollout_tuning_config.rollouts[0].name}-tuning"
tuning_endpoint = next(
filter(
lambda epa: epa.target_ref.name == tuning_name,
endpoints[0].subsets[0].addresses,
),
None,
)
if tuning_endpoint is None:
raise AssertionError(
f"Tuning pod {tuning_name} not contained in service endpoints: {endpoints}"
)
async def test_adjust_rol_tuning_cpu_with_settle(
self, _rollout_tuning_config, namespace
):
# test_adjust_rollout_tuning_cpu_with_settlement
connector = KubernetesConnector(config=_rollout_tuning_config)
adjustment = Adjustment(
component_name="fiber-http/fiber-http-tuning",
setting_name="cpu",
value=".250",
)
control = servo.Control(settlement="1s")
description = await connector.adjust([adjustment], control)
assert description is not None
setting = description.get_setting("fiber-http/fiber-http-tuning.cpu")
assert setting
assert setting.value == 0.25
async def test_adjust_rol_tuning_insufficient_rsrcs(
self, _rollout_tuning_config: KubernetesConfiguration, namespace
) -> None:
# test_adjust_rollout_tuning_insufficient_resources
servo.logging.set_level("TRACE")
_rollout_tuning_config.timeout = "15s"
_rollout_tuning_config.cascade_common_settings(overwrite=True)
_rollout_tuning_config.rollouts[0].containers[0].memory.max = "256Gi"
connector = KubernetesConnector(config=_rollout_tuning_config)
adjustment = Adjustment(
component_name="fiber-http/fiber-http-tuning",
setting_name="mem",
value="128Gi", # impossible right?
)
with pytest.raises(AdjustmentRejectedError) as rejection_info:
description = await connector.adjust([adjustment])
rej_msg = str(rejection_info.value)
assert (
"Insufficient memory." in rej_msg
or "Pod Node didn't have enough resource: memory" in rej_msg
)
STANDARD_ROLLOUT_EXPECTED_PORTS = [
servo.connectors.kubernetes.RolloutV1ContainerPort(
container_port=9980,
host_ip=None,
host_port=None,
name="opsani-proxy",
protocol="TCP",
),
servo.connectors.kubernetes.RolloutV1ContainerPort(
container_port=9901,
host_ip=None,
host_port=None,
name="opsani-metrics",
protocol="TCP",
),
]
STANDARD_ROLLOUT_EXPECTED_ENV = [
servo.connectors.kubernetes.RolloutV1EnvVar(
name="OPSANI_ENVOY_PROXY_SERVICE_PORT", value="9980", value_from=None
),
servo.connectors.kubernetes.RolloutV1EnvVar(
name="OPSANI_ENVOY_PROXIED_CONTAINER_PORT", value="8480", value_from=None
),
servo.connectors.kubernetes.RolloutV1EnvVar(
name="OPSANI_ENVOY_PROXY_METRICS_PORT", value="9901", value_from=None
),
]
WORKLOAD_REF_ROLLOUT_EXPECTED_PORTS = [
kubernetes_asyncio.client.V1ContainerPort(
container_port=9980,
host_ip=None,
host_port=None,
name="opsani-proxy",
protocol="TCP",
),
kubernetes_asyncio.client.V1ContainerPort(
container_port=9901,
host_ip=None,
host_port=None,
name="opsani-metrics",
protocol="TCP",
),
]
WORKLOAD_REF_ROLLOUT_EXPECTED_ENV = [
kubernetes_asyncio.client.V1EnvVar(
name="OPSANI_ENVOY_PROXY_SERVICE_PORT", value="9980", value_from=None
),
kubernetes_asyncio.client.V1EnvVar(
name="OPSANI_ENVOY_PROXIED_CONTAINER_PORT", value="8480", value_from=None
),
kubernetes_asyncio.client.V1EnvVar(
name="OPSANI_ENVOY_PROXY_METRICS_PORT", value="9901", value_from=None
),
]
@pytest.mark.integration
@pytest.mark.usefixtures("kubernetes_asyncio_config", "manage_rollout")
class TestRolloutSidecarInjection:
@pytest.fixture
def namespace(self, kube: kubetest.client.TestClient) -> str:
return kube.namespace
@pytest.mark.parametrize(
"ports, env",
[
pytest.param(
STANDARD_ROLLOUT_EXPECTED_PORTS,
STANDARD_ROLLOUT_EXPECTED_ENV,
marks=pytest.mark.rollout_manifest.with_args(
"tests/manifests/argo_rollouts/fiber-http_single_port.yaml"
),
),
pytest.param(
WORKLOAD_REF_ROLLOUT_EXPECTED_PORTS,
WORKLOAD_REF_ROLLOUT_EXPECTED_ENV,
marks=pytest.mark.rollout_manifest.with_args(
"tests/manifests/argo_rollouts/fiber-http-workload-ref_single_port.yaml"
),
),
],
)
@pytest.mark.parametrize(
"test_port, service",
[
(None, "fiber-http"),
(80, "fiber-http"),
("http", "fiber-http"),
],
)
async def test_inject_rollout(
# test_inject_single_port_rollout
self,
namespace: str,
service: str,
test_port: Union[str, int],
ports: List[
Union[
servo.connectors.kubernetes.RolloutV1ContainerPort,
kubernetes_asyncio.client.V1ContainerPort,
]
],
env: List[
Union[
servo.connectors.kubernetes.RolloutV1EnvVar,
kubernetes_asyncio.client.V1EnvVar,
]
],
) -> None:
rollout = await servo.connectors.kubernetes.Rollout.read(
"fiber-http", namespace
)
assert len(rollout.containers) == 1, "expected a single container"
service = await servo.connectors.kubernetes.Service.read(
"fiber-http", namespace
)
assert len(service.ports) == 1
port_obj = service.ports[0]
if isinstance(test_port, int):
assert port_obj.port == test_port
elif isinstance(test_port, str):
assert port_obj.name == test_port
assert port_obj.target_port == 8480
await rollout.inject_sidecar(
"opsani-envoy",
ENVOY_SIDECAR_IMAGE_TAG,
service="fiber-http",
port=test_port,
)
# Examine new sidecar
await rollout.refresh()
assert len(rollout.containers) == 2, "expected an injected container"
sidecar_container = rollout.containers[1]
assert sidecar_container.name == "opsani-envoy"
# Check ports and env
assert sidecar_container.ports == ports
assert sidecar_container.obj.env == env
| 36.675946
| 187
| 0.584104
|
aff58c9f8141bc880f19fb1409dd47fe8e3ca72f
| 2,794
|
dart
|
Dart
|
lib/ui/movie_details/movie_detail_page.dart
|
HaiYen13/flutter_movie_db
|
8a86d33e43bcc6a2d7ace6310c8cbecc0487a404
|
[
"Apache-2.0"
] | 16
|
2019-01-10T21:09:54.000Z
|
2021-10-15T15:46:38.000Z
|
lib/ui/movie_details/movie_detail_page.dart
|
myotive/flutter_movie_db
|
8a86d33e43bcc6a2d7ace6310c8cbecc0487a404
|
[
"Apache-2.0"
] | null | null | null |
lib/ui/movie_details/movie_detail_page.dart
|
myotive/flutter_movie_db
|
8a86d33e43bcc6a2d7ace6310c8cbecc0487a404
|
[
"Apache-2.0"
] | 11
|
2019-03-28T10:23:45.000Z
|
2022-02-27T01:09:21.000Z
|
import 'package:flutter/material.dart';
import 'package:cached_network_image/cached_network_image.dart';
import 'package:flutter_movies/ui/movie_details/crew_widget.dart';
import 'package:flutter_movies/ui/movie_details/movie_detail_widget.dart';
import 'package:flutter_movies/ui/movie_details/similar_movies.dart';
class MovieDetailPage extends StatelessWidget {
final int movieId;
final String movieTitle;
final String heroImageURL;
final String heroImageTag;
MovieDetailPage(
this.movieId, this.movieTitle, this.heroImageURL, this.heroImageTag);
@override
Widget build(BuildContext context) {
return Scaffold(
body: Container(
child: CustomScrollView(
slivers: <Widget>[
SliverAppBar(
expandedHeight: 300.0,
floating: false,
pinned: true,
flexibleSpace: FlexibleSpaceBar(
title: Text(
movieTitle,
),
background: Hero(
tag: heroImageTag,
child: CachedNetworkImage(
imageUrl: heroImageURL,
fit: BoxFit.fitHeight,
)),
)),
SliverList(
delegate: SliverChildListDelegate([
Flex(
direction: Axis.vertical,
children: <Widget>[
Container(
margin: EdgeInsets.symmetric(horizontal: 10),
child: MovieDetailWidget(movieId))
],
),
Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: <Widget>[
Padding(
padding: const EdgeInsets.only(left: 10.0, bottom: 10.0),
child: Text("Actors"),
),
Container(
margin: EdgeInsets.symmetric(horizontal: 10),
child: CrewWidget(movieId),
height: 270),
],
),
Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: <Widget>[
Padding(
padding: const EdgeInsets.only(left: 10.0, bottom: 10.0),
child: Text("Similar Movies"),
),
Container(
margin: EdgeInsets.only(left: 10, right: 10),
child: SimilarMoviesWidget(movieId),
height: 270),
],
)
]),
)
],
),
),
);
}
}
| 34.493827
| 79
| 0.475304
|
2fd0ca0ae123d4b28944e38771345f39bdd6cd56
| 1,244
|
lua
|
Lua
|
prototypes/storage/fluid-canister.lua
|
henrikperden/Engineersvsenvironmentalist_0.1.55
|
df3585f5fa387b5a8f68e5d691dddde0a537cfcc
|
[
"0BSD"
] | null | null | null |
prototypes/storage/fluid-canister.lua
|
henrikperden/Engineersvsenvironmentalist_0.1.55
|
df3585f5fa387b5a8f68e5d691dddde0a537cfcc
|
[
"0BSD"
] | null | null | null |
prototypes/storage/fluid-canister.lua
|
henrikperden/Engineersvsenvironmentalist_0.1.55
|
df3585f5fa387b5a8f68e5d691dddde0a537cfcc
|
[
"0BSD"
] | null | null | null |
data:extend(
{
{
type = "item",
name = "empty-fluid-canister",
icon = "__Engineersvsenvironmentalist__/graphics/icons/storage/empty-canister.png",
flags = {"goes-to-main-inventory"},
subgroup = "fluid-logistics",
order = "f-c[empty-canister]",
stack_size = 100
},
{
type = "recipe",
name = "empty-fluid-canister",
category = "crafting",
subgroup = "fluid-logistics",
energy_required = 1,
enabled = false,
ingredients =
{
{"steel-plate", 1},
},
result ="empty-fluid-canister",
result_count = 5,
},
{
type = "item",
name = "liquid-fuel-canister",
icon = "__Engineersvsenvironmentalist__/graphics/icons/storage/liquid-fuel-canister.png",
flags = {"goes-to-main-inventory"},
fuel_category = "chemical", fuel_value = "150MJ",
subgroup = "fluid-logistics",
order = "f-c[liquid-fuel-canister]",
stack_size = 100
},
{
type = "item",
name = "ferric-chloride-canister",
icon = "__Engineersvsenvironmentalist__/graphics/icons/storage/ferric-chloride-canister.png",
flags = {"goes-to-main-inventory"},
subgroup = "fluid-logistics",
order = "f-c[ferric-chloride-canister]",
stack_size = 100
},
}
)
| 26.468085
| 97
| 0.62299
|
26e0f645cc21d17b003d930a562e341968f07a83
| 3,902
|
kt
|
Kotlin
|
app/src/main/java/io/bumbumapps/radio/internetradioplayer/extensions/ViewExtensions.kt
|
bumbumapp/FMRadio
|
ffda84a5425123ba80af7fe3c41126dbcb85be8b
|
[
"Unlicense",
"MIT"
] | null | null | null |
app/src/main/java/io/bumbumapps/radio/internetradioplayer/extensions/ViewExtensions.kt
|
bumbumapp/FMRadio
|
ffda84a5425123ba80af7fe3c41126dbcb85be8b
|
[
"Unlicense",
"MIT"
] | null | null | null |
app/src/main/java/io/bumbumapps/radio/internetradioplayer/extensions/ViewExtensions.kt
|
bumbumapp/FMRadio
|
ffda84a5425123ba80af7fe3c41126dbcb85be8b
|
[
"Unlicense",
"MIT"
] | null | null | null |
package io.bumbumapps.radio.internetradioplayer.extensions
import android.animation.ObjectAnimator
import android.os.Build
import android.os.Handler
import android.os.Looper
import android.text.Editable
import android.text.TextWatcher
import android.view.View
import android.view.ViewTreeObserver
import android.view.animation.AccelerateDecelerateInterpolator
import android.widget.SeekBar
import android.widget.TextView
import androidx.annotation.IdRes
import androidx.drawerlayout.widget.DrawerLayout
import androidx.dynamicanimation.animation.DynamicAnimation
import androidx.dynamicanimation.animation.SpringAnimation
import androidx.dynamicanimation.animation.SpringForce
/**
* Created by Vladimir Mikhalev 17.11.2017.
*/
fun runOnUiThread(action: () -> Unit) {
val mainLooper = Looper.getMainLooper()
if (Thread.currentThread().id != mainLooper.thread.id) {
Handler(mainLooper).post(action)
} else {
action.invoke()
}
}
fun runOnUiThreadDelayed(delayMs: Long, action: () -> Unit) {
val mainLooper = Looper.getMainLooper()
Handler(mainLooper).postDelayed(action, delayMs)
}
inline fun View.waitForMeasure(crossinline block: () -> Unit) {
if (width > 0 && height > 0) {
block()
return
}
viewTreeObserver.addOnPreDrawListener(object : ViewTreeObserver.OnPreDrawListener {
override fun onPreDraw(): Boolean {
val observer = viewTreeObserver
if (observer.isAlive) observer.removeOnPreDrawListener(this)
block()
return true
}
})
}
inline fun View.waitForLayout(crossinline handler: () -> Boolean) {
viewTreeObserver.addOnGlobalLayoutListener(object : ViewTreeObserver.OnGlobalLayoutListener {
override fun onGlobalLayout() {
val observer = viewTreeObserver
if (handler() && observer.isAlive) {
observer.removeOnGlobalLayoutListener(this)
}
}
})
}
fun View.visible(visible: Boolean, gone: Boolean = true) {
visibility = if (visible) View.VISIBLE else if (gone) View.GONE else View.INVISIBLE
}
val View?.isVisible: Boolean
get() = this?.visibility == View.VISIBLE
fun View.findParent(@IdRes id: Int): View? {
val parentView = parent as? View ?: return null
return if (parentView.id == id) parentView
else parentView.findParent(id)
}
fun TextView.onTextChanges(listener: (String) -> Unit) {
addTextChangedListener(object : TextWatcher {
override fun afterTextChanged(s: Editable) {
listener.invoke(s.toString())
}
override fun beforeTextChanged(s: CharSequence?, start: Int, count: Int, after: Int) {}
override fun onTextChanged(s: CharSequence?, start: Int, before: Int, count: Int) {}
})
}
fun TextView.setTextOrHide(text: String?) {
if (text == null || text.isBlank()) {
visible(false)
} else {
visible(true)
this.text = text
}
}
fun SeekBar.setProgressX(progress: Int, animate: Boolean) {
if (animate) {
if (Build.VERSION.SDK_INT >= 24) {
setProgress(progress, animate)
} else {
with(ObjectAnimator.ofInt(this, "progress", progress)) {
duration = 300
interpolator = AccelerateDecelerateInterpolator()
start()
}
}
} else {
setProgress(progress)
}
}
fun View.bounceXAnimation(dpVelocity: Float): SpringAnimation {
return SpringAnimation(this, DynamicAnimation.TRANSLATION_X, 0f)
.setStartVelocity(dpVelocity * context.dp).apply {
spring.dampingRatio = SpringForce.DAMPING_RATIO_HIGH_BOUNCY
spring.stiffness = 1000f
}
}
fun DrawerLayout.lock(locked: Boolean) {
setDrawerLockMode(if (locked) DrawerLayout.LOCK_MODE_LOCKED_CLOSED
else DrawerLayout.LOCK_MODE_UNLOCKED)
}
| 30.484375
| 97
| 0.677345
|
db6d7b87dccbd405e06c4a353dd17e2804a7d9d2
| 1,372
|
php
|
PHP
|
resources/views/admin/proveedores/update.blade.php
|
DevilPC-X/gpocsystem
|
770183b1e93885dbdc13084f0c2b6ad264f9d4d5
|
[
"MIT"
] | null | null | null |
resources/views/admin/proveedores/update.blade.php
|
DevilPC-X/gpocsystem
|
770183b1e93885dbdc13084f0c2b6ad264f9d4d5
|
[
"MIT"
] | null | null | null |
resources/views/admin/proveedores/update.blade.php
|
DevilPC-X/gpocsystem
|
770183b1e93885dbdc13084f0c2b6ad264f9d4d5
|
[
"MIT"
] | null | null | null |
@extends('templateMaster')
@section('title', 'Update Providers')
@section('content')
<div class="container text-center">
<div class="container header">
<div class="row">
<div class="col-md-2 header-left">
<i class="fa fa-edit fa-3x"></i>
</div>
<div class="col-md-6 header-left">
<h4>Actualizar Proveedor</h4>
</div>
</div>
</div>
<div class="container content">
{!!Form::model($value, ['route'=>['proveedores.update', $value->id], 'method'=>'PUT'])!!}
<div class="form-content">
<div class="input-group mb-3">
<div class="input-group-prepend">
<span class="input-group-text" id="basic-addon1">Proveedor</span>
</div>
{!!Form::text('nombre', null, ['class'=>'form-control',
'aria-label'=>'nombre', 'aria-describedby'=>'basic-addon2', 'required',
'placeholder'=>'Escriba el nuevo nombre'])!!}
</div>
<hr>
<a href="{{route('proveedores.index')}}" class="btn btn-warning">Cancelar <i class="fa fa-ban"></i></a>
{!!Form::button('Guardar <i class="fa fa-check"></i>', ['type'=>'submit','class'=>'btn btn-success'])!!}
</div>
{!!Form::close()!!}
</div>
</div>
@endsection
| 38.111111
| 116
| 0.505102
|
8ea6d5a3084e3c6caa585e6f29604d1ec85d1482
| 190
|
js
|
JavaScript
|
src/components/index.js
|
veldakarimi/Interior_consultant
|
e549460a0c6ffbb4b237a63496eba63b1ded0dc1
|
[
"MIT"
] | null | null | null |
src/components/index.js
|
veldakarimi/Interior_consultant
|
e549460a0c6ffbb4b237a63496eba63b1ded0dc1
|
[
"MIT"
] | 2
|
2021-09-08T19:30:52.000Z
|
2022-01-13T08:42:05.000Z
|
src/components/index.js
|
veldakarimi/interior_consultant
|
9cd459d007107275d093c09c7903aea9270fef6b
|
[
"MIT"
] | 1
|
2021-06-24T18:42:05.000Z
|
2021-06-24T18:42:05.000Z
|
export { default as Nav } from "./Navbar";
export { default as Midbody } from "./Midbody";
export{ default as Midbodyleft} from "./Midbodyleft";
export{ default as Footer } from "./Footer";
| 38
| 53
| 0.7
|
e097ed5bffa41097352245f19078ceb7d55f7e59
| 2,010
|
cs
|
C#
|
Project0/Project0.Library/Location.cs
|
1811-nov27-net/ConnerKnight-Project1
|
04f0d7f1fe42e3f3bdeb8065b1d76f591ecab414
|
[
"MIT"
] | null | null | null |
Project0/Project0.Library/Location.cs
|
1811-nov27-net/ConnerKnight-Project1
|
04f0d7f1fe42e3f3bdeb8065b1d76f591ecab414
|
[
"MIT"
] | null | null | null |
Project0/Project0.Library/Location.cs
|
1811-nov27-net/ConnerKnight-Project1
|
04f0d7f1fe42e3f3bdeb8065b1d76f591ecab414
|
[
"MIT"
] | null | null | null |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace Project0.Library
{
/// <summary>
/// Represents a named Pizza location with an inventory of ingredients
/// </summary>
public class Location
{
public int LocationId { get; set; }
public string Name { get; set; }
//was Dictionary<IVictual,int> had to make List<KeyValuePair<IVictual,int>> for serialization
public Dictionary<Ingredient, int> Inventory { get; set; }
//made it so only pizzas could be ordered
//public List<Pizza> Menu { get; set; }
/*
public Location(string name, Dictionary<Ingredient, int> inventory, List<Pizza> menu ,List<Order> orderHistory)
{
Name = name;
Inventory = inventory;
Menu = menu;
OrderHistory = orderHistory;
}
*/
public Location(string name, Dictionary<Ingredient, int> inventory)
{
Name = name;
Inventory = inventory;
}
public Location(string name)
{
Name = name;
Inventory = new Dictionary<Ingredient, int>();
}
public Location()
{
Name = null;
Inventory = new Dictionary<Ingredient, int>();
}
/// <summary>
/// subtracts the ingredients in orderIngredients from the locations ingredients
/// </summary>
/// <param name="o"> the order that is going to be placed on this Location</param>
/// <param name="orderIngredients"> the ingredients that the order requires</param>
public void PlaceOrder(Order o, Dictionary<Ingredient, int> orderIngredients)
{
foreach(KeyValuePair<Ingredient,int> pair in orderIngredients)
{
//Console.WriteLine(pair.Value);
Inventory[pair.Key] = Inventory[pair.Key] - pair.Value;
}
}
}
}
| 30.454545
| 119
| 0.572637
|
06ee169210e443092afc4583cc4acdd6c11a0c6f
| 94
|
py
|
Python
|
test_project/test_image/apps.py
|
francescortiz/image
|
0abda89644c9c489bdfec91acd45d8f6cacca7e7
|
[
"BSD-3-Clause"
] | 96
|
2015-02-06T18:35:05.000Z
|
2022-03-25T17:34:45.000Z
|
test_project/test_image/apps.py
|
francescortiz/image
|
0abda89644c9c489bdfec91acd45d8f6cacca7e7
|
[
"BSD-3-Clause"
] | 9
|
2015-03-27T19:57:28.000Z
|
2020-09-11T02:02:39.000Z
|
test_project/test_image/apps.py
|
francescortiz/image
|
0abda89644c9c489bdfec91acd45d8f6cacca7e7
|
[
"BSD-3-Clause"
] | 26
|
2015-05-13T08:57:34.000Z
|
2021-03-22T14:11:59.000Z
|
from django.apps import AppConfig
class TestImageConfig(AppConfig):
name = 'test_image'
| 15.666667
| 33
| 0.765957
|
82157cb000cd1bbf9efeb4283b810fe1a5d3a7bb
| 573
|
kt
|
Kotlin
|
reader/src/main/java/cn/gygxzc/uhf/event/MyEvent.kt
|
niantuo/uhf
|
c2fa8be40f7d1605ad8c2f454ff7235b1b0ac81a
|
[
"Apache-2.0"
] | 1
|
2019-04-17T08:43:06.000Z
|
2019-04-17T08:43:06.000Z
|
reader/src/main/java/cn/gygxzc/uhf/event/MyEvent.kt
|
niantuo/uhf
|
c2fa8be40f7d1605ad8c2f454ff7235b1b0ac81a
|
[
"Apache-2.0"
] | null | null | null |
reader/src/main/java/cn/gygxzc/uhf/event/MyEvent.kt
|
niantuo/uhf
|
c2fa8be40f7d1605ad8c2f454ff7235b1b0ac81a
|
[
"Apache-2.0"
] | null | null | null |
package cn.gygxzc.uhf.event
/**
* @author niantuo
* @createdTime 2018/5/12 13:40
* 该模块的事件注册
* 这个是事件,回调事件
* 考虑到向react 注册常量
*/
enum class MyEvent(val desc: String, val code: Int) {
SimpleBleMgr("蓝牙事件", 1),
BLE_NONE("蓝牙未连接", 0),
CONNECTED("蓝牙已连接", 2),
CONNECTED_FAILED("蓝牙连接失败", 3),
CONNECT_START("开始蓝牙连接", 4),
CLOSED("蓝牙已关闭", 5),
SCAN_START("开始扫描蓝牙设备", 6),
SCAN_END("蓝牙扫描结束", 7),
BLE_SETTING_RESULT("系统设置返回", 8),
BLE_DEVICE_FOUND("发现蓝牙", 9),
RFID_EVENT("RFID相关事件", 20),
RFID_INVENTORY_EPC("扫描到电子标签", 21),
;
}
| 17.90625
| 53
| 0.619546
|
385d5b6e177b3b39ac5f17d4fcc5df72e4b24094
| 4,093
|
php
|
PHP
|
apps/myemt/modules/consent/templates/addFriendsSuccess.php
|
ukbe/emarketturkey
|
610feb9fadf5c87c09aa38f136caa3858ac316f3
|
[
"MIT"
] | null | null | null |
apps/myemt/modules/consent/templates/addFriendsSuccess.php
|
ukbe/emarketturkey
|
610feb9fadf5c87c09aa38f136caa3858ac316f3
|
[
"MIT"
] | null | null | null |
apps/myemt/modules/consent/templates/addFriendsSuccess.php
|
ukbe/emarketturkey
|
610feb9fadf5c87c09aa38f136caa3858ac316f3
|
[
"MIT"
] | null | null | null |
<style>
.invite-box {
background: url(/images/layout/background/invite-friend-box.png) no-repeat left top;
padding: 20px;
width: 483px;
height: 285px;
}
.invite-box div {
width: 445px;
height: 277px;
overflow: scroll;
}
.invite-box table {
width: 427px; height:272px;
}
.invite-box table td {
padding: 4px;
font: 14px verdana;
color: #000000;
border-bottom: solid 1px #C0C0C0;
}
</style>
<?php echo form_tag('consent/invite') ?>
<div>
<h1><?php echo __('Contact List Retrieved') ?></h1>
<div class="column span-97 pad-1 right">
<?php if (count($members)): ?>
<div class="sidebar-header"><?php echo __('Add to Your Network') ?></div>
<div class="hrsplit-1"></div>
<div class="invite-box">
<div>
<table class="network-list" cellpadding="0" cellspacing="0" align="left">
<?php foreach ($members as $member): ?>
<tr>
<td><?php echo checkbox_tag('members[]', $member->getId(), true) ?></td>
<td><?php if ($user->can(ActionPeer::ACT_VIEW_PROFILE, $member)): ?>
<?php echo link_to(image_tag($member->getProfilePictureUri()), $member->getProfileUrl()) ?>
<?php else: ?>
<?php echo image_tag($member->getProfilePictureUri()) ?>
<?php endif ?>
<?php if ($user->can(ActionPeer::ACT_VIEW_PROFILE, $member)): ?>
<?php echo link_to($member, $member->getProfileUrl(), 'class=name') ?>
<?php else: ?>
<b class="large"><?php echo $member ?></b>
<?php endif ?>
</td></tr>
<?php endforeach ?>
</table>
</div>
</div>
<div class="right">
<?php echo link_to_function('Check All', "jQuery('input[name=members\\[\\]]').each(function(){this.checked=true});") ?>
<?php echo link_to_function('Un-check All', "jQuery('input[name=members\\[\\]]').each(function(){this.checked=false});") ?>
</div>
<div class="hrsplit-1"></div>
<?php echo button_to_function('Add Network Message', "jQuery('.add-friend').slideToggle()", 'class=add-friend') ?>
<div class="add-friend ghost">
<?php echo emt_label_for('add-friend-message', __('Network Request Message :')) ?><em><?php echo __('(optional)') ?></em><br />
<?php echo textarea_tag('add-friend-message', '', array('style' => 'width: 470px; height: 40px;')) ?>
<br />
<em><?php echo __('This message will be contained in the network request notification e-mail.') ?></em>
</div>
<?php endif ?>
</div>
<div class="column span-97 pad-1 left">
<?php if (count($candidates)): ?>
<div class="sidebar-header"><?php echo __('Send Invitations to') ?></div>
<div class="hrsplit-1"></div>
<div class="invite-box">
<div>
<table class="invite-list" cellpadding="0" cellspacing="0" style="">
<?php foreach ($candidates as $candidate): ?>
<?php $attr = array();
if (in_array($candidate->getEmail(), $banned_emails))
{
$attr[0] = 'disabled=disabled';
$attr[1] = "<font color=\"#999999\">{$candidate->getEmail()}</font>";
}
else
{
$attr[0] = '';
$attr[1] = $candidate->getEmail();
} ?>
<tr>
<td><?php echo checkbox_tag('candidates[]', $candidate->getId(), $attr[0]==''?true:false, $attr[0]) ?></td>
<td><?php echo $attr[1] ?></td></tr>
<?php endforeach ?>
</table>
</div>
</div>
<div class="right">
<?php echo link_to_function('Check All', "jQuery('input[name=candidates\\[\\]]').each(function(){this.checked=true});") ?>
<?php echo link_to_function('Un-check All', "jQuery('input[name=candidates\\[\\]]').each(function(){this.checked=false});") ?>
</div>
<div class="hrsplit-1"></div>
<?php echo button_to_function('Add Invitation Message', "jQuery('.invite-friend').slideToggle();", 'class=invite-friend') ?>
<div class="invite-friend ghost">
<?php echo emt_label_for('invite-friend-message', __('Invitation Message :')) ?><em><?php echo __('(optional)') ?></em><br />
<?php echo textarea_tag('invite-friend-message', '', array('style' => 'width: 470px; height: 40px;' )) ?>
<br />
<em><?php echo __('This message will be contained in the invitation e-mail.') ?></em>
</div>
<?php endif ?>
</div>
</div>
<div class="hrsplit-1"></div>
<div align="center">
<?php echo submit_image_tag('layout/button/send-invitations.'.$sf_user->getCulture().'.png') ?>
</div>
</form>
| 36.873874
| 134
| 0.639873
|
05a550444e8c0aab54fc128a0aa011f52932cf2e
| 1,607
|
py
|
Python
|
back-end/demo.py
|
JAYqq/MonGo
|
e33c9f62c2cf494af2b2d33408853294f3aed168
|
[
"MIT"
] | 1
|
2019-03-26T04:44:59.000Z
|
2019-03-26T04:44:59.000Z
|
back-end/demo.py
|
JAYqq/MonGo
|
e33c9f62c2cf494af2b2d33408853294f3aed168
|
[
"MIT"
] | 5
|
2020-02-12T13:32:08.000Z
|
2021-06-02T00:27:16.000Z
|
back-end/demo.py
|
JAYqq/MonGo
|
e33c9f62c2cf494af2b2d33408853294f3aed168
|
[
"MIT"
] | null | null | null |
# def hello():
# v=yield 'hello'
# print(v)
# gen=hello()
# result=gen.send(None)
# print(result)
# gen.send('world')
# def index_generator(L, target):
# for i, num in enumerate(L):
# if num == target:
# yield i
# print(list(index_generator([1, 6, 2, 4, 5, 2, 8, 6, 3, 2], 2)))
# def is_des(a,b):
# b=iter(b)
# return all(i in b for i in a)
# print(is_des([1, 3, 5], [1, 2, 3, 4, 5]))
# import time
# import asyncio
# async def crawl_page(url):
# print('crawling {}'.format(url))
# sleep_time = int(url.split('_')[-1])
# await asyncio.sleep(sleep_time)
# print('OK {}'.format(url))
# async def main(urls):
# tasks = [asyncio.create_task(crawl_page(url)) for url in urls]
# await asyncio.gather(*tasks)
# #for task in tasks:
# # await task
# start_time = time.perf_counter()
# asyncio.run(main(['url_1', 'url_2', 'url_3', 'url_4']))
# end_time = time.perf_counter()
# print(end_time-start_time)
import asyncio
import time
async def worker_1():
await asyncio.sleep(1)
return 1
async def worker_2():
await asyncio.sleep(2)
return 2 / 0
async def worker_3():
await asyncio.sleep(3)
return 3
async def main():
task_1 = asyncio.create_task(worker_1())
task_2 = asyncio.create_task(worker_2())
task_3 = asyncio.create_task(worker_3())
await asyncio.sleep(2)
task_3.cancel()
res = await asyncio.gather(task_1, task_2, task_3, return_exceptions=True)
print(res)
start_time = time.perf_counter()
asyncio.run(main())
end_time = time.perf_counter()
print(end_time-start_time)
| 22.957143
| 78
| 0.630989
|
daa30e262ca306aeeaa8986dde513a2da8a4885b
| 717
|
php
|
PHP
|
app/Http/Controllers/HomeController.php
|
alibek009/diplomaProject
|
7920cd3cc40709c5bad7d06a8843b43af3f0fe95
|
[
"MIT"
] | null | null | null |
app/Http/Controllers/HomeController.php
|
alibek009/diplomaProject
|
7920cd3cc40709c5bad7d06a8843b43af3f0fe95
|
[
"MIT"
] | 6
|
2021-05-08T13:59:45.000Z
|
2022-02-26T10:25:25.000Z
|
app/Http/Controllers/HomeController.php
|
alibek009/diplomaProject
|
7920cd3cc40709c5bad7d06a8843b43af3f0fe95
|
[
"MIT"
] | null | null | null |
<?php
namespace App\Http\Controllers;
use App\Course;
use Illuminate\Http\Request;
class HomeController extends Controller
{
public function index()
{
$purchased_courses = NULL;
if(\Auth::check()){
$purchased_courses = Course::whereHas('students',function($query){
$query->where('id',\Auth::id());
})->with('lessons')
->orderBy('id','desc')
->get();
}
$courses =Course::where('published',1)->orderBy('id','desc')->paginate(6);
$grades = Course::select('grade')->whereNotNull('grade')->groupBy('grade')->get();
return view('index',compact('courses','purchased_courses','grades'));
}
}
| 25.607143
| 90
| 0.567643
|
d4d9f6babb6c7727a32ba81de1dd45fe0aaa1e08
| 2,348
|
rs
|
Rust
|
rust/test/error/inc/basic_test.rs
|
Wandalen/wtools
|
d9e3bb7cf7423493f08dd65efc5fdc4799d1e2d5
|
[
"MIT"
] | null | null | null |
rust/test/error/inc/basic_test.rs
|
Wandalen/wtools
|
d9e3bb7cf7423493f08dd65efc5fdc4799d1e2d5
|
[
"MIT"
] | null | null | null |
rust/test/error/inc/basic_test.rs
|
Wandalen/wtools
|
d9e3bb7cf7423493f08dd65efc5fdc4799d1e2d5
|
[
"MIT"
] | null | null | null |
#![ allow( deprecated ) ]
#![ allow( unused_imports ) ]
use super::*;
//
#[ cfg( feature = "use_std" ) ]
tests_impls!
{
#[ test ]
fn basic()
{
use std::error::Error;
// test.case( "basic" );
let err1 = TheModule::BasicError::new( "Some error" );
a_id!( err1.to_string(), "Some error" );
a_id!( err1.description(), "Some error" );
a_id!( err1.msg(), "Some error" );
a_id!( format!( "err1 : {}", err1 ), "err1 : Some error" );
// test.case( "compare" );
let err1 = TheModule::BasicError::new( "Some error" );
let err2 = TheModule::BasicError::new( "Some error" );
a_id!( err1, err2 );
a_id!( err1.description(), err2.description() );
// test.case( "clone" );
let err1 = TheModule::BasicError::new( "Some error" );
let err2 = err1.clone();
a_id!( err1, err2 );
a_id!( err1.description(), err2.description() );
}
//
fn use1()
{
use std::error::Error as ErrorInterface;
use TheModule::BasicError as Error;
// test.case( "basic" );
let err1 = Error::new( "Some error" );
a_id!( err1.to_string(), "Some error" );
a_id!( err1.description(), "Some error" );
a_id!( err1.msg(), "Some error" );
a_id!( format!( "err1 : {}", err1 ), "err1 : Some error" );
}
//
fn use2()
{
use TheModule::{ BasicError, ErrorInterface };
// test.case( "basic" );
let err1 = BasicError::new( "Some error" );
a_id!( err1.to_string(), "Some error" );
a_id!( err1.description(), "Some error" );
a_id!( err1.msg(), "Some error" );
a_id!( format!( "err1 : {}", err1 ), "err1 : Some error" );
}
//
fn use3()
{
use std::error::Error;
// test.case( "basic" );
let err1 = TheModule::BasicError::new( "Some error" );
a_id!( err1.to_string(), "Some error" );
a_id!( err1.description(), "Some error" );
a_id!( err1.msg(), "Some error" );
a_id!( format!( "err1 : {}", err1 ), "err1 : Some error" );
}
//
fn err_basic()
{
// test.case( "basic" );
let err = TheModule::err!( "abc" );
a_id!( err.to_string(), "abc" );
// test.case( "with args" );
let err = TheModule::err!( "abc{}{}", "def", "ghi" );
a_id!( err.to_string(), "abcdefghi" );
}
}
//
#[ cfg( feature = "use_std" ) ]
tests_index!
{
basic,
use1,
use2,
use3,
err_basic,
}
| 20.068376
| 63
| 0.538756
|
29fc3dac16d2feb32c7278d4fccc600e8e9bdd87
| 874
|
lua
|
Lua
|
xboxone_gdk.lua
|
Blizzard/premake-consoles
|
bdc1563f82415b1edef30bef72dffb794ebaeb3e
|
[
"MIT"
] | 7
|
2017-10-13T17:23:59.000Z
|
2022-03-08T03:02:25.000Z
|
xboxone_gdk.lua
|
Blizzard/premake-consoles
|
bdc1563f82415b1edef30bef72dffb794ebaeb3e
|
[
"MIT"
] | null | null | null |
xboxone_gdk.lua
|
Blizzard/premake-consoles
|
bdc1563f82415b1edef30bef72dffb794ebaeb3e
|
[
"MIT"
] | 1
|
2021-11-24T04:40:56.000Z
|
2021-11-24T04:40:56.000Z
|
--
-- Xbox One GDK support for Visual Studio backend.
-- Copyright Blizzard Entertainment, Inc
--
--
-- Non-overrides
--
local p = premake
local vstudio = p.vstudio
local vc2010 = p.vstudio.vc2010
p.XBOXONE_GDK = "xboxone_gdk"
if vstudio.vs2010_architectures ~= nil then
vstudio.vs2010_architectures.xboxone_gdk = "Gaming.Xbox.XboxOne.x64"
p.api.addAllowed("system", p.XBOXONE_GDK)
os.systemTags[p.XBOXONE_GDK] = { "xboxone_gdk", "xboxone", "gdk", "xbox", "console" }
local osoption = p.option.get("os")
if osoption ~= nil then
table.insert(osoption.allowed, { p.XBOXONE_GDK, "Xbox One (GDK)" })
end
end
filter { "system:xboxone_gdk" }
architecture "x86_64"
filter { "system:xboxone_gdk", "kind:ConsoleApp or WindowedApp" }
targetextension ".exe"
filter { "system:xboxone_gdk", "kind:StaticLib" }
targetprefix ""
targetextension ".lib"
filter {}
| 21.85
| 86
| 0.713959
|
a9f8cb724a8adfe696dad2708731b5cb292e2528
| 119
|
php
|
PHP
|
app/Models/Tbl_deposit_type.php
|
hossainrkb/Showroom-Softwate-backend--laravel-frontend--react-
|
f359c62ceba431190b119b8e3e50b6fd2af97d08
|
[
"MIT"
] | null | null | null |
app/Models/Tbl_deposit_type.php
|
hossainrkb/Showroom-Softwate-backend--laravel-frontend--react-
|
f359c62ceba431190b119b8e3e50b6fd2af97d08
|
[
"MIT"
] | 4
|
2021-03-09T22:39:21.000Z
|
2022-02-26T19:55:20.000Z
|
app/Models/Tbl_deposit_type.php
|
hossainrkb/Showroom-Softwate-backend--laravel-frontend--react-
|
f359c62ceba431190b119b8e3e50b6fd2af97d08
|
[
"MIT"
] | null | null | null |
<?php
namespace App\Models;
use Illuminate\Database\Eloquent\Model;
class Tbl_deposit_type extends Model
{
//
}
| 10.818182
| 39
| 0.739496
|
38abf42dea8134cc3813d10d6179afff403800da
| 135
|
php
|
PHP
|
src/Domain/Label/Collections/LabelCollection.php
|
aprendiendonode/wings
|
ad28907367c2840477325ebac0f9b4e3e508d759
|
[
"MIT"
] | 1
|
2021-11-27T17:06:54.000Z
|
2021-11-27T17:06:54.000Z
|
src/Domain/Label/Collections/LabelCollection.php
|
aprendiendonode/wings
|
ad28907367c2840477325ebac0f9b4e3e508d759
|
[
"MIT"
] | null | null | null |
src/Domain/Label/Collections/LabelCollection.php
|
aprendiendonode/wings
|
ad28907367c2840477325ebac0f9b4e3e508d759
|
[
"MIT"
] | null | null | null |
<?php
namespace Domain\Label\Collections;
use Illuminate\Database\Eloquent\Collection;
class LabelCollection extends Collection
{
}
| 13.5
| 44
| 0.814815
|
7aaf67d41b8323e3a95a55252cba17bc44ab8fb0
| 658
|
cs
|
C#
|
source/Coop/Mod/Patch/Debugging.cs
|
masesk/BannerlordCoop
|
db312a5f06880086f5fffad27349760bb092fb42
|
[
"MIT"
] | 130
|
2020-05-18T08:34:40.000Z
|
2022-03-19T18:16:48.000Z
|
source/Coop/Mod/Patch/Debugging.cs
|
masesk/BannerlordCoop
|
db312a5f06880086f5fffad27349760bb092fb42
|
[
"MIT"
] | 166
|
2020-05-18T00:50:16.000Z
|
2022-03-12T20:05:32.000Z
|
source/Coop/Mod/Patch/Debugging.cs
|
masesk/BannerlordCoop
|
db312a5f06880086f5fffad27349760bb092fb42
|
[
"MIT"
] | 37
|
2020-06-04T07:39:45.000Z
|
2022-03-18T12:20:58.000Z
|
using System;
using Coop.Mod.DebugUtil;
using HarmonyLib;
using TaleWorlds.Engine;
using TaleWorlds.Library;
namespace Coop.Mod.Patch
{
public static class Debugging
{
public static IDebugManager DebugManager { get; } = new DebugManager();
[HarmonyPatch(typeof(MBDebug))]
[HarmonyPatch(nameof(MBDebug.Print))]
class PatchPrint
{
static bool Prefix(ref string message, int logLevel, TaleWorlds.Library.Debug.DebugColor color, ulong debugFilter)
{
DebugManager.Print(message, logLevel, color, debugFilter);
return false;
}
}
}
}
| 26.32
| 126
| 0.633739
|
ddd6d50ba52f3b489421e1ff365c7c8c9d880fc8
| 3,121
|
java
|
Java
|
dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemHelpdeskStrategy.java
|
Leonard107/repositorio-padrao
|
5a1087f2911a85c1709ae2176738e93dd697c1dc
|
[
"BSD-3-Clause"
] | 10
|
2015-08-26T14:22:21.000Z
|
2019-07-02T14:18:09.000Z
|
dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemHelpdeskStrategy.java
|
Leonard107/repositorio-padrao
|
5a1087f2911a85c1709ae2176738e93dd697c1dc
|
[
"BSD-3-Clause"
] | 690
|
2015-07-16T20:44:35.000Z
|
2022-03-14T17:42:44.000Z
|
dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemHelpdeskStrategy.java
|
Leonard107/repositorio-padrao
|
5a1087f2911a85c1709ae2176738e93dd697c1dc
|
[
"BSD-3-Clause"
] | 16
|
2017-01-12T09:30:45.000Z
|
2019-04-18T20:51:12.000Z
|
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.requestitem;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.content.Item;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.core.I18nUtil;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.service.EPersonService;
import org.springframework.beans.factory.annotation.Autowired;
import java.sql.SQLException;
/**
* RequestItem strategy to allow DSpace support team's helpdesk to receive requestItem request
* With this enabled, then the Item author/submitter doesn't receive the request, but the helpdesk instead does.
*
* Failover to the RequestItemSubmitterStrategy, which means the submitter would get the request if there is no specified helpdesk email.
*
* @author Sam Ottenhoff
* @author Peter Dietz
*/
public class RequestItemHelpdeskStrategy extends RequestItemSubmitterStrategy {
private Logger log = Logger.getLogger(RequestItemHelpdeskStrategy.class);
@Autowired(required = true)
protected EPersonService ePersonService;
public RequestItemHelpdeskStrategy() {}
@Override
public RequestItemAuthor getRequestItemAuthor(Context context, Item item) throws SQLException {
boolean helpdeskOverridesSubmitter = ConfigurationManager.getBooleanProperty("request.item.helpdesk.override", false);
String helpDeskEmail = ConfigurationManager.getProperty("mail.helpdesk");
if (helpdeskOverridesSubmitter && StringUtils.isNotBlank(helpDeskEmail)) {
return getHelpDeskPerson(context, helpDeskEmail);
} else {
//Fallback to default logic (author of Item) if helpdesk isn't fully enabled or setup
return super.getRequestItemAuthor(context, item);
}
}
/**
* Return a RequestItemAuthor object for the specified helpdesk email address.
* It makes an attempt to find if there is a matching eperson for the helpdesk address, to use the name,
* Otherwise it falls back to a helpdeskname key in the Messages.props.
* @param context context
* @param helpDeskEmail email
* @return RequestItemAuthor
* @throws SQLException if database error
*/
public RequestItemAuthor getHelpDeskPerson(Context context, String helpDeskEmail) throws SQLException{
EPerson helpdeskEPerson = null;
context.turnOffAuthorisationSystem();
helpdeskEPerson = ePersonService.findByEmail(context, helpDeskEmail);
context.restoreAuthSystemState();
if(helpdeskEPerson != null) {
return new RequestItemAuthor(helpdeskEPerson);
} else {
String helpdeskName = I18nUtil.getMessage(
"org.dspace.app.requestitem.RequestItemHelpdeskStrategy.helpdeskname",
context);
return new RequestItemAuthor(helpdeskName, helpDeskEmail);
}
}
}
| 39.506329
| 137
| 0.735662
|
cba0e6f3dd5dd3aaef100a799629a820ea391315
| 19,783
|
dart
|
Dart
|
lib/screens/chat_screen.dart
|
HagerRaafat/faculty_chat
|
e8eb55977a6d2466d32548538e86312a177ca118
|
[
"MIT"
] | null | null | null |
lib/screens/chat_screen.dart
|
HagerRaafat/faculty_chat
|
e8eb55977a6d2466d32548538e86312a177ca118
|
[
"MIT"
] | null | null | null |
lib/screens/chat_screen.dart
|
HagerRaafat/faculty_chat
|
e8eb55977a6d2466d32548538e86312a177ca118
|
[
"MIT"
] | null | null | null |
import 'package:cloud_firestore/cloud_firestore.dart';
import 'package:flutter/material.dart';
import 'package:flutter_chat_ui_starter/models/chat.dart';
import 'package:flutter_chat_ui_starter/models/message.dart';
import 'package:auto_direction/auto_direction.dart';
import 'package:flutter_chat_ui_starter/providers/auth_provider.dart';
import 'package:flutter_chat_ui_starter/screens/chat_information.dart';
import 'package:flutter_chat_ui_starter/services/cloud_storage_service.dart';
import 'package:flutter_chat_ui_starter/services/database_service.dart';
import 'package:flutter_chat_ui_starter/services/navigation_service.dart';
import 'package:timeago/timeago.dart' as timeago;
import 'package:provider/provider.dart';
import 'package:flutter_chat_ui_starter/services/media_service.dart';
// ignore: must_be_immutable
class ChatScreen extends StatefulWidget {
static const String id = 'chat_screen';
String chatID;
String receiverID;
String chatImage;
String chatName;
ChatScreen({this.chatID, this.receiverID, this.chatImage, this.chatName});
@override
_ChatScreenState createState() => _ChatScreenState();
}
class _ChatScreenState extends State<ChatScreen> {
int selectedIndex = 0;
final List<String> media = ['Chat', 'Media'];
final messageTextController = TextEditingController();
AuthProvider _auth;
String messageText;
bool isRTL = false;
String text = '';
Widget generateItem(String imageURL) {
DecorationImage _image =
DecorationImage(image: NetworkImage(imageURL), fit: BoxFit.cover);
return Padding(
padding: EdgeInsets.all(3.0),
child: Container(
decoration: BoxDecoration(
borderRadius: BorderRadius.all(Radius.circular(10.0)),
color: Color(0xFF125589),
),
padding: EdgeInsets.all(3.0),
child: Container(
height: 160.0,
width: 121.0,
decoration: BoxDecoration(
borderRadius: BorderRadius.circular(10),
image: _image,
),
),
),
);
}
List<Widget> imageContainerList(int count, String imageURL) {
List<Widget> items = [];
for (int i = 0; i < count; i++) {
items.add(generateItem(imageURL));
}
return items;
}
_buildMessage(String message, bool isMe, Timestamp time, String sender) {
return StreamBuilder(
stream: DatabaseService.instance.getUserData(sender),
builder: (_context, _snapshot) {
var _data = _snapshot.data;
return Column(
crossAxisAlignment:
isMe ? CrossAxisAlignment.end : CrossAxisAlignment.start,
children: [
Container(
decoration: BoxDecoration(
borderRadius: BorderRadius.all(Radius.circular(10.0)),
color: isMe ? Theme.of(context).accentColor : Color(0xFFF3F7FA),
),
margin: isMe
? EdgeInsets.only(
left: 150.0,
right: 20.0,
top: 10.0,
bottom: 10.0,
)
: EdgeInsets.only(
right: 150.0,
left: 20.0,
top: 10.0,
bottom: 10.0,
),
padding: EdgeInsets.all(10.0),
child: Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
isMe
? Container(
width: 0.0,
)
: Text(
_data != null ? _data.name : '',
style: TextStyle(
fontSize: 15.0,
color: Colors.indigoAccent,
fontWeight: FontWeight.bold,
),
),
SizedBox(
height: 2.0,
),
AutoDirection(
onDirectionChange: (isRTL) {
setState(() {
this.isRTL = isRTL;
});
},
text: message,
child: Text(
message,
style: TextStyle(
color: isMe ? Colors.white : Colors.black,
fontSize: 15.0,
),
),
),
],
),
),
Container(
margin: isMe
? EdgeInsets.only(
left: 150.0,
right: 20.0,
)
: EdgeInsets.only(
right: 150.0,
left: 20.0,
),
child: Row(
mainAxisAlignment:
isMe ? MainAxisAlignment.end : MainAxisAlignment.start,
children: [
Text(
timeago.format(time.toDate()),
style: TextStyle(
color: Colors.black54,
fontSize: 12.0,
),
),
],
),
),
],
);
},
);
}
_buildImageMessage(
bool isMe, String imageURL, Timestamp time, String sender) {
DecorationImage _image =
DecorationImage(image: NetworkImage(imageURL), fit: BoxFit.cover);
return StreamBuilder(
stream: DatabaseService.instance.getUserData(sender),
builder: (_context, _snapshot) {
var _data = _snapshot.data;
return Column(
crossAxisAlignment:
isMe ? CrossAxisAlignment.end : CrossAxisAlignment.start,
children: [
Container(
decoration: BoxDecoration(
borderRadius: BorderRadius.all(Radius.circular(10.0)),
color: isMe ? Theme.of(context).accentColor : Color(0xFFF3F7FA),
),
margin: isMe
? EdgeInsets.only(
left: 150.0,
right: 20.0,
top: 10.0,
bottom: 10.0,
)
: EdgeInsets.only(
right: 150.0,
left: 20.0,
top: 10.0,
bottom: 10.0,
),
padding: EdgeInsets.all(3.0),
child: Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: <Widget>[
Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
isMe
? Container(
width: 0.0,
)
: Padding(
padding: const EdgeInsets.all(5.0),
child: Text(
_data != null ? _data.name : '',
style: TextStyle(
fontSize: 15.0,
color: Colors.indigoAccent,
fontWeight: FontWeight.bold,
),
),
),
Container(
height: MediaQuery.of(context).size.height * 0.30,
width: MediaQuery.of(context).size.width * 0.40,
decoration: BoxDecoration(
borderRadius: BorderRadius.circular(10),
image: _image,
),
),
],
),
],
),
),
Container(
margin: isMe
? EdgeInsets.only(
left: 150.0,
right: 20.0,
)
: EdgeInsets.only(
right: 150.0,
left: 20.0,
),
child: Row(
mainAxisAlignment:
isMe ? MainAxisAlignment.end : MainAxisAlignment.start,
children: [
Text(
timeago.format(time.toDate()),
style: TextStyle(
color: Colors.black54,
fontSize: 12.0,
),
),
],
),
),
],
);
},
);
}
_buildMessageComposer() {
return Container(
padding: EdgeInsets.symmetric(horizontal: 8.0),
child: Row(
mainAxisAlignment: MainAxisAlignment.end,
crossAxisAlignment: CrossAxisAlignment.end,
children: [
Expanded(
child: AutoDirection(
onDirectionChange: (isRTL) {
setState(() {
this.isRTL = isRTL;
});
},
text: text,
child: Container(
padding: EdgeInsets.only(left: 10.0),
margin: EdgeInsets.only(left: 11.0, top: 11.0, bottom: 11.0),
decoration: BoxDecoration(
color: Color(0xFFF3F7FA),
borderRadius: BorderRadius.all(Radius.circular(10.0)),
),
child: Padding(
padding: EdgeInsets.all(7.5),
child: TextField(
controller: messageTextController,
onChanged: (value) {
setState(() {
text = value;
});
},
keyboardType: TextInputType.multiline,
maxLines: null,
textCapitalization: TextCapitalization.sentences,
cursorColor: Colors.blueGrey,
decoration: InputDecoration(
isDense: true,
border: InputBorder.none,
hintText: 'Type a message',
hintStyle: TextStyle(color: Colors.grey),
),
),
),
),
),
),
Container(
margin: EdgeInsets.only(
//right: 11.0,
top: 11.0,
bottom: 11.0,
),
decoration: BoxDecoration(
borderRadius: BorderRadius.only(
topRight: Radius.circular(10.0),
bottomRight: Radius.circular(10.0),
),
),
child: Padding(
padding: EdgeInsets.all(10.0),
child: GestureDetector(
child: Icon(
Icons.camera_alt_rounded,
size: 30.0,
color: Theme.of(context).accentColor,
),
onTap: () async {
var _image =
await MediaService.instance.getImageFromLibrary();
if (_image != null) {
var _result = await CloudStorageService.instance
.uploadMediaMessage(_auth.user.uid, _image);
var _imageURL = await _result.ref.getDownloadURL();
await DatabaseService.instance.sendMessage(
this.widget.chatID,
Message(
message: _imageURL,
senderID: _auth.user.uid,
time: Timestamp.now(),
type: MessageType.Image),
);
}
},
),
),
),
Container(
padding: EdgeInsets.only(top: 2.0),
margin: EdgeInsets.only(top: 11.0, bottom: 11.0),
decoration: BoxDecoration(
borderRadius: BorderRadius.all(Radius.circular(10.0)),
),
child: IconButton(
icon: Icon(Icons.send_rounded),
iconSize: 30.0,
color: Theme.of(context).accentColor,
onPressed: () {
messageTextController.clear();
DatabaseService.instance.sendMessage(
this.widget.chatID,
Message(
message: text,
time: Timestamp.now(),
senderID: _auth.user.uid,
type: MessageType.Text,
),
);
setState(() {
text = '';
});
},
),
),
],
),
);
}
@override
Widget build(BuildContext context) {
return Scaffold(
backgroundColor: Colors.white,
appBar: AppBar(
elevation: 0.0,
backgroundColor: Theme.of(context).accentColor,
automaticallyImplyLeading: false,
title: Padding(
padding: EdgeInsets.only(top: 5.0),
child: Container(
child: Row(
children: [
GestureDetector(
child: Icon(
Icons.arrow_back,
color: Colors.white,
),
onTap: () {
Navigator.pop(context);
},
),
Padding(
padding: EdgeInsets.all(8.0),
child: CircleAvatar(
radius: 20.0,
backgroundImage: NetworkImage(this.widget.chatImage),
),
),
GestureDetector(
onTap: () {
NavigationService.instance.navigateToRoute(
MaterialPageRoute(
builder: (BuildContext _context) {
return ChatInformation(
chatID: this.widget.chatID,
chatImage: this.widget.chatImage,
chatName: this.widget.chatName,
);
},
),
);
},
child: Container(
width: MediaQuery.of(context).size.width * 0.5,
child: Text(
this.widget.chatName,
style: TextStyle(
color: Colors.white,
fontSize: 20.0,
fontWeight: FontWeight.bold,
),
overflow: TextOverflow.ellipsis,
),
),
),
],
),
),
),
actions: [
IconButton(
icon: Icon(Icons.more_vert),
iconSize: 40.0,
color: Colors.white,
onPressed: () {},
),
],
),
body: ChangeNotifierProvider<AuthProvider>.value(
value: AuthProvider.instance,
child: SafeArea(
child: GestureDetector(
onTap: () => FocusScope.of(context).unfocus(),
child: Column(
children: [
Container(
decoration: BoxDecoration(
color: Theme.of(context).accentColor,
borderRadius: BorderRadius.only(
bottomLeft: Radius.circular(15.0),
bottomRight: Radius.circular(15.0),
),
),
height: 70.0,
child: ListView.builder(
scrollDirection: Axis.horizontal,
itemCount: 2,
itemBuilder: (BuildContext context, int index) {
return GestureDetector(
onTap: () {
setState(() {
selectedIndex = index;
});
},
child: Padding(
padding: EdgeInsets.only(
right: 8.5,
top: 17.0,
bottom: 13.0,
left: 8.5,
),
child: Container(
width: 120,
decoration: BoxDecoration(
color:
index == selectedIndex ? Colors.white : null,
borderRadius:
BorderRadius.all(Radius.circular(10.0)),
),
alignment: Alignment.center,
child: Text(
media[index],
style: TextStyle(
color: index == selectedIndex
? Color(0xFF125589)
: Colors.white,
fontSize: 17.0,
fontWeight: FontWeight.w600),
),
),
),
);
},
),
),
Builder(
builder: (BuildContext _context) {
_auth = Provider.of<AuthProvider>(_context);
return Expanded(
child: StreamBuilder<Chat>(
stream: DatabaseService.instance
.getChat(this.widget.chatID),
builder: (BuildContext _context, _snapshot) {
var _chatData = _snapshot.data;
if (_chatData != null) {
return ListView.builder(
reverse: true,
shrinkWrap: true,
itemCount: _chatData.messages.length,
itemBuilder: (BuildContext context, int _index) {
var reverse =
List.from(_chatData.messages.reversed);
var _message = reverse[_index];
bool isMe = _message.senderID == _auth.user.uid;
return _message.type == MessageType.Text
? _buildMessage(_message.message, isMe,
_message.time, _message.senderID)
: _buildImageMessage(isMe, _message.message,
_message.time, _message.senderID);
},
);
} else {
return Center(
child: CircularProgressIndicator(
backgroundColor: Theme.of(context).accentColor,
),
);
}
},
),
);
},
),
_buildMessageComposer(),
],
),
),
),
),
);
}
}
| 35.77396
| 80
| 0.405854
|
9e2a4a38ac5260cdc328953a625b16ed41e4da16
| 339
|
cs
|
C#
|
Assets/_Scripts/Tags/Tag.cs
|
ozdeger/BulletHell
|
ca9b59a5added651878ab7761ad2dfa89d62cb95
|
[
"MIT"
] | null | null | null |
Assets/_Scripts/Tags/Tag.cs
|
ozdeger/BulletHell
|
ca9b59a5added651878ab7761ad2dfa89d62cb95
|
[
"MIT"
] | 3
|
2021-05-10T22:17:07.000Z
|
2021-05-27T17:14:26.000Z
|
Assets/_Scripts/Tags/Tag.cs
|
ozdeger/BulletHell
|
ca9b59a5added651878ab7761ad2dfa89d62cb95
|
[
"MIT"
] | 1
|
2021-08-12T15:20:09.000Z
|
2021-08-12T15:20:09.000Z
|
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public enum Tags {
None,
Item,
Obstacle,
Enemy,
Friendly,
Player,
EnemyBullet,
FriendlyBullet,
PlayerBullet,
}
public class Tag : MonoBehaviour
{
[SerializeField] List<Tags> tags;
public List<Tags> Tags => tags;
}
| 15.409091
| 37
| 0.672566
|
af93e031b575f53e9e8a2efce8d1de3819e957dd
| 969
|
py
|
Python
|
buildroot/support/testing/tests/package/test_python_txaio.py
|
TonyApuzzo/hassos
|
bb201fb84209a1bb5cf0611bd09e3610701d737d
|
[
"Apache-2.0"
] | 1
|
2019-02-12T06:53:47.000Z
|
2019-02-12T06:53:47.000Z
|
buildroot/support/testing/tests/package/test_python_txaio.py
|
berg/hassos
|
30b599acc6fda01e6a07181d01e8e03b365424f4
|
[
"Apache-2.0"
] | null | null | null |
buildroot/support/testing/tests/package/test_python_txaio.py
|
berg/hassos
|
30b599acc6fda01e6a07181d01e8e03b365424f4
|
[
"Apache-2.0"
] | null | null | null |
from tests.package.test_python import TestPythonBase
class TestPythonPy2Txaio(TestPythonBase):
config = TestPythonBase.config + \
"""
BR2_PACKAGE_PYTHON=y
BR2_PACKAGE_PYTHON_TXAIO=y
BR2_PACKAGE_PYTHON_TWISTED=y
"""
def test_run(self):
self.login()
cmd = self.interpreter + " -c 'import txaio;"
cmd += "txaio.use_twisted();"
cmd += "f0 = txaio.create_future()'"
_, exit_code = self.emulator.run(cmd)
self.assertEqual(exit_code, 0)
class TestPythonPy3Txaio(TestPythonBase):
config = TestPythonBase.config + \
"""
BR2_PACKAGE_PYTHON3=y
BR2_PACKAGE_PYTHON_TXAIO=y
"""
def test_run(self):
self.login()
cmd = self.interpreter + " -c 'import txaio;"
cmd += "txaio.use_asyncio();"
cmd += "f0 = txaio.create_future()'"
_, exit_code = self.emulator.run(cmd)
self.assertEqual(exit_code, 0)
| 27.685714
| 53
| 0.605779
|
b0579df4ef61ce528e27c80006c4445a45963787
| 385
|
py
|
Python
|
app/token.py
|
makstsar17/ToDoBackend
|
06d99a1879387f6b57a7c3c250f0edb003d976ba
|
[
"MIT"
] | null | null | null |
app/token.py
|
makstsar17/ToDoBackend
|
06d99a1879387f6b57a7c3c250f0edb003d976ba
|
[
"MIT"
] | null | null | null |
app/token.py
|
makstsar17/ToDoBackend
|
06d99a1879387f6b57a7c3c250f0edb003d976ba
|
[
"MIT"
] | null | null | null |
from flask_jwt_extended import JWTManager
import redis
jwt = JWTManager()
jwt_redis_blocklist = redis.StrictRedis(
host="localhost", port=6379, db=0, decode_responses=True
)
@jwt.token_in_blocklist_loader
def check_if_token_is_revoked(jwt_header, jwt_payload):
jti = jwt_payload["jti"]
token_in_redis = jwt_redis_blocklist.get(jti)
return token_in_redis is not None
| 25.666667
| 60
| 0.787013
|
dd9200dbac343ba505b9f8f7bfe96081b1c9dfa6
| 2,879
|
java
|
Java
|
src/main/java/me/earth/phobos/mixin/mixins/MixinBlockModelRenderer.java
|
notperry1234567890/phobos-1.3.3
|
d915e794075190b8349e23c3fabc09df18afde93
|
[
"MIT"
] | 4
|
2021-01-11T07:28:55.000Z
|
2021-06-22T03:03:24.000Z
|
src/main/java/me/earth/phobos/mixin/mixins/MixinBlockModelRenderer.java
|
notperry1234567890/phobos-1.3.3
|
d915e794075190b8349e23c3fabc09df18afde93
|
[
"MIT"
] | null | null | null |
src/main/java/me/earth/phobos/mixin/mixins/MixinBlockModelRenderer.java
|
notperry1234567890/phobos-1.3.3
|
d915e794075190b8349e23c3fabc09df18afde93
|
[
"MIT"
] | 1
|
2021-09-07T06:18:30.000Z
|
2021-09-07T06:18:30.000Z
|
package me.earth.phobos.mixin.mixins;
import me.earth.phobos.features.modules.render.XRay;
import net.minecraft.block.state.IBlockState;
import net.minecraft.client.renderer.BlockModelRenderer;
import net.minecraft.client.renderer.BufferBuilder;
import net.minecraft.client.renderer.block.model.IBakedModel;
import net.minecraft.util.math.BlockPos;
import net.minecraft.world.IBlockAccess;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Inject;
import org.spongepowered.asm.mixin.injection.ModifyArg;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfoReturnable;
@Mixin(BlockModelRenderer.class)
public class MixinBlockModelRenderer {
@Inject(method = "renderModel", at = @At("HEAD"), cancellable = true)
private void renderModelHook(IBlockAccess blockAccess, IBakedModel bakedModel, IBlockState blockState, BlockPos blockPos, BufferBuilder bufferBuilder, boolean b, CallbackInfoReturnable<Boolean> info) {
try {
if (XRay.getInstance().isOn() && !XRay.getInstance().shouldRender(blockState.getBlock())) {
info.setReturnValue(false);
info.cancel();
}
} catch (Exception ignored) {}
}
@ModifyArg(method = "renderModel(Lnet/minecraft/world/IBlockAccess;Lnet/minecraft/client/renderer/block/model/IBakedModel;Lnet/minecraft/block/state/IBlockState;Lnet/minecraft/util/math/BlockPos;Lnet/minecraft/client/renderer/BufferBuilder;ZJ)Z", at = @At(value = "INVOKE", target = "Lnet/minecraft/client/renderer/BlockModelRenderer;renderModelFlat(Lnet/minecraft/world/IBlockAccess;Lnet/minecraft/client/renderer/block/model/IBakedModel;Lnet/minecraft/block/state/IBlockState;Lnet/minecraft/util/math/BlockPos;Lnet/minecraft/client/renderer/BufferBuilder;ZJ)Z"))
private boolean renderModelFlatHook(boolean input) {
try {
if (XRay.getInstance().isOn()) {
return false;
}
} catch (Exception ignored) {}
return input;
}
@ModifyArg(method = { "renderModel(Lnet/minecraft/world/IBlockAccess;Lnet/minecraft/client/renderer/block/model/IBakedModel;Lnet/minecraft/block/state/IBlockState;Lnet/minecraft/util/math/BlockPos;Lnet/minecraft/client/renderer/BufferBuilder;ZJ)Z" }, at = @At(value = "INVOKE", target = "Lnet/minecraft/client/renderer/BlockModelRenderer;renderModelSmooth(Lnet/minecraft/world/IBlockAccess;Lnet/minecraft/client/renderer/block/model/IBakedModel;Lnet/minecraft/block/state/IBlockState;Lnet/minecraft/util/math/BlockPos;Lnet/minecraft/client/renderer/BufferBuilder;ZJ)Z"))
private boolean renderModelSmoothHook(boolean input) {
try {
if (XRay.getInstance().isOn()) {
return false;
}
} catch (Exception ignored) {}
return input;
}
}
| 58.755102
| 574
| 0.747482
|
dbb95e36ac8c6823d76e26b2cfe0da304f7e4376
| 7,013
|
php
|
PHP
|
application/im/clr6.php
|
pakistanlmis/requisition-management
|
d7bac2d8116def2f2d6fb3b1131debdd5f9e218f
|
[
"MIT",
"Unlicense"
] | 2
|
2019-05-24T10:15:42.000Z
|
2020-10-06T19:22:35.000Z
|
application/im/clr6.php
|
pakistanlmis/requisition-management
|
d7bac2d8116def2f2d6fb3b1131debdd5f9e218f
|
[
"MIT",
"Unlicense"
] | null | null | null |
application/im/clr6.php
|
pakistanlmis/requisition-management
|
d7bac2d8116def2f2d6fb3b1131debdd5f9e218f
|
[
"MIT",
"Unlicense"
] | null | null | null |
<?php
/**
* cls6
* @package im
*
* @author Ajmal Hussain
* @email <ahussain@ghsc-psm.org>
*
* @version 2.2
*
*/
//Including AllClasses file
include("../includes/classes/AllClasses.php");
//Including header file
include(PUBLIC_PATH . "html/header.php");
//echo '<pre>';print_r($_SESSION);exit;
$where = " 1=1 ";
if(!empty($_SESSION['user_level']) && $_SESSION['user_level'] == '1')
{
$where .= " AND tbl_warehouse.stkid = ".$_SESSION['user_stakeholder1'];
}
if(!empty($_SESSION['user_level']) && $_SESSION['user_level'] == '2')
{
$where .= " AND tbl_warehouse.stkid = ".$_SESSION['user_stakeholder1'];
$where .= " AND tbl_warehouse.prov_id = ".$_SESSION['user_province1'];
}
if(!empty($_SESSION['user_level']) && $_SESSION['user_level'] == '3')
{
$where .= " AND tbl_warehouse.stkid = ".$_SESSION['user_stakeholder1'];
$where .= " AND tbl_warehouse.dist_id = ".$_SESSION['user_district'];
}
//Query
$qry = "SELECT
CONCAT(DATE_FORMAT(clr_master.date_from, '%b-%Y'),' to ',DATE_FORMAT(clr_master.date_to, '%b-%Y')) AS duration,
DATE_FORMAT(clr_master.requested_on,'%d/%m/%Y') AS requested_on,
MONTH (clr_master.date_to) AS clrMonth,
YEAR (clr_master.date_to) AS clrYear,
clr_master.pk_id,
clr_master.wh_id,
clr_master.approval_status,
clr_master.requisition_num,
DATE_FORMAT(clr_master.date_from, '%b-%Y') as date_from,
DATE_FORMAT(clr_master.date_to, '%b-%Y') as date_to
FROM
clr_master
INNER JOIN tbl_warehouse ON clr_master.wh_id = tbl_warehouse.wh_id
WHERE
".$where."
GROUP BY
clr_master.date_from
ORDER BY
clr_master.requisition_num DESC,
clrYear DESC,
clrMonth DESC";
$qryRes = mysql_query($qry);
$num = mysql_num_rows($qryRes);
?>
<style>
body {
margin: 0px !important;
font-family: Arial, Helvetica, sans-serif;
}
table#myTable {
margin-top: 20px;
border-collapse: collapse;
border-spacing: 0;
}
table#myTable tr td, table#myTable tr th {
font-size: 13px;
padding-left: 5px;
text-align: left;
border: 1px solid #999;
}
table#myTable tr td.TAR {
text-align: right;
padding: 5px;
width: 50px !important;
}
.sb1NormalFont {
color: #444444;
font-family: Verdana, Arial, Helvetica, sans-serif;
font-size: 13px;
font-weight: bold;
text-decoration: none;
}
p {
margin-bottom: 5px;
font-size: 13px !important;
line-height: 1 !important;
padding: 0 !important;
}
table#headerTable tr td {
font-size: 13px;
}
</style>
</head><!-- END HEAD -->
<body class="page-header-fixed page-quick-sidebar-over-content">
<!-- BEGIN HEADER -->
<div class="page-container">
<?php
//Including top file
include PUBLIC_PATH . "html/top.php";
//Including top_im file
include PUBLIC_PATH . "html/top_im.php";
?>
<div class="page-content-wrapper">
<div class="page-content">
<!-- BEGIN PAGE HEADER-->
<div class="row">
<div class="col-md-12">
<div class="widget" data-toggle="collapse-widget">
<div class="widget-head">
<h3 class="heading">View Requisition</h3>
</div>
<div class="widget-body">
<table width="100%" align="center" cellspacing="3" cellpadding="5" id="myTable" class="table table-bordered table-hover">
<thead>
<tr>
<th width="80">Sr. No.</th>
<th>Requisition Number</th>
<th>From</th>
<th>To</th>
<th>Requested On</th>
<th>Status</th>
<th width="50" style="text-align:center;">Action</th>
</tr>
</thead>
<tbody>
<?php
if ($num > 0) {
$counter = 1;
while ($row = mysql_fetch_array($qryRes)) {
?>
<tr>
<td style="text-align:center;"><?php echo $counter++; ?></td>
<td><?php echo $row['requisition_num']; ?></td>
<td><?php echo $row['date_from']; ?></td>
<td><?php echo $row['date_to']; ?></td>
<td><?php echo $row['requested_on']; ?></td>
<td><?php echo str_replace("_", " ", $row['approval_status']); ?></td>
<td style="text-align:center;"><a href="clr_view.php?id=<?php echo $row['pk_id']; ?>&wh_id=<?php echo $row['wh_id']; ?>">View</a></td>
</tr>
<?php
}
} else {
echo '<tr><td colspan="4">No record found</td></tr>';
}
?>
</tbody>
</table>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
<?php
//Including footer file
include PUBLIC_PATH . "/html/footer.php";
?>
<script>
$(function () {
$('#sector').change(function (e) {
var val = $('#sector').val();
getStakeholder(val, '');
});
getStakeholder('<?php echo $rptType; ?>', '<?php echo $sel_stk; ?>');
})
</script>
<?php
if (isset($_REQUEST['e']) && $_REQUEST['e'] == '1') {
?>
<script>
var self = $('[data-toggle="notyfy"]');
notyfy({
force: true,
text: 'CLR-6 is successfully saved',
type: 'success',
layout: self.data('layout')
});
</script>
<?php } ?>
<!-- END JAVASCRIPTS -->
</body>
<!-- END BODY -->
</html>
| 35.598985
| 186
| 0.425353
|
a36f90b8f929a2d0782a5d0ac9e70cc956f50cfe
| 15,383
|
java
|
Java
|
src/main/java/org/jgroups/protocols/kubernetes/KUBE_PING.java
|
rhusar/jgroups-kubernetes
|
cf79f55afa5a6de3be8cb3078d133923d6aeb9b6
|
[
"Apache-2.0"
] | 65
|
2016-09-26T15:36:05.000Z
|
2022-03-30T00:36:27.000Z
|
src/main/java/org/jgroups/protocols/kubernetes/KUBE_PING.java
|
rhusar/jgroups-kubernetes
|
cf79f55afa5a6de3be8cb3078d133923d6aeb9b6
|
[
"Apache-2.0"
] | 68
|
2016-07-07T14:52:00.000Z
|
2022-03-28T14:40:29.000Z
|
src/main/java/org/jgroups/protocols/kubernetes/KUBE_PING.java
|
rhusar/jgroups-kubernetes
|
cf79f55afa5a6de3be8cb3078d133923d6aeb9b6
|
[
"Apache-2.0"
] | 34
|
2016-07-07T11:18:54.000Z
|
2021-12-08T12:37:49.000Z
|
package org.jgroups.protocols.kubernetes;
import org.jgroups.*;
import org.jgroups.annotations.MBean;
import org.jgroups.annotations.ManagedOperation;
import org.jgroups.annotations.Property;
import org.jgroups.conf.ClassConfigurator;
import org.jgroups.protocols.Discovery;
import org.jgroups.protocols.PingData;
import org.jgroups.protocols.PingHeader;
import org.jgroups.protocols.kubernetes.stream.CertificateStreamProvider;
import org.jgroups.protocols.kubernetes.stream.StreamProvider;
import org.jgroups.protocols.kubernetes.stream.TokenStreamProvider;
import org.jgroups.stack.IpAddress;
import org.jgroups.util.NameCache;
import org.jgroups.util.Responses;
import java.util.*;
import java.util.stream.Collectors;
import static org.jgroups.protocols.kubernetes.Utils.readFileToString;
/**
* Kubernetes based discovery protocol. Uses the Kubernetes master to fetch the IP addresses of all pods that have
* been created, then pings each pods separately. The ports are defined by bind_port in TP plus port_range.
* @author <a href="mailto:ales.justin@jboss.org">Ales Justin</a>
* @author Sebastian Łaskawiec
* @author Bela Ban
* @author Radoslav Husar
*/
@MBean(description="Kubernetes based discovery protocol")
public class KUBE_PING extends Discovery {
protected static final short KUBERNETES_PING_ID=2017;
static {
ClassConfigurator.addProtocol(KUBERNETES_PING_ID, KUBE_PING.class);
}
@Property(description="Number of additional ports to be probed for membership. A port_range of 0 does not " +
"probe additional ports. Example: initial_hosts=A[7800] port_range=0 probes A:7800, port_range=1 probes " +
"A:7800 and A:7801")
protected int port_range=1;
@Property(description="Max time (in millis) to wait for a connection to the Kubernetes server. If exceeded, " +
"an exception will be thrown", systemProperty="KUBERNETES_CONNECT_TIMEOUT")
protected int connectTimeout=5000;
@Property(description="Max time (in millis) to wait for a response from the Kubernetes server",
systemProperty="KUBERNETES_READ_TIMEOUT")
protected int readTimeout=30000;
@Property(description="Max number of attempts to send discovery requests", systemProperty="KUBERNETES_OPERATION_ATTEMPTS")
protected int operationAttempts=3;
@Property(description="Time (in millis) between operation attempts", systemProperty="KUBERNETES_OPERATION_SLEEP")
protected long operationSleep=1000;
@Property(description="https (default) or http. Used to send the initial discovery request to the Kubernetes server",
systemProperty="KUBERNETES_MASTER_PROTOCOL")
protected String masterProtocol="https";
@Property(description="The URL of the Kubernetes server", systemProperty="KUBERNETES_SERVICE_HOST")
protected String masterHost;
@Property(description="The port on which the Kubernetes server is listening", systemProperty="KUBERNETES_SERVICE_PORT")
protected int masterPort;
@Property(description="The version of the protocol to the Kubernetes server", systemProperty="KUBERNETES_API_VERSION")
protected String apiVersion="v1";
@Property(description="namespace", systemProperty={"KUBERNETES_NAMESPACE", "OPENSHIFT_KUBE_PING_NAMESPACE"})
protected String namespace="default";
@Property(description="The labels to use in the discovery request to the Kubernetes server",
systemProperty={"KUBERNETES_LABELS", "OPENSHIFT_KUBE_PING_LABELS"})
protected String labels;
@Property(description="Certificate to access the Kubernetes server", systemProperty="KUBERNETES_CLIENT_CERTIFICATE_FILE")
protected String clientCertFile;
@Property(description="Client key file (store)", systemProperty="KUBERNETES_CLIENT_KEY_FILE")
protected String clientKeyFile;
@Property(description="The password to access the client key store", systemProperty="KUBERNETES_CLIENT_KEY_PASSWORD")
protected String clientKeyPassword;
@Property(description="The algorithm used by the client", systemProperty="KUBERNETES_CLIENT_KEY_ALGO")
protected String clientKeyAlgo="RSA";
@Property(description = "Location of certificate bundle used to verify the serving certificate of the apiserver. If the specified file is unavailable, "
+ "a warning message is issued.", systemProperty = "KUBERNETES_CA_CERTIFICATE_FILE")
protected String caCertFile="/var/run/secrets/kubernetes.io/serviceaccount/ca.crt";
@Property(description="Token file", systemProperty="SA_TOKEN_FILE")
protected String saTokenFile="/var/run/secrets/kubernetes.io/serviceaccount/token";
@Property(description="Dumps all discovery requests and responses to the Kubernetes server to stdout when true")
protected boolean dump_requests;
@Property(description="The standard behavior during Rolling Update is to put all Pods in the same cluster. In" +
" cases (application level incompatibility) this causes problems. One might decide to split clusters to" +
" 'old' and 'new' during that process", systemProperty="KUBERNETES_SPLIT_CLUSTERS_DURING_ROLLING_UPDATE")
protected boolean split_clusters_during_rolling_update;
@Property(description="Introduces similar behaviour to Kubernetes Services (using DNS) with publishNotReadyAddresses set to true. " +
"By default it's true", systemProperty="KUBERNETES_USE_NOT_READY_ADDRESSES")
protected boolean useNotReadyAddresses = true;
protected Client client;
protected int tp_bind_port;
private boolean failedJsonErrorReported = false;
public boolean isDynamic() {
return false; // bind_port in the transport needs to be fixed (cannot be 0)
}
public void setMasterHost(String masterMost) {
this.masterHost=masterMost;
}
public void setMasterPort(int masterPort) {
this.masterPort=masterPort;
}
public void setNamespace(String namespace) {
this.namespace=namespace;
}
protected boolean isClusteringEnabled() {
return namespace != null;
}
public void init() throws Exception {
super.init();
tp_bind_port=transport.getBindPort();
if(tp_bind_port <= 0)
throw new IllegalArgumentException(String.format("%s only works with %s.bind_port > 0",
KUBE_PING.class.getSimpleName(), transport.getClass().getSimpleName()));
checkDeprecatedProperties();
if(namespace == null) {
log.warn("namespace not set; clustering disabled");
return; // no further initialization necessary
}
log.info("namespace %s set; clustering enabled", namespace);
Map<String,String> headers=new HashMap<>();
StreamProvider streamProvider;
if(clientCertFile != null) {
if(masterProtocol == null)
masterProtocol="http";
streamProvider=new CertificateStreamProvider(clientCertFile, clientKeyFile, clientKeyPassword, clientKeyAlgo, caCertFile);
}
else {
String saToken=readFileToString(saTokenFile);
if(saToken != null) {
// curl -k -H "Authorization: Bearer $(cat /var/run/secrets/kubernetes.io/serviceaccount/token)" \
// https://172.30.0.2:443/api/v1/namespaces/dward/pods?labelSelector=application%3Deap-app
headers.put("Authorization", "Bearer " + saToken);
}
streamProvider = new TokenStreamProvider(saToken, caCertFile);
}
String url=String.format("%s://%s:%s/api/%s", masterProtocol, masterHost, masterPort, apiVersion);
client=new Client(url, headers, connectTimeout, readTimeout, operationAttempts, operationSleep, streamProvider, log);
log.debug("KubePING configuration: " + toString());
}
private void checkDeprecatedProperties() {
checkDeprecatedProperty("KUBERNETES_NAMESPACE", "OPENSHIFT_KUBE_PING_NAMESPACE");
checkDeprecatedProperty("KUBERNETES_LABELS", "OPENSHIFT_KUBE_PING_LABELS");
}
private void checkDeprecatedProperty(String property_name, String deprecated_name) {
boolean propertyDefined = isPropertyDefined(property_name);
boolean deprecatedDefined = isPropertyDefined(deprecated_name);
if (propertyDefined && deprecatedDefined)
log.warn("Both %s and %s are defined, %s is deprecated so please remove it", property_name, deprecated_name, deprecated_name);
else if (deprecatedDefined)
log.warn("%s is deprecated, please remove it and use %s instead", deprecated_name, property_name);
}
private static boolean isPropertyDefined(String property_name) {
return System.getProperty(property_name) != null
|| System.getenv(property_name) != null;
}
private PhysicalAddress getCurrentPhysicalAddress(Address addr) {
return (PhysicalAddress)down(new Event(Event.GET_PHYSICAL_ADDRESS, addr));
}
public void findMembers(List<Address> members, boolean initial_discovery, Responses responses) {
List<Pod> hosts=readAll();
List<PhysicalAddress> cluster_members=new ArrayList<>(hosts != null? hosts.size() : 16);
PhysicalAddress physical_addr=null;
PingData data=null;
physical_addr = getCurrentPhysicalAddress(local_addr);
// https://issues.jboss.org/browse/JGRP-1670
data=new PingData(local_addr, false, NameCache.get(local_addr), physical_addr);
if(members != null && members.size() <= max_members_in_discovery_request)
data.mbrs(members);
if(hosts != null) {
if(log.isTraceEnabled())
log.trace("%s: hosts fetched from Kubernetes: %s", local_addr, hosts);
for(Pod host: hosts) {
if (!host.isReady() && !useNotReadyAddresses)
continue;
for(int i=0; i <= port_range; i++) {
try {
IpAddress addr=new IpAddress(host.getIp(), tp_bind_port + i);
if(!cluster_members.contains(addr))
cluster_members.add(addr);
}
catch(Exception ex) {
log.warn("failed translating host %s into InetAddress: %s", host, ex);
}
}
}
}
if(use_disk_cache) {
// this only makes sense if we have PDC below us
Collection<PhysicalAddress> list=(Collection<PhysicalAddress>)down_prot.down(new Event(Event.GET_PHYSICAL_ADDRESSES));
if(list != null)
list.stream().filter(phys_addr -> !cluster_members.contains(phys_addr)).forEach(cluster_members::add);
}
if (split_clusters_during_rolling_update) {
if(physical_addr != null) {
String senderIp = ((IpAddress)physical_addr).getIpAddress().getHostAddress();
// Please note we search for sender parent group through all pods, ever not ready. It's because JGroup discovery is performed
// before WildFly can respond to http readiness probe.
hosts.stream()
.filter(p -> p.getPodGroup() == null)
.forEach(p -> log.warn("Pod %s doesn't have group assigned. Impossible to reliably determine pod group during Rolling Update."));
String senderPodGroup = hosts.stream()
.filter(pod -> senderIp.contains(pod.getIp()))
.map(Pod::getPodGroup)
.findFirst().orElse(null);
if(senderPodGroup != null) {
Set<String> allowedAddresses = hosts.stream()
.filter(pod -> senderPodGroup.equals(pod.getPodGroup()))
.map(Pod::getIp)
.collect(Collectors.toSet());
for(Iterator<PhysicalAddress> memberIterator = cluster_members.iterator(); memberIterator.hasNext();) {
IpAddress podAddress = (IpAddress) memberIterator.next();
if(!allowedAddresses.contains(podAddress.getIpAddress().getHostAddress())) {
log.trace("removing pod %s from cluster members list since its parent domain is different than senders (%s). Allowed hosts: %s", podAddress, senderPodGroup, allowedAddresses);
memberIterator.remove();
}
}
} else {
log.warn("split_clusters_during_rolling_update is set to 'true' but can't obtain local node parent deployment. All nodes will be placed in the same cluster.");
}
} else {
log.warn("split_clusters_during_rolling_update is set to 'true' but can't obtain local node IP address. All nodes will be placed in the same cluster.");
}
}
if(log.isTraceEnabled())
log.trace("%s: sending discovery requests to %s", local_addr, cluster_members);
PingHeader hdr=new PingHeader(PingHeader.GET_MBRS_REQ).clusterName(cluster_name).initialDiscovery(initial_discovery);
for(final PhysicalAddress addr: cluster_members) {
if(addr.equals(physical_addr)) // no need to send the request to myself
continue;
// the message needs to be DONT_BUNDLE, see explanation above
final Message msg=new BytesMessage(addr).setFlag(Message.Flag.INTERNAL, Message.Flag.DONT_BUNDLE, Message.Flag.OOB)
.putHeader(this.id,hdr);
if(data != null)
msg.setArray(marshal(data));
if(async_discovery_use_separate_thread_per_request)
timer.execute(() -> sendDiscoveryRequest(msg), sends_can_block);
else
sendDiscoveryRequest(msg);
}
}
@ManagedOperation(description="Asks Kubernetes for the IP addresses of all pods")
public String fetchFromKube() {
List<Pod> list=readAll();
return list.toString();
}
protected List<Pod> readAll() {
if(isClusteringEnabled() && client != null) {
try {
List<Pod> pods = client.getPods(namespace, labels, dump_requests);
failedJsonErrorReported = false;
return pods;
}
catch(Exception e) {
if (!failedJsonErrorReported) {
failedJsonErrorReported = true;
log.warn("failed getting JSON response from Kubernetes %s for cluster [%s], namespace [%s], labels [%s]; encountered [%s: %s]",
client.info(), cluster_name, namespace, labels, e.getClass().getName(), e.getMessage());
}
}
}
return Collections.emptyList();
}
protected void sendDiscoveryRequest(Message req) {
try {
down_prot.down(req);
}
catch(Throwable t) {
log.trace("sending discovery request to %s failed: %s", req.dest(), t);
}
}
@Override
public String toString() {
return String.format("KubePing{namespace='%s', labels='%s'}", namespace, labels);
}
}
| 46.756839
| 203
| 0.660859
|
b77eb22e05e8b69ed2c7767aa7a8eab4bed43f85
| 10,700
|
cpp
|
C++
|
src/openpose/utilities/fileSystem.cpp
|
meiwanlanjun/openpose
|
71078eb1b7571789c7589cf6c8de1786c3227a90
|
[
"DOC",
"MIT-CMU"
] | null | null | null |
src/openpose/utilities/fileSystem.cpp
|
meiwanlanjun/openpose
|
71078eb1b7571789c7589cf6c8de1786c3227a90
|
[
"DOC",
"MIT-CMU"
] | null | null | null |
src/openpose/utilities/fileSystem.cpp
|
meiwanlanjun/openpose
|
71078eb1b7571789c7589cf6c8de1786c3227a90
|
[
"DOC",
"MIT-CMU"
] | null | null | null |
#include <cstdio> // fopen
#ifdef _WIN32
#include <direct.h> // _mkdir
#include <windows.h> // DWORD, GetFileAttributesA
#elif defined __unix__
#include <dirent.h> // opendir
#include <sys/stat.h> // mkdir
#else
#error Unknown environment!
#endif
#include <openpose/utilities/string.hpp>
#include <openpose/utilities/fileSystem.hpp>
namespace op
{
void makeDirectory(const std::string& directoryPath)
{
try
{
if (!directoryPath.empty())
{
// Format the path first
const auto formatedPath = formatAsDirectory(directoryPath);
// Create dir if it doesn't exist yet
if (!existDirectory(formatedPath))
{
#ifdef _WIN32
const auto status = _mkdir(formatedPath.c_str());
#elif defined __unix__
// Create folder
// Access permission - 775 (7, 7, 4+1)
// https://www.gnu.org/software/libc/manual/html_node/Permission-Bits.html
const auto status = mkdir(formatedPath.c_str(), S_IRWXU | S_IRWXG | S_IROTH | S_IXOTH);
#endif
// Error if folder cannot be created
if (status != 0)
error("Could not create directory: " + formatedPath + ". Status error = "
+ std::to_string(status) + ". Does the parent folder exist and/or do you have writting"
" access to that path?", __LINE__, __FUNCTION__, __FILE__);
}
}
}
catch (const std::exception& e)
{
error(e.what(), __LINE__, __FUNCTION__, __FILE__);
}
}
bool existDirectory(const std::string& directoryPath)
{
try
{
// Format the path first
const auto formatedPath = formatAsDirectory(directoryPath);
#ifdef _WIN32
DWORD status = GetFileAttributesA(formatedPath.c_str());
// It is not a directory
if (status == INVALID_FILE_ATTRIBUTES)
return false;
// It is a directory
else if (status & FILE_ATTRIBUTE_DIRECTORY)
return true;
// It is not a directory
return false; // this is not a directory!
#elif defined __unix__
// It is a directory
if (auto* directory = opendir(formatedPath.c_str()))
{
closedir(directory);
return true;
}
// It is not a directory
else
return false;
#else
#error Unknown environment!
#endif
}
catch (const std::exception& e)
{
error(e.what(), __LINE__, __FUNCTION__, __FILE__);
return false;
}
}
bool existFile(const std::string& filePath)
{
try
{
if (auto* file = fopen(filePath.c_str(), "r"))
{
fclose(file);
return true;
}
else
return false;
}
catch (const std::exception& e)
{
error(e.what(), __LINE__, __FUNCTION__, __FILE__);
return false;
}
}
std::string formatAsDirectory(const std::string& directoryPathString)
{
try
{
std::string directoryPath = directoryPathString;
if (!directoryPath.empty())
{
// Replace all '\\' to '/'
std::replace(directoryPath.begin(), directoryPath.end(), '\\', '/');
if (directoryPath.back() != '/')
directoryPath = directoryPath + "/";
// Windows - Replace all '/' to '\\'
#ifdef _WIN32
std::replace(directoryPath.begin(), directoryPath.end(), '/', '\\');
#endif
}
return directoryPath;
}
catch (const std::exception& e)
{
error(e.what(), __LINE__, __FUNCTION__, __FILE__);
return "";
}
}
std::string getFileNameAndExtension(const std::string& fullPath)
{
try
{
size_t lastSlashPos = fullPath.find_last_of("\\/");
if (lastSlashPos != std::string::npos)
return fullPath.substr(lastSlashPos+1, fullPath.size() - lastSlashPos - 1);
else
return fullPath;
}
catch (const std::exception& e)
{
error(e.what(), __LINE__, __FUNCTION__, __FILE__);
return "";
}
}
std::string getFileNameNoExtension(const std::string& fullPath)
{
try
{
// Name + extension
std::string nameExt = getFileNameAndExtension(fullPath);
// Name
size_t dotPos = nameExt.find_last_of(".");
if (dotPos != std::string::npos)
return nameExt.substr(0, dotPos);
else
return nameExt;
}
catch (const std::exception& e)
{
error(e.what(), __LINE__, __FUNCTION__, __FILE__);
return "";
}
}
std::string getFileExtension(const std::string& fullPath)
{
try
{
// Name + extension
std::string nameExt = getFileNameAndExtension(fullPath);
// Extension
size_t dotPos = nameExt.find_last_of(".");
if (dotPos != std::string::npos)
return nameExt.substr(dotPos + 1, nameExt.size() - dotPos - 1);
else
return "";
}
catch (const std::exception& e)
{
error(e.what(), __LINE__, __FUNCTION__, __FILE__);
return "";
}
}
// This function just removes the initial '.' in the std::string (if any)
// To avoid errors for not finding extensions because of comparing ".jpg" vs "jpg"
std::string removeExtensionDot(const std::string& extension)
{
try
{
// Extension is empty
if (extension.empty())
return "";
// Return string without initial character
else if (*extension.cbegin() == '.')
return extension.substr(1, extension.size() - 1);
// Return string itself
else
return extension;
}
catch (const std::exception& e)
{
error(e.what(), __LINE__, __FUNCTION__, __FILE__);
return "";
}
}
bool extensionIsDesired(const std::string& extension, const std::vector<std::string>& extensions)
{
try
{
const auto cleanedExtension = toLower(removeExtensionDot(extension));
for (auto& extensionI : extensions)
if (cleanedExtension == toLower(removeExtensionDot(extensionI)))
return true;
return false;
}
catch (const std::exception& e)
{
error(e.what(), __LINE__, __FUNCTION__, __FILE__);
return false;
}
}
std::vector<std::string> getFilesOnDirectory(const std::string& directoryPath,
const std::vector<std::string>& extensions)
{
try
{
// Format the path first
const auto formatedPath = formatAsDirectory(directoryPath);
// Check folder exits
if (!existDirectory(formatedPath))
error("Folder " + formatedPath + " does not exist.", __LINE__, __FUNCTION__, __FILE__);
// Read all files in folder
std::vector<std::string> filePaths;
#ifdef _WIN32
auto formatedPathWindows = formatedPath;
formatedPathWindows.append("\\*");
WIN32_FIND_DATA data;
HANDLE hFind;
if ((hFind = FindFirstFile(formatedPathWindows.c_str(), &data)) != INVALID_HANDLE_VALUE)
{
do
filePaths.emplace_back(formatedPath + data.cFileName);
while (FindNextFile(hFind, &data) != 0);
FindClose(hFind);
}
#elif defined __unix__
std::shared_ptr<DIR> directoryPtr(
opendir(formatedPath.c_str()),
[](DIR* formatedPath){ formatedPath && closedir(formatedPath); }
);
struct dirent* direntPtr;
while ((direntPtr = readdir(directoryPtr.get())) != nullptr)
{
std::string currentPath = formatedPath + direntPtr->d_name;
if ((strncmp(direntPtr->d_name, ".", 1) == 0) || existDirectory(currentPath))
continue;
filePaths.emplace_back(currentPath);
}
#else
#error Unknown environment!
#endif
// Check #files > 0
if (filePaths.empty())
error("No files were found on " + formatedPath, __LINE__, __FUNCTION__, __FILE__);
// If specific extensions specified
if (!extensions.empty())
{
// Read images
std::vector<std::string> specificExtensionPaths;
specificExtensionPaths.reserve(filePaths.size());
for (const auto& filePath : filePaths)
if (extensionIsDesired(getFileExtension(filePath), extensions))
specificExtensionPaths.emplace_back(filePath);
std::swap(filePaths, specificExtensionPaths);
}
// Sort alphabetically
std::sort(filePaths.begin(), filePaths.end());
// Return result
return filePaths;
}
catch (const std::exception& e)
{
error(e.what(), __LINE__, __FUNCTION__, __FILE__);
return {};
}
}
std::vector<std::string> getFilesOnDirectory(const std::string& directoryPath, const std::string& extension)
{
try
{
return getFilesOnDirectory(directoryPath, std::vector<std::string>{extension});
}
catch (const std::exception& e)
{
error(e.what(), __LINE__, __FUNCTION__, __FILE__);
return {};
}
}
}
| 34.96732
| 117
| 0.497383
|
000fa54292b68c28571cad909366ac04d977cda4
| 4,590
|
cs
|
C#
|
CssUI/DOM/Enums/Attributes/Base/EAttributeType.cs
|
dsisco11/CssUI
|
1b73f7af78d3c4b2980102393dd2449a6e30551d
|
[
"MIT"
] | null | null | null |
CssUI/DOM/Enums/Attributes/Base/EAttributeType.cs
|
dsisco11/CssUI
|
1b73f7af78d3c4b2980102393dd2449a6e30551d
|
[
"MIT"
] | 35
|
2019-06-23T20:25:23.000Z
|
2019-08-14T03:05:20.000Z
|
CssUI/DOM/Enums/Attributes/Base/EAttributeType.cs
|
dsisco11/CssUI
|
1b73f7af78d3c4b2980102393dd2449a6e30551d
|
[
"MIT"
] | null | null | null |
using System;
namespace CssUI.DOM.Enums
{
/// <summary>
/// Specifies a DOM element attribute type, which determines what kind of string value formats are allowed to be set for it.
/// </summary>
public enum EAttributeType : int
{
/// <summary>
/// Any string is valid
/// </summary>
String,
/// <summary>
/// A number of attributes are boolean attributes. The presence of a boolean attribute on an element represents the true value, and the absence of the attribute represents the false value.
/// If the attribute is present, its value must either be the empty string or a value that is an ASCII case-insensitive match for the attribute's canonical name, with no leading or trailing whitespace.
/// </summary>
/// Docs: https://html.spec.whatwg.org/multipage/common-microsyntaxes.html#boolean-attributes
Boolean,
/// <summary>
/// Enumerated attributes may only be assigned a specific set of keywords with special meaning
/// </summary>
/// Docs: https://html.spec.whatwg.org/multipage/common-microsyntaxes.html#keywords-and-enumerated-attributes
Enumerated,
/// <summary>
/// A string is a valid integer if it consists of one or more ASCII digits, optionally prefixed with a U+002D HYPHEN-MINUS character (-).
/// </summary>
/// Docs: https://html.spec.whatwg.org/multipage/common-microsyntaxes.html#signed-integers
Integer,
/// <summary>
/// A string is a valid non-negative integer if it consists of one or more ASCII digits.
/// </summary>
/// Docs: https://html.spec.whatwg.org/multipage/common-microsyntaxes.html#non-negative-integers
NonNegative_Integer,
/// <summary>
/// A string is a valid floating-point number if it consists of only an optional hypen(-), a series of digits, an optional period(.), followed by a series of digits
/// </summary>
/// Docs: https://html.spec.whatwg.org/multipage/common-microsyntaxes.html#floating-point-numbers
FloatingPoint,
/// <summary>
/// A length is a floating point number greater than or equal to 0.0
/// </summary>
/// Docs: https://html.spec.whatwg.org/multipage/common-microsyntaxes.html#percentages-and-dimensions
Length,
/// <summary>
/// A percentage is a floating point number greater than or equal to 0.0 followed by a percent sign (%)
/// </summary>
/// Docs: https://html.spec.whatwg.org/multipage/common-microsyntaxes.html#percentages-and-dimensions
Percentage,
/// <summary>
/// A non-zero length is a floating point number greater than 0.0
/// </summary>
/// Docs: https://html.spec.whatwg.org/multipage/common-microsyntaxes.html#non-zero-percentages-and-lengths
NonZero_Length,
/// <summary>
/// A non-zero percentage is a floating point number greater than 0.0 followed by a percent sign(%)
/// </summary>
/// Docs: https://html.spec.whatwg.org/multipage/common-microsyntaxes.html#non-zero-percentages-and-lengths
NonZero_Percentage,
/// <summary>
/// A simple color consists of three 8-bit numbers in the range 0..255, representing the red, green, and blue components of the color respectively, in the sRGB color space. [SRGB]
/// </summary>
/// Docs: https://html.spec.whatwg.org/multipage/common-microsyntaxes.html#colours
Color,
/// <summary>
/// A time consists of a specific time with no time-zone information, consisting of an hour, a minute, a second, and a fraction of a second.
/// </summary>
/// Docs: https://html.spec.whatwg.org/multipage/common-microsyntaxes.html#times
Time,
/// <summary>
/// A date consists of a specific proleptic-Gregorian date with no time-zone information, consisting of a year, a month, and a day. [GREGORIAN]
/// </summary>
/// Docs: https://html.spec.whatwg.org/multipage/common-microsyntaxes.html#dates
Date,
/// <summary>
/// A duration consists of a number of seconds.
/// </summary>
/// Docs: https://html.spec.whatwg.org/multipage/common-microsyntaxes.html#durations
Duration,
/// <summary>
/// KeyCombination attributes specify a combination of keypresses as a text string in a format defined in the HTML standards
/// </summary>
KeyCombo,
}
}
| 45.9
| 209
| 0.642048
|
707ea6a8857247f67a4f9d291c12700b30bbfc18
| 257
|
ps1
|
PowerShell
|
PSPKI/Client/Get-EnrollmentPolicyServerClient.ps1
|
samrowens1/PSPKI
|
b78a6bd92f81a8cc8ae2044effa17707f3f1eba3
|
[
"MS-PL"
] | 133
|
2019-07-14T16:04:35.000Z
|
2022-03-31T15:51:39.000Z
|
PSPKI/Client/Get-EnrollmentPolicyServerClient.ps1
|
samrowens1/PSPKI
|
b78a6bd92f81a8cc8ae2044effa17707f3f1eba3
|
[
"MS-PL"
] | 109
|
2019-07-02T09:59:26.000Z
|
2022-03-30T06:32:12.000Z
|
PSPKI/Client/Get-EnrollmentPolicyServerClient.ps1
|
samrowens1/PSPKI
|
b78a6bd92f81a8cc8ae2044effa17707f3f1eba3
|
[
"MS-PL"
] | 29
|
2019-09-20T03:47:34.000Z
|
2022-03-21T18:41:11.000Z
|
function Get-EnrollmentPolicyServerClient {
<#
.ExternalHelp PSPKI.Help.xml
#>
[OutputType('PKI.Enrollment.Policy.PolicyServerClient[]')]
[CmdletBinding()]
param(
[switch]$UserContext
)
[PKI.Enrollment.Autoenrollment]::GetPolicyServers($UserContext)
}
| 23.363636
| 64
| 0.774319
|
9b8a45f21abcf5d79b55a22c44e276312da7a7f1
| 2,718
|
lua
|
Lua
|
fxmanifest.lua
|
TechnoBehemoth/bikeRack
|
b5ea6221b3590eaf485bd8fd18b1557733d17743
|
[
"MIT"
] | null | null | null |
fxmanifest.lua
|
TechnoBehemoth/bikeRack
|
b5ea6221b3590eaf485bd8fd18b1557733d17743
|
[
"MIT"
] | null | null | null |
fxmanifest.lua
|
TechnoBehemoth/bikeRack
|
b5ea6221b3590eaf485bd8fd18b1557733d17743
|
[
"MIT"
] | 3
|
2021-06-09T01:11:42.000Z
|
2022-02-08T19:44:48.000Z
|
--[[
██████╗░███████╗██████╗░███╗░░██╗███████╗░█████╗░██╗░░██╗
██╔══██╗██╔════╝██╔══██╗████╗░██║██╔════╝██╔══██╗██║░██╔╝
██████╔╝█████╗░░██║░░██║██╔██╗██║█████╗░░██║░░╚═╝█████═╝░
██╔══██╗██╔══╝░░██║░░██║██║╚████║██╔══╝░░██║░░██╗██╔═██╗░
██║░░██║███████╗██████╔╝██║░╚███║███████╗╚█████╔╝██║░╚██╗
╚═╝░░╚═╝╚══════╝╚═════╝░╚═╝░░╚══╝╚══════╝░╚════╝░╚═╝░░╚═╝
███╗░░░███╗░█████╗░██████╗░██╗███████╗██╗░█████╗░░█████╗░████████╗██╗░█████╗░███╗░░██╗░██████╗
████╗░████║██╔══██╗██╔══██╗██║██╔════╝██║██╔══██╗██╔══██╗╚══██╔══╝██║██╔══██╗████╗░██║██╔════╝
██╔████╔██║██║░░██║██║░░██║██║█████╗░░██║██║░░╚═╝███████║░░░██║░░░██║██║░░██║██╔██╗██║╚█████╗░
██║╚██╔╝██║██║░░██║██║░░██║██║██╔══╝░░██║██║░░██╗██╔══██║░░░██║░░░██║██║░░██║██║╚████║░╚═══██╗
██║░╚═╝░██║╚█████╔╝██████╔╝██║██║░░░░░██║╚█████╔╝██║░░██║░░░██║░░░██║╚█████╔╝██║░╚███║██████╔╝
╚═╝░░░░░╚═╝░╚════╝░╚═════╝░╚═╝╚═╝░░░░░╚═╝░╚════╝░╚═╝░░╚═╝░░░╚═╝░░░╚═╝░╚════╝░╚═╝░░╚══╝╚═════╝░
██╗░░░░░██╗░░░░░░█████╗░
██║░░░░░██║░░░░░██╔══██╗
██║░░░░░██║░░░░░██║░░╚═╝
██║░░░░░██║░░░░░██║░░██╗
███████╗███████╗╚█████╔╝
╚══════╝╚══════╝░╚════╝░
Website: www.redneckmods.com
Discord: discord.gg/redneckmods
Twitter: twitter.com/redneckmods
Instagram: instagram.com/redneckmods
Patreon: patreon.com/redneckmods
--]]
fx_version 'adamant'
game 'gta5'
description 'Redneck Modifications | Double trailer Script'
authors 'Nick-VD'
client_script 'client.lua'
shared_script 'config.lua'
| 73.459459
| 95
| 0.099706
|
0da79c4d2c6ac6ca5d2ca0f7a832c2982e821fec
| 238
|
cs
|
C#
|
ReadingList/Data/Repositories/AuthorRepository.cs
|
GabrielCorrado/ReadingList
|
805ad1096b653a53c8606372f79de785174c0d58
|
[
"MIT"
] | null | null | null |
ReadingList/Data/Repositories/AuthorRepository.cs
|
GabrielCorrado/ReadingList
|
805ad1096b653a53c8606372f79de785174c0d58
|
[
"MIT"
] | 2
|
2019-10-17T07:52:30.000Z
|
2019-10-26T13:40:53.000Z
|
ReadingList/Data/Repositories/AuthorRepository.cs
|
GabrielCorrado/ReadingList
|
805ad1096b653a53c8606372f79de785174c0d58
|
[
"MIT"
] | null | null | null |
using Core.Entities.BookAggregate;
namespace Data.Repositories
{
internal class AuthorRepository : Repository<Author>
{
internal AuthorRepository(ApplicationDbContext context) : base(context)
{
}
}
}
| 19.833333
| 79
| 0.680672
|
1ce48fb374dafe9e5b2bd3dedbfba1e25025ef33
| 4,298
|
sh
|
Shell
|
new.sh
|
F0xedb/tos-installer-backend
|
664b5d2100b302f22e649c449ccfa1376d838215
|
[
"MIT"
] | 1
|
2019-09-14T17:15:23.000Z
|
2019-09-14T17:15:23.000Z
|
new.sh
|
F0xedb/tos-installer-backend
|
664b5d2100b302f22e649c449ccfa1376d838215
|
[
"MIT"
] | 10
|
2019-08-25T20:52:47.000Z
|
2019-10-14T13:44:19.000Z
|
new.sh
|
ODEX-TOS/tos-installer-backend
|
664b5d2100b302f22e649c449ccfa1376d838215
|
[
"MIT"
] | null | null | null |
# MIT License
#
# Copyright (c) 2019 Meyers Tom
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#Building partition table
parted --script '/dev/sda' mkpart ESP fat32 1MiB 200MiB
parted --script '/dev/sda' set 1 boot on
parted --script '/dev/sda' name 1 efi
parted --script '/dev/sda' mkpart primary 200MiB 800MiB
parted --script '/dev/sda' name 2 boot
parted --script '/dev/sda' mkpart primary 800MiB 8GiB
parted --script '/dev/sda' name 3 swap
parted --script '/dev/sda' mkpart primary 8GiB 98%
parted --script '/dev/sda' set 4 lvm on
parted --script '/dev/sda' name 4 root
parted --script '/dev/sda' mkpart primary 97% 99%
parted --script '/dev/sda' name 8 offset
parted --script '/dev/sda' resizepart 9 1GB
#Formating partitions
mkfs.fat -I -F32 /dev/sda1
mkfs.ext4 -F /dev/sda2
mkfs.ext4 -F /dev/sda3
modprobe dm-crypt
modprobe dm-mod
printf 'a' | cryptsetup luksFormat -v -s 512 -h sha512 /dev/sda4 -d -
printf 'a' | cryptsetup open /dev/sda4 luks_lvm -d -
pvcreate /dev/mapper/luks_lvm
vgcreate tos /dev/mapper/luks_lvm
lvcreate -n root -L 200G tos
lvcreate -n home -L 200G tos
mkfs.ext4 -L root /dev/mapper/tos-root
mkfs.ext4 -L home /dev/mapper/tos-home
mkfs.ext4 -F /dev/sda8
mkfs.ext4 -F /dev/sda9
#Formating partitions
mkfs.fat -I -F32 /dev/sda1
#Mounting partitions
mount /dev/mapper/tos-root /mnt/
mkdir -p /mnt/home
mount /dev/mapper/tos-home /mnt/home
mkdir -p /mnt/boot
mount /dev/sda2 /mnt/boot
mkdir -p /mnt/boot/efi
mount /dev/sda1 /mnt/boot/efi
swapon /dev/sda3
swapon -a
swapon -s
mkdir -p /mnt/tmp
mount /dev/sda8 /mnt/tmp
mkdir -p /mnt/proc
mount /dev/sda9 /mnt/proc
#Establishing a network connection
if [[ $(ping -c1 8.8.8.8 | grep '0% packet loss') == '' ]]; then
nmcli device wifi connect 'ssid' password 'passphrase'
fi
#bootstrapping system
pacstrap /mnt base base-devel efibootmgr vim dialog grub --noconfirm
# Generate fstab
genfstab -U -p /mnt > /mnt/etc/fstab
# Executing chroot function
arch-chroot -u root /mnt <<EOF
# Setting up system parameters
timedatectl set-ntp true
hwclock --systohc
sed -i 's:^#.*en_US.UTF-8:en_US.UTF-8:' /etc/locale.gen
locale-gen
echo 'LANG=en_US.UTF-8' > /etc/locale.conf
echo KEYMAP='be-latin1' > /etc/vconsole.conf
echo 'tos' > /etc/hostname
echo -e '127.0.0.1 localhost
::1 localhost
127.0.1.1 tos.localdomain tos' > /etc/hosts
echo 'root:123' | chpasswd
echo '%wheel ALL=(ALL) NOPASSWD: ALL' >> /etc/sudoers
pacman -Syu --noconfirm linux
# Creating a user
useradd -m -p paN8aiEIonqJE -g users -G audio,lp,optical,storage,video,wheel,games,power -s /bin/bash alpha
# Generating the bootloader
sed -i 's:HOOKS=(\(.*\)):HOOKS=(\1 encrypt lvm2):' /etc/mkinitcpio.conf
sed -i "s;^GRUB_CMDLINE_LINUX_DEFAULT=.*;GRUB_CMDLINE_LINUX_DEFAULT=\"quiet cryptdevice=/dev/sda4:luks_lvm\";" /etc/default/grub
sed -i "s/^#GRUB_ENABLE_CRYPTODISK=y/GRUB_ENABLE_CRYPTODISK=y/" /etc/default/grub
mkinitcpio -p linux
grub-install --efi-directory /boot/efi --force /dev/sda
grub-mkconfig -o /boot/grub/grub.cfg
EOF
# Executing chroot function
arch-chroot -u root /mnt <<EOF
su alpha <<\EOF2
# Installing software
yay -Syu --noconfirm linux grep vim linux-tos sudo nano
# Executing custom script
echo hello world
echo hello 2 \$USER
EOF2
EOF
# Executing custom script
echo hello world
echo hello 2 $USER
| 31.602941
| 128
| 0.73825
|
8e77e7a5edb37af7b5dd184a0ccc20d0cb3b7da0
| 513
|
js
|
JavaScript
|
test/e2e/features/Page/PhoneSpeciality.js
|
VSilva001/Gofan_AutomationFramework
|
b862abc63d328fc712ff259182f2c6c7cb13407a
|
[
"MIT"
] | null | null | null |
test/e2e/features/Page/PhoneSpeciality.js
|
VSilva001/Gofan_AutomationFramework
|
b862abc63d328fc712ff259182f2c6c7cb13407a
|
[
"MIT"
] | null | null | null |
test/e2e/features/Page/PhoneSpeciality.js
|
VSilva001/Gofan_AutomationFramework
|
b862abc63d328fc712ff259182f2c6c7cb13407a
|
[
"MIT"
] | null | null | null |
'use strict';
var chai = require('chai');
var expect = chai.expect;
var chaiAsPromised = require('chai-as-promised');
chai.use(chaiAsPromised);
var PhoneSpeciality = function PhoneSpeciality() {
PhoneSpeciality.prototype.PhoneScrollToButton = function () {
//ESTO PARA HACER SCROLL al elemento deseado EN TELEFONOS
browser.executeScript("document.getElementsByClassName('button-positive')[0].scrollIntoView();");
browser.sleep(2000);
}
};
module.exports = new PhoneSpeciality();
| 32.0625
| 105
| 0.721248
|
ae25d43dc61775efd44644d6c46d3b4ed14e613b
| 943
|
cs
|
C#
|
lib/csharp/test/CatClientTest/PerfCounterReaderTest.cs
|
woozhijun/cat
|
3d523202c38e37b1a2244b26d4336ebbea5db001
|
[
"Apache-2.0"
] | 17,318
|
2015-01-03T03:02:07.000Z
|
2022-03-31T02:43:28.000Z
|
lib/csharp/test/CatClientTest/PerfCounterReaderTest.cs
|
MrCoderYu/cat
|
674bd9ab70267dd6fc74879e4344af77397f4acd
|
[
"Apache-2.0"
] | 1,162
|
2015-01-04T08:23:49.000Z
|
2022-03-31T15:38:04.000Z
|
lib/csharp/test/CatClientTest/PerfCounterReaderTest.cs
|
MrCoderYu/cat
|
674bd9ab70267dd6fc74879e4344af77397f4acd
|
[
"Apache-2.0"
] | 5,520
|
2015-01-03T03:02:07.000Z
|
2022-03-31T16:16:56.000Z
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Org.Unidal.Cat.Message.Spi.Internals;
namespace CatClientTest
{
public class PerfCounterReaderTest
{
public static void Test()
{
//float processorTime = 0;
//float gen0HeapSize = 0;
//float gen1HeapSize = 0;
//float gen2HeapSize = 0;
//float lohHeapSize = 0;
//float timeInGC = 0;
//float nAssemblies = 0;
//float nClasses = 0;
//float totalContentions = 0;
//float currentQueueLength = 0;
//float nPhysicalThreads = 0;
//float nExceptions = 0;
#if NETFULL
IPerformanceMetricProvider provider = new DefaultPerformanceMetricProvider();
provider.UpdateMetrics();
#endif
//Console.WriteLine("processor time: " + processorTime);
}
}
}
| 26.942857
| 89
| 0.579003
|
6606e0c33274316f3a16d360b7c59a3d687db813
| 3,107
|
py
|
Python
|
vaineye/htpasswd.py
|
diascreative/VaingloriousEye
|
3b1fc69a14b53d9408f0546dccab2b93b8521c90
|
[
"MIT"
] | 1
|
2020-06-01T14:31:48.000Z
|
2020-06-01T14:31:48.000Z
|
vaineye/htpasswd.py
|
diascreative/VaingloriousEye
|
3b1fc69a14b53d9408f0546dccab2b93b8521c90
|
[
"MIT"
] | null | null | null |
vaineye/htpasswd.py
|
diascreative/VaingloriousEye
|
3b1fc69a14b53d9408f0546dccab2b93b8521c90
|
[
"MIT"
] | null | null | null |
class NoSuchUser(Exception):
pass
# From: http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/325204
def apache_md5crypt(password, salt, magic='$apr1$'):
# /* The password first, since that is what is most unknown */ /* Then our magic string */ /* Then the raw salt */
import md5
m = md5.new()
m.update(password + magic + salt)
# /* Then just as many characters of the MD5(pw,salt,pw) */
mixin = md5.md5(password + salt + password).digest()
for i in range(0, len(password)):
m.update(mixin[i % 16])
# /* Then something really weird... */
# Also really broken, as far as I can tell. -m
i = len(password)
while i:
if i & 1:
m.update('\x00')
else:
m.update(password[0])
i >>= 1
final = m.digest()
# /* and now, just to make sure things don't run too fast */
for i in range(1000):
m2 = md5.md5()
if i & 1:
m2.update(password)
else:
m2.update(final)
if i % 3:
m2.update(salt)
if i % 7:
m2.update(password)
if i & 1:
m2.update(final)
else:
m2.update(password)
final = m2.digest()
# This is the bit that uses to64() in the original code.
itoa64 = './0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'
rearranged = ''
for a, b, c in ((0, 6, 12), (1, 7, 13), (2, 8, 14), (3, 9, 15), (4, 10, 5)):
v = ord(final[a]) << 16 | ord(final[b]) << 8 | ord(final[c])
for i in range(4):
rearranged += itoa64[v & 0x3f]; v >>= 6
v = ord(final[11])
for i in range(2):
rearranged += itoa64[v & 0x3f]; v >>= 6
return magic + salt + '$' + rearranged
def check_entry_password(username, password, entry_password):
if entry_password.startswith('$apr1$'):
salt = entry_password[6:].split('$')[0][:8]
expected = apache_md5crypt(password, salt)
elif entry_password.startswith('{SHA}'):
import sha
expected = '{SHA}' + sha.new(password).digest().encode('base64').strip()
else:
import crypt
expected = crypt.crypt(password, entry_password)
return entry_password == expected
def parse_htpasswd(fn, stop_username=None):
f = open(fn, 'rb')
try:
entries = {}
for line in f.readlines():
line = line.strip()
if not line or line.startswith('#'):
continue
if ':' not in line:
raise ValueError(
"Bad line (no :): %r" % line)
username, entry_password = line.split(':', 1)
entries[username] = entry_password
if username == stop_username:
break
return entries
finally:
f.close()
def check_password(username, password, htpasswd_fn):
entries = parse_htpasswd(htpasswd_fn, username)
if not entries.has_key(username):
raise NoSuchUser('No user: %r' % username)
return check_entry_password(
username, password, entries[username])
| 30.165049
| 118
| 0.558095
|
dafb35a7e49475167c61102bf2ce88ec834c7307
| 259
|
ts
|
TypeScript
|
api/input/addInProgressData.ts
|
yeukfei02/trello-clone-api
|
d8277b61ef900a76ab46c96e4adf56dfe5fa5ad5
|
[
"MIT"
] | null | null | null |
api/input/addInProgressData.ts
|
yeukfei02/trello-clone-api
|
d8277b61ef900a76ab46c96e4adf56dfe5fa5ad5
|
[
"MIT"
] | null | null | null |
api/input/addInProgressData.ts
|
yeukfei02/trello-clone-api
|
d8277b61ef900a76ab46c96e4adf56dfe5fa5ad5
|
[
"MIT"
] | null | null | null |
import { inputObjectType } from 'nexus';
export const AddInProgressDataInput = inputObjectType({
name: 'AddInProgressDataInput',
definition(t) {
t.nonNull.string('userId');
t.nonNull.string('title');
t.nonNull.string('description');
},
});
| 23.545455
| 55
| 0.69112
|
38c5b2bcc029cbfb034ebdec5097a018ff549a63
| 1,228
|
dart
|
Dart
|
lib/infrastructure/home/casts/casts_repository.dart
|
AvengerApp/movingPictures
|
640c6db8bd81a087b27dc28ef8c79acc9fe0e529
|
[
"MIT"
] | 37
|
2020-10-28T13:44:08.000Z
|
2022-02-07T00:21:03.000Z
|
lib/infrastructure/home/casts/casts_repository.dart
|
AvengerApp/movingPictures
|
640c6db8bd81a087b27dc28ef8c79acc9fe0e529
|
[
"MIT"
] | null | null | null |
lib/infrastructure/home/casts/casts_repository.dart
|
AvengerApp/movingPictures
|
640c6db8bd81a087b27dc28ef8c79acc9fe0e529
|
[
"MIT"
] | 11
|
2021-01-18T20:58:24.000Z
|
2022-03-28T11:08:13.000Z
|
import 'dart:io';
import 'package:dartz/dartz.dart';
import 'package:dio/dio.dart';
import 'package:injectable/injectable.dart';
import '../../../domain/home/shared_classes/cast/cast.dart';
import '../../../domain/home/shared_classes/cast/cast_failure.dart';
import '../../../domain/home/shared_classes/cast/cast_interface.dart';
import '../../core/credentials.dart';
@LazySingleton(as: CastInterface)
class CastRepository extends CastInterface {
final Dio _dio = Dio();
final String apiKey = TMDB_API_KEY;
final String tmdbUrl = TMDB_URL;
String deviceLocal = Platform.localeName;
@override
Future<Either<CastFailure, Cast>> getPerson(int castId) async {
if (deviceLocal == "pt_BR") deviceLocal = "pt-BR";
if (deviceLocal == "en_US") deviceLocal = "en-US";
final getPersonUrl = "$tmdbUrl/person/$castId";
final params = {
"api_key": apiKey,
"language": deviceLocal,
};
try {
final Response<Map<String, dynamic>> response = await _dio.get(
getPersonUrl,
queryParameters: params,
);
final Cast movie = Cast.fromJson(response.data);
return right(movie);
} catch (e) {
return left(const CastFailure.unexpected());
}
}
}
| 28.55814
| 70
| 0.67101
|
4584556b6723ad0e8c91ecda7c87b0513cf7865a
| 2,536
|
py
|
Python
|
lambdas/src/qwest_handler.py
|
DJPoland/CampusQwest-backend
|
dc376c2e52c40db3f6a4c039ea4b8c8c94e442df
|
[
"MIT"
] | null | null | null |
lambdas/src/qwest_handler.py
|
DJPoland/CampusQwest-backend
|
dc376c2e52c40db3f6a4c039ea4b8c8c94e442df
|
[
"MIT"
] | null | null | null |
lambdas/src/qwest_handler.py
|
DJPoland/CampusQwest-backend
|
dc376c2e52c40db3f6a4c039ea4b8c8c94e442df
|
[
"MIT"
] | null | null | null |
import boto3
import json
from datetime import datetime
from urllib.parse import unquote
from utils.common_functions import obtainDataFromEvent, decimal_default
from utils.dynamodb_functions import get_all_items, get_item, start_current_qwest_for_user
from utils.schemas import CurrentQwest
def filter_qwests(qwests: list, campus: str, qwestsCompleted: set) -> list:
filteredQwests = []
for qwest in qwests:
if qwest['campus'] == campus and qwest['id'] not in qwestsCompleted:
filteredQwests.append(qwest)
return filteredQwests
def get_qwests_for_user(subId: str) -> list:
allQwests = get_all_items('Qwests')
userItem = get_item('Users', subId)
qwestsCompleted = set()
if 'qwestsCompleted' in userItem:
qwestsCompleted = {qwest['qwestId']
for qwest in userItem['qwestsCompleted']}
return filter_qwests(allQwests, "UCF", qwestsCompleted)
def begin_qwest_for_user(subId: str, qwestId: str) -> None:
qwestItem = get_item('Qwests', qwestId)
print("qwestItem is:", qwestItem)
totalLocations = str(qwestItem['numOfLocations'])
print("total locations is:", totalLocations)
currentQwest = CurrentQwest(qwestId=qwestId, locationIndex="0", numOfLocations=totalLocations)
print("appended object: ", currentQwest)
start_current_qwest_for_user(subId, currentQwest)
def lambda_handler(event, context):
print(event)
method, path, subId = obtainDataFromEvent(event, True)
print("Method: ", method, "Path: ", path, "subId: ", subId)
if path == '/user/qwests/fetchQwests' and method == 'GET':
qwests = get_qwests_for_user(subId)
return {
'statusCode': 200,
'body': json.dumps(qwests, default=decimal_default)
}
elif path == '/user/qwests/startQwest' and method == 'POST':
jsonBody = unquote(event['body'])
qwestId = json.loads(jsonBody)
idString = str(qwestId['id'])
print("subId is:", subId, " and id string is:", idString)
try:
begin_qwest_for_user(subId, idString)
except Exception as err:
print(err)
return {
'statusCode': 400,
'body': json.dumps({
'Error': "Qwest failed to start likely due to malformed data for user in database"
})
}
else:
return {
'statusCode': 201
}
else:
return {
'statusCode': 400
}
| 31.7
| 102
| 0.633281
|
2e6e2a0f3abf4ae0219dd32a01a85458fa3b998b
| 1,840
|
kt
|
Kotlin
|
app/src/main/java/com/richmat/mytuya/ui/searchResult/SearchResultViewModel.kt
|
jasonpanjunnan/RichmatTuya
|
e3d1c1a859369485ec7ef675f4e574cf32e0e01d
|
[
"Apache-2.0"
] | null | null | null |
app/src/main/java/com/richmat/mytuya/ui/searchResult/SearchResultViewModel.kt
|
jasonpanjunnan/RichmatTuya
|
e3d1c1a859369485ec7ef675f4e574cf32e0e01d
|
[
"Apache-2.0"
] | null | null | null |
app/src/main/java/com/richmat/mytuya/ui/searchResult/SearchResultViewModel.kt
|
jasonpanjunnan/RichmatTuya
|
e3d1c1a859369485ec7ef675f4e574cf32e0e01d
|
[
"Apache-2.0"
] | null | null | null |
package com.richmat.mytuya.ui.searchResult
import android.widget.Toast
import androidx.lifecycle.SavedStateHandle
import androidx.lifecycle.ViewModel
import androidx.lifecycle.viewModelScope
import com.richmat.mytuya.MyApplication
import com.richmat.mytuya.data.posts.Imp.FakePostsRepository
import com.richmat.mytuya.util.data.DevResultMassage
import com.richmat.mytuya.util.jsonToDevResultMassage
import dagger.hilt.android.lifecycle.HiltViewModel
import kotlinx.coroutines.flow.MutableStateFlow
import kotlinx.coroutines.flow.StateFlow
import kotlinx.coroutines.flow.update
import kotlinx.coroutines.launch
import javax.inject.Inject
@HiltViewModel
class SearchResultViewModel @Inject constructor(
savedStateHandle: SavedStateHandle,
private val myRepository: FakePostsRepository,
) : ViewModel() {
fun changeName(newName: String) {
_uiState.update { it.copy(name = newName) }
}
fun rename() {
viewModelScope.launch {
var result: Boolean
try {
result = myRepository.rename(_uiState.value.deviceId, _uiState.value.name)
} catch (e: Exception) {
// throw e
result = false
println(e)
}
val toast = if (result) "修改名字成功" else "修改名字失败"
Toast.makeText(MyApplication.context, toast, Toast.LENGTH_SHORT).show()
}
}
private val saved = savedStateHandle.get<String>(DEV_RESULT)!!
private val _uiState = MutableStateFlow(DevResultMassage("", ""))
val uiState: StateFlow<DevResultMassage> = _uiState
init {
val result = jsonToDevResultMassage(saved)
// _uiState.update { it.copy(devResultMassage = result) }
_uiState.value = result
}
companion object {
const val DEV_RESULT = "dev_result"
}
}
| 32.280702
| 90
| 0.697283
|
473815c11a237822666ab8343a09b101079f73ad
| 1,226
|
kt
|
Kotlin
|
framework/android/kodein-di-framework-android-core/src/main/java/org/kodein/di/android/retained.kt
|
Inego/Kodein-DI
|
7a25cde6fa8e998ddba2f9cb6275d902aa780b66
|
[
"MIT"
] | 1
|
2019-11-12T14:39:30.000Z
|
2019-11-12T14:39:30.000Z
|
framework/android/kodein-di-framework-android-core/src/main/java/org/kodein/di/android/retained.kt
|
Inego/Kodein-DI
|
7a25cde6fa8e998ddba2f9cb6275d902aa780b66
|
[
"MIT"
] | null | null | null |
framework/android/kodein-di-framework-android-core/src/main/java/org/kodein/di/android/retained.kt
|
Inego/Kodein-DI
|
7a25cde6fa8e998ddba2f9cb6275d902aa780b66
|
[
"MIT"
] | null | null | null |
@file:Suppress("DEPRECATION")
package org.kodein.di.android
import android.app.Activity
import android.app.Fragment
import android.os.Bundle
import org.kodein.di.Kodein
/** @suppress */
class RetainedKodeinFragment : Fragment() {
var kodein: Kodein? = null
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
retainInstance = true
}
}
private const val kodeinRetainedFragmentTag = "org.kodein.di.android.RetainedKodeinFragment"
/**
* A Kodein instance that will be retained between activity changes.
*
* @property allowSilentOverride Whether this module is allowed to non-explicit overrides.
* @property init The block of configuration for this module.
*/
fun Activity.retainedKodein(allowSilentOverride: Boolean = false, init: Kodein.MainBuilder.() -> Unit): Lazy<Kodein> = lazy {
(fragmentManager.findFragmentByTag(kodeinRetainedFragmentTag) as? RetainedKodeinFragment)?.kodein?.let { return@lazy it }
val kodein = Kodein(allowSilentOverride, init)
val fragment = RetainedKodeinFragment()
fragment.kodein = kodein
fragmentManager.beginTransaction().add(fragment, kodeinRetainedFragmentTag).commit()
return@lazy kodein
}
| 29.902439
| 125
| 0.757749
|
bd6c3507a981e17de48baf03f77258099323ed10
| 1,337
|
dart
|
Dart
|
chapter_7/step_3/lib/src/ui/excuse/widgets/excuse_page_view.dart
|
PacktPublishing/Flutter-UI-Projects-for-iOS-Android-Web
|
fde7b6b95332ad05114930c72ed8b74a785fffc5
|
[
"MIT"
] | 5
|
2021-12-27T08:41:00.000Z
|
2022-03-27T22:53:27.000Z
|
chapter_7/step_3/lib/src/ui/excuse/widgets/excuse_page_view.dart
|
PacktPublishing/Flutter-UI-Projects-for-iOS-Android-Web
|
fde7b6b95332ad05114930c72ed8b74a785fffc5
|
[
"MIT"
] | 3
|
2021-12-05T12:21:43.000Z
|
2022-01-17T17:34:18.000Z
|
chapter_7/step_3/lib/src/ui/excuse/widgets/excuse_page_view.dart
|
PacktPublishing/Flutter-UI-Projects-for-iOS-Android-Web
|
fde7b6b95332ad05114930c72ed8b74a785fffc5
|
[
"MIT"
] | 1
|
2021-12-27T08:41:01.000Z
|
2021-12-27T08:41:01.000Z
|
import 'package:animations/animations.dart';
import 'package:excuses_app/src/data/data.dart';
import 'package:flutter/material.dart';
import 'excuse_card.dart';
class ExcusePageView extends StatelessWidget {
final List<Excuse> excuses;
final int currentExcuse;
const ExcusePageView({
Key? key,
this.excuses = const [],
this.currentExcuse = 0,
}) : super(key: key);
@override
Widget build(BuildContext context) {
final selectedExcuse = excuses[currentExcuse];
return PageTransitionSwitcher(
duration: const Duration(milliseconds: 500),
transitionBuilder: (child, animation, secondAnimation) {
return FadeTransition(
opacity: Tween<double>(
begin: 1.0,
end: 0.0,
).animate(secondAnimation),
child: ScaleTransition(
scale: Tween<double>(
begin: 1.0,
end: 0.0,
).animate(secondAnimation),
child: SlideTransition(
position: Tween<Offset>(
end: Offset.zero,
begin: const Offset(1.5, 0.0),
).animate(animation),
child: child,
),
),
);
},
child: ExcuseCard(
key: ValueKey(selectedExcuse.id),
excuse: selectedExcuse,
),
);
}
}
| 26.215686
| 62
| 0.577412
|
2f3ae488c55f16d01f98d9e0fe02c7f63425ac9e
| 1,457
|
kt
|
Kotlin
|
compose/ui/ui/src/desktopMain/kotlin/androidx/compose/ui/platform/DesktopPlatform.kt
|
yuchuangu85/androidx
|
c241725a8c34f5e57af938d060747fa2638a19b5
|
[
"Apache-2.0"
] | null | null | null |
compose/ui/ui/src/desktopMain/kotlin/androidx/compose/ui/platform/DesktopPlatform.kt
|
yuchuangu85/androidx
|
c241725a8c34f5e57af938d060747fa2638a19b5
|
[
"Apache-2.0"
] | null | null | null |
compose/ui/ui/src/desktopMain/kotlin/androidx/compose/ui/platform/DesktopPlatform.kt
|
yuchuangu85/androidx
|
c241725a8c34f5e57af938d060747fa2638a19b5
|
[
"Apache-2.0"
] | null | null | null |
/*
* Copyright 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.compose.ui.platform
import androidx.compose.runtime.staticAmbientOf
val DesktopPlatformAmbient = staticAmbientOf(::identifyCurrent)
/**
* Identify OS on which the application is currently running.
*
* If it is needed to know the current platform in @Composable function,
* use [DesktopPlatformAmbient] instead of this function.
*
* identifyCurrent() should be used preferable only in initialization code.
*/
private fun identifyCurrent(): DesktopPlatform {
val name = System.getProperty("os.name")
return when {
name.startsWith("Linux") -> DesktopPlatform.Linux
name.startsWith("Win") -> DesktopPlatform.Windows
name == "Mac OS X" -> DesktopPlatform.MacOS
else -> throw Error("Unsupported OS $name")
}
}
enum class DesktopPlatform {
Linux,
Windows,
MacOS
}
| 32.377778
| 75
| 0.72615
|
464b028eb4a2c95c6bf100172adb420cee1ef0e3
| 638
|
php
|
PHP
|
acitemsell.php
|
dritchie1961/c2cnetworks20190722
|
087f13111a4da051be6e065fe64db7a211c9a2c6
|
[
"BSD-3-Clause"
] | 1
|
2019-07-23T03:33:56.000Z
|
2019-07-23T03:33:56.000Z
|
acitemsell.php
|
dritchie1961/c2cnetworks20190722
|
087f13111a4da051be6e065fe64db7a211c9a2c6
|
[
"BSD-3-Clause"
] | null | null | null |
acitemsell.php
|
dritchie1961/c2cnetworks20190722
|
087f13111a4da051be6e065fe64db7a211c9a2c6
|
[
"BSD-3-Clause"
] | null | null | null |
<?php
//echo 'inside';
require ("../codebase/connector/form_connector.php");
require ("../codebase/connector/db_mysqli.php");
require ("accommon.php");
//date_default_timezone_set("America/Toronto");
//$host = "localhost";
//$user = "dritchie1961";
//$pass = "N0drepus";
//$database = "c2cnetworks";
//$port = '3306';
//$link = mysqli_connect($host, $user, $pass, $database, $port) or die("unable to connect to MySQL");
$conn = new FormConnector($link, "MySQLi");
$conn->render_table("c2cnetworks.lppasset", "lppassetid", "lppassetid, region,country,federation,series,seriessubset,description");
?>
| 22.785714
| 132
| 0.659875
|
4d838310a767ae42836c2fa84c39995f7aa4d5b1
| 476
|
cs
|
C#
|
Scene/Otherassets/Back.cs
|
11PRINCE/IndoorNAV
|
a0b5307f86b7e7c7a832290fab2f2259f59f04e7
|
[
"Apache-2.0"
] | 1
|
2020-06-07T12:26:44.000Z
|
2020-06-07T12:26:44.000Z
|
Scene/Otherassets/Back.cs
|
11PRINCE/IndoorNAV
|
a0b5307f86b7e7c7a832290fab2f2259f59f04e7
|
[
"Apache-2.0"
] | null | null | null |
Scene/Otherassets/Back.cs
|
11PRINCE/IndoorNAV
|
a0b5307f86b7e7c7a832290fab2f2259f59f04e7
|
[
"Apache-2.0"
] | null | null | null |
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class Back : MonoBehaviour
{
public GameObject Panel;
public void OpenPanel()
{
if(Panel!=null)
{
Animator animator = Panel.GetComponent<Animator>();
if (animator!=null)
{
bool isOpen = animator.GetBool("back");
animator.SetBool("back", !isOpen);
}
}
}
}
| 21.636364
| 63
| 0.535714
|
b08bdc9b011459118b0de3ba98d1046a9f48abc7
| 3,063
|
py
|
Python
|
http/data/csv-vis.py
|
cheeseywhiz/cheeseywhiz
|
51f6651ddbaeebd14d9ce77776bc4cf3a95511c4
|
[
"MIT"
] | null | null | null |
http/data/csv-vis.py
|
cheeseywhiz/cheeseywhiz
|
51f6651ddbaeebd14d9ce77776bc4cf3a95511c4
|
[
"MIT"
] | null | null | null |
http/data/csv-vis.py
|
cheeseywhiz/cheeseywhiz
|
51f6651ddbaeebd14d9ce77776bc4cf3a95511c4
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
import csv
import sys
import matplotlib.pyplot as plt
from config import data_sets, fontdict
try:
sys.argv[1]
if sys.argv[1] not in data_sets:
raise IndexError
except IndexError as error:
keys = '\n'.join(key for key in data_sets)
print(f'Data sets:\n{keys}\nPut in arg #1')
sys.exit(1)
data_set = data_sets[sys.argv[1]]
# allowing for None end chars
if data_set['str-end-chars'] is not None:
data_set['str-end-chars'] *= -1
with open(data_set['file-location']) as file:
# for processing huge files
csv.field_size_limit(sys.maxsize)
# you can unpack a list: no tupling required here
raw_data = list(csv.reader(file))
print('raw_data')
# headers from data[0] so far
# strip MULTIPOLYGON ((( ))) from coordinates string
# remove headers row [0]
formatted_data = [
(
row[data_set['label-index']].capitalize(),
row[data_set['data-index']][
data_set['str-start-chars']:data_set['str-end-chars']
]
)
for row in raw_data[1:]
]
print('formatted_data')
# mo county data pairs coords differently
if data_set == data_sets['mo-counties']:
formatted_data = [
(label, coords.replace(',', ' '))
for label, coords in formatted_data
]
# split up numbers to furthur work with
split_coords = [
(label, coords_str.split(' '))
for label, coords_str in formatted_data
]
print('split_coords')
# turn strings into floats by trimming off traiing characters if necessary
def float_recur(str, n=1):
if n > 1000: # Or else it causes stack overflow (???)
return None # Also good for debugging
try:
return float(str)
except Exception:
return float_recur(str[:-1], n=n + 1)
float_coords = [
(label, [float_recur(coord) for coord in coords_str])
for label, coords_str in split_coords
]
print('float_coords')
# throw pairs of consecutive lat/longs together in a single tuple
def combine(list):
for i in range(len(list)):
if not i % 2:
yield list[i], list[i + 1]
coord_pairs = [
(label, [i for i in combine(coords)])
for label, coords in float_coords
]
print('coord_pairs')
# calculate the center of the area to place the label
def center(points: list):
# filter out None values from combine() generator
points = [
(x, y)
for x, y in points
if not (x is None or y is None)
]
def avg(list):
return sum(list) / len(list)
x, y = zip(*points)
return avg(x), avg(y)
label_geom_center = [
(label, coords, center(coords))
for label, coords in coord_pairs
]
print('label_geom_center')
# convert pairs of coordinates into lists of lats and longs
boundaries = [
(label, zip(*coords), center)
for label, coords, center in label_geom_center
]
print('boundaries')
# plot the data
for label, boundary, center in boundaries:
plt.plot(*boundary)
if data_set['show-labels']:
plt.text(*center, label, fontdict=fontdict)
print('showing plot')
plt.show()
print('done')
| 24.11811
| 74
| 0.653934
|
e06708288906ca6672802bba7c1ed1cee0a8a0e9
| 441
|
h
|
C
|
VKHDHeaders/messages_sendSticker_req.h
|
Anonym0uz/VKPreferences
|
872932a34c7b0057369c2c7aa9861222b121c3db
|
[
"MIT"
] | 4
|
2018-11-02T01:08:31.000Z
|
2019-03-19T20:27:09.000Z
|
VKHDHeaders/messages_sendSticker_req.h
|
Anonym0uz/VKPreferences
|
872932a34c7b0057369c2c7aa9861222b121c3db
|
[
"MIT"
] | null | null | null |
VKHDHeaders/messages_sendSticker_req.h
|
Anonym0uz/VKPreferences
|
872932a34c7b0057369c2c7aa9861222b121c3db
|
[
"MIT"
] | 1
|
2018-11-02T01:08:36.000Z
|
2018-11-02T01:08:36.000Z
|
//
// Generated by class-dump 3.5 (64 bit).
//
// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2013 by Steve Nygard.
//
#import "messages_send_req.h"
@class NSNumber;
@interface messages_sendSticker_req : messages_send_req
{
NSNumber *_sticker_id;
}
@property(retain, nonatomic) NSNumber *sticker_id; // @synthesize sticker_id=_sticker_id;
- (void).cxx_destruct;
- (Class)responseClass;
- (id)getMethodName;
@end
| 19.173913
| 89
| 0.714286
|
57ed5381e74defe8eeaf66236697f4ffde650c44
| 302
|
php
|
PHP
|
app/Models/Visitor.php
|
Umayantha93/HotelCovidApi
|
ce93fa6819708de1fac2d06b9a4a285af3b3b231
|
[
"MIT"
] | null | null | null |
app/Models/Visitor.php
|
Umayantha93/HotelCovidApi
|
ce93fa6819708de1fac2d06b9a4a285af3b3b231
|
[
"MIT"
] | null | null | null |
app/Models/Visitor.php
|
Umayantha93/HotelCovidApi
|
ce93fa6819708de1fac2d06b9a4a285af3b3b231
|
[
"MIT"
] | null | null | null |
<?php
namespace App\Models;
use Illuminate\Database\Eloquent\Factories\HasFactory;
use Illuminate\Database\Eloquent\Model;
class Visitor extends Model
{
use HasFactory;
protected $table = 'visitors';
protected $fillable = ['name', 'national_id', 'tempreture','phone', 'reason'];
}
| 18.875
| 82
| 0.711921
|
494340638077a71455b679bfb06ff350ee4f199f
| 16,019
|
py
|
Python
|
alphastarmini/core/arch/location_head.py
|
cloneniu/mini-AlphaStar
|
b08c48e2c04a384fce5a84245e54ded93c6def4e
|
[
"Apache-2.0"
] | null | null | null |
alphastarmini/core/arch/location_head.py
|
cloneniu/mini-AlphaStar
|
b08c48e2c04a384fce5a84245e54ded93c6def4e
|
[
"Apache-2.0"
] | null | null | null |
alphastarmini/core/arch/location_head.py
|
cloneniu/mini-AlphaStar
|
b08c48e2c04a384fce5a84245e54ded93c6def4e
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
" Location Head."
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.nn.init import kaiming_uniform, normal
from alphastarmini.lib.hyper_parameters import Arch_Hyper_Parameters as AHP
from alphastarmini.lib.hyper_parameters import MiniStar_Arch_Hyper_Parameters as MAHP
from alphastarmini.lib.hyper_parameters import StarCraft_Hyper_Parameters as SCHP
from alphastarmini.lib.hyper_parameters import Scalar_Feature_Size as SFS
from alphastarmini.lib import utils as L
__author__ = "Ruo-Ze Liu"
debug = False
class ResBlockFiLM(nn.Module):
# some copy from https://github.com/rosinality/film-pytorch/blob/master/model.py
def __init__(self, filter_size):
super().__init__()
self.conv1 = nn.Conv2d(filter_size, filter_size, kernel_size=[1, 1], stride=1, padding=0)
self.conv2 = nn.Conv2d(filter_size, filter_size, kernel_size=[3, 3], stride=1, padding=1, bias=False)
self.bn = nn.BatchNorm2d(filter_size, affine=False)
self.reset()
def forward(self, x, gamma, beta):
out = self.conv1(x)
resid = F.relu(out)
out = self.conv2(resid)
out = self.bn(out)
gamma = gamma.unsqueeze(2).unsqueeze(3)
beta = beta.unsqueeze(2).unsqueeze(3)
out = gamma * out + beta
out = F.relu(out)
out = out + resid
return out
def reset(self):
# deprecated, should try to find others
# kaiming_uniform(self.conv1.weight)
# self.conv1.bias.data.zero_()
# kaiming_uniform(self.conv2.weight)
pass
class FiLM(nn.Module):
# some copy from https://github.com/rosinality/film-pytorch/blob/master/model.py
def __init__(self, n_resblock=4, conv_hidden=128, gate_size=1024):
super().__init__()
self.n_resblock = n_resblock
self.conv_hidden = conv_hidden
self.resblocks = nn.ModuleList()
for i in range(n_resblock):
self.resblocks.append(ResBlockFiLM(conv_hidden))
self.film_net = nn.Linear(gate_size, conv_hidden * 2 * n_resblock)
def reset(self):
# deprecated, should try to find others
# kaiming_uniform(self.film_net.weight)
# self.film_net.bias.data.zero_()
pass
def forward(self, x, gate):
out = x
film = self.film_net(gate).chunk(self.n_resblock * 2, 1)
for i, resblock in enumerate(self.resblocks):
out = resblock(out, film[i * 2], film[i * 2 + 1])
return out
class FiLMplusMapSkip(nn.Module):
# Thanks mostly from https://github.com/metataro/sc2_imitation_learning in spatial_decoder
def __init__(self, n_resblock=4, conv_hidden=128, gate_size=1024):
super().__init__()
self.n_resblock = n_resblock
self.conv_hidden = conv_hidden
self.resblocks = nn.ModuleList()
for i in range(n_resblock):
self.resblocks.append(ResBlockFiLM(conv_hidden))
self.film_net = nn.Linear(gate_size, conv_hidden * 2 * n_resblock)
def reset(self):
# deprecated, should try to find others
# kaiming_uniform(self.film_net.weight)
# self.film_net.bias.data.zero_()
pass
def forward(self, x, gate, map_skip):
out = x
film = self.film_net(gate).chunk(self.n_resblock * 2, 1)
for i, resblock in enumerate(self.resblocks):
out = resblock(out, film[i * 2], film[i * 2 + 1])
out = out + map_skip[i]
# TODO: should we add a relu?
return out
class LocationHead(nn.Module):
'''
Inputs: autoregressive_embedding, action_type, map_skip
Outputs:
target_location_logits - The logits corresponding to the probabilities of targeting each location
target_location - The sampled target location
'''
def __init__(self, autoregressive_embedding_size=AHP.autoregressive_embedding_size,
output_map_size=SCHP.world_size, is_sl_training=True,
max_map_channels=AHP.location_head_max_map_channels,
temperature=0.8):
super().__init__()
self.use_improved_one = True
self.is_sl_training = is_sl_training
if not self.is_sl_training:
self.temperature = temperature
else:
self.temperature = 1.0
mmc = max_map_channels
self.ds_1 = nn.Conv2d(mmc + 4, mmc, kernel_size=1, stride=1,
padding=0, bias=True)
self.film_blocks_num = 4
if not self.use_improved_one:
self.film_net = FiLM(n_resblock=self.film_blocks_num,
conv_hidden=mmc,
gate_size=autoregressive_embedding_size)
else:
self.film_net_mapskip = FiLMplusMapSkip(n_resblock=self.film_blocks_num,
conv_hidden=mmc,
gate_size=autoregressive_embedding_size)
self.us_1 = nn.ConvTranspose2d(mmc, int(mmc / 2), kernel_size=4, stride=2,
padding=1, bias=True)
self.us_2 = nn.ConvTranspose2d(int(mmc / 2), int(mmc / 4),
kernel_size=4, stride=2,
padding=1, bias=True)
self.us_3 = nn.ConvTranspose2d(int(mmc / 4), int(mmc / 8),
kernel_size=4, stride=2,
padding=1, bias=True)
self.us_4 = nn.ConvTranspose2d(int(mmc / 8), int(mmc / 16),
kernel_size=4, stride=2,
padding=1, bias=True)
self.us_4_original = nn.ConvTranspose2d(int(mmc / 8), 1,
kernel_size=4, stride=2,
padding=1, bias=True)
# note: in mAS, we add a upsampling layer to transfer from 8x8 to 256x256
self.us_5 = nn.ConvTranspose2d(int(mmc / 16), 1, kernel_size=4, stride=2,
padding=1, bias=True)
self.output_map_size = output_map_size
self.softmax = nn.Softmax(dim=-1)
def forward(self, autoregressive_embedding, action_type, map_skip):
'''
Inputs:
autoregressive_embedding: [batch_size x autoregressive_embedding_size]
action_type: [batch_size x 1]
map_skip: [batch_size x channel x height x width]
Output:
target_location_logits: [batch_size x self.output_map_size x self.output_map_size]
location_out: [batch_size x 2 (x and y)]
'''
# AlphaStar: `autoregressive_embedding` is reshaped to have the same height/width as the final skip in `map_skip`
# AlphaStar: (which was just before map information was reshaped to a 1D embedding) with 4 channels
# sc2_imitation_learning: map_skip = list(reversed(map_skip))
# sc2_imitation_learning: inputs, map_skip = map_skip[0], map_skip[1:]
map_skip = list(reversed(map_skip))
x, map_skip = map_skip[0], map_skip[1:]
print("x.shape:", map_skip.shape) if debug else None
batch_size = x.shape[0]
assert autoregressive_embedding.shape[0] == action_type.shape[0]
assert autoregressive_embedding.shape[0] == x.shape[0]
reshap_size = x.shape[-1]
reshape_channels = int(AHP.autoregressive_embedding_size / (reshap_size * reshap_size))
print("autoregressive_embedding.shape:", autoregressive_embedding.shape) if debug else None
ar_map = autoregressive_embedding.reshape(batch_size, -1, reshap_size, reshap_size)
print("ar_map.shape:", ar_map.shape) if debug else None
# AlphaStar: and the two are concatenated together along the channel dimension,
# map skip shape: (-1, 128, 16, 16)
# x shape: (-1, 132, 16, 16)
x = torch.cat([ar_map, x], dim=1)
print("x.shape:", x.shape) if debug else None
# AlphaStar: passed through a ReLU,
# AlphaStar: passed through a 2D convolution with 128 channels and kernel size 1,
# AlphaStar: then passed through another ReLU.
x = F.relu(self.ds_1(F.relu(x)))
if not self.use_improved_one:
# AlphaStar: The 3D tensor (height, width, and channels) is then passed through a series of Gated ResBlocks
# AlphaStar: with 128 channels, kernel size 3, and FiLM, gated on `autoregressive_embedding`
# note: FilM is Feature-wise Linear Modulation, please see the paper "FiLM: Visual Reasoning with
# a General Conditioning Layer"
# in here we use 4 Gated ResBlocks, and the value can be changed
x = self.film_net(x, gate=autoregressive_embedding)
# x shape (-1, 128, 16, 16)
# AlphaStar: and using the elements of `map_skip` in order of last ResBlock skip to first.
x = x + map_skip
else:
# Referenced mostly from "sc2_imitation_learning" project in spatial_decoder
assert len(map_skip) == self.film_blocks_num
# use the new FiLMplusMapSkip class
x = self.film_net_mapskip(x, gate=autoregressive_embedding,
map_skip=map_skip)
# Compared to AS, we a relu, referred from "sc2_imitation_learning"
x = F.relu(x)
# AlphaStar: Afterwards, it is upsampled 2x by each of a series of transposed 2D convolutions
# AlphaStar: with kernel size 4 and channel sizes 128, 64, 16, and 1 respectively
# AlphaStar: (upsampled beyond the 128x128 input to 256x256 target location selection).
x = F.relu(self.us_1(x))
x = F.relu(self.us_2(x))
x = F.relu(self.us_3(x))
if AHP == MAHP:
x = F.relu(self.us_4(x))
# only in mAS, we need one more upsample step
# x = F.relu(self.us_5(x))
# Note: in the final layer, we don't use relu
x = self.us_5(x)
else:
x = self.us_4_original(x)
# AlphaStar: Those final logits are flattened and sampled (masking out invalid locations using `action_type`,
# AlphaStar: such as those outside the camera for build actions) with temperature 0.8
# AlphaStar: to get the actual target position.
# x shape: (-1, 1, 256, 256)
print('x.shape:', x.shape) if debug else None
y = x.reshape(batch_size, 1 * self.output_map_size * self.output_map_size)
device = next(self.parameters()).device
print("y:", y) if debug else None
print("y_.shape:", y.shape) if debug else None
target_location_logits = y.div(self.temperature)
print("target_location_logits:", target_location_logits) if debug else None
print("target_location_logits.shape:", target_location_logits.shape) if debug else None
# AlphaStar: (masking out invalid locations using `action_type`, such as those outside
# the camera for build actions)
# TODO: use action to decide the mask
if True:
# referenced from lib/utils.py function of masked_softmax()
mask = torch.zeros(batch_size, 1 * self.output_map_size * self.output_map_size, device=device)
mask = L.get_location_mask(mask)
mask_fill_value = -1e32 # a very small number
masked_vector = target_location_logits.masked_fill((1 - mask).bool(), mask_fill_value)
target_location_probs = self.softmax(masked_vector)
else:
target_location_probs = self.softmax(target_location_logits)
location_id = torch.multinomial(target_location_probs, num_samples=1, replacement=True)
print("location_id:", location_id) if debug else None
print("location_id.shape:", location_id.shape) if debug else None
location_out = location_id.squeeze(-1).cpu().numpy().tolist()
print("location_out:", location_out) if debug else None
# print("location_out.shape:", location_out.shape) if debug else None
for i, idx in enumerate(location_id):
row_number = idx // self.output_map_size
col_number = idx - self.output_map_size * row_number
target_location_y = row_number
target_location_x = col_number
print("target_location_y, target_location_x", target_location_y, target_location_x) if debug else None
# note! sc2 and pysc2 all accept the position as [x, y], so x be the first, y be the last!
# this is not right : location_out[i] = [target_location_y.item(), target_location_x.item()]
# below is right! so the location point map to the point in the matrix!
location_out[i] = [target_location_x.item(), target_location_y.item()]
# AlphaStar: If `action_type` does not involve targetting location, this head is ignored.
target_location_mask = L.action_involve_targeting_location_mask(action_type)
# target_location_mask: [batch_size x 1]
print("target_location_mask:", target_location_mask) if debug else None
print("location_out:", location_out) if debug else None
location_out = np.array(location_out)
print("location_out:", location_out) if debug else None
location_out = torch.tensor(location_out, device=device)
print("location_out:", location_out) if debug else None
print("location_out.shape:", location_out.shape) if debug else None
target_location_logits = target_location_logits.reshape(-1, self.output_map_size, self.output_map_size)
target_location_logits = target_location_logits * target_location_mask.float().unsqueeze(-1)
location_out = location_out * target_location_mask.long()
location_out = location_out
return target_location_logits, location_out
def test():
batch_size = 2
autoregressive_embedding = torch.randn(batch_size, AHP.autoregressive_embedding_size)
action_type_sample = 65 # func: 65/Effect_PsiStorm_pt (1/queued [2]; 2/unit_tags [512]; 0/world [0, 0])
action_type = torch.randint(low=0, high=SFS.available_actions, size=(batch_size, 1))
map_skip = []
if AHP == MAHP:
for i in range(5):
map_skip.append(torch.randn(batch_size, AHP.location_head_max_map_channels, 8, 8))
else:
for i in range(5):
map_skip.append(torch.randn(batch_size, AHP.location_head_max_map_channels, 16, 16))
location_head = LocationHead()
print("autoregressive_embedding:", autoregressive_embedding) if debug else None
print("autoregressive_embedding.shape:", autoregressive_embedding.shape) if 1 else None
target_location_logits, target_location = \
location_head.forward(autoregressive_embedding, action_type, map_skip)
if target_location_logits is not None:
print("target_location_logits:", target_location_logits) if debug else None
print("target_location_logits.shape:", target_location_logits.shape) if debug else None
else:
print("target_location_logits is None!")
if target_location is not None:
print("target_location:", target_location) if debug else None
# print("target_location.shape:", target_location.shape) if debug else None
else:
print("target_location is None!")
print("This is a test!") if debug else None
if __name__ == '__main__':
test()
| 43.76776
| 123
| 0.623197
|
4cdddcfccb714af20c3b59d9214afb22f5c244e6
| 5,950
|
py
|
Python
|
third_party/webrtc/src/chromium/src/tools/perf/page_sets/webrtc_cases.py
|
bopopescu/webrtc-streaming-node
|
727a441204344ff596401b0253caac372b714d91
|
[
"MIT"
] | 8
|
2016-02-08T11:59:31.000Z
|
2020-05-31T15:19:54.000Z
|
third_party/webrtc/src/chromium/src/tools/perf/page_sets/webrtc_cases.py
|
bopopescu/webrtc-streaming-node
|
727a441204344ff596401b0253caac372b714d91
|
[
"MIT"
] | 1
|
2021-05-05T11:11:31.000Z
|
2021-05-05T11:11:31.000Z
|
third_party/webrtc/src/chromium/src/tools/perf/page_sets/webrtc_cases.py
|
bopopescu/webrtc-streaming-node
|
727a441204344ff596401b0253caac372b714d91
|
[
"MIT"
] | 7
|
2016-02-09T09:28:14.000Z
|
2020-07-25T19:03:36.000Z
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
from telemetry.page import page as page_module
from telemetry import story
WEBRTC_GITHUB_SAMPLES_URL = 'https://webrtc.github.io/samples/src/content/'
class WebrtcCasesPage(page_module.Page):
def __init__(self, url, page_set, name):
super(WebrtcCasesPage, self).__init__(
url=url, page_set=page_set, name=name)
with open(os.path.join(os.path.dirname(__file__),
'webrtc_track_peerconnections.js')) as javascript:
self.script_to_evaluate_on_commit = javascript.read()
class Page1(WebrtcCasesPage):
""" Why: Acquires a vga local stream. """
def __init__(self, page_set):
super(Page1, self).__init__(
url=WEBRTC_GITHUB_SAMPLES_URL + 'getusermedia/gum/',
name='vga_local_stream_10s',
page_set=page_set)
def RunPageInteractions(self, action_runner):
action_runner.Wait(10)
class Page2(WebrtcCasesPage):
""" Why: Sets up a local WebRTC call. """
def __init__(self, page_set):
super(Page2, self).__init__(
url=WEBRTC_GITHUB_SAMPLES_URL + 'peerconnection/pc1/',
name='vga_call_10s',
page_set=page_set)
def RunPageInteractions(self, action_runner):
action_runner.ClickElement('button[id="startButton"]')
action_runner.Wait(2)
action_runner.ClickElement('button[id="callButton"]')
action_runner.Wait(10)
action_runner.ClickElement('button[id="hangupButton"]')
class Page3(WebrtcCasesPage):
""" Why: Acquires a high definition local stream. """
def __init__(self, page_set):
super(Page3, self).__init__(
url=WEBRTC_GITHUB_SAMPLES_URL + 'getusermedia/resolution/',
name='hd_local_stream_10s',
page_set=page_set)
def RunPageInteractions(self, action_runner):
action_runner.ClickElement('button[id="hd"]')
action_runner.Wait(10)
class Page4(WebrtcCasesPage):
""" Why: Sets up a WebRTC audio call with Opus. """
def __init__(self, page_set):
super(Page4, self).__init__(
url=WEBRTC_GITHUB_SAMPLES_URL + 'peerconnection/audio/?codec=OPUS',
name='audio_call_opus_10s',
page_set=page_set)
def RunPageInteractions(self, action_runner):
action_runner.ExecuteJavaScript('codecSelector.value="OPUS";')
action_runner.ClickElement('button[id="callButton"]')
action_runner.Wait(10)
class Page5(WebrtcCasesPage):
""" Why: Sets up a WebRTC audio call with G722. """
def __init__(self, page_set):
super(Page5, self).__init__(
url=WEBRTC_GITHUB_SAMPLES_URL + 'peerconnection/audio/?codec=G722',
name='audio_call_g722_10s',
page_set=page_set)
def RunPageInteractions(self, action_runner):
action_runner.ExecuteJavaScript('codecSelector.value="G722";')
action_runner.ClickElement('button[id="callButton"]')
action_runner.Wait(10)
class Page6(WebrtcCasesPage):
""" Why: Sets up a WebRTC audio call with PCMU. """
def __init__(self, page_set):
super(Page6, self).__init__(
url=WEBRTC_GITHUB_SAMPLES_URL + 'peerconnection/audio/?codec=PCMU',
name='audio_call_pcmu_10s',
page_set=page_set)
def RunPageInteractions(self, action_runner):
action_runner.ExecuteJavaScript('codecSelector.value="PCMU";')
action_runner.ClickElement('button[id="callButton"]')
action_runner.Wait(10)
class Page7(WebrtcCasesPage):
""" Why: Sets up a WebRTC audio call with iSAC 16K. """
def __init__(self, page_set):
super(Page7, self).__init__(
url=WEBRTC_GITHUB_SAMPLES_URL + 'peerconnection/audio/?codec=ISAC_16K',
name='audio_call_isac16k_10s',
page_set=page_set)
def RunPageInteractions(self, action_runner):
action_runner.ExecuteJavaScript('codecSelector.value="ISAC/16000";')
action_runner.ClickElement('button[id="callButton"]')
action_runner.Wait(10)
class Page8(WebrtcCasesPage):
""" Why: Sets up a WebRTC 720p call for 45 seconds. """
def __init__(self, page_set):
super(Page8, self).__init__(
url=WEBRTC_GITHUB_SAMPLES_URL + 'peerconnection/constraints/',
name='720p_call_45s',
page_set=page_set)
def RunPageInteractions(self, action_runner):
action_runner.ExecuteJavaScript('minWidthInput.value = 1280')
action_runner.ExecuteJavaScript('maxWidthInput.value = 1280')
action_runner.ExecuteJavaScript('minHeightInput.value = 720')
action_runner.ExecuteJavaScript('maxHeightInput.value = 720')
action_runner.ClickElement('button[id="getMedia"]')
action_runner.Wait(2)
action_runner.ClickElement('button[id="connect"]')
action_runner.Wait(45)
class Page9(WebrtcCasesPage):
""" Why: Transfer as much data as possible through a data channel in 20s. """
def __init__(self, page_set):
super(Page9, self).__init__(
url=WEBRTC_GITHUB_SAMPLES_URL + 'datachannel/datatransfer',
name="30s_datachannel_transfer",
page_set=page_set)
def RunPageInteractions(self, action_runner):
# It won't have time to finish the 512 MB, but we're only interested in
# cpu + memory anyway rather than how much data we manage to transfer.
action_runner.ExecuteJavaScript('megsToSend.value = 512;')
action_runner.ClickElement('button[id="sendTheData"]')
action_runner.Wait(30)
class WebrtcCasesPageSet(story.StorySet):
""" WebRTC tests for Real-time audio and video communication. """
def __init__(self):
super(WebrtcCasesPageSet, self).__init__(
archive_data_file='data/webrtc_cases.json',
cloud_storage_bucket=story.PUBLIC_BUCKET)
self.AddStory(Page1(self))
self.AddStory(Page2(self))
self.AddStory(Page3(self))
# Disable page 4-7 until we can implement http://crbug.com/468732. We can
# get data out from the tests, but it's not very useful yet.
self.AddStory(Page8(self))
self.AddStory(Page9(self))
| 31.481481
| 79
| 0.72
|
384590984d225988ceec706ce756068ef2929637
| 2,544
|
php
|
PHP
|
app/Models/Beacon.php
|
enyingtan/GoXplora
|
886b33a57f0df027668c5e9edcfa7f2a7a5cdb01
|
[
"MIT"
] | null | null | null |
app/Models/Beacon.php
|
enyingtan/GoXplora
|
886b33a57f0df027668c5e9edcfa7f2a7a5cdb01
|
[
"MIT"
] | null | null | null |
app/Models/Beacon.php
|
enyingtan/GoXplora
|
886b33a57f0df027668c5e9edcfa7f2a7a5cdb01
|
[
"MIT"
] | null | null | null |
<?php
namespace App\Models;
use Backpack\CRUD\app\Models\Traits\CrudTrait;
use Backpack\CRUD\app\Models\Traits\SpatieTranslatable\HasTranslations;
use Illuminate\Database\Eloquent\Model;
class Beacon extends Model
{
use CrudTrait;
use HasTranslations;
/*
|--------------------------------------------------------------------------
| GLOBAL VARIABLES
|--------------------------------------------------------------------------
*/
protected $guarded = [];
protected $hidden = ['pivot'];
protected $fillable = ['minor', 'title', 'reference', 'description', 'range', 'local', 'battery'];
public $timestamps = true;
public $translatable = ['title', 'description'];
/*
|--------------------------------------------------------------------------
| FUNCTIONS
|--------------------------------------------------------------------------
*/
public function toggleActive($crud = false)
{
return
'<div class="btn btn-xs btn-default on ' . ($this->active ? '' : 'hide') . '" ajax-toggle-id="' . $this->id . '" ajax-toggle="/admin/beacon/' . $this->id . '/0"><i class="nav-icon la la-check-square-o"></i> Deactivate</div>' .
'<div class="btn btn-xs btn-default off ' . ($this->active ? 'hide' : '') . '" ajax-toggle-id="' . $this->id . '" ajax-toggle="/admin/beacon/' . $this->id . '/1"><i class="nav-icon la la-square-o"></i> Activate</div>';
}
/*
|--------------------------------------------------------------------------
| RELATIONS
|--------------------------------------------------------------------------
*/
public function modules()
{
return $this->belongsToMany('App\Models\Module', 'module_beacon', 'beacon_id', 'module_id');
}
/*
|--------------------------------------------------------------------------
| SCOPES
|--------------------------------------------------------------------------
*/
/*
|--------------------------------------------------------------------------
| ACCESORS
|--------------------------------------------------------------------------
*/
public function getDetailAttribute()
{
return "$this->title ($this->minor, " . __($this->local) . ')';
}
/*
|--------------------------------------------------------------------------
| MUTATORS
|--------------------------------------------------------------------------
*/
public function toArray()
{
$data = parent::toArray();
$data['detail'] = $this->detail;
return $data;
}
}
| 31.02439
| 234
| 0.36989
|
af96e095bc4eb1522c0ba8e9b7045170f52d0689
| 5,021
|
dart
|
Dart
|
dev/devicelab/lib/tasks/web_dev_mode_tests.dart
|
eujinong/reactjs-modal
|
01a5d112d5ffb3547f5b3ab48bafca2104f5ad31
|
[
"BSD-3-Clause"
] | 7
|
2020-07-04T06:05:52.000Z
|
2022-02-26T00:41:51.000Z
|
dev/devicelab/lib/tasks/web_dev_mode_tests.dart
|
eujinong/reactjs-modal
|
01a5d112d5ffb3547f5b3ab48bafca2104f5ad31
|
[
"BSD-3-Clause"
] | 1
|
2019-03-06T07:31:58.000Z
|
2019-03-06T07:31:58.000Z
|
dev/devicelab/lib/tasks/web_dev_mode_tests.dart
|
eujinong/reactjs-modal
|
01a5d112d5ffb3547f5b3ab48bafca2104f5ad31
|
[
"BSD-3-Clause"
] | 2
|
2019-12-24T10:06:47.000Z
|
2021-09-26T14:04:57.000Z
|
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:async';
import 'dart:convert';
import 'dart:io';
import 'package:path/path.dart' as path;
import '../framework/framework.dart';
import '../framework/utils.dart';
final Directory _editedFlutterGalleryDir = dir(path.join(Directory.systemTemp.path, 'edited_flutter_gallery'));
final Directory flutterGalleryDir = dir(path.join(flutterDirectory.path, 'examples/flutter_gallery'));
TaskFunction createWebDevModeTest() {
return () async {
final List<String> options = <String>[
'--hot', '-d', 'chrome', '--verbose', '--resident', '--target=lib/main.dart',
];
int hotRestartCount = 0;
await inDirectory<void>(flutterDirectory, () async {
rmTree(_editedFlutterGalleryDir);
mkdirs(_editedFlutterGalleryDir);
recursiveCopy(flutterGalleryDir, _editedFlutterGalleryDir);
await inDirectory<void>(_editedFlutterGalleryDir, () async {
{
final Process packagesGet = await startProcess(
path.join(flutterDirectory.path, 'bin', 'flutter'),
<String>['packages', 'get'],
environment: <String, String>{
'FLUTTER_WEB': 'true',
},
);
await packagesGet.exitCode;
final Process process = await startProcess(
path.join(flutterDirectory.path, 'bin', 'flutter'),
flutterCommandArgs('run', options),
environment: <String, String>{
'FLUTTER_WEB': 'true',
},
);
final Completer<void> stdoutDone = Completer<void>();
final Completer<void> stderrDone = Completer<void>();
process.stdout
.transform<String>(utf8.decoder)
.transform<String>(const LineSplitter())
.listen((String line) {
if (line.contains('To hot restart')) {
process.stdin.write('R');
}
if (line.contains('Restarted')) {
if (hotRestartCount == 0) {
// Update the file and reload again.
final File appDartSource = file(path.join(
_editedFlutterGalleryDir.path, 'lib/gallery/app.dart',
));
appDartSource.writeAsStringSync(
appDartSource.readAsStringSync().replaceFirst(
"'Flutter Gallery'", "'Updated Flutter Gallery'",
)
);
process.stdin.writeln('R');
++hotRestartCount;
} else {
// Quit after second hot restart.
process.stdin.writeln('q');
}
}
print('stdout: $line');
}, onDone: () {
stdoutDone.complete();
});
process.stderr
.transform<String>(utf8.decoder)
.transform<String>(const LineSplitter())
.listen((String line) {
print('stderr: $line');
}, onDone: () {
stderrDone.complete();
});
await Future.wait<void>(<Future<void>>[
stdoutDone.future,
stderrDone.future,
]);
await process.exitCode;
}
// Start `flutter run` again to make sure it loads from the previous
// state. dev compilers loads up from previously compiled JavaScript.
{
final Process process = await startProcess(
path.join(flutterDirectory.path, 'bin', 'flutter'),
flutterCommandArgs('run', options),
environment: <String, String>{
'FLUTTER_WEB': 'true',
},
);
final Completer<void> stdoutDone = Completer<void>();
final Completer<void> stderrDone = Completer<void>();
process.stdout
.transform<String>(utf8.decoder)
.transform<String>(const LineSplitter())
.listen((String line) {
if (line.contains('To hot restart')) {
process.stdin.write('R');
}
if (line.contains('Restarted')) {
process.stdin.writeln('q');
}
print('stdout: $line');
}, onDone: () {
stdoutDone.complete();
});
process.stderr
.transform<String>(utf8.decoder)
.transform<String>(const LineSplitter())
.listen((String line) {
print('stderr: $line');
}, onDone: () {
stderrDone.complete();
});
await Future.wait<void>(<Future<void>>[
stdoutDone.future,
stderrDone.future,
]);
await process.exitCode;
}
});
});
if (hotRestartCount != 1) {
return TaskResult.failure(null);
}
return TaskResult.success(null);
};
}
| 35.359155
| 111
| 0.534157
|
07f8d01021e7755818c79095559db558f030b39e
| 452
|
rb
|
Ruby
|
lib/generators/brain_damage/lib/ruby_simple_parser/class_definition.rb
|
fedeaux/brain_damge2
|
3ab4dc6e8cf534e2034591709e4c86ec09205aa8
|
[
"MIT"
] | null | null | null |
lib/generators/brain_damage/lib/ruby_simple_parser/class_definition.rb
|
fedeaux/brain_damge2
|
3ab4dc6e8cf534e2034591709e4c86ec09205aa8
|
[
"MIT"
] | null | null | null |
lib/generators/brain_damage/lib/ruby_simple_parser/class_definition.rb
|
fedeaux/brain_damge2
|
3ab4dc6e8cf534e2034591709e4c86ec09205aa8
|
[
"MIT"
] | null | null | null |
module RubySimpleParser
class ClassDefinition < Block
CLASS_REGEX = /class\s+(?<class_name>\w+)/
def initialize(definition, visibility, parent = nil)
super definition, parent
@visibility = visibility
@name = ClassDefinition.extract_class_name definition
end
def self.extract_class_name(code)
(code.match CLASS_REGEX)[:class_name].to_sym
end
def definition
@lines.first.print
end
end
end
| 22.6
| 59
| 0.692478
|