hexsha
stringlengths 40
40
| size
int64 5
1.05M
| ext
stringclasses 98
values | lang
stringclasses 21
values | max_stars_repo_path
stringlengths 3
945
| max_stars_repo_name
stringlengths 4
118
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
368k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
945
| max_issues_repo_name
stringlengths 4
118
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
134k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
945
| max_forks_repo_name
stringlengths 4
135
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 5
1.05M
| avg_line_length
float64 1
1.03M
| max_line_length
int64 2
1.03M
| alphanum_fraction
float64 0
1
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1aa5aeb3566cbde2134c5f1f81ca0d817b703ac2
| 17,966
|
py
|
Python
|
writer-ident/data.py
|
VChristlein/gmp
|
b04c0ec4289112f53903eb3429b0d8bc78231ceb
|
[
"MIT"
] | 13
|
2019-08-20T03:08:55.000Z
|
2021-12-08T14:00:39.000Z
|
writer-ident/data.py
|
VChristlein/gmp
|
b04c0ec4289112f53903eb3429b0d8bc78231ceb
|
[
"MIT"
] | 1
|
2021-02-23T02:40:45.000Z
|
2021-03-05T02:21:47.000Z
|
writer-ident/data.py
|
VChristlein/gmp
|
b04c0ec4289112f53903eb3429b0d8bc78231ceb
|
[
"MIT"
] | 1
|
2020-05-23T14:05:16.000Z
|
2020-05-23T14:05:16.000Z
|
import os
import shutil
import random
from distutils.command.config import config
import torch
import torchvision
import numpy as np
from PIL.Image import NEAREST
from torch.utils.data import Dataset, Sampler
from torchvision import transforms
import matplotlib.pyplot as plt
from skimage import feature
import config
class WriterData:
    """
    Dataset from the ICDAR 2017 Historical-WI challenge.

    Wraps three ImageFolder datasets (train / validation / competition) and
    hands out DataLoaders configured for triplet-based writer identification.
    """
    def __init__(self, path_to_data, color=True, trainval=False):
        # path to data: is root dir of data folder, has subdirectories: color_train, color_test,
        # binarized_train, binarized_test
        #
        # color:    use the color scans (normalized with config.MEAN_WRITER /
        #           config.STD_WRITER); otherwise the binarized scans.
        # trainval: train on the combined train+val split.
        # config.USE_PATCHES chooses between pre-extracted patches and whole
        # pages (whole pages get resize/crop transforms instead).
        if color:
            if config.USE_PATCHES:
                if trainval:
                    train_dir = os.path.join(path_to_data, "color_trainval")
                else:
                    train_dir = os.path.join(path_to_data, "color_train")
                self.val_dir = os.path.join(path_to_data, "color_test")
            else:
                if trainval:
                    train_dir = os.path.join(path_to_data, "color_trainval")
                else:
                    train_dir = os.path.join(path_to_data, "color_train_unpatched")
                self.val_dir = os.path.join(path_to_data, "color_test")
            self.val2_dir = os.path.join(path_to_data, "color_test")
            self.comp_dir = os.path.join(path_to_data, 'comp_color')
        else:
            if trainval:
                train_dir = os.path.join(path_to_data, 'binarized_trainval')
            else:
                train_dir = os.path.join(path_to_data, 'binarized_train')
            self.val_dir = os.path.join(path_to_data, 'binarized_test')
            self.comp_dir = os.path.join(path_to_data, 'comp_binarized')
            self.val2_dir = os.path.join(path_to_data, "binarized_test")
        if color:
            mean = config.MEAN_WRITER
            std = config.STD_WRITER
            if config.USE_PATCHES:
                # Patches are already correctly sized: only photometric and
                # small geometric augmentation for training.
                trans_train = transforms.Compose([
                    transforms.RandomRotation(degrees=15),
                    transforms.ColorJitter(brightness=0.1, contrast=0.1),
                    transforms.ToTensor(),
                    transforms.Normalize(mean, std),
                ])
                trans_val = transforms.Compose([
                    transforms.ToTensor(),
                    transforms.Normalize(mean, std)
                ])
            else:
                # use the whole image
                trans_train = transforms.Compose([
                    transforms.Resize(440),
                    transforms.RandomCrop(340),
                    # transforms.RandomRotation(degrees=3),
                    # transforms.ColorJitter(brightness=0.1, contrast=0.1),
                    transforms.ToTensor(),
                    transforms.Normalize(mean, std),
                ])
                trans_val = transforms.Compose([
                    transforms.Resize(440),
                    transforms.CenterCrop(340),
                    transforms.ToTensor(),
                    transforms.Normalize(mean, std)
                ])
        else:
            # Transform for binarized version
            if config.USE_PATCHES:
                trans_train = transforms.Compose([
                    # transforms.RandomRotation(degrees=3, ),
                    transforms.Resize(256),
                    transforms.ToTensor(),
                ])
                trans_val = transforms.Compose([
                    transforms.Resize(256),
                    transforms.ToTensor(),
                ])
            else:
                trans_train = transforms.Compose([
                    transforms.Resize(350),
                    transforms.RandomCrop(300),
                    # transforms.RandomRotation(degrees=3),
                    transforms.ToTensor(),
                ])
                # NOTE(review): validation uses RandomCrop (not CenterCrop),
                # so validation results are not deterministic -- confirm this
                # is intended.
                trans_val = transforms.Compose([
                    transforms.Resize(350),
                    transforms.RandomCrop(300),
                    transforms.ToTensor(),
                ])
        self.train_data = torchvision.datasets.ImageFolder(train_dir,
                                                           transform=trans_train)
        self.val_data = torchvision.datasets.ImageFolder(self.val_dir,
                                                         transform=trans_val)
        self.comp_data = torchvision.datasets.ImageFolder(self.comp_dir,
                                                          transform=trans_val)

    def get_test_data_loader(self, transform, batch_size=1):
        """Sequential loader over the competition set with a caller-supplied transform."""
        data = torchvision.datasets.ImageFolder(self.comp_dir, transform=transform)
        seq_sampler = torch.utils.data.SequentialSampler(data)
        return torch.utils.data.DataLoader(data, batch_size=batch_size, sampler=seq_sampler, shuffle=False)

    def get_val_data_loader2(self, transform, batch_size=1):
        """Sequential loader over the secondary validation directory (val2_dir)."""
        data = torchvision.datasets.ImageFolder(self.val2_dir, transform=transform)
        seq_sampler = torch.utils.data.SequentialSampler(data)
        return torch.utils.data.DataLoader(data, batch_size=batch_size, sampler=seq_sampler, shuffle=False)

    def get_train_data_loader(self, batch_sampler=None, batch_size=12, guaranteed_triplets=5):
        """Training loader; defaults to a TripletBatchSampler over the train set."""
        if not batch_sampler:
            batch_sampler = TripletBatchSampler(self.train_data,
                                                batch_size=batch_size,
                                                guaranteed_triplets=guaranteed_triplets)
        return torch.utils.data.DataLoader(self.train_data, batch_sampler=batch_sampler, batch_size=1)

    def get_val_data_loader(self, batch_sampler=None, batch_size=12, guaranteed_triplets=5):
        """Validation loader with triplet batches.

        NOTE(review): the default sampler is built from ``self.train_data``
        but its indices are used against ``self.val_data`` -- this only lines
        up if both splits share the same per-class index layout; confirm.
        """
        if not batch_sampler:
            batch_sampler = TripletBatchSampler(self.train_data,
                                                batch_size=batch_size,
                                                guaranteed_triplets=guaranteed_triplets)
        return torch.utils.data.DataLoader(self.val_data, batch_sampler=batch_sampler, batch_size=1)

    def get_sequential_data_loader(self, batch_size=10):
        """
        Returns a sequential data loader over the writer data.
        """
        sampler = torch.utils.data.SequentialSampler(self.val_data)
        return torch.utils.data.DataLoader(self.val_data, sampler=sampler, batch_size=batch_size, shuffle=False)

    def get_competition_data_loader(self, batch_size=15):
        """Sequential loader over the competition set with the default val transform."""
        sampler = torch.utils.data.SequentialSampler(self.comp_data)
        return torch.utils.data.DataLoader(self.comp_data, sampler=sampler, batch_size=batch_size, shuffle=False)

    def get_train_val_loader(self, batch_sampler):
        # Not implemented yet.
        pass
class ICDAR2013:
    """
    Dataset from the ICDAR 2013 WI challenge.

    Same loader interface as WriterData, but with fixed train/val/comp
    subdirectories and whole-image (non-patch) transforms only.
    """
    def __init__(self, path_to_data, trainval=False):
        # path to data: is root dir of data folder, has subdirectories: color_train, color_test,
        # binarized_train, binarized_test
        train_dir = os.path.join(path_to_data, 'train')
        self.val_dir = os.path.join(path_to_data, 'val')
        self.comp_dir = os.path.join(path_to_data, 'comp')
        self.val2_dir = os.path.join(path_to_data, "binarized2_test_unpatched")
        # NOTE(review): the ``trainval`` flag is stored as a path here but,
        # unlike WriterData, never used to select ``train_dir`` -- confirm
        # whether training on the combined split is supposed to work.
        self.trainval = os.path.join(path_to_data, 'trainval')
        # use the whole image
        trans_train = transforms.Compose([
            transforms.Resize(200),
            transforms.RandomCrop((144, 350)),
            # transforms.RandomRotation(degrees=3),
            # transforms.ColorJitter(brightness=0.1, contrast=0.1),
            transforms.ToTensor(),
        ])
        trans_val = transforms.Compose([
            transforms.Resize(200),
            transforms.CenterCrop((144, 350)),
            transforms.ToTensor(),
        ])
        self.train_data = torchvision.datasets.ImageFolder(train_dir,
                                                           transform=trans_train)
        self.val_data = torchvision.datasets.ImageFolder(self.val_dir,
                                                         transform=trans_val)
        self.comp_data = torchvision.datasets.ImageFolder(self.comp_dir,
                                                          transform=trans_val)

    def get_test_data_loader(self, transform, batch_size=1):
        """Sequential loader over the competition set with a caller-supplied transform."""
        data = torchvision.datasets.ImageFolder(self.comp_dir, transform=transform)
        seq_sampler = torch.utils.data.SequentialSampler(data)
        return torch.utils.data.DataLoader(data, batch_size=batch_size, sampler=seq_sampler, shuffle=False)

    def get_val_data_loader2(self, transform, batch_size=1):
        """Sequential loader over the secondary validation directory (val2_dir)."""
        data = torchvision.datasets.ImageFolder(self.val2_dir, transform=transform)
        seq_sampler = torch.utils.data.SequentialSampler(data)
        return torch.utils.data.DataLoader(data, batch_size=batch_size, sampler=seq_sampler, shuffle=False)

    def get_train_data_loader(self, batch_sampler=None, batch_size=12, guaranteed_triplets=5):
        """Training loader; defaults to a TripletBatchSampler over the train set."""
        if not batch_sampler:
            batch_sampler = TripletBatchSampler(self.train_data,
                                                batch_size=batch_size,
                                                guaranteed_triplets=guaranteed_triplets)
        return torch.utils.data.DataLoader(self.train_data, batch_sampler=batch_sampler, batch_size=1)

    def get_val_data_loader(self, batch_sampler=None, batch_size=12, guaranteed_triplets=5):
        """Validation loader with triplet batches.

        NOTE(review): the default sampler is built from ``self.train_data``
        but indexes ``self.val_data`` -- confirm the two splits have the same
        per-class index layout.
        """
        if not batch_sampler:
            batch_sampler = TripletBatchSampler(self.train_data,
                                                batch_size=batch_size,
                                                guaranteed_triplets=guaranteed_triplets)
        return torch.utils.data.DataLoader(self.val_data, batch_sampler=batch_sampler, batch_size=1)

    def get_sequential_data_loader(self, batch_size=10):
        """
        Returns a sequential data loader over the writer data.
        """
        sampler = torch.utils.data.SequentialSampler(self.val_data)
        return torch.utils.data.DataLoader(self.val_data, sampler=sampler, batch_size=batch_size, shuffle=False)

    def get_competition_data_loader(self, batch_size=20):
        """Sequential loader over the competition set with the default val transform."""
        print("comp loader")
        sampler = torch.utils.data.SequentialSampler(self.comp_data)
        return torch.utils.data.DataLoader(self.comp_data, sampler=sampler, batch_size=batch_size, shuffle=False)
class HymenopteraData:
    """
    Small subset of Imagenet. Contains two classes: ants and bees.
    Should be easy to overfit to.

    Expects the standard layout ``<path_to_data>/{train,val}/<class>/<image>``.
    """
    def __init__(self, path_to_data):
        data_transforms = {
            'train': transforms.Compose([
                transforms.RandomResizedCrop(224),
                transforms.RandomHorizontalFlip(),
                transforms.ToTensor(),
                # ImageNet channel statistics.
                transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
            ]),
            'val': transforms.Compose([
                transforms.Resize(256),
                transforms.CenterCrop(224),
                transforms.ToTensor(),
                transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
            ]),
        }
        self.image_datasets = {x: torchvision.datasets.ImageFolder(os.path.join(path_to_data, x),
                                                                   data_transforms[x])
                               for x in ['train', 'val']}
        self.dataset_sizes = {x: len(self.image_datasets[x]) for x in ['train', 'val']}
        self.class_names = self.image_datasets['train'].classes

    def get_train_loader(self, batch_size=16):
        """Shuffled training loader; drops the last incomplete batch."""
        return torch.utils.data.DataLoader(self.image_datasets['train'],
                                           batch_size=batch_size,
                                           num_workers=4,
                                           shuffle=True,
                                           drop_last=True)

    def get_validation_loader(self, batch_size, shuffle=True):
        """Validation loader.

        Bug fix: ``batch_size`` was previously ignored in favor of a
        hard-coded batch size of 16; it is now honored.
        """
        return torch.utils.data.DataLoader(self.image_datasets['val'],
                                           batch_size=batch_size,
                                           num_workers=4,
                                           shuffle=shuffle)
class OxfordData:
    """Placeholder -- not implemented yet."""
    pass
class GoogleLandmarkData:
    """Placeholder -- not implemented yet."""
    pass
class TripletBatchSampler(Sampler):
    """Batch sampler that guarantees anchor/positive pairs in every batch.

    The writer dataset holds only three samples per class, so uniform random
    batching would rarely yield usable triplets.  Every batch therefore
    starts with ``guaranteed_triplets`` (anchor, positive) pairs and is
    topped up with negatives drawn from other, possibly repeating, classes.
    """

    def __init__(self, imgloader, batch_size, guaranteed_triplets):
        self.batch_size = batch_size
        self.guaranteed_triplets = guaranteed_triplets
        # Slots left over after the guaranteed (anchor, positive) pairs.
        self.other_samples = self.batch_size - 2 * self.guaranteed_triplets
        self.imgloader = imgloader
        # Map each writer id to the three consecutive dataset indices holding
        # that writer's samples (classes are laid out contiguously, 3 each).
        self.samples_per_class = {
            writer_id: list(range(idx * 3, idx * 3 + 3))
            for writer_id, idx in imgloader.class_to_idx.items()
        }

    def __iter__(self):
        # Yield index lists: (anchor, positive) pairs first, negatives after.
        drawn = 0
        while drawn + self.batch_size < len(self.imgloader):
            anchor_classes = random.sample(self.imgloader.classes,
                                           self.guaranteed_triplets)
            negative_classes = random.choices(self.imgloader.classes,
                                              k=self.other_samples)
            batch = []
            for cls in anchor_classes:
                # Two distinct samples of the same writer: anchor + positive.
                batch.extend(random.sample(self.samples_per_class[cls], 2))
            for cls in negative_classes:
                batch.extend(random.sample(self.samples_per_class[cls], 1))
            drawn += self.batch_size
            yield batch

    def __len__(self):
        return len(self.imgloader) // self.batch_size
class PerClassBatchSampler(Sampler):
    """
    Retrieves N samples from K classes, total batch size B = N * K.
    """

    def __init__(self, imgloader: torchvision.datasets.ImageFolder, num_classes: int, num_samples: int):
        self.imgloader = imgloader
        self.batch_size = num_samples * num_classes
        self.num_samples = num_samples
        self.num_classes = num_classes
        # One (start, end) pair per class: every dataset index in
        # range(start, end) belongs to that class.  ImageFolder in this
        # torchvision version (0.2.1) has no ``targets`` attribute, so the
        # labels are pulled out of ``samples`` instead.
        targets = [label for _, label in imgloader.samples]
        self.sample_indices_per_class = []
        start = 0
        previous = targets[0]
        for position, label in enumerate(targets):
            if label != previous:
                self.sample_indices_per_class.append((start, position))
                previous = label
                start = position
        # Close the range for the final class.
        self.sample_indices_per_class.append((start, len(targets)))

    def __iter__(self):
        produced = 0
        while produced + self.batch_size < len(self.imgloader):
            # Pick K distinct classes, then N samples inside each class range.
            chosen_ranges = random.sample(self.sample_indices_per_class,
                                          self.num_classes)
            batch = []
            for lo, hi in chosen_ranges:
                batch += random.sample(range(lo, hi), self.num_samples)
            produced += self.batch_size
            yield batch

    def __len__(self):
        return (len(self.imgloader) // self.batch_size)
def show_images(dataloader, batches_shown):
    """Denormalize and display up to *batches_shown* batches from *dataloader*.

    Each batch is arranged into an image grid, the dataset normalization is
    undone with hard-coded channel statistics, and the result is shown with
    matplotlib (blocks until the window is closed).

    Cleanup: removed an unused ``normalize`` transform local and dead
    commented-out debugging code.

    NOTE(review): the hard-coded mean/std below presumably mirror
    config.MEAN_WRITER / config.STD_WRITER -- confirm they are kept in sync.
    """
    shown = 0
    for inputs, _targets in dataloader:
        grid = torchvision.utils.make_grid(inputs)
        # CHW -> HWC for matplotlib.
        grid = grid.numpy().transpose((1, 2, 0))
        mean = np.array([0.7985, 0.7381, 0.6377])
        std = np.array([0.1211, 0.1262, 0.1330])
        # Undo Normalize(mean, std): x * std + mean, clipped to the valid range.
        grid = np.clip(std * grid + mean, 0, 1)
        plt.imshow(grid)
        plt.show()
        shown += 1
        if shown >= batches_shown:
            break
if __name__ == '__main__':
    # Manual smoke test: sample small per-class batches from the color
    # ICDAR 2017 training set and display them.
    data = WriterData(config.WRITER_DATA_DIR['default'], color=True)
    # sampler = PerClassBatchSampler(data.train_data, num_samples=3, num_classes=5)
    # for e in data.get_train_data_loader(batch_sampler=sampler):
    #     print(e)
    sampler = PerClassBatchSampler(data.train_data, num_classes=1, num_samples=2)
    dl = data.get_train_data_loader(batch_sampler=sampler)
    # trans = transforms.Compose([
    #     transforms.Resize(584),
    #     transforms.FiveCrop(256),
    #     transforms.Lambda(lambda crops: [transforms.ToTensor()(crop) for crop in crops]),
    # ])
    # dl = data.get_test_data_loader(transform=trans)
    show_images(dl,
                200)
| 43.501211
| 117
| 0.601859
|
2dad575f7d238197d35c1c55757d92b6771137b2
| 715
|
dart
|
Dart
|
lib/jmap/mail/email/email_body_properties.dart
|
Arsnael/jmap-dart-client
|
c66175ec32b56a858c845e6e90917d4189a00fc0
|
[
"MIT"
] | 15
|
2021-08-17T02:03:59.000Z
|
2022-02-23T02:21:54.000Z
|
lib/jmap/mail/email/email_body_properties.dart
|
Arsnael/jmap-dart-client
|
c66175ec32b56a858c845e6e90917d4189a00fc0
|
[
"MIT"
] | 9
|
2021-07-24T00:46:30.000Z
|
2022-02-08T03:43:41.000Z
|
lib/jmap/mail/email/email_body_properties.dart
|
Arsnael/jmap-dart-client
|
c66175ec32b56a858c845e6e90917d4189a00fc0
|
[
"MIT"
] | 8
|
2021-07-01T23:13:02.000Z
|
2022-01-02T08:04:52.000Z
|
import 'package:jmap_dart_client/jmap/core/properties/properties.dart';
import 'package:json_annotation/json_annotation.dart';
part 'email_body_properties.g.dart';
/// The set of body-part property names to request from a JMAP server for an
/// e-mail's body structure (JMAP Mail `EmailBodyPart` fields).
@JsonSerializable()
class EmailBodyProperties extends Properties {
  /// Default selection covering the standard `EmailBodyPart` properties.
  static EmailBodyProperties defaultEmailBodyProperties = EmailBodyProperties({
    'partId', 'blobId', 'size', 'name', 'type', 'charset',
    'disposition', 'cid', 'language', 'location'
  });

  EmailBodyProperties(Set<String> value) : super(value);

  // JSON round-trip via the json_serializable generated part file.
  factory EmailBodyProperties.fromJson(Map<String, dynamic> json) => _$EmailBodyPropertiesFromJson(json);

  Map<String, dynamic> toJson() => _$EmailBodyPropertiesToJson(this);

  @override
  List<Object?> get props => [value];
}
| 34.047619
| 105
| 0.752448
|
d02abc89cb014d4c0f855ffb737f5786d45a8535
| 2,132
|
cpp
|
C++
|
NITIKA.cpp
|
yashji9/COMPETITIVE-PROGRAMMING
|
5c7a255be9b01001ddcde8ca4e6ff4bcb1c62f0f
|
[
"MIT"
] | 2
|
2018-01-18T13:39:48.000Z
|
2018-09-18T09:27:07.000Z
|
NITIKA.cpp
|
yashji9/COMPETITIVE-PROGRAMMING
|
5c7a255be9b01001ddcde8ca4e6ff4bcb1c62f0f
|
[
"MIT"
] | null | null | null |
NITIKA.cpp
|
yashji9/COMPETITIVE-PROGRAMMING
|
5c7a255be9b01001ddcde8ca4e6ff4bcb1c62f0f
|
[
"MIT"
] | 2
|
2018-09-30T19:12:02.000Z
|
2018-10-01T09:31:55.000Z
|
#include <cctype>
#include <iostream>
#include <sstream>
#include <string>
#include <vector>
#include <string.h>
#include <ctype.h>

using namespace std;
int main()
{
int t;
cin>>t;
cin.ignore();
while(t--)
{
string s;
getline(cin,s);
char name[12],c,ch1,ch2;
int c1=0;
for(int i=0;s[i]!='\0';i++)
{
if(s[i]==' ')
c1++;
}
if(c1==0)
{
c=toupper(s[0]);
name[0]=c;
int i;
for(i=1;s[i]!='\0';i++)
{
c=tolower(s[i]);
name[i]=c;
}
name[i]='\0';
cout<<name<<endl;
}
else if(c1==1)
{
ch1=toupper(s[0]);
int j=0;
int flag=0;
for(int i=1;s[i]!='\0';i++)
{
if(s[i]==' ')
{
flag=1;
i++;
name[j]=toupper(s[i]);
j++;
}
else if(flag)
{
c=tolower(s[i]);
name[j]=c;
j++;
}
}
name[j]='\0';
cout<<ch1<<". "<<name<<endl;
}
else
{
ch1=toupper(s[0]);
int j=0;
int flag=0;
for(int i=1;s[i]!='\0';i++)
{
if(s[i]==' '&&flag==0)
{
i++;
ch2=toupper(s[i]);
flag=1;
}
else if(flag==1&&s[i]==' ')
{
flag=2;
i++;
name[j]=toupper(s[i]);
j++;
}
else if(flag==2)
{
c=tolower(s[i]);
name[j]=c;
j++;
}
}
name[j]='\0';
cout<<ch1<<". "<<ch2<<". "<<name<<endl;
}
}
}
| 21.535354
| 52
| 0.229362
|
6091aa15f848814da45841942503531f85842b07
| 597
|
h
|
C
|
OpenGL-Sandbox/src/Menu.h
|
j-delrosario/mario
|
5fbc27f424fe22ba78d516eae69deb19c03ce904
|
[
"Apache-2.0"
] | null | null | null |
OpenGL-Sandbox/src/Menu.h
|
j-delrosario/mario
|
5fbc27f424fe22ba78d516eae69deb19c03ce904
|
[
"Apache-2.0"
] | null | null | null |
OpenGL-Sandbox/src/Menu.h
|
j-delrosario/mario
|
5fbc27f424fe22ba78d516eae69deb19c03ce904
|
[
"Apache-2.0"
] | null | null | null |
#pragma once
#include <GLCore.h>
#include <GLCoreUtils.h>
#include <GLCoreRendererAPI.h>
#include "Defines.h"
using namespace GLCore;
using namespace GLCore::Utils;
using namespace GLCore::RendererAPI;
// Start-menu screen: owns the background/sprite textures and reacts to
// input events until the user leaves the menu.
class Menu
{
public:
	Menu();
	~Menu();

	void Init();
	// Per-frame update with the elapsed timestep.
	void OnUpdate(Timestep ts);
	// Event entry point.  NOTE(review): presumably routes KeyPressedEvent
	// to OnKeyPressed -- confirm in Menu.cpp.
	void OnEvent(Event& e);
	void OnRender();
	// True once the user has requested to leave the menu.
	bool Exit() { return exit; }
private:
	bool OnKeyPressed(KeyPressedEvent& e);
private:
	float m_Time = 0.0f;
	std::unique_ptr<Texture> m_Background;
	std::shared_ptr<Texture> m_Mario;
	// Sub-regions of the textures used for drawing (x, y, w, h semantics
	// depend on the renderer -- see GLCore::RendererAPI).
	glm::vec4 m_BgTexCoords;
	glm::vec4 m_MarioTexCoords;
	bool exit = false;
};
| 17.057143
| 39
| 0.726968
|
c670921decb78808fa54a35c45e3d2d15ab57a67
| 309
|
py
|
Python
|
crabageprediction/venv/Lib/site-packages/pandas/tests/indexes/base_class/test_pickle.py
|
13rianlucero/CrabAgePrediction
|
92bc7fbe1040f49e820473e33cc3902a5a7177c7
|
[
"MIT"
] | 28,899
|
2016-10-13T03:32:12.000Z
|
2022-03-31T21:39:05.000Z
|
crabageprediction/venv/Lib/site-packages/pandas/tests/indexes/base_class/test_pickle.py
|
13rianlucero/CrabAgePrediction
|
92bc7fbe1040f49e820473e33cc3902a5a7177c7
|
[
"MIT"
] | 31,004
|
2016-10-12T23:22:27.000Z
|
2022-03-31T23:17:38.000Z
|
crabageprediction/venv/Lib/site-packages/pandas/tests/indexes/base_class/test_pickle.py
|
13rianlucero/CrabAgePrediction
|
92bc7fbe1040f49e820473e33cc3902a5a7177c7
|
[
"MIT"
] | 15,149
|
2016-10-13T03:21:31.000Z
|
2022-03-31T18:46:47.000Z
|
from pandas import Index
import pandas._testing as tm
def test_pickle_preserves_object_dtype():
    # GH#43188, GH#43155: a pickle round trip must not re-infer a numeric
    # dtype for an object-dtype Index of integers.
    original = Index([1, 2, 3], dtype=object)
    unpickled = tm.round_trip_pickle(original)
    assert unpickled.dtype == object
    tm.assert_index_equal(original, unpickled)
| 25.75
| 50
| 0.728155
|
07a44e7f9860e3a36733b1216d212b1da30fc94c
| 982
|
rb
|
Ruby
|
db/migrate/20170201124248_add_defaults_to_timestamps_in_measurements.rb
|
Safecast/ingest
|
c927ce5c5efd658e2d5522bd6d6fc40bc00460a3
|
[
"MIT"
] | null | null | null |
db/migrate/20170201124248_add_defaults_to_timestamps_in_measurements.rb
|
Safecast/ingest
|
c927ce5c5efd658e2d5522bd6d6fc40bc00460a3
|
[
"MIT"
] | 36
|
2017-01-12T07:52:21.000Z
|
2022-03-30T22:02:25.000Z
|
db/migrate/20170201124248_add_defaults_to_timestamps_in_measurements.rb
|
Safecast/ingest
|
c927ce5c5efd658e2d5522bd6d6fc40bc00460a3
|
[
"MIT"
] | 3
|
2017-01-18T02:11:52.000Z
|
2018-07-02T23:17:31.000Z
|
# Makes the measurements timestamps database-managed (PostgreSQL-specific):
# created_at/updated_at default to now() on insert, and a plpgsql trigger
# refreshes updated_at on every UPDATE.
class AddDefaultsToTimestampsInMeasurements < ActiveRecord::Migration[5.0]
  def up
    # Relax both timestamp columns to nullable with a now() default.
    change_table :measurements do |t|
      %i(created_at updated_at).each do |col|
        t.change col, :datetime, null: true, default: -> { 'now()' }
      end
    end
    # Trigger function that stamps updated_at on every row update.
    execute <<-SQL
      CREATE OR REPLACE FUNCTION update_updated_at_column()
      RETURNS TRIGGER AS $$
      BEGIN
        NEW.updated_at = now();
        RETURN NEW;
      END;
      $$ language 'plpgsql';
    SQL
    # Attach the function to the measurements table.
    execute <<-SQL
      CREATE TRIGGER update_measurements_updated_at
      BEFORE UPDATE ON measurements
      FOR EACH ROW EXECUTE PROCEDURE update_updated_at_column();
    SQL
  end

  def down
    # Reverse order: drop the trigger/function, then restore NOT NULL
    # without defaults.
    execute 'DROP TRIGGER IF EXISTS update_measurements_updated_at ON measurements;'
    execute 'DROP FUNCTION IF EXISTS update_updated_at_column();'
    change_table :measurements do |t|
      %i(created_at updated_at).each do |col|
        t.change col, :datetime, null: false
      end
    end
  end
end
| 27.277778
| 84
| 0.663951
|
ef2a64d95d4e4cee7851966c37dc9b7abc5d13a5
| 2,781
|
h
|
C
|
src/SimConnectFlightPlan.h
|
till213/FS2020SimConnectPlugin
|
e434d240b104fdb23cf4a515d307bbba2188dfa0
|
[
"MIT"
] | 21
|
2021-03-01T00:19:41.000Z
|
2022-02-22T02:57:19.000Z
|
src/SimConnectFlightPlan.h
|
till213/FS2020SimConnectPlugin
|
e434d240b104fdb23cf4a515d307bbba2188dfa0
|
[
"MIT"
] | 11
|
2021-06-20T20:16:56.000Z
|
2022-03-28T19:03:34.000Z
|
src/SimConnectFlightPlan.h
|
till213/FS2020SimConnectPlugin
|
e434d240b104fdb23cf4a515d307bbba2188dfa0
|
[
"MIT"
] | 3
|
2021-04-06T19:06:16.000Z
|
2022-01-20T21:22:39.000Z
|
/**
* Sky Dolly - The Black Sheep for your Flight Recordings
*
* Copyright (c) Oliver Knoll
* All rights reserved.
*
* MIT License
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this
* software and associated documentation files (the "Software"), to deal in the Software
* without restriction, including without limitation the rights to use, copy, modify, merge,
* publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons
* to whom the Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or
* substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
* INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
* PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
* FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
* OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
#ifndef SIMCONNECTFLIGHTPLAN_H
#define SIMCONNECTFLIGHTPLAN_H
#include <windows.h>
#include <strsafe.h>
#include <SimConnect.h>
#include "../../../../../Model/src/Waypoint.h"
/*!
* Simulation variables which represent aircraft lights, e.g. navigation light
* and taxi light.
*
* Implementation note: this struct needs to be packed.
*/
#pragma pack(push, 1)
struct SimConnectFlightPlan
{
    // Waypoint identifiers as fixed 8-byte character fields; a value that
    // fills the whole buffer is not NUL-terminated (see the length checks
    // below).
    char gpsWpNextId[8];
    char gpsWpPrevId[8];
    float gpsWpNextLat;
    float gpsWpNextLon;
    float gpsWpNextAlt;
    float gpsWpPrevLat;
    float gpsWpPrevLon;
    float gpsWpPrevAlt;

    // Converts the "next waypoint" fields into a Waypoint model object.
    // The identifier is copied only when StringCbLengthA confirms a valid
    // NUL-terminated string within the 8-byte buffer.
    inline Waypoint toNextWaypoint() const noexcept
    {
        Waypoint waypoint;
        // Length check
        if (SUCCEEDED(StringCbLengthA(&gpsWpNextId[0], sizeof(gpsWpNextId), nullptr))) {
            waypoint.identifier = QString(gpsWpNextId);
        }
        waypoint.latitude = gpsWpNextLat;
        waypoint.longitude = gpsWpNextLon;
        waypoint.altitude = gpsWpNextAlt;
        return waypoint;
    }

    // Same conversion for the "previous waypoint" fields.
    inline Waypoint toPreviousWaypoint() const noexcept
    {
        Waypoint waypoint;
        // Length check
        if (SUCCEEDED(StringCbLengthA(&gpsWpPrevId[0], sizeof(gpsWpPrevId), nullptr))) {
            waypoint.identifier = QString(gpsWpPrevId);
        }
        waypoint.latitude = gpsWpPrevLat;
        waypoint.longitude = gpsWpPrevLon;
        waypoint.altitude = gpsWpPrevAlt;
        return waypoint;
    }

    // Registers this struct's fields with SimConnect (defined elsewhere).
    // NOTE(review): field order here presumably must match the order
    // registered in addToDataDefinition() -- keep the two in sync.
    static void addToDataDefinition(HANDLE simConnectHandle) noexcept;
};
#pragma pack(pop)
#endif // SIMCONNECTFLIGHTPLAN_H
| 31.602273
| 93
| 0.709457
|
d99b5194f0d4f660772416f9943fd5669b48b502
| 68
|
sql
|
SQL
|
db/check_user.sql
|
HGIRyan/ad-free-app
|
362727be4cb985c23db115e18d7a77d80f26ca3b
|
[
"AML",
"Info-ZIP"
] | 1
|
2022-01-07T12:14:21.000Z
|
2022-01-07T12:14:21.000Z
|
db/check_user.sql
|
HGIRyan/ad-free-app
|
362727be4cb985c23db115e18d7a77d80f26ca3b
|
[
"AML",
"Info-ZIP"
] | null | null | null |
db/check_user.sql
|
HGIRyan/ad-free-app
|
362727be4cb985c23db115e18d7a77d80f26ca3b
|
[
"AML",
"Info-ZIP"
] | null | null | null |
-- Look up the user matching both e-mail ($1 = user_email) and username
-- ($2 = user_username); returns no rows when either does not match.
select * from users
where user_email = $1 and user_username = $2;
| 17
| 45
| 0.705882
|
2cc8dfd15f717ff7eecb70aeb39197afddaf2730
| 480
|
py
|
Python
|
pygmsh/built_in/line_loop.py
|
chabotsi/pygmsh
|
f2c26d9193c63efd9fa7676ea0860a18de7e8b52
|
[
"MIT"
] | null | null | null |
pygmsh/built_in/line_loop.py
|
chabotsi/pygmsh
|
f2c26d9193c63efd9fa7676ea0860a18de7e8b52
|
[
"MIT"
] | null | null | null |
pygmsh/built_in/line_loop.py
|
chabotsi/pygmsh
|
f2c26d9193c63efd9fa7676ea0860a18de7e8b52
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
#
class LineLoop(object):
    """A Gmsh line loop built from an ordered list of line objects.

    Each instance receives a unique identifier (``ll0``, ``ll1``, ...) from a
    class-level counter and renders the corresponding Gmsh geo code.
    """

    _ID = 0
    dimension = 1

    def __init__(self, lines):
        self.lines = lines
        # Allocate the next unique loop identifier.
        self.id = 'll{}'.format(LineLoop._ID)
        LineLoop._ID += 1
        joined_ids = ', '.join(line.id for line in lines)
        self.code = '{} = newll;\nLine Loop({}) = {{{}}};'.format(
            self.id, self.id, joined_ids)
        return

    def __len__(self):
        return len(self.lines)
| 20
| 57
| 0.458333
|
0736fa5d15a712af45200fc10723273005890a9a
| 566
|
css
|
CSS
|
public_html/media/css/user/favorites.css
|
Ken-Oh99/malgraph4
|
8a840fc3270dddac1443835445d1351d7618fb51
|
[
"MIT"
] | null | null | null |
public_html/media/css/user/favorites.css
|
Ken-Oh99/malgraph4
|
8a840fc3270dddac1443835445d1351d7618fb51
|
[
"MIT"
] | null | null | null |
public_html/media/css/user/favorites.css
|
Ken-Oh99/malgraph4
|
8a840fc3270dddac1443835445d1351d7618fb51
|
[
"MIT"
] | null | null | null |
/* Favorites page: decade/type charts and the favorites table. */

/* Decades chart: full-width wrapper with a 300px-tall plot target. */
.decades .target-wrapper {
	width: 100%;
}
.decades .target {
	height: 300px;
	width: 100%;
}

/* Types chart: full-width wrapper with a shorter 220px plot target. */
.types .target-wrapper {
	width: 100%;
}
.types .target {
	height: 220px;
}

/* Favorites table: slightly smaller text; the title column soaks up the
   remaining width, ordinals centered, numeric columns right-aligned. */
table {
	font-size: 90%;
}
table .title {
	width: 100%;
}
table .ord {
	text-align: center;
}
table .mean, table .time, table .count {
	text-align: right;
}
table.tablesorter thead th:first-child {
	padding-left: 6px;
	padding-right: 6px;
}

/* Small gray auxiliary info text. */
span.info {
	font-size: 7pt;
	color: gray;
}

/* Rows wrapping nested entry lists carry no cell background. */
tr.entries-wrapper-row td {
	background: none !important;
}
.toggle-decades-msg {
	text-align: center;
}
| 12.577778
| 40
| 0.655477
|
74cdfbdbd88079f96cfd591e4c95ad5baf24d428
| 21,427
|
css
|
CSS
|
frontend/web/estore/styles/style.css
|
hieuhx01001/estore-v1
|
0e5512892f8d3039d7d84978cc53796f4b192d09
|
[
"BSD-3-Clause"
] | null | null | null |
frontend/web/estore/styles/style.css
|
hieuhx01001/estore-v1
|
0e5512892f8d3039d7d84978cc53796f4b192d09
|
[
"BSD-3-Clause"
] | null | null | null |
frontend/web/estore/styles/style.css
|
hieuhx01001/estore-v1
|
0e5512892f8d3039d7d84978cc53796f4b192d09
|
[
"BSD-3-Clause"
] | null | null | null |
@font-face {
font-family: 'Lato';
src: url('fonts/Lato/lato-medium.ttf');
}
/*
Lastore - CSS File
*/
/* #Reset & Basics (Inspired by E. Meyers)
================================================== */
html, body, div, span, applet, object, iframe, h1, h2, h3, h4, h5, h6, p, blockquote, pre, a, abbr, acronym, address, big, cite, code, del, dfn, em, img, ins, kbd, q, s, samp, small, strike, sub, sup, tt, var, b, u, i, center, dl, dt, dd, ol, ul, li, fieldset, form, label, legend, table, caption, tbody, tfoot, thead, tr, th, td, article, aside, canvas, details, embed, figure, figcaption, footer, header, hgroup, menu, nav, output, ruby, section, summary, time, mark, audio, video {
margin: 0;
padding: 0;
border: 0;
font-size: 100%;
font: inherit;
vertical-align: baseline; }
article, aside, details, figcaption, figure, footer, header, hgroup, menu, nav, section {
display: block; }
body {
line-height: 1; }
ol, ul {
list-style: none; }
blockquote, q {
quotes: none; }
blockquote:before, blockquote:after,
q:before, q:after {
content: '';
content: none; }
table {
border-collapse: collapse;
border-spacing: 0; }
/*============================================ GLOBAL =================================================*/
* {
font-size: 13px !important;
}
body {
font-family: 'Lato', sans-serif, arial;
font-size: 12px;
margin:0 auto;
padding:0;
line-height:20px;
}
* {
margin:0;
padding:0;
}
*:focus {
outline:none; /* removes ugly dotted border but may make template more unsuable, up to you
if you want to keep it! */
}
form{margin:0; padding:0;}
hr {
border-width:0;
height:1px;
line-height:0;
margin:30px 0px 30px 0px;
page-break-after:always;
text-align:center;
width:100%;
clear:both;
}
em{ font-style:italic;}
/*** END OF GLOBAL ***/
/*============================================ DEFAULT ELEMENT STYLES =================================================*/
/* heading */
h1, h2 {margin-bottom:20px; font-weight:normal; line-height:normal;}
h3, h4, h5{margin-bottom:20px; font-weight:normal; line-height:normal;}
h1 {font-size:20px;}
h2 {font-size:18px;}
h3 {font-size:16px;}
h4 {font-size:14px;}
h5 {font-size:12px;}
h6 {font-size:10px;}
.button, .button:visited, input[type="submit"], button{font-family: 'Ubuntu', sans-serif, arial;}
.pagetitle{ margin-bottom:20px;}
.title{ padding:8px 0 7px 8px; text-transform:uppercase;}
.titleRegular{font-weight:400}
.titleBold{font-weight:700}
.titleLight{font-weight:300}
.titleUppercase{text-transform:uppercase}
.titleNormal{text-transform:none}
.italic{font-style:italic}
.textBoldUppercase{font-weight:700; text-transform:uppercase;}
/* links */
a, a:visited {text-decoration:none;}
a:hover{text-decoration:none;}
a img{border:none}
/* float align */
.alignleft,
img.alignleft {
display: inline;
float: left;
margin-right: 15px;
margin-top: 3px;
}
.alignright,
img.alignright {
display: inline;
float: right;
margin-left: 15px;
margin-top: 5px;
}
.aligncenter,
img.aligncenter {
clear: both;
display: block;
margin-left: auto;
margin-right: auto;
}
.alignnone,
img.alignnone {
clear: both;
display: block;
margin-left: auto;
margin-right: auto;
margin:0px;
}
img.alignleft,
img.alignright,
img.aligncenter
{
margin-bottom: 10px;
}
/* lists, blockquotes and paragraphs */
p, ul, ol, blockquote {
margin-bottom:5px;
}
ul {
list-style: disc;
margin: 0 0 18px 1.5em;
}
ul li{ padding-bottom:10px;}
ol {
list-style: decimal;
margin: 0 0 18px 2.2em;
}
ol ol {
list-style: upper-alpha;
}
ol ol ol {
list-style: lower-roman;
}
ol ol ol ol {
list-style: lower-alpha;
}
ul ul,
ol ol,
ul ol,
ol ul {
margin-bottom: 0;
}
blockquote {
background-image: url("../images/quote.png");
background-position: 0 0;
background-repeat: no-repeat;
clear: both;
margin: 0 20px 20px 40px;
padding:0 0 0 35px;
font-size:20px;
font-style:italic;
line-height:30px
}
blockquote.left, blockquote.right {
float:right;
margin:0 0 0 20px;
width:65%;
}
blockquote.left{
float:left;
margin-left:0px;
margin-right:20px;}
/* code */
code {
font-family:Verdana, Arial;
letter-spacing:1px;
margin:25px 0 25px 0px;
display:block;
font-size:0.9em;
padding:15px 10px;
}
/*** END OF DEFAULT ELEMENT STYLES ***/
/*============================================ CONTAINER =================================================*/
#bodychild{
max-width:1000px;
margin:0 auto;
padding:0;
}
/*** END CONTAINER ***/
/*** HEADER ***/
/* Top */
#top{ padding:5px 0px 7px;}
#topmenu{ margin-bottom:0px;}
#topmenu ul{ margin:0px; padding:0px; list-style-type:none;}
#topmenu li{ display:inline; margin-right:8px;}
#topright{ margin:0; text-align:right;}
.language,.currency{ display:inline-block;}
.language { margin-right:10px;}
/* Logo */
#logo-wrapper{ padding:33px 0px;}
#logo{height:auto; padding-top:0px;}
#logo img{ float:left;}
#logo h1{margin:0; font-size:28px; line-height:28px;}
#logo .desc{ float:left; padding-left:10px; margin:15px 0 0 20px; line-height:normal;}
#logo-wrapper .right{float:right; margin:0; text-align:right;}
/* Search */
#logo-wrapper #searchform {position: relative; display:inline-block; margin:0px 0 3px;}
#logo-wrapper #searchform #s { padding: 8px 0px 8px 9px !important; vertical-align:top; width:190px; margin-bottom:0px;}
#logo-wrapper .searchbutton {
height:30px;
width: 27px;
margin-left:-3px;
}
/**/
#shopping-cart-wrapper {
float: right;
list-style-type: none;
margin-left:20px;
}
#shopping_cart {
position: relative;
margin-bottom:0px;
}
#shopping_cart a#shop-bag {
background: url("../images/arrow-down.gif") no-repeat 157px center #FFFFFF;
border: 1px solid #E3E3E3;
color: #BBBBBB;
display: inline-block;
height: 21px;
margin-right: 27px;
padding: 4px 27px 3px 9px;
width: 137px;
text-align:left;
}
#shopping_cart a.btncart {
background: url("../images/cart.png") no-repeat scroll center center #666666;
border: 0 none !important;
border-radius: 0 0 0 0;
display: block;
height: 30px;
margin: 0;
padding: 0;
position: absolute;
right: 0;
top: 0;
width: 27px;
}
#shop-bag{ }
.shop-box {
width: 200px;
clear: both;
position:absolute;
left:0;
top:30px;
z-index:250;
margin:0px;
padding:5px 0 0 0;
list-style-type:none;
}
.shop-box li{padding:5px 10px 5px; font-size:12px; clear:both;}
.shop-box li:last-child{ padding:13px 10px;}
.shop-box h2{ font-size:12px; font-weight:bold; margin-bottom:0px; float:left; margin-top:4px;}
.shop-box .price{ float:right}
.shop-box .btn-wrapper { text-align:center; font-size:11px; text-transform:uppercase; }
.shop-box li.btn-wrapper a.cart{ float:left; padding:3px 8px;}
.shop-box li.btn-wrapper a.checkout{float:right; padding:3px 8px;}
/* TOP MENU */
#navigation{position:relative; z-index:200; margin:0 0 20px 0; width:100%;}
#nav-wrap{ margin-bottom:0px;}
/* Nav Menu */
#sf-nav{
margin:0;
padding:0;
list-style-type:none;
overflow:visible;
position:relative;
font-size:12px;
}
.sf-menu > li{ margin-right:20px!important;}
.sf-menu a {
text-decoration:none;
display: block;
position: relative;
padding: 0px 0px;
margin: 15px 0px;
text-transform:uppercase;
font-weight:300;
}
.sf-menu li:hover, .sf-menu li.current{ margin-top:-1px;}
.sf-menu a:hover, .sf-menu li.current > a{ }
/* Drop down menu */
.sf-menu ul a:hover {}
.sf-menu li li {
text-align:left;
line-height:20px;
margin:0;
padding:0px;
width:auto;
text-align:left;
}
.sf-menu, .sf-menu * {
margin: 0;
padding: 0;
list-style: none;
}
.sf-menu {
line-height:100%;
position:absolute;
right:0;
bottom:0;
float:left;
}
.sf-menu ul {
position: absolute;
top: -999em;
width: 12em; /* left offset of submenus need to match (see below) */
}
.sf-menu ul li {
width: 100%;
}
.sf-menu li:hover {
visibility: inherit; /* fixes IE7 'sticky bug' */
}
.sf-menu li {
float: left;
position: relative;
margin:0;
}
.sf-menu li li{margin:0px 0px!important;}
.sf-menu li:hover ul,
.sf-menu li.sfHover ul {
left: -20px;
top: 3.5em; /* match top ul list item height */
z-index: 99;
}
ul.sf-menu li:hover li ul,
ul.sf-menu li.sfHover li ul {
top: -999em;
}
ul.sf-menu li li:hover ul,
ul.sf-menu li li.sfHover ul {
left: 12em; /* match ul width */
top: -1px;
margin-left: 0px;
}
ul.sf-menu li li:hover li ul,
ul.sf-menu li li.sfHover li ul {
top: -999em;
}
ul.sf-menu li li li:hover ul,
ul.sf-menu li li li.sfHover ul {
left: 12em; /* match ul width */
top: -1px;
}
.sf-menu ul li a{
padding:8px 20px 7px 20px!important;
margin:0px;
font-size:11px;
}
.sf-menu ul li a:hover{}
.sf-menu li ul {
}
.sf-menu a.sf-with-ul {
min-width: 1px; /* trigger IE7 hasLayout so spans position accurately */
}
.sf-sub-indicator {
position: absolute;
display: block;
right: 10px;
top: 1.05em; /* IE6 only */
width: 10px;
height: 10px;
text-indent: -999em;
overflow: hidden;
}
/* dropdown mobile */
.tinynav { display: none }
.headline{ text-align:center; margin:0px 0 0 0; padding:0px 0 10px;}
.headline .columns{ margin-bottom:0px;}
.headline h1{ font-weight:300; margin-bottom:0px; font-size:20px;}
/*============================================ SLIDER =================================================*/
#slidercontainer{}
#slider{position:relative; margin:20px 0 30px 0;}
#slider img{ display:block;}
/*============================================ CONTENT =================================================*/
#maincontent{ }
.row .positionright{float:right;}
.row .positionleft{ float:left;}
.padcontent{padding-right:30px;}
.six.columns .padcontent{padding:0px 20px;}
.positionright .padcontent{padding-right:0px; padding-left:20px;}
.header-wrapper{ margin-bottom:14px; clear:both;}
.header-wrapper span{
border-bottom: 1px solid #EEEEEE;
display: block;
height: 8px;
margin-left: auto;
width: 100%;
}
.header-wrapper h2{
background-color: #FFFFFF;
border: 0 none;
float: left;
padding-right: 10px;
text-transform: none;
font-size:14px;
}
.indentleft{overflow:hidden;}
.margin_top{ padding-top:5px;}
.margin_bottomoff{margin-bottom:0 !important;}
.margin_bottom_small{margin-bottom:5px;}
.margin_bottom_middle{margin-bottom:12px;}
.margin_bottom_large{margin-bottom:16px;}
.margin_bottom_extralarge{margin-bottom:25px;}
.textCenter{text-align:center;}
.textMiddle{ padding:10px 0px 20px;}
.center{ margin:0 auto!important; float:none; display:block;}
.nomargin{margin-right:0}
.nopadding{ padding-right:0px;}
.displayblock{display:block;}
.breadcrumb{ color:#666; margin-bottom:20px;}
/*=== Products ===*/
.product-wrapper{ padding:15px 15px 10px; text-align:center;}
.product-wrapper .new{ background:url(../images/new.png) no-repeat; width:22px; height:69px; position:absolute; top:0px; right:17px;}
.product-wrapper h3{ font-size:13px; }
.price-cart-wrapper{ padding:10px 0px 0px;}
.price-cart-wrapper .price{ float:left; font-size:13px;}
.price-cart-wrapper .cart{ float:right; padding-top:5px; line-height:normal;}
.price-cart-wrapper .more, .price-cart-wrapper .buy{text-indent:-9999px; display:inline-block; width:16px; }
.price-cart-wrapper .more{ margin-right:5px;}
.price-cart-wrapper .buy{ margin-left:5px; }
/*=== Featured Products ===*/
.featured-products{ margin-bottom:10px;}
/*=== New Products ===*/
/*=== Highlight Content ===*/
.highlight-content {
clear: both;
margin: 0 0 0;
padding: 0 0 20px;
text-align: center;
}
.highlight-content h4 {
line-height:24px;
margin-bottom:5px;
}
.highlight-content em{ font-size:11px;}
.highlight-content .top.separator {
height: 1px;
margin-bottom: 23px;
}
.highlight-content .bottom.separator {
height: 1px;
margin-top:26px;
margin-bottom: 20px;
padding:0px;
}
.alert{ margin-bottom:20px; padding:10px 21px; width:35%;}
/*=== List ===*/
ul.list{list-style-type: square; list-style-position:inside; padding:0; margin:0;}
ul.list li{padding:3px 0 3px 0px; border-bottom:1px dotted #c3c3c3;}
/*=== Post Shortcode ===*/
.post-shortcode h2{margin-bottom:10px;}
.post-shortcode h2 a{ font-weight:normal; font-size:16px; }
.post-shortcode .date-wrapper{ float:left; text-align:center; margin-right:30px;}
.post-shortcode .date-wrapper .date-value {
font-size: 24px;
font-weight: bold;
margin-bottom: 8px;
}
.post-shortcode .date-wrapper .month-value {
font-size: 18px;
margin-bottom: 5px;
}
.post-shortcode .date-wrapper .year-value {
font-size: 14px;
}
/*=== Images ===*/
img.scale-with-grid {
max-width: 100%;
height: auto;
}
/*=== Form & Button ===*/
form{margin:0; padding:0;}
input[type="text"],
textarea, input[type="password"]{
padding:7px 5px;
font-size:11px;
margin-bottom:15px;
}
select{
padding:4px 2px;
font-size:11px;
margin-bottom:15px;
}
textarea{width:90%;}
textarea:focus{outline:0;}
.button, .button:visited, input[type="submit"], button{
padding:6px 13px 9px 13px;
cursor:pointer;
font-size:13px;
line-height:normal;
}
.button:hover, input[type="submit"]:hover, button:hover{text-decoration:none;}
.button.small, input[type="submit"].small, button.small{font-size:12px; padding:8px 12px;}
.button.medium, input[type="submit"].medium, button.medium{font-size:14px; padding:12px 14px;}
.button.large, input[type="submit"].large, button.large{font-size:14px; padding:14px 20px;}
/*=== Separator ===*/
.separator{
display:block;
height:1px;
/*padding-top:20px;*/
margin:0px 0 40px;
text-align:center;
width:100%;
clear:both;
}
/*=== Dropcaps ===*/
.dropcap1{
display:block;
float:left;
font-size:32px;
line-height:34px;
margin:4px 8px 0 0;
}
.dropcap2{
display: block;
float:left;
border-radius: 55px 55px 55px 55px;
display: block;
float: left;
font-size: 26px;
line-height: 47px;
margin: 6px 8px 0 0;
text-align: center;
width: 47px;
}
.dropcap3{
display:block;
float:left;
font-size:26px;
line-height:35px;
width:47px;
height:40px;
text-align: center;
margin:6px 8px 0 0;
padding:5px 0;
}
/*== Highlight ==*/
.highlight1{padding:2px 5px;}
.highlight2{padding:2px 5px;}
/*=== Pullquotes ===*/
.pullquote-right, .pullquote-left{
padding:0px 10px 0px 30px;
background-image:url(../images/quote.png);
background-repeat:no-repeat;
background-position:0px 0px;
float:right;
font-style:italic;
font-size:12px;
letter-spacing:0px;
line-height:22px;
margin:0px 2px 20px 20px;
width:50%;
}
.pullquote-left{float:left;margin-left:2px;margin-right:20px;}
/*==== Tabs ====*/
.tabcontainer{margin:0;}
ul.tabs {
margin: 0;
margin-bottom:-1px;
padding: 0 0 0px 0;
list-style: none;
width: 100%;
float:left;
}
ul.tabs li {
float: left;
margin: 0 0;
padding: 0 30px;
overflow: hidden;
position: relative;
font-size:13px;
}
ul.tabs li a {
text-decoration: none;
display: block;
padding: 5px 0px;
outline: none;
}
.tab-content { }
ul.tabs li:hover{}
.tab-body{padding:20px 30px 10px 30px; float:left; width: 95%}
/*==== jQuery Toggles ====*/
#toggle{border: 0px }
h2.trigger {
padding:0;
margin: 0 0 7px 0;
font-size:13px;
font-weight:normal;
cursor:pointer;
}
h2.trigger span {
text-decoration: none;
display: inline-block;
padding:10px 12px;
margin-right:20px;
font-size:14px;
font-weight:bold;
}
h2.active span{}
h2.active {}
.toggle_container {
margin: 0 0 7px 40px;
margin-top:-7px;
padding: 10px 20px;
overflow: hidden;
clear: both;
}
/*==== Tables ====*/
table {border-collapse:separate;border-spacing:0;width:100%; margin-bottom:18px;}
table,td,th { text-align:left; }
th{padding:10px;}
td{padding:10px;}
tfoot td{border:0px;}
th,tr:hover{}
table {
text-align: left;
margin: 0 -1px 24px 0;
width: 100%;
}
tr th,
thead th {
font-size: 13px;
line-height: 18px;
padding: 9px 24px;
}
tr td {
padding: 6px 24px;
}
tr.odd td {}
/*============================================ SIDEBAR =================================================*/
.sidebar{ margin-left:-20px}
.widget-title {
padding-bottom:9px;
font-size:13px;
line-height:20px;
text-transform:uppercase;
}
.sidebar ul{
list-style-type:none;
list-style-position:outside;
margin:0;
padding:0;
}
.sidebar ul li{
list-style-type:none;
margin:0;
padding:0;
}
.sidebar .widget-container{margin-bottom:20px; padding-bottom:0px; }
.sidebar li.widget-container:last-child{margin-bottom:0; padding-bottom:10px}
.sidebar li li{
list-style-type:none;
margin:0 0 4px 0;
padding:0 0 4px 13px;
}
.sidebar li li:last-child{ margin-bottom:0;}
.sidebar li li a{ font-size:12px;}
.sidebar li li a:hover, .sidebar li li a.active{text-decoration:none;}
.sidebar .textwidget{}
.sidebar .textwidget p{ margin-bottom:10px;}
/*==== Gallery Widget ====*/
ul.flickr{list-style-type:none; padding:0; margin:0;}
ul.flickr li{ float:left; margin-right:10px !important;}
ul.flickr li.nomargin{margin-right:0 !important;}
/*==== Testimonial Widget ====*/
.testimonial-widget{}
.testimonial-widget h6{line-height:20px; margin-bottom:10px}
.testimonial-widget .alignleft{margin-right:10px;}
.testimonial-widget .quote{ display:block; padding:5px 0 0 0; font-size:13px; font-style:italic}
/*==== Tag ====*/
.tags a{padding:5px 10px!important; margin:0 0px 3px 0 !important; display:inline-block; font-size:11px; font-weight:bold;}
.tags a:hover{}
/*==== Recent post widget ====*/
.sp-widget li{padding: 0 0 16px !important; background:none!important; list-style:none!important; text-align:center;}
.sp-widget li.last{margin:0 !important; padding:0 !important;}
.sp-widget li h3{font-size:12px; margin-bottom:8px; font-weight:normal; text-transform:none;}
/*============================================ FOOTER =================================================*/
#footer{ margin-top:20px;}
/*=== Elastislide Style ===*/
.es-carousel-wrapper{
padding:0px 0px;
margin:5px 0 0 0;
position:relative;
}
.es-carousel{
overflow:hidden;
padding:20px 0px 10px;
width:90%;
margin:0 auto;
}
.es-carousel ul{
display:none;
}
.es-carousel ul li{
height:100%;
float:left;
display:block;
}
.es-carousel ul li a{
display:block;
}
.es-carousel ul li a img{
display:block;
border:none;
max-height:100%;
max-width:100%;
}
.es-nav{right:0px; top:37px; position:absolute; width:100%;}
.es-nav span{
display:block!important;
width:7px;
height:11px;
position:absolute;
cursor:pointer;
color:#f00;
text-indent:-9000px!important;
}
.es-nav span.es-nav-next{
background:url(../images/nav-next.png) no-repeat top;
right:18px;
}
.es-nav span.es-nav-prev{
background:url(../images/nav-prev.png) no-repeat top;
left:18px;
}
.es-nav span:hover{ background-position:0 -18px;}
/*==== FOOTER SIDEBAR ====*/
#footersidebar{padding:20px 0 5px 0;}
#footersidebar ul{list-style-type:none; margin:0; padding:0;}
#footersidebar li.widget-container{list-style-type:none; margin:0 0 20px 0; padding:0; border:0px;}
#footersidebar .widget-title{
font-size:14px;
padding:0 0 13px 0;
margin-bottom:18px;
}
#footersidebar li li{padding:0 0 0px 0px; margin:0 0 5px 0;}
#footersidebar li li:last-child{ border-bottom:0px;}
#footersidebar li a{ padding:0px; margin:0px;}
#footersidebar li a:hover{ text-decoration:none;}
/*==== COPYRIGHT ====*/
#copyright{ padding:20px 10px; font-size:11px; float:left;}
#copyright a, #copyright a:visited{}
/* Social */
.sn{list-style-type:none; margin:0 0 0 0; padding:16px 10px 0 0px; float:right;}
.sn li{list-style-type:none; margin:0; padding:0 0px 0 5px; display:inline; background:transparent}
.sn span{height:23px; width:23px; display:inline-block;
-webkit-transition: all 0.4s ease; /* Saf3.2+, Chrome */
-moz-transition: all 0.4s ease; /* FF4+ */
-o-transition: all 0.4s ease; /* Opera 10.5+ */
transition: all 0.4s ease;
}
.sn .icon-img{background-position:0 -31px !important;}
.sn .icon-img:hover{background-position:0 0px !important;}
.priceInfo {
width: 60%;
display: inline-block;
float: left;
transition: opacity .3s;
-webkit-transition: opacity .3s;
-moz-transition: opacity .3s;
-ms-transition: opacity .3s;
-o-transition: opacity .3s;
}
span.old-price {
float: left;
width: 100%;
text-align: left;
font-size: 12px;
font-weight: 400;
text-decoration: line-through;
margin-top: 0;
}
span.price {
display: block;
color: #ee3124;
font-size: 14px;
font-weight: 700;
margin-right: 6px;
}
.cart-sale {
position: relative;
top: 18px
}
a, a:hover {
cursor: pointer;
}
| 22.867663
| 485
| 0.629813
|
5d80dd58c89d66bc2183c7349df579e3c406d907
| 1,124
|
lua
|
Lua
|
modules/misc/mirror_timers.lua
|
PedroZC90/LuaUI
|
5b3fab73f4d774b04f03b1097f44212c6ecb947b
|
[
"MIT"
] | null | null | null |
modules/misc/mirror_timers.lua
|
PedroZC90/LuaUI
|
5b3fab73f4d774b04f03b1097f44212c6ecb947b
|
[
"MIT"
] | null | null | null |
modules/misc/mirror_timers.lua
|
PedroZC90/LuaUI
|
5b3fab73f4d774b04f03b1097f44212c6ecb947b
|
[
"MIT"
] | null | null | null |
local T, C, L = Tukui:unpack()
local MirrorTimers = T.Miscellaneous.MirrorTimers
----------------------------------------------------------------
-- Mirror Timers (e.g: Breath Timer)
----------------------------------------------------------------
-- Keep a handle on the stock implementation so the hook can extend it.
local baseUpdate = MirrorTimers.Update

-- Hooked replacement: runs Blizzard's/Tukui's original update, then
-- restyles every mirror-timer frame (size, status texture, centered text).
function MirrorTimers:Update()
	-- Let the original code do its work before we reskin the frames.
	baseUpdate(self)

	local texture = C.Medias.Blank
	local font, fontSize, fontStyle = C.Medias.Font, 12, nil

	for index = 1, MIRRORTIMER_NUMTIMERS do
		local frame = _G["MirrorTimer"..index]
		local frameName = frame:GetName()
		local statusBar = _G[frameName.."StatusBar"]
		local border = _G[frameName.."Border"] -- looked up as in the original (not otherwise used here)
		local label = _G[frameName.."Text"]

		frame:SetWidth(210)
		frame:SetHeight(16)
		frame.Backdrop:SetOutside()

		statusBar:ClearAllPoints()
		statusBar:SetAllPoints()
		statusBar:SetStatusBarTexture(texture)

		label:ClearAllPoints()
		label:SetPoint("CENTER", frame, "CENTER", 0, 0)
		label:SetFont(font, fontSize, fontStyle)
	end
end
| 30.378378
| 65
| 0.540036
|
850fb68aacd9647c610426798dd6295de4e8b0bb
| 6,382
|
cs
|
C#
|
src/Ao.ObjectDesign.Controls/TextBoxBaseSetting.cs
|
Cricle/Ao.ObjectDesign
|
8f780dcba60213490b34147723a6dd035de9099a
|
[
"Apache-2.0"
] | 4
|
2021-07-05T16:22:51.000Z
|
2022-03-20T01:43:09.000Z
|
src/Ao.ObjectDesign.Controls/TextBoxBaseSetting.cs
|
Cricle/Ao.ObjectDesign
|
8f780dcba60213490b34147723a6dd035de9099a
|
[
"Apache-2.0"
] | 2
|
2021-08-22T02:12:57.000Z
|
2021-12-12T08:54:08.000Z
|
src/Ao.ObjectDesign.Controls/TextBoxBaseSetting.cs
|
Cricle/Ao.ObjectDesign
|
8f780dcba60213490b34147723a6dd035de9099a
|
[
"Apache-2.0"
] | 3
|
2021-07-05T14:53:29.000Z
|
2021-08-22T02:10:15.000Z
|
using Ao.ObjectDesign.Designing.Annotations;
using Ao.ObjectDesign.Wpf.Designing;
using System.ComponentModel;
using System.Windows.Controls;
using System.Windows.Controls.Primitives;
namespace Ao.ObjectDesign.Controls
{
[MappingFor(typeof(TextBoxBase))]
public abstract class TextBoxBaseSetting : ControlSetting, IMiddlewareDesigner<TextBoxBase>
{
private ScrollBarVisibility verticalScrollBarVisibility = ScrollBarVisibility.Auto;
private bool acceptsReturn = true;
private bool acceptsTab = true;
private bool isReadOnlyCaretVisible;
private ScrollBarVisibility horizontalScrollBarVisibility = ScrollBarVisibility.Auto;
private double selectionOpacity = 1;
private bool isUndoEnabled = true;
private int undoLimit = 1;
private bool autoWordSelection = true;
private BrushDesigner selectionBrush;
private bool isReadOnly;
private BrushDesigner caretBrush;
private bool isInactiveSelectionHighlightEnabled;
[DefaultValue(false)]
public virtual bool IsInactiveSelectionHighlightEnabled
{
get => isInactiveSelectionHighlightEnabled;
set => Set(ref isInactiveSelectionHighlightEnabled, value);
}
public virtual BrushDesigner CaretBrush
{
get => caretBrush;
set => Set(ref caretBrush, value);
}
[DefaultValue(false)]
public virtual bool IsReadOnly
{
get => isReadOnly;
set => Set(ref isReadOnly, value);
}
public virtual BrushDesigner SelectionBrush
{
get => selectionBrush;
set => Set(ref selectionBrush, value);
}
[DefaultValue(false)]
public virtual bool AutoWordSelection
{
get => autoWordSelection;
set => Set(ref autoWordSelection, value);
}
[DefaultValue(1)]
public virtual int UndoLimit
{
get => undoLimit;
set => Set(ref undoLimit, value);
}
[DefaultValue(true)]
public virtual bool IsUndoEnabled
{
get => isUndoEnabled;
set => Set(ref isUndoEnabled, value);
}
[DefaultValue(0.4d)]
public virtual double SelectionOpacity
{
get => selectionOpacity;
set => Set(ref selectionOpacity, value);
}
[DefaultValue(ScrollBarVisibility.Hidden)]
public virtual ScrollBarVisibility HorizontalScrollBarVisibility
{
get => horizontalScrollBarVisibility;
set => Set(ref horizontalScrollBarVisibility, value);
}
[DefaultValue(false)]
public virtual bool IsReadOnlyCaretVisible
{
get => isReadOnlyCaretVisible;
set => Set(ref isReadOnlyCaretVisible, value);
}
[DefaultValue(false)]
public virtual bool AcceptsTab
{
get => acceptsTab;
set => Set(ref acceptsTab, value);
}
[DefaultValue(true)]
public virtual bool AcceptsReturn
{
get => acceptsReturn;
set => Set(ref acceptsReturn, value);
}
[DefaultValue(ScrollBarVisibility.Hidden)]
public virtual ScrollBarVisibility VerticalScrollBarVisibility
{
get => verticalScrollBarVisibility;
set => Set(ref verticalScrollBarVisibility, value);
}
public override void SetDefault()
{
base.SetDefault();
IsReadOnlyCaretVisible = false;
VerticalScrollBarVisibility = ScrollBarVisibility.Hidden;
AcceptsReturn = true;
AcceptsTab = false;
HorizontalScrollBarVisibility = ScrollBarVisibility.Hidden;
SelectionOpacity = 0.4;
IsUndoEnabled = true;
UndoLimit = 1;
AutoWordSelection = false;
SelectionBrush = new BrushDesigner();
IsReadOnly = false;
CaretBrush = new BrushDesigner();
IsInactiveSelectionHighlightEnabled = false;
}
public void Apply(TextBoxBase value)
{
if (value is null)
{
SetDefault();
}
else
{
Apply((Control)value);
VerticalScrollBarVisibility = value.VerticalScrollBarVisibility;
AcceptsReturn = value.AcceptsReturn;
AcceptsTab = value.AcceptsTab;
HorizontalScrollBarVisibility = value.HorizontalScrollBarVisibility;
SelectionOpacity = value.SelectionOpacity;
IsUndoEnabled = value.IsUndoEnabled;
UndoLimit = value.UndoLimit;
AutoWordSelection = value.AutoWordSelection;
SelectionBrush = new BrushDesigner();
SelectionBrush.SetBrush(value.SelectionBrush);
IsReadOnly = value.IsReadOnly;
CaretBrush = new BrushDesigner();
CaretBrush.SetBrush(value.CaretBrush);
IsInactiveSelectionHighlightEnabled = value.IsInactiveSelectionHighlightEnabled;
}
}
public void WriteTo(TextBoxBase value)
{
if (value != null)
{
WriteTo((Control)value);
value.VerticalScrollBarVisibility = verticalScrollBarVisibility;
value.AcceptsReturn = acceptsReturn;
value.AcceptsTab = acceptsTab;
value.HorizontalScrollBarVisibility = horizontalScrollBarVisibility;
value.SelectionOpacity = selectionOpacity;
value.IsUndoEnabled = isUndoEnabled;
value.UndoLimit = undoLimit;
value.AutoWordSelection = autoWordSelection;
value.SelectionBrush = selectionBrush?.GetBrush();
value.IsReadOnly = isReadOnly;
value.CaretBrush = caretBrush?.GetBrush();
value.IsInactiveSelectionHighlightEnabled = isInactiveSelectionHighlightEnabled;
}
}
}
}
| 35.653631
| 97
| 0.580539
|
b048ab0524ac49c5dd48e70b3fcf3da7bb5e9fa0
| 826
|
py
|
Python
|
main/views.py
|
marc-marquez/issue-tracker
|
d9f2a589f416d74174499e59c5ba94900f2be063
|
[
"MIT"
] | 1
|
2018-12-07T19:52:44.000Z
|
2018-12-07T19:52:44.000Z
|
main/views.py
|
marc-marquez/issue-tracker
|
d9f2a589f416d74174499e59c5ba94900f2be063
|
[
"MIT"
] | 6
|
2020-02-11T23:12:02.000Z
|
2022-02-10T08:17:07.000Z
|
main/views.py
|
marc-marquez/issue-tracker
|
d9f2a589f416d74174499e59c5ba94900f2be063
|
[
"MIT"
] | 1
|
2018-12-06T20:55:42.000Z
|
2018-12-06T20:55:42.000Z
|
"""
Creates the views for the:
- index page
- FAQ page
- About page
- Contact Us page
"""
from django.shortcuts import render
def get_index(request):
    """Render the site landing page.

    :param request: incoming HTTP request
    :return: rendered ``index.html`` response
    """
    template = 'index.html'
    return render(request, template)
def get_faq(request):
    """Render the FAQ page.

    :param request: incoming HTTP request
    :return: rendered ``faq.html`` response
    """
    template = 'faq.html'
    return render(request, template)
def get_about(request):
    """Render the About Us page.

    :param request: incoming HTTP request
    :return: rendered ``about.html`` response
    """
    template = 'about.html'
    return render(request, template)
def get_contact(request):
    """Render the Contact Us page.

    :param request: incoming HTTP request
    :return: rendered ``contact.html`` response
    """
    template = 'contact.html'
    return render(request, template)
| 20.146341
| 42
| 0.62954
|
54aa3240df55767c092529f1005da366eda7768c
| 2,622
|
css
|
CSS
|
css/layout.css
|
ahmadassaf/Hacker-menu
|
8084c27a8e002c7817f7767ece3b2a81db1b2128
|
[
"MIT"
] | 1,117
|
2015-09-11T16:57:57.000Z
|
2020-07-23T21:31:18.000Z
|
css/layout.css
|
ahmadassaf/Hacker-menu
|
8084c27a8e002c7817f7767ece3b2a81db1b2128
|
[
"MIT"
] | 19
|
2015-09-17T04:21:53.000Z
|
2020-03-11T16:06:33.000Z
|
css/layout.css
|
ahmadassaf/Hacker-menu
|
8084c27a8e002c7817f7767ece3b2a81db1b2128
|
[
"MIT"
] | 107
|
2015-09-13T03:28:39.000Z
|
2020-05-06T13:54:56.000Z
|
/* ===== Ratchet framework overrides ===== */
.content { background: white; margin: 0 0 44px !important; }
ul, code { font-size: 14px; }
.bar .status { color: #979797; font-size: 12px; padding-top: 13px; }
.comment { font-size: 11px; }

/* ===== Global ===== */
body { font-family: 'Roboto', sans-serif; }
.clickable { cursor: pointer; }
.clickable:hover { color: #ff6601; }

/* ===== Loading spinner: three pulsing dots driven by one keyframe ===== */
.spinner:before,
.spinner:after,
.spinner {
  border-radius: 50%;
  width: 1.5em;
  height: 1.5em;
  animation-fill-mode: both;
  animation: animate-spinner 1.8s infinite ease-in-out;
}
.spinner {
  font-size: 10px;
  margin: 50px auto;
  position: relative;
  text-indent: -9999em;
  transform: translateZ(0);
  animation-delay: -0.16s;
}
.spinner:before { left: -2.5em; animation-delay: -0.32s; }
.spinner:after { left: 2.5em; }
.spinner:before,
.spinner:after { content: ''; position: absolute; top: 0; }
@keyframes animate-spinner {
  0%, 80%, 100% { box-shadow: 0 2.5em 0 -1.3em #ff6601; }
  40% { box-shadow: 0 2.5em 0 0 #ff6601; }
}

/* ===== Header / tab bar ===== */
.bar-nav { background: #f0f0f0; }
.bar.bar-nav { padding: 0; }
.bar .segmented-control { background: transparent; border: 0; border-radius: 0; top: auto; }
.segmented-control .control-item {
  cursor: pointer;
  padding-bottom: 14px;
  padding-top: 18px;
  text-transform: uppercase;
}
.segmented-control .control-item.active { background: white; }

/* ===== Content: story list ===== */
.table-view { border-bottom: 0; }
.story {
  border-left: 4px solid #ff6601;
  color: inherit;
  margin: -11px -65px -11px -15px;
  overflow: hidden;
  padding: inherit;
  position: relative;
}
.story:hover { background: #fdfdfd; }
.story.read { border-left-color: #bdbdbd; }
.story .badge {
  background: #f0f0f0;
  font-size: .85rem;
  position: absolute;
  right: 15px;
  top: 27%;
  -webkit-transform: translateY(-50%);
  -ms-transform: translateY(-50%);
  transform: translateY(-50%);
}
.story-title { display: block; font-size: 16px; }
.story-host { display: inline-block; font-size: 12px; font-style: italic; margin-top: .45rem; }
.icon-comment:before {
  background: url(../images/icon-comment.svg) no-repeat center center;
  content: '';
  display: block;
  float: left;
  height: 20px;
  margin-right: 5px;
  width: 20px;
}
.story-poster { font-size: 12px; font-style: italic; }

/* ===== Footer ===== */
.bar-footer { background: #f0f0f0; }
.bar .btn {
  background: transparent;
  border: 0;
  border-left: 1px solid #979797;
  border-radius: 0;
  padding: 17px 14px 14px 23px;
  top: 0;
}
.bar .btn:active { outline: none; }
| 14.327869
| 70
| 0.633105
|
58d57a6394c59a0204d6c80b18490b796e8e3a41
| 282
|
css
|
CSS
|
md-ripple.css
|
Hkh12/md-ripple
|
95ee1f015425a04157909b6890cd6addcdf39780
|
[
"MIT"
] | 1
|
2017-10-04T16:37:15.000Z
|
2017-10-04T16:37:15.000Z
|
md-ripple.css
|
Hkh12/md-ripple
|
95ee1f015425a04157909b6890cd6addcdf39780
|
[
"MIT"
] | null | null | null |
md-ripple.css
|
Hkh12/md-ripple
|
95ee1f015425a04157909b6890cd6addcdf39780
|
[
"MIT"
] | null | null | null |
/* Element carrying the [ripple] attribute: anchors and clips the effect
   and hides the default mobile tap highlight so only the ripple shows. */
[ripple] {
  position: relative;
  overflow: hidden;
  -webkit-tap-highlight-color: transparent;
}

/* The expanding circle injected by the script: centered on the tap point
   via translate3d, faint, and transparent to pointer events. */
[ripple] .ripple-effect {
  position: absolute;
  width: 10px;
  height: 10px;
  border-radius: 50%;
  transform: translate3d(-50%, -50%, 0);
  opacity: .125;
  pointer-events: none;
}
| 18.8
| 43
| 0.670213
|
0ee5e3265cb35fc9c06156b64421b3446bf05d7e
| 620
|
sql
|
SQL
|
db/auth-db/scripts/scripts.sql
|
shankarramr/eshop
|
e90c6944e4227473d06d3bb5aaf0ad9002b31865
|
[
"Unlicense"
] | null | null | null |
db/auth-db/scripts/scripts.sql
|
shankarramr/eshop
|
e90c6944e4227473d06d3bb5aaf0ad9002b31865
|
[
"Unlicense"
] | null | null | null |
db/auth-db/scripts/scripts.sql
|
shankarramr/eshop
|
e90c6944e4227473d06d3bb5aaf0ad9002b31865
|
[
"Unlicense"
] | null | null | null |
/* Bootstrap script for the auth database. */

/* VERSION table: single-column schema version marker. */
CREATE TABLE VERSION(
VERSION VARCHAR(255)
);

INSERT INTO VERSION VALUES(
'1.0'
);

/* OAUTH_ACCESS_TOKEN table: registered OAuth2 client credentials.
   CLIENT_SECRET holds a bcrypt hash, never the plain secret. */
CREATE TABLE OAUTH_ACCESS_TOKEN (
ID INTEGER PRIMARY KEY AUTO_INCREMENT,
CLIENT_ID VARCHAR(50),
CLIENT_SECRET VARCHAR(150),
SCOPE VARCHAR(150),
AUTHORIZED_GRANT_TYPES VARCHAR(150)
);

/* Seed the default 'portal' client.
   BUG FIX: the VALUES list previously ended with a trailing comma after the
   last item, which is a syntax error and made this INSERT fail. */
INSERT INTO OAUTH_ACCESS_TOKEN(
CLIENT_ID,
CLIENT_SECRET,
SCOPE,
AUTHORIZED_GRANT_TYPES
)
VALUES(
'portal',
'$2a$10$4bT4qTSIEVb/ZJ1A8PwTfu9T79WKhRaGfQR7QsLBhJVo.ZbGGTL8u',
'read,write,trust',
'authorization_code,password,refresh_token,implicit'
);
| 22.142857
| 67
| 0.725806
|
79847c073c9663c178049122cef67199c67db18d
| 2,336
|
php
|
PHP
|
application/views/modul5/v_aksesstudytracer.php
|
MasDeny/siakad
|
e5ffa65c8dc0655d9ca1482cd5c2c455ee6998cc
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
application/views/modul5/v_aksesstudytracer.php
|
MasDeny/siakad
|
e5ffa65c8dc0655d9ca1482cd5c2c455ee6998cc
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
application/views/modul5/v_aksesstudytracer.php
|
MasDeny/siakad
|
e5ffa65c8dc0655d9ca1482cd5c2c455ee6998cc
|
[
"Apache-2.0",
"MIT"
] | 2
|
2020-03-10T08:39:30.000Z
|
2021-09-01T09:47:22.000Z
|
<html lang="en">
<head>
	<meta charset="utf-8">
	<title>Pendaftaran Tracer Study</title>
	<link rel="stylesheet" href="<?php echo base_url() ?>asset/css/bootstrap.css">
	<link rel="stylesheet" href="<?php echo base_url() ?>asset/css/style.css">
	<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.0/css/bootstrap.min.css">
	<link href="<?php echo base_url() ?>asset/materialize.min.css" type="text/css" rel="stylesheet" media="screen,projection"/>
</head>
<body>
	<div class="section no-pad-bot" id="index-banner">
		<div class="container">
			<br><br>
			<h3 class="header center">Maaf, Anda tidak dapat mengakses halaman pendaftaran Study Tracer.</h3>
			<div class="row center">
				<h6 class="header col s12 light">Belum dapat melakukan pendaftaran study tracer, silakan lakukan validasi <a href="<?php echo base_url() ?>index.php/modul5/Tracer/daftar_tracer">disini.</a></h6>
			</div>
		</div>
	</div>
	<br><br>
	<br><br>
	<br><br>
	<br>
	<section id="footer" class="section footer">
		<div class="container">
			<div class="row align-center copyright">
				<div class="col-sm-12">
					<div class="credits">
						<!--
							All the links in the footer should remain intact.
							You can delete the links only if you purchased the pro version.
							Licensing information: https://bootstrapmade.com/license/
							Purchase the pro version with working PHP/AJAX contact form: https://bootstrapmade.com/buy/?theme=Amoeba
						-->
						<a href="">Copyright © 2017 DEVteam_Modul5_PSI</a><a href="https://bootstrapmade.com/"> | Bootstrap Themes</a> <a href="https://bootstrapmade.com/"> by BootstrapMade</a>
					</div>
				</div>
			</div>
		</div>
	</section>
	<script src="<?php echo base_url() ?>asset/js/jquery-2.1.1.min.js"></script>
	<script src="<?php echo base_url() ?>asset/js/materialize.min.js"></script>
	<script>
	(function($){
		$(function(){
			$('.button-collapse').sideNav();
		});
	})(jQuery);
	</script>
	<!-- BUG FIX: this ready-handler previously sat in <head>, before jQuery and
	     Materialize were loaded at the bottom of <body>, so "$" was undefined
	     when the browser parsed it. It now runs after both libraries load. -->
	<script>
	$(document).ready(function() {
		$('select').material_select('destroy');
		Materialize.updateTextFields();
	});
	</script>
</body>
</html>
| 33.855072
| 202
| 0.61387
|
bd1395e2d88c11d876e4206379266e3558ee07cb
| 2,887
|
html
|
HTML
|
chrome/browser/resources/settings/autofill_page/passwords_export_dialog.html
|
zealoussnow/chromium
|
fd8a8914ca0183f0add65ae55f04e287543c7d4a
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 575
|
2015-06-18T23:58:20.000Z
|
2022-03-23T09:32:39.000Z
|
chrome/browser/resources/settings/autofill_page/passwords_export_dialog.html
|
zealoussnow/chromium
|
fd8a8914ca0183f0add65ae55f04e287543c7d4a
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 113
|
2015-05-04T09:58:14.000Z
|
2022-01-31T19:35:03.000Z
|
chrome/browser/resources/settings/autofill_page/passwords_export_dialog.html
|
zealoussnow/chromium
|
fd8a8914ca0183f0add65ae55f04e287543c7d4a
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 52
|
2015-07-14T10:40:50.000Z
|
2022-03-15T01:11:49.000Z
|
<style include="settings-shared iron-flex">
paper-progress {
--paper-progress-active-color: var(--google-blue-500);
width: 100%;
}
@media (prefers-color-scheme: dark) {
paper-progress {
/* TODO(dbeam): this is the same as downloads (and probably anywhere
* else that uses paper-progress). Should we make something like a
* paper_progress_style_css.html? */
--paper-progress-active-color: var(--google-blue-refresh-300);
}
}
.action-button {
margin-inline-start: 8px;
}
</style>
<template is="dom-if" if="[[showStartDialog_]]" restamp>
<cr-dialog id="dialog_start" close-text="$i18n{close}" show-on-attach>
<div slot="title">$i18n{exportPasswordsTitle}</div>
<div slot="body">
<div class="layout horizontal center">
<div>$i18n{exportPasswordsDescription}</div>
</div>
</div>
<div slot="button-container">
<cr-button class="secondary-button header-aligned-button"
on-click="onCancelButtonTap_" id="cancelButton">
$i18n{cancel}
</cr-button>
<cr-button class="action-button header-aligned-button"
on-click="onExportTap_" id="exportPasswordsButton">
$i18n{exportPasswords}
</cr-button>
</div>
</cr-dialog>
</template>
<template is="dom-if" if="[[showProgressDialog_]]" restamp>
<cr-dialog id="dialog_progress" no-cancel="true" show-on-attach>
<div slot="title">$i18n{exportingPasswordsTitle}</div>
<div slot="body">
<paper-progress indeterminate class="blue"></paper-progress>
</div>
<div slot="button-container">
<cr-button id="cancel_progress_button" class="header-aligned-button"
on-click="onCancelProgressButtonTap_">
$i18n{cancel}
</cr-button>
</div>
</cr-dialog>
</template>
<template is="dom-if" if="[[showErrorDialog_]]" restamp>
<cr-dialog id="dialog_error" close-text="$i18n{close}" show-on-attach>
<div slot="title">[[exportErrorMessage]]</div>
<div slot="body">
$i18n{exportPasswordsFailTips}
<ul>
<li>$i18n{exportPasswordsFailTipsEnoughSpace}</li>
<li>$i18n{exportPasswordsFailTipsAnotherFolder}</li>
</ul>
</div>
<div slot="button-container">
<cr-button class="header-aligned-button" on-click="onCancelButtonTap_"
id="cancelErrorButton">
$i18n{cancel}
</cr-button>
<cr-button class="action-button header-aligned-button"
on-click="onExportTap_" id="tryAgainButton">
$i18n{exportPasswordsTryAgain}
</cr-button>
</div>
</cr-dialog>
</template>
| 36.544304
| 80
| 0.578802
|
54b5d6885d9b85ce05cd64760ef01149cc161189
| 664
|
css
|
CSS
|
cartridges/andromda-jsf2/src/main/resources/resources/views/css/primefaces/custom.css
|
wpradnyana/andromda
|
30018001a006f561f74ad7c6db26e490674c5fcd
|
[
"BSD-3-Clause"
] | 2
|
2019-02-24T13:05:56.000Z
|
2019-11-02T06:25:37.000Z
|
cartridges/andromda-jsf2/src/main/resources/resources/views/css/primefaces/custom.css
|
wpradnyana/andromda
|
30018001a006f561f74ad7c6db26e490674c5fcd
|
[
"BSD-3-Clause"
] | 2
|
2022-01-21T23:12:36.000Z
|
2022-01-21T23:12:50.000Z
|
cartridges/andromda-jsf2/src/main/resources/resources/views/css/primefaces/custom.css
|
wpradnyana/andromda
|
30018001a006f561f74ad7c6db26e490674c5fcd
|
[
"BSD-3-Clause"
] | 4
|
2018-06-21T20:34:02.000Z
|
2021-09-24T00:33:57.000Z
|
/** Add any custom styles to this css **/
* {
font-size: 97%;
}
.ui-layout-container {
overflow: auto !important;;
}
.ui-layout-north, .ui-layout-west, .ui-layout-south {
z-index:100 !important;
}
.ui-layout-south {
position: fixed !important;
}
.ui-layout-north .ui-layout-unit-content, .ui-layout-west, .ui-layout-north, .ui-layout-center, .ui-panelgrid td, .ui-panelgrid tr {
border:none !important;
}
.ui-layout-west .ui-layout-unit-content, .ui-layout-north, .ui-widget-content, .ui-datatable *{
overflow: visible !important;
}
#pageContent {
width: 98% !important;
}
table.padding_autocompletenew td {
padding-left: 0px;
}
| 23.714286
| 132
| 0.673193
|
b317781a0f7ed0d7d20dba0b2eb5b4bfc7c2fa2b
| 181
|
py
|
Python
|
Codeforces/C_The_Child_and_Toy.py
|
anubhab-code/Competitive-Programming
|
de28cb7d44044b9e7d8bdb475da61e37c018ac35
|
[
"MIT"
] | null | null | null |
Codeforces/C_The_Child_and_Toy.py
|
anubhab-code/Competitive-Programming
|
de28cb7d44044b9e7d8bdb475da61e37c018ac35
|
[
"MIT"
] | null | null | null |
Codeforces/C_The_Child_and_Toy.py
|
anubhab-code/Competitive-Programming
|
de28cb7d44044b9e7d8bdb475da61e37c018ac35
|
[
"MIT"
] | null | null | null |
# Codeforces "The Child and Toy": removing an edge costs the smaller
# energy value of its two endpoint parts, so the optimal total is simply
# the sum of min(endpoint costs) over all edges.
_, edge_count = map(int, input().split())
energy = list(map(int, input().split()))
total = 0
for _ in range(edge_count):
    u, v = map(int, input().split())
    total += min(energy[u - 1], energy[v - 1])
print(total)
| 25.857143
| 37
| 0.607735
|
7f9fecdc5bb764259769e48cafbf616fc407c852
| 68
|
php
|
PHP
|
resources/views/site/base/_partials/scripts.blade.php
|
alexsynarchin/online-education
|
5823ad5c74b8de483c852fc9fc04fa3e546462e7
|
[
"MIT"
] | null | null | null |
resources/views/site/base/_partials/scripts.blade.php
|
alexsynarchin/online-education
|
5823ad5c74b8de483c852fc9fc04fa3e546462e7
|
[
"MIT"
] | null | null | null |
resources/views/site/base/_partials/scripts.blade.php
|
alexsynarchin/online-education
|
5823ad5c74b8de483c852fc9fc04fa3e546462e7
|
[
"MIT"
] | null | null | null |
<script src="{{mix('js/app.js','assets/site')}}?ver=42"></script>
| 17
| 65
| 0.588235
|
f5ddb54a39df3f520196b34d9f12cc015bc7b1e5
| 2,653
|
css
|
CSS
|
src/assets/css/custom.css
|
ArtyDev57/facerecon_dashboard
|
ef36e992a66f69d48c211021a046d4d10b49db53
|
[
"MIT"
] | 2
|
2020-06-25T19:36:01.000Z
|
2020-07-06T11:03:31.000Z
|
src/assets/css/custom.css
|
ArtyDev57/facerecon_dashboard
|
ef36e992a66f69d48c211021a046d4d10b49db53
|
[
"MIT"
] | 2
|
2020-06-25T19:37:54.000Z
|
2020-07-01T11:29:12.000Z
|
src/assets/css/custom.css
|
ArtyDev57/facerecon_dashboard
|
ef36e992a66f69d48c211021a046d4d10b49db53
|
[
"MIT"
] | 1
|
2020-10-04T09:18:04.000Z
|
2020-10-04T09:18:04.000Z
|
#components-layout-demo-custom-trigger .trigger {
font-size: 18px;
line-height: 64px;
padding: 0 24px;
cursor: pointer;
transition: color 0.3s;
}
#components-layout-demo-custom-trigger .trigger:hover {
color: #1890ff;
}
#components-layout-demo-custom-trigger .logo {
height: 32px;
background: rgba(255, 255, 255, 0.2);
margin: 16px;
}
.logo {
padding: 5px;
background-color: #002140;
height: 50px;
}
.logo div {
padding: 7px;
}
.logo span {
margin-left: 10px;
color: rgba(255, 255, 255, 0.65);
}
.hideName {
visibility: hidden;
display: none;
transition: visibility 1s, opacity 0.5s linear;
list-style: none;
}
.sub-name {
display: inline;
}
.sub-name:hover {
overflow: hidden;
color: #fff;
}
div.logo > div:hover {
background-color: transparent;
}
/* Padding */
.pt-0 {
padding-top: 0 !important;
}
/* Padding */
/* Margin */
/* Margin utility. Fix: "5s" is a time unit, invalid for margin-top and
   dropped by browsers; 5px matches the class name's evident intent. */
.mt-5 {
  margin-top: 5px !important;
}
/* Margin */
/* Border */
.bt-1 {
border-top: 1px solid #fff;
}
.bb-1 {
border-bottom: 1px solid #fff;
}
/* Border */
.ant-form-item {
margin-bottom: 0;
}
#components-table-demo-resizable-column .react-resizable {
position: relative;
background-clip: padding-box;
}
#components-table-demo-resizable-column .react-resizable-handle {
position: absolute;
width: 10px;
height: 100%;
bottom: 0;
right: -5px;
cursor: col-resize;
z-index: 1;
}
.table-responsive {
display: block;
width: 100%;
overflow-x: auto;
}
.ant-card.card-bill .ant-card-body {
padding: 10px;
}
.float-right {
float: right;
}
.text-center {
text-align: center;
}
.profile {
float: right;
padding-right: 30px;
}
a {
font-family: "Phetsarath OT";
}
.wiget {
background-color: #fff;
margin: 5px auto;
}
.wiget > .wiget-icon {
max-width: 100px;
display: inline-block;
padding: 20px;
color: #fff;
background-color: #1890ff;
border-color: #1890ff;
}
.wiget-body {
padding-top: 10px;
padding-left: 10px;
display: inline-block;
vertical-align: top;
}
.wiget-number {
font-weight: 900;
font-size: 20px;
line-height: 20px;
}
.font-w {
color: rgba(255, 255, 255, 0.65);
}
.column-click {
cursor: pointer;
}
.image-container {
position: relative;
}
.overlay-image {
position: absolute;
bottom: 0;
background: rgb(0, 0, 0);
background: rgba(0, 0, 0, 0.3); /* Black see-through */
color: #f1f1f1;
width: 100%;
transition: 0.5s ease;
opacity: 0;
color: white;
font-size: 20px;
height: 100%;
}
.image-container:hover .overlay-image {
opacity: 1;
}
.overlay-image .setting {
height: 100%;
display: flex;
justify-content: center;
align-items: center;
flex-direction: column;
}
| 16.898089
| 65
| 0.649077
|
e2cf9bdd87a6fd73e2ce5e1bc1b1d74505a5064e
| 269
|
rs
|
Rust
|
src/primitives/ray/reflected.rs
|
U007D/rayt
|
ddd901a96fa8d55e1f8550c76f415e373c6bf587
|
[
"Apache-2.0"
] | null | null | null |
src/primitives/ray/reflected.rs
|
U007D/rayt
|
ddd901a96fa8d55e1f8550c76f415e373c6bf587
|
[
"Apache-2.0"
] | null | null | null |
src/primitives/ray/reflected.rs
|
U007D/rayt
|
ddd901a96fa8d55e1f8550c76f415e373c6bf587
|
[
"Apache-2.0"
] | null | null | null |
use crate::primitives::{Pixel, Ray};
/// Pairs a `Ray` with a colour `Pixel` attenuation — the two pieces of
/// data produced when a ray is reflected.
#[derive(Clone, Debug, PartialEq)]
pub struct Reflected {
    /// The reflected ray.
    ray: Ray,
    /// Colour attenuation associated with this reflection.
    attenuation: Pixel,
}
impl Reflected {
    /// Construct a `Reflected` from a ray and its colour attenuation.
    #[must_use]
    pub const fn new(ray: Ray, attenuation: Pixel) -> Self { Self { ray, attenuation } }
}
| 20.692308
| 88
| 0.628253
|
8e765f89c4dfe9e01918b5844f6bc777dd6fec59
| 1,263
|
js
|
JavaScript
|
template/backendservices/CloudCode/CloudFunctions/likeActivity.js
|
krupa23/krupatest
|
7dfc1cba4f51728db324dcfae18f273fcc1541a3
|
[
"BSD-2-Clause"
] | 21
|
2015-01-12T05:11:19.000Z
|
2021-03-13T12:17:24.000Z
|
template/backendservices/CloudCode/CloudFunctions/likeActivity.js
|
krupa23/krupatest
|
7dfc1cba4f51728db324dcfae18f273fcc1541a3
|
[
"BSD-2-Clause"
] | 10
|
2015-04-21T06:51:27.000Z
|
2018-09-05T11:55:10.000Z
|
template/backendservices/CloudCode/CloudFunctions/likeActivity.js
|
krupa23/krupatest
|
7dfc1cba4f51728db324dcfae18f273fcc1541a3
|
[
"BSD-2-Clause"
] | 33
|
2015-01-26T15:50:04.000Z
|
2019-05-27T02:04:01.000Z
|
// Cloud function: toggle the calling user's "like" on an activity.
// The user id is $push-ed into the activity's Likes array, or $pull-ed
// out if it is already present.
Everlive.CloudFunction.onRequest(function(request, response, done) {
    var activityId = request.queryString.activityId;
    if (!activityId) {
        response.statusCode = 500;
        response.result = 'Invalid activity id';
        return done();
    }
    // Only authenticated principals of type 'user' may like.
    var principal = request.principal;
    var principalId;
    if (principal && principal.type === 'user' && principal.data) {
        principalId = principal.data._id;
    }
    if (!principalId) {
        response.statusCode = 403;
        response.result = 'Invalid authentication';
        return done();
    }
    // Master key SDK: the raw $push/$pull updates below run with elevated
    // rights rather than the caller's own permissions.
    var sdk = Everlive.Sdk.withMasterKey();
    var activitiesData = sdk.data('Activities');
    activitiesData.getById(activityId).then(function (res) {
        var activity = res.result;
        var likes = activity.Likes || [];
        var index = likes.indexOf(principalId);
        var model = {
            Likes: principalId
        };
        var promise;
        if (index === -1) {
            // Not liked yet: append the user id.
            promise = activitiesData.rawUpdate({
                '$push': model
            }, activityId);
        } else {
            // Already liked: remove the user id.
            promise = activitiesData.rawUpdate({
                '$pull': model
            }, activityId);
        }
        // Complete the cloud function whether the update resolves or rejects.
        promise.then(done, done);
    });
});
| 26.3125
| 68
| 0.558987
|
b3633be06eeede7af5df15868f7a79269db3dcc5
| 8,207
|
py
|
Python
|
2020/20/20.py
|
jeremy-quicklearner/advent-of-code
|
a87541fda41326b47498bc7e5134515a8793d896
|
[
"MIT"
] | null | null | null |
2020/20/20.py
|
jeremy-quicklearner/advent-of-code
|
a87541fda41326b47498bc7e5134515a8793d896
|
[
"MIT"
] | null | null | null |
2020/20/20.py
|
jeremy-quicklearner/advent-of-code
|
a87541fda41326b47498bc7e5134515a8793d896
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python3
import copy
import random
with open('tiles.txt') as fh:
lines = fh.readlines()
tilestrs = ''.join(lines).split('\n\n')
tilestrs = {int(t.split('\n')[0][5:9]):'\n'.join(t.strip().split('\n')[1:]) for t in tilestrs}
tiles = {}
for tilekey,tilestr in tilestrs.items():
tile = []
for rowstr in tilestr.split('\n'):
tile.append(rowstr.strip())
tiles[tilekey] = tile
def ptile(tile):
    """Debug helper: print the tile, one space-separated row per line."""
    rows = [' '.join(row) for row in tile]
    print('\n'.join(rows))
def vreflect(tile):
    """Flip the tile top-to-bottom: reverse the row order, rows untouched."""
    return list(reversed(tile))
def hreflect(tile):
    """Flip the tile left-to-right: each row reversed, row order kept.

    Rows come back as lists even when the input rows were strings.
    """
    flipped = []
    for row in tile:
        flipped.append(list(row)[::-1])
    return flipped
def rotate(tile, degree):
    """Rotate the tile 90 degrees counter-clockwise, `degree` times.

    Rows of the result are lists (even when the input rows were strings),
    matching the element-by-element construction of the original loop.
    degree == 0 returns the input unchanged.
    """
    result = tile
    for _ in range(degree):
        # Transpose, then reverse the row order: one CCW quarter turn.
        result = [list(column) for column in zip(*result)][::-1]
    return result
def transform(tile, vref, href, rot):
    """Apply vertical flip, then horizontal flip, then `rot` CCW quarter
    turns — each step only when its flag/count is truthy."""
    steps = (
        (vref, vreflect),
        (href, hreflect),
        (rot, lambda t: rotate(t, rot)),
    )
    out = tile
    for enabled, op in steps:
        if enabled:
            out = op(out)
    return out
def memohash(vref, href, rot):
    """Pack a (vref, href, rot) transform into one int key (digits V H R)."""
    key = rot
    if vref:
        key += 100
    if href:
        key += 10
    return key
memo = {}
def memoget(id, vref, href, rot):
    """Look up a cached transformed tile; None when never stored."""
    per_tile = memo.get(id)
    if per_tile is None:
        return None
    return per_tile.get(memohash(vref, href, rot))
def memoset(id, vref, href, rot, tile):
    """Cache a transformed tile under its (vref, href, rot) key."""
    memo.setdefault(id, {})[memohash(vref, href, rot)] = tile
def variants(id):
    """Return all 16 transform descriptors (id, vref, href, rot) for tile `id`.

    Side effect: lazily builds and memoises each transformed grid so later
    memoget() calls on these descriptors succeed.
    """
    descriptors = []
    for vref in (False, True):
        for href in (False, True):
            for rot in range(4):
                cached = memoget(id, vref, href, rot)
                if not cached:
                    memoset(id, vref, href, rot,
                            transform(tiles[id], vref, href, rot))
                descriptors.append((id, vref, href, rot))
    return descriptors
def fit(tile, othertile, pos):
    """True when `othertile` can sit adjacent to `tile` on side `pos`.

    pos: 0 = other to the right, 1 = above, 2 = to the left, 3 = below.
    The touching edges must match element-by-element.
    """
    if pos == 0:
        mine = [r[-1] for r in tile]          # my right column
        theirs = [r[0] for r in othertile]    # their left column
    elif pos == 1:
        mine = tile[0]                        # my top row
        theirs = othertile[-1]                # their bottom row
    elif pos == 2:
        mine = [r[0] for r in tile]           # my left column
        theirs = [r[-1] for r in othertile]   # their right column
    else:
        mine = tile[-1]                       # my bottom row
        theirs = othertile[0]                 # their top row
    return all(a == b for a, b in zip(mine, theirs))
def memofithash(memotile, othermemotile, pos):
    """String cache key for a (variant, variant, side) fit query."""
    return f"{memotile}{othermemotile}{pos}"
memofitd = {}
def memofit(memotile, othermemotile, pos):
    """fit() memoised on the (variant, variant, pos) triple via `memofitd`."""
    key = memofithash(memotile, othermemotile, pos)
    try:
        return memofitd[key]
    except KeyError:
        result = fit(memoget(*memotile), memoget(*othermemotile), pos)
        memofitd[key] = result
        return result
# I counted 144 tiles, so it's a 12x12 square. If we use one of the corners as
# the starting point, then we need enough room for the whole puzzle to fill one
# quadrant. So use a 23x23 grid. For algorithmic simplicity, add an extra border
# slots around the edge
grid = [[None for _ in range(25)] for _ in range(25)]
pool = list(tiles.keys())
random.shuffle(list(reversed(pool)))
# Arbitrarily select tile 1669 as the starting point, with no transformations
grid[12][12] = (1669,0,0,0)
pool.remove(1669)
variants(1669)
def solve():
    """Recursively place tiles from `pool` onto `grid`, backtracking on failure.

    On success `pool` is empty and each filled `grid` cell holds an
    (id, vref, href, rot) variant descriptor.  Prints the grid state and
    remaining pool on every call as progress/debug output.
    """
    # Debug dump of the current grid.  Note: the inner loop uses len(grid)
    # for the column count too — fine only because grid is square.
    for row in range(len(grid)):
        for col in range(len(grid)):
            print(('[' + str(grid[row][col][0]) + ']' if grid[row][col] else '......'), end='')
        print('')
    print(pool)
    # Skip the outer border cells; they exist only so neighbour lookups
    # below never index out of range.
    for row in range(1, len(grid) - 1):
        for col in range(1, len(grid[row]) - 1):
            # If cell is already filled, we can't add a tile to it
            if grid[row][col]:
                continue
            # If no neighbours are filled, don't waste time on this cell.
            # This is the part that benefits from the extra border
            right = grid[row][col+1]
            above = grid[row-1][col]
            left = grid[row][col-1]
            below = grid[row+1][col]
            if not right and not above and not left and not below:
                continue
            # Try all variants of all tiles from the pool
            for id in pool:
                for variant in variants(id):
                    # Reject the variant as soon as any occupied neighbour
                    # edge fails to match (memoised edge comparison).
                    if right and not memofit(variant, right, 0):
                        continue
                    if above and not memofit(variant, above, 1):
                        continue
                    if left and not memofit(variant, left, 2):
                        continue
                    if below and not memofit(variant, below, 3):
                        continue
                    # Found a variant that works. Remove from the pool, add to the
                    # grid, and recurse
                    idx = pool.index(id)
                    pool.remove(id)
                    grid[row][col] = variant
                    solve()
                    # If the pool is empty after recursing, we have a solution.
                    if not pool:
                        return
                    # Otherwise the solve failed and we are backtracking. Try
                    # the next variant.
                    grid[row][col] = None
                    pool.insert(idx,id)
for id,variants in memo.items():
for mh,variant in variants.items():
pruned = copy.deepcopy(variant)
pruned = pruned[1:-1]
pruned = [p[1:-1] for p in pruned]
memo[id][mh] = pruned
minrow = 0
for (idx,row) in enumerate(grid):
filled = 0
for cell in row:
if cell:
filled = 1
break
if filled:
minrow = idx
break
maxrow = 0
for (idx,row) in reversed(list(enumerate(grid))):
filled = 0
for cell in row:
if cell:
filled = 1
break
if filled:
maxrow = idx
break
mincol = 0
for (idx,cell) in enumerate(grid[minrow]):
if cell:
mincol = idx
break
maxcol = 0
for (idx,cell) in reversed(list(enumerate(grid[maxrow]))):
if cell:
maxcol = idx
break
trimmedgrid = grid[minrow:maxrow+1]
for idx,row in enumerate(trimmedgrid):
trimmedgrid[idx] = row[mincol:maxcol+1]
imagetiles = [[memoget(*c) for c in r] for r in trimmedgrid]
image = []
for tilerow in imagetiles:
for subrowidx in range(8):
subrow = []
for tile in tilerow:
subrow += tile[subrowidx]
image.append(subrow)
monsterimg = [list(' # '),
list('# ## ## ###'),
list(' # # # # # # ')]
monstervariants = []
for vref in [False,True]:
for href in [False,True]:
for rot in range(0,4):
monstervariants.append(transform(monsterimg, vref, href, rot))
for mvar in monstervariants:
for mrow in (mvar):
print(''.join(mrow))
print('')
inmonster = [[False for _ in r] for r in image]
def checkmonster(row, col, monster):
    """True if every '#' cell of `monster` lands on a '#' in the global
    `image` when the pattern is anchored at (row, col).

    Returns False immediately when the pattern would run off the image.
    """
    if row + len(monster) > len(image):
        return False
    if col + len(monster[0]) > len(image[row]):
        return False
    for dr, pattern_row in enumerate(monster):
        for dc, cell in enumerate(pattern_row):
            # Non-'#' pattern cells are wildcards; only '#' must match.
            if cell == '#' and image[row + dr][col + dc] != '#':
                return False
    return True
for row in range(len(image)):
for col in range(len(image[row])):
for mvar in monstervariants:
if checkmonster(row, col, mvar):
for mrow in range(len(mvar)):
for mcol in range(len(mvar[mrow])):
if mvar[mrow][mcol] == '#':
inmonster[row+mrow][col+mcol] = True
print('\n'.join([' '.join(r) for r in image]))
print('\n'.join(' '.join([{True:'#',False:' '}[c] for c in r]) for r in inmonster))
monstercount = 0
nonmonstercount = 0
for row in range(len(image)):
for col in range(len(image)):
if image[row][col] != '#':
continue
if inmonster[row][col]:
monstercount += 1
else:
nonmonstercount += 1
print(nonmonstercount)
| 28.897887
| 95
| 0.541002
|
43a5cd61d0c97f2dfd182182ea5ba841a11947b0
| 9,490
|
tsx
|
TypeScript
|
client/dist/widgets/common/list/src/runtime/styles/style.tsx
|
Freda0502/ArcGISExperienceBuilder
|
8bb8df3ce2fe09a89b3f577f2d5be3ad71cf24b1
|
[
"Apache-2.0",
"MIT"
] | 1
|
2021-06-18T18:20:55.000Z
|
2021-06-18T18:20:55.000Z
|
client/dist/widgets/common/list/src/runtime/styles/style.tsx
|
Freda0502/ArcGISExperienceBuilder
|
8bb8df3ce2fe09a89b3f577f2d5be3ad71cf24b1
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
client/dist/widgets/common/list/src/runtime/styles/style.tsx
|
Freda0502/ArcGISExperienceBuilder
|
8bb8df3ce2fe09a89b3f577f2d5be3ad71cf24b1
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
import { css, polished, SerializedStyles, AppMode, utils, IMThemeVariables} from 'jimu-core';
import { ListProps } from '../widget';
import { DirectionType, LIST_CARD_PADDING, PageStyle,
DS_TOOL_BOTTOM_PADDING, DS_TOOL_H, BOTTOM_TOOL_TOP_PADDING, BOTTOM_TOOL_H, COMMON_PADDING, CardSize } from '../../config';
interface LisStyleOption {
pageStyle: PageStyle,
scrollBarOpen: boolean,
direction: DirectionType,
appMode: AppMode,
theme: IMThemeVariables,
isHeightAuto: boolean,
isWidthAuto: boolean,
currentCardSize: CardSize,
showTopTools: boolean,
bottomToolH: number,
topRightToolW: number,
hasRecords: boolean
}
export const listStyle = utils.memoize(function(options: LisStyleOption): SerializedStyles{
const {showTopTools, bottomToolH, topRightToolW, hasRecords, currentCardSize, direction, pageStyle, scrollBarOpen, appMode, theme, isHeightAuto, isWidthAuto} = options;
const topToolsH = showTopTools ? DS_TOOL_H : 0;
const isHorizon = direction === DirectionType.Horizon;
const flexDirection = direction === DirectionType.Horizon ? 'column' : 'row'
return css`
&.list-container {
/* position: ${isHeightAuto ? 'absolute' : 'relative'}; */
position: relative;
z-index: 0;
overflow: hidden;
.bottom-boundary {
height: 2px;
width: 2px;
position: absolute;
bottom: 0;
right: 0;
opacity: 0;
}
${direction === DirectionType.Horizon ?
`
margin-left: ${LIST_CARD_PADDING + 'px'};
height: ${isHeightAuto ? 'auto' : '100%'};
width: ${isWidthAuto ? 'auto' : `calc(100% - ${LIST_CARD_PADDING + 'px'})`};
` :
`
margin-top: ${LIST_CARD_PADDING + 'px'};
width: ${isWidthAuto ? 'auto' : '100%'};
height: ${isHeightAuto ? 'auto' : `calc(100% - ${LIST_CARD_PADDING + 'px'})`};
`}
.editing-mask-list {
position: absolute;
top: ${(direction === DirectionType.Vertical && hasRecords) ? currentCardSize.height + topToolsH : topToolsH}px;
left: ${(direction === DirectionType.Horizon && hasRecords) ? currentCardSize.width : 0}px;
bottom: ${polished.rem(bottomToolH)};
right: 0;
z-index: 10;
background-color: ${polished.rgba(theme.colors.black, 0.2)};
}
.editing-mask-ds-tool {
position: absolute;
z-index: 10;
top: 0;
left: 0;
bottom: 0;
right: 0;
background-color: ${polished.rgba(theme.colors.black, 0.2)};
}
.editing-mask-bottom-tool {
position: absolute;
z-index: 10;
top: 0;
left: 0;
bottom: 0;
right: 0;
background-color: ${polished.rgba(theme.colors.black, 0.2)};
}
.tip-mask-list {
position: absolute;
top: ${topToolsH}px;
left: 0;
bottom: 0;
right: 0;
z-index: 10;
padding-top: 20%;
}
.datasource-tools {
position: relative;
height: ${polished.rem(topToolsH)};
padding-bottom: ${polished.rem(DS_TOOL_BOTTOM_PADDING)};
.sort-fields-input {
width: 200px;
margin-left: 8px;
margin-right: 16px;
}
.tool-row {
height: ${DS_TOOL_H - DS_TOOL_BOTTOM_PADDING}px;
}
.ds-tools-line {
width: 100%;
height: 1px;
margin-top: ${polished.rem(6)};
background-color: ${theme.colors.palette.light[500]};
}
.ds-tools-line-blue {
background-color: ${theme.colors.palette.info[500]};
}
.list-search-div {
width: calc(100% - ${topRightToolW}px);
.list-search {
margin-bottom: ${polished.rem(-4)};
width: 100%;
}
}
}
.tools-menu {
color: ${theme.colors.palette.light[800]};
margin-top: ${polished.rem(4)};
}
.tools-menu:hover {
color: ${theme.colors.palette.info[500]};
}
.bottom-tools {
position: relative;
padding-top: ${polished.rem(BOTTOM_TOOL_TOP_PADDING)};
min-height: ${polished.rem(BOTTOM_TOOL_H)};
.scroll-navigator {
.btn {
border-radius: ${theme.borderRadiuses.circle};
}
}
}
.widget-list-list:focus {
outline: none;
}
.widget-list-list {
&>div {
position: relative;
flex: 1;
box-sizing: content-box;
${isHorizon && `padding-right: ${polished.rem(15)}`};
${!isHorizon && `padding-bottom: ${polished.rem(15)}`};
}
padding: 0;
position: relative;
/* box-sizing: border-box; */
${(!window.jimuConfig.isInBuilder || appMode === AppMode.Run) ? `overflow-${isHorizon ? 'y' : 'x'}: hidden !important;` : 'overflow: hidden !important;'}
height: ${isHeightAuto ? 'auto' : `calc(100% - ${topToolsH}px - ${ bottomToolH }px)`} !important;
width: ${isWidthAuto ? 'auto' : '100%'} !important;
display: flex;
${`flex-direction: ${flexDirection}`};
${isHeightAuto && `min-height: ${currentCardSize.height}px;`};
${isWidthAuto && `min-width: ${currentCardSize.width}px;`};
${isHorizon ?
`
${`max-width: ${document.body.scrollWidth}px`};
` :
`
${`max-height: ${document.body.scrollHeight}px`};
`
}
}
${pageStyle === PageStyle.Scroll && !scrollBarOpen ?
`
.widget-list-list::-webkit-scrollbar {
display: none; //Safari and Chrome
}
.widget-list-list {
-ms-overflow-style: none; //IE 10+
overflow: -moz-scrollbars-none; //Firefox
}
` : ''}
}
`
})
export function getStyle(props: ListProps, isEditing: boolean, showBottomTool: boolean): SerializedStyles{
const {config, id, appMode, isHeightAuto, isWidthAuto, theme} = props;
return css`
${'&.list-widget-' + id} {
overflow: visible;
background-color: transparent;
border: ${polished.rem(COMMON_PADDING)} solid ${polished.rgba(theme.colors.black, window.jimuConfig.isInBuilder && isEditing ? 0.2 : 0)};
height: ${isHeightAuto ? 'auto' : '100%'};
width: ${isWidthAuto ? 'auto' : '100%'};
.list-with-mask {
position: absolute;
left: 0;
right: 0;
bottom: 0;
top: 0;
background-color: ${polished.rgba(theme.colors.black, 0)};
z-index: 1;
}
.refresh-loading-con {
right: 0;
bottom:${showBottomTool ? polished.rem(BOTTOM_TOOL_H) : 0};
align-items: center;
height: ${polished.rem(24)};
.auto-refresh-loading {
background: ${polished.rgba(theme.colors?.palette?.dark?.[100], 0.2)};
color: ${theme.colors?.black};
font-size: ${polished.rem(12)};
line-height: ${polished.rem(24)};
padding: 0 ${polished.rem(7)};
}
&.horizon-loading {
bottom:${showBottomTool ? polished.rem(BOTTOM_TOOL_H+6) : polished.rem(6)};
}
&.vertical-loading {
right: ${polished.rem(6)};
}
}
.loading-con {
@keyframes loading {
0% {transform: rotate(0deg); };
100% {transform: rotate(360deg)};
}
width: ${polished.rem(16)};
height: ${polished.rem(16)};
border: 1px solid ${theme?.colors?.palette?.dark?.[100]};
border-radius: 50%;
border-top: 1px solid ${theme?.colors?.palette?.dark?.[800]};
box-sizing: border-box;
animation:loading 2s infinite linear;
margin-right: ${polished.rem(4)};
}
.widget-list {
overflow: ${(window.jimuConfig.isInBuilder && appMode !== AppMode.Run) ? 'hidden' : 'auto'};
height: ${isHeightAuto ? 'auto' : '100%'};
width: ${isWidthAuto ? 'auto' : '100%'};
/* align-items: ${config.alignType};
justify-content: ${config.alignType}; */
${
config.direction === DirectionType.Horizon ?
`
overflow-y: hidden;
` :
`
overflow-x: hidden;
`
}
}
}
`
}
export function getToolsPopperStyle(props: ListProps): SerializedStyles{
const { theme } = props;
return css `
&{
padding: ${polished.rem(6)} ${polished.rem(11)};
height: ${polished.rem(40)};
width: ${polished.rem(246)};
}
.ds-tools-line {
width: 100%;
height: 1px;
background-color: ${theme.colors.palette.light[500]};
}
.ds-tools-line-blue {
background-color: ${theme.colors.palette.info[500]};
}
.close-search {
margin-top: ${polished.rem(-6)};
}
`
}
export function getSearchToolStyle(props: ListProps): SerializedStyles{
const { theme } = props;
return css `
.close-search {
border: 1px solid ${theme.colors.palette.light[500]};
box-sizing: border-box;
background-color: ${theme.colors.white};
}
.search-box-content {
flex-direction: column;
align-items: flex-start;
}
`
}
| 32.389078
| 171
| 0.540358
|
6b0a3f7b2edfc3d869b249450ed7e01cb45a1bb7
| 6,175
|
js
|
JavaScript
|
client/src/tables/orgEmployeeExLvl.js
|
vio/infobase
|
5c3e6fc2533940dd82e96bc9686a0d174db00696
|
[
"MIT"
] | null | null | null |
client/src/tables/orgEmployeeExLvl.js
|
vio/infobase
|
5c3e6fc2533940dd82e96bc9686a0d174db00696
|
[
"MIT"
] | null | null | null |
client/src/tables/orgEmployeeExLvl.js
|
vio/infobase
|
5c3e6fc2533940dd82e96bc9686a0d174db00696
|
[
"MIT"
] | null | null | null |
import text from "./orgEmployeeExLvl.yaml";
import {
stats,
trivial_text_maker,
Statistics,
people_five_year_percentage_formula,
businessConstants,
year_templates,
} from "./table_common";
const {
compact_ex_level_map,
ex_levels,
} = businessConstants;
const {
people_years,
people_years_short_second,
} = year_templates;
export default {
text,
id: "orgEmployeeExLvl",
legacy_id: "table112",
source: [ "RPS" ],
tags: [
"PEOPLE",
"FPS",
"EX_LVL",
"ANNUAL",
],
"link": {
"en": "http://open.canada.ca/data/en/dataset/2e4e5626-3185-4c8d-932a-7e161355fb96",
"fr": "http://ouvert.canada.ca/data/fr/dataset/2e4e5626-3185-4c8d-932a-7e161355fb96",
},
"name": {
"en": "Population by Executive Level",
"fr": "Population selon les niveaux des cadres supérieurs",
},
"title": {
"en": "Population by Executive Level",
"fr": "Population selon les niveaux des cadres supérieurs",
},
"add_cols": function () {
this.add_col({
"type": "int",
"key": true,
"hidden": true,
"nick": "dept",
"header": '',
});
this.add_col({
"type": "int",
"key": true,
"nick": "ex_lvl",
"header": trivial_text_maker("ex_level"),
});
_.each(people_years, (header,ix)=>{
this.add_col({
"simple_default": ix === 4,
"type": "big_int",
"nick": header,
"header": `${trivial_text_maker("fiscal_year_end")}, ${people_years_short_second[ix]}`,
"description": {
"en": "Corresponds to the active employee population by Executive Level, as of March 31 " +people_years_short_second[ix],
"fr": "Correspond à l'effectif actif par direction niveaux, au 31 mars "+people_years_short_second[ix],
},
});
});
this.add_col({
"type": "percentage1",
"nick": "five_year_percent",
"header": trivial_text_maker("five_year_percent_header"),
"description": {
"en": trivial_text_maker("five_year_percent_description"),
"fr": trivial_text_maker("five_year_percent_description"),
},
"formula": people_five_year_percentage_formula("ex_lvl",people_years),
});
},
"mapper": function (row) {
row.splice(1, 1, ex_levels[row[1]].text);
return row;
},
"dimensions": [
{
"title_key": "horizontal",
include_in_report_builder: true,
filter_func: function(options){
return function(row){
return row.ex_lvl;
};
},
},
{
title_key: "ex_level_condensed",
include_in_report_builder: true,
filter_func: function(options){
return function(row){
return compact_ex_level_map[row.ex_lvl];
};
},
},
],
"queries": {
"gov_grouping": function() {
return _.chain(this.table.horizontal(people_years,false))
.map(function(years, key){
return [key].concat(years);
})
.sortBy(function(row){
return d3.sum(_.tail(row));
})
.value();
},
"summed_levels": function() {
return _.groupBy(this.data, function(x){
return compact_ex_level_map[x.ex_lvl];
});
},
},
};
Statistics.create_and_register({
id: 'orgEmployeeExLvl_dept_info',
table_deps: ['orgEmployeeExLvl'],
level: 'dept',
compute: (subject, tables, infos, add, c) => {
const table = tables.orgEmployeeExLvl;
const q = table.q(subject);
c.dept = subject;
const all_years = q.get_top_x(["ex_lvl", ...people_years], Infinity,{zip: true});
stats.year_over_year_multi_stats_active_years(add,"head_count_ex_level_years",all_years,false,people_years);
const num_active_years = _.chain( all_years )
.map( group => _.tail(group) )
.pipe( groups => _.zip.apply(null, groups) )
.map( zipped_groups => d3.sum(zipped_groups) )
.countBy( total => total === 0 ? 'inactive' : 'active' )
.pipe( _.property('active') )
.value();
const all_years_only_ex = _.filter(all_years, a => (a[0] !== "Non-EX"));
if ( !_.isEmpty(all_years_only_ex) ){
stats.year_over_year_multi_stats_active_years(add,"head_count_ex_level",all_years_only_ex,num_active_years);
const ex_string = window.lang === 'en' ? 'Executive' : 'Cadres supérieurs';
const ex_lev_EX_avg = _.chain( q.summed_levels() )
.pipe( _.property(ex_string) )
.pipe( ex_levels => _.map(people_years, y =>
d3.sum( _.map(ex_levels, _.property(y)) )
))
.pipe( totals_by_year => d3.sum(totals_by_year)/num_active_years )
.value();
add("head_count_ex_level_avg_ex", ex_lev_EX_avg );
add("head_count_ex_avg_share", (ex_lev_EX_avg*num_active_years)/d3.sum(_.map(all_years, a => d3.sum(a.slice(1)))));
} else {
const not_avail_str = window.lang === 'en' ? 'N.A' : 'S.A';
add("head_count_count_ex_level_first_active_year", people_years[0]);
add("head_count_count_ex_level_last_active_year", people_years[4]);
_.each(
[
"head_count_ex_level_top",
"head_count_ex_level_top_avg",
"head_count_ex_level_bottom",
"head_count_ex_level_bottom_avg",
"head_count_ex_level_avg_ex",
"head_count_ex_avg_share",
],
key => { add(key, not_avail_str); }
);
}
},
});
Statistics.create_and_register({
id: 'orgEmployeeExLvl_gov_info',
table_deps: ['orgEmployeeExLvl'],
level: 'gov',
compute: (subject, tables, infos, add, c) => {
const table = tables.orgEmployeeExLvl;
const q = table.q(subject);
const all_years_unfiltered = q.gov_grouping();
const all_years = _.filter(all_years_unfiltered, a => a[0] !== "Non-EX");
stats.year_over_year_multi_stats(add,"head_count_ex_level",all_years);
const year_group_vals = _.map(all_years, group => _.tail(group) );
const year_totals = _.map(year_group_vals, d => d3.sum(d) );
add("head_count_ex_level_avg_ex", d3.sum(year_totals)/5);
add("head_count_ex_avg_share", (d3.sum(year_totals)/d3.sum(q.sum(people_years, {as_object: false}))));
},
});
| 29.545455
| 131
| 0.61749
|
2fbe60556591e6ea771b8d9b0fb07aa24ee7d52e
| 412
|
py
|
Python
|
src/bilbyui/migrations/0003_bilbyjob_job_id.py
|
gravitationalwavedc/gwcloud_bilby
|
f5074fe60ff2a3cfa6a7e8d3e97c9573a6152563
|
[
"MIT"
] | 1
|
2020-10-26T02:35:26.000Z
|
2020-10-26T02:35:26.000Z
|
src/bilbyui/migrations/0003_bilbyjob_job_id.py
|
gravitationalwavedc/gwcloud_bilby
|
f5074fe60ff2a3cfa6a7e8d3e97c9573a6152563
|
[
"MIT"
] | 31
|
2020-05-04T05:57:45.000Z
|
2022-02-23T04:35:35.000Z
|
src/bilbyui/migrations/0003_bilbyjob_job_id.py
|
gravitationalwavedc/gwcloud_bilby
|
f5074fe60ff2a3cfa6a7e8d3e97c9573a6152563
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.2.12 on 2020-05-04 01:55
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('bilbyui', '0002_auto_20200403_0346'),
]
operations = [
migrations.AddField(
model_name='bilbyjob',
name='job_id',
field=models.IntegerField(blank=True, default=None, null=True),
),
]
| 21.684211
| 75
| 0.609223
|
583a3d39a4507be9630a42e6961cdb45055ef665
| 4,264
|
css
|
CSS
|
dual-screen-css/boxstyles-legacy.css
|
conceptdev/web-samples
|
79a1d752367c4dbdeb8c5bd9a80b7a135a83a76a
|
[
"MIT"
] | 1
|
2022-02-20T05:47:27.000Z
|
2022-02-20T05:47:27.000Z
|
dual-screen-css/boxstyles-legacy.css
|
conceptdev/web-samples
|
79a1d752367c4dbdeb8c5bd9a80b7a135a83a76a
|
[
"MIT"
] | null | null | null |
dual-screen-css/boxstyles-legacy.css
|
conceptdev/web-samples
|
79a1d752367c4dbdeb8c5bd9a80b7a135a83a76a
|
[
"MIT"
] | 1
|
2021-06-26T13:36:11.000Z
|
2021-06-26T13:36:11.000Z
|
/*Copyright (c) 2020 Intel Corporation. All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. Neither the name of Intel Corporation nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
https://github.com/foldable-devices/device-configurator/blob/master/LICENSE.md*/
*,
*::after,
*::before {
margin: 0;
padding: 0;
box-sizing: border-box;
}
body {
width: 100vw;
height: 100vh;
overflow: hidden;
}
.text {
font-weight: bold;
color: white;
margin-top: 12px;
}
.stripes {
height: 250px;
width: 200px;
background-size: 40px 40px;
}
.angled {
background-color: #737373;
background-image:
linear-gradient(45deg, rgba(255, 255, 255, .2) 25%, transparent 25%,
transparent 50%, rgba(255, 255, 255, .2) 50%, rgba(255, 255, 255, .2) 75%,
transparent 75%, transparent);
}
.fold {
height: 0;
width: 0;
}
.blue {
height: 100px;
width: 100px;
background-color: blueviolet;
text-align: center;
color: white;
}
.yellow {
height: 100px;
width: 100px;
background-color: yellow;
text-align: center;
}
.pink {
height: 100px;
width: 100px;
background-color: pink;
text-align: center;
}
.green {
height: 100px;
width: 100px;
background-color: green;
text-align: center;
}
@media (spanning: single-fold-vertical) {
.fold {
height: env(fold-height);
width: env(fold-width);
left: env(fold-left);
top: 0;
position: absolute;
}
.content {
flex-direction: row;
}
.blue {
height: 100px;
width: 100px;
position: absolute;
left: calc(env(fold-left) - 100px);
top: 0;
text-align: center;
}
.yellow {
height: 100px;
width: calc(100vw - env(fold-left) - env(fold-width));
position: absolute;
left: calc(env(fold-left) + env(fold-width));
top: 0;
background-color: yellow;
text-align: center;
}
.pink {
height: 100px;
width: env(fold-left);
position: absolute;
left: 0;
bottom: 0;
background-color: pink;
text-align: center;
}
.green {
height: 100px;
width: 100px;
position: absolute;
left: calc(env(fold-left) + env(fold-width));
bottom: 0;
background-color: green;
text-align: center;
}
}
@media (spanning: single-fold-horizontal) {
.fold {
height: env(fold-height);
width: env(fold-width);
left: 0;
top: env(fold-top);
position: absolute;
}
.content {
flex-direction: column-reverse;
}
}
@media (spanning: none) {
.fold {
height: 0;
width: 0;
}
.content {
flex-direction: row;
}
.second-container {
height: 0;
width: 0;
display: none;
}
}
| 28.810811
| 1,269
| 0.629221
|
d7f667469d684b3042f3eb2a8c3ab3efcb1199a9
| 358
|
rb
|
Ruby
|
data/azure_cognitiveservices_spellcheck/lib/1.0/generated/azure_cognitiveservices_spellcheck/models/action_type.rb
|
poanchen/azure-sdk-for-ruby
|
a5e8900bca941f2826f5da7017c509c55649d9bb
|
[
"MIT"
] | 237
|
2015-01-03T03:57:55.000Z
|
2022-03-31T09:41:58.000Z
|
data/azure_cognitiveservices_spellcheck/lib/1.0/generated/azure_cognitiveservices_spellcheck/models/action_type.rb
|
poanchen/azure-sdk-for-ruby
|
a5e8900bca941f2826f5da7017c509c55649d9bb
|
[
"MIT"
] | 1,785
|
2015-01-13T19:32:37.000Z
|
2021-08-24T13:58:11.000Z
|
data/azure_cognitiveservices_spellcheck/lib/1.0/generated/azure_cognitiveservices_spellcheck/models/action_type.rb
|
poanchen/azure-sdk-for-ruby
|
a5e8900bca941f2826f5da7017c509c55649d9bb
|
[
"MIT"
] | 242
|
2015-01-14T15:49:48.000Z
|
2022-01-28T18:35:08.000Z
|
# encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::CognitiveServices::SpellCheck::V1_0
module Models
#
# Defines values for ActionType
#
module ActionType
Edit = "Edit"
Load = "Load"
end
end
end
| 21.058824
| 70
| 0.687151
|
1a5db05d80628acb7af640b6359068158657dd05
| 5,318
|
py
|
Python
|
Software/plot_temp_RH_Klosters_gondola.py
|
AnnikaLau/SIP_initiated_by_melting_layer
|
efff0d6e47eead5997da527fec9209483bb7ea6a
|
[
"Apache-2.0"
] | null | null | null |
Software/plot_temp_RH_Klosters_gondola.py
|
AnnikaLau/SIP_initiated_by_melting_layer
|
efff0d6e47eead5997da527fec9209483bb7ea6a
|
[
"Apache-2.0"
] | null | null | null |
Software/plot_temp_RH_Klosters_gondola.py
|
AnnikaLau/SIP_initiated_by_melting_layer
|
efff0d6e47eead5997da527fec9209483bb7ea6a
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Thu Jan 7 11:02:00 2021
@author: Annika
"""
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
import scipy.io
from datetime import datetime, timedelta
import numpy as np
from read_log_files import *
from read_time_rides import *
def datenum(d):
return 366 + d.toordinal() + (d - datetime.fromordinal(d.toordinal())).total_seconds()/(24*60*60)
#Function to reverse datenum
def datestr(x, tz=None):
dt = datetime.fromordinal(int(x)) + timedelta(days=x%1) - timedelta(days = 366)
return dt
def plot_temp_RH_Klosters_gondola(path_log_file,path_time_rides,path_data_Klosters):
myFmt = mdates.DateFormatter('%H:%M')
start_time = '2019-02-22 08:00:00'
end_time = '2019-02-22 10:00:00'
#Read in data from weather station in Klosters
mat = scipy.io.loadmat(path_data_Klosters)
data_KLA = mat['WS']
T_KLA = data_KLA['T']
T_KLA = T_KLA[0][0][0]
T_KLA = np.array(T_KLA,dtype=np.float)
RH_KLA = data_KLA['RH']
RH_KLA = RH_KLA[0][0][0]
RH_KLA = np.array(RH_KLA,dtype=np.float)
time_KLA = data_KLA['time']
time_KLA = time_KLA[0][0][0]
time_KLA = np.array([datestr(time_KLA[i]) for i in range(len(time_KLA))])
index_KLA = pd.DatetimeIndex(time_KLA)
T_KLA = pd.Series(T_KLA,index = index_KLA)
RH_KLA = pd.Series(RH_KLA,index=index_KLA)
#Read in log file from HOLIMO
log = read_log_file(start_time,end_time,path_log_file)
day_of_month = log['day_of_month'][0]
month = log['month'][0]
year = log['year'][0]
hour = log['hour'][0]
minute = log['minute'][0]
second = log['second'][0]
time_gondel = [str(day_of_month[i])+'/'+str(month[i])+'/'+str(year[i])+' ' +str(hour[i])+':'+str(minute[i])+':'+str(second[i]) for i in range(0,len(month))]
index_gondel = pd.DatetimeIndex(time_gondel)
T_gondel = pd.Series(log['temp'][0],index = index_gondel)
RH_gondel = pd.Series(log['rh'][0],index = index_gondel)
time_gondel = [datenum(index_gondel[i]) for i in range(0,len(index_gondel))]
#Read in time of gondola rides
[start_time_ride,end_time_ride] = read_time_rides(path_time_rides)
#Derive temperature at Gotschnaboden (Gondola at lowest point considered for measurements)
idx_gb = [np.argmin(np.abs(time_gondel-start_time_ride[i])) for i in range(0,len(start_time_ride))]
T_GB=T_gondel[idx_gb]
RH_GB=RH_gondel[idx_gb]
index_GB = index_gondel[idx_gb]
T_GB = pd.Series(T_GB,index=index_GB)
RH_GB = pd.Series(RH_GB,index=index_GB)
#Derive temperature at Gotschnagrat (Gondola at highest point considered for measurements)
idx_gg = [np.argmin(np.abs(time_gondel-end_time_ride[i])) for i in range(0,len(end_time_ride))]
T_GG=T_gondel[idx_gg]
RH_GG=RH_gondel[idx_gg]
index_GG = index_gondel[idx_gg]
T_GG = pd.Series(T_GG,index=index_GG)
RH_GG = pd.Series(RH_GG,index=index_GG)
time_gb = np.array([datestr(start_time_ride[i]) for i in range(len(start_time_ride))])
time_gg = np.array([datestr(end_time_ride[i]) for i in range(len(end_time_ride))])
x_gr = np.column_stack((time_gb,time_gg))
y_gr = np.column_stack((T_GB,T_GG))
y_gr_RH = np.column_stack((RH_GB,RH_GG))
#Melting layer
melting = [0,0]
time_melting = [start_time,end_time]
time_melting = pd.to_datetime(time_melting)
index_melting = pd.DatetimeIndex(time_melting)
melting = pd.Series(melting, index=index_melting)
#Lines for gondel rides
fs=25
f=1
plt.figure(f)
gr = plt.plot(x_gr.transpose(),y_gr.transpose(),color = [0.7, 0.7, 0.7])
gg, = plt.plot(T_GG[start_time:end_time].index,T_GG[start_time:end_time],label='Gotschnagrat 2300m',color = [0,0.447,0.741])
gb, = plt.plot(T_GB[start_time:end_time].index,T_GB[start_time:end_time],label='Gotschnaboden 1700m',color = [0.9290, 0.6940, 0.1250])
kla, = plt.plot(T_KLA[start_time:end_time].index,T_KLA[start_time:end_time],label='Klosters 1200m',color = [0, 0.5, 0])
m = plt.plot(melting[start_time:end_time].index,melting[start_time:end_time],'k')
plt.gcf().autofmt_xdate()
plt.gca().xaxis.set_major_formatter(myFmt)
plt.gca().invert_yaxis()
plt.xlim(start_time,end_time)
plt.ylim(4,-3)
plt.xlabel('Time (UTC)',fontsize=fs)
plt.ylabel('Temperature (°C)',fontsize=fs)
plt.tick_params(right=True)
plt.yticks(fontsize=fs)
plt.xticks(fontsize=fs)
plt.show()
f=2
plt.figure(f)
gr = plt.plot(x_gr.transpose(),y_gr_RH.transpose(),color = [0.7, 0.7, 0.7])
gg, = plt.plot(RH_GG[start_time:end_time].index,RH_GG[start_time:end_time],label='Gotschnagrat 2300m',color = [0,0.447,0.741])
gb, = plt.plot(RH_GB[start_time:end_time].index,RH_GB[start_time:end_time],label='Gotschnaboden 1700m',color = [0.9290, 0.6940, 0.1250])
kla, = plt.plot(RH_KLA[start_time:end_time].index,RH_KLA[start_time:end_time],label='Klosters 1200m',color = [0, 0.5, 0])
plt.gcf().autofmt_xdate()
plt.gca().xaxis.set_major_formatter(myFmt)
plt.xlim(start_time,end_time)
plt.ylim(75,100)
plt.xlabel('Time (UTC)',fontsize=fs)
plt.ylabel('RH (%)',fontsize=fs)
plt.tick_params(right=True)
plt.yticks(fontsize=fs)
plt.xticks(fontsize=fs)
plt.show()
| 38.817518
| 160
| 0.682399
|
a14ef61e6c091008f1e67ae68ecea5d9edafdd8f
| 2,074
|
tsx
|
TypeScript
|
components/templates/page.tsx
|
k4m4/www
|
1864e9daebb99df9cc0df09d5c643facf13207dc
|
[
"MIT"
] | 1
|
2020-12-26T20:34:18.000Z
|
2020-12-26T20:34:18.000Z
|
components/templates/page.tsx
|
k4m4/www
|
1864e9daebb99df9cc0df09d5c643facf13207dc
|
[
"MIT"
] | 1
|
2020-10-25T23:31:23.000Z
|
2020-10-25T23:31:23.000Z
|
components/templates/page.tsx
|
k4m4/k4m4.dev
|
1864e9daebb99df9cc0df09d5c643facf13207dc
|
[
"MIT"
] | 1
|
2021-03-08T23:54:41.000Z
|
2021-03-08T23:54:41.000Z
|
import { Box } from '@chakra-ui/react';
import { NextSeo as NextSEO } from 'next-seo';
import type { OpenGraph } from 'next-seo/lib/types';
import { useRouter } from 'next/router';
import { ReactNode } from 'react';
import Footer from '../organisms/footer';
import Nav from '../organisms/nav';
type PageProps = {
children: ReactNode;
title?: string;
description?: string;
showNav?: boolean;
};
const Page = ({
children,
title,
description,
showNav = true,
}: PageProps) => {
const router = useRouter();
const url = `https://${process.env.DEFAULT_DOMAIN}${router.pathname}`;
const fullTitle = title && `${title} – k4m4`;
const cardURL = `https://cards.microlink.io/?p=2gKXPD4KICA8TGluawogICAgaHJlZj0naHR0cHM6Ly9mb250cy5nb29nbGVhcGlzLmNvbS9jc3M_ZmFtaWx5PU1laWUrU2NyaXB0JmRpc3BsYXk9c3dhcCcKICAgIHJlbD0nc3R5bGVzaGVldCcKICAvPgogIDxGbGV4CiAgICBzeD17ewogICAgICBqdXN0aWZ5Q29udGVudDogJ2NlbnRlcicsCiAgICAgIGZsZXhEaXJlY3Rpb246ICdjb2x1bW4nLAogICAgICBhbGlnbkl0ZW1zOiAnY2VudGVyJywKICAgICAgYmc6ICdibGFjaycsCiAgICB9fQogID4KICAgIDxCb3gKICAgICAgc3g9e3sKICAgICAgICBjb2xvcjogJyNGNUY3RkEnLAogICAgICB9fQogICAgPgogICAgICA8SW1hZ2UKICAgICAgICBzcmM9Imh0dHBzOi8vazRtNC5kZXYvc3RhdGljL2ltYWdlcy9vZy1jYXJkL2xvZ28ucG5nIgogICAgICAgIHdpZHRoPXsyMDB9CiAgICAgIC8-CiAgICA8L0JveD4KICAgIDxUZXh0CiAgICAgIGFzPSJzcGFuIgogICAgICBzeD17ewogICAgICAgIGNvbG9yOiAnIzcxODA5NicsCiAgICAgICAgZm9udFdlaWdodDogNTAwLAogICAgICAgIGZvbnRTaXplOiAyMCwKICAgICAgICB0ZXh0VHJhbnNmb3JtOiAndXBwZXJjYXNlJywKICAgICAgICBsZXR0ZXJTcGFjaW5nOiAzLAogICAgICB9fQogICAgPgogICAgICB7cXVlcnkudGl0bGV9CiAgICA8L1RleHQ-CiAgPC9GbGV4Pgo8Lz4K&title=${title}`;
const openGraph: OpenGraph = {
title: fullTitle,
description,
url,
};
if (title) {
openGraph.images = [{
url: `https://i.microlink.io/${encodeURIComponent(cardURL)}`,
alt: title,
width: 1686,
height: 948,
}];
}
return (
<>
<NextSEO
canonical={url}
description={description}
openGraph={openGraph}
title={fullTitle}
/>
{showNav && <Nav />}
<Box as="main">
{children}
</Box>
<Footer />
</>
);
};
export default Page;
| 35.152542
| 953
| 0.788814
|
edb3081a2fa9309a36efcc87de0474908dd8401d
| 3,116
|
lua
|
Lua
|
script/c62873541.lua
|
Xargs007/CardsCustom
|
ef54658d3b3d8600e41759ad58e031cb6301b6c2
|
[
"CC0-1.0"
] | null | null | null |
script/c62873541.lua
|
Xargs007/CardsCustom
|
ef54658d3b3d8600e41759ad58e031cb6301b6c2
|
[
"CC0-1.0"
] | null | null | null |
script/c62873541.lua
|
Xargs007/CardsCustom
|
ef54658d3b3d8600e41759ad58e031cb6301b6c2
|
[
"CC0-1.0"
] | null | null | null |
--蒼眼の銀龍
function c62873541.initial_effect(c)
--synchro summon
--aux.AddSynchroProcedure(c,nil,aux.NonTuner(Card.IsType,TYPE_NORMAL),1)
aux.AddFusionProcCodeFun(c,(89631139),aux.FilterBoolFunction(Card.IsRace,RACE_DRAGON),1,false,false)
c:EnableReviveLimit()
--spsummon fusion condition
local e1=Effect.CreateEffect(c)
e1:SetType(EFFECT_TYPE_SINGLE)
e1:SetProperty(EFFECT_FLAG_CANNOT_DISABLE+EFFECT_FLAG_UNCOPYABLE)
e1:SetCode(EFFECT_SPSUMMON_CONDITION)
e1:SetValue(aux.fuslimit)
c:RegisterEffect(e1)
--negate
local e2=Effect.CreateEffect(c)
e2:SetDescription(aux.Stringid(62873541,0))
e2:SetType(EFFECT_TYPE_SINGLE+EFFECT_TYPE_TRIGGER_F)
e2:SetCode(EVENT_SPSUMMON_SUCCESS)
e2:SetOperation(c62873541.effop)
c:RegisterEffect(e2)
--spsumon
local e3=Effect.CreateEffect(c)
e3:SetDescription(aux.Stringid(62873541,0))
e3:SetCategory(CATEGORY_SPECIAL_SUMMON)
e3:SetType(EFFECT_TYPE_FIELD+EFFECT_TYPE_TRIGGER_O)
e3:SetProperty(EFFECT_FLAG_CARD_TARGET)
e3:SetRange(LOCATION_MZONE)
e3:SetCode(EVENT_PHASE+PHASE_STANDBY)
e3:SetCountLimit(1)
e3:SetCondition(c62873541.spcon)
e3:SetTarget(c62873541.sptg)
e3:SetOperation(c62873541.spop)
c:RegisterEffect(e3)
local e4=Effect.CreateEffect(c)
e4:SetType(EFFECT_TYPE_SINGLE)
e4:SetProperty(EFFECT_FLAG_SINGLE_RANGE)
e4:SetCode(EFFECT_CHANGE_CODE)
e4:SetRange(LOCATION_MZONE+LOCATION_GRAVE)
e4:SetValue(89631139)
c:RegisterEffect(e4)
end
function c62873541.filter(c)
return c:IsFaceup() and c:IsRace(RACE_DRAGON)
end
function c62873541.effop(e,tp,eg,ep,ev,re,r,rp)
local c=e:GetHandler()
local g=Duel.GetMatchingGroup(c62873541.filter,tp,LOCATION_MZONE,0,nil)
local tc=g:GetFirst()
while tc do
local e1=Effect.CreateEffect(c)
e1:SetType(EFFECT_TYPE_SINGLE)
e1:SetCode(EFFECT_INDESTRUCTABLE_EFFECT)
e1:SetProperty(EFFECT_FLAG_CANNOT_DISABLE)
e1:SetValue(1)
e1:SetReset(RESET_EVENT+0x1fe0000+RESET_PHASE+PHASE_END,2)
tc:RegisterEffect(e1)
local e2=Effect.CreateEffect(c)
e2:SetType(EFFECT_TYPE_SINGLE)
e2:SetCode(EFFECT_CANNOT_BE_EFFECT_TARGET)
e2:SetProperty(EFFECT_FLAG_CANNOT_DISABLE)
e2:SetValue(aux.tgval)
e2:SetReset(RESET_EVENT+0x1fe0000+RESET_PHASE+PHASE_END,2)
tc:RegisterEffect(e2)
tc=g:GetNext()
end
end
function c62873541.spcon(e,tp,eg,ep,ev,re,r,rp)
return Duel.GetTurnPlayer()==tp
end
function c62873541.spfilter(c,e,tp)
return c:IsSetCard(0xdd) and c:IsCanBeSpecialSummoned(e,0,tp,false,false)
end
function c62873541.sptg(e,tp,eg,ep,ev,re,r,rp,chk,chkc)
if chkc then return chkc:IsLocation(LOCATION_GRAVE) and chkc:IsControler(tp) and c62873541.spfilter(chkc,e,tp) end
if chk==0 then return Duel.GetLocationCount(tp,LOCATION_MZONE)>0
and Duel.IsExistingTarget(c62873541.spfilter,tp,LOCATION_GRAVE,0,1,nil,e,tp) end
Duel.Hint(HINT_SELECTMSG,tp,HINTMSG_SPSUMMON)
local g=Duel.SelectTarget(tp,c62873541.spfilter,tp,LOCATION_GRAVE,0,1,1,nil,e,tp)
Duel.SetOperationInfo(0,CATEGORY_SPECIAL_SUMMON,g,1,0,0)
end
function c62873541.spop(e,tp,eg,ep,ev,re,r,rp)
local tc=Duel.GetFirstTarget()
if tc and tc:IsRelateToEffect(e) then
Duel.SpecialSummon(tc,0,tp,tp,false,false,POS_FACEUP)
end
end
| 35.816092
| 115
| 0.807766
|
5f2c00794f9227b074dfd66d8577ba294e52b31c
| 1,026
|
rb
|
Ruby
|
api/app/models/enquiry.rb
|
jollopre/mps
|
4917cc5477861cda5939fca7a2441fcfdfe96723
|
[
"MIT"
] | null | null | null |
api/app/models/enquiry.rb
|
jollopre/mps
|
4917cc5477861cda5939fca7a2441fcfdfe96723
|
[
"MIT"
] | 61
|
2017-01-29T22:47:12.000Z
|
2019-04-09T19:33:11.000Z
|
api/app/models/enquiry.rb
|
jollopre/mps
|
4917cc5477861cda5939fca7a2441fcfdfe96723
|
[
"MIT"
] | null | null | null |
class Enquiry < ApplicationRecord
belongs_to :quotation
belongs_to :product
has_many :feature_values, dependent: :destroy
has_many :features, through: :feature_values
has_and_belongs_to_many :composed_emails
validates :quantity, numericality: { only_integer: true, greater_than_or_equal_to: 0 }
validates :quantity2, numericality: { only_integer: true, greater_than_or_equal_to: 0 }
validates :quantity3, numericality: { only_integer: true, greater_than_or_equal_to: 0 }
after_create :insert_default_value_every_feature
def as_json(options = nil)
return super({
only: [:id, :quantity, :quantity2, :quantity3, :quotation_id, :product_id],
}).merge('feature_values' => feature_values_to_h) unless options
super(options)
end
private
def insert_default_value_every_feature
FeatureValue::BULK_INSERT_DEFAULT_VALUE_QUERY.call(self.product.features, self)
end
def feature_values_to_h()
self.feature_values.reduce({}) { |h, fv| h["#{fv.id}"] = fv.as_json(); h }
end
end
| 33.096774
| 89
| 0.754386
|
173ee340a300ee42132cf985f06e81c6f6427414
| 411
|
sql
|
SQL
|
service/src/main/schema/scripts/common/003_PROJECT_DATA/030_PROJECT_BLACKLIST_DATA.sql
|
SAP-samples/smp-mobiliser-template
|
12a2b574151abab2ce7b94e74ce1428b28c06c35
|
[
"Apache-2.0"
] | null | null | null |
service/src/main/schema/scripts/common/003_PROJECT_DATA/030_PROJECT_BLACKLIST_DATA.sql
|
SAP-samples/smp-mobiliser-template
|
12a2b574151abab2ce7b94e74ce1428b28c06c35
|
[
"Apache-2.0"
] | 6
|
2021-03-29T21:01:52.000Z
|
2022-02-02T22:25:01.000Z
|
service/src/main/schema/scripts/common/003_PROJECT_DATA/030_PROJECT_BLACKLIST_DATA.sql
|
SAP-samples/smp-mobiliser-template
|
12a2b574151abab2ce7b94e74ce1428b28c06c35
|
[
"Apache-2.0"
] | 1
|
2022-02-02T01:57:18.000Z
|
2022-02-02T01:57:18.000Z
|
INSERT INTO MOB_ERROR_CODES (ID_ERROR_CODE, ID_ERROR_LEVEL, STR_INFORMATION) VALUES (10101, 'INFO', 'NAME ON BLACKLIST:Name has been on blacklist');
INSERT INTO MOB_ERROR_CODES (ID_ERROR_CODE, ID_ERROR_LEVEL, STR_INFORMATION) VALUES (10102, 'INFO', 'NAME PROBABLE ON BLACKLIST: Name has a likely match on blacklist');
INSERT INTO CUS_BLACKLIST_TYPES (ID_BLACKLIST_TYPE, STR_BLACKLIST_TYPE) VALUES (0, 'OFAC');
| 82.2
| 168
| 0.800487
|
c99bfb7e4305ae215080ce1f2113c11f54d31088
| 627
|
ts
|
TypeScript
|
packages/typeorm/src/typeorm.sql.interface.ts
|
mobilejazz/harmony-nest-ts
|
445753902de2e5d0cf7611dd59bb5b2ee27e843d
|
[
"Apache-2.0"
] | 5
|
2019-03-08T08:16:03.000Z
|
2022-01-28T14:55:14.000Z
|
packages/typeorm/src/typeorm.sql.interface.ts
|
mobilejazz/harmony-nest-ts
|
445753902de2e5d0cf7611dd59bb5b2ee27e843d
|
[
"Apache-2.0"
] | 30
|
2019-12-20T10:55:07.000Z
|
2021-11-24T17:16:21.000Z
|
packages/typeorm/src/typeorm.sql.interface.ts
|
mobilejazz/harmony-typescript
|
445753902de2e5d0cf7611dd59bb5b2ee27e843d
|
[
"Apache-2.0"
] | null | null | null |
import { EntityManager } from 'typeorm';
import { SQLInterface } from '@mobilejazz/harmony-core';
export class TypeORMSQLInterface implements SQLInterface {
constructor(private readonly entityManager: EntityManager) {}
query(query: string, parameters?: any[]): Promise<any> {
return this.entityManager.query(query, parameters);
}
transaction<T>(runInTransaction: (sqlInterface: SQLInterface) => Promise<T>): Promise<T> {
return this.entityManager.transaction((entityManager: EntityManager) => {
return runInTransaction(new TypeORMSQLInterface(entityManager));
});
}
}
| 36.882353
| 94
| 0.708134
|
a19633b58fafc7fdb2262556171b07101476e6b4
| 1,261
|
ts
|
TypeScript
|
src/isadom-models/produit/appareil/adapter/ConfigAppTiersAdapter.ts
|
ptrkvsky/nomad
|
f216842be9a8578311ef4e0ad2ef34523bc5ab40
|
[
"MIT"
] | null | null | null |
src/isadom-models/produit/appareil/adapter/ConfigAppTiersAdapter.ts
|
ptrkvsky/nomad
|
f216842be9a8578311ef4e0ad2ef34523bc5ab40
|
[
"MIT"
] | 7
|
2021-12-02T23:44:06.000Z
|
2021-12-26T08:18:25.000Z
|
src/isadom-models/produit/appareil/adapter/ConfigAppTiersAdapter.ts
|
ptrkvsky/nomad
|
f216842be9a8578311ef4e0ad2ef34523bc5ab40
|
[
"MIT"
] | null | null | null |
import { Adapter } from '../../../_adapter';
import { ConfigAppareilsTiersAPI } from '../api';
import { ConfigAppTiersModel } from '../front';
import { TiersAdapter } from '../../..';
import { ConfigProduitSpecAdapter } from '../..';
export class ConfigAppTiersAdapter extends Adapter<
ConfigAppTiersModel,
ConfigAppareilsTiersAPI
> {
private tiersAdapter = new TiersAdapter();
private configProduitSpecAdapter = new ConfigProduitSpecAdapter();
toClient(src: ConfigAppareilsTiersAPI): ConfigAppTiersModel {
if (!src) {
return null as unknown as ConfigAppTiersModel;
}
return {
tabTypeConfig: src.tabTypeConfig,
sTypeConfig: src.sTypeConfig,
tabconfigProduitSpec: this.configProduitSpecAdapter.toClients(
src.tabconfigProduitSpec,
),
tiers: this.tiersAdapter.toClient(src.tiers),
};
}
toAPI(src: ConfigAppTiersModel): ConfigAppareilsTiersAPI {
if (!src) {
return null as unknown as ConfigAppareilsTiersAPI;
}
return {
tabTypeConfig: src.tabTypeConfig,
sTypeConfig: src.sTypeConfig,
tabconfigProduitSpec: this.configProduitSpecAdapter.toAPIs(
src.tabconfigProduitSpec,
),
tiers: this.tiersAdapter.toAPI(src.tiers),
};
}
}
| 28.659091
| 68
| 0.694687
|
c56ef574998cfe8997b46408472a863ec0bb65a4
| 1,313
|
css
|
CSS
|
_styleguide/ui/main.reel/main.css
|
marchant/filament
|
8c83401c4ee1d3ba9c9af3cbe99b666eb6ae4d47
|
[
"BSD-3-Clause"
] | null | null | null |
_styleguide/ui/main.reel/main.css
|
marchant/filament
|
8c83401c4ee1d3ba9c9af3cbe99b666eb6ae4d47
|
[
"BSD-3-Clause"
] | 2
|
2019-01-15T16:57:29.000Z
|
2019-01-15T16:58:34.000Z
|
_styleguide/ui/main.reel/main.css
|
montagestudio/filament
|
8bffdb07a98dc312a65ac35523628510619b5eb7
|
[
"BSD-3-Clause"
] | null | null | null |
* {
-moz-box-sizing: border-box;
box-sizing: border-box;
}
html {
font-family: Clear Sans, sans-serif;
font-size: 16px;
color: hsl(0, 0%, 50%);
background-color: hsl(0, 0%, 92%);
}
body {
margin: 0;
font-size: 16px;
}
a {
text-decoration: none;
color: hsl(0, 0%, 40%);
}
a:hover { color: hsl(0, 0%, 50%); }
a:active { color: hsl(0,0%,20%); }
h1 {
margin: 1em 0;
text-align: center;
font-size: 4em;
font-weight: 300;
color: hsl(0, 0%, 31%);
}
.hl {
color: hsl(0, 0%, 30%);
}
h2 {
margin: 1em;
text-align: center;
font-size: 2em;
font-weight: normal;
}
table {
width: 100%;
margin: 0 auto 100px auto;
padding: 0;
list-style: none;
border-spacing: 0;
border-bottom: 1px solid hsla(0,0%,0%,.1);
}
th {
font-size: 1.2em;
font-weight: normal;
padding: 10px;
}
td {
text-align: center;
padding: 20px;
border-top: 1px solid hsla(0,0%,0%,.06);
}
td:nth-child(1) {
font-family: monospace;
}
td:nth-child(3) {
border-top-color: hsla(0,0%,100%,.06);
}
td:nth-child(1) { background: hsl(0, 0%, 88%); color: hsl(0, 0%, 50%);}
td:nth-child(2) { background: hsl(0, 0%, 92%); color: hsl(0, 0%, 30%);}
td:nth-child(3) { background: hsl(0, 0%, 20%); color: hsl(0, 0%, 70%);}
| 16.620253
| 71
| 0.544554
|
9d161fda6882bf764ffbf21deac1bcdaf67d5f2e
| 925
|
sh
|
Shell
|
vpn.lbaction/Contents/Scripts/media.sh
|
Sihan001/lbActions
|
7729d5bbe35aea39c0652daf24a0117cca5c149d
|
[
"Apache-2.0"
] | null | null | null |
vpn.lbaction/Contents/Scripts/media.sh
|
Sihan001/lbActions
|
7729d5bbe35aea39c0652daf24a0117cca5c149d
|
[
"Apache-2.0"
] | null | null | null |
vpn.lbaction/Contents/Scripts/media.sh
|
Sihan001/lbActions
|
7729d5bbe35aea39c0652daf24a0117cca5c149d
|
[
"Apache-2.0"
] | 1
|
2018-09-18T14:27:39.000Z
|
2018-09-18T14:27:39.000Z
|
#!/bin/bash
ICON_END=".png"
ICON_WIFI="wifi-4.png"
ICON_WIFI_="wifi-"
ICON_WIFI_4="wifi-4.png"
ICON_WIFI_3="wifi-3.png"
ICON_WIFI_2="wifi-2.png"
ICON_WIFI_1="wifi-1.png"
ICON_WIFI_LOCK="wifi-lock-4.png"
ICON_WIFI_LOCK_="wifi-lock-"
ICON_WIFI_LOCK_4="wifi-lock-4.png"
ICON_WIFI_LOCK_3="wifi-lock-3.png"
ICON_WIFI_LOCK_2="wifi-lock-2.png"
ICON_WIFI_LOCK_1="wifi-lock-1.png"
ICON_WIFI_STAR="wifi-star-4.png"
ICON_WIFI_STAR_="wifi-star-"
ICON_WIFI_STAR_4="wifi-star-4.png"
ICON_WIFI_STAR_3="wifi-star-3.png"
ICON_WIFI_STAR_2="wifi-star-2.png"
ICON_WIFI_STAR_1="wifi-star-1.png"
ICON_WIFI_ACTIVE="wifi-active-4.png"
ICON_WIFI_ACTIVE_="wifi-active-"
ICON_WIFI_ACTIVE_4="wifi-active-4.png"
ICON_WIFI_ACTIVE_3="wifi-active-3.png"
ICON_WIFI_ACTIVE_2="wifi-active-2.png"
ICON_WIFI_ACTIVE_1="wifi-active-1.png"
ICON_WIFI_ERROR="wifi-error-4.png"
ICON_ETH="ethernet.png"
ICON_VPN="vpn.png"
ICON_VPN_CONNECTED="vpn-connected.png"
| 23.717949
| 38
| 0.784865
|
e7ec54b8613934514b39937004c04eef54135985
| 286
|
lua
|
Lua
|
scripts/globals/items/scroll_of_shellra_iv.lua
|
PaulAnthonyReitz/topaz
|
ffa3a785f86ffdb2f6a5baf9895b649e3e3de006
|
[
"FTL"
] | 6
|
2021-06-01T04:17:10.000Z
|
2021-06-01T04:32:21.000Z
|
scripts/globals/items/scroll_of_shellra_iv.lua
|
PaulAnthonyReitz/topaz
|
ffa3a785f86ffdb2f6a5baf9895b649e3e3de006
|
[
"FTL"
] | 5
|
2020-04-10T19:33:53.000Z
|
2021-06-27T17:50:05.000Z
|
scripts/globals/items/scroll_of_shellra_iv.lua
|
PaulAnthonyReitz/topaz
|
ffa3a785f86ffdb2f6a5baf9895b649e3e3de006
|
[
"FTL"
] | 2
|
2020-04-11T16:56:14.000Z
|
2021-06-26T12:21:12.000Z
|
-----------------------------------------
-- ID: 4741
-- Scroll of Shellra IV
-- Teaches the white magic Shellra IV
-----------------------------------------
function onItemCheck(target)
return target:canLearnSpell(133)
end
function onItemUse(target)
target:addSpell(133)
end
| 20.428571
| 41
| 0.527972
|
ee1b0261d799f6d3a8e0b8b40a57aa3c79e1699f
| 49,719
|
sql
|
SQL
|
smp7.sql
|
GalihDC/Web-Profil-SMPN-7-Surakarta
|
94b5a3333a780de4d4262120bca1071921d0450e
|
[
"MIT"
] | null | null | null |
smp7.sql
|
GalihDC/Web-Profil-SMPN-7-Surakarta
|
94b5a3333a780de4d4262120bca1071921d0450e
|
[
"MIT"
] | null | null | null |
smp7.sql
|
GalihDC/Web-Profil-SMPN-7-Surakarta
|
94b5a3333a780de4d4262120bca1071921d0450e
|
[
"MIT"
] | null | null | null |
-- phpMyAdmin SQL Dump
-- version 4.7.4
-- https://www.phpmyadmin.net/
--
-- Host: 127.0.0.1
-- Generation Time: 31 Des 2019 pada 02.44
-- Versi Server: 10.1.26-MariaDB
-- PHP Version: 7.1.9
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET AUTOCOMMIT = 0;
START TRANSACTION;
SET time_zone = "+00:00";
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;
--
-- Database: `smp7`
--
-- --------------------------------------------------------
--
-- Struktur dari tabel `agenda`
--
CREATE TABLE `agenda` (
`agenda_id` int(11) NOT NULL,
`agenda_nama` varchar(200) DEFAULT NULL,
`agenda_tanggal` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`agenda_deskripsi` text,
`agenda_mulai` date DEFAULT NULL,
`agenda_selesai` date DEFAULT NULL,
`agenda_tempat` varchar(90) DEFAULT NULL,
`agenda_waktu` varchar(30) DEFAULT NULL,
`agenda_keterangan` varchar(200) DEFAULT NULL,
`agenda_author` varchar(60) DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data untuk tabel `agenda`
--
INSERT INTO `agenda` (`agenda_id`, `agenda_nama`, `agenda_tanggal`, `agenda_deskripsi`, `agenda_mulai`, `agenda_selesai`, `agenda_tempat`, `agenda_waktu`, `agenda_keterangan`, `agenda_author`) VALUES
(1, 'Sand', '2019-12-22 08:02:23', 'Idul Adha yang biasa disebut lebaran haji atapun lebaran kurban sangat identik dengan penyembelihan hewan kurban. M-Sekolah tahun ini juga melakukan penyembelihan hewan kurban. Yang rencananya akan dihadiri oleh guru-guru, siswa dan pengurus OSIS.', '2017-01-22', '2017-01-22', 'SMP 7', '08.00 - 11.00 WIB', 'Dihadiri oleh guru-guru, siswa dan pengurus OSIS', 'Bambang'),
(2, 'Sandiiii', '2019-12-23 01:26:05', 'Peluncuran website resmi M-Sekolah, sebagai media informasi dan akademik online untuk pelayanan pendidikan yang lebih baik kepada siswa, orangtua, dan masyarakat pada umumnya semakin meningkat.', '2017-01-04', '2017-01-04', 'SMP 7', '07.30 - 12.00 WIB', 'ada', 'Bambang'),
(3, 'Penerimaan Raport Semester Ganjil Tahun Ajaran 2017-2018', '2017-01-22 06:29:49', 'Berakhirnya semester ganjil tahun pelajaran 2016-2017, ditandai dengan pembagian laporan hasil belajar.', '2017-02-17', '2017-02-17', 'SMP 7', '07.30 - 12.00 WIB', 'Untuk kelas XI dan XII, pembagian raport dimulai pukul 07.30 WIB. Sedangkan untuk kelas X pada pukul 09.00 WIB. Raport diambil oleh orang tua/wali murid masing-masing.', 'Bambang');
-- --------------------------------------------------------
--
-- Struktur dari tabel `album`
--
CREATE TABLE `album` (
`album_id` int(11) NOT NULL,
`album_nama` varchar(50) DEFAULT NULL,
`album_tanggal` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`album_author` varchar(60) DEFAULT NULL,
`album_count` int(11) DEFAULT '0',
`album_cover` varchar(40) DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data untuk tabel `album`
--
INSERT INTO `album` (`album_id`, `album_nama`, `album_tanggal`, `album_author`, `album_count`, `album_cover`) VALUES
(1, 'Kedatangan Tamu Asing', '2016-09-08 13:00:55', 'saya', 5, '202aa754590dfc1070c624bad294abbc.jpg'),
(3, 'Pemilu Osis 2016-2017', '2017-01-21 01:58:16', 'saya', 3, 'dc088a9fb62333012ff7a601828219d7.jpg'),
(4, 'Kegiatan Belajar Siswa', '2017-01-24 01:31:13', 'saya', 7, '203bc0411a07ed0430d39bcc38ec2c56.jpg'),
(7, 'bar bar', '2019-12-25 09:16:10', 'sandi', 0, '440b88b2ef03e64f9db9e93a7418a33b.jpg');
-- --------------------------------------------------------
--
-- Struktur dari tabel `berita`
--
CREATE TABLE `berita` (
`tulisan_id` int(11) NOT NULL,
`tulisan_judul` varchar(100) DEFAULT NULL,
`tulisan_isi` text,
`tulisan_tanggal` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`tulisan_kategori_id` int(11) DEFAULT NULL,
`tulisan_kategori_nama` varchar(30) DEFAULT NULL,
`tulisan_views` int(11) DEFAULT '0',
`tulisan_gambar` varchar(40) DEFAULT NULL,
`tulisan_pengguna_id` int(11) DEFAULT NULL,
`tulisan_author` varchar(40) DEFAULT NULL,
`tulisan_img_slider` int(2) NOT NULL DEFAULT '0'
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data untuk tabel `berita`
--
INSERT INTO `berita` (`tulisan_id`, `tulisan_judul`, `tulisan_isi`, `tulisan_tanggal`, `tulisan_kategori_id`, `tulisan_kategori_nama`, `tulisan_views`, `tulisan_gambar`, `tulisan_pengguna_id`, `tulisan_author`, `tulisan_img_slider`) VALUES
(18, 'Belajar di luar ruangan merupakan cara yang efektif dalam belajar', '<p>Menjalani aktifitas belajar yang padat, terkadang dapat meningkatkan stres yang tinggi bagi siswa. Setiap siswa memiliki cara yang berbeda untuk mengembalikan semangat mereka dalam belajar.</p>\r\n\r\n<p> </p>\r\n\r\n<p>Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel.</p>\r\n\r\n<p>Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel v Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel.</p>\r\n\r\n<p> </p>\r\n\r\n<p>Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel.</p>\r\n\r\n<p>Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel.</p>\r\n', '2017-05-17 
09:15:54', 1, 'Pendidikan', 7, '2dc57557d0e3902753e5e88979119e67.jpg', 1, 'M Fikri Setiadi', 1),
(19, 'Cantik, multi talenta. itulah sebuat untuk siswa yang satu ini', '<p>Talenta luar biasa bukan hanya dimiliki oleh kaum pria saja. Nyatanya siswa cantik yang satu ini sangat berakat memainkan alat musik tradisional yaitu seruling. Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel .</p>\r\n\r\n<p> </p>\r\n\r\n<p>Ini adalah sampel artikel Ini adalah sampel artikel v Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel vv Ini adalah sampel artikel Ini adalah sampel artikel vvv Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel .</p>\r\n\r\n<p> </p>\r\n\r\n<p>Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel 
Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel .</p>\r\n\r\n<p> </p>\r\n\r\n<p>Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel.</p>\r\n\r\n<p> </p>\r\n\r\n<p>Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel.</p>\r\n', '2017-05-17 09:21:24', 6, 'Prestasi', 4, '2c4b0960f1ca907c24af163b29ae1cec.jpg', 1, 'M Fikri Setiadi', 0),
(20, 'Persiapan siswa menjelang ujian nasional', '<p>Banyak metode bejalar yang dilakukan oleh siswa untuk persiapan menghadapi ujian nasional (UN). Biantaranya mengingat dengan metode Mind Map, ataupun bejalar diluar kelas (outdoor). Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel.</p>\r\n\r\n<p> </p>\r\n\r\n<p>Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel.</p>\r\n\r\n<p> </p>\r\n\r\n<p>Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah 
sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel.</p>\r\n\r\n<p> </p>\r\n\r\n<p>Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel.</p>\r\n\r\n<p> </p>\r\n\r\n<p>Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel.</p>\r\n\r\n<p> </p>\r\n\r\n<p>Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel.</p>\r\n', '2017-05-17 09:24:42', 1, 'Pendidikan', 1, 'df0d8ccac0ba567fe01079b0477c5c12.jpg', 1, 'M Fikri Setiadi', 1),
(21, 'Siswi alai M-Sekolah', '<p>Remaja beumur 15-22 tahun, biasa beperilaku alai dan cenderung lebai. Di M-Sekolah sendiri ada begitu banyak siswa yang berperilaku alai dan lebai. Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel.</p>\r\n\r\n<p>Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel.</p>\r\n\r\n<p>Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel.</p>\r\n\r\n<p>Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini 
adalah sampel artikel Ini adalah sampel artikel.</p>\r\n\r\n<p>Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel.</p>\r\n\r\n<p>Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel.</p>\r\n\r\n<p>Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel.</p>\r\n\r\n<p>Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel.</p>\r\n', '2017-05-17 09:34:28', 1, 'Pendidikan', 6, '20b326d88bc3454a82624a671021e1da.jpg', 1, 'M Fikri Setiadi', 0),
(22, 'Prestasi membangga dari siswa m-sekolah', '<p>Prestasi dan penghargaan merupakan trigger (pemicu) semangat belajar siswa. Ada banyak prestasi yang telah diraih oleh siswa m-sekolah. Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel.</p>\r\n\r\n<p>Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel.</p>\r\n\r\n<p>Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel.</p>\r\n\r\n<p>Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel.</p>\r\n\r\n<p>Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini 
adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel.</p>\r\n\r\n<p>Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel.</p>\r\n', '2017-05-17 09:38:21', 6, 'Prestasi', 2, '2b594ea1f1b782e89f10f572ce193122.jpg', 1, 'M Fikri Setiadi', 1),
(23, 'Pelaksanaan Ujian Nasional M-Sekolah', '<p>Pelaksanaan UN (Ujian Nasional) di sekolah M-Sekolah berlangsung tentram dan damai. Terlihat ketenangan terpancar diwajah siswa berprestasi. Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel.</p>\r\n\r\n<p>Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel.</p>\r\n\r\n<p>Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel.</p>\r\n\r\n<p>Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel.</p>\r\n\r\n<p>Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini 
adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel.</p>\r\n', '2017-05-17 09:41:30', 1, 'Pendidikan', 5, '6866d36b2085f40dd4d5b18b664af6bc.jpg', 1, 'M Fikri Setiadi', 0),
(24, 'Proses belajar mengajar m-sekolah', '<p>Proses belajar mengajar di sekolah m-sekolah berlangsung menyenangkan. Didukung oleh instruktur yang fun dengan metode mengajar yang tidak biasa. Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel.</p>\r\n\r\n<p>Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel a Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel artikel Ini adalah sampel artikel Ini adalah sampel artikel.</p>\r\n\r\n<p>Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel .</p>\r\n\r\n<p>Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel.</p>\r\n\r\n<p>Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel 
Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel Ini adalah sampel artikel.</p>\r\n', '2017-05-17 09:46:29', 1, 'Pendidikan', 3, '67bac7f1c3891034cf9d55e66c44aa10.jpg', 1, 'M Fikri Setiadi', 1);
-- --------------------------------------------------------
--
-- Struktur dari tabel `data`
--
CREATE TABLE `data` (
`data_id` int(11) NOT NULL,
`data_sambutan` text NOT NULL,
`data_visi` text NOT NULL,
`data_misi` text NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data untuk tabel `data`
--
INSERT INTO `data` (`data_id`, `data_sambutan`, `data_visi`, `data_misi`) VALUES
(0, ' \r\nPuji syukur kami panjatkan ke hadirat Tuhan Yang Maha Esa atas karunia dan hidayah-Nya, sehingga kita semua dapat membaktikan segala hal yang kita miliki untuk kemajuan dunia pendidikan. Apapun bentuk dan sumbangsih yang kita berikan, jika dilandasi niat yang tulus tanpa memandang imbalan apapun akan menghasilkan mahakarya yang agung untuk bekal kita dan generasi setelah kita. Pendidikan adalah harga mati untuk menjadi pondasi bangsa dan negara dalam menghadapi perkembangan zaman. Hal ini seiring dengan penguasaan teknologi untuk dimanfaatkan sebaik mungkin, sehingga menciptakan iklim kondusif dalam ranah keilmuan. Dengan konsep yang kontekstual dan efektif, kami mengejewantahkan nilai-nilai pendidikan yang tertuang dalam visi misi M-Sekolah, sebagai panduan hukum dalam menjabarkan tujuan hakiki pendidikan.\r\n\r\n\r\nDalam sebuah sistem ketata kelolaan Sekolah Berbasis Manajemen, kami berusaha terus meningkatkan kinerja dan profesionalisme demi terwujudnya pelayanan prima dalam cakupan Lembaga Pendidikan terutama di Sekolah M-Sekolah ini. Kami sudah mulai menerapkan sistem Teknologi Komputerisasi agar transparansi pengelolaan pendidikan terjaga optimalisasinya. Sebuah sistem akan bermanfaat dan berdaya guna tinggi jika didukung dan direalisasikan oleh semua komponen yang berkompeten di M-Sekolah baik sistem manajerial, akademik, pelayanan publik, prestasi,moralitas dan semua hal yang berinteraksi di dalamnya. Alhamdulilah peningkatan tersebut dapat dilihat dari data-data kepegawaian dan karya-karya nyata yang telah dihasilkan walaupun masih ada kelemahan yang terus kami treatment dengan menyeimbangkan hasil kinerja dan prize yang diberikan. Mudah-mudahan semua yang kita berikan untuk kemajuan dan keajegan nilai-nilai pendidikan dapat terus meningkat. \r\n\r\n\r\nSecara pribadi saya mohon maaf, jika pemenuhan tuntutan dan kinerja yang saya lakukan masih ada kelemahan. 
Oleh karena itu, bantuan dan kerjasama dari berbagai pihak untuk optimalisasi mutu dan kualitas pendidikan sangat saya harapkan. Mudah-mudahan dalam tiap langkah dan nafas kita menciptakan nilai jual yang tinggi bagi keilmuan dan nilai hakiki di hadapan Tuhan Yang Maha Esa.\r\nDemikian sambutan ini saya sampaikan, ditutup dengan pesan moral dan keilmuan bagi kita semua.\r\n', ' Ini adalah sampel visi ini adalah sampel visi ini adalah sampel visi ini adalah sampel visi ini adalah sampel visi ini adalah sampel visi ini adalah sampel visi ini adalah sampel visi ini adalah sampel visi ini adalah sampel visi ini adalah sampel visi ini adalah sampel visi ini adalah sampel visi ini adalah sampel visi ini adalah sampel visi ini adalah sampel visi ini adalah sampel visi ini adalah sampel visi ini adalah sampel ini adalah sampel visi ini adalah sampel visi.', 'Ini adalah sampel misi Ini adalah sampel misi Ini adalah sampel misi Ini adalah sampel misi Ini adalah sampel misi Ini adalah sampel misi Ini adalah sampel misi Ini adalah sampel misi Ini adalah sampel misi. Ini adalah sampel misi Ini adalah sampel misi Ini adalah sampel misi Ini adalah sampel misi Ini adalah sampel misi Ini adalah sampel misi Ini adalah sampel misi Ini adalah sampel misi Ini adalah sampel misi.Ini adalah sampel misi Ini adalah sampel misi Ini adalah sampel misi Ini adalah sampel misi');
-- --------------------------------------------------------
--
-- Struktur dari tabel `files`
--
CREATE TABLE `files` (
`file_id` int(11) NOT NULL,
`file_judul` varchar(120) DEFAULT NULL,
`file_deskripsi` text,
`file_tanggal` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`file_oleh` varchar(60) DEFAULT NULL,
`file_download` int(11) DEFAULT '0',
`file_data` varchar(120) DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data untuk tabel `files`
--
INSERT INTO `files` (`file_id`, `file_judul`, `file_deskripsi`, `file_tanggal`, `file_oleh`, `file_download`, `file_data`) VALUES
(2, 'Dasar-dasar CSS', 'Modul dasar-dasar CSS 3. Modul ini membantu anda untuk memahami struktur dasar CSS', '2017-01-23 04:30:01', 'Drs. Joko', 0, 'ab9a183ff240deadbedaff78e639af2f.pdf'),
(3, '14 Teknik Komunikasi Yang Paling Efektif', 'Ebook 14 teknik komunikasi paling efektif membantu anda untuk berkomunikasi dengan baik dan benar', '2017-01-23 15:26:06', 'Drs. Joko', 0, 'ab2cb34682bd94f30f2347523112ffb9.pdf'),
(4, 'Bagaimana Membentuk Pola Pikir yang Baru', 'Ebook ini membantu anda membentuk pola pikir baru.', '2017-01-23 15:27:07', 'Drs. Joko', 0, '30f588eb5c55324f8d18213f11651855.pdf'),
(5, '7 Tips Penting mengatasi Kritik', '7 Tips Penting mengatasi Kritik', '2017-01-23 15:27:44', 'Drs. Joko', 0, '329a62b25ad475a148e1546aa3db41de.docx'),
(6, '8 Racun dalam kehidupan kita', '8 Racun dalam kehidupan kita', '2017-01-23 15:28:17', 'Drs. Joko', 0, '8e38ad4948ba13758683dea443fbe6be.docx'),
(7, 'Jurnal Teknolgi Informasi', 'Jurnal Teknolgi Informasi', '2017-01-25 03:18:53', 'Gunawan, S.Pd', 0, '87ae0f009714ddfdd79e2977b2a64632.pdf'),
(8, 'Jurnal Teknolgi Informasi 2', 'Jurnal Teknolgi Informasi', '2017-01-25 03:19:22', 'Gunawan, S.Pd', 0, 'c4e966ba2c6e142155082854dc5b3602.pdf'),
(9, 'Naskah Publikasi IT', 'Naskah Teknolgi Informasi', '2017-01-25 03:21:04', 'Gunawan, S.Pd', 0, '71380b3cf16a17a02382098c028ece9c.pdf'),
(10, 'Modul Teknologi Informasi', 'Modul Teknologi Informasi', '2017-01-25 03:22:08', 'Gunawan, S.Pd', 0, '029143a3980232ab2900d94df36dbb0c.pdf'),
(11, 'Modul Teknologi Informasi Part II', 'Modul Teknologi Informasi', '2017-01-25 03:22:54', 'Gunawan, S.Pd', 0, 'ea8f3f732576083156e509657614f551.pdf'),
(12, 'Modul Teknologi Informasi Part III', 'Modul Teknologi Informasi', '2017-01-25 03:23:21', 'Gunawan, S.Pd', 0, 'c5e5e7d16e4cd6c3d22c11f64b0db2af.pdf'),
(13, 's', NULL, '2019-12-22 10:29:52', 'BA,', 0, '9a0f3a2474648e063b128e29affcd7f0.docx');
-- --------------------------------------------------------
--
-- Struktur dari tabel `galeri`
--
CREATE TABLE `galeri` (
`galeri_id` int(11) NOT NULL,
`galeri_judul` varchar(60) DEFAULT NULL,
`galeri_tanggal` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`galeri_gambar` varchar(40) DEFAULT NULL,
`galeri_album_id` int(11) DEFAULT NULL,
`galeri_author` varchar(60) DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data untuk tabel `galeri`
--
INSERT INTO `galeri` (`galeri_id`, `galeri_judul`, `galeri_tanggal`, `galeri_gambar`, `galeri_album_id`, `galeri_author`) VALUES
(4, 'Diskusi Pemilihan Ketua Osis', '2019-12-15 03:44:12', '9b10fa300633f62f105e9b52789fc8f3.jpg', 3, 'saya'),
(5, 'Panitia Pemilu Osis', '2019-12-15 03:44:12', '0ec0c2f9aae6501d7ed7930995d48b57.jpg', 3, 'saya'),
(6, 'Proses Pemilu Osis', '2019-12-15 03:44:12', 'bfbe6cc1c8096f5f36c68e93da53c248.jpg', 3, 'saya'),
(7, 'Belajar dengan native speaker', '2019-12-15 03:44:12', '831e5ad43ccc3c851d50c128ff095541.jpg', 1, 'saya'),
(8, 'Diskusi dengan native speaker', '2019-12-15 03:44:12', '84afbf1d3ad45932f1d7ac47b8a00949.jpg', 1, 'saya'),
(9, 'Foto bareng native speaker', '2019-12-15 03:44:12', 'a99ab060d5d5bf8c96f24fe385f7dd8b.jpg', 1, 'saya'),
(10, 'Foto bareng native speaker', '2019-12-15 03:44:12', 'd70cedba6391b7b3c74b914efd82953f.jpg', 1, 'saya'),
(11, 'Foto bareng native speaker', '2019-12-15 03:44:12', '10de99f425b9961ce1e87c5e5575f8f4.jpg', 1, 'saya'),
(12, 'Belajar sambil bermain', '2019-12-15 03:44:12', '9df82241493b94d1e06b461129cf57b2.jpg', 4, 'saya'),
(13, 'Belajar sambil bermain', '2019-12-15 03:44:12', '5374415f11683ad6dd31572a7bbf8a7b.jpg', 4, 'saya'),
(14, 'Belajar komputer programming', '2019-12-15 03:44:12', '82b91bd35706b21c3ab04e205e358eb6.jpg', 4, 'saya'),
(15, 'Belajar komputer programming', '2019-12-15 03:44:12', '93048f2a103987bce8c8ec8d6912de06.jpg', 4, 'saya'),
(16, 'Belajar komputer programming', '2019-12-15 03:44:12', '41f46be181f2f8452c2041b5e79a05a5.jpg', 4, 'saya'),
(17, 'Belajar sambil bermain', '2019-12-15 03:44:12', '2858b0555c252690e293d29b922ba8e6.jpg', 4, 'saya'),
(18, 'Makan bersama', '2019-12-15 03:44:12', '90d67328e33a31d3f5eecd7dcb25b55d.jpg', 4, 'saya'),
(19, NULL, NULL, NULL, 0, 'Bambang');
-- --------------------------------------------------------
--
-- Struktur dari tabel `guru`
--
CREATE TABLE `guru` (
`guru_id` int(11) NOT NULL,
`guru_nip` varchar(30) DEFAULT NULL,
`guru_nama` varchar(70) DEFAULT NULL,
`guru_jenkel` varchar(2) DEFAULT NULL,
`guru_tmp_lahir` varchar(80) DEFAULT NULL,
`guru_tgl_lahir` varchar(80) DEFAULT NULL,
`guru_mapel` varchar(120) DEFAULT NULL,
`guru_photo` varchar(40) DEFAULT NULL,
`guru_tgl_input` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`guru_keterangan` varchar(100) NOT NULL,
`guru_jabatan_id` int(11) NOT NULL,
`guru_jenis_id` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data untuk tabel `guru`
--
INSERT INTO `guru` (`guru_id`, `guru_nip`, `guru_nama`, `guru_jenkel`, `guru_tmp_lahir`, `guru_tgl_lahir`, `guru_mapel`, `guru_photo`, `guru_tgl_input`, `guru_keterangan`, `guru_jabatan_id`, `guru_jenis_id`) VALUES
(2, '927482658274981', 'Thomas Mullerrrrr', 'P', 'Germanyyyyyy', '25 September 1901', 'Olahgarawanita', NULL, '2017-01-26 13:38:54', ' GURU ', 4, 1),
(3, '-', 'Joko Subroto', 'P', 'Jakarta', '25 September 1989', 'PPKN, Matematika', NULL, '2017-01-26 13:41:20', ' ', 4, 1),
(4, '-', 'Kusta Otomo', 'P', 'Jakarta', '25 September 1989', 'Seni Budaya', 'ff0cfc65b3c5ad39cfa8b4f31cc8b9a5.jpg', '2017-01-26 13:42:08', ' ', 4, 1),
(5, '-', 'Yuliani Ningsih', 'P', 'Padang', '27 September 1993', 'Bahasa Indonesia', NULL, '2017-01-26 13:42:48', ' ', 4, 1),
(6, '927482658274993', 'Ari Hidayat', 'L', 'Padang', '25 September 1993', 'Bahasa Inggris', NULL, '2017-01-26 13:43:46', '', 1, 1),
(7, '927482658274998', 'Irma Cantika', 'P', 'Padang', '25 September 1993', 'Bahasa Inggris, IPA', '4200d2514abf45755943526b74474c16.jpg', '2017-01-26 13:45:11', '', 1, 1),
(8, '-', 'Ririn Febriesta', 'P', 'Padang', '27 September 1994', 'Pend. Agama Islam', NULL, '2017-01-27 04:28:23', '', 1, 1),
(10, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2019-11-29 02:08:00', '', 0, 0),
(12, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2019-12-21 14:56:13', '', 1, 1),
(15, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2019-12-21 14:58:08', '', 1, 1),
(18, '12121', '', 'P', '', '', '', '771ad56889be58624f132021e77e0662.jpg', '2019-12-21 15:08:14', '', 1, 1),
(19, '1111', 'Sandiiiiiiiiiioooo', 'L', 'Solo', '1 Feb 1934', 'Fiska', '1c5927561007b1d8abe3a38569a1dd36.jpg', '2019-12-25 06:52:19', ' Kepala Sekolah', 2, 2),
(20, '123321', 'namamama', 'L', 'Solo', '1 Januarie 1992', 'Matematika', 'a2760293d9971bbde84a45b208a81e10.jpg', '2019-12-27 07:44:45', ' Wakil', 1, 2);
-- --------------------------------------------------------
--
-- Struktur dari tabel `inbox`
--
CREATE TABLE `inbox` (
`inbox_id` int(11) NOT NULL,
`inbox_nama` varchar(40) DEFAULT NULL,
`inbox_email` varchar(60) DEFAULT NULL,
`inbox_kontak` varchar(20) DEFAULT NULL,
`inbox_pesan` text,
`inbox_tanggal` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`inbox_status` int(11) DEFAULT '1' COMMENT '1=Belum dilihat, 0=Telah dilihat'
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data untuk tabel `inbox`
--
INSERT INTO `inbox` (`inbox_id`, `inbox_nama`, `inbox_email`, `inbox_kontak`, `inbox_pesan`, `inbox_tanggal`, `inbox_status`) VALUES
(2, 'Mamat', 'mamatgaming@gmail.com', '-', 'Assalamu\'alaikum', '2019-12-15 03:44:12', 0),
(3, 'coy', 'coygaming@gmail.com', '-', 'Ini adalah pesan ', '2019-12-15 03:44:12', 0),
(5, 'Sandi', 'sandibarbar@gmail.com', '-', 'Ping !', '2019-12-15 03:44:12', 0),
(7, 'Ucok', 'ucok@gmail.com', '-', 'Hi, there!', '2019-12-15 03:44:12', 0),
(10, 'Sando', 'iqbalnurhaq@ymail.com', '33', 'saya tinggalkan pesan disini\r\n', '2019-12-29 11:53:06', 1);
-- --------------------------------------------------------
--
-- Struktur dari tabel `jabatan`
--
CREATE TABLE `jabatan` (
`jabatan_id` int(11) NOT NULL,
`jabatan_nama` varchar(40) DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data untuk tabel `jabatan`
--
INSERT INTO `jabatan` (`jabatan_id`, `jabatan_nama`) VALUES
(1, 'Guru Madya'),
(2, 'Guru Muda'),
(3, 'Guru Pertama'),
(4, 'CPNS');
-- --------------------------------------------------------
--
-- Struktur dari tabel `jenis_guru`
--
CREATE TABLE `jenis_guru` (
`jenis_guru_id` int(11) NOT NULL,
`jenis_guru_nama` varchar(40) DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data untuk tabel `jenis_guru`
--
INSERT INTO `jenis_guru` (`jenis_guru_id`, `jenis_guru_nama`) VALUES
(1, 'Guru Mapel'),
(2, 'Guru BK');
-- --------------------------------------------------------
--
-- Struktur dari tabel `kategori`
--
CREATE TABLE `kategori` (
`kategori_id` int(11) NOT NULL,
`kategori_nama` varchar(30) DEFAULT NULL,
`kategori_tanggal` timestamp NULL DEFAULT CURRENT_TIMESTAMP
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data untuk tabel `kategori`
--
INSERT INTO `kategori` (`kategori_id`, `kategori_nama`, `kategori_tanggal`) VALUES
(1, 'Pendidikan', '2016-09-06 05:49:04'),
(2, 'Politik', '2016-09-06 05:50:01'),
(3, 'Sains', '2016-09-06 05:59:39'),
(5, 'Penelitian', '2016-09-06 06:19:26'),
(6, 'Prestasi', '2016-09-07 02:51:09'),
(13, 'Olah Raga', '2017-01-13 13:20:31');
-- --------------------------------------------------------
--
-- Struktur dari tabel `kelas`
--
CREATE TABLE `kelas` (
`kelas_id` int(11) NOT NULL,
`kelas_nama` varchar(40) DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data untuk tabel `kelas`
--
INSERT INTO `kelas` (`kelas_id`, `kelas_nama`) VALUES
(1, 'Kelas 7A'),
(2, 'Kelas 7B'),
(3, 'Kelas 7C'),
(4, 'Kelas 7D'),
(5, 'Kelas 7E'),
(6, 'Kelas 7F'),
(7, 'Kelas 7G'),
(8, 'Kelas 7H'),
(9, 'Kelas 8A'),
(10, 'Kelas 8B'),
(11, 'Kelas 8C'),
(12, 'Kelas 8D'),
(13, 'Kelas 8E'),
(14, 'Kelas 8F'),
(15, 'Kelas 8G'),
(16, 'Kelas 9A'),
(17, 'Kelas 9B'),
(18, 'Kelas 9C'),
(19, 'Kelas 9D'),
(20, 'Kelas 9E'),
(21, 'Kelas 9F'),
(22, 'Kelas 9G');
-- --------------------------------------------------------
--
-- Struktur dari tabel `pengguna`
--
CREATE TABLE `pengguna` (
`pengguna_id` int(11) NOT NULL,
`pengguna_nama` varchar(50) DEFAULT NULL,
`pengguna_moto` varchar(100) DEFAULT NULL,
`pengguna_jenkel` varchar(2) DEFAULT NULL,
`pengguna_username` varchar(30) DEFAULT NULL,
`pengguna_password` varchar(35) DEFAULT NULL,
`pengguna_tentang` text,
`pengguna_email` varchar(50) DEFAULT NULL,
`pengguna_nohp` varchar(20) DEFAULT NULL,
`pengguna_facebook` varchar(35) DEFAULT NULL,
`pengguna_twitter` varchar(35) DEFAULT NULL,
`pengguna_linkdin` varchar(35) DEFAULT NULL,
`pengguna_google_plus` varchar(35) DEFAULT NULL,
`pengguna_status` int(2) DEFAULT '1',
`pengguna_level` varchar(3) DEFAULT NULL,
`pengguna_register` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`pengguna_photo` varchar(40) DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data untuk tabel `pengguna`
--
INSERT INTO `pengguna` (`pengguna_id`, `pengguna_nama`, `pengguna_moto`, `pengguna_jenkel`, `pengguna_username`, `pengguna_password`, `pengguna_tentang`, `pengguna_email`, `pengguna_nohp`, `pengguna_facebook`, `pengguna_twitter`, `pengguna_linkdin`, `pengguna_google_plus`, `pengguna_status`, `pengguna_level`, `pengguna_register`, `pengguna_photo`) VALUES
(3, 'Admin', NULL, 'L', 'Admin', '202cb962ac59075b964b07152d234b70', NULL, 'mmt@gmail.com', '081288831443', NULL, NULL, NULL, NULL, 1, '1', '2017-01-12 03:29:47', 'ea682f37eef8fa2ca3e037d30323ecca.jpg');
-- --------------------------------------------------------
--
-- Struktur dari tabel `pengumuman`
--
CREATE TABLE `pengumuman` (
`pengumuman_id` int(11) NOT NULL,
`pengumuman_judul` varchar(150) DEFAULT NULL,
`pengumuman_isi` text,
`pengumuman_tanggal` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`pengumuman_author` varchar(60) DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data untuk tabel `pengumuman`
--
INSERT INTO `pengumuman` (`pengumuman_id`, `pengumuman_judul`, `pengumuman_isi`, `pengumuman_tanggal`, `pengumuman_author`) VALUES
(1, 'Pengumuman Libur Semester Ganjil Tahun Ajaran 2019 wkwkw', ' Libur semester ganjil tahun ajaran 2016-2017 dimulai dari tanggal 3 sampai dengan tanggal 7 September 2019.', '2019-11-28 02:16:22', 'Sandi'),
(2, 'Pengumuman Pembagian Raport Semester Ganjil Tahun Ajaran 2016-2017', 'Menjelang berakhirnya proses belajar-mengajar di semester ganjil tahun ajaran 2016-2017, maka akan diadakan pembagian hasil belajar/raport pada tanggal 4 Maret 2017 pukul 07.30 WIB.\r\nYang bertempat di M-Sekolah. Raport diambil oleh orang tua/wali kelas murid masing-masing', '2019-11-28 02:09:20', 'Sandi'),
(4, 'Pengumuman Proses Belajar Mengajar di Semester Genap Tahun Ajaran 2019', 'Setelah libur semester ganjil tahun ajaran 2019, proses belajar mengajar di semester genap tahun ajaran 2019 mulai aktif kembali tanggal 2 Maret 2020.', '2017-01-22 02:15:14', 'Sandi'),
(5, 'Pengumuman Peresmian dan Launching Website Perdana M-Sekolah', 'Peresmian dan launching website resmi akan diadakan pada hari 23 Desember 2019 pukul 10.00, bertepatan dengan pembagian raport semester ganjil tahun ajaran 2019', '2019-11-28 02:11:12', 'Sandi'),
(6, 'Pembelajaran sekolah', 'Untuk anak kelas 9 diharapkan untuk dapat mengikuti kegiatan sekolah dengan tepat', '2019-12-29 13:47:55', 'Sandi'),
(8, 'Judul', 'Isikan', '2019-12-29 14:29:29', 'saya'),
(9, 'syanggggggggggggggggggg', 's', '2019-12-29 14:33:52', 'ss'),
(10, 'Judul', 'ss', '2019-12-29 14:37:11', 'Bambang');
-- --------------------------------------------------------
--
-- Struktur dari tabel `siswa`
--
CREATE TABLE `siswa` (
`siswa_id` int(11) NOT NULL,
`siswa_nis` varchar(20) DEFAULT NULL,
`siswa_nama` varchar(70) DEFAULT NULL,
`siswa_tmp_lahir` varchar(20) NOT NULL,
`siswa_tgl_lahir` varchar(45) NOT NULL,
`siswa_jenkel` varchar(2) DEFAULT NULL,
`siswa_kelas_id` int(11) DEFAULT NULL,
`siswa_photo` varchar(40) DEFAULT NULL,
`siswa_alamat` varchar(100) NOT NULL,
`siswa_keterangan` varchar(100) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data untuk tabel `siswa`
--
INSERT INTO `siswa` (`siswa_id`, `siswa_nis`, `siswa_nama`, `siswa_tmp_lahir`, `siswa_tgl_lahir`, `siswa_jenkel`, `siswa_kelas_id`, `siswa_photo`, `siswa_alamat`, `siswa_keterangan`) VALUES
(1, '9287482', 'Sandi Nur Alam alamsyah', 'Depok Bekasi jabar', '8 Oktober 1912', 'P', 13, 'bf883909ad6b991ec9b264fd02beb01b.jpg', 'Jl. Merdeka, wegah sambat, wani rekoso', 'Juara 1 Mobile Legends'),
(2, '9287483', 'Ririn Cantika', '', '', 'P', 8, '74eec6ad37550cc12fe8fa83d46878af.jpg', '', ''),
(4, '123083', 'Ari Hidayat', '', '', 'L', 1, 'e371e67618ad53c99de380782c373023.png', '', ''),
(5, '123084', 'Irma Chaiyo', '', '', 'P', 1, '1e148b42c71562841ba3018fc97b748a.png', '', ''),
(6, '123085', 'Nadila Ginting', '', '', 'P', 1, '8125da21f903803b6992214967239ab3.png', '', ''),
(7, '123086', 'Anna Marina', '', '', 'P', 1, '33eaf3e3faf28a0fe31670c022f641f1.png', '', ''),
(8, '123086', 'Dhea Lubis', '', '', 'P', 1, '03e651410e969c3c26e8e0d35380470d.png', '', ''),
(9, '123087', 'Nadia Ginting', '', '', 'P', 1, 'd7823f8d98d376c085aa284a54d63264.png', '', ''),
(10, '123088', 'Mita Febrina', '', '', 'P', 1, 'eca0280a4a57c911ee68b8318d1e517f.png', '', ''),
(11, '123089', 'Elizabeth ', '', '', 'P', 1, 'ec1232a08d650bc8c3197c9db95a7fc8.png', '', ''),
(12, '123090', 'Della Guswono', '', '', 'P', 1, '6c82fce13bb3eff1fd2e897b2c3cfeeb.png', '', ''),
(13, '7655775', 'iqbol', '', '', 'L', 1, NULL, '', ''),
(14, NULL, 'Sand', 's', '12', NULL, 11, '3cceb55f314003c18dc115672948a055.png', '', ''),
(15, NULL, 'Sando', 'Solo', '1 Januarie 1992', NULL, 7, '44ff43b80cac3ad51d2bfea6f80da23c.jpg', '', ''),
(16, NULL, '', '', '', NULL, 21, NULL, 'ss', ''),
(17, NULL, 'nananaasss', 's', '1 Januarie 1992', 'P', 21, 'ae76c81229539ca16224b5a3e34a9851.jpg', 'jakarta', ''),
(19, '199', 'namamama', 'Solo', '1 Januarie 1992', 'L', 3, NULL, 'sssss', 'juara ');
--
-- Indexes for dumped tables
--
--
-- Indexes for table `agenda`
--
ALTER TABLE `agenda`
ADD PRIMARY KEY (`agenda_id`);
--
-- Indexes for table `album`
--
ALTER TABLE `album`
ADD PRIMARY KEY (`album_id`);
--
-- Indexes for table `berita`
--
ALTER TABLE `berita`
ADD PRIMARY KEY (`tulisan_id`),
ADD KEY `tulisan_kategori_id` (`tulisan_kategori_id`),
ADD KEY `tulisan_pengguna_id` (`tulisan_pengguna_id`);
--
-- Indexes for table `data`
--
ALTER TABLE `data`
ADD PRIMARY KEY (`data_id`);
--
-- Indexes for table `files`
--
ALTER TABLE `files`
ADD PRIMARY KEY (`file_id`);
--
-- Indexes for table `galeri`
--
ALTER TABLE `galeri`
ADD PRIMARY KEY (`galeri_id`),
ADD KEY `galeri_album_id` (`galeri_album_id`);
--
-- Indexes for table `guru`
--
ALTER TABLE `guru`
ADD PRIMARY KEY (`guru_id`);
--
-- Indexes for table `inbox`
--
ALTER TABLE `inbox`
ADD PRIMARY KEY (`inbox_id`);
--
-- Indexes for table `jabatan`
--
ALTER TABLE `jabatan`
ADD PRIMARY KEY (`jabatan_id`);
--
-- Indexes for table `jenis_guru`
--
ALTER TABLE `jenis_guru`
ADD PRIMARY KEY (`jenis_guru_id`);
--
-- Indexes for table `kategori`
--
ALTER TABLE `kategori`
ADD PRIMARY KEY (`kategori_id`);
--
-- Indexes for table `kelas`
--
ALTER TABLE `kelas`
ADD PRIMARY KEY (`kelas_id`);
--
-- Indexes for table `pengguna`
--
ALTER TABLE `pengguna`
ADD PRIMARY KEY (`pengguna_id`);
--
-- Indexes for table `pengumuman`
--
ALTER TABLE `pengumuman`
ADD PRIMARY KEY (`pengumuman_id`);
--
-- Indexes for table `siswa`
--
ALTER TABLE `siswa`
ADD PRIMARY KEY (`siswa_id`);
--
-- AUTO_INCREMENT for dumped tables
--
--
-- AUTO_INCREMENT for table `agenda`
--
ALTER TABLE `agenda`
MODIFY `agenda_id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=4;
--
-- AUTO_INCREMENT for table `album`
--
ALTER TABLE `album`
MODIFY `album_id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=8;
--
-- AUTO_INCREMENT for table `berita`
--
ALTER TABLE `berita`
MODIFY `tulisan_id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=25;
--
-- AUTO_INCREMENT for table `files`
--
ALTER TABLE `files`
MODIFY `file_id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=14;
--
-- AUTO_INCREMENT for table `galeri`
--
ALTER TABLE `galeri`
MODIFY `galeri_id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=20;
--
-- AUTO_INCREMENT for table `guru`
--
ALTER TABLE `guru`
MODIFY `guru_id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=21;
--
-- AUTO_INCREMENT for table `inbox`
--
ALTER TABLE `inbox`
MODIFY `inbox_id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=11;
--
-- AUTO_INCREMENT for table `jabatan`
--
ALTER TABLE `jabatan`
MODIFY `jabatan_id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=5;
--
-- AUTO_INCREMENT for table `jenis_guru`
--
ALTER TABLE `jenis_guru`
MODIFY `jenis_guru_id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=3;
--
-- AUTO_INCREMENT for table `kategori`
--
ALTER TABLE `kategori`
MODIFY `kategori_id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=14;
--
-- AUTO_INCREMENT for table `kelas`
--
ALTER TABLE `kelas`
MODIFY `kelas_id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=23;
--
-- AUTO_INCREMENT for table `pengguna`
--
ALTER TABLE `pengguna`
MODIFY `pengguna_id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=5;
--
-- AUTO_INCREMENT for table `pengumuman`
--
ALTER TABLE `pengumuman`
MODIFY `pengumuman_id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=11;
--
-- AUTO_INCREMENT for table `siswa`
--
ALTER TABLE `siswa`
MODIFY `siswa_id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=20;
COMMIT;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
| 74.878012
| 3,837
| 0.746958
|
38d36146faecab55e13040120293073ce327a1a2
| 2,424
|
php
|
PHP
|
app/Http/Controllers/Control.php
|
aditya2214/frontend
|
4e07b83a036e1be452aafce1696f2183ab0a3817
|
[
"MIT"
] | null | null | null |
app/Http/Controllers/Control.php
|
aditya2214/frontend
|
4e07b83a036e1be452aafce1696f2183ab0a3817
|
[
"MIT"
] | 1
|
2021-02-02T17:59:52.000Z
|
2021-02-02T17:59:52.000Z
|
app/Http/Controllers/Control.php
|
aditya2214/frontend
|
4e07b83a036e1be452aafce1696f2183ab0a3817
|
[
"MIT"
] | null | null | null |
<?php
namespace App\Http\Controllers;
use Illuminate\Support\Facades\DB;
use Illuminate\Http\Request;
use App\Models\Blog;
class Control extends Controller
{
public function index()
{
// Insert bisa
// $blog = new Blog;
// $blog->title='halo Cimahi';
// $blog->description='isi dari halo cimahi';
// $blog->save();
// insert mass assigment
// Blog::create([
// 'title'=>'Halo Bekasi',
// 'description'=>'Ini adalah isi Bekasi',
// ]);
// update biasa
// $blog = Blog::where('title','Halo Bekasi')->first();
// $blog->title = 'halo bali';
// $blog->description ='ini adalah isi bali';
// $blog->save();
// update mass assigment
// Blog::find(6)->update([
// 'title'=>'halo Banten',
// 'description'=>'isi dari banten'
// ]);
// delete
// $blog=Blog::find(6);
// $blog->delete();
// delete destroy
// Blog::destroy(3,4);
// return soft delete
// Blog::withTrashed()->restore();
// mengambil data dari table pegawai
// $blogs = DB::table('blogs')->paginate(10);
// mengirim data pegawai ke view index
$blogs=Blog::all();
$blogs = DB::table('blogs')->paginate(10);
return view('blog/nama',['blogs'=>$blogs]);
}
public function show($id)
{
$blog = Blog::find($id);
if(!$blog)
abort(404);
return view('blog/single',['blog'=>$blog]);
}
public function create(){
return view ('blog/create');
}
public function store(Request $request){
// membuat peraturan pada form -> lihat create.blade.php
$this->validate($request, [
'title' => 'required|min:5',
'description' =>'required|min:5|max:200'
]);
$blog = new Blog;
$blog->title=$request->title;
$blog->description=$request->description;
$blog->save();
return redirect ('/blog');
}
public function edit($id)
{
$blog = Blog::find($id);
if(!$blog)
abort(404);
return view('blog/edit',['blog'=>$blog]);
}
public function update(Request $request,$id){
$blog = Blog::find($id);
$blog->title = $request->title;
$blog->description =$request->description;
$blog->save();
return redirect ('blog/'.$id);
}
public function destroy($id)
{
$blogs=Blog::find($id);
$blogs->delete();
return redirect('blog');
}
}
| 19.086614
| 60
| 0.552805
|
92c00457d8456b3f33c744710e90e70c1acbd351
| 1,471
|
sql
|
SQL
|
inst/sql/src/sql/BANK/GROUNDFISH/TABLES/SDPOKDET_TMP.sql
|
AtlanticR/bio.utilities
|
aaa52cf86afa4ee9e6f46c4516a48d27cc0bfed9
|
[
"MIT"
] | null | null | null |
inst/sql/src/sql/BANK/GROUNDFISH/TABLES/SDPOKDET_TMP.sql
|
AtlanticR/bio.utilities
|
aaa52cf86afa4ee9e6f46c4516a48d27cc0bfed9
|
[
"MIT"
] | null | null | null |
inst/sql/src/sql/BANK/GROUNDFISH/TABLES/SDPOKDET_TMP.sql
|
AtlanticR/bio.utilities
|
aaa52cf86afa4ee9e6f46c4516a48d27cc0bfed9
|
[
"MIT"
] | null | null | null |
--------------------------------------------------------
-- DDL for Table SDPOKDET_TMP
--------------------------------------------------------
CREATE TABLE "GROUNDFISH"."SDPOKDET_TMP"
( "CRUNO" VARCHAR2(10 BYTE),
"SETNO" NUMBER(3,0),
"SPEC" NUMBER(4,0),
"FSHNO" VARCHAR2(10 BYTE),
"FLEN" NUMBER(4,0),
"SEX" VARCHAR2(1 BYTE),
"FWT" NUMBER(6,0),
"FULLNESS" NUMBER(1,0),
"CONTENT_CD" NUMBER(1,0),
"STOCONT" VARCHAR2(50 BYTE),
"STOCONTWT" NUMBER(8,2),
"PREY1" VARCHAR2(25 BYTE),
"QUANT1" VARCHAR2(20 BYTE),
"PREY2" VARCHAR2(25 BYTE),
"QUANT2" VARCHAR2(20 BYTE),
"PREY3" VARCHAR2(25 BYTE),
"QUANT3" VARCHAR2(20 BYTE),
"PREY4" VARCHAR2(25 BYTE),
"QUANT4" VARCHAR2(20 BYTE),
"PREY5" VARCHAR2(25 BYTE),
"QUANT5" VARCHAR2(20 BYTE),
"PREY6" VARCHAR2(25 BYTE),
"QUANT6" VARCHAR2(20 BYTE),
"PREY7" VARCHAR2(25 BYTE),
"QUANT7" VARCHAR2(20 BYTE),
"PREY8" VARCHAR2(25 BYTE),
"QUANT8" VARCHAR2(20 BYTE),
"REMARKS" VARCHAR2(100 BYTE)
) PCTFREE 10 PCTUSED 40 INITRANS 1 MAXTRANS 255 NOCOMPRESS LOGGING
STORAGE(INITIAL 532480 NEXT 65536 MINEXTENTS 1 MAXEXTENTS 2147483645
PCTINCREASE 0 FREELISTS 1 FREELIST GROUPS 1 BUFFER_POOL DEFAULT)
TABLESPACE "MFD_GROUNDFISH" ;
GRANT SELECT ON "GROUNDFISH"."SDPOKDET_TMP" TO "MFD_STOMACH";
GRANT SELECT ON "GROUNDFISH"."SDPOKDET_TMP" TO "RICARDD";
GRANT SELECT ON "GROUNDFISH"."SDPOKDET_TMP" TO "HUBLEYB";
GRANT SELECT ON "GROUNDFISH"."SDPOKDET_TMP" TO "GREYSONP";
| 32.688889
| 70
| 0.638341
|
9e0f761c0a0a7c22af02c7fae0006ef5a77287cd
| 4,893
|
ps1
|
PowerShell
|
DSC/DeveloperWorkstation.ps1
|
aultt/Workstation
|
ddb79fdd5e161f46b1d11a0e45c903ba32ae7deb
|
[
"MIT"
] | null | null | null |
DSC/DeveloperWorkstation.ps1
|
aultt/Workstation
|
ddb79fdd5e161f46b1d11a0e45c903ba32ae7deb
|
[
"MIT"
] | null | null | null |
DSC/DeveloperWorkstation.ps1
|
aultt/Workstation
|
ddb79fdd5e161f46b1d11a0e45c903ba32ae7deb
|
[
"MIT"
] | null | null | null |
configuration DeveloperWorkstation
{
param
(
[Parameter(Mandatory)]
[String]$DomainName,
[Parameter(Mandatory)]
[System.Management.Automation.PSCredential]$Admincreds,
[string]$datadriveLetter = 'F',
[string]$TimeZone = "Eastern Standard Time",
[Int]$RetryCount = 20,
[Int]$RetryIntervalSec = 30
)
Import-DscResource -ModuleName StorageDsc -ModuleVersion 5.0.0
Import-DscResource -ModuleName ComputerManagementdsc -ModuleVersion 8.2.0
Import-DscResource -ModuleName cChoco -ModuleVersion 2.4.1.0
$domain = $DomainName.Split("{.}")[0]
Node localhost
{
LocalConfigurationManager
{
RebootNodeIfNeeded = $True
RefreshMode = "Push"
ConfigurationMode = "ApplyOnly"
ActionAfterReboot = 'ContinueConfiguration'
}
PowerShellExecutionPolicy 'ExecutionPolicy'
{
ExecutionPolicyScope = 'LocalMachine'
ExecutionPolicy = 'RemoteSigned'
}
WaitForDisk DataVolume{
DiskId = 2
RetryIntervalSec = 60
RetryCount =60
}
Disk DataVolume{
DiskId = 2
DriveLetter = $datadriveLetter
FSFormat = 'NTFS'
AllocationUnitSize = 64kb
DependsOn = '[WaitForDisk]DataVolume'
}
PowerPlan 'HighPerf'
{
IsSingleInstance = 'Yes'
Name = 'High performance'
}
TimeZone 'SetTimeZone'
{
IsSingleInstance = 'Yes'
TimeZone = $TimeZone
}
Computer DomainJoin
{
Name = $env:COMPUTERNAME
DomainName = $DomainName
Credential = $AdminCreds
}
Group Admin
{
GroupName = 'Administrators'
Ensure = 'Present'
Credential = $AdminCreds
MembersToInclude = @("$($domain)\DBA")
DependsOn = '[Computer]DomainJoin'
PsDscRunAsCredential = $AdminCreds
}
cChocoInstaller InstallChoco
{
InstallDir = "${datadriveletter}:\choco"
DependsOn = '[Computer]DomainJoin','[Disk]DataVolume'
}
cChocoPackageInstaller installAzureDataStudio
{
Name = 'azure-data-studio'
Ensure = 'Present'
DependsOn = '[cChocoInstaller]installChoco'
}
cChocoPackageInstaller installAzureDataStudioExt1
{
Name = 'azure-data-studio-sql-server-admin-pack'
Ensure = 'Present'
DependsOn = '[cChocoPackageInstaller]installAzureDataStudio'
}
cChocoPackageInstaller installAzureDataStudioExt2
{
Name = 'azuredatastudio-powershell'
Ensure = 'Present'
DependsOn = '[cChocoPackageInstaller]installAzureDataStudio'
}
cChocoPackageInstaller azcopy
{
Name = 'azcoyp10'
Ensure = 'Present'
DependsOn = '[cChocoPackageInstaller]installAzureDataStudio'
}
cChocoPackageInstaller sqlservermgmtstudio
{
Name = 'sql-server-management-studio'
Ensure = 'Present'
DependsOn = '[cChocoPackageInstaller]installAzureDataStudio'
}
cChocoPackageInstaller vscode
{
Name = 'vscode'
Ensure = 'Present'
DependsOn = '[cChocoPackageInstaller]installAzureDataStudio'
}
cChocoPackageInstaller vscodemssql
{
Name = 'vscode-mssql'
Ensure = 'Present'
DependsOn = '[cChocoPackageInstaller]vscode'
}
cChocoPackageInstaller vscodepowershell
{
Name = 'vscode-powershell'
Ensure = 'Present'
DependsOn = '[cChocoPackageInstaller]vscode'
}
}
}
$ConfigData = @{
AllNodes = @(
@{
NodeName = 'localhost'
PSDscAllowPlainTextPassword = $true
}
)
}
# $AdminCreds = Get-Credential
# DeveloperWorkstation -DomainName demolab.local -Admincreds $AdminCreds -Verbose -ConfigurationData $ConfigData -OutputPath C:\Packages\Plugins\Microsoft.Powershell.DSC\2.80.0.0\DSCWork\DeveloperWorkstation.0\DeveloperWorkstation
# Start-DscConfiguration -wait -Force -Verbose -Path C:\Packages\Plugins\Microsoft.Powershell.DSC\2.80.0.0\DSCWork\DeveloperWorkstation.0\DeveloperWorkstation
| 30.968354
| 230
| 0.538116
|
38be4738377c6eb885aafc152ba196365318d363
| 7,184
|
php
|
PHP
|
application/modules/ujianmurid/views/vmulai.php
|
rendyrey/its
|
0571550541fcd615021b9d1bfbb31e3b67bcf027
|
[
"MIT"
] | null | null | null |
application/modules/ujianmurid/views/vmulai.php
|
rendyrey/its
|
0571550541fcd615021b9d1bfbb31e3b67bcf027
|
[
"MIT"
] | null | null | null |
application/modules/ujianmurid/views/vmulai.php
|
rendyrey/its
|
0571550541fcd615021b9d1bfbb31e3b67bcf027
|
[
"MIT"
] | null | null | null |
<?php
$num_of_per = count($pertanyaan);
$count = 1;
?>
<div class="main-content">
<div class="main-content-inner">
<div class="page-content">
<div class="row">
<div class="col-xs-12">
<div>
<form id="anserForm" action="<?= site_url('ujianmurid'); ?>" method="post">
<div class="question_content">
<input type="hidden" name="ujian_id" value="<?= $ujian->title_id; ?>">
<input type="hidden" name="token" value="<?= time(); ?>">
<div id="Carousel" class="carousel" data-interval="false" data-wrap="false" >
<div class="carousel-inner">
<?php
foreach ($pertanyaan as $per): $type = ($per->option_type == 'Radio') ? 'radio' : 'checkbox'; ?>
<div class="item <?= ($count == $num_of_per) ? 'active' : '' ?>">
<div class="col-xs-9">
<h3><?= $per->pertanyaan ?></h3><br/>
<?php if (!empty($per->media_type) AND ($per->media_type != '') AND ($per->media_link != '')) {
switch ($per->media_type) {
case 'image':
echo '<img src="'.base_url("assets/upload/pertanyaan-media/".$per->media_link).'" alt="image" style="width: 70%;height: 70%" href="'.base_url("assets/upload/pertanyaan-media/".$per->media_link).'" ';
break;
default:
break;
}
echo "<br/><br/>";
}
?>
<?php
foreach ($jawaban[$per->per_id][0] as $jaw) { ?>
<div class="<?= $type ?>" style="margin-left: 23px; margin-top:10px;">
<label><input type="<?= $type ?>" name="jaw[<?= $per->per_id; ?>]<?= ($type == 'checkbox') ? '[]' : '' ?>" value="<?=$jaw->jaw_id; ?>">
<?=form_prep($jaw->jawaban); ?>
</label>
</div>
<?php
} ?>
</div>
<div class="col-xs-3">
<p>
<span class="pull-right">Pertanyaan <?= ($num_of_per - $count + 1) . ' Dari ' . $num_of_per; ?><br>WAKTU:
</span>
<span class="time-duration"></span>
</p>
<p id="submit_button" style="margin: 30px 30px;"></p>
</div>
</div>
<?php $count++;
endforeach;
?>
</div>
</div>
<div class=" me-control-btn">
<a class="btn btn-lg btn-info col-xs-5 col-xs-offset-0 me-prev" href="#Carousel" data-slide="next" disabled> « Pertanyaan Sebelumnya<span class="hidden-xxs"></span></a>
<a class="btn btn-lg btn-info col-xs-5 col-xs-offset-2 me-next" href="#Carousel" data-slide="prev"> <span class="hidden-xxs">Pertanyaan Selanjutnya</span> » </a>
</div>
</div>
<div class="row">
</div>
</form>
</div>
</div><!-- /.col -->
</div><!-- /.row -->
</div><!-- /.page-content -->
</div>
</div>
<script language="JavaScript"><!--
javascript:window.history.forward(1);
//--></script>
<script type="text/javascript">
$(document).ready(function() {
// Set Time
var count = <?= ($duration) ?>;
var h, m, s, newTime;
var counter = setInterval(timer, 1000);
function timer() {
count = count - 1;
if (count < 0) {
clearInterval(counter);
return;
}
h = Math.floor(count / 3600);
m = Math.floor(count / 60) - (h * 60);
s = count % 60;
if (m.toString().length == 1)
m = '0' + m;
if (s.toString().length == 1)
s = '0' + s;
if (h) {
if (h.toString().length == 1)
h = '0' + h;
newTime = '<h1><font size="50"><strong>' + h + ':' + m + ':' + s + '</strong></font></h1> <small class="text-muted"></small>';
} else {
newTime = '<h1><font size="50"><strong>' + m + ':' + s + '</strong></font></h1> <small class="text-muted"></small>';
}
//Update timer cookie
var now = new Date();
var time = now.getTime();
time += count * 1000;
now.setTime(time);
document.cookie="UjianTimeDuration="+count+"; expires="+now.toUTCString()+"; path=/";
//Update time to HTML
$('.time-duration').html(newTime);
}
// Coltrol Buttons
var submit_btn = '<button type="submit" class="btn btn-primary btn-large" > <i class="fa fa-check-square-o"></i> <span class="hidden-xxs">Selesai </span></a>';
var slide_count = 1;
var num_of_per = "<?php echo $num_of_per; ?>";
$('.me-next').click(function() {
$('.me-prev').removeAttr('disabled');
slide_count++;
if (slide_count >= num_of_per) {
$('.me-next').attr('disabled', 'disabled'); //disable Nest button for last question.
if (!$("#submit_button > button").length) { //Check if the submit button already placed add if not.
$("#submit_button").append(submit_btn);
}
}
});
$('.me-prev').click(function() {
$('.me-next').removeAttr('disabled');
slide_count--;
if (slide_count == 1) {
$('.me-prev').attr('disabled', 'disabled'); //disable Prev button for fast question.
}
});
//Sumbit after time out
var timeout = <?= ($duration * 1000) ?>;
setTimeout(function() {
alert('WAKTU ANDA HABIS!');
$('#anserForm').submit();
}, timeout);
});
</script>
| 43.539394
| 264
| 0.372355
|
a0077965fdbfd284dd81e298795f5c55e97788d4
| 2,220
|
ts
|
TypeScript
|
packages/generic/core/src/modules/statistic/statisticType.ts
|
Cour-de-cassation/label
|
e6c829f965e969f1f4d51981fccb6517c91b583f
|
[
"MIT"
] | 16
|
2020-12-16T17:57:50.000Z
|
2022-03-03T14:10:39.000Z
|
packages/generic/core/src/modules/statistic/statisticType.ts
|
Cour-de-cassation/label
|
e6c829f965e969f1f4d51981fccb6517c91b583f
|
[
"MIT"
] | 362
|
2020-11-03T08:28:56.000Z
|
2022-03-17T13:41:59.000Z
|
packages/generic/core/src/modules/statistic/statisticType.ts
|
Cour-de-cassation/label
|
e6c829f965e969f1f4d51981fccb6517c91b583f
|
[
"MIT"
] | 2
|
2020-11-02T17:35:43.000Z
|
2021-08-24T10:38:31.000Z
|
import { documentModel } from '../document/documentType';
import { idType } from '../id';
import { buildModel, buildType } from '../modelType';
export { statisticModel };
export type { statisticType };
const statisticModel = buildModel({
kind: 'object',
content: {
_id: { kind: 'custom', content: 'id' },
annotationsCount: { kind: 'primitive', content: 'number' },
appealNumber: {
kind: 'or',
content: [
{ kind: 'primitive', content: 'string' },
{ kind: 'primitive', content: 'undefined' },
],
},
documentNumber: documentModel.content.documentNumber,
decisionDate: documentModel.content.decisionMetadata.content.date,
documentExternalId: { kind: 'primitive', content: 'string' },
chamberName: {
kind: 'or',
content: [
{ kind: 'primitive', content: 'string' },
{ kind: 'primitive', content: 'undefined' },
],
},
jurisdiction: {
kind: 'or',
content: [
{ kind: 'primitive', content: 'string' },
{ kind: 'primitive', content: 'undefined' },
],
},
linkedEntitiesCount: { kind: 'primitive', content: 'number' },
publicationCategory: { kind: 'array', content: { kind: 'primitive', content: 'string' } },
session: {
kind: 'or',
content: [
{ kind: 'primitive', content: 'string' },
{ kind: 'primitive', content: 'undefined' },
],
},
source: { kind: 'primitive', content: 'string' },
surAnnotationsCount: {
kind: 'primitive',
content: 'number',
},
subAnnotationsSensitiveCount: {
kind: 'primitive',
content: 'number',
},
subAnnotationsNonSensitiveCount: {
kind: 'primitive',
content: 'number',
},
treatmentDate: { kind: 'primitive', content: 'number' },
treatmentsSummary: {
kind: 'array',
content: {
kind: 'object',
content: {
userId: { kind: 'custom', content: 'id' },
treatmentDuration: { kind: 'primitive', content: 'number' },
},
},
},
wordsCount: { kind: 'primitive', content: 'number' },
},
} as const);
type statisticType = buildType<typeof statisticModel, { id: idType }>;
| 29.210526
| 94
| 0.56982
|
d16a823dfd4a3f2e3130410c417a1d40384068d0
| 3,968
|
html
|
HTML
|
web/themes/default/article/notice-post.html
|
Broomspun/shanque
|
67cec34de5e4951d6593a5e88778b06b27fe5f61
|
[
"MIT"
] | null | null | null |
web/themes/default/article/notice-post.html
|
Broomspun/shanque
|
67cec34de5e4951d6593a5e88778b06b27fe5f61
|
[
"MIT"
] | null | null | null |
web/themes/default/article/notice-post.html
|
Broomspun/shanque
|
67cec34de5e4951d6593a5e88778b06b27fe5f61
|
[
"MIT"
] | null | null | null |
{template 'common/header'}
<ol class="breadcrumb we7-breadcrumb">
<a href="{php echo url('article/notice/list');}"><i class="wi wi-back-circle"></i> </a>
<li>
<a href="{php echo url('article/notice/list');}">公告列表</a>
</li>
<li>
添加公告
</li>
</ol>
{if $do == 'post'}
<form action="{php echo url('article/notice/post');}" method="post" class="we7-form" role="form" id="form1">
<input type="hidden" name="id" value="{$notice['id']}"/>
<div class="form-group">
<label class="col-sm-2 control-label">公告标题</label>
<div class="col-sm-8 col-lg-9 col-xs-12">
<input type="text" class="form-control" name="title" value="{$notice['title']}" placeholder="公告标题"/>
<div class="help-block">请填写公告标题</div>
</div>
</div>
<div class="form-group">
<label class="col-sm-2 control-label">公告分类</label>
<div class="col-sm-8 col-lg-9 col-xs-12">
<select name="cateid" id="cateid" class="form-control">
<option value="">==请选择公告分类==</option>
{loop $categorys $category}
<option value="{$category['id']}" {if $notice['cateid'] == $category['id']}selected{/if}>{$category['title']}</option>
{/loop}
</select>
<div class="help-block">还没有分类,点我 <a href="{php echo url('article/notice/category_post');}" target="_blank"><i class="fa fa-plus-circle"></i> 添加分类</a></div>
</div>
</div>
<div class="form-group">
<label class="col-sm-2 control-label">内容</label>
<div class="col-sm-8 col-lg-9 col-xs-12">
{php echo tpl_ueditor('content', $notice['content']);}
</div>
</div>
<div class="form-group">
<label class="col-sm-2 control-label">阅读次数</label>
<div class="col-sm-8 col-lg-9 col-xs-12">
<input type="text" class="form-control" name="click" value="{$notice['click']}" placeholder="阅读次数"/>
<div class="help-block">默认为0。您可以设置一个初始值,阅读次数会在该初始值上增加。</div>
</div>
</div>
<div class="form-group">
<label class="col-sm-2 control-label">排序</label>
<div class="col-sm-8 col-lg-9 col-xs-12">
<input type="text" class="form-control" name="displayorder" value="{$notice['displayorder']}" placeholder="阅读次数"/>
<div class="help-block">数字越大,越靠前。</div>
</div>
</div>
<div class="form-group">
<label class="col-sm-2 control-label">是否显示</label>
<div class="col-sm-8 col-lg-9 col-xs-12">
<input type="radio" id="is_display-1" name="is_display" value="1" {if $notice['is_display'] == 1} checked{/if}>
<label class="radio-inline" for="is_display-1">显示</label>
<input type="radio" name="is_display" id="is_display-0" value="0" {if $notice['is_display'] == 0} checked{/if}>
<label class="radio-inline" for="is_display-0"> 不显示</label>
</div>
</div>
<div class="form-group">
<label class="col-sm-2 control-label">显示在首页</label>
<div class="col-sm-8 col-lg-9 col-xs-12">
<input type="radio" name="is_show_home" id="is_show_home-1" value="1" {if $notice['is_show_home'] == 1} checked{/if}>
<label class="radio-inline" for="is_show_home-1">是</label>
<input type="radio" name="is_show_home" id="is_show_home-0" value="0" {if $notice['is_show_home'] == 0} checked{/if}>
<label class="radio-inline" for="is_show_home-0"> 否</label>
</div>
</div>
<div class="form-group">
<div class="">
<input type="submit" class="btn btn-primary" name="submit" value="提交" />
<input type="hidden" name="token" value="{$_W['token']}" />
</div>
</div>
</form>
{/if}
<script>
$(function(){
$('#form1').submit(function(){
if(!$.trim($(':text[name="title"]').val())) {
util.message('请填写公告标题', '', 'error');
return false;
}
if(!$.trim($('#cateid').val())) {
util.message('请选择公告分类', '', 'error');
return false;
}
if(!$.trim($('textarea[name="content"]').val())) {
util.message('请填写公告内容', '', 'error');
return false;
}
return true;
});
});
</script>
{template 'common/footer'}
| 39.287129
| 161
| 0.593246
|
a7b937dc2808a4b0e99de922ca7bb1c9507c3598
| 656
|
css
|
CSS
|
client/src/components/project/project.css
|
delaluz12/myReactPortfolio
|
770f9a53c0dd11cc9c6c283bef483f76ddd44f9e
|
[
"MIT"
] | null | null | null |
client/src/components/project/project.css
|
delaluz12/myReactPortfolio
|
770f9a53c0dd11cc9c6c283bef483f76ddd44f9e
|
[
"MIT"
] | null | null | null |
client/src/components/project/project.css
|
delaluz12/myReactPortfolio
|
770f9a53c0dd11cc9c6c283bef483f76ddd44f9e
|
[
"MIT"
] | null | null | null |
.project {
width: 30%;
height: 40vh;
margin: 20px 10px;
border: 2px solid rgb(243,242, 242);
border-radius: 10px 10px 0px 0px;
overflow: hidden;
}
.project-browser {
height: 20px;
background-color: rgb(243,242,242);
display: flex;
align-items: center;
}
.project-circle {
width: 5px;
height: 6px;
background-color: white;
border-radius: 50%;
margin: 3px;
}
.project-image {
width: 100%;
transition: all 10s ease;
}
.project:hover .project-img {
transform: translateX(-25%);
}
@media screen and (max-width: 480px) {
.project {
width: 40%;
height: 20vh;
}
}
| 16.820513
| 40
| 0.592988
|
e73dcf16764bfff9a25bb3c6b1d5220eb5343da5
| 958
|
php
|
PHP
|
src/CoreBundle/Factory/WeatherInformationFactory.php
|
stud3nt/test-project
|
0f90d1278469585656bbff0cca25199267f0c9c4
|
[
"MIT"
] | null | null | null |
src/CoreBundle/Factory/WeatherInformationFactory.php
|
stud3nt/test-project
|
0f90d1278469585656bbff0cca25199267f0c9c4
|
[
"MIT"
] | null | null | null |
src/CoreBundle/Factory/WeatherInformationFactory.php
|
stud3nt/test-project
|
0f90d1278469585656bbff0cca25199267f0c9c4
|
[
"MIT"
] | null | null | null |
<?php
namespace CoreBundle\Factory;
use CoreBundle\Entity\Weather\Information;
class WeatherInformationFactory
{
public static function get(array $weatherData) : Information
{
$information = new Information();
$information->setCreatedAt(new \DateTime("now"));
$information->setLat($weatherData['lat']);
$information->setLon($weatherData['lon']);
$information->setCity($weatherData['city']);
$information->setClouds($weatherData['clouds'] ?? null);
$information->setTemperatureNow($weatherData['temperature']['now'] ?? null);
$information->setTemperatureMin($weatherData['temperature']['min'] ?? null);
$information->setTemperatureMax($weatherData['temperature']['max'] ?? null);
$information->setWindDirection($weatherData['wind']['direction'] ?? null);
$information->setWindSpeed($weatherData['wind']['speed'] ?? null);
return $information;
}
}
| 36.846154
| 84
| 0.661795
|
fdb61e44af0b4cb2a4c861bc3e6d4e8c339854b1
| 530
|
css
|
CSS
|
src/landing/assets/fonts/font.css
|
Pipeline-by-TalentQL/team-delta-diet-app
|
c286166a6dbf4486eef6b531779f226387c65de9
|
[
"MIT"
] | null | null | null |
src/landing/assets/fonts/font.css
|
Pipeline-by-TalentQL/team-delta-diet-app
|
c286166a6dbf4486eef6b531779f226387c65de9
|
[
"MIT"
] | 18
|
2021-09-25T08:53:53.000Z
|
2022-02-25T09:53:57.000Z
|
src/landing/assets/fonts/font.css
|
Pipeline-by-TalentQL/team-delta-diet-app
|
c286166a6dbf4486eef6b531779f226387c65de9
|
[
"MIT"
] | null | null | null |
@font-face {
font-family: 'Segoe UI';
src: url('./src/landing/assets/fonts/Segoe-UI.woff') format('woff');
font-weight: 400;
font-style: normal;
font-display: swap;
}
@font-face {
font-family: 'Segoe UI';
src: url('./src/landing/assets/fonts/Segoe-UI-Italic.wof') format('woff');
font-weight: 400;
font-style: italic;
font-display: swap;
}
@font-face {
font-family: 'Segoe UI';
src: url('./src/landing/assets/fonts/Segoe-UI-Bold.woff') format('woff');
font-weight: bold;
font-style: normal;
font-display: swap;
}
| 22.083333
| 75
| 0.675472
|
0d3464dd173a98d2ef3f5050992aa151c6c9a79d
| 3,385
|
cs
|
C#
|
src/DancingGoat/Controllers/SubscriptionController.cs
|
techbuzzz/Mvc
|
ad8788fb03fba8e0898db9e2c44defe4a6f152f0
|
[
"MIT"
] | null | null | null |
src/DancingGoat/Controllers/SubscriptionController.cs
|
techbuzzz/Mvc
|
ad8788fb03fba8e0898db9e2c44defe4a6f152f0
|
[
"MIT"
] | null | null | null |
src/DancingGoat/Controllers/SubscriptionController.cs
|
techbuzzz/Mvc
|
ad8788fb03fba8e0898db9e2c44defe4a6f152f0
|
[
"MIT"
] | 1
|
2020-01-22T13:06:16.000Z
|
2020-01-22T13:06:16.000Z
|
using System;
using System.Web.Mvc;
using CMS.Helpers;
using Kentico.Newsletters;
using DancingGoat.Models.Subscription;
namespace DancingGoat.Web.Controllers
{
public class SubscriptionController : Controller
{
private readonly NewsletterSubscriptionService mService;
public SubscriptionController(NewsletterSubscriptionService subscriptionService)
{
mService = subscriptionService;
}
// POST: Subscription/Subscribe
[HttpPost]
[ValidateAntiForgeryToken]
[ValidateInput(false)]
public ActionResult Subscribe(SubscribeModel model)
{
if (ModelState.IsValid)
{
if (mService.Subscribe(model.Email, "DancingGoatMvcNewsletter"))
{
model.SubscriptionSaved = true;
}
else
{
ModelState.AddModelError("Email", ResHelper.GetString("DancingGoatMvc.News.SubscribeError"));
}
}
return PartialView("_Subscribe", model);
}
// GET: Subscription/Unsubscribe
[ValidateInput(false)]
public ActionResult Unsubscribe(UnsubscriptionModel model)
{
bool unsubscribed = false;
string invalidUrlMessage = ResHelper.GetString("DancingGoatMvc.News.InvalidUnsubscriptionLink");
if (ModelState.IsValid)
{
bool emailIsValid = mService.ValidateEmail(model.Email, model.Hash);
if (emailIsValid)
{
try
{
if (model.UnsubscribeFromAll)
{
unsubscribed = mService.UnsubscribeFromAll(model.Email, model.NewsletterGuid, model.IssueGuid);
}
else
{
unsubscribed = mService.Unsubscribe(model.Email, model.NewsletterGuid, model.IssueGuid);
}
}
catch (ArgumentException)
{
model.UnsubscriptionResult = invalidUrlMessage;
}
}
else
{
model.UnsubscriptionResult = invalidUrlMessage;
}
}
else
{
model.UnsubscriptionResult = invalidUrlMessage;
}
model.IsError = !unsubscribed;
if (unsubscribed)
{
// Return a successful message
model.UnsubscriptionResult = ResHelper.GetString(model.UnsubscribeFromAll ? "DancingGoatMvc.News.UnsubscribedAll" : "DancingGoatMvc.News.Unsubscribed");
}
else if (String.IsNullOrEmpty(model.UnsubscriptionResult))
{
// Return a general error message unless a specific error message is already defined
model.UnsubscriptionResult = ResHelper.GetString("DancingGoatMvc.News.UnsubscribeError");
}
return View(model);
}
// GET: Subscription/Show
public ActionResult Show()
{
var model = new SubscribeModel();
return PartialView("_Subscribe", model);
}
}
}
| 31.933962
| 168
| 0.531462
|
4b4bc68b49788b08d12f4710502fd9729092c961
| 1,490
|
swift
|
Swift
|
CameraViewer/Cells/TextInputCell.swift
|
ms2138/CameraViewer
|
72a019c31c7cdee1469ff4a363a39556860b3924
|
[
"MIT"
] | null | null | null |
CameraViewer/Cells/TextInputCell.swift
|
ms2138/CameraViewer
|
72a019c31c7cdee1469ff4a363a39556860b3924
|
[
"MIT"
] | null | null | null |
CameraViewer/Cells/TextInputCell.swift
|
ms2138/CameraViewer
|
72a019c31c7cdee1469ff4a363a39556860b3924
|
[
"MIT"
] | null | null | null |
//
// TextInputCell.swift
// CameraViewer
//
// Created by mani on 2021-08-08.
//
import UIKit
class TextInputCell: UITableViewCell {
var keyboardType: UIKeyboardType = .default {
didSet {
textField.keyboardType = keyboardType
}
}
var returnKeyType: UIReturnKeyType = .default {
didSet {
textField.returnKeyType = returnKeyType
}
}
let textField: UITextField
override init(style: UITableViewCell.CellStyle, reuseIdentifier: String?) {
textField = UITextField()
super.init(style: style, reuseIdentifier: reuseIdentifier)
initialize()
}
required init?(coder aDecoder: NSCoder) {
textField = UITextField()
super.init(coder: aDecoder)
initialize()
}
private func initialize() {
textField.clearButtonMode = .whileEditing
textField.translatesAutoresizingMaskIntoConstraints = false
contentView.addSubview(textField)
}
override func layoutSubviews() {
super.layoutSubviews()
let margins = self.layoutMarginsGuide
NSLayoutConstraint.activate([
textField.topAnchor.constraint(equalTo: contentView.topAnchor),
textField.bottomAnchor.constraint(equalTo: contentView.bottomAnchor),
textField.leadingAnchor.constraint(equalTo: margins.leadingAnchor),
textField.trailingAnchor.constraint(equalTo: margins.trailingAnchor)
])
}
}
| 25.254237
| 81
| 0.657718
|
ae2e2daaadc54721b816abedd4a9bdcdf968d099
| 805
|
cs
|
C#
|
InMobile.Sms.ApiClient/Json/InMobileJsonSerializerSettings.cs
|
inMobile/inMobile-.NET-API-Client
|
c6cf6194f3c349b686c367742ad62ef0353ea977
|
[
"BSD-3-Clause"
] | 1
|
2021-08-24T10:57:55.000Z
|
2021-08-24T10:57:55.000Z
|
InMobile.Sms.ApiClient/Json/InMobileJsonSerializerSettings.cs
|
inMobile/inMobile-.NET-API-Client
|
c6cf6194f3c349b686c367742ad62ef0353ea977
|
[
"BSD-3-Clause"
] | 1
|
2021-08-17T09:12:59.000Z
|
2021-08-17T09:12:59.000Z
|
InMobile.Sms.ApiClient/Json/InMobileJsonSerializerSettings.cs
|
inMobile/inMobile-.NET-API-Client
|
c6cf6194f3c349b686c367742ad62ef0353ea977
|
[
"BSD-3-Clause"
] | 1
|
2020-02-19T07:42:38.000Z
|
2020-02-19T07:42:38.000Z
|
using Newtonsoft.Json;
namespace InMobile.Sms.ApiClient
{
/// <summary>
/// Serializer created by https://gist.github.com/alexeyzimarev
/// Repo: https://gist.github.com/alexeyzimarev/c00b79c11c8cce6f6208454f7933ad24
/// </summary>
public class InMobileJsonSerializerSettings : JsonSerializerSettings
{
/// <summary>
/// Used to avoid having the client setting up his/her own serializer and then accidentially affecting how this client works.
/// </summary>
public static JsonSerializerSettings Settings { get; } = new JsonSerializerSettings();
/// <summary>
///
/// </summary>
public InMobileJsonSerializerSettings()
{
Converters.Add(new EnumConverter<MessageEncoding>());
}
}
}
| 30.961538
| 133
| 0.647205
|
2cf8f75648bafbe38fc9d8db4b9356b3f69c9a04
| 4,410
|
lua
|
Lua
|
server.lua
|
Zeemahh/Badger-911
|
85870cad72c8bcccb6bb5f9aaa912e235ef765a3
|
[
"MIT"
] | null | null | null |
server.lua
|
Zeemahh/Badger-911
|
85870cad72c8bcccb6bb5f9aaa912e235ef765a3
|
[
"MIT"
] | null | null | null |
server.lua
|
Zeemahh/Badger-911
|
85870cad72c8bcccb6bb5f9aaa912e235ef765a3
|
[
"MIT"
] | null | null | null |
--- CONFIG ---
webhookURL = ''
prefix = '^5[^1911^5] ^3';
roleList = {
['SAHP'] = 1,
['BCSO'] = 1,
['BCPD'] = 1,
['CO'] = 1,
['Fire & EMS'] = 1,
}
--- CODE ---
function sendMsg(src, msg)
TriggerClientEvent('chat:addMessage', src, {
args = { msg }
})
end
function sendToDisc(title, message, footer)
local embed = {}
embed = {
{
["color"] = 16711680, -- GREEN = 65280 --- RED = 16711680
["title"] = "**".. title .."**",
["description"] = "" .. message .. "",
["footer"] = {
["text"] = footer,
},
}
}
-- Start
-- TODO Input Webhook
PerformHttpRequest(webhookURL,
function(err, text, headers) end, 'POST', json.encode({username = name, embeds = embed}), { ['Content-Type'] = 'application/json' })
-- END
end
isCop = {}
AddEventHandler('playerDropped', function (reason)
-- Clear their lists
local src = source;
isCop[src] = nil;
end)
RegisterNetEvent('Badger-911:CheckPerms')
AddEventHandler('Badger-911:CheckPerms', function()
local src = source;
for k, v in ipairs(GetPlayerIdentifiers(src)) do
if string.sub(v, 1, string.len("discord:")) == "discord:" then
identifierDiscord = v
end
end
-- TriggerClientEvent("FaxDisVeh:CheckPermission:Return", src, true, false)
if identifierDiscord then
local roles = exports.discord_perms:GetRoles(src)
if not (roles == false) then
for i = 1, #roles do
for roleName, roleID in pairs(roleList) do
if tonumber(roles[i]) == tonumber(roleID) then
-- Return the index back to the Client script
isCop[tonumber(src)] = true;
print(GetPlayerName(src) .. " received Badger-911 permissions SUCCESS")
end
end
end
else
print(GetPlayerName(src) .. " did not receive Badger-911 permissions because roles == false")
end
elseif identifierDiscord == nil then
print("identifierDiscord == nil")
end
end)
locationTracker = {}
idCounter = 0;
function mod(a, b)
return a - (math.floor(a/b)*b)
end
RegisterCommand("resp", function(source, args, raw)
if (#args > 0) then
if tonumber(args[1]) ~= nil then
if locationTracker[tonumber(args[1])] ~= nil then
-- It is valid, set their waypoint
local loc = locationTracker[tonumber(args[1])]
TriggerClientEvent("Badger-911:SetWaypoint", source, loc[1], loc[2]);
sendMsg(source, "Your waypoint has been set to the situation!")
else
-- Not valid
sendMsg(source, "^1ERROR: That is not a valid situation...")
end
else
-- Not a valid number
sendMsg(source, "^1ERROR: That is not a valid number you supplied...")
end
end
end)
RegisterCommand("911", function(source, args, raw)
-- /911 command
local x, y, z = table.unpack(GetEntityCoords(GetPlayerPed(source)));
if (#args > 0) then
idCounter = idCounter + 1;
locationTracker[idCounter] = {x, y};
if mod(idCounter, 12) == 0 then
-- Is a multiple of 12 with no remainder, we can remove 6 of the last
local cout = idCounter - 12;
while cout < (idCounter - 6) do
locationTracker[cout] = nil;
cout = cout + 1;
end
idCounter = 1;
locationTracker[idCounter] = {x, y};
end
sendMsg(source, "Your 911 call has been received! The authorities are on their way!");
sendToDisc("[RESPONSE CODE: " .. idCounter .. "] " ..
"INCOMING TRANSMISSION:", table.concat(args, " "), "[" .. source .. "] " .. GetPlayerName(source))
for _, id in ipairs(GetPlayers()) do
if isCop[tonumber(id)] ~= nil and isCop[tonumber(id)] == true then
-- They are a cop, send them it
sendMsg(id, "[^7Use ^2/resp " .. idCounter .. "^7 to respond^3] " .. "^1INCOMING TRANSMISSION: ^3" .. table.concat(args, " "));
end
end
end
end)
| 36.147541
| 144
| 0.52585
|
797cd883c97ed21650c51d61baf0ec61d0aad944
| 4,787
|
php
|
PHP
|
resources/views/auth/passwords/reset.blade.php
|
CryptoTrusts/crypto
|
b85540e3cb4371af57da7be176c6e79c6f17cf71
|
[
"MIT"
] | null | null | null |
resources/views/auth/passwords/reset.blade.php
|
CryptoTrusts/crypto
|
b85540e3cb4371af57da7be176c6e79c6f17cf71
|
[
"MIT"
] | null | null | null |
resources/views/auth/passwords/reset.blade.php
|
CryptoTrusts/crypto
|
b85540e3cb4371af57da7be176c6e79c6f17cf71
|
[
"MIT"
] | 2
|
2020-01-03T14:32:07.000Z
|
2021-03-01T13:22:37.000Z
|
@include('home/assetss')
<body class="auth-page" style="background-color:#f8f8f8;">
<!-- Wrapper Starts -->
<div class="wrapper">
<div class="container user-auth" style="padding:20px;">
<div class="row">
<div class="col-sm-5 col-sm-offset-4 col-md-offset-4 col-lg-offset-4 col-md-5 col-lg-5">
<!-- Logo Starts -->
<a class="visible-xs" href="{{url('/')}}" style="text-align:center; color:#555;">
<h3>{{$settings->site_name}}</h3>
<!-- <img id="logo" class="img-responsive" src="{{ asset('images/'.$settings->logo)}}" alt="logo">-->
</a>
<!-- Logo Ends -->
@if(Session::has('message'))
<div class="row">
<div class="col-lg-12">
<div class="alert alert-danger alert-dismissable">
<button type="button" class="close" data-dismiss="alert" aria-hidden="true">×</button>
<i class="fa fa-warning"></i> {{ Session::get('message') }}
</div>
</div>
</div>
@endif
<div class="form-container">
<div>
<!-- Section Title Starts -->
<div class="row text-center">
<h2 class="title-head" style="font-size:1.5em; color:#555;">Create new password</h2>
</div>
<!-- Section Title Ends -->
<!-- Form Starts -->
@if (session('status'))
<div class="alert alert-success">
{{ session('status') }}
</div>
@endif
<form class="form-horizontal" method="POST" action="{{ route('password.request') }}">
{{ csrf_field() }}
<input type="hidden" name="token" value="{{ $token }}">
<div class="form-group{{ $errors->has('email') ? ' has-error' : '' }}">
<label for="email" class="control-label">E-Mail Address</label>
<input style="background:transparent; color:#555;" id="email" type="email" class="form-control" name="email" value="{{ $email or old('email') }}" required autofocus>
@if ($errors->has('email'))
<span class="help-block">
<strong>{{ $errors->first('email') }}</strong>
</span>
@endif
</div>
<div class="form-group{{ $errors->has('password') ? ' has-error' : '' }}">
<label for="password" class="control-label">Password</label>
<input style="background:transparent; color:#555;" id="password" type="password" class="form-control" name="password" required>
@if ($errors->has('password'))
<span class="help-block">
<strong>{{ $errors->first('password') }}</strong>
</span>
@endif
</div>
<div class="form-group{{ $errors->has('password_confirmation') ? ' has-error' : '' }}">
<label for="password-confirm" class="control-label">Confirm Password</label>
<input style="background:transparent; color:#555;" id="password-confirm" type="password" class="form-control" name="password_confirmation" required>
@if ($errors->has('password_confirmation'))
<span class="help-block">
<strong>{{ $errors->first('password_confirmation') }}</strong>
</span>
@endif
</div>
<div class="form-group">
<button type="submit" class="btn btn-primary">
Reset Password
</button>
</div>
</form>
<!-- Form Ends -->
</div>
</div>
<!-- Copyright Text Starts -->
<p class="text-center copyright-text">Copyright © 2018 {{$settings->site_name}} All Rights Reserved</p>
<!-- Copyright Text Ends -->
</div>
</div>
</div>
</div>
<!-- Wrapper Ends -->
</body>
</html>
| 42.741071
| 197
| 0.414665
|
3a9277f95629b99c82898455e820272cb21fc17c
| 832
|
go
|
Go
|
mmap_back_test.go
|
nikandfor/xrain
|
f669ab2645f2f9c534e8301e027ea47cc5114792
|
[
"MIT"
] | null | null | null |
mmap_back_test.go
|
nikandfor/xrain
|
f669ab2645f2f9c534e8301e027ea47cc5114792
|
[
"MIT"
] | null | null | null |
mmap_back_test.go
|
nikandfor/xrain
|
f669ab2645f2f9c534e8301e027ea47cc5114792
|
[
"MIT"
] | null | null | null |
// +build linux darwin
package xrain
import (
"os"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestBackMmap(t *testing.T) {
fn := t.TempDir() + "/xrain_mmap_test"
// fn := "/tmp/xrain_mmap_test"
t.Logf("file: %v", fn)
m, err := Mmap(fn, os.O_CREATE|os.O_RDWR|os.O_TRUNC)
require.NoError(t, err)
t.Logf("mmap: %+v %x %x", m, os.O_RDWR, os.O_WRONLY)
assert.Equal(t, int64(0), m.Size())
assert.Panics(t, func() { m.Access(0, 0x10) })
err = m.Truncate(0x400)
require.NoError(t, err)
assert.Equal(t, int64(0x400), m.Size())
p := m.Access(0, 0x100)
copy(p, "header")
m.Unlock(p)
p, p2 := m.Access2(0, 0x100, 0x100, 0x100)
copy(p2, p[:0x10])
m.Unlock2(p, p2)
p = m.Access(0x100, 0x10)
assert.Equal(t, []byte("header"), p[:len("header")])
m.Unlock(p)
}
| 18.488889
| 54
| 0.63101
|
14b018bf58ae9c0da87ca0b0b955fff26e87de96
| 32
|
ts
|
TypeScript
|
packages/client-common/src/version.ts
|
vschoettke/algoliasearch-client-javascript
|
43aef83c9a976bf1eb6163d10b9c0d1046e81011
|
[
"MIT"
] | null | null | null |
packages/client-common/src/version.ts
|
vschoettke/algoliasearch-client-javascript
|
43aef83c9a976bf1eb6163d10b9c0d1046e81011
|
[
"MIT"
] | null | null | null |
packages/client-common/src/version.ts
|
vschoettke/algoliasearch-client-javascript
|
43aef83c9a976bf1eb6163d10b9c0d1046e81011
|
[
"MIT"
] | null | null | null |
export const version = '4.2.0';
| 16
| 31
| 0.65625
|
2c587c7c1bcbada26c08c51bec23b813908451c3
| 1,726
|
py
|
Python
|
pandaserver/daemons/scripts/panda_activeusers_query.py
|
PanDAWMS/panda-server
|
41442ac3a2db95c234ef2b1f417205e7873b3312
|
[
"Apache-2.0"
] | 7
|
2015-03-30T14:15:35.000Z
|
2021-12-22T06:48:22.000Z
|
pandaserver/daemons/scripts/panda_activeusers_query.py
|
PanDAWMS/panda-server
|
41442ac3a2db95c234ef2b1f417205e7873b3312
|
[
"Apache-2.0"
] | 16
|
2015-06-01T13:48:01.000Z
|
2022-02-08T15:03:32.000Z
|
pandaserver/daemons/scripts/panda_activeusers_query.py
|
PanDAWMS/panda-server
|
41442ac3a2db95c234ef2b1f417205e7873b3312
|
[
"Apache-2.0"
] | 15
|
2015-03-02T08:57:35.000Z
|
2022-03-01T09:48:45.000Z
|
import re
from pandacommon.pandalogger.PandaLogger import PandaLogger
from pandacommon.pandalogger.LogWrapper import LogWrapper
from pandaserver.config import panda_config
from pandaserver.proxycache import panda_proxy_cache
# logger
_logger = PandaLogger().getLogger('panda_activeusers_query')
# main
def main(tbuf=None, **kwargs):
# logger
tmpLog = LogWrapper(_logger)
tmpLog.debug("================= start ==================")
# instantiate TB
if tbuf is None:
from pandaserver.taskbuffer.TaskBuffer import taskBuffer
taskBuffer.init(panda_config.dbhost,panda_config.dbpasswd,nDBConnection=1)
else:
taskBuffer = tbuf
# instantiate MyProxy I/F
my_proxy_interface_instance = panda_proxy_cache.MyProxyInterface()
# roles
if hasattr(panda_config,'proxy_cache_roles'):
roles = panda_config.proxy_cache_roles.split(',')
else:
roles = ['atlas','atlas:/atlas/Role=production','atlas:/atlas/Role=pilot']
# get users
sql = 'select distinct DN FROM ATLAS_PANDAMETA.users WHERE GRIDPREF LIKE :patt'
varMap = {}
varMap[':patt'] = '%p%'
tmpStat,tmpRes = taskBuffer.querySQLS(sql,varMap)
for realDN, in tmpRes:
if realDN is None:
continue
realDN = re.sub('/CN=limited proxy','',realDN)
realDN = re.sub('(/CN=proxy)+','',realDN)
realDN = re.sub('(/CN=\d+)+$','',realDN)
name = taskBuffer.cleanUserID(realDN)
# check proxy
tmpLog.debug("check proxy cache for {}".format(name))
for role in roles:
my_proxy_interface_instance.checkProxy(realDN, role=role, name=name)
tmpLog.debug("done")
# run
if __name__ == '__main__':
main()
| 30.821429
| 83
| 0.662225
|
c2892f759fb961608e49275cc383db711d41e1bf
| 2,291
|
h
|
C
|
ompi/mpi/fortran/use-mpi-f08-desc/mpi-f-interfaces-bind.h
|
urids/XSCALAMPI
|
38624f682211d55c047183637fed8dbcc09f6d74
|
[
"BSD-3-Clause-Open-MPI"
] | 1
|
2015-12-16T08:16:23.000Z
|
2015-12-16T08:16:23.000Z
|
ompi/mpi/fortran/use-mpi-f08-desc/mpi-f-interfaces-bind.h
|
urids/XSCALAMPI
|
38624f682211d55c047183637fed8dbcc09f6d74
|
[
"BSD-3-Clause-Open-MPI"
] | null | null | null |
ompi/mpi/fortran/use-mpi-f08-desc/mpi-f-interfaces-bind.h
|
urids/XSCALAMPI
|
38624f682211d55c047183637fed8dbcc09f6d74
|
[
"BSD-3-Clause-Open-MPI"
] | 3
|
2015-11-29T06:00:56.000Z
|
2021-03-29T07:03:29.000Z
|
! -*- f90 -*-
!
! Copyright (c) 2009-2012 Cisco Systems, Inc. All rights reserved.
! Copyright (c) 2009-2012 Los Alamos National Security, LLC.
! All rights reserved.
! $COPYRIGHT$
!
! This file provides the interface specifications for the MPI Fortran
! API bindings. It effectively maps between public names ("MPI_Init")
! and the back-end implementation subroutine name (e.g., "ompi_init_f").
interface
subroutine ompi_comm_rank_f(comm,rank,ierror) &
BIND(C, name="ompi_comm_rank_f")
use :: mpi_f08_types
implicit none
INTEGER, INTENT(IN) :: comm
INTEGER, INTENT(OUT) :: rank
INTEGER, INTENT(OUT) :: ierror
end subroutine ompi_comm_rank_f
subroutine ompi_comm_size_f(comm,size,ierror) &
BIND(C, name="ompi_comm_size_f")
use :: mpi_f08_types
implicit none
INTEGER, INTENT(IN) :: comm
INTEGER, INTENT(OUT) :: size
INTEGER, INTENT(OUT) :: ierror
end subroutine ompi_comm_size_f
subroutine ompi_finalize_f(ierror) &
BIND(C, name="ompi_finalize_f")
use :: mpi_f08_types
implicit none
INTEGER, INTENT(OUT) :: ierror
end subroutine ompi_finalize_f
subroutine ompi_init_f(ierror) &
BIND(C, name="ompi_init_f")
use :: mpi_f08_types
implicit none
INTEGER, INTENT(OUT) :: ierror
end subroutine ompi_init_f
! ompi_send_f/ompi_recv_f interfaces not needed as they are called from C
!
subroutine ompi_type_commit_f(datatype,ierror) &
BIND(C, name="ompi_type_commit_f")
use :: mpi_f08_types
implicit none
INTEGER, INTENT(INOUT) :: datatype
INTEGER, INTENT(OUT) :: ierror
end subroutine ompi_type_commit_f
subroutine ompi_type_contiguous_f(count,oldtype,newtype,ierror) &
BIND(C, name="ompi_type_contiguous_f")
use :: mpi_f08_types
implicit none
INTEGER, INTENT(IN) :: count
INTEGER, INTENT(IN) :: oldtype
INTEGER, INTENT(OUT) :: newtype
INTEGER, INTENT(OUT) :: ierror
end subroutine ompi_type_contiguous_f
subroutine ompi_type_vector_f(count,blocklength,stride,oldtype,newtype,ierror) &
BIND(C, name="ompi_type_vector_f")
use :: mpi_f08_types
implicit none
INTEGER, INTENT(IN) :: count, blocklength, stride
INTEGER, INTENT(IN) :: oldtype
INTEGER, INTENT(OUT) :: newtype
INTEGER, INTENT(OUT) :: ierror
end subroutine ompi_type_vector_f
end interface
| 29
| 80
| 0.72763
|
a31c74dbcdf03659aa132c0e9c5aa0bf17d13386
| 2,521
|
java
|
Java
|
subprojects/execution/src/main/java/org/gradle/internal/execution/history/impl/DefaultPreviousExecutionState.java
|
candrews/gradle
|
07c85008a0930ef74c9fd9d824eeddd5f3f0be03
|
[
"Apache-2.0"
] | 2
|
2015-12-10T21:06:45.000Z
|
2016-08-04T19:35:30.000Z
|
subprojects/execution/src/main/java/org/gradle/internal/execution/history/impl/DefaultPreviousExecutionState.java
|
candrews/gradle
|
07c85008a0930ef74c9fd9d824eeddd5f3f0be03
|
[
"Apache-2.0"
] | null | null | null |
subprojects/execution/src/main/java/org/gradle/internal/execution/history/impl/DefaultPreviousExecutionState.java
|
candrews/gradle
|
07c85008a0930ef74c9fd9d824eeddd5f3f0be03
|
[
"Apache-2.0"
] | 1
|
2019-06-26T20:28:16.000Z
|
2019-06-26T20:28:16.000Z
|
/*
* Copyright 2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.internal.execution.history.impl;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSortedMap;
import org.gradle.caching.internal.origin.OriginMetadata;
import org.gradle.internal.execution.history.PreviousExecutionState;
import org.gradle.internal.fingerprint.FileCollectionFingerprint;
import org.gradle.internal.snapshot.FileSystemSnapshot;
import org.gradle.internal.snapshot.ValueSnapshot;
import org.gradle.internal.snapshot.impl.ImplementationSnapshot;
/**
 * Immutable value object describing the state captured from a previous execution
 * of a unit of work: the implementation/input snapshots (held by the superclass),
 * the snapshots of the output files the work produced, the origin metadata of the
 * result, and whether the execution completed successfully.
 */
public class DefaultPreviousExecutionState extends AbstractInputExecutionState<FileCollectionFingerprint> implements PreviousExecutionState {
    // Snapshots of the output files as they were after the previous execution.
    private final ImmutableSortedMap<String, FileSystemSnapshot> outputFilesProducedByWork;
    // Provenance of the previous result (where/when it was produced).
    private final OriginMetadata originMetadata;
    // Whether the previous execution finished successfully.
    private final boolean successful;
    /**
     * @param originMetadata           provenance of the previous result
     * @param implementation           snapshot of the work's implementation
     * @param additionalImplementations snapshots of additional participating implementations
     * @param inputProperties          value snapshots of the input properties
     * @param inputFileProperties      fingerprints of the input file properties
     * @param outputFilesProducedByWork snapshots of the produced output files
     * @param successful               whether the execution succeeded
     */
    public DefaultPreviousExecutionState(
        OriginMetadata originMetadata,
        ImplementationSnapshot implementation,
        ImmutableList<ImplementationSnapshot> additionalImplementations,
        ImmutableSortedMap<String, ValueSnapshot> inputProperties,
        ImmutableSortedMap<String, FileCollectionFingerprint> inputFileProperties,
        ImmutableSortedMap<String, FileSystemSnapshot> outputFilesProducedByWork,
        boolean successful
    ) {
        super(implementation, additionalImplementations, inputProperties, inputFileProperties);
        this.outputFilesProducedByWork = outputFilesProducedByWork;
        this.originMetadata = originMetadata;
        this.successful = successful;
    }
    @Override
    public ImmutableSortedMap<String, FileSystemSnapshot> getOutputFilesProducedByWork() {
        return outputFilesProducedByWork;
    }
    @Override
    public OriginMetadata getOriginMetadata() {
        return originMetadata;
    }
    @Override
    public boolean isSuccessful() {
        return successful;
    }
}
| 40.015873
| 141
| 0.779453
|
56b5744240b7904f5d4437567a6151fd3261c5a7
| 11,498
|
rs
|
Rust
|
src/lexer.rs
|
BracketMaster/rtlil_frontend
|
041d40c8151217cdf3d85f0bb895e091efceea0a
|
[
"MIT"
] | null | null | null |
src/lexer.rs
|
BracketMaster/rtlil_frontend
|
041d40c8151217cdf3d85f0bb895e091efceea0a
|
[
"MIT"
] | null | null | null |
src/lexer.rs
|
BracketMaster/rtlil_frontend
|
041d40c8151217cdf3d85f0bb895e091efceea0a
|
[
"MIT"
] | null | null | null |
// Copyright (c) 2020 xhe
// Copyright (c) 2020 Yehowshua
//! Lexer for RTLIL files.
use super::ast::*;
use std::fmt;
pub type Error = anyhow::Error;
/// A position within the input character stream.
#[derive(Debug, Clone)]
pub struct Location {
    offset: usize, // 0-based absolute character offset (see Default)
    line: u32,     // 1-based line number
    column: u32,   // 1-based column number
}
impl Default for Location {
fn default() -> Self {
Self {
offset: 0,
line: 1,
column: 1,
}
}
}
impl Location {
    /// A location at the very start of the input (delegates to `Default`).
    pub fn new() -> Self {
        Default::default()
    }
    /// Advance past one character on the current line.
    pub fn bump(&mut self) {
        self.column += 1;
        self.offset += 1;
    }
    /// Move to the beginning of the next line; the offset is not touched.
    pub fn bump_line(&mut self) {
        self.column = 1;
        self.line += 1;
    }
}
impl fmt::Display for Location {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(
f,
"line {} column {} (offset {})",
self.line, self.column, self.offset
)
}
}
/// A single lexical token of an RTLIL file.
#[derive(Debug, Clone)]
pub enum Token {
    Ident(String),   // identifier beginning with `\` or `$`, or a `.`-prefixed index
    Literal(String), // double-quoted string literal, escapes already resolved
    Int(i64),        // decimal integer, possibly negative
    Signal(Signal),  // bit-vector constant, e.g. `4'0101`
    // Punctuation
    LBracket,
    RBracket,
    LBrace,
    RBrace,
    Comma,
    Colon,
    // Keywords (one variant per RTLIL keyword recognized by the lexer)
    Autoidx,
    Module,
    Attribute,
    Parameter,
    Signed,
    Real,
    Wire,
    Memory,
    Width,
    Upto,
    Offset,
    Size,
    Input,
    Output,
    Inout,
    Cell,
    Connect,
    Switch,
    Case,
    Assign,
    Sync,
    Low,
    High,
    Posedge,
    Negedge,
    Edge,
    Always,
    Global,
    Init,
    Update,
    Process,
    End,
}
impl fmt::Display for Token {
    /// Tokens display exactly like their `Debug` representation.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let rendered = format!("{:?}", self);
        f.write_str(&rendered)
    }
}
/// A lexer for RTLIL files.
pub struct Lexer<I> {
input: I,
peek: [Option<char>; 2],
loc: Location,
}
impl<I: Iterator<Item = char>> Lexer<I> {
    /// Create a new lexer.
    ///
    /// The two `bump()` calls pre-fill the lookahead window.
    /// NOTE(review): each of those calls also advances `loc`, so the very
    /// first token is stamped at offset 2 / column 3 rather than the true
    /// input start — looks like an off-by-two in reported locations; confirm
    /// before relying on exact positions.
    pub fn new(input: I) -> Self {
        let mut lexer = Self {
            input,
            peek: [None, None],
            loc: Location::new(),
        };
        lexer.bump();
        lexer.bump();
        lexer
    }
    /// Advance the lexer to the next character.
    /// Shifts the lookahead window left and advances the location counter.
    fn bump(&mut self) {
        self.peek[0] = self.peek[1];
        self.peek[1] = self.input.next();
        self.loc.bump();
    }
}
impl<I: Iterator<Item = char>> Iterator for Lexer<I> {
    type Item = Result<(Location, Token, Location), Error>;
    /// Produce the next token as a `(start, token, end)` triple, or `None`
    /// at end of input. Malformed input panics with a located message.
    fn next(&mut self) -> Option<Self::Item> {
        loop {
            let loc = self.loc.clone();
            match (self.peek[0], self.peek[1]) {
                // skip single-line comment
                (Some('#'), _) => {
                    while self.peek[0].map_or(false, |c| c != '\n' && c != '\r') {
                        self.bump();
                    }
                    continue;
                }
                // skip whitespace
                (Some(c), _) if c == ' ' || c == '\t' => {
                    self.bump();
                    continue;
                }
                // line break: LF, CR, or a CR/LF pair (consumed as one break)
                (Some(c), d) if c == '\r' || c == '\n' => {
                    self.bump();
                    self.loc.bump_line();
                    if c == '\r' && d.map_or(false, |d| d == '\n') {
                        self.bump();
                    }
                    continue;
                }
                (Some('['), _) => {
                    self.bump();
                    return Some(Ok((loc, Token::LBracket, self.loc.clone())));
                }
                (Some(']'), _) => {
                    self.bump();
                    return Some(Ok((loc, Token::RBracket, self.loc.clone())));
                }
                (Some('{'), _) => {
                    self.bump();
                    return Some(Ok((loc, Token::LBrace, self.loc.clone())));
                }
                (Some('}'), _) => {
                    self.bump();
                    return Some(Ok((loc, Token::RBrace, self.loc.clone())));
                }
                (Some(','), _) => {
                    self.bump();
                    return Some(Ok((loc, Token::Comma, self.loc.clone())));
                }
                (Some(':'), _) => {
                    self.bump();
                    return Some(Ok((loc, Token::Colon, self.loc.clone())));
                }
                // Literal: double-quoted, with backslash escapes kept verbatim
                // (the backslash is dropped, the escaped character kept as-is)
                (Some('"'), _) => {
                    self.bump();
                    let mut v = String::new();
                    while self.peek[0].map_or(false, |c| c != '"') {
                        let mut c = self.peek[0].unwrap();
                        if c == '\\' {
                            self.bump();
                            if self.peek[0].is_none() {
                                // fixed message: was "expect esaped character"
                                panic!("syntax error: {}: expected escaped character", self.loc);
                            }
                            c = self.peek[0].unwrap()
                        }
                        v.push(c);
                        self.bump();
                    }
                    self.bump();
                    return Some(Ok((loc, Token::Literal(v), self.loc.clone())));
                }
                // Identifier: `$`- or `\`-prefixed, runs to the next whitespace
                (Some(c), _) if c == '$' || c == '\\' => {
                    let mut v = String::new();
                    v.push(c);
                    self.bump();
                    while self.peek[0].map_or(false, |c| !c.is_ascii_whitespace()) {
                        v.push(self.peek[0].unwrap());
                        self.bump();
                    }
                    return Some(Ok((loc, Token::Ident(v), self.loc.clone())));
                }
                // `.`-prefixed identifier followed by digits (e.g. a case index)
                (Some(c), _) if c == '.' => {
                    let mut v = String::new();
                    v.push(c);
                    self.bump();
                    while self.peek[0].map_or(false, |c| c.is_ascii_digit()) {
                        v.push(self.peek[0].unwrap());
                        self.bump();
                    }
                    return Some(Ok((loc, Token::Ident(v), self.loc.clone())));
                }
                // Keyword: a run of ASCII letters matched against the fixed set
                (Some(c), _) if c.is_ascii_alphabetic() => {
                    let mut v = String::new();
                    while self.peek[0].map_or(false, |c| c.is_ascii_alphabetic()) {
                        v.push(self.peek[0].unwrap());
                        self.bump();
                    }
                    return match v.as_str() {
                        "autoidx" => Some(Ok((loc, Token::Autoidx, self.loc.clone()))),
                        "module" => Some(Ok((loc, Token::Module, self.loc.clone()))),
                        "attribute" => Some(Ok((loc, Token::Attribute, self.loc.clone()))),
                        "parameter" => Some(Ok((loc, Token::Parameter, self.loc.clone()))),
                        "signed" => Some(Ok((loc, Token::Signed, self.loc.clone()))),
                        "real" => Some(Ok((loc, Token::Real, self.loc.clone()))),
                        "wire" => Some(Ok((loc, Token::Wire, self.loc.clone()))),
                        "memory" => Some(Ok((loc, Token::Memory, self.loc.clone()))),
                        "width" => Some(Ok((loc, Token::Width, self.loc.clone()))),
                        "upto" => Some(Ok((loc, Token::Upto, self.loc.clone()))),
                        "offset" => Some(Ok((loc, Token::Offset, self.loc.clone()))),
                        "size" => Some(Ok((loc, Token::Size, self.loc.clone()))),
                        "input" => Some(Ok((loc, Token::Input, self.loc.clone()))),
                        "output" => Some(Ok((loc, Token::Output, self.loc.clone()))),
                        "inout" => Some(Ok((loc, Token::Inout, self.loc.clone()))),
                        "cell" => Some(Ok((loc, Token::Cell, self.loc.clone()))),
                        "connect" => Some(Ok((loc, Token::Connect, self.loc.clone()))),
                        "switch" => Some(Ok((loc, Token::Switch, self.loc.clone()))),
                        "case" => Some(Ok((loc, Token::Case, self.loc.clone()))),
                        "assign" => Some(Ok((loc, Token::Assign, self.loc.clone()))),
                        "sync" => Some(Ok((loc, Token::Sync, self.loc.clone()))),
                        "low" => Some(Ok((loc, Token::Low, self.loc.clone()))),
                        "high" => Some(Ok((loc, Token::High, self.loc.clone()))),
                        "posedge" => Some(Ok((loc, Token::Posedge, self.loc.clone()))),
                        "negedge" => Some(Ok((loc, Token::Negedge, self.loc.clone()))),
                        "edge" => Some(Ok((loc, Token::Edge, self.loc.clone()))),
                        "always" => Some(Ok((loc, Token::Always, self.loc.clone()))),
                        "global" => Some(Ok((loc, Token::Global, self.loc.clone()))),
                        "init" => Some(Ok((loc, Token::Init, self.loc.clone()))),
                        "update" => Some(Ok((loc, Token::Update, self.loc.clone()))),
                        "process" => Some(Ok((loc, Token::Process, self.loc.clone()))),
                        "end" => Some(Ok((loc, Token::End, self.loc.clone()))),
                        _ => {
                            // fixed message: was "except a keyword, but get {}"
                            panic!(
                                "syntax error: {}: expected a keyword, but got {}",
                                self.loc, v,
                            );
                        }
                    };
                }
                // Integer, or (when followed by `'`) a signal vector constant
                (Some(c), _) if c.is_ascii_digit() || c == '-' => {
                    let mut v = String::new();
                    if c == '-' {
                        v.push(c);
                        self.bump();
                    }
                    while self.peek[0].map_or(false, |c| c.is_ascii_digit()) {
                        v.push(self.peek[0].unwrap());
                        self.bump();
                    }
                    let num = v.parse::<i64>();
                    if num.is_err() {
                        panic!("syntax error: {}: {}", self.loc, num.unwrap_err(),);
                    }
                    let num = num.unwrap();
                    // A negative number, or one not followed by `'`, is a plain Int.
                    if c == '-' || self.peek[0].map_or(false, |c| c != '\'') {
                        return Some(Ok((loc, Token::Int(num), self.loc.clone())));
                    }
                    // it is signal vector: `<width>'<bits>` where bits are 01xzm-
                    self.bump();
                    let mut r = Vec::with_capacity(num as usize);
                    loop {
                        let s = self.peek[0];
                        if s.is_none() {
                            break;
                        }
                        let c = match s.unwrap() {
                            '0' => State::S0,
                            '1' => State::S1,
                            'x' => State::Sx,
                            'z' => State::Sz,
                            'm' => State::Sm,
                            '-' => State::Sa,
                            _ => break,
                        };
                        r.push(c);
                        self.bump();
                    }
                    return Some(Ok((
                        loc,
                        Token::Signal(Signal::new(num, r)),
                        self.loc.clone(),
                    )));
                }
                // End of file.
                (None, _) => return None,
                (Some(c), _) => panic!("syntax error: {}: unexpected \"{}\"", self.loc, c),
            }
        }
    }
}
| 33.424419
| 94
| 0.365803
|
a3dc5de4bac9350a58d4ccb07a67b14f605968e0
| 7,121
|
java
|
Java
|
src/Reduction.java
|
wata-orz/fvs
|
610f53cb4fe898ed4be7fc0ac414781c4989aa5b
|
[
"MIT"
] | 9
|
2016-08-10T10:57:38.000Z
|
2021-11-03T20:25:48.000Z
|
src/Reduction.java
|
wata-orz/fvs
|
610f53cb4fe898ed4be7fc0ac414781c4989aa5b
|
[
"MIT"
] | null | null | null |
src/Reduction.java
|
wata-orz/fvs
|
610f53cb4fe898ed4be7fc0ac414781c4989aa5b
|
[
"MIT"
] | 2
|
2016-12-02T10:28:08.000Z
|
2019-06-20T16:52:40.000Z
|
import static java.lang.Math.*;
import static java.util.Arrays.*;
import tc.wata.data.*;
import tc.wata.debug.*;
/**
 * Graph reduction rules run before/around the search: each rule either moves
 * vertices into the solution set (marked 'S'), eliminates/contracts them into
 * the forest side (marked 'F'), or deletes edges, shrinking the instance.
 * reduce() applies the enabled rules until none of them changes the graph.
 * NOTE(review): the correctness arguments for the individual rules are not
 * reproduced here; only the observable mechanics are documented.
 */
public class Reduction {
	// Rule groups enabled: >= 1 runs deg2; >= 3 additionally runs
	// deg2Ex, degLB, dominate and bridge.
	public static int LEVEL = 3;
	// Maximum neighborhood size examined by deg2Ex (bounds the bitmask search).
	public static int D = 6;
	/**
	 * Repeatedly applies the enabled reduction rules to g until a fixpoint
	 * is reached (no rule reports a change).
	 *
	 * @param g  graph to reduce in place
	 * @param ub current upper bound on the solution size (used by degLB)
	 */
	public static void reduce(Graph g, int ub) {
		for (;;) {
			if (LEVEL >= 1) deg2(g);
			if (LEVEL >= 3) {
				if (deg2Ex(g)) continue;
				if (degLB(g, ub)) continue;
				if (dominate(g)) continue;
				if (bridge(g)) continue;
			}
			break;
		}
	}
	/**
	 * d(v) <= 2 => v in F
	 *
	 * Additionally, any vertex with an 'F'-marked second neighbor (double
	 * edge to F) is forced into S. Returns true iff the vertex count changed.
	 */
	static boolean deg2(Graph g) {
		int oldN = g.n();
		int[] N2 = new int[g.n];
		for (;;) {
			boolean ok = true;
			loop : for (int v = 0; v < g.n; v++) if (g.used[v] == 0) {
				int p = g.N2(v, N2);
				// A second-neighborhood member already in F forces v into S.
				for (int i = 0; i < p; i++) if (g.used[N2[i]] == 'F') {
					g.setS(v);
					ok = false;
					continue loop;
				}
				// Degree <= 2 vertices can be eliminated (contracted away).
				if (g.adj[v].length <= 2) {
					g.eliminate(v);
					ok = false;
				}
			}
			if (ok) break;
		}
		int newN = g.n();
		return oldN != newN;
	}
	/**
	 * Extended small-degree rule: for vertices with at most D neighbors,
	 * searches subsets of the neighborhood (bitmask BFS over `que`/`checked`)
	 * to decide whether v can be eliminated or must go into S.
	 * Returns true iff the vertex count changed.
	 * NOTE(review): the subset conditions below encode case analysis from the
	 * original authors; the exact invariants are assumed, not re-derived.
	 */
	static boolean deg2Ex(Graph g) {
		int oldN = g.n();
		int[] N = new int[g.n];
		int[][] h = new int[D][D];
		boolean[] inN2 = new boolean[D];
		boolean[] checked = new boolean[1 << D];
		int[] que = new int[1 << D];
		boolean[] adjS = new boolean[g.n];
		// adjS marks vertices adjacent to some 'F'-marked vertex.
		for (int i = 0; i < g.n; i++) if (g.used[i] == 'F' && g.adj[i].length > 0) {
			for (int v : g.adj[i]) adjS[v] = true;
		}
		int qs = 0, qt = 0;
		loop : for (int v = 0; v < g.n; v++) if (g.used[v] == 0) {
			// Reset the subset queue from the previous vertex.
			for (int i = qs; i < qt; i++) checked[que[i]] = false;
			qs = qt = 0;
			int d = g.N(v, N);
			if (d > D) continue;
			int s = -1;
			// Classify neighbors: double edges (inN2) and an F-neighbor index s.
			for (int i = 0, p = 0; p < d; i++, p++) {
				if (i + 1 < g.adj[v].length && g.adj[v][i] == g.adj[v][i + 1]) {
					if (g.used[g.adj[v][i]] == 'F') {
						g.setS(v);
						continue loop;
					}
					inN2[p] = true;
					i++;
				} else {
					if (g.used[g.adj[v][i]] == 'F') s = p;
					inN2[p] = false;
				}
			}
			// h[i][j]: edge multiplicity between neighbors i and j.
			for (int i = 0; i < d; i++) {
				for (int j = 0; j < i; j++) h[i][j] = h[j][i] = g.hasEdge(N[i], N[j]);
			}
			if (s >= 0) {
				que[qt++] = 1 << s;
			} else {
				for (int i = 0; i < d; i++) que[qt++] = 1 << i;
			}
			// BFS over neighbor subsets of size <= 3.
			while (qs < qt) {
				int F = que[qs++];
				int size = Integer.bitCount(F);
				Debug.check(size <= 3);
				checked[F] = false;
				boolean independent = false;
				if (size == 2) {
					loop2 : for (int i = 0; i < d; i++) if ((F >> i & 1) != 0) {
						for (int j = i + 1; j < d; j++) if ((F >> j & 1) != 0) {
							if (h[i][j] == 0) independent = true;
							break loop2;
						}
					}
				}
				loop2 : for (int i = 0; i < d; i++) if ((F >> i & 1) == 0 && !checked[F | (1 << i)]) {
					int count = 0;
					for (int j = 0; j < d && count <= 1; j++) if ((F >> j & 1) != 0 && h[i][j] > 0) {
						if (adjS[N[i]] && adjS[N[j]]) continue loop2;
						count += h[i][j];
					}
					if (count <= 1) {
						// A growable subset reaching size 4 (or an independent
						// extension) means v cannot be eliminated.
						if (size + 1 >= 4 || independent && count == 0) continue loop;
						checked[F | 1 << i] = true;
						que[qt++] = F | 1 << i;
					}
				}
				int w = -1;
				for (int i = 0; i < d; i++) if ((F >> i & 1) != 0 && inN2[i]) {
					if (w >= 0) continue loop;
					w = i;
				}
				if (size == 3) {
					if (w >= 0) {
						boolean b = false;
						for (int i = 0; i < d; i++) if ((F >> i & 1) != 0 && i != w) {
							if (h[w][i] == 0) {
								if (b || s >= 0 || !adjS[N[w]] || !adjS[N[i]]) continue loop;
								b = true;
							}
						}
					} else if (s >= 0) {
						loop2 : for (int i = 0; i < d; i++) if ((F >> i & 1) != 0 && i != s) {
							for (int j = i + 1; j < d; j++) if ((F >> j & 1) != 0 && j != s) {
								if (h[i][j] == 0) continue loop;
								break loop2;
							}
						}
					}
				}
			}
			// All subset checks passed: v is safe to eliminate.
			if (adjS[v]) {
				for (int u : g.adj[v]) if (g.used[u] == 0) adjS[u] = true;
			}
			g.eliminate(v);
		}
		int newN = g.n();
		return oldN != newN;
	}
	/**
	 * Degree-based lower-bound rule: using the sorted degree sequence and the
	 * current upper bound ub, forces into S vertices whose exclusion would
	 * push the bound past ub, and contracts low-degree F-adjacent vertices.
	 * Returns true iff the vertex count changed.
	 */
	static boolean degLB(Graph g, int ub) {
		int oldN = g.n();
		int[] deg = new int[g.n];
		int n = 0, p = 0, m = 0;
		for (int i = 0; i < g.n; i++) if (g.adj[i].length > 0) {
			n++;
			m += g.adj[i].length;
			if (g.used[i] != 'F') deg[p++] = g.adj[i].length;
		}
		m /= 2; // each edge was counted from both endpoints
		if (p == 0 || g.k + 1 >= ub || g.k + p < ub) return false;
		sort(deg, 0, p);
		// sum of the (ub - g.k - 1) largest degrees among non-F vertices
		int sum = 0, k = 1;
		for ( ; k + g.k < ub; k++) {
			sum += deg[p - k];
		}
		boolean[] adjF = new boolean[g.n];
		int s = -1;
		for (int i = 0; i < g.n; i++) if (g.adj[i].length > 0 && g.used[i] == 'F') s = i;
		if (s >= 0) {
			for (int i : g.adj[s]) adjF[i] = true;
		}
		IntArray a = new IntArray(), b = new IntArray();
		for (int v = 0; v < g.n; v++) if (g.used[v] == 0) {
			int sum2 = sum + min(0, deg[p - k] - g.adj[v].length);
			for (int i = 0; i < g.adj[v].length; i++) {
				int u = g.adj[v][i];
				if (i + 1 < g.adj[v].length && u == g.adj[v][i + 1]) {
					i++;
					sum2 += min(0, g.adj[u].length - deg[p - k]);
				} else if (adjF[v] && adjF[u]) {
					sum2 += min(0, g.adj[u].length - deg[p - k]);
				}
			}
			if (sum2 - (ub - g.k - 1) <= m - n) {
				a.add(v); // forced into S
			}
		}
		for (int i = 0; i < g.n; i++) if (adjF[i] && g.adj[i].length < deg[p - k + 1] && g.used[i] != 'F') {
			if (sum + g.adj[i].length - deg[p - k + 1] - (ub - g.k - 1) <= m - n) {
				b.add(i); // contracted into the F-vertex s
			}
		}
		for (int i = 0; i < a.length; i++) g.setS(a.at[i]);
		for (int i = 0; i < b.length; i++) if (g.used[b.at[i]] == 0) g.contract(b.at[i], s);
		int newN = g.n();
		return oldN != newN;
	}
	/**
	 * Domination rule: if some neighbor u's neighborhood (besides v) is
	 * contained in v's, v dominates u and is forced into S.
	 * Returns true iff the vertex count changed.
	 */
	static boolean dominate(Graph g) {
		int oldN = g.n();
		boolean[] adjS = new boolean[g.n];
		for (int s = 0; s < g.n; s++) if (g.used[s] == 'F' && g.adj[s].length > 0) {
			for (int v : g.adj[s]) adjS[v] = true;
		}
		for (int v = 0; v < g.n; v++) if (g.used[v] == 0) {
			loop : for (int i = 0; i < g.adj[v].length; i++) {
				int u = g.adj[v][i];
				// Double edge (u listed twice) always qualifies; otherwise both
				// endpoints must be adjacent to the F side.
				if (i + 1 < g.adj[v].length && g.adj[v][i + 1] == u) i++;
				else if (!adjS[u] || !adjS[v]) continue;
				if (g.adj[v].length >= g.adj[u].length) {
					// Merge-scan of the two sorted adjacency lists to test
					// N(u) \ {v} subset of N(v) \ {u}.
					for (int j = 0, p = 0; j < g.adj[u].length; j++) if (g.adj[u][j] != v) {
						while (p < g.adj[v].length && (g.adj[u][j] > g.adj[v][p] || g.adj[v][p] == u)) p++;
						if (p >= g.adj[v].length || g.adj[u][j] < g.adj[v][p]) continue loop;
						p++;
					}
					g.setS(v);
					break;
				}
			}
		}
		int newN = g.n();
		return oldN != newN;
	}
	/**
	 * Bridge rule: removes edges that cross biconnected components (bridges),
	 * plus edges to neighbors whose other connections are all double edges.
	 * Returns true iff any edge was removed.
	 */
	static boolean bridge(Graph g) {
		boolean reduced = false;
		Biconnected bi = new Biconnected(g.adj);
		IntArray tmp = new IntArray();
		for (int v = 0; v < g.n; v++) if (g.adj[v].length > 0) {
			tmp.length = 0;
			for (int i = 0; i < g.adj[v].length; i++) {
				int u = g.adj[v][i];
				// Skip double edges (they cannot be bridges).
				if (i + 1 < g.adj[v].length && g.adj[v][i + 1] == u) {
					i++;
					continue;
				}
				if (bi.bi[v] != bi.bi[u]) {
					reduced = true;
					tmp.add(u);
				}
			}
			for (int i = 0; i < tmp.length; i++) g.removeE(v, tmp.at[i]);
		}
		int[] N1 = new int[g.n];
		for (int v = 0; v < g.n; v++) if (g.adj[v].length > 0) {
			int d = g.N1(v, N1);
			tmp.length = 0;
			loop : for (int i = 0; i < d; i++) {
				// Remove the edge to N1[i] if it has a double edge to every
				// other single neighbor of v.
				for (int j = 0; j < d; j++) if (i != j) {
					if (g.hasEdge(N1[i], N1[j]) < 2) continue loop;
				}
				tmp.add(N1[i]);
				reduced = true;
			}
			for (int i = 0; i < tmp.length; i++) g.removeE(v, tmp.at[i]);
		}
		return reduced;
	}
}
| 27.388462
| 102
| 0.431821
|
641060faf8e977b1d3541829c3cb44301cdcb1b6
| 2,993
|
py
|
Python
|
admin_tools_stats/utils.py
|
afkmamunbd/django-admin-tools-stats
|
b68fa8a1ffe6d73cb7e931450d95159cdeecd5bc
|
[
"MIT"
] | 204
|
2015-04-08T22:58:27.000Z
|
2022-02-27T21:59:33.000Z
|
admin_tools_stats/utils.py
|
afkmamunbd/django-admin-tools-stats
|
b68fa8a1ffe6d73cb7e931450d95159cdeecd5bc
|
[
"MIT"
] | 39
|
2015-07-08T14:55:50.000Z
|
2021-11-22T14:31:03.000Z
|
admin_tools_stats/utils.py
|
afkmamunbd/django-admin-tools-stats
|
b68fa8a1ffe6d73cb7e931450d95159cdeecd5bc
|
[
"MIT"
] | 40
|
2015-06-30T09:53:50.000Z
|
2022-02-21T01:23:57.000Z
|
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright (C) 2011-2014 Star2Billing S.L.
#
# The Initial Developer of the Original Code is
# Arezqui Belaid <info@star2billing.com>
#
from django.contrib.auth.models import User
from django.test import TestCase, Client
from django.test.client import RequestFactory
import base64
import unittest
import inspect
def build_test_suite_from(test_cases):
"""Returns a single or group of unittest test suite(s) that's ready to be
run. The function expects a list of classes that are subclasses of
TestCase.
The function will search the module where each class resides and
build a test suite from that class and all subclasses of it.
"""
test_suites = []
for test_case in test_cases:
mod = __import__(test_case.__module__)
components = test_case.__module__.split('.')
for comp in components[1:]:
mod = getattr(mod, comp)
tests = []
for item in mod.__dict__.values():
if type(item) is type and issubclass(item, test_case):
tests.append(item)
test_suites.append(unittest.TestSuite(
map(unittest.TestLoader().loadTestsFromTestCase, tests)))
return unittest.TestSuite(test_suites)
class BaseAuthenticatedClient(TestCase):
    """Test-case base that authenticates the fixture ``admin`` user.

    Provides ``self.client`` (a logged-in test client), ``self.extra``
    (HTTP basic-auth headers for request helpers) and ``self.factory``
    (a ``RequestFactory``).
    """
    fixtures = ['auth_user']

    def setUp(self):
        """Log in the admin user and prepare the basic-auth header."""
        self.client = Client()
        self.user = User.objects.get(username='admin')
        credentials = '%s:%s' % ('admin', 'admin')
        # base64.encodestring() was removed in Python 3.9, and interpolating
        # its bytes result into a str produced "Basic b'...'" on Python 3.
        # b64encode + decode yields the correct "Basic <token>" header value
        # with no trailing newline (so no strip() is needed).
        token = base64.b64encode(credentials.encode('utf8')).decode('ascii')
        self.extra = {
            'HTTP_AUTHORIZATION': 'Basic %s' % token,
        }
        try:
            self.client.force_login(self.user)
        except AttributeError:  # Django < 1.8 has no force_login()
            login = self.client.login(username='admin', password='admin')
            self.assertTrue(login)
        self.factory = RequestFactory()
class Choice(object):
    """Base for declarative (value, label) choice enumerations.

    Subclasses declare attributes; each becomes a ``(value, label)`` pair in
    ``_data`` (labels derived from the attribute name unless an explicit
    tuple is given), and iterating the class yields those pairs.

    NOTE(review): ``class __metaclass__`` is Python 2 metaclass syntax; under
    Python 3 this nested class is inert and none of the machinery below runs.
    Confirm the supported Python version before reusing this class.
    """
    class __metaclass__(type):
        def __init__(self, *args, **kwargs):
            # Here 'self' is the class object being constructed.
            self._data = []
            for name, value in inspect.getmembers(self):
                # Skip private members and methods; everything else is a choice.
                if not name.startswith('_') and not inspect.ismethod(value):
                    if isinstance(value, tuple) and len(value) > 1:
                        # Explicit (value, label, ...) tuple from the subclass.
                        data = value
                    else:
                        # Derive a label from the attribute name: FOO_BAR -> "Foo Bar".
                        pieces = [x.capitalize() for x in name.split('_')]
                        data = (value, ' '.join(pieces))
                    self._data.append(data)
                    # Re-bind the attribute to just the value part.
                    setattr(self, name, data[0])
            self._hash = dict(self._data)
        def __iter__(self):
            # Iterating the class yields (value, label) pairs.
            for value, data in self._data:
                yield value, data
    @classmethod
    def get_value(self, key):
        # NOTE(review): first parameter is the class despite being named 'self'.
        """Return the label registered for ``key`` (KeyError if unknown)."""
        return self._hash[key]
| 32.89011
| 77
| 0.604744
|
9072990c3f8e93cd5e17b2d3d17a5f5025e0d014
| 1,015
|
h
|
C
|
src/qwr/file_helpers.h
|
razielanarki/fb2k_utils
|
bbac41b04c84f13ba6b9da56f9f00f9314ea0b90
|
[
"MIT"
] | 5
|
2020-10-02T06:30:41.000Z
|
2021-09-16T02:59:07.000Z
|
src/qwr/file_helpers.h
|
razielanarki/fb2k_utils
|
bbac41b04c84f13ba6b9da56f9f00f9314ea0b90
|
[
"MIT"
] | 1
|
2021-09-22T20:33:17.000Z
|
2021-09-22T20:33:17.000Z
|
src/qwr/file_helpers.h
|
razielanarki/fb2k_utils
|
bbac41b04c84f13ba6b9da56f9f00f9314ea0b90
|
[
"MIT"
] | 1
|
2021-09-16T02:59:19.000Z
|
2021-09-16T02:59:19.000Z
|
#pragma once
#include <shtypes.h>
#include <filesystem>
#include <optional>
#include <span>
#include <string>

// File reading/writing helpers with codepage handling, plus a thin wrapper
// around the Windows common file dialog.
namespace qwr::file
{

/// Read a file, converting its contents from `codepage` to UTF-8.
/// NOTE(review): "checkFileExistense" is misspelled ("Existence") throughout;
/// renaming is source-compatible for C++ callers, but left untouched here.
/// @throw smp::SmpException
qwr::u8string ReadFile( const std::filesystem::path& path, UINT codepage, bool checkFileExistense = true );

/// Read a file, converting its contents from `codepage` to a wide string.
/// @throw smp::SmpException
std::wstring ReadFileW( const std::filesystem::path& path, UINT codepage, bool checkFileExistense = true );

/// Write `content` to the file at `path`, optionally prefixed with a BOM.
/// @throw smp::SmpException
void WriteFile( const std::filesystem::path& path, qwr::u8string_view content, bool write_bom = true );

/// Detect the character set (codepage) of the file at `path`.
UINT DetectFileCharset( const std::filesystem::path& path );

/// Options for FileDialog(); defaults show an all-files filter.
struct FileDialogOptions
{
    std::vector<COMDLG_FILTERSPEC> filterSpec{ { L"All files", L"*.*" } };
    std::wstring defaultExtension = L"";
    std::wstring defaultFilename = L"";
    std::optional<GUID> savePathGuid; // persistence GUID for the dialog's last path
};

/// Show an open (or, when `saveFile`, a save) file dialog titled `title`.
/// Returns the chosen path, or an empty optional if the user cancelled.
std::optional<std::filesystem::path>
FileDialog( const std::wstring& title,
            bool saveFile,
            const FileDialogOptions& options = {} );

} // namespace qwr::file
| 26.710526
| 107
| 0.702463
|
15dced6718d82446af218ba61a0a6c4fe27f0be5
| 690
|
rb
|
Ruby
|
app/models/rtu_clicks_request.rb
|
MothOnMars/search-gov
|
988839ec221fa4cac97aebe6305c4a8144f00256
|
[
"ImageMagick"
] | null | null | null |
app/models/rtu_clicks_request.rb
|
MothOnMars/search-gov
|
988839ec221fa4cac97aebe6305c4a8144f00256
|
[
"ImageMagick"
] | 3
|
2020-10-23T14:52:00.000Z
|
2021-03-10T17:17:53.000Z
|
app/models/rtu_clicks_request.rb
|
MothOnMars/search-gov
|
988839ec221fa4cac97aebe6305c4a8144f00256
|
[
"ImageMagick"
] | null | null | null |
# Analytics request model computing the most frequently clicked URLs for a
# site over a date range, backed by the RTU click event index.
class RtuClicksRequest
  # Maximum number of URLs requested from the top-N aggregation.
  MAX_RESULTS = 1000
  include Virtus.model
  extend ActiveModel::Naming
  include ActiveModel::Conversion
  include RtuAnalyticsRequestable
  attr_reader :start_date, :end_date, :available_dates, :filter_bots
  attribute :site, Affiliate
  attribute :start_date, String
  attribute :end_date, String
  attribute :filter_bots, Boolean

  # Top clicked URLs for the requested range.
  # NOTE(review): @top_stats appears to be populated by RtuAnalyticsRequestable
  # (via compute_top_stats below) — confirm against that mixin.
  def top_urls
    @top_stats
  end

  private

  # Build and run a date-range top-N query over 'click' events, aggregated by
  # the clicked URL, optionally filtering bot traffic.
  def compute_top_stats
    query = DateRangeTopNQuery.new(
      site.name,
      'click',
      start_date,
      end_date,
      { field: 'params.url', size: MAX_RESULTS }
    )
    rtu_top_clicks = RtuTopClicks.new(query.body, filter_bots)
    rtu_top_clicks.top_n
  end
end
| 19.714286
| 68
| 0.72029
|
af9e1f10822bd3fa4072e0b6ce06f03c577499ad
| 1,261
|
py
|
Python
|
test_com.py
|
MarcSchaetz/CMakician
|
3dd52ca08018bf9f4a2aa2c44fd15c62f55f199b
|
[
"MIT"
] | null | null | null |
test_com.py
|
MarcSchaetz/CMakician
|
3dd52ca08018bf9f4a2aa2c44fd15c62f55f199b
|
[
"MIT"
] | null | null | null |
test_com.py
|
MarcSchaetz/CMakician
|
3dd52ca08018bf9f4a2aa2c44fd15c62f55f199b
|
[
"MIT"
] | null | null | null |
import pytest, os, shutil
from hamcrest import (assert_that, equal_to)
import cmakician as com
def test_result_yes_returns_correct_for_ascii():
    """result_yes() accepts exactly 'y'/'Y' among all ASCII characters."""
    for i in range(0, 128):
        # chr(i) yields the actual character. The original used ascii(i),
        # which returns the decimal repr string ("0".."127"), so the
        # 'y'/'Y' branch could never be exercised.
        char = chr(i)
        if char == "y" or char == "Y":
            assert com.result_yes(char) == True
        else:
            assert com.result_yes(char) == False
def test_result_yes_returns_correct_for_empty_string():
    """An empty answer (just pressing enter) counts as 'yes'."""
    result = com.result_yes("")
    assert result == True
def test_print_error_if_template_path_not_existend(capsys):
    """A missing template folder is reported with its absolute path."""
    com.list_templates("./testtemplatefolder")
    expected = "Path {0}/testtemplatefolder does not exist\n".format(os.getcwd())
    assert capsys.readouterr().out == expected
@pytest.fixture
def templatedir_setup_teardown(capsys):
    """Create a throwaway template folder with three empty template stubs,
    yield the capture fixture, and remove the folder afterwards."""
    template_dir = "./testtemplatefolder"
    if os.path.exists(template_dir):
        shutil.rmtree(template_dir)
    os.mkdir(template_dir)
    for template in ("monolith.py", "structured.py", "src_include.py"):
        with open(f"{template_dir}/{template}", "w"):
            pass
    yield capsys
    shutil.rmtree(template_dir)
def test_print_files_in_templatedir(templatedir_setup_teardown):
    """Templates in the folder are listed by stem, one per line."""
    com.list_templates("./testtemplatefolder")
    out = templatedir_setup_teardown.readouterr().out
    assert out == "monolith\nsrc_include\nstructured\n"
| 28.659091
| 81
| 0.68755
|
543d750b8c53d331300598120008bde9c5f10eab
| 2,315
|
css
|
CSS
|
css/calculator.css
|
Philmist/grbl_calc
|
11d315b86b1cdf8808179c9b3e9756544b923234
|
[
"MIT"
] | null | null | null |
css/calculator.css
|
Philmist/grbl_calc
|
11d315b86b1cdf8808179c9b3e9756544b923234
|
[
"MIT"
] | null | null | null |
css/calculator.css
|
Philmist/grbl_calc
|
11d315b86b1cdf8808179c9b3e9756544b923234
|
[
"MIT"
] | null | null | null |
/* vim:set sts=4 sw=4 ts=4 expandtab: */
/*
 * Styles for the calculator component.
 *
 * For use with CSS Modules.
 */
/* Overall screen layout */
/* The whole app */
.app {
    font-size: 100%;
    vertical-align: baseline;
    background: transparent;
}
/* App placement */
.whole_app {
    composes: app;
    margin: auto;
    width : -moz-fit-content;
    width : -webkit-fit-content;
    width : fit-content;
    flex: 1;
}
/* Header (title) placement */
.title_header {
    margin : 5px;
    width : -moz-fit-content;
    width : -webkit-fit-content;
    width : fit-content;
}
/* Calculator body placement */
.calculator_box {
    width: available; /* NOTE(review): `available` is non-standard; overridden by the fit-content lines below — confirm intent */
    width: -moz-fit-content;
    width: -webkit-fit-content;
    width: fit-content;
}
.calculator_row {
    width: available;
    display: flex;
    flex-direction: row;
    width: -moz-fit-content;
    width: -webkit-fit-content;
    width: fit-content;
}
/* Left side inside the calculator */
.calculator_row .left_box {
    margin: 5px;
    width: auto;
}
/* Right side inside the calculator */
.calculator_row .right_box {
    margin : 5px;
    width : -moz-fit-content;
    width : -webkit-fit-content;
    width : fit-content;
    flex: 1;
}
.calculator_box .footer_box {
}
/* Footer (copyright notice etc.) */
.footer {
    clear : both;
    margin : 5px;
    width : auto;
    width : -moz-fit-content;
    width : -webkit-fit-content;
    width : fit-content;
}
/* Elements */
/* Decoration of the top title bar */
.whole_title {
    color: #ffffff;
    background-color: #2a2a2a;
    font-size: 16pt;
    line-height: 1em;
    margin: 10px 0px;
    padding: 5px 5px;
    box-shadow:1px 3px 7px 0px #666666 ;
    border-bottom: 3px solid #ff6347;
    border-radius: 5px;
}
/* Tables */
.base {
    composes: base from "./tables.css";
}
.row {
    composes: row from "./tables.css";
}
.header {
    composes: header from "./tables.css";
}
.cell {
    composes: data from "./tables.css";
}
.result {
    composes: cell;
    width: 5em;
}
/* Sub-title */
.title {
    composes: header from "./app.css";
}
/* Bottom navigation */
.nav {
    padding: 0;
    margin: 0em 1em;
}
.nav-item {
    display: inline-block;
    padding-left: 0.5em;
}
.nav-item::before {
    content: ">";
}
/* Bottom copyright notice */
.comment {
    margin: 0.5em 0;
}
.copyright {
    margin-left: 1em;
}
.copyright-list {
    margin-left: 0.5em;
    padding-left: 0;
}
.copyright-list-item {
    list-style-type: none;
    list-style-position: inside;
}
| 14.378882
| 41
| 0.590497
|
e24bf1f444933d1bc1782ae43d0bdc56ded18b7b
| 947
|
py
|
Python
|
script.py
|
BrightIT/docker-ipython-selenium-client
|
decea8aa3530dd4571132d4c19fbb7a8085ff196
|
[
"MIT"
] | null | null | null |
script.py
|
BrightIT/docker-ipython-selenium-client
|
decea8aa3530dd4571132d4c19fbb7a8085ff196
|
[
"MIT"
] | null | null | null |
script.py
|
BrightIT/docker-ipython-selenium-client
|
decea8aa3530dd4571132d4c19fbb7a8085ff196
|
[
"MIT"
] | null | null | null |
# End-to-end smoke script: search the hdi.global site through a remote
# Selenium Firefox and open the first search result.
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
# Remote WebDriver against a Selenium hub.
# NOTE(review): the hub IP is hard-coded (Docker bridge address) — confirm it
# matches the environment this container runs in.
ff = webdriver.Remote(command_executor='http://172.17.0.5:4444/wd/hub', desired_capabilities=webdriver.DesiredCapabilities.FIREFOX)
ff.get("https://www.hdi.global/de/en")
print(repr(ff.title))
assert "HDI Global" in ff.title
# Wait (up to 10s) until the search box is visible before typing into it.
WebDriverWait(ff, 10).until(EC.visibility_of_element_located((By.CSS_SELECTOR, "body .main input[name='q']")))
search_box = ff.find_element_by_css_selector("body .main input[name='q']")
search_box.send_keys("test")
# Submit the query with a newline (Enter key).
search_box.send_keys("\n")
# Wait for the result list to render, then open the first hit.
WebDriverWait(ff, 10).until(EC.visibility_of_element_located((By.CSS_SELECTOR, ".search-filter-result a")))
assert ff.title == "Search results"
ff.find_element_by_css_selector(".search-filter-result a").click()
ff.close()
exit()
| 36.423077
| 131
| 0.789863
|
ddaa4229d1b556a38287521dc860460d6b2c5683
| 9,225
|
java
|
Java
|
tapestry-core/src/main/java/org/apache/tapestry5/ComponentResourcesCommon.java
|
xfyre/tapestry-5
|
5db48b2ccfe2e6e3d51c1b73216385ede3ca0097
|
[
"Apache-2.0"
] | null | null | null |
tapestry-core/src/main/java/org/apache/tapestry5/ComponentResourcesCommon.java
|
xfyre/tapestry-5
|
5db48b2ccfe2e6e3d51c1b73216385ede3ca0097
|
[
"Apache-2.0"
] | null | null | null |
tapestry-core/src/main/java/org/apache/tapestry5/ComponentResourcesCommon.java
|
xfyre/tapestry-5
|
5db48b2ccfe2e6e3d51c1b73216385ede3ca0097
|
[
"Apache-2.0"
] | 1
|
2021-12-16T20:27:20.000Z
|
2021-12-16T20:27:20.000Z
|
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.apache.tapestry5;
import org.apache.tapestry5.ioc.Locatable;
import org.apache.tapestry5.services.pageload.ComponentResourceSelector;
import org.slf4j.Logger;
import java.util.Locale;
/**
* Operations shared by the public {@link org.apache.tapestry5.ComponentResources} interface and
* {@link org.apache.tapestry5.internal.structure.ComponentPageElement} interface (on the internal side).
*/
@SuppressWarnings(
{ "JavaDoc" })
public interface ComponentResourcesCommon extends Locatable
{
/**
* Returns the simple (or local) id of the component. The id will be unique within the component's immediate
* container. For a page's root component, the value null is returned.
*/
String getId();
/**
* Return a string consisting the concatenated ids of all containing components, separated by periods. In addition,
* nested ids are always all lower case. I.e., "foo.bar.baz". Returns null for the root component of a page.
*/
String getNestedId();
/**
* Returns a string consisting of the logical name of the containing page, and the {@link #getNestedId() nested id}
* of this component, separated by a colon. I.e., "MyPage:foo.bar.baz". For a page, returns just the page's name.
*
* This value is often used to obtain an equivalent component instance in a later request.
*
* @see org.apache.tapestry5.services.ComponentSource#getComponent(String)
*/
String getCompleteId();
/**
* A convenience method for invoking {@link #triggerContextEvent(String, EventContext , ComponentEventCallback)}. Wraps
* the context values into an {@link org.apache.tapestry5.EventContext}.
*
* @param eventType
* event type (as determined from the request, or otherwise by design)
* @param contextValues
* Values that may be provided to the event handler method as method parameters, or null if no
* context values are available
* @param callback
* the handler to be informed of the result, or null if the event is a notification that does
* not support return values from event handler methods (the value true is allowed even if the
* handler is null).
* @return true if any event handler was invoked (even if no event handler method returns a non-null value)
* @throws org.apache.tapestry5.runtime.ComponentEventException
* if an event handler method throws a checked or unchecked exception
* @see org.apache.tapestry5.internal.transform.OnEventWorker
* @see org.apache.tapestry5.annotations.OnEvent
*/
boolean triggerEvent(String eventType, Object[] contextValues, ComponentEventCallback callback);
/**
* Triggers a component event. A search for an event handling method will occur, first in the component, then its
* container, and so on. When a matching event handler method is located, it is invoked. If the method returns a
* value, the value is passed to the callback (if callback is null, then it is an error for a method to return a
* non-null value).
*
* Resolution of event type to event handler methods is case insensitive.
*
* @param eventType
* event type (as determined from the request, or otherwise by design)
* @param context
* the context (as extracted from the request, or provided by the triggering component); these
* values may be provided to event handler methods via their parameters (may not be null)
* @param callback
* the handler to be informed of the result, or null if the event is a notification that does not
* support return values from event handler methods (the value true is allowed even if the handler
* is null).
* @return true if any event handler was invoked (even if no event handler method returns a non-null value)
* @throws org.apache.tapestry5.runtime.ComponentEventException
* if an event handler method throws a checked or unchecked exception
* @see org.apache.tapestry5.internal.transform.OnEventWorker
* @see org.apache.tapestry5.annotations.OnEvent
*/
boolean triggerContextEvent(String eventType, EventContext context, ComponentEventCallback callback);
/**
* Returns true if the component is currently rendering, false otherwise. This is most often used to determine if
* parameter values should be cached.
*/
boolean isRendering();
/**
* Returns the log instance associated with the component (which is based on the component or mixin's class name).
*
* @see org.apache.tapestry5.model.ComponentModel#getLogger()
*/
Logger getLogger();
/**
* Returns the locale for the page containing this component.
*
* @see #getResourceSelector()
*/
Locale getLocale();
/**
* Returns the selector used when constructing the component and its containing page.
*
* @since 5.3
*/
ComponentResourceSelector getResourceSelector();
/**
* Returns the name of element that represents the component in its template, or the provided default element name
* if the element was a component type (in the Tapestry namespace).
*
* @param defaultElementName
* element name to return if the element name is not known (may be null)
* @return the element name
*/
String getElementName(String defaultElementName);
/**
* Returns a block from the component's template, referenced by its id.
*
* @param blockId
* the id of the block (case insensitive)
* @return the identified Block
* @throws BlockNotFoundException
* if no block with the given id exists
* @see #findBlock(String)
*/
Block getBlock(String blockId);
/**
* As with {@link #getBlock(String)}, but returns null if the block is not found.
*
* @param blockId
* the id of the block (case insensitive)
* @return the block, or null
*/
Block findBlock(String blockId);
/**
* Returns the <em>logical</em> name of the page containing this component. This is the short name (it often appears
* in URLs)
*
* @return the logical name of the page which contains this component
*/
String getPageName();
/**
* Returns true if the element has a body and false otherwise. Only components may have a body; pages and mixins
* will return false.
*/
boolean hasBody();
/**
* Returns the body of this component as a (possibly empty) block. When invoked on a mixin, returns the containing
* component's body.
*/
Block getBody();
/**
* Creates a component event request link as a callback for this component. The event type and context (as well as
* the page name and nested component id) will be encoded into a URL. A request for the URL will
* {@linkplain #triggerEvent(String, Object[], org.apache.tapestry5.ComponentEventCallback)} trigger} the named
* event on the
* component.
*
* @param eventType
* the type of event to be triggered. Event types should be Java identifiers (contain only
* letters, numbers and the underscore).
* @param context
* additional objects to be encoded into the path portion of the link; each is converted to a
* string and URI encoded
* @return link object for the callback
*/
Link createEventLink(String eventType, Object... context);
/**
* Creates a component event request link as a callback for this component. The event type and context (as well as
* the page name and nested component id) will be encoded into a URL. A request for the URL will
* {@linkplain #triggerEvent(String, Object[], org.apache.tapestry5.ComponentEventCallback)} trigger} the named
* event on the
* component. This is only used for form submission events, as extra data may be encoded in the form as hidden
* fields.
*
* @param eventType
* the type of event to be triggered. Event types should be Java identifiers (contain only
* letters, numbers and the underscore).
* @param context
* additional objects to be encoded into the path portion of the link; each is converted to a
* string and URI encoded
* @return link object for the callback
*/
Link createFormEventLink(String eventType, Object... context);
}
| 43.928571
| 123
| 0.675339
|
ffb73c662981d9fa6333759268ab71b56b0e3eeb
| 1,859
|
py
|
Python
|
imsim/dict_wcs.py
|
g-braeunlich/imSim
|
c6f97ba39f05313570d15671f2b9eb9bd1de24bc
|
[
"BSD-3-Clause"
] | 13
|
2017-06-03T12:38:08.000Z
|
2022-01-24T18:56:31.000Z
|
imsim/dict_wcs.py
|
g-braeunlich/imSim
|
c6f97ba39f05313570d15671f2b9eb9bd1de24bc
|
[
"BSD-3-Clause"
] | 225
|
2016-11-29T23:08:20.000Z
|
2022-03-08T03:20:17.000Z
|
imsim/dict_wcs.py
|
g-braeunlich/imSim
|
c6f97ba39f05313570d15671f2b9eb9bd1de24bc
|
[
"BSD-3-Clause"
] | 9
|
2017-10-26T17:20:23.000Z
|
2022-03-29T09:53:18.000Z
|
import yaml
import galsim
from galsim.config import WCSBuilder, RegisterWCSType
class DictWCS(WCSBuilder):
def __init__(self):
self.d = {} # Empty dict means we haven't read the file yet.
def buildWCS(self, config, base, logger):
"""Build the TanWCS based on the specifications in the config dict.
Parameters:
config: The configuration dict for the wcs type.
base: The base configuration dict.
logger: If provided, a logger for logging debug statements.
Returns:
the constructed WCS object.
"""
req = { "file_name": str,
"key": str
}
opt = { "fix_ab": bool,
}
params, safe = galsim.config.GetAllParams(config, base, req=req, opt=opt)
file_name = params['file_name']
key = params['key']
fix_ab = params.get('fix_ab', True)
logger.info("Finding WCS for %s",key)
if not self.d or file_name != self.file_name:
with open(file_name) as f:
self.d = yaml.load(f.read(), Loader=yaml.SafeLoader)
self.file_name = file_name
logger.debug("Using WCS: %s",self.d[key])
# The dict stores the WCS as its repr. Eval it to make an actual WCS.
wcs = galsim.utilities.math_eval(self.d[key])
# I changed the internal storage of the ab matrices in GalSim, so the reprs I have
# in the yaml file are wrong. This fixes them.
# TODO: Fix the yaml file and get rid of this hack.
if fix_ab:
if wcs.ab is not None:
wcs.ab[0,1,0] += 1
wcs.ab[1,0,1] += 1
if wcs.abp is not None:
wcs.abp[0,1,0] += 1
wcs.abp[1,0,1] += 1
return wcs
RegisterWCSType('Dict', DictWCS())
| 33.196429
| 90
| 0.559978
|
b02420be3ddb9d6e79f3bb5890f664b8143695c8
| 7,159
|
py
|
Python
|
client/backup.py
|
lhupfeldt/borgbackup_notifications_multi_target
|
ef0e8461bbc80535a4fa541c6c00077e57f05572
|
[
"BSD-3-Clause"
] | null | null | null |
client/backup.py
|
lhupfeldt/borgbackup_notifications_multi_target
|
ef0e8461bbc80535a4fa541c6c00077e57f05572
|
[
"BSD-3-Clause"
] | null | null | null |
client/backup.py
|
lhupfeldt/borgbackup_notifications_multi_target
|
ef0e8461bbc80535a4fa541c6c00077e57f05572
|
[
"BSD-3-Clause"
] | null | null | null |
#!/bin/env python3
#!/opt/local/bin/python3.3
# Copyright (c) 2015-2016 Lars Hupfeldt Nielsen, Hupfeldt IT
# All rights reserved. This work is under a BSD license, see LICENSE.TXT.
import sys, os, shutil
from os.path import join as jp
import subprocess, time, resource
from . import notifications
from .singleton_script import singleton_script
from .rotate_logs import rotate_logs
from .config_objects import config_dir
sys.path.insert(0, os.path.dirname(__file__))
sys.path.insert(0, config_dir)
def _check_cfg_file(cfg_file, cfg_file_example_name, msg=''):
"""Check the existence of a cfg file and prints a message about renaming and editing the example file if the cfg file is not found.
Return (int): 1 if found, 0 if not found (to allow simple counting).
"""
has_cfg = True
if not os.path.exists(cfg_file):
has_cfg = False
print("*** Error: Configuration file '{cf}' not found".format(cf=cfg_file), file=sys.stderr)
here = os.path.dirname(__file__)
cfg_template_src = jp(here, 'config', cfg_file_example_name)
try:
os.mkdir(config_dir)
except FileExistsError:
pass
print(" You can copy '{tmplt}' to '{cf}' and edit it.".format(cf=cfg_file, tmplt=cfg_template_src), file=sys.stderr)
print(" " + msg + '\n', file=sys.stderr)
return 0
return 1
cfg_file_name = 'config.py'
cfg_template_file_name = cfg_file_name + '.template'
cfg_found = _check_cfg_file(jp(config_dir, cfg_file_name), cfg_template_file_name)
try:
from config import config # pylint: disable=wrong-import-order,no-name-in-module
except ImportError as ex:
if cfg_found == 1:
raise
sys.exit(1)
def get_exclude_pattern_files():
exclude_from_files = []
exclude_from_file_example = 'user_file_selection.conf.example'
for exclude_from_file in config.exclude_from_files:
if not _check_cfg_file(
exclude_from_file, exclude_from_file_example,
"Make sure to REVIEW it closely, so that you do not exclude anything you want to backup."):
continue
exclude_from_files.append(exclude_from_file)
if len(exclude_from_files) < len(config.exclude_from_files):
sys.exit(1)
return exclude_from_files
program_name = 'Backup'
def message(msg, use_notify=True):
if sys.stdout.isatty():
print(msg)
with config.log_file.open('a+') as log_file:
print(msg, file=log_file)
if use_notify:
notifications.notify(program_name, msg, notifications.STOCK_DIALOG_INFO, expire_timeout=10000)
def error(msg):
err = "*** ERROR: "
print(err, msg, file=sys.stderr)
check_log_file_msg = "Check log file: '{!s}'".format(config.log_file)
print(check_log_file_msg, file=sys.stderr)
with config.log_file.open('a+') as log_file:
print(err, msg, file=log_file)
msg += " " + check_log_file_msg
notifications.notify(program_name, msg, notifications.STOCK_DIALOG_ERROR)
def borg(args):
if config.ssh_key:
os.environ['BORG_RSH'] = "ssh -i " + str(config.ssh_key)
os.environ['BORG_PASSPHRASE'] = config.passphrase
cmd = [config.borg]
cmd.extend(args)
cmd.append('-v')
message(' '.join(cmd), use_notify=False)
if not sys.stdout.isatty():
with config.log_file.open('a+') as log_file:
subprocess.check_call(cmd, stdout=log_file, stderr=log_file)
else:
subprocess.check_call(cmd)
def backup(from_dir, remote_url, prefix, exclude_from_opts):
fdrel = lambda path: jp(from_dir, path)
# Options for excluding files
excl = []
try:
# If DOWNLOAD dir does not exist xdg-user-dir will return home dir, make sure we don't exclude it
download_dir = os.path.normpath(subprocess.check_output(['xdg-user-dir', 'DOWNLOAD']).decode('unicode_escape').strip())
if not download_dir in (os.path.normpath(from_dir), os.path.normpath(os.path.expanduser("~"))):
excl.append(download_dir)
except FileNotFoundError:
excl.extend([fdrel('Downloads')])
excl = ["--exclude=sh:" + fdrel(dd) for dd in excl]
# Start backup
borg(['create', '--stats', '--lock-wait', '300', '--show-rc', '--progress', '--compression', 'lz4',
'--exclude-caches'] + excl + exclude_from_opts +
[remote_url + '::' + prefix + '-' + time.strftime("%Y-%m-%d:%H.%M.%S"), from_dir])
def prune(remote_url, prefix, keep_within, keep_hourly, keep_daily, keep_weekly, keep_monthly, keep_yearly):
borg(['prune', '--lock-wait', '300', '--show-rc',
# --save-space
'--prefix', prefix,
'--keep-within', keep_within,
'--keep-hourly', str(keep_hourly),
'--keep-daily', str(keep_daily),
'--keep-weekly', str(keep_weekly),
'--keep-monthly', str(keep_monthly),
'--keep-yearly', str(keep_yearly),
remote_url])
def all_backups():
notifications.init(program_name, ignore_errors=True)
try:
for dd in reversed(list(config.log_file.parents)):
os.makedirs(str(dd), exist_ok=True)
except FileExistsError:
# os.makedirs still raises FileExistsError if mode is not as expected
assert os.path.isdir(config.log_dir)
if config.ssh_key:
# Test that key file exists and is readable
with open(config.ssh_key):
pass
exclude_from_opts = ['--exclude-from=' + str(exclude_from_file) for exclude_from_file in get_exclude_pattern_files()]
message("Starting backups")
rotate_logs(config.log_file)
singleton_script()
resource.setrlimit(resource.RLIMIT_NOFILE, (1024, 1024))
failed = []
for backup_rule in config.backup_rules.values():
remote_url = backup_rule.target_user + '@' + backup_rule.target_host + ':backup'
try:
message("Backing up " + repr(backup_rule.from_dir) + " to " + repr(remote_url))
backup(backup_rule.from_dir, remote_url, backup_rule.prefix, exclude_from_opts)
message("Cleaning up at " + repr(remote_url))
prune(remote_url, backup_rule.prefix,
backup_rule.keep_within,
backup_rule.keep_hourly, backup_rule.keep_daily, backup_rule.keep_weekly, backup_rule.keep_monthly, backup_rule.keep_yearly)
message("Successfully backed up " + repr(backup_rule.from_dir) + " to " + repr(remote_url))
except subprocess.CalledProcessError as ex:
error(str(ex) + ". Backup (or cleanup) to " + repr(remote_url) + " failed!")
failed.append(remote_url)
if failed:
msg = "ALL backups (or all cleanups) failed" if len(failed) == len(config.backup_rules) else "Some backups (or cleanups) failed"
raise Exception(msg + ": " + str(failed))
message("All Successful")
# TODO leave notifications with a timeout if possible and don't sleep!
time.sleep(2 if sys.stdout.isatty() else 60)
notifications.clear()
def main():
try:
all_backups()
except Exception as ex:
error(str(ex))
raise
if __name__ == "__main__":
main()
| 34.584541
| 142
| 0.659729
|
f5ad78dfbbb4d7c4fe4f6a7da2144e0a69097071
| 13,809
|
swift
|
Swift
|
Sources/Meek/MeekTCPConnection.swift
|
OperatorFoundation/Shapeshifter-Swift-Transports
|
a0385e99b35512d64d33bfc9d1558289d117b2f8
|
[
"MIT"
] | 2
|
2020-05-07T08:13:41.000Z
|
2020-12-18T19:40:00.000Z
|
Sources/Meek/MeekTCPConnection.swift
|
OperatorFoundation/Shapeshifter-Swift-Transports
|
a0385e99b35512d64d33bfc9d1558289d117b2f8
|
[
"MIT"
] | null | null | null |
Sources/Meek/MeekTCPConnection.swift
|
OperatorFoundation/Shapeshifter-Swift-Transports
|
a0385e99b35512d64d33bfc9d1558289d117b2f8
|
[
"MIT"
] | 2
|
2018-02-05T03:54:08.000Z
|
2020-01-14T09:47:05.000Z
|
//
// MeekTCPConnection.swift
// Shapeshifter-Swift-Transports
//
// Created by Brandon Wiley on 10/24/17.
// Copyright © 2017 Operator Foundation. All rights reserved.
//
import Foundation
//import NetworkExtension
#if (os(macOS) || os(iOS) || os(watchOS) || os(tvOS))
import CryptoKit
#else
import Crypto
#endif
import Transport
import SwiftQueue
public func createMeekTCPConnection(provider: PacketTunnelProvider, to: URL, serverURL: URL, logQueue: Queue<String>) -> MeekTCPConnection?
{
let conn = MeekTCPConnection(provider: provider, to: to, url: serverURL, logQueue: logQueue)
guard let c = conn
else
{
return nil
}
return c
}
public class MeekTCPConnection: TCPConnection
{
public var hasBetterPath: Bool
{
get
{
return network.hasBetterPath
}
}
public var endpoint: NWEndpoint
{
get
{
return network.endpoint
}
}
public var connectedPath: NWPath?
public var localAddress: NWEndpoint?
public var remoteAddress: NWEndpoint?
public var txtRecord: Data?
public var error: Error?
public var serverURL: URL
public var frontURL: URL
public var network: TCPConnection
public var bodyBuffer = Data()
public var sessionID = ""
///Meek server is no longer accepting POST
public var meekIsClosed = false
private var logQueue: Queue<String>
let minLength = 1
let maxLength = MemoryLayout<UInt32>.size
public enum MeekError: Error
{
case unknownError
case connectionError
case meekIsClosed
case invalidRequest
case notFound
case invalidResponse
case serverError
case serverUnavailable
case timeOut
case unsuppotedURL
}
public var isViable: Bool
{
get
{
return _isViable
}
}
public var state: NWTCPConnectionState
{
get
{
return _state
}
}
public var stateCallback: ((NWTCPConnectionState, Error?) -> Void)?
private var _isViable: Bool
private var _error: Error?
private var _state: NWTCPConnectionState
{
didSet
{
logQueue.enqueue("Meek Connection State has changed: \(_state.description)")
NotificationCenter.default.post(name: .meekConnectionState, object: _state.description)
guard let callback = stateCallback
else { return }
callback(_state, nil)
}
}
public init?(provider: PacketTunnelProvider, to front: URL, url: URL, logQueue: Queue<String>)
{
self.logQueue = logQueue
serverURL = url
frontURL = front
let frontHostname = frontURL.host!
let endpoint: NWEndpoint = NWHostEndpoint(hostname: frontHostname, port: "80")
guard let tcpConnection = provider.createTCPConnectionThroughTunnel(to: endpoint, enableTLS: true, tlsParameters: nil, delegate: nil)
else
{
return nil
}
network = tcpConnection
_state = .connected
_isViable = true
sessionID = generateSessionID() ?? ""
}
public func observeState(_ callback: @escaping (NWTCPConnectionState, Error?) -> Void) {
self.stateCallback=callback
}
// Currrently this function ignores the minimum and maximum lengths provided.
public func readMinimumLength(_ minimum: Int, maximumLength maximum: Int, completionHandler completion: @escaping (Data?, Error?) -> Void)
{
guard isViable
else
{
let error = MeekError.connectionError
completion(nil, error)
return
}
guard !meekIsClosed
else
{
let data = self.bodyBuffer
self.bodyBuffer = Data()
self.cleanup()
completion(data, nil)
return
}
write(Data())
{
(maybeError) in
if let writeError = maybeError
{
if self.bodyBuffer.isEmpty
{
completion(nil, writeError)
}
else
{
let data = self.bodyBuffer
self.bodyBuffer = Data()
completion(data, nil)
}
}
else
{
let data = self.bodyBuffer
self.bodyBuffer = Data()
completion(data, nil)
}
}
}
public func readLength(_ length: Int, completionHandler completion: @escaping (Data?, Error?) -> Void)
{
logQueue.enqueue("Meek readLength Called")
readMinimumLength(length, maximumLength: length, completionHandler: completion)
}
public func write(_ data: Data, completionHandler completion: @escaping (Error?) -> Void)
{
logQueue.enqueue("Meek Write Called")
guard isViable
else
{
let error = MeekError.connectionError
completion(error)
return
}
guard !meekIsClosed
else
{
let error = MeekError.meekIsClosed
completion(error)
return
}
let encoded = encodePOST(data)!
network.write(encoded)
{
(error) in
self.checkForData(responseBuffer: Data(), completionHandler: completion)
}
}
public func writeClose()
{
network.writeClose()
}
public func cancel()
{
_isViable = false
_state = .cancelled
network.cancel()
}
func checkForData(responseBuffer: Data, completionHandler completion: @escaping (Error?) -> Void)
{
logQueue.enqueue("Meek checkForData Called")
self.network.readMinimumLength(60, maximumLength: 60 + 65536, completionHandler:
{
(maybeData, maybeError) in
var dataBuffer = responseBuffer
guard maybeError == nil
else
{
print("Received an error when attempting to read from the network:")
print(maybeError!)
completion(nil)
return
}
guard let someData = maybeData
else
{
completion(nil)
return
}
dataBuffer.append(someData)
let (maybeStatusCode, maybeBody) = self.decodeResponse(dataBuffer)
guard let statusCode = maybeStatusCode
else
{
self.checkForData(responseBuffer: dataBuffer, completionHandler: completion)
return
}
guard statusCode == "200"
else
{
self.logQueue.enqueue("Meek status code is not 200")
if self.bodyBuffer.isEmpty
{
self.cleanup()
}
else
{
self.meekIsClosed = true
self.network.cancel()
}
return
}
self.logQueue.enqueue("Meek Server response status code is 200.")
guard let bodyData = maybeBody
else
{
self.checkForBody(responseBuffer: Data(), completionHandler: completion)
return
}
self.checkForBody(responseBuffer: bodyData, completionHandler: completion)
})
}
func checkForBody(responseBuffer: Data, completionHandler completion: @escaping (Error?) -> Void)
{
self.network.readMinimumLength(1, maximumLength: 65536, completionHandler:
{
(maybeData, maybeError) in
var dataBuffer = responseBuffer
guard maybeError == nil
else
{
self.bodyBuffer.append(dataBuffer)
completion(nil)
return
}
guard let someData = maybeData
else
{
self.bodyBuffer.append(dataBuffer)
completion(nil)
return
}
dataBuffer.append(someData)
self.checkForBody(responseBuffer: dataBuffer, completionHandler: completion)
})
}
func encodePOST(_ data: Data) -> Data?
{
guard let host = serverURL.host
else
{
print("Unable to resolver server host.")
return nil
}
let header1 = "Host: \(host)"
let header2 = "X-Session-Id: \(sessionID)"
let httpRequestString = "POST \(frontURL.path) HTTP/1.1 \r\n\(header1)\r\n\(header2)\r\n\r\n"
var postData = httpRequestString.data(using: .utf8)
if postData != nil
{
postData!.append(data)
}
return postData
}
func decodeResponse(_ data: Data) -> (statusCode: String?, body: Data?)
{
logQueue.enqueue("Meek: Decoding Server Response")
guard let (headerString, bodyData) = splitOnBlankLine(data: data)
else
{
return (nil, nil)
}
let statusCode = getStatusCode(fromHeader: headerString)
return (statusCode, bodyData)
}
func cleanup()
{
network.cancel()
_state = .disconnected
_isViable = false
}
func getStatusCode(fromHeader headerString: String) -> String?
{
let lines = headerString.components(separatedBy: "\r\n")
guard let statusLine = lines.first
else
{
return nil
}
let statusComponents = statusLine.components(separatedBy: " ")
let statusCodeString = statusComponents[1]
return statusCodeString
}
func splitOnBlankLine(data: Data) -> (header: String, body: Data)?
{
guard let emptyLineIndex = findEmptyLineIndex(data: data)
else
{
print("Unable to find empty line.")
return nil
}
let headerData = data.prefix(through: emptyLineIndex - 2)
if let headerString = String(data: headerData, encoding: .ascii)
{
let bodyData = data.suffix(from: emptyLineIndex + 3)
return (headerString, bodyData)
}
else
{
return nil
}
}
func findEmptyLineIndex(data: Data) -> Int?
{
var dataToCheck = data
if let newlineIndex = dataToCheck.index(of: 10)
{
let next = dataToCheck[newlineIndex + 1]
if next == 13
{
return newlineIndex
}
else
{
if dataToCheck.count > 2
{
dataToCheck = dataToCheck.suffix(from: newlineIndex + 1)
return findEmptyLineIndex(data: dataToCheck)
}
else
{
return nil
}
}
}
else
{
return nil
}
}
///This generates a random hex string of random bytes using SHA256.
func generateSessionID() -> String?
{
let byteCount = 64
var randomHex = ""
var randomBytesArray = [UInt8](repeating: 0, count: byteCount)
//Create an array of random bytes.
let result = SecRandomCopyBytes(kSecRandomDefault, byteCount, &randomBytesArray)
if result == errSecSuccess
{
//Create data from bytes array.
let randomBytes = Data(bytes: randomBytesArray)
//SHA256 random bytes.
let hash = randomBytes.sha256()
//Create hex from the first 16 values of the hash array.
let first16Hash = hash.prefix(16)
let hexArray = first16Hash.map({String(format: "%02hhx", $0)})
randomHex = hexArray.joined(separator: "")
//🔮
return randomHex
}
else
{
return nil
}
}
// ///Testing Only <-------------------
// convenience init?(testDate: Date)
// {
// let provider = FakePacketTunnelProvider()
// let sURL = URL(string: "http://TestServer.com")!
// let fURL = URL(string: "http://TestFront.com")!
// self.init(provider: provider, to: fURL, url: sURL)
// }
//func createMeekTCPConnection(testDate: Date) -> MeekTCPConnection?
//{
// let conn = MeekTCPConnection(testDate: testDate)
// guard let c = conn
// else
// {
// return nil
// }
//
// return c
//}
}
public extension Notification.Name
{
static let meekConnectionState = Notification.Name("MeekTCPConnectionState")
}
extension NWTCPConnectionState: CustomStringConvertible
{
public var description: String
{
switch self
{
case .cancelled: return "Cancelled"
case .connected: return "Connected"
case .connecting: return "Connecting"
case .disconnected: return "Disconnected"
case .invalid: return "Invalid"
case .waiting: return "Waiting"
}
}
}
| 26.761628
| 142
| 0.526106
|
96f50a431a7836d05d2b429a91a6f6adb370c8ad
| 293
|
cshtml
|
C#
|
PizzaBoxFrontEnd/PizzaBoxFrontEnd/Views/OrderPizza/Index.cshtml
|
210329-UTA-SH-UiPath/P1_Sean_Spring
|
ffab0bba963948ec369766a91f2934d25e9e8dbe
|
[
"MIT"
] | null | null | null |
PizzaBoxFrontEnd/PizzaBoxFrontEnd/Views/OrderPizza/Index.cshtml
|
210329-UTA-SH-UiPath/P1_Sean_Spring
|
ffab0bba963948ec369766a91f2934d25e9e8dbe
|
[
"MIT"
] | null | null | null |
PizzaBoxFrontEnd/PizzaBoxFrontEnd/Views/OrderPizza/Index.cshtml
|
210329-UTA-SH-UiPath/P1_Sean_Spring
|
ffab0bba963948ec369766a91f2934d25e9e8dbe
|
[
"MIT"
] | null | null | null |
@*
For more information on enabling MVC for empty projects, visit https://go.microsoft.com/fwlink/?LinkID=397860
*@
<h1>Your pizza has been added!</h1>
<a asp-action="Return" asp-controller="Order">Go Back to Order</a><br />
<a asp-controller="Home" asp-action="Index">Finish Order</a>
| 32.555556
| 113
| 0.709898
|
c6dc7c19d2d53c9e643357bc5aab5bdaa2926e7c
| 19,774
|
py
|
Python
|
k_lstm.py
|
minoriwww/MeterDetection
|
0373cd30fe8a1de4886a8a60860c4714b2b28333
|
[
"MIT"
] | 7
|
2019-11-13T01:02:14.000Z
|
2021-08-10T10:03:46.000Z
|
k_lstm.py
|
minoriwww/MeterDetection
|
0373cd30fe8a1de4886a8a60860c4714b2b28333
|
[
"MIT"
] | null | null | null |
k_lstm.py
|
minoriwww/MeterDetection
|
0373cd30fe8a1de4886a8a60860c4714b2b28333
|
[
"MIT"
] | 1
|
2019-12-26T10:34:18.000Z
|
2019-12-26T10:34:18.000Z
|
# -*- coding: utf-8 -*-
# run in py3 !!
import os
os.environ["CUDA_VISIBLE_DEVICES"] = "1";
import tensorflow as tf
config = tf.ConfigProto()
# config.gpu_options.per_process_gpu_memory_fraction=0.5
config.gpu_options.allow_growth = True
tf.Session(config=config)
import numpy as np
from sklearn import preprocessing
import tensorflow as tf
import time
import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plt
from sklearn.datasets import load_boston
from sklearn.model_selection import train_test_split
from sklearn.metrics import mean_squared_error
import pandas as pd
from keras import backend as K
import keras.layers.convolutional as conv
from keras.layers import merge
from keras.wrappers.scikit_learn import KerasRegressor
from keras import utils
from keras.layers.pooling import MaxPooling1D, MaxPooling2D
from keras.layers import pooling
from keras.models import Sequential, Model
from keras.regularizers import l1, l2
from keras import layers
from keras.layers import Dense, Dropout, Activation, Flatten, Input, Convolution1D, Convolution2D, LSTM
from keras.optimizers import SGD, RMSprop
from keras.layers.normalization import BatchNormalization
from keras import initializers
from keras.callbacks import EarlyStopping
from keras import callbacks
from keras import backend as K
from keras.utils import to_categorical
from keras.callbacks import EarlyStopping, ModelCheckpoint, Callback
from keras.models import Model
from keras import initializers, layers
from keras.optimizers import SGD, Adadelta, Adam
from keras.regularizers import l1, l2
from keras import regularizers
import sys
sys.path.append('.')
from hist_figure import his_figures
if len(sys.argv) > 1:
prefix = sys.argv[1]
else:
prefix = time.time()
DATAPATH = '5fold/'
RESULT_PATH = './results/'
feature_num = 25
batch_num = 2
# batch_size = 32
batch_size = 512
SEQ_LENGTH = 20
STATEFUL = False
scaler = None # tmp, for fit_transform
# id,usage,date,com_date,week,month,year
# com_date,date,id,month,usage,week,year
def get_data(path_to_dataset='df_dh.csv', sequence_length=20, stateful=False, issplit=True):
fold_index = 1
###
dtypes = {'sub': 'float', 'super': 'float', 'error': 'float', 'com_date': 'int', 'week': 'str', 'month': 'str',
'year': 'str', 'numbers': 'int', 'log': 'float', 'id': 'str', 'usage': 'float'}
parse_dates = ['date']
print(path_to_dataset)
df = pd.read_csv(DATAPATH + path_to_dataset, header=0, dtype=dtypes, parse_dates=parse_dates, encoding="utf-8")
# print(path_to_dataset)
print(df.columns)
df = df[df['error'] >= 0]
# df_test = pd.read_csv(DATAPATH+"test"+str(fold_index)+".csv", header = 0, dtype=dtypes, parse_dates=parse_dates,encoding="utf-8")
def helper(x):
split = list(map(int, x.strip('[').strip(']').split(',')))
d = {}
for counter, value in enumerate(split):
k = str(len(split)) + "-" + str(counter)
d[k] = value
return d
# df_train_temp = df_train['week'].apply(helper).apply(pd.Series)
df_week = df['week'].apply(helper).apply(pd.Series).as_matrix() # 7
df_month = df['month'].apply(helper).apply(pd.Series).as_matrix() # 12
df_year = df['year'].apply(helper).apply(pd.Series).as_matrix() # 3
df_empty = df[['super', 'com_date', 'error', 'numbers']].copy()
# print(df_empty)
df_super = df_empty.ix[:, [0]]
df_com_date = df_empty.ix[:, [1]]
df_error = df_empty.ix[:, [2]]
df_numbers = df_empty.ix[:, [3]]
X_train_ = np.column_stack((df_super, df_com_date, df_numbers, df_week, df_month))
Y_train_ = df_error.as_matrix()
ss_x = preprocessing.MaxAbsScaler()
ss_y = preprocessing.MaxAbsScaler()
global scaler
scaler = ss_y
# ss_x = preprocessing.StandardScaler()
array_new = ss_x.fit_transform(df_empty.ix[:, [0]])
df_super = pd.DataFrame(array_new)
array_new = ss_x.fit_transform(df_empty.ix[:, [1]])
df_com_date = pd.DataFrame(array_new)
array_new = ss_x.fit_transform(df_empty.ix[:, [3]])
df_numbers = pd.DataFrame(array_new)
array_new = ss_y.fit_transform(df_empty.ix[:, [2]])
df_error = pd.DataFrame(array_new)
df_week = ss_x.fit_transform(df_week)
df_week = pd.DataFrame(df_week)
df_month = ss_x.fit_transform(df_month)
df_month = pd.DataFrame(df_month)
X_train = np.column_stack((df_super, df_com_date, df_numbers, df_week, df_month))
Y_train = df_error.as_matrix()
print('Xshape:' + str(X_train.shape))
print('Yshape:' + str(Y_train.shape))
y_arr = Y_train.T.tolist()
# print(y_arr)
try:
y_arr = ss_y.inverse_transform(y_arr)
#draw_error_line(y_arr[0], df)
#draw_error_bar(y_arr[0])
except Exception as e:
print(e)
if not issplit:
print('Xshape:' + str(X_train.shape))
print('Yshape:' + str(Y_train.shape))
X_train, X_test, Y_train, Y_test = train_test_split(X_train_, Y_train_, test_size=0.1, shuffle=False)
X_train, X_val, Y_train, Y_val = train_test_split(X_train, Y_train, test_size=0.1, shuffle=False)
return X_train, Y_train, X_test, Y_test, X_val, Y_val
else:
return split_CV(X_train, Y_train, sequence_length=sequence_length, stateful=False)
import datetime
def get_data_single_user(path_to_dataset='df_dh.csv', sequence_length=20, stateful=False, issplit=True):
    """Load one user's usage series from DATAPATH and build model inputs.

    The 'week'/'month' columns hold one-hot vectors serialized as strings
    ('[0,1,0,...]'); they are expanded into individual feature columns.
    Features and the 'usage' target are scaled with MaxAbsScaler; the fitted
    target scaler is published via the module-global ``scaler`` so callers
    can inverse-transform predictions later.

    Fixes: replaced the long-removed pandas APIs ``DataFrame.ix`` (-> .iloc)
    and ``as_matrix()`` (-> .values); dropped unused locals (fold_index,
    df_year, the unused com_date scaling).

    :param path_to_dataset: CSV file name appended to DATAPATH.
    :param sequence_length: window length forwarded to split_CV.
    :param stateful: kept for API compatibility; split_CV is called with
        stateful=False, matching the original behaviour.
    :param issplit: if False return raw (X, y); otherwise return the
        train/test/val split from split_CV.
    """
    dtypes = {'sub': 'float', 'super': 'float', 'error': 'float', 'com_date': 'int', 'week': 'str', 'month': 'str',
              'year': 'str', 'numbers': 'int', 'log': 'float', 'id': 'str', 'usage': 'float'}
    parse_dates = ['date']
    print('$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$' + path_to_dataset)
    df = pd.read_csv(DATAPATH + path_to_dataset, header=0, dtype=dtypes, parse_dates=parse_dates, encoding="utf-8")
    print(df.columns)
    df = df[df['usage'] >= 0]  # keep only valid (non-negative) usage rows

    def expand_one_hot(encoded):
        """Turn '[a,b,c]' into {'3-0': a, '3-1': b, '3-2': c}."""
        values = list(map(int, encoded.strip('[').strip(']').split(',')))
        return {str(len(values)) + "-" + str(i): v for i, v in enumerate(values)}

    # .values replaces the removed DataFrame.as_matrix()
    df_week = df['week'].apply(expand_one_hot).apply(pd.Series).values    # 7 columns
    df_month = df['month'].apply(expand_one_hot).apply(pd.Series).values  # 12 columns
    df_empty = df[['com_date', 'usage']].copy()
    ss_x = preprocessing.MaxAbsScaler()
    ss_y = preprocessing.MaxAbsScaler()
    global scaler
    scaler = ss_y  # expose the fitted target scaler for later inverse_transform
    # .iloc replaces the removed DataFrame.ix indexer
    df_usage = pd.DataFrame(ss_y.fit_transform(df_empty.iloc[:, [1]]))
    df_week = pd.DataFrame(ss_x.fit_transform(df_week))
    df_month = pd.DataFrame(ss_x.fit_transform(df_month))
    X_train = np.column_stack((df_week, df_month))
    Y_train = df_usage.values
    print(X_train)
    print(Y_train.shape)
    y_arr = Y_train.T.tolist()
    print(df)
    y_arr = ss_y.inverse_transform(y_arr)
    draw_error_line(y_arr[0], df)
    draw_error_bar(y_arr[0])
    if not issplit:
        return X_train, Y_train
    else:
        return split_CV(X_train, Y_train, sequence_length=sequence_length, stateful=False)
def inverse_xy_transform(scaler, *para):
    """Inverse-transform any number of arrays back to their original scale.

    Each array in ``para`` is reshaped to a column vector (n, 1) before the
    scaler's ``inverse_transform`` is applied.  (The per-array debug print
    of the reshaped input was removed.)

    :param scaler: fitted scaler exposing ``inverse_transform``.
    :returns: list of inverse-transformed (n, 1) arrays, in input order.
    """
    return [scaler.inverse_transform(arr.reshape(-1, 1)) for arr in para]
def split_CV(X_train, Y_train, sequence_length=20, stateful=False, stateful_train_size=640):
    """Build sliding windows and split them into train/test/validation sets.

    Each sample is a window of ``sequence_length`` consecutive feature rows;
    its target is the value immediately following the window.

    :param X_train: 2-D feature matrix (rows are time steps).
    :param Y_train: target values aligned with X_train's rows.
    :param sequence_length: window length.
    :param stateful: if True use a fixed contiguous split so batch
        boundaries stay aligned for stateful LSTMs; otherwise use
        chronological (shuffle=False) 90/10 splits.
    :param stateful_train_size: number of leading samples forming the
        train+val portion in the stateful case (previously hard-coded 640).
    :returns: [X_train, y_train, X_test, y_test, X_val, y_val]; all y arrays
        are column vectors of shape (n, 1).
    """
    windows = []
    targets = []
    for index in range(len(Y_train) - sequence_length):
        windows.append(X_train[index: index + sequence_length])
        targets.append(Y_train[index + sequence_length])
    X_train = np.array(windows)
    Y_train = np.array(targets)
    if stateful:
        # Contiguous split: stateful models must not shuffle across batches.
        X_test = X_train[stateful_train_size:, ...]
        Y_test = Y_train[stateful_train_size:, ...]
        X_train = X_train[:stateful_train_size, ...]
        Y_train = Y_train[:stateful_train_size, ...]
        X_train, X_val, Y_train, Y_val = train_test_split(X_train, Y_train, test_size=0.1, shuffle=False)
    else:
        X_train, X_test, Y_train, Y_test = train_test_split(X_train, Y_train, test_size=0.1, shuffle=False)
        X_train, X_val, Y_train, Y_val = train_test_split(X_train, Y_train, test_size=0.1, shuffle=False)
    train_y_disorder = Y_train.reshape(-1, 1)
    test_y_disorder = Y_test.reshape(-1, 1)
    Y_val = Y_val.reshape(-1, 1)
    return [X_train, train_y_disorder, X_test, test_y_disorder, X_val, Y_val]
def LSTM2(X_train):
    """Build a 2-layer stacked LSTM regressor compiled with Adam / MSE loss.

    The input layer shape is derived from ``X_train`` (samples, timesteps,
    features).  When the module-global STATEFUL is True a fixed
    ``batch_input_shape`` is used instead, as stateful LSTMs require.

    Fixes: removed the unused SGD/Adadelta/RMSprop optimizer instances
    (only Adam was ever compiled in) and dead commented-out code.

    :param X_train: 3-D training array used only for its shape.
    :returns: compiled keras Sequential model.
    """
    model = Sequential()
    layers = [1, 30, 30, 1]  # [input, lstm1, lstm2, output] widths
    if STATEFUL == False:
        model.add(LSTM(
            layers[1],
            input_shape=(X_train.shape[1], X_train.shape[2]),
            stateful=STATEFUL,
            return_sequences=True,
            kernel_initializer='he_normal'
        ))
    else:
        model.add(LSTM(
            layers[1],
            batch_input_shape=(batch_size, X_train.shape[1], X_train.shape[2]),
            stateful=STATEFUL,
            return_sequences=True,
            kernel_initializer='he_normal'
        ))
    model.add(LSTM(
        layers[2],
        stateful=STATEFUL,
        return_sequences=False,
        kernel_initializer='he_normal'
    ))
    model.add(Dropout(0.2))
    model.add(Dense(
        layers[3],
        kernel_initializer='he_normal',
        kernel_regularizer=l2(0.01),
        activity_regularizer=l1(0.01)
    ))
    model.add(BatchNormalization())
    model.add(Activation("linear"))
    start = time.time()
    adam = Adam(lr=1e-3)
    model.compile(loss="mse", optimizer=adam)
    print("Compilation Time : ", time.time() - start)
    return model
def draw_error_bar(y_array):
    """Render *y_array* as a bar chart and save it under RESULT_PATH."""
    figure = plt.figure()
    axis = figure.add_subplot(1, 1, 1)
    positions = list(range(len(y_array)))
    plt.bar(positions, y_array, label='error')
    plt.legend()
    plt.title('error bar')
    axis.grid()
    figure.tight_layout()
    figure.savefig(RESULT_PATH + str(batch_size) + 'bar_error.png', dpi=300)
def draw_error_line(y_array, df):
    """Plot the rescaled series against the raw ``df['error']`` column.

    Bug fix: both curves previously carried the same label 'error', making
    the legend ambiguous; they are now labelled separately.

    :param y_array: inverse-transformed error series.
    :param df: dataframe providing the raw 'error' column for comparison.
    """
    fig = plt.figure()
    axes = fig.add_subplot(1, 1, 1)
    x = list(range(len(y_array)))
    plt.plot(x, y_array, label='rescaled error')
    x = list(range(len(df['error'])))
    plt.plot(x, df['error'], label='raw error')
    plt.legend()
    plt.title('error plot')
    axes.grid()
    fig.tight_layout()
    fig.savefig(RESULT_PATH + str(batch_size) + 'line_error.png', dpi=300)
def draw_scatter(predicted, y_test, X_test, x_train, y_train, data_file):
    """Save a grouped bar chart and a truth-vs-prediction scatter plot."""
    bar_fig = plt.figure()
    bar_axes = bar_fig.add_subplot(1, 1, 1)
    positions = list(range(len(predicted)))
    total_width, groups = 0.8, 2
    width = total_width / groups
    plt.bar(positions, y_test.T[0], width=width, label='truth', fc='y')
    positions = [p + width for p in positions]
    plt.bar(positions, predicted, width=width, label='predict', fc='r')
    plt.legend()
    plt.title('lstm')
    bar_axes.grid()
    bar_fig.tight_layout()
    bar_fig.savefig(RESULT_PATH + str(batch_size) + data_file + str(prefix) + 'bar_lstm.png', dpi=300)
    scatter_fig = plt.figure()
    plt.scatter(y_test.T[0], predicted)
    plt.xlim(0, 1)
    plt.ylim(0, 1)
    plt.xlabel('truth')
    plt.ylabel('predict')
    scatter_fig.savefig(RESULT_PATH + str(batch_size) + data_file + str(prefix) + '_scatter_lstm.png',
                        dpi=300)
def draw_line(predicted, y_test, X_test, x_train, y_train, data_file):
    """Save bar, scatter and line comparisons of predictions vs. truth.

    Bug fixes: the line plot previously reused x positions already shifted
    for the grouped bars and shifted them again, so the predicted curve was
    offset from the truth curve by one bar width — both curves now share the
    same unshifted x axis.  A redundant duplicate ``add_subplot`` call
    (issued after the figure was already saved) was removed.
    """
    fig = plt.figure()
    axes = fig.add_subplot(1, 1, 1)
    x = list(range(len(predicted)))
    total_width, n = 0.8, 2
    width = total_width / n
    plt.bar(x, y_test.T[0], width=width, label='True', fc='y')
    shifted = [i + width for i in x]
    plt.bar(shifted, predicted, width=width, label='Predicted', fc='r')
    plt.legend()
    plt.title('lstm')
    axes.grid()
    fig.tight_layout()
    fig.savefig(RESULT_PATH + str(batch_size) + data_file + str(prefix) + 'bar_lstm.png', dpi=300)
    fig = plt.figure()
    plt.scatter(y_test.T[0], predicted)
    plt.xlim(0, 1)
    plt.ylim(0, 1)
    plt.xlabel('True')
    plt.ylabel('Predicted')
    fig.savefig(RESULT_PATH + str(batch_size) + data_file + str(prefix) + '_scatter_lstm.png',
                dpi=300)
    fig = plt.figure()
    axes = fig.add_subplot(1, 1, 1)
    plt.plot(x, y_test.T[0], label='True')
    plt.plot(x, predicted, label='Predicted')
    plt.legend()
    axes.grid()
    fig.tight_layout()
    fig.savefig(RESULT_PATH + str(batch_size) + data_file + str(prefix) + 'line_lstm.png', dpi=300)
def stat_metrics(X_test, y_test, predicted):
    """Print and return absolute-error statistics of the predictions.

    Bug fix: the original subtracted a 1-D ``predicted`` (n,) from a 2-D
    ``y_test`` (n, 1); NumPy broadcasting turned the difference into an
    (n, n) matrix, so every statistic was computed over n*n bogus pairings.
    Both arrays are now flattened to (n,) before taking the difference.

    :param X_test: test features (printed for inspection; needs >= 2 cols).
    :param y_test: true targets, shape (n, 1) or (n,).
    :param predicted: predictions, any shape reshapable to (n,).
    :returns: dict with mean/std/max/min absolute error (the original
        returned None, which no caller used).
    """
    predicted = np.reshape(predicted, y_test.shape[0])
    errors = np.abs(np.reshape(y_test, -1) - predicted)
    mean_error = np.mean(errors)
    std_error = np.std(errors)
    max_error = np.max(errors)
    min_error = np.min(errors)
    print(predicted)
    print(y_test.T[0])
    print(np.mean(X_test))
    print("#" * 20)
    print(mean_error)
    print(std_error)
    print(max_error)
    print(min_error)
    print("#" * 20)
    print(X_test[:, 1])
    return {'mean_error': mean_error, 'std_error': std_error,
            'max_error': max_error, 'min_error': min_error}
def run_regressor(model=LSTM2, sequence_length = SEQ_LENGTH, data=None, data_file='df_dh.csv', isload_model=True, testonly=False):
    """Train the LSTM regressor and persist predictions and plots.

    Uses the train/test/val split passed in ``data`` (or loads one via
    ``get_data()`` when ``data`` is None), fits a KerasRegressor with early
    stopping and checkpointing, appends the test MSE to the module-global
    ``mses`` list, writes predicted/true CSVs under DATAPATH and saves
    comparison plots.

    NOTE(review): ``isload_model`` and ``testonly`` are currently unused
    (the test-only path is commented out below); kept for API compatibility.

    :returns: (predicted_df, y_test_df) DataFrames in original units.
    """
    epochs = 1000
    path_to_dataset = data_file
    global mses
    if data is None:
        X_train, y_train, X_test, y_test, X_val, Y_val = get_data(sequence_length=sequence_length, stateful=STATEFUL,
                                                                  path_to_dataset=data_file)
    else:
        X_train, y_train, X_test, y_test, X_val, Y_val = data
    if STATEFUL:
        # Stateful models need the test set to be a whole number of batches.
        X_test = X_test[:int(X_test.shape[0] / batch_size) * batch_size]
        y_test = y_test[:int(y_test.shape[0] / batch_size) * batch_size]
    estimator = KerasRegressor(build_fn=lambda x=X_train: model(x))
    # if testonly == True:
    #     # predicted = model.predict(X_test, verbose=1,batch_size=batch_size)
    #     prediction = estimator.predict(X_test)
    #     stat_metrics(X_test, y_test, prediction)
    #     draw_scatter(predicted_arr[0], y_test, X_test, X_train, y_train, data_file)
    #     return
    early_stopping = EarlyStopping(monitor='val_loss', verbose=1, patience=40)
    checkpoint = ModelCheckpoint("./lstm.h5", monitor='val_loss', verbose=1, save_best_only=True,
                                 save_weights_only=True)
    ################
    hist = estimator.fit(X_train, y_train, validation_data=(X_val, Y_val), callbacks=[checkpoint, early_stopping],
                         epochs=epochs, batch_size=batch_size, verbose=1)
    # prediction = estimator.predict(X_test)
    score = mean_squared_error(y_test, estimator.predict(X_test))
    estimator_score = estimator.score(X_test, y_test)
    print(score)
    mses.append(score)
    prediction = estimator.predict(X_test)
    print(prediction)
    print(X_test)
    print("##############################################")
    # predicted_arr = prediction.T.tolist()
    # print(predicted_arr)
    global scaler
    # Undo the MaxAbs scaling so the CSVs/plots are in the original units.
    prediction_, y_test_, y_train_ = inverse_xy_transform(scaler, prediction, y_test, y_train)
    predicted_df = pd.DataFrame(prediction_)
    y_test_df = pd.DataFrame(y_test_)
    # X_test_df = pd.DataFrame(X_test) #columns
    predicted_df.to_csv(DATAPATH + str(prefix) + data_file + str(batch_size) + str(sequence_length) + "predicted_df.csv")
    y_test_df.to_csv(DATAPATH + str(prefix) + data_file + str(batch_size) + str(sequence_length) + "y_test_df.csv")
    # X_test_df.to_csv(DATAPATH+data_file+"X_test_df.csv")
    draw_scatter(prediction, y_test, X_test, X_train, y_train, data_file)
    his_figures(hist)
    draw_line(prediction, y_test, X_test, X_train, y_train, data_file)
    return predicted_df, y_test_df
if __name__ == '__main__':
    # Sweep sequence lengths 5..120 (step 5); repeat the whole sweep 10x.
    # get_data_single_user()
    x = range(5, 121, 5)
    total_mses =[]
    for i in range(1,11):
        # run_regressor() appends each test MSE into this module-global list
        mses = []
        for length in x:
            X_train, y_train, X_test, y_test, X_val, Y_val = get_data(sequence_length=length, stateful=STATEFUL)
            run_regressor(sequence_length = length,data=[X_train, y_train, X_test, y_test, X_val, Y_val],
                          data_file='df_dh.csv', isload_model=True)
        total_mses.append(mses)
    print(total_mses)
    np.save(RESULT_PATH + str(prefix) + 'mses.npy', np.asarray(total_mses))
'''
# stock_predict tf
# https://github.com/LouisScorpio/datamining/blob/master/tensorflow-program/rnn/stock_predict/stock_predict_2.py
# boston tf
# https://blog.csdn.net/baixiaozhe/article/details/54410313
########### consume predict keras
# http://www.cnblogs.com/arkenstone/p/5794063.html
# bike number predict keras
# http://resuly.me/2017/08/16/keras-rnn-tutorial/#%E4%BB%BB%E5%8A%A1%E6%8F%8F%E8%BF%B0
# Multivariate Time Series Forecasting with LSTMs in Keras
# https://zhuanlan.zhihu.com/p/28746221
'''
| 33.177852
| 136
| 0.640943
|
01c316b7c8d0189698bcbdec6d26d41bf3d7aa5f
| 1,063
|
c
|
C
|
chapter3/3.10.c
|
Ognimalf/Data-Structures-in-C
|
d35948015aa79178ec5402501a6c2a70752ef3d3
|
[
"CC0-1.0"
] | null | null | null |
chapter3/3.10.c
|
Ognimalf/Data-Structures-in-C
|
d35948015aa79178ec5402501a6c2a70752ef3d3
|
[
"CC0-1.0"
] | null | null | null |
chapter3/3.10.c
|
Ognimalf/Data-Structures-in-C
|
d35948015aa79178ec5402501a6c2a70752ef3d3
|
[
"CC0-1.0"
] | null | null | null |
#include "stdio.h"
#include "stdlib.h"
typedef struct list List;
/* Singly-linked node: 1-based position in the circle plus next pointer. */
struct list{
    int index;
    List *next;
};
/* Append a node holding val to the list; returns the (possibly new) head. */
List *insertNode(List *head, int val);
int main()
{
    /* Josephus problem: `size` people in a circle, every `flag`-th removed. */
    int size = 5, flag = 2;
    List *josephus = NULL;
    for (int i = 1; i <= size; ++i)
        josephus = insertNode(josephus, i);
    /* Close the list into a ring. */
    List *p = josephus, *pr = josephus;
    while (p->next) p = p->next;
    p->next = josephus;
    p = josephus;
    int k = 1;
    while (p->next != p) {
        if (k == flag) {
            /* Unlink and free the counted-out node, resume after it. */
            pr->next = p->next;
            free(p);
            p = pr->next;
            k = 1;
        } else {
            pr = p;
            p = p->next;
            ++k;
        }
    }
    printf("%d", p->index);
    /* Bug fix: the original freed `josephus`, the initial head, which has
     * typically already been freed during elimination (double free / use
     * after free). The only node still alive is the survivor `p`. */
    free(p);
    return 0;
}
/* Append a new node holding `val` to the list; returns the head (which is
 * the new node when the list was empty). O(n) tail walk, as before. */
List *insertNode(List *head, int val)
{
    List *node = (List *)malloc(sizeof (List));
    node->index = val;
    node->next = NULL;
    /* Walk the chain of next-pointers until the empty slot is found. */
    List **link = &head;
    while (*link != NULL)
        link = &(*link)->next;
    *link = node;
    return head;
}
| 20.442308
| 59
| 0.444026
|
d64ea3fc7b2eaf49af2fa8fe4822783f5bdd0819
| 2,051
|
cs
|
C#
|
src/NaGet.Protocol/Search/RawAutocompleteClient.cs
|
SeppPenner/NaGet
|
9053598a55cd2b83dab270540c8fe9ca4c52143b
|
[
"MIT"
] | null | null | null |
src/NaGet.Protocol/Search/RawAutocompleteClient.cs
|
SeppPenner/NaGet
|
9053598a55cd2b83dab270540c8fe9ca4c52143b
|
[
"MIT"
] | 1
|
2022-03-13T16:00:55.000Z
|
2022-03-13T16:00:55.000Z
|
src/NaGet.Protocol/Search/RawAutocompleteClient.cs
|
SeppPenner/NaGet
|
9053598a55cd2b83dab270540c8fe9ca4c52143b
|
[
"MIT"
] | null | null | null |
namespace NaGet.Protocol.Internal;
/// <summary>
/// The client used to search for packages.
///
/// See https://docs.microsoft.com/en-us/nuget/api/search-autocomplete-service-resource
/// </summary>
public class RawAutocompleteClient : IAutocompleteClient
{
    private readonly HttpClient client;
    private readonly string endpoint;

    /// <summary>
    /// Create a new Search client.
    /// </summary>
    /// <param name="httpClient">The HTTP client used to send requests.</param>
    /// <param name="autocompleteUrl">The NuGet server's autocomplete URL.</param>
    public RawAutocompleteClient(HttpClient httpClient, string autocompleteUrl)
    {
        client = httpClient ?? throw new ArgumentNullException(nameof(httpClient));
        endpoint = autocompleteUrl ?? throw new ArgumentNullException(nameof(autocompleteUrl));
    }

    /// <summary>Search package IDs matching <paramref name="query"/>.</summary>
    public async Task<AutocompleteResponse?> AutocompleteAsync(
        string? query = null,
        int skip = 0,
        int take = 20,
        bool includePrerelease = true,
        bool includeSemVer2 = true,
        CancellationToken cancellationToken = default)
    {
        var requestUri = RawSearchClient.AddSearchQueryString(
            endpoint,
            query,
            skip,
            take,
            includePrerelease,
            includeSemVer2,
            "q");
        return await client.GetFromJsonAsync<AutocompleteResponse?>(requestUri, cancellationToken);
    }

    /// <summary>List the versions of <paramref name="packageId"/>.</summary>
    public async Task<AutocompleteResponse?> ListPackageVersionsAsync(
        string packageId,
        bool includePrerelease = true,
        bool includeSemVer2 = true,
        CancellationToken cancellationToken = default)
    {
        var requestUri = RawSearchClient.AddSearchQueryString(
            endpoint,
            packageId,
            skip: null,
            take: null,
            includePrerelease,
            includeSemVer2,
            "id");
        return await client.GetFromJsonAsync<AutocompleteResponse?>(requestUri, cancellationToken);
    }
}
| 33.080645
| 107
| 0.653827
|
93710c1628201300e0cf3d136217816ab7f7daeb
| 4,320
|
cs
|
C#
|
Services/CoverDownloadService.cs
|
isenmann/Covers
|
30ff06f50c9f26aff13d93ee9e0c8c5177d36a99
|
[
"MIT"
] | 4
|
2021-02-08T06:47:00.000Z
|
2021-02-18T08:19:02.000Z
|
Services/CoverDownloadService.cs
|
isenmann/Covers
|
30ff06f50c9f26aff13d93ee9e0c8c5177d36a99
|
[
"MIT"
] | null | null | null |
Services/CoverDownloadService.cs
|
isenmann/Covers
|
30ff06f50c9f26aff13d93ee9e0c8c5177d36a99
|
[
"MIT"
] | null | null | null |
using Covers.Contracts;
using Covers.Contracts.Interfaces;
using ImageMagick;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging;
using System;
using System.Diagnostics;
using System.IO;
using System.Threading.Tasks;
namespace Covers.Services
{
    public class CoverDownloadService : ICoverDownloadService
    {
        private readonly ILogger<CoverDownloadService> _logger;
        private readonly ISpotifyService _spotifyService;
        // Read once from the "CoverDownloader" configuration section.
        private readonly CoverDownloadConfiguration _coverDownloaderConfiguration;
        public CoverDownloadService(ILogger<CoverDownloadService> logger, IConfiguration configuration, ISpotifyService spotifyService)
        {
            if (configuration == null)
            {
                throw new ArgumentNullException(nameof(configuration));
            }
            _logger = logger ?? throw new ArgumentNullException(nameof(logger));
            _spotifyService = spotifyService ?? throw new ArgumentNullException(nameof(spotifyService));
            _coverDownloaderConfiguration = configuration.GetSection(CoverDownloadConfiguration.CoverDownloader).Get<CoverDownloadConfiguration>();
        }
        /// <summary>
        /// Download the front/back cover images for an album, using the
        /// configured provider: Spotify (via ISpotifyService) or an external
        /// downloader executable (AAD or SACAD) that writes Front.jpg/Back.jpg
        /// into the working directory.
        /// </summary>
        /// <param name="albumName">Album title to search for.</param>
        /// <param name="artist">Artist name to search for.</param>
        /// <returns>
        /// Tuple of (front, back) PNG bytes; either item may be null, and the
        /// whole result is null on invalid input or downloader failure.
        /// </returns>
        public async Task<Tuple<byte[], byte[]>> DownloadCoverAsync(string albumName, string artist)
        {
            if (string.IsNullOrWhiteSpace(albumName) || string.IsNullOrWhiteSpace(artist))
            {
                return null;
            }
            if (string.IsNullOrWhiteSpace(_coverDownloaderConfiguration.Executable))
            {
                return null;
            }
            if (!File.Exists(_coverDownloaderConfiguration.Executable))
            {
                return null;
            }
            // Build the provider-specific command line for the external tool.
            string arguments = string.Empty;
            switch (_coverDownloaderConfiguration.Type)
            {
                case CoverDownloaderType.AAD:
                    arguments = $"/ar \"{artist}\" /al \"{albumName}\" /path \"%type%.jpg\" /coverType front,back /s \"Qobuz (fr-fr),Amazon (.com),iTunes\"";
                    break;
                case CoverDownloaderType.SACAD:
                    arguments = $"--disable-low-quality-sources \"{artist}\" \"{albumName}\" 800 Front.jpg";
                    break;
            }
            byte[] frontCover = null;
            byte[] backCover = null;
            if (_coverDownloaderConfiguration.Type == CoverDownloaderType.Spotify)
            {
                frontCover = await _spotifyService.GetAlbumCover(albumName, artist);
                backCover = null;
            }
            else
            {
                // External tool: run it and treat a non-zero exit as failure.
                var process = new Process();
                process.StartInfo.FileName = _coverDownloaderConfiguration.Executable;
                process.StartInfo.Arguments = arguments;
                process.Start();
                await process.WaitForExitAsync();
                var errorCode = process.ExitCode;
                if (errorCode != 0)
                {
                    return null;
                }
            }
            // Pick up the files the external tool produced (Spotify skips this).
            switch (_coverDownloaderConfiguration.Type)
            {
                case CoverDownloaderType.AAD:
                    frontCover = ScaleAndConvert("Front.jpg");
                    backCover = ScaleAndConvert("Back.jpg");
                    break;
                case CoverDownloaderType.SACAD:
                    frontCover = ScaleAndConvert("Front.jpg");
                    backCover = null; // not supported by SACAD
                    break;
            }
            // Clean up the temporary image files regardless of provider.
            if (File.Exists("Front.jpg"))
            {
                File.Delete("Front.jpg");
            }
            if (File.Exists("Back.jpg"))
            {
                File.Delete("Back.jpg");
            }
            return new Tuple<byte[], byte[]>(frontCover, backCover);
        }
        /// <summary>
        /// Load an image file, downscale it to max 800px width (keeping the
        /// aspect ratio) and return it re-encoded as PNG bytes; null when the
        /// file does not exist.
        /// </summary>
        private static byte[] ScaleAndConvert(string fileName)
        {
            if (File.Exists(fileName))
            {
                using var cover = new MagickImage(File.ReadAllBytes(fileName));
                if (cover.Width > 800)
                {
                    cover.Scale(new MagickGeometry { IgnoreAspectRatio = false, Width = 800 });
                }
                return cover.ToByteArray(MagickFormat.Png);
            }
            return null;
        }
    }
}
| 34.83871
| 157
| 0.554167
|
6afa8d6d62e293545e5d56f52de23a238f8ce040
| 23,350
|
c
|
C
|
yml.c
|
kzwkt/yambar
|
19cbf120fd6200fdf17b004a890557c74f99a684
|
[
"MIT"
] | 1
|
2022-02-20T20:58:15.000Z
|
2022-02-20T20:58:15.000Z
|
yml.c
|
kzwkt/yambar
|
19cbf120fd6200fdf17b004a890557c74f99a684
|
[
"MIT"
] | null | null | null |
yml.c
|
kzwkt/yambar
|
19cbf120fd6200fdf17b004a890557c74f99a684
|
[
"MIT"
] | null | null | null |
#include "yml.h"
#include <stdlib.h>
#include <stdio.h>
#include <stdint.h>
#include <stdbool.h>
#include <assert.h>
#include <yaml.h>
#include <tllist.h>
#define UNUSED __attribute__((unused))
/* Result codes for node insertion while parsing. */
enum yml_error {
    YML_ERR_NONE,
    YML_ERR_DUPLICATE_KEY,
    YML_ERR_INVALID_ANCHOR,
    YML_ERR_UNKNOWN,
};
/* Node kinds; ROOT wraps the document and owns the anchor table. */
enum node_type {
    ROOT,
    SCALAR,
    DICT,
    LIST,
};
struct yml_node;
/* One key/value entry of a DICT node. */
struct dict_pair {
    struct yml_node *key;
    struct yml_node *value;
};
/* Maps a YAML anchor name to the node it was attached to. */
struct anchor_map {
    char *anchor;
    const struct yml_node *node;
};
struct yml_node {
    enum node_type type;
    union {
        struct {
            struct yml_node *root;
            struct anchor_map anchors[100]; /* TODO: dynamic resize */
            size_t anchor_count;
        } root;
        struct {
            char *value;
        } scalar;
        struct {
            tll(struct dict_pair) pairs;
            /* Parser state: alternates between expecting a key and a value. */
            bool next_is_value;
        } dict;
        struct {
            tll(struct yml_node *) values;
        } list;
    };
    /* Source position, filled in by add_node() (line is 1-based). */
    size_t line;
    size_t column;
    struct yml_node *parent;
};
/* Deep-copy `node` (used when expanding alias references to anchors),
 * attaching the copy to `parent`. ROOT nodes are never cloned (asserted):
 * anchors can only be attached to scalars, dicts and lists. */
static struct yml_node *
clone_node(struct yml_node *parent, const struct yml_node *node)
{
    struct yml_node *clone = calloc(1, sizeof(*clone));
    clone->type = node->type;
    clone->line = node->line;
    clone->column = node->column;
    clone->parent = parent;
    switch (node->type) {
    case SCALAR:
        clone->scalar.value = strdup(node->scalar.value);
        break;
    case DICT:
        /* Recursively clone every key/value pair. */
        tll_foreach(node->dict.pairs, it) {
            struct dict_pair p = {
                .key = clone_node(clone, it->item.key),
                .value = clone_node(clone, it->item.value),
            };
            tll_push_back(clone->dict.pairs, p);
        }
        break;
    case LIST:
        tll_foreach(node->list.values, it)
            tll_push_back(clone->list.values, clone_node(clone, it->item));
        break;
    case ROOT:
        assert(false);
        break;
    }
    return clone;
}
/* Equality used for duplicate-key detection. Only scalar-vs-scalar
 * comparison is supported; any other combination compares unequal
 * (same TODO as before for composite nodes). */
static bool
node_equal(const struct yml_node *a, const struct yml_node *b)
{
    if (a->type != b->type || a->type != SCALAR)
        return false;
    return strcmp(a->scalar.value, b->scalar.value) == 0;
}
/* True if `key` compares equal (see node_equal()) to any existing key of
 * the DICT `node`. */
static bool
dict_has_key(const struct yml_node *node, const struct yml_node *key)
{
    assert(node->type == DICT);
    tll_foreach(node->dict.pairs, pair) {
        if (node_equal(pair->item.key, key))
            return true;
    }
    return false;
}
/* Attach `new_node` to `parent`, recording its source location. For DICT
 * parents, incoming nodes alternate key/value; `next_is_value` tracks which
 * role the incoming node plays. Returns YML_ERR_DUPLICATE_KEY when a dict
 * key repeats. */
static enum yml_error
add_node(struct yml_node *parent, struct yml_node *new_node, yaml_mark_t loc)
{
    new_node->line = loc.line + 1; /* yaml uses 0-based line numbers */
    new_node->column = loc.column;
    switch (parent->type) {
    case ROOT:
        assert(parent->root.root == NULL);
        parent->root.root = new_node;
        new_node->parent = parent;
        break;
    case DICT:
        if (!parent->dict.next_is_value) {
            if (dict_has_key(parent, new_node))
                return YML_ERR_DUPLICATE_KEY;
            /* Key: open a new pair whose value arrives with the next node. */
            tll_push_back(parent->dict.pairs, (struct dict_pair){.key = new_node});
            parent->dict.next_is_value = true;
        } else {
            tll_back(parent->dict.pairs).value = new_node;
            parent->dict.next_is_value = false;
        }
        new_node->parent = parent;
        break;
    case LIST:
        tll_push_back(parent->list.values, new_node);
        new_node->parent = parent;
        break;
    case SCALAR:
        assert(false);
        return YML_ERR_UNKNOWN;
    }
    return YML_ERR_NONE;
}
/* Record that `anchor` names `node`, so later alias events can resolve it.
 * The anchor name is copied; `node` is borrowed.
 *
 * Fix: the anchor table is a fixed-size array (see the TODO on the struct);
 * the original wrote past the end once 100 anchors were seen. Fail loudly
 * instead of overflowing. */
static void
add_anchor(struct yml_node *root, const char *anchor,
           const struct yml_node *node)
{
    assert(root->type == ROOT);
    assert(root->root.anchor_count <
           sizeof(root->root.anchors) / sizeof(root->root.anchors[0]));
    struct anchor_map *map = &root->root.anchors[root->root.anchor_count];
    map->anchor = strdup(anchor);
    map->node = node;
    root->root.anchor_count++;
}
/* Recursively resolve YAML merge keys ("<<"). A merge value may be a single
 * (typically aliased) dictionary, or a list of dictionaries; their key/value
 * pairs are hoisted into the containing dict and the "<<" entry itself is
 * removed. On failure, *error is set to a malloc'd "line:col: message"
 * string and false is returned. */
static bool
post_process(struct yml_node *node, char **error)
{
    switch (node->type) {
    case ROOT:
        if (node->root.root != NULL)
            if (!post_process(node->root.root, error))
                return false;
        break;
    case SCALAR:
        //assert(strcmp(node->scalar.value, "<<") != 0);
        break;
    case LIST:
        tll_foreach(node->list.values, it)
            if (!post_process(it->item, error))
                return false;
        break;
    case DICT:
        /* First resolve merges in children, then handle our own "<<" keys. */
        tll_foreach(node->dict.pairs, it) {
            if (!post_process(it->item.key, error) ||
                !post_process(it->item.value, error))
            {
                return false;
            }
        }
        tll_foreach(node->dict.pairs, it) {
            if (it->item.key->type != SCALAR)
                continue;
            if (strcmp(it->item.key->scalar.value, "<<") != 0)
                continue;
            if (it->item.value->type == LIST) {
                /*
                 * Merge value is a list (of dictionaries)
                 * e.g. <<: [*foo, *bar]
                 */
                tll_foreach(it->item.value->list.values, v_it) {
                    if (v_it->item->type != DICT) {
                        int cnt = snprintf(
                            NULL, 0, "%zu:%zu: cannot merge non-dictionary anchor",
                            v_it->item->line, v_it->item->column);
                        *error = malloc(cnt + 1);
                        snprintf(
                            *error, cnt + 1, "%zu:%zu: cannot merge non-dictionary anchor",
                            v_it->item->line, v_it->item->column);
                        return false;
                    }
                    tll_foreach(v_it->item->dict.pairs, vv_it) {
                        struct dict_pair p = {
                            .key = vv_it->item.key,
                            .value = vv_it->item.value,
                        };
                        /* TODO: handle this. Is it an error? Or
                         * should we replace the existing key/value
                         * pair */
                        assert(!dict_has_key(node, vv_it->item.key));
                        tll_push_back(node->dict.pairs, p);
                    }
                    /* Destroy list, but don't free (since its nodes
                     * have been moved to this node), *before*
                     * destroying the key/value nodes. This ensures
                     * the dict nodes aren't free:d in the
                     * yml_destroy() below). */
                    tll_free(v_it->item->dict.pairs);
                }
            } else {
                /*
                 * Merge value is a dictionary only
                 * e.g. <<: *foo
                 */
                if (it->item.value->type != DICT) {
                    int cnt = snprintf(
                        NULL, 0, "%zu:%zu: cannot merge non-dictionary anchor",
                        it->item.value->line, it->item.value->column);
                    *error = malloc(cnt + 1);
                    snprintf(
                        *error, cnt + 1, "%zu:%zu: cannot merge non-dictionary anchor",
                        it->item.value->line, it->item.value->column);
                    return false;
                }
                tll_foreach(it->item.value->dict.pairs, v_it) {
                    struct dict_pair p = {
                        .key = v_it->item.key,
                        .value = v_it->item.value,
                    };
                    /* TODO: handle this. Is it an error? Or should we
                     * replace the existing key/value pair */
                    assert(!dict_has_key(node, v_it->item.key));
                    tll_push_back(node->dict.pairs, p);
                }
                /* Destroy list here, *without* freeing nodes (since
                 * nodes have been moved to this node), *before*
                 * destroying the key/value nodes. This ensures the
                 * dict nodes aren't free:d in the yml_destroy()
                 * below */
                tll_free(it->item.value->dict.pairs);
            }
            /* Drop the now-empty "<<" entry itself. */
            yml_destroy(it->item.key);
            yml_destroy(it->item.value);
            tll_remove(node->dict.pairs, it);
        }
        break;
    }
    return true;
}
/* Render `err` into a human-readable message. When possible, the name of
 * the enclosing dict key is prefixed for context. Returns a pointer to a
 * static buffer that is overwritten on each call. */
static const char *
format_error(enum yml_error err,
             const struct yml_node *parent,
             const struct yml_node *node,
             const char *anchor)
{
    static char err_str[512];
    switch (err) {
    case YML_ERR_NONE:
        assert(false);
        break;
    case YML_ERR_DUPLICATE_KEY: {
        /* Find parent's key (i.e its name) */
        if (parent->parent != NULL &&
            parent->parent->type == DICT &&
            node->type == SCALAR)
        {
            tll_foreach(parent->parent->dict.pairs, pair) {
                if (pair->item.value != parent)
                    continue;
                if (pair->item.key->type != SCALAR)
                    break;
                assert(pair->item.key->type == SCALAR);
                assert(node->type == SCALAR);
                snprintf(err_str, sizeof(err_str),
                         "%s: duplicate key: '%s'",
                         pair->item.key->scalar.value,
                         node->scalar.value);
                return err_str;
            }
        }
        /* No named context available; fall back to the bare key. */
        if (node->type == SCALAR) {
            snprintf(err_str, sizeof(err_str),
                     "duplicate key: %s", node->scalar.value);
        } else
            snprintf(err_str, sizeof(err_str), "duplicate key");
        break;
    }
    case YML_ERR_INVALID_ANCHOR:
        if (parent->parent != NULL && parent->parent->type == DICT) {
            tll_foreach(parent->parent->dict.pairs, pair) {
                if (pair->item.value != parent)
                    continue;
                if (pair->item.key->type != SCALAR)
                    break;
                snprintf(err_str, sizeof(err_str),
                         "%s: invalid anchor: %s",
                         pair->item.key->scalar.value,
                         anchor != NULL ? anchor : "<unknown>");
                return err_str;
            }
        }
        snprintf(err_str, sizeof(err_str), "invalid anchor: %s",
                 anchor != NULL ? anchor : "<unknown>");
        break;
    case YML_ERR_UNKNOWN:
        snprintf(err_str, sizeof(err_str), "unknown error");
        break;
    }
    return err_str;
}
/* Parse the YAML document read from `yml` into a node tree. Returns the
 * ROOT node on success; on failure returns NULL with *error (if non-NULL)
 * set to a malloc'd "line:col: message" string. Anchors are recorded as
 * they appear; aliases are expanded by deep-copying the anchored node.
 * Merge keys are resolved afterwards by post_process(). */
struct yml_node *
yml_load(FILE *yml, char **error)
{
    yaml_parser_t yaml;
    yaml_parser_initialize(&yaml);
    yaml_parser_set_input_file(&yaml, yml);
    bool done = false;
    int indent UNUSED = 0;
    struct yml_node *root = malloc(sizeof(*root));
    root->type = ROOT;
    root->root.root = NULL;
    root->root.anchor_count = 0;
    /* `n` tracks the container node currently being filled in. */
    struct yml_node *n = root;
    const char *error_str = NULL;
    while (!done) {
        yaml_event_t event;
        if (!yaml_parser_parse(&yaml, &event)) {
            /* Parser-level failure: format libyaml's problem/context. */
            if (error != NULL) {
                int cnt = snprintf(
                    NULL, 0, "%zu:%zu: %s %s",
                    yaml.problem_mark.line + 1,
                    yaml.problem_mark.column,
                    yaml.problem,
                    yaml.context != NULL ? yaml.context : "");
                *error = malloc(cnt + 1);
                snprintf(*error, cnt + 1, "%zu:%zu: %s %s",
                         yaml.problem_mark.line + 1,
                         yaml.problem_mark.column,
                         yaml.problem,
                         yaml.context != NULL ? yaml.context : "");
            }
            goto err_no_error_formatting;
        }
        switch (event.type) {
        case YAML_NO_EVENT:
            break;
        case YAML_STREAM_START_EVENT:
            indent += 2;
            break;
        case YAML_STREAM_END_EVENT:
            indent -= 2;
            done = true;
            break;
        case YAML_DOCUMENT_START_EVENT:
            indent += 2;
            break;
        case YAML_DOCUMENT_END_EVENT:
            indent -= 2;
            break;
        case YAML_ALIAS_EVENT: {
            /* Expand *alias by deep-copying the node the anchor points at. */
            bool got_match = false;
            for (size_t i = 0; i < root->root.anchor_count; i++) {
                const struct anchor_map *map = &root->root.anchors[i];
                if (strcmp(map->anchor, (const char *)event.data.alias.anchor) != 0)
                    continue;
                struct yml_node *clone = clone_node(NULL, map->node);
                assert(clone != NULL);
                enum yml_error err = add_node(n, clone, event.start_mark);
                if (err != YML_ERR_NONE) {
                    error_str = format_error(err, n, clone, NULL);
                    yml_destroy(clone);
                    yaml_event_delete(&event);
                    goto err;
                }
                got_match = true;
                break;
            }
            if (!got_match) {
                error_str = format_error(
                    YML_ERR_INVALID_ANCHOR, n, NULL,
                    (const char *)event.data.alias.anchor);
                yaml_event_delete(&event);
                goto err;
            }
            break;
        }
        case YAML_SCALAR_EVENT: {
            struct yml_node *new_scalar = calloc(1, sizeof(*new_scalar));
            new_scalar->type = SCALAR;
            new_scalar->scalar.value = strndup(
                (const char*)event.data.scalar.value, event.data.scalar.length);
            enum yml_error err = add_node(n, new_scalar, event.start_mark);
            if (err != YML_ERR_NONE) {
                error_str = format_error(err, n, new_scalar, NULL);
                yml_destroy(new_scalar);
                yaml_event_delete(&event);
                goto err;
            }
            if (event.data.scalar.anchor != NULL) {
                const char *anchor = (const char *)event.data.scalar.anchor;
                add_anchor(root, anchor, new_scalar);
            }
            break;
        }
        case YAML_SEQUENCE_START_EVENT: {
            indent += 2;
            struct yml_node *new_list = calloc(1, sizeof(*new_list));
            new_list->type = LIST;
            enum yml_error err = add_node(n, new_list, event.start_mark);
            if (err != YML_ERR_NONE) {
                error_str = format_error(err, n, new_list, NULL);
                yml_destroy(new_list);
                yaml_event_delete(&event);
                goto err;
            }
            /* Descend: subsequent events fill this list. */
            n = new_list;
            if (event.data.sequence_start.anchor != NULL) {
                const char *anchor = (const char *)event.data.sequence_start.anchor;
                add_anchor(root, anchor, new_list);
            }
            break;
        }
        case YAML_SEQUENCE_END_EVENT:
            indent -= 2;
            assert(n->parent != NULL);
            n = n->parent;
            break;
        case YAML_MAPPING_START_EVENT: {
            indent += 2;
            struct yml_node *new_dict = calloc(1, sizeof(*new_dict));
            new_dict->type = DICT;
            enum yml_error err = add_node(n, new_dict, event.start_mark);
            if (err != YML_ERR_NONE) {
                error_str = format_error(err, n, new_dict, NULL);
                yml_destroy(new_dict);
                yaml_event_delete(&event);
                goto err;
            }
            /* Descend: subsequent events fill this dict. */
            n = new_dict;
            if (event.data.mapping_start.anchor != NULL) {
                const char *anchor = (const char *)event.data.mapping_start.anchor;
                add_anchor(root, anchor, new_dict);
            }
            break;
        }
        case YAML_MAPPING_END_EVENT:
            /* A mapping must not end between a key and its value. */
            assert(!n->dict.next_is_value);
            indent -= 2;
            assert(n->parent != NULL);
            n = n->parent;
            break;
        }
        yaml_event_delete(&event);
    }
    yaml_parser_delete(&yaml);
    if (!post_process(root, error)) {
        yml_destroy(root);
        return NULL;
    }
    return root;
err:
    /* Tree-level failure: prefix the formatted message with the location. */
    if (error_str != NULL) {
        int cnt = snprintf(
            NULL, 0, "%zu:%zu: %s",
            yaml.mark.line + 1,
            yaml.mark.column,
            error_str);
        *error = malloc(cnt + 1);
        snprintf(
            *error, cnt + 1, "%zu:%zu: %s",
            yaml.mark.line + 1,
            yaml.mark.column,
            error_str);
    } else {
        int cnt = snprintf(NULL, 0, "%zu:%zu: unknown error",
                           yaml.mark.line + 1, yaml.mark.column);
        *error = malloc(cnt + 1);
        snprintf(*error, cnt + 1, "%zu:%zu: unknown error",
                 yaml.mark.line + 1, yaml.mark.column);
    }
err_no_error_formatting:
    yml_destroy(root);
    yaml_parser_delete(&yaml);
    return NULL;
}
/*
 * Recursively free a node and everything it owns. Safe to call with NULL.
 *
 * Ownership rules visible here: ROOT owns the anchor-name strings and the
 * tree itself; SCALAR owns its value string; LIST/DICT own their children.
 */
void
yml_destroy(struct yml_node *node)
{
    if (node == NULL)
        return;
    switch (node->type) {
    case ROOT:
        yml_destroy(node->root.root);
        /* Only the anchor *names* are freed here; the anchored nodes live
         * in the tree and were already released by the call above. */
        for (size_t i = 0; i < node->root.anchor_count; i++)
            free(node->root.anchors[i].anchor);
        break;
    case SCALAR:
        free(node->scalar.value);
        break;
    case LIST:
        /* Frees each child via yml_destroy, then the list entries. */
        tll_free_and_free(node->list.values, yml_destroy);
        break;
    case DICT:
        tll_foreach(node->dict.pairs, it) {
            yml_destroy(it->item.key);
            yml_destroy(it->item.value);
        }
        tll_free(node->dict.pairs);
        break;
    }
    free(node);
}
/* True for nodes produced from YAML scalar events. */
bool
yml_is_scalar(const struct yml_node *node)
{
    if (node->type == SCALAR)
        return true;
    return false;
}
/* True for nodes produced from YAML mapping events. */
bool
yml_is_dict(const struct yml_node *node)
{
    if (node->type == DICT)
        return true;
    return false;
}
/* True for nodes produced from YAML sequence events. */
bool
yml_is_list(const struct yml_node *node)
{
    if (node->type == LIST)
        return true;
    return false;
}
/*
 * Look up a value by dotted path, e.g. "bar.font.family".
 *
 * The path is tokenized on '.' via strtok(), hence the mutable strdup'd
 * copy. Each component must name a key in the current DICT node; the
 * value of the final component is returned, or NULL if not found.
 * A ROOT node is transparently unwrapped first.
 *
 * NOTE(review): if an intermediate component is *not* found, the loop
 * keeps the current dict and tries the next component against it instead
 * of failing immediately -- confirm this fall-through is intended.
 */
const struct yml_node *
yml_get_value(const struct yml_node *node, const char *_path)
{
    if (node != NULL && node->type == ROOT)
        node = node->root.root;
    if (node == NULL)
        return NULL;
    char *path = strdup(_path);
    for (const char *part = strtok(path, "."), *next_part = strtok(NULL, ".");
         part != NULL;
         part = next_part, next_part = strtok(NULL, "."))
    {
        /* Only dicts can be traversed by key (asserts on scalar/list). */
        assert(yml_is_dict(node));
        tll_foreach(node->dict.pairs, it) {
            assert(yml_is_scalar(it->item.key));
            if (strcmp(it->item.key->scalar.value, part) == 0) {
                if (next_part == NULL) {
                    /* Last path component: this is the requested value. */
                    free(path);
                    return it->item.value;
                }
                node = it->item.value;
                break;
            }
        }
    }
    free(path);
    return NULL;
}
/*
 * Return an iterator positioned at the first element of 'list'.
 *
 * tll_foreach is used only to reach the head entry: we return from inside
 * the loop on the first iteration. 'private' stores the raw tll entry so
 * yml_list_next() can advance it; node == NULL signals an empty list /
 * end of iteration.
 */
struct yml_list_iter
yml_list_iter(const struct yml_node *list)
{
    assert(yml_is_list(list));
    tll_foreach(list->list.values, it) {
        return (struct yml_list_iter){
            .node = it->item,
            .private = it,
        };
    }
    return (struct yml_list_iter){
        .node = NULL,
        .private = NULL,
    };
}
/*
 * Advance the iterator to the next list element (no-op at the end).
 *
 * 'd' is never dereferenced: the bogus 0xdeadbeef pointer exists solely so
 * __typeof__(d->list.values.head) can name the otherwise-anonymous tll
 * entry type, letting us cast the void* 'private' back to it.
 */
void
yml_list_next(struct yml_list_iter *iter)
{
    if (iter->private == NULL)
        return;
    const struct yml_node *d = (const void *)(uintptr_t)0xdeadbeef;
    __typeof__(d->list.values.head) it = (__typeof__(d->list.values.head))iter->private;
    __typeof__(d->list.values.head) next = it->next;
    iter->node = next != NULL ? next->item : NULL;
    iter->private = next;
}
size_t
yml_list_length(const struct yml_node *list)
{
assert(yml_is_list(list));
size_t length = 0;
for (struct yml_list_iter it = yml_list_iter(list);
it.node != NULL;
yml_list_next(&it), length++)
;
return length;
}
/*
 * Return an iterator at the first key/value pair of 'dict'.
 *
 * As in yml_list_iter(), tll_foreach only serves to reach the head entry;
 * 'private1' keeps the raw tll entry for yml_dict_next(). key/value ==
 * NULL signals an empty dict / end of iteration.
 */
struct yml_dict_iter
yml_dict_iter(const struct yml_node *dict)
{
    assert(yml_is_dict(dict));
    tll_foreach(dict->dict.pairs, it) {
        return (struct yml_dict_iter){
            .key = it->item.key,
            .value = it->item.value,
            .private1 = it,
        };
    }
    return (struct yml_dict_iter) {
        .key = NULL,
        .value = NULL,
        .private1 = NULL,
    };
}
/*
 * Advance to the next key/value pair (no-op once exhausted).
 *
 * The 0xdeadbeef pointer is never dereferenced; it only provides a
 * __typeof__ handle on the anonymous tll entry type so the void*
 * 'private1' can be cast back.
 */
void
yml_dict_next(struct yml_dict_iter *iter)
{
    const struct yml_node *d = (const void *)(uintptr_t)0xdeadbeef;
    __typeof__(d->dict.pairs.head) it = (__typeof__(d->dict.pairs.head))iter->private1;
    if (it == NULL)
        return;
    __typeof__(d->dict.pairs.head) next = it->next;
    iter->key = next != NULL ? next->item.key : NULL;
    iter->value = next != NULL ? next->item.value : NULL;
    iter->private1 = next;
}
/* Number of key/value pairs in 'dict' (O(1) via the tll length field). */
size_t
yml_dict_length(const struct yml_node *dict)
{
    assert(yml_is_dict(dict));
    return tll_length(dict->dict.pairs);
}
/* Raw string of a scalar node, or NULL for non-scalars. */
const char *
yml_value_as_string(const struct yml_node *value)
{
    return yml_is_scalar(value) ? value->scalar.value : NULL;
}
/*
 * Try to parse a scalar node as a base-10 long into *ret.
 *
 * Succeeds only if the *entire* string was consumed: "%n" records how many
 * characters sscanf used, which must equal strlen(s), so trailing junk
 * ("12abc") is rejected.
 */
static bool
_as_int(const struct yml_node *value, long *ret)
{
    const char *s = yml_value_as_string(value);
    if (s == NULL)
        return false;
    int cnt;
    int res = sscanf(s, "%ld%n", ret, &cnt);
    return res == 1 && strlen(s) == (size_t)cnt;
}
/* True if the scalar parses fully as a long. */
bool
yml_value_is_int(const struct yml_node *value)
{
    /* Probe the conversion; the parsed value itself is discarded. */
    long scratch;
    return _as_int(value, &scratch);
}
/* Integer value of a scalar node; -1 when it is not a valid integer
 * (use yml_value_is_int() to distinguish a genuine -1). */
long
yml_value_as_int(const struct yml_node *value)
{
    long parsed = -1;
    (void)_as_int(value, &parsed);
    return parsed;
}
/*
 * Try to interpret a scalar node as a boolean into *ret.
 * Accepts the usual YAML-ish spellings, case-insensitively.
 */
static bool
_as_bool(const struct yml_node *value, bool *ret)
{
    if (!yml_is_scalar(value))
        return false;
    static const char *const truthy[] = {"y", "yes", "true", "on"};
    static const char *const falsy[] = {"n", "no", "false", "off"};
    const char *v = yml_value_as_string(value);
    for (size_t i = 0; i < sizeof(truthy) / sizeof(truthy[0]); i++) {
        if (strcasecmp(v, truthy[i]) == 0) {
            *ret = true;
            return true;
        }
    }
    for (size_t i = 0; i < sizeof(falsy) / sizeof(falsy[0]); i++) {
        if (strcasecmp(v, falsy[i]) == 0) {
            *ret = false;
            return true;
        }
    }
    return false;
}
/* True if the scalar is a recognized boolean spelling. */
bool
yml_value_is_bool(const struct yml_node *value)
{
    /* Probe the conversion; the boolean itself is discarded. */
    bool scratch;
    return _as_bool(value, &scratch);
}
/* Boolean value of a scalar node; false when it is not a recognized
 * boolean (use yml_value_is_bool() to distinguish a genuine false). */
bool
yml_value_as_bool(const struct yml_node *value)
{
    bool parsed = false;
    (void)_as_bool(value, &parsed);
    return parsed;
}
/* Source line where this node was parsed, as recorded at parse time
 * (base offset not normalized here -- see the parser's mark handling). */
size_t
yml_source_line(const struct yml_node *node)
{
    return node->line;
}
/* Source column where this node was parsed, as recorded at parse time. */
size_t
yml_source_column(const struct yml_node *node)
{
    return node->column;
}
/*
 * Recursively pretty-print a node to stdout at the given indent depth.
 *
 * Dict values and list elements that are themselves containers start on
 * the next line, indented two extra columns; scalar values print inline
 * (indent 0) on the same line as their key / dash.
 */
static void
_print_node(const struct yml_node *n, int indent)
{
    if (n == NULL)
        return;
    switch (n->type) {
    case ROOT:
        _print_node(n->root.root, indent);
        break;
    case DICT:
        tll_foreach(n->dict.pairs, it) {
            _print_node(it->item.key, indent);
            printf(": ");
            if (it->item.value->type != SCALAR) {
                printf("\n");
                _print_node(it->item.value, indent + 2);
            } else {
                _print_node(it->item.value, 0);
                printf("\n");
            }
        }
        break;
    case LIST:
        tll_foreach(n->list.values, it) {
            printf("%*s- ", indent, "");
            if (it->item->type != SCALAR) {
                printf("\n");
                _print_node(it->item, indent + 2);
            } else {
                _print_node(it->item, 0);
            }
        }
        break;
    case SCALAR:
        printf("%*s%s", indent, "", n->scalar.value);
        break;
    }
}
/* Debug helper: dump a parsed YAML tree to stdout in YAML-ish form. */
void
print_node(const struct yml_node *n)
{
    _print_node(n, 0);
}
| 25.886918
| 91
| 0.507066
|
726dc5d861e488323657b750ecbb8b58903e3b70
| 4,239
|
lua
|
Lua
|
opennmt/SeqTagger.lua
|
OpenNMT/Im2Text
|
6184e033c1cdcfdba984b3a5cf7aaa1131f6d096
|
[
"MIT"
] | 153
|
2016-12-19T18:01:41.000Z
|
2022-03-30T15:13:46.000Z
|
opennmt/SeqTagger.lua
|
OpenNMT/Im2Text
|
6184e033c1cdcfdba984b3a5cf7aaa1131f6d096
|
[
"MIT"
] | 17
|
2016-12-20T07:59:28.000Z
|
2020-10-24T21:44:04.000Z
|
opennmt/SeqTagger.lua
|
OpenNMT/Im2Text
|
6184e033c1cdcfdba984b3a5cf7aaa1131f6d096
|
[
"MIT"
] | 26
|
2016-12-20T03:18:13.000Z
|
2020-07-10T13:14:05.000Z
|
--[[ Sequence tagger: per-timestep classification over an encoded source
     sequence. (The original header comment described a seq2seq attention
     model, which this module is not.) ]]
local SeqTagger, parent = torch.class('SeqTagger', 'Model')

-- Command-line options owned by this model; merged into self.args by
-- __init/load via ExtendedCmdLine.getModuleOpts.
local options = {
  {
    '-word_vec_size', '500',
    [[Comma-separated list of embedding sizes: `word[,feat1[,feat2[,...] ] ]`.]],
    {
      structural = 0
    }
  },
  {
    '-pre_word_vecs_enc', '',
    [[Path to pretrained word embeddings on the encoder side serialized as a Torch tensor.]],
    {
      valid = onmt.utils.ExtendedCmdLine.fileNullOrExists,
      init_only = true
    }
  },
  {
    '-fix_word_vecs_enc', 0,
    [[Fix word embeddings on the encoder side.]],
    {
      enum = {0, 1},
      structural = 1
    }
  },
  {
    '-feat_merge', 'concat',
    [[Merge action for the features embeddings.]],
    {
      enum = {'concat', 'sum'},
      structural = 0
    }
  },
  {
    '-feat_vec_exponent', 0.7,
    [[When features embedding sizes are not set and using `-feat_merge concat`, their dimension
      will be set to `N^feat_vec_exponent` where `N` is the number of values the feature takes.]],
    {
      structural = 0
    }
  },
  {
    '-feat_vec_size', 20,
    [[When features embedding sizes are not set and using `-feat_merge sum`,
      this is the common embedding size of the features]],
    {
      valid = onmt.utils.ExtendedCmdLine.isUInt(),
      structural = 0
    }
  }
}
-- Registers this model's command-line options plus those of its
-- submodules (encoder and factory).
function SeqTagger.declareOpts(cmd)
  cmd:setCmdLineOptions(options, SeqTagger.modelName())
  onmt.Encoder.declareOpts(cmd)
  onmt.Factory.declareOpts(cmd)
end
-- Builds a fresh model: a word encoder over the source vocabulary and a
-- generator applied per timestep over the target vocabulary, with a
-- parallel NLL criterion sized from the target dictionaries.
function SeqTagger:__init(args, dicts, verbose)
  parent.__init(self, args)
  -- Keep only the options declared by this module.
  onmt.utils.Table.merge(self.args, onmt.utils.ExtendedCmdLine.getModuleOpts(args, options))
  self.models.encoder = onmt.Factory.buildWordEncoder(self.args, dicts.src, verbose)
  self.models.generator = onmt.Factory.buildGenerator(self.args.rnn_size, dicts.tgt)
  self.criterion = onmt.ParallelClassNLLCriterion(onmt.Factory.getOutputSizes(dicts.tgt))
end
-- Restores a model from serialized submodules (e.g. a checkpoint).
-- `isReplica` is forwarded so loaded modules can be set up as shared
-- replicas (used for multi-GPU training).
function SeqTagger.load(args, models, dicts, isReplica)
  local self = torch.factory('SeqTagger')()
  parent.__init(self, args)
  onmt.utils.Table.merge(self.args, onmt.utils.ExtendedCmdLine.getModuleOpts(args, options))
  self.models.encoder = onmt.Factory.loadEncoder(models.encoder, isReplica)
  self.models.generator = onmt.Factory.loadGenerator(models.generator, isReplica)
  self.criterion = onmt.ParallelClassNLLCriterion(onmt.Factory.getOutputSizes(dicts.tgt))
  return self
end
-- Returns the human-readable model name (used for the option group title).
function SeqTagger.modelName()
  return 'Sequence Tagger'
end
-- Returns the expected data mode: parallel source/target text.
function SeqTagger.dataType()
  return 'bitext'
end
-- Attaches profiler hooks to every component exercised in training.
function SeqTagger:enableProfiling()
  _G.profiler.addHook(self.models.encoder, 'encoder')
  _G.profiler.addHook(self.models.generator, 'generator')
  _G.profiler.addHook(self.criterion, 'criterion')
end
-- Reference output for a batch: the gold tag at every source position.
function SeqTagger:getOutput(batch)
  return batch.targetOutput
end
-- Forward-only loss: encode once, then apply the generator at every
-- source timestep and sum the criterion against the aligned target.
function SeqTagger:forwardComputeLoss(batch)
  local _, context = self.models.encoder:forward(batch)
  local loss = 0
  for t = 1, batch.sourceLength do
    -- context:select(2, t) is the encoder state at timestep t.
    local genOutputs = self.models.generator:forward(context:select(2, t))
    local output = batch:getTargetOutput(t)
    -- Same format with and without features.
    if torch.type(output) ~= 'table' then output = { output } end
    loss = loss + self.criterion:forward(genOutputs, output)
  end
  return loss
end
-- One training step: forward the encoder, then per timestep forward the
-- generator + criterion, backpropagate through the generator into a
-- gradient-of-context buffer, and finally backpropagate the encoder.
-- Returns the (unnormalized) summed loss.
function SeqTagger:trainNetwork(batch)
  local loss = 0
  local _, context = self.models.encoder:forward(batch)
  local gradContexts = context:clone():zero()
  -- For each word of the sentence, generate target.
  for t = 1, batch.sourceLength do
    local genOutputs = self.models.generator:forward(context:select(2, t))
    local output = batch:getTargetOutput(t)
    -- Same format with and without features.
    if torch.type(output) ~= 'table' then output = { output } end
    loss = loss + self.criterion:forward(genOutputs, output)
    local genGradOutput = self.criterion:backward(genOutputs, output)
    -- Normalize gradients by the full (possibly distributed) batch size.
    for j = 1, #genGradOutput do
      genGradOutput[j]:div(batch.totalSize)
    end
    gradContexts[{{}, t}]:copy(self.models.generator:backward(context:select(2, t), genGradOutput))
  end
  self.models.encoder:backward(batch, nil, gradContexts)
  return loss
end
return SeqTagger
| 27.348387
| 99
| 0.701109
|
d6363aa23b8f7e68bd50d2e540bcae69a846e98d
| 765
|
cs
|
C#
|
src/Ticketbooth.Api/Requests/Examples/SetNoReleaseBlocksRequestExample.cs
|
drmathias/Ticketbooth
|
522434a8daf43f7b71dd2f96e72bab4a09b1bcc8
|
[
"MIT"
] | 1
|
2019-12-02T18:52:28.000Z
|
2019-12-02T18:52:28.000Z
|
src/Ticketbooth.Api/Requests/Examples/SetNoReleaseBlocksRequestExample.cs
|
drmathias/SmartTicket
|
522434a8daf43f7b71dd2f96e72bab4a09b1bcc8
|
[
"MIT"
] | 22
|
2019-12-18T23:21:17.000Z
|
2020-01-28T12:06:23.000Z
|
src/Ticketbooth.Api/Requests/Examples/SetNoReleaseBlocksRequestExample.cs
|
drmathias/Ticketbooth
|
522434a8daf43f7b71dd2f96e72bab4a09b1bcc8
|
[
"MIT"
] | null | null | null |
using Swashbuckle.AspNetCore.Examples;
namespace Ticketbooth.Api.Requests.Examples
{
/// <summary>
/// Supplies a representative <c>SetNoReleaseBlocksRequest</c> payload for the
/// Swagger/OpenAPI documentation.
/// </summary>
public class SetNoReleaseBlocksRequestExample : IExamplesProvider
{
    private readonly ExampleGenerator _exampleGenerator;

    public SetNoReleaseBlocksRequestExample(ExampleGenerator exampleGenerator) =>
        _exampleGenerator = exampleGenerator;

    /// <summary>Builds the example request object shown in the API docs.</summary>
    public object GetExamples() =>
        new SetNoReleaseBlocksRequest
        {
            AccountName = "account 0",
            Count = 5000,
            GasPrice = 100,
            Password = "Hunter2",
            Sender = _exampleGenerator.ValidAddress(),
            WalletName = "Wallet One"
        };
}
}
| 27.321429
| 82
| 0.586928
|
743a8b8699e4072235c915a02f3d7c30f170c092
| 3,265
|
h
|
C
|
pyhmmer/reexports/esl_sqio_ascii.h
|
althonos/pyhmmer
|
eb6fe7c0e74557e0ae9d647693711583d2d86b68
|
[
"MIT"
] | 26
|
2020-11-10T22:57:49.000Z
|
2022-03-24T16:58:55.000Z
|
pyhmmer/reexports/esl_sqio_ascii.h
|
althonos/pyhmmer
|
eb6fe7c0e74557e0ae9d647693711583d2d86b68
|
[
"MIT"
] | 13
|
2020-11-12T11:41:08.000Z
|
2022-03-09T18:17:48.000Z
|
pyhmmer/reexports/esl_sqio_ascii.h
|
althonos/pyhmmer
|
eb6fe7c0e74557e0ae9d647693711583d2d86b68
|
[
"MIT"
] | 2
|
2021-04-04T05:13:07.000Z
|
2021-11-30T09:11:23.000Z
|
#include "esl_config.h"
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <ctype.h>
#include <unistd.h>
#include "easel.h"
#include "esl_alphabet.h"
#include "esl_msa.h"
#include "esl_msafile.h"
#include "esl_sqio.h"
#include "esl_sq.h"
#include "esl_ssi.h"
/* format specific routines */
int sqascii_GuessFileFormat(ESL_SQFILE *sqfp, int *ret_fmt);
int sqascii_Position (ESL_SQFILE *sqfp, off_t offset);
void sqascii_Close (ESL_SQFILE *sqfp);
int sqascii_SetDigital (ESL_SQFILE *sqfp, const ESL_ALPHABET *abc);
int sqascii_GuessAlphabet (ESL_SQFILE *sqfp, int *ret_type);
int sqascii_Read (ESL_SQFILE *sqfp, ESL_SQ *sq);
int sqascii_ReadInfo (ESL_SQFILE *sqfp, ESL_SQ *sq);
int sqascii_ReadSequence (ESL_SQFILE *sqfp, ESL_SQ *sq);
int sqascii_ReadWindow (ESL_SQFILE *sqfp, int C, int W, ESL_SQ *sq);
int sqascii_ReadBlock (ESL_SQFILE *sqfp, ESL_SQ_BLOCK *sqBlock, int max_residues, int max_sequences, int max_init_window, int long_target);
int sqascii_Echo (ESL_SQFILE *sqfp, const ESL_SQ *sq, FILE *ofp);
int sqascii_IsRewindable (const ESL_SQFILE *sqfp);
const char *sqascii_GetError (const ESL_SQFILE *sqfp);
int sqascii_OpenSSI (ESL_SQFILE *sqfp, const char *ssifile_hint);
int sqascii_PositionByKey (ESL_SQFILE *sqfp, const char *key);
int sqascii_PositionByNumber(ESL_SQFILE *sqfp, int which);
int sqascii_Fetch (ESL_SQFILE *sqfp, const char *key, ESL_SQ *sq);
int sqascii_FetchInfo (ESL_SQFILE *sqfp, const char *key, ESL_SQ *sq);
int sqascii_FetchSubseq (ESL_SQFILE *sqfp, const char *source, int64_t start, int64_t end, ESL_SQ *sq);
/* Internal routines shared by parsers. */
int loadmem (ESL_SQFILE *sqfp);
int loadbuf (ESL_SQFILE *sqfp);
int nextchar (ESL_SQFILE *sqfp, char *ret_c);
int seebuf (ESL_SQFILE *sqfp, int64_t maxn, int64_t *opt_nres, int64_t *opt_endpos);
void addbuf (ESL_SQFILE *sqfp, ESL_SQ *sq, int64_t nres);
void skipbuf (ESL_SQFILE *sqfp, int64_t nskip);
int read_nres(ESL_SQFILE *sqfp, ESL_SQ *sq, int64_t nskip, int64_t nres, int64_t *opt_actual_nres);
int skip_whitespace(ESL_SQFILE *sqfp);
/* EMBL format; also UniProt, TrEMBL */
void config_embl(ESL_SQFILE *sqfp);
void inmap_embl (ESL_SQFILE *sqfp, const ESL_DSQ *abc_inmap);
int header_embl(ESL_SQFILE *sqfp, ESL_SQ *sq);
int skip_embl (ESL_SQFILE *sqfp, ESL_SQ *sq);
int end_embl (ESL_SQFILE *sqfp, ESL_SQ *sq);
/* GenBank format; also DDBJ */
void config_genbank(ESL_SQFILE *sqfp);
void inmap_genbank (ESL_SQFILE *sqfp, const ESL_DSQ *abc_inmap);
int header_genbank(ESL_SQFILE *sqfp, ESL_SQ *sq);
int skip_genbank (ESL_SQFILE *sqfp, ESL_SQ *sq);
int end_genbank (ESL_SQFILE *sqfp, ESL_SQ *sq);
/* FASTA format */
void config_fasta(ESL_SQFILE *sqfp);
void inmap_fasta (ESL_SQFILE *sqfp, const ESL_DSQ *abc_inmap);
int header_fasta(ESL_SQFILE *sqfp, ESL_SQ *sq);
int skip_fasta (ESL_SQFILE *sqfp, ESL_SQ *sq);
int end_fasta (ESL_SQFILE *sqfp, ESL_SQ *sq);
/* daemon format */
void config_daemon(ESL_SQFILE *sqfp);
void inmap_daemon (ESL_SQFILE *sqfp, const ESL_DSQ *abc_inmap);
int end_daemon (ESL_SQFILE *sqfp, ESL_SQ *sq);
/* HMMPGMD format */
int fileheader_hmmpgmd(ESL_SQFILE *sqfp);
| 41.858974
| 146
| 0.728943
|
b069b63e8a38556ca29f8d1f68f507923cdd046c
| 15,516
|
py
|
Python
|
phangsPipeline/scBackups.py
|
astrojysun/phangs_imaging_scripts
|
e82f303b5d79a29d9269bce69ddb7456e08e3a9d
|
[
"MIT"
] | 21
|
2021-04-24T19:10:11.000Z
|
2022-02-08T14:17:49.000Z
|
phangsPipeline/scBackups.py
|
astrojysun/phangs_imaging_scripts
|
e82f303b5d79a29d9269bce69ddb7456e08e3a9d
|
[
"MIT"
] | 35
|
2021-04-14T15:58:38.000Z
|
2022-02-25T12:11:31.000Z
|
phangsPipeline/scBackups.py
|
astrojysun/phangs_imaging_scripts
|
e82f303b5d79a29d9269bce69ddb7456e08e3a9d
|
[
"MIT"
] | 6
|
2021-04-28T08:19:11.000Z
|
2021-09-15T13:25:12.000Z
|
def write_moment1_hybrid(
        cube, rms=None, channel_correlation=None,
        outfile=None, errorfile=None,
        overwrite=True, unit=None,
        return_products=True,
        strict_vfield=None,
        broad_vfield=None,
        broad_signal=None,
        vfield_prior=None,
        vfield_prior_res=None,
        vfield_reject_thresh='30km/s',
        mom0_thresh_for_mom1=2.0,
        context=None):
    """Write out a hybrid moment-1 map built from other moment maps.

    This is a secondary moment that needs to be calculated in the
    context of other moments: the high-confidence ("strict") velocity
    field is kept everywhere it exists, and gaps are filled from a
    low-confidence ("broad") field where that field passes a S/N cut
    and lies close enough to a low-resolution prior velocity field.

    Keywords:
    ---------
    cube : SpectralCube
        Included to keep the same call signature; only passed through to
        update_metadata for the error map.
    outfile : str
        File name of the output hybrid moment-1 map.
    errorfile : str
        File name of the map of the uncertainty.
    rms, channel_correlation :
        Included to keep the same call signature but not used.
    overwrite : bool
        Set to True (the default) to overwrite existing maps if present.
    unit : astropy.Unit
        Preferred unit for the output maps.
    return_products : bool
        Return the products calculated in the map.
    strict_vfield, broad_vfield, broad_signal, vfield_prior : str
        Moment tags (keys into context['allmoments']) for the strict
        velocity field, broad velocity field, broad signal (moment 0)
        and prior velocity field respectively.
    vfield_prior_res : str
        Resolution tag for the low-resolution prior velocity field.
    vfield_reject_thresh : astropy.units.Quantity or str
        Maximum allowed |broad - prior| velocity difference.
    mom0_thresh_for_mom1 : float
        S/N threshold on the broad moment 0 for accepting a broad value.
    """

    # Resolution tag used to locate the input maps
    resname = context['res_tag']
    if resname is None:
        resname = ''

    # The threshold for outlier rejection from the prior velocity field
    vfield_reject_thresh = u.Quantity(vfield_reject_thresh)

    # Root of the file names for the maps at the working resolution
    moment_root = utilsFilenames.get_cube_filename(
        target=context['target'], config=context['config'],
        product=context['product'],
        ext=resname + context['extra_ext'])
    moment_root = moment_root.replace('.fits', '')

    # This strict moment1 field will remain in place no matter what
    strict_moment1_name = ''.join([context['indir'],
                                   moment_root,
                                   context['allmoments'][strict_vfield]['ext'],
                                   '.fits'])
    mom1strict = convert_and_reproject(strict_moment1_name, unit=unit)

    strict_moment1_err_name = ''.join([context['indir'],
                                       moment_root,
                                       context['allmoments'][strict_vfield]['ext_error'],
                                       '.fits'])
    mom1strict_error = convert_and_reproject(strict_moment1_err_name, unit=unit)

    # This broad moment 0 map will be used to help prune faint emission
    broad_moment0_name = ''.join([context['indir'],
                                  moment_root,
                                  context['allmoments'][broad_signal]['ext'],
                                  '.fits'])
    mom0broad = convert_and_reproject(broad_moment0_name, template=mom1strict)

    broad_moment0_err_name = ''.join([context['indir'],
                                      moment_root,
                                      context['allmoments'][broad_signal]['ext_error'],
                                      '.fits'])
    mom0broad_error = convert_and_reproject(broad_moment0_err_name, template=mom1strict)

    # This broad moment 1 map will be used as a candidate velocity field
    broad_moment1_name = ''.join([context['indir'],
                                  moment_root,
                                  context['allmoments'][broad_vfield]['ext'],
                                  '.fits'])
    mom1broad = convert_and_reproject(broad_moment1_name, template=mom1strict,
                                      unit=unit)

    broad_moment1_err_name = ''.join([context['indir'],
                                      moment_root,
                                      context['allmoments'][broad_vfield]['ext_error'],
                                      '.fits'])
    mom1broad_error = convert_and_reproject(broad_moment1_err_name, template=mom1strict,
                                            unit=unit)

    # This prior velocity field will be used to reject outliers
    resname = vfield_prior_res
    # AKL - need to make the config tunable and separate from the input maps here
    moment_root = utilsFilenames.get_cube_filename(
        target=context['target'], config=context['config'],
        product=context['product'],
        ext=resname + context['extra_ext'])
    moment_root = moment_root.replace('.fits', '')

    prior_moment1_name = ''.join([context['indir'],
                                  moment_root,
                                  context['allmoments'][vfield_prior]['ext'],
                                  '.fits'])
    mom1prior = convert_and_reproject(prior_moment1_name, template=mom1strict,
                                      unit=unit)

    # Now hybridize

    # ... start with the high quality strict map.
    # NOTE(review): .value is a view, so the fill below also writes into
    # mom1strict's buffer; harmless here since mom1strict's data array is
    # not re-read afterwards, but .copy() if that ever changes.
    mom1hybrid = mom1strict.value

    # ... candidate entries are places with a broad value but no strict value
    valid_broad_mom1 = np.isfinite(mom1broad.value)
    valid_broad_mom1[np.isfinite(mom1strict)] = False

    # If thresholding on intensity, apply that
    if mom0broad_error is not None:
        valid_broad_mom1 *= (mom0broad.value
                             > (mom0_thresh_for_mom1
                                * mom0broad_error.value))

    # If thresholding relative to the prior field, apply that
    if mom1prior is not None:
        valid_broad_mom1 = (valid_broad_mom1 *
                            (np.abs(mom1broad - mom1prior)
                             < vfield_reject_thresh)
                            )

    # Fill in the still-valid locations in the hybrid
    mom1hybrid[valid_broad_mom1] = (mom1broad.value)[valid_broad_mom1]

    mom1hybrid = u.Quantity(mom1hybrid, unit)
    if unit is not None:
        mom1hybrid = mom1hybrid.to(unit)

    # Attach to WCS
    mom1hybrid_proj = Projection(mom1hybrid,
                                 wcs=mom1strict.wcs,
                                 header=mom1strict.header,
                                 meta=mom1strict.meta)

    # Write
    if outfile is not None:
        mom1hybrid_proj.write(outfile,
                              overwrite=overwrite)

    # Propagate errors from the input maps into a hybrid error map.
    # BUG FIX: mom1hybrid_error_proj is initialized up front so the return
    # logic below cannot raise a NameError when either input error map is
    # missing / not a Projection (previously it was only assigned inside
    # the branch).
    mom1hybrid_error = None
    mom1hybrid_error_proj = None
    if (type(mom1broad_error) is Projection and
            type(mom1strict_error) is Projection):
        # Start from the broad errors, blank anything outside the hybrid
        # map, then overwrite with the strict errors where available.
        mom1hybrid_error = mom1broad_error
        mom1hybrid_error[~np.isfinite(mom1hybrid.value)] = np.nan
        strictvals = np.isfinite(mom1strict_error.value)
        mom1hybrid_error[strictvals] = mom1strict_error[strictvals]
        if unit is not None:
            mom1hybrid_error = mom1hybrid_error.to(unit)
        mom1hybrid_error_proj = Projection(mom1hybrid_error,
                                           wcs=mom1strict.wcs,
                                           header=mom1strict.header,
                                           meta=mom1strict.meta)
        if errorfile is not None:
            mom1hybrid_error_proj = update_metadata(mom1hybrid_error_proj,
                                                    cube, error=True)
            mom1hybrid_error_proj.write(errorfile,
                                        overwrite=overwrite)

    if return_products:
        if mom1hybrid_error_proj is not None:
            return (mom1hybrid_proj, mom1hybrid_error_proj)
        return mom1hybrid_proj
def old_write_moment1_hybrid(cube,
                             broad_mask=None,
                             moment1_prior=None,
                             order='bilinear',
                             outfile=None,
                             errorfile=None,
                             rms=None,
                             channel_correlation=None,
                             overwrite=True,
                             vfield_reject_thresh=30 * u.km / u.s,
                             mom0_thresh_for_mom1=2.0,
                             unit=None,
                             return_products=False):
    """
    Write a hybrid moment 1 map (legacy, cube-based implementation).

    Parameters:
    -----------
    cube : SpectralCube
        SpectralCube of original data with strict masking applied.

    Keywords:
    ---------
    broad_mask : SpectralCube, str or np.array
        Broad (permissive) mask with the same shape as the input cube,
        or a FITS filename for one.
    moment1_prior : FITS filename or Projection
        Velocity-field prior used to reject outlying broad values.
    order : str
        Interpolation order for reprojection ('nearest-neighbor',
        'bilinear', 'biquadratic', 'bicubic'). Defaults to 'bilinear'.
    errorfile : str
        File name of the map of the uncertainty.
    rms : SpectralCube
        Root-mean-square noise estimate (see write_moment1).
    channel_correlation : np.array
        Channel-to-channel normalized correlation coefficients.
    overwrite : bool
        Set to True (the default) to overwrite existing maps if present.
    unit : astropy.Unit
        Preferred unit for moment maps.
    vfield_reject_thresh : astropy.Quantity
        Maximum allowed deviation from the prior velocity field.
        Default 30 km/s.
    mom0_thresh_for_mom1 : float
        Moment-0 signal-to-noise required to accept a broad moment-1
        value.
    return_products : bool
        Return the products calculated in the map.
    """

    # Strict moment 1 (and error) from the strictly-masked cube itself
    (mom1strict,
     mom1strict_error) = write_moment1(cube, rms=rms,
                                       channel_correlation=channel_correlation,
                                       unit=unit,
                                       return_products=True)

    # Optional prior velocity field, converted and regridded onto the
    # strict map's astrometry.
    if moment1_prior is not None:
        if type(moment1_prior) is Projection:
            mom1prior = moment1_prior
        elif type(moment1_prior) is str:
            hdu_list = fits.open(moment1_prior)
            mom1prior = Projection.from_hdu(hdu_list[0])
        mom1prior = mom1prior.to(cube.spectral_axis.unit)
        mom1prior = mom1prior.reproject(mom1strict.header, order=order)
    else:
        mom1prior = None

    # Build the broadly-masked cube by OR-ing the strict mask with the
    # supplied broad mask.
    if type(broad_mask) is SpectralCube:
        strict_mask = SpectralCube(cube.mask.include(),
                                   wcs=cube.wcs,
                                   header=cube.header)
        hybrid_mask = hybridize_mask(strict_mask,
                                     broad_mask,
                                     return_cube=False)
        broad_cube = cube.with_mask(hybrid_mask,
                                    inherit_mask=False)
    elif type(broad_mask) is str:
        broad_mask = SpectralCube.read(broad_mask)
        strict_mask = SpectralCube(cube.mask.include(),
                                   wcs=cube.wcs,
                                   header=cube.header)
        hybrid_mask = hybridize_mask(strict_mask,
                                     broad_mask,
                                     return_cube=False)
        broad_cube = cube.with_mask(hybrid_mask,
                                    inherit_mask=False)
    elif type(broad_mask) is np.ndarray:
        # BUG FIX: np.bool was removed in NumPy 1.24; the builtin bool is
        # the documented replacement and behaves identically here.
        broad_cube = cube.with_mask(broad_mask.astype(bool),
                                    inherit_mask=False)
    else:
        # Previously an unsupported (or missing) broad_mask fell through
        # to a NameError on broad_cube below; fail with a clear message.
        raise ValueError("broad_mask must be a SpectralCube, str or ndarray")

    (mom0broad,
     mom0broad_error) = write_moment0(broad_cube, rms=rms,
                                      channel_correlation=channel_correlation,
                                      return_products=True)

    (mom1broad,
     mom1broad_error) = write_moment1(broad_cube, rms=rms,
                                      channel_correlation=channel_correlation,
                                      unit=unit,
                                      return_products=True)

    # Hybridize: keep strict values, fill gaps from the broad map where
    # it passes the S/N cut and agrees with the prior field.
    mom1hybrid = mom1strict.value
    valid_broad_mom1 = np.isfinite(mom1broad.value)
    valid_broad_mom1[np.isfinite(mom1strict)] = False

    if mom0broad_error is not None:
        valid_broad_mom1 *= (mom0broad.value
                             > (mom0_thresh_for_mom1
                                * mom0broad_error.value))

    if mom1prior is not None:
        valid_broad_mom1 = (valid_broad_mom1 *
                            (np.abs(mom1broad - mom1prior)
                             < vfield_reject_thresh)
                            )

    mom1hybrid[valid_broad_mom1] = (mom1broad.value)[valid_broad_mom1]

    mom1hybrid = u.Quantity(mom1hybrid, cube.spectral_axis.unit)
    if unit is not None:
        mom1hybrid = mom1hybrid.to(unit)

    mom1hybrid_proj = Projection(mom1hybrid,
                                 wcs=mom1strict.wcs,
                                 header=mom1strict.header,
                                 meta=mom1strict.meta)

    if outfile is not None:
        mom1hybrid_proj = update_metadata(mom1hybrid_proj, cube)
        mom1hybrid_proj.write(outfile,
                              overwrite=overwrite)

    # Error propagation.
    # BUG FIX: mom1hybrid_error_proj is initialized up front so the return
    # logic below cannot raise a NameError when either error map is not a
    # Projection (previously it was only assigned inside the branch).
    mom1hybrid_error = None
    mom1hybrid_error_proj = None
    if (type(mom1broad_error) is Projection and
            type(mom1strict_error) is Projection):
        mom1hybrid_error = mom1broad_error
        mom1hybrid_error[~np.isfinite(mom1hybrid.value)] = np.nan
        strictvals = np.isfinite(mom1strict_error.value)
        mom1hybrid_error[strictvals] = mom1strict_error[strictvals]
        if unit is not None:
            mom1hybrid_error = mom1hybrid_error.to(unit)
        mom1hybrid_error_proj = Projection(mom1hybrid_error,
                                           wcs=mom1strict.wcs,
                                           header=mom1strict.header,
                                           meta=mom1strict.meta)
        if errorfile is not None:
            mom1hybrid_error_proj = update_metadata(mom1hybrid_error_proj,
                                                    cube, error=True)
            mom1hybrid_error_proj.write(errorfile,
                                        overwrite=overwrite)

    if return_products:
        if mom1hybrid_error_proj is not None:
            return (mom1hybrid_proj, mom1hybrid_error_proj)
        return mom1hybrid_proj
| 37.93643
| 89
| 0.56503
|
12786095e6f24247f93bfc7feedd5115d86dd0a7
| 12,563
|
cs
|
C#
|
src/BlackJack.cs
|
Davichet-e/BlackJack-CSharp
|
64edd37f27316ae05dedbede74af908125a600d2
|
[
"BSD-3-Clause"
] | null | null | null |
src/BlackJack.cs
|
Davichet-e/BlackJack-CSharp
|
64edd37f27316ae05dedbede74af908125a600d2
|
[
"BSD-3-Clause"
] | null | null | null |
src/BlackJack.cs
|
Davichet-e/BlackJack-CSharp
|
64edd37f27316ae05dedbede74af908125a600d2
|
[
"BSD-3-Clause"
] | null | null | null |
using System;
using System.Collections.Generic;
using System.Threading;
using System.Linq;
using Hands;
using Cards;
#pragma warning disable CS8618
public class BlackJack
{
private static Deck _deck;
private static IList<Player> _players = new List<Player>();
private static Hand _dealerHand;
/// <summary>
/// Entry point: asks for the number of decks, sets up the shoe, the
/// dealer's hand and the players, then loops over full rounds
/// (player turns, dealer turn, settlement) until nobody continues.
/// </summary>
public static void Main(string[] args)
{
    Console.WriteLine("This BlackJack Game has been created by David Garcia Morillo");
    int nOfDecks;
    while (true)
    {
        try
        {
            Console.Write("How many decks do you want to use (4-8)\n> ");
            nOfDecks = Int32.Parse(Console.ReadLine());
            if (nOfDecks <= 3 || nOfDecks > 8)
                Console.WriteLine("The number of decks must be between 4 and 8\n");
            else
                break;
        }
        catch (FormatException)
        {
            Console.WriteLine("Please, use only integral values");
        }
    }
    _deck = new Deck(nOfDecks);
    _dealerHand = new Hand(_deck);
    StartGame();
    while (true)
    {
        // Each iteration is one full round of the game.
        Console.WriteLine($"\nThe first card of the dealer is {_dealerHand.Cards[0]}");
        foreach (Player player in _players)
        {
            PlayerTurn(player);
        }
        DealerTurn();
        EndGame();
        if (!NextGame())
        {
            break;
        }
    }
}
/// <summary>Creates one Player per person at the table.</summary>
private static void StartGame()
{
    AskAndSetPlayerAttributes(AskNumberOfPeople());
}
/// <summary>
/// Prompts until a valid number of players is entered.
/// </summary>
/// <returns>The table size, guaranteed to be between 1 and 5.</returns>
private static int AskNumberOfPeople()
{
    int numberOfPeople;
    while (true)
    {
        try
        {
            Console.Write("How many people are going to play? (1-5)\n> ");
            numberOfPeople = int.Parse(Console.ReadLine());
            // BUG FIX: the old check `!(0 < n) && (n <= 5)` only rejected
            // non-positive values, so e.g. 100 players was accepted.
            if (numberOfPeople < 1 || numberOfPeople > 5)
                Console.WriteLine("The number of people must be between 1 and 5\n");
            else
                break;
        }
        catch (FormatException)
        {
            Console.WriteLine("Please, use only integral values.\n");
        }
    }
    return numberOfPeople;
}
/// <summary>
/// Asks each person for a name and a starting bankroll (minimum 50) and
/// adds a corresponding Player to the table.
/// </summary>
private static void AskAndSetPlayerAttributes(int numberOfPeople)
{
    for (int i = 1; i <= numberOfPeople; i++)
    {
        Console.Write($"Please, enter your name, Player {i}\n> ");
        string name = Console.ReadLine();
        while (true)
        {
            try
            {
                Console.Write("\nHow much money do you have? (Use only integral values)\n> ");
                int initialMoney = int.Parse(Console.ReadLine());
                if (initialMoney < 50)
                    Console.WriteLine("The initial money must be greater or equal than 50\n");
                else
                {
                    _players.Add(new Player(name, initialMoney, _deck));
                    break;
                }
            }
            // NOTE(review): catch(Exception) also swallows non-parse errors
            // (e.g. from the Player constructor) -- consider narrowing to
            // FormatException/OverflowException.
            catch (Exception)
            {
                Console.WriteLine("Please, use only integral values.\n");
            }
        }
    }
}
/// <summary>
/// Prompts the player for a bet until it is a positive amount no larger
/// than the player's current money, then stores it on the player.
/// </summary>
private static void AskPlayerBet(Player player)
{
    while (true)
    {
        try
        {
            Console.Write("What bet do you wanna make?\n> ");
            int bet = int.Parse(Console.ReadLine());
            if (bet > player.ActualMoney)
                Console.WriteLine("Your bet cannot be greater than your actual money.\n");
            else if (bet <= 0)
                Console.WriteLine("Your bet must be greater than 0.\n");
            else
            {
                player.Bet = bet;
                break;
            }
        }
        catch (Exception)
        {
            Console.WriteLine("Please, use only integral values.\n");
        }
    }
}
/// <summary>
/// Reports whether a hand's turn is over: 21 points (possibly a
/// BlackJack) or a bust, which this codebase encodes as Points == 0.
/// </summary>
private static bool HandWinOrLose(Hand hand)
{
    int points = hand.Points;
    if (points == 21)
    {
        Console.WriteLine(hand.HasBlackJack() ? "BLACKJACK!" : "YOU GOT 21 POINTS!");
        return true;
    }
    if (points == 0)
    {
        Console.WriteLine("BUST.\nI'm afraid you lose this game :(\n");
        return true;
    }
    return false;
}
/// <summary>
/// Interprets a free-form console answer as an affirmative:
/// "y", "yes", "1" or "true" (case-insensitive, surrounding spaces ignored).
/// </summary>
private static bool CheckIfYes(string userDecision)
{
    switch (userDecision.Trim().ToLower())
    {
        case "y":
        case "yes":
        case "1":
        case "true":
            return true;
        default:
            return false;
    }
}
/// <summary>Prompts once and interprets the answer via CheckIfYes.</summary>
private static bool AskIfHit()
{
    Console.Write("Do you wanna hit? (y/n)\n> ");
    return CheckIfYes(Console.ReadLine());
}
/// <summary>
/// Runs one player's complete turn: collects the bet, then repeatedly
/// processes commands (hit / stand / split / double / surrender) for each
/// of the player's hands until the hand stands, reaches 21/BlackJack,
/// busts, or the player surrenders.
/// </summary>
private static void PlayerTurn(Player player)
{
    Console.WriteLine($"###### {player}'s turn ######\n");
    Console.WriteLine($"{player}, your actual money is {player.ActualMoney} Euros\n");
    AskPlayerBet(player);
    Console.WriteLine("Your cards are: ");
    Console.WriteLine(String.Join(" and ", player.Hands[0]!.Cards));
    Thread.Sleep(1000);
    bool hasSplitted = false;
    bool hasDoubled = false;
    // Pair each hand with its index (a split can add a second hand).
    foreach ((Hand hand, int i) in player.Hands.Select((hand, index) => (hand, index)))
    {
        if (hand is null)
            break;
        // If the player has doubled, he can only hit one more time
        while (!HandWinOrLose(hand) && (!hasDoubled || hand.Cards.Count < 3))
        {
            if (hasSplitted)
            {
                Console.WriteLine($"(Hand #{i})");
                Console.WriteLine($"Your cards are: {hand}");
            }
            Console.Write("\nWhat do you want to do?\nAvailable Commands: (h)it, (s)tand, (sp)lit, (d)ouble, (surr)ender\n> ");
            string userDecision = Console.ReadLine().Trim().ToLower();
            bool breaking = false;
            switch (userDecision)
            {
                case "h":
                case "hit":
                    player.Hit(i);
                    Console.WriteLine($"Now, your cards are: {hand}");
                    break;
                case "s":
                case "stand":
                    // BUG FIX: was $"Player ${player} stood." -- the extra '$'
                    // inside the interpolated string printed a literal dollar.
                    Console.WriteLine($"Player {player} stood.");
                    breaking = true;
                    break;
                case "sp":
                case "split":
                    if (!hasDoubled)
                    {
                        string? errorMessage = player.Split();
                        if (!string.IsNullOrEmpty(errorMessage))
                            Console.WriteLine(errorMessage);
                        else
                        {
                            hasSplitted = true;
                            Console.WriteLine("You have splitted the hand!");
                        }
                    }
                    else
                        Console.WriteLine("You cannot split because you have already doubled");
                    break;
                case "d":
                case "double": // BUG FIX: was "doubled"; the help advertises "(d)ouble"
                    if (!hasDoubled)
                    {
                        string? errorMessage = player.Double();
                        if (!string.IsNullOrEmpty(errorMessage))
                            Console.WriteLine(errorMessage);
                        else
                        {
                            hasDoubled = true;
                            Console.WriteLine("You have doubled the bet!");
                        }
                    }
                    else
                        Console.WriteLine("You cannot double because you have already doubled");
                    break;
                case "surr":
                case "surrender":
                    if (!hasDoubled)
                    {
                        string? errorMessage = player.Surrender();
                        if (!string.IsNullOrEmpty(errorMessage))
                            Console.WriteLine(errorMessage);
                        else
                        {
                            Console.WriteLine("You have surrendered!");
                            breaking = true;
                        }
                    }
                    else
                        Console.WriteLine("You cannot surrender because you have already doubled");
                    break;
                default:
                    Console.WriteLine("Invalid command!\nAvailable Commands: (h)it, (s)tand, (sp)lit, (d)ouble, (surr)ender");
                    break;
            }
            if (breaking)
                break;
        }
    }
}
/// <summary>
/// Reports whether the dealer has busted. A <c>Points</c> value of 0 is the
/// hand's encoding for a bust; when that happens the end-of-round message is
/// printed here as a side effect.
/// </summary>
/// <returns>True when the dealer busted.</returns>
private static bool DealerLost()
{
    // Guard clause: a non-zero score means the dealer is still in the game.
    if (_dealerHand.Points != 0)
        return false;
    Console.WriteLine("The dealer busted. The game ended :)\n");
    return true;
}
/// <summary>
/// Plays the dealer's turn: reveals both dealer cards, then keeps drawing
/// cards while the dealer has not busted and holds fewer than 17 points.
/// </summary>
private static void DealerTurn()
{
    Console.WriteLine("###### Dealer's Turn ######\n");
    Thread.Sleep(2000);
    Console.WriteLine($"The dealer cards are {_dealerHand.Cards[0]} and {_dealerHand.Cards[1]}\n");
    while (true)
    {
        // Same short-circuit order as before: the bust check (which prints
        // its own message) runs first, then the 17-point stand rule.
        if (DealerLost() || _dealerHand.Points >= 17)
            break;
        Thread.Sleep(2000);
        Console.WriteLine("The dealer is going to hit a card\n");
        _dealerHand.DealCard();
        Thread.Sleep(1000);
        Console.WriteLine($"Now, the dealer cards are: {_dealerHand}");
    }
}
/// <summary>
/// Settles every player's hand(s) against the dealer and prints the result.
/// A hand wins when it outscores the dealer or holds a blackjack the dealer
/// lacks; a busted hand (0 points) or a lower score loses; otherwise it ties.
/// </summary>
private static void EndGame()
{
    // TODO Console.WriteLine(Results)
    // Cache once: the dealer's hand no longer changes at this point, and the
    // original mixed this cache with live _dealerHand.Points reads.
    int dealerPoints = _dealerHand.Points;
    foreach (Player player in _players)
    {
        // FIX: the lambda previously named the element "i" and the index
        // "hand" — it only worked because tuple deconstruction is positional.
        foreach ((Hand hand, int i) in player.Hands.Select((hand, index) => (hand, index)))
        {
            // Hands is a fixed-size array; a null slot means no further hands.
            if (hand is null)
                break;
            int handPoints = hand.Points;
            if (handPoints > dealerPoints ||
                (hand.HasBlackJack() && !_dealerHand.HasBlackJack()))
            {
                int moneyEarned = player.Win();
                // Only label the hand number when the player has split.
                string handSpecification = player.Hands.Length == 1 ? "" : $" (#{i + 1} hand)";
                Console.WriteLine($"\n{player}{handSpecification} won {moneyEarned} Euros :)\n");
            }
            else if (handPoints == 0 || handPoints < dealerPoints)
            {
                // handPoints == 0 encodes a busted hand.
                player.Lose();
                Console.WriteLine($"\n{player} lost against the dealer :(\n");
            }
            else
                Console.WriteLine($"\n{player}, it is a Tie! :|\n");
        }
        Thread.Sleep(1000);
    }
}
/// <summary>
/// Asks a solvent player whether to play another round. Returns true when the
/// player continues (their hands are reset); otherwise prints a farewell —
/// with the signed net balance when the player quits voluntarily.
/// </summary>
private static bool AskIfReset(Player player)
{
    // Net result since the session started; force an explicit '+' sign when
    // the text carries no '-'.
    string balanceText = $"{player.ActualMoney - player.InitialMoney} Euros";
    if (!balanceText.Contains("-"))
        balanceText = "+" + balanceText;
    if (player.ActualMoney <= 0)
    {
        Console.WriteLine($"{player}, you have lost all your money. Thanks for playing\n");
        return false;
    }
    Console.Write($"{player}, do you want to play again? (y/n)\n> ");
    string decision = Console.ReadLine();
    if (CheckIfYes(decision))
    {
        player.ResetHands();
        return true;
    }
    Console.WriteLine($"Thanks for playing {player}, your final balance is {balanceText}\n");
    return false;
}
/// <summary>
/// Drops every player who declines (or cannot afford) another round, then
/// prepares the dealer's hand when at least one player remains.
/// </summary>
/// <returns>True when another round should be played.</returns>
private static bool NextGame()
{
    //TODO Console.WriteLine(Game Finished)
    // AskIfReset both queries the player and resets their hands on "yes".
    _players = _players.Where(AskIfReset).ToList();
    if (_players.Count == 0)
        return false;
    _dealerHand.InitializeAttributes();
    return true;
}
}
| 31.329177
| 131
| 0.452758
|
072156ffc9346d0042f464ee74d0fa950c010874
| 71,182
|
rb
|
Ruby
|
omni_scrape.rb
|
bmaynard1991/omni-scrape
|
94bf36dbc868ec6835342bcf1c6f7f96076fe8a5
|
[
"MIT"
] | null | null | null |
omni_scrape.rb
|
bmaynard1991/omni-scrape
|
94bf36dbc868ec6835342bcf1c6f7f96076fe8a5
|
[
"MIT"
] | null | null | null |
omni_scrape.rb
|
bmaynard1991/omni-scrape
|
94bf36dbc868ec6835342bcf1c6f7f96076fe8a5
|
[
"MIT"
] | null | null | null |
require "omni_scrape/version"
module OmniScrape
##########################################################################################
##########################################################################################
# Fetch the page at +url+ and save the HTML of every page it links to.
#
# url     - starting page URL
# depth   - tag used only to name the output folder (clamped to >= 0)
# sub_url - base URL prepended to relative hrefs
#
# Each reachable link is written to ./results<depth>/page<i>.html; links that
# fail to fetch (e.g. 404) are skipped.
# NOTE(review): relies on Nokogiri and open-uri being loaded by the caller.
def CrawlScrape(url, depth, sub_url)
if (depth<0)
depth=0
end#if
#open the starting page and gather its usable links
page = Nokogiri::HTML(open(url,{ssl_verify_mode: OpenSSL::SSL::VERIFY_NONE}))
links = page.css('a')
# keep only anchors that carry a non-empty href; reverse to preserve the
# original pop-based (last-to-first) numbering of the saved files
refarr = links.select { |link| link['href'] != nil && link['href'] != "" }.reverse
#create folder for storing current set of scraped pages
Dir.mkdir('./results'+depth.to_s) unless Dir.exist?('./results'+depth.to_s)
refarr.each_with_index do |link, i|
#evaluate whether link is absolute or relative to sub_url
if(link['href'].include?('://'))
target = link['href']
else
target = sub_url+link['href']
end#if include?
begin
pagina = Nokogiri::HTML(open(target,{ssl_verify_mode: OpenSSL::SSL::VERIFY_NONE}))
rescue Exception
# unreachable page (404 etc.): skip it instead of the old
# flag-and-retry dance that silently wrote nothing
next
end#begin
# BUG FIX: store the fetched page (pagina), not the start page (page),
# which the original wrote for every single link
crfile = File.new(('./results'+depth.to_s+"/page"+i.to_s+".html").chomp,"w")
begin
crfile.puts pagina
rescue
# encoding problem while serializing: leave this file empty
end
crfile.close
end#each link
end#def crawlscrape
#############################################################################################
# Recursively "localize" a site: fetch +url+, rewrite every usable <a href>
# on it to point at a locally saved copy, and save the rewritten start page
# as ./page.html.  Linked pages are fetched (down to +depth+ levels, via
# FLocalize) and stored under ./pages<depth>/<n>set/.  @location maps each
# original href to its local path so a page reached twice is rewritten to
# the same saved copy rather than fetched again.
# NOTE(review): assumes Nokogiri and open-uri are loaded by the caller.
def Localize(url, depth, sub_url)
#initialize to extract from user view
@location = Hash.new
s_depth = depth
i_page = 0
prev_ipage = 0
link_to_add =""
if (depth<0)
depth=0
end
#open the starting page
page = Nokogiri::HTML(open(url,{ssl_verify_mode: OpenSSL::SSL::VERIFY_NONE}))
#collect all of the links from the page
links= page.css('a')
title = page.css('title')
#initialize variables
refarr=[]
hrefs = []
x=0
#add href to arrays for each link
links.each do |link|
if(link['href']!=nil && link['href']!="")
# puts x
# puts (link['title'].split.join)
# x+=1
hrefs.push(link)
end
end
total=0
#transfer links to other array
while(!hrefs.empty?)
value= hrefs.pop
refarr.push(value)
total+=1
end
#setup for recognition of the end of the array
# the "-" sentinel marks the end of the link list for the loop below
refarr.push("-")
if(depth>0)
#create subdirectory for storing current set of scraped pages
if (Dir.exist?('./pages'+depth.to_s))
else Dir.mkdir('./pages'+depth.to_s)
end
#in each link
check = (refarr.length-1)
for i in 0..check
if(refarr[i]!="-")
#evaluate whether link is internal or external
if(refarr[i]['href']!=nil && refarr[i]['href']!="")
if(refarr[i]['href'].include?('://'))
url=refarr[i]['href']
else
url=sub_url+refarr[i]['href']
#puts "external link"
end#refarr[i]['href'].include?
end#refarr[i]['href']!=nil
# on a failed fetch the rescue clears the href, sets the flag and
# retries; the retried begin then skips the open entirely
fourofour=false
begin
if(fourofour==false)
pagina = Nokogiri::HTML(open(url,{ssl_verify_mode: OpenSSL::SSL::VERIFY_NONE}))
end
#test for a 404
rescue Exception =>ex
#puts "got a 404"
#replace href (no navigation onclick)
refarr[i]['href'] =""
fourofour=true
retry
end #begin
if (fourofour==false)
#make relevant links reference local files
if(refarr[i]['href']!="" && refarr[i]['href']!=nil)
j_depth = s_depth - depth
appendval = "../"
clutch = 0
for r in 1..j_depth
clutch +=1
end
if (Dir.exist?('./pages'+depth.to_s+"/"+clutch.to_s+"set"))
else Dir.mkdir('./pages'+depth.to_s+"/"+clutch.to_s+"set")
end
if (depth == s_depth)
linkref = (('./pages'+depth.to_s+"/"+clutch.to_s+"set/"+i_page.to_s+"x"+i.to_s+"page.html").chomp)
else
linkref = ((appendval+'../pages'+depth.to_s+"/"+clutch.to_s+"set/"+i_page.to_s+"x"+i.to_s+"page.html").chomp)
end
pass_a_link = i_page.to_s+"x"+i.to_s+"page.html"
# duplicate href: reuse the path already assigned in @location
if (@location.has_key?(refarr[i]['href']))
loc = @location[(refarr[i]['href'])]
sub_loc = loc.match(/(.\/[a-z]{5}\d{1,20}\/\d{1,20}[a-z]{3}\/\d{1,20}[x]\d{1,20}[a-z]{4}.[a-z]{1,20})/)
refarr[i]['href'] =sub_loc
else
initial_link=refarr[i]['href']
refarr[i]['href']=linkref
#HERE!!!!!**!*!*@*!!@@***!
if (depth == s_depth)
full_link = "../../"+linkref
else
full_link = linkref
end
@location[initial_link]=full_link
#puts "working"
end# @location.haskey
end #refarr[i]['href']!=""
#trim it down and remove special characters for display
trimval=refarr[i]['href']
finval=trimval.gsub!(/[!:\/-]/, '')
#puts refarr[i]
if(finval==nil && refarr[i]!=nil)
finval=refarr[i]
end #finval == nil
n_depth = depth-1
if(finval!=nil)
# recurse one level deeper; pass_a_link names the file FLocalize saves
self. FLocalize(url, n_depth, sub_url, s_depth, i, i_page, pass_a_link)
#create subdirectory for storing current links page
#if (Dir.exist?('./pages'+depth.to_s+'/link'+i.to_s))
#else Dir.mkdir('./pages'+depth.to_s+'/link'+i.to_s)
#end
end #finval!=nil
end #fourofour==false
end #refarr[i]!="-"
end#end for each
else#<< depth not > 0
check = (refarr.length-1)
for i in 0..check
if (refarr[i]['href']!=nil && refarr[i]['href']!="")
refarr[i]['href']=""
end
end
end
if (depth == s_depth)
#store newly generated html/links for current page
mainpage =File.new('./page.html',"w")
mainpage.puts page
mainpage.close
else
#store page from the link in the subdirectory
puts "page: "
p_depth = depth +1
j_depth = s_depth - depth
appendval = ""
clutch = 0
for r in 1..j_depth
appendval += "../"
clutch +=1
end
clutch -=1
crfile=File.new(('./pages'+p_depth.to_s+"/"+clutch.to_s+"set/"+link_to_add),"w")
encodingissue=false
begin
if(encodingissue==false)
crfile.puts page
end
rescue
encodingissue=true
retry
end
crfile.close
end
end #end def Localize
#########################################################################################
# Recursive worker for Localize: fetch +url+, rewrite its links against the
# shared @location map, recurse into each link while depth > 0, then save the
# rewritten page under ./pages<depth+1>/<n>set/<link_to_add>.  A duplicate
# target is signalled by link_to_add == "this_is_a_duplicate", which skips
# the save.  s_depth is the original depth; i_page/prev_ipage index the page.
def FLocalize(url, depth, sub_url, s_depth, i_page, prev_ipage, link_to_add)
#open the starting page
if (depth<0)
depth=0
end
page = Nokogiri::HTML(open(url,{ssl_verify_mode: OpenSSL::SSL::VERIFY_NONE}))
#collect all of the links from the page
links= page.css('a')
title = page.css('title')
#initialize variables
refarr=[]
hrefs = []
x=0
#add href to arrays for each link
links.each do |link|
if(link['href']!=nil && link['href']!="")
# puts x
# puts (link['title'].split.join)
# x+=1
hrefs.push(link)
end
end
total=0
#transfer links to other array
while(!hrefs.empty?)
value= hrefs.pop
refarr.push(value)
total+=1
end
#setup for recognition of the end of the array
# the "-" sentinel marks the end of the link list for the loop below
refarr.push("-")
if(depth>0)
#create subdirectory for storing current set of scraped pages
if (Dir.exist?('./pages'+depth.to_s))
else Dir.mkdir('./pages'+depth.to_s)
end
#in each link
check = (refarr.length-1)
for i in 0..check
if(refarr[i]!="-")
#evaluate whether link is internal or external
if(refarr[i]['href']!=nil && refarr[i]['href']!="")
if(refarr[i]['href'].include?('://'))
url=refarr[i]['href']
else
url=sub_url+refarr[i]['href']
#puts "external link"
end#refarr[i]['href'].include?
end#refarr[i]['href']!=nil
# failed fetch: rescue clears the href, flags, and retries; the
# retried begin then skips the open entirely
fourofour=false
begin
if(fourofour==false)
pagina = Nokogiri::HTML(open(url,{ssl_verify_mode: OpenSSL::SSL::VERIFY_NONE}))
end
#test for a 404
rescue Exception =>ex
#puts "got a 404"
#replace href (no navigation onclick)
refarr[i]['href'] =""
fourofour=true
retry
end #begin
if (fourofour==false)
#make relevant links reference local files
if(refarr[i]['href']!="" && refarr[i]['href']!=nil)
j_depth = s_depth - depth
appendval = "../"
clutch = 0
for r in 1..j_depth
clutch +=1
end
if (Dir.exist?('./pages'+depth.to_s+"/"+clutch.to_s+"set"))
else Dir.mkdir('./pages'+depth.to_s+"/"+clutch.to_s+"set")
end
linkref = ((appendval+'../pages'+depth.to_s+"/"+clutch.to_s+"set/"+i_page.to_s+"x"+i.to_s+"page.html"))
pass_a_link = i_page.to_s+"x"+i.to_s+"page.html"
# already-localized href: mark the recursion as a duplicate so the
# page is not saved twice
if (@location.has_key?(refarr[i]['href']))
pass_a_link = "this_is_a_duplicate"
refarr[i]['href'] = @location[(refarr[i]['href'])]
else
initial_link=refarr[i]['href']
refarr[i]['href']=linkref
full_link = linkref
@location[initial_link]=linkref
#puts "working"
end# @location.haskey
end #refarr[i]['href']!=""
#trim it down and remove special characters for display
trimval=refarr[i]['href']
finval=trimval.gsub!(/[!:\/-]/, '')
#puts refarr[i]
if(finval==nil && refarr[i]!=nil)
finval=refarr[i]
end #finval == nil
n_depth = depth-1
if(finval!=nil)
self. FLocalize(url, n_depth, sub_url, s_depth, i, i_page, pass_a_link)
end #finval!=nil
end #fourofour==false
end #refarr[i]!="-"
end#end for each
else#<< depth not > 0
check = (refarr.length-1)
for i in 0..check
if (refarr[i]['href']!=nil && refarr[i]['href']!="")
refarr[i]['href']=""
end
end
end
if (depth == s_depth)
#store newly generated html/links for current page
mainpage =File.new('./page.html',"w")
mainpage.puts page
mainpage.close
else
#store page from the link in the subdirectory
p_depth = depth +1
j_depth = s_depth - depth
appendval = ""
clutch = 0
for r in 1..j_depth
appendval += "../"
clutch +=1
end
clutch -=1
if (link_to_add!="this_is_a_duplicate")
crfile=File.new(('./pages'+p_depth.to_s+"/"+clutch.to_s+"set/"+link_to_add),"w")
encodingissue=false
begin
if(encodingissue==false)
crfile.puts page
end
rescue
encodingissue=true
retry
end
crfile.close
else
end
end
end #end def FLocalize
#########################################################################################
#############################################################################################
# Variant of Localize restricted to links inside a CSS +selector+: only
# anchors matching "<selector> a" are localized; every other href on the
# page is blanked.  Uses @location_CSS as the href -> local-path map shared
# with FLocalize_CSS.  The rewritten start page is saved as ./page.html.
def Localize_CSS(url, depth, sub_url,selector)
#initialize to extract from user view
@location_CSS = Hash.new
s_depth = depth
i_page = 0
prev_ipage = 0
link_to_add =""
if (depth<0)
depth=0
end
#open the starting page
page = Nokogiri::HTML(open(url,{ssl_verify_mode: OpenSSL::SSL::VERIFY_NONE}))
#collect all of the links from the page
links= page.css('a')
title = page.css('title')
#initialize variables
refarr=[]
hrefs = []
linkseti= []
linkset= []
x=0
# anchors inside the user-supplied selector are the only ones kept
linkseti = page.css(selector+' a')
#add each link with valid href to array
links.each do |link|
if(link['href']!=nil && link['href']!="")
# puts x
# puts (link['title'].split.join)
# x+=1
hrefs.push(link)
end
end
linkseti.each do |ilink|
if(ilink['href']!=nil && ilink['href']!="")
# puts x
# puts (link['title'].split.join)
# x+=1
linkset.push(ilink)
end
end
# blank every href that is not inside the selector set
hrefslength = (hrefs.length-1)
for i in 0..hrefslength
if(linkset.include?(hrefs[i]))
else
if(hrefs[i]['href']!=nil && hrefs[i]['href']!="")
hrefs[i]['href']=""
end
end
end
#transfer links to other array
while(!hrefs.empty?)
value= hrefs.pop
if (value['href']!=nil && value['href']!="")
refarr.push(value)
end
end
#setup for recognition of the end of the array
refarr.push("-")
if(depth>0)
#create subdirectory for storing current set of scraped pages
if (Dir.exist?('./pages'+depth.to_s))
else Dir.mkdir('./pages'+depth.to_s)
end
#in each link
check = (refarr.length-1)
for i in 0..check
if(refarr[i]!="-")
if(linkset.include?(refarr[i]))
else
if(refarr[i]['href']!=nil && refarr[i]['href']!="")
refarr[i]['href']=""
end
end
#evaluate whether link is internal or external
if(refarr[i]['href']!=nil && refarr[i]['href']!="")
if(refarr[i]['href'].include?('://'))
url=refarr[i]['href']
else
url=sub_url+refarr[i]['href']
#puts "external link"
end#refarr[i]['href'].include?
end#refarr[i]['href']!=nil
fourofour=false
begin
if(fourofour==false && refarr[i]['href']!=nil)
pagina = Nokogiri::HTML(open(url,{ssl_verify_mode: OpenSSL::SSL::VERIFY_NONE}))
end
#test for a 404
rescue Exception =>ex
#puts "got a 404"
#replace href (no navigation onclick)
refarr[i]['href'] =""
fourofour=true
retry
end #begin
if (fourofour==false)
#make relevant links reference local files
if(refarr[i]['href']!="" && refarr[i]['href']!=nil)
j_depth = s_depth - depth
appendval = "../"
clutch = 0
for r in 1..j_depth
clutch +=1
end
if (Dir.exist?('./pages'+depth.to_s+"/"+clutch.to_s+"set"))
else Dir.mkdir('./pages'+depth.to_s+"/"+clutch.to_s+"set")
end
if (depth == s_depth)
linkref = (('./pages'+depth.to_s+"/"+clutch.to_s+"set/"+i_page.to_s+"x"+i.to_s+"page.html").chomp)
else
linkref = ((appendval+'../pages'+depth.to_s+"/"+clutch.to_s+"set/"+i_page.to_s+"x"+i.to_s+"page.html").chomp)
end
pass_a_link = i_page.to_s+"x"+i.to_s+"page.html"
# duplicate href: reuse the path already assigned in @location_CSS
if (@location_CSS.has_key?(refarr[i]['href']))
loc = @location_CSS[(refarr[i]['href'])]
sub_loc = loc.match(/(.\/[a-z]{5}\d{1,20}\/\d{1,20}[a-z]{3}\/\d{1,20}[x]\d{1,20}[a-z]{4}.[a-z]{1,20})/)
refarr[i]['href'] =sub_loc
else
initial_link=refarr[i]['href']
refarr[i]['href']=linkref
#HERE!!!!!**!*!*@*!!@@***!
if (depth == s_depth)
full_link = "../../"+linkref
else
full_link = linkref
end
@location_CSS[initial_link]=full_link
#puts "working"
end# @location_CSS.haskey
end #refarr[i]['href']!=""
#trim it down and remove special characters for display
trimval=refarr[i]['href']
finval=trimval.gsub!(/[!:\/-]/, '')
#puts refarr[i]
if(finval==nil && refarr[i]!=nil)
finval=refarr[i]
end #finval == nil
n_depth = depth-1
if(finval!=nil)
self. FLocalize_CSS(url, n_depth, sub_url, s_depth, i, i_page, pass_a_link, selector)
#create subdirectory for storing current links page
#if (Dir.exist?('./pages'+depth.to_s+'/link'+i.to_s))
#else Dir.mkdir('./pages'+depth.to_s+'/link'+i.to_s)
#end
end #finval!=nil
end #fourofour==false
end #refarr[i]!="-"
end#end for each
else#<< depth not > 0
check = (refarr.length-1)
for i in 0..check
if (refarr[i]['href']!=nil && refarr[i]['href']!="")
refarr[i]['href']=""
end
end
end
if (depth == s_depth)
#store newly generated html/links for current page
mainpage =File.new('./page.html',"w")
mainpage.puts page
mainpage.close
else
#store page from the link in the subdirectory
puts "page: "
p_depth = depth +1
j_depth = s_depth - depth
appendval = ""
clutch = 0
for r in 1..j_depth
appendval += "../"
clutch +=1
end
clutch -=1
crfile=File.new(('./pages'+p_depth.to_s+"/"+clutch.to_s+"set/"+link_to_add),"w")
encodingissue=false
begin
if(encodingissue==false)
crfile.puts page
end
rescue
encodingissue=true
retry
end
crfile.close
end
end #end def Localize_CSS
#########################################################################################
# Recursive worker for Localize_CSS: fetch +url+, blank hrefs outside the
# CSS +selector+, rewrite the remaining links against the shared
# @location_CSS map, recurse while depth > 0, then save the page under
# ./pages<depth+1>/<n>set/<link_to_add> unless it is flagged as a duplicate.
def FLocalize_CSS(url, depth, sub_url, s_depth, i_page, prev_ipage, link_to_add, selector)
#open the starting page
if (depth<0)
depth=0
end
page = Nokogiri::HTML(open(url,{ssl_verify_mode: OpenSSL::SSL::VERIFY_NONE}))
#collect all of the links from the page
links= page.css('a')
title = page.css('title')
#initialize variables
refarr=[]
hrefs = []
linkseti= []
linkset= []
x=0
linkseti = page.css(selector+' a')
#add each link with valid href to array
links.each do |link|
if(link['href']!=nil && link['href']!="")
# puts x
# puts (link['title'].split.join)
# x+=1
hrefs.push(link)
end
end
linkseti.each do |ilink|
if(ilink['href']!=nil && ilink['href']!="")
# puts x
# puts (link['title'].split.join)
# x+=1
linkset.push(ilink)
end
end
# blank every href that is not inside the selector set
hrefslength = (hrefs.length-1)
for i in 0..hrefslength
if(linkset.include?(hrefs[i]))
else
if(hrefs[i]['href']!=nil && hrefs[i]['href']!="")
hrefs[i]['href']=""
end
end
end
#transfer links to other array
while(!hrefs.empty?)
value= hrefs.pop
if (value['href']!=nil && value['href']!="")
refarr.push(value)
end
end
#setup for recognition of the end of the array
refarr.push("-")
if(depth>0)
#create subdirectory for storing current set of scraped pages
if (Dir.exist?('./pages'+depth.to_s))
else Dir.mkdir('./pages'+depth.to_s)
end
#in each link
check = (refarr.length-1)
for i in 0..check
if(refarr[i]!="-")
#evaluate whether link is internal or external
if(refarr[i]['href']!=nil && refarr[i]['href']!="")
if(refarr[i]['href'].include?('://'))
url=refarr[i]['href']
else
url=sub_url+refarr[i]['href']
#puts "external link"
end#refarr[i]['href'].include?
end#refarr[i]['href']!=nil
fourofour=false
#refarr[i]['href'] is nil :S this a result of reference to other array? how to do a true dup without reference?
begin
if(fourofour==false)
pagina = Nokogiri::HTML(open(url,{ssl_verify_mode: OpenSSL::SSL::VERIFY_NONE}))
end
#test for a 404
rescue Exception =>ex
#puts "got a 404"
#replace href (no navigation onclick)
refarr[i]['href'] =""
fourofour=true
retry
end #begin
if (fourofour==false)
#make relevant links reference local files
if(refarr[i]['href']!="" && refarr[i]['href']!=nil)
j_depth = s_depth - depth
appendval = "../"
clutch = 0
for r in 1..j_depth
clutch +=1
end
if (Dir.exist?('./pages'+depth.to_s+"/"+clutch.to_s+"set"))
else Dir.mkdir('./pages'+depth.to_s+"/"+clutch.to_s+"set")
end
linkref = ((appendval+'../pages'+depth.to_s+"/"+clutch.to_s+"set/"+i_page.to_s+"x"+i.to_s+"page.html"))
pass_a_link = i_page.to_s+"x"+i.to_s+"page.html"
# already-localized href: flag the recursion as a duplicate so the
# page is not saved twice
if (@location_CSS.has_key?(refarr[i]['href']))
pass_a_link = "this_is_a_duplicate"
refarr[i]['href'] = @location_CSS[(refarr[i]['href'])]
else
initial_link=refarr[i]['href']
refarr[i]['href']=linkref
full_link = linkref
@location_CSS[initial_link]=linkref
#puts "working"
end# @location_CSS.haskey
end #refarr[i]['href']!=""
#trim it down and remove special characters for display
trimval=refarr[i]['href']
finval=trimval.gsub!(/[!:\/-]/, '')
#puts refarr[i]
if(finval==nil && refarr[i]!=nil)
finval=refarr[i]
end #finval == nil
n_depth = depth-1
if(finval!=nil)
self. FLocalize_CSS(url, n_depth, sub_url, s_depth, i, i_page, pass_a_link, selector)
end #finval!=nil
end #fourofour==false
end #refarr[i]!="-"
end#end for each
else#<< depth not > 0
check = (refarr.length-1)
for i in 0..check
if (refarr[i]['href']!=nil && refarr[i]['href']!="")
refarr[i]['href']=""
end
end
end
if (depth == s_depth)
#store newly generated html/links for current page
mainpage =File.new('./page.html',"w")
mainpage.puts page
mainpage.close
else
#store page from the link in the subdirectory
p_depth = depth +1
j_depth = s_depth - depth
appendval = ""
clutch = 0
for r in 1..j_depth
appendval += "../"
clutch +=1
end
clutch -=1
if (link_to_add!="this_is_a_duplicate")
crfile=File.new(('./pages'+p_depth.to_s+"/"+clutch.to_s+"set/"+link_to_add),"w")
encodingissue=false
begin
if(encodingissue==false)
crfile.puts page
end
rescue
encodingissue=true
retry
end
crfile.close
else
end
end
end #end def FLocalize_CSS
#########################################################################################
#############################################################################################
# Variant of Localize that follows only INTERNAL links: anchors whose href
# contains "://" are ignored when collecting, so only relative links
# (resolved against +sub_url+) are localized.  Uses @location_in as the
# href -> local-path map shared with FLocalize_IN; the rewritten start page
# is saved as ./page.html.
def Localize_IN(url, depth, sub_url)
#initialize to extract from user view
@location_in = Hash.new
s_depth = depth
i_page = 0
prev_ipage = 0
link_to_add =""
if (depth<0)
depth=0
end
#open the starting page
page = Nokogiri::HTML(open(url,{ssl_verify_mode: OpenSSL::SSL::VERIFY_NONE}))
#collect all of the links from the page
links= page.css('a')
title = page.css('title')
#initialize variables
refarr=[]
hrefs = []
x=0
#add href to arrays for each link
# internal-only filter: skip any href that carries a scheme ("://")
links.each do |link|
if(link['href']!=nil && link['href']!="" && !link['href'].include?('://'))
# puts x
# puts (link['title'].split.join)
# x+=1
hrefs.push(link)
end
end
total=0
#transfer links to other array
while(!hrefs.empty?)
value= hrefs.pop
refarr.push(value)
total+=1
end
#setup for recognition of the end of the array
refarr.push("-")
if(depth>0)
#create subdirectory for storing current set of scraped pages
if (Dir.exist?('./pages'+depth.to_s))
else Dir.mkdir('./pages'+depth.to_s)
end
#in each link
check = (refarr.length-1)
for i in 0..check
if(refarr[i]!="-")
#evaluate whether link is internal or external
if(refarr[i]['href']!=nil && refarr[i]['href']!="")
if(refarr[i]['href'].include?('://'))
url=refarr[i]['href']
else
url=sub_url+refarr[i]['href']
#puts "external link"
end#refarr[i]['href'].include?
end#refarr[i]['href']!=nil
fourofour=false
begin
if(fourofour==false)
pagina = Nokogiri::HTML(open(url,{ssl_verify_mode: OpenSSL::SSL::VERIFY_NONE}))
end
#test for a 404
rescue Exception =>ex
#puts "got a 404"
#replace href (no navigation onclick)
refarr[i]['href'] =""
fourofour=true
retry
end #begin
if (fourofour==false)
#make relevant links reference local files
if(refarr[i]['href']!="" && refarr[i]['href']!=nil)
j_depth = s_depth - depth
appendval = "../"
clutch = 0
for r in 1..j_depth
clutch +=1
end
if (Dir.exist?('./pages'+depth.to_s+"/"+clutch.to_s+"set"))
else Dir.mkdir('./pages'+depth.to_s+"/"+clutch.to_s+"set")
end
if (depth == s_depth)
linkref = (('./pages'+depth.to_s+"/"+clutch.to_s+"set/"+i_page.to_s+"x"+i.to_s+"page.html").chomp)
else
linkref = ((appendval+'../pages'+depth.to_s+"/"+clutch.to_s+"set/"+i_page.to_s+"x"+i.to_s+"page.html").chomp)
end
pass_a_link = i_page.to_s+"x"+i.to_s+"page.html"
# duplicate href: reuse the path already assigned in @location_in
if (@location_in.has_key?(refarr[i]['href']))
loc = @location_in[(refarr[i]['href'])]
sub_loc = loc.match(/(.\/[a-z]{5}\d{1,20}\/\d{1,20}[a-z]{3}\/\d{1,20}[x]\d{1,20}[a-z]{4}.[a-z]{1,20})/)
refarr[i]['href'] =sub_loc
else
initial_link=refarr[i]['href']
refarr[i]['href']=linkref
#HERE!!!!!**!*!*@*!!@@***!
if (depth == s_depth)
full_link = "../../"+linkref
else
full_link = linkref
end
@location_in[initial_link]=full_link
#puts "working"
end# @location.haskey
end #refarr[i]['href']!=""
#trim it down and remove special characters for display
trimval=refarr[i]['href']
finval=trimval.gsub!(/[!:\/-]/, '')
#puts refarr[i]
if(finval==nil && refarr[i]!=nil)
finval=refarr[i]
end #finval == nil
n_depth = depth-1
if(finval!=nil)
self. FLocalize_IN(url, n_depth, sub_url, s_depth, i, i_page, pass_a_link)
#create subdirectory for storing current links page
#if (Dir.exist?('./pages'+depth.to_s+'/link'+i.to_s))
#else Dir.mkdir('./pages'+depth.to_s+'/link'+i.to_s)
#end
end #finval!=nil
end #fourofour==false
end #refarr[i]!="-"
end#end for each
else#<< depth not > 0
check = (refarr.length-1)
for i in 0..check
if (refarr[i]['href']!=nil && refarr[i]['href']!="")
refarr[i]['href']=""
end
end
end
if (depth == s_depth)
#store newly generated html/links for current page
mainpage =File.new('./page.html',"w")
mainpage.puts page
mainpage.close
else
#store page from the link in the subdirectory
puts "page: "
p_depth = depth +1
j_depth = s_depth - depth
appendval = ""
clutch = 0
for r in 1..j_depth
appendval += "../"
clutch +=1
end
clutch -=1
crfile=File.new(('./pages'+p_depth.to_s+"/"+clutch.to_s+"set/"+link_to_add),"w")
encodingissue=false
begin
if(encodingissue==false)
crfile.puts page
end
rescue
encodingissue=true
retry
end
crfile.close
end
end #end def Localize_IN
#########################################################################################
# Recursive worker for Localize_IN: fetch +url+, collect only internal
# (scheme-less) links, rewrite them against the shared @location_in map,
# recurse while depth > 0, then save the page under
# ./pages<depth+1>/<n>set/<link_to_add> unless flagged as a duplicate.
def FLocalize_IN(url, depth, sub_url, s_depth, i_page, prev_ipage, link_to_add)
#open the starting page
if (depth<0)
depth=0
end
page = Nokogiri::HTML(open(url,{ssl_verify_mode: OpenSSL::SSL::VERIFY_NONE}))
#collect all of the links from the page
links= page.css('a')
title = page.css('title')
#initialize variables
refarr=[]
hrefs = []
x=0
#add href to arrays for each link
# internal-only filter: skip any href that carries a scheme ("://")
links.each do |link|
if(link['href']!=nil && link['href']!="" && !link['href'].include?('://'))
# puts x
# puts (link['title'].split.join)
# x+=1
hrefs.push(link)
end
end
total=0
#transfer links to other array
while(!hrefs.empty?)
value= hrefs.pop
refarr.push(value)
total+=1
end
#setup for recognition of the end of the array
refarr.push("-")
if(depth>0)
#create subdirectory for storing current set of scraped pages
if (Dir.exist?('./pages'+depth.to_s))
else Dir.mkdir('./pages'+depth.to_s)
end
#in each link
check = (refarr.length-1)
for i in 0..check
if(refarr[i]!="-")
#evaluate whether link is internal or external
if(refarr[i]['href']!=nil && refarr[i]['href']!="")
if(refarr[i]['href'].include?('://'))
url=refarr[i]['href']
else
url=sub_url+refarr[i]['href']
#puts "external link"
end#refarr[i]['href'].include?
end#refarr[i]['href']!=nil
fourofour=false
begin
if(fourofour==false)
pagina = Nokogiri::HTML(open(url,{ssl_verify_mode: OpenSSL::SSL::VERIFY_NONE}))
end
#test for a 404
rescue Exception =>ex
#puts "got a 404"
#replace href (no navigation onclick)
refarr[i]['href'] =""
fourofour=true
retry
end #begin
if (fourofour==false)
#make relevant links reference local files
if(refarr[i]['href']!="" && refarr[i]['href']!=nil)
j_depth = s_depth - depth
appendval = "../"
clutch = 0
for r in 1..j_depth
clutch +=1
end
if (Dir.exist?('./pages'+depth.to_s+"/"+clutch.to_s+"set"))
else Dir.mkdir('./pages'+depth.to_s+"/"+clutch.to_s+"set")
end
linkref = ((appendval+'../pages'+depth.to_s+"/"+clutch.to_s+"set/"+i_page.to_s+"x"+i.to_s+"page.html"))
pass_a_link = i_page.to_s+"x"+i.to_s+"page.html"
# already-localized href: flag the recursion as a duplicate so the
# page is not saved twice
if (@location_in.has_key?(refarr[i]['href']))
pass_a_link = "this_is_a_duplicate"
refarr[i]['href'] = @location_in[(refarr[i]['href'])]
else
initial_link=refarr[i]['href']
refarr[i]['href']=linkref
full_link = linkref
@location_in[initial_link]=linkref
#puts "working"
end# @location.haskey
end #refarr[i]['href']!=""
#trim it down and remove special characters for display
trimval=refarr[i]['href']
finval=trimval.gsub!(/[!:\/-]/, '')
#puts refarr[i]
if(finval==nil && refarr[i]!=nil)
finval=refarr[i]
end #finval == nil
n_depth = depth-1
if(finval!=nil)
self. FLocalize_IN(url, n_depth, sub_url, s_depth, i, i_page, pass_a_link)
end #finval!=nil
end #fourofour==false
end #refarr[i]!="-"
end#end for each
else#<< depth not > 0
check = (refarr.length-1)
for i in 0..check
if (refarr[i]['href']!=nil && refarr[i]['href']!="")
refarr[i]['href']=""
end
end
end
if (depth == s_depth)
#store newly generated html/links for current page
mainpage =File.new('./page.html',"w")
mainpage.puts page
mainpage.close
else
#store page from the link in the subdirectory
p_depth = depth +1
j_depth = s_depth - depth
appendval = ""
clutch = 0
for r in 1..j_depth
appendval += "../"
clutch +=1
end
clutch -=1
if (link_to_add!="this_is_a_duplicate")
crfile=File.new(('./pages'+p_depth.to_s+"/"+clutch.to_s+"set/"+link_to_add),"w")
encodingissue=false
begin
if(encodingissue==false)
crfile.puts page
end
rescue
encodingissue=true
retry
end
crfile.close
else
end
end
end #end def FLocalize_IN
#########################################################################################
#############################################################################################
# Variant of Localize that follows only EXTERNAL links: when collecting,
# only anchors whose href contains "://" are kept.  Uses @location_ex as
# the href -> local-path map shared with FLocalize_EX; the rewritten start
# page is saved as ./page.html.
def Localize_EX(url, depth, sub_url)
#initialize to extract from user view
@location_ex = Hash.new
s_depth = depth
i_page = 0
prev_ipage = 0
link_to_add =""
if (depth<0)
depth=0
end
#open the starting page
page = Nokogiri::HTML(open(url,{ssl_verify_mode: OpenSSL::SSL::VERIFY_NONE}))
#collect all of the links from the page
links= page.css('a')
title = page.css('title')
#initialize variables
refarr=[]
hrefs = []
x=0
#add href to arrays for each link
# external-only filter: keep only hrefs that carry a scheme ("://")
links.each do |link|
if(link['href']!=nil && link['href']!="" && link['href'].include?('://'))
# puts x
# puts (link['title'].split.join)
# x+=1
hrefs.push(link)
end
end
total=0
#transfer links to other array
while(!hrefs.empty?)
value= hrefs.pop
refarr.push(value)
total+=1
end
#setup for recognition of the end of the array
refarr.push("-")
if(depth>0)
#create subdirectory for storing current set of scraped pages
if (Dir.exist?('./pages'+depth.to_s))
else Dir.mkdir('./pages'+depth.to_s)
end
#in each link
check = (refarr.length-1)
for i in 0..check
if(refarr[i]!="-")
#evaluate whether link is internal or external
if(refarr[i]['href']!=nil && refarr[i]['href']!="")
if(refarr[i]['href'].include?('://'))
url=refarr[i]['href']
else
url=sub_url+refarr[i]['href']
#puts "external link"
end#refarr[i]['href'].include?
end#refarr[i]['href']!=nil
fourofour=false
begin
if(fourofour==false)
pagina = Nokogiri::HTML(open(url,{ssl_verify_mode: OpenSSL::SSL::VERIFY_NONE}))
end
#test for a 404
rescue Exception =>ex
#puts "got a 404"
#replace href (no navigation onclick)
refarr[i]['href'] =""
fourofour=true
retry
end #begin
if (fourofour==false && refarr[i]['href']!="" && refarr[i]['href']!=nil)
#make relevant links reference local files
if(refarr[i]['href']!="" && refarr[i]['href']!=nil)
j_depth = s_depth - depth
appendval = "../"
clutch = 0
for r in 1..j_depth
clutch +=1
end
if (Dir.exist?('./pages'+depth.to_s+"/"+clutch.to_s+"set"))
else Dir.mkdir('./pages'+depth.to_s+"/"+clutch.to_s+"set")
end
if (depth == s_depth)
linkref = (('./pages'+depth.to_s+"/"+clutch.to_s+"set/"+i_page.to_s+"x"+i.to_s+"page.html").chomp)
else
linkref = ((appendval+'../pages'+depth.to_s+"/"+clutch.to_s+"set/"+i_page.to_s+"x"+i.to_s+"page.html").chomp)
end
pass_a_link = i_page.to_s+"x"+i.to_s+"page.html"
# duplicate href: reuse the path already assigned in @location_ex
if (@location_ex.has_key?(refarr[i]['href']))
loc = @location_ex[(refarr[i]['href'])]
sub_loc = loc.match(/(.\/[a-z]{5}\d{1,20}\/\d{1,20}[a-z]{3}\/\d{1,20}[x]\d{1,20}[a-z]{4}.[a-z]{1,20})/)
refarr[i]['href'] =sub_loc
else
initial_link=refarr[i]['href']
refarr[i]['href']=linkref
#HERE!!!!!**!*!*@*!!@@***!
if (depth == s_depth)
full_link = "../../"+linkref
else
full_link = linkref
end
@location_ex[initial_link]=full_link
#puts "working"
end# @location.haskey
end #refarr[i]['href']!=""
#trim it down and remove special characters for display
trimval=refarr[i]['href']
finval=trimval.gsub!(/[!:\/-]/, '')
#puts refarr[i]
if(finval==nil && refarr[i]!=nil)
finval=refarr[i]
end #finval == nil
n_depth = depth-1
if(finval!=nil)
self. FLocalize_EX(url, n_depth, sub_url, s_depth, i, i_page, pass_a_link)
#create subdirectory for storing current links page
#if (Dir.exist?('./pages'+depth.to_s+'/link'+i.to_s))
#else Dir.mkdir('./pages'+depth.to_s+'/link'+i.to_s)
#end
end #finval!=nil
end #fourofour==false
end #refarr[i]!="-"
end#end for each
else#<< depth not > 0
check = (refarr.length-1)
for i in 0..check
if (refarr[i]['href']!=nil && refarr[i]['href']!="")
refarr[i]['href']=""
end
end
end
if (depth == s_depth)
#store newly generated html/links for current page
mainpage =File.new('./page.html',"w")
mainpage.puts page
mainpage.close
else
#store page from the link in the subdirectory
puts "page: "
p_depth = depth +1
j_depth = s_depth - depth
appendval = ""
clutch = 0
for r in 1..j_depth
appendval += "../"
clutch +=1
end
clutch -=1
crfile=File.new(('./pages'+p_depth.to_s+"/"+clutch.to_s+"set/"+link_to_add),"w")
encodingissue=false
begin
if(encodingissue==false)
crfile.puts page
end
rescue
encodingissue=true
retry
end
crfile.close
end
end #end def Localize_EX
#########################################################################################
def FLocalize_EX(url, depth, sub_url, s_depth, i_page, prev_ipage, link_to_add)
#open the starting page
if (depth<0)
depth=0
end
page = Nokogiri::HTML(open(url,{ssl_verify_mode: OpenSSL::SSL::VERIFY_NONE}))
#collect all of the links from the page
links= page.css('a')
title = page.css('title')
#initialize variables
refarr=[]
hrefs = []
x=0
#add href to arrays for each link
links.each do |link|
if(link['href']!=nil && link['href']!="" && link['href'].include?('://'))
# puts x
# puts (link['title'].split.join)
# x+=1
hrefs.push(link)
end
end
total=0
#transfer links to other array
while(!hrefs.empty?)
value= hrefs.pop
refarr.push(value)
total+=1
end
#setup for recognition of the end of the array
refarr.push("-")
if(depth>0)
#create subdirectory for storing current set of scraped pages
if (Dir.exist?('./pages'+depth.to_s))
else Dir.mkdir('./pages'+depth.to_s)
end
#in each link
check = (refarr.length-1)
for i in 0..check
if(refarr[i]!="-")
#evaluate whether link is internal or external
if(refarr[i]['href']!=nil && refarr[i]['href']!="")
if(refarr[i]['href'].include?('://'))
url=refarr[i]['href']
else
url=sub_url+refarr[i]['href']
#puts "external link"
end#refarr[i]['href'].include?
end#refarr[i]['href']!=nil
fourofour=false
begin
if(fourofour==false)
pagina = Nokogiri::HTML(open(url,{ssl_verify_mode: OpenSSL::SSL::VERIFY_NONE}))
end
#test for a 404
rescue Exception =>ex
#puts "got a 404"
#replace href (no navigation onclick)
refarr[i]['href'] =""
fourofour=true
retry
end #begin
if (fourofour==false && refarr[i]['href']!="" && refarr[i]['href']!=nil)
#make relevant links reference local files
if(refarr[i]['href']!="" && refarr[i]['href']!=nil)
j_depth = s_depth - depth
appendval = "../"
clutch = 0
for r in 1..j_depth
clutch +=1
end
if (Dir.exist?('./pages'+depth.to_s+"/"+clutch.to_s+"set"))
else Dir.mkdir('./pages'+depth.to_s+"/"+clutch.to_s+"set")
end
linkref = ((appendval+'../pages'+depth.to_s+"/"+clutch.to_s+"set/"+i_page.to_s+"x"+i.to_s+"page.html"))
pass_a_link = i_page.to_s+"x"+i.to_s+"page.html"
if (@location_ex.has_key?(refarr[i]['href']))
pass_a_link = "this_is_a_duplicate"
refarr[i]['href'] = @location_ex[(refarr[i]['href'])]
else
initial_link=refarr[i]['href']
refarr[i]['href']=linkref
full_link = linkref
@location_ex[initial_link]=linkref
#puts "working"
end# @location.haskey
end #refarr[i]['href']!=""
#trim it down and remove special characters for display
trimval=refarr[i]['href']
finval=trimval.gsub!(/[!:\/-]/, '')
#puts refarr[i]
if(finval==nil && refarr[i]!=nil)
finval=refarr[i]
end #finval == nil
n_depth = depth-1
if(finval!=nil)
self. FLocalize_EX(url, n_depth, sub_url, s_depth, i, i_page, pass_a_link)
end #finval!=nil
end #fourofour==false
end #refarr[i]!="-"
end#end for each
else#<< depth not > 0
check = (refarr.length-1)
for i in 0..check
if (refarr[i]['href']!=nil && refarr[i]['href']!="")
refarr[i]['href']=""
end
end
end
if (depth == s_depth)
#store newly generated html/links for current page
mainpage =File.new('./page.html',"w")
mainpage.puts page
mainpage.close
else
#store page from the link in the subdirectory
p_depth = depth +1
j_depth = s_depth - depth
appendval = ""
clutch = 0
for r in 1..j_depth
appendval += "../"
clutch +=1
end
clutch -=1
if (link_to_add!="this_is_a_duplicate")
crfile=File.new(('./pages'+p_depth.to_s+"/"+clutch.to_s+"set/"+link_to_add),"w")
encodingissue=false
begin
if(encodingissue==false)
crfile.puts page
end
rescue
encodingissue=true
retry
end
crfile.close
else
end
end
end #end def FLocalize_EX
#########################################################################################
#############################################################################################
def Localize_IN_CSS(url, depth, sub_url,selector)
#initialize to extract from user view
@location_IN_CSS = Hash.new
s_depth = depth
i_page = 0
prev_ipage = 0
link_to_add =""
if (depth<0)
depth=0
end
#open the starting page
page = Nokogiri::HTML(open(url,{ssl_verify_mode: OpenSSL::SSL::VERIFY_NONE}))
#collect all of the links from the page
links= page.css('a')
title = page.css('title')
#initialize variables
refarr=[]
hrefs = []
linkseti= []
linkset= []
x=0
linkseti = page.css(selector+' a')
#add each link with valid href to array
links.each do |link|
if(link['href']!=nil && link['href']!="" && !link['href'].include?('://'))
# puts x
# puts (link['title'].split.join)
# x+=1
hrefs.push(link)
end
end
linkseti.each do |ilink|
if(ilink['href']!=nil && ilink['href']!="")
# puts x
# puts (link['title'].split.join)
# x+=1
linkset.push(ilink)
end
end
hrefslength = (hrefs.length-1)
for i in 0..hrefslength
if(linkset.include?(hrefs[i]))
else
if(hrefs[i]['href']!=nil && hrefs[i]['href']!="")
hrefs[i]['href']=""
end
end
end
#transfer links to other array
while(!hrefs.empty?)
value= hrefs.pop
if (value['href']!=nil && value['href']!="")
refarr.push(value)
end
end
#setup for recognition of the end of the array
refarr.push("-")
if(depth>0)
#create subdirectory for storing current set of scraped pages
if (Dir.exist?('./pages'+depth.to_s))
else Dir.mkdir('./pages'+depth.to_s)
end
#in each link
check = (refarr.length-1)
for i in 0..check
if(refarr[i]!="-")
if(linkset.include?(refarr[i]))
else
if(refarr[i]['href']!=nil && refarr[i]['href']!="")
refarr[i]['href']=""
end
end
#evaluate whether link is internal or external
if(refarr[i]['href']!=nil && refarr[i]['href']!="")
if(refarr[i]['href'].include?('://'))
url=refarr[i]['href']
else
url=sub_url+refarr[i]['href']
#puts "external link"
end#refarr[i]['href'].include?
end#refarr[i]['href']!=nil
fourofour=false
begin
if(fourofour==false && refarr[i]['href']!=nil)
pagina = Nokogiri::HTML(open(url,{ssl_verify_mode: OpenSSL::SSL::VERIFY_NONE}))
end
#test for a 404
rescue Exception =>ex
#puts "got a 404"
#replace href (no navigation onclick)
refarr[i]['href'] =""
fourofour=true
retry
end #begin
if (fourofour==false)
#make relevant links reference local files
if(refarr[i]['href']!="" && refarr[i]['href']!=nil)
j_depth = s_depth - depth
appendval = "../"
clutch = 0
for r in 1..j_depth
clutch +=1
end
if (Dir.exist?('./pages'+depth.to_s+"/"+clutch.to_s+"set"))
else Dir.mkdir('./pages'+depth.to_s+"/"+clutch.to_s+"set")
end
if (depth == s_depth)
linkref = (('./pages'+depth.to_s+"/"+clutch.to_s+"set/"+i_page.to_s+"x"+i.to_s+"page.html").chomp)
else
linkref = ((appendval+'../pages'+depth.to_s+"/"+clutch.to_s+"set/"+i_page.to_s+"x"+i.to_s+"page.html").chomp)
end
pass_a_link = i_page.to_s+"x"+i.to_s+"page.html"
if (@location_IN_CSS.has_key?(refarr[i]['href']))
loc = @location_IN_CSS[(refarr[i]['href'])]
sub_loc = loc.match(/(.\/[a-z]{5}\d{1,20}\/\d{1,20}[a-z]{3}\/\d{1,20}[x]\d{1,20}[a-z]{4}.[a-z]{1,20})/)
refarr[i]['href'] =sub_loc
else
initial_link=refarr[i]['href']
refarr[i]['href']=linkref
#HERE!!!!!**!*!*@*!!@@***!
if (depth == s_depth)
full_link = "../../"+linkref
else
full_link = linkref
end
@location_IN_CSS[initial_link]=full_link
#puts "working"
end# @location_CSS.haskey
end #refarr[i]['href']!=""
#trim it down and remove special characters for display
trimval=refarr[i]['href']
finval=trimval.gsub!(/[!:\/-]/, '')
#puts refarr[i]
if(finval==nil && refarr[i]!=nil)
finval=refarr[i]
end #finval == nil
n_depth = depth-1
if(finval!=nil)
self. FLocalize_IN_CSS(url, n_depth, sub_url, s_depth, i, i_page, pass_a_link, selector)
#create subdirectory for storing current links page
#if (Dir.exist?('./pages'+depth.to_s+'/link'+i.to_s))
#else Dir.mkdir('./pages'+depth.to_s+'/link'+i.to_s)
#end
end #finval!=nil
end #fourofour==false
end #refarr[i]!="-"
end#end for each
else#<< depth not > 0
check = (refarr.length-1)
for i in 0..check
if (refarr[i]['href']!=nil && refarr[i]['href']!="")
refarr[i]['href']=""
end
end
end
if (depth == s_depth)
#store newly generated html/links for current page
mainpage =File.new('./page.html',"w")
mainpage.puts page
mainpage.close
else
#store page from the link in the subdirectory
puts "page: "
p_depth = depth +1
j_depth = s_depth - depth
appendval = ""
clutch = 0
for r in 1..j_depth
appendval += "../"
clutch +=1
end
clutch -=1
crfile=File.new(('./pages'+p_depth.to_s+"/"+clutch.to_s+"set/"+link_to_add),"w")
encodingissue=false
begin
if(encodingissue==false)
crfile.puts page
end
rescue
encodingissue=true
retry
end
crfile.close
end
end #end def Localize_IN_CSS
#########################################################################################
def FLocalize_IN_CSS(url, depth, sub_url, s_depth, i_page, prev_ipage, link_to_add, selector)
#open the starting page
if (depth<0)
depth=0
end
page = Nokogiri::HTML(open(url,{ssl_verify_mode: OpenSSL::SSL::VERIFY_NONE}))
#collect all of the links from the page
links= page.css('a')
title = page.css('title')
#initialize variables
refarr=[]
hrefs = []
linkseti= []
linkset= []
x=0
linkseti = page.css(selector+' a')
#add each link with valid href to array
links.each do |link|
if(link['href']!=nil && link['href']!="" && !link['href'].include?('://'))
# puts x
# puts (link['title'].split.join)
# x+=1
hrefs.push(link)
end
end
linkseti.each do |ilink|
if(ilink['href']!=nil && ilink['href']!="")
# puts x
# puts (link['title'].split.join)
# x+=1
linkset.push(ilink)
end
end
hrefslength = (hrefs.length-1)
for i in 0..hrefslength
if(linkset.include?(hrefs[i]))
else
if(hrefs[i]['href']!=nil && hrefs[i]['href']!="")
hrefs[i]['href']=""
end
end
end
#transfer links to other array
while(!hrefs.empty?)
value= hrefs.pop
if (value['href']!=nil && value['href']!="")
refarr.push(value)
end
end
#setup for recognition of the end of the array
refarr.push("-")
if(depth>0)
#create subdirectory for storing current set of scraped pages
if (Dir.exist?('./pages'+depth.to_s))
else Dir.mkdir('./pages'+depth.to_s)
end
#in each link
check = (refarr.length-1)
for i in 0..check
if(refarr[i]!="-")
#evaluate whether link is internal or external
if(refarr[i]['href']!=nil && refarr[i]['href']!="")
if(refarr[i]['href'].include?('://'))
url=refarr[i]['href']
else
url=sub_url+refarr[i]['href']
#puts "external link"
end#refarr[i]['href'].include?
end#refarr[i]['href']!=nil
fourofour=false
#refarr[i]['href'] is nil :S this a result of reference to other array? how to do a true dup without reference?
begin
if(fourofour==false)
pagina = Nokogiri::HTML(open(url,{ssl_verify_mode: OpenSSL::SSL::VERIFY_NONE}))
end
#test for a 404
rescue Exception =>ex
#puts "got a 404"
#replace href (no navigation onclick)
refarr[i]['href'] =""
fourofour=true
retry
end #begin
if (fourofour==false)
#make relevant links reference local files
if(refarr[i]['href']!="" && refarr[i]['href']!=nil)
j_depth = s_depth - depth
appendval = "../"
clutch = 0
for r in 1..j_depth
clutch +=1
end
if (Dir.exist?('./pages'+depth.to_s+"/"+clutch.to_s+"set"))
else Dir.mkdir('./pages'+depth.to_s+"/"+clutch.to_s+"set")
end
linkref = ((appendval+'../pages'+depth.to_s+"/"+clutch.to_s+"set/"+i_page.to_s+"x"+i.to_s+"page.html"))
pass_a_link = i_page.to_s+"x"+i.to_s+"page.html"
if (@location_IN_CSS.has_key?(refarr[i]['href']))
pass_a_link = "this_is_a_duplicate"
refarr[i]['href'] = @location_IN_CSS[(refarr[i]['href'])]
else
initial_link=refarr[i]['href']
refarr[i]['href']=linkref
full_link = linkref
@location_IN_CSS[initial_link]=linkref
#puts "working"
end# @location_CSS.haskey
end #refarr[i]['href']!=""
#trim it down and remove special characters for display
trimval=refarr[i]['href']
finval=trimval.gsub!(/[!:\/-]/, '')
#puts refarr[i]
if(finval==nil && refarr[i]!=nil)
finval=refarr[i]
end #finval == nil
n_depth = depth-1
if(finval!=nil)
self. FLocalize_IN_CSS(url, n_depth, sub_url, s_depth, i, i_page, pass_a_link, selector)
end #finval!=nil
end #fourofour==false
end #refarr[i]!="-"
end#end for each
else#<< depth not > 0
check = (refarr.length-1)
for i in 0..check
if (refarr[i]['href']!=nil && refarr[i]['href']!="")
refarr[i]['href']=""
end
end
end
if (depth == s_depth)
#store newly generated html/links for current page
mainpage =File.new('./page.html',"w")
mainpage.puts page
mainpage.close
else
#store page from the link in the subdirectory
p_depth = depth +1
j_depth = s_depth - depth
appendval = ""
clutch = 0
for r in 1..j_depth
appendval += "../"
clutch +=1
end
clutch -=1
if (link_to_add!="this_is_a_duplicate")
crfile=File.new(('./pages'+p_depth.to_s+"/"+clutch.to_s+"set/"+link_to_add),"w")
encodingissue=false
begin
if(encodingissue==false)
crfile.puts page
end
rescue
encodingissue=true
retry
end
crfile.close
else
end
end
end #end def FLocalize_IN_CSS
#########################################################################################
#############################################################################################
def Localize_EX_CSS(url, depth, sub_url,selector)
#initialize to extract from user view
@location_EX_CSS = Hash.new
s_depth = depth
i_page = 0
prev_ipage = 0
link_to_add =""
if (depth<0)
depth=0
end
#open the starting page
page = Nokogiri::HTML(open(url,{ssl_verify_mode: OpenSSL::SSL::VERIFY_NONE}))
#collect all of the links from the page
links= page.css('a')
title = page.css('title')
#initialize variables
refarr=[]
hrefs = []
linkseti= []
linkset= []
x=0
linkseti = page.css(selector+' a')
#add each link with valid href to array
links.each do |link|
if(link['href']!=nil && link['href']!="" && link['href'].include?('://'))
# puts x
# puts (link['title'].split.join)
# x+=1
hrefs.push(link)
end
end
linkseti.each do |ilink|
if(ilink['href']!=nil && ilink['href']!="")
# puts x
# puts (link['title'].split.join)
# x+=1
linkset.push(ilink)
end
end
hrefslength = (hrefs.length-1)
for i in 0..hrefslength
if(linkset.include?(hrefs[i]))
else
if(hrefs[i]['href']!=nil && hrefs[i]['href']!="")
hrefs[i]['href']=""
end
end
end
#transfer links to other array
while(!hrefs.empty?)
value= hrefs.pop
if (value['href']!=nil && value['href']!="")
refarr.push(value)
end
end
#setup for recognition of the end of the array
refarr.push("-")
if(depth>0)
#create subdirectory for storing current set of scraped pages
if (Dir.exist?('./pages'+depth.to_s))
else Dir.mkdir('./pages'+depth.to_s)
end
#in each link
check = (refarr.length-1)
for i in 0..check
if(refarr[i]!="-")
if(linkset.include?(refarr[i]))
else
if(refarr[i]['href']!=nil && refarr[i]['href']!="")
refarr[i]['href']=""
end
end
#evaluate whether link is internal or external
if(refarr[i]['href']!=nil && refarr[i]['href']!="")
if(refarr[i]['href'].include?('://'))
url=refarr[i]['href']
else
url=sub_url+refarr[i]['href']
#puts "external link"
end#refarr[i]['href'].include?
end#refarr[i]['href']!=nil
fourofour=false
begin
if(fourofour==false && refarr[i]['href']!=nil)
pagina = Nokogiri::HTML(open(url,{ssl_verify_mode: OpenSSL::SSL::VERIFY_NONE}))
end
#test for a 404
rescue Exception =>ex
#puts "got a 404"
#replace href (no navigation onclick)
refarr[i]['href'] =""
fourofour=true
retry
end #begin
if (fourofour==false)
#make relevant links reference local files
if(refarr[i]['href']!="" && refarr[i]['href']!=nil)
j_depth = s_depth - depth
appendval = "../"
clutch = 0
for r in 1..j_depth
clutch +=1
end
if (Dir.exist?('./pages'+depth.to_s+"/"+clutch.to_s+"set"))
else Dir.mkdir('./pages'+depth.to_s+"/"+clutch.to_s+"set")
end
if (depth == s_depth)
linkref = (('./pages'+depth.to_s+"/"+clutch.to_s+"set/"+i_page.to_s+"x"+i.to_s+"page.html").chomp)
else
linkref = ((appendval+'../pages'+depth.to_s+"/"+clutch.to_s+"set/"+i_page.to_s+"x"+i.to_s+"page.html").chomp)
end
pass_a_link = i_page.to_s+"x"+i.to_s+"page.html"
if (@location_EX_CSS.has_key?(refarr[i]['href']))
loc = @location_EX_CSS[(refarr[i]['href'])]
sub_loc = loc.match(/(.\/[a-z]{5}\d{1,20}\/\d{1,20}[a-z]{3}\/\d{1,20}[x]\d{1,20}[a-z]{4}.[a-z]{1,20})/)
refarr[i]['href'] =sub_loc
else
initial_link=refarr[i]['href']
refarr[i]['href']=linkref
#HERE!!!!!**!*!*@*!!@@***!
if (depth == s_depth)
full_link = "../../"+linkref
else
full_link = linkref
end
@location_EX_CSS[initial_link]=full_link
#puts "working"
end# @location_CSS.haskey
end #refarr[i]['href']!=""
#trim it down and remove special characters for display
trimval=refarr[i]['href']
finval=trimval.gsub!(/[!:\/-]/, '')
#puts refarr[i]
if(finval==nil && refarr[i]!=nil)
finval=refarr[i]
end #finval == nil
n_depth = depth-1
if(finval!=nil)
self. FLocalize_EX_CSS(url, n_depth, sub_url, s_depth, i, i_page, pass_a_link, selector)
#create subdirectory for storing current links page
#if (Dir.exist?('./pages'+depth.to_s+'/link'+i.to_s))
#else Dir.mkdir('./pages'+depth.to_s+'/link'+i.to_s)
#end
end #finval!=nil
end #fourofour==false
end #refarr[i]!="-"
end#end for each
else#<< depth not > 0
check = (refarr.length-1)
for i in 0..check
if (refarr[i]['href']!=nil && refarr[i]['href']!="")
refarr[i]['href']=""
end
end
end
if (depth == s_depth)
#store newly generated html/links for current page
mainpage =File.new('./page.html',"w")
mainpage.puts page
mainpage.close
else
#store page from the link in the subdirectory
puts "page: "
p_depth = depth +1
j_depth = s_depth - depth
appendval = ""
clutch = 0
for r in 1..j_depth
appendval += "../"
clutch +=1
end
clutch -=1
crfile=File.new(('./pages'+p_depth.to_s+"/"+clutch.to_s+"set/"+link_to_add),"w")
encodingissue=false
begin
if(encodingissue==false)
crfile.puts page
end
rescue
encodingissue=true
retry
end
crfile.close
end
end #end def Localize_EX_CSS
#########################################################################################
def FLocalize_EX_CSS(url, depth, sub_url, s_depth, i_page, prev_ipage, link_to_add, selector)
#open the starting page
if (depth<0)
depth=0
end
page = Nokogiri::HTML(open(url,{ssl_verify_mode: OpenSSL::SSL::VERIFY_NONE}))
#collect all of the links from the page
links= page.css('a')
title = page.css('title')
#initialize variables
refarr=[]
hrefs = []
linkseti= []
linkset= []
x=0
linkseti = page.css(selector+' a')
#add each link with valid href to array
links.each do |link|
if(link['href']!=nil && link['href']!="" && link['href'].include?('://'))
# puts x
# puts (link['title'].split.join)
# x+=1
hrefs.push(link)
end
end
linkseti.each do |ilink|
if(ilink['href']!=nil && ilink['href']!="")
# puts x
# puts (link['title'].split.join)
# x+=1
linkset.push(ilink)
end
end
hrefslength = (hrefs.length-1)
for i in 0..hrefslength
if(linkset.include?(hrefs[i]))
else
if(hrefs[i]['href']!=nil && hrefs[i]['href']!="")
hrefs[i]['href']=""
end
end
end
#transfer links to other array
while(!hrefs.empty?)
value= hrefs.pop
if (value['href']!=nil && value['href']!="")
refarr.push(value)
end
end
#setup for recognition of the end of the array
refarr.push("-")
if(depth>0)
#create subdirectory for storing current set of scraped pages
if (Dir.exist?('./pages'+depth.to_s))
else Dir.mkdir('./pages'+depth.to_s)
end
#in each link
check = (refarr.length-1)
for i in 0..check
if(refarr[i]!="-")
#evaluate whether link is internal or external
if(refarr[i]['href']!=nil && refarr[i]['href']!="")
if(refarr[i]['href'].include?('://'))
url=refarr[i]['href']
else
url=sub_url+refarr[i]['href']
#puts "external link"
end#refarr[i]['href'].include?
end#refarr[i]['href']!=nil
fourofour=false
#refarr[i]['href'] is nil :S this a result of reference to other array? how to do a true dup without reference?
begin
if(fourofour==false)
pagina = Nokogiri::HTML(open(url,{ssl_verify_mode: OpenSSL::SSL::VERIFY_NONE}))
end
#test for a 404
rescue Exception =>ex
#puts "got a 404"
#replace href (no navigation onclick)
refarr[i]['href'] =""
fourofour=true
retry
end #begin
if (fourofour==false)
#make relevant links reference local files
if(refarr[i]['href']!="" && refarr[i]['href']!=nil)
j_depth = s_depth - depth
appendval = "../"
clutch = 0
for r in 1..j_depth
clutch +=1
end
if (Dir.exist?('./pages'+depth.to_s+"/"+clutch.to_s+"set"))
else Dir.mkdir('./pages'+depth.to_s+"/"+clutch.to_s+"set")
end
linkref = ((appendval+'../pages'+depth.to_s+"/"+clutch.to_s+"set/"+i_page.to_s+"x"+i.to_s+"page.html"))
pass_a_link = i_page.to_s+"x"+i.to_s+"page.html"
if (@location_EX_CSS.has_key?(refarr[i]['href']))
pass_a_link = "this_is_a_duplicate"
refarr[i]['href'] = @location_EX_CSS[(refarr[i]['href'])]
else
initial_link=refarr[i]['href']
refarr[i]['href']=linkref
full_link = linkref
@location_EX_CSS[initial_link]=linkref
#puts "working"
end# @location_CSS.haskey
end #refarr[i]['href']!=""
#trim it down and remove special characters for display
trimval=refarr[i]['href']
finval=trimval.gsub!(/[!:\/-]/, '')
#puts refarr[i]
if(finval==nil && refarr[i]!=nil)
finval=refarr[i]
end #finval == nil
n_depth = depth-1
if(finval!=nil)
self. FLocalize_EX_CSS(url, n_depth, sub_url, s_depth, i, i_page, pass_a_link, selector)
end #finval!=nil
end #fourofour==false
end #refarr[i]!="-"
end#end for each
else#<< depth not > 0
check = (refarr.length-1)
for i in 0..check
if (refarr[i]['href']!=nil && refarr[i]['href']!="")
refarr[i]['href']=""
end
end
end
if (depth == s_depth)
#store newly generated html/links for current page
mainpage =File.new('./page.html',"w")
mainpage.puts page
mainpage.close
else
#store page from the link in the subdirectory
p_depth = depth +1
j_depth = s_depth - depth
appendval = ""
clutch = 0
for r in 1..j_depth
appendval += "../"
clutch +=1
end
clutch -=1
if (link_to_add!="this_is_a_duplicate")
crfile=File.new(('./pages'+p_depth.to_s+"/"+clutch.to_s+"set/"+link_to_add),"w")
encodingissue=false
begin
if(encodingissue==false)
crfile.puts page
end
rescue
encodingissue=true
retry
end
crfile.close
else
end
end
end #end def FLocalize_EX_CSS
#########################################################################################
end#module
| 27.231064
| 129
| 0.480894
|
55df3d4d3b5adf283087cde234392bb6fc094f0c
| 98
|
sql
|
SQL
|
docs/compiled/jaffle_shop/schema_test/not_null_fct_orders_credit_card_amount.sql
|
tharwaninitin/dbt-example
|
95025f1339f8285e411a37f3c572fd60b75c6618
|
[
"Apache-2.0"
] | null | null | null |
docs/compiled/jaffle_shop/schema_test/not_null_fct_orders_credit_card_amount.sql
|
tharwaninitin/dbt-example
|
95025f1339f8285e411a37f3c572fd60b75c6618
|
[
"Apache-2.0"
] | null | null | null |
docs/compiled/jaffle_shop/schema_test/not_null_fct_orders_credit_card_amount.sql
|
tharwaninitin/dbt-example
|
95025f1339f8285e411a37f3c572fd60b75c6618
|
[
"Apache-2.0"
] | null | null | null |
select count(*)
from "jaffle_shop"."dbt_alice"."fct_orders"
where credit_card_amount is null
| 10.888889
| 43
| 0.755102
|
fb75e85ab5384f1ef6ddae6c5468687e072e2234
| 799
|
c
|
C
|
examples/01-processing/main.c
|
island-org/island
|
3cb72404f8bdf07b49b301690d8ecdd28d424ffa
|
[
"MIT"
] | 254
|
2015-01-05T04:33:08.000Z
|
2022-01-17T16:18:32.000Z
|
examples/01-processing/main.c
|
island-org/island
|
3cb72404f8bdf07b49b301690d8ecdd28d424ffa
|
[
"MIT"
] | 2
|
2016-02-22T09:00:16.000Z
|
2018-05-01T01:32:03.000Z
|
examples/01-processing/main.c
|
island-org/island
|
3cb72404f8bdf07b49b301690d8ecdd28d424ffa
|
[
"MIT"
] | 30
|
2015-02-12T00:54:37.000Z
|
2020-09-11T05:33:53.000Z
|
#define SKETCH_2D_IMPLEMENTATION
#include "sketch2d.h"
PImage img1, img2;
PFont font;
void setup()
{
size(displayWidth, displayHeight);
noCursor();
img1 = loadImage("../3rdparty/nanovg/example/images/image9.jpg");
img2 = loadImage("../3rdparty/nanovg/example/images/image10.jpg");
font = loadFont("../3rdparty/nanovg/example/Roboto-Regular.ttf");
}
void draw()
{
background(gray(122));
if (mousePressed)
{
image(img1, mouseX, mouseY, img1.width, img1.height);
}
else
{
image(img2, mouseX, mouseY, img2.width, img2.height);
}
textFont(font);
textAlign(NVG_ALIGN_CENTER);
textSize(30);
textLeading(5);
text("test everything here", width/2, height/2);
}
void teardown()
{
}
| 20.487179
| 71
| 0.61577
|
fe3a19ab09e455ec7171a1fb667fd720ee8e726c
| 719
|
kt
|
Kotlin
|
src/main/kotlin/exnihilofabrico/modules/tools/HammerTool.kt
|
SirLyle/ExNihiloCreatio
|
5224a14b7b50ab5df34a9edbd5ea9e63d743ed62
|
[
"MIT"
] | 4
|
2019-11-26T22:19:38.000Z
|
2021-12-27T11:21:23.000Z
|
src/main/kotlin/exnihilofabrico/modules/tools/HammerTool.kt
|
SirLyle/ExNihiloCreatio
|
5224a14b7b50ab5df34a9edbd5ea9e63d743ed62
|
[
"MIT"
] | 8
|
2019-11-26T12:37:37.000Z
|
2022-01-09T18:11:36.000Z
|
src/main/kotlin/exnihilofabrico/modules/tools/HammerTool.kt
|
SirLyle/ExNihiloCreatio
|
5224a14b7b50ab5df34a9edbd5ea9e63d743ed62
|
[
"MIT"
] | 12
|
2020-02-15T06:45:46.000Z
|
2021-12-29T22:06:38.000Z
|
package exnihilofabrico.modules.tools
import exnihilofabrico.api.registry.ExNihiloRegistries
import exnihilofabrico.modules.ModTags.HAMMER_TAG
import net.minecraft.block.BlockState
import net.minecraft.item.ItemStack
import net.minecraft.item.ToolMaterial
class HammerTool(material: ToolMaterial, settings: Settings):
ToolItemWithRegistry(material, ExNihiloRegistries.HAMMER, settings.maxDamage(material.durability)) {
override fun isEffectiveOn(state: BlockState) = ExNihiloRegistries.HAMMER.isRegistered(state.block)
companion object {
@JvmStatic
fun isHammer(stack: ItemStack): Boolean {
return (stack.item is HammerTool || stack.item.isIn(HAMMER_TAG))
}
}
}
| 34.238095
| 104
| 0.773296
|
d6844d606b9c241384b24b028b79a1dd56ba8460
| 260
|
cs
|
C#
|
Sibusten.Philomena.Downloader.Cmd/Commands/Preset/Arguments/PresetUpdateCommandArgs.cs
|
basisbit/derpibooru-downloader
|
3d2b3ab35d7982e3aa7c5b9f67d57ef3eac82ee7
|
[
"MIT"
] | 62
|
2017-07-17T14:16:57.000Z
|
2022-01-27T10:18:19.000Z
|
Sibusten.Philomena.Downloader.Cmd/Commands/Preset/Arguments/PresetUpdateCommandArgs.cs
|
basisbit/derpibooru-downloader
|
3d2b3ab35d7982e3aa7c5b9f67d57ef3eac82ee7
|
[
"MIT"
] | 47
|
2017-03-08T22:54:18.000Z
|
2022-03-23T15:28:27.000Z
|
Sibusten.Philomena.Downloader.Cmd/Commands/Preset/Arguments/PresetUpdateCommandArgs.cs
|
basisbit/derpibooru-downloader
|
3d2b3ab35d7982e3aa7c5b9f67d57ef3eac82ee7
|
[
"MIT"
] | 9
|
2018-02-09T15:08:57.000Z
|
2022-03-11T22:58:20.000Z
|
using Sibusten.Philomena.Downloader.Cmd.Commands.Common.Arguments;
namespace Sibusten.Philomena.Downloader.Cmd.Commands.Preset.Arguments
{
public class PresetUpdateCommandArgs : SearchQueryArgs
{
public string Name { get; set; } = "";
}
}
| 26
| 69
| 0.734615
|
0518a6156fdb2610c75d42680c4f9f990dc522c3
| 206
|
css
|
CSS
|
client/src/components/Grid/style.css
|
ashkaning/project-manager-webApp
|
8193505f44bc465219d587f46effaf9afef368da
|
[
"MIT"
] | null | null | null |
client/src/components/Grid/style.css
|
ashkaning/project-manager-webApp
|
8193505f44bc465219d587f46effaf9afef368da
|
[
"MIT"
] | 1
|
2021-09-02T11:55:45.000Z
|
2021-09-02T11:55:45.000Z
|
client/src/components/Grid/style.css
|
ashkaning/project-manager-webApp
|
8193505f44bc465219d587f46effaf9afef368da
|
[
"MIT"
] | null | null | null |
.col-md-4, .col-sm-4{
margin-top: 30px;
}
.colDarkBg{
background-color: black;
color: white;
padding: 20px;
}
.colLightBorder{
border: 1px solid gray;
box-shadow: 2px 5px 10px 6px;
}
| 17.166667
| 33
| 0.626214
|
cdc57953e58923f1151b6ddbbece383cc34784cb
| 4,063
|
cs
|
C#
|
Neustart/AppContainer.cs
|
SuperiorServers/Neustart
|
611c4b0fb167243f4417da4edea3eb00602c89b8
|
[
"MIT"
] | 8
|
2016-03-24T05:38:49.000Z
|
2018-07-16T18:45:01.000Z
|
Neustart/AppContainer.cs
|
SuperiorServers/Neustart
|
611c4b0fb167243f4417da4edea3eb00602c89b8
|
[
"MIT"
] | 6
|
2016-08-08T20:08:42.000Z
|
2022-03-24T22:25:47.000Z
|
Neustart/AppContainer.cs
|
SuperiorServers/Neustart
|
611c4b0fb167243f4417da4edea3eb00602c89b8
|
[
"MIT"
] | 10
|
2016-04-22T22:00:54.000Z
|
2021-06-28T17:46:10.000Z
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.IO;
using Newtonsoft.Json;
using System.Windows.Forms;
using System.Timers;
namespace Neustart
{
public class AppContainer
{
public static event EventHandler OnLoadCompleted;
private List<AppConfig> m_ConfigContainer;
private List<App> m_AppContainer = new List<App>();
private System.Timers.Timer m_SaveTimer;
private string configPath = "Apps.json";
public AppContainer()
{
Debug.Log("App container created");
LoadConfig();
}
private bool LoadConfig()
{
Debug.Log("Loading configurations");
string configString = File.Exists(configPath) ? File.ReadAllText(configPath) : "[]";
try
{
m_ConfigContainer = JsonConvert.DeserializeObject<List<AppConfig>>(configString);
Debug.Log("Loaded. Now populating App container and initializing apps.");
foreach (AppConfig appConfig in m_ConfigContainer)
SetupApp(appConfig);
AppConfig.OnConfiguationChanged += SaveConfig;
SaveConfig(this, null);
OnLoadCompleted?.Invoke(this, null);
} catch(Exception e)
{
Debug.Error("Couldn't load Neustart config: " + e.Message);
if (MessageBox.Show(null, "Configuration file is corrupt. Would you like to reset it?", "Neustart", MessageBoxButtons.YesNo, MessageBoxIcon.Error, MessageBoxDefaultButton.Button2) == DialogResult.Yes)
{
File.Move(configPath, configPath + ".bak");
Debug.Warning("Configuration file has been deleted. Trying again..");
return LoadConfig();
} else
{
Debug.Error("Cannot proceed.");
MessageBox.Show(null, "Please inspect " + configPath + " for JSON errors and correct them. Neustart will now close.", "Neustart");
Core.ForceQuit();
return false;
}
}
return true;
}
private void SaveConfig(object sender, EventArgs e)
{
if (sender != this && !m_ConfigContainer.Contains(sender))
return;
if (m_SaveTimer != null && m_SaveTimer.Enabled) // Just restart the timer we have instead of creating a new one
{
m_SaveTimer.Stop();
m_SaveTimer.Start();
return;
}
m_SaveTimer = new System.Timers.Timer(250)
{
AutoReset = false
};
m_SaveTimer.Elapsed += (o, args) =>
{
Debug.Log("AppConfig modified - saving.");
string jsonData = JsonConvert.SerializeObject(m_ConfigContainer, Formatting.Indented);
File.WriteAllText(configPath, jsonData);
m_SaveTimer.Stop();
};
m_SaveTimer.Start();
}
public void SetupApp(AppConfig appConfig, bool isNew = false)
{
if (isNew)
{
m_ConfigContainer.Add(appConfig);
SaveConfig(this, null);
}
App app = new App(appConfig);
m_AppContainer.Add(app);
}
public void DeleteApp(App app)
{
Forms.Main.Get().AppDeleted(app);
app.Stop();
m_ConfigContainer.Remove(app.Config);
m_AppContainer.Remove(app);
SaveConfig(this, null);
Debug.Log("Deleted app: " + app.Config.ID);
}
public App GetAppByID(string id)
{
foreach(App app in m_AppContainer)
{
if (app.Config.ID == id)
return app;
}
return null;
}
}
}
| 29.230216
| 216
| 0.532857
|
3b69e529aab4094503c174d5bd3380b404b4adb1
| 3,128
|
rs
|
Rust
|
src/scans_test.rs
|
bnclabs/robt
|
2ad5d429ce3e5993d8bf9c0c07ce3c89588359a3
|
[
"MIT"
] | 1
|
2022-02-25T02:18:42.000Z
|
2022-02-25T02:18:42.000Z
|
src/scans_test.rs
|
bnclabs/robt
|
2ad5d429ce3e5993d8bf9c0c07ce3c89588359a3
|
[
"MIT"
] | 2
|
2020-12-22T17:50:31.000Z
|
2020-12-22T17:53:14.000Z
|
src/scans_test.rs
|
bnclabs/robt
|
2ad5d429ce3e5993d8bf9c0c07ce3c89588359a3
|
[
"MIT"
] | null | null | null |
use rand::{prelude::random, rngs::SmallRng, Rng, SeedableRng};
use super::*;
use crate::util;
#[test]
fn test_build_scan() {
use std::time::Duration;
let seed: u128 = random();
// let seed: u128 = 284595450980088120127817086088032225381;
println!("test_build_scan {}", seed);
let mut rng = SmallRng::from_seed(seed.to_le_bytes());
let inserts = 1_000_000;
let mdb = util::load_index(seed, 0, inserts, 0, 1_000, None);
let start_seqno = rng.gen::<u64>() % ((mdb.len() as u64) * 2);
let mut iter = BuildScan::new(mdb.iter().unwrap(), start_seqno);
let mut count = 0;
while let Some(entry) = iter.next() {
count += 1;
if count % 10 == 0 {
iter.push(entry)
}
}
let (build_time, seqno, count, _deleted, epoch, mut iter) = iter.unwrap().unwrap();
println!(
"BuildScan build_time {:?}",
Duration::from_nanos(build_time)
);
println!("BuildScan epoch {:?}", Duration::from_nanos(epoch));
assert_eq!(seqno, cmp::max(start_seqno, mdb.to_seqno()));
assert_eq!(count, mdb.len() as u64);
assert_eq!(iter.next(), None);
}
#[test]
fn test_nobitmap_scan() {
use mkit::nobitmap::NoBitmap;
let seed: u128 = random();
// let seed: u128 = 284595450980088120127817086088032225381;
println!("test_nobitmap_scan {}", seed);
let mut rng = SmallRng::from_seed(seed.to_le_bytes());
let inserts = 1_000_000;
let mdb = util::load_index(seed, 0, inserts, 0, 1_000, None);
// with NoBitmap
let mut iter = BitmappedScan::new(mdb.iter().unwrap(), NoBitmap);
let len: usize = iter.by_ref().map(|_| 1).sum();
let (mut bitmap, mut iter) = iter.unwrap().unwrap();
bitmap.build();
assert_eq!(len, mdb.len());
assert_eq!(iter.next(), None);
assert_eq!(bitmap.to_bytes().unwrap().len(), 0);
let bitmap = NoBitmap::from_bytes(&bitmap.to_bytes().unwrap()).unwrap().0;
for _i in 0..1_000_000 {
let key = rng.gen::<u16>();
assert!(bitmap.contains(&key), "{}", key);
}
}
#[test]
fn test_xorfilter_scan() {
use xorfilter::Xor8;
let seed: u128 = random();
// let seed: u128 = 55460639888202704213451510247183500784;
println!("test_xorfilter_scan {}", seed);
let mut rng = SmallRng::from_seed(seed.to_le_bytes());
let inserts = 1_000_000;
let mdb = util::load_index(seed, 0, inserts, 0, 1_000, None);
// with xorfilter
let mut iter = BitmappedScan::new(mdb.iter().unwrap(), Xor8::new());
let len: usize = iter.by_ref().map(|_| 1).sum();
let (mut bitmap, mut iter) = iter.unwrap().unwrap();
bitmap.build();
assert_eq!(len, mdb.len());
assert_eq!(iter.next(), None);
let bitma = {
let bytes = <Xor8 as Bloom>::to_bytes(&bitmap).unwrap();
<Xor8 as Bloom>::from_bytes(&bytes).unwrap().0
};
let mut found_keys = 0;
for _i in 0..1_000_000 {
let key = rng.gen::<u16>();
if mdb.get(&key).is_ok() {
found_keys += 1;
assert!(bitma.contains(&key), "{}", key);
}
}
println!("found keys in xor8 {}", found_keys);
}
| 31.59596
| 87
| 0.601023
|
be51dd1f8534ecf0f600940e6ea0f5470dcee447
| 4,154
|
ts
|
TypeScript
|
src/math/Math.ts
|
yszhao91/xtorcga
|
9e5225ce713df07e20974f5bd4411acce8faf22c
|
[
"MIT"
] | 264
|
2020-01-06T06:43:42.000Z
|
2020-12-08T07:23:56.000Z
|
src/math/Math.ts
|
yszhao91/xtorcga
|
9e5225ce713df07e20974f5bd4411acce8faf22c
|
[
"MIT"
] | 1
|
2020-02-25T04:34:55.000Z
|
2020-10-23T07:47:32.000Z
|
src/math/Math.ts
|
yszhao91/xtorcga
|
9e5225ce713df07e20974f5bd4411acce8faf22c
|
[
"MIT"
] | 21
|
2020-01-18T02:51:27.000Z
|
2020-12-07T05:55:04.000Z
|
export const delta4 = 1e-4;
export const delta5 = 1e-5;
export const delta6 = 1e-6;
export const delta7 = 1e-7;
export const delta8 = 1e-8;
export const delta9 = 1e-9;
/**
* pi
*
* @type {Number}
* @constant
*/
export const PI = Math.PI;
/**
* 1/pi
*
* @type {Number}
* @constant
*/
export const ONE_OVER_PI = 1.0 / Math.PI;
/**
* pi/2
*
* @type {Number}
* @constant
*/
export const PI_OVER_TWO = Math.PI / 2.0;
/**
* pi/3
*
* @type {Number}
* @constant
*/
export const PI_OVER_THREE = Math.PI / 3.0;
/**
* pi/4
*
* @type {Number}
* @constant
*/
export const PI_OVER_FOUR = Math.PI / 4.0;
/**
* pi/6
*
* @type {Number}
* @constant
*/
export const PI_OVER_SIX = Math.PI / 6.0;
/**
* 3pi/2
*
* @type {Number}
* @constant
*/
export const THREE_PI_OVER_TWO = (3.0 * Math.PI) / 2.0;
/**
* 2pi
*
* @type {Number}
* @constant
*/
export const PI_TWO = 2.0 * Math.PI;
/**
* 1/2pi
*
* @type {Number}
* @constant
*/
export const ONE_OVER_TWO_PI = 1.0 / (2.0 * Math.PI);
/**
* The number of radians in a degree.
*
* @type {Number}
* @constant
*/
export const RADIANS_PER_DEGREE = Math.PI / 180.0;
/**
* The number of degrees in a radian.
*
* @type {Number}
* @constant
*/
export const DEGREES_PER_RADIAN = 180.0 / Math.PI;
/**
* The number of radians in an arc second.
*
* @type {Number}
* @constant
*/
export const RADIANS_PER_ARCSECOND = RADIANS_PER_DEGREE / 3600.0;
export function sign(value: number) {
return value >= 0 ? 1 : -1;
}
export function approximateEqual(v1: number, v2: number, precision = delta4) {
return Math.abs(v1 - v2) < precision
}
export function clamp(value: number, min: number, max: number) {
return Math.max(min, Math.min(max, value));
}
export function lerp(x: number, y: number, t: number) {
return (1 - t) * x + t * y;
}
export function smoothstep(x: number, min: number, max: number) {
if (x <= min) return 0;
if (x >= max) return 1;
x = (x - min) / (max - min);
return x * x * (3 - 2 * x);
}
export function smootherstep(x: number, min: number, max: number) {
if (x <= min) return 0;
if (x >= max) return 1;
x = (x - min) / (max - min);
return x * x * x * (x * (x * 6 - 15) + 10);
}
// Random integer from <low, high> interval
export function randInt(low: number, high: number) {
return low + Math.floor(Math.random() * (high - low + 1));
}
// Random float from <low, high> interval
/**
* 生成一个low~high之间的浮点数
* @param {*} low
* @param {*} high
*/
export function randFloat(low: number, high: number) {
return low + Math.random() * (high - low);
}
export function isPowerOfTwo(value: number) {
return (value & (value - 1)) === 0 && value !== 0;
}
export function ceilPowerOfTwo(value: number) {
return Math.pow(2, Math.ceil(Math.log(value) / Math.LN2));
}
export function floorPowerOfTwo(value: number) {
return Math.pow(2, Math.floor(Math.log(value) / Math.LN2));
}
export function toRadians(degrees: number) {
return degrees * RADIANS_PER_DEGREE;
}
export function ToDegrees(radians: number) {
return radians * DEGREES_PER_RADIAN;
}
/**
* 数字或者向量固定位数
* @param {Object} obj 数字或者向量
* @param {*} fractionDigits
*/
export function toFixed(obj: { toFixed: (arg0: any) => string; x: number | undefined; y: number | undefined; z: number | undefined; }, fractionDigits: number | undefined) {
if (obj instanceof Number)
return parseFloat(obj.toFixed(fractionDigits))
else {
if (obj.x !== undefined)
obj.x = parseFloat(obj.x.toFixed(fractionDigits))
if (obj.y !== undefined)
obj.y = parseFloat(obj.y.toFixed(fractionDigits))
if (obj.z !== undefined)
obj.z = parseFloat(obj.z.toFixed(fractionDigits))
}
return obj;
}
/**
* 数组中所有数字或者向量固定位数
* @param {Array} array
* @param {Number} precision
*/
export function toFixedAry(array: Array<any>, precision: number = delta4) {
for (let i = 0; i < array.length; i++) {
const e = array[i];
if (e instanceof Array)
toFixedAry(e);
else
array[i] = toFixed(e, precision);
}
}
| 19.411215
| 172
| 0.604959
|
8e29350b3f201102d9da54603ed7bf1d46ddf0ae
| 1,927
|
rs
|
Rust
|
mdlint/src/rules/md001.rs
|
robertohuertasm/rusty-markdownlint
|
986abc7ccfb103b5a065fcdbb774780f33242712
|
[
"MIT"
] | 2
|
2019-05-28T10:53:37.000Z
|
2019-07-03T00:45:23.000Z
|
mdlint/src/rules/md001.rs
|
robertohuertasm/rusty-markdownlint
|
986abc7ccfb103b5a065fcdbb774780f33242712
|
[
"MIT"
] | null | null | null |
mdlint/src/rules/md001.rs
|
robertohuertasm/rusty-markdownlint
|
986abc7ccfb103b5a065fcdbb774780f33242712
|
[
"MIT"
] | 1
|
2018-11-16T14:02:59.000Z
|
2018-11-16T14:02:59.000Z
|
use comrak::nodes::{AstNode, NodeValue};
use crate::parser::{filter_nodes, is_heading};
use crate::rules::extensions::VecExt;
use crate::ruleset::{RuleResult, RuleResultDetails};
crate fn check<'a>(root: &'a AstNode<'a>) -> RuleResult {
let mut prev_level = 0;
let mut details: Vec<RuleResultDetails> = Vec::new();
filter_nodes(root, is_heading)
.into_iter()
.map(|x| x.data.borrow())
.for_each(|node| {
if let NodeValue::Heading(x) = node.value {
let current_level = x.level;
if current_level > prev_level + 1 {
details.push(RuleResultDetails::from_node(&node));
}
prev_level = current_level;
}
});
RuleResult::new(
"MD001",
"header-increment",
"Header levels should only increment by one level at a time",
details.to_option(),
)
}
#[cfg(test)]
mod test {
use super::*;
use crate::parser::get_ast;
use crate::rules::common_tests;
use typed_arena::Arena;
#[test]
fn it_does_not_have_details_if_all_ok() {
common_tests::all_ok("fixtures/md001/md001_ok.md", Box::new(check));
}
#[test]
fn it_has_details_if_ko() {
let arena = Arena::new();
let root = get_ast("fixtures/md001/md001_ko.md", &arena);
let result = check(root);
assert!(result.details.is_some());
let details = result.details.unwrap();
assert_eq!(details.len(), 1);
let first = &details[0];
assert_eq!(first.line, 9);
assert_eq!(first.column, 1);
assert_eq!(first.content, "TITLE3");
}
#[test]
fn it_does_not_have_details_if_no_headers() {
let arena = Arena::new();
let root = get_ast("fixtures/md001/md001_no_items.md", &arena);
let result = check(root);
assert!(result.details.is_none());
}
}
| 29.19697
| 76
| 0.582252
|
347a1242f41b54c682ab56d145d9ab2f1c25864e
| 439
|
sql
|
SQL
|
source/sql/trigger/comment_on_movie.sql
|
amirhnajafiz/DB-Final-Project
|
4d7d517fe2abd9949e959fa0805259488928ec33
|
[
"MIT"
] | 4
|
2021-07-22T12:03:12.000Z
|
2021-07-27T22:28:45.000Z
|
source/sql/trigger/comment_on_movie.sql
|
amirhnajafiz/DB-Final-Project
|
4d7d517fe2abd9949e959fa0805259488928ec33
|
[
"MIT"
] | null | null | null |
source/sql/trigger/comment_on_movie.sql
|
amirhnajafiz/DB-Final-Project
|
4d7d517fe2abd9949e959fa0805259488928ec33
|
[
"MIT"
] | null | null | null |
CREATE TRIGGER comment_on_movie AFTER INSERT ON "comment"
BEGIN
SELECT CASE
WHEN(NEW.username NOT IN (SELECT W.username FROM "watch" as W WHERE NEW.movie_id = W.movie_id)) AND (NEW.username NOT IN (SELECT U.username FROM "watch_special" as W JOIN "special_user" as U ON W.pro_id = U.pro_id WHERE NEW.movie_id = W.movie_id))
THEN RAISE(ABORT, 'Cannot comment on this movie. Not watched yet!')
END;
END;
END;
| 54.875
| 255
| 0.70615
|
d4e71ba99d682768dc6b89789b75e42aa3519c06
| 1,296
|
tsx
|
TypeScript
|
packages/memo/dev/grouped.tsx
|
lawrencecchen/solid-primitives
|
11f7dc29b424f2caba8c7543588920b131cd09af
|
[
"MIT"
] | null | null | null |
packages/memo/dev/grouped.tsx
|
lawrencecchen/solid-primitives
|
11f7dc29b424f2caba8c7543588920b131cd09af
|
[
"MIT"
] | null | null | null |
packages/memo/dev/grouped.tsx
|
lawrencecchen/solid-primitives
|
11f7dc29b424f2caba8c7543588920b131cd09af
|
[
"MIT"
] | null | null | null |
import { createDebouncedMemo, createThrottledMemo } from "../src";
import { Component, Show } from "solid-js";
import { createMousePosition } from "@solid-primitives/mouse";
const Grouped: Component = () => {
const [poz] = createMousePosition();
const debPoz = createDebouncedMemo(() => ({ x: poz.x(), y: poz.y() }), 200);
const thrPoz = createThrottledMemo(() => ({ x: poz.x(), y: poz.y() }), 200);
return (
<div>
<div
class="ball bg-green-500"
style={{
transform: `translate(${poz.x()}px, ${poz.y()}px)`
}}
></div>
<Show when={debPoz()}>
{({ x, y }) => (
<div
class="ball bg-yellow-600"
style={{
transform: `translate(${x}px, ${y}px)`
}}
></div>
)}
</Show>
<Show when={thrPoz()}>
{({ x, y }) => (
<div
class="ball bg-cyan-500"
style={{
transform: `translate(${x}px, ${y}px)`
}}
></div>
)}
</Show>
<p class="font-bold text-green-500 opacity-50">normal</p>
<p class="font-bold text-yellow-600 opacity-50">debounced</p>
<p class="font-bold text-cyan-500 opacity-50">throttled</p>
</div>
);
};
export default Grouped;
| 28.8
| 78
| 0.490741
|
8e5a2839a28f0c01e8a5b56741b5fdd033aec1a3
| 404
|
js
|
JavaScript
|
src/rdx/actions/index.js
|
GoodNightCouchSide/gncs
|
5193a3345b13759347b0e469ef1a1e9f584f7db0
|
[
"Apache-2.0"
] | null | null | null |
src/rdx/actions/index.js
|
GoodNightCouchSide/gncs
|
5193a3345b13759347b0e469ef1a1e9f584f7db0
|
[
"Apache-2.0"
] | null | null | null |
src/rdx/actions/index.js
|
GoodNightCouchSide/gncs
|
5193a3345b13759347b0e469ef1a1e9f584f7db0
|
[
"Apache-2.0"
] | null | null | null |
import { createAction } from 'redux-actions'
import { PUSH_ROUTE_TO_HISTORY } from 'rdx/constants/actionTypes'
export const pushRouteWihtDebounce = (debounce) =>
createAction(
PUSH_ROUTE_TO_HISTORY,
(route) => route,
() => ({
debounce: {
time: debounce,
key: 'PUSH_ROUTE_TO_HISTORY',
},
})
)
export const pushRoute = createAction(PUSH_ROUTE_TO_HISTORY)
| 22.444444
| 65
| 0.663366
|
b2e7ddbdca73cb92800fc2213d50c880b433f6d0
| 679
|
css
|
CSS
|
page_assets/style_list.css
|
wakepon21/Gasyori100knock
|
b9c92bfda9d2288d2b4742c2fb67719ee0ab338b
|
[
"MIT"
] | 5
|
2021-06-08T16:09:01.000Z
|
2021-12-10T09:42:43.000Z
|
page_assets/style_list.css
|
wakepon21/Gasyori100knock
|
b9c92bfda9d2288d2b4742c2fb67719ee0ab338b
|
[
"MIT"
] | null | null | null |
page_assets/style_list.css
|
wakepon21/Gasyori100knock
|
b9c92bfda9d2288d2b4742c2fb67719ee0ab338b
|
[
"MIT"
] | 1
|
2021-05-24T04:14:27.000Z
|
2021-05-24T04:14:27.000Z
|
/*
list checkmark
*/
.checklist{
list-style-type: none;
}
.checklist > li {
position: relative;
}
.checklist > li::after {
content: '';
display: block;
position: absolute;
top: .5em;
left: -1.5em;
width: 10px;
height: 5px;
border-left: 2px solid #25AF01;
border-bottom: 2px solid #25AF01;
transform: rotate(-45deg);
}
.nonhead-list > ul {
list-style-type: none;
}
.nonhead-list > li {
position: relative;
}
.nonheadlist > li::after {
content: '';
display: block;
position: absolute;
top: .5em;
left: -1.5em;
width: 10px;
height: 5px;
border-left: 2px solid #25AF01;
border-bottom: 2px solid #25AF01;
transform: rotate(-45deg);
}
| 16.166667
| 35
| 0.637703
|
05a8d03267afbafd11cfe922730ee5e7d3cb4c4f
| 8,030
|
py
|
Python
|
tools/rs_vis.py
|
PengchengAi/tf-faster-rcnn-pcai
|
b054345728c20c8eb64561a6800e85594ce76fd3
|
[
"MIT"
] | null | null | null |
tools/rs_vis.py
|
PengchengAi/tf-faster-rcnn-pcai
|
b054345728c20c8eb64561a6800e85594ce76fd3
|
[
"MIT"
] | null | null | null |
tools/rs_vis.py
|
PengchengAi/tf-faster-rcnn-pcai
|
b054345728c20c8eb64561a6800e85594ce76fd3
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# --------------------------------------------------------
# Tensorflow Faster R-CNN
# Licensed under The MIT License [see LICENSE for details]
# Written by Xinlei Chen, based on code from Ross Girshick
# --------------------------------------------------------
"""
Demo script showing detections in sample images.
See README.md for installation instructions before running.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import _init_paths
from model.config import cfg
from model.test import im_detect
from model.nms_wrapper import nms
from utils.timer import Timer
import tensorflow as tf
import matplotlib.pyplot as plt
import numpy as np
import os, cv2
import argparse
from nets.vgg16 import vgg16
from nets.resnet_v1 import resnetv1
import pyrealsense2 as rs
CLASSES = ('__background__',
'box', 'sucker')
NETS = {'vgg16': ('vgg16_faster_rcnn_iter_2000.ckpt',),'res101': ('res101_faster_rcnn_iter_110000.ckpt',)}
DATASETS= {'pascal_voc': ('voc_2007_trainval',),'pascal_voc_0712': ('voc_2007_trainval+voc_2012_trainval',)}
def vis_detections(color_image, depth_colormap, class_col, dets_col, thresh=0.5):
"""Draw detected bounding boxes."""
for cls_ind, class_name in enumerate(class_col):
dets = dets_col[cls_ind]
inds = np.where(dets[:, -1] >= thresh)[0]
if len(inds) == 0:
continue
for i in inds:
bbox = [int(e) for e in dets[i, :4]]
score = dets[i, -1]
cv2.rectangle(color_image, (bbox[0], bbox[1]),
(bbox[2], bbox[3]), (0, 0, 255), 3)
cv2.rectangle(depth_colormap, (bbox[0], bbox[1]),
(bbox[2], bbox[3]), (0, 0, 255), 3)
font = cv2.FONT_HERSHEY_SIMPLEX
color_image = cv2.putText(color_image, '{:s} {:.3f}'.format(class_name, score),
(bbox[0], max(bbox[1] - 2, 1)), font, 0.5, (255, 255, 255), 2)
depth_colormap = cv2.putText(depth_colormap, '{:s} {:.3f}'.format(class_name, score),
(bbox[0], max(bbox[1] - 2, 1)), font, 0.5, (255, 255, 255), 2)
# Stack both images horizontally
images = np.hstack((color_image, depth_colormap))
# Show images
cv2.imshow('RealSense', images)
def calc_histogram(depth_image, class_col, dets_col, thresh=0.5):
# return value
depth_col = np.zeros((len(class_col), 2), dtype=float)
bbox_col = np.zeros((len(class_col), 4), dtype=float)
# per class
for cls_ind in range(len(class_col)):
dets = dets_col[cls_ind]
inds = np.where(dets[:, -1] >= thresh)[0]
if len(inds) == 0:
continue
ind = np.argmax(dets[:, -1])
bbox = [int(e) for e in dets[ind, :4]]
depth_select = depth_image[bbox[1]:bbox[3], bbox[0]:bbox[2]]
# plt.imshow(depth_select)
# plt.colorbar()
# plt.show()
depth_select = np.reshape(depth_select, (-1))
depth_index = np.array([i for i, elem in enumerate(depth_select) if elem > 1500],
dtype=np.int32)
depth_select = depth_select[depth_index]
depth_hist, bin_edge = np.histogram(depth_select, bins="fd")
# plt.hist(depth_select, bins="fd")
# plt.show()
# plt.close("all")
depth_mean = np.mean([elem for elem in depth_hist])
front = bin_edge[0]
end = bin_edge[-1]
in_middle = False
for i, elem in enumerate(depth_hist):
if elem >= depth_mean:
front = bin_edge[i]
in_middle = True
if in_middle and elem <= depth_mean:
end = bin_edge[i]
in_middle = False
break
depth_col[cls_ind, :] = np.array((front, end))
bbox_col[cls_ind, :] = np.array((dets[ind, :4]))
return depth_col, bbox_col
def demo(sess, net, color_image, depth_colormap):
"""Detect object classes in an image using pre-computed object proposals."""
# Detect all object classes and regress object bounds
timer = Timer()
timer.tic()
scores, boxes = im_detect(sess, net, color_image)
timer.toc()
print('Detection took {:.3f}s for {:d} object proposals'.format(timer.total_time, boxes.shape[0]))
# Visualize detections for each class
CONF_THRESH = 0.7
NMS_THRESH = 0.3
dets_col = []
cls_col = []
for cls_ind, cls in enumerate(CLASSES[1:]):
cls_ind += 1 # because we skipped background
cls_boxes = boxes[:, 4*cls_ind:4*(cls_ind + 1)]
cls_scores = scores[:, cls_ind]
dets = np.hstack((cls_boxes,
cls_scores[:, np.newaxis])).astype(np.float32)
keep = nms(dets, NMS_THRESH)
dets = dets[keep, :]
dets_col.append(dets)
cls_col.append(cls)
vis_detections(color_image, depth_colormap, cls_col, dets_col, thresh=CONF_THRESH)
depth_col, bbox_col = calc_histogram(depth_image, cls_col, dets_col, thresh=CONF_THRESH)
print("box depth:", depth_col[0], "sucker depth:", depth_col[1])
print("box bbox:", bbox_col[0], "sucker bbox", bbox_col[1])
def parse_args():
"""Parse input arguments."""
parser = argparse.ArgumentParser(description='Tensorflow Faster R-CNN demo')
parser.add_argument('--net', dest='demo_net', help='Network to use [vgg16 res101]',
choices=NETS.keys(), default='vgg16')
parser.add_argument('--dataset', dest='dataset', help='Trained dataset [pascal_voc pascal_voc_0712]',
choices=DATASETS.keys(), default='pascal_voc')
args = parser.parse_args()
return args
if __name__ == '__main__':
cfg.TEST.HAS_RPN = True # Use RPN for proposals
args = parse_args()
# model path
demonet = args.demo_net
dataset = args.dataset
tfmodel = os.path.join('output', demonet, DATASETS[dataset][0], 'default',
NETS[demonet][0])
if not os.path.isfile(tfmodel + '.meta'):
raise IOError(('{:s} not found.\nDid you download the proper networks from '
'our server and place them properly?').format(tfmodel + '.meta'))
# set config
tfconfig = tf.ConfigProto(allow_soft_placement=True)
tfconfig.gpu_options.allow_growth=True
# init session
sess = tf.Session(config=tfconfig)
# load network
if demonet == 'vgg16':
net = vgg16()
elif demonet == 'res101':
net = resnetv1(num_layers=101)
else:
raise NotImplementedError
net.create_architecture("TEST", 3,
tag='default', anchor_scales=[8, 16, 32])
saver = tf.train.Saver()
saver.restore(sess, tfmodel)
print('Loaded network {:s}'.format(tfmodel))
# Configure depth and color streams
pipeline = rs.pipeline()
config = rs.config()
config.enable_stream(rs.stream.depth, 640, 480, rs.format.z16, 30)
config.enable_stream(rs.stream.color, 640, 480, rs.format.bgr8, 30)
# Start streaming
pipeline.start(config)
while True:
# Wait for a coherent pair of frames: depth and color
frames = pipeline.wait_for_frames()
depth_frame = frames.get_depth_frame()
color_frame = frames.get_color_frame()
if not depth_frame or not color_frame:
continue
# Convert images to numpy arrays
depth_image = np.asanyarray(depth_frame.get_data())
color_image = np.asanyarray(color_frame.get_data())
# Apply colormap on depth image (image must be converted to 8-bit per pixel first)
depth_colormap = cv2.applyColorMap(cv2.convertScaleAbs(depth_image, alpha=0.03), cv2.COLORMAP_JET)
cv2.namedWindow('RealSense', cv2.WINDOW_AUTOSIZE)
demo(sess, net, color_image, depth_colormap)
if cv2.waitKey(1) & 0xFF == ord('q'):
print("I'm done")
break
| 34.612069
| 108
| 0.608219
|
e6b7ef9125b87b612e5c9eda89ee064e187fbdb6
| 2,469
|
h
|
C
|
Source/AstralShipwright/UI/Menu/NovaMainMenuFlight.h
|
arbonagw/Nova
|
a09331a903f4a2da1c5398f870d57b2b96dc194b
|
[
"BSD-3-Clause"
] | null | null | null |
Source/AstralShipwright/UI/Menu/NovaMainMenuFlight.h
|
arbonagw/Nova
|
a09331a903f4a2da1c5398f870d57b2b96dc194b
|
[
"BSD-3-Clause"
] | null | null | null |
Source/AstralShipwright/UI/Menu/NovaMainMenuFlight.h
|
arbonagw/Nova
|
a09331a903f4a2da1c5398f870d57b2b96dc194b
|
[
"BSD-3-Clause"
] | null | null | null |
// Astral Shipwright - Gwennaël Arbona
#pragma once
#include "UI/NovaUI.h"
#include "UI/Widget/NovaTabView.h"
#include "Online.h"
/** Flight menu */
class SNovaMainMenuFlight
: public SNovaTabPanel
, public INovaGameMenu
{
/*----------------------------------------------------
Slate arguments
----------------------------------------------------*/
SLATE_BEGIN_ARGS(SNovaMainMenuFlight)
{}
SLATE_ARGUMENT(class SNovaMenu*, Menu)
SLATE_ARGUMENT(TWeakObjectPtr<class UNovaMenuManager>, MenuManager)
SLATE_END_ARGS()
public:
SNovaMainMenuFlight();
void Construct(const FArguments& InArgs);
/*----------------------------------------------------
Interaction
----------------------------------------------------*/
virtual void Tick(const FGeometry& AllottedGeometry, const double CurrentTime, const float DeltaTime) override;
virtual void Show() override;
virtual void Hide() override;
virtual void UpdateGameObjects() override;
virtual void HorizontalAnalogInput(float Value) override;
virtual void VerticalAnalogInput(float Value) override;
virtual TSharedPtr<SNovaButton> GetDefaultFocusButton() const override;
/*----------------------------------------------------
Content callbacks
----------------------------------------------------*/
protected:
bool CanFastForward() const;
bool IsUndockEnabled() const;
bool IsDockEnabled() const;
bool IsManeuveringEnabled() const;
bool IsMainDriveAvailable() const;
/*----------------------------------------------------
Callbacks
----------------------------------------------------*/
protected:
void FastForward();
void OnUndock();
void OnDock();
void OnAlignToManeuver();
void OnEnableMainDrive();
/*----------------------------------------------------
Data
----------------------------------------------------*/
protected:
// Game objects
TWeakObjectPtr<UNovaMenuManager> MenuManager;
class ANovaPlayerController* PC;
class ANovaSpacecraftPawn* SpacecraftPawn;
class UNovaSpacecraftMovementComponent* SpacecraftMovement;
class ANovaGameState* GameState;
class UNovaOrbitalSimulationComponent* OrbitalSimulation;
// Slate widgets
TSharedPtr<class SNovaButton> UndockButton;
TSharedPtr<class SNovaButton> DockButton;
TSharedPtr<class SNovaButton> AlignManeuverButton;
TSharedPtr<class SNovaButton> AuthorizeManeuverButton;
TSharedPtr<class SNovaButton> FastForwardButton;
};
| 25.71875
| 112
| 0.594978
|