identifier
stringlengths 42
383
| collection
stringclasses 1
value | open_type
stringclasses 1
value | license
stringlengths 0
1.81k
| date
float64 1.99k
2.02k
⌀ | title
stringlengths 0
100
| creator
stringlengths 1
39
| language
stringclasses 157
values | language_type
stringclasses 2
values | word_count
int64 1
20k
| token_count
int64 4
1.32M
| text
stringlengths 5
1.53M
| __index_level_0__
int64 0
57.5k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|
https://github.com/fbraem/kwai-api/blob/master/src/kwai/Modules/Users/UseCases/InviteUserCommand.php
|
Github Open Source
|
Open Source
|
MIT
| 2,022
|
kwai-api
|
fbraem
|
PHP
|
Code
| 90
| 216
|
<?php
/**
* @package Modules
* @subpackage Users
*/
declare(strict_types = 1);
namespace Kwai\Modules\Users\UseCases;
/**
 * InviteUserCommand is a data transfer object for the use case InviteUser.
 *
 * It only carries the input values for creating an invitation; it has no behavior.
 */
final class InviteUserCommand
{
    /**
     * Email address of the sender
     */
    public string $sender_mail;
    /**
     * The name of the sender
     */
    public string $sender_name;
    /**
     * Email address of the recipient (the invited person)
     */
    public string $email;
    /**
     * Number of days before the invitation expires
     */
    public int $expiration;
    /**
     * Optional remark attached to the invitation (null when absent)
     */
    public ?string $remark = null;
    /**
     * Username
     */
    public string $name;
}
| 6,999
|
https://github.com/smirkingman/RealSense-Calibrator/blob/master/UI/Forms/Main/SessionEvents.vb
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,022
|
RealSense-Calibrator
|
smirkingman
|
Visual Basic
|
Code
| 363
| 1,272
|
Imports System.ComponentModel
Imports System.Threading
Imports Calibrator
Imports NLog
''' <summary>
''' Session event handlers for the main form: camera switching, frame
''' rendering, plane-measurement updates and menu enable/disable per state.
''' All UI mutations from worker callbacks are marshalled via BeginInvoke.
''' </summary>
Partial Public Class Main

    ' Swap to the newly selected camera and restart streaming.
    Private Sub Session_CameraChanged(sender As Object, oldcamera As Camera, newcamera As Camera) Handles _Session.CameraChanged
        Try
            _Camera = newcamera
            StartCamera()
        Catch ex As Exception
            HandleError(ex)
        End Try
    End Sub

    ' Push the latest colour/depth images into the picture controls, and —
    ' throttled by _DGVClock — refresh the FOV read-outs and measures grid
    ' on the UI thread.
    Private Sub Session_FrameReady(sender As Object, depthmap As DepthMap, colour As Bitmap, colorised As Bitmap) Handles _Session.FrameReady
        Try
            If _ColourPicture IsNot Nothing Then
                _ColourPicture.Image = colour
            End If
            If _DepthPicture IsNot Nothing Then
                _DepthPicture.DepthMap = depthmap
                _DepthPicture.Image = colorised
            End If
            ' Only rebuild the (relatively expensive) grid/labels when the
            ' refresh clock has expired, not on every frame.
            If _DGVClock.Expired Then
                BeginInvoke(
                    Sub()
                        Try
                            txtCamHFov.Text = _Camera.DepthHFov.ToString("#,##0.000000")
                            txtCamHFovD.Text = (_Camera.DepthHFov * TODEGREES).ToString("#,##0.000")
                            txtCamVFov.Text = _Camera.DepthVFov.ToString("#,##0.000000")
                            txtCamVFovD.Text = (_Camera.DepthVFov * TODEGREES).ToString("#,##0.000")
                            dgvMeasures.AutoResizeColumns(DataGridViewAutoSizeColumnsMode.AllCells)
                            dgvMeasures.AutoGenerateColumns = False
                            dgvMeasures.DataSource = _Session.Planes
                            dgvMeasures.Refresh()
                            If _Session.State = State.ImproveTargets Then
                                Measuring1.ShowSkew()
                            End If
                        Catch ex As Exception
                            HandleError(ex)
                        End Try
                    End Sub)
            End If
        Catch ex As Exception
            HandleError(ex)
        End Try
    End Sub

    ' A new measurement plane arrived: show it in the measuring control and
    ' rebind the measures grid (on the UI thread).
    Private Sub _Session_PlaneAdded(sender As Object, plane As Plane) Handles _Session.PlaneAdded
        Try
            BeginInvoke(
                Sub()
                    Try
                        Measuring1.Plane = plane
                        dgvMeasures.AutoGenerateColumns = False
                        dgvMeasures.DataSource = _Session.Planes
                        dgvMeasures.Refresh()
                    Catch ex As Exception
                        HandleError(ex)
                    End Try
                End Sub)
        Catch ex As Exception
            HandleError(ex)
        End Try
    End Sub

    ' Enable/disable the menu items for the new session state.
    ' NOTE(review): the lambda declares a "newstate" parameter but the body
    ' switches on the captured "state" variable — same value here, so the
    ' parameter is effectively unused; confirm intent before refactoring.
    Private Sub AdjustMenus(sender As Object, state As State) Handles _Session.StateChanged
        Try
            BeginInvoke(
                Sub(newstate As State)
                    Try
                        ChartToolStripMenuItem.Enabled = _Session.Model IsNot Nothing
                        Select Case state
                            Case State.Idle
                                CrosshairsToolStripMenuItem.Enabled = True
                                FindTargetsToolStripMenuItem.Enabled = True
                                MeasureToolStripMenuItem.Enabled = False
                                FinaliseMeasurementToolStripMenuItem.Enabled = False
                                OptimiseToolStripMenuItem.Enabled = _Session.Planes.Any
                                SaveAsToolStripMenuItem.Enabled = _Session.Model IsNot Nothing
                                ChartToolStripMenuItem.Enabled = _Session.Model IsNot Nothing
                            Case State.FindFloor
                                CrosshairsToolStripMenuItem.Enabled = False
                                FindTargetsToolStripMenuItem.Enabled = False
                                MeasureToolStripMenuItem.Enabled = False
                                FinaliseMeasurementToolStripMenuItem.Enabled = True
                                OptimiseToolStripMenuItem.Enabled = False
                                SaveAsToolStripMenuItem.Enabled = _Session.Model IsNot Nothing
                            Case State.FindTargets
                                CrosshairsToolStripMenuItem.Enabled = False
                                FindTargetsToolStripMenuItem.Enabled = True
                                MeasureToolStripMenuItem.Enabled = False
                                FinaliseMeasurementToolStripMenuItem.Enabled = False
                                OptimiseToolStripMenuItem.Enabled = False
                                SaveAsToolStripMenuItem.Enabled = _Session.Model IsNot Nothing
                            Case State.ImproveTargets
                                CrosshairsToolStripMenuItem.Enabled = True
                                FindTargetsToolStripMenuItem.Enabled = True
                                MeasureToolStripMenuItem.Enabled = True
                                FinaliseMeasurementToolStripMenuItem.Enabled = False
                                OptimiseToolStripMenuItem.Enabled = False
                                SaveAsToolStripMenuItem.Enabled = _Session.Model IsNot Nothing
                            Case State.Measure
                                CrosshairsToolStripMenuItem.Enabled = False
                                FindTargetsToolStripMenuItem.Enabled = False
                                MeasureToolStripMenuItem.Enabled = False
                                FinaliseMeasurementToolStripMenuItem.Enabled = True
                                OptimiseToolStripMenuItem.Enabled = False
                                SaveAsToolStripMenuItem.Enabled = _Session.Model IsNot Nothing
                            Case State.NoTargets
                                CrosshairsToolStripMenuItem.Enabled = True
                                FindTargetsToolStripMenuItem.Enabled = True
                                MeasureToolStripMenuItem.Enabled = False
                                FinaliseMeasurementToolStripMenuItem.Enabled = False
                                OptimiseToolStripMenuItem.Enabled = False
                                SaveAsToolStripMenuItem.Enabled = _Session.Model IsNot Nothing
                        End Select
                    Catch ex As Exception
                        HandleError(ex)
                    End Try
                End Sub,
                state)
        Catch ex As Exception
            HandleError(ex)
        End Try
    End Sub
End Class
| 6,843
|
https://github.com/nanditho/UnityTerrain/blob/master/Assets/PlanetTerrain/Scripts/Creator/CameraOrbit.cs
|
Github Open Source
|
Open Source
|
MIT
| 2,019
|
UnityTerrain
|
nanditho
|
C#
|
Code
| 103
| 416
|
using UnityEngine;
using System.Collections;
// Orbit-and-zoom camera for the planet terrain preview.
// Hold left shift to orbit around the origin with the mouse; x zooms in, z zooms out.
public class CameraOrbit : MonoBehaviour {

    public Transform planet;

    private PlanetData planetData;        // planet parameters (provides radius)
    private Camera orbitCamera;
    private float orbitDistance = 1000f;  // current height above the planet surface
    private float rotationSpeed = 50f;

    void Start() {
        planetData = planet.GetComponent<PlanetTerrain>().planet;
        orbitCamera = gameObject.GetComponent<Camera>();
    }

    void Update() {
        if (Input.GetKeyUp("left shift")) {
            // Orbit key released: give the cursor back.
            Cursor.lockState = CursorLockMode.None;
            Cursor.visible = true;
        } else if (Input.GetKey("left shift")) {
            // While shift is held, capture the cursor and orbit about the origin.
            Cursor.lockState = CursorLockMode.Locked;
            Cursor.visible = false;
            float yaw = Input.GetAxis("Mouse X") * Time.deltaTime * rotationSpeed;
            float pitch = -Input.GetAxis("Mouse Y") * Time.deltaTime * rotationSpeed;
            transform.RotateAround(Vector3.zero, Vector3.up, yaw);
            transform.RotateAround(Vector3.zero, transform.right, pitch);
        }

        // Zoom: x moves closer, z moves away (clamped so we stay inside the far plane).
        if (Input.GetKey("x")) {
            orbitDistance *= 0.99f;
        } else if (Input.GetKey("z")) {
            orbitDistance *= 1.01f;
            if (planetData.radius + orbitDistance + orbitCamera.nearClipPlane > orbitCamera.farClipPlane) {
                orbitDistance = orbitCamera.farClipPlane - planetData.radius - orbitCamera.nearClipPlane;
            }
        }

        // Re-project the camera onto a sphere of the chosen radius around the origin.
        transform.position = transform.position.normalized * (planetData.radius + orbitDistance + orbitCamera.nearClipPlane);
    }
}
| 15,676
|
https://github.com/Evil1991/bitrixdock/blob/master/www/html/bitrix/modules/main/lib/phonenumber/metadataprovider.php
|
Github Open Source
|
Open Source
|
MIT
| 2,022
|
bitrixdock
|
Evil1991
|
PHP
|
Code
| 257
| 971
|
<?php
namespace Bitrix\Main\PhoneNumber;
use Bitrix\Main\IO\File;
use Bitrix\Main\SystemException;
/**
 * Singleton provider of parsed libphonenumber metadata.
 * Loads the pre-parsed metadata file on construction and exposes
 * lookups by phone country code and by 2-letter country code.
 */
class MetadataProvider
{
    /** @var array Metadata records keyed by 2-letter country code. */
    protected $metadata;
    /** @var array Map: phone country code => list of 2-letter country codes. */
    protected $codeToCountries;
    /** @var MetadataProvider */
    protected static $instance;

    const PARSED_METADATA_FILENAME = 'metadata.php';

    protected function __construct()
    {
        $this->loadMetadata();
    }

    /**
     * Returns instance of MetadataProvider.
     * @return MetadataProvider
     */
    public static function getInstance()
    {
        if(is_null(static::$instance))
        {
            static::$instance = new static();
        }
        return static::$instance;
    }

    /**
     * Returns array of 2-letter country codes of the countries, sharing the specified phone country code.
     * @param string $countryCode Phone country code.
     * @return array
     */
    public function getCountriesByCode($countryCode)
    {
        // isset() guard avoids an undefined-index notice for unknown codes.
        return isset($this->codeToCountries[$countryCode]) && is_array($this->codeToCountries[$countryCode])
            ? $this->codeToCountries[$countryCode]
            : array();
    }

    /**
     * Tells whether the phone country code is known.
     * @param string $countryCode Phone country code.
     * @return bool
     */
    public function isValidCountryCode($countryCode)
    {
        return isset($this->codeToCountries[$countryCode]);
    }

    /**
     * Returns metadata record for the country.
     * @param string $country 2-letter country code.
     * @return array|false
     */
    public function getCountryMetadata($country)
    {
        $country = mb_strtoupper($country);
        return isset($this->metadata[$country]) ? $this->metadata[$country] : false;
    }

    /**
     * Returns both internal maps (for serialization / caching).
     * @return array
     */
    public function toArray()
    {
        return array(
            'codeToCountries' => $this->codeToCountries,
            'metadata' => $this->metadata
        );
    }

    /**
     * Parses google metadata from the PhoneNumberMetadata.xml
     * @see https://github.com/googlei18n/libphonenumber/blob/master/resources/
     * @param string $fileName Metadata file.
     * @return array Returns parsed metadata.
     */
    public static function parseGoogleMetadata($fileName)
    {
        $metadataBuilder = new \Bitrix\Main\PhoneNumber\Tools\MetadataBuilder($fileName);
        $metadata = $metadataBuilder->build();
        $codeToCountries = array();
        foreach ($metadata as $metadataRecord)
        {
            $country = mb_strtoupper($metadataRecord['id']);
            // isset() instead of is_array(): avoids an undefined-index notice
            // the first time a country code is seen.
            if(!isset($codeToCountries[$metadataRecord['countryCode']]))
            {
                $codeToCountries[$metadataRecord['countryCode']] = array();
            }
            // The main country for a shared code is placed first in the list.
            if($metadataRecord['mainCountryForCode'])
                array_unshift($codeToCountries[$metadataRecord['countryCode']], $country);
            else
                $codeToCountries[$metadataRecord['countryCode']][] = $country;
        }
        return array(
            'codeToCountries' => $codeToCountries,
            'metadata' => $metadata
        );
    }

    /**
     * Loads parsed metadata.
     * @return void
     * @throws SystemException When the parsed metadata file is missing.
     */
    protected function loadMetadata()
    {
        // BUG FIX: the original condition was inverted — it threw
        // "Metadata file is not found" when the file DID exist, making the
        // include below unreachable. Throw only when the file is missing.
        if(!File::isFileExists(static::PARSED_METADATA_FILENAME))
            throw new SystemException("Metadata file is not found");
        $parsedMetadata = include(static::PARSED_METADATA_FILENAME);
        $this->codeToCountries = $parsedMetadata['codeToCountries'];
        foreach ($parsedMetadata['metadata'] as $metadataRecord)
        {
            $this->metadata[$metadataRecord['id']] = $metadataRecord;
        }
    }
}
| 36,617
|
https://github.com/a316523235/go-study/blob/master/spider/v1.go
|
Github Open Source
|
Open Source
|
Apache-2.0
| null |
go-study
|
a316523235
|
Go
|
Code
| 230
| 955
|
package spider
import (
"fmt"
"github.com/PuerkitoBio/goquery"
"io/ioutil"
"net/http"
"os"
"strconv"
"time"
)
// CreateDateDir ensures that ./images/<basePath> exists, creating it when
// os.Stat reports it missing. A creation failure aborts the program.
func CreateDateDir(basePath string) {
	dir := "./images/" + basePath
	if _, statErr := os.Stat(dir); statErr != nil {
		fmt.Println("不存在文件")
		if os.IsNotExist(statErr) {
			if mkErr := os.Mkdir(dir, os.ModePerm); mkErr != nil {
				fmt.Printf("创建失败![%v]\n", mkErr)
				os.Exit(1)
			}
		}
	}
}
// download fetches the image at herf and saves it as
// ./images/<path>/<unix-nanos>.jpg, creating the folder first.
func download(herf string, path string) {
	CreateDateDir(path) // make sure the target folder exists
	var a = time.Now().UnixNano()
	fmt.Println(herf)
	resp, err := http.Get(herf)
	if err != nil {
		fmt.Println("访问图片出错")
		// BUG FIX: the original fell through after the error and
		// dereferenced a nil resp (panic). Bail out instead.
		return
	}
	// BUG FIX: the response body was never closed, leaking the connection.
	defer resp.Body.Close()
	_data, _err2 := ioutil.ReadAll(resp.Body)
	if _err2 != nil {
		panic(_err2)
	}
	// Save locally; the write error used to be silently discarded.
	if werr := ioutil.WriteFile(fmt.Sprintf("./images/"+path+"/%d.jpg", a), _data, 0644); werr != nil {
		fmt.Printf("保存失败![%v]\n", werr)
		return
	}
	fmt.Println("图片下载成功")
}
// getwinimage scrapes one gallery page (second crawl level): it reads the
// category name from the breadcrumb and downloads every slide image,
// sleeping one second between downloads.
func getwinimage(urls string) {
	resp, err := http.Get(urls)
	if err != nil {
		fmt.Println("http get error", err)
		return
	}
	// BUG FIX: the defer used to sit *after* the parse-error return below,
	// so a goquery failure leaked the response body. Close unconditionally.
	defer resp.Body.Close()
	doc, err := goquery.NewDocumentFromReader(resp.Body)
	if err != nil {
		return
	}
	pathname := doc.Find(".arc_location a").Eq(2).Text() // third breadcrumb link = category name
	fmt.Println(pathname)
	doc.Find("div.fleft.arc_pic .swiper-wrapper a").Each(func(i int, selection *goquery.Selection) {
		fmt.Println(selection.Attr("src"))
		// NOTE(review): reading "src" from an <a> element is unusual —
		// confirm the site really carries the image URL there (not "href").
		_url, _err := selection.Attr("src")
		if _err { // Attr's second result is true when the attribute exists
			download(_url, pathname)
			time.Sleep(1 * time.Second) // be polite to the server
		}
	})
}
// getimages scrapes one list page (first crawl level) and follows every
// gallery link it finds.
func getimages(urls string) {
	resp, err := http.Get(urls)
	if err != nil {
		fmt.Println("http get error", err)
		return
	}
	// BUG FIX: the defer used to sit *after* the parse-error return below,
	// so a goquery failure leaked the response body. Close unconditionally.
	defer resp.Body.Close()
	doc, err := goquery.NewDocumentFromReader(resp.Body)
	if err != nil {
		return
	}
	doc.Find(".egeli_pic_dl dd a").Each(func(i int, selection *goquery.Selection) {
		_href, _err := selection.Attr("href")
		// Attr's second result is true when the attribute exists.
		if _err {
			getwinimage(_href)
		}
	})
}
// Main is the spider entry point: it crawls the configured range of list
// pages (currently only the first page).
func Main() {
	fmt.Println("开始爬取")
	const firstPage, lastPage = 1, 1 // pages to crawl
	for page := firstPage; page <= lastPage; page++ {
		getimages("https://mm.enterdesk.com/dongmanmeinv/" + strconv.Itoa(page) + ".html")
	}
	fmt.Println("结束爬取")
}
| 1,169
|
https://github.com/anicholson/mode-mode/blob/master/server/features/step_definitions/mode_steps.rb
|
Github Open Source
|
Open Source
|
MIT
| null |
mode-mode
|
anicholson
|
Ruby
|
Code
| 45
| 194
|
# Cucumber step definitions for the mode-loading feature.

# Stub YAML.load_file so the loader sees the table rows as the mode list,
# and grab the modes repository from the application container.
Given(/^the following mode list:$/) do |table|
  allow(YAML).to receive(:load_file).and_return(modes: table.symbolic_hashes)
  @modes_repo = Application['modes_repository']
end

# Record the repository size before loading so the Then step can assert on
# the delta rather than on an absolute count.
When(/^I load the modes$/) do
  @pre_load_count = @modes_repo.count
  loader = Application['mode_loader']
  loader.load_modes
  @lol = "Pre: #{@pre_load_count}. Post: #{ @modes_repo.count }" # debug aid; not read by any step here
end

Then(/^There are (\d+) modes$/) do |mode_count|
  expect(@modes_repo.count).to eq(Integer(mode_count) + @pre_load_count)
end
| 39,466
|
https://github.com/ScottishCovidResponse/Contact-Tracing-Model/blob/master/src/main/java/uk/co/ramp/statistics/types/Infection.java
|
Github Open Source
|
Open Source
|
BSD-2-Clause
| 2,020
|
Contact-Tracing-Model
|
ScottishCovidResponse
|
Java
|
Code
| 17
| 77
|
package uk.co.ramp.statistics.types;
import org.immutables.gson.Gson;
import org.immutables.value.Value;
/**
 * Immutable value pair used by the statistics module: a seed and the number
 * of infections attributed to it. The implementation class and a Gson type
 * adapter are generated by the org.immutables annotation processor.
 */
@Value.Immutable
@Gson.TypeAdapters
public interface Infection {
  // NOTE(review): presumably identifies the seeding case or time step —
  // confirm against the statistics writer before relying on this.
  int seed();

  // Count of infections attributed to that seed.
  int infections();
}
| 38,595
|
https://github.com/abdelazeem201/-Radar-Display-using-FPGA/blob/master/RTL/Address_Generator.vhd
|
Github Open Source
|
Open Source
|
MIT
| null |
-Radar-Display-using-FPGA
|
abdelazeem201
|
VHDL
|
Code
| 75
| 258
|
library IEEE;
use IEEE.STD_LOGIC_1164.ALL;
use IEEE.STD_LOGIC_unsigned.ALL;
-- Address generator: a 9-bit down-counter. On reset it loads 269
-- ("100001101"); each clock it decrements, and when it reaches 0 it reloads
-- with 359 ("101100111"). y_axis is asserted while the count exceeds 180.
entity addr is
    Port ( reset,clk : in STD_LOGIC;
           y_axis : out STD_LOGIC;
           dout : out STD_LOGIC_VECTOR (8 downto 0));
end addr;

architecture Behavioral of addr is
    -- Current count; reset value 269, wrap-around reload value 359.
    signal reg: std_logic_vector(8 downto 0):="100001101";
    -- CLEANUP: removed the unused signals flag/const/up/down — they were
    -- declared but never read or driven anywhere in this architecture.
begin
    process(clk,reset)
    begin
        if reset='1' then
            reg<="100001101";
        elsif rising_edge(clk) then
            if reg=0 then
                reg<="101100111";
            else
                reg<=reg-1;
            end if;
        end if;
    end process;

    dout<=reg;
    -- High for the upper part of the count range (reg > 180).
    y_axis<='1' when reg>180 else '0';
end Behavioral;
| 32,724
|
https://github.com/zhoujiajia123/zchat/blob/master/src/main/java/com/zj/zchat/shiro/ShiroConfiguration.java
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,019
|
zchat
|
zhoujiajia123
|
Java
|
Code
| 216
| 1,586
|
package com.zj.zchat.shiro;
import org.apache.shiro.authc.credential.HashedCredentialsMatcher;
import org.apache.shiro.codec.Base64;
import org.apache.shiro.spring.LifecycleBeanPostProcessor;
import org.apache.shiro.spring.security.interceptor.AuthorizationAttributeSourceAdvisor;
import org.apache.shiro.spring.web.ShiroFilterFactoryBean;
import org.apache.shiro.web.mgt.CookieRememberMeManager;
import org.apache.shiro.web.mgt.DefaultWebSecurityManager;
import org.apache.shiro.web.servlet.SimpleCookie;
import org.springframework.aop.framework.autoproxy.DefaultAdvisorAutoProxyCreator;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.DependsOn;
import java.util.LinkedHashMap;
import java.util.Map;
/**
 * Shiro configuration class.
 * Created by cdyoue on 2016/10/21.
 */
@Configuration
public class ShiroConfiguration {
    /**
     * LifecycleBeanPostProcessor (a DestructionAwareBeanPostProcessor subclass)
     * manages init/destroy of beans implementing org.apache.shiro.util.Initializable,
     * chiefly AuthorizingRealm subclasses and EhCacheManager.
     */
    @Bean("lifecycleBeanPostProcessor")
    public LifecycleBeanPostProcessor lifecycleBeanPostProcessor() {
        return new LifecycleBeanPostProcessor();
    }

    /**
     * HashedCredentialsMatcher hashes credentials so passwords are not kept in
     * clear text in the database; it also hashes the password submitted on the
     * login form before comparison.
     * NOTE(review): MD5 with 2 iterations is weak for password storage —
     * consider a stronger algorithm/KDF if the stored hashes can be migrated.
     */
    @Bean("hashedCredentialsMatcher")
    public HashedCredentialsMatcher hashedCredentialsMatcher(){
        HashedCredentialsMatcher credentialsMatcher=new HashedCredentialsMatcher();
        credentialsMatcher.setHashAlgorithmName("MD5");
        credentialsMatcher.setHashIterations(2);
        credentialsMatcher.setStoredCredentialsHexEncoded(true);
        return credentialsMatcher;
    }

    /**
     * ShiroRealm is the custom realm (extends AuthorizingRealm) responsible for
     * authentication and authorization; compare JdbcRealm for a reference
     * implementation.
     */
    @Bean("shiroRealm")
    @DependsOn("lifecycleBeanPostProcessor")
    public ShiroRealm shiroRealm(){
        ShiroRealm realm = new ShiroRealm();
        /* When enabled, the password carried by the token is hashed automatically during verification. */
        //realm.setCredentialsMatcher(hashedCredentialsMatcher());
        // BUG FIX: this method used to return a *second* "new ShiroRealm()",
        // discarding the instance configured above — anything applied to
        // "realm" (e.g. the credentials matcher) was silently lost.
        return realm;
    }

    @Bean
    public SimpleCookie rememberMeCookie(){
        SimpleCookie simpleCookie=new SimpleCookie("rememberMe");
        simpleCookie.setHttpOnly(true);
        // Cookie lifetime in seconds (180s). NOTE(review): unusually short for
        // a remember-me cookie — confirm this is intentional.
        simpleCookie.setMaxAge(180);
        return simpleCookie;
    }

    @Bean
    public CookieRememberMeManager rememberMeManager(){
        CookieRememberMeManager cookieRememberMeManager = new CookieRememberMeManager();
        cookieRememberMeManager.setCookie(rememberMeCookie());
        // NOTE(review): hard-coded cipher key in source — rotating it to an
        // externalized secret would be safer.
        cookieRememberMeManager.setCipherKey(Base64.decode("2AvVhdsgUs0FSA3SDFAdag=="));
        return cookieRememberMeManager;
    }

    /**
     * SecurityManager wires together login, logout, authorization and session
     * handling; it is the central Shiro component.
     */
    @Bean("securityManager")
    public DefaultWebSecurityManager securityManager(){
        DefaultWebSecurityManager securityManager = new DefaultWebSecurityManager();
        securityManager.setRealm(shiroRealm());
        return securityManager;
    }

    /**
     * ShiroFilterFactoryBean is a FactoryBean producing the ShiroFilter.
     * It holds the securityManager, the filters and the filter-chain
     * definition map (URL pattern -> filter, first match wins).
     */
    @Bean("shiroFilter")
    public ShiroFilterFactoryBean shiroFilterFactoryBean(){
        ShiroFilterFactoryBean shiroFilterFactoryBean = new ShiroFilterFactoryBean();
        shiroFilterFactoryBean.setSecurityManager(securityManager());
        // LinkedHashMap: definition order matters to Shiro's chain resolution.
        Map<String,String> filterChainDefinitionManager=new LinkedHashMap<>();
        filterChainDefinitionManager.put("/login/logout","logout");
        filterChainDefinitionManager.put("/login/indexpage","authc");
        filterChainDefinitionManager.put("/login/**","anon");
        filterChainDefinitionManager.put("/**","authc");
        shiroFilterFactoryBean.setSuccessUrl("/index");
        shiroFilterFactoryBean.setUnauthorizedUrl("/403");
        shiroFilterFactoryBean.setLoginUrl("/login/loginpage");
        shiroFilterFactoryBean.setFilterChainDefinitionMap(filterChainDefinitionManager);
        return shiroFilterFactoryBean;
    }

    /**
     * DefaultAdvisorAutoProxyCreator: Spring bean letting Advisors decide which
     * classes' methods get AOP proxies.
     */
    @Bean
    @ConditionalOnMissingBean
    public DefaultAdvisorAutoProxyCreator defaultAdvisorAutoProxyCreator(){
        DefaultAdvisorAutoProxyCreator defaultAAP=new DefaultAdvisorAutoProxyCreator();
        defaultAAP.setProxyTargetClass(true);
        return defaultAAP;
    }

    /**
     * AuthorizationAttributeSourceAdvisor: Shiro's Advisor that uses
     * AopAllianceAnnotationsAuthorizingMethodInterceptor to intercept methods
     * annotated with Shiro's authorization annotations.
     */
    @Bean
    public AuthorizationAttributeSourceAdvisor authorizationAttributeSourceAdvisor(){
        AuthorizationAttributeSourceAdvisor aASA = new AuthorizationAttributeSourceAdvisor();
        aASA.setSecurityManager(securityManager());
        return aASA;
    }
}
| 18,212
|
https://github.com/yangliuy/Intent-Aware-Ranking-Transformers/blob/master/IART/utils/preprocess.py
|
Github Open Source
|
Open Source
|
MIT
| 2,021
|
Intent-Aware-Ranking-Transformers
|
yangliuy
|
Python
|
Code
| 2,405
| 8,184
|
# -*- coding: utf-8 -*-
from __future__ import print_function
from nltk.tokenize import word_tokenize
import jieba
import sys
import numpy as np
from nltk.corpus import stopwords as nltk_stopwords
from nltk.stem import SnowballStemmer
from tqdm import tqdm
sys.path.append('../inputs')
sys.path.append('../utils')
from preparation import *
# from rank_io import *
class Preprocess(object):
    """Configurable text preprocessing pipeline.

    Stages (each switchable via its config dict): word segmentation,
    document-length filtering, stemming, lowercasing, word filtering
    (stop words / document frequency) and word-to-id indexing. The
    ``*_2d`` variants operate on corpora where each document is a list
    of utterances (e.g. dialogue contexts) instead of one token list.

    NOTE(review): this code targets Python 2 (``sys.maxint``,
    ``str.decode`` on file lines, ``dict.iteritems``) and will not run
    unmodified on Python 3.
    """
    # Languages accepted by word_seg(); dispatches to word_seg_en / word_seg_cn.
    _valid_lang = ['en', 'cn']
    _stemmer = SnowballStemmer('english')

    def __init__(self,
                 word_seg_config = {},
                 doc_filter_config = {},
                 word_stem_config = {},
                 word_lower_config = {},
                 word_filter_config = {},
                 word_index_config = {}
                 ):
        # Mutable default arguments are safe here: they are only read
        # (via .update onto fresh dicts), never mutated.
        # set default configuration
        self._word_seg_config = { 'enable': True, 'lang': 'en' }
        self._doc_filter_config = { 'enable': True, 'min_len': 0, 'max_len': sys.maxint }
        self._word_stem_config = { 'enable': True }
        self._word_lower_config = { 'enable': True }
        self._word_filter_config = { 'enable': True, 'stop_words': nltk_stopwords.words('english'),
                                     'min_freq': 0, 'max_freq': sys.maxint, 'words_useless': None }
        self._word_index_config = { 'word_dict': None }
        # Caller-supplied settings override the defaults key by key.
        self._word_seg_config.update(word_seg_config)
        self._doc_filter_config.update(doc_filter_config)
        self._word_stem_config.update(word_stem_config)
        self._word_lower_config.update(word_lower_config)
        self._word_filter_config.update(word_filter_config)
        self._word_index_config.update(word_index_config)
        self._word_dict = self._word_index_config['word_dict']
        self._words_stats = dict()

    def run(self, file_path):
        """Run the full 1-D pipeline on ``file_path``; return (doc ids, indexed docs)."""
        print('load...')
        dids, docs = Preprocess.load(file_path)
        if self._word_seg_config['enable']:
            print('word_seg...')
            docs = Preprocess.word_seg(docs, self._word_seg_config)
        if self._doc_filter_config['enable']:
            print('doc_filter...')
            dids, docs = Preprocess.doc_filter(dids, docs, self._doc_filter_config)
        if self._word_stem_config['enable']:
            print('word_stem...')
            docs = Preprocess.word_stem(docs)
        if self._word_lower_config['enable']:
            print('word_lower...')
            docs = Preprocess.word_lower(docs)
        self._words_stats = Preprocess.cal_words_stat(docs)
        if self._word_filter_config['enable']:
            print('word_filter...')
            docs, self._words_useless = Preprocess.word_filter(docs, self._word_filter_config, self._words_stats)
        print('word_index...')
        docs, self._word_dict = Preprocess.word_index(docs, self._word_index_config)
        return dids, docs

    def run_2d(self, file_path):
        """Run the pipeline on a corpus whose documents are utterance lists."""
        print('load...')
        dids, docs = Preprocess.load_2d(file_path)
        # dids: a list of corpus ids
        # docs: a list of context/responses. The context is separated by \t
        print('transfer to 2d docs...')
        # firstly transfer docs to a 2D list [corpus_text_size, utterance_list]
        # a corpus text could be a list of utterances (for context) or 1 utterance (for response)
        docs_2d = Preprocess.transfer_to_2ddocs(docs)
        if self._word_seg_config['enable']:
            print('word_seg...')
            docs_2d = Preprocess.word_seg_2d(docs_2d)
        if self._word_stem_config['enable']:
            print('word_stem...')
            docs_2d = Preprocess.word_stem_2d(docs_2d)
        if self._word_lower_config['enable']:
            print('word_lower...')
            docs_2d = Preprocess.word_lower_2d(docs_2d)
        # print ('after word_lower, docs_2d[0:100] = ', docs_2d[0:100])
        print('cal_words_stat...')
        self._words_stats = Preprocess.cal_words_stat_2d(docs_2d)
        if self._word_filter_config['enable']:
            print('word_filter...')
            docs_2d, self._words_useless = Preprocess.word_filter_2d(docs_2d, self._word_filter_config, self._words_stats)
        print('word_index...')
        docs_2d, self._word_dict = Preprocess.word_index_2d(docs_2d, self._word_index_config)
        return dids, docs_2d

    def run_2d_smn(self, file_path):
        '''
        Minimize the preprocess steps to be consistent with Yu Wu's SMN code.
        Refer to the build_multiturn_data function in PreProcess.py of the
        Theano code of Yu Wu's SMN source code.
        :param file_path:
        :return:
        '''
        print('load...')
        dids, docs = Preprocess.load_2d(file_path)
        # removed _ as what Yu Wu did in SMN preprocess code
        # dids: a list of corpus ids
        # docs: a list of context/responses. The context is separated by \t
        print('transfer to 2d docs...')
        # firstly transfer docs to a 2D list [corpus_text_size, utterance_list]
        # a corpus text could be a list of utterances (for context) or 1 utterance (for response)
        docs_2d = Preprocess.transfer_to_2ddocs(docs)
        print('word_seg... (necessary for ms_dialog data)')
        docs_2d = Preprocess.word_seg_2d(docs_2d)
        print('word_lower... (necessary for ms_dialog data)')
        docs_2d = Preprocess.word_lower_2d(docs_2d)
        print('following SMN, just split with split() and index...')
        print('build word dict...')
        words = set()
        for c_text in tqdm(docs_2d):
            for utt in c_text:
                # words.update(set(utt.split()))
                words.update(set(utt))
        print('vocab size: ', len(words))
        # Ids start at 1 — presumably 0 is reserved for padding; confirm
        # against the downstream model code.
        word_id = 1
        self._word_dict = {}
        for word in words:
            self._word_dict[word] = word_id
            word_id += 1
        print('map words to ids ...')
        docs_index = []
        for doc in tqdm(docs_2d):
            # docs_index.append([[self._word_dict[w] for w in utt.split()] for utt in doc])
            docs_index.append([[self._word_dict[w] for w in utt] for utt in doc])
        return dids, docs_index

    @staticmethod
    def transfer_to_2ddocs(docs):
        '''
        transfer a docs to a 2 dimensional docs [corpus_text_size, utterance_list]
        a corpus text could be a list of utterances (for context) or 1 utterance (for response)
        '''
        docs_2d = []
        for c_text in tqdm(docs):
            docs_2d.append(list(c_text.split('\t')))
        return docs_2d

    @staticmethod
    def parse(line):
        # Split "<id> <text>" on the first space only; text may be empty.
        subs = line.split(' ', 1)
        if 1 == len(subs):
            return subs[0], ''
        else:
            return subs[0], subs[1]

    @staticmethod
    def load(file_path):
        """Read "<id> <text>" lines; return parallel lists of ids and texts."""
        dids = list()
        docs = list()
        f = open(file_path, 'r')
        for line in tqdm(f):
            # Python 2: file iteration yields byte strings; decode to unicode.
            line = line.decode('utf8')
            line = line.strip()
            if '' != line:
                did, doc = Preprocess.parse(line)
                dids.append(did)
                docs.append(doc)
        f.close()
        return dids, docs

    @staticmethod
    def load_2d(file_path):
        """Read tab-separated "<id>\\t<utt1>\\t<utt2>..." lines."""
        dids = list()
        docs = list()
        f = open(file_path, 'r')
        for line in tqdm(f):
            line = line.decode('utf8')
            line = line.replace("_", "") # same with SMN code by Yu Wu
            line = line.strip()
            if '' != line:
                subs = line.split('\t')
                # First field is the id; the rest is re-joined with tabs.
                did, doc = subs[0], '\t'.join(subs[1:len(subs)])
                dids.append(did)
                docs.append(doc)
        f.close()
        return dids, docs

    @staticmethod
    def word_seg_2d(docs):
        # NOTE(review): always uses the English nltk tokenizer, ignoring the
        # 'lang' setting honoured by the 1-D word_seg dispatcher.
        docs_seg = []
        for doc in tqdm(docs):
            docs_seg.append([word_tokenize(utt) for utt in doc])
        return docs_seg

    @staticmethod
    def word_seg_en(docs):
        docs = [word_tokenize(sent) for sent in tqdm(docs)]
        # show the progress of word segmentation with tqdm
        return docs

    @staticmethod
    def word_seg_cn(docs):
        # Chinese segmentation via jieba.
        docs = [list(jieba.cut(sent)) for sent in docs]
        return docs

    @staticmethod
    def word_seg(docs, config):
        assert config['lang'].lower() in Preprocess._valid_lang, 'Wrong language type: %s' % config['lang']
        # Dispatch to word_seg_<lang> by building the attribute name from this
        # function's own name (sys._getframe().f_code.co_name == 'word_seg').
        docs = getattr(Preprocess, '%s_%s' % (sys._getframe().f_code.co_name, config['lang']))(docs)
        return docs

    @staticmethod
    def cal_words_stat(docs):
        """Per-word stats: cf = collection freq, df = doc freq, idf = smoothed log."""
        words_stats = {}
        docs_num = len(docs)
        for ws in docs:
            for w in ws:
                if w not in words_stats:
                    words_stats[w] = {}
                    words_stats[w]['cf'] = 0
                    words_stats[w]['df'] = 0
                    words_stats[w]['idf'] = 0
                words_stats[w]['cf'] += 1
            # df counts each document at most once per word.
            for w in set(ws):
                words_stats[w]['df'] += 1
        for w, winfo in words_stats.items():
            words_stats[w]['idf'] = np.log( (1. + docs_num) / (1. + winfo['df']))
        return words_stats

    @staticmethod
    def cal_words_stat_2d(docs):
        """2-D variant of cal_words_stat; df is counted per utterance, not per document."""
        words_stats = {}
        docs_num = len(docs)
        for ws in tqdm(docs): # for each corpus text
            for ww in ws: # for each utterance
                for w in ww: # for each word
                    if w not in words_stats:
                        words_stats[w] = {}
                        words_stats[w]['cf'] = 0
                        words_stats[w]['df'] = 0
                        words_stats[w]['idf'] = 0
                    words_stats[w]['cf'] += 1
                for w in set(ww):
                    words_stats[w]['df'] += 1
        for w, winfo in words_stats.items():
            words_stats[w]['idf'] = np.log((1. + docs_num) / (1. + winfo['df']))
        return words_stats

    @staticmethod
    def word_filter(docs, config, words_stats):
        """Drop stop words and words outside [min_freq, max_freq] (by df)."""
        if config['words_useless'] is None:
            config['words_useless'] = set()
            # filter with stop_words
            config['words_useless'].update(config['stop_words'])
            # filter with min_freq and max_freq
            for w, winfo in words_stats.items():
                # filter too frequent words or rare words
                if config['min_freq'] > winfo['df'] or config['max_freq'] < winfo['df']:
                    config['words_useless'].add(w)
        # filter with useless words
        docs = [[w for w in ws if w not in config['words_useless']] for ws in tqdm(docs)]
        return docs, config['words_useless']

    @staticmethod
    def word_filter_2d(docs, config, words_stats):
        """2-D variant of word_filter."""
        if config['words_useless'] is None:
            config['words_useless'] = set()
            # filter with stop_words
            config['words_useless'].update(config['stop_words'])
            # filter with min_freq and max_freq
            for w, winfo in words_stats.items():
                # filter too frequent words or rare words
                if config['min_freq'] > winfo['df'] or config['max_freq'] < winfo['df']:
                    config['words_useless'].add(w)
        # filter with useless words
        print('filter useless words: ', len(config['words_useless']))
        docs_filter_word = []
        for doc in tqdm(docs):
            docs_filter_word.append([[w for w in ws if w not in config['words_useless']] for ws in doc])
        return docs_filter_word, config['words_useless']

    @staticmethod
    def doc_filter(dids, docs, config):
        """Keep only documents whose token count is within [min_len, max_len]."""
        new_docs = list()
        new_dids = list()
        for i in tqdm(range(len(docs))):
            if config['min_len'] <= len(docs[i]) <= config['max_len']:
                new_docs.append(docs[i])
                new_dids.append(dids[i])
        return new_dids, new_docs

    @staticmethod
    def word_stem(docs):
        # English Snowball stemming on every token.
        docs = [[Preprocess._stemmer.stem(w) for w in ws] for ws in tqdm(docs)]
        return docs

    @staticmethod
    def word_stem_2d(docs):
        docs_stem = []
        for doc in tqdm(docs):
            docs_stem.append([[Preprocess._stemmer.stem(w) for w in ws] for ws in doc])
        return docs_stem

    @staticmethod
    def word_lower(docs):
        docs = [[w.lower() for w in ws] for ws in tqdm(docs)]
        return docs

    @staticmethod
    def word_lower_2d(docs):
        docs_lower = []
        for doc in tqdm(docs):
            docs_lower.append([[w.lower() for w in ws] for ws in doc])
        return docs_lower

    @staticmethod
    def build_word_dict(docs):
        # Ids are assigned in first-seen order starting at 0.
        word_dict = dict()
        for ws in docs:
            for w in ws:
                word_dict.setdefault(w, len(word_dict))
        return word_dict

    @staticmethod
    def build_word_dict_2d(docs):
        word_dict = dict()
        for doc in docs:
            for ws in doc:
                for w in ws:
                    word_dict.setdefault(w, len(word_dict))
        return word_dict

    @staticmethod
    def word_index(docs, config):
        """Map tokens to ids; out-of-vocabulary tokens are silently dropped."""
        if config['word_dict'] is None:
            config['word_dict'] = Preprocess.build_word_dict(docs)
        docs = [[config['word_dict'][w] for w in ws if w in config['word_dict']] for ws in tqdm(docs)]
        return docs, config['word_dict']

    @staticmethod
    def word_index_2d(docs, config):
        """2-D variant of word_index; OOV tokens are silently dropped."""
        if config['word_dict'] is None:
            config['word_dict'] = Preprocess.build_word_dict_2d(docs)
        docs_index = []
        for doc in tqdm(docs):
            docs_index.append([[config['word_dict'][w] for w in ws if w in config['word_dict']] for ws in doc])
        return docs_index, config['word_dict']

    @staticmethod
    def save_lines(file_path, lines):
        f = open(file_path, 'w')
        for line in lines:
            # Python 2: encode unicode back to bytes before writing.
            line = line.encode('utf8')
            f.write(line + "\n")
        f.close()

    @staticmethod
    def load_lines(file_path):
        f = open(file_path, 'r')
        lines = f.readlines()
        f.close()
        return lines

    @staticmethod
    def save_dict(file_path, dic, sort=False):
        """Write "key value" lines; optionally sorted ascending by value."""
        if sort:
            dic = sorted(dic.items(), key=lambda d:d[1], reverse=False)
            lines = ['%s %s' % (k, v) for k, v in dic]
        else:
            # NOTE(review): iteritems() is Python-2-only (items() on Python 3).
            lines = ['%s %s' % (k, v) for k, v in dic.iteritems()]
        Preprocess.save_lines(file_path, lines)

    @staticmethod
    def load_dict(file_path):
        # Values are kept as strings; callers must convert if needed.
        lines = Preprocess.load_lines(file_path)
        dic = dict()
        for line in lines:
            k, v = line.split()
            dic[k] = v
        return dic

    def save_words_useless(self, words_useless_fp):
        Preprocess.save_lines(words_useless_fp, self._words_useless)

    def load_words_useless(self, words_useless_fp):
        self._words_useless = set(Preprocess.load_lines(words_useless_fp))

    def save_word_dict(self, word_dict_fp, sort=False):
        Preprocess.save_dict(word_dict_fp, self._word_dict, sort)

    def load_word_dict(self, word_dict_fp):
        self._word_dict = Preprocess.load_dict(word_dict_fp)

    def save_words_stats(self, words_stats_fp, sort=False):
        """Write "wid cf df idf" lines (word *ids*, not the words themselves)."""
        if sort:
            word_dic = sorted(self._word_dict.items(), key=lambda d:d[1], reverse=False)
            lines = ['%s %d %d %f' % (wid, self._words_stats[w]['cf'], self._words_stats[w]['df'],
                                      self._words_stats[w]['idf']) for w, wid in word_dic]
        else:
            lines = ['%s %d %d %f' % (wid, self._words_stats[w]['cf'], self._words_stats[w]['df'],
                                      self._words_stats[w]['idf']) for w, wid in self._word_dict.items()]
        Preprocess.save_lines(words_stats_fp, lines)

    def load_words_stats(self, words_stats_fp):
        # Inverse of save_words_stats: stats end up keyed by word id.
        lines = Preprocess.load_lines(words_stats_fp)
        for line in lines:
            wid, cf, df, idf = line.split()
            self._words_stats[wid] = {}
            self._words_stats[wid]['cf'] = int(cf)
            self._words_stats[wid]['df'] = int(df)
            self._words_stats[wid]['idf'] = float(idf)
class NgramUtil(object):
def __init__(self):
    # All functionality is provided as static methods; instances hold no state.
    pass
@staticmethod
def unigrams(words):
"""
Input: a list of words, e.g., ["I", "am", "Denny"]
Output: a list of unigram
"""
assert type(words) == list
return words
@staticmethod
def bigrams(words, join_string, skip=0):
"""
Input: a list of words, e.g., ["I", "am", "Denny"]
Output: a list of bigram, e.g., ["I_am", "am_Denny"]
"""
assert type(words) == list
L = len(words)
if L > 1:
lst = []
for i in range(L - 1):
for k in range(1, skip + 2):
if i + k < L:
lst.append(join_string.join([words[i], words[i + k]]))
else:
# set it as unigram
lst = NgramUtil.unigrams(words)
return lst
@staticmethod
def trigrams(words, join_string, skip=0):
"""
Input: a list of words, e.g., ["I", "am", "Denny"]
Output: a list of trigram, e.g., ["I_am_Denny"]
"""
assert type(words) == list
L = len(words)
if L > 2:
lst = []
for i in range(L - 2):
for k1 in range(1, skip + 2):
for k2 in range(1, skip + 2):
if i + k1 < L and i + k1 + k2 < L:
lst.append(join_string.join([words[i], words[i + k1], words[i + k1 + k2]]))
else:
# set it as bigram
lst = NgramUtil.bigrams(words, join_string, skip)
return lst
@staticmethod
def fourgrams(words, join_string):
"""
Input: a list of words, e.g., ["I", "am", "Denny", "boy"]
Output: a list of trigram, e.g., ["I_am_Denny_boy"]
"""
assert type(words) == list
L = len(words)
if L > 3:
lst = []
for i in xrange(L - 3):
lst.append(join_string.join([words[i], words[i + 1], words[i + 2], words[i + 3]]))
else:
# set it as trigram
lst = NgramUtil.trigrams(words, join_string)
return lst
@staticmethod
def uniterms(words):
return NgramUtil.unigrams(words)
@staticmethod
def biterms(words, join_string):
"""
Input: a list of words, e.g., ["I", "am", "Denny", "boy"]
Output: a list of biterm, e.g., ["I_am", "I_Denny", "I_boy", "am_Denny", "am_boy", "Denny_boy"]
"""
assert type(words) == list
L = len(words)
if L > 1:
lst = []
for i in range(L - 1):
for j in range(i + 1, L):
lst.append(join_string.join([words[i], words[j]]))
else:
# set it as uniterm
lst = NgramUtil.uniterms(words)
return lst
@staticmethod
def triterms(words, join_string):
"""
Input: a list of words, e.g., ["I", "am", "Denny", "boy"]
Output: a list of triterm, e.g., ["I_am_Denny", "I_am_boy", "I_Denny_boy", "am_Denny_boy"]
"""
assert type(words) == list
L = len(words)
if L > 2:
lst = []
for i in xrange(L - 2):
for j in xrange(i + 1, L - 1):
for k in xrange(j + 1, L):
lst.append(join_string.join([words[i], words[j], words[k]]))
else:
# set it as biterm
lst = NgramUtil.biterms(words, join_string)
return lst
@staticmethod
def fourterms(words, join_string):
"""
Input: a list of words, e.g., ["I", "am", "Denny", "boy", "ha"]
Output: a list of fourterm, e.g., ["I_am_Denny_boy", "I_am_Denny_ha", "I_am_boy_ha", "I_Denny_boy_ha", "am_Denny_boy_ha"]
"""
assert type(words) == list
L = len(words)
if L > 3:
lst = []
for i in xrange(L - 3):
for j in xrange(i + 1, L - 2):
for k in xrange(j + 1, L - 1):
for l in xrange(k + 1, L):
lst.append(join_string.join([words[i], words[j], words[k], words[l]]))
else:
# set it as triterm
lst = NgramUtil.triterms(words, join_string)
return lst
@staticmethod
def ngrams(words, ngram, join_string=" "):
"""
wrapper for ngram
"""
if ngram == 1:
return NgramUtil.unigrams(words)
elif ngram == 2:
return NgramUtil.bigrams(words, join_string)
elif ngram == 3:
return NgramUtil.trigrams(words, join_string)
elif ngram == 4:
return NgramUtil.fourgrams(words, join_string)
elif ngram == 12:
unigram = NgramUtil.unigrams(words)
bigram = [x for x in NgramUtil.bigrams(words, join_string) if len(x.split(join_string)) == 2]
return unigram + bigram
elif ngram == 123:
unigram = NgramUtil.unigrams(words)
bigram = [x for x in NgramUtil.bigrams(words, join_string) if len(x.split(join_string)) == 2]
trigram = [x for x in NgramUtil.trigrams(words, join_string) if len(x.split(join_string)) == 3]
return unigram + bigram + trigram
@staticmethod
def nterms(words, nterm, join_string=" "):
"""wrapper for nterm"""
if nterm == 1:
return NgramUtil.uniterms(words)
elif nterm == 2:
return NgramUtil.biterms(words, join_string)
elif nterm == 3:
return NgramUtil.triterms(words, join_string)
elif nterm == 4:
return NgramUtil.fourterms(words, join_string)
def cal_hist(t1_rep, t2_rep, qnum, hist_size):
    """Matching-histogram features (DRMM-style).

    For each of the first ``qnum`` rows of ``t1_rep``, dot-products against
    every row of ``t2_rep`` are bucketed into ``hist_size`` bins spanning
    [-1, 1], counted, add-one smoothed and log10-scaled.

    Improvement over the original: only the first ``qnum`` rows are dotted
    with ``t2_rep`` (the original computed the full product and bailed out of
    the enumeration with ``break``); results are identical.
    Assumes dot products lie in [-1, 1] (normalized embeddings) -- values
    outside that range would index out of bounds, as before.
    """
    mhist = np.zeros((qnum, hist_size), dtype=np.float32)
    # Rows beyond qnum never contribute, so slice before the dot product.
    mm = t1_rep[:qnum].dot(np.transpose(t2_rep))
    for (i, j), v in np.ndenumerate(mm):
        # Map v in [-1, 1] linearly onto bin index [0, hist_size - 1].
        vid = int((v + 1.) / 2. * (hist_size - 1.))
        mhist[i][vid] += 1.
    # Add-one smoothing, then log scale.
    mhist += 1.
    mhist = np.log10(mhist)
    return mhist.flatten()
def cal_binsum(t1_rep, t2_rep, qnum, bin_num):
    """Bin-sum features: like cal_hist, but each bin accumulates the sum of
    the similarity values falling into it rather than their count; no
    smoothing or log scaling is applied.

    Improvement over the original: only the first ``qnum`` rows are dotted
    with ``t2_rep`` (the original computed the full product and bailed out of
    the enumeration with ``break``); results are identical.
    Assumes dot products lie in [-1, 1], as before.
    """
    mbinsum = np.zeros((qnum, bin_num), dtype=np.float32)
    # Rows beyond qnum never contribute, so slice before the dot product.
    mm = t1_rep[:qnum].dot(np.transpose(t2_rep))
    for (i, j), v in np.ndenumerate(mm):
        # Map v in [-1, 1] linearly onto bin index [0, bin_num - 1].
        vid = int((v + 1.) / 2. * (bin_num - 1.))
        mbinsum[i][vid] += v
    # smoothing / log scaling intentionally omitted for bin sums
    return mbinsum.flatten()
def _test_ngram():
    # Smoke test: character-level trigrams of the sample string, joined with
    # the empty string, printed for manual inspection.
    words = 'hello, world! hello, deep!'
    print(NgramUtil.ngrams(list(words), 3, ''))
# def _test_hist():
# embedfile = '../../data/mq2007/embed_wiki-pdc_d50_norm'
# queryfile = '../../data/mq2007/qid_query.txt'
# docfile = '../../data/mq2007/docid_doc.txt'
# relfile = '../../data/mq2007/relation.test.fold5.txt'
# histfile = '../../data/mq2007/relation.test.fold5.hist-30.txt'
# embed_dict = read_embedding(filename = embedfile)
# print('after read embedding ...')
# _PAD_ = 193367
# embed_dict[_PAD_] = np.zeros((50, ), dtype=np.float32)
# embed = np.float32(np.random.uniform(-0.2, 0.2, [193368, 50]))
# embed = convert_embed_2_numpy(embed_dict, embed = embed)
#
# query, _ = read_data(queryfile)
# print('after read query ....')
# doc, _ = read_data(docfile)
# print('after read doc ...')
# rel = read_relation(relfile)
# print('after read relation ... ')
# fout = open(histfile, 'w')
# for label, d1, d2 in rel:
# assert d1 in query
# assert d2 in doc
# qnum = len(query[d1])
# d1_embed = embed[query[d1]]
# d2_embed = embed[doc[d2]]
# curr_hist = cal_hist(d1_embed, d2_embed, qnum, 30)
# curr_hist = curr_hist.tolist()
# fout.write(' '.join(map(str, curr_hist)))
# fout.write('\n')
# print(qnum)
# #print(curr_hist)
# fout.close()
if __name__ == '__main__':
    #_test_ngram()
    # test with sample data
    # End-to-end smoke run: build corpus + relation files from the bundled
    # ranking sample, then tokenize/index the corpus with Preprocess.
    basedir = '../../data/example/ranking/'
    prepare = Preparation()
    sample_file = basedir + 'sample.txt'
    corpus, rels = prepare.run_with_one_corpus(sample_file)
    print ('total corpus size', len(corpus))
    print ('total relations size', len(rels))
    prepare.save_corpus(basedir + 'corpus.txt', corpus)
    prepare.save_relation(basedir + 'relation.txt', rels)
    print ('preparation finished ...')
    print ('begin preprocess...')
    # Preprocess corpus file: min_freq=1 keeps every word in the vocabulary.
    preprocessor = Preprocess(min_freq=1)
    dids, docs = preprocessor.run(basedir + 'corpus.txt')
    preprocessor.save_word_dict(basedir + 'word_dict.txt')
    preprocessor.save_words_stats(basedir + 'word_stats.txt')
    # Write one "<doc id>\t<space-separated word ids>" row per document.
    fout = open(basedir + 'corpus_preprocessed.txt', 'w')
    for inum, did in enumerate(dids):
        fout.write('%s\t%s\n' % (did, ' '.join(map(str, docs[inum]))))
    fout.close()
    print('preprocess finished ...')
| 24,809
|
https://github.com/josealbertohm/boom-homework/blob/master/src/main/java/com/spring/boom/credit/homework/company/CompanyRepository.java
|
Github Open Source
|
Open Source
|
Apache-2.0
| null |
boom-homework
|
josealbertohm
|
Java
|
Code
| 18
| 95
|
package com.spring.boom.credit.homework.company;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.JpaSpecificationExecutor;
import org.springframework.stereotype.Repository;
/**
 * Spring Data JPA repository for {@code Company} entities.
 * Inherits standard CRUD and paging operations from {@link JpaRepository} and
 * criteria-style queries from {@link JpaSpecificationExecutor};
 * no custom query methods are declared.
 */
@Repository
public interface CompanyRepository extends JpaRepository<Company, Long>, JpaSpecificationExecutor<Company> {
}
| 26,427
|
https://github.com/shuwang1992/WebClient-Vue/blob/master/stories/UITimeline.stories.js
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,021
|
WebClient-Vue
|
shuwang1992
|
JavaScript
|
Code
| 71
| 297
|
import MapgisUiTimeline from "../ui/src/components/timeline/Timeline.vue";
// Storybook metadata: registers the timeline component under "界面/时间轴"
// (UI / Timeline); no configurable argTypes are exposed.
export default {
  title: "界面/时间轴",
  component: MapgisUiTimeline,
  argTypes: {
  },
};
// Story template: renders a fixed four-item demo timeline.
const Template = (args, { argTypes }) => ({
  props: Object.keys(argTypes),
  components: { MapgisUiTimeline },
  data() {
    return {
    };
  },
  methods: {
  },
  template: `
  <mapgis-ui-timeline>
    <mapgis-ui-timeline-item>Create a services site 2015-09-01</mapgis-ui-timeline-item>
    <mapgis-ui-timeline-item>Solve initial network problems 2015-09-01</mapgis-ui-timeline-item>
    <mapgis-ui-timeline-item>Technical testing 2015-09-01</mapgis-ui-timeline-item>
    <mapgis-ui-timeline-item>Network problems being solved 2015-09-01</mapgis-ui-timeline-item>
  </mapgis-ui-timeline>
  `,
});
// Default story; takes no args.
export const Timeline = Template.bind({});
Timeline.args = {};
| 23,215
|
https://github.com/mogmog/beetroute/blob/master/src/components/Cards/HDObservationCard/mapper.js
|
Github Open Source
|
Open Source
|
MIT
| 2,018
|
beetroute
|
mogmog
|
JavaScript
|
Code
| 51
| 237
|
const db = require('./db')
const JM = require('json-mapper');
// json-mapper spec: which fields of the fetched country record feed each
// card field (title/subtitle/subsubtitle/map).
const mapping = {
  title : 'factbook.name',
  subtitle : 'factbook.people.population.total',
  subsubtitle : 'data.people.languages.language',
  map : 'map'
};
// Stamps every mapped record with the card component/key metadata before saving.
const merge = db.mergeObject({component : 'CountryInfoCard', data : {}, key : {type : 'question', id : 1}});
// Pipeline per country: fetch -> map fields -> merge metadata -> persist card.
//db.addAsync('Estonia').then((response) => JM.makeConverter(mapping)(response)).then(merge).then(db.createCard);
db.addAsync('France').then((response) => JM.makeConverter(mapping)(response)).then(merge).then(db.createCard);
db.addAsync('Germany').then((response) => JM.makeConverter(mapping)(response)).then(merge).then(db.createCard);
| 24,618
|
https://github.com/develmax/Crm.Sdk.Core/blob/master/Microsoft.Crm.Sdk.Proxy/Messages/AddSolutionComponentRequest.cs
|
Github Open Source
|
Open Source
|
MIT
| null |
Crm.Sdk.Core
|
develmax
|
C#
|
Code
| 310
| 803
|
using Microsoft.Xrm.Sdk;
using System;
using System.Runtime.Serialization;
namespace Microsoft.Crm.Sdk.Messages
{
/// <summary>Contains the data that is needed to add a solution component to an unmanaged solution.</summary>
[DataContract(Namespace = "http://schemas.microsoft.com/crm/2011/Contracts")]
public sealed class AddSolutionComponentRequest : OrganizationRequest
{
/// <summary>Gets or sets the ID of the solution component. Required.</summary>
/// <returns>Type: Returns_Guid
/// The ID of the solution component.</returns>
public Guid ComponentId
{
get
{
return this.Parameters.Contains(nameof (ComponentId)) ? (Guid) this.Parameters[nameof (ComponentId)] : new Guid();
}
set
{
this.Parameters[nameof (ComponentId)] = (object) value;
}
}
/// <summary>Gets or sets the value that represents the solution component that you are adding. Required.</summary>
/// <returns>Type: Returns_Int32
/// The integer value of the componenttype enumeration.</returns>
public int ComponentType
{
get
{
return this.Parameters.Contains(nameof (ComponentType)) ? (int) this.Parameters[nameof (ComponentType)] : 0;
}
set
{
this.Parameters[nameof (ComponentType)] = (object) value;
}
}
/// <summary>Gets or sets the unique name of the solution you are adding the solution component to. Required. </summary>
/// <returns>Type: Returns_String
/// The unique name of the solution you are adding the solution component to.</returns>
public string SolutionUniqueName
{
get
{
return this.Parameters.Contains(nameof (SolutionUniqueName)) ? (string) this.Parameters[nameof (SolutionUniqueName)] : (string) null;
}
set
{
this.Parameters[nameof (SolutionUniqueName)] = (object) value;
}
}
/// <summary>Gets or sets a value that indicates whether other solution components that are required by the solution component that you are adding should also be added to the unmanaged solution. Required.</summary>
/// <returns>Type: Returns_Booleantrue if the components that are required by the solution component you are adding should also be added to the unmanaged solution; otherwise, false.</returns>
public bool AddRequiredComponents
{
get
{
return this.Parameters.Contains(nameof (AddRequiredComponents)) && (bool) this.Parameters[nameof (AddRequiredComponents)];
}
set
{
this.Parameters[nameof (AddRequiredComponents)] = (object) value;
}
}
/// <summary>Initializes a new instance of the <see cref="T:Microsoft.Crm.Sdk.Messages.AddSolutionComponentRequest"></see> class.</summary>
public AddSolutionComponentRequest()
{
this.RequestName = "AddSolutionComponent";
this.ComponentId = new Guid();
this.ComponentType = 0;
this.SolutionUniqueName = (string) null;
this.AddRequiredComponents = false;
}
}
}
| 1,914
|
https://github.com/sidasu92/pynet_test_dev/blob/master/day2/classes_ex2.py
|
Github Open Source
|
Open Source
|
Apache-2.0
| null |
pynet_test_dev
|
sidasu92
|
Python
|
Code
| 86
| 344
|
class NetworkDevice(object):
    """Simple record of a managed network device and its credentials.

    Connection details are set at construction time; the inventory fields
    (serial number, vendor, model, OS version, uptime) start empty and are
    filled in later (e.g. via setVendor).
    """

    def __init__(self, ip_addr, username, password):
        # Connection details supplied by the caller.
        self.ip_addr = ip_addr
        self.username = username
        self.password = password
        # Inventory fields, empty until populated.
        self.serial_number = ""
        self.vendor = ""
        self.model = ""
        self.os_version = ""
        self.uptime = ""

    def getIpAddress(self):
        """Print the device's IP address."""
        print(f"The IP address for this device is {self.ip_addr}")

    def getUsernamePwd(self):
        """Print the stored credentials."""
        print(f"The username is {self.username} and pwd is {self.password}")

    def setVendor(self, vendor):
        """Record the vendor name and print a confirmation."""
        self.vendor = vendor
        print(f"vendor set to {self.vendor}")
# Demo: create several devices and exercise the accessor/mutator methods.
net_device1 = NetworkDevice("1.1.1.1", "admin", "pwd")
net_device2 = NetworkDevice("2.2.2.2", "admin2", "pwd2")
net_device3 = NetworkDevice("3.3.3.3", "admin3", "pwd3")
net_device4 = NetworkDevice("4.4.4.4", "admin4", "pwd4")
print(net_device1.ip_addr)
net_device3.getIpAddress()
net_device3.getUsernamePwd()
net_device3.setVendor("Lenovo")
| 12,968
|
https://github.com/DojoDev/laravel-dojodelivery/blob/master/resources/views/admin/clients/edit.blade.php
|
Github Open Source
|
Open Source
|
MIT
| 2,016
|
laravel-dojodelivery
|
DojoDev
|
Blade
|
Code
| 52
| 222
|
@extends('app')
@section('content')
<div class="container">
    <div class="row">
        <header>
            <h1>Client Edit: {{ $client->user->name }}</h1>
            <hr>
        </header>
        @include('errors._check')
        {{-- BUG FIX: the route options were passed as a *third* argument, which
             Form::model() ignores (it takes only ($model, $options)), so the
             form never submitted to admin.clients.update. The route now lives
             inside the options array, with the PUT method a resource update
             route expects. --}}
        {!! Form::model($client, ['class' => 'form-horizontal', 'route' => ['admin.clients.update', $client->id], 'method' => 'PUT']) !!}
        <br>
        @include('admin.clients._form')
        <div class="form-group">
            {!! Form::submit('.:: SAVE ::.', ['class'=>'btn btn-primary']) !!}
        </div>
        {!! Form::close() !!}
    </div><!-- End Row -->
    <hr>
    <a href="{{route('admin.clients.index')}}" class="btn btn-default"><< Back</a>
</div><!-- End Container -->
@endsection
|
https://github.com/princeleo/yii-base/blob/master/common/log/ActionLog.php
|
Github Open Source
|
Open Source
|
BSD-3-Clause
| null |
yii-base
|
princeleo
|
PHP
|
Code
| 270
| 939
|
<?php
/**
* Author: Richard <chenz@snsshop.cn>
* Date: 2016/11/28
* Time: 16:22
*/
namespace app\common\log;
use app\models\ActionLogModel;
use yii\base\Object;
/**
 * Writes audit rows for user/admin actions into ActionLogModel.
 * Configure `system` (defaults to the Yii application id, see init()) and
 * `resourceTypes` (map of valid resource-type code => label).
 */
class ActionLog extends Object
{
    /** @var string System identifier stamped on every log row; set in init(). */
    public $system;
    /** @var array Map of valid resource-type code => label, supplied via config. */
    public $resourceTypes;
    /** @var mixed NOTE(review): never assigned or read in this class as shown -- possibly dead. */
    protected $request;
    // Action-type codes.
    const ADD = 'add';
    const DELETE = 'delete';
    const EDIT = 'edit';
    // NOTE(review): SEARCH is 'query', but checkActionType() validates against
    // the key 'search' in self::$actions, so record(..., self::SEARCH, ...)
    // appears to fail validation -- confirm which value is intended.
    const SEARCH = 'query';
    const LOGIN = 'login';
    const LOGOUT = 'logout';
    const SYNC = 'sync';
    const DOWNLOAD = 'down';
    // Action-result codes.
    const SUCCESS = 1;
    const FAILED = 2;
    /**
     * Persist one action-log row.
     *
     * @param string $resourceType one of the configured resourceTypes keys
     * @param mixed  $resourceId   id of the affected resource (must be non-empty)
     * @param string $actionType   one of the self::$actions keys
     * @param array  $params       request parameters, stored as JSON (must be non-empty)
     * @param array  $data         optional payload, stored as JSON
     * @param int    $result       self::SUCCESS or self::FAILED
     * @param mixed  $ext          optional extra data, stored as JSON when not ''
     * @return bool false when validation fails, otherwise the model save() result
     */
    public function record($resourceType, $resourceId, $actionType, $params, $data = [], $result = self::SUCCESS, $ext = '')
    {
        // Reject rows with unknown types or missing mandatory fields.
        if ((! $this->checkResourceType($resourceType)) ||
            empty($resourceId) ||
            (! $this->checkActionType($actionType)) ||
            empty($params)) {
            return false;
        }
        $request = \Yii::$app->request;
        $m = new ActionLogModel();
        $m->system_id = $this->system;
        $m->resource_type = $resourceType;
        $m->resource_id = $resourceId;
        $m->action_type = $actionType;
        $m->action_time = time();
        $m->action_params = json_encode($params, JSON_UNESCAPED_UNICODE);
        $m->action_data = empty($data) ? '' : json_encode($data, JSON_UNESCAPED_UNICODE);
        $m->action_result = $result;
        // Prefer the logged-in user's id; fall back to a uid passed in $params, then 0.
        $m->action_aid = empty(\Yii::$app->user->identity) ? (!empty($params['uid']) ? $params['uid'] : 0) : \Yii::$app->user->identity->getId();
        $m->ip = $request->userIP;
        $m->request_host = $request->hostInfo;
        $m->user_agent = empty($request->userAgent) ? '' : $request->userAgent;
        if ('' !== $ext) {
            $m->ext_data = json_encode($ext, JSON_UNESCAPED_UNICODE);
        }
        return $m->save();
    }
    /** @return array the configured resource-type map */
    public function getResourceTypes()
    {
        return $this->resourceTypes;
    }
    /** @return array the supported action-type code => label map */
    public function getActionTypes()
    {
        return self::$actions;
    }
    public function init()
    {
        parent::init();
        // Default the system identifier to the current application id.
        $this->system = \Yii::$app->id;
    }
    /** @return bool whether $resourceType is a configured resource-type key */
    protected function checkResourceType($resourceType)
    {
        return in_array($resourceType, array_keys($this->resourceTypes));
    }
    /** @return bool whether $actionType is a key of self::$actions */
    protected function checkActionType($actionType)
    {
        return in_array($actionType, array_keys(self::$actions));
    }
    // Supported action types (code => Chinese label).
    protected static $actions = [
        'add' => '新增',
        'delete' => '删除',
        'edit' => '修改',
        'search' => '查询',
        'login' => '登录',
        'logout' => '退出',
        'sync' => '同步',
        'down' => '下载',
    ];
    /**
     * 操作结果状态
     * @var array
     */
    protected static $actionResults = [
        self::SUCCESS => '成功',
        self::FAILED => '失败',
    ];
    /** @return array result-code => label map */
    public function getActionResults()
    {
        return self::$actionResults;
    }
}
| 29,951
|
https://github.com/qqxliufeng/YXYX_NEW/blob/master/src/store/getters.js
|
Github Open Source
|
Open Source
|
MIT
| null |
YXYX_NEW
|
qqxliufeng
|
JavaScript
|
Code
| 84
| 277
|
// Root-level Vuex getters: flat, read-only accessors over the store modules.
const getters = {
  // app module (layout state)
  sidebar: (state) => state.app.sidebar,
  size: (state) => state.app.size,
  device: (state) => state.app.device,
  // tagsView module (visited/cached route tabs)
  visitedViews: (state) => state.tagsView.visitedViews,
  cachedViews: (state) => state.tagsView.cachedViews,
  // user module; name/roleId fall back to localStorage so they survive a reload
  token: (state) => state.user.token,
  avatar: (state) => state.user.avatar,
  name: (state) => state.user.name || localStorage.getItem('username'),
  phone: (state) => state.user.phone,
  roleId: (state) => state.user.roleId || localStorage.getItem('roleId'),
  userId: (state) => state.user.userId,
  schoolId: (state) => state.user.schoolId,
  schoolStatus: (state) => state.user.schoolStatus,
  // permission module
  roles: (state) => state.permission.roles,
  userRoles: (state) => state.user.roles,
  introduction: (state) => state.user.introduction,
  permission_routes: (state) => state.permission.routes,
  // errorLog module
  errorLogs: (state) => state.errorLog.logs
}
export default getters
| 16,099
|
https://github.com/CESNET/velia/blob/master/src/system/LLDPCallback.cpp
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,022
|
velia
|
CESNET
|
C++
|
Code
| 161
| 628
|
/*
* Copyright (C) 2020 CESNET, https://photonics.cesnet.cz/
*
* Written by Tomáš Pecka <tomas.pecka@fit.cvut.cz>
*
*/
#include <numeric>
#include "LLDPCallback.h"
#include "utils/log.h"
namespace velia::system {
/** Constructs the callback: stores the LLDP data provider and grabs the shared
 *  "system" spdlog logger. m_lastRequestId starts at 0 so the first sysrepo
 *  request is never mistaken for an already-handled one. */
LLDPCallback::LLDPCallback(std::shared_ptr<LLDPDataProvider> lldp)
    : m_log(spdlog::get("system"))
    , m_lldp(std::move(lldp))
    , m_lastRequestId(0)
{
}
/** sysrepo operational-data callback: publishes the current LLDP neighbor list
 *  under /czechlight-lldp:nbr-list and returns SR_ERR_OK.
 *  Repeated invocations carrying the same request_id (sysrepo asks once per
 *  subtree of one operation) are short-circuited via m_lastRequestId so the
 *  neighbor data is built only once per request. */
int LLDPCallback::operator()(std::shared_ptr<::sysrepo::Session> session, const char* module_name, const char* xpath, const char* request_xpath, uint32_t request_id, std::shared_ptr<libyang::Data_Node>& parent)
{
    m_log->trace("operational data callback: XPath {} req {} orig-XPath {}", xpath, request_id, request_xpath);
    // when asking for something in the subtree of THIS request
    if (m_lastRequestId == request_id) {
        m_log->trace(" ops data request already handled");
        return SR_ERR_OK;
    }
    m_lastRequestId = request_id;
    auto ctx = session->get_context();
    auto mod = ctx->get_module(module_name);
    // Build the result tree: one "neighbors" entry per LLDP neighbor, each with
    // its ifName plus whatever key/value properties the provider reported.
    parent = std::make_shared<libyang::Data_Node>(ctx, "/czechlight-lldp:nbr-list", nullptr, LYD_ANYDATA_CONSTSTRING, 0);
    for (const auto& n : m_lldp->getNeighbors()) {
        auto ifc = std::make_shared<libyang::Data_Node>(parent, mod, "neighbors");
        auto ifName = std::make_shared<libyang::Data_Node>(ifc, mod, "ifName", n.m_portId.c_str());
        for (const auto& [key, val] : n.m_properties) { // garbage properties in, garbage out
            auto prop = std::make_shared<libyang::Data_Node>(ifc, mod, key.c_str(), val.c_str());
        }
    }
    m_log->trace("Pushing to sysrepo (JSON): {}", parent->print_mem(LYD_FORMAT::LYD_JSON, 0));
    return SR_ERR_OK;
}
}
| 4,455
|
https://github.com/octoblu/meshblu-connector-configurator-windows-registry/blob/master/command.js
|
Github Open Source
|
Open Source
|
MIT
| null |
meshblu-connector-configurator-windows-registry
|
octoblu
|
JavaScript
|
Code
| 155
| 554
|
#!/usr/bin/env node
const OctoDash = require("octodash")
const packageJSON = require("./package.json")
const { MeshbluConnectorConfigurator } = require("./lib/configurator")
// CLI option definitions; each flag can also be supplied via its env var.
const CLI_OPTIONS = [
  {
    names: ["connector-home"],
    type: "string",
    required: true,
    env: "MESHBLU_CONNECTOR_HOME",
    help: "Base location of meshblu connectors",
    helpArg: "PATH",
    completionType: "file",
  },
  {
    names: ["pm2-home"],
    type: "string",
    required: true,
    env: "MESHBLU_CONNECTOR_PM2_HOME",
    help: "Base location of meshblu-connector-pm2",
    helpArg: "PATH",
    completionType: "file",
  },
  {
    names: ["root-key"],
    type: "string",
    required: true,
    env: "MESHBLU_CONNECTOR_ROOT_KEY",
    help: "Root Key alias [HKLM, HKCU]",
    helpArg: "ROOT",
    default: "HKCU",
  },
]
// Thin CLI wrapper: parses options with OctoDash, then delegates to
// MeshbluConnectorConfigurator.configurate().
class MeshbluConnectorConfiguratorCommand {
  constructor({ argv, cliOptions = CLI_OPTIONS } = {}) {
    this.octoDash = new OctoDash({
      argv,
      cliOptions,
      name: packageJSON.name,
      version: packageJSON.version,
    })
  }
  // Returns the configurator's promise; option errors are handled by OctoDash.
  run() {
    const options = this.octoDash.parseOptions()
    const { connectorHome, pm2Home, rootKey } = options
    const configurator = new MeshbluConnectorConfigurator({ connectorHome, pm2Home, rootKey })
    return configurator.configurate()
  }
  die(error) {
    this.octoDash.die(error)
  }
}
// Entry point: run, report failures via die(), then exit.
// NOTE(review): the .then() after .catch() runs even on failure; this exits 0
// unless octoDash.die() terminates the process itself -- confirm die()'s behavior.
const command = new MeshbluConnectorConfiguratorCommand({ argv: process.argv })
command
  .run()
  .catch(error => {
    command.die(error)
  })
  .then(() => {
    process.exit(0)
  })
| 34,353
|
https://github.com/anskuratov/ar-mr-therapy/blob/master/Assets/Resources/Spider/fear_end.fbx.meta
|
Github Open Source
|
Open Source
|
MIT
| 2,019
|
ar-mr-therapy
|
anskuratov
|
Unity3D Asset
|
Code
| 1,866
| 9,819
|
fileFormatVersion: 2
guid: bf0743a76451ef342bc3f40f9ed6a3cb
timeCreated: 1523181283
licenseType: Free
ModelImporter:
serializedVersion: 19
fileIDToRecycleName:
100000: body
100002: body 1
100004: //RootNode
100006: hel_l_control
100008: hel_r_control
100010: ikHandle
100012: ikHandle_0_l
100014: ikHandle_0_r
100016: ikHandle_1_l
100018: ikHandle_1_r
100020: ikHandle_2_l
100022: ikHandle_2_r
100024: ikHandle_3_l
100026: ikHandle_3_r
100028: ikHandle_4_l
100030: ikHandle_4_r
100032: ikHandle_l
100034: ikHandle_r
100036: joint2
100038: joint3
100040: joint4
100042: joint5
100044: joint6
100046: leg_00_l
100048: leg_00_r
100050: leg_01_l
100052: leg_01_r
100054: leg_02_l
100056: leg_02_r
100058: leg_03_l
100060: leg_03_r
100062: leg_0_l_control
100064: leg_0_r_control
100066: leg_1
100068: leg_10_l
100070: leg_10_r
100072: leg_11_l
100074: leg_11_r
100076: leg_12_l
100078: leg_12_r
100080: leg_13_l
100082: leg_13_r
100084: leg_14_l
100086: leg_14_r
100088: leg_1_l
100090: leg_1_l_control
100092: leg_1_r
100094: leg_1_r_control
100096: leg_2
100098: leg_20_l
100100: leg_20_r
100102: leg_21_l
100104: leg_21_r
100106: leg_22_l
100108: leg_22_r
100110: leg_23_l
100112: leg_23_r
100114: leg_24_l
100116: leg_24_r
100118: leg_2_l
100120: leg_2_l_control
100122: leg_2_r
100124: leg_2_r_control
100126: leg_3
100128: leg_30_l
100130: leg_30_r
100132: leg_31_l
100134: leg_31_r
100136: leg_32_l
100138: leg_32_r
100140: leg_33_l
100142: leg_33_r
100144: leg_34_l
100146: leg_34_r
100148: leg_3_l
100150: leg_3_l_control
100152: leg_3_r
100154: leg_3_r_control
100156: leg_4
100158: leg_40_l
100160: leg_40_r
100162: leg_41_l
100164: leg_41_r
100166: leg_42_l
100168: leg_42_r
100170: leg_43_l
100172: leg_43_r
100174: leg_44_l
100176: leg_44_r
100178: leg_4_l
100180: leg_4_l_control
100182: leg_4_r
100184: leg_4_r_control
100186: legs
100188: nurbsCircle1
100190: rig_body
100192: rig_legs
100194: s_ass_control1
100196: s_body_control
100198: s_root_control
100200: spider_body
100202: spider_curves
100204: spider_mesh
100206: spider_rig
400000: body
400002: body 1
400004: //RootNode
400006: hel_l_control
400008: hel_r_control
400010: ikHandle
400012: ikHandle_0_l
400014: ikHandle_0_r
400016: ikHandle_1_l
400018: ikHandle_1_r
400020: ikHandle_2_l
400022: ikHandle_2_r
400024: ikHandle_3_l
400026: ikHandle_3_r
400028: ikHandle_4_l
400030: ikHandle_4_r
400032: ikHandle_l
400034: ikHandle_r
400036: joint2
400038: joint3
400040: joint4
400042: joint5
400044: joint6
400046: leg_00_l
400048: leg_00_r
400050: leg_01_l
400052: leg_01_r
400054: leg_02_l
400056: leg_02_r
400058: leg_03_l
400060: leg_03_r
400062: leg_0_l_control
400064: leg_0_r_control
400066: leg_1
400068: leg_10_l
400070: leg_10_r
400072: leg_11_l
400074: leg_11_r
400076: leg_12_l
400078: leg_12_r
400080: leg_13_l
400082: leg_13_r
400084: leg_14_l
400086: leg_14_r
400088: leg_1_l
400090: leg_1_l_control
400092: leg_1_r
400094: leg_1_r_control
400096: leg_2
400098: leg_20_l
400100: leg_20_r
400102: leg_21_l
400104: leg_21_r
400106: leg_22_l
400108: leg_22_r
400110: leg_23_l
400112: leg_23_r
400114: leg_24_l
400116: leg_24_r
400118: leg_2_l
400120: leg_2_l_control
400122: leg_2_r
400124: leg_2_r_control
400126: leg_3
400128: leg_30_l
400130: leg_30_r
400132: leg_31_l
400134: leg_31_r
400136: leg_32_l
400138: leg_32_r
400140: leg_33_l
400142: leg_33_r
400144: leg_34_l
400146: leg_34_r
400148: leg_3_l
400150: leg_3_l_control
400152: leg_3_r
400154: leg_3_r_control
400156: leg_4
400158: leg_40_l
400160: leg_40_r
400162: leg_41_l
400164: leg_41_r
400166: leg_42_l
400168: leg_42_r
400170: leg_43_l
400172: leg_43_r
400174: leg_44_l
400176: leg_44_r
400178: leg_4_l
400180: leg_4_l_control
400182: leg_4_r
400184: leg_4_r_control
400186: legs
400188: nurbsCircle1
400190: rig_body
400192: rig_legs
400194: s_ass_control1
400196: s_body_control
400198: s_root_control
400200: spider_body
400202: spider_curves
400204: spider_mesh
400206: spider_rig
4300000: spider_body
4300002: leg_1_r
4300004: leg_1_l
4300006: leg_2_r
4300008: leg_2_l
4300010: leg_3_r
4300012: leg_3_l
4300014: leg_4_r
4300016: leg_4_l
7400000: spider_fear_end_full
7400002: spider_fear_end
9500000: //RootNode
13700000: leg_1_l
13700002: leg_1_r
13700004: leg_2_l
13700006: leg_2_r
13700008: leg_3_l
13700010: leg_3_r
13700012: leg_4_l
13700014: leg_4_r
13700016: spider_body
materials:
importMaterials: 1
materialName: 0
materialSearch: 1
animations:
legacyGenerateAnimations: 4
bakeSimulation: 0
resampleCurves: 1
optimizeGameObjects: 0
motionNodeName:
rigImportErrors:
rigImportWarnings:
animationImportErrors:
animationImportWarnings:
animationRetargetingWarnings:
animationDoRetargetingWarnings: 0
animationCompression: 1
animationRotationError: 0.5
animationPositionError: 0.5
animationScaleError: 0.5
animationWrapMode: 0
extraExposedTransformPaths: []
clipAnimations:
- serializedVersion: 16
name: spider_fear_end_full
takeName: Take 001
firstFrame: 1
lastFrame: 205
wrapMode: 0
orientationOffsetY: 0
level: 0
cycleOffset: 0
loop: 0
hasAdditiveReferencePose: 0
loopTime: 0
loopBlend: 0
loopBlendOrientation: 0
loopBlendPositionY: 0
loopBlendPositionXZ: 0
keepOriginalOrientation: 0
keepOriginalPositionY: 1
keepOriginalPositionXZ: 0
heightFromFeet: 0
mirror: 0
bodyMask: 01000000010000000100000001000000010000000100000001000000010000000100000001000000010000000100000001000000
curves: []
events: []
transformMask:
- path:
weight: 1
- path: ikHandle
weight: 1
- path: ikHandle/ikHandle_l
weight: 1
- path: ikHandle/ikHandle_l/ikHandle_0_l
weight: 1
- path: ikHandle/ikHandle_l/ikHandle_1_l
weight: 1
- path: ikHandle/ikHandle_l/ikHandle_2_l
weight: 1
- path: ikHandle/ikHandle_l/ikHandle_3_l
weight: 1
- path: ikHandle/ikHandle_l/ikHandle_4_l
weight: 1
- path: ikHandle/ikHandle_r
weight: 1
- path: ikHandle/ikHandle_r/ikHandle_0_r
weight: 1
- path: ikHandle/ikHandle_r/ikHandle_1_r
weight: 1
- path: ikHandle/ikHandle_r/ikHandle_2_r
weight: 1
- path: ikHandle/ikHandle_r/ikHandle_3_r
weight: 1
- path: ikHandle/ikHandle_r/ikHandle_4_r
weight: 1
- path: spider_curves
weight: 1
- path: spider_curves/leg_1_l_control
weight: 1
- path: spider_curves/leg_1_r_control
weight: 1
- path: spider_curves/leg_2_l_control
weight: 1
- path: spider_curves/leg_2_r_control
weight: 1
- path: spider_curves/leg_3_l_control
weight: 1
- path: spider_curves/leg_3_r_control
weight: 1
- path: spider_curves/leg_4_l_control
weight: 1
- path: spider_curves/leg_4_r_control
weight: 1
- path: spider_curves/nurbsCircle1
weight: 1
- path: spider_curves/nurbsCircle1/hel_l_control
weight: 1
- path: spider_curves/nurbsCircle1/hel_r_control
weight: 1
- path: spider_curves/nurbsCircle1/leg_0_l_control
weight: 1
- path: spider_curves/nurbsCircle1/leg_0_r_control
weight: 1
- path: spider_curves/nurbsCircle1/s_ass_control1
weight: 1
- path: spider_curves/nurbsCircle1/s_body_control
weight: 1
- path: spider_curves/s_root_control
weight: 1
- path: spider_mesh
weight: 1
- path: spider_mesh/body
weight: 1
- path: spider_mesh/body/spider_body
weight: 1
- path: spider_mesh/legs
weight: 1
- path: spider_mesh/legs/leg_1
weight: 1
- path: spider_mesh/legs/leg_1/leg_1_l
weight: 1
- path: spider_mesh/legs/leg_1/leg_1_r
weight: 1
- path: spider_mesh/legs/leg_2
weight: 1
- path: spider_mesh/legs/leg_2/leg_2_l
weight: 1
- path: spider_mesh/legs/leg_2/leg_2_r
weight: 1
- path: spider_mesh/legs/leg_3
weight: 1
- path: spider_mesh/legs/leg_3/leg_3_l
weight: 1
- path: spider_mesh/legs/leg_3/leg_3_r
weight: 1
- path: spider_mesh/legs/leg_4
weight: 1
- path: spider_mesh/legs/leg_4/leg_4_l
weight: 1
- path: spider_mesh/legs/leg_4/leg_4_r
weight: 1
- path: spider_rig
weight: 1
- path: spider_rig/rig_body
weight: 1
- path: spider_rig/rig_body/body 1
weight: 1
- path: spider_rig/rig_body/body 1/joint2
weight: 1
- path: spider_rig/rig_body/body 1/joint2/joint3
weight: 1
- path: spider_rig/rig_body/body 1/joint2/joint3/joint4
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_10_l
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_10_l/leg_11_l
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_10_l/leg_11_l/leg_12_l
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_10_l/leg_11_l/leg_12_l/leg_13_l
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_10_l/leg_11_l/leg_12_l/leg_13_l/leg_14_l
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_10_r
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_10_r/leg_11_r
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_10_r/leg_11_r/leg_12_r
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_10_r/leg_11_r/leg_12_r/leg_13_r
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_10_r/leg_11_r/leg_12_r/leg_13_r/leg_14_r
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_20_l
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_20_l/leg_21_l
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_20_l/leg_21_l/leg_22_l
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_20_l/leg_21_l/leg_22_l/leg_23_l
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_20_l/leg_21_l/leg_22_l/leg_23_l/leg_24_l
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_20_r
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_20_r/leg_21_r
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_20_r/leg_21_r/leg_22_r
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_20_r/leg_21_r/leg_22_r/leg_23_r
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_20_r/leg_21_r/leg_22_r/leg_23_r/leg_24_r
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_30_l
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_30_l/leg_31_l
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_30_l/leg_31_l/leg_32_l
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_30_l/leg_31_l/leg_32_l/leg_33_l
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_30_l/leg_31_l/leg_32_l/leg_33_l/leg_34_l
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_30_r
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_30_r/leg_31_r
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_30_r/leg_31_r/leg_32_r
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_30_r/leg_31_r/leg_32_r/leg_33_r
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_30_r/leg_31_r/leg_32_r/leg_33_r/leg_34_r
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_40_l
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_40_l/leg_41_l
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_40_l/leg_41_l/leg_42_l
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_40_l/leg_41_l/leg_42_l/leg_43_l
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_40_l/leg_41_l/leg_42_l/leg_43_l/leg_44_l
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_40_r
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_40_r/leg_41_r
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_40_r/leg_41_r/leg_42_r
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_40_r/leg_41_r/leg_42_r/leg_43_r
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_40_r/leg_41_r/leg_42_r/leg_43_r/leg_44_r
weight: 1
- path: spider_rig/rig_body/body 1/joint5
weight: 1
- path: spider_rig/rig_body/body 1/joint6
weight: 1
- path: spider_rig/rig_body/body 1/leg_00_l
weight: 1
- path: spider_rig/rig_body/body 1/leg_00_l/leg_01_l
weight: 1
- path: spider_rig/rig_body/body 1/leg_00_l/leg_01_l/leg_02_l
weight: 1
- path: spider_rig/rig_body/body 1/leg_00_l/leg_01_l/leg_02_l/leg_03_l
weight: 1
- path: spider_rig/rig_body/body 1/leg_00_r
weight: 1
- path: spider_rig/rig_body/body 1/leg_00_r/leg_01_r
weight: 1
- path: spider_rig/rig_body/body 1/leg_00_r/leg_01_r/leg_02_r
weight: 1
- path: spider_rig/rig_body/body 1/leg_00_r/leg_01_r/leg_02_r/leg_03_r
weight: 1
- path: spider_rig/rig_legs
weight: 1
maskType: 3
maskSource: {instanceID: 0}
additiveReferencePoseFrame: 0
- serializedVersion: 16
name: spider_fear_end
takeName: Take 001
firstFrame: 110
lastFrame: 196
wrapMode: 0
orientationOffsetY: 0
level: 0
cycleOffset: 0
loop: 0
hasAdditiveReferencePose: 0
loopTime: 0
loopBlend: 0
loopBlendOrientation: 0
loopBlendPositionY: 0
loopBlendPositionXZ: 0
keepOriginalOrientation: 0
keepOriginalPositionY: 1
keepOriginalPositionXZ: 0
heightFromFeet: 0
mirror: 0
bodyMask: 01000000010000000100000001000000010000000100000001000000010000000100000001000000010000000100000001000000
curves: []
events: []
transformMask:
- path:
weight: 1
- path: ikHandle
weight: 1
- path: ikHandle/ikHandle_l
weight: 1
- path: ikHandle/ikHandle_l/ikHandle_0_l
weight: 1
- path: ikHandle/ikHandle_l/ikHandle_1_l
weight: 1
- path: ikHandle/ikHandle_l/ikHandle_2_l
weight: 1
- path: ikHandle/ikHandle_l/ikHandle_3_l
weight: 1
- path: ikHandle/ikHandle_l/ikHandle_4_l
weight: 1
- path: ikHandle/ikHandle_r
weight: 1
- path: ikHandle/ikHandle_r/ikHandle_0_r
weight: 1
- path: ikHandle/ikHandle_r/ikHandle_1_r
weight: 1
- path: ikHandle/ikHandle_r/ikHandle_2_r
weight: 1
- path: ikHandle/ikHandle_r/ikHandle_3_r
weight: 1
- path: ikHandle/ikHandle_r/ikHandle_4_r
weight: 1
- path: spider_curves
weight: 1
- path: spider_curves/leg_1_l_control
weight: 1
- path: spider_curves/leg_1_r_control
weight: 1
- path: spider_curves/leg_2_l_control
weight: 1
- path: spider_curves/leg_2_r_control
weight: 1
- path: spider_curves/leg_3_l_control
weight: 1
- path: spider_curves/leg_3_r_control
weight: 1
- path: spider_curves/leg_4_l_control
weight: 1
- path: spider_curves/leg_4_r_control
weight: 1
- path: spider_curves/nurbsCircle1
weight: 1
- path: spider_curves/nurbsCircle1/hel_l_control
weight: 1
- path: spider_curves/nurbsCircle1/hel_r_control
weight: 1
- path: spider_curves/nurbsCircle1/leg_0_l_control
weight: 1
- path: spider_curves/nurbsCircle1/leg_0_r_control
weight: 1
- path: spider_curves/nurbsCircle1/s_ass_control1
weight: 1
- path: spider_curves/nurbsCircle1/s_body_control
weight: 1
- path: spider_curves/s_root_control
weight: 1
- path: spider_mesh
weight: 1
- path: spider_mesh/body
weight: 1
- path: spider_mesh/body/spider_body
weight: 1
- path: spider_mesh/legs
weight: 1
- path: spider_mesh/legs/leg_1
weight: 1
- path: spider_mesh/legs/leg_1/leg_1_l
weight: 1
- path: spider_mesh/legs/leg_1/leg_1_r
weight: 1
- path: spider_mesh/legs/leg_2
weight: 1
- path: spider_mesh/legs/leg_2/leg_2_l
weight: 1
- path: spider_mesh/legs/leg_2/leg_2_r
weight: 1
- path: spider_mesh/legs/leg_3
weight: 1
- path: spider_mesh/legs/leg_3/leg_3_l
weight: 1
- path: spider_mesh/legs/leg_3/leg_3_r
weight: 1
- path: spider_mesh/legs/leg_4
weight: 1
- path: spider_mesh/legs/leg_4/leg_4_l
weight: 1
- path: spider_mesh/legs/leg_4/leg_4_r
weight: 1
- path: spider_rig
weight: 1
- path: spider_rig/rig_body
weight: 1
- path: spider_rig/rig_body/body 1
weight: 1
- path: spider_rig/rig_body/body 1/joint2
weight: 1
- path: spider_rig/rig_body/body 1/joint2/joint3
weight: 1
- path: spider_rig/rig_body/body 1/joint2/joint3/joint4
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_10_l
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_10_l/leg_11_l
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_10_l/leg_11_l/leg_12_l
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_10_l/leg_11_l/leg_12_l/leg_13_l
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_10_l/leg_11_l/leg_12_l/leg_13_l/leg_14_l
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_10_r
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_10_r/leg_11_r
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_10_r/leg_11_r/leg_12_r
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_10_r/leg_11_r/leg_12_r/leg_13_r
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_10_r/leg_11_r/leg_12_r/leg_13_r/leg_14_r
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_20_l
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_20_l/leg_21_l
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_20_l/leg_21_l/leg_22_l
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_20_l/leg_21_l/leg_22_l/leg_23_l
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_20_l/leg_21_l/leg_22_l/leg_23_l/leg_24_l
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_20_r
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_20_r/leg_21_r
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_20_r/leg_21_r/leg_22_r
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_20_r/leg_21_r/leg_22_r/leg_23_r
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_20_r/leg_21_r/leg_22_r/leg_23_r/leg_24_r
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_30_l
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_30_l/leg_31_l
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_30_l/leg_31_l/leg_32_l
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_30_l/leg_31_l/leg_32_l/leg_33_l
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_30_l/leg_31_l/leg_32_l/leg_33_l/leg_34_l
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_30_r
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_30_r/leg_31_r
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_30_r/leg_31_r/leg_32_r
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_30_r/leg_31_r/leg_32_r/leg_33_r
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_30_r/leg_31_r/leg_32_r/leg_33_r/leg_34_r
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_40_l
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_40_l/leg_41_l
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_40_l/leg_41_l/leg_42_l
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_40_l/leg_41_l/leg_42_l/leg_43_l
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_40_l/leg_41_l/leg_42_l/leg_43_l/leg_44_l
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_40_r
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_40_r/leg_41_r
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_40_r/leg_41_r/leg_42_r
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_40_r/leg_41_r/leg_42_r/leg_43_r
weight: 1
- path: spider_rig/rig_body/body 1/joint2/leg_40_r/leg_41_r/leg_42_r/leg_43_r/leg_44_r
weight: 1
- path: spider_rig/rig_body/body 1/joint5
weight: 1
- path: spider_rig/rig_body/body 1/joint6
weight: 1
- path: spider_rig/rig_body/body 1/leg_00_l
weight: 1
- path: spider_rig/rig_body/body 1/leg_00_l/leg_01_l
weight: 1
- path: spider_rig/rig_body/body 1/leg_00_l/leg_01_l/leg_02_l
weight: 1
- path: spider_rig/rig_body/body 1/leg_00_l/leg_01_l/leg_02_l/leg_03_l
weight: 1
- path: spider_rig/rig_body/body 1/leg_00_r
weight: 1
- path: spider_rig/rig_body/body 1/leg_00_r/leg_01_r
weight: 1
- path: spider_rig/rig_body/body 1/leg_00_r/leg_01_r/leg_02_r
weight: 1
- path: spider_rig/rig_body/body 1/leg_00_r/leg_01_r/leg_02_r/leg_03_r
weight: 1
- path: spider_rig/rig_legs
weight: 1
maskType: 3
maskSource: {instanceID: 0}
additiveReferencePoseFrame: 0
isReadable: 1
meshes:
lODScreenPercentages: []
globalScale: 1
meshCompression: 0
addColliders: 0
importBlendShapes: 1
swapUVChannels: 0
generateSecondaryUV: 0
useFileUnits: 1
optimizeMeshForGPU: 1
keepQuads: 0
weldVertices: 1
secondaryUVAngleDistortion: 8
secondaryUVAreaDistortion: 15.000001
secondaryUVHardAngle: 88
secondaryUVPackMargin: 4
useFileScale: 1
tangentSpace:
normalSmoothAngle: 60
normalImportMode: 0
tangentImportMode: 3
importAnimation: 1
copyAvatar: 0
humanDescription:
serializedVersion: 2
human: []
skeleton: []
armTwist: 0.5
foreArmTwist: 0.5
upperLegTwist: 0.5
legTwist: 0.5
armStretch: 0.05
legStretch: 0.05
feetSpacing: 0
rootMotionBoneName:
rootMotionBoneRotation: {x: 0, y: 0, z: 0, w: 1}
hasTranslationDoF: 0
hasExtraRoot: 0
skeletonHasParents: 1
lastHumanDescriptionAvatarSource: {instanceID: 0}
animationType: 2
humanoidOversampling: 1
additionalBone: 0
userData:
assetBundleName:
assetBundleVariant:
| 38,654
|
https://github.com/Sonoport/freesound-java/blob/master/src/test/java/com/sonoport/freesound/response/mapping/SoundListMappingTest.java
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,014
|
freesound-java
|
Sonoport
|
Java
|
Code
| 221
| 536
|
/*
* Copyright 2014 Sonoport (Asia) Pte Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sonoport.freesound.response.mapping;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.util.List;

import com.sonoport.freesound.response.Sound;

/**
 * Unit tests to ensure the correct operation of {@link PagingResponseMapper} when handling sounds and {@link Sound}
 * objects.
 *
 * The test data used is defined in <code>/src/test/resources/sound-list.json</code>.
 */
public class SoundListMappingTest extends PagingResponseMapperTest<Sound> {

	/** The value for the number of results. */
	private static final Integer COUNT = Integer.valueOf(4557);

	/** URI to the next page of results. */
	private static final String NEXT_PAGE =
			"http://www.freesound.org/apiv2/search/text/?&query=cars&page=3&page_size=3";

	/** URI to the previous page of results. */
	private static final String PREVIOUS_PAGE =
			"http://www.freesound.org/apiv2/search/text/?&query=cars&page=1&page_size=3";

	/**
	 * No-arg constructor.
	 */
	public SoundListMappingTest() {
		super(new PagingResponseMapper<>(new SoundMapper()), "/sound-list.json", COUNT, NEXT_PAGE, PREVIOUS_PAGE);
	}

	@Override
	protected void checkMappedResults(final List<Sound> results) {
		// assertEquals reports expected vs actual size on failure, unlike
		// assertTrue on a boolean comparison which only reports "expected true".
		assertEquals(3, results.size());
	}
}
| 9,567
|
https://github.com/nathansamson/OMF/blob/master/external/frisbee/imagezip/extfs/linux_types.h
|
Github Open Source
|
Open Source
|
MIT
| 2,015
|
OMF
|
nathansamson
|
C
|
Code
| 34
| 142
|
#ifndef _LINUX_TYPES_H
#define _LINUX_TYPES_H

/*
 * Minimal stand-in for the Linux kernel's <linux/types.h>: maps the
 * kernel-style fixed-width names (__u8/__s8, ...) onto the host's
 * types from <sys/types.h>, so ext2/ext3 on-disk structure definitions
 * can be compiled on non-Linux systems.
 */

#include <sys/types.h>

/* Unsigned fixed-width kernel types. */
typedef u_int8_t __u8;
typedef u_int16_t __u16;
typedef u_int32_t __u32;
typedef u_int64_t __u64;

/* Signed fixed-width kernel types. */
typedef int8_t __s8;
typedef int16_t __s16;
typedef int32_t __s32;
typedef int64_t __s64;

#endif /* _LINUX_TYPES_H */
| 15,146
|
https://github.com/Matheus-Peres/backend/blob/master/shared/jsservice/request.js
|
Github Open Source
|
Open Source
|
MIT
| 2,021
|
backend
|
Matheus-Peres
|
JavaScript
|
Code
| 175
| 724
|
"use strict";
/**
 * Tracks a single in-flight request and answers it over the owning
 * service's messaging channel. Supports one-shot replies ("simple"),
 * streamed multi-part replies ("stream"/"content"/"end"), plus
 * "error" and "reject" terminations. Once finished, every responding
 * method becomes a no-op.
 */
class Request {
	constructor(service, msg) {
		this.service = service;
		this.source = msg.source;
		this.worker = msg.worker;
		this.request = msg.request_id;
		this.type = msg.request_type;
		this.msg = msg;
		this.streaming = false;
		this.alive = true;
	}

	/** Route one response body back to the requester via the service. */
	_dispatch(body) {
		this.service.send(this.source, "response", body, this.worker);
	}

	/** Tear down this request; the original message object is kept. */
	finish() {
		this.alive = false;
		delete this.service;
		delete this.source;
		delete this.worker;
		delete this.request;
		delete this.type;
		delete this.streaming;
	}

	/** Switch into streaming mode and announce it to the requester. */
	openStream() {
		if (!this.alive) { return; }
		this.streaming = true;
		this._dispatch({ response_type: "stream", request_id: this.request });
	}

	/** Report a generic failure and close the request. */
	error() {
		if (!this.alive) { return; }
		this._dispatch({ response_type: "error", request_id: this.request });
		this.finish();
	}

	/** Report a typed rejection (with payload) and close the request. */
	reject(type, args, kwargs) {
		if (!this.alive) { return; }
		this._dispatch({
			response_type: "reject",
			request_id: this.request,
			rejection_type: type,
			args: args,
			kwargs: kwargs
		});
		this.finish();
	}

	/**
	 * Send a payload. In streaming mode this is one "content" chunk and
	 * the request stays open; otherwise it is a "simple" one-shot reply
	 * that closes the request.
	 */
	send(content) {
		if (!this.alive) { return; }
		const wasStreaming = this.streaming;
		this._dispatch({
			response_type: wasStreaming ? "content" : "simple",
			request_id: this.request,
			content: content
		});
		if (!wasStreaming) {
			this.finish();
		}
	}

	/** Terminate a stream and close the request. */
	end() {
		if (!this.alive) { return; }
		this._dispatch({ response_type: "end", request_id: this.request });
		this.finish();
	}
}

module.exports = Request;
| 35,269
|
https://github.com/slavapestov/swift/blob/master/lib/ASTSectionImporter/CMakeLists.txt
|
Github Open Source
|
Open Source
|
Apache-2.0, Swift-exception
| 2,016
|
swift
|
slavapestov
|
CMake
|
Code
| 6
| 46
|
# Builds the swiftASTSectionImporter library from its single source file;
# links only against swiftBasic and depends on the "core" component.
add_swift_library(swiftASTSectionImporter
  ASTSectionImporter.cpp
  LINK_LIBRARIES swiftBasic
  COMPONENT_DEPENDS core)
| 13,261
|
https://github.com/PyTechMike/js-this-car/blob/master/test/spec.test.js
|
Github Open Source
|
Open Source
|
MIT
| null |
js-this-car
|
PyTechMike
|
JavaScript
|
Code
| 496
| 1,795
|
// Jasmine spec for the "car" module: verifies the public interface shape,
// the documented default values, passenger capacity rules, and driving
// behaviour (speed changes, limits, and driver requirements).
import car from '../src/js/car';

describe('Module "car"', () => {
  beforeEach(() => {
    // Spy while calling through: behaviour is unchanged, but call counts
    // (e.g. "land was not called") can still be asserted.
    spyOn(car, 'drive').and.callThrough();
    spyOn(car, 'put').and.callThrough();
    spyOn(car, 'land').and.callThrough();
  });

  describe('has a correct interface with a property', () => {
    it('"color" that is a String', () => {
      expect(car.color).toEqual(jasmine.any(String));
    });
    it('"doors" that is a Number', () => {
      expect(car.doors).toEqual(jasmine.any(Number));
    });
    it('"passengers" that is a Array', () => {
      expect(car.passengers).toEqual(jasmine.any(Array));
    });
    it('"seats" that is a Number', () => {
      expect(car.seats).toEqual(jasmine.any(Number));
    });
    it('"maxSpeed" that is a Number', () => {
      expect(car.maxSpeed).toEqual(jasmine.any(Number));
    });
    it('"defaultSpeed" that is a Number', () => {
      expect(car.defaultSpeed).toEqual(jasmine.any(Number));
    });
    it('"speed" that is a Number', () => {
      expect(car.speed).toEqual(jasmine.any(Number));
    });
    it('"drive" that is a Function', () => {
      expect(car.drive).toEqual(jasmine.any(Function));
    });
    it('"put" that is a Function', () => {
      expect(car.put).toEqual(jasmine.any(Function));
    });
    it('"land" that is a Function', () => {
      expect(car.land).toEqual(jasmine.any(Function));
    });
  });

  describe('by default has a property', () => {
    it('"color" equal "white"', () => {
      expect(car.color).toEqual('white');
    });
    it('"doors" equal 4', () => {
      expect(car.doors).toEqual(4);
    });
    it('"passengers" that is empty', () => {
      expect(car.passengers).toEqual([]);
    });
    it('"seats" equal 4', () => {
      expect(car.seats).toEqual(4);
    });
    it('"maxSpeed" equal 100', () => {
      expect(car.maxSpeed).toEqual(100);
    });
    it('"defaultSpeed" equal 60', () => {
      expect(car.defaultSpeed).toEqual(60);
    });
    it('"speed" equal 0', () => {
      expect(car.speed).toEqual(0);
    });
  });

  // NOTE(review): "pessangers" is a typo for "passengers", but it is a
  // runtime string (the suite description), so it is left untouched here.
  describe('pessangers', () => {
    afterEach(() => {
      // NOTE(review): these reset values (seats 8, maxSpeed 200,
      // defaultSpeed 150) differ from the defaults asserted above
      // (4 / 100 / 60) — presumably deliberate widening for the capacity
      // and speed specs; confirm against the car module.
      car.speed = 0;
      car.seats = 8;
      car.maxSpeed = 200;
      car.defaultSpeed = 150;
      car.passengers.length = 0;
    });
    it('can be put', () => {
      car.put();
      car.put();
      expect(car.passengers.length).toEqual(2);
    });
    it('can be landed', () => {
      car.put();
      car.put();
      car.land();
      expect(car.passengers.length).toEqual(1);
    });
    it('can\'t be put more than seats', () => {
      // Try to over-fill: put twice as many passengers as there are seats.
      let times = car.seats * 2;
      while (times--) {
        car.put();
      }
      expect(car.passengers.length).toEqual(car.seats);
    });
    it('are not landed if one more passengers are put than possible', () => {
      let times = car.seats * 2;
      while (times--) {
        car.put();
      }
      // Over-filling must be rejected by put() itself, not by landing.
      expect(car.land).not.toHaveBeenCalled();
    });
  });

  describe('when driven', () => {
    afterEach(() => {
      // Same widened reset values as the passenger suite above.
      car.speed = 0;
      car.seats = 8;
      car.maxSpeed = 200;
      car.defaultSpeed = 150;
      car.passengers.length = 0;
    });
    it('can\'t drive without a driver', () => {
      const speed = 20;
      car.drive(speed);
      expect(car.speed).toEqual(0);
    });
    it('can drive with a driver', () => {
      car.put();
      car.drive();
      expect(car.speed).toBeGreaterThan(0);
    });
    it('changes speed', () => {
      const speed = 85;
      car.put();
      car.drive(speed);
      expect(car.speed).toEqual(speed);
    });
    it('can be stopped', () => {
      const speed = 85;
      car.put();
      car.drive(speed);
      car.drive(0);
      expect(car.speed).toEqual(0);
    });
    it('doesn\'t change speed without a driver', () => {
      const speed = 85;
      car.drive(speed);
      expect(car.speed).toEqual(0);
    });
    it('doesn\'t drive without a driver and with to high speed set', () => {
      const speed = 185;
      car.drive(speed);
      expect(car.speed).toEqual(0);
    });
    it('doesn\'t change speed if additional passenger is put', () => {
      const speed = 85;
      car.put();
      car.drive(speed);
      car.put();
      expect(car.speed).toEqual(speed);
    });
    it('doesn\'t change speed if some passenger is landed', () => {
      const speed = 85;
      car.put();
      car.put();
      car.drive(speed);
      car.land();
      expect(car.speed).toEqual(speed);
    });
    it('has a default speed', () => {
      car.put();
      car.drive();
      expect(car.speed).toEqual(car.defaultSpeed);
    });
    it('is limited to max speed', () => {
      const speed = car.maxSpeed + 85;
      car.put();
      car.drive(speed);
      expect(car.speed).toEqual(car.maxSpeed);
    });
    it('stopped if all passengers are landed', () => {
      const speed = 20;
      car.put();
      car.drive(speed);
      car.land();
      expect(car.speed).toEqual(0);
    });
  });
});
| 25,261
|
https://github.com/dgouvea/code-template-repository/blob/master/repository-generator/src/main/java/coderepository/MavenDependencyManager.java
|
Github Open Source
|
Open Source
|
MIT
| 2,016
|
code-template-repository
|
dgouvea
|
Java
|
Code
| 81
| 447
|
package coderepository;

import java.util.Objects;

/**
 * {@link DependencyManager} implementation for Maven projects: dependencies
 * are rendered as {@code <dependency>} XML fragments and located in
 * {@code pom.xml} via the {@code <!-- dependencies -->} anchor comment.
 */
public class MavenDependencyManager extends DependencyManager {

	@Override
	public String getName() {
		return "Maven";
	}

	@Override
	public String getSettingFileName() {
		return "pom.xml";
	}

	/**
	 * Renders a dependency as a {@code <dependency>} XML fragment, emitting
	 * only those coordinates that are present and non-empty.
	 *
	 * @param dependency the dependency to render
	 * @return the XML snippet, tab-indented for insertion into a pom.xml
	 */
	@Override
	protected String toString(Dependency dependency) {
		StringBuilder xml = new StringBuilder();
		xml.append("\n\t\t<dependency>");
		// One helper call per coordinate replaces four copies of the same
		// null/empty guard + append sequence.
		appendElement(xml, "groupId", dependency.getGroup());
		appendElement(xml, "artifactId", dependency.getArtifact());
		appendElement(xml, "version", dependency.getVersion());
		appendElement(xml, "scope", dependency.getScope());
		xml.append("\n\t\t</dependency>");
		return xml.toString();
	}

	/** Appends {@code <tag>value</tag>} when the value is non-null and non-empty. */
	private static void appendElement(StringBuilder xml, String tag, String value) {
		if (Objects.nonNull(value) && !value.isEmpty()) {
			xml.append("\n\t\t\t<").append(tag).append(">").append(value).append("</").append(tag).append(">");
		}
	}

	@Override
	protected String getAnchor() {
		return "<!-- dependencies -->";
	}
}
| 30,911
|
https://github.com/yuwenbai/rex_study/blob/master/01/Assets/Editor/XUPort/Mods/Xcode/Categories/UIImage+ScaleImage.h
|
Github Open Source
|
Open Source
|
MIT
| 2,018
|
rex_study
|
yuwenbai
|
Objective-C
|
Code
| 37
| 156
|
//
//  UIImage+ScaleImage.h
//  Unity-iPhone
//
//  Created by kang on 2017/9/14.
//
//

#import <UIKit/UIKit.h>

// Category adding image-scaling helpers used by the Unity iOS export layer.
@interface UIImage (ScaleImage)

/**
 Generate a thumbnail for WeChat sharing.

 @return the thumbnail encoded as binary image data
 */
- (NSData *) getImageData;

// Returns a copy of `image` scaled by the factor `scaleSize`.
+ (UIImage *)scaleImage:(UIImage *)image toScale:(float)scaleSize;

// Returns the app's launch image — presumably resolved by name from the
// launch-image set; confirm in the implementation file.
+ (UIImage *)getLaunchImageName;

@end
| 6,103
|
https://github.com/macsux/steeltoe/blob/master/src/Messaging/src/RabbitMQ/Data/MessagePropertiesBuilder.cs
|
Github Open Source
|
Open Source
|
Apache-2.0
| null |
steeltoe
|
macsux
|
C#
|
Code
| 170
| 371
|
// Copyright 2017 the original author or authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
namespace Steeltoe.Messaging.Rabbit.Data
{
    /// <summary>
    /// Fluent builder for <see cref="MessageProperties"/> instances. A thin
    /// specialization of <see cref="AbstractMessageBuilder{T}"/> exposing
    /// static factories for starting from an empty, wrapped, or cloned
    /// property set.
    /// </summary>
    public class MessagePropertiesBuilder : AbstractMessageBuilder<MessageProperties>
    {
        /// <summary>Creates a builder backed by a new, default property set.</summary>
        public static MessagePropertiesBuilder NewInstance()
        {
            return new MessagePropertiesBuilder();
        }

        /// <summary>
        /// Creates a builder that wraps the given properties instance directly
        /// (mutations go to that instance; no copy is made).
        /// </summary>
        public static MessagePropertiesBuilder FromProperties(MessageProperties properties)
        {
            return new MessagePropertiesBuilder(properties);
        }

        /// <summary>
        /// Creates a builder seeded with a copy of the given properties, so the
        /// original instance is left untouched.
        /// </summary>
        public static MessagePropertiesBuilder FromClonedProperties(MessageProperties properties)
        {
            var builder = NewInstance();
            return builder.CopyProperties(properties);
        }

        private MessagePropertiesBuilder()
        {
        }

        private MessagePropertiesBuilder(MessageProperties properties)
            : base(properties)
        {
        }

        /// <summary>
        /// Copies the given properties into this builder; shadows the base
        /// method so the builder type itself is returned for chaining.
        /// </summary>
        public new MessagePropertiesBuilder CopyProperties(MessageProperties properties)
        {
            base.CopyProperties(properties);
            return this;
        }

        /// <summary>Returns the accumulated <see cref="MessageProperties"/>.</summary>
        public override MessageProperties Build()
        {
            return Properties;
        }
    }
}
| 34,663
|
https://github.com/Kney-Delach/OpenGL-Rendering/blob/master/OpenGL-Rendering-Engine/Sandbox/Resources/Environment_Scene/Shaders/Explosion/ExplosionGeometry.glsl
|
Github Open Source
|
Open Source
|
MIT
| null |
OpenGL-Rendering
|
Kney-Delach
|
GLSL
|
Code
| 214
| 736
|
/***************************************************************************
* Filename : ExplosionGeometry.glsl
* Name : Ori Lazar
* Date : 26/10/2019
* Description : Geometry shader used to explode a spaceship for the animation sequence.
.---.
.'_:___".
|__ --==|
[ ] :[|
|__| I=[|
/ / ____|
|-/.____.'
/___\ /___\
***************************************************************************/
#version 330

layout(triangles) in;
// Only the three vertices of the incoming triangle are ever emitted, so the
// output space can be declared tightly (the original reserved 1024 vertices,
// far more than is used, which only wastes geometry-output resources).
layout(triangle_strip, max_vertices = 3) out;

// Per-vertex attributes forwarded from the vertex shader.
in ShaderData
{
	vec3 v_FragPosition;
	vec3 v_Normal;
	vec2 v_TexCoord;
} IN[];

// Per-vertex attributes forwarded to the fragment shader.
out ShaderData
{
	vec3 v_FragPosition;
	vec3 v_Normal;
	vec2 v_TexCoord;
} OUT;

uniform float u_Time;

// Displaces a vertex along the face normal once the animation clock passes
// 118.5 (the spaceship-destruction cue); before that the vertex is unchanged.
// The displacement grows with (u_Time - 118.5), so triangles fly apart.
vec4 explode(vec4 position, vec3 normal)
{
	float magnitude = 6.0;
	vec3 direction = normal * (((u_Time - 118.5f) + 1.0) / 2.0) * magnitude;
	if (u_Time > 118.5f)
		return position + vec4(direction, 0.0);
	else
		return position;
}

// Face normal of the incoming triangle (normalized cross product of two edges).
vec3 GetNormal()
{
	vec3 a = vec3(gl_in[0].gl_Position) - vec3(gl_in[1].gl_Position);
	vec3 b = vec3(gl_in[2].gl_Position) - vec3(gl_in[1].gl_Position);
	return normalize(cross(a, b));
}

void main()
{
	vec3 normal = GetNormal();

	// Re-emit the (possibly displaced) triangle, forwarding each vertex's
	// attributes unchanged. Replaces three copy-pasted emit blocks.
	for (int i = 0; i < 3; ++i)
	{
		gl_Position = explode(gl_in[i].gl_Position, normal);
		OUT.v_TexCoord = IN[i].v_TexCoord;
		OUT.v_FragPosition = IN[i].v_FragPosition;
		OUT.v_Normal = IN[i].v_Normal;
		EmitVertex();
	}
	EndPrimitive();
}
| 4,285
|
https://github.com/danielsogl/ngx-gallery/blob/master/lib/core/src/services/gallery-ref.ts
|
Github Open Source
|
Open Source
|
MIT
| 2,018
|
ngx-gallery
|
danielsogl
|
TypeScript
|
Code
| 435
| 1,262
|
import { BehaviorSubject } from 'rxjs/BehaviorSubject';
import { filter } from 'rxjs/operators/filter';
import { Observable } from 'rxjs/Observable';

import { defaultConfig, defaultState } from '../utils/gallery.default';
import {
  GalleryState,
  GalleryConfig,
  GalleryAction,
  GalleryItem
} from '../models';

/**
 * Holds the state and configuration of one gallery instance: imperative
 * item management and navigation (add/remove/load/set/next/prev) plus
 * observable streams of state changes for components to subscribe to.
 */
export class GalleryRef {

  /** Emits the full gallery state after every change. */
  state$: BehaviorSubject<GalleryState>;

  /** Emits the effective (merged) configuration after every change. */
  config$: BehaviorSubject<GalleryConfig>;

  constructor(
    public config: GalleryConfig = defaultConfig,
    public state: GalleryState = defaultState
  ) {
    this.state$ = new BehaviorSubject<GalleryState>(state);
    // Seeded with the defaults; setConfig() below immediately pushes the
    // merged config, so subscribers never observe a partial configuration.
    this.config$ = new BehaviorSubject<GalleryConfig>(defaultConfig);
    this.setConfig(config);
  }

  /**
   * Set gallery config: merges over the defaults and the current config,
   * then publishes the result on `config$`.
   * @param config - Configuration overrides
   */
  setConfig(config: GalleryConfig) {
    this.config = { ...defaultConfig, ...this.config, ...config };
    this.config$.next(this.config);
  }

  /**
   * Set gallery state: merges into the current state, then publishes the
   * result on `state$`.
   * @param state - State fields to overwrite
   */
  setState(state: GalleryState) {
    this.state = { ...this.state, ...state };
    this.state$.next(this.state);
  }

  /** Add gallery item
   * @param item - Gallery item object
   * @param active - Set the new item as current slide
   */
  add(item: GalleryItem, active?: boolean) {
    const items = [...this.state.items, item];
    this.setState({
      action: GalleryAction.ITEMS_CHANGED,
      items: items,
      hasNext: items.length > 1,
      currIndex: active ? items.length - 1 : this.state.currIndex
    });
  }

  /** Remove gallery item
   * @param i - Item index
   */
  remove(i: number) {
    // Splice item i out without mutating the existing array.
    const items = [
      ...this.state.items.slice(0, i),
      ...this.state.items.slice(i + 1, this.state.items.length)
    ];
    this.setState({
      action: GalleryAction.ITEMS_CHANGED,
      items: items,
      hasNext: items.length > 1,
      hasPrev: i > 0
    });
  }

  /**
   * Load items and reset the state.
   * NOTE(review): `currIndex` is not reset here — presumably callers load
   * before navigating; confirm if loading fewer items than the current
   * index can occur.
   * @param items - Gallery images data
   */
  load(items: GalleryItem[]) {
    if (items) {
      this.setState({
        action: GalleryAction.ITEMS_CHANGED,
        items: items,
        hasNext: items.length > 1,
        hasPrev: false
      });
    }
  }

  /**
   * Set the active item; no-op when `i` is already the current index.
   * @param i - Index of the item to activate
   */
  set(i: number) {
    if (i !== this.state.currIndex) {
      this.setState({
        action: GalleryAction.INDEX_CHANGED,
        currIndex: i,
        hasNext: i < this.state.items.length - 1,
        hasPrev: i > 0
      });
    }
  }

  /**
   * Next item (wraps to the first item when `config.loop` is set).
   */
  next() {
    if (this.state.hasNext) {
      this.set(this.state.currIndex + 1);
    } else if (this.config.loop) {
      this.set(0);
    }
  }

  /**
   * Prev item (wraps to the last item when `config.loop` is set).
   */
  prev() {
    if (this.state.hasPrev) {
      this.set(this.state.currIndex - 1);
    } else if (this.config.loop) {
      this.set(this.state.items.length - 1);
    }
  }

  /**
   * Reset gallery to initial state
   */
  reset() {
    this.setState(defaultState);
  }

  /**
   * Destroy GalleryRef (for internal use only) — completes both subjects,
   * releasing all subscribers.
   */
  destroy() {
    this.state$.complete();
    this.config$.complete();
  }

  /** Gallery Events */

  /**
   * Stream that emits when gallery is initialized/reset
   */
  initialized(): Observable<GalleryState> {
    return this.state$.pipe(
      filter(
        (state: GalleryState) => state.action === GalleryAction.INITIALIZED
      )
    );
  }

  /**
   * Stream that emits when items is changed (items loaded, item added, item removed)
   */
  itemsChanged(): Observable<GalleryState> {
    return this.state$.pipe(
      filter(
        (state: GalleryState) => state.action === GalleryAction.ITEMS_CHANGED
      )
    );
  }

  /**
   * Stream that emits when current item is changed
   */
  indexChanged(): Observable<GalleryState> {
    return this.state$.pipe(
      filter(
        (state: GalleryState) => state.action === GalleryAction.INDEX_CHANGED
      )
    );
  }
}
| 16,316
|
https://github.com/SamuraiCrow/RetroMode.library-mos/blob/master/retromode_files/main/retroDrawVideo.c
|
Github Open Source
|
Open Source
|
MIT
| 2,020
|
RetroMode.library-mos
|
SamuraiCrow
|
C
|
Code
| 1,944
| 5,637
|
/* :ts=4
* $VER: retroDrawVideo.c $Revision$ (16-Oct-2017)
*
* This file is part of retromode.
*
* Copyright (c) 2017 LiveForIt Software.
* MIT License.
*
* $Id$
*
* $Log$
*
*
*/
#include <stdlib.h>
#include <stdio.h>
#include <stdbool.h>
#include <exec/exec.h>
#include <proto/exec.h>
#include <dos/dos.h>
#include <exec/types.h>
#include <libraries/retromode.h>
#include <proto/retromode.h>
#include <stdarg.h>
#include <math.h>
#include "libbase.h"
/****** retromode/main/retroDrawVideo ******************************************
*
* NAME
* retroDrawVideo -- Description
*
* SYNOPSIS
* void retroDrawVideo(struct retroVideo * video);
*
* FUNCTION
*
* INPUTS
* video -
*
* RESULT
* This function does not return a result
*
* EXAMPLE
*
* NOTES
*
* BUGS
*
* SEE ALSO
*
*****************************************************************************
*
*/
/*
 * Clear the renderer callback of both layers on every scanline of the
 * video, so no stale draw routine runs until scanlines are re-registered
 * (see set_scanline()/set_no_scanline()).
 */
void resetScanlines(struct retroVideo * video)
{
	struct retroParallax *line = video -> scanlines;
	int beamY;
	for (beamY=0; beamY < video-> height; beamY++)
	{
		line -> scanline[0].mode = NULL;
		line -> scanline[1].mode = NULL;
		line ++;
	}
}
extern void draw_lowred_pixeled_color( struct retroScanline *line, int beamY, unsigned int *video_buffer );
extern void draw_lowred_emulate_color_changes( struct retroScanline *line, int beamY, unsigned int *video_buffer );
extern void draw_lowred_ham6( struct retroScanline *line, int beamY, unsigned int *video_buffer );
extern void draw_lowred_ham8( struct retroScanline *line, int beamY, unsigned int *video_buffer );
extern void draw_hires( struct retroScanline *line, int beamY, unsigned int *video_buffer );
extern void draw_hires_ham6( struct retroScanline *line, int beamY, unsigned int *video_buffer );
extern void draw_hires_ham8( struct retroScanline *line, int beamY, unsigned int *video_buffer );
extern void draw_transparent_lowred_pixeled_color( struct retroScanline *line, int beamY, unsigned int *video_buffer );
extern void draw_transparent_lowred_emulate_color_changes( struct retroScanline *line, int beamY, unsigned int *video_buffer );
extern void draw_transparent_lowred_ham6( struct retroScanline *line, int beamY, unsigned int *video_buffer );
extern void draw_transparent_lowred_ham8( struct retroScanline *line, int beamY, unsigned int *video_buffer );
extern void draw_transparent_hires( struct retroScanline *line, int beamY, unsigned int *video_buffer );
/* No-op scanline renderer, used for rows/layers with nothing to draw. */
void draw_none( struct retroScanline *line, int beamY, unsigned int *video_buffer )
{
}
/*
 * Advance every time-based palette effect registered on a screen by one
 * frame: "flash" entries cycle a single palette colour through a table of
 * RGB+delay steps, and "shift" entries shift or rotate a contiguous range
 * of palette colours (classic colour-cycling; presumably mirrors the AMOS
 * Flash/Shift commands — confirm against the library docs).
 */
static void do_all_screen_color_effects( struct retroScreen *screen)
{
	struct retroFlashTable **flash;
	struct retroFlashTable *_flash;
	struct retroShiftColors **shift;
	struct retroShiftColors *_shift;
	struct retroRGB temp;
	int color;
	int from_color,to_color;

	/* Flash effects: step to the next table entry once the current
	   entry's delay has elapsed, and write its RGB into the palette. */
	for (flash = screen -> allocatedFlashs ; flash < screen -> allocatedFlashs_end; flash ++)
	{
		_flash = *flash;
		if (_flash -> colors>0)
		{
			_flash -> countDelay ++;
			if (_flash -> countDelay > _flash -> table[ _flash -> index ].delay)
			{
				_flash -> countDelay = 0;
				/* Wrap back to the first table entry after the last. */
				_flash -> index = (_flash -> index+1) % _flash -> colors;
				screen -> rowPalette[ _flash -> color & 255 ] = _flash -> table[ _flash -> index ].rgb ;
			}
		}
	}

	/* Shift effects: move the palette range by one slot per expired delay. */
	for (shift = screen -> allocatedShifts ; shift < screen -> allocatedShifts_end; shift ++)
	{
		_shift = *shift;
		_shift -> countDelay ++;
		if ( _shift -> countDelay > _shift -> delay )
		{
			_shift -> countDelay = 0;
			from_color = _shift -> firstColor;
			to_color = _shift -> lastColor;

			switch (_shift -> flags)
			{
				/* 0: shift towards higher indexes (first colour duplicated) */
				case 0:
					for (color = to_color; color > from_color; color -- ) screen->rowPalette[color] = screen->rowPalette[color-1];
					break;
				/* 1: rotate towards higher indexes (last colour wraps to first) */
				case 1:
					temp = screen -> rowPalette[to_color];
					for (color = to_color; color > from_color; color -- ) screen->rowPalette[color] = screen->rowPalette[color-1];
					screen -> rowPalette[ from_color ] = temp;
					break;
				/* 2: shift towards lower indexes (last colour duplicated) */
				case 2:
					for (color = from_color+1; color <= to_color; color ++ ) screen->rowPalette[color-1] = screen->rowPalette[color];
					break;
				/* 3: rotate towards lower indexes (first colour wraps to last) */
				case 3:
					temp = screen -> rowPalette[from_color];
					for (color = from_color+1; color <= to_color; color ++ ) screen->rowPalette[color-1] = screen->rowPalette[color];
					screen -> rowPalette[ to_color ] = temp;
					break;
			}
		}
	}
}
/*
 * Apply one "rainbow" (copper-list style) effect to a scanline's palette:
 * the affected colour for this row is taken from the rainbow table, indexed
 * by row number plus a scroll offset, wrapping at the table size.
 */
static void copper_to_scanline( struct retroRainbow *rainbow, int rainbow_scanline, struct retroScanline *scanline )
{
	scanline->rowPalette[rainbow ->color] = rainbow->table[ (rainbow_scanline + rainbow -> offset) % rainbow->tableSize];
}
/*
 * Undo any rainbow colour substitution on a scanline: for each of the
 * three rainbow slots that has a table, restore the affected palette
 * entry from the scanline's original palette.
 */
static void color_reset( struct retroVideo * video, struct retroScanline *scanline)
{
	int n;
	for ( n=0; n<3;n++ )
	{
		if (video->rainbow[n].table)
		{
			scanline -> rowPalette[video->rainbow[n].color] = scanline -> orgPalette[video->rainbow[n].color];
		}
	}
}
/*
 * Initialise layer `n` of a parallax scanline as an empty row: it spans
 * the full video width, carries the screen's palettes, has no bitmap data,
 * and uses the no-op renderer (draw_none) so nothing is drawn.
 */
void set_no_scanline( int n, struct retroParallax *line,struct retroScreen * screen, struct retroVideo * video )
{
	struct retroScanline *scanline = &line -> scanline[n];
	scanline -> beamStart = 0;
	scanline -> videoWidth = video -> width;
	scanline -> screen = screen;
	scanline -> pixels = screen -> displayWidth;
	scanline -> data[0] = NULL;
	scanline -> data[1] = NULL;
	scanline -> mode = draw_none;
	scanline -> rowPalette = screen -> rowPalette;
	scanline -> orgPalette = screen -> orgPalette;
}
void set_scanline( int n, struct retroParallax *line,struct retroScreen * screen, struct retroVideo * video, int offset)
{
int videomode = screen -> videomode & ~retroInterlaced;
struct retroScanline *scanline = &line -> scanline[n];
scanline -> beamStart = screen -> scanline_x;
scanline -> videoWidth = video -> width;
scanline -> screen = screen;
scanline -> pixels = screen -> displayWidth;
if (screen -> Memory[1])
{
scanline -> data[0] = screen -> Memory[0] + offset;
scanline -> data[1] = screen -> Memory[1] + offset;
}
else
{
scanline -> data[0] = screen -> Memory[0] + offset;
scanline -> data[1] = NULL;
}
scanline -> mode = NULL;
scanline -> rowPalette = screen -> rowPalette;
scanline -> orgPalette = screen -> orgPalette;
if (screen -> dualScreen) // has dualScreen, so this need to be transparent.
{
switch (videomode)
{
case retroLowres:
scanline -> mode = draw_transparent_lowred_emulate_color_changes;
break;
case retroLowres|retroHam6:
scanline -> mode = draw_transparent_lowred_ham6;
break;
case retroLowres|retroHam8:
scanline -> mode = draw_transparent_lowred_ham8;
break;
case retroLowres_pixeld:
scanline -> mode = draw_transparent_lowred_pixeled_color;
break;
case retroLowres_pixeld|retroHam6:
scanline -> mode = draw_transparent_lowred_ham6;
break;
case retroLowres_pixeld|retroHam8:
scanline -> mode = draw_transparent_lowred_ham8;
break;
case retroHires:
scanline -> mode = draw_transparent_hires;
break;
case retroHires|retroHam6:
scanline -> mode = draw_transparent_hires;
break;
case retroHires|retroHam8:
scanline -> mode = draw_transparent_hires;
break;
}
}
else
{
switch (videomode)
{
case retroLowres:
scanline -> mode = draw_lowred_emulate_color_changes;
break;
case retroLowres|retroHam6:
scanline -> mode = draw_lowred_ham6;
break;
case retroLowres|retroHam8:
scanline -> mode = draw_lowred_ham8;
break;
case retroLowres_pixeld:
scanline -> mode = draw_lowred_pixeled_color;
break;
case retroLowres_pixeld|retroHam6:
scanline -> mode = draw_lowred_ham6;
break;
case retroLowres_pixeld|retroHam8:
scanline -> mode = draw_lowred_ham8;
break;
case retroHires:
scanline -> mode = draw_hires;
break;
case retroHires|retroHam6:
scanline -> mode = draw_hires_ham6;
break;
case retroHires|retroHam8:
scanline -> mode = draw_hires_ham8;
break;
}
}
}
// do not change, its not a option.
#define is_displayed 0
void Screen_To_Scanlines( struct retroScreen * screen, struct retroVideo * video )
{
int n;
int scanline_x,scanline_y,screen_y ;
int hw_start ;
int hw_end ;
int hw_y ;
int offset;
int displayed ;
int physical_vfacor ;
if (screen -> flags & retroscreen_flag_hide) return;
scanline_x = screen -> scanline_x;
scanline_y = screen -> scanline_y;
for (n=0;n<=1;n++) // dual screen.
{
screen_y = 0;
hw_start = 0;
hw_end = 0;
hw_y = 0;
displayed = (screen ->videomode & retroInterlaced) ? 0 : 1;
physical_vfacor = (screen ->videomode & retroInterlaced) ? 1 : 2;
hw_start = scanline_y;
hw_end = hw_start + (screen -> displayHeight * physical_vfacor );
if (hw_end<0) return; // outside of scope.
if (hw_start> ((int) video->height)) return; // outside of scope.
if (hw_start<0)
{
screen_y = -hw_start / physical_vfacor;
hw_start = 0;
}
if (hw_end>video->height) hw_end = video->height;
screen_y += screen -> offset_y;
for (hw_y = hw_start; hw_y<hw_end; hw_y++)
{
// first hw line not displayed, 2en line is displayed
if ((hw_y & displayed) == is_displayed)
{
if ((screen_y>=0) && (screen_y <= screen -> realHeight))
{
offset = (screen -> bytesPerRow * screen_y) + screen -> offset_x;
video -> scanlines[ hw_y ].beamStart = scanline_x;
set_scanline( n, &video -> scanlines[ hw_y ], screen, video, offset );
}
screen_y ++;
}
else
{
set_no_scanline( n, &video -> scanlines[ hw_y ], screen, video );
}
}
if (screen -> dualScreen == NULL) break;
screen = screen -> dualScreen;
}
}
bool inside_screen(struct retroScreen *screen, int scanline_y)
{
int physical_vfacor = (screen ->videomode & retroInterlaced) ? 1 : 2;
int hw_start = screen -> scanline_y;
int hw_end = hw_start + (screen -> displayHeight * physical_vfacor );
if (screen -> flags & retroscreen_flag_hide) return false; // if screen is hidden then we do not need to worry about it..
if ((hw_start<=scanline_y) && (hw_end>=scanline_y)) return true;
return false;
}
bool inside_screens( struct retroVideo * video, struct retroScreen *this_screen, int scanline_y)
{
bool this_screen_found = false;
struct retroScreen **screen_item;
for (screen_item = video -> attachedScreens; screen_item < video -> attachedScreens_end; screen_item++)
{
if (this_screen_found == false)
{
if (*screen_item == this_screen ) this_screen_found = true;
}
else // we only need to check if screen after is inside.. (we are checking if we render on top...)
{
if (inside_screen(*screen_item, scanline_y)) return true;
}
}
return false;
}
void refresh_screen_scanlines( struct retroScreen * screen, struct retroVideo * video )
{
int n;
int scanline_x,scanline_y,screen_y ;
int hw_start ;
int hw_end ;
int hw_y ;
int offset;
int displayed ;
int physical_vfacor ;
if (screen -> flags & retroscreen_flag_hide) return;
scanline_x = screen -> scanline_x;
scanline_y = screen -> scanline_y;
for (n=0;n<=1;n++)
{
screen_y = 0;
hw_start = 0;
hw_end = 0;
hw_y = 0;
displayed = (screen ->videomode & retroInterlaced) ? 0 : 1;
physical_vfacor = (screen ->videomode & retroInterlaced) ? 1 : 2;
hw_start = scanline_y;
hw_end = hw_start + (screen -> displayHeight * physical_vfacor );
if (hw_end<0) return; // outside of scope.
if (hw_start> ((int) video->height)) return; // outside of scope.
if (hw_start<0)
{
screen_y = -hw_start / physical_vfacor;
hw_start = 0;
}
if (hw_end>video->height) hw_end = video->height;
screen_y += screen -> offset_y;
for (hw_y = hw_start; hw_y<hw_end; hw_y++)
{
if ((hw_y & displayed) == is_displayed)
{
if (inside_screens( video, screen, hw_y ) == false )
{
if ((screen_y>=0) && (screen_y <= screen -> realHeight))
{
offset = (screen -> bytesPerRow * screen_y) + screen -> offset_x;
video -> scanlines[ hw_y ].beamStart = scanline_x;
set_scanline( n, &video -> scanlines[ hw_y ], screen, video, offset );
}
}
screen_y ++;
}
}
if (screen -> dualScreen == NULL) break;
screen = screen -> dualScreen;
}
}
void update_all_scanlines( struct retroVideo * video )
{
struct retroScreen **screen_item;
for (screen_item = video -> attachedScreens; screen_item < video -> attachedScreens_end; screen_item++)
{
Screen_To_Scanlines( *screen_item, video );
}
}
void update_some_scanlines( struct retroVideo * video )
{
struct retroScreen **screen_item;
for (screen_item = video -> attachedScreens; screen_item < video -> attachedScreens_end; screen_item++)
{
if ( (*screen_item) -> refreshScanlines == TRUE)
{
refresh_screen_scanlines( *screen_item, video );
(*screen_item) -> refreshScanlines = FALSE;
}
}
}
void _retromode_retroDrawVideo(struct RetroModeIFace *Self, struct retroVideo * video)
{
struct retroParallax *line;
struct retroScreen **screen_item;
unsigned int *video_buffer;
int beamY;
int intsPerRow = video -> BytesPerRow / 4;
struct retroRainbow *compressed_rainbow_table[4];
struct retroRainbow **compressed_rainbow_table_end;
struct retroRainbow **rainbow_ptr;
int n;
BOOL coopered = FALSE;
BOOL *coopered_last = NULL;
int rainStart, rainEnd;
if (video -> refreshAllScanlines == TRUE)
{
video -> refreshAllScanlines = FALSE;
resetScanlines(video);
update_all_scanlines( video);
}
if (video -> refreshSomeScanlines == TRUE)
{
video -> refreshSomeScanlines = FALSE;
update_some_scanlines( video);
}
// only allocated rainbow tables are in the compressed table
// allowing me skip validating in main loop.
compressed_rainbow_table_end = compressed_rainbow_table;
for ( n = 0; n <= 3; n++)
{
if (video->rainbow[n].table)
{
*compressed_rainbow_table_end++ = &video->rainbow[n];
}
}
for ( n=1; n>=0; n-- )
{
line = video -> scanlines;
video_buffer = video -> Memory;
for (beamY=0; beamY < video-> height; beamY++)
{
if (line -> scanline[n].mode != NULL)
{
coopered = FALSE;
for ( rainbow_ptr = compressed_rainbow_table; rainbow_ptr < compressed_rainbow_table_end; rainbow_ptr++)
{
rainStart = ((*rainbow_ptr) -> verticalOffset * 2);
rainEnd = rainStart + ((*rainbow_ptr) -> height * 2) ;
if (( beamY > rainStart) && ( beamY < rainEnd ))
{
copper_to_scanline( *rainbow_ptr, (beamY - rainStart) / 2 , &line -> scanline[0] );
coopered = TRUE;
}
}
coopered_last = line -> scanline[n].screen ? &(line ->scanline[n].screen -> coopered_last) : NULL;
if (coopered_last) // have pointer...
{
if ((coopered == FALSE)&&( *coopered_last == TRUE))
{
color_reset( video, &line -> scanline[n] );
}
line -> scanline[n].mode( &line -> scanline[n], beamY, video_buffer );
*coopered_last = coopered;
}
}
video_buffer += intsPerRow; // next line
line ++;
}
}
for (screen_item = video -> attachedScreens; screen_item < video -> attachedScreens_end; screen_item++)
{
do_all_screen_color_effects(*screen_item);
}
}
| 41,180
|
https://github.com/steveharter/corefxlab/blob/master/src/System.Text.JsonLab.Serialization/System/Text/Json/Serialization/Policies/EnumerableConverterAttribute.cs
|
Github Open Source
|
Open Source
|
MIT
| 2,019
|
corefxlab
|
steveharter
|
C#
|
Code
| 79
| 170
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections;
namespace System.Text.Json.Serialization.Policies
{
[AttributeUsage(AttributeTargets.Assembly | AttributeTargets.Class | AttributeTargets.Field | AttributeTargets.Property, AllowMultiple = true)]
public abstract class EnumerableConverterAttribute : Attribute
{
public EnumerableConverterAttribute() { }
public Type EnumerableType { get; protected set; }
public abstract IEnumerable CreateFromList(Type elementType, IList sourceList);
}
}
| 13,800
|
https://github.com/doc22940/rreusser.github.io/blob/master/src/lib/default-idyll-components/menu.js
|
Github Open Source
|
Open Source
|
MIT
| 2,020
|
rreusser.github.io
|
doc22940
|
JavaScript
|
Code
| 82
| 393
|
import React from 'react';
import classNames from 'classnames';
class Menu extends React.Component {
constructor (props) {
super(props);
this.state = {
expanded: false
};
this.handleClick = this.handleClick.bind(this);
}
handleClick () {
this.setState({expanded: !this.state.expanded});
}
render () {
return (
<nav className={classNames("menu", {"menu--expanded": this.state.expanded})}>
<button className="menu__thumb" onClick={this.handleClick}>
<span className="menu__hamburger"></span>
<span className="menu__hamburger"></span>
<span className="menu__hamburger"></span>
</button>
<div className="menu__content">
<div className="menu__heading">
<a href="/">rreusser.github.io</a>
</div>
<div className="menu__items">
<a className="menu__item" href="/sketches/">Sketches</a>
<a className="menu__item" href="/writing/">Writing</a>
<a className="menu__item" href="https://github.com/rreusser">github.com/rreusser</a>
<a className="menu__item" href="https://twitter.com/rickyreusser">@rickyreusser</a>
</div>
</div>
</nav>
);
}
}
export default Menu;
| 30,976
|
https://github.com/gnzlbg/ispc-rs/blob/master/examples/aobench/src/main.rs
|
Github Open Source
|
Open Source
|
MIT
| null |
ispc-rs
|
gnzlbg
|
Rust
|
Code
| 148
| 411
|
#[macro_use]
extern crate ispc;
extern crate image;
extern crate rand;
use rand::Rng;
ispc_module!(ao);
fn main() {
let width = 256;
let height = 256;
let n_samples = 16;
let mut fimg = vec![0.0; width * height];
let mut rng = rand::thread_rng();
// We need a random seed for each scanline of the image
let scanline_seeds: Vec<_> = rng.gen_iter::<i32>().take(height).collect();
unsafe {
//ao::aobench(width as i32, height as i32, n_samples, rng.gen::<i32>(), fimg.as_mut_ptr());
ao::aobench_parallel(width as i32, height as i32, n_samples, scanline_seeds.as_ptr(),
fimg.as_mut_ptr());
}
// Convert the image to grey scale u8 to save
let img = fimg.iter().map(|x| {
if *x >= 1.0 {
255
} else if *x <= 0.0 {
0
} else {
(*x * 255.0) as u8
}
}).collect::<Vec<u8>>();
match image::save_buffer("ao.png", &img[..], width as u32, height as u32, image::Gray(8)) {
Ok(_) => println!("AO Bench image saved to ao.png"),
Err(e) => panic!("Error saving AO Bench image: {}", e),
};
}
| 15,038
|
https://github.com/cassie01/PumpLibrary/blob/master/TestCase/FreeTest/test_FreeTest_part_008.py
|
Github Open Source
|
Open Source
|
Apache-2.0
| null |
PumpLibrary
|
cassie01
|
Python
|
Code
| 35
| 207
|
from TestCase.check_pump_status import *
class TestFreeTest(unittest.TestCase):
"""
FreeTest Part 8 test case
"""
"""
Part 8
1.获取累计量A;
2.执行排气操作,使排气量达到最大值6mL后停止;
3.获取累计量B。
获取的累计量A与B数值一致。
"""
def test_FreeTest_part_008(self):
# 获取累计量
checkPumpStatus()
time.sleep(7)
lvp.start_bolus()
lvp.start_prime()
time.sleep(6 / 1200 * 3600)
lvp.stop_prime()
| 45,914
|
https://github.com/fanfilmu/tpr2016/blob/master/scalarm/spec/scalarm/experiment_spec.rb
|
Github Open Source
|
Open Source
|
MIT
| 2,016
|
tpr2016
|
fanfilmu
|
Ruby
|
Code
| 175
| 494
|
require 'spec_helper'
describe Scalarm::Experiment do
let(:experiment) { described_class.new }
let(:input_file_data) { {}.to_json }
before do
allow(File).to receive(:read).with('input.json').and_return(input_file_data)
end
describe '#inputs' do
subject { experiment.inputs }
context 'for empty input file' do
it { is_expected.to eq({}) }
end
context 'for file with inputs' do
let(:input_file_data) { { processors: 5, difficult: 'Noo.' }.to_json }
it { is_expected.to eq(processors: 5, difficult: 'Noo.') }
end
end
describe '#result' do
subject { experiment.result }
context 'when no outputs are assigned' do
it { is_expected.to eq(status: :ok, results: {}) }
end
context 'when outputs are assigned' do
before { experiment.fine_result = :gold }
it { is_expected.to eq(status: :ok, results: { fine_result: :gold }) }
end
end
describe '#save_result' do
subject { output.string }
let(:output) { StringIO.new }
before do
expect(File).to receive(:open).with('output.json', 'wb+').and_yield(output)
end
context 'when no outputs are assigned' do
it do
experiment.save_result
is_expected.to eq({ status: :ok, results: {} }.to_json)
end
end
context 'when outputs are assigned' do
before { experiment.fine_result = :gold }
it do
experiment.save_result
is_expected.to eq({ status: :ok, results: { fine_result: :gold } }.to_json)
end
end
end
end
| 38,165
|
https://github.com/PaulMB/media-adapter/blob/master/media-adapter-synology/src/main/webapp/javascript/adapter/form/GlobalConfigurationPanel.js
|
Github Open Source
|
Open Source
|
Apache-2.0
| null |
media-adapter
|
PaulMB
|
JavaScript
|
Code
| 121
| 545
|
/*global Ext,SYNO,Media*/
Ext.define('Media.adapter.form.GlobalConfigurationPanel', {
extend: 'SYNO.SDS.Utils.FormPanel',
constructor: function (config) {
Ext.apply(this, config);
this.executorComboBox = new SYNO.ux.ComboBox({
anchor: '100%',
width: 'auto',
fieldLabel: Media.adapter.util.AppUtil.msg('config', 'executor'),
displayField: 'name',
valueField: 'name',
mode: 'local',
editable: false,
store: new Ext.data.JsonStore({
autoDestroy: true,
fields: ['name'],
data: []
})
});
var configuration = {
bodyStyle:'padding:5px 5px 0',
border: false,
useDefaultBtn: true,
title: Media.adapter.util.AppUtil.msg('config', 'global'),
items: [ this.executorComboBox ]
};
this.callParent([configuration]);
},
setConfiguration: function(configuration) {
this.configuration = configuration;
this.executorComboBox.getStore().loadData(configuration.executors);
this.executorComboBox.setValue(configuration.active);
},
applyHandler: function () {
var me = this;
var parentWindow = me.findAppWindow();
parentWindow.setStatusBusy();
Ext.Ajax.request({
url: Media.adapter.util.AppUtil.getUrl() + 'config/executor/active',
method: 'PUT',
jsonData: this.executorComboBox.getValue(),
scope: this,
success: function (responseObject) {
parentWindow.clearStatusBusy();
},
failure: function () {
parentWindow.clearStatusBusy();
parentWindow.getMsgBox().alert(Media.adapter.util.AppUtil.msg('app', 'app_name'), Media.adapter.util.AppUtil.msg('config', 'save_error'));
}
});
},
cancelHandler: function () {
this.setConfiguration(this.configuration);
}
});
| 15,266
|
https://github.com/xiao333ma/XMWebView/blob/master/Example/XMWebView/XMTest1ViewController.h
|
Github Open Source
|
Open Source
|
MIT
| 2,020
|
XMWebView
|
xiao333ma
|
C
|
Code
| 30
| 122
|
//
// XMTest1ViewController.h
// XMWebView_Example
//
// Created by 马贞赛 on 3/27/20.
// Copyright © 2020 xiao3333ma@gmail.com. All rights reserved.
//
#import <UIKit/UIKit.h>
NS_ASSUME_NONNULL_BEGIN
@interface XMTest1ViewController : UIViewController
@end
NS_ASSUME_NONNULL_END
| 34,124
|
https://github.com/katascope/Glyphics/blob/master/Glyphics/GlyphicsLibrary/Atomics/Atomics.TriangleUnitCube.cs
|
Github Open Source
|
Open Source
|
BSD-2-Clause
| 2,016
|
Glyphics
|
katascope
|
C#
|
Code
| 435
| 1,485
|
#region Copyright
/*Copyright (c) 2015, Katascope
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.*/
#endregion
using System.Collections.Generic;
namespace GlyphicsLibrary.Atomics
{
internal class TriangleUnitCube
{
private TriangleUnitCube() { }
public static ITriangles GetUnitCube()
{
var triangles = new List<ITriangle>();
//Front lower right
ITriangle triangle = new CTriangle(0.0f, 0.0f, 1.0f);
triangle.SetTriangle(0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 1.0f, 0.0f);
triangles.Add(triangle);
//Front Upper left
triangle = new CTriangle(0.0f, 0.0f, 1.0f);
triangle.SetTriangle(0.0f, 0.0f, 0.0f, 1.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f);
triangles.Add(triangle);
//Left Side back bottom
triangle = new CTriangle(-1.0f, 0.0f, 0.0f);
triangle.SetTriangle(0.0f, 0.0f, 0.0f, 0.0f, 1.0f, 1.0f, 0.0f, 1.0f, 0.0f);
triangles.Add(triangle);
//Left side front top
triangle = new CTriangle(-1.0f, 0.0f, 0.0f);
triangle.SetTriangle(0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 1.0f, 1.0f);
triangles.Add(triangle);
//Top
triangle = new CTriangle(0.0f, 1.0f, 0.0f);
triangle.SetTriangle(0.0f, 1.0f, 0.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 0.0f);
triangles.Add(triangle);
triangle = new CTriangle(0.0f, 1.0f, 0.0f);
triangle.SetTriangle(0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f);
triangles.Add(triangle);
//Right
triangle = new CTriangle(1.0f, 0.0f, 0.0f);
triangle.SetTriangle(1.0f, 0.0f, 0.0f, 1.0f, 1.0f, 0.0f, 1.0f, 1.0f, 1.0f);
triangles.Add(triangle);
triangle = new CTriangle(1.0f, 0.0f, 0.0f);
triangle.SetTriangle(1.0f, 0.0f, 0.0f, 1.0f, 1.0f, 1.0f, 1.0f, 0.0f, 1.0f);
triangles.Add(triangle);
//Bottom
triangle = new CTriangle(0.0f, -1.0f, 0.0f);
triangle.SetTriangle(0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f, 1.0f);
triangles.Add(triangle);
triangle = new CTriangle(0.0f, -1.0f, 0.0f);
triangle.SetTriangle(0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f);
triangles.Add(triangle);
//Back
triangle = new CTriangle(0.0f, 0.0f, 1.0f);
triangle.SetTriangle(0.0f, 0.0f, 1.0f, 1.0f, 0.0f, 1.0f, 1.0f, 1.0f, 1.0f);
triangles.Add(triangle);
triangle = new CTriangle(0.0f, 0.0f, 1.0f);
triangle.SetTriangle(0.0f, 0.0f, 1.0f, 1.0f, 1.0f, 1.0f, 0.0f, 1.0f, 1.0f);
triangles.Add(triangle);
return new CTriangles(triangles.ToArray());
}
}
}
| 2,967
|
https://github.com/Phoenix-GH/BisnerXamarin/blob/master/Bisner.Mobile.Core/Models/General/Notifications/INotification.cs
|
Github Open Source
|
Open Source
|
MIT
| 2,018
|
BisnerXamarin
|
Phoenix-GH
|
C#
|
Code
| 63
| 165
|
using System;
using Bisner.Mobile.Core.Models.Base;
namespace Bisner.Mobile.Core.Models.General.Notifications
{
public interface INotification : IItemBase, IDisplayUser
{
Guid RelatedItemId { get; set; }
string NotificationType { get; set; }
DateTime CreationDateTime { get; set; }
DateTime? IsReadOnDateTime { get; set; }
bool IsRead { get; set; }
string Text { get; set; }
string ExtraText { get; set; }
void ShowRelatedItem();
void Update(INotification notification);
}
}
| 183
|
https://github.com/m90/go-chatbase/blob/master/client.go
|
Github Open Source
|
Open Source
|
MIT
| 2,019
|
go-chatbase
|
m90
|
Go
|
Code
| 283
| 643
|
package chatbase
// Client wraps a Chatbase API Key and can be used to
// generate messages, events and link
type Client string
// New returns a new Client using the given Chatbase API Key
func New(apiKey string) *Client {
c := Client(apiKey)
return &c
}
func (c *Client) String() string {
return string(*c)
}
// Message returns a new Message using the client's key and
// the current time as its "TimeStamp" value
func (c *Client) Message(typ MessageType, userID, platform string) *Message {
return &Message{
APIKey: c.String(),
Type: typ,
UserID: userID,
TimeStamp: TimeStamp(),
Platform: platform,
}
}
// UserMessage is a convenience method for creating a user created message
func (c *Client) UserMessage(userID, platform string) *Message {
return c.Message(UserType, userID, platform)
}
// AgentMessage is a convenience method for creating an agent created message
func (c *Client) AgentMessage(userID, platform string) *Message {
return c.Message(AgentType, userID, platform)
}
// Event creates a new Event using the client's API Key
func (c *Client) Event(userID, intent string) *Event {
return &Event{
APIKey: c.String(),
UserID: userID,
Intent: intent,
}
}
// Update creates a new Update using the client's API key
func (c *Client) Update(messageID string) *Update {
return &Update{
APIKey: c.String(),
MessageID: MessageID(messageID),
}
}
// FacebookMessage creates a new native Facebook message
func (c *Client) FacebookMessage(payload interface{}) *FacebookMessage {
return &FacebookMessage{
Payload: payload,
APIKey: c.String(),
}
}
// FacebookRequestResponse creates a new wrapper around a request and response
func (c *Client) FacebookRequestResponse(request, response interface{}) *FacebookRequestResponse {
return &FacebookRequestResponse{
APIKey: c.String(),
Request: request,
Response: response,
}
}
// Link returns a trackable link to the given URL
func (c *Client) Link(url, platform string) *Link {
return &Link{
APIKey: c.String(),
URL: url,
Platform: platform,
}
}
| 27,715
|
https://github.com/wajayanath/micro/blob/master/vue-admin/src/secure/roles/RolesCreate.vue
|
Github Open Source
|
Open Source
|
MIT
| null |
micro
|
wajayanath
|
Vue
|
Code
| 157
| 573
|
<template>
<form @submit.prevent="submit">
<div class="form-group row">
<label class="col-sm-2 col-form-label">Name</label>
<div class="col-sm-10">
<input type="text" class="form-control" name="name" id="name" v-model="name"/>
</div>
</div>
<div class="form-group row">
<label class="col-sm-2 col-form-label">Permissions</label>
<div class="col-sm-10">
<div class="form-check form-check-inline col-3" v-for="permission in permissions" :key="permission.id">
<input class="form-check-input" type="checkbox" :value="permission.id" @change="select(permission.id, $event.target.checked)"/>
<label class="form-check-label">{{ permission.name }}</label>
</div>
</div>
</div>
<button class="btn btn-outline-secondary">Save</button>
</form>
</template>
<script lang="ts">
import {ref, onMounted} from 'vue';
import axios from 'axios';
import {useRouter} from 'vue-router';
export default {
name: "RolesCreate",
setup() {
const name = ref('');
const permissions = ref([]);
const selected = ref([] as number[]);
const router = useRouter();
onMounted(async () => {
const response = await axios.get('permission');
permissions.value = response.data.data;
});
const select = (id: number, checked: boolean) => {
if(checked){
selected.value = [...selected.value, id];
return;
}
selected.value = selected.value.filter(s => s !== id);
}
const submit = async () => {
await axios.post('roles', {
name:name.value,
permissions: selected.value
});
await router.push('/roles');
}
return {
name,
permissions,
select,
submit
}
}
}
</script>
| 48,636
|
https://github.com/vividos/WhereToFly/blob/master/src/App/UnitTest/Views/WeatherDashboardPageTest.cs
|
Github Open Source
|
Open Source
|
BSD-2-Clause
| 2,022
|
WhereToFly
|
vividos
|
C#
|
Code
| 80
| 262
|
using Microsoft.VisualStudio.TestTools.UnitTesting;
using WhereToFly.App.Core.Views;
namespace WhereToFly.App.UnitTest.Views
{
/// <summary>
/// Unit tests for WeatherDashboardPage class
/// </summary>
[TestClass]
public class WeatherDashboardPageTest
{
/// <summary>
/// Sets up tests by initializing Xamarin.Forms.Mocks
/// </summary>
[TestInitialize]
public void SetUp()
{
Xamarin.Forms.Mocks.MockForms.Init();
FFImageLoading.ImageService.EnableMockImageService = true;
}
/// <summary>
/// Tests default ctor of WeatherDashboardPage
/// </summary>
[TestMethod]
public void TestDefaultCtor()
{
// set up
var page = new WeatherDashboardPage();
// check
Assert.IsTrue(page.Title.Length > 0, "page title must have been set");
}
}
}
| 25,032
|
https://github.com/flypig5211/zf-admin/blob/master/zf/src/main/java/com/chinazhoufan/admin/modules/bas/dao/AddTestDataDao.java
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,017
|
zf-admin
|
flypig5211
|
Java
|
Code
| 64
| 306
|
/**
* Copyright © 2012-2014 <a href="https://github.com/chinazhoufan/admin">JeeSite</a> All rights reserved.
*/
package com.chinazhoufan.admin.modules.bas.dao;
import com.chinazhoufan.admin.common.persistence.CrudDao;
import com.chinazhoufan.admin.common.persistence.annotation.MyBatisDao;
import com.chinazhoufan.admin.modules.bas.entity.ScanCode;
/**
* 添加测试数据DAO接口
* @author 张金俊
* @version 2016-11-16
*/
@MyBatisDao
public interface AddTestDataDao extends CrudDao<Object> {
/**
* 查询某类型的最后一个电子码记录
* @param type
* @return
*/
public ScanCode getScanCodeLastByType(String type);
/**
* 清空所有货品的电子码
*/
public void clearScanCodeForProduct();
/**
* 清空所有货位的电子码
*/
public void clearScanCodeForWareplace();
}
| 36,490
|
https://github.com/chikage8640/apils-translate/blob/master/setruntime.py
|
Github Open Source
|
Open Source
|
MIT
| null |
apils-translate
|
chikage8640
|
Python
|
Code
| 141
| 723
|
import sys
import os
import shutil
import urllib.error
import urllib.request
import zipfile
# ダウンロードする
def fileDownload(url,filepath):
try:
urllib.request.urlretrieve(url, filepath)
return 0
except urllib.error as e:
print(e)
print("Failed to download [" + url + "].")
sys.exit()
return 0
# 必要なビルドツールがあるかの確認
if os.path.exists("C:/Program Files (x86)/Microsoft Visual Studio/2019/BuildTools/VC/Tools/MSVC") == False:
print("C++ build tool is not installed. Please install Microsoft C++ Build Tools.\nhttps://visualstudio.microsoft.com/ja/visual-cpp-build-tools/")
sys.exit()
# 保存先ディレクトリ作成
tempPath = "./temp/"
if os.path.exists(tempPath):
# 既にある場合は、先に丸ごと削除する
shutil.rmtree(tempPath)
os.mkdir(tempPath)
print("Start setup...")
# 必要なファイルのダウンロード
print("Downloading lid.176.bin...")
fileDownload("https://dl.fbaipublicfiles.com/fasttext/supervised-models/lid.176.bin", "./lid.176.bin")
print("Downloading runtime...")
fileDownload("https://www.python.org/ftp/python/3.9.7/python-3.9.7-embed-amd64.zip", tempPath + "runtime.zip")
# runtimeの解凍
print("Unzipping runtime...")
with zipfile.ZipFile(tempPath + "runtime.zip") as f:
f.extractall('./runtime')
# さらにその中にfasttextをクローン
print("Downloading fasttext...")
fileDownload("https://github.com/facebookresearch/fastText/archive/refs/heads/master.zip", tempPath + "fasttext.zip")
print("Unzipping fasttext...")
with zipfile.ZipFile(tempPath + "fasttext.zip") as f:
f.extractall(tempPath + "fasttext")
# runtimeでpipを使えるようにする
print("Downloading get-pip.py...")
fileDownload("https://bootstrap.pypa.io/get-pip.py", tempPath + "get-pip.py")
with open("./runtime/python39._pth", "a") as f:
f.write("import site")
# バッチに投げる
print("Calling setruntime.bat...")
os.system(".\\setruntime.bat " + sys.exec_prefix)
# 作業フォルダ削除
shutil.rmtree(tempPath)
| 2,482
|
https://github.com/KouChengjian/manyou/blob/master/library/src/main/java/com/acg12/lib/utils/ForegroundUtil.java
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,016
|
manyou
|
KouChengjian
|
Java
|
Code
| 186
| 527
|
package com.acg12.lib.utils;
import android.app.Activity;
import android.app.Application;
import android.os.Bundle;
import android.widget.TextView;
public class ForegroundUtil implements Application.ActivityLifecycleCallbacks {
private final static String TAG = ForegroundUtil.class.getName();
private Activity activity;
private TextView textView;
//单例
private static final ForegroundUtil instance = new ForegroundUtil();
//用于判断是否程序在前台
private boolean foreground = false, paused = true;
public static void register(Application app) {
app.registerActivityLifecycleCallbacks(instance);
}
public static void unregister(Application app) {
app.unregisterActivityLifecycleCallbacks(instance);
}
public static ForegroundUtil get() {
return instance;
}
private ForegroundUtil() {
}
@Override
public void onActivityCreated(Activity activity, Bundle savedInstanceState) {
// TODO Auto-generated method stub
}
@Override
public void onActivityStarted(Activity activity) {
// TODO Auto-generated method stub
}
// TODO :kaiser @2018/7/22:需要重构 这里会出现 sending message to a Handler on a dead thread
@Override
public void onActivityResumed(Activity activity) {
this.activity = activity;
foreground = true;
}
@Override
public void onActivityPaused(Activity activity) {
foreground = false;
}
@Override
public void onActivityStopped(Activity activity) {
}
@Override
public void onActivityDestroyed(Activity activity) {
// TODO Auto-generated method stub
}
@Override
public void onActivitySaveInstanceState(Activity activity, Bundle outState) {
// TODO Auto-generated method stub
}
public boolean isForeground() {
return foreground;
}
public Activity getActivity() {
return activity;
}
}
| 48,756
|
https://github.com/reubenjohn/AEvo2D/blob/master/Assets/Creature/Actuator/IActuator.cs
|
Github Open Source
|
Open Source
|
MIT
| 2,020
|
AEvo2D
|
reubenjohn
|
C#
|
Code
| 23
| 69
|
using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public interface IActuator
{
int InputCount { get; }
void Act(ArraySegment<float> activations);
IEnumerable<string> GetLabels();
}
| 29,976
|
https://github.com/dorateq/accumulo/blob/master/core/src/main/thrift/security.thrift
|
Github Open Source
|
Open Source
|
Apache-2.0
| null |
accumulo
|
dorateq
|
Thrift
|
Code
| 194
| 390
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
namespace java org.apache.accumulo.core.security.thrift
namespace cpp org.apache.accumulo.core.security.thrift
/** Credentials identifying a principal plus its serialized authentication token. */
struct TCredentials {
  1:string principal
  2:string tokenClassName
  3:binary token
  4:string instanceId
}

/** Identifier payload embedded in a delegation token. */
struct TAuthenticationTokenIdentifier {
  1:string principal
  2:optional i32 keyId
  3:optional i64 issueDate
  4:optional i64 expirationDate
  5:optional string instanceId
}

/** A secret key with its id and validity window. */
struct TAuthenticationKey {
  1:binary secret
  2:optional i32 keyId
  3:optional i64 expirationDate
  4:optional i64 creationDate
}

/** A delegation token: password material plus its identifier. */
struct TDelegationToken {
  1:binary password
  2:TAuthenticationTokenIdentifier identifier
}

/** Client-requested configuration for a delegation token. */
struct TDelegationTokenConfig {
  1:optional i64 lifetime
}
| 29,801
|
https://github.com/ElCuboNegro/Kronos-Torre/blob/master/src/backend/lib/config/config.js
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,021
|
Kronos-Torre
|
ElCuboNegro
|
JavaScript
|
Code
| 51
| 222
|
'use strict';
const crypto = require('crypto');
const pjson = require('../../package.json');
// Runtime configuration assembled from package.json and environment variables.
exports.environment = {
  NAME: pjson.name,
  // HTTP port; defaults to 3000 when PORT is unset.
  PORT: Number(process.env.PORT ?? 3000),
  VERSION: pjson.version,
  // Debug mode is only enabled by the exact string 'TRUE'.
  DEBUG: process.env.DEBUG == 'TRUE',
  DB: {
    USERNAME: process.env.DB_USERNAME,
    PASSWORD: process.env.DB_PASSWORD,
    DATABASE: process.env.DB_NAME,
    HOST: process.env.DB_HOST,
    // NOTE(review): unlike PORT above, DB_PORT is left as a string (or
    // undefined) -- confirm the DB driver accepts that.
    PORT: process.env.DB_PORT,
    DIALECT: process.env.DB_DIALECT ?? 'sqlite',
  },
  JWT: {
    // Falls back to a random per-process secret; tokens signed with it are
    // invalidated whenever the process restarts.
    SECRET_KEY: process.env.SECRET_KEY ?? crypto
      .randomBytes(32).toString('base64'),
  },
};
| 35,267
|
https://github.com/valerijakoncar/diplomski/blob/master/app/Http/Controllers/admin/ActivityController.php
|
Github Open Source
|
Open Source
|
MIT
| null |
diplomski
|
valerijakoncar
|
PHP
|
Code
| 32
| 111
|
<?php
namespace App\Http\Controllers\admin;
use App\Http\Controllers\Controller;
use App\Models\Activity;
use Illuminate\Http\Request;
class ActivityController extends Controller
{
    /**
     * Return activities ordered according to the requested sort option.
     *
     * Delegates to Activity::sortActivity() with the "sort" request input and
     * responds with JSON of the shape {"activities": [...]} and HTTP 200.
     *
     * @param Request $request
     * @return \Illuminate\Http\JsonResponse
     */
    public function sortActivity(Request $request){
        $am = new Activity();
        $act = $am->sortActivity($request->input("sort"));
        return response()->json(["activities" => $act], 200);
    }
}
| 33,499
|
https://github.com/loranger32/weight_tracker_roda/blob/master/db/migrations/022_add_final_constraints_on_batches_target_column.rb
|
Github Open Source
|
Open Source
|
MIT
| 2,021
|
weight_tracker_roda
|
loranger32
|
Ruby
|
Code
| 48
| 241
|
# Adds NOT NULL plus shape constraints on batches.target, which stores a
# URL-safe-base64 encrypted value.
Sequel.migration do
  up do
    alter_table(:batches) do
      set_column_not_null :target
      c = Sequel[:target]
      # Only these ciphertext prefixes are accepted.
      add_constraint(:target_format, c.like("AA__A%") | c.like("Ag__A%") | c.like("AQ__A%"))
      # Encoded value must be at least 88 characters.
      add_constraint(:encrypted_target_length, Sequel.char_length(c) >= 88)
      # After mapping URL-safe base64 back to standard base64 ('_'->'/',
      # '-'->'+'), the decoded payload must be at least 65 bytes.
      add_constraint(:enc_base64_target) do
        octet_length(decode(regexp_replace(regexp_replace(c, "_", "/", "g"), "-", "+", "g"), "base64")) >= 65
      end
    end
  end

  down do
    # Reverse order of `up`: drop the constraints, then allow NULLs again.
    alter_table(:batches) do
      set_column_allow_null :target
      drop_constraint(:target_format)
      drop_constraint(:encrypted_target_length)
      drop_constraint(:enc_base64_target)
    end
  end
end
| 831
|
https://github.com/markthethomas/quill/blob/master/src/server/routes/errors.js
|
Github Open Source
|
Open Source
|
MIT
| 2,016
|
quill
|
markthethomas
|
JavaScript
|
Code
| 64
| 219
|
import PrettyError from 'pretty-error';
/**
 * Express error-handling middleware.
 *
 * Fixes over the previous version:
 *  - takes the 4-argument signature (err, req, res, next) that Express
 *    requires to recognize a function as error middleware;
 *  - returns after each response is sent, so an UnauthorizedError no longer
 *    falls through and triggers a second res.status(...).send(...) (which
 *    throws "Cannot set headers after they are sent").
 */
export default function handleErrors(err, req, res, next) {
  // 401 for auth failures (e.g. express-jwt raises UnauthorizedError).
  if (err.name === 'UnauthorizedError') {
    res.status(401)
      .send('Unauthorized')
    return
  }

  if (process.env.NODE_ENV === 'development') {
    // dev error handler (more verbose): pretty-printed stack trace
    console.error(err)
    console.error(err.stack)
    const errorRenderer = new PrettyError()
    res.status(err.status || 500)
      .send(errorRenderer.render(err))
    return
  }

  // production error handler (no stacktraces leaked to user)
  if (process.env.NODE_ENV === 'production') {
    console.error(err)
    res.status(500)
      .send('Internal Server Error ¯\\_(ツ)_/¯')
    return
  }

  // Unknown environment (e.g. test): fall back to Express' default handler
  // instead of leaving the request hanging.
  if (next) next(err)
}
| 37,879
|
https://github.com/jakobrichert/RichDAO/blob/master/algovenv/lib/python3.8/site-packages/nacl/bindings/crypto_kx.py
|
Github Open Source
|
Open Source
|
MIT
| 2,022
|
RichDAO
|
jakobrichert
|
Python
|
Code
| 599
| 2,282
|
# Copyright 2018 Donald Stufft and individual contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Tuple
from nacl import exceptions as exc
from nacl._sodium import ffi, lib
from nacl.exceptions import ensure
# Public API of this module. Fix: crypto_kx_seed_keypair is a public,
# documented function defined below but was missing from __all__, so
# `from nacl.bindings.crypto_kx import *` silently dropped it.
__all__ = [
    "crypto_kx_keypair",
    "crypto_kx_seed_keypair",
    "crypto_kx_client_session_keys",
    "crypto_kx_server_session_keys",
    "crypto_kx_PUBLIC_KEY_BYTES",
    "crypto_kx_SECRET_KEY_BYTES",
    "crypto_kx_SEED_BYTES",
    "crypto_kx_SESSION_KEY_BYTES",
]
"""
Implementations of client, server key exchange
"""
crypto_kx_PUBLIC_KEY_BYTES: int = lib.crypto_kx_publickeybytes()
crypto_kx_SECRET_KEY_BYTES: int = lib.crypto_kx_secretkeybytes()
crypto_kx_SEED_BYTES: int = lib.crypto_kx_seedbytes()
crypto_kx_SESSION_KEY_BYTES: int = lib.crypto_kx_sessionkeybytes()
def crypto_kx_keypair() -> Tuple[bytes, bytes]:
    """
    Generate a fresh key-exchange keypair.

    Duplicates crypto_box_keypair; provided so the crypto_kx api is
    self-contained.

    :return: (public_key, secret_key)
    :rtype: (bytes, bytes)
    :raises nacl.exceptions.CryptoError: if libsodium reports a failure
    """
    pk_buf = ffi.new("unsigned char[]", crypto_kx_PUBLIC_KEY_BYTES)
    sk_buf = ffi.new("unsigned char[]", crypto_kx_SECRET_KEY_BYTES)

    rc = lib.crypto_kx_keypair(pk_buf, sk_buf)
    ensure(rc == 0, "Key generation failed.", raising=exc.CryptoError)

    public_key = ffi.buffer(pk_buf, crypto_kx_PUBLIC_KEY_BYTES)[:]
    secret_key = ffi.buffer(sk_buf, crypto_kx_SECRET_KEY_BYTES)[:]
    return public_key, secret_key
def crypto_kx_seed_keypair(seed: bytes) -> Tuple[bytes, bytes]:
    """
    Deterministically derive a key-exchange keypair from ``seed``.

    Functionally the same as crypto_box_seed_keypair, except the seed is
    hashed with blake2b rather than sha512; included mainly for api
    consistency when using crypto_kx.

    :param seed: random seed of exactly ``crypto_kx_SEED_BYTES`` bytes
    :type seed: bytes
    :return: (public_key, secret_key)
    :rtype: (bytes, bytes)
    :raises nacl.exceptions.TypeError: if the seed has the wrong type/length
    :raises nacl.exceptions.CryptoError: if libsodium reports a failure
    """
    # Validate before touching libsodium.
    ensure(
        isinstance(seed, bytes) and len(seed) == crypto_kx_SEED_BYTES,
        "Seed must be a {} byte long bytes sequence".format(
            crypto_kx_SEED_BYTES
        ),
        raising=exc.TypeError,
    )

    pk_buf = ffi.new("unsigned char[]", crypto_kx_PUBLIC_KEY_BYTES)
    sk_buf = ffi.new("unsigned char[]", crypto_kx_SECRET_KEY_BYTES)

    rc = lib.crypto_kx_seed_keypair(pk_buf, sk_buf, seed)
    ensure(rc == 0, "Key generation failed.", raising=exc.CryptoError)

    public_key = ffi.buffer(pk_buf, crypto_kx_PUBLIC_KEY_BYTES)[:]
    secret_key = ffi.buffer(sk_buf, crypto_kx_SECRET_KEY_BYTES)[:]
    return public_key, secret_key
def crypto_kx_client_session_keys(
    client_public_key: bytes,
    client_secret_key: bytes,
    server_public_key: bytes,
) -> Tuple[bytes, bytes]:
    """
    Generate session keys for the client side of a key exchange.

    :param client_public_key: the client's public key
    :type client_public_key: bytes
    :param client_secret_key: the client's secret key
    :type client_secret_key: bytes
    :param server_public_key: the server's public key
    :type server_public_key: bytes
    :return: (rx_key, tx_key) -- receive and transmit session keys
    :rtype: (bytes, bytes)
    :raises nacl.exceptions.TypeError: if a key has the wrong type or length
    :raises nacl.exceptions.CryptoError: if libsodium reports a failure
    """
    ensure(
        isinstance(client_public_key, bytes)
        and len(client_public_key) == crypto_kx_PUBLIC_KEY_BYTES,
        "Client public key must be a {} bytes long bytes sequence".format(
            crypto_kx_PUBLIC_KEY_BYTES
        ),
        raising=exc.TypeError,
    )
    ensure(
        isinstance(client_secret_key, bytes)
        and len(client_secret_key) == crypto_kx_SECRET_KEY_BYTES,
        # Fix: this message previously interpolated crypto_kx_PUBLIC_KEY_BYTES;
        # the check itself is (and was) against crypto_kx_SECRET_KEY_BYTES.
        "Client secret key must be a {} bytes long bytes sequence".format(
            crypto_kx_SECRET_KEY_BYTES
        ),
        raising=exc.TypeError,
    )
    ensure(
        isinstance(server_public_key, bytes)
        and len(server_public_key) == crypto_kx_PUBLIC_KEY_BYTES,
        "Server public key must be a {} bytes long bytes sequence".format(
            crypto_kx_PUBLIC_KEY_BYTES
        ),
        raising=exc.TypeError,
    )
    rx_key = ffi.new("unsigned char[]", crypto_kx_SESSION_KEY_BYTES)
    tx_key = ffi.new("unsigned char[]", crypto_kx_SESSION_KEY_BYTES)
    res = lib.crypto_kx_client_session_keys(
        rx_key, tx_key, client_public_key, client_secret_key, server_public_key
    )
    ensure(
        res == 0,
        "Client session key generation failed.",
        raising=exc.CryptoError,
    )
    return (
        ffi.buffer(rx_key, crypto_kx_SESSION_KEY_BYTES)[:],
        ffi.buffer(tx_key, crypto_kx_SESSION_KEY_BYTES)[:],
    )
def crypto_kx_server_session_keys(
    server_public_key: bytes,
    server_secret_key: bytes,
    client_public_key: bytes,
) -> Tuple[bytes, bytes]:
    """
    Generate session keys for the server side of a key exchange.

    :param server_public_key: the server's public key
    :type server_public_key: bytes
    :param server_secret_key: the server's secret key
    :type server_secret_key: bytes
    :param client_public_key: the client's public key
    :type client_public_key: bytes
    :return: (rx_key, tx_key) -- receive and transmit session keys
    :rtype: (bytes, bytes)
    :raises nacl.exceptions.TypeError: if a key has the wrong type or length
    :raises nacl.exceptions.CryptoError: if libsodium reports a failure
    """
    ensure(
        isinstance(server_public_key, bytes)
        and len(server_public_key) == crypto_kx_PUBLIC_KEY_BYTES,
        "Server public key must be a {} bytes long bytes sequence".format(
            crypto_kx_PUBLIC_KEY_BYTES
        ),
        raising=exc.TypeError,
    )
    ensure(
        isinstance(server_secret_key, bytes)
        and len(server_secret_key) == crypto_kx_SECRET_KEY_BYTES,
        # Fix: this message previously interpolated crypto_kx_PUBLIC_KEY_BYTES;
        # the check itself is (and was) against crypto_kx_SECRET_KEY_BYTES.
        "Server secret key must be a {} bytes long bytes sequence".format(
            crypto_kx_SECRET_KEY_BYTES
        ),
        raising=exc.TypeError,
    )
    ensure(
        isinstance(client_public_key, bytes)
        and len(client_public_key) == crypto_kx_PUBLIC_KEY_BYTES,
        "Client public key must be a {} bytes long bytes sequence".format(
            crypto_kx_PUBLIC_KEY_BYTES
        ),
        raising=exc.TypeError,
    )
    rx_key = ffi.new("unsigned char[]", crypto_kx_SESSION_KEY_BYTES)
    tx_key = ffi.new("unsigned char[]", crypto_kx_SESSION_KEY_BYTES)
    res = lib.crypto_kx_server_session_keys(
        rx_key, tx_key, server_public_key, server_secret_key, client_public_key
    )
    ensure(
        res == 0,
        "Server session key generation failed.",
        raising=exc.CryptoError,
    )
    return (
        ffi.buffer(rx_key, crypto_kx_SESSION_KEY_BYTES)[:],
        ffi.buffer(tx_key, crypto_kx_SESSION_KEY_BYTES)[:],
    )
| 2,133
|
https://github.com/molly7657/flavortowntrip/blob/master/server/api/restaurants.js
|
Github Open Source
|
Open Source
|
MIT
| 2,021
|
flavortowntrip
|
molly7657
|
JavaScript
|
Code
| 55
| 165
|
const router = require('express').Router()
const {Restaurant} = require('../db/models')
module.exports = router
// GET / -- list every restaurant as JSON; DB errors go to the error handler.
router.get('/', async (req, res, next) => {
  try {
    const allRestaurants = await Restaurant.findAll()
    res.json(allRestaurants)
  } catch (error) {
    next(error)
  }
})
// GET /:id -- a single restaurant, or 404 when the id does not exist.
router.get('/:id', async (req, res, next) => {
  try {
    const restaurant = await Restaurant.findByPk(req.params.id)
    if (restaurant === null) {
      // findByPk resolves to null for a missing row; previously that null was
      // serialized with a 200, indistinguishable from success for clients.
      res.sendStatus(404)
    } else {
      res.json(restaurant)
    }
  } catch (err) {
    next(err)
  }
})
| 35,401
|
https://github.com/starl1n/Xamarin.Forms-Samples-of-Clases/blob/master/Clase 09/Proyectos/XamarinFormsClase09/XamarinFormsClase09/ControlesPersonalizados/EntryPersonalizado.cs
|
Github Open Source
|
Open Source
|
MIT
| null |
Xamarin.Forms-Samples-of-Clases
|
starl1n
|
C#
|
Code
| 15
| 44
|
using System;
using Xamarin.Forms;
namespace XamarinFormsClase09
{
    /// <summary>
    /// Marker subclass of <see cref="Entry"/> with no added members --
    /// presumably used as the target of a platform-specific custom renderer
    /// (common Xamarin.Forms pattern; confirm in the platform projects).
    /// </summary>
    public class EntryPersonalizado : Entry
    {
    }
}
| 6,867
|
https://github.com/imec-int/federated-learning-lib/blob/master/examples/PHR/aggregator/aggregator.ddl
|
Github Open Source
|
Open Source
|
IBM-pibs
| null |
federated-learning-lib
|
imec-int
|
SQL
|
Code
| 28
| 97
|
-- Schema holding the aggregator's result tables.
CREATE SCHEMA results;

-- Admission counts per source database, gender and admission hour.
CREATE TABLE results.nbAdmissionsByGender(
    sourceDatabase CHARACTER VARYING(64),
    gender CHARACTER VARYING(64),
    admissionHour INTEGER,
    count INTEGER
);

-- Mean and standard deviation of SOFA scores per source database.
CREATE TABLE results.sofaScores(
    sourceDatabase CHARACTER VARYING(64),
    sofaAvg REAL,
    sofaStd REAL
);
| 23,961
|
https://github.com/genious97/ImageManipulation/blob/master/ImageManipulation/Blurer.cs
|
Github Open Source
|
Open Source
|
MIT
| 2,019
|
ImageManipulation
|
genious97
|
C#
|
Code
| 277
| 843
|
using System;
using System.Collections.Generic;
using System.Windows.Media;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace ImageManipulation
{
    /// <summary>
    /// Produces a box-blurred copy of a bitmap by averaging, for every pixel,
    /// the colour channels of the surrounding (2*BlurRadius+1)^2 window.
    /// </summary>
    class Blurer
    {
        /// <summary>
        /// Half-width of the averaging window; 5 gives an 11x11 box.
        /// (Was a magic number repeated in the loop bounds.)
        /// </summary>
        private const int BlurRadius = 5;

        CopyableBitmap _inputImage;

        public Blurer(CopyableBitmap inputImage)
        {
            _inputImage = inputImage;
        }

        /// <summary>
        /// Blurs the entire input image into a newly allocated bitmap of the
        /// same dimensions and returns it; the input is not modified.
        /// </summary>
        public EditableBitmap Blur()
        {
            EditableBitmap outputImage = new EditableBitmap(_inputImage.Width, _inputImage.Height);
            for (int x = 0; x < _inputImage.Width; x++)
            {
                for (int y = 0; y < _inputImage.Height; y++)
                {
                    BlurPixel(x, y, outputImage);
                }
            }
            return outputImage;
        }

        // Gets the colour values of the pixels surrounding (x, y) as raw
        // per-pixel byte arrays. Out-of-bounds positions (GetPixelBytes
        // returning null) are skipped, so edge pixels average a smaller window.
        private List<byte[]> GetSurroundingColors(int x, int y, CopyableBitmap image)
        {
            List<byte[]> capturedPixels = new List<byte[]>();
            for (int verticalDifference = -BlurRadius; verticalDifference <= BlurRadius; verticalDifference++)
            {
                for (int horizontalDifference = -BlurRadius; horizontalDifference <= BlurRadius; horizontalDifference++)
                {
                    byte[] colors = image.GetPixelBytes(x + horizontalDifference, y + verticalDifference);
                    if (colors != null)
                    {
                        capturedPixels.Add(colors);
                    }
                }
            }
            return capturedPixels;
        }

        // Averages each channel independently over the captured window.
        private Color GetBlurredColor(List<byte[]> colorData)
        {
            int[] colorSums = SumColorChannels(colorData);
            int[] blurredChannels = DivideArray(colorSums, colorData.Count);
            return ToColor(blurredChannels);
        }

        // Computes and writes the blurred colour for one output pixel.
        private void BlurPixel(int x, int y, EditableBitmap outputImage)
        {
            Color blurredColor = GetBlurredColor(GetSurroundingColors(x, y, _inputImage));
            outputImage.SetPixelColor(x, y, blurredColor);
        }

        // Sums the four channels independently (byte indices 0..3, in
        // whatever order GetPixelBytes delivers -- see ToColor).
        private int[] SumColorChannels(List<byte[]> colorData)
        {
            int[] colorSums = { 0, 0, 0, 0 };
            foreach (byte[] color in colorData)
            {
                colorSums[0] += color[0];
                colorSums[1] += color[1];
                colorSums[2] += color[2];
                colorSums[3] += color[3];
            }
            return colorSums;
        }

        // Integer-divides every element by divisor (truncating average).
        private int[] DivideArray(int[] input, int divisor)
        {
            int[] output = (int[])input.Clone();
            for (int i = 0; i < input.Length; i++)
            {
                output[i] = (output[i] / divisor);
            }
            return output;
        }

        // Reassembles averaged channels into a Color. Presumably the source
        // bitmap delivers BGRA byte order (index 0 = blue .. 3 = alpha) --
        // TODO confirm against CopyableBitmap's pixel format.
        private Color ToColor(int[] input)
        {
            Color blurredColor = new Color();
            blurredColor.B = (byte)input[0];
            blurredColor.G = (byte)input[1];
            blurredColor.R = (byte)input[2];
            blurredColor.A = (byte)input[3];
            return blurredColor;
        }
    }
}
| 31,603
|
https://github.com/MGelein/paralleloi/blob/master/src/trb1914/paralleloi/data/Text.java
|
Github Open Source
|
Open Source
|
MIT
| null |
paralleloi
|
MGelein
|
Java
|
Code
| 287
| 680
|
package trb1914.paralleloi.data;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import trb1914.paralleloi.Comparison;
import trb1914.paralleloi.Paralleloi;
/**
* This text object holds the methods used for a loaded text
* to interact with it
* @author Mees Gelein
*/
public class Text {

    /** Registry of every Text that loaded successfully (appended in the constructor). */
    private static ArrayList<Text> texts = new ArrayList<Text>();

    /**
     * Runs a pairwise comparison of every loaded text against every other
     * loaded text; each unordered pair is compared exactly once.
     */
    public static void compareAll() {
        for(int i = 0; i < texts.size(); i++) {
            for(int j = i + 1; j < texts.size(); j++) {
                (new Comparison(texts.get(i), texts.get(j))).run();
            }
        }
    }

    /** The dictionary object of this Text */
    public Dict dict;
    /** Reference to the actual file */
    public File file;
    /** The contents of the file once the encoding has been applied */
    public String contents;

    /**
     * Tries to create a new Text from the provided URL. On any failure
     * (null/empty url, missing file, I/O error) an error is printed and the
     * constructor returns early, leaving the instance partially initialized;
     * the instance is only added to the global registry on success.
     *
     * @param url path of the file to load
     */
    public Text(String url) {
        // Prevent bad input
        if(url == null || url.length() == 0) {
            System.err.println("Cannot instantiate a new text from null url string!");
            return;
        }
        // Create the file
        file = new File(url);
        // Test if it exists
        if(!file.exists()) {
            System.err.println("Cannot instantiate a new text. Cannot find file: " + file.getAbsolutePath());
            return;
        }
        // Reads all bytes into memory, but catches any exceptions
        try {
            System.out.println("Loading file: " + url);
            byte[] fileBytes = Files.readAllBytes(Paths.get(url));
            // Decode using the project-wide character set
            contents = new String(fileBytes, Paralleloi.charSet);
            // Register this text so compareAll() can see it
            texts.add(this);
            // Build the Dict for this text's contents
            System.out.println("Creating Dictionary for: " + url);
            dict = new Dict(contents);
        } catch (IOException e) {
            System.err.println("Something went wrong reading the bytes into memory of file: " + url);
            e.printStackTrace();
        }
    }
}
|
https://github.com/chrismcgraw60/chocosolver/blob/master/src/test/java/org/clafer/ir/IrArithmXYTest.java
|
Github Open Source
|
Open Source
|
MIT
| 2,019
|
chocosolver
|
chrismcgraw60
|
Java
|
Code
| 125
| 515
|
package org.clafer.ir;
import org.chocosolver.solver.Solver;
import org.chocosolver.solver.constraints.Constraint;
import org.chocosolver.solver.variables.IntVar;
import org.chocosolver.solver.variables.Variable;
import org.chocosolver.solver.variables.impl.FixedBoolVarImpl;
import org.chocosolver.solver.variables.impl.FixedIntVarImpl;
import org.chocosolver.solver.variables.view.IntView;
import org.clafer.ir.IrQuickTest.Check;
import org.clafer.ir.IrQuickTest.Solution;
import static org.clafer.ir.Irs.compare;
import org.clafer.test.Term;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import org.junit.Test;
import org.junit.runner.RunWith;
/**
*
* @author jimmy
*/
@RunWith(IrQuickTest.class)
public class IrArithmXYTest {

    /**
     * Post-solve check: the compiled Choco model must stay small -- at most
     * one constraint and four variables, with no leftover fixed non-boolean
     * ints and no views, otherwise an optimization was missed.
     */
    @Check
    public void check(Solver solver) {
        assertTrue("Correct but not optimized.", solver.getModel().getNbCstrs() <= 1);
        assertTrue("Correct but not optimized.", solver.getModel().getNbVars() <= 4);
        for (Variable var : solver.getModel().getVars()) {
            assertFalse("Correct but not optimized.",
                    var instanceof FixedIntVarImpl && !(var instanceof FixedBoolVarImpl));
            assertFalse("Correct but not optimized.", var instanceof IntView);
        }
    }

    /** Builds the IR comparison expression under test from two generated terms. */
    @Test(timeout = 60000)
    public IrBoolExpr setup(Term left, IrCompare.Op op, Term right) {
        return compare(left.toIrExpr(), op, right.toIrExpr());
    }

    /** Reference Choco constraint the IR compilation is compared against. */
    @Solution
    public Constraint setup(IntVar left, IrCompare.Op op, IntVar right) {
        return left.getModel().arithm(left, op.getSyntax(), right);
    }
}
|
https://github.com/kimhanui/2020-InterfaceApp-api/blob/master/src/main/java/com/infe/app/web/dto/PostsListResponseDto.java
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,022
|
2020-InterfaceApp-api
|
kimhanui
|
Java
|
Code
| 55
| 206
|
package com.infe.app.web.dto;
import com.fasterxml.jackson.annotation.JsonFormat;
import com.infe.app.domain.posts.Posts;
import lombok.Getter;
import java.time.LocalDateTime;
/** Read-only response DTO carrying the fields a posts list view needs. */
@Getter
public class PostsListResponseDto {
    private Long id;
    private String title;
    private String content;
    // Serialized as "yyyy-MM-dd HH:mm" in the Asia/Seoul timezone.
    @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd HH:mm", timezone = "Asia/Seoul")
    private LocalDateTime modifiedDate;

    /** Copies the relevant fields out of a Posts entity. */
    public PostsListResponseDto(Posts entity) {
        this.id = entity.getId();
        this.title = entity.getTitle();
        this.modifiedDate = entity.getModifiedDateTime();
        this.content = entity.getContent();
    }
}
|
https://github.com/jquorning/iNow/blob/master/source/decoration.adb
|
Github Open Source
|
Open Source
|
blessing
| 2,020
|
iNow
|
jquorning
|
Ada
|
Code
| 248
| 586
|
--
-- The author disclaims copyright to this source code. In place of
-- a legal notice, here is a blessing:
--
-- May you do good and not evil.
-- May you find forgiveness for yourself and forgive others.
-- May you share freely, not taking more than you give.
--
with Symbols;
package body Decoration is

   use Symbols;

   subtype Mark_String is String (1 .. 3);

   --  Three-character marker rendered for each item state.
   To_Mark : constant array (State_Type) of Mark_String :=
     (Fresh => " - ",
      Executing => " + ",
      Done => UTF8 (White_Star),
      Omitted => " o ",
      Deleted => " X ");

   Space : constant String := " ";

   --  Ornamental parentheses used to flag "partly" status.
   Left_Parenthesis : constant String :=
     UTF8 (Medium_Flattened_Left_Parenthesis_Ornament);

   Right_Parenthesis : constant String :=
     UTF8 (Medium_Flattened_Right_Parenthesis_Ornament);

   --  Renders the state marker, wrapped in ornamental parentheses when the
   --  status is only partly reached (spaces otherwise, keeping width fixed).
   function Status_Image (Status : Status_Type)
                          return String
   is
      First : constant String :=
        (if Status.Partly then Left_Parenthesis else Space);
      Last : constant String :=
        (if Status.Partly then Right_Parenthesis else Space);
      Mark : constant String :=
        To_Mark (Status.State);
   begin
      return First & Mark & Last;
   end Status_Image;

   --  Same idea for a title, using plain ASCII parentheses.
   function Title_Image (Title : String;
                         Status : Status_Type)
                         return String
   is
      First : constant Character := (if Status.Partly then '(' else ' ');
      Last : constant Character := (if Status.Partly then ')' else ' ');
   begin
      return First & Title & Last;
   end Title_Image;

   --  Placeholder: not implemented yet, always returns "???".
   function Current_Image (Status : Status_Type)
                           return String
   is
      pragma Unreferenced (Status);
   begin
      return "???";
   end Current_Image;

end Decoration;
| 4,987
|
https://github.com/MinexAutomation/R5T.NetStandard.IO.Serialization/blob/master/source/R5T.NetStandard.IO.Serialization/Code/Classes/XmlFileSerializer.cs
|
Github Open Source
|
Open Source
|
MIT
| null |
R5T.NetStandard.IO.Serialization
|
MinexAutomation
|
C#
|
Code
| 69
| 233
|
using System;
namespace R5T.NetStandard.IO.Serialization
{
    /// <summary>
    /// Static helpers for XML (de)serialization to and from a file path.
    /// Streams are opened via FileStreamHelper and disposed before returning.
    /// </summary>
    public static class XmlFileSerializer
    {
        /// <summary>
        /// Opens <paramref name="xmlFilePath"/> for reading and deserializes
        /// its XML content to a <typeparamref name="T"/>.
        /// </summary>
        public static T Deserialize<T>(string xmlFilePath)
        {
            using (var fileStream = FileStreamHelper.NewRead(xmlFilePath))
            {
                var output = XmlStreamSerializer.Deserialize<T>(fileStream);
                return output;
            }
        }

        /// <summary>
        /// Serializes <paramref name="obj"/> as XML to <paramref name="xmlFilePath"/>.
        /// </summary>
        /// <param name="overwrite">Passed through to FileStreamHelper.NewWrite;
        /// controls whether an existing file may be replaced.</param>
        public static void Serialize<T>(string xmlFilePath, T obj, bool overwrite = true)
        {
            using (var fileStream = FileStreamHelper.NewWrite(xmlFilePath, overwrite))
            {
                XmlStreamSerializer.Serialize(fileStream, obj);
            }
        }
    }

    /// <summary>
    /// Instance-based variant: a <see cref="StreamFileSerializer{T}"/>
    /// composed with the XML stream serializer.
    /// </summary>
    public class XmlFileSerializer<T> : StreamFileSerializer<T>
    {
        public XmlFileSerializer()
            : base(new XmlStreamSerializer<T>())
        {
        }
    }
}
| 33,114
|
https://github.com/orclassiq/amazon-braket-sdk-python/blob/master/examples/job.py
|
Github Open Source
|
Open Source
|
Apache-2.0
| null |
amazon-braket-sdk-python
|
orclassiq
|
Python
|
Code
| 153
| 411
|
# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import os
from braket.aws import AwsDevice, AwsQuantumJob
from braket.circuits import Circuit
from braket.jobs import save_job_result
def run_job():
    """Run the 2-qubit Bell circuit 10 times on the device named by the
    AMZN_BRAKET_DEVICE_ARN environment variable and save all measurement
    counts as the job result."""
    device = AwsDevice(os.environ.get("AMZN_BRAKET_DEVICE_ARN"))

    bell_circuit = Circuit().h(0).cnot(0, 1)

    num_tasks = 10
    results = []
    for i in range(num_tasks):
        counts = device.run(bell_circuit, shots=100).result().measurement_counts
        results.append(counts)
        print(f"iter {i}: {counts}")

    save_job_result({"results": results})
if __name__ == "__main__":
    # Submit this very file as a Braket Hybrid Job against the SV1 simulator,
    # with run_job() as the entry point, and block until it completes.
    job = AwsQuantumJob.create(
        device="arn:aws:braket:::device/quantum-simulator/amazon/sv1",
        source_module="job.py",
        entry_point="job:run_job",
        wait_until_complete=True,
    )
    print(job.result())
| 19,552
|
https://github.com/ckooma001c/Comcast-traffic_control/blob/master/traffic_router/core/src/main/java/com/comcast/cdn/traffic_control/traffic_router/core/cache/CacheLocationManager.java
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,017
|
Comcast-traffic_control
|
ckooma001c
|
Java
|
Code
| 205
| 414
|
/*
* Copyright 2015 Comcast Cable Communications Management, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.comcast.cdn.traffic_control.traffic_router.core.cache;
import java.util.Collection;
import java.util.Map;
/**
* Provides access to the configured {@link CacheLocation}s and the {@link Cache}s that are a part
* of them.
*/
public interface CacheLocationManager {

    /**
     * Gets the {@link CacheLocation} specified by the provided ID.
     *
     * @param id
     *            the ID for the desired <code>CacheLocation</code>
     * @return the <code>CacheLocation</code> or null if no location exists for the specified ID.
     */
    CacheLocation getCacheLocation(final String id);

    /**
     * Returns the configured {@link CacheLocation}s.
     *
     * @return the configured <code>CacheLocations</code> or an empty {@link Collection} if no
     *         locations are configured
     */
    Collection<CacheLocation> getCacheLocations();

    /**
     * Replaces the map of {@link Cache}s keyed by string (key semantics are
     * defined by implementations -- presumably the cache's ID/name; confirm).
     *
     * @param map the new cache map
     */
    void setCacheMap(Map<String,Cache> map);

    /**
     * @return the current map of {@link Cache}s previously set via
     *         {@link #setCacheMap(Map)}
     */
    Map<String,Cache> getCacheMap();
}
| 40,147
|
https://github.com/ZLValien/erda/blob/master/modules/openapi/component-protocol/scenarios/action/components/actionForm/testplan-run_test.go
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,021
|
erda
|
ZLValien
|
Go
|
Code
| 313
| 1,242
|
// Copyright (c) 2021 Terminus, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package action
import (
"fmt"
"reflect"
"testing"
"github.com/erda-project/erda/apistructs"
)
// Test_fillTestPlanFields verifies that fillTestPlanFields appends the
// generated test-plan / config-sheet form items after the existing fields.
//
// Fix: the original compared with reflect.DeepEqual but only fmt.Println'ed
// the mismatch, so the test could never fail; it now reports failures via
// t.Errorf (fmt is still used for rendering the values).
func Test_fillTestPlanFields(t *testing.T) {
	type args struct {
		field     []apistructs.FormPropItem
		testPlans []map[string]interface{}
		cms       []map[string]interface{}
	}
	tests := []struct {
		name string
		args args
		want []apistructs.FormPropItem
	}{
		{
			name: "Filled",
			args: args{
				field: []apistructs.FormPropItem{
					apistructs.FormPropItem{
						Label:     "执行条件",
						Component: "input",
						Required:  true,
						Group:     "params",
					},
				},
				testPlans: []map[string]interface{}{
					map[string]interface{}{
						"name":  "a",
						"value": "1",
					},
					map[string]interface{}{
						"name":  "b",
						"value": "2",
					},
				},
				cms: []map[string]interface{}{
					map[string]interface{}{
						"name":  "aa",
						"value": "11",
					},
					map[string]interface{}{
						"name":  "bb",
						"value": "22",
					},
				},
			},
			want: []apistructs.FormPropItem{
				apistructs.FormPropItem{
					Label:     "执行条件",
					Component: "input",
					Required:  true,
					Group:     "params",
				},
				apistructs.FormPropItem{
					Component: "formGroup",
					ComponentProps: map[string]interface{}{
						"title": "任务参数",
					},
					Group: "params",
					Key:   "params",
				},
				apistructs.FormPropItem{
					Label:     "测试计划",
					Component: "select",
					Required:  true,
					Key:       "params.test_plan",
					ComponentProps: map[string]interface{}{
						"options": []map[string]interface{}{
							map[string]interface{}{
								"name":  "a",
								"value": 1,
							},
							map[string]interface{}{
								"name":  "b",
								"value": 2,
							},
						},
					},
					Group: "params",
				},
				apistructs.FormPropItem{
					Label:     "参数配置",
					Component: "select",
					Required:  true,
					Key:       "params.cms",
					ComponentProps: map[string]interface{}{
						"options": []map[string]interface{}{
							map[string]interface{}{
								"name":  "aa",
								"value": 11,
							},
							map[string]interface{}{
								"name":  "bb",
								"value": 22,
							},
						},
					},
					Group: "params",
				},
				apistructs.FormPropItem{
					Label:        "失败后是否继续执行",
					Component:    "input",
					Required:     false,
					Key:          "params.is_continue_execution",
					Group:        "params",
					DefaultValue: true,
				},
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := fillTestPlanFields(tt.args.field, tt.args.testPlans, tt.args.cms)
			if !reflect.DeepEqual(got, tt.want) {
				t.Errorf("fillTestPlanFields() mismatch:\ngot:  %s\nwant: %s",
					fmt.Sprint(got), fmt.Sprint(tt.want))
			}
		})
	}
}
| 20,335
|
https://github.com/tiaanduplessis/jormat/blob/master/test.js
|
Github Open Source
|
Open Source
|
MIT
| 2,018
|
jormat
|
tiaanduplessis
|
JavaScript
|
Code
| 39
| 118
|
'use strict'
const jormat = require('./')
const { milliseconds } = require('jormat-formatters')
// Smoke test: the module exports something at all.
test('should be defined', () => {
  expect(jormat).toBeDefined()
})

// Round trip through the millisecond formatters: format renders a duration
// as text, parse converts such text back to a millisecond count.
test('should format and parse', () => {
  const { format, parse } = jormat(milliseconds)
  expect(format(9000)).toBe('9 seconds')
  expect(parse('6 minutes')).toBe(360000)
})
| 13,479
|
https://github.com/SMKrizan/WoodlandConsultingLLC/blob/master/client/src/pages/About/index.js
|
Github Open Source
|
Open Source
|
MIT
| 2,021
|
WoodlandConsultingLLC
|
SMKrizan
|
JavaScript
|
Code
| 275
| 696
|
import React from 'react';
import './about.css'
import {useSpring, animated} from 'react-spring'
// About page: two bio panels (principal and firm), each with an image that
// animates in via react-spring.
function About() {
    // Scale/fade-in animation applied to both images (1s duration).
    // NOTE(review): `scale` and `freq` are not standard style props here --
    // only `opacity` and `transform` clearly drive the rendered style; confirm
    // the extra keys are intentional.
    const propsMove2 = useSpring(
        {opacity: 1,
        from: {scale:10,transform: 'scale(0.5)'},
        to: { scale: 150, transform: 'scale(1)', freq: '0.0, 0.0' },
        config: { duration: 1000 }});
    return (
        <section>
            <div className="about-section bg-about pad ">
                <div className="box-over2 glow3">
                    <h2>About Jessica Walther</h2>
                    <p>Jessica A. Walther, principal and founder, is a certified, professional lighting designer with
                        22 years’ experience. Ms. Walther offers strong project coordination skills and
                        has served as Project Manager and Construction Administrator on many large-
                        scale lighting projects. Her skill set includes performing photometric analysis,
                        producing technical documents, providing electrical circuiting recommendations
                        and record drawings.
                        Jessica’s project experience includes interior and exterior lighting design for
                        Industrial, Commercial and Office buildings, Educational (K-12 and higher
                        education), Healthcare, Libraries, Religious, Financial, Retail, Assisted Living /
                        Nursing Home, Hospice Care, Historical Renovation, Lighting Retrofit and
                        Airport projects throughout Wisconsin and the United States.</p>
                </div>
                <div className="about-img2">
                    <animated.div style={propsMove2}>
                        <img src={require(`../../assets/images/jess1sq.jpg`).default} alt='Jessica Walther' />
                    </animated.div>
                </div>
            </div>
            <div className="pad margin-b80 about-section">
                <div className="about-img1">
                    <animated.div style={propsMove2}>
                        <img src={require(`../../assets/images/portfolio/UW_SOHE_4-min.JPG`).default} alt='Jessica Walther' />
                    </animated.div>
                </div>
                <div className="box-over glow3 margin-b80">
                    <h2>About Woodland Consulting LLC</h2>
                    <p>Woodland Consulting, LLC, established in August 2016, provides electrical
                        lighting design consulting services. Woodland implements comprehensive
                        lighting designs including—but not limited to—aesthetic and functional layout;
                        line and low voltage lighting control systems; fixture and lamp
                        recommendations as well as all respective schedules and specifications. To
                        coordinate designs, Woodland works closely with architects, interior designers,
                        owners, contractors, and engineers. Woodland’s priority is to provide energy
                        efficient designs and documentation in compliance with local, state, and federal
                        building and energy codes.</p>
                </div>
            </div>
        </section>
    )
};
export default About;
|
https://github.com/G-Node/GCA-Web/blob/master/test/frontend/conftest.py
|
Github Open Source
|
Open Source
|
BSD-2-Clause
| 2,021
|
GCA-Web
|
G-Node
|
Python
|
Code
| 265
| 1,535
|
import pytest
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.action_chains import ActionChains
import Cookies
# Firefox-workaround, as move_to_element not working if element not in current screen
def scroll(driver, object):
    # Firefox workaround: move_to_element fails when the element is off-screen,
    # so scroll the window to the element's coordinates first. No-op for other
    # browsers.
    if 'firefox' not in driver.capabilities['browserName']:
        return
    x = object.location['x']
    y = object.location['y']
    driver.execute_script('window.scrollTo(%s,%s);' % (x, y))
def move_to_element_by_id(driver, name):
    # Locate by id, scroll it into view (Firefox workaround), then hover.
    element = driver.find_element_by_id(name)
    scroll(driver, element)
    hover = ActionChains(driver).move_to_element(element)
    hover.perform()

def move_to_element_by_class_name(driver, name):
    # Same as above, locating by class name.
    element = driver.find_element_by_class_name(name)
    scroll(driver, element)
    hover = ActionChains(driver).move_to_element(element)
    hover.perform()

def move_to_element_by_xpath(driver, xpath):
    # Same as above, locating by XPath.
    element = driver.find_element_by_xpath(xpath)
    scroll(driver, element)
    hover = ActionChains(driver).move_to_element(element)
    hover.perform()
def element_click_by_id(driver, name):
    # Hover first (scrolls into view on Firefox), then click.
    move_to_element_by_id(driver, name)
    driver.find_element_by_id(name).click()

def element_click_by_class_name(driver, name):
    # Same as above, locating by class name.
    move_to_element_by_class_name(driver, name)
    driver.find_element_by_class_name(name).click()

def element_click_by_xpath(driver, xpath):
    # Same as above, locating by XPath.
    move_to_element_by_xpath(driver, xpath)
    driver.find_element_by_xpath(xpath).click()
def element_send_keys_by_id(driver, name, keys):
move_to_element_by_id(driver, name)
driver.find_element_by_id(name).clear()
driver.find_element_by_id(name).send_keys(keys)
def element_send_keys_by_class_name(driver, name, keys):
move_to_element_by_class_name(driver, name)
driver.find_element_by_class_name(name).clear()
driver.find_element_by_class_name(name).send_keys(keys)
def element_send_keys_by_xpath(driver, xpath, keys):
move_to_element_by_xpath(driver, xpath)
driver.find_element_by_xpath(xpath).clear()
driver.find_element_by_xpath(xpath).send_keys(keys)
def element_get_attribute_by_id(driver, name, attr):
move_to_element_by_id(driver, name)
return driver.find_element_by_id(name).get_attribute(attr)
def element_get_attribute_by_class_name(driver, name, attr):
move_to_element_by_class_name(driver, name)
return driver.find_element_by_class_name(name).get_attribute(attr)
def element_get_attribute_by_xpath(driver, xpath, attr):
move_to_element_by_xpath(driver, xpath)
return driver.find_element_by_xpath(xpath).get_attribute(attr)
def maximize_login(request):
if request.param == "chrome":
driver = webdriver.Remote(
command_executor="http://" + Cookies.get_host_ip() + ":4444/wd/hub",
desired_capabilities={'browserName': 'chrome', 'javascriptEnabled': True}
)
if request.param == "firefox":
driver = webdriver.Remote(
command_executor="http://" + Cookies.get_host_ip() + ":4444/wd/hub",
desired_capabilities={'browserName': 'firefox', 'javascriptEnabled': True}
)
session = request.node
for item in session.items:
cls = item.getparent(pytest.Class)
setattr(cls.obj, "driver", driver)
driver.get("http://" + Cookies.get_host_ip() + ":9000/login")
driver.maximize_window()
return driver
@pytest.fixture(params=["chrome", "firefox"], scope="session")
def setup_login(request):
driver = maximize_login(request)
yield
driver.quit()
@pytest.fixture(params=["chrome", "firefox"], scope="session")
def setup_editor(request):
driver = maximize_login(request)
Cookies.admin_login(driver)
driver.get("http://" + Cookies.get_host_ip() + ":9000/conference/BC14/submission")
WebDriverWait(driver, 30).until(
EC.presence_of_element_located((By.CLASS_NAME, 'btn-success'))
)
yield
driver.quit()
@pytest.fixture(params=["chrome", "firefox"], scope="session")
def setup_conference_creation(request):
driver = maximize_login(request)
Cookies.admin_login(driver=driver)
driver.get("http://" + Cookies.get_host_ip() + ":9000/dashboard/conference")
WebDriverWait(driver, 30).until(
EC.presence_of_element_located((By.CLASS_NAME, 'btn-success'))
)
yield
driver.quit()
| 29,046
|
https://github.com/ibrahim85/Predicting-Text-Readability-using-Natural-Language-Processing-Methods/blob/master/.gitignore
|
Github Open Source
|
Open Source
|
MIT
| 2,019
|
Predicting-Text-Readability-using-Natural-Language-Processing-Methods
|
ibrahim85
|
Ignore List
|
Code
| 19
| 109
|
# dataset
data/WeeBit/
data/weebit.csv
data/weebit_train.csv
data/weebit_test.csv
# features
features/weebit_train_with_features.csv
features/weebit_test_with_features.csv
# saved ML models
ml_models/models/saved_models
# other
**/__pycache__/
.ipynb_checkpoints
| 17,350
|
https://github.com/AragondaJyosna/chainladder-python/blob/master/chainladder/methods/mack.py
|
Github Open Source
|
Open Source
|
MIT
| 2,019
|
chainladder-python
|
AragondaJyosna
|
Python
|
Code
| 600
| 2,289
|
"""
:ref:`chainladder.methods<methods>`.MackChainladder
===================================================
:ref:`MackChainladder<mack>` produces the same IBNR results as the deterministic
approach, but ldf selection happens in a regression framework that allows for
the calculation of prediction errors. The Mack Chainladder technique is the OG
stochastic method.
"""
import numpy as np
import pandas as pd
import copy
from chainladder.methods import Chainladder
class MackChainladder(Chainladder):
""" Basic stochastic chainladder method popularized by Thomas Mack
Parameters
----------
None
Attributes
----------
triangle
returns **X**
ultimate_
The ultimate losses per the method
ibnr_
The IBNR per the method
full_expectation_
The ultimates back-filled to each development period in **X** replacing
the known data
full_triangle_
The ultimates back-filled to each development period in **X** retaining
the known data
summary_
summary of the model
full_std_err_
The full standard error
total_process_risk_
The total process error
total_parameter_risk_
The total parameter error
mack_std_err_
The total prediction error by origin period
total_mack_std_err_
The total prediction error across all origin periods
"""
def fit(self, X, y=None, sample_weight=None):
"""Fit the model with X.
Parameters
----------
X : Triangle-like
Data to which the model will be applied.
y : Ignored
sample_weight : Ignored
Returns
-------
self : object
Returns the instance itself.
"""
super().fit(X, y, sample_weight)
self._mack_recursion('param_risk')
self._mack_recursion('process_risk')
self._mack_recursion('total_param_risk')
return self
@property
def full_std_err_(self):
obj = copy.deepcopy(self.X_)
tri_array = self.full_triangle_.triangle
weight_dict = {'regression': 2, 'volume': 1, 'simple': 0}
val = np.array([weight_dict.get(item.lower(), 2)
for item in list(self.average_) + ['volume']])
for i in [2, 1, 0]:
val = np.repeat(np.expand_dims(val, 0), tri_array.shape[i], axis=0)
k, v, o, d = val.shape
weight = np.sqrt(tri_array[..., :len(self.X_.ddims)]**(2-val))
weight[weight == 0] = np.nan
obj.triangle = self.X_.sigma_.triangle / weight
w = np.concatenate((self.X_.w_, np.ones((k, v, o, 1))*np.nan), axis=3)
w[np.isnan(w)] = 1
obj.triangle = np.nan_to_num(obj.triangle) * w
obj.nan_override = True
return obj
@property
def total_process_risk_(self):
obj = copy.deepcopy(self.process_risk_)
obj.triangle = np.sqrt(np.nansum(self.process_risk_.triangle**2, 2))
obj.triangle = np.expand_dims(obj.triangle, 2)
obj.odims = ['tot_proc_risk']
return obj
def _mack_recursion(self, est):
obj = copy.deepcopy(self.X_)
# replace this with nan_x_latest
nans = np.expand_dims(np.expand_dims(self.X_.nan_triangle(), 0), 0)
k, v, o, d = self.X_.shape
nans = nans * np.ones((k, v, o, d))
nans = np.concatenate((nans, np.ones((k, v, o, 1))*np.nan), 3)
nans = 1-np.nan_to_num(nans)
properties = self.full_triangle_
obj.ddims, obj.valuation = properties.ddims, properties.valuation
obj.nan_override = True
risk_arr = np.zeros((k, v, o, 1))
if est == 'param_risk':
obj.triangle = self._get_risk(nans, risk_arr,
obj.std_err_.triangle)
self.parameter_risk_ = obj
elif est == 'process_risk':
obj.triangle = self._get_risk(nans, risk_arr,
self.full_std_err_.triangle)
self.process_risk_ = obj
else:
risk_arr = risk_arr[..., 0:1, :]
obj.triangle = self._get_tot_param_risk(risk_arr)
obj.odims = ['Total param risk']
self.total_parameter_risk_ = obj
def _get_risk(self, nans, risk_arr, std_err):
full_tri = self.full_triangle_.triangle[..., :len(self.X_.ddims)]
t1_t = (full_tri * std_err)**2
extend = self.X_.ldf_.shape[-1]-self.X_.shape[-1]+1
ldf = self.X_.ldf_.triangle[..., :len(self.X_.ddims)-1]
ldf = np.concatenate(
(ldf, np.prod(self.X_.ldf_.triangle[..., -extend:], -1,
keepdims=True)), -1)
for i in range(len(self.X_.ddims)):
t1 = t1_t[..., i:i+1]
t2 = (ldf[..., i:i+1] * risk_arr[..., i:i+1])**2
t_tot = np.sqrt(t1+t2)*nans[..., i+1:i+2]
risk_arr = np.concatenate((risk_arr, t_tot), 3)
return risk_arr
def _get_tot_param_risk(self, risk_arr):
""" This assumes triangle symmertry """
t1 = self.full_triangle_.triangle[..., :len(self.X_.ddims)] - \
np.nan_to_num(self.X_.triangle) + \
np.nan_to_num(self.X_.get_latest_diagonal(False).triangle)
t1 = np.expand_dims(np.sum(t1*self.X_.std_err_.triangle, 2), 2)
extend = self.X_.ldf_.shape[-1]-self.X_.shape[-1]+1
ldf = self.X_.ldf_.triangle[..., :len(self.X_.ddims)-1]
ldf = np.concatenate(
(ldf, np.prod(self.X_.ldf_.triangle[..., -extend:], -1,
keepdims=True)), -1)
ldf = np.unique(ldf, axis=-2)
for i in range(self.full_triangle_.shape[-1]-1):
t_tot = np.sqrt((t1[..., i:i+1])**2 + (ldf[..., i:i+1] *
risk_arr[..., -1:])**2)
risk_arr = np.concatenate((risk_arr, t_tot), -1)
return risk_arr
@property
def mack_std_err_(self):
obj = copy.deepcopy(self.parameter_risk_)
obj.triangle = np.sqrt(self.parameter_risk_.triangle**2 +
self.process_risk_.triangle**2)
return obj
@property
def total_mack_std_err_(self):
# This might be better as a dataframe
obj = copy.deepcopy(self.X_.latest_diagonal)
obj.triangle = np.sqrt(self.total_process_risk_.triangle**2 +
self.total_parameter_risk_.triangle**2)
obj.triangle = obj.triangle[..., -1:]
obj.ddims = ['Total Mack Std Err']
obj.odims = ['Total']
return obj
@property
def summary_(self):
# This might be better as a dataframe
obj = copy.deepcopy(self.X_)
obj.triangle = np.concatenate(
(self.X_.latest_diagonal.triangle,
self.ibnr_.triangle,
self.ultimate_.triangle,
self.mack_std_err_.triangle[..., -1:]), 3)
obj.ddims = ['Latest', 'IBNR', 'Ultimate', 'Mack Std Err']
obj.nan_override = True
return obj
| 28,072
|
https://github.com/dekbanbanproject/rpst/blob/master/database/migrations/2021_06_13_100034_create_page_groups_table.php
|
Github Open Source
|
Open Source
|
MIT
| 2,021
|
rpst
|
dekbanbanproject
|
PHP
|
Code
| 63
| 279
|
<?php
use Illuminate\Database\Migrations\Migration;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Support\Facades\Schema;
class CreatePageGroupsTable extends Migration
{
/**
* Run the migrations.
*
* @return void
*/
public function up()
{
if (!Schema::hasTable('page_groups'))
{
Schema::create('page_groups', function (Blueprint $table) {
$table->increments('group_id',11);
$table->string('group_name',255)->nullable();
$table->mediumText('group_detail')->nullable();
$table->enum('status', ['true', 'false'])->default('false');
$table->string('group_type',20)->nullable();
$table->string('layout_id',11)->nullable();
$table->timestamps();
});
}
}
/**
* Reverse the migrations.
*
* @return void
*/
public function down()
{
Schema::dropIfExists('page_groups');
}
}
| 7,032
|
https://github.com/octoshell/octoshell-v2/blob/master/config/initializers/jruby_sendmail_fix.rb
|
Github Open Source
|
Open Source
|
MIT
| 2,023
|
octoshell-v2
|
octoshell
|
Ruby
|
Code
| 90
| 339
|
# got if from here: http://stackoverflow.com/questions/8746699/rails-app-wont-send-mail-via-sendmail-under-jruby
if RUBY_ENGINE == 'jruby'
module Mail
class Sendmail
def initialize(values)
self.settings = { :location => '/usr/sbin/sendmail',
:arguments => '-i -t' }.merge(values)
end
attr_accessor :settings
def deliver!(mail)
envelope_from = mail.return_path || mail.sender || mail.from_addrs.first
return_path = "-f \"#{envelope_from.to_s.shellescape}\"" if envelope_from
arguments = [settings[:arguments], return_path].compact.join(" ")
Sendmail.call(settings[:location], arguments, mail.destinations.collect(&:shellescape).join(" "), mail)
end
def Sendmail.call(path, arguments, destinations, mail)
IO.popen("#{path} #{arguments} #{destinations}", "r+") do |io|
io.puts mail.encoded.to_lf
io.close_write # <------ changed this from flush to close_write
sleep 1 # <-------- added this line
end
end
end
end
end
| 45,852
|
https://github.com/Peterkoricar/Open-Lab-03.05/blob/master/Open-Lab-03.05/Comparator.cs
|
Github Open Source
|
Open Source
|
MIT
| null |
Open-Lab-03.05
|
Peterkoricar
|
C#
|
Code
| 24
| 88
|
using System;
using System.Text.RegularExpressions;
namespace Open_Lab_03._05
{
public class Comparator
{
public bool MatchCaseInsensitive(string str1, string str2)
{
return (Regex.IsMatch(str1,str2, RegexOptions.IgnoreCase));
}
}
}
| 17,325
|
https://github.com/ricardolopes86/nvm-exercises/blob/master/exercise2/build.py
|
Github Open Source
|
Open Source
|
Apache-2.0
| null |
nvm-exercises
|
ricardolopes86
|
Python
|
Code
| 106
| 362
|
"""
This script will compile/build our application exercise2.py
"""
import py_compile
import os
def build():
"""
Function that will be used to build our application.
"""
print("\033[1;33;40mCompiling our application...\033[0m")
py_compile.compile("exercise2.py", "build/app.pyc")
print("\033[1;32;40mDone... Application can be found in build/ directory\033[0m")
def check_dir():
"""
Function that will be used to check if build directory exists, if not, create it then call build function.
"""
buil_dir="build/"
if not os.path.exists(buil_dir):
print("\033[1;33;40mCreating build directory...\033[0m")
try:
os.mkdir(buil_dir)
print("\033[1;32;40mBuild directory created...\033[0m")
except Exception as error:
raise Exception("Error creating build directory at: {}".format(buil_dir))
print("\033[1;35;40mError creating dir: {}\033[0m".format(error))
print()
else:
build()
else:
print("\033[1;33;40mBuild directory found... build to build now!\033[0m")
build()
if __name__ == "__main__":
check_dir()
| 11,017
|
https://github.com/deamont66/yottly-hlidacstatu/blob/master/src/app/models/Contract.ts
|
Github Open Source
|
Open Source
|
MIT
| null |
yottly-hlidacstatu
|
deamont66
|
TypeScript
|
Code
| 53
| 147
|
import { Contractor } from 'app/models/Contractor';
import { Client } from 'app/models/Client';
import { Checkable } from 'app/models/Checkable';
export interface ContractData extends Omit<Contract, 'contractors' | 'checked'> {
contractors: Omit<Contractor, 'checked'>[];
client: Omit<Client, 'contracts' | 'checked'>;
}
export interface Contract extends Checkable {
id: number;
subject: string;
numberOfErrors: number;
numberOfWords: number;
contractors: number[];
}
| 44,297
|
https://github.com/AgavisJSC/station-mobile/blob/master/src/screens/NoInternet.tsx
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,021
|
station-mobile
|
AgavisJSC
|
TSX
|
Code
| 183
| 574
|
import React, { ReactElement, useEffect, useState } from 'react'
import { Icon, Text } from 'components'
import StatusBar from 'components/StatusBar'
import { SafeAreaView } from 'react-native-safe-area-context'
import { StyleSheet } from 'react-native'
import NetInfo, {
NetInfoState,
} from '@react-native-community/netinfo'
import color from 'styles/color'
import layout from 'styles/layout'
const NoInternet = (): ReactElement => {
const [isNetworkUnavailable, setNetworkUnavailable] = useState(
false
)
useEffect(() => {
const unsubscribe = NetInfo.addEventListener(
(state: NetInfoState) => {
setNetworkUnavailable(
state.isConnected === false ||
state.isInternetReachable === false
)
}
)
return (): void => unsubscribe()
}, [])
return (
<>
{isNetworkUnavailable && (
<SafeAreaView style={styles.container}>
<StatusBar theme="white" />
<Icon
name="signal-wifi-off"
color={color.sapphire}
size={53}
style={styles.icon}
/>
<Text fontType="bold" style={styles.title}>
{'No internet connection'}
</Text>
<Text fontType="book" style={styles.subTitle}>
{'Please check your internet connection and retry again.'}
</Text>
</SafeAreaView>
)}
</>
)
}
const styles = StyleSheet.create({
container: {
position: 'absolute',
width: layout.getWindowWidth(),
height: layout.getWindowHeight(),
justifyContent: 'center',
alignItems: 'center',
paddingHorizontal: 20,
backgroundColor: color.sky,
},
icon: { marginBottom: 15 },
title: {
fontSize: 24,
color: color.sapphire,
marginBottom: 5,
lineHeight: 36,
},
subTitle: {
fontSize: 16,
color: color.sapphire,
lineHeight: 24,
textAlign: 'center',
},
})
export default NoInternet
| 11,312
|
https://github.com/Bayusyaits/personal_web/blob/master/resources/assets/frontend/sass/partials/_mixins.scss
|
Github Open Source
|
Open Source
|
MIT
| null |
personal_web
|
Bayusyaits
|
SCSS
|
Code
| 294
| 1,127
|
// rem fallback - credits: http://zerosixthree.se/
@function calculateRem($size) {
$remSize: $size / 16px;
@return $remSize * 1rem;
}
@mixin round($size) {
border-radius: 100%;
width: $size;
height: $size;
margin-left: auto;
margin-right: auto;
}
@mixin font-size($size) {
font-size: $size;
font-size: calculateRem($size);
}
// center vertically and/or horizontally an absolute positioned element
@mixin center($xy:xy) {
@if $xy == xy {
left: 50%;
top: 50%;
bottom: auto;
right: auto;
@include transform(translateX(-50%) translateY(-50%));
}
@else if $xy == x {
left: 50%;
right: auto;
@include transform(translateX(-50%));
}
@else if $xy == y {
top: 50%;
bottom: auto;
@include transform(translateY(-50%));
}
}
// border radius
@mixin border-radius($radius:.25em) {
border-radius: $radius;
}
@function set-text-color($color) {
@if (lightness($color) > 50) {
@return #1d1f1e; // Lighter backgorund, return dark color
} @else {
@return #ffffff; // Darker background, return light color
}
}
/* Transition */
@mixin transition-duration($duration) {
-webkit-transition-duration: $duration;
-moz-transition-duration: $duration;
-ms-transition-duration: $duration;
-o-transition-duration: $duration;
transition-duration: $duration;
}
@mixin hypens($hypens...) {
-webkit-hyphens: $hypens;
-moz-hyphens: $hypens;
hyphens: $hypens;
}
@mixin transition($transition...) {
-moz-transition: $transition;
-o-transition: $transition;
-webkit-transition: $transition;
transition: $transition;
}
@mixin transition-property($property...) {
-moz-transition-property: $property;
-o-transition-property: $property;
-webkit-transition-property: $property;
transition-property: $property;
}
@mixin transition-duration($duration...) {
-moz-transition-property: $duration;
-o-transition-property: $duration;
-webkit-transition-property: $duration;
transition-property: $duration;
}
@mixin transition-timing-function($timing...) {
-moz-transition-timing-function: $timing;
-o-transition-timing-function: $timing;
-webkit-transition-timing-function: $timing;
transition-timing-function: $timing;
}
@mixin transition-delay($delay...) {
-moz-transition-delay: $delay;
-o-transition-delay: $delay;
-webkit-transition-delay: $delay;
transition-delay: $delay;
}
// generic transform
@mixin transform($transforms) {
-moz-transform: $transforms;
-o-transform: $transforms;
-ms-transform: $transforms;
-webkit-transform: $transforms;
transform: $transforms;
}
// rotate
@mixin rotate ($deg) {
@include transform(rotate(#{$deg}deg));
}
// scale
@mixin scale($scale) {
@include transform(scale($scale));
}
// translate
@mixin translate ($x, $y) {
@include transform(translate($x, $y));
}
// skew
@mixin skew ($x, $y) {
@include transform(skew(#{$x}deg, #{$y}deg));
}
//transform origin
@mixin transform-origin ($origin) {
moz-transform-origin: $origin;
-o-transform-origin: $origin;
-ms-transform-origin: $origin;
-webkit-transform-origin: $origin;
transform-origin: $origin;
}
| 23,895
|
https://github.com/TiagoDanin-Forks/hacktoberfest/blob/master/src/pages/minha-area.tsx
|
Github Open Source
|
Open Source
|
RSA-MD
| 2,021
|
hacktoberfest
|
TiagoDanin-Forks
|
TypeScript
|
Code
| 198
| 639
|
import React, { useEffect, useState } from 'react'
import { Grid } from '@material-ui/core'
//Internal Components
import Layout from '@components/layout'
import { PersonalDataForm, ShippingForm } from '@components/forms'
import User, { UserProps } from '@services/user'
import { Snackbar } from '@material-ui/core'
import MuiAlert, { AlertProps, Color } from '@material-ui/lab/Alert'
interface HacktoberFestAlertProps extends AlertProps {
message?: string
}
const Alert = (props: HacktoberFestAlertProps) => <MuiAlert elevation={6} variant="filled" {...props}> {props.message} </MuiAlert>
const PersonalAreaPage = () => {
const [alert, setAlert] = useState<HacktoberFestAlertProps>()
const [loaded, setIsLoaded] = useState<boolean>(false)
const [user, setUser] = useState<UserProps>()
const closeSnackbar = () => setAlert(undefined)
const showSnackBar = (severity: Color, message: string) => setAlert({severity, message})
useEffect(() => {
const fetchUser = async () => {
const user: UserProps | undefined = await User.Service.getInstance().GetUser()
setUser(user)
setIsLoaded(true)
}
fetchUser()
}, [])
return (
<Layout title="Minha Área - Globo Hacktoberfest" description="Minha Área - Globo Hacktoberfest" headerTitle="Minha área">
<React.Fragment>
<Snackbar
anchorOrigin={{vertical: 'bottom', horizontal: 'center'}}
autoHideDuration={5000}
onClose={closeSnackbar}
open={alert ? true : false}>
<Alert severity={alert?.severity} message={alert?.message}/>
</Snackbar>
<Grid container alignItems="flex-start" alignContent="center" direction="column">
{ loaded && user?.id &&
<React.Fragment>
<Grid item xs={8} lg={4}>
<PersonalDataForm showSnackBar={showSnackBar} user={user}/>
</Grid>
<Grid item xs={8} lg={4}>
<ShippingForm showSnackBar={showSnackBar} user={user}/>
</Grid>
</React.Fragment>
}
</Grid>
</React.Fragment>
</Layout>
)
}
export default PersonalAreaPage
| 43,792
|
https://github.com/Novelate/NovelateEngine/blob/master/source/novelate/submain.d
|
Github Open Source
|
Open Source
|
LicenseRef-scancode-free-unknown, MIT
| 2,020
|
NovelateEngine
|
Novelate
|
D
|
Code
| 31
| 82
|
module novelate.submain;
version (NOVELATE_CUSTOM_MAIN)
{
}
else
{
/// Entry point when no custom entry point is specified.
private void main()
{
import novelate.core : initialize, run;
initialize();
run();
}
}
| 26,705
|
https://github.com/isgasho/kira/blob/master/kira-lua/src/event.rs
|
Github Open Source
|
Open Source
|
MIT
| 2,020
|
kira
|
isgasho
|
Rust
|
Code
| 107
| 424
|
use std::sync::atomic::{AtomicUsize, Ordering};
use kira::Event;
use mlua::prelude::*;
static NEXT_EVENT_INDEX: AtomicUsize = AtomicUsize::new(0);
#[derive(Debug, Copy, Clone)]
pub struct CustomEvent(pub usize);
impl CustomEvent {
pub fn new() -> Self {
let index = NEXT_EVENT_INDEX.fetch_add(1, Ordering::Relaxed);
CustomEvent(index)
}
}
impl LuaUserData for CustomEvent {
fn add_methods<'lua, M: LuaUserDataMethods<'lua, Self>>(methods: &mut M) {
methods.add_meta_method(LuaMetaMethod::Eq, |_: &Lua, this: &Self, other: Self| {
Ok(this.0 == other.0)
})
}
}
pub struct LEvent(pub Event<CustomEvent>);
impl<'lua> ToLua<'lua> for LEvent {
fn to_lua(self, lua: &'lua Lua) -> LuaResult<LuaValue<'lua>> {
let table = lua.create_table()?;
match self.0 {
Event::MetronomeIntervalPassed(interval) => {
table.set("kind", "metronomeIntervalPassed")?;
table.set("interval", interval)?;
}
Event::Custom(event) => {
table.set("kind", "custom")?;
table.set("event", event)?;
}
}
Ok(LuaValue::Table(table))
}
}
| 20,405
|
https://github.com/dwrensha/miniF2F/blob/master/lean/src/test/imo-2007-a6.lean
|
Github Open Source
|
Open Source
|
MIT
| null |
miniF2F
|
dwrensha
|
Lean
|
Code
| 91
| 164
|
/-
Copyright (c) 2021 OpenAI. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Kunhao Zheng
-/
import data.real.nnreal
import data.real.basic
import algebra.big_operators.basic
open_locale big_operators
example (a : ℕ → nnreal) (h₀ : ∑ x in finset.range 100, (( a (x + 1) ) ^ 2) = 1 ) : ∑ x in finset.range 99, (( a (x + 1) ) ^ 2 * a (x + 2 )) + (a 100) ^ 2 * a 1 < 12 / 25 :=
begin
sorry
end
| 29,632
|
https://github.com/lmichaelis/OpenGothic/blob/master/game/graphics/visualobjects.h
|
Github Open Source
|
Open Source
|
MIT
| null |
OpenGothic
|
lmichaelis
|
C
|
Code
| 209
| 843
|
#pragma once
#include <Tempest/Signal>
#include "objectsbucket.h"
class SceneGlobals;
class AnimMesh;
class Sky;
class VisualObjects final {
public:
VisualObjects(const SceneGlobals& globals, const std::pair<Tempest::Vec3, Tempest::Vec3>& bbox);
~VisualObjects();
ObjectsBucket::Item get(const StaticMesh& mesh, const Material& mat,
size_t iboOffset, size_t iboLength, bool staticDraw);
ObjectsBucket::Item get(const StaticMesh& mesh, const Material& mat,
size_t iboOff, size_t iboLen,
const Tempest::AccelerationStructure* blas,
const Tempest::StorageBuffer& desc,
const Bounds& bbox, ObjectsBucket::Type bucket);
ObjectsBucket::Item get(const AnimMesh& mesh, const Material& mat,
size_t iboOff, size_t iboLen,
const MatrixStorage::Id& anim);
ObjectsBucket::Item get(const Tempest::VertexBuffer<Resources::Vertex>* vbo[], const Material& mat);
MatrixStorage::Id getMatrixes(Tempest::BufferHeap heap, size_t boneCnt);
auto matrixSsbo (Tempest::BufferHeap heap, uint8_t fId) const -> const Tempest::StorageBuffer&;
void setupUbo();
void preFrameUpdate(uint8_t fId);
void visibilityPass(const Frustrum fr[]);
void draw (Tempest::Encoder<Tempest::CommandBuffer>& enc, uint8_t fId);
void drawGBuffer (Tempest::Encoder<Tempest::CommandBuffer>& enc, uint8_t fId);
void drawShadow (Tempest::Encoder<Tempest::CommandBuffer>& enc, uint8_t fId, int layer=0);
void drawHiZ (Tempest::Encoder<Tempest::CommandBuffer> &cmd, uint8_t fId);
void resetIndex();
void resetTlas();
void recycle(Tempest::DescriptorSet&& del);
void setLandscapeBlas(const Tempest::AccelerationStructure* blas);
Tempest::Signal<void(const Tempest::AccelerationStructure* tlas)> onTlasChanged;
private:
ObjectsBucket& getBucket(ObjectsBucket::Type type, const Material& mat,
const StaticMesh* st, const AnimMesh* anim, const Tempest::StorageBuffer* desc);
void mkIndex();
void commitUbo(uint8_t fId);
void mkTlas(uint8_t fId);
const SceneGlobals& globals;
VisibilityGroup visGroup;
MatrixStorage matrix;
std::vector<std::unique_ptr<ObjectsBucket>> buckets;
std::vector<ObjectsBucket*> index;
size_t lastSolidBucket = 0;
std::vector<Tempest::DescriptorSet> recycled[Resources::MaxFramesInFlight];
uint8_t recycledId = 0;
bool needtoInvalidateTlas = false;
Tempest::AccelerationStructure tlas;
const Tempest::AccelerationStructure* landBlas = nullptr;
friend class ObjectsBucket;
friend class ObjectsBucket::Item;
};
| 37,769
|
https://github.com/bots-house/share-file-bot/blob/master/bot/middleware.go
|
Github Open Source
|
Open Source
|
MIT
| 2,021
|
share-file-bot
|
bots-house
|
Go
|
Code
| 307
| 1,275
|
package bot
import (
"context"
"encoding/json"
"fmt"
"strconv"
"strings"
tgbotapi "github.com/bots-house/telegram-bot-api"
"github.com/fatih/structs"
"github.com/friendsofgo/errors"
"github.com/getsentry/sentry-go"
"github.com/bots-house/share-file-bot/pkg/log"
"github.com/bots-house/share-file-bot/pkg/tg"
"github.com/bots-house/share-file-bot/service"
)
const refDeepLinkPrefix = "ref_"
func extractRefFromMsg(msg *tgbotapi.Message) string {
if msg != nil && msg.Command() == cmdStart {
args := msg.CommandArguments()
if !strings.HasPrefix(args, refDeepLinkPrefix) {
return ""
}
ref := strings.TrimPrefix(args, refDeepLinkPrefix)
if !strings.Contains(ref, "-") {
msg.Text = "/start"
return ref
}
items := strings.Split(ref, "-")
if len(items) > 1 {
msg.Text = "/start " + items[1]
}
return items[0]
}
return ""
}
func serializeStruct(v interface{}) map[string]interface{} {
body, err := json.Marshal(v)
if err != nil {
return structs.Map(v)
}
result := map[string]interface{}{}
if err := json.Unmarshal(body, &result); err != nil {
return structs.Map(v)
}
return result
}
func newAuthMiddleware(srv *service.Auth) tg.Middleware {
return func(next tg.Handler) tg.Handler {
return tg.HandlerFunc(func(ctx context.Context, update *tgbotapi.Update) error {
withSentryHub(ctx, func(hub *sentry.Hub) {
hub.AddBreadcrumb(&sentry.Breadcrumb{
Message: "Update",
Level: sentry.LevelInfo,
Data: serializeStruct(update),
Category: "middleware",
}, nil)
})
var (
tgUser *tgbotapi.User
)
switch {
case update.Message != nil:
tgUser = update.Message.From
case update.EditedMessage != nil:
tgUser = update.EditedMessage.From
case update.CallbackQuery != nil:
tgUser = update.CallbackQuery.From
case update.ChannelPost != nil:
tgUser = nil
default:
log.Warn(ctx, "unsupported update", "id", update.UpdateID)
return nil
}
if tgUser != nil {
if tgUser.UserName != "" {
ctx = log.With(ctx, "user", fmt.Sprintf("%s#%d", tgUser.UserName, tgUser.ID))
} else {
ctx = log.With(ctx, "user", fmt.Sprintf("#%d", tgUser.ID))
}
ref := extractRefFromMsg(update.Message)
user, err := srv.Auth(ctx, &service.UserInfo{
ID: tgUser.ID,
FirstName: tgUser.FirstName,
LastName: tgUser.LastName,
Username: tgUser.UserName,
LanguageCode: tgUser.LanguageCode,
Ref: ref,
})
if err != nil {
return errors.Wrap(err, "auth service")
}
withSentryHub(ctx, func(hub *sentry.Hub) {
ctx = withUser(ctx, user)
hub.AddBreadcrumb(&sentry.Breadcrumb{
Message: "User",
Level: sentry.LevelInfo,
Data: serializeStruct(user),
Category: "auth",
}, nil)
hub.ConfigureScope(func(scope *sentry.Scope) {
scope.SetUser(sentry.User{
ID: strconv.Itoa(int(user.ID)),
Username: user.Username.String,
})
})
})
}
return next.HandleUpdate(ctx, update)
})
}
}
func withSentryHub(ctx context.Context, do func(hub *sentry.Hub)) {
if hub := sentry.GetHubFromContext(ctx); hub != nil {
do(hub)
}
}
| 31,337
|
https://github.com/mittachaitu/bootstrap/blob/master/gke-openebs/hack/install-openebs-0.5-operator.sh
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,021
|
bootstrap
|
mittachaitu
|
Shell
|
Code
| 8
| 94
|
kubectl apply -f https://raw.githubusercontent.com/openebs/openebs/v0.5/k8s/openebs-operator.yaml
kubectl apply -f https://raw.githubusercontent.com/openebs/openebs/v0.5/k8s/openebs-storageclasses.yaml
| 50,353
|
https://github.com/ChuniMuni/RED/blob/master/src/RobotFrameworkCore/org.robotframework.ide.core-functions/src/test/java/org/rf/ide/core/testdata/text/write/txt/tables/variables/update/ListVariablesUpdateTest.java
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,017
|
RED
|
ChuniMuni
|
Java
|
Code
| 44
| 139
|
/*
* Copyright 2016 Nokia Solutions and Networks
* Licensed under the Apache License, Version 2.0,
* see license.txt file for details.
*/
package org.rf.ide.core.testdata.text.write.txt.tables.variables.update;
import org.rf.ide.core.testdata.text.write.tables.variables.update.AListVariablesUpdateTest;
/**
* @author wypych
*/
public class ListVariablesUpdateTest extends AListVariablesUpdateTest {
public ListVariablesUpdateTest() {
super("txt");
}
}
| 22,283
|
https://github.com/dick-the-deployer/dick/blob/master/dick-web/src/test/java/com/dickthedeployer/dick/web/service/DickYmlServiceTest.java
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,021
|
dick
|
dick-the-deployer
|
Java
|
Code
| 160
| 628
|
/*
* Copyright dick the deployer.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.dickthedeployer.dick.web.service;
import com.dickthedeployer.dick.web.ContextTestBase;
import com.dickthedeployer.dick.web.domain.Build;
import com.dickthedeployer.dick.web.domain.Project;
import com.dickthedeployer.dick.web.exception.DickFileMissingException;
import com.dickthedeployer.dick.web.model.dickfile.Dickfile;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import static org.assertj.core.api.AssertionsForClassTypes.assertThat;
/**
* @author mariusz
*/
public class DickYmlServiceTest extends ContextTestBase {
@Autowired
DickYmlService dickYmlService;
@Test
public void shouldLoadDickfile() throws DickFileMissingException {
Dickfile dickfile = dickYmlService.loadDickFile(new Build.Builder()
.withSha("bf3d4f2b08952b27e1df8482c503c9e8df6fea96")
.withRef("master")
.withRepository("https://github.com/dick-the-deployer/examples.git")
.withProject(new Project.Builder()
.withRef("master")
.withName("dick-the-deployer/dick")
.withRepository("https://github.com/dick-the-deployer/examples.git")
.build()
).build()
);
assertThat(dickfile).isNotNull();
assertThat(dickfile.getPipeline().getStages())
.asList().hasSize(2).extracting("name").containsExactly("first", "second");
assertThat(dickfile.getJobs()).asList().hasSize(2);
}
}
| 32,357
|
https://github.com/gulfofmaine/Neracoos-1-Buoy-App/blob/master/src/Features/StatefulMap/actionTypes.ts
|
Github Open Source
|
Open Source
|
MIT
| 2,021
|
Neracoos-1-Buoy-App
|
gulfofmaine
|
TypeScript
|
Code
| 12
| 27
|
/**
 * Map state action types.
 *
 * SET_VIEW is the Redux-style action `type` tag for updating the map's
 * view, namespaced under "map_state" to avoid collisions with other slices.
 * NOTE(review): the exact payload/reducer behavior is not visible here —
 * confirm against the reducer that handles this type.
 */
export const SET_VIEW = "map_state/view/set"
| 6,085
|
https://github.com/HulinCedric/live-it-rich/blob/master/src/Live It Rich/BillingForm.Designer.vb
|
Github Open Source
|
Open Source
|
MIT
| null |
live-it-rich
|
HulinCedric
|
Visual Basic
|
Code
| 568
| 2,481
|
<Global.Microsoft.VisualBasic.CompilerServices.DesignerGenerated()> _
Partial Class BillingForm
Inherits System.Windows.Forms.Form
'Form remplace la méthode Dispose pour nettoyer la liste des composants.
<System.Diagnostics.DebuggerNonUserCode()> _
Protected Overrides Sub Dispose(ByVal disposing As Boolean)
Try
If disposing AndAlso components IsNot Nothing Then
components.Dispose()
End If
Finally
MyBase.Dispose(disposing)
End Try
End Sub
'Requise par le Concepteur Windows Form
Private components As System.ComponentModel.IContainer
'REMARQUE : la procédure suivante est requise par le Concepteur Windows Form
'Elle peut être modifiée à l'aide du Concepteur Windows Form.
'Ne la modifiez pas à l'aide de l'éditeur de code.
<System.Diagnostics.DebuggerStepThrough()> _
Private Sub InitializeComponent()
Me.Panel1 = New System.Windows.Forms.Panel
Me.GroupBox2 = New System.Windows.Forms.GroupBox
Me.Button4 = New System.Windows.Forms.Button
Me.Button2 = New System.Windows.Forms.Button
Me.Button3 = New System.Windows.Forms.Button
Me.GroupBox4 = New System.Windows.Forms.GroupBox
Me.ComboBox2 = New System.Windows.Forms.ComboBox
Me.GroupBox3 = New System.Windows.Forms.GroupBox
Me.Button1 = New System.Windows.Forms.Button
Me.Button22 = New System.Windows.Forms.Button
Me.GroupBox1 = New System.Windows.Forms.GroupBox
Me.ComboBox1 = New System.Windows.Forms.ComboBox
Me.Panel1.SuspendLayout()
Me.GroupBox2.SuspendLayout()
Me.GroupBox4.SuspendLayout()
Me.GroupBox3.SuspendLayout()
Me.GroupBox1.SuspendLayout()
Me.SuspendLayout()
'
'Panel1
'
Me.Panel1.Controls.Add(Me.GroupBox2)
Me.Panel1.Controls.Add(Me.GroupBox4)
Me.Panel1.Controls.Add(Me.GroupBox3)
Me.Panel1.Controls.Add(Me.GroupBox1)
Me.Panel1.Dock = System.Windows.Forms.DockStyle.Fill
Me.Panel1.Location = New System.Drawing.Point(0, 0)
Me.Panel1.Name = "Panel1"
Me.Panel1.Size = New System.Drawing.Size(625, 321)
Me.Panel1.TabIndex = 0
'
'GroupBox2
'
Me.GroupBox2.Controls.Add(Me.Button4)
Me.GroupBox2.Controls.Add(Me.Button2)
Me.GroupBox2.Controls.Add(Me.Button3)
Me.GroupBox2.Location = New System.Drawing.Point(368, 42)
Me.GroupBox2.Name = "GroupBox2"
Me.GroupBox2.Size = New System.Drawing.Size(206, 172)
Me.GroupBox2.TabIndex = 24
Me.GroupBox2.TabStop = False
Me.GroupBox2.Text = "Provider Action"
'
'Button4
'
Me.Button4.Dock = System.Windows.Forms.DockStyle.Top
Me.Button4.Location = New System.Drawing.Point(3, 118)
Me.Button4.Name = "Button4"
Me.Button4.Size = New System.Drawing.Size(200, 51)
Me.Button4.TabIndex = 3
Me.Button4.Text = "Print Bill"
Me.Button4.UseVisualStyleBackColor = True
'
'Button2
'
Me.Button2.Dock = System.Windows.Forms.DockStyle.Top
Me.Button2.Location = New System.Drawing.Point(3, 67)
Me.Button2.Name = "Button2"
Me.Button2.Size = New System.Drawing.Size(200, 51)
Me.Button2.TabIndex = 2
Me.Button2.Text = "Edit Bill"
Me.Button2.UseVisualStyleBackColor = True
'
'Button3
'
Me.Button3.Dock = System.Windows.Forms.DockStyle.Top
Me.Button3.Location = New System.Drawing.Point(3, 16)
Me.Button3.Name = "Button3"
Me.Button3.Size = New System.Drawing.Size(200, 51)
Me.Button3.TabIndex = 1
Me.Button3.Text = "Add Provider"
Me.Button3.UseVisualStyleBackColor = True
'
'GroupBox4
'
Me.GroupBox4.Controls.Add(Me.ComboBox2)
Me.GroupBox4.Location = New System.Drawing.Point(368, 234)
Me.GroupBox4.Name = "GroupBox4"
Me.GroupBox4.Size = New System.Drawing.Size(206, 44)
Me.GroupBox4.TabIndex = 23
Me.GroupBox4.TabStop = False
Me.GroupBox4.Text = "Provider"
'
'ComboBox2
'
Me.ComboBox2.Dock = System.Windows.Forms.DockStyle.Fill
Me.ComboBox2.FormattingEnabled = True
Me.ComboBox2.Location = New System.Drawing.Point(3, 16)
Me.ComboBox2.Name = "ComboBox2"
Me.ComboBox2.Size = New System.Drawing.Size(200, 21)
Me.ComboBox2.TabIndex = 0
'
'GroupBox3
'
Me.GroupBox3.Controls.Add(Me.Button1)
Me.GroupBox3.Controls.Add(Me.Button22)
Me.GroupBox3.Location = New System.Drawing.Point(51, 42)
Me.GroupBox3.Name = "GroupBox3"
Me.GroupBox3.Size = New System.Drawing.Size(206, 120)
Me.GroupBox3.TabIndex = 22
Me.GroupBox3.TabStop = False
Me.GroupBox3.Text = "Guest Action"
'
'Button1
'
Me.Button1.Dock = System.Windows.Forms.DockStyle.Top
Me.Button1.Location = New System.Drawing.Point(3, 67)
Me.Button1.Name = "Button1"
Me.Button1.Size = New System.Drawing.Size(200, 51)
Me.Button1.TabIndex = 2
Me.Button1.Text = "Print Bill"
Me.Button1.UseVisualStyleBackColor = True
'
'Button22
'
Me.Button22.Dock = System.Windows.Forms.DockStyle.Top
Me.Button22.Location = New System.Drawing.Point(3, 16)
Me.Button22.Name = "Button22"
Me.Button22.Size = New System.Drawing.Size(200, 51)
Me.Button22.TabIndex = 1
Me.Button22.Text = "Edit Bill"
Me.Button22.UseVisualStyleBackColor = True
'
'GroupBox1
'
Me.GroupBox1.Controls.Add(Me.ComboBox1)
Me.GroupBox1.Location = New System.Drawing.Point(51, 234)
Me.GroupBox1.Name = "GroupBox1"
Me.GroupBox1.Size = New System.Drawing.Size(206, 44)
Me.GroupBox1.TabIndex = 21
Me.GroupBox1.TabStop = False
Me.GroupBox1.Text = "Guest"
'
'ComboBox1
'
Me.ComboBox1.Dock = System.Windows.Forms.DockStyle.Fill
Me.ComboBox1.FormattingEnabled = True
Me.ComboBox1.Location = New System.Drawing.Point(3, 16)
Me.ComboBox1.Name = "ComboBox1"
Me.ComboBox1.Size = New System.Drawing.Size(200, 21)
Me.ComboBox1.TabIndex = 0
'
'BillingForm
'
Me.AutoScaleDimensions = New System.Drawing.SizeF(6.0!, 13.0!)
Me.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font
Me.ClientSize = New System.Drawing.Size(625, 321)
Me.Controls.Add(Me.Panel1)
Me.Name = "BillingForm"
Me.Text = "Billing"
Me.Panel1.ResumeLayout(False)
Me.GroupBox2.ResumeLayout(False)
Me.GroupBox4.ResumeLayout(False)
Me.GroupBox3.ResumeLayout(False)
Me.GroupBox1.ResumeLayout(False)
Me.ResumeLayout(False)
End Sub
Friend WithEvents Panel1 As System.Windows.Forms.Panel
Friend WithEvents GroupBox2 As System.Windows.Forms.GroupBox
Friend WithEvents Button4 As System.Windows.Forms.Button
Friend WithEvents Button2 As System.Windows.Forms.Button
Friend WithEvents Button3 As System.Windows.Forms.Button
Friend WithEvents GroupBox4 As System.Windows.Forms.GroupBox
Friend WithEvents ComboBox2 As System.Windows.Forms.ComboBox
Friend WithEvents GroupBox3 As System.Windows.Forms.GroupBox
Friend WithEvents Button1 As System.Windows.Forms.Button
Friend WithEvents Button22 As System.Windows.Forms.Button
Friend WithEvents GroupBox1 As System.Windows.Forms.GroupBox
Friend WithEvents ComboBox1 As System.Windows.Forms.ComboBox
End Class
| 31,294
|
https://github.com/ewqasd200g/hhvm/blob/master/hphp/hack/src/hhbc/hhbc_hhas.ml
|
Github Open Source
|
Open Source
|
PHP-3.01, Zend-2.0, BSD-3-Clause
| 2,017
|
hhvm
|
ewqasd200g
|
OCaml
|
Code
| 1,124
| 2,737
|
(**
* Copyright (c) 2017, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the "hack" directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
*)
module B = Buffer
module H = Hhbc_ast
open H
(* Indentation prefixes: function bodies are indented by [two_spaces],
   method bodies (nested one level deeper) by [four_spaces]. *)
let two_spaces = " "
let four_spaces = " "
(* Render a basic (stack-manipulation) instruction as a fresh buffer
   containing [prefix] followed by its HHAS mnemonic. *)
let buffer_of_instruct_basic prefix instruction =
  let mnemonic =
    match instruction with
    | Nop -> "Nop"
    | EntryNop -> "EntryNop"
    | PopA -> "PopA"
    | PopC -> "PopC"
    | PopV -> "PopV"
    | PopR -> "PopR"
    | Dup -> "Dup"
    | Box -> "Box"
    | Unbox -> "Unbox"
    | BoxR -> "BoxR"
    | UnboxR -> "UnboxR"
    | UnboxRNop -> "UnboxRNop"
    | RGetCNop -> "RGetCNop"
  in
  let result = B.create 0 in
  B.add_string result (prefix ^ mnemonic);
  result
(* Render a literal-constant instruction as [prefix] plus its HHAS text.
   Only Null, Int and String are supported; any other constructor is a
   hard failure. *)
let buffer_of_instruct_lit_const prefix instruction =
let result = B.create 0 in
B.add_string result (
prefix ^
match instruction with
| Null -> "Null"
| Int i -> "Int " ^ Int64.to_string i
(**
* TODO (hgo): build that map from id to strings
*)
(* NOTE(review): the string payload is emitted verbatim between quotes;
   embedded quotes/backslashes are not escaped — confirm callers
   guarantee escape-free strings. *)
| String str -> "String \"" ^ str ^ "\""
| _ -> failwith "Not Implemented"
); result
(* Render an operator instruction as a fresh buffer containing [prefix]
   followed by its HHAS mnemonic. Every constructor maps one-to-one onto
   the opcode of the same name. *)
let buffer_of_instruct_operator prefix instruction =
  let mnemonic =
    match instruction with
    | Concat -> "Concat"
    | Abs -> "Abs"
    | Add -> "Add"
    | Sub -> "Sub"
    | Mul -> "Mul"
    | AddO -> "AddO"
    | SubO -> "SubO"
    | MulO -> "MulO"
    | Div -> "Div"
    | Mod -> "Mod"
    | Pow -> "Pow"
    | Sqrt -> "Sqrt"
    | Xor -> "Xor"
    | Not -> "Not"
    | Same -> "Same"
    | NSame -> "NSame"
    | Eq -> "Eq"
    | Neq -> "Neq"
    | Lt -> "Lt"
    | Lte -> "Lte"
    | Gt -> "Gt"
    | Gte -> "Gte"
    | Cmp -> "Cmp"
    | BitAnd -> "BitAnd"
    | BitOr -> "BitOr"
    | BitXor -> "BitXor"
    | BitNot -> "BitNot"
    | Shl -> "Shl"
    | Shr -> "Shr"
    | Floor -> "Floor"
    | Ceil -> "Ceil"
    | CastBool -> "CastBool"
    | CastInt -> "CastInt"
    | CastDouble -> "CastDouble"
    | CastString -> "CastString"
    (* Fix: previously printed "Cast", which is inconsistent with the
       other Cast* arms and is not an HHAS opcode name. *)
    | CastArray -> "CastArray"
    | CastObject -> "CastObject"
    | CastVec -> "CastVec"
    | CastDict -> "CastDict"
    | CastKeyset -> "CastKeyset"
    | InstanceOf -> "InstanceOf"
    | InstanceOfD -> "InstanceOfD"
    | Print -> "Print"
    | Clone -> "Clone"
    (* H.Exit is qualified to avoid clashing with Stdlib's exit. *)
    | H.Exit -> "Exit"
    | Fatal -> "Fatal"
  in
  let result = B.create 0 in
  B.add_string result (prefix ^ mnemonic);
  result
(* Render a control-flow instruction; only the two return opcodes are
   supported so far, everything else fails fast. *)
let buffer_of_instruct_control_flow prefix instruction =
  let mnemonic =
    match instruction with
    | RetC -> "RetC"
    | RetV -> "RetV"
    | _ -> failwith "Not Implemented"
  in
  let result = B.create 0 in
  B.add_string result (prefix ^ mnemonic);
  result
(* Render a call instruction: FPushFuncD carries an arg count plus the
   callee's (quoted) name, FCall carries the arg count being consumed. *)
let buffer_of_instruct_call prefix instruction =
  let text =
    match instruction with
    | FPushFuncD (n_params, litstr) ->
      "FPushFuncD " ^ string_of_int n_params ^ " \"" ^ litstr ^ "\""
    | FCall param_id -> "FCall " ^ string_of_int param_id
    | _ -> failwith "instruct_call Not Implemented"
  in
  let result = B.create 0 in
  B.add_string result (prefix ^ text);
  result
(* Render a list of instructions, one per line, each prefixed with
   [prefix] for indentation; dispatches on the instruction family. *)
let buffer_of_instruction_list prefix instructs =
  let buf = B.create 2 in
  let render inst =
    match inst with
    | IBasic basic -> buffer_of_instruct_basic prefix basic
    | ILitConst lit_const -> buffer_of_instruct_lit_const prefix lit_const
    | IOp op -> buffer_of_instruct_operator prefix op
    | IContFlow cont_flow -> buffer_of_instruct_control_flow prefix cont_flow
    | ICall f_call -> buffer_of_instruct_call prefix f_call
  in
  List.iter
    (fun inst ->
      B.add_buffer buf (render inst);
      B.add_string buf "\n")
    instructs;
  buf
(* Render the angle-bracketed return-type annotation of a .function
   directive. Only void (an empty list) is supported; for anything else
   a placeholder message is written *into the output* rather than
   raising — deliberate stop-gap, the result will not assemble. *)
let buffer_of_return_types return_types =
let buf = B.create 0 in
B.add_string buf "<";
(match return_types with
| [] -> B.add_string buf "\"HH\\void\" N "
| _ -> B.add_string buf "we only support functions returning void for the moment");
B.add_string buf "> ";
buf
(* Render one top-level .function directive: return types, name, an
   (always empty, for now) parameter list, and the body indented by
   two spaces. *)
let buffer_of_fun_def fun_def =
let buf = B.create 0 in
B.add_string buf "\n.function ";
B.add_buffer buf @@ buffer_of_return_types fun_def.f_return_types;
B.add_string buf fun_def.f_name;
(* NOTE(review): parameters are not rendered yet — every function is
   emitted with "()" regardless of its actual arity. *)
B.add_string buf "()";
B.add_string buf " {\n";
B.add_buffer buf (buffer_of_instruction_list two_spaces fun_def.f_body);
B.add_string buf "}\n";
buf
(* Render the bracketed flag list for a method, e.g. "[public static] ";
   returns "" when no flag is set. Flag order matches the original
   reverse-prepend order: private protected public abstract final static. *)
let method_special_attributes m =
  let candidates =
    [ ("private", m.method_is_private)
    ; ("protected", m.method_is_protected)
    ; ("public", m.method_is_public)
    ; ("abstract", m.method_is_abstract)
    ; ("final", m.method_is_final)
    ; ("static", m.method_is_static)
    ] in
  let attrs = List.map fst (List.filter snd candidates) in
  match String.concat " " attrs with
  | "" -> ""
  | text -> "[" ^ text ^ "] "
(* Render one .method directive inside a class body: flag list, name,
   an (always empty, for now) parameter list, and the instruction body
   indented by four spaces. *)
let buffer_of_method_def method_def =
let buf = B.create 0 in
(* TODO: attributes *)
B.add_string buf "\n .method ";
B.add_string buf (method_special_attributes method_def);
B.add_string buf method_def.method_name;
(* TODO: generic type parameters *)
(* TODO: parameters *)
(* TODO: where clause *)
B.add_string buf "()";
(* TODO: return type *)
B.add_string buf " {\n";
B.add_buffer buf
(buffer_of_instruction_list four_spaces method_def.method_body);
B.add_string buf " }\n";
buf
(* Render the bracketed flag list for a class, e.g. "[abstract final] ";
   returns "" when no flag is set. Flag order matches the original
   reverse-prepend order: abstract enum final interface trait. *)
let class_special_attributes c =
  let candidates =
    [ ("abstract", c.class_is_abstract)
    ; ("enum", c.class_is_enum)
    ; ("final", c.class_is_final)
    ; ("interface", c.class_is_interface)
    ; ("trait", c.class_is_trait)
    ] in
  let attrs = List.map fst (List.filter snd candidates) in
  match String.concat " " attrs with
  | "" -> ""
  | text -> "[" ^ text ^ "] "
(* Render one .class directive: flag list, name, and each method in
   declaration order. Extends/implements clauses and non-method members
   are not emitted yet (see TODOs). *)
let buffer_of_class_def class_def =
let buf = B.create 0 in
(* TODO: user attributes *)
B.add_string buf "\n.class ";
B.add_string buf (class_special_attributes class_def);
B.add_string buf class_def.class_name;
(* TODO: extends *)
(* TODO: implements *)
B.add_string buf " {\n";
List.iter
(fun x -> B.add_buffer buf (buffer_of_method_def x);)
class_def.class_methods;
(* TODO: other members *)
(* TODO: If there is no ctor, generate one *)
B.add_string buf "}\n";
buf
(* Concatenate the rendering of every function, then every class, of the
   program, in declaration order. *)
let buffer_of_hhas_prog prog =
let buf = B.create 0 in
List.iter
(fun x -> B.add_buffer buf (buffer_of_fun_def x);) prog.hhas_fun;
List.iter
(fun x -> B.add_buffer buf (buffer_of_class_def x);) prog.hhas_classes;
buf
(* Emit one "DefCls i" pseudo-instruction per class, numbered from 0 in
   list order; only the count of [classes] matters, not their contents. *)
let buffer_of_defcls classes =
  let buf = B.create 0 in
  List.iteri
    (fun count _ -> B.add_string buf (Printf.sprintf " DefCls %n\n" count))
    classes;
  buf
(* Render the synthetic .main entry point: first a DefCls for every
   class in the program, then a body that pushes Int 1 and returns it. *)
let buffer_of_top_level hhas_prog =
let main_stmts =
[ ILitConst (Int Int64.one)
; IContFlow RetC
] in
let fun_name = ".main {\n" in
let buf = B.create 0 in
B.add_string buf fun_name;
B.add_buffer buf (buffer_of_defcls hhas_prog.hhas_classes);
B.add_buffer buf (buffer_of_instruction_list two_spaces main_stmts);
B.add_string buf "}\n";
buf
(* Public entry point: render a whole HHAS program (.main first, then
   all functions and classes) to a string with a trailing newline. *)
let to_string hhas_prog =
let final_buf = buffer_of_top_level hhas_prog in
B.add_buffer final_buf @@ buffer_of_hhas_prog hhas_prog;
B.add_string final_buf "\n";
B.contents final_buf
| 21,623
|
https://github.com/IanSavchenko/AdventOfCode2018/blob/master/23/task.js
|
Github Open Source
|
Open Source
|
MIT
| 2,019
|
AdventOfCode2018
|
IanSavchenko
|
JavaScript
|
Code
| 313
| 841
|
// Advent of Code 2018, day 23: teleportation nanobots.
// Part 1: count bots within range of the strongest bot.
// Part 2: find the Manhattan distance from the origin to the point
// in range of the most bots (coarse-to-fine grid search).
let _ = require('lodash');
let fs = require('fs');
let util = require('../util');
// INPUT PROCESSING
let data = fs.readFileSync(__dirname + '/input.txt', 'utf8').trim();
// Each line looks like "pos=<x,y,z>, r=4"; split out coords and radius.
let inp = util.splitLines(data).map(row => {
row = row.trim().split('<');
let bot = {};
let pos = row[1].split('>')[0].split(',').map(el => +el);
bot.x = pos[0];
bot.y = pos[1];
bot.z = pos[2];
bot.r = +row[1].split('r=')[1];
return bot;
});
// Manhattan distance between a and b; b defaults to the origin.
let md = function(a, b) {
if (!b) {
b = {x: 0, y: 0, z: 0};
}
return Math.abs(a.x-b.x) + Math.abs(a.y-b.y) + Math.abs(a.z-b.z);
};
// PART 1
let strongest = _.maxBy(inp, 'r');
let inRange = _.filter(inp, b => {
return md(strongest, b) <= strongest.r;
});
let part1 = inRange.length;
console.log(`Part 1: ${part1}`);
// Answer is specific to this puzzle input.
console.assert(part1 === 341);
// PART 2
let part2 = 0;
// Bounding box of all bots; the initial step spans the whole box.
let xmax = _.maxBy(inp, 'x').x;
let xmin = _.minBy(inp, 'x').x;
let ymax = _.maxBy(inp, 'y').y;
let ymin = _.minBy(inp, 'y').y;
let zmax = _.maxBy(inp, 'z').z;
let zmin = _.minBy(inp, 'z').z;
let step = _.max([xmax-xmin, ymax-ymin, zmax-zmin]);
// Coarse-to-fine search: at each resolution, find the grid point covered
// by the most bots (ties broken by distance to origin), then zoom in
// around it and halve the step.
// NOTE(review): this greedy refinement is a heuristic — it is not
// guaranteed optimal for arbitrary inputs, but matches this puzzle's
// expected answer (asserted below).
do {
let maxPos;
let maxDist;
let maxCnt = 0;
for (let x = xmin; x < xmax; x += step) {
for (let y = ymin; y < ymax; y += step) {
for (let z = zmin; z < zmax; z += step) {
let pos = {x,y,z};
let posDist = md(pos);
// A bot "covers" this cell if its range reaches within step-1
// of the cell's corner point.
let inRange = _.filter(inp, bot => {
return md(bot, pos) - bot.r <= step - 1;
}).length;
if (inRange > maxCnt ||
(inRange === maxCnt && posDist < maxDist)) {
maxCnt = inRange;
maxPos = pos;
maxDist = posDist;
}
}
}
}
part2 = maxDist;
// stretching search interval around the best position
xmin = maxPos.x - step;
ymin = maxPos.y - step;
zmin = maxPos.z - step;
xmax = maxPos.x + step;
ymax = maxPos.y + step;
zmax = maxPos.z + step;
// reducing search step
step = _.floor(step / 2);
} while (step > 0);
console.log(`Part 2: ${part2}`);
// Answer is specific to this puzzle input.
console.assert(part2 === 105191907);
| 20,341
|
https://github.com/james-d12/whitehatbootcamp/blob/master/src/module1/week1/scooter-hire/classes/Customer.js
|
Github Open Source
|
Open Source
|
Unlicense
| null |
whitehatbootcamp
|
james-d12
|
JavaScript
|
Code
| 356
| 864
|
const Scooter = require('./Scooter')
const ElectricScooter = require('./ElectricScooter')
const ChargeStation = require('./ChargeStation')
/**
* Customer has name, money and a current scooter.
* @property {String} firstName - The firstname of the customer
* @property {String} lastname - The lastname of the customer
* @property {String} fullname - The fullname of the customer
* @property {Int} money - how much money the customer has.
* @property {Scooter} scooter - The scooter the customer currently has.
* */
/**
 * Customer has a name, money and (at most) one current scooter.
 * @property {String} firstName - The firstname of the customer
 * @property {String} lastName - The lastname of the customer
 * @property {String} fullName - The fullname of the customer
 * @property {Number} money - How much money the customer has.
 * @property {Scooter|undefined} scooter - The scooter the customer currently has.
 */
class Customer {
    /**
     * Constructs a Customer with the specified parameters.
     * @param {String} firstName - The customer's firstname.
     * @param {String} lastName - The customer's lastname.
     * @param {Number} money - The customer's amount of money
     *   (negative values are coerced to their absolute value).
     */
    constructor(firstName = "", lastName = "", money = 0) {
        this.firstName = firstName
        this.lastName = lastName
        this.fullName = firstName + " " + lastName
        this.money = Math.abs(money)
        this.scooter = undefined
    }

    /**
     * Drives the current scooter for a specified distance, logging the
     * outcome. Does nothing (beyond logging) when the customer has no
     * scooter or the scooter cannot cover the distance.
     * @param {Number} distance - The distance to travel in metres.
     */
    driveScooterFor(distance) {
        if (!this.hasScooter()) {
            console.log(`${this.fullName} does not have a scooter to drive.`)
            return
        }
        if (!this.scooter.canDriveFor(distance)) {
            console.log(`${this.fullName} cannot drive ${distance} metres.`)
            return
        }
        this.scooter.drive(distance)
        console.log(`${this.fullName} has driven their scooter for ${distance} metres. Current Charge [${this.scooter.chargeLevel}%].`)
    }

    /**
     * Checks if the customer currently has a scooter.
     * @returns {Boolean} true when a scooter is held, false otherwise.
     */
    hasScooter() {
        // Loose comparison deliberately treats both null and undefined
        // as "no scooter" (matches the original `== undefined` check).
        return this.scooter != null
    }

    /**
     * Checks if the customer can afford a specific scooter.
     * @param {Scooter} scooter - The scooter to check the price of.
     * @returns {Boolean} Whether the customer's money covers its cost.
     */
    canAffordScooter(scooter) {
        return this.money >= scooter.cost
    }

    /**
     * Purchases a scooter: deducts its cost and stores it as the current
     * scooter. Silently ignored when the customer cannot afford it
     * (no log, no error — callers must check the balance themselves).
     * @param {Scooter} scooter - The scooter to purchase.
     */
    purchaseScooter(scooter) {
        if (!this.canAffordScooter(scooter)) { return }
        this.money -= scooter.cost
        this.scooter = scooter
        console.log(`${this.fullName} has purchased scooter [${scooter.id}] for: £${scooter.cost}`)
        console.log(`${this.fullName}'s current balance stands at: £${this.money}`)
    }

    /**
     * Removes the scooter from the customer's inventory.
     * @returns {Scooter|undefined} The returned scooter (undefined if none).
     */
    returnScooter() {
        const scooter = this.scooter
        this.scooter = undefined
        return scooter
    }
}
module.exports = Customer
| 36,809
|
https://github.com/obecto/gattakka/blob/master/src/main/scala/com/obecto/gattakka/genetics/descriptors/LongGene.scala
|
Github Open Source
|
Open Source
|
MIT
| 2,019
|
gattakka
|
obecto
|
Scala
|
Code
| 143
| 411
|
package com.obecto.gattakka.genetics.descriptors
import scala.math.BigInt
object LongGeneDescriptor {
// Builds a descriptor spanning `bits` bits of value space: [0, 2^bits].
// NOTE(review): since `range = to - from + 1`, this yields 2^bits + 1
// admissible values and `length == bits + 1` — confirm whether
// `(1L << bits) - 1` (exactly `bits` bits) was the intent.
def apply(bits: Int): LongGeneDescriptor = {
LongGeneDescriptor(0, 1l << bits)
}
}
/**
 * Descriptor of a long-valued gene with inclusive bounds [from, to].
 *
 * `range` is the number of admissible values and `length` the number of
 * bits needed to address them. All three `apply` overloads produce a
 * [[LongGene]] whose value is guaranteed to lie inside [from, to].
 */
case class LongGeneDescriptor(val from: Long, val to: Long) extends GeneDescriptor {
  assert(to >= from)
  /** Number of admissible values (inclusive bounds). */
  val range: Long = to - from + 1
  /** Bits required to encode every value in the range. */
  val length: Int = (Math.log(range.toDouble) / Math.log(2)).ceil.toInt

  /** Draws a random gene.
    * Fixed: the previous `rnd.nextLong().abs % range` could yield a value
    * below `from`, because `Long.MinValue.abs` is still negative;
    * `Math.floorMod` always returns a result in [0, range). */
  def apply(rnd: scala.util.Random): LongGene = {
    new LongGene(java.lang.Math.floorMod(rnd.nextLong(), range) + from, this)
  }

  /** Decodes a gene from its big-endian byte representation (a zero byte
    * is prepended so the magnitude is read as unsigned).
    * Fixed: `.toLong` may be negative for 8-byte payloads, so the
    * reduction uses `floorMod` instead of `%` to stay in range. */
  def apply(byteArray: Array[Byte]): LongGene = {
    val raw = BigInt(Array[Byte](0) ++ byteArray.take(byteLength)).toLong
    new LongGene(java.lang.Math.floorMod(raw, range) + from, this)
  }

  /** Normalizes an arbitrary value into [from, to] by wrapping.
    * Fixed: `(value - from) % range` is negative for values below `from`,
    * which produced out-of-range genes; `floorMod` wraps correctly. */
  def apply(value: Long): LongGene = {
    val normalizedValue = java.lang.Math.floorMod(value - from, range) + from
    new LongGene(normalizedValue, this)
  }
}
// A concrete long gene value paired with the descriptor it belongs to.
case class LongGene(val value: Long, descriptor: LongGeneDescriptor) extends Gene {
// Big-endian encoding of (value - from), left-padded with zero bytes up
// to the descriptor's fixed byte width.
def toByteArray: Array[Byte] = {
var unpadded = BigInt(value - descriptor.from).toByteArray
// BigInt.toByteArray may emit a leading sign byte of 0; strip it so
// only the magnitude remains. (For value == from the array is [0],
// and stripping leaves an empty array that is then fully padded.)
if (unpadded.head == 0) {
unpadded = unpadded.slice(1, unpadded.length)
}
// NOTE(review): assumes value >= descriptor.from (upheld by the
// descriptor's constructors); a negative difference would encode
// a sign byte here — confirm no caller constructs genes directly.
val padding = Array[Byte]().padTo(descriptor.byteLength - unpadded.length, 0.toByte)
padding ++ unpadded
}
}
| 17,551
|
https://github.com/gcmvanloon/IEC-60870/blob/master/IEC60870/IE/IeProtectionOutputCircuitInformation.cs
|
Github Open Source
|
Open Source
|
MIT
| 2,016
|
IEC-60870
|
gcmvanloon
|
C#
|
Code
| 168
| 473
|
using System.IO;
using IEC60870.IE.Base;
namespace IEC60870.IE
{
/// <summary>
/// Protection output circuit information element: a single byte whose low
/// four bits flag the general command and the commands to lines L1-L3.
/// </summary>
internal class IeProtectionOutputCircuitInformation : InformationElement
{
    // Bit masks for the four command flags packed into the value byte.
    private const int GeneralMask = 0x01;
    private const int L1Mask = 0x02;
    private const int L2Mask = 0x04;
    private const int L3Mask = 0x08;

    private readonly int value;

    /// <summary>
    /// Builds the element from the individual command flags.
    /// </summary>
    public IeProtectionOutputCircuitInformation(bool generalCommand, bool commandToL1, bool commandToL2,
        bool commandToL3)
    {
        var flags = 0;
        if (generalCommand)
        {
            flags |= GeneralMask;
        }
        if (commandToL1)
        {
            flags |= L1Mask;
        }
        if (commandToL2)
        {
            flags |= L2Mask;
        }
        if (commandToL3)
        {
            flags |= L3Mask;
        }
        value = flags;
    }

    /// <summary>
    /// Decodes the element from a single byte read off the stream.
    /// </summary>
    public IeProtectionOutputCircuitInformation(BinaryReader reader)
    {
        value = reader.ReadByte();
    }

    /// <summary>
    /// Writes the element's single byte at offset <paramref name="i"/> and
    /// returns the number of bytes written (always 1).
    /// </summary>
    public override int Encode(byte[] buffer, int i)
    {
        buffer[i] = (byte) value;
        return 1;
    }

    public bool IsGeneralCommand()
    {
        return (value & GeneralMask) != 0;
    }

    public bool IsCommandToL1()
    {
        return (value & L1Mask) != 0;
    }

    public bool IsCommandToL2()
    {
        return (value & L2Mask) != 0;
    }

    public bool IsCommandToL3()
    {
        return (value & L3Mask) != 0;
    }

    public override string ToString()
    {
        return string.Format(
            "Protection output circuit information, general command: {0}, command to L1: {1}, command to L2: {2}, command to L3: {3}",
            IsGeneralCommand(), IsCommandToL1(), IsCommandToL2(), IsCommandToL3());
    }
}
}
| 28,606
|
https://github.com/kei-uma/cms2017/blob/master/vendor/engines/consult_management/lib/consult_management.rb
|
Github Open Source
|
Open Source
|
MIT
| 2,017
|
cms2017
|
kei-uma
|
Ruby
|
Code
| 5
| 17
|
require "consult_management/engine"
# Root namespace for the ConsultManagement Rails engine; all engine
# wiring lives in consult_management/engine.
module ConsultManagement
end
| 19,622
|
https://github.com/JannisBush/code_relation_prediction/blob/master/pytorch/run_classifier_ba.py
|
Github Open Source
|
Open Source
|
Apache-2.0
| null |
code_relation_prediction
|
JannisBush
|
Python
|
Code
| 2,432
| 8,199
|
# coding=utf-8
# Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team.
# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
# Modifications copyright (C) 2019 Jannis Rautenstrauch
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""BERT finetuning runner."""
from __future__ import absolute_import, division, print_function
import argparse
import logging
import os
import sys
import random
import torch
import numpy as np
import pandas as pd
from tqdm import tqdm, trange
from torch.utils.data import DataLoader, RandomSampler, SequentialSampler, TensorDataset
from torch.nn import CrossEntropyLoss
from tensorboardX import SummaryWriter
from pytorch_pretrained_bert.file_utils import WEIGHTS_NAME, CONFIG_NAME
from pytorch_pretrained_bert.modeling import BertForSequenceClassification
from pytorch_pretrained_bert.tokenization import BertTokenizer
from pytorch_pretrained_bert.optimization import BertAdam
from run_classifier_dataset_utils import processors, convert_examples_to_features, compute_metrics
if sys.version_info[0] == 2:
import cPickle as pickle
else:
import pickle
logger = logging.getLogger(__name__)
def main():
"""Fine-tune BERT for a given task with given parameters."""
# Define all parameters, using argparse/Command Line Interface.
parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
def add_args():
"""Add all possible options and defaults to the parser."""
# Hyperparameters of BERT
# Parameters often changed
parser.add_argument("--bert_model",
default="bert-base-uncased",
type=str,
help="Bert pre-trained model selected in the list: bert-base-uncased, "
"bert-large-uncased, bert-base-cased, bert-large-cased, "
"bert-base-multilingual-uncased, bert-base-multilingual-cased, bert-base-chinese.")
parser.add_argument("--max_seq_length",
default=128,
type=int,
help="The maximum total input sequence length after WordPiece tokenization. \n"
"Sequences longer than this will be truncated, and sequences shorter \n"
"than this will be padded.")
parser.add_argument("--train_batch_size",
default=16,
type=int,
help="Total batch size for training.")
parser.add_argument("--learning_rate",
default=2e-5,
type=float,
help="The initial learning rate for Adam.")
parser.add_argument("--num_train_epochs",
default=3.0,
type=float,
help="Total number of training epochs to perform.")
parser.add_argument("--do_lower_case",
action='store_true',
help="Set this flag if you are using an uncased model.")
# Parameters usually unchanged
parser.add_argument("--warmup_proportion",
default=0.1,
type=float,
help="Proportion of training to perform linear learning rate warmup for. "
"E.g., 0.1 = 10%% of training.")
parser.add_argument("--eval_batch_size",
default=8,
type=int,
help="Total batch size for eval.")
# Parameters of the task
parser.add_argument("--task_name",
default="node",
type=str,
help="The name of the task to train. One of node, political-as, "
"political-ru, political-asu, agreement, node-ext, political-as-topics,"
"political-ru-topics, political-asu-topics, agreement-topics")
parser.add_argument("--input_to_use",
type=str,
default="both",
help="Which input to use. One of both, org, response, response-org.")
# Parameters for reproduction
parser.add_argument('--seed',
type=int,
default=42,
help="random seed for initialization")
parser.add_argument('--gradient_accumulation_steps',
type=int,
default=1,
help="Number of updates steps to accumulate before performing a backward/update pass.")
# Parameters for where to save/load data
parser.add_argument("--data_dir",
default="../data",
type=str,
help="The input data dir. Should contain the .tsv file (or other data files) for the task.")
parser.add_argument("--output_dir",
default="run",
type=str,
help="The output directory where the model predictions and checkpoints will be written.")
parser.add_argument("--cache_dir",
default="",
type=str,
help="Where do you want to store the pre-trained models downloaded from s3")
parser.add_argument('--overwrite_output_dir',
action='store_true',
help="Overwrite the content of the output directory")
# Parameters to decide what to do (train, test, crossval, save the model)
parser.add_argument("--do_train",
action='store_true',
help="Whether to run training.")
parser.add_argument("--do_eval",
action='store_true',
help="Whether to run eval on the dev set.")
parser.add_argument("--do_train_eval",
action='store_true',
help="Whether to run training and eval.")
parser.add_argument('--n_times',
type=int,
default=10,
help="Number of restarts for every parameter setting in train&eval mode")
parser.add_argument("--do_cross_val",
action='store_true',
help="Whether to run cross-validation.")
parser.add_argument("--do_save",
action='store_true',
help="Whether to save the resulting model.")
parser.add_argument("--do_visualization",
action='store_true',
help="Whether to run visualization.")
# Additional parameters
parser.add_argument("--no_cuda",
action='store_true',
help="Whether not to use CUDA when available")
parser.add_argument('--log_level',
type=str,
default="info",
help="Verbosity of logging output. One of info or warn.")
# Add all parameters to the parser and parse them.
add_args()
args = parser.parse_args()
# Set up all parameters given the CLI arguments.
device = torch.device("cuda" if torch.cuda.is_available() and not args.no_cuda else "cpu")
n_gpu = torch.cuda.device_count()
args.device = device
task_name = args.task_name.lower()
processor = processors[task_name](args.input_to_use)
label_list = processor.get_labels()
num_labels = len(label_list)
global_step = 0
tr_loss = 0
tb_writer = SummaryWriter()
# Prepare the logging.
logging.basicConfig(format='%(asctime)s - %(levelname)s - %(name)s - %(message)s',
datefmt='%m/%d/%Y %H:%M:%S',
level=logging.INFO if args.log_level == "info" else logging.WARN)
logger.info("device: {} n_gpu: {}".format(
device, n_gpu))
# Check the arguments and fail if the arguments are invalid.
if not args.do_train and not args.do_eval and not args.do_cross_val and not args.do_visualization \
and not args.do_train_eval:
raise ValueError("At least one of `do_train`, `do_eval` `do_cross_val` "
"or `do_visualization` or 'do_train_eval` must be True.")
if os.path.exists(args.output_dir) and os.listdir(
args.output_dir) and not args.overwrite_output_dir:
raise ValueError("Output directory ({}) already exists and is not empty. "
"Use the --overwrite_output_dir option.".format(args.output_dir))
if not os.path.exists(args.output_dir):
os.makedirs(args.output_dir)
if task_name not in processors:
raise ValueError("Task not found: %s" % (task_name))
if args.gradient_accumulation_steps < 1:
raise ValueError("Invalid gradient_accumulation_steps parameter: {}, should be >= 1".format(
args.gradient_accumulation_steps))
# Calculate the train_batch_size if gradient accumulation is used
args.train_batch_size = args.train_batch_size // args.gradient_accumulation_steps
# Set all seeds for reproducibility
random.seed(args.seed)
np.random.seed(args.seed)
torch.manual_seed(args.seed)
if n_gpu > 0:
torch.cuda.manual_seed_all(args.seed)
def get_features_examples(mode):
"""Returns the features and examples of train or test mode."""
def convert(split, modus, exs):
"""Converts the examples or load them from cache."""
cached_features_file = os.path.join(args.data_dir, 'cache', '{0}_{1}_{2}_{3}_{4}_{5}'.format(modus,
list(filter(None, args.bert_model.split('/'))).pop(),
str(args.max_seq_length),
str(task_name), str(args.input_to_use), split))
# Try to load the cached features.
try:
with open(cached_features_file, "rb") as reader:
fs = pickle.load(reader)
# Creates and cache the features.
except FileNotFoundError:
if not os.path.exists(os.path.join(args.data_dir, 'cache')):
os.makedirs(os.path.join(args.data_dir, 'cache'))
fs = convert_examples_to_features(
exs, label_list, args.max_seq_length, tokenizer)
logger.info('Saving {0} features into cached file {1}'.format(mode, cached_features_file))
with open(cached_features_file, "wb") as writer:
pickle.dump(fs, writer)
return fs
# Return the features, examples and dataframes depending on the mode.
if mode == "train":
train_ex, df = processor.get_train_examples(args.data_dir)
return convert("X", mode, train_ex), train_ex, df
elif mode == "dev":
dev_ex, df = processor.get_dev_examples(args.data_dir)
return convert("X", mode, dev_ex), dev_ex, df
elif mode == "cross_val":
data = processor.get_splits(args.data_dir)
train_f_list, train_e_list, train_df_list, test_f_list, test_e_list, test_df_list = ([] for _ in range(6))
for i, (train_ex, train_df, test_ex, test_df) in enumerate(data):
train_e_list.append(train_ex)
train_df_list.append(train_df)
test_e_list.append(test_ex)
test_df_list.append(test_df)
# Create features from the examples
train_f_list.append(convert(i, "train", train_ex))
test_f_list.append(convert(i, "dev", test_ex))
return train_f_list, train_e_list, train_df_list, test_f_list, test_e_list, test_df_list
else:
raise ValueError("Invalid feature mode.")
def create_tensor_dataset(exfeatures):
"""Creates a TensoDataset out of the features."""
all_input_ids = torch.tensor([f.input_ids for f in exfeatures], dtype=torch.long)
all_input_mask = torch.tensor([f.input_mask for f in exfeatures], dtype=torch.long)
all_segment_ids = torch.tensor([f.segment_ids for f in exfeatures], dtype=torch.long)
all_label_ids = torch.tensor([f.label_id for f in exfeatures], dtype=torch.long)
return TensorDataset(all_input_ids, all_input_mask, all_segment_ids, all_label_ids)
def do_training(train_fs, train_exs):
"""Runs BERT fine-tuning.

Trains the enclosing `model` for args.num_train_epochs epochs on the
given features. Advances the nonlocal `global_step` and logs the
learning rate and loss to the enclosing `tb_writer`.

Args:
    train_fs: feature objects used to build the training tensors.
    train_exs: the corresponding examples (used only for logging counts).
"""
# global_step lives in the enclosing scope and is advanced here.
nonlocal global_step
# Create the batched training data out of the features.
train_data = create_tensor_dataset(train_fs)
train_sampler = RandomSampler(train_data)
train_dataloader = DataLoader(train_data, sampler=train_sampler, batch_size=args.train_batch_size)
# Calculate the number of optimization steps.
num_train_optimization_steps = len(train_dataloader) // args.gradient_accumulation_steps * args.num_train_epochs
# Prepare optimizer: bias and LayerNorm parameters are excluded from weight decay.
param_optimizer = list(model.named_parameters())
no_decay = ['bias', 'LayerNorm.bias', 'LayerNorm.weight']
optimizer_grouped_parameters = [
{'params': [p for n, p in param_optimizer if not any(nd in n for nd in no_decay)], 'weight_decay': 0.01},
{'params': [p for n, p in param_optimizer if any(nd in n for nd in no_decay)], 'weight_decay': 0.0}
]
optimizer = BertAdam(optimizer_grouped_parameters,
lr=args.learning_rate,
warmup=args.warmup_proportion,
t_total=num_train_optimization_steps)
# Log some information about the training.
logger.info("***** Running training *****")
logger.info("  Num examples = %d", len(train_exs))
logger.info("  Batch size = %d", args.train_batch_size)
logger.info("  Num steps = %d", num_train_optimization_steps)
# Set the model to training mode and train for the configured number of epochs.
model.train()
for _ in trange(int(args.num_train_epochs), desc="Epoch"):
# NOTE(review): tr_loss is local to do_training, yet do_eval also reads a
# name `tr_loss`; confirm an enclosing-scope binding exists, otherwise
# do_eval raises NameError when args.do_train is set.
tr_loss = 0
nb_tr_examples, nb_tr_steps = 0, 0
# Iterate over all batches.
for step, batch in enumerate(tqdm(train_dataloader, desc="Iteration")):
batch = tuple(t.to(device) for t in batch)
input_ids, input_mask, segment_ids, label_ids = batch
# Get the logits and calculate the cross-entropy loss.
logits = model(input_ids, token_type_ids=segment_ids, attention_mask=input_mask)
loss = CrossEntropyLoss()(logits.view(-1, num_labels), label_ids.view(-1))
# Scale the loss in gradient accumulation mode.
if args.gradient_accumulation_steps > 1:
loss = loss / args.gradient_accumulation_steps
# Calculate the gradients (accumulated until the optimizer step below).
loss.backward()
tr_loss += loss.item()
nb_tr_examples += input_ids.size(0)
nb_tr_steps += 1
# Update the weights every gradient_accumulation_steps steps.
if (step + 1) % args.gradient_accumulation_steps == 0:
optimizer.step()
optimizer.zero_grad()
global_step += 1
tb_writer.add_scalar('lr', optimizer.get_lr()[0], global_step)
tb_writer.add_scalar('loss', loss.item(), global_step)
def do_save():
    """Saves the current model, tokenizer and arguments.

    Writes to args.output_dir:
      * WEIGHTS_NAME        - the model state dict,
      * CONFIG_NAME         - the model configuration (JSON),
      * the tokenizer vocabulary files,
      * training_args.bin   - the parsed command-line arguments.
    The fixed file names allow later reloading via `from_pretrained`.
    """
    # Fix: the original declared `nonlocal model` / `nonlocal tokenizer`
    # although both names are only read, never rebound, so the declarations
    # were unnecessary and have been removed.
    # Unwrap DataParallel-style containers so the raw model is saved.
    model_to_save = model.module if hasattr(model, 'module') else model
    # Using the predefined names, we can load using `from_pretrained`.
    output_model_file = os.path.join(args.output_dir, WEIGHTS_NAME)
    output_config_file = os.path.join(args.output_dir, CONFIG_NAME)
    # Save the trained model, configuration and tokenizer.
    torch.save(model_to_save.state_dict(), output_model_file)
    model_to_save.config.to_json_file(output_config_file)
    tokenizer.save_vocabulary(args.output_dir)
    # Save the training arguments together with the trained model.
    output_args_file = os.path.join(args.output_dir, 'training_args.bin')
    torch.save(args, output_args_file)
def do_eval(eval_features, eval_examples):
"""Do evaluation on the current model.

Runs one sequential pass over the evaluation data, computes the task
metrics and returns (result_dict, predicted_label_ids).
"""
# Log some information about the evaluation.
logger.info("***** Running evaluation *****")
logger.info("  Num examples = %d", len(eval_examples))
logger.info("  Batch size = %d", args.eval_batch_size)
# Get the eval data and create a sequential dataloader.
eval_data = create_tensor_dataset(eval_features)
eval_sampler = SequentialSampler(eval_data)
eval_dataloader = DataLoader(eval_data, sampler=eval_sampler, batch_size=args.eval_batch_size)
# Set the model to eval mode (disable dropout)
model.eval()
eval_loss = 0
nb_eval_steps = 0
preds = []
out_label_ids = None
# Iterate over the evaluation data.
for input_ids, input_mask, segment_ids, label_ids in tqdm(eval_dataloader, desc="Evaluating"):
input_ids = input_ids.to(device)
input_mask = input_mask.to(device)
segment_ids = segment_ids.to(device)
label_ids = label_ids.to(device)
# Forward pass with deactivated autograd engine.
with torch.no_grad():
logits = model(input_ids, token_type_ids=segment_ids, attention_mask=input_mask)
# Calculate eval loss.
tmp_eval_loss = CrossEntropyLoss()(logits.view(-1, num_labels), label_ids.view(-1))
eval_loss += tmp_eval_loss.mean().item()
nb_eval_steps += 1
# Accumulate logits and gold labels on the CPU across batches.
if len(preds) == 0:
preds.append(logits.detach().cpu().numpy())
out_label_ids = label_ids.detach().cpu().numpy()
else:
preds[0] = np.append(
preds[0], logits.detach().cpu().numpy(), axis=0)
out_label_ids = np.append(
out_label_ids, label_ids.detach().cpu().numpy(), axis=0)
# Calculate the mean loss and get all predictions.
eval_loss = eval_loss / nb_eval_steps
# NOTE(review): `tr_loss` is a local of do_training, not of this function —
# unless an enclosing-scope binding exists this line raises NameError
# whenever args.do_train is true. Verify against the full file.
loss = tr_loss/global_step if args.do_train else None
preds = preds[0]
preds = np.argmax(preds, axis=1)
# Compute the metrics for the given task
result = compute_metrics(task_name, preds, out_label_ids)
# Save additional information in the result dict.
result['eval_loss'] = eval_loss
result['global_step'] = global_step
result['loss'] = loss
# Save all settings for external evaluation (underscore-prefixed keys are
# skipped by the human-readable logging in save_results).
result['_task'] = task_name
result['_input_mode'] = args.input_to_use
result['_learning_rate'] = args.learning_rate
result['_bert-model'] = args.bert_model
result['_batch_size'] = args.train_batch_size
result['_warmup'] = args.warmup_proportion
result['_num_epochs'] = args.num_train_epochs
result['_seq_len'] = args.max_seq_length
result['_seed'] = args.seed
result['_gradient_acc'] = args.gradient_accumulation_steps
return result, preds
def save_results(result_list, pred_list):
    """Saves the results and the predictions.

    Appends a human-readable section per run to eval_results.txt inside
    the output dir, and appends machine-readable rows to eval_results.tsv
    and eval_preds.csv one directory above (headers are written only when
    the files are first created).
    """
    # Human-readable log: one section per run; '_'-prefixed keys are run
    # settings and are skipped here.
    output_eval_file = os.path.join(args.output_dir, "eval_results.txt")
    with open(output_eval_file, "a") as writer:
        logger.info("***** Eval results *****")
        for i, result_dict in enumerate(result_list):
            logger.info("Run %i", i)
            writer.write("Run %i\n" % i)
            for key in sorted(result_dict.keys()):
                if key.startswith("_"):
                    continue
                logger.info("  %s = %s", key, str(result_dict[key]))
                writer.write("%s = %s\n" % (key, str(result_dict[key])))
    # Machine-readable outputs shared across runs.
    output_csv_file = os.path.join(args.output_dir, "../eval_results.tsv")
    output_preds_file = os.path.join(args.output_dir, "../eval_preds.csv")
    df_res = pd.DataFrame(result_list)
    df_preds = pd.DataFrame(pred_list)
    df_preds['run'] = '{0}_{1}_{2}_{3}'.format(
        args.bert_model, args.num_train_epochs, args.train_batch_size, args.learning_rate)
    # Write headers only when the files are first created, then append.
    first_write = not os.path.exists(output_csv_file)
    mode = 'w' if first_write else 'a'
    df_res.to_csv(output_csv_file, mode=mode, encoding='utf-8', sep='\t', index=False, header=first_write)
    df_preds.to_csv(output_preds_file, mode=mode, encoding='utf-8', index=False, header=first_write)
# Load the tokenizer and the model.
tokenizer = BertTokenizer.from_pretrained(args.bert_model, do_lower_case=args.do_lower_case)
model = BertForSequenceClassification.from_pretrained(args.bert_model, num_labels=num_labels)
model.to(device)
# Combined train-and-test mode: repeat n_times, reseeding and resetting the
# model between runs so each repetition starts from the pretrained weights.
if args.do_train_eval:
# Get the train and test features only once.
train_features, train_examples, _ = get_features_examples("train")
test_features, test_examples, _ = get_features_examples("dev")
# Repeat N times.
for i in range(args.n_times):
# Train.
do_training(train_features, train_examples)
# Eval.
result, preds = do_eval(test_features, test_examples)
# Save the results.
save_results([result], [preds])
# Reset and new seeds (skipped after the final repetition).
if i+1 < args.n_times:
args.seed += 1
random.seed(args.seed)
np.random.seed(args.seed)
torch.manual_seed(args.seed)
if n_gpu > 0:
torch.cuda.manual_seed_all(args.seed)
# Reset model back to the pretrained weights.
model = BertForSequenceClassification.from_pretrained(args.bert_model, num_labels=num_labels)
model.to(device)
# Training only.
if args.do_train:
# Get the train features.
features, examples, df = get_features_examples("train")
# Train.
do_training(features, examples)
# Save the model if wanted.
if args.do_save:
do_save()
# Evaluation only (uses the model trained/loaded above).
if args.do_eval:
# Get the dev features.
features, examples, df = get_features_examples("dev")
# Evaluate.
result, preds = do_eval(features, examples)
# Save the results.
save_results([result], [preds])
# Cross-validation: train and evaluate once per split, resetting the model
# to the pretrained weights before each split.
if args.do_cross_val:
# Get the data for all splits
train_f_l, train_e_l, train_df_l, test_f_l, test_e_l, test_df_l = get_features_examples("cross_val")
# Iterate over all splits
for train_features, train_examples, test_features, test_examples in zip(
train_f_l, train_e_l, test_f_l, test_e_l):
# Reset model.
model = BertForSequenceClassification.from_pretrained(args.bert_model, num_labels=num_labels)
model.to(device)
# Train.
do_training(train_features, train_examples)
# Eval.
result, preds = do_eval(test_features, test_examples)
# Save results.
save_results([result], [preds])
# Visualization: explain individual predictions with LIME and ANCHOR.
if args.do_visualization:
# Additional imports needed for the visualizations.
import spacy
from skorch import NeuralNetClassifier
from sklearn.pipeline import make_pipeline
from run_classifier_dataset_utils import InputExample
from anchor import anchor_text
from lime.lime_text import LimeTextExplainer
# Example sentences.
raw_text_1 = "But Mr. Nixon did n't say a word that was ever publicly recorded . Even more incredible , " \
"he did n't say a word when the Communists took power in Cuba - not 4 miles off their shores , " \
"but only 90 miles off our shores . Mr. Nixon saw what was happening in Cuba ."
raw_text_2 = "Cordoba House is no act of tolerance, but of excess/arrogance. Building this structure on the " \
"edge of the battlefield created by radical Islamists is not a celebration of " \
"religious pluralism and mutual tolerance; it is a political statement of shocking arrogance " \
"and hypocrisy."
raw_text_3 = "Are not right no does he alcohol child china play"
raw_text_list = [raw_text_1, raw_text_2, raw_text_3]
class BertConverter:
"""Pipeline-Class to convert text to the input format of BERT."""
def transform(self, X, y=None, **fit_params):
"""Transforms a list of strings to a list of BERT inputs."""
exs = []
for text in X:
# The label here is a dummy; only the text matters for prediction.
exs.append(InputExample(guid=None, text_a=text, text_b=None, label="attack"))
visu_features = convert_examples_to_features(exs, label_list, args.max_seq_length, tokenizer)
all_input_ids = torch.tensor([f.input_ids for f in visu_features], dtype=torch.long)
all_input_mask = torch.tensor([f.input_mask for f in visu_features], dtype=torch.long)
all_segment_ids = torch.tensor([f.segment_ids for f in visu_features], dtype=torch.long)
return [all_input_ids, all_segment_ids, all_input_mask]
def fit(self, X, y=None, **fit_params):
# No fitting required; present for scikit-learn pipeline compatibility.
return self
class MyBERT(torch.nn.Module):
"""Class to wrap the current BERT model."""
def __init__(self):
super(MyBERT, self).__init__()
self.model = model
def forward(self, X):
"""Apply a softmax function to the output of the BERT model."""
return torch.nn.functional.softmax(self.model(*X), dim=1)
# Creates a NeuralNetClassifier wrapping the (already fine-tuned) model.
if device == torch.device('cuda'):
net = NeuralNetClassifier(MyBERT, device='cuda', max_epochs=0, lr=0.0, train_split=None)
else:
net = NeuralNetClassifier(MyBERT, max_epochs=0, lr=0.0, train_split=None)
# Set up the pipeline.
c = make_pipeline(BertConverter(), net)
# To initialize the pipeline (does not train, because epochs=0).
c.fit(raw_text_list, y=torch.zeros(len(raw_text_list), dtype=torch.long))
# Print the predictions and probabilities for the example texts.
print(c.predict_proba(raw_text_list))
# Creates the LimeTextExplainer.
# bow=False so each occurrence of a word is perturbed individually;
# masked words are replaced by the "[UNK]" token.
explainer = LimeTextExplainer(class_names=processor.get_labels(), bow=False, mask_string="[UNK]")
# Explain the first example in the list and save the result using LIME.
idx = 0
exp = explainer.explain_instance(raw_text_list[idx], c.predict_proba)
print('Document id: %d' % idx)
print('Probability(support) =', c.predict_proba([raw_text_list[idx]])[0, 1])
print('True class: %s' % "None")
print(exp.as_list())
exp.save_to_file(os.path.join(args.output_dir, "lime.html"))
# Explain the first example using the ANCHOR explainer and save the result.
nlp = spacy.load("en_core_web_sm")
explainer2 = anchor_text.AnchorText(nlp, processor.get_labels(), use_unk_distribution=True)
exp2 = explainer2.explain_instance(raw_text_list[idx], c.predict, threshold=0.95, use_proba=True)
pred = explainer2.class_names[c.predict([raw_text_list[idx]])[0]]
alternative = explainer2.class_names[1 - c.predict([raw_text_list[idx]])[0]]
print('Anchor: %s' % (' AND '.join(exp2.names())))
print('Precision: %.2f\n' % exp2.precision())
print('Examples where anchor applies and model predicts %s:\n' % pred)
print('\n'.join([x[0] for x in exp2.examples(only_same_prediction=True)]))
print('Examples where anchor applies and model predicts %s:\n' % alternative)
print('\n'.join([x[0] for x in exp2.examples(only_different_prediction=True)]))
exp2.save_to_file(os.path.join(args.output_dir, "anchor.html"))
if __name__ == "__main__":
"""Command line program to fine-tune BERT."""
# Delegate to main(), which parses CLI arguments and runs the selected modes.
main()
| 38,443
|
https://github.com/P79N6A/demo/blob/master/100-RollingNotice-master/RollingNotice/CustomNoticeCell.h
|
Github Open Source
|
Open Source
|
MIT, Apache-2.0
| 2,018
|
demo
|
P79N6A
|
Objective-C
|
Code
| 32
| 107
|
//
// CustomNoticeCell.h
// RollingNotice
//
// Created by qm on 2017/12/8.
// Copyright © 2017 qm. All rights reserved.
//
#import "GYNoticeViewCell.h"
/// Custom notice cell; subclasses GYNoticeViewCell from the rolling-notice view.
@interface CustomNoticeCell : GYNoticeViewCell
/// Configures the cell content for the notice at `index` in `arr`.
/// NOTE(review): semantics inferred from the name only — confirm against the
/// implementation (.m) file.
- (void)noticeCellWithArr:(NSArray *)arr forIndex:(NSUInteger)index;
@end
| 9,339
|
https://github.com/fo-dicom/fo-dicom/blob/master/FO-DICOM.Core/IO/Buffer/EvenLengthBuffer.cs
|
Github Open Source
|
Open Source
|
MIT, MS-PL, OFFIS, IJG, LicenseRef-scancode-warranty-disclaimer, BSD-2-Clause, LicenseRef-scancode-unknown-license-reference
| 2,023
|
fo-dicom
|
fo-dicom
|
C#
|
Code
| 394
| 890
|
// Copyright (c) 2012-2023 fo-dicom contributors.
// Licensed under the Microsoft Public License (MS-PL).
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace FellowOakDicom.IO.Buffer
{
    /// <summary>
    /// Wrapper class for uneven length buffers that needs to be represented as even length buffers.
    /// The wrapped buffer is exposed as if a single zero padding byte were appended at the end.
    /// </summary>
    public class EvenLengthBuffer : IByteBuffer
    {
        /// <summary>
        /// Initializes an instance of the <see cref="EvenLengthBuffer"/> class.
        /// </summary>
        /// <param name="buffer">Uneven length buffer.</param>
        /// <remarks>Constructor is private to ensure that instance is not created for an even length buffer. Static method <see cref="Create"/>
        /// should be used to initialize buffers.</remarks>
        private EvenLengthBuffer(IByteBuffer buffer)
        {
            Buffer = buffer;
        }

        /// <summary>
        /// Underlying uneven length buffer.
        /// </summary>
        public IByteBuffer Buffer { get; }

        /// <summary>
        /// Gets whether the buffer is held in memory.
        /// </summary>
        public bool IsMemory => Buffer.IsMemory;

        /// <summary>
        /// Gets the size of the even length buffer, which is always equal to the underlying (uneven length) buffer plus 1.
        /// </summary>
        public long Size => Buffer.Size + 1;

        /// <summary>
        /// Gets the buffer data, which is equal to the underlying buffer data plus a zero padding byte at the end.
        /// </summary>
        public byte[] Data
        {
            get
            {
                // New arrays are zero-initialized, so the final (padding) byte is already 0.
                var data = new byte[Size];
                System.Buffer.BlockCopy(Buffer.Data, 0, data, 0, (int)Buffer.Size);
                return data;
            }
        }

        /// <summary>
        /// Copies <paramref name="count"/> bytes starting at <paramref name="offset"/> into <paramref name="output"/>.
        /// Requested positions past the underlying buffer (i.e. the padding byte) are written as zero.
        /// </summary>
        /// <param name="offset">Zero-based offset into this (even length) buffer.</param>
        /// <param name="count">Number of bytes to copy.</param>
        /// <param name="output">Destination array; must hold at least <paramref name="count"/> bytes.</param>
        /// <exception cref="ArgumentNullException">Thrown when <paramref name="output"/> is null.</exception>
        /// <exception cref="ArgumentException">Thrown when <paramref name="output"/> is too small.</exception>
        public void GetByteRange(long offset, int count, byte[] output)
        {
            if (output == null)
            {
                throw new ArgumentNullException(nameof(output));
            }

            if (output.Length < count)
            {
                throw new ArgumentException($"Output array with {output.Length} bytes cannot fit {count} bytes of data");
            }

            var copyLength = (int)Math.Min(Buffer.Size - offset, count);
            System.Buffer.BlockCopy(Buffer.Data, (int)offset, output, 0, copyLength);

            // Fix: the original implementation left any requested positions beyond
            // the underlying buffer (the padding byte) untouched, leaking whatever
            // stale data the caller-supplied array contained. Write them as zero so
            // the returned range matches the buffer contract.
            for (var i = Math.Max(copyLength, 0); i < count; i++)
            {
                output[i] = 0;
            }
        }

        public void CopyToStream(Stream s)
        {
            // Writing the contents of the uneven buffer
            Buffer.CopyToStream(s);

            // Writing another single byte, so that the contents are even
            s.WriteByte(0);
        }

        public async Task CopyToStreamAsync(Stream s, CancellationToken cancellationToken)
        {
            // Writing the contents of the uneven buffer
            await Buffer.CopyToStreamAsync(s, cancellationToken).ConfigureAwait(false);

            // Writing another single byte, so that the contents are even
            s.WriteByte(0);
        }

        /// <summary>
        /// If necessary, creates an even length buffer for the specified <paramref name="buffer"/>.
        /// </summary>
        /// <param name="buffer">Buffer that is required to be of even length.</param>
        /// <returns>
        /// If <paramref name="buffer"/> is of uneven length, returns an even length buffer wrapping the <paramref name="buffer"/>,
        /// otherwise returns the buffer itself.
        /// </returns>
        public static IByteBuffer Create(IByteBuffer buffer)
            => (buffer.Size & 1) == 1
                ? new EvenLengthBuffer(buffer)
                : buffer;
    }
}
| 31,792
|
https://github.com/johnrulon/TechTime/blob/master/NullObjectExample/NullObjectExample/NullWorkStrategy.cs
|
Github Open Source
|
Open Source
|
MIT
| null |
TechTime
|
johnrulon
|
C#
|
Code
| 37
| 110
|
namespace NullObjectExample
{
    using System;

    using Common.Strategies;

    /// <summary>
    /// Null Object implementation of <see cref="WorkStrategy"/>: a safe,
    /// do-nothing stand-in that callers can use instead of a null reference.
    /// </summary>
    public class NullWorkStrategy : WorkStrategy
    {
        /// <summary>
        /// Placeholder text identifying the null strategy.
        /// </summary>
        public override string StrategyText => "...";

        /// <summary>
        /// Intentionally performs no real work; writes a trace line only.
        /// </summary>
        public override void StartStrategy()
        {
            Console.WriteLine("Null strategy.");
        }
    }
}
| 49,148
|
https://github.com/Cian747/job/blob/master/app/models/user.py
|
Github Open Source
|
Open Source
|
MIT
| 2,021
|
job
|
Cian747
|
Python
|
Code
| 103
| 451
|
from app import db
from werkzeug.security import generate_password_hash,check_password_hash
from flask_login import UserMixin
from app import login_manager
from datetime import datetime
class User(UserMixin,db.Model):
# SQLAlchemy model for application users; UserMixin supplies the
# Flask-Login session API (is_authenticated, get_id, ...).
__tablename__ = 'users'
id = db.Column(db.Integer,primary_key = True)
roles_id = db.Column(db.Integer,db.ForeignKey('roles.id'))
first_name = db.Column(db.String(255), nullable=False)
other_names = db.Column(db.String(255), nullable=False)
username = db.Column(db.String(255))
email = db.Column(db.String(255),unique = True,index = True)
bio = db.Column(db.String())
created_at = db.Column(db.DateTime, default=datetime.utcnow)
# NOTE(review): updated_at only gets a creation-time default; if it should
# track modifications, an onupdate=datetime.utcnow argument is missing.
updated_at = db.Column(db.DateTime, default=datetime.utcnow)
profile_pic_path = db.Column(db.String())
password_hash = db.Column(db.String(255))
my_job_post = db.relationship('Jobs', backref='job',lazy='dynamic')
@property
def password(self):
# Write-only property: the plaintext password is never stored or readable.
raise AttributeError('You cannot read the password attribute')
@password.setter
def password(self, password):
# Store only a salted hash of the password.
self.password_hash = generate_password_hash(password)
def verify_password(self,password):
# True when `password` matches the stored hash.
return check_password_hash(self.password_hash,password)
@login_manager.user_loader
def load_user(user_id):
# Flask-Login callback: reload a user from the id stored in the session.
# NOTE(review): in this extract the function appears at class level; it is
# conventionally module-level — confirm indentation in the original file.
return User.query.get(int(user_id))
def __repr__(self):
return f'User {self.username}'
| 371
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.