identifier
stringlengths 42
383
| collection
stringclasses 1
value | open_type
stringclasses 1
value | license
stringlengths 0
1.81k
| date
float64 1.99k
2.02k
⌀ | title
stringlengths 0
100
| creator
stringlengths 1
39
| language
stringclasses 157
values | language_type
stringclasses 2
values | word_count
int64 1
20k
| token_count
int64 4
1.32M
| text
stringlengths 5
1.53M
| __index_level_0__
int64 0
57.5k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|
https://github.com/plandrover/property_marketplace/blob/master/app/PostProperty.php
|
Github Open Source
|
Open Source
|
MIT
| 2,018
|
property_marketplace
|
plandrover
|
PHP
|
Code
| 46
| 135
|
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
/**
 * Eloquent model representing a property listing posted on the marketplace.
 */
class PostProperty extends Model
{
//
/**
 * Attributes that may be mass-assigned via create()/fill().
 *
 * @var array
 */
protected $fillable = [
'house_number', 'post_code', 'property_type','beds_no','bathrooms_no', 'price', 'commission', 'description', 'image'
];
/**
 * The attributes that should be hidden for arrays.
 *
 * NOTE(review): 'password' and 'remember_token' look copy-pasted from the
 * stock Laravel User model; this model defines no such attributes — confirm
 * and remove if unused.
 *
 * @var array
 */
protected $hidden = [
'password', 'remember_token',
];
}
| 24,347
|
https://github.com/asdad-emizzy/pinpoint/blob/master/commons-server/src/test/java/com/navercorp/pinpoint/common/server/bo/codec/stat/header/Jdk7BitCountingHeaderEncoder.java
|
Github Open Source
|
Open Source
|
DOC, LicenseRef-scancode-free-unknown, CC0-1.0, OFL-1.1, GPL-1.0-or-later, CC-PDDC, GPL-2.0-only, Apache-2.0, LicenseRef-scancode-public-domain, CC-BY-3.0, LicenseRef-scancode-unknown-license-reference, MITNFA, MIT, CC-BY-4.0, OFL-1.0
| 2,020
|
pinpoint
|
asdad-emizzy
|
Java
|
Code
| 178
| 374
|
/*
* Copyright 2017 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.navercorp.pinpoint.common.server.bo.codec.stat.header;
import java.util.BitSet;
/**
 * JDK 7 implementation of {@link BitCountingHeaderEncoder}.
 *
 * <p>Each code is written in unary form: {@code code} consecutive 1-bits
 * followed by a single 0-bit delimiter (the delimiter is simply skipped,
 * since unset {@link BitSet} bits default to 0).
 *
 * @author HyunGil Jeong
 */
public class Jdk7BitCountingHeaderEncoder implements AgentStatHeaderEncoder {

    private final BitSet headerBitSet = new BitSet();
    private int position = 0;

    @Override
    public void addCode(int code) {
        if (code < 0) {
            // 0 is a valid code (it encodes as a lone delimiter bit), so only
            // negative values are rejected. Message fixed: was "must be
            // positive", which contradicted the `code < 0` guard.
            throw new IllegalArgumentException("code must be non-negative");
        }
        int fromIndex = this.position;
        int toIndex = this.position + code;
        this.headerBitSet.set(fromIndex, toIndex);
        this.position = toIndex + 1; // +1 skips over the 0 delimiter bit
    }

    @Override
    public byte[] getHeader() {
        return headerBitSet.toByteArray();
    }
}
| 15,189
|
https://github.com/eram/neta/blob/master/tags/NetaScutter/v0/src/main/java/com/netalign/netascutter/utils/ModelUtils.java
|
Github Open Source
|
Open Source
|
Unlicense
| null |
neta
|
eram
|
Java
|
Code
| 380
| 1,243
|
package com.netalign.netascutter.utils;
import com.hp.hpl.jena.rdf.model.*;
import com.hp.hpl.jena.vocabulary.*;
import com.netalign.rdf.vocabulary.FOAF;
import com.netalign.rdf.vocabulary.SIOC;
/**
 * Utility class for working with <code>foaf:Person</code> and SIOC resources
 * within a Jena model.
 *
 * @author ldodds (person) / yoavram (post)
 */
public class ModelUtils
{
    /**
     * Selector matching statements whose subject is {@code person} and whose
     * predicate's local name equals {@code FOAF.knows.toString()}.
     *
     * <p>NOTE(review): comparing {@code getLocalName()} against a full property
     * URI ({@code toString()}) looks suspicious — preserved as-is to keep
     * behavior, but verify it ever matches.
     */
    public static Selector getKnowsSelector(final Resource person)
    {
        return subjectPredicateSelector(person, FOAF.knows.toString());
    }

    /**
     * Selector matching sioc:has_reply statements of {@code post}
     * (same local-name caveat as {@link #getKnowsSelector}).
     */
    public static Selector getHasReplySelector(final Resource post)
    {
        return subjectPredicateSelector(post, SIOC.has_reply.toString());
    }

    /**
     * Shared implementation of the two selectors above: a non-simple Selector
     * whose test() checks subject identity and predicate local name. The
     * getSubject/getPredicate/getObject accessors return null because the
     * selector is not "simple" (all matching happens in test()).
     */
    private static Selector subjectPredicateSelector(final Resource subject, final String predicateName)
    {
        return new Selector() {
            @Override
            public boolean test(Statement statement)
            {
                return statement.getSubject().equals(subject)
                    && statement.getPredicate().getLocalName().equals(predicateName);
            }
            @Override
            public Resource getSubject()
            {
                return null;
            }
            @Override
            public Property getPredicate()
            {
                return null;
            }
            @Override
            public RDFNode getObject()
            {
                return null;
            }
            @Override
            public boolean isSimple()
            {
                return false;
            }
        };
    }

    /**
     * Returns the foaf:primaryTopic of the personal profile document at
     * {@code base}, or null when the document has no such property.
     */
    public static Resource findTopicOfPersonalProfileDocument(Model model, String base)
    {
        Resource ppd = model.getResource(base);
        Resource primaryTopic = null;
        if ( ppd.hasProperty(FOAF.primaryTopic) )
        {
            primaryTopic = (Resource) ppd.getProperty(FOAF.primaryTopic).getObject();
        }
        return primaryTopic;
    }

    /**
     * Finds the first foaf:Person appearing as object of a foaf:maker
     * statement, or null. {@code base} is unused; kept for signature
     * compatibility with existing callers.
     */
    public static Resource findMaker(Model model, String base)
    {
        return findPersonByProperty(model, FOAF.maker);
    }

    /** Returns the first object of {@code property} that is a foaf:Person, or null. */
    private static Resource findPersonByProperty(Model model, Property property) {
        for (NodeIterator i = model.listObjectsOfProperty(property); i.hasNext() ; ) {
            Resource r = (Resource)i.nextNode().as(Resource.class);
            if (ModelUtils.isFoafPerson(r)) {
                return r;
            }
        }
        return null;
    }

    /**
     * Finds a creator person, preferring sioc:has_creator over dc:creator.
     * {@code base} is unused; kept for signature compatibility.
     */
    public static Resource findCreator(Model model, String base)
    {
        Resource r = findPersonByProperty(model, SIOC.has_creator);
        if (r == null) {
            r = findPersonByProperty(model, DC.creator);
        }
        return r;
    }

    /** True when the resource carries rdf:type foaf:Person. */
    public static boolean isFoafPerson(Resource resource)
    {
        return resource.hasProperty(RDF.type, FOAF.Person);
    }

    /** True when the resource carries rdf:type sioc:Post. */
    public static boolean isSiocPost(Resource resource)
    {
        return resource.hasProperty(RDF.type, SIOC.Post);
    }

    /** True when the resource carries rdf:type sioc:User. */
    public static boolean isSiocUser(Resource resource)
    {
        return resource.hasProperty(RDF.type, SIOC.User);
    }

    /** True when the resource carries rdf:type sioc:Forum. */
    public static boolean isSiocForum(Resource resource)
    {
        return resource.hasProperty(RDF.type, SIOC.Forum);
    }
}
| 19,368
|
https://github.com/pedrohd21/Cursos-Feitos/blob/master/Curso_Python/Secao3-Python-Intermediario-Programacao-Procedural/88-89-90_desafio_valide_um_cnpj/88_validando_cnpj.py
|
Github Open Source
|
Open Source
|
MIT
| null |
Cursos-Feitos
|
pedrohd21
|
Python
|
Code
| 36
| 159
|
from validando_cnpj import remover_caracteres, formula, validando, total, gera
# Validate a known CNPJ, then print 100 freshly generated ones.
cnpj = '04.252.011/0001-10'

digits = remover_caracteres(cnpj)
candidate = digits[0:12] + total(digits)
print(validando(digits, candidate))

for _ in range(100):
    print(gera())
| 27,906
|
https://github.com/najamelan/byte_stream/blob/master/src/ring_buffer.rs
|
Github Open Source
|
Open Source
|
Unlicense
| 2,021
|
byte_stream
|
najamelan
|
Rust
|
Code
| 465
| 1,078
|
use crate::import::*;
/// A RingBuffer that implements `AsyncRead` and `AsyncWrite` from the futures library.
///
/// This object is rather special in that it's read and writes are connected to a single
/// ringbuffer. It's good for low level unit tests for (eg. framing a connection with a
/// codec) and verifying that a codec writes the correct data, but it does not mock a full
/// network connection. Subtle things can go wrong, like when using `AsyncRead::split` and
/// dropping the `WriteHalf`, the `ReadHalf` cannot detect that and the task won't be woken up.
///
/// If you want to mock a network connection, use [Endpoint](crate::Endpoint).
//
#[ allow( dead_code )]
//
pub struct RingBuffer<T: Sized + Copy>
{
    pub(crate) producer   : Producer<T>   , // write half of the shared ring
    pub(crate) consumer   : Consumer<T>   , // read half of the shared ring
    pub(crate) read_waker : Option<Waker> , // presumably woken by the AsyncWrite side when data arrives — impls not shown here
    pub(crate) write_waker: Option<Waker> , // presumably woken by the AsyncRead side when space frees — impls not shown here
    pub(crate) closed     : bool          , // set once the buffer is closed (see AsyncWrite impl, not shown)
}
impl<T: Sized + Copy> RingBuffer<T>
{
    /// Construct a `RingBuffer<T>` able to hold up to `size` elements.
    /// As with `Vec`, capacity is independent of the current length.
    pub fn new( size: usize ) -> Self
    {
        let (tx, rx) = SyncRingBuffer::new( size ).split();

        Self
        {
            producer   : tx,
            consumer   : rx,
            read_waker : None,
            write_waker: None,
            closed     : false,
        }
    }

    /// Total number of elements this buffer can hold.
    pub fn capacity( &self ) -> usize
    {
        self.producer.capacity()
    }

    /// `true` when the buffer currently holds no data at all.
    pub fn is_empty( &self ) -> bool
    {
        self.producer.is_empty()
    }

    /// `true` when no more data can be written until something is read.
    pub fn is_full( &self ) -> bool
    {
        self.producer.is_full()
    }

    /// Number of elements currently stored in the buffer.
    pub fn len( &self ) -> usize
    {
        self.producer.len()
    }

    /// Free space left; on an empty buffer, `remaining() == capacity()`.
    pub fn remaining( &self ) -> usize
    {
        self.producer.remaining()
    }
}
/// Build a `RingBuffer` directly from a producer/consumer pair.
///
/// The compiler cannot check that both halves come from the same underlying
/// ring buffer; pairing halves of different rings will misbehave. This exists
/// so a buffer can be seeded before passing it to futures_ringbuf.
//
impl<T: Sized + Copy> From< (Producer<T>, Consumer<T>) > for RingBuffer<T>
{
    fn from( (producer, consumer): (Producer<T>, Consumer<T>) ) -> Self
    {
        Self
        {
            producer,
            consumer,
            read_waker : None,
            write_waker: None,
            closed     : false,
        }
    }
}
impl<T: Sized + Copy> From< SyncRingBuffer<T> > for RingBuffer<T>
{
    /// Split the synchronous ring buffer and wrap both halves.
    fn from( buffer: SyncRingBuffer<T> ) -> Self
    {
        let (tx, rx) = buffer.split();

        Self
        {
            producer   : tx,
            consumer   : rx,
            read_waker : None,
            write_waker: None,
            closed     : false,
        }
    }
}
impl<T: Sized + Copy> fmt::Debug for RingBuffer<T>
{
    /// Debug output deliberately shows only the capacity, not the contents.
    fn fmt( &self, f: &mut fmt::Formatter<'_> ) -> fmt::Result
    {
        let cap = self.capacity();
        write!( f, "RingBuffer with capacity: {}", cap )
    }
}
| 32,771
|
https://github.com/isramv/opengist/blob/master/src/AppBundle/UserBundle/Resources/views/layout-auth.html.twig
|
Github Open Source
|
Open Source
|
MIT
| null |
opengist
|
isramv
|
Twig
|
Code
| 65
| 261
|
{# Authenticated-area layout: extends the no-sidebar UIkit base and adds a
   tab bar linking the FOS user pages (profile / password / edit). #}
{% extends '@BetterGists/base-uikit-no-sidebar.html.twig' %}
{% block navigation %}
    {# 'uk-active' highlights the tab whose route matches the current request URI #}
    <ul class="uk-tab">
        <li class="{{ (app.request.requestUri == path('fos_user_profile_show')) ? 'uk-active' }}"><a href="{{ path('fos_user_profile_show') }}">Profile</a></li>
        <li class="{{ (app.request.requestUri == path('fos_user_change_password')) ? 'uk-active' }}"><a href="{{ path('fos_user_change_password') }}">Change Password</a></li>
        <li class="{{ (app.request.requestUri == path('fos_user_profile_edit')) ? 'uk-active' }}"><a href="{{ path('fos_user_profile_edit') }}">Edit Profile</a></li>
    </ul>
{% endblock %}
{% block content %}
    <div class="rc-container">
        {# FOSUserBundle templates inject their form markup here #}
        {% block fos_user_content %}{% endblock %}
    </div>
{% endblock %}
| 4,541
|
https://github.com/swagkarna/arissploit/blob/master/modules/port_scanner.py
|
Github Open Source
|
Open Source
|
MIT
| 2,021
|
arissploit
|
swagkarna
|
Python
|
Code
| 223
| 737
|
from core.arissploit import *
from core import colors
import socket
import subprocess
from datetime import datetime
# Module metadata consumed by the arissploit module loader.
conf = {
    "name": "port_scanner",
    "version": "1.0",
    "shortdesc": "Scan open ports.",
    "author": "Entynetproject",
    "initdate": "4.3.2016",
    "lastmod": "3.1.2017",
    "apisupport": True  # module can also be driven through the scripting API
}
# List of the variables
# Each entry maps a variable name to [current value, help text]; values are
# mutated in place by run() and by the framework's `set` command.
variables = OrderedDict((
    ('target', ['google.com', 'Target address.']),
    ('first', [1, 'First port which will be scanned.']),
    ('last', [100, 'Last port which will be scanned.']),
))
# Simple changelog
changelog = "Version 1.0:\nrelease"
def run():
    """Scan TCP ports [first, last] on `target`.

    Prints per-port status lines and a timing summary; returns the list of
    open ports, or a ModuleError when resolution/connection/input fails so
    API callers can detect the failure.
    """
    open_ports = []
    # Allow URLs as targets by stripping the scheme prefix.
    variables['target'][0] = variables['target'][0].replace("http://", "")
    variables['target'][0] = variables['target'][0].replace("https://", "")
    try:
        targetip = socket.gethostbyname(variables['target'][0])
    except socket.gaierror:
        printError('Hostname could not be resolved!')
        return ModuleError("Hostname could not be resolved!")
    socket.setdefaulttimeout(0.5)
    print(colors.blue + "-" * 60)
    print("Please wait, scanning target...", targetip)
    print("-" * 60 + colors.end)
    t1 = datetime.now()
    end = variables['last'][0] + 1
    try:
        for port in range(int(variables['first'][0]), int(end)):
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            try:
                # connect_ex returns an errno (0 on success) instead of raising.
                if sock.connect_ex((targetip, port)) == 0:
                    print(colors.green + "Port {}: Open".format(port) + colors.end)
                    open_ports.append(port)
                else:
                    print(colors.red + "Port {}: Closed".format(port) + colors.end)
            finally:
                # Previously the socket leaked if an exception fired before
                # sock.close(); finally guarantees cleanup every iteration.
                sock.close()
    except socket.gaierror:
        printError('Hostname could not be resolved!')
        return ModuleError("Hostname could not be resolved!")
    except socket.error:
        # Fixed inconsistency: this message was wrapped in color codes unlike
        # every other printError call in this module.
        printError("Couldn't connect to server!")
        return ModuleError("Couldn't connect to server!")
    except ValueError:
        printError("Port value must be integer!")
        return ModuleError("Port value must be integer!")
    # Checking the time again
    t2 = datetime.now()
    # Difference of the timestamps = how long the scan took.
    total = t2 - t1
    printInfo('Scanning completed in: ' + str(total))
    return open_ports
| 19,544
|
https://github.com/FrancisVarga/reborn/blob/master/vendor/processus-framework/src/Processus/Contrib/Zend/Code/Exception/InvalidArgumentException.php
|
Github Open Source
|
Open Source
|
MIT
| 2,012
|
reborn
|
FrancisVarga
|
PHP
|
Code
| 13
| 41
|
<?php
namespace Zend\Code\Exception;
use Zend\Code\Exception;
/**
 * SPL InvalidArgumentException tagged with the Zend\Code\Exception marker
 * interface, so callers can catch all Zend\Code errors with one type.
 */
class InvalidArgumentException extends \InvalidArgumentException implements
Exception
{
}
| 1,216
|
https://github.com/benthomasson/ansible-automata/blob/master/tests/test_ast.py
|
Github Open Source
|
Open Source
|
Apache-2.0
| null |
ansible-automata
|
benthomasson
|
Python
|
Code
| 206
| 1,026
|
from ansible_automata import ast
from ansible_automata import parser
import yaml
import os
HERE = os.path.abspath(os.path.dirname(__file__))
def load_test_fsm(name):
    """Read and YAML-parse the fixture file tests/inputs/<name>."""
    with open(os.path.join(HERE, 'inputs', name)) as f:
        return yaml.safe_load(f.read())
def test_null():
    """An empty definition list parses to an AST containing no FSMs."""
    tree = parser.parse_to_ast([])
    assert len(tree.fsms) == 0
    assert type(tree) == ast.AST
def test_single():
    """A single-FSM document parses to one ast.FSM named FSM1."""
    fixture = load_test_fsm('single.yml')
    tree = parser.parse_to_ast(fixture)
    assert len(tree.fsms) == 1
    fsm = tree.fsms[0]
    assert fsm.name == "FSM1"
    assert type(fsm) == ast.FSM
def test_single_with_states():
    """States listed under an FSM become ordered ast.State children."""
    data = load_test_fsm('single_with_states.yml')
    single_ast = parser.parse_to_ast(data)
    assert len(single_ast.fsms) == 1
    assert single_ast.fsms[0].name == "FSM1"
    # Identity check (`is False`) keeps the gather_facts flag strictly boolean.
    assert single_ast.fsms[0].gather_facts is False
    assert len(single_ast.fsms[0].states) == 2
    assert type(single_ast.fsms[0].states[0]) == ast.State
    assert single_ast.fsms[0].states[0].name == "Start"
    assert single_ast.fsms[0].states[1].name == "End"
def test_single_with_import_from():
    """The import_from path in the document is carried onto the FSM node."""
    fixture = load_test_fsm('single_with_import_from.yml')
    tree = parser.parse_to_ast(fixture)
    assert len(tree.fsms) == 1
    fsm = tree.fsms[0]
    assert fsm.name == "FSM1"
    assert fsm.gather_facts is False
    assert fsm.import_from == 'tests/inputs/fsm1.yml'
def test_single_with_handlers():
    """States carry named ast.Handler children (enter/exit)."""
    data = load_test_fsm('single_with_handlers.yml')
    single_ast = parser.parse_to_ast(data)
    assert len(single_ast.fsms) == 1
    assert single_ast.fsms[0].name == "FSM1"
    assert single_ast.fsms[0].gather_facts is False
    assert len(single_ast.fsms[0].states) == 2
    assert type(single_ast.fsms[0].states[0]) == ast.State
    # The original asserted states[0]'s handler count twice; deduplicated.
    assert len(single_ast.fsms[0].states[0].handlers) == 2
    assert len(single_ast.fsms[0].states[1].handlers) == 1
    assert type(single_ast.fsms[0].states[0].handlers[0]) == ast.Handler
    assert type(single_ast.fsms[0].states[0].handlers[1]) == ast.Handler
    assert single_ast.fsms[0].states[0].handlers[0].name == "enter"
    assert single_ast.fsms[0].states[0].handlers[1].name == "exit"
    assert single_ast.fsms[0].states[1].handlers[0].name == "enter"
def test_multiple():
    """Two FSM definitions produce two ast.FSM nodes in document order."""
    fixture = load_test_fsm('multiple.yml')
    tree = parser.parse_to_ast(fixture)
    assert len(tree.fsms) == 2
    for index, expected_name in enumerate(["FSM1", "FSM2"]):
        assert tree.fsms[index].name == expected_name
        assert type(tree.fsms[index]) == ast.FSM
| 47,754
|
https://github.com/NoNews/NHL/blob/master/app/src/main/java/com/example/nhlstats/features/standings/StandingsFlowFragment.kt
|
Github Open Source
|
Open Source
|
Apache-2.0
| null |
NHL
|
NoNews
|
Kotlin
|
Code
| 36
| 169
|
package com.example.nhlstats.features.standings
import android.os.Bundle
import com.example.nhlstats.FlowKey
import com.example.nhlstats.common.presentation.FlowFragment
import com.example.nhlstats.features.standings.currentseasontable.CurrentSeasonContract
/**
 * Flow container for the standings feature: on first creation it routes the
 * flow to the current-season table screen.
 */
class StandingsFlowFragment : FlowFragment() {

    override fun flowName(): String = FlowKey.STANDINGS

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        // Seed the flow only on a fresh creation; after a restore the router
        // presumably still has its screen stack — confirm against FlowFragment.
        if (savedInstanceState == null) {
            router.replaceScreen(CurrentSeasonContract.createScreen())
        }
    }
}
| 25,103
|
https://github.com/980792492/YIFUReactWX/blob/master/app/js/components/index.js
|
Github Open Source
|
Open Source
|
MIT
| null |
YIFUReactWX
|
980792492
|
JavaScript
|
Code
| 46
| 114
|
// Barrel module re-exporting the shared mixins and UI components.
//
// Rewritten from the non-standard Stage-1 `export X from './m'` proposal
// syntax (which requires a dedicated Babel plugin) to the standard,
// semantically identical `export { default as X } from './m'` form.

//Mixin
export { default as AuthMixin } from './AuthMixin';
export { default as BgColorMixin } from './BgColorMixin';
//UI
export { default as Top } from './Top';
export { default as BottomBar } from './BottomBar';
export { default as SendSmsButton } from './SendSmsButton';
export { default as Dialog } from './Dialog';
export { default as Icon } from './Icon';
export { default as Pagination } from './Pagination';
export { default as MyScroll } from './MyScroll';
export { default as PayLink } from './PayLink';
export { default as SendButton } from './SendButton';
| 46,504
|
https://github.com/Tauron1990/ImageOrgenizer/blob/master/Tauron.Application.ImageOrganizer.BL/Operations/ScheduleDownloadRule.cs
|
Github Open Source
|
Open Source
|
MIT
| null |
ImageOrgenizer
|
Tauron1990
|
C#
|
Code
| 148
| 602
|
using System;
using System.Collections.Generic;
using System.Linq;
using Tauron.Application.Common.BaseLayer;
using Tauron.Application.Common.BaseLayer.Core;
using Tauron.Application.ImageOrganizer.BL.Provider;
using Tauron.Application.ImageOrganizer.Data.Entities;
using Tauron.Application.ImageOrganizer.Data.Repositories;
using Tauron.Application.Ioc;
namespace Tauron.Application.ImageOrganizer.BL.Operations
{
    /// <summary>
    /// Business rule that persists download requests as queued (or paused)
    /// entities, optionally skipping duplicates, and returns the items that
    /// were actually scheduled.
    /// NOTE(review): the rule-name constant is spelled "ScheduleDonwnload";
    /// renaming it would require touching RuleNames and every lookup site.
    /// </summary>
    [ExportRule(RuleNames.ScheduleDonwnload)]
    public class ScheduleDownloadRule : IOBusinessRuleBase<DownloadItem[], DownloadItem[]>
    {
        // Resolves the provider used to derive an image name from its URL.
        [Inject]
        public IProviderManager ProviderManager { get; set; }

        [InjectRepo]
        public IDownloadRepository DownloadRepository { get; set; }

        // Lazy: only materialized when an AvoidDouble check needs it.
        [InjectRepo]
        public Lazy<IImageRepository> ImageRepository { get; set; }

        /// <summary>
        /// Adds each input as a download entity and saves in one unit of work;
        /// inputs already present are skipped when <c>AvoidDouble</c> is set.
        /// </summary>
        public override DownloadItem[] ActionImpl(DownloadItem[] inputs)
        {
            List<DownloadEntity> items = new List<DownloadEntity>();
            using (var db = Enter())
            {
                foreach (var input in inputs)
                {
                    //if (input.DownloadType == DownloadType.DownloadImage || input.DownloadType == DownloadType.ReDownload)
                    //{
                    //    AppConststands.NotImplemented();
                    //    continue;
                    //}
                    if(input.AvoidDouble)
                    {
                        string name;
                        try
                        {
                            // Ask the provider for the canonical image name;
                            // fall back to the raw URL if it cannot parse it.
                            name = ProviderManager.Get(input.Provider).NameFromUrl(input.Image);
                        }
                        catch
                        {
                            name = input.Image;
                        }
                        // Skip when an identical download is already queued, or
                        // the image exists and no extra metadata was supplied.
                        if (DownloadRepository.Contains(input.Image, input.Metadata, input.DownloadType) || (ImageRepository.Value.Containes(name) && string.IsNullOrEmpty(input.Metadata)))
                            continue;
                    }
                    // Preserve an explicit pause request; everything else queues.
                    var queue = input.DownloadStade == DownloadStade.Paused ? DownloadStade.Paused : DownloadStade.Queued;
                    items.Add(DownloadRepository.Add(input.Image, input.DownloadType, input.Schedule, input.Provider, input.AvoidDouble, input.RemoveImageOnFail, input.Metadata, queue));
                }
                if(items.Count > 0)
                    db.SaveChanges();
            }
            return items.Select(de => new DownloadItem(de)).ToArray();
        }
    }
}
| 28,681
|
https://github.com/otto-de/rx-composer/blob/master/composer-core/src/main/java/de/otto/rx/composer/page/CompositeFragment.java
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,017
|
rx-composer
|
otto-de
|
Java
|
Code
| 313
| 891
|
package de.otto.rx.composer.page;
import com.google.common.collect.ImmutableList;
import de.otto.rx.composer.content.Content;
import de.otto.rx.composer.content.Parameters;
import de.otto.rx.composer.content.Position;
import de.otto.rx.composer.tracer.Tracer;
import org.slf4j.Logger;
import rx.Observable;
import java.util.List;
import java.util.function.Function;
import static de.otto.rx.composer.content.ErrorContent.errorContent;
import static java.util.stream.Collectors.toList;
import static org.slf4j.LoggerFactory.getLogger;
import static rx.Observable.just;
import static rx.Observable.merge;
/**
* {@inheritDoc}
*
* <p>
* A Fragment that consists of a single delegate Fragment and a list of nested Fragments that are used to continue
* retrieving content based on the results of the initial content.
* </p>
* <p>
* Example:
* </p>
* <p>
* First fetch {@link Content} from System X. Extract {@link Parameters} from this Content and proceed
* with these Parameters by calling the nested Fragments.
* </p>
*/
class CompositeFragment implements Fragment {

    private static final Logger LOG = getLogger(CompositeFragment.class);

    /**
     * Pairs the nested Fragments with the function that derives their
     * Parameters from the first Fragment's Content.
     */
    static class FragmentContinuation {
        final ImmutableList<Fragment> nested;
        final Function<Content,Parameters> paramExtractor;
        FragmentContinuation(final Function<Content, Parameters> paramExtractor,
                             final ImmutableList<Fragment> nested) {
            this.nested = nested;
            this.paramExtractor = paramExtractor;
        }
    }

    /** The initial Fragment. */
    private final Fragment first;

    /** The continuation that is executed using the results from the first Fragment. */
    private final FragmentContinuation continuation;

    /**
     * Creates a CompositeFragment from a first Fragment and a continuation.
     * @param first the fist / initial Fragment to fetch
     * @param continuation Function to extract Parameters from the first Fragment plus list of nested Fragments.
     */
    CompositeFragment(final Fragment first,
                      final FragmentContinuation continuation) {
        this.first = first;
        this.continuation = continuation;
    }

    @Override
    public Observable<Content> fetchWith(final Tracer tracer, final Parameters parameters) {
        // Single start timestamp shared by all ErrorContent created below.
        final long startedTs = System.currentTimeMillis();
        return first
                .fetchWith(tracer, parameters)
                // Convert a failure of the first fragment into ErrorContent
                // rather than an error signal, so composition continues.
                .onErrorReturn(e -> errorContent(first.getPosition(), e, startedTs))
                .filter(Content::isAvailable)
                .flatMap(content -> {
                    // Extend the incoming parameters with values extracted
                    // from the first fragment's content.
                    final Parameters nestedParams = parameters.with(continuation.paramExtractor.apply(content));
                    final List<Observable<Content>> observables = this.continuation.nested
                            .stream()
                            .map(fragment -> fragment
                                    .fetchWith(tracer, nestedParams)
                                    .onErrorReturn(e -> errorContent(fragment.getPosition(), e, startedTs))
                                    .filter(Content::isAvailable)
                            )
                            .collect(toList());
                    // Add the content, so we can retrieve the content from the first fragment:
                    observables.add(just(content));
                    return merge(observables)
                            .doOnError((t) -> LOG.error(t.getMessage(), t));
                });
    }

    @Override
    public Position getPosition() {
        return first.getPosition();
    }
}
| 39,403
|
https://github.com/DovetailSoftware/dovetail-bootstrap/blob/master/source/Dovetail.SDK.ModelMap/Serialization/ObjectBuilderError.cs
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,023
|
dovetail-bootstrap
|
DovetailSoftware
|
C#
|
Code
| 23
| 54
|
namespace Dovetail.SDK.ModelMap.Serialization
{
    /// <summary>
    /// A single error raised while building an object from a model map:
    /// an identifying key plus a human-readable message.
    /// </summary>
    public class ObjectBuilderError
    {
        /// <summary>Identifier of the failing input (presumably the map key — confirm with callers).</summary>
        public string Key { get; set; }

        /// <summary>Description of what went wrong.</summary>
        public string Message { get; set; }
    }
}
| 20,427
|
https://github.com/Rithlock/VichUploaderSerializationBundle/blob/master/Tests/Fixtures/UserB.php
|
Github Open Source
|
Open Source
|
MIT
| 2,021
|
VichUploaderSerializationBundle
|
Rithlock
|
PHP
|
Code
| 273
| 889
|
<?php
/*
* This file is part of the FreshVichUploaderSerializationBundle
*
* (c) Artem Henvald <genvaldartem@gmail.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
declare(strict_types=1);
namespace Fresh\VichUploaderSerializationBundle\Tests\Fixtures;
use Fresh\VichUploaderSerializationBundle\Annotation as Fresh;
use JMS\Serializer\Annotation as JMS;
use Symfony\Component\HttpFoundation\File\File;
use Vich\UploaderBundle\Mapping\Annotation as Vich;
/**
 * UserB Entity.
 *
 * Test fixture exercising VichSerializableField with and without host
 * inclusion (photo includes the host, cover sets includeHost=false).
 *
 * @JMS\ExclusionPolicy("all")
 *
 * @Vich\Uploadable
 *
 * @Fresh\VichSerializableClass
 */
class UserB
{
    /**
     * @var string|null
     *
     * @JMS\Expose
     * @JMS\SerializedName("photo")
     *
     * @Fresh\VichSerializableField("photoFile")
     */
    private $photoName;

    /**
     * @var File|null
     *
     * @JMS\Exclude
     *
     * @Vich\UploadableField(mapping="user_photo_mapping", fileNameProperty="photoName")
     */
    private $photoFile;

    /**
     * @var string|null
     *
     * @JMS\Expose
     * @JMS\SerializedName("cover")
     *
     * @Fresh\VichSerializableField("coverFile", includeHost=false)
     */
    private $coverName;

    /**
     * @var File|null
     *
     * @JMS\Exclude
     *
     * @Vich\UploadableField(mapping="user_cover_mapping", fileNameProperty="coverName")
     */
    private $coverFile;

    /**
     * @return string|null
     */
    public function getPhotoName(): ?string
    {
        return $this->photoName;
    }

    /**
     * @param string|null $photoName
     *
     * @return $this
     */
    public function setPhotoName(?string $photoName): self
    {
        $this->photoName = $photoName;
        return $this;
    }

    /**
     * @return File|null
     */
    public function getPhotoFile(): ?File
    {
        return $this->photoFile;
    }

    /**
     * @param File $photoFile
     *
     * @return $this
     */
    public function setPhotoFile(File $photoFile): self
    {
        $this->photoFile = $photoFile;
        return $this;
    }

    /**
     * @return string|null
     */
    public function getCoverName(): ?string
    {
        return $this->coverName;
    }

    /**
     * @param string|null $coverName
     *
     * @return $this
     */
    public function setCoverName(?string $coverName): self
    {
        $this->coverName = $coverName;
        return $this;
    }

    /**
     * @return File|null
     */
    public function getCoverFile(): ?File
    {
        return $this->coverFile;
    }

    /**
     * @param File $coverFile
     *
     * @return $this
     */
    public function setCoverFile(File $coverFile): self
    {
        $this->coverFile = $coverFile;
        return $this;
    }
}
| 20,592
|
https://github.com/googleads/googleads-perl-lib/blob/master/t/util/TestAPIUtils.pm
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,021
|
googleads-perl-lib
|
googleads
|
Perl
|
Code
| 1,164
| 4,160
|
# Copyright 2011, Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
package TestAPIUtils;

use strict;

use vars qw(@EXPORT_OK @ISA);

use Google::Ads::Common::MediaUtils;

use Data::Uniqid qw(uniqid);
use Exporter;
use File::Basename;
use File::Spec;
use POSIX;

@ISA = qw(Exporter);
# Export the names that are actually defined below.  The previous list
# advertised add_draft/add_trial, which do not exist (the subs are named
# create_draft/create_trial), so importing them installed undefined subs
# that failed only when called.  create_campaign_with_subtype and
# get_any_child_client_email were also missing from the list.
# TODO(review): get_location_for_address is exported but not defined in this
# module — confirm whether it was removed or lives elsewhere.
@EXPORT_OK = qw(get_api_package create_campaign create_campaign_with_subtype
  delete_campaign create_ad_group delete_ad_group create_text_ad delete_text_ad
  create_keyword delete_keyword get_test_image get_any_child_client_email
  get_location_for_address create_experiment delete_experiment
  create_draft delete_draft create_trial delete_trial);
# Builds the fully-qualified AdWords class name for $name under the client's
# API version (e.g. "Google::Ads::AdWords::v201809::Campaign").  When the
# optional $import flag is true the package is also loaded via a string eval.
sub get_api_package {
  my ($client, $name, $import) = @_;

  my $package =
    sprintf("Google::Ads::AdWords::%s::%s", $client->get_version(), $name);
  eval("use $package") if $import;
  return $package;
}
# Creates (via a mutate ADD) a new campaign with a freshly created standard
# budget.  Arguments: API client, advertisingChannelType string, and an
# optional pre-built BiddingStrategyConfiguration; when none is supplied a
# MANUAL_CPC strategy with enhanced CPC disabled is used.
# Returns the created Campaign object.
sub create_campaign {
  my $client = shift;
  my $advertising_type = shift;
  my $bidding_strategy = shift;
  # Default to manual CPC bidding when the caller did not supply a strategy.
  if (!$bidding_strategy) {
    $bidding_strategy =
      get_api_package($client, "BiddingStrategyConfiguration", 1)->new({
        biddingStrategyType => "MANUAL_CPC",
        biddingScheme => get_api_package($client, "ManualCpcBiddingScheme", 1)
          ->new({enhancedCpcEnabled => 0})});
  }
  # Every campaign needs a budget: create a non-shared, standard-delivery
  # budget of 50,000,000 micros (50 units of the account currency) first.
  my $budget = get_api_package($client, "Budget", 1)->new({
      name => "Test " . uniqid(),
      amount => {microAmount => 50000000},
      deliveryMethod => "STANDARD",
      isExplicitlyShared => "false"
  });
  my $budget_operation = get_api_package($client, "BudgetOperation", 1)->new({
      operand => $budget,
      operator => "ADD"
  });
  $budget =
    $client->BudgetService()->mutate({operations => ($budget_operation)})
    ->get_value();
  my $campaign = get_api_package($client, "Campaign", 1)->new({
      name => "Campaign #" . uniqid(),
      biddingStrategyConfiguration => $bidding_strategy,
      budget => $budget,
  });
  $campaign->set_advertisingChannelType($advertising_type);
  my $operation = get_api_package($client, "CampaignOperation", 1)->new({
      operand => $campaign,
      operator => "ADD"
  });
  $campaign =
    $client->CampaignService()->mutate({operations => [$operation]})
    ->get_value();
  return $campaign;
}
# Like create_campaign(), but sets an explicit advertisingChannelSubType and
# always uses a MANUAL_CPC bidding strategy.  Returns the created Campaign.
sub create_campaign_with_subtype {
  my ($client, $advertising_type, $advertising_sub_type) = @_;
  # Campaigns require a budget; create a standard, non-shared one first.
  my $budget = get_api_package($client, "Budget", 1)->new({
      name => "Test " . uniqid(),
      amount => {microAmount => 50000000},
      deliveryMethod => "STANDARD",
      isExplicitlyShared => "false"
  });
  my $budget_operation = get_api_package($client, "BudgetOperation", 1)->new({
      operand => $budget,
      operator => "ADD"
  });
  $budget =
    $client->BudgetService()->mutate({operations => ($budget_operation)})
    ->get_value();
  my $bidding_strategy =
    get_api_package($client, "BiddingStrategyConfiguration", 1)->new({
      biddingStrategyType => "MANUAL_CPC",
      biddingScheme => get_api_package($client, "ManualCpcBiddingScheme", 1)
        ->new({enhancedCpcEnabled => 0})});
  my $campaign = get_api_package($client, "Campaign", 1)->new({
      name => "Campaign #" . uniqid(),
      biddingStrategyConfiguration => $bidding_strategy,
      budget => $budget,
      advertisingChannelType => $advertising_type,
      advertisingChannelSubType => $advertising_sub_type
  });
  my $operation = get_api_package($client, "CampaignOperation", 1)->new({
      operand => $campaign,
      operator => "ADD"
  });
  $campaign =
    $client->CampaignService()->mutate({operations => [$operation]})
    ->get_value();
  return $campaign;
}
# Marks the campaign identified by $campaign_id as REMOVED via a SET mutate.
sub delete_campaign {
  my ($client, $campaign_id) = @_;

  my $campaign =
    get_api_package($client, "Campaign", 1)->new({id => $campaign_id});
  $campaign->set_status("REMOVED");

  my $set_op = get_api_package($client, "CampaignOperation", 1)->new({
      operator => "SET",
      operand  => $campaign
  });
  $client->CampaignService()->mutate({operations => [$set_op]});
}
# Creates an ad group under $campaign_id.  $name defaults to a unique id;
# $bids (optional array ref) defaults to a single 500,000-micro CpcBid.
# Returns the created AdGroup object.
sub create_ad_group {
  my $client = shift;
  my $campaign_id = shift;
  my $name = shift || uniqid();
  my $bids = shift;
  my $adgroup;
  $adgroup = get_api_package($client, "AdGroup", 1)->new({
      name => $name,
      campaignId => $campaign_id,
      biddingStrategyConfiguration =>
        get_api_package($client, "BiddingStrategyConfiguration", 1)->new({
          # Caller-supplied bids win; otherwise fall back to one CpcBid.
          bids => $bids
            || [
            get_api_package($client, "CpcBid", 1)->new({
                bid => get_api_package($client, "Money", 1)
                  ->new({microAmount => "500000"})}
            ),
            ]})});
  my $operations = [
    get_api_package($client, "AdGroupOperation", 1)->new({
        operand => $adgroup,
        operator => "ADD"
    })];
  my $return_ad_group =
    $client->AdGroupService()->mutate({operations => $operations})->get_value();
  return $return_ad_group;
}
# Marks the ad group identified by $adgroup_id as REMOVED and returns the
# AdGroupService mutate result.
sub delete_ad_group {
  my ($client, $adgroup_id) = @_;

  my $ad_group =
    get_api_package($client, "AdGroup", 1)->new({id => $adgroup_id});
  $ad_group->set_status("REMOVED");

  my $set_op = get_api_package($client, "AdGroupOperation", 1)->new({
      operator => "SET",
      operand  => $ad_group
  });
  return $client->AdGroupService()->mutate({operations => [$set_op]});
}
# Adds a broad-match keyword "Luxury Cruise to Mars" to ad group
# $ad_group_id and returns the created Criterion (with its server id).
sub create_keyword {
  my $client = shift;
  my $ad_group_id = shift;
  my $criterion = get_api_package($client, "Keyword", 1)->new({
      text => "Luxury Cruise to Mars",
      matchType => "BROAD"
  });
  my $keyword_biddable_ad_group_criterion =
    get_api_package($client, "BiddableAdGroupCriterion", 1)->new({
      adGroupId => $ad_group_id,
      criterion => $criterion
    });
  my $result = $client->AdGroupCriterionService()->mutate({
      operations => [
        get_api_package($client, "AdGroupCriterionOperation", 1)->new({
            operator => "ADD",
            operand => $keyword_biddable_ad_group_criterion
        })]});
  return $result->get_value()->[0]->get_criterion();
}
# Removes criterion $criterion_id from ad group $ad_group_id and returns
# the AdGroupCriterionService mutate result.
sub delete_keyword {
  my $client = shift;
  my $ad_group_id = shift;
  my $criterion_id = shift;
  my $ad_group_criterion = get_api_package($client, "AdGroupCriterion", 1)->new(
    {
      adGroupId => $ad_group_id,
      criterion =>
        get_api_package($client, "Criterion", 1)->new({id => $criterion_id})});
  my $operation = get_api_package($client, "AdGroupCriterionOperation", 1)->new(
    {
      operand => $ad_group_criterion,
      operator => "REMOVE"
    });
  return $client->AdGroupCriterionService()
    ->mutate({operations => [$operation]});
}
# Adds a fixed-content ExpandedTextAd to ad group $ad_group_id and returns
# the created Ad object (with its server id).
sub create_text_ad {
  my $client = shift;
  my $ad_group_id = shift;
  my $text_ad = get_api_package($client, "ExpandedTextAd", 1)->new({
      headlinePart1 => "Luxury Cruise to Mars",
      headlinePart2 => "Best Space Cruise Line",
      description => "Buy your tickets now!",
      finalUrls => ["http://www.example.com/"],
      path1 => "all-inclusive",
      path2 => "deals"});
  my $ad_group_ad = get_api_package($client, "AdGroupAd", 1)->new({
      adGroupId => $ad_group_id,
      ad => $text_ad
  });
  my $result = $client->AdGroupAdService()->mutate({
      operations => [
        get_api_package($client, "AdGroupAdOperation", 1)->new({
            operator => "ADD",
            operand => $ad_group_ad
        })]});
  return $result->get_value()->[0]->get_ad();
}
# Removes ad $text_ad_id from ad group $ad_group_id and returns the
# AdGroupAdService mutate result.
sub delete_text_ad {
  my $client = shift;
  my $ad_group_id = shift;
  my $text_ad_id = shift;
  my $ad_group_ad =
    get_api_package($client, "AdGroupAd", 1)
    ->new({
      adGroupId => $ad_group_id,
      ad => get_api_package($client, "Ad", 1)->new({id => $text_ad_id})});
  my $operation = get_api_package($client, "AdGroupAdOperation", 1)->new({
      operand => $ad_group_ad,
      operator => "REMOVE"
  });
  return $client->AdGroupAdService()->mutate({operations => [$operation]});
}
# Creates an experiment on campaign $campaign_id receiving 50% of queries
# and returns the created Experiment object.
sub create_experiment {
  my $client = shift;
  my $campaign_id = shift;
  my $experiment = get_api_package($client, "Experiment", 1)->new({
      campaignId => $campaign_id,
      name => "Test experiment",
      queryPercentage => 50
  });
  my $result = $client->ExperimentService()->mutate({
      operations => [
        get_api_package($client, "ExperimentOperation", 1)->new({
            operator => "ADD",
            operand => $experiment
        })]});
  return $result->get_value()->[0];
}
# Removes the experiment identified by $experiment_id and returns the
# ExperimentService mutate result.
sub delete_experiment {
  my ($client, $experiment_id) = @_;

  my $remove_op = get_api_package($client, "ExperimentOperation", 1)->new({
      operator => "REMOVE",
      operand  =>
        get_api_package($client, "Experiment", 1)->new({id => $experiment_id})
  });
  return $client->ExperimentService()->mutate({operations => [$remove_op]});
}
# Downloads a small test image from a fixed URL and returns it as base64
# data (network access required).
sub get_test_image {
  return Google::Ads::Common::MediaUtils::get_base64_data_from_url(
    "https://goo.gl/3b9Wfh");
}
# Returns the login e-mail of the first non-manager child account found in
# the account hierarchy, or undef when none is found.  Temporarily clears
# the client's clientId so ManagedCustomerService lists the full hierarchy,
# and restores it before returning.
sub get_any_child_client_email {
  my $client = shift;
  my $current_client_id = $client->get_client_id();
  $client->set_client_id(undef);
  my $email;
  my $selector =
    get_api_package($client, "Selector", 1)
    ->new({fields => ["Login", "CanManageClients"]});
  my $page =
    $client->ManagedCustomerService()->get({serviceSelector => $selector});
  foreach my $customer (@{$page->get_entries()}) {
    # NOTE(review): get_login() is compared to "" with string 'ne' here but
    # dereferenced with ->get_value() below, so it appears to be an object
    # with string overloading — confirm against the generated SOAP classes.
    if ($customer->get_login() ne "" && !$customer->get_canManageClients()) {
      $email = $customer->get_login()->get_value();
      last;
    }
  }
  $client->set_client_id($current_client_id);
  return $email;
}
# Creates a draft on campaign $base_campaign_id with a unique name and
# returns the created Draft object.
sub create_draft {
  my $client = shift;
  my $base_campaign_id = shift;
  my $draft = get_api_package($client, "Draft", 1)->new({
      baseCampaignId => $base_campaign_id,
      draftName => sprintf("Test Draft #%s", uniqid())});
  # Create operation.
  my $draft_operation = get_api_package($client, "DraftOperation", 1)->new({
      operator => "ADD",
      operand => $draft
  });
  # Add draft.
  my $result =
    $client->DraftService()->mutate({operations => [$draft_operation]});
  $draft = $result->get_value()->[0];
  return $draft;
}
# Sets the draft's status to ARCHIVED via a SET mutate and returns the
# DraftService mutate result.
sub delete_draft {
  my ($client, $base_campaign_id, $draft_id) = @_;

  my $archived = get_api_package($client, "Draft", 1)->new({
      baseCampaignId => $base_campaign_id,
      draftId        => $draft_id,
      draftStatus    => "ARCHIVED"
  });
  my $set_op = get_api_package($client, "DraftOperation", 1)->new({
      operator => "SET",
      operand  => $archived
  });
  return $client->DraftService()->mutate({operations => [$set_op]});
}
# Creates a trial from draft $draft_id on campaign $base_campaign_id with a
# 50% traffic split and returns the created Trial object.
sub create_trial {
  my $client = shift;
  my $base_campaign_id = shift;
  my $draft_id = shift;
  my $trial = get_api_package($client, "Trial", 1)->new({
      draftId => $draft_id,
      baseCampaignId => $base_campaign_id,
      name => sprintf("Test Trial #%s", uniqid()),
      trafficSplitPercent => 50,
  });
  # Create operation.
  my $trial_operation = get_api_package($client, "TrialOperation", 1)->new({
      operator => "ADD",
      operand => $trial
  });
  # Add trial.
  my $result =
    $client->TrialService()->mutate({operations => [$trial_operation]});
  $trial = $result->get_value()->[0];
  return $trial;
}
# Sets the trial's status to ARCHIVED via a SET mutate and returns the
# TrialService mutate result.
sub delete_trial {
  my ($client, $trial_id) = @_;

  my $archived = get_api_package($client, "Trial", 1)->new({
      id     => $trial_id,
      status => "ARCHIVED"
  });
  my $set_op = get_api_package($client, "TrialOperation", 1)->new({
      operator => "SET",
      operand  => $archived
  });
  return $client->TrialService()->mutate({operations => [$set_op]});
}
return 1;
| 12,748
|
https://github.com/briancatraguna/SixPackTeamApp/blob/master/.gitignore
|
Github Open Source
|
Open Source
|
MIT
| 2,021
|
SixPackTeamApp
|
briancatraguna
|
Ignore List
|
Code
| 1
| 16
|
"scraping/lapor_scraping.py"
| 16,705
|
https://github.com/ncsoft/Unreal.js/blob/master/Examples/Content/Scripts/demos/demo-rest.js
|
Github Open Source
|
Open Source
|
LicenseRef-scancode-unknown-license-reference, BSD-3-Clause
| 2,023
|
Unreal.js
|
ncsoft
|
JavaScript
|
Code
| 132
| 509
|
/// <reference path="../typings/ue.d.ts" />
let UMG = require('UMG')
let viewport_widget = require('./lib/viewport-widget')
let rest_texture = require('./lib/remote-texture')
// Demo UI: an editable URL field plus a 256x256 image box.  Whenever the
// text changes, the image at that URL is fetched (via ./lib/remote-texture)
// and shown.  `defer` receives a cleanup callback that destroys the widget.
async function demo(defer) {
    let elem = viewport_widget()
    defer(_ => elem.destroy())

    const url = 'https://github.com/ncsoft/Unreal.js-core/raw/master/Resources/Icon128.png'
    const font = {
        FontObject : GEngine.SmallFont,
        Size : 25
    }

    let texture  // NOTE(review): never read; set_image takes textures directly
    let set_image

    // Fetches imageUrl as a texture and pushes it into the UImage widget.
    async function set_url(imageUrl) {
        if (!set_image) {
            // Image widget not linked yet; nothing to update.
            console.error('?',imageUrl)
            return
        }
        set_image(await rest_texture(imageUrl))
    }

    let editStyle = {
        Font: font,
        ColorAndOpacity: {
            SpecifiedColor: { R: 0.5, G: 0.8, B: 1, A: 1 },
            ColorUseRule: 'UseColor_Specified'
        }
    }

    let design = UMG.div({},
        UMG.text({},"Input url to show image :"),
        UMG(EditableText,{
            WidgetStyle:editStyle,
            HintText:'Url to show image',
            Text:url,
            // Re-fetch the image on every text change.
            OnTextChanged:text => {
                set_url(text)
            }
        }),
        UMG.span({},
            UMG(SizeBox,{WidthOverride:256,HeightOverride:256},
                UMG(UImage,{
                    // $link runs when the widget is constructed: capture a
                    // setter that swaps the image brush to a new texture.
                    $link:elem => set_image = texture => {
                        elem.SetBrushFromTexture(texture, false)
                    }
                })
            )
        )
    )

    elem.add_child(
        UMG(Border,{
            BrushColor:{A:0.4},
            Padding:{Left:100,Top:100,Right:100}
        }, design)
    )
}

module.exports = demo
| 4,969
|
https://github.com/cooperative-tech-club/club-website/blob/master/app/Policies/TeamPolicy.php
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,021
|
club-website
|
cooperative-tech-club
|
PHP
|
Code
| 134
| 373
|
<?php
namespace App\Policies;
use App\Model\User;
use App\Model\Team;
use Illuminate\Auth\Access\HandlesAuthorization;
class TeamPolicy
{
    use HandlesAuthorization;

    /**
     * Determine whether the user can see the teams.
     *
     * Any authenticated user may list teams.
     *
     * @param  \App\Model\User  $user
     * @return bool
     */
    public function viewAny(User $user)
    {
        return true;
    }

    /**
     * Determine whether the user can create teams.
     *
     * Restricted to leads.
     *
     * @param  \App\Model\User  $user
     * @return bool
     */
    public function create(User $user)
    {
        return $user->isLead();
    }

    /**
     * Determine whether the user can update the team.
     *
     * Leads may update any team; $team is part of the policy signature but
     * is not consulted.
     *
     * @param  \App\Model\User  $user
     * @param  \App\Model\Team  $team
     * @return bool
     */
    public function update(User $user, Team $team)
    {
        return $user->isLead();
    }

    /**
     * Determine whether the user can delete the team.
     *
     * Leads may delete any team; $team is part of the policy signature but
     * is not consulted.
     *
     * @param  \App\Model\User  $user
     * @param  \App\Model\Team  $team
     * @return bool
     */
    public function delete(User $user, Team $team)
    {
        return $user->isLead();
    }
}
| 19,541
|
https://github.com/dfki-ric-underactuated-lab/torque_limited_simple_pendulum/blob/master/software/python/simple_pendulum/simulation/simulation.py
|
Github Open Source
|
Open Source
|
MIT, Apache-2.0, LicenseRef-scancode-unknown-license-reference, BSD-3-Clause
| 2,023
|
torque_limited_simple_pendulum
|
dfki-ric-underactuated-lab
|
Python
|
Code
| 1,226
| 4,433
|
"""
Simulator
=========
"""
import time
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.animation import FuncAnimation
import matplotlib.animation as mplanimation
from matplotlib.patches import Arc, RegularPolygon
from numpy import radians as rad
class Simulator:
    """Simulates (and optionally animates) a pendulum plant.

    Holds the plant state, integrates the dynamics with Euler or
    Runge-Kutta steps, records a time/state/torque history, and can render
    a matplotlib animation of the motion.
    """

    def __init__(self, plant):
        """
        Simulator class, can simulate and animate the pendulum

        Parameters
        ----------
        plant: plant object
            (e.g. PendulumPlant from simple_pendulum.models.pendulum_plant.py)
        """
        self.plant = plant

        self.x = np.zeros(2*self.plant.dof)  # position, velocity
        self.t = 0.0  # time

        self.reset_data_recorder()

    def set_state(self, time, x):
        """
        set the state of the pendulum plant

        Parameters
        ----------
        time: float
            time, unit: s
        x: type as self.plant expects a state,
            state of the pendulum plant
        """
        self.x = np.copy(x)
        self.t = np.copy(float(time))

    def get_state(self):
        """
        Get current state of the plant

        Returns
        -------
        self.t : float,
            time, unit: s
        self.x : type as self.plant expects a state
            plant state
        """
        return self.t, self.x

    def reset_data_recorder(self):
        """
        Reset the internal data recorder of the simulator
        """
        self.t_values = []
        self.x_values = []
        self.tau_values = []

    def record_data(self, time, x, tau):
        """
        Records data in the internal data recorder

        Parameters
        ----------
        time : float
            time to be recorded, unit: s
        x : type as self.plant expects a state
            state to be recorded, units: rad, rad/s
        tau : type as self.plant expects an actuation
            torque to be recorded, unit: Nm
        """
        self.t_values.append(np.copy(time))
        self.x_values.append(np.copy(x))
        self.tau_values.append(np.copy(tau))

    def euler_integrator(self, t, y, tau):
        """
        Euler integrator for the simulated plant

        Parameters
        ----------
        t : float
            time, unit: s
        y: type as self.plant expects a state
            state of the pendulum
        tau: type as self.plant expects an actuation
            torque input

        Returns
        -------
        array-like : the Euler integrand
        """
        return self.plant.rhs(t, y, tau)

    def runge_integrator(self, t, y, dt, tau):
        """
        Runge-Kutta integrator for the simulated plant

        Parameters
        ----------
        t : float
            time, unit: s
        y: type as self.plant expects a state
            state of the pendulum
        dt: float
            time step, unit: s
        tau: type as self.plant expects an actuation
            torque input

        Returns
        -------
        array-like : the Runge-Kutta integrand
        """
        # Classic RK4: four slope evaluations combined with 1-2-2-1 weights.
        k1 = self.plant.rhs(t, y, tau)
        k2 = self.plant.rhs(t + 0.5 * dt, y + 0.5 * dt * k1, tau)
        k3 = self.plant.rhs(t + 0.5 * dt, y + 0.5 * dt * k2, tau)
        k4 = self.plant.rhs(t + dt, y + dt * k3, tau)
        return (k1 + 2 * (k2 + k3) + k4) / 6.0

    def step(self, tau, dt, integrator="runge_kutta"):
        """
        Performs a single step of the plant.

        Parameters
        ----------
        tau: type as self.plant expects an actuation
            torque input
        dt: float
            time step, unit: s
        integrator: string
            "euler" for euler integrator
            "runge_kutta" for Runge-Kutta integrator
        """
        if integrator == "runge_kutta":
            self.x += dt * self.runge_integrator(self.t, self.x, dt, tau)
        elif integrator == "euler":
            self.x += dt * self.euler_integrator(self.t, self.x, tau)
        else:
            raise NotImplementedError(
                f'Sorry, the integrator {integrator} is not implemented.')
        self.t += dt
        # Copy the state so later in-place updates do not alter the history.
        self.record_data(self.t, self.x.copy(), tau)

    def simulate(self, t0, x0, tf, dt, controller=None,
                 integrator="runge_kutta"):
        """
        Simulates the plant over a period of time.

        Parameters
        ----------
        t0: float
            start time, unit s
        x0: type as self.plant expects a state
            start state
        tf: float
            final time, unit: s
        controller: A controller object of the type of the
                    AbstractController in
                    simple_pendulum.controllers.abstract_controller.py
                    If None, a free pendulum is simulated.
        integrator: string
            "euler" for euler integrator,
            "runge_kutta" for Runge-Kutta integrator

        Returns
        -------
        self.t_values : list
            a list of time values
        self.x_values : list
            a list of states
        self.tau_values : list
            a list of torques
        """
        self.set_state(t0, x0)
        self.reset_data_recorder()

        while (self.t <= tf):
            if controller is not None:
                _, _, tau = controller.get_control_output(
                    meas_pos=self.x[:self.plant.dof],
                    meas_vel=self.x[self.plant.dof:],
                    meas_tau=np.zeros(self.plant.dof),
                    meas_time=self.t)
            else:
                # No controller: free (unactuated) pendulum.
                tau = np.zeros(self.plant.n_actuators)
            self.step(tau, dt, integrator=integrator)

        return self.t_values, self.x_values, self.tau_values

    def _animation_init(self):
        """
        init of the animation plot
        """
        self.animation_ax.set_xlim(self.plant.workspace_range[0][0],
                                   self.plant.workspace_range[0][1])
        self.animation_ax.set_ylim(self.plant.workspace_range[1][0],
                                   self.plant.workspace_range[1][1])
        self.animation_ax.set_xlabel("x position [m]")
        self.animation_ax.set_ylabel("y position [m]")
        # All plots except the last (the time text) are line/marker artists.
        for ap in self.animation_plots[:-1]:
            ap.set_data([], [])
        self.animation_plots[-1].set_text("t = 0.000")

        # One torque-arrow glyph (arc + head) per link.
        self.tau_arrowarcs = []
        self.tau_arrowheads = []
        for link in range(self.plant.n_links):
            arc, head = get_arrow(radius=0.001,
                                  centX=0,
                                  centY=0,
                                  angle_=110,
                                  theta2_=320,
                                  color_="red")
            self.tau_arrowarcs.append(arc)
            self.tau_arrowheads.append(head)
            self.animation_ax.add_patch(arc)
            self.animation_ax.add_patch(head)

        return self.animation_plots + self.tau_arrowarcs + self.tau_arrowheads

    def _animation_step(self, par_dict):
        """
        simulation of a single step which also updates the animation plot
        """
        t0 = time.time()
        dt = par_dict["dt"]
        controller = par_dict["controller"]
        integrator = par_dict["integrator"]
        if controller is not None:
            _, _, tau = controller.get_control_output(
                meas_pos=self.x[:self.plant.dof],
                meas_vel=self.x[self.plant.dof:],
                meas_tau=np.zeros(self.plant.dof),
                meas_time=self.t)
        else:
            tau = np.zeros(self.plant.n_actuators)
        self.step(tau, dt, integrator=integrator)
        ee_pos = self.plant.forward_kinematics(self.x[:self.plant.dof])
        # Prepend the base so each link is a segment ee_pos[i] -> ee_pos[i+1].
        ee_pos.insert(0, self.plant.base)
        ani_plot_counter = 0
        for link in range(self.plant.n_links):
            # Link bar.
            self.animation_plots[ani_plot_counter].set_data(
                [ee_pos[link][0], ee_pos[link+1][0]],
                [ee_pos[link][1], ee_pos[link+1][1]])
            ani_plot_counter += 1
            # End-effector marker.
            # NOTE(review): scalar args to set_data; matplotlib >= 3.7
            # requires sequences — confirm the pinned matplotlib version.
            self.animation_plots[ani_plot_counter].set_data(ee_pos[link+1][0],
                                                            ee_pos[link+1][1])
            ani_plot_counter += 1

            set_arrow_properties(self.tau_arrowarcs[link],
                                 self.tau_arrowheads[link],
                                 float(np.squeeze(tau)),
                                 ee_pos[link][0],
                                 ee_pos[link][1])
        # The last artist is the "t = ..." text; parse, advance, rewrite.
        t = float(self.animation_plots[ani_plot_counter].get_text()[4:])
        t = round(t+dt, 3)
        self.animation_plots[ani_plot_counter].set_text(f"t = {t}")

        # if the animation runs slower than real time
        # the time display will be red
        if time.time() - t0 > dt:
            self.animation_plots[ani_plot_counter].set_color("red")
        else:
            self.animation_plots[ani_plot_counter].set_color("black")
        return self.animation_plots + self.tau_arrowarcs + self.tau_arrowheads

    def _ps_init(self):
        """
        init of the phase space animation plot
        """
        self.ps_ax.set_xlim(-np.pi, np.pi)
        self.ps_ax.set_ylim(-10, 10)
        self.ps_ax.set_xlabel("degree [rad]")
        self.ps_ax.set_ylabel("velocity [rad/s]")
        for ap in self.ps_plots:
            ap.set_data([], [])
        return self.ps_plots

    def _ps_update(self, i):
        """
        update of the phase space animation plot
        """
        # One phase-space trace per degree of freedom: position vs velocity.
        for d in range(self.plant.dof):
            self.ps_plots[d].set_data(
                np.asarray(self.x_values).T[d],
                np.asarray(self.x_values).T[self.plant.dof+d])
        return self.ps_plots

    def simulate_and_animate(self, t0, x0, tf, dt, controller=None,
                             integrator="runge_kutta", phase_plot=False,
                             save_video=False, video_name="video"):
        """
        Simulation and animation of the plant motion.
        The animation is only implemented for 2d serial chains.
        input:
        Simulates the plant over a period of time.

        Parameters
        ----------
        t0: float
            start time, unit s
        x0: type as self.plant expects a state
            start state
        tf: float
            final time, unit: s
        controller: A controller object of the type of the
                    AbstractController in
                    simple_pendulum.controllers.abstract_controller.py
                    If None, a free pendulum is simulated.
        integrator: string
            "euler" for euler integrator,
            "runge_kutta" for Runge-Kutta integrator
        phase_plot: bool
            whether to show a plot of the phase space together with
            the animation
        save_video: bool
            whether to save the animation as mp4 video
        video_name: string
            if save_video, the name of the file where the video will be stored

        Returns
        -------
        self.t_values : list
            a list of time values
        self.x_values : list
            a list of states
        self.tau_values : list
            a list of torques
        """
        self.set_state(t0, x0)
        self.reset_data_recorder()

        fig = plt.figure(figsize=(20, 20))
        self.animation_ax = plt.axes()
        self.animation_plots = []
        # Two artists per link (bar + end-effector marker) ...
        for link in range(self.plant.n_links):
            bar_plot, = self.animation_ax.plot([], [], "-",
                                               lw=5, color="black")
            self.animation_plots.append(bar_plot)
            ee_plot, = self.animation_ax.plot([], [], "o",
                                              markersize=25.0, color="blue")
            self.animation_plots.append(ee_plot)
        # ... plus one trailing text artist for the time display.
        text_plot = self.animation_ax.text(0.15, 0.85, [],
                                           fontsize=40,
                                           transform=fig.transFigure)
        self.animation_plots.append(text_plot)

        num_steps = int(tf / dt)
        par_dict = {}
        par_dict["dt"] = dt
        par_dict["controller"] = controller
        par_dict["integrator"] = integrator
        frames = num_steps*[par_dict]

        self.animation = FuncAnimation(fig, self._animation_step, frames=frames,
                                       init_func=self._animation_init, blit=True,
                                       repeat=False, interval=dt*1000)

        if phase_plot:
            ps_fig = plt.figure(figsize=(10, 10))
            self.ps_ax = plt.axes()
            self.ps_plots = []
            for d in range(self.plant.dof):
                ps_plot, = self.ps_ax.plot([], [], "-", lw=1.0, color="blue")
                self.ps_plots.append(ps_plot)
            self.animation2 = FuncAnimation(ps_fig, self._ps_update,
                                            init_func=self._ps_init, blit=True,
                                            repeat=False, interval=dt*1000)

        if save_video:
            print(f"Saving video to {video_name}.mp4")
            Writer = mplanimation.writers['ffmpeg']
            writer = Writer(fps=60, bitrate=1800)
            self.animation.save(video_name+'.mp4', writer=writer)
            print("Saving video done.")
        plt.show()

        return self.t_values, self.x_values, self.tau_values
def get_arrow(radius, centX, centY, angle_, theta2_, color_='black'):
    """Builds a circular-arrow glyph: an Arc plus a triangular head.

    The arc spans from 0 to ``theta2_`` degrees (rotated by ``angle_``),
    and the head is a small triangle placed at the arc's end point,
    oriented along the arc.  Returns the (arc, head) patch pair.
    """
    arc = Arc(
        [centX, centY], radius, radius,
        angle=angle_, theta1=0, theta2=theta2_,
        capstyle='round', linestyle='-', lw=2, color=color_)
    # End point of the arc, at half the glyph radius from the center.
    tip_angle = rad(theta2_ + angle_)
    tip_x = centX + 0.5 * radius * np.cos(tip_angle)
    tip_y = centY + 0.5 * radius * np.sin(tip_angle)
    head = RegularPolygon((tip_x, tip_y), 3,
                          radius=radius / 20,
                          orientation=tip_angle,
                          color=color_)
    return arc, head
def set_arrow_properties(arc, head, tau, x, y):
    """Scales, places and orients a torque arrow (arc + head) at (x, y).

    The arrow diameter grows with |tau| (clipped to at most 1) and the
    branch taken on the sign of tau changes the arc span and head
    orientation.  Arrows with |tau| <= 0.01 are hidden entirely.
    """
    # Diameter: 0.1 base plus 0.1 per unit of |tau|, capped at 1.
    tau_rad = np.clip(0.1*np.abs(tau) + 0.1, -1, 1)
    if tau > 0:
        theta2 = -40
        arrow_angle = 110
        endX = x+(tau_rad/2)*np.cos(rad(theta2+arrow_angle))
        endY = y+(tau_rad/2)*np.sin(rad(theta2+arrow_angle))
        orientation = rad(arrow_angle + theta2)
    else:
        # NOTE(review): this branch offsets the head by rad(arrow_angle)
        # only (no theta2), unlike the positive branch — confirm this
        # asymmetry is intentional.
        theta2 = 320
        arrow_angle = 110
        endX = x+(tau_rad/2)*np.cos(rad(arrow_angle))
        endY = y+(tau_rad/2)*np.sin(rad(arrow_angle))
        orientation = rad(-arrow_angle-theta2)
    arc.center = [x, y]
    arc.width = tau_rad
    arc.height = tau_rad
    arc.angle = arrow_angle
    arc.theta2 = theta2
    head.xy = [endX, endY]
    head.radius = tau_rad/20
    head.orientation = orientation

    # Hide negligible torques instead of drawing a tiny dot.
    if np.abs(tau) <= 0.01:
        arc.set_visible(False)
        head.set_visible(False)
    else:
        arc.set_visible(True)
        head.set_visible(True)
| 25,304
|
https://github.com/hieund97/MyCMS/blob/master/app/Models/User.php
|
Github Open Source
|
Open Source
|
MIT
| 2,022
|
MyCMS
|
hieund97
|
PHP
|
Code
| 135
| 401
|
<?php
namespace App\Models;
use Illuminate\Notifications\Notifiable;
use Illuminate\Contracts\Auth\MustVerifyEmail;
use Illuminate\Foundation\Auth\User as Authenticatable;
class User extends Authenticatable
{
use Notifiable;
/**
* The attributes that are mass assignable.
*
* @var array
*/
protected $guarded = ['id'];
protected $date = [
'created_at', 'updated_at'
];
// protected $fillable = [
// 'first_name', 'email', 'password', 'last_name', 'user_name', 'address', 'city', 'country', 'postal_code', 'about_me', 'phone'
// ];
/**
* The attributes that should be hidden for arrays.
*
* @var array
*/
protected $hidden = [
'password', 'remember_token',
];
/**
* The attributes that should be cast to native types.
*
* @var array
*/
protected $casts = [
'email_verified_at' => 'datetime',
];
public function blog(){
return $this ->hasMany('App\Models\Blog', 'id', 'user_id');
}
public function order()
{
return $this->hasMany('App\Models\Order', 'order_id', 'id');
}
public function review()
{
return $this->hasMany('App\Models\Review', 'user_id', 'id');
}
}
| 45,985
|
https://github.com/sapcc/python-designateclient/blob/master/designateclient/v2/cli/tsigkeys.py
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,022
|
python-designateclient
|
sapcc
|
Python
|
Code
| 402
| 1,647
|
# Copyright 2017 SAP SE
#
# Author: Rudolf Vriend <rudolf.vriend@sap.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from osc_lib.command import command
from designateclient import utils
from designateclient.v2.cli import common
from designateclient.v2.utils import get_all
LOG = logging.getLogger(__name__)
def _format_tsigkey(tsigkey):
    """Strips fields not wanted in display output (mutates in place)."""
    # 'links' is API plumbing, not useful to show; tolerate its absence.
    if 'links' in tsigkey:
        del tsigkey['links']
class ListTSIGKeysCommand(command.Lister):
    """List tsigkeys"""

    # Columns shown in the listing, in display order.
    columns = ['id', 'name', 'algorithm', 'secret', 'scope', 'resource_id']

    def get_parser(self, prog_name):
        parser = super(ListTSIGKeysCommand, self).get_parser(prog_name)

        parser.add_argument('--name', help="TSIGKey NAME", required=False)
        parser.add_argument('--algorithm', help="TSIGKey algorithm",
                            required=False)
        parser.add_argument('--scope', help="TSIGKey scope", required=False)

        common.add_all_common_options(parser)

        return parser

    def take_action(self, parsed_args):
        client = self.app.client_manager.dns
        common.set_all_common_headers(client, parsed_args)

        # Build a server-side filter from whichever options were supplied.
        criterion = {}
        if parsed_args.name is not None:
            criterion["name"] = parsed_args.name
        if parsed_args.algorithm is not None:
            criterion["algorithm"] = parsed_args.algorithm
        if parsed_args.scope is not None:
            criterion["scope"] = parsed_args.scope

        data = get_all(client.tsigkeys.list, criterion)

        cols = self.columns
        return cols, (utils.get_item_properties(s, cols) for s in data)
class ShowTSIGKeyCommand(command.ShowOne):
    """Show tsigkey details"""

    def get_parser(self, prog_name):
        parser = super(ShowTSIGKeyCommand, self).get_parser(prog_name)

        parser.add_argument('id', help="TSIGKey ID")

        common.add_all_common_options(parser)

        return parser

    def take_action(self, parsed_args):
        client = self.app.client_manager.dns
        common.set_all_common_headers(client, parsed_args)

        data = client.tsigkeys.get(parsed_args.id)
        _format_tsigkey(data)
        # ShowOne expects parallel (column-names, values) sequences.
        return zip(*sorted(data.items()))
class CreateTSIGKeyCommand(command.ShowOne):
    """Create new tsigkey"""

    def get_parser(self, prog_name):
        parser = super(CreateTSIGKeyCommand, self).get_parser(prog_name)

        # All attributes are mandatory on creation.
        parser.add_argument('--name', help="TSIGKey Name", required=True)
        parser.add_argument('--algorithm', help="TSIGKey algorithm",
                            required=True)
        parser.add_argument('--secret', help="TSIGKey secret", required=True)
        parser.add_argument('--scope', help="TSIGKey scope", required=True)
        parser.add_argument('--resource-id', help="TSIGKey resource_id",
                            required=True)

        common.add_all_common_options(parser)

        return parser

    def take_action(self, parsed_args):
        client = self.app.client_manager.dns
        common.set_all_common_headers(client, parsed_args)

        data = client.tsigkeys.create(parsed_args.name, parsed_args.algorithm,
                                      parsed_args.secret, parsed_args.scope,
                                      parsed_args.resource_id)
        _format_tsigkey(data)
        # ShowOne expects parallel (column-names, values) sequences.
        return zip(*sorted(data.items()))
class SetTSIGKeyCommand(command.ShowOne):
    """Set tsigkey properties"""

    def get_parser(self, prog_name):
        parser = super(SetTSIGKeyCommand, self).get_parser(prog_name)

        parser.add_argument('id', help="TSIGKey ID")
        # All attributes are optional on update; only supplied ones change.
        parser.add_argument('--name', help="TSIGKey Name")
        parser.add_argument('--algorithm', help="TSIGKey algorithm")
        parser.add_argument('--secret', help="TSIGKey secret")
        parser.add_argument('--scope', help="TSIGKey scope")

        common.add_all_common_options(parser)

        return parser

    def take_action(self, parsed_args):
        # Send only the fields the caller actually provided.
        data = {}

        if parsed_args.name:
            data['name'] = parsed_args.name

        if parsed_args.algorithm:
            data['algorithm'] = parsed_args.algorithm

        if parsed_args.secret:
            data['secret'] = parsed_args.secret

        if parsed_args.scope:
            data['scope'] = parsed_args.scope

        client = self.app.client_manager.dns
        common.set_all_common_headers(client, parsed_args)

        data = client.tsigkeys.update(parsed_args.id, data)
        _format_tsigkey(data)
        # ShowOne expects parallel (column-names, values) sequences.
        return zip(*sorted(data.items()))
class DeleteTSIGKeyCommand(command.Command):
    """Delete tsigkey"""

    def get_parser(self, prog_name):
        parser = super(DeleteTSIGKeyCommand, self).get_parser(prog_name)

        parser.add_argument('id', help="TSIGKey ID")

        common.add_all_common_options(parser)

        return parser

    def take_action(self, parsed_args):
        client = self.app.client_manager.dns
        common.set_all_common_headers(client, parsed_args)

        client.tsigkeys.delete(parsed_args.id)

        LOG.info('TSIGKey %s was deleted', parsed_args.id)
| 19,881
|
https://github.com/shalles/shalles-kit/blob/master/SPBA/Demo/sync.sh
|
Github Open Source
|
Open Source
|
ISC
| 2,015
|
shalles-kit
|
shalles
|
Shell
|
Code
| 5
| 32
|
rsync -rzcv --rsh=ssh src/export /Users/shalles/Workspace/Demo/Map/src/remote
| 1,186
|
https://github.com/Alexander-Serov/Placenta-Morphometry/blob/master/core/data/utility/Exception.cpp
|
Github Open Source
|
Open Source
|
MIT
| null |
Placenta-Morphometry
|
Alexander-Serov
|
C++
|
Code
| 49
| 189
|
#include"core/data/utility/Exception.h"
#ifndef NOCIMG
#include"cImg/CImg.h"
#endif
namespace pop
{
// Default-constructed exceptions carry the generic message "ERROR".
pexception::pexception(){_message = "ERROR";}
pexception::pexception(const char * message) {_message = message;}
pexception::pexception(std::string message) {_message = message;}
// Returns the stored message; the pointer remains valid for the lifetime
// of this exception object (it points into _message).
const char * pexception::what() const throw() { return _message.c_str(); }
// Shows the message in a CImg GUI dialog; compiled to a no-op under -DNOCIMG.
void pexception::display() const throw() {
#ifndef NOCIMG
    cimg_library::cimg::dialog("Population exception", _message.c_str(),"Abort");
#endif
}
pexception::~pexception() throw() {}
}
| 14,696
|
https://github.com/hypefide/ether/blob/master/src/utilities/vendors.sass
|
Github Open Source
|
Open Source
|
MIT
| null |
ether
|
hypefide
|
Sass
|
Code
| 6
| 58
|
@import url('https://fonts.googleapis.com/css?family=Work+Sans:400,700,900&display=swap') // sass-lint:disable-line no-url-domains, no-url-protocols
| 49,602
|
https://github.com/hcmarchezi/manipulator-robot-api/blob/master/application/appmain.cpp
|
Github Open Source
|
Open Source
|
MIT
| null |
manipulator-robot-api
|
hcmarchezi
|
C++
|
Code
| 127
| 512
|
#include "appmain.h"
#include "robotpersistence.h"
#include "geometryreader.h"
#include "linktyperepository.h"
#include "linktypepersistence.h"
namespace TMC {

// Starts with an empty robot owned by this object.
AppMain::AppMain():_robot(new PDC::Robot())
{
}

AppMain::~AppMain()
{
    deleteRobot();
}

// Loads every link-type file in the list, attaches the 3DS geometry named
// by its GeometrySource, and registers it in the global LinkTypeRepository.
// NOTE(review): ownership of the loaded LinkType/Geometry appears to pass
// to the repository — confirm against LinkTypeRepository's contract.
void AppMain::loadLinkTypeList(const std::vector<std::string>& linktypeFilePaths)
{
    DMC::GeometryReader reader;
    for(unsigned int index=0; index < linktypeFilePaths.size(); index++)
    {
        PDC::LinkType* linkType = DMC::LinkTypePersistence::LoadFromFile(linktypeFilePaths[index]);
        PDC::GeometrySource geometrySource = linkType->GetGeometrySource();
        PDC::Geometry* geometry = reader.read3DSFile(geometrySource);
        linkType->SetGeometry(geometry);
        PDC::LinkTypeRepository::Add(linkType);
    }
}

///////////// user actions //////////////////

// Discards the current robot and replaces it with a fresh empty one.
void AppMain::newRobot()
{
    deleteRobot();
    _robot = new PDC::Robot();
}

// Replaces the current robot with one loaded from `filename` and remembers
// the path for subsequent saves.
void AppMain::loadRobot(const std::string& filename)
{
    deleteRobot();
    _robot = DMC::RobotPersistence::Load(filename);
    _robotFilename = filename;
}

// Persists the current robot to `filename`.  Silently does nothing when no
// robot exists (and then does NOT update the remembered filename).
void AppMain::saveRobot(const std::string& filename)
{
    if (_robot == NULL) return;
    DMC::RobotPersistence::Save(_robot, filename);
    _robotFilename = filename;
}

////////////////////////////////////////////////////

// Frees the owned robot; safe to call when none is loaded.
void AppMain::deleteRobot()
{
    if (_robot != NULL)
    {
        delete _robot;
        _robot = NULL;
    }
}

// Borrowed pointer to the current robot; may be NULL after deleteRobot().
PDC::Robot* AppMain::robot()
{
    return _robot;
}

// Path of the last successful load/save; empty before the first one.
const std::string& AppMain::robotFilename()
{
    return _robotFilename;
}

} // namespace TMC
| 50,180
|
https://github.com/JetBrains/YouTrackSharp/blob/master/src/YouTrackSharp/ColorIndices.cs
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,023
|
YouTrackSharp
|
JetBrains
|
C#
|
Code
| 418
| 989
|
using JetBrains.Annotations;
namespace YouTrackSharp
{
    /// <summary>
    /// Represents the color indices used by YouTrack, as explained on <a href="https://www.jetbrains.com/help/youtrack/devportal/Color-Indices.html">Color Indices List</a>.
    /// </summary>
    [PublicAPI]
    public static class ColorIndices
    {
        /// <summary>
        /// Supported color indices. The array index corresponds to the documented <a href="https://www.jetbrains.com/help/youtrack/devportal/Color-Indices.html">color index</a>.
        /// </summary>
        // NOTE(review): 'readonly' protects only the reference; the array
        // elements themselves remain mutable by callers. The entry order
        // IS the contract (index == YouTrack color index) -- do not reorder.
        public static readonly YouTrackColor[] Colors = {
            new YouTrackColor { Foreground = "#444", Background = "#fff" },
            new YouTrackColor { Foreground = "#fff", Background = "#8d5100" },
            new YouTrackColor { Foreground = "#fff", Background = "#ce6700" },
            new YouTrackColor { Foreground = "#fff", Background = "#409600" },
            new YouTrackColor { Foreground = "#fff", Background = "#0070e4" },
            new YouTrackColor { Foreground = "#fff", Background = "#900052" },
            new YouTrackColor { Foreground = "#fff", Background = "#0050a1" },
            new YouTrackColor { Foreground = "#fff", Background = "#2f9890" },
            new YouTrackColor { Foreground = "#fff", Background = "#8e1600" },
            new YouTrackColor { Foreground = "#fff", Background = "#dc0083" },
            new YouTrackColor { Foreground = "#fff", Background = "#7dbd36" },
            new YouTrackColor { Foreground = "#fff", Background = "#ff7123" },
            new YouTrackColor { Foreground = "#fff", Background = "#ff7bc3" },
            new YouTrackColor { Foreground = "#444", Background = "#fed74a" },
            new YouTrackColor { Foreground = "#444", Background = "#b7e281" },
            new YouTrackColor { Foreground = "#45818e", Background = "#d8f7f3" },
            new YouTrackColor { Foreground = "#888", Background = "#e6e6e6" },
            new YouTrackColor { Foreground = "#4da400", Background = "#e6f6cf" },
            new YouTrackColor { Foreground = "#b45f06", Background = "#ffee9c" },
            new YouTrackColor { Foreground = "#444", Background = "#ffc8ea" },
            new YouTrackColor { Foreground = "#fff", Background = "#e30000" },
            new YouTrackColor { Foreground = "#3d85c6", Background = "#e0f1fb" },
            new YouTrackColor { Foreground = "#dc5766", Background = "#fce5f1" },
            new YouTrackColor { Foreground = "#b45f06", Background = "#f7e9c1" },
            new YouTrackColor { Foreground = "#444", Background = "#92e1d5" },
            new YouTrackColor { Foreground = "#444", Background = "#a6e0fc" },
            new YouTrackColor { Foreground = "#444", Background = "#e0c378" },
            new YouTrackColor { Foreground = "#444", Background = "#bababa" },
            new YouTrackColor { Foreground = "#fff", Background = "#25beb2" },
            new YouTrackColor { Foreground = "#fff", Background = "#42a3df" },
            new YouTrackColor { Foreground = "#fff", Background = "#878787" },
            new YouTrackColor { Foreground = "#fff", Background = "#4d4d4d" },
            new YouTrackColor { Foreground = "#fff", Background = "#246512" },
            new YouTrackColor { Foreground = "#fff", Background = "#00665e" },
            new YouTrackColor { Foreground = "#fff", Background = "#553000" },
            new YouTrackColor { Foreground = "#fff", Background = "#1a1a1a" }
        };
    }
}
| 15,858
|
https://github.com/rajethanm4/Programmers-Community/blob/master/Basic/Reverse A String/solutionByAshutosh.py
|
Github Open Source
|
Open Source
|
MIT
| 2,022
|
Programmers-Community
|
rajethanm4
|
Python
|
Code
| 8
| 35
|
def reverse(a):
    """Return ``a`` with its elements in reverse order.

    Works for any sliceable sequence (str, list, tuple); the input is
    left unmodified.
    """
    step_back = slice(None, None, -1)
    return a[step_back]
# Demo: read a line from stdin and print it reversed.
a=input("enter your string:")
print(reverse(a))
| 18,886
|
https://github.com/demon-xxi/r8/blob/master/src/test/java/com/android/tools/r8/utils/codeinspector/FoundAnnotationSubject.java
|
Github Open Source
|
Open Source
|
Apache-2.0, BSD-3-Clause
| 2,021
|
r8
|
demon-xxi
|
Java
|
Code
| 105
| 279
|
// Copyright (c) 2018, the R8 project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
package com.android.tools.r8.utils.codeinspector;
import com.android.tools.r8.errors.Unreachable;
import com.android.tools.r8.graph.DexAnnotation;
import com.android.tools.r8.graph.DexEncodedAnnotation;
/** Subject wrapping an annotation that was actually found in the DEX output. */
public class FoundAnnotationSubject extends AnnotationSubject {

    // The concrete annotation this subject wraps (never null).
    private final DexAnnotation annotation;

    public FoundAnnotationSubject(DexAnnotation annotation) {
        this.annotation = annotation;
    }

    /** Always true: this subject type is only created for found annotations. */
    @Override
    public boolean isPresent() {
        return true;
    }

    /** Annotations are never reported as renamed by this subject. */
    @Override
    public boolean isRenamed() {
        return false;
    }

    /** Unsupported query for annotations; always throws. */
    @Override
    public boolean isSynthetic() {
        throw new Unreachable("Cannot determine if an annotation is synthetic");
    }

    /** The underlying encoded annotation payload. */
    @Override
    public DexEncodedAnnotation getAnnotation() {
        return annotation.annotation;
    }
}
| 41,812
|
https://github.com/rtsonneveld/Ray1Map/blob/master/Assets/Scripts/DataTypes/GBAIsometric/Common/Object/GBAIsometric_ObjectType.cs
|
Github Open Source
|
Open Source
|
MIT
| null |
Ray1Map
|
rtsonneveld
|
C#
|
Code
| 57
| 180
|
namespace R1Engine
{
    // Serializable object-type entry: a flags word plus a pointer to the
    // type's data block, which is resolved and (de)serialized in place.
    public class GBAIsometric_ObjectType : R1Serializable
    {
        public uint ObjFlags { get; set; }
        public Pointer DataPointer { get; set; }
        public GBAIsometric_ObjectTypeData Data { get; set; }

        public override void SerializeImpl(SerializerObject s)
        {
            ObjFlags = s.Serialize<uint>(ObjFlags, name: nameof(ObjFlags));
            DataPointer = s.SerializePointer(DataPointer, name: nameof(DataPointer));
            // Follow DataPointer and (de)serialize the referenced struct there.
            Data = s.DoAt(DataPointer, () => s.SerializeObject<GBAIsometric_ObjectTypeData>(Data, name: nameof(Data)));
        }
    }
}
| 34,914
|
https://github.com/Altinn/altinn-studio/blob/master/frontend/packages/ux-editor/src/components/config/componentSpecificContent/Button/ButtonComponent.tsx
|
Github Open Source
|
Open Source
|
BSD-3-Clause
| 2,023
|
altinn-studio
|
Altinn
|
TSX
|
Code
| 170
| 658
|
import React from 'react';
import { FieldSet, Select } from '@digdir/design-system-react';
import classes from './ButtonComponent.module.css';
import { useText } from '../../../../hooks';
import { EditSettings, IGenericEditComponent } from '../../componentConfig';
import { ComponentType } from 'app-shared/types/ComponentType';
import { EditTextResourceBinding } from '../../editModal/EditTextResourceBinding';
import { EditTextResourceBindings } from '../../editModal/EditTextResourceBindings';
/**
 * Config panel for button components: lets the user switch between a
 * submit Button and NavigationButtons, and edit the matching text
 * resource bindings.
 */
export const ButtonComponent = ({ component, handleComponentChange }: IGenericEditComponent) => {
  const t = useText();

  // Swap the component type; works on a shallow copy so the incoming
  // component object is never mutated directly.
  const handleButtonTypeChange = (selected: string) => {
    const componentCopy = { ...component };
    if (!componentCopy.textResourceBindings) {
      componentCopy.textResourceBindings = {};
    }
    if (selected === ComponentType.NavigationButtons) {
      componentCopy.type = ComponentType.NavigationButtons;
      componentCopy.showBackButton = true;
    } else if (selected === ComponentType.Button) {
      componentCopy.type = ComponentType.Button;
      // Navigation-only props are dropped when returning to a plain button.
      delete componentCopy.showPrev;
      delete componentCopy.showBackButton;
    }
    handleComponentChange(componentCopy);
  };

  // Options for the button-type selector.
  const types = [
    {
      value: ComponentType.Button,
      label: t('ux_editor.modal_properties_button_type_submit'),
    },
    {
      value: ComponentType.NavigationButtons,
      label: t('ux_editor.modal_properties_button_type_navigation'),
    },
  ];

  return (
    <FieldSet className={classes.root}>
      <div>
        <Select
          label={t('ux_editor.modal_properties_button_type_helper')}
          options={types}
          // NOTE(review): .find(...) returns undefined when component.type is
          // neither Button nor NavigationButtons, which would throw here --
          // confirm this panel is only rendered for those two types.
          value={types.find((element) => element.value === component.type).value}
          onChange={handleButtonTypeChange}
        />
      </div>
      {component.type === ComponentType.Button && (
        <EditTextResourceBinding
          component={component}
          handleComponentChange={handleComponentChange}
          textKey={EditSettings.Title}
          labelKey={`ux_editor.modal_properties_textResourceBindings_${EditSettings.Title}`}
          placeholderKey={`ux_editor.modal_properties_textResourceBindings_${EditSettings.Title}_add`}
        />
      )}
      {component.type === ComponentType.NavigationButtons && (
        <EditTextResourceBindings
          component={component}
          handleComponentChange={handleComponentChange}
          textResourceBindingKeys={['next', 'back']}
        />
      )}
    </FieldSet>
  );
};
| 9,182
|
https://github.com/eugenebokhan/metal-compute-tools/blob/master/Sources/MetalComputeTools/Kernels/TextureResize/TextureResize.metal
|
Github Open Source
|
Open Source
|
MIT
| 2,021
|
metal-compute-tools
|
eugenebokhan
|
Metal
|
Code
| 64
| 218
|
#include "../../../MetalComputeToolsSharedTypes/Definitions.h"
// Resample `source` into `destination` at the destination's resolution.
// One thread writes one destination pixel; the sampler `s` determines
// the filtering applied when reading the source.
kernel void textureResize(texture2d<float, access::sample> source [[ texture(0) ]],
                          texture2d<float, access::write> destination [[ texture(1) ]],
                          sampler s [[ sampler(0) ]],
                          const ushort2 position [[ thread_position_in_grid ]]) {
    const auto textureSize = ushort2(destination.get_width(),
                                     destination.get_height());
    // Guard for grids that are not an exact multiple of the threadgroup
    // size (macro from the shared header; behaviour depends on the
    // deviceSupportsNonuniformThreadgroups function constant).
    checkPosition(position, textureSize, deviceSupportsNonuniformThreadgroups);
    const auto positionF = float2(position);
    const auto textureSizeF = float2(textureSize);
    // Sample at the pixel centre, normalised into [0, 1].
    const auto normalizedPosition = (positionF + 0.5f) / textureSizeF;
    auto sampledValue = source.sample(s, normalizedPosition);
    destination.write(sampledValue, position);
}
| 10,240
|
https://github.com/QuarkNet-HEP/e-Labs/blob/master/cosmic/src/sql/patch_states.sql
|
Github Open Source
|
Open Source
|
BSD-3-Clause
| null |
e-Labs
|
QuarkNet-HEP
|
SQL
|
Code
| 17
| 30
|
-- Widen the state abbreviation column to 3 characters.
alter Table state alter abbreviation TYPE varchar(3);
-- Add an integer discriminator column to the state table.
Alter table state Add type int;
-- Backfill: mark every existing row as type 1.
-- NOTE(review): the meaning of type=1 is not defined in this patch --
-- confirm against the application's state-type enumeration.
update state SET type=1;
| 14,981
|
https://github.com/andyli/pyextern/blob/master/out/science/tensorflow/python/training/py_checkpoint_reader/Py_checkpoint_reader.hx
|
Github Open Source
|
Open Source
|
BSD-3-Clause
| 2,022
|
pyextern
|
andyli
|
Haxe
|
Code
| 15
| 60
|
/* This file is generated, do not edit! */
package tensorflow.python.training.py_checkpoint_reader;
typedef Py_checkpoint_reader = tensorflow.python.training.py_checkpoint_reader.Py_checkpoint_reader_Module;
| 2,599
|
https://github.com/RT-EGG/CsLibraries/blob/master/GLTestVisualizer/TestView/TransformMatrixDexomposition/Ctrl_TransformMatrixDecompositionTestView.cs
|
Github Open Source
|
Open Source
|
MIT
| null |
CsLibraries
|
RT-EGG
|
C#
|
Code
| 446
| 1,931
|
using OpenTK.Graphics.OpenGL4;
using RtCs.MathUtils;
using RtCs.OpenGL;
using System;
namespace GLTestVisualizer.TestView.TransformMatrixDexomposition
{
    // Test view that composes a TRS matrix from user input, decomposes it
    // back into translation / rotation(Euler) / scale, and renders both
    // the input and the re-composed output cubes side by side.
    public partial class Ctrl_TransformMatrixDecompositionTestView : GLTestVisualizer.TestView.Ctrl_TestView
    {
        public Ctrl_TransformMatrixDecompositionTestView()
        {
            InitializeComponent();
            return;
        }

        public override string SceneName => "Transform Matrix Decomposition";

        // Scene setup: populate the rotation-order combo, colour the cube
        // faces per axis, wire renderers/transforms, and register
        // everything with the scene's display list.
        public override void Start()
        {
            base.Start();

            ComboRotationOrder.Items.Clear();
            foreach (EEulerRotationOrder order in Enum.GetValues(typeof(EEulerRotationOrder))) {
                ComboRotationOrder.Items.Add(order);
            }
            ComboRotationOrder.SelectedIndex = 0;

            // One solid colour per cube face; vertex index ranges of 4
            // correspond to the faces noted in the trailing comments.
            Vector4[] vertColors = new Vector4[m_Cube.Vertices.Length];
            for (int i = 0; i < m_Cube.Vertices.Length; ++i) {
                if ((0 <= i) && (i <= 3)) {
                    vertColors[i] = new Vector4(1.0f, 0.0f, 0.0f, 1.0f); // -x
                } else if ((4 <= i) && (i <= 7)) {
                    vertColors[i] = new Vector4(0.0f, 1.0f, 1.0f, 1.0f); // +x
                } else if ((8 <= i) && (i <= 11)) {
                    vertColors[i] = new Vector4(0.0f, 1.0f, 0.0f, 1.0f); // -y
                } else if ((12 <= i) && (i <= 15)) {
                    vertColors[i] = new Vector4(1.0f, 0.0f, 1.0f, 1.0f); // +y
                } else if ((16 <= i) && (i <= 19)) {
                    vertColors[i] = new Vector4(0.0f, 0.0f, 1.0f, 1.0f); // -z
                } else if ((20 <= i) && (i <= 23)) {
                    vertColors[i] = new Vector4(1.0f, 1.0f, 0.0f, 1.0f); // +z
                }
            }
            m_Cube.Colors = vertColors;
            m_Cube.Apply();

            // Input cube on the left, output cube on the right; both are
            // always rendered regardless of frustum culling.
            m_MatrixInputView.Renderer.Mesh = m_Cube;
            m_MatrixInputView.Renderer.Material = m_Material;
            m_MatrixInputView.Transform.LocalPosition = new Vector3(-1.0f, 0.0f, 0.0f);
            m_MatrixInputView.FrustumCullingMode = EGLFrustumCullingMode.AlwaysRender;

            m_MatrixOutputView.Renderer.Mesh = m_Cube;
            m_MatrixOutputView.Renderer.Material = m_Material;
            m_MatrixOutputView.Transform.LocalPosition = new Vector3(1.0f, 0.0f, 0.0f);
            m_MatrixOutputView.FrustumCullingMode = EGLFrustumCullingMode.AlwaysRender;

            // Axis gizmos follow their respective cubes.
            m_MatrixInputAxisView.Transform.Parent = m_MatrixInputView.Transform;
            m_MatrixInputAxisView.FrustumCullingMode = EGLFrustumCullingMode.AlwaysRender;
            m_MatrixOutputAxisView.Transform.Parent = m_MatrixOutputView.Transform;
            m_MatrixOutputAxisView.FrustumCullingMode = EGLFrustumCullingMode.AlwaysRender;

            // Camera looks at the origin from above/behind.
            m_Projection.Near = 0.01f;
            m_Projection.Far = 100.0f;
            m_Camera.Projection = m_Projection;
            m_Camera.Transform.LocalPosition = new Vector3(0.0f, 2.0f, 2.0f);
            m_Camera.Transform.LookAt(new Vector3());

            m_Scene.DisplayList.Register(m_MatrixInputView);
            m_Scene.DisplayList.Register(m_MatrixInputAxisView);
            m_Scene.DisplayList.Register(m_MatrixOutputView);
            m_Scene.DisplayList.Register(m_MatrixOutputAxisView);
            return;
        }

        // Scene teardown: stop the redraw timer and dispose GL resources.
        public override void Exit()
        {
            base.Exit();
            timer1.Enabled = false;
            m_MatrixInputAxisView.Dispose();
            m_MatrixOutputAxisView.Dispose();
            m_MatrixInputView.Dispose();
            m_MatrixOutputView.Dispose();
            m_Material.Dispose();
            m_Cube.Dispose();
            return;
        }

        // Push the decomposed TRS of the dummy transform into the output
        // panel and rotate both cubes to their respective orientations.
        private void UpdateDecomposited()
        {
            Ctrl_MatrixOutput.Translation = m_DummyTransform.LocalPosition;
            Ctrl_MatrixOutput.Rotation = m_DummyTransform.LocalRotation.ToEuler(RotationOrder).RadToDeg();
            Ctrl_MatrixOutput.Scale = m_DummyTransform.LocalScale;

            m_MatrixInputView.Transform.LocalRotation = m_DummyTransform.LocalRotation;
            // The output cube is rotated from the *decomposed* Euler angles,
            // so any decomposition error becomes visible as a mismatch.
            m_MatrixOutputView.Transform.LocalRotation = Quaternion.FromEuler(Ctrl_MatrixOutput.Rotation.DegToRad(), RotationOrder);
            return;
        }

        // Currently selected Euler rotation order.
        private EEulerRotationOrder RotationOrder
        {
            get => (EEulerRotationOrder)ComboRotationOrder.SelectedItem;
            set => ComboRotationOrder.SelectedItem = value;
        }

        private void Ctrl_MatrixInput_TranslationChanged(object inSender, Vector3 inValue)
        {
            m_DummyTransform.LocalPosition = inValue;
            UpdateDecomposited();
            return;
        }

        private void Ctrl_MatrixInput_RotationChanged(object inSender, Vector3 inValue)
        {
            // Input angles are in degrees; the transform expects radians.
            m_DummyTransform.LocalRotation = Quaternion.FromEuler(inValue.DegToRad(), RotationOrder);
            UpdateDecomposited();
            return;
        }

        private void Ctrl_MatrixInput_ScaleChanged(object inSender, Vector3 inValue)
        {
            m_DummyTransform.LocalScale = inValue;
            UpdateDecomposited();
            return;
        }

        // GL paint callback: refresh the projection for the current viewport
        // size, then render the scene.
        private void GLViewr_OnRenderScene(object inSender, EventArgs inArgs)
        {
            m_Projection.SetAngleAndViewportSize(60.0f, GLViewer.Width, GLViewer.Height);
            m_Scene.Render(m_Camera);
            return;
        }

        // Periodic repaint driven by the WinForms timer.
        private void timer1_Tick(object sender, EventArgs e)
        {
            GLViewer.Invalidate();
            return;
        }

        // Re-interpret the current input angles under the new rotation order.
        private void ComboRotationOrder_SelectedIndexChanged(object sender, EventArgs e)
        {
            m_DummyTransform.LocalRotation = Quaternion.FromEuler(Ctrl_MatrixInput.Rotation.DegToRad(), RotationOrder);
            UpdateDecomposited();
            return;
        }

        // Shared GL resources and scene objects (disposed in Exit()).
        private GLMaterial m_Material = new GLVertexColorMaterial();
        private GLMesh m_Cube = GLPrimitiveMesh.CreateBox(1.0f, 1.0f, 1.0f);
        private GLRenderObject m_MatrixInputView = new GLRenderObject();
        private GLAxisRenderObject m_MatrixInputAxisView = new GLAxisRenderObject();
        private GLRenderObject m_MatrixOutputView = new GLRenderObject();
        private GLAxisRenderObject m_MatrixOutputAxisView = new GLAxisRenderObject();
        // Holds the user-entered TRS; never rendered itself.
        private Transform m_DummyTransform = new Transform();
        private GLScene m_Scene = new GLScene();
        private GLPerspectiveProjection m_Projection = new GLPerspectiveProjection();
        private GLCamera m_Camera = new GLCamera();
    }
}
| 8,700
|
https://github.com/alexjpwalker/typedb-client-java/blob/master/test/behaviour/connection/session/SessionSteps.java
|
Github Open Source
|
Open Source
|
Apache-2.0
| null |
typedb-client-java
|
alexjpwalker
|
Java
|
Code
| 397
| 1,337
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package grakn.client.test.behaviour.connection.session;
import grakn.client.GraknClient;
import io.cucumber.java.en.Then;
import io.cucumber.java.en.When;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Stream;
import static grakn.client.test.behaviour.connection.ConnectionSteps.THREAD_POOL_SIZE;
import static grakn.client.test.behaviour.connection.ConnectionSteps.client;
import static grakn.client.test.behaviour.connection.ConnectionSteps.sessions;
import static grakn.client.test.behaviour.connection.ConnectionSteps.sessionsParallel;
import static grakn.client.test.behaviour.connection.ConnectionSteps.threadPool;
import static grakn.common.util.Collections.list;
import static java.util.Objects.isNull;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/** Cucumber step definitions for opening and inspecting Grakn sessions. */
public class SessionSteps {

    @When("connection open session for keyspace: {word}")
    public void connection_open_session_for_keyspace(String name) {
        // Single-keyspace convenience wrapper around the list variant.
        connection_open_sessions_for_keyspaces(list(name));
    }

    @When("connection open session(s) for keyspace(s):")
    public void connection_open_sessions_for_keyspaces(List<String> names) {
        // Sessions are opened sequentially and collected in shared state.
        for (String name : names) {
            sessions.add(client.session(name));
        }
    }

    @When("connection open sessions in parallel for keyspaces:")
    public void connection_open_sessions_in_parallel_for_keyspaces(List<String> names) {
        // Each session must get its own thread, so the pool must be big enough.
        assertTrue(THREAD_POOL_SIZE >= names.size());

        for (String name : names) {
            sessionsParallel.add(CompletableFuture.supplyAsync(() -> client.session(name), threadPool));
        }
    }

    @Then("session(s) is/are null: {bool}")
    public void sessions_are_null(Boolean isNull) {
        // Note: the parameter shadows the statically imported isNull() only
        // by name; the method call below still resolves to Objects-style isNull.
        for (GraknClient.Session session : sessions) {
            assertEquals(isNull, isNull(session));
        }
    }

    @Then("session(s) is/are open: {bool}")
    public void sessions_are_open(Boolean isOpen) {
        for (GraknClient.Session session : sessions) {
            assertEquals(isOpen, session.isOpen());
        }
    }

    @Then("sessions in parallel are null: {bool}")
    public void sessions_in_parallel_are_null(Boolean isNull) {
        // Run every assertion on the future's completion, then join them all.
        Stream<CompletableFuture<Void>> assertions = sessionsParallel
                .stream().map(futureSession -> futureSession.thenApplyAsync(session -> {
                    assertEquals(isNull, isNull(session));
                    return null;
                }));

        CompletableFuture.allOf(assertions.toArray(CompletableFuture[]::new)).join();
    }

    @Then("sessions in parallel are open: {bool}")
    public void sessions_in_parallel_are_open(Boolean isOpen) {
        Stream<CompletableFuture<Void>> assertions = sessionsParallel
                .stream().map(futureSession -> futureSession.thenApplyAsync(session -> {
                    assertEquals(isOpen, session.isOpen()); return null;
                }));

        CompletableFuture.allOf(assertions.toArray(CompletableFuture[]::new)).join();
    }

    @Then("session(s) has/have keyspace(s):")
    public void sessions_have_keyspaces(List<String> names) {
        // Expected names are matched positionally against the opened sessions.
        assertEquals(names.size(), sessions.size());
        Iterator<GraknClient.Session> sessionIter = sessions.iterator();

        for (String name : names) {
            assertEquals(name, sessionIter.next().keyspace().name());
        }
    }

    @Then("sessions in parallel have keyspaces:")
    public void sessions_in_parallel_have_keyspaces(List<String> names) {
        assertEquals(names.size(), sessionsParallel.size());
        Iterator<CompletableFuture<GraknClient.Session>> futureSessionIter = sessionsParallel.iterator();
        CompletableFuture[] assertions = new CompletableFuture[names.size()];

        int i = 0;
        for (String name : names) {
            assertions[i++] = futureSessionIter.next().thenApplyAsync(session -> {
                assertEquals(name, session.keyspace().name()); return null;
            });
        }

        CompletableFuture.allOf(assertions).join();
    }
}
| 39,259
|
https://github.com/andersonhdb/kdm-companion/blob/master/screens/ExpansionsScreen.js
|
Github Open Source
|
Open Source
|
MIT
| 2,019
|
kdm-companion
|
andersonhdb
|
JavaScript
|
Code
| 46
| 119
|
import React from 'react'
import { ScrollView } from 'react-native'
import { Screen } from '@shoutem/ui'
import Expansions from '../components/Expansions'
export default class ExpansionsScreen extends React.Component {
render() {
return (
<Screen style={{ paddingTop: 5, paddingLeft: 5 }}>
<ScrollView>
<Expansions />
</ScrollView>
</Screen>
)
}
}
| 35,931
|
https://github.com/enfoTek/tomato.linksys.e2000.nvram-mod/blob/master/release/src/linux/linux/drivers/media/radio/radio-maxiradio.c
|
Github Open Source
|
Open Source
|
FSFAP
| 2,021
|
tomato.linksys.e2000.nvram-mod
|
enfoTek
|
C
|
Code
| 949
| 3,712
|
/*
* Guillemot Maxi Radio FM 2000 PCI radio card driver for Linux
* (C) 2001 Dimitromanolakis Apostolos <apdim@grecian.net>
*
* Based in the radio Maestro PCI driver. Actually it uses the same chip
* for radio but different pci controller.
*
* I didn't have any specs I reversed engineered the protocol from
* the windows driver (radio.dll).
*
* The card uses the TEA5757 chip that includes a search function but it
* is useless as I haven't found any way to read back the frequency. If
* anybody does please mail me.
*
* For the pdf file see:
* http://www.semiconductors.philips.com/pip/TEA5757H/V1
*
*
* CHANGES:
* 0.75b
* - better pci interface thanks to Francois Romieu <romieu@cogenit.fr>
*
* 0.75
* - tiding up
* - removed support for multiple devices as it didn't work anyway
*
* BUGS:
* - card unmutes if you change frequency
*
*/
#include <linux/module.h>
#include <linux/init.h>
#include <linux/ioport.h>
#include <linux/delay.h>
#include <linux/sched.h>
#include <asm/io.h>
#include <asm/uaccess.h>
#include <asm/semaphore.h>
#include <linux/pci.h>
#include <linux/videodev.h>
/* version 0.75 Sun Feb 4 22:51:27 EET 2001 */
#define DRIVER_VERSION "0.75"
#ifndef PCI_VENDOR_ID_GUILLEMOT
#define PCI_VENDOR_ID_GUILLEMOT 0x5046
#endif
#ifndef PCI_DEVICE_ID_GUILLEMOT
#define PCI_DEVICE_ID_GUILLEMOT_MAXIRADIO 0x1001
#endif
/* TEA5757 pin mappings */
const int clk = 1, data = 2, wren = 4, mo_st = 8, power = 16 ;
static int radio_nr = -1;
MODULE_PARM(radio_nr, "i");
#define FREQ_LO 50*16000
#define FREQ_HI 150*16000
#define FREQ_IF 171200 /* 10.7*16000 */
#define FREQ_STEP 200 /* 12.5*16 */
#define FREQ2BITS(x) ((( (unsigned int)(x)+FREQ_IF+(FREQ_STEP<<1))\
/(FREQ_STEP<<2))<<2) /* (x==fmhz*16*1000) -> bits */
#define BITS2FREQ(x) ((x) * FREQ_STEP - FREQ_IF)
/* Forward declarations for the V4L file operations defined below. */
static int radio_open(struct video_device *, int);
static int radio_ioctl(struct video_device *, unsigned int, void *);
static void radio_close(struct video_device *);

/* V4L1 device description (old GNU-style designated initializers). */
static struct video_device maxiradio_radio=
{
    owner:    THIS_MODULE,
    name:     "Maxi Radio FM2000 radio",
    type:     VID_TYPE_TUNER,
    hardware: VID_HARDWARE_SF16MI,
    open:     radio_open,
    close:    radio_close,
    ioctl:    radio_ioctl,
};

/* Per-card state; only a single card is supported (see header comment). */
static struct radio_device
{
    __u16   io,     /* base of radio io */
            muted,  /* VIDEO_AUDIO_MUTE */
            stereo, /* VIDEO_TUNER_STEREO_ON */
            tuned;  /* signal strength (0 or 0xffff) */

    unsigned long freq;

    /* Serializes all ioctl access to the hardware. */
    struct  semaphore lock;
} radio_unit = {0, 0, 0, 0, };
/* Put the current task to (interruptible) sleep for ~125 ms (HZ/8 jiffies),
 * giving the tuner time to settle after being programmed. */
static void sleep_125ms(void)
{
    current->state = TASK_INTERRUPTIBLE;
    schedule_timeout(HZ >> 3);
}
/*
 * Shift a single bit into the TEA5757: with WREN asserted, present the
 * data level and pulse the clock high then low again, waiting 4 us
 * around each edge for the chip to latch the bit.
 */
static void outbit(unsigned long bit, __u16 io)
{
    if(bit != 0)
    {
        outb(  power|wren|data     ,io); udelay(4);
        outb(  power|wren|data|clk ,io); udelay(4);
        outb(  power|wren|data     ,io); udelay(4);
    }
    else
    {
        outb(  power|wren          ,io); udelay(4);
        outb(  power|wren|clk      ,io); udelay(4);
        outb(  power|wren          ,io); udelay(4);
    }
}
/* Drive the tuner's power bit: on when p is nonzero, off otherwise. */
static void turn_power(__u16 io, int p)
{
    outb(p != 0 ? power : 0, io);
}
/*
 * Program the TEA5757 frequency: clock out the 25-bit shift register
 * (9 fixed control bits, then the 16 frequency bits in `data`, MSB
 * first) and finish by leaving only the power bit driven.
 */
static void set_freq(__u16 io, __u32 data)
{
    unsigned long int si;
    int bl;

    /* TEA5757 shift register bits (see pdf) */

    outbit(0,io); // 24  search
    outbit(1,io); // 23  search up/down
    outbit(0,io); // 22  stereo/mono
    outbit(0,io); // 21  band
    outbit(0,io); // 20  band (only 00=FM works I think)
    outbit(0,io); // 19  port ?
    outbit(0,io); // 18  port ?
    outbit(0,io); // 17  search level
    outbit(0,io); // 16  search level

    /* Frequency bits 15..0, most significant first. */
    si = 0x8000;
    for(bl = 1; bl <= 16 ; bl++) { outbit(data & si,io); si >>=1; }

    /* Drop WREN; note this leaves the chip powered (i.e. unmuted). */
    outb(power,io);
}
/* Sample the mono/stereo pin with the clock low; returns nonzero when a
 * stereo transmission is being received. */
static int get_stereo(__u16 io)
{
    outb(power,io); udelay(4);
    return !(inb(io) & mo_st);
}
/* Sample the same pin with the clock high, which makes the chip report
 * tuning status instead; returns nonzero when a station is tuned. */
static int get_tune(__u16 io)
{
    outb(power+clk,io); udelay(4);
    return !(inb(io) & mo_st);
}
/*
 * Implementation of the V4L1 radio ioctls.  Called from radio_ioctl()
 * with the card's semaphore held.  `arg` is a user-space pointer, hence
 * the copy_{to,from}_user() on every structure.  Returns 0 on success,
 * a negative errno on failure, or -ENOIOCTLCMD for unknown commands.
 */
inline static int radio_function(struct video_device *dev,
        unsigned int cmd, void *arg)
{
    struct radio_device *card=dev->priv;

    switch(cmd) {
        case VIDIOCGCAP: {
            /* Capability query: one tuner, one audio channel, no video. */
            struct video_capability v;
            strcpy(v.name, "Maxi Radio FM2000 radio");
            v.type=VID_TYPE_TUNER;
            v.channels=v.audios=1;
            v.maxwidth=v.maxheight=v.minwidth=v.minheight=0;
            if(copy_to_user(arg,&v,sizeof(v)))
                return -EFAULT;
            return 0;
        }
        case VIDIOCGTUNER: {
            /* Tuner status: read stereo/tuned pins from the hardware. */
            struct video_tuner v;
            if(copy_from_user(&v, arg,sizeof(v))!=0)
                return -EFAULT;
            if(v.tuner)     /* only tuner 0 exists */
                return -EINVAL;
            card->stereo = 0xffff * get_stereo(card->io);
            card->tuned = 0xffff * get_tune(card->io);
            v.flags = VIDEO_TUNER_LOW | card->stereo;
            v.signal = card->tuned;
            strcpy(v.name, "FM");
            v.rangelow = FREQ_LO;
            v.rangehigh = FREQ_HI;
            v.mode = VIDEO_MODE_AUTO;
            if(copy_to_user(arg,&v, sizeof(v)))
                return -EFAULT;
            return 0;
        }
        case VIDIOCSTUNER: {
            /* Setting the tuner is a no-op; just validate the index. */
            struct video_tuner v;
            if(copy_from_user(&v, arg, sizeof(v)))
                return -EFAULT;
            if(v.tuner!=0)
                return -EINVAL;
            return 0;
        }
        case VIDIOCGFREQ: {
            /* Report the last frequency programmed (the TEA5757 cannot
             * be read back -- see the header comment). */
            unsigned long tmp=card->freq;
            if(copy_to_user(arg, &tmp, sizeof(tmp)))
                return -EFAULT;
            return 0;
        }
        case VIDIOCSFREQ: {
            unsigned long tmp;
            if(copy_from_user(&tmp, arg, sizeof(tmp)))
                return -EFAULT;
            if ( tmp<FREQ_LO || tmp>FREQ_HI )
                return -EINVAL;
            card->freq = tmp;
            /* Known quirk (see file header): programming a frequency
             * powers the chip up, so this also unmutes the card. */
            set_freq(card->io, FREQ2BITS(card->freq));
            sleep_125ms();
            return 0;
        }
        case VIDIOCGAUDIO: {
            /* Only mute state is meaningful; no volume/tone controls. */
            struct video_audio v;
            strcpy(v.name, "Radio");
            v.audio=v.volume=v.bass=v.treble=v.balance=v.step=0;
            v.flags=VIDEO_AUDIO_MUTABLE | card->muted;
            v.mode=VIDEO_SOUND_STEREO;
            if(copy_to_user(arg,&v, sizeof(v)))
                return -EFAULT;
            return 0;
        }
        case VIDIOCSAUDIO: {
            /* Mute powers the chip down; unmute reprograms the current
             * frequency (which powers it back up). */
            struct video_audio v;
            if(copy_from_user(&v, arg, sizeof(v)))
                return -EFAULT;
            if(v.audio)
                return -EINVAL;
            card->muted = v.flags & VIDEO_AUDIO_MUTE;
            if(card->muted)
                turn_power(card->io, 0);
            else
                set_freq(card->io, FREQ2BITS(card->freq));
            return 0;
        }
        case VIDIOCGUNIT: {
            /* Unit mapping: radio only; all other units absent. */
            struct video_unit v;
            v.video=VIDEO_NO_UNIT;
            v.vbi=VIDEO_NO_UNIT;
            v.radio=dev->minor;
            v.audio=0;
            v.teletext=VIDEO_NO_UNIT;
            if(copy_to_user(arg, &v, sizeof(v)))
                return -EFAULT;
            return 0;
        }
        default: return -ENOIOCTLCMD;
    }
}
/* ioctl entry point: serialize all hardware access through the card's
 * semaphore and delegate the real work to radio_function(). */
static int radio_ioctl(struct video_device *dev, unsigned int cmd, void *arg)
{
    struct radio_device *card=dev->priv;
    int ret;

    down(&card->lock);
    ret = radio_function(dev, cmd, arg);
    up(&card->lock);
    return ret;
}
/* No per-open setup needed; the hardware is initialised at probe time. */
static int radio_open(struct video_device *dev, int flags)
{
    return 0;
}

/* No per-open state to tear down. */
static void radio_close(struct video_device *dev)
{
}
MODULE_AUTHOR("Dimitromanolakis Apostolos, apdim@grecian.net");
MODULE_DESCRIPTION("Radio driver for the Guillemot Maxi Radio FM2000 radio.");
MODULE_LICENSE("GPL");
EXPORT_NO_SYMBOLS;
/*
 * PCI probe: reserve the card's I/O region, enable the device, hook the
 * per-card state into the V4L device and register it.  Returns 0 on
 * success or -ENODEV on any failure (with the region released again).
 */
static int __devinit maxiradio_init_one(struct pci_dev *pdev, const struct pci_device_id *ent)
{
    if(!request_region(pci_resource_start(pdev, 0),
               pci_resource_len(pdev, 0), "Maxi Radio FM 2000")) {
        printk(KERN_ERR "radio-maxiradio: can't reserve I/O ports\n");
        goto err_out;
    }

    if (pci_enable_device(pdev))
        goto err_out_free_region;

    radio_unit.io = pci_resource_start(pdev, 0);
    init_MUTEX(&radio_unit.lock);
    maxiradio_radio.priv = &radio_unit;

    if(video_register_device(&maxiradio_radio, VFL_TYPE_RADIO, radio_nr)==-1) {
        /* Fix: give the message an explicit severity and a trailing
         * newline so it is not merged with the next printk output. */
        printk(KERN_ERR "radio-maxiradio: can't register device!\n");
        goto err_out_free_region;
    }

    printk(KERN_INFO "radio-maxiradio: version "
           DRIVER_VERSION
           " time "
           __TIME__ "  "
           __DATE__
           "\n");

    printk(KERN_INFO "radio-maxiradio: found Guillemot MAXI Radio device (io = 0x%x)\n",
           radio_unit.io);
    return 0;

err_out_free_region:
    release_region(pci_resource_start(pdev, 0), pci_resource_len(pdev, 0));
err_out:
    return -ENODEV;
}
/* PCI remove: unregister the V4L device and release the I/O region
 * claimed in maxiradio_init_one(). */
static void __devexit maxiradio_remove_one(struct pci_dev *pdev)
{
    video_unregister_device(&maxiradio_radio);
    release_region(pci_resource_start(pdev, 0), pci_resource_len(pdev, 0));
}
static struct pci_device_id maxiradio_pci_tbl[] __devinitdata = {
{ PCI_VENDOR_ID_GUILLEMOT, PCI_DEVICE_ID_GUILLEMOT_MAXIRADIO,
PCI_ANY_ID, PCI_ANY_ID, },
{ 0,}
};
MODULE_DEVICE_TABLE(pci, maxiradio_pci_tbl);
static struct pci_driver maxiradio_driver = {
name: "radio-maxiradio",
id_table: maxiradio_pci_tbl,
probe: maxiradio_init_one,
remove: __devexit_p(maxiradio_remove_one),
};
/* Module load: register the PCI driver (probe runs for matching cards). */
int __init maxiradio_radio_init(void)
{
    return pci_module_init(&maxiradio_driver);
}

/* Module unload: unregister the PCI driver (remove runs for bound cards). */
void __exit maxiradio_radio_exit(void)
{
    pci_unregister_driver(&maxiradio_driver);
}

module_init(maxiradio_radio_init);
module_exit(maxiradio_radio_exit);
| 41,988
|
https://github.com/ankushgoel27/api/blob/master/packages/api/test/index.test.ts
|
Github Open Source
|
Open Source
|
MIT
| null |
api
|
ankushgoel27
|
TypeScript
|
Code
| 1,047
| 3,594
|
import { assert, expect } from 'chai';
import uniqueTempDir from 'unique-temp-dir';
import nock from 'nock';
import api from '../src';
import Cache from '../src/cache';
import pkg from '../package.json';
let petstoreSdk;
let readmeSdk;
const petstoreServerUrl = 'http://petstore.swagger.io/api';
describe('api', function () {
// eslint-disable-next-line mocha/no-setup-in-describe
this.beforeAll(function () {
// Set a unique cache dir so these tests won't collide with other tests and we don't need to go
// through the trouble of mocking out the filesystem.
Cache.setCacheDir(uniqueTempDir());
});
beforeEach(async function () {
const petstore = require.resolve('@readme/oas-examples/3.0/json/petstore-expanded.json');
await new Cache(petstore).saveFile();
petstoreSdk = api(petstore);
const readme = require.resolve('@readme/oas-examples/3.0/json/readme.json');
await new Cache(readme).saveFile();
readmeSdk = api(readme);
});
describe('#preloading', function () {
let uspto;
beforeEach(function () {
uspto = require.resolve('@readme/oas-examples/3.0/json/uspto.json');
});
it('should proxy an sdk for the first time', async function () {
const mock = nock('https://developer.uspto.gov/ds-api')
.get('/')
.reply(200, uri => uri)
.get('/two')
.reply(200, uri => uri);
// Asserting that we have not previously loaded this API.
expect(new Cache(uspto).isCached()).to.be.false;
const sdk = api(uspto);
// SDK should still not be loaded since we haven't officially called it yet.
expect(new Cache(uspto).isCached()).to.be.false;
expect(Object.keys(sdk)).to.deep.equal(['auth', 'config', 'server']);
expect(await sdk.get('/')).to.equal('/ds-api/');
// Now that we've called something on the SDK, it should now be fully loaded.
expect(new Cache(uspto).isCached()).to.be.true;
expect(Object.keys(sdk)).to.deep.equal([
'auth',
'config',
'server',
'get',
'put',
'post',
'delete',
'options',
'head',
'patch',
'trace',
'list-data-sets',
'list-searchable-fields',
'perform-search',
]);
// Calling the same method again should also work as expected.
expect(await sdk.get('/two')).to.equal('/ds-api/two');
mock.done();
});
it('should support supplying a raw JSON OAS object', function () {
const sdk = api(uspto);
expect(sdk.get).to.be.a('function');
});
});
describe('#accessors', function () {
it('should have a function for each http method', function () {
['get', 'put', 'post', 'delete', 'options', 'head', 'patch', 'trace'].forEach(method => {
expect(petstoreSdk[method]).to.be.a('function');
});
});
describe('#operationId()', function () {
it('should work for operationId', async function () {
const mock = nock(petstoreServerUrl).get('/pets').reply(200, 'it worked!');
expect(await petstoreSdk.findPets()).to.equal('it worked!');
mock.done();
});
it('should work with operationIds that have contain spaces', function () {
expect(petstoreSdk['find pet by id']).to.be.a('function');
});
it('should work for other methods', async function () {
const mock = nock(petstoreServerUrl).post('/pets').reply(200, 'it worked!');
expect(await petstoreSdk.addPet()).to.equal('it worked!');
mock.done();
});
it.skip('should allow operationId to be the same as a http method');
it('should error if an operationId does not exist', async function () {
await petstoreSdk
.findPetz()
.then(() => assert.fail())
.catch(err => {
expect(err.message).to.match(/does not appear to be a valid operation/);
});
});
});
describe('#method(path)', function () {
it('should work for method and path', async function () {
const mock = nock(petstoreServerUrl).get('/pets').reply(200, 'it worked!');
expect(await petstoreSdk.get('/pets')).to.equal('it worked!');
mock.done();
});
it('should error if method does not exist', async function () {
await petstoreSdk
.fetch('/pets')
.then(() => assert.fail())
.catch(err => {
expect(err.message).to.match(/does not appear to be a valid operation/);
});
});
it.skip('should error if a path does not exist on a method');
});
});
describe('#fetch', function () {
const petId = 123;
it('should reject for error-level status codes', async function () {
const response = {
error: 'ENDPOINT_NOTFOUND',
message: `The endpoint you called (GET /pets/${petId}) doesn't exist`,
};
const mock = nock(petstoreServerUrl).delete(`/pets/${petId}`).reply(404, response);
await petstoreSdk
.deletePet({ id: petId })
.then(() => assert.fail())
.catch(async err => {
expect(err.status).to.equal(404);
const json = await err.json();
expect(json).to.deep.equal(response);
});
mock.done();
});
it('should contain a custom user agent for the library in requests', async function () {
const userAgent = `${pkg.name} (node)/${pkg.version}`;
const mock = nock(petstoreServerUrl, {
reqheaders: {
'User-Agent': userAgent,
},
})
.delete(`/pets/${petId}`)
.reply(200, function () {
return this.req.headers['user-agent'];
});
expect(await petstoreSdk.deletePet({ id: petId })).to.deep.equal([userAgent]);
mock.done();
});
describe('operationId', function () {
it('should pass through parameters for operationId', async function () {
const response = {
id: petId,
name: 'Buster',
};
const mock = nock(petstoreServerUrl).delete(`/pets/${petId}`).reply(200, response);
expect(await petstoreSdk.deletePet({ id: petId })).to.deep.equal(response);
mock.done();
});
it('should pass through body for operationId', async function () {
const body = { name: 'Buster' };
const mock = nock(petstoreServerUrl)
.post('/pets', body)
.reply(200, (uri, requestBody) => requestBody);
expect(await petstoreSdk.addPet(body)).to.deep.equal(body);
mock.done();
});
it('should pass through parameters and body for operationId', async function () {
const slug = 'new-release';
const body = {
title: 'revised title',
body: 'updated body',
};
const mock = nock('https://dash.readme.com/api/v1')
.put(`/changelogs/${slug}`, body)
.reply(200, (uri, requestBody) => ({ uri, requestBody }));
expect(await readmeSdk.updateChangelog(body, { slug })).to.deep.equal({
requestBody: body,
uri: '/api/v1/changelogs/new-release',
});
mock.done();
});
});
describe('method + path', function () {
it('should pass through body for method + path', async function () {
const body = { name: 'Buster' };
const mock = nock(petstoreServerUrl)
.post('/pets', body)
.reply(200, (uri, requestBody) => requestBody);
expect(await petstoreSdk.post('/pets', body)).to.deep.equal(body);
mock.done();
});
it('should pass through parameters for method + path', async function () {
const slug = 'new-release';
const mock = nock('https://dash.readme.com/api/v1')
.put(`/changelogs/${slug}`)
.reply(200, uri => uri);
expect(await readmeSdk.put('/changelogs/{slug}', { slug })).to.equal('/api/v1/changelogs/new-release');
mock.done();
});
it('should pass through parameters and body for method + path', async function () {
const slug = 'new-release';
const body = {
title: 'revised title',
body: 'updated body',
};
const mock = nock('https://dash.readme.com/api/v1')
.put(`/changelogs/${slug}`, body)
.reply(200, function (uri, requestBody) {
return {
uri,
requestBody,
};
});
expect(await readmeSdk.put('/changelogs/{slug}', body, { slug })).to.deep.equal({
uri: '/api/v1/changelogs/new-release',
requestBody: body,
});
mock.done();
});
});
describe('query parameter encoding', function () {
let queryEncoding;
beforeEach(function () {
queryEncoding = api({
servers: [{ url: 'https://httpbin.org/' }],
paths: {
'/anything': {
get: {
operationId: 'getAnything',
parameters: [
{ name: 'stringPound', in: 'query', schema: { type: 'string' } },
{ name: 'stringPound2', in: 'query', schema: { type: 'string' } },
{ name: 'stringHash', in: 'query', schema: { type: 'string' } },
{ name: 'stringArray', in: 'query', schema: { type: 'string' } },
{ name: 'stringWeird', in: 'query', schema: { type: 'string' } },
{ name: 'array', in: 'query', schema: { type: 'array', items: { type: 'string' } } },
],
},
},
},
});
});
it('should encode query parameters', async function () {
const params = {
stringPound: 'something¬hing=true',
stringHash: 'hash#data',
stringArray: 'where[4]=10',
stringWeird: 'properties["$email"] == "testing"',
array: [
encodeURIComponent('something¬hing=true'), // This is already encoded so it shouldn't be double encoded.
'nothing&something=false',
'another item',
],
};
const mock = nock('https://httpbin.org/')
.get('/anything')
.query(true)
.reply(200, function () {
return { path: this.req.path };
});
expect(await queryEncoding.getAnything(params)).to.deep.equal({
path: '/anything?stringPound=something%26nothing%3Dtrue&stringHash=hash%23data&stringArray=where%5B4%5D%3D10&stringWeird=properties%5B%22%24email%22%5D%20%3D%3D%20%22testing%22&array=something%26nothing%3Dtrue&array=nothing%26something%3Dfalse&array=another%20item',
});
mock.done();
});
it("should not double encode query params if they're already encoded", async function () {
const params = {
stringPound: encodeURIComponent('something¬hing=true'),
stringHash: encodeURIComponent('hash#data'),
stringArray: encodeURIComponent('where[4]=10'),
stringWeird: encodeURIComponent('properties["$email"] == "testing"'),
array: [
'something¬hing=true', // Should still encode this one eventhrough the others are already encoded.
encodeURIComponent('nothing&something=false'),
encodeURIComponent('another item'),
],
};
const mock = nock('https://httpbin.org/')
.get('/anything')
.query(true)
.reply(200, function () {
return { path: this.req.path };
});
expect(await queryEncoding.getAnything(params)).to.deep.equal({
path: '/anything?stringPound=something%26nothing%3Dtrue&stringHash=hash%23data&stringArray=where%5B4%5D%3D10&stringWeird=properties%5B%22%24email%22%5D%20%3D%3D%20%22testing%22&array=something%26nothing%3Dtrue&array=nothing%26something%3Dfalse&array=another%20item',
});
mock.done();
});
});
});
});
| 16,957
|
https://github.com/shekharAggarwal/URDriver/blob/master/app/src/main/java/com/urdriver/urdriver/Adapter/UserRequestAdminAdapter.java
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,020
|
URDriver
|
shekharAggarwal
|
Java
|
Code
| 894
| 4,270
|
package com.urdriver.urdriver.Adapter;
import android.app.Activity;
import android.content.Intent;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.TextView;
import android.widget.Toast;
import androidx.annotation.NonNull;
import androidx.recyclerview.widget.RecyclerView;
import com.google.gson.Gson;
import com.jaredrummler.materialspinner.MaterialSpinner;
import com.squareup.picasso.Picasso;
import com.urdriver.urdriver.Common.Common;
import com.urdriver.urdriver.DriversRequest;
import com.urdriver.urdriver.R;
import com.urdriver.urdriver.model.Cab;
import com.urdriver.urdriver.model.DataMessage;
import com.urdriver.urdriver.model.MyResponse;
import com.urdriver.urdriver.model.RequestData;
import com.urdriver.urdriver.model.Token;
import com.urdriver.urdriver.retrofit.IFCMService;
import com.urdriver.urdriver.retrofit.IURDriver;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import de.hdodenhof.circleimageview.CircleImageView;
import io.reactivex.android.schedulers.AndroidSchedulers;
import io.reactivex.disposables.CompositeDisposable;
import io.reactivex.functions.Consumer;
import io.reactivex.schedulers.Schedulers;
import retrofit2.Call;
import retrofit2.Callback;
import retrofit2.Response;
/**
 * RecyclerView adapter that lists pending user cab requests for the admin.
 * Each card shows request details, a spinner of available drivers, and
 * Accept / Decline buttons that update the request on the backend and push
 * FCM notifications to the affected parties.
 *
 * <p>Changes vs. the original: the near-identical one-way/round-way branches
 * in accept and decline are consolidated (only the {@code way} flag differed);
 * the round-way accept path now notifies the assigned driver (the one-way path
 * already did — the omission looked like a copy/paste bug); and FCM responses
 * are null-guarded before being dereferenced.
 *
 * <p>NOTE(review): click listeners capture the bind-time {@code position}
 * (as in the original) — confirm the backing list is never mutated without a
 * rebind, otherwise the position can go stale.
 */
public class UserRequestAdminAdapter extends RecyclerView.Adapter<UserRequestAdminAdapter.ViewHolder> {

    Activity context;
    List<RequestData> requestData;
    IURDriver mService;                      // backend REST client
    CompositeDisposable compositeDisposable; // holds Rx subscriptions from driver lookups

    public UserRequestAdminAdapter(Activity context, List<RequestData> requestData) {
        this.context = context;
        this.requestData = requestData;
        mService = Common.getAPI();
        compositeDisposable = new CompositeDisposable();
    }

    @NonNull
    @Override
    public UserRequestAdminAdapter.ViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) {
        return new UserRequestAdminAdapter.ViewHolder(
                LayoutInflater.from(context).inflate(R.layout.item_cab_request_admin_layout, parent, false));
    }

    @Override
    public void onBindViewHolder(@NonNull final ViewHolder holder, final int position) {
        final RequestData request = requestData.get(position);
        holder.txtDate.setText(request.getPickupDate());
        holder.txtTime.setText(request.getPickupTime());
        // The cab details travel as a JSON blob inside the request row.
        Cab cab = new Gson().fromJson(request.getCabs(), Cab.class);
        holder.txt_type_cab.setText(request.getCabModel() + "," + cab.cabBrand + " (" + cab.cabType + ")");
        holder.txtSource.setText(request.getSource());
        holder.txtDestination.setText(request.getDestination());
        holder.txtAddress.setText(request.getSourceAddress());
        Picasso.get()
                .load(Common.BASE_URL + "UserImage/" + request.getBookAccount() + ".jpeg")
                .error(context.getResources().getDrawable(R.drawable.ic_profile))
                .into(holder.user_image);
        holder.txtFullName.setText(request.getFullName());
        holder.txtPhone.setText(request.getPhoneNumber() + "/" + request.getBookAccount());
        if ("0".equals(request.getCabType()))
            holder.txt_Way.setText("One Way");
        else if ("1".equals(request.getCabType()))
            holder.txt_Way.setText("Round Way");
        loadDriverChoices(holder, cab, request);
        holder.btnAccept.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                acceptRequest(holder, position);
            }
        });
        holder.btnDecline.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                declineRequest(position);
            }
        });
    }

    /** Fills the driver spinner with a "Select Driver" placeholder plus the available drivers. */
    private void loadDriverChoices(final ViewHolder holder, Cab cab, RequestData request) {
        compositeDisposable.add(
                mService.getCabAdmin(cab.cabType, request.getSource().toUpperCase(), cab.cabModel)
                        .subscribeOn(Schedulers.io())
                        .observeOn(AndroidSchedulers.mainThread())
                        .subscribe(new Consumer<List<String>>() {
                            @Override
                            public void accept(List<String> cabs) throws Exception {
                                List<String> choices = new ArrayList<>();
                                choices.add("Select Driver");
                                choices.addAll(cabs);
                                holder.spinner_cab_driver.setItems(choices);
                            }
                        }, new Consumer<Throwable>() {
                            @Override
                            public void accept(Throwable throwable) throws Exception {
                                Log.d("ERROR", throwable.getMessage());
                            }
                        }));
    }

    /** Backend "way" flag: 0 = one way, 1 = round trip, -1 = unrecognized cab type. */
    private int wayOf(RequestData request) {
        if ("0".equals(request.getCabType())) return 0;
        if ("1".equals(request.getCabType())) return 1;
        return -1;
    }

    /**
     * Accepts the request at {@code position} with the driver chosen in the spinner
     * (status 1). On success: refreshes the activity, navigates to {@link DriversRequest},
     * and notifies both the assigned driver and the booking user.
     */
    private void acceptRequest(final ViewHolder holder, final int position) {
        final String driver = holder.spinner_cab_driver.getText().toString();
        if (driver.equalsIgnoreCase("Select Driver")) {
            Toast.makeText(context, "Select Driver", Toast.LENGTH_SHORT).show();
            return;
        }
        final RequestData request = requestData.get(position);
        final int way = wayOf(request);
        if (way < 0) return; // unknown cab type: the original branches also did nothing
        mService.updateRequest(request.getBookAccount(), request.getCabModel(),
                Integer.parseInt(request.getId()), 1, way, driver)
                .enqueue(new Callback<String>() {
                    @Override
                    public void onResponse(Call<String> call, Response<String> response) {
                        if (response.body() == null) return;
                        if (response.body().equals("ok")) {
                            context.recreate();
                            Toast.makeText(context, "Request Accepted", Toast.LENGTH_SHORT).show();
                            Common.way = String.valueOf(way);
                            context.startActivity(new Intent(context, DriversRequest.class));
                            // FIX: the original only notified the driver for one-way trips.
                            sendNotification(driver);
                            sendNotificationToUser(request.getBookAccount(), Common.currentDriver.getName());
                        } else {
                            Toast.makeText(context, "" + response.body(), Toast.LENGTH_SHORT).show();
                        }
                    }

                    @Override
                    public void onFailure(Call<String> call, Throwable t) {
                        Toast.makeText(context, "" + t.getMessage(), Toast.LENGTH_SHORT).show();
                    }
                });
    }

    /** Declines the request at {@code position} (status 3, placeholder driver "00"). */
    private void declineRequest(int position) {
        RequestData request = requestData.get(position);
        int way = wayOf(request);
        if (way < 0) return;
        mService.updateRequest(request.getBookAccount(), request.getCabModel(),
                Integer.parseInt(request.getId()), 3, way, "00")
                .enqueue(new Callback<String>() {
                    @Override
                    public void onResponse(Call<String> call, Response<String> response) {
                        if (response.body() == null) return;
                        if (response.body().equals("ok")) {
                            context.recreate();
                            Toast.makeText(context, "Request Denied", Toast.LENGTH_SHORT).show();
                        } else {
                            Toast.makeText(context, "" + response.body(), Toast.LENGTH_SHORT).show();
                        }
                    }

                    @Override
                    public void onFailure(Call<String> call, Throwable t) {
                        Log.d("ERROR", new Gson().toJson(t.getCause()));
                        Toast.makeText(context, "" + t.getMessage(), Toast.LENGTH_SHORT).show();
                    }
                });
    }

    @Override
    public int getItemCount() {
        return requestData.size();
    }

    /** Looks up the booking user's FCM token (role "0") and pushes a confirmation message. */
    private void sendNotificationToUser(String phone, final String name) {
        mService.getToken(phone, "0")
                .enqueue(new Callback<Token>() {
                    @Override
                    public void onResponse(Call<Token> call, Response<Token> response) {
                        Token token = response.body();
                        // Guard: the original dereferenced response.body() without a null check.
                        if (token == null || token.getToken() == null) {
                            Log.d("ERROR", "no FCM token in response");
                            return;
                        }
                        Map<String, String> contentSend = new HashMap<>();
                        contentSend.put("title", "Cab Confirmation");
                        contentSend.put("message", "Your cab request confirm by " + name);
                        contentSend.put("Phone", Common.currentDriver.getPhone());
                        DataMessage dataMessage = new DataMessage();
                        dataMessage.setTo(token.getToken());
                        dataMessage.setData(contentSend);
                        pushDataMessage(dataMessage);
                    }

                    @Override
                    public void onFailure(Call<Token> call, Throwable t) {
                        Toast.makeText(context, "" + t.getMessage(), Toast.LENGTH_SHORT).show();
                    }
                });
    }

    /** Looks up the driver's FCM token (role "1") and pushes a new-booking message. */
    private void sendNotification(String phone) {
        mService.getToken(phone, "1")
                .enqueue(new Callback<Token>() {
                    @Override
                    public void onResponse(Call<Token> call, Response<Token> response) {
                        Token token = response.body();
                        if (token == null || token.getToken() == null) {
                            Log.d("ERROR", "no FCM token in response");
                            return;
                        }
                        Map<String, String> contentSend = new HashMap<>();
                        contentSend.put("title", "Cab Booking");
                        contentSend.put("message", "Your receive a new booking");
                        DataMessage dataMessage = new DataMessage();
                        dataMessage.setTo(token.getToken());
                        dataMessage.setData(contentSend);
                        pushDataMessage(dataMessage);
                    }

                    @Override
                    public void onFailure(Call<Token> call, Throwable t) {
                        Toast.makeText(context, "" + t.getMessage(), Toast.LENGTH_SHORT).show();
                    }
                });
    }

    /** Shared FCM send: logs the FCM response body, toasts transport failures. */
    private void pushDataMessage(DataMessage dataMessage) {
        IFCMService ifcmService = Common.getGetFCMService();
        ifcmService.sendNotification(dataMessage)
                .enqueue(new Callback<MyResponse>() {
                    @Override
                    public void onResponse(Call<MyResponse> call, Response<MyResponse> response) {
                        if (response.code() == 200 && response.body() != null) {
                            Log.d("ERROR", new Gson().toJson(response.body()));
                        }
                    }

                    @Override
                    public void onFailure(Call<MyResponse> call, Throwable t) {
                        Toast.makeText(context, "" + t.getMessage(), Toast.LENGTH_SHORT).show();
                    }
                });
    }

    /** View cache for one request card. */
    class ViewHolder extends RecyclerView.ViewHolder {
        CircleImageView user_image;
        TextView txt_type_cab, txtFullName, txtPhone, txtDate, txtTime, txtSource, txtDestination, txtAddress, txt_Way, txt_decorated;
        Button btnAccept, btnDecline;
        MaterialSpinner spinner_cab_driver;

        ViewHolder(@NonNull View itemView) {
            super(itemView);
            txt_type_cab = itemView.findViewById(R.id.txt_type_cab);
            txtFullName = itemView.findViewById(R.id.txtFullName);
            txtPhone = itemView.findViewById(R.id.txtPhone);
            txtDate = itemView.findViewById(R.id.txtDate);
            txtTime = itemView.findViewById(R.id.txtTime);
            txtSource = itemView.findViewById(R.id.txtSource);
            txtDestination = itemView.findViewById(R.id.txtDestination);
            txtAddress = itemView.findViewById(R.id.txtAddress);
            txt_Way = itemView.findViewById(R.id.txt_Way);
            btnAccept = itemView.findViewById(R.id.btnAccept);
            btnDecline = itemView.findViewById(R.id.btnDecline);
            spinner_cab_driver = itemView.findViewById(R.id.spinner_cab_driver);
            txt_decorated = itemView.findViewById(R.id.txt_decorated);
            user_image = itemView.findViewById(R.id.user_image);
        }
    }
}
| 25,949
|
https://github.com/fatash89/babel/blob/master/packages/babel-parser/test/fixtures/flow/interfaces-as-identifier/new-expression/input.js
|
Github Open Source
|
Open Source
|
MIT
| 2,022
|
babel
|
fatash89
|
JavaScript
|
Code
| 2
| 5
|
new interface();
| 11,866
|
https://github.com/Diegoalesco95/challenge-prework-frontend/blob/master/src/components/Card/styles.js
|
Github Open Source
|
Open Source
|
MIT
| 2,020
|
challenge-prework-frontend
|
Diegoalesco95
|
JavaScript
|
Code
| 250
| 885
|
import styled from 'styled-components';
import { colors, size } from 'styles/variables';
// Card container: 2-column / 3-row grid; background color comes from the
// `bgColor` prop. (Comments are kept outside the template literals so the
// generated CSS is unchanged.)
export const Container = styled.div`
  width: 46.7rem;
  height: 58.2rem;
  padding: 3.9rem 4.8rem 4.8rem;
  border: 0.3rem solid ${colors.black};
  margin: 1rem 7.1rem;
  box-sizing: border-box;
  border-radius: 2rem;
  display: grid;
  align-items: center;
  justify-items: center;
  grid-template-columns: repeat(2, 1fr);
  grid-template-rows: 0.3fr 0.7fr 1fr;
  background-color: ${(props) => props.bgColor};
  @media (min-width: ${size.laptop}) {
    margin: 0 7.1rem;
  }
`;

// Card title; spans both grid columns.
export const Title = styled.h2`
  grid-column: 1 / 3;
  font-style: normal;
  font-weight: bold;
  font-size: 3.6rem;
  line-height: 5.4rem;
  text-align: center;
  color: ${colors.white};
`;

// Left column: wraps the numeric life value and the life bar.
export const Life = styled.div`
  grid-column: 1 / 2;
  display: flex;
  flex-direction: column;
  align-items: center;
  justify-content: center;
`;

// Numeric hit-points label above the bar.
export const LifeValue = styled.span`
  font-style: normal;
  font-weight: bold;
  font-size: 3rem;
  line-height: 4.5rem;
  color: ${colors.white};
`;

// Two-segment health bar; the `remaining` / `eliminated` props are CSS width
// strings and both default to '50%' when not supplied.
export const LifeBar = styled.div`
  width: 21.5rem;
  height: 5rem;
  border: 0.3rem solid ${colors.black};
  box-sizing: border-box;
  border-radius: 3rem;
  display: flex;
  overflow: hidden;
  & .life-remaining {
    width: ${(props) => props.remaining || '50%'};
    height: 4.4rem;
    background-color: ${colors.cuaternary};
  }
  & .life-eliminated {
    width: ${(props) => props.eliminated || '50%'};
    height: 4.4rem;
    background-color: ${colors.primary};
  }
`;

// Round action button; the `.clicked` class swaps the drop shadow for a
// translate to simulate being pressed.
export const Button = styled.button`
  grid-column: 2 / 3;
  width: 12rem;
  height: 12rem;
  border: 0.3rem solid ${colors.black};
  box-sizing: border-box;
  border-radius: 10rem;
  background-color: ${colors.primary};
  box-shadow: 0.4rem 0.8rem 0rem 0rem ${colors.black};
  cursor: pointer;
  & span {
    font-size: 3.6rem;
    font-weight: bold;
    line-height: 5.4rem;
    color: ${colors.white};
  }
  &:disabled {
    cursor: initial;
    background-color: #667199;
  }
  &.clicked {
    box-shadow: unset;
    transform: translate(0.4rem, 0.8rem);
  }
`;

// Character portrait frame; spans both columns at the bottom of the card.
export const Character = styled.div`
  grid-column: 1 / 3;
  width: 37rem;
  height: 23.1rem;
  background-color: ${colors.white};
  border: 0.3rem solid ${colors.black};
  box-sizing: border-box;
  border-radius: 2rem;
`;
| 24,529
|
https://github.com/cpietsch/BrainEdit/blob/master/deploy.sh
|
Github Open Source
|
Open Source
|
MIT
| null |
BrainEdit
|
cpietsch
|
Shell
|
Code
| 13
| 52
|
#!/bin/bash
# Copy the current app sources into the .app bundle and re-zip it for release.
# Fixes vs. original: `rm` without -f failed on a fresh checkout where the
# archive does not exist yet, and errors were silently ignored.
set -eu
cp -fr app/* deploy/BrainEdit.app/Contents/Resources/app.nw/
cd deploy/
# -f: succeed even when BrainEdit.zip is absent (first run).
rm -f BrainEdit.zip
zip -r BrainEdit.zip BrainEdit.app
| 7,912
|
https://github.com/wmjtxt/c-exercise/blob/master/c2017/wd/wd108/main.cpp
|
Github Open Source
|
Open Source
|
MIT
| null |
c-exercise
|
wmjtxt
|
C++
|
Code
| 47
| 98
|
#include <iostream>
using namespace std;

// Prints the lower-triangular 9x9 multiplication table: row r lists
// "1*r=r " through "r*r=r*r ", one row per line.
int main()
{
    for (int row = 1; row <= 9; ++row) {
        for (int col = 1; col <= row; ++col) {
            cout << col << "*" << row << "=" << row * col << " ";
        }
        cout << endl;
    }
    return 0;
}
| 28,165
|
https://github.com/tomanomeike/wordpress-bandymas/blob/master/wordpress/wp-config.php
|
Github Open Source
|
Open Source
|
MIT
| null |
wordpress-bandymas
|
tomanomeike
|
PHP
|
Code
| 362
| 1,187
|
<?php
/**
 * The base configuration for WordPress
 *
 * The wp-config.php creation script uses this file during the
 * installation. You don't have to use the web site, you can
 * copy this file to "wp-config.php" and fill in the values.
 *
 * This file contains the following configurations:
 *
 * * MySQL settings
 * * Secret keys
 * * Database table prefix
 * * ABSPATH
 *
 * @link https://codex.wordpress.org/Editing_wp-config.php
 *
 * @package WordPress
 */

// ** MySQL settings - You can get this info from your web host ** //
// NOTE(review): live DB credentials and auth salts are committed to version
// control below — rotate them and load from environment/secret storage.
/** The name of the database for WordPress */
define('DB_NAME', 'trecias-bandymas');
/** MySQL database username */
define('DB_USER', 'root');
/** MySQL database password */
define('DB_PASSWORD', 'mysql');
/** MySQL hostname */
define('DB_HOST', 'localhost');
/** Database Charset to use in creating database tables. */
define('DB_CHARSET', 'utf8mb4');
/** The Database Collate type. Don't change this if in doubt. */
define('DB_COLLATE', '');

/**#@+
 * Authentication Unique Keys and Salts.
 *
 * Change these to different unique phrases!
 * You can generate these using the {@link https://api.wordpress.org/secret-key/1.1/salt/ WordPress.org secret-key service}
 * You can change these at any point in time to invalidate all existing cookies. This will force all users to have to log in again.
 *
 * @since 2.6.0
 */
define('AUTH_KEY', 'cfzk(R OY?HL,udKo<d`;=p7^(4q_&O~?zyAq @t_1KJjQO_[oIX}Z5`@iQ;[Cg~');
define('SECURE_AUTH_KEY', 'J(0an(W&D(0?#oK(a(#:tr)m[l@#)]l!2|oQ9B5S}tOy]!5SZ;6T{;)RIRxiJdbN');
define('LOGGED_IN_KEY', 'tURj]?x?n059)Mp4-(O!.}9[L)UVXKj#~;*2;1,InqeK1pd@o6$cqo>j)UzDTE{#');
define('NONCE_KEY', ')moTFuMZ`xbpLL?^LdL?TV( rOe>=?+ZmoHgJBk3m1Y0D1KK7L,6yR._xe0m1gb.');
define('AUTH_SALT', ';LcR?!]m|W(!/hW})V(]m(RbCunD3ZIuY!O8b0uxo{4ySq5/A@pWn6 &%?4@U0d9');
define('SECURE_AUTH_SALT', '*^ZnFA*$QubuV.<x<Ic(a@nG~2DwX[~3M`dLFhm7ZYjCQD9CDH*y7vz1)^U+cAO4');
define('LOGGED_IN_SALT', 'FPXB@m/Tj/Hib G<}8<7),4:{EVY[<dRb59=Rz=sg8}<Frj #oGf?_f^t?->MaqY');
define('NONCE_SALT', 'RF>nB*XK@^XgR+CodcyQzI~M7A4B>OR^IW%J,+p7|qr2D!BCz13?c[c}L2}0@BS5');
/**#@-*/

/**
 * WordPress Database Table prefix.
 *
 * You can have multiple installations in one database if you give each
 * a unique prefix. Only numbers, letters, and underscores please!
 */
$table_prefix = 'wp_';

/**
 * For developers: WordPress debugging mode.
 *
 * Change this to true to enable the display of notices during development.
 * It is strongly recommended that plugin and theme developers use WP_DEBUG
 * in their development environments.
 *
 * For information on other constants that can be used for debugging,
 * visit the Codex.
 *
 * @link https://codex.wordpress.org/Debugging_in_WordPress
 */
define('WP_DEBUG', false);

/* That's all, stop editing! Happy blogging. */

/** Absolute path to the WordPress directory. */
if ( !defined('ABSPATH') )
    define('ABSPATH', dirname(__FILE__) . '/');

/** Sets up WordPress vars and included files. */
require_once(ABSPATH . 'wp-settings.php');
| 21,827
|
https://github.com/jfmc/FPSSample/blob/master/Assets/Unity Technologies/Recorder/Extensions/UTJ/FrameCapturer/Editor/AudioRecorderEditor.cs
|
Github Open Source
|
Open Source
|
RSA-MD
| 2,021
|
FPSSample
|
jfmc
|
C#
|
Code
| 34
| 133
|
using System;
using UnityEditor;
using UnityEngine;
namespace UTJ.FrameCapturer
{
    /// <summary>
    /// Custom Unity inspector for <c>AudioRecorder</c>. Composes shared
    /// inspector sections that are presumably inherited from
    /// <c>RecorderBaseEditor</c> (not visible in this file).
    /// </summary>
    [CustomEditor(typeof(AudioRecorder))]
    public class AudioRecorderEditor : RecorderBaseEditor
    {
        public override void OnInspectorGUI()
        {
            var so = serializedObject;
            // Shared config, framerate, and recording sections, separated by spacers.
            CommonConfig();
            EditorGUILayout.Space();
            FramerateControl();
            EditorGUILayout.Space();
            RecordingControl();
            // Persist edits made to the serialized properties above.
            so.ApplyModifiedProperties();
        }
    }
}
| 39,587
|
https://github.com/wallace5303/dapps/blob/master/app/extend/context.js
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,022
|
dapps
|
wallace5303
|
JavaScript
|
Code
| 72
| 227
|
/**
*
* @type {{foo(*)}}
*/
'use strict';
module.exports = {
isMobile() {
const deviceAgent = this.get('user-agent').toLowerCase();
const agentID = deviceAgent.match(/(iphone|ipod|ipad|android)/);
if (agentID) {
// 手机访问
return true;
}
// 电脑访问
return false;
},
success(msg, data, total) {
this.body = {
success: true,
msg,
result: data,
total,
};
},
failure(msg, data) {
this.body = {
success: false,
msg,
result: data,
};
},
async infoPage(msg) {
await this.render('500', { msg });
},
};
| 13,884
|
https://github.com/ahmadmilzam/autosuggest/blob/master/src/scss/5-components/_my-components.icons.scss
|
Github Open Source
|
Open Source
|
MIT
| null |
autosuggest
|
ahmadmilzam
|
SCSS
|
Code
| 28
| 128
|
// Inline icon element; the modifiers below scale the icon's font size as
// multiples of the project's $base-spacing-unit via rem-calc().
.icon{
  display: inline-block;
}
// 1.25x base spacing unit.
.icon--larger{
  font-size: rem-calc($base-spacing-unit * 1.25);
}
// 2x base spacing unit.
.icon--double{
  font-size: rem-calc($base-spacing-unit * 2);
}
// 3x base spacing unit.
.icon--triple{
  font-size: rem-calc($base-spacing-unit * 3);
}
// 4x base spacing unit.
.icon--quadruple{
  font-size: rem-calc($base-spacing-unit * 4);
}
| 11,339
|
https://github.com/ScalablyTyped/Distribution/blob/master/g/googleapis/src/main/scala/typings/googleapis/trafficdirectorV2Mod/trafficdirectorV2/SchemaClientStatusResponse.scala
|
Github Open Source
|
Open Source
|
MIT
| 2,023
|
Distribution
|
ScalablyTyped
|
Scala
|
Code
| 84
| 302
|
package typings.googleapis.trafficdirectorV2Mod.trafficdirectorV2
import org.scalablytyped.runtime.StObject
import scala.scalajs.js
import scala.scalajs.js.annotation.{JSGlobalScope, JSGlobal, JSImport, JSName, JSBracketAccess}
// ScalablyTyped-generated facade for the Traffic Director v2
// ClientStatusResponse schema; edit the generator, not this file.
trait SchemaClientStatusResponse extends StObject {

  /**
    * Client configs for the clients specified in the ClientStatusRequest.
    */
  var config: js.UndefOr[js.Array[SchemaClientConfig]] = js.undefined
}
object SchemaClientStatusResponse {

  // Creates an empty JS-object instance (config left undefined).
  inline def apply(): SchemaClientStatusResponse = {
    val __obj = js.Dynamic.literal()
    __obj.asInstanceOf[SchemaClientStatusResponse]
  }

  // Fluent mutators for the `config` field, following the generator's pattern.
  extension [Self <: SchemaClientStatusResponse](x: Self) {

    inline def setConfig(value: js.Array[SchemaClientConfig]): Self = StObject.set(x, "config", value.asInstanceOf[js.Any])

    inline def setConfigUndefined: Self = StObject.set(x, "config", js.undefined)

    inline def setConfigVarargs(value: SchemaClientConfig*): Self = StObject.set(x, "config", js.Array(value*))
  }
}
| 5,456
|
https://github.com/sadreactonly/flutter_opencv/blob/master/example/android/app/src/main/java/com/mulgundkar/opencv4_example/MainActivity.java
|
Github Open Source
|
Open Source
|
BSD-3-Clause
| 2,022
|
flutter_opencv
|
sadreactonly
|
Java
|
Code
| 26
| 130
|
package com.mulgundkar.opencv4_example;
import androidx.annotation.NonNull;
import io.flutter.embedding.android.FlutterActivity;
import io.flutter.embedding.engine.FlutterEngine;
import io.flutter.plugins.GeneratedPluginRegistrant;
/**
 * Single Flutter entry-point activity for the example app; its only job is
 * to register the generated plugin set with the engine on configuration.
 */
public class MainActivity extends FlutterActivity {
    @Override
    public void configureFlutterEngine(@NonNull FlutterEngine flutterEngine) {
        GeneratedPluginRegistrant.registerWith(flutterEngine);
    }
}
| 298
|
https://github.com/bflaven/BlogArticlesExamples/blob/master/extending_streamlit_usage/001_nlp_spacy_python_realp/019_nlp_spacy_python.py
|
Github Open Source
|
Open Source
|
MIT
| 2,021
|
BlogArticlesExamples
|
bflaven
|
Python
|
Code
| 86
| 359
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""Dependency-parsing demo using spaCy and displaCy.

Loads the English pipeline, parses a local article file, prints each
token's text / fine-grained tag / syntactic head / dependency label,
then serves a displaCy dependency visualization (blocks on a local
HTTP server until interrupted).

Run:
    python 019_nlp_spacy_python.py

Based on: https://realpython.com/natural-language-processing-spacy-python/
"""
import spacy
from spacy import displacy
from spacy.matcher import Matcher  # kept from the original script

# English pipeline; swap in 'fr_core_news_sm' for French.
nlp = spacy.load('en_core_web_sm')

print("\n --- result_1")
print("EN spacy loaded")

# Dependency Parsing Using spaCy
print("\n --- result_2")
file_name = 'article_bf_2.txt'  # alternative fixture: 'article_bf_1.txt'
# Context manager fixes the leaked file handle from open(file_name).read().
with open(file_name) as fh:
    all_file_text = fh.read()
all_file_doc = nlp(all_file_text)
for token in all_file_doc:
    print(token.text, token.tag_, token.head.text, token.dep_)

print("\n --- result_3")
displacy.serve(all_file_doc, style='dep')
| 22,613
|
https://github.com/masahase0117/delegate/blob/master/resolvy/resconf.c
|
Github Open Source
|
Open Source
|
BSD-3-Clause
| 2,021
|
delegate
|
masahase0117
|
C
|
Code
| 3,230
| 13,945
|
/*////////////////////////////////////////////////////////////////////////
Copyright (c) 1995-2000 Yutaka Sato and ETL,AIST,MITI
Copyright (c) 2001-2006 National Institute of Advanced Industrial Science and Technology (AIST)
AIST-Product-ID: 2000-ETL-198715-01, H14PRO-049, H15PRO-165, H18PRO-443
Permission to use this material for noncommercial and/or evaluation
purpose, copy this material for your own use, and distribute the copies
via publicly accessible on-line media, without fee, is hereby granted
provided that the above copyright notice and this permission notice
appear in all copies.
AIST MAKES NO REPRESENTATIONS ABOUT THE ACCURACY OR SUITABILITY OF THIS
MATERIAL FOR ANY PURPOSE. IT IS PROVIDED "AS IS", WITHOUT ANY EXPRESS
OR IMPLIED WARRANTIES.
/////////////////////////////////////////////////////////////////////////
Content-Type: program/C; charset=US-ASCII
Program: reshost.c
Author: Yutaka Sato <ysato@etl.go.jp>
Description:
History:
950817 created
//////////////////////////////////////////////////////////////////////#*/
#include <ctype.h>
#include <stdio.h>
#include <stdarg.h>
#include <stdlib.h>
#include <time.h>
#include "ystring.h"
#include "vsocket.h"
#include "file.h"
#include "log.h"
#include "dns.h"
void SOCKS_addserv(PCStr(dhost),int dport,PCStr(shost),int sport);
int RES_next_res(PCStr(where),int ri,PVStr(res),PVStr(arg));
void set_nameserver(PCStr(domain),PCStr(addr));
void minit_reshost();
int regGetResolvConf(PVStr(buf),PVStr(where));
int connectTO(int sock,SAP addr,int leng,int timeout);
void *callFuncTimeout(int sec,void *xcode,void *(*func)(void*,...),...);
void VSA_copy(VSAddr *dst,VSAddr *src);
int VSA_comp(VSAddr *sa1,VSAddr *sa2);
int DNS_debug;
#if defined(sgi) || defined(__RES) && (19931104 <= __RES)
#define _RSTATE 1
#else
#define _RSTATE 0
#endif
#ifdef RES_STATE
#define RSTATE !_RSTATE
#else
#define RSTATE _RSTATE
#endif
#if RSTATE
typedef struct __res_state State;
#else
typedef struct state State;
#endif
#if defined(ultrix)
#define NSLIST(sx) ns_list[sx].addr
#else
#define NSLIST(sx) nsaddr_list[sx]
#endif
State _RES = {0};
int MIN_ABSNDOTS = 1;
#define MAX_SORTLIST 16
#define ADDRLEN 4
typedef struct {
char am_addr[ADDRLEN];
char am_mask[ADDRLEN];
} AddrMask;
typedef struct {
MStr( re_Resolvers,RESOLVERS_SIZ);
const char *re_ResolversPushed[64];
int re_ResolversPushedFlags[64];
AddrMask re_sort_list[MAX_SORTLIST]; /**/
int re_Nsortlist;
MStr( re__confid,64);
struct hostent re_myhost;
VSAddr re_selfs[16];
int re_selfN;
int re_lastRRX[256];
int re_F_aton;
const char *re_confpath;
MStr( re_resolv_errmsg,1024);
int re_ns_stop;
} ResolvyEnv;
static ResolvyEnv *resolvyEnv;
#define RE resolvyEnv[0]
#define Resolvers RE.re_Resolvers
/* MStr(Resolvers,re_Resolvers) */
/**/
#define PushedResolvers RE.re_ResolversPushed
#define PushedResolversFlags RE.re_ResolversPushedFlags
#define RES_ADDLAST 1
#define sort_list RE.re_sort_list
#define Nsortlist RE.re_Nsortlist
#define _confid RE.re__confid
#define myhost RE.re_myhost
#define selfs RE.re_selfs
#define selfN RE.re_selfN
#define lastRRX RE.re_lastRRX
#define F_aton RE.re_F_aton
#define confpath RE.re_confpath
struct _resolv_errmsg { defQStr(resolv_errmsg); } resolv_errmsg;
#define ns_stop RE.re_ns_stop
#ifndef RSLV_CONF
#define RSLV_CONF "/etc/resolv.conf"
#endif
#ifndef HOSTSFILE
#define HOSTSFILE "/etc/hosts"
#endif
#ifndef NISMAP_NAME
#define NISMAP_NAME "hosts.byname"
#endif
#ifndef NISMAP_ADDR
#define NISMAP_ADDR "hosts.byaddr"
#endif
#ifndef RSLV_ORDER
#define RSLV_ORDER "CFNDS"
#endif
/* One-time module initialization: allocate the shared ResolvyEnv,
 * seed the resolver order with the compile-time default, and bind the
 * error-message buffer.  Safe to call repeatedly; the environment is
 * only created once, but reshost is (re)initialized on every call. */
void minit_resconf()
{
if( resolvyEnv == 0 ){
resolvyEnv = NewStruct(ResolvyEnv);
/* default resolver order, e.g. "CFNDS" */
strcpy(Resolvers,RSLV_ORDER);
setQStr(resolv_errmsg.resolv_errmsg,resolvyEnv->re_resolv_errmsg,sizeof(resolvyEnv->re_resolv_errmsg));
}
minit_reshost();
}
int getthreadix();
/* Copy the effective resolver order into the caller buffer and return it.
 * A per-thread "pushed" order (see RES_orderPush_FL) overrides the global
 * one; with RES_ADDLAST the pushed string is appended to the global order
 * (unless already contained in it) instead of replacing it.
 * NOTE(review): the fallback path returns the global Resolvers buffer
 * while the pushed path returns the caller buffer -- callers appear to
 * rely only on the filled buffer, but confirm before changing. */
char *RES_resolvers(PVStr(resolvers))
{
const char *res;
const char *pushed;
int tix = getthreadix();
if( 0 <= tix && tix < elnumof(PushedResolvers) ){
if( pushed = PushedResolvers[tix] ){
if( PushedResolversFlags[tix] & RES_ADDLAST ){
/* pushed order is appended after the global order */
strcpy(resolvers,Resolvers);
if( strstr(resolvers,pushed) == 0 )
strcat(resolvers,pushed);
}else{
/* pushed order fully replaces the global order */
strcpy(resolvers,pushed);
}
return (char*)resolvers;
}
}
strcpy(resolvers,Resolvers);
return Resolvers;
}
int RES_orderSet_FL(FL_PAR,PCStr(resolvers)){
strcpy(Resolvers,resolvers);
return 0;
}
int RES_orderGet_FL(FL_PAR,PVStr(resolvers)){
strcpy(resolvers,Resolvers);
return 0;
}
int RES_orderPush_FL(FL_PAR,PCStr(tmpres),int flags){
int tix = getthreadix();
if( 0 <= tix && tix < elnumof(PushedResolvers) ){
PushedResolvers[tix] = tmpres;
PushedResolversFlags[tix] = flags;
}else{
}
return 0;
}
int RES_orderPop_FL(FL_PAR){
int tix = getthreadix();
if( 0 <= tix && tix < elnumof(PushedResolvers) ){
PushedResolvers[tix] = 0;
}else{
}
return 0;
}
const char *_RSLV_CONF = RSLV_CONF;
const char *_HOSTSFILE = HOSTSFILE;
const char *_NISMAP_NAME = NISMAP_NAME;
const char *_NISMAP_ADDR = NISMAP_ADDR;
const char *RES_VERIFY;
#define VIA_SOCKS "//"
void RES_getconf(PVStr(buf))
{ refQStr(bp,buf); /**/
const char *addr;
const char *dom;
int ni,si;
int options;
sprintf(bp,"RES_ORDER=%s\n",Resolvers); bp += strlen(bp);
sprintf(bp,"HOSTSFILE=%s\n",_HOSTSFILE); bp += strlen(bp);
sprintf(bp,"NISMAP_NAME=%s\n",_NISMAP_NAME); bp += strlen(bp);
sprintf(bp,"NISMAP_ADDR=%s\n",_NISMAP_ADDR); bp += strlen(bp);
sprintf(bp,"RSLV_CONF=%s\n",_RSLV_CONF); bp += strlen(bp);
sprintf(bp,"DNS_NSCOUNT=%d\n",_RES.nscount); bp += strlen(bp);
options = _RES.options & ~(RES_DEBUG);
sprintf(bp,"DNS_OPTIONS=%d\n",options); bp += strlen(bp);
sprintf(bp,"DNS_DEFDNAME=%s\n",_RES.defdname); bp += strlen(bp);
for( ni = 0; ni < _RES.nscount; ni++ ){
addr = VSA_ntoa((VSAddr*)&_RES.nsaddr_list[ni]);
sprintf(bp,"DNS_SERVER=%s\n",addr); bp += strlen(bp);
}
for( si = 0; dom = _RES.dnsrch[si]; si++ ){
sprintf(bp,"DNS_SEARCH=%s\n",dom); bp += strlen(bp);
}
}
char *RES_confid(PVStr(id))
{ CStr(buf,2048);
if( _confid[0] == 0 ){
RES_getconf(AVStr(buf));
toMD5(buf,_confid);
debug(DBG_CACHE,"#### RES_confid = %s\n%s",_confid,buf);
}
strcpy(id,_confid);
debug(DBG_CACHE,"RES_confid = %s\n",id);
return (char*)id;
}
#define clear_confid() _confid[0] = 0;
char *RES_confidSet_FL(FL_PAR,PCStr(wh),PVStr(prev)){
IStr(oldid,128);
IStr(newid,128);
if( prev ){
strcpy(prev,_confid);
}
strcpy(oldid,_confid);
clear_confid();
RES_confid(AVStr(newid));
debug(DBG_FORCE,"confid(%s)[%s]<-[%s]\n",wh,newid,oldid);
return _confid;
}
static int bindINS(VSAddr *sin,int nlisten)
{ int sock,rcode;
int sinlen;
sock = socket(AF_INET,SOCK_STREAM,0);
sinlen = VSA_size(sin);
if( bind(sock,(SAP)sin,sinlen) == 0 ){
listen(sock,nlisten);
return sock;
}else{
close(sock);
return -1;
}
}
static int connINS(VSAddr *sin)
{ int sock;
int sinlen;
sock = socket(AF_INET,SOCK_STREAM,0);
sinlen = VSA_size(sin);
if( connectTO(sock,(struct sockaddr*)sin,sinlen,100) == 0 ){
return sock;
}else{
close(sock);
return -1;
}
}
int RES_SYS_TIMEOUT = 3;
static struct hostent *_GethostByname(PCStr(name))
{ struct hostent *hp;
double St = Time();
double Et;
/*
hp = (struct hostent *)callFuncTimeout(3,NULL,(void*(*)(void*,...))EX_GETHOSTBYNAME,name);
*/
hp = (struct hostent *)callFuncTimeout(RES_SYS_TIMEOUT,NULL,(void*(*)(void*,...))EX_GETHOSTBYNAME,name);
Et = Time() - St;
if( 3 <= Et ){
fprintf(stderr,"[%d] slow gethostbyname(%s)=%X [%.2f]\n",getpid(),name,p2i(hp),Et);
debug(DBG_FORCE,"slow gethostbyname(%s)=%X [%.2f/%d]\n",name,p2i(hp),Et,RES_SYS_TIMEOUT);
}
return hp;
}
static struct hostent *getmyhost()
{ CStr(myname,256);
struct hostent *hp;
if( myhost.h_name != NULL )
return &myhost;
gethostname(myname,sizeof(myname));
if( hp = _GethostByname(myname) )
return hp;
myhost.h_name = stralloc(myname);
myhost.h_length = 4;
myhost.h_addr_list = (char**)malloc(sizeof(char*)*2);
myhost.h_addr_list[0] = (char*)calloc(1,4); /* 0.0.0.0 */
myhost.h_addr_list[1] = NULL;
return &myhost;
}
void RES_isself(int mysock)
{ int len;
VSAddr *self1;
if( elnumof(selfs) <= selfN )
return;
len = sizeof(VSAddr);
self1 = &selfs[selfN];
getsockname(mysock,(SAP)self1,&len);
debug(DBG_ANY,"self [%d] %s\n",selfN,VSA_ntoa(self1));
selfN++;
}
static int isself1(VSAddr *me,VSAddr *to)
{ int testsock;
VSAddr sin;
if( VSA_port(me) != VSA_port(to) )
return 0;
if( VSA_addr(me) == VSA_addr(to) )
return 1;
if( VSA_addrisANY(to) )
return 1;
if(!VSA_addrisANY(me) )
return 0; /* neither is wild card "0.0.0.0" */
/* now "me" is wildcard. so check if "to" is directed to "me" */
if( VSA_addr(to) == inet_addrV4("127.0.0.1") )
return 1;
sin = *to;
VSA_setport(&sin,0);
testsock = bindINS(&sin,1);
if( 0 <= testsock ){
close(testsock);
return 1;
}else return 0;
}
static int isself(VSAddr *to)
{ int si;
for( si = 0; si < selfN; si++ )
if( isself1(&selfs[si],to) )
return 1;
return 0;
}
void RES_nsloopcheck(int mysock)
{ int nsx,nsi,nsj;
int len;
VSAddr me,sin;
len = sizeof(VSAddr);
getsockname(mysock,(SAP)&me,&len);
nsx = _RES.nscount;
for( nsi = 0; nsi < nsx; ){
VSA_copy(&sin,(VSAddr*)&_RES.NSLIST(nsi));
if( isself1(&me,&sin) ){
for( nsj = nsi; nsj < nsx-1; nsj++ )
VSA_copy((VSAddr*)&_RES.NSLIST(nsj),(VSAddr*)&_RES.NSLIST(nsj+1));
VSA_zero((VSAddr*)&_RES.NSLIST(nsj));
_RES.nscount -= 1;
nsx -= 1;
debug(DBG_FORCE,"## removed self as NS[%d] %s:%d\n",
nsi,VSA_ntoa(&sin),VSA_port(&sin));
}else nsi++;
}
}
/* Parse an optional port suffix from a host specification.
 * Accepts "host:port" or "host..port"; on a match the bare host is
 * stored into the caller buffer nsb, *portp receives the parsed port,
 * and nsb is returned.  Otherwise ns is returned unchanged and *portp
 * keeps the caller-supplied default. */
const char *scanHostport(PCStr(ns),PVStr(nsb),int *portp){
strcpy(nsb,ns);
if( strchr(ns,':') ){
/* "host:port" form */
Xsscanf(ns,"%[^:]:%d",BVStr(nsb),portp);
return nsb;
}else
if( strstr(ns,"..") ){
/* "host..port" form (alternative separator) */
refQStr(np,nsb);
strcpy(nsb,ns);
if( np = strstr(nsb,"..") ){
setVStrEnd(np,0);
*portp = atoi(np+2);
return nsb;
}
}
return ns;
}
void RES_addns(VSAddr *ns);
/*
void RES_socks(PCStr(ns),PCStr(socks));
*/
void RES_socks(VSAddr *nsa,PCStr(socks));
void RES_ns1(State *res,PCStr(ns),PCStr(domain))
{
VSAddr sin;
int nsx;
struct hostent *hp;
const char *saddr;
const char *socks;
const char *cp;
CStr(nsb,256);
CStr(ssb,256);
CStr(nsbp,256);
int port;
if( ns_stop )
return;
if( *ns == '$' ){
if( ServerMain )
debug(DBG_FORCE,"---- RES_ns1(%s) ignored\n",ns);
/* RES_ORDER=D:$N as an internal format */
return;
}
putResTrace("NS(%s)",ns);
if( lDNS_SORT() ){
VSA_atosa(&sin,53,ns);
if( sizeof(res->NSLIST(nsx)) < VSA_size((VSAddr*)&sin) ){
/* should not copy IPv6 */
debug(DBG_FORCE,"#### RES_NS %d/%d %s\n",
isizeof(res->NSLIST(nsx)),VSA_size((VSAddr*)&sin),
VSA_ntoa((VSAddr*)&sin));
RES_addns(&sin);
return;
}
}
clear_confid();
nsx = -1;
if( strcmp(domain,RES_NSDOM0) == 0 ){
/* reserver the position */
if( res->nscount < MAXNS ){
nsx = res->nscount++;
VSA_zero((VSAddr*)&res->NSLIST(nsx));
}
}
ssb[0] = 0;
if( socks = strstr(ns,VIA_SOCKS) ){
strcpy(nsb,ns);
ns = nsb;
socks = strstr(ns,VIA_SOCKS);
truncVStr(socks);
if( cp = strpbrk(ns,"# \t") )
truncVStr(cp);
/*
Xsscanf(socks+strlen(VIA_SOCKS),"%[-_.0-9A-Z:]",AVStr(ssb));
*/
Xsscanf(socks+strlen(VIA_SOCKS),"%[-_.0-9A-Za-z:%%]",AVStr(ssb));
}
port = PORT_DNS;
ns = scanHostport(ns,AVStr(nsbp),&port);
/*
if( strchr(ns,':') ){
Xsscanf(ns,"%[^:]:%d",AVStr(nsbp),&port);
ns = nsbp;
}
*/
if( VSA_strisaddr(ns) )
VSA_atosa(&sin,port,ns);
else
if( hp = _GethostByname(ns) )
VSA_htosa(&sin,port,hp,0);
else
if( hp = _GETHOSTBYNAME(ns) )
VSA_htosa(&sin,port,hp,0);
else VSA_atosa(&sin,port,"255.255.255.255");
if( !VSA_isaddr(&sin) ){
debug(DBG_FORCE,"ERROR: unknown DNS server: %s\n",ns);
/* remove the reserved slot */
if( 0 <= nsx ){
res->nscount--;
for(; nsx < res->nscount; nsx++ )
res->NSLIST(nsx) = res->NSLIST(nsx+1);
}
return;
}
if( isself(&sin) ){
debug(DBG_FORCE,"## don't add self[%s:%d] as NS\n",
VSA_ntoa(&sin),VSA_port(&sin));
return;
}
saddr = VSA_ntoa(&sin);
RES_socks(&sin,ssb);
/*
RES_socks(saddr,ssb);
*/
if( strcmp(domain,RES_NSDOM0) == 0 ){
int nsi;
for( nsi = 0; nsi < res->nscount; nsi++ ){
if( VSA_comp(&sin,(VSAddr*)&res->NSLIST(nsi)) == 0 ){
debug(DBG_FORCE,"dup. RES_NS[%d]=%s:%d ignored\n",
nsi,VSA_ntoa(&sin),VSA_port(&sin));
if( 0 <= nsx && nsx+1 == res->nscount ){
/* remove the reserved slot */
res->nscount--;
}
return;
}
}
if( 0 <= nsx ){
if( lDNS_SORT() ){
RES_addns(&sin);
}
VSA_copy((VSAddr*)&res->NSLIST(nsx),&sin);
debug(DBG_ANY," RES_NS[%d]=%s/%s\n",nsx,saddr,domain);
}else debug(DBG_ANY," ignore RES_NS(%d)=%s/%s\n",MAXNS,ns,domain);
}else{
debug(DBG_ANY," RES_NS[%d]=%s/%s\n",nsx,saddr,domain);
set_nameserver(domain,saddr);
}
}
int DNS_connect(PCStr(addr),int port)
{ VSAddr sin;
VSA_atosa(&sin,port,addr);
return connINS(&sin);
}
int RES_getns1(int nsi,VSAddr *sin)
{ State *res;
res = &_RES;
if( nsi < _RES.nscount ){
VSA_copy(sin,(VSAddr*)&res->NSLIST(nsi));
return 1;
}
return 0;
}
const char *VSA_ntoa(VSAddr *sap);
int RES_getnslist(PVStr(list)){
refQStr(lp,list);
int ni;
VSAddr sin;
const char *addr;
for( ni = 0; ni < _RES.nscount; ni++ ){
if( RES_getns1(ni,&sin) ){
if( addr = VSA_ntoa(&sin) ){
Rsprintf(lp,"%s%s",0<ni?",":"",addr);
}
}
}
return ni;
}
/* acting as a proxy */
/*
 * Return nonzero iff DNS forwarding is active: at least one name
 * server is configured and 'D' (DNS) appears in the resolver order.
 */
int RES_proxy(){
	return (0 < _RES.nscount) && (strchr(Resolvers,'D') != NULL);
}
/* Split str into up to mc words of at most ez bytes each, stored back
 * to back in buf (an mc*ez byte array).  Words are delimited by
 * whitespace, '#' or ',' (',' was added in 9.9.8 for search/sortlist
 * lists).  Returns the number of words stored and logs a complaint if
 * input remains after mc words were consumed. */
int stoBV(PCStr(str),PVStr(buf),int mc,int ez){
const char *sp = str;
int sc;
refQStr(b1,buf); /**/
for(sc = 0; sc < mc; sc++){
/* b1 = slot sc of the output vector */
setQStr(b1,&buf[sc*ez],ez);
if( 0 ){
/* dead branch kept for reference: plain whitespace scanning */
sp = wordscanX(sp,ZVStr(b1,ez),ez);
}else{
/* 9.9.8 for search/sortlist delimited by "," */
while( isspace(*sp) || *sp == '#' || *sp == ',' ){
sp++;
}
sp = wordscanY(sp,ZVStr(b1,ez),ez,"^ \t\r\n#,");
}
if( *b1 == 0 )
break;
}
if( *sp != 0 ){
porting_dbg("##resolv.conf ignored too many elements (%d){%s}",
mc,str);
}
return sc;
}
void RES_scan_sortlist(PCStr(line));
void load_rslvconf(State *res,PCStr(path),int loadns)
{ FILE *fp;
CStr(line,1024);
CStr(com,1024);
CStr(arg,1024);
const char *dp;
ACStr(s,8,128);
int sc,si;
clear_confid();
debug(DBG_ANY,"load_rslvconf(%s)\n",path);
if( strncmp(path,"sh:",3) == 0 )
fp = popen(path+3,"r");
else
if( strncmp(path,"file:",5) == 0 )
fp = fopen(path+5,"r");
else fp = fopen(path,"r");
if( fp == NULL ){
CStr(buff,2048);
CStr(where,256);
if( regGetResolvConf(AVStr(buff),AVStr(where)) == 0 ){
fp = TMPFILE("regGetResolvConf");
fputs(buff,fp);
fflush(fp);
fseek(fp,0,0);
debug(DBG_ANY,"resolv.conf from registory: %s\n%s\n",
where,buff);
}
}
if( fp == NULL )
return;
while( fgets(line,sizeof(line),fp) != NULL ){
if( dp = strpbrk(line,"#;") )
truncVStr(dp);
if( Xsscanf(line,"%s %s",AVStr(com),AVStr(arg)) < 2 )
continue;
if( strcmp(com,"debug") == 0 ){
res->options |= RES_DEBUG;
}else
if( strcmp(com,"nameserver") == 0 ){
if( loadns ){
RES_ns1(res,arg,RES_NSDOM0);
}
}else
if( strcmp(com,"domain") == 0 ){
Xstrcpy(EVStr(res->defdname),arg);
res->options |= RES_DEFNAMES;
}else
if( strcmp(com,"ndots") == 0 ){
MIN_ABSNDOTS = atoi(arg);
}else
if( strcmp(com,"search") == 0 ){
/*
sc = sscanf (line,"%*s %s %s %s %s %s %s %s %s",s[0],s[1],s[2],s[3],s[4],s[5],s[6],s[7]);
*/
sc = stoBV(wordscanX(line,EVStr(s[0]),sizeof(s[0])),ZVStr((char*)s,sizeof(s)),8,128);
if( loadns && 0 < sc ){
Xstrcpy(EVStr(res->defdname),s[0]);
for( si = 0; si < MAXDNSRCH && si < sc; si++ ){
debug(DBG_ANY," RES_SRCH[%d] %s\n",si,s[si]);
res->dnsrch[si] = stralloc(s[si]);
}
res->dnsrch[sc] = 0;
res->options |= RES_DNSRCH;
res->options &= ~RES_DEFNAMES;
}
}else
if( strcmp(com,"sortlist") == 0 ){
RES_scan_sortlist(line);
}
}
if( strncmp(path,"sh:",3) == 0 )
pclose(fp);
else fclose(fp);
}
static scanListFunc ns1(PCStr(name))
{ CStr(nameb,128);
const char *domain;
if( strcaseeq(name,"END.") )
ns_stop = 1;
if( ns_stop )
return 0;
strcpy(nameb,name);
if( domain = strchr(nameb,'/') )
if( domain[1] == '/' )
domain = strchr(domain+2,'/');
if( domain ){
truncVStr(domain); domain++;
}else domain = RES_NSDOM0;
RES_ns1(&_RES,nameb,domain);
return 0;
}
int RES_ns(PCStr(nslist))
{
if( nslist )
scan_commaList(nslist,0,scanListCall ns1);
return 0;
}
void RES_af(PCStr(af)){
const char *tp;
for( tp = af; *tp; tp++ ){
if( *tp != '4' && *tp != '6' ){
debug(DBG_FORCE,"unknown RES_AF[%c] %s\n",*tp,af);
return;
}
}
RES_AF = stralloc(af);
}
void RES_verify(PCStr(verify))
{
RES_VERIFY = stralloc(verify);
}
void RES_verifyFaiure(PCStr(host),PVStr(badhost))
{ const char *sp;
char sc;
refQStr(dp,badhost); /**/
if( sp = RES_VERIFY ){
for( ; sc = *sp; sp++ ){
assertVStr(badhost,dp);
if( sc == '*' ){
strcpy(dp,host);
dp += strlen(dp);
}else{
setVStrPtrInc(dp,sc);
}
}
}
setVStrEnd(dp,0);
}
/* Configure resolver debugging from a string value.
 * "0" clears all resolver options and the debug mask;
 * "0x<hex>" replaces the mask; any other number is OR-ed into it. */
int RES_debug(PCStr(debug))
{
if( streq(debug,"0") ){
/* full reset, not just the debug bit */
_RES.options = 0;
DNS_debug = 0;
return 0;
}
_RES.options |= RES_DEBUG;
if( strncmp(debug,"0x",2) == 0 )
sscanf(debug+2,"%x",&DNS_debug);
else
DNS_debug |= atoi(debug);
return 0;
}
int RES_domain(PCStr(domain))
{
Xstrcpy(EVStr(_RES.defdname),domain);
return 0;
}
int RES_order(PCStr(order),xPVStr(porder))
{ int oi;
CStr(buff,RESOLVERS_SIZ);
minit_resconf();
clear_confid();
if( porder == NULL ){
setPStr(porder,buff,sizeof(buff));
}
strcpy(porder,Resolvers);
if( order == NULL )
return 0;
FStrncpy(Resolvers,order);
debug(DBG_ANY,"RES_order(%s,%s)\n",Resolvers,porder);
return 0;
}
int RES_conf(PCStr(path))
{
if( _RSLV_CONF == confpath )
free((char*)confpath);
_RSLV_CONF = confpath = stralloc(path);
return 0;
}
int RES_hosts(PCStr(path))
{
_HOSTSFILE = stralloc(path);
return 0;
}
void RES_add_sortlist1(PCStr(addr),char mask[],int len);
void RES_add_sortlist(const char *addrs[],char *masks[],int len)
{ int sx;
for( sx = 0; addrs[sx]; sx++)
RES_add_sortlist1(addrs[sx],masks?masks[sx]:NULL,len);
}
void RES_add_sortlist1(PCStr(addr),char mask[],int len)
{ AddrMask *sp;
int ai,nm;
CStr(dmask,32);
const unsigned char *ua;
const unsigned char *um;
if( MAX_SORTLIST <= Nsortlist+1 ){
debug(DBG_ANY,"!! exceed MAX_SORTLIST[%d]\n",MAX_SORTLIST);
return;
}
if( mask == NULL || mask[0] == 0 ){
mask = dmask;
switch( ((int)addr[0] & 0xC0) >> 6 ){
case 0: nm = 1; break;
case 2: nm = 2; break;
default: nm = 3; break;
}
for( ai = 0; ai < nm ; ai++ )
mask[ai] = 255;
for( ; ai < len && ai < sizeof(dmask); ai++ )
mask[ai] = 0;
}
ua = (unsigned char *)addr;
um = (unsigned char *)mask;
debug(DBG_ANY," sortlist[%d] %d.%d.%d.%d / %d.%d.%d.%d\n",
Nsortlist,
ua[0],ua[1],ua[2],ua[3],
um[0],um[1],um[2],um[3]);
sp = &sort_list[Nsortlist++];
for(ai = 0; ai < len; ai++){
sp->am_addr[ai] = addr[ai];
sp->am_mask[ai] = mask[ai];
}
}
void RES_scan_sortlist(PCStr(line))
{ ACStr(s,16,128);
CStr(addr,32);
CStr(mask,32);
int a[16],m[16];
int sc,si,ai;
CStr(ba,16);
CStr(bm,16);
int len = 4;
sc = stoBV(wordscanX(line,EVStr(s[0]),sizeof(s[0])),ZVStr((char*)s,sizeof(s)),16,128);
for( si = 0; si < sc; si++ ){
addr[0] = mask[0] = 0;
Xsscanf(s[si],"%[^/]/%s",AVStr(addr),AVStr(mask));
for( ai = 0; ai < len; ai++ )
a[ai] = m[ai] = 0;
sscanf(addr,"%d.%d.%d.%d",&a[0],&a[1],&a[2],&a[3]);
sscanf(mask,"%d.%d.%d.%d",&m[0],&m[1],&m[2],&m[3]);
for( ai = 0; ai < len; ai++ ){
ba[ai] = a[ai]; /**/
bm[ai] = m[ai]; /**/
}
RES_add_sortlist1(ba,bm,len);
}
}
int RES_ROUNDROBIN = 1;
void RES_roundrobin(PCStr(hosts))
{
if( strcmp(hosts,"*") == 0 )
RES_ROUNDROBIN = 1;
else RES_ROUNDROBIN = 0;
}
int sort_ipaddrs1(const char *addrs[],PCStr(ap),PCStr(mp));
/*
 * Order a NULL-terminated list of raw IP addresses for the resolver.
 * 1) If RES_ROUNDROBIN is enabled, rotate the list by a counter keyed
 *    on a per-list byte checksum, so repeated lookups cycle addresses.
 * 2) Stable-partition the list so that addresses matching the
 *    configured sortlist (address/mask prefixes) come first.
 * The list is permuted in place; no entries are added or removed.
 * Fixes vs. the previous version: an empty list no longer reaches the
 * "% sn" (division by zero), and the snapshot loop checks its bound
 * BEFORE writing (the old code wrote saddrs[256] out of bounds when
 * the list held 256 or more entries).
 */
void sort_ipaddrs(const char *addrs[])
{	int si,sx,hit;

	if( RES_ROUNDROBIN ){
		int sn,si,sx,so,rrx;
		const char *saddrs[256]; /**/
		const unsigned char *a1;

		rrx = 0;
		/* snapshot the list and accumulate a cheap checksum */
		for( sn = 0; sn < elnumof(saddrs) && (saddrs[sn] = addrs[sn]); sn++ ){
			a1 = (unsigned char *)saddrs[sn];
			rrx += a1[0] + a1[1] + a1[2] + a1[3];
		}
		if( 0 < sn ){ /* guard: empty list must not reach the % sn below */
			rrx %= 256;
			sx = lastRRX[rrx] % sn;
			lastRRX[rrx] += 1;
			so = 0;
			/* rotate: saddrs[sx..sn-1] first, then saddrs[0..sx-1] */
			for( si = sx; si < sn; si++ )
				addrs[so++] = saddrs[si];
			for( si = 0; si < sx; si++ )
				addrs[so++] = saddrs[si];
		}
	}
	hit = 0;
	for( si = 0; si < Nsortlist; si++ ){
		const char *ap;
		const char *mp;
		ap = sort_list[si].am_addr;
		mp = sort_list[si].am_mask;
		/* move matches of this prefix ahead of the unsorted remainder */
		hit += sort_ipaddrs1(&addrs[hit],ap,mp);
	}
}
#define btohl(ap) ((ap[0]<<24) | (ap[1]<<16) | (ap[2]<<8) | ap[3])
/* Stable-partition the NULL-terminated list addrs[] so that entries
 * whose IPv4 address matches sap under mask smp come first.
 * "done" is a local 1-byte buffer whose ADDRESS is used purely as a
 * sentinel value to mark slots already moved.  Returns the number of
 * matching addresses (they occupy addrs[0..hit-1] on return). */
int sort_ipaddrs1(const char *addrs[],PCStr(sap),PCStr(smp))
{ const unsigned char *ap = (unsigned char *)sap;
const unsigned char *mp = (unsigned char *)smp;
const char *cap;
const char *saddrs[256]; /**/
CStr(done,1);
int ciaddr,cimasked;
int sn,sx,dn,hit,imask;
/* target network = addr & mask, both in host byte order */
ciaddr = btohl(ap);
imask = btohl(mp);
cimasked = ciaddr & imask;
for( sn = 0; addrs[sn]; sn++ )
;
dn = 0;
/* first pass: collect matching addresses, marking their slots */
for( sx = 0; cap = addrs[sx]; sx++ ){
if( cap != done )
if( (btohl(cap) & imask) == cimasked ){
if( elnumof(saddrs) <= dn )
break;
saddrs[dn++] = (char*)cap;
addrs[sx] = done;
}
}
hit = dn;
/* second pass: append the unmarked (non-matching) addresses */
for( sx = 0; sx < sn; sx++ )
if( addrs[sx] != done )
saddrs[dn++] = addrs[sx];
/* write the partitioned order back in place */
for( sx = 0; sx < sn; sx++ )
addrs[sx] = saddrs[sx];
debug(DBG_ANY,"sort_ipaddrs(%d.%d.%d.%d/%d.%d.%d.%d) - %d/%d\n",
ap[0],ap[1],ap[2],ap[3],mp[0],mp[1],mp[2],mp[3],hit,sn);
return hit;
}
/*
 * Render the resolver option bits we care about as a space-terminated
 * list of names (e.g. "DEBUG DNSRCH ") into the caller's buffer.
 */
static void res_getoptions(int options,PVStr(soptions))
{
	static const struct { int bit; const char *name; } flags[] = {
		{ RES_DEBUG,    "DEBUG "    },
		{ RES_RECURSE,  "RECURSE "  },
		{ RES_DEFNAMES, "DEFNAMES " },
		{ RES_DNSRCH,   "DNSRCH "   },
	};
	int fi;

	setVStrEnd(soptions,0); /* start from an empty string */
	for( fi = 0; fi < (int)(sizeof(flags)/sizeof(flags[0])); fi++ ){
		if( options & flags[fi].bit )
			strcat(soptions,flags[fi].name);
	}
}
int RES_localdns;
void RES_init()
{ const char *env;
const char *conf;
CStr(savorder,RESOLVERS_SIZ);
int loadns;
struct hostent *hp;
CStr(options,1024);
int rx;
CStr(res1,RESOLVERS_SIZ);
CStr(arg,RESOLVERS_SIZ);
putResTrace("Init");
minit_resconf();
if( _RES.options & RES_INIT )
return;
_RES.options |= RES_INIT;
if( env = getenv("RES_DEBUG") )
RES_debug(env);
if( _RES.options & RES_DEBUG ){
if( DNS_debug == 0 )
DNS_debug = DBG_NS;
}
debug(DBG_ANY,"RES_init()\n");
FStrncpy(savorder,Resolvers);
strcpy(Resolvers,"FND");
FStrncpy(Resolvers,savorder);
if( env = getenv("RES_ORDER") )
FStrncpy(Resolvers,env);
debug(DBG_ANY," RES_ORDER=%s\n",Resolvers);
for( rx = 0; rx = RES_next_res(Resolvers,rx,AVStr(res1),AVStr(arg)); ){
if( res1[0] == 'D' && arg[0] != 0 )
RES_ns1(&_RES,arg,RES_NSDOM0);
}
if( env = getenv("RES_NS") )
RES_ns(env);
if( env = getenv("RES_VRFY") )
RES_verify(env);
/*
loadns = _RES.nscount == 0;
*/
loadns = 1; /* NSLIST is necessary for resolution of other NS ... */
if( (conf = getenv("RES_CONF")) == 0 )
conf = _RSLV_CONF;
load_rslvconf(&_RES,conf,loadns);
if( env = getenv("RES_HOSTS") )
RES_hosts(env);
if( Nsortlist == 0 )
if( hp = getmyhost() )
RES_add_sortlist((const char**)hp->h_addr_list,NULL,hp->h_length);
if( _RES.nscount == 0 )
/* registory about DNS should be searched on Windows ... */
if( hp = getmyhost() ){
const unsigned char *ap;
CStr(saddr,32);
int dnsock;
ap = (unsigned char*)hp->h_addr_list[0];
/* ap[3] = 0xFF; (broad cast in the segment) */
sprintf(saddr,"%d.%d.%d.%d",ap[0],ap[1],ap[2],ap[3]);
if( RES_localdns == 0 ){
if( 0 <= (dnsock = DNS_connect(saddr,PORT_DNS)) ){
close(dnsock);
RES_localdns = 1;
debug(DBG_FORCE,"Found local NS (%s:%d)\n",
saddr,PORT_DNS);
}else{
RES_localdns = -1;
debug(DBG_FORCE,"No local NS (%s:%d)\n",
saddr,PORT_DNS);
}
}
if( 0 < RES_localdns )
RES_ns1(&_RES,saddr,RES_NSDOM0);
}
if( (env = getenv("RES_DOMAIN")) || (env = getenv("LOCALDOMAIN")) ){
Xstrcpy(EVStr(_RES.defdname),env);
_RES.options &= ~RES_DNSRCH;
_RES.options |= RES_DEFNAMES;
}
if( _RES.defdname[0] )
debug(DBG_ANY," RES_DOMAIN=%s\n",_RES.defdname);
res_getoptions(_RES.options,AVStr(options));
debug(DBG_ANY," options = %s\n",options);
}
iFUNCP RES_debugprinter;
int FMT_res_debug(int flag,PCStr(fmt),...)
{ int now;
VARGS(14,fmt);
if( flag == DBG_FORCE || _RES.options & RES_DEBUG && DNS_debug & flag ){
now = time(0);
if( RES_debugprinter )
(*RES_debugprinter)(fmt,VA14);
else{
fprintf(stderr,"%02d:%02d ",(now%3600)/60,now%60);
fprintf(stderr,fmt,VA14);
}
return 1;
}else return 0;
}
int (*RES_log)(int,...);
void res_log(int which,int byname,PCStr(name),char *rv[],PCStr(cname))
{
if( RES_log )
(*RES_log)(which,byname,name,rv,cname);
}
/*
 * Print the canonical host name of hp followed by ",alias" for every
 * entry in its alias list (no trailing newline).
 */
static void putNames(struct hostent *hp)
{
	char **alias;

	printf("%s",hp->h_name);
	for( alias = hp->h_aliases; *alias != NULL; alias++ ){
		printf(",%s",*alias);
	}
}
/* Print the address list of hp, comma separated, formatting each raw
 * address with VSA_ltoa for its address family/length (so IPv6 entries
 * are rendered correctly, unlike the commented-out dotted-quad code). */
static void putAddrs(struct hostent *hp)
{ const unsigned char *op;
int hi;
for( hi = 0; ; hi++ ){
op = (unsigned char*)hp->h_addr_list[hi];
if( op == NULL )
break;
if( 0 < hi )
printf(",");
/*
printf("%d.%d.%d.%d",op[0],op[1],op[2],op[3]);
*/
printf("%s",VSA_ltoa(op,hp->h_length,hp->h_addrtype));
}
}
struct hostent *RES_gethost(PCStr(addrhost))
{ struct hostent *hp;
VSAddr sab;
int bleng,btype;
const char *baddr;
if( VSA_strisaddr(addrhost) ){
VSA_atosa(&sab,0,addrhost);
bleng = VSA_decomp(&sab,&baddr,&btype,NULL);
hp = _GETHOSTBYADDR(baddr,bleng,btype);
}else{
hp = _GETHOSTBYNAME(addrhost);
}
return hp;
}
void RES_1(int f_aton,FILE *fp,PCStr(arg))
{ struct hostent *hp;
if( f_aton ){
if( VSA_strisaddr(arg) ) /* is IP address */
if( hp = RES_gethost(arg) ){
printf("%s\n",hp->h_name);
return;
}
printf("%s\n",arg);
return;
}
hp = RES_gethost(arg);
if( hp ){
const unsigned char *op;
int hi;
putAddrs(hp);
printf("\t");
putNames(hp);
printf("\n");
}else{
if( VSA_strisaddr(arg) )
printf("?\t%s\n",arg);
else printf("%s\t?\n",arg);
/*exit(1);*/
}
}
int RES_1s(PCStr(addrhost),PVStr(addr_host))
{ struct hostent *hp;
const unsigned char *op;
CStr(addr,32);
if( hp = RES_gethost(addrhost) ){
op = (unsigned char *)hp->h_addr;
sprintf(addr,"%d.%d.%d.%d",op[0],op[1],op[2],op[3]);
sprintf(addr_host,"%s\t%s\n",addr,hp->h_name);
return 1;
}
return 0;
}
extern int RSLV_TIMEOUT;
int RES_timeout(int timeout)
{
RSLV_TIMEOUT = timeout;
return 1;
}
/*
void RES_socks(PCStr(ns),PCStr(socks))
*/
void RES_socks(VSAddr *nsa,PCStr(socks))
{ CStr(host,256);
int port;
const char *ns;
int nsport;
if( *socks == 0 )
return;
port = 1080;
socks = scanHostport(socks,AVStr(host),&port);
/*
Xsscanf(socks,"%[^:]:%d",AVStr(host),&port);
*/
if( !VSA_strisaddr(host) )
return;
ns = VSA_ntoa(nsa);
nsport = VSA_port(nsa);
debug(DBG_ANY,"SOCKS=%s:%d:%s..%d\n",host,port,ns,nsport);
SOCKS_addserv(ns,nsport,host,port);
/*
debug(DBG_ANY,"SOCKS=[%s][%s:%d]\n",ns,host,port);
SOCKS_addserv(ns,PORT_DNS,host,port);
*/
}
char **res_DNSRCH(){
if( (_RES.options & RES_DNSRCH) && _RES.dnsrch[0] )
return _RES.dnsrch;
else return NULL;
}
char *res_DEFDNAME(){
if( (_RES.options & RES_DEFNAMES) && _RES.defdname[0] )
return _RES.defdname;
else return NULL;
}
static void resolv1(PCStr(arg));
void dns_server(int qsock,int rsock);
void DO_INITIALIZE(int ac,const char *av[]);
int resolvy_main(int ac,const char *av[])
{ int ai;
int itvl;
const char *arg;
minit_resconf();
DO_INITIALIZE(ac,av);
if( ac <= 1 ){
fprintf(stderr,
"Usage -- %s [NS=nameserver] { domain-name | ip-address }\n",av[0]);
fprintf(stderr,
"<ip-address> can be specified as a range like: aa.bb.cc.dd-ee\n");
exit(-1);
}
itvl = 0;
for( ai = 1; ai < ac; ai++ ){
arg = av[ai];
if( strncmp(arg,"-d",2) == 0 ){
RES_debug(arg+2);
continue;
}else
if( strncmp(arg,"-i",2) == 0 ){
itvl = atoi(arg+2);
}else
if( strcmp(arg,"+n") == 0 ){
F_aton = 1;
continue;
}else
if( strncmp(arg,"NS=",3) == 0 ){
RES_ns(arg+3);
}else
if( strcmp(arg,"-") == 0 ){
CStr(line,256);
while( Fgets(AVStr(line),sizeof(line),stdin) != NULL ){
RES_1(F_aton,stdout,line);
fflush(stdout);
}
}else
if( strcmp(arg,"-s") == 0 ){
dns_server(0,1);
}else{
resolv1(arg);
if( itvl && ai+1 < ac )
sleep(itvl);
}
}
exit(0);
return 0;
}
/* Resolve a single command-line argument and print the result(s).
 * A "_-" prefix is skipped up to (and including) the first '.'.
 * "a.b.c.d1-d2" expands into a range of addresses.
 * NOTE(review): the range loop uses a4 < a42, so the upper bound d2
 * is excluded although the usage text reads like an inclusive range
 * -- confirm the intended semantics before changing it. */
static void resolv1(PCStr(arg))
{ int a1,a2,a3,a41,a42,a4;
CStr(addr,256);
if( strneq(arg,"_-",2) ){
const char *dp;
if( dp = strchr(arg+2,'.') )
arg = dp+1;
}
if( sscanf(arg,"%d.%d.%d.%d-%d",&a1,&a2,&a3,&a41,&a42) == 5 ){
/* expand the last-octet range */
for( a4 = a41; a4 < a42; a4++ ){
sprintf(addr,"%d.%d.%d.%d",a1,a2,a3,a4);
RES_1(F_aton,stdout,addr);
}
}else{
RES_1(F_aton,stdout,arg);
}
fflush(stdout);
}
| 8,956
|
https://github.com/warrenzhu25/SparkInsight/blob/master/src/main/scala/com/microsoft/spark/insight/cli/GridBenchCliMain.scala
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,020
|
SparkInsight
|
warrenzhu25
|
Scala
|
Code
| 248
| 852
|
package com.microsoft.spark.insight.cli
import java.net.URI
import java.util.concurrent.TimeUnit
import com.microsoft.spark.insight.cli.argument.{ArgumentConf, ReportGeneration}
import com.microsoft.spark.insight.cli.frontend._
import com.microsoft.spark.insight.utils.spark.CommonStringUtils.{FEEDBACK_LINK, FEEDBACK_LINK_TEMPLATE}
import com.microsoft.spark.insight.utils.spark.SparkHistoryServerMetricsRetriever
import org.rogach.scallop.exceptions.ScallopException
import org.slf4j.{Logger, LoggerFactory}
import scala.concurrent.duration.Duration
/**
* The entry point to GridBench CLI. Usage can be found in the DocIN documentation
*/
object GridBenchCliMain extends App {
val LOGGER: Logger = LoggerFactory.getLogger(GridBenchCliMain.getClass)
// Version string read from the jar manifest; null when not packaged as a jar.
private val cliVersion = GridBenchCliMain.getClass.getPackage.getImplementationVersion
private val versionInfo = cliVersion match {
case version: String => s"GridBench CLI Version: $version"
case _ => "No Version" // e.g. running from un-jarred classes in an IDE/test
}
private val userHomePath = System.getProperty("user.home")
println(s"Logging detailed information to $userHomePath/.gridbench/gridbench-cli.log\n$versionInfo\n")
LOGGER.info(s"$versionInfo")
// Wall-clock start, used to report total processing time at the end.
private val startTime = System.currentTimeMillis
private var argumentConf: Option[ArgumentConf] = None
// Left(status/length) on success, Right(exception) on failure.
private val result: Either[Int, Exception] =
try {
val argConf = ArgumentConf(args)
argumentConf = Option(argConf)
// If the user specifies version option, Cli program returns immediately
if (argConf.version()) {
Left(0)
} else {
// parse SHS url addr from user input
val shsURL = GridBenchCliUtil.constructShsUrl(argConf.shsEndpoint())
implicit val sparkMetricsRetriever: SparkHistoryServerMetricsRetriever =
new SparkHistoryServerMetricsRetriever(new URI(shsURL))
// Match sub command to trigger different service
argConf.subcommand match {
case Some(subcommand: ReportGeneration) =>
val output = subcommand.genReport
println(output)
Left(output.length)
case _ =>
argConf.printHelp()
Left(0)
}
}
} catch {
case e: Exception =>
val message = s"GridBench CLI failed. Reason: $e"
if (!e.isInstanceOf[ScallopException]) {
// Silence these known exceptions, as we don't want users to receive meaningless stack traces, but still log
println(s"[Error] \u274C $message")
}
LOGGER.error(message)
Right(e)
}
private val duration = Duration(System.currentTimeMillis - startTime, TimeUnit.MILLISECONDS)
println(s"\nTotal processing time: ${duration.toSeconds} seconds")
println(s"$FEEDBACK_LINK_TEMPLATE ${FEEDBACK_LINK.blue.bold}")
// Empty out the buffer to the cli screen
Console.out.flush()
}
| 3,243
|
https://github.com/light0x00/mybatis-ext/blob/master/mybatis-ext-core/src/test/java/io/github/light0x00/mybatisext/User.java
|
Github Open Source
|
Open Source
|
MIT
| null |
mybatis-ext
|
light0x00
|
Java
|
Code
| 65
| 191
|
package io.github.light0x00.mybatisext;
import io.github.light0x00.mybatisext.annotations.Column;
import io.github.light0x00.mybatisext.annotations.TableName;
import lombok.Data;
/**
 * Test entity mapped to the {@code test.user} table.
 *
 * <p>Getters, setters, {@code equals} and {@code hashCode} are generated by
 * Lombok's {@code @Data}; {@link #toString()} is overridden explicitly below.
 */
@Data
@TableName(schema = "test")
public class User {
    /** Primary key column. */
    @Column(primary = true)
    private Long pkId;
    private String name;
    private Integer age;
    private String email;

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("User{");
        sb.append("pkId=").append(pkId);
        sb.append(", name='").append(name).append('\'');
        sb.append(", age=").append(age);
        sb.append(", email='").append(email).append('\'');
        sb.append('}');
        return sb.toString();
    }
}
| 18,816
|
https://github.com/takaakit/design-pattern-examples-in-scala/blob/master/src/main/scala/creationalPatterns/prototype/framework/Manager.scala
|
Github Open Source
|
Open Source
|
CC0-1.0
| 2,021
|
design-pattern-examples-in-scala
|
takaakit
|
Scala
|
Code
| 64
| 188
|
// ˅
package creationalPatterns.prototype.framework
import scala.collection.mutable.Map
// ˄
/**
 * Registry of prototype Display instances, keyed by name.
 *
 * getDisplay never hands out the stored prototype itself; it always
 * returns a clone created by the prototype.
 */
class Manager {
  // Mutable map holding the registered prototypes.
  private val prototypes: Map[String, Display] = Map.empty[String, Display]

  /** Register a prototype under the given name (replacing any previous one). */
  def registerDisplay(displayName: String, display: Display): Unit =
    prototypes.put(displayName, display)

  /** Look up the prototype for displayName and return a fresh clone of it.
   *  Throws NoSuchElementException if no prototype was registered. */
  def getDisplay(displayName: String): Display =
    prototypes(displayName).createClone()
}
// ˅
// ˄
| 35,045
|
https://github.com/almhirte/terralimb/blob/master/Data/Buff.cs
|
Github Open Source
|
Open Source
|
MIT
| 2,017
|
terralimb
|
almhirte
|
C#
|
Code
| 85
| 264
|
using System.ComponentModel;
namespace TerraLimb
{
/// <summary>
/// One active buff on a character: its id, remaining time and inventory slot.
/// The DefaultValue attributes describe the serialized defaults.
/// </summary>
public class Buff
{
[DefaultValue("")] public string BuffDescription;
[DefaultValue(-1)] public int BuffID;
[DefaultValue("")] public string BuffName;
[DefaultValue(0)] public int BuffTime;
[DefaultValue(0)] public int Slot;
/// <summary>Parameterless constructor for (de)serialization.</summary>
public Buff()
{
}
/// <summary>
/// Builds a buff for a slot; negative times are clamped to 0.
/// NOTE(review): name/description are looked up in Constants.Buffs
/// without checking Constants.Loaded (unlike ToString) -- confirm
/// callers only construct buffs after the constants are loaded.
/// </summary>
public Buff(int id, int time, int slot)
{
Slot = slot;
BuffID = id;
if (time < 0)
BuffTime = 0;
else
BuffTime = time;
BuffName = Constants.Buffs[BuffID].BuffName;
BuffDescription = Constants.Buffs[BuffID].BuffDescription;
}
/// <summary>
/// Display name: the cached name when set, otherwise a lookup when the
/// constants are loaded, otherwise the empty string.
/// </summary>
public override string ToString()
{
if (BuffName != null)
return BuffName;
if (Constants.Loaded)
return Constants.Buffs[BuffID].BuffName;
return string.Empty;
}
}
}
| 12,454
|
https://github.com/jakehyvonen/DeviceBatchGenerics/blob/master/DeviceBatchGenerics/Instruments/PRCameraController.cs
|
Github Open Source
|
Open Source
|
MIT
| null |
DeviceBatchGenerics
|
jakehyvonen
|
C#
|
Code
| 615
| 2,058
|
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO.Ports;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using DeviceBatchGenerics.Support.DataMapping;
namespace DeviceBatchGenerics.Instruments
{
public class PRCameraController
{
bool isInitializing = true;
bool isRecordingELSpec = false;
ManualResetEvent[] dataReceivedEvent = new ManualResetEvent[1] { new ManualResetEvent(false) };
SerialPort serialPort;
public EventHandler DataToParse;
public bool DataReceivedBool = false;
public bool ExceededMeasurementRange = false;
public string ReceivedData;
public List<ELSpecDatum> PresentELSpec = new List<ELSpecDatum>();
public string InitialSerialResponseTerminator;
public string SerialResponseTerminator;
public string InitialCommand;
public string TimeOutResponse = "timed out";
public void Initialize(string initResponse, string response, string initCommand, string comport = "COM2", int baud = 9600)
{
SetupSerialPort(comport, baud);
InitialSerialResponseTerminator = initResponse;
SerialResponseTerminator = response;
InitialCommand = initCommand;
EstablishConnection();
}
private void SetupSerialPort(string comport, int baud)
{
try
{
serialPort = new SerialPort(comport, baud, Parity.None, 8, StopBits.One);
serialPort.Open();
serialPort.DtrEnable = true;
serialPort.RtsEnable = true;
serialPort.DataReceived += TheSerialPort_DataReceived;
}
catch (Exception e)
{
Debug.WriteLine(e.ToString());
}
}
public async void EstablishConnection()
{
string response = await SendCommandAndWaitForResponse(InitialCommand, 1111);
Debug.WriteLine("EstablishConnection response: " + response);
response = await SendCommandAndWaitForResponse(InitialCommand, 1111);
Debug.WriteLine("EstablishConnection response: " + response);
if (response == TimeOutResponse)
{
System.Windows.MessageBox.Show("Please turn on the PhotoResearch Camera");
EstablishConnection();
}
else
{
System.Diagnostics.Debug.WriteLine("PRCamera ACKed");
}
}
#region Measurement Tasks
public async Task<PRCamRawLuminanceDatum> LuminanceMeasurement()
{
return ParseM1String(await SendCommandAndWaitForResponse("M1"));
}
public async Task<List<ELSpecDatum>> ELSpecMeasurement(bool usingM1Reading = false)
{
isRecordingELSpec = true;
if (usingM1Reading)
await SendCommandAndWaitForResponse("D5");//D5 doesn't take a new measurement, only fetches Radiance data
else
await SendCommandAndWaitForResponse("M5");//take measurement and return radiance curve
return PresentELSpec;
}
public async Task<string> SendCommandAndWaitForResponse(string command, int timeoutms = 33333)
{
if (command.Substring(command.Length - 1) == "5")//if the last character is 5, we should expect a spectrum
{
Debug.WriteLine("Recording EL Spec");
//serialPort.DiscardInBuffer();
await Task.Delay(111);
}
return await Task.Run(() =>
{
Debug.WriteLine("Sending command to PR650: " + command);
string response = TimeOutResponse;
dataReceivedEvent = new ManualResetEvent[1] { new ManualResetEvent(false) };
Debug.WriteLine("isRecordingELSpec1 = " + isRecordingELSpec);
SendCommand(command);
var eventResponse = WaitHandle.WaitAny(dataReceivedEvent, timeoutms);
if (eventResponse != WaitHandle.WaitTimeout)
response = ReceivedData;
Debug.WriteLine("response: " + response);
return response;
}
);
}
private Task SendCommand(string command)
{
return Task.Run(() =>
{
DataReceivedBool = false;
byte[] commandBytes = Encoding.ASCII.GetBytes(command);
commandBytes = addByteToEndOfArray(commandBytes, 0x0D);//0x0D=carriage return in ASCII
serialPort.Write(commandBytes, 0, commandBytes.Count());
}
);
}
#endregion
private void TheSerialPort_DataReceived(object sender, SerialDataReceivedEventArgs e)
{
Debug.WriteLine("isRecordingELSpec2 = " + isRecordingELSpec);
if (isInitializing)
{
ReceivedData = serialPort.ReadTo(InitialSerialResponseTerminator); //read until MODE after initialization command
Debug.WriteLine("Initial ReceivedData: " + ReceivedData);
serialPort.ReadExisting();//clear the buffer
serialPort.DiscardInBuffer();//maybe this is more appropriate
if (ReceivedData.Count() > 2)
{
isInitializing = false;
Debug.WriteLine("done initializing");
}
}
else if (isRecordingELSpec)
{
Debug.WriteLine("Began recording EL Spectrum");
PresentELSpec = new List<ELSpecDatum>();
ReceivedData = serialPort.ReadTo(SerialResponseTerminator); //read until CR LF (0x0D 0x0A)
ReceivedData = serialPort.ReadTo(SerialResponseTerminator); //read until CR LF (0x0D 0x0A)
bool reached780nm = false;
while (!reached780nm)
{
string specPoint = serialPort.ReadTo(SerialResponseTerminator);
Debug.WriteLine("specPoint: " + specPoint);
ELSpecDatum datum = ParsedSpecString(specPoint);
PresentELSpec.Add(datum);
if (datum.Wavelength == 780)
{
reached780nm = true;
isRecordingELSpec = false;
Debug.WriteLine("Successfully recorded EL Spectrum");
}
}
}
else
{
ReceivedData = serialPort.ReadTo(SerialResponseTerminator); //read until CR LF (0x0D 0x0A)
Debug.WriteLine("ReceivedData: " + ReceivedData);
}
DataReceivedBool = true;
DataToParse?.Invoke(this, EventArgs.Empty);
dataReceivedEvent[0].Set();
serialPort.DiscardInBuffer();
}
private byte[] addByteToEndOfArray(byte[] bArray, byte newByte)
{
byte[] newArray = new byte[bArray.Length + 1];
bArray.CopyTo(newArray, 0);
newArray[bArray.Length] = newByte;
return newArray;
}
#region Data Processing
private PRCamRawLuminanceDatum ParseM1String(string s)
{
PRCamRawLuminanceDatum datum = new PRCamRawLuminanceDatum();
try
{
string[] data = s.Split(',');
if (data[0].Contains("19"))
{
ExceededMeasurementRange = true;
Debug.WriteLine("Exceeded camera measurement range");
}
else
{
datum.Luminance = Convert.ToDecimal(Convert.ToDouble(data[2]));//can't directly convert to decimal because reasons
datum.CIEx = Convert.ToDecimal(data[3]);
datum.CIEy = Convert.ToDecimal(data[4]);
}
}
catch (Exception e)
{
Debug.WriteLine(e.ToString());
}
return datum;
}
private ELSpecDatum ParsedSpecString(string specstring)
{
ELSpecDatum datum = new ELSpecDatum();
try
{
string[] array = specstring.Split(',');
datum.Wavelength = Convert.ToDouble(array[0]);
datum.Intensity = Convert.ToDouble(array[1]);
}
catch (Exception e)
{
System.Windows.Forms.MessageBox.Show("problem with data format: " + e.ToString());
}
return datum;
}
#endregion
}
}
| 34,510
|
https://github.com/thefringeninja/EventStore.Plugins/blob/master/src/EventStore.Plugins/ConfigParser.cs
|
Github Open Source
|
Open Source
|
Apache-2.0
| null |
EventStore.Plugins
|
thefringeninja
|
C#
|
Code
| 198
| 605
|
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using Serilog;
using YamlDotNet.RepresentationModel;
using YamlDotNet.Serialization;
namespace EventStore.Plugins {
public static class ConfigParser {
/// <summary>
/// Deserializes a section of configuration from a given config file into the provided settings type
/// </summary>
/// <param name="configPath">The path to the configuration file</param>
/// <param name="sectionName">The section to deserialize</param>
/// <typeparam name="T">The type of settings object to create from the configuration</typeparam>
public static T ReadConfiguration<T>(string configPath, string sectionName) where T : class {
try {
var yamlStream = new YamlStream();
var stringReader = new StringReader(File.ReadAllText(configPath));
try {
yamlStream.Load(stringReader);
} catch (Exception ex) {
throw new Exception(
$"An invalid configuration file has been specified. {Environment.NewLine}{ex.Message}");
}
var yamlNode = (YamlMappingNode)yamlStream.Documents[0].RootNode;
if (!string.IsNullOrEmpty(sectionName)) {
Func<KeyValuePair<YamlNode, YamlNode>, bool> predicate = x =>
x.Key.ToString() == sectionName && x.Value is YamlMappingNode;
var nodeExists = yamlNode.Children.Any(predicate);
if (nodeExists) yamlNode = (YamlMappingNode)yamlNode.Children.First(predicate).Value;
}
if (yamlNode == null) return default;
using (var stream = new MemoryStream())
using (var writer = new StreamWriter(stream))
using (var reader = new StreamReader(stream)) {
new YamlStream(new YamlDocument(yamlNode)).Save(writer);
writer.Flush();
stream.Position = 0;
return new Deserializer().Deserialize<T>(reader);
}
} catch (FileNotFoundException ex) {
Log.Error(ex, "Cannot find the specified config file {0}.", configPath);
throw;
}
}
}
}
| 30,643
|
https://github.com/openworm/org.geppetto.recording/blob/master/org/geppetto/recording/creators/tests/wormsim_recordings/transformations/matrix_anchored_31S_00727.mat
|
Github Open Source
|
Open Source
|
MIT
| 2,015
|
org.geppetto.recording
|
openworm
|
Unity3D Asset
|
Code
| 496
| 2,117
|
1 0 0 0
0 1 0 0
0 0 1 0
-13.7162 50.1071 7.04618 1
0.347377 -0.946018 0.0811866 0
0.700756 0.249962 -0.0857068 0
0.0793921 0.113191 0.979243 0
-112.474 243.557 18.835 1
0.686492 -0.739339 0.0657125 0
0.549631 0.504615 -0.0644584 0
0.0189344 0.104967 0.983187 0
-99.3089 182.47 15.2996 1
0.922592 -0.52859 0.0393723 0
0.373052 0.64678 -0.0582518 0
0.00625591 0.080377 0.932504 0
-73.202 142.274 14.1849 1
1.0218 -0.296636 0.0085756 0
0.208614 0.716905 -0.058424 0
0.013135 0.0722206 0.9331 0
-40.9956 116.013 15.6826 1
1.06385 0.00236515 -0.0189359 0
-0.00236422 0.747881 -0.0394167 0
0.0165247 0.0493066 0.934537 0
6.18687 94.5328 12.5925 1
1.07 0.206756 -0.034468 0
-0.142975 0.734403 -0.0331005 0
0.0205753 0.0449449 0.908322 0
39.9337 88.615 11.2582 1
1.00897 0.410301 -0.0495785 0
-0.282602 0.693446 -0.0124118 0
0.0326263 0.029559 0.9086 0
78.1795 89.6023 6.75581 1
0.953131 0.52726 -0.0486507 0
-0.362992 0.654934 -0.0135399 0
0.0275425 0.0340496 0.908611 0
101.483 94.9618 6.98595 1
0.958359 0.548578 -0.0375956 0
-0.372622 0.649033 -0.028198 0
0.00966186 0.0443856 0.893948 0
102.918 96.7137 10.0984 1
0.960889 0.541721 -0.0635052 0
-0.367654 0.652463 0.00281163 0
0.0464682 0.0223333 0.893615 0
101.539 96.6771 2.60715 1
0.967284 0.531779 -0.0487269 0
-0.361327 0.655808 -0.0156145 0
0.0255848 0.0353829 0.894035 0
98.396 97.4542 7.25187 1
1.00717 0.469094 -0.0337179 0
-0.316796 0.677846 -0.0324152 0
0.00816402 0.0462421 0.887197 0
82.0595 95.1854 11.4049 1
1.09379 0.193755 -0.0406664 0
-0.130766 0.737409 -0.00377355 0
0.0312235 0.0100802 0.887833 0
19.7357 90.7517 2.79262 1
1.10242 0.13729 -0.0372444 0
-0.0931404 0.742877 -0.0185482 0
0.0268104 0.0255247 0.887667 0
6.91428 93.2775 7.36235 1
1.11092 0.0504212 -0.0317816 0
-0.0348685 0.747377 -0.0331216 0
0.0235217 0.0403735 0.88625 0
-13.0395 97.589 11.8626 1
1.0797 -0.267387 -0.0213419 0
0.180055 0.726955 0.00125539 0
0.016168 -0.00553687 0.887317 0
-85.0697 120.725 -0.429689 1
1.05998 -0.336428 -0.0308145 0
0.225907 0.71372 -0.0213415 0
0.0310737 0.0166808 0.88678 0
-99.6193 130.41 7.97829 1
1.04376 -0.369874 -0.0356796 0
0.2488 0.705505 -0.0353225 0
0.0411082 0.0300931 0.890608 0
-106.529 136.433 13.3191 1
1.00627 -0.463451 -0.012093 0
0.313297 0.680243 0.000295386 0
0.00869667 -0.00439273 0.892011 0
-128.869 150.897 -1.11588 1
1.0126 -0.448819 -0.0269896 0
0.302857 0.684605 -0.0219055 0
0.0304345 0.0150593 0.891418 0
-124.282 149.949 8.08566 1
1.01677 -0.409164 -0.0300906 0
0.2788 0.694648 -0.0249081 0
0.0342156 0.0186369 0.902736 0
-114.525 145.173 9.60882 1
1.05412 -0.300971 -0.0196198 0
0.20558 0.720152 -0.00193795 0
0.0161896 -0.00219046 0.903429 0
-86.9593 130.159 -0.155086 1
1.08289 -0.170557 -0.019958 0
0.116354 0.739777 -0.00878801 0
0.0178961 0.00791656 0.903364 0
-51.3345 116.897 2.67551 1
1.0767 0.00469592 -0.0177731 0
-0.00336262 0.748892 -0.00584522 0
0.0152042 0.00727236 0.922991 0
-0.46413 105.402 1.70608 1
1.06316 0.170616 -0.0138012 0
-0.118515 0.739403 0.0110794 0
0.0138446 -0.0116109 0.922969 0
49.8016 101.921 -5.79504 1
0.999508 0.400748 -0.000600324 0
-0.27861 0.694911 0.0189851 0
0.00918637 -0.0215293 0.922849 0
123.243 110.895 -9.92478 1
0.876329 0.566568 0.00791512 0
-0.406404 0.628117 0.0344964 0
0.0178341 -0.0409318 0.955399 0
186.654 133.221 -16.6467 1
0.859413 0.5919 0.00879908 0
-0.424525 0.615832 0.0376215 0
0.0206201 -0.0441392 0.9552 0
195.251 138.521 -18.0777 1
0.856238 0.596539 -0.00337419 0
-0.426986 0.613159 0.0510354 0
0.0397895 -0.0517142 0.954213 0
196.407 140.169 -23.7161 1
0.929539 0.361907 -0.0705678 0
-0.266185 0.696788 0.0672108 0
0.0981341 -0.0583384 0.993462 0
116.976 112.749 -27.1927 1
| 35,033
|
https://github.com/ethz-asl/maplab_summer/blob/master/applications/maplab-console/src/maplab-console.cc
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,020
|
maplab_summer
|
ethz-asl
|
C++
|
Code
| 393
| 1,293
|
#include "maplab-console/maplab-console.h"
#include <dlfcn.h>
#include <cstdlib>
#include <fstream> // NOLINT
#include <string>
#include <unordered_set>
#include <gflags/gflags.h>
#include <maplab-common/file-system-tools.h>
#include <maplab-common/string-tools.h>
#include <visualization/rviz-visualization-sink.h>
DEFINE_bool(ros_free, false, "Enable this flag to run on systems without ROS");
namespace maplab {
MapLabConsole::MapLabConsole(
const std::string& console_name, int argc, char** argv)
: common::Console(console_name) {
setSelectedMapKey("");
discoverAndInstallPlugins(argc, argv);
}
MapLabConsole::~MapLabConsole() {
uninstallAllPlugins();
for (void*& plugin_handle : plugin_handles_) {
CHECK_NOTNULL(plugin_handle);
CHECK_EQ(dlclose(plugin_handle), 0);
}
}
void MapLabConsole::discoverAndInstallPlugins(int argc, char** argv) {
// Plotter can be nullptr if --ros_free has been specified.
const char* plugin_list = std::getenv("MAPLAB_CONSOLE_PLUGINS");
CHECK(plugin_list != nullptr && plugin_list[0] != '\0')
<< "$MAPLAB_CONSOLE_PLUGINS isn't defined. Please source your workspace.";
// Find plugins to install.
std::vector<std::string> plugin_list_vector;
constexpr char kDelimiter = ';';
constexpr bool kRemoveEmpty = true;
common::tokenizeString(
plugin_list, kDelimiter, kRemoveEmpty, &plugin_list_vector);
if (plugin_list_vector.empty()) {
LOG(WARNING) << "No plugin can be loaded as no plugins were definded in "
<< "the environment variable $MAPLAB_CONSOLE_PLUGINS. Make "
<< "sure that you built a plugin and that your workspace is "
<< "sourced.";
}
// Dynamically load found plugins.
std::vector<std::pair</*plugin_handle=*/void*, /*plugin_file=*/std::string>>
try_load_plugins_handle;
for (const std::string& plugin_name : plugin_list_vector) {
void* handle = dlopen(plugin_name.c_str(), RTLD_LAZY);
if (handle == nullptr) {
LOG(ERROR) << "Failed to load library " << plugin_name
<< ". Error message: " << dlerror();
LOG(ERROR) << "The plugin may not be installed properly. Please try to "
<< "reinstall the plugin. If the plugin comes from a catkin "
<< "package, run\n"
<< "\tcatkin build --no-deps --force-cmake <plugin_package>\n"
<< "and then try again.";
continue;
}
try_load_plugins_handle.emplace_back(handle, plugin_name);
}
// Now that all plugins are loaded we can parse the flags and add them to the
// completion index.
google::ParseCommandLineFlags(&argc, &argv, true);
addAllGFlagsToCompletion();
if (!FLAGS_ros_free) {
visualization::RVizVisualizationSink::init();
plotter_.reset(new visualization::ViwlsGraphRvizPlotter);
LOG(INFO) << "RVIZ visualization initialized!";
}
for (const std::pair<void*, std::string>& handle_lib :
try_load_plugins_handle) {
void* handle = handle_lib.first;
common::PluginCreateFunction create_function =
common::PluginCreateFunction(dlsym(handle, "createConsolePlugin"));
common::PluginDestroyFunction destroy_function =
common::PluginDestroyFunction(dlsym(handle, "destroyConsolePlugin"));
if (create_function == nullptr || destroy_function == nullptr) {
LOG(ERROR) << "Error loading the functions from plugin "
<< handle_lib.second << ". Error message: " << dlerror()
<< "\nMake sure that your plugin implements the functions "
<< "\"ConsolePluginBase* "
<< "createConsolePlugin(common::Console*, "
<< "visualization::ViwlsGraphRvizPlotter)\" and \"void "
<< "destroyConsolePlugin(common::ConsolePluginBase*)";
CHECK_EQ(dlclose(handle), 0);
continue;
}
plugin_handles_.emplace_back(handle);
// Create and install plugin.
common::ConsolePluginPtr plugin(
create_function(this, plotter_.get()), destroy_function);
VLOG(1) << "Installed plugin " << plugin->getPluginId() << " from "
<< handle_lib.second << ".";
installPlugin(std::move(plugin));
}
}
} // namespace maplab
| 1,943
|
https://github.com/lpdw/projet0_mmiworkshop/blob/master/spec/factories/admin.rb
|
Github Open Source
|
Open Source
|
MIT
| 2,022
|
projet0_mmiworkshop
|
lpdw
|
Ruby
|
Code
| 19
| 62
|
FactoryGirl.define do
factory :admin, class: User do
first_name "Admin"
last_name "User"
email "test@testmail.com"
password "adminadmin"
admin true
end
end
| 9,453
|
https://github.com/jam1garner/mexTool/blob/master/mexTool/GUI/ThemeColors.cs
|
Github Open Source
|
Open Source
|
MIT
| 2,021
|
mexTool
|
jam1garner
|
C#
|
Code
| 189
| 532
|
using System.Collections.Generic;
using System.Drawing;
namespace HSDRawViewer.GUI
{
public static class ThemeColors
{
public static Color TabColor = Color.FromArgb(80, 80, 100);
public static Color TabColorSelected = Color.FromArgb(40, 40, 60);
public static List<Color> MainColorList = new List<Color>() {
Color.FromArgb(0x3F, 0x51, 0xB5), // Blue
Color.FromArgb(0x00, 0x96, 0x88), // Green
Color.FromArgb(0xFF, 0x57, 0x22), // Orange
Color.FromArgb(0xd8, 0x00, 0x73), // Pink
Color.FromArgb(106, 0, 255), // Indigo
};
public static List<Color> SecondColorList
{
get
{
if(_secondColorList == null)
{
_secondColorList = new List<Color>();
foreach (var c in MainColorList)
_secondColorList.Add(ChangeColorBrightness(c, -0.25));
}
return _secondColorList;
}
}
public static List<Color> _secondColorList;
private static Color ChangeColorBrightness(Color color, double correctionFactor)
{
double red = color.R;
double green = color.G;
double blue = color.B;
//If correction factor is less than 0, darken color.
if (correctionFactor < 0)
{
correctionFactor = 1 + correctionFactor;
red *= correctionFactor;
green *= correctionFactor;
blue *= correctionFactor;
}
//If correction factor is greater than zero, lighten color.
else
{
red = (255 - red) * correctionFactor + red;
green = (255 - green) * correctionFactor + green;
blue = (255 - blue) * correctionFactor + blue;
}
return Color.FromArgb(color.A, (byte)red, (byte)green, (byte)blue);
}
}
}
| 44,769
|
https://github.com/MondoAurora/ARK/blob/master/platforms/cpp/modules/dust/Text/TextAgentsCore.h
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,020
|
ARK
|
MondoAurora
|
C++
|
Code
| 57
| 208
|
#ifndef TEXTAGENTSCORE_H
#define TEXTAGENTSCORE_H
#include <DustApi.h>
using namespace std;
class TextDictionary : public DustTextDictionary {
map<string, DustEntity> words;
public:
virtual ~TextDictionary();
virtual DustEntity getTextToken(const char* name, DustEntity txtParent = DUST_ENTITY_INVALID);
virtual DustResultType DustResourceInit();
virtual DustResultType DustResourceRelease();
};
class TextLogicCore: public DustNativeLogic {
private:
public:
TextLogicCore();
virtual ~TextLogicCore();
virtual DustResultType DustResourceInit();
virtual DustResultType DustActionExecute();
};
#endif // TEXTAGENTSCORE_H
| 12,666
|
https://github.com/ruygonzalez/Parstagram/blob/master/app/src/main/java/me/ruygonzalez/parstagram/ProfileFragment.java
|
Github Open Source
|
Open Source
|
Apache-2.0
| null |
Parstagram
|
ruygonzalez
|
Java
|
Code
| 451
| 1,547
|
package me.ruygonzalez.parstagram;
import android.content.Intent;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v7.widget.GridLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.TextView;
import com.bumptech.glide.Glide;
import com.bumptech.glide.request.RequestOptions;
import com.parse.FindCallback;
import com.parse.ParseException;
import com.parse.ParseFile;
import com.parse.ParseQuery;
import com.parse.ParseUser;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import jp.wasabeef.glide.transformations.RoundedCornersTransformation;
import me.ruygonzalez.parstagram.model.Post;
public class ProfileFragment extends Fragment {
private Button btnLogout;
private TextView tvPosts;
private TextView tvFollowers;
private TextView tvFollowing;
private TextView tvUsername;
private ImageView ivProfilePic;
private final int REQUEST_CODE = 20;
ArrayList<Post> posts;
GridAdapter gridAdapter;
RecyclerView rvGrid;
// The onCreateView method is called when Fragment should create its View object hierarchy,
// either dynamically or via XML layout inflation.
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup parent, Bundle savedInstanceState) {
// Defines the xml file for the fragment
return inflater.inflate(R.layout.fragment_profile, parent, false);
}
// This event is triggered soon after onCreateView().
// Any view setup should occur here. E.g., view lookups and attaching view listeners.
@Override
public void onViewCreated(View view, Bundle savedInstanceState) {
// Setup any handles to view objects here
// EditText etFoo = (EditText) view.findViewById(R.id.etFoo);
btnLogout = (Button) view.findViewById(R.id.btnLogout);
btnLogout.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
logout();
}
});
tvPosts = (TextView) view.findViewById(R.id.tvPosts);
tvFollowers = (TextView) view.findViewById(R.id.tvFollowers);
tvFollowing = (TextView) view.findViewById(R.id.tvFollowing);
tvUsername = (TextView) view.findViewById(R.id.tvUsername);
ivProfilePic = (ImageView) view.findViewById(R.id.ivProfilePic);
final ParseUser user = ParseUser.getCurrentUser();
// set textviews to display certain text
//generate random numbers
Random rand = new Random();
int postsnum = rand.nextInt(25)+1;
int followers = rand.nextInt(1000)+1;
int following = rand.nextInt(500)+1;
// find the RecyclerView
rvGrid = (RecyclerView) view.findViewById(R.id.rvGrid);
// init the arraylist (data source)
posts = new ArrayList<>();
// construct the adapter from this datasource
gridAdapter = new GridAdapter(posts);
// RecyclerView setup (layout manager, use adapter)
rvGrid.setLayoutManager(new GridLayoutManager(this.getActivity(), 3));
// set the adapter
rvGrid.setAdapter(gridAdapter);
populateTimeline();
// place texts in views
tvFollowers.setText(Integer.toString(followers));
tvFollowing.setText(Integer.toString(following));
try {
tvUsername.setText(user.fetchIfNeeded().getString("username"));
} catch (ParseException e) {
e.printStackTrace();
}
// Round the corners of the profile images
final RoundedCornersTransformation roundedCornersTransformation
= new RoundedCornersTransformation(600, 15);
final RequestOptions requestOptions = RequestOptions.bitmapTransform(roundedCornersTransformation);
// load image
if((ParseFile)user.get("profilepic") == null){
/*Glide.with(this)
.load("/desktop/profilepicturedef.png")
.apply(requestOptions)
.into(ivProfilePic);*/
}
else {
Glide.with(this)
.load(((ParseFile) user.get("profilepic")).getUrl().toString())
.apply(requestOptions)
.into(ivProfilePic);
}
}
private void logout(){
ParseUser.logOutInBackground();
// want to go to Log In (main) Activity with intent after successful log out
final Intent intent = new Intent(this.getActivity(), LoginActivity.class);
startActivity(intent);
}
private void populateTimeline(){
// Define the class we would like to query
ParseQuery<Post> query = ParseQuery.getQuery(Post.class);
query.whereEqualTo("user",ParseUser.getCurrentUser());
// get the latest 20 messages, order will show up newest to oldest of this group
query.orderByDescending("createdAt");
// Execute the find asynchronously
query.findInBackground(new FindCallback<Post>() {
public void done(List<Post> itemList, ParseException e) {
if (e == null) {
gridAdapter.clear();
// add new items to your adapter
gridAdapter.addAll(itemList);
tvPosts.setText(Integer.toString(itemList.size()));
rvGrid.scrollToPosition(0);
} else {
Log.d("item", "Error: " + e.getMessage());
}
}
});
}
}
| 26,143
|
https://github.com/Laurelinex/flatland-model-diagram-editor/blob/master/flatland/input/model_visitor.py
|
Github Open Source
|
Open Source
|
MIT
| 2,021
|
flatland-model-diagram-editor
|
Laurelinex
|
Python
|
Code
| 513
| 1,394
|
""" model_visitor.py """
from arpeggio import PTNodeVisitor
class SubsystemVisitor(PTNodeVisitor):
# Elements
def visit_nl(self, node, children):
return None
def visit_sp(self, node, children):
return None
def visit_mult(self, node, children):
"""Binary association (not association class) multiplicity"""
mult = node.value # No children because literal 1 or M is thrown out
return mult
def visit_acword(self, node, children):
"""All caps word"""
return node.value # No children since this is a literal
def visit_icaps_name(self, node, children):
"""Model element name"""
name = ''.join(children)
return name
def visit_class_name(self, node, children):
name = ''.join(children)
return {'name': name }
def visit_keyletter(self, node, children):
"""Abbreviated keyletter name of class"""
return { 'keyletter': children[0] }
def visit_import(self, node, children):
"""Imported class marker"""
d = {'import': children[0]}
return d
def visit_class_header(self, node, children):
"""Beginning of class section, includes name, optional keyletter and optional import marker"""
items = {k: v for d in children for k, v in d.items()}
return items
def visit_subsystem_header(self, node, children):
"""Beginning of sybsystem section"""
abbr = None if len(children) == 1 else children[1]
return {'subsys_name': children[0], 'abbr': abbr}
def visit_body_line(self, node, children):
"""Lines that we don't need to parse yet, but eventually will"""
# TODO: These should be attributes and actions
body_text_line = children[0]
return body_text_line
def visit_phrase(self, node, children):
"""Phrase on one side of a binary relationship phrase"""
phrase = ''.join(children)
return phrase
def visit_assoc_class(self, node, children):
"""Association class name and multiplicity"""
return { "assoc_mult": children[0], "assoc_cname": children[1] }
def visit_t_side(self, node, children):
"""T side of a binary association"""
return {node.rule_name: {"phrase": children[0], "mult": children[1], "cname": children[2]}}
def visit_p_side(self, node, children):
"""P side of a binary association"""
return {node.rule_name: {"phrase": children[0], "mult": children[1], "cname": children[2]}}
def visit_rname(self, node, children):
"""The Rnum on any relationship"""
return {"rnum": children[0]}
def visit_superclass(self, node, children):
"""Superclass in a generalization relationship"""
return children[0]
def visit_subclass(self, node, children):
"""Subclass in a generalization relationship"""
return children[0]
def visit_gen_rel(self, node, children):
"""Generalization relationship"""
return {"superclass": children[0], "subclasses": children[1:]}
def visit_binary_rel(self, node, children):
"""Binary relationship with or without an association class"""
items = {k: v for d in children for k, v in d.items()}
return items
def visit_rel(self, node, children):
"""Relationship rnum and rel data"""
return {**children[0], **children[1]}
def visit_method_block(self, node, children):
"""Methods (unparsed)"""
# TODO: Parse these eventually
return {"methods": children}
def visit_attr_block(self, node, children):
"""Attribute text (unparsed)"""
# TODO: Parse these eventually
return {"attributes": children}
def visit_class_set(self, node, children):
"""All of the classes"""
return children
def visit_class_block(self, node, children):
"""A complete class with attributes, methods, state model"""
# TODO: No state models yet
class_attrs = children[0] | children[1]
block = class_attrs if len(children) == 2 else class_attrs | children[2]
return block
def visit_rel_section(self, node, children):
"""Relationships section with all of the relationships"""
return children
# Metadata
def visit_text_item(self, node, children):
return children[0], False # Item, Not a resource
def visit_resource_item(self, node, children):
return ''.join(children), True # Item, Is a resource
def visit_item_name(self, node, children):
return ''.join(children)
def visit_data_item(self, node, children):
return { children[0]: children[1] }
def visit_metadata(self, node, children):
"""Meta data section"""
items = {k: v for c in children for k, v in c.items()}
return items
# Root
def visit_subsystem(self, node, children):
"""The complete subsystem"""
return children
| 34,977
|
https://github.com/jj6990/ciclo/blob/master/node_modules/st/test/gzip-after-no-gzip.js
|
Github Open Source
|
Open Source
|
MIT
| 2,022
|
ciclo
|
jj6990
|
JavaScript
|
Code
| 110
| 376
|
global.options = {
cachedHeader: true // inspect to see if something is served from cache
}
const zlib = require('zlib')
const { req, stExpect } = require('./common.js')
const { test } = require('tap')
test('does not gzip first response', (t) => {
req('/test/st.js', { 'accept-encoding': 'none' }, (er, res, body) => {
t.equal(res.statusCode, 200)
t.notOk(res.headers['content-encoding'])
t.notOk(res.headers['x-from-cache'])
t.equal(body.toString(), stExpect)
t.end()
})
})
test('gzips second response', (t) => {
req('/test/st.js', { 'accept-encoding': 'gzip' }, (er, res, body) => {
t.error(er, 'no error')
t.equal(res.statusCode, 200)
t.equal(res.headers['content-encoding'], 'gzip')
t.equal(res.headers['x-from-cache'], 'true')
t.ok(body, 'returned a body')
t.notEqual(body.toString(), stExpect, 'gzipped string')
zlib.gunzip(body, (er, body) => {
if (er) {
throw er
}
t.equal(body.toString(), stExpect)
t.end()
})
})
})
| 9,026
|
https://github.com/kirichoi/tellurium/blob/master/examples/tellurium-files/omex/CombineArchiveShowCase.py
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,021
|
tellurium
|
kirichoi
|
Python
|
Code
| 76
| 237
|
"""
Example Running the Combine Showcase Archive.
https://github.com/SemsProject/CombineArchiveShowCase
"""
# TODO: FIXME: import omex from test data
# TODO: create results dir
# redirect backend, so plots only in files and not shown
import matplotlib.pyplot
matplotlib.pyplot.switch_backend("Agg")
# running all SED-ML simulations in archive
# outputs are stored next to the respective SED-ML files in the workingDir
import os.path
from tellurium.sedml.tesedml import executeOMEX, executeSEDML
omexDir = os.path.dirname(os.path.realpath(__file__))
omexPath = os.path.join(omexDir, "CombineArchiveShowCase.omex")
workingDir = os.path.join(omexDir, "./results/_te_CombineArchiveShowCase")
executeOMEX(omexPath, workingDir=workingDir)
| 20,517
|
https://github.com/kappapidelta/Apress-AI/blob/master/sports_timetabling.py
|
Github Open Source
|
Open Source
|
MIT
| 2,022
|
Apress-AI
|
kappapidelta
|
Python
|
Code
| 562
| 2,285
|
from random import randint, choice
def compute_weeks(T,P):
from math import ceil
nbTeams = sum([1 for sub in T for e in sub])
nbIntra = P[0]
nbInter = P[1]
nbPerWeek = P[2]
nbGames = 0
nbWeeks = 0
d = 1000
for i in range(len(T)):
nb = len(T[i])
d = min(d,nb)
nbGames += nb*(nb-1)/2 * nbIntra
for j in range(i+1,len(T)):
nbGames += nb * len(T[j]) * nbInter
nbWeeks = nbGames//d//nbPerWeek
return int(nbWeeks)
def gen_data(m,n):
R,team=[],0
for i in range(m):
RR=[]
nb = choice(n)
for j in range(nb):
RR.append(team)
team = team+1
R.append(RR)
X=randint(1,3)
Y=randint(1,X)
feasible = False
Z=randint(1,5)
while not feasible:
Q=compute_weeks(R,(X,Y,Z))
if Q<30:
feasible=True
else:
Z=Z+1
return R,(X,Y,Z,Q)
from my_or_tools import ObjVal, SolVal, newSolver, pairs
def solve_model(Teams,params):
(nbIntra,nbInter,nbPerWeek,nbWeeks) = params
nbTeams = sum([1 for sub in Teams for e in sub])
nbDiv,Cal = len(Teams),[]
s = newSolver('Sports schedule', True)
x = [[[s.IntVar(0,1,'') if i<j else None
for _ in range(nbWeeks)]
for j in range(nbTeams)] for i in range(nbTeams-1)]
for Div in Teams:
for i in Div:
for j in Div:
if i<j:
s.Add(sum(x[i][j][w] for w in range(nbWeeks))==nbIntra)
for d in range(nbDiv-1):
for e in range(d+1,nbDiv):
for i in Teams[d]:
for j in Teams[e]:
s.Add(sum(x[i][j][w] for w in range(nbWeeks))==nbInter)
for w in range(nbWeeks):
for i in range(nbTeams):
s.Add(sum(x[i][j][w] for j in range(nbTeams) if i<j) +
sum(x[j][i][w] for j in range(nbTeams) if j<i )\
<=nbPerWeek)
Value=[x[i][j][w] for Div in Teams for i in Div for j in Div \
for w in range(nbWeeks-len(Div)*nbIntra//nbPerWeek,nbWeeks)\
if i<j]
s.Maximize(sum(Value))
rc = s.Solve()
if rc == 0:
Cal=[[(i,j) \
for i in range(nbTeams-1) for j in range(i+1,nbTeams)\
if SolVal(x[i][j][w])>0] for w in range(nbWeeks)]
return rc,ObjVal(s),Cal
def add_intra(s,Teams,nbWeeks,nbIntra,x):
for Div in Teams:
for i in Div:
for j in Div:
if i<j:
s.Add(sum(x[i][j][w] for w in range(nbWeeks)) == nbIntra)
def add_inter(s,Teams,nbDiv,nbWeeks,nbInter,x):
for d in range(nbDiv-1):
for e in range(d+1,nbDiv):
for i in Teams[d]:
for j in Teams[e]:
s.Add(sum(x[i][j][w] for w in range(nbWeeks)) == nbInter)
def add_games_bound(s,nbWeeks,nbTeams,nbPerWeek,x):
for w in range(nbWeeks):
for i in range(nbTeams):
s.Add(sum(x[i][j][w] for j in range(nbTeams) if i<j) +
sum(x[j][i][w] for j in range(nbTeams) if j<i ) <= nbPerWeek)
def add_objective(s,Teams,nbWeeks,x,nbIntra,nbPerWeek):
Value=[x[i][j][w] for Div in Teams for i in Div for j in Div \
for w in range(nbWeeks-len(Div)*nbIntra//nbPerWeek,nbWeeks) if i<j]
return Value
def basic_model(s,Teams,nbTeams,nbWeeks,nbPerWeek,nbIntra,nbDiv,nbInter,cuts,x):
add_intra(s,Teams,nbWeeks,nbIntra,x)
add_inter(s,Teams,nbDiv,nbWeeks,nbInter,x)
add_games_bound(s,nbWeeks,nbTeams,nbPerWeek,x)
for t,w in cuts:
s.Add(sum(x[p[0]][p[1]][w[0]] for p in pairs(t,[])) <= w[1])
Value = add_objective(s,Teams,nbWeeks,x,nbIntra,nbPerWeek)
s.Maximize(s.Sum(Value))
def solve_model_big(Teams,params):
(nbIntra,nbInter,nbPerWeek,nbWeeks) = params
nbTeams = sum([1 for sub in Teams for e in sub])
nbDiv,cuts = len(Teams),[]
for iter in range(2):
s = newSolver('Sports schedule', False)
x = [[[s.NumVar(0,1,'') if i<j else None
for _ in range(nbWeeks)]
for j in range(nbTeams)] for i in range(nbTeams-1)]
basic_model(s,Teams,nbTeams,nbWeeks,nbPerWeek,nbIntra,\
nbDiv,nbInter,cuts,x)
rc = s.Solve()
bounds = {(3,1):1, (4,1):2, (5,1):2, (5,3):7}
if nbPerWeek <= 3:
for w in range(nbWeeks):
for i in range(nbTeams-2):
for j in range(i+1,nbTeams-1):
for k in range(j+1,nbTeams):
b = bounds.get((3,nbPerWeek),1000)
if sum([SolVal(x[p[0]][p[1]][w]) \
for p in pairs([i,j,k],[])])>b:
cuts.append([[i,j,k],[w,b]])
for l in range(k+1,nbTeams):
b = bounds.get((4,nbPerWeek),1000)
if sum([SolVal(x[p[0]][p[1]][w]) \
for p in pairs([i,j,k,l],[])])>b:
cuts.append([[i,j,k,l],[w,b]])
for m in range(l+1, nbTeams):
b = bounds.get((5,nbPerWeek),1000)
if sum([SolVal(x[p[0]][p[1]][w]) \
for p in pairs([i,j,k,l,m],[])])>b:
cuts.append([[i,j,k,l,m],[w,b]])
else:
break
s = newSolver('Sports schedule', True)
x = [[[s.IntVar(0,1,'') if i<j else None
for _ in range(nbWeeks)]
for j in range(nbTeams)] for i in range(nbTeams-1)]
basic_model(s,Teams,nbTeams,nbWeeks,nbPerWeek,nbIntra,\
nbDiv,nbInter,cuts,x)
rc,Cal = s.Solve(),[]
if rc == 0:
Cal=[[(i,j) \
for i in range(nbTeams-1) for j in range(i+1,nbTeams)\
if SolVal(x[i][j][w])>0] for w in range(nbWeeks)]
return rc,ObjVal(s),Cal
| 37,439
|
https://github.com/thombergs/ids4slf4j/blob/master/src/test/java/io/reflectoring/descriptivelogger/LoggerFactoryTests.java
|
Github Open Source
|
Open Source
|
MIT
| 2,020
|
ids4slf4j
|
thombergs
|
Java
|
Code
| 47
| 229
|
package io.reflectoring.descriptivelogger;
import org.junit.jupiter.api.Test;
/** @author Tom Hombergs */
class LoggerFactoryTests {
@Test
void constructorWithStringParamWorks() {
BasicDescriptiveLogger log = LoggerFactory.getLogger(BasicDescriptiveLogger.class, "mylogger");
log.simpleLogMessage();
}
@Test
void constructorWithClassParamWorks() {
BasicDescriptiveLogger log =
LoggerFactory.getLogger(BasicDescriptiveLogger.class, LoggerFactory.class);
log.simpleLogMessage();
}
@Test
void constructorWithLoggerParamWorks() {
BasicDescriptiveLogger log =
LoggerFactory.getLogger(
BasicDescriptiveLogger.class, org.slf4j.LoggerFactory.getLogger(LoggerFactory.class));
log.simpleLogMessage();
}
}
| 46,834
|
https://github.com/usc-isi-i2/kgtk/blob/master/wikidata/patterns/date-property-20101201-or-later-pattern.tsv
|
Github Open Source
|
Open Source
|
MIT
| 2,023
|
kgtk
|
usc-isi-i2
|
TSV
|
Code
| 9
| 38
|
node1 label node2
date_property unknown True
date_property mindate ^2010-12-01T00:00:00Z
| 34,690
|
https://github.com/sarangnx/peedia2.0-dashboard/blob/master/src/views/Users/Users.vue
|
Github Open Source
|
Open Source
|
MIT
| null |
peedia2.0-dashboard
|
sarangnx
|
Vue
|
Code
| 676
| 2,574
|
<template>
<div>
<base-header type="gradient-success" class="pb-6 pb-8 pt-5 pt-md-8">
</base-header>
<div class="container-fluid mt--7">
<div class="row">
<div class="col-12">
<div class="card shadow">
<div class="card-header d-flex justify-content-between flex-column flex-md-row align-items-center">
<h3>Users</h3>
<div class="d-flex align-items-center justify-content-around flex-column flex-md-row">
<!-- FILTER BY USERGROUP -->
<base-button size="sm" v-if="pageLoading"><i class="ni ni-settings-gear-65 spin"></i></base-button>
<base-dropdown v-else position="right" class="mb-2 mb-md-0">
<base-button slot="title" type="primary" class="dropdown-toggle" size="sm">
{{ usergroup.name || 'User group' }}
</base-button>
<a class="dropdown-item text-black"
v-for="(item, index) in activeUsergroups"
:key="index"
@click="usergroup = Object.assign({}, item)"
>
{{ item.name }}
</a>
</base-dropdown>
</div>
</div> <!-- Outer Header -->
<div class="card-body table-responsive p-0 custom__scrollbar">
<base-table
:data="users"
type="hover table-striped table-sm"
>
<template slot="columns">
<th class="text-left">Name</th>
<th>Email</th>
<th>Phone</th>
<th></th>
</template>
<template slot-scope="{row}">
<td class="text-left">
{{ row.name || 'N/A' }}
</td>
<td>
{{ row.email || 'N/A' }}
</td>
<td>
{{ row.phone || 'N/A' }}
</td>
<td>
<div v-if="currentUsergroup.rank > 2 && ( row.usergroup === 'delivery' || row.usergroup === 'storeowner')">
<base-button
v-if="!row.store.length && !storeLoading && ( row.usergroup === 'delivery' || row.usergroup === 'storeowner')"
icon="fa fa-store"
size="sm"
type="success"
title="Add User to a store in their localbody. Localbody store has to be created first."
@click="addStore(row.user_id)"
></base-button>
<loading v-if="storeLoading === row.user_id" size="sm"/>
</div>
</td>
</template>
</base-table> <!-- Table -->
<div class="over__lay d-flex align-items-center" v-if="loading">
<loading color="dark"/>
</div>
</div> <!-- card body -->
<div class="card-footer">
<div class="d-flex justify-content-end mb-3">
<base-button type="success" @click="addModal = true">
<font-awesome-icon icon="plus" class="mr-2"/>
Create User
</base-button>
</div>
<base-pagination
:page-count="total_pages"
v-model="page"
align="center">
</base-pagination>
</div> <!-- card footer -->
</div> <!-- Outer Card -->
</div>
</div>
</div>
<!-- ADD USER MODAL -->
<modal :show.sync="addModal" modalClasses="modal-dialog-scrollable" :clickOut="false">
<template slot="header">
<h1 class="modal-title">Add User</h1>
</template>
<div class="container">
<add-user :key="Date.now()"
@close="closeModal"
:localbodies.sync="localbodies"
:districts.sync="districts"
></add-user>
</div>
</modal>
</div>
</template>
<script>
import AddUser from './AddUser';
export default {
name: 'users',
components: {
AddUser,
},
data: () => ({
page: 1,
per_page: 20,
count: 0,
users: [],
total_pages: 0,
usergroup: { id: 'user', name: 'Customers', rank: 0 },
pageLoading: null,
loading: null,
usergroups: [
{ id: 'user', name: 'Customers', rank: 0 },
{ id: 'delivery', name: 'Delivery', rank: 1 },
{ id: 'storeowner', name: 'Manager', rank: 2 },
{ id: 'admin', name: 'Admin', rank: 3 },
{ id: 'superadmin', name: 'Super Admin', rank: 4 },
],
addModal: false,
localbodies: [],
districts: [],
storeLoading: null,
currentUserProfile: {},
}),
computed: {
currentUser() {
return this.$store.getters.getUser;
},
activeUsergroups() {
const currentUsergroup = this.currentUser.usergroup;
const currentGroup = this.usergroups.find((item) => item.id === currentUsergroup );
return this.usergroups.filter((usergroup) => {
return usergroup.rank < currentGroup.rank;
});
},
currentUsergroup() {
const currentUsergroup = this.currentUser.usergroup;
const currentGroup = this.usergroups.find((item) => item.id === currentUsergroup );
return currentGroup;
}
},
watch: {
page() {
this.refreshPage();
},
usergroup() {
this.refreshPage();
}
},
methods: {
getUsers(page, per_page, usergroup = null, localbody_id = null) {
this.loading = true;
this.$axios({
method: 'get',
url: '/users/profiles',
params: {
page,
per_page,
...(usergroup && { usergroup }),
localbody_id
},
}).then((response) => {
const data = response.data.data;
this.users = data.rows;
this.count = data.count;
this.total_pages = data.total_pages;
}).finally(() => {
this.loading = false;
});
},
getCurrentUserProfile() {
const userId = this.currentUser.user_id;
this.$axios({
method: 'get',
url: `/users/profile/${userId}`,
}).then((response) => {
const data = response.data.data;
this.currentUserProfile = data.user;
if(
this.currentUserProfile.localbody &&
this.currentUserProfile.localbody.localbody_id &&
this.currentUserProfile.usergroup != 'admin' &&
this.currentUserProfile.usergroup != 'superadmin'
) {
this.getUsers(
this.page,
this.per_page,
this.usergroup.id,
this.currentUserProfile.localbody.localbody_id
);
} else {
this.getUsers(
this.page,
this.per_page,
this.usergroup.id
);
}
});
},
listLocalbodies() {
this.$axios({
method: 'get',
url: '/localbodies/list',
}).then((response) => {
const localbodies = response.data.localbodies.rows;
this.localbodies = localbodies;
});
},
listDistricts() {
this.$axios({
method: 'get',
url: '/localbodies/districts',
}).then((response) => {
const districts = response.data.districts;
this.districts = districts.rows;
});
},
refreshPage() {
if(
this.currentUserProfile.localbody &&
this.currentUserProfile.localbody.localbody_id &&
this.currentUserProfile.usergroup != 'admin' &&
this.currentUserProfile.usergroup != 'superadmin'
) {
this.getUsers(
this.page,
this.per_page,
this.usergroup.id,
this.currentUserProfile.localbody.localbody_id
);
} else {
this.getUsers(
this.page,
this.per_page,
this.usergroup.id
);
}
},
addStore(user_id) {
this.storeLoading = user_id;
this.$axios({
method: 'post',
url: '/users/store/add',
data: {
user_id,
}
}).then((response) => {
if(response.data && response.data.status === 'success'){
this.$success('Added to Store.');
} else {
throw new Error('User not added to Store.');
}
}).catch(() => {
this.$error('User not added to Store.');
}).finally(() => {
this.storeLoading = null;
this.refreshPage();
});
},
closeModal() {
this.addModal = false;
this.refreshPage();
}
},
mounted() {
this.getCurrentUserProfile()
this.listLocalbodies();
this.listDistricts();
}
};
</script>
<style scoped>
th, td {
text-align: center;
}
</style>
| 44,964
|
https://github.com/eait-itig/rdp_proto/blob/master/c_src/bitmap.h
|
Github Open Source
|
Open Source
|
BSD-2-Clause
| 2,020
|
rdp_proto
|
eait-itig
|
C
|
Code
| 468
| 1,063
|
/*
%%
%% rdpproxy
%% remote desktop proxy
%%
%% Copyright 2012-2015 Alex Wilson <alex@uq.edu.au>
%% The University of Queensland
%% All rights reserved.
%%
%% Redistribution and use in source and binary forms, with or without
%% modification, are permitted provided that the following conditions
%% are met:
%% 1. Redistributions of source code must retain the above copyright
%% notice, this list of conditions and the following disclaimer.
%% 2. Redistributions in binary form must reproduce the above copyright
%% notice, this list of conditions and the following disclaimer in the
%% documentation and/or other materials provided with the distribution.
%%
%% THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
%% IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
%% OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
%% IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
%% INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
%% NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
%% DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
%% THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
%% (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
%% THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
%%
*/
#if !defined(_BITMAP_H)
#define _BITMAP_H
#include <stdint.h>
#include <assert.h>
#include <string.h>
#include <stdlib.h>
#include "erl_nif.h"
struct stream {
ErlNifBinary bin;
int pos;
};
typedef uint32_t UINT32;
typedef uint16_t UINT16;
typedef int BOOL;
typedef uint8_t BYTE;
#define INLINE inline
#define TRUE 1
#define FALSE 0
#define CopyMemory memmove
static inline void *
_aligned_malloc(size_t s, size_t align)
{
size_t pad = s % align;
if (pad > 0)
pad = align - pad;
void *ptr = malloc(s + pad);
assert((uint64_t)ptr % align == 0);
return ptr;
}
static inline void
_aligned_free(void *ptr)
{
free(ptr);
}
static inline void
init_stream(struct stream *s, int size)
{
assert(size < s->bin.size);
s->pos = 0;
}
static inline int
get_pos(struct stream *s)
{
return s->pos;
}
static inline void
out_uint8(struct stream *s, uint8_t v)
{
assert(s->pos + 1 < s->bin.size);
s->bin.data[s->pos++] = v;
}
static inline void
out_uint16_le(struct stream *s, uint16_t v)
{
assert(s->pos + 2 < s->bin.size);
s->bin.data[s->pos++] = v & 0xff;
s->bin.data[s->pos++] = (v >> 8) & 0xff;
}
static inline void
out_uint8a(struct stream *s, char *data, int n)
{
assert(s->pos + n < s->bin.size);
memcpy(&s->bin.data[s->pos], data, n);
s->pos += n;
}
static inline char *
get_ptr(struct stream *s)
{
return (char *)&s->bin.data[s->pos];
}
static inline char *
get_data(struct stream *s)
{
return (char *)s->bin.data;
}
int xrdp_bitmap_compress(char *in_data, int width, int height,
struct stream *s, int bpp, int byte_limit,
struct stream *temp_s);
BOOL bitmap_decompress(BYTE* srcData, BYTE* dstData, int width, int height, int size, int srcBpp, int dstBpp);
#endif
| 12,511
|
https://github.com/AspirinCode/md-cookbook/blob/master/scripts/seed_from_pdb
|
Github Open Source
|
Open Source
|
MIT
| 2,016
|
md-cookbook
|
AspirinCode
|
Python
|
Code
| 240
| 941
|
#! /usr/bin/env python
"""
Use a PDB file to solvate, minimize, and equilibrate a simulation.
"""
import os
from os.path import basename
from glob import iglob
from mdcookbook.core import solvate, get_sim, get_ff, get_state
from mdcookbook.utils import Timing, serialize, get_args
from simtk.openmm import Platform, Vec3
from simtk.openmm.app import PDBReporter, PDBFile
def run(pdb, n_clones, temp, boxsize, n_solv,
max_iter, n_steps, ion_content, platform, device):
name = basename(pdb).split('.')[0]
if platform == "CUDA":
props = {'CudaPrecision': 'mixed', 'CudaDeviceIndex': device}
elif platform == "OpenCL":
props = {'OpenCLPrecision': 'mixed', 'OpenCLDeviceIndex': device}
else:
props = {}
platform = Platform.getPlatformByName(platform)
with Timing('Load files'):
pdb = PDBFile(pdb)
forcefield = get_ff()
with Timing('Solvation'):
if n_solv is not None:
wat_box = solvate(pdb.positions, pdb.topology, forcefield,
ion_content, numAdded=n_solv)
else:
wat_box = solvate(pdb.positions, pdb.topology, forcefield,
ion_content, boxSize=box_size)
minimizer, _, _ = get_sim(wat_box.positions, wat_box.topology, temp,
forcefield, Platform.getPlatformByName('CPU'),
{})
with Timing('Minimization'):
minimizer.minimizeEnergy(maxIterations=1000)
simulation, system, integrator = get_sim(
minimizer.context.getState(getPositions=True).getPositions(),
wat_box.topology, temp, forcefield, platform, props)
if not os.path.exists('./solv_pdb/'):
os.makedirs('./solv_pdb/')
simulation.reporters.append(
PDBReporter('./solv_pdb/%s.pdb' % name, n_steps))
with Timing('Equilibration'):
simulation.context.setVelocitiesToTemperature(temp)
simulation.step(n_steps)
with Timing('Serialization'):
serialize(system, name, 'system.xml')
serialize(integrator, name, 'integrator.xml')
for i in range(n_clones):
serialize(get_state(simulation, temp), name, 'state%d.xml' % i)
print("Done!")
def parse_cmdln():
parser = get_args()
parser.add_argument('-f', '--input', dest='pdb',
help='Glob string to find PDB files.',
required=True)
args = parser.parse_args()
return args
if __name__ == "__main__":
options = parse_cmdln()
structures = iglob(options.pdb)
box_size = None
exec('box_size = %s' % options.box_size)
if not isinstance(box_size, Vec3):
raise TypeError('box_size must be expressed as Vec3')
for pdb in structures:
run(pdb, options.n_clones, options.temp, box_size, options.n_solv,
options.max_iter, options.n_steps, options.ion_content,
options.platform, str(options.device))
| 42,130
|
https://github.com/mackdoyle/osx-auto-config/blob/master/dotfiles/.config/Text Wrangler/Packages/git.bbpackage/Contents/Scripts/git/Github/github_show_sha1_on_clipboard.sh
|
Github Open Source
|
Open Source
|
LicenseRef-scancode-warranty-disclaimer, LicenseRef-scancode-public-domain
| 2,015
|
osx-auto-config
|
mackdoyle
|
Shell
|
Code
| 9
| 46
|
#!/usr/bin/env sh
cd "$(dirname "$BB_DOC_PATH")"
open "`git config bbedit.githuburl`/commit/`pbpaste`"
| 23,550
|
https://github.com/GuilhermeSenna/NodeJS-Websocket-Practice/blob/master/app/app.js
|
Github Open Source
|
Open Source
|
MIT
| null |
NodeJS-Websocket-Practice
|
GuilhermeSenna
|
JavaScript
|
Code
| 66
| 187
|
const socket = io('ws://localhost:8080');
socket.on('message', text => {
const el = document.createElement('li');
el.innerHTML = text;
document.querySelector('ul').appendChild(el)
});
document.querySelector('button').onclick = () => {
const text = document.querySelector('input').value;
socket.emit('message', text)
}
// const socket = new WebSocket('ws://localhost:8080');
// // Listen for messages
// socket.onmessage = ({ data }) => {
// console.log('Message from server ', data);
// };
// document.querySelector('button').onclick = () => {
// socket.send('hello');
// }
| 27,954
|
https://github.com/gregbenner/-vanillajs-react-mobx-nodegui-starter-/blob/master/webpack.config.js
|
Github Open Source
|
Open Source
|
MIT
| null |
-vanillajs-react-mobx-nodegui-starter-
|
gregbenner
|
JavaScript
|
Code
| 134
| 503
|
const path = require("path");
const webpack = require("webpack");
const ForkTsCheckerWebpackPlugin = require("fork-ts-checker-webpack-plugin");
const { CleanWebpackPlugin } = require("clean-webpack-plugin");
module.exports = (env, argv) => {
const config = {
mode: "production",
entry: ["./src/index.js"],
target: "node",
output: {
path: path.resolve(__dirname, "dist"),
filename: "index.js",
},
module: {
rules: [
{
test: /\.(j|t)sx?$/,
exclude: /node_modules/,
use: {
loader: "babel-loader",
options: { cacheDirectory: true, cacheCompression: false },
},
},
{
test: /\.(png|jpe?g|gif|svg|bmp|otf)$/i,
use: [
{
loader: "file-loader",
options: { publicPath: "dist" },
},
],
},
{
test: /\.node/i,
use: [
{
loader: "native-addon-loader",
options: { name: "[name]-[hash].[ext]" },
},
],
},
],
},
plugins: [new CleanWebpackPlugin()],
resolve: {
extensions: [".tsx", ".ts", ".js", ".jsx", ".json"],
},
};
if (argv.mode === "development") {
config.mode = "development";
config.plugins.push(new webpack.HotModuleReplacementPlugin());
//config.plugins.push(new ForkTsCheckerWebpackPlugin());
config.devtool = "source-map";
config.watch = true;
config.entry.unshift("webpack/hot/poll?100");
}
return config;
};
| 22,490
|
https://github.com/omar-droubi/RAHE/blob/master/RectangleBinPack/CMakeLists.txt
|
Github Open Source
|
Open Source
|
LicenseRef-scancode-public-domain
| 2,016
|
RAHE
|
omar-droubi
|
CMake
|
Code
| 8
| 41
|
project (RBP)
cmake_minimum_required (VERSION 2.6)
add_library(RBP MaxRectsBinPack.cpp Rect.cpp)
| 11,281
|
https://github.com/alldatacenter/alldata/blob/master/olap/doris/regression-test/suites/query_p1/return_binaray/test_return_binary_bitmap.groovy
|
Github Open Source
|
Open Source
|
OpenSSL, Apache-2.0, BSD-3-Clause, LicenseRef-scancode-facebook-patent-rights-2, PSF-2.0, dtoa, MIT, GPL-2.0-only, LicenseRef-scancode-public-domain
| 2,023
|
alldata
|
alldatacenter
|
Groovy
|
Code
| 215
| 548
|
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
suite("test_return_binary_bitmap") {
def tableName="test_return_binary_bitmap"
sql "drop table if exists ${tableName};"
sql """
CREATE TABLE `${tableName}` (
`dt` int(11) NULL,
`page` varchar(10) NULL,
`user_id` bitmap BITMAP_UNION NULL
) ENGINE=OLAP
AGGREGATE KEY(`dt`, `page`)
COMMENT 'OLAP'
DISTRIBUTED BY HASH(`dt`) BUCKETS 1
PROPERTIES (
"replication_allocation" = "tag.location.default: 1",
"in_memory" = "false",
"storage_format" = "V2",
"disable_auto_compaction" = "false"
);
"""
sql """
insert into ${tableName} values(1,1,to_bitmap(1)),(1,1,to_bitmap(2)),(1,1,to_bitmap(3)),(1,1,to_bitmap(23332));
"""
sql "set return_object_data_as_binary=false;"
def result1 = sql "select * from ${tableName}"
assertTrue(result1[0][2]==null);
sql "set return_object_data_as_binary=true;"
def result2 = sql "select * from ${tableName}"
assertTrue(result2[0][2]!=null);
}
| 46,721
|
https://github.com/alphagov/maslow/blob/master/test/integration/update_a_need_test.rb
|
Github Open Source
|
Open Source
|
MIT, LicenseRef-scancode-proprietary-license
| 2,023
|
maslow
|
alphagov
|
Ruby
|
Code
| 590
| 2,770
|
require_relative "../integration_test_helper"
class UpdateANeedTest < ActionDispatch::IntegrationTest
include NeedHelper
setup do
login_as_stub_editor
stub_publishing_api_has_linkables([], document_type: "organisation")
end
context "updating a need" do
setup do
@content_item = create(:need_content_item)
stub_publishing_api_has_content(
[@content_item],
Need.default_options.merge(
per_page: 50,
),
)
stub_publishing_api_has_linked_items(
[],
content_id: @content_item["content_id"],
link_type: "meets_user_needs",
fields: %w[title base_path document_type],
)
stub_publishing_api_has_links(
content_id: @content_item["content_id"],
links: {
organisations: [],
},
)
stub_publishing_api_has_item(@content_item)
end
should "be able to access edit form" do
visit("/needs")
click_on(format_need_goal(@content_item["details"]["goal"]))
within "#workflow" do
click_on("Edit")
end
assert page.has_content?("Edit need")
assert page.has_field?("As a")
assert page.has_field?("I need to")
assert page.has_field?("So that")
# Other fields are tested in create_a_need_test.rb
end
should "be able to update a need" do
test_need = Need.find(@content_item["content_id"])
test_need.set_attributes(
role: "grandparent",
benefit: "my grandchild can start school",
legislation: "",
)
payload = test_need.send(:publishing_api_payload)
stub_publishing_api_put_content(@content_item["content_id"], payload)
stub_publishing_api_patch_links(
@content_item["content_id"],
links: { "organisations" => [] },
)
visit("/needs")
click_on(format_need_goal(@content_item["details"]["goal"]))
within "#workflow" do
assert page.has_link?("Edit", href: "/needs/#{@content_item['content_id']}/edit")
click_on("Edit")
end
fill_in("As a", with: "grandparent")
fill_in("So that", with: "my grandchild can start school")
fill_in("What legislation underpins this need?", with: "")
within "#workflow" do
click_on_first_button("Save")
end
assert_publishing_api_put_content(@content_item["content_id"], payload)
assert page.has_text?("Need updated"), "No success message displayed"
end
should "be able to update the organisations for a need" do
content_id_of_organisation_to_add = SecureRandom.uuid
stub_publishing_api_has_linkables(
[
{
"content_id": SecureRandom.uuid,
"title" => "Committee on Climate Change",
},
{
"content_id": content_id_of_organisation_to_add,
"title" => "Ministry Of Justice",
},
],
document_type: "organisation",
)
test_need = Need.find(@content_item["content_id"])
payload = test_need.send(:publishing_api_payload)
stub_publishing_api_put_content(@content_item["content_id"], payload)
request = stub_publishing_api_patch_links(
@content_item["content_id"],
links: {
"organisations" => [content_id_of_organisation_to_add],
},
)
visit("/needs")
click_on(format_need_goal(@content_item["details"]["goal"]))
within "#workflow" do
assert page.has_link?("Edit", href: "/needs/#{@content_item['content_id']}/edit")
click_on("Edit")
end
select("Ministry Of Justice", from: "Departments and agencies")
within "#workflow" do
click_on_first_button("Save")
end
assert_requested request
end
should "display met_when criteria on multiple lines" do
met_when = %w[win awesome]
@content_item["details"]["met_when"] = met_when
stub_publishing_api_has_item(@content_item)
visit("/needs")
click_on(format_need_goal(@content_item["details"]["goal"]))
within "#workflow" do
click_on("Edit")
end
within "#met-when-criteria" do
met_when.each_with_index do |criteria, index|
assert_equal(criteria, find_field("criteria-#{index}").value)
end
end
end
should "be able to add more met_when criteria" do
need = Need.send(:need_from_publishing_api_payload, @content_item)
expected_payload = need.send(:publishing_api_payload)
expected_payload[:details]["met_when"] << "more"
request = stub_publishing_api_put_content(
@content_item["content_id"],
expected_payload,
)
stub_publishing_api_patch_links(
@content_item["content_id"],
links: { "organisations" => [] },
)
visit("/needs")
click_on(format_need_goal(@content_item["details"]["goal"]))
within "#workflow" do
click_on("Edit")
end
@content_item["details"]["met_when"].each_with_index do |criteria, index|
assert_equal(criteria, find_field("criteria-#{index}").value)
end
within "#met-when-criteria" do
click_on("Enter another criteria")
end
within "#met-when-criteria" do
fill_in("criteria-2", with: "more")
end
within "#workflow" do
click_on_first_button("Save")
end
assert_requested request
assert page.has_text?("Need updated"), "No success message displayed"
end
should "be able to delete met_when criteria" do
@content_item["details"]["met_when"] = %w[win awesome more]
stub_publishing_api_has_item(@content_item)
visit("/needs")
click_on(format_need_goal(@content_item["details"]["goal"]))
within "#workflow" do
click_on("Edit")
end
met_when_initial_count = @content_item["details"]["met_when"].length
assert met_when_initial_count >= 2
@content_item["details"]["met_when"].each_with_index do |criteria, index|
assert_equal(criteria, find_field("criteria-#{index}").value)
end
within "#met-when-criteria" do
# delete criteria buttons
assert page.has_selector?(:xpath, ".//button[@id='delete-criteria' and @value='0']")
assert page.has_selector?(:xpath, ".//button[@id='delete-criteria' and @value='1']")
assert page.has_selector?(:xpath, ".//button[@id='delete-criteria' and @value='2']")
end
within "#met-when-criteria" do
click_on_first_button("delete-criteria")
end
assert_equal("awesome", find_field("criteria-0").value)
assert_equal("more", find_field("criteria-1").value)
within "#met-when-criteria" do
assert page.has_no_selector?(:xpath, ".//button[@value='2']")
assert page.has_no_field?("criteria-2")
end
end
should "handle 422 errors from the Publishing API" do
put_url = "#{Plek.find('publishing-api')}/v2/content/#{@content_item['content_id']}"
stub_request(:put, put_url).to_return(status: 422)
visit("/needs")
click_on(format_need_goal(@content_item["details"]["goal"]))
within "#workflow" do
click_on("Edit")
end
fill_in("As a", with: "grandparent")
fill_in("So that", with: "my grandchild can start school")
within "#workflow" do
click_on_first_button("Save")
end
assert page.has_content?("Edit need")
assert page.has_text?("There was a problem saving your need.")
end
end
context "updating a need which applies to all organisations" do
setup do
@content_item = create(:need_content_item)
@content_item["details"]["applies_to_all_organisations"] = true
stub_publishing_api_has_content(
[@content_item],
Need.default_options.merge(
per_page: 50,
),
)
stub_publishing_api_has_linked_items(
[],
content_id: @content_item["content_id"],
link_type: "meets_user_needs",
fields: %w[title base_path document_type],
)
stub_publishing_api_has_links(
content_id: @content_item["content_id"],
links: {
organisations: [],
},
)
stub_publishing_api_has_item(@content_item)
end
should "not show the organisations field" do
visit "/needs"
click_on(format_need_goal(@content_item["details"]["goal"]))
within ".need-title" do
assert page.has_content?(format_need_goal(@content_item["details"]["goal"]))
end
within ".nav-tabs" do
assert page.has_link?("Edit", href: "/needs/#{@content_item['content_id']}/edit")
click_on "Edit"
end
assert page.has_selector? "h3", text: "Edit need"
assert page.has_no_select? "Organisations"
assert page.has_content? "This need applies to all organisations"
end
end
end
| 40,502
|
https://github.com/gamemake/Brainiac/blob/master/Assets/Brainiac/Source/Runtime/Serialization/BTPropertyAttribute.cs
|
Github Open Source
|
Open Source
|
MIT
| null |
Brainiac
|
gamemake
|
C#
|
Code
| 150
| 466
|
using System;
using System.Reflection;
#if WINDOWS_STORE
using TP = System.Reflection.TypeInfo;
#else
#endif
using TCU = Brainiac.Serialization.TypeCoercionUtility;
namespace Brainiac.Serialization
{
[AttributeUsage(AttributeTargets.Field | AttributeTargets.Property, AllowMultiple = false)]
public class BTPropertyAttribute : Attribute
{
public string PropertyName { get; set; }
public BTPropertyAttribute()
{
PropertyName = null;
}
public BTPropertyAttribute(string propertyName)
{
PropertyName = propertyName;
}
/// <summary>
/// Gets the name specified for use in serialization.
/// </summary>
/// <returns></returns>
public static string GetPropertyName(object value)
{
if (value == null)
{
return null;
}
Type type = value.GetType();
MemberInfo memberInfo = null;
if (TCU.GetTypeInfo(type).IsEnum)
{
string name = Enum.GetName(type, value);
if (String.IsNullOrEmpty(name))
{
return null;
}
memberInfo = TCU.GetTypeInfo(type).GetField(name);
}
else
{
memberInfo = value as MemberInfo;
}
if (MemberInfo.Equals(memberInfo, null))
{
throw new ArgumentException();
}
#if WINDOWS_STORE
BTPropertyAttribute attribute = memberInfo.GetCustomAttribute<BTPropertyAttribute>(true);
#else
BTPropertyAttribute attribute = Attribute.GetCustomAttribute(memberInfo, typeof(BTPropertyAttribute)) as BTPropertyAttribute;
#endif
return attribute != null ? attribute.PropertyName : null;
}
}
}
| 18,897
|
https://github.com/porcelluscavia/vectors-webtool/blob/master/snaut-english/snaut.py
|
Github Open Source
|
Open Source
|
Apache-2.0
| null |
vectors-webtool
|
porcelluscavia
|
Python
|
Code
| 2,076
| 6,578
|
# -*- coding: utf-8 -*-
"""
This is the flask server that provides a web interface to the semspaces module.
"""
import os
import sys
from functools import wraps
from configparser import ConfigParser
import markdown
import json
import unicodecsv as csv
import io
from flask import Flask, jsonify, request, make_response
from flask import Markup, render_template
from .utils.utils import df_to_csv_string, get_logger
#from semspaces.space import SemanticSpace
from .utils.space import SemanticSpace
# The global semspace object representing the loaded semantic space is necessary
# to make it possible to reload the semantic space when running as a local
# instance.
#
# When running as a server the 'allow_space_change' setting in the configuration
# file should be set to 'no' and 'preload_space' should be set to 'yes' so that
# a semspace is loaded when starting the server and never changes later.
#
semspace = None
semspace2 = None
semspace3 = None
def app_factory(conf, init_semspace=None):
    """Return the flask app based on the configuration.

    :param conf: a ConfigParser with 'server' and 'semantic_space' sections.
    :param init_semspace: optional pre-built semantic space used when no
        space is preloaded from the configuration.
    """
    root_prefix = conf.get('server', 'root_prefix')
    static_url = "%s/static" % root_prefix
    # Directories are resolved to absolute paths so the app works regardless
    # of the current working directory.
    static_dir = os.path.abspath(conf.get('server', 'static_dir'))
    template_dir = os.path.abspath(conf.get('server', 'template_dir'))
    doc_dir = os.path.abspath(conf.get('server', 'doc_dir'))
    app = Flask(__name__,
                static_folder='static',
                template_folder='templates',
                static_url_path=static_url)
    app.config['EXPLAIN_TEMPLATE_LOADING'] = True
    # Logging configuration is read but the logger itself is disabled below.
    log_name = conf.get('server', 'log_name')
    log_file = conf.get('server', 'log_file')
    log_level = conf.get('server', 'log_level')
    #logger = get_logger(log_name, log_file, log_level)
    # Semantic-space settings shared by the nested helpers/routes below.
    semspaces_dir = conf.get('semantic_space', 'semspaces_dir')
    prenormalize = conf.getboolean('semantic_space', 'prenormalize')
    matrix_size_limit = conf.getint('semantic_space', 'matrix_size_limit')
    numpy_dtype = conf.get('semantic_space', 'numpy_dtype')
    allow_space_change = conf.getboolean('semantic_space', 'allow_space_change')
    preload_space = conf.getboolean('semantic_space', 'preload_space')
    multiple_spaces = conf.getboolean('semantic_space', 'multiple_spaces')
    def log_data(f):
        """Decorator that logs request data (IP, path, JSON body, form)."""
        @wraps(f)
        def decorated_log_data(*args, **kwargs):
            # Prefer the first X-Forwarded-For entry when running behind a
            # reverse proxy; fall back to the direct peer address.
            if not request.headers.getlist("X-Forwarded-For"):
                ip = request.remote_addr
            else:
                ip = request.headers.getlist("X-Forwarded-For")[0]
            url_path = request.path
            data = request.get_json()
            form = request.form.to_dict()
            # Use '-' as a placeholder when a payload component is absent.
            if not data:
                data = '-'
            if not form:
                form = '-'
            # NOTE(review): actual logging is disabled; re-enable together
            # with the get_logger() call in app_factory.
            # logger.info('%s %s %s %s',
            #             ip, url_path, json.dumps(data),
            #             json.dumps(form))
            return f(*args, **kwargs)
        return decorated_log_data
def load_semspace(semspace_path, semspace_format='semspace', semspace_type='normal'):
"""Load a semantic space based on the path and format."""
global semspace
if multiple_spaces:
global semspace2
global semspace3
if semspace_format == 'ssmarket':
semspace = SemanticSpace.from_ssmarket(semspace_path,
prenorm=prenormalize)
if semspace_type =='normal':
semspace = SemanticSpace.from_ssmarket(semspace_path,
prenorm=prenormalize)
elif multiple_spaces:
if semspace_type =='img':
semspace2 = SemanticSpace.from_ssmarket(semspace_path,
prenorm=prenormalize)
elif semspace_type =='proto':
semspace3 = SemanticSpace.from_ssmarket(semspace_path,
prenorm=prenormalize)
return True
elif semspace_format == 'csv':
if semspace_type =='normal':
semspace = SemanticSpace.from_csv(semspace_path,
prenorm=prenormalize,
dtype=numpy_dtype)
elif multiple_spaces:
if semspace_type =='img':
semspace2 = SemanticSpace.from_csv(semspace_path,
prenorm=prenormalize,
dtype=numpy_dtype)
elif semspace_type =='proto':
semspace3 = SemanticSpace.from_csv(semspace_path,
prenorm=prenormalize,
dtype=numpy_dtype)
return True
else:
raise Exception("Space format '%s' unknown!" % semspace_format)
    def check_space_preload():
        """Check if semantic space should be preloaded.

        Returns True when preloading is configured and spaces were loaded,
        False otherwise.  All preloaded spaces share the same
        'preload_space_format' setting.
        """
        if preload_space:
            semspace_path = conf.get('semantic_space', 'preload_space_file')
            semspace_format = conf.get('semantic_space', 'preload_space_format')
            print('Pre-loading semantic space: %s' % semspace_path)
            load_semspace(semspace_path, semspace_format)
            print('Semantic space loaded.')
            # Optionally preload the auxiliary image and prototype spaces.
            if multiple_spaces:
                semspace_path_img = conf.get('semantic_space', 'preload_space_file_img')
                print('Pre-loading image space: %s' % semspace_path_img)
                load_semspace(semspace_path_img, semspace_format, semspace_type='img')
                print('Semantic space loaded.')
                semspace_path_proto = conf.get('semantic_space', 'preload_space_file_proto')
                print('Pre-loading semantic prototypes space: %s' % semspace_path_proto)
                load_semspace(semspace_path_proto, semspace_format, semspace_type='proto')
                print('Semantic space loaded.')
            return True
        else:
            print('Semantic space not loaded.')
            return False
    # Fall back to the caller-supplied space when nothing was preloaded.
    if not check_space_preload() and init_semspace:
        global semspace
        semspace = init_semspace
def available_spaces():
"""List spaces available in the directory."""
spaces = []
for top, dirs, files in os.walk(semspaces_dir):
for fname in files:
spaces.append(os.path.join(top, fname))
return spaces
def split_by_defined(words, semspace_type = 'normal'):
"""Split a list of words based on whether they are in the space. Multiple space support not really needed here."""
defined = []
undefined = []
for w in words:
x = [word.upper() for word in w]
# if semspace_type == 'normal':
if semspace.defined_at(x):
defined.append(x)
else:
undefined.append(x)
# elif semspace_type == 'img':
# if semspace2.defined_at(x):
# defined.append(x)
# else:
# undefined.append(x)
# elif semspace_type == 'proto':
# if semspace3.defined_at(x):
# defined.append(x)
# else:
# undefined.append(x)
return defined, undefined
def check_matrix_size(semspace, words1, words2=None):
"""
Verify if the size of the requested data matrix.
Return True if the size does not exceed the size limit specified
in the configuration file.
"""
if matrix_size_limit == -1:
return True
elif words1 and words2:
if len(words1) * len(words2) <= matrix_size_limit:
return True
else:
return False
else:
if len(words1) * semspace.shape[0] <= matrix_size_limit:
return True
else:
return False
# Application routes and controllers
@app.route('%s/' % root_prefix)
def root():
"""Serve index page."""
static_prefix = static_url
url_prefix = root_prefix
api_prefix = root_prefix
return render_template('index.html', **locals())
@app.route('%s/list-semspaces/' % root_prefix)
def req_semspaces_list():
"""Return a list of available semantic spaces.
Returns json with:
* paths - directory including the semantic spaces
* availableSpaces - array listing available spaces
"""
if not allow_space_change:
return make_response('Not allowed!'), 403
data = {'paths': semspaces_dir, 'availableSpaces': available_spaces()}
return jsonify(data)
@app.route('%s/load-semspace/' % root_prefix, methods=['POST'])
def req_load_space():
"""Load another semantic space.
Takes a json with:
* semspacePath - path to a semantic space
* semspaceFormat - format of a semantic space
"""
if not allow_space_change:
return make_response('Not allowed!'), 403
data = request.get_json()
path = data['semspacePath']
space_format = data['semspaceFormat']
# TODO improve this
if load_semspace(path, space_format):
return make_response("ok")
else:
return make_response("error")
    @app.route('%s/status/' % root_prefix)
    # @app.route('/status')
    def status():
        """Return server status.
        Returns json with:
        * semspaceLoaded - true if any semantic space is loaded
        false otherwise
        * semspaceTitle - title of the loaded semantic space
        * semspaceDesc - description of the loaded semantic space
        * allowChange - true if the app would allow to change the loaded space
        false otherwise
        * allowedMetrics - a list of the metrics that can be computed
        """
        # Lazily trigger the configured preload the first time status is
        # requested without a loaded space.
        if semspace is None:
            check_space_preload()
        status_dict = {}
        status_dict['semspaceLoaded'] = semspace is not None
        # Space-dependent fields are only present when a space is loaded.
        if semspace:
            status_dict['semspaceTitle'] = semspace.title
            status_dict['semspaceDesc'] = semspace.readme
            status_dict['allowChange'] = allow_space_change
            status_dict['allowedMetrics'] = semspace.allowed_metrics()
        return jsonify(status_dict)
    @app.route('%s/similar/' % root_prefix, methods=['POST'])
    @log_data
    def similar():
        """Return most similar words.
        Takes a json with the following fields:
        * words1 - reference list of words
        * metric - metric which should be used when calculating
        distances
        * n (optional; default: 10) - number of neighbours to be
        returned
        * words2 (optional) - words which can be included in the
        result, if not given all words in the space will be used
        * vecSpace - which space to query: 'normal', 'img' or 'proto'
        Returns json with:
        * similarities - dictionary with reference words as keys and
        list of neighbours with their distances as values
        * notDefined:
        words1 - words in words1 that are not defined in the space
        words2 - words in words2 that are not defined in the space
        """
        data = request.get_json()
        metric = data['metric']
        n = data.get('n', 10)
        words_1 = data['words1']
        vec_space = data['vecSpace'] #should be 'normal', 'img', or 'proto'
        (words_1_ok, words_1_nd) = split_by_defined(words_1, semspace_type=vec_space)
        if 'words2' not in data:
            words_2_nd = None
            # The selected space is passed via the 'type' keyword; earlier
            # revisions dispatched to separately loaded spaces instead.
            most_similar = semspace.most_similar(words_1_ok, n=n, metric=metric, type=vec_space)
        else:
            words_2 = data['words2']
            (words_2_ok, words_2_nd) = split_by_defined(words_2, semspace_type=vec_space)
            # NOTE(review): this branch still dispatches over the three
            # space globals, unlike the branch above — confirm intended.
            if vec_space == 'normal':
                most_similar = semspace.most_similar(words_1_ok, words_2_ok,
                                                     n=n, metric=metric)
            elif vec_space == 'img':
                most_similar = semspace2.most_similar(words_1_ok, words_2_ok,
                                                      n=n, metric=metric)
            elif vec_space == 'proto':
                most_similar = semspace3.most_similar(words_1_ok, words_2_ok,
                                                      n=n, metric=metric)
        result = {'similarities': most_similar,
                  'notDefined': {'words1': words_1_nd, 'words2': words_2_nd}}
        return jsonify(result)
# on button press, do this!
    @app.route('%s/similar-csv/' % root_prefix, methods=['POST'])
    @log_data
    def similar_csv():
        """Export nearest neighbours as a downloadable CSV.

        NOTE(review): the neighbour computation is commented out below, so
        this endpoint currently returns a CSV containing only the header
        row; 'word' is read from the request but never used.
        """
        data = request.get_json()
        word = data['words1']
        # Original neighbour-lookup logic, kept for reference:
        # metric = data['metric']
        # n = data.get('n', 10)
        # words_1 = data['words1']
        # vec_space = data['vecSpace']
        #
        # (words_1_ok, words_1_nd) = split_by_defined(words_1, semspace_type=vec_space)
        #
        # if 'words2' not in data:
        #     words_2_nd = None
        #     if vec_space == 'normal':
        #         most_similar = semspace.most_similar(words_1_ok,
        #                                              n=n, metric=metric)
        #     elif vec_space == 'img':
        #         most_similar = semspace2.most_similar(words_1_ok,
        #                                               n=n, metric=metric)
        #     elif vec_space == 'proto':
        #         most_similar = semspace3.most_similar(words_1_ok,
        #                                               n=n, metric=metric)
        # else:
        #     words_2 = data['words2']
        #     (words_2_ok, words_2_nd) = split_by_defined(words_2, semspace_type=vec_space)
        #     if vec_space == 'normal':
        #         most_similar = semspace.most_similar(words_1_ok, words_2_ok,
        #                                              n=n, metric=metric)
        #     elif vec_space == 'img':
        #         most_similar = semspace2.most_similar(words_1_ok, words_2_ok,
        #                                               n=n, metric=metric)
        #     elif vec_space == 'proto':
        #         most_similar = semspace3.most_similar(words_1_ok, words_2_ok,
        #                                               n=n, metric=metric)
        s = io.BytesIO()
        writer = csv.writer(s)
        writer.writerow(['word_1', 'word_2', 'distance'])
        # for word, val in most_similar.items():
        #     writer.writerow([word, val])
        #
        response = make_response(s.getvalue())
        response.headers["Content-Disposition"] = (
            "attachment; filename=neighbours.csv")
        return response
    @app.route('%s/similarity-matrix/' % root_prefix, methods=['POST'])
    @log_data
    def similarity_matrix():
        """Return similarity matrix (in csv).
        Expects a json with the following fields:
        * words1 - reference list of words
        * metric - metric which should be used when calculating
        distances
        * words2 (optional) - words which can be included in the
        result, if not given all words in the space will be used
        Return csv, comma separated matrix.
        """
        data = json.loads(request.form['data'])
        metric = data['metric']
        words_1 = data['words1']
        vec_space = data['vecSpace']
        # NOTE(review): the return value of make_response() is discarded;
        # this call has no effect.
        make_response(vec_space)
        if 'words2' not in data:
            if check_matrix_size(semspace, words_1):
                (words_1_ok, words_1_nd) = split_by_defined(words_1)
                if not words_1_ok:
                    return make_response(
                        "No valid elements in the list!")
                # NOTE(review): only the 'normal' space is handled here; for
                # other vecSpace values most_similar is left unassigned and
                # the df_to_csv_string call below would raise NameError.
                if vec_space == 'normal':
                    most_similar = semspace.all_distances(words_1_ok, metric=metric)
                # elif vec_space == 'img':
                #     most_similar = semspace2.all_distances(words_1_ok, metric=metric)
                # elif vec_space == 'proto':
                #     most_similar = semspace3.all_distances(words_1_ok, metric=metric)
            else:
                return make_response("Matrix size error!")
        else:
            words_2 = data['words2']
            if check_matrix_size(semspace, words_1, words_2):
                (words_1_ok, words_1_nd) = split_by_defined(words_1)
                (words_2_ok, words_2_nd) = split_by_defined(words_2)
                if not words_1_ok or not words_2_ok:
                    return make_response(
                        "No valid elements in one of the lists!")
                # this needs to be changed to just semspace, I believe
                most_similar = semspace.matrix_distances(words_1_ok, words_2_ok,
                                                         metric=metric)
            else:
                return make_response("Matrix size error!")
        # Transpose so reference words become columns in the CSV output.
        most_similar_csv = df_to_csv_string(most_similar.T)
        response = make_response(most_similar_csv)
        response.headers["Content-Disposition"] = (
            "attachment; filename=similarities.csv")
        return response
    @app.route('%s/offset/' % root_prefix, methods=['POST'])
    @log_data
    def offset():
        """
        Return n words closest to a calculated vector.
        Current behavior of filtering out used words is consistent with the
        implementation in the word2vec tools. It should be considered if this
        should not be changed in the future versions.
        """
        data = request.get_json()
        positive = data['positive']
        negative = data['negative']
        metric = data['metric']
        n = data.get('n', 10)
        # Drop terms the space does not define before computing the offset.
        (positive_ok, positive_nd) = split_by_defined(positive)
        (negative_ok, negative_nd) = split_by_defined(negative)
        closest = semspace.offset(positive_ok, negative_ok,
                                  metric=metric, n=n, filter_used=True)
        result = {'closest': closest,
                  'notDefined': {'positive': positive_nd,
                                 'negative': negative_nd}}
        return jsonify(result)
    @app.route('%s/pairs/' % root_prefix, methods=['POST'])
    @log_data
    def pairs():
        """Return similarity matrix (in csv).
        Expects a json with the following fields:
        * wordPairs - array containing arrays with pairs of words
        * metric - metric which should be used when calculating
        distances
        Return csv, comma separated matrix.
        """
        data = json.loads(request.form['data'])
        metric = data['metric']
        word_pairs = data['wordPairs']
        # Each pair counts as two entries against the matrix size limit.
        if matrix_size_limit > 0 and len(word_pairs) * 2 > matrix_size_limit:
            return make_response("Matrix size error!")
        s = io.BytesIO()
        writer = csv.writer(s)
        writer.writerow(['word_1', 'word_2', 'distance'])
        for w1, w2 in word_pairs:
            # Each side of a pair is itself a token list; upper-case for lookup.
            w1 = [word.upper() for word in w1]
            w2 = [word.upper() for word in w2]
            # Silently skip pairs where either side is undefined in the space.
            if semspace.defined_at(w1) & semspace.defined_at(w2):
                dist = semspace.pair_distance(w1, w2, metric)
                w1_label = ' '.join(w1)
                w2_label = ' '.join(w2)
                writer.writerow([w1_label, w2_label, dist])
        response = make_response(s.getvalue())
        response.headers["Content-Disposition"] = (
            "attachment; filename=word-pairs.csv")
        return response
    @app.route('%s/defined-at/' % root_prefix, methods=['POST'])
    @log_data
    def defined_at():
        """Return information about which of the listed words
        are defined in a semantic space.
        Takes a json array with list of words to check.
        Returns json with:
        * available - json array with list of words defined
        * notAvailable - json array with list of words that are undefined
        """
        data = request.get_json()
        if 'words' in data:
            # NOTE(review): semspace_type is hard-coded to 'proto' here, but
            # split_by_defined currently consults only the main space
            # regardless — confirm which space this check should target.
            (available, not_available) = split_by_defined(data['words'], semspace_type='proto')
            response = jsonify({'available': available,
                                'notAvailable': not_available})
        else:
            response = jsonify({'error': 'No words listed!'})
        return response
@app.route('/process', methods=['POST'])
def process():
text = request.form['searchbox']
if text:
return jsonify({'word': text.upper()})
return jsonify({'error': 'Missing data!'})
@app.route('%s/doc/<section>' % root_prefix)
def help(section):
"""Return a markdown file from the doc directory rendered to html."""
path = '%s/%s.md' % (doc_dir, section)
doc_text = open(path).read()
extentions = ['markdown.extensions.fenced_code',
'markdown.extensions.tables']
processed = markdown.markdown(doc_text, extensions=extentions)
content = Markup(processed)
static_prefix = static_url
url_prefix = root_prefix
return render_template('help.html', **locals())
return app
if __name__ == '__main__':
    # Read the configuration from the path given on the command line, or
    # fall back to the bundled default configuration file.
    conf = ConfigParser()
    if len(sys.argv) > 1:
        conf.read([sys.argv[1]])
    else:
        # conf.read(['config.ini', 'config_local.ini'])
        conf.read(['../config.ini'])
    server_host = conf.get('server_local', 'host')
    server_port = conf.getint('server_local', 'port')
    root_prefix = conf.get('server', 'root_prefix')
    url = 'http://localhost:%s/%s' % (server_port, root_prefix)
    browser_open = conf.getboolean('server_local', 'start_browser')
    if browser_open:
        welcome_msg = 'If the browser does not start automatically go to '
        import threading
        import webbrowser
        # Give the server a moment to start before opening the browser.
        threading.Timer(3.0, lambda: webbrowser.open(url)).start()
    else:
        welcome_msg = 'Open your browser and go to'
    print('%s %s' % (welcome_msg, url))
    print('Keep this window open.')
    app = app_factory(conf)
    app.debug = conf.getboolean('server_local', 'debug')
    # Fix: honor the configured debug flag; previously debug=True was
    # hard-coded here and silently overrode 'server_local.debug'.
    app.run(host=server_host, port=server_port, debug=app.debug)
| 10,502
|
https://github.com/kdeng00/Icarus/blob/master/Models/Genre.cs
|
Github Open Source
|
Open Source
|
MIT
| 2,021
|
Icarus
|
kdeng00
|
C#
|
Code
| 50
| 155
|
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations.Schema;
using Newtonsoft.Json;
namespace Icarus.Models
{
    /// <summary>
    /// A music genre record, serialized to JSON for the API.
    /// </summary>
    public class Genre
    {
        /// <summary>Primary key of the genre.</summary>
        [JsonProperty("id")]
        public int GenreId { get; set; }

        /// <summary>Display name of the genre.</summary>
        [JsonProperty("genre")]
        public string GenreName { get; set; }

        /// <summary>Number of songs in this genre; computed, not persisted.</summary>
        [JsonProperty("song_count")]
        [NotMapped]
        public int SongCount { get; set; }

        /// <summary>Navigation property; excluded from JSON output.</summary>
        [JsonIgnore]
        public List<Song> Songs { get; set; }
    }
}
| 2,552
|
https://github.com/FRC6854/2021InfiniteRechargeOfficial/blob/master/src/main/java/frc/robot/commands/climber/DriveClimber.java
|
Github Open Source
|
Open Source
|
BSD-3-Clause, MIT
| 2,021
|
2021InfiniteRechargeOfficial
|
FRC6854
|
Java
|
Code
| 129
| 574
|
package frc.robot.commands.climber;
import edu.wpi.first.wpilibj2.command.CommandBase;
import frc.robot.subsystems.Climber;
import frc.robot.Robot;
import frc.robot.subsystems.LEDController;
import frc.robot.subsystems.LEDController.LEDMode;
/**
* Climber command for endgame
*/
public class DriveClimber extends CommandBase {

    private Climber climber = null;

    public DriveClimber() {
        climber = Climber.getInstance();
        // Reserve the climber subsystem so no other command drives it.
        addRequirements(climber);
    }

    @Override
    public void initialize() {
        // Reset lift encoder and stop all climber motors before driving.
        climber.zeroLift();
        climber.fullStop();
    }

    @Override
    public void execute() {
        // The operator's right trigger drives both lift and winch.
        double liftOutput = Robot.operator.getControllerRTrigger();
        double winchOutput = liftOutput;

        climber.setShifterOutput(Robot.operator.getControllerRightStickX());

        // Cap the lift output at 25% power.
        if (Math.abs(liftOutput) > 0.25) {
            liftOutput = 0.25;
        }

        // Holding B switches from lift control to winch control.
        if (Robot.operator.getControllerBButton() == true) {
            LEDController.getInstance().setMode(LEDMode.WINCH_ACTIVE);
            if (winchOutput > 0) {
                // Stop winching once the lift is near its lower limit.
                // NOTE(review): getLiftTicks() is compared against 0.50 —
                // confirm the unit (ticks vs. rotations) of this threshold.
                if (climber.getLiftTicks() <= 0.50) {
                    climber.setWinchOutput(0);
                }
                else {
                    climber.setWinchOutput(winchOutput);
                }
            }
            else {
                climber.setWinchOutput(winchOutput);
            }
        }
        else {
            climber.setLiftOutput(liftOutput);
            climber.setWinchOutput(0);
        }

        // Restore the default LED pattern when the climber is idle.
        if(climber.getLiftOutput() == 0 && climber.getWinchOutput() == 0) {
            LEDController.getInstance().setMode(LEDMode.DEFAULT);
        }
    }

    @Override
    public void end(boolean interrupted) {
    }

    @Override
    public boolean isFinished() {
        // Runs until interrupted (default command style).
        return false;
    }
}
| 15,342
|
https://github.com/vivlai/qanta/blob/master/security_groups.py
|
Github Open Source
|
Open Source
|
MIT
| null |
qanta
|
vivlai
|
Python
|
Code
| 117
| 534
|
#!/usr/bin/env python3
import subprocess
import json
import os
import hcl
def api(command, parse=True):
    """Run a CLI command, returning its stdout parsed as JSON.

    When ``parse`` is False the output is discarded and None is returned.
    Raises CalledProcessError on a non-zero exit status.
    """
    completed = subprocess.run(command, check=True, stdout=subprocess.PIPE)
    if parse:
        return json.loads(completed.stdout.decode('utf8'))
def get_spot_id():
    """Read the master spot-instance-request id from local terraform state."""
    with open('terraform.tfstate') as state_file:
        state = hcl.load(state_file)
    master = state['modules'][0]['resources']['aws_spot_instance_request.master']
    return master['primary']['id']
def get_instance_id(spot_id):
    """Resolve a spot request id to the EC2 instance id it launched."""
    response = api([
        'aws', 'ec2',
        'describe-spot-instance-requests',
        '--spot-instance-request-ids', spot_id
    ])
    requests = response['SpotInstanceRequests']
    return requests[0]['InstanceId']
def get_current_security_groups(instance_id):
    """Return the ids of the security groups attached to an instance."""
    response = api([
        'aws', 'ec2',
        'describe-instance-attribute',
        '--attribute', 'groupSet',
        '--instance-id', instance_id
    ])
    return [group['GroupId'] for group in response['Groups']]
def attach_security_group(instance_id, sids):
    """Attach additional security groups, keeping the existing ones."""
    groups = get_current_security_groups(instance_id) + list(sids)
    api([
        'aws', 'ec2',
        'modify-instance-attribute',
        '--instance-id', instance_id,
        '--groups'
    ] + groups, parse=False)
if __name__ == '__main__':
    # Security groups are supplied as a comma-separated list in the
    # QB_SECURITY_GROUPS environment variable.
    security_groups = os.environ.get('QB_SECURITY_GROUPS')
    if security_groups is not None:
        print('Adding these security groups:', security_groups)
        security_groups = security_groups.split(',')
        spot_id = get_spot_id()
        instance_id = get_instance_id(spot_id)
        attach_security_group(instance_id, security_groups)
    else:
        print('No additional security groups added')
| 37,830
|
https://github.com/collectivesense/logback-logsense/blob/master/src/main/java/com/logsense/opentracing/ITraceExtractor.java
|
Github Open Source
|
Open Source
|
Apache-2.0
| null |
logback-logsense
|
collectivesense
|
Java
|
Code
| 33
| 80
|
package com.logsense.opentracing;
/**
 * Supplies the identifiers of the currently active OpenTracing trace and
 * span, so they can be attached to log events.
 */
public interface ITraceExtractor {
    /**
     * @return current trace-id (if present), null - otherwise
     */
    String extractTraceId();

    /**
     * @return current span-id (if present), null - otherwise
     */
    String extractSpanId();
}
| 31,813
|
https://github.com/Mirantis/contrail-web-controller/blob/master/webroot/config/networking/routeaggregate/test/ui/views/routeAggregateGridView.mock.data.js
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,018
|
contrail-web-controller
|
Mirantis
|
JavaScript
|
Code
| 158
| 944
|
/*
* Copyright (c) 2016 Juniper Networks, Inc. All rights reserved.
*/
define(['underscore'], function(_){
this.routeAggregateDomainsData = {
"domains": [
{
"href": "http://10.204.216.12:9100/domain/efa3feca-769d-4583-b38f-86614cde1810",
"fq_name": [
"default-domain"
],
"uuid": "efa3feca-769d-4583-b38f-86614cde1810"
}
]
};
this.routeAggregatePojectsData = {
"projects": [
{
"uuid": "ee14bbf4-a3fc-4f98-a7b3-f1fe1d8b29bb",
"fq_name": [
"default-domain",
"admin"
]
},
{
"uuid": "fc176b78-28ff-4e0e-88f7-cc1e0224d237",
"fq_name": [
"default-domain",
"demo"
]
}
]
};
this.routeAggregateMockData = {
"route-aggregates": [
{
"route-aggregate": {
"fq_name": [
"default-domain",
"admin",
"test_route_aggregate"
],
"name": "test_route_aggregate",
"aggregate_route_nexthop": "12.12.12.1",
"parent_uuid": "ee14bbf4-a3fc-4f98-a7b3-f1fe1d8b29bb",
"parent_href": "http://10.204.216.12:9100/project/ee14bbf4-a3fc-4f98-a7b3-f1fe1d8b29bb",
"parent_type": "project",
"perms2": {
"owner": null,
"owner_access": 7,
"global_access": 0,
"share": []
},
"href": "http://10.204.216.12:9100/route-aggregate/1b2bf39a-85ea-4dbb-bf72-072e1950970e",
"id_perms": {
"enable": true,
"uuid": {
"uuid_mslong": 1957926308019850800,
"uuid_lslong": 13795096503163853000
},
"created": "2016-02-03T10:30:13.813523",
"description": null,
"creator": null,
"user_visible": true,
"last_modified": "2016-02-03T10:30:13.813523",
"permissions": {
"owner": "cloud-admin",
"owner_access": 7,
"other_access": 7,
"group": "cloud-admin-group",
"group_access": 7
}
},
"aggregate_route_entries": {
"route": [
"route1",
"route2",
"route3"
]
},
"display_name": "test_route_aggregate",
"uuid": "1b2bf39a-85ea-4dbb-bf72-072e1950970e"
}
}
]
};
return {
routeAggregateDomainsData : routeAggregateDomainsData,
routeAggregatePojectsData : routeAggregatePojectsData,
routeAggregateMockData : routeAggregateMockData
};
});
| 29,355
|
https://github.com/smallbomb/CheckIO_js/blob/master/Ice Base/house-password.js
|
Github Open Source
|
Open Source
|
MIT
| 2,020
|
CheckIO_js
|
smallbomb
|
JavaScript
|
Code
| 80
| 375
|
// https://js.checkio.org/mission/house-password/
// A password is valid when it is at least 10 characters long and contains
// a lower-case letter, an upper-case letter and a digit.
function housePassword(data) {
  return data.length >= 10
    && /[a-z]/.test(data)
    && /[A-Z]/.test(data)
    && /[0-9]/.test(data);
}
// Manual smoke checks with the expected result noted on each line.
console.log(housePassword("A1213pokl"));          // false — only 9 characters
console.log(housePassword("bAse730onE4"));        // true
console.log(housePassword("asasasasasasasaas"));  // false — no upper case, no digit
console.log(housePassword("QWERTYqwerty"));       // false — no digit
console.log(housePassword("123456123456"));       // false — no letters
console.log(housePassword("QwErTy911poqqqq"));    // true
// var assert = require('assert');
// if (!global.is_checking) {
//     assert.equal(housePassword("A1213pokl"), false, "1st example");
//     assert.equal(housePassword("bAse730onE4"), true, "2nd example");
//     assert.equal(housePassword("asasasasasasasaas"), false, "3rd example");
//     assert.equal(housePassword("QWERTYqwerty"), false, "4th example");
//     assert.equal(housePassword("123456123456"), false, "5th example");
//     assert.equal(housePassword("QwErTy911poqqqq"), true, "6th example");
//     console.log("Coding complete? Click 'Check' to review your tests and earn cool rewards!");
// }
| 23,012
|
https://github.com/yiisoft-contrib/yiiframework.com/blob/master/views/user/badge_user.php
|
Github Open Source
|
Open Source
|
BSD-3-Clause
| 2,023
|
yiiframework.com
|
yiisoft-contrib
|
PHP
|
Code
| 13
| 61
|
<?php /* Renders when a badge was earned plus a link to the recipient. */ ?>
<div class="user">
    <span class="date grid_2 alpha"><?= Yii::$app->formatter->asRelativeTime($model->complete_time) ?></span> to
    <?= $model->user->rankLink ?>
</div>
| 25,324
|
https://github.com/giladbarnea/autocompletion/blob/master/test.py
|
Github Open Source
|
Open Source
|
MIT
| 2,020
|
autocompletion
|
giladbarnea
|
Python
|
Code
| 5
| 19
|
import autocompletion
def test_autocompletion():
    # Placeholder smoke test: passes as long as the package imports cleanly.
    pass
| 455
|
https://github.com/choineete/ZhuJiExamSystem/blob/master/src/main/java/com/tencent/wxcloudrun/service/impl/RecordAndResultImpl.java
|
Github Open Source
|
Open Source
|
MIT
| null |
ZhuJiExamSystem
|
choineete
|
Java
|
Code
| 129
| 820
|
package com.tencent.wxcloudrun.service.impl;
import com.tencent.wxcloudrun.dao.ExamRecordMapper;
import com.tencent.wxcloudrun.dao.ExamResultMapper;
import com.tencent.wxcloudrun.model.Question;
import com.tencent.wxcloudrun.service.QuestionService;
import com.tencent.wxcloudrun.service.RecordAndResultService;
import com.tencent.wxcloudrun.util.JsonUtil;
import com.tencent.wxcloudrun.viewmodel.wx.ExamPaper;
import com.tencent.wxcloudrun.viewmodel.wx.ExamRecord;
import com.tencent.wxcloudrun.viewmodel.wx.ExamResult;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.Date;
import java.util.List;
@Service
public class RecordAndResultImpl implements RecordAndResultService {

    @Autowired
    ExamRecordMapper examRecordMapper;

    @Autowired
    ExamResultMapper examResultMapper;

    @Autowired
    QuestionService questionService;

    /**
     * Persist one answer record per question, grade the paper and store the
     * aggregate result.
     *
     * @param answerList submitted answer for each question, in paper order
     * @param examPaper  the paper being graded; its content is a JSON list
     *                   of question ids
     * @return the total score achieved
     */
    @Override
    public int recordAndMakeResult(List<Integer> answerList, ExamPaper examPaper) {
        int correctCount = 0;
        int totalScore = 0;
        List<Integer> questionList = JsonUtil.toJsonListObject(examPaper.getContent(), List.class);
        for (int i = 0; i < answerList.size(); i++) {
            Question question = questionService.findQuestionById(questionList.get(i));
            ExamRecord record = new ExamRecord();
            record.setExamId(examPaper.getExamId());
            record.setExamPaperId(examPaper.getId());
            record.setUserId(examPaper.getUserId());
            record.setQuestionId(question.getId());
            record.setAnswer(answerList.get(i));
            // Grade the answer. Fix: compare by value — the previous '=='
            // compared boxed Integer references, which is only reliable for
            // the JVM's cached range (-128..127).
            if (answerList.get(i).intValue() == question.getCorrect()) {
                // Mark correct, count it, and add this question's score.
                record.setIsRight(1);
                correctCount++;
                totalScore += question.getScore();
            } else {
                // Mark wrong.
                record.setIsRight(2);
            }
            // Insert the per-question answer record.
            examRecordMapper.insertExamRecord(record);
        }
        // All records stored; build and persist the aggregate exam result.
        ExamResult result = new ExamResult();
        result.setExamId(examPaper.getExamId());
        result.setExamPaperId(examPaper.getId());
        result.setUserId(examPaper.getUserId());
        result.setCreateTime(new Date());
        result.setTotalCount(questionList.size());
        result.setCorrectCount(correctCount);
        result.setScore(totalScore);
        examResultMapper.insertExamResult(result);
        return totalScore;
    }
}
| 48,618
|
https://github.com/cmihail/WebServer/blob/master/src/server/request/helper/InvalidRequestException.java
|
Github Open Source
|
Open Source
|
MIT, BSD-2-Clause
| 2,014
|
WebServer
|
cmihail
|
Java
|
Code
| 53
| 123
|
package server.request.helper;
import server.request.InvalidRequest;
/**
 * An exception used for signaling an {@link InvalidRequest}.
 *
 * @author cmihail
 */
public class InvalidRequestException extends Exception {

    private static final long serialVersionUID = 1L;

    /**
     * @param message human-readable reason why the request is invalid
     */
    public InvalidRequestException(String message) {
        // Fix: delegate to Exception instead of shadowing the message in a
        // field and overriding getMessage(); the message now also appears
        // in toString() and stack traces automatically.
        super(message);
    }
}
| 21,026
|
https://github.com/embl-communications/science-in-school/blob/master/dist/wp-content/plugins/types-access/vendor/toolset/toolset-common/inc/autoloaded/field/renderer/Rest/Date.php
|
Github Open Source
|
Open Source
|
Apache-2.0
| null |
science-in-school
|
embl-communications
|
PHP
|
Code
| 148
| 386
|
<?php
namespace OTGS\Toolset\Common\Field\Renderer\Rest;

/**
 * Renderer for the date field in REST API.
 *
 * Besides the timestamp in the raw format, provide also a 'formatted' key, where the date
 * is formatted according to the site date and time format.
 *
 * @since Types 3.3
 */
class Date extends Raw {

	/**
	 * Augment the raw field value with a 'formatted' representation.
	 *
	 * @inheritdoc
	 *
	 * @return array
	 */
	protected function get_value() {
		$output = parent::get_value();

		// PHP 5.3 compatibility: $this cannot be used inside the closure,
		// so capture the field in a local variable instead.
		$field = $this->field;

		$output = $this->format_single_or_repeatable(
			$output,
			'formatted',
			function ( $single_raw_value ) use ( $field ) {
				// Non-numeric or zero timestamps produce no formatted value.
				$timestamp = is_numeric( $single_raw_value ) ? (int) $single_raw_value : 0;
				if ( 0 === $timestamp ) {
					return null;
				}

				// Format according to the site-wide date (and optionally
				// time) settings.
				$formatted = date( get_option( 'date_format' ), $timestamp );
				$add_time = ( $field->get_definition()->get_datetime_option() === 'date_and_time' );
				if ( $add_time ) {
					$formatted .= ' ' . date( get_option( 'time_format' ), $timestamp );
				}
				return $formatted;
			}
		);

		return $output;
	}
}
| 1,242
|
https://github.com/feathericon/feathericon-react/blob/master/src/icons/Camera.js
|
Github Open Source
|
Open Source
|
MIT
| null |
feathericon-react
|
feathericon
|
JavaScript
|
Code
| 70
| 166
|
import React from 'react';
const Camera = props => (
<svg width={24} height={24} {...props}>
<path
fillRule="evenodd"
d="M5 21a2 2 0 0 1-2-2V5a2 2 0 0 1 2-2h14a2 2 0 0 1 2 2v14a2 2 0 0 1-2 2H5zM17 5v2h2V5h-2zm-5 12a5 5 0 1 0 0-10 5 5 0 0 0 0 10zm0-2a3 3 0 1 0 0-6 3 3 0 0 0 0 6z"
/>
</svg>
);
export default Camera;
| 4,235
|
https://github.com/BackEndTea/yay/blob/master/tests/phpt/macro/macro_deep_ast_access_error.phpt
|
Github Open Source
|
Open Source
|
MIT
| 2,022
|
yay
|
BackEndTea
|
PHP
|
Code
| 68
| 255
|
--TEST--
Test macro $(deep[ast][access]) syntax --pretty-print
--FILE--
<?php
$(macro) {
match
{
$(
chain
(
token(T_STRING) as leaf_level_a
,
chain
(
token(T_STRING) as leaf_level_b
,
chain
(
token(T_STRING) as leaf_level_c
)
as level_c
)
as level_b
)
as level_a
)
}
} >> {
matched($(level_a[level_b][level_c][leaf_level_x]));
}
match {
leaf_level_a
leaf_level_b
leaf_level_c
}
?>
--EXPECTF--
Undefined macro expansion 'level_a[level_b][level_c][leaf_level_x]', in %s.phpt on line 27 with context: [
"level_a"
]
| 36,197
|
https://github.com/davidjpowell80/xrm-datamigration/blob/master/tests/Capgemini.DataMigration.Core.Tests.Unit/Exceptions/ConfigurationExceptionTests.cs
|
Github Open Source
|
Open Source
|
MIT
| 2,021
|
xrm-datamigration
|
davidjpowell80
|
C#
|
Code
| 81
| 318
|
using System;
using System.Diagnostics.CodeAnalysis;
using FluentAssertions;
using Microsoft.VisualStudio.TestTools.UnitTesting;
namespace Capgemini.DataMigration.Exceptions.Tests
{
    /// <summary>
    /// Verifies that all three ConfigurationException constructors succeed
    /// and propagate the message / inner exception correctly.
    /// </summary>
    [ExcludeFromCodeCoverage]
    [TestClass]
    public class ConfigurationExceptionTests
    {
        private ConfigurationException systemUnderTest;

        [TestMethod]
        public void ConfigurationException()
        {
            // Parameterless constructor must not throw.
            FluentActions.Invoking(() => systemUnderTest = new ConfigurationException())
                         .Should()
                         .NotThrow();
        }

        [TestMethod]
        public void ConfigurationExceptionWithStringParameter()
        {
            var message = "Test message";

            FluentActions.Invoking(() => systemUnderTest = new ConfigurationException(message))
                         .Should()
                         .NotThrow();

            // The supplied message must be exposed unchanged.
            Assert.AreEqual(message, systemUnderTest.Message);
        }

        [TestMethod]
        public void ConfigurationExceptionWithStringAndInnerException()
        {
            var message = "Test message";

            FluentActions.Invoking(() => systemUnderTest = new ConfigurationException(message, new Exception()))
                         .Should()
                         .NotThrow();

            // Both the message and the inner exception must be preserved.
            Assert.AreEqual(message, systemUnderTest.Message);
            Assert.IsNotNull(systemUnderTest.InnerException);
        }
    }
}
| 47,427
|
https://github.com/JWGmeligMeyling/Java-Gitolite-Manager/blob/master/src/main/java/nl/minicom/gitolite/manager/models/User.java
|
Github Open Source
|
Open Source
|
Apache-2.0
| null |
Java-Gitolite-Manager
|
JWGmeligMeyling
|
Java
|
Code
| 455
| 1,322
|
package nl.minicom.gitolite.manager.models;
import java.util.Comparator;
import java.util.Map;
import nl.minicom.gitolite.manager.exceptions.ModificationException;
import nl.minicom.gitolite.manager.models.Recorder.Modification;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
/**
* This class represents a user in Gitolite.
*
* @author Michael de Jong <<a href="mailto:michaelj@minicom.nl">michaelj@minicom.nl</a>>
*/
public final class User implements Identifiable {
static final Comparator<User> SORT_BY_NAME = new Comparator<User>() {
@Override
public int compare(User arg0, User arg1) {
return arg0.getName().compareTo(arg1.getName());
}
};
private final String name;
private final Map<String, String> keys;
private final Recorder recorder;
/**
* Constructs a new {@link User} object with the provided name and public key.
*
* @param name
* The name of the user.
*/
User(String name) {
this(name, new Recorder());
}
/**
* Constructs a new {@link User} object with the provided name and public key.
*
* @param name
* The name of the user.
*
* @param recorder
* The {@link Recorder} to use when recording changes of this {@link User}.
*/
User(String name, Recorder recorder) {
Preconditions.checkNotNull(name);
Preconditions.checkArgument(!name.isEmpty());
Preconditions.checkArgument(name.matches("^\\w[\\w._\\@+-]+$"), "\"" + name + "\" is not a valid user name");
Preconditions.checkNotNull(recorder);
this.name = name;
this.recorder = recorder;
this.keys = Maps.newTreeMap();
}
/**
* @return
* The name of the {@link User}.
*/
public String getName() {
return name;
}
/**
* This method allows you to set (and override existing) SSH keys for this particular {@link User}.
*
* @param name
* The name of the key. This may not be NULL.
*
* @param content
* The content of the public key file. This may not be NULL.
*/
public void setKey(final String name, final String content) {
Preconditions.checkNotNull(name);
Preconditions.checkNotNull(content);
Preconditions.checkArgument(name.matches("^[\\w._+-]*$"), "\"" + name + "\" is not a valid key name");
Preconditions.checkArgument(content.matches("^ssh-rsa\\s.+$"));
synchronized (keys) {
keys.put(name, content);
}
recorder.append(new Modification("Setting key: '%s' for user: '%s'", name, getName()) {
@Override
public void apply(Config config) throws ModificationException {
config.getUser(getName()).setKey(name, content);
}
});
}
/**
* @return
* An {@link Map} of SSH keys for this user. The key of the {@link Map}
* is the name of the key, and the value is the contents of the associated key file.
*/
public ImmutableMap<String, String> getKeys() {
synchronized (keys) {
return ImmutableMap.copyOf(keys);
}
}
/**
* This method removes the SSH key with the specified name from this {@link User} object.
*
* @param name
* The name of the SSH key to remove.
*/
public void removeKey(final String name) {
Preconditions.checkNotNull(name);
synchronized (keys) {
keys.remove(name);
}
recorder.append(new Modification("Removing key: '%s' for user: '%s'", name, getName()) {
@Override
public void apply(Config config) throws ModificationException {
config.getUser(getName()).removeKey(name);
}
});
}
@Override
public int hashCode() {
return new HashCodeBuilder()
.append(name)
.toHashCode();
}
@Override
public boolean equals(Object other) {
if (!(other instanceof User)) {
return false;
}
return new EqualsBuilder()
.append(name, ((User) other).name)
.isEquals();
}
}
| 40,902
|
https://github.com/dsp-uga/Sushanth-Kathirvelu-p0/blob/master/p0_sp_c.py
|
Github Open Source
|
Open Source
|
MIT
| null |
Sushanth-Kathirvelu-p0
|
dsp-uga
|
Python
|
Code
| 248
| 961
|
#!/usr/bin/python
from operator import *
from pyspark import *
from string import punctuation
import json
import sys
conf = (SparkConf().setMaster("local").setAppName("SubProjectB"))
sc = SparkContext(conf = conf)
def remove_blank(x):
"""creating a function to remove the empty words"""
if(x != ""):
return(x)
def has_more_than_two_occurence(x):
"""creating a function for finding words with more than 2 occurences"""
if(x[1]>1):
return(x)
def length_more_than_one(x):
"""creating a function for checking the length of word is greater than 1"""
if(len(x[0])>1):
return(x)
def not_in_stop_words(stopWords,words):
"""creating a function for finding words not in stop words"""
if not(words in stopWords.value):
return(words)
def remove_Punctuations(words):
"""creating a function for removing the punctuations"""
if(len(words)>1):
return(words.strip(punctuation))
def interchanging_key_and_value(x):
"""creating a function for interchanging key and value"""
return (x[1],x[0])
def case_insensitive(x):
"""creating a function for interchanging key and value"""
return x.lower()
def create_a_key_value_pair(x):
"""creating a function for creating a key value pair"""
return (x,1)
def write_to_JSON_file(path, fileName, data):
#"""creating a function for writing into an json file"""
filePathNameWExt = path + '//' + fileName + '.json'
with open(filePathNameWExt, 'w') as fp:
json.dump(data, fp)
"""Merging all the files into a single text file"""
file= sc.wholeTextFiles(sys.argv[1])
fileWithoutStopWord=file.filter(lambda x:(not("stopwords.txt" in x[0])))
"""declaring the stop words as a broadcast file"""
stopWordsFile=file.filter(lambda x:("stopwords.txt" in x[0]))
stopWords=stopWordsFile.flatMap(lambda x:x[1].split('\n'))
stopWordsBroadcast=sc.broadcast(stopWords.collect())
"""wordCount with case-insensitive words and filetering words with more than 2 occurences in the text file and without stop words"""
wordCount= fileWithoutStopWord.map(lambda x:case_insensitive(x[1])).flatMap(lambda x:x.split()).filter(lambda x:not_in_stop_words(stopWordsBroadcast,x)).map(lambda x:create_a_key_value_pair(x)).reduceByKey(add)
wordCountFinal=wordCount.filter(lambda x:has_more_than_two_occurence(x)).filter(lambda x:length_more_than_one(x)).map(lambda x:(remove_Punctuations(x[0]),x[1]))
"""taking only the top 40 most frequently occuring words"""
topFourtyWords=wordCountFinal.map(lambda x:interchanging_key_and_value(x)).sortByKey(False).map(lambda x:interchanging_key_and_value(x)).take(40)
topFourtyWordsRDD=sc.parallelize(topFourtyWords)
topFourtyWordsRDDDict = topFourtyWordsRDD.collectAsMap()
"""saving the output as a JSON"""
write_to_JSON_file(sys.argv[2],sys.argv[3],topFourtyWordsRDDDict)
| 33,864
|
https://github.com/autoai-org/AIFlow/blob/master/components/library/src/components/Common/Alert.tsx
|
Github Open Source
|
Open Source
|
Apache-2.0
| 2,021
|
AIFlow
|
autoai-org
|
TypeScript
|
Code
| 55
| 224
|
import { XCircleIcon } from '@heroicons/react/solid'
export default function Alert(props: any) {
return (
<div className="rounded-md bg-red-50 p-4" style={{ maxWidth: '80vh', marginLeft: 'auto', marginRight: 'auto', marginBottom:'60px' }}>
<div className="flex">
<div className="flex-shrink-0">
<XCircleIcon className="h-5 w-5 text-red-400" aria-hidden="true" />
</div>
<div className="ml-3">
<h3 className="text-sm font-medium text-red-800">{props.title}</h3>
<div className="mt-2 text-sm text-red-700">
{props.message}
</div>
</div>
</div>
</div>
)
}
| 40,739
|
https://github.com/usccolumbia/deeperGATGNN/blob/master/NewMatPropPred/matdeeplearn/__init__.py
|
Github Open Source
|
Open Source
|
MIT
| 2,022
|
deeperGATGNN
|
usccolumbia
|
Python
|
Code
| 12
| 19
|
from .models import *
from .training import *
from .process import *
| 28,757
|
https://github.com/madhurimamandal/codeql/blob/master/java/ql/test/stubs/apache-commons-collections4-4.4/org/apache/commons/collections4/queue/PredicatedQueue.java
|
Github Open Source
|
Open Source
|
MIT
| 2,022
|
codeql
|
madhurimamandal
|
Java
|
Code
| 85
| 247
|
// Generated automatically from org.apache.commons.collections4.queue.PredicatedQueue for testing purposes
package org.apache.commons.collections4.queue;
import java.util.Queue;
import org.apache.commons.collections4.Predicate;
import org.apache.commons.collections4.collection.PredicatedCollection;
public class PredicatedQueue<E> extends PredicatedCollection<E> implements Queue<E>
{
protected PredicatedQueue() {}
protected PredicatedQueue(Queue<E> p0, Predicate<? super E> p1){}
protected Queue<E> decorated(){ return null; }
public E element(){ return null; }
public E peek(){ return null; }
public E poll(){ return null; }
public E remove(){ return null; }
public boolean offer(E p0){ return false; }
public static <E> PredicatedQueue<E> predicatedQueue(Queue<E> p0, Predicate<? super E> p1){ return null; }
}
| 3,181
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.