repo_name stringlengths 4 116 | path stringlengths 4 379 | size stringlengths 1 7 | content stringlengths 3 1.05M | license stringclasses 15
values |
|---|---|---|---|---|
anarcheuz/Funny-school-projects | DevOO/src/test/Test.java | 4576 | package test;
import java.io.File;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.io.*;
import java.nio.file.Path;
import javax.xml.parsers.*;
import org.w3c.dom.*;
import org.xml.sax.*;
import modele.*;
import controleur.*;
public class Test {
public static void main(String[] args){
Intersection inter = new Intersection(1,0,2);
if(ConstrucIntersection(1,0,2)){
System.out.println("V - Création d'une intersection");
if(ConstrucRoute("h1",12,12,inter)){
System.out.println("V - Route à partir de Intersec marche");
Route route = new Route("h1",12,12,inter);
if(addTroncIntersection(inter,route)){
System.out.println("V - Ajout d'un tronc sortant");
} else {
System.out.println("X - Ajout d'un tronc sortant");
}
} else {
System.out.println("X - Route à partir de Intersec marche");
}
DateFormat HOUR_FORMAT = new SimpleDateFormat("HH:mm:ss");
Date dateBis = new Date();
Date date = new Date();
try{
date = HOUR_FORMAT.parse("9:0:0");
dateBis = HOUR_FORMAT.parse("8:0:0");
}catch(Exception e){
e.printStackTrace();
}
if(ConstrucEtape(dateBis,inter)){
System.out.println("V - Création étape");
Etape etape = new Etape(dateBis, inter);
etape.setHeurePassagePrevue(date);
if(etape.getHeurePassagePrevue()==date){
System.out.println("V - Etape - Modification heure");
} else {
System.out.println("X - Etape - Modification heure");
}
} else {
System.out.println("X - Création étape");
}
if(ConstrucLivraison(inter,1,2)){
System.out.println("V - Livraison - Construc");
} else {
System.out.println("X - Livraison - Construc");
}
System.out.println("==============================");
String path = new File("Tests/Plan-Minimal.xml").getAbsolutePath();
System.out.println("Vérifiez que vous disposez du Fichier à :"+path);
File file = new File(path);
//ChargerPlan plan = new ChargerPlan(stringToDom(loadFile(file)));
} else {
System.out.println("X - Création d'une intersection");
}
}
private static boolean ConstrucLivraison(Intersection inter, int i, int j) {
Livraison liv = new Livraison(inter, i, j);
if( (inter == liv.getPointLivraison()) &&
(i == liv.getIdInPH()) &&
(j == liv.getIdClient())
){
return true;
} else {
return false;
}
}
public static boolean ConstrucIntersection(int id, int x, int y){
Intersection inter = new Intersection(id,x,y);
if( (x==inter.getX()) && (y==inter.getY()) && (id==inter.getId())){
return true;
} else {
return false;
}
}
public static boolean ConstrucRoute(String nom, double vitesse, double longueur, Intersection inter1 ){
Route route = new Route(nom, vitesse, longueur, inter1);
if ( (nom == route.getName()) &&
(vitesse== route.getVitesse()) &&
(longueur==route.getLongueur()) &&
(inter1 == route.getInter())
){
return true;
} else {
return false;
}
}
public static boolean addTroncIntersection(Intersection inter, Route route){
inter.addTroncSortant(route);
if( (inter.getTroncsSortants().size() == 1) &&
(inter.getTroncsSortants().get(0) == route)
){
return true;
} else {
return false;
}
}
public static boolean ConstrucEtape(Date heurePassage, Intersection adresse){
Etape etape = new Etape(heurePassage, adresse);
if( (heurePassage == etape.getHeurePassagePrevue()) &&
(adresse == etape.getAdresse())
){
return true;
} else {
return false;
}
}
public static void displayRoute(Route item){
System.out.println(item);
}
public static void displayLivraison(Livraison item){
}
public static Document stringToDom(String xmlSource)
throws SAXException, ParserConfigurationException, IOException {
DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
DocumentBuilder builder = factory.newDocumentBuilder();
return builder.parse(new InputSource(new StringReader(xmlSource)));
}
public static String loadFile(File f) {
try {
BufferedInputStream in = new BufferedInputStream(new FileInputStream(f));
StringWriter out = new StringWriter();
int b;
while ((b=in.read()) != -1)
out.write(b);
out.flush();
out.close();
in.close();
return out.toString();
}
catch (IOException ie)
{
ie.printStackTrace();
return "";
}
}
}
| mit |
quintusdias/glymur | tests/test_jp2box_uuid.py | 13523 | # -*- coding: utf-8 -*-
"""Test suite for printing.
"""
# Standard library imports
import importlib.resources as ir
import io
import shutil
import struct
import unittest
import uuid
import warnings
# Third party library imports ...
import lxml.etree
# Local imports
import glymur
from glymur import Jp2k
from glymur.jp2box import UUIDBox
from . import fixtures, data
TIFF_ASCII = 2
TIFF_SHORT = 3
TIFF_LONG = 4
TIFF_RATIONAL = 5
TIFF_DOUBLE = 12
SUBFILETYPE = 254
FILETYPE_REDUCEDIMAGE = 0x1
OSUBFILETYPE = 255
IMAGEWIDTH = 256
IMAGELENGTH = 257
BITSPERSAMPLE = 258
COMPRESSION = 259
COMPRESSION_NONE = 1
PHOTOMETRIC = 262
STRIPOFFSETS = 273
ORIENTATION = 274
PHOTOMETRIC_MINISBLACK = 1
SAMPLESPERPIXEL = 277
ROWSPERSTRIP = 278
STRIPBYTECOUNTS = 279
MINSAMPLEVALUE = 280
MAXSAMPLEVALUE = 281
XRESOLUTION = 282
YRESOLUTION = 283
PLANARCONFIG = 284
MODELPIXELSCALE = 33550
MODELTIEPOINT = 33922
GEOKEYDIRECTORY = 34735
GEOASCIIPARAMS = 34737
class TestSuite(fixtures.TestCommon):
"""Tests for XMP, Exif UUIDs."""
def _create_degenerate_geotiff(self, e):
"""
Create an in-memory degenerate geotiff.
Parameters
----------
e : str
Either '<' for little endian or '>' for big endian.
Returns
-------
bytes
sequence of bytes making up a degenerate geotiff. Should have
something like the following structure:
Magic: 0x4949 <little-endian> Version: 0x2a <ClassicTIFF>
Directory 0: offset 8 (0x8) next 0 (0)
SubFileType (254) LONG (4) 1<1>
ImageWidth (256) SHORT (3) 1<1>
ImageLength (257) SHORT (3) 1<1>
BitsPerSample (258) SHORT (3) 1<8>
Compression (259) SHORT (3) 1<1>
Photometric (262) SHORT (3) 1<1>
StripOffsets (273) LONG (4) 1<1>
SamplesPerPixel (277) SHORT (3) 1<1>
RowsPerStrip (278) LONG (4) 1<1>
StripByteCounts (279) LONG (4) 1<1>
XResolution (282) RATIONAL (5) 1<75>
YResolution (283) RATIONAL (5) 1<75>
33550 (0x830e) DOUBLE (12) 3<10 10 0>
33922 (0x8482) DOUBLE (12) 6<0 0 0 444650 4.64051e+06 0>
34735 (0x87af) SHORT (3) 24<1 1 0 5 1024 0 1 1 1025 0 1 1 ...>
34737 (0x87b1) ASCII (2) 45<UTM Zone 16N NAD27"|Clar ...>
"""
b = io.BytesIO()
# Create the header.
# Signature, version, offset to IFD
if e == '<':
buffer = struct.pack('<2sHI', b'II', 42, 8)
else:
buffer = struct.pack('>2sHI', b'MM', 42, 8)
b.write(buffer)
offset = b.tell()
num_tags = 16
# The CDATA offset is past IFD tag count
offset += 2
# The CDATA offset is past the IFD
offset += num_tags * 12
# The CDATA offset is past the null offset to next IFD
offset += 4
# The CDATA offset is past the image data
offset += 1
# Write the tag count
buffer = struct.pack(e + 'H', num_tags)
b.write(buffer)
# Write out all the IFD tags. Any data that exceeds 4 bytes has to
# be appended later.
lst = [
struct.pack(e + 'HHII', SUBFILETYPE, TIFF_LONG, 1, 1),
struct.pack(e + 'HHII', IMAGEWIDTH, TIFF_SHORT, 1, 1),
struct.pack(e + 'HHII', IMAGELENGTH, TIFF_SHORT, 1, 1),
struct.pack(e + 'HHII', BITSPERSAMPLE, TIFF_SHORT, 1, 8),
struct.pack(e + 'HHII', COMPRESSION, TIFF_SHORT, 1,
COMPRESSION_NONE),
struct.pack(e + 'HHII', PHOTOMETRIC, TIFF_SHORT, 1, 1),
struct.pack(e + 'HHII', STRIPOFFSETS, TIFF_LONG, 1, 1),
struct.pack(e + 'HHII', SAMPLESPERPIXEL, TIFF_SHORT, 1, 1),
struct.pack(e + 'HHII', ROWSPERSTRIP, TIFF_LONG, 1, 1),
struct.pack(e + 'HHII', STRIPBYTECOUNTS, TIFF_LONG, 1, 1),
struct.pack(e + 'HHII', XRESOLUTION, TIFF_RATIONAL, 1, offset),
struct.pack(e + 'HHII', YRESOLUTION, TIFF_RATIONAL, 1, offset + 8),
struct.pack(e + 'HHII', MODELPIXELSCALE, TIFF_DOUBLE, 3,
offset + 16),
struct.pack(e + 'HHII', MODELTIEPOINT, TIFF_DOUBLE, 6,
offset + 40),
struct.pack(e + 'HHII', GEOKEYDIRECTORY, TIFF_SHORT, 24,
offset + 88),
struct.pack(e + 'HHII', GEOASCIIPARAMS, TIFF_ASCII, 45,
offset + 136),
]
for buffer in lst:
b.write(buffer)
# NULL pointer to next IFD
buffer = struct.pack(e + 'I', 0)
b.write(buffer)
# Image data. Just a single byte will do.
buffer = struct.pack(e + 'B', 0)
b.write(buffer)
# Now append the tag payloads that did not fit into the IFD.
# XResolution
tag_payloads = [
(e + 'I', 75), # XResolution
(e + 'I', 1),
(e + 'I', 75), # YResolution
(e + 'I', 1),
(e + 'd', 10), # Model pixel scale tag
(e + 'd', 10),
(e + 'd', 0),
]
# MODELTIEPOINT
datums = [0.0, 0.0, 0.0, 44650.0, 4640510.0, 0.0]
for datum in datums:
tag_payloads.append((e + 'd', datum))
# GeoKeyDirectory
datums = [
1, 1, 0, 5,
1024, 0, 1, 1,
1025, 0, 1, 1,
1026, 34737, 20, 0,
2049, 34737, 24, 20,
3072, 0, 1, 26716,
]
for datum in datums:
tag_payloads.append((e + 'H', datum))
# GEOASCIIPARAMS
items = (e + '45s',
b'UTM Zone 16N NAD27"|Clarke, 1866 by Default| ')
tag_payloads.append(items)
# Tag payloads
for format, datum in tag_payloads:
buffer = struct.pack(format, datum)
b.write(buffer)
b.seek(0)
return b.read()
def test__printing__geotiff_uuid__xml_sidecar(self):
"""
SCENARIO: Print a geotiff UUID with XML sidecar file
EXPECTED RESULT: Should not error out.
"""
box_data = ir.read_binary('tests.data', '0220000800_uuid.dat')
bf = io.BytesIO(box_data)
bf.seek(8)
box = UUIDBox.parse(bf, 0, 703)
str(box)
def test_append_xmp_uuid(self):
"""
SCENARIO: Append an XMP UUID box to an existing JP2 file.
EXPECTED RESULT: The new last box in the JP2 file is UUID.
"""
the_uuid = uuid.UUID('be7acfcb-97a9-42e8-9c71-999491e3afac')
raw_data = fixtures.SIMPLE_RDF.encode('utf-8')
shutil.copyfile(self.jp2file, self.temp_jp2_filename)
jp2 = Jp2k(self.temp_jp2_filename)
ubox = glymur.jp2box.UUIDBox(the_uuid=the_uuid, raw_data=raw_data)
jp2.append(ubox)
# Should be two UUID boxes now.
expected_ids = ['jP ', 'ftyp', 'jp2h', 'uuid', 'jp2c', 'uuid']
actual_ids = [b.box_id for b in jp2.box]
self.assertEqual(actual_ids, expected_ids)
# The data should be an XMP packet, which gets interpreted as
# an ElementTree.
self.assertTrue(isinstance(jp2.box[-1].data, lxml.etree._ElementTree))
def test_bad_exif_tag(self):
"""
Corrupt the Exif IFD with an invalid tag should produce a warning.
"""
b = self._create_exif_uuid('<')
b.seek(0)
buffer = b.read()
# The first tag should begin at byte 32. Replace the entire IDF
# entry with zeros.
tag = struct.pack('<HHII', 0, 3, 0, 0)
buffer = buffer[:40] + tag + buffer[52:]
b = io.BytesIO()
b.write(buffer)
b.seek(8)
with self.assertWarns(UserWarning):
box = glymur.jp2box.UUIDBox.parse(b, 0, 418)
self.assertEqual(box.box_id, 'uuid')
# Should still get the IFD. 16 tags.
self.assertEqual(len(box.data.keys()), 16)
def test_exif(self):
"""
Verify read of both big and little endian Exif IFDs.
"""
# Check both little and big endian.
for endian in ['<', '>']:
self._test_endian_exif(endian)
def _create_exif_uuid(self, endian):
"""
Create a buffer that can be parsed as an Exif UUID.
Parameters
----------
endian : str
Either '<' for little endian or '>' for big endian
"""
b = io.BytesIO()
# Write L, T, UUID identifier.
# 388 = length of degenerate tiff
# 6 = Exif\x0\x0
# 16 = length of UUID identifier
# 8 = length of L, T
# 388 + 6 + 16 + 8 = 418
b.write(struct.pack('>I4s', 418, b'uuid'))
b.write(b'JpgTiffExif->JP2')
b.write(b'Exif\x00\x00')
buffer = self._create_degenerate_geotiff(endian)
b.write(buffer)
b.seek(8)
return b
def _test_endian_exif(self, endian):
"""
Test Exif IFDs.
Parameters
----------
endian : str
Either '<' for little endian or '>' for big endian
"""
bptr = self._create_exif_uuid(endian)
box = glymur.jp2box.UUIDBox.parse(bptr, 0, 418)
self.assertEqual(box.data['XResolution'], 75)
expected = 'UTM Zone 16N NAD27"|Clarke, 1866 by Default| '
self.assertEqual(box.data['GeoAsciiParams'], expected)
@unittest.skip('not sure why this was corrupt')
def test_print_bad_geotiff(self):
"""
SCENARIO: A GeoTIFF UUID is corrupt.
EXPECTED RESULT: The string representation should validate and clearly
state that the UUID box is corrupt.
"""
with ir.path(data, 'issue398.dat') as path:
with path.open('rb') as f:
f.seek(8)
with warnings.catch_warnings():
# Ignore the warnings about invalid TIFF tags, we already
# know that.
warnings.simplefilter('ignore')
box = glymur.jp2box.UUIDBox.parse(f, 0, 380)
actual = str(box)
expected = ("UUID Box (uuid) @ (0, 380)\n"
" UUID: "
"b14bf8bd-083d-4b43-a5ae-8cd7d5a6ce03 (GeoTIFF)\n"
" UUID Data: corrupt")
self.assertEqual(actual, expected)
class TestSuiteHiRISE(fixtures.TestCommon):
"""Tests for HiRISE RDRs."""
def setUp(self):
super(TestSuiteHiRISE, self).setUp()
# Hand-create the boxes needed for HiRISE.
the_uuid = uuid.UUID('2b0d7e97-aa2e-317d-9a33-e53161a2f7d0')
ulst = glymur.jp2box.UUIDListBox([the_uuid])
version = 0
flag = [0, 0, 0]
url = 'ESP_032436_1755_COLOR.LBL'
debox = glymur.jp2box.DataEntryURLBox(version, flag, url)
uuidinfo = glymur.jp2box.UUIDInfoBox([ulst, debox])
uuid_data = ir.read_binary(data, 'degenerate_geotiff.tif')
the_uuid = uuid.UUID('b14bf8bd-083d-4b43-a5ae-8cd7d5a6ce03')
geotiff_uuid = glymur.jp2box.UUIDBox(the_uuid, uuid_data)
# Fabricate a new JP2 file out of the signature, file type, header,
# and codestream out of nemo.jp2, but add in the UUIDInfo and UUID
# box from HiRISE.
jp2 = Jp2k(self.jp2file)
boxes = [jp2.box[0], jp2.box[1], jp2.box[2], uuidinfo, geotiff_uuid,
jp2.box[-1]]
self.hirise_jp2file_name = self.test_dir_path / 'hirise.jp2'
jp2.wrap(self.hirise_jp2file_name, boxes=boxes)
def test_tags(self):
jp2 = Jp2k(self.hirise_jp2file_name)
self.assertEqual(jp2.box[4].data['GeoDoubleParams'],
(0.0, 180.0, 0.0, 0.0, 3396190.0, 3396190.0))
self.assertEqual(jp2.box[4].data['GeoAsciiParams'],
'Equirectangular MARS|GCS_MARS|')
self.assertEqual(jp2.box[4].data['GeoKeyDirectory'], (
1, 1, 0, 18, # noqa
1024, 0, 1, 1, # noqa
1025, 0, 1, 1, # noqa
1026, 34737, 21, 0, # noqa
2048, 0, 1, 32767, # noqa
2049, 34737, 9, 21, # noqa
2050, 0, 1, 32767, # noqa
2054, 0, 1, 9102, # noqa
2056, 0, 1, 32767, # noqa
2057, 34736, 1, 4, # noqa
2058, 34736, 1, 5, # noqa
3072, 0, 1, 32767, # noqa
3074, 0, 1, 32767, # noqa
3075, 0, 1, 17, # noqa
3076, 0, 1, 9001, # noqa
3082, 34736, 1, 2, # noqa
3083, 34736, 1, 3, # noqa
3088, 34736, 1, 1, # noqa
3089, 34736, 1, 0, # noqa
))
self.assertEqual(jp2.box[4].data['ModelPixelScale'], (0.25, 0.25, 0.0))
self.assertEqual(jp2.box[4].data['ModelTiePoint'], (
0.0, 0.0, 0.0, -2523306.125, -268608.875, 0.0
))
@unittest.skipIf(not fixtures._HAVE_GDAL, 'Could not load GDAL')
def test_printing_geotiff_uuid(self):
"""
SCENARIO: Print a geotiff UUID.
EXPECTED RESULT: Should match a known geotiff UUID. The string
representation validates.
"""
jp2 = Jp2k(self.hirise_jp2file_name)
self.maxDiff = None
actual = str(jp2.box[4])
expected = fixtures.GEOTIFF_UUID
self.assertEqual(actual, expected)
| mit |
vivet/GoogleApi | .tests/GoogleApi.Test/Maps/DistanceMatrix/DistanceMatrixTests.cs | 9895 | using System;
using System.Threading;
using GoogleApi.Entities.Common;
using GoogleApi.Entities.Common.Enums;
using GoogleApi.Entities.Maps.Common;
using GoogleApi.Entities.Maps.Common.Enums;
using GoogleApi.Entities.Maps.DistanceMatrix.Request;
using NUnit.Framework;
namespace GoogleApi.Test.Maps.DistanceMatrix
{
[TestFixture]
public class DistanceMatrixTests : BaseTest
{
[Test]
public void DistanceMatrixTest()
{
var origin1 = new Address("285 Bedford Ave, Brooklyn, NY, USA");
var origin2 = new Address("1 Broadway Ave, Manhattan, NY, USA");
var destination1 = new Address("185 Broadway Ave, Manhattan, NY, USA");
var destination2 = new Address("200 Bedford Ave, Brooklyn, NY, USA");
var request = new DistanceMatrixRequest
{
Key = this.ApiKey,
Origins = new[]
{
new LocationEx(origin1),
new LocationEx(origin2)
},
Destinations = new[]
{
new LocationEx(destination1),
new LocationEx(destination2)
}
};
var result = GoogleMaps.DistanceMatrix.Query(request);
Assert.IsNotNull(result);
Assert.AreEqual(Status.Ok, result.Status);
}
[Test]
public void DistanceMatrixWhenAddressTest()
{
var origin = new Address("285 Bedford Ave, Brooklyn, NY, USA");
var destination = new Address("185 Broadway Ave, Manhattan, NY, USA");
var request = new DistanceMatrixRequest
{
Key = this.ApiKey,
Origins = new[]
{
new LocationEx(origin)
},
Destinations = new[]
{
new LocationEx(destination)
}
};
var result = GoogleMaps.DistanceMatrix.Query(request);
Assert.IsNotNull(result);
Assert.AreEqual(Status.Ok, result.Status);
}
[Test]
public void DistanceMatrixWhenCoordinateTest()
{
var origin = new CoordinateEx(55.7237480, 12.4208282);
var destination = new CoordinateEx(55.72672682, 12.407996582);
var request = new DistanceMatrixRequest
{
Key = this.ApiKey,
Origins = new[]
{
new LocationEx(origin)
},
Destinations = new[]
{
new LocationEx(destination)
}
};
var result = GoogleMaps.DistanceMatrix.Query(request);
Assert.IsNotNull(result);
Assert.AreEqual(Status.Ok, result.Status);
}
[Test]
public void DistanceMatrixWhenCoordinateAndHeadingTest()
{
var origin = new CoordinateEx(55.7237480, 12.4208282)
{
Heading = 90
};
var destination = new CoordinateEx(55.72672682, 12.407996582)
{
Heading = 90
};
var request = new DistanceMatrixRequest
{
Key = this.ApiKey,
Origins = new[]
{
new LocationEx(origin)
},
Destinations = new[]
{
new LocationEx(destination)
}
};
var result = GoogleMaps.DistanceMatrix.Query(request);
Assert.IsNotNull(result);
Assert.AreEqual(Status.Ok, result.Status);
}
[Test]
public void DistanceMatrixWhenCoordinateAndUseSideOfRoadTest()
{
var origin = new CoordinateEx(55.7237480, 12.4208282)
{
UseSideOfRoad = true
};
var destination = new CoordinateEx(55.72672682, 12.407996582)
{
UseSideOfRoad = true
};
var request = new DistanceMatrixRequest
{
Key = this.ApiKey,
Origins = new[]
{
new LocationEx(origin)
},
Destinations = new[]
{
new LocationEx(destination)
}
};
var result = GoogleMaps.DistanceMatrix.Query(request);
Assert.IsNotNull(result);
Assert.AreEqual(Status.Ok, result.Status);
}
[Test]
public void DistanceMatrixWhenPlaceIdTest()
{
var origin = new Place("ChIJaSLMpEVQUkYRL4xNOWBfwhQ");
var destination = new Place("ChIJuc03_GlQUkYRlLku0KsLdJw");
var request = new DistanceMatrixRequest
{
Key = this.ApiKey,
Origins = new[]
{
new LocationEx(origin)
},
Destinations = new[]
{
new LocationEx(destination)
}
};
var result = GoogleMaps.DistanceMatrix.Query(request);
Assert.IsNotNull(result);
Assert.AreEqual(Status.Ok, result.Status);
}
[Test]
public void DistanceMatrixWhenAvoidWayTest()
{
var origin = new Address("285 Bedford Ave, Brooklyn, NY, USA");
var destination = new Address("185 Broadway Ave, Manhattan, NY, USA");
var request = new DistanceMatrixRequest
{
Key = this.ApiKey,
Origins = new[]
{
new LocationEx(origin)
},
Destinations = new[]
{
new LocationEx(destination)
},
Avoid = AvoidWay.Highways
};
var result = GoogleMaps.DistanceMatrix.Query(request);
Assert.IsNotNull(result);
Assert.AreEqual(Status.Ok, result.Status);
}
[Test]
public void DistanceMatrixWhenTravelModeDrivingAndDepartureTimeTest()
{
var origin = new Address("285 Bedford Ave, Brooklyn, NY, USA");
var destination = new Address("185 Broadway Ave, Manhattan, NY, USA");
var request = new DistanceMatrixRequest
{
Key = this.ApiKey,
Origins = new[]
{
new LocationEx(origin)
},
Destinations = new[]
{
new LocationEx(destination)
},
TravelMode = TravelMode.Driving,
DepartureTime = DateTime.UtcNow.AddHours(1)
};
var result = GoogleMaps.DistanceMatrix.Query(request);
Assert.IsNotNull(result);
Assert.AreEqual(Status.Ok, result.Status);
}
[Test]
public void DistanceMatrixWhenTravelModeTransitAndArrivalTimeTest()
{
var origin = new Address("285 Bedford Ave, Brooklyn, NY, USA");
var destination = new Address("185 Broadway Ave, Manhattan, NY, USA");
var request = new DistanceMatrixRequest
{
Key = this.ApiKey,
Origins = new[]
{
new LocationEx(origin)
},
Destinations = new[]
{
new LocationEx(destination)
},
TravelMode = TravelMode.Driving,
ArrivalTime = DateTime.UtcNow.AddHours(1),
TransitRoutingPreference = TransitRoutingPreference.Fewer_Transfers
};
var result = GoogleMaps.DistanceMatrix.Query(request);
Assert.IsNotNull(result);
Assert.AreEqual(Status.Ok, result.Status);
}
[Test]
public void DistanceMatrixWhenAsyncTest()
{
var origin = new Address("285 Bedford Ave, Brooklyn, NY, USA");
var destination = new Address("185 Broadway Ave, Manhattan, NY, USA");
var request = new DistanceMatrixRequest
{
Key = this.ApiKey,
Origins = new[]
{
new LocationEx(origin)
},
Destinations = new[]
{
new LocationEx(destination)
}
};
var result = GoogleMaps.DistanceMatrix.QueryAsync(request).Result;
Assert.IsNotNull(result);
Assert.AreEqual(Status.Ok, result.Status);
}
[Test]
public void DistanceMatrixWhenAsyncAndCancelledTest()
{
var origin = new Address("285 Bedford Ave, Brooklyn, NY, USA");
var destination = new Address("185 Broadway Ave, Manhattan, NY, USA");
var request = new DistanceMatrixRequest
{
Key = this.ApiKey,
Origins = new[]
{
new LocationEx(origin)
},
Destinations = new[]
{
new LocationEx(destination)
}
};
var cancellationTokenSource = new CancellationTokenSource();
var task = GoogleMaps.DistanceMatrix.QueryAsync(request, cancellationTokenSource.Token);
cancellationTokenSource.Cancel();
var exception = Assert.Throws<OperationCanceledException>(() => task.Wait(cancellationTokenSource.Token));
Assert.IsNotNull(exception);
Assert.AreEqual(exception.Message, "The operation was canceled.");
}
}
} | mit |
coffeeaddict/kindergarten | spec/kindergarten/perimeter_spec.rb | 3111 | require 'spec_helper'
describe Kindergarten::Perimeter do
describe :class do
it "should have a :expose method" do
SpecPerimeter.should respond_to(:expose)
SpecPerimeter.should respond_to(:exposed_methods)
end
it "should return exposed methods" do
SpecPerimeter.exposed_methods.should_not be_empty
end
it "should have a :govern method" do
SpecPerimeter.should respond_to(:govern)
SpecPerimeter.should respond_to(:govern_proc)
end
it "should return a govern proc" do
SpecPerimeter.govern_proc.should be_kind_of(Proc)
end
it "should return a governess" do
SpecPerimeter.governess.should_not be_nil
end
it "should return a purpose" do
SpecPerimeter.purpose.should_not be_nil
end
end
describe :instance do
it "should have an initialize method with 2 arguments" do
SpecPerimeter.instance.method(:initialize).arity.should == 2
end
it "should have a :guard method" do
SpecPerimeter.instance.should respond_to(:guard)
end
it "should have an :unguarded method" do
SpecPerimeter.instance.should respond_to(:unguarded)
end
it "should have a :scrub method" do
SpecPerimeter.instance.should respond_to(:scrub)
end
it "should have a :rinse method" do
SpecPerimeter.instance.should respond_to(:rinse)
end
it "should have a :sandbox_methods method" do
SpecPerimeter.instance.should respond_to(:sandbox_methods)
end
end
describe :sandbox do
before(:each) do
@sandbox = Kindergarten.sandbox("child")
@sandbox.extend_perimeter(SpecPerimeter)
end
it "should have the SpecPerimeter" do
@sandbox.perimeters.collect(&:class).should include(SpecPerimeter)
end
it "should fill the governess" do
@sandbox.governess.should_not be_empty
end
it "should have the sandboxed method" do
@sandbox.testing.sandboxed.should eq "child"
end
it "should have the guarded method" do
expect {
@sandbox.testing.guarded
}.to raise_error(Kindergarten::AccessDenied)
end
it "should not have the unboxed method" do
expect {
@sanbox.testing.unboxed
}.to raise_error(NoMethodError)
end
it "should have the not_guarded method" do
@sandbox.testing.not_guarded.should eq "OK"
end
it "should have the unsafe method" do
expect {
@sandbox.testing.unsafe
}.to raise_error(Kindergarten::Perimeter::Unguarded)
end
end
describe :unguarded do
before(:each) do
@sandbox = Kindergarten.sandbox("child")
@sandbox.extend_perimeter(SpecPerimeter)
end
it "should allow the unsafe method" do
expect {
@sandbox.unguarded do
@sandbox.testing.unsafe
end
}.to_not raise_error(Kindergarten::Perimeter::Unguarded)
end
it "should allow the not_guarded method" do
expect {
@sandbox.unguarded do
@sandbox.testing.not_guarded
end
}.to_not raise_error(Kindergarten::Perimeter::Unguarded)
end
end
end
| mit |
bytenoodles/symfony2-orm | src/ByteNoodles/Bundle/Symfony2ORMBundle/Controller/DefaultController.php | 334 | <?php
namespace ByteNoodles\Bundle\Symfony2ORMBundle\Controller;
use Symfony\Bundle\FrameworkBundle\Controller\Controller;
class DefaultController extends Controller
{
public function indexAction($name)
{
return $this->render('ByteNoodlesSymfony2ORMBundle:Default:index.html.twig', array('name' => $name));
}
}
| mit |
europa1613/java | leetcode/src/com/test/median/twosortedarrays/Solution.java | 1074 | package com.test.median.twosortedarrays;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
class Solution {
public static void main(String[] args) {
System.out.println(findMedianSortedArrays(new int[] { 1, 2 }, new int[] { 3, 4 }));
}
public static double findMedianSortedArrays(int[] nums1, int[] nums2) {
List<Integer> list = new ArrayList<>();
list.addAll(Arrays .stream(nums1)
.boxed()
.collect(Collectors.toList()));
list.addAll(Arrays .stream(nums2)
.boxed()
.collect(Collectors.toList()));
/*
* double average = list .stream() .mapToInt(item -> item.intValue()) .average()
* .orElse(0)
*/;
List<Integer> list2 = list .stream()
.sorted()
.collect(Collectors.toList());
double res;
if (list2.size() % 2 == 0) {
int position = list2.size() / 2;
res = (list2.get(position) + Double.valueOf(list2.get(position - 1))) / 2;
} else {
int position = list2.size() / 2;
res = list2.get(position);
}
return res;
}
} | mit |
GethosTheWalrus/Goverwatch | models/models.Hero.List.Hero.go | 299 | package models
// Model representing a watered down hero, for the hero list call
// Fields:
// Name: The name of the hero
// Roles: The role(s) of the hero
// Portrait: The hero's portrait (As displayed on Blizzard's website)
type HeroListHero struct {
Name string
Roles string
Portrait string
} | mit |
djfoxer/healthyWithVS | djfoxer.HealthyWithVS/djfoxer.HealthyWithVS/Helpers/Consts.cs | 1129 | using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace djfoxer.HealthyWithVS.Helpers
{
public static class Consts
{
public const string OptionsCategoryBasicName = "Basic";
public const string PluginName = "Healthy With VS";
public const string OptionsCategoryBasicStatusBarAutostartText = "Autostart Pomodoro Timer";
public const string OptionsCategoryBasicStatusBarAutostartInfoText = "Show Pomodoro Timer on status bar when Visual Studio starts.";
public const string OptionsCategoryBasicWorkoutActiveText = "Workout enabled";
public const string OptionsCategoryBasicWorkoutActiveInfoText = "Enable workout after timer ends.";
public const string OptionsCategoryBasicWorkTimeText = "Timer (minutes)";
public const string OptionsCategoryBasicWorkTimeInfoText = "Set work interval, default 25 minutes";
public const string VisualStudioStatusBarName = "StatusBarPanel";
public const string HealthyWithVS_Element_PomodoroTimer = "PomodorTimer_HealthyWithVS";
}
}
| mit |
zielq701/slack-radio-dj | src/interface/song-metadata.interface.ts | 780 | export interface SongMetadata {
kind: string;
etag: string;
id: string;
snippet: {
publishedAt: string;
channelId: string;
title: string;
description: string;
thumbnails: any;
channelTitle: string;
tags: [
string
],
categoryId: string;
liveBroadcastContent: string;
defaultLanguage: string;
localized: {
title: string;
description: string
},
defaultAudioLanguage: string
};
contentDetails: {
duration: string;
dimension: string;
definition: string;
caption: string;
licensedContent: boolean,
regionRestriction: {
allowed: [
string
],
blocked: [
string
]
},
projection: string;
hasCustomThumbnail: boolean
};
}
| mit |
alexdzul/myPage | myPage/apps/social/models.py | 552 | from django.db import models
# Create your models here.
class SocialNetwork(models.Model):
def image_path(self, filename):
ruta = "SocialNetwork/%s/%s" % (self.name, str(filename))
return ruta
name = models.CharField(max_length=300)
url = models.URLField(max_length=700)
icon = models.ImageField(upload_to=image_path)
status = models.BooleanField(default=True)
def __unicode__(self):
return self.name
class Meta:
verbose_name = "Red Social"
verbose_name_plural = "Redes Sociales" | mit |
Azure/azure-sdk-for-java | sdk/keyvault/azure-security-keyvault-keys/src/test/java/com/azure/security/keyvault/keys/KeyAsyncClientManagedHsmTest.java | 6348 | // Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
package com.azure.security.keyvault.keys;
import com.azure.core.exception.ResourceModifiedException;
import com.azure.core.http.HttpClient;
import com.azure.core.test.TestMode;
import com.azure.core.util.Configuration;
import com.azure.security.keyvault.keys.models.KeyType;
import org.junit.jupiter.api.Assumptions;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
import reactor.test.StepVerifier;
import java.math.BigInteger;
import java.net.HttpURLConnection;
import static com.azure.security.keyvault.keys.cryptography.TestHelper.DISPLAY_NAME_WITH_ARGUMENTS;
import static org.junit.jupiter.api.Assertions.assertEquals;
public class KeyAsyncClientManagedHsmTest extends KeyAsyncClientTest implements KeyClientManagedHsmTestBase {
public KeyAsyncClientManagedHsmTest() {
this.isHsmEnabled = Configuration.getGlobalConfiguration().get("AZURE_MANAGEDHSM_ENDPOINT") != null;
this.runManagedHsmTest = isHsmEnabled || getTestMode() == TestMode.PLAYBACK;
}
@Override
protected void beforeTest() {
Assumptions.assumeTrue(runManagedHsmTest);
super.beforeTest();
}
/**
* Tests that a RSA key created.
*/
@Override
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void createRsaKey(HttpClient httpClient, KeyServiceVersion serviceVersion) {
createKeyAsyncClient(httpClient, serviceVersion);
createRsaKeyRunner((expected) -> StepVerifier.create(client.createRsaKey(expected))
.assertNext(response -> assertKeyEquals(expected, response))
.verifyComplete());
}
/**
* Tests that an RSA key with a public exponent can be created in the key vault.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void createRsaKeyWithPublicExponent(HttpClient httpClient, KeyServiceVersion serviceVersion) {
createKeyAsyncClient(httpClient, serviceVersion);
createRsaKeyWithPublicExponentRunner((createRsaKeyOptions) ->
StepVerifier.create(client.createRsaKey(createRsaKeyOptions))
.assertNext(rsaKey -> {
assertKeyEquals(createRsaKeyOptions, rsaKey);
assertEquals(BigInteger.valueOf(createRsaKeyOptions.getPublicExponent()),
toBigInteger(rsaKey.getKey().getE()));
assertEquals(createRsaKeyOptions.getKeySize(), rsaKey.getKey().getN().length * 8);
})
.verifyComplete());
}
/**
* Tests that a symmetric key is created.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void createOctKeyWithDefaultSize(HttpClient httpClient, KeyServiceVersion serviceVersion) {
createKeyAsyncClient(httpClient, serviceVersion);
createOctKeyRunner(null, (createOctKeyOptions) ->
StepVerifier.create(client.createOctKey(createOctKeyOptions))
.assertNext(octKey -> {
assertEquals(createOctKeyOptions.getName(), octKey.getName());
assertEquals(KeyType.OCT_HSM, octKey.getKey().getKeyType());
assertEquals(createOctKeyOptions.getExpiresOn(), octKey.getProperties().getExpiresOn());
assertEquals(createOctKeyOptions.getNotBefore(), octKey.getProperties().getNotBefore());
assertEquals(createOctKeyOptions.getTags(), octKey.getProperties().getTags());
})
.verifyComplete());
}
/**
* Tests that a symmetric key of a valid size is created.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void createOctKeyWithValidSize(HttpClient httpClient, KeyServiceVersion serviceVersion) {
createKeyAsyncClient(httpClient, serviceVersion);
createOctKeyRunner(256, (createOctKeyOptions) ->
StepVerifier.create(client.createOctKey(createOctKeyOptions))
.assertNext(octKey -> {
assertEquals(createOctKeyOptions.getName(), octKey.getName());
assertEquals(KeyType.OCT_HSM, octKey.getKey().getKeyType());
assertEquals(createOctKeyOptions.getExpiresOn(), octKey.getProperties().getExpiresOn());
assertEquals(createOctKeyOptions.getNotBefore(), octKey.getProperties().getNotBefore());
assertEquals(createOctKeyOptions.getTags(), octKey.getProperties().getTags());
})
.verifyComplete());
}
/**
* Tests that a symmetric key of an invalid size cannot be created.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void createOctKeyWithInvalidSize(HttpClient httpClient, KeyServiceVersion serviceVersion) {
createKeyAsyncClient(httpClient, serviceVersion);
createOctKeyRunner(64, (createOctKeyOptions) ->
StepVerifier.create(client.createOctKey(createOctKeyOptions))
.verifyErrorSatisfies(ex ->
assertRestException(ex, ResourceModifiedException.class, HttpURLConnection.HTTP_BAD_REQUEST)));
}
/**
* Tests that random bytes can be retrieved from a Managed HSM.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void getRandomBytes(HttpClient httpClient, KeyServiceVersion serviceVersion) {
createKeyAsyncClient(httpClient, serviceVersion);
getRandomBytesRunner((count) ->
StepVerifier.create(client.getRandomBytes(count))
.assertNext(randomBytes -> assertEquals(count, randomBytes.getBytes().length))
.verifyComplete());
}
/**
* Tests that an existing key can be released.
*/
@Override
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("getTestParameters")
public void releaseKey(HttpClient httpClient, KeyServiceVersion serviceVersion) {
super.releaseKey(httpClient, serviceVersion);
}
}
| mit |
kasperisager/generator-vanilla | app/templates/class.themehooks.php | 526 | <?php if (!defined('APPLICATION')) exit;
/**
* <%= name %> Theme Hooks
*
* @author <%= author.name %><% if (author.email) { %> <<%= author.email %>><% } %>
* @copyright <%= year %> (c) <%= author.name %>
* @license <%= license %>
* @since 1.0.0
*/
class <%= _(name).classify() %>ThemeHooks implements Gdn_IPlugin {
/**
* This will run when you "Enable" the theme
*
* @since 1.0.0
* @access public
* @return bool
*/
public function setup() {
return true;
}
}
| mit |
breakwang/pykit | daemonize/test/foo.py | 297 | import time
import daemonize
fn = '/tmp/foo'
pidfn = '/tmp/test_daemonize.pid'
def write_file(fn, cont):
with open(fn, 'w') as f:
f.write(cont)
def run():
write_file(fn, 'foo-before')
time.sleep(1)
write_file(fn, 'foo-after')
daemonize.daemonize_cli(run, pidfn)
| mit |
mattsoulanille/compSci | RandomDataAnalyzer/RandomData.java | 714 | /**
* RandomDataAnalyzer.java program
* @author Matthew Soulanille
* @version 2014-10-30
*/
import java.util.ArrayList;
import java.lang.Math;
import java.util.Collections;
public class RandomData
{
public double average;
public double max;
public ArrayList<Double> database = new ArrayList<Double>(100);
public RandomData() {
for (int i = 0; i < 100; i++) {
database.add(Math.random() * 1000);
}
double average = 0;
for (int i = 0; i < 100; i++) {
average += (double) database.get(i);
}
average = average/100;
max = Collections.max(database);
}
public void getData() {
System.out.println("Average is: " + average);
System.out.println("Max is: " + max);
}
}
| mit |
MarcelBraghetto/BlogDemos | DijkstraPart2/app/src/main/java/io/github/marcelbraghetto/dijkstra/part2/models/Crab.java | 2518 | package io.github.marcelbraghetto.dijkstra.part2.models;
import android.support.annotation.NonNull;
import java.util.Stack;
import io.github.marcelbraghetto.dijkstra.part2.systems.Graph;
import io.github.marcelbraghetto.dijkstra.part2.utils.MathUtils;
import io.github.marcelbraghetto.dijkstra.part2.R;
import io.github.marcelbraghetto.dijkstra.part2.utils.ScreenUtils;
/**
* Created by Marcel Braghetto on 12/09/15.
*
* Model describing the 'crab' actor.
*/
public class Crab extends Actor {
private static final float EPSILON = ScreenUtils.dpToPx(1f);
private static final float SPEED = 2f;
private final Graph mParentGraph;
private Stack<String> mPath;
private Node mLastVisitedNode;
private Node mTargetNode;
private MutableVector mDirection;
private boolean mActive;
public Crab(@NonNull Graph parentGraph, @NonNull Node initialNode) {
super(ScreenUtils.getBitmap(R.drawable.crab));
mParentGraph = parentGraph;
mDirection = new MutableVector(0.0, 0.0);
mLastVisitedNode = initialNode;
setPosition(mLastVisitedNode.getPosition().x, mLastVisitedNode.getPosition().y);
}
public void setActive(boolean active) {
mActive = active;
}
public void setPath(@NonNull Stack<String> path) {
mPath = path;
if(mPath.size() > 0) {
mTargetNode = mParentGraph.getNode(mPath.pop());
} else {
mTargetNode = null;
}
}
@NonNull
public Node getLastVisitedNode() {
return mLastVisitedNode;
}
@Override
public void update() {
if(mTargetNode == null || !mActive) {
return;
}
// If we arrived at the target node, choose the next node in
// the path or if there are no more, then we've arrived at
// the target node.
if(MathUtils.distanceBetween(mPosition, mTargetNode.getPosition()) < EPSILON) {
if(mPath.size() > 0) {
mTargetNode = mParentGraph.getNode(mPath.pop());
mLastVisitedNode = mTargetNode;
}
return;
}
// At this stage, we need to move Mr. Crab toward the target node...
mDirection.setDirection(mTargetNode.getPosition().x - mPosition.x, mTargetNode.getPosition().y - mPosition.y);
mDirection.normalize();
mPosition.set(
mPosition.x += mDirection.getDeltaX() * SPEED,
mPosition.y += mDirection.getDeltaY() * SPEED);
}
} | mit |
SparkRebel/sparkrebel.com | src/PW/UserBundle/PWUserBundle.php | 196 | <?php
namespace PW\UserBundle;
use Symfony\Component\HttpKernel\Bundle\Bundle;
class PWUserBundle extends Bundle
{
public function getParent()
{
return 'FOSUserBundle';
}
}
| mit |
instructure/lti_skydrive_engine | jsapp/karma.conf.js | 1610 | module.exports = function(config) {
config.set({
basePath: '',
frameworks: ['mocha'],
files: [
// need to figure out how to get webpack to take a glob w/o duplicating
// stuff everywhere
'node_modules/sinon/pkg/sinon.js',
'node_modules/jquery/dist/jquery.js',
'node_modules/jquery.cookie/jquery.cookie.js',
'app/__tests__/main.js'
],
exclude: [],
preprocessors: {
'app/__tests__/main.js': ['webpack']
},
webpack: {
cache: true,
module: {
loaders: [
{ test: /\.less$/, loader: 'style-loader!css-loader!less-loader' },
{ test: /\.js$/, loader: 'jsx-loader' },
{ test: /\.woff$/, loader: 'url-loader?limit=10000&minetype=application/font-woff' },
{ test: /\.ttf$/, loader: 'file-loader' },
{ test: /\.eot$/, loader: 'file-loader' },
{ test: /\.svg$/, loader: 'file-loader' },
{ test: /\.png$/, loader: "url-loader?mimetype=image/png" }
]
}
},
webpackServer: {
stats: {
colors: true
}
},
reporters: ['mocha'],
// reporter options
mochaReporter: {
output: 'autowatch'
},
port: 9876,
colors: true,
logLevel: config.LOG_INFO,
autoWatch: true,
browsers: ['Chrome'],
captureTimeout: 60000,
singleRun: false,
plugins: [
require("karma-mocha"),
require("karma-chrome-launcher"),
require("karma-firefox-launcher"),
require("karma-webpack"),
require("karma-mocha-reporter")
]
});
};
| mit |
mabotech/maboq | py/redis_lua.py | 1110 | """
redis lua
"""
import time
import redis
def main():
# connection pool
r = redis.Redis(host='localhost', port=6379, db=0)
"""
compare value
update value when change
create job to update db when value change
set heartbeat pre tag
"""
lua_code = """if redis.call("EXISTS", KEYS[1]) == 1 then
redis.call("LPUSH", "c1","chan1")
redis.call("PUBLISH", "c1","new")
local payload = redis.call("GET", KEYS[1])
if payload == ARGV[1] then
return "same"
else
redis.call("SET", KEYS[1],ARGV[1])
return payload
end
else
redis.call("SET", KEYS[1],ARGV[1])
return nil
end"""
#benchmark
"""
0.22 ms
4545 times/second
"""
t1 = time.time()
n = 1
for i in xrange(0, n):
v = r.eval(lua_code, 1, "aac","xyz")
t2 = time.time()
t = (t2-t1)*1000/n
print(t)
print(1000/t)
print(v)
h = r.script_load("return 'hello moto'")
print h
print dir(r)
if __name__ == "__main__":
main() | mit |
apiaryio/swagger2blueprint | test.js | 570 | 'use strict';
var assert = require('assert');
var converter = require('./index');
describe('Swagger converter', function () {
it('should read a local file', function (done) {
converter.run({'_': ['petstore_expanded.yaml']}, function (err, blueprint) {
assert.ifError(err);
assert(blueprint);
done();
});
});
it('should read a URL', function (done) {
converter.run({'_': ['http://petstore.swagger.io/v2/swagger.json']}, function (err, blueprint) {
assert.ifError(err);
assert(blueprint);
done();
});
});
});
| mit |
alexmohr/Ikarus | Ikarus.Desktop.Server/Ikarus.Desktop.Server.cpp | 1421 | // Ikarus.Desktop.Server.cpp : Defines the entry point for the console application.
//
#include "stdafx.h"
#include "../Ikarus.Base/CommandHandler.h"
#include "FakePinManager.h"
#include "TcpConnection.h"
#include <iostream>
#include <string>
#include <vector>
#include <sstream>
using namespace Ikarus::Communication;
void WriteCommandDetails(t_command cmd)
{
std::cout << "\tFunction: " << cmd.Function << std::endl;
std::cout << "\tPin: " << cmd.Pin << std::endl;
std::cout << "\tValue: " << cmd.Value << std::endl;
}
int _tmain(int argc, _TCHAR* argv[])
{
FakePinManager* fpm = new FakePinManager();
Ikarus::Communication::CommandHandler* cmdHdlr = new Ikarus::Communication::CommandHandler(fpm);
Ikarus::Desktop::Server::TcpConnection* con = new Ikarus::Desktop::Server::TcpConnection();
con->Open("localhost", 1042);
con->WaitForClient();
while (true)
{
try
{
t_command cmd = con->Receive();
std::cout << "Received command" << std::endl;
WriteCommandDetails(cmd);
std::cout << std::endl << "Executing command" << std::endl;
cmdHdlr->ExecuteCommand(&cmd);
std::cout << "new command" << std::endl;
WriteCommandDetails(cmd);
std::cout << "sending back to client" << std::endl;
con->Send(cmd);
}
catch (...)
{
std::cout << "Client probably has left... waiting for a new one" << std::endl;
con->WaitForClient();
}
}
return 0;
}
| mit |
ubivar/ubivar-python | ubivar/test/resources/test_event.py | 2765 | import os
import ubivar
import warnings
from ubivar.test.helper import (UbivarTestCase, DUMMY_EVENT_1, DUMMY_EVENT_2, DUMMY_EVENT_3)
class UbivarAPIResourcesTests(UbivarTestCase):
def test_event_create(self):
response = ubivar.Event.create(parameters=DUMMY_EVENT_1)
event = response.data[0]
self.assertTrue(hasattr(event, "parameters"))
self.assertTrue(event["id"] == DUMMY_EVENT_1["id"])
response = ubivar.Event.create(parameters=DUMMY_EVENT_2)
event = response.data[0]
self.assertTrue(hasattr(event, "parameters"))
self.assertTrue(event["id"] == DUMMY_EVENT_2["id"])
response = ubivar.Event.create(parameters=DUMMY_EVENT_3)
event = response.data[0]
self.assertTrue(hasattr(event, "parameters"))
self.assertTrue(event["id"] == DUMMY_EVENT_3["id"])
def test_event_list(self):
response = ubivar.Event.list()
self.assertTrue(len(response.data) == 3)
def test_event_list_filter_id_limit(self):
response = ubivar.Event.list(limit=1)
self.assertTrue(len(response.data) == 1)
self.assertTrue(hasattr(response.data, "__iter__"))
def test_event_list_filter_id_order(self):
response = ubivar.Event.list(order="-id")
self.assertTrue(len(response.data) == 3)
self.assertTrue(response.data[0]['id'] == DUMMY_EVENT_3['id'])
response = ubivar.Event.list(order="id")
self.assertTrue(len(response.data) == 3)
self.assertTrue(response.data[0]['id'] == DUMMY_EVENT_1['id'])
def test_event_list_filter_id_gte(self):
response = ubivar.Event.list(id={"gte": 2}, order="id")
self.assertTrue(len(response.data) == 2)
self.assertTrue(response.data[0]['id'] == DUMMY_EVENT_2['id'])
self.assertTrue(response.data[1]['id'] == DUMMY_EVENT_3['id'])
def test_event_list_filter_id_gt(self):
response = ubivar.Event.list(id={"gt": 1}, order="id")
self.assertTrue(len(response.data) == 2)
self.assertTrue(response.data[0]['id'] == DUMMY_EVENT_2['id'])
self.assertTrue(response.data[1]['id'] == DUMMY_EVENT_3['id'])
def test_event_list_filter_id_lte(self):
response = ubivar.Event.list(id={"lte": 2}, order="id")
self.assertTrue(len(response.data) == 2)
self.assertTrue(response.data[0]['id'] == DUMMY_EVENT_1['id'])
self.assertTrue(response.data[1]['id'] == DUMMY_EVENT_2['id'])
def test_event_list_filter_id_lt(self):
response = ubivar.Event.list(id={"lt": 3}, order="id")
self.assertTrue(len(response.data) == 2)
self.assertTrue(response.data[0]['id'] == DUMMY_EVENT_1['id'])
self.assertTrue(response.data[1]['id'] == DUMMY_EVENT_2['id'])
| mit |
butala/pyrsss | pyrsss/mag/themis_stations.py | 1448 | from urllib2 import urlopen
from contextlib import closing
from collections import OrderedDict, namedtuple
INFO_URL = 'http://themis.ssl.berkeley.edu/gmag/gmag_groups.php'
class Info(namedtuple('Info', 'lat lon name mlat mlon')):
pass
PARSE_MAP = {'ccode': ('key', str),
'lat': ('lat', float),
'lng': ('lon', float),
'name': ('name', str),
'mag_lat': ('mlat', float),
'mag_lng': ('mlon', float)}
def get_station_info(info_url=INFO_URL, parse_map=PARSE_MAP):
"""
Parse information for magnetometer sites that report data to the
THEMIS project. Returns a mapping between station IDs and
:class:`Info` regarding the site.
"""
station_info = OrderedDict()
with closing(urlopen(info_url)) as fid:
stn_data = {}
for line in fid:
if line.startswith('};'):
key = stn_data.pop('key')
if 'mlat' not in stn_data:
stn_data['mlat'] = float('nan')
if 'mlon' not in stn_data:
stn_data['mlon'] = float('nan')
station_info[key] = Info(**stn_data)
stn_data = {}
line = line.lstrip()
for search_key, (key, convert) in parse_map.iteritems():
if line.startswith(search_key):
stn_data[key] = convert(line.split('"')[1])
return station_info
| mit |
symulakr/gwt-generators | src/main/java/com/github/symulakr/gwt/generators/rebind/utils/StringUtils.java | 742 | package com.github.symulakr.gwt.generators.rebind.utils;
public class StringUtils
{
public static boolean isEmpty(String str)
{
return str == null || str.length() == 0;
}
public static boolean isNotEmpty(String str)
{
return !StringUtils.isEmpty(str);
}
public static boolean equals(CharSequence cs1, CharSequence cs2)
{
return cs1 == null ? cs2 == null : cs1.equals(cs2);
}
public static String toString(Object... objects)
{
StringBuilder sb = new StringBuilder();
String delimiter = "";
for (Object object : objects)
{
sb.append(delimiter).
append(object.toString());
delimiter = ", ";
}
return sb.toString();
}
}
| mit |
mandino/hotelmilosantabarbara.com | wp-content/plugins/wpml-string-translation/menu/string-translation.php | 42062 | <?php
/** @var WPML_String_Translation $WPML_String_Translation */
global $sitepress, $WPML_String_Translation, $wpdb, $wpml_st_string_factory;
$string_settings = $WPML_String_Translation->get_strings_settings();
icl_st_reset_current_translator_notifications();
if((!isset($sitepress_settings['existing_content_language_verified']) || !$sitepress_settings['existing_content_language_verified']) /*|| 2 > count($sitepress->get_active_languages())*/){
return;
}
if ( filter_input( INPUT_GET, 'trop', FILTER_SANITIZE_NUMBER_INT ) > 0 ) {
include dirname(__FILE__) . '/string-translation-translate-options.php';
return;
} elseif ( filter_input( INPUT_GET, 'download_mo', FILTER_SANITIZE_FULL_SPECIAL_CHARS ) ) {
include dirname(__FILE__) . '/auto-download-mo.php';
return;
}
$status_filter = filter_input( INPUT_GET, 'status', FILTER_SANITIZE_FULL_SPECIAL_CHARS, FILTER_NULL_ON_FAILURE );
$status_filter_text = $status_filter;
$status_filter_lang = false;
if ( preg_match(
"#" . ICL_STRING_TRANSLATION_WAITING_FOR_TRANSLATOR . "-(.+)#",
$status_filter_text,
$matches
)
) {
$status_filter = ICL_STRING_TRANSLATION_WAITING_FOR_TRANSLATOR;
$status_filter_lang = $matches[1];
}else{
$status_filter = filter_input( INPUT_GET, 'status', FILTER_SANITIZE_NUMBER_INT, FILTER_NULL_ON_FAILURE );
}
//$status_filter = $status_filter !== false ? (int) $status_filter : null;
$context_filter = filter_input( INPUT_GET, 'context', FILTER_SANITIZE_FULL_SPECIAL_CHARS );
$search_filter = filter_input( INPUT_GET, 'search', FILTER_SANITIZE_FULL_SPECIAL_CHARS );
$exact_match = filter_input( INPUT_GET, 'em', FILTER_VALIDATE_BOOLEAN );
$active_languages = $sitepress->get_active_languages();
$icl_contexts = icl_st_get_contexts( $status_filter );
$unfiltered_context_counts = $status_filter !== false ? icl_st_get_contexts( false ) : $icl_contexts;
function context_array( $contexts ) {
$count_array = array();
$contexts = $contexts ? array_filter( $contexts ) : array();
foreach ( $contexts as $c ) {
$count_array[ $c->context ] = $c->c;
}
return $count_array;
}
$available_contexts = array_keys( context_array( $icl_contexts ) );
$unfiltered_contexts = context_array( $unfiltered_context_counts );
function _icl_string_translation_rtl_div($language) {
if (in_array($language, array('ar','he','fa'))) {
echo ' dir="rtl" style="text-align:right;direction:rtl;"';
} else {
echo ' dir="ltr" style="text-align:left;direction:ltr;"';
}
}
function _icl_string_translation_rtl_textarea($language) {
if (in_array($language, array('ar','he','fa'))) {
echo ' dir="rtl" style="text-align:right;direction:rtl;width:100%"';
} else {
echo ' dir="ltr" style="text-align:left;direction:ltr;width:100%"';
}
}
$po_importer = apply_filters( 'wpml_st_get_po_importer', null );
?>
<div class="wrap">
<h2><?php echo __('String translation', 'wpml-string-translation') ?></h2>
<?php
do_action( 'display_basket_notification', 'st_dashboard_top' );
?>
<?php if( isset( $po_importer ) && $po_importer->has_strings() ): ?>
<p><?php printf(__("These are the strings that we found in your .po file. Please carefully review them. Then, click on the 'add' or 'cancel' buttons at the %sbottom of this screen%s. You can exclude individual strings by clearing the check boxes next to them.", 'wpml-string-translation'), '<a href="#add_po_strings_confirm">', '</a>'); ?></p>
<form method="post" id="wpml_add_strings" action="<?php echo admin_url("admin.php?page=" . WPML_ST_FOLDER . "/menu/string-translation.php");?>">
<input type="hidden" id="strings_json" name="strings_json">
<?php wp_nonce_field('add_po_strings') ?>
<?php $use_po_translations = filter_input(INPUT_POST, 'icl_st_po_translations', FILTER_VALIDATE_BOOLEAN); ?>
<?php if ( $use_po_translations == true ): ?>
<input type="hidden" name="action" value="icl_st_save_strings" />
<input
type="hidden"
name="icl_st_po_language"
value="<?php echo filter_input(INPUT_POST, 'icl_st_po_language', FILTER_SANITIZE_FULL_SPECIAL_CHARS); ?>"
/>
<?php endif; ?>
<?php
$icl_st_domain = filter_input(INPUT_POST, 'icl_st_i_context_new', FILTER_SANITIZE_FULL_SPECIAL_CHARS);
$icl_st_domain = $icl_st_domain ? $icl_st_domain : filter_input(INPUT_POST, 'icl_st_i_context', FILTER_SANITIZE_FULL_SPECIAL_CHARS);
?>
<input
type="hidden"
name="icl_st_domain_name"
value="<?php echo $icl_st_domain ?>"
/>
<table id="icl_po_strings" class="widefat" cellspacing="0">
<thead>
<tr>
<th scope="col" class="manage-column column-cb check-column"><input type="checkbox" checked="checked" name="" /></th>
<th><?php echo __('String', 'wpml-string-translation') ?></th>
</tr>
</thead>
<tfoot>
<tr>
<th scope="col" class="manage-column column-cb check-column"><input type="checkbox" checked="checked" name="" /></th>
<th><?php echo __('String', 'wpml-string-translation') ?></th>
</tr>
</tfoot>
<tbody>
<?php $k = -1; foreach( $po_importer->get_strings( ) as $str ): $k++; ?>
<tr>
<td><input class="icl_st_row_cb" type="checkbox" name="icl_strings_selected[]"
<?php if($str['exists'] || $use_po_translations !== true): ?>checked="checked"<?php endif;?> value="<?php echo $k ?>" /></td>
<td>
<input type="text" name="icl_strings[]" value="<?php echo esc_attr($str['string']) ?>" readonly="readonly" style="width:100%;" size="100" />
<?php if( $use_po_translations === true ):?>
<input type="text" name="icl_translations[]" value="<?php echo esc_attr($str['translation']) ?>" readonly="readonly" style="width:100%;<?php if($str['fuzzy']):?>;background-color:#ffecec<?php endif; ?>" size="100" />
<input type="hidden" name="icl_fuzzy[]" value="<?php echo $str['fuzzy'] ?>" />
<input type="hidden" name="icl_name[]" value="<?php echo $str['name'] ?>" />
<input type="hidden" name="icl_context[]" value="<?php echo $str['context'] ?>" />
<?php endif; ?>
<?php if($str['name'] != md5($str['string'])): ?>
<i><?php printf(__('Name: %s', 'wpml-string-translation'), $str['name']) ?></i><br />
<?php endif ?>
</td>
</tr>
<?php endforeach; ?>
</tbody>
</table>
<a name="add_po_strings_confirm"></a>
<p><span style="float: left"><input class="js-wpml-btn-cancel button" type="button" value="<?php echo __( 'Cancel', 'wpml-string-translation' ); ?>" onclick="location.href='admin.php?page=<?php echo htmlspecialchars( $_GET['page'], ENT_QUOTES ) ?>'"/>
<input class="js-wpml-btn-add-strings button-primary" type="submit" value="<?php echo __('Add selected strings', 'wpml-string-translation'); ?>" /></span><span class="spinner" style="float: left"></span>
</p>
</form>
<?php else: ?>
<p style="line-height:220%;">
<?php echo __('Select which strings to display:', 'wpml-string-translation'); ?>
<select name="icl_st_filter_status">
<?php
$selected = selected(false, $status_filter, false);
?>
<option value="" <?php echo $selected;?>>
<?php echo __('All strings', 'wpml-string-translation') ?>
</option>
<?php
$selected = selected(ICL_TM_COMPLETE, $status_filter, false);
?>
<option value="<?php echo ICL_TM_COMPLETE ?>" <?php echo $selected;?>>
<?php echo WPML_ST_String_Statuses::get_status( ICL_TM_COMPLETE ) ?>
</option>
<?php
if ( icl_st_is_translator() ) {
if ( $icl_st_pending = icl_st_get_pending_string_translations_stats() ) {
foreach ( $icl_st_pending as $lang => $count ) {
$lang_details = $sitepress->get_language_details( $lang );
$selected = '';
if ( isset( $status_filter_lang ) ) {
$selected = selected( $lang, $status_filter_lang, false );
}
?>
<option value="<?php echo ICL_TM_WAITING_FOR_TRANSLATOR . '-' . $lang ?>" <?php echo $selected; ?>>
<?php printf( __( 'Pending %s translation (%d)', 'wpml-string-translation' ), $lang_details[ 'display_name' ], $count ) ?>
</option>
<?php
}
}
} else {
$selected = selected(ICL_TM_NOT_TRANSLATED, $status_filter, false);
?>
<option value="<?php echo ICL_TM_NOT_TRANSLATED ?>" <?php echo $selected; ?>>
<?php echo __( 'Translation needed', 'wpml-string-translation' ) ?>
</option>
<?php
$selected = selected(ICL_TM_WAITING_FOR_TRANSLATOR, $status_filter, false);
?>
<option value="<?php echo ICL_TM_WAITING_FOR_TRANSLATOR ?>" <?php echo $selected; ?>>
<?php echo __( 'Waiting for translator', 'wpml-string-translation' ) ?>
</option>
<?php
}
?>
</select>
<?php if ( ! empty( $icl_contexts ) ): ?>
<span style="white-space:nowrap">
<?php echo __( 'Select strings within domain:', 'wpml-string-translation' ) ?>
<select name="icl_st_filter_context">
<option value=""
<?php if ( $context_filter === false ): ?>selected="selected"<?php endif; ?>><?php echo __( 'All domains', 'wpml-string-translation' ) ?></option>
<?php foreach ( $icl_contexts as $v ): ?>
<?php
if ( ! $v->context ) {
$v->context = WPML_ST_Strings::EMPTY_CONTEXT_LABEL;
}
?>
<option value="<?php echo esc_attr( $v->context ) ?>"
data-unfiltered-count="<?php echo( isset( $unfiltered_contexts[ $v->context ] ) ? $unfiltered_contexts[ $v->context ] : 0 ) ?>"
<?php if ( $context_filter == filter_var( $v->context, FILTER_SANITIZE_FULL_SPECIAL_CHARS ) ): ?>selected="selected"<?php endif; ?>><?php echo esc_html( $v->context ) . ' (' . $v->c . ')'; ?></option>
<?php endforeach; ?>
</select>
</span>
<?php endif; ?>
<span style="white-space:nowrap">
<label>
<?php echo __('Search for:', 'wpml-string-translation')?>
<input type="text" id="icl_st_filter_search" value="<?php echo $search_filter ?>" />
</label>
<label>
<input type="checkbox" id="icl_st_filter_search_em" value="1" <?php if($exact_match):?>checked="checked"<?php endif;?> />
<?php echo __('Exact match', 'wpml-string-translation')?>
</label>
<input class="button" type="button" value="<?php _e('Search', 'wpml-string-translation')?>" id="icl_st_filter_search_sb" />
</span>
<?php if($search_filter): ?>
<span style="white-space:nowrap">
<?php printf(__('Showing only strings that contain %s', 'wpml-string-translation'), '<i>' . esc_html($search_filter). '</i>') ; ?>
<input class="button" type="button" value="<?php _e('Exit search', 'wpml-string-translation')?>" id="icl_st_filter_search_remove" />
</span>
<?php endif; ?>
</p>
<?php if( ! empty( $icl_contexts ) ): ?>
<p><a href="#" id="wpml-language-of-domains-link"><?php _e( 'Languages of domains', 'wpml-string-translation' ); ?></a></p>
<?php endif; ?>
<?php
$string_translation_table_ui = new WPML_String_Translation_Table( icl_get_string_translations() );
$string_translation_table_ui->render( );
$change_string_language_dialog = new WPML_Change_String_Language_Dialog( $wpdb, $sitepress );
$change_string_language_dialog->render( );
if( ! empty( $icl_contexts ) ) {
$string_factory = new WPML_ST_String_Factory( $wpdb );
$change_string_domain_language_dialog = new WPML_Change_String_Domain_Language_Dialog( $wpdb, $sitepress, $string_factory );
$change_string_domain_language_dialog->render( $icl_contexts );
}
$get_show_results = filter_input( INPUT_GET, 'show_results', FILTER_SANITIZE_FULL_SPECIAL_CHARS );
$get_page = filter_input( INPUT_GET, 'page', FILTER_SANITIZE_URL );
?>
<?php if($wp_query->found_posts > 10): ?>
<div class="tablenav">
<?php
$paged = filter_input( INPUT_GET, 'paged', FILTER_SANITIZE_NUMBER_INT );
$paged = $paged && $get_show_results !== 'all' ? $paged : 1;
$page_links = paginate_links( array(
'base' => add_query_arg('paged', '%#%' ),
'format' => '',
'prev_text' => '«',
'next_text' => '»',
'total' => $wp_query->max_num_pages,
'current' => $paged,
'add_args' => isset($icl_translation_filter)?$icl_translation_filter:array()
)
);
$query_url_params = '?page=' . $get_page;
$query_url_params .= '&paged=';
$query_url_params .= $paged;
$query_url_params .= ( $context_filter !== null ? ( '&context=' . $context_filter ) : '' );
$query_url_params .= ( $status_filter !== null ? ( '&status=' . $status_filter ) : '' );
?>
<?php if( $get_show_results === 'all' ): ?>
<div class="tablenav-pages">
<a href="admin.php<?php echo $query_url_params ?>"><?php printf(__('Display %d results per page', 'wpml-string-translation'), $sitepress_settings['st']['strings_per_page']); ?></a>
</div>
<?php endif; ?>
<div class="tablenav-pages">
<?php if ( $page_links ): ?>
<?php $page_links_text = sprintf( '<span class="displaying-num">' . __( 'Displaying %s–%s of %s', 'wpml-string-translation' ) . '</span>%s',
number_format_i18n( ( $paged - 1 ) * $wp_query->query_vars['posts_per_page'] + 1 ),
number_format_i18n( min( $paged * $wp_query->query_vars['posts_per_page'], $wp_query->found_posts ) ),
number_format_i18n( $wp_query->found_posts ),
$page_links
); echo $page_links_text;
?>
<?php endif; ?>
<?php if( !$get_show_results ): ?>
<?php echo __('Strings per page:', 'wpml-string-translation')?>
<?php
$spp_qsa = '';
$params = array_filter(
array(
'&context=' => $context_filter,
'&status=' => $status_filter,
'&search=' => $search_filter,
'&em=' => $exact_match
)
);
foreach ( $params as $key => $p ) {
$spp_qsa .= $key . $p;
}
$strings_per_page = $wp_query->query_vars['posts_per_page'];
?>
<select name="icl_st_per_page"
onchange="location.href='admin.php?page=<?php echo $get_page ?><?php echo $spp_qsa ?>&strings_per_page='+this.value">
<option value="10"<?php if ( $strings_per_page == 10 ) {
echo ' selected="selected"';
} ?>>10
</option>
<option value="20"<?php if ( $strings_per_page == 20 ) {
echo ' selected="selected"';
} ?>>20
</option>
<option value="50"<?php if ( $strings_per_page == 50 ) {
echo ' selected="selected"';
} ?>>50
</option>
<option value="100"<?php if ( $strings_per_page == 100 ) {
echo ' selected="selected"';
} ?>>100
</option>
</select>
<?php
$url = 'admin.php?page=' . $_GET['page'] . '&show_results=all';
if (isset( $_GET['context'] )) {
$url .= '&context=' . $_GET['context'];
}
if ( isset( $_GET[ 'status' ] ) ) {
$url .= '&status=' . $_GET['status'];
}
$url = esc_url( $url );
?>
<a href="<?php echo $url; ?>"><?php echo __( 'Display all results', 'wpml-string-translation' ); ?></a>
<?php endif; ?>
</div>
</div>
<?php endif; ?>
<?php if(current_user_can('manage_options')): // the rest is only for admins. not for editors ?>
<span class="subsubsub">
<input type="hidden" id="_icl_nonce_dstr"
value="<?php echo wp_create_nonce( 'icl_st_delete_strings_nonce' ) ?>"/>
<div id="wpml-st-package-incomplete" style="display:none;color:red;"><?php echo __( "You have selected strings belonging to a package. Please select all strings from the affected package or unselect these strings.", 'wpml-string-translation' ) ?></div>
<input type="button" class="button-secondary" id="icl_st_delete_selected"
value="<?php echo __( 'Delete selected strings', 'wpml-string-translation' ) ?>"
data-confirm="<?php echo __( "Are you sure you want to delete these strings?\nTheir translations will be deleted too.", 'wpml-string-translation' ) ?>"
disabled="disabled"/>
<input type="button" class="button-secondary" id="icl_st_change_lang_selected"
value="<?php echo __( 'Change language of selected strings', 'wpml-string-translation' ) ?>"
disabled="disabled"/>
</span>
<br clear="all" />
<br />
<?php do_action( 'wpml_st_below_menu', $status_filter_lang, 10, 2 ) ?>
<br style="clear:both;" />
<div id="dashboard-widgets-wrap">
<div id="dashboard-widgets" class="metabox-holder">
<div class="postbox-container" style="width: 49%;">
<div id="normal-sortables-stsel" class="meta-box-sortables ui-sortable">
<div id="dashboard_wpml_stsel_1" class="postbox">
<div class="handlediv" title="<?php echo __('Click to toggle', 'wpml-string-translation'); ?>">
<br/>
</div>
<h3 class="hndle">
<span><?php echo __('Track where strings appear on the site', 'wpml-string-translation')?></span>
</h3>
<div class="inside">
<p class="sub"><?php echo __("WPML can keep track of where strings are used on the public pages. Activating this feature will enable the 'view in page' functionality and make translation easier.", 'wpml-string-translation')?></p>
<form id="icl_st_track_strings" name="icl_st_track_strings" action="">
<?php wp_nonce_field('icl_st_track_strings_nonce', '_icl_nonce'); ?>
<p class="icl_form_errors" style="display:none"></p>
<ul>
<li>
<input type="hidden" name="icl_st[track_strings]" value="0" />
<?php
$track_strings = array_key_exists( 'track_strings', $string_settings ) && $string_settings['track_strings'];
$track_strings_checked = checked( true, $track_strings, false );
$track_strings_display = ' style="color: red;' . (! $track_strings ? 'display: none;' : '') . '""';
$url = 'https://wpml.org/documentation/getting-started-guide/string-translation/#track-strings';
$message_sentences = array();
$anchor_text = _x( 'String Tracking', 'String Tracking warning: sentence 1, anchor text', 'wpml-string-translation' );
$message_sentences[] = _x( '%s allows you to see where strings come from, so you can translate them accurately.', 'String Tracking warning: sentence 1', 'wpml-string-translation' );
$message_sentences[] = _x( 'It needs to parse the PHP source files and the output HTML.', 'String Tracking warning: sentence 2', 'wpml-string-translation' );
$message_sentences[] = _x( 'This feature is CPU-intensive and should only be used while you are developing sites.', 'String Tracking warning: sentence 3', 'wpml-string-translation' );
$message_sentences[] = _x( 'Remember to turn it off before going to production, to avoid performance problems.', 'String Tracking warning: sentence 4', 'wpml-string-translation' );
$anchor = '<a href="' . $url . '" target="_blank">' . $anchor_text . '</a>';
$message = sprintf( implode( ' ', $message_sentences ), $anchor );
?>
<input type="checkbox" id="track_strings" name="icl_st[track_strings]" value="1" <?php echo $track_strings_checked; ?> />
<label for="track_strings"><?php _e( 'Track where strings appear on the site', 'wpml-string-translation' ); ?></label>
<p class="js-track-strings-note" <?php echo $track_strings_display; ?>>
<?php echo $message; ?>
</p>
<p><a href="https://wpml.org/?p=9073" target="_blank"><?php _e('Performance considerations', 'wpml-string-translation') ?> »</a></p>
</li>
<li>
<?php
$hl_color_default = '#FFFF00';
$hl_color = !empty($string_settings['hl_color']) ? $string_settings['hl_color'] : $hl_color_default;
$hl_color_label = __( 'Highlight color for strings', 'wpml-string-translation' );
$color_picker_args = array(
'input_name_group' => 'icl_st',
'input_name_id' => 'hl_color',
'default' => $hl_color_default,
'value' => $hl_color,
'label' => $hl_color_label,
);
$wpml_color_picker = new WPML_Color_Picker($color_picker_args);
echo $wpml_color_picker->get_current_language_color_selector_control();
?>
</li>
</ul>
<p>
<input class="button-secondary" type="submit" name="iclt_st_save" value="<?php _e('Apply', 'wpml-string-translation')?>" />
<span class="icl_ajx_response" id="icl_ajx_response2" style="display:inline"></span>
</p>
</form>
</div>
</div>
<div id="dashboard_wpml_stsel_1.5" class="postbox wpml-st-exclude-contexts">
<div class="handlediv" title="<?php echo esc_attr__('Click to toggle', 'wpml-string-translation'); ?>">
<br/>
</div>
<h3 class="hndle">
<span><?php echo esc_html__('Auto register strings for translation', 'wpml-string-translation')?></span>
</h3>
<div class="inside">
<p class="sub"><?php echo esc_html__('WPML can automatically register strings for translation. This allows you to translate user-generated content with minimal PHP code.', 'wpml-string-translation')?></p>
<p class="wpml-st-excluded-info"
data-all-included="<?php echo esc_attr__('Strings from all text domains will be auto-registered', 'wpml-string-translation') ?>"
data-all-excluded="<?php echo esc_attr__('Strings from all text domains are excluded', 'wpml-string-translation') ?>"
data-excluded-preview="<?php echo esc_attr__('You excluded: ', 'wpml-string-translation') ?>"
data-included-preview="<?php echo esc_attr__('You included: ', 'wpml-string-translation') ?>"
data-preview-suffix="<?php echo esc_attr__('and others', 'wpml-string-translation') ?>"
>
</p>
<p>
<input type="button"
class="button-secondary js-wpml-autoregister-edit-contexts"
value="<?php echo esc_attr__('Edit', 'wpml-string-translation')?>"
/>
</p>
<div class="wpml-st-exclude-contexts-box"
style="display:none;"
title="<?php echo esc_attr__('Auto-register strings from these text domains', 'wpml-string-translation');?>"
>
<form method="post" action="" data-nonce="<?php echo wp_create_nonce( 'wpml-st-cancel-button' ); ?>" >
<?php
$exclude = new WPML_Autoregister_Context_Exclude( $wpdb, new WPML_ST_Settings() );
$excluded = $exclude->get_excluded_contexts();
$has_excluded = count($excluded) > 0;
?>
<div id="wpml-st-filter-and-select-all-box">
<input type="input" name="search" placeholder="<?php echo esc_attr__('Search', 'wpml-string-translation')?>" />
<br/>
<p>
<input type="checkbox" name="select_all" <?php checked( false, $has_excluded); ?> />
<span><?php esc_html__('Select all', 'wpml-string-translation')?></span>
</p>
</div>
<div class="contexts">
<?php foreach ($exclude->get_contexts_and_their_exclude_status() as $context => $status): ?>
<?php if(strlen($context)): ?>
<p>
<input
type="checkbox"
name="<?php echo WPML_Autoregister_Context_Exclude::SETTING_KEY ?>[]"
value="<?php echo $context ?>"
<?php checked(false, $status); ?>
/>
<span><?php echo $context; ?></span>
</p>
<?php endif; ?>
<?php endforeach; ?>
</div>
</form>
</div>
</div>
</div>
</div>
</div>
<div class="postbox-container" style="width: 49%;">
<div id="normal-sortables-poie" class="meta-box-sortables ui-sortable">
<div id="dashboard_wpml_st_poie" class="postbox">
<div class="handlediv" title="<?php echo __('Click to toggle', 'wpml-string-translation'); ?>">
<br/>
</div>
<h3 class="hndle">
<span><?php echo __('Import / export .po', 'wpml-string-translation')?></span>
</h3>
<div class="inside">
<h5><?php echo __('Import', 'wpml-string-translation')?></h5>
<form id="icl_st_po_form" action="" name="icl_st_po_form" method="post" enctype="multipart/form-data">
<?php wp_nonce_field('icl_po_form') ?>
<p class="sub">
<label for="icl_po_file"><?php echo __('.po file:', 'wpml-string-translation')?></label>
<input id="icl_po_file" class="button primary" type="file" name="icl_po_file" />
</p>
<p class="sub" style="line-height:2.3em">
<input type="checkbox" name="icl_st_po_translations" id="icl_st_po_translations" />
<label for="icl_st_po_translations"><?php echo __('Also create translations according to the .po file', 'wpml-string-translation')?></label>
<select name="icl_st_po_language" id="icl_st_po_language" style="display:none">
<?php foreach($active_languages as $al): if($al['code']==$string_settings['strings_language']) continue; ?>
<option value="<?php echo $al['code'] ?>"><?php echo $al['display_name'] ?></option>
<?php endforeach; ?>
</select>
</p>
<p class="sub" style="line-height:2.3em" >
<?php echo __('Select what the strings are for:', 'wpml-string-translation');?>
<?php if(!empty($available_contexts)): ?>
<span>
<select name="icl_st_i_context">
<option value="">-------</option>
<?php foreach($available_contexts as $v):?>
<option value="<?php echo esc_attr($v)?>" <?php if($context_filter == $v ):?>selected="selected"<?php endif;?>><?php echo $v; ?></option>
<?php endforeach; ?>
</select>
<a href="#" onclick="var __nxt = jQuery(this).parent().next(); jQuery(this).prev().val(''); jQuery(this).parent().fadeOut('fast',function(){__nxt.fadeIn('fast')});return false;"><?php echo __('new','wpml-string-translation')?></a>
</span>
<?php endif; ?>
<span <?php if(!empty($available_contexts)):?>style="display:none"<?php endif ?>>
<input type="text" name="icl_st_i_context_new" />
<?php if(!empty($available_contexts)):?>
<a href="#" onclick="var __prv = jQuery(this).parent().prev(); jQuery(this).prev().val(''); jQuery(this).parent().fadeOut('fast',function(){__prv.fadeIn('fast')});return false;"><?php echo __('select from existing','wpml-string-translation')?></a>
<?php endif ?>
</span>
</p>
<p>
<input class="button" name="icl_po_upload" id="icl_po_upload" type="submit" value="<?php echo __('Submit', 'wpml-string-translation')?>" />
<span id="icl_st_err_domain" class="icl_error_text" style="display:none"><?php echo __('Please enter a domain!', 'wpml-string-translation')?></span>
<span id="icl_st_err_po" class="icl_error_text" style="display:none"><?php echo __('Please select the .po file to upload!', 'wpml-string-translation')?></span>
</p>
</form>
<?php if(!empty($icl_contexts)):?>
<h5><?php echo __('Export strings into .po/.pot file', 'wpml-string-translation')?></h5>
<?php
if ( version_compare( WPML_ST_VERSION, '2.2', '<=' ) ) {
?>
<div class="below-h2 error">
<?php echo __( 'PO export may be glitchy. We are working to fix it.', 'wpml-string-translation' ); ?>
</div>
<?php
}
?>
<form method="post" action="">
<?php wp_nonce_field('icl_po_export') ?>
<p>
<?php echo __('Select domain:', 'wpml-string-translation')?>
<select name="icl_st_e_context" id="icl_st_e_context">
<?php foreach($icl_contexts as $v):?>
<option value="<?php echo esc_attr($v->context)?>" <?php if($context_filter == $v->context ):?>selected="selected"<?php endif;?>><?php echo $v->context . ' ('.$v->c.')'; ?></option>
<?php endforeach; ?>
</select>
</p>
<p style="line-height:2.3em">
<input type="checkbox" name="icl_st_pe_translations" id="icl_st_pe_translations" checked="checked" value="1" onchange="if(jQuery(this).attr('checked'))jQuery('#icl_st_e_language').fadeIn('fast'); else jQuery('#icl_st_e_language').fadeOut('fast')" />
<label for="icl_st_pe_translations"><?php echo __('Also include translations', 'wpml-string-translation')?></label>
<select name="icl_st_e_language" id="icl_st_e_language">
<?php foreach($active_languages as $al): if($al['code']==$string_settings['strings_language']) continue; ?>
<option value="<?php echo $al['code'] ?>"><?php echo $al['display_name'] ?></option>
<?php endforeach; ?>
</select>
</p>
<p><input type="submit" class="button-secondary" name="icl_st_pie_e" value="<?php echo __('Submit', 'wpml-string-translation')?>" /></p>
<?php endif ?>
</form>
</div>
</div>
</div>
</div>
<div class="postbox-container" style="width: 49%;">
<div id="normal-sortables-moreoptions" class="meta-box-sortables ui-sortable">
<div id="dashboard_wpml_st_poie" class="postbox">
<div class="handlediv" title="<?php echo __('Click to toggle', 'wpml-string-translation'); ?>">
<br/>
</div>
<h3 class="hndle">
<span><?php echo __('More options', 'wpml-string-translation')?></span>
</h3>
<div class="inside">
<form id="icl_st_more_options" name="icl_st_more_options" method="post">
<?php wp_nonce_field('icl_st_more_options_nonce', '_icl_nonce') ?>
<div>
<?php
$editable_roles = get_editable_roles();
if(!isset($string_settings['translated-users'])) $string_settings['translated-users'] = array();
$tnames = array();
foreach($editable_roles as $role => $details){
if(in_array($role, $string_settings['translated-users'])){
$tnames[] = translate_user_role($details['name'] );
}
}
$tustr = '<span id="icl_st_tusers_list">';
if(!empty($tnames)){
$tustr .= join(', ' , array_map('translate_user_role', $tnames));
}else{
$tustr = __('none', 'wpml-string-translation');
}
$tustr .= '</span>';
$tustr .= ' <a href="#" onclick="jQuery(\'#icl_st_tusers\').slideToggle();return false;">' . __('edit', 'wpml-string-translation') . '</a>';
?>
<?php printf(__('Translating users of types: %s', 'wpml-string-translation'), $tustr); ?>
<div id="icl_st_tusers" style="padding:6px;display: none;">
<?php
foreach ( $editable_roles as $role => $details ) {
$name = translate_user_role($details['name'] );
$checked = in_array($role, (array)$string_settings['translated-users']) ? ' checked="checked"' : '';
?>
<label><input type="checkbox" name="users[<?php echo $role ?>]" value="1"<?php echo $checked ?>/> <span><?php echo $name ?></span></label>
<?php
}
?>
</div>
</div>
<p class="submit">
<input class="button-secondary" type="submit" value="<?php esc_attr_e('Apply', 'wpml-string-translation') ?>" />
<span class="icl_ajx_response" id="icl_ajx_response4" style="display:inline"></span>
</p>
</form>
</div>
</div>
</div>
</div>
</div>
<br clear="all" /><br />
<a href="admin.php?page=<?php echo WPML_ST_FOLDER ?>/menu/string-translation.php&trop=1"><?php _e('Translate texts in admin screens »', 'wpml-string-translation'); ?></a>
<?php endif; //if(current_user_can('manage_options') ?>
<?php endif; ?>
<?php do_action('icl_menu_footer'); ?>
</div>
| mit |
sonata-project/SonataMediaBundle | src/Resources/config/actions.php | 1006 | <?php
declare(strict_types=1);
/*
* This file is part of the Sonata Project package.
*
* (c) Thomas Rabaix <thomas.rabaix@sonata-project.org>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
use Sonata\MediaBundle\Action\MediaDownloadAction;
use Symfony\Component\DependencyInjection\Loader\Configurator\ContainerConfigurator;
use Symfony\Component\DependencyInjection\Loader\Configurator\ReferenceConfigurator;
return static function (ContainerConfigurator $containerConfigurator): void {
// Use "service" function for creating references to services when dropping support for Symfony 4.4
$containerConfigurator->services()
->set('sonata.media.action.media_download', MediaDownloadAction::class)
->public()
->args([
new ReferenceConfigurator('sonata.media.manager.media'),
new ReferenceConfigurator('sonata.media.pool'),
]);
};
| mit |
selametsubu/newmpo | application/models/Vw_smart_vs_sas_detail_last_m.php | 2964 | <?php
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
/**
* Description of Vw_smart_vs_sas_detail_last_m
*
* @author Selamet Subu - Dell 5459
*/
class Vw_smart_vs_sas_detail_last_m extends My_model {
//put your code here
var $table = "vw_smart_vs_sas_detail_last";
var $view = "vw_smart_vs_sas_detail_last";
var $primary_key = "keukemid";
function get_data($where = NULL, $order_by = NULL) {
if (!empty($where))
$this->db->where($where);
if (!empty($order_by))
$this->db->order_by($order_by);
$query = $this->db->get($this->view);
return $query->result();
}
// For Data Tables
private function _get_datatables_query($column_order, $order, $column_search) {
// you can use table or view
$this->db->from($this->view);
$i = 0;
foreach ($column_search as $item) { // loop column
if (isset($_POST[$item]) && !empty($_POST[$item])) { // if datatable send GET for search
if ($i === 0) { // first loop
$this->db->group_start(); // open bracket. query Where with OR clause better with bracket. because maybe can combine with other WHERE with AND.
$this->db->like($item, $_POST[$item]);
} else {
$this->db->like($item, $_POST[$item]);
}
if (count($column_search) - 1 == $i) //last loop
$this->db->group_end(); //close bracket
}
$i++;
}
if (isset($_POST['order'])) { // here order processing
$this->db->order_by($column_order[$_POST['order']['0']['column']], $_POST['order']['0']['dir']);
} else if (isset($order)) {
$order = $order;
$this->db->order_by($order);
}
}
function get_datatables($column_order, $order, $column_search, $where) {
$this->_get_datatables_query($column_order, $order, $column_search);
if ($_POST['length'] != -1)
$this->db->limit($_POST['length'], $_POST['start']);
if( !empty($where) )
$this->db->where($where);
$query = $this->db->get();
//echo $this->db->last_query();
return $query->result_array();
}
function count_filtered($column_order, $order, $column_search, $where) {
$this->_get_datatables_query($column_order, $order, $column_search);
if( !empty($where) )
$this->db->where($where);
$query = $this->db->get();
return $query->num_rows();
}
public function count_all($where) {
if( !empty($where) )
$this->db->where($where);
$this->db->from($this->table);
return $this->db->count_all_results();
}
// end fata tables
}
| mit |
GLSea1979/fit-o-matic-frontend | app/component/admin/bike/display-bike/display-bike.js | 1389 | 'use strict';
require('./_display-bike.scss');
module.exports = {
template: require('./display-bike.html'),
controller: ['$log','$timeout','$uibModal','bikeService', DisplayBikeController],
controllerAs: 'displayBikeCtrl',
bindings: {
brand: '<',
currentBike: '<',
passCurrentBike: '&'
}
};
function DisplayBikeController($log, $timeout, $uibModal, bikeService){
$log.debug('DisplayBikeController', this.bikes);
this.showEditBike = false;
this.bikes = [];
this.displayBikes = function() {
$log.debug('DisplayBikeController.displayBikes()');
bikeService.fetchMfrBikes(this.brand._id)
.then( data => {
this.bikes = data;
});
};
this.changeBike = function(x){
$log.debug('displayBikeCtrl.changeBike -------------------------> this', this);
$log.debug('HERE IS THE ARGUMENT (BIKE):', x);
this.showEditBike = !this.showEditBike;
this.passCurrentBike({newBike:x});
};
this.editBikeModal = function(bike){
$log.debug('displayBikeCtrl.editBikeModal()');
this.open = () => {
$uibModal.open({
animation: this.animationsEnabled,
component: 'editBikeModal',
size: 'lg',
resolve: {
modalData: bike
}
}).result.then(()=>{}).catch( () => $log.log('closed'));
};
this.open();
};
this.$onInit = function() {
this.displayBikes();
};
}
| mit |
twem007/p1 | code/client/src/core/utils/MD5.ts | 14382 | /*
* A JavaScript implementation of the RSA Data Security, Inc. MD5 Message
* Digest Algorithm, as defined in RFC 1321.
* Version 2.2 Copyright (C) Paul Johnston 1999 - 2009
* Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet
* Distributed under the BSD License
* See http://pajhome.org.uk/crypt/md5 for more info.
*/
/*
* Configurable variables. You may need to tweak these to be compatible with
* the server-side, but the defaults work in most cases.
*/
class md5{
public constructor(){
}
private hexcase = 0; /* hex output format. 0 - lowercase; 1 - uppercase */
private b64pad = ""; /* base-64 pad character. "=" for strict RFC compliance */
/*
* These are the privates you'll usually want to call
* They take string arguments and return either hex or base-64 encoded strings
*/
public hex_md5(s) { return this.rstr2hex(this.rstr_md5(this.str2rstr_utf8(s))); }
private b64_md5(s) { return this.rstr2b64(this.rstr_md5(this.str2rstr_utf8(s))); }
private any_md5(s, e) { return this.rstr2any(this.rstr_md5(this.str2rstr_utf8(s)), e); }
private hex_hmac_md5(k, d)
{ return this.rstr2hex(this.rstr_hmac_md5(this.str2rstr_utf8(k), this.str2rstr_utf8(d))); }
private b64_hmac_md5(k, d)
{ return this.rstr2b64(this.rstr_hmac_md5(this.str2rstr_utf8(k), this.str2rstr_utf8(d))); }
private any_hmac_md5(k, d, e)
{ return this.rstr2any(this.rstr_hmac_md5(this.str2rstr_utf8(k), this.str2rstr_utf8(d)), e); }
/*
* Perform a simple self-test to see if the VM is working
*/
private md5_vm_test()
{
return this.hex_md5("abc").toLowerCase() == "900150983cd24fb0d6963f7d28e17f72";
}
/*
* Calculate the MD5 of a raw string
*/
private rstr_md5(s)
{
return this.binl2rstr(this.binl_md5(this.rstr2binl(s), s.length * 8));
}
/*
* Calculate the HMAC-MD5, of a key and some data (raw strings)
*/
private rstr_hmac_md5(key, data)
{
var bkey = this.rstr2binl(key);
if(bkey.length > 16) bkey = this.binl_md5(bkey, key.length * 8);
var ipad = Array(16), opad = Array(16);
for(var i = 0; i < 16; i++)
{
ipad[i] = bkey[i] ^ 0x36363636;
opad[i] = bkey[i] ^ 0x5C5C5C5C;
}
var hash = this.binl_md5(ipad.concat(this.rstr2binl(data)), 512 + data.length * 8);
return this.binl2rstr(this.binl_md5(opad.concat(hash), 512 + 128));
}
/*
* Convert a raw string to a hex string
*/
private rstr2hex(input)
{
try { this.hexcase } catch(e) { this.hexcase=0; }
var hex_tab = this.hexcase ? "0123456789ABCDEF" : "0123456789abcdef";
var output = "";
var x;
for(var i = 0; i < input.length; i++)
{
x = input.charCodeAt(i);
output += hex_tab.charAt((x >>> 4) & 0x0F)
+ hex_tab.charAt( x & 0x0F);
}
return output;
}
/*
* Convert a raw string to a base-64 string
*/
private rstr2b64(input)
{
try { this.b64pad } catch(e) { this.b64pad=''; }
var tab = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
var output = "";
var len = input.length;
for(var i = 0; i < len; i += 3)
{
var triplet = (input.charCodeAt(i) << 16)
| (i + 1 < len ? input.charCodeAt(i+1) << 8 : 0)
| (i + 2 < len ? input.charCodeAt(i+2) : 0);
for(var j = 0; j < 4; j++)
{
if(i * 8 + j * 6 > input.length * 8) output += this.b64pad;
else output += tab.charAt((triplet >>> 6*(3-j)) & 0x3F);
}
}
return output;
}
/*
* Convert a raw string to an arbitrary string encoding
*/
private rstr2any(input, encoding)
{
var divisor = encoding.length;
var i, j, q, x, quotient;
/* Convert to an array of 16-bit big-endian values, forming the dividend */
var dividend = Array(Math.ceil(input.length / 2));
for(i = 0; i < dividend.length; i++)
{
dividend[i] = (input.charCodeAt(i * 2) << 8) | input.charCodeAt(i * 2 + 1);
}
/*
* Repeatedly perform a long division. The binary array forms the dividend,
* the length of the encoding is the divisor. Once computed, the quotient
* forms the dividend for the next step. All remainders are stored for later
* use.
*/
var full_length = Math.ceil(input.length * 8 /
(Math.log(encoding.length) / Math.log(2)));
var remainders = Array(full_length);
for(j = 0; j < full_length; j++)
{
quotient = Array();
x = 0;
for(i = 0; i < dividend.length; i++)
{
x = (x << 16) + dividend[i];
q = Math.floor(x / divisor);
x -= q * divisor;
if(quotient.length > 0 || q > 0)
quotient[quotient.length] = q;
}
remainders[j] = x;
dividend = quotient;
}
/* Convert the remainders to the output string */
var output = "";
for(i = remainders.length - 1; i >= 0; i--)
output += encoding.charAt(remainders[i]);
return output;
}
/*
* Encode a string as utf-8.
* For efficiency, this assumes the input is valid utf-16.
*/
private str2rstr_utf8(input)
{
var output = "";
var i = -1;
var x, y;
while(++i < input.length)
{
/* Decode utf-16 surrogate pairs */
x = input.charCodeAt(i);
y = i + 1 < input.length ? input.charCodeAt(i + 1) : 0;
if(0xD800 <= x && x <= 0xDBFF && 0xDC00 <= y && y <= 0xDFFF)
{
x = 0x10000 + ((x & 0x03FF) << 10) + (y & 0x03FF);
i++;
}
/* Encode output as utf-8 */
if(x <= 0x7F)
output += String.fromCharCode(x);
else if(x <= 0x7FF)
output += String.fromCharCode(0xC0 | ((x >>> 6 ) & 0x1F),
0x80 | ( x & 0x3F));
else if(x <= 0xFFFF)
output += String.fromCharCode(0xE0 | ((x >>> 12) & 0x0F),
0x80 | ((x >>> 6 ) & 0x3F),
0x80 | ( x & 0x3F));
else if(x <= 0x1FFFFF)
output += String.fromCharCode(0xF0 | ((x >>> 18) & 0x07),
0x80 | ((x >>> 12) & 0x3F),
0x80 | ((x >>> 6 ) & 0x3F),
0x80 | ( x & 0x3F));
}
return output;
}
/*
* Encode a string as utf-16
*/
private str2rstr_utf16le(input)
{
var output = "";
for(var i = 0; i < input.length; i++)
output += String.fromCharCode( input.charCodeAt(i) & 0xFF,
(input.charCodeAt(i) >>> 8) & 0xFF);
return output;
}
private str2rstr_utf16be(input)
{
var output = "";
for(var i = 0; i < input.length; i++)
output += String.fromCharCode((input.charCodeAt(i) >>> 8) & 0xFF,
input.charCodeAt(i) & 0xFF);
return output;
}
/*
* Convert a raw string to an array of little-endian words
* Characters >255 have their high-byte silently ignored.
*/
private rstr2binl(input)
{
var output = Array(input.length >> 2);
for(var i = 0; i < output.length; i++)
output[i] = 0;
for(var i = 0; i < input.length * 8; i += 8)
output[i>>5] |= (input.charCodeAt(i / 8) & 0xFF) << (i%32);
return output;
}
/*
* Convert an array of little-endian words to a string
*/
private binl2rstr(input)
{
var output = "";
for(var i = 0; i < input.length * 32; i += 8)
output += String.fromCharCode((input[i>>5] >>> (i % 32)) & 0xFF);
return output;
}
/*
* Calculate the MD5 of an array of little-endian words, and a bit length.
*/
private binl_md5(x, len)
{
/* append padding */
x[len >> 5] |= 0x80 << ((len) % 32);
x[(((len + 64) >>> 9) << 4) + 14] = len;
var a = 1732584193;
var b = -271733879;
var c = -1732584194;
var d = 271733878;
for(var i = 0; i < x.length; i += 16)
{
var olda = a;
var oldb = b;
var oldc = c;
var oldd = d;
a = this.md5_ff(a, b, c, d, x[i+ 0], 7 , -680876936);
d = this.md5_ff(d, a, b, c, x[i+ 1], 12, -389564586);
c = this.md5_ff(c, d, a, b, x[i+ 2], 17, 606105819);
b = this.md5_ff(b, c, d, a, x[i+ 3], 22, -1044525330);
a = this.md5_ff(a, b, c, d, x[i+ 4], 7 , -176418897);
d = this.md5_ff(d, a, b, c, x[i+ 5], 12, 1200080426);
c = this.md5_ff(c, d, a, b, x[i+ 6], 17, -1473231341);
b = this.md5_ff(b, c, d, a, x[i+ 7], 22, -45705983);
a = this.md5_ff(a, b, c, d, x[i+ 8], 7 , 1770035416);
d = this.md5_ff(d, a, b, c, x[i+ 9], 12, -1958414417);
c = this.md5_ff(c, d, a, b, x[i+10], 17, -42063);
b = this.md5_ff(b, c, d, a, x[i+11], 22, -1990404162);
a = this.md5_ff(a, b, c, d, x[i+12], 7 , 1804603682);
d = this.md5_ff(d, a, b, c, x[i+13], 12, -40341101);
c = this.md5_ff(c, d, a, b, x[i+14], 17, -1502002290);
b = this.md5_ff(b, c, d, a, x[i+15], 22, 1236535329);
a = this.md5_gg(a, b, c, d, x[i+ 1], 5 , -165796510);
d = this.md5_gg(d, a, b, c, x[i+ 6], 9 , -1069501632);
c = this.md5_gg(c, d, a, b, x[i+11], 14, 643717713);
b = this.md5_gg(b, c, d, a, x[i+ 0], 20, -373897302);
a = this.md5_gg(a, b, c, d, x[i+ 5], 5 , -701558691);
d = this.md5_gg(d, a, b, c, x[i+10], 9 , 38016083);
c = this.md5_gg(c, d, a, b, x[i+15], 14, -660478335);
b = this.md5_gg(b, c, d, a, x[i+ 4], 20, -405537848);
a = this.md5_gg(a, b, c, d, x[i+ 9], 5 , 568446438);
d = this.md5_gg(d, a, b, c, x[i+14], 9 , -1019803690);
c = this.md5_gg(c, d, a, b, x[i+ 3], 14, -187363961);
b = this.md5_gg(b, c, d, a, x[i+ 8], 20, 1163531501);
a = this.md5_gg(a, b, c, d, x[i+13], 5 , -1444681467);
d = this.md5_gg(d, a, b, c, x[i+ 2], 9 , -51403784);
c = this.md5_gg(c, d, a, b, x[i+ 7], 14, 1735328473);
b = this.md5_gg(b, c, d, a, x[i+12], 20, -1926607734);
a = this.md5_hh(a, b, c, d, x[i+ 5], 4 , -378558);
d = this.md5_hh(d, a, b, c, x[i+ 8], 11, -2022574463);
c = this.md5_hh(c, d, a, b, x[i+11], 16, 1839030562);
b = this.md5_hh(b, c, d, a, x[i+14], 23, -35309556);
a = this.md5_hh(a, b, c, d, x[i+ 1], 4 , -1530992060);
d = this.md5_hh(d, a, b, c, x[i+ 4], 11, 1272893353);
c = this.md5_hh(c, d, a, b, x[i+ 7], 16, -155497632);
b = this.md5_hh(b, c, d, a, x[i+10], 23, -1094730640);
a = this.md5_hh(a, b, c, d, x[i+13], 4 , 681279174);
d = this.md5_hh(d, a, b, c, x[i+ 0], 11, -358537222);
c = this.md5_hh(c, d, a, b, x[i+ 3], 16, -722521979);
b = this.md5_hh(b, c, d, a, x[i+ 6], 23, 76029189);
a = this.md5_hh(a, b, c, d, x[i+ 9], 4 , -640364487);
d = this.md5_hh(d, a, b, c, x[i+12], 11, -421815835);
c = this.md5_hh(c, d, a, b, x[i+15], 16, 530742520);
b = this.md5_hh(b, c, d, a, x[i+ 2], 23, -995338651);
a = this.md5_ii(a, b, c, d, x[i+ 0], 6 , -198630844);
d = this.md5_ii(d, a, b, c, x[i+ 7], 10, 1126891415);
c = this.md5_ii(c, d, a, b, x[i+14], 15, -1416354905);
b = this.md5_ii(b, c, d, a, x[i+ 5], 21, -57434055);
a = this.md5_ii(a, b, c, d, x[i+12], 6 , 1700485571);
d = this.md5_ii(d, a, b, c, x[i+ 3], 10, -1894986606);
c = this.md5_ii(c, d, a, b, x[i+10], 15, -1051523);
b = this.md5_ii(b, c, d, a, x[i+ 1], 21, -2054922799);
a = this.md5_ii(a, b, c, d, x[i+ 8], 6 , 1873313359);
d = this.md5_ii(d, a, b, c, x[i+15], 10, -30611744);
c = this.md5_ii(c, d, a, b, x[i+ 6], 15, -1560198380);
b = this.md5_ii(b, c, d, a, x[i+13], 21, 1309151649);
a = this.md5_ii(a, b, c, d, x[i+ 4], 6 , -145523070);
d = this.md5_ii(d, a, b, c, x[i+11], 10, -1120210379);
c = this.md5_ii(c, d, a, b, x[i+ 2], 15, 718787259);
b = this.md5_ii(b, c, d, a, x[i+ 9], 21, -343485551);
a = this.safe_add(a, olda);
b = this.safe_add(b, oldb);
c = this.safe_add(c, oldc);
d = this.safe_add(d, oldd);
}
return [a, b, c, d];
}
/*
* These privates implement the four basic operations the algorithm uses.
*/
private md5_cmn(q, a, b, x, s, t)
{
return this.safe_add(this.bit_rol(this.safe_add(this.safe_add(a, q), this.safe_add(x, t)), s),b);
}
private md5_ff(a, b, c, d, x, s, t)
{
return this.md5_cmn((b & c) | ((~b) & d), a, b, x, s, t);
}
private md5_gg(a, b, c, d, x, s, t)
{
return this.md5_cmn((b & d) | (c & (~d)), a, b, x, s, t);
}
private md5_hh(a, b, c, d, x, s, t)
{
return this.md5_cmn(b ^ c ^ d, a, b, x, s, t);
}
private md5_ii(a, b, c, d, x, s, t)
{
return this.md5_cmn(c ^ (b | (~d)), a, b, x, s, t);
}
/*
* Add integers, wrapping at 2^32. This uses 16-bit operations internally
* to work around bugs in some JS interpreters.
*/
private safe_add(x, y)
{
var lsw = (x & 0xFFFF) + (y & 0xFFFF);
var msw = (x >> 16) + (y >> 16) + (lsw >> 16);
return (msw << 16) | (lsw & 0xFFFF);
}
/*
* Bitwise rotate a 32-bit number to the left.
*/
private bit_rol(num, cnt)
{
return (num << cnt) | (num >>> (32 - cnt));
}
} | mit |
maqsoftware/maqsoftware | js/contact-form.js | 2048 | (function ($) {
"use strict";
$("#contact").validate();
/* CONTACT FORM */
$("#contact").submit(function (e) {
e.preventDefault();
var name = $("#form-name").val();
var email = $("#form-email").val();
var subject = $("#form-subject").val();
var message = $("#form-message").val();
var dataString = 'name=' + name + '&email=' + email + '&subject=' + subject + '&message=' + message;
function validEmail(emailAddress) {
var pattern = new RegExp(/^((([a-z]|\d|[!#\$%&'\*\+\-\/=\?\^_`{\|}~]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])+(\.([a-z]|\d|[!#\$%&'\*\+\-\/=\?\^_`{\|}~]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])+)*)|((\x22)((((\x20|\x09)*(\x0d\x0a))?(\x20|\x09)+)?(([\x01-\x08\x0b\x0c\x0e-\x1f\x7f]|\x21|[\x23-\x5b]|[\x5d-\x7e]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])|(\\([\x01-\x09\x0b\x0c\x0d-\x7f]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF]))))*(((\x20|\x09)*(\x0d\x0a))?(\x20|\x09)+)?(\x22)))@((([a-z]|\d|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])|(([a-z]|\d|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])([a-z]|\d|-|\.|_|~|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])*([a-z]|\d|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])))\.)+(([a-z]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])|(([a-z]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])([a-z]|\d|-|\.|_|~|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])*([a-z]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])))\.?$/i);
return pattern.test(emailAddress);
};
if (validEmail(email) && (message.length > 1) && (name.length > 1)) {
$.ajax({
type: "POST",
url: "send-mail.php",
data: dataString,
success: function () {
$('.successContent').fadeIn(1000);
$('.errorContent').fadeOut(500);
}
});
}
else {
$('.errorContent').fadeIn(1000);
$('.successContent').fadeOut(500);
}
return false;
});
})(jQuery); | mit |
gw4e/gw4e.project | bundles/gw4e-eclipse-plugin/src/org/gw4e/eclipse/builder/BuildPoliciesCache.java | 10038 | package org.gw4e.eclipse.builder;
/*-
* #%L
* gw4e
* $Id:$
* $HeadURL:$
* %%
* Copyright (C) 2017 gw4e-project
* %%
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
* #L%
*/
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Properties;
import java.util.StringTokenizer;
import org.eclipse.core.resources.IContainer;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.resources.WorkspaceJob;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.NullProgressMonitor;
import org.eclipse.core.runtime.Status;
import org.eclipse.core.runtime.jobs.Job;
import org.gw4e.eclipse.facade.ResourceManager;
import org.gw4e.eclipse.preferences.PreferenceManager;
import org.gw4e.eclipse.preferences.ProjectPropertyChangeListener;
/**
 * Caches, per graph resource, the build policies used for the last build
 * together with a timestamp. The cache is a hidden properties-like file
 * stored next to the resource; {@link #needBuild(List)} compares the cached
 * entry against the resource timestamp and the requested policies to decide
 * whether a rebuild is required.
 */
public class BuildPoliciesCache implements ProjectPropertyChangeListener {
    /** Resource whose build state is tracked; null for a listener-only instance. */
    IFile resource;

    /**
     * @param resource the graph file whose builds are cached
     * @throws CoreException
     * @throws FileNotFoundException
     * @throws InterruptedException
     */
    public BuildPoliciesCache(IFile resource) throws CoreException, FileNotFoundException, InterruptedException {
        this.resource = resource;
    }

    /**
     * Listener-only constructor (used for property-change notifications).
     */
    public BuildPoliciesCache() {
    }

    /**
     * Prefix a build-policy file name with a dot to obtain the hidden cache name.
     *
     * @param buildPolicyFile name of the build-policies file
     * @return the hidden cache file name
     */
    public static String makeFileCacheName(String buildPolicyFile) {
        return "." + buildPolicyFile;
    }

    /**
     * Cache file name for the given project, derived from its configured
     * build-policies file name.
     *
     * @param projectName the project name
     * @return the hidden cache file name for that project
     */
    public static String getFileCacheName(String projectName) {
        return makeFileCacheName(PreferenceManager.getBuildPoliciesFileName(projectName));
    }

    /**
     * Locate the cache file next to {@link #resource}, creating it on disk if
     * it does not exist yet.
     *
     * @return the cache file, or null when creation failed (already logged)
     * @throws CoreException
     * @throws InterruptedException
     * @throws FileNotFoundException
     */
    private IFile getCache() throws CoreException, InterruptedException, FileNotFoundException {
        String filename = getFileCacheName(resource.getProject().getName());
        IContainer folder = resource.getParent();
        IFile cache = (IFile) ResourceManager.resfreshFileInContainer(folder, filename);
        if (cache != null && cache.exists()) {
            return cache;
        }
        cache = ResourceManager.get(folder, filename);
        try {
            URI uri = cache.getLocationURI();
            String path = URLDecoder.decode(uri.getRawPath(), "UTF-8");
            File f = new File(path);
            f.createNewFile();
            cache = ResourceManager.toIFile(f);
            cache.refreshLocal(IResource.DEPTH_INFINITE, new NullProgressMonitor());
        } catch (Exception e) {
            ResourceManager.logException(e);
            return null;
        }
        return cache;
    }

    /**
     * Persist the given properties to the cache file, one sorted key per
     * paragraph ("key=value" followed by a blank line).
     *
     * @throws CoreException
     * @throws IOException
     * @throws InterruptedException
     */
    private void save(final Properties p, IProgressMonitor monitor)
            throws CoreException, IOException, InterruptedException {
        try {
            String newline = System.getProperty("line.separator");
            IFile fileCache = getCache();
            // Sort the keys so the file content is deterministic.
            List<String> sortedKeys = new ArrayList<String>(p.stringPropertyNames());
            Collections.sort(sortedKeys);
            StringBuilder sb = new StringBuilder();
            for (String key : sortedKeys) {
                sb.append(key).append("=").append(p.getProperty(key)).append(newline).append(newline);
            }
            InputStream source = new ByteArrayInputStream(sb.toString().getBytes());
            fileCache.setContents(source, IResource.FORCE, monitor);
            fileCache.refreshLocal(IResource.DEPTH_INFINITE, monitor);
            log("BuildPoliciesCache.save (" + fileCache + ") saved for resource " + resource + " " + p.toString());
        } catch (Exception e) {
            ResourceManager.logException(e);
        }
    }

    /**
     * Record, asynchronously in a workspace job, that {@link #resource} was
     * just built with the given policies.
     *
     * @param policies the policies used for the build
     * @throws CoreException
     * @throws IOException
     * @throws InterruptedException
     */
    public void update(List<BuildPolicy> policies) throws CoreException, IOException, InterruptedException {
        Job job = new WorkspaceJob("Updating cache") {
            @Override
            public IStatus runInWorkspace(IProgressMonitor monitor) throws CoreException {
                try {
                    IFile fileCache = getCache();
                    Properties p = ResourceManager.loadIFileAsProperties(fileCache,
                            getFileCacheName(resource.getProject().getName()));
                    String serialized = serialize(policies);
                    if (serialized == null) {
                        // Typo fix in the log message ("failed to updated").
                        log("BuildPoliciesCache.update " + fileCache + " failed to update for resource " + resource
                                + " " + policies.toString());
                        return Status.OK_STATUS;
                    }
                    // Entry format: "<timestamp>:<serialized policies>"
                    p.put(resource.getName(), System.currentTimeMillis() + ":" + serialized);
                    log("BuildPoliciesCache.update " + fileCache + " updated for resource " + resource + " "
                            + p.toString());
                    save(p, monitor);
                    return Status.OK_STATUS;
                } catch (Exception e) {
                    ResourceManager.logException(e);
                    return Status.CANCEL_STATUS;
                }
            }
        };
        // Serialize cache jobs against the project to avoid concurrent writes.
        job.setRule(resource.getProject());
        job.setUser(true);
        job.schedule();
    }

    /**
     * Serialize policies as ";"-separated items (trailing separator kept for
     * compatibility with existing cache files).
     *
     * @param policies policies to serialize
     * @return the serialized string (empty for an empty list)
     */
    private String serialize(List<BuildPolicy> policies) {
        StringBuilder sb = new StringBuilder();
        for (BuildPolicy buildPolicy : policies) {
            sb.append(BuildPolicy.serialize(buildPolicy)).append(";");
        }
        return sb.toString();
    }

    /**
     * Recursively delete every file named {@code cachename} below the container.
     *
     * @param cachename the cache file name to delete
     * @param container the container to walk
     * @param monitor progress monitor
     */
    private static void deleteCache(String cachename, IContainer container, IProgressMonitor monitor) {
        try {
            IResource[] members = container.members();
            for (IResource member : members) {
                if (member instanceof IContainer) {
                    deleteCache(cachename, (IContainer) member, monitor);
                } else if (member instanceof IFile) {
                    IFile file = (IFile) member;
                    if (cachename.equals(file.getName())) {
                        file.delete(true, monitor);
                    }
                }
            }
        } catch (CoreException e) {
            ResourceManager.logException(e);
        }
    }

    /**
     * Remove every cache file with the given name from the whole project.
     *
     * @param project the project to clean
     * @param cachename the cache file name
     * @param monitor progress monitor
     */
    public static void clean(IProject project, String cachename, IProgressMonitor monitor) {
        deleteCache(cachename, project, monitor);
    }

    /**
     * Remove the cache entry for {@link #resource} so its next build is forced.
     *
     * @throws CoreException
     * @throws IOException
     * @throws InterruptedException
     */
    public void invalidate(IProgressMonitor monitor) throws CoreException, IOException, InterruptedException {
        try {
            IFile fileCache = getCache();
            Properties p = ResourceManager.loadIFileAsProperties(fileCache,
                    getFileCacheName(resource.getProject().getName()));
            p.remove(resource.getName());
            log("BuildPoliciesCache.invalidate " + fileCache + " invalidated for resource " + resource);
            save(p, monitor);
        } catch (Exception e) {
            ResourceManager.logException(e);
        }
    }

    /**
     * Log an informational message scoped to the resource's project.
     *
     * @param message the message to log
     */
    private void log(String message) {
        ResourceManager.logInfo(resource.getProject().getName(), message);
    }

    /**
     * Decide whether {@link #resource} must be rebuilt: true when the cache is
     * missing or unreadable, when the resource changed since the cached build,
     * or when at least one requested policy was not part of the cached build.
     *
     * @param policies the policies requested for the next build
     * @return true when a rebuild is required
     */
    public boolean needBuild(List<BuildPolicy> policies) {
        try {
            IFile fileCache = getCache();
            if (fileCache == null) {
                return true;
            }
            Properties p = ResourceManager.loadIFileAsProperties(fileCache,
                    getFileCacheName(resource.getProject().getName()));
            String svalue = p.getProperty(resource.getName(), null);
            if (svalue == null) {
                return true;
            }
            StringTokenizer st = new StringTokenizer(svalue, ":");
            long graphfiletime = resource.getLocalTimeStamp();
            long lastUpdate = Long.parseLong(st.nextToken());
            if (graphfiletime > lastUpdate) {
                return true;
            }
            List<BuildPolicy> cached = BuildPolicy.deserialize(st.nextToken());
            List<BuildPolicy> targetPolicies = new ArrayList<BuildPolicy>(policies);
            targetPolicies.removeAll(cached);
            return !targetPolicies.isEmpty();
        } catch (Exception e) {
            // Be conservative: any problem reading the cache forces a rebuild,
            // but keep a trace instead of failing silently.
            ResourceManager.logException(e);
            return true;
        }
    }

    /*
     * (non-Javadoc)
     *
     * @see org.gw4e.eclipse.preferences.ProjectPropertyChangeListener#
     * projectPropertyUpdated(java.lang.String, java.lang.String,
     * java.lang.String[], java.lang.String[])
     */
    @Override
    public void projectPropertyUpdated(String projectName, String property, String[] oldValues, String[] newValues) {
        if (PreferenceManager.BUILD_POLICIES_FILENAME.equals(property)) {
            IProject project = ResourceManager.getProject(projectName);
            String previousCacheName = makeFileCacheName(oldValues.length > 0 ? oldValues[0] : "");
            // Guard against an empty newValues array (consistent with oldValues).
            String newCacheName = makeFileCacheName(newValues.length > 0 ? newValues[0] : "");
            try {
                ResourceManager.renameFile(project, previousCacheName, newCacheName);
            } catch (CoreException e) {
                ResourceManager.logException(e);
            }
            clean(project, previousCacheName, new NullProgressMonitor());
        }
    }
}
| mit |
jagrutkosti/dashit | app/src/main/java/dashit/uni/com/dashit/model/HistoryFiles.java | 2584 | package dashit.uni.com.dashit.model;
import java.util.ArrayList;
import java.util.List;
/**
* Created by Jagrut on 17-Feb-16.
* POJO to store all data related to one History item
*/
public class HistoryFiles {
//Absolute path
private List<String> filesInDirectory;
//Only the directory name
private String directory;
private String txHash;
private String recipient;
private String submissionTime;
private String privateKey;
private String publicKey;
private String seed;
private String savedHash;
private String accidentLocation;
public String getAccidentLocation() {
return accidentLocation;
}
public void setAccidentLocation(String accidentLocation) {
this.accidentLocation = accidentLocation;
}
public String getSavedHash() {
return savedHash;
}
public void setSavedHash(String savedHash) {
this.savedHash = savedHash;
}
private boolean checkedAtServer;
public boolean isCheckedAtServer() {
return checkedAtServer;
}
public void setCheckedAtServer(boolean checkedAtServer) {
this.checkedAtServer = checkedAtServer;
}
public String getTxHash() {
return txHash;
}
public void setTxHash(String txHash) {
this.txHash = txHash;
}
public String getRecipient() {
return recipient;
}
public void setRecipient(String recipient) {
this.recipient = recipient;
}
public String getSubmissionTime() {
return submissionTime;
}
public void setSubmissionTime(String submissionTime) {
this.submissionTime = submissionTime;
}
public String getPrivateKey() {
return privateKey;
}
public void setPrivateKey(String privateKey) {
this.privateKey = privateKey;
}
public String getPublicKey() {
return publicKey;
}
public void setPublicKey(String publicKey) {
this.publicKey = publicKey;
}
public String getSeed() {
return seed;
}
public void setSeed(String seed) {
this.seed = seed;
}
public String getDirectory() {
return directory;
}
public void setDirectory(String directory) {
this.directory = directory;
}
public HistoryFiles(){
filesInDirectory = new ArrayList<>();
}
public List<String> getFilesInDirectory() {
return filesInDirectory;
}
public void setFilesInDirectory(List<String> filesInDirectory) {
this.filesInDirectory = filesInDirectory;
}
}
| mit |
hitmeister/api-sdk-php | src/Endpoints/ImportFiles/Find.php | 761 | <?php
namespace Hitmeister\Component\Api\Endpoints\ImportFiles;
use Hitmeister\Component\Api\Endpoints\AbstractEndpoint;
use Hitmeister\Component\Api\Endpoints\Traits\RequestGet;
/**
* Class Find
*
* @category PHP-SDK
* @package Hitmeister\Component\Api\Endpoints\ImportFiles
* @author Maksim Naumov <maksim.naumov@hitmeister.de>
* @license https://opensource.org/licenses/MIT MIT
* @link https://www.hitmeister.de/api/v1/
*/
class Find extends AbstractEndpoint
{
	use RequestGet;

	/**
	 * Query parameters accepted by the import-files search endpoint.
	 *
	 * {@inheritdoc}
	 */
	public function getParamWhiteList()
	{
		return [
			'status',
			'type',
			'ts_created:from',
			'ts_updated:from',
			'sort',
			'limit',
			'offset',
		];
	}

	/**
	 * Relative URI of the endpoint.
	 *
	 * {@inheritdoc}
	 */
	public function getURI()
	{
		return 'import-files/seller/';
	}
}
YesTeam/Labyrinth | Client/src/main/entities/BatAi.java | 171 | package entities;
public class BatAi extends CreatureAi {
public BatAi(Creature creature) {
super(creature);
}
public void onUpdate(){
wander();
wander();
}
} | mit |
ng2-dev/angular-quick-starter | config/karma.conf.js | 3258 | /**
* @author: @AngularClass
*/
module.exports = function (config) {
var testWebpackConfig = require('./webpack.test.js')({ env: 'test' });
var configuration = {
/**
* Base path that will be used to resolve all patterns (e.g. files, exclude).
*/
basePath: '',
/**
* Frameworks to use
*
* available frameworks: https://npmjs.org/browse/keyword/karma-adapter
*/
frameworks: ['jasmine'],
/**
* List of files to exclude.
*/
exclude: [],
client: {
captureConsole: false
},
/**
* List of files / patterns to load in the browser
*
* we are building the test environment in ./spec-bundle.js
*/
files: [
{ pattern: './config/spec-bundle.js', watched: false },
{ pattern: './src/assets/**/*', watched: false, included: false, served: true, nocache: false }
],
/**
* By default all assets are served at http://localhost:[PORT]/base/
*/
proxies: {
"/assets/": "/base/src/assets/"
},
/**
* Preprocess matching files before serving them to the browser
* available preprocessors: https://npmjs.org/browse/keyword/karma-preprocessor
*/
preprocessors: { './config/spec-bundle.js': ['coverage', 'webpack', 'sourcemap'] },
/**
* Webpack Config at ./webpack.test.js
*/
webpack: testWebpackConfig,
coverageReporter: {
type: 'in-memory'
},
remapCoverageReporter: {
'text-summary': null,
json: './coverage/coverage.json',
html: './coverage/html'
},
/**
* Webpack please don't spam the console when running in karma!
*/
webpackMiddleware: {
/**
* webpack-dev-middleware configuration
* i.e.
*/
noInfo: true,
/**
* and use stats to turn off verbose output
*/
stats: {
/**
* options i.e.
*/
chunks: false
}
},
/**
* Test results reporter to use
*
* possible values: 'dots', 'progress'
* available reporters: https://npmjs.org/browse/keyword/karma-reporter
*/
reporters: ['mocha', 'coverage', 'remap-coverage'],
/**
* Web server port.
*/
port: 9876,
/**
* enable / disable colors in the output (reporters and logs)
*/
colors: true,
/**
* Level of logging
* possible values: config.LOG_DISABLE || config.LOG_ERROR || config.LOG_WARN || config.LOG_INFO || config.LOG_DEBUG
*/
logLevel: config.LOG_WARN,
/**
* enable / disable watching file and executing tests whenever any file changes
*/
autoWatch: false,
/**
* start these browsers
* available browser launchers: https://npmjs.org/browse/keyword/karma-launcher
*/
browsers: [
'Chrome'
],
customLaunchers: {
ChromeTravisCi: {
base: 'Chrome',
flags: ['--no-sandbox']
}
},
/**
* Continuous Integration mode
* if true, Karma captures browsers, runs the tests and exits
*/
singleRun: true,
browserNoActivityTimeout: 300000,
};
if (process.env.TRAVIS) {
configuration.browsers = [
'ChromeTravisCi'
];
}
config.set(configuration);
};
| mit |
lolitaframework/branding | LolitaFramework/Controls/Button/Button.php | 824 | <?php
namespace branding\LolitaFramework\Controls\Button;
use \branding\LolitaFramework\Controls\Control;
use \branding\LolitaFramework\Core\Arr;
class Button extends Control
{
    /**
     * Render the button control as HTML.
     *
     * Default attributes (name, id, value, type) are merged first, so any
     * attribute already set on the control overrides the defaults.
     *
     * @author Guriev Eugen <gurievcreative@gmail.com>
     * @return string html code.
     */
    public function render()
    {
        $defaults = array(
            'name'  => $this->getName(),
            'id'    => $this->getId(),
            'value' => $this->getValue(),
            'type'  => 'button',
        );
        $this->setAttributes(array_merge($defaults, $this->getAttributes()));
        return parent::render();
    }
}
| mit |
kevinCefalu/vscode-favorites | src/controllers/FavoritesController.ts | 1463 | 'use strict';
import {window} from 'vscode';
import {FavoriteType} from '../common/Enums';
import {Favorite} from '../common/models/favorite';
import {ConfigurationController} from '../controllers/ConfigurationController';
import {FileSystemController} from '../controllers/FileSystemController';
export class FavoritesController
{
    private _ConfigurationController : ConfigurationController;

    public constructor(configurationController : ConfigurationController)
    {
        this._ConfigurationController = configurationController;
    }

    /**
     * Show the quick-pick list of favorites and open the selected one.
     */
    public ShowFavoritePicker()
    {
        window
            .showQuickPick(this._ConfigurationController.CollectionPickerOptions)
            .then(picked =>
            {
                // Bug fix: showQuickPick resolves with `undefined` when the
                // user dismisses the picker; the old code crashed on `p.label`.
                if (picked)
                {
                    this.OpenFavorite(picked.label);
                }
            });
    }

    /**
     * Open the favorite with the given name (its file or folder paths).
     */
    public OpenFavorite(name : string)
    {
        let favorite = this._ConfigurationController.GetFavorite(name);

        if (favorite === undefined || favorite.Paths === undefined)
        {
            return;
        }

        if (favorite.Type === FavoriteType.File)
        {
            this.Handle(favorite, FileSystemController.OpenFile);
        }
        else if (favorite.Type === FavoriteType.Folder)
        {
            this.Handle(favorite, FileSystemController.OpenDirectory);
        }
    }

    /**
     * Apply the callback to the favorite's single path, or to each entry of
     * its path array.
     */
    private Handle(favorite : Favorite, callbackfn: (this : void, value : string | Array<string>) => void)
    {
        typeof favorite.Paths === "string" ?
            callbackfn(favorite.Paths) :
            favorite.Paths.forEach(p => callbackfn(p));
    }
}
| mit |
ngraziano/isystem-to-mqtt | isystem_to_mqtt/convert.py | 24029 | """ Function to convert raw modbus value """
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import datetime
import json
from . import time_delta_json
def unit(raw_table, base_index):
    """ Decode a sign/magnitude word: bit 15 is the sign, low 15 bits the value. """
    word = raw_table[base_index]
    magnitude = word & 0x7FFF
    return -magnitude if word & 0x8000 else magnitude


def tenth(raw_table, base_index):
    """ Decode a sign/magnitude word scaled by ten; 0xFFFF means no value. """
    word = raw_table[base_index]
    if word == 0xFFFF:
        return None
    magnitude = (word & 0x7FFF) / 10
    return -magnitude if word & 0x8000 else magnitude


def unit_and_ten(raw_table, base_index):
    """ Combine two words: first word is units, second word is tens. """
    units, tens = raw_table[base_index], raw_table[base_index + 1]
    return units + 10 * tens


def anticipation(raw_table, base_index):
    """ Anticipation value in tenths; the sentinel 101 means none. """
    if raw_table[base_index] == 101:
        return None
    return tenth(raw_table, base_index)


def footprint(raw_table, base_index):
    """ Footprint value in tenths; the sentinel 150 means none. """
    if raw_table[base_index] == 150:
        return None
    return tenth(raw_table, base_index)


def power(raw_table, base_index):
    """ Decode a MWh/kWh/Wh word triplet into Wh; any 0xFFFF word means no value. """
    words = raw_table[base_index:base_index + 3]
    if 0xFFFF in words:
        return None
    megawatt, kilowatt, watt = words
    return (megawatt * 1000 + kilowatt) * 1000 + watt
BIT_ANTIFREEZE = 1
BIT_NIGHT = 2
BIT_DAY = 4
BIT_AUTO = 8
BIT_DHW = 16
BIT_END_OF_PROGRAM = 32
BIT_DHW_END_OF_PROGRAM = 64
BIT_ALL_ZONE = 128

# (bit, label) pairs; labels keep their original trailing spaces so the
# concatenated output stays byte-identical to the historical format.
_DEROG_LABELS_FRENCH = (
    (BIT_ANTIFREEZE, "Antigel "),
    (BIT_NIGHT, "Nuit "),
    (BIT_DAY, "Jour "),
    (BIT_AUTO, "Automatique "),
    (BIT_DHW, "Eau "),
    (BIT_END_OF_PROGRAM, "jusqu'a la fin du programme "),
    (BIT_DHW_END_OF_PROGRAM, "jusqu'a la fin du programme (eau) "),
    (BIT_ALL_ZONE, "toutes les zones"),
)

_DEROG_LABELS_ENGLISH = (
    (BIT_ANTIFREEZE, "Antifreeze/vacation "),
    (BIT_NIGHT, "Night "),
    (BIT_DAY, "Day "),
    (BIT_AUTO, "Automatic "),
    (BIT_DHW, "Water "),
    (BIT_END_OF_PROGRAM, "until the end of program "),
    (BIT_DHW_END_OF_PROGRAM, "until the end of program (warm water) "),
    (BIT_ALL_ZONE, "all zones"),
)


def derog_bit_french(raw_table, base_index):
    """ Describe every derogation flag set in the word, in French. """
    value = raw_table[base_index]
    return "".join(label for bit, label in _DEROG_LABELS_FRENCH if value & bit)


def derog_bit_english(raw_table, base_index):
    """ Describe every derogation flag set in the word, in English. """
    value = raw_table[base_index]
    return "".join(label for bit, label in _DEROG_LABELS_ENGLISH if value & bit)


def derog_bit_simple_french(raw_table, base_index):
    """ French name of the dominant derogation mode (partial mapping). """
    value = raw_table[base_index]
    # Priority order: automatic beats day beats night beats antifreeze.
    for bit, label in ((BIT_AUTO, "Automatique"), (BIT_DAY, "Jour"),
                       (BIT_NIGHT, "Nuit"), (BIT_ANTIFREEZE, "Vacances")):
        if value & bit:
            return label
    return ""


def derog_bit_simple_english(raw_table, base_index):
    """ English name of the dominant derogation mode (partial mapping). """
    value = raw_table[base_index]
    for bit, label in ((BIT_AUTO, "Automatic"), (BIT_DAY, "Day"),
                       (BIT_NIGHT, "Night"), (BIT_ANTIFREEZE, "Vacation")):
        if value & bit:
            return label
    return ""
def boiler_state_bit_english(raw_table, base_index):
    """ Describe the boiler-state status bits in English. """
    value = raw_table[base_index]
    stringvalue = ""
    if value & (1 << 1):
        stringvalue += "[Direct Circuit OFF] "
    if value & (1 << 2):
        stringvalue += "[3WV Circuit OFF] "
    if value & (1 << 3):
        stringvalue += "[Secondary pump] "
    if value & (1 << 15):
        # Typo fix: was "faliure".
        stringvalue += "[Cascade failure] "
    return stringvalue
# Heat-pump state word -> English description.
_HP_STATES_ENGLISH = {
    0: "Stop",
    1: "Heating mode",
    2: "Heating mode+comp",
    4: "Cooling mode",
    5: "Cooling mode+comp on",
}


def hp_state_english(raw_table, base_index):
    """ English description of the heat-pump state word. """
    return _HP_STATES_ENGLISH.get(raw_table[base_index], "Unknown")
def hp_state_bit_english(raw_table, base_index):
    """ Convert heat-pump status bits to English (see NOTE below). """
    # NOTE(review): every condition below masks the SAME bit (1 << 1), so all
    # seven labels are emitted together or not at all.  The labels clearly map
    # to distinct status bits; the shifts were almost certainly meant to be a
    # sequence (e.g. 1<<1 .. 1<<7), and this looks like a copy-paste bug.  The
    # correct bit order is not recoverable from this file alone -- confirm it
    # against the boiler's modbus register documentation before changing.
    value = raw_table[base_index]
    stringvalue = ""
    if value & (1 << 1):
        stringvalue += "[Defrosting] "
    if value & (1 << 1):
        stringvalue += "[Boiler Pump Backup] "
    if value & (1 << 1):
        stringvalue += "[Boiler Backup] "
    if value & (1 << 1):
        stringvalue += "[HP Pump] "
    if value & (1 << 1):
        stringvalue += "[Backup 2] "
    if value & (1 << 1):
        stringvalue += "[Backup 1] "
    if value & (1 << 1):
        stringvalue += "[Compressor] "
    return stringvalue
# System input state word -> English description.
_SYSTEM_INPUT_STATES = {
    0: "Disable",
    1: "System",
    2: "Storage tank",
    3: "DHW STRAT",
    4: "Storage tank+ DHW",
}


def system_input_state_english(raw_table, base_index):
    """ English description of the system input state word. """
    return _SYSTEM_INPUT_STATES.get(raw_table[base_index], "Unknown")
# Auxiliary zone type word -> English description.  Values without a branch in
# the historical implementation (6, 7, 10, 12, 14, 16, ...) map to "Unknown".
_ZONE_AUX_TYPES = {
    0: "NA",
    1: "NA",
    2: "NA",
    3: "DHW loop",
    4: "NA",
    5: "Program",
    8: "primary pump",
    9: "burner command",
    11: "DHW",
    13: "failure",
    15: "Electrical DHW",
    17: "VM pump",
    18: "cascade failure",
}


def zone_aux_type_english(raw_table, base_index):
    """ English description of the auxiliary zone type word. """
    return _ZONE_AUX_TYPES.get(raw_table[base_index], "Unknown")
def active_mode_french(raw_table, base_index):
    """ French name of the active heating mode. """
    return {0: "Antigel", 2: "Nuit", 4: "Jour"}.get(
        raw_table[base_index], "Inconnu")


def active_mode_english(raw_table, base_index):
    """ English name of the active heating mode. """
    return {0: "Vacation", 2: "Night", 4: "Day"}.get(
        raw_table[base_index], "Unknown")


def boiler_mode_french(raw_table, base_index):
    """ French name of the boiler season mode. """
    return {4: "Ete", 5: "Hiver"}.get(raw_table[base_index], "Inconnu")


def boiler_mode_english(raw_table, base_index):
    """ English name of the boiler season mode. """
    return {4: "Summer", 5: "Winter"}.get(raw_table[base_index], "Unknown")
def day_schedule(raw_table, base_index):
    """ Decode three words (48 half-hour slots, MSB first) into a list of
    (start, end) timedelta pairs describing the "present" periods. """
    slot = datetime.timedelta(minutes=30)
    # Flatten the three 16-bit words into 48 booleans, MSB first.
    slots = []
    for word in raw_table[base_index:base_index + 3]:
        for bit in range(15, -1, -1):
            slots.append(bool(word & (1 << bit)))
    periods = []
    start = None
    for index, present in enumerate(slots):
        if present and start is None:
            start = index
        elif not present and start is not None:
            periods.append((start * slot, index * slot))
            start = None
    # A period still open at midnight ends at 24:00.
    if start is not None:
        periods.append((start * slot, len(slots) * slot))
    return periods
# Zone A type word -> English description; every other value in 0..15 is "NA".
_ZONE_A_TYPES = {
    0: "Disable",
    1: "Direct",
    5: "Program",
    7: "H.temp",
    11: "DHW",
    15: "Electrical DHW",
}


def zone_a_type_english(raw_table, base_index):
    """ English description of the zone A type word. """
    value = raw_table[base_index]
    if value in _ZONE_A_TYPES:
        return _ZONE_A_TYPES[value]
    if 0 <= value <= 15:
        return "NA"
    return "Unknown"
# Zone B/C type word -> English description ("swiming pool" spelling kept for
# compatibility with existing consumers of the published value).
_ZONE_BC_TYPES = {
    0: "NA",
    1: "Direct",
    2: "3WV",
    3: "NA",
    4: "swiming pool",
}


def zone_bc_type_english(raw_table, base_index):
    """ English description of the zone B/C type word. """
    return _ZONE_BC_TYPES.get(raw_table[base_index], "Unknown")
# Error-code word -> display string.  Replaces a ~140-branch if-chain with a
# single table lookup (same mappings, O(1) instead of a linear scan).
_ERROR_CODES = {
    # Regulation board errors (Dxx)
    0x0000: "D3:OUTL S.B FAIL.", 0x0001: "D4:OUTL S.C FAIL.",
    0x0002: "D5:OUTSI.S.FAIL.", 0x0003: "D7:SYST.SENS.FAIL.",
    0x0004: "D9:DHW S.FAILURE", 0x0005: "D11:ROOM S.A FAIL.",
    0x0006: "D12:ROOM S.B FAIL.", 0x0007: "D13:ROOM S.C FAIL.",
    0x0008: "D14:MC COM.FAIL", 0x0009: "D15:ST.TANK S.FAIL",
    0x000A: "D16:SWIM.P.B.S.FA", 0x000B: "D16:SWIM.P.C.S.FA",
    0x000C: "D17:DHW 2 S.FAIL", 0x000D: "D27:PCU COM.FAIL",
    0x000E: "Not Available", 0x000F: "Not Available",
    0x0010: "Not Available", 0x0011: "Not Available",
    0x0012: "D32:5 RESET:ON/OFF", 0x0013: "D37:TA-S SHORT-CIR",
    0x0014: "D38:TA-S DISCONNEC", 0x0015: "D39:TA-S FAILURE",
    0x0016: "D50:OTH COM.FAIL", 0x0017: "D51:DEF :SEE BOILER",
    0x0018: "D18:SOL.HW S.FAIL", 0x0019: "D19:SOL.COL.S.FAIL",
    0x001A: "D20:SOL COM.FAIL", 0x001B: "D99:DEF.BAD PCU",
    0x001C: "D40:FAIL UNKNOWN", 0x001D: "D254:FAIL UNKNOWN",
    # Gas burner errors (Bxx)
    0x0800: "B0:PSU FAIL", 0x0801: "B1:PSU PARAM FAIL",
    0x0802: "B2:EXCHAN.S.FAIL", 0x0803: "B3:EXCHAN.S.FAIL",
    0x0804: "B4:EXCHAN.S.FAIL", 0x0805: "B5:STB EXCHANGE",
    0x0806: "B6:BACK S.FAILURE", 0x0807: "B7:BACK S.FAILURE",
    0x0808: "B8:BACK S.FAILURE", 0x0809: "B9:STB BACK",
    0x080A: "B10:DT.EXCH.BAC.FAIL", 0x080B: "B11:DT.BAC.EXCH.FAIL",
    0x080C: "B12:STB OPEN", 0x080D: "B14:BURNER FAILURE",
    0x080E: "B15:CCE.TST.FAIL", 0x080F: "B16:PARASIT FLAME",
    0x0810: "B17:VALVE FAIL", 0x0811: "B32:DEF.OUTLET S.",
    0x0812: "B33:DEF.OUTLET S.", 0x0813: "B34:FAN FAILURE",
    0x0814: "B35:BACK>BOIL FAIL", 0x0815: "B36:I-CURRENT FAIL",
    0x0816: "B37:SU COM.FAIL", 0x0817: "B38:PCU COM.FAIL",
    0x0818: "B39:BL OPEN FAIL", 0x0819: "B255:FAIL UNKNOWN",
    0x081A: "B254:FAIL UNKNOWN",
    # French-labelled burner errors (DEF.*)
    0x1000: "DEF.PSU 00", 0x1001: "DEF.PSU PARAM 01",
    0x1002: "DEF.S.DEPART 02", 0x1003: "DEF.S.DEPART 03",
    0x1004: "DEF.S.DEPART 04", 0x1005: "STB DEPART 05",
    0x1006: "DEF.S.RETOUR 06", 0x1007: "DEF.S.RETOUR 07",
    0x1008: "DEF.S.RETOUR 08", 0x1009: "STB RETOUR 09",
    0x100A: "DT.DEP-RET<MIN 10", 0x100B: "DT.DEP-RET>MAX 11",
    0x100C: "STB OUVERT 12", 0x100D: "DEF.ALLUMAGE 14",
    0x100E: "FLAM.PARASI. 16", 0x100F: "DEF.VANNE GAZ 17",
    0x1010: "DEF.VENTILO 34", 0x1011: "DEF.RET>CHAUD 35",
    0x1012: "DEF.IONISATION 36", 0x1013: "DEF.COM.SU 37",
    0x1014: "DEF.COM PCU 38", 0x1015: "DEF BL OUVERT 39",
    0x1016: "DEF.TEST.HRU 40", 0x1017: "DEF.MANQUE EAU 250",
    0x1018: "DEF.MANOMETRE 251", 0x1019: "DEF.INCONNU 255",
    0x101A: "DEF.INCONNU 254",
    # Oil burner errors (Lxx)
    0x1800: "L0:PSU FAIL", 0x1801: "L1:PSU PARAM FAIL",
    0x1802: "L2:STB OUTLET", 0x1803: "L3:DEF.OIL.SENSOR",
    0x1804: "L4:BURNER FAILURE", 0x1805: "L5:DEF.INTERNAL",
    0x1806: "L6:DEF.SPEED.MOT", 0x1807: "L7:DEF.T.WARM UP",
    0x1808: "L8:DEF.PAR.FLAME", 0x1809: "L9:OIL.PRES FAIL.",
    0x180A: "L30:SMOKE PRE.FAIL", 0x180B: "L31:DEF.SMOKE.TEMP",
    0x180C: "L32:DEF.OUTLET S.", 0x180D: "L33:DEF.OUTLET S.",
    0x180E: "L34:BACK S.FAILURE", 0x180F: "L35:BACK S.FAILURE",
    0x1810: "L36:DEF.FLAME LOS", 0x1811: "L37:SU COM.FAIL",
    0x1812: "L38:PCU COM.FAIL", 0x1813: "L39:BL OPEN FAIL",
    0x1814: "L250:DEF.WATER MIS.", 0x1815: "L251:MANOMETRE FAIL",
    0x1816: "L255:FAIL UNKNOWN", 0x1817: "L254:FAIL UNKNOWN",
    # Heat pump errors (Lxx, French labels)
    0x2000: "L1:DEF.COMP.PAC", 0x2001: "L2:DEF.V4V PAC",
    0x2002: "L3:DEF.POMPE PAC", 0x2003: "L4:PAC HORS LIMIT",
    0x2004: "L5:DEF.DEB.PAC 6", 0x2005: "L6:DEF.DEB.PAC 8",
    0x2006: "L7:DEF.COM.PAC", 0x2007: "L8:DEF.S.SOR.COMP",
    0x2008: "L9:DEF.H.P PAC", 0x2009: "L10:DEF.B.P PAC",
    0x200A: "L11:DEF.PRES.SOURC", 0x200B: "L12:DEF.ANTI.SOUR.",
    0x200C: "L13:DEF.P.SOURCE", 0x200D: "L14:DEF.ANTI.COND.",
    0x200E: "L15:DEF.DEGIVRAGE", 0x200F: "L16:DEF.PROT.MOT.",
    0x2010: "L17:DEF.S.GAZ.CH.", 0x2011: "L18:DEF.COM.PAC",
    0x2012: "L19:DEF.S.DEP.PAC", 0x2013: "L20:DEF.S.RET.PAC",
    0x2014: "L21:DEF.S.EXT.ENT.", 0x2015: "L22:DEF.S.EXT.SOR.",
    0x2016: "L23:DEF.S.GAZ EXP.", 0x2017: "L24:DEF.S.EVAPO.",
    0x2018: "L25:DEF.S.CONDENS.", 0x2019: "L32:BL.USER.RESET",
    0x201A: "L33:DEF.DEBIT", 0x201B: "L255:DEF.INCONNU",
    0x201C: "L254:DEF.INCONNU",
    0xFFFF: "no error",
}


def error_code(raw_table, base_index):
    """ Translate a boiler error-code word into its display string. """
    return _ERROR_CODES.get(raw_table[base_index], "Unknown")
# Language index -> English name, indexed by the raw word value.
_LANGUAGES = ("French", "German", "English", "Italian", "Spanish",
              "Dutch", "Polish", "Turkish", "Russian")


def language_english(raw_table, base_index):
    """ English name of the configured display language. """
    value = raw_table[base_index]
    if 0 <= value < len(_LANGUAGES):
        return _LANGUAGES[value]
    return "Unknown"
def json_week_schedule(raw_table, base_index):
    """ Convert week schedule to a JSON """
    # Seven consecutive days, three words (48 half-hour slots) per day.
    schedule = {}
    for day in range(7):
        schedule[day] = day_schedule(raw_table, base_index + day * 3)
    # CustomDateJSONEncoder serializes the timedelta pairs produced by
    # day_schedule into JSON-friendly values.
    encoder = time_delta_json.CustomDateJSONEncoder()
    return encoder.encode(schedule)
def hours_minutes_secondes(raw_table, base_index):
    """ Format three raw words as HH:MM:SS. """
    hours, minutes, seconds = raw_table[base_index:base_index + 3]
    return "{:02d}:{:02d}:{:02d}".format(hours, minutes, seconds)


def hours_minutes(raw_table, base_index):
    """ Format two raw words as HH:MM. """
    hours, minutes = raw_table[base_index:base_index + 2]
    return "{:02d}:{:02d}".format(hours, minutes)


def day_month(raw_table, base_index):
    """ Format two raw words as DD/MM. """
    day, month = raw_table[base_index:base_index + 2]
    return "{:02d}/{:02d}".format(day, month)


def day_month_year(raw_table, base_index):
    """ Format three raw words as DD/MM/YY. """
    day, month, year = raw_table[base_index:base_index + 3]
    return "{:02d}/{:02d}/{:02d}".format(day, month, year)
def decrease_french(raw_table, base_index):
    """ French description of the decrease flag (0 means stop). """
    return "stop" if raw_table[base_index] == 0 else "abaissement"


def decrease_english(raw_table, base_index):
    """ English description of the decrease flag (0 means stop). """
    return "stop" if raw_table[base_index] == 0 else "decreasing"


def off_on(raw_table, base_index):
    """ Textual form of a boolean word: zero is "off", anything else "on". """
    return "off" if raw_table[base_index] == 0 else "on"
OUTPUT1_BURNER = 3
OUTPUT1_HYDRAULIC_VALVE_OPEN = 1 << 2
OUTPUT1_HYDRAULIC_VALVE_CLOSE = 1 << 3
OUTPUT1_BOILER_PUMP = 1 << 4
# It's ON on my boiler, I want to follow it.
OUTPUT1_UNKNOW1 = 1 << 5
OUTPUT2_DHW_PUMP = 1 << 0
OUTPUT2_ZONEA_PUMP = 1 << 1
OUTPUT2_ZONEB_PUMP = 1 << 4
OUTPUT2_ZONEB_3WV_OPEN = 1 << 5
OUTPUT2_ZONEB_3WV_CLOSE = 1 << 6
OUTPUT2_ZONEC_PUMP = 1 << 7
OUTPUT2_ZONEC_3WV_OPEN = 1 << 8
OUTPUT2_ZONEC_3WV_CLOSE = 1 << 9
OUTPUT2_AUX_PUMP = 1 << 10

# Boolean flags for each output word, in the published JSON key order.
_OUTPUT1_FLAGS = (
    ("hydraulic_valve_open", OUTPUT1_HYDRAULIC_VALVE_OPEN),
    ("hydraulic_valve_close", OUTPUT1_HYDRAULIC_VALVE_CLOSE),
    ("hydraulic_boiler_pump", OUTPUT1_BOILER_PUMP),
    ("UNKNOWN1", OUTPUT1_UNKNOW1),
)
_OUTPUT2_FLAGS = (
    ("DHW_pump", OUTPUT2_DHW_PUMP),
    ("zone_A_pump", OUTPUT2_ZONEA_PUMP),
    ("zone_B_pump", OUTPUT2_ZONEB_PUMP),
    ("zone_B_3WV_open", OUTPUT2_ZONEB_3WV_OPEN),
    ("zone_B_3WV_close", OUTPUT2_ZONEB_3WV_CLOSE),
    ("zone_C_pump", OUTPUT2_ZONEC_PUMP),
    ("zone_C_3WV_open", OUTPUT2_ZONEC_3WV_OPEN),
    ("zone_C_3WV_close", OUTPUT2_ZONEC_3WV_CLOSE),
    ("AUX_pump", OUTPUT2_AUX_PUMP),
)


def output_state(raw_table, base_index):
    """ Decode the two output-state words into a JSON object string. """
    word1 = raw_table[base_index]
    word2 = raw_table[base_index + 1]
    # "burner" keeps its two-bit numeric value; every other key is a boolean.
    result = {"burner": word1 & OUTPUT1_BURNER}
    for name, mask in _OUTPUT1_FLAGS:
        result[name] = bool(word1 & mask)
    for name, mask in _OUTPUT2_FLAGS:
        result[name] = bool(word2 & mask)
    return json.dumps(result)
BASEECS_AUX_PUMP = 1
BASEECS_ZONEA_PUMP_BOILER = 1 << 1
BASEECS_BURNER_1_2 = 1 << 2
BASEECS_BURNER_1_1 = 1 << 3
BASEECS_ZONEA_PUMP = 1 << 4
BASEECS_DHW_PUMP = 1 << 5
BASEECS_ALARM_BURNER = 1 << 6
# BASEECS_ = 1 << 7
BASEECS_VALVE = 1 << 8

# Flag masks for the base_ecs word, in the published JSON key order.
_BASEECS_FLAGS = (
    ("AUX_pump", BASEECS_AUX_PUMP),
    ("zone_A_pump_boiler", BASEECS_ZONEA_PUMP_BOILER),
    ("burner_1_2", BASEECS_BURNER_1_2),
    ("burner_1_1", BASEECS_BURNER_1_1),
    ("zone_A_pump", BASEECS_ZONEA_PUMP),
    ("DHW_pump", BASEECS_DHW_PUMP),
    ("Alarm_burner", BASEECS_ALARM_BURNER),
    ("valve", BASEECS_VALVE),
)


def base_ecs(raw_table, base_index):
    """ Decode the base_ecs state word into a JSON object string. """
    word = raw_table[base_index]
    result = {}
    for name, mask in _BASEECS_FLAGS:
        result[name] = bool(word & mask)
    return json.dumps(result)
def fan(raw_table, base_index):
    """Extract the fan speed from a status register.

    The speed occupies the low 7 bits; the upper bits are masked away.
    """
    return raw_table[base_index] & 0x007F
def texte14(raw_table, base_index):
    """Decode 14 characters of text packed two per 16-bit register.

    Each of the 7 consecutive registers holds two characters: the high
    byte first, then the low byte.
    """
    words = raw_table[base_index:base_index + 7]
    return ''.join(chr(w >> 8) + chr(w & 0x00FF) for w in words)
def write_unit(value):
    """Encode a plain (unit-scaled) value as a one-register modbus payload."""
    register = int(value)
    return [register]
def write_tenth(value):
    """Encode a tenth-scaled value as a one-register modbus payload.

    The value is multiplied by 10 and truncated toward zero; negative
    values are stored sign-magnitude with the sign carried in bit 15.
    """
    scaled = int(float(value) * 10)
    if scaled >= 0:
        return [scaled]
    return [abs(scaled) | 0x8000]
# Maps UI mode names (French and English spellings) to derogation bit
# patterns.  The BIT_* flag constants are defined earlier in this module;
# every non-automatic mode also sets BIT_END_OF_PROGRAM.
DEROG_NAME_TO_VALUE = {
    "Vacances": BIT_ANTIFREEZE | BIT_END_OF_PROGRAM,
    "Nuit": BIT_NIGHT | BIT_END_OF_PROGRAM,
    "Jour": BIT_DAY | BIT_END_OF_PROGRAM,
    "Automatique": BIT_AUTO,
    "Vacation": BIT_ANTIFREEZE | BIT_END_OF_PROGRAM,
    "Night": BIT_NIGHT | BIT_END_OF_PROGRAM,
    "Day": BIT_DAY | BIT_END_OF_PROGRAM,
    "Automatic": BIT_AUTO
}

def write_derog_bit_simple(value):
    """Translate a mode name (French or English) to a one-register payload.

    Returns a single-element list holding the derogation bit pattern, or
    None when the name is not a known mode.
    """
    if value not in DEROG_NAME_TO_VALUE:
        return None
    return [DEROG_NAME_TO_VALUE[value]]
# Maps display-language names to the numeric codes the device expects.
LANGUAGE_NAME_TO_VALUE = {
    "French": 0,
    "German": 1,
    "English": 2,
    "Italian": 3,
    "Spanish": 4,
    "Dutch": 5,
    "Polish": 6,
    "Turkish": 7,
    "Russian": 8
}

def write_language(value):
    """Translate a language name to a one-register modbus payload.

    Returns a single-element list holding the language code, or None when
    the name is unknown.
    """
    code = LANGUAGE_NAME_TO_VALUE.get(value)
    if code is None:
        return None
    return [code]
| mit |
Kta-M/nested_attributes_validator | lib/nested_attributes_validator/active_model/validations/nested_attributes_uniqueness_validator.rb | 980 | class NestedAttributesUniquenessValidator < ActiveModel::EachValidator
include NestedAttributesValidatorUtil
def validate_each(record, _attribute, values)
trg_fields = target_fields
# detect duplicated values
duplicated_values = target_values(trg_fields, values)
.group_by{|_k ,v| v}.values
.select{|a| a.size>1}
.flatten(1)
.to_h
.keys
# set errors
duplicated_values.each do |value|
trg_fields.each do |field|
# set error to the parent record
attribute_name = :"#{attributes.first}.#{options[:display_field] || field}"
record.errors[attribute_name] << I18n.t('errors.messages.nested_attributes_not_unique')
record.errors[attribute_name].uniq!
# also set error to the child record to apply "field_with_errors"
value.errors.add(field , nil)
end
end
end
end
| mit |
gabzon/experiensa | templates/about.php | 1005 | <?php
/**
* Template Name: About template
*/
use Experiensa\Modules\QueryBuilder;
$design_settings = get_option('experiensa_design_settings');
$page_object = get_queried_object();
$page_id = get_queried_object_id();
?>
<br>
<br>
<br>
<br>
<div class="ui container" style="margin-top:40px">
<?php while (have_posts()) : the_post(); ?>
<?php get_template_part('templates/page', 'header'); ?>
<?php get_template_part('templates/content', 'page'); ?>
<?php endwhile; ?>
<br>
<?php get_template_part('templates/about/about'); ?>
<br>
<?php get_template_part('templates/about/map'); ?>
</div>
<br>
<br>
<?php
$section_obj = new Experiensa\Component\Section($page_id,$design_settings,'about_section');
if($section_obj->checkExistSections()):
$sections = $section_obj->getSections();
foreach($sections as $section):
// echo "<pre>";
// print_r($section);
// echo "</pre>";
$section_obj->showSection($section);
endforeach;
endif; | mit |
sandwich99/jspm_typescript | app/component/table/table.ts | 187 |
// Numeric enum demo: Red = 0, Green = 1, Blue = 2 (default enum numbering).
enum Color {Red, Green, Blue};

// Logs Color.Red's numeric value and then the numeric value of the argument,
// each prefixed with an empty string.
function printColor(color: Color) {
    console.log("", Color.Red);
    console.log("", color);
}

printColor(Color.Red);
export function Table(){
} | mit |
lgrabarevic/BuildsAppReborn | BuildsAppReborn.Access/TFS2017/Models/Tfs2017User.cs | 156 | namespace BuildsAppReborn.Access.Models
{
    // ReSharper disable once ClassNeverInstantiated.Global
    /// <summary>
    /// Marker type for TFS 2017 user payloads; adds nothing beyond TfsUser.
    /// Never constructed directly in code (hence the ReSharper suppression).
    /// </summary>
    internal class Tfs2017User : TfsUser
    {
    }
} | mit |
jugstalt/gViewGisOS | gView.MapServer.Lib/MapServer/Lib/TileService/TileServiceInterpreter.cs | 40735 | using System;
using System.Collections.Generic;
using System.Text;
using gView.MapServer;
using gView.Framework.Metadata;
using gView.Framework.IO;
using System.IO;
using gView.Framework.Carto;
using gView.Framework.system;
using gView.Framework.Geometry;
using gView.Framework.Geometry.Tiling;
using gView.MapServer.Lib.TileService;
using System.Drawing;
using System.Drawing.Imaging;
using System.Collections.Specialized;
using gView.Data.Framework.Data.TileCache;
using Newtonsoft.Json;
namespace gView.MapServer.Lib.TileService
{
[gView.Framework.system.RegisterPlugIn("ED770739-12FA-40d7-8EF9-38FE9299564A")]
public class TileServiceInterpreter : IServiceRequestInterpreter
{
private static IFormatProvider _nhi = System.Globalization.CultureInfo.InvariantCulture.NumberFormat;
private IMapServer _mapServer = null;
private static Guid _metaprovider = new Guid("D33D3DD2-DD63-4a47-9F84-F840FE0D01C0");
#region IServiceRequestInterpreter Member
public void OnCreate(IMapServer mapServer)
{
_mapServer = mapServer;
}
public void Request(IServiceRequestContext context)
{
try
{
if (context == null || context.ServiceRequest == null || context.ServiceMap == null)
return;
if (_mapServer == null)
{
context.ServiceRequest.Response = "<FATALERROR>MapServer Object is not available!</FATALERROR>";
return;
}
TileServiceMetadata metadata = context.ServiceMap.MetadataProvider(_metaprovider) as TileServiceMetadata;
if (metadata == null || metadata.Use == false)
{
context.ServiceRequest.Response = "<ERROR>Service is not used with Tile Service</ERROR>";
}
string service = context.ServiceRequest.Service;
string request = context.ServiceRequest.Request;
if (request.ToLower().StartsWith("path="))
request = request.Substring(5);
string[] args = request.Split('/');
string command = args[0].ToLower();
bool renderOnTheFly = false;
if (command.Contains(":"))
{
switch (command.Split(':')[1])
{
case "render":
renderOnTheFly = true;
break;
}
command = command.Split(':')[0];
}
switch (command)
{
case "metadata":
XmlStream stream = new XmlStream("metadata");
metadata.Save(stream);
StringWriter sw = new StringWriter();
stream.WriteStream(sw);
sw.Close();
context.ServiceRequest.Response = sw.ToString();
break;
case "osm":
case "tms":
if (args.Length == 4)
{
int epsg = int.Parse(args[1]);
if (metadata.EPSGCodes.Contains(epsg))
context.ServiceRequest.Response = TmsCapabilities(context, metadata, epsg);
}
else if (args.Length == 7) // tms/srs/1.0.0/service/0/0/0.png
{
int epsg = int.Parse(args[1]);
double scale = metadata.Scales[int.Parse(args[4])];
int row = (args[0] == "tms" ? int.Parse(args[5]) : int.Parse(args[6].Split('.')[0]));
int col = (args[0] == "tms" ? int.Parse(args[6].Split('.')[0]) : int.Parse(args[5]));
string format = ".png";
if (args[6].ToLower().EndsWith(".jpg")) format = ".jpg";
GetTile(context, metadata, epsg, scale, row, col, format, (args[0] == "tms" ? GridOrientation.LowerLeft : GridOrientation.UpperLeft), renderOnTheFly);
}
else if (args.Length == 10) // tms/srs/service/01/000/000/001/000/000/001.png
{
int epsg = int.Parse(args[1]);
double scale = metadata.Scales[int.Parse(args[3])];
int col = int.Parse(args[4]) * 1000000 + int.Parse(args[5]) * 1000 + int.Parse(args[6]);
int row = int.Parse(args[7]) * 1000000 + int.Parse(args[8]) * 1000 + int.Parse(args[9].Split('.')[0]);
string format = ".png";
if (args[9].ToLower().EndsWith(".jpg")) format = ".jpg";
GetTile(context, metadata, epsg, scale, row, col, format, (args[0] == "tms" ? GridOrientation.LowerLeft : GridOrientation.UpperLeft), renderOnTheFly);
}
break;
case "init":
if (args.Length >= 5)
{
string cacheFormat = args[1].ToLower() == "compact" ? "compact" : "";
if (args[2].ToLower() != "ul" &&
args[2].ToLower() != "ll")
throw new ArgumentException();
int epsg = int.Parse(args[3]);
string format = "image/" + args[4].ToLower();
if (args[4].ToLower().EndsWith(".jpg")) format = ".jpg";
WriteConfFile(context, metadata, cacheFormat, epsg, format,
(args[2].ToLower() == "ul" ? GridOrientation.UpperLeft : GridOrientation.LowerLeft));
}
break;
case "tile":
if (args.Length == 5)
{
int epsg = int.Parse(args[1]);
double scale = GetScale(metadata, args[2]); // double.Parse(args[2].Replace(",", "."), _nhi);
int row = int.Parse(args[3]);
int col = int.Parse(args[4].Split('.')[0]);
string format = ".png";
if (args[4].ToLower().EndsWith(".jpg")) format = ".jpg";
GetTile(context, metadata, epsg, scale, row, col, format, GridOrientation.UpperLeft, renderOnTheFly);
}
else if (args.Length == 6)
{
if (args[1].ToLower() != "ul" &&
args[1].ToLower() != "ll")
throw new ArgumentException();
int epsg = int.Parse(args[2]);
double scale = GetScale(metadata, args[3]); // double.Parse(args[3].Replace(",", "."), _nhi);
int row = int.Parse(args[4]);
int col = int.Parse(args[5].Split('.')[0]);
string format = ".png";
if (args[5].ToLower().EndsWith(".jpg")) format = ".jpg";
GetTile(context, metadata, epsg, scale, row, col, format,
(args[1].ToLower() == "ul" ? GridOrientation.UpperLeft : GridOrientation.LowerLeft), renderOnTheFly);
}
else if (args.Length >= 7)
{
string cacheFormat = args[1].ToLower();
if (args[2].ToLower() != "ul" &&
args[2].ToLower() != "ll")
throw new ArgumentException();
int epsg = int.Parse(args[3]);
double scale = GetScale(metadata, args[4]); // double.Parse(args[4].Replace(",", "."), _nhi);
int row = int.Parse(args[5]);
int col = int.Parse(args[6].Split('.')[0]);
string format = ".png";
if (args[6].ToLower().EndsWith(".jpg")) format = ".jpg";
if (cacheFormat == "compact")
{
var boundingTiles = args.Length > 7 ? new BoundingTiles(args[7]) : null;
GetCompactTile(context, metadata, epsg, scale, row, col, format,
(args[2].ToLower() == "ul" ? GridOrientation.UpperLeft : GridOrientation.LowerLeft), boundingTiles, renderOnTheFly);
}
else
{
GetTile(context, metadata, epsg, scale, row, col, format,
(args[2].ToLower() == "ul" ? GridOrientation.UpperLeft : GridOrientation.LowerLeft), renderOnTheFly);
}
}
else
throw new ArgumentException();
break;
}
}
catch (Exception ex)
{
if (context != null && context.ServiceRequest != null)
context.ServiceRequest.Response = "<Exception>" + ex.Message + "</Exception>";
}
}
public string IntentityName
{
get { return "tiles"; }
}
public InterpreterCapabilities Capabilities
{
get {
return null;
}
}
#endregion
private void GetTile(IServiceRequestContext context, TileServiceMetadata metadata, int epsg, double scale, int row, int col, string format, GridOrientation orientation, bool renderOnTheFly)
{
if (!metadata.EPSGCodes.Contains(epsg))
throw new ArgumentException("Wrong epsg argument");
//if (!metadata.Scales.Contains(scale))
// throw new ArgumentException("Wrong scale argument");
scale = metadata.Scales.GetScale(scale);
if (scale <= 0.0)
throw new ArgumentException("Wrong scale argument");
//IEnvelope bounds = metadata.GetEPSGEnvelope(epsg);
//if (bounds == null || bounds.Width == 0.0 || bounds.Height == 0.0)
// throw new Exception("No bounds defined for EPSG:" + epsg);
format = format.ToLower();
if (format != ".png" && format != ".jpg")
throw new Exception("Unsupported image format");
if (format == ".png" && metadata.FormatPng == false)
throw new Exception("Format image/png not supported");
if (format == ".jpg" && metadata.FormatJpg == false)
throw new Exception("Format image/jpeg no supported");
string path = _mapServer.TileCachePath + @"\" + context.ServiceMap.Name + @"\_alllayers\" +
TileServiceMetadata.TilePath(orientation, epsg, scale, row, col) + format;
if ((orientation == GridOrientation.UpperLeft && metadata.UpperLeftCacheTiles) ||
(orientation == GridOrientation.LowerLeft && metadata.LowerLeftCacheTiles))
{
FileInfo fi = new FileInfo(path);
if (fi.Exists)
{
context.ServiceRequest.Response = fi.FullName;
return;
}
else if(!renderOnTheFly && !metadata.RenderTilesOnTheFly)
{
return; // Empty
}
if (!fi.Directory.Exists)
fi.Directory.Create();
}
else
{
path = _mapServer.OutputPath + @"\tile_" + Guid.NewGuid().ToString("N").ToLower() + format;
}
ISpatialReference sRef = SpatialReference.FromID("epsg:" + epsg);
using (IServiceMap map = context.ServiceMap)
{
map.Display.SpatialReference = sRef;
map.Display.dpi = metadata.Dpi;
map.Display.iWidth = metadata.TileWidth;
map.Display.iHeight = metadata.TileHeight;
double res = (double)scale / (metadata.Dpi / 0.0254);
if (map.Display.MapUnits != GeoUnits.Meters)
{
GeoUnitConverter converter = new GeoUnitConverter();
res = converter.Convert(res, GeoUnits.Meters, map.Display.MapUnits);
}
var origin = orientation == GridOrientation.UpperLeft ? metadata.GetOriginUpperLeft(epsg) : metadata.GetOriginLowerLeft(epsg);
double H = metadata.TileHeight * res;
double y = (orientation == GridOrientation.UpperLeft ?
origin.Y - H * (row + 1) :
origin.Y + H * row);
double W = metadata.TileWidth * res;
//if (map.Display.MapUnits == GeoUnits.DecimalDegrees)
//{
// double phi = (2 * y + H) / 2.0;
// W /= Math.Cos(phi / 180.0 * Math.PI);
//}
double x = origin.X + W * col;
map.Display.ZoomTo(new Envelope(x, y, x + W, y + H));
map.Render();
bool maketrans = map.Display.MakeTransparent;
map.Display.MakeTransparent = true;
map.SaveImage(path, format == ".jpg" ? System.Drawing.Imaging.ImageFormat.Jpeg : System.Drawing.Imaging.ImageFormat.Png);
map.Display.MakeTransparent = maketrans;
context.ServiceRequest.Response = path;
_mapServer.Log("CreateTile:", loggingMethod.request_detail, path);
}
}
private void GetCompactTile(IServiceRequestContext context, TileServiceMetadata metadata, int epsg, double scale, int row, int col, string format, GridOrientation orientation, BoundingTiles boundingTiles, bool renderOnTheFly)
{
if (!metadata.EPSGCodes.Contains(epsg))
throw new ArgumentException("Wrong epsg argument");
if(orientation!=GridOrientation.UpperLeft)
throw new ArgumentException("Compact Tiles Orientation must bei Upper Left!");
scale = metadata.Scales.GetScale(scale);
if (scale <= 0.0)
throw new ArgumentException("Wrong scale argument");
//IEnvelope bounds = metadata.GetEGPSEnvelope(epsg);
//if (bounds == null || bounds.Width == 0.0 || bounds.Height == 0.0)
// throw new Exception("No bounds defined for EPSG:" + epsg);
IPoint origin = metadata.GetOriginUpperLeft(epsg);
if(origin==null )
throw new Exception("No origin defined for EPSG:" + epsg);
format = format.ToLower();
if (format != ".png" && format != ".jpg")
throw new Exception("Unsupported image format");
if (format == ".png" && metadata.FormatPng == false)
throw new Exception("Format image/png not supported");
if (format == ".jpg" && metadata.FormatJpg == false)
throw new Exception("Format image/jpeg no supported");
string path = _mapServer.TileCachePath + @"\" + context.ServiceMap.Name + @"\_alllayers\compact\" +
TileServiceMetadata.ScalePath(orientation, epsg, scale);
string compactTileName = CompactTileName(row, col);
string bundleFilename = path + @"\" + compactTileName + ".tilebundle";
string bundleDoneFilename = path + @"\" + compactTileName + ".tilebundle.done";
string bundleCalcFilename = path + @"\" + compactTileName + ".tilebundle.calc";
if (new FileInfo(bundleFilename).Exists)
{
GetCompactTileBytes(context, path, row, col);
return;
}
else if (!renderOnTheFly || new FileInfo(bundleDoneFilename).Exists || new FileInfo(bundleCalcFilename).Exists /* && !metadata.RenderTilesOnTheFly*/)
{
return; // Empty
}
DirectoryInfo di = new DirectoryInfo(path);
if (!di.Exists)
di.Create();
try { File.Delete(bundleFilename); }
catch { }
//temp
//string pathTemp = path + @"\temp";
//DirectoryInfo diTemp = new DirectoryInfo(pathTemp);
//if (!diTemp.Exists)
// diTemp.Create();
File.WriteAllText(bundleCalcFilename, "calc...");
CompactTilesIndexBuilder indexBuilder = new CompactTilesIndexBuilder();
int startRow = CompactTileStart(row), startCol = CompactTileStart(col);
ISpatialReference sRef = SpatialReference.FromID("epsg:" + epsg);
using (IServiceMap map = context.ServiceMap)
{
map.Display.SpatialReference = sRef;
map.Display.dpi = metadata.Dpi;
double res = (double)scale / (metadata.Dpi / 0.0254);
if (map.Display.MapUnits != GeoUnits.Meters)
{
GeoUnitConverter converter = new GeoUnitConverter();
res = converter.Convert(res, GeoUnits.Meters, map.Display.MapUnits);
}
string bundleTempFilename = path + @"\" + compactTileName + "." + Guid.NewGuid().ToString("N").ToLower() + ".tilebundle";
string bundleIndexFilename = path + @"\" + compactTileName + ".tilebundlx";
File.WriteAllBytes(bundleTempFilename, new byte[0]);
int bundlePos = 0;
int tileMatrixWidth = 8, tileMatrixHeight = 8;
map.Display.iWidth = metadata.TileWidth * tileMatrixWidth;
map.Display.iHeight = metadata.TileHeight * tileMatrixHeight;
for (int r = 0; r < 128; r += 8)
{
File.WriteAllText(bundleCalcFilename, "calc...row" + r);
for (int c = 0; c < 128; c += 8)
{
int currentRow = r + startRow, currentCol = c + startCol;
if (boundingTiles != null)
if (!boundingTiles.Check(currentRow, currentCol, 8, 8))
continue;
double H = metadata.TileHeight * res;
double y = origin.Y - H * (currentRow + tileMatrixHeight);
double W = metadata.TileWidth * res;
double x = origin.X + W * currentCol;
map.Display.ZoomTo(new Envelope(x, y, x + W * tileMatrixWidth, y + H * tileMatrixHeight));
if (format != ".jpg") // Make PNG Transparent
map.Display.BackgroundColor = System.Drawing.Color.Transparent;
map.ReleaseImage(); // Delete old Image !!! Because there is no map.SaveImage()!!!!
map.Render();
if (IsEmptyBitmap(map.MapImage, map.Display.BackgroundColor))
continue;
// Temp
//map.MapImage.Save(pathTemp + @"\matrix_" + (startRow + r) + "_" + (startCol + c) + ".png", ImageFormat.Png);
for (int j = 0; j < tileMatrixHeight; j++)
{
for (int i = 0; i < tileMatrixWidth; i++)
{
int tileRow = currentRow + j, tileCol = currentCol + i;
if (boundingTiles != null)
if (!boundingTiles.Check(tileRow, tileCol, 8, 8))
continue;
using (Bitmap bm = new Bitmap(metadata.TileWidth, metadata.TileHeight, map.MapImage.PixelFormat))
using (Graphics gr = Graphics.FromImage(bm))
{
gr.InterpolationMode = System.Drawing.Drawing2D.InterpolationMode.NearestNeighbor;
gr.DrawImage(map.MapImage,
new RectangleF(0f, 0f, (float)bm.Width, (float)bm.Height),
new RectangleF(-0.5f + (float)(i * metadata.TileWidth), -0.5f + (float)(j * metadata.TileHeight), (float)metadata.TileWidth, (float)metadata.TileHeight), GraphicsUnit.Pixel);
//for (int py = 0, to_py = bm.Height; py < to_py; py++)
//{
// for (int px = 0, to_px = bm.Width; px < to_px; px++)
// {
// var pCol = map.MapImage.GetPixel(px + i * metadata.TileHeight, py + j * metadata.TileHeight);
// bm.SetPixel(px, py, pCol);
// }
//}
if (IsEmptyBitmap(bm, map.Display.BackgroundColor))
continue;
// Temp
//bm.Save(pathTemp + @"\tile_" + tileRow + "_" + tileCol + ".png", ImageFormat.Png);
//try
//{
// if (format != ".jpg" && map.Display.BackgroundColor.A > 0) // Make PNG Transparent
// bm.MakeTransparent(map.Display.BackgroundColor);
//}
//catch { }
MemoryStream ms = new MemoryStream();
bm.Save(ms, format == ".jpg" ? System.Drawing.Imaging.ImageFormat.Jpeg : System.Drawing.Imaging.ImageFormat.Png);
byte[] imageBytes = ms.ToArray();
using (var stream = new FileStream(bundleTempFilename, FileMode.Append))
{
stream.Write(imageBytes, 0, imageBytes.Length);
}
indexBuilder.SetValue(r + j, c + i, bundlePos, imageBytes.Length);
bundlePos += imageBytes.Length;
}
}
}
}
map.ReleaseImage();
GC.Collect();
}
try { File.Delete(bundleFilename); }
catch { }
if (bundlePos == 0)
{
File.Delete(bundleTempFilename);
File.WriteAllText(bundleDoneFilename, "");
}
else
{
File.Move(bundleTempFilename, bundleFilename);
indexBuilder.Save(bundleIndexFilename);
}
try { File.Delete(bundleCalcFilename); }
catch { }
}
GC.Collect();
}
private void GetCompactTileBytes(IServiceRequestContext context, string path, int row, int col)
{
string compactTileName = CompactTileName(row, col);
string bundleFilename = path + @"\" + compactTileName + ".tilebundle";
string bundleIndexFilename = path + @"\" + compactTileName + ".tilebundlx";
FileInfo fi = new FileInfo(bundleIndexFilename);
if (!fi.Exists)
{
return;
}
CompactTileIndex bundleIndex = new CompactTileIndex(bundleIndexFilename);
int bundleStartRow = CompactTileStart(row);
int bundleStartCol = CompactTileStart(col);
try
{
int tileLength, tilePosition = bundleIndex.TilePosition(row - bundleStartRow, col - bundleStartCol, out tileLength);
if (tilePosition < 0)
return;
using (FileStream fs = File.Open(bundleFilename, FileMode.Open, FileAccess.Read, FileShare.Read)) //new FileStream(bundleFilename, FileMode.Open, FileAccess.Read))
{
fs.Position = tilePosition;
byte[] data = new byte[tileLength];
fs.Read(data, 0, tileLength);
context.ServiceRequest.Response = new MapServerResponse()
{
Data = data,
ContentType = "image/jpg",
Expires = DateTime.UtcNow.AddDays(7)
}.ToString();
}
}
catch (Exception ex)
{
TileServiceMetadata metadata = context.ServiceMap.MetadataProvider(_metaprovider) as TileServiceMetadata;
using (System.Drawing.Bitmap bm = new Bitmap(metadata.TileWidth, metadata.TileHeight))
using (System.Drawing.Graphics gr = Graphics.FromImage(bm))
using (System.Drawing.Font font = new Font("Arial", 9f))
{
gr.DrawString(ex.Message, font, Brushes.Red, new RectangleF(0f, 0f, (float)bm.Width, (float)bm.Height));
MemoryStream ms = new MemoryStream();
bm.Save(ms, ImageFormat.Png);
context.ServiceRequest.Response = new MapServerResponse()
{
Data = ms.ToArray(),
ContentType = "image/jpg"
}.ToString();
}
}
}
private byte[] GetCompactTileBytes(string path, int row,int col)
{
string compactTileName = CompactTileName(row, col);
string bundleFilename = path + @"\" + compactTileName + ".tilebundle";
string bundleIndexFilename = path + @"\" + compactTileName + ".tilebundlx";
FileInfo fi = new FileInfo(bundleIndexFilename);
if (!fi.Exists)
{
return null;
}
CompactTileIndex bundleIndex = new CompactTileIndex(bundleIndexFilename);
int bundleStartRow = CompactTileStart(row);
int bundleStartCol = CompactTileStart(col);
try
{
int tileLength, tilePosition = bundleIndex.TilePosition(row - bundleStartRow, col - bundleStartCol, out tileLength);
if (tilePosition < 0)
return null;
using (FileStream fs = File.Open(bundleFilename, FileMode.Open, FileAccess.Read, FileShare.Read)) //new FileStream(bundleFilename, FileMode.Open, FileAccess.Read))
{
fs.Position = tilePosition;
byte[] data = new byte[tileLength];
fs.Read(data, 0, tileLength);
return data;
}
}
catch
{
return null;
}
}
private void WriteConfFile(IServiceRequestContext context, TileServiceMetadata metadata, string cacheFormat, int epsg, string format, GridOrientation orientation)
{
FileInfo configFileInfo = new FileInfo(_mapServer.TileCachePath + @"\" + context.ServiceMap.Name + @"\_alllayers\" + cacheFormat + @"\" + TileServiceMetadata.EpsgPath(orientation, epsg) + @"\conf.json");
IPoint origin = orientation == GridOrientation.UpperLeft ? metadata.GetOriginUpperLeft(epsg) : metadata.GetOriginLowerLeft(epsg);
IEnvelope bounds = metadata.GetEPSGEnvelope(epsg);
if (origin == null || bounds == null)
return;
List<CompactTileConfig.LevelConfig> levels = new List<CompactTileConfig.LevelConfig>();
for(int i=0;i<metadata.Scales.Count;i++)
{
levels.Add(new CompactTileConfig.LevelConfig()
{
Level = i,
Scale = metadata.Scales[i]
});
}
CompactTileConfig config = new CompactTileConfig()
{
Epsg = epsg,
Dpi = metadata.Dpi,
Origin = new double[] { origin.X, origin.Y },
Extent = new double[] { bounds.minx, bounds.miny, bounds.maxx, bounds.maxy },
TileSize = new int[] { metadata.TileWidth, metadata.TileHeight },
Format = format,
Orientation = orientation.ToString(),
Levels = levels.ToArray()
};
if (configFileInfo.Exists)
configFileInfo.Delete();
if (!configFileInfo.Directory.Exists)
configFileInfo.Directory.Create();
File.WriteAllText(configFileInfo.FullName, JsonConvert.SerializeObject(config, Formatting.Indented));
}
#region Helper
private double GetScale(TileServiceMetadata metadata, string scaleArgument)
{
if (scaleArgument.StartsWith("~"))
{
scaleArgument = scaleArgument.Substring(1);
return metadata.Scales[int.Parse(scaleArgument)];
}
return double.Parse(scaleArgument.Replace(",", "."), _nhi);
}
#endregion
#region Compact Helper
public int CompactTileStart(int index)
{
if (index < 0)
{
return 0;
}
return (index >> 7) * 128; // 128 x 128 Tiles werden zu einem Bundle zusammengefasst
}
public string CompactTileName(int row, int col)
{
return "R" + CompactTileStart(row).ToString("X8") + "C" + CompactTileStart(col).ToString("X8");
}
        /// <summary>
        /// Computes a running standard deviation of the per-pixel intensity
        /// values (each pixel folded into an ARGB int with alpha forced to 0).
        /// NOTE(review): the variance accumulator is updated against the
        /// *interim* mean at each step, so the result only approximates the
        /// textbook standard deviation of the final distribution.
        /// NOTE(review): the pointer walk advances 3 bytes per pixel (24bpp)
        /// while LockBits uses bitmap.PixelFormat — confirm callers only pass
        /// 24bpp bitmaps, otherwise the stride math reads the wrong bytes.
        /// NOTE(review): UnlockBits is not in a finally block, so an exception
        /// mid-loop leaves the bitmap locked.
        /// </summary>
        public double GetStdDev(Bitmap bitmap)
        {
            double total = 0, totalVariance = 0;
            int count = 0;
            double stdDev = 0;

            // First get all the bytes
            BitmapData bmData = bitmap.LockBits(new Rectangle(0, 0, bitmap.Width, bitmap.Height), ImageLockMode.ReadOnly, bitmap.PixelFormat);
            int stride = bmData.Stride;
            IntPtr Scan0 = bmData.Scan0;
            unsafe
            {
                byte* p = (byte*)(void*)Scan0;
                // Bytes of padding at the end of each scan line (assumes 3 bytes/pixel).
                int nOffset = stride - bitmap.Width * 3;
                for (int y = 0; y < bitmap.Height; ++y)
                {
                    for (int x = 0; x < bitmap.Width; ++x)
                    {
                        count++;
                        byte blue = p[0];
                        byte green = p[1];
                        byte red = p[2];
                        // Alpha forced to 0 so only the RGB bytes contribute.
                        int pixelValue = Color.FromArgb(0, red, green, blue).ToArgb();
                        total += pixelValue;
                        double avg = total / count;
                        totalVariance += Math.Pow(pixelValue - avg, 2);
                        stdDev = Math.Sqrt(totalVariance / count);
                        p += 3;
                    }
                    p += nOffset;
                }
            }
            bitmap.UnlockBits(bmData);
            return stdDev;
        }
public bool IsEmptyBitmap(Bitmap bitmap, Color backgroundColor)
{
BitmapData bmData = null;
try
{
bmData = bitmap.LockBits(new Rectangle(0, 0, bitmap.Width, bitmap.Height), ImageLockMode.ReadOnly, PixelFormat.Format32bppArgb /* bitmap.PixelFormat*/);
int stride = bmData.Stride;
IntPtr Scan0 = bmData.Scan0;
int backgroundColorValue = Color.FromArgb(
backgroundColor.A,
backgroundColor.R,
backgroundColor.G,
backgroundColor.B).ToArgb();
unsafe
{
byte* p = (byte*)(void*)Scan0;
int nOffset = stride - bitmap.Width * 4;
for (int y = 0; y < bitmap.Height; ++y)
{
for (int x = 0; x < bitmap.Width; ++x)
{
byte blue = p[0];
byte green = p[1];
byte red = p[2];
byte alpha = p[3];
if (alpha != 0) // Not transparent
{
int pixelValue = Color.FromArgb(alpha, red, green, blue).ToArgb();
if (pixelValue != backgroundColorValue)
return false;
}
p += 4;
}
p += nOffset;
}
}
}
finally
{
if (bitmap != null && bmData != null)
bitmap.UnlockBits(bmData);
}
return true;
}
#endregion
#region Compact Classes
public class CompactTilesIndexBuilder
{
private int[] _index = new int[128 * 128 * 2];
public CompactTilesIndexBuilder()
{
InitIndex();
}
private void InitIndex()
{
for (int i = 0; i < _index.Length; i++)
_index[i] = -1;
}
public void SetValue(int row, int col, int position, int length)
{
if (row < 0 || row > 128 || col < 0 || col > 128)
throw new ArgumentException("Compact Tile Index out of range");
int indexPosition = ((row * 128) + col) * 2;
if (indexPosition > _index.Length - 2)
throw new AggregateException("Index!!!");
_index[indexPosition] = position;
_index[indexPosition + 1] = length;
}
public void Save(string filename)
{
try { File.Delete(filename); }
catch { }
using (var stream = new FileStream(filename, FileMode.Create))
{
for (int i = 0; i < _index.Length; i++)
{
byte[] data = BitConverter.GetBytes(_index[i]);
stream.Write(data, 0, data.Length);
}
}
}
}
public class CompactTileIndex
{
public CompactTileIndex(string filename)
{
this.Filename = filename;
}
public string Filename { get; private set; }
public int TilePosition(int row, int col, out int tileLength)
{
if (row < 0 || row > 128 || col < 0 || col > 128)
throw new ArgumentException("Compact Tile Index out of range");
int indexPosition = ((row * 128) + col) * 8;
using (FileStream fs = File.Open(this.Filename, FileMode.Open, FileAccess.Read, FileShare.Read)) // new FileStream(this.Filename, FileMode.Open, FileAccess.Read))
{
byte[] data = new byte[8];
fs.Position = indexPosition;
fs.Read(data, 0, 8);
int position = BitConverter.ToInt32(data, 0);
tileLength = BitConverter.ToInt32(data, 4);
return position;
}
}
}
public class BoundingTiles
{
public BoundingTiles(string bounds)
{
string[] b = bounds.Split('|');
this.FromRow = int.Parse(b[0]);
this.ToRow = int.Parse(b[1]);
this.FromCol = int.Parse(b[2]);
this.ToCol = int.Parse(b[3]);
}
public int FromRow {get;private set;}
public int ToRow { get; private set; }
public int FromCol { get; private set; }
public int ToCol { get; private set; }
public bool Check(int row, int col)
{
return row >= FromRow && row <= ToRow && col >= FromCol && col <= ToCol;
}
public bool Check(int row, int col, int matrixWidth, int matrixHeight)
{
for (int c = col; c < col + matrixHeight; c++)
{
for (int r = row; r < row + matrixWidth; r++)
{
if (Check(r, c))
return true;
}
}
return false;
}
}
#endregion
#region Classes
        /// <summary>
        /// Case-insensitive parser for "key=value&amp;key2=value2" query strings.
        /// Both keys and values are lower-cased on parse; arguments without an
        /// "=" are stored with an empty value.
        /// </summary>
        public class QueryString
        {
            NameValueCollection nvc = new NameValueCollection();

            public QueryString(string queryString)
            {
                // Lower-case the whole string once, so lookups can just
                // lower-case the requested key/value.
                queryString = queryString.ToLower();
                foreach (string argument in queryString.Split('&'))
                {
                    int pos = argument.IndexOf("=");
                    if (pos > 0)
                        nvc.Add(argument.Substring(0, pos), argument.Substring(pos + 1));
                    else
                        nvc.Add(argument, String.Empty);
                }
            }

            /// <summary>Returns the (lower-cased) value for key, or null when absent.</summary>
            public string GetValue(string key)
            {
                return nvc[key.ToLower()];
            }

            /// <summary>Case-insensitive check that key maps exactly to val.</summary>
            public bool HasValue(string key, string val)
            {
                return nvc[key.ToLower()] == val.ToLower();
            }
        }
#endregion
private string TmsCapabilities(IServiceRequestContext context, TileServiceMetadata metadata, int srs)
{
IEnvelope box = metadata.GetEPSGEnvelope(srs);
if (box == null)
return String.Empty;
ISpatialReference sRef = SpatialReference.FromID("epsg:" + srs);
StringBuilder sb = new StringBuilder();
sb.Append("<?xml version=\"1.0\" encoding=\"UTF-8\"?>");
sb.Append("<TileMap version=\"1.0.0\" tilemapservice=\"" + context.ServiceRequest.OnlineResource + "\" >");
sb.Append("<Title>" + context.ServiceMap.Name + "</Title>");
sb.Append("<Abstract>gView Tile Cache</Abstract>");
sb.Append("<SRS>EPSG:" + srs + "</SRS>");
sb.Append("<BoundingBox minx=\"" + box.minx.ToString(_nhi) +
"\" miny=\"" + box.miny.ToString(_nhi) +
"\" maxx=\"" + box.maxx.ToString(_nhi) +
"\" maxy=\"" + box.maxy.ToString(_nhi) + "\" />");
sb.Append("<Origin x=\"" + box.minx.ToString(_nhi) +
"\" y=\"" + box.miny.ToString(_nhi) + "\" />");
sb.Append("<TileFormat width=\"" + metadata.TileWidth + "\" height=\"" + metadata.TileHeight + "\" mime-type=\"image/png\" extension=\"png\" />");
sb.Append("<TileSets>");
int level = 0;
foreach (double scale in metadata.Scales)
{
double res = (double)scale / (metadata.Dpi / 0.0254);
if (sRef.SpatialParameters.IsGeographic)
{
GeoUnitConverter converter = new GeoUnitConverter();
res = converter.Convert(res, GeoUnits.Meters, GeoUnits.DecimalDegrees);
}
sb.Append("<TileSet href=\"" + context.ServiceRequest.OnlineResource + "/" + level + "\" ");
sb.Append("units-per-pixel=\"" + res.ToString(_nhi) + "\" order=\"" + level + "\" />");
level++;
}
sb.Append("</TileSets>");
sb.Append("</TileMap>");
return sb.ToString();
}
}
}
| mit |
ilian1902/TelerikAcademy | C#Part1-Homework/04.Console - Input - Output - Homework/10.FibonacciNumbers/Properties/AssemblyInfo.cs | 1414 | using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("10.FibonacciNumbers")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("10.FibonacciNumbers")]
[assembly: AssemblyCopyright("Copyright © 2015")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("c355f273-cf72-4864-96a1-38d5079367df")]
// Version information for an assembly consists of the following four values:
//
// Major Version
// Minor Version
// Build Number
// Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
| mit |
ristorantino/ristorantino-vendor | Compras/Test/Case/Controller/PedidoMercaderiasControllerTest.php | 296 | <?php
App::uses('PedidoMercaderiasController', 'Compras.Controller');
/**
* PedidoMercaderiasController Test Case
*/
class PedidoMercaderiasControllerTest extends ControllerTestCase {

	/**
	 * Fixtures
	 *
	 * Fixture tables CakePHP loads before each test in this case. No test
	 * methods are defined yet — this class currently only wires up the
	 * fixture data for PedidoMercaderiasController.
	 *
	 * @var array
	 */
	public $fixtures = array(
		'plugin.compras.pedido_mercaderia'
	);

}
| mit |
sobeckley/Foreknown | App/src/hophacks/JHU/foreknown/ReadCSV.java | 1173 | package hophacks.JHU.foreknown;
import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.lang.Number;
public class ReadCSV {

    /**
     * Reads a CSV file of comma-separated numeric tokens and returns them
     * as a fixed-size float array (252 slots, unused slots stay 0.0f).
     *
     * Bug fix: the original never initialised {@code br} (the FileReader
     * line was commented out), so every call crashed with a
     * NullPointerException. The reader is now opened from the {@code file}
     * path argument.
     *
     * NOTE(review): as in the original, only the tokens of the LAST line
     * survive the read loop — multi-line files lose earlier lines. Confirm
     * whether that is intended before changing it.
     *
     * @param file path to the CSV file to read
     * @return floats parsed from the (last) line; zeros where no data
     */
    public float[] run(String file) {
        BufferedReader br = null;
        String line = "";
        String cvsSplitBy = ",";
        String[] data = new String[252];
        try {
            // Fix: actually open the file (was never assigned -> NPE).
            br = new BufferedReader(new FileReader(file));
            while ((line = br.readLine()) != null) {
                // use comma as separator
                data = line.split(cvsSplitBy);
                for (int i = 0; i < data.length; i++) {
                    if (data[i] == null) {
                        break;
                    }
                    System.out.println(data[i]);
                }
            }
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            if (br != null) {
                try {
                    br.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
        float[] dataFloat = new float[252];
        // Guard against lines with more than 252 fields (the original could
        // throw ArrayIndexOutOfBoundsException writing into dataFloat).
        int count = Math.min(data.length, dataFloat.length);
        for (int i = 0; i < count; i++) {
            if (data[i] == null) {
                break;
            }
            dataFloat[i] = Float.parseFloat(data[i]);
        }
        System.out.println("Done");
        return dataFloat;
    }
}
leobauza/microcosm | src/history.js | 7732 | import Action from './action'
import Emitter from './emitter'
import defaultUpdateStrategy from './default-update-strategy'
import { merge } from './utils'
import { BIRTH, START } from './lifecycle'
const DEFAULTS = {
maxHistory: 1,
batch: false,
updater: defaultUpdateStrategy
}
/**
* @fileoverview All Microcosms have a history. This history keeps
* track of outstanding actions, working with a Microcosm to determine
* the next application state as actions move through different
* states.
*/
class History extends Emitter {
  /**
   * @param {Object} [config] - Optional overrides for DEFAULTS
   *        (maxHistory, batch, updater).
   */
  constructor(config) {
    super()

    let options = merge(DEFAULTS, config)

    this.size = 0
    // Never retain fewer than one action
    this.limit = Math.max(1, options.maxHistory)
    // The updater decides when a queued release actually fires
    // (synchronously or batched) — see queueRelease/closeRelease.
    this.updater = options.updater(options)

    // Track whether a release is pending. This prevents .wait() from getting
    // stuck in limbo
    this.releasing = false

    this.release = () => this.closeRelease()

    this.begin()
  }

  /**
   * Set the head of the tree to a target action. This has the effect
   * of controlling time in a Microcosm's history.
   * @param {Action} action The new head of the tree
   */
  checkout(action) {
    let sharedRoot = this.sharedRoot(action) || this.head

    this.head = action || this.head

    // Each action has a "next" property that tells the history how to
    // move forward. Update that path back to the sharedRoot:
    let cursor = this.head
    while (cursor != sharedRoot) {
      let parent = cursor.parent

      parent.next = cursor

      cursor = parent
    }

    this.setSize()
    this.reconcile(sharedRoot)

    return this
  }

  /**
   * Toggle actions in bulk, then reconcile from the first action
   * @param {Action[]} - A list of actions to toggle
   */
  toggle(actions) {
    // Accept a single action or an array of them
    let list = [].concat(actions)

    // 'silently' defers reconciliation until the single call below
    list.forEach(action => action.toggle('silently'))

    this.reconcile(list[0])
  }

  /**
   * Convert the active branch of history into an array.
   */
  toArray() {
    return this.map(n => n)
  }

  /**
   * Map over the active branch, from root to head.
   * @param {Function} fn - Receives each action on the active branch
   * @param {*} scope - `this` value for fn
   * @returns {Array} results in root-to-head order
   */
  map(fn, scope) {
    let size = this.size
    let items = Array(size)
    let action = this.head

    // Walk backwards from the head, filling the array from the end so the
    // result reads root-to-head.
    while (size--) {
      items[size] = fn.call(scope, action)
      action = action.parent
    }

    return items
  }

  /**
   * Return a promise that represents the resolution of all actions in
   * the current branch. Rejects with the payload of the first rejected
   * action.
   * @returns {Promise}
   */
  wait() {
    let actions = this.toArray()

    return new Promise((resolve, reject) => {
      const checkStatus = () => {
        let done = actions.every(action => action.complete)
        let errors = actions.filter(action => action.is('reject'))

        if (done) {
          // Stop listening once settled so the listener does not leak
          this.off('release', checkStatus)

          if (errors.length) {
            reject(errors[0].payload)
          } else {
            resolve()
          }
        }
      }

      // If no release is pending, everything may already be complete —
      // check immediately so the promise is not stuck in limbo.
      if (this.releasing === false) {
        checkStatus()
      }

      this.on('release', checkStatus)
    })
  }

  /**
   * Chain off of wait(). Provides a promise interface
   * @returns {Promise}
   */
  then(pass, fail) {
    return this.wait().then(pass, fail)
  }

  /**
   * Setup the head and root action for a history. This effectively
   * starts or restarts history.
   */
  begin() {
    this.head = this.root = null
    this.append(START, 'resolve')
  }

  /**
   * Append a new action to the end of history
   * @param {Function|string} command
   * @param {string} [status]
   * @returns {Action} the new head
   */
  append(command, status) {
    let action = new Action(command, status)

    if (this.size > 0) {
      this.head.lead(action)
    } else {
      // Always have a parent node, no matter what
      let birth = new Action(BIRTH, 'resolve')
      birth.adopt(action)

      this.root = action
    }

    this.head = action
    this.size += 1

    this._emit('append', action)

    // Re-run reconciliation whenever the action's status changes
    action.on('change', this.reconcile, this)

    return this.head
  }

  /**
   * Remove an action from history, connecting adjacent actions
   * together to bridge the gap.
   * @param {Action} action - Action to remove from history
   */
  remove(action) {
    if (action.isDisconnected()) {
      return
    }

    // cache linking references and activeness
    let parent = action.parent
    let next = action.next
    let wasActive = this.isActive(action)

    this.clean(action)

    // if there are no more actions left, we're done
    if (this.size <= 0) {
      this.begin()
      return
    }

    // reset head/root references if necessary
    if (action === this.head) {
      next = this.head = parent
    } else if (action === this.root) {
      this.root = next
    }

    // reconcile history if action was in active history branch
    if (wasActive && !action.disabled) {
      this.reconcile(next)
    }
  }

  /**
   * The actual clean up operation that purges an action from both
   * history, and removes all snapshots within tracking repos.
   * @param {Action} action - Action to clean up
   */
  clean(action) {
    this.size -= 1

    this._emit('remove', action)

    action.remove()
  }

  /**
   * Starting with a given action, emit events such that repos can
   * dispatch actions to domains in a consistent order to build a new
   * state. This is how Microcosm updates state.
   * @param {Action} action
   */
  reconcile(action) {
    console.assert(this.head, 'History should always have a head node')
    console.assert(action, 'History should never reconcile ' + action)

    // Emit 'update' for every action from `action` forward to the head
    let focus = action
    while (focus) {
      this._emit('update', focus)

      if (focus === this.head) {
        break
      } else {
        focus = focus.next
      }
    }

    this.archive()

    this._emit('reconcile', action)

    this.queueRelease()
  }

  /**
   * Batch releases by "queuing" an update. See `closeRelease`.
   */
  queueRelease() {
    // Only one release may be in flight at a time
    if (this.releasing === false) {
      this.releasing = true
      this.updater(this.release)
    }
  }

  /**
   * Complete a release by emitting the "release" event. This function
   * is called by the updater for the given history. If batching is
   * enabled, it will be asynchronous.
   */
  closeRelease() {
    this.releasing = false
    this._emit('release')
  }

  /**
   * Instead of holding on to actions forever, Microcosm initiates an
   * archival process at the end of every reconciliation. If the
   * active branch of history is greater than the `limit` property,
   * signal that the action should be removed.
   */
  archive() {
    let size = this.size
    let root = this.root

    // Advance the root past complete actions while over the limit
    while (size > this.limit && root.complete) {
      size -= 1
      this._emit('remove', root.parent)
      root = root.next
    }

    // Detach everything behind the new root
    root.prune()

    this.root = root
    this.size = size
  }

  /**
   * Update the size of the tree by bubbling up from the head to the
   * root.
   */
  setSize() {
    let action = this.head
    let size = 1

    while (action !== this.root) {
      action = action.parent
      size += 1
    }

    this.size = size
  }

  /**
   * Determine if provided action is within active history branch
   * @param {Action} action
   * @returns {boolean}
   */
  isActive(action) {
    let cursor = action

    // Follow `next` links; only the active branch reaches the head
    while (cursor) {
      if (cursor === this.head) {
        return true
      }

      cursor = cursor.next
    }

    return false
  }

  /**
   * Starting with the provided action, navigate up the parent chain
   * until you find an action which is active. That action is the shared
   * root between the provided action and the current head.
   * Returns undefined when no ancestor is active (callers fall back to
   * the head).
   * @param {Action} action
   */
  sharedRoot(action) {
    let cursor = action

    while (cursor) {
      if (this.isActive(cursor)) {
        return cursor
      }

      cursor = cursor.parent
    }
  }
}
export default History
| mit |
Welvin/stingle | packages/Users/Users/Exceptions/UserDisabledException.class.php | 58 | <?php
/**
 * Raised when an operation is attempted for a user account that has been
 * disabled. Adds no state or behavior beyond UserException; it exists so
 * callers can catch this condition specifically.
 */
class UserDisabledException extends UserException{ } | mit |
sequelize/umzug | examples/node_modules/umzug/index.d.ts | 29 | export * from '../../../lib'
| mit |
devsunny/jbdstudio | src/main/java/com/asksunny/jbdstudio/JBDStudioCommand.java | 121 | package com.asksunny.jbdstudio;
/**
 * A runnable studio command. Extends {@link JBDStudioService} so that a
 * command can also be registered and looked up like any other service.
 */
public interface JBDStudioCommand extends JBDStudioService
{
    /**
     * Executes this command. Implementations define the actual behavior;
     * nothing is returned and no checked exceptions are declared.
     */
    public void execute();
}
| mit |
CACBridge/ChromeCAC | NativeApps/ChromeCAC.NET/MainWindow.xaml.cs | 2302 | using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Data;
using System.Windows.Documents;
using System.Windows.Input;
using System.Windows.Media;
using System.Windows.Media.Imaging;
using System.Windows.Navigation;
using System.Windows.Shapes;
namespace ChromeCAC
{
/// <summary>
/// Interaction logic for MainWindow.xaml
/// </summary>
/// <summary>
/// Interaction logic for MainWindow.xaml.
/// The window is never shown: this process only hosts the Chrome
/// native-messaging exchange — it waits for a signature request, asks the
/// user for consent, signs the data via the CAC, replies, and exits.
/// </summary>
public partial class MainWindow : Window
{
    public MainWindow()
    {
        // Just hide the main window
        InitializeComponent();
        Hide();

        // Handle all communication and signing logic
        HandleNativeMessages();

        // Close the application
        Close();
    }

    /// <summary>
    /// Blocks until a SignatureRequest arrives on the native-messaging
    /// channel, prompts the user, and (on consent) signs the payload and
    /// sends the result back to the extension.
    /// </summary>
    void HandleNativeMessages()
    {
        SignatureRequest r;

        // Poll stdin until a message deserializes as a SignatureRequest.
        while (true)
        {
            NativeMessage m = NativeMessage.Read();
            r = m.Get<SignatureRequest>();
            if (r != null)
                break;
            Thread.Sleep(25);
        }

        string data = r.data;
        //MessageBox.Show("[Debug] " + data);

        // Fix: prompt previously read "Do you sure you want to continue?"
        var result = MessageBox.Show("A website is attempting to sign data with your private key. Are you sure you want to continue?", "CACBridge", MessageBoxButton.YesNo);
        if (result == MessageBoxResult.Yes)
        {
            try
            {
                var sig = CAC.Sign(data);

                NativeMessage nativeResponse = new NativeMessage(sig);
                nativeResponse.Send();

                // An empty signature means the CAC declined or failed.
                if (!string.IsNullOrEmpty(sig.signature))
                {
                    MessageBox.Show("Data was successfully signed", "Success");
                }
                else
                {
                    MessageBox.Show("Something went wrong, failed to sign data.", "Error");
                }
                return;
            }
            catch
            {
                // Deliberate catch-all: any signing failure ends in a user
                // notification rather than a crash of the bridge process.
                MessageBox.Show("An error occurred. Data was not signed.", "Error");
            }
        }
    }
}
}
| mit |
TimGeyssens/MCFly | MCFly/Core/FieldTypes/Upload.cs | 1249 | using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
using Umbraco.Core;
namespace MCFly.Core.FieldTypes
{
/// <summary>
/// Field type that stores an uploaded file under
/// ~/App_Plugins/MCFly/uploads/{guid}/ and returns its relative URL.
/// </summary>
public class Upload : FieldType
{
    public Upload()
    {
        BackOfficeEditView = UIOMatic.Constants.FieldEditors.File;
        BackOfficeListView = "~/App_Plugins/MCFly/UIOMaticAddons/ListViewFields/file.html";
        SupportsPlaceholder = false;
    }

    /// <summary>
    /// Saves the posted file for this field (if any) into a fresh
    /// GUID-named folder and returns the file's relative URL, or
    /// string.Empty when nothing was uploaded.
    /// </summary>
    public override object Process(Form form, Field field, object value, ControllerContext controllerContext)
    {
        var files = controllerContext.HttpContext.Request.Files;
        var file = files[field.Alias];
        if (file != null && file.ContentLength > 0)
        {
            // Security fix: some browsers submit a full client-side path in
            // FileName; strip it so the name cannot escape the upload folder.
            var fileName = System.IO.Path.GetFileName(file.FileName);
            // Hoisted: the original called guid.ToString() three times.
            var folder = Guid.NewGuid().ToString();
            var path = "~/App_Plugins/MCFly/uploads/" + folder + "/";
            System.IO.Directory.CreateDirectory(Umbraco.Core.IO.IOHelper.MapPath(path));
            file.SaveAs(Umbraco.Core.IO.IOHelper.MapPath(path) + fileName);
            return "/App_Plugins/MCFly/uploads/" + folder + "/" + fileName;
        }
        return string.Empty;
    }
}
} | mit |
blond/ho-iter | lib/done.js | 63 | 'use strict';
// Shared, immutable "done" sentinel. Frozen so consumers cannot mutate the
// flag, and every require() of this module receives the same object.
module.exports = Object.freeze({ done: true });
| mit |
Breeze/breeze.server.java | breeze-hibernate/src/test/java/com/breeze/test/PredicateTest.java | 17537 | package com.breeze.test;
import java.math.BigDecimal;
import java.util.Date;
import java.util.List;
import java.util.Map;
import org.hibernate.SessionFactory;
import com.breeze.hib.HibernateMetadata;
import com.breeze.metadata.DataType;
import com.breeze.metadata.IEntityType;
import com.breeze.metadata.Metadata;
import com.breeze.query.AndOrPredicate;
import com.breeze.query.AnyAllPredicate;
import com.breeze.query.BinaryPredicate;
import com.breeze.query.Expression;
import com.breeze.query.FnExpression;
import com.breeze.query.LitExpression;
import com.breeze.query.Operator;
import com.breeze.query.Predicate;
import com.breeze.query.PropExpression;
import com.breeze.query.UnaryPredicate;
import com.breeze.util.JsonGson;
import junit.framework.TestCase;
// TODO: need nested property tests
/**
 * Unit tests for parsing Breeze predicate JSON into {@link Predicate} trees
 * and validating them against Hibernate-derived entity metadata. Each test
 * parses a JSON fragment, checks the resulting predicate structure, and
 * (where possible) validates expression typing against an entity type.
 */
public class PredicateTest extends TestCase {

    // Metadata built once per test from the shared Hibernate SessionFactory;
    // used to resolve resource names and property types during validate().
    private Metadata _metadata;

    protected void setUp() throws Exception {
        super.setUp();
        SessionFactory sf = StaticConfigurator.getSessionFactory();
        _metadata = new HibernateMetadata(sf);
        _metadata.build();
    }

    // Single-argument function expression, e.g. month(birthDate) > 3.
    public void testFunc1ArgPred() {
        String json = "{ 'month(birthDate)': { gt: 3}}";
        Map map = JsonGson.fromJson(json);
        Predicate pred = Predicate.predicateFromMap(map);
        assertTrue(pred != null);
        assertTrue(pred instanceof BinaryPredicate);
        BinaryPredicate bpred = (BinaryPredicate) pred;
        IEntityType et = _metadata.getEntityTypeForResourceName("Employees");
        pred.validate(et);
        FnExpression expr1 = (FnExpression) bpred.getExpr1();
        assert(expr1.getFnName().equals("month"));
        List<Expression> args = expr1.getExpressions();
        assertTrue(args.size() == 1);
        PropExpression arg1 = (PropExpression) args.get(0);
        assertTrue(arg1.getPropertyPath().equals("birthDate"));
    }

    // Multi-argument function; note "substring" normalizes to "substr".
    public void testFuncNArgsPred() {
        String json = "{ 'substring(lastName, 1,3)': { gt: 'ABC'}}";
        Map map = JsonGson.fromJson(json);
        Predicate pred = Predicate.predicateFromMap(map);
        assertTrue(pred != null);
        assertTrue(pred instanceof BinaryPredicate);
        BinaryPredicate bpred = (BinaryPredicate) pred;
        IEntityType et = _metadata.getEntityTypeForResourceName("Employees");
        pred.validate(et);
        FnExpression expr1 = (FnExpression) bpred.getExpr1();
        assertTrue(expr1.getFnName().equals("substr"));
        List<Expression> args = expr1.getExpressions();
        assertTrue(args.size() == 3);
        PropExpression arg1 = (PropExpression) args.get(0);
        assertTrue(arg1.getPropertyPath().equals("lastName"));
    }

    // `prop: null` becomes an Equals comparison against a null literal.
    public void testBinaryPredNull() {
        String pJson = "{ shipName: null }";
        Map map = JsonGson.fromJson(pJson);
        Predicate pred = Predicate.predicateFromMap(map);
        assertTrue(pred != null);
        assertTrue(pred instanceof BinaryPredicate);
        BinaryPredicate bpred = (BinaryPredicate) pred;
        assertTrue(bpred.getOperator() == Operator.Equals);
        assertTrue(bpred.getExpr1Source().equals("shipName"));
        assertTrue(bpred.getExpr2Source() == null);
        IEntityType et = _metadata.getEntityTypeForResourceName("Orders");
        pred.validate(et);
        PropExpression expr1 = (PropExpression) bpred.getExpr1();
        assertTrue(expr1.getPropertyPath().equals("shipName"));
        LitExpression expr2 = (LitExpression) bpred.getExpr2();
        assertTrue(expr2.getDataType() == DataType.String);
        assertTrue(expr2.getValue() == null);
    }

    // JSON numbers arrive as doubles; validation coerces to the property's
    // declared type (freight -> Decimal).
    public void testBinaryPredDouble() {
        String pJson = "{ freight: { '>' : 100}}";
        Map map = JsonGson.fromJson(pJson);
        Predicate pred = Predicate.predicateFromMap(map);
        assertTrue(pred != null);
        assertTrue(pred instanceof BinaryPredicate);
        BinaryPredicate bpred = (BinaryPredicate) pred;
        assertTrue(bpred.getOperator() == Operator.GreaterThan);
        assertTrue(bpred.getExpr1Source().equals("freight"));
        assertTrue(bpred.getExpr2Source().equals(100.0));
        IEntityType et = _metadata.getEntityTypeForResourceName("Orders");
        pred.validate(et);
        PropExpression expr1 = (PropExpression) bpred.getExpr1();
        assertTrue(expr1.getPropertyPath().equals("freight"));
        LitExpression expr2 = (LitExpression) bpred.getExpr2();
        assertTrue(expr2.getDataType() == DataType.Decimal);
        assertTrue(expr2.getValue().equals(BigDecimal.valueOf(100.0)));
    }

    public void testBinaryPredString() {
        String pJson = "{ lastName: { 'startsWith' : 'S'}}";
        Map map = JsonGson.fromJson(pJson);
        Predicate pred = Predicate.predicateFromMap(map);
        assertTrue(pred != null);
        assertTrue(pred instanceof BinaryPredicate);
        BinaryPredicate bpred = (BinaryPredicate) pred;
        assertTrue(bpred.getOperator() == Operator.StartsWith);
        assertTrue(bpred.getExpr1Source().equals("lastName"));
        assertTrue(bpred.getExpr2Source().equals("S"));
        IEntityType et = _metadata.getEntityTypeForResourceName("Employees");
        pred.validate(et);
        PropExpression expr1 = (PropExpression) bpred.getExpr1();
        assertTrue(expr1.getPropertyPath().equals("lastName"));
        LitExpression expr2 = (LitExpression) bpred.getExpr2();
        assertTrue(expr2.getDataType() == DataType.String);
        assertTrue(expr2.getValue().equals("S"));
    }

    public void testBinaryPredBoolean() {
        // TODO: can't validate this because 'discontinued' property is no longer on order.
        String pJson = "{ discontinued: true }";
        Map map = JsonGson.fromJson(pJson);
        Predicate pred = Predicate.predicateFromMap(map);
        assertTrue(pred != null);
        assertTrue(pred instanceof BinaryPredicate);
        BinaryPredicate bpred = (BinaryPredicate) pred;
        assertTrue(bpred.getOperator() == Operator.Equals);
        assertTrue(bpred.getExpr1Source().equals("discontinued"));
        assertTrue(bpred.getExpr2Source().equals(true));
    }

    // A single-quote literal must survive parsing and validation intact.
    public void testBinaryPredStringQuote() {
        String pJson = "{ 'companyName': { 'contains': \"'\" } }";
        Map map = JsonGson.fromJson(pJson);
        Predicate pred = Predicate.predicateFromMap(map);
        assertTrue(pred != null);
        assertTrue(pred instanceof BinaryPredicate);
        BinaryPredicate bpred = (BinaryPredicate) pred;
        assertTrue(bpred.getOperator() == Operator.Contains);
        assertTrue(bpred.getExpr1Source().equals("companyName"));
        assertTrue(bpred.getExpr2Source().equals("'"));
        IEntityType et = _metadata.getEntityTypeForResourceName("Customers");
        pred.validate(et);
        PropExpression expr1 = (PropExpression) bpred.getExpr1();
        assertTrue(expr1.getPropertyPath().equals("companyName"));
        LitExpression expr2 = (LitExpression) bpred.getExpr2();
        assertTrue(expr2.getDataType() == DataType.String);
        assertTrue(expr2.getValue().equals("'"));
    }

    // Explicit { value, dataType } literal form; uses the deprecated
    // Date(int,int,int) constructor to build the expected value.
    @SuppressWarnings("deprecation")
    public void testBinaryExplicitDate() {
        String pJson = "{ shippedDate: { value: '2015-02-09T00:00:00', dataType: 'DateTime' }}";
        Map map = JsonGson.fromJson(pJson);
        Predicate pred = Predicate.predicateFromMap(map);
        assertTrue(pred != null);
        assertTrue(pred instanceof BinaryPredicate);
        BinaryPredicate bpred = (BinaryPredicate) pred;
        assertTrue(bpred.getOperator() == Operator.Equals);
        assertTrue(bpred.getExpr1Source().equals("shippedDate"));
        assertTrue(bpred.getExpr2Source() instanceof Map);
        Map expr2Source = (Map) bpred.getExpr2Source();
        assertTrue(expr2Source.get("dataType").equals("DateTime"));
        IEntityType et = _metadata.getEntityTypeForResourceName("Orders");
        pred.validate(et);
        PropExpression expr1 = (PropExpression) bpred.getExpr1();
        assertTrue(expr1.getPropertyPath().equals("shippedDate"));
        LitExpression expr2 = (LitExpression) bpred.getExpr2();
        assertTrue(expr2.getDataType() == DataType.DateTime);
        assertTrue(expr2.getValue().equals(new Date(115,1,9))); // wierd rules: yy - 1900, mm (0-11), dd (1-31)
    }

    // Explicit { value: ... } form forces literal treatment of a string that
    // would otherwise look like a property name.
    public void testBinaryExplicit2() {
        String pJson = "{ 'lastName': { 'startsWith': { value: 'firstName' } } }";
        Map map = JsonGson.fromJson(pJson);
        Predicate pred = Predicate.predicateFromMap(map);
        assertTrue(pred != null);
        assertTrue(pred instanceof BinaryPredicate);
        BinaryPredicate bpred = (BinaryPredicate) pred;
        assertTrue(bpred.getOperator() == Operator.StartsWith);
        assertTrue(bpred.getExpr1Source().equals("lastName"));
        assertTrue(bpred.getExpr2Source() instanceof Map);
        Map expr2Source = (Map) bpred.getExpr2Source();
        assertTrue(expr2Source.get("value").equals("firstName"));
        IEntityType et = _metadata.getEntityTypeForResourceName("Employees");
        pred.validate(et);
        PropExpression expr1 = (PropExpression) bpred.getExpr1();
        assertTrue(expr1.getPropertyPath().equals("lastName"));
        LitExpression expr2 = (LitExpression) bpred.getExpr2();
        assertTrue(expr2.getDataType() == DataType.String);
        assertTrue(expr2.getValue().equals("firstName"));
    }

    // Explicit `and: [...]` array form.
    public void testExplicitAnd() {
        String pJson = "{ and: [ { freight: { gt: 100} }, { shipCity: { startsWith: 'S'} } ] }";
        Map map = JsonGson.fromJson(pJson);
        Predicate pred = Predicate.predicateFromMap(map);
        assertTrue(pred != null);
        assertTrue(pred instanceof AndOrPredicate);
        AndOrPredicate aopred = (AndOrPredicate) pred;
        assertTrue(aopred.getOperator() == Operator.And);
        assertTrue(aopred.getPredicates().size() == 2);
        List<Predicate> preds = aopred.getPredicates();
        Predicate pred1 = preds.get(0);
        assertTrue(pred1 instanceof BinaryPredicate);
        BinaryPredicate bpred1 = (BinaryPredicate) pred1;
        assertTrue(bpred1.getOperator() == Operator.GreaterThan);
        assertTrue(bpred1.getExpr1Source().equals("freight"));
        assertTrue(bpred1.getExpr2Source().equals(100.0));
        Predicate pred2 = preds.get(1);
        assertTrue(pred2 instanceof BinaryPredicate);
        BinaryPredicate bpred2 = (BinaryPredicate) pred2;
        assertTrue(bpred2.getOperator() == Operator.StartsWith);
        assertTrue(bpred2.getExpr1Source().equals("shipCity"));
        assertTrue(bpred2.getExpr2Source().equals("S"));
        IEntityType et = _metadata.getEntityTypeForResourceName("Orders");
        pred.validate(et);
        PropExpression expr1 = (PropExpression) bpred1.getExpr1();
        assertTrue(expr1.getPropertyPath().equals("freight"));
        LitExpression expr2 = (LitExpression) bpred1.getExpr2();
        assertTrue(expr2.getDataType() == DataType.Decimal);
        assertTrue(expr2.getValue().equals(BigDecimal.valueOf(100.0)));
        PropExpression expr1b = (PropExpression) bpred2.getExpr1();
        assertTrue(expr1b.getPropertyPath().equals("shipCity"));
        LitExpression expr2b = (LitExpression) bpred2.getExpr2();
        assertTrue(expr2b.getDataType() == DataType.String);
        assertTrue(expr2b.getValue().equals("S"));
    }

    // Two operators under one property imply an AND of two comparisons.
    public void testImplicitAnd() {
        String pJson = "{ freight: { '>' : 100, 'lt': 200 }}";
        Map map = JsonGson.fromJson(pJson);
        Predicate pred = Predicate.predicateFromMap(map);
        assertTrue(pred != null);
        assertTrue(pred instanceof AndOrPredicate);
        AndOrPredicate aopred = (AndOrPredicate) pred;
        assertTrue(aopred.getOperator() == Operator.And);
        assertTrue(aopred.getPredicates().size() == 2);
        List<Predicate> preds = aopred.getPredicates();
        Predicate pred1 = preds.get(0);
        assertTrue(pred1 instanceof BinaryPredicate);
        BinaryPredicate bpred = (BinaryPredicate) pred1;
        assertTrue(bpred.getOperator() == Operator.GreaterThan);
        assertTrue(bpred.getExpr1Source().equals("freight"));
        assertTrue(bpred.getExpr2Source().equals(100.0));
        Predicate pred2 = preds.get(1);
        assertTrue(pred2 instanceof BinaryPredicate);
        bpred = (BinaryPredicate) pred2;
        assertTrue(bpred.getOperator() == Operator.LessThan);
        assertTrue(bpred.getExpr1Source().equals("freight"));
        assertTrue(bpred.getExpr2Source().equals(200.0));
        IEntityType et = _metadata.getEntityTypeForResourceName("Orders");
        pred.validate(et);
    }

    // Multiple sibling properties also imply an AND (three-way here).
    public void testImplicitAnd3Way() {
        String pJson = "{ freight: { '>': 100}, rowVersion: { lt: 10}, shippedDate: '2015-02-09T00:00:00' }";
        Map map = JsonGson.fromJson(pJson);
        Predicate pred = Predicate.predicateFromMap(map);
        assertTrue(pred != null);
        assertTrue(pred instanceof AndOrPredicate);
        AndOrPredicate aopred = (AndOrPredicate) pred;
        assertTrue(aopred.getOperator() == Operator.And);
        assertTrue(aopred.getPredicates().size() == 3);
        List<Predicate> preds = aopred.getPredicates();
        Predicate pred1 = preds.get(0);
        assertTrue(pred1 instanceof BinaryPredicate);
        BinaryPredicate bpred = (BinaryPredicate) pred1;
        assertTrue(bpred.getOperator() == Operator.GreaterThan);
        assertTrue(bpred.getExpr1Source().equals("freight"));
        assertTrue(bpred.getExpr2Source().equals(100.0));
        Predicate pred2 = preds.get(1);
        assertTrue(pred2 instanceof BinaryPredicate);
        bpred = (BinaryPredicate) pred2;
        assertTrue(bpred.getOperator() == Operator.LessThan);
        assertTrue(bpred.getExpr1Source().equals("rowVersion"));
        assertTrue(bpred.getExpr2Source().equals(10.0));
        Predicate pred3 = preds.get(2);
        assertTrue(pred3 instanceof BinaryPredicate);
        bpred = (BinaryPredicate) pred3;
        assertTrue(bpred.getOperator() == Operator.Equals);
        assertTrue(bpred.getExpr1Source().equals("shippedDate"));
        assertTrue(bpred.getExpr2Source().equals("2015-02-09T00:00:00"));
        IEntityType et = _metadata.getEntityTypeForResourceName("Orders");
        pred.validate(et);
    }

    public void testNot() {
        String pJson = "{ not: { freight: { gt: 100}}}";
        Map map = JsonGson.fromJson(pJson);
        Predicate pred = Predicate.predicateFromMap(map);
        assertTrue(pred != null);
        assertTrue(pred instanceof UnaryPredicate);
        UnaryPredicate upred = (UnaryPredicate) pred;
        assertTrue(upred.getOperator() == Operator.Not);
        Predicate basePred = upred.getPredicate();
        assertTrue(basePred instanceof BinaryPredicate);
        BinaryPredicate bpred = (BinaryPredicate) basePred;
        assertTrue(bpred.getOperator() == Operator.GreaterThan);
        assertTrue(bpred.getExpr1Source().equals("freight"));
        assertTrue(bpred.getExpr2Source().equals(100.0));
        IEntityType et = _metadata.getEntityTypeForResourceName("Orders");
        pred.validate(et);
    }

    // `any` over a navigation collection (Customers.orders).
    public void testAny() {
        String pJson = "{ orders: { any: {freight: { '>': 950 } } } }";
        Map map = JsonGson.fromJson(pJson);
        Predicate pred = Predicate.predicateFromMap(map);
        assertTrue(pred != null);
        assertTrue(pred instanceof AnyAllPredicate);
        AnyAllPredicate aapred = (AnyAllPredicate) pred;
        assertTrue(aapred.getOperator() == Operator.Any);
        assertTrue(aapred.getExprSource().equals("orders"));
        Predicate basePred = aapred.getPredicate();
        assertTrue(basePred instanceof BinaryPredicate);
        BinaryPredicate bpred = (BinaryPredicate) basePred;
        assertTrue(bpred.getOperator() == Operator.GreaterThan);
        assertTrue(bpred.getExpr1Source().equals("freight"));
        assertTrue(bpred.getExpr2Source().equals(950.0));
        IEntityType et = _metadata.getEntityTypeForResourceName("Customers");
        pred.validate(et);
        PropExpression propExpr = (PropExpression) aapred.getExpr();
        assertTrue(propExpr.getPropertyPath().equals("orders"));
        assertTrue(propExpr.getProperty().getName().equals("orders"));
    }

    // Structure-only test (no validate call): AND combining a string
    // comparison with an `all` collection predicate.
    public void testAndWithAll() {
        String pJson = "{ and: [ { companyName: { contains: 'ar' } }, { orders: { all: { freight: 10 } } } ] }";
        Map map = JsonGson.fromJson(pJson);
        Predicate pred = Predicate.predicateFromMap(map);
        assertTrue(pred != null);
        assertTrue(pred instanceof AndOrPredicate);
        AndOrPredicate aopred = (AndOrPredicate) pred;
        assertTrue(aopred.getOperator() == Operator.And);
        assertTrue(aopred.getPredicates().size() == 2);
        List<Predicate> preds = aopred.getPredicates();
        Predicate pred1 = preds.get(0);
        assertTrue(pred1 instanceof BinaryPredicate);
        BinaryPredicate bpred = (BinaryPredicate) pred1;
        assertTrue(bpred.getOperator() == Operator.Contains);
        assertTrue(bpred.getExpr1Source().equals("companyName"));
        assertTrue(bpred.getExpr2Source().equals("ar"));
        Predicate pred2 = preds.get(1);
        assertTrue(pred2 instanceof AnyAllPredicate);
        AnyAllPredicate aapred = (AnyAllPredicate) pred2;
        assertTrue(aapred.getOperator() == Operator.All);
        assertTrue(aapred.getExprSource().equals("orders"));
        Predicate basePred = aapred.getPredicate();
        assertTrue(basePred instanceof BinaryPredicate);
        bpred = (BinaryPredicate) basePred;
        assertTrue(bpred.getOperator() == Operator.Equals);
        assertTrue(bpred.getExpr1Source().equals("freight"));
        assertTrue(bpred.getExpr2Source().equals(10.0));
    }

    // Function call with an explicitly quoted property argument.
    public void testBinaryPredFn() {
        String pJson = "{ 'toLower(\"shipName\")': 'abc' }";
        Map map = JsonGson.fromJson(pJson);
        Predicate pred = Predicate.predicateFromMap(map);
        assertTrue(pred != null);
        assertTrue(pred instanceof BinaryPredicate);
        BinaryPredicate bpred = (BinaryPredicate) pred;
        assertTrue(bpred.getOperator() == Operator.Equals);
        assertTrue(bpred.getExpr1Source().equals("toLower(\"shipName\")"));
        assertTrue(bpred.getExpr2Source().equals("abc"));
        IEntityType et = _metadata.getEntityTypeForResourceName("Orders");
        pred.validate(et);
        FnExpression expr1 = (FnExpression) bpred.getExpr1();
        assertTrue(expr1.getFnName().equals("toLower"));
        List<Expression> argExprs = expr1.getExpressions();
        PropExpression argExpr1 = (PropExpression) argExprs.get(0);
        assertTrue(argExpr1.getDataType() == DataType.String);
        assertTrue(argExpr1.getPropertyPath().equals("shipName"));
    }
}
| mit |
TeamSPoon/logicmoo_workspace | docker/rootfs/usr/local/lib/swipl/doc/packages/examples/protobufs/some_message.py | 1251 | # A simple message for testing interoperability.
# More tests are in the interop directory.
import some_message_pb2 # generated by: protoc some_message.proto --python_out=.
# Build one message imperatively, field by field.
msg = some_message_pb2.SomeMessage()
msg.first = 100
msg.second = "abcd"
msg.third.extend(["foo", "bar"])            # repeated string field
msg.fourth = True
msg.fifth.value = -666                      # nested singular message
msg.fifth.text = "negative 666"
# Repeated nested messages.
msg.sixth.extend([msg.NestedMessage(value=1234, text="onetwothreefour"),
                  msg.NestedMessage(value=2222, text="four twos")])
msg.seventh.extend([1, 2, 3, 4])
msg.eighth.extend([100, -200, 1000])
# Dump the text form and the raw wire bytes for inspection.
print(msg)
print([i for i in msg.SerializeToString()])
# Persist the wire encoding so other-language interop tests can read it.
with open("some_message.wire", "wb") as wire:
    wire.write(msg.SerializeToString())
# Build the same message again via keyword-argument construction.
msg2 = some_message_pb2.SomeMessage(
    first=100,
    second="abcd",
    third=["foo", "bar"],
    fourth=True,
    fifth=some_message_pb2.SomeMessage.NestedMessage(value=-666, text="negative 666"),
    sixth=[some_message_pb2.SomeMessage.NestedMessage(value=1234, text="onetwothreefour"),
           some_message_pb2.SomeMessage.NestedMessage(value=2222, text="four twos")],
    seventh=[1, 2, 3, 4],
    eighth=[100, -200, 1000],
)
print('=========')
print(msg2)
# Both construction styles must produce byte-identical wire encodings.
assert msg.SerializeToString() == msg2.SerializeToString()
| mit |
robpaveza/dbcexplorer | src/DbcReader/ChatProfanityRecord.cs | 398 | using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace DbcReader
{
/// <summary>
/// One row of the ChatProfanity DBC table. The DbcRecordPosition attribute
/// maps each member to its zero-based column in the record.
/// </summary>
public class ChatProfanityRecord
{
    // Column 0: record primary key.
    [DbcRecordPosition(0)]
    public int ID { get; set; }

    // Column 1: reference into the DBC string block holding the word.
    [DbcRecordPosition(1)]
    public DbcStringReference DirtyWord;

    // Column 2: language identifier — presumably an index into the game's
    // Languages table; TODO confirm against the DBC schema.
    [DbcRecordPosition(2)]
    public int LanguageID;
}
}
| mit |
Eelco81/server-test-project | Lib/Http/Src/HttpHeaderTester.cpp | 913 |
#include "gmock/gmock.h"
#include "HttpHeader.h"
// The two-argument constructor should store the key and value verbatim
// (key is already lowercase here, so no normalization is visible).
TEST (HttpHeaderTester, Constructor) {
    HTTP::Header header ("my-key", "my-value");
    EXPECT_EQ (std::string ("my-key"), header.GetKey ());
    EXPECT_EQ (std::string ("my-value"), header.GetValue ());
}
// SetKey followed by GetKey round-trips an already-lowercase key.
TEST (HttpHeaderTester, GetSetKey) {
    HTTP::Header header;
    header.SetKey ("my-key");
    EXPECT_EQ (std::string ("my-key"), header.GetKey ());
}
// Keys must be lowercased regardless of how they are supplied:
// via the constructor or via SetKey.
TEST (HttpHeaderTester, KeysAreAlwaysLowerCase) {
    {
        HTTP::Header header ("My-KeY-1234", "my-value");
        EXPECT_EQ (std::string ("my-key-1234"), header.GetKey ());
    }
    {
        HTTP::Header header;
        header.SetKey ("My-KeY-1234");
        EXPECT_EQ (std::string ("my-key-1234"), header.GetKey ());
    }
}
// Values round-trip unchanged (no case normalization is applied to values).
TEST (HttpHeaderTester, GetSetValue) {
    HTTP::Header header;
    header.SetValue ("my-value");
    EXPECT_EQ (std::string ("my-value"), header.GetValue ());
}
| mit |
segun-adeleye/converter-money | spec/spec_helper.rb | 269 | require "bundler/setup"
require "converter/money"
RSpec.configure do |config|
  # Enable flags like --only-failures and --next-failure
  config.example_status_persistence_file_path = ".rspec_status"

  # Only allow the modern `expect(...)` assertion syntax; the legacy
  # `should` syntax is disabled.
  config.expect_with :rspec do |c|
    c.syntax = :expect
  end
end
| mit |
junwang1216/btgk-edms | edms-admin/src/main/java/com/admin/controller/AdminCenterController.java | 295 | package com.admin.controller;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
/**
 * Admin-center page controller.
 *
 * Maps requests under "/admin/center". All behavior currently comes from
 * {@link BaseController}; no handler methods are defined here yet.
 *
 * Created by wangjun on 2017/5/1.
 */
@Controller
@RequestMapping("/admin/center")
public class AdminCenterController extends BaseController {
}
| mit |
dragomirevgeniev/HackBulgaria | Programming101-CSharp/week06/2.Thursday/Filters(selectors)/Properties/AssemblyInfo.cs | 1412 | using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("Filters(selectors)")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("Filters(selectors)")]
[assembly: AssemblyCopyright("Copyright © 2016")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("fd4498dc-8132-46ac-aecc-0a94e6e572e4")]
// Version information for an assembly consists of the following four values:
//
// Major Version
// Minor Version
// Build Number
// Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
| mit |
mrchess/oracle-sage | test/setup/oracle.js | 4326 | /**
* Used to help set up testing environments.
*/
var oracledb = require('oracledb');
oracledb.stmtCacheSize = 0; // setting this to 0 seems to make import go faster
var Promise = require("bluebird");
var _ = require('lodash');
var fs = require("fs");
var OracleConnector;
var OracleConnector = (function() {
  /**
   * Thin promise-based wrapper around a single oracledb connection, used
   * to (re)build the SAGE_TEST schema for tests.
   * @param {Object} [config] - currently unused; reserved for overrides.
   */
  function OracleConnector(config) {
    config = config || {};
    _.defaults(this, {
      _credentials: {
        user: "SAGE_TEST",
        password: "oracle",
        connectString: "127.0.0.1:1521/orcl",
        userSchema: "SAGE_TEST"
      },
      connection: null
    });
  }

  /**
   * Resolves with an open connection, creating one on first use and
   * caching it afterwards.
   * NOTE(review): connection errors are only logged; the promise still
   * resolves (with an undefined connection). Kept as-is for backward
   * compatibility with existing test-setup callers.
   */
  OracleConnector.prototype.connect = function(config) {
    var self = this;
    if (self.connection) {
      // Connection already exists. Return it.
      return Promise.resolve(self.connection);
    }
    // No active connection. Make one.
    return new Promise(function(fulfill, reject) {
      oracledb.getConnection(self._credentials, function(err, connection) {
        if (err) {
          console.log(err);
        }
        self.connection = connection;
        fulfill(connection);
      });
    });
  };

  /**
   * Releases the cached connection (if any). Resolves with true once the
   * connection is closed, or immediately when there is nothing to release.
   */
  OracleConnector.prototype.disconnect = function() {
    var self = this;
    if (!self.connection) {
      return Promise.resolve(true);
    }
    return new Promise(function(fulfill, reject) {
      self.connection.release(function(err) {
        if (err) {
          console.error(err.message);
          reject(err);
        } else {
          self.connection = null;
          fulfill(true);
        }
      });
    });
  };

  /**
   * Executes an array of SQL statements sequentially (recursively),
   * consuming the array as it goes.
   * Fix: the statements were previously echoed unconditionally AND again
   * when verbose was set; now they are only echoed in verbose mode.
   * @param {string[]} statements
   * @param {Object} [config] - pass { verbose: true } to echo statements.
   */
  OracleConnector.prototype.performStatements = function(statements, config) {
    config = config || {};
    var self = this;
    return new Promise(function(fulfill, reject) {
      var statement = statements.shift();
      self.connection.execute(statement, [], { autoCommit: true }, function(err, result) {
        if (config.verbose === true) { console.log(statement); }
        if (err) {
          // Errors are reported but do not abort the run (e.g. DROPs are
          // expected to fail on a fresh database).
          console.log(err);
        }
        if (statements.length) {
          self.performStatements(statements, config).then(function() {
            fulfill();
          });
        } else {
          fulfill();
        }
      });
    });
  };

  /**
   * Reads the SQL file at config.path, splits it into statements and runs
   * them all against the cached connection.
   * Options:
   *   - verbose:   echo each statement as it runs
   *   - dropsOnly: only execute the DROP statements in the file
   * Fix: removed the dead `var schema = fs.readFile(...)` assignment
   * (the async fs.readFile returns undefined).
   */
  OracleConnector.prototype.runSQL = function(config) {
    config = config || {};
    if (!config.path) { throw("No SQL provided"); }
    var self = this;
    return new Promise(function(fulfill, reject) {
      fs.readFile(config.path, 'utf8', function(err, data) {
        if (err) {
          console.log(err);
        }
        // Split on ';' and drop the trailing empty chunk, if any.
        var statements = data.split(';');
        if (_.last(statements).trim() === "") {
          statements.pop();
        }
        // Re-assemble triggers: splitting on ';' breaks their "END;" off.
        var fixed = [];
        _.each(statements, function(statement) {
          if (statement.trim().indexOf('CREATE OR REPLACE TRIGGER') === 0) {
            fixed.push(statement + "; END;");
          } else if (statement.trim().indexOf("END") === 0) {
            // leftover fragment of a trigger body - re-attached above
          } else {
            fixed.push(statement);
          }
        });
        statements = fixed;
        // If dropsOnly is set, only perform the DROP statements.
        if (config.dropsOnly) {
          statements = _.filter(statements, function(statement) {
            return statement.indexOf("DROP") === 0;
          });
        }
        // Execute each statement, recursively.
        self.performStatements(statements, { verbose: config.verbose }).then(function() {
          fulfill();
        });
      });
    });
  };

  return OracleConnector;
})();
// Start up Oracle. `var` added: the bare assignment leaked `o` into the
// global scope (and would throw under strict mode).
var o = new OracleConnector();
module.exports = o;
naoto/api-agent | spec/api/agent/json_spec.rb | 257 | #-*- encoding: utf-8
require 'spec_helper'
require './lib/api/agent'

# Exercises JSON.open - presumably an extension added by api/agent
# (the stdlib JSON module has no `open`) that fetches a URL and parses
# the body as JSON.
describe JSON do
  describe 'open url' do
    # NOTE(review): hits the live echo.jsontest.com service, so this
    # spec depends on network availability.
    it 'success' do
      json = JSON.open("http://echo.jsontest.com/key/value")
      expect(json).to eq({"key" => "value"})
    end
  end
end
| mit |
karim/adila | database/src/main/java/adila/db/hwu8655_huawei20ideos20y20200.java | 226 | // This file is automatically generated.
package adila.db;
/*
 * Huawei
 *
 * DEVICE: hwu8655
 * MODEL: HUAWEI IDEOS Y 200
 */
// Auto-generated device record. DATA appears to be a '|'-separated record
// (brand followed by two unpopulated fields) - the exact field meanings are
// defined by the adila.db consumer, not here.
final class hwu8655_huawei20ideos20y20200 {
    public static final String DATA = "Huawei||";
}
| mit |
swaiing/studydeck | root/app/controllers/deck_tags_controller.php | 120 | <?php
/**
 * CakePHP controller for the DeckTags model.
 *
 * No custom actions are defined; behaviour comes from AppController.
 * The commented-out $scaffold can be re-enabled for auto-generated
 * CRUD pages.
 */
class DeckTagsController extends AppController{
    var $name = 'DeckTags';
    //var $scaffold;
}
?> | mit |
wangi4myself/myFirstReactJs | node_modules/antd/lib/form/Form.js | 6103 | 'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
exports["default"] = exports.FormComponent = undefined;
var _extends2 = require('babel-runtime/helpers/extends');
var _extends3 = _interopRequireDefault(_extends2);
var _defineProperty2 = require('babel-runtime/helpers/defineProperty');
var _defineProperty3 = _interopRequireDefault(_defineProperty2);
var _classCallCheck2 = require('babel-runtime/helpers/classCallCheck');
var _classCallCheck3 = _interopRequireDefault(_classCallCheck2);
var _possibleConstructorReturn2 = require('babel-runtime/helpers/possibleConstructorReturn');
var _possibleConstructorReturn3 = _interopRequireDefault(_possibleConstructorReturn2);
var _inherits2 = require('babel-runtime/helpers/inherits');
var _inherits3 = _interopRequireDefault(_inherits2);
var _react = require('react');
var _react2 = _interopRequireDefault(_react);
var _classnames = require('classnames');
var _classnames2 = _interopRequireDefault(_classnames);
var _reactAddonsPureRenderMixin = require('react-addons-pure-render-mixin');
var _reactAddonsPureRenderMixin2 = _interopRequireDefault(_reactAddonsPureRenderMixin);
var _omit = require('omit.js');
var _omit2 = _interopRequireDefault(_omit);
var _warning = require('warning');
var _warning2 = _interopRequireDefault(_warning);
var _objectAssign = require('object-assign');
var _objectAssign2 = _interopRequireDefault(_objectAssign);
var _FormItem = require('./FormItem');
var _FormItem2 = _interopRequireDefault(_FormItem);
var _createDOMForm = require('rc-form/lib/createDOMForm');
var _createDOMForm2 = _interopRequireDefault(_createDOMForm);
var _constants = require('./constants');
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
var FormComponent = exports.FormComponent = function (_React$Component) {
(0, _inherits3["default"])(FormComponent, _React$Component);
function FormComponent() {
(0, _classCallCheck3["default"])(this, FormComponent);
return (0, _possibleConstructorReturn3["default"])(this, _React$Component.apply(this, arguments));
}
return FormComponent;
}(_react2["default"].Component);
var Form = function (_React$Component2) {
(0, _inherits3["default"])(Form, _React$Component2);
function Form(props) {
(0, _classCallCheck3["default"])(this, Form);
var _this2 = (0, _possibleConstructorReturn3["default"])(this, _React$Component2.call(this, props));
(0, _warning2["default"])(!props.form, 'It is unnecessary to pass `form` to `Form` after antd@1.7.0.');
return _this2;
}
Form.prototype.shouldComponentUpdate = function shouldComponentUpdate() {
for (var _len = arguments.length, args = Array(_len), _key = 0; _key < _len; _key++) {
args[_key] = arguments[_key];
}
return _reactAddonsPureRenderMixin2["default"].shouldComponentUpdate.apply(this, args);
};
Form.prototype.render = function render() {
var _classNames;
var _props = this.props;
var prefixCls = _props.prefixCls;
var className = _props.className;
var inline = _props.inline;
var horizontal = _props.horizontal;
var vertical = _props.vertical;
var formClassName = (0, _classnames2["default"])((_classNames = {}, (0, _defineProperty3["default"])(_classNames, '' + prefixCls, true), (0, _defineProperty3["default"])(_classNames, prefixCls + '-horizontal', horizontal), (0, _defineProperty3["default"])(_classNames, prefixCls + '-vertical', vertical), (0, _defineProperty3["default"])(_classNames, prefixCls + '-inline', inline), (0, _defineProperty3["default"])(_classNames, className, !!className), _classNames));
var formProps = (0, _omit2["default"])(this.props, ['prefixCls', 'className', 'inline', 'horizontal', 'vertical', 'form']);
return _react2["default"].createElement('form', (0, _extends3["default"])({}, formProps, { className: formClassName }));
};
return Form;
}(_react2["default"].Component);
exports["default"] = Form;
Form.defaultProps = {
prefixCls: 'ant-form',
onSubmit: function onSubmit(e) {
e.preventDefault();
}
};
Form.propTypes = {
prefixCls: _react2["default"].PropTypes.string,
vertical: _react2["default"].PropTypes.bool,
horizontal: _react2["default"].PropTypes.bool,
inline: _react2["default"].PropTypes.bool,
children: _react2["default"].PropTypes.any,
onSubmit: _react2["default"].PropTypes.func
};
Form.Item = _FormItem2["default"];
Form.create = function (options) {
var formWrapper = (0, _createDOMForm2["default"])((0, _objectAssign2["default"])({}, options, {
fieldNameProp: 'id',
fieldMetaProp: _constants.FIELD_META_PROP
}));
/* eslint-disable react/prefer-es6-class */
return function (Component) {
return formWrapper(_react2["default"].createClass({
propTypes: {
form: _react.PropTypes.object.isRequired
},
childContextTypes: {
form: _react.PropTypes.object.isRequired
},
getChildContext: function getChildContext() {
return {
form: this.props.form
};
},
render: function render() {
var getFieldProps = this.props.form.getFieldProps;
function deprecatedGetFieldProps(name, option) {
(0, _warning2["default"])(false, '`getFieldProps` is deprecated and will be removed in future, please use `getFieldDecorator` instead');
return getFieldProps(name, option);
}
this.props.form.getFieldProps = deprecatedGetFieldProps;
var withRef = {};
if (options && options.withRef) {
withRef.ref = 'formWrappedComponent';
}
return _react2["default"].createElement(Component, (0, _extends3["default"])({}, this.props, withRef));
}
}));
};
}; | mit |
andersao/l5-repository | src/Prettus/Repository/Events/RepositoryEventBase.php | 1136 | <?php
namespace Prettus\Repository\Events;
use Illuminate\Database\Eloquent\Model;
use Prettus\Repository\Contracts\RepositoryInterface;
/**
 * Class RepositoryEventBase
 *
 * Base class for repository lifecycle events. Listeners can query the
 * repository that fired the event, the affected model and the action name.
 *
 * @package Prettus\Repository\Events
 * @author Anderson Andrade <contato@andersonandra.de>
 */
abstract class RepositoryEventBase
{
    /**
     * The model the event refers to (null when the event is not tied to a
     * single model instance).
     *
     * @var Model
     */
    protected $model;

    /**
     * The repository that fired the event.
     *
     * @var RepositoryInterface
     */
    protected $repository;

    /**
     * Short action name; expected to be set by concrete subclasses
     * (no setter is exposed here).
     *
     * @var string
     */
    protected $action;

    /**
     * @param RepositoryInterface $repository
     * @param Model $model
     */
    public function __construct(RepositoryInterface $repository, Model $model = null)
    {
        $this->repository = $repository;
        $this->model = $model;
    }

    /**
     * @return Model|array
     */
    public function getModel()
    {
        return $this->model;
    }

    /**
     * @return RepositoryInterface
     */
    public function getRepository()
    {
        return $this->repository;
    }

    /**
     * @return string
     */
    public function getAction()
    {
        return $this->action;
    }
}
| mit |
kodazzi/amazonas | public_html/src/tools/js/k-alert.js | 2694 | /**
* Created by jorge on 29/06/15.
*/
// Position (and, if already present in the DOM, show) the alert as soon
// as the document is ready.
$(document).ready(function() {
    Alert.init();
});
// Singleton helper that shows transient, horizontally-centred alert
// banners sliding in from the top (or bottom, per Alert.position).
var Alert = {
    id : 'ds-alert',
    idBottom: 'alert ds-alert-bottom',
    classSuccess : 'alert alert-success',
    classError : 'alert alert-danger',
    classInformation : 'alert alert-info',
    classWarning : 'alert alert-warning',
    delay : 500,
    duration : 5000,
    width: 600,
    position: 'top',

    // Parks any pre-existing alert element off-screen and, if one exists,
    // animates it in. Fix: `var` added to percentage/left/style_init,
    // which were accidental globals.
    init : function() {
        var percentage = (Alert.width * 100) / $(window).width();
        var left = Math.floor((100 - percentage) / 2);
        var style_init = {width: Alert.width, left: left + '%', top: '-70px'};
        if (Alert.position == 'bottom') {
            style_init = {width: Alert.width, left: left + '%', bottom: '-70px'};
        }
        $('#' + Alert.id).css(style_init);
        if ($('#' + Alert.id).delay(Alert.delay).length) {
            Alert.animation();
        }
    },

    error : function(message, duration) {
        Alert.show(message, Alert.classError, duration);
    },
    success : function(message, duration) {
        Alert.show(message, Alert.classSuccess, duration);
    },
    information : function(message, duration) {
        Alert.show(message, Alert.classInformation, duration);
    },
    warning : function(message, duration) {
        Alert.show(message, Alert.classWarning, duration);
    },

    // Replaces any existing alert with a new one styled by css_class and
    // animates it in for `duration` ms (defaults to Alert.duration).
    show : function(message, css_class, duration) {
        $('#' + Alert.id).remove();
        var alertDiv = $('<div></div>').attr('id', Alert.id).attr('class', css_class);
        var percentage = (Alert.width * 100) / $(window).width();
        var left = Math.floor((100 - percentage) / 2);
        var style_init = {width: Alert.width, left: left + '%', top: '-70px'};
        if (Alert.position == 'bottom') {
            style_init = {width: Alert.width, left: left + '%', bottom: '-70px'};
        }
        alertDiv.css(style_init);
        $('body').prepend(alertDiv);
        $('#' + Alert.id).html(message);
        Alert.animation(duration);
    },

    // Slides the alert into view, waits `duration` ms, slides it out and
    // removes it from the DOM. Fix: removed the dead `var id = ...` line
    // that referenced the undefined property Alert.idTop and was never
    // used; `var` added to style_show/style_hide (accidental globals).
    animation : function(duration) {
        if (duration == undefined)
            duration = Alert.duration;
        var style_show = {top:'80px', opacity: 1};
        var style_hide = {top:'-70px', opacity: 0};
        if (Alert.position == 'bottom') {
            style_show = {bottom:'40px', opacity: 1};
            style_hide = {bottom:'-70px', opacity: 0};
        }
        $('#' + Alert.id).animate(style_show, 600);
        window.setTimeout(function() {
            $('#' + Alert.id).animate(style_hide, 600, null, function(){
                $(this).remove();
            });
        }, duration);
    }
}
def concat_multiples(num, multiples):
    """Concatenate num*1, num*2, ..., num*multiples into one integer."""
    parts = []
    for factor in range(1, multiples + 1):
        parts.append(str(num * factor))
    return int("".join(parts))
def is_pandigital(num):
    """True when the decimal digits of num are exactly 1..9, each used once."""
    observed = sorted(int(ch) for ch in str(num))
    return observed == list(range(1, 10))
def solve_p038():
    """Project Euler 38: largest 1-9 pandigital number formed by
    concatenating the products of some integer with (1, 2, ..., n), n > 1.

    The ranges below are exactly the starting values whose concatenated
    product has 9 digits for each n:
      n=6: only 3      (1+1+1+2+2+2 digits)
      n=5: 5..9        (1+2+2+2+2)
      n=4: 25..32      (2+2+2+3)
      n=3: 100..332    (3+3+3)
      n=2: 5000..9998  (4+5)
    """
    # retrieve only 9 digit concatenations of multiples where n = (1,2,..n)
    n6 = [concat_multiples(num, 6) for num in [3]]
    n5 = [concat_multiples(num, 5) for num in range(5,10)]
    n4 = [concat_multiples(num, 4) for num in range(25,33)]
    n3 = [concat_multiples(num, 3) for num in range(100,333)]
    n2 = [concat_multiples(num, 2) for num in range(5000,9999)]

    all_concats = set(n2 + n3 + n4 + n5 + n6)
    # Keep only the pandigital candidates and return the largest.
    return max([num for num in all_concats if is_pandigital(num)])
if __name__ == '__main__':
    # Print the puzzle answer when run as a script.
    print(solve_p038())
| mit |
Discordius/Telescope | packages/lesswrong/lib/collections/notifications/permissions.ts | 694 | import Users from '../users/collection';
import Notifications from './collection';
// Permission grants for the Notifications collection.

// Regular members may create, edit and view their own notifications.
const membersActions = [
  'notifications.new.own',
  'notifications.edit.own',
  'notifications.view.own',
];
Users.groups.members.can(membersActions);

// Admins may create, edit and remove any notification.
const adminActions = [
  'notifications.new.all',
  'notifications.edit.all',
  'notifications.remove.all',
];
Users.groups.admins.can(adminActions);

// Row-level access check: owners need notifications.view.own, everyone
// else needs a blanket view permission.
// NOTE(review): the non-owner branch checks `conversations.view.all`
// rather than `notifications.view.all` - this looks like a copy/paste
// from the conversations collection; confirm whether it is intentional.
Notifications.checkAccess = async (user: DbUser|null, document: DbNotification, context: ResolverContext|null): Promise<boolean> => {
  if (!user || !document) return false;
  return Users.owns(user, document) ? Users.canDo(user, 'notifications.view.own') : Users.canDo(user, `conversations.view.all`)
};
| mit |
JamaSoftware/simpleCSVExport | verifier.py | 952 | from jama import Jama
from csv_writer import CSVWriter
def verify():
    """Export Jama data to CSV files in the working directory.

    Produces: projects.csv, one <item_type_id>.csv per item type,
    relationships.csv (all projects combined) and comments.csv.
    Relies on the project-local Jama client and CSVWriter.
    """
    jama = Jama()
    csv = CSVWriter()

    projects = jama.getProjects()
    csv.write("projects.csv", projects)

    project_ids = [project["id"] for project in projects]
    item_type_ids = [item_type["id"] for item_type in jama.getItemTypes()]

    # One CSV per item type, named after the numeric type id.
    for item_type_id in item_type_ids:
        csv.write("{}.csv".format(item_type_id), jama.getItems(item_type_id))

    relationships = []
    for project_id in project_ids:
        # if the relationships file gets too big you can create a file for
        # each project's relationships instead of extending this list
        relationships.extend(jama.getRelationships(project_id))
    csv.write("relationships.csv", relationships)

    # if the comments file gets too big you can split the list and
    # csv.write() each half
    csv.write("comments.csv", jama.getComments())
if __name__ == "__main__":
verify()
print ("done")
| mit |
sigma-geosistemas/django-tenants | django_tenants/management/commands/collectstatic_schemas.py | 207 | # -*- coding: utf-8 -*-
from . import TenantWrappedCommand
from django.contrib.staticfiles.management.commands import collectstatic


class Command(TenantWrappedCommand):
    """Tenant-aware wrapper around Django's `collectstatic` command.

    COMMAND names the stock management command to run; the per-schema
    behaviour comes from TenantWrappedCommand (defined elsewhere in
    django_tenants), not from this class.
    """
    COMMAND = collectstatic.Command
| mit |
dealproc/Drey | source/Drey.Server.Core/Extensions/ExceptionExtensions.cs | 677 | using System;
namespace Drey.Server.Extensions
{
public static class ExceptionExtensions
{
    /// <summary>
    /// Walks the InnerException chain and returns the innermost (deepest)
    /// exception for further processing. An exception with no inner
    /// exception is returned unchanged.
    /// </summary>
    /// <param name="exc">The exception to unwrap.</param>
    /// <returns>The deepest exception in the chain.</returns>
    public static Exception HeadException(this Exception exc)
    {
        var deepest = exc;
        while (deepest.InnerException != null)
        {
            deepest = deepest.InnerException;
        }
        return deepest;
    }
}
}
| mit |
madgik/exareme | Exareme-Docker/src/exareme/exareme-tools/madis/src/functions/vtable/keep_numeric.py | 2736 | """
.. function:: keep_numeric(query:None)

    Returns the result of the input *query*, keeping only the columns whose
    value in the FIRST result row is numeric (parseable as a number, or a
    single character with a unicode numeric value).

    :Returned table schema:
        The subset of the input query's columns that held numeric values
        in the first row.

    (The previous docstring here described ``rowidvt`` and appeared to be
    copied from another operator's source file.)
"""
import functions
import vtbase
### Classic stream iterator
registered = True
def is_number(s):
    """Return True when s can be read as a number.

    Accepts anything float() parses, plus single unicode characters that
    carry a numeric value (via unicodedata.numeric).
    """
    try:
        float(s)
    except ValueError:
        pass
    else:
        return True
    try:
        import unicodedata
        unicodedata.numeric(s)
    except (TypeError, ValueError):
        return False
    return True
class keep_numeric(vtbase.VT):
    """Virtual table that keeps only the columns of `query` whose value in
    the FIRST result row parses as a number (see is_number).

    NOTE(review): Python 2 only (uses iterator.next()); raises
    StopIteration when the query yields no rows; column selection is
    decided from the first row alone, so mixed-type columns may be
    mis-classified.
    """

    def VTiter(self, *parsedArgs, **envars):
        largs, dictargs = self.full_parse(parsedArgs)
        self.nonames = True
        self.names = []
        self.types = []
        if 'query' not in dictargs:
            raise functions.OperatorError(__name__.rsplit('.')[-1], "No query argument ")
        query = dictargs['query']
        cur = envars['db'].cursor()
        c = cur.execute(query)
        schema = cur.getdescriptionsafe()
        schema1 = []
        # Probe the first row to decide which columns to keep.
        first_row = c.next()
        first_tuple = []
        j = 0
        for i in first_row:
            if is_number(i):
                schema1.append(schema[j])
                first_tuple.append(i)
            j += 1
        # First yield the (filtered) schema, then the probed first row.
        yield tuple(schema1)
        yield tuple(first_tuple)
        # Project every remaining row onto the kept columns.
        for row in c:
            tmp_row = []
            j = 0
            for col in row:
                if schema[j] in schema1:
                    tmp_row.append(col)
                j += 1
            yield tmp_row
def Source():
    """Entry point used by the madis virtual-table machinery."""
    return vtbase.VTGenerator(keep_numeric)


if not ('.' in __name__):
    """
    This is needed to be able to test the function, put it at the end of every
    new function you create
    """
    import sys
    from functions import *
    testfunction()
    if __name__ == "__main__":
        # Python 2 idiom: re-expose setdefaultencoding to run the doctests
        # with utf-8 as the default string encoding.
        reload(sys)
        sys.setdefaultencoding('utf-8')
        import doctest
        doctest.testmod()
| mit |
czen/MMCS_CS311 | Module3/mymain.cs | 1908 | using System;
using System.IO;
using SimpleScanner;
using ScannerHelper;
namespace GeneratedLexer
{
// Driver for the generated lexer: dumps the input file, tokenizes it and
// prints per-token output plus summary statistics at EOF.
// (Comments below translated from mojibake-encoded Russian originals.)
class mymain
{
    static void Main(string[] args)
    {
        int cnt_id = 0;  // number of identifiers
        int min_id_len = Int32.MaxValue, max_id_len = 0;  // minimum / maximum identifier length
        double avg_id_len = 0;  // total identifier length (divided by cnt_id on output)
        int sum_int = 0;  // sum of all integer literals
        double sum_d = 0;  // sum of all floating-point literals
        // So that real numbers are recognized and displayed as 3.14
        // (and not 3,14 as in the Russian culture).
        System.Threading.Thread.CurrentThread.CurrentCulture = new System.Globalization.CultureInfo("en-US");
        var fname = @"..\..\a.txt";
        Console.WriteLine(File.ReadAllText(fname));
        Console.WriteLine("-------------------------");
        Scanner scanner = new Scanner(new FileStream(fname, FileMode.Open));
        int tok = 0;
        do {
            tok = scanner.yylex();
            if (tok == (int)Tok.EOF)
            {
                // NOTE(review): the statistics below are printed but never
                // updated anywhere in this loop, so they always report
                // their initial values - presumably the lexer actions were
                // meant to update them. Also, the fourth line's label says
                // "min" but prints max_id_len.
                Console.WriteLine();
                Console.WriteLine("number of id: {0:D}", cnt_id);
                Console.WriteLine("average length of the id: {0:N}", avg_id_len / cnt_id);
                Console.WriteLine("min length of the id: {0:D}", min_id_len);
                Console.WriteLine("min length of the id: {0:D}", max_id_len);
                Console.WriteLine();
                Console.WriteLine("sum of int: {0:D}", sum_int);
                Console.WriteLine("sum of double: {0:N}", sum_d);
                Console.WriteLine();
                break;
            }
            Console.WriteLine(scanner.TokToString((Tok)tok));
        } while (true);
        Console.ReadKey();
    }
}
| mit |
bonfimtm/aden | src/app/services/alert.service.ts | 537 | import { Injectable } from '@angular/core';
import * as swal from 'sweetalert';
/**
 * Injectable wrapper around sweetalert dialogs that also mirrors every
 * message to the browser console at a matching severity. Each method
 * returns whatever swal() returns.
 */
@Injectable()
export class AlertService {

  constructor() {
  }

  /** Log to console and show an informational dialog. */
  info(message) {
    console.log(message);
    return swal('Info', message, 'info');
  }

  /** Log to console and show a success dialog. */
  success(message) {
    console.log(message);
    return swal('Success', message, 'success');
  }

  /** Log a console warning and show a warning dialog. */
  warning(message) {
    console.warn(message);
    return swal('Warning', message, 'warning');
  }

  /** Log a console error and show an error dialog. */
  error(message) {
    console.error(message);
    return swal('Error', message, 'error');
  }
}
| mit |
MatthiasHoldorf/FoodControl | FoodControlTests/RepositoryTests/ActivityLogRepositoryTests.cs | 3773 | using System;
using System.Linq;
using System.Transactions;
using FoodControl.DataAccessLayer;
using FoodControl.Model;
using Microsoft.VisualStudio.TestTools.UnitTesting;
namespace FoodControlTests.RepositoryTests
{
/// <summary>
/// This is a test class for <see cref="ActivityLog"/> repository and is intended
/// to contain the basic <see cref="ActivityLog"/> repository Unit Tests.
///</summary>
[TestClass]
public class ActivityLogRepositoryTests
{
private IDALContext _context = new DALContext();
private ActivityLog _activityLog;
/// <summary>
/// Builds a fresh ActivityLog fixture before each test. ALID 1337 is an
/// arbitrary id; ActID/UserID of 1 presumably reference pre-seeded rows
/// (confirm against the test database seed).
///</summary>
[TestInitialize]
public void Init()
{
    // initialise the ActivityLog object.
    _activityLog = new ActivityLog()
    {
        ALID = 1337,
        ActID = 1,
        UserID = 1,
        Duration = 50,
        Date = DateTime.Now.AddDays(-1),
    };
}
/// <summary>
/// Unit-test for <see cref="ActivityLog"/>.Create().
/// </summary>
/// <remarks>
/// Checks, if ActivityLog is created.
/// </remarks>
[TestMethod]
public void AddActivityLog()
{
using (TransactionScope scope = new TransactionScope())
{
// Act
_context.ActivityLog.Create(_activityLog);
_context.SaveChanges();
// Assert
Assert.AreEqual(_activityLog, _context.ActivityLog.GetAll().LastOrDefault());
Assert.AreEqual(_activityLog, _context.ActivityLog.GetById(_activityLog.ALID));
}
}
/// <summary>
/// Unit-test for <see cref="ActivityLog"/>.Update().
/// </summary>
/// <remarks>
/// Checks, if ActivityLog is updated.
/// </remarks>
[TestMethod]
public void UpdateActivityLog()
{
using (TransactionScope scope = new TransactionScope())
{
// arrange
_context.ActivityLog.Create(_activityLog);
_context.SaveChanges();
// assert ActivityLog is added
Assert.AreEqual(_activityLog, _context.ActivityLog.GetById(_activityLog.ALID));
// Act
_activityLog.Duration = 60;
_context.ActivityLog.Update(_activityLog);
_context.SaveChanges();
// Assert
Assert.IsTrue(_context.ActivityLog.GetById(_activityLog.ALID).Duration == 60);
Assert.AreEqual(_activityLog, _context.ActivityLog.GetById(_activityLog.ALID));
}
}
/// <summary>
/// Unit-test for <see cref="ActivityLog"/>.Delete().
/// </summary>
/// <remarks>
/// Checks, if ActivityLog is deleted.
/// </remarks>
[TestMethod]
public void DeleteActivityLog()
{
using (TransactionScope scope = new TransactionScope())
{
// arrange
_context.ActivityLog.Create(_activityLog);
_context.SaveChanges();
// assert ActivityLog is added
Assert.AreEqual(_activityLog, _context.ActivityLog.GetById(_activityLog.ALID));
// act
_context.ActivityLog.Delete(_activityLog);
_context.SaveChanges();
// assert ActivityLog is deleted
Assert.IsNull(_context.ActivityLog.GetById(_activityLog.ALID));
}
}
}
}
| mit |
jfelipebc/iojs-api-rethinkdb | lib/utils/logger.js | 300 | 'use strict'
import winston from 'winston'
// Console transport settings for the application-wide logger.
const consoleOptions = {
  colorize : true,
  prettyPrint : true,
  level : 'debug',
  label : 'Employees API'
}

// Single pre-configured winston logger instance (default-exported below).
let logger = new (winston.Logger)({
  transports: [ new winston.transports.Console(consoleOptions) ]
})
export default logger | mit |
BUCTdarkness/jedis | src/test/java/redis/clients/jedis/tests/ShardedJedisPoolTest.java | 6580 | package redis.clients.jedis.tests;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.pool2.impl.GenericObjectPoolConfig;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import redis.clients.jedis.HostAndPort;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.JedisShardInfo;
import redis.clients.jedis.ShardedJedis;
import redis.clients.jedis.ShardedJedisPool;
import redis.clients.jedis.exceptions.JedisConnectionException;
public class ShardedJedisPoolTest extends Assert {
private static HostAndPort redis1 = HostAndPortUtil.getRedisServers()
.get(0);
private static HostAndPort redis2 = HostAndPortUtil.getRedisServers()
.get(1);
private List<JedisShardInfo> shards;
@Before
public void startUp() {
shards = new ArrayList<JedisShardInfo>();
shards.add(new JedisShardInfo(redis1.getHost(), redis1.getPort()));
shards.add(new JedisShardInfo(redis2.getHost(), redis2.getPort()));
shards.get(0).setPassword("foobared");
shards.get(1).setPassword("foobared");
Jedis j = new Jedis(shards.get(0));
j.connect();
j.flushAll();
j.disconnect();
j = new Jedis(shards.get(1));
j.connect();
j.flushAll();
j.disconnect();
}
@Test
public void checkConnections() {
ShardedJedisPool pool = new ShardedJedisPool(
new GenericObjectPoolConfig(), shards);
ShardedJedis jedis = pool.getResource();
jedis.set("foo", "bar");
assertEquals("bar", jedis.get("foo"));
pool.returnResource(jedis);
pool.destroy();
}
@Test
public void checkConnectionWithDefaultPort() {
ShardedJedisPool pool = new ShardedJedisPool(
new GenericObjectPoolConfig(), shards);
ShardedJedis jedis = pool.getResource();
jedis.set("foo", "bar");
assertEquals("bar", jedis.get("foo"));
pool.returnResource(jedis);
pool.destroy();
}
@Test
public void checkJedisIsReusedWhenReturned() {
ShardedJedisPool pool = new ShardedJedisPool(
new GenericObjectPoolConfig(), shards);
ShardedJedis jedis = pool.getResource();
jedis.set("foo", "0");
pool.returnResource(jedis);
jedis = pool.getResource();
jedis.incr("foo");
pool.returnResource(jedis);
pool.destroy();
}
@Test
public void checkPoolRepairedWhenJedisIsBroken() {
ShardedJedisPool pool = new ShardedJedisPool(
new GenericObjectPoolConfig(), shards);
ShardedJedis jedis = pool.getResource();
jedis.disconnect();
pool.returnBrokenResource(jedis);
jedis = pool.getResource();
jedis.incr("foo");
pool.returnResource(jedis);
pool.destroy();
}
// Borrowing more resources than maxTotal must fail fast (not block)
// with a JedisConnectionException.
// NOTE(review): the pool and the borrowed resource are never released
// here - acceptable in a short-lived test JVM, but a finally block with
// pool.destroy() would be cleaner.
@Test(expected = JedisConnectionException.class)
public void checkPoolOverflow() {
    GenericObjectPoolConfig config = new GenericObjectPoolConfig();
    config.setMaxTotal(1);
    // Fail immediately instead of waiting when the pool is exhausted.
    config.setBlockWhenExhausted(false);
    ShardedJedisPool pool = new ShardedJedisPool(config, shards);
    ShardedJedis jedis = pool.getResource();
    jedis.set("foo", "0");
    // Second borrow exceeds maxTotal=1 and is expected to throw.
    ShardedJedis newJedis = pool.getResource();
    newJedis.incr("foo");
}
@Test
public void shouldNotShareInstances() {
GenericObjectPoolConfig config = new GenericObjectPoolConfig();
config.setMaxTotal(2);
ShardedJedisPool pool = new ShardedJedisPool(config, shards);
ShardedJedis j1 = pool.getResource();
ShardedJedis j2 = pool.getResource();
assertNotSame(j1.getShard("foo"), j2.getShard("foo"));
}
@Test
public void checkFailedJedisServer() {
ShardedJedisPool pool = new ShardedJedisPool(
new GenericObjectPoolConfig(), shards);
ShardedJedis jedis = pool.getResource();
jedis.incr("foo");
pool.returnResource(jedis);
pool.destroy();
}
@Test
public void shouldReturnActiveShardsWhenOneGoesOffline() {
GenericObjectPoolConfig redisConfig = new GenericObjectPoolConfig();
redisConfig.setTestOnBorrow(false);
ShardedJedisPool pool = new ShardedJedisPool(redisConfig, shards);
ShardedJedis jedis = pool.getResource();
// fill the shards
for (int i = 0; i < 1000; i++) {
jedis.set("a-test-" + i, "0");
}
pool.returnResource(jedis);
// check quantity for each shard
Jedis j = new Jedis(shards.get(0));
j.connect();
Long c1 = j.dbSize();
j.disconnect();
j = new Jedis(shards.get(1));
j.connect();
Long c2 = j.dbSize();
j.disconnect();
// shutdown shard 2 and check thay the pool returns an instance with c1
// items on one shard
// alter shard 1 and recreate pool
pool.destroy();
shards.set(1, new JedisShardInfo("nohost", 1234));
pool = new ShardedJedisPool(redisConfig, shards);
jedis = pool.getResource();
Long actual = Long.valueOf(0);
Long fails = Long.valueOf(0);
for (int i = 0; i < 1000; i++) {
try {
jedis.get("a-test-" + i);
actual++;
} catch (RuntimeException e) {
fails++;
}
}
pool.returnResource(jedis);
pool.destroy();
assertEquals(actual, c1);
assertEquals(fails, c2);
}
@Test
public void startWithUrlString() {
Jedis j = new Jedis("localhost", 6380);
j.auth("foobared");
j.set("foo", "bar");
j = new Jedis("localhost", 6379);
j.auth("foobared");
j.set("foo", "bar");
List<JedisShardInfo> shards = new ArrayList<JedisShardInfo>();
shards.add(new JedisShardInfo("redis://:foobared@localhost:6380"));
shards.add(new JedisShardInfo("redis://:foobared@localhost:6379"));
GenericObjectPoolConfig redisConfig = new GenericObjectPoolConfig();
ShardedJedisPool pool = new ShardedJedisPool(redisConfig, shards);
Jedis[] jedises = pool.getResource().getAllShards()
.toArray(new Jedis[2]);
Jedis jedis = jedises[0];
assertEquals("PONG", jedis.ping());
assertEquals("bar", jedis.get("foo"));
jedis = jedises[1];
assertEquals("PONG", jedis.ping());
assertEquals("bar", jedis.get("foo"));
}
@Test
public void startWithUrl() throws URISyntaxException {
Jedis j = new Jedis("localhost", 6380);
j.auth("foobared");
j.set("foo", "bar");
j = new Jedis("localhost", 6379);
j.auth("foobared");
j.set("foo", "bar");
List<JedisShardInfo> shards = new ArrayList<JedisShardInfo>();
shards.add(new JedisShardInfo(new URI(
"redis://:foobared@localhost:6380")));
shards.add(new JedisShardInfo(new URI(
"redis://:foobared@localhost:6379")));
GenericObjectPoolConfig redisConfig = new GenericObjectPoolConfig();
ShardedJedisPool pool = new ShardedJedisPool(redisConfig, shards);
Jedis[] jedises = pool.getResource().getAllShards()
.toArray(new Jedis[2]);
Jedis jedis = jedises[0];
assertEquals("PONG", jedis.ping());
assertEquals("bar", jedis.get("foo"));
jedis = jedises[1];
assertEquals("PONG", jedis.ping());
assertEquals("bar", jedis.get("foo"));
}
}
| mit |
Daivuk/cpp11-and-DX11-Tutorials | Chapter2/Chapter2_Tutorial3/Renderer.cpp | 1483 | #include "Renderer.h"
// Sets up D3D11 for the given window: device + swap chain first, then a
// render-target view over the swap chain's back buffer.
Renderer::Renderer(Window& window) {
    createDevice(window);
    createRenderTarget();
}
// Creates the D3D11 device, immediate context and a single-buffered,
// windowed swap chain targeting `window`. Shows a message box and exits
// the process on failure.
void Renderer::createDevice(Window& window) {
    // Define our swap chain
    DXGI_SWAP_CHAIN_DESC swapChainDesc = { 0 };
    swapChainDesc.BufferCount = 1;
    swapChainDesc.BufferDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
    swapChainDesc.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT;
    swapChainDesc.OutputWindow = window.getHandle();
    swapChainDesc.SampleDesc.Count = 1;  // no multisampling
    swapChainDesc.Windowed = true;

    // Create the swap chain, device and device context
    auto result = D3D11CreateDeviceAndSwapChain(
        nullptr, D3D_DRIVER_TYPE_HARDWARE, nullptr, 0,
        nullptr, 0, D3D11_SDK_VERSION,
        &swapChainDesc, &m_swapChain,
        &m_device, nullptr, &m_deviceContext);

    // Check for error
    // NOTE(review): comparing against S_OK treats every non-S_OK success
    // code as failure; the FAILED(result) macro is the conventional check.
    if (result != S_OK) {
        MessageBox(nullptr, "Error creating DX11", "Error", MB_OK);
        exit(0);
    }
}
void Renderer::createRenderTarget() {
ID3D11Texture2D* backBuffer;
m_swapChain->GetBuffer(0, __uuidof(ID3D11Texture2D), (void**) &backBuffer);
m_device->CreateRenderTargetView(backBuffer, nullptr, &m_renderTargetView);
backBuffer->Release();
}
void Renderer::beginFrame() {
// Bind render target
m_deviceContext->OMSetRenderTargets(1, &m_renderTargetView, nullptr);
// Set the background color
float clearColor[] = { .25f, .5f, 1, 1 };
m_deviceContext->ClearRenderTargetView(m_renderTargetView, clearColor);
}
void Renderer::endFrame() {
// Swap the buffer!
m_swapChain->Present(1, 0);
}
| mit |
ist-dresden/composum-platform | testing/testutil/src/main/java/com/composum/sling/platform/testing/testutil/AroundActionsWrapper.java | 5318 | package com.composum.sling.platform.testing.testutil;
import com.composum.sling.platform.testing.testutil.ErrorCollectorAlwaysPrintingFailures.TestingRunnableWithException;
import org.apache.commons.lang3.ClassUtils;
import org.junit.runners.model.MultipleFailureException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.util.ArrayList;
import java.util.List;
/** Is able to wrap an object such that some configurable actions are done before and after something is called - e.g. committing an ResourceResolver to check for integrity. */
public class AroundActionsWrapper implements InvocationHandler {
private static final Logger LOG = LoggerFactory.getLogger(AroundActionsWrapper.class);
private final Object wrappedObject;
private final TestingRunnableWithException<? extends Throwable> before;
private final TestingRunnableWithException<? extends Throwable> after;
private final TestingRunnableWithException<? extends Throwable> onError;
protected AroundActionsWrapper(Object wrappedObject, TestingRunnableWithException<? extends Throwable> before, TestingRunnableWithException<? extends Throwable> after, TestingRunnableWithException<? extends Throwable> onError) {
this.wrappedObject = wrappedObject;
this.before = before;
this.after = after;
this.onError = onError;
}
/**
* Wraps an object such that specific actions can be done before and after any method of it is called, such as commiting an ResourceResolver to check for integrity.
*
* @param wrappedObject the object
* @param before optional action that is done before each call to a method of the object
* @param after optional action that is done after each call to a method of the object
* @param onError optional action that is run if anything fails - be it the method or the before / after action. E.g. logging of something.
* @param <T> the object type
* @return the wrapped object
*/
@SuppressWarnings("unchecked")
@NotNull
public static <T> T of(@NotNull T wrappedObject, @Nullable TestingRunnableWithException<? extends Throwable> before, @Nullable TestingRunnableWithException<? extends Throwable> after, TestingRunnableWithException<? extends Throwable> onError) {
Class[] interfaces = ClassUtils.getAllInterfaces(wrappedObject.getClass()).toArray(new Class[0]);
return (T) Proxy.newProxyInstance(wrappedObject.getClass().getClassLoader(), interfaces, new AroundActionsWrapper(wrappedObject, before, after, onError));
}
/** Retrieves the object wrapped with {@link #of(Object, TestingRunnableWithException, TestingRunnableWithException)}. */
@SuppressWarnings("unchecked")
public static <T> T retrieveWrappedObject(T wrapper) {
InvocationHandler handler = Proxy.getInvocationHandler(wrapper);
return (T) ((AroundActionsWrapper) handler).wrappedObject;
}
@Override
public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
List<Throwable> errors = new ArrayList<>();
Object returnvalue = null;
try {
if (before != null)
before.run();
} catch (Throwable t) {
LOG.error("Error executing before action {}", before, t);
errors.add(new IllegalStateException("Error executing before action " + before, t));
runOnError(errors);
}
try {
returnvalue = method.invoke(wrappedObject, args);
} catch (Throwable t) {
if (t instanceof InvocationTargetException) {
t = ((InvocationTargetException) t).getTargetException();
}
if (!(t instanceof RuntimeException) && !(t instanceof Error)) {
boolean found = false;
for (Class<?> exceptionType : method.getExceptionTypes()) {
found = found || exceptionType.isAssignableFrom(t.getClass());
}
if (!found) {
LOG.error("Method throws a checked exception that isn't actually declared: {}", t.getClass().getName(), t);
}
}
errors.add(t);
runOnError(errors);
}
try {
if (after != null)
after.run();
} catch (Throwable t) {
LOG.error("Error when executing after action {}", after, t);
errors.add(new IllegalStateException("Error when executing after action " + after, t));
runOnError(errors);
}
MultipleFailureException.assertEmpty(errors);
return returnvalue;
}
protected void runOnError(List<Throwable> errors) {
if (onError != null) {
try {
onError.run();
} catch (Throwable te) { // Ugh!
LOG.error("Error in onError action {}", onError, onError);
errors.add(new IllegalStateException("Error in onError action " + onError, te));
}
}
}
}
| mit |
levinhtxbt/dotnetcore-vega | Controllers/HomeController.cs | 502 | using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Mvc;
namespace vega.Controllers
{
public class HomeController : Controller
{
public IActionResult Index()
{
return View();
}
public IActionResult Error()
{
ViewData["RequestId"] = Activity.Current?.Id ?? HttpContext.TraceIdentifier;
return View();
}
}
}
| mit |
abique/hefur | hefur/stat-handler.cc | 3285 | #include <mimosa/format/format.hh>
#include <mimosa/format/print.hh>
#include <mimosa/stream/string-stream.hh>
#include <mimosa/tpl/dict.hh>
#include <mimosa/tpl/include.hh>
#include <mimosa/tpl/list.hh>
#include <mimosa/tpl/template.hh>
#include <mimosa/tpl/value.hh>
#include "hefur.hh"
#include "mimosa/stream/base16-encoder.hh"
#include "mimosa/stream/filter.hh"
#include "options.hh"
#include "stat-handler.hh"
#include "template-factory.hh"
#include "torrent-db.hh"
namespace hefur {
bool StatHandler::handle(mh::RequestReader &request, mh::ResponseWriter &response) const {
auto tpl = TemplateFactory::instance().create("page.html");
if (!tpl)
return false;
auto tpl_body = TemplateFactory::instance().create("stat.html");
if (!tpl_body)
return false;
mt::Dict dict;
HttpServer::commonDict(dict);
dict.append("body", tpl_body);
dict.append("title", "Torrents");
dict.append("tracker_http", mf::str("http://%v:%v/announce", request.host(), request.port()));
if (UDP_PORT)
dict.append("tracker_udp", mf::str("udp://%v:%v", request.host(), UDP_PORT));
auto torrents = new mt::List("torrents");
dict.append(torrents);
auto tdb = Hefur::instance().torrentDb();
if (!tdb) {
response.setStatus(mh::kStatusServiceUnavailable);
return true;
}
response.setContentType("text/html");
{
m::SharedMutex::ReadLocker locker(tdb->torrents_lock_);
uint64_t total_leechers = 0;
uint64_t total_seeders = 0;
uint64_t total_length = 0;
uint64_t total_completed = 0;
tdb->torrents_.foreach ([&](Torrent::Ptr it) {
auto torrent = new mt::Dict("torrent");
torrents->append(torrent);
torrent->append("name", it->name());
{
ms::StringStream ss;
mf::printByteSize(ss, it->length());
torrent->append("length", ss.str());
}
int version = it->version();
torrent->append("version", version);
auto key16 = ms::filter<ms::Base16Encoder>(it->key());
torrent->append("magnet_key", mf::str("%s%s", version == 1 ? "btih:" : "btmh:1220", key16));
torrent->append("info_hash", it->key());
torrent->append("leechers", it->leechers());
torrent->append("seeders", it->seeders());
torrent->append("completed", it->completed());
if (!DISABLE_PEERS_PAGE)
torrent->append("show_peers", 1);
if (!DISABLE_FILE_PAGE)
torrent->append("show_torrent", 1);
total_leechers += it->leechers();
total_seeders += it->seeders();
total_length += it->length();
total_completed += it->completed();
});
dict.append("total_leechers", total_leechers);
dict.append("total_seeders", total_seeders);
dict.append("total_completed", total_completed);
{
ms::StringStream ss;
mf::printByteSize(ss, total_length);
dict.append("total_length", ss.str());
}
}
tpl->execute(&response, dict);
return true;
}
} // namespace hefur
| mit |
mjseaman/retrofitta | app.js | 3498 | /**
* Module dependencies.
*/
var express = require('express')
, http = require('http')
, path = require('path')
, request = require('request')
, ejs = require('ejs');
//SET APP_RELATIVE_PATH to a folder where your app's index.html resides.
var APP_RELATIVE_PATH = path.join(__dirname, '/public/');
console.log(APP_RELATIVE_PATH);
var app = express();
app.configure(function () {
app.set('port', process.env.PORT || 3000);
app.set('view engine', 'ejs');
app.use(express.favicon());
app.use(express.logger('dev'));
app.use(express.bodyParser());
app.use(express.static(APP_RELATIVE_PATH));
});
app.configure('development', function () {
app.use(express.errorHandler());
});
var client_id = process.env.client_id;
var app_url = process.env.app_url;
app.get('/', function (req, res) {
res.render("index", { client_id: client_id, app_url: app_url});
});
app.get('/index.html', function (req, res) {
res.render("index", { client_id: client_id, app_url: app_url});
});
app.post('/hop/?*', function (req, res) {
console.log("BODY");
console.log(JSON.stringify(req.body));
var body = req.body;
var headers = req.headers;
console.log("BODY:");
console.log(body);
request({'url' : 'https://bpd.lbl.gov/api/v1/analyze/peers/',
'method': 'POST',
'headers': {
'Content-Type': 'application/json',
'Authorization': 'ApiKey mitchel.seaman@gmail.com:a4b167aaf2bbd8c3c57aaff9f0d51ef60df3decd'
},
'body': JSON.stringify(body)
}).pipe(res);
});
app.all('/proxy/?*', function (req, res) {
log(req);
var body = req.body;
var contentType = "application/x-www-form-urlencoded";
var sfEndpoint = req.headers["salesforceproxy-endpoint"];
if (body) {
//if doing oauth, then send body as form-urlencoded
if (sfEndpoint && sfEndpoint.indexOf('oauth2') > 0) {
body = getAsUriParameters(body);
} else {//for everything else, it's json
contentType = "application/json";
body = JSON.stringify(body);
}
}
if ((!body || JSON.stringify(body) === "\"{}\"") && (typeof sfEndpoint != "string")) {
return res.send('Request successful (but nothing to proxy to SF)');
}
request({
url: sfEndpoint || "https://login.salesforce.com//services/oauth2/token",
method: req.method,
headers: {"Content-Type": contentType,
"Authorization": req.headers["authorization"] || req.headers['x-authorization'],
"X-User-Agent": req.headers["x-user-agent"]},
body: body
}).pipe(res);
});
function log(req) {
console.log("req.headers[\"authorization\"] = " + req.headers["authorization"]);
console.log("req.headers[\"x-authorization\"] = " + req.headers["x-authorization"]);
console.log("req.headers[\"salesforceproxy-endpoint\"] = " + req.headers["salesforceproxy-endpoint"]);
console.log('req.method = ' + req.method);
console.log('req.body ' + JSON.stringify(req.body));
}
function getAsUriParameters(data) {
var url = '';
for (var prop in data) {
url += encodeURIComponent(prop) + '=' +
encodeURIComponent(data[prop]) + '&';
}
var result = url.substring(0, url.length - 1);
console.log(result);
return result;
}
http.createServer(app).listen(app.get('port'), function () {
console.log("Express server listening on port " + app.get('port'));
});
| mit |
SkewedAspect/pokegonav | client/layers/portal.js | 4330 | //----------------------------------------------------------------------------------------------------------------------
/// PortalLayer
///
/// @module
//----------------------------------------------------------------------------------------------------------------------
import _ from 'lodash'
import $http from 'axios';
import ol from 'openlayers';
import styleSvc from '../services/style';
//----------------------------------------------------------------------------------------------------------------------
class PortalLayer {
constructor()
{
this.showGyms = true;
this.showStops = true;
this.layer = new ol.layer.Vector({
source: new ol.source.Vector({ wrapX: false }),
style: this._styleFunction.bind(this),
updateWhileAnimating: true,
updateWhileInteracting: true
});
this._update = _.debounce((extent) =>
{
var coord1 = ol.proj.toLonLat([ extent[0], extent[1] ]);
var coord2 = ol.proj.toLonLat([ extent[2], extent[3] ]);
// Get the list of all of the points
$http.get(`/portal?bbox=${ coord1.join(',') },${ coord2.join(',') }`)
.then((response) =>
{
_.each(response.data, (portal) =>
{
this._addPortal(portal);
});
});
}, 500, { maxWait: 1000 });
} // end constructor
_addPortal(portal)
{
var coords = ol.proj.fromLonLat([
portal.point.coordinates[0],
portal.point.coordinates[1]
]);
var feature = new ol.Feature(new ol.geom.Point(coords));
feature.setId(portal.id);
feature.set('type', portal.type);
feature.set('name', portal.name);
this.layer.getSource().addFeature(feature);
} // end _addPortal
_styleFunction(feature, resolution)
{
if(resolution < 20)
{
var style;
switch(feature.get('type'))
{
case 'gym':
style = this.showGyms ? styleSvc.gymStyle : null;
break;
case 'pokestop':
style = this.showStops ? styleSvc.stopStyle : null;
break;
//
// default:
// return styleSvc.unknownPortalStyle;
} // end switch
if(style)
{
// Basically, point slope formula for [[.3, 1], [20, .5]]
var scale = Math.abs(-((resolution * .5) / 19.7) + (19.85 / 19.7));
// Floor of 0.5 and Ceiling of 1.00
scale = Math.max(scale, .5);
scale = Math.min(scale, 1);
// Set the scale
style[0].getImage().setScale(scale);
style[1].getImage().setScale(scale);
} // end if
return style;
}
else if(resolution < 300)
{
switch(feature.get('type'))
{
case 'gym':
return this.showGyms ? styleSvc.zoomedGymStyle : null;
case 'pokestop':
return this.showStops ? styleSvc.zoomedStopStyle : null;
default:
return styleSvc.unknownPortalStyle;
} // end switch
} // end if
} // end _styleFunction
setGymsVisible(visible)
{
this.showGyms = visible;
this.redraw();
} // end setGymsVisible
setStopsVisible(visible)
{
this.showStops = visible;
this.redraw();
} // end setStopsVisible
setVisible(visible)
{
this.layer.setVisible(visible);
} // end setVisible
redraw()
{
this.layer.getSource().changed();
} // end redraw()
update(extent, zoom)
{
if(zoom > 9)
{
this._update(extent);
} // end if
} // end update
} // end PortalLayer
//----------------------------------------------------------------------------------------------------------------------
export default new PortalLayer();
//----------------------------------------------------------------------------------------------------------------------
| mit |
fcc-joemcintyre/pinster | app/client/src/lib/Form/FormButtonRow.js | 199 | import styled from 'styled-components';
export const FormButtonRow = styled.div`
display: flex;
flex-wrap: wrap;
justify-content: center;
margin-top: 20px;
> * {
margin: 10px;
}
`;
| mit |
bravebelgica/website | clancenter/json/json_user_full_data.php | 943 |
<?php
require_once( dirname(__FILE__) . '/center-config.php' );
require_once( dirname(__FILE__) . '/center-connect.php' );
$bStatus = true;
$bList = false;
$query = "select P.id_clanplayer, P.alias, C.clanname, CR.role, R.email
from
cc_clanplayer P,
cc_clans C,
cc_clanroles CR,
coc_registrations R
where P.id_clan = C.id_clan
AND P.id_role = CR.id_clanrole
AND P.id_player = R.registration_id
AND R.registration_id = 1";
$rs = mysqli_query($conn,$query);
$return_arr = array();
if (!$rs) {
#echo "Could not execute query: $query\n";
trigger_error(mysql_error(), E_USER_ERROR);
$status="DBERR";
mysqli_close();
return null;
}
while($row = mysqli_fetch_array($rs)) {
$row_array['clan'] = $row['clanname'];
$row_array['alias'] = $row['alias'];
$row_array['id_clanplayer'] = $row['id_clanplayer'];
$row_array['role'] = $row['role'];
array_push($return_arr,$row_array);
}
echo json_encode($return_arr);
?>
| mit |
netcosports/material-calendarview | library/src/main/java/com/prolificinteractive/materialcalendarview/DayViewFacade.java | 3470 | package com.prolificinteractive.materialcalendarview;
import android.graphics.drawable.Drawable;
import androidx.annotation.NonNull;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
/**
* Abstraction layer to help in decorating Day views
*/
public class DayViewFacade {
private boolean isDecorated;
private Drawable backgroundDrawable = null;
private Drawable selectionDrawable = null;
private final LinkedList<Span> spans = new LinkedList<>();
private boolean daysDisabled = false;
DayViewFacade() {
isDecorated = false;
}
/**
* Set a drawable to draw behind everything else
*
* @param drawable Drawable to draw behind everything
*/
public void setBackgroundDrawable(@NonNull Drawable drawable) {
if (drawable == null) {
throw new IllegalArgumentException("Cannot be null");
}
this.backgroundDrawable = drawable;
isDecorated = true;
}
/**
* Set a custom selection drawable
* TODO: define states that can/should be used in StateListDrawables
*
* @param drawable the drawable for selection
*/
public void setSelectionDrawable(@NonNull Drawable drawable) {
if (drawable == null) {
throw new IllegalArgumentException("Cannot be null");
}
selectionDrawable = drawable;
isDecorated = true;
}
/**
* Add a span to the entire text of a day
*
* @param span text span instance
*/
public void addSpan(@NonNull Object span) {
if (spans != null) {
this.spans.add(new Span(span));
isDecorated = true;
}
}
/**
* <p>Set days to be in a disabled state, or re-enabled.</p>
* <p>Note, passing true here will <b>not</b> override minimum and maximum dates, if set.
* This will only re-enable disabled dates.</p>
*
* @param daysDisabled true to disable days, false to re-enable days
*/
public void setDaysDisabled(boolean daysDisabled) {
this.daysDisabled = daysDisabled;
this.isDecorated = true;
}
void reset() {
backgroundDrawable = null;
selectionDrawable = null;
spans.clear();
isDecorated = false;
daysDisabled = false;
}
/**
* Apply things set this to other
*
* @param other facade to apply our data to
*/
void applyTo(DayViewFacade other) {
if (selectionDrawable != null) {
other.setSelectionDrawable(selectionDrawable);
}
if (backgroundDrawable != null) {
other.setBackgroundDrawable(backgroundDrawable);
}
other.spans.addAll(spans);
other.isDecorated |= this.isDecorated;
other.daysDisabled = daysDisabled;
}
boolean isDecorated() {
return isDecorated;
}
Drawable getSelectionDrawable() {
return selectionDrawable;
}
Drawable getBackgroundDrawable() {
return backgroundDrawable;
}
List<Span> getSpans() {
return Collections.unmodifiableList(spans);
}
/**
* Are days from this facade disabled
*
* @return true if disabled, false if not re-enabled
*/
public boolean areDaysDisabled() {
return daysDisabled;
}
static class Span {
final Object span;
public Span(Object span) {
this.span = span;
}
}
}
| mit |
zwei14/node-arp | lib/arp.js | 5582 | var util = require('util');
var spawn = require('child_process').spawn;
/**
* Read the MAC address from the ARP table.
*
* 3 methods for lin/win/mac Linux reads /proc/net/arp
* mac and win read the output of the arp command.
*
* all 3 ping the IP first without checking the response to encourage the
* OS to update the arp table.
*
* 31/12/2014 -- Changelog by Leandre Gohy (leandre.gohy@hexeo.be)
* - FIX : ping command for windows (-n not -c)
*
* 26/08/2013 -- Changelog by Leandre Gohy (leandre.gohy@hexeo.be)
* - FIX : arp command for OSX (-n not -an)
* - MODIFY : rewrite Linux lookup function to avoid looping over all entries and returned lines (arp -n IPADDRESS)
* - MODIFY : rewrite OSX lookup function to avoid looping over all returned lines
* - FIX : OSX formates double zero as a single one (i.e : 0:19:99:50:3a:3 instead of 00:19:99:50:3a:3)
* - FIX : lookup functions did not returns the function on error causing callback to be called twice
* - FIX : Windows lookup function returns wrong mac address due to indexOf usage (192.168.1.1 -> 192.168.1.10)
*
*/
module.exports.getMAC = function(ipaddress, pingOptions = [], cb) {
if(process.platform.indexOf('linux') == 0) {
exports.readMACLinux(ipaddress, pingOptions, cb);
}
else if (process.platform.indexOf('win') == 0) {
exports.readMACWindows(ipaddress, pingOptions, cb);
}
else if (process.platform.indexOf('darwin') == 0) {
exports.readMACMac(ipaddress, pingOptions, cb);
}
};
/**
* read from arp -n IPADDRESS
*/
module.exports.readMACLinux = function(ipaddress, optionalPingOptions = [], cb) {
// ping the ip address to encourage the kernel to populate the arp tables
var pingOptions = ["-c", "1", ipaddress ]
pingOptions.splice.apply(pingOptions, [2, 0].concat(optionalPingOptions))
var ping = spawn("ping", pingOptions);
ping.on('close', function (code) {
// not bothered if ping did not work
var arp = spawn("arp", [ "-n", ipaddress ]);
var buffer = '';
var errstream = '';
arp.stdout.on('data', function (data) {
buffer += data;
});
arp.stderr.on('data', function (data) {
errstream += data;
});
arp.on('close', function (code) {
if (code !== 0) {
console.log("Error running arp " + code + " " + errstream);
cb(true, code);
return;
}
//Parse this format
//Lookup succeeded : Address HWtype HWaddress Flags Mask Iface
// IPADDRESS ether MACADDRESS C IFACE
//Lookup failed : HOST (IPADDRESS) -- no entry
//There is minimum two lines when lookup is successful
var table = buffer.split('\n');
if (table.length >= 2) {
var parts = table[1].split(' ').filter(String);
cb(false, parts[2]);
return;
}
cb(true, "Could not find ip in arp table: " + ipaddress);
});
});
};
/**
* read from arp -a IPADDRESS
*/
module.exports.readMACWindows = function(ipaddress, optionalPingOptions = [], cb) {
// ping the ip address to encourage the kernel to populate the arp table
var pingOptions = ["-n", "1", ipaddress ]
pingOptions.splice.apply(pingOptions, [2, 0].concat(optionalPingOptions))
var ping = spawn("ping", pingOptions);
ping.on('close', function (code) {
// not bothered if ping did not work
var arp = spawn("arp", ["-a", ipaddress] );
var buffer = '';
var errstream = '';
var lineIndex;
arp.stdout.on('data', function (data) {
buffer += data;
});
arp.stderr.on('data', function (data) {
errstream += data;
});
arp.on('close', function (code) {
if (code !== 0) {
console.log("Error running arp " + code + " " + errstream);
cb(true, code);
return;
}
var table = buffer.split('\r\n');
for (lineIndex = 3; lineIndex < table.length; lineIndex++) {
//parse this format
//[blankline]
//Interface: 192.º68.1.54
// Internet Address Physical Address Type
// 192.168.1.1 50-67-f0-8c-7a-3f dynamic
var parts = table[lineIndex].split(' ').filter(String);
if (parts[0] === ipaddress) {
var mac = parts[1].replace(/-/g, ':');
cb(false, mac);
return;
}
}
cb(true, "Count not find ip in arp table: " + ipaddress);
});
});
};
/**
* read from arp -n IPADDRESS
*/
module.exports.readMACMac = function(ipaddress, optionalPingOptions = [], cb) {
// ping the ip address to encourage the kernel to populate the arp tables
var pingOptions = ["-c", "1", ipaddress ]
pingOptions.splice.apply(pingOptions, [2, 0].concat(optionalPingOptions))
var ping = spawn("ping", pingOptions);
ping.on('close', function (code) {
// not bothered if ping did not work
var arp = spawn("arp", ["-n", ipaddress] );
var buffer = '';
var errstream = '';
arp.stdout.on('data', function (data) {
buffer += data;
});
arp.stderr.on('data', function (data) {
errstream += data;
});
arp.on('close', function (code) {
// On lookup failed OSX returns code 1
// but errstream will be empty
if (code !== 0 && errstream !== '') {
console.log("Error running arp " + code + " " + errstream);
cb(true, code);
return;
}
//parse this format
//Lookup succeeded : HOST (IPADDRESS) at MACADDRESS on IFACE ifscope [ethernet]
//Lookup failed : HOST (IPADDRESS) -- no entry
var parts = buffer.split(' ').filter(String);
if (parts[3] !== 'no') {
var mac = parts[3].replace(/^0:/g, '00:').replace(/:0:/g, ':00:').replace(/:0$/g, ':00');
cb(false, mac);
return;
}
cb(true, "Count not find ip in arp table: " + ipaddress);
});
});
};
| mit |
sebastienhouzet/nabaztag-source-code | server/OS/net/violet/platform/api/actions/applications/AddContent.java | 2233 | package net.violet.platform.api.actions.applications;
import java.util.List;
import net.violet.platform.api.actions.AbstractAction;
import net.violet.platform.api.actions.ActionParam;
import net.violet.platform.api.authentication.SessionManager;
import net.violet.platform.api.exceptions.ForbiddenException;
import net.violet.platform.api.exceptions.InvalidParameterException;
import net.violet.platform.api.exceptions.InvalidSessionException;
import net.violet.platform.api.exceptions.NoSuchApplicationException;
import net.violet.platform.api.exceptions.NoSuchFileException;
import net.violet.platform.datamodel.Application;
import net.violet.platform.datamodel.Application.ApplicationClass;
import net.violet.platform.dataobjects.ApplicationContentData;
import net.violet.platform.dataobjects.ApplicationData;
import net.violet.platform.dataobjects.FilesData;
import net.violet.platform.dataobjects.UserData;
public class AddContent extends AbstractAction {
public static final String FILE_ID = "file_id";
@Override
protected Object doProcessRequest(ActionParam inParam) throws NoSuchFileException, InvalidParameterException, ForbiddenException, InvalidSessionException, NoSuchApplicationException {
final ApplicationData theApplication = ApplicationData.findByAPIId(inParam.getMainParamAsString(), inParam.getCallerAPIKey(), true);
final UserData theUser = SessionManager.getUserFromSessionId(inParam.getString(ActionParam.SESSION_PARAM_KEY, true), inParam.getCaller());
if (!theApplication.getOwner().equals(theUser)) {
throw new ForbiddenException();
}
final FilesData theFile = FilesData.getFilesData(inParam.getString(AddContent.FILE_ID, true), inParam.getCallerAPIKey());
final ApplicationContentData theNewContent = ApplicationContentData.create(theApplication, theFile);
if ((theNewContent == null) || !theNewContent.isValid()) {
return null;
}
return theNewContent.getApiId(inParam.getCaller());
}
public long getExpirationTime() {
return 0;
}
public ActionType getType() {
return ActionType.CREATE;
}
public boolean isCacheable() {
return false;
}
@Override
public List<ApplicationClass> getAuthorizedApplicationClasses() {
return Application.CLASSES_UI;
}
}
| mit |
vinylhero/vinylBlack | JS/Factories/SimonFactory.js | 4726 | vinylApp.factory('SimonFactory', function () {
return {
level: {
difficulty: 1
},
order : {
currentGame: [],
lastGame: []
},
simonColours: [
{
Name: 'blue',
Difficulty: '1'
},
{
Name: 'red',
Difficulty: '1'
},
{
Name: 'yellow',
Difficulty: '1'
},
{
Name: 'green',
Difficulty: '1'
},
{
Name: 'purple',
Difficulty: '2'
},
{
Name: 'orange',
Difficulty: '3'
},
{
Name: 'teal',
Difficulty: '4'
},
{
Name: 'grey',
Difficulty: '5'
}
],
getCssName : function (colour, flash) {
var shape = "";
switch (this.level.difficulty) {
case 1 : shape = "Square";
break;
case 2 : shape = "Pentagon";
break;
case 3 : shape = "Hexagon";
break;
case 4 : shape = "Heptagon";
// Todo - change css style names to be a colour and use this as Hept (currently sept which is wrong)
break;
case 5 : shape = "Octagon";
break;
}
var flashing = flash ? "Light" : "";
return "simon" + shape + flashing + colour.charAt(0).toUpperCase() + colour.slice(1);
},
checkMove : function (guess) {
if (guess.toLowerCase() != this.order.currentGame[this.order.currentGame.length - 1].toLowerCase()) {
alert('WRONG');
return false;
}
return true;
},
saveMove : function(newColour) {
if (!this.colourExists(newColour)) {
alert(newColour + ' was not a colour Simon knows, you lose, sorry...');
return false;
}
if (!this.difficultyDoesNotMatchColour(newColour)){
alert(newColour + ' was not available in your difficulty');
return false;
}
this.order.currentGame.push(newColour);
return this.order.currentGame;
},
getColours : function() {
var colourList = [];
var difficulty = this.level.difficulty;
this.simonColours.forEach(function(item) {
if (item.Difficulty <= difficulty) {
colourList.push(item.Name.toLowerCase());
}
});
return colourList;
},
increaseDifficulty : function() {
if (this.level.difficulty < 5) {
this.level.difficulty += 1;
this.newGame();
}
else {
alert('Difficulty cannot be increased.')
}
},
decreaseDifficulty : function() {
if (this.level.difficulty > 1) {
this.level.difficulty -= 1;
this.newGame();
}
else {
alert('Difficulty cannot be decreased.')
}
},
newGame : function () {
this.order.currentGame = [];
alert('Game was reset and not saved.');
},
saveGame : function () {
this.order.lastGame = this.order.currentGame;
this.order.currentGame = [];
alert('Game Saved!');
},
deleteOldGame : function () {
this.order.lastGame = [];
alert('Game History Cleared');
},
resetAll : function() {
this.level.difficulty = 1;
this.deleteOldGame();
this.newGame();
},
colourExists: function(colour){
var result = false;
this.simonColours.forEach(function(item) {
if (item.Name.toLowerCase() == colour.toLowerCase()) {
result = true;
}
});
return result;
},
difficultyDoesNotMatchColour: function(colour) {
var difficulty = this.level.difficulty;
var result = false;
this.simonColours.forEach(function(item) {
if (item.Name.toLowerCase() == colour.toLowerCase()){
result = difficulty <= item.Difficulty;
}
});
return result;
}
}
}); | mit |
kpandya91/WakeUpWithKinect | Accord.NET projects/Accord.Math/Transforms/SineTransform.cs | 5600 | // Accord Math Library
// The Accord.NET Framework
// http://accord-framework.net
//
// Copyright © César Souza, 2009-2015
// cesarsouza at gmail.com
//
// Copyright © Diego Catalano, 2013
// diego.catalano at live.com
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
//
namespace Accord.Math
{
using System;
using AForge.Math;
/// <summary>
/// Discrete Sine Transform
/// </summary>
///
/// <remarks>
/// <para>
/// In mathematics, the discrete sine transform (DST) is a Fourier-related transform
/// similar to the discrete Fourier transform (DFT), but using a purely real matrix. It
/// is equivalent to the imaginary parts of a DFT of roughly twice the length, operating
/// on real data with odd symmetry (since the Fourier transform of a real and odd function
/// is imaginary and odd), where in some variants the input and/or output data are shifted
/// by half a sample.</para>
///
/// <para>
/// References:
/// <list type="bullet">
/// <item><description>
/// Wikipedia contributors, "Discrete sine transform," Wikipedia, The Free Encyclopedia,
/// available at: http://en.wikipedia.org/w/index.php?title=Discrete_sine_transform </description></item>
/// <item><description>
/// K. R. Castleman, Digital Image Processing. Chapter 13, p.288.
/// Prentice. Hall, 1998.</description></item>
/// </list></para>
/// </remarks>
///
public static class SineTransform
{
    /// <summary>
    ///   Forward Discrete Sine Transform (DST-I), computed in place.
    /// </summary>
    ///
    /// <param name="data">The vector to be transformed; overwritten with its DST.</param>
    ///
    public static void DST(double[] data)
    {
        double[] result = new double[data.Length];

        // DST-I: result[k-1] = sum_{i=1..N} data[i-1] * sin(pi * k * i / (N + 1)).
        for (int k = 1; k < result.Length + 1; k++)
        {
            double sum = 0;
            for (int i = 1; i < data.Length + 1; i++)
                sum += data[i - 1] * Math.Sin(Math.PI * ((k * i) / (data.Length + 1.0)));
            result[k - 1] = sum;
        }

        for (int i = 0; i < data.Length; i++)
            data[i] = result[i];
    }

    /// <summary>
    ///   Inverse Discrete Sine Transform, computed in place.
    /// </summary>
    ///
    /// <param name="data">The vector to be transformed; overwritten with its inverse DST.</param>
    ///
    public static void IDST(double[] data)
    {
        double[] result = new double[data.Length];

        // DST-I is its own inverse up to the scale factor 2 / (N + 1),
        // so IDST(DST(x)) == x.
        double inverse = 2.0 / (data.Length + 1);

        for (int k = 1; k < result.Length + 1; k++)
        {
            double sum = 0;
            for (int i = 1; i < data.Length + 1; i++)
                sum += data[i - 1] * Math.Sin(Math.PI * ((k * i) / (data.Length + 1.0)));
            result[k - 1] = sum * inverse;
        }

        for (int i = 0; i < data.Length; i++)
            data[i] = result[i];
    }

    /// <summary>
    ///   Forward 2-D Discrete Sine Transform, computed in place
    ///   (rows first, then columns); supports non-square matrices.
    /// </summary>
    ///
    /// <param name="data">The jagged matrix to be transformed.</param>
    ///
    public static void DST(double[][] data)
    {
        int rows = data.Length;
        int cols = data[0].Length;

        double[] row = new double[cols];
        double[] col = new double[rows];

        for (int i = 0; i < rows; i++)
        {
            for (int j = 0; j < row.Length; j++)
                row[j] = data[i][j];

            DST(row);

            for (int j = 0; j < row.Length; j++)
                data[i][j] = row[j];
        }

        for (int j = 0; j < cols; j++)
        {
            for (int i = 0; i < col.Length; i++)
                col[i] = data[i][j];

            DST(col);

            for (int i = 0; i < col.Length; i++)
                data[i][j] = col[i];
        }
    }

    /// <summary>
    ///   Inverse 2-D Discrete Sine Transform, computed in place
    ///   (columns first, then rows); supports non-square matrices.
    /// </summary>
    ///
    /// <param name="data">The jagged matrix to be transformed.</param>
    ///
    public static void IDST(double[][] data)
    {
        int rows = data.Length;
        int cols = data[0].Length;

        double[] row = new double[cols];
        double[] col = new double[rows];

        for (int j = 0; j < cols; j++)
        {
            // BUG FIX: this gather loop previously ran to row.Length (the
            // number of columns). For non-square matrices that either threw
            // IndexOutOfRangeException (cols > rows) or transformed a stale,
            // partially filled column buffer (cols < rows). It must iterate
            // over the rows, i.e. col.Length, matching the forward DST above.
            for (int i = 0; i < col.Length; i++)
                col[i] = data[i][j];

            IDST(col);

            for (int i = 0; i < col.Length; i++)
                data[i][j] = col[i];
        }

        for (int i = 0; i < rows; i++)
        {
            for (int j = 0; j < row.Length; j++)
                row[j] = data[i][j];

            IDST(row);

            for (int j = 0; j < row.Length; j++)
                data[i][j] = row[j];
        }
    }
}
}
| mit |
radiocosmology/draco | draco/analysis/sensitivity.py | 9694 | """Sensitivity Analysis Tasks"""
import numpy as np
from caput import config
from ..core import task, io, containers
from ..util import tools
class ComputeSystemSensitivity(task.SingleTask):
    """Compute the sensitivity of beamformed visibilities.

    Attributes
    ----------
    exclude_intracyl : bool
        Exclude the intracylinder baselines in the sensitivity estimate.
        Default is to use all baselines.  Note that a ValueError
        will be raised if exclude_intracyl is True and the visibilities
        have already been stacked over cylinder.
    """

    exclude_intracyl = config.Property(proptype=bool, default=False)

    def setup(self, telescope):
        """Save the telescope model.

        Parameters
        ----------
        telescope : TransitTelescope
            Telescope instance (or anything io.get_telescope can resolve)
            describing the feed positions and polarisations.
        """
        self.telescope = io.get_telescope(telescope)

    def process(self, data):
        """Estimate the sensitivity of the input data.

        Parameters
        ----------
        data : TODContainer
            Must have a weight property that contains an
            estimate of the inverse variance of the noise
            in each visibility.  The visibilities can be
            stacked to any level of redundancy.

        Returns
        -------
        metrics : SystemSensitivity
            Contains the measured and radiometric estimates of
            the noise in the beamformed visibilities.

        Raises
        ------
        ValueError
            If exclude_intracyl is True but the visibilities have already
            been stacked over cylinder.
        """
        # Ensure we are distributed over frequency.  Get shape of visibilities.
        data.redistribute("freq")
        nfreq, nstack, ntime = data.vis.local_shape

        # Extract the input flags.  If container has a gain dataset,
        # then also check for the default gain 1.0 + 0.0j as this indicates
        # that an input was masked for a particular time and frequency.
        # NOTE: np.bool was removed in NumPy 1.24; use the builtin bool.
        inpflg = data.input_flags[:].view(np.ndarray).astype(bool)

        niff = 1
        if "gain" in data.datasets:
            # Derive frequency dependent flags from gains
            gainflg = data.gain[:].view(np.ndarray) != (1.0 + 0.0j)
            inpflg = np.swapaxes(inpflg[np.newaxis, :, :] & gainflg, 0, 1)
            # Flatten frequency and time axis so we can use numpy's unique
            inpflg = inpflg.reshape(inpflg.shape[0], -1)
            niff = nfreq

        # Find unique sets of input flags
        uniq_inpflg, index_cnt = np.unique(inpflg, return_inverse=True, axis=1)

        # Calculate redundancy for each unique set of input flags
        cnt = tools.calculate_redundancy(
            uniq_inpflg.astype(np.float32),
            data.prod,
            data.reverse_map["stack"]["stack"],
            data.stack.size,
        )

        # Determine stack axis
        stack_new, stack_flag = tools.redefine_stack_index_map(
            self.telescope, data.input, data.prod, data.stack, data.reverse_map["stack"]
        )

        if not np.all(stack_flag):
            self.log.warning(
                "There are %d stacked baselines that are masked "
                "in the telescope instance." % np.sum(~stack_flag)
            )

        # Swap inputs so that stacked products are in canonical
        # (unconjugated) order.
        ps = data.prod[stack_new["prod"]]
        conj = stack_new["conjugate"]
        prodstack = ps.copy()
        prodstack["input_a"] = np.where(conj, ps["input_b"], ps["input_a"])
        prodstack["input_b"] = np.where(conj, ps["input_a"], ps["input_b"])

        # Figure out mapping between inputs in data file and inputs in telescope
        tel_index = tools.find_inputs(
            self.telescope.input_index, data.input, require_match=False
        )

        # Use the mapping to extract polarisation and EW position of each input.
        # Inputs absent from the telescope model are tagged "N" and later dropped.
        input_pol = np.array(
            [
                self.telescope.polarisation[ti] if ti is not None else "N"
                for ti in tel_index
            ]
        )

        ew_position = np.array(
            [
                self.telescope.feedpositions[ti, 0] if ti is not None else 0.0
                for ti in tel_index
            ]
        )

        # Next we determine indices into the stack axis for each polarisation product
        # The next three lines result in XY and YX being
        # combined into a single polarisation product
        pa, pb = input_pol[prodstack["input_a"]], input_pol[prodstack["input_b"]]
        pol_a = np.where(pa <= pb, pa, pb)
        pol_b = np.where(pa <= pb, pb, pa)

        # NOTE: np.core.defchararray was removed from the public API in
        # NumPy 2.0; np.char.add is the stable spelling.
        baseline_pol = np.char.add(pol_a, pol_b)

        if self.exclude_intracyl:
            baseline_flag = (
                ew_position[prodstack["input_a"]] != ew_position[prodstack["input_b"]]
            )
        else:
            baseline_flag = np.ones(prodstack.size, dtype=bool)

        pol_uniq = [bp for bp in np.unique(baseline_pol) if "N" not in bp]
        pol_index = [
            np.flatnonzero((baseline_pol == up) & baseline_flag) for up in pol_uniq
        ]
        npol = len(pol_uniq)

        auto_flag = (prodstack["input_a"] == prodstack["input_b"]).astype(np.float32)

        # If only the autocorrelations survive per polarisation, the data has
        # already been stacked over cylinder and we cannot form the
        # intracylinder-excluded radiometric estimate.
        if self.exclude_intracyl and (np.sum(auto_flag) == npol):
            raise ValueError(
                "You have requested the exclusion of "
                "intracylinder baselines, however it appears "
                "that the visibilities have already been stacked "
                "over cylinder, preventing calculation of the "
                "radiometric estimate."
            )

        # Dereference the weight dataset
        bweight = data.weight[:].view(np.ndarray)
        bflag = bweight > 0.0

        # Initialize arrays
        var = np.zeros((nfreq, npol, ntime), dtype=np.float32)
        counter = np.zeros((nfreq, npol, ntime), dtype=np.float32)

        # Average over selected baseline per polarization
        for pp, ipol in enumerate(pol_index):
            pcnt = cnt[ipol, :]
            # Cross-correlations contribute twice (real + imaginary parts).
            pscale = 2.0 - auto_flag[ipol, np.newaxis]

            # Loop over frequencies to reduce memory usage
            for ff in range(nfreq):
                fslc = slice((ff % niff) * ntime, ((ff % niff) + 1) * ntime)
                pfcnt = pcnt[:, index_cnt[fslc]]
                pvar = tools.invert_no_zero(bweight[ff, ipol, :])
                pflag = bflag[ff, ipol, :].astype(np.float32)

                var[ff, pp, :] = np.sum(pfcnt ** 2 * pscale * pflag * pvar, axis=0)
                counter[ff, pp, :] = np.sum(pfcnt * pscale * pflag, axis=0)

        # Normalize
        var *= tools.invert_no_zero(counter ** 2)

        # Determine which of the stack indices correspond to autocorrelations
        auto_stack_id = np.flatnonzero(auto_flag)
        auto_input = prodstack["input_a"][auto_stack_id]
        auto_pol = input_pol[auto_input]

        auto_cnt = cnt[auto_stack_id, :][:, index_cnt]
        auto_cnt = np.swapaxes(auto_cnt.reshape(-1, niff, ntime), 0, 1)

        num_feed = auto_cnt * bflag[:, auto_stack_id, :].astype(np.float32)
        auto = data.vis[:, auto_stack_id, :].real

        # Construct the radiometric estimate of the noise by taking the sum
        # of the product of pairs of (possibly stacked) autocorrelations.
        radiometer = np.zeros((nfreq, npol, ntime), dtype=np.float32)
        radiometer_counter = np.zeros((nfreq, npol, ntime), dtype=np.float32)
        for ii, (ai, pi) in enumerate(zip(auto_input, auto_pol)):
            for jj, (aj, pj) in enumerate(zip(auto_input, auto_pol)):
                if self.exclude_intracyl and (ew_position[ai] == ew_position[aj]):
                    # Exclude intracylinder baselines
                    continue

                # Combine XY and YX into single polarisation product
                pp = pol_uniq.index(pi + pj) if pi <= pj else pol_uniq.index(pj + pi)

                # Weight by the number of feeds that were averaged
                # together to obtain each stacked autocorrelation
                nsq = num_feed[:, ii, :] * num_feed[:, jj, :]
                radiometer[:, pp, :] += nsq * auto[:, ii, :] * auto[:, jj, :]
                radiometer_counter[:, pp, :] += nsq

        # Calculate number of independent samples from the
        # integration time, frequency resolution, and fraction of packets lost
        tint = np.median(np.abs(np.diff(data.time)))
        dnu = np.median(data.index_map["freq"]["width"]) * 1e6

        if ("flags" in data) and ("frac_lost" in data["flags"]):
            frac_lost = data["flags"]["frac_lost"][:]
        else:
            frac_lost = np.zeros((1, 1), dtype=np.float32)

        nint = dnu * tint * (1.0 - frac_lost[:, np.newaxis, :])

        # Normalize by the number of independent samples
        # and the total number of baselines squared
        radiometer *= tools.invert_no_zero(nint * radiometer_counter ** 2)

        # Create output container
        metrics = containers.SystemSensitivity(
            pol=np.array(pol_uniq, dtype="<U2"),
            time=data.time[:],
            axes_from=data,
            attrs_from=data,
            comm=data.comm,
            distributed=data.distributed,
        )
        metrics.redistribute("freq")

        # In order to write generic code for generating the radiometric
        # estimate of the sensitivity, we had to sum over the upper and lower triangle
        # of the visibility matrix.  Below we multiply by sqrt(2) in order to
        # obtain the sensitivity of the real component.
        metrics.radiometer[:] = np.sqrt(2.0 * radiometer)
        metrics.measured[:] = np.sqrt(2.0 * var)

        # Save the total number of baselines that were averaged in the weight dataset
        metrics.weight[:] = counter

        # Save the fraction of missing samples
        metrics.frac_lost[:] = frac_lost

        return metrics
| mit |
kapouer/cache-debounce | test/test.js | 1911 | var assert = require("assert");
var cacheOnDemand = require('../index.js');
describe('cacheOnDemand', function(){
var didTheWork = 0;
var fn = cacheOnDemand(function(a, b, callback) {
// Add two numbers, but take 20 ms to do it asynchronously
setTimeout(function() {
didTheWork++;
return callback(a + b);
}, 20);
}, function(a, b) {
// hash them by concatenating them
return a + ',' + b;
});
it('returns functions', function() {
assert(fn);
});
it('delivers result ten times for ten invocations', function(done) {
var i;
var received = 0;
for (i = 0; (i < 10); i++) {
test();
}
function test() {
return fn(5, 5, function(result) {
assert(result === 10);
received++;
if (received === 10) {
return done();
}
});
}
});
it('does the work only once for those ten invocations', function() {
assert(didTheWork === 1);
});
it('does the work for a second series of invocations', function(done) {
var i;
var received = 0;
for (i = 0; (i < 10); i++) {
test();
}
function test() {
return fn(6, 6, function(result) {
assert(result === 12);
received++;
if (received === 10) {
return done();
}
});
}
});
it('now a total of two times work has been done', function() {
assert(didTheWork === 2);
});
it('new block of simultaneous requests generates new data, but only once', function(done) {
var i;
var received = 0;
for (i = 0; (i < 10); i++) {
test();
}
function test() {
return fn(5, 5, function(result) {
assert(result === 10);
received++;
if (received === 10) {
return done();
}
});
}
});
it('now a total of 3 times work has been done', function() {
assert(didTheWork === 3);
});
});
| mit |
leomelin/throwa.com | gulp-tasks/concat-js.js | 618 | var gulp = require('gulp'),
uglify = require('gulp-uglify'),
concat = require('gulp-concat'),
rev = require('gulp-rev'),
RELEASE_FOLDER = require('../gulp-config.json').RELEASE_FOLDER
module.exports = function () {
return gulp.src(require('../scripts.json').map(function (src) { return './public/' + src }))
.pipe(uglify({
mangle: false,
compress: {
drop_console: true
}
}))
.pipe(concat('main.min.js'))
.pipe(rev())
.pipe(gulp.dest(RELEASE_FOLDER + '/public/javascripts'))
.pipe(rev.manifest('rev-manifest-js.json'))
.pipe(gulp.dest(RELEASE_FOLDER))
}
| mit |
sanofuzir/grafit-group.si | app/cache/dev/assetic/config/8/80640920a9f86f520aacec206358b090.php | 67 | <?php
// StaticBundle:Gallery:gallery.html.twig
return array (
);
| mit |
jievro/parser | spec/expression/binary/multiplicative/divide/adivb_spec.rb | 558 | def source
'a/b'
end
def expected_tokens
[
{
type: 'T_IDENTIFIER',
value: 'a'
},
{
type: 'T_OPERATOR',
value: '/'
},
{
type: 'T_IDENTIFIER',
value: 'b'
}
]
end
def expected_ast
{
__type: 'program',
body: [
{
__type: 'binary-expression',
operator: '/',
left: {
__type: 'identifier',
value: 'a'
},
right: {
__type: 'identifier',
value: 'b'
}
}
]
}
end
load 'spec_builder.rb'
| mit |
RabbitStewDio/AutoCake | src/AutoCake.Release/InternalArgumentParser.cs | 4720 | using System;
using System.Collections.Generic;
using System.Linq;
using Cake.Core;
using Cake.Core.Diagnostics;
// Parses a Cake-style command line ("script.cake --opt=value -v=diag") into a
// name -> value dictionary. Boolean-flavoured switches are normalised to
// "true"/"false"; the first non-option token is treated as the build script.
internal class InternalArgumentParser
{
readonly ICakeLog _log;
readonly VerbosityParser _verbosityParser;
internal InternalArgumentParser(ICakeLog log)
{
_log = log;
_verbosityParser = new VerbosityParser(log);
}
// Walks the raw argument list. Returns the options collected so far even on
// error (after logging), rather than throwing.
public Dictionary<string, string> Parse(IEnumerable<string> args)
{
if (args == null)
throw new ArgumentNullException("args");
var options = new Dictionary<string, string>();
var isParsingOptions = false;
var arguments = args.ToList();
foreach (var arg in arguments)
{
var value = arg.UnQuote();
if (isParsingOptions)
if (IsOption(value))
{
if (!ParseOption(value, options))
return options;
}
else
{
// A second bare token: only one script argument is allowed.
_log.Error("More than one build script specified.");
return options;
}
else
try
{
// If they didn't provide a specific build script, search for a default.
if (IsOption(arg))
if (!ParseOption(value, options))
return options;
}
finally
{
// Start parsing options.
// NOTE(review): the finally block flips the flag even when the first
// token was the script path (not an option) — intentional: everything
// after the first token is treated as options.
isParsingOptions = true;
}
}
return options;
}
// An option starts with "-" or "--"; blank tokens are not options.
static bool IsOption(string arg)
{
if (string.IsNullOrWhiteSpace(arg))
return false;
return arg.StartsWith("--") || arg.StartsWith("-");
}
// Splits "--name=value" (or "-name") into its parts; a missing "=" yields an
// empty value, which the boolean switches below interpret as "true".
bool ParseOption(string arg, Dictionary<string, string> options)
{
string name, value;
var nameIndex = arg.StartsWith("--") ? 2 : 1;
var separatorIndex = arg.IndexOfAny(new[] {'='});
if (separatorIndex < 0)
{
name = arg.Substring(nameIndex);
value = string.Empty;
}
else
{
name = arg.Substring(nameIndex, separatorIndex - nameIndex);
value = arg.Substring(separatorIndex + 1);
}
return ParseOption(name, value.UnQuote(), options);
}
// Normalises known switch values and records the option. Returns false (after
// logging) on a duplicate name, which aborts the enclosing Parse loop.
bool ParseOption(string name, string value, Dictionary<string, string> options)
{
if (name.Equals("verbosity", StringComparison.OrdinalIgnoreCase) ||
name.Equals("v", StringComparison.OrdinalIgnoreCase))
{
Verbosity verbosity;
// Unrecognised verbosity strings silently fall back to "normal".
if (!_verbosityParser.TryParse(value, out verbosity))
value = "normal";
}
if (name.Equals("showdescription", StringComparison.OrdinalIgnoreCase) ||
name.Equals("s", StringComparison.OrdinalIgnoreCase))
value = ParseBooleanValue(value);
if (name.Equals("dryrun", StringComparison.OrdinalIgnoreCase) ||
name.Equals("noop", StringComparison.OrdinalIgnoreCase)
|| name.Equals("whatif", StringComparison.OrdinalIgnoreCase))
value = ParseBooleanValue(value);
if (name.Equals("help", StringComparison.OrdinalIgnoreCase) ||
name.Equals("?", StringComparison.OrdinalIgnoreCase))
value = ParseBooleanValue(value);
if (name.Equals("version", StringComparison.OrdinalIgnoreCase) ||
name.Equals("ver", StringComparison.OrdinalIgnoreCase))
value = ParseBooleanValue(value);
if (name.Equals("debug", StringComparison.OrdinalIgnoreCase) ||
name.Equals("d", StringComparison.OrdinalIgnoreCase))
value = ParseBooleanValue(value);
if (name.Equals("mono", StringComparison.OrdinalIgnoreCase))
value = ParseBooleanValue(value);
if (name.Equals("experimental", StringComparison.OrdinalIgnoreCase))
value = ParseBooleanValue(value);
if (options.ContainsKey(name))
{
_log.Error("Multiple arguments with the same name ({0}).", name);
return false;
}
options.Add(name, value);
return true;
}
// Empty => "true" (bare flag); otherwise only the literals "true"/"false"
// (case-insensitive) are accepted.
static string ParseBooleanValue(string value)
{
value = (value ?? string.Empty).UnQuote();
if (string.IsNullOrWhiteSpace(value))
return "true";
if (value.Equals("true", StringComparison.OrdinalIgnoreCase))
return "true";
if (value.Equals("false", StringComparison.OrdinalIgnoreCase))
return "false";
throw new InvalidOperationException("Argument value is not a valid boolean value.");
}
} | mit |
piratecb/up1and | app/dashboard/components/PostEditor.js | 4918 | import React from 'react'
import { inject, observer } from 'mobx-react'
import { withRouter, Link } from 'react-router-dom'
// import SimpleMDE from 'simplemde'
import Toolbox from './Toolbox'
function EditorHeader(props) {
return (
<div className='writer-head'>
<div className='writer-head-left'>
<Link to="/posts" className="navigate-back" onClick={props.onBackClicked}>
<i className="icon ion-ios-arrow-thin-left"></i><span>Posts</span>
</Link>
</div>
<div className='writer-head-content'>
<button type="button" className="btn btn-ghost" onClick={props.onPublishClicked}>
<span>Publish</span>
</button>
<button type="button" className="btn btn-text btn-icon-only" onClick={props.onMarkdownHelpClicked}>
<i className="icon ion-help-circled"></i>
</button>
<button type="button" className="btn btn-text btn-icon-only" onClick={props.onPhotoChooserClicked}>
<i className="icon ion-image"></i>
</button>
<button type="button" className="btn btn-text btn-icon-only" onClick={props.onPostMetaClicked}>
<i className="icon ion-pricetag"></i>
</button>
</div>
</div>
)
}
// class MarkdownEditor extends React.Component {
// render() {
// return (
// <textarea placeholder="Content" type="text" />
// )
// }
// }
@inject('postEditor', 'postStore', 'asideStore', 'toolboxStore')
@withRouter
@observer
class PostEditor extends React.Component {
constructor(props) {
super(props)
this.onMarkdownHelpClicked = this.onMarkdownHelpClicked.bind(this)
this.onPhotoChooserClicked = this.onPhotoChooserClicked.bind(this)
this.onPostMetaClicked = this.onPostMetaClicked.bind(this)
this.onPublishClicked = this.onPublishClicked.bind(this)
this.onDeleteClicked = this.onDeleteClicked.bind(this)
this.onBackClicked = this.onBackClicked.bind(this)
this.onMaskClicked = this.onMaskClicked.bind(this)
}
componentWillMount() {
this.props.postEditor.setID(this.props.match.params.id)
}
componentDidMount() {
this.props.asideStore.hide()
this.props.postEditor.load()
document.title = this.props.match.params.id ? 'Edit Post' : 'New Post'
}
componentDidUpdate(prevProps) {
if (this.props.match.params.id !== prevProps.match.params.id) {
this.props.postEditor.setID(this.props.match.params.id)
this.props.postEditor.load()
}
}
onChange = e => {
this.props.postEditor.setValue(e.target)
if (e.target.name === 'content') this.setTextareaHeight()
}
setTextareaHeight() {
this.refs.content.style.cssText = 'height:' + this.refs.content.scrollHeight + 'px'
}
onMarkdownHelpClicked(e) {
this.props.toolboxStore.show('help')
}
onPhotoChooserClicked(e) {
this.props.toolboxStore.show('photo')
}
onPostMetaClicked(e) {
this.props.toolboxStore.show('meta')
}
onMaskClicked(e) {
this.props.toolboxStore.hide()
}
onDeleteClicked(e) {
const id = this.props.postEditor.id
if (id) {
this.props.postStore.destory(id)
.then(() => this.props.history.replace(`/posts/`))
}
}
onBackClicked(e) {
const status = this.props.postEditor.id && this.props.postEditor.status
this.props.postEditor.submit(status)
.then(post => {
this.props.postEditor.reset()
})
}
onPublishClicked(e) {
e.preventDefault();
const { postEditor } = this.props
postEditor.submit(true)
.then(post => {
postEditor.reset()
this.props.history.replace(`/posts/`)
})
}
render() {
const { inProgress, errors, title, headline, content } = this.props.postEditor
return (
<div className="main">
<EditorHeader
onPublishClicked={this.onPublishClicked}
onMarkdownHelpClicked={this.onMarkdownHelpClicked}
onPhotoChooserClicked={this.onPhotoChooserClicked}
onPostMetaClicked={this.onPostMetaClicked}
onBackClicked={this.onBackClicked}
/>
<section className="writer-main">
<form>
<div className="post-field title">
<input placeholder="Title" type="text" value={title} name="title" onChange={this.onChange} />
</div>
<div className="post-field headline">
<input placeholder="Headline" type="text" value={headline} name="headline" onChange={this.onChange} />
</div>
<div className="post-field content">
<textarea placeholder="Content" type="text" className="markdown-area" name="content"
ref="content" value={content} onChange={this.onChange} />
</div>
</form>
</section>
<Toolbox onDeleteClicked={this.onDeleteClicked} onMaskClicked={this.onMaskClicked}/>
</div>
)
}
}
export default PostEditor | mit |
gautamsi/aurelia-OfficeUIFabric | dist/amd/Label/Label.js | 1771 | var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
var __metadata = (this && this.__metadata) || function (k, v) {
if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
};
define(["require", "exports", 'aurelia-framework'], function (require, exports, aurelia_framework_1) {
var Label = (function () {
function Label() {
}
Label = __decorate([
aurelia_framework_1.customElement('office-label'),
__metadata('design:paramtypes', [])
], Label);
return Label;
})();
exports.Label = Label;
});
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJzb3VyY2VzIjpbIkxhYmVsL0xhYmVsLnRzIl0sIm5hbWVzIjpbIkxhYmVsIiwiTGFiZWwuY29uc3RydWN0b3IiXSwibWFwcGluZ3MiOiI7Ozs7Ozs7Ozs7SUFFQTtRQUFBQTtRQUVBQyxDQUFDQTtRQUZERDtZQUFDQSxpQ0FBYUEsQ0FBQ0EsY0FBY0EsQ0FBQ0E7O2tCQUU3QkE7UUFBREEsWUFBQ0E7SUFBREEsQ0FGQSxBQUVDQSxJQUFBO0lBRFksYUFBSyxRQUNqQixDQUFBIiwiZmlsZSI6IkxhYmVsL0xhYmVsLmpzIiwic291cmNlc0NvbnRlbnQiOlsiaW1wb3J0IHtjdXN0b21FbGVtZW50LCBiaW5kYWJsZX0gZnJvbSAnYXVyZWxpYS1mcmFtZXdvcmsnO1xyXG5cclxuQGN1c3RvbUVsZW1lbnQoJ29mZmljZS1sYWJlbCcpXHJcbmV4cG9ydCBjbGFzcyBMYWJlbCB7ICAgIFxyXG59XHJcbiJdLCJzb3VyY2VSb290IjoiL3NvdXJjZS8ifQ==
| mit |
fuybooo/modularization | app/vendor/bootstrap-table/src/locale/bootstrap-table-hr-HR.js | 732 | !function(r){"use strict";r.fn.bootstrapTable.locales["hr-HR"]={formatLoadingMessage:function(){return"Molimo pričekajte ..."},formatRecordsPerPage:function(r){return r+" broj zapisa po stranici"},formatShowingRows:function(r,n,o){return"Prikazujem "+r+". - "+n+". od ukupnog broja zapisa "+o},formatSearch:function(){return"Pretraži"},formatNoMatches:function(){return"Nije pronađen niti jedan zapis"},formatPaginationSwitch:function(){return"Prikaži/sakrij stranice"},formatRefresh:function(){return"Osvježi"},formatToggle:function(){return"Promijeni prikaz"},formatColumns:function(){return"Kolone"},formatAllRows:function(){return"Sve"}},r.extend(r.fn.bootstrapTable.defaults,r.fn.bootstrapTable.locales["hr-HR"])}(jQuery); | mit |
moteus/lua-odbc | test/dba/test.lua | 23964 | print("------------------------------------")
print("Lua version: " .. (_G.jit and _G.jit.version or _G._VERSION))
print("------------------------------------")
print("")
local HAS_RUNNER = not not lunit
local IS_WINDOWS = (require"package".config:sub(1,1) == '\\')
-- Protected `require`: on success returns the loaded module followed by the
-- name(s) that were passed in; on failure returns nil plus the error message.
local function prequire(...)
  local loaded, result = pcall(require, ...)
  if not loaded then
    return nil, result
  end
  return result, ...
end
-- Enable coverage collection when luacov is installed (best effort).
prequire"luacov"
-- Backend loaders keyed by backend name: each returns the dba implementation
-- under test plus the connect() arguments for an in-memory SQLite database.
local LoadLib = {
["odbc.dba"] = function()
return require "odbc.dba",{
{
Driver = IS_WINDOWS and "SQLite3 ODBC Driver" or "SQLite3";
Database = ":memory:";
}
}
end;
["luasql"] = function()
return require "dba.luasql".load('sqlite3'), {":memory:"}
end;
["odbc.luasql"] = function()
local dba = require "dba"
local luasql = require "odbc.luasql"
dba = dba.load(luasql.odbc)
return dba, {"SQLite3memory"}
end
}
-- Pack a vararg list into a table whose field `n` records the true argument
-- count (so trailing nils survive, like table.pack in Lua 5.2+).
local function pack_n(...)
  local packed = { ... }
  packed.n = select("#", ...)
  return packed
end
local to_n = tonumber
-- Lua 5.1 has a global unpack; 5.2+ moved it into the table library.
local unpack = unpack or table.unpack
local lunit = require "lunit"
-- NOTE(review): the duplicated require below looks accidental but is harmless
-- (require caches modules).
local lunit = require "lunit"
local IS_LUA52 = _VERSION >= 'Lua 5.2'
-- Compatibility shim: newer lunit exports TEST_CASE directly; otherwise fall
-- back to the 5.1 module()/setfenv() idiom or lunit.module() on 5.2.
TEST_CASE = lunit.TEST_CASE or function (name)
if not IS_LUA52 then
module(name, package.seeall, lunit.testcase)
setfenv(2, _M)
else
return lunit.module(name, 'seeall')
end
end
local function make_tast(name)
local CNN_TYPE = name
local CNN_ROWS = 10
-- Create the Agent fixture table and fill it with CNN_ROWS deterministic
-- rows: ID = 1..CNN_ROWS, Name = 'Agent#<ID>'.
local function init_db(cnn)
local fmt = string.format
assert(cnn:exec"create table Agent(ID INTEGER PRIMARY KEY, Name char(32))")
for i = 1, CNN_ROWS do
assert(cnn:exec(fmt("insert into Agent(ID,NAME)values(%d, 'Agent#%d')", i, i)))
end
end
-- Verifies the dba module constants/constructors and the full method surface
-- of an Environment object for the backend under test.
local _ENV = TEST_CASE('Environment.' .. name) do
local env, dba, cnn
function setup()
local CNN_PARAMS dba, CNN_PARAMS = LoadLib[CNN_TYPE]()
cnn = assert(dba.Connect(unpack(CNN_PARAMS)))
init_db(cnn)
end
function teardown()
if cnn then cnn:destroy() end
if env then env:destroy() end
end
function test_interface()
assert_not_nil(dba.PARAM_NULL)
assert_not_nil(dba.PARAM_DEFAULT)
assert_function(dba.Environment)
assert_function(dba.Connection)
assert_function(dba.Connect)
env = dba.Environment()
assert_function(env.connection)
assert_function(env.connect)
assert_function(env.destroy)
assert_function(env.destroyed)
assert_function(env.handle)
assert_function(env.set_config)
assert_function(env.get_config)
end
end
-- Exercises the Connection API: interface surface, reconnect, exec semantics,
-- the each/rows/first_* families, transactions, config flags and fetch_all.
local _ENV = TEST_CASE('Connection.' .. name) do
local cnn, dba
function setup()
local CNN_PARAMS dba, CNN_PARAMS = LoadLib[CNN_TYPE]()
cnn = assert(dba.Connect(unpack(CNN_PARAMS)))
init_db(cnn)
end
function teardown()
if cnn then cnn:destroy() end
end
function test_interface()
assert_function(cnn.connect)
assert_function(cnn.disconnect)
assert_function(cnn.connected)
assert_function(cnn.destroy)
assert_function(cnn.destroyed)
assert_function(cnn.exec)
assert_function(cnn.each)
assert_function(cnn.ieach)
assert_function(cnn.neach)
assert_function(cnn.teach)
assert_function(cnn.first_row)
assert_function(cnn.first_irow)
assert_function(cnn.first_nrow)
assert_function(cnn.first_trow)
assert_function(cnn.first_value)
assert_function(cnn.fetch_all)
assert_function(cnn.rows)
assert_function(cnn.irows)
assert_function(cnn.nrows)
assert_function(cnn.trows)
assert_function(cnn.commit)
assert_function(cnn.rollback)
assert_function(cnn.set_autocommit)
assert_function(cnn.get_autocommit)
assert_function(cnn.query)
assert_function(cnn.prepare)
assert_function(cnn.handle)
assert_function(cnn.set_config)
assert_function(cnn.get_config)
assert_function(cnn.environment)
end
-- disconnect/connect round trip keeps the object usable.
function test_reconnect()
assert_true(cnn:connected())
assert_true(cnn:disconnect())
assert_false(not not cnn:connected())
assert_true(not not cnn:connect())
end
-- exec() is for statements without result sets; SELECT must fail.
function test_exec_fail()
assert_nil(cnn:exec("select ID, Name from Agent order by ID"))
end
-- each/ieach/neach/teach: callback per row as positional args, indexed row,
-- named row, and mixed row respectively; callback return values short-circuit.
function test_each()
local sql = "select ID, Name from Agent order by ID"
local n = 0
cnn:each(sql, function(ID, Name)
n = n + 1
assert_equal(n, to_n(ID))
end)
assert_equal(CNN_ROWS, n)
n = 0
cnn:ieach(sql, function(row)
n = n + 1
assert_equal(n, to_n(row[1]))
end)
assert_equal(CNN_ROWS, n)
n = 0
cnn:neach(sql, function(row)
n = n + 1
assert_equal(n, to_n(row.ID))
end)
assert_equal(CNN_ROWS, n)
n = 0
cnn:teach(sql, function(row)
n = n + 1
assert_equal(n, to_n(row.ID))
assert_equal(n, to_n(row[1]))
end)
assert_equal(CNN_ROWS, n)
n = 0
local args = pack_n(cnn:each(sql, function(ID, Name)
n = n + 1
return nil, 1, nil, 2
end))
assert_equal(1, n)
assert_equal(4, args.n)
assert_equal(1, args[2])
assert_equal(2, args[4])
assert_nil(args[1])
assert_nil(args[3])
n = 0
sql = "select ID, Name from Agent where ID > :ID order by ID"
local par = {ID = 1}
assert_true(cnn:each(sql, par, function(ID)
n = n + 1
assert_equal(par.ID + 1, to_n(ID))
return true
end))
assert_equal(1, n)
end
-- rows/irows/nrows/trows: same four row shapes, but as for-loop iterators.
function test_rows()
local sql = "select ID, Name from Agent order by ID"
local n = 0
for ID, Name in cnn:rows(sql) do
n = n + 1
assert_equal(n, to_n(ID))
end
assert_equal(CNN_ROWS, n)
n = 0
for row in cnn:irows(sql) do
n = n + 1
assert_equal(n, to_n(row[1]))
end
assert_equal(CNN_ROWS, n)
n = 0
for row in cnn:nrows(sql) do
n = n + 1
assert_equal(n, to_n(row.ID))
end
assert_equal(CNN_ROWS, n)
n = 0
for row in cnn:trows(sql) do
n = n + 1
assert_equal(n, to_n(row.ID))
assert_equal(n, to_n(row[1]))
end
assert_equal(CNN_ROWS, n)
n = 0
sql = "select ID, Name from Agent where ID > :ID order by ID"
local par = {ID = 1}
for ID in cnn:rows(sql, par) do
n = n + 1
assert_equal(par.ID + 1, to_n(ID))
break
end
assert_equal(1, n)
end
-- first_row/first_irow/first_nrow/first_trow/first_value return only the
-- first result row (or single scalar), with optional named parameters.
function test_first()
local sql = "select ID, Name from Agent order by ID"
local ID, Name = cnn:first_row(sql)
assert_equal(1, to_n(ID))
assert_equal("Agent#1", Name)
local row
row = cnn:first_nrow(sql)
assert_equal(1, to_n(row.ID))
assert_equal("Agent#1", row.Name)
row = cnn:first_irow(sql)
assert_equal(1, to_n(row[1]))
assert_equal("Agent#1", row[2])
row = cnn:first_trow(sql)
assert_equal(1, to_n(row[1]))
assert_equal(1, to_n(row.ID))
assert_equal("Agent#1", row[2])
assert_equal("Agent#1", row.Name)
assert_equal(CNN_ROWS, to_n(cnn:first_value("select count(*) from Agent")))
assert_equal(CNN_ROWS, to_n(cnn:first_value("select ID from Agent where ID=:ID",{ID=CNN_ROWS})))
end
-- With autocommit off, rollback() must undo an executed delete.
function test_txn()
assert_equal(CNN_ROWS, to_n(cnn:first_value("select count(*) from Agent")))
cnn:set_autocommit(false)
assert_number(cnn:exec("delete from Agent"))
assert_equal(0, to_n(cnn:first_value("select count(*) from Agent")))
cnn:rollback()
assert_equal(CNN_ROWS, to_n(cnn:first_value("select count(*) from Agent")))
end
function test_rowsaffected()
assert_equal(CNN_ROWS, to_n(cnn:first_value("select count(*) from Agent")))
end
-- exec returns the affected-row count for DML, nil for result-set queries.
function test_exec()
assert_nil(cnn:exec("select ID, Name from Agent order by ID"))
assert_number(cnn:exec("update Agent set ID=ID"))
end
-- Config flags propagate from environment to connection; a connection-level
-- override shadows (but does not change) the environment value, and setting
-- nil restores inheritance.
function test_config()
local env = assert(cnn:environment())
local p1 = assert_boolean(env:get_config("FORCE_REPLACE_PARAMS"))
local p2 = assert_boolean(env:get_config("IGNORE_NAMED_PARAMS") )
assert_equal(p1, cnn:get_config("FORCE_REPLACE_PARAMS"))
assert_equal(p2, cnn:get_config("IGNORE_NAMED_PARAMS"))
env:set_config("FORCE_REPLACE_PARAMS", not p1)
cnn:set_config("IGNORE_NAMED_PARAMS", not p2)
assert_equal( not p1, env:get_config("FORCE_REPLACE_PARAMS") )
assert_equal( p2, env:get_config("IGNORE_NAMED_PARAMS") )
assert_equal( not p1, cnn:get_config("FORCE_REPLACE_PARAMS") )
assert_equal( not p2, cnn:get_config("IGNORE_NAMED_PARAMS") )
cnn:set_config("IGNORE_NAMED_PARAMS", nil)
assert_equal( p2, cnn:get_config("IGNORE_NAMED_PARAMS") )
end
-- Named :params are substituted client-side when FORCE_REPLACE_PARAMS is set.
function test_apply_params()
cnn:set_config("FORCE_REPLACE_PARAMS", true)
assert_equal(1, cnn:exec([[
update Agent set NAME = NAME where id=:id;
]],{id=1}))
assert_equal(1, cnn:exec([[
update Agent set NAME = NAME where NAME=:NAME;
]],{NAME="Agent#1"}))
end
-- fetch_all modes: "n" numeric rows, "a" associative rows, "an" both.
function test_fetch_all()
local sql = "select ID, Name from Agent order by ID"
local t = assert_table(cnn:fetch_all("n", sql))
assert_equal(CNN_ROWS, #t)
for i, row in ipairs(t)do
assert_equal(i, to_n(row[1]))
assert_nil(row.ID)
end
local t = assert_table(cnn:fetch_all("a", sql))
assert_equal(CNN_ROWS, #t)
for i, row in ipairs(t)do
assert_equal(i, to_n(row.ID))
assert_nil(row[1])
end
local t = assert_table(cnn:fetch_all("an", sql))
assert_equal(CNN_ROWS, #t)
for i, row in ipairs(t)do
assert_equal(i, to_n(row[1]))
assert_equal(i, to_n(row.ID))
end
local sql = "select ID, Name from Agent where ID=:ID order by ID"
local t = assert_table(cnn:fetch_all("n", sql, {ID=1}))
assert_equal(1, #t)
for i, row in ipairs(t)do
assert_equal(i, to_n(row[1]))
assert_nil(row.ID)
end
end
end
local _ENV = TEST_CASE('Query.' .. name) do
local dba, cnn, qry
function setup()
local CNN_PARAMS dba, CNN_PARAMS = LoadLib[CNN_TYPE]()
cnn = assert(dba.Connect(unpack(CNN_PARAMS)))
init_db(cnn)
end
function teardown()
if qry then qry:destroy() end
if cnn then cnn:destroy() end
qry, cnn = nil
end
-- Smoke test: every public method of the query API must exist.
function test_interface()
  qry = cnn:query()
  local api = {
    "open", "close", "closed", "opened",
    "destroy", "destroyed",
    "exec", "each", "ieach", "neach", "teach",
    "first_row", "first_irow", "first_nrow", "first_trow", "first_value",
    "fetch_all", "rows", "irows", "nrows", "trows",
    "set_autoclose", "get_autoclose",
    "prepare", "prepared", "unprepare", "supports_prepare",
    "set_sql", "bind", "handle",
    "set_config", "get_config", "connection",
  }
  for _, method_name in ipairs(api) do
    assert_function(qry[method_name])
  end
end
-- Exercises every way a query can receive its SQL and parameters:
-- SQL given at each() time vs. at cnn:query() time, and parameters
-- passed inline vs. pre-bound via qry:bind(). The second half repeats
-- the same matrix through neach() (named-row callback).
function test_create()
local sql = "select ID, Name from Agent order by ID"
local n
-- Positional-column callback: rows must arrive ordered 1..CNN_ROWS.
local function do_test(ID, Name)
n = n + 1
assert_equal(n, to_n(ID))
end
-- SQL supplied to each() on a bare query object.
n = 0
qry = assert(cnn:query())
qry:each(sql, do_test)
assert_equal(CNN_ROWS, n)
qry:destroy()
-- SQL supplied at query-creation time.
n = 0
qry = assert(cnn:query(sql))
qry:each(do_test)
assert_equal(CNN_ROWS, n)
qry:destroy()
-- Parameterized form: the WHERE clause matches every row only when
-- :ID binds to 555, so a full CNN_ROWS result proves the bind worked.
sql = "select ID, Name from Agent where 555=cast(:ID as INTEGER) order by ID"
local par = {ID = 555}
-- SQL + params both passed to each().
n = 0
qry = assert(cnn:query())
qry:each(sql, par, do_test)
assert_equal(CNN_ROWS, n)
qry:destroy()
-- SQL at creation, params passed to each().
n = 0
qry = assert(cnn:query(sql))
qry:each(par, do_test)
assert_equal(CNN_ROWS, n)
qry:destroy()
-- SQL at creation, params pre-bound via bind().
n = 0
qry = assert(cnn:query(sql))
assert_true(qry:bind(par))
qry:each(do_test)
assert_equal(CNN_ROWS, n)
qry:destroy()
--------------------------------------------------------
-- Same matrix again through neach(): the callback now receives one
-- table per row, indexed by column name (this do_test shadows the
-- positional one above).
sql = "select ID, Name from Agent order by ID"
local function do_test(row)
n = n + 1
assert_equal(n, to_n(row.ID))
end
n = 0
qry = assert(cnn:query())
qry:neach(sql, do_test)
assert_equal(CNN_ROWS, n)
qry:destroy()
n = 0
qry = assert(cnn:query(sql))
qry:neach(do_test)
assert_equal(CNN_ROWS, n)
qry:destroy()
sql = "select ID, Name from Agent where 555=cast(:ID as INTEGER) order by ID"
local par = {ID = 555}
n = 0
qry = assert(cnn:query())
qry:neach(sql, par, do_test)
assert_equal(CNN_ROWS, n)
qry:destroy()
n = 0
qry = assert(cnn:query(sql))
qry:neach(par, do_test)
assert_equal(CNN_ROWS, n)
qry:destroy()
n = 0
qry = assert(cnn:query(sql))
assert_true(qry:bind(par))
qry:neach(do_test)
assert_equal(CNN_ROWS, n)
qry:destroy()
end
-- exec() is meant for statements without a result set; running a
-- SELECT through it must fail (returns nil).
function test_exec_fail()
  local select_sql = "select ID, Name from Agent order by ID"
  qry = assert(cnn:query())
  assert_nil(qry:exec(select_sql))
end
-- each() with an inline callback, plus its failure mode on an
-- already-open query.
function test_each()
local sql = "select ID, Name from Agent order by ID"
local n
-- Normal case: callback fires once per row, in order.
n = 0
qry = assert(cnn:query())
qry:each(sql, function(ID)
n = n + 1 assert_equal(n, to_n(ID))
end)
assert_equal(CNN_ROWS, n)
qry:destroy()
n = 0
qry = assert(cnn:query())
assert(qry:open(sql))
-- On an already-open query, passing SQL to each() again must fail
-- (returns nil) without ever invoking the callback...
assert_nil(
qry:each(sql, function(ID)
n = n + 1 assert_equal(n, to_n(ID))
end)
)
assert_equal(0, n)
-- ...while the callback-only form iterates the open result set.
qry:each(function(ID)
n = n + 1 assert_equal(n, to_n(ID))
end)
assert_equal(CNN_ROWS, n)
qry:destroy()
end
-- Iterator interface: rows() yields positional column values,
-- nrows() yields one name-indexed table per row. Both are checked
-- with SQL at iterator-call time, SQL at creation time, inline
-- params, and pre-bound params.
function test_rows()
local sql = "select ID, Name from Agent order by ID"
local n
-- rows(): SQL supplied to the iterator call.
n = 0
qry = assert(cnn:query())
for ID, Name in qry:rows(sql) do
n = n + 1 assert_equal(n, to_n(ID))
end
assert_equal(CNN_ROWS, n)
qry:destroy()
-- rows(): SQL supplied at query creation.
n = 0
qry = assert(cnn:query(sql))
for ID, Name in qry:rows() do
n = n + 1 assert_equal(n, to_n(ID))
end
assert_equal(CNN_ROWS, n)
qry:destroy()
-- Parameterized form: matches all rows only when :ID binds to 555.
sql = "select ID, Name from Agent where 555=cast(:ID as INTEGER) order by ID"
local par = {ID = 555}
n = 0
qry = assert(cnn:query())
for ID, Name in qry:rows(sql, par) do
n = n + 1 assert_equal(n, to_n(ID))
end
assert_equal(CNN_ROWS, n)
qry:destroy()
n = 0
qry = assert(cnn:query(sql))
for ID, Name in qry:rows(par) do
n = n + 1 assert_equal(n, to_n(ID))
end
assert_equal(CNN_ROWS, n)
qry:destroy()
-- Params pre-bound via bind() before iterating.
n = 0
qry = assert(cnn:query(sql))
assert_true(qry:bind(par))
for ID, Name in qry:rows() do
n = n + 1 assert_equal(n, to_n(ID))
end
assert_equal(CNN_ROWS, n)
qry:destroy()
--------------------------------------------------
-- Same matrix through nrows(): each step yields a table indexed by
-- column name.
sql = "select ID, Name from Agent order by ID"
n = 0
qry = assert(cnn:query())
for row in qry:nrows(sql) do
n = n + 1 assert_equal(n, to_n(row.ID))
end
assert_equal(CNN_ROWS, n)
qry:destroy()
n = 0
qry = assert(cnn:query(sql))
for row in qry:nrows() do
n = n + 1 assert_equal(n, to_n(row.ID))
end
assert_equal(CNN_ROWS, n)
qry:destroy()
sql = "select ID, Name from Agent where 555=cast(:ID as INTEGER) order by ID"
local par = {ID = 555}
n = 0
qry = assert(cnn:query())
for row in qry:nrows(sql, par) do
n = n + 1 assert_equal(n, to_n(row.ID))
end
assert_equal(CNN_ROWS, n)
qry:destroy()
n = 0
qry = assert(cnn:query(sql))
for row in qry:nrows(par) do
n = n + 1 assert_equal(n, to_n(row.ID))
end
assert_equal(CNN_ROWS, n)
qry:destroy()
n = 0
qry = assert(cnn:query(sql))
assert_true(qry:bind(par))
for row in qry:nrows() do
n = n + 1 assert_equal(n, to_n(row.ID))
end
assert_equal(CNN_ROWS, n)
qry:destroy()
end
-- Prepared statements: created via cnn:prepare(), then driven with
-- every supported bind style (params table to each(), bind(table),
-- bind(name, value), and — when the backend exposes bindnum —
-- bind(index, value) and bind{array}).
function test_prepare()
local sql = "select ID, Name from Agent order by ID"
local n
-- Rows must arrive ordered 1..CNN_ROWS.
local function do_test(ID, Name)
n = n + 1
assert_equal(n, to_n(ID))
end
-- Unparameterized prepared statement.
n = 0
qry = assert(cnn:prepare(sql))
qry:each(do_test)
assert_equal(CNN_ROWS, n)
qry:destroy()
-- Parameterized: matches all rows only when :ID binds to 555.
sql = "select ID, Name from Agent where 555 = cast(:ID as INTEGER) order by ID"
local par = {ID = 555}
-- Params table passed directly to each().
n = 0
qry = assert(cnn:prepare(sql))
qry:each(par, do_test)
assert_equal(CNN_ROWS, n)
qry:destroy()
-- bind() with a params table.
n = 0
qry = assert(cnn:prepare(sql))
assert_true(qry:bind(par))
qry:each(do_test)
assert_equal(CNN_ROWS, n)
qry:destroy()
-- bind() with an explicit (name, value) pair.
n = 0
qry = assert(cnn:prepare(sql))
assert_true(qry:bind("ID", par.ID))
qry:each(do_test)
assert_equal(CNN_ROWS, n)
qry:destroy()
-- Positional binding is optional; only exercised when the backend
-- provides bindnum.
if qry.bindnum then
n = 0
qry = assert(cnn:prepare(sql))
assert_true(qry:bind(1, par.ID))
qry:each(do_test)
assert_equal(CNN_ROWS, n)
qry:destroy()
n = 0
qry = assert(cnn:prepare(sql))
assert_true(qry:bind{par.ID})
qry:each(do_test)
assert_equal(CNN_ROWS, n)
qry:destroy()
end
end
-- unprepare() must succeed and leave the query unprepared, regardless
-- of whether the backend supported prepare in the first place.
function test_unprepare()
  local stmt = "select ID, Name from Agent order by ID"
  qry = assert(cnn:prepare(stmt))
  -- Right after cnn:prepare(), prepared() mirrors backend capability.
  assert_equal(qry:supports_prepare(), qry:prepared())
  assert_true(qry:unprepare())
  assert_false(qry:prepared())
  qry:destroy()
end
-- Destroying a connection must cascade-destroy its query objects,
-- and destroying an already-destroyed query must be a harmless no-op.
function test_destroy()
qry = assert(cnn:query())
-- Fresh query: closed, not destroyed.
assert_true(qry:closed())
assert_false(qry:destroyed())
qry:open("select ID, Name from Agent order by ID")
assert_false(qry:closed())
assert_false(qry:destroyed())
-- Killing the connection while the query is open must not raise.
assert_pass(function() cnn:destroy() end)
-- assert_pass(function() qry:closed() end)
-- assert_true(qry:closed())
-- The query was destroyed as part of the connection teardown.
assert_true(qry:destroyed())
-- Double destroy is safe.
assert_pass(function() qry:destroy() end)
end
-- first_* accessors return only the first row of a result set:
-- first_row as multiple values, first_irow indexed by position,
-- first_nrow by column name, first_trow by both, and first_value as
-- a single scalar. Checked on a plain query, a parameterized prepared
-- statement, and a prepared statement with pre-bound params.
function test_first()
local sql = "select ID, Name from Agent order by ID"
qry = cnn:query()
-- Plain query: first row is Agent#1.
local ID, Name = qry:first_row(sql)
assert_equal(1, to_n(ID))
assert_equal("Agent#1", Name)
local row
row = qry:first_nrow(sql)
assert_equal(1, to_n(row.ID))
assert_equal("Agent#1", row.Name)
row = qry:first_irow(sql)
assert_equal(1, to_n(row[1]))
assert_equal("Agent#1", row[2])
-- "t" rows carry both positional and named keys.
row = qry:first_trow(sql)
assert_equal(1, to_n(row[1]))
assert_equal(1, to_n(row.ID))
assert_equal("Agent#1", row[2])
assert_equal("Agent#1", row.Name)
local v = assert(qry:first_value("select count(*) from Agent"))
assert_equal(CNN_ROWS, to_n(v))
local v = assert(qry:first_value("select ID from Agent where ID=:ID",{ID=CNN_ROWS}))
assert_equal(CNN_ROWS, to_n(v))
qry:destroy()
-- Prepared statement with inline params: select the last agent.
sql = "select ID, Name from Agent where ID=:ID"
local par = {ID=CNN_ROWS}
local Agent = "Agent#" .. CNN_ROWS
qry = cnn:prepare(sql)
ID, Name = qry:first_row(par)
assert_equal(CNN_ROWS, to_n(ID))
assert_equal(Agent, Name)
row = qry:first_nrow(par)
assert_equal(CNN_ROWS, to_n(row.ID))
assert_equal(Agent, row.Name)
row = qry:first_irow(par)
assert_equal(CNN_ROWS, to_n(row[1]))
assert_equal(Agent, row[2])
row = qry:first_trow(par)
assert_equal(CNN_ROWS, to_n(row[1]))
assert_equal(CNN_ROWS, to_n(row.ID))
assert_equal(Agent, row[2])
assert_equal(Agent, row.Name)
qry:destroy()
-- Prepared statement with params pre-bound via bind().
qry = cnn:prepare(sql)
assert_true(qry:bind(par))
ID, Name = qry:first_row()
assert_equal(CNN_ROWS, to_n(ID))
assert_equal(Agent, Name)
row = qry:first_nrow()
assert_equal(CNN_ROWS, to_n(row.ID))
assert_equal(Agent, row.Name)
row = qry:first_irow()
assert_equal(CNN_ROWS, to_n(row[1]))
assert_equal(Agent, row[2])
row = qry:first_trow()
assert_equal(CNN_ROWS, to_n(row[1]))
assert_equal(CNN_ROWS, to_n(row.ID))
assert_equal(Agent, row[2])
assert_equal(Agent, row.Name)
end
-- Config options cascade from connection to query: a query-level
-- override shadows the connection value, and clearing it with nil
-- restores the inherited value.
function test_config()
  qry = cnn:query()
  assert_equal(cnn, qry:connection())
  local force  = assert_boolean(cnn:get_config("FORCE_REPLACE_PARAMS"))
  local ignore = assert_boolean(cnn:get_config("IGNORE_NAMED_PARAMS"))
  -- The query starts out inheriting both values from its connection.
  assert_equal(force,  qry:get_config("FORCE_REPLACE_PARAMS"))
  assert_equal(ignore, qry:get_config("IGNORE_NAMED_PARAMS"))
  -- Flip one option on the connection, the other on the query.
  cnn:set_config("FORCE_REPLACE_PARAMS", not force)
  qry:set_config("IGNORE_NAMED_PARAMS", not ignore)
  -- Connection sees its own change plus the query's original value...
  assert_equal(not force, cnn:get_config("FORCE_REPLACE_PARAMS"))
  assert_equal(ignore,    cnn:get_config("IGNORE_NAMED_PARAMS"))
  -- ...while the query sees the inherited change and its own override.
  assert_equal(not force,  qry:get_config("FORCE_REPLACE_PARAMS"))
  assert_equal(not ignore, qry:get_config("IGNORE_NAMED_PARAMS"))
  -- Clearing the override falls back to the connection value.
  qry:set_config("IGNORE_NAMED_PARAMS", nil)
  assert_equal(ignore, qry:get_config("IGNORE_NAMED_PARAMS"))
end
-- Query-level fetch_all: modes "n" (numeric), "a" (named) and "an"
-- (both) are checked with SQL stored on the query and with SQL passed
-- to fetch_all itself, then with inline and pre-bound parameters.
-- The query is destroyed and recreated between runs because fetch_all
-- consumes the result set.
function test_fetch_all()
local sql = "select ID, Name from Agent order by ID"
-- SQL stored on the query; mode "n" gives numeric indices only.
qry = assert(cnn:query(sql))
local t = assert_table(qry:fetch_all("n"))
assert_equal(CNN_ROWS, #t)
for i, row in ipairs(t)do
assert_equal(i, to_n(row[1]))
assert_nil(row.ID)
end
assert_true(qry:destroy())
-- Mode "a": named keys only.
qry = assert(cnn:query(sql))
local t = assert_table(qry:fetch_all("a"))
assert_equal(CNN_ROWS, #t)
for i, row in ipairs(t)do
assert_equal(i, to_n(row.ID))
assert_nil(row[1])
end
assert_true(qry:destroy())
-- Mode "an": both index styles on every row.
qry = assert(cnn:query(sql))
local t = assert_table(qry:fetch_all("an"))
assert_equal(CNN_ROWS, #t)
for i, row in ipairs(t)do
assert_equal(i, to_n(row[1]))
assert_equal(i, to_n(row.ID))
end
assert_true(qry:destroy())
-- Same three modes again, with SQL passed to fetch_all directly.
qry = assert(cnn:query())
local t = assert_table(qry:fetch_all("n", sql))
assert_equal(CNN_ROWS, #t)
for i, row in ipairs(t)do
assert_equal(i, to_n(row[1]))
assert_nil(row.ID)
end
assert_true(qry:destroy())
qry = assert(cnn:query())
local t = assert_table(qry:fetch_all("a", sql))
assert_equal(CNN_ROWS, #t)
for i, row in ipairs(t)do
assert_equal(i, to_n(row.ID))
assert_nil(row[1])
end
assert_true(qry:destroy())
qry = assert(cnn:query())
local t = assert_table(qry:fetch_all("an", sql))
assert_equal(CNN_ROWS, #t)
for i, row in ipairs(t)do
assert_equal(i, to_n(row[1]))
assert_equal(i, to_n(row.ID))
end
assert_true(qry:destroy())
-- Parameterized: params table passed to fetch_all on a stored-SQL query.
sql = "select ID, Name from Agent where ID=:ID order by ID"
qry = assert(cnn:query(sql))
local t = assert_table(qry:fetch_all("n", {ID=1}))
assert_equal(1, #t)
for i, row in ipairs(t)do
assert_equal(i, to_n(row[1]))
assert_nil(row.ID)
end
assert_true(qry:destroy())
-- Parameterized: SQL and params both passed to fetch_all.
sql = "select ID, Name from Agent where ID=:ID order by ID"
qry = assert(cnn:query())
local t = assert_table(qry:fetch_all("n", sql, {ID=1}))
assert_equal(1, #t)
for i, row in ipairs(t)do
assert_equal(i, to_n(row[1]))
assert_nil(row.ID)
end
assert_true(qry:destroy())
-- Parameterized: params pre-bound via bind() before fetch_all.
sql = "select ID, Name from Agent where ID=:ID order by ID"
qry = assert(cnn:query(sql))
assert_true(qry:bind("ID", 1))
local t = assert_table(qry:fetch_all("n"))
assert_equal(1, #t)
for i, row in ipairs(t)do
assert_equal(i, to_n(row[1]))
assert_nil(row.ID)
end
assert_true(qry:destroy())
end
-- apply_params() textually substitutes named parameters, rendering
-- each Lua value according to its type (NULL/DEFAULT markers pass
-- through unquoted, numbers stay bare, strings are single-quoted).
function test_replace_string()
  local tpl = 'select :NAME'
  local cases = {
    { dba.PARAM_NULL,    "select NULL"    },
    { dba.PARAM_DEFAULT, "select DEFAULT" },
    { 1,                 "select 1"       },
    { "1",               "select '1'"     },
  }
  for _, case in ipairs(cases) do
    assert_equal(case[2], cnn:apply_params(tpl, {NAME = case[1]}))
  end
end
end
local _ENV = TEST_CASE('ODBC.' .. name) do
local dba, cnn, qry
local IS_ODBC
-- Per-test fixture for the ODBC-specific cases. The backend is
-- treated as ODBC only when it exposes both cnn.statement and
-- cnn.driverconnect; otherwise the database is not even initialized
-- and each test bails out early on IS_ODBC.
function setup()
  local CNN_PARAMS
  dba, CNN_PARAMS = LoadLib[CNN_TYPE]()
  cnn = assert(dba.Connect(unpack(CNN_PARAMS)))
  IS_ODBC = not not (cnn.statement and cnn.driverconnect)
  if not IS_ODBC then return end
  init_db(cnn)
end
-- Per-test cleanup: release the query before its connection.
-- Unlike the original, also drop the references afterwards — matching
-- the Query test-case teardown — so a failed setup cannot leave
-- stale, already-destroyed handles visible to the next test.
function teardown()
  if qry then qry:destroy() end
  if cnn then cnn:destroy() end
  qry, cnn = nil
end
-- ODBC typed bind variables: dba.ulong wraps a value with an explicit
-- C type. Checked as an inline parameter, via bind(), and through
-- output-column binding with vfetch().
function test_bind_variables()
if not IS_ODBC then return end
local ID = dba.ulong(555)
local sql = "select ID, Name from Agent where 555=cast(:ID as INTEGER) order by ID"
local n
-- Rows must arrive ordered 1..CNN_ROWS.
local function do_test(ID, Name)
n = n + 1
assert_equal(n, to_n(ID))
end
-- Typed value inside a params table.
n = 0
qry = assert(cnn:query())
qry:each(sql, {ID=ID}, do_test)
assert_equal(CNN_ROWS, n)
qry:destroy()
-- Typed value through bind(name, value).
n = 0
qry = assert(cnn:query(sql))
assert_true(qry:bind("ID", ID))
qry:each(do_test)
assert_equal(CNN_ROWS, n)
qry:destroy()
-- Output-column binding: vbind_col_ulong(1) binds a typed holder to
-- result column 1; each vfetch() then fills it, and the value is read
-- back via vID:get(). NOTE(review): presumably this maps to ODBC
-- SQLBindCol/SQLFetch — confirm against the lua-odbc docs.
n = 0
qry = assert(cnn:query(sql))
assert_true(qry:bind("ID", ID))
local vID = assert(qry:vbind_col_ulong(1))
assert(qry:open())
while qry:vfetch() do
n = n + 1
assert_equal(n, vID:get())
end
assert_equal(CNN_ROWS, n)
qry:destroy()
end
-- Async capability probes must all report a boolean (never nil, never
-- raise) on both the connection and a query object.
function test_async()
  if not IS_ODBC then return end
  local probes = {
    "supports_async_mode",
    "supports_async_connection",
    "supports_async_statement",
    "supports_async_query",
  }
  for _, probe in ipairs(probes) do
    assert_boolean(cnn[probe](cnn))
  end
  qry = assert(cnn:query())
  assert_boolean(qry:supports_async_mode())
end
end
end -- make_tast
-- Instantiate the whole suite once per enabled backend.
local backends = {
  "odbc.dba",
  -- "luasql",      -- disabled backends kept for reference
  -- "odbc.luasql",
}
for _, backend in ipairs(backends) do
  make_tast(backend)
end
-- Run the suite directly unless an external runner is driving it.
-- (Removed the trailing "| mit" — license-column residue from a
-- dataset export that made this line a Lua syntax error.)
if not HAS_RUNNER then lunit.run() end