hexsha
stringlengths 40
40
| size
int64 5
1.05M
| ext
stringclasses 98
values | lang
stringclasses 21
values | max_stars_repo_path
stringlengths 3
945
| max_stars_repo_name
stringlengths 4
118
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
368k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
945
| max_issues_repo_name
stringlengths 4
118
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
134k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
945
| max_forks_repo_name
stringlengths 4
135
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 5
1.05M
| avg_line_length
float64 1
1.03M
| max_line_length
int64 2
1.03M
| alphanum_fraction
float64 0
1
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
6524d4c1c81726c419e60ebe02992ba4b997c9c6
| 408
|
css
|
CSS
|
src/css/demo.css
|
jarwol/aTable
|
85a62895868e02b159de86f4bf8f49ca3c08063b
|
[
"MIT"
] | 3
|
2015-08-21T16:33:37.000Z
|
2017-09-23T14:55:00.000Z
|
src/css/demo.css
|
jarwol/aTable
|
85a62895868e02b159de86f4bf8f49ca3c08063b
|
[
"MIT"
] | 1
|
2015-02-09T03:44:28.000Z
|
2015-02-09T17:52:43.000Z
|
src/css/demo.css
|
jarwol/aTable
|
85a62895868e02b159de86f4bf8f49ca3c08063b
|
[
"MIT"
] | null | null | null |
/* Base page typography and vertical breathing room for the demo page. */
body {
    font-family: "DejaVu Sans Condensed";
    padding: 50px 0;
}

/* Horizontal rules: half page width, centered. */
hr {
    width: 50%;
    margin-left: auto;
    margin-right: auto;
}

h4 {
    margin: 45px 0;
}

/* Code samples scroll horizontally instead of wrapping. */
pre {
    overflow-x: auto;
    word-wrap: normal;
    white-space: pre;
}

.highlight {
    background-color: #FFFF30;
}

.bold {
    font-weight: bold;
}

/* NOTE(review): same color as .highlight — looks redundant; confirm both
   class names are actually referenced in the demo markup before merging. */
.yellow {
    background-color: #FFFF30;
}

.italic {
    font-style: italic;
}
| 11.333333
| 41
| 0.57598
|
b0a4bdfb515fadad566f8603bc01be2fbc1c8dba
| 459
|
py
|
Python
|
metrics/metrics.py
|
9dev/django-metrics
|
ba876bc8291f0b8160cd48507e1a8bf628e7d204
|
[
"MIT"
] | null | null | null |
metrics/metrics.py
|
9dev/django-metrics
|
ba876bc8291f0b8160cd48507e1a8bf628e7d204
|
[
"MIT"
] | null | null | null |
metrics/metrics.py
|
9dev/django-metrics
|
ba876bc8291f0b8160cd48507e1a8bf628e7d204
|
[
"MIT"
] | null | null | null |
class BaseMetric(object):
    """Common base for all metrics: carries a display name."""

    # Human-readable metric name; subclasses are expected to override this.
    name = None

    def get_name(self):
        """Return the metric's display name (``None`` unless overridden)."""
        return self.name
class ValueMetric(BaseMetric):
    """A metric that reports a single scalar value."""

    # The value to display; subclasses/instances set this.
    value = None

    def get_value(self):
        """Return the metric's current value (``None`` unless overridden)."""
        return self.value
class LineChartMetric(BaseMetric):
    """A metric rendered as a line chart from parallel ``x``/``y`` series."""

    # NOTE(review): class-level (shared) defaults — subclasses appear to
    # override these rather than mutate them in place; confirm with callers.
    x = []
    y = []
    xlabel = 'X Label'
    ylabel = 'Y Label'

    def get_values(self):
        """Pair the x and y series element-wise."""
        return zip(self.x, self.y)

    def get_points(self):
        """Render each (x, y) pair as a ``[x,y]`` string for the chart."""
        points = []
        for x_val, y_val in self.get_values():
            points.append('[{},{}]'.format(x_val, y_val))
        return points
| 17
| 69
| 0.583878
|
07a32be5235cdd555cc22345ea185e6eabb592c8
| 2,073
|
css
|
CSS
|
src/styles/blog.css
|
hmajid2301/personal-site
|
f44f21a769d880fa6f86e57af2a189b017cfc92d
|
[
"MIT"
] | 6
|
2021-03-15T21:40:30.000Z
|
2022-03-24T11:17:40.000Z
|
src/styles/blog.css
|
meokisama/gatsby-portfolio-site
|
c79aac095fe000f1e5ff3752141a756b601d64f3
|
[
"MIT"
] | 10
|
2020-09-11T16:52:44.000Z
|
2022-02-27T07:20:08.000Z
|
src/styles/blog.css
|
hmajid2301/personal-site
|
f44f21a769d880fa6f86e57af2a189b017cfc92d
|
[
"MIT"
] | null | null | null |
/* Blog-post article styling (Tailwind @apply; @screen = min-width breakpoint). */

/* Base body copy. */
.blog-post {
    @apply leading-relaxed text-base;
}
@screen sm {
    .blog-post {
        @apply text-base;
    }
}
@screen lg {
    .blog-post {
        @apply text-lg;
    }
}

/* Headings: h1/h2 one size step above h3-h6, scaled up on sm+. */
.blog-post h1,
.blog-post h2 {
    @apply text-xl my-6 font-semibold;
}
.blog-post h3,
.blog-post h4,
.blog-post h5,
.blog-post h6 {
    @apply text-lg my-3 font-semibold;
}
@screen sm {
    .blog-post h1,
    .blog-post h2 {
        @apply ml-4 text-2xl;
    }
    .blog-post h3,
    .blog-post h4,
    .blog-post h5,
    .blog-post h6 {
        @apply text-xl;
    }
}

/* Links: highlighted background with an animated hover state. */
.blog-post a {
    @apply text-main bg-primary-alt border-b-4 border-primary;
}
.blog-post a:hover {
    @apply bg-primary transition duration-300;
}

/* Paragraph and list spacing. */
.blog-post p {
    @apply mb-8;
}
.blog-post p .code {
    @apply my-2;
}
.blog-post ul,
.blog-post ol {
    @apply mb-8 ml-8;
}
.blog-post li {
    @apply my-4;
}
/* Block children of list items must not add extra bottom margin. */
.blog-post li > p,
.blog-post li > ul,
.blog-post li > ol {
    @apply mb-0;
}
.blog-post ol {
    @apply list-decimal;
}
.blog-post ul {
    @apply list-disc;
}

/* Block quotes and pre blocks share the blue left-bar treatment. */
.blog-post blockquote {
    @apply px-4 mb-4 border-l-8 border-blue-500 py-4;
}
.blog-post pre {
    @apply px-5 mb-4 border-l-8 border-blue-500 pt-2;
}
.blog-post blockquote > p {
    @apply mb-0;
}

/* Simple striped tables. */
.blog-post td,
.blog-post th {
    @apply px-2 py-1 border border-gray-400;
}
.blog-post tr:nth-child(odd) {
    @apply bg-gray-100;
}
.blog-post table {
    @apply mb-6;
}

/* Inline code (not inside a pre block). */
.blog-post :not(pre) > code.language-text {
    white-space: pre-line;
}
code[class*='language-'] {
    @apply font-code text-base;
}
.gatsby-code-title {
    @apply not-italic text-white font-code;
}

/* Sticky language badge in the top-left corner of highlighted code blocks. */
.gatsby-highlight::before {
    content: attr(data-language);
    background: #222;
    @apply sticky px-5 pt-1 uppercase top-0 left-0 text-white;
}
.gatsby-highlight {
    @apply mb-5 relative;
}
.gatsby-highlight:hover {
    @apply cursor-pointer;
}
/* Active-state feedback — presumably a click/copy affordance; confirm in JS. */
.gatsby-highlight:active:after {
    background-color: #444;
}
.gatsby-highlight pre[class*='language-'] {
    @apply p-5 pt-10;
}

/* Autolinked heading anchors: suppress link styling until hovered. */
.header-anchor.before {
    @apply bg-transparent border-b-0;
}
.header-anchor.before:hover {
    @apply bg-transparent text-primary transition duration-300;
}
| 14.102041
| 61
| 0.634829
|
bdd650150c2ec75f6b1eb42ba7a6aeeff14577a5
| 349
|
cs
|
C#
|
ScheduledTasks/CompositeC1AuthorizationFilter.cs
|
burningice2866/CompositeC1Contrib
|
6e987f13aa41ad202e1db400989e3a8de4fa000b
|
[
"MIT"
] | 5
|
2016-05-06T14:01:25.000Z
|
2017-08-04T17:24:02.000Z
|
ScheduledTasks/CompositeC1AuthorizationFilter.cs
|
burningice2866/CompositeC1Contrib
|
6e987f13aa41ad202e1db400989e3a8de4fa000b
|
[
"MIT"
] | 9
|
2017-06-16T12:58:21.000Z
|
2021-05-07T14:16:03.000Z
|
ScheduledTasks/CompositeC1AuthorizationFilter.cs
|
burningice2866/CompositeC1Contrib
|
6e987f13aa41ad202e1db400989e3a8de4fa000b
|
[
"MIT"
] | 4
|
2017-07-27T11:09:19.000Z
|
2019-09-24T09:32:40.000Z
|
using Composite.C1Console.Security;

using Hangfire.Dashboard;

namespace CompositeC1Contrib.ScheduledTasks
{
    /// <summary>
    /// Restricts the Hangfire dashboard to users who are currently
    /// logged in to the Composite C1 console.
    /// </summary>
    public class CompositeC1AuthorizationFilter : IDashboardAuthorizationFilter
    {
        /// <summary>Grants access only when a C1 console user is logged in.</summary>
        public bool Authorize(DashboardContext context) => UserValidationFacade.IsLoggedIn();
    }
}
| 23.266667
| 80
| 0.69341
|
f0e568c87f6fe5168e53ae55e65a91110c2cbe37
| 7,003
|
sql
|
SQL
|
morph-examples/examples-srilanka-tourism/2016-P21-query1.sql
|
RodrigoSanchezGonzalez/morph-rdb
|
a599bbf0d615604a36c7f594dfffa537720eb974
|
[
"Apache-2.0"
] | 26
|
2015-02-03T15:36:37.000Z
|
2022-01-10T23:19:02.000Z
|
morph-examples/examples-srilanka-tourism/2016-P21-query1.sql
|
RodrigoSanchezGonzalez/morph-rdb
|
a599bbf0d615604a36c7f594dfffa537720eb974
|
[
"Apache-2.0"
] | 44
|
2015-01-08T10:44:09.000Z
|
2022-01-21T23:17:43.000Z
|
morph-examples/examples-srilanka-tourism/2016-P21-query1.sql
|
RodrigoSanchezGonzalez/morph-rdb
|
a599bbf0d615604a36c7f594dfffa537720eb974
|
[
"Apache-2.0"
] | 42
|
2015-01-20T01:59:39.000Z
|
2021-11-09T11:13:00.000Z
|
-- Generated by morph-rdb (R2RML-to-SQL translation): one UNION arm per
-- calendar month. Each arm reads that month's arrivals column (FEB, NOV, ...)
-- from the "2016-P21" table for Spain and tags the row with the cube-vocab
-- URIs and numeric mapping ids used downstream for RDF term construction.
-- NOTE(review): the integer literals are mapping ids emitted by the tool.
-- February
SELECT "T2"."COUNTRYOFRESIDENCE" AS "obs",1459016715 AS "mappingid_obs",'http://w3id.org/sri-lanka/tourism/cube-vocab/countryOfResidence' AS "uri_countryOfResidence544834306",'http://w3id.org/sri-lanka/tourism/cube-vocab/month' AS "uri_month884712379",'http://w3id.org/sri-lanka/tourism/cube-vocab/numberOfArrivals' AS "uri_numberOfArrivals1533283167",'http://reference.data.gov.uk/def/intervals/February' AS "var_month",2117099736 AS "mappingid_month","T2"."FEB" AS "noOfArrivals",691863815 AS "mappingid_noOfArrivals"
FROM "2016-P21" T2
WHERE T2.COUNTRYOFRESIDENCE = 'Spain'
UNION
-- November
SELECT "T4"."COUNTRYOFRESIDENCE" AS "obs",659996839 AS "mappingid_obs",'http://w3id.org/sri-lanka/tourism/cube-vocab/countryOfResidence' AS "uri_countryOfResidence544834306",'http://w3id.org/sri-lanka/tourism/cube-vocab/month' AS "uri_month884712379",'http://w3id.org/sri-lanka/tourism/cube-vocab/numberOfArrivals' AS "uri_numberOfArrivals1533283167",'http://reference.data.gov.uk/def/intervals/November' AS "var_month",890160158 AS "mappingid_month","T4"."NOV" AS "noOfArrivals",1768127248 AS "mappingid_noOfArrivals"
FROM "2016-P21" T4
WHERE T4.COUNTRYOFRESIDENCE = 'Spain'
UNION
-- March
SELECT "T6"."COUNTRYOFRESIDENCE" AS "obs",119112899 AS "mappingid_obs",'http://w3id.org/sri-lanka/tourism/cube-vocab/countryOfResidence' AS "uri_countryOfResidence544834306",'http://w3id.org/sri-lanka/tourism/cube-vocab/month' AS "uri_month884712379",'http://w3id.org/sri-lanka/tourism/cube-vocab/numberOfArrivals' AS "uri_numberOfArrivals1533283167",'http://reference.data.gov.uk/def/intervals/March' AS "var_month",375074687 AS "mappingid_month","T6"."MAR" AS "noOfArrivals",1429616542 AS "mappingid_noOfArrivals"
FROM "2016-P21" T6
WHERE T6.COUNTRYOFRESIDENCE = 'Spain'
UNION
-- August
SELECT "T8"."COUNTRYOFRESIDENCE" AS "obs",662666707 AS "mappingid_obs",'http://w3id.org/sri-lanka/tourism/cube-vocab/countryOfResidence' AS "uri_countryOfResidence544834306",'http://w3id.org/sri-lanka/tourism/cube-vocab/month' AS "uri_month884712379",'http://w3id.org/sri-lanka/tourism/cube-vocab/numberOfArrivals' AS "uri_numberOfArrivals1533283167",'http://reference.data.gov.uk/def/intervals/August' AS "var_month",1821100643 AS "mappingid_month","T8"."AUG" AS "noOfArrivals",593768538 AS "mappingid_noOfArrivals"
FROM "2016-P21" T8
WHERE T8.COUNTRYOFRESIDENCE = 'Spain'
UNION
-- September
SELECT "T10"."COUNTRYOFRESIDENCE" AS "obs",1191864687 AS "mappingid_obs",'http://w3id.org/sri-lanka/tourism/cube-vocab/countryOfResidence' AS "uri_countryOfResidence544834306",'http://w3id.org/sri-lanka/tourism/cube-vocab/month' AS "uri_month884712379",'http://w3id.org/sri-lanka/tourism/cube-vocab/numberOfArrivals' AS "uri_numberOfArrivals1533283167",'http://reference.data.gov.uk/def/intervals/September' AS "var_month",466032056 AS "mappingid_month","T10"."SEP" AS "noOfArrivals",690822257 AS "mappingid_noOfArrivals"
FROM "2016-P21" T10
WHERE T10.COUNTRYOFRESIDENCE = 'Spain'
UNION
-- January
SELECT "T12"."COUNTRYOFRESIDENCE" AS "obs",726212590 AS "mappingid_obs",'http://w3id.org/sri-lanka/tourism/cube-vocab/countryOfResidence' AS "uri_countryOfResidence544834306",'http://w3id.org/sri-lanka/tourism/cube-vocab/month' AS "uri_month884712379",'http://w3id.org/sri-lanka/tourism/cube-vocab/numberOfArrivals' AS "uri_numberOfArrivals1533283167",'http://reference.data.gov.uk/def/intervals/January' AS "var_month",1058822905 AS "mappingid_month","T12"."JAN" AS "noOfArrivals",475394679 AS "mappingid_noOfArrivals"
FROM "2016-P21" T12
WHERE T12.COUNTRYOFRESIDENCE = 'Spain'
UNION
-- October
SELECT "T14"."COUNTRYOFRESIDENCE" AS "obs",1574615832 AS "mappingid_obs",'http://w3id.org/sri-lanka/tourism/cube-vocab/countryOfResidence' AS "uri_countryOfResidence544834306",'http://w3id.org/sri-lanka/tourism/cube-vocab/month' AS "uri_month884712379",'http://w3id.org/sri-lanka/tourism/cube-vocab/numberOfArrivals' AS "uri_numberOfArrivals1533283167",'http://reference.data.gov.uk/def/intervals/October' AS "var_month",1049590050 AS "mappingid_month","T14"."OCT" AS "noOfArrivals",1662592920 AS "mappingid_noOfArrivals"
FROM "2016-P21" T14
WHERE T14.COUNTRYOFRESIDENCE = 'Spain'
UNION
-- June
SELECT "T16"."COUNTRYOFRESIDENCE" AS "obs",1357686726 AS "mappingid_obs",'http://w3id.org/sri-lanka/tourism/cube-vocab/countryOfResidence' AS "uri_countryOfResidence544834306",'http://w3id.org/sri-lanka/tourism/cube-vocab/month' AS "uri_month884712379",'http://w3id.org/sri-lanka/tourism/cube-vocab/numberOfArrivals' AS "uri_numberOfArrivals1533283167",'http://reference.data.gov.uk/def/intervals/June' AS "var_month",1491522744 AS "mappingid_month","T16"."JUN" AS "noOfArrivals",501608687 AS "mappingid_noOfArrivals"
FROM "2016-P21" T16
WHERE T16.COUNTRYOFRESIDENCE = 'Spain'
UNION
-- December
SELECT "T18"."COUNTRYOFRESIDENCE" AS "obs",929706284 AS "mappingid_obs",'http://w3id.org/sri-lanka/tourism/cube-vocab/countryOfResidence' AS "uri_countryOfResidence544834306",'http://w3id.org/sri-lanka/tourism/cube-vocab/month' AS "uri_month884712379",'http://w3id.org/sri-lanka/tourism/cube-vocab/numberOfArrivals' AS "uri_numberOfArrivals1533283167",'http://reference.data.gov.uk/def/intervals/December' AS "var_month",42820240 AS "mappingid_month","T18"."DEC" AS "noOfArrivals",1822434669 AS "mappingid_noOfArrivals"
FROM "2016-P21" T18
WHERE T18.COUNTRYOFRESIDENCE = 'Spain'
UNION
-- July
SELECT "T20"."COUNTRYOFRESIDENCE" AS "obs",1592840862 AS "mappingid_obs",'http://w3id.org/sri-lanka/tourism/cube-vocab/countryOfResidence' AS "uri_countryOfResidence544834306",'http://w3id.org/sri-lanka/tourism/cube-vocab/month' AS "uri_month884712379",'http://w3id.org/sri-lanka/tourism/cube-vocab/numberOfArrivals' AS "uri_numberOfArrivals1533283167",'http://reference.data.gov.uk/def/intervals/July' AS "var_month",955611965 AS "mappingid_month","T20"."JUL" AS "noOfArrivals",1367672657 AS "mappingid_noOfArrivals"
FROM "2016-P21" T20
WHERE T20.COUNTRYOFRESIDENCE = 'Spain'
UNION
-- April
SELECT "T22"."COUNTRYOFRESIDENCE" AS "obs",1021565761 AS "mappingid_obs",'http://w3id.org/sri-lanka/tourism/cube-vocab/countryOfResidence' AS "uri_countryOfResidence544834306",'http://w3id.org/sri-lanka/tourism/cube-vocab/month' AS "uri_month884712379",'http://w3id.org/sri-lanka/tourism/cube-vocab/numberOfArrivals' AS "uri_numberOfArrivals1533283167",'http://reference.data.gov.uk/def/intervals/April' AS "var_month",1692375649 AS "mappingid_month","T22"."APR" AS "noOfArrivals",2002790740 AS "mappingid_noOfArrivals"
FROM "2016-P21" T22
WHERE T22.COUNTRYOFRESIDENCE = 'Spain'
UNION
-- May
SELECT "T24"."COUNTRYOFRESIDENCE" AS "obs",237373286 AS "mappingid_obs",'http://w3id.org/sri-lanka/tourism/cube-vocab/countryOfResidence' AS "uri_countryOfResidence544834306",'http://w3id.org/sri-lanka/tourism/cube-vocab/month' AS "uri_month884712379",'http://w3id.org/sri-lanka/tourism/cube-vocab/numberOfArrivals' AS "uri_numberOfArrivals1533283167",'http://reference.data.gov.uk/def/intervals/May' AS "var_month",742255664 AS "mappingid_month","T24"."MAY" AS "noOfArrivals",883521348 AS "mappingid_noOfArrivals"
FROM "2016-P21" T24
WHERE T24.COUNTRYOFRESIDENCE = 'Spain'
| 120.741379
| 521
| 0.796516
|
f82585284b715d223bcf79ec5cd9a5661742ea73
| 384
|
ps1
|
PowerShell
|
General/KovertKringle/Public/Remove-KKParticipant.ps1
|
corbob/powershell-depot
|
e77d3b400064502d5165b218f09c0680da8757b6
|
[
"MIT"
] | 30
|
2015-11-05T01:21:27.000Z
|
2021-01-18T16:09:50.000Z
|
General/KovertKringle/Public/Remove-KKParticipant.ps1
|
corbob/powershell-depot
|
e77d3b400064502d5165b218f09c0680da8757b6
|
[
"MIT"
] | 1
|
2018-04-11T23:27:11.000Z
|
2018-04-11T23:27:11.000Z
|
General/KovertKringle/Public/Remove-KKParticipant.ps1
|
corbob/powershell-depot
|
e77d3b400064502d5165b218f09c0680da8757b6
|
[
"MIT"
] | 20
|
2015-07-01T10:52:42.000Z
|
2021-10-02T08:23:18.000Z
|
function Remove-KKParticipant {
    <#
    .SYNOPSIS
        Removes a participant from the module's participant list.
    .DESCRIPTION
        Finds the object in the script-scoped $Script:Participants collection
        whose Name property equals -Name and removes it from the collection.
    .PARAMETER Name
        Name of the participant to remove. Matching is case-insensitive
        (PowerShell -eq default) and exact.
    .EXAMPLE
        Remove-KKParticipant -Name 'Alice'
    .NOTES
        NOTE(review): if multiple participants share the same Name,
        Where-Object yields an array and .Remove($Obj) will not match any
        single element — confirm names are unique, or remove per element.
    #>
    [CmdletBinding()]
    param (
        [Parameter(Mandatory)]
        [string] $Name
    )
    $Obj = $Script:Participants | Where-Object {$_.Name -eq $Name}
    $Script:Participants.Remove($Obj)
}
| 14.769231
| 66
| 0.588542
|
e739a4c3d41bffa87e0a33f3e2bae4e23e947e85
| 10,710
|
php
|
PHP
|
application/controllers/Alumni.php
|
harkiramadhan/ci_forsadqm
|
2410045d2e318e4b7073aee2602922627cbbfa53
|
[
"MIT"
] | null | null | null |
application/controllers/Alumni.php
|
harkiramadhan/ci_forsadqm
|
2410045d2e318e4b7073aee2602922627cbbfa53
|
[
"MIT"
] | null | null | null |
application/controllers/Alumni.php
|
harkiramadhan/ci_forsadqm
|
2410045d2e318e4b7073aee2602922627cbbfa53
|
[
"MIT"
] | null | null | null |
<?php
/**
 * Public alumni pages, one per graduating class ("angkatan").
 *
 * Every page renders the same shared data set (all alumni, the
 * konsulat/angkatan lookup tables, and one alumni list per regional
 * chapter) plus the alumni belonging to that page's class. The six
 * original methods were byte-for-byte duplicates except for the class
 * id and view name, so the shared work lives in one private helper.
 */
class Alumni extends CI_Controller{

    /**
     * Builds the shared view data and renders one angkatan page.
     *
     * CodeIgniter never routes methods whose name starts with an
     * underscore, so this helper is not web-accessible.
     *
     * @param string $name       Data key for the class list AND the view
     *                           name (loaded as forsadqm/<name>).
     * @param string $angkatanId Value matched against t_alumni.angkatan.
     * @return void
     */
    private function _show_angkatan($name, $angkatanId){
        $this->load->model('M_Forsadqm');
        $data['alumni'] = $this->M_Forsadqm->alumni()->result();
        $data['konsulat'] = $this->M_Forsadqm->konsulat()->result();
        $data['angkatan'] = $this->M_Forsadqm->angkatan()->result();
        // One alumni list per regional chapter ("konsulat"), keyed by the
        // name the views expect; ids mirror the t_alumni.konsulat column.
        $konsulatKeys = array(
            'jakarta'    => '1',
            'bandung'    => '2',
            'bogor'      => '3',
            'inter'      => '4',
            'jabar'      => '5',
            'jateng'     => '6',
            'jogja'      => '7',
            'kalimantan' => '8',
            'malang'     => '9',
            'nusa'       => '10',
            'sulawesi'   => '11',
            'sumatera'   => '12',
            'surabaya'   => '13',
            'tangerang'  => '14',
        );
        foreach ($konsulatKeys as $key => $id) {
            $data[$key] = $this->db->get_where('t_alumni', array('konsulat' => $id))->result();
        }
        // Alumni of the requested class, under the key the view reads.
        $data[$name] = $this->db->get_where('t_alumni', array('angkatan' => $angkatanId))->result();
        $this->load->view('layout/header');
        $this->load->view('forsadqm/' . $name, $data);
        $this->load->view('layout/footer');
    }

    public function irhaby(){
        $this->_show_angkatan('irhaby', '1');
    }

    public function hamasatuna(){
        $this->_show_angkatan('hamasatuna', '2');
    }

    public function sirius(){
        $this->_show_angkatan('sirius', '3');
    }

    public function gradien(){
        $this->_show_angkatan('gradien', '4');
    }

    public function equator(){
        $this->_show_angkatan('equator', '5');
    }

    public function relativity(){
        $this->_show_angkatan('relativity', '6');
    }
}
| 64.518072
| 94
| 0.59197
|
93e9c2cf52dea33c45232d1989d25b4b08278330
| 3,470
|
cs
|
C#
|
NPOI.HSSF.Record/MulBlankRecord.cs
|
iNeverSleeeeep/NPOI-For-Unity
|
e35b85c88a863ab2dae70aa81f058c2db36c7150
|
[
"MIT"
] | 1
|
2020-11-18T06:28:07.000Z
|
2020-11-18T06:28:07.000Z
|
NPOI.HSSF.Record/MulBlankRecord.cs
|
iNeverSleeeeep/NPOI-For-Unity
|
e35b85c88a863ab2dae70aa81f058c2db36c7150
|
[
"MIT"
] | null | null | null |
NPOI.HSSF.Record/MulBlankRecord.cs
|
iNeverSleeeeep/NPOI-For-Unity
|
e35b85c88a863ab2dae70aa81f058c2db36c7150
|
[
"MIT"
] | null | null | null |
using NPOI.Util;
using System.Text;

namespace NPOI.HSSF.Record
{
    /// <summary>
    /// Multiple Blank cell record: a run of cells in one row that carry
    /// styling (XF indexes) but no value. Read-only support in this release;
    /// the RecordFactory converts this into a set of BlankRecord objects.
    /// REFERENCE: PG 329 Microsoft Excel 97 Developer's Kit (ISBN: 1-57231-498-2)
    /// </summary>
    /// <remarks>
    /// @author Andrew C. Oliver (acoliver at apache dot org)
    /// @author Glen Stampoultzis (glens at apache.org)
    /// @version 2.0-pre
    /// @see org.apache.poi.hssf.record.BlankRecord
    /// </remarks>
    public class MulBlankRecord : StandardRecord
    {
        public const short sid = 190;

        private int _row;
        private int _first_col;
        private short[] _xfs;
        private int _last_col;

        /// <summary>Row number of the cells this record represents.</summary>
        public int Row => _row;

        /// <summary>First column held by this record in the row.</summary>
        public int FirstColumn => _first_col;

        /// <summary>Last column held by this record in the row.</summary>
        public int LastColumn => _last_col;

        /// <summary>Number of columns covered (last - first + 1).</summary>
        public int NumColumns => _last_col - _first_col + 1;

        public override short Sid => sid;

        /// <summary>Row, first-col and last-col (2 bytes each) plus one 2-byte XF index per column.</summary>
        protected override int DataSize => 6 + _xfs.Length * 2;

        /// <summary>Creates a new, empty MulBlankRecord.</summary>
        public MulBlankRecord()
        {
        }

        public MulBlankRecord(int row, int firstCol, short[] xfs)
        {
            _row = row;
            _first_col = firstCol;
            _xfs = xfs;
            _last_col = firstCol + xfs.Length - 1;
        }

        /// <summary>Constructs a MulBlank record from the input stream.</summary>
        /// <param name="in1">the RecordInputStream to read the record from</param>
        public MulBlankRecord(RecordInputStream in1)
        {
            // On-disk field order: row, first col, XF indexes, last col.
            _row = in1.ReadUShort();
            _first_col = in1.ReadShort();
            _xfs = ParseXFs(in1);
            _last_col = in1.ReadShort();
        }

        /// <summary>XF index for a column offset (coffset = column - first column).</summary>
        public short GetXFAt(int coffset)
        {
            return _xfs[coffset];
        }

        private short[] ParseXFs(RecordInputStream in1)
        {
            // Everything remaining except the trailing 2-byte last-col field
            // is XF indexes, two bytes apiece.
            int count = (in1.Remaining - 2) / 2;
            short[] xfs = new short[count];
            for (int i = 0; i < count; i++)
            {
                xfs[i] = in1.ReadShort();
            }
            return xfs;
        }

        public override string ToString()
        {
            StringBuilder sb = new StringBuilder();
            sb.Append("[MULBLANK]\n");
            sb.Append("row = ").Append(StringUtil.ToHexString(Row)).Append("\n");
            sb.Append("firstcol = ").Append(StringUtil.ToHexString(FirstColumn)).Append("\n");
            sb.Append(" lastcol = ").Append(StringUtil.ToHexString(LastColumn)).Append("\n");
            for (int i = 0; i < NumColumns; i++)
            {
                sb.Append("xf").Append(i).Append(" = ")
                    .Append(StringUtil.ToHexString(GetXFAt(i)))
                    .Append("\n");
            }
            sb.Append("[/MULBLANK]\n");
            return sb.ToString();
        }

        public override void Serialize(ILittleEndianOutput out1)
        {
            out1.WriteShort(_row);
            out1.WriteShort(_first_col);
            foreach (short xf in _xfs)
            {
                out1.WriteShort(xf);
            }
            out1.WriteShort(_last_col);
        }

        /// <summary>Returns this instance unchanged — the record is treated as immutable, matching the original behavior.</summary>
        public override object Clone()
        {
            return this;
        }
    }
}
| 27.76
| 97
| 0.664265
|
9ffe2a4deb11f401370f062f1d769452ff6b6772
| 475
|
py
|
Python
|
mopidy_cd/__init__.py
|
M0Rf30/mopidy-cd
|
1ae7653ab2505b74bdbbb08e5fb18f08b17a9c4b
|
[
"Apache-2.0"
] | 6
|
2021-02-20T23:37:27.000Z
|
2022-03-18T11:47:11.000Z
|
mopidy_cd/__init__.py
|
M0Rf30/mopidy-cd
|
1ae7653ab2505b74bdbbb08e5fb18f08b17a9c4b
|
[
"Apache-2.0"
] | 6
|
2015-01-25T19:06:06.000Z
|
2017-05-25T13:16:24.000Z
|
mopidy_cd/__init__.py
|
M0Rf30/mopidy-cd
|
1ae7653ab2505b74bdbbb08e5fb18f08b17a9c4b
|
[
"Apache-2.0"
] | 3
|
2017-09-05T03:48:23.000Z
|
2018-08-12T08:44:46.000Z
|
from __future__ import unicode_literals
import os
from mopidy import config, ext
__version__ = '0.5.1'
class Extension(ext.Extension):
    """Mopidy extension entry point for the audio-CD backend."""

    dist_name = 'Mopidy-Cd'
    ext_name = 'cd'
    version = __version__

    def get_default_config(self):
        """Read the bundled ext.conf shipped next to this module."""
        conf_path = os.path.join(os.path.dirname(__file__), 'ext.conf')
        return config.read(conf_path)

    def setup(self, registry):
        """Register the CD backend with Mopidy's extension registry."""
        # Imported lazily so merely importing the package stays cheap.
        from .backend import CdBackend
        registry.add('backend', CdBackend)
| 19.791667
| 71
| 0.682105
|
143e4649d0e1ebde70fcd67dee39c58182d9386c
| 3,608
|
sql
|
SQL
|
create.sql
|
JeanFranz73/sistema-pedidos-php-sql
|
3c52fbb3148626f6d02e3be7c0d66291621623be
|
[
"Unlicense"
] | 1
|
2021-06-28T23:29:00.000Z
|
2021-06-28T23:29:00.000Z
|
create.sql
|
JeanFranz73/sistema-pedidos-php-sql
|
3c52fbb3148626f6d02e3be7c0d66291621623be
|
[
"Unlicense"
] | null | null | null |
create.sql
|
JeanFranz73/sistema-pedidos-php-sql
|
3c52fbb3148626f6d02e3be7c0d66291621623be
|
[
"Unlicense"
] | null | null | null |
create table cargos
(
id_cargo serial not null,
nome varchar(20) not null,
descricao varchar(50) not null,
constraint cargos_pkey
primary key (id_cargo)
);
create table estado
(
id_estado serial not null,
nome varchar(20) not null,
sigla varchar(2),
constraint estado_pkey
primary key (id_estado)
);
create table cidade
(
id_cidade serial not null,
id_estado integer not null,
nome varchar(20) not null,
constraint cidade_pkey
primary key (id_cidade),
constraint fk_estado
foreign key (id_estado) references estado
);
create table cliente
(
id_cliente serial not null,
nome varchar(30) not null,
telefone integer,
endereco varchar(50),
id_cidade integer not null,
constraint cliente_pkey
primary key (id_cliente),
constraint fk_cidade
foreign key (id_cidade) references cidade
);
create table distribuidor
(
id_distribuidor serial not null,
nome varchar(30) not null,
id_cidade integer not null,
constraint distribuidor_pkey
primary key (id_distribuidor),
constraint fk_cidade
foreign key (id_cidade) references cidade
);
create table estoque_ingred
(
id_item serial not null,
nome varchar(20) not null,
descricao varchar(50),
quantidade integer,
constraint estoque_ingred_pkey
primary key (id_item)
);
create table filial
(
id_filial serial not null,
nome varchar(30) not null,
id_cidade integer not null,
constraint filial_pkey
primary key (id_filial),
constraint fk_cidade
foreign key (id_cidade) references cidade
);
create table funcionario
(
id_func serial not null,
id_filial integer not null,
id_cargo integer not null,
nome varchar(20) not null,
constraint funcionario_pkey
primary key (id_func),
constraint fk_cargo
foreign key (id_cargo) references cargos,
constraint fk_filial
foreign key (id_filial) references filial
);
create table nota_compras
(
id_compra serial not null,
id_item integer not null,
id_func integer not null,
id_dist integer not null,
data_compra date not null,
descricao varchar(50),
constraint nota_compras_pkey
primary key (id_compra, id_item),
constraint fk_dist
foreign key (id_dist) references distribuidor,
constraint fk_func
foreign key (id_func) references funcionario,
constraint fk_item
foreign key (id_item) references estoque_ingred
);
create table sabor_pizza
(
id_sabor serial not null,
nome varchar(30) not null,
constraint sabor_pizza_pkey
primary key (id_sabor)
);
create table tamanho_pizza
(
id_tamanho serial not null,
nome varchar(20) not null,
tamanho varchar(30) not null,
preco integer not null,
constraint tamanho_pizza_pkey
primary key (id_tamanho)
);
create table situacao_pedido
(
id_situacao char not null,
descricao varchar(20),
constraint situacao_pedido_pkey
primary key (id_situacao)
);
create table nota_pedidos
(
id_pedido serial not null,
id_func integer not null,
id_cliente integer not null,
observacao varchar(50),
data_pedido date,
situacao char,
constraint nota_pedidos_pkey
primary key (id_pedido),
constraint fk_cliente
foreign key (id_cliente) references cliente,
constraint fk_func
foreign key (id_func) references funcionario,
constraint fk_situacao
foreign key (situacao) references situacao_pedido
);
create table itens_pedido
(
id_pedido integer not null,
id_tamanho integer not null,
id_sabor integer not null,
quantidade integer not null,
observacao varchar(50),
constraint fk_pedido
foreign key (id_pedido) references nota_pedidos,
constraint fk_sabor
foreign key (id_sabor) references sabor_pizza,
constraint fk_tamanho
foreign key (id_tamanho) references tamanho_pizza
);
| 22
| 51
| 0.782705
|
b04b6ec673eccf2193c77faa7009efb413e1e18a
| 4,348
|
py
|
Python
|
Code/config.py
|
CerpStern/CapStone
|
4c09db174b1ff9a1b96b69e5c298b29c610aefaa
|
[
"MIT"
] | 1
|
2018-03-30T03:21:23.000Z
|
2018-03-30T03:21:23.000Z
|
Code/config.py
|
KevinNovak/Syllabus-Management-System
|
4c09db174b1ff9a1b96b69e5c298b29c610aefaa
|
[
"MIT"
] | null | null | null |
Code/config.py
|
KevinNovak/Syllabus-Management-System
|
4c09db174b1ff9a1b96b69e5c298b29c610aefaa
|
[
"MIT"
] | null | null | null |
import os
import json
import datetime
from flask import Flask, url_for, redirect, \
render_template, session, request
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager, login_required, login_user, \
logout_user, current_user, UserMixin
from requests_oauthlib import OAuth2Session
from requests.exceptions import HTTPError
basedir = os.path.abspath(os.path.dirname(__file__))
"""App Configuration"""
class Auth:
"""Google Project Credentials"""
CLIENT_ID = ('486646633132-nc0hlcn0vt7khirhmhkh518d84omqjea'
'.apps.googleusercontent.com')
CLIENT_SECRET = 'NNKF51kaBY-5RIMpOW1S2bjd'
REDIRECT_URI = 'https://localhost:5000/gCallback'
AUTH_URI = 'https://accounts.google.com/o/oauth2/auth'
TOKEN_URI = 'https://accounts.google.com/o/oauth2/token'
USER_INFO = 'https://www.googleapis.com/userinfo/v2/me'
SCOPE = ['profile', 'email']
class Config:
"""Base config"""
APP_NAME = "SMS"
SECRET_KEY = "NNKF51kaBY-5RIMpOW1S2bjd"
class DevConfig(Config):
"""Dev config"""
DEBUG = True
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, "test.db")
SQLALCHEMY_TRACK_MODIFICATIONS = False
class ProdConfig(Config):
"""Production config"""
DEBUG = False
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, "test.db")
SQLALCHEMY_TRACK_MODIFICATIONS = False
config = {
"dev": DevConfig,
"prod": ProdConfig,
"default": DevConfig
}
class Template():
basic = '<h3>Intro to Blah Blah Blah</h3><p>Meeting Time: XX:XX - XX:XX Day1, Day2</p><p>Meeting Place: Room XXX Foo Hall</p><p>Course Website:</p><p>Intructor Name: Bob Loblaw</p><p>Office Hours: XX:XX - XX:XXX Day1, Day2 Room XXX Building</p><p>Required Materials if any:</p><p>Prerequisites if any:</p>'
description = 'Course Description Goes Here'
topics = 'Topics Covered Go Here'
outcomes = 'Learning Outcomes Go Here'
grading = 'Grading Policy Goes Here'
schedule = 'Planned Schedule Goes Here'
honesty = 'Cheating means to misrepresent the source, nature, or other conditions of your academic work (e.g., tests, papers, projects, assignments) so as to get underserved credit. The use of the intellectual property of others without giving them appropriate credit is a serious academic offense. The University considers cheating and plagiarism very serious offenses and provides for sanctions up to and including dismissal from the University or revocation of a degree. The University’s administrative policy and procedures regarding student cheating and plagiarism can be found in the <a href="https://www.kent.edu/policyreg/administrative-policy-regarding-student-cheating-and-plagiarism" target="_blank" rel="noopener noreferrer">Administrative Policy, 3-01.8</a>. By submitting any material in this (or any other class) you are certifying that it is free of plagiarism.'
deadlines = 'Students have responsibility to ensure they are properly enrolled in classes. You are advised to review your official class schedule (using Student Tools in FlashLine) during the first two weeks of the semester to ensure you are properly enrolled in this class and section. Should you find an error in your class schedule, you have until cut-off date provided by the Undergraduate Office to correct the error with your advising office. If registration errors are not corrected by the cut-off date and you continue to attend and participate in classes for which you are not officially enrolled, you are advised now that you will not receive a grade at the conclusion of the semester for any class in which you are not properly registered.'
accessibility = 'University policy 3342-3-01.3 requires that students with disabilities be provided reasonable accommodations to ensure their equal access to course content. If you have a documented disability and require accommodations, please contact the instructor at the beginning of the semester to make arrangements for necessary classroom adjustments. Please note, you must first verify your eligibility for these through the Student Accessibility Services (contact 330-672-3391 or visit <a href="http://www.kent.edu/sas" target="_blank" rel="noopener noreferrer">www.kent.edu/sas</a> for more information on registration procedures).'
keywords = 'Learning'
| 64.895522
| 888
| 0.76058
|
d728ee2033765e686552c323df3ec8345365fa19
| 41
|
sql
|
SQL
|
migrations/sqls/20210716102934-create-table-person-account-down.sql
|
Eeki/postgraphile-flashcards
|
8602b1fa742dd19dde34b7673bdf69eefe9164c6
|
[
"MIT"
] | null | null | null |
migrations/sqls/20210716102934-create-table-person-account-down.sql
|
Eeki/postgraphile-flashcards
|
8602b1fa742dd19dde34b7673bdf69eefe9164c6
|
[
"MIT"
] | null | null | null |
migrations/sqls/20210716102934-create-table-person-account-down.sql
|
Eeki/postgraphile-flashcards
|
8602b1fa742dd19dde34b7673bdf69eefe9164c6
|
[
"MIT"
] | null | null | null |
DROP TABLE learn_private.person_account;
| 20.5
| 40
| 0.878049
|
3a02f35313f3b69a064ebaacb397e46d9674877e
| 936
|
lua
|
Lua
|
ahsm/tools/run_to_dot.lua
|
xopxe/robotito_firmware
|
c131994cf1420277e0fd576c52018de4f9d64207
|
[
"MIT"
] | 3
|
2019-02-03T20:48:31.000Z
|
2020-11-08T16:03:14.000Z
|
ahsm/tools/run_to_dot.lua
|
xopxe/robotito_firmware
|
c131994cf1420277e0fd576c52018de4f9d64207
|
[
"MIT"
] | null | null | null |
ahsm/tools/run_to_dot.lua
|
xopxe/robotito_firmware
|
c131994cf1420277e0fd576c52018de4f9d64207
|
[
"MIT"
] | null | null | null |
--- Simple dot export script.
-- This script will export a hsm library to a dot file representation.
-- If filename is provided the output will be written to a file. Otherwise
-- the dot output will be writted to stdout.
-- @usage $ lua run_to_dot.lua <fsm.lua> [filename]
-- @usage $ lua tools/run_to_dot.lua examples/composite.lua composite.dot
-- $ lua tools/run_to_dot.lua examples/composite.lua | dot -Tps -o composite.ps
-- $ lua tools/run_to_dot.lua examples/composite.lua | dot -Tpng | display -
-- @script run_to_dot.lua
package.path = package.path .. ";;;tools/?.lua;tools/?/init.lua"
local filehsm = arg[1]
local filetarget = arg[2]
if not filehsm then
io.stderr:write( 'syntax:\n lua run_to_dot.lua <fsm.lua> [filename]\n' )
os.exit()
end
local root = assert(dofile(filehsm))
local to_dot = require 'to_dot'
if filetarget then
assert(to_dot.to_file(root, filetarget))
else
to_dot.to_function(root, print)
end
| 32.275862
| 79
| 0.726496
|
7da48263f7f1c5398935a3358755c0dd65304f18
| 399
|
swift
|
Swift
|
HackerRank/HackerRankTests/Tests/RunningTimeTests.swift
|
wibosco/CodingChallenges
|
ba4008410664c86e5de13c00eb819b1010e67cb2
|
[
"MIT"
] | 5
|
2021-12-30T20:32:13.000Z
|
2022-03-31T09:42:35.000Z
|
HackerRank/HackerRankTests/Tests/RunningTimeTests.swift
|
wibosco/CodingChallenges
|
ba4008410664c86e5de13c00eb819b1010e67cb2
|
[
"MIT"
] | null | null | null |
HackerRank/HackerRankTests/Tests/RunningTimeTests.swift
|
wibosco/CodingChallenges
|
ba4008410664c86e5de13c00eb819b1010e67cb2
|
[
"MIT"
] | null | null | null |
//
// RunningTimeTests.swift
// CodingChallenges
//
// Created by Boles on 15/05/2016.
// Copyright © 2016 Boles. All rights reserved.
//
import XCTest
@testable import HackerRank
class RunningTimeTests: XCTestCase {
func test_A() {
let numberOfShifts = RunningTime.numberOfShiftsToSort(array: [2, 1, 3, 1, 2])
XCTAssertEqual(4, numberOfShifts)
}
}
| 18.136364
| 85
| 0.651629
|
4cd4582462fdb4234d3f1d0aa877a640375d67b2
| 232
|
py
|
Python
|
ch02_03.py
|
sitdh/59.com-prog
|
24f536a72b0467ff3ee1615f515ecff9fbf36bb3
|
[
"MIT"
] | 1
|
2021-04-25T14:46:12.000Z
|
2021-04-25T14:46:12.000Z
|
ch02_03.py
|
sitdh/com-prog
|
24f536a72b0467ff3ee1615f515ecff9fbf36bb3
|
[
"MIT"
] | null | null | null |
ch02_03.py
|
sitdh/com-prog
|
24f536a72b0467ff3ee1615f515ecff9fbf36bb3
|
[
"MIT"
] | null | null | null |
# Body surface
import math
weight = int(input())
height = int(input())
body_surface = 3.207 * math.pow(10, -4) * math.pow(height, 0.3) * math.pow( 1000 * weight, 0.7285 - 0.0188 * ( 3 + math.log(weight, 10)))
print(body_surface)
| 23.2
| 137
| 0.646552
|
b75d4cd4fa9ad3ad38b152ff060b557b21d0287d
| 575
|
cpp
|
C++
|
Redo.cpp
|
albcristi/MoviesApp
|
aeb390e13fd3130474e12ddb55ba7ada3fcd898d
|
[
"MIT"
] | null | null | null |
Redo.cpp
|
albcristi/MoviesApp
|
aeb390e13fd3130474e12ddb55ba7ada3fcd898d
|
[
"MIT"
] | null | null | null |
Redo.cpp
|
albcristi/MoviesApp
|
aeb390e13fd3130474e12ddb55ba7ada3fcd898d
|
[
"MIT"
] | null | null | null |
#include "Redo.h"
Redo::Redo()
{
}
Redo::~Redo()
{
}
RedoAdd::RedoAdd(Movie mov, Repository& rep) :addedMovie{ mov },repo{rep}
{
}
void RedoAdd::redoAction()
{
repo.addElement(addedMovie);
}
RedoAdd::~RedoAdd()
{
}
RedoDelete::RedoDelete(Movie mo, Repository& re) : deletedMovie{ mo }, repo{ re }
{
}
void RedoDelete::redoAction()
{
repo.remove(deletedMovie);
}
RedoDelete::~RedoDelete()
{
}
RedoUpdate::RedoUpdate(Movie tod, Movie toa, Repository& re)
:toDel{ tod }, toAdd{ toa }, repo{ re }
{
}
void RedoUpdate::redoAction()
{
repo.update(toDel, toAdd);
}
| 11.734694
| 81
| 0.662609
|
691849b72dd6d0db19105652aea21cd8dc7a0eeb
| 1,073
|
rb
|
Ruby
|
spec/words_integration_spec.rb
|
andrewlhy/Dictionary
|
e91cca399972228e0bd7943ad9adffdf6ea21432
|
[
"MIT"
] | null | null | null |
spec/words_integration_spec.rb
|
andrewlhy/Dictionary
|
e91cca399972228e0bd7943ad9adffdf6ea21432
|
[
"MIT"
] | null | null | null |
spec/words_integration_spec.rb
|
andrewlhy/Dictionary
|
e91cca399972228e0bd7943ad9adffdf6ea21432
|
[
"MIT"
] | null | null | null |
require('capybara/rspec')
require('./app')
Capybara.app = Sinatra::Application
set(:show_exceptions,false)
describe("a list of words", {:type => :feature})do
it('will show a list of words that you want to add to the dictionary') do
visit('/')
click_on('Add a new word to your list here')
fill_in('wordname', :with => "USA")
click_button('Add Word')
click_on('See all your words here!')
expect(page).to have_content("USA")
end
end
describe("a list of definitions", {:type => :feature})do
it('will show a list of definitions that you want to add to the dictionary') do
visit('/')
click_on('Add a new word to your list here')
fill_in('wordname', :with => "USAtoday")
click_button('Add Word')
click_on('See all your words here!')
click_on("USAtoday")
click_on('Add a new definition')
fill_in('definition', :with => "country in north america")
click_button('Add Definition')
click_on('Go back to your words')
click_on("USAtoday")
expect(page).to have_content("country in north america")
end
end
| 31.558824
| 81
| 0.671016
|
2c5fcc15f86ca47fc4de2c7fe6415eb6a85e0c4c
| 3,010
|
py
|
Python
|
step_1/slack_bot.py
|
kangheeyong/PROJECT-personal-recommendation-system-demo
|
f8c6489f8bc17463fa572dc47e4bcbfc5fdb1397
|
[
"MIT"
] | 2
|
2020-03-23T09:28:35.000Z
|
2020-03-23T09:28:38.000Z
|
step_1/slack_bot.py
|
kangheeyong/PROJECT-personal-recommendation-system-demo
|
f8c6489f8bc17463fa572dc47e4bcbfc5fdb1397
|
[
"MIT"
] | 5
|
2020-03-11T08:38:14.000Z
|
2020-05-30T13:45:15.000Z
|
step_1/slack_bot.py
|
kangheeyong/PROJECT-personal-recommendation-system-demo
|
f8c6489f8bc17463fa572dc47e4bcbfc5fdb1397
|
[
"MIT"
] | null | null | null |
import os
import sys
import asyncio
from datetime import datetime, timedelta
import websockets
from fire import Fire
from slacker import Slacker
from Feynman.etc.util import Config, get_logger
from Feynman.database import Mongodb
class base():
def __init__(self):
self.slack = Slacker(os.environ['SLACK_TOKEN'])
self.logger = get_logger()
self.response = self.slack.rtm.start()
self.endpoint = self.response.body['url']
self._demo_dist_opt = Config(open('config/demo_dist.json').read())
self._mg_data_click = Mongodb(self._demo_dist_opt.mongodb.data_click)
self._mg_data_imp = Mongodb(self._demo_dist_opt.mongodb.data_imp)
def _time_between(self, st, ed, bucket):
return {'$and': [
{'timestamp': {'$gte': st.timestamp()}},
{'timestamp': {'$lt': ed.timestamp()}},
{'bucket': bucket}
]}
def _dashboard(self, channal):
now_t = datetime.now()
for bucket in self._mg_data_click._collection.distinct('bucket'):
click = self._mg_data_click.count_documents(self._time_between(now_t-timedelta(hours=1, minutes=0), now_t, bucket))
imp = self._mg_data_imp.count_documents(self._time_between(now_t-timedelta(hours=1, minutes=0), now_t, bucket))
self.slack.chat.post_message(channal, 'bucket : {}, ctr : {}'.format(bucket, click/(imp + 0.1)))
async def _execute_bot(self):
self.logger.info('start execute_bot')
while True:
try:
self._dashboard('#demo-system-dashboard')
await asyncio.sleep(3600)
except Exception as e:
self.logger.warning('something is wrong... -> {}'.format(e))
break
async def _reaction_bot(self):
self.logger.info('start reaction_bot')
ws = await websockets.connect(self.endpoint)
while True:
try:
message_json = await ws.recv()
self.logger.info('get message...')
message_json = Config(message_json)
if message_json.text and message_json.subtype != 'bot_message':
if '안녕' in message_json.text and '봇' in message_json.text:
self.slack.chat.post_message(message_json.channel, '안녕하세요')
if 'dashboard' in message_json.text and '봇' in message_json.text:
self._dashboard(message_json.channel)
except Exception as e:
self.logger.warning('something is wrong...-> {}'.format(e))
break
async def _main(self):
self.logger.info('start process...')
try:
await asyncio.gather(
self._execute_bot(),
self._reaction_bot()
)
except:
self.logger.warning('something is wrong...')
sys.exit()
def run(self):
asyncio.run(self._main())
if __name__ == '__main__':
Fire(base)
| 36.26506
| 127
| 0.595349
|
9feb52d755e64ca783b48ac95cbf12a852bdd3de
| 56,290
|
py
|
Python
|
examples/development/variants.py
|
abhishekunique/RND-ashwin
|
f8bcf3c593df2dacc0efba0875533be71ccb5011
|
[
"MIT"
] | null | null | null |
examples/development/variants.py
|
abhishekunique/RND-ashwin
|
f8bcf3c593df2dacc0efba0875533be71ccb5011
|
[
"MIT"
] | 7
|
2020-09-25T22:41:46.000Z
|
2022-03-12T00:37:25.000Z
|
examples/development/variants.py
|
abhishekunique/RND-ashwin
|
f8bcf3c593df2dacc0efba0875533be71ccb5011
|
[
"MIT"
] | null | null | null |
from copy import deepcopy
from ray import tune
import numpy as np
import os
from softlearning.misc.utils import get_git_rev, deep_update
DEFAULT_KEY = "__DEFAULT_KEY__"
# M = number of hidden units per layer
# N = number of hidden layers
# M = 512
# N = 2
M = 256
N = 2
REPARAMETERIZE = True
NUM_COUPLING_LAYERS = 2
GAUSSIAN_POLICY_PARAMS_BASE = {
'type': 'GaussianPolicy',
'kwargs': {
'hidden_layer_sizes': (M, ) * N,
'squash': True,
'observation_keys': None,
'goal_keys': None,
'observation_preprocessors_params': {}
}
}
ALGORITHM_PARAMS_BASE = {
'kwargs': {
'epoch_length': 1000,
'train_every_n_steps': 1,
'n_train_repeat': 1,
'eval_n_episodes': 3,
'eval_deterministic': False,
'discount': 0.99,
'tau': 5e-3,
'reward_scale': 1.0,
'save_training_video_frequency': 5,
'eval_render_kwargs': {
'width': 480,
'height': 480,
'mode': 'rgb_array',
},
}
}
ALGORITHM_PARAMS_ADDITIONAL = {
'SAC': {
'type': 'SAC',
'kwargs': {
'reparameterize': REPARAMETERIZE,
'lr': 3e-4,
'target_update_interval': 1,
'tau': 5e-3,
'n_initial_exploration_steps': int(1e3),
'target_entropy': 'auto',
'action_prior': 'uniform',
'verbose': True,
'eval_n_episodes': 3,
'ext_reward_coeff': 1,
'rnd_int_rew_coeff': tune.grid_search([0, 1]),
'normalize_ext_reward_gamma': 0.99,
},
'rnd_params': {
'convnet_params': {
'conv_filters': (16, 32, 64),
'conv_kernel_sizes': (3, 3, 3),
'conv_strides': (2, 2, 2),
'normalization_type': None,
},
'fc_params': {
'hidden_layer_sizes': (256, 256),
'output_size': 512,
},
}
},
'MultiSAC': {
'type': 'MultiSAC',
'kwargs': {
'reparameterize': REPARAMETERIZE,
'lr': 3e-4,
'target_update_interval': 1,
'tau': 5e-3,
'target_entropy': 'auto',
# 'n_initial_exploration_steps': int(1e4),
'n_initial_exploration_steps': int(5e3),
'action_prior': 'uniform',
'her_iters': tune.grid_search([0]),
'rnd_int_rew_coeffs': [0, 0], # [1, 1],
'ext_reward_coeffs': [1, 1], # 0 corresponds to reset policy
'normalize_ext_reward_gamma': 0.99,
'share_pool': False,
},
'rnd_params': {
'convnet_params': {
'conv_filters': (16, 32, 64),
'conv_kernel_sizes': (3, 3, 3),
'conv_strides': (2, 2, 2),
'normalization_type': None,
},
'fc_params': {
'hidden_layer_sizes': (256, 256),
'output_size': 512,
},
},
},
'HERQLearning': {
'type': 'HERQLearning',
'kwargs': {
'reparameterize': REPARAMETERIZE,
'lr': 3e-4,
'target_update_interval': 1,
'tau': 5e-3,
'n_initial_exploration_steps': int(5e3),
'target_entropy': 'auto',
'action_prior': 'uniform',
'ext_reward_coeff': 1,
'eval_n_episodes': 3,
'rnd_int_rew_coeff': tune.grid_search([0]),
# 'normalize_ext_reward_gamma': 0.99,
'verbose': True,
'replace_original_reward': tune.grid_search([True, False]), # True,
},
},
}
MAX_PATH_LENGTH_PER_UNIVERSE_DOMAIN_TASK = {
DEFAULT_KEY: 100,
'gym': {
DEFAULT_KEY: 100,
'Point2D': {
DEFAULT_KEY: 200,
},
'Pusher2D': {
DEFAULT_KEY: 100,
'Simple-v0': 150,
'Test-v0': 150,
},
'MiniGrid': {
DEFAULT_KEY: 50,
},
'DClaw': {
DEFAULT_KEY: 50,
'TurnFixed-v0': 50,
# 'TurnResetFree-v0': 100,
'TurnResetFree-v0': 50,
'TurnResetFreeSwapGoal-v0': tune.grid_search([100]),
'TurnResetFreeRandomGoal-v0': 100,
'TurnFreeValve3Fixed-v0': tune.grid_search([50]),
# 'TurnFreeValve3RandomReset-v0': 50,
'TurnFreeValve3ResetFree-v0': tune.grid_search([100]),
'TurnFreeValve3ResetFreeSwapGoal-v0': tune.grid_search([50]),
'TurnFreeValve3ResetFreeSwapGoalEval-v0': tune.grid_search([50]),
'TurnFreeValve3ResetFreeComposedGoals-v0': tune.grid_search([150]),
# Translating Tasks
'TranslatePuckFixed-v0': 50,
'TranslateMultiPuckFixed-v0': 100,
'TranslatePuckResetFree-v0': 50,
# Lifting Tasks
'LiftDDFixed-v0': tune.grid_search([50]),
'LiftDDResetFree-v0': tune.grid_search([50]),
# Flipping Tasks
'FlipEraserFixed-v0': tune.grid_search([50]),
'FlipEraserResetFree-v0': tune.grid_search([50]),
'FlipEraserResetFreeSwapGoal-v0': tune.grid_search([50]),
# Sliding Tasks
'SlideBeadsFixed-v0': tune.grid_search([25]),
'SlideBeadsResetFree-v0': tune.grid_search([25]),
'SlideBeadsResetFreeEval-v0': tune.grid_search([25]),
},
},
}
NUM_EPOCHS_PER_UNIVERSE_DOMAIN_TASK = {
DEFAULT_KEY: 200,
'gym': {
DEFAULT_KEY: 200,
'Point2D': {
DEFAULT_KEY: int(300),
},
'Pusher2D': {
DEFAULT_KEY: int(100),
},
'MiniGrid': {
DEFAULT_KEY: 100,
},
'DClaw': {
DEFAULT_KEY: int(250),
'TurnFreeValve3Fixed-v0': 750,
'TranslateMultiPuckFixed-v0': 500,
},
},
}
ENVIRONMENT_PARAMS_PER_UNIVERSE_DOMAIN_TASK_STATE = {
'gym': {
'Point2D': {
# === Point Mass ===
'Fixed-v0': {
# 'boundary_distance': tune.grid_search([8, 16]),
# 'action_scale': tune.grid_search([0.5, 0.25]),
'action_scale': 0.5,
'images_are_rgb': True,
'init_pos_range': None, # Random reset
'target_pos_range': None, # Random target
'render_onscreen': False,
# 'reward_type': tune.grid_search(['dense', 'sparse']),
'reward_type': tune.grid_search(['sparse']),
'observation_keys': ('state_achieved_goal', 'state_desired_goal'),
# 'goal_keys': ('state_desired_goal', ),
},
'SingleWall-v0': {
# 'boundary_distance': tune.grid_search([4, 8]),
'action_scale': tune.grid_search([1.0, 0.5]),
'images_are_rgb': True,
'init_pos_range': None, # Random reset
'target_pos_range': None, # Random target
'render_onscreen': False,
'reward_type': tune.grid_search(['dense', 'sparse']),
'observation_keys': ('state_observation', 'state_desired_goal'),
# 'goal_keys': ('state_desired_goal', ),
},
'BoxWall-v1': {
'action_scale': tune.grid_search([0.5]),
'images_are_rgb': True,
'reward_type': tune.grid_search(['sparse']),
'init_pos_range': ((-3, -3), (-3, -3)),
# 'init_pos_range': None, # Random reset
'target_pos_range': ((3, 3), (3, 3)),
'render_onscreen': False,
'observation_keys': ('state_achieved_goal', 'state_desired_goal'),
},
'Maze-v0': {
'action_scale': tune.grid_search([0.5]),
'images_are_rgb': True,
'reward_type': tune.grid_search(['sparse']),
'render_onscreen': False,
'observation_keys': ('state_achieved_goal', 'state_desired_goal'),
'use_count_reward': tune.grid_search([True, False]),
'n_bins': 10,
# === EASY ===
# 'wall_shape': 'easy-maze',
# 'init_pos_range': ((-2.5, -3), (-2.5, -3)),
# 'target_pos_range': ((2.5, -3), (2.5, -3)),
# === MEDIUM ===
'wall_shape': 'medium-maze',
'init_pos_range': ((-3, -3), (-3, -3)),
'target_pos_range': ((3, 3), (3, 3)),
# === HARD ===
# 'wall_shape': 'hard-maze',
# 'init_pos_range': ((-3, -3), (-3, -3)),
# 'target_pos_range': ((-0.5, 1.25), (-0.5, 1.25)),
},
# 'Fixed-v1': {
# 'ball_radius': 0.5,
# 'target_radius': 0.5,
# 'boundary_distance': 4,
# 'images_are_rgb': True,
# 'init_pos_range': None,
# 'target_pos_range': None,
# 'render_onscreen': False,
# 'reward_type': 'sparse',
# 'observation_keys': ('state_observation', ),
# 'goal_keys': ('state_desired_goal', ),
# },
},
'Pusher2D': {
'Simple-v0': {
'init_qpos_range': ((0, 0, 0), (0, 0, 0)),
'init_object_pos_range': ((1, 0), (1, 0)),
'target_pos_range': ((2, 2), (2, 2)),
'reset_gripper': True,
'reset_object': True,
'observation_keys': (
# 'observation',
'gripper_qpos',
'gripper_qvel',
'object_pos',
'object_vel',
'target_pos',
),
},
'Test-v0': {
'do_reset': True,
'multi_reset': False,
'multi_reset_block': False,
'reset_block': True,
'reset_gripper': True,
}
},
'DClaw': {
# === Fixed Screw ===
'TurnFixed-v0': {
'reward_keys_and_weights': {
# 'object_to_target_angle_distance_reward': 1,
'sparse_reward': 1,
},
'init_pos_range': (0, 0),
'target_pos_range': (np.pi, np.pi),
'observation_keys': (
'object_angle_cos',
'object_angle_sin',
'claw_qpos',
'last_action'
),
},
'PoseStatic-v0': {},
'PoseDynamic-v0': {},
'TurnRandom-v0': {},
'TurnResetFree-v0': {
'reward_keys_and_weights': {
'object_to_target_angle_distance_reward': 1,
},
'reset_fingers': True,
'init_pos_range': (0, 0),
'target_pos_range': (np.pi, np.pi),
},
'TurnResetFreeSwapGoal-v0': {
'reward_keys': (
'object_to_target_angle_dist_cost',
),
'reset_fingers': True,
},
'TurnResetFreeRandomGoal-v0': {
'reward_keys': (
'object_to_target_angle_dist_cost',
),
'reset_fingers': True,
},
'TurnRandomDynamics-v0': {},
'TurnFreeValve3Fixed-v0': {
'reward_keys_and_weights': {
# 'object_to_target_position_distance_reward': tune.grid_search([2]),
# 'object_to_target_orientation_distance_reward': 1,
'sparse_reward': 1,
},
'observation_keys': (
'claw_qpos',
'last_action',
'object_xy_position',
'object_z_orientation_cos',
'object_z_orientation_sin',
),
'init_qpos_range': ((0, 0, 0, 0, 0, 0), ) * 2,
'target_qpos_range': ((0, 0, 0, 0, 0, np.pi), ) * 2,
# 'target_qpos_range': [
# (0.01, 0.01, 0, 0, 0, -np.pi / 2),
# (-0.01, -0.01, 0, 0, 0, np.pi / 2)
# ],
# 'init_qpos_range': (
# (-0.08, -0.08, 0, 0, 0, -np.pi),
# (0.08, 0.08, 0, 0, 0, np.pi)
# ),
},
'TurnFreeValve3ResetFree-v0': {
'observation_keys': (
'claw_qpos',
'last_action',
'object_xy_position',
'object_z_orientation_cos',
'object_z_orientation_sin',
),
'reward_keys_and_weights': {
'object_to_target_position_distance_reward': 2,
'object_to_target_orientation_distance_reward': 1,
},
'reset_fingers': True,
'reset_frequency': 0,
'target_qpos_range': [
(0.01, 0.01, 0, 0, 0, -np.pi / 2),
(-0.01, -0.01, 0, 0, 0, np.pi / 2)
],
'init_qpos_range': [
(0, 0, 0, 0, 0, 0)
],
# === BELOW IS FOR SAVING INTO THE REPLAY POOL. ===
# MAKE SURE TO SET `no_pixel_information = True` below in order
# to remove the pixels from the policy inputs/Q inputs.
# 'pixel_wrapper_kwargs': {
# 'observation_key': 'pixels',
# 'pixels_only': False,
# 'render_kwargs': {
# 'width': 32,
# 'height': 32,
# },
# },
# 'camera_settings': {
# 'azimuth': 180,
# 'distance': 0.38,
# 'elevation': -36,
# 'lookat': (0.04, 0.008, 0.025),
# },
},
'TurnFreeValve3RandomReset-v0': {
'reward_keys': (
'object_to_target_position_distance_cost',
'object_to_target_orientation_distance_cost',
),
'initial_distribution_path': '',
'reset_from_corners': True,
},
'TurnFreeValve3ResetFreeRandomGoal-v0': {
'observation_keys': (
'claw_qpos',
'object_position',
'object_orientation_cos',
'object_orientation_sin',
'last_action',
'target_orientation',
'object_to_target_relative_position',
),
'reward_keys': (
'object_to_target_position_distance_cost',
'object_to_target_orientation_distance_cost',
),
'reset_fingers': True,
},
'TurnFreeValve3ResetFreeSwapGoal-v0': {
'reward_keys_and_weights': {
'object_to_target_position_distance_reward': tune.grid_search([1, 2]),
'object_to_target_orientation_distance_reward': 1,
# 'object_to_target_position_distance_reward': tune.grid_search([1]),
# 'object_to_target_orientation_distance_reward': 0,
},
'reset_fingers': True,
'observation_keys': (
'claw_qpos',
'last_action',
'object_xy_position',
'object_z_orientation_cos',
'object_z_orientation_sin',
'target_xy_position',
'target_z_orientation_cos',
'target_z_orientation_sin',
),
'goals': tune.grid_search([
[(0, 0, 0, 0, 0, np.pi / 2), (-0.05, -0.06, 0, 0, 0, 0)],
# [(0.05, 0.06, 0, 0, 0, 0), (-0.05, -0.06, 0, 0, 0, 0)],
# [(0, 0, 0, 0, 0, 0), (-0.05, -0.06, 0, 0, 0, 0)],
]),
},
'TurnFreeValve3ResetFreeSwapGoalEval-v0': {
'reward_keys_and_weights': {
# 'object_to_target_position_distance_reward': tune.grid_search([1, 2]),
'object_to_target_position_distance_reward': tune.grid_search([2]),
'object_to_target_orientation_distance_reward': 1,
},
'observation_keys': (
'claw_qpos',
'last_action',
'object_xy_position',
'object_z_orientation_cos',
'object_z_orientation_sin',
'target_z_orientation_cos',
'target_z_orientation_sin',
'target_xy_position',
),
# 'goals': tune.grid_search([
# [(0, 0, 0, 0, 0, np.pi / 2), (-0.05, -0.06, 0, 0, 0, 0)],
# [(0.05, 0.06, 0, 0, 0, 0), (-0.05, -0.06, 0, 0, 0, 0)],
# [(0, 0, 0, 0, 0, 0), (-0.05, -0.06, 0, 0, 0, 0)],
# ]),
},
'TurnFreeValve3ResetFreeCurriculum-v0': {
'reward_keys': (
'object_to_target_position_distance_cost',
'object_to_target_orientation_distance_cost',
),
'reset_fingers': False,
},
'XYTurnValve3Fixed-v0': {
'reward_keys': (
'object_to_target_position_distance_cost',
'object_to_target_orientation_distance_cost',
'eef_to_object_xy_distance_cost',
),
},
'XYTurnValve3RandomReset-v0': {
'reward_keys': (
'object_to_target_position_distance_cost',
'object_to_target_orientation_distance_cost',
'eef_to_object_xy_distance_cost',
),
'num_goals': 1,
},
'XYTurnValve3Random-v0': {
'reward_keys': (
'object_to_target_position_distance_cost',
'object_to_target_orientation_distance_cost',
'eef_to_object_xy_distance_cost',
),
},
'XYTurnValve3ResetFree-v0': {
'reward_keys': (
'object_to_target_position_distance_cost',
'object_to_target_orientation_distance_cost',
'eef_to_object_xy_distance_cost',
),
'reset_fingers': tune.grid_search([True, False]),
'reset_arm': False,
},
# Lifting Tasks
'LiftDDFixed-v0': {
'reward_keys_and_weights': {
'object_to_target_z_position_distance_reward': 10,
'object_to_target_xy_position_distance_reward': 0,
'object_to_target_orientation_distance_reward': 0, #5,
},
'init_qpos_range': (
(-0.05, -0.05, 0.041, -np.pi, -np.pi, -np.pi),
(0.05, 0.05, 0.041, np.pi, np.pi, np.pi)
),
'target_qpos_range': (
(-0.05, -0.05, 0, 0, 0, 0),
(0.05, 0.05, 0, 0, 0, 0)
),
'use_bowl_arena': False,
},
'LiftDDResetFree-v0': {
'reward_keys_and_weights': {
'object_to_target_z_position_distance_reward': 0,
'object_to_target_xy_position_distance_reward': 1,
'object_to_target_orientation_distance_reward': 0,
},
'init_qpos_range': (
(0, 0, 0.041, -np.pi, -np.pi, -np.pi),
(0, 0, 0.041, np.pi, np.pi, np.pi),
),
'target_qpos_range': (
(-0.05, -0.05, 0, 0, 0, 0),
(0.05, 0.05, 0, 0, 0, 0)
),
'use_bowl_arena': False,
},
# Flipping Tasks
'FlipEraserFixed-v0': {
'reward_keys_and_weights': {
'object_to_target_position_distance_reward': 1,
'object_to_target_orientation_distance_reward': 20,
},
'init_qpos_range': [(0, 0, 0, 0, 0, 0)],
'target_qpos_range': [(0, 0, 0, np.pi, 0, 0)],
},
'FlipEraserResetFree-v0': {
'reward_keys_and_weights': {
'object_to_target_position_distance_reward': 1,
'object_to_target_orientation_distance_reward': 20,
},
},
'FlipEraserResetFreeSwapGoal-v0': {
'reward_keys_and_weights': {
'object_to_target_position_distance_reward': 1,
'object_to_target_orientation_distance_reward': 20,
},
},
# === Translation Tasks ===
'TranslateMultiPuckFixed-v0': {
'init_qpos_ranges': (
((0.1, 0.1, 0, 0, 0, 0), (0.1, 0.1, 0, 0, 0, 0)),
((-0.1, -0.1, 0, 0, 0, 0), (-0.1, -0.1, 0, 0, 0, 0)),
),
'target_qpos_ranges': (
((0.1, -0.1, 0, 0, 0, 0), (0.1, -0.1, 0, 0, 0, 0)),
((-0.1, 0.1, 0, 0, 0, 0), (-0.1, 0.1, 0, 0, 0, 0)),
),
'observation_keys': (
'claw_qpos',
'last_action',
'object1_xy_position',
'object2_xy_position',
),
'reward_keys_and_weights': {
'object1_to_target_position_distance_log_reward': 1,
'object2_to_target_position_distance_log_reward': 1,
}
}
}
}
}
# Shared pixel-observation settings for the free-screw DClaw tasks: a small
# 32x32 rendered view plus the camera pose used to produce it.
# NOTE(review): camera_id -1 presumably selects the free camera positioned by
# 'camera_settings' — confirm against the renderer's convention.
FREE_SCREW_VISION_KWARGS = {
    'pixel_wrapper_kwargs': {
        'pixels_only': False,  # keep the state observations alongside pixels
        'normalize': False,
        'render_kwargs': {
            'width': 32,
            'height': 32,
            'camera_id': -1,
        },
    },
    'camera_settings': {
        'azimuth': 180,
        'distance': 0.38,
        'elevation': -36,
        'lookat': (0.04, 0.008, 0.026),
    },
}
# Shared pixel-observation settings for the fixed-screw DClaw tasks.
# Same 32x32 rendering as FREE_SCREW_VISION_KWARGS, different camera pose.
FIXED_SCREW_VISION_KWARGS = {
    'pixel_wrapper_kwargs': {
        'pixels_only': False,  # keep the state observations alongside pixels
        'normalize': False,
        'render_kwargs': {
            'width': 32,
            'height': 32,
            'camera_id': -1,
        }
    },
    'camera_settings': {
        'azimuth': 180,
        'distance': 0.3,
        'elevation': -50,
        'lookat': np.array([0.02, 0.004, 0.09]),
    },
}
# Shared pixel-observation settings for the bead-sliding DClaw tasks.
SLIDE_BEADS_VISION_KWARGS = {
    'pixel_wrapper_kwargs': {
        'pixels_only': False,  # keep the state observations alongside pixels
        'normalize': False,
        'render_kwargs': {
            'width': 32,
            'height': 32,
            'camera_id': -1,
        },
    },
    'camera_settings': {
        'azimuth': 90,
        'distance': 0.37,
        'elevation': -45,
        'lookat': (0, 0.0046, -0.016),
    },
}
# Per-task environment kwargs used when training *from vision*
# (`--vision`): each entry maps universe -> domain -> task name to the
# kwargs passed to the environment constructor.
#
# NOTE(review): the original literal defined 'LiftDDResetFree-v0' twice in
# the 'DClaw' dict. Python dict literals silently keep only the *last*
# duplicate, so the earlier entry (a 64x64 repositioning config) was dead
# code; it has been removed and only the effective entry is kept below.
ENVIRONMENT_PARAMS_PER_UNIVERSE_DOMAIN_TASK_VISION = {
    'gym': {
        'DClaw': {
            'TurnFixed-v0': {
                **FIXED_SCREW_VISION_KWARGS,
                'reward_keys_and_weights': {
                    'object_to_target_angle_distance_reward': 1,
                },
                'init_pos_range': (-np.pi, np.pi),
                'target_pos_range': [-np.pi / 2, -np.pi / 2],
                'observation_keys': (
                    'claw_qpos',
                    'pixels',
                    'last_action',
                    # 'target_angle_cos',
                    # 'target_angle_sin',
                    # === BELOW JUST FOR LOGGING ===
                    'object_angle_cos',
                    'object_angle_sin',
                ),
            },
            'TurnResetFree-v0': {
                **FIXED_SCREW_VISION_KWARGS,
                'reward_keys_and_weights': {
                    'object_to_target_angle_distance_reward': 1,
                },
                'reset_fingers': True,
                'init_pos_range': (0, 0),
                'target_pos_range': [-np.pi / 2, -np.pi / 2],
                'observation_keys': (
                    'claw_qpos',
                    'pixels',
                    'last_action',
                    # 'target_angle_cos',
                    # 'target_angle_sin',
                    # === BELOW JUST FOR LOGGING ===
                    'object_angle_cos',
                    'object_angle_sin',
                ),
            },
            # Free screw
            'TurnFreeValve3Fixed-v0': {
                **FREE_SCREW_VISION_KWARGS,
                'reward_keys_and_weights': {
                    'object_to_target_position_distance_reward': 2,
                    'object_to_target_orientation_distance_reward': 1,
                },
                'observation_keys': (
                    'pixels',
                    'claw_qpos',
                    'last_action',
                    # === BELOW JUST FOR LOGGING ===
                    'object_xy_position',
                    'object_z_orientation_cos',
                    'object_z_orientation_sin',
                ),
                'init_qpos_range': (
                    (-0.08, -0.08, 0, 0, 0, -np.pi),
                    (0.08, 0.08, 0, 0, 0, np.pi)
                ),
                # 'target_qpos_range': [
                #     (0, 0, 0, 0, 0, -np.pi / 2),
                #     (0, 0, 0, 0, 0, -np.pi / 2)
                # ],
                'target_qpos_range': [
                    (0, 0, 0, 0, 0, -np.pi / 2)
                ],
            },
            # === Reset-free environment below ===
            'TurnFreeValve3ResetFree-v0': {
                **FREE_SCREW_VISION_KWARGS,
                'reward_keys_and_weights': {
                    'object_to_target_position_distance_reward': 2,
                    'object_to_target_orientation_distance_reward': 1,
                },
                'reset_fingers': True,
                'reset_frequency': 0,
                'init_qpos_range': [(0, 0, 0, 0, 0, 0)],
                'target_qpos_range': [
                    (0, 0, 0, 0, 0, -np.pi / 2),
                    # (0, 0, 0, 0, 0, -np.pi / 2)
                    (0, 0, 0, 0, 0, np.pi / 2)
                ],
                'observation_keys': (
                    'pixels',
                    'claw_qpos',
                    'last_action',
                    # === BELOW JUST FOR LOGGING ===
                    'object_xy_position',
                    'object_z_orientation_cos',
                    'object_z_orientation_sin',
                ),
            },
            'TurnFreeValve3ResetFreeSwapGoal-v0': {
                **FREE_SCREW_VISION_KWARGS,
                'reward_keys_and_weights': {
                    'object_to_target_position_distance_reward': tune.grid_search([0.5, 1]),
                    'object_to_target_orientation_distance_reward': 1,
                },
                'reset_fingers': True,
                'observation_keys': (
                    'claw_qpos',
                    'last_action',
                    'pixels',
                    'target_xy_position',
                    'target_z_orientation_cos',
                    'target_z_orientation_sin',
                    # === BELOW JUST FOR LOGGING ===
                    'object_xy_position',
                    'object_z_orientation_cos',
                    'object_z_orientation_sin',
                ),
            },
            'TurnFreeValve3ResetFreeSwapGoalEval-v0': {
                **FREE_SCREW_VISION_KWARGS,
                'init_qpos_range': (
                    (-0.08, -0.08, 0, 0, 0, -np.pi),
                    (0.08, 0.08, 0, 0, 0, np.pi)
                ),
                'observation_keys': (
                    'claw_qpos',
                    'last_action',
                    'pixels',
                    'target_xy_position',
                    'target_z_orientation_cos',
                    'target_z_orientation_sin',
                    # === BELOW JUST FOR LOGGING ===
                    'object_position',
                    'object_orientation_cos',
                    'object_orientation_sin',
                ),
            },
            # NOTE(review): this entry still uses the older 'reward_keys'
            # (cost-based) convention rather than 'reward_keys_and_weights';
            # left as-is since the env presumably expects this interface.
            'TurnFreeValve3RandomReset-v0': {
                'reward_keys': (
                    'object_to_target_position_distance_cost',
                    'object_to_target_orientation_distance_cost',
                ),
                'initial_distribution_path': '',
                'reset_from_corners': True,
            },
            'ScrewFixed-v0': {},
            'ScrewRandom-v0': {},
            'ScrewRandomDynamics-v0': {},
            # Translating Puck Tasks
            'TranslatePuckFixed-v0': {
                'target_qpos_range': [
                    (0, 0, 0, 0, 0, 0),
                    (0, 0, 0, 0, 0, 0)
                ],
                'init_qpos_range': (
                    (-0.08, -0.08, 0, 0, 0, 0),
                    (0.08, 0.08, 0, 0, 0, 0)
                ),
                'reward_keys_and_weights': {
                    'object_to_target_position_distance_reward': 1,
                },
            },
            'TranslatePuckResetFree-v0': {
                'target_qpos_range': [
                    (-0.08, -0.08, 0, 0, 0, 0),
                    (0.08, 0.08, 0, 0, 0, 0)
                ],
                'reward_keys_and_weights': {
                    'object_to_target_position_distance_reward': 1,
                },
            },
            # Lifting Tasks
            'LiftDDFixed-v0': {
                'reward_keys_and_weights': {
                    'object_to_target_z_position_distance_reward': 5,
                    'object_to_target_xy_position_distance_reward': 1,
                    'object_to_target_orientation_distance_reward': 0,
                },
                'init_qpos_range': (
                    (0, 0, 0.041, -np.pi, -np.pi, -np.pi),
                    (0, 0, 0.041, np.pi, np.pi, np.pi)
                ),
                'target_qpos_range': [(0, 0, 0.05, 0, 0, 0)],
                'use_bowl_arena': False,
            },
            'LiftDDResetFreeComposedGoals-v0': {
                'reward_keys_and_weights': {
                    'object_to_target_z_position_distance_reward': 1,
                    'object_to_target_xy_position_distance_reward': 1,
                    'object_to_target_orientation_distance_reward': 1,
                },
                'reset_policy_checkpoint_path': '',
                'goals': [
                    (0, 0, 0, 0, 0, 0),
                    (0, 0, 0.05, 0, 0, 0),
                ],
                'reset_frequency': 0,
            },
            # Flipping Tasks
            'FlipEraserFixed-v0': {
                'reward_keys_and_weights': {
                    'object_to_target_position_distance_reward': 1,
                    'object_to_target_orientation_distance_reward': 20,
                },
                # In bowl
                # 'camera_settings': {
                #     'azimuth': 180,
                #     'distance': 0.26,
                #     'elevation': -32,
                #     'lookat': (0, 0, 0.06)
                # },
                'observation_keys': (
                    'pixels', 'claw_qpos', 'last_action',
                    'object_position',
                    'object_quaternion',
                ),
                'reset_policy_checkpoint_path': None,
            },
            # Effective 'LiftDDResetFree-v0' configuration (see module note
            # above about the removed dead duplicate).
            'LiftDDResetFree-v0': {
                # For repositioning
                'reward_keys_and_weights': {
                    'object_to_target_z_position_distance_reward': 0,
                    'object_to_target_xy_position_distance_reward': 1,
                    'object_to_target_orientation_distance_reward': 0,
                },
                'init_qpos_range': (
                    (0, 0, 0.041, -np.pi, -np.pi, -np.pi),
                    (0, 0, 0.041, np.pi, np.pi, np.pi),
                ),
                'target_qpos_range': (
                    (-0.05, -0.05, 0, 0, 0, 0),
                    (0.05, 0.05, 0, 0, 0, 0)
                ),
                'use_bowl_arena': False,
                # For Lifting
                # 'reward_keys_and_weights': {
                #     'object_to_target_z_position_distance_reward': 10,
                #     'object_to_target_xy_position_distance_reward': tune.grid_search([1, 2]),
                #     'object_to_target_orientation_distance_reward': 0,
                # },
                # 'init_qpos_range': (
                #     (0, 0, 0.041, -np.pi, -np.pi, -np.pi),
                #     (0, 0, 0.041, np.pi, np.pi, np.pi),
                # ),
                # 'target_qpos_range': [(0, 0, 0.05, 0, 0, 0)],
                'pixel_wrapper_kwargs': {
                    'pixels_only': False,
                    'normalize': False,
                    'render_kwargs': {
                        'width': 32,
                        'height': 32,
                        'camera_id': -1,
                    }
                },
                # In box
                'camera_settings': {
                    'azimuth': 180,
                    'distance': 0.35,
                    'elevation': -55,
                    'lookat': (0, 0, 0.03)
                },
                # In bowl
                # 'camera_settings': {
                #     'azimuth': 180,
                #     'distance': 0.26,
                #     'elevation': -32,
                #     'lookat': (0, 0, 0.06)
                # },
                'observation_keys': (
                    'pixels', 'claw_qpos', 'last_action',
                    'object_position',
                    'object_quaternion',
                ),
                'reset_policy_checkpoint_path': None,
            },
            # Sliding Tasks
            'SlideBeadsFixed-v0': {
                **SLIDE_BEADS_VISION_KWARGS,
                'reward_keys_and_weights': {
                    'objects_to_targets_mean_distance_reward': 1,
                },
                'init_qpos_range': (
                    (-0.0475, -0.0475, -0.0475, -0.0475),
                    (0.0475, 0.0475, 0.0475, 0.0475),
                ),
                'target_qpos_range': [
                    (-0.0475, -0.0475, 0.0475, 0.0475),
                    (-0.0475, -0.0475, 0.0475, 0.0475),
                ],
                'num_objects': 4,
                'cycle_goals': True,
                'observation_keys': (
                    'claw_qpos',
                    'last_action',
                    'pixels',
                    # === BELOW JUST FOR LOGGING ==
                    'objects_target_positions',
                    'objects_positions',
                ),
            },
            'SlideBeadsResetFree-v0': {
                **SLIDE_BEADS_VISION_KWARGS,
                'reward_keys_and_weights': {
                    'objects_to_targets_mean_distance_reward': 1,
                    # 'objects_to_targets_mean_distance_reward': 0, # Make sure 0 ext reward
                },
                'init_qpos_range': [(0, 0, 0, 0)],
                # LNT Baseline
                # 'target_qpos_range': [
                #     (0, 0, 0, 0),
                #     (-0.0475, -0.0475, 0.0475, 0.0475),
                # ],
                # 1 goal with RND reset controller
                'target_qpos_range': [
                    (-0.0475, -0.0475, 0.0475, 0.0475),
                    (-0.0475, -0.0475, 0.0475, 0.0475),
                ],
                'num_objects': 4,
                'cycle_goals': True,
                'observation_keys': (
                    'claw_qpos',
                    'last_action',
                    'pixels',
                    # === BELOW JUST FOR LOGGING ===
                    'objects_positions',
                    'objects_target_positions',
                ),
            },
            'SlideBeadsResetFreeEval-v0': {
                'reward_keys_and_weights': {
                    'objects_to_targets_mean_distance_reward': 1,
                },
                'init_qpos_range': [(0, 0, 0, 0)],
                'num_objects': 4,
                'target_qpos_range': [
                    (0, 0, 0, 0),
                    (-0.0475, -0.0475, 0.0475, 0.0475),
                ],
                # 'target_qpos_range': [
                #     (0, 0),
                #     (-0.0825, 0.0825),
                #     (0.0825, 0.0825),
                #     (-0.04, 0.04),
                #     (-0.0825, -0.0825),
                # ],
                'cycle_goals': True,
                'pixel_wrapper_kwargs': {
                    'observation_key': 'pixels',
                    'pixels_only': False,
                    'render_kwargs': {
                        'width': 32,
                        'height': 32,
                    },
                },
                'observation_keys': (
                    'claw_qpos',
                    'objects_positions',
                    'last_action',
                    'objects_target_positions',
                    'pixels',
                ),
                'camera_settings': {
                    'azimuth': 23.234042553191497,
                    'distance': 0.2403358053524018,
                    'elevation': -29.68085106382978,
                    'lookat': (-0.00390331, 0.01236683, 0.01093447),
                }
            },
        },
    },
}
def get_num_epochs(universe, domain, task):
    """Resolve the epoch count for a (universe, domain, task) triple.

    The lookup table is walked one level per key. An int found at any
    level applies to everything nested beneath it; a missing (or falsy)
    entry falls back to the table's DEFAULT_KEY at that level.
    """
    node = NUM_EPOCHS_PER_UNIVERSE_DOMAIN_TASK.copy()
    for key in (universe, domain, task):
        if isinstance(node, int):
            return node
        node = node.get(key) or node[DEFAULT_KEY]
    return node
def get_max_path_length(universe, domain, task):
    """Resolve the episode length for a (universe, domain, task) triple.

    Same traversal scheme as `get_num_epochs`: an int at any level wins,
    and missing/falsy entries fall back to DEFAULT_KEY.
    """
    node = MAX_PATH_LENGTH_PER_UNIVERSE_DOMAIN_TASK.copy()
    for key in (universe, domain, task):
        if isinstance(node, int):
            return node
        node = node.get(key) or node[DEFAULT_KEY]
    return node
def get_initial_exploration_steps(spec):
    """Warm-up exploration budget: 50 episodes' worth of env steps.

    `spec` may either be a full Tune trial spec (with a 'config' key) or
    the config dict itself.
    """
    config = spec.get('config', spec)
    max_path_length = config['sampler_params']['kwargs']['max_path_length']
    return 50 * max_path_length
def get_checkpoint_frequency(spec):
    """Epochs between checkpoints so NUM_CHECKPOINTS are saved per run.

    `spec` may either be a full Tune trial spec (with a 'config' key) or
    the config dict itself.
    """
    config = spec.get('config', spec)
    n_epochs = config['algorithm_params']['kwargs']['n_epochs']
    return n_epochs // NUM_CHECKPOINTS
def get_policy_params(universe, domain, task):
    """Return a fresh copy of the base Gaussian policy params.

    The arguments are currently unused — the policy configuration does
    not depend on the task — but the signature mirrors the other
    `get_*_params` helpers.
    """
    return GAUSSIAN_POLICY_PARAMS_BASE.copy()
def get_algorithm_params(universe, domain, task):
    """Per-task algorithm overrides, merged on top of the base params.

    The epoch count is resolved eagerly; the exploration-step budget is
    deferred via `tune.sample_from` so it can read the final sampler
    config of each trial.
    """
    return {
        'kwargs': {
            'n_epochs': get_num_epochs(universe, domain, task),
            'n_initial_exploration_steps': tune.sample_from(
                get_initial_exploration_steps),
        }
    }
def get_environment_params(universe, domain, task, from_vision):
    """Look up the environment kwargs for (universe, domain, task).

    Chooses the vision or state parameter table based on `from_vision`;
    any missing level resolves to an empty dict.
    """
    table = (ENVIRONMENT_PARAMS_PER_UNIVERSE_DOMAIN_TASK_VISION
             if from_vision
             else ENVIRONMENT_PARAMS_PER_UNIVERSE_DOMAIN_TASK_STATE)
    return table.get(universe, {}).get(domain, {}).get(task, {})
# Total number of checkpoints saved over one run; used by
# `get_checkpoint_frequency` to derive the per-epoch frequency.
NUM_CHECKPOINTS = 10
# Per-domain sampler overrides, deep-merged over the defaults built in
# `get_variant_spec_base`.
SAMPLER_PARAMS_PER_DOMAIN = {
    'DClaw': {
        'type': 'SimpleSampler',
    },
}
def get_variant_spec_base(universe, domain, task, task_eval, policy, algorithm, from_vision):
    """Build the base Tune variant spec for a single experiment.

    Combines environment, policy, Q-function, algorithm, replay pool,
    sampler, and run parameters into one nested dict. Several entries are
    `tune.sample_from` thunks that are resolved per-trial so that, e.g.,
    the Q-functions always see the same observation keys as the policy.

    Args:
        universe/domain/task: identify the training environment.
        task_eval: evaluation task; when equal to `task`, the evaluation
            env reuses the training kwargs.
        policy: policy name (currently unused beyond being threaded through).
        algorithm: key into ALGORITHM_PARAMS_ADDITIONAL.
        from_vision: whether the run trains from pixels.

    Returns:
        The variant spec dict.
    """
    algorithm_params = deep_update(
        ALGORITHM_PARAMS_BASE,
        get_algorithm_params(universe, domain, task),
        ALGORITHM_PARAMS_ADDITIONAL.get(algorithm, {}),
    )
    variant_spec = {
        'git_sha': get_git_rev(__file__),
        'environment_params': {
            'training': {
                'domain': domain,
                'task': task,
                'universe': universe,
                'kwargs': get_environment_params(universe, domain, task, from_vision),
            },
            'evaluation': {
                'domain': domain,
                'task': task_eval,
                'universe': universe,
                # Reuse the training kwargs verbatim when evaluating on the
                # same task, otherwise look up the eval task's own kwargs.
                'kwargs': (
                    tune.sample_from(lambda spec: (
                        spec.get('config', spec)
                        ['environment_params']
                        ['training']
                        .get('kwargs')
                    ))
                    if task == task_eval
                    else get_environment_params(universe, domain, task_eval, from_vision)),
            },
        },
        'policy_params': get_policy_params(universe, domain, task),
        'exploration_policy_params': {
            'type': 'UniformPolicy',
            'kwargs': {
                # Mirror the policy's observation keys.
                'observation_keys': tune.sample_from(lambda spec: (
                    spec.get('config', spec)
                    ['policy_params']
                    ['kwargs']
                    .get('observation_keys')
                ))
            },
        },
        'Q_params': {
            'type': 'double_feedforward_Q_function',
            'kwargs': {
                'hidden_layer_sizes': (M, ) * N,
                # Mirror the policy's observation keys.
                'observation_keys': tune.sample_from(lambda spec: (
                    spec.get('config', spec)
                    ['policy_params']
                    ['kwargs']
                    .get('observation_keys')
                )),
                'observation_preprocessors_params': {}
            },
            # 'discrete_actions': False,
        },
        'algorithm_params': algorithm_params,
        'replay_pool_params': {
            'type': 'SimpleReplayPool',
            'kwargs': {
                'max_size': int(5e5),
            },
            # 'type': 'HindsightExperienceReplayPool',
            # 'kwargs': {
            #     'max_size': int(5e5),
            #     'her_strategy': {
            #         'resampling_probability': 0.5,
            #         'type': 'final',
            #     }
            # },
        },
        'sampler_params': deep_update({
            # 'type': 'GoalSampler',
            'type': 'SimpleSampler',
            'kwargs': {
                'max_path_length': get_max_path_length(universe, domain, task),
                'min_pool_size': tune.sample_from(lambda spec: (
                    spec.get('config', spec)
                    ['sampler_params']['kwargs']['max_path_length']
                )),
                'batch_size': 256,  # tune.grid_search([128, 256]),
                'store_last_n_paths': 20,
            }
        }, SAMPLER_PARAMS_PER_DOMAIN.get(domain, {})),
        'run_params': {
            'seed': tune.sample_from(
                lambda spec: np.random.randint(0, 10000)),
            'checkpoint_at_end': True,
            'checkpoint_frequency': tune.sample_from(get_checkpoint_frequency),
            'checkpoint_replay_pool': False,
        },
    }
    # Set this flag if you don't want to pass pixels into the policy/Qs
    no_pixel_information = False
    # TODO: Clean this up
    env_kwargs = variant_spec['environment_params']['training']['kwargs']
    env_obs_keys = env_kwargs.get('observation_keys', tuple())
    env_goal_keys = env_kwargs.get('goal_keys', tuple())
    # === FROM VISION ===
    if from_vision and "pixel_wrapper_kwargs" in env_kwargs.keys() and \
       "device_path" not in env_kwargs.keys():
        # === COMMENT BELOW TO SAVE PIXELS INTO POOL ===
        # BUGFIX(review): the original two lines here were corrupted
        # ("vsariant_spec" typo and an undefined `non_image_obs_keys`),
        # raising NameError whenever this vision branch ran. Restored the
        # clearly intended behavior: save only non-pixel keys to the pool.
        non_image_obs_keys = tuple(key for key in env_obs_keys if key != 'pixels')
        variant_spec['replay_pool_params']['kwargs']['obs_save_keys'] = non_image_obs_keys
        # == FILTER OUT GROUND TRUTH STATE ===
        non_object_obs_keys = tuple(key for key in env_obs_keys if 'object' not in key)
        variant_spec['policy_params']['kwargs']['observation_keys'] = variant_spec[
            'exploration_policy_params']['kwargs']['observation_keys'] = variant_spec[
            'Q_params']['kwargs']['observation_keys'] = non_object_obs_keys
    # === FROM STATE / NO PIXEL INFORMATION ===
    elif no_pixel_information or not from_vision:
        non_pixel_obs_keys = tuple(key for key in env_obs_keys if key != 'pixels')
        variant_spec['policy_params']['kwargs']['observation_keys'] = variant_spec[
            'exploration_policy_params']['kwargs']['observation_keys'] = variant_spec[
            'Q_params']['kwargs']['observation_keys'] = non_pixel_obs_keys
    if env_goal_keys:
        variant_spec['policy_params']['kwargs']['goal_keys'] = variant_spec[
            'exploration_policy_params']['kwargs']['goal_keys'] = variant_spec[
            'Q_params']['kwargs']['goal_keys'] = env_goal_keys
    if 'ResetFree' not in task:
        # Reset-based tasks don't record training videos.
        variant_spec['algorithm_params']['kwargs']['save_training_video_frequency'] = 0
    if domain == 'MiniGrid':
        # MiniGrid has a discrete action space; switch policy types and
        # disable reparameterization accordingly.
        variant_spec['algorithm_params']['kwargs']['reparameterize'] = False
        variant_spec['policy_params']['type'] = 'DiscretePolicy'
        variant_spec['policy_params']['kwargs']['hidden_layer_sizes'] = (32, 32)
        variant_spec['exploration_policy_params']['type'] = 'UniformDiscretePolicy'
        variant_spec['environment_params']['training']['kwargs']['normalize'] = False
    return variant_spec
# (universe, domain, task) triples that are always treated as image-based
# environments even though their names don't contain "image".
IMAGE_ENVS = (
    ('robosuite', 'InvisibleArm', 'FreeFloatManipulation'),
)
def is_image_env(universe, domain, task, variant_spec):
    """Return True when the experiment should be treated as pixel-based.

    This is the case when the task or domain name mentions "image", when
    the training env wraps pixel observations, or when the triple is
    explicitly listed in IMAGE_ENVS.
    """
    if 'image' in task.lower() or 'image' in domain.lower():
        return True
    training_kwargs = variant_spec['environment_params']['training']['kwargs']
    if 'pixel_wrapper_kwargs' in training_kwargs:
        return True
    return (universe, domain, task) in IMAGE_ENVS
# Preprocessor configurations applied to state observations when a
# --preprocessor_type is given for a non-vision run. Keys are the CLI
# names; 'None' maps to no preprocessing.
STATE_PREPROCESSOR_PARAMS = {
    'ReplicationPreprocessor': {
        'type': 'ReplicationPreprocessor',
        'kwargs': {
            'n': 0,
            'scale_factor': 1,
        }
    },
    'RandomNNPreprocessor': {
        'type': 'RandomNNPreprocessor',
        'kwargs': {
            'hidden_layer_sizes': (32, 32),
            'activation': 'linear',
            'output_activation': 'linear',
        }
    },
    'RandomMatrixPreprocessor': {
        'type': 'RandomMatrixPreprocessor',
        'kwargs': {
            'output_size_scale_factor': 1,
            'coefficient_range': (-1., 1.),
        }
    },
    'None': None,
}
from softlearning.misc.utils import PROJECT_PATH, NFS_PATH
# Preprocessor configurations applied to the 'pixels' observation when
# training from vision. Keys are the CLI --preprocessor_type names.
# NOTE(review): several entries point at hard-coded absolute checkpoint
# paths (/root/..., NFS_PATH) — these only resolve on the original
# training machines; verify before reuse.
PIXELS_PREPROCESSOR_PARAMS = {
    'StateEstimatorPreprocessor': {
        'type': 'StateEstimatorPreprocessor',
        'kwargs': {
            'input_shape': (32, 32, 3),
            'num_hidden_units': 512,
            'num_hidden_layers': 2,
            'state_estimator_path': '/root/softlearning/softlearning/models/state_estimators/state_estimator_from_vae_latents.h5',
            'preprocessor_params': {
                'type': 'VAEPreprocessor',
                'kwargs': {
                    'encoder_path': '/root/softlearning/softlearning/models/vae_16_dim_beta_3_invisible_claw_l2_reg/encoder_16_dim_3.0_beta.h5',
                    'decoder_path': '/root/softlearning/softlearning/models/vae_16_dim_beta_3_invisible_claw_l2_reg/decoder_16_dim_3.0_beta.h5',
                    'trainable': False,
                    'image_shape': (32, 32, 3),
                    'latent_dim': 16,
                    'include_decoder': False,
                }
            }
        }
    },
    'VAEPreprocessor': {
        'type': 'VAEPreprocessor',
        'kwargs': {
            # 'image_shape': (32, 32, 3),
            'image_shape': (64, 64, 3),
            # 'latent_dim': 16,
            # 'encoder_path': '/nfs/kun1/users/justinvyu/pretrained_models/vae_16_dim_beta_3_invisible_claw_l2_reg/encoder_16_dim_3.0_beta.h5',
            # 'decoder_path': '/nfs/kun1/users/justinvyu/pretrained_models/vae_16_dim_beta_3_invisible_claw_l2_reg/decoder_16_dim_3.0_beta.h5',
            'latent_dim': 64,
            'encoder_path': os.path.join(NFS_PATH,
                                         'pretrained_models',
                                         'vae_64_dim_beta_5_visible_claw_diff_angle',
                                         'encoder_64_dim_5.0_beta.h5'),
            'trainable': False,
        },
    },
    # TODO: Merge OnlineVAEPreprocessor and VAEPreprocessor, just don't update
    # in SAC if not online
    'OnlineVAEPreprocessor': {
        'type': 'OnlineVAEPreprocessor',
        'kwargs': {
            'image_shape': (32, 32, 3),
            'latent_dim': 16,
            # 'latent_dim': 32,
            'beta': 0.5,
            # 'beta': 1e-5,
            # Optionally specify a pretrained model to start finetuning
            # 'encoder_path': os.path.join(PROJECT_PATH,
            #                              'softlearning',
            #                              'models',
            #                              'free_screw_vae_32_dim',
            #                              'encoder_32_dim_0.5_beta_final.h5'),
            # 'decoder_path': os.path.join(PROJECT_PATH,
            #                              'softlearning',
            #                              'models',
            #                              'free_screw_vae_32_dim',
            #                              'decoder_32_dim_0.5_beta_final.h5'),
        },
        # shared=True: the same preprocessor instance is reused across
        # networks (policy/Qs) rather than copied per network.
        'shared': True,
    },
    'RAEPreprocessor': {
        'type': 'RAEPreprocessor',
        'kwargs': {
            'image_shape': (32, 32, 3),
            'latent_dim': 32,
        },
        'shared': True,
    },
    # A one-element list comprehension feeding tune.grid_search; kept as a
    # comprehension so alternative normalization types can be re-enabled.
    'ConvnetPreprocessor': tune.grid_search([
        {
            'type': 'ConvnetPreprocessor',
            'kwargs': {
                'conv_filters': (8, 16, 32),
                'conv_kernel_sizes': (3, ) * 3,
                'conv_strides': (2, ) * 3,
                'normalization_type': tune.sample_from([None]),
                'downsampling_type': 'conv',
            },
        }
        # {
        #     'type': 'ConvnetPreprocessor',
        #     'kwargs': {
        #         'conv_filters': (64, ) * 4,
        #         'conv_kernel_sizes': (3, ) * 4,
        #         'conv_strides': (2, ) * 4,
        #         'normalization_type': normalization_type,
        #         'downsampling_type': 'conv',
        #         'output_kwargs': {
        #             'type': 'flatten',
        #         },
        #     },
        #     # 'weights_path': '/root/nfs/kun1/users/justinvyu/pretrained_models/convnet_64_by_4.pkl',
        # }
        for normalization_type in (None, )
    ]),
}
def _mirror_policy_network_params_to_Q(variant_spec):
    """Make the Q-functions lazily reuse the policy's hidden layer sizes and
    observation preprocessors via per-trial `tune.sample_from` thunks.

    Extracted because both branches of `get_variant_spec_image` previously
    duplicated this code verbatim.
    """
    variant_spec['Q_params']['kwargs']['hidden_layer_sizes'] = (
        tune.sample_from(lambda spec: (deepcopy(
            spec.get('config', spec)
            ['policy_params']
            ['kwargs']
            ['hidden_layer_sizes']
        )))
    )
    variant_spec['Q_params']['kwargs'][
        'observation_preprocessors_params'] = (
            tune.sample_from(lambda spec: (deepcopy(
                spec.get('config', spec)
                ['policy_params']
                ['kwargs']
                ['observation_preprocessors_params']
            )))
        )


def get_variant_spec_image(universe,
                           domain,
                           task,
                           task_eval,
                           policy,
                           algorithm,
                           from_vision,
                           preprocessor_type,
                           *args,
                           **kwargs):
    """Build the base variant spec and attach observation preprocessors.

    For vision runs on image environments, a pixel preprocessor (default:
    ConvnetPreprocessor) is attached to the 'pixels' observation; otherwise,
    if a `preprocessor_type` is given, a state preprocessor is attached to
    every observation key. In both cases the Q-functions mirror the
    policy's network settings.
    """
    variant_spec = get_variant_spec_base(
        universe,
        domain,
        task,
        task_eval,
        policy,
        algorithm,
        from_vision,
        *args, **kwargs)
    if from_vision and is_image_env(universe, domain, task, variant_spec):
        assert preprocessor_type in PIXELS_PREPROCESSOR_PARAMS or preprocessor_type is None
        if preprocessor_type is None:
            preprocessor_type = "ConvnetPreprocessor"
        preprocessor_params = PIXELS_PREPROCESSOR_PARAMS[preprocessor_type]
        variant_spec['policy_params']['kwargs']['hidden_layer_sizes'] = (M, ) * N
        variant_spec['policy_params']['kwargs'][
            'observation_preprocessors_params'] = {
                'pixels': deepcopy(preprocessor_params)
            }
        _mirror_policy_network_params_to_Q(variant_spec)
    elif preprocessor_type:
        # Assign preprocessor to all parts of the state
        assert preprocessor_type in STATE_PREPROCESSOR_PARAMS
        preprocessor_params = STATE_PREPROCESSOR_PARAMS[preprocessor_type]
        obs_keys = variant_spec['environment_params']['training']['kwargs'].get('observation_keys', tuple())
        variant_spec['policy_params']['kwargs']['hidden_layer_sizes'] = (M, ) * N
        variant_spec['policy_params']['kwargs'][
            'observation_preprocessors_params'] = {
                key: deepcopy(preprocessor_params)
                for key in obs_keys
            }
        _mirror_policy_network_params_to_Q(variant_spec)
    return variant_spec
def get_variant_spec(args):
    """Build the full variant spec from parsed command-line arguments."""
    spec = get_variant_spec_image(
        args.universe,
        args.domain,
        args.task,
        args.task_evaluation,
        args.policy,
        args.algorithm,
        args.vision,
        args.preprocessor_type)
    # `or False` coerces a missing/None flag into an explicit boolean.
    spec['run_params']['checkpoint_replay_pool'] = (
        args.checkpoint_replay_pool or False)
    return spec
| 36.887287
| 144
| 0.459478
|
7e1165193c3de9014976a95b060c2aa1c3ff22c2
| 7,049
|
rs
|
Rust
|
src/bin/2020_day15_part2.rs
|
dariuswiles/advent-of-code-rust
|
8fe52dda7bbdf2e07d727c9fffc9d4b0d049a5a9
|
[
"Unlicense"
] | null | null | null |
src/bin/2020_day15_part2.rs
|
dariuswiles/advent-of-code-rust
|
8fe52dda7bbdf2e07d727c9fffc9d4b0d049a5a9
|
[
"Unlicense"
] | null | null | null |
src/bin/2020_day15_part2.rs
|
dariuswiles/advent-of-code-rust
|
8fe52dda7bbdf2e07d727c9fffc9d4b0d049a5a9
|
[
"Unlicense"
] | null | null | null |
//! Advent of Code 2020 Day 15
//! https://adventofcode.com/2020/day/15
//!
//! Challenge part 2
//!
//! Follows the game rules explained in the challenge until the given game turn is reached, at
//! which point the answer to the challenge is obtained. Part 2 of the challenge only increases
//! the number of game turns from 2,020 to 30,000,000.
//
// The increase in the number of turns causes the solution for part 1 to run incredibly slow, so
// the code in this file is completely rewritten to provide a fast solution for even large
// numbers of turns.
use std::collections::HashMap;
/// Comma-separated starting numbers given as the puzzle input.
const CHALLENGE_INPUT: &str = "7,14,0,17,11,1,2";
/// The game turn whose spoken number is the answer to the challenge.
const STOP_AT_TURN: usize = 30_000_000;
/// The game state consisting of:
/// `state` - holding the last turn each game value was seen;
/// `next_num` - the number to added in the next game turn;
/// `turn` - the turn number (where the first turn is 1).
//
// The game rules rely on knowing the last turn each value was seen. Rather than recording the game
// result for every game turn, storing only the last turn each value was seen allows faster lookups
// and requires less memory. Before adding a new value, a lookup is performed to see if it has
// previously been added, and the result is stored in `next_num`. This is a little ugly, but is
// faster than the alternative of storing the last *two* occurrences of every value in the `Game`
// object.
#[derive(Clone, Debug)]
struct Game {
    /// Last turn on which each number was spoken.
    state: HashMap<usize, usize>,
    /// The number that will be spoken on the next turn.
    next_num: usize,
    /// Current turn number (the first turn is 1).
    turn: usize,
}
impl Game {
    /// Builds a `Game` from a comma-separated list of starting numbers.
    ///
    /// All but the last starting number are recorded directly; the last one
    /// determines `next_num` per the game rules (0 if it was not previously
    /// spoken, otherwise the gap since its previous occurrence).
    fn from_str(start_string: &str) -> Self {
        let nums: Vec<usize> = start_string.split(',').map(|n| n.parse().unwrap()).collect();
        let mut state = HashMap::new();
        for (idx, num) in nums[..nums.len() - 1].iter().enumerate() {
            state.insert(*num, idx + 1);
        }
        // `insert` returns the previous turn the number was spoken (if any),
        // which is exactly the lookup the game rules need — one hash access
        // instead of the original `get` followed by `insert`.
        let last = *nums.last().unwrap();
        let next_num = match state.insert(last, nums.len()) {
            Some(prior_turn) => nums.len() - prior_turn,
            None => 0,
        };
        Self { state, next_num, turn: nums.len() }
    }
    /// Plays one turn: speaks the pending `next_num`, records it, and
    /// computes the number for the following turn.
    fn play_one_turn(&mut self) {
        let num_to_add = self.next_num;
        self.turn += 1;
        // Single `insert` both records this turn and yields the prior turn.
        self.next_num = match self.state.insert(num_to_add, self.turn) {
            Some(prior_turn) => self.turn - prior_turn,
            None => 0,
        };
    }
    /// Play the game until the given turn is reached.
    //
    // Iterates until one less than the desired turn and reads `next_num`,
    // which holds the value that would be spoken on `end_turn`. This is
    // required because `state` does not record the last value added, so
    // iterating all the way to `end_turn` would lose the challenge answer.
    fn play_until_turn(&mut self, end_turn: usize) -> usize {
        while self.turn < end_turn - 1 {
            self.play_one_turn();
        }
        self.next_num
    }
}
fn main() {
    // Run the full game on the challenge input and print the number spoken
    // on turn STOP_AT_TURN (30,000,000).
    let mut game = Game::from_str(CHALLENGE_INPUT);
    let result = game.play_until_turn(STOP_AT_TURN);
    println!("The answer to the challenge is {:?}", result);
}
// Test data based on examples on the challenge page.
#[cfg(test)]
mod tests {
    use super::*;
    // Inputs and expected turn-30,000,000 answers taken from the examples on
    // the challenge page. NOTE: each `test_game_*` case plays 30 million
    // turns, so this suite is slow, especially in debug builds.
    const TEST_INPUT_0: &str = "0,3,6";
    const TEST_INPUT_1: &str = "1,3,2";
    const TEST_INPUT_2: &str = "2,1,3";
    const TEST_INPUT_3: &str = "1,2,3";
    const TEST_INPUT_4: &str = "2,3,1";
    const TEST_INPUT_5: &str = "3,2,1";
    const TEST_INPUT_6: &str = "3,1,2";
    #[test]
    fn test_game_0() {
        let mut game = Game::from_str(&TEST_INPUT_0);
        let result = game.play_until_turn(STOP_AT_TURN);
        assert_eq!(result, 175594);
    }
    #[test]
    fn test_game_1() {
        let mut game = Game::from_str(&TEST_INPUT_1);
        let result = game.play_until_turn(STOP_AT_TURN);
        assert_eq!(result, 2578);
    }
    #[test]
    fn test_game_2() {
        let mut game = Game::from_str(&TEST_INPUT_2);
        let result = game.play_until_turn(STOP_AT_TURN);
        assert_eq!(result, 3544142);
    }
    #[test]
    fn test_game_3() {
        let mut game = Game::from_str(&TEST_INPUT_3);
        let result = game.play_until_turn(STOP_AT_TURN);
        assert_eq!(result, 261214);
    }
    #[test]
    fn test_game_4() {
        let mut game = Game::from_str(&TEST_INPUT_4);
        let result = game.play_until_turn(STOP_AT_TURN);
        assert_eq!(result, 6895259);
    }
    #[test]
    fn test_game_5() {
        let mut game = Game::from_str(&TEST_INPUT_5);
        let result = game.play_until_turn(STOP_AT_TURN);
        assert_eq!(result, 18);
    }
    #[test]
    fn test_game_6() {
        let mut game = Game::from_str(&TEST_INPUT_6);
        let result = game.play_until_turn(STOP_AT_TURN);
        assert_eq!(result, 362);
    }
    // The remaining tests pin `from_str`/`play_one_turn` internals: the
    // `state` map keeps only the *last* turn per number, and `next_num`
    // holds the value to be spoken on the following turn.
    #[test]
    fn initialize_with_last_num_repeated() {
        let game = Game::from_str("1,7,8,9,1");
        assert_eq!(game.state.len(), 4);
        assert_eq!(game.state[&7], 2);
        assert_eq!(game.state[&8], 3);
        assert_eq!(game.state[&9], 4);
        assert_eq!(game.state[&1], 5);
        assert_eq!(game.next_num, 4);
        assert_eq!(game.turn, 5);
    }
    #[test]
    fn initialize_with_last_num_not_repeated() {
        let game = Game::from_str("1,7,8,9");
        assert_eq!(game.state.len(), 4);
        assert_eq!(game.state[&1], 1);
        assert_eq!(game.state[&7], 2);
        assert_eq!(game.state[&8], 3);
        assert_eq!(game.state[&9], 4);
        assert_eq!(game.next_num, 0);
        assert_eq!(game.turn, 4);
    }
    #[test]
    fn initialize_with_all_repeats() {
        let game = Game::from_str("7,7,7");
        assert_eq!(game.state.len(), 1);
        assert_eq!(game.state[&7], 3);
        assert_eq!(game.next_num, 1);
        assert_eq!(game.turn, 3);
    }
    #[test]
    fn one_turn_0() {
        let mut game = Game::from_str("33,33,29,78,1");
        game.play_one_turn();
        assert_eq!(game.state.len(), 5);
        assert_eq!(game.state[&33], 2);
        assert_eq!(game.state[&29], 3);
        assert_eq!(game.state[&78], 4);
        assert_eq!(game.state[&1], 5);
        assert_eq!(game.state[&0], 6);
        assert_eq!(game.next_num, 0);
        assert_eq!(game.turn, 6);
    }
    #[test]
    fn one_turn_1() {
        let mut game = Game::from_str("4,0,9,3");
        game.play_one_turn();
        assert_eq!(game.state.len(), 4);
        assert_eq!(game.state[&4], 1);
        assert_eq!(game.state[&9], 3);
        assert_eq!(game.state[&3], 4);
        assert_eq!(game.state[&0], 5);
        assert_eq!(game.next_num, 3);
        assert_eq!(game.turn, 5);
    }
}
| 30.253219
| 99
| 0.599518
|
6231c5af5a9901719aa8aa93fa8ea46216a3f88c
| 2,179
|
py
|
Python
|
src/CommunityGAN/utils.py
|
Bipasha-banerjee/newCGAN
|
073e30bbd43c08c67ee8778b39752a8b75a87fba
|
[
"MIT"
] | 69
|
2018-09-09T03:29:05.000Z
|
2022-03-23T17:12:24.000Z
|
src/CommunityGAN/utils.py
|
Bipasha-banerjee/newCGAN
|
073e30bbd43c08c67ee8778b39752a8b75a87fba
|
[
"MIT"
] | 4
|
2019-07-29T06:44:44.000Z
|
2021-08-11T19:28:19.000Z
|
src/CommunityGAN/utils.py
|
Bipasha-banerjee/newCGAN
|
073e30bbd43c08c67ee8778b39752a8b75a87fba
|
[
"MIT"
] | 34
|
2018-09-09T13:05:58.000Z
|
2022-03-21T10:05:28.000Z
|
import numpy as np
import random
import os
import datetime
def str_list_to_float(str_list):
    """Parse every string in `str_list` as a float; returns a new list."""
    return list(map(float, str_list))
def str_list_to_int(str_list):
    """Parse every string in `str_list` as an int; returns a new list."""
    return list(map(int, str_list))
def read_embeddings(filename, n_node, n_embed):
    """Load node embeddings from a word2vec-style file.

    The first line is a header and is skipped; every other line is
    "<node_id> <v1> <v2> ...". Rows for nodes absent from the file keep
    random values drawn uniformly from [0, 1).
    """
    with open(filename, "r") as f:
        matrix = np.random.rand(n_node, n_embed)
        f.readline()  # discard the header line
        for line in f:
            tokens = line.split()
            matrix[int(tokens[0]), :] = [float(t) for t in tokens[1:]]
        return matrix
def read_embeddings_with_id_convert(filename, graph, n_embed):
    """Like `read_embeddings`, but file rows are keyed by node *name*.

    Names are translated to row indices through `graph.name2id`; the
    matrix has `graph.n_node` rows, and rows not present in the file keep
    random values in [0, 1).
    """
    with open(filename, "r") as f:
        matrix = np.random.rand(graph.n_node, n_embed)
        f.readline()  # discard the header line
        for line in f:
            tokens = line.split()
            matrix[graph.name2id[tokens[0]], :] = [float(t) for t in tokens[1:]]
        return matrix
def agm(x):  # x is 1d-array
    """Affiliation-graph-model link function: elementwise 1 - exp(-x).

    NaNs in the result are replaced with 0 and the output is clipped
    into [1e-6, 1].
    """
    prob = 1.0 - np.exp(-x)
    prob = np.where(np.isnan(prob), 0, prob)
    return np.clip(prob, 1e-6, 1)
def agm_softmax(x):  # x is 1d-array
    """Normalized AGM probabilities: same transform as `agm`, divided by
    the total so the entries form a probability distribution."""
    prob = 1.0 - np.exp(-x)
    prob = np.where(np.isnan(prob), 0, prob)
    prob = np.clip(prob, 1e-6, 1)
    return prob / prob.sum()
def read_edges_from_file(filename):
    """Read whitespace-separated integer edges, one edge per line.

    Lines whose first character is '#' are comments and are skipped.
    Blank lines are also skipped: the original implementation appended an
    empty list for each blank line. Streams the file line by line instead
    of loading it all with `readlines()`.
    """
    edges = []
    with open(filename, "r") as f:
        for line in f:
            if line.startswith('#'):
                continue
            tokens = line.split()
            if not tokens:  # blank line — no edge here
                continue
            edges.append([int(t) for t in tokens])
    return edges
def create_file_dir_in_config(config):
    """Create the parent directory of every `*filename*` attribute of `config`.

    Attributes whose name contains 'filename' (and does not start with an
    underscore) may hold a single path or a list of paths. Fixes two
    issues in the original: `os.makedirs('')` raised FileNotFoundError
    for bare filenames with no directory part, and the exists/makedirs
    pair raced with concurrent creators (now `exist_ok=True`).
    """
    for name, value in config.__dict__.items():
        if name.startswith('_') or 'filename' not in name:
            continue
        paths = value if isinstance(value, list) else [value]
        for path in paths:
            dirname = os.path.dirname(path)
            if dirname:  # skip bare filenames (empty dirname)
                os.makedirs(dirname, exist_ok=True)
def shuffle(*args):
    """Shuffle several equal-length sequences with one shared permutation.

    Returns a tuple of new lists, one per input sequence, all reordered
    by the same random permutation (so aligned elements stay aligned).
    """
    order = list(range(len(args[0])))
    random.shuffle(order)
    return tuple([seq[i] for i in order] for seq in args)
def genearate_tmp_filename(config):
    """Build a per-run-unique temp pickle filename from config + timestamp.

    NOTE(review): ``hash(str(...))`` is not stable across interpreter runs
    (PYTHONHASHSEED), so the name only identifies a config within one run.
    (Function name typo "genearate" kept for caller compatibility.)
    """
    stamp = str(datetime.datetime.now())
    raw = 'tmp-' + str(hash(str(config.__dict__))) + stamp + '.pkl'
    return raw.replace(' ', '_').replace(':', '_')
| 27.582278
| 129
| 0.61129
|
4597a17c4a0c58c7fa7ad76f074b26be95cb7802
| 12,360
|
py
|
Python
|
utils/module_wrappers.py
|
limberc/hypercl
|
ad098a3b18cf2a2ae6e3ecd28a2b7af698f7b807
|
[
"Apache-2.0"
] | null | null | null |
utils/module_wrappers.py
|
limberc/hypercl
|
ad098a3b18cf2a2ae6e3ecd28a2b7af698f7b807
|
[
"Apache-2.0"
] | null | null | null |
utils/module_wrappers.py
|
limberc/hypercl
|
ad098a3b18cf2a2ae6e3ecd28a2b7af698f7b807
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
# Copyright 2019 Christian Henning
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
@title :utils/module_wrappers.py
@author :ch
@contact :henningc@ethz.ch
@created :06/13/2019
@version :1.0
@python_version :3.6.8
An interface for a CL hypernetwork and main network. These interfaces ensure
that we can consistently use these networks without knowing their specific
implementation.
"""
from abc import ABC, abstractmethod
from warnings import warn
import numpy as np
class CLHyperNetInterface(ABC):
    """A general interface for task-conditioned hypernetworks, that are used
    for continual learning.

    .. deprecated:: 1.0
        Please use module :class:`hnets.hnet_interface.CLHyperNetInterface`
        instead.

    Attributes:
        theta: Parameters of the hypernetwork (excluding task embeddings).
        num_weights: Total number of parameters in this network, including
            task embeddings.
        num_outputs: The total number of output neurons (number of weights
            generated for the target network).
        has_theta: Whether the hypernetwork has internal theta weights.
            Otherwise, these weights are assumed to be produced by another
            hypernetwork.
        theta_shapes: A list of lists of integers denoting the shape of every
            weight tensor belonging to "theta". Note, the returned list is
            independent of whether "has_theta" is True.
        has_task_embs: Whether the hypernet has internal task embeddings.
        num_task_embs: Number of task embeddings available internally.
        requires_ext_input: Whether the hypernet expects an external input
            (e.g., another condition in addition to the current task).
        target_shapes: A list of list of integers representing the shapes of
            weight tensors generated for a main network (i.e., the shapes of
            the hypernet output).
    """
    def __init__(self):
        """Initialize the network.

        This class is a deprecated shim; instantiating a subclass emits a
        ``DeprecationWarning`` pointing at the replacement module.
        """
        super(CLHyperNetInterface, self).__init__()
        warn('Please use class "hnets.hnet_interface.CLHyperNetInterface" ' +
             'instead.', DeprecationWarning)
        # The following member variables have to be set by all classes that
        # implement this interface.
        self._theta = None
        self._task_embs = None
        self._theta_shapes = None
        # Task embedding weights + theta weights.
        self._num_weights = None
        self._num_outputs = None
        # If an external input is required, this may not be None.
        self._size_ext_input = None
        self._target_shapes = None
    def _is_properly_setup(self):
        """This method can be used by classes that implement this interface to
        check whether all required properties have been set."""
        # theta/task_embs may legitimately stay None (externally produced),
        # hence these two checks remain disabled:
        # assert(self._theta is not None)
        # assert(self._task_embs is not None)
        assert (self._theta_shapes is not None)
        assert (self._num_weights is not None)
        assert (self._num_outputs is not None)
        assert (self._target_shapes is not None)
    @property
    def theta(self):
        """Getter for read-only attribute theta.

        Theta are all learnable parameters of the hypernet except the task
        embeddings, i.e., theta comprises all parameters that should be
        regularized in order to avoid catastrophic forgetting when training
        the hypernetwork in a Continual Learning setting.

        Returns:
            A :class:`torch.nn.ParameterList` or None, if this network has no
            weights.
        """
        return self._theta
    @property
    def num_outputs(self):
        """Getter for the attribute num_outputs."""
        return self._num_outputs
    @property
    def num_weights(self):
        """Getter for read-only attribute num_weights."""
        return self._num_weights
    @property
    def has_theta(self):
        """Getter for read-only attribute has_theta."""
        return self._theta is not None
    @property
    def theta_shapes(self):
        """Getter for read-only attribute theta_shapes.

        Returns:
            A list of lists of integers.
        """
        return self._theta_shapes
    @property
    def has_task_embs(self):
        """Getter for read-only attribute has_task_embs."""
        return self._task_embs is not None
    @property
    def num_task_embs(self):
        """Getter for read-only attribute num_task_embs.

        Asserts that internal task embeddings exist.
        """
        assert (self.has_task_embs)
        return len(self._task_embs)
    @property
    def requires_ext_input(self):
        """Getter for read-only attribute requires_ext_input."""
        return self._size_ext_input is not None
    @property
    def target_shapes(self):
        """Getter for read-only attribute target_shapes.

        Returns:
            A list of lists of integers.
        """
        return self._target_shapes
    def get_task_embs(self):
        """Return a list of all task embeddings.

        Returns:
            A list of Parameter tensors.
        """
        assert (self.has_task_embs)
        return self._task_embs
    def get_task_emb(self, task_id):
        """Return the task embedding corresponding to a given task id.

        Args:
            task_id: Determines the task for which the embedding should be
                returned.

        Returns:
            A list of Parameter tensors.
        """
        assert (self.has_task_embs)
        return self._task_embs[task_id]
    @abstractmethod
    def forward(self, task_id=None, theta=None, dTheta=None, task_emb=None,
                ext_inputs=None, squeeze=True):
        """Compute all HyperWeights.

        Args:
            task_id: The index of the task for which the network should
                produce weights. The corresponding internal task embedding will
                be selected as input. Only one integer can be given!
            theta: List of weight tensors, that are used as network parameters.
                If "has_theta" is False, then this parameter is mandatory.
                Note, when provided, internal parameters (theta) are not used.
            dTheta: List of weight tensors, that are added to "theta" (the
                internal list of parameters or the one given via the option
                "theta"), when computing the output of this network.
            task_emb: If "has_task_embs" is False, then one has to provide the
                task embedding as additional input via this option.
            ext_inputs: If "requires_ext_input" is True, then one has to provide
                the additional embeddings as input here. Note, one might provide
                a batch of embeddings (see option "squeeze" for details).
            squeeze: If a batch of inputs is given, the first dimension of the
                resulting weight tensors will have as first dimension the batch
                dimension. Though, the main network expects this dimension to
                be squeezed away. This will be done automatically if this
                option is enabled (hence, it only has an effect for a batch
                size of 1).

        Returns:
            A list of weights. Two consecutive entries always correspond to a
            weight matrix followed by a bias vector.
        """
        pass  # TODO implement
class MainNetInterface(ABC):
    """A general interface for main networks, that can be used stand-alone
    (i.e., having their own weights) or with no (or only some) internal
    weights, such that the remaining weights have to be passed through the
    forward function (e.g., they may be generated through a hypernetwork).

    .. deprecated:: 1.0
        Please use module :class:`mnets.mnet_interface.MainNetInterface`
        instead.

    Attributes:
        weights: A list of all internal weights of the main network. If all
            weights are assumed to be generated externally, then this
            attribute will be None.
        param_shapes: A list of list of integers. Each list represents the
            shape of a parameter tensor. Note, this attribute is
            independent of the attribute "weights", it always comprises the
            shapes of all weight tensors as if the network would be stand-
            alone (i.e., no weights being passed to the forward function).
        hyper_shapes: A list of list of integers. Each list represents the
            shape of a weight tensor that has to be passed to the forward
            function. If all weights are maintained internally, then this
            attribute will be None.
        has_bias: Whether layers in this network have bias terms.
        has_fc_out: Whether the output layer of the network is a fully-
            connected layer.
            Note, if this attribute is set to True, it is implicitly assumed
            that if "hyper_shapes" is not None, the last two entries of
            "hyper_shapes" are the weights and biases of this layer.
        num_params: The total number of weights in the parameter tensors
            described by the attribute "param_shapes".
    """
    def __init__(self):
        """Initialize the network.

        This class is a deprecated shim; instantiating a subclass emits a
        ``DeprecationWarning`` pointing at the replacement module.
        """
        super(MainNetInterface, self).__init__()
        warn('Please use class "mnets.mnet_interface.MainNetInterface" ' +
             'instead.', DeprecationWarning)
        # The following member variables have to be set by all classes that
        # implement this interface.
        self._weights = None
        self._all_shapes = None
        self._hyper_shapes = None
        self._num_params = None
        self._has_bias = None
        self._has_fc_out = None
    def _is_properly_setup(self):
        """This method can be used by classes that implement this interface to
        check whether all required properties have been set."""
        assert (self._weights is not None or self._hyper_shapes is not None)
        # Internal + external weight tensors together must account for every
        # shape listed in _all_shapes.
        if self._weights is not None and self._hyper_shapes is not None:
            assert ((len(self._weights) + len(self._hyper_shapes)) == \
                    len(self._all_shapes))
        elif self._weights is not None:
            assert (len(self._weights) == len(self._all_shapes))
        else:
            assert (len(self._hyper_shapes) == len(self._all_shapes))
        assert (self._all_shapes is not None)
        assert (isinstance(self._has_bias, bool))
        assert (isinstance(self._has_fc_out, bool))
    @property
    def weights(self):
        """Getter for read-only attribute weights.

        Returns:
            A :class:`torch.nn.ParameterList` or None, if no parameters are
            internally maintained.
        """
        return self._weights
    @property
    def param_shapes(self):
        """Getter for read-only attribute param_shapes.

        Returns:
            A list of lists of integers.
        """
        return self._all_shapes
    @property
    def hyper_shapes(self):
        """Getter for read-only attribute hyper_shapes.

        Returns:
            A list of lists of integers.
        """
        return self._hyper_shapes
    @property
    def has_bias(self):
        """Getter for read-only attribute has_bias."""
        return self._has_bias
    @property
    def has_fc_out(self):
        """Getter for read-only attribute has_fc_out."""
        return self._has_fc_out
    @property
    def num_params(self):
        """Getter for read-only attribute num_params.

        The value is computed lazily on first access and cached.

        Returns:
            Total number of parameters in the network.
        """
        if self._num_params is None:
            self._num_params = int(np.sum([np.prod(l) for l in
                                           self.param_shapes]))
        return self._num_params
if __name__ == '__main__':
    # This module only defines interfaces; nothing to run directly.
    pass
| 37.228916
| 80
| 0.644498
|
cd3595a6d4dbc241f61013e9a87a39f44a4319a5
| 723
|
cs
|
C#
|
AspNetCoreDmsSample/Models/Player.cs
|
aws-samples/controltower-cloudendure-simulated-environment
|
c4574fdb4555baa0165a826e954f5330c437234b
|
[
"MIT-0"
] | 2
|
2021-11-03T00:40:08.000Z
|
2022-01-21T20:44:24.000Z
|
AspNetCoreDmsSample/Models/Player.cs
|
aws-samples/controltower-cloudendure-simulated-environment
|
c4574fdb4555baa0165a826e954f5330c437234b
|
[
"MIT-0"
] | 4
|
2019-10-12T19:38:55.000Z
|
2020-03-24T00:39:57.000Z
|
AspNetCoreDmsSample/Models/Player.cs
|
aws-samples/controltower-cloudendure-simulated-environment
|
c4574fdb4555baa0165a826e954f5330c437234b
|
[
"MIT-0"
] | 3
|
2020-04-14T09:36:13.000Z
|
2021-11-03T00:40:00.000Z
|
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
namespace DMSSample.Models
{
    /// <summary>
    /// Entity model for a player row in the DMS sample database.
    /// The <c>Display</c> attributes drive the labels rendered in MVC views.
    /// </summary>
    public partial class Player
    {
        /// <summary>Primary key.</summary>
        [Display(Name = "ID")]
        public int Id { get; set; }
        /// <summary>Foreign key to the player's <see cref="SportTeam"/>.</summary>
        [Display(Name = "Team ID")]
        public int SportTeamId { get; set; }
        /// <summary>Player's last name.</summary>
        [Display(Name = "Last Name")]
        public string LastName { get; set; }
        /// <summary>Player's first name.</summary>
        [Display(Name = "First Name")]
        public string FirstName { get; set; }
        /// <summary>Display name; presumably "First Last" populated by the data layer — TODO confirm.</summary>
        [Display(Name = "Full Name")]
        public string FullName { get; set; }
        /// <summary>Navigation property to the owning team.</summary>
        [Display(Name = "Team")]
        public SportTeam SportTeam { get; set; }
    }
}
| 25.821429
| 49
| 0.529737
|
0dea76cdb678be0e62ad52136783f0f2cc72af49
| 4,421
|
cs
|
C#
|
SynapseGaming-SunBurn-Pro/SynapseGaming/LightingSystem/Processors/TerrainMaterialReader_Pro.cs
|
UnGaucho/StarDrive
|
d51409bf460d2d26e95a1b3869a628bc1d166e74
|
[
"MIT"
] | null | null | null |
SynapseGaming-SunBurn-Pro/SynapseGaming/LightingSystem/Processors/TerrainMaterialReader_Pro.cs
|
UnGaucho/StarDrive
|
d51409bf460d2d26e95a1b3869a628bc1d166e74
|
[
"MIT"
] | null | null | null |
SynapseGaming-SunBurn-Pro/SynapseGaming/LightingSystem/Processors/TerrainMaterialReader_Pro.cs
|
UnGaucho/StarDrive
|
d51409bf460d2d26e95a1b3869a628bc1d166e74
|
[
"MIT"
] | null | null | null |
// Decompiled with JetBrains decompiler
// Type: SynapseGaming.LightingSystem.Processors.TerrainMaterialReader_Pro
// Assembly: SynapseGaming-SunBurn-Pro, Version=1.3.2.8, Culture=neutral, PublicKeyToken=c23c60523565dbfd
// MVID: A5F03349-72AC-4BAA-AEEE-9AB9B77E0A39
// Assembly location: C:\Projects\BlackBox\StarDrive\SynapseGaming-SunBurn-Pro.dll
using System;
using Microsoft.Xna.Framework;
using Microsoft.Xna.Framework.Content;
using Microsoft.Xna.Framework.Graphics;
using ns3;
using SynapseGaming.LightingSystem.Effects;
using SynapseGaming.LightingSystem.Effects.Deferred;
using SynapseGaming.LightingSystem.Effects.Forward;
using SynapseGaming.LightingSystem.Rendering;
namespace SynapseGaming.LightingSystem.Processors
{
/// <summary />
  /// <summary>
  /// Content-pipeline reader that deserializes a terrain mesh plus its
  /// terrain effect from an XNB stream. NOTE: this is decompiled code; the
  /// field read order below mirrors the serializer exactly and must not be
  /// reordered.
  /// </summary>
  public class TerrainMaterialReader_Pro : ContentTypeReader<MeshData>
  {
    /// <summary>
    /// Reads a <see cref="MeshData"/> instance (the <paramref name="instance"/>
    /// argument is ignored; a fresh object is always built).
    /// </summary>
    protected override MeshData Read(ContentReader input, MeshData instance)
    {
      IGraphicsDeviceService service = (IGraphicsDeviceService) input.ContentManager.ServiceProvider.GetService(typeof (IGraphicsDeviceService));
      MeshData meshData = new MeshData();
      // First bool selects forward vs. deferred rendering variant of the effect.
      BaseTerrainEffect baseTerrainEffect = !input.ReadBoolean() ? new TerrainEffect(service.GraphicsDevice) : (BaseTerrainEffect) new DeferredTerrainEffect(service.GraphicsDevice);
      meshData.Effect = baseTerrainEffect;
      meshData.InfiniteBounds = true;
      // Geometry block: transform, counts, stride, segment count, bounds, buffers.
      meshData.MeshToObject = input.ReadMatrix();
      meshData.PrimitiveCount = input.ReadInt32();
      meshData.VertexCount = input.ReadInt32();
      meshData.VertexStride = input.ReadInt32();
      baseTerrainEffect.MeshSegments = input.ReadInt32();
      meshData.ObjectSpaceBoundingSphere = input.ReadObject<BoundingSphere>();
      meshData.VertexDeclaration = input.ReadObject<VertexDeclaration>();
      meshData.VertexBuffer = input.ReadObject<VertexBuffer>();
      meshData.IndexBuffer = input.ReadObject<IndexBuffer>();
      // Material block: names, scalar parameters, specular color (alpha dropped).
      baseTerrainEffect.MaterialName = input.ReadString();
      baseTerrainEffect.MaterialFile = input.ReadString();
      baseTerrainEffect.ProjectFile = input.ReadString();
      baseTerrainEffect.NormalMapStrength = input.ReadSingle();
      baseTerrainEffect.DiffuseScale = input.ReadSingle();
      baseTerrainEffect.HeightScale = input.ReadSingle();
      baseTerrainEffect.Tiling = input.ReadSingle();
      baseTerrainEffect.SpecularPower = input.ReadSingle();
      baseTerrainEffect.SpecularAmount = input.ReadSingle();
      Vector4 vector4 = input.ReadVector4();
      baseTerrainEffect.SpecularColor = new Vector3(vector4.X, vector4.Y, vector4.Z);
      // Texture layers: each is a (file name, external texture reference) pair.
      baseTerrainEffect.DiffuseMapLayer1File = input.ReadString();
      baseTerrainEffect.DiffuseMapLayer1Texture = input.ReadExternalReference<Texture2D>();
      baseTerrainEffect.DiffuseMapLayer2File = input.ReadString();
      baseTerrainEffect.DiffuseMapLayer2Texture = input.ReadExternalReference<Texture2D>();
      baseTerrainEffect.DiffuseMapLayer3File = input.ReadString();
      baseTerrainEffect.DiffuseMapLayer3Texture = input.ReadExternalReference<Texture2D>();
      baseTerrainEffect.DiffuseMapLayer4File = input.ReadString();
      baseTerrainEffect.DiffuseMapLayer4Texture = input.ReadExternalReference<Texture2D>();
      baseTerrainEffect.NormalMapLayer1File = input.ReadString();
      baseTerrainEffect.NormalMapLayer1Texture = input.ReadExternalReference<Texture2D>();
      baseTerrainEffect.NormalMapLayer2File = input.ReadString();
      baseTerrainEffect.NormalMapLayer2Texture = input.ReadExternalReference<Texture2D>();
      baseTerrainEffect.NormalMapLayer3File = input.ReadString();
      baseTerrainEffect.NormalMapLayer3Texture = input.ReadExternalReference<Texture2D>();
      baseTerrainEffect.NormalMapLayer4File = input.ReadString();
      baseTerrainEffect.NormalMapLayer4Texture = input.ReadExternalReference<Texture2D>();
      baseTerrainEffect.BlendMapFile = input.ReadString();
      baseTerrainEffect.BlendMapTexture = input.ReadExternalReference<Texture2D>();
      baseTerrainEffect.HeightMapFile = input.ReadString();
      // Height map is converted to an 8-bit luminance texture on load.
      baseTerrainEffect.HeightMapTexture = CoreUtils.ConvertToLuminance8(service.GraphicsDevice, input.ReadExternalReference<Texture2D>());
      BlockUtil.SkipBlock(input);
      // Trailing sentinel (1234) guards against a desynchronized stream.
      if (input.ReadInt32() != 1234)
        throw new Exception("Error loading asset.");
      return meshData;
    }
  }
}
| 57.415584
| 182
| 0.761819
|
05f7bf31c69753489474d2b51066423e12d35d28
| 4,954
|
py
|
Python
|
LinkedList/SinglyLinkedList.py
|
ShaswatPrabhat/LinkedList
|
902f282e82ddb4c97ffa780ce28bbec1939b2815
|
[
"Apache-2.0"
] | null | null | null |
LinkedList/SinglyLinkedList.py
|
ShaswatPrabhat/LinkedList
|
902f282e82ddb4c97ffa780ce28bbec1939b2815
|
[
"Apache-2.0"
] | null | null | null |
LinkedList/SinglyLinkedList.py
|
ShaswatPrabhat/LinkedList
|
902f282e82ddb4c97ffa780ce28bbec1939b2815
|
[
"Apache-2.0"
] | null | null | null |
from LinkedList import LinkedListNodes
class SinglyLinkedList:
    """A singly linked list of ``LinkedListNodes.SinglyLinkedNode`` objects.

    Fixes over the original implementation:
    * ``__init__`` no longer links the very first node to itself (the old
      code left a one-element list as a self-cycle).
    * ``findValue`` no longer raises ``AttributeError`` when the value is
      absent.
    * ``deleteValue`` compares values with ``!=`` instead of identity
      (``is not``), and can now delete the head of a multi-element list.
    * ``__getitem__`` rejects negative indices and ``item == length`` (both
      previously crashed).
    * ``hasCycle`` uses Floyd's algorithm on node *identity* instead of
      values and no longer crashes on lists with fewer than two nodes.
    """

    def __init__(self, sourceList: list):
        """Build the list from a Python list, preserving order."""
        self.headOfList: LinkedListNodes.SinglyLinkedNode or None = None
        self.length = len(sourceList)
        tail = None  # last node appended so far
        for value in sourceList:
            node = LinkedListNodes.SinglyLinkedNode(val=value)
            if tail is None:
                self.headOfList = node
            else:
                tail.setNext(node)
            tail = node

    def __len__(self) -> int:
        return self.length

    def isEmptyList(self) -> bool:
        return self.headOfList is None

    def traverseList(self) -> None:
        """Print every value in order; refuses to walk a cyclic list."""
        print("Traversing linked list")
        if self.isEmptyList():
            print("EMPTY LIST!!")
            return
        if self.hasCycle():
            print("LinkedList contains a cycle, traversal will be infinite LOL!")
            return
        node = self.headOfList
        while node is not None:
            print(node.val)
            node = node.getNext()

    def findValue(self, value) -> LinkedListNodes.SinglyLinkedNode or None:
        """Return the first node holding ``value``, or None if absent."""
        if self.isEmptyList():
            print("Empty List")
            return None
        node = self.headOfList
        while node is not None:
            if node.val == value:
                return node
            node = node.getNext()
        return None

    def deleteFirstNode(self) -> bool:
        """Remove the head node; returns True on success."""
        if self.isEmptyList():
            print("Empty list")
            return False
        self.headOfList = self.headOfList.getNext()
        self.length -= 1
        return True

    def deleteLastNode(self) -> bool:
        """Remove the tail node; returns True on success."""
        if self.isEmptyList():
            print("Empty list")
            return False
        if self.length == 1:
            self.headOfList = None
            self.length -= 1
            return True
        # Walk to the penultimate node and detach the last one.
        node = self.headOfList
        while node.getNext().getNext() is not None:
            node = node.getNext()
        node.setNext(None)
        self.length -= 1
        return True

    def deleteValue(self, value) -> bool:
        """Delete the first node holding ``value``; True on success."""
        if self.isEmptyList():
            print("Empty list")
            return False
        if self.headOfList.val == value:
            # Handles both the single-element case and a match at the head
            # (the original could not delete the head of a longer list).
            self.headOfList = self.headOfList.getNext()
            self.length -= 1
            return True
        node = self.headOfList
        # Value equality (!=) rather than the original identity test (is not).
        while node.getNext() is not None and node.getNext().val != value:
            node = node.getNext()
        if node.getNext() is None:
            return False
        node.setNext(node.getNext().getNext())
        self.length -= 1
        return True

    def insertAfterValue(self, value, valueToBeInserted) -> bool:
        """Insert a new node after the first node holding ``value``."""
        if self.isEmptyList():
            print("Empty list")
            return False
        node = self.headOfList
        while node is not None:
            if node.val == value:
                newNode = LinkedListNodes.SinglyLinkedNode(val=valueToBeInserted)
                newNode.setNext(node.getNext())
                node.setNext(newNode)
                self.length += 1
                return True
            node = node.getNext()
        return False

    def insertValueInBeginning(self, value):
        """Prepend a new node holding ``value``."""
        newNode = LinkedListNodes.SinglyLinkedNode(value)
        if not self.isEmptyList():
            newNode.setNext(self.headOfList)
        self.headOfList = newNode
        self.length += 1

    def __getitem__(self, item):
        """Return the value at position ``item`` (0-based), or None on error."""
        if type(item) is not int:
            print("Please enter subscriptable index integers only")
            return None
        if self.isEmptyList():
            print("Empty list")
            return None
        if item < 0 or item >= self.length:
            # The original only rejected item > length, so item == length
            # (and any negative index) walked off the end of the list.
            print("Index too large, list too small")
            return None
        node = self.headOfList
        for _ in range(item):
            node = node.getNext()
        return node.val

    def hasCycle(self) -> bool:
        """Floyd's tortoise-and-hare cycle check on node identity."""
        slow = self.headOfList
        fast = self.headOfList
        while fast is not None and fast.getNext() is not None:
            slow = slow.getNext()
            fast = fast.getNext().getNext()
            if slow is fast:
                return True
        return False
| 31.75641
| 85
| 0.531086
|
f4cc743bf349449a4731914ade390042e7c09351
| 39
|
ts
|
TypeScript
|
lib/api/src/version.ts
|
cheynespc/storybook
|
ec9ba7be9b5d7f10eb15e4e91cc91c43b664bb73
|
[
"MIT"
] | 1
|
2021-10-12T01:53:59.000Z
|
2021-10-12T01:53:59.000Z
|
lib/api/src/version.ts
|
cheynespc/storybook
|
ec9ba7be9b5d7f10eb15e4e91cc91c43b664bb73
|
[
"MIT"
] | null | null | null |
lib/api/src/version.ts
|
cheynespc/storybook
|
ec9ba7be9b5d7f10eb15e4e91cc91c43b664bb73
|
[
"MIT"
] | null | null | null |
// Storybook package version string; presumably bumped by release tooling — TODO confirm.
export const version = '6.4.0-beta.8';
| 19.5
| 38
| 0.666667
|
4e25aec5bf126276a0e845e179a9dfea411e3f78
| 1,389
|
lua
|
Lua
|
Data/Scenario/Expeditions.lua
|
startrail/SurvivingMars
|
b58abe8393497bb07210ce974baa7ea70dbb5a88
|
[
"BSD-Source-Code"
] | 1
|
2019-05-21T03:01:00.000Z
|
2019-05-21T03:01:00.000Z
|
Data/Scenario/Expeditions.lua
|
startrail/SurvivingMars
|
b58abe8393497bb07210ce974baa7ea70dbb5a88
|
[
"BSD-Source-Code"
] | null | null | null |
Data/Scenario/Expeditions.lua
|
startrail/SurvivingMars
|
b58abe8393497bb07210ce974baa7ea70dbb5a88
|
[
"BSD-Source-Code"
] | null | null | null |
-- ========== THIS IS AN AUTOMATICALLY GENERATED FILE! ==========
-- NOTE(review): serialized scenario data emitted by the game's editor; any
-- hand-written comments here will be lost on regeneration.
PlaceObj('Scenario', {
	'name', "Expeditions",
	'file_name', "Expeditions",
	'singleton', false,
}, {
	PlaceObj('ScenarioSequence', {
		'name', "MechanicalBreakdown",
	}, {
		-- Shows the disaster message, then triggers the rover malfunction.
		PlaceObj('SA_WaitMessage', {
			'title', T{5858, --[[Scenario Expeditions title]] "Expedition Disaster: Marooned In Space"},
			'text', T{5859, --[[Scenario Expeditions text]] "The RC Explorer is a robust, yet complex machine. Those unacquainted would be astonished when they see the mission reports where dozens of minor breakdowns happened during an expedition and were ignored by the operators. Expedition veterans know that a dead light bulb won't break down the vehicle and they ignore it with a wry smile.\n\nBut when something as serious as the motor system breaks down, it takes bravery not to panic. There is no time for desperation then, our drones have to repair the malfunction before it causes more harm.\n\n<effect>The RC Explorer suffered a malfunction. It has to be repaired by Drones"},
			'voiced_text', T{7184, --[[voice:narrator]] "As with everything made by the hands of man, the Explorer isn’t without its breakdowns and failures."},
			'log_entry', true,
			'text_param1', T{5845, --[[Scenario Expeditions text_param1]] "50"},
		}),
		PlaceObj('SA_Exec', {
			'expression', 'rover:SetCommand("Malfunction")',
		}),
	}),
})
| 63.136364
| 678
| 0.715623
|
33e767da0385ed7b45cc73aea1abafa2fe80c7c7
| 468
|
h
|
C
|
Classes/ProcessData/Objects/DTOProductMasterObject.h
|
tuansusu/CRMOneIpad
|
855a25025a2e2f3d005f71ec74e838962768f774
|
[
"MIT"
] | null | null | null |
Classes/ProcessData/Objects/DTOProductMasterObject.h
|
tuansusu/CRMOneIpad
|
855a25025a2e2f3d005f71ec74e838962768f774
|
[
"MIT"
] | null | null | null |
Classes/ProcessData/Objects/DTOProductMasterObject.h
|
tuansusu/CRMOneIpad
|
855a25025a2e2f3d005f71ec74e838962768f774
|
[
"MIT"
] | null | null | null |
//
// DTOProductMasterObject.h
// OfficeOneMB
//
// Created by macpro on 1/1/15.
//
//
#import <Foundation/Foundation.h>
@class Items;
/// DTO for a product-master record; converts between dictionaries,
/// Core Data `Items` objects, and itself.
@interface DTOProductMasterObject : NSObject
/// Product code (unique identifier in the catalog — TODO confirm uniqueness).
@property (nonatomic, retain) NSString *code;
/// Human-readable product name.
@property (nonatomic, retain) NSString *name ;
/// Names of all serializable fields, used by the dictionary conversion below.
+(NSArray*)allFields;
/// Builds an `Items` managed object mirroring this DTO.
-(Items*)itemObject;
/// Serializes the DTO's fields into a mutable dictionary.
- (NSMutableDictionary *)convertToDictionary;
/// Populates the DTO from a dictionary (inverse of convertToDictionary).
- (void)parseFromDictionary:(NSDictionary *)dictionary;
/// Populates the DTO from an `Items` managed object.
- (void)parseFromItem:(Items*)item;
@end
| 18.72
| 55
| 0.735043
|
f5de8c72942a7002c86ee754851d1f981395989a
| 766
|
css
|
CSS
|
data/usercss/106171.user.css
|
33kk/uso-archive
|
2c4962d1d507ff0eaec6dcca555efc531b37a9b4
|
[
"MIT"
] | 118
|
2020-08-28T19:59:28.000Z
|
2022-03-26T16:28:40.000Z
|
data/usercss/106171.user.css
|
33kk/uso-archive
|
2c4962d1d507ff0eaec6dcca555efc531b37a9b4
|
[
"MIT"
] | 38
|
2020-09-02T01:08:45.000Z
|
2022-01-23T02:47:24.000Z
|
data/usercss/106171.user.css
|
33kk/uso-archive
|
2c4962d1d507ff0eaec6dcca555efc531b37a9b4
|
[
"MIT"
] | 21
|
2020-08-19T01:12:43.000Z
|
2022-03-15T21:55:17.000Z
|
/* ==UserStyle==
@name Amor Doce Tema : Tomb Raider
@namespace USO Archive
@author Gihglamur99
@description `Um tema bem legal da Lara Croft de Tomb Raider`
@version 20141010.23.9
@license NO-REDISTRIBUTION
@preprocessor uso
==/UserStyle== */
/* Replace the page background, header, and profile card images on amordoce.com. */
@-moz-document domain("amordoce.com") {
#container{
background-image : url(http://i57.tinypic.com/2ngd1ds.jpg) !important ;
}
#container.connected #header { background-image : url(http://i58.tinypic.com/29p6tj7.png) !important ;
}
.idcard-member { background : url(http://i57.tinypic.com/wwngk8.png) no-repeat !important ;
}
}
/* NOTE(review): regexp() expects a URL-matching regular expression;
   "perfil amor doce" contains spaces and will never match any URL, so
   this second rule block appears to be dead. */
@-moz-document regexp("perfil amor doce") {
.idcard-member { background : url(http://i61.tinypic.com/348im4i.png) no-repeat !important ;
}
}
| 33.304348
| 103
| 0.680157
|
0556ad355c475f67a5c617cdde3e6f38dba1c31f
| 1,186
|
rb
|
Ruby
|
controls/V-36661.rb
|
intinternal/microsoft-windows-2012r2-memberserver-stig-baseline
|
96a38314118e2099bdf4d721aa41b38d20276234
|
[
"Apache-2.0"
] | null | null | null |
controls/V-36661.rb
|
intinternal/microsoft-windows-2012r2-memberserver-stig-baseline
|
96a38314118e2099bdf4d721aa41b38d20276234
|
[
"Apache-2.0"
] | null | null | null |
controls/V-36661.rb
|
intinternal/microsoft-windows-2012r2-memberserver-stig-baseline
|
96a38314118e2099bdf4d721aa41b38d20276234
|
[
"Apache-2.0"
] | null | null | null |
# InSpec control for STIG finding V-36661: application/service account
# passwords must be at least 15 characters long.
control 'V-36661' do
  title "Policy must require application account passwords be at least 15
characters in length."
  desc "Application/service account passwords must be of sufficient length to
prevent being easily cracked. Application/service accounts that are manually
managed must have passwords at least 15 characters in length."
  impact 0.5
  tag "gtitle": 'WIN00-000010-01'
  tag "gid": 'V-36661'
  tag "rid": 'SV-51579r1_rule'
  tag "stig_id": 'WN12-00-000010'
  tag "fix_id": 'F-44708r2_fix'
  tag "cci": ['CCI-000205']
  tag "nist": ['IA-5 (1) (a)', 'Rev_4']
  tag "documentable": false
  tag "ia_controls": 'IAIA-1'
  tag "check": "Verify the site has a policy to ensure passwords for manually
managed application/service accounts are at least 15 characters in length. If
such a policy does not exist or has not been implemented, this is a finding."
  tag "fix": "Establish a site policy that requires application/service account
passwords that are manually managed to be at least 15 characters in length.
Ensure the policy is enforced."
  # Automated portion: check the local security policy's minimum password
  # length (the site-policy review above remains a manual step).
  describe security_policy do
    its('MinimumPasswordLength') { should be >= 15 }
  end
end
| 43.925926
| 81
| 0.715852
|
c4bbda3e53670c898370397ab10975570b59dde3
| 89
|
hpp
|
C++
|
openexr-c/abigen/imf_deeptiledoutputpart.hpp
|
vfx-rs/openexr-bind
|
ee4fd6010beedb0247737a39ee61ffb87c586448
|
[
"BSD-3-Clause"
] | 7
|
2021-06-04T20:59:16.000Z
|
2022-02-11T01:00:42.000Z
|
openexr-c/abigen/imf_deeptiledoutputpart.hpp
|
vfx-rs/openexr-bind
|
ee4fd6010beedb0247737a39ee61ffb87c586448
|
[
"BSD-3-Clause"
] | 35
|
2021-05-14T04:28:22.000Z
|
2021-12-30T12:08:40.000Z
|
openexr-c/abigen/imf_deeptiledoutputpart.hpp
|
vfx-rs/openexr-bind
|
ee4fd6010beedb0247737a39ee61ffb87c586448
|
[
"BSD-3-Clause"
] | 5
|
2021-05-15T04:02:56.000Z
|
2021-07-02T05:38:01.000Z
|
#pragma once
#include <ostream>
/// Writes the generated ABI description for Imf::DeepTiledOutputPart to `os`.
void abi_gen_imf_deeptiledoutputpart(std::ostream& os);
| 17.8
| 55
| 0.797753
|
ff4e0c5db5a9b5edc87dd59a0233b77719aea3b5
| 942
|
py
|
Python
|
setup.py
|
stefanoborini/cookiecutter-traitsui
|
dea5e2684278c09b8e3f9d27dde4c2805bec3f3f
|
[
"BSD-2-Clause"
] | null | null | null |
setup.py
|
stefanoborini/cookiecutter-traitsui
|
dea5e2684278c09b8e3f9d27dde4c2805bec3f3f
|
[
"BSD-2-Clause"
] | null | null | null |
setup.py
|
stefanoborini/cookiecutter-traitsui
|
dea5e2684278c09b8e3f9d27dde4c2805bec3f3f
|
[
"BSD-2-Clause"
] | null | null | null |
import os
from setuptools import setup, find_packages
# Version of this cookiecutter template package.
VERSION = '0.1.0.dev0'
# Long description for the package index, taken verbatim from the README.
with open('README.rst', 'r') as readme:
    README_TEXT = readme.read()
def write_version_py():
    """Write a ``version.py`` exposing ``__version__`` into the template
    package directory (next to this setup.py)."""
    target = os.path.join(
        os.path.dirname(__file__),
        '{{cookiecutter.project_name}}',
        'version.py')
    with open(target, 'w') as fh:
        fh.write("__version__ = '{}'\n".format(VERSION))
# Generate the version module before packaging so it ships with the template.
write_version_py()
# Main setup configuration.
setup(
    name='{{cookiecutter.project_name}}',
    version=VERSION,
    author='{{cookiecutter.author}}',
    description='{{cookiecutter.description}}',
    long_description=README_TEXT,
    install_requires=[
        'traitsui~=5.1',
    ],
    packages=find_packages(),
    entry_points={
        'gui_scripts': [
            # An entry-point spec must be one "name = module:func" string;
            # the original literal was broken across two lines (syntax error).
            '{{cookiecutter.project_name}} = '
            '{{cookiecutter.project_name}}.run:main',
        ],
    },
)
| 21.409091
| 54
| 0.619958
|
729ba21de1e819dc7e06eab31d8c03734b5b13ad
| 734
|
cs
|
C#
|
Assets/Scripts/UI/LevelSelectView.cs
|
KeeganL14/SF
|
224330a06493b34e909b3ef92df112bee8f46784
|
[
"MIT"
] | null | null | null |
Assets/Scripts/UI/LevelSelectView.cs
|
KeeganL14/SF
|
224330a06493b34e909b3ef92df112bee8f46784
|
[
"MIT"
] | null | null | null |
Assets/Scripts/UI/LevelSelectView.cs
|
KeeganL14/SF
|
224330a06493b34e909b3ef92df112bee8f46784
|
[
"MIT"
] | null | null | null |
using RedMoon.ReactiveKit;
using System.Collections;
using System.Collections.Generic;
using UniRx;
using UnityEngine;
using UnityEngine.UIElements;
/// <summary>
/// UI Toolkit view for the level-select screen. Binds the three level
/// buttons to the view model's click command; bindings are tied to the
/// view's lifetime via the CompositeDisposable.
/// </summary>
public class LevelSelectView : View<LevelSelectViewModel>
{
    public override void OnActivation(LevelSelectViewModel viewModel, CompositeDisposable disposable)
    {
        // Buttons are looked up by name in the UXML hierarchy; the string
        // argument passed to OnClick identifies which level was chosen.
        var level1 = Root.Q<Button>("level1Button");
        var level2 = Root.Q<Button>("level2Button");
        var level3 = Root.Q<Button>("level3Button");
        level1.BindCallback(viewModel.OnClick, "level1").AddTo(disposable);
        level2.BindCallback(viewModel.OnClick, "level2").AddTo(disposable);
        level3.BindCallback(viewModel.OnClick, "level3").AddTo(disposable);
    }
}
| 34.952381
| 102
| 0.715259
|
58bc0c50da9235501007b43f70779b7ef868e722
| 2,729
|
php
|
PHP
|
backend/controllers/WorkController.php
|
sochianton/blank_docker_yii2_app
|
bf7110c802d30c8d5d1c103c94b7f695d31c730f
|
[
"BSD-3-Clause"
] | null | null | null |
backend/controllers/WorkController.php
|
sochianton/blank_docker_yii2_app
|
bf7110c802d30c8d5d1c103c94b7f695d31c730f
|
[
"BSD-3-Clause"
] | 1
|
2021-06-22T15:45:08.000Z
|
2021-06-22T15:45:08.000Z
|
backend/controllers/WorkController.php
|
sochianton/blank_docker_yii2_app
|
bf7110c802d30c8d5d1c103c94b7f695d31c730f
|
[
"BSD-3-Clause"
] | null | null | null |
<?php
namespace backend\controllers;
use common\ar\Work;
use common\controllers\CRUDController;
use common\services\QualificationService;
use common\services\UserService;
use common\services\WorkService;
use Yii;
use yii\filters\ContentNegotiator;
use yii\filters\VerbFilter;
use yii\helpers\ArrayHelper;
use yii\web\Response;
/**
 * CRUD controller for {@see Work} records, plus a JSON endpoint that feeds
 * the fancytree-style work-selection widget.
 */
class WorkController extends CRUDController
{
    /** @var string Active Record class managed by the CRUD base controller. */
    public $model = Work::class;

    /**
     * {@inheritdoc}
     *
     * Restricts delete to POST and forces JSON output for the tree endpoint.
     */
    public function behaviors()
    {
        return ArrayHelper::merge(parent::behaviors(), [
            // NOTE(review): the access-control filter was commented out in the
            // original; it is intentionally left out here so behavior is
            // unchanged — confirm whether authentication is enforced upstream.
            'verbs' => [
                'class' => VerbFilter::class,
                'actions' => [
                    'delete' => ['POST'],
                ],
            ],
            'contentNegotiator' => [
                'class' => ContentNegotiator::class,
                'formats' => [
                    'application/json' => Response::FORMAT_JSON,
                ],
                'only' => ['ajax-tree-work-nodes'],
            ],
        ]);
    }

    /**
     * {@inheritdoc}
     */
    public function init()
    {
        $this->indexTitle = Yii::t('app', 'Works');
        parent::init();
    }

    /**
     * Builds the nested node list of work categories and their works.
     *
     * @param int|string $user_id user whose assigned works are pre-selected;
     *                            -1 means "no user" (nothing pre-selected)
     * @return array nested node definitions (title/key/folder/children/selected)
     */
    public function actionAjaxTreeWorkNodes($user_id)
    {
        $works = [];
        if ($user_id != -1) {
            $works = UserService::getWorksIds($user_id);
        }
        $nodes = [];
        foreach (QualificationService::getList() as $cat) {
            $tmp = [
                'title' => $cat->name,
                'folder' => true,
                'key' => '',
            ];
            $children = [];
            foreach (WorkService::getByCategoryIds($cat->id) as $work) {
                $children[] = [
                    'title' => $work->name,
                    'key' => $work->id,
                    // in_array() already returns a bool; the original wrapped
                    // it in a redundant ternary.
                    'selected' => in_array($work->id, $works),
                ];
            }
            if (!empty($children)) {
                $tmp['children'] = $children;
            }
            $nodes[] = $tmp;
        }
        return $nodes;
    }
}
| 26.754902
| 81
| 0.41114
|
dd88435b65aedaa3e2130ceeb3fee971498b33dd
| 5,026
|
java
|
Java
|
references/bcb_chosen_clones/default#6489#346#436.java
|
cragkhit/elasticsearch
|
05567b30c5bde08badcac1bf421454e5d995eb91
|
[
"Apache-2.0"
] | 23
|
2018-10-03T15:02:53.000Z
|
2021-09-16T11:07:36.000Z
|
references/bcb_chosen_clones/default#6489#346#436.java
|
LandAndLand/Siamese
|
07fb10bec4614f55bcc39e571d1185fc9ce86242
|
[
"Apache-2.0"
] | 18
|
2019-02-10T04:52:54.000Z
|
2022-01-25T02:14:40.000Z
|
references/bcb_chosen_clones/default#6489#346#436.java
|
LandAndLand/Siamese
|
07fb10bec4614f55bcc39e571d1185fc9ce86242
|
[
"Apache-2.0"
] | 19
|
2018-11-16T13:39:05.000Z
|
2021-09-05T23:59:30.000Z
|
    // Central Swing action dispatcher for the LVisual UI. Routes menu items,
    // navigation buttons and checkboxes based on the action command string
    // or the event source object.
    public void actionPerformed(ActionEvent evt) {
        String name = evt.getActionCommand();
        if (name.equals("Load LSculpture")) {
            // Ask the user for the directory that contains the layer layout files.
            JFileChooser chooser = new JFileChooser(System.getProperties().getProperty("user.dir"));
            chooser.setDialogTitle("Select the directory containing the layer layout files");
            chooser.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY);
            if (chooser.showOpenDialog(this) == JFileChooser.APPROVE_OPTION) {
                directoryPath = chooser.getSelectedFile().getAbsolutePath();
            } else {
                // User cancelled the dialog: abort the load.
                return;
            }
            // Both layout file extensions (.ll and .ll2) are supported.
            if (foundLayers("ll") || foundLayers("ll2")) {
                currentLayerNum = firstLayer;
                createLayerList();
                mainPane.totalNumLayersLab.setText("" + (lastLayer + 1 - firstLayer));
                mainPane.currentLayerNumLab.setText("" + 1);
                // Enable layer navigation only when there is more than one layer;
                // we always start at the first layer, so "previous" starts disabled.
                if (firstLayer == lastLayer) {
                    mainPane.previousLayerBut.setEnabled(false);
                    mainPane.nextLayerBut.setEnabled(false);
                } else {
                    mainPane.previousLayerBut.setEnabled(false);
                    mainPane.nextLayerBut.setEnabled(true);
                }
                // If the first layer fails to load, roll back to a clean state.
                if (!loadLayerBricks(currentLayerNum, "CURRENT")) {
                    closeCurrentSculpture();
                }
                mainPane.numBricksLab.setText("" + currentLayer.size());
                view2D = new View2D(this, layoutFileExtension);
                mainPane.setVisible(true);
                showFullSculptureMenuItem.setEnabled(true);
                genInstructMenuItem.setEnabled(true);
                loadItem.setEnabled(false);
                closeItem.setEnabled(true);
            } else {
                JOptionPane.showMessageDialog(null, "No layout files where found. (layerLayout?.ll)", "No Layouts Found", JOptionPane.ERROR_MESSAGE);
            }
        } else if (name.equals("Close Current Sculpture")) {
            closeCurrentSculpture();
        } else if (name.equals("Exit")) {
            System.exit(0);
        } else if (evt.getSource().equals(showFullSculptureMenuItem)) {
            // Open (or re-open) the 3D view window of the whole sculpture.
            if (fullSculptureView == null) {
                fullSculptureView = new JFrame();
                fullSculptureView.setTitle("Complete 3D view of the LEGO sculpture");
                fullSculptureView.setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);
                fullSculptureView.setSize(400, 400);
                fullSculptureView.add(new View3D(this));
                fullSculptureView.setVisible(true);
            } else {
                // Window existed before but was closed: create a fresh one.
                if (!fullSculptureView.isActive()) {
                    fullSculptureView = new JFrame();
                    fullSculptureView.setTitle("Complete 3D view of the LEGO sculpture");
                    fullSculptureView.setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);
                    fullSculptureView.setSize(400, 400);
                    fullSculptureView.add(new View3D(this));
                    fullSculptureView.setVisible(true);
                }
            }
        } else if (evt.getSource().equals(genInstructMenuItem)) {
            saveBuildingInstructionstoPDF(true);
        } else if (name.equals("General Information")) {
            helpGUI helpWindow = new helpGUI();
        } else if (name.equals("About")) {
            JOptionPane.showMessageDialog(null, "LSculpturer: LVisual \nVersion 1.0 \nAuthor : Eugene Smal \nContact: eugene.smal@gmail.com \nStellenbosch University Master's Student" + " \n2008 \n \n", "About LSculpturer: LVisual", JOptionPane.INFORMATION_MESSAGE, new ImageIcon("images\\about.gif"));
        } else if (evt.getSource().equals(mainPane.nextLayerBut)) {
            // Step forward one layer; disable "next" once we reach the last one.
            mainPane.previousLayerBut.setEnabled(true);
            displayLayer(currentLayerNum + 1, false, true);
            mainPane.layerList.ensureIndexIsVisible(currentLayerNum);
            if (currentLayerNum == lastLayer) {
                mainPane.nextLayerBut.setEnabled(false);
            }
        } else if (evt.getSource().equals(mainPane.previousLayerBut)) {
            // Step back one layer; at the first layer there is no previous
            // layer to show, so the cached references are cleared.
            mainPane.nextLayerBut.setEnabled(true);
            if (currentLayerNum - 1 == firstLayer) {
                currentLayer = null;
                displayLayer(currentLayerNum - 1, false, false);
                mainPane.previousLayerBut.setEnabled(false);
                previousLayer = null;
            } else {
                displayLayer(currentLayerNum - 1, true, false);
            }
            mainPane.layerList.ensureIndexIsVisible(currentLayerNum);
        } else if (evt.getSource().equals(mainPane.showPreviousCheck)) {
            // Toggle ghosting of the previous layer in both views.
            mustDrawPrevious = mainPane.showPreviousCheck.isSelected();
            view2D.canvas.repaint();
            view2D.refresh3DLayers();
        } else if (evt.getSource().equals(mainPane.showGridCheck)) {
            mustDrawGrid = mainPane.showGridCheck.isSelected();
            view2D.canvas.repaint();
        }
    }
| 54.630435
| 302
| 0.59033
|
257b62a58ebafcc733856dc5da460bab0167ab48
| 438
|
js
|
JavaScript
|
frontend/src/components/views/NotFound.js
|
StackedQueries/tyler-richards
|
5d9811402fa1072e395415500ab4f16361c96b21
|
[
"MIT",
"Unlicense"
] | 1
|
2021-05-04T21:53:10.000Z
|
2021-05-04T21:53:10.000Z
|
frontend/src/components/views/NotFound.js
|
StackedQueries/tyler-richards
|
5d9811402fa1072e395415500ab4f16361c96b21
|
[
"MIT",
"Unlicense"
] | null | null | null |
frontend/src/components/views/NotFound.js
|
StackedQueries/tyler-richards
|
5d9811402fa1072e395415500ab4f16361c96b21
|
[
"MIT",
"Unlicense"
] | null | null | null |
import Header from '../Header'
import Footer from '../Footer'
const NotFound = () => {
return (
<div>
<Header />
<div className='page-content'>
<div className="infoform">
<h1 className="page-header">404</h1>
<h2>This page doesn't exsist, has been moved, or deleted.</h2>
</div>
</div>
<Footer />
</div>
)
}
export default NotFound
| 20.857143
| 74
| 0.506849
|
05d942b6219ab700d71bf16a525ce0defff271d7
| 1,294
|
py
|
Python
|
oauth/models.py
|
DheerendraRathor/iitb_oauth_py
|
c06b21acb4f6d09c813bb8cbf545a18dafb46b9c
|
[
"BSD-3-Clause"
] | 2
|
2016-03-10T09:16:39.000Z
|
2020-08-18T12:33:02.000Z
|
oauth/models.py
|
DheerendraRathor/iitb_oauth_py
|
c06b21acb4f6d09c813bb8cbf545a18dafb46b9c
|
[
"BSD-3-Clause"
] | null | null | null |
oauth/models.py
|
DheerendraRathor/iitb_oauth_py
|
c06b21acb4f6d09c813bb8cbf545a18dafb46b9c
|
[
"BSD-3-Clause"
] | 2
|
2017-06-03T12:48:46.000Z
|
2019-08-04T16:51:14.000Z
|
try:
from django.utils import timezone
except ImportError:
from datetime import datetime as timezone
from django.db import models
from django.contrib.auth.models import User
from .request import Token
class OAuthToken(models.Model):
    """Persisted OAuth token pair (access + refresh) for a single user."""
    # One token record per user; reachable from a User as ``user.token``.
    user = models.OneToOneField(User, related_name='token')
    refresh_token = models.CharField(max_length=255)
    access_token = models.CharField(max_length=255)
    # NOTE(review): presumably e.g. "Bearer" -- confirm against the provider.
    token_type = models.CharField(max_length=16)
    scope = models.TextField()
    # Access-token lifetime as reported by the provider (units defined by
    # Token.has_expired(); typically seconds -- confirm in .request.Token).
    expires_in = models.IntegerField()
    created_on = models.DateTimeField(auto_now_add=True)
    # auto_now: bumped on every save(), i.e. whenever the token is refreshed.
    refresh_on = models.DateTimeField(auto_now=True)
    def get_access_token(self):
        """Return a valid access token, transparently refreshing it if expired."""
        token = Token(refresh_token=self.refresh_token,
                      access_token=self.access_token,
                      expires_in=self.expires_in,
                      scope=self.scope,
                      token_type=self.token_type,
                      created_on=self.refresh_on)
        if token.has_expired():
            # Refresh against the provider and persist the new credentials
            # (saving also updates refresh_on via auto_now).
            token = token.refresh()
            self.refresh_token = token.refresh_token
            self.access_token = token.access_token
            self.expires_in = token.expires_in
            self.token_type = token.token_type
            self.save()
        return self.access_token
| 35.944444
| 59
| 0.66306
|
1aaa55873673dd04de88565b01b693f19c066c10
| 2,923
|
py
|
Python
|
CIM14/CPSM/Equipment/Meas/Discrete.py
|
MaximeBaudette/PyCIM
|
d68ee5ccfc1d32d44c5cd09fb173142fb5ff4f14
|
[
"MIT"
] | 58
|
2015-04-22T10:41:03.000Z
|
2022-03-29T16:04:34.000Z
|
CIM14/CPSM/Equipment/Meas/Discrete.py
|
MaximeBaudette/PyCIM
|
d68ee5ccfc1d32d44c5cd09fb173142fb5ff4f14
|
[
"MIT"
] | 12
|
2015-08-26T03:57:23.000Z
|
2020-12-11T20:14:42.000Z
|
CIM14/CPSM/Equipment/Meas/Discrete.py
|
MaximeBaudette/PyCIM
|
d68ee5ccfc1d32d44c5cd09fb173142fb5ff4f14
|
[
"MIT"
] | 35
|
2015-01-10T12:21:03.000Z
|
2020-09-09T08:18:16.000Z
|
# Copyright (C) 2010-2011 Richard Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from CIM14.CPSM.Equipment.Meas.Measurement import Measurement
class Discrete(Measurement):
    """A discrete (state-valued) Measurement, e.g. a breaker position.

    Per the CIM profile notes: the association to Terminal may be optional
    depending on how the Measurement is used, and the MeasurementType class
    defines the measured quantity; every Measurement references exactly one
    MeasurementType whose name comes from the normative string tables.
    """

    def __init__(self, DiscreteValues=None, *args, **kw_args):
        """Initialise a new 'Discrete' instance.

        @param DiscreteValues: The values connected to this measurement.
        """
        self._DiscreteValues = []
        if DiscreteValues is None:
            self.DiscreteValues = []
        else:
            self.DiscreteValues = DiscreteValues
        super(Discrete, self).__init__(*args, **kw_args)

    _attrs = []
    _attr_types = {}
    _defaults = {}
    _enums = {}
    _refs = ["DiscreteValues"]
    _many_refs = ["DiscreteValues"]

    def getDiscreteValues(self):
        """The values connected to this measurement."""
        return self._DiscreteValues

    def setDiscreteValues(self, value):
        # Detach every currently linked value through its property (so the
        # old value unregisters itself), then attach the new ones by writing
        # their backing field directly, exactly as the generated code does.
        for detached in self._DiscreteValues:
            detached.Discrete = None
        for attached in value:
            attached._Discrete = self
        self._DiscreteValues = value

    DiscreteValues = property(getDiscreteValues, setDiscreteValues)

    def addDiscreteValues(self, *DiscreteValues):
        for obj in DiscreteValues:
            obj.Discrete = self

    def removeDiscreteValues(self, *DiscreteValues):
        for obj in DiscreteValues:
            obj.Discrete = None
| 44.287879
| 565
| 0.723572
|
f43542add6c4e151895677191e10132b4fcf5dc9
| 296
|
cs
|
C#
|
OrderPriceCalculator/IHasCharges.cs
|
robertsundstrom/PointOfSale
|
50d3c57a02a0148a44d8929b5aad5cb105fa45cf
|
[
"MIT"
] | 1
|
2021-07-25T02:29:32.000Z
|
2021-07-25T02:29:32.000Z
|
OrderPriceCalculator/IHasCharges.cs
|
robertsundstrom/PointOfSale
|
50d3c57a02a0148a44d8929b5aad5cb105fa45cf
|
[
"MIT"
] | null | null | null |
OrderPriceCalculator/IHasCharges.cs
|
robertsundstrom/PointOfSale
|
50d3c57a02a0148a44d8929b5aad5cb105fa45cf
|
[
"MIT"
] | 1
|
2022-01-10T06:04:02.000Z
|
2022-01-10T06:04:02.000Z
|
namespace OrderPriceCalculator;
/// <summary>
/// Represents a type that carries a collection of charges together with an
/// optional aggregate charged amount.
/// </summary>
public interface IHasCharges
{
    /// <summary>
    /// Gets the Charges.
    /// </summary>
    IEnumerable<ICharge> Charges { get; }
    /// <summary>
    /// Gets or sets the charged amount.
    /// </summary>
    decimal? Charge { get; set; }
}
| 21.142857
| 42
| 0.574324
|
fd73e0b4a5d4bdf9862c7aa72d0c42448f6c30c4
| 4,120
|
css
|
CSS
|
calculator/calculator.css
|
GabrielSSGitb/Front-end-Projects
|
1641c7fee9f35501c8da723cc698651f5d6553c0
|
[
"MIT"
] | 1
|
2021-11-03T19:43:55.000Z
|
2021-11-03T19:43:55.000Z
|
calculator/calculator.css
|
GabrielSSGitb/Front-end-Projects
|
1641c7fee9f35501c8da723cc698651f5d6553c0
|
[
"MIT"
] | null | null | null |
calculator/calculator.css
|
GabrielSSGitb/Front-end-Projects
|
1641c7fee9f35501c8da723cc698651f5d6553c0
|
[
"MIT"
] | null | null | null |
@charset "utf-8";
body {
background-image: linear-gradient(to bottom, black, blue);
background-position: center center;
background-size: 100vh;
background-attachment: fixed;
color: white;
font-family: Arial, Helvetica, sans-serif;
}
#calculatorbox {
margin: auto;
margin-top: 50px;
background-color: black;
width: 300px;
height: 480px;
padding: 10px;
border-radius: 20px;
}
#title {
    padding: 0;
    margin: 0;
    font-size: small;
    /* Fixed invalid declaration: `font-weight: none` is not a valid CSS
       value and was silently dropped by the parser; `normal` is the
       intended non-bold weight. */
    font-weight: normal;
}
#showinc {
background-color: black;
color: white;
margin-left: 20px;
height: 56px;
width: 281px;
font-size: xx-large;
text-align: right;
outline: none;
border-color: black;
}
#showresult {
background-color: black;
margin-left: 20px;
height: 40px;
width: 281px;
color: white;
text-align: right;
font-size: x-large;
outline: none;
border-color: black;
}
.boxes {
margin-top: 60px;
background-color: rgb(88, 88, 88);
width: 25px;
height: 25px;
padding: 15px;
font-size: large;
text-align: center;
cursor: pointer;
border-radius: 10px;
}
.boxes:hover {
background-color: rgb(0, 96, 134);
}
.boxes:active {
background-color: rgba(255, 255, 255, 0.657);
color: black;
}
.numbers {
position: relative;
top: 80px;
left: 20px;
margin-right: 4px;
margin-top: 7px;
background-color: rgb(88, 88, 88);
width: 25px;
height: 25px;
padding: 15px;
font-size: large;
text-align: center;
cursor: pointer;
border-radius: 10px;
display: inline-block;
}
.numbers:hover {
background-color: rgb(0, 96, 134);
}
.numbers:active {
background-color: rgba(255, 255, 255, 0.657);
color: black;
}
#equal {
position: relative;
top: -385px;
left: 225px;
font-size: x-large;
height: 128px;
padding: 20px;
border-radius: 15px;
}
#equal > span {
position: absolute;
top: 62px;
left: 28px;
}
#equal:hover {
background-color: rgb(44, 44, 133);
padding: 21px;
}
#equal:active {
background-color: rgb(74, 74, 219);
color: white;
}
#clear {
position: absolute;
top: 145px;
left: 500px;
border-radius: 15px;
}
#clear:hover {
padding: 18px;
}
#clear > span {
position: absolute;
top: 18px;
left: 22px;
}
#clear:hover > span {
position: absolute;
top: 18px;
left: 25px;
}
#divide {
font-size: x-large;
position: absolute;
top: 145px;
left: 563px;
border-radius: 15px;
}
#divide:hover {
padding: 18px;
}
#subtration {
font-size: xx-large;
padding: 18px;
position: absolute;
top: 145px;
left: 707px;
border-radius: 20px;
}
#subtration:hover {
padding: 19px;
}
#subtration > span {
position: absolute;
top: 12px;
left: 24px;
}
#addup {
position: relative;
top: -335px;
left: 226px;
border-radius: 15px;
padding: 19px;
}
#addup:hover {
padding: 20px;
}
#multiply {
position: absolute;
top: 145px;
left: 627px;
font-size: large;
padding-bottom: 15px;
border-radius: 15px;
}
#multiply:hover {
padding: 18px;
}
#multiply > span {
position: absolute;
top: 18px;
left: 23px;
}
#multiply:hover > span {
position: absolute;
top: 20px;
left: 25px;
}
#zero {
position: relative;
top: 90px;
left: 20px;
background-color: rgb(88, 88, 88);
color: white;
width: 80px;
padding: 20px;
text-align: center;
border-radius: 8px;
}
#zero:hover {
background-color: rgb(0, 96, 134);
}
#doc {
font-size: large;
font-weight: bolder;
padding: 18px;
position: relative;
top: -30px;
left: 145px;
border-radius: 10px;
}
#msg {
background-color: black;
color: white;
margin-top: 100px;
text-align: center;
padding: 20px;
}
#msg button {
background-color: blue;
color: white;
border-color: blue;
padding: 5px;
border-radius: 2px;
}
#msg button:hover {
background-color: lightskyblue;
cursor: pointer;
border-color: lightskyblue;
}
| 18.14978
| 63
| 0.600243
|
f8a36a730d569fc1e9c56a691499d9e75136ca41
| 3,794
|
dart
|
Dart
|
packages/clima_ui/lib/main.dart
|
shashvat-singham/weather-app
|
24f066dd797a95c02277d290c526049c85269592
|
[
"Apache-2.0"
] | null | null | null |
packages/clima_ui/lib/main.dart
|
shashvat-singham/weather-app
|
24f066dd797a95c02277d290c526049c85269592
|
[
"Apache-2.0"
] | null | null | null |
packages/clima_ui/lib/main.dart
|
shashvat-singham/weather-app
|
24f066dd797a95c02277d290c526049c85269592
|
[
"Apache-2.0"
] | null | null | null |
/*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/.
*/
import 'package:clima_data/models/dark_theme_model.dart';
import 'package:clima_data/models/theme_model.dart';
import 'package:clima_data/providers.dart';
import 'package:clima_data/repos/city_repo_impl.dart';
import 'package:clima_data/repos/full_weather_repo.dart';
import 'package:clima_domain/repos/city_repo.dart';
import 'package:clima_domain/repos/full_weather_repo.dart';
import 'package:clima_ui/screens/weather_screen.dart';
import 'package:clima_ui/state_notifiers/theme_state_notifier.dart' as t;
import 'package:clima_ui/state_notifiers/theme_state_notifier.dart'
show themeStateNotifierProvider;
import 'package:clima_ui/themes/black_theme.dart';
import 'package:clima_ui/themes/dark_theme.dart';
import 'package:clima_ui/themes/light_theme.dart';
import 'package:device_preview/device_preview.dart';
import 'package:flutter/material.dart';
import 'package:flutter_hooks/flutter_hooks.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:shared_preferences/shared_preferences.dart';
import 'package:sizer/sizer.dart';
import 'themes/clima_theme.dart';
// App entry point: initialises SharedPreferences up front, wires the
// data-layer repository implementations into Riverpod, and boots the app
// wrapped in DevicePreview.
Future<void> main() async {
  // Unless you do this, using method channels (like `SharedPreferences` does)
  // before running `runApp` throws an error.
  WidgetsFlutterBinding.ensureInitialized();
  final sharedPreferences = await SharedPreferences.getInstance();
  runApp(
    ProviderScope(
      overrides: [
        // Expose the eagerly-created SharedPreferences instance.
        sharedPreferencesProvider.overrideWithValue(sharedPreferences),
        // Bind the domain-level repo providers to their data-layer impls.
        cityRepoProvider.overrideWithProvider(
          Provider((ref) => ref.watch(cityRepoImplProvider)),
        ),
        fullWeatherRepoProvider.overrideWithProvider(
          Provider((ref) => ref.watch(fullWeatherRepoImplProvider)),
        ),
      ],
      child: DevicePreview(
        builder: (context) => MyApp(),
      ),
    ),
  );
}
// Root widget: loads the persisted theme choice on first build and renders
// the MaterialApp with light/dark/black themes resolved from theme state.
class MyApp extends HookConsumerWidget {
  @override
  Widget build(BuildContext context, WidgetRef ref) {
    final themeStateNotifier = ref.watch(themeStateNotifierProvider.notifier);
    // Load the saved theme exactly once per notifier instance.
    useEffect(
      () {
        themeStateNotifier.loadTheme();
        return null;
      },
      [themeStateNotifier],
    );
    final themeState = ref.watch(themeStateNotifierProvider);
    // While the theme is still loading, render nothing instead of flashing
    // a wrongly-themed frame.
    if (themeState is t.EmptyState || themeState is t.Loading) {
      return const SizedBox.shrink();
    }
    return Sizer(
      builder: (context, orientation, screenType) {
        return MaterialApp(
          locale: DevicePreview.locale(context),
          builder: (context, child) {
            // Pick the ClimaTheme matching the effective Material brightness;
            // in dark mode the user may choose between black and dark grey.
            final ClimaThemeData climaTheme;
            switch (Theme.of(context).brightness) {
              case Brightness.light:
                climaTheme = lightClimaTheme;
                break;
              case Brightness.dark:
                climaTheme = {
                  DarkThemeModel.black: blackClimaTheme,
                  DarkThemeModel.darkGrey: darkGreyClimaTheme,
                }[themeState.darkTheme]!;
            }
            return DevicePreview.appBuilder(
              context,
              ClimaTheme(data: climaTheme, child: child!),
            );
          },
          home: const WeatherScreen(),
          theme: lightTheme,
          // Material dark theme variant, mirroring the ClimaTheme choice above.
          darkTheme: {
            DarkThemeModel.black: blackTheme,
            DarkThemeModel.darkGrey: darkGreyTheme,
          }[themeState.darkTheme],
          themeMode: const {
            ThemeModel.systemDefault: ThemeMode.system,
            ThemeModel.light: ThemeMode.light,
            ThemeModel.dark: ThemeMode.dark,
          }[themeState.theme],
        );
      },
    );
  }
}
| 32.991304
| 78
| 0.67185
|
8b51587a6f07afd336a52fce39fcac39147f2d19
| 718
|
rb
|
Ruby
|
generators/svg-curve-tmp.rb
|
krlawrence/SVG
|
d1b9e79ef9a5503d303ca0576595dbe3fce791ed
|
[
"Apache-2.0"
] | 1
|
2022-03-12T19:13:17.000Z
|
2022-03-12T19:13:17.000Z
|
generators/svg-curve-tmp.rb
|
krlawrence/SVG
|
d1b9e79ef9a5503d303ca0576595dbe3fce791ed
|
[
"Apache-2.0"
] | null | null | null |
generators/svg-curve-tmp.rb
|
krlawrence/SVG
|
d1b9e79ef9a5503d303ca0576595dbe3fce791ed
|
[
"Apache-2.0"
] | null | null | null |
require "./SVGTools.rb"
# Emits a 500x500 SVG containing two mirrored polylines
# (y = ybase -/+ (10 - 50*|x|), i.e. a V shape and its reflection),
# grey axes, and a formula label.
# NOTE(review): the label reads "x^2 + y^2 = 10" but the plotted function is
# 10 - 50*sqrt(x*x) = 10 - 50*|x|, which is not a circle -- confirm intent.
c = SVGTools.new
c.emitHeader(500,500)
points = []
points2 = []
# Curve origin in SVG coordinates.
ybase = 300
xbase = 200
# Sample x from -3 to 3 in steps of 0.1; 50 px per unit on the x axis.
-3.step(3,0.1) do |x|
  v = 10-Math.sqrt(x*x)*50
  y = ybase - v
  y2 = ybase + v
  points += [xbase+x*50,y]
  points2 += [xbase+x*50,y2]
end
c.setStroke("blue")
c.setFill("none")
c.drawPolyLine(points)
c.drawPolyLine(points2)
# Grey horizontal and vertical axes through (xbase, ybase).
c.setStroke("#999")
c.drawLine(xbase-200,ybase,xbase+200,ybase)
c.drawLine(xbase,100,xbase,ybase+200)
puts "<text x='50' y='50' fill='#00f' stroke='none'>x<tspan baseline-shift='super' font-size='small'>2</tspan> + y<tspan baseline-shift='super' font-size='small'>2</tspan> = 10</text>"
c.emitFooter()
| 24.758621
| 194
| 0.607242
|
1d880cf1c19fd5c455b653b831fc1fde678b4933
| 3,180
|
sql
|
SQL
|
bdtps-master/tp1/Consultas (tp2015)/RegistrosPorTabla.sql
|
marlanbar/academic-projects
|
bcdc8ca36b6984ab3f83c10b8a3ed45576ecfca1
|
[
"MIT"
] | null | null | null |
bdtps-master/tp1/Consultas (tp2015)/RegistrosPorTabla.sql
|
marlanbar/academic-projects
|
bcdc8ca36b6984ab3f83c10b8a3ed45576ecfca1
|
[
"MIT"
] | null | null | null |
bdtps-master/tp1/Consultas (tp2015)/RegistrosPorTabla.sql
|
marlanbar/academic-projects
|
bcdc8ca36b6984ab3f83c10b8a3ed45576ecfca1
|
[
"MIT"
] | null | null | null |
-- Lists every table of the schema that currently contains no rows.
-- Each UNION branch counts one table and labels it; the outer query keeps
-- only the tables whose count is zero.
-- NOTE(review): plain UNION (not UNION ALL) collapses duplicate
-- (count, name) pairs; harmless here since table names differ, but UNION ALL
-- would be cheaper.
select tabla, registros from(
select count(*) registros ,'acompañante' as tabla from acompañante
union select count(*) registros ,'antecedente' as tabla from antecedente
union select count(*) registros ,'autopista' as tabla from autopista
union select count(*) registros ,'calle' as tabla from calle
union select count(*) registros ,'categoria' as tabla from categoria
union select count(*) registros ,'companiaaseguradora' as tabla from companiaaseguradora
union select count(*) registros ,'condiciongeneral' as tabla from condiciongeneral
union select count(*) registros ,'conductor' as tabla from conductor
union select count(*) registros ,'conductorhabilitado' as tabla from conductorhabilitado
union select count(*) registros ,'denuncia' as tabla from denuncia
union select count(*) registros ,'estadoiluminacion' as tabla from estadoiluminacion
union select count(*) registros ,'fuerealizadopor' as tabla from fuerealizadopor
union select count(*) registros ,'funcionario' as tabla from funcionario
union select count(*) registros ,'informe' as tabla from informe
union select count(*) registros ,'infraccion' as tabla from infraccion
union select count(*) registros ,'licencia' as tabla from licencia
union select count(*) registros ,'lugar' as tabla from lugar
union select count(*) registros ,'organismo' as tabla from organismo
union select count(*) registros ,'peaton' as tabla from peaton
union select count(*) registros ,'persona' as tabla from persona
union select count(*) registros ,'poliza' as tabla from poliza
union select count(*) registros ,'ruta' as tabla from ruta
union select count(*) registros ,'siniestro' as tabla from siniestro
union select count(*) registros ,'siniestroacompañante' as tabla from siniestroacompañante
union select count(*) registros ,'siniestropeaton' as tabla from siniestropeaton
union select count(*) registros ,'siniestrotestigo' as tabla from siniestrotestigo
union select count(*) registros ,'siniestrovehiculoconductor' as tabla from siniestrovehiculoconductor
union select count(*) registros ,'testigo' as tabla from testigo
union select count(*) registros ,'tipoaccidente' as tabla from tipoaccidente
union select count(*) registros ,'tipocausaprobable' as tabla from tipocausaprobable
union select count(*) registros ,'tipocobertura' as tabla from tipocobertura
union select count(*) registros ,'tipocolision' as tabla from tipocolision
union select count(*) registros ,'tipocondicionclimatica' as tabla from tipocondicionclimatica
union select count(*) registros ,'tipoestadovia' as tabla from tipoestadovia
union select count(*) registros ,'tipoinforme' as tabla from tipoinforme
union select count(*) registros ,'tipoinfraccion' as tabla from tipoinfraccion
union select count(*) registros ,'tipolicencia' as tabla from tipolicencia
union select count(*) registros ,'tipopavimento' as tabla from tipopavimento
union select count(*) registros ,'tipovehiculo' as tabla from tipovehiculo
union select count(*) registros ,'vehiculo' as tabla from vehiculo
union select count(*) registros ,'viapublica' as tabla from viapublica
) as Cantidades
-- Keep only empty tables.
where registros =0
order by registros
| 103
| 0.78805
|
a38b02e7e7b5530c4b9338d8cdf5f3b12f65cc77
| 4,901
|
ts
|
TypeScript
|
src/core/auth/BaseVestingAccount.ts
|
bitwebs/iq.js
|
da9ea7ec9ded41455f34447de7f629668ce53977
|
[
"MIT"
] | null | null | null |
src/core/auth/BaseVestingAccount.ts
|
bitwebs/iq.js
|
da9ea7ec9ded41455f34447de7f629668ce53977
|
[
"MIT"
] | null | null | null |
src/core/auth/BaseVestingAccount.ts
|
bitwebs/iq.js
|
da9ea7ec9ded41455f34447de7f629668ce53977
|
[
"MIT"
] | null | null | null |
import { JSONSerializable } from '../../util/json';
import { Coins } from '../Coins';
import { BaseAccount } from './BaseAccount';
import { BaseAccount as BaseAccount_pb } from '@web4/iq.proto/cosmos/auth/v1beta1/auth';
import { BaseVestingAccount as BaseVestingAccount_pb } from '@web4/iq.proto/cosmos/vesting/v1beta1/vesting';
import * as Long from 'long';
import { PublicKey } from '../PublicKey';
/**
* Holds information about a Account which has vesting information.
*/
/**
 * Holds information about an account which has vesting information.
 * Supports round-tripping through the Amino, Data (JSON) and Proto codecs.
 */
export class BaseVestingAccount extends JSONSerializable<
  BaseVestingAccount.Amino,
  BaseVestingAccount.Data,
  BaseVestingAccount.Proto
> {
  /**
   * @param base_account account information
   * @param original_vesting initial vesting amount
   * @param delegated_free coins delegated from the already-vested balance
   * @param delegated_vesting coins delegated from the still-vesting balance
   * @param end_time vesting end time (serialized as a string; noted as
   *   "-not used-" by the original author)
   */
  constructor(
    public base_account: BaseAccount,
    public original_vesting: Coins,
    public delegated_free: Coins,
    public delegated_vesting: Coins,
    public end_time: number
  ) {
    super();
  }

  /** Account number, delegated to the wrapped base account. */
  public getAccountNumber(): number {
    return this.base_account.account_number;
  }

  /** Sequence (nonce), delegated to the wrapped base account. */
  public getSequenceNumber(): number {
    return this.base_account.sequence;
  }

  /** Public key of the wrapped base account, if one has been set. */
  public getPublicKey(): PublicKey | null {
    return this.base_account.public_key;
  }

  /** Serializes this account into its Amino representation. */
  public toAmino(): BaseVestingAccount.Amino {
    // Removed a stray no-op `undefined;` statement that was dead code here.
    const {
      base_account,
      original_vesting,
      delegated_free,
      delegated_vesting,
      end_time,
    } = this;
    return {
      type: 'core/BaseVestingAccount',
      value: {
        base_account: base_account.toAmino().value,
        delegated_free: delegated_free.toAmino(),
        delegated_vesting: delegated_vesting.toAmino(),
        end_time: end_time.toFixed(),
        original_vesting: original_vesting.toAmino(),
      },
    };
  }

  /** Deserializes an account from its Amino representation. */
  public static fromAmino(amino: BaseVestingAccount.Amino): BaseVestingAccount {
    const base_account = BaseAccount.fromAmino({
      type: 'core/Account',
      value: amino.value.base_account,
    });

    return new BaseVestingAccount(
      base_account,
      Coins.fromAmino(amino.value.original_vesting),
      Coins.fromAmino(amino.value.delegated_free),
      Coins.fromAmino(amino.value.delegated_vesting),
      Number.parseInt(amino.value.end_time)
    );
  }

  /** Serializes this account into its JSON (Data) representation. */
  public toData(): BaseVestingAccount.Data {
    const {
      base_account,
      original_vesting,
      delegated_free,
      delegated_vesting,
      end_time,
    } = this;
    return {
      // NOTE(review): this '@type' names LazyGradedVestingAccount rather
      // than a BaseVestingAccount type; kept as-is since changing it would
      // alter the wire format -- confirm against the chain's registry.
      '@type': '/iq.vesting.v1beta1.LazyGradedVestingAccount',
      base_account: base_account.toData(),
      delegated_free: delegated_free.toData(),
      delegated_vesting: delegated_vesting.toData(),
      end_time: end_time.toFixed(),
      original_vesting: original_vesting.toData(),
    };
  }

  /** Deserializes an account from its JSON (Data) representation. */
  public static fromData(data: BaseVestingAccount.Data): BaseVestingAccount {
    const base_account = BaseAccount.fromData({
      '@type': '/cosmos.auth.v1beta1.BaseAccount',
      ...data.base_account,
    });

    return new BaseVestingAccount(
      base_account,
      Coins.fromData(data.original_vesting),
      Coins.fromData(data.delegated_free),
      Coins.fromData(data.delegated_vesting),
      Number.parseInt(data.end_time)
    );
  }

  /** Serializes this account into its protobuf representation. */
  public toProto(): BaseVestingAccount.Proto {
    const {
      base_account,
      original_vesting,
      delegated_free,
      delegated_vesting,
      end_time,
    } = this;
    return BaseVestingAccount_pb.fromPartial({
      baseAccount: base_account.toProto(),
      delegatedFree: delegated_free.toProto(),
      delegatedVesting: delegated_vesting.toProto(),
      endTime: Long.fromNumber(end_time),
      originalVesting: original_vesting.toProto(),
    });
  }

  /** Deserializes an account from its protobuf representation. */
  public static fromProto(proto: BaseVestingAccount.Proto): BaseVestingAccount {
    const baseAccount = BaseAccount.fromProto(
      proto.baseAccount as BaseAccount_pb
    );

    return new BaseVestingAccount(
      baseAccount,
      Coins.fromProto(proto.originalVesting),
      Coins.fromProto(proto.delegatedFree),
      Coins.fromProto(proto.delegatedVesting),
      proto.endTime.toNumber()
    );
  }
}
// Codec-facing type definitions for BaseVestingAccount.
export namespace BaseVestingAccount {
  /** Amino payload: all amounts in Amino coin form, end_time as a string. */
  export interface AminoValue {
    base_account: BaseAccount.AminoValue;
    original_vesting: Coins.Amino;
    delegated_free: Coins.Amino;
    delegated_vesting: Coins.Amino;
    end_time: string;
  }
  /** Amino envelope: type tag plus payload. */
  export interface Amino {
    type: 'core/BaseVestingAccount';
    value: AminoValue;
  }
  /** JSON (Data) payload, mirroring AminoValue field-for-field. */
  export interface DataValue {
    base_account: BaseAccount.DataValue;
    original_vesting: Coins.Amino;
    delegated_free: Coins.Amino;
    delegated_vesting: Coins.Amino;
    end_time: string;
  }
  /** JSON (Data) envelope: payload tagged with its registry '@type'. */
  export interface Data extends DataValue {
    '@type': '/iq.vesting.v1beta1.LazyGradedVestingAccount';
  }
  /** Protobuf-generated counterpart type. */
  export type Proto = BaseVestingAccount_pb;
}
| 27.227778
| 108
| 0.694552
|
ed1da3a0a2ea090836b8a728bb8404ffae558a88
| 294
|
c
|
C
|
exercicios/lista-encadeada/soma.c
|
alexNeto/estrutura-de-dados
|
b52d50330d6ae0adfa6cac3573e3eff486356244
|
[
"MIT"
] | null | null | null |
exercicios/lista-encadeada/soma.c
|
alexNeto/estrutura-de-dados
|
b52d50330d6ae0adfa6cac3573e3eff486356244
|
[
"MIT"
] | null | null | null |
exercicios/lista-encadeada/soma.c
|
alexNeto/estrutura-de-dados
|
b52d50330d6ae0adfa6cac3573e3eff486356244
|
[
"MIT"
] | null | null | null |
/*
* SOMA
* Sejam dois números armazenados em listas ligadas com
* cabeça, onde cada conteúdo é um dígito. Os dígitos estão
* armazenados na ordem reversa. Escreva um código que some
* os dois números e retorne uma lista ligada com o resultado.
* Exemplo: 3->1->5 + 5->9->2 = 8->0->8
*/
| 36.75
| 62
| 0.697279
|
15d6e7efe910b9019c62978df61af26da75930b7
| 3,696
|
rb
|
Ruby
|
app/api/grape_example_app/v1/review.rb
|
dreamingechoes/grape_example_app
|
3d20e812c4fcf06898ae9f9d992ab45f35db5127
|
[
"MIT"
] | 13
|
2015-11-19T09:00:06.000Z
|
2021-12-19T09:11:22.000Z
|
app/api/grape_example_app/v1/review.rb
|
dreamingechoes/grape_example_app
|
3d20e812c4fcf06898ae9f9d992ab45f35db5127
|
[
"MIT"
] | null | null | null |
app/api/grape_example_app/v1/review.rb
|
dreamingechoes/grape_example_app
|
3d20e812c4fcf06898ae9f9d992ab45f35db5127
|
[
"MIT"
] | 9
|
2015-10-28T23:31:09.000Z
|
2018-10-26T06:49:07.000Z
|
module GrapeExampleApp
  # Grape API (v1) exposing CRUD-style endpoints for reviews.
  class V1::Review < Grape::API
    # JSONP support for cross-domain GET requests.
    use Rack::JSONP
    helpers do
      # Reusable param block: optional authentication token.
      params :token do
        optional :token, type: String, default: nil,
        documentation: {
          type: 'String',
          desc: 'Authenticate token'
        }
      end
      # Reusable param block: optional hash of review attributes.
      params :attributes do
        optional :attributes, type: Hash, default: {},
        documentation: {
          type: 'Hash',
          desc: 'Params attributes of review'
        }
      end
    end
    resource :reviews do
      # GET /reviews -- list all reviews (no authentication required).
      desc 'REST GET with no parameters.' do
        detail <<-NOTE
          Make a query for reviews.
          -----------------
          Returns an array of all reviews.
        NOTE
      end
      get do
        api_response(::Review.all.to_json)
      end
      route_param :id do
        # GET /reviews/:id -- fetch one review; requires authentication.
        desc 'REST GET with id param.' do
          detail <<-NOTE
            Make a query for a review with certain id.
            -----------------
            Returns the result of query for a review.
          NOTE
        end
        params do
          use :token, type: String, desc: 'Authentication token'
          requires :id, type: Integer, desc: "Review ID"
        end
        get do
          begin
            authenticate!
            review = ::Review.find(params[:id])
            api_response(review.to_json)
          rescue ActiveRecord::RecordNotFound => e
            status 404 # Not found
          end
        end
      end
      # POST /reviews -- create a review from permitted attributes.
      # NOTE(review): the example JSON below shows product-style fields
      # (name/description/price/...) but the declared params require
      # title/body -- the documentation string looks stale; confirm.
      desc 'REST Post with attributes param.' do
        detail <<-NOTE
          Creates a review with the information passed through attributes param.
          -----------------
          This is a hash, with the estruture:
          ```
          {"name": "Example", "description": "Description example", "image_url": "url", "price": 30, "stock": 4}
          ```
        NOTE
      end
      params do
        use :token
        requires :attributes, type: Hash, desc: 'Review object to create' do
          requires :title, type: String, desc: 'Title of review'
          requires :body, type: String, desc: 'Body of review'
        end
      end
      post do
        begin
          authenticate!
          # Whitelist only the fields a client may set.
          safe_params = clean_params(params[:attributes]).permit(:title, :body)
          if safe_params
            ::Review.create!(safe_params)
            status 200 # Saved OK
          end
        rescue ActiveRecord::RecordNotFound => e
          status 404 # Not found
        end
      end
      # PUT /reviews/:id -- update a review from permitted attributes.
      # NOTE(review): same stale example-JSON issue as the POST above.
      desc 'REST Put with attributes param.' do
        detail <<-NOTE
          Updates a review with the information passed through attributes param.
          -----------------
          This is a hash, with the estruture:
          ```
          {"name": "Example", "description": "Description example", "image_url": "url", "price": 30, "stock": 4}
          ```
        NOTE
      end
      params do
        use :token, type: String, desc: 'Authentication token'
        requires :id, type: Integer, desc: "Review ID"
        requires :attributes, type: Hash, desc: 'Review object to create' do
          requires :title, type: String, desc: 'Title of review'
          requires :body, type: String, desc: 'Body of review'
        end
      end
      put ':id' do
        begin
          authenticate!
          safe_params = clean_params(params[:attributes]).permit(:title, :body)
          if safe_params
            review = ::Review.find(params[:id])
            review.update_attributes(safe_params)
            status 200 # Saved OK
          end
        rescue ActiveRecord::RecordNotFound => e
          status 404 # Not found
        end
      end
    end
  end
end
| 28
| 116
| 0.524351
|
5ade72881639da9a69b2f6000cf46b44b250e9a5
| 673
|
cs
|
C#
|
src/Cuemon.AspNetCore.Authentication/Digest/DigestAuthenticator.cs
|
gimlichael/cuemoncore
|
7750a3bd67200924b9cfa4dddccf54c765453c24
|
[
"MIT"
] | 11
|
2018-05-30T05:46:59.000Z
|
2020-04-30T11:44:57.000Z
|
src/Cuemon.AspNetCore.Authentication/Digest/DigestAuthenticator.cs
|
gimlichael/cuemoncore
|
7750a3bd67200924b9cfa4dddccf54c765453c24
|
[
"MIT"
] | 4
|
2021-02-28T00:24:26.000Z
|
2021-04-01T17:58:20.000Z
|
src/Cuemon.AspNetCore.Authentication/Digest/DigestAuthenticator.cs
|
gimlichael/cuemoncore
|
7750a3bd67200924b9cfa4dddccf54c765453c24
|
[
"MIT"
] | 3
|
2018-08-29T15:19:47.000Z
|
2019-10-14T20:47:39.000Z
|
using System.Security.Claims;
namespace Cuemon.AspNetCore.Authentication.Digest
{
/// <summary>
/// Represents the method that defines an Authenticator typically assigned on <see cref="DigestAuthenticationOptions"/>.
/// </summary>
/// <param name="username">The username to match and lookup the paired <paramref name="password"/>.</param>
/// <param name="password">The password paired with <paramref name="username"/>.</param>
/// <returns>A <see cref="ClaimsPrincipal"/> that is associated with the result of <paramref name="password"/>.</returns>
public delegate ClaimsPrincipal DigestAuthenticator(string username, out string password);
}
| 56.083333
| 125
| 0.728083
|
17f64dff80a87adbc9d63713dfa85d3640c9d190
| 5,311
|
swift
|
Swift
|
src/main.swift
|
gwk/ploy-swift
|
9cafa1a96b3d0cd3a7e2fa0df5383a17115e8574
|
[
"ISC"
] | null | null | null |
src/main.swift
|
gwk/ploy-swift
|
9cafa1a96b3d0cd3a7e2fa0df5383a17115e8574
|
[
"ISC"
] | null | null | null |
src/main.swift
|
gwk/ploy-swift
|
9cafa1a96b3d0cd3a7e2fa0df5383a17115e8574
|
[
"ISC"
] | null | null | null |
// Copyright © 2015 George King. Permission to use this file is granted in ploy/license.txt.
import Foundation
let usageMsg = """
Ploy compiler usage:
ploy build -mapper mapper-path lib-src-paths… -main main-src-path -o out-path.
ploy test-types src-type dst-type
"""
let validBuildOpts = Set([
"-mapper",
"-main",
"-o",
])
func main() {
if processArguments.count < 2 { fail(usageMsg) }
let subcommand = processArguments[1]
let args = Array(processArguments[2...])
switch subcommand {
case "build": ploy_build(args: args)
case "test-types": ploy_test_types(args: args)
default: fail(usageMsg)
}
}
func ploy_build(args:[String]) {
var srcPaths: [Path] = []
var opts: [String: String] = [:]
var opt: String? = nil
for arg in args {
if let o = opt {
opts[o] = arg
opt = nil
} else if validBuildOpts.contains(arg) {
opt = arg
} else if arg.hasPrefix("-") {
fail("unrecognized option: '\(arg)'")
} else {
srcPaths.append(Path(arg))
}
}
check(opt == nil, "dangling option flag: '\(opt!)'")
for (key, val) in processEnvironment {
if key == "PLOY_DBG_DEFS" {
globalDbgDefSuffixes = Set(val.split(" "))
errL("PLOY_DBG_DEFS: ", globalDbgDefSuffixes)
}
}
guard let mapperPath = Path(opts["-mapper"]) else { fail("`-mapper mapper-path` argument is required.") }
guard let mainPath = Path(opts["-main"]) else { fail("`-main main-src-path` argument is required.") }
guard let outPath = Path(opts["-o"]) else { fail("`-o out-path` argument is required.") }
var libPaths: [Path] = []
var incPaths: [Path] = []
let known_methods = Set([".ploy", ".js", ""])
for path in srcPaths {
if !known_methods.contains(path.ext) {
fail("invalid method for path: \(path)")
}
}
let allSrcPaths = guarded { try walkPaths(roots: srcPaths) }
for path in allSrcPaths {
let ext = path.ext
if ext == ".ploy" {
libPaths.append(path)
} else if ext == ".js" {
incPaths.append(path)
}
}
let mainDefs = parsePloy(path: mainPath)
let libDefs = libPaths.flatMap { parsePloy(path: $0) }
let dumpPath = outPath.append(".dump.jsonl")
let tmpPath = outPath.append(".tmp")
let mapPath = outPath.append(".srcmap")
let tmpFile = guarded { try File(path: tmpPath, mode: .write, create: 0o644) }
let mapPipe = Pipe()
let mapSend = mapPipe.fileHandleForWriting
let mapProc = Process()
mapProc.launchPath = mapperPath.expandUser
mapProc.arguments = [outPath.expandUser, mapPath.expandUser]
mapProc.standardInput = mapPipe.fileHandleForReading
mapProc.standardOutput = FileHandle.standardError
mapProc.standardError = FileHandle.standardError
mapProc.launch()
let ctx = GlobalCtx(dumpPath: dumpPath, outFile: tmpFile, mapSend: mapSend)
let rootSpace = setupRootSpace(ctx)
let mainSpace = MainSpace(ctx, mainPath: mainPath, parent: rootSpace)
rootSpace.bindings["MAIN"] = ScopeRecord(name: "MAIN", sym: nil, isLocal: false, kind: .space(mainSpace))
mainSpace.add(defs: mainDefs, root: rootSpace)
_ = mainSpace.getMainDef() // check that we have `main` before doing additional work.
mainSpace.add(defs: libDefs, root: rootSpace)
compileProgram(includePaths: incPaths, mainSpace: mainSpace, mapPath: mapPath)
renameFile(from: tmpPath, to: outPath)
do {
try File.changePerms(path: outPath, perms: 0o755)
} catch let e {
fail("could not set compiled output file to executable: \(outPath)\n \(e)")
}
mapSend.closeFile() // closing the pipe to gen-source-map causes the map file to be written.
mapProc.waitUntilExit()
if mapProc.terminationStatus != 0 {
fail("gen-source-map subprocess failed.")
}
}
func ploy_test_types(args:[String]) {
if args.count != 2 { fail("test-types expects two type strings.") }
let src_str = args[0] + "\n"
let dst_str = args[1] + "\n"
let src_expr:Expr = parsePloyForm(string: src_str, name: "src")
let dst_expr:Expr = parsePloyForm(string: dst_str, name: "dst")
let rootSpace = setupRootSpace(nullGlobalCtx())
let srcScope = LocalScope(parent: rootSpace)
let dstScope = LocalScope(parent: rootSpace)
let srcType = src_expr.type(srcScope, "src")
let dstType = dst_expr.type(dstScope, "dst")
outL(srcType)
outL(dstType)
var typechecker = TypeChecker()
let comparison = typechecker.compare(src: srcType, dst: dstType)
switch comparison {
case .exact: outL("Exact.")
case .subtype: outL("Subtype.")
case .free: outL("Free.")
default:
outL("Incompatible: \(comparison): \(typechecker.incompatibilityPath)")
}
}
func setupRootSpace(_ ctx: GlobalCtx) -> Space {
let rootSpace = Space(ctx, pathNames: ["ROOT"], parent: nil)
rootSpace.bindings["ROOT"] = ScopeRecord(name: "ROOT", sym: nil, isLocal: false, kind: .space(rootSpace))
// NOTE: reference cycle; could fix it by making a special case for "ROOT" just before lookup failure.
for t in intrinsicTypes {
let rec = ScopeRecord(name: t.description, sym: nil, isLocal: false, kind: .type(t))
rootSpace.bindings[t.description] = rec
}
return rootSpace
}
func nullGlobalCtx() -> GlobalCtx {
return GlobalCtx(
dumpPath: "/dev/null",
outFile: try! File(path: "/dev/null", mode: .write),
mapSend: FileHandle.nullDevice)
}
main()
| 29.181319
| 107
| 0.672943
|
1a4faebf6fe3269c375f2f316d22294746dc7e95
| 5,501
|
py
|
Python
|
python/shoes/get_data_by_edge.py
|
luolongqiang/caffe-luolongqiang
|
5ee132e4451a538d97b62039a62a59a69dc43bb9
|
[
"BSD-2-Clause"
] | 2
|
2017-03-23T04:19:31.000Z
|
2019-07-05T02:31:04.000Z
|
python/shoes/get_data_by_edge.py
|
luolongqiang/caffe-luolongqiang
|
5ee132e4451a538d97b62039a62a59a69dc43bb9
|
[
"BSD-2-Clause"
] | null | null | null |
python/shoes/get_data_by_edge.py
|
luolongqiang/caffe-luolongqiang
|
5ee132e4451a538d97b62039a62a59a69dc43bb9
|
[
"BSD-2-Clause"
] | 1
|
2018-10-11T06:07:19.000Z
|
2018-10-11T06:07:19.000Z
|
# -*- coding:UTF-8 -*-
import numpy as np
from numpy import array
from PIL import Image
from multiprocessing import Pool
import os, sys, argparse, time, cv2, shutil, random
# python python/shoes/get_data_by_edge.py -i data/shoes/img3 -b data/shoes/background -o data/shoes
img_dir = 'JPEGImages'
lbs_dir = 'labels'
def get_file_list(input_dir, backgr_dir, output_root):
if not os.path.exists(output_root):
os.mkdir(output_root)
backgr_img_set = []
for temp_img in os.listdir(backgr_dir):
if temp_img.endswith('.jpg'):
backgr_img = os.path.join(backgr_dir, temp_img)
backgr_img_set.append(backgr_img)
input_img_list = []
output_img_list = []
output_txt_list = []
for temp_img in os.listdir(input_dir):
if temp_img.endswith('.jpg'):
input_img = os.path.join(input_dir, temp_img)
temp_name = temp_img[:temp_img.rfind('.')]
output_img = os.path.join(output_root, img_dir, temp_name+'-add.jpg')
output_txt = os.path.join(output_root, lbs_dir, temp_name+'-add.txt')
input_img_list.append(input_img)
output_img_list.append(output_img)
output_txt_list.append(output_txt)
output_num = len(output_img_list)
backgr_num = len(backgr_img_set)
times = output_num/backgr_num + 1
backgr_img_list = random.sample(times*backgr_img_set, output_num)
return input_img_list, backgr_img_list, output_img_list, output_txt_list
def get_img_edge((input_img_file, backgr_img_file, output_img_file, output_txt_file)):
print input_img_file
img = cv2.imread(input_img_file)
top, right, bottom, left, canimg = get_bbox_by_canny(img)
box_w, box_h = right-left, bottom-top
if box_w*box_h > img.shape[1]*img.shape[0]/16.0:
re_img = np.ones((4*box_h, 4*box_w, img.shape[2]), np.uint8)*255
re_top, re_right, re_bottom, re_left = \
box_h*3/2, box_w*5/2, box_h*5/2, box_w*3/2
re_img[re_top:re_bottom, re_left:re_right, :] = img[top:bottom, left:right, :]
top, right, bottom, left, canimg = get_bbox_by_canny(re_img)
img = re_img
width, height = img.shape[1], img.shape[0]
output_labels(top, right, bottom, left, width, height, output_txt_file)
mask = np.zeros(img.shape[:2], np.uint8)
bgdModel = np.zeros((1, 65), np.float64)
fgdModel = np.zeros((1, 65), np.float64)
rect = (left, top, right - left, bottom - top)
cv2.grabCut(img, mask, rect, bgdModel, fgdModel, 3, cv2.GC_INIT_WITH_RECT)
mask2 = np.where((mask==2)|(mask==0), 0, 1).astype('uint8')
img = img*mask2[:, :, np.newaxis]
kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (199, 199))
th3 = cv2.dilate(canimg, kernel)
bin_img = cv2.bitwise_and(th3, th3, mask=mask2)
cv2.imwrite('temp.png', img)
cv2.imwrite('temp.bmp', bin_img)
img = Image.open('temp.png')
bin_img = Image.open('temp.bmp')
bg_img = Image.open(backgr_img_file)
img = img.convert("RGBA")
bg_img = bg_img.convert("RGBA")
bg_img = bg_img.resize(img.size)
bg_img.paste(img, (0, 0, img.size[0], img.size[1]), bin_img)
bg_img.save(output_img_file)
def get_bbox_by_canny(img):
grayed = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
grayed = cv2.blur(grayed, (3, 3))
width = grayed.shape[1]
height = grayed.shape[0]
canimg = cv2.Canny(grayed, 50, 80)
if np.max(canimg) == 0:
top = 0
right = width - 1
bottom = height - 1
left = 0
else:
linepix = np.where(canimg == 255)
top = min(linepix[0])
right = max(linepix[1])
bottom = max(linepix[0])
left = min(linepix[1])
return top, right, bottom, left, canimg
def output_labels(top, right, bottom, left, width, height, output_txt_file):
x = (left + right)/2.0
y = (top + bottom)/2.0
w = right - left
h = bottom - top
x, y, w, h = x/width, y/height, w*1.0/width, h*1.0/height
line = array([[0, x, y, w, h]])
np.savetxt(output_txt_file, line, fmt="%d %f %f %f %f")
def get_args():
parser = argparse.ArgumentParser(description = 'get shoes data')
parser.add_argument('-i', dest = 'input_dir',
help = 'input dir of images', default = None, type = str)
parser.add_argument('-b', dest = 'backgr_dir',
help = 'background dir of images', default = None, type = str)
parser.add_argument('-o', dest = 'output_root',
help = 'output root of JPEGImages and labels', default = None, type = str)
parser.add_argument('-c', dest = 'cpu_num',
help = 'cpu number', default = 8, type = int)
if len(sys.argv) == 1:
parser.print_help()
sys.exit(1)
args = parser.parse_args()
return args
if __name__ == "__main__":
args = get_args()
input_dir = args.input_dir
backgr_dir = args.backgr_dir
output_root = args.output_root
cpu_num = args.cpu_num
tic = time.time()
input_img_list, backgr_img_list, output_img_list, output_txt_list = \
get_file_list(input_dir, backgr_dir, output_root)
'''
pool = Pool(cpu_num)
pool.map(get_img_edge, zip(input_img_list, backgr_img_list, \
output_img_list, output_txt_list))
'''
for arguments in zip(input_img_list, backgr_img_list, output_img_list, output_txt_list):
get_img_edge(arguments)
toc = time.time()
print 'running time: {} seconds'.format(toc-tic)
| 36.190789
| 99
| 0.638793
|
a9a1a665a684ec24fa011a7e8dc42dba90337d5b
| 174
|
sql
|
SQL
|
Portal/Binaries/Database/Procedures/GalleryCategory_Set.drop.sql
|
dsalunga/mPortal
|
3c727231e576c4ab0c3ace21cc412a2cf344974a
|
[
"MIT"
] | 1
|
2018-05-08T21:06:38.000Z
|
2018-05-08T21:06:38.000Z
|
Portal/Binaries/Database/Procedures/GalleryCategory_Set.drop.sql
|
dsalunga/mPortal
|
3c727231e576c4ab0c3ace21cc412a2cf344974a
|
[
"MIT"
] | null | null | null |
Portal/Binaries/Database/Procedures/GalleryCategory_Set.drop.sql
|
dsalunga/mPortal
|
3c727231e576c4ab0c3ace21cc412a2cf344974a
|
[
"MIT"
] | 3
|
2017-12-19T17:51:25.000Z
|
2022-02-02T03:45:43.000Z
|
if exists (select * from dbo.sysobjects where id = object_id(N'[GalleryCategory_Set]') and OBJECTPROPERTY(id, N'IsProcedure') = 1)
drop procedure [GalleryCategory_Set]
GO
| 24.857143
| 130
| 0.764368
|
92b127495185e9e587a50fc79a809477c95578a4
| 1,452
|
c
|
C
|
03_Data Structure/Algorithms/Map/Single-source shortest path/Topo/topo.c
|
Robert-Stackflow/HUST-Courses
|
300752552e7af035b0e5c7663953850c81871242
|
[
"MIT"
] | 4
|
2021-11-01T09:27:32.000Z
|
2022-03-07T14:24:10.000Z
|
03_Data Structure/Algorithms/Map/Single-source shortest path/Topo/topo.c
|
Robert-Stackflow/HUST-Courses
|
300752552e7af035b0e5c7663953850c81871242
|
[
"MIT"
] | null | null | null |
03_Data Structure/Algorithms/Map/Single-source shortest path/Topo/topo.c
|
Robert-Stackflow/HUST-Courses
|
300752552e7af035b0e5c7663953850c81871242
|
[
"MIT"
] | null | null | null |
#include <stdio.h>
#include <stdlib.h>
#include <limits.h>
#define STACKSIZE 100
#include "map.h"
int sp=-1;
int stack[STACKSIZE];
int empty(void){
return sp==-1;
}
void push(int new){
stack[++sp]=new;
}
int pop(void){
return stack[sp--];
}
int top(void){
return stack[sp];
}
void DFS_connect(struct Graph *map,int startnode,int *visited){
if(visited[startnode])
return;
struct Node *cur=map->nodes[startnode]->next;
while(cur){
if(!visited[cur->vertex])
DFS_connect(map,cur->vertex,visited);
cur=cur->next;
}
visited[startnode]=1;
push(startnode);
}
void DFS(struct Graph *map){
int visited[map->nodesize];
for(int i=0;i<map->nodesize;i++)
visited[i]=0;
for(int i=0;i<map->nodesize;i++)
if(!visited[i])
DFS_connect(map,i,visited);
}
void topo(struct Graph *map){
#define START 1
DFS(map);
int curno;
int shortestpath[map->nodesize];
for(int i=0;i<map->nodesize;i++)
shortestpath[i]=SHRT_MAX;
shortestpath[START]=0;
struct Node *cur;
while(!empty() && top()!=START)
pop();
while(!empty()){
curno=pop();
cur=map->nodes[curno]->next;
while(cur){
if(shortestpath[cur->vertex]>shortestpath[curno]+cur->weight)
shortestpath[cur->vertex]=shortestpath[curno]+cur->weight;
cur=cur->next;
}
}
for(int i=0;i<map->nodesize;i++)
printf("%d,%d\n",i,shortestpath[i]);
}
int main(int argc, char const *argv[]){
if(argc<2)
return 1;
struct Graph *map=read_map(argv[1]);
topo(map);
return 0;
}
| 19.36
| 64
| 0.661157
|
5bc74d69d961579cc89c881c62435c5260c7e067
| 518
|
css
|
CSS
|
website/public/styles/shell_small.css
|
fullwebdev/fullwebdev
|
34afaa07d3f8b55abb691cb11f6a1f19a0eecf5d
|
[
"MIT"
] | 11
|
2020-07-22T22:08:14.000Z
|
2021-11-12T07:01:04.000Z
|
website/public/styles/shell_small.css
|
fullwebdev/fullwebdev
|
34afaa07d3f8b55abb691cb11f6a1f19a0eecf5d
|
[
"MIT"
] | 75
|
2020-08-01T14:59:09.000Z
|
2022-03-02T16:09:47.000Z
|
website/public/styles/shell_small.css
|
fullwebdev/fullwebdev
|
34afaa07d3f8b55abb691cb11f6a1f19a0eecf5d
|
[
"MIT"
] | 4
|
2020-05-27T10:36:38.000Z
|
2021-10-17T04:23:52.000Z
|
/*#region globals*/
/*#endregion globals*/
/*#region navbar */
#main-header {
box-shadow: 0 1px 2px 0 rgba(60, 64, 67, 0.3),
0 2px 6px 2px rgba(60, 64, 67, 0.15);
}
#main-header .row {
height: 64px;
justify-content: space-between;
}
#main-header .navigation {
display: none;
}
#header__language-switch {
display: none;
}
/*#endregion navbar */
/*#region content */
#content-container daucus-menu {
display: none !important;
}
#page-container {
padding: 1rem 2rem;
}
/*#endregion content */
| 14
| 48
| 0.644788
|
1a49fdf71595ced30cdb991b300a6613a3292390
| 12,118
|
py
|
Python
|
projects/pass-manager-tkinter-encrypted/scripts/ui.py
|
srakhe/pass-manager-py
|
b67068d111c2d2d8f5bac1021e782a34f02061b9
|
[
"MIT"
] | null | null | null |
projects/pass-manager-tkinter-encrypted/scripts/ui.py
|
srakhe/pass-manager-py
|
b67068d111c2d2d8f5bac1021e782a34f02061b9
|
[
"MIT"
] | null | null | null |
projects/pass-manager-tkinter-encrypted/scripts/ui.py
|
srakhe/pass-manager-py
|
b67068d111c2d2d8f5bac1021e782a34f02061b9
|
[
"MIT"
] | null | null | null |
from tkinter import *
from tkinter import messagebox
from scripts import data_manager
from scripts import password_generator
import pyperclip
BACKGROUND_COLOUR = '#495664'
FOREGROUND_COLOUR = '#f6f7d3'
DARK_TEXT_COLOUR = '#333c4a'
class UI:
def __init__(self):
# Init objects
self.data_manager_obj = data_manager.DataManager()
# Setup files if first start
if self.data_manager_obj.is_first_start():
self.data_manager_obj.setup_obj.run_setup()
# Create login window if not first start
if not self.data_manager_obj.is_first_start():
self.login_window = Tk()
self.login_window.title('Login to Password Manager')
self.login_window.config(padx=60, pady=50, bg=BACKGROUND_COLOUR)
# Master user credentials
self.master_username = StringVar()
self.master_password = StringVar()
# Init other variables required
self.main_window = None
self.main_image = None
self.symbols_checked = None
self.letters_checked = None
self.numbers_checked = None
self.spinbox_pass_length = None
self.add_new_tag = None
self.add_new_username = None
self.add_new_pass = None
self.select_tag = None
self.tags_option_menu = None
self.user_listbox = None
self.pass_listbox = None
# Init login window objects
self.init_login_window()
self.login_window.mainloop()
def init_login_window(self):
user_label = Label(text='Username: ', bg=BACKGROUND_COLOUR, fg=FOREGROUND_COLOUR, pady=20)
user_label.grid(row=0, column=0)
user_entry = Entry(width=30, textvariable=self.master_username)
user_entry.grid(row=0, column=1)
pass_label = Label(text='Password: ', bg=BACKGROUND_COLOUR, fg=FOREGROUND_COLOUR, pady=20)
pass_label.grid(row=1, column=0)
pass_entry = Entry(width=30, textvariable=self.master_password, show='*')
pass_entry.grid(row=1, column=1)
go_btn = Button(text='Go', bg=BACKGROUND_COLOUR, fg=FOREGROUND_COLOUR, command=self.login_pressed, pady=10)
go_btn.grid(row=2, column=2)
user_entry.focus()
def login_pressed(self):
username = self.master_username.get()
password = self.master_password.get()
if username and password:
self.master_username.set('')
self.master_password.set('')
check_username, check_password = self.data_manager_obj.get_master_details()
if str(check_username) == str(username):
if str(check_password) == str(password):
self.create_main_window()
else:
messagebox.showerror(title='Incorrect', message='Please check the password.')
else:
messagebox.showerror(title='Incorrect', message='Please check the username.')
else:
messagebox.showerror(title='Empty field(s)?', message='Please don\'t leave any field(s) empty.')
def create_main_window(self):
self.login_window.destroy()
self.main_window = Tk()
self.main_window.title('Password Manager')
self.main_window.config(padx=50, pady=50, bg=BACKGROUND_COLOUR)
main_canvas = Canvas(width=600, height=600)
main_canvas.config(bg=BACKGROUND_COLOUR, highlightthickness=0)
self.main_image = PhotoImage(file='images/password-manager.png')
main_canvas.create_image(300, 300, image=self.main_image)
main_canvas.grid(row=0, column=1)
tags_label = Label(text='TAG:', bg=BACKGROUND_COLOUR, fg=FOREGROUND_COLOUR, pady=50)
tags_label.grid(row=1, column=0)
self.select_tag = StringVar()
tags_list = self.data_manager_obj.get_saved_password_tags()
self.tags_option_menu = OptionMenu(self.main_window, self.select_tag, *tags_list)
self.select_tag.set(tags_list[0])
self.tags_option_menu.grid(row=1, column=1)
search_btn = Button(text='Search', bg=BACKGROUND_COLOUR, fg=FOREGROUND_COLOUR, pady=10,
command=self.list_passwords_clicked)
search_btn.grid(row=1, column=3)
add_btn = Button(text='Add a new entry', bg=BACKGROUND_COLOUR, fg=FOREGROUND_COLOUR, pady=10,
command=self.add_new_password_clicked)
add_btn.grid(row=2, column=0)
gen_pass_btn = Button(text='Generate Password', bg=BACKGROUND_COLOUR, fg=FOREGROUND_COLOUR, pady=10,
command=self.generate_password_clicked)
gen_pass_btn.grid(row=2, column=1)
def list_passwords_clicked(self):
self.create_list_pass_window(master=self.main_window)
def create_list_pass_window(self, master):
list_pass_window = Toplevel(master=master)
tag_choice = str(self.select_tag.get()).lower()
list_pass_window.title(f'List of passwords for {tag_choice}')
list_pass_window.config(padx=50, pady=50, bg=BACKGROUND_COLOUR)
intruct_label = Label(master=list_pass_window, text='Click on item to copy', bg=BACKGROUND_COLOUR,
fg=FOREGROUND_COLOUR, pady=10)
intruct_label.grid(row=0, column=0)
count, user_list, pass_list = self.data_manager_obj.get_all_passwords(tag_choice)
self.user_listbox = Listbox(master=list_pass_window, height=count)
for i in range(count):
self.user_listbox.insert(i, user_list[i])
self.user_listbox.grid(row=1, column=0)
self.pass_listbox = Listbox(master=list_pass_window, height=count)
for i in range(count):
self.pass_listbox.insert(i, pass_list[i])
self.pass_listbox.grid(row=1, column=1)
self.user_listbox.bind("<<ListboxSelect>>", self.user_listbox_used)
self.pass_listbox.bind("<<ListboxSelect>>", self.pass_listbox_used)
def user_listbox_used(self, event):
if self.user_listbox.curselection():
pyperclip.copy(self.user_listbox.get(self.user_listbox.curselection()))
messagebox.showinfo(title='Copied',
message='Copied this item!')
def pass_listbox_used(self, event):
if self.pass_listbox.curselection():
pyperclip.copy(self.pass_listbox.get(self.pass_listbox.curselection()))
messagebox.showinfo(title='Copied',
message='Copied this item!')
def generate_password_clicked(self):
self.create_gen_pass_window(master=self.main_window)
def create_gen_pass_window(self, master):
generate_pass_window = Toplevel(master=master)
generate_pass_window.title('Generate a new password')
generate_pass_window.config(padx=50, pady=50, bg=BACKGROUND_COLOUR)
self.symbols_checked = IntVar()
self.letters_checked = IntVar()
self.numbers_checked = IntVar()
symbols_check = Checkbutton(master=generate_pass_window, text='Symbols', variable=self.symbols_checked, pady=10)
symbols_check.config(bg=BACKGROUND_COLOUR, highlightthickness=0)
symbols_check.grid(row=0, column=0)
letters_check = Checkbutton(master=generate_pass_window, text='Letters', variable=self.letters_checked, pady=10)
letters_check.config(bg=BACKGROUND_COLOUR, highlightthickness=0)
letters_check.grid(row=1, column=0)
numbers_check = Checkbutton(master=generate_pass_window, text='Numbers', variable=self.numbers_checked, pady=10)
numbers_check.config(bg=BACKGROUND_COLOUR, highlightthickness=0)
numbers_check.grid(row=2, column=0)
self.spinbox_pass_length = Spinbox(master=generate_pass_window, from_=8, to=128, width=5)
self.spinbox_pass_length.grid(row=3, column=0)
go_btn = Button(master=generate_pass_window, text='Go', bg=BACKGROUND_COLOUR, fg=FOREGROUND_COLOUR,
command=self.generate_password, pady=10)
go_btn.grid(row=4, column=1)
def generate_password(self):
symbols = self.symbols_checked.get()
letters = self.letters_checked.get()
numbers = self.numbers_checked.get()
pass_length = self.spinbox_pass_length.get()
password = password_generator.generate_password(has_symbols=bool(symbols),
has_letters=bool(letters),
has_numbers=bool(numbers),
pass_length=int(pass_length))
messagebox.showinfo(title='Password Generated!',
message=f'Password is copied to clipboard! \nYour password is: {password}')
def add_new_password_clicked(self):
self.create_add_new_password_window(master=self.main_window)
def create_add_new_password_window(self, master):
add_new_pass_window = Toplevel(master=master)
add_new_pass_window.title('Add a new password entry')
add_new_pass_window.config(padx=50, pady=50, bg=BACKGROUND_COLOUR)
tag_label = Label(master=add_new_pass_window, text='TAG: ', bg=BACKGROUND_COLOUR, fg=FOREGROUND_COLOUR, pady=20)
tag_label.grid(row=0, column=0)
self.add_new_tag = StringVar()
tag_entry = Entry(master=add_new_pass_window, width=30, textvariable=self.add_new_tag)
tag_entry.grid(row=0, column=1)
user_label = Label(master=add_new_pass_window, text='USERNAME: ', bg=BACKGROUND_COLOUR, fg=FOREGROUND_COLOUR,
pady=20)
user_label.grid(row=1, column=0)
self.add_new_username = StringVar()
user_entry = Entry(master=add_new_pass_window, width=30, textvariable=self.add_new_username)
user_entry.grid(row=1, column=1)
pass_label = Label(master=add_new_pass_window, text='PASSWORD: ', bg=BACKGROUND_COLOUR, fg=FOREGROUND_COLOUR,
pady=20)
pass_label.grid(row=2, column=0)
self.add_new_pass = StringVar()
pass_entry = Entry(master=add_new_pass_window, width=30, textvariable=self.add_new_pass)
pass_entry.grid(row=2, column=1)
add_pass_btn = Button(master=add_new_pass_window, text='Add this password', bg=BACKGROUND_COLOUR,
fg=FOREGROUND_COLOUR, pady=10, command=self.password_add_clicked)
add_pass_btn.grid(row=3, column=1)
def password_add_clicked(self):
tag_value = str(self.add_new_tag.get())
user_value = str(self.add_new_username.get())
pass_value = str(self.add_new_pass.get())
if tag_value and user_value and pass_value:
tag_value = tag_value.lower()
is_okay = messagebox.askokcancel(title='Confirm save?',
message=f'Are you sure you want to proceed with this info?\n' +
f'Tag: {tag_value}\n' +
f'Username: {user_value}\n' +
f'Password: {pass_value}')
if is_okay:
self.data_manager_obj.add_new_password(tag=tag_value, user=user_value, password=pass_value)
messagebox.showinfo(title='Success!',
message='The save operation was successful!')
# Refresh tags list in the main app screen
self.tags_option_menu['menu'].delete(0, "end")
for string in self.data_manager_obj.get_saved_password_tags():
self.tags_option_menu['menu'].add_command(label=string,
command=lambda value=string: self.select_tag.set(value))
self.add_new_tag.set('')
self.add_new_username.set('')
self.add_new_pass.set('')
else:
self.add_new_tag.set('')
self.add_new_username.set('')
self.add_new_pass.set('')
| 52.686957
| 120
| 0.63971
|
0df32adfefcb91aa84b30b396c1d2cc27ea86e72
| 85
|
cs
|
C#
|
TGK-RPG/Assets/Scripts/Item/Sub-Class/Equipment/Attribute/EquipmentAttribute.cs
|
hitnoodle/TGK-RPG
|
9f8bd59935b17f3226ca02660b2c16108793a5d5
|
[
"MIT"
] | 2
|
2018-08-14T01:40:46.000Z
|
2020-04-08T17:44:19.000Z
|
TGK-RPG/TGK-RPG/Assets/Scripts/Item/Sub-Class/Equipment/Attribute/EquipmentAttribute.cs
|
hitnoodle/TGK-RPG
|
9f8bd59935b17f3226ca02660b2c16108793a5d5
|
[
"MIT"
] | null | null | null |
TGK-RPG/TGK-RPG/Assets/Scripts/Item/Sub-Class/Equipment/Attribute/EquipmentAttribute.cs
|
hitnoodle/TGK-RPG
|
9f8bd59935b17f3226ca02660b2c16108793a5d5
|
[
"MIT"
] | 2
|
2018-09-05T09:58:19.000Z
|
2020-04-08T17:44:47.000Z
|
using UnityEngine;
using System.Collections;
public class EquipmentAttribute {
}
| 10.625
| 33
| 0.788235
|
dc0907f2257525eb8ba87823423a0c0e04ff515a
| 1,050
|
lua
|
Lua
|
tests/libOk.lua
|
burn/baby
|
480910e72f6d1a15e208458c0706320cbb433c8c
|
[
"BSD-2-Clause"
] | null | null | null |
tests/libOk.lua
|
burn/baby
|
480910e72f6d1a15e208458c0706320cbb433c8c
|
[
"BSD-2-Clause"
] | 4
|
2018-07-10T03:48:57.000Z
|
2018-07-13T23:08:51.000Z
|
tests/libOk.lua
|
burn/src
|
480910e72f6d1a15e208458c0706320cbb433c8c
|
[
"BSD-2-Clause"
] | null | null | null |
local ok=require("test").ok
local lib=require "lib"
ok { interpolate = function ()
assert(lib.interpolate(0, {1,2,4}, {10,20,40}) == 10)
assert(lib.interpolate(8, {1,2,4}, {10,20,40}) == 40)
assert(lib.interpolate(3, {1,2,4}, {10,20,40}) == 30)
end}
ok { shuffle = function()
local t= {}
lib.rseed(1)
for i = 1,9 do t[i] = i end
for _ = 1,10 do print( lib.join( lib.shuffle(t), "") ) end
end}
ok {sub= function()
assert(lib.sub("timm") == "timm")
assert(lib.sub("timm",2) == "imm")
assert(lib.sub("timm",2,3) == "im")
assert(lib.sub("timm",-1) == "m")
assert(lib.sub("aa",3,10) == "")
end}
ok {minmax= function()
assert(lib.min(2,3) == 2)
assert(lib.max(2,3) == 3)
end}
ok { deepcopy = function()
local b4 = {a={b={c={d=1},k=20}}, m=50}
local now=lib.copy(b4)
b4.a.b.c.d =10
assert( b4.a.b.c.d ~= now.a.b.c.d )
end }
ok { cols = function()
local t={ {"name", "age", "shoesize"},
{"tim Menzies", 20, 40},
{"jane", 2.312211, 20 } }
lib.cols(t, "%20.2f")
end}
| 22.826087
| 62
| 0.535238
|
e00cf882a203d8b70d9ca347f254c3a3de4ebc6f
| 2,105
|
h
|
C
|
src/tree_ai_player.h
|
fizixx/tic-tac-ai
|
131a7bee30be0a80232bfd6fdaf9da0cb0a6138b
|
[
"0BSD"
] | null | null | null |
src/tree_ai_player.h
|
fizixx/tic-tac-ai
|
131a7bee30be0a80232bfd6fdaf9da0cb0a6138b
|
[
"0BSD"
] | null | null | null |
src/tree_ai_player.h
|
fizixx/tic-tac-ai
|
131a7bee30be0a80232bfd6fdaf9da0cb0a6138b
|
[
"0BSD"
] | null | null | null |
// Copyright (c) 2015, Tiaan Louw
//
// Permission to use, copy, modify, and/or distribute this software for any
// purpose with or without fee is hereby granted, provided that the above
// copyright notice and this permission notice appear in all copies.
//
// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
// PERFORMANCE OF THIS SOFTWARE.
#ifndef TREE_AI_PLAYER_H_
#define TREE_AI_PLAYER_H_
#include "player.h"
class TreeAIPlayer : public Player {
public:
TreeAIPlayer();
~TreeAIPlayer() override;
// Set whether the player is training of playing.
void setIsTraining(bool isTraining);
size_t getMove(const Board& board, char you) override;
void reportWinner(const Board& board, WinType winType) override;
private:
struct BoardNode;
// Given a |parentNode|, find a child node who's board is the same as the
// given |board|.
BoardNode* findBoardInNode(BoardNode* parentNode, const Board& board) const;
// Play a move we haven't encountered before. |board| is the current board we
// are playing against.
void playUnknownMove(const Board& board);
size_t getBestMoveForNode(BoardNode* startingNode);
// Log the current move in our database of moves. Returns the newly inserted
// node.
BoardNode* logMove(BoardNode* node, const Board& board, size_t move,
char you);
// Log a move from the other player.
BoardNode* logOtherMove(BoardNode* node, const Board board);
// The root of all the board nodes.
BoardNode* m_rootNode;
// The current node we are looking at.
BoardNode* m_currentNode{nullptr};
// Whether the player is playing or training.
bool m_isTraining{false};
};
#endif // PLAYER_H_
| 33.412698
| 80
| 0.739192
|
a10a1b31a548becc24183386fb5e1708703f6be5
| 149
|
tsx
|
TypeScript
|
src/mailto/nls/Mailto.tsx
|
redaktor/widgets-preview
|
f15ba9ad3ac7761758849925d6dddf1e65c6218a
|
[
"MIT"
] | 3
|
2020-09-02T06:00:40.000Z
|
2021-12-18T15:54:09.000Z
|
src/mailto/nls/Mailto.tsx
|
redaktor/widgets-preview
|
f15ba9ad3ac7761758849925d6dddf1e65c6218a
|
[
"MIT"
] | 1
|
2020-08-28T15:50:09.000Z
|
2020-08-28T15:50:09.000Z
|
src/mailto/nls/Mailto.tsx
|
redaktor/widgets-preview
|
f15ba9ad3ac7761758849925d6dddf1e65c6218a
|
[
"MIT"
] | null | null | null |
const locales = {
de: () => import('./de/Mailto')
};
const messages = {
title: `Send an email to {name}`
};
export default { locales, messages };
| 16.555556
| 37
| 0.610738
|
f4574c4a46699d461e0c4b26ec847ecf328e677a
| 253
|
kt
|
Kotlin
|
player/src/main/java/com/tomasznajda/pulpfiction/event/player/PlayerEvent.kt
|
tomasznajda/pulpfiction
|
43b4db0659929cff71018be9109eb2f3ff10dbfa
|
[
"Apache-2.0"
] | 4
|
2018-07-26T07:41:56.000Z
|
2019-06-03T21:36:37.000Z
|
player/src/main/java/com/tomasznajda/pulpfiction/event/player/PlayerEvent.kt
|
tomasznajda/pulpfiction
|
43b4db0659929cff71018be9109eb2f3ff10dbfa
|
[
"Apache-2.0"
] | null | null | null |
player/src/main/java/com/tomasznajda/pulpfiction/event/player/PlayerEvent.kt
|
tomasznajda/pulpfiction
|
43b4db0659929cff71018be9109eb2f3ff10dbfa
|
[
"Apache-2.0"
] | 1
|
2019-01-20T20:46:45.000Z
|
2019-01-20T20:46:45.000Z
|
package com.tomasznajda.pulpfiction.event.player
import com.tomasznajda.pulpfiction.entity.PlayerState
sealed class PlayerEvent
data class PlayerInfo(val state: PlayerState) : PlayerEvent()
data class PlayerError(val error: Throwable) : PlayerEvent()
| 31.625
| 61
| 0.83004
|
beeaeb90dfde876120e3ea63e8de99d806a8ed9d
| 629
|
ts
|
TypeScript
|
src/app/gif.service.ts
|
Joeehab12/giphy-client
|
197f3ec211ab1ba160406fc75c446ddb3fe98dd6
|
[
"MIT"
] | null | null | null |
src/app/gif.service.ts
|
Joeehab12/giphy-client
|
197f3ec211ab1ba160406fc75c446ddb3fe98dd6
|
[
"MIT"
] | null | null | null |
src/app/gif.service.ts
|
Joeehab12/giphy-client
|
197f3ec211ab1ba160406fc75c446ddb3fe98dd6
|
[
"MIT"
] | null | null | null |
import { Injectable } from '@angular/core';
import { HttpClient } from '@angular/common/http';
@Injectable({
providedIn: 'root'
})
export class GIFService {
constructor(private http:HttpClient) { }
getGIFData(){
return this.http.get('https://api.giphy.com/v1/gifs/random?api_key=b9NHChW9WIKiKRQQR8zniLH35Csi4QHD');
}
getStickerData(){
return this.http.get('https://api.giphy.com/v1/stickers/random?api_key=b9NHChW9WIKiKRQQR8zniLH35Csi4QHD');
}
getSearchGIF(query:string){
return this.http.get('https://api.giphy.com/v1/gifs/translate?api_key=b9NHChW9WIKiKRQQR8zniLH35Csi4QHD' + '&s=' + query);
}
}
| 29.952381
| 125
| 0.72655
|
dbda67a88bf72cb3e8da4bcbd2c4be46f7a687b7
| 1,723
|
php
|
PHP
|
tests/ClientTest.php
|
imfurman/php-fcm
|
4924cadd467d46ea281c6f2b4335b59023af8b7e
|
[
"MIT"
] | 209
|
2016-05-26T10:32:38.000Z
|
2022-03-28T02:54:28.000Z
|
tests/ClientTest.php
|
imfurman/php-fcm
|
4924cadd467d46ea281c6f2b4335b59023af8b7e
|
[
"MIT"
] | 37
|
2016-06-13T17:11:32.000Z
|
2022-01-13T05:00:37.000Z
|
tests/ClientTest.php
|
imfurman/php-fcm
|
4924cadd467d46ea281c6f2b4335b59023af8b7e
|
[
"MIT"
] | 85
|
2016-05-31T15:01:59.000Z
|
2022-02-26T14:11:49.000Z
|
<?php
namespace paragraph1\phpFCM\Tests;
use paragraph1\phpFCM\Client;
use paragraph1\phpFCM\Recipient\Topic;
use paragraph1\phpFCM\Message;
use GuzzleHttp;
use GuzzleHttp\Psr7\Response;
class ClientTest extends PhpFcmTestCase
{
private $fixture;
protected function setUp()
{
parent::setUp();
$this->fixture = new Client();
}
public function testSendConstruesValidJsonForNotificationWithTopic()
{
$apiKey = 'key';
$headers = array(
'Authorization' => sprintf('key=%s', $apiKey),
'Content-Type' => 'application/json'
);
$guzzle = \Mockery::mock(\GuzzleHttp\Client::class);
$guzzle->shouldReceive('post')
->once()
->with(Client::DEFAULT_API_URL, array('headers' => $headers, 'body' => '{"to":"\\/topics\\/test","priority":"high"}'))
->andReturn(\Mockery::mock(Response::class));
$this->fixture->injectHttpClient($guzzle);
$this->fixture->setApiKey($apiKey);
$message = new Message();
$message->addRecipient(new Topic('test'));
$this->fixture->send($message);
}
public function testProxyUriOverridesDefaultUrl()
{
$proxy = 'my_nice_proxy_around_that_server';
$this->fixture->setProxyApiUrl($proxy);
$guzzle = \Mockery::mock(\GuzzleHttp\Client::class);
$guzzle->shouldReceive('post')
->once()
->with($proxy, \Mockery::any())
->andReturn(\Mockery::mock(Response::class));
$this->fixture->injectHttpClient($guzzle);
$message = new Message();
$message->addRecipient(new Topic('test'));
$this->fixture->send($message);
}
}
| 28.245902
| 130
| 0.597795
|
5ef5eb2a0e8f68bd249f8dcd272d76b5f57316ee
| 9,119
|
php
|
PHP
|
application/views/content/asisten.php
|
CynthiaBudiono/praktikum_ukp
|
08347473ace0efbf460c92db37c48121204f5dd4
|
[
"MIT"
] | null | null | null |
application/views/content/asisten.php
|
CynthiaBudiono/praktikum_ukp
|
08347473ace0efbf460c92db37c48121204f5dd4
|
[
"MIT"
] | null | null | null |
application/views/content/asisten.php
|
CynthiaBudiono/praktikum_ukp
|
08347473ace0efbf460c92db37c48121204f5dd4
|
[
"MIT"
] | null | null | null |
<!-- page content -->
<div class="right_col" role="main">
<div class="">
<div class="page-title">
<div class="title_left">
<h3><?= isset($title) ? $title : "-" ?> <small>Informatika</small></h3>
</div>
</div>
<div class="clearfix"></div>
<div class="col-md-12 col-sm-12">
<div class="x_panel">
<div class="x_title">
<h2 id="action_title">Add</h2>
<ul class="nav navbar-right panel_toolbox">
<li><a class="collapse-link"><i class="fa fa-chevron-down" id='collapse-add'></i></a>
</li>
<li><a class="close-link"><i class="fa fa-close"></i></a>
</li>
</ul>
<div class="clearfix"></div>
</div>
<div class="x_content" id="content-add" style="display: none;">
<br />
<form class="form-horizontal form-label-left">
<input type="hidden" class="form-control" name="mode" id="mode" value="add">
<input type="hidden" class="form-control" name="id" id="id" required value="<?= (isset($detil[0]['id'])) ? $detil[0]['id'] : '' ?>">
<div class="form-group row">
<label class="control-label col-md-3 col-sm-3 ">Tipe</label>
<div class="col-md-9 col-sm-9 ">
<select class="form-control" id="selecttipeasisten" onchange="getdetail()">
<option value="">--Choose option--</option>
<option value="asisten_dosen">Asisten Dosen</option>
<option value="asisten_tetap">Asisten Tetap</option>
</select>
</div>
</div>
<div class="form-group row">
<label class="control-label col-md-3 col-sm-3 ">NRP</label>
<div class="col-md-9 col-sm-9 ">
<input type="text" class="form-control" name="nrp" id="nrp" placeholder="ex. search nrp" required>
</div>
</div>
<div class="form-group row">
<label class="control-label col-md-3 col-sm-3 "></label>
<div class="col-md-9 col-sm-9">
<div class="container border">
nama
id_pendaftaran asisten
data lengkap
daftar tanggal brp
</div>
</div>
</div>
<div class="form-group row" id="divstatus">
<label class="control-label col-md-3 col-sm-3 ">Status</label>
<div class="col-md-9 col-sm-9 ">
<div class="">
<label>
<input type="checkbox" name="status" id="status" class="toggle-switch" checked/>
</label>
</div>
</div>
</div>
<div class="ln_solid"></div>
<div class="form-group">
<div class="col-md-9 col-sm-9">
<!-- <button type="button" class="btn btn-danger">Cancel</button> -->
<button type="reset" class="btn btn-warning">Reset</button>
<button type="button" class="btn btn-success" id="btnsubmit" onclick="addupdate()"><a href="#data_table" style="color: white;">Submit</a></button>
</div>
</div>
</form>
</div> <!-- /x_content -->
</div> <!-- /x_panel -->
</div> <!-- /col-md -->
<!-- VIEW -->
<div class="col-md-12 col-sm-12 ">
<div class="x_panel">
<div class="x_title">
<h2><?= isset($title) ? $title : "-" ?></h2>
<ul class="nav navbar-right panel_toolbox">
<li><a class="collapse-link"><i class="fa fa-chevron-up"></i></a></li>
<li><a class="close-link"><i class="fa fa-close"></i></a></li>
</ul>
<div class="clearfix"></div>
</div>
<div>
<!-- <a class="btn btn-sm bg-green" href="<?php echo base_url("asisten/adds"); ?>">Tambah</a> -->
</div>
<div class="x_content">
<div class="row">
<div class="col-sm-12">
<div class="card-box table-responsive">
<table id="datatable-buttons" class="table table-striped table-bordered" style="width:100%">
<thead>
<tr>
<th>Actions</th>
<th>id</th>
<th>NRP</th>
<th>nama</th>
<th>status</th>
<th>Tanggal Diterima</th>
<th>keterangan</th>
<th>periode pendaftaran</th>
</tr>
</thead>
<tbody>
<?php if(isset($asisten)) : ?>
<?php if(is_array($asisten)) : ?>
<?php foreach($asisten as $key) : ?>
<tr>
<td>
<!-- <a href="#" class="btn btn-primary btn-sm btn-action"><i class="fa fa-folder"></i> View </a> -->
<a href="<?php echo base_url("asisten/updates/"); echo base64_encode($key['id']);?>" class="btn btn-info btn-sm btn-action"><i class="fa fa-pencil"></i> Edit </a>
<a href="#" class="btn btn-danger btn-sm btn-action"><i class="fa fa-trash-o"></i> Delete </a>
</td>
<td><?= (isset($key['id'])) ? $key['id'] : '' ?></td>
<td><?= (isset($key['NRP'])) ? $key['NRP'] : '' ?></td>
<td><?= (isset($key['nama_mahasiswa'])) ? $key['nama_mahasiswa'] : '' ?></td>
<td>
<?php
if(isset($key['status'])) if($key['status']==1) echo '<span class="badge bg-green">active</span>'; else echo '<span class="badge bg-danger">non active</span>';?>
</td>
<td><?= (isset($key['tanggal_diterima'])) ? $key['tanggal_diterima'] : '' ?></td>
<td><?= (isset($key['keterangan'])) ? $key['keterangan'] : '' ?></td>
<td><?php if(isset($key['semester_pendaftaran_asdos'])) if($key['semester_pendaftaran_asdos'] == 1) echo 'Ganjil'; else echo 'Genap';?> <?= (isset($key['tahun_ajaran_pendaftaran_asdos'])) ? $key['tahun_ajaran_pendaftaran_asdos'] : '' ?></td>
</tr>
<?php endforeach; ?>
<?php endif; ?>
<?php endif; ?>
</tbody>
</table>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
<script>
var baseurl = "<?php echo base_url(); ?>";
// view();
$(document).ready(function() {
// alert("masukkkkkkkk ready");
view()
});
function getdetail(){
alert("masuk");
alert($("#selecttipeasisten").val());
$.post(baseurl + "asisten/getdetail", {
nrp: $('#nrp').val(),
},
function(result) {
// if(asisten_dosen) -> getvalue()
// if asisten tetap -> get value angkatan
});
}
function view(){
}
</script>
| 49.559783
| 285
| 0.354534
|
5456d5d3efdedc624ac6e7f2df77b5ad50d5421e
| 835
|
swift
|
Swift
|
MetabolicCompass/DataSources/ChartCollectionDelegate.swift
|
yanif/circator
|
d4293dffe8f8b3931478aa521ffb43b7164967b4
|
[
"Apache-2.0"
] | 3
|
2016-10-06T01:06:58.000Z
|
2017-09-08T14:08:01.000Z
|
MetabolicCompass/DataSources/ChartCollectionDelegate.swift
|
yanif/circator
|
d4293dffe8f8b3931478aa521ffb43b7164967b4
|
[
"Apache-2.0"
] | 53
|
2016-02-29T19:18:16.000Z
|
2016-07-02T07:20:51.000Z
|
MetabolicCompass/DataSources/ChartCollectionDelegate.swift
|
twoolf/circator
|
d4293dffe8f8b3931478aa521ffb43b7164967b4
|
[
"Apache-2.0"
] | 2
|
2016-04-18T13:15:51.000Z
|
2016-08-03T17:27:58.000Z
|
//
// ChartCollectionDelegate.swift
// ChartsMC
//
// Created by Artem Usachov on 6/1/16.
// Copyright © 2016 SROST. All rights reserved.
//
import Foundation
import UIKit
class ChartCollectionDelegate: NSObject, UICollectionViewDelegateFlowLayout, UICollectionViewDelegate {
func collectionView(collectionView: UICollectionView, layout collectionViewLayout: UICollectionViewLayout, sizeForItemAtIndexPath indexPath: NSIndexPath) -> CGSize {
return CGSizeMake(CGRectGetWidth(collectionView.frame) - 20.0, 196)
}
func collectionView(collectionView: UICollectionView, canFocusItemAtIndexPath indexPath: NSIndexPath) -> Bool {
return false
}
func collectionView(collectionView: UICollectionView, shouldSelectItemAtIndexPath indexPath: NSIndexPath) -> Bool {
return false
}
}
| 34.791667
| 169
| 0.759281
|
216fa994733480ac2a3932f4c1abee139e323e0a
| 544
|
js
|
JavaScript
|
tests/helpers/in_array.spec.js
|
GoGoCarl/chrome-wakatime
|
d315551e4c0c041c2f7b900ec7e9931e28ad0d72
|
[
"BSD-3-Clause"
] | 2
|
2021-09-26T07:31:03.000Z
|
2022-01-09T10:44:44.000Z
|
tests/helpers/in_array.spec.js
|
GoGoCarl/chrome-wakatime
|
d315551e4c0c041c2f7b900ec7e9931e28ad0d72
|
[
"BSD-3-Clause"
] | null | null | null |
tests/helpers/in_array.spec.js
|
GoGoCarl/chrome-wakatime
|
d315551e4c0c041c2f7b900ec7e9931e28ad0d72
|
[
"BSD-3-Clause"
] | 1
|
2017-10-17T16:06:32.000Z
|
2017-10-17T16:06:32.000Z
|
var chai = require('chai');
var expect = chai.expect;
//import in_array from '../../assets/js/helpers/in_array';
describe('in_array', function() {
it('should be a function', function() {
expect(in_array).to.be.a('function');
});
it('should find the needle and return true', function() {
expect(in_array('4', ['4', '3', '2', '1'])).to.equal(true);
});
it('should not find the needle and it should return false', function() {
expect(in_array('5', ['4', '3', '2', '1'])).to.equal(false);
});
});
| 30.222222
| 76
| 0.575368
|
8c2ecffa9446ba1b53d5e50f27f719e00892dce0
| 2,266
|
swift
|
Swift
|
Tests/XcodeProjTests/Workspace/XCWorkspaceDataElementTests.swift
|
nicktrienensfuzz/XcodeProj
|
1cb1b7db3cd16decf1b87b0ab68532df0fbe2d66
|
[
"MIT"
] | 619
|
2019-08-04T16:50:50.000Z
|
2022-03-31T16:42:52.000Z
|
Tests/XcodeProjTests/Workspace/XCWorkspaceDataElementTests.swift
|
nicktrienensfuzz/XcodeProj
|
1cb1b7db3cd16decf1b87b0ab68532df0fbe2d66
|
[
"MIT"
] | 187
|
2018-07-28T15:36:59.000Z
|
2019-08-01T06:19:07.000Z
|
Tests/XcodeProjTests/Workspace/XCWorkspaceDataElementTests.swift
|
nicktrienensfuzz/XcodeProj
|
1cb1b7db3cd16decf1b87b0ab68532df0fbe2d66
|
[
"MIT"
] | 135
|
2019-08-04T21:32:35.000Z
|
2022-03-31T13:18:53.000Z
|
import Foundation
import PathKit
import XcodeProj
import XCTest
final class XCWorkspaceDataElementTests: XCTestCase {
func test_location_when_group() {
let location: XCWorkspaceDataElementLocationType = .absolute("/path/to/group")
let group = XCWorkspaceDataGroup(location: location,
name: "group",
children: [])
let element = XCWorkspaceDataElement.group(group)
XCTAssertEqual(element.location, location)
}
func test_location_when_file() {
let location: XCWorkspaceDataElementLocationType = .absolute("/path/to/file.swift")
let file = XCWorkspaceDataFileRef(location: location)
let element = XCWorkspaceDataElement.file(file)
XCTAssertEqual(element.location, location)
}
func test_equatable_when_unequal_data_elements() {
// Given
let location: XCWorkspaceDataElementLocationType = .absolute("/path/to/file.swift")
let file = XCWorkspaceDataFileRef(location: location)
let element = XCWorkspaceDataElement.file(file)
// When
let firstWorkspace = XCWorkspace(data: .init(children: [element]))
let secondWorkspace = XCWorkspace(data: .init(children: []))
// Then
XCTAssertNotEqual(firstWorkspace, secondWorkspace)
}
func test_equatable_when_equal_data_elements() {
// Given
let groupLocation: XCWorkspaceDataElementLocationType = .absolute("/path/to/group")
let group = XCWorkspaceDataGroup(location: groupLocation,
name: "group",
children: [])
let elementOne = XCWorkspaceDataElement.group(group)
let fileLocation: XCWorkspaceDataElementLocationType = .absolute("/path/to/file.swift")
let file = XCWorkspaceDataFileRef(location: fileLocation)
let elementTwo = XCWorkspaceDataElement.file(file)
// When
let firstWorkspace = XCWorkspace(data: .init(children: [elementOne, elementTwo]))
let secondWorkspace = XCWorkspace(data: .init(children: [elementOne, elementTwo]))
// Then
XCTAssertEqual(firstWorkspace, secondWorkspace)
}
}
| 38.40678
| 95
| 0.651809
|
c67f16fcf8196185a33ef08c6f5f365f5fc2c4ae
| 2,281
|
css
|
CSS
|
src/main/webapp/WEB-INF/static/css/site-style.css
|
ipelovski/emsbd
|
e1b1efdb4d00bf0b8de4326ccd435f441ef2eb2a
|
[
"MIT"
] | null | null | null |
src/main/webapp/WEB-INF/static/css/site-style.css
|
ipelovski/emsbd
|
e1b1efdb4d00bf0b8de4326ccd435f441ef2eb2a
|
[
"MIT"
] | null | null | null |
src/main/webapp/WEB-INF/static/css/site-style.css
|
ipelovski/emsbd
|
e1b1efdb4d00bf0b8de4326ccd435f441ef2eb2a
|
[
"MIT"
] | null | null | null |
body {
background-color: lightgoldenrodyellow;
color: darkgreen;
font-family: sans-serif;
margin: 0;
}
.header {
position: fixed;
width: 100%;
margin: 0;
padding: 0;
top: 0;
left: 0;
right: 0;
background-color: lightgoldenrodyellow;
}
.content {
background-color: beige;
border-radius: 10px;
border-color: yellowgreen;
border-style: solid;
padding: 0.5rem;
margin: 0.5rem;
margin-top: 106px;
}
.icon {
height: 16px;
width: 16px;
display: inline-block;
vertical-align: text-top;
}
.icon > svg {
height: 16px;
width: 16px;
display: inline-block;
}
.top-menu {
background-color: yellowgreen;
color: darkolivegreen;/*lightgoldenrodyellow*/
padding: 1rem;
}
.top-menu a {
color: darkolivegreen;
text-decoration: none;
border-bottom: darkolivegreen dashed 1px;
}
.caps {
text-transform: uppercase;
}
.menu a {
color: darkolivegreen;
text-decoration: none;
}
.menu-item {
background-color: beige;
border-radius: 10px;
border-color: yellowgreen;
border-style: solid;
padding: 0.5rem;
margin: 0.5rem;
color: darkolivegreen;
text-align: center;
}
.menu-item .icon > svg {
fill: darkolivegreen;
}
.content a {
color: darkgreen;
text-decoration: none;
border-bottom: darkgreen dashed 1px;
}
.list, .listwh /*list with a header*/ {
list-style: none;
padding: 0;
text-align: center;
}
.list > li, .listwh > li {
margin-top: 0.5em;
}
.list > li:first-child, .listwh > li.list-header {
font-weight: bold;
border-bottom: yellowgreen solid 2px;
padding-bottom: 0.5em;
}
.list:not(:last-of-type), .listwh:not(:last-of-type) {
border-bottom: yellowgreen solid 2px;
padding-bottom: 0.5em;
margin-bottom: 2em;
}
.sequence {
list-style: none;
padding: 0;
margin: 0 0 5px 0;
}
.sequence > li {
display: inline;
}
.sequence > li:not(:last-of-type):after {
content: ',';
}
.breadcrumbs {
list-style: none;
padding: 0;
margin: 0 0 5px 0;
}
.breadcrumbs > li {
display: inline;
}
.breadcrumbs > li:not(:last-of-type):after {
content: '>';
}
select.being-late {
background-color: yellow;
}
select.absent {
background-color: orange;
}
| 19.330508
| 54
| 0.625164
|
a37f466f2f3bdd7784714c8f3490baae8ffab8aa
| 480
|
java
|
Java
|
src_6/org/benf/cfr/tests/LoopTest26.java
|
Marcono1234/cfr_tests
|
f043e01f14efb987a2f6b6a3c656c9d5f67635bd
|
[
"MIT"
] | 8
|
2019-06-05T11:02:56.000Z
|
2021-04-25T01:41:18.000Z
|
src_6/org/benf/cfr/tests/LoopTest26.java
|
Marcono1234/cfr_tests
|
f043e01f14efb987a2f6b6a3c656c9d5f67635bd
|
[
"MIT"
] | 9
|
2019-08-22T12:32:13.000Z
|
2022-02-24T07:09:24.000Z
|
src_6/org/benf/cfr/tests/LoopTest26.java
|
Marcono1234/cfr_tests
|
f043e01f14efb987a2f6b6a3c656c9d5f67635bd
|
[
"MIT"
] | 5
|
2020-02-01T01:51:23.000Z
|
2022-02-22T15:01:33.000Z
|
package org.benf.cfr.tests;
import java.util.Iterator;
import java.util.List;
public class LoopTest26 {
public boolean cannotMoveIteratorVariableDeclaration(List<Integer> l2) {
Iterator<Integer> i$ = l2.iterator();
while (i$.hasNext()) {
final Integer i2 = i$.next();
System.out.println(i2);
System.out.println(i2 == 4 ? i$ : "b");
}
/*
* blah
*
*/
return true;
}
}
| 21.818182
| 76
| 0.535417
|
3b8b143ca592db17f7f9e151ef0eb2b9fff9709d
| 19,325
|
sql
|
SQL
|
src/test/resources/data.sql
|
LimNoah/backend
|
ffa15625ee33112c161737ff104e3ef0afd57f71
|
[
"MIT"
] | null | null | null |
src/test/resources/data.sql
|
LimNoah/backend
|
ffa15625ee33112c161737ff104e3ef0afd57f71
|
[
"MIT"
] | null | null | null |
src/test/resources/data.sql
|
LimNoah/backend
|
ffa15625ee33112c161737ff104e3ef0afd57f71
|
[
"MIT"
] | null | null | null |
-- board
insert into board (board_id, cnt, content, regdate, title, writer) values(
null,
7093,
'주문 취소는 제품 배송 전일 오후 6시까지 고객 행복 센터(1644-1107) / 1:1 문의 게시판 / 카카오톡(마켓컬리)로 접수 부탁드립니다.\n
- 오후 6시 이후에는 주문 취소가 불가합니다. (예약딜 포함)\n
- 고객 행복 센터(1644-1107) 운영 종료 시간인 오후 4시 이후부터는 1:1문의 게시판 접수만 가능합니다.\n
- 예약 상품은 배송 3~4일 전에만 주문 취소가 가능합니다.\n
- 정확한 처리를 위해 주문번호는 필수로 입력해주세요.\n\n
* 비회원 주문건의 경우 1:1 접수가 불가하기에, 취소 및 주분변경 관련하여\n
배송 전일 오후 4시까지 고객 행복 센터(1644-1107)/카카오톡(마켓컬리)로 접수해주시면 확인하여 조치해드립니다.',
'2015-05-21',
'취소/반품/환불요청은 어떻게 하나요?',
'MarketKurly'
);
insert into board (board_id, cnt, content, regdate, title, writer) values(
null,
6064,
'1/12일 전국 배송이 시작하게 됨에 따라 사용하고 있던 시스템을 이전(카페24->고도몰)하게 되었습니다.\n\n
기존 시스템을 그대로 이용하면서 일정 부분을 변경하는 것이라면, 비번의 변경이 필요 없겠지만, 기존에 사용하던 시스템에서 완전히 체계가 다른 시스템으로 사이트가 이전을 하다보니, 기존 사이트에서의 정보를 이전하는 작업이 필요하였습니다.\n\n
정보보호를 위해 고객님의 비번은 저장되지 않으며, 관리자라 할지라고 정보를 조회하거나 트래킹할 수 없기에 신규사이트로 정보를 이전을 하면서 부득이하게 임시번호를 부여해 드렸음을 알려드립니다.\n\n
(관련하여 1/11~1/12일 기간에 회원가입 시 등록해 주신 휴대폰으로 관련 정보에 대해 SMS 발송 드렸습니다.)\n\n
SMS를 수신하기 못하셨거나 추가적으로 궁금한 점이 있으실 경우,\n
고객센터로 연락주시거나, 통화연결이 어려우신 경우 카카오톡 아이디(ID:kurly)로 문의해주세요\n
성심껏 답변드리도록 하겠습니다.\n
감사합니다.\n\n
마켓컬리 일동.',
'2016-01-12',
'시스템 이전(1/12)에 따른 비밀번호 일괄 변경에 관한 공지',
'MarketKurly'
);
insert into board (board_id, cnt, content, regdate, title, writer) values(
null,
2828,
'4/5일 도입한 에코박스의 종료 시점관련하여 안내드립니다.\n\n\n
- 종료 시점\n\n
~5/15 23:00까지(5/16 수령 샛별배송 주문건까지 에코박스 포장)\n\n
* 상품 품질에 영향을 미칠 수 있을 정도로 급격한 온도변화가 있을 경우 종료 시점 이전에 포장법이 변경될 수 있습니다.\n\n\n
종료 이후 새로운 에코박스 도입을 통해 고객님들꼐서 만족할 수 있도록 노력하는 컬리가 되도록 하겠습니다.\n\n
맛있고 행복한 하루 되세요!\n\n',
'2017-04-06',
'에코박스 도입에 따른 한시적 포장 방법 변경 공지(종료 시점 확정)',
'MarketKurly'
);
insert into board (board_id, cnt, content, regdate, title, writer) values(
null,
7190,
'3월 23부터 주문 취소 가능시간이 오후 9시에서 오후 6시로 변경됩니다. 즉 전날 밤 11시부터 주문하신 모든 건은 오후 6시까지만 취소가 가능하며 오후 6시 이후에 주문해 주신 건의 경우 단순변심으로 인한 취소는 불가능합니다.\n
마켓컬리는 밤 11시까지만 주문하시면 아침 7시까지 배송해드리는 국내에서 가장 빠른 배송을 선보이고 있는데요, 빠르고 정확한 배송을 위해 오후 6시부터는 입고된 상품의 포장이 시작되며 송장이 출력됩니다.\n
송장 출력이 완료된 주문의 경우 취소 처리가 어려운 점 너그러운 양해 부탁 드립니다.\n\n
언제나 더 좋은 서비스를 만들기 위해 노력 하겠습니다.\n\n
감사합니다\n',
'2016-03-22',
'주문취소마감 시간 변경 공지',
'MarketKurly'
);
insert into board (board_id, cnt, content, regdate, title, writer) values(
null,
2223,
'현행 보냉수준 유짓 일부 상품 냉해 우려가 되고, 필요 이상의 포장재&보냉재를 사용하게 됨에 따라 동절기 동안 아래와 같이 변경하고자 하오니 이용에 참고 부탁드립니다.\n\n" +
# 시행일자 11/28(화)수령건\n
# 동절기 포장 적용 시 변경사함\n
- 아이스젤 & 드라이아이스 수량 조정\n
- 1차 포장 일부 상품 해제\n
: 보냉갱화를 위해 은박파우치로 보냉포장 했던 육류 등 일부 상품은 1차 포장 해제 예정.\n
cf) 해산물 등 극신건 상품에 대해서는 1차 포장 유지\n\n\n\n
좋은 품질과 서비스로 찾아뵙기 위해 항상 노력하겠습니다.\n
추운 날씨 건강 유념하시고, 늘 행복하세요.\n\n
감사합니다.\n
마켓컬리 드림.',
'2017-11-24',
'[공지]동절기 포장 적용 안내(11/28 수령건~)',
'MarketKurly'
);
-- goods
insert into product (product_id, category, original_price, detail_img_url, detail_context, product_img_url, name, brand_title, contactant, delivery_time_type_text, detail_image_url,
discount_end_datetime, discount_percent, discounted_price, expiration_date, is_sales, list_image_url, main_image_url, origin, original_image_url, packing_type_text, short_description,
sticker_image_url, unit_text, weight, guides)
values (
3,
0,
3300,
'https://img-cf.kurly.com/shop/data/goods/1609141191801m0.jpg',
'',
'',
'친환경 당근 500g',
'',
'',
'0,1',
'https://img-cf.kurly.com/shop/data/goods/1609141191801m0.jpg',
'',
5,
3135,
'',
true,
'https://img-cf.kurly.com/shop/data/goods/1609141191486s0.jpg',
'https://img-cf.kurly.com/shop/data/goods/1609141191365i0.jpg',
'국내',
'https://img-cf.kurly.com/shop/data/goods/1609141186826l0.jpg',
'냉장/종이포장',
'껍질째 먹을 수 있는 친환경 흙당근 (500g 내외)',
'https://img-cf.kurly.com/shop/data/my_icon/icon_farming_coupon_20_percent.png',
'1봉지',
'500g(2~4개입)',
'["식품 특성상 중량은 5% 내외의 차이가 발생할 수 있습니다.", "해당 상품은 여러 협력업체에서 납품하고 있는 상품으로 수령 시, 상이한 협력업체의 상품을 받아보실 수 있습니다. 품질은 동일 기준으로 입고되고 있사오니 참고 부탁드립니다."]'
);
insert into product (product_id, category, original_price, detail_img_url, detail_context, product_img_url, name, brand_title, contactant, delivery_time_type_text, detail_image_url,
discount_end_datetime, discount_percent, discounted_price, expiration_date, is_sales, list_image_url, main_image_url, origin, original_image_url, packing_type_text, short_description,
sticker_image_url, unit_text, weight, guides)
values (
6,
0,
3300,
'https://img-cf.kurly.com/shop/data/goods/1609141191801m0.jpg',
'',
'',
'친환경 당근 500g',
'',
'',
'0,1',
'https://img-cf.kurly.com/shop/data/goods/1609141191801m0.jpg',
'',
5,
3135,
'',
true,
'https://img-cf.kurly.com/shop/data/goods/1609141191486s0.jpg',
'https://img-cf.kurly.com/shop/data/goods/1609141191365i0.jpg',
'국내',
'https://img-cf.kurly.com/shop/data/goods/1609141186826l0.jpg',
'냉장/종이포장',
'껍질째 먹을 수 있는 친환경 흙당근 (500g 내외)',
'https://img-cf.kurly.com/shop/data/my_icon/icon_farming_coupon_20_percent.png',
'1봉지',
'500g(2~4개입)',
'["식품 특성상 중량은 5% 내외의 차이가 발생할 수 있습니다.", "해당 상품은 여러 협력업체에서 납품하고 있는 상품으로 수령 시, 상이한 협력업체의 상품을 받아보실 수 있습니다. 품질은 동일 기준으로 입고되고 있사오니 참고 부탁드립니다."]'
);
insert into product (product_id, category, original_price, detail_img_url, detail_context, product_img_url, name, brand_title, contactant, delivery_time_type_text, detail_image_url,
discount_end_datetime, discount_percent, discounted_price, expiration_date, is_sales, list_image_url, main_image_url, origin, original_image_url, packing_type_text, short_description,
sticker_image_url, unit_text, weight, guides)
values (
9,
0,
3300,
'https://img-cf.kurly.com/shop/data/goods/1609141191801m0.jpg',
'',
'',
'친환경 당근 500g',
'',
'',
'0,1',
'https://img-cf.kurly.com/shop/data/goods/1609141191801m0.jpg',
'',
5,
3135,
'',
true,
'https://img-cf.kurly.com/shop/data/goods/1609141191486s0.jpg',
'https://img-cf.kurly.com/shop/data/goods/1609141191365i0.jpg',
'국내',
'https://img-cf.kurly.com/shop/data/goods/1609141186826l0.jpg',
'냉장/종이포장',
'껍질째 먹을 수 있는 친환경 흙당근 (500g 내외)',
'https://img-cf.kurly.com/shop/data/my_icon/icon_farming_coupon_20_percent.png',
'1봉지',
'500g(2~4개입)',
'["식품 특성상 중량은 5% 내외의 차이가 발생할 수 있습니다.", "해당 상품은 여러 협력업체에서 납품하고 있는 상품으로 수령 시, 상이한 협력업체의 상품을 받아보실 수 있습니다. 품질은 동일 기준으로 입고되고 있사오니 참고 부탁드립니다."]'
);
insert into product (product_id, category, original_price, detail_img_url, detail_context, product_img_url, name, brand_title, contactant, delivery_time_type_text, detail_image_url,
discount_end_datetime, discount_percent, discounted_price, expiration_date, is_sales, list_image_url, main_image_url, origin, original_image_url, packing_type_text, short_description,
sticker_image_url, unit_text, weight, guides)
values (
12,
0,
3300,
'https://img-cf.kurly.com/shop/data/goods/1609141191801m0.jpg',
'',
'',
'친환경 당근 500g',
'',
'',
'0,1',
'https://img-cf.kurly.com/shop/data/goods/1609141191801m0.jpg',
'',
5,
3135,
'',
true,
'https://img-cf.kurly.com/shop/data/goods/1609141191486s0.jpg',
'https://img-cf.kurly.com/shop/data/goods/1609141191365i0.jpg',
'국내',
'https://img-cf.kurly.com/shop/data/goods/1609141186826l0.jpg',
'냉장/종이포장',
'껍질째 먹을 수 있는 친환경 흙당근 (500g 내외)',
'https://img-cf.kurly.com/shop/data/my_icon/icon_farming_coupon_20_percent.png',
'1봉지',
'500g(2~4개입)',
'["식품 특성상 중량은 5% 내외의 차이가 발생할 수 있습니다.", "해당 상품은 여러 협력업체에서 납품하고 있는 상품으로 수령 시, 상이한 협력업체의 상품을 받아보실 수 있습니다. 품질은 동일 기준으로 입고되고 있사오니 참고 부탁드립니다."]'
);
insert into product (product_id, category, original_price, detail_img_url, detail_context, product_img_url, name, brand_title, contactant, delivery_time_type_text, detail_image_url,
discount_end_datetime, discount_percent, discounted_price, expiration_date, is_sales, list_image_url, main_image_url, origin, original_image_url, packing_type_text, short_description,
sticker_image_url, unit_text, weight, guides)
values (
15,
0,
3300,
'https://img-cf.kurly.com/shop/data/goods/1609141191801m0.jpg',
'',
'',
'친환경 당근 500g',
'',
'',
'0,1',
'https://img-cf.kurly.com/shop/data/goods/1609141191801m0.jpg',
'',
5,
3135,
'',
true,
'https://img-cf.kurly.com/shop/data/goods/1609141191486s0.jpg',
'https://img-cf.kurly.com/shop/data/goods/1609141191365i0.jpg',
'국내',
'https://img-cf.kurly.com/shop/data/goods/1609141186826l0.jpg',
'냉장/종이포장',
'껍질째 먹을 수 있는 친환경 흙당근 (500g 내외)',
'https://img-cf.kurly.com/shop/data/my_icon/icon_farming_coupon_20_percent.png',
'1봉지',
'500g(2~4개입)',
'["식품 특성상 중량은 5% 내외의 차이가 발생할 수 있습니다.", "해당 상품은 여러 협력업체에서 납품하고 있는 상품으로 수령 시, 상이한 협력업체의 상품을 받아보실 수 있습니다. 품질은 동일 기준으로 입고되고 있사오니 참고 부탁드립니다."]'
);
insert into product (product_id, category, original_price, detail_img_url, detail_context, product_img_url, name, brand_title, contactant, delivery_time_type_text, detail_image_url,
discount_end_datetime, discount_percent, discounted_price, expiration_date, is_sales, list_image_url, main_image_url, origin, original_image_url, packing_type_text, short_description,
sticker_image_url, unit_text, weight, guides)
values (
18,
0,
3300,
'https://img-cf.kurly.com/shop/data/goods/1609141191801m0.jpg',
'',
'',
'친환경 당근 500g',
'',
'',
'0,1',
'https://img-cf.kurly.com/shop/data/goods/1609141191801m0.jpg',
'',
5,
3135,
'',
true,
'https://img-cf.kurly.com/shop/data/goods/1609141191486s0.jpg',
'https://img-cf.kurly.com/shop/data/goods/1609141191365i0.jpg',
'국내',
'https://img-cf.kurly.com/shop/data/goods/1609141186826l0.jpg',
'냉장/종이포장',
'껍질째 먹을 수 있는 친환경 흙당근 (500g 내외)',
'https://img-cf.kurly.com/shop/data/my_icon/icon_farming_coupon_20_percent.png',
'1봉지',
'500g(2~4개입)',
'["식품 특성상 중량은 5% 내외의 차이가 발생할 수 있습니다.", "해당 상품은 여러 협력업체에서 납품하고 있는 상품으로 수령 시, 상이한 협력업체의 상품을 받아보실 수 있습니다. 품질은 동일 기준으로 입고되고 있사오니 참고 부탁드립니다."]'
);
insert into product (product_id, category, original_price, detail_img_url, detail_context, product_img_url, name, brand_title, contactant, delivery_time_type_text, detail_image_url,
discount_end_datetime, discount_percent, discounted_price, expiration_date, is_sales, list_image_url, main_image_url, origin, original_image_url, packing_type_text, short_description,
sticker_image_url, unit_text, weight, guides)
values (
21,
0,
3300,
'https://img-cf.kurly.com/shop/data/goods/1609141191801m0.jpg',
'',
'',
'친환경 당근 500g',
'',
'',
'0,1',
'https://img-cf.kurly.com/shop/data/goods/1609141191801m0.jpg',
'',
5,
3135,
'',
true,
'https://img-cf.kurly.com/shop/data/goods/1609141191486s0.jpg',
'https://img-cf.kurly.com/shop/data/goods/1609141191365i0.jpg',
'국내',
'https://img-cf.kurly.com/shop/data/goods/1609141186826l0.jpg',
'냉장/종이포장',
'껍질째 먹을 수 있는 친환경 흙당근 (500g 내외)',
'https://img-cf.kurly.com/shop/data/my_icon/icon_farming_coupon_20_percent.png',
'1봉지',
'500g(2~4개입)',
'["식품 특성상 중량은 5% 내외의 차이가 발생할 수 있습니다.", "해당 상품은 여러 협력업체에서 납품하고 있는 상품으로 수령 시, 상이한 협력업체의 상품을 받아보실 수 있습니다. 품질은 동일 기준으로 입고되고 있사오니 참고 부탁드립니다."]'
);
-- member
insert into member (member_id, uid, password, name, email, phone, gender, date_of_birth, grade, check_term, check_sns, total_cost, role, is_deleted)
values (
1,
'testid1',
'mdmdmd131313',
'임정우1',
'limjw01@gmail.com',
'01011111111',
'남자',
'1991-03-01',
'',
0,
1,
100000,
'MEMBER',
false
);
insert into member (member_id, uid, password, name, email, phone, gender, date_of_birth, grade, check_term, check_sns, total_cost, role, is_deleted)
values (
2,
'testid2',
'mdmdmd232323',
'임정우2',
'limjw02@gmail.com',
'01022222222',
'남자',
'1991-03-02',
'',
1,
1,
200000,
'MEMBER',
true
);
insert into member (member_id, uid, password, name, email, phone, gender, date_of_birth, grade, check_term, check_sns, total_cost, role, is_deleted)
values (
3,
'testid3',
'mdmdmd333333',
'임정우3',
'limjw03@gmail.com',
'01033333333',
'남자',
'1991-03-03',
'',
0,
0,
300000,
'MEMBER',
false
);
| 55.372493
| 212
| 0.427633
|
38b434087ed66e7e7e97c28844383fcd0c93e5b2
| 1,342
|
php
|
PHP
|
app/Http/Controllers/API/StoresController.php
|
loinp58/wabi-BE
|
357bf0489d4bae00a2402f24b058521b8ca5f573
|
[
"MIT"
] | null | null | null |
app/Http/Controllers/API/StoresController.php
|
loinp58/wabi-BE
|
357bf0489d4bae00a2402f24b058521b8ca5f573
|
[
"MIT"
] | null | null | null |
app/Http/Controllers/API/StoresController.php
|
loinp58/wabi-BE
|
357bf0489d4bae00a2402f24b058521b8ca5f573
|
[
"MIT"
] | null | null | null |
<?php
namespace App\Http\Controllers\API;
use App\Models\Store;
use Illuminate\Http\Request;
use App\Http\Controllers\Controller;

/**
 * Read-only store endpoints: a filtered/searchable listing and a detail view.
 * Every response is JSON of the shape {status: bool, data|message: ...}.
 */
class StoresController extends Controller
{
    /**
     * List stores, optionally narrowed by a filter, a search keyword and a
     * store type (the request's `type` is mapped through config('system.stores.type')).
     */
    public function index(Request $request)
    {
        try {
            $storeFilter = $request->has('filter') ? $request->filter : null;
            $searchTerm = $request->has('search') ? $request->search : '';
            $storeType = $request->has('type')
                ? config('system.stores.type')[$request->type]
                : 0;
            $stores = Store::getAllStore($storeType, $storeFilter, $searchTerm);
        } catch (\Exception $e) {
            return Response()->json([
                'status' => false,
                'message' => $e->getMessage()
            ]);
        }
        return Response()->json(['status' => true, 'data' => $stores]);
    }

    /**
     * Return the detail payload for a single store.
     */
    public function show($id)
    {
        try {
            $store = Store::getDetail($id);
        } catch (\Exception $e) {
            return Response()->json([
                'status' => false,
                'message' => $e->getMessage()
            ]);
        }
        return Response()->json(['status' => true, 'data' => $store]);
    }
}
| 25.807692
| 71
| 0.455291
|
893d38b15d1a831d077ff14833113d88e0c6e0ef
| 35,250
|
psm1
|
PowerShell
|
Libraries/HyperV.psm1
|
stevecui/trp_sriov_lisav2
|
f8795ebbb0f029b83f1f5e17d437c7ea4783544f
|
[
"Apache-2.0"
] | 1
|
2019-01-28T01:44:58.000Z
|
2019-01-28T01:44:58.000Z
|
Libraries/HyperV.psm1
|
stevecui/trp_sriov_lisav2
|
f8795ebbb0f029b83f1f5e17d437c7ea4783544f
|
[
"Apache-2.0"
] | null | null | null |
Libraries/HyperV.psm1
|
stevecui/trp_sriov_lisav2
|
f8795ebbb0f029b83f1f5e17d437c7ea4783544f
|
[
"Apache-2.0"
] | null | null | null |
##############################################################################################
# HyperV.psm1
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the Apache License.
# Operations :
#
<#
.SYNOPSIS
Required for Hyper-V test execution.
.PARAMETER
<Parameters>
.INPUTS
.NOTES
Creation Date:
Purpose/Change:
.EXAMPLE
#>
###############################################################################################
Function DeployHyperVGroups ($xmlConfig, $setupType, $Distro, $getLogsIfFailed = $false, $GetDeploymentStatistics = $false, $VMGeneration = "1")
{
    # Deploy all Hyper-V groups for a setup type, verify SSH connectivity to
    # every VM, inject hostnames, and return the verified group names (or $NULL
    # on failure). When $GetDeploymentStatistics is set, also returns the
    # deployment elapsed time.
    # Reads script/global state: $EconomyMode, $TestPlatform, $IsWindows.
    # Side effect: sets the global $allVMData used by the rest of the framework.
    # In EconomyMode with an already-deployed setup, reuses the recorded value.
    if( (!$EconomyMode) -or ( $EconomyMode -and ($xmlConfig.config.HyperV.Deployment.$setupType.isDeployed -eq "NO")))
    {
        try
        {
            $VerifiedGroups = $NULL
            $retValue = $NULL
            $i = 0
            $role = 1
            $setupTypeData = $xmlConfig.config.$TestPlatform.Deployment.$setupType
            # Returns a tuple: [0] "True"/"False", [1] '^'-joined group names,
            # [2] group count, [3] elapsed time.
            $isAllDeployed = CreateAllHyperVGroupDeployments -setupType $setupType -xmlConfig $xmlConfig `
                -Distro $Distro -VMGeneration $VMGeneration
            $isAllVerified = "False"
            $isAllConnected = "False"
            if($isAllDeployed[0] -eq "True")
            {
                $DeployedHyperVGroup = $isAllDeployed[1]
                $HyperVGroupCount = $isAllDeployed[2]
                $DeploymentElapsedTime = $isAllDeployed[3]
                $GroupsToVerify = $DeployedHyperVGroup.Split('^')
                $allVMData = GetAllHyperVDeployementData -HyperVGroupNames $DeployedHyperVGroup
                # Publish VM data globally for downstream test phases.
                Set-Variable -Name allVMData -Value $allVMData -Force -Scope Global
                if (!$allVMData) {
                    LogErr "One or more deployments failed..!"
                    $retValue = $NULL
                } else {
                    $isAllConnected = isAllSSHPortsEnabledRG -AllVMDataObject $allVMData
                    if ($isAllConnected -eq "True")
                    {
                        InjectHostnamesInHyperVVMs -allVMData $allVMData
                        $VerifiedGroups = $DeployedHyperVGroup
                        $retValue = $VerifiedGroups
                        # Optional telemetry hook; only runs when the extra script exists.
                        if ( Test-Path -Path .\Extras\UploadDeploymentDataToDB.ps1 )
                        {
                            $out = .\Extras\UploadDeploymentDataToDB.ps1 -allVMData $allVMData -DeploymentTime $DeploymentElapsedTime.TotalSeconds
                        }
                        if(!$IsWindows)
                        {
                            $KernelLogOutput= GetAndCheckKernelLogs -allDeployedVMs $allVMData -status "Initial"
                        }
                    }
                    else
                    {
                        LogErr "Unable to connect Some/All SSH ports.."
                        $retValue = $NULL
                    }
                }
            }
            else
            {
                LogErr "One or More Deployments are Failed..!"
                $retValue = $NULL
            }
        }
        catch
        {
            LogMsg "Exception detected. Source : DeployVMs()"
            $line = $_.InvocationInfo.ScriptLineNumber
            $script_name = ($_.InvocationInfo.ScriptName).Replace($PWD,".")
            $ErrorMessage = $_.Exception.Message
            LogErr "EXCEPTION : $ErrorMessage"
            LogErr "Source : Line $line in script $script_name."
            $retValue = $NULL
        }
    }
    else
    {
        # EconomyMode reuse path: return the already-deployed marker from the XML.
        $retValue = $xmlConfig.config.$TestPlatform.Deployment.$setupType.isDeployed
        if(!$IsWindows)
        {
            # NOTE(review): relies on a pre-existing global $allVMData here — TODO confirm.
            $KernelLogOutput= GetAndCheckKernelLogs -allDeployedVMs $allVMData -status "Initial"
        }
    }
    if ( $GetDeploymentStatistics )
    {
        return $retValue, $DeploymentElapsedTime
    }
    else
    {
        return $retValue
    }
}
Function CreateAllHyperVGroupDeployments($setupType, $xmlConfig, $Distro, $DebugRG = "", $VMGeneration = "1")
{
    # Create every Hyper-V group described by the setup type's XML: delete any
    # stale group with the same name, create the group, deploy its VMs and
    # start them. Returns a tuple:
    #   "True"/"False", deployed group names (array), group count, elapsed time.
    # $DebugRG short-circuits everything and pretends the named group deployed.
    # Reads ambient state: $region, $ExistingRG, $shortRandomWord.
    $DeployedHyperVGroup = @()
    if ($DebugRG)
    {
        return "True", $DebugRG, 1, 180
    }
    else
    {
        $HyperVGroupCount = 0
        LogMsg $setupType
        $setupTypeData = $xmlConfig.config.HyperV.Deployment.$setupType
        if($region)
        {
            $location = $region;
        }
        $index = 0
        foreach ($HyperVGroupXML in $setupTypeData.ResourceGroup )
        {
            # Either spread this group's VMs across all configured hosts, or
            # pin the whole group to the host at the current index.
            $deployOnDifferentHosts = $HyperVGroupXML.VirtualMachine.DeployOnDifferentHyperVHost
            $HyperVHostArray = @()
            if ($deployOnDifferentHosts -eq "yes") {
                foreach ($HypervHost in $xmlConfig.config.HyperV.Hosts.ChildNodes) {
                    $HyperVHostArray += $HyperVHost.ServerName
                }
            } else {
                $HyperVHostArray += $xmlConfig.config.HyperV.Hosts.ChildNodes[$index].ServerName
            }
            $SourceOsVHDPath = $xmlConfig.config.HyperV.Hosts.ChildNodes[$index].SourceOsVHDPath
            $DestinationOsVHDPath = $xmlConfig.config.HyperV.Hosts.ChildNodes[$index].DestinationOsVHDPath
            $index++
            $validateStartTime = Get-Date
            $readyToDeploy = $false
            while (!$readyToDeploy)
            {
                #TBD Verify the readiness of the HyperV Host.
                $readyToDeploy = $true
            }
            if ($readyToDeploy)
            {
                # Unique group name: tag (or setup type) + distro + random word + tick-based timestamp.
                $curtime = ([string]((Get-Date).Ticks / 1000000)).Split(".")[0]
                $isHyperVGroupDeployed = "False"
                $retryDeployment = 0
                if ( $HyperVGroupXML.Tag -ne $null )
                {
                    $HyperVGroupName = "ICA-HG-" + $HyperVGroupXML.Tag + "-" + $Distro + "-" + "$shortRandomWord-" + "$curtime"
                }
                else
                {
                    $HyperVGroupName = "ICA-HG-" + $setupType + "-" + $Distro + "-" + "$shortRandomWord-" + "$curtime"
                }
                # Retry budget is 1 attempt (retryDeployment -lt 1).
                while (($isHyperVGroupDeployed -eq "False") -and ($retryDeployment -lt 1))
                {
                    if ($ExistingRG)
                    {
                        #TBD
                        #Use existing HypeV group for test.
                    }
                    else
                    {
                        LogMsg "Creating HyperV Group : $HyperVGroupName."
                        LogMsg "Verifying that HyperV Group name is not in use."
                        foreach ($HyperVHost in $HyperVHostArray){
                            $isHyperVGroupDeleted = DeleteHyperVGroup -HyperVGroupName $HyperVGroupName -HyperVHost $HyperVHost
                        }
                    }
                    if ($isHyperVGroupDeleted)
                    {
                        foreach ($HyperVHost in $HyperVHostArray){
                            $CreatedHyperVGroup = CreateHyperVGroup -HyperVGroupName $HyperVGroupName -HyperVHost $HyperVHost
                        }
                        if ($CreatedHyperVGroup)
                        {
                            $DeploymentStartTime = (Get-Date)
                            $ExpectedVMs = 0
                            $HyperVGroupXML.VirtualMachine | ForEach-Object {$ExpectedVMs += 1}
                            $VMCreationStatus = CreateHyperVGroupDeployment -HyperVGroupName $HyperVGroupName -HyperVGroupXML $HyperVGroupXML `
                                -HyperVHost $HyperVHostArray -SourceOsVHDPath $SourceOsVHDPath -DestinationOsVHDPath $DestinationOsVHDPath `
                                -VMGeneration $VMGeneration
                            $DeploymentEndTime = (Get-Date)
                            $DeploymentElapsedTime = $DeploymentEndTime - $DeploymentStartTime
                            if ( $VMCreationStatus )
                            {
                                # Tests tagged "nested" need nested-virtualization enabled before boot.
                                if($xmlconfig.config.testsDefinition.test.Tags `
                                    -and $xmlconfig.config.testsDefinition.test.Tags.ToString().Contains("nested"))
                                {
                                    LogMsg "Test Platform is $TestPlatform and nested VMs will be created, need to enable nested virtualization"
                                    $status = EnableHyperVNestedVirtualization -HyperVGroupName $HyperVGroupName -HyperVHost $HyperVHost
                                }
                                foreach ($HyperVHost in $HyperVHostArray){
                                    $StartVMStatus = StartHyperVGroupVMs -HyperVGroupName $HyperVGroupName -HyperVHost $HyperVHost
                                    if ($StartVMStatus)
                                    {
                                        $retValue = "True"
                                        $isHyperVGroupDeployed = "True"
                                        $HyperVGroupCount = $HyperVGroupCount + 1
                                        $DeployedHyperVGroup += $HyperVGroupName
                                    }
                                    else
                                    {
                                        LogErr "Unable to start one or more VM's"
                                        $retryDeployment = $retryDeployment + 1
                                        $retValue = "False"
                                        $isHyperVGroupDeployed = "False"
                                    }
                                }
                            }
                            else
                            {
                                LogErr "Unable to Deploy one or more VM's"
                                $retryDeployment = $retryDeployment + 1
                                $retValue = "False"
                                $isHyperVGroupDeployed = "False"
                            }
                        }
                        else
                        {
                            LogErr "Unable to create $HyperVGroupName"
                            $retryDeployment = $retryDeployment + 1
                            $retValue = "False"
                            $isHyperVGroupDeployed = "False"
                        }
                    }
                    else
                    {
                        LogErr "Unable to delete existing HyperV Group - $HyperVGroupName"
                        $retryDeployment += 1
                        $retValue = "False"
                        $isHyperVGroupDeployed = "False"
                    }
                }
            }
            else
            {
                LogErr "HyperV server is not ready to deploy."
                $retValue = "False"
                $isHyperVGroupDeployed = "False"
            }
        }
        return $retValue, $DeployedHyperVGroup, $HyperVGroupCount, $DeploymentElapsedTime
    }
}
Function DeleteHyperVGroup([string]$HyperVGroupName, [string]$HyperVHost) {
    # Remove a Hyper-V VM group and everything in it: stop each member VM,
    # delete its checkpoints, remove its VHDs (locally or via UNC fallback),
    # remove the VM, then remove the group itself. Returns $true on success.
    # Skipped entirely (returns $true) when $ExistingRG is set.
    # NOTE(review): `return $false` inside the ForEach-Object scriptblocks only
    # exits that scriptblock iteration, not this function — preserved as-is,
    # but the caller may see $true despite a per-VM failure; TODO confirm intent.
    if ($ExistingRG) {
        LogMsg "Skipping removal of Hyper-V VM group ${HyperVGroupName}"
        return $true
    }
    $vmGroup = $null
    LogMsg "Checking if Hyper-V VM group '$HyperVGroupName' exists on $HyperVHost..."
    $vmGroup = Get-VMGroup -Name $HyperVGroupName -ErrorAction SilentlyContinue `
        -ComputerName $HyperVHost
    if (!$vmGroup) {
        LogWarn "Hyper-V VM group ${HyperVGroupName} does not exist"
        return $true
    }
    $vmGroup.VMMembers | ForEach-Object {
        LogMsg "Stop-VM -Name $($_.Name) -Force -TurnOff "
        $vm = $_
        Stop-VM -Name $vm.Name -Force -TurnOff -ComputerName $HyperVHost
        Remove-VMSnapshot -VMName $vm.Name -ComputerName $HyperVHost `
            -IncludeAllChildCheckpoints -Confirm:$false
        if (!$?) {
            LogErr ("Failed to remove snapshots for VM {0}" -f @($vm.Name))
            return $false
        }
        # Snapshot merge runs asynchronously; wait for the VM to settle first.
        Wait-VMStatus -VMName $vm.Name -VMStatus "Operating Normally" -RetryInterval 2 `
            -HvServer $HyperVHost
        $vm = Get-VM -Name $vm.Name -ComputerName $HyperVHost
        $vm.HardDrives | ForEach-Object {
            $vhdPath = $_.Path
            $invokeCommandParams = @{
                "ScriptBlock" = {
                    Remove-Item -Path $args[0] -Force
                };
                "ArgumentList" = $vhdPath;
            }
            if ($HyperVHost -ne "localhost" -and $HyperVHost -ne $(hostname)) {
                $invokeCommandParams.ComputerName = $HyperVHost
            }
            Invoke-Command @invokeCommandParams
            if (!$?) {
                LogMsg "Failed to remove ${vhdPath} using Invoke-Command"
                # Fallback: translate the drive-letter path to an administrative
                # UNC share on the host (e.g. C:\x -> \\host\C$\x).
                # Fix: "$(HyperVHost)" was a subexpression that tried to *run*
                # a command named HyperVHost; ${HyperVHost} expands the variable.
                $vhdUncPath = $vhdPath -replace '^(.):', "\\${HyperVHost}\`$1$"
                LogMsg "Removing ${vhdUncPath} ..."
                Remove-Item -Path $vhdUncPath -Force
                if (!$? -or (Test-Path $vhdUncPath)) {
                    LogErr "Failed to remove ${vhdPath} using UNC paths"
                    return $false
                }
            }
            LogMsg "VHD ${vhdPath} removed!"
        }
        Remove-VM -Name $vm.Name -ComputerName $HyperVHost -Force
        LogMsg "Hyper-V VM $($vm.Name) removed!"
    }
    LogMsg "Hyper-V VM group ${HyperVGroupName} is being removed!"
    Remove-VMGroup -Name $HyperVGroupName -ComputerName $HyperVHost -Force
    LogMsg "Hyper-V VM group ${HyperVGroupName} removed!"
    return $true
}
Function CreateHyperVGroup([string]$HyperVGroupName, [string]$HyperVHost)
{
    # Create a VMCollectionType VM group on $HyperVHost, retrying up to 5
    # attempts. Returns the created VMGroup object on success, $false otherwise.
    $FailCounter = 0
    # Fix: initialize with a real boolean instead of the string "False" so the
    # loop condition compares like-for-like.
    $retValue = $false
    While(($retValue -eq $false) -and ($FailCounter -lt 5))
    {
        try
        {
            # Fix: count each attempt exactly once. The original also bumped
            # the counter in the failure branch, so every failed attempt cost
            # two of the five retries.
            $FailCounter++
            LogMsg "Using HyperV server : $HyperVHost"
            $CreatedHyperVGroup = New-VMGroup -Name $HyperVGroupName -ComputerName $HyperVHost -GroupType VMCollectionType
            if ($?)
            {
                LogMsg "HyperV Group $HyperVGroupName Created with Instance ID: $($CreatedHyperVGroup.InstanceId)."
                $retValue = $CreatedHyperVGroup
            }
            else
            {
                LogErr "Failed to create HyperV Group $HyperVGroupName."
                $retValue = $false
            }
        }
        catch
        {
            $retValue = $false
        }
    }
    return $retValue
}
# Fix: the parameter list used to declare $HyperVGroup / $HyperVGroupNameXML,
# while both the function body and the caller (CreateAllHyperVGroupDeployments,
# which passes -HyperVGroupName / -HyperVGroupXML) use $HyperVGroupName /
# $HyperVGroupXML — "-HyperVGroupXML" could not bind to any declared parameter.
Function CreateHyperVGroupDeployment([string]$HyperVGroupName, $HyperVGroupXML, $HyperVHost, $SourceOsVHDPath, $DestinationOsVHDPath, $VMGeneration)
{
    # Create every VM described in $HyperVGroupXML inside the named VM group:
    # prepare a differencing OS disk from the base VHD, create the VM with the
    # mapped Azure-size CPU/memory, attach a 1GB resource disk and any
    # passthrough data disks. Returns $true only when no step failed.
    # Reads ambient state: $BaseOsVHD, $xmlConfig, $TestLocation,
    # $OverrideVMSize, $currentTestData.
    $HyperVMappedSizes = [xml](Get-Content .\XML\AzureVMSizeToHyperVMapping.xml)
    $CreatedVMs = @()
    $OsVHD = $BaseOsVHD
    $ErrorCount = 0
    $i = 0
    # $HyperVHost may be an array of hosts; default to the first.
    $HyperVHost = $HyperVHost | Select-Object -First 1
    $CurrentHyperVGroup = Get-VMGroup -Name $HyperVGroupName -ComputerName $HyperVHost
    if ( $CurrentHyperVGroup.Count -eq 1)
    {
        foreach ( $VirtualMachine in $HyperVGroupXML.VirtualMachine)
        {
            # Multi-host test locations: each VM may target a different host
            # with its own source/destination VHD folders.
            if ($VirtualMachine.DeployOnDifferentHyperVHost -and ($TestLocation -match ",")) {
                $hostNumber = $HyperVGroupXML.VirtualMachine.indexOf($VirtualMachine)
                $HyperVHost = $xmlConfig.config.HyperV.Hosts.ChildNodes[$hostNumber].ServerName
                $SourceOsVHDPath = $xmlConfig.config.HyperV.Hosts.ChildNodes[$hostNumber].SourceOsVHDPath
                $DestinationOsVHDPath = $xmlConfig.config.HyperV.Hosts.ChildNodes[$hostNumber].DestinationOsVHDPath
            }
            $vhdSuffix = [System.IO.Path]::GetExtension($OsVHD)
            # Pick the first virtual switch bound to an internet-facing adapter.
            $InterfaceAliasWithInternet = (Get-NetIPConfiguration -ComputerName $HyperVHost | where {$_.NetProfile.Name -ne 'Unidentified network'}).InterfaceAlias
            $VMSwitches = Get-VMSwitch | where {$InterfaceAliasWithInternet -match $_.Name} | Select-Object -First 1
            if ( $VirtualMachine.RoleName)
            {
                if ($VirtualMachine.RoleName -match "dependency") {
                    $CurrentVMName = $HyperVGroupName + "-" + $VirtualMachine.RoleName
                } else {
                    $CurrentVMName = $VirtualMachine.RoleName
                }
                $CurrentVMOsVHDPath = "$DestinationOsVHDPath\$HyperVGroupName-$CurrentVMName-diff-OSDisk${vhdSuffix}"
            }
            else
            {
                $CurrentVMName = $HyperVGroupName + "-role-$i"
                $CurrentVMOsVHDPath = "$DestinationOsVHDPath\$HyperVGroupName-role-$i-diff-OSDisk${vhdSuffix}"
                $i += 1
            }
            $parentOsVHDPath = $OsVHD
            if ($SourceOsVHDPath) {
                $parentOsVHDPath = Join-Path $SourceOsVHDPath $OsVHD
            }
            $infoParentOsVHD = Get-VHD $parentOsVHDPath -ComputerName $HyperVHost
            $uriParentOsVHDPath = [System.Uri]$parentOsVHDPath
            if ($uriParentOsVHDPath -and $uriParentOsVHDPath.isUnc) {
                # SMB-hosted parent: copy it to the host's local VHD folder once,
                # keyed by the VHD's disk identifier, and reuse on later runs.
                LogMsg "Parent VHD path ${parentOsVHDPath} is on an SMB share."
                if ($infoParentOsVHD.VhdType -eq "Differencing") {
                    LogErr "Unsupported differencing disk on the share."
                    $ErrorCount += 1
                    return $false
                }
                LogMsg "Checking if we have a local VHD with the same disk identifier on the host"
                $hypervVHDLocalPath = (Get-VMHost -ComputerName $HyperVHost).VirtualHardDiskPath
                $vhdName = [System.IO.Path]::GetFileNameWithoutExtension($(Split-Path -Leaf $parentOsVHDPath))
                $newVhdName = "{0}-{1}{2}" -f @($vhdName, $infoParentOsVHD.DiskIdentifier.Replace("-", ""),$vhdSuffix)
                $localVHDPath = Join-Path $hypervVHDLocalPath $newVhdName
                if ((Test-Path $localVHDPath)) {
                    LogMsg "${parentOsVHDPath} is already found at path ${localVHDPath}"
                } else {
                    LogMsg "${parentOsVHDPath} will be copied at path ${localVHDPath}"
                    Copy-Item -Force $parentOsVHDPath $localVHDPath
                }
                $parentOsVHDPath = $localVHDPath
            }
            # Differencing OS disk keeps the parent image pristine.
            $Out = New-VHD -ParentPath $parentOsVHDPath -Path $CurrentVMOsVHDPath -ComputerName $HyperVHost
            if ($?) {
                LogMsg "Prerequiste: Prepare OS Disk $CurrentVMOsVHDPath - Succeeded."
                if ($OverrideVMSize)
                {
                    $CurrentVMSize = $OverrideVMSize
                }
                else
                {
                    $CurrentVMSize = $VirtualMachine.ARMInstanceSize
                }
                Set-Variable -Name HyperVInstanceSize -Value $CurrentVMSize -Scope Global
                # Map the Azure instance size to Hyper-V CPU count / memory (MB -> bytes).
                $CurrentVMCpu = $HyperVMappedSizes.HyperV.$CurrentVMSize.NumberOfCores
                $CurrentVMMemory = $HyperVMappedSizes.HyperV.$CurrentVMSize.MemoryInMB
                $CurrentVMMemory = [int]$CurrentVMMemory * 1024 * 1024
                LogMsg "New-VM -Name $CurrentVMName -MemoryStartupBytes $CurrentVMMemory -BootDevice VHD -VHDPath $CurrentVMOsVHDPath -Generation $VMGeneration -Switch $($VMSwitches.Name) -ComputerName $HyperVHost"
                $NewVM = New-VM -Name $CurrentVMName -MemoryStartupBytes $CurrentVMMemory -BootDevice VHD `
                    -VHDPath $CurrentVMOsVHDPath -Generation $VMGeneration -Switch $($VMSwitches.Name) -ComputerName $HyperVHost
                if ([string]$VMGeneration -eq "2") {
                    # Gen-2 secure boot blocks most Linux images; disable it.
                    LogMsg "Set-VMFirmware -VMName $CurrentVMName -EnableSecureBoot Off"
                    Set-VMFirmware -VMName $CurrentVMName -EnableSecureBoot Off
                }
                if($currentTestData.AdditionalHWConfig.SwitchName)
                {
                    Add-VMNetworkAdapter -VMName $CurrentVMName -SwitchName $currentTestData.AdditionalHWConfig.SwitchName -ComputerName $HyperVHost
                }
                if ($?)
                {
                    LogMsg "Set-VM -VM $($NewVM.Name) -ProcessorCount $CurrentVMCpu -StaticMemory -CheckpointType Disabled -Notes $HyperVGroupName"
                    $Out = Set-VM -VM $NewVM -ProcessorCount $CurrentVMCpu -StaticMemory -CheckpointType Disabled -Notes "$HyperVGroupName"
                    LogMsg "Add-VMGroupMember -Name $HyperVGroupName -VM $($NewVM.Name)"
                    $Out = Add-VMGroupMember -Name "$HyperVGroupName" -VM $NewVM -ComputerName $HyperVHost
                    # 1GB dynamic "resource disk" mimicking the Azure ephemeral disk (sdb).
                    $ResourceDiskPath = ".\Temp\ResourceDisk-$((Get-Date).Ticks)-sdb${vhdSuffix}"
                    if($DestinationOsVHDPath -ne "VHDs_Destination_Path")
                    {
                        $ResourceDiskPath = "$DestinationOsVHDPath\ResourceDisk-$((Get-Date).Ticks)-sdb${vhdSuffix}"
                    }
                    LogMsg "New-VHD -Path $ResourceDiskPath -SizeBytes 1GB -Dynamic -ComputerName $HyperVHost"
                    $VHD = New-VHD -Path $ResourceDiskPath -SizeBytes 1GB -Dynamic -ComputerName $HyperVHost
                    LogMsg "Add-VMHardDiskDrive -ControllerType SCSI -Path $ResourceDiskPath -VM $($NewVM.Name)"
                    $NewVM | Add-VMHardDiskDrive -ControllerType SCSI -Path $ResourceDiskPath
                    # Data disks are satisfied from the host's *offline* physical disks.
                    $LUNs = $VirtualMachine.DataDisk.LUN
                    if($LUNs.count -gt 0)
                    {
                        LogMsg "check the offline physical disks on host $HyperVHost"
                        $DiskNumbers = (Get-Disk | where {$_.OperationalStatus -eq 'offline'}).Number
                        if($DiskNumbers.count -ge $LUNs.count)
                        {
                            LogMsg "The offline physical disks are enough for use"
                            $ControllerType = 'SCSI'
                            $count = 0
                            foreach ( $LUN in $LUNs )
                            {
                                LogMsg "Add physical disk $($DiskNumbers[$count]) to $ControllerType controller on virtual machine $CurrentVMName."
                                $NewVM | Add-VMHardDiskDrive -DiskNumber $($DiskNumbers[$count]) -ControllerType $ControllerType
                                $count ++
                            }
                        }
                        else
                        {
                            LogErr "The offline physical disks are not enough for use"
                            $ErrorCount += 1
                        }
                    }
                }
                else
                {
                    LogErr "Failed to create VM."
                    LogErr "Removing OS Disk : $CurrentVMOsVHDPath"
                    $Out = Remove-Item -Path $CurrentVMOsVHDPath -Force
                    $ErrorCount += 1
                }
            } else {
                LogMsg "Prerequiste: Prepare OS Disk $CurrentVMOsVHDPath - Failed."
                $ErrorCount += 1
            }
        }
    }
    else
    {
        LogErr "There are $($CurrentHyperVGroup.Count) HyperV groups. We need 1 HyperV group."
        $ErrorCount += 1
    }
    if ( $ErrorCount -eq 0 )
    {
        $ReturnValue = $true
    }
    else
    {
        $ReturnValue = $false
    }
    return $ReturnValue
}
Function EnableHyperVNestedVirtualization($HyperVGroupName, $HyperVHost)
{
    # Enable nested virtualization (CPU virtualization extensions + MAC
    # spoofing) on every VM in the group. Returns $true when all succeeded.
    # NOTE(review): Set-VMProcessor presumably requires the VM to be off —
    # callers invoke this before StartHyperVGroupVMs; confirm ordering holds.
    $AllVMs = Get-VMGroup -Name $HyperVGroupName -ComputerName $HyperVHost
    $CurrentErrors = @()
    foreach ( $VM in $AllVMs.VMMembers)
    {
        LogMsg "Enable nested virtualization for $($VM.Name) from $HyperVGroupName..."
        Set-VMProcessor -VMName $($VM.Name) -ExposeVirtualizationExtensions $true -ComputerName $HyperVHost
        Set-VMNetworkAdapter -VMName $($VM.Name) -MacAddressSpoofing on -ComputerName $HyperVHost
        if ( $? )
        {
            LogMsg "Succeeded."
        }
        else
        {
            LogErr "Failed"
            $CurrentErrors += "Enable nested virtualization for $($VM.Name) from $HyperVGroupName failed."
        }
    }
    if($CurrentErrors.Count -eq 0)
    {
        $ReturnValue = $true
    }
    else
    {
        # Fix: the error list used to be echoed only in the success branch,
        # i.e. exactly when it was empty. Log it on failure instead.
        $CurrentErrors | ForEach-Object { LogErr "$_" }
        $ReturnValue = $false
    }
    return $ReturnValue
}
Function StartHyperVGroupVMs($HyperVGroupName,$HyperVHost)
{
    # Start every VM in the named group. Returns $true only when every
    # Start-VM call succeeded.
    $AllVMs = Get-VMGroup -Name $HyperVGroupName -ComputerName $HyperVHost
    $CurrentErrors = @()
    foreach ( $VM in $AllVMs.VMMembers)
    {
        LogMsg "Starting $($VM.Name) from $HyperVGroupName..."
        $StartVMStatus = Start-VM -VM $VM
        if ( $? )
        {
            LogMsg "Succeeded."
        }
        else
        {
            LogErr "Failed"
            $CurrentErrors += "Starting $($VM.Name) from $HyperVGroupName failed."
        }
    }
    if($CurrentErrors.Count -eq 0)
    {
        $ReturnValue = $true
    }
    else
    {
        # Fix: errors were previously logged only when the list was empty.
        $CurrentErrors | ForEach-Object { LogErr "$_" }
        $ReturnValue = $false
    }
    return $ReturnValue
}
Function StopHyperVGroupVMs($HyperVGroupName, $HyperVHost)
{
    # Gracefully shut down every VM in the group, falling back to a hard
    # power-off (-TurnOff) when the guest shutdown fails. Returns $true only
    # when every VM ended up stopped.
    $AllVMs = Get-VMGroup -Name $HyperVGroupName -ComputerName $HyperVHost
    $CurrentErrors = @()
    foreach ( $VM in $AllVMs.VMMembers)
    {
        LogMsg "Shutting down $($VM.Name) from $HyperVGroupName..."
        $StopVMStatus = Stop-VM -VM $VM
        if ( $? )
        {
            LogMsg "Succeeded."
        }
        else
        {
            LogErr "Shutdown failed. Turning off.."
            $StopVMStatus = Stop-VM -VM $VM -Force -TurnOff -ComputerName $HyperVHost
            if ( $? )
            {
                LogMsg "Succeeded."
            }
            else
            {
                LogErr "Failed"
                $CurrentErrors += "Stopping $($VM.Name) from $HyperVGroupName failed."
            }
        }
    }
    if($CurrentErrors.Count -eq 0)
    {
        $ReturnValue = $true
    }
    else
    {
        # Fix: errors were previously logged only when the list was empty.
        $CurrentErrors | ForEach-Object { LogErr "$_" }
        $ReturnValue = $false
    }
    return $ReturnValue
}
Function GetAllHyperVDeployementData($HyperVGroupNames,$RetryCount = 100)
{
    # Collect per-VM connection data (host, group, IP, SSH/RDP port) for every
    # group in the '^'-separated $HyperVGroupNames string. Polls each VM's NIC
    # until an IPv4 address appears. Returns an array of PSObjects; VMs whose
    # IP never materializes are omitted.
    # Reads ambient state: $xmlConfig, $IsWindows.
    $allDeployedVMs = @()
    # Template for one VM's record; HyperVHost is captured from the enclosing scope.
    function CreateQuickVMNode()
    {
        $objNode = New-Object -TypeName PSObject
        Add-Member -InputObject $objNode -MemberType NoteProperty -Name HyperVHost -Value $HyperVHost -Force
        Add-Member -InputObject $objNode -MemberType NoteProperty -Name HyperVGroupName -Value $null -Force
        Add-Member -InputObject $objNode -MemberType NoteProperty -Name PublicIP -Value $null -Force
        Add-Member -InputObject $objNode -MemberType NoteProperty -Name InternalIP -Value $null -Force
        Add-Member -InputObject $objNode -MemberType NoteProperty -Name RoleName -Value $null -Force
        if($IsWindows){
            Add-Member -InputObject $objNode -MemberType NoteProperty -Name RDPPort -Value 3389 -Force
        }
        else{
            Add-Member -InputObject $objNode -MemberType NoteProperty -Name SSHPort -Value 22 -Force
        }
        return $objNode
    }
    $CurrentRetryAttempt = 0
    $AllPublicIPsCollected = $false
    # Map: group's ComputerName (host) -> its member VMs.
    $ALLVMs = @{}
    $index = 0
    foreach ($HyperVGroupName in $HyperVGroupNames.Split("^"))
    {
        $HyperVHost = $xmlConfig.config.Hyperv.Hosts.ChildNodes[$index].ServerName
        $index++
        LogMsg "Collecting $HyperVGroupName data.."
        $CurrentGroupData = Get-VMGroup -Name $HyperVGroupName -ComputerName $HyperVHost
        $ALLVMs.Add($CurrentGroupData.ComputerName, $CurrentGroupData.VMMembers)
    }
    foreach ($ComputerName in $AllVMs.Keys)
    {
        foreach($property in $ALLVMs[$ComputerName]) {
            $VM = Get-VM -Name $property.Name -ComputerName $ComputerName
            $VMNicProperties = Get-VMNetworkAdapter -ComputerName $ComputerName -VMName $property.Name
            # Local override: poll up to 50 times, 5s apart, for an IPv4 address.
            $RetryCount = 50
            $CurrentRetryAttempt=0
            $QuickVMNode = CreateQuickVMNode
            do
            {
                $CurrentRetryAttempt++
                Start-Sleep 5
                LogMsg "    [$CurrentRetryAttempt/$RetryCount] : $($property.Name) : Waiting for IP address ..."
                $QuickVMNode.PublicIP = $VMNicProperties.IPAddresses | Where-Object {$_ -imatch "\b(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}\b"}
            }while(($CurrentRetryAttempt -lt $RetryCount) -and (!$QuickVMNode.PublicIP))
            # If multiple addresses matched, keep the first.
            # NOTE(review): Split("") splits on whitespace; presumably Split(" ")
            # was intended — in practice PublicIP is usually an array here. Verify.
            if($QuickVMNode.PublicIP -and $QuickVMNode.PublicIP.Split("").Length -gt 1)
            {
                $QuickVMNode.PublicIP = $QuickVMNode.PublicIP[0]
            }
            # Hyper-V VMs are reached directly: internal and "public" IP coincide.
            $QuickVMNode.InternalIP = $QuickVMNode.PublicIP
            $QuickVMNode.HyperVHost = $ComputerName
            if ($QuickVMNode.PublicIP -notmatch "\b(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}\b")
            {
                $AllPublicIPsCollected = $false
                LogMsg ("Cannot collect public IP for VM {0}" -f @($VM.Name))
            }
            else
            {
                $QuickVMNode.RoleName = $VM.Name
                $QuickVMNode.HyperVGroupName = $VM.Groups.Name
                $allDeployedVMs += $QuickVMNode
                LogMsg "Collected $($QuickVMNode.RoleName) from $($QuickVMNode.HyperVGroupName) data!"
            }
        }
    }
    return $allDeployedVMs
}
Function RestartAllHyperVDeployments($allVMData)
{
    # Stop every deployed group's VMs first, then start them all again, and
    # finally report whether every VM's SSH port answers.
    foreach ($vmEntry in $allVMData)
    {
        $out = StopHyperVGroupVMs -HyperVGroupName $vmEntry.HyperVGroupName -HyperVHost $vmEntry.HyperVHost
    }
    foreach ($vmEntry in $allVMData)
    {
        $out = StartHyperVGroupVMs -HyperVGroupName $vmEntry.HyperVGroupName -HyperVHost $vmEntry.HyperVHost
    }
    return (isAllSSHPortsEnabledRG -AllVMDataObject $allVMData)
}
Function InjectHostnamesInHyperVVMs($allVMData)
{
    # Set each VM's hostname to its RoleName: Linux guests get /etc/hostname
    # rewritten over SSH; Windows guests are renamed via remote PowerShell.
    # All VMs are then restarted so the new names take effect. Failures are
    # logged but deliberately do not abort the test run.
    $ErrorCount = 0
    try
    {
        foreach ( $VM in $allVMData )
        {
            LogMsg "Injecting hostname '$($VM.RoleName)' in HyperV VM..."
            if(!$IsWindows)
            {
                $out = RunLinuxCmd -username $user -password $password -ip $VM.PublicIP -port $VM.SSHPort -command "echo $($VM.RoleName) > /etc/hostname" -runAsSudo -maxRetryCount 5
            }
            else
            {
                # Rename only when the current DNS hostname differs from the target.
                $cred = Get-Cred $user $password
                Invoke-Command -ComputerName $VM.PublicIP -ScriptBlock {$computerInfo=Get-ComputerInfo;if($computerInfo.CsDNSHostName -ne $args[0]){Rename-computer -computername $computerInfo.CsDNSHostName -newname $args[0] -force}} -ArgumentList $VM.RoleName -Credential $cred
            }
        }
        # Reboot all VMs so the hostname change is picked up.
        $RestartStatus = RestartAllHyperVDeployments -allVMData $allVMData
    }
    catch
    {
        # NOTE(review): one exception aborts the loop, so $ErrorCount can only
        # ever reach 1 — the "$ErrorCount hostnames" message below overstates it.
        $ErrorCount += 1
    }
    finally
    {
        if ( ($ErrorCount -eq 0) -and ($RestartStatus -eq "True"))
        {
            LogMsg "Hostnames are injected successfully."
        }
        else
        {
            LogErr "Failed to inject $ErrorCount hostnames in HyperV VMs. Continuing the tests..."
        }
    }
}
Function Get-Cred($user, $password)
{
    # Build a PSCredential from a plain-text user/password pair.
    # NOTE(review): adding '*' to WSMan TrustedHosts disables WinRM host
    # verification for every destination — acceptable for lab automation only.
    $securePassword = New-Object -TypeName System.Security.SecureString
    foreach ($ch in $password.ToCharArray()) {
        $securePassword.AppendChar($ch)
    }
    $credential = New-Object -TypeName System.Management.Automation.PSCredential -ArgumentList $user, $securePassword
    Set-Item WSMan:\localhost\Client\TrustedHosts * -Force
    return $credential
}
function Get-VMPanicEvent {
    param(
        $VMName,
        $HvServer,
        $StartTime,
        $RetryCount=30,
        $RetryInterval=5
    )
    # Poll the Hyper-V worker admin event log for event id 18590 (guest panic)
    # whose message names $VMName, considering only events after $StartTime.
    # Returns $true when found within $RetryCount polls, otherwise $false.
    $found = $false
    for ($attempt = 0; ($attempt -lt $RetryCount) -and (-not $found); $attempt++) {
        LogMsg "Checking eventlog for 18590 event sent by VM ${VMName}"
        $eventFilter = @{
            LogName = "Microsoft-Windows-Hyper-V-Worker-Admin";
            StartTime = $StartTime
        }
        $recentEvents = @(Get-WinEvent -FilterHashTable $eventFilter `
                              -ComputerName $HvServer -ErrorAction SilentlyContinue)
        foreach ($entry in $recentEvents) {
            if ($entry.Id -eq 18590 -and $entry.Message.Contains($VMName)) {
                $found = $true
                break
            }
        }
        Start-Sleep $RetryInterval
    }
    return $found
}
function Wait-VMState {
    param(
        $VMName,
        $VMState,
        $HvServer,
        $RetryCount=30,
        $RetryInterval=5
    )
    # Block until the VM reaches $VMState, polling every $RetryInterval
    # seconds; throws after $RetryCount unsuccessful polls.
    for ($attempt = 0; $attempt -lt $RetryCount; $attempt++) {
        if ((Get-VM -ComputerName $HvServer -Name $VMName).State -eq $VMState) {
            return
        }
        LogMsg "Waiting for VM ${VMName} to enter ${VMState} state"
        Start-Sleep -Seconds $RetryInterval
    }
    throw "VM ${VMName} failed to enter ${VMState} state"
}
function Wait-VMStatus {
    param(
        $VMName,
        $VMStatus,
        $HvServer,
        $RetryCount=30,
        $RetryInterval=5
    )
    # Block until the VM's Status string equals $VMStatus (e.g. "Operating
    # Normally"), polling every $RetryInterval seconds; throws after
    # $RetryCount unsuccessful polls.
    for ($attempt = 0; $attempt -lt $RetryCount; $attempt++) {
        if ((Get-VM -ComputerName $HvServer -Name $VMName).Status -eq $VMStatus) {
            return
        }
        LogMsg "Waiting for VM ${VMName} to enter '${VMStatus}' status"
        Start-Sleep -Seconds $RetryInterval
    }
    throw "VM ${VMName} failed to enter ${VMStatus} status"
}
function Wait-VMHeartbeatOK {
    param(
        $VMName,
        $HvServer,
        $RetryCount=30,
        $RetryInterval=5
    )
    # Poll the VM's Heartbeat integration service until it reports "OK".
    # Always sleeps once before the first check. Throws once $RetryCount polls
    # have elapsed — even if the final poll succeeded, mirroring the original
    # do/until logic exactly.
    $attempt = 0
    while ($true) {
        $attempt++
        Start-Sleep -Seconds $RetryInterval
        LogMsg "Waiting for VM ${VMName} to enter Heartbeat OK state"
        if ($attempt -ge $RetryCount) {
            break
        }
        $heartbeat = Get-VMIntegrationService -VMName $VMName -ComputerName $HvServer | `
            Where-Object { $_.Name -eq "Heartbeat" }
        if ($heartbeat.PrimaryStatusDescription -eq "OK") {
            break
        }
    }
    if ($attempt -eq $RetryCount) {
        throw "VM ${VMName} failed to enter Heartbeat OK state"
    }
}
Function Wait-ForHyperVVMShutdown($HvServer,$VMNames)
{
    # Wait until every VM in the comma-separated $VMNames list reaches the
    # "Off" state on $HvServer. Throws when either argument is missing.
    LogMsg "Waiting for VM Shutting Down"
    if ($VMNames -and $HvServer)
    {
        foreach ($VMName in $VMNames.split(","))
        {
            Wait-VMState -VMName $VMName -HvServer $HvServer -VMState "Off"
        }
    }
    else
    {
        # Fix: "LogError" is not defined in this module — the logger used
        # everywhere else is LogErr.
        LogErr "Please provide HvServer and VMNames."
        throw "Wait-ForHyperVVMShutdown Missing Mandatory Paramters"
    }
}
| 41.421857
| 278
| 0.524028
|
147e6f8237ed38570c5c452b7e7975f23a48e773
| 4,171
|
ts
|
TypeScript
|
server/api/quote/quote.controller.ts
|
Artimatic/robinhood-merchant
|
d83af39ad2fc1e4b0585165e8a687a9d755052df
|
[
"MIT"
] | 13
|
2019-11-22T13:46:28.000Z
|
2022-01-13T06:42:01.000Z
|
server/api/quote/quote.controller.ts
|
Artimatic/Robinhood-Station-App
|
d83af39ad2fc1e4b0585165e8a687a9d755052df
|
[
"MIT"
] | 13
|
2020-12-26T03:02:58.000Z
|
2022-03-02T02:19:11.000Z
|
server/api/quote/quote.controller.ts
|
Artimatic/Robinhood-Station-App
|
d83af39ad2fc1e4b0585165e8a687a9d755052df
|
[
"MIT"
] | 9
|
2019-12-21T02:51:26.000Z
|
2022-01-13T06:42:02.000Z
|
import * as _ from 'lodash';
import * as Boom from 'boom';
import BaseController from '../templates/base.controller';
import QuoteService from './quote.service';
/**
 * HTTP controller for quote/market-data endpoints. Every handler validates
 * that the request carries a payload, delegates to QuoteService, and funnels
 * the promise through the shared BaseController success/error handlers.
 * Fix: removed leftover debug `console.log` calls from findIntraday and
 * factored the nine copy-pasted handler bodies into two private helpers —
 * the public method set and response behavior are unchanged.
 */
class QuoteController extends BaseController {
  constructor() {
    super();
  }

  /** Sends Boom's standard 400 Bad Request payload. */
  private static badRequest(response) {
    return response.status(Boom.badRequest().output.statusCode).send(Boom.badRequest().output);
  }

  /** Pipes a service promise into the standard success/error handlers. */
  private static respond(response, promise) {
    return promise
      .then((data) => BaseController.requestGetSuccessHandler(response, data))
      .catch((e) => BaseController.requestErrorHandler(response, e));
  }

  /** Historical data for a ticker over an interval/range. */
  getQuote(request, response) {
    if (_.isEmpty(request.body)) {
      return QuoteController.badRequest(response);
    }
    QuoteController.respond(response,
      QuoteService.getData(request.body.ticker, request.body.interval, request.body.range));
  }

  /** Last traded price for a symbol. */
  getCurrentQuote(request, response) {
    if (_.isEmpty(request.body)) {
      return QuoteController.badRequest(response);
    }
    QuoteController.respond(response, QuoteService.getLastPrice(request.body.symbol));
  }

  /** Raw (unprocessed) data for a symbol over an interval/range. */
  getRawData(request, response) {
    if (_.isEmpty(request.body)) {
      return QuoteController.badRequest(response);
    }
    QuoteController.respond(response,
      QuoteService.getRawData(request.body.symbol, request.body.interval, request.body.range));
  }

  /** Intraday bars for a symbol. */
  getIntraday(request, response) {
    if (_.isEmpty(request.body)) {
      return QuoteController.badRequest(response);
    }
    QuoteController.respond(response,
      QuoteService.getIntradayData(request.body.symbol, request.body.interval));
  }

  /** Intraday bars from the Tiingo provider, starting at a given date. */
  getTiingoIntraday(request, response) {
    if (_.isEmpty(request.body)) {
      return QuoteController.badRequest(response);
    }
    QuoteController.respond(response,
      QuoteService.getTiingoIntraday(request.body.symbol, request.body.startDate));
  }

  /** Intraday bars, v2 data path. */
  getIntradayV2(request, response) {
    if (_.isEmpty(request.body)) {
      return QuoteController.badRequest(response);
    }
    QuoteController.respond(response,
      QuoteService.getIntradayDataV2(request.body.symbol, request.body.interval));
  }

  /** Query stored intraday data by symbol and time window (query string). */
  findIntraday(request, response) {
    if (_.isEmpty(request.query)) {
      return QuoteController.badRequest(response);
    }
    QuoteController.respond(response,
      QuoteService.queryForIntraday(request.query.symbol, request.query.from, request.query.to));
  }

  /** Persist an intraday data payload. */
  postIntraday(request, response) {
    if (_.isEmpty(request.body)) {
      return QuoteController.badRequest(response);
    }
    QuoteController.respond(response, QuoteService.postIntradayData(request.body));
  }

  /** Option chain for a symbol. */
  getOptionChain(request, response) {
    if (_.isEmpty(request.body)) {
      return QuoteController.badRequest(response);
    }
    QuoteController.respond(response, QuoteService.getOptionChain(request.body.symbol));
  }
}
export default new QuoteController();
| 37.918182
| 97
| 0.691681
|
c38b900f629bbcad5f4af72e58b93175260079dd
| 931
|
cs
|
C#
|
src/GoogleMapsServices.Client/DistanceMatrixElementStatus.cs
|
Jonathan-Hickey/GoogleMapsServices
|
f83335b0282eec3b49227e7e19c06c31ad241a13
|
[
"MIT"
] | null | null | null |
src/GoogleMapsServices.Client/DistanceMatrixElementStatus.cs
|
Jonathan-Hickey/GoogleMapsServices
|
f83335b0282eec3b49227e7e19c06c31ad241a13
|
[
"MIT"
] | null | null | null |
src/GoogleMapsServices.Client/DistanceMatrixElementStatus.cs
|
Jonathan-Hickey/GoogleMapsServices
|
f83335b0282eec3b49227e7e19c06c31ad241a13
|
[
"MIT"
] | null | null | null |
namespace GoogleMapsServices.Client;
/// <summary>- `OK` indicates the response contains a valid result.
/// - `NOT_FOUND` indicates that the origin and/or destination of this pairing could not be geocoded.
/// - `ZERO_RESULTS` indicates no route could be found between the origin and destination.
/// - `MAX_ROUTE_LENGTH_EXCEEDED` indicates the requested route is too long and cannot be processed.
/// </summary>
[System.CodeDom.Compiler.GeneratedCode("NJsonSchema", "10.0.22.0 (Newtonsoft.Json v11.0.0.0)")]
public enum DistanceMatrixElementStatus
{
[System.Runtime.Serialization.EnumMember(Value = @"OK")]
OK = 0,
[System.Runtime.Serialization.EnumMember(Value = @"NOT_FOUND")]
NOT_FOUND = 1,
[System.Runtime.Serialization.EnumMember(Value = @"ZERO_RESULTS")]
ZERO_RESULTS = 2,
[System.Runtime.Serialization.EnumMember(Value = @"MAX_ROUTE_LENGTH_EXCEEDED")]
MAX_ROUTE_LENGTH_EXCEEDED = 3,
}
| 40.478261
| 101
| 0.740064
|
a467fa7cd9f1f3336bc5d8d7895c4c7931c003da
| 377
|
php
|
PHP
|
home/connect-DB.php
|
sjkady/kadyszewski.com
|
0bf4dedfa44d8d95e1062dccb1773d86d5cb80b2
|
[
"MIT"
] | null | null | null |
home/connect-DB.php
|
sjkady/kadyszewski.com
|
0bf4dedfa44d8d95e1062dccb1773d86d5cb80b2
|
[
"MIT"
] | null | null | null |
home/connect-DB.php
|
sjkady/kadyszewski.com
|
0bf4dedfa44d8d95e1062dccb1773d86d5cb80b2
|
[
"MIT"
] | null | null | null |
<?php
/*
CONNECT-DB.PHP
PHP to connect to your database
*/
$server = 'localhost';
$user = 'kadyszew_blogdb';
$pass = '9P]3PR(S7O';
$db = 'kadyszew_blogdb';
$con = mysql_connect($server, $user, $pass);
if (!$con) {
die('Could not connect: ' . mysql_error());
}
$db_selected = mysql_select_db($db);
if (!$db_selected) {
die ('Can\'t use foo : ' . mysql_error());
}
?>
| 18.85
| 47
| 0.618037
|
1b3df79419714b18b7372c6c1e3922bc2b796508
| 742
|
rb
|
Ruby
|
rovers_team_interface.rb
|
Zeaneth/checkr-mars-rovers
|
ec7e21c4a33fb88863a53e69c71b2c298929a28a
|
[
"MIT"
] | null | null | null |
rovers_team_interface.rb
|
Zeaneth/checkr-mars-rovers
|
ec7e21c4a33fb88863a53e69c71b2c298929a28a
|
[
"MIT"
] | null | null | null |
rovers_team_interface.rb
|
Zeaneth/checkr-mars-rovers
|
ec7e21c4a33fb88863a53e69c71b2c298929a28a
|
[
"MIT"
] | null | null | null |
require './rovers_team'
puts 'Hello there!'
puts "This script will let you start running the 'Mars Rovers' code challenge"
puts 'If you have a test text file of your choice, then write its file_path on the next line:'
puts '>>'
file_path = gets.chomp
puts "It seems you didn't add a text file, so we'll use the default one." if file_path.size.zero?
new_enterprise = RoversTeam.new(file_path) unless file_path.size.zero?
new_enterprise = RoversTeam.new if file_path.size.zero?
puts 'Excellent!'
sleep 1
puts 'Now we have the required data to start mission in...'
puts '3...'
sleep 1
puts '2...'
sleep 1
puts '1...'
sleep 1
puts "TAKE OFF!\n============"
sleep 1
puts 'The final coordinates for this mission are:'
new_enterprise.start_mission
| 30.916667
| 97
| 0.735849
|
af55a1a8b19f31f4ad13ee9d7c91c4576a437ec3
| 4,409
|
py
|
Python
|
src/cons/stop_with_multi_road_names.py
|
alex-baciu-dft/Open_NaPTAN
|
abbb3e162f2638099f5050f51d81099f5a0a72a9
|
[
"MIT"
] | 24
|
2020-07-02T12:08:39.000Z
|
2021-05-12T12:07:32.000Z
|
src/cons/stop_with_multi_road_names.py
|
alex-baciu-dft/Open_NaPTAN
|
abbb3e162f2638099f5050f51d81099f5a0a72a9
|
[
"MIT"
] | 11
|
2020-11-04T12:14:15.000Z
|
2022-03-12T00:38:36.000Z
|
src/cons/stop_with_multi_road_names.py
|
alex-baciu-dft/Open_NaPTAN
|
abbb3e162f2638099f5050f51d81099f5a0a72a9
|
[
"MIT"
] | 7
|
2020-07-03T09:32:11.000Z
|
2021-07-23T18:53:09.000Z
|
import pandas as pd
from report import reporting as rep
from checks import NaptanCheck
# %%
class MultiRoadName(NaptanCheck):
"""[summary] A collection of methods to check that the roads names contain
the correct types and collection of words.
Args:
NaptanCheck ([type]): [description]
Returns:
[type]: [description]
"""
# for reporting
check_name = "Check Multiroad Name words in stop"
check_warning_level = "low"
check_geographic_level = "stops"
@classmethod
def stop_with_multiple_road_names(cls, gdf, col_name="CommonName"):
"""[summary]CommonNames in NaPTAN should be simple and not composite.
Most examples of commonnames which include two of the designated
words are ones where two road names are used in a composite name,
contrary to NaPTAN guidance.
This uses regex, but they could be some other way of doing this...
Arguments:
df {[type]} -- [description]
"""
swmrn_gdf = gdf
swmrn_gdf[col_name] = swmrn_gdf[col_name].str.lower()
try:
# leave this here, no it's not being used, just leave it anyway.
targets = [
"road",
"roads",
"street",
"streets",
"avenue",
"avenues",
"garden",
"gardens",
"lane",
"lanes",
"drive",
"drives",
"way",
"ways",
]
# regex patterns for detection.
pattern = r"\b(road|roads|\
street|streets|\
avenue|\avenues|\
garden|gardens|\
lane|lanes\
drive|drives\
way|ways)\b"
fail_rds_re = (
r"\b('street|streets|avenue|avenues|garden|"
r"gardens|lane|lanes|drive|drives|way|ways')\b"
)
fail_aves_re = (
r"\b('road|roads|street|streets|garden|gardens|"
r"lane|lanes|drive|drives|way|ways')\b"
)
fail_gdns_re = (
r"\b('road|roads|street|streets|avenue|avenues|"
r"lane|lanes|drive|drives|way|ways')\b"
)
fail_lanes_re = (
r"\b('road|roads|street|streets|avenue|avenues|"
r"garden|gardens|drive|drives|way|ways')\b"
)
fail_drives_re = (
r"\b('road|roads|street|streets|avenue|avenues|"
r"garden|gardens|lane|lanes|way|ways')\b"
)
fail_ways_re = (
r"\b('road|roads|street|streets|avenue|avenues|"
r"garden|gardens|lane|lanes|drive|drives')\b"
)
tn = swmrn_gdf[swmrn_gdf[col_name].str.contains(pattern, regex=True)]
roads = tn[tn[col_name].str.contains(r"\b(road|roads)\b")]
fail_rds = roads[roads[col_name].str.contains(fail_rds_re, regex=True)]
aves = tn[tn[col_name].str.contains(r"\b(avenue|avenues)\b")]
fail_aves = aves[aves[col_name].str.contains(fail_aves_re, regex=True)]
gdns = tn[tn[col_name].str.contains(r"\b(garden|gardens)\b")]
failgdns = gdns[gdns[col_name].str.contains(fail_gdns_re, regex=True)]
lanes = tn[tn[col_name].str.contains(r"\b(lane|lanes)\b")]
faillanes = lanes[lanes[col_name].str.contains(fail_lanes_re, regex=True)]
drives = tn[tn[col_name].str.contains(r"\b(drive|drives)\b")]
faildrives = drives[
drives[col_name].str.contains(fail_drives_re, regex=True)
]
ways = tn[tn[col_name].str.contains(r"\b(way|ways)\b")]
failways = ways[ways[col_name].str.contains(fail_ways_re, regex=True)]
all_dfs = [fail_rds, fail_aves, failgdns, faillanes, faildrives, failways]
failed_nodes = pd.concat(all_dfs)
failed_nodes[col_name] = failed_nodes[col_name].str.title()
rep.report_failing_nodes(
gdf, "Stop with Multiple road type names", failed_nodes
)
return failed_nodes
except Exception as e:
raise (e)
| 38.675439
| 86
| 0.533454
|
1b5c3b152f5387d0af61e2657af5ca243cc50288
| 1,239
|
cs
|
C#
|
5. AssociativeArrays/AssociativeArraysEx/Courses/Program.cs
|
KirilRogachev/C-
|
6438395e05c40c9b6446c29d37cbda4cfe199faa
|
[
"MIT"
] | null | null | null |
5. AssociativeArrays/AssociativeArraysEx/Courses/Program.cs
|
KirilRogachev/C-
|
6438395e05c40c9b6446c29d37cbda4cfe199faa
|
[
"MIT"
] | null | null | null |
5. AssociativeArrays/AssociativeArraysEx/Courses/Program.cs
|
KirilRogachev/C-
|
6438395e05c40c9b6446c29d37cbda4cfe199faa
|
[
"MIT"
] | null | null | null |
using System;
using System.Collections.Generic;
using System.Linq;
namespace Courses
{
class Program
{
static void Main(string[] args)
{
string input = "";
var cName = new Dictionary<string, List<string>>();
while ((input = Console.ReadLine()) != "end")
{
string[] inputSp = input.Split(":").ToArray();
string course = inputSp[0];
course = course.Trim();
string name = inputSp[1];
name = name.Trim();
if (!cName.ContainsKey(course))
{
cName.Add(course, new List<string>());
}
cName[course].Add(name);
}
cName = cName
.OrderByDescending(x => x.Value.Count)
.ToDictionary(x => x.Key, x => x.Value);
foreach (var item in cName)
{
Console.WriteLine($"{item.Key}: {item.Value.Count}");
var list = item.Value.OrderBy(x => x);
foreach (var ite2 in list)
{
Console.WriteLine($"-- {ite2}");
}
}
}
}
}
| 26.934783
| 69
| 0.430993
|
7f69a09a9efa6acdc52caca43e57b97a853d75a6
| 797
|
php
|
PHP
|
database/seeds/CommentsTableSeeder.php
|
vbsantos/laravel-fundamentals-course
|
94eeb472865dd29459d2d442002a46b9a12858fa
|
[
"MIT"
] | null | null | null |
database/seeds/CommentsTableSeeder.php
|
vbsantos/laravel-fundamentals-course
|
94eeb472865dd29459d2d442002a46b9a12858fa
|
[
"MIT"
] | 3
|
2021-02-02T20:25:23.000Z
|
2021-10-06T19:49:26.000Z
|
database/seeds/CommentsTableSeeder.php
|
vbsantos/laravel-fundamentals-course
|
94eeb472865dd29459d2d442002a46b9a12858fa
|
[
"MIT"
] | null | null | null |
<?php
use Illuminate\Database\Seeder;
class CommentsTableSeeder extends Seeder
{
/**
* Run the database seeds.
*
* @return void
*/
public function run()
{
$posts = App\BlogPost::all();
if ($posts->count() === 0) {
$this->command->info('There are no Blog Posts, so no Comments will be added.');
return;
}
$users = App\User::all();
$commentsCount = (int) $this->command->ask('How many Comments would you like to add?', 100);
factory(App\Comment::class, $commentsCount)->make()->each(function ($comment) use ($posts, $users) {
$comment->blog_post_id = $posts->random()->id;
$comment->user_id = $users->random()->id;
$comment->save();
});
}
}
| 24.90625
| 108
| 0.538269
|
455f478c9acabf6560fb7f36bb147b707d9956c1
| 13,366
|
py
|
Python
|
detDepthStrawb-prunedYolov4Tiny-plainNN.py
|
GlowingHorse/depth-yolov4-tiny-tf2-strawberry-git
|
fc01629ce93799aa639e064662343d4747118c83
|
[
"MIT"
] | 1
|
2022-01-05T08:51:26.000Z
|
2022-01-05T08:51:26.000Z
|
detDepthStrawb-prunedYolov4Tiny-plainNN.py
|
GlowingHorse/depth-yolov4-tiny-tf2-strawberry-git
|
fc01629ce93799aa639e064662343d4747118c83
|
[
"MIT"
] | null | null | null |
detDepthStrawb-prunedYolov4Tiny-plainNN.py
|
GlowingHorse/depth-yolov4-tiny-tf2-strawberry-git
|
fc01629ce93799aa639e064662343d4747118c83
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
"""
Detect strawberry and label its location in spatial
"""
from pathlib import Path
import sys
import cv2
import depthai as dai
import numpy as np
from utils.yolo_utils import yolo_res
from PIL import ImageFont
import time
from datetime import datetime, timedelta
# tiny yolo v4 label texts
labelMap = [
"MatureStrawberry", "GreenStrawberry"
]
syncNN = True
subpixel = False
downscaleColor = True
TARGET_SHAPE = (416, 416)
# Get argument first
nnBlobPath = str((Path(__file__).parent / Path('models/prunedYolov4TinyStrawberry.blob')).resolve().absolute())
if len(sys.argv) > 1:
nnBlobPath = sys.argv[1]
# Start defining a pipeline
pipeline = dai.Pipeline()
pipeline.setOpenVINOVersion(dai.OpenVINO.Version.VERSION_2021_2)
# ----------------------------------------------------------
print("Creating Color Camera...")
cam_rgb = pipeline.createColorCamera()
cam_rgb.setResolution(dai.ColorCameraProperties.SensorResolution.THE_1080_P)
# Color cam: 1920x1080
# Mono cam: 640x400
cam_rgb.setIspScale(2, 3) # To match 400P mono cameras
cam_rgb.setBoardSocket(dai.CameraBoardSocket.RGB)
cam_rgb.initialControl.setManualFocus(130)
# for yolo-v4-tiny
cam_rgb.setColorOrder(dai.ColorCameraProperties.ColorOrder.RGB)
cam_rgb.setPreviewSize(TARGET_SHAPE[0], TARGET_SHAPE[1])
cam_rgb.setInterleaved(False)
# isp output linked to XLinkOut
isp_xout = pipeline.createXLinkOut()
isp_xout.setStreamName("cam")
cam_rgb.isp.link(isp_xout.input)
# ---------------------------------------------------------
print("Creating Neural Network...")
detection_nn = pipeline.createNeuralNetwork()
detection_nn.setBlobPath(nnBlobPath)
detection_nn.input.setBlocking(False)
detection_nn.setNumInferenceThreads(2)
cam_rgb.preview.link(detection_nn.input)
# NN output linked to XLinkOut
xout_nn = pipeline.createXLinkOut()
xout_nn.setStreamName("nn")
detection_nn.out.link(xout_nn.input)
# xout_passthrough = pipeline.createXLinkOut()
# xout_passthrough.setStreamName("pass")
# xout_passthrough.setMetadataOnly(True)
# detection_nn.passthrough.link(xout_passthrough.input)
# --------------------------------------------------
monoLeft = pipeline.createMonoCamera()
monoRight = pipeline.createMonoCamera()
monoLeft.setResolution(dai.MonoCameraProperties.SensorResolution.THE_400_P)
monoLeft.setBoardSocket(dai.CameraBoardSocket.LEFT)
monoRight.setResolution(dai.MonoCameraProperties.SensorResolution.THE_400_P)
monoRight.setBoardSocket(dai.CameraBoardSocket.RIGHT)
outputDepth = True
outputRectified = False
lrcheck = False
subpixel = False
# StereoDepth
stereo = pipeline.createStereoDepth()
stereo.setOutputDepth(outputDepth)
stereo.setOutputRectified(outputRectified)
stereo.setLeftRightCheck(lrcheck)
stereo.setSubpixel(subpixel)
stereo.setConfidenceThreshold(255)
stereo.setDepthAlign(dai.CameraBoardSocket.RGB)
monoLeft.out.link(stereo.left)
monoRight.out.link(stereo.right)
xout_depth = pipeline.createXLinkOut()
xout_depth.setStreamName("depth")
# stereo.depth.link(xout_depth.input)
# -----------------------------------------------------
spatialLocationCalculator = pipeline.createSpatialLocationCalculator()
xoutSpatialData = pipeline.createXLinkOut()
xinSpatialCalcConfig = pipeline.createXLinkIn()
xoutSpatialData.setStreamName("spatialData")
xinSpatialCalcConfig.setStreamName("spatialCalcConfig")
spatialLocationCalculator.passthroughDepth.link(xout_depth.input)
stereo.depth.link(spatialLocationCalculator.inputDepth)
spatialLocationCalculator.setWaitForConfigInput(False)
# ----------
topLeft_ori = dai.Point2f(0.5, 0.5)
bottomRight_ori = dai.Point2f(0.51, 0.51)
config_ori = dai.SpatialLocationCalculatorConfigData()
config_ori.depthThresholds.lowerThreshold = 100
config_ori.depthThresholds.upperThreshold = 10000
config_ori.roi = dai.Rect(topLeft_ori, bottomRight_ori)
spatialLocationCalculator.initialConfig.addROI(config_ori)
# ---------------------------------------
spatialLocationCalculator.out.link(xoutSpatialData.input)
xinSpatialCalcConfig.out.link(spatialLocationCalculator.inputConfig)
# nn data, being the bounding box locations, are in <0..1> range - they need to be normalized with frame width/height
def frameNorm(frame, bbox):
normVals = np.full(len(bbox), frame.shape[0])
normVals[::2] = frame.shape[1]
return (np.clip(np.array(bbox), 0, 1) * normVals).astype(int)
def to_tensor_result(packet):
return {
tensor.name: np.array(packet.getLayerFp16(tensor.name)).reshape(tensor.dims)
for tensor in packet.getRaw().tensors
}
def softmax(x):
"""Compute softmax values for each sets of scores in x."""
e_x = np.exp(x - np.max(x))
return e_x / e_x.sum(axis=0)
def to_planar(arr: np.ndarray, shape: tuple) -> np.ndarray:
resized = cv2.resize(arr, shape)
return resized.transpose(2, 0, 1)
def crop_to_square(frame):
height = frame.shape[0]
width = frame.shape[1]
delta = int((width-height) / 2)
# print(height, width, delta)
return frame[0:height, delta:width-delta]
def dispay_depth(frame, name):
frame_colored = cv2.normalize(frame, None, 255, 0, cv2.NORM_INF, cv2.CV_8UC1)
frame_colored = cv2.equalizeHist(frame_colored)
cv2.imshow(name, frame_colored)
return frame_colored
font = ImageFont.truetype(font='font/simhei.ttf',
size=np.floor(3e-2 * 416 + 0.5).astype('int32'))
colors = [(255, 0, 0)]
color = (255, 255, 255)
stepSize = 0.05
# Pipeline defined, now the device is connected to
with dai.Device(pipeline) as device:
# Start pipeline
device.startPipeline()
q_color = device.getOutputQueue(name="cam", maxSize=4, blocking=False)
q_nn = device.getOutputQueue(name="nn", maxSize=4, blocking=False)
# Output queue will be used to get the depth frames from the outputs defined above
q_depth = device.getOutputQueue(name="depth", maxSize=4, blocking=False)
spatialCalcQueue = device.getOutputQueue(name="spatialData", maxSize=4, blocking=False)
spatialCalcConfigInQueue = device.getInputQueue("spatialCalcConfig")
frameRgb = None
frameDepth = None
while True:
start_time = time.time()
in_nn = q_nn.tryGet()
if in_nn is not None:
all_layer_names = in_nn.getAllLayerNames()
all_layer_data_l = []
for i_layer in range(len(all_layer_names)):
single_layer_data = in_nn.getLayerFp16(all_layer_names[i_layer])
all_layer_data_l.append(single_layer_data)
# (1, 18, 13, 13) conv17
# (1, 18, 26, 26) conv20
conv17 = np.array(all_layer_data_l[0], dtype=np.float32).reshape([1, 21, 13, 13])
conv20 = np.array(all_layer_data_l[1], dtype=np.float32).reshape([1, 21, 26, 26])
conv17_trans = np.transpose(conv17, (0, 2, 3, 1))
conv20_trans = np.transpose(conv20, (0, 2, 3, 1))
print("--- {:.8f} microsecond ---".format(1000000*(time.time() - start_time)))
print("--- fps: {} ---".format(1/(time.time() - start_time + 0.0000001)))
out_boxes, out_scores, out_classes = yolo_res([conv17_trans, conv20_trans, (416, 416)], num_classes=2)
# Get NN output timestamp from the passthrough
in_rgb = q_color.get()
frame = in_rgb.getCvFrame()
frame = crop_to_square(frame)
frame = cv2.resize(frame, TARGET_SHAPE)
frame = np.ascontiguousarray(frame)
cfg = dai.SpatialLocationCalculatorConfig()
for i, c in list(enumerate(out_classes)):
box = out_boxes[i]
score = out_scores[i]
top, left, bottom, right = box
top = top - 5
left = left - 5
bottom = bottom + 5
right = right + 5
top = max(0, np.floor(top + 0.5).astype('int32'))
left = max(0, np.floor(left + 0.5).astype('int32'))
bottom = min(416, np.floor(bottom + 0.5).astype('int32'))
right = min(416, np.floor(right + 0.5).astype('int32'))
cv2.rectangle(frame, (left, top), (right, bottom), (255, 0, 0), 2)
cv2.putText(frame, labelMap[c], (left + 10, top + 20), cv2.FONT_HERSHEY_TRIPLEX,
0.5, 255)
cv2.putText(frame, f"{int(score * 100)}%", (left + 10, top + 40),
cv2.FONT_HERSHEY_TRIPLEX, 0.5, 255)
config = dai.SpatialLocationCalculatorConfigData()
config.depthThresholds.lowerThreshold = 100
config.depthThresholds.upperThreshold = 10000
topLeft = dai.Point2f(left/416, top/416)
bottomRight = dai.Point2f(right/416, bottom/416)
config.roi = dai.Rect(topLeft, bottomRight)
cfg.addROI(config)
# if cfg is not None:
if len(cfg.getConfigData()) != 0:
spatialCalcConfigInQueue.send(cfg)
else:
config_ori = dai.SpatialLocationCalculatorConfigData()
config_ori.depthThresholds.lowerThreshold = 100
config_ori.depthThresholds.upperThreshold = 10000
config_ori.roi = dai.Rect(topLeft_ori, bottomRight_ori)
cfg.addROI(config_ori)
spatialCalcConfigInQueue.send(cfg)
in_depth = q_depth.tryGet()
inDepthAvg = spatialCalcQueue.tryGet()
if in_depth is not None and inDepthAvg is not None:
depth_frame = in_depth.getFrame()
maxDisparity = 95
if subpixel:
maxDisparity *= 32
depth_frame_processed = cv2.normalize(depth_frame, None, 255, 0, cv2.NORM_INF, cv2.CV_8UC1)
depth_frame_processed = cv2.equalizeHist(depth_frame_processed)
depth_frame_processed = crop_to_square(depth_frame_processed)
depth_frame_processed = cv2.resize(depth_frame_processed, TARGET_SHAPE)
depth_frame_processed = np.ascontiguousarray(depth_frame_processed)
spatialData = inDepthAvg.getSpatialLocations()
for depthData in spatialData:
roi = depthData.config.roi
roi = roi.denormalize(width=depth_frame_processed.shape[1], height=depth_frame_processed.shape[0])
xmin = int(roi.topLeft().x)
ymin = int(roi.topLeft().y)
xmax = int(roi.bottomRight().x)
ymax = int(roi.bottomRight().y)
fontType = cv2.FONT_HERSHEY_TRIPLEX
cv2.rectangle(depth_frame_processed, (xmin, ymin), (xmax, ymax), color, cv2.FONT_HERSHEY_SCRIPT_SIMPLEX)
cv2.putText(depth_frame_processed, f"X: {int(depthData.spatialCoordinates.x)} mm", (xmin + 10, ymin + 20),
fontType,
0.5, color)
cv2.putText(depth_frame_processed, f"Y: {int(depthData.spatialCoordinates.y)} mm", (xmin + 10, ymin + 35),
fontType,
0.5, color)
cv2.putText(depth_frame_processed, f"Z: {int(depthData.spatialCoordinates.z)} mm", (xmin + 10, ymin + 50),
fontType,
0.5, color)
cv2.imshow("rgb", frame)
cv2.imshow("depth", depth_frame_processed)
start_time = 0
newConfig = False
key = cv2.waitKey(1)
if key == ord('q'):
break
# elif key == ord('w'):
# if topLeft.y - stepSize >= 0:
# topLeft.y -= stepSize
# bottomRight.y -= stepSize
# newConfig = True
# elif key == ord('a'):
# if topLeft.x - stepSize >= 0:
# topLeft.x -= stepSize
# bottomRight.x -= stepSize
# newConfig = True
# elif key == ord('s'):
# if bottomRight.y + stepSize <= 1:
# topLeft.y += stepSize
# bottomRight.y += stepSize
# newConfig = True
# elif key == ord('d'):
# if bottomRight.x + stepSize <= 1:
# topLeft.x += stepSize
# bottomRight.x += stepSize
# newConfig = True
#
# if newConfig:
# cfg = dai.SpatialLocationCalculatorConfig()
#
# config = dai.SpatialLocationCalculatorConfigData()
# config.depthThresholds.lowerThreshold = 100
# config.depthThresholds.upperThreshold = 10000
#
# topLeft = dai.Point2f(0.4, 0.4)
# bottomRight = dai.Point2f(0.6, 0.6)
# config.roi = dai.Rect(topLeft, bottomRight)
# cfg.addROI(config)
#
# config2 = dai.SpatialLocationCalculatorConfigData()
# config2.depthThresholds.lowerThreshold = 100
# config2.depthThresholds.upperThreshold = 10000
#
# topLeft2 = dai.Point2f(0.6, 0.6)
# bottomRight2 = dai.Point2f(0.7, 0.7)
# config2.roi = dai.Rect(topLeft2, bottomRight2)
# cfg.addROI(config2)
#
# spatialCalcConfigInQueue.send(cfg)
| 37.757062
| 126
| 0.624645
|
2884dba86e65f6479b712b4e66f2f40ae0636ff5
| 1,143
|
asm
|
Assembly
|
programs/oeis/193/A193041.asm
|
jmorken/loda
|
99c09d2641e858b074f6344a352d13bc55601571
|
[
"Apache-2.0"
] | 1
|
2021-03-15T11:38:20.000Z
|
2021-03-15T11:38:20.000Z
|
programs/oeis/193/A193041.asm
|
jmorken/loda
|
99c09d2641e858b074f6344a352d13bc55601571
|
[
"Apache-2.0"
] | null | null | null |
programs/oeis/193/A193041.asm
|
jmorken/loda
|
99c09d2641e858b074f6344a352d13bc55601571
|
[
"Apache-2.0"
] | null | null | null |
; A193041: Coefficient of x in the reduction by x^2->x+1 of the polynomial p(n,x) defined at Comments.
; 0,1,3,13,44,122,292,631,1267,2411,4408,7820,13560,23109,38867,64721,106964,175782,287660,469275,763795,1241071,2014128,3265848,5292144,8571817,13879587,22468981,36368252,58859186,95251828,154138015
mov $17,$0
mov $19,$0
lpb $19
clr $0,17
mov $0,$17
sub $19,1
sub $0,$19
mov $14,$0
mov $16,$0
lpb $16
clr $0,14
mov $0,$14
sub $16,1
sub $0,$16
mov $11,$0
mov $13,$0
lpb $13
mov $0,$11
sub $13,1
sub $0,$13
mov $7,$0
mov $9,2
lpb $9
mov $0,$7
sub $9,1
add $0,$9
sub $0,1
mov $3,10
mov $6,6
lpb $0
sub $0,1
mov $2,$6
add $2,3
add $6,$3
mov $3,$2
sub $6,3
lpe
mov $1,$6
mov $10,$9
lpb $10
mov $8,$1
sub $10,1
lpe
lpe
lpb $7
mov $7,0
sub $8,$1
lpe
mov $1,$8
sub $1,6
add $12,$1
lpe
add $15,$12
lpe
add $18,$15
lpe
mov $1,$18
| 18.737705
| 199
| 0.468941
|
643ba60ff0d5a117567a7fdda6d3cc8d966d2bfb
| 3,611
|
py
|
Python
|
backend/webapp/api/orders_serializers.py
|
MarcoLagalla/marette_backend
|
d01458028da151d6ee583a080b1146792e81f01b
|
[
"MIT"
] | 1
|
2020-06-26T14:34:03.000Z
|
2020-06-26T14:34:03.000Z
|
backend/webapp/api/orders_serializers.py
|
MarcoLagalla/marette_backend
|
d01458028da151d6ee583a080b1146792e81f01b
|
[
"MIT"
] | 26
|
2020-06-12T14:36:59.000Z
|
2020-07-10T08:39:53.000Z
|
backend/webapp/api/orders_serializers.py
|
MarcoLagalla/marette_backend
|
d01458028da151d6ee583a080b1146792e81f01b
|
[
"MIT"
] | null | null | null |
from rest_framework import serializers
from ..models.orders import Order, OrderMenuEntry, OrderProductEntry
from ..models.models import Customer, Restaurant, Product
from ..models.menu import Menu
class OrderProductSerializer(serializers.ModelSerializer):
class Meta:
model = OrderProductEntry
fields = '__all__'
class OrderMenuSerializer(serializers.ModelSerializer):
class Meta:
model = OrderMenuEntry
fields = '__all__'
class ReadOrderSerializer(serializers.ModelSerializer):
items = OrderProductSerializer(many=True)
menus_items = OrderMenuSerializer(many=True)
total = serializers.SerializerMethodField()
discount = serializers.SerializerMethodField()
imposable = serializers.SerializerMethodField()
iva = serializers.SerializerMethodField()
class Meta:
model = Order
fields = ('id', 'date_created', 'user', 'restaurant', 'items', 'menus_items',
'total', 'discount', 'imposable', 'iva')
def get_total(self, obj):
return obj.get_total()
def get_discount(self, obj):
return obj.get_total_discount()
def get_imposable(self, obj):
return str(obj.get_imposable())
def get_iva(self, obj):
return obj.get_total_iva()
class OrderSerializer(serializers.ModelSerializer):
items = OrderProductSerializer(many=True, required=False)
menus_items = OrderMenuSerializer(many=True, required=False)
class Meta:
model = Order
fields = ('id', 'date_created', 'user', 'restaurant', 'items', 'menus_items')
def validate(self, attrs):
val_errors = {}
rest = attrs.get('restaurant', None)
rest = Restaurant.objects.all().get(id=rest.id)
try:
items = attrs.get('items', [])
for item in items:
try:
prodotto = Product.objects.all().filter(restaurant=rest).get(id=item['product'].id)
except Product.DoesNotExist:
raise serializers.ValidationError({'error': 'Prodotto ' + item['product'] + ' non trovato'})
except KeyError:
items = []
try:
menus_items = attrs.get('menus_items', [])
for item in menus_items:
try:
menu = Menu.objects.all().filter(restaurant=rest).get(id=item['menu'].id)
except Menu.DoesNotExist:
raise serializers.ValidationError({'error': f'Menu ' + item['menu'] + ' non trovato'})
except KeyError:
menus_items = []
if len(items) == 0 and len(menus_items) == 0:
raise serializers.ValidationError({'error': 'Impossibile creare un ordine senza prodotti'})
if len(val_errors.keys()) != 0:
raise serializers.ValidationError(val_errors)
return attrs
def save(self, **kwargs):
items = self.validated_data.get('items', None)
menus_items = self.validated_data.get('menus_items', None)
order = Order.objects.create(restaurant=self.validated_data['restaurant'],
user=self.validated_data['user'])
if items:
for item in items:
pe = OrderProductEntry.objects.create(product=item['product'], quantity=item['quantity'])
order.items.add(pe)
if menus_items:
for item in menus_items:
pe = OrderMenuEntry.objects.create(menu=item['menu'], quantity=item['quantity'])
order.menus_items.add(pe)
order.save()
return order
| 32.827273
| 112
| 0.616727
|
9621326b7d79d09e95f633a8c75d2752da5be9c4
| 1,141
|
lua
|
Lua
|
Mods ChoGGi/Fix Olympus Hotel Stuck Colonists/metadata.lua
|
awabast/SurvivingMars_CheatMods
|
bbb4a906852e0fd6936f3fff4e77265ccdaa9983
|
[
"MIT"
] | 1
|
2021-04-07T21:17:25.000Z
|
2021-04-07T21:17:25.000Z
|
Mods ChoGGi/Fix Olympus Hotel Stuck Colonists/metadata.lua
|
awabast/SurvivingMars_CheatMods
|
bbb4a906852e0fd6936f3fff4e77265ccdaa9983
|
[
"MIT"
] | null | null | null |
Mods ChoGGi/Fix Olympus Hotel Stuck Colonists/metadata.lua
|
awabast/SurvivingMars_CheatMods
|
bbb4a906852e0fd6936f3fff4e77265ccdaa9983
|
[
"MIT"
] | null | null | null |
return PlaceObj("ModDef", {
"dependencies", {
PlaceObj("ModDependency", {
"id", "ChoGGi_Library",
"title", "ChoGGi's Library",
"version_major", 9,
"version_minor", 6,
}),
},
"title", "Fix Olympus Hotel Stuck Colonists",
"id", "ChoGGi_FixOlympusHotelStuckColonists",
"steam_id", "2428732491",
"pops_any_uuid", "1442ea5d-7d0f-4545-8a93-5c775b59d514",
"lua_revision", 1001569,
"version", 1,
"version_major", 0,
"version_minor", 1,
"image", "Preview.jpg",
"author", "ChoGGi",
"code", {
"Code/Script.lua",
},
"TagOther", true,
"has_options", true,
"description", [[Obsolete: Fixed in https://forum.paradoxplaza.com/forum/threads/tourism-update-hotfix-3.1463960/
(this will still unstick them, since the devs didn't bother doing that)
The hotel uses a door model from Space Race DLC (the Mega Mall), it bugs out when you don't have that DLC.
This will allow the fake door to work properly, and unstick any colonists doing the human centipede.
Turns out this mod also fixes an issue of returning expedition rockets stuck unloading colonists: https://github.com/ChoGGi/SurvivingMars_CheatMods/issues/33
]],
})
| 31.694444
| 157
| 0.716915
|
20cf6412f4260879bead0fc1bad11c0a34d2f988
| 3,118
|
py
|
Python
|
crop_img.py
|
Weifeng-Chen/tools
|
2d436de2af2f437e3c4c009f4cbbc498b8fe86b0
|
[
"MIT"
] | 122
|
2021-01-07T08:04:10.000Z
|
2022-03-23T08:51:40.000Z
|
crop_img.py
|
Weifeng-Chen/tools
|
2d436de2af2f437e3c4c009f4cbbc498b8fe86b0
|
[
"MIT"
] | 3
|
2021-01-19T02:04:33.000Z
|
2022-02-07T09:46:46.000Z
|
crop_img.py
|
Weifeng-Chen/tools
|
2d436de2af2f437e3c4c009f4cbbc498b8fe86b0
|
[
"MIT"
] | 60
|
2021-01-18T06:42:15.000Z
|
2022-03-26T17:05:25.000Z
|
"""
yolo格式数据,裁剪图像中心区域,生成一批新数据。
"""
import cv2
import os
from tqdm import tqdm
def plot_bbox(img, gt=None ,line_thickness=None):
# 可视化测试
colorlist = []
# 5^3种颜色。
for i in range(30,256,50):
for j in range(40,256,50):
for k in range(50,256,50):
colorlist.append((i,j,k))
height, width,_ = img.shape
tl = line_thickness or round(0.002 * (width + height) / 2) + 1 # line/font thickness
font = cv2.FONT_HERSHEY_SIMPLEX
tf = max(tl - 1, 1) # font thickness
with open(gt,'r') as f:
annotations = f.readlines()
# print(annotations)
for ann in annotations:
ann = list(map(float,ann.split()))
ann[0] = int(ann[0])
# print(ann)
cls,x,y,w,h = ann
color = colorlist[cls]
c1, c2 = (int((x-w/2)*width),int((y-h/2)*height)), (int((x+w/2)*width), int((y+h/2)*height))
cv2.rectangle(img, c1, c2, color, thickness=tl*2, lineType=cv2.LINE_AA)
return img
def parse_label(gt, crop_ratio=0.25,):
scale_ratio = 1/(1-crop_ratio*2)
out_str = ''
with open(gt,'r') as f:
annotations = f.readlines()
# print(annotations)
for ann in annotations:
ann = list(map(float,ann.split()))
# print(ann)
if crop_ratio < ann[1] < 1-crop_ratio and crop_ratio < ann[2] < 1-crop_ratio:
# center point in the specified area
# print(ann)
out_l = [int(ann[0]), ann[1]-crop_ratio, ann[2]-crop_ratio, ann[3], ann[4]]
out_l[1:] = [out*scale_ratio for out in out_l[1:]]
out_l = list(map(str,out_l))
out_str += ' '.join(out_l) +'\n'
return out_str
if __name__ == '__main__':
origin_root_dir = '/home/winner/chenwf/yolov5/data/pedestrian/train'
save_dir = '/home/winner/chenwf/yolov5/data/pedestrian/train_crop'
if not os.path.exists(save_dir):
os.makedirs(os.path.join(save_dir,'images'))
os.makedirs(os.path.join(save_dir,'labels'))
img_dir = os.path.join(origin_root_dir,'images')
label_dir = os.path.join(origin_root_dir,'labels')
img_names = os.listdir(img_dir)
# crop ratio
crop_ratio = 0.25
for img_name in tqdm(img_names):
label_name = img_name.replace('jpg','txt')
img_path = os.path.join(img_dir, img_name)
label_path = os.path.join(label_dir, label_name)
img = cv2.imread(img_path)
height,width,_ = img.shape
out_str = parse_label(label_path, crop_ratio)
if out_str:
# 空样本就不添加了
with open(os.path.join(save_dir, 'labels', label_name),'w') as f:
# write
f.write(out_str)
# crop
crop_img = img[int(height*crop_ratio):height-int(height*crop_ratio),int(width*crop_ratio):width-int(width*crop_ratio),:]
# plot_bbox(crop_img, os.path.join(save_dir, 'labels', label_name)) # visualize the bbox
cv2.imwrite(os.path.join(save_dir, 'images', img_name),crop_img)
| 34.644444
| 132
| 0.577935
|
440ca45a4bddc979e7138d3e386fe9d8aaba4fad
| 293
|
py
|
Python
|
global_params.py
|
Xpitfire/XdA
|
3b5e056d4a2c24d1720a484aa6c2471e9636036f
|
[
"MIT"
] | null | null | null |
global_params.py
|
Xpitfire/XdA
|
3b5e056d4a2c24d1720a484aa6c2471e9636036f
|
[
"MIT"
] | null | null | null |
global_params.py
|
Xpitfire/XdA
|
3b5e056d4a2c24d1720a484aa6c2471e9636036f
|
[
"MIT"
] | null | null | null |
class Args(object):
def __init__(self):
self.seed = 0
self.experiment = None
self.network = None
self.approach = None
self.parameter = []
self.taskcla = None
self.comment = None
self.log_dir = None
global args
args = Args()
| 20.928571
| 30
| 0.559727
|
5d52859886d9614157f040a1de9da8c17af91420
| 10,425
|
hpp
|
C++
|
third_party/omr/fvtest/compilertest/compile/Method.hpp
|
xiacijie/omr-wala-linkage
|
a1aff7aef9ed131a45555451abde4615a04412c1
|
[
"Apache-2.0"
] | null | null | null |
third_party/omr/fvtest/compilertest/compile/Method.hpp
|
xiacijie/omr-wala-linkage
|
a1aff7aef9ed131a45555451abde4615a04412c1
|
[
"Apache-2.0"
] | null | null | null |
third_party/omr/fvtest/compilertest/compile/Method.hpp
|
xiacijie/omr-wala-linkage
|
a1aff7aef9ed131a45555451abde4615a04412c1
|
[
"Apache-2.0"
] | null | null | null |
/*******************************************************************************
* Copyright (c) 2000, 2019 IBM Corp. and others
*
* This program and the accompanying materials are made available under
* the terms of the Eclipse Public License 2.0 which accompanies this
* distribution and is available at http://eclipse.org/legal/epl-2.0
* or the Apache License, Version 2.0 which accompanies this distribution
* and is available at https://www.apache.org/licenses/LICENSE-2.0.
*
* This Source Code may also be made available under the following Secondary
* Licenses when the conditions for such availability set forth in the
* Eclipse Public License, v. 2.0 are satisfied: GNU General Public License,
* version 2 with the GNU Classpath Exception [1] and GNU General Public
* License, version 2 with the OpenJDK Assembly Exception [2].
*
* [1] https://www.gnu.org/software/classpath/license.html
* [2] http://openjdk.java.net/legal/assembly-exception.html
*
* SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 OR GPL-2.0 WITH Classpath-exception-2.0 OR LicenseRef-GPL-2.0 WITH Assembly-exception
*******************************************************************************/
#ifndef TEST_METHOD_INCL
#define TEST_METHOD_INCL
#ifndef TR_RESOLVEDMETHOD_COMPOSED
#define TR_RESOLVEDMETHOD_COMPOSED
#define PUT_TEST_RESOLVEDMETHOD_INTO_TR
#endif // TR_RESOLVEDMETHOD_COMPOSED
#include <string.h>
// Choose the OMR base version directly. This is only temporary
// while the Method class is being made extensible.
//
#include "compiler/compile/Method.hpp"
#include "compile/ResolvedMethod.hpp"
namespace TR { class IlGeneratorMethodDetails; }
namespace TR { class IlType; }
namespace TR { class TypeDictionary; }
namespace TR { class IlInjector; }
namespace TR { class MethodBuilder; }
namespace TR { class FrontEnd; }
// quick and dirty implementation to get up and running
// needs major overhaul
namespace TestCompiler
{
// Minimal TR::Method for the test compiler: answers metadata queries
// with fixed placeholder strings, since there is no real method
// database behind the test front end.
class Method : public TR::Method
   {
   public:
   TR_ALLOC(TR_Memory::Method);
   // Tag the method as belonging to the Test compiler.
   Method() : TR::Method(TR::Method::Test) {}
   // FIXME: need to provide real code for this group
   // Lengths are derived from the corresponding *Chars() C strings.
   virtual uint16_t classNameLength() { return strlen(classNameChars()); }
   virtual uint16_t nameLength() { return strlen(nameChars()); }
   virtual uint16_t signatureLength() { return strlen(signatureChars()); }
   virtual char * nameChars() { return "Method"; }
   virtual char * classNameChars() { return ""; } // no class concept in the test compiler
   virtual char * signatureChars() { return "()V"; } // JVM-style "no args, void" placeholder
   virtual bool isConstructor() { return false; }
   virtual bool isFinalInObject() { return false; }
   };
// Shared TR_ResolvedMethod defaults for test-compiler methods: models a
// plain static, public, non-native, non-synchronized method with no
// exception handlers and no backward branches.
class ResolvedMethodBase : public TR_ResolvedMethod
   {
   // Name/class-name lengths fall back to the signature length; the
   // concrete subclass supplies signatureChars().
   virtual uint16_t nameLength() { return signatureLength(); }
   virtual uint16_t classNameLength() { return signatureLength(); }
   virtual uint16_t signatureLength() { return strlen(signatureChars()); }
   // This group of functions only make sense for Java - we ought to provide answers from that definition
   virtual bool isConstructor() { return false; }
   virtual bool isNonEmptyObjectConstructor() { return false; }
   virtual bool isFinalInObject() { return false; }
   virtual bool isStatic() { return true; }
   virtual bool isAbstract() { return false; }
   virtual bool isCompilable(TR_Memory *) { return true; }
   virtual bool isNative() { return false; }
   virtual bool isSynchronized() { return false; }
   virtual bool isPrivate() { return false; }
   virtual bool isProtected() { return false; }
   virtual bool isPublic() { return true; }
   virtual bool isFinal() { return false; }
   virtual bool isStrictFP() { return false; }
   virtual bool isSubjectToPhaseChange(TR::Compilation *comp) { return false; }
   virtual bool hasBackwardBranches() { return false; }
   virtual bool isNewInstanceImplThunk() { return false; }
   virtual bool isJNINative() { return false; }
   virtual bool isJITInternalNative() { return false; }
   uint32_t numberOfExceptionHandlers() { return 0; }
   // Method identity is defined by the persistent identifier, not by
   // pointer equality of the wrapper objects.
   virtual bool isSameMethod(TR_ResolvedMethod *other)
      {
      return getPersistentIdentifier() == other->getPersistentIdentifier();
      }
   };
// Resolved method for the test compiler.  Instead of a class file, a
// method is described by a source location (file/line), a name, IL
// parameter/return types, and the IlInjector that generates its IL.
class ResolvedMethod : public ResolvedMethodBase, public Method
   {
   public:
   ResolvedMethod(TR_OpaqueMethodBlock *method);
   ResolvedMethod(TR::MethodBuilder *methodBuilder);
   ResolvedMethod(const char * fileName,
                  const char * lineNumber,
                  char * name,
                  int32_t numParms,
                  TR::IlType ** parmTypes,
                  TR::IlType * returnType,
                  void * entryPoint,
                  TR::IlInjector * ilInjector)
      : _fileName(fileName),
      _lineNumber(lineNumber),
      _name(name),
      _signature(0),
      _numParms(numParms),
      _parmTypes(parmTypes),
      _returnType(returnType),
      _entryPoint(entryPoint),
      _ilInjector(ilInjector)
      {
      // Pre-compute the signature string into _signatureChars.
      computeSignatureChars();
      }
   virtual TR::Method * convertToMethod() { return this; }
   virtual const char * signature(TR_Memory *, TR_AllocationKind);
   char * localName (uint32_t slot, uint32_t bcIndex, int32_t &nameLength, TR_Memory *trMemory);
   // The source file name doubles as the "class name".
   virtual char * classNameChars() { return (char *)_fileName; }
   virtual char * nameChars() { return _name; }
   virtual char * signatureChars() { return _signatureChars; }
   virtual uint16_t signatureLength() { return strlen(signatureChars()); }
   // The IlInjector pointer serves as both the resolved-method address
   // and the persistent identifier for this method (see below).
   virtual void * resolvedMethodAddress() { return (void *)_ilInjector; }
   virtual uint16_t numberOfParameterSlots() { return _numParms; }
   virtual TR::DataType parmType(uint32_t slot);
   virtual uint16_t numberOfTemps() { return 0; }
   virtual void * startAddressForJittedMethod() { return (getEntryPoint()); }
   virtual void * startAddressForInterpreterOfJittedMethod() { return 0; }
   virtual uint32_t maxBytecodeIndex() { return 0; }
   virtual uint8_t * code() { return NULL; }
   virtual TR_OpaqueMethodBlock* getPersistentIdentifier() { return (TR_OpaqueMethodBlock *) _ilInjector; }
   // "Interpreted" here simply means no entry point has been set yet.
   virtual bool isInterpreted() { return startAddressForJittedMethod() == 0; }
   const char * getLineNumber() { return _lineNumber;}
   char * getSignature() { return _signature;}
   TR::DataType returnType();
   TR::IlType * returnIlType() { return _returnType; }
   int32_t getNumArgs() { return _numParms;}
   void setEntryPoint(void *ep) { _entryPoint = ep; }
   void * getEntryPoint() { return _entryPoint; }
   void computeSignatureCharsPrimitive();
   void computeSignatureChars();
   virtual void makeParameterList(TR::ResolvedMethodSymbol *);
   TR::IlInjector *getInjector(TR::IlGeneratorMethodDetails * details,
                               TR::ResolvedMethodSymbol *methodSymbol,
                               TR::FrontEnd *fe,
                               TR::SymbolReferenceTable *symRefTab);
   protected:
   const char *_fileName;
   const char *_lineNumber;
   char *_name;
   char *_signature;
   // Fixed-size buffer for the computed signature; assumes the signature
   // fits in 64 bytes — TODO(review): confirm computeSignatureChars()
   // guards against overflow.
   char _signatureChars[64];
   int32_t _numParms;
   TR::IlType ** _parmTypes;
   TR::IlType * _returnType;
   void * _entryPoint;
   TR::IlInjector * _ilInjector;
   };
} // namespace TestCompiler
#if defined(PUT_TEST_RESOLVEDMETHOD_INTO_TR)
namespace TR
{
// TR-namespace facade for the test compiler's ResolvedMethod, exposed
// only when PUT_TEST_RESOLVEDMETHOD_INTO_TR is in effect (see the top
// of this header).  Each constructor simply forwards to the
// TestCompiler base; note fileName/lineNumber are taken as char* here
// and implicitly convert to the base's const char* parameters.
class ResolvedMethod : public TestCompiler::ResolvedMethod
   {
   public:
   ResolvedMethod(TR_OpaqueMethodBlock *method)
      : TestCompiler::ResolvedMethod(method)
      { }
   ResolvedMethod(char * fileName,
                  char * lineNumber,
                  char * name,
                  int32_t numArgs,
                  TR::IlType ** parmTypes,
                  TR::IlType * returnType,
                  void * entryPoint,
                  TR::IlInjector * ilInjector)
      : TestCompiler::ResolvedMethod(fileName, lineNumber, name, numArgs,
                                     parmTypes, returnType,
                                     entryPoint, ilInjector)
      { }
   ResolvedMethod(TR::MethodBuilder *methodBuilder)
      : TestCompiler::ResolvedMethod(methodBuilder)
      { }
   };
} // namespace TR
#endif // defined(PUT_TEST_RESOLVEDMETHOD_INTO_TR)
#endif // !defined(TEST_METHOD_INCL)
| 46.333333
| 135
| 0.523933
|
cd674526811fa4ba19caf14b48f92d3183498980
| 188
|
cs
|
C#
|
FabricAdcHub.User/Events/InternalEvent.cs
|
Caraul/FabricAdcHub
|
f19952211b63493ba018e9c9c028bfbccf38b38d
|
[
"MIT"
] | 1
|
2018-09-11T09:51:16.000Z
|
2018-09-11T09:51:16.000Z
|
FabricAdcHub.User/Events/InternalEvent.cs
|
Caraul/FabricAdcHub
|
f19952211b63493ba018e9c9c028bfbccf38b38d
|
[
"MIT"
] | null | null | null |
FabricAdcHub.User/Events/InternalEvent.cs
|
Caraul/FabricAdcHub
|
f19952211b63493ba018e9c9c028bfbccf38b38d
|
[
"MIT"
] | null | null | null |
namespace FabricAdcHub.User.Events
{
    /// <summary>
    /// Internal events raised within the user actor
    /// (per the <c>FabricAdcHub.User.Events</c> namespace).
    /// </summary>
    internal enum InternalEvent
    {
        /// <summary>A client connection was opened.</summary>
        ClientOpened,
        /// <summary>An ADC protocol message was received.</summary>
        AdcMessageReceived,
        /// <summary>The connection timed out.</summary>
        ConnectionTimedOut,
        // NOTE(review): "Occured" is a misspelling of "Occurred"; the
        // member is kept as-is because renaming would break references.
        /// <summary>A disconnect occurred.</summary>
        DisconnectOccured
    }
}
| 17.090909
| 35
| 0.643617
|
7943d1f29a79fd246583fb25792c4fa640f9f051
| 227
|
cpp
|
C++
|
practica_4/src/O3DS.cpp
|
AlbitaMoreno/IG
|
5cb330f711c6fa8044e359af5555a90378e32e55
|
[
"MIT"
] | null | null | null |
practica_4/src/O3DS.cpp
|
AlbitaMoreno/IG
|
5cb330f711c6fa8044e359af5555a90378e32e55
|
[
"MIT"
] | null | null | null |
practica_4/src/O3DS.cpp
|
AlbitaMoreno/IG
|
5cb330f711c6fa8044e359af5555a90378e32e55
|
[
"MIT"
] | null | null | null |
#include "O3DS.h"
void O3DS::dibuja_puntos(){
glBegin(GL_POINTS);
glColor3f(0,1,0); // Puntos en verde
for(int i = 0; i < Vertices.size(); i++){
glVertex3f(Vertices[i].x, Vertices[i].y, Vertices[i].z);
}
glEnd();
}
| 17.461538
| 58
| 0.621145
|
6b54751c5cc7aed64d7fc926e2dd987b92819ca1
| 2,080
|
js
|
JavaScript
|
app/containers/routes_prm.js
|
undvl/qbproj
|
3bfc1c6bfbf4247f7f51f8517a15db332d1d50dd
|
[
"MIT"
] | null | null | null |
app/containers/routes_prm.js
|
undvl/qbproj
|
3bfc1c6bfbf4247f7f51f8517a15db332d1d50dd
|
[
"MIT"
] | null | null | null |
app/containers/routes_prm.js
|
undvl/qbproj
|
3bfc1c6bfbf4247f7f51f8517a15db332d1d50dd
|
[
"MIT"
] | null | null | null |
import NotFound from '../components/NotFound';
import Layout from '../components/Layout';
import Main from './Main';
import PortalContainer from './PortalContainer';
import Board from './Board';
import Rooms from './Rooms';
import JoinPortalReq from './JoinPortalReq';
import Portal from './Portal';
import PortalTheme from './PortalTheme';
import AddTheme from './PortalMain/AddTheme';
import Login from '../pages/Login';
import Signup from '../pages/Signup';
import CreateBoard from './CreateBoard';
import Manage from './Manage';
import MembershipRequests from './Manage/MembershipRequests';
import ManageMembers from './Manage/Members';
// import React from 'react';
// import { Route, IndexRoute } from 'react-router';
// const routes = (
// <Route path="/" component="Layout">
// <IndexRoute component="Main" />
// <Route path="q" component="Board" />
// <Route path="rm" component="Rooms" />
// <Route path="q/:qb/pl/" component="Portal" />
// <Route path="login" component="Login" />
// <Route path="signup" component="Signup" />
// </Route>
// );
// Plain-object route configuration (react-router v3 style), mounted at "/".
const routes = {
  path: '/', component: Layout,
  // Landing page.
  indexRoute: { component: Main },
  childRoutes: [
    // Everything under a specific board ("q/:qb") renders inside PortalContainer.
    { path: 'q/:qb', component: PortalContainer,
      indexRoute: { component: Board },
      childRoutes: [
        { path: 'rm', component: Rooms },
        { path: 'pl', component: Portal },
        { path: 'pl/:themeID', component: PortalTheme },
        { path: 'add_theme/:groupID', component: AddTheme },
        { path: 'join_portal', component: JoinPortalReq },
        // Board administration screens.
        { path: 'manage', component: Manage,
          childRoutes: [
            { path: 'memb_requests', component: MembershipRequests },
            { path: 'members', component: ManageMembers }
          ]
        },
        // Fallback for unknown paths within a board.
        { path: '*', component: NotFound, status: 404 }
      ]
    },
    { path: 'login', component: Login },
    { path: 'signup', component: Signup },
    { path: 'create_board', component: CreateBoard },
    // Global catch-all.
    { path: '*', component: NotFound, status: 404 }
  ]
};
export default routes;
| 30.588235
| 69
| 0.622596
|
1a61bd5bc6b148086ab84959cb33ffd12946203c
| 45,018
|
py
|
Python
|
sdk/python/pulumi_google_native/datapipelines/v1/outputs.py
|
AaronFriel/pulumi-google-native
|
75d1cda425e33d4610348972cd70bddf35f1770d
|
[
"Apache-2.0"
] | 44
|
2021-04-18T23:00:48.000Z
|
2022-02-14T17:43:15.000Z
|
sdk/python/pulumi_google_native/datapipelines/v1/outputs.py
|
AaronFriel/pulumi-google-native
|
75d1cda425e33d4610348972cd70bddf35f1770d
|
[
"Apache-2.0"
] | 354
|
2021-04-16T16:48:39.000Z
|
2022-03-31T17:16:39.000Z
|
sdk/python/pulumi_google_native/datapipelines/v1/outputs.py
|
AaronFriel/pulumi-google-native
|
75d1cda425e33d4610348972cd70bddf35f1770d
|
[
"Apache-2.0"
] | 8
|
2021-04-24T17:46:51.000Z
|
2022-01-05T10:40:21.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._enums import *
__all__ = [
'GoogleCloudDatapipelinesV1FlexTemplateRuntimeEnvironmentResponse',
'GoogleCloudDatapipelinesV1LaunchFlexTemplateParameterResponse',
'GoogleCloudDatapipelinesV1LaunchFlexTemplateRequestResponse',
'GoogleCloudDatapipelinesV1LaunchTemplateParametersResponse',
'GoogleCloudDatapipelinesV1LaunchTemplateRequestResponse',
'GoogleCloudDatapipelinesV1RuntimeEnvironmentResponse',
'GoogleCloudDatapipelinesV1ScheduleSpecResponse',
'GoogleCloudDatapipelinesV1WorkloadResponse',
]
# Auto-generated Pulumi output type (see the file header); values are
# stored under snake_case keys and exposed via property getters.
@pulumi.output_type
class GoogleCloudDatapipelinesV1FlexTemplateRuntimeEnvironmentResponse(dict):
    """
    The environment values to be set at runtime for a Flex Template.
    """
    # Warn when a raw camelCase API key is used for dict-style access
    # instead of the snake_case property getter.
    @staticmethod
    def __key_warning(key: str):
        suggest = None
        if key == "additionalExperiments":
            suggest = "additional_experiments"
        elif key == "additionalUserLabels":
            suggest = "additional_user_labels"
        elif key == "enableStreamingEngine":
            suggest = "enable_streaming_engine"
        elif key == "flexrsGoal":
            suggest = "flexrs_goal"
        elif key == "ipConfiguration":
            suggest = "ip_configuration"
        elif key == "kmsKeyName":
            suggest = "kms_key_name"
        elif key == "machineType":
            suggest = "machine_type"
        elif key == "maxWorkers":
            suggest = "max_workers"
        elif key == "numWorkers":
            suggest = "num_workers"
        elif key == "serviceAccountEmail":
            suggest = "service_account_email"
        elif key == "tempLocation":
            suggest = "temp_location"
        elif key == "workerRegion":
            suggest = "worker_region"
        elif key == "workerZone":
            suggest = "worker_zone"
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in GoogleCloudDatapipelinesV1FlexTemplateRuntimeEnvironmentResponse. Access the value via the '{suggest}' property getter instead.")
    def __getitem__(self, key: str) -> Any:
        GoogleCloudDatapipelinesV1FlexTemplateRuntimeEnvironmentResponse.__key_warning(key)
        return super().__getitem__(key)
    def get(self, key: str, default = None) -> Any:
        GoogleCloudDatapipelinesV1FlexTemplateRuntimeEnvironmentResponse.__key_warning(key)
        return super().get(key, default)
    def __init__(__self__, *,
                 additional_experiments: Sequence[str],
                 additional_user_labels: Mapping[str, str],
                 enable_streaming_engine: bool,
                 flexrs_goal: str,
                 ip_configuration: str,
                 kms_key_name: str,
                 machine_type: str,
                 max_workers: int,
                 network: str,
                 num_workers: int,
                 service_account_email: str,
                 subnetwork: str,
                 temp_location: str,
                 worker_region: str,
                 worker_zone: str,
                 zone: str):
        """
        The environment values to be set at runtime for a Flex Template.
        :param Sequence[str] additional_experiments: Additional experiment flags for the job.
        :param Mapping[str, str] additional_user_labels: Additional user labels to be specified for the job. Keys and values must follow the restrictions specified in the [labeling restrictions](https://cloud.google.com/compute/docs/labeling-resources#restrictions). An object containing a list of key/value pairs. Example: `{ "name": "wrench", "mass": "1kg", "count": "3" }`.
        :param bool enable_streaming_engine: Whether to enable Streaming Engine for the job.
        :param str flexrs_goal: Set FlexRS goal for the job. https://cloud.google.com/dataflow/docs/guides/flexrs
        :param str ip_configuration: Configuration for VM IPs.
        :param str kms_key_name: Name for the Cloud KMS key for the job. Key format is: projects//locations//keyRings//cryptoKeys/
        :param str machine_type: The machine type to use for the job. Defaults to the value from the template if not specified.
        :param int max_workers: The maximum number of Compute Engine instances to be made available to your pipeline during execution, from 1 to 1000.
        :param str network: Network to which VMs will be assigned. If empty or unspecified, the service will use the network "default".
        :param int num_workers: The initial number of Compute Engine instances for the job.
        :param str service_account_email: The email address of the service account to run the job as.
        :param str subnetwork: Subnetwork to which VMs will be assigned, if desired. You can specify a subnetwork using either a complete URL or an abbreviated path. Expected to be of the form "https://www.googleapis.com/compute/v1/projects/HOST_PROJECT_ID/regions/REGION/subnetworks/SUBNETWORK" or "regions/REGION/subnetworks/SUBNETWORK". If the subnetwork is located in a Shared VPC network, you must use the complete URL.
        :param str temp_location: The Cloud Storage path to use for temporary files. Must be a valid Cloud Storage URL, beginning with `gs://`.
        :param str worker_region: The Compute Engine region (https://cloud.google.com/compute/docs/regions-zones/regions-zones) in which worker processing should occur, e.g. "us-west1". Mutually exclusive with worker_zone. If neither worker_region nor worker_zone is specified, defaults to the control plane region.
        :param str worker_zone: The Compute Engine zone (https://cloud.google.com/compute/docs/regions-zones/regions-zones) in which worker processing should occur, e.g. "us-west1-a". Mutually exclusive with worker_region. If neither worker_region nor worker_zone is specified, a zone in the control plane region is chosen based on available capacity. If both `worker_zone` and `zone` are set, `worker_zone` takes precedence.
        :param str zone: The Compute Engine [availability zone](https://cloud.google.com/compute/docs/regions-zones/regions-zones) for launching worker instances to run your pipeline. In the future, worker_zone will take precedence.
        """
        pulumi.set(__self__, "additional_experiments", additional_experiments)
        pulumi.set(__self__, "additional_user_labels", additional_user_labels)
        pulumi.set(__self__, "enable_streaming_engine", enable_streaming_engine)
        pulumi.set(__self__, "flexrs_goal", flexrs_goal)
        pulumi.set(__self__, "ip_configuration", ip_configuration)
        pulumi.set(__self__, "kms_key_name", kms_key_name)
        pulumi.set(__self__, "machine_type", machine_type)
        pulumi.set(__self__, "max_workers", max_workers)
        pulumi.set(__self__, "network", network)
        pulumi.set(__self__, "num_workers", num_workers)
        pulumi.set(__self__, "service_account_email", service_account_email)
        pulumi.set(__self__, "subnetwork", subnetwork)
        pulumi.set(__self__, "temp_location", temp_location)
        pulumi.set(__self__, "worker_region", worker_region)
        pulumi.set(__self__, "worker_zone", worker_zone)
        pulumi.set(__self__, "zone", zone)
    @property
    @pulumi.getter(name="additionalExperiments")
    def additional_experiments(self) -> Sequence[str]:
        """
        Additional experiment flags for the job.
        """
        return pulumi.get(self, "additional_experiments")
    @property
    @pulumi.getter(name="additionalUserLabels")
    def additional_user_labels(self) -> Mapping[str, str]:
        """
        Additional user labels to be specified for the job. Keys and values must follow the restrictions specified in the [labeling restrictions](https://cloud.google.com/compute/docs/labeling-resources#restrictions). An object containing a list of key/value pairs. Example: `{ "name": "wrench", "mass": "1kg", "count": "3" }`.
        """
        return pulumi.get(self, "additional_user_labels")
    @property
    @pulumi.getter(name="enableStreamingEngine")
    def enable_streaming_engine(self) -> bool:
        """
        Whether to enable Streaming Engine for the job.
        """
        return pulumi.get(self, "enable_streaming_engine")
    @property
    @pulumi.getter(name="flexrsGoal")
    def flexrs_goal(self) -> str:
        """
        Set FlexRS goal for the job. https://cloud.google.com/dataflow/docs/guides/flexrs
        """
        return pulumi.get(self, "flexrs_goal")
    @property
    @pulumi.getter(name="ipConfiguration")
    def ip_configuration(self) -> str:
        """
        Configuration for VM IPs.
        """
        return pulumi.get(self, "ip_configuration")
    @property
    @pulumi.getter(name="kmsKeyName")
    def kms_key_name(self) -> str:
        """
        Name for the Cloud KMS key for the job. Key format is: projects//locations//keyRings//cryptoKeys/
        """
        return pulumi.get(self, "kms_key_name")
    @property
    @pulumi.getter(name="machineType")
    def machine_type(self) -> str:
        """
        The machine type to use for the job. Defaults to the value from the template if not specified.
        """
        return pulumi.get(self, "machine_type")
    @property
    @pulumi.getter(name="maxWorkers")
    def max_workers(self) -> int:
        """
        The maximum number of Compute Engine instances to be made available to your pipeline during execution, from 1 to 1000.
        """
        return pulumi.get(self, "max_workers")
    @property
    @pulumi.getter
    def network(self) -> str:
        """
        Network to which VMs will be assigned. If empty or unspecified, the service will use the network "default".
        """
        return pulumi.get(self, "network")
    @property
    @pulumi.getter(name="numWorkers")
    def num_workers(self) -> int:
        """
        The initial number of Compute Engine instances for the job.
        """
        return pulumi.get(self, "num_workers")
    @property
    @pulumi.getter(name="serviceAccountEmail")
    def service_account_email(self) -> str:
        """
        The email address of the service account to run the job as.
        """
        return pulumi.get(self, "service_account_email")
    @property
    @pulumi.getter
    def subnetwork(self) -> str:
        """
        Subnetwork to which VMs will be assigned, if desired. You can specify a subnetwork using either a complete URL or an abbreviated path. Expected to be of the form "https://www.googleapis.com/compute/v1/projects/HOST_PROJECT_ID/regions/REGION/subnetworks/SUBNETWORK" or "regions/REGION/subnetworks/SUBNETWORK". If the subnetwork is located in a Shared VPC network, you must use the complete URL.
        """
        return pulumi.get(self, "subnetwork")
    @property
    @pulumi.getter(name="tempLocation")
    def temp_location(self) -> str:
        """
        The Cloud Storage path to use for temporary files. Must be a valid Cloud Storage URL, beginning with `gs://`.
        """
        return pulumi.get(self, "temp_location")
    @property
    @pulumi.getter(name="workerRegion")
    def worker_region(self) -> str:
        """
        The Compute Engine region (https://cloud.google.com/compute/docs/regions-zones/regions-zones) in which worker processing should occur, e.g. "us-west1". Mutually exclusive with worker_zone. If neither worker_region nor worker_zone is specified, defaults to the control plane region.
        """
        return pulumi.get(self, "worker_region")
    @property
    @pulumi.getter(name="workerZone")
    def worker_zone(self) -> str:
        """
        The Compute Engine zone (https://cloud.google.com/compute/docs/regions-zones/regions-zones) in which worker processing should occur, e.g. "us-west1-a". Mutually exclusive with worker_region. If neither worker_region nor worker_zone is specified, a zone in the control plane region is chosen based on available capacity. If both `worker_zone` and `zone` are set, `worker_zone` takes precedence.
        """
        return pulumi.get(self, "worker_zone")
    @property
    @pulumi.getter
    def zone(self) -> str:
        """
        The Compute Engine [availability zone](https://cloud.google.com/compute/docs/regions-zones/regions-zones) for launching worker instances to run your pipeline. In the future, worker_zone will take precedence.
        """
        return pulumi.get(self, "zone")
# Auto-generated Pulumi output type (see the file header); values are
# stored under snake_case keys and exposed via property getters.
@pulumi.output_type
class GoogleCloudDatapipelinesV1LaunchFlexTemplateParameterResponse(dict):
    """
    Launch Flex Template parameter.
    """
    # Warn when a raw camelCase API key is used for dict-style access
    # instead of the snake_case property getter.
    @staticmethod
    def __key_warning(key: str):
        suggest = None
        if key == "containerSpecGcsPath":
            suggest = "container_spec_gcs_path"
        elif key == "jobName":
            suggest = "job_name"
        elif key == "launchOptions":
            suggest = "launch_options"
        elif key == "transformNameMappings":
            suggest = "transform_name_mappings"
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in GoogleCloudDatapipelinesV1LaunchFlexTemplateParameterResponse. Access the value via the '{suggest}' property getter instead.")
    def __getitem__(self, key: str) -> Any:
        GoogleCloudDatapipelinesV1LaunchFlexTemplateParameterResponse.__key_warning(key)
        return super().__getitem__(key)
    def get(self, key: str, default = None) -> Any:
        GoogleCloudDatapipelinesV1LaunchFlexTemplateParameterResponse.__key_warning(key)
        return super().get(key, default)
    def __init__(__self__, *,
                 container_spec_gcs_path: str,
                 environment: 'outputs.GoogleCloudDatapipelinesV1FlexTemplateRuntimeEnvironmentResponse',
                 job_name: str,
                 launch_options: Mapping[str, str],
                 parameters: Mapping[str, str],
                 transform_name_mappings: Mapping[str, str],
                 update: bool):
        """
        Launch Flex Template parameter.
        :param str container_spec_gcs_path: Cloud Storage path to a file with a JSON-serialized ContainerSpec as content.
        :param 'GoogleCloudDatapipelinesV1FlexTemplateRuntimeEnvironmentResponse' environment: The runtime environment for the Flex Template job.
        :param str job_name: The job name to use for the created job. For an update job request, the job name should be the same as the existing running job.
        :param Mapping[str, str] launch_options: Launch options for this Flex Template job. This is a common set of options across languages and templates. This should not be used to pass job parameters.
        :param Mapping[str, str] parameters: The parameters for the Flex Template. Example: `{"num_workers":"5"}`
        :param Mapping[str, str] transform_name_mappings: Use this to pass transform name mappings for streaming update jobs. Example: `{"oldTransformName":"newTransformName",...}`
        :param bool update: Set this to true if you are sending a request to update a running streaming job. When set, the job name should be the same as the running job.
        """
        pulumi.set(__self__, "container_spec_gcs_path", container_spec_gcs_path)
        pulumi.set(__self__, "environment", environment)
        pulumi.set(__self__, "job_name", job_name)
        pulumi.set(__self__, "launch_options", launch_options)
        pulumi.set(__self__, "parameters", parameters)
        pulumi.set(__self__, "transform_name_mappings", transform_name_mappings)
        pulumi.set(__self__, "update", update)
    @property
    @pulumi.getter(name="containerSpecGcsPath")
    def container_spec_gcs_path(self) -> str:
        """
        Cloud Storage path to a file with a JSON-serialized ContainerSpec as content.
        """
        return pulumi.get(self, "container_spec_gcs_path")
    @property
    @pulumi.getter
    def environment(self) -> 'outputs.GoogleCloudDatapipelinesV1FlexTemplateRuntimeEnvironmentResponse':
        """
        The runtime environment for the Flex Template job.
        """
        return pulumi.get(self, "environment")
    @property
    @pulumi.getter(name="jobName")
    def job_name(self) -> str:
        """
        The job name to use for the created job. For an update job request, the job name should be the same as the existing running job.
        """
        return pulumi.get(self, "job_name")
    @property
    @pulumi.getter(name="launchOptions")
    def launch_options(self) -> Mapping[str, str]:
        """
        Launch options for this Flex Template job. This is a common set of options across languages and templates. This should not be used to pass job parameters.
        """
        return pulumi.get(self, "launch_options")
    @property
    @pulumi.getter
    def parameters(self) -> Mapping[str, str]:
        """
        The parameters for the Flex Template. Example: `{"num_workers":"5"}`
        """
        return pulumi.get(self, "parameters")
    @property
    @pulumi.getter(name="transformNameMappings")
    def transform_name_mappings(self) -> Mapping[str, str]:
        """
        Use this to pass transform name mappings for streaming update jobs. Example: `{"oldTransformName":"newTransformName",...}`
        """
        return pulumi.get(self, "transform_name_mappings")
    @property
    @pulumi.getter
    def update(self) -> bool:
        """
        Set this to true if you are sending a request to update a running streaming job. When set, the job name should be the same as the running job.
        """
        return pulumi.get(self, "update")
# Auto-generated Pulumi output type (see the file header); values are
# stored under snake_case keys and exposed via property getters.
@pulumi.output_type
class GoogleCloudDatapipelinesV1LaunchFlexTemplateRequestResponse(dict):
    """
    A request to launch a Dataflow job from a Flex Template.
    """
    # Warn when a raw camelCase API key is used for dict-style access
    # instead of the snake_case property getter.
    @staticmethod
    def __key_warning(key: str):
        suggest = None
        if key == "launchParameter":
            suggest = "launch_parameter"
        elif key == "validateOnly":
            suggest = "validate_only"
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in GoogleCloudDatapipelinesV1LaunchFlexTemplateRequestResponse. Access the value via the '{suggest}' property getter instead.")
    def __getitem__(self, key: str) -> Any:
        GoogleCloudDatapipelinesV1LaunchFlexTemplateRequestResponse.__key_warning(key)
        return super().__getitem__(key)
    def get(self, key: str, default = None) -> Any:
        GoogleCloudDatapipelinesV1LaunchFlexTemplateRequestResponse.__key_warning(key)
        return super().get(key, default)
    def __init__(__self__, *,
                 launch_parameter: 'outputs.GoogleCloudDatapipelinesV1LaunchFlexTemplateParameterResponse',
                 location: str,
                 project: str,
                 validate_only: bool):
        """
        A request to launch a Dataflow job from a Flex Template.
        :param 'GoogleCloudDatapipelinesV1LaunchFlexTemplateParameterResponse' launch_parameter: Parameter to launch a job from a Flex Template.
        :param str location: The [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) to which to direct the request. For example, `us-central1`, `us-west1`.
        :param str project: The ID of the Cloud Platform project that the job belongs to.
        :param bool validate_only: If true, the request is validated but not actually executed. Defaults to false.
        """
        pulumi.set(__self__, "launch_parameter", launch_parameter)
        pulumi.set(__self__, "location", location)
        pulumi.set(__self__, "project", project)
        pulumi.set(__self__, "validate_only", validate_only)
    @property
    @pulumi.getter(name="launchParameter")
    def launch_parameter(self) -> 'outputs.GoogleCloudDatapipelinesV1LaunchFlexTemplateParameterResponse':
        """
        Parameter to launch a job from a Flex Template.
        """
        return pulumi.get(self, "launch_parameter")
    @property
    @pulumi.getter
    def location(self) -> str:
        """
        The [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) to which to direct the request. For example, `us-central1`, `us-west1`.
        """
        return pulumi.get(self, "location")
    @property
    @pulumi.getter
    def project(self) -> str:
        """
        The ID of the Cloud Platform project that the job belongs to.
        """
        return pulumi.get(self, "project")
    @property
    @pulumi.getter(name="validateOnly")
    def validate_only(self) -> bool:
        """
        If true, the request is validated but not actually executed. Defaults to false.
        """
        return pulumi.get(self, "validate_only")
# Auto-generated Pulumi output type (see the file header); values are
# stored under snake_case keys and exposed via property getters.
@pulumi.output_type
class GoogleCloudDatapipelinesV1LaunchTemplateParametersResponse(dict):
    """
    Parameters to provide to the template being launched.
    """
    # Warn when a raw camelCase API key is used for dict-style access
    # instead of the snake_case property getter.
    @staticmethod
    def __key_warning(key: str):
        suggest = None
        if key == "jobName":
            suggest = "job_name"
        elif key == "transformNameMapping":
            suggest = "transform_name_mapping"
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in GoogleCloudDatapipelinesV1LaunchTemplateParametersResponse. Access the value via the '{suggest}' property getter instead.")
    def __getitem__(self, key: str) -> Any:
        GoogleCloudDatapipelinesV1LaunchTemplateParametersResponse.__key_warning(key)
        return super().__getitem__(key)
    def get(self, key: str, default = None) -> Any:
        GoogleCloudDatapipelinesV1LaunchTemplateParametersResponse.__key_warning(key)
        return super().get(key, default)
    def __init__(__self__, *,
                 environment: 'outputs.GoogleCloudDatapipelinesV1RuntimeEnvironmentResponse',
                 job_name: str,
                 parameters: Mapping[str, str],
                 transform_name_mapping: Mapping[str, str],
                 update: bool):
        """
        Parameters to provide to the template being launched.
        :param 'GoogleCloudDatapipelinesV1RuntimeEnvironmentResponse' environment: The runtime environment for the job.
        :param str job_name: The job name to use for the created job.
        :param Mapping[str, str] parameters: The runtime parameters to pass to the job.
        :param Mapping[str, str] transform_name_mapping: Map of transform name prefixes of the job to be replaced to the corresponding name prefixes of the new job. Only applicable when updating a pipeline.
        :param bool update: If set, replace the existing pipeline with the name specified by jobName with this pipeline, preserving state.
        """
        pulumi.set(__self__, "environment", environment)
        pulumi.set(__self__, "job_name", job_name)
        pulumi.set(__self__, "parameters", parameters)
        pulumi.set(__self__, "transform_name_mapping", transform_name_mapping)
        pulumi.set(__self__, "update", update)
    @property
    @pulumi.getter
    def environment(self) -> 'outputs.GoogleCloudDatapipelinesV1RuntimeEnvironmentResponse':
        """
        The runtime environment for the job.
        """
        return pulumi.get(self, "environment")
    @property
    @pulumi.getter(name="jobName")
    def job_name(self) -> str:
        """
        The job name to use for the created job.
        """
        return pulumi.get(self, "job_name")
    @property
    @pulumi.getter
    def parameters(self) -> Mapping[str, str]:
        """
        The runtime parameters to pass to the job.
        """
        return pulumi.get(self, "parameters")
    @property
    @pulumi.getter(name="transformNameMapping")
    def transform_name_mapping(self) -> Mapping[str, str]:
        """
        Map of transform name prefixes of the job to be replaced to the corresponding name prefixes of the new job. Only applicable when updating a pipeline.
        """
        return pulumi.get(self, "transform_name_mapping")
    @property
    @pulumi.getter
    def update(self) -> bool:
        """
        If set, replace the existing pipeline with the name specified by jobName with this pipeline, preserving state.
        """
        return pulumi.get(self, "update")
@pulumi.output_type
class GoogleCloudDatapipelinesV1LaunchTemplateRequestResponse(dict):
"""
A request to launch a template.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "gcsPath":
suggest = "gcs_path"
elif key == "launchParameters":
suggest = "launch_parameters"
elif key == "validateOnly":
suggest = "validate_only"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in GoogleCloudDatapipelinesV1LaunchTemplateRequestResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
GoogleCloudDatapipelinesV1LaunchTemplateRequestResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
GoogleCloudDatapipelinesV1LaunchTemplateRequestResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
gcs_path: str,
launch_parameters: 'outputs.GoogleCloudDatapipelinesV1LaunchTemplateParametersResponse',
location: str,
project: str,
validate_only: bool):
"""
A request to launch a template.
:param str gcs_path: A Cloud Storage path to the template from which to create the job. Must be a valid Cloud Storage URL, beginning with 'gs://'.
:param 'GoogleCloudDatapipelinesV1LaunchTemplateParametersResponse' launch_parameters: The parameters of the template to launch. This should be part of the body of the POST request.
:param str location: The [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) to which to direct the request.
:param str project: The ID of the Cloud Platform project that the job belongs to.
:param bool validate_only: If true, the request is validated but not actually executed. Defaults to false.
"""
pulumi.set(__self__, "gcs_path", gcs_path)
pulumi.set(__self__, "launch_parameters", launch_parameters)
pulumi.set(__self__, "location", location)
pulumi.set(__self__, "project", project)
pulumi.set(__self__, "validate_only", validate_only)
@property
@pulumi.getter(name="gcsPath")
def gcs_path(self) -> str:
"""
A Cloud Storage path to the template from which to create the job. Must be a valid Cloud Storage URL, beginning with 'gs://'.
"""
return pulumi.get(self, "gcs_path")
@property
@pulumi.getter(name="launchParameters")
def launch_parameters(self) -> 'outputs.GoogleCloudDatapipelinesV1LaunchTemplateParametersResponse':
"""
The parameters of the template to launch. This should be part of the body of the POST request.
"""
return pulumi.get(self, "launch_parameters")
@property
@pulumi.getter
def location(self) -> str:
"""
The [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) to which to direct the request.
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def project(self) -> str:
"""
The ID of the Cloud Platform project that the job belongs to.
"""
return pulumi.get(self, "project")
@property
@pulumi.getter(name="validateOnly")
def validate_only(self) -> bool:
"""
If true, the request is validated but not actually executed. Defaults to false.
"""
return pulumi.get(self, "validate_only")
@pulumi.output_type
class GoogleCloudDatapipelinesV1RuntimeEnvironmentResponse(dict):
"""
The environment values to set at runtime.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "additionalExperiments":
suggest = "additional_experiments"
elif key == "additionalUserLabels":
suggest = "additional_user_labels"
elif key == "bypassTempDirValidation":
suggest = "bypass_temp_dir_validation"
elif key == "enableStreamingEngine":
suggest = "enable_streaming_engine"
elif key == "ipConfiguration":
suggest = "ip_configuration"
elif key == "kmsKeyName":
suggest = "kms_key_name"
elif key == "machineType":
suggest = "machine_type"
elif key == "maxWorkers":
suggest = "max_workers"
elif key == "numWorkers":
suggest = "num_workers"
elif key == "serviceAccountEmail":
suggest = "service_account_email"
elif key == "tempLocation":
suggest = "temp_location"
elif key == "workerRegion":
suggest = "worker_region"
elif key == "workerZone":
suggest = "worker_zone"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in GoogleCloudDatapipelinesV1RuntimeEnvironmentResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
GoogleCloudDatapipelinesV1RuntimeEnvironmentResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
GoogleCloudDatapipelinesV1RuntimeEnvironmentResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
additional_experiments: Sequence[str],
additional_user_labels: Mapping[str, str],
bypass_temp_dir_validation: bool,
enable_streaming_engine: bool,
ip_configuration: str,
kms_key_name: str,
machine_type: str,
max_workers: int,
network: str,
num_workers: int,
service_account_email: str,
subnetwork: str,
temp_location: str,
worker_region: str,
worker_zone: str,
zone: str):
"""
The environment values to set at runtime.
:param Sequence[str] additional_experiments: Additional experiment flags for the job.
:param Mapping[str, str] additional_user_labels: Additional user labels to be specified for the job. Keys and values should follow the restrictions specified in the [labeling restrictions](https://cloud.google.com/compute/docs/labeling-resources#restrictions) page. An object containing a list of key/value pairs. Example: { "name": "wrench", "mass": "1kg", "count": "3" }.
:param bool bypass_temp_dir_validation: Whether to bypass the safety checks for the job's temporary directory. Use with caution.
:param bool enable_streaming_engine: Whether to enable Streaming Engine for the job.
:param str ip_configuration: Configuration for VM IPs.
:param str kms_key_name: Name for the Cloud KMS key for the job. The key format is: projects//locations//keyRings//cryptoKeys/
:param str machine_type: The machine type to use for the job. Defaults to the value from the template if not specified.
:param int max_workers: The maximum number of Compute Engine instances to be made available to your pipeline during execution, from 1 to 1000.
:param str network: Network to which VMs will be assigned. If empty or unspecified, the service will use the network "default".
:param int num_workers: The initial number of Compute Engine instances for the job.
:param str service_account_email: The email address of the service account to run the job as.
:param str subnetwork: Subnetwork to which VMs will be assigned, if desired. You can specify a subnetwork using either a complete URL or an abbreviated path. Expected to be of the form "https://www.googleapis.com/compute/v1/projects/HOST_PROJECT_ID/regions/REGION/subnetworks/SUBNETWORK" or "regions/REGION/subnetworks/SUBNETWORK". If the subnetwork is located in a Shared VPC network, you must use the complete URL.
:param str temp_location: The Cloud Storage path to use for temporary files. Must be a valid Cloud Storage URL, beginning with `gs://`.
:param str worker_region: The Compute Engine region (https://cloud.google.com/compute/docs/regions-zones/regions-zones) in which worker processing should occur, e.g. "us-west1". Mutually exclusive with worker_zone. If neither worker_region nor worker_zone is specified, default to the control plane's region.
:param str worker_zone: The Compute Engine zone (https://cloud.google.com/compute/docs/regions-zones/regions-zones) in which worker processing should occur, e.g. "us-west1-a". Mutually exclusive with worker_region. If neither worker_region nor worker_zone is specified, a zone in the control plane's region is chosen based on available capacity. If both `worker_zone` and `zone` are set, `worker_zone` takes precedence.
:param str zone: The Compute Engine [availability zone](https://cloud.google.com/compute/docs/regions-zones/regions-zones) for launching worker instances to run your pipeline. In the future, worker_zone will take precedence.
"""
pulumi.set(__self__, "additional_experiments", additional_experiments)
pulumi.set(__self__, "additional_user_labels", additional_user_labels)
pulumi.set(__self__, "bypass_temp_dir_validation", bypass_temp_dir_validation)
pulumi.set(__self__, "enable_streaming_engine", enable_streaming_engine)
pulumi.set(__self__, "ip_configuration", ip_configuration)
pulumi.set(__self__, "kms_key_name", kms_key_name)
pulumi.set(__self__, "machine_type", machine_type)
pulumi.set(__self__, "max_workers", max_workers)
pulumi.set(__self__, "network", network)
pulumi.set(__self__, "num_workers", num_workers)
pulumi.set(__self__, "service_account_email", service_account_email)
pulumi.set(__self__, "subnetwork", subnetwork)
pulumi.set(__self__, "temp_location", temp_location)
pulumi.set(__self__, "worker_region", worker_region)
pulumi.set(__self__, "worker_zone", worker_zone)
pulumi.set(__self__, "zone", zone)
@property
@pulumi.getter(name="additionalExperiments")
def additional_experiments(self) -> Sequence[str]:
"""
Additional experiment flags for the job.
"""
return pulumi.get(self, "additional_experiments")
@property
@pulumi.getter(name="additionalUserLabels")
def additional_user_labels(self) -> Mapping[str, str]:
"""
Additional user labels to be specified for the job. Keys and values should follow the restrictions specified in the [labeling restrictions](https://cloud.google.com/compute/docs/labeling-resources#restrictions) page. An object containing a list of key/value pairs. Example: { "name": "wrench", "mass": "1kg", "count": "3" }.
"""
return pulumi.get(self, "additional_user_labels")
@property
@pulumi.getter(name="bypassTempDirValidation")
def bypass_temp_dir_validation(self) -> bool:
"""
Whether to bypass the safety checks for the job's temporary directory. Use with caution.
"""
return pulumi.get(self, "bypass_temp_dir_validation")
@property
@pulumi.getter(name="enableStreamingEngine")
def enable_streaming_engine(self) -> bool:
"""
Whether to enable Streaming Engine for the job.
"""
return pulumi.get(self, "enable_streaming_engine")
@property
@pulumi.getter(name="ipConfiguration")
def ip_configuration(self) -> str:
"""
Configuration for VM IPs.
"""
return pulumi.get(self, "ip_configuration")
@property
@pulumi.getter(name="kmsKeyName")
def kms_key_name(self) -> str:
"""
Name for the Cloud KMS key for the job. The key format is: projects//locations//keyRings//cryptoKeys/
"""
return pulumi.get(self, "kms_key_name")
@property
@pulumi.getter(name="machineType")
def machine_type(self) -> str:
"""
The machine type to use for the job. Defaults to the value from the template if not specified.
"""
return pulumi.get(self, "machine_type")
@property
@pulumi.getter(name="maxWorkers")
def max_workers(self) -> int:
"""
The maximum number of Compute Engine instances to be made available to your pipeline during execution, from 1 to 1000.
"""
return pulumi.get(self, "max_workers")
@property
@pulumi.getter
def network(self) -> str:
"""
Network to which VMs will be assigned. If empty or unspecified, the service will use the network "default".
"""
return pulumi.get(self, "network")
@property
@pulumi.getter(name="numWorkers")
def num_workers(self) -> int:
"""
The initial number of Compute Engine instances for the job.
"""
return pulumi.get(self, "num_workers")
@property
@pulumi.getter(name="serviceAccountEmail")
def service_account_email(self) -> str:
"""
The email address of the service account to run the job as.
"""
return pulumi.get(self, "service_account_email")
@property
@pulumi.getter
def subnetwork(self) -> str:
"""
Subnetwork to which VMs will be assigned, if desired. You can specify a subnetwork using either a complete URL or an abbreviated path. Expected to be of the form "https://www.googleapis.com/compute/v1/projects/HOST_PROJECT_ID/regions/REGION/subnetworks/SUBNETWORK" or "regions/REGION/subnetworks/SUBNETWORK". If the subnetwork is located in a Shared VPC network, you must use the complete URL.
"""
return pulumi.get(self, "subnetwork")
@property
@pulumi.getter(name="tempLocation")
def temp_location(self) -> str:
"""
The Cloud Storage path to use for temporary files. Must be a valid Cloud Storage URL, beginning with `gs://`.
"""
return pulumi.get(self, "temp_location")
@property
@pulumi.getter(name="workerRegion")
def worker_region(self) -> str:
"""
The Compute Engine region (https://cloud.google.com/compute/docs/regions-zones/regions-zones) in which worker processing should occur, e.g. "us-west1". Mutually exclusive with worker_zone. If neither worker_region nor worker_zone is specified, default to the control plane's region.
"""
return pulumi.get(self, "worker_region")
@property
@pulumi.getter(name="workerZone")
def worker_zone(self) -> str:
"""
The Compute Engine zone (https://cloud.google.com/compute/docs/regions-zones/regions-zones) in which worker processing should occur, e.g. "us-west1-a". Mutually exclusive with worker_region. If neither worker_region nor worker_zone is specified, a zone in the control plane's region is chosen based on available capacity. If both `worker_zone` and `zone` are set, `worker_zone` takes precedence.
"""
return pulumi.get(self, "worker_zone")
@property
@pulumi.getter
def zone(self) -> str:
"""
The Compute Engine [availability zone](https://cloud.google.com/compute/docs/regions-zones/regions-zones) for launching worker instances to run your pipeline. In the future, worker_zone will take precedence.
"""
return pulumi.get(self, "zone")
@pulumi.output_type
class GoogleCloudDatapipelinesV1ScheduleSpecResponse(dict):
"""
Details of the schedule the pipeline runs on.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "nextJobTime":
suggest = "next_job_time"
elif key == "timeZone":
suggest = "time_zone"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in GoogleCloudDatapipelinesV1ScheduleSpecResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
GoogleCloudDatapipelinesV1ScheduleSpecResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
GoogleCloudDatapipelinesV1ScheduleSpecResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
next_job_time: str,
schedule: str,
time_zone: str):
"""
Details of the schedule the pipeline runs on.
:param str next_job_time: When the next Scheduler job is going to run.
:param str schedule: Unix-cron format of the schedule. This information is retrieved from the linked Cloud Scheduler.
:param str time_zone: Timezone ID. This matches the timezone IDs used by the Cloud Scheduler API. If empty, UTC time is assumed.
"""
pulumi.set(__self__, "next_job_time", next_job_time)
pulumi.set(__self__, "schedule", schedule)
pulumi.set(__self__, "time_zone", time_zone)
@property
@pulumi.getter(name="nextJobTime")
def next_job_time(self) -> str:
"""
When the next Scheduler job is going to run.
"""
return pulumi.get(self, "next_job_time")
@property
@pulumi.getter
def schedule(self) -> str:
"""
Unix-cron format of the schedule. This information is retrieved from the linked Cloud Scheduler.
"""
return pulumi.get(self, "schedule")
@property
@pulumi.getter(name="timeZone")
def time_zone(self) -> str:
"""
Timezone ID. This matches the timezone IDs used by the Cloud Scheduler API. If empty, UTC time is assumed.
"""
return pulumi.get(self, "time_zone")
@pulumi.output_type
class GoogleCloudDatapipelinesV1WorkloadResponse(dict):
"""
Workload details for creating the pipeline jobs.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "dataflowFlexTemplateRequest":
suggest = "dataflow_flex_template_request"
elif key == "dataflowLaunchTemplateRequest":
suggest = "dataflow_launch_template_request"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in GoogleCloudDatapipelinesV1WorkloadResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
GoogleCloudDatapipelinesV1WorkloadResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
GoogleCloudDatapipelinesV1WorkloadResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
dataflow_flex_template_request: 'outputs.GoogleCloudDatapipelinesV1LaunchFlexTemplateRequestResponse',
dataflow_launch_template_request: 'outputs.GoogleCloudDatapipelinesV1LaunchTemplateRequestResponse'):
"""
Workload details for creating the pipeline jobs.
:param 'GoogleCloudDatapipelinesV1LaunchFlexTemplateRequestResponse' dataflow_flex_template_request: Template information and additional parameters needed to launch a Dataflow job using the flex launch API.
:param 'GoogleCloudDatapipelinesV1LaunchTemplateRequestResponse' dataflow_launch_template_request: Template information and additional parameters needed to launch a Dataflow job using the standard launch API.
"""
pulumi.set(__self__, "dataflow_flex_template_request", dataflow_flex_template_request)
pulumi.set(__self__, "dataflow_launch_template_request", dataflow_launch_template_request)
@property
@pulumi.getter(name="dataflowFlexTemplateRequest")
def dataflow_flex_template_request(self) -> 'outputs.GoogleCloudDatapipelinesV1LaunchFlexTemplateRequestResponse':
"""
Template information and additional parameters needed to launch a Dataflow job using the flex launch API.
"""
return pulumi.get(self, "dataflow_flex_template_request")
@property
@pulumi.getter(name="dataflowLaunchTemplateRequest")
def dataflow_launch_template_request(self) -> 'outputs.GoogleCloudDatapipelinesV1LaunchTemplateRequestResponse':
"""
Template information and additional parameters needed to launch a Dataflow job using the standard launch API.
"""
return pulumi.get(self, "dataflow_launch_template_request")
| 47.287815
| 427
| 0.676441
|
f44c33762ba5adefd18e6795eaf004694034beb3
| 463
|
ts
|
TypeScript
|
lite-version/src/app/+reserva/reserva.routing.ts
|
Cuica20/TP2_Project_PardosChiken
|
25c6ed70d35f5911323fead4baf7a665fedfc831
|
[
"Apache-2.0"
] | null | null | null |
lite-version/src/app/+reserva/reserva.routing.ts
|
Cuica20/TP2_Project_PardosChiken
|
25c6ed70d35f5911323fead4baf7a665fedfc831
|
[
"Apache-2.0"
] | null | null | null |
lite-version/src/app/+reserva/reserva.routing.ts
|
Cuica20/TP2_Project_PardosChiken
|
25c6ed70d35f5911323fead4baf7a665fedfc831
|
[
"Apache-2.0"
] | null | null | null |
/**
* Created by javier on 6/13/17.
*/
import { Routes, RouterModule } from '@angular/router';
import {ModuleWithProviders} from "@angular/core";
import {ReservaComponent} from "./reserva.component";
export const reservaRoutes: Routes = [
{
path: '',
component: ReservaComponent,
data: {
pageTitle: 'Reserva'
}
}
];
export const reservaRouting: ModuleWithProviders = RouterModule.forChild(reservaRoutes);
| 22.047619
| 88
| 0.650108
|
aa3aefb333bafb49755ed313cf25104d38a8c617
| 2,237
|
dart
|
Dart
|
lib/features/dashboard/domain/entities/payment/status/payment_status.dart
|
f0rx/amatfoodies
|
f9d0b4f183d3117a5cd802caabd02fa2c42c3960
|
[
"BSD-3-Clause"
] | 4
|
2021-09-24T10:41:04.000Z
|
2021-09-24T10:41:06.000Z
|
lib/features/dashboard/domain/entities/payment/status/payment_status.dart
|
definitelyme/amatfoodies
|
bfdff3fd02c9879ae2f16d8f9be78f9612bb87bd
|
[
"BSD-3-Clause"
] | 2
|
2022-01-01T15:46:17.000Z
|
2022-01-01T15:47:05.000Z
|
lib/features/dashboard/domain/entities/payment/status/payment_status.dart
|
f0rx/amatfoodies
|
f9d0b4f183d3117a5cd802caabd02fa2c42c3960
|
[
"BSD-3-Clause"
] | null | null | null |
library payment_status.dart;
import 'package:built_collection/built_collection.dart';
import 'package:built_value/built_value.dart';
part 'payment_status.g.dart';
class PaymentStatus extends EnumClass {
static const PaymentStatus pending = _$pending;
static const PaymentStatus pay = _$pay;
static const PaymentStatus processing = _$processing;
static const PaymentStatus successful = _$successful;
static const PaymentStatus failed = _$failed;
const PaymentStatus._(String name) : super(name);
String get formatted {
switch (this) {
case PaymentStatus.processing:
return 'Processing payment';
case PaymentStatus.successful:
return 'Payment Successful';
case PaymentStatus.failed:
return 'Payment was declined!';
}
return 'No value';
}
static BuiltSet<PaymentStatus> get values => _$values;
static PaymentStatus valueOf(String name) => _$valueOf(name);
}
extension PaymentStatusX on PaymentStatus {
T maybeWhen<T>({
T Function()? pending,
T Function()? pay,
T Function()? processing,
T Function()? successful,
T Function()? failed,
required T Function() orElse,
}) {
switch (this) {
case PaymentStatus.pending:
return pending?.call() ?? orElse.call();
case PaymentStatus.pay:
return pay?.call() ?? orElse.call();
case PaymentStatus.processing:
return processing?.call() ?? orElse.call();
case PaymentStatus.successful:
return successful?.call() ?? orElse.call();
case PaymentStatus.failed:
return failed?.call() ?? orElse.call();
}
return orElse.call();
}
T when<T>({
required T Function() pending,
required T Function() pay,
required T Function() processing,
required T Function() successful,
required T Function() failed,
}) {
switch (this) {
case PaymentStatus.pending:
return pending.call();
case PaymentStatus.pay:
return pay.call();
case PaymentStatus.processing:
return processing.call();
case PaymentStatus.successful:
return successful.call();
case PaymentStatus.failed:
return failed.call();
}
return pending.call();
}
}
| 26.951807
| 63
| 0.662047
|
05823dc101e6a51a727b0e0a2fc995ea1feb571b
| 6,369
|
rs
|
Rust
|
impl/src/util.rs
|
bigkraig/cargo-raze
|
209e6258b7dd30c555f1c17e48d108360d5094a9
|
[
"Apache-2.0"
] | null | null | null |
impl/src/util.rs
|
bigkraig/cargo-raze
|
209e6258b7dd30c555f1c17e48d108360d5094a9
|
[
"Apache-2.0"
] | null | null | null |
impl/src/util.rs
|
bigkraig/cargo-raze
|
209e6258b7dd30c555f1c17e48d108360d5094a9
|
[
"Apache-2.0"
] | null | null | null |
// Copyright 2018 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::{
error::Error as StdError,
fmt,
iter::Iterator,
process::Command,
str::{self, FromStr},
};
use cargo::{core::TargetKind, CargoResult};
use cargo_platform::Cfg;
use slug;
pub const PLEASE_FILE_A_BUG: &str =
"Please file an issue at github.com/google/cargo-raze with details.";
#[derive(Debug)]
pub enum RazeError {
Generic(String),
Internal(String),
Rendering {
crate_name_opt: Option<String>,
message: String,
},
Planning {
dependency_name_opt: Option<String>,
message: String,
},
Config {
field_path_opt: Option<String>,
message: String,
},
}
impl StdError for RazeError {}
impl fmt::Display for RazeError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match &self {
Self::Generic(s) => write!(f, "Raze failed with cause: \"{}\"", s),
Self::Internal(s) => write!(
f,
"Raze failed unexpectedly with cause: \"{}\". {}",
s, PLEASE_FILE_A_BUG
),
Self::Config {
field_path_opt,
message,
} => match field_path_opt {
Some(path) => write!(
f,
"Raze config problem in field \"{}\" with cause: \"{}\"",
path, message
),
None => write!(f, "Raze config problem with cause: \"{}\"", message),
},
Self::Rendering {
crate_name_opt,
message,
} => match crate_name_opt {
Some(name) => write!(
f,
"Raze failed to render crate \"{}\" with cause: \"{}\"",
name, message
),
None => write!(f, "Raze failed to render with cause: \"{}\"", message),
},
Self::Planning {
dependency_name_opt,
message,
} => match dependency_name_opt {
Some(dep_name) => write!(
f,
"Raze failed to plan crate \"{}\" with cause: \"{}\"",
dep_name, message
),
None => write!(f, "Raze failed to render with cause: \"{}\"", message),
},
}
}
}
pub struct PlatformDetails {
target_triple: String,
attrs: Vec<Cfg>,
}
pub struct LimitedResults<T> {
pub items: Vec<T>,
pub count_extras: usize,
}
impl PlatformDetails {
pub fn new_using_rustc(target_triple: &str) -> CargoResult<Self> {
let attrs = fetch_attrs(target_triple)?;
Ok(Self::new(target_triple.to_owned(), attrs))
}
pub fn new(target_triple: String, attrs: Vec<Cfg>) -> Self {
Self {
target_triple,
attrs,
}
}
#[allow(dead_code)]
pub fn target_triple(&self) -> &str {
&self.target_triple
}
pub fn attrs(&self) -> &Vec<Cfg> {
&self.attrs
}
}
impl<T> LimitedResults<T> {
pub fn is_empty(&self) -> bool {
self.items.is_empty()
}
}
impl<T: fmt::Debug> fmt::Debug for LimitedResults<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
if self.count_extras > 0 {
write!(f, "{:?} and {} others", &self.items, self.count_extras)
} else {
write!(f, "{:?}", &self.items)
}
}
}
pub fn collect_up_to<T, U: Iterator<Item = T>>(max: usize, iter: U) -> LimitedResults<T> {
let mut items = Vec::new();
let mut count_extras = 0;
for item in iter {
// Spill extra crates into a counter to avoid overflowing terminal
if items.len() < max {
items.push(item);
} else {
count_extras += 1;
}
}
LimitedResults {
items,
count_extras,
}
}
pub fn sanitize_ident(ident: &str) -> String {
slug::slugify(&ident).replace("-", "_")
}
/**
* Extracts consistently named Strings for the provided `TargetKind`.
*
* TODO(acmcarther): Remove this shim borrowed from Cargo when Cargo is upgraded
*/
pub fn kind_to_kinds(kind: &TargetKind) -> Vec<String> {
match *kind {
TargetKind::Lib(ref kinds) => kinds.iter().map(|k| k.crate_type().to_owned()).collect(),
TargetKind::Bin => vec!["bin".to_owned()],
TargetKind::ExampleBin | TargetKind::ExampleLib(_) => vec!["example".to_owned()],
TargetKind::Test => vec!["test".to_owned()],
TargetKind::CustomBuild => vec!["custom-build".to_owned()],
TargetKind::Bench => vec!["bench".to_owned()],
}
}
/** Gets the proper system attributes for the provided platform triple using rustc. */
fn fetch_attrs(target: &str) -> CargoResult<Vec<Cfg>> {
let args = vec![format!("--target={}", target), "--print=cfg".to_owned()];
let output = Command::new("rustc").args(&args).output()?;
if !output.status.success() {
panic!(format!(
"getting target attrs for {} failed with status: '{}' \nstdout: {}\nstderr: {}",
target,
output.status,
String::from_utf8(output.stdout).unwrap_or_else(|_| "[unparseable bytes]".to_owned()),
String::from_utf8(output.stderr).unwrap_or_else(|_| "[unparseable bytes]".to_owned())
))
}
let attr_str =
String::from_utf8(output.stdout).expect("successful run of rustc's output to be utf8");
Ok(
attr_str
.lines()
.map(Cfg::from_str)
.map(|cfg| cfg.expect("attrs from rustc should be parsable into Cargo Cfg"))
.collect(),
)
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_collect_up_to_works_for_zero() {
let test_items: Vec<u32> = Vec::new();
let results = collect_up_to(10, test_items.iter());
assert!(results.is_empty());
}
#[test]
fn test_collect_up_to_works_for_one() {
let test_items = vec![1];
let results = collect_up_to(10, test_items.iter());
assert_eq!(results.items, vec![&1]);
assert!(!results.is_empty());
}
#[test]
fn test_collect_up_to_works_for_others_and_bounds_correctly() {
let test_items = vec![1, 2, 3];
let results = collect_up_to(2, test_items.iter());
assert_eq!(results.items, vec![&1, &2]);
assert_eq!(results.count_extras, 1);
assert!(!results.is_empty());
}
}
| 26.5375
| 92
| 0.614696
|
b0c416db33c045a852fbba1e3e34d743256a594b
| 101
|
py
|
Python
|
captaincloud/processes/task_runner/__init__.py
|
bpsagar/captaincloud
|
7290fbe65f2c758814986f29cb04edccbd3e7f30
|
[
"MIT"
] | 1
|
2016-11-09T17:49:09.000Z
|
2016-11-09T17:49:09.000Z
|
captaincloud/processes/task_runner/__init__.py
|
bpsagar/captaincloud
|
7290fbe65f2c758814986f29cb04edccbd3e7f30
|
[
"MIT"
] | 3
|
2016-11-09T17:45:51.000Z
|
2016-11-14T16:49:09.000Z
|
captaincloud/processes/task_runner/__init__.py
|
bpsagar/captaincloud
|
7290fbe65f2c758814986f29cb04edccbd3e7f30
|
[
"MIT"
] | null | null | null |
from .api import TaskRunnerAPI
from .process import TaskRunnerProcess
from .runner import TaskRunner
| 25.25
| 38
| 0.851485
|
af7a2e72cacc3ea9052916a109167109ed620205
| 122,459
|
py
|
Python
|
hemlock/test_hemlock.py
|
Lab41/Hemlock
|
2c53cfc11bfbe1e4f901b519db578090fe7a17dd
|
[
"Apache-2.0"
] | 4
|
2015-05-14T18:59:44.000Z
|
2017-03-09T12:49:36.000Z
|
hemlock/test_hemlock.py
|
Lab41/Hemlock
|
2c53cfc11bfbe1e4f901b519db578090fe7a17dd
|
[
"Apache-2.0"
] | null | null | null |
hemlock/test_hemlock.py
|
Lab41/Hemlock
|
2c53cfc11bfbe1e4f901b519db578090fe7a17dd
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
#
# Copyright (c) 2013 In-Q-Tel, Inc/Lab41, All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Test module for hemlock.py
Created on 19 August 2013
@author: Charlie Lewis
"""
from clients.hemlock_runner import Hemlock_Runner
from clients.hemlock_base import Hemlock_Base
from clients.hfs_old import HFs as hfs_old
from clients.hfs import HFs as hfs
from clients.hmongo import HMongo
from clients.hmysql import HMysql
from clients.hredis import HRedis
from clients.hrest import HRest
from clients.hstream_odd import HStream_Odd
from clients.hstream_odd import handle
from clients.file_types.hcsv import Hcsv
from clients.file_types.hdoc import Hdoc
from clients.file_types.hgeneric import Hgeneric
from clients.file_types.hjson import Hjson
from clients.file_types.hpcap import Hpcap
from clients.file_types.hpdf import Hpdf
from clients.file_types.hppt import Hppt
from clients.file_types.htext import Htext
from clients.file_types.hxls import Hxls
from clients.file_types.hxml import Hxml
from www.hemlock_web import test
from hemlock_scheduler import Hemlock_Scheduler
import conftest
import hemlock
import MySQLdb as mdb
import pymongo
import pytest
import re
import redis
import socket
import sys
class TestClass:
"""
Test class for hemlock.py
"""
def process_print_help(self):
"""
Tests printing help.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
a.HELP_COUNTER = 2
a.print_help("foo")
def process_print_help_specific(self):
"""
Tests printing help.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
a.HELP_COUNTER = 2
a.print_help("user-tenants-list")
def process_debug(self):
"""
Tests debugging.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
x, error2 = a.process_action(1, "tenant-create", {'--name':'tenant1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error2)
return x, error
def process_client_get(self):
"""
Tests client-get action.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
c, error2 = a.process_action(0, "tenant-create", {'--name':'tenant1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error2)
d, error3 = a.process_action(0, "register-local-system", {'--name':'local-system1', '--data_type':'data-type1', '--description': 'description1', '--tenant_id':c[2][1], '--hostname':'hostname1', '--endpoint':'http://endpoint.com/', '--poc_name':'poc-name1', '--poc_email':'poc-email@dot.com'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error3)
e, error4 = a.process_action(0, "client-store", {'--name':'client1', '--type':'mysql', '--system_id':d[9][1], '--credential_file':'hemlock/clients/mysql_creds_sample'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error4)
x, error5 = a.process_action(0, "client-get", {'--uuid':e[5][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error5)
return x, error
def process_client_list(self):
"""
Tests client-list action.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
b, error1 = a.process_action(0, "client-list", {}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error1)
c, error2 = a.process_action(0, "tenant-create", {'--name':'tenant1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error2)
d, error3 = a.process_action(0, "register-local-system", {'--name':'local-system1', '--data_type':'data-type1', '--description': 'description1', '--tenant_id':c[2][1], '--hostname':'hostname1', '--endpoint':'http://endpoint.com/', '--poc_name':'poc-name1', '--poc_email':'poc-email@dot.com'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error3)
e, error4 = a.process_action(0, "client-store", {'--name':'client1', '--type':'mysql', '--system_id':d[9][1], '--credential_file':'hemlock/clients/mysql_creds_sample'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error4)
x, error5 = a.process_action(0, "client-list", {}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error5)
return x, error
def process_client_systems_list(self):
"""
Tests client-systems-list action.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
b, error1 = a.process_action(0, "client-list", {}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error1)
c, error2 = a.process_action(0, "tenant-create", {'--name':'tenant1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error2)
d, error3 = a.process_action(0, "register-local-system", {'--name':'local-system1', '--data_type':'data-type1', '--description': 'description1', '--tenant_id':c[2][1], '--hostname':'hostname1', '--endpoint':'http://endpoint.com/', '--poc_name':'poc-name1', '--poc_email':'poc-email@dot.com'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error3)
e, error4 = a.process_action(0, "client-store", {'--name':'client1', '--type':'mysql', '--system_id':d[9][1], '--credential_file':'hemlock/clients/mysql_creds_sample'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error4)
x, error5 = a.process_action(0, "client-systems-list", {'--uuid':e[5][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error5)
return x, error
def process_system_clients_list(self):
"""
Tests system-clients-list action.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
b, error1 = a.process_action(0, "client-list", {}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error1)
c, error2 = a.process_action(0, "tenant-create", {'--name':'tenant1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error2)
d, error3 = a.process_action(0, "register-local-system", {'--name':'local-system1', '--data_type':'data-type1', '--description': 'description1', '--tenant_id':c[2][1], '--hostname':'hostname1', '--endpoint':'http://endpoint.com/', '--poc_name':'poc-name1', '--poc_email':'poc-email@dot.com'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error3)
e, error4 = a.process_action(0, "client-store", {'--name':'client1', '--type':'mysql', '--system_id':d[9][1], '--credential_file':'hemlock/clients/mysql_creds_sample'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error4)
x, error5 = a.process_action(0, "system-clients-list", {'--uuid':d[9][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error5)
return x, error
def process_client_purge(self):
"""
Tests client-purge action.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
c, error2 = a.process_action(0, "tenant-create", {'--name':'tenant1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error2)
d, error3 = a.process_action(0, "register-local-system", {'--name':'local-system1', '--data_type':'data-type1', '--description': 'description1', '--tenant_id':c[2][1], '--hostname':'hostname1', '--endpoint':'http://endpoint.com/', '--poc_name':'poc-name1', '--poc_email':'poc-email@dot.com'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error3)
e, error4 = a.process_action(0, "client-store", {'--name':'client1', '--type':'mysql', '--system_id':d[9][1], '--credential_file':'hemlock/clients/mysql_creds_sample'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error4)
x, error5 = a.process_action(0, "client-purge", {'--uuid':e[5][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error5)
return x, error
def process_client_run(self):
"""
Tests client-run action.
:return: returns any data and a list of any errors
"""
error = []
hemlock.raw_input = lambda _: 'y'
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
c, error2 = a.process_action(0, "tenant-create", {'--name':'tenant1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error2)
d, error3 = a.process_action(0, "register-local-system", {'--name':'local-system1', '--data_type':'data-type1', '--description': 'description1', '--tenant_id':c[2][1], '--hostname':'hostname1', '--endpoint':'http://endpoint.com/', '--poc_name':'poc-name1', '--poc_email':'poc-email@dot.com'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error3)
e, error4 = a.process_action(0, "client-store", {'--name':'client1', '--type':'mysql', '--system_id':d[9][1], '--credential_file':'hemlock/clients/mysql_creds_sample'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error4)
f, error5 = a.process_action(0, "hemlock-server-store", {'--credential_file':'hemlock/hemlock_creds_sample'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error5)
x, error6 = a.process_action(0, "client-run", {'--uuid':e[5][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error6)
return x, error
def process_client_schedule(self):
"""
Tests client-schedule action.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
b, error1 = a.process_action(0, "tenant-create", {'--name':'tenant1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error1)
c, error2 = a.process_action(0, "register-local-system", {'--name':'local-system1', '--data_type':'data-type1', '--description': 'description1', '--tenant_id':b[2][1], '--hostname':'hostname1', '--endpoint':'http://endpoint.com/', '--poc_name':'poc-name1', '--poc_email':'poc-email@dot.com'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error2)
d, error3 = a.process_action(0, "client-store", {'--name':'client1', '--type':'mysql', '--system_id':c[9][1], '--credential_file':'hemlock/clients/mysql_creds_sample'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error3)
e, error4 = a.process_action(0, "schedule-server-create", {'--name':'asdf'}, m_server, "localhost", "hemlock", "hemlock", "password", 1, "http://127.0.0.1:9200")
error.append(error4)
x, error5 = a.process_action(0, "client-schedule", {'--name':'asdf', '--minute':'1', '--hour':'1', '--day_of_month':'1', '--month':'1', '--day_of_week':'1', '--client_id':d[5][1], '--schedule_server_id':e[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 1, "http://127.0.0.1:9200")
error.append(error5)
return x, error
def process_schedule_delete(self):
"""
Tests schedule-delete action.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
b, error1 = a.process_action(0, "tenant-create", {'--name':'tenant1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error1)
c, error2 = a.process_action(0, "register-local-system", {'--name':'local-system1', '--data_type':'data-type1', '--description': 'description1', '--tenant_id':b[2][1], '--hostname':'hostname1', '--endpoint':'http://endpoint.com/', '--poc_name':'poc-name1', '--poc_email':'poc-email@dot.com'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error2)
d, error3 = a.process_action(0, "client-store", {'--name':'client1', '--type':'mysql', '--system_id':c[9][1], '--credential_file':'hemlock/clients/mysql_creds_sample'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error3)
e, error4 = a.process_action(0, "schedule-server-create", {'--name':'asdf'}, m_server, "localhost", "hemlock", "hemlock", "password", 1, "http://127.0.0.1:9200")
error.append(error4)
f, error5 = a.process_action(0, "client-schedule", {'--name':'asdf', '--minute':'1', '--hour':'1', '--day_of_month':'1', '--month':'1', '--day_of_week':'1', '--client_id':d[5][1], '--schedule_server_id':e[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 1, "http://127.0.0.1:9200")
error.append(error5)
x, error6 = a.process_action(0, "schedule-delete", {'--uuid':f[9][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 1, "http://127.0.0.1:9200")
error.append(error6)
return x, error
def process_client_schedules_list(self):
"""
Tests client-schedules_list action.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
b, error1 = a.process_action(0, "tenant-create", {'--name':'tenant1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error1)
c, error2 = a.process_action(0, "register-local-system", {'--name':'local-system1', '--data_type':'data-type1', '--description': 'description1', '--tenant_id':b[2][1], '--hostname':'hostname1', '--endpoint':'http://endpoint.com/', '--poc_name':'poc-name1', '--poc_email':'poc-email@dot.com'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error2)
d, error3 = a.process_action(0, "client-store", {'--name':'client1', '--type':'mysql', '--system_id':c[9][1], '--credential_file':'hemlock/clients/mysql_creds_sample'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error3)
e, error4 = a.process_action(0, "schedule-server-create", {'--name':'asdf'}, m_server, "localhost", "hemlock", "hemlock", "password", 1, "http://127.0.0.1:9200")
error.append(error4)
f, error5 = a.process_action(0, "client-schedule", {'--name':'asdf', '--minute':'1', '--hour':'1', '--day_of_month':'1', '--month':'1', '--day_of_week':'1', '--client_id':d[5][1], '--schedule_server_id':e[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 1, "http://127.0.0.1:9200")
error.append(error5)
x, error6 = a.process_action(0, "client-schedules-list", {'--uuid':d[5][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 1, "http://127.0.0.1:9200")
error.append(error6)
return x, error
def process_schedule_clients_list(self):
"""
Tests schedule_clients_list action.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
b, error1 = a.process_action(0, "tenant-create", {'--name':'tenant1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error1)
c, error2 = a.process_action(0, "register-local-system", {'--name':'local-system1', '--data_type':'data-type1', '--description': 'description1', '--tenant_id':b[2][1], '--hostname':'hostname1', '--endpoint':'http://endpoint.com/', '--poc_name':'poc-name1', '--poc_email':'poc-email@dot.com'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error2)
d, error3 = a.process_action(0, "client-store", {'--name':'client1', '--type':'mysql', '--system_id':c[9][1], '--credential_file':'hemlock/clients/mysql_creds_sample'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error3)
e, error4 = a.process_action(0, "schedule-server-create", {'--name':'asdf'}, m_server, "localhost", "hemlock", "hemlock", "password", 1, "http://127.0.0.1:9200")
error.append(error4)
f, error5 = a.process_action(0, "client-schedule", {'--name':'asdf', '--minute':'1', '--hour':'1', '--day_of_month':'1', '--month':'1', '--day_of_week':'1', '--client_id':d[5][1], '--schedule_server_id':e[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 1, "http://127.0.0.1:9200")
error.append(error5)
x, error6 = a.process_action(0, "schedule-clients-list", {'--uuid':f[9][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 1, "http://127.0.0.1:9200")
error.append(error6)
return x, error
def process_client_store(self):
"""
Tests client-store action.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
b, error1 = a.process_action(0, "tenant-create", {'--name':'tenant1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error1)
c, error2 = a.process_action(0, "register-local-system", {'--name':'local-system1', '--data_type':'data-type1', '--description': 'description1', '--tenant_id':b[2][1], '--hostname':'hostname1', '--endpoint':'http://endpoint.com/', '--poc_name':'poc-name1', '--poc_email':'poc-email@dot.com'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error2)
x, error3 = a.process_action(0, "client-store", {'--name':'client1', '--type':'mysql', '--system_id':c[9][1], '--credential_file':'hemlock/clients/mysql_creds_sample'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error3)
cur = m_server.cursor()
str = "select * from clients where uuid = '"+x[2][1]+"'"
cur.execute(str)
y = cur.fetchall()
return x, y, error
def process_schedule_get(self):
"""
Tests schedule-get action.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
b, error1 = a.process_action(0, "tenant-create", {'--name':'tenant1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error1)
c, error2 = a.process_action(0, "register-local-system", {'--name':'local-system1', '--data_type':'data-type1', '--description': 'description1', '--tenant_id':b[2][1], '--hostname':'hostname1', '--endpoint':'http://endpoint.com/', '--poc_name':'poc-name1', '--poc_email':'poc-email@dot.com'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error2)
d, error3 = a.process_action(0, "client-store", {'--name':'client1', '--type':'mysql', '--system_id':c[9][1], '--credential_file':'hemlock/clients/mysql_creds_sample'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error3)
e, error4 = a.process_action(0, "schedule-server-create", {'--name':'asdf'}, m_server, "localhost", "hemlock", "hemlock", "password", 1, "http://127.0.0.1:9200")
error.append(error4)
f, error5 = a.process_action(0, "client-schedule", {'--name':'asdf', '--minute':'1', '--hour':'1', '--day_of_month':'1', '--month':'1', '--day_of_week':'1', '--client_id':d[5][1], '--schedule_server_id':e[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 1, "http://127.0.0.1:9200")
error.append(error5)
x, error6 = a.process_action(0, "schedule-get", {'--uuid':f[9][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 1, "http://127.0.0.1:9200")
error.append(error6)
return x, error
def process_schedule_list(self):
"""
Tests schedule-list action.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
b, error1 = a.process_action(0, "tenant-create", {'--name':'tenant1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error1)
c, error2 = a.process_action(0, "register-local-system", {'--name':'local-system1', '--data_type':'data-type1', '--description': 'description1', '--tenant_id':b[2][1], '--hostname':'hostname1', '--endpoint':'http://endpoint.com/', '--poc_name':'poc-name1', '--poc_email':'poc-email@dot.com'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error2)
d, error3 = a.process_action(0, "client-store", {'--name':'client1', '--type':'mysql', '--system_id':c[9][1], '--credential_file':'hemlock/clients/mysql_creds_sample'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error3)
e, error4 = a.process_action(0, "schedule-server-create", {'--name':'asdf'}, m_server, "localhost", "hemlock", "hemlock", "password", 1, "http://127.0.0.1:9200")
error.append(error4)
f, error5 = a.process_action(0, "client-schedule", {'--name':'asdf', '--minute':'1', '--hour':'1', '--day_of_month':'1', '--month':'1', '--day_of_week':'1', '--client_id':d[5][1], '--schedule_server_id':e[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 1, "http://127.0.0.1:9200")
error.append(error5)
x, error6 = a.process_action(0, "schedule-list", {}, m_server, "localhost", "hemlock", "hemlock", "password", 1, "http://127.0.0.1:9200")
error.append(error6)
return x, error
def process_role_create(self):
"""
Tests role-create action.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
x, error1 = a.process_action(0, "role-create", {'--name':'role1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error1)
cur = m_server.cursor()
str = "select * from roles where uuid = '"+x[2][1]+"'"
cur.execute(str)
y = cur.fetchall()
return x, y, error
def process_tenant_create(self):
"""
Tests tenant-create action.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
x, error1 = a.process_action(0, "tenant-create", {'--name':'tenant1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error1)
cur = m_server.cursor()
str = "select * from tenants where uuid = '"+x[2][1]+"'"
cur.execute(str)
y = cur.fetchall()
return x, y, error
def process_user_create(self):
error = []
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
b, error1 = a.process_action(0, "role-create", {'--name':'role1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error1)
c, error2 = a.process_action(0, "tenant-create", {'--name':'tenant1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error2)
hemlock.getpass.getpass = lambda _: 'boguspw'
x, error3 = a.process_action(0, "user-create", {'--name':'user1', '--username':'username1', '--email':'email@dot.com', '--role_id':b[2][1], '--tenant_id':c[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error3)
cur = m_server.cursor()
str = "select * from users where uuid = '"+x[7][1]+"'"
cur.execute(str)
y = cur.fetchall()
return x, y, error
def process_register_local_system(self):
error = []
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
b, error1 = a.process_action(0, "tenant-create", {'--name':'tenant1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error1)
x, error2 = a.process_action(0, "register-local-system", {'--name':'local-system1', '--data_type':'data-type1', '--description': 'description1', '--tenant_id':b[2][1], '--hostname':'hostname1', '--endpoint':'http://endpoint.com/', '--poc_name':'poc-name1', '--poc_email':'poc-email@dot.com'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error2)
cur = m_server.cursor()
str = "select * from systems where uuid = '"+x[9][1]+"'"
cur.execute(str)
y = cur.fetchall()
return x, y, error
def process_register_remote_system(self):
"""
Tests register-remote-system action.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
b, error1 = a.process_action(0, "tenant-create", {'--name':'tenant1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error1)
x, error2 = a.process_action(0, "register-remote-system", {'--name':'remote-system1', '--data_type':'data-type1', '--description': 'description1', '--tenant_id':b[2][1], '--hostname':'hostname1', '--port':'80', '--remote_uri':'http://remote.uri/', '--poc_name':'poc-name1', '--poc_email':'poc-email@dot.com'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error2)
cur = m_server.cursor()
str = "select * from systems where uuid = '"+x[10][1]+"'"
cur.execute(str)
y = cur.fetchall()
return x, y, error
def process_role_list(self):
"""
Tests role-list action.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
b, error1 = a.process_action(0, "role-list", {}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error1)
c, error2 = a.process_action(0, "role-create", {'--name':'role1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error2)
d, error3 = a.process_action(0, "role-list", {}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error3)
e, error4 = a.process_action(0, "role-create", {'--name':'role2'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error4)
x, error5 = a.process_action(0, "role-list", {}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error5)
# !! TODO fix what is returned
return x, error
def process_tenant_list(self):
"""
Tests tenant-list action.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
b, error1 = a.process_action(0, "tenant-list", {}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error1)
c, error2 = a.process_action(0, "tenant-create", {'--name':'tenant1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error2)
d, error3 = a.process_action(0, "tenant-list", {}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error3)
e, error4 = a.process_action(0, "tenant-create", {'--name':'tenant2'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error4)
x, error5 = a.process_action(0, "tenant-list", {}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error5)
# !! TODO fix what is returned
return x, error
def process_user_list(self):
"""
Tests user-list action.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
b, error1 = a.process_action(0, "user-list", {}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error1)
c, error2 = a.process_action(0, "role-create", {'--name':'role1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error2)
d, error3 = a.process_action(0, "tenant-create", {'--name':'tenant1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error3)
hemlock.getpass.getpass = lambda _: 'boguspw'
e, error4 = a.process_action(0, "user-create", {'--name':'user1', '--username':'username1', '--email':'email@dot.com', '--role_id':c[2][1], '--tenant_id':d[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error4)
f, error5 = a.process_action(0, "user-list", {}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error5)
g, error6 = a.process_action(0, "role-create", {'--name':'role2'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error6)
h, error7 = a.process_action(0, "tenant-create", {'--name':'tenant2'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error7)
hemlock.getpass.getpass = lambda _: 'boguspw'
i, error8 = a.process_action(0, "user-create", {'--name':'user1', '--username':'username1', '--email':'email@dot.com', '--role_id':g[2][1], '--tenant_id':h[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error8)
x, error9 = a.process_action(0, "user-list", {}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error9)
# !! TODO fix what is returned
return x, error
def process_system_list(self):
"""
Tests system-list action.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
b, error1 = a.process_action(0, "system-list", {}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error1)
c, error2 = a.process_action(0, "tenant-create", {'--name':'tenant1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error2)
d, error3 = a.process_action(0, "register-local-system", {'--name':'local-system1', '--data_type':'data-type1', '--description': 'description1', '--tenant_id':c[2][1], '--hostname':'hostname1', '--endpoint':'http://endpoint.com/', '--poc_name':'poc-name1', '--poc_email':'poc-email@dot.com'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error3)
e, error4 = a.process_action(0, "system-list", {}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error4)
f, error5 = a.process_action(0, "tenant-create", {'--name':'tenant2'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error5)
g, error6 = a.process_action(0, "register-local-system", {'--name':'local-system1', '--data_type':'data-type1', '--description': 'description1', '--tenant_id':f[2][1], '--hostname':'hostname1', '--endpoint':'http://endpoint.com/', '--poc_name':'poc-name1', '--poc_email':'poc-email@dot.com'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error6)
x, error7 = a.process_action(0, "system-list", {}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error7)
# !! TODO fix what is returned
return x, error
def process_list_all(self):
"""
Tests list-all action.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
b, error1 = a.process_action(0, "list-all", {}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error1)
c, error2 = a.process_action(0, "role-create", {'--name':'role1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error2)
d, error3 = a.process_action(0, "tenant-create", {'--name':'tenant1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error3)
hemlock.getpass.getpass = lambda _: 'boguspw'
e, error4 = a.process_action(0, "user-create", {'--name':'user1', '--username':'username1', '--email':'email@dot.com', '--role_id':c[2][1], '--tenant_id':d[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error4)
f, error5 = a.process_action(0, "tenant-create", {'--name':'tenant2'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error5)
x, error6 = a.process_action(0, "register-local-system", {'--name':'local-system1', '--data_type':'data-type1', '--description': 'description1', '--tenant_id':f[2][1], '--hostname':'hostname1', '--endpoint':'http://endpoint.com/', '--poc_name':'poc-name1', '--poc_email':'poc-email@dot.com'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error6)
# !! TODO fix what is returned
return x, error
def process_role_users_list(self):
"""
Tests role-users-list action.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
b, error1 = a.process_action(0, "role-create", {'--name':'role1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error1)
c, error2 = a.process_action(0, "role-users-list", {'--uuid':b[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error2)
d, error3 = a.process_action(0, "tenant-create", {'--name':'tenant1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error3)
hemlock.getpass.getpass = lambda _: 'boguspw'
e, error4 = a.process_action(0, "user-create", {'--name':'user1', '--username':'username1', '--email':'email@dot.com', '--role_id':b[2][1], '--tenant_id':d[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error4)
f, error5 = a.process_action(0, "role-users-list", {'--uuid':b[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error5)
g, error6 = a.process_action(0, "tenant-create", {'--name':'tenant2'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error6)
hemlock.getpass.getpass = lambda _: 'boguspw'
h, error7 = a.process_action(0, "user-create", {'--name':'user2', '--username':'username2', '--email':'email@dot.com', '--role_id':b[2][1], '--tenant_id':g[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error7)
x, error8 = a.process_action(0, "role-users-list", {'--uuid':b[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error8)
# !! TODO fix what is returned
return x, error
def process_system_tenants_list(self):
"""
Tests system-tenants-list action.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
b, error1 = a.process_action(0, "tenant-create", {'--name':'tenant1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error1)
c, error2 = a.process_action(0, "register-local-system", {'--name':'local-system1', '--data_type':'data-type1', '--description': 'description1', '--tenant_id':b[2][1], '--hostname':'hostname1', '--endpoint':'http://endpoint.com/', '--poc_name':'poc-name1', '--poc_email':'poc-email@dot.com'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error2)
d, error3 = a.process_action(0, "system-tenants-list", {'--uuid':c[9][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error3)
e, error4 = a.process_action(0, "tenant-create", {'--name':'tenant2'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error4)
f, error5 = a.process_action(0, "system-add-tenant", {'--uuid':c[9][1], '--tenant_id':e[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error5)
x, error6 = a.process_action(0, "system-tenants-list", {'--uuid':c[9][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error6)
# !! TODO fix what is returned
return x, error
def process_tenant_systems_list(self):
"""
Tests tenant-systems-list action.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
b, error1 = a.process_action(0, "tenant-create", {'--name':'tenant1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error1)
c, error2 = a.process_action(0, "tenant-systems-list", {'--uuid':b[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error2)
d, error3 = a.process_action(0, "register-local-system", {'--name':'local-system1', '--data_type':'data-type1', '--description': 'description1', '--tenant_id':b[2][1], '--hostname':'hostname1', '--endpoint':'http://endpoint.com/', '--poc_name':'poc-name1', '--poc_email':'poc-email@dot.com'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error3)
e, error4 = a.process_action(0, "tenant-systems-list", {'--uuid':b[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error4)
f, error5 = a.process_action(0, "register-local-system", {'--name':'local-system2', '--data_type':'data-type1', '--description': 'description1', '--tenant_id':b[2][1], '--hostname':'hostname1', '--endpoint':'http://endpoint.com/', '--poc_name':'poc-name1', '--poc_email':'poc-email@dot.com'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error5)
x, error6 = a.process_action(0, "tenant-systems-list", {'--uuid':b[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error6)
# !! TODO fix what is returned
return x, error
def process_tenant_users_list(self):
"""
Tests tenant-users-list action.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
b, error1 = a.process_action(0, "tenant-create", {'--name':'tenant1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error1)
c, error2 = a.process_action(0, "tenant-users-list", {'--uuid':b[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error2)
d, error3 = a.process_action(0, "role-create", {'--name':'role1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error3)
hemlock.getpass.getpass = lambda _: 'boguspw'
e, error4 = a.process_action(0, "user-create", {'--name':'user1', '--username':'username1', '--email':'email@dot.com', '--role_id':d[2][1], '--tenant_id':b[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error4)
f, error5 = a.process_action(0, "tenant-users-list", {'--uuid':b[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error5)
g, error6 = a.process_action(0, "role-create", {'--name':'role2'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error6)
hemlock.getpass.getpass = lambda _: 'boguspw'
h, error7 = a.process_action(0, "user-create", {'--name':'user2', '--username':'username2', '--email':'email@dot.com', '--role_id':g[2][1], '--tenant_id':b[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error7)
x, error8 = a.process_action(0, "tenant-users-list", {'--uuid':b[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error8)
# !! TODO fix what is returned
return x, error
def process_user_roles_list(self):
"""
Tests user-roles-list action.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
b, error1 = a.process_action(0, "tenant-create", {'--name':'tenant1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error1)
c, error2 = a.process_action(0, "role-create", {'--name':'role1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error2)
hemlock.getpass.getpass = lambda _: 'boguspw'
d, error3 = a.process_action(0, "user-create", {'--name':'user1', '--username':'username1', '--email':'email@dot.com', '--role_id':c[2][1], '--tenant_id':b[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error3)
e, error4 = a.process_action(0, "user-roles-list", {'--uuid':d[7][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error4)
f, error5 = a.process_action(0, "role-create", {'--name':'role2'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error5)
g, error6 = a.process_action(0, "user-add-role", {'--uuid':d[7][1], '--role_id':f[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error6)
x, error7 = a.process_action(0, "user-roles-list", {'--uuid':d[7][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error7)
# !! TODO fix what is returned
return x, error
def process_user_tenants_list(self):
"""
Tests user-tenants-list action.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
b, error1 = a.process_action(0, "tenant-create", {'--name':'tenant1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error1)
c, error2 = a.process_action(0, "role-create", {'--name':'role1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error2)
hemlock.getpass.getpass = lambda _: 'boguspw'
d, error3 = a.process_action(0, "user-create", {'--name':'user1', '--username':'username1', '--email':'email@dot.com', '--role_id':c[2][1], '--tenant_id':b[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error3)
e, error4 = a.process_action(0, "user-tenants-list", {'--uuid':d[7][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error4)
f, error5 = a.process_action(0, "tenant-create", {'--name':'tenant2'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error5)
g, error6 = a.process_action(0, "user-add-tenant", {'--uuid':d[7][1], '--tenant_id':f[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error6)
x, error7 = a.process_action(0, "user-tenants-list", {'--uuid':d[7][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error7)
# !! TODO fix what is returned
return x, error
def process_deregister_local_system(self):
"""
Tests deregister-local-system action.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
b, error1 = a.process_action(0, "tenant-create", {'--name':'tenant1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error1)
c, error2 = a.process_action(0, "register-local-system", {'--name':'local-system1', '--data_type':'data-type1', '--description': 'description1', '--tenant_id':b[2][1], '--hostname':'hostname1', '--endpoint':'http://endpoint.com/', '--poc_name':'poc-name1', '--poc_email':'poc-email@dot.com'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error2)
x, error3 = a.process_action(0, "deregister-local-system", {'--uuid':c[9][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error3)
# !! TODO fix what is returned
return x, error
def process_deregister_remote_system(self):
"""
Tests deregister-remote-system action.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
b, error1 = a.process_action(0, "tenant-create", {'--name':'tenant1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error1)
c, error2 = a.process_action(0, "register-local-system", {'--name':'local-system1', '--data_type':'data-type1', '--description': 'description1', '--tenant_id':b[2][1], '--hostname':'hostname1', '--endpoint':'http://endpoint.com/', '--poc_name':'poc-name1', '--poc_email':'poc-email@dot.com'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error2)
x, error3 = a.process_action(0, "deregister-remote-system", {'--uuid':c[9][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error3)
# !! TODO fix what is returned
return x, error
def process_role_delete(self):
"""
Tests role-delete action.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
b, error1 = a.process_action(0, "role-create", {'--name':'role1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error1)
x, error2 = a.process_action(0, "role-delete", {'--uuid':b[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error2)
# !! TODO fix what is returned
return x, error
def process_system_add_tenant(self):
"""
Tests system-add-tenant action.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
b, error1 = a.process_action(0, "tenant-create", {'--name':'tenant1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error1)
c, error2 = a.process_action(0, "register-local-system", {'--name':'local-system1', '--data_type':'data-type1', '--description': 'description1', '--tenant_id':b[2][1], '--hostname':'hostname1', '--endpoint':'http://endpoint.com/', '--poc_name':'poc-name1', '--poc_email':'poc-email@dot.com'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error2)
d, error3 = a.process_action(0, "tenant-create", {'--name':'tenant2'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error3)
x, error4 = a.process_action(0, "system-add-tenant", {'--uuid':c[9][1], '--tenant_id':d[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error4)
# !! TODO fix what is returned
return x, error
def process_system_get(self):
"""
Tests system-get action.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
b, error1 = a.process_action(0, "tenant-create", {'--name':'tenant1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error1)
c, error2 = a.process_action(0, "register-local-system", {'--name':'local-system1', '--data_type':'data-type1', '--description': 'description1', '--tenant_id':b[2][1], '--hostname':'hostname1', '--endpoint':'http://endpoint.com/', '--poc_name':'poc-name1', '--poc_email':'poc-email@dot.com'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error2)
x, error3 = a.process_action(0, "system-get", {'--uuid':c[9][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error3)
# !! TODO fix what is returned
return x, error
def process_system_remove_tenant(self):
"""
Tests system-remove-tenant action.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
b, error1 = a.process_action(0, "tenant-create", {'--name':'tenant1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error1)
c, error2 = a.process_action(0, "register-local-system", {'--name':'local-system1', '--data_type':'data-type1', '--description': 'description1', '--tenant_id':b[2][1], '--hostname':'hostname1', '--endpoint':'http://endpoint.com/', '--poc_name':'poc-name1', '--poc_email':'poc-email@dot.com'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error2)
#d, error3 = a.process_action(0, "system-remove-tenant", {'--uuid':c[9][1], '--tenant_id':b[2][1]}, m_server)
#error.append(error3)
e, error4 = a.process_action(0, "tenant-create", {'--name':'tenant2'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error4)
f, error5 = a.process_action(0, "system-add-tenant", {'--uuid':c[9][1], '--tenant_id':e[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error5)
x, error6 = a.process_action(0, "system-remove-tenant", {'--uuid':c[9][1], '--tenant_id':e[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error6)
# !! TODO fix what is returned
return x, error
def process_tenant_delete(self):
"""
Tests tenant-delete action.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
b, error1 = a.process_action(0, "tenant-create", {'--name':'tenant1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error1)
x, error2 = a.process_action(0, "tenant-delete", {'--uuid':b[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error2)
# !! TODO fix what is returned
return x, error
def process_tenant_get(self):
"""
Tests tenant-get action.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
b, error1 = a.process_action(0, "tenant-create", {'--name':'tenant1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error1)
x, error2 = a.process_action(0, "tenant-get", {'--uuid':b[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error2)
# !! TODO fix what is returned
return x, error
def process_user_add_role(self):
"""
Tests user-add-role action.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
b, error1 = a.process_action(0, "tenant-create", {'--name':'tenant1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error1)
c, error2 = a.process_action(0, "role-create", {'--name':'role1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error2)
hemlock.getpass.getpass = lambda _: 'boguspw'
d, error3 = a.process_action(0, "user-create", {'--name':'user1', '--username':'username1', '--email':'email@dot.com', '--role_id':c[2][1], '--tenant_id':b[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error3)
e, error4 = a.process_action(0, "role-create", {'--name':'role2'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error4)
x, error5 = a.process_action(0, "user-add-role", {'--uuid':d[7][1], '--role_id':e[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error5)
# !! TODO fix what is returned
return x, error
def process_user_add_tenant(self):
"""
Tests user-add-tenant action.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
b, error1 = a.process_action(0, "role-create", {'--name':'role1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error1)
c, error2 = a.process_action(0, "tenant-create", {'--name':'tenant1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error2)
hemlock.getpass.getpass = lambda _: 'boguspw'
d, error3 = a.process_action(0, "user-create", {'--name':'user1', '--username':'username1', '--email':'email@dot.com', '--role_id':b[2][1], '--tenant_id':c[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error3)
e, error4 = a.process_action(0, "tenant-create", {'--name':'tenant2'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error4)
x, error5 = a.process_action(0, "user-add-tenant", {'--uuid':d[7][1], '--tenant_id':e[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error5)
# !! TODO fix what is returned
return x, error
def process_user_delete(self):
"""
Tests user-delete action.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
b, error1 = a.process_action(0, "tenant-create", {'--name':'tenant1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error1)
c, error2 = a.process_action(0, "role-create", {'--name':'role1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error2)
hemlock.getpass.getpass = lambda _: 'boguspw'
d, error3 = a.process_action(0, "user-create", {'--name':'user1', '--username':'username1', '--email':'email@dot.com', '--role_id':c[2][1], '--tenant_id':b[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error3)
x, error4 = a.process_action(0, "user-delete", {'--uuid':d[7][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error4)
# !! TODO fix what is returned
return x, error
def process_user_get(self):
"""
Tests user-get action.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
b, error1 = a.process_action(0, "tenant-create", {'--name':'tenant1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error1)
c, error2 = a.process_action(0, "role-create", {'--name':'role1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error2)
hemlock.getpass.getpass = lambda _: 'boguspw'
d, error3 = a.process_action(0, "user-create", {'--name':'user1', '--username':'username1', '--email':'email@dot.com', '--role_id':c[2][1], '--tenant_id':b[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error3)
x, error4 = a.process_action(0, "user-get", {'--uuid':d[7][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error4)
# !! TODO fix what is returned
return x, error
def process_user_remove_role(self):
"""
Tests user-remove-role action.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
b, error1 = a.process_action(0, "tenant-create", {'--name':'tenant1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error1)
c, error2 = a.process_action(0, "role-create", {'--name':'role1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error2)
hemlock.getpass.getpass = lambda _: 'boguspw'
d, error3 = a.process_action(0, "user-create", {'--name':'user1', '--username':'username1', '--email':'email@dot.com', '--role_id':c[2][1], '--tenant_id':b[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error3)
#e, error4 = a.process_action(0, "user-remove-role", {'--uuid':d[7][1], '--role_id':c[2][1]}, m_server)
#error.append(error4)
f, error5 = a.process_action(0, "role-create", {'--name':'role2'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error5)
g, error6 = a.process_action(0, "user-add-role", {'--uuid':d[7][1], '--role_id':f[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error6)
x, error7 = a.process_action(0, "user-remove-role", {'--uuid':d[7][1], '--role_id':f[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error7)
# !! TODO fix what is returned
return x, error
def process_user_remove_tenant(self):
"""
Tests user-remove-tenant action.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
b, error1 = a.process_action(0, "role-create", {'--name':'role1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error1)
c, error2 = a.process_action(0, "tenant-create", {'--name':'tenant1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error2)
hemlock.getpass.getpass = lambda _: 'boguspw'
d, error3 = a.process_action(0, "user-create", {'--name':'user1', '--username':'username1', '--email':'email@dot.com', '--role_id':b[2][1], '--tenant_id':c[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error3)
#e, error4 = a.process_action(0, "user-remove-tenant", {'--uuid':d[7][1], '--tenant_id':c[2][1]}, m_server)
#error.append(error4)
f, error5 = a.process_action(0, "tenant-create", {'--name':'tenant2'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error5)
g, error6 = a.process_action(0, "user-add-tenant", {'--uuid':d[7][1], '--tenant_id':f[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error6)
x, error7 = a.process_action(0, "user-remove-tenant", {'--uuid':d[7][1], '--tenant_id':f[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error7)
# !! TODO fix what is returned
return x, error
def process_start_scheduler(self):
"""
Tests start-scheduler action.
:return: returns any data and a list of any errors
"""
# !! TODO
x = ""
error = ""
return x, error
def process_query_data_couchbase(self):
"""
Tests query-data action.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
b, error1 = a.process_action(0, "role-create", {'--name':'role1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error1)
c, error2 = a.process_action(0, "tenant-create", {'--name':'tenant1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error2)
hemlock.getpass.getpass = lambda _: 'boguspw'
d, error3 = a.process_action(0, "user-create", {'--name':'user1', '--username':'username1', '--email':'email@dot.com', '--role_id':b[2][1], '--tenant_id':c[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error3)
e, error4 = a.process_action(0, "register-local-system", {'--name':'local-system1', '--data_type':'data-type1', '--description': 'description1', '--tenant_id':c[2][1], '--hostname':'hostname1', '--endpoint':'http://endpoint.com/', '--poc_name':'poc-name1', '--poc_email':'poc-email@dot.com'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error4)
hemlock.getpass.getpass = lambda _: 'boguspw'
#x, error5 = a.process_action(0, "query-data", {'--user':d[7][1], '--query':'foo'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
#error.append(error5)
x = ""
return x, error
def process_query_data_elasticsearch(self):
"""
Tests query-data action.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
b, error1 = a.process_action(0, "role-create", {'--name':'role1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error1)
c, error2 = a.process_action(0, "tenant-create", {'--name':'tenant1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error2)
hemlock.getpass.getpass = lambda _: 'boguspw'
d, error3 = a.process_action(0, "user-create", {'--name':'user1', '--username':'username1', '--email':'email@dot.com', '--role_id':b[2][1], '--tenant_id':c[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error3)
e, error4 = a.process_action(0, "register-local-system", {'--name':'local-system1', '--data_type':'data-type1', '--description': 'description1', '--tenant_id':c[2][1], '--hostname':'hostname1', '--endpoint':'http://endpoint.com/', '--poc_name':'poc-name1', '--poc_email':'poc-email@dot.com'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error4)
hemlock.getpass.getpass = lambda _: 'boguspw'
x, error5 = a.process_action(0, "query-data", {'--user':d[7][1], '--query':'foo'}, m_server, "localhost", "hemlock", "hemlock", "password", 1, "http://127.0.0.1:9200")
error.append(error5)
return x, error
def process_schedule_change_server(self):
"""
Tests schedule-change-server action.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
b, error1 = a.process_action(0, "tenant-create", {'--name':'tenant1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error1)
c, error2 = a.process_action(0, "register-local-system", {'--name':'local-system1', '--data_type':'data-type1', '--description': 'description1', '--tenant_id':b[2][1], '--hostname':'hostname1', '--endpoint':'http://endpoint.com/', '--poc_name':'poc-name1', '--poc_email':'poc-email@dot.com'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error2)
d, error3 = a.process_action(0, "client-store", {'--name':'client1', '--type':'mysql', '--system_id':c[9][1], '--credential_file':'hemlock/clients/mysql_creds_sample'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
error.append(error3)
e, error4 = a.process_action(0, "schedule-server-create", {'--name':'asdf'}, m_server, "localhost", "hemlock", "hemlock", "password", 1, "http://127.0.0.1:9200")
error.append(error4)
f, error5 = a.process_action(0, "client-schedule", {'--name':'asdf', '--minute':'1', '--hour':'1', '--day_of_month':'1', '--month':'1', '--day_of_week':'1', '--client_id':d[5][1], '--schedule_server_id':e[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 1, "http://127.0.0.1:9200")
error.append(error5)
g, error6 = a.process_action(0, "schedule-server-create", {'--name':'asdf'}, m_server, "localhost", "hemlock", "hemlock", "password", 1, "http://127.0.0.1:9200")
error.append(error6)
x, error7 = a.process_action(0, "schedule-change-server", {'--uuid':f[9][1], '--schedule_server_id':g[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 1, "http://127.0.0.1:9200")
error.append(error7)
return x, error
def process_schedule_server_create(self):
"""
Tests schedule-server-create action.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
x, error1 = a.process_action(0, "schedule-server-create", {'--name':'asdf'}, m_server, "localhost", "hemlock", "hemlock", "password", 1, "http://127.0.0.1:9200")
error.append(error1)
return x, error
def process_schedule_server_delete(self):
"""
Tests schedule-server-delete action.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
b, error1 = a.process_action(0, "schedule-server-create", {'--name':'asdf'}, m_server, "localhost", "hemlock", "hemlock", "password", 1, "http://127.0.0.1:9200")
error.append(error1)
x, error2 = a.process_action(0, "schedule-server-delete", {'--uuid':b[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 1, "http://127.0.0.1:9200")
error.append(error2)
return x, error
def process_schedule_server_get(self):
"""
Tests schedule-server-get action.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
b, error1 = a.process_action(0, "schedule-server-create", {'--name':'asdf'}, m_server, "localhost", "hemlock", "hemlock", "password", 1, "http://127.0.0.1:9200")
error.append(error1)
x, error2 = a.process_action(0, "schedule-server-get", {'--uuid':b[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 1, "http://127.0.0.1:9200")
error.append(error2)
return x, error
    def process_schedule_server_list(self):
        """
        Tests schedule-server-list action.
        :return: returns any data and a list of any errors
        """
        error = []
        a = hemlock.Hemlock()
        m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
        # Seed one schedule server so the list has at least one row to return.
        b, error1 = a.process_action(0, "schedule-server-create", {'--name':'asdf'}, m_server, "localhost", "hemlock", "hemlock", "password", 1, "http://127.0.0.1:9200")
        error.append(error1)
        # List all schedule servers; the action takes no arguments.
        x, error2 = a.process_action(0, "schedule-server-list", {}, m_server, "localhost", "hemlock", "hemlock", "password", 1, "http://127.0.0.1:9200")
        error.append(error2)
        return x, error
    def process_schedule_add_client(self):
        """
        Tests schedule-add-client action.
        :return: returns any data and a list of any errors
        """
        error = []
        a = hemlock.Hemlock()
        m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
        # Build the fixture chain the action needs:
        # tenant -> local system -> client -> schedule server -> schedule,
        # plus a second client to attach to the schedule.
        b, error1 = a.process_action(0, "tenant-create", {'--name':'tenant1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
        error.append(error1)
        # b[2][1] is the new tenant's uuid (matched as a uuid in test_process_tenant_create).
        c, error2 = a.process_action(0, "register-local-system", {'--name':'local-system1', '--data_type':'data-type1', '--description': 'description1', '--tenant_id':b[2][1], '--hostname':'hostname1', '--endpoint':'http://endpoint.com/', '--poc_name':'poc-name1', '--poc_email':'poc-email@dot.com'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
        error.append(error2)
        # c[9][1] is the system uuid (see test_process_register_local_system).
        d, error3 = a.process_action(0, "client-store", {'--name':'client1', '--type':'mysql', '--system_id':c[9][1], '--credential_file':'hemlock/clients/mysql_creds_sample'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
        error.append(error3)
        e, error4 = a.process_action(0, "schedule-server-create", {'--name':'asdf'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
        error.append(error4)
        # d[5][1] / e[2][1] are used as the client id and schedule-server uuid.
        f, error5 = a.process_action(0, "client-schedule", {'--name':'asdf', '--minute':'1', '--hour':'1', '--day_of_month':'1', '--month':'1', '--day_of_week':'1', '--client_id':d[5][1], '--schedule_server_id':e[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
        error.append(error5)
        # Store a second client; g[5][1] is the id that gets added to the schedule.
        g, error6 = a.process_action(0, "client-store", {'--name':'client1', '--type':'mysql', '--system_id':c[9][1], '--credential_file':'hemlock/clients/mysql_creds_sample'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
        error.append(error6)
        # f[9][1] appears to be the schedule uuid produced by client-schedule -- TODO confirm.
        x, error7 = a.process_action(0, "schedule-add-client", {'--uuid':f[9][1], '--client_id':g[5][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
        error.append(error7)
        return x, error
    def process_client_add_schedule(self):
        """
        Tests client-add-schedule action.
        :return: returns any data and a list of any errors
        """
        error = []
        a = hemlock.Hemlock()
        m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
        # Fixture chain: tenant -> local system -> client -> schedule server ->
        # schedule, then a second client and a second schedule to attach.
        b, error1 = a.process_action(0, "tenant-create", {'--name':'tenant1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
        error.append(error1)
        c, error2 = a.process_action(0, "register-local-system", {'--name':'local-system1', '--data_type':'data-type1', '--description': 'description1', '--tenant_id':b[2][1], '--hostname':'hostname1', '--endpoint':'http://endpoint.com/', '--poc_name':'poc-name1', '--poc_email':'poc-email@dot.com'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
        error.append(error2)
        d, error3 = a.process_action(0, "client-store", {'--name':'client1', '--type':'mysql', '--system_id':c[9][1], '--credential_file':'hemlock/clients/mysql_creds_sample'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
        error.append(error3)
        e, error4 = a.process_action(0, "schedule-server-create", {'--name':'asdf'}, m_server, "localhost", "hemlock", "hemlock", "password", 1, "http://127.0.0.1:9200")
        error.append(error4)
        f, error5 = a.process_action(0, "client-schedule", {'--name':'asdf', '--minute':'1', '--hour':'1', '--day_of_month':'1', '--month':'1', '--day_of_week':'1', '--client_id':d[5][1], '--schedule_server_id':e[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 1, "http://127.0.0.1:9200")
        error.append(error5)
        # Second client (g) and second schedule (h) on the same schedule server.
        g, error6 = a.process_action(0, "client-store", {'--name':'client1', '--type':'mysql', '--system_id':c[9][1], '--credential_file':'hemlock/clients/mysql_creds_sample'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
        error.append(error6)
        h, error7 = a.process_action(0, "client-schedule", {'--name':'asdf', '--minute':'1', '--hour':'1', '--day_of_month':'1', '--month':'1', '--day_of_week':'1', '--client_id':g[5][1], '--schedule_server_id':e[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
        error.append(error7)
        # Attach schedule h[9][1] to the first client d[5][1].
        x, error8 = a.process_action(0, "client-add-schedule", {'--uuid':d[5][1], '--schedule_id':h[9][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
        error.append(error8)
        return x, error
    def process_schedule_remove_client(self):
        """
        Tests schedule-remove-client action.
        :return: returns any data and a list of any errors
        """
        error = []
        a = hemlock.Hemlock()
        m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
        # Fixture chain: tenant -> local system -> client -> schedule server ->
        # schedule, then a second client that is first added to the schedule
        # and finally removed again by the action under test.
        b, error1 = a.process_action(0, "tenant-create", {'--name':'tenant1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
        error.append(error1)
        c, error2 = a.process_action(0, "register-local-system", {'--name':'local-system1', '--data_type':'data-type1', '--description': 'description1', '--tenant_id':b[2][1], '--hostname':'hostname1', '--endpoint':'http://endpoint.com/', '--poc_name':'poc-name1', '--poc_email':'poc-email@dot.com'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
        error.append(error2)
        d, error3 = a.process_action(0, "client-store", {'--name':'client1', '--type':'mysql', '--system_id':c[9][1], '--credential_file':'hemlock/clients/mysql_creds_sample'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
        error.append(error3)
        e, error4 = a.process_action(0, "schedule-server-create", {'--name':'asdf'}, m_server, "localhost", "hemlock", "hemlock", "password", 1, "http://127.0.0.1:9200")
        error.append(error4)
        f, error5 = a.process_action(0, "client-schedule", {'--name':'asdf', '--minute':'1', '--hour':'1', '--day_of_month':'1', '--month':'1', '--day_of_week':'1', '--client_id':d[5][1], '--schedule_server_id':e[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 1, "http://127.0.0.1:9200")
        error.append(error5)
        g, error6 = a.process_action(0, "client-store", {'--name':'client1', '--type':'mysql', '--system_id':c[9][1], '--credential_file':'hemlock/clients/mysql_creds_sample'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
        error.append(error6)
        # Add client g to schedule f, then remove it with the same identifiers.
        h, error7 = a.process_action(0, "schedule-add-client", {'--uuid':f[9][1], '--client_id':g[5][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
        error.append(error7)
        x, error8 = a.process_action(0, "schedule-remove-client", {'--uuid':f[9][1], '--client_id':g[5][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
        error.append(error8)
        return x, error
    def process_client_remove_schedule(self):
        """
        Tests client-remove-schedule action.
        :return: returns any data and a list of any errors
        """
        error = []
        a = hemlock.Hemlock()
        m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
        # Fixture chain: tenant -> local system -> client -> schedule server ->
        # schedule, then a second client/schedule; the schedule is added to the
        # first client and finally removed by the action under test.
        b, error1 = a.process_action(0, "tenant-create", {'--name':'tenant1'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
        error.append(error1)
        c, error2 = a.process_action(0, "register-local-system", {'--name':'local-system1', '--data_type':'data-type1', '--description': 'description1', '--tenant_id':b[2][1], '--hostname':'hostname1', '--endpoint':'http://endpoint.com/', '--poc_name':'poc-name1', '--poc_email':'poc-email@dot.com'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
        error.append(error2)
        d, error3 = a.process_action(0, "client-store", {'--name':'client1', '--type':'mysql', '--system_id':c[9][1], '--credential_file':'hemlock/clients/mysql_creds_sample'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
        error.append(error3)
        e, error4 = a.process_action(0, "schedule-server-create", {'--name':'asdf'}, m_server, "localhost", "hemlock", "hemlock", "password", 1, "http://127.0.0.1:9200")
        error.append(error4)
        f, error5 = a.process_action(0, "client-schedule", {'--name':'asdf', '--minute':'1', '--hour':'1', '--day_of_month':'1', '--month':'1', '--day_of_week':'1', '--client_id':d[5][1], '--schedule_server_id':e[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 1, "http://127.0.0.1:9200")
        error.append(error5)
        g, error6 = a.process_action(0, "client-store", {'--name':'client1', '--type':'mysql', '--system_id':c[9][1], '--credential_file':'hemlock/clients/mysql_creds_sample'}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
        error.append(error6)
        h, error7 = a.process_action(0, "client-schedule", {'--name':'asdf', '--minute':'1', '--hour':'1', '--day_of_month':'1', '--month':'1', '--day_of_week':'1', '--client_id':g[5][1], '--schedule_server_id':e[2][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
        error.append(error7)
        # Add schedule h to client d, then remove it with identical identifiers.
        i, error8 = a.process_action(0, "client-add-schedule", {'--uuid':d[5][1], '--schedule_id':h[9][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
        error.append(error8)
        x, error9 = a.process_action(0, "client-remove-schedule", {'--uuid':d[5][1], '--schedule_id':h[9][1]}, m_server, "localhost", "hemlock", "hemlock", "password", 0, "http://127.0.0.1:9200")
        error.append(error9)
        return x, error
    def process_hemlock_scheduler(self):
        """
        Tests hemlock_scheduler.py.
        """
        # Fake the CLI argument vector the scheduler reads from sys.argv.
        sys.argv = ["", "hemlock_creds", "asdf"]
        a = Hemlock_Scheduler()
        a.init_schedule()
        a.check_schedules()
    def process_hemlock_scheduler2(self):
        """
        Tests hemlock_scheduler.py.
        """
        sys.argv = ["", "hemlock_creds", "asdf"]
        b = Hemlock_Base()
        # Point the base class at the sample credentials shipped with the repo.
        b.SERVER_CREDS_FILE = "hemlock/hemlock_creds_sample"
        client_dict, server_dict = b.get_creds(0, "hemlock/clients/mysql_creds_sample")
        sys.argv = ["", "hemlock_creds", "asdf"]
        a = Hemlock_Scheduler()
        # Run a job for a bogus client id; the test wrapper expects this
        # to raise SystemExit (see test_process_hemlock_scheduler2).
        a.job_work(server_dict, "foo")
def connect_mysql(self, debug, server, user, pw, db):
"""
Makes a connection to the test Hemlock MySQL server.
:return: returns an instance of the MySQL connection
"""
a = hemlock.Hemlock()
m_server = a.mysql_server(debug, server, user, pw, db)
return m_server
def process_hemlock_process_args(self):
"""
Tests hemlock process_args.
:return: returns any data and a list of any errors
"""
error = []
a = hemlock.Hemlock()
x = a.process_args(0, ["client"])
def process_base_args1(self):
"""
Tests hemlock_base without args.
:return: returns any data and a list of any errors
"""
error = []
a = Hemlock_Base()
x = a.process_args(0, [])
def process_base_args2(self):
"""
Tests hemlock_base with args.
:return: returns any data and a list of any errors
"""
error = []
a = Hemlock_Base()
a, b, c = a.process_args(0, ['--uuid', 'asdf', '--client', 'asdf', '--splits',10])
x = a+b+str(c)
return x, error
    def process_base_send_data(self):
        """
        Tests hemlock_base send_data.
        :return: returns any data and a list of any errors
        """
        error = []
        a = Hemlock_Base()
        # Use the sample credentials shipped with the repo.
        a.SERVER_CREDS_FILE = "hemlock/hemlock_creds_sample"
        client_dict, server_dict = a.get_creds(0, "hemlock/clients/mysql_creds_sample")
        # Third arg (1) is a backend-selector flag for connect_server --
        # exact semantics defined in Hemlock_Base.connect_server.
        h_server = a.connect_server(0, server_dict, 1)
        # Send an empty payload; only checks the call completes.
        x = a.send_data(0, [[]], [], h_server, "asdf", 1)
        return x, error
def process_base_connect_server_couchbase(self):
"""
Tests hemlock_base connect_server with couchbase.
:return: returns any data and a list of any errors
"""
error = []
x = ""
a = Hemlock_Base()
a.SERVER_CREDS_FILE = "hemlock/hemlock_creds_sample"
client_dict, server_dict = a.get_creds(0, "hemlock/clients/mysql_creds_sample")
h_server = a.connect_server(0, server_dict, 0)
    def process_hfs_old_process_files(self):
        """
        Tests hfs_old process_files.
        :return: returns any data and a list of any errors
        """
        error = []
        x = ""
        a = Hemlock_Base()
        a.SERVER_CREDS_FILE = "hemlock/hemlock_creds_sample"
        client_dict, server_dict = a.get_creds(0, "hemlock/clients/mysql_creds_sample")
        h_server = a.connect_server(0, server_dict, 1)
        # NOTE: 'a' is rebound from Hemlock_Base to the hfs_old client here.
        a = hfs_old()
        # Walk the built docs directory as the file source for the client.
        a.process_files(0, "docs/_build/", h_server, "asdf", 0)
        return x, error
def process_hfs_connect_client(self):
"""
Tests hfs connect_client.
:return: returns any data and a list of any errors
"""
error = []
a = hfs()
x = a.connect_client(0, {'FILE_PATH':'docs/_build/'})
x = a.connect_client(0, {})
    def process_hfs_get_data(self):
        """
        Tests hfs get_data.
        :return: returns any data and a list of any errors
        """
        error = []
        a = Hemlock_Base()
        a.SERVER_CREDS_FILE = "hemlock/hemlock_creds_sample"
        client_dict, server_dict = a.get_creds(0, "hemlock/clients/fs_creds_sample")
        h_server = a.connect_server(0, server_dict, 1)
        # NOTE: 'a' is rebound from Hemlock_Base to the hfs client here.
        a = hfs()
        c_server = a.connect_client(0, {'FILE_PATH':'docs/_build/'})
        x = a.get_data(0, client_dict, c_server, h_server, "asdf", 0)
        return x, error
    def process_hfs_format_lists(self):
        """
        Tests hfs format_lists.
        :return: returns any data and a list of any errors
        """
        error = []
        x = ""
        a = Hemlock_Base()
        a.SERVER_CREDS_FILE = "hemlock/hemlock_creds_sample"
        client_dict, server_dict = a.get_creds(0, "hemlock/clients/fs_creds_sample")
        h_server = a.connect_server(0, server_dict, 1)
        # NOTE: 'a' is rebound from Hemlock_Base to the hfs client here.
        a = hfs()
        # Feed a single awkward string to exercise the list formatter.
        a.format_lists(0, ["'[0]'"], h_server, "asdf", 0)
        return x, error
    def process_hstream_odd_start(self):
        """
        Tests hstream_odd start.
        :return: returns any data and a list of any errors
        """
        error = []
        x = ""
        a = Hemlock_Base()
        a.SERVER_CREDS_FILE = "hemlock/hemlock_creds_sample"
        client_dict, server_dict = a.get_creds(0, "hemlock/clients/stream_odd_creds_sample")
        h_server = a.connect_server(0, server_dict, 1)
        # NOTE: 'a' is rebound from Hemlock_Base to the HStream_Odd client here.
        a = HStream_Odd()
        # flag=0 presumably limits how long the stream runs -- TODO confirm.
        a.flag = 0
        a.connect_client(0, client_dict, h_server, "asdf", 0)
        a.start(0, "localhost", 50000, h_server, "asdf", 0, 0)
        # handle() is a module-level helper from the stream client; invoked
        # directly with bogus connection data to exercise its error path.
        handle(0, "bogus-conn", "bogus-address", h_server, "asdf", 0, 0)
        return x, error
    def process_hmysql_connect_client(self):
        """
        Tests hmysql connect_client.
        :return: returns any data and a list of any errors
        """
        error = []
        a = Hemlock_Base()
        a.SERVER_CREDS_FILE = "hemlock/hemlock_creds_sample"
        client_dict, server_dict = a.get_creds(0, "hemlock/clients/mysql_creds_sample")
        h_server = a.connect_server(0, server_dict, 1)
        # NOTE: 'a' is rebound from Hemlock_Base to the HMysql client here.
        a = HMysql()
        c_server = a.connect_client(0, client_dict)
        x = a.get_data(0, client_dict, c_server, h_server, "asdf", 0)
        return x, error
    def process_hmysql_connect_client2(self):
        """
        Tests hmysql connect_client.
        :return: returns any data and a list of any errors
        """
        error = []
        a = Hemlock_Base()
        a.SERVER_CREDS_FILE = "hemlock/hemlock_creds_sample"
        client_dict, server_dict = a.get_creds(0, "hemlock/clients/mysql_creds_sample")
        # Variant: override the target table so get_data reads real rows
        # from the tenants table created by the other tests.
        client_dict['MYSQL_TABLE'] = "tenants"
        h_server = a.connect_server(0, server_dict, 1)
        a = HMysql()
        c_server = a.connect_client(0, client_dict)
        x = a.get_data(0, client_dict, c_server, h_server, "asdf", 0)
        return x, error
    def process_hredis_connect_client(self):
        """
        Tests hredis connect_client.
        :return: returns any data and a list of any errors
        """
        error = []
        a = Hemlock_Base()
        a.SERVER_CREDS_FILE = "hemlock/hemlock_creds_sample"
        client_dict, server_dict = a.get_creds(0, "hemlock/clients/redis_creds_sample")
        h_server = a.connect_server(0, server_dict, 1)
        # NOTE: 'a' is rebound from Hemlock_Base to the HRedis client here.
        a = HRedis()
        c_server = a.connect_client(0, client_dict)
        x = a.get_data(0, client_dict, c_server, h_server, "asdf", 0)
        return x, error
    def process_hmongo_connect_client(self):
        """
        Tests hmongo connect_client.
        :return: returns any data and a list of any errors
        """
        error = []
        a = Hemlock_Base()
        a.SERVER_CREDS_FILE = "hemlock/hemlock_creds_sample"
        client_dict, server_dict = a.get_creds(0, "hemlock/clients/mongo_creds_sample")
        h_server = a.connect_server(0, server_dict, 1)
        # NOTE: 'a' is rebound from Hemlock_Base to the HMongo client here.
        a = HMongo()
        c_server = a.connect_client(0, client_dict)
        x = a.get_data(0, client_dict, c_server, h_server, "asdf", 0)
        return x, error
    def process_hrest_connect_client(self):
        """
        Tests hrest connect_client.
        :return: returns any data and a list of any errors
        """
        error = []
        a = Hemlock_Base()
        a.SERVER_CREDS_FILE = "hemlock/hemlock_creds_sample"
        client_dict, server_dict = a.get_creds(0, "hemlock/clients/rest_creds_sample")
        h_server = a.connect_server(0, server_dict, 1)
        a = HRest()
        # NOTE(review): the docstring promises a return, but nothing is
        # returned -- TODO decide whether callers need the result.
        a.connect_client(0, client_dict)
    def process_hrest_get_data(self):
        """
        Tests hrest get_data.
        :return: returns any data and a list of any errors
        """
        error = []
        a = Hemlock_Base()
        a.SERVER_CREDS_FILE = "hemlock/hemlock_creds_sample"
        client_dict, server_dict = a.get_creds(0, "hemlock/clients/rest_creds_sample")
        h_server = a.connect_server(0, server_dict, 1)
        a = HRest()
        # Bogus client connection exercises get_data's error handling;
        # no return here either (same TODO as above method).
        a.get_data(0, client_dict, "bogus", h_server, "asdf", 0)
    def process_hemlock_base_update_hemlock(self):
        """
        Tests hemlock_base update_hemlock.
        :return: returns any data and a list of any errors
        """
        error = []
        a = Hemlock_Base()
        a.SERVER_CREDS_FILE = "hemlock/hemlock_creds_sample"
        client_dict, server_dict = a.get_creds(0, "hemlock/clients/mysql_creds_sample")
        # Update status for a bogus uuid; only checks the call completes.
        a.update_hemlock(0, "asdf", server_dict)
def process_hemlock_runner_mysql_server(self):
"""
Tests hemlock_runner mysql_server.
:return: returns any data and a list of any errors
"""
error = []
x = ""
a = Hemlock_Runner()
x = a.mysql_server(0, "localhost", "travis", "password", "hemlock_test")
return x, error
def process_hemlock_runner_mysql_server2(self):
"""
Tests hemlock_runner mysql_server.
:return: returns any data and a list of any errors
"""
error = []
x = ""
a = Hemlock_Runner()
x = a.mysql_server(0, "localhost", "travis", "bad_password", "hemlock_test")
    # Test entry points below: each test_* method drives one process_* helper above.
    def test_instanciate(self):
        """
        Smoke test: calls every Hemlock action handler and instantiates every
        client class to verify they can be constructed/invoked without import
        or signature errors. No return values are checked.
        """
        a = hemlock.Hemlock()
        # check_args with an empty and a matching action list.
        a.check_args(["tenant-list"], [], {})
        a.check_args(["tenant-list"], ["tenant-list"], {})
        # Invoke every action handler with empty args/flags.
        a.client_add_schedule([], {})
        a.client_get([], {})
        a.client_list([], {})
        a.client_purge([], {})
        a.client_remove_schedule([], {})
        a.client_run([], {})
        a.client_schedule([], {})
        a.client_schedules_list([], {})
        a.client_store([], {})
        a.client_systems_list([], {})
        a.deregister_local_system([], {})
        a.deregister_remote_system([], {})
        a.hemlock_server_store([], {})
        a.list_all([], {})
        a.query_data([], {})
        a.register_local_system([], {})
        a.register_remote_system([], {})
        a.role_create([], {})
        a.role_delete([], {})
        a.role_get([], {})
        a.role_list([], {})
        a.role_users_list([], {})
        a.schedule_add_client([], {})
        a.schedule_clients_list([], {})
        a.schedule_delete([], {})
        a.schedule_get([], {})
        a.schedule_list([], {})
        a.schedule_remove_client([], {})
        a.schedule_change_server([], {})
        a.schedule_server_create([], {})
        a.schedule_server_delete([], {})
        a.schedule_server_get([], {})
        a.schedule_server_list([], {})
        a.start_scheduler([], {})
        a.system_add_tenant([], {})
        a.system_clients_list([], {})
        a.system_get([], {})
        a.system_list([], {})
        a.system_remove_tenant([], {})
        a.system_tenants_list([], {})
        a.tenant_create([], {})
        a.tenant_delete([], {})
        a.tenant_get([], {})
        a.tenant_list([], {})
        a.tenant_systems_list([], {})
        a.tenant_users_list([], {})
        a.user_add_role([], {})
        a.user_add_tenant([], {})
        a.user_create([], {})
        a.user_delete([], {})
        a.user_get([], {})
        a.user_list([], {})
        a.user_remove_role([], {})
        a.user_remove_tenant([], {})
        a.user_roles_list([], {})
        a.user_tenants_list([], {})
        a.parse_auth()
        a.read_creds(0)
        # Exercise the base-class helpers with the sample credentials.
        a = Hemlock_Base()
        a.SERVER_CREDS_FILE = "hemlock/hemlock_creds_sample"
        a.client_import(0, "mysql")
        client_dict, server_dict = a.get_creds(0, "hemlock/clients/mysql_creds_sample")
        a.stream_callback("data")
        a.get_args(0)
        a = Hemlock_Runner()
        # Exercise the legacy filesystem client against fs sample creds.
        b = hfs_old()
        a = Hemlock_Base()
        a.SERVER_CREDS_FILE = "hemlock/hemlock_creds_sample"
        client_dict, server_dict = a.get_creds(0, "hemlock/clients/fs_creds_sample")
        b.connect_client(0, client_dict)
        # Instantiate every client class to catch constructor regressions.
        a = hfs()
        a = HMongo()
        a = HMysql()
        a = HRedis()
        a = HRest()
        a = HStream_Odd()
        a = Hcsv()
        a = Hdoc()
        a = Hgeneric()
        a = Hjson()
        a = Hpcap()
        a = Hpdf()
        a = Hppt()
        a = Htext()
        a = Hxls()
        a = Hxml()
        a = test()
        a.GET()
    def test_process_hemlock_scheduler(self):
        """
        Calls the test function for hemlock_scheduler.
        """
        # NOTE(review): the SystemExit expectation was disabled at some point;
        # confirm whether init_schedule/check_schedules should exit.
        #with pytest.raises(SystemExit):
        self.process_hemlock_scheduler()
    def test_process_hemlock_scheduler2(self):
        """
        Calls the test function for hemlock_scheduler.
        """
        # job_work on a bogus client id is expected to exit.
        with pytest.raises(SystemExit):
            self.process_hemlock_scheduler2()
def test_connect_mysql(self):
"""
Calls the test function for connecting to MySQL.
"""
m_server = self.connect_mysql(0, "localhost", "travis", "password", "hemlock_test")
cur = m_server.cursor()
cur.execute("DROP TABLE IF EXISTS users_tenants")
cur.execute("DROP TABLE IF EXISTS users_roles")
cur.execute("DROP TABLE IF EXISTS systems_tenants")
cur.execute("DROP TABLE IF EXISTS systems_clients")
cur.execute("DROP TABLE IF EXISTS schedules_clients")
cur.execute("DROP TABLE IF EXISTS users")
cur.execute("DROP TABLE IF EXISTS tenants")
cur.execute("DROP TABLE IF EXISTS systems")
cur.execute("DROP TABLE IF EXISTS roles")
cur.execute("DROP TABLE IF EXISTS schedules")
cur.execute("DROP TABLE IF EXISTS hemlock_server")
cur.execute("DROP TABLE IF EXISTS clients")
cur.execute("DROP TABLE IF EXISTS schedule_servers")
m_server.commit()
m_server.close()
assert 1
    def test_connect_redis(self):
        """
        Ensures that redis is running on the server running the tests.
        """
        r = redis.Redis(host='localhost', port=6379, db=0)
        assert r.set('foo', 'bar')
        r.hset('test-hash', 'test-field', 'test')
        b = r.get('foo')
        c = r.hget('test-hash', 'test-field')
        # NOTE(review): these comparisons assume str responses; on Python 3
        # redis-py returns bytes unless decode_responses=True -- verify.
        assert b == "bar"
        assert c == "test"
    def test_connect_mongo(self):
        """
        Ensures that mongo is running on the server running the tests.
        """
        connection = pymongo.MongoClient("127.0.0.1", 27017)
        db = connection['local']
        collection = db['collection']
        # NOTE(review): collection.insert is the legacy pymongo API; newer
        # drivers use insert_one/insert_many -- confirm pinned version.
        collection.insert([{"foo":"bar"}])
        assert 1
    def test_process_print_help(self):
        """
        Calls the test function for print_help.
        """
        # Printing help exits the process, hence the SystemExit expectation.
        with pytest.raises(SystemExit):
            self.process_print_help()
    def test_process_print_help_specific(self):
        """
        Calls the test function for print_help.
        """
        with pytest.raises(SystemExit):
            self.process_print_help_specific()
    def test_process_debug(self):
        """
        Calls the test function for debugging.
        """
        x, error = self.process_debug()
        # Every per-call error code must be zero.
        for err in error: assert err == 0
def test_process_role_create(self):
"""
Calls the test function for the role-create action.
"""
x, y, error = self.process_role_create()
for err in error: assert err == 0
assert x[1][1] == 'role1'
a = re.match('[0-f]{8}-[0-f]{4}-[0-f]{4}-[0-f]{4}-[0-f]{12}',x[2][1])
assert a
assert len(y)
def test_process_tenant_create(self):
"""
Calls the test function for the tenant-create action.
"""
x, y, error = self.process_tenant_create()
for err in error: assert err == 0
assert x[1][1] == 'tenant1'
a = re.match('[0-f]{8}-[0-f]{4}-[0-f]{4}-[0-f]{4}-[0-f]{12}',x[2][1])
assert a
assert len(y)
def test_process_user_create(self):
"""
Calls the test function for the user-create action.
"""
x, y, error = self.process_user_create()
for err in error: assert err == 0
assert x[1][1] == 'user1'
assert x[4][1] == 'username1'
assert x[5][1] == 'email@dot.com'
a = re.match('[0-f]{8}-[0-f]{4}-[0-f]{4}-[0-f]{4}-[0-f]{12}',x[7][1])
assert a
assert len(y)
def test_process_register_local_system(self):
"""
Calls the test function for the register-local-system action.
"""
x, y, error = self.process_register_local_system()
for err in error: assert err == 0
assert x[2][1] == 'data-type1'
assert x[3][1] == 'hostname1'
assert x[4][1] == 'poc-name1'
assert x[5][1] == 'local-system1'
assert x[6][1] == 'poc-email@dot.com'
assert x[7][1] == 'http://endpoint.com/'
assert x[8][1] == 'description1'
assert x[11][1] == '0'
a = re.match('[0-f]{8}-[0-f]{4}-[0-f]{4}-[0-f]{4}-[0-f]{12}',x[9][1])
assert a
assert len(y)
def test_process_register_remote_system(self):
"""
Calls the test function for the register-remote-system action.
"""
x, y, error = self.process_register_remote_system()
for err in error: assert err == 0
assert x[2][1] == 'data-type1'
assert x[3][1] == 'hostname1'
assert x[4][1] == 'poc-name1'
assert x[5][1] == '80'
assert x[6][1] == 'http://remote.uri/'
assert x[7][1] == 'remote-system1'
assert x[8][1] == 'poc-email@dot.com'
assert x[9][1] == 'description1'
assert x[12][1] == '1'
a = re.match('[0-f]{8}-[0-f]{4}-[0-f]{4}-[0-f]{4}-[0-f]{12}',x[10][1])
assert a
assert len(y)
    # --- list-action wrappers: each runs its process_* helper above and
    # asserts that every returned per-call error code is zero. ---
    def test_process_role_list(self):
        """
        Calls the test function for the role-list action.
        """
        x, error = self.process_role_list()
        # !! TODO - handle case with nothing, one, and more than one
        for err in error: assert err == 0
    def test_process_tenant_list(self):
        """
        Calls the test function for the tenant-list action.
        """
        x, error = self.process_tenant_list()
        # !! TODO - handle case with nothing, one, and more than one
        for err in error: assert err == 0
    def test_process_system_list(self):
        """
        Calls the test function for the system-list action.
        """
        x, error = self.process_system_list()
        # !! TODO - handle case with nothing, one, and more than one
        for err in error: assert err == 0
    def test_process_user_list(self):
        """
        Calls the test function for the user-list action.
        """
        x, error = self.process_user_list()
        # !! TODO - handle case with nothing, one, and more than one
        for err in error: assert err == 0
    def test_process_tenant_systems_list(self):
        """
        Calls the test function for the tenant-systems-list action.
        """
        x, error = self.process_tenant_systems_list()
        # !! TODO - handle case with nothing, one, and more than one
        for err in error: assert err == 0
    def test_process_tenant_users_list(self):
        """
        Calls the test function for the tenant-users-list action.
        """
        x, error = self.process_tenant_users_list()
        # !! TODO - handle case with nothing, one, and more than one
        for err in error: assert err == 0
    def test_process_system_tenants_list(self):
        """
        Calls the test function for the system-tenants-list action.
        """
        x, error = self.process_system_tenants_list()
        # !! TODO - handle case with nothing, one, and more than one
        for err in error: assert err == 0
    def test_process_role_users_list(self):
        """
        Calls the test function for the role-users-list action.
        """
        x, error = self.process_role_users_list()
        # !! TODO - handle case with nothing, one, and more than one
        for err in error: assert err == 0
    def test_process_user_roles_list(self):
        """
        Calls the test function for the user-roles-list action.
        """
        x, error = self.process_user_roles_list()
        # !! TODO - handle case with nothing, one, and more than one
        for err in error: assert err == 0
    def test_process_user_tenants_list(self):
        """
        Calls the test function for the user-tenants-list action.
        """
        x, error = self.process_user_tenants_list()
        # !! TODO - handle case with nothing, one, and more than one
        for err in error: assert err == 0
    # --- deregister/delete/get/add/remove wrappers: each runs its process_*
    # helper above and asserts every per-call error code is zero. ---
    def test_process_deregister_local_system(self):
        """
        Calls the test function for the deregister-local-system action.
        """
        x, error = self.process_deregister_local_system()
        for err in error: assert err == 0
    def test_process_deregister_remote_system(self):
        """
        Calls the test function for the deregister-remote-system action.
        """
        x, error = self.process_deregister_remote_system()
        for err in error: assert err == 0
    def test_process_role_delete(self):
        """
        Calls the test function for the role-delete action.
        """
        x, error = self.process_role_delete()
        for err in error: assert err == 0
    def test_process_system_add_tenant(self):
        """
        Calls the test function for the system-add-tenant action.
        """
        x, error = self.process_system_add_tenant()
        for err in error: assert err == 0
    def test_process_system_get(self):
        """
        Calls the test function for the system-get action.
        """
        x, error = self.process_system_get()
        for err in error: assert err == 0
    def test_process_system_remove_tenant(self):
        """
        Calls the test function for the system-remove-tenant action.
        """
        x, error = self.process_system_remove_tenant()
        for err in error: assert err == 0
    def test_process_tenant_delete(self):
        """
        Calls the test function for the tenant-delete action.
        """
        x, error = self.process_tenant_delete()
        for err in error: assert err == 0
    def test_process_tenant_get(self):
        """
        Calls the test function for the tenant-get action.
        """
        x, error = self.process_tenant_get()
        for err in error: assert err == 0
    # --- user/role/tenant association wrappers: same zero-error pattern. ---
    def test_process_user_add_role(self):
        """
        Calls the test function for the user-add-role action.
        """
        x, error = self.process_user_add_role()
        for err in error: assert err == 0
    def test_process_user_add_tenant(self):
        """
        Calls the test function for the user-add-tenant action.
        """
        x, error = self.process_user_add_tenant()
        for err in error: assert err == 0
    def test_process_user_delete(self):
        """
        Calls the test function for the user-delete action.
        """
        x, error = self.process_user_delete()
        for err in error: assert err == 0
    def test_process_user_get(self):
        """
        Calls the test function for the user-get action.
        """
        x, error = self.process_user_get()
        for err in error: assert err == 0
    def test_process_user_remove_role(self):
        """
        Calls the test function for the user-remove-role action.
        """
        x, error = self.process_user_remove_role()
        for err in error: assert err == 0
    def test_process_user_remove_tenant(self):
        """
        Calls the test function for the user-remove-tenant action.
        """
        x, error = self.process_user_remove_tenant()
        for err in error: assert err == 0
    def test_process_list_all(self):
        """
        Calls the test function for list-all action.
        """
        x, error = self.process_list_all()
        for err in error: assert err == 0
    # --- client-action wrappers: same zero-error pattern, except client-run
    # which is expected to exit. ---
    def test_process_client_get(self):
        """
        Calls the test function for the client-get action.
        """
        x, error = self.process_client_get()
        for err in error: assert err == 0
    def test_process_client_list(self):
        """
        Calls the test function for the client-list action.
        """
        x, error = self.process_client_list()
        for err in error: assert err == 0
    def test_process_client_systems_list(self):
        """
        Calls the test function for the client-systems-list action.
        """
        x, error = self.process_client_systems_list()
        for err in error: assert err == 0
    def test_process_system_clients_list(self):
        """
        Calls the test function for the system_clients-list action.
        """
        x, error = self.process_system_clients_list()
        for err in error: assert err == 0
    def test_process_client_purge(self):
        """
        Calls the test function for the client-purge action.
        """
        x, error = self.process_client_purge()
        for err in error: assert err == 0
    def test_process_client_run(self):
        """
        Calls the test function for the client-run action.
        """
        # client-run terminates the process, so SystemExit is the pass signal.
        with pytest.raises(SystemExit):
            x, error = self.process_client_run()
    # --- schedule/client wrappers: same zero-error pattern. ---
    def test_process_client_schedule(self):
        """
        Calls the test function for the client-schedule action.
        """
        x, error = self.process_client_schedule()
        for err in error: assert err == 0
    def test_process_schedule_delete(self):
        """
        Calls the test function for the schedule-delete action.
        """
        x, error = self.process_schedule_delete()
        for err in error: assert err == 0
    def test_process_client_schedules_list(self):
        """
        Calls the test function for the client-schedules-list action.
        """
        x, error = self.process_client_schedules_list()
        for err in error: assert err == 0
    def test_process_schedule_clients_list(self):
        """
        Calls the test function for the schedule_clients-list action.
        """
        x, error = self.process_schedule_clients_list()
        for err in error: assert err == 0
    def test_process_client_store(self):
        """
        Calls the test function for the client-store action.
        """
        # client-store's helper returns an extra value (y) besides data/errors.
        x, y, error = self.process_client_store()
        for err in error: assert err == 0
    def test_process_schedule_get(self):
        """
        Calls the test function for the schedule-get action.
        """
        x, error = self.process_schedule_get()
        for err in error: assert err == 0
    def test_process_schedule_list(self):
        """
        Calls the test function for the schedule-list action.
        """
        x, error = self.process_schedule_list()
        for err in error: assert err == 0
    def test_process_start_scheduler(self):
        """
        Calls the test function for the start-scheduler action.
        """
        x, error = self.process_start_scheduler()
        for err in error: assert err == 0
def test_process_query_data_couchbase(self):
"""
Calls the test function for the query-data action.
"""
x, error = self.process_query_data_couchbase()
for err in error: assert err == 0
def test_process_query_data_elasticsearch(self):
"""
Calls the test function for the query-data action.
"""
with pytest.raises(SystemExit):
x, error = self.process_query_data_elasticsearch()
def test_process_schedule_change_server(self):
"""
Calls the test function for the schedule-change-server action.
"""
x, error = self.process_schedule_change_server()
for err in error: assert err == 0
def test_process_schedule_server_create(self):
"""
Calls the test function for the schedule-server-create action.
"""
x, error = self.process_schedule_server_create()
for err in error: assert err == 0
def test_process_schedule_server_delete(self):
"""
Calls the test function for the schedule-server-delete action.
"""
x, error = self.process_schedule_server_delete()
for err in error: assert err == 0
def test_process_schedule_server_get(self):
"""
Calls the test function for the schedule-server-get action.
"""
x, error = self.process_schedule_server_get()
for err in error: assert err == 0
def test_process_schedule_server_list(self):
"""
Calls the test function for the schedule-server-list action.
"""
x, error = self.process_schedule_server_list()
for err in error: assert err == 0
def test_process_schedule_add_client(self):
"""
Calls the test function for the schedule-add-client action.
"""
x, error = self.process_schedule_add_client()
for err in error: assert err == 0
def test_process_client_add_schedule(self):
"""
Calls the test function for the client-add-schedule action.
"""
x, error = self.process_client_add_schedule()
for err in error: assert err == 0
def test_process_schedule_remove_client(self):
"""
Calls the test function for the schedule-remove-client action.
"""
x, error = self.process_schedule_remove_client()
for err in error: assert err == 0
def test_process_client_remove_schedule(self):
"""
Calls the test function for the client-remove-schedule action.
"""
x, error = self.process_client_remove_schedule()
for err in error: assert err == 0
def test_process_base_args1(self):
"""
Calls the test function for the hemlock_base without arguments.
"""
with pytest.raises(SystemExit):
x, error = self.process_base_args1()
def test_process_base_args2(self):
"""
Calls the test function for the hemlock_base with arguments.
"""
x, error = self.process_base_args2()
for err in error: assert err == 0
def test_process_base_send_data(self):
"""
Calls the test function for the hemlock_base send_data.
"""
x, error = self.process_base_send_data()
for err in error: assert err == 0
def test_process_base_connect_server_couchbase(self):
"""
Calls the test function for the hemlock_base connect_server with couchbase.
"""
with pytest.raises(SystemExit):
x, error = self.process_base_connect_server_couchbase()
def test_process_hemlock_process_args(self):
"""
Calls the test function for the hemlock process_args.
"""
with pytest.raises(SystemExit):
x, error = self.process_hemlock_process_args()
def test_process_hfs_old_process_files(self):
"""
Calls the test function for the hfs_old process_files.
"""
x, error = self.process_hfs_old_process_files()
for err in error: assert err == 0
def test_process_hfs_connect_client(self):
"""
Calls the test function for the hfs connect_client.
"""
with pytest.raises(SystemExit):
x, error = self.process_hfs_connect_client()
def test_process_hfs_get_data(self):
"""
Calls the test function for the hfs get_data.
"""
x, error = self.process_hfs_get_data()
for err in error: assert err == 0
def test_process_hfs_format_lists(self):
"""
Calls the test function for the hfs format_lists.
"""
x, error = self.process_hfs_format_lists()
for err in error: assert err == 0
def test_process_hstream_odd_start(self):
"""
Calls the test function for the hstream_odd start.
"""
x, error = self.process_hstream_odd_start()
for err in error: assert err == 0
def test_process_hemlock_runner_mysql_server(self):
"""
Calls the test function for the hemlock_runner mysql_server.
"""
x, error = self.process_hemlock_runner_mysql_server()
for err in error: assert err == 0
def test_process_hemlock_runner_mysql_server2(self):
"""
Calls the test function for the hemlock_runner mysql_server.
"""
with pytest.raises(SystemExit):
x, error = self.process_hemlock_runner_mysql_server2()
def test_process_hmysql_connect_client(self):
"""
Calls the test function for the hmysql connect_client.
"""
x, error = self.process_hmysql_connect_client()
for err in error: assert err == 0
def test_process_hmysql_connect_client2(self):
"""
Calls the test function for the hmysql connect_client.
"""
x, error = self.process_hmysql_connect_client2()
for err in error: assert err == 0
def test_process_hredis_connect_client(self):
"""
Calls the test function for the hredis connect_client.
"""
x, error = self.process_hredis_connect_client()
for err in error: assert err == 0
def test_process_hmongo_connect_client(self):
"""
Calls the test function for the hmongo connect_client.
"""
x, error = self.process_hmongo_connect_client()
for err in error: assert err == 0
def test_process_hrest_connect_client(self):
"""
Calls the test function for the hrest connect_client.
"""
with pytest.raises(SystemExit):
x, error = self.process_hrest_connect_client()
def test_process_hrest_get_data(self):
"""
Calls the test function for the hrest get_data.
"""
with pytest.raises(SystemExit):
x, error = self.process_hrest_get_data()
def test_process_hemlock_base_update_hemlock(self):
"""
Calls the test function for the hemlock_base update_hemlock.
"""
self.process_hemlock_base_update_hemlock()
def test_process_hemlock_get_auth(self, server_mysql, database, mysql_username, mysql_password, couchbase_server, couchbase_bucket, couchbase_username, couchbase_password, elasticsearch_endpoint, hemlock_debug, no_couchbase, hemlock_version):
"""
Calls the test function for process_hemlock_get_auth.
"""
a = hemlock.Hemlock()
a.get_auth()
| 52.444968
| 402
| 0.598233
|
54a860b4643da8c7d2c5a7db36924cc804edb8c0
| 493
|
sql
|
SQL
|
src/FlatMate.Web/_Migrations/2017-06-17_14-43-33_fix_itemlistfavorite_foreignkey.sql
|
prayzzz/FlatMate-v2
|
3486507e5a613bf944e7dd056bda47bf84f37ed8
|
[
"MIT"
] | null | null | null |
src/FlatMate.Web/_Migrations/2017-06-17_14-43-33_fix_itemlistfavorite_foreignkey.sql
|
prayzzz/FlatMate-v2
|
3486507e5a613bf944e7dd056bda47bf84f37ed8
|
[
"MIT"
] | 7
|
2019-06-26T15:38:03.000Z
|
2021-05-08T04:38:43.000Z
|
src/FlatMate.Web/_Migrations/2017-06-17_14-43-33_fix_itemlistfavorite_foreignkey.sql
|
prayzzz/FlatMate-v2
|
3486507e5a613bf944e7dd056bda47bf84f37ed8
|
[
"MIT"
] | null | null | null |
--
-- Script
-- Repairs ItemListFavorite: the favorite row references an item list via
-- ItemListId, so the old foreign key to Item is dropped and replaced with
-- a foreign key to ItemList.
--
ALTER TABLE [List].[ItemListFavorite] DROP CONSTRAINT [FK_ItemListFavorite_Item]
GO
-- WITH CHECK validates all existing rows against the new constraint.
ALTER TABLE [List].[ItemListFavorite] WITH CHECK ADD CONSTRAINT [FK_ItemListFavorite_ItemList] FOREIGN KEY([ItemListId])
REFERENCES [List].ItemList ([Id])
GO
-- Enable enforcement of the new constraint for future inserts/updates.
ALTER TABLE [List].[ItemListFavorite] CHECK CONSTRAINT [FK_ItemListFavorite_ItemList]
GO
--
-- Migration
-- Record this script in the migrations table so it is not applied twice.
--
INSERT INTO [Infrastructure].[Migrations] ([FileName])
VALUES ('2017-06-17_14-43-33_fix_itemlistfavorite_foreignkey');
| 23.47619
| 122
| 0.772819
|
8ec165e4f61c8e9abe42311a04dba753bcc6a0eb
| 963
|
dart
|
Dart
|
lib/src/generated/storekit/SKDownloadInternal.dart
|
rodydavis/cupertino_ffi_generated
|
4d59f68921a5ae1ba58831c9de83ea0d7979148f
|
[
"MIT"
] | 2
|
2021-05-10T01:24:08.000Z
|
2021-12-13T10:24:41.000Z
|
lib/src/generated/storekit/SKDownloadInternal.dart
|
rodydavis/cupertino_ffi_generated
|
4d59f68921a5ae1ba58831c9de83ea0d7979148f
|
[
"MIT"
] | null | null | null |
lib/src/generated/storekit/SKDownloadInternal.dart
|
rodydavis/cupertino_ffi_generated
|
4d59f68921a5ae1ba58831c9de83ea0d7979148f
|
[
"MIT"
] | null | null | null |
// AUTOMATICALLY GENERATED. DO NOT EDIT.
part of cupertino_ffi.storekit;
/// Static methods for Objective-C class `SKDownloadInternal`.
/// See also instance methods in [SKDownloadInternalPointer].
///
/// Find detailed documentation at: [developer.apple.com/documentation/storekit?language=objc](https://developer.apple.com/documentation/storekit?language=objc)
class SKDownloadInternal extends Struct {
  /// Allocates a new instance of SKDownloadInternal.
  static Pointer<SKDownloadInternal> allocate() {
    // Ensure the native library is opened before the first FFI call.
    _ensureDynamicLibraryHasBeenOpened();
    // Instantiate the Objective-C class by its runtime name.
    return _objc.allocateByClassName<SKDownloadInternal>('SKDownloadInternal');
  }
}
/// Instance methods for [SKDownloadInternal] (Objective-C class `SKDownloadInternal`).
///
/// Find detailed documentation at: [developer.apple.com/documentation/storekit?language=objc](https://developer.apple.com/documentation/storekit?language=objc)
extension SKDownloadInternalPointer on Pointer<SKDownloadInternal> {}
| 45.857143
| 160
| 0.795431
|
e06a182f188d184b43c11c7c14d62337935da708
| 7,359
|
c
|
C
|
src/ocs_mgmt_common.c
|
ecdufcdrvr/ecdufcdrvr
|
3cac33376519c8642e28bdd2e4821d0b64c7895a
|
[
"BSD-3-Clause"
] | 1
|
2019-09-11T22:25:58.000Z
|
2019-09-11T22:25:58.000Z
|
src/ocs_mgmt_common.c
|
ecdufcdrvr/ecdufcdrvr
|
3cac33376519c8642e28bdd2e4821d0b64c7895a
|
[
"BSD-3-Clause"
] | 1
|
2019-10-18T23:09:27.000Z
|
2020-05-08T18:47:36.000Z
|
src/ocs_mgmt_common.c
|
ecdufcdrvr/ecdufcdrvr
|
3cac33376519c8642e28bdd2e4821d0b64c7895a
|
[
"BSD-3-Clause"
] | 6
|
2018-06-15T21:09:08.000Z
|
2021-12-31T06:18:44.000Z
|
/*
* Copyright (C) 2020 Broadcom. All Rights Reserved.
* The term “Broadcom” refers to Broadcom Inc. and/or its subsidiaries.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
*/
/**
* @file
* Common functions used for the output formatting by ocs_mgmt_* functions.
*/
#include "ocs.h"
#include "ocs_mgmt.h"
static char *mode_string(int mode);
/**
 * @ingroup mgmt
 * @brief Generate the beginning of a numbered section in a management XML document.
 *
 * @par Description
 * This function begins a section. The XML information is appended to
 * the textbuf. This form of the function is used for sections that might have
 * multiple instances, such as a node or a SLI Port (sport). The index number
 * is appended to the name.
 *
 * @param textbuf Pointer to the driver dump text buffer.
 * @param name Name of the section.
 * @param index Index number of this instance of the section.
 *
 * @return None.
 */
extern void ocs_mgmt_start_section(ocs_textbuf_t *textbuf, const char *name, int index)
{
	/* Opening tag carries the instance index as an XML attribute. */
	ocs_textbuf_printf(textbuf, "<%s instance=\"%d\">\n", name, index);
}
/**
 * @ingroup mgmt
 * @brief Generate the beginning of an unnumbered section in a management XML document.
 *
 * @par Description
 * This function begins a section. The XML information is appended to
 * the textbuf. This form of the function is used for sections that have
 * a single instance only. Therefore, no index number is needed.
 *
 * @param textbuf Pointer to the driver dump text buffer.
 * @param name Name of the section.
 *
 * @return None.
 */
extern void ocs_mgmt_start_unnumbered_section(ocs_textbuf_t *textbuf, const char *name)
{
	/* Opening tag only; no instance attribute for singleton sections. */
	ocs_textbuf_printf(textbuf, "<%s>\n", name);
}
/**
 * @ingroup mgmt
 * @brief Generate the end of a section in a management XML document.
 *
 * @par Description
 * This function ends a section. The XML information is appended to
 * the textbuf.
 *
 * @param textbuf Pointer to the driver dump text buffer.
 * @param name Name of the section.
 *
 * @return None.
 */
void ocs_mgmt_end_unnumbered_section(ocs_textbuf_t *textbuf, const char *name)
{
	/* Closing tag matching ocs_mgmt_start_unnumbered_section(). */
	ocs_textbuf_printf(textbuf, "</%s>\n", name);
}
/**
 * @ingroup mgmt
 * @brief Generate the indexed end of a section in a management XML document.
 *
 * @par Description
 * This function ends a section. The XML information is appended to
 * the textbuf.
 *
 * @param textbuf Pointer to the driver dump text buffer.
 * @param name Name of the section.
 * @param index Index number of this instance of the section. Note that it is
 *              unused here — closing tags carry no instance attribute — and is
 *              kept only for symmetry with ocs_mgmt_start_section().
 *
 * @return None.
 */
void ocs_mgmt_end_section(ocs_textbuf_t *textbuf, const char *name, int index)
{
	/* The index parameter is intentionally not emitted. */
	ocs_textbuf_printf(textbuf, "</%s>\n", name);
}
/**
 * @ingroup mgmt
 * @brief Generate a property, with no value, in a management XML document.
 *
 * @par Description
 * This function generates a property name. The XML information is appended to
 * the textbuf. This form of the function is used by the list functions
 * when the property name only (and not the current value) is given.
 *
 * @param textbuf Pointer to the driver dump text buffer.
 * @param mode Defines whether the property is read(r)/write(w)/executable(x).
 * @param name Name of the property.
 *
 * @return None.
 */
void ocs_mgmt_emit_property_name(ocs_textbuf_t *textbuf, int mode, const char *name)
{
	/* Self-closing element: name plus its "rwx" access-mode attribute. */
	ocs_textbuf_printf(textbuf, "<%s mode=\"%s\"/>\n", name, mode_string(mode));
}
/**
 * @ingroup mgmt
 * @brief Generate a property with a string value in a management XML document.
 *
 * @par Description
 * This function generates a property name and a string value.
 * The XML information is appended to the textbuf. Note that the value is
 * inserted verbatim; callers must not pass strings containing XML
 * metacharacters unless that is intended.
 *
 * @param textbuf Pointer to the driver dump text buffer.
 * @param mode Defines whether the property is read(r)/write(w)/executable(x).
 * @param name Name of the property.
 * @param value Value of the property.
 *
 * @return None.
 */
void ocs_mgmt_emit_string(ocs_textbuf_t *textbuf, int mode, const char *name, const char *value)
{
	ocs_textbuf_printf(textbuf, "<%s mode=\"%s\">%s</%s>\n", name, mode_string(mode), value, name);
}
/**
 * @ingroup mgmt
 * @brief Generate a property with an integer value in a management XML document.
 *
 * @par Description
 * This function generates a property name and an integer value.
 * The XML information is appended to the textbuf.
 *
 * @param textbuf Pointer to driver dump text buffer.
 * @param mode Defines whether the property is read(r)/write(w)/executable(x).
 * @param name Name of the property.
 * @param fmt A printf format for formatting the integer value.
 *
 * @return none
 */
void ocs_mgmt_emit_int(ocs_textbuf_t *textbuf, int mode, const char *name, const char *fmt, ...)
{
	va_list ap;
	char valuebuf[64];
	va_start(ap, fmt);
	/* Formatted output longer than 63 characters is truncated here. */
	ocs_vsnprintf(valuebuf, sizeof(valuebuf), fmt, ap);
	va_end(ap);
	ocs_textbuf_printf(textbuf, "<%s mode=\"%s\">%s</%s>\n", name, mode_string(mode), valuebuf, name);
}
/**
 * @ingroup mgmt
 * @brief Generate a property with a boolean value in a management XML document.
 *
 * @par Description
 * Appends a property element to the textbuf whose body is the literal
 * string "true" for any non-zero value and "false" for zero.
 *
 * @param textbuf Pointer to the driver dump text buffer.
 * @param mode Defines whether the property is read(r)/write(w)/executable(x).
 * @param name Name of the property.
 * @param value Boolean value to be added to the textbuf.
 *
 * @return None.
 */
void ocs_mgmt_emit_boolean(ocs_textbuf_t *textbuf, int mode, const char *name, int value)
{
	const char *text = (value != 0) ? "true" : "false";
	ocs_textbuf_printf(textbuf, "<%s mode=\"%s\">%s</%s>\n", name, mode_string(mode), text, name);
}
/**
 * @brief Convert an MGMT_MODE_* bitmask into its "rwx"-style string form.
 *
 * Builds a string containing 'r', 'w', and/or 'x' for each mode bit set.
 * NOTE: the result points at a static buffer, so it is overwritten by the
 * next call and the function is not reentrant/thread-safe.
 *
 * @param mode Bitwise OR of MGMT_MODE_RD / MGMT_MODE_WR / MGMT_MODE_EX.
 *
 * @return Pointer to a static, NUL-terminated mode string.
 */
static char *mode_string(int mode)
{
	static char mode_str[4];
	int n = 0;
	if (mode & MGMT_MODE_RD) {
		mode_str[n++] = 'r';
	}
	if (mode & MGMT_MODE_WR) {
		mode_str[n++] = 'w';
	}
	if (mode & MGMT_MODE_EX) {
		mode_str[n++] = 'x';
	}
	mode_str[n] = '\0';
	return mode_str;
}
| 30.920168
| 99
| 0.721565
|
2f4b845278ee63d2799d4d25f6bfdb65fc2ad16f
| 1,421
|
js
|
JavaScript
|
Drupal/zenlan/js/zenlan-autocomplete1.js
|
zenlan/snippets
|
6e6ced1220664aae4e443b6ed7825b34964e6f60
|
[
"MIT"
] | null | null | null |
Drupal/zenlan/js/zenlan-autocomplete1.js
|
zenlan/snippets
|
6e6ced1220664aae4e443b6ed7825b34964e6f60
|
[
"MIT"
] | null | null | null |
Drupal/zenlan/js/zenlan-autocomplete1.js
|
zenlan/snippets
|
6e6ced1220664aae4e443b6ed7825b34964e6f60
|
[
"MIT"
] | null | null | null |
(function ($) {
  $(document).ready(function () {
    $('#edit-id').hide();
    var api_url = Drupal.settings.zenlan.api_url;
    // Most recently chosen autocomplete item; falsy when nothing is selected.
    // (A shadowing `var selected` inside `source` was removed — this outer
    // variable is the only one the select/close handlers read and write.)
    var selected = '';
    $("#edit-name").autocomplete({
      source: function (request, response) {
        // Encode the search term as base64 with '=' padding made URL-safe.
        var url = api_url + $.base64.encode(request.term).replace(/=/g, '_');
        // Fix: `$base` was an implicit global; declare it locally.
        var $base = $(this.element);
        $base.addClass('throbbing');
        $.ajax({
          url: url,
          dataType: "json",
          success: function (data) {
            $base.removeClass('throbbing');
            response($.map(data, function (item) {
              return {
                label: item.label,
                value: item.value
              }
            }));
          },
          error: function (jqXHR, textStatus, errorThrown) {
            $base.removeClass('throbbing');
            console.log('autocomplete error ' + errorThrown);
          }
        });
      },
      // Fix: the jQuery UI option is camelCase `minLength`; the previous
      // lowercase `minlength` was silently ignored, so the widget queried
      // on every keystroke instead of waiting for two characters.
      minLength: 2,
      select: function (event, ui) {
        console.log(ui.item ?
          "Selected: " + ui.item.label :
          "Nothing selected, input was " + this.value);
        ui.item ? selected = ui.item : selected = false;
      },
      close: function () {
        // Copy the chosen item into the hidden id field and reveal it.
        if (selected) {
          $('#edit-id').val(selected.value);
          $('#edit-name').val(selected.label);
          $('#edit-id').show();
        }
      }
    });
  });
})(jQuery);
| 29.604167
| 77
| 0.472203
|
5449b67a7aeb64fd49f5fd26466049b704f400ea
| 187
|
css
|
CSS
|
src/components/layout/logo/styles.css
|
Acousticdesk/proto-gatsby
|
6d15aec7a24d5dfb8a8f5fc3ec3ca8bf47db761f
|
[
"MIT"
] | null | null | null |
src/components/layout/logo/styles.css
|
Acousticdesk/proto-gatsby
|
6d15aec7a24d5dfb8a8f5fc3ec3ca8bf47db761f
|
[
"MIT"
] | 1
|
2021-09-03T15:17:56.000Z
|
2021-09-03T15:17:56.000Z
|
src/components/layout/logo/styles.css
|
Acousticdesk/proto-gatsby
|
6d15aec7a24d5dfb8a8f5fc3ec3ca8bf47db761f
|
[
"MIT"
] | null | null | null |
/* Site logo wordmark: white text vertically centered in a 64px header bar. */
.logo {
  color: #fff;
  font-size: 38px;
  line-height: 64px;
  margin: 0;
  padding: 0;
}
/* Shrink the wordmark on tablet/phone-width viewports. */
@media screen and (max-width: 768px) {
  .logo {
    font-size: 24px;
  }
}
| 13.357143
| 38
| 0.513369
|
1b08e2462f2c9f25eb53207c9a5b21f21b1621c7
| 1,445
|
rb
|
Ruby
|
test/scraped/response/decorator_test.rb
|
everypolitician/scraped_page
|
7fd43913456af49d32e882d0cb79f3da1f5117ef
|
[
"MIT"
] | 11
|
2016-12-31T07:54:11.000Z
|
2021-07-19T17:36:48.000Z
|
test/scraped/response/decorator_test.rb
|
everypolitician/scraped
|
7fd43913456af49d32e882d0cb79f3da1f5117ef
|
[
"MIT"
] | 70
|
2016-11-21T17:58:07.000Z
|
2017-11-19T23:14:56.000Z
|
test/scraped/response/decorator_test.rb
|
everypolitician/scraped
|
7fd43913456af49d32e882d0cb79f3da1f5117ef
|
[
"MIT"
] | null | null | null |
require 'test_helper'
describe Scraped::Response::Decorator do
  # A decorator subclass that overrides nothing, used to verify pass-through.
  class NullDecorator < Scraped::Response::Decorator; end
  describe 'with no decorated methods' do
    let(:response) do
      Scraped::Response.new(
        body: 'Hello, world',
        url: 'http://example.com'
      )
    end
    # Decorating with the no-op decorator should yield an equivalent response.
    let(:new_response) do
      NullDecorator.new(response: response).decorated_response
    end
    it 'returns an identical response' do
      new_response.url.must_equal response.url
      new_response.body.must_equal response.body
      new_response.headers.must_equal response.headers
      new_response.status.must_equal response.status
    end
  end
  describe 'with decorated methods' do
    # A decorator that overrides every response attribute (url, body,
    # headers, status) with a fixed replacement value.
    class MultiDecorator < Scraped::Response::Decorator
      def url
        'http://example.net'
      end
      def body
        'Fancy body'
      end
      def headers
        { 'Foo' => 'bar' }
      end
      def status
        418
      end
    end
    let(:response) do
      Scraped::Response.new(
        body: 'Hello, world',
        url: 'http://example.com'
      )
    end
    let(:new_response) do
      MultiDecorator.new(response: response).decorated_response
    end
    # Every overridden attribute should appear in the decorated response.
    it 'returns the decorated values' do
      new_response.url.must_equal 'http://example.net'
      new_response.body.must_equal 'Fancy body'
      new_response.headers.must_equal 'Foo' => 'bar'
      new_response.status.must_equal 418
    end
  end
end
| 22.578125
| 63
| 0.64083
|
da9ba46ed671caa65f92dedfc192be2466cf80ef
| 1,249
|
php
|
PHP
|
plugins/rainlab/translate/routes.php
|
Samorai/center-lex
|
ff7f9f5aca02689e94ac711705d5fdaa60bd4ecf
|
[
"MIT"
] | null | null | null |
plugins/rainlab/translate/routes.php
|
Samorai/center-lex
|
ff7f9f5aca02689e94ac711705d5fdaa60bd4ecf
|
[
"MIT"
] | null | null | null |
plugins/rainlab/translate/routes.php
|
Samorai/center-lex
|
ff7f9f5aca02689e94ac711705d5fdaa60bd4ecf
|
[
"MIT"
] | null | null | null |
<?php
use RainLab\Translate\Models\Locale;
use RainLab\Translate\Models\Message;
use RainLab\Translate\Classes\Translator;
/*
 * Adds a custom route to check for the locale prefix.
 */
App::before(function($request) {
    $translator = Translator::instance();
    if (!$translator->isConfigured()) {
        return;
    }
    // Treat the first URL segment as a candidate locale code; bail out if it
    // is not a locale known to the Locale model.
    $locale = Request::segment(1);
    if (!Locale::isValid($locale)) {
        return;
    }
    $translator->setLocale($locale);
    /*
     * Register routes prefixed with the detected locale, all handled by the
     * CMS controller (both "/{locale}/{slug}" and the bare "/{locale}").
     */
    Route::group(['prefix' => $locale], function() {
        Route::any('{slug}', 'Cms\Classes\CmsController@run')->where('slug', '(.*)?');
    });
    Route::any($locale, 'Cms\Classes\CmsController@run');
    /*
     * Ensure Url::action() retains the localized URL
     * by re-registering the route after the CMS.
     */
    Event::listen('cms.route', function() use ($locale) {
        Route::group(['prefix' => $locale], function() {
            Route::any('{slug}', 'Cms\Classes\CmsController@run')->where('slug', '(.*)?');
        });
    });
});
/*
 * Save any used messages to the contextual cache.
 */
App::after(function($request) {
    // Guard with class_exists so a disabled/uninstalled plugin cannot fatal.
    if (class_exists('RainLab\Translate\Models\Message')) {
        Message::saveToCache();
    }
});
| 23.12963
| 90
| 0.589271
|
a378cefc84f99d4b9cdbb71adceeada88dcc4178
| 2,423
|
java
|
Java
|
truth-android/src/main/java/com/pkware/truth/android/view/KeyCharacterMapSubject.java
|
erikghonyan/truth-android
|
b88ff3ba42d79c7719d7508c9bcdfe73e6d1f2fd
|
[
"Apache-2.0"
] | 12
|
2016-11-24T23:01:55.000Z
|
2021-03-29T15:15:57.000Z
|
truth-android/src/main/java/com/pkware/truth/android/view/KeyCharacterMapSubject.java
|
erikghonyan/truth-android
|
b88ff3ba42d79c7719d7508c9bcdfe73e6d1f2fd
|
[
"Apache-2.0"
] | 11
|
2018-09-26T13:32:25.000Z
|
2020-02-18T19:04:42.000Z
|
truth-android/src/main/java/com/pkware/truth/android/view/KeyCharacterMapSubject.java
|
erikghonyan/truth-android
|
b88ff3ba42d79c7719d7508c9bcdfe73e6d1f2fd
|
[
"Apache-2.0"
] | 3
|
2018-09-22T16:04:14.000Z
|
2019-07-02T10:37:49.000Z
|
/*
* Copyright 2013 Square, Inc.
* Copyright 2016 PKWARE, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.pkware.truth.android.view;
import android.view.KeyCharacterMap;
import com.google.common.truth.FailureMetadata;
import com.google.common.truth.Subject;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import static android.view.KeyCharacterMap.ALPHA;
import static android.view.KeyCharacterMap.FULL;
import static android.view.KeyCharacterMap.NUMERIC;
import static android.view.KeyCharacterMap.PREDICTIVE;
import static android.view.KeyCharacterMap.SPECIAL_FUNCTION;
import static com.pkware.truth.android.internal.IntegerUtils.buildNamedValueString;
/**
 * Propositions for {@link KeyCharacterMap} subjects.
 */
public class KeyCharacterMapSubject extends Subject {
  @Nullable
  private final KeyCharacterMap actual;

  public KeyCharacterMapSubject(@Nonnull FailureMetadata failureMetadata, @Nullable KeyCharacterMap actual) {
    super(failureMetadata, actual);
    this.actual = actual;
  }

  /**
   * Returns a human-readable name for the given {@code KeyCharacterMap}
   * keyboard-type constant (e.g. {@code FULL} becomes {@code "full"}).
   */
  @Nonnull
  public static String keyboardTypeToString(@KeyCharacterMapKeyboardType int type) {
    return buildNamedValueString(type)
        .value(NUMERIC, "numeric")
        .value(PREDICTIVE, "predictive")
        .value(ALPHA, "alpha")
        .value(FULL, "full")
        .value(SPECIAL_FUNCTION, "specialFunction")
        .get();
  }

  /** Asserts that the subject's keyboard type is exactly {@code type}. */
  public void hasKeyboardType(@KeyCharacterMapKeyboardType int type) {
    // Fix: fail with a proper assertion instead of an NPE when the subject is null.
    isNotNull();
    int actualType = actual.getKeyboardType();
    //noinspection ResourceType
    check("getKeyboardType()")
        .withMessage("Expected keyboard type <%s> but was <%s>.",
            keyboardTypeToString(type), keyboardTypeToString(actualType))
        .that(actualType)
        .isEqualTo(type);
  }

  /** Asserts that the subject's modifier behavior is exactly {@code value}. */
  public void hasModifierBehavior(int value) {
    // Fix: guard against a null subject before dereferencing it.
    isNotNull();
    check("getModifierBehavior()").that(actual.getModifierBehavior()).isEqualTo(value);
  }
}
| 33.652778
| 109
| 0.746595
|