text stringlengths 1 1.05M |
|---|
Meteor.i18nMessages.codes = {
locale : {
en : {
en : "English",
rx : "$English$"
},
rx : {
en : "Test",
rx : "$Test$"
}
},
logLevel : {
0 : {
en : "Fatal",
rx : "$Fatal$",
code: "FATAL"
},
1 : {
en : "Error",
rx : "$Error$",
code: "ERROR"
},
2 : {
en : "Warning",
rx : "$Warning$",
code: "WARN"
},
3 : {
en : "Info",
rx : "$Info$",
code: "INFO"
},
4 : {
en : "Verbose",
rx : "$Verbose$",
code: "VERBOSE"
},
5 : {
en : "Debug",
rx : "$Debug$",
code: "DEBUG"
},
6 : {
en : "Max",
rx : "$Max$",
code: "MAX"
},
},
state : {
AL : {
en : "Alabama",
rx : "$Alabama$"
},
AK : {
en : "Alaska",
rx : "$Alaska$"
},
AZ : {
en : "Arizona",
rx : "$Arizona$"
},
AR : {
en : "Arkansas",
rx : "$Arkansas$"
},
CA : {
en : "California",
rx : "$California$"
},
CO : {
en : "Colorado",
rx : "$Colorado$"
},
CT : {
en : "Connecticut",
rx : "$Connecticut$"
},
DE : {
en : "Delaware",
rx : "$Delaware$"
},
DC : {
en : "Dist of Columbia",
rx : "$Dist of Columbia$"
},
FL : {
en : "Florida",
rx : "$Florida$"
},
GA : {
en : "Georgia",
rx : "$Georgia$"
},
HI : {
en : "Hawaii",
rx : "$Hawaii$"
},
ID : {
en : "Idaho",
rx : "$Idaho$"
},
IL : {
en : "Illinois",
rx : "$Illinois$"
},
IN : {
en : "Indiana",
rx : "$Indiana$"
},
IA : {
en : "Iowa",
rx : "$Iowa$"
},
KS : {
en : "Kansas",
rx : "$Kansas$"
},
KY : {
en : "Kentucky",
rx : "$Kentucky$"
},
LA : {
en : "Louisiana",
rx : "$Louisiana$"
},
ME : {
en : "Maine",
rx : "$Maine$"
},
MD : {
en : "Maryland",
rx : "$Maryland$"
},
MA : {
en : "Massachusetts",
rx : "$Massachusetts$"
},
MI : {
en : "Michigan",
rx : "$Michigan$"
},
MN : {
en : "Minnesota",
rx : "$Minnesota$"
},
MS : {
en : "Mississippi",
rx : "$Mississippi$"
},
MO : {
en : "Missouri",
rx : "$Missouri$"
},
MT : {
en : "Montana",
rx : "$Montana$"
},
NE : {
en : "Nebraska",
rx : "$Nebraska$"
},
NV : {
en : "Nevada",
rx : "$Nevada$"
},
NH : {
en : "New Hampshire",
rx : "$New Hampshire$"
},
NJ : {
en : "New Jersey",
rx : "$New Jersey$"
},
NM : {
en : "New Mexico",
rx : "$New Mexico$"
},
NY : {
en : "New York",
rx : "$New York$"
},
NC : {
en : "North Carolina",
rx : "$North Carolina$"
},
ND : {
en : "North Dakota",
rx : "$North Dakota$"
},
OH : {
en : "Ohio",
rx : "$Ohio$"
},
OK : {
en : "Oklahoma",
rx : "$Oklahoma$"
},
OR : {
en : "Oregon",
rx : "$Oregon$"
},
PA : {
en : "Pennsylvania",
rx : "$Pennsylvania$"
},
RI : {
en : "Rhode Island",
rx : "$Rhode Island$"
},
SC : {
en : "South Carolina",
rx : "$South Carolina$"
},
SD : {
en : "South Dakota",
rx : "$South Dakota$"
},
TN : {
en : "Tennessee",
rx : "$Tennessee$"
},
TX : {
en : "Texas",
rx : "$Texas$"
},
UT : {
en : "Utah",
rx : "$Utah$"
},
VT : {
en : "Vermont",
rx : "$Vermont$"
},
VA : {
en : "Virginia",
rx : "$Virginia$"
},
WA : {
en : "Washington",
rx : "$Washington$"
},
WV : {
en : "West Virginia",
rx : "$West Virginia$"
},
WI : {
en : "Wisconsin",
rx : "$Wisconsin$"
},
WY : {
en : "Wyoming",
rx : "$Wyoming$"
},
},
country : {
US : {
en : "United States",
rx : "$United States$"
},
AF : {
en : "Afghanistan",
rx : "$Afghanistan$"
},
AL : {
en : "Albania",
rx : "$Albania$"
},
DZ : {
en : "Algeria",
rx : "$Algeria$"
},
AS : {
en : "American Samoa",
rx : "$American Samoa$"
},
AD : {
en : "Andorra",
rx : "$Andorra$"
},
AO : {
en : "Angola",
rx : "$Angola$"
},
AI : {
en : "Anguilla",
rx : "$Anguilla$"
},
AQ : {
en : "Antarctica",
rx : "$Antarctica$"
},
AG : {
en : "Antigua and Barbuda",
rx : "$Antigua and Barbuda$"
},
AR : {
en : "Argentina",
rx : "$Argentina$"
},
AM : {
en : "Armenia",
rx : "$Armenia$"
},
AW : {
en : "Aruba",
rx : "$Aruba$"
},
AU : {
en : "Australia",
rx : "$Australia$"
},
AT : {
en : "Austria",
rx : "$Austria$"
},
AZ : {
en : "Azerbaijan",
rx : "$Azerbaijan$"
},
BS : {
en : "Bahamas",
rx : "$Bahamas$"
},
BH : {
en : "Bahrain",
rx : "$Bahrain$"
},
BD : {
en : "Bangladesh",
rx : "$Bangladesh$"
},
BB : {
en : "Barbados",
rx : "$Barbados$"
},
BY : {
en : "Belarus",
rx : "$Belarus$"
},
BE : {
en : "Belgium",
rx : "$Belgium$"
},
BZ : {
en : "Belize",
rx : "$Belize$"
},
BJ : {
en : "Benin",
rx : "$Benin$"
},
BM : {
en : "Bermuda",
rx : "$Bermuda$"
},
BT : {
en : "Bhutan",
rx : "$Bhutan$"
},
BO : {
en : "Bolivia",
rx : "$Bolivia$"
},
BA : {
en : "Bosnia and Herzegowina",
rx : "$Bosnia and Herzegowina$"
},
BW : {
en : "Botswana",
rx : "$Botswana$"
},
BV : {
en : "Bouvet Island",
rx : "$Bouvet Island$"
},
BR : {
en : "Brazil",
rx : "$Brazil$"
},
IO : {
en : "British Indian Ocean Territory",
rx : "$British Indian Ocean Territory$"
},
BN : {
en : "Brunei Darussalam",
rx : "$Brunei Darussalam$"
},
BG : {
en : "Bulgaria",
rx : "$Bulgaria$"
},
BF : {
en : "Burkina Faso",
rx : "$Burkina Faso$"
},
BI : {
en : "Burundi",
rx : "$Burundi$"
},
KH : {
en : "Cambodia",
rx : "$Cambodia$"
},
CM : {
en : "Cameroon",
rx : "$Cameroon$"
},
CA : {
en : "Canada",
rx : "$Canada$"
},
CV : {
en : "Cape Verde",
rx : "$Cape Verde$"
},
KY : {
en : "Cayman Islands",
rx : "$Cayman Islands$"
},
CF : {
en : "Central African Republic",
rx : "$Central African Republic$"
},
TD : {
en : "Chad",
rx : "$Chad$"
},
CL : {
en : "Chile",
rx : "$Chile$"
},
CN : {
en : "China",
rx : "$China$"
},
CX : {
en : "Christmas Island",
rx : "$Christmas Island$"
},
CC : {
en : "Cocos (Keeling) Islands",
rx : "$Cocos (Keeling) Islands$"
},
CO : {
en : "Colombia",
rx : "$Colombia$"
},
KM : {
en : "Comoros",
rx : "$Comoros$"
},
CG : {
en : "Congo",
rx : "$Congo$"
},
CD : {
en : "Congo, the Democratic Republic of the",
rx : "$Congo, the Democratic Republic of the$"
},
CK : {
en : "Cook Islands",
rx : "$Cook Islands$"
},
CR : {
en : "Costa Rica",
rx : "$Costa Rica$"
},
CI : {
en : "Cote d\'Ivoire",
rx : "$Cote d\'Ivoire$"
},
HR : {
en : "Croatia (Hrvatska)",
rx : "$Croatia (Hrvatska)$"
},
CU : {
en : "Cuba",
rx : "$Cuba$"
},
CY : {
en : "Cyprus",
rx : "$Cyprus$"
},
CZ : {
en : "Czech Republic",
rx : "$Czech Republic$"
},
DK : {
en : "Denmark",
rx : "$Denmark$"
},
DJ : {
en : "Djibouti",
rx : "$Djibouti$"
},
DM : {
en : "Dominica",
rx : "$Dominica$"
},
DO : {
en : "Dominican Republic",
rx : "$Dominican Republic$"
},
TP : {
en : "East Timor",
rx : "$East Timor$"
},
EC : {
en : "Ecuador",
rx : "$Ecuador$"
},
EG : {
en : "Egypt",
rx : "$Egypt$"
},
SV : {
en : "El Salvador",
rx : "$El Salvador$"
},
GQ : {
en : "Equatorial Guinea",
rx : "$Equatorial Guinea$"
},
ER : {
en : "Eritrea",
rx : "$Eritrea$"
},
EE : {
en : "Estonia",
rx : "$Estonia$"
},
ET : {
en : "Ethiopia",
rx : "$Ethiopia$"
},
FK : {
en : "Falkland Islands (Malvinas)",
rx : "$Falkland Islands (Malvinas)$"
},
FO : {
en : "Faroe Islands",
rx : "$Faroe Islands$"
},
FJ : {
en : "Fiji",
rx : "$Fiji$"
},
FI : {
en : "Finland",
rx : "$Finland$"
},
FR : {
en : "France",
rx : "$France$"
},
FX : {
en : "France, Metropolitan",
rx : "$France, Metropolitan$"
},
GF : {
en : "French Guiana",
rx : "$French Guiana$"
},
PF : {
en : "French Polynesia",
rx : "$French Polynesia$"
},
TF : {
en : "French Southern Territories",
rx : "$French Southern Territories$"
},
GA : {
en : "Gabon",
rx : "$Gabon$"
},
GM : {
en : "Gambia",
rx : "$Gambia$"
},
GE : {
en : "Georgia",
rx : "$Georgia$"
},
DE : {
en : "Germany",
rx : "$Germany$"
},
GH : {
en : "Ghana",
rx : "$Ghana$"
},
GI : {
en : "Gibraltar",
rx : "$Gibraltar$"
},
GR : {
en : "Greece",
rx : "$Greece$"
},
GL : {
en : "Greenland",
rx : "$Greenland$"
},
GD : {
en : "Grenada",
rx : "$Grenada$"
},
GP : {
en : "Guadeloupe",
rx : "$Guadeloupe$"
},
GU : {
en : "Guam",
rx : "$Guam$"
},
GT : {
en : "Guatemala",
rx : "$Guatemala$"
},
GN : {
en : "Guinea",
rx : "$Guinea$"
},
GW : {
en : "Guinea-Bissau",
rx : "$Guinea-Bissau$"
},
GY : {
en : "Guyana",
rx : "$Guyana$"
},
HT : {
en : "Haiti",
rx : "$Haiti$"
},
HM : {
en : "Heard and Mc Donald Islands",
rx : "$Heard and Mc Donald Islands$"
},
VA : {
en : "Holy See (Vatican City State)",
rx : "$Holy See (Vatican City State)$"
},
HN : {
en : "Honduras",
rx : "$Honduras$"
},
HK : {
en : "Hong Kong",
rx : "$Hong Kong$"
},
HU : {
en : "Hungary",
rx : "$Hungary$"
},
IS : {
en : "Iceland",
rx : "$Iceland$"
},
IN : {
en : "India",
rx : "$India$"
},
ID : {
en : "Indonesia",
rx : "$Indonesia$"
},
IR : {
en : "Iran (Islamic Republic of)",
rx : "$Iran (Islamic Republic of)$"
},
IQ : {
en : "Iraq",
rx : "$Iraq$"
},
IE : {
en : "Ireland",
rx : "$Ireland$"
},
IL : {
en : "Israel",
rx : "$Israel$"
},
IT : {
en : "Italy",
rx : "$Italy$"
},
JM : {
en : "Jamaica",
rx : "$Jamaica$"
},
JP : {
en : "Japan",
rx : "$Japan$"
},
JO : {
en : "Jordan",
rx : "$Jordan$"
},
KZ : {
en : "Kazakhstan",
rx : "$Kazakhstan$"
},
KE : {
en : "Kenya",
rx : "$Kenya$"
},
KI : {
en : "Kiribati",
rx : "$Kiribati$"
},
KP : {
en : "Korea, Democratic People\'s Republic of",
rx : "$Korea, Democratic People\'s Republic of$"
},
KR : {
en : "Korea, Republic of",
rx : "$Korea, Republic of$"
},
KW : {
en : "Kuwait",
rx : "$Kuwait$"
},
KG : {
en : "Kyrgyzstan",
rx : "$Kyrgyzstan$"
},
LA : {
en : "Lao People\'s Democratic Republic",
rx : "$Lao People\'s Democratic Republic$"
},
LV : {
en : "Latvia",
rx : "$Latvia$"
},
LB : {
en : "Lebanon",
rx : "$Lebanon$"
},
LS : {
en : "Lesotho",
rx : "$Lesotho$"
},
LR : {
en : "Liberia",
rx : "$Liberia$"
},
LY : {
en : "Libyan Arab Jamahiriya",
rx : "$Libyan Arab Jamahiriya$"
},
LI : {
en : "Liechtenstein",
rx : "$Liechtenstein$"
},
LT : {
en : "Lithuania",
rx : "$Lithuania$"
},
LU : {
en : "Luxembourg",
rx : "$Luxembourg$"
},
MO : {
en : "Macau",
rx : "$Macau$"
},
MK : {
en : "Macedonia, The Former Yugoslav Republic of",
rx : "$Macedonia, The Former Yugoslav Republic of$"
},
MG : {
en : "Madagascar",
rx : "$Madagascar$"
},
MW : {
en : "Malawi",
rx : "$Malawi$"
},
MY : {
en : "Malaysia",
rx : "$Malaysia$"
},
MV : {
en : "Maldives",
rx : "$Maldives$"
},
ML : {
en : "Mali",
rx : "$Mali$"
},
MT : {
en : "Malta",
rx : "$Malta$"
},
MH : {
en : "Marshall Islands",
rx : "$Marshall Islands$"
},
MQ : {
en : "Martinique",
rx : "$Martinique$"
},
MR : {
en : "Mauritania",
rx : "$Mauritania$"
},
MU : {
en : "Mauritius",
rx : "$Mauritius$"
},
YT : {
en : "Mayotte",
rx : "$Mayotte$"
},
MX : {
en : "Mexico",
rx : "$Mexico$"
},
FM : {
en : "Micronesia, Federated States of",
rx : "$Micronesia, Federated States of$"
},
MD : {
en : "Moldova, Republic of",
rx : "$Moldova, Republic of$"
},
MC : {
en : "Monaco",
rx : "$Monaco$"
},
MN : {
en : "Mongolia",
rx : "$Mongolia$"
},
MS : {
en : "Montserrat",
rx : "$Montserrat$"
},
MA : {
en : "Morocco",
rx : "$Morocco$"
},
MZ : {
en : "Mozambique",
rx : "$Mozambique$"
},
MM : {
en : "Myanmar",
rx : "$Myanmar$"
},
NA : {
en : "Namibia",
rx : "$Namibia$"
},
NR : {
en : "Nauru",
rx : "$Nauru$"
},
NP : {
en : "Nepal",
rx : "$Nepal$"
},
NL : {
en : "Netherlands",
rx : "$Netherlands$"
},
AN : {
en : "Netherlands Antilles",
rx : "$Netherlands Antilles$"
},
NC : {
en : "New Caledonia",
rx : "$New Caledonia$"
},
NZ : {
en : "New Zealand",
rx : "$New Zealand$"
},
NI : {
en : "Nicaragua",
rx : "$Nicaragua$"
},
NE : {
en : "Niger",
rx : "$Niger$"
},
NG : {
en : "Nigeria",
rx : "$Nigeria$"
},
NU : {
en : "Niue",
rx : "$Niue$"
},
NF : {
en : "Norfolk Island",
rx : "$Norfolk Island$"
},
MP : {
en : "Northern Mariana Islands",
rx : "$Northern Mariana Islands$"
},
NO : {
en : "Norway",
rx : "$Norway$"
},
OM : {
en : "Oman",
rx : "$Oman$"
},
PK : {
en : "Pakistan",
rx : "$Pakistan$"
},
PW : {
en : "Palau",
rx : "$Palau$"
},
PA : {
en : "Panama",
rx : "$Panama$"
},
PG : {
en : "Papua New Guinea",
rx : "$Papua New Guinea$"
},
PY : {
en : "Paraguay",
rx : "$Paraguay$"
},
PE : {
en : "Peru",
rx : "$Peru$"
},
PH : {
en : "Philippines",
rx : "$Philippines$"
},
PN : {
en : "Pitcairn",
rx : "$Pitcairn$"
},
PL : {
en : "Poland",
rx : "$Poland$"
},
PT : {
en : "Portugal",
rx : "$Portugal$"
},
PR : {
en : "Puerto Rico",
rx : "$Puerto Rico$"
},
QA : {
en : "Qatar",
rx : "$Qatar$"
},
RE : {
en : "Reunion",
rx : "$Reunion$"
},
RO : {
en : "Romania",
rx : "$Romania$"
},
RU : {
en : "Russian Federation",
rx : "$Russian Federation$"
},
RW : {
en : "Rwanda",
rx : "$Rwanda$"
},
KN : {
en : "Saint Kitts and Nevis",
rx : "$Saint Kitts and Nevis$"
},
LC : {
en : "Saint LUCIA",
rx : "$Saint LUCIA$"
},
VC : {
en : "Saint Vincent and the Grenadines",
rx : "$Saint Vincent and the Grenadines$"
},
WS : {
en : "Samoa",
rx : "$Samoa$"
},
SM : {
en : "San Marino",
rx : "$San Marino$"
},
ST : {
en : "Sao Tome and Principe",
rx : "$Sao Tome and Principe$"
},
SA : {
en : "Saudi Arabia",
rx : "$Saudi Arabia$"
},
SN : {
en : "Senegal",
rx : "$Senegal$"
},
SC : {
en : "Seychelles",
rx : "$Seychelles$"
},
SL : {
en : "Sierra Leone",
rx : "$Sierra Leone$"
},
SG : {
en : "Singapore",
rx : "$Singapore$"
},
SK : {
en : "Slovakia (Slovak Republic)",
rx : "$Slovakia (Slovak Republic)$"
},
SI : {
en : "Slovenia",
rx : "$Slovenia$"
},
SB : {
en : "Solomon Islands",
rx : "$Solomon Islands$"
},
SO : {
en : "Somalia",
rx : "$Somalia$"
},
ZA : {
en : "South Africa",
rx : "$South Africa$"
},
GS : {
en : "South Georgia and the South Sandwich Islands",
rx : "$South Georgia and the South Sandwich Islands$"
},
ES : {
en : "Spain",
rx : "$Spain$"
},
LK : {
en : "Sri Lanka",
rx : "$Sri Lanka$"
},
SH : {
en : "St. Helena",
rx : "$St. Helena$"
},
PM : {
en : "St. Pierre and Miquelon",
rx : "$St. Pierre and Miquelon$"
},
SD : {
en : "Sudan",
rx : "$Sudan$"
},
SR : {
en : "Suriname",
rx : "$Suriname$"
},
SJ : {
en : "Svalbard and Jan Mayen Islands",
rx : "$Svalbard and Jan Mayen Islands$"
},
SZ : {
en : "Swaziland",
rx : "$Swaziland$"
},
SE : {
en : "Sweden",
rx : "$Sweden$"
},
CH : {
en : "Switzerland",
rx : "$Switzerland$"
},
SY : {
en : "Syrian Arab Republic",
rx : "$Syrian Arab Republic$"
},
TW : {
en : "Taiwan, Province of China",
rx : "$Taiwan, Province of China$"
},
TJ : {
en : "Tajikistan",
rx : "$Tajikistan$"
},
TZ : {
en : "Tanzania, United Republic of",
rx : "$Tanzania, United Republic of$"
},
TH : {
en : "Thailand",
rx : "$Thailand$"
},
TG : {
en : "Togo",
rx : "$Togo$"
},
TK : {
en : "Tokelau",
rx : "$Tokelau$"
},
TO : {
en : "Tonga",
rx : "$Tonga$"
},
TT : {
en : "Trinidad and Tobago",
rx : "$Trinidad and Tobago$"
},
TN : {
en : "Tunisia",
rx : "$Tunisia$"
},
TR : {
en : "Turkey",
rx : "$Turkey$"
},
TM : {
en : "Turkmenistan",
rx : "$Turkmenistan$"
},
TC : {
en : "Turks and Caicos Islands",
rx : "$Turks and Caicos Islands$"
},
TV : {
en : "Tuvalu",
rx : "$Tuvalu$"
},
UG : {
en : "Uganda",
rx : "$Uganda$"
},
UA : {
en : "Ukraine",
rx : "$Ukraine$"
},
AE : {
en : "United Arab Emirates",
rx : "$United Arab Emirates$"
},
GB : {
en : "United Kingdom",
rx : "$United Kingdom$"
},
UM : {
en : "United States Minor Outlying Islands",
rx : "$United States Minor Outlying Islands$"
},
UY : {
en : "Uruguay",
rx : "$Uruguay$"
},
UZ : {
en : "Uzbekistan",
rx : "$Uzbekistan$"
},
VU : {
en : "Vanuatu",
rx : "$Vanuatu$"
},
VE : {
en : "Venezuela",
rx : "$Venezuela$"
},
VN : {
en : "Viet Nam",
rx : "$Viet Nam$"
},
VG : {
en : "Virgin Islands (British)",
rx : "$Virgin Islands (British)$"
},
VI : {
en : "Virgin Islands (U.S.)",
rx : "$Virgin Islands (U.S.)$"
},
WF : {
en : "Wallis and Futuna Islands",
rx : "$Wallis and Futuna Islands$"
},
EH : {
en : "Western Sahara",
rx : "$Western Sahara$"
},
YE : {
en : "Yemen",
rx : "$Yemen$"
},
YU : {
en : "Yugoslavia",
rx : "$Yugoslavia$"
},
ZM : {
en : "Zambia",
rx : "$Zambia$"
},
ZW : {
en : "Zimbabwe",
rx : "$Zimbabwe$"
}
},
monthName : {
JAN: {
en : "January",
rx : "$January$",
number: 1,
dayMax: 31
},
FEB: {
en : "February",
rx : "$February$",
number: 2,
dayMax: 29
},
MAR: {
en : "March",
rx : "$March$",
number: 3,
dayMax: 31
},
APR: {
en : "April",
rx : "$April$",
number: 4,
dayMax: 30
},
MAY: {
en : "May",
rx : "$May$",
number: 5,
dayMax: 31
},
JUN: {
en : "June",
rx : "$June$",
number: 6,
dayMax: 30
},
JUL: {
en : "July",
rx : "$July$",
number: 7,
dayMax: 31
},
AUG: {
en : "August",
rx : "$August$",
number: 8,
dayMax: 31
},
SEP: {
en : "September",
rx : "$September$",
number: 9,
dayMax: 30
},
OCT: {
en : "October",
rx : "$October$",
number: 10,
dayMax: 31
},
NOV: {
en : "November",
rx : "$November$",
number: 11,
dayMax: 30
},
DEC: {
en : "December",
rx : "$December$",
number: 12,
dayMax: 31
}
},
dayName : {
SUNDAY : {
en : "Sunday",
rx : "$Sunday$",
index : 0
},
MONDAY : {
en : "Monday",
rx : "$Monday$",
index : 1
},
TUESDAY : {
en : "Tuesday",
rx : "$Tuesday$",
index : 2
},
WEDNESDAY : {
en : "Wednesday",
rx : "$Wednesday$",
index : 3
},
THURSDAY : {
en : "Thursday",
rx : "$Thursday$",
index : 4
},
FRIDAY : {
en : "Friday",
rx : "$Friday$",
index : 5
},
SATURDAY : {
en : "Saturday",
rx : "$Saturday$",
index : 6
}
},
expirationYear : {
2017: {
en : "2017",
rx : "$2017$"
},
2018: {
en : "2018",
rx : "$2018$"
},
2019: {
en : "2019",
rx : "$2019$"
},
2020: {
en : "2020",
rx : "$2020$"
},
2021: {
en : "2021",
rx : "$2021$"
},
2022: {
en : "2022",
rx : "$2022$"
},
2023: {
en : "2023",
rx : "$2023$"
},
},
notificationType : {
NOTICE : {
en : "Notice",
rx : "$Notice$"
},
INFO : {
en : "Info",
rx : "$Info$"
},
SUCCESS : {
en : "Success",
rx : "$Success$"
},
ERROR : {
en : "Error",
rx : "$Error$"
},
},
notificationMode : {
PNOTIFY : {
en : "PNotify",
rx : "$PNotify$"
},
SMS : {
en : "SMS",
rx : "$SMS$"
},
EMAIL : {
en : "Email",
rx : "$Email$"
}
},
perPage : {
"25" : {
en : "25",
rx : "$25$"
},
"50" : {
en : "50",
rx : "$50$"
},
"100" : {
en : "100",
rx : "$100$"
},
"250" : {
en : "250",
rx : "$250$"
},
"500" : {
en : "500",
rx : "$500$"
}
},
reportFrequency: {
1: {
en: "1",
rx: "1"
},
2: {
en: "2",
rx: "2"
},
3: {
en: "3",
rx: "3"
},
4: {
en: "4",
rx: "4"
},
5: {
en: "5",
rx: "5"
},
6: {
en: "6",
rx: "6"
},
7: {
en: "7",
rx: "7"
},
},
timeUnit: {
MINUTE: {
momentCode: "minute",
en: "Minutes",
rx: "$Minutes$",
timeOptions: [ "TOP", "BOTTOM" ],
singular: {
en: "Minute",
rx: "$Minute$"
}
},
HOUR: {
momentCode: "hour",
en: "Hours",
rx: "$Hours$",
timeOptions: [ "TOP", "BOTTOM" ],
singular: {
en: "Hour",
rx: "$Hour$"
}
},
DAY: {
momentCode: "day",
en: "Days",
rx: "$Days$",
timeOptions: [ "SIXAM", "NOON", "SIXPM", "MIDNIGHT" ],
singular: {
en: "Day",
rx: "$Day$"
}
},
WEEK: {
momentCode: "week",
en: "Weeks",
rx: "$Weeks$",
timeOptions: [ "SUNDAY", "MONDAY", "TUESDAY", "WEDNESDAY", "THURSDAY", "FRIDAY", "SATURDAY" ],
singular: {
en: "Week",
rx: "$Week$"
}
},
MONTH: {
momentCode: "month",
en: "Months",
rx: "$Months$",
timeOptions: [ "FIRSTDAY", "LASTDAY", "FIRSTMONDAY", "LASTFRIDAY" ],
singular: {
en: "Month",
rx: "$Month$"
}
},
},
timeOption: {
TOP: {
en: "Top",
rx: "$Top$"
},
BOTTOM: {
en: "Bottom",
rx: "$Bottom$"
},
SIXAM: {
en: "6:00 AM",
rx: "$6:00 AM$"
},
NOON: {
en: "Noon",
rx: "$Noon$"
},
SIXPM: {
en: "6:00 PM",
rx: "$6:00 PM$"
},
MIDNIGHT: {
en: "Midnight",
rx: "$Midnight$"
},
SUNDAY: {
en: "Sunday",
rx: "$Sunday$"
},
MONDAY: {
en: "Monday",
rx: "$Monday$"
},
TUESDAY: {
en: "Tuesday",
rx: "$Tuesday$"
},
WEDNESDAY: {
en: "Wednesday",
rx: "$Wednesday$"
},
THURSDAY: {
en: "Thursday",
rx: "$Thursday$"
},
FRIDAY : {
en: "Friday",
rx: "$Friday$"
},
SATURDAY : {
en: "Saturday",
rx: "$Saturday$"
},
FIRSTDAY : {
en: "First Day",
rx: "$First Day$"
},
LASTDAY : {
en: "Last Day",
rx: "$Last Day$"
},
FIRSTMONDAY : {
en: "First Monday",
rx: "$First Monday$"
},
LASTFRIDAY : {
en: "Last Friday",
rx: "$Last Friday$"
}
},
preferenceDefinition : {
ADVANCED_MAIL_CLIENT : {
en : "Prepare email reports with advanced HTML (e.g., SVG graphics)",
rx : "$Prepare email reports with advanced HTML (e.g., SVG graphics)$",
type: CX.BINDING_TYPE_SWITCH
},
ALL_MEMBERS_AND_DOMAINS : {
en : "Show all members and fields in Members & Fields view (Super Administrator only)",
rx : "$Show all members and fields in Members & Fields view (Super Administrator only)$",
type: CX.BINDING_TYPE_SWITCH,
roles: ["SUPERADMIN"]
},
LOGS_DEFEAT_TENANT_FILTERS : {
en : "Defeat tenant filters on events, notifications and system log",
rx : "$Defeat tenant filters on events, notifications and system log$",
type: CX.BINDING_TYPE_SWITCH,
roles: ["SUPERADMIN"]
}
},
userRole : {
DOMAINADMIN : {
en : "Domain Admin",
rx : "$Domain Admin$",
tenantRole : false
},
TENANTADMIN : {
en : "Tenant Admin",
rx : "$Tenant Admin$",
tenantRole : true
},
SUPERADMIN : {
en : "Super Admin",
rx : "Super Admin$"
},
},
eventType : {
SUBSYSTEM_STATUS_RED : {
en : "When a subsystem state changes to red",
rx : "$When a subsystem state changes to red$",
notification : {
type : "ERROR",
icon : "HISTORY",
key : "common.alert_generic",
subjectKey : "common.alert_generic_subject"
},
notificationDefaults : [ "PNOTIFY" ]
},
SUBSYSTEM_STATUS_YELLOW : {
en : "When a subsystem state changes to yellow",
rx : "$When a subsystem state changes to yellow$",
notification : {
type : "ERROR",
icon : "HISTORY",
key : "common.alert_generic",
subjectKey : "common.alert_generic_subject"
},
notificationDefaults : [ "PNOTIFY" ]
},
SUBSYSTEM_STATUS_GREEN : {
en : "When a subsystem state changes to green",
rx : "$When a subsystem state changes to green$",
notification : {
type : "SUCCESS",
icon : "HISTORY",
key : "common.alert_generic",
subjectKey : "common.alert_generic_subject"
},
notificationDefaults : [ "PNOTIFY" ]
},
USER_RETIRE : {
en : "When a user is retired",
rx : "$When a user is retired$",
notification : {
type : "INFO",
icon : "RETIRE",
key : "common.alert_user_retire",
scope : "ALL"
},
notificationDefaults : [ "PNOTIFY" ]
},
DOMAIN_RETIRE : {
en : "When a field is retired",
rx : "$When a field is retired$",
notification : {
type : "INFO",
icon : "RETIRE",
key : "common.alert_domain_retire",
scope : "ALL"
},
notificationDefaults : [ "PNOTIFY" ]
},
TENANT_CREATE : {
en : "When a user creates a new tenant",
rx : "$When a user creates a new tenant$",
notification : {
type : "INFO",
icon : "BULLHORN",
key : "common.alert_tenant_create",
scope : "ALL"
},
notificationDefaults : [ "PNOTIFY" ]
},
TENANT_RETIRE : {
en : "When a user retires a tenant",
rx : "$When a user retires a tenant",
notification : {
type : "INFO",
icon : "BULLHORN",
key : "common.alert_tenant_retire",
scope : "ALL"
},
notificationDefaults : [ "PNOTIFY" ]
},
USER_LOGIN : {
en : "When a user logs in",
rx : "$When a user logs in$",
role : "SYSTEMADMIN",
notification : {
type : "INFO",
icon : "BULLHORN",
key : "common.alert_user_logged_in",
scope : "ALL"
},
notificationDefaults : [ "PNOTIFY" ]
},
USER_LOGOUT : {
en : "When a user logs out",
rx : "$When a user logs out$",
role : "SYSTEMADMIN",
notification : {
type : "INFO",
icon : "BULLHORN",
key : "common.alert_user_logged_out",
scope : "ALL"
},
notificationDefaults : [ "PNOTIFY" ]
},
LIST_IMPORT_START : {
en : "When list CSV import process is started",
rx : "$When list CSV import process is started$",
notification : {
type : "INFO",
icon : "UPLOAD",
key : "common.alert_list_import_start"
},
notificationDefaults : [ "PNOTIFY" ]
},
LIST_IMPORT_FINISH : {
en : "When list CSV import process is finished",
rx : "$When list CSV import process is finished$",
notification : {
type : "INFO",
icon : "UPLOAD",
key : "common.alert_list_import_finish"
},
notificationDefaults : [ "PNOTIFY" ]
},
LIST_IMPORT_STOP : {
en : "When list CSV import process is stopped by the user",
rx : "$When list CSV import process is stopped by the user$",
notification : {
type : "INFO",
icon : "UPLOAD",
key : "common.alert_list_import_stop"
},
notificationDefaults : [ "PNOTIFY" ]
},
LIST_IMPORT_FAIL : {
en : "When list CSV import process fails due to error",
rx : "$When list CSV import process fails due to error$",
notification : {
type : "ERROR",
icon : "UPLOAD",
key : "common.alert_list_import_fail"
},
notificationDefaults : [ "PNOTIFY" ]
},
LIST_IMPORT_RESET : {
en : "When the import daemon stops CSV import after a system restart",
rx : "$When the import daemon stops CSV import after a system restart$",
notification : {
type : "ERROR",
icon : "UPLOAD",
key : "common.alert_list_import_reset"
},
notificationDefaults : [ "PNOTIFY" ]
},
},
subsystemName : {
TEMPLATE : {
en : "Template",
rx : "$Template$"
},
MAILGUN : {
en : "Mailgun",
rx : "$Mailgun$"
},
TWILIO : {
en : "Twilio",
rx : "$Twilio$"
}
},
subsystemIdentifier : {
TEMPLATE : {
en : "Template {{key}}",
rx : "$Template {{key}}$"
},
MAILGUN : {
en : "Mailgun",
rx : "$Mailgun$"
},
TWILIO : {
en : "Twilio",
rx : "$Twilio$"
}
},
subsystemStatus : {
RED : {
en : "Red",
rx : "$Red$"
},
YELLOW : {
en : "Yellow",
rx : "$Yellow$"
},
GREEN : {
en : "Green",
rx : "$Green$"
},
},
reportType : {
USER_LIST : {
en : "User List",
rx : "$User List$",
template_name: "user_list",
timezone : "America/New_York",
parameterDefinitions : {
state : {
en : "State",
rx : "$State$",
type : "SELECT",
code : "state"
}
}
}
},
deploymentAction : {
COPY : {
en : "Copy domain",
rx : "$Copy domain$"
},
RESTORE : {
en : "Restore snapshot",
rx : "$Restore snapshot$"
}
},
daemonJob : {
MAILMAN : {
en : "Process all notification records by sending out email and SMS messages",
rx : "$Process all notification records by sending out email and SMS messages$",
timeInterval: 10,
timeUnit: "SECOND",
execFunctionName: "Mailman.run"
},
REPORTER : {
en : "Send scheduled email reports",
rx : "$Send scheduled email reports$",
timeInterval: 60,
timeUnit: "SECOND",
initFunctionName : "Reporter.init",
execFunctionName: "Reporter.run"
},
RECORD_REMOVER : {
en : "Find and remove old records to conserve space and improve performance",
rx : "$Find and remove old records to conserve space and improve performance$",
timeInterval: 1,
timeUnit: "DAY",
execFunctionName: "RecordRemover.removeAllRecords"
},
RECORD_IMPORTER: {
en : "Import records from CSV files into the system",
rx : "$Import records from CSV files into the system$",
timeInterval: 1,
timeUnit: "SECOND",
execFunctionName: "RecordImporter.findImportRequests"
}
},
uploadType : {
TEMPLATE: {
en : "Email Templates",
rx : "$Email Templates$",
prepareFunctionName: "RecordImporter.prepareGeneric",
validateHeaders : true,
insertMode : "UPSERT",
collection : "templates",
keypath : "name"
},
},
uploadStatus : {
TRANSMITTING : {
en : "Transmitting",
rx : "$Transmitting$"
},
WAITING : {
en : "Waiting for Daemon",
rx : "$Waiting for Daemon$"
},
ACTIVE : {
en : "Active",
rx : "$Active$"
},
INSERTING : {
en : "Inserting records into database",
rx : "$Inserting records into database$"
},
COMPLETED : {
en : "Completed",
rx : "$Completed$"
},
COMPLETED_WITH_ERRORS : {
en : "Completed With Errors",
rx : "$Completed With Errors$"
},
STOPPED : {
en : "Stopped",
rx : "$Stopped$"
},
FAILED : {
en : "Failed due to error",
rx : "$Failed due to error$"
},
CLEARED : {
en : "Cleared",
rx : "$Cleared$"
}
},
functionType : {
GENERAL : {
en : "General Function",
rx : "$General Function$"
}
}
}
|
<filename>com.archimatetool.canvas/src/com/archimatetool/canvas/factory/CanvasConnectionUIProvider.java
/**
* This program and the accompanying materials
* are made available under the terms of the License
* which accompanies this distribution in the file LICENSE.txt
*/
package com.archimatetool.canvas.factory;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.gef.EditPart;
import org.eclipse.jface.resource.ImageDescriptor;
import org.eclipse.swt.graphics.Image;
import com.archimatetool.canvas.model.ICanvasPackage;
import com.archimatetool.editor.diagram.editparts.DiagramConnectionEditPart;
import com.archimatetool.editor.diagram.figures.diagram.LineConnectionFigure;
import com.archimatetool.editor.ui.IArchiImages;
import com.archimatetool.editor.ui.factory.diagram.DiagramConnectionUIProvider;
/**
* Canvas Diagram Connection UI Provider
*
* @author <NAME>
*/
public class CanvasConnectionUIProvider extends DiagramConnectionUIProvider {
// The model EClass this provider is registered for: the canvas model connection.
@Override
public EClass providerFor() {
return ICanvasPackage.eINSTANCE.getCanvasModelConnection();
}
// Creates the GEF edit part for this connection, rendered as a plain line figure.
@Override
public EditPart createEditPart() {
return new DiagramConnectionEditPart(LineConnectionFigure.class);
}
// Localized default display name for newly created connections.
@Override
public String getDefaultName() {
return Messages.CanvasLineConnectionUIProvider_0;
}
// Shared (do not dispose) arrow icon for palette/tree display.
@Override
public Image getImage() {
return IArchiImages.ImageFactory.getImage(IArchiImages.ICON_CONNECTION_ARROW);
}
// Descriptor variant of the same arrow icon, for deferred image creation.
@Override
public ImageDescriptor getImageDescriptor() {
return IArchiImages.ImageFactory.getImageDescriptor(IArchiImages.ICON_CONNECTION_ARROW);
}
}
|
import { is } from "./is";
describe("When testing if value is", () => {
describe("present", () => {
it("should be positive when value is neither null nor undefined", () => {
expect(is(3).present()).toBeTruthy();
expect(is("hello").present()).toBeTruthy();
expect(is(0).present()).toBeTruthy();
expect(is("").present()).toBeTruthy();
expect(is({}).present()).toBeTruthy();
expect(is([]).present()).toBeTruthy();
});
it("should be negative if value is null", () => {
expect(is(null).present()).toBeFalsy();
});
it("should be negative if value is undefined", () => {
expect(is(undefined).present()).toBeFalsy();
});
});
describe("absent", () => {
it("should be positive when value is null or undefined", () => {
expect(is(null).absent()).toBeTruthy();
expect(is(undefined).absent()).toBeTruthy();
});
it("should be negative if value is present", () => {
expect(is(3).absent()).toBeFalsy();
});
});
describe("a string", () => {
it("should be positive when value is a string", () => {
expect(is("hello").aString()).toBeTruthy();
expect(is("").aString()).toBeTruthy();
});
it("should be negative when value is not a string", () => {
expect(is(3).aString()).toBeFalsy();
});
});
describe("a boolean", () => {
    it("should be positive when value is a boolean", () => {
        expect(is(true).aBoolean()).toBeTruthy();
        expect(is(false).aBoolean()).toBeTruthy();
    });
    // Fixed copy-pasted test name: this case asserts non-BOOLEAN rejection,
    // not "not a string".
    it("should be negative when value is not a boolean", () => {
        expect(is(3).aBoolean()).toBeFalsy();
    });
});
describe("a number", () => {
it("should be positive when value is a number", () => {
expect(is(3).aNumber()).toBeTruthy();
expect(is(0).aNumber()).toBeTruthy();
});
it("should be negative when value is not a number", () => {
expect(is("3").aNumber()).toBeFalsy();
expect(is(Number.NaN).aNumber()).toBeFalsy();
});
});
describe("a date", () => {
it("should be positive when value is a date", () => {
expect(is(new Date()).aDate()).toBeTruthy();
});
it("should be negative when value is not a date", () => {
expect(is(3).aDate()).toBeFalsy();
});
});
describe("a function", () => {
    it("should be positive when value is a function", () => {
        expect(is("".toString).aFunction()).toBeTruthy();
        expect(is(Array).aFunction()).toBeTruthy();
    });
    // Fixed copy-pasted test name: this case asserts non-FUNCTION rejection,
    // not "not a string".
    it("should be negative when value is not a function", () => {
        expect(is(3).aFunction()).toBeFalsy();
    });
});
describe("an array", () => {
it("should be positive when value is an array", () => {
expect(is([1, 2]).anArray()).toBeTruthy();
expect(is([]).anArray()).toBeTruthy();
});
it("should be negative when value is not an array", () => {
expect(is(3).anArray()).toBeFalsy();
});
});
describe("an object", () => {
it("should be positive when value is an object", () => {
expect(is({ test: "ok" }).anObject()).toBeTruthy();
expect(is({}).anObject()).toBeTruthy();
});
it("should be negative when value is not an object", () => {
expect(is(3).anObject()).toBeFalsy();
});
});
});
|
<reponame>mini-crm/mini-crm<filename>product-group-data-api/src/main/java/tr/com/minicrm/productgroup/data/ProductGroup.java
package tr.com.minicrm.productgroup.data;
/**
 * Read-only view of a product group record.
 */
public interface ProductGroup {
/** @return the persistent identifier of this product group */
public Long getId();
/** @return the display name of this product group */
public String getName();
/** @return the record version, presumably used for optimistic locking — TODO confirm against persistence layer */
public int getVersion();
}
|
<reponame>bandey/lexicon-byzlaw-client<gh_stars>0
import React from 'react';
import {Link} from 'react-router-dom';
import DataProvider from '../../data-provider/data-provider.js';
import {getOpusAll} from '../../data-provider/data-provider-queries.js';
import Choice from '../../components/choice/choice.js';
const queryOpusAll = getOpusAll();
// Renders a Choice list of all opuses fetched via DataProvider.
// Props: title — heading passed through to Choice; match — router match used
// to build item links; onChoose — optional callback invoked with the clicked item.
function ChoiceOpus({title, match, onChoose}) {
// Wraps a rendered child in a router Link to the item's detail route,
// firing onChoose (when provided) on click.
function wrapLink(item, Child) {
const onClick = onChoose ? () => onChoose(item) : null;
return (
<Link key={item.id} to={`${match.url}/${item.id}`} onClick={onClick}>
{Child}
</Link>
);
};
// Render-prop adapter: receives the fetched data and feeds it to Choice.
function AdapterChoice({data}) {
return (
<Choice title={title} wrapLink={wrapLink}>
{data}
</Choice>
);
};
return (
<DataProvider query={queryOpusAll}>
{AdapterChoice}
</DataProvider>
);
};
export default ChoiceOpus;
|
<filename>sansio_multipart/wsgi_form_parser.py<gh_stars>1-10
__all__ = ["parse_form_data"]
from io import BytesIO
from urllib.parse import parse_qs
from .parser import MultipartParser
from .utils import MultiDict, parse_options_header
from .errors import MultipartError
def parse_form_data(environ, charset="utf8", strict=False, **kwargs):
    """Parse form data from a WSGI environ dict and return a (forms, files) tuple.

    Both tuple values are dictionaries with the form-field name as a key
    (unicode) and lists as values (multiple values per key are possible).

    The forms-dictionary contains form-field values as unicode strings.
    The files-dictionary contains :class:`Part` instances, either
    because the form-field was a file-upload or the value is too big to fit
    into memory limits.

    :param environ: A WSGI environment dict.
    :param charset: The charset to use if unsure. (default: utf8)
    :param strict: If True, raise :exc:`MultipartError` on any parsing
                   errors. These are silently ignored by default.
    """
    forms, files = MultiDict(), MultiDict()

    try:
        # Only POST/PUT requests carry a parseable body.
        if environ.get("REQUEST_METHOD", "GET").upper() not in ("POST", "PUT"):
            raise MultipartError("Request method other than POST or PUT.")

        content_length = int(environ.get("CONTENT_LENGTH", "-1"))
        content_type = environ.get("CONTENT_TYPE", "")

        if not content_type:
            raise MultipartError("Missing Content-Type header.")

        content_type, options = parse_options_header(content_type)
        stream = environ.get("wsgi.input") or BytesIO()
        # A charset in the Content-Type header overrides the caller-supplied one.
        kwargs["charset"] = charset = options.get("charset", charset)

        if content_type == "multipart/form-data":
            boundary = options.get("boundary", "")

            if not boundary:
                raise MultipartError("No boundary for multipart/form-data.")

            for part in MultipartParser(stream, boundary, content_length, **kwargs):
                # File uploads (and values too large to buffer) go to `files`.
                if part.filename or not part.is_buffered():
                    files[part.name] = part
                else:  # TODO: Big form-fields are in the files dict. really?
                    forms[part.name] = part.value

        elif content_type in (
            "application/x-www-form-urlencoded",
            "application/x-url-encoded",
        ):
            mem_limit = kwargs.get("mem_limit", 2 ** 20)

            if content_length > mem_limit:
                raise MultipartError("Request too big. Increase MAXMEM.")

            data = stream.read(mem_limit).decode(charset)

            if stream.read(1):  # There is more data than fits into mem_limit.
                raise MultipartError("Request too big. Increase MAXMEM.")

            data = parse_qs(data, keep_blank_values=True)

            # Flatten parse_qs' value lists; MultiDict keeps every value per key.
            for key, values in data.items():
                for value in values:
                    forms[key] = value

        else:
            raise MultipartError("Unsupported content type.")

    except MultipartError:
        if strict:
            raise
        # Non-strict mode: swallow the error and return whatever was parsed.

    return forms, files
|
#!/bin/sh
# Smoke-test wrapper: runs covme on the example binary.
# FIX: shebang was the non-portable /usr/bin/sh; use /bin/sh.
echo "execute ./covme ./examples/c/function_call/a.out"
echo "covme use gdb for trace program, gdb must be installed"

# FIX: replaced the fragile `TEST_GDB=\`which gdb\`` + separate `$?` check
# (the captured variable was never used) with the POSIX `command -v` idiom.
if ! command -v gdb >/dev/null 2>&1; then
    echo "gdb not found, please make sure gdb installed"
    exit 1
fi

./covme ./examples/c/function_call/a.out 1 2
# order is important here. Events will be ordered by this!!! It is the
# tie-breaker in the sort key inside calculate_periods for records sharing
# the same medication and event_time.
ACTIONS=['start', 'cancel_start', 'stop', 'cancel_stop']

def calculate_periods(records):
    """
    This method is responsible for calculating the periods from the raw records list.
    First, it sorts the list by medication_name and event_time and action.
    As I want to go over the records once, the sorting ensures the following:
    1. I'm dealing with one medication at a time. So if I encounter a new medication, I know no other events exist for the previous one.
    2. I'm going over the records by their event_time and not how they were received
    3. The order of actions is very important as I need to deal with each case carefully

    Returns a dict mapping medication_name -> list of period tuples. A 2-tuple
    (start, stop) is a closed period; a 1-tuple (start,) is still in progress.
    Raises Exception on inconsistent event sequences (see validations below).
    """
    # NOTE(review): ACTIONS.index() in the sort key raises ValueError for an
    # unknown action, so the `action not in ACTIONS` safeguard further down can
    # never fire for records present at sort time — confirm intent.
    records.sort(key=lambda item: (item['medication_name'], item['event_time'], ACTIONS.index(item['action']))) # sort by medication_name AND event_time AND action
    ans = {} # This will hold my answer. A dict with medication_name as key and a list of tuples for periods as value
    start_period = None # Start time of the currently open period, if any
    current_medication = None # Medication whose records are currently being processed
    for record in records:
        if not record: # safeguard against empty records although consumer should not add to DB in this case
            continue
        action = record['action']
        medication = record['medication_name']
        if medication not in ans: # new medication
            if current_medication and start_period:
                # The previous medication had a period in progress that never
                # stopped; record it as an open-ended 1-tuple.
                ans[current_medication].append((start_period,)) # Add non-ending period
            ans[medication] = [] # Initialize new list
            start_period = None
            current_medication = medication
        if action not in ACTIONS: # Safeguard against unknown action
            continue
        #Now comes all the validations. Here I'm throwing an exception, but we can also just print an error and continue processing.
        if action == 'cancel_start' and not start_period: # cancel_start came before start - invalid input
            raise Exception(f'Invalid input: Got cancel_start event for medication {medication} at {record["event_time"]} without a start event first')
        if action == 'cancel_stop' and (start_period or len(ans[medication]) == 0): # cancel_stop came after start or without a previous period
            raise Exception(f'Invalid input: Got cancel_stop event for medication {medication} at {record["event_time"]} without a stop event first')
        if action == 'start' and start_period: # start received when already started
            raise Exception(f'Invalid input: Got double start event for medication {medication} at {start_period} and {record["event_time"]}')
        if action == 'stop' and not start_period: # Stop received without starting
            raise Exception(f'Invalid input: Got stop event for medication {medication} at {record["event_time"]} without a start')
        # Switch on action
        if action == 'start': # Got start event. Set start_period
            start_period = record["event_time"]
        elif action == 'stop': # Got stop event. Add new period to ans and clear start_period
            ans[medication].append((start_period, record["event_time"]))
            start_period = None
        elif action == 'cancel_start': # Got cancel_start, so just clear start_period
            start_period = None
        elif action == 'cancel_stop': # Got cancel_stop. We need to search for previous period and delete it.
            # Reopen the last closed period as if its stop never happened.
            last_period = ans[medication][-1]
            start_period = last_period[0] # continue period as if stop never happened
            ans[medication] = ans[medication][:-1] # Remove last period from answer
    if current_medication and start_period: # in the end, we need to check if period is in progress
        #We have a medication in progress that have not stopped
        ans[current_medication].append((start_period,))
    return ans
#!/bin/bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -e

# Print usage and abort.
usage() {
  echo "
usage: $0 <options>
  Required not-so-options:
     --build-dir=DIR           path to pig dist.dir
     --prefix=PREFIX           path to install into

  Optional options:
     --lib-dir=DIR             path to install pig home [/usr/lib/pig]
     --build-dir=DIR           path to pig dist dir
     ... [ see source for more similar options ]
  "
  exit 1
}

OPTS=$(getopt \
  -n $0 \
  -o '' \
  -l 'prefix:' \
  -l 'lib-dir:' \
  -l 'build-dir:' -- "$@")

if [ $? != 0 ] ; then
    usage
fi

eval set -- "$OPTS"
while true ; do
    case "$1" in
        --prefix)
        PREFIX=$2 ; shift 2
        ;;
        --build-dir)
        BUILD_DIR=$2 ; shift 2
        ;;
        --lib-dir)
        LIB_DIR=$2 ; shift 2
        ;;
        --)
        shift ; break
        ;;
        *)
        echo "Unknown option: $1"
        usage
        exit 1
        ;;
    esac
done

# Both PREFIX and BUILD_DIR are mandatory.
for var in PREFIX BUILD_DIR ; do
  if [ -z "$(eval "echo \$$var")" ]; then
    echo Missing param: $var
    usage
  fi
done

# FIX: the default was /usr/lib/ycsb (copy-paste from another package script);
# usage() documents /usr/lib/pig as the default pig home.
LIB_DIR=${LIB_DIR:-/usr/lib/pig}

# Copy the whole build tree into the install location via a tar pipe.
install -d -m 0755 $PREFIX/$LIB_DIR
(cd ${BUILD_DIR} && tar -cf - .)|(cd $PREFIX/${LIB_DIR} && tar -xf -)
|
def classify_emails(emails, spam_words=None):
    """Split emails into (spam, not-spam) lists.

    An email is classified as spam when it contains at least one spam keyword.

    :param emails: iterable of email body strings.
    :param spam_words: optional collection of spam keywords. When None,
        falls back to the module-level ``spam_words_list`` the original code
        relied on (backward compatible); passing it explicitly makes the
        function self-contained and testable.
    :return: tuple ``(spam_messages, not_spam_messages)``.
    """
    keywords = spam_words_list if spam_words is None else spam_words
    spam_messages = []
    not_spam_messages = []
    for email in emails:
        # Split on single spaces to match the original tokenization exactly.
        words = email.split(" ")
        # any() short-circuits on the first spam keyword instead of counting
        # all of them (the original counted, then only checked count > 0).
        if any(word in keywords for word in words):
            spam_messages.append(email)
        else:
            not_spam_messages.append(email)
    return spam_messages, not_spam_messages
<gh_stars>0
package com.google.android.cameraview.demo;
import android.graphics.Bitmap;
import android.graphics.Matrix;
import android.os.Bundle;
import android.os.Handler;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.View;
import android.widget.ImageView;
import android.widget.RelativeLayout;
import android.widget.Toast;
import com.google.android.cameraview.CameraView;
/**
 * Demo activity showing CameraView usage: take a picture, switch between
 * front/back camera, and preview the captured image.
 *
 * Created by maxpengli on 2017/6/27.
 */
public class CameraActivity extends AppCompatActivity {

    private static final String TAG = "xxp";

    // NOTE(review): declared but never assigned or used in this class —
    // presumably intended for off-main-thread work; verify before removing.
    private Handler mBackgroundHandler;
    private CameraView mCameraView;
    // Container and image view used to show the captured picture.
    private RelativeLayout preview_Layout;
    private ImageView preview_imageview;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.camera_layout);
        initView();
    }

    /** Looks up views and wires the camera callback and click listeners. */
    private void initView() {
        mCameraView = (CameraView) findViewById(R.id.camera);
        mCameraView.addCallback(mCallback);
        findViewById(R.id.take_picture).setOnClickListener(mOnClickListener);
        findViewById(R.id.switch_button).setOnClickListener(mOnClickListener);
        preview_Layout = (RelativeLayout) findViewById(R.id.preview_container);
        preview_imageview = (ImageView) findViewById(R.id.preview_image);
        findViewById(R.id.preview_close_icon).setOnClickListener(mOnClickListener);
    }

    @Override
    protected void onResume() {
        // Start the camera preview when the activity becomes visible.
        mCameraView.start();
        super.onResume();
    }

    @Override
    protected void onPause() {
        // Stop the camera so it is released while the activity is backgrounded.
        mCameraView.stop();
        super.onPause();
    }

    /** Handles the capture, camera-switch, and preview-close buttons. */
    private View.OnClickListener mOnClickListener = new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            switch (v.getId()) {
                case R.id.take_picture:
                    if (mCameraView != null) {
                        mCameraView.takePicture();
                    }
                    break;
                case R.id.switch_button:
                    if (mCameraView != null) {
                        // Toggle between front and back cameras.
                        int facing = mCameraView.getFacing();
                        mCameraView.setFacing(facing == CameraView.FACING_FRONT ?
                                CameraView.FACING_BACK : CameraView.FACING_FRONT);
                    }
                    break;
                case R.id.preview_close_icon:
                    preview_Layout.setVisibility(View.GONE);
                    break;
            }
        }
    };

    /** Receives camera lifecycle events and the captured JPEG bytes. */
    private CameraView.Callback mCallback = new CameraView.Callback() {
        @Override
        public void onCameraOpened(CameraView cameraView) {
            Log.d(TAG, "onCameraOpened");
        }

        @Override
        public void onCameraClosed(CameraView cameraView) {
            Log.d(TAG, "onCameraClosed");
        }

        @Override
        public void onPictureTaken(CameraView cameraView, final byte[] data) {
            // Decode with downsampling (max dimension 1024) before display.
            Bitmap bitmap = BitmapUtils.createSampledBitmapFromBytes(data, 1024);
            Log.d(TAG, "onPictureTaken " + data.length);
            Toast.makeText(cameraView.getContext(), "take picture return "+data.length, Toast.LENGTH_SHORT).show();
            // Rotate 90 degrees — assumes the sensor delivers
            // landscape-oriented JPEGs; TODO confirm across devices/facings.
            Matrix matrix = new Matrix();
            matrix.setRotate(90);
            bitmap = Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix, true);
            preview_Layout.setVisibility(View.VISIBLE);
            preview_imageview.setImageBitmap(bitmap);
        }
    };
}
|
<gh_stars>1-10
#include "stdafx.h"
#include <string.h>
#include <filesystem>
#include <stdexcept>
#include <d3dcompiler.h>
#include <DirectXMath.h>
#include "D3dTiles/D3d/TextureShader.h"
namespace TileEngine::D3d {
// Trivial construction/destruction; all D3D resources are held by COM smart
// pointers and created later in Initialize().
TextureShader::TextureShader() {}
TextureShader::~TextureShader() {}
// Builds both shader stages from the combined effect file "D3dTiles.fx"
// under shaderPath — the same file provides the VS and PS entry points.
void TextureShader::Initialize(ID3D11DevicePtr device, const std::wstring &shaderPath) {
  // Initialize the vertex and pixel shaders.
  const std::filesystem::path shaderDir(shaderPath);
  InitializeShader(device, (shaderDir / L"D3dTiles.fx").wstring(), (shaderDir / L"D3dTiles.fx").wstring());
}
// Creates the GPU resources this shader needs: both shader stages and the
// per-frame matrix constant buffer. Throws std::runtime_error on failure.
void TextureShader::InitializeShader(ID3D11DevicePtr device, const std::wstring &vsFilename, const std::wstring &psFilename) {
  InitVertexShader(device, vsFilename);
  InitPixelShader(device, psFilename);

  // Setup the description of the dynamic matrix constant buffer that is in the vertex shader.
  // DYNAMIC usage + CPU write access lets SetShaderParameters() update it
  // every frame via Map()/Unmap().
  D3D11_BUFFER_DESC matrixBufferDesc;
  memset(&matrixBufferDesc, 0, sizeof(matrixBufferDesc));
  matrixBufferDesc.ByteWidth = sizeof(MatrixBufferType);
  matrixBufferDesc.Usage = D3D11_USAGE_DYNAMIC;
  matrixBufferDesc.BindFlags = D3D11_BIND_CONSTANT_BUFFER;
  matrixBufferDesc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE;
  matrixBufferDesc.MiscFlags = 0;
  matrixBufferDesc.StructureByteStride = 0;

  // Create the constant buffer pointer so we can access the vertex shader constant buffer from within this class.
  HRESULT hr = device->CreateBuffer(&matrixBufferDesc, nullptr, &m_matrixBuffer.GetInterfacePtr());
  if (FAILED(hr)) {
    throw std::runtime_error("CreateBuffer() failed");
  }

  /*
  // Dead code kept for reference: sampler-state creation is currently
  // disabled (see the matching commented-out PSSetSamplers in RenderShader).
  // Create a texture sampler state description.
  D3D11_SAMPLER_DESC samplerDesc;
  memset(&samplerDesc, 0, sizeof(samplerDesc));
  samplerDesc.Filter = D3D11_FILTER_MIN_MAG_MIP_LINEAR;
  samplerDesc.AddressU = D3D11_TEXTURE_ADDRESS_WRAP;
  samplerDesc.AddressV = D3D11_TEXTURE_ADDRESS_WRAP;
  samplerDesc.AddressW = D3D11_TEXTURE_ADDRESS_WRAP;
  samplerDesc.MipLODBias = 0.0f;
  samplerDesc.MaxAnisotropy = 1;
  samplerDesc.ComparisonFunc = D3D11_COMPARISON_ALWAYS;
  samplerDesc.BorderColor[0] = 0;
  samplerDesc.BorderColor[1] = 0;
  samplerDesc.BorderColor[2] = 0;
  samplerDesc.BorderColor[3] = 0;
  samplerDesc.MinLOD = 0;
  samplerDesc.MaxLOD = D3D11_FLOAT32_MAX;

  // Create the texture sampler state.
  result = device->CreateSamplerState(&samplerDesc, &m_sampleState);
  if (FAILED(result)) {
    return false;
  }
  */
}
// Compiles the vertex shader (entry "VS", shader model 4.0), creates the
// shader object, and builds the input layout (position + texcoord).
void TextureShader::InitVertexShader(ID3D11DevicePtr device, const std::wstring &vsFilename) {
  ID3DBlobPtr vsBlob = CompileShaderFromFile(vsFilename.c_str(), "VS", "vs_4_0");

  // Create the vertex shader
  HRESULT hr = device->CreateVertexShader(vsBlob->GetBufferPointer(), vsBlob->GetBufferSize(), nullptr, &m_vertexShader.GetInterfacePtr());
  if (FAILED(hr)) {
    throw std::runtime_error("CreateVertexShader() failed");
  }

  // Define the input layout: float3 position followed by float2 texcoord,
  // packed tightly via D3D11_APPEND_ALIGNED_ELEMENT.
  D3D11_INPUT_ELEMENT_DESC layout[] =
  {
    { "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 },
    { "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, D3D11_APPEND_ALIGNED_ELEMENT, D3D11_INPUT_PER_VERTEX_DATA, 0 },
  };
  UINT numElements = ARRAYSIZE(layout);

  // Create the input layout (validated against the compiled VS signature).
  hr = device->CreateInputLayout(layout, numElements, vsBlob->GetBufferPointer(),
                                 vsBlob->GetBufferSize(), &m_vertexLayout.GetInterfacePtr());
  if (FAILED(hr)) {
    throw std::runtime_error("CreateInputLayout() failed");
  }
}
// Compiles the pixel shader (entry "PS", shader model 4.0) and creates the
// shader object. Throws std::runtime_error on failure.
void TextureShader::InitPixelShader(ID3D11DevicePtr device, const std::wstring &psFilename) {
  // FIX: the blob was named vsBlob — a copy-paste leftover from
  // InitVertexShader; renamed to psBlob for clarity (behavior unchanged).
  ID3DBlobPtr psBlob = CompileShaderFromFile(psFilename.c_str(), "PS", "ps_4_0");

  // Create the pixel shader
  HRESULT hr = device->CreatePixelShader(psBlob->GetBufferPointer(), psBlob->GetBufferSize(), nullptr, &m_pixelShader.GetInterfacePtr());
  if (FAILED(hr)) {
    throw std::runtime_error("CreatePixelShader() failed");
  }
}
// Draws indexCount indices with this shader: uploads the matrix constants,
// binds the texture, then issues the draw. Returns false when the
// constant-buffer update fails.
bool TextureShader::Render(ID3D11DeviceContextPtr deviceContext, unsigned indexCount,
  const DirectX::XMMATRIX &worldMatrix, const DirectX::XMMATRIX &viewMatrix, const DirectX::XMMATRIX &projectionMatrix,
  ID3D11ShaderResourceViewPtr texture) {
  // Guard clause: bail out early if the per-frame parameters cannot be set.
  if (!SetShaderParameters(deviceContext, worldMatrix, viewMatrix, projectionMatrix, texture)) {
    return false;
  }

  RenderShader(deviceContext, indexCount);
  return true;
}
// Writes the transposed world/view/projection matrices into the dynamic
// constant buffer, binds it to VS slot 0, and binds the texture to PS slot 0.
// Returns false when mapping the constant buffer fails.
bool TextureShader::SetShaderParameters(ID3D11DeviceContextPtr deviceContext,
  const DirectX::XMMATRIX &worldMatrix, const DirectX::XMMATRIX &viewMatrix, const DirectX::XMMATRIX &projectionMatrix,
  ID3D11ShaderResourceViewPtr texture) {
  D3D11_MAPPED_SUBRESOURCE mappedResource;
  memset(&mappedResource, 0, sizeof(mappedResource));

  // Lock the constant buffer so it can be written to. WRITE_DISCARD hands us
  // a fresh region, so every field must be rewritten below.
  HRESULT result = deviceContext->Map(m_matrixBuffer, 0, D3D11_MAP_WRITE_DISCARD, 0, &mappedResource);
  if (FAILED(result)) {
    return false;
  }

  // Get a pointer to the data in the constant buffer.
  MatrixBufferType *dataPtr = (MatrixBufferType*)mappedResource.pData;

  // Transpose the matrices to prepare them for the shader.
  const DirectX::XMMATRIX worldMatrixShader = DirectX::XMMatrixTranspose(worldMatrix);
  const DirectX::XMMATRIX viewMatrixShader = DirectX::XMMatrixTranspose(viewMatrix);
  const DirectX::XMMATRIX projectionMatrixShader = DirectX::XMMatrixTranspose(projectionMatrix);

  // Copy the matrices into the constant buffer.
  dataPtr->world = worldMatrixShader;
  dataPtr->view = viewMatrixShader;
  dataPtr->projection = projectionMatrixShader;

  // Unlock the constant buffer.
  deviceContext->Unmap(m_matrixBuffer, 0);

  // Set the position of the constant buffer in the vertex shader.
  unsigned bufferNumber = 0;

  // Now set the constant buffer in the vertex shader with the updated values.
  deviceContext->VSSetConstantBuffers(bufferNumber, 1, &m_matrixBuffer.GetInterfacePtr());

  // Set shader texture resource in the pixel shader.
  deviceContext->PSSetShaderResources(0, 1, &texture.GetInterfacePtr());

  return true;
}
// Binds the input layout and both shader stages, then issues the indexed draw.
void TextureShader::RenderShader(ID3D11DeviceContextPtr deviceContext, unsigned indexCount) {
  // Input-assembler and shader-stage bindings.
  deviceContext->IASetInputLayout(m_vertexLayout);
  deviceContext->VSSetShader(m_vertexShader, nullptr, 0);
  deviceContext->PSSetShader(m_pixelShader, nullptr, 0);

  // Sampler binding is currently disabled (no sampler state is created —
  // see the commented-out block in InitializeShader).
  //deviceContext->PSSetSamplers(0, 1, &m_sampleState);

  // Issue the draw call.
  deviceContext->DrawIndexed(indexCount, 0, 0);
}
// Compiles an HLSL source file to bytecode for the given entry point and
// shader model. On failure, forwards the compiler log to the debugger output
// (if any) and throws std::runtime_error.
ID3DBlobPtr CompileShaderFromFile(const WCHAR* szFileName, LPCSTR szEntryPoint, LPCSTR szShaderModel) {
  DWORD dwShaderFlags = D3DCOMPILE_ENABLE_STRICTNESS;

#ifdef _DEBUG
  // Set the D3DCOMPILE_DEBUG flag to embed debug information in the shaders.
  // Setting this flag improves the shader debugging experience, but still allows
  // the shaders to be optimized and to run exactly the way they will run in
  // the release configuration of this program.
  dwShaderFlags |= D3DCOMPILE_DEBUG;

  // Disable optimizations to further improve shader debugging
  dwShaderFlags |= D3DCOMPILE_SKIP_OPTIMIZATION;
#endif

  ID3DBlobPtr outBlob, errorBlob;
  HRESULT hr = D3DCompileFromFile(szFileName, nullptr, nullptr, szEntryPoint, szShaderModel,
                                  dwShaderFlags, 0, &outBlob.GetInterfacePtr(), &errorBlob.GetInterfacePtr());
  if (FAILED(hr)) {
    if (errorBlob) {
      // Forward compiler diagnostics to the debugger output window.
      OutputDebugStringA(reinterpret_cast<const char*>(errorBlob->GetBufferPointer()));
      errorBlob.Release();
    }
    throw std::runtime_error("D3DCompileFromFile() failed");
  }
  return outBlob;
}
} // namespace TileEngine::D3d
|
def ensure_attribute_value(expected_name, expected_namespace):
    """Class decorator verifying `name` and `namespace` class attributes.

    Raises AttributeError at decoration time when either attribute is
    missing or differs from the expected value; otherwise returns the
    class unchanged.
    """
    def decorator(cls):
        actual_name = getattr(cls, 'name', None)
        actual_namespace = getattr(cls, 'namespace', None)
        if actual_name != expected_name or actual_namespace != expected_namespace:
            raise AttributeError(f"Invalid attribute values for class {cls.__name__}")
        return cls
    return decorator


@ensure_attribute_value("primeMeridian", "http://www.opengis.net/gml")
class Meta:
    # GML primeMeridian element metadata; the decorator validates both
    # attributes at import time.
    name = "primeMeridian"
    namespace = "http://www.opengis.net/gml"
# Git remote used for the production release
releaseGit='https://github.com/wudaming1/ttWebsiteP.git'
# Current branch
branch=`git branch | grep -e '^*' | cut -d' ' -f 2`
# Releases may only be made from master
if [ "$branch" == 'master' ]
then
    # Build first
    npm run build:prod
    # In case the build failed, abort here
    test ! -d dist/tt-website && echo 'no dist/tt-website directory' && exit
    cd ./dist
    # tt-website-release is the publishing directory; remove it first if it already exists
    test -d tt-website-release && rm -rf tt-website-release
    # Clone releaseGit into tt-website-release, then empty the working tree
    # (keeping .git) so stale files from previous releases are dropped
    git clone $releaseGit tt-website-release && rm -rf tt-website-release/*
    # Copy everything from tt-website into tt-website-release (source maps excluded)
    # cp -R tt-website/* tt-website-release/
    rsync -av -e ssh --exclude='*.js.map' tt-website/* tt-website-release/
    cd tt-website-release
    # Standard commit/push and tagging
    git add . && git commit -m 'Auto-commit' && git push
    # tag
    git tag 'www/'$(date +"%Y-%m-%d.%H%M") && git push --tags
else
    echo 'please merge to master!'
fi
|
<filename>src/maze.c
/*
* Maze Thread
* | - --thread|-t 5 (RANGE = RAND(T*10))
* | - --help|-h
*/
// MAZE HEADER
#include "maze.h"
#include "racing.h"
#include "rrand.h"
#include <ctype.h>
#include <getopt.h>
#include <pthread.h>
#include <stdio.h>
#include <stdlib.h>
/* Print message STR and terminate with exit status 1.
 * NOTE(review): STR is passed straight to printf() as the format string,
 * so it must never contain untrusted '%' sequences. */
#define die(STR) \
  { \
    printf(STR); \
    exit(1); \
  }

/* Print the usage/help text, then exit via die(). */
void
help()
{
  printf("Welcome to Maze Thread Race 2020\n"
         "-t\t- number of thread.. or runners(minimum 4) each one have "
         "$endline/2 chances to find the value\n"
         "-h\t- this message output\n");
  die("life is short, try u best or die like the rest\n");
}
/* Print the race banner: number of runners and the finish line.
 * Takes the race descriptor by value (read-only).
 * FIX: corrected the "Good look for all" typo to "Good luck for all". */
void
telao(struct racebj tbj)
{
  printf("Welcome to Maze Thread Runner 2020\n"
         "Competitors: %d\n"
         "Endline: %d\n"
         "Good luck for all\n",
         tbj.runners,
         tbj.endline);
}
/* Parse command-line options into *raceopt.
 *   -h : request help
 *   -t : number of runner threads, clamped to [MIN_THREAD, MAX_THREAD]
 * Returns 0 on success, 1 when help should be shown (-h given, unknown
 * option, or missing argument).
 * FIX: the -t branch previously parsed optarg three times (one atoi() and
 * two strtol() calls); it now parses once and clamps — same result. */
int
orgarg(int argc, char** argv, struct racebj* raceopt)
{
  int copt, ret = 0;
  while ((copt = getopt(argc, argv, "ht:")) != -1) {
    switch (copt) {
      case 'h':
        ret = 1;
        break;
      case 't': {
        /* Parse once, then clamp into the allowed range. */
        long n = strtol(optarg, NULL, 10);
        if (n < MIN_THREAD)
          n = MIN_THREAD;
        else if (n > MAX_THREAD)
          n = MAX_THREAD;
        raceopt->runners = n;
        break;
      }
      case '?':
        ret = 1;
        break;
      default:
        ret = 1;
    }
  }
  return ret;
}
/* Entry point: parse options, compute the race distance via meters(), print
 * the banner, then spawn one thread per runner and wait for all of them. */
int
main(int argc, char* argv[])
{
  struct racebj race;
  if (orgarg(argc, argv, &race) != 0 || argc < 2) // Organize the arguments
    help();
  if (meters(&race) != 0)
    die("Impossible to find meters number\n");
  telao(race);
  // prepare runners
  pthread_t tid[race.runners];
  race.end = 0;
  short int i = 0;
  for (; i < race.runners; i++) {
    /* NOTE(review): every thread receives a pointer to the SAME race struct
     * while race.player keeps being overwritten by this loop — a data race;
     * threads may observe the wrong player id. Consider one racebj copy per
     * thread. TODO confirm what rungo() expects. */
    race.player = i;
    pthread_create(&tid[i], NULL, rungo, &race);
  }
  for (i = 0; i < race.runners; i++) {
    pthread_join(tid[i], NULL);
  }
  return 0;
}
|
#!/bin/bash

# Change into the given data directory, or print usage and exit.
if [ -d "$1" ]; then
  cd "$1"
else
  echo "Usage: $0 <datadir>" >&2
  echo "Removes obsolete Spoomy database files" >&2
  exit 1
fi

# Detect the datadir layout generation from the marker files each version
# uses; the last matching test wins (levels are checked oldest to newest).
LEVEL=0
if [ -f wallet.dat -a -f addr.dat -a -f blkindex.dat -a -f blk0001.dat ]; then LEVEL=1; fi
if [ -f wallet.dat -a -f peers.dat -a -f blkindex.dat -a -f blk0001.dat ]; then LEVEL=2; fi
if [ -f wallet.dat -a -f peers.dat -a -f coins/CURRENT -a -f blktree/CURRENT -a -f blocks/blk00000.dat ]; then LEVEL=3; fi
if [ -f wallet.dat -a -f peers.dat -a -f chainstate/CURRENT -a -f blocks/index/CURRENT -a -f blocks/blk00000.dat ]; then LEVEL=4; fi

case $LEVEL in
  0)
    echo "Error: no Spoomy datadir detected."
    exit 1
    ;;
  1)
    echo "Detected old Spoomy datadir (before 0.7)."
    echo "Nothing to do."
    exit 0
    ;;
  2)
    echo "Detected Spoomy 0.7 datadir."
    ;;
  3)
    echo "Detected Spoomy pre-0.8 datadir."
    ;;
  4)
    echo "Detected Spoomy 0.8 datadir."
    ;;
esac

# Collect files/directories that are obsolete for the detected level.
# Note: the blk????.dat glob is expanded unquoted when $FILES is used below.
FILES=""
DIRS=""
if [ $LEVEL -ge 3 ]; then FILES=$(echo $FILES blk????.dat blkindex.dat); fi
if [ $LEVEL -ge 2 ]; then FILES=$(echo $FILES addr.dat); fi
if [ $LEVEL -ge 4 ]; then DIRS=$(echo $DIRS coins blktree); fi

# Delete obsolete files.
for FILE in $FILES; do
  if [ -f $FILE ]; then
    echo "Deleting: $FILE"
    rm -f $FILE
  fi
done

# Delete obsolete directories.
for DIR in $DIRS; do
  if [ -d $DIR ]; then
    echo "Deleting: $DIR/"
    rm -rf $DIR
  fi
done

echo "Done."
|
<reponame>haojile/JavaFxToolDemo<filename>src/main/java/com/xwintop/xJavaFxTool/newui/NewLauncherService.java
package com.xwintop.xJavaFxTool.newui;
import com.xwintop.xJavaFxTool.AppException;
import com.xwintop.xJavaFxTool.model.PluginJarInfo;
import com.xwintop.xJavaFxTool.plugin.PluginLoader;
import javafx.scene.control.Tab;
import javafx.scene.control.TabPane;
import lombok.extern.slf4j.Slf4j;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
/**
 * Plugin-handling logic for the new UI (singleton).
 */
@Slf4j
public class NewLauncherService {

    private static final NewLauncherService instance = new NewLauncherService();

    private NewLauncherController newLauncherController;
    // Maps each open plugin tab to the jar it was loaded from, so loading the
    // same plugin again just re-selects its existing tab.
    private Map<Tab, PluginJarInfo> jarInfoMap = new HashMap<>();
    private PluginItemController currentPluginItem;

    public static NewLauncherService getInstance() {
        return instance;
    }

    private NewLauncherService() {
    }

    public void setController(NewLauncherController newLauncherController) {
        this.newLauncherController = newLauncherController;
    }

    public void setCurrentPluginItem(PluginItemController currentPluginItem) {
        this.currentPluginItem = currentPluginItem;
    }

    public PluginItemController getCurrentPluginItem() {
        return currentPluginItem;
    }

    /**
     * Opens the given plugin in a tab. If it is already open, its existing
     * tab is selected; otherwise the plugin is loaded according to its
     * controllerType ("Node" or "WebView") and the new tab is registered.
     *
     * @throws AppException when the controllerType is not supported
     */
    public void loadPlugin(PluginJarInfo pluginJarInfo) {
        log.info("加载插件 {}: {}", pluginJarInfo.getName(), pluginJarInfo.getFile().getAbsolutePath());
        TabPane tabPane = this.newLauncherController.getTabPane();

        // Already loaded? Re-select the existing tab instead of loading twice.
        // NOTE(review): identity comparison (==) assumes the same
        // PluginJarInfo instance is passed on repeated calls — confirm with callers.
        for (Entry<Tab, PluginJarInfo> entry : jarInfoMap.entrySet()) {
            if (entry.getValue() == pluginJarInfo) {
                tabPane.getSelectionModel().select(entry.getKey());
                return;
            }
        }

        Tab tab;
        String controllerType = pluginJarInfo.getControllerType();
        if (controllerType.equals("Node")) {
            tab = PluginLoader.loadIsolatedPluginAsTab(pluginJarInfo, tabPane);
        } else if (controllerType.equals("WebView")) {
            tab = PluginLoader.loadWebViewAsTab(pluginJarInfo, tabPane);
        } else {
            throw new AppException("找不到 controllerType=" + controllerType + " 的加载方式");
        }

        if (tab != null) {
            // Drop the mapping once the user closes the tab.
            tab.setOnClosed(event -> this.jarInfoMap.remove(tab));
            jarInfoMap.put(tab, pluginJarInfo);
        }
    }
}
|
#!/bin/bash

# The @raycast.* lines below are machine-read metadata — do not edit casually.
# Required parameters:
# @raycast.schemaVersion 1
# @raycast.title Search in DuckDuckGo
# @raycast.mode silent

# Optional parameters:
# @raycast.icon images/duck-duck-go.png
# @raycast.packageName Web Searches
# @raycast.argument1 { "type": "text", "placeholder": "query" }

# Replace spaces in the query with %20 and open the search in the default browser.
open "https://duckduckgo.com/?q=${1// /%20}"
|
class Scenario < ApplicationRecord
  # ActiveRecord scopes used when migrating scenarios with new user values.
  module Migratable
    # Public: Scenarios which should receive new user values when the ETSource
    # data requires changes to existing scenarios.
    #
    # See migratable_since
    #
    # Returns an ActiveRecord::Relation
    def migratable
      # Default window: anything updated within the last month (protected
      # scenarios are always included by migratable_since).
      migratable_since(1.month.ago.to_date)
    end

    # Public: Scenarios which should receive new user values when the ETSource
    # data requires changes to existing scenarios.
    #
    # All protected scenarios are included, and any unprotected scenarios updated
    # on or after the `since` date or time. Test scenarios and Mechanical Turk
    # scenarios are excluded.
    #
    # Returns an ActiveRecord::Relation
    def migratable_since(since)
      # COALESCE guards against NULL source/title columns; the final condition
      # skips scenarios whose user_values is NULL or an empty serialized hash.
      Scenario.where(
        '(protected = ? OR updated_at >= ?) AND ' \
        'COALESCE(source, "") != ? AND ' \
        'COALESCE(title, "") != ? AND ' \
        'user_values IS NOT NULL AND user_values != ?',
        true, since, 'Mechanical Turk', 'test',
        ActiveSupport::HashWithIndifferentAccess.new.to_yaml
      )
    end
  end
end
|
#!/usr/bin/env bash

CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
. "$CURDIR"/../shell_config.sh

# Pattern repeated for every engine below: insert rows, DELETE them with a
# synchronous mutation, then OPTIMIZE ... FINAL and SELECT (expected empty).
# remove_empty_parts=0 keeps emptied parts around so the final merge is exercised.

# Plain MergeTree
${CLICKHOUSE_CLIENT} --query="DROP TABLE IF EXISTS ordinary_00682"
${CLICKHOUSE_CLIENT} --query="CREATE TABLE ordinary_00682(k UInt32) ENGINE MergeTree ORDER BY k SETTINGS remove_empty_parts=0"
${CLICKHOUSE_CLIENT} --query="INSERT INTO ordinary_00682(k) VALUES (1)"
${CLICKHOUSE_CLIENT} --query="INSERT INTO ordinary_00682(k) VALUES (1)"
${CLICKHOUSE_CLIENT} --query="ALTER TABLE ordinary_00682 DELETE WHERE k = 1" --mutations_sync=1
${CLICKHOUSE_CLIENT} --query="OPTIMIZE TABLE ordinary_00682 PARTITION tuple() FINAL"
${CLICKHOUSE_CLIENT} --query="SELECT * FROM ordinary_00682"
${CLICKHOUSE_CLIENT} --query="DROP TABLE ordinary_00682"

# MergeTree with the vertical merge algorithm forced on
${CLICKHOUSE_CLIENT} --query="SELECT '*** Vertical merge ***'"
${CLICKHOUSE_CLIENT} --query="DROP TABLE IF EXISTS vertical_00682"
${CLICKHOUSE_CLIENT} --query="CREATE TABLE vertical_00682(k UInt32, v UInt32) ENGINE MergeTree ORDER BY k \
    SETTINGS enable_vertical_merge_algorithm=1, remove_empty_parts=0, \
             vertical_merge_algorithm_min_rows_to_activate=0, \
             vertical_merge_algorithm_min_columns_to_activate=0"
${CLICKHOUSE_CLIENT} --query="INSERT INTO vertical_00682(k, v) VALUES (1, 1)"
${CLICKHOUSE_CLIENT} --query="INSERT INTO vertical_00682(k, v) VALUES (2, 2)"
${CLICKHOUSE_CLIENT} --query="ALTER TABLE vertical_00682 DELETE WHERE k = 1" --mutations_sync=1
${CLICKHOUSE_CLIENT} --query="OPTIMIZE TABLE vertical_00682 PARTITION tuple() FINAL"
${CLICKHOUSE_CLIENT} --query="SELECT * FROM vertical_00682"
${CLICKHOUSE_CLIENT} --query="DROP TABLE vertical_00682"

# SummingMergeTree
${CLICKHOUSE_CLIENT} --query="DROP TABLE IF EXISTS summing_00682"
${CLICKHOUSE_CLIENT} --query="CREATE TABLE summing_00682(k UInt32, v UInt32) ENGINE SummingMergeTree ORDER BY k SETTINGS remove_empty_parts=0"
${CLICKHOUSE_CLIENT} --query="INSERT INTO summing_00682(k, v) VALUES (1, 1)"
${CLICKHOUSE_CLIENT} --query="INSERT INTO summing_00682(k, v) VALUES (1, 2)"
${CLICKHOUSE_CLIENT} --query="ALTER TABLE summing_00682 DELETE WHERE k = 1" --mutations_sync=1
${CLICKHOUSE_CLIENT} --query="OPTIMIZE TABLE summing_00682 PARTITION tuple() FINAL"
${CLICKHOUSE_CLIENT} --query="SELECT * FROM summing_00682"
${CLICKHOUSE_CLIENT} --query="DROP TABLE summing_00682"

# AggregatingMergeTree
${CLICKHOUSE_CLIENT} --query="DROP TABLE IF EXISTS aggregating_00682"
${CLICKHOUSE_CLIENT} --query="CREATE TABLE aggregating_00682(k UInt32, v AggregateFunction(count)) ENGINE AggregatingMergeTree ORDER BY k SETTINGS remove_empty_parts=0"
${CLICKHOUSE_CLIENT} --query="INSERT INTO aggregating_00682(k) VALUES (1)"
${CLICKHOUSE_CLIENT} --query="INSERT INTO aggregating_00682(k) VALUES (1)"
${CLICKHOUSE_CLIENT} --query="ALTER TABLE aggregating_00682 DELETE WHERE k = 1" --mutations_sync=1
${CLICKHOUSE_CLIENT} --query="OPTIMIZE TABLE aggregating_00682 PARTITION tuple() FINAL"
${CLICKHOUSE_CLIENT} --query="SELECT * FROM aggregating_00682"
${CLICKHOUSE_CLIENT} --query="DROP TABLE aggregating_00682"

# ReplacingMergeTree
${CLICKHOUSE_CLIENT} --query="DROP TABLE IF EXISTS replacing_00682"
${CLICKHOUSE_CLIENT} --query="CREATE TABLE replacing_00682(k UInt32, v String) ENGINE ReplacingMergeTree ORDER BY k SETTINGS remove_empty_parts=0"
${CLICKHOUSE_CLIENT} --query="INSERT INTO replacing_00682(k, v) VALUES (1, 'a')"
${CLICKHOUSE_CLIENT} --query="INSERT INTO replacing_00682(k, v) VALUES (1, 'b')"
${CLICKHOUSE_CLIENT} --query="ALTER TABLE replacing_00682 DELETE WHERE k = 1" --mutations_sync=1
${CLICKHOUSE_CLIENT} --query="OPTIMIZE TABLE replacing_00682 PARTITION tuple() FINAL"
${CLICKHOUSE_CLIENT} --query="SELECT * FROM replacing_00682"
${CLICKHOUSE_CLIENT} --query="DROP TABLE replacing_00682"

# CollapsingMergeTree
${CLICKHOUSE_CLIENT} --query="DROP TABLE IF EXISTS collapsing_00682"
${CLICKHOUSE_CLIENT} --query="CREATE TABLE collapsing_00682(k UInt32, v String, s Int8) ENGINE CollapsingMergeTree(s) ORDER BY k SETTINGS remove_empty_parts=0"
${CLICKHOUSE_CLIENT} --query="INSERT INTO collapsing_00682(k, v, s) VALUES (1, 'a', 1)"
${CLICKHOUSE_CLIENT} --query="INSERT INTO collapsing_00682(k, v, s) VALUES (2, 'b', 1)"
${CLICKHOUSE_CLIENT} --query="ALTER TABLE collapsing_00682 DELETE WHERE k IN (1, 2)" --mutations_sync=1
${CLICKHOUSE_CLIENT} --query="OPTIMIZE TABLE collapsing_00682 PARTITION tuple() FINAL"
${CLICKHOUSE_CLIENT} --query="SELECT * FROM collapsing_00682"
${CLICKHOUSE_CLIENT} --query="DROP TABLE collapsing_00682"

# VersionedCollapsingMergeTree
${CLICKHOUSE_CLIENT} --query="DROP TABLE IF EXISTS versioned_collapsing_00682"
${CLICKHOUSE_CLIENT} --query="CREATE TABLE versioned_collapsing_00682(k UInt32, val String, ver UInt32, s Int8) ENGINE VersionedCollapsingMergeTree(s, ver) ORDER BY k SETTINGS remove_empty_parts=0"
${CLICKHOUSE_CLIENT} --query="INSERT INTO versioned_collapsing_00682(k, val, ver, s) VALUES (1, 'a', 0, 1)"
${CLICKHOUSE_CLIENT} --query="INSERT INTO versioned_collapsing_00682(k, val, ver, s) VALUES (2, 'b', 0, 1)"
${CLICKHOUSE_CLIENT} --query="ALTER TABLE versioned_collapsing_00682 DELETE WHERE k IN (1, 2)" --mutations_sync=1
${CLICKHOUSE_CLIENT} --query="OPTIMIZE TABLE versioned_collapsing_00682 PARTITION tuple() FINAL"
${CLICKHOUSE_CLIENT} --query="SELECT * FROM versioned_collapsing_00682"
${CLICKHOUSE_CLIENT} --query="DROP TABLE versioned_collapsing_00682"
|
programming language: True
interpreted: True
high-level: True
general-purpose: True
OR
programming language: True
interpreted: True
compiled: False
high-level: True
general-purpose: True |
#!/bin/bash
#PBS -N "rfm_blast_tblast_example_job"
#PBS -o rfm_blast_tblast_example_job.out
#PBS -e rfm_blast_tblast_example_job.err
#PBS -l walltime=0:10:0
#PBS -A PZS0710
#PBS -l nodes=1:ppn=28
#PBS -q debug

# Run from the directory the job was submitted from.
cd $PBS_O_WORKDIR

module load blast-database/2018-08
module load blast
# Check module environment
module list
echo MODULEPATH=$MODULEPATH 1>&2

#
# Copy input data to the fast file system
#
cp 100.fasta $TMPDIR
cd $TMPDIR

#
# Run tblastn with 16 threads
# compares a protein query sequence against a nucleotide sequence database
# dynamically translated in all six reading frames (both strands).
# NOTE(review): the job requests ppn=28 but only 16 threads are used —
# confirm whether -num_threads should match ppn.
#
tblastn -db nt -query 100.fasta -num_threads 16 -out 100_tblastn.out

#
# Now, copy data (or move) back once the simulation has completed
#
cp 100_tblastn.out $PBS_O_WORKDIR/
#!/bin/bash
# Directory containing this script; used to locate sibling helper scripts.
OCTOOLSBIN=$(dirname $0)
# =================================================================================================================
# Usage:
# -----------------------------------------------------------------------------------------------------------------
# Print the command-line help text to stdout.
usage() {
cat <<-EOF
Tool to generate OpenShift template parameters files in expected places (project or local) for BC Gov applications.
Usage:
${0##*/} [options]
Options:
========
-f force generation even if the file already exists
EOF
}
# -----------------------------------------------------------------------------------------------------------------
# Initialization:
# -----------------------------------------------------------------------------------------------------------------
# =================================================================================================================
# Process the local command line arguments and pass everything else along.
# - The 'getopts' options string must start with ':' for this to work.
# -----------------------------------------------------------------------------------------------------------------
# Walk every argument: consume the options this script knows (-f), and
# accumulate everything else in ${pass} so it can be forwarded untouched.
while [ ${OPTIND} -le $# ]; do
  if getopts :f FLAG; then
    case ${FLAG} in
      # List of local options:
      f ) FORCE=1 ;;

      # Pass unrecognized options ...
      \?)
        pass+=" -${OPTARG}"
        ;;
    esac
  else
    # Pass unrecognized arguments ...
    pass+=" ${!OPTIND}"
    let OPTIND++
  fi
done

# Pass the unrecognized arguments along for further processing ...
shift $((OPTIND-1))
set -- "$@" $(echo -e "${pass}" | sed -e 's/^[[:space:]]*//')
# =================================================================================================================

# Pull in shared settings and helper functions when they are available.
if [ -f ${OCTOOLSBIN}/settings.sh ]; then
  . ${OCTOOLSBIN}/settings.sh
fi

if [ -f ${OCTOOLSBIN}/ocFunctions.inc ]; then
  . ${OCTOOLSBIN}/ocFunctions.inc
fi

# What types of files to generate - regular+dev/test/prod or local
if [ ! -z "${APPLY_LOCAL_SETTINGS}" ]; then
  PARM_TYPES="l"
else
  PARM_TYPES="r d t p"
fi
# -----------------------------------------------------------------------------------------------------------------
# Function(s):
# -----------------------------------------------------------------------------------------------------------------
# Decide whether parameter file generation should be skipped for a given type.
# Args:
#   1: file generation type (r|d|t|p|l)
#   2: non-empty when the template is a build config
# Returns 0 (skip) for environment-specific types (d/t/p) on build configs,
# 1 (generate) otherwise.
# Fix: the case statement previously inspected the caller's global ${type}
# instead of the local ${_type}, only working by accident because the main
# loop's variable happened to be named "type".
skipParameterFileGeneration () {
  _type=${1}
  _isBuildConfig=${2}
  if [ -z "${_type}" ]; then
    echo -e \\n"skipParameterFileGeneration; Missing parameter - file generation type"\\n
    exit 1
  fi

  unset _skip
  case ${_type} in
    [dtp] ) # Dev, Test, and Prod files are never generated for build configs
      if [ ! -z "${_isBuildConfig}" ]; then
        _skip=1
      fi
      ;;
  esac

  if [ -z "${_skip}" ]; then
    return 1
  else
    return 0
  fi
}
# Build the command used to comment/uncomment lines of a generated parameter
# file.  Echoes either "cat" (regular files - keep everything as-is) or a
# "sed" command that comments out every line and then selectively uncomments
# the settings the user is expected to edit for the given type.
getParameterFileCommentFilter () {
  _type=${1}
  if [ -z "${_type}" ]; then
    echo -e \\n"getParameterFileCommentFilter; Missing parameter!"\\n
    exit 1
  fi

  # Base rule: comment out everything ...
  _rules="s~^~#~;"
  case ${_type} in
    r ) # Regular file - no filtering at all
      echo "cat"
      return
      ;;
    [dtp] ) # Dev, Test, and Prod files - re-enable environment specific settings
      for _key in TAG_NAME APPLICATION_DOMAIN; do
        _rules="${_rules}/${_key}/s~^#~~;"
      done
      ;;
    l ) # Local file - re-enable the common local overrides
      for _key in GIT_REPO_URL GIT_REF MEMORY_LIMIT MEMORY_REQUEST CPU_LIMIT CPU_REQUEST; do
        _rules="${_rules}/${_key}/s~^#~~;"
      done
      ;;
    # Any other type keeps only the comment-everything rule.
  esac
  echo "sed ${_rules}"
}
# Resolve the output path for a generated parameter file.
# Args:
#   1: file generation type (r|d|t|p|l)
#   2: base file name (template path without extension)
# A non-default PROFILE is inserted before the type/environment suffix.
getParameterFileOutputPath () {
  _type=${1}
  _fileName=${2}
  if [ -z "${_type}" ] || [ -z "${_fileName}" ]; then
    echo -e \\n"getParameterFileOutputPath; Missing parameter!"\\n
    exit 1
  fi

  # Qualify the base name with the profile, unless it is the default one.
  _outputFilename="${_fileName}"
  if [ ! -z "${PROFILE}" ] && [ "${PROFILE}" != "${_defaultProfileName}" ]; then
    _outputFilename="${_outputFilename}.${PROFILE}"
  fi

  case ${_type} in
    r ) _output=${_outputFilename}.param ;;          # Regular file
    d ) _output=${_outputFilename}.${DEV}.param ;;   # Dev file
    t ) _output=${_outputFilename}.${TEST}.param ;;  # Test file
    p ) _output=${_outputFilename}.${PROD}.param ;;  # Prod file
    l ) _output=${_outputFilename}.local.param ;;    # Local file
    * ) # unrecognized option
      echoError "\ngetParameterFileOutputPath; Invalid type option.\n"
      ;;
  esac
  echo ${_output}
}
# Build a sed command that rewrites key parameter values (NAME, namespaces,
# TAG_NAME, APPLICATION_DOMAIN) for the target environment.
# Args:
#   1: component name (currently unused beyond validation)
#   2: file generation type (r|d|t|p|l)
#   3: template name (basename, without extension)
# Relies on globals: DEV, TEST, PROD, TOOLS, PROJECT_NAMESPACE,
# APPLICATION_DOMAIN_POSTFIX.
generateParameterFilter (){
  _component=${1}
  _type=${2}
  _templateName=${3}
  if [ -z "${_component}" ] ||[ -z "${_type}" ] || [ -z "${_templateName}" ]; then
    echo -e \\n"generateParameterFilter; Missing parameter!"\\n
    exit 1
  fi

  _parameterFilters=""
  # Default environment is DEV; note this also applies to the 'r' and 'l'
  # types, which fall through the case below unchanged.
  _environment=${DEV}
  case ${_type} in
    # r ) # Regular file
    #   _output=${_outputPrefix}$( basename ${_fileName}.param )
    #   ;;
    d ) # Dev File
      _environment=${DEV}
      ;;
    t ) # Test File
      _environment=${TEST}
      ;;
    p ) # Prod
      _environment=${PROD}
      ;;
  esac

  # Derive the application name by stripping a trailing -build/-deploy suffix.
  _name=$(basename "${_templateName}")
  _name=$(echo ${_name} | sed 's~\(^.*\)-\(build\|deploy\)$~\1~')
  _parameterFilters="${_parameterFilters}s~\(^NAME=\).*$~\1${_name}~;"
  # Image namespaces always point at the TOOLS project.
  _parameterFilters="${_parameterFilters}s~\(^\(IMAGE_NAMESPACE\|SOURCE_IMAGE_NAMESPACE\)=\).*$~\1${TOOLS}~;"

  if [ ! -z "${_environment}" ]; then
    _parameterFilters="${_parameterFilters}s~\(^TAG_NAME=\).*$~\1${_environment}~;"
    # Route host: <name>-<namespace>-<env><postfix>
    _appDomain="${_name}-${PROJECT_NAMESPACE}-${_environment}${APPLICATION_DOMAIN_POSTFIX}"
    _parameterFilters="${_parameterFilters}s~\(^APPLICATION_DOMAIN=\).*$~\1${_appDomain}~;"
  fi

  echo "sed ${_parameterFilters}"
}
# Generate one parameter file from a template.
# Args:
#   1: component name (written into the file header)
#   2: template path
#   3: output path
#   4: non-empty to overwrite an existing output file
#   5: comment filter command (see getParameterFileCommentFilter)
#   6: parameter filter command (see generateParameterFilter)
# An existing output file is only overwritten when the force flag is set;
# otherwise FORCENOTE is exported so the caller can print a hint at the end.
generateParameterFile (){
  _component=${1}
  _template=${2}
  _output=${3}
  _force=${4}
  _commentFilter=${5}
  _parameterFilter=${6}
  if [ -z "${_component}" ] || [ -z "${_template}" ]; then
    # Fix: the error message previously referenced the wrong function name
    # ("generatePipelineParameterFile").
    echo -e \\n"generateParameterFile; Missing parameter!"\\n
    exit 1
  fi

  if [ -f "${_template}" ]; then
    if [ ! -f "${_output}" ] || [ ! -z "${_force}" ]; then
      if [ -z "${_force}" ]; then
        echo -e "Generating parameter file for ${_template}; ${_output} ..."\\n
      else
        echoWarning "Overwriting the parameter file for ${_template}; ${_output} ...\n"
      fi

      # Generate the parameter file ...
      echo -e "#=========================================================" > ${_output}
      echo -e "# OpenShift template parameters for:" >> ${_output}
      echo -e "# Component: ${_component}" >> ${_output}
      echo -e "# Template File: ${_template}" >> ${_output}
      echo -e "#=========================================================" >> ${_output}
      appendParametersToFile "${_template}" "${_output}" "${_commentFilter}" "${_parameterFilter}"
      exitOnError
    else
      # Fix: typo "exisits" -> "exists" in the user-facing warning.
      echoWarning "The parameter file for ${_template} already exists and will not be overwritten; ${_output} ...\n"
      export FORCENOTE=1
    fi
  else
    echoError "Unable to generate parameter file for ${_template}. The file does not exist."
  fi
}
# =================================================================================================================
# =================================================================================================================
# Main:
# -----------------------------------------------------------------------------------------------------------------
# For each component: find its config templates and emit one parameter file
# per applicable type (r/d/t/p or l), honouring -c (COMP) filtering and -f (FORCE).
for component in ${components}; do
  if [ ! -z "${COMP}" ] && [ ! "${component}" = "." ] && [ ! "${COMP}" = ${component} ]; then
    # Only process named component if -c option specified
    continue
  fi

  echo
  echo "================================================================================================================="
  echo "Processing templates for ${component}"
  echo "-----------------------------------------------------------------------------------------------------------------"
  _configTemplates=$(getConfigTemplates $(getTemplateDir ${component}))
  # echo "Configuration templates:"
  # for configTemplate in ${_configTemplates}; do
  #   echo ${configTemplate}
  # done
  # exit 1

  # Iterate through each file and generate the params files
  for file in ${_configTemplates}; do
    # Don't generate dev/test/prod param files for Build templates
    TEMPLATE=${file}
    if isBuildConfig ${TEMPLATE}; then
      _isBuildConfig=1
    else
      unset _isBuildConfig
    fi

    for type in ${PARM_TYPES}; do
      # Don't create environment specific param files for Build Templates
      if ! skipParameterFileGeneration "${type}" "${_isBuildConfig}"; then
        # Assemble the filters and destination path, then write the file.
        _commentFilter=$(getParameterFileCommentFilter "${type}")
        _output=$(getParameterFileOutputPath "${type}" "${file%.*}")
        _parameterFilter=$(generateParameterFilter "${component}" "${type}" "$(getFilenameWithoutExt ${file})")
        # echoWarning "file: ${file}"
        # echoWarning "file wo/ext: ${file%.*}"
        # echoWarning "_output: ${_output}"
        # echoWarning "_commentFilter: ${_commentFilter}"
        # echoWarning "_parameterFilter: ${_parameterFilter}"
        generateParameterFile "${component}" "${TEMPLATE}" "${_output}" "${FORCE}" "${_commentFilter}" "${_parameterFilter}"
        exitOnError
      else
        # Remove `>/dev/null` to enable this message.
        # It's useful for troubleshooting, but annoying otherwise.
        echo \
          "Skipping environment specific, environmentType '${type}', parameter file generation for build template; ${file} ..." \
          >/dev/null
      fi
    done
  done
  echo "================================================================================================================="
done
# Print informational messages ...
# Fix: typo "parmeters" -> "parameters" in the user-facing warning.
if [ ! -z "${APPLY_LOCAL_SETTINGS}" ] && [ -z "${FORCENOTE}" ]; then
  echoWarning "\nLocal files generated with parameters commented out. Edit the files to uncomment and set parameters as needed.\n"
fi

if [ ! -z "${FORCENOTE}" ]; then
  echoWarning "\nOne or more parameter files to be generated already exist and were not overwritten.\nUse the -f option to force the overwriting of existing files.\n"
  unset FORCENOTE
fi
# ================================================================================================================= |
<gh_stars>100-1000
// Copyright (C) 2019. Huawei Technologies Co., Ltd. All rights reserved.
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
// WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
// COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#include "tensor_computing.h"
#include "ut_util.h"
// Exercises the CPU tile operator: builds a random NCHW tensor from the
// command-line sizes, tiles it along the requested axis, and checks that the
// output element count grew by the repeat factor.
// argv layout: [1]=n [2]=c [3]=h [4]=w [5]=axis [6]=repeats
// NOTE(review): argc is never validated, so a missing argument would crash in
// atoi — confirm the harness always supplies six arguments.
// NOTE(review): the buffer returned by ut_input_v is never released here;
// verify whether the ut_util allocator expects the caller to free it.
int tileTest(int argc, char **argv, DataType dt)
{
    // input dim
    U32 in = atoi(argv[1]);
    U32 ic = atoi(argv[2]);
    U32 ih = atoi(argv[3]);
    U32 iw = atoi(argv[4]);
    // input axis and tiles
    TileParamSpec tileParamSpec;
    tileParamSpec.axis = atoi(argv[5]);
    tileParamSpec.dimsSize = 1;
    tileParamSpec.repeatsInfo[0] = atoi(argv[6]);

    // set input: random NCHW tensor copied into a CPU-backed Tensor
    DataFormat df = DF_NCHW;
    TensorDesc inDesc = tensor4df(dt, df, in, ic, ih, iw);
    U32 len = tensorNumElements(inDesc);
    U8 *input = ut_input_v(len, dt, UT_INIT_RANDOM);
    Tensor inputTensor = Tensor::alloc_sized<CPUMem>(inDesc);
    memcpy(get_ptr_from_tensor(inputTensor, CPU_GENERAL), input, inputTensor.bytes());

    // set output: infer the tiled shape, then allocate
    Tensor outputTensor;
    CHECK_STATUS(
        tile_infer_output_size(&inputTensor, tileParamSpec, &outputTensor, &UT_CPU_ARCHINFO));
    outputTensor.alloc();

    if (UT_CHECK) {
        Tensor tmpTensor;
        CHECK_STATUS(tile(inputTensor, tileParamSpec, tmpTensor, outputTensor, &UT_CPU_ARCHINFO));
        // Tiling once along one axis multiplies the element count by the repeat factor.
        CHECK_REQUIREMENT(outputTensor.length() == (len * tileParamSpec.repeatsInfo[0]));
    }
    return 0;
}
// Entry point: run the tile test for every floating-point precision the
// build enables (FP16 and/or FP32).
int main(int argc, char **argv)
{
#ifdef _USE_FP16
    tileTest(argc, argv, DT_F16);
#endif
#ifdef _USE_FP32
    tileTest(argc, argv, DT_F32);
#endif
    return 0;
}
|
package mastermind.server.dispatchers;
import mastermind.controllers.GameController;
/**
 * Dispatcher that answers a "get whites" request: it reads a combination
 * position from the socket and replies with the white-peg count for it.
 */
public class GetWhitesDispatcher extends Dispatcher {

    public GetWhitesDispatcher(GameController gameController) {
        super(gameController);
    }

    @Override
    public void dispatch() {
        final GameController controller = (GameController) acceptorController;
        final int position = tcpip.receiveInt();
        tcpip.send(controller.getWhites(position));
    }
}
|
#@IgnoreInspection BashAddShebang
# Upstream repository and entry source file for the bash-get-options helper.
origin='https://github.com/hanovruslan/bash-get-options.git'
source='src/src.sh'
/**
 * Static helpers for reading and writing view-style properties on an object.
 * The bodies are placeholders; the real implementations would cast {@code obj}
 * to the concrete type and delegate to its accessors.
 */
public class Setter {

    /** Sets the opacity property (0.0-1.0) of the given object. */
    public static void setOpacityProp(Object obj, float val) {
        // Implementation to set the opacity property of the object
        // Example: ((SomeObject) obj).setOpacity(val);
    }

    /** Sets the visibility property of the given object. */
    public static void setVisibilityProp(Object obj, int val) {
        // Implementation to set the visibility property of the object
        // Example: ((SomeObject) obj).setVisibility(val);
    }

    /** Retrieves the visibility property of the given object. */
    public static int getVisibility(Object obj) {
        // Implementation to retrieve the visibility property of the object
        // Example: return ((SomeObject) obj).getVisibility();
        return 0; // Placeholder return value
    }
}
public class Main {
public static void main(String[] args) {
// Usage of the Setter class
Setter setter = new Setter();
setter.setOpacityVal(0.5f);
setter.setVisibilityVal(1);
int visibility = setter.getVisibilityVal();
System.out.println("Visibility: " + visibility);
}
} |
<reponame>szab100/secmgr
// Copyright 2018 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.enterprise.secmgr.common;
import static com.google.common.truth.Truth.assertThat;
import com.google.enterprise.policychecker.AclUtil;
import com.google.enterprise.policychecker.GroupMembersMap;
import com.google.enterprise.secmgr.identity.AuthnPrincipal;
import com.google.enterprise.supergsa.security.AclGroup;
import com.google.enterprise.supergsa.security.AclPrincipal;
import junit.framework.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
 * Unit tests for {@link MemberToGroupsResolverMap}.
 */
@RunWith(JUnit4.class)
public class MemberToGroupsResolverMapTest {
  // Handy AclPrincipals that can be used by several test cases.
  private static final AclPrincipal ALICE = AclUtil.userNameToAclPrincipal("alice");
  private static final AclPrincipal BOB = AclUtil.userNameToAclPrincipal("bob");
  private static final AclPrincipal BOB_CASE_INSENSITIVE =
      AclUtil.userNameToAclPrincipalCaseInsensitive("Bob");
  private static final AclPrincipal MARK_CASE_SENSITIVE = AclUtil.userNameToAclPrincipal("Mark");
  private static final AclPrincipal MARK_CASE_SENSITIVE_ALIAS =
      AclUtil.userNameToAclPrincipal("mark");
  private static final AclPrincipal JOHN_CASE_INSENSITIVE =
      AclUtil.userNameToAclPrincipalCaseInsensitive("John");
  private static final AclPrincipal JOHN_CASE_INSENSITIVE_ALIAS =
      AclUtil.userNameToAclPrincipalCaseInsensitive("john");
  private static final AclPrincipal SMITH_CASE_INSENSITIVE =
      AclUtil.userNameToAclPrincipalCaseInsensitive("smith");
  private static final AclPrincipal SMITH_CASE_INSENSITIVE_ALIAS1 =
      AclUtil.userNameToAclPrincipalCaseInsensitive("Smith");
  private static final AclPrincipal SMITH_CASE_INSENSITIVE_ALIAS2 =
      AclUtil.userNameToAclPrincipalCaseInsensitive("ſmith");
  private static final AclPrincipal ROSS_CASE_INSENSITIVE =
      AclUtil.userNameToAclPrincipalCaseInsensitive("ross");
  private static final AclPrincipal ROSS_CASE_INSENSITIVE_ALIAS1 =
      AclUtil.userNameToAclPrincipalCaseInsensitive("roSS");
  private static final AclPrincipal ROSS_CASE_INSENSITIVE_ALIAS2 =
      AclUtil.userNameToAclPrincipalCaseInsensitive("roß");
  private static final AclPrincipal ROSS_CASE_INSENSITIVE_ALIAS3 =
      AclUtil.userNameToAclPrincipalCaseInsensitive("roẞ");
  private static final AclPrincipal ENG = AclUtil.groupToAclPrincipal("eng");
  private static final AclPrincipal HR = AclUtil.groupToAclPrincipal("hr");
  private static final AclPrincipal FINANCE = AclUtil.groupToAclPrincipal("finance");
  private static final AclPrincipal HR_CASE_INSENSITIVE =
      AclUtil.groupToAclPrincipalCaseInsensitive("Hr");
  private static final AclPrincipal FINANCE_CASE_INSENSITIVE =
      AclUtil.groupToAclPrincipalCaseInsensitive("Finance");

  /** Builds an AclGroup with the given principal and member list. */
  private static AclGroup buildAclGroup(AclPrincipal principal, AclPrincipal... members) {
    AclGroup.Builder builder = AclGroup.newBuilder();
    for (AclPrincipal member : members) {
      builder.addMembers(member);
    }
    return builder.setPrincipal(principal).build();
  }

  /**
   * Tests the builder functionality and documents some perhaps unexpected behavior. Namely the
   * builder uses the same map for all subsequent calls to build(), so if further additions are made
   * to the underlying map they will show up in calls to the previously built
   * MemberToGroupsResolverMap objects.
   */
  @Test
  public void buildIncrementally() {
    MemberToGroupsResolverMap.Builder builder = MemberToGroupsResolverMap.builder();
    builder.normalizeAndPut(BOB, ENG);
    builder.normalizeAndPut(BOB, HR);
    MemberToGroupsResolverMap map = builder.build();
    assertThat(map.getAllGroupsForUser(BOB)).containsExactly(ENG, HR);
    builder.normalizeAndPut(BOB, FINANCE);
    MemberToGroupsResolverMap map2 = builder.build();
    assertThat(map2.getAllGroupsForUser(BOB)).containsExactly(ENG, HR, FINANCE);
    // The changes show up in the original map.
    assertThat(map.getAllGroupsForUser(BOB)).containsExactly(ENG, HR, FINANCE);
  }

  /**
   * Verifies IllegalArgumentExceptions are thrown when trying to build the map with improper
   * arguments.
   */
  @Test
  public void buildWithInvalidArguments() {
    MemberToGroupsResolverMap.Builder builder = MemberToGroupsResolverMap.builder();
    try {
      builder.normalizeAndPut(BOB, ALICE);
      Assert.fail("Builder should not allow users as the second argument.");
    } catch (IllegalArgumentException expected) {
    }
  }

  /**
   * Verifies NullPointerExceptions are thrown when trying to build the map with null arguments.
   */
  @Test
  public void buildWithNullArguments() {
    MemberToGroupsResolverMap.Builder builder = MemberToGroupsResolverMap.builder();
    try {
      builder.normalizeAndPut(null, HR);
      Assert.fail("Builder should not allow null as the first argument.");
    } catch (NullPointerException expected) {
    }
    try {
      builder.normalizeAndPut(BOB, null);
      Assert.fail("Builder should not allow null as the second argument.");
    } catch (NullPointerException expected) {
    }
  }

  /**
   * Verifies the building commands can be chained together to make things easier to read.
   */
  @Test
  public void buildIncrementallyWithReturnArguments() {
    // Now do all the same stuff but in one line.
    MemberToGroupsResolverMap map = MemberToGroupsResolverMap.builder()
        .normalizeAndPut(BOB, ENG)
        .normalizeAndPut(BOB, HR)
        .build();
    assertThat(map.getAllGroupsForUser(BOB)).containsExactly(ENG, HR);
  }

  /**
   * Builds an empty map and ensures no NPE or other undesirable behavior.
   */
  @Test
  public void buildEmptyMap() {
    MemberToGroupsResolverMap map = MemberToGroupsResolverMap.builder().build();
    assertThat(map).isEmpty();
  }

  /**
   * Verifies correct behavior on a simple group with one user and one group as members.
   */
  @Test
  public void buildFromSimpleGroup() {
    AclGroup group1 = buildAclGroup(ENG, ALICE, HR);
    GroupMembersMap groups = GroupMembersMap.builder()
        .put(group1)
        .build();
    MemberToGroupsResolverMap map = MemberToGroupsResolverMap.builder().merge(groups).build();
    assertThat(map.getAllGroupsForUser(ALICE)).containsExactly(ENG);
    assertThat(map.keySet()).doesNotContain(ENG);
  }

  /**
   * Verifies that the mapping is correct for users who are members of the nested group but not of
   * the top-level group.
   */
  @Test
  public void buildFromNestedGroup() {
    AclGroup group1 = buildAclGroup(ENG, BOB, HR);
    AclGroup group2 = buildAclGroup(HR, ALICE);
    GroupMembersMap groups = GroupMembersMap.builder()
        .put(group1)
        .put(group2)
        .build();
    MemberToGroupsResolverMap map = MemberToGroupsResolverMap.builder().merge(groups).build();
    assertThat(map.keySet()).doesNotContain(ENG);
    assertThat(map.getAllGroupsForUser(BOB)).containsExactly(ENG);
    assertThat(map.getAllGroupsForUser(ALICE)).containsExactly(ENG, HR);
  }

  /**
   * Verifies that the mapping is correct for users who are members of both the nested group and
   * the top-level group. Alice is in both groups.
   */
  @Test
  public void buildFromNestedGroupWithRedundantUsers() {
    AclGroup group1 = buildAclGroup(ENG, BOB, HR, ALICE);
    AclGroup group2 = buildAclGroup(HR, ALICE);
    GroupMembersMap groups = GroupMembersMap.builder()
        .put(group1)
        .put(group2)
        .build();
    MemberToGroupsResolverMap map = MemberToGroupsResolverMap.builder().merge(groups).build();
    assertThat(map.keySet()).doesNotContain(ENG);
    assertThat(map.getAllGroupsForUser(BOB)).containsExactly(ENG);
    assertThat(map.getAllGroupsForUser(ALICE)).containsExactly(ENG, HR);
  }

  /**
   * Verifies that resolving groups will also resolve nested groups with multiple levels.
   * Alice is in groups of FINANCE, HR and ENG. Bob is in groups of HR and ENG.
   */
  @Test
  public void resolveMultipleLevelNestedGroupUsers() {
    AclGroup group1 = buildAclGroup(ENG, HR);
    AclGroup group2 = buildAclGroup(HR, FINANCE, BOB);
    AclGroup group3 = buildAclGroup(FINANCE, ALICE);
    GroupMembersMap groups = GroupMembersMap.builder()
        .put(group1)
        .put(group2)
        .put(group3)
        .build();
    MemberToGroupsResolverMap map = MemberToGroupsResolverMap.builder().merge(groups).build();
    assertThat(map.keySet()).doesNotContain(ENG);
    assertThat(map.getAllGroupsForUser(BOB)).containsExactly(ENG, HR);
    assertThat(map.getAllGroupsForUser(ALICE)).containsExactly(ENG, HR, FINANCE);
  }

  /**
   * Verifies that resolving groups will also resolve nested case insensitive member groups
   * with multiple levels. Alice is in groups of FINANCE, HR and ENG. Bob is in groups
   * of HR and ENG.
   */
  @Test
  public void resolveMultipleLevelNestedCaseInsensitiveGroupMemberUsers() {
    AclGroup group1 = buildAclGroup(ENG, HR_CASE_INSENSITIVE);
    AclGroup group2 = buildAclGroup(HR, FINANCE_CASE_INSENSITIVE, BOB_CASE_INSENSITIVE);
    AclGroup group3 = buildAclGroup(FINANCE, ALICE);
    GroupMembersMap groups = GroupMembersMap.builder()
        .put(group1)
        .put(group2)
        .put(group3)
        .build();
    MemberToGroupsResolverMap map = MemberToGroupsResolverMap.builder().merge(groups).build();
    assertThat(map.keySet()).doesNotContain(ENG);
    assertThat(map.getAllGroupsForUser(ALICE)).containsExactly(ENG, HR, FINANCE);
    assertThat(map.getAllGroupsForUser(BOB_CASE_INSENSITIVE)).containsExactly(ENG, HR);
  }

  /**
   * Verifies that cyclic group definitions should be resolved correctly. Eng group has members of
   * HR and BOB, HR group has members of ENG and ALICE.
   */
  @Test
  public void resolveCyclicGroupUsers() {
    AclGroup group1 = buildAclGroup(ENG, HR, BOB);
    AclGroup group2 = buildAclGroup(HR, ENG, ALICE);
    GroupMembersMap groups = GroupMembersMap.builder()
        .put(group1)
        .put(group2)
        .build();
    MemberToGroupsResolverMap map = MemberToGroupsResolverMap.builder().merge(groups).build();
    assertThat(map.keySet()).contains(ENG);
    assertThat(map.keySet()).contains(HR);
    assertThat(map.getAllGroupsForUser(BOB)).containsExactly(ENG, HR);
    assertThat(map.getAllGroupsForUser(ALICE)).containsExactly(ENG, HR);
  }

  /**
   * Verifies that case insensitive group definitions should be resolved correctly. Eng group has
   * members of JOHN_CASE_INSENSITIVE and MARK_CASE_SENSITIVE.
   */
  @Test
  public void resolveCaseInsensitiveUsers() {
    AclGroup group1 = buildAclGroup(ENG, JOHN_CASE_INSENSITIVE, MARK_CASE_SENSITIVE);
    GroupMembersMap groups = GroupMembersMap.builder()
        .put(group1)
        .build();
    MemberToGroupsResolverMap map = MemberToGroupsResolverMap.builder().merge(groups).build();
    assertThat(map.keySet()).doesNotContain(ENG);
    assertThat(map.getAllGroupsForUser(JOHN_CASE_INSENSITIVE)).containsExactly(ENG);
    assertThat(map.getAllGroupsForUser(JOHN_CASE_INSENSITIVE_ALIAS)).containsExactly(ENG);
    assertThat(map.getAllGroupsForUser(MARK_CASE_SENSITIVE)).containsExactly(ENG);
    assertThat(map.getAllGroupsForUser(MARK_CASE_SENSITIVE_ALIAS)).isEmpty();
  }

  /**
   * Verifies that case insensitive unicode group definitions should be resolved correctly. Eng
   * group has members of SMITH_CASE_INSENSITIVE and ROSS_CASE_INSENSITIVE.
   */
  @Test
  public void resolveCaseInsensitiveUnicodeUsers() {
    AclGroup group1 = buildAclGroup(ENG, SMITH_CASE_INSENSITIVE, ROSS_CASE_INSENSITIVE);
    GroupMembersMap groups = GroupMembersMap.builder()
        .put(group1)
        .build();
    MemberToGroupsResolverMap map = MemberToGroupsResolverMap.builder().merge(groups).build();
    assertThat(map.getAllGroupsForUser(SMITH_CASE_INSENSITIVE)).containsExactly(ENG);
    assertThat(map.getAllGroupsForUser(SMITH_CASE_INSENSITIVE_ALIAS1)).containsExactly(ENG);
    // Test the lower case long s - 'ſ'
    assertThat(map.getAllGroupsForUser(SMITH_CASE_INSENSITIVE_ALIAS2)).containsExactly(ENG);
    assertThat(map.getAllGroupsForUser(ROSS_CASE_INSENSITIVE)).containsExactly(ENG);
    assertThat(map.getAllGroupsForUser(ROSS_CASE_INSENSITIVE_ALIAS1)).containsExactly(ENG);
    // Test the lower case sharp s - 'ß'
    assertThat(map.getAllGroupsForUser(ROSS_CASE_INSENSITIVE_ALIAS2)).containsExactly(ENG);
    // Test the upper case sharp s - 'ẞ'
    assertThat(map.getAllGroupsForUser(ROSS_CASE_INSENSITIVE_ALIAS3)).containsExactly(ENG);
  }

  /** Verifies a user with an empty domain resolves against a group entry with an empty domain. */
  @Test
  public void resolveEmptyDomain() {
    // how does the user object look coming from authentication module
    AuthnPrincipal authnUser = AuthnPrincipal.make("user", "namespace", "");
    // how does the same object look in groups database
    AclPrincipal aclUser = AclUtil.buildAclPrincipal(AclPrincipal.SCOPE.USER, "user",
        "namespace", "", AclPrincipal.CaseSensitivity.EVERYTHING_CASE_SENSITIVE);
    AclGroup group = buildAclGroup(ENG, aclUser);
    GroupMembersMap groups = GroupMembersMap.builder()
        .put(group)
        .build();
    MemberToGroupsResolverMap map = MemberToGroupsResolverMap.builder().merge(groups).build();
    assertThat(map.getAllGroupsForUser(AclUtil.authnPrincipalToAclPrincipal(authnUser)))
        .containsExactly(ENG);
  }

  /** Verifies domain matching is case insensitive for case-insensitive principals. */
  @Test
  public void resolveDomainCaseInsensitive() {
    // how does the user object look coming from authentication module
    AuthnPrincipal authnUser = AuthnPrincipal.make("user", "namespace", "DOMAIN");
    // how does the same object look in groups database
    AclPrincipal aclUser = AclUtil.buildAclPrincipal(AclPrincipal.SCOPE.USER, "user",
        "namespace", "domain", AclPrincipal.CaseSensitivity.EVERYTHING_CASE_INSENSITIVE);
    AclGroup group = buildAclGroup(ENG, aclUser);
    GroupMembersMap groups = GroupMembersMap.builder()
        .put(group)
        .build();
    MemberToGroupsResolverMap map = MemberToGroupsResolverMap.builder().merge(groups).build();
    assertThat(map.getAllGroupsForUser(AclUtil.authnPrincipalToAclPrincipal(authnUser)))
        .containsExactly(ENG);
  }

  /** Both user and group entry carry the unnormalized ("domain.com") form. */
  @Test
  public void resolveUserWithUnnormalizedDomainAndGroupWithUnnormalizedDomain() {
    // how does the user object look coming from authentication module
    AuthnPrincipal authnUser = AuthnPrincipal.make("user", "namespace", "domain.com");
    // how does the same object look in groups database
    AclPrincipal aclUser = AclUtil.buildAclPrincipal(AclPrincipal.SCOPE.USER, "user",
        "namespace", "domain.com", AclPrincipal.CaseSensitivity.EVERYTHING_CASE_SENSITIVE);
    AclGroup group = buildAclGroup(ENG, aclUser);
    GroupMembersMap groups = GroupMembersMap.builder()
        .put(group)
        .build();
    MemberToGroupsResolverMap map = MemberToGroupsResolverMap.builder().merge(groups).build();
    assertThat(map.getAllGroupsForUser(AclUtil.authnPrincipalToAclPrincipal(authnUser)))
        .containsExactly(ENG);
  }

  /** User carries "domain.com" while the group entry stores the normalized "domain". */
  @Test
  public void resolveUserWithUnnormalizedDomainAndGroupWithNormalizedDomain() {
    // how does the user object look coming from authentication module
    AuthnPrincipal authnUser = AuthnPrincipal.make("user", "namespace", "domain.com");
    // how does the same object look in groups database
    AclPrincipal aclUser = AclUtil.buildAclPrincipal(AclPrincipal.SCOPE.USER, "user",
        "namespace", "domain", AclPrincipal.CaseSensitivity.EVERYTHING_CASE_SENSITIVE);
    AclGroup group = buildAclGroup(ENG, aclUser);
    GroupMembersMap groups = GroupMembersMap.builder()
        .put(group)
        .build();
    MemberToGroupsResolverMap map = MemberToGroupsResolverMap.builder().merge(groups).build();
    assertThat(map.getAllGroupsForUser(AclUtil.authnPrincipalToAclPrincipal(authnUser)))
        .containsExactly(ENG);
  }

  /** Same as above with a multi-part TLD ("domain.com.hk"). */
  @Test
  public void resolveUserWithUnnormalizedDomainAndGroupWithNormalizedDomain2() {
    // how does the user object look coming from authentication module
    AuthnPrincipal authnUser = AuthnPrincipal.make("user", "namespace", "domain.com.hk");
    // how does the same object look in groups database
    AclPrincipal aclUser = AclUtil.buildAclPrincipal(AclPrincipal.SCOPE.USER, "user",
        "namespace", "domain", AclPrincipal.CaseSensitivity.EVERYTHING_CASE_SENSITIVE);
    AclGroup group = buildAclGroup(ENG, aclUser);
    GroupMembersMap groups = GroupMembersMap.builder()
        .put(group)
        .build();
    MemberToGroupsResolverMap map = MemberToGroupsResolverMap.builder().merge(groups).build();
    assertThat(map.getAllGroupsForUser(AclUtil.authnPrincipalToAclPrincipal(authnUser)))
        .containsExactly(ENG);
  }

  /** Unnormalized, differently-cased user domain against a case-insensitive group entry. */
  @Test
  public void resolveUserWithUnnormalizedDomainAndCaseInsensitive() {
    // how does the user object look coming from authentication module
    AuthnPrincipal authnUser = AuthnPrincipal.make("user", "namespace", "DOMAIN.com");
    // how does the same object look in groups database
    AclPrincipal aclUser = AclUtil.buildAclPrincipal(AclPrincipal.SCOPE.USER, "user",
        "namespace", "domain", AclPrincipal.CaseSensitivity.EVERYTHING_CASE_INSENSITIVE);
    AclGroup group = buildAclGroup(ENG, aclUser);
    GroupMembersMap groups = GroupMembersMap.builder()
        .put(group)
        .build();
    MemberToGroupsResolverMap map = MemberToGroupsResolverMap.builder().merge(groups).build();
    assertThat(map.getAllGroupsForUser(AclUtil.authnPrincipalToAclPrincipal(authnUser)))
        .containsExactly(ENG);
  }
}
|
package cn.org.bjca.example.client.nio;
import cn.org.bjca.example.socket.ExampleMessage;
import lombok.extern.slf4j.Slf4j;
import org.glassfish.grizzly.Buffer;
import org.glassfish.grizzly.Connection;
import org.glassfish.grizzly.GrizzlyFuture;
import org.glassfish.grizzly.WriteResult;
import org.glassfish.grizzly.nio.transport.TCPNIOTransport;
import java.util.concurrent.CountDownLatch;
/**
* @author lizhong
* @create:2019-04-09 下午 03:01
*/
@Slf4j
public class ThreadRequest extends Thread {
    // Transport used to open a fresh client connection on every loop iteration.
    TCPNIOTransport tcpnioTransport;
    // Counted down exactly once when this thread stops (on the first exception).
    private CountDownLatch countDownLatch;

    public ThreadRequest(
            String name, TCPNIOTransport tcpnioTransport, CountDownLatch countDownLatch) {
        super(name);
        this.tcpnioTransport = tcpnioTransport;
        this.countDownLatch = countDownLatch;
    }

    /**
     * Repeatedly connects to localhost:8080, writes one ExampleMessage whose head
     * and content are this thread's name (UTF-8 bytes), closes the connection and
     * sleeps one second. Any exception counts down the latch and ends the loop.
     */
    @Override
    public void run() {
        Connection connection = null;
        while (true) {
            try {
                connection = tcpnioTransport.connect("localhost", 8080).get();
                ExampleMessage exampleMessage = new ExampleMessage();
                exampleMessage.setHead(getName().getBytes("utf-8"));
                exampleMessage.setContent(getName().getBytes("utf-8"));
                // NOTE(review): Grizzly write() is asynchronous, and the finally
                // block closes the connection after the 1s sleep — confirm the
                // message is always flushed before close.
                connection.write(exampleMessage);
                Thread.sleep(1000);
            } catch (Exception e) {
                e.printStackTrace();
                countDownLatch.countDown();
                break;
            } finally {
                // Close the per-iteration connection; a new one is opened next pass.
                if (connection != null) {
                    connection.close();
                }
            }
        }
    }
}
|
# Ping the InfluxDB endpoint and show only the InfluxDB-specific response headers.
curl --silent https://influxdb.galaxyproject.eu:8086/ping -D - | grep -i influx
|
#!/usr/bin/env bash
# Smoke-run the grafana dashboard controller against a local registry,
# using the bundled test-fixture TLS material for mutual TLS.
set -efu

export CERTS=./test_fixtures
export REGISTRY_HOST="https://localhost:8091"

echo "Running grafana agent"
go run cmd/grafana_dashboard_controller/main.go \
  -registry ${REGISTRY_HOST} \
  -output-directory /tmp/dashboards \
  -tls-pem-path ${CERTS}/client.pem \
  -tls-key-path ${CERTS}/client.key \
  -tls-root-ca-pem ${CERTS}/ca.pem \
  -tls-server-cn localhost
# Sample usage of the GroupManager class
class Group:
    """Lightweight record describing a group.

    Fields:
        members: sequence of member names.
        isPublic: whether the group is publicly visible.
        id: numeric group identifier.
    """

    def __init__(self, members, isPublic, group_id):
        # Bind all three fields with a single multiple-assignment.
        self.members, self.isPublic, self.id = members, isPublic, group_id
# Create a group object
group_info = Group(["Alice", "Bob", "Charlie"], True, 123)
# Initialize GroupManager with the group object
# NOTE(review): GroupManager is not defined in this file — presumably it is
# defined/imported elsewhere; running this snippet standalone raises NameError.
group_manager = GroupManager(group_info)
# Test the implemented functionalities
print(group_manager.get_members())  # Output: ["Alice", "Bob", "Charlie"]
print(group_manager.is_public())  # Output: True
print(group_manager.get_group_id())  # Output: 123
#!/usr/bin/env python
# _*_ coding: utf-8 _*_
"""
@author : lightnine
@site : https://ligntnine.github.io
@version : 1.0
@file : auto_differentiation.py
@software : PyCharm
@time : 2019/1/22 20:27
tensorflow 中自动求取微分
https://www.tensorflow.org/tutorials/eager/automatic_differentiation?hl=zh-cn
"""
import tensorflow as tf
tf.enable_eager_execution()
def run_example1():
    """Gradient of z = (sum(x))**2 w.r.t. a 2x2 tensor of ones.

    For x all ones, y = 4 and z = 16, so every entry of dz/dx equals 8.
    """
    x = tf.ones((2, 2))
    # The tape records how z is computed; the gradient call below replays
    # that record to obtain dz/dx (expected value 8 per entry).
    with tf.GradientTape() as t:
        t.watch(x)
        # Sum all elements of x.
        y = tf.reduce_sum(x)
        print('y:', y)
        z = tf.multiply(y, y)
        print('z:', z)
    # Derivative of z with respect to the original input tensor x
    dz_dx = t.gradient(z, x)
    print('dz_dx:', dz_dx)
    for i in [0, 1]:
        for j in [0, 1]:
            # numpy(): returns a numpy array or scalar with the same contents as the Tensor
            assert dz_dx[i][j].numpy() == 8.0
def run_example2():
    """Differentiate z = y*y with respect to the intermediate value y.

    With a 2x2 tensor of ones, y == 4, so dz/dy == 2*y == 8.
    """
    inputs = tf.ones((2, 2))
    with tf.GradientTape() as tape:
        tape.watch(inputs)
        y = tf.reduce_sum(inputs)
        z = tf.multiply(y, y)
    # Use the tape to compute the derivative of z with respect to the
    # intermediate value y (not the original input).
    dz_dy = tape.gradient(z, y)
    assert dz_dy.numpy() == 8.0
def run_example3():
    """
    Demonstrates computing several gradients from one tape. Without
    persistent=True the tape's resources are released after the first
    gradient() call, so a second call would fail.
    :return:
    """
    x = tf.constant(3.0)
    with tf.GradientTape(persistent=True) as t:
        t.watch(x)
        y = x * x
        z = y * y
    dz_dx = t.gradient(z, x)  # 108.0 (4*x^3 at x = 3)
    dy_dx = t.gradient(y, x)  # 6.0
    print('dz_dx:', dz_dx)
    print('dy_dx:', dy_dx)
    del t  # Drop the reference to the tape
def f(x, y):
    """Multiply a starting value of 1.0 by x once for each index i in
    range(y) satisfying 1 < i < 5 (i.e. i in {2, 3, 4})."""
    result = 1.0
    for step in range(y):
        # Skip indices outside the open interval (1, 5).
        if step <= 1 or step >= 5:
            continue
        result = tf.multiply(result, x)
    return result
def grad(x, y):
    """Return the gradient of f(x, y) with respect to x, recorded on a
    fresh tape so Python control flow inside f is captured."""
    with tf.GradientTape() as tape:
        tape.watch(x)
        result = f(x, y)
    return tape.gradient(result, x)
def record_control_flow():
    """The tape records Python control flow: f multiplies by x once per
    index i with 1 < i < 5, so the recorded power of x depends on y."""
    # convert_to_tensor turns the Python float 2.0 into a Tensor.
    x = tf.convert_to_tensor(2.0)
    print('x:', x)
    # y=6 -> f = x^3, grad = 3x^2 = 12; y=5 -> same; y=4 -> f = x^2, grad = 2x = 4.
    assert grad(x, 6).numpy() == 12.0
    assert grad(x, 5).numpy() == 12.0
    assert grad(x, 4).numpy() == 4.0
def hight_order_gradient():
    """
    Demonstrates computing a second-order gradient by nesting tapes.
    (The 'hight' typo in the name is kept: the __main__ block calls it
    by this exact name.)
    :return:
    """
    x = tf.Variable(1.0)  # Create a Tensorflow variable initialized to 1.0
    with tf.GradientTape() as t:
        with tf.GradientTape() as t2:
            y = x * x * x
        # Compute the gradient inside the 't' context manager
        # which means the gradient computation is differentiable as well.
        dy_dx = t2.gradient(y, x)
    d2y_dx2 = t.gradient(dy_dx, x)
    # dy/dx = 3x^2 = 3 at x=1; d2y/dx2 = 6x = 6 at x=1.
    assert dy_dx.numpy() == 3.0
    assert d2y_dx2.numpy() == 6.0
if __name__ == '__main__':
    # Run each demonstration in order when executed as a script.
    run_example1()
    run_example2()
    run_example3()
    record_control_flow()
    hight_order_gradient()
import { observer } from 'mobx-react-lite';
import React from 'react';
import styles from './BlockCreationRow.module.scss';
// Props for a single row of the block-creation table.
export interface IBlockCreationRowProps {
  performance: number; // rendered as "{performance} of {slotsElected}"
  performancePercentage: number;
  sharedRewards: number; // displayed in ADA
  slotsElected: number;
  slotsElectedPercentage: number;
  stakePool: string; // stake pool identifier shown after the bracketed name
  stakePoolName: string; // short name rendered in [brackets]
}
// Presentational row summarising one stake pool's block-creation stats:
// pool name/ID, slots elected, performance, and shared rewards.
// NOTE(review): the slots-elected COUNT below is styled with the
// `slotsElectedPercentage` class — confirm this is intentional.
const BlockCreationRow = (props: IBlockCreationRowProps) => (
  <div className={styles.blockCreationRowContainer}>
    <div className={styles.stakePool}>
      <span className={styles.stakePoolName}>[{props.stakePoolName}]</span>{' '}
      {props.stakePool}
    </div>
    <div className={styles.slotsElected}>
      <span className={styles.slotsElectedPercentage}>
        {props.slotsElected}
      </span>{' '}
      slots -{' '}
      <span className={styles.slotsElectedPercentage}>
        {props.slotsElectedPercentage}%
      </span>
    </div>
    <div className={styles.performance}>
      {props.performance} of {props.slotsElected} -{' '}
      <span className={styles.performancePercentage}>
        {props.performancePercentage}%
      </span>
    </div>
    <div className={styles.sharedRewards}>
      <span className={styles.sharedRewardsValue}>{props.sharedRewards}</span>{' '}
      ADA of{' '}
      <span className={styles.sharedRewardsValue}>{props.performance}</span> ADA
    </div>
  </div>
);
// Wrapped in mobx observer so it re-renders on observable prop changes.
export default observer(BlockCreationRow);
|
<filename>platform/store/minio/minio.go
package minio
import (
"os"
"github.com/minio/minio-go/v6"
)
// Options holds connection and placement settings for a MinIO-compatible
// object store.
type Options struct {
	Bucket string `json:"bucket" yaml:"bucket"`
	Location string `json:"location" yaml:"location"`
	Prefix string `json:"prefix" yaml:"prefix"`
	URL string `json:"url" yaml:"url"`
	Access string `json:"access" yaml:"access"`
	Secret string `json:"secret" yaml:"secret"`
	Secure bool `json:"secure" yaml:"secure"`
}
// Option mutates an Options value (functional-options pattern).
type Option func(*Options)
// minioStore is a file store backed by a MinIO server.
type minioStore struct {
	opts Options
}
// client builds a new MinIO client from the stored options on every call.
func (s *minioStore) client() (*minio.Client, error) {
	return minio.New(s.opts.URL, s.opts.Access, s.opts.Secret, s.opts.Secure)
}
// Save uploads f to the configured bucket, creating the bucket on first
// use. The object key is f.Name() (the local path).
// NOTE(review): Options.Prefix is declared but never applied here —
// confirm whether object names should carry the prefix.
func (s *minioStore) Save(f *os.File) error {
	mc, err := s.client()
	if err != nil {
		return err
	}
	// MakeBucket fails when the bucket already exists; tolerate that case
	// by checking existence before surfacing the original error.
	if err := mc.MakeBucket(s.opts.Bucket, s.opts.Location); err != nil {
		ex, xerr := mc.BucketExists(s.opts.Bucket)
		if xerr != nil {
			return xerr
		}
		if !ex {
			return err
		}
	}
	_, err = mc.FPutObject(s.opts.Bucket, f.Name(), f.Name(), minio.PutObjectOptions{})
	if err != nil {
		return err
	}
	return nil
}
// Open downloads object p from the bucket into the local file f.
func (s *minioStore) Open(p string, f *os.File) error {
	mc, err := s.client()
	if err != nil {
		return err
	}
	return mc.FGetObject(s.opts.Bucket, p, f.Name(), minio.GetObjectOptions{})
}
|
class BinaryClassifier:
    """Thin wrapper around a fitted probabilistic classifier."""

    def __init__(self, model):
        # Keep a private reference to the underlying estimator.
        self._model = model

    def predict_probabilities(self, x_test):
        """Return class-probability estimates for x_test by delegating to
        the wrapped model's predict_proba."""
        estimator = self._model
        return estimator.predict_proba(x_test)
import Decimal from 'decimal.js';
import release from "@/api/release";
// Statistics that consume a delta budget share (presumably differential-
// privacy parameters — all other statistics use epsilon only).
export const deltaStats = ['Histogram']
// Confidence-level identifiers.
export const CL_99 = "99"
export const CL_95 = "95"
// Options for the confidence-level selector UI.
export const confLevelOptions = [
    {text: "99%", value: .99},
    {text: "95%", value: .95},
]
export default {
    // Return true if any statistic in `statistics` uses the delta
    // parameter (i.e. its name appears in deltaStats).
    statisticsUseDelta: function (statistics)
    {
        let useDelta = false
        statistics.forEach((item) => {
            if (deltaStats.includes(item.statistic)) {
                useDelta = true
            }
        })
        return useDelta
    },
    // True when the named value applies to the given statistic:
    // 'epsilon' applies to every statistic, 'delta' only to delta-based ones.
    statisticUsesValue(valName, statistic) {
        return valName === 'epsilon' || (valName === 'delta' && deltaStats.includes(statistic))
    },
    // True if this single statistic is delta-based.
    isDeltaStat: function (statistic) {
        return deltaStats.includes(statistic)
    },
    // Distribute the epsilon and delta budgets across `statistics`.
    // If any statistic needs delta but none was provided, fall back to
    // defaultDelta; if no statistic needs delta, zero it out.
    redistributeValues(statistics, delta, epsilon, defaultDelta) {
        if (statistics && statistics.length > 0) {
            if (this.statisticsUseDelta(statistics)) {
                if (delta == 0) {
                    delta = defaultDelta
                }
            } else {
                // BUG FIX: this reset previously ran whenever delta WAS in
                // use, unconditionally discarding the budget (including the
                // default assigned just above). Zero delta only when unused.
                delta = 0
            }
            this.redistributeValue(epsilon, 'epsilon', statistics)
            this.redistributeValue(delta, 'delta', statistics)
        }
    },
    // For all statistics that use `property` (epsilon/delta): keep locked
    // values as-is and share the remaining budget equally among the
    // unlocked ones. Statistics that don't use the property get 0.
    redistributeValue(totalValue, property, statistics,) {
        let lockedValue = new Decimal('0.0');
        let lockedCount = new Decimal('0');
        let unlockedCount = new Decimal('0')
        statistics.forEach((item) => {
            if (this.statisticUsesValue(property, item.statistic)) {
                if (item.locked) {
                    lockedValue = lockedValue.plus(item[property])
                    lockedCount = lockedCount.plus(1);
                } else {
                    unlockedCount = unlockedCount.plus(1)
                }
            }
        });
        const remaining = new Decimal(totalValue).minus(lockedValue)
        let valueShare = new Decimal('0')
        // Use Decimal's own comparison instead of implicit string coercion.
        if (unlockedCount.greaterThan(0)) {
            valueShare = this.safeSplit(remaining, unlockedCount)
            console.log('valueShare: ' + valueShare + "," + typeof (valueShare))
        }
        // Assign value shares and convert everything back from Decimal to
        // Number before saving.
        statistics.forEach((item) => {
            if (this.statisticUsesValue(property, item.statistic)) {
                if (!item.locked) {
                    item[property] = valueShare
                } else {
                    // BUG FIX: `typeof (x) == Decimal` compared a string
                    // against a class and was always false; use instanceof.
                    if (item[property] instanceof Decimal) {
                        item[property] = item[property].toNumber()
                    }
                }
            } else {
                item[property] = 0
            }
        })
    },
    // Split `budget` into k equal shares such that the k shares never sum
    // to more than the budget despite floating-point rounding.
    safeSplit(budget, k) {
        // Callers may pass Decimal instances; the algorithm works on plain
        // numbers, so coerce explicitly (Array(k) requires a real number).
        const total = Number(budget)
        const count = Number(k)
        let is_x_gte_kv = (x, n, v) =>
            x >= Array(n).fill().reduce((s, _) => s + v, 0)
        let split_budget = (x, n) => {
            // preserve symmetry if possible
            if (is_x_gte_kv(x, n, x / n)) return x / n
            // try increasingly large offsets until one passes
            // BUG FIX: `pow` was undeclared — a ReferenceError in
            // strict/module code.
            for (const pow of Array(20).keys()) {
                // candidate value v
                let v = (x - Math.pow(10, pow - 20)) / n
                if (is_x_gte_kv(x, n, v)) return v
            }
        }
        return split_budget(total, count)
    },
    // Returns Promise json object:
    //   valid: true/false (false if any statistic failed validation or the
    //          request itself failed)
    //   data: Array of individual validation flags/messages per statistic
    releaseValidation(analysisPlanId, tempStats) {
        let returnObj = {valid: true, data: null}
        return release.validate(analysisPlanId, tempStats)
            .then((resp) => {
                returnObj.data = resp.data
                resp.data.forEach((item) => {
                    if (item.valid !== true) {
                        returnObj.valid = false;
                    }
                })
                return returnObj
            })
            .catch((error) => {
                returnObj.valid = false
                returnObj.data = [{"valid": false, "message": error}]
                return returnObj
            })
    }
}
// A fixed reference instant plus metadata used by timezone-related tests.
export interface DataCtx {
  date: Date; // same instant as isoString/timestamp
  isoString: string;
  localtime: {
    // Expected wall-clock time in each of `timezones` at this instant.
    hour: number;
    minute: number;
  };
  timestamp: number; // milliseconds since the Unix epoch
  timezones: string[]; // IANA zone names sharing the same local time
}
export function newYear2000UTC1(): DataCtx {
  // Fixed instant: 2000-01-01T00:00:00Z == 946684800000 ms since epoch.
  const isoString = "2000-01-01T00:00:00Z";
  const timestamp = 946684800000; // in ms
  // In every timezone listed below, local time at this instant is 01:00.
  // Daylight Saving Time rules:
  // - EU, W-Eur, E-Eur, C-Eur -> Mar last Sun, Oct last Sun
  // - WAT no DST, UTC+1 constant
  const timezones = [
    "Africa/Algiers", // EU
    "Africa/Bangui", // WAT
    "Africa/Brazzaville", // WAT
    "Africa/Ceuta", // EU
    "Africa/Douala", // WAT
    "Africa/Kinshasa", // WAT
    "Africa/Lagos", // WAT
    "Africa/Libreville", // WAT
    "Africa/Luanda", // WAT
    "Africa/Malabo", // WAT
    "Africa/Ndjamena", // WAT
    "Africa/Niamey", // WAT
    "Africa/Porto-Novo", // WAT
    "Africa/Tunis", // EU
    "Arctic/Longyearbyen", // EU
    "Atlantic/Jan_Mayen", // EU
    "CET", // EU
    "Etc/GMT-1", // none
    "Europe/Amsterdam", // EU
    "Europe/Andorra", // EU
    "Europe/Belgrade", // EU
    "Europe/Berlin", // EU
    "Europe/Bratislava", // EU
    "Europe/Brussels", // EU
    "Europe/Budapest", // EU
    "Europe/Busingen", // EU
    "Europe/Copenhagen", // EU
    "Europe/Gibraltar", // EU
    "Europe/Ljubljana", // EU
    "Europe/Luxembourg", // EU
    "Europe/Madrid", // EU
    "Europe/Malta", // EU
    "Europe/Monaco", // EU
    "Europe/Oslo", // EU
    "Europe/Paris", // EU
    "Europe/Podgorica", // EU
    "Europe/Prague", // EU
    "Europe/Rome", // EU
    "Europe/San_Marino", // EU
    "Europe/Sarajevo", // EU
    "Europe/Skopje", // EU
    "Europe/Stockholm", // EU
    "Europe/Tirane", // EU
    "Europe/Vaduz", // EU
    "Europe/Vatican", // EU
    "Europe/Vienna", // EU
    "Europe/Warsaw", // EU
    "Europe/Zagreb", // EU
    "Europe/Zurich", // EU
    "MET", // none
    "Poland", // EU
  ];
  // Assemble the context in one literal rather than via intermediate consts.
  return {
    date: new Date(isoString),
    isoString,
    localtime: { hour: 1, minute: 0 },
    timestamp,
    timezones,
  };
}
|
def count_arguments(*args):
    '''Given a function with a variable number of arguments (args),
    this function will count the number of arguments and print the total.'''
    # Count directly with len(); output format matches the original exactly.
    print('Number of arguments:', len(args))
<reponame>david-yappeter/go-mysql-suite
package tests
import (
"myapp/config"
"myapp/entity"
"os"
"testing"
"github.com/stretchr/testify/suite"
"gorm.io/gorm"
)
// SuiteTest bundles a testify suite with the shared gorm DB handle.
type SuiteTest struct {
	suite.Suite
	db *gorm.DB
}
// TestSuite sets the test-database env vars (cleared again on exit via the
// deferred Unsetenv calls) and runs the suite.
func TestSuite(t *testing.T) {
	os.Setenv("DB_HOST", "127.0.0.1")
	os.Setenv("DB_PORT", "3306")
	os.Setenv("DB_USER", "root")
	os.Setenv("DB_PASS", "root")
	os.Setenv("DB_DATABASE", "go_mysql_suite_test")
	defer os.Unsetenv("DB_HOST")
	defer os.Unsetenv("DB_PORT")
	defer os.Unsetenv("DB_USER")
	defer os.Unsetenv("DB_PASS")
	defer os.Unsetenv("DB_DATABASE")
	suite.Run(t, new(SuiteTest))
}
// getModels lists every entity the suite migrates and later drops.
func getModels() []interface{} {
	return []interface{}{&entity.User{}}
}
// Setup db value
func (t *SuiteTest) SetupSuite() {
	config.ConnectGorm()
	t.db = config.GetDB()
	// Migrate Table
	for _, val := range getModels() {
		t.db.AutoMigrate(val)
	}
}
// Run After All Test Done
func (t *SuiteTest) TearDownSuite() {
	sqlDB, _ := t.db.DB()
	defer sqlDB.Close()
	// Drop Table
	for _, val := range getModels() {
		t.db.Migrator().DropTable(val)
	}
}
// Run Before a Test
func (t *SuiteTest) SetupTest() {
}
// Run After a Test
func (t *SuiteTest) TearDownTest() {
}
|
#!/bin/bash
# Image build driver: dumps the environment, validates it, installs the
# selected Oracle DB version, then conditionally installs optional
# components (SQLcl, APEX+Tomcat+ORDS, AOP, AME, Swagger, Logger, OOS
# Utils, SSH) and finally cleans up temp files.
echo "--------------------------------------------------"
echo "Environment Vars.................................."
echo "INSTALL_APEX: ${INSTALL_APEX}"
echo "INSTALL_SQLCL: ${INSTALL_SQLCL}"
echo "INSTALL_SQLDEVWEB: ${INSTALL_SQLDEVWEB}"
echo "INSTALL_LOGGER: ${INSTALL_LOGGER}"
echo "INSTALL_OOSUTILS: ${INSTALL_OOSUTILS}"
echo "INSTALL_AOP: ${INSTALL_AOP}"
echo "INSTALL_AME: ${INSTALL_AME}"
echo "INSTALL_SWAGGER: ${INSTALL_SWAGGER}"
echo "INSTALL_CA_CERTS_WALLET: ${INSTALL_CA_CERTS_WALLET}"
echo "DB_INSTALL_VERSION: ${DB_INSTALL_VERSION}"
echo "DBCA_TOTAL_MEMORY: ${DBCA_TOTAL_MEMORY}"
echo "ORACLE_SID: ${ORACLE_SID}"
echo "SERVICE_NAME: ${SERVICE_NAME}"
echo "ORACLE_BASE: ${ORACLE_BASE}"
echo "ORACLE_HOME12: ${ORACLE_HOME12}"
echo "ORACLE_HOME18: ${ORACLE_HOME18}"
echo "ORACLE_HOME19: ${ORACLE_HOME19}"
# Select ORACLE_HOME based on the requested database version (12/18/19).
if [ ${DB_INSTALL_VERSION} == "12" ]; then
  export ORACLE_HOME=${ORACLE_HOME12}
fi
if [ ${DB_INSTALL_VERSION} == "18" ]; then
  export ORACLE_HOME=${ORACLE_HOME18}
fi
if [ ${DB_INSTALL_VERSION} == "19" ]; then
  export ORACLE_HOME=${ORACLE_HOME19}
fi
echo "ORACLE_HOME: ${ORACLE_HOME}"
echo "ORACLE_INVENTORY: ${ORACLE_INVENTORY}"
# NOTE(review): the next two echo lines print credentials (PASS, APEX_PASS
# below) into the build log — consider masking.
echo "PASS: ${PASS}"
echo "ORDS_HOME: ${ORDS_HOME}"
echo "JAVA_HOME: ${JAVA_HOME}"
echo "TOMCAT_HOME: ${TOMCAT_HOME}"
echo "APEX_PASS: ${APEX_PASS}"
echo "APEX_ADDITIONAL_LANG: ${APEX_ADDITIONAL_LANG}"
echo "APEX_PATCH_SET_BUNDLE_FILE: ${APEX_PATCH_SET_BUNDLE_FILE}"
echo "TIME_ZONE: ${TIME_ZONE}"
#
#
echo "--------------------------------------------------"
echo "Validations......................................."
# Abort the whole build if the environment fails validation.
./scripts/validations.sh || exit 1
#
echo "--------------------------------------------------"
echo "Image Setup......................................."
./scripts/image_setup.sh
#
echo "--------------------------------------------------"
# Install exactly one Oracle EE version, matching DB_INSTALL_VERSION.
if [ ${DB_INSTALL_VERSION} == "12" ]; then
  echo "Installing ORACLE Database 12 EE......................"
  ./scripts/install_oracle12ee.sh
fi
if [ ${DB_INSTALL_VERSION} == "18" ]; then
  echo "Installing ORACLE Database 18 EE......................"
  ./scripts/install_oracle18ee.sh
fi
if [ ${DB_INSTALL_VERSION} == "19" ]; then
  echo "Installing ORACLE Database 19 EE......................"
  ./scripts/install_oracle19ee.sh
fi
#
echo "--------------------------------------------------"
echo "Installing JAVA..................................."
./scripts/install_java.sh
#
if [ ${INSTALL_SQLCL} == "true" ]; then
  echo "--------------------------------------------------"
  echo "Installing SQLCL.................................."
  ./scripts/install_sqlcl.sh
fi
#
# APEX pulls in Tomcat and ORDS; the AOP/AME/Swagger/wallet add-ons are
# only considered when APEX itself is installed.
if [ ${INSTALL_APEX} == "true" ]; then
  #
  echo "--------------------------------------------------"
  echo "Installing ORACLE APEX............................"
  ./scripts/install_apex.sh
  #
  echo "--------------------------------------------------"
  echo "Installing TOMCAT................................."
  ./scripts/install_tomcat.sh
  #
  echo "--------------------------------------------------"
  echo "Installing ORACLE ORDS............................"
  ./scripts/install_ords.sh
  #
  if [ ${INSTALL_AOP} == "true" ]; then
    echo "--------------------------------------------------"
    echo "Installing AOP...................................."
    ./scripts/install_aop.sh
  fi
  if [ ${INSTALL_AME} == "true" ]; then
    echo "--------------------------------------------------"
    echo "Installing AME...................................."
    ./scripts/install_ame.sh
  fi
  if [ ${INSTALL_SWAGGER} == "true" ]; then
    echo "--------------------------------------------------"
    echo "Installing Swagger................................"
    ./scripts/install_swagger.sh
  fi
  if [ ${INSTALL_CA_CERTS_WALLET} == "true" ]; then
    echo "--------------------------------------------------"
    echo "Installing APEX CA SSL Wallet....................."
    ./scripts/install_ca_wallet.sh
  fi
fi
#
if [ ${INSTALL_LOGGER} == "true" ]; then
  #
  echo "--------------------------------------------------"
  echo "Installing OraOpenSource Logger..................."
  ./scripts/install_logger.sh
fi
#
if [ ${INSTALL_OOSUTILS} == "true" ]; then
  #
  echo "--------------------------------------------------"
  echo "Installing OraOpenSource OOS Utils................"
  ./scripts/install_oosutils.sh
fi
#
echo "--------------------------------------------------"
echo "Installing SSH...................................."
./scripts/install_ssh.sh
#
echo "--------------------------------------------------"
echo "Cleanup..........................................."
# Shrink the image: drop package caches and temp/staging files.
yum clean all
rm -r -f /tmp/*
rm -r -f /files/*
rm -r -f /var/tmp/*
echo "--------------------------------------------------"
echo "DONE.............................................."
|
require 'dionysus/travisci/gemfile_generator'
namespace :travis do
desc "Generate gemfiles for Travis-ci"
task :gemfiles do
gen = Dionysus::TravisCI::GemfileGenerator.new($rake_root.join("Gemfile").to_s)
# No Mongo
gen.generate($rake_root.join("gemfiles", "no-mongo.gemfile").to_s,
:without => [:mongo])
# Rails 3.1
gen.generate($rake_root.join("gemfiles", "rails31.gemfile").to_s,
:add => [ %(gem "activesupport", "~> 3.1.0"),
%(gem "activemodel", "~> 3.1.0"),
%(gem "rails", "~> 3.1.0", :group => [:development, :test]) ])
puts "Done."
end
end
|
from serial_host.robot_interface import RobotInterface
from serial_host import packet_definitions as pkt
import threading
import hid
# Stick deadband (raw controller units); smaller deflections are ignored.
DEADBAND = 5
# Shared state: written by the gamepad loop in __main__, read by robot_thread.
x_vel = 0
y_vel = 0
pwm = 0
angle = 90
def robot_thread():
    """Background loop: enable motors 0 and 1 once, then continuously push
    the current x_vel/y_vel/pwm/angle setpoints to the robot.

    Relies on the module-level `main` RobotInterface created in __main__
    and on the globals above being updated by the gamepad loop.
    """
    main.add_motor_command(pkt.pack_MotorCommandPacket(0, pkt.MotorCommand.ENABLE))
    main.add_motor_command(pkt.pack_MotorCommandPacket(1, pkt.MotorCommand.ENABLE))
    main.send_command()
    while True:
        main.run()
        # Motors 0/1 take velocity setpoints; motor 2 gets `pwm` as a
        # velocity (name suggests PWM — confirm); motor 3 gets a position.
        main.add_motor_command(pkt.pack_MotorCommandPacket(0, pkt.MotorCommand.SET_OMEGA, control=y_vel))
        main.add_motor_command(pkt.pack_MotorCommandPacket(1, pkt.MotorCommand.SET_OMEGA, control=x_vel))
        main.add_motor_command(pkt.pack_MotorCommandPacket(2, pkt.MotorCommand.SET_OMEGA, control=pwm))
        main.add_motor_command(pkt.pack_MotorCommandPacket(3, pkt.MotorCommand.SET_THETA, control=angle))
        main.send_command()
if __name__ == "__main__":
    # Robot serial interface (USB VID/PID 0x16c0:0x0486).
    main = RobotInterface(0x16c0, 0x0486)
    robot = threading.Thread(target=robot_thread, daemon=True)
    robot.start()
    # Gamepad HID device (VID 0x057e is Nintendo — presumably a Switch Pro
    # controller; byte offsets below assume its report layout — verify).
    gamepad = hid.Device(0x057e, 0x2009)
    while True:
        read_byte = gamepad.read(64, 1000)
        dpad = read_byte[5]
        button_pad = read_byte[3]
        # Decode stick axes into roughly [-32, 32).
        up_down = (read_byte[8] & 0xFC)/4 - 32
        left_right = (read_byte[7] & 0x0f) * 4 - 32
        a = button_pad & 8
        zr = button_pad & 128
        # Reset setpoints each report; re-derive below from current input.
        x_vel = 0
        y_vel = 0
        pwm = 0
        angle = 0
        if abs(left_right) > DEADBAND:
            x_vel = left_right/4
        if abs(up_down) > DEADBAND:
            y_vel = up_down/4
        if a:
            pwm = 45
        # NOTE(review): the else branch forces angle to 90 whenever A+ZR are
        # not both held, overriding the 0 set above — confirm intended.
        if a and zr:
            angle = 0
        else:
            angle = 90
        print(up_down, left_right)
|
<reponame>stjordanis/ts2fable
// Test fixture: a minimal interface carrying only a @deprecated JSDoc tag
// (presumably exercises deprecation handling in the transpiler — verify).
/**
 * @deprecated use something else
 */
interface I {}
|
<gh_stars>1-10
package au.org.noojee.irrigation.entities;
import java.time.Duration;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
import javax.persistence.Version;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import com.pi4j.io.gpio.GpioController;
import com.pi4j.io.gpio.GpioFactory;
import com.pi4j.io.gpio.GpioPinDigitalOutput;
import com.pi4j.io.gpio.PinProvider;
import au.org.noojee.irrigation.controllers.EndPointBus;
import au.org.noojee.irrigation.types.Amperage;
import au.org.noojee.irrigation.types.EndPointType;
import au.org.noojee.irrigation.types.PinActivationType;
import au.org.noojee.irrigation.types.PinStatus;
@Entity
@Table(name="tblEndPoint")
/**
 * A GPIO-backed irrigation end point (e.g. a valve) persisted via JPA.
 * Knows its pin number, activation polarity, and electrical characteristics,
 * and can drive the physical pin on or off through pi4j.
 */
public class EndPoint
{
	transient private static final Logger logger = LogManager.getLogger();
	@Id
	@GeneratedValue(strategy = GenerationType.IDENTITY)
	@Column(updatable = false, nullable = false)
	private long id;
	@Version
	private int version;
	// Name of the device attached to the pin.
	@Column(unique=true)
	private String endPointName;
	private EndPointType endPointType;
	// Whether driving the pin HIGH or LOW turns the device on.
	private PinActivationType activationType;
	// We store the gpio pin no. here.
	@Column(unique=true)
	private int pinNo;
	// If we are a master valve we offer an option to drain the pressure from the line
	// by turning the master valve off before we turn the garden bed valve off.
	private boolean drainLine = false;
	// The amount of current activating this pin causes the device to draw.
	private Amperage startAmps;
	// the amount of current the device draws when this pin is active (post startup spike).
	private Amperage runningAmps;
	// The amount of time the 'startAmps' is drawn once the pin is activiated before
	// the current draw settles to the 'runningAmps'
	private Duration startupInterval;
	/**
	 * Drive the pin to its active level and notify listeners.
	 * Returns Void/null (presumably to satisfy a Callable&lt;Void&gt; — verify).
	 * NOTE(review): routine on/off messages are logged at ERROR level —
	 * likely should be info/debug; confirm before changing.
	 */
	public Void hardOn()
	{
		if (activationType == PinActivationType.HIGH_IS_ON)
			setPinHigh();
		else
			setPinLow();
		logger.error("Pin " + pinNo + " for EndPoint: " + (this.endPointType == EndPointType.MasterValve ? "(MasterValve)" : "" ) + this.endPointName + " set On.");
		EndPointBus.getInstance().notifyHardOn(this);
		return null;
	}
	/** Drive the pin to its inactive level and notify listeners. */
	public Void hardOff()
	{
		if (activationType == PinActivationType.HIGH_IS_ON)
			setPinLow();
		else
			setPinHigh();
		logger.error("Pin " + pinNo + " for EndPoint: " + (this.endPointType == EndPointType.MasterValve ? "(MasterValve)" : "" ) + this.endPointName + " set Off.");
		EndPointBus.getInstance().notifyHardOff(this);
		return null;
	}
	public boolean isDrainingLine()
	{
		return drainLine;
	}
	public void setDrainLine(boolean drainLine)
	{
		this.drainLine = drainLine;
	}
	private void setPinHigh()
	{
		GpioPinDigitalOutput gpioPin = this.getPiPin(this.pinNo);
		gpioPin.high();
	}
	private void setPinLow()
	{
		GpioPinDigitalOutput gpioPin = this.getPiPin(this.pinNo);
		gpioPin.low();
	}
	// Persist only the pin's numeric address; resolve back via PinProvider.
	public void setPiPin(com.pi4j.io.gpio.Pin piPin)
	{
		this.pinNo = piPin.getAddress();
	}
	public com.pi4j.io.gpio.Pin getPiPin()
	{
		return PinProvider.getPinByAddress(pinNo);
	}
	public void setEndPointName(String endPointName)
	{
		this.endPointName = endPointName;
	}
	public void setPinActiviationType(PinActivationType pinActiviationType)
	{
		this.activationType = pinActiviationType;
	}
	public void setEndPointType(EndPointType endPointType)
	{
		this.endPointType = endPointType;
	}
	public void setStartAmps(Amperage startAmps)
	{
		this.startAmps = startAmps;
	}
	public void setRunningAmps(Amperage runningAmps)
	{
		this.runningAmps = runningAmps;
	}
	public void setStartupInterval(Duration startupInterval)
	{
		this.startupInterval = startupInterval;
	}
	public String getEndPointName()
	{
		return this.endPointName;
	}
	public long getId()
	{
		return id;
	}
	public EndPointType getEndPointType()
	{
		return this.endPointType;
	}
	public PinActivationType getPinActiviationType()
	{
		return this.activationType;
	}
	/** Read the live hardware state of this end point's pin. */
	public PinStatus getCurrentStatus()
	{
		final GpioController gpio = GpioFactory.getInstance();
		com.pi4j.io.gpio.Pin internalPin = PinProvider.getPinByAddress(pinNo);
		GpioPinDigitalOutput gpioPin = (GpioPinDigitalOutput) gpio.getProvisionedPin(internalPin);
		return PinStatus.getStatus(this, gpioPin.isHigh());
	}
	public int getPinNo()
	{
		return this.pinNo;
	}
	public boolean isOn()
	{
		return getCurrentStatus() == PinStatus.ON;
	}
	// Resolve the provisioned pi4j output pin for the given address.
	private com.pi4j.io.gpio.GpioPinDigitalOutput getPiPin(int pinNo)
	{
		final GpioController gpio = GpioFactory.getInstance();
		com.pi4j.io.gpio.Pin internalPin = PinProvider.getPinByAddress(pinNo);
		GpioPinDigitalOutput gpioPin = (GpioPinDigitalOutput) gpio.getProvisionedPin(internalPin);
		return gpioPin;
	}
	// Identity is based solely on the database id (see equals below).
	@Override
	public int hashCode()
	{
		final int prime = 31;
		int result = 1;
		result = prime * result + (int) (id ^ (id >>> 32));
		return result;
	}
	@Override
	public boolean equals(Object obj)
	{
		if (this == obj)
			return true;
		if (obj == null)
			return false;
		if (getClass() != obj.getClass())
			return false;
		EndPoint other = (EndPoint) obj;
		if (id != other.id)
			return false;
		return true;
	}
	@Override
	public String toString()
	{
		return "EndPoint [id=" + id + ", endPointName=" + endPointName + ", endPointType=" + endPointType
				+ ", activationType=" + activationType + ", pinNo=" + pinNo + ", startAmps=" + startAmps
				+ ", runningAmps=" + runningAmps + ", startupInterval=" + startupInterval + "]";
	}
	public boolean isOff()
	{
		return !isOn();
	}
}
|
#!/bin/sh -x
# Forward all CLI arguments to the Go program.
# BUG FIX: "$@" (quoted) preserves argument boundaries; the previous
# unquoted $@ re-split any argument containing whitespace.
go run main.go "$@"
|
#!/bin/sh
# This collection of scripts will take settings from /etc/config/meshwizard, /etc/config/freifunk
# and /etc/config/profile_<community> and setup the router to participate in wireless mesh networks
# Copyright 2011 Manuel Munz <freifunk at somakoma dot de>
# Licensed under the Apache License, Version 2.0 (the "License")
# You may not use this file except in compliance with the License.
# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
. /lib/functions.sh
echo "
/* Meshwizard 0.0.8 */
"
# config
export dir="/usr/bin/meshwizard"
. $dir/functions.sh
# IPv6 support is detected from the kernel, not configured.
[ -f /proc/net/ipv6_route ] && export has_ipv6=1
# Check which packages we have installed
export has_luci=FALSE
opkg list_installed |grep luci-mod-admin > /dev/null && export has_luci=TRUE
export has_luci_splash=FALSE
opkg list_installed |grep luci-app-splash > /dev/null && export has_luci_splash=TRUE
# Check whether we want to cleanup/restore uci config before setting new options
cleanup=$(uci -q get meshwizard.general.cleanup)
[ "$cleanup" == 1 ] && $dir/helpers/restore_default_config.sh
# Rename wifi interfaces
$dir/helpers/rename-wifi.sh
# Get community (meshwizard takes precedence over freifunk config)
community=$(uci -q get meshwizard.community.name || uci -q get freifunk.community.name)
[ -z "$community" ] && echo "Error: Community is not set in /etc/config/freifunk, aborting now." && exit 1
export community="$community"
echo $community
# Get a list of networks we need to setup
networks=$(uci show meshwizard.netconfig | grep -v "netconfig=" | sed -e 's/meshwizard.netconfig\.\(.*\)\_.*/\1/' |sort|uniq)
export networks
[ -z "$networks" ] && echo "Error: No networks to setup could be found in /etc/config/meshwizard, aborting now." && exit 1
# Read default values (first from /etc/config/freifunk, then from /etc/config/profile_$community
# then /etc/config/meshwizard
# last will overwrite first
$dir/helpers/read_defaults.sh $community > /tmp/meshwizard.tmp
# Export every KEY=VALUE pair produced by read_defaults.sh into this shell.
while read line; do
	export "${line//\"/}"
done < /tmp/meshwizard.tmp
# Do config
$dir/helpers/initial_config.sh
$dir/helpers/setup_dnsmasq.sh
$dir/helpers/setup_system.sh
$dir/helpers/setup_olsrd.sh
$dir/helpers/setup_firewall.sh
$dir/helpers/setup_ssh.sh
$dir/helpers/setup_uhttpd.sh
$dir/helpers/setup_widgets.sh
# WAN/LAN setup depends on the configured protocol (static/dhcp).
if [ "$wan_proto" == "static" ] && [ -n "$wan_ip4addr" ] && [ -n "$wan_netmask" ]; then
	$dir/helpers/setup_wan_static.sh
fi
if [ "$wan_proto" == "dhcp" ]; then
	$dir/helpers/setup_wan_dhcp.sh
fi
if [ "$lan_proto" == "static" ] && [ -n "$lan_ip4addr" ] && [ -n "$lan_netmask" ]; then
	$dir/helpers/setup_lan_static.sh
fi
if [ "$ipv6_enabled" == 1 ] && [ "$has_ipv6" = 1 ]; then
	$dir/helpers/setup_lan_ipv6.sh
	# Setup auto-ipv6
	if [ -n "$(echo "$ipv6_config" |grep auto-ipv6)" ]; then
		$dir/helpers/setup_auto-ipv6.sh
	fi
fi
# Setup policyrouting if internet sharing is disabled and wan is not used for olsrd
# Always disable it first to make sure its disabled when the user decied to share his internet
uci set freifunk-policyrouting.pr.enable=0
if [ ! "$general_sharenet" == 1 ] && [ ! "$(uci -q get meshwizard.netconfig.wan_proto)" == "olsr" ]; then
	$dir/helpers/setup_policyrouting.sh
fi
# Configure found networks
for net in $networks; do
	# radioX devices need to be renamed
	netrenamed="${net/radio/wireless}"
	export netrenamed
	$dir/helpers/setup_network.sh $net
	if [ ! "$net" == "wan" ] && [ ! "$net" == "lan" ]; then
		$dir/helpers/setup_wifi.sh $net
	fi
	$dir/helpers/setup_olsrd_interface.sh $net
	net_dhcp=$(uci -q get meshwizard.netconfig.${net}_dhcp)
	if [ "$net_dhcp" == 1 ]; then
		$dir/helpers/setup_dhcp.sh $net
	fi
	$dir/helpers/setup_splash.sh $net
	$dir/helpers/setup_firewall_interface.sh $net
	if [ -n "$(echo "$ipv6_config" |grep auto-ipv6)" ]; then
		$dir/helpers/setup_auto-ipv6-interface.sh $net
	fi
done
##### Reboot the router (because simply restarting services gave errors)
echo "+ The wizard has finished and the router will reboot now."
reboot
|
<gh_stars>1-10
/* $Id$ */
/***************************************************************************
* (C) Copyright 2003-2010 - Stendhal *
***************************************************************************
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
package games.stendhal.client.gui.chatlog;
import games.stendhal.common.NotificationType;
/**
 * One line of chat-log output: an optional header prefix, the message
 * text and the notification type.
 */
public class EventLine {
	private String header;
	private String text;
	private NotificationType type;

	/**
	 * @param header prefix shown before the text (may be empty)
	 * @param text the message body
	 * @param type notification type of this line
	 */
	public EventLine(final String header, final String text, final NotificationType type) {
		this.header = header;
		this.text = text;
		this.type = type;
	}

	public String getHeader() {
		return header;
	}

	public void setHeader(final String header) {
		this.header = header;
	}

	public String getText() {
		return text;
	}

	public void setText(final String text) {
		this.text = text;
	}

	public NotificationType getType() {
		return type;
	}

	public void setType(final NotificationType type) {
		this.type = type;
	}

	/** Format as {@code type: [header: ]text}; the header part is omitted when empty. */
	@Override
	public String toString() {
		final String headerPart = (header.length() > 0) ? header + ": " : "";
		return type + ": " + headerPart + text;
	}
}
|
const { isValidTemplate, compileTemplate } = require('./utils');
// Wrap a console-style function `fn` so that calling the wrapper with
// arguments returns an Action(context, props). When the Action runs, each
// argument is resolved: functions are invoked with (context, props),
// template strings are compiled against the context, anything else passes
// through unchanged. The Action's `name` embeds the first string argument
// (truncated to 15 chars) for debugging.
function createLoggerAction(fn, name) {
  return (...args) => {
    const Action = (context, props) =>
      fn(
        ...args.map((arg) => {
          if (typeof arg === 'function') {
            return arg(context, props);
          }
          if (typeof arg === 'string' && isValidTemplate(arg)) {
            return compileTemplate(arg)(context);
          }
          return arg;
        })
      );
    const firstArg = args[0];
    // BUG FIX: the non-truncated branch previously interpolated the whole
    // `args` array instead of `firstArg`, producing names like
    // "Log(msg,extra)" for multi-argument calls.
    const actionName =
      firstArg && typeof firstArg === 'string'
        ? `${name}(${
            firstArg.length > 15 ? `${firstArg.slice(0, 15)}...` : firstArg
          })`
        : name;
    Object.defineProperty(Action, 'name', { value: actionName });
    return Action;
  };
}
// Build the four standard log-level actions from a console-like adapter.
function createLogger(adapter) {
  const wrap = (method, label) => createLoggerAction(method, label);
  return {
    log: wrap(adapter.log, 'Log'),
    info: wrap(adapter.info, 'Info'),
    warn: wrap(adapter.warn, 'Warn'),
    error: wrap(adapter.error, 'Error'),
  };
}
// Default logger wired to the global console; individual level actions are
// exported for convenience alongside the factory itself.
const { log, info, warn, error } = createLogger(console);
exports.log = log;
exports.info = info;
exports.warn = warn;
exports.error = error;
exports.createLogger = createLogger;
<filename>hooks/darkTheme.tsx
import {
useContext,
useEffect,
createContext,
Dispatch,
SetStateAction,
ReactNode,
} from "react";
// Context carrying the [darkTheme, setDarkTheme] state tuple. Holds
// [undefined, undefined] until a DarkThemeProvider supplies real state.
const DarkThemeContext = createContext<
  [boolean, Dispatch<SetStateAction<boolean>>]
>([undefined, undefined]);
// Provider that owns the dark-theme state tuple (created by the caller via
// useState and passed in as `value`) and syncs it with the environment.
export function DarkThemeProvider({
  children,
  value,
}: {
  children: ReactNode;
  value: [boolean, Dispatch<SetStateAction<boolean>>];
}) {
  const [darkTheme, setDarkTheme] = value;
  // On mount: prefer the persisted "theme" localStorage entry; otherwise
  // fall back to the OS-level prefers-color-scheme media query.
  useEffect(() => {
    setDarkTheme(
      localStorage.getItem("theme")
        ? localStorage.getItem("theme") === "dark"
        : window.matchMedia("(prefers-color-scheme: dark)").matches
    );
  }, [setDarkTheme]);
  // Mirror the state onto <body> as a "dark" class so global CSS can react.
  useEffect(() => {
    if (darkTheme) document.body.classList.add("dark");
    else document.body.classList.remove("dark");
  }, [darkTheme]);
  return (
    <DarkThemeContext.Provider value={[darkTheme, setDarkTheme]}>
      {children}
    </DarkThemeContext.Provider>
  );
}
/**
 * Hook returning the [darkTheme, setDarkTheme] tuple supplied by the nearest
 * DarkThemeProvider.
 */
export function useDarkTheme() {
  const themeTuple = useContext(DarkThemeContext);
  return themeTuple;
}
|
# Deployment helper: reads DB/NATS settings from the app's OpenShift config
# map, logs into the SOAM Keycloak instance as the SSO admin, then (below)
# fetches the realm public key and rebuilds the application config map.
# Args: $1=environment, $2=application name, $3=OpenShift namespace prefix.
envValue=$1
APP_NAME=$2
OPENSHIFT_NAMESPACE=$3
TZVALUE="America/Vancouver"
SOAM_KC_REALM_ID="master"
KCADM_FILE_BIN_FOLDER="/home/jenkins/workspace/${OPENSHIFT_NAMESPACE}-tools/keycloak-9.0.3/bin"
# Scrape values from the config-map JSON with sed.
# NOTE(review): this is fragile — it depends on exact key quoting and trailing
# commas in `oc -o json` output; `oc get ... -o jsonpath` would be sturdier.
DB_JDBC_CONNECT_STRING=$(oc -o json get configmaps ${APP_NAME}-${envValue}-config | sed -n 's/.*"DB_JDBC_CONNECT_STRING": "\(.*\)",/\1/p')
DB_PWD=$(oc -o json get configmaps ${APP_NAME}-${envValue}-config | sed -n "s/.*\"DB_PWD_${APP_NAME}\": \"\(.*\)\",/\1/p")
DB_USER=$(oc -o json get configmaps ${APP_NAME}-${envValue}-config | sed -n "s/.*\"DB_USER_${APP_NAME}\": \"\(.*\)\"/\1/p")
SOAM_KC=$OPENSHIFT_NAMESPACE-$envValue.pathfinder.gov.bc.ca
NATS_CLUSTER=educ_pen_nats_cluster
NATS_URL="nats://nats.${OPENSHIFT_NAMESPACE}-${envValue}.svc.cluster.local:4222"
# NOTE(review): the next line decodes the admin password and prints it to the
# build log without assigning it anywhere — looks like leftover debug output
# and leaks a secret into CI logs; confirm it can be removed.
oc -o json get secret sso-admin-${envValue} | sed -n 's/.*"password": "\(.*\)",/\1/p' | base64 --decode
oc project $OPENSHIFT_NAMESPACE-$envValue
SOAM_KC_LOAD_USER_ADMIN=$(oc -o json get secret sso-admin-${envValue} | sed -n 's/.*"username": "\(.*\)"/\1/p' | base64 --decode)
SOAM_KC_LOAD_USER_PASS=$(oc -o json get secret sso-admin-${envValue} | sed -n 's/.*"password": "\(.*\)",/\1/p' | base64 --decode)
oc project $OPENSHIFT_NAMESPACE-tools
echo SOAM USER: $SOAM_KC_LOAD_USER_ADMIN
# NOTE(review): echoing the admin password into the log — confirm acceptable.
echo SOAM PASS: $SOAM_KC_LOAD_USER_PASS
###########################################################
#Fetch the public key
###########################################################
$KCADM_FILE_BIN_FOLDER/kcadm.sh config credentials --server https://$SOAM_KC/auth --realm $SOAM_KC_REALM_ID --user $SOAM_KC_LOAD_USER_ADMIN --password $SOAM_KC_LOAD_USER_PASS
# Print the realm's RSA public key body (no PEM header/footer) to stdout.
# BUGFIX: the previous version prefixed the pipeline with `executorID= `,
# which merely ran the command with an empty environment variable — the
# function only worked because the output fell through to stdout anyway.
getPublicKey(){
    $KCADM_FILE_BIN_FOLDER/kcadm.sh get keys -r $SOAM_KC_REALM_ID | grep -Po 'publicKey" : "\K([^"]*)'
}
echo Fetching public key from SOAM
# Assemble a one-line PEM, then re-wrap it at 64 characters using hard-coded
# substring offsets. NOTE(review): the offsets assume a 2048-bit RSA key of a
# fixed base64 length — a different key size silently mangles the PEM; a
# `fold -w64` based rewrap would be safer.
soamFullPublicKey="-----BEGIN PUBLIC KEY----- $(getPublicKey) -----END PUBLIC KEY-----"
newline=$'\n'
formattedPublicKey="${soamFullPublicKey:0:26}${newline}${soamFullPublicKey:27:64}${newline}${soamFullPublicKey:91:64}${newline}${soamFullPublicKey:155:64}${newline}${soamFullPublicKey:219:64}${newline}${soamFullPublicKey:283:64}${newline}${soamFullPublicKey:347:64}${newline}${soamFullPublicKey:411:9}${newline}${soamFullPublicKey:420}"
###########################################################
#Setup for config-map
###########################################################
echo
echo Creating config map $APP_NAME-config-map
# Idempotent create-or-update: --dry-run emits YAML which `oc apply` reconciles.
# NOTE(review): KEYCLOAK_PUBLIC_KEY uses the unformatted key, not
# $formattedPublicKey — confirm which one the application expects.
oc create -n $OPENSHIFT_NAMESPACE-$envValue configmap $APP_NAME-config-map --from-literal=TZ=$TZVALUE --from-literal=NATS_URL=$NATS_URL --from-literal=NATS_CLUSTER=$NATS_CLUSTER --from-literal=JDBC_URL=$DB_JDBC_CONNECT_STRING --from-literal=ORACLE_USERNAME="$DB_USER" --from-literal=ORACLE_PASSWORD="$DB_PWD" --from-literal=KEYCLOAK_PUBLIC_KEY="$soamFullPublicKey" --from-literal=SPRING_SECURITY_LOG_LEVEL=INFO --from-literal=SPRING_WEB_LOG_LEVEL=INFO --from-literal=APP_LOG_LEVEL=INFO --from-literal=SPRING_BOOT_AUTOCONFIG_LOG_LEVEL=INFO --from-literal=SPRING_SHOW_REQUEST_DETAILS=false --dry-run -o yaml | oc apply -f -
echo
echo Setting environment variables for $APP_NAME-$SOAM_KC_REALM_ID application
oc project $OPENSHIFT_NAMESPACE-$envValue
oc set env --from=configmap/$APP_NAME-config-map dc/$APP_NAME-$SOAM_KC_REALM_ID
#include <iostream>
#include <vector>
#include <chrono>
// Multiplies two size-by-size matrices stored row-major in flat vectors and
// returns the product, also row-major. Uses i-k-j loop order so the innermost
// loop walks both `result` and `matrix2` contiguously.
std::vector<int> multiplyMatrices(const std::vector<int>& matrix1, const std::vector<int>& matrix2, int size) {
    std::vector<int> result(size * size, 0);
    for (int row = 0; row < size; ++row) {
        const int rowBase = row * size;
        for (int k = 0; k < size; ++k) {
            const int lhs = matrix1[rowBase + k];
            const int colBase = k * size;
            for (int col = 0; col < size; ++col) {
                result[rowBase + col] += lhs * matrix2[colBase + col];
            }
        }
    }
    return result;
}
// Demo driver: multiplies two hard-coded 2x2 matrices, prints the product and
// the wall-clock time the multiplication took in microseconds.
int main() {
    std::vector<int> matrix1 = {1, 2, 3, 4};
    std::vector<int> matrix2 = {5, 6, 7, 8};
    int size = 2;
    // Time only the multiplication, not the printing.
    auto startTime = std::chrono::high_resolution_clock::now();
    std::vector<int> result = multiplyMatrices(matrix1, matrix2, size);
    auto endTime = std::chrono::high_resolution_clock::now();
    std::cout << "Resulting matrix: " << std::endl;
    for (int i = 0; i < size; ++i) {
        for (int j = 0; j < size; ++j) {
            std::cout << result[i * size + j] << ' ';
        }
        std::cout << std::endl;
    }
    std::cout << "\n" << "TIME: " << std::chrono::duration_cast<std::chrono::microseconds>(endTime - startTime).count() << " microseconds" << std::endl;
    return 0;
}
// TODO: a work in progress
var keycloakUrl = "rest/keycloak.json";
// Knockout-backed wrapper around the Keycloak JS adapter: persists tokens via
// dukeconSettings, exposes observable auth state (username/loggedIn/loggedOut)
// and defers adapter initialization until the DOM is ready AND the app is
// online (see initPromise / check() at the bottom).
var dukecloak = new function () {
    // Data
    var self = this;
    self.keycloakAuth = new Keycloak(keycloakUrl);
    // Observable auth state consumed by the UI bindings.
    self.auth = {
        username: ko.observable(""),
        loggedIn: ko.observable(false),
        loggedOut: ko.observable(true)
    };
    // Persist the current token set so a page reload can resume the session.
    function saveTokens() {
        dukeconSettings.saveSetting('keycloak_token', dukecloak.keycloakAuth.token);
        dukeconSettings.saveSetting('keycloak_refreshToken', dukecloak.keycloakAuth.refreshToken);
        dukeconSettings.saveSetting('keycloak_idToken', dukecloak.keycloakAuth.idToken);
        dukeconSettings.saveSetting('keycloak_timeSkew', dukecloak.keycloakAuth.timeSkew);
    }
    // Drop every persisted credential (tokens, skew, cached username).
    function clearTokens() {
        dukeconSettings.clearSetting('keycloak_token');
        dukeconSettings.clearSetting('keycloak_refreshToken');
        dukeconSettings.clearSetting('keycloak_idToken');
        dukeconSettings.clearSetting('keycloak_timeSkew');
        dukeconSettings.clearSetting('keycloak_username');
    }
    // Refresh the token, resolve the username (cache-first), then trigger a
    // data sync. On refresh failure the saved tokens are assumed stale and
    // the user is treated as logged out.
    self.loadUserData = function () {
        dukecloak.keycloakAuth.updateToken()
            .success(function () {
                var username = dukeconSettings.getSetting('keycloak_username');
                if (username) {
                    dukecloak.auth.username(username);
                } else {
                    dukecloak.keycloakAuth.loadUserProfile().success(function (profile) {
                        dukecloak.auth.username(profile.username);
                        // NOTE(review): duplicated call below — harmless but
                        // likely unintentional.
                        dukecloak.auth.username(profile.username);
                        dukeconSettings.saveSetting('keycloak_username', dukecloak.auth.username());
                        console.log("Logged in: " + dukecloak.auth.username());
                    }).error(function (result) {
                        console.log("Unable to load user profile");
                        console.log("result.status: " + (result && result.status));
                        console.log("result.text: " + (result && result.text));
                    });
                }
                dukeconSynch.pull();
            })
            .error(function (result) {
                console.log("Unable to update token");
                console.log("result.status: " + (result && result.status));
                console.log("result.text: " + (result && result.text));
                /* load user data is quite close to the initial setup
                   failling an update here might indicate the the saved tokens are no longer valid */
                dukecloak.auth.loggedIn(false);
                dukecloak.auth.loggedOut(true);
                clearTokens();
            });
    };
    // Minimal promise shim mirroring the Keycloak adapter's legacy
    // success()/error() callback style (pre-native-Promise API).
    function createPromise() {
        var p = {
            setSuccess: function (result) {
                p.success = true;
                p.result = result;
                if (p.successCallback) {
                    p.successCallback(result);
                }
            },
            setError: function (result) {
                p.error = true;
                p.result = result;
                if (p.errorCallback) {
                    p.errorCallback(result);
                }
            },
            promise: {
                // If already settled, invoke immediately; otherwise store the
                // callback for later. Returns the promise for chaining.
                success: function (callback) {
                    if (p.success) {
                        callback(p.result);
                    } else if (!p.error) {
                        p.successCallback = callback;
                    }
                    return p.promise;
                },
                error: function (callback) {
                    if (p.error) {
                        callback(p.result);
                    } else if (!p.success) {
                        p.errorCallback = callback;
                    }
                    return p.promise;
                }
            }
        }
        return p;
    }
    // Clear local credentials first, then end the SSO session server-side.
    self.logout = function () {
        clearTokens();
        dukecloak.keycloakAuth.logout().success(function () {
            dukecloak.auth.loggedIn(false);
            dukecloak.auth.loggedOut(true);
            dukecloak.auth.username("");
        }).error(function () {
            // NOTE(review): placeholder error message — consider something
            // actionable (and surfacing the failure to the user).
            console.log("WTF");
        });
    };
    var dukecloakInitialized = false;
    // Start an interactive login. If the adapter was never initialized,
    // initialize with login-required instead (which triggers the login flow).
    self.login = function () {
        if (!dukecloakInitialized) {
            self.init(true);
            return;
        }
        dukecloak.keycloakAuth.login().success(function () {
            dukecloak.auth.loggedIn(true);
            dukecloak.auth.loggedOut(false);
        }).error(function () {
            dukecloak.auth.loggedIn(false);
            dukecloak.auth.loggedOut(true);
        });
    };
    // Adapter lifecycle hooks: keep persisted tokens in sync with the adapter.
    self.keycloakAuth.onAuthSuccess = function () {
        console.log("Auth Success!!");
        saveTokens();
    };
    self.keycloakAuth.onAuthRefreshSuccess = function () {
        console.log("Auth Refreshed!!");
        saveTokens();
    };
    self.keycloakAuth.onAuthLogout = function () {
        console.log("Logged out!!");
        clearTokens();
    };
    // Initialize the Keycloak adapter, seeding it with any persisted tokens.
    // `login` forces the login-required flow; otherwise check-sso is used.
    self.init = function (login) {
        if (dukecloakInitialized) {
            return;
        }
        // https://issues.jboss.org/browse/KEYCLOAK-2322
        dukecloak.keycloakAuth.timeSkew = dukeconSettings.getSetting('keycloak_timeSkew');
        dukecloak.keycloakAuth.init({
            onLoad: login ? "login-required" : "check-sso",
            token: dukeconSettings.getSetting('keycloak_token'),
            idToken: dukeconSettings.getSetting('keycloak_idToken'),
            refreshToken: dukeconSettings.getSetting('keycloak_refreshToken')
        }).success(function (authenticated) {
            dukecloakInitialized = true;
            dukecloak.auth.loggedIn(authenticated);
            dukecloak.auth.loggedOut(!authenticated);
            console.log('Authenticated: ' + authenticated);
            if (authenticated) {
                // Diagnostic dump of token timing (clock skew debugging).
                console.log('local time: ' + new Date().getTime() / 1000);
                console.log('iat: ' + dukecloak.keycloakAuth.tokenParsed.iat);
                console.log('diff: ' + (new Date().getTime() / 1000 - dukecloak.keycloakAuth.tokenParsed.iat));
                console.log('exp in: ' + (dukecloak.keycloakAuth.tokenParsed.exp - new Date().getTime() / 1000));
                console.log('isExpired: ' + dukecloak.keycloakAuth.isTokenExpired());
                dukecloak.loadUserData();
            }
        }).error(function () {
            console.log("Error initializing keycloak");
        });
    }
    // ensure that keycloak is initialized when dom is ready and application is online
    self.initPromise = createPromise();
    self.initPromise.promise.success(function () {
        self.init();
    });
    self.online = false;
    self.domReady = false;
    // Fire init exactly once both preconditions (online + DOM ready) hold.
    self.check = function () {
        if (self.online && self.domReady) {
            self.initPromise.setSuccess();
        }
    }
    // Called externally when connectivity is (re)established.
    self.nowOnline = function () {
        self.online = true;
        console.log("dukecloak: online");
        self.check();
    }
    $(document).ready(function () {
        console.log("dukecloak: documentready");
        self.domReady = true;
        self.check();
    });
};
|
package club.blog.redis.service.zset;
import java.util.Set;
/**
 * Thin abstraction over Redis sorted-set (ZSET) operations.
 *
 * NOTE(review): the semantics of the {@code get} overloads (index range vs.
 * score range, inclusive bounds) are not visible here — confirm against the
 * implementing class before relying on them.
 */
public interface ZSetJedis {
    /** Returns all members of the sorted set stored at {@code key}. */
    Set<String> get(String key);
    /** Returns members of the sorted set at {@code key} bounded by {@code end}. */
    Set<String> get(String key, Long end);
    /** Returns members of the sorted set at {@code key} between {@code start} and {@code end}. */
    Set<String> get(String key, Long start, Long end);
    /** Adds {@code value} to the sorted set at {@code key} (score presumably defaulted — confirm). */
    void zadd(String key, String value);
    /** Adds {@code value} with an explicit {@code score} to the sorted set at {@code key}. */
    void zadd(String key, String value, Long score);
    /** Removes a single member from the sorted set at {@code key}. */
    void zrem(String key, String value);
    /** Removes every member in {@code values} from the sorted set at {@code key}. */
    void zrem(String key, Set<String> values);
}
|
import mongoose, { Schema } from 'mongoose';
// Mongoose model for blog documents with soft-delete bookkeeping.
const blogSchema = new Schema({
  // Soft-delete marker: when populated, records the deletion time and the
  // contact who performed it.
  deletedAt: { date: { type: Date }, actor: { type: Schema.Types.ObjectId, ref: 'contact' } },
  // BUGFIX: pass the function reference `Date.now`, not the call `Date.now()`.
  // Calling it evaluates once at schema-definition time, stamping every
  // document ever created with the same timestamp.
  createdAt: { type: Date, default: Date.now },
  updatedAt: { type: Date },
});
const blogModel = mongoose.model('blog', blogSchema);
export default blogModel;
|
<reponame>mason-fish/brim
import {useSelector} from "react-redux"
import React, {useEffect} from "react"
import {ipcRenderer} from "electron"
import {XLatestError} from "./LatestError"
import AboutModal from "./AboutModal"
import ErrorNotice from "./ErrorNotice"
import HTMLContextMenu from "./HTMLContextMenu"
import Preferences from "./Preferences/Preferences"
import Prefs from "../state/Prefs"
import SpaceModal from "./SpaceModal"
import View from "../state/View"
import brim from "../brim"
import SearchPage from "./SearchPage"
import {PopNotice} from "./PopNotice"
import {Modals} from "./Modals"
import BrimToaster from "./BrimToaster"
import BrimTooltip from "./BrimTooltip"
import HookLog from "app/system-test/HookLog"
import ColumnsModal from "app/columns/columns-modal"
/**
 * Root application component: applies user time preferences globally, signals
 * the main process that the renderer is ready, and mounts the search page
 * alongside every global modal/overlay.
 */
export default function App() {
  // Configure the shared brim.time helpers from persisted user preferences.
  brim.time.setZone(useSelector(View.getTimeZone))
  brim.time.setDefaultFormat(useSelector(Prefs.getTimeFormat))
  // One-shot handshake with the Electron main process after first render.
  useEffect(() => {
    ipcRenderer.invoke("windows:ready")
  }, [])
  return (
    <div className="app-wrapper">
      <div className="title-bar-drag-area" />
      <XLatestError />
      <SearchPage />
      {/* Global Modals */}
      <ErrorNotice />
      <Preferences />
      <Modals />
      <AboutModal />
      <SpaceModal />
      <ColumnsModal />
      <HTMLContextMenu />
      <BrimToaster />
      <PopNotice />
      <BrimTooltip />
      {/* Integration-test hook, only rendered under BRIM_ITEST */}
      {process.env.BRIM_ITEST === "true" && <HookLog />}
    </div>
  )
}
|
<gh_stars>1-10
package info.javaspecfeature;
import cucumber.api.CucumberOptions;
import cucumber.api.junit.Cucumber;
import org.junit.runner.RunWith;
/**
 * Cucumber JUnit entry point: runs the feature files, skipping scenarios
 * tagged {@code @wip}, and writes pretty/html/rerun reports under target/.
 */
@RunWith(Cucumber.class)
@CucumberOptions(//features = {"target"},
        format = { "pretty", "html:target/cucumber", "rerun:target/rerun.txt" },
        monochrome = true,
        tags = { "~@wip" })
public final class FeatureIT { }
// Doxygen-generated search index (symbol name, display name, target page
// anchors) for the usart3.c documentation — do not edit by hand.
var searchData=
[
  ['uart_5fbrr_5fsampling16',['UART_BRR_SAMPLING16',['../usart3_8c.html#a9250770f3a90b54703cf9fcb5023e617',1,'usart3.c']]],
  ['uart_5fbrr_5fsampling8',['UART_BRR_SAMPLING8',['../usart3_8c.html#aa80d222c1ad6f9ca0fddfc0b87fe8464',1,'usart3.c']]],
  ['usart3_2ec',['usart3.c',['../usart3_8c.html',1,'']]]
];
|
#include <stdio.h>
/*
 * Reads an integer N and, when N is <= 0, counts it up toward zero twice:
 * once with a while loop and once with a for loop. Positive N prints nothing.
 *
 * NOTE(review): the two loops use different upper bounds — the while loop
 * runs while w <= 0 (its last printed value is 0) but the for loop stops at
 * i < 0 (last printed value is -1). Confirm which bound is intended.
 */
int main()
{
    int numbers;
    printf("N: ");
    scanf("%d", &numbers);
    printf("--- WHILE ---\n");
    int w = numbers;
    while (w <= 0)
    {
        printf("W = %d\n", w);
        w++;
    }
    printf("--- FOR ---\n");
    for (int i = numbers; i < 0; i++)
    {
        printf("F = %d\n", i);
    }
    return 0;
}
|
<gh_stars>1-10
package elasta.composer.respose.generator;
import io.vertx.core.json.JsonArray;
/**
 * Response generator specialization that converts a value of type {@code T}
 * into a Vert.x {@link JsonArray}.
 *
 * Created by sohan on 5/12/2017.
 */
public interface JsonArrayResponseGenerator<T> extends ResponseGenerator<T, JsonArray> {
    /**
     * Converts the given value into a {@link JsonArray}.
     *
     * @param t the value to convert
     * @return the JSON array representation
     * @throws Throwable if conversion fails (contract inherited from ResponseGenerator)
     */
    @Override
    JsonArray apply(T t) throws Throwable;
}
|
def merge_sets(set1: set, set2: set, set3: set, set4: set) -> list:
    """Return a sorted list of all elements drawn from the four sets.

    Elements shared between different sets appear once per set they occur in
    (each set contributes all of its members; only within-set uniqueness is
    guaranteed by the set type itself).
    """
    combined = [*set1, *set2, *set3, *set4]
    return sorted(combined)
<filename>constants/package-suggestions.js
// Action-type constants for the package-suggestions search lifecycle
// (start / success / error), shared between action creators and reducers.
module.exports = {
  SEARCH_START: 'PACKAGE_SUGGESTIONS_SEARCH_START',
  SEARCH_SUCCESS: 'PACKAGE_SUGGESTIONS_SEARCH_SUCCESS',
  SEARCH_ERROR: 'PACKAGE_SUGGESTIONS_SEARCH_ERROR'
};
import {useState} from 'react'
/**
 * React state persisted to localStorage under `key`.
 *
 * Initial state is the stored value, or `defaultValue` when nothing stored.
 * Calling the updater with `undefined` removes the stored entry.
 *
 * NOTE(review): updateState(undefined) also sets the in-memory state to
 * undefined (despite the `T` typing) rather than falling back to
 * defaultValue — confirm that is intended. Also assumes browser-only use
 * (localStorage at render time).
 */
export function useStorageState<T extends string>(key: string, defaultValue: T) {
  const [state, setState] = useState<T>(() => (
    localStorage.getItem(key) || defaultValue
  ) as T)
  function updateState(value?: T) {
    if (value === undefined) {
      localStorage.removeItem(key)
    } else {
      localStorage.setItem(key, value as string)
    }
    setState(value)
  }
  // Tuple-typed return mirroring the useState API.
  return [state, updateState] as [typeof state, typeof updateState]
}
|
#!/usr/bin/env bash
# Install the Brave browser on Debian/Ubuntu: add Brave's signing key and apt
# repository, then install the package.
# Brave: https://brave.com/linux/
# NOTE(review): this first `apt install` has no -y flag and will prompt
# interactively (the final install does pass -y) — confirm that is intended.
sudo apt install apt-transport-https curl
sudo curl -fsSLo /usr/share/keyrings/brave-browser-archive-keyring.gpg https://brave-browser-apt-release.s3.brave.com/brave-browser-archive-keyring.gpg
echo "deb [signed-by=/usr/share/keyrings/brave-browser-archive-keyring.gpg arch=amd64] https://brave-browser-apt-release.s3.brave.com/ stable main"|sudo tee /etc/apt/sources.list.d/brave-browser-release.list
sudo apt update && sudo apt install -y --no-install-recommends brave-browser
|
<!-- Basic contact-details form posted back to the current URL.
     NOTE(review): action is empty and no field carries `required` or
     server-side labels — confirm both are intended. -->
<form action="" method="post">
    <div>
        <label for="firstName">First Name:</label>
        <input type="text" id="firstName" name="firstName">
    </div>
    <div>
        <label for="lastName">Last Name:</label>
        <input type="text" id="lastName" name="lastName">
    </div>
    <div>
        <label for="email">Email:</label>
        <input type="email" id="email" name="email">
    </div>
    <div>
        <label for="dob">Date of Birth:</label>
        <input type="date" id="dob" name="dob">
    </div>
    <button type="submit">Submit</button>
</form>
package com.cannolicatfish.rankine.util;
import com.cannolicatfish.rankine.init.RankineTags;
import com.cannolicatfish.rankine.init.WGConfig;
import net.minecraft.block.Block;
import net.minecraft.block.BlockState;
import net.minecraft.block.Blocks;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntitySpawnPlacementRegistry;
import net.minecraft.entity.EntityType;
import net.minecraft.entity.LivingEntity;
import net.minecraft.entity.passive.SheepEntity;
import net.minecraft.fluid.FluidState;
import net.minecraft.tags.FluidTags;
import net.minecraft.util.ResourceLocation;
import net.minecraft.util.math.BlockPos;
import net.minecraft.world.ISeedReader;
import net.minecraft.world.World;
import net.minecraft.world.biome.Biome;
import net.minecraft.world.biome.BiomeRegistry;
import net.minecraft.world.gen.Heightmap;
import net.minecraftforge.registries.ForgeRegistries;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
/**
 * Static worldgen configuration tables and block/biome helper queries for the
 * Rankine mod. The static lists are populated once from config values by
 * {@link #initConfigs()} / {@link #initOreTextures()} and then read during
 * world generation. All index-aligned lists are parallel to GEN_BIOMES.
 */
public class WorldgenUtils {
    public static List<ResourceLocation> GEN_BIOMES = new ArrayList<>();
    // Soil horizon blocks per biome: two profiles of O/A/B layers.
    public static List<Block> O1 = new ArrayList<>();
    public static List<Block> A1 = new ArrayList<>();
    public static List<Block> B1 = new ArrayList<>();
    public static List<Block> O2 = new ArrayList<>();
    public static List<Block> A2 = new ArrayList<>();
    public static List<Block> B2 = new ArrayList<>();
    // Intrusion config: raw "block|weight|ore|chance" strings and their parsed forms.
    public static List<List<String>> INTRUSION_LISTS = new ArrayList<>();
    public static List<List<Block>> INTRUSION_BLOCKS = new ArrayList<>();
    public static List<List<Float>> INTRUSION_WEIGHTS = new ArrayList<>();
    public static List<List<Block>> INTRUSION_ORES = new ArrayList<>();
    public static List<List<Float>> INTRUSION_ORE_CHANCES = new ArrayList<>();
    public static List<WeightedCollection<BlockState>> INTRUSION_COLLECTIONS = new ArrayList<>();
    public static List<List<String>> LAYER_LISTS = new ArrayList<>();
    // Vegetation config: raw "block|weight" strings and weighted collections.
    public static List<List<String>> VEGETATION_LISTS = new ArrayList<>();
    public static List<WeightedCollection<BlockState>> VEGETATION_COLLECTIONS = new ArrayList<>();
    public static List<Block> GRAVELS = new ArrayList<>();
    public static List<Block> ORE_STONES = new ArrayList<>();
    public static List<String> ORE_TEXTURES = new ArrayList<>();

    /**
     * Parses the ORE_STONES config entries ("block|texture" or just "block")
     * and records the texture name for each.
     */
    public static void initOreTextures() {
        for (String ORE : WGConfig.MISC.ORE_STONES.get()) {
            String[] ores = ORE.split("\\|");
            if (ores.length > 1) {
                ORE_TEXTURES.add(ores[1]);
            } else {
                // No explicit texture: fall back to the block id itself.
                ORE_TEXTURES.add(ores[0]);
            }
        }
    }

    /**
     * Populates all static tables from the mod config: ore stones, per-biome
     * soil/intrusion/layer/vegetation settings, and the weighted collections
     * used during generation. Must run before world generation reads them.
     */
    public static void initConfigs() {
        for (String ORE : WGConfig.MISC.ORE_STONES.get()) {
            ORE_STONES.add(ForgeRegistries.BLOCKS.getValue(ResourceLocation.tryCreate(ORE.split("\\|")[0])));
        }
        for (List<Object> L : WGConfig.BIOME_GEN.BIOME_SETTINGS.get()) {
            String biomeToAdd = (String) L.get(0);
            // A namespaced id ("mod:biome") targets one biome; a bare name is
            // treated as a Biome.Category and expanded to all members.
            List<String> biomeName = Arrays.asList(biomeToAdd.split(":"));
            if (biomeName.size() > 1) {
                Block gravel = ResourceLocation.tryCreate((String) L.get(5)) == null ? Blocks.AIR : ForgeRegistries.BLOCKS.getValue(ResourceLocation.tryCreate((String) L.get(5)));
                populateLists(ResourceLocation.tryCreate(biomeToAdd), (List<String>) L.get(1), (List<String>) L.get(2), (List<String>) L.get(3), (List<String>) L.get(4), gravel);
            } else {
                for (ResourceLocation RS : getBiomeNamesFromCategory(Collections.singletonList(Biome.Category.byName(biomeToAdd)), true)) {
                    Block gravel = ResourceLocation.tryCreate((String) L.get(5)) == null ? Blocks.AIR : ForgeRegistries.BLOCKS.getValue(ResourceLocation.tryCreate((String) L.get(5)));
                    populateLists(RS, (List<String>) L.get(1), (List<String>) L.get(2), (List<String>) L.get(3), (List<String>) L.get(4), gravel);
                }
            }
        }
        // Parse intrusion entries "block|weight|ore|oreChance".
        for (List<String> I : INTRUSION_LISTS) {
            int ind = 0;
            WeightedCollection<BlockState> col = new WeightedCollection<>();
            List<Block> tempIB = new ArrayList<>();
            List<Float> tempIW = new ArrayList<>();
            List<Block> tempIO = new ArrayList<>();
            List<Float> tempIC = new ArrayList<>();
            for (String entry : I) {
                tempIB.add(ForgeRegistries.BLOCKS.getValue(ResourceLocation.tryCreate(entry.split("\\|")[0])));
                tempIW.add(Float.parseFloat(entry.split("\\|")[1]));
                tempIO.add(ForgeRegistries.BLOCKS.getValue(ResourceLocation.tryCreate(entry.split("\\|")[2])));
                tempIC.add(Float.parseFloat(entry.split("\\|")[3]));
                col.add(tempIW.get(ind), tempIB.get(ind).getDefaultState());
                ind += 1;
            }
            INTRUSION_BLOCKS.add(tempIB);
            INTRUSION_WEIGHTS.add(tempIW);
            INTRUSION_ORES.add(tempIO);
            INTRUSION_ORE_CHANCES.add(tempIC);
            INTRUSION_COLLECTIONS.add(col);
        }
        // Parse vegetation entries "block|weight".
        for (List<String> V : VEGETATION_LISTS) {
            int ind = 0;
            WeightedCollection<BlockState> col = new WeightedCollection<>();
            List<Block> tempVB = new ArrayList<>();
            List<Float> tempVW = new ArrayList<>();
            for (String entry : V) {
                tempVB.add(ForgeRegistries.BLOCKS.getValue(ResourceLocation.tryCreate(entry.split("\\|")[0])));
                tempVW.add(Float.parseFloat(entry.split("\\|")[1]));
                col.add(tempVW.get(ind), tempVB.get(ind).getDefaultState());
                ind += 1;
            }
            VEGETATION_COLLECTIONS.add(col);
        }
    }

    /**
     * Appends one biome's settings to every parallel table. An empty SOILS
     * list stores AIR placeholders for all six horizon slots.
     */
    private static void populateLists(ResourceLocation BIOME, List<String> SOILS, List<String> INTRUSIONS, List<String> STONES, List<String> VEGETATION, Block GRAVEL) {
        GEN_BIOMES.add(BIOME);
        if (SOILS.isEmpty()) {
            O1.add(Blocks.AIR);
            A1.add(Blocks.AIR);
            B1.add(Blocks.AIR);
            O2.add(Blocks.AIR);
            A2.add(Blocks.AIR);
            B2.add(Blocks.AIR);
        } else {
            O1.add(ForgeRegistries.BLOCKS.getValue(ResourceLocation.tryCreate(SOILS.get(0))));
            A1.add(ForgeRegistries.BLOCKS.getValue(ResourceLocation.tryCreate(SOILS.get(1))));
            B1.add(ForgeRegistries.BLOCKS.getValue(ResourceLocation.tryCreate(SOILS.get(2))));
            O2.add(ForgeRegistries.BLOCKS.getValue(ResourceLocation.tryCreate(SOILS.get(3))));
            A2.add(ForgeRegistries.BLOCKS.getValue(ResourceLocation.tryCreate(SOILS.get(4))));
            B2.add(ForgeRegistries.BLOCKS.getValue(ResourceLocation.tryCreate(SOILS.get(5))));
        }
        INTRUSION_LISTS.add(INTRUSIONS);
        LAYER_LISTS.add(STONES);
        VEGETATION_LISTS.add(VEGETATION);
        GRAVELS.add(GRAVEL);
    }

    /**
     * Collects registry names of biomes by category. With include=true,
     * returns biomes in any of the given categories; with include=false,
     * returns biomes outside them, always excluding NETHER and THEEND.
     */
    public static List<ResourceLocation> getBiomeNamesFromCategory(List<Biome.Category> biomeCats, boolean include) {
        List<ResourceLocation> b = new ArrayList<>();
        for (Biome biome : ForgeRegistries.BIOMES) {
            if (!biomeCats.isEmpty()) {
                for (Biome.Category cat : biomeCats) {
                    if (biome.getCategory() == cat && include) {
                        b.add(biome.getRegistryName());
                    }
                    if (!include && biome.getCategory() != cat && biome.getCategory() != Biome.Category.NETHER && biome.getCategory() != Biome.Category.THEEND) {
                        b.add(biome.getRegistryName());
                    }
                }
            }
            else if (!include && biome.getCategory() != Biome.Category.NETHER && biome.getCategory() != Biome.Category.THEEND) {
                b.add(biome.getRegistryName());
            }
        }
        return b;
    }

    /**
     * Returns true when any water fluid exists in the 5x3x5 box around pos
     * (x/z +-2, y 0..+2).
     */
    public static boolean isWet(ISeedReader reader, BlockPos pos) {
        for (BlockPos POS : BlockPos.getAllInBoxMutable(pos.add(-2, 0, -2), pos.add(2, 2, 2))) {
            FluidState fluidstate = reader.getFluidState(POS);
            if (fluidstate.isTagged(FluidTags.WATER)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Approximates the local water-table height from sea level and the
     * biome's depth value (scaled by 30), never below sea level.
     */
    public static int waterTableHeight(World worldIn, BlockPos pos) {
        return (int) Math.max(worldIn.getSeaLevel(), (worldIn.getSeaLevel() + worldIn.getBiome(pos).getDepth() * 30));
    }

    /** Returns true when pos lies within {@code radius} of any target (squared-distance check). */
    public static boolean inArea(BlockPos b, double radius, BlockPos... targets) {
        for (BlockPos target : targets) {
            if (b.distanceSq(target) < Math.pow(radius, 2)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Scans upward from pos and returns the first non-air block within
     * {@code height} blocks, or AIR when none is found.
     *
     * BUGFIX: the loop previously sampled {@code pos.up(height)} on every
     * iteration, ignoring the loop variable, so only the topmost position was
     * ever inspected.
     */
    public static Block getCeillingBlock(World worldIn, BlockPos pos, int height) {
        for (int i = 1; i <= height; ++i) {
            if (!worldIn.getBlockState(pos.up(i)).matchesBlock(Blocks.AIR)) {
                return worldIn.getBlockState(pos.up(i)).getBlock();
            }
        }
        return Blocks.AIR;
    }
}
|
// Copyright (C) 2018 <NAME>. Use of this source code is governed
// by a BSD-style license posted at http://blog.jeff.over.bz/license/
package refactoring
import (
"fmt"
"strconv"
"time"
"github.com/godoctor/godoctor/analysis/names"
"github.com/godoctor/godoctor/refactoring"
"github.com/godoctor/godoctor/text"
)
// CurrentYear is the year stamped into newly added copyright headers,
// captured once at package initialization.
var CurrentYear string = strconv.Itoa(time.Now().Year())

// AddCopyright is a refactoring that inserts a copyright header at the top
// of a Go source file unless one already exists.
type AddCopyright struct {
	refactoring.RefactoringBase
}
// Description returns the metadata (name, usage, parameters) the godoctor
// framework uses to present and invoke this refactoring.
func (r *AddCopyright) Description() *refactoring.Description {
	return &refactoring.Description{
		Name: "Add Copyright Header",
		// ----+----1----+----2----+----3----+----4----+----5
		Synopsis:  "Add a copyright header to a file",
		Usage:     "addcopyright <text>",
		Multifile: false,
		Params: []refactoring.Parameter{{
			Label:        "Copyright Owner:",
			Prompt:       "Name to insert into the copyright text.",
			DefaultValue: ""}},
		Hidden: false,
	}
}
// Run executes the refactoring: it refuses to add a header when any comment
// already mentions "Copyright", otherwise inserts a new header with the
// configured owner name and reformats the file.
func (r *AddCopyright) Run(config *refactoring.Config) *refactoring.Result {
	r.Init(config, r.Description())
	r.Log.ChangeInitialErrorsToWarnings()
	if r.Log.ContainsErrors() {
		return &r.Result
	}
	// Bail out if an existing copyright notice is present anywhere in comments.
	extent := r.findInComments("Copyright")
	if extent != nil {
		r.logError(*extent, "An existing copyright was found.")
		return &r.Result
	}
	// NOTE(review): assumes config.Args[0] exists and is a string; a missing
	// argument would panic here — confirm the framework validates arguments.
	r.addCopyright(config.Args[0].(string))
	r.FormatFileInEditor()
	return &r.Result
}
// findInComments returns the extent of the first occurrence of text inside
// any comment of the current file, or nil when no comment mentions it.
func (r *AddCopyright) findInComments(text string) *text.Extent {
	if occurrences := names.FindInComments(text, r.File, nil, r.Program.Fset); len(occurrences) > 0 {
		return occurrences[0]
	}
	return nil
}
// logError records an error message and associates it with the file span
// covered by extent, converting byte offsets to token positions.
func (r *AddCopyright) logError(extent text.Extent, text string) {
	file := r.Program.Fset.File(r.File.Package)
	startPos := file.Pos(extent.Offset)
	endPos := file.Pos(extent.OffsetPastEnd())
	r.Log.Error(text)
	r.Log.AssociatePos(startPos, endPos)
}
// addCopyright inserts a BSD-style one-line copyright comment at the very
// top of the file (a zero-length extent at offset 0, i.e. a pure insertion).
// An empty owner name omits the space after the year.
func (r *AddCopyright) addCopyright(name string) {
	// NOTE(review): unkeyed composite literal relies on the field order of
	// text.Extent being {Offset, Length}; keyed fields would be safer.
	extentToReplace := &text.Extent{0, 0}
	possibleSpace := " "
	if name == "" {
		possibleSpace = ""
	}
	text := fmt.Sprintf("// Copyright %s%s%s. All rights reserved.\n",
		CurrentYear, possibleSpace, name)
	r.Edits[r.Filename].Add(extentToReplace, text)
}
|
# Down-sample every neutrophil ATAC-seq BAM (deduplicated, lanes merged) to a
# common depth of 45,716,676 reads so 4-month and 21-month samples are
# directly comparable. One invocation per sample.
Down_sampling_bam_file_rev.pl 45716676 Neutrophil_ATACseq_4m_M_115_MergedLanes.rmdup.bam
Down_sampling_bam_file_rev.pl 45716676 Neutrophil_ATACseq_4m_M_114_MergedLanes.rmdup.bam
Down_sampling_bam_file_rev.pl 45716676 Neutrophil_ATACseq_4m_M_113_MergedLanes.rmdup.bam
Down_sampling_bam_file_rev.pl 45716676 Neutrophil_ATACseq_4m_M_112_MergedLanes.rmdup.bam
Down_sampling_bam_file_rev.pl 45716676 Neutrophil_ATACseq_4m_M_111_MergedLanes.rmdup.bam
Down_sampling_bam_file_rev.pl 45716676 Neutrophil_ATACseq_4m_F_105_MergedLanes.rmdup.bam
Down_sampling_bam_file_rev.pl 45716676 Neutrophil_ATACseq_4m_F_104_MergedLanes.rmdup.bam
Down_sampling_bam_file_rev.pl 45716676 Neutrophil_ATACseq_4m_F_103_MergedLanes.rmdup.bam
Down_sampling_bam_file_rev.pl 45716676 Neutrophil_ATACseq_4m_F_102_MergedLanes.rmdup.bam
Down_sampling_bam_file_rev.pl 45716676 Neutrophil_ATACseq_4m_F_101_MergedLanes.rmdup.bam
Down_sampling_bam_file_rev.pl 45716676 Neutrophil_ATACseq_21m_M_120_MergedLanes.rmdup.bam
Down_sampling_bam_file_rev.pl 45716676 Neutrophil_ATACseq_21m_M_119_MergedLanes.rmdup.bam
Down_sampling_bam_file_rev.pl 45716676 Neutrophil_ATACseq_21m_M_118_MergedLanes.rmdup.bam
Down_sampling_bam_file_rev.pl 45716676 Neutrophil_ATACseq_21m_M_117_MergedLanes.rmdup.bam
Down_sampling_bam_file_rev.pl 45716676 Neutrophil_ATACseq_21m_F_110_MergedLanes.rmdup.bam
Down_sampling_bam_file_rev.pl 45716676 Neutrophil_ATACseq_21m_M_116_MergedLanes.rmdup.bam
Down_sampling_bam_file_rev.pl 45716676 Neutrophil_ATACseq_21m_F_109_MergedLanes.rmdup.bam
Down_sampling_bam_file_rev.pl 45716676 Neutrophil_ATACseq_21m_F_108_MergedLanes.rmdup.bam
Down_sampling_bam_file_rev.pl 45716676 Neutrophil_ATACseq_21m_F_107_MergedLanes.rmdup.bam
Down_sampling_bam_file_rev.pl 45716676 Neutrophil_ATACseq_21m_F_106_MergedLanes.rmdup.bam
import pyproj
import xarray as xr
import numpy as np
def proj_coord(coord, proj_in, proj_out):
    """Reproject one packed (x, y) coordinate from proj_in to proj_out.

    Inputs:
        coord: tuple (x, y)
        proj_in: pyproj.Proj format projection
        proj_out: pyproj.Proj format projection
    Outputs:
        tuple (x, y) in the target projection
    """
    return pyproj.transform(proj_in, proj_out, *coord)
def proj_coords(coords, proj_in, proj_out):
    """Reproject a list of (x, y) coordinates from proj_in to proj_out.

    Inputs:
        coords: list of tuples (x, y)
        proj_in: pyproj.Proj format projection
        proj_out: pyproj.Proj format projection
    Outputs:
        list of tuples (x, y)
    """
    projected = []
    for coord in coords:
        projected.append(proj_coord(coord, proj_in, proj_out))
    return projected
def select_bounds(ds, bounds):
    """Clip an xarray dataset to a bounding box over its x/y coordinate axes.

    bounds is ((x_min, y_min), (x_max, y_max)); the y slice is reversed
    (max -> min) to match a descending y coordinate.
    """
    (x_min, y_min), (x_max, y_max) = bounds
    return ds.sel(x=slice(x_min, x_max), y=slice(y_max, y_min))
def make_measures_url(url_template, res, dt, freq, HV, AD):
    """Build a download URL for MEaSUREs data.

    url_template - str url with placeholders for date (%Y.%m.%d), resolution
    (km), date (%Y%j), frequency (str), polarisation ('H'/'V'),
    ascending/descending path ('A', 'D') and a product suffix.

    BUGFIX: previously `suffix` was only assigned for res == '25', so any
    other resolution raised an UnboundLocalError at the return statement.
    Now unsupported resolutions raise an explicit ValueError. (A '3.125'
    branch was sketched in the original but never enabled.)
    """
    datestr1 = dt.strftime('%Y%j')    # year + day-of-year, e.g. 2020002
    datestr2 = dt.strftime('%Y.%m.%d')
    if str(res) == '25':
        suffix = 'GRD-CSU'
    else:
        raise ValueError(
            "unsupported resolution {!r}; only '25' km is currently supported".format(res)
        )
    return url_template.format(datestr2, str(res), datestr1, freq, HV, AD, suffix)
def make_measures_download(download_template, url, username, password):
    """Fill a download-command template with credentials and the target URL.

    The template's positional slots are (username, password, url).
    """
    fields = (username, password, url)
    return download_template.format(*fields)
def plot_points(ax, points, **kwargs):
    """Plot a sequence of (x, y) points on a matplotlib-style axis.

    Extra keyword arguments are forwarded to ax.plot. Returns the axis.
    """
    xs, ys = zip(*points)
    ax.plot(xs, ys, **kwargs)
    return ax
def correct_miss_fill(ds):
    """
    Return a properly CF-decoded dataset after reconciling a conflicting
    attribute: some products carry a _FillValue that disagrees with (or lacks)
    missing_value, which makes xarray's decoder complain. Copy _FillValue
    into missing_value for every data variable that has one, then decode.
    """
    for d in ds.data_vars:
        try:
            ds[d].attrs.update({'missing_value': ds[d]._FillValue})
        except (AttributeError, KeyError):
            # Variable has no _FillValue — nothing to reconcile. The original
            # used a bare `except:`, which would also have hidden unrelated
            # failures (including KeyboardInterrupt).
            pass
    return xr.decode_cf(ds)
def c_m_ratio(ds_tb, x, y, x_off=62500, y_off=62500):
    """Derive a calibration ratio from brightness-temperature series.

    Selects a window of +/- x_off / y_off (projection units) around (x, y),
    correlates every pixel's time series against the measurement pixel M,
    picks the LEAST-correlated pixel as calibration location C, and returns
    (C, M, ratio) where ratio = C / M.

    NOTE(review): assumes ds_tb is a 3-D DataArray ordered (time, y, x) with
    descending y (see the slice order below) — confirm against the caller.
    """
    def cc(ts1, M):
        # Pearson correlation between two series, with NaNs masked out.
        coef = np.ma.corrcoef(np.ma.masked_invalid(ts1.values.flatten()), np.ma.masked_invalid(M.values.flatten()))[1][0]
        return xr.DataArray(coef)
    ds_tb_sel = ds_tb.sel(x=slice(x-x_off, x+x_off), y=slice(y+y_off, y-y_off))
    # convert the xarray data-array into a bunch of point time series
    # select series in (M)easurement location
    M = ds_tb_sel.sel(x=x, y=y, method='nearest')
    tb_points = ds_tb_sel.stack(points=('y', 'x')) # .reset_index(['x', 'y'], drop=True) # .transpose('points', 'time')
    # add a coordinate axis to the points
    # apply the function over all points to calculate the trend at each point
    # import pdb;pdb.set_trace()
    coefs = tb_points.groupby('points').apply(cc, M=M)
    # unstack back to lat lon coordinates
    coefs_2d = coefs.unstack('points').rename(dict(points_level_0='y', points_level_1='x')) # get the 2d back and rename axes back to x, y
    #LOWEST CALIBRATION
    # find the x/y index where the correlation is lowest
    idx_y, idx_x = np.where(coefs_2d==coefs_2d.min())
    # select series in (C)alibration location (with lowest correlation)
    C = ds_tb_sel[:, idx_y, idx_x].squeeze(['x', 'y']).drop(['x', 'y']) # get rid of the x and y coordinates of calibration pixel
    # which has the lowest correlation with the point of interest?
    ratio = C / M
    # #MEAN CALIBRATION
    # # find the x/y index where the correlation is mean
    # idx_y_mean, idx_x_mean = np.where(coefs_2d==coefs_2d.mean())
    # # select series in (C)alibration location (with lowest correlation)
    # C_mean = ds_tb_sel[:, idx_y_mean, idx_x_mean].squeeze(['x','y']).drop(['x','y']) # get rid of the x and y coordinates of calibration pixel
    # ratio_mean = C_mean / M
    return C, M, ratio #, C_mean, ratio_mean
|
(page, done) => {
    // Check whether any internal src-referenced resources (scripts, images,
    // iframes, ...) of the crawled page are blocked by the site's robots.txt
    // for Googlebot. Reports a warning listing blocked resources, a warning
    // on fetch timeout, or a clean result otherwise.
    var that = this;
    let url = page.getURL("last");
    let u = new URL(url);
    let r = u.origin + "/robots.txt";
    let is_done = false;
    let max_wait_time = 15000; // ms budget for the robots.txt fetch
    let my_user_agent = 'Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)';
    let label = "DOM";
    let msg = "";
    let type = "info";
    let what = "static & idle";
    // Collect absolute same-origin URLs from every element with an internal
    // src attribute (absolute same-origin, root-relative, or path-relative).
    let getInternalSrcAttributesFromDom = (dom) => {
        let all_internal_links_selector = "*[src^='" + u.origin + "'], *[src^='/'], *[src^='./'], *[src^='../']";
        var n_raw = dom.querySelectorAll(all_internal_links_selector);
        var n = [];
        for (let a of n_raw) {
            if (a.getAttribute("src").startsWith("//")) {
                // Protocol-relative URL: keep only when it targets our host.
                if (a.getAttribute("src").startsWith("//" + u.host)) {
                    n.push(u.protocol + a.getAttribute("src"));
                }
            } else {
                if (a.getAttribute("src").startsWith(u.origin)) {
                    n.push(a.getAttribute("src"));
                } else {
                    n.push(u.origin + a.getAttribute("src"));
                }
            }
        }
        return n;
    }
    let rA = [];
    // Produce the final result exactly once; was_timeout flags a fetch timeout.
    let endgame = (blocked_stuff, was_timeout = false) => {
        if (blocked_stuff.length === 0) {
            // nothing blocked
            if (was_timeout) {
                type = "warning";
                done(that.createResult(label, "Could not check for robots.txt disallowed ressources. <a href='" + r + "' target='_blank'>" + r + "</a> fetch timeout!", type, what)); return;
            } else {
                done(that.createResult()); return;
            }
        }
        msg = msg + blocked_stuff.length + " internal resources blocked via <a href='" + r + "' target='_blank'>" + r + "</a>." + that.partialCodeLink(blocked_stuff);
        type = "warning";
        done(that.createResult(label, msg, type, what)); return;
    }
    that.fetch(r, { responseFormat: 'text' }, (response) => {
        if (response.status === 200) {
            if ((response.body.includes('disallow:')) ||
                (response.body.includes('Disallow:')) ||
                (response.body.includes('DISALLOW:'))) {
                // Gather internal resource URLs from both DOM snapshots and
                // test each unique one against the robots.txt rules.
                let sdom = page.getStaticDom();
                let idom = page.getIdleDom();
                let sA = getInternalSrcAttributesFromDom(sdom);
                let iA = getInternalSrcAttributesFromDom(idom);
                sA.push(...iA);
                const uniA = [...(new Set(sA))];
                for (let s of uniA) {
                    let robot = that.simpleRobotTxt(response.body, s, my_user_agent);
                    if (robot.disallowed) {
                        let blocked_string = robot.url + " - " + robot.matches[0][1][0].rule + " - Line " + robot.matches[0][1][0].linenumber;
                        rA.push(blocked_string);
                        type = "warning";
                    }
                }
                is_done = true;
                endgame(rA);
            } else {
                // BUGFIX: a robots.txt without any Disallow rules previously
                // never called done(), so the check always ended in the
                // fetch-timeout warning. Nothing can be blocked — succeed now.
                is_done = true;
                endgame([]);
            }
        } else {
            is_done = true;
            endgame([]);
        }
    });
    // Fallback: report a fetch timeout if no result was produced in time.
    setTimeout(function () { if (!is_done) { endgame([], true); } }, max_wait_time);
}
package org.cmayes.hartree.model;
import java.util.List;
/**
 * Describes a normal vibrational mode.
 *
 * <p>A mode is modeled as a mutable list of its component
 * {@link InternalMotion} instances.
 *
 * @author cmayes
 */
public interface NormalMode {
    /**
     * Returns the component internal motions that are a part of this mode.
     *
     * @return the components
     */
    List<InternalMotion> getMotions();

    /**
     * Sets the component internal motions that make up this mode.
     *
     * @param comps
     *            the components to set
     */
    void setMotions(final List<InternalMotion> comps);
}
import { CommandOptions } from '@/typings/utils/command';
import { CmdSpawnRet } from '@/typings/utils/spawn';
// Arbitrary string key/value option map attached to a job.
export interface Options {
    [key: string]: string;
}

// Container wrapping an Options map (second element of an ArrayJob tuple).
export interface OptionsCt {
    options: Options;
}

// A job is either a bare command string or a [command, { options }] tuple.
export type ArrayJob = [string, OptionsCt];
export type Job = string | ArrayJob;

// Result of running a job: the spawn result, or void when nothing ran.
export type JobRet = CmdSpawnRet | void;

// Payload for the event fired when a single job starts.
export interface JobStartEvent {
    args: string[];
    commandOptions: CommandOptions;
}

// Payload for the event fired when a single job finishes successfully.
export interface JobEndSuccessEvent {
    args: string[];
    commandOptions: CommandOptions;
    ret: CmdSpawnRet;
}

// Payload for the event fired when a single job fails.
export interface JobEndFailureEvent {
    args: string[];
    commandOptions: CommandOptions;
    error: Error;
}

// A job-end event is either the success or the failure shape.
export type JobEndEvent = JobEndSuccessEvent | JobEndFailureEvent;

// Payload for the event fired when a batch of jobs starts.
export interface JobsStartEvent {
    commandOptions: CommandOptions;
    jobs: Job[];
}

// Payload for the event fired when a batch of jobs finishes; `ret` holds
// one result per job.
export interface JobsEndEvent {
    commandOptions: CommandOptions;
    jobs: Job[];
    ret: JobRet[];
}
|
def extract_morphological_features(word, featlst):
    """Build a '+'-joined string of the word's values for the requested features.

    Args:
        word: Token-like object whose ``feats`` attribute holds a CoNLL-U
            style feature string such as ``"Case=Nom|Number=Sing"`` (or a
            falsy value when no features exist).
        featlst: Iterable of feature names to extract, in order.

    Returns:
        A string like ``"Nom+-+Sing"`` where features missing from the word
        are encoded as ``'-'``, or ``''`` when the word has no
        morphological features at all.
    """
    if not word.feats:  # no morphological features present
        return ''
    # Split on the first '=' only so feature values that themselves contain
    # '=' do not produce a 3-tuple and crash dict().
    morph_feat_dict = dict(x.split("=", 1) for x in word.feats.split("|"))
    # '-' is the placeholder for features absent from this word.
    return '+'.join(morph_feat_dict.get(feat, '-') for feat in featlst)
#!/bin/sh
# This is the command running inside the xterm of our
# debug wrapper. It needs to take care of starting
# the server command, so it can attach to the parent
# process. In addition, here we run the command inside
# of a gdb session to allow for debugging.

# On some systems, running xterm will cause LD_LIBRARY_PATH
# to be cleared, so restore it and PATH to be safe.
# BUGFIX: quote the expansions so paths containing spaces survive.
export PATH="$PREV_PATH"
export LD_LIBRARY_PATH="$PREV_LD_LIBRARY_PATH"

# Start binderproc (or whatever sub-command is being run)
# inside of gdb, giving gdb an initial command script to
# automatically run the process without user intervention.
gdb -q -x "$2/process_wrapper_gdb.cmds" --args "$@"
|
class Paddle {
    /**
     * @param {number} which 0 for the left paddle, 1 for the right paddle.
     */
    constructor(which) {
        const isLeft = which === 0;
        this.which = which;
        // Left paddle sits at the margin; the right paddle is mirrored
        // from the far edge of the playfield.
        this.x = isLeft ? PADDLE_MARGIN : WIDTH - PADDLE_MARGIN;
        // Start vertically centered with the default size and normal controls.
        this.y = HEIGHT / 2;
        this.height = PADDLE_DEFAULT_HEIGHT;
        this.controlsReversed = false;
    }
}
|
def factorial(n):
    """Return n! for a non-negative integer n.

    Raises:
        ValueError: if ``n`` is negative (the original recursive version
            never terminated for negative input).
    """
    if n < 0:
        raise ValueError("factorial() is not defined for negative values")
    # Iterative form avoids hitting the recursion limit for large n.
    result = 1
    for i in range(2, n + 1):
        result *= i
    return result


result = factorial(7)
print(result)
#!/bin/bash
# Copyright 2019 The Knative Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This script includes common functions for testing setup and teardown.
# Force Go module mode regardless of GOPATH placement.
export GO111MODULE=on

# Pull in shared knative test-infra helpers (fail_test, header, wait_until_*, ...).
source $(dirname $0)/../vendor/knative.dev/test-infra/scripts/e2e-tests.sh

# If gcloud is not available make it a no-op, not an error.
which gcloud &>/dev/null || gcloud() { echo "[ignore-gcloud $*]" 1>&2; }

# Use GNU tools on macOS. Requires the 'grep' and 'gnu-sed' Homebrew formulae.
if [ "$(uname)" == "Darwin" ]; then
  sed=gsed
  grep=ggrep
fi

# Eventing main config.
readonly EVENTING_CONFIG="config/"

# In-memory channel CRD config.
readonly IN_MEMORY_CHANNEL_CRD_CONFIG_DIR="config/channels/in-memory-channel"

# MT Channel Based Broker config.
readonly MT_CHANNEL_BASED_BROKER_CONFIG_DIR="config/brokers/mt-channel-broker"

# Default-broker ConfigMap for the MT Channel Based Broker.
readonly MT_CHANNEL_BASED_BROKER_DEFAULT_CONFIG="config/core/configmaps/default-broker.yaml"

# Sugar Controller config. For label/annotation magic.
readonly SUGAR_CONTROLLER_CONFIG_DIR="config/sugar"
readonly SUGAR_CONTROLLER_CONFIG="config/sugar/500-controller.yaml"

# Config tracing config.
readonly CONFIG_TRACING_CONFIG="test/config/config-tracing.yaml"

# PreInstall script for v0.16
readonly PRE_INSTALL_V016="config/pre-install/v0.16.0"

# The number of controlplane replicas to run.
readonly REPLICAS=3

# Should deploy a Knative Monitoring as well (1 = yes, 0 = no).
readonly DEPLOY_KNATIVE_MONITORING="${DEPLOY_KNATIVE_MONITORING:-1}"

# Per-run scratch dir for namespace-rewritten copies of the manifests.
TMP_DIR=$(mktemp -d -t ci-$(date +%Y-%m-%d-%H-%M-%S)-XXXXXXXXXX)
readonly TMP_DIR

readonly KNATIVE_DEFAULT_NAMESPACE="knative-eventing"

# This the namespace used to install and test Knative Eventing.
# Randomized by default so concurrent CI runs do not collide.
export TEST_EVENTING_NAMESPACE
TEST_EVENTING_NAMESPACE="${TEST_EVENTING_NAMESPACE:-"knative-eventing-"$(cat /dev/urandom \
  | tr -dc 'a-z0-9' | fold -w 10 | head -n 1)}"
# Resolve the newest patch release tag in the current branch's
# major.minor release line (e.g. v0.16.3 for a v0.16.x branch).
latest_version() {
  local semver major_minor
  semver="$(git describe --match "v[0-9]*" --abbrev=0)"
  major_minor="$(echo "$semver" | cut -d. -f1-2)"
  # Get the latest patch release for the major minor
  git tag -l "${major_minor}*" | sort -r --version-sort | head -n1
}
# Latest release. If user does not supply this as a flag, the latest
# tagged release on the current branch will be used.
readonly LATEST_RELEASE_VERSION=$(latest_version)

# YAML files applied during setup that must be deleted again on teardown.
UNINSTALL_LIST=()
# Setup the Knative environment for running tests: core eventing, the MT
# Channel Based Broker, the Sugar Controller, and the chaos duck.
# NOTE(review): install_knative_eventing's failure is not fail_test-guarded
# like the others — confirm whether that is intentional.
function knative_setup() {
  install_knative_eventing
  install_mt_broker || fail_test "Could not install MT Channel Based Broker"
  install_sugar || fail_test "Could not install Sugar Controller"
  unleash_duck || fail_test "Could not unleash the chaos duck"
}
# Scale each named deployment in the test namespace down to zero and back up
# to ${REPLICAS} so all pods restart for the HA tests.
# Sets the (global) failed=1 flag if any kubectl scale call fails.
function scale_controlplane() {
  for deployment in "$@"; do
    # Make sure all pods run in leader-elected mode.
    kubectl -n "${TEST_EVENTING_NAMESPACE}" scale deployment "$deployment" --replicas=0 || failed=1
    # Give it time to kill the pods.
    sleep 5
    # Scale up components for HA tests
    kubectl -n "${TEST_EVENTING_NAMESPACE}" scale deployment "$deployment" --replicas="${REPLICAS}" || failed=1
  done
}
# This installs everything from the config dir but then removes the Channel Based Broker.
# TODO: This should only install the core.
# Args:
#  - $1 - if passed, it will be used as eventing config directory (a local
#         directory is applied with ko; anything else is treated as a URL
#         to a release YAML and downloaded).
function install_knative_eventing() {
  echo ">> Creating ${TEST_EVENTING_NAMESPACE} namespace if it does not exist"
  kubectl get ns ${TEST_EVENTING_NAMESPACE} || kubectl create namespace ${TEST_EVENTING_NAMESPACE}
  local kne_config
  kne_config="${1:-${EVENTING_CONFIG}}"
  # Install Knative Eventing in the current cluster.
  echo "Installing Knative Eventing from: ${kne_config}"
  if [ -d "${kne_config}" ]; then
    # Local directory: copy, rewrite the namespace, and apply with ko.
    local TMP_CONFIG_DIR=${TMP_DIR}/config
    mkdir -p ${TMP_CONFIG_DIR}
    cp -r ${kne_config}/* ${TMP_CONFIG_DIR}
    find ${TMP_CONFIG_DIR} -type f -name "*.yaml" -exec sed -i "s/namespace: ${KNATIVE_DEFAULT_NAMESPACE}/namespace: ${TEST_EVENTING_NAMESPACE}/g" {} +
    ko apply --strict -f "${TMP_CONFIG_DIR}" || return $?
  else
    local EVENTING_RELEASE_YAML=${TMP_DIR}/"eventing-${LATEST_RELEASE_VERSION}.yaml"
    # Download the latest release of Knative Eventing.
    wget "${kne_config}" -O "${EVENTING_RELEASE_YAML}" \
      || fail_test "Unable to download latest knative/eventing file."
    # Replace the default system namespace with the test's system namespace.
    sed -i "s/namespace: ${KNATIVE_DEFAULT_NAMESPACE}/namespace: ${TEST_EVENTING_NAMESPACE}/g" ${EVENTING_RELEASE_YAML}
    echo "Knative EVENTING YAML: ${EVENTING_RELEASE_YAML}"
    kubectl apply -f "${EVENTING_RELEASE_YAML}" || return $?
    UNINSTALL_LIST+=( "${EVENTING_RELEASE_YAML}" )
  fi
  # Setup config tracing for tracing tests
  local TMP_CONFIG_TRACING_CONFIG=${TMP_DIR}/${CONFIG_TRACING_CONFIG##*/}
  sed "s/namespace: ${KNATIVE_DEFAULT_NAMESPACE}/namespace: ${TEST_EVENTING_NAMESPACE}/g" ${CONFIG_TRACING_CONFIG} > ${TMP_CONFIG_TRACING_CONFIG}
  kubectl replace -f ${TMP_CONFIG_TRACING_CONFIG}
  # Restart the control plane with ${REPLICAS} replicas for the HA tests.
  scale_controlplane eventing-webhook eventing-controller
  wait_until_pods_running ${TEST_EVENTING_NAMESPACE} || fail_test "Knative Eventing did not come up"
  echo "check the config map"
  kubectl get configmaps -n ${TEST_EVENTING_NAMESPACE}
  # Optionally install monitoring (skipped when DEPLOY_KNATIVE_MONITORING=0).
  if ! (( DEPLOY_KNATIVE_MONITORING )); then return 0; fi
  # Ensure knative monitoring is installed only once
  kubectl get ns knative-monitoring|| kubectl create namespace knative-monitoring
  knative_monitoring_pods=$(kubectl get pods -n knative-monitoring \
    --field-selector status.phase=Running 2> /dev/null | tail -n +2 | wc -l)
  if ! [[ ${knative_monitoring_pods} -gt 0 ]]; then
    echo ">> Installing Knative Monitoring"
    start_knative_monitoring "${KNATIVE_MONITORING_RELEASE}" || fail_test "Knative Monitoring did not come up"
    UNINSTALL_LIST+=( "${KNATIVE_MONITORING_RELEASE}" )
  else
    echo ">> Knative Monitoring seems to be running, pods running: ${knative_monitoring_pods}."
  fi
}
# Install Knative Eventing from HEAD in the current cluster.
function install_head() {
  echo ">> Installing Knative Eventing from HEAD"
  install_knative_eventing || fail_test "Knative HEAD installation failed"
}
# Install the most recent tagged public release of Knative Eventing
# downloaded from the GitHub releases page.
function install_latest_release() {
  header ">> Installing Knative Eventing latest public release"
  install_knative_eventing \
    "https://github.com/knative/eventing/releases/download/${LATEST_RELEASE_VERSION}/eventing.yaml" \
    || fail_test "Knative latest release installation failed"
}
# Apply the v0.16 pre-install jobs (namespace-rewritten for the test
# namespace) and wait for those batch jobs to complete.
function run_preinstall_V016() {
  local TMP_PRE_INSTALL_V016=${TMP_DIR}/pre_install
  mkdir -p ${TMP_PRE_INSTALL_V016}
  cp -r ${PRE_INSTALL_V016}/* ${TMP_PRE_INSTALL_V016}
  # Point all manifests at the test namespace instead of the default one.
  find ${TMP_PRE_INSTALL_V016} -type f -name "*.yaml" -exec sed -i "s/namespace: ${KNATIVE_DEFAULT_NAMESPACE}/namespace: ${TEST_EVENTING_NAMESPACE}/g" {} +
  ko apply --strict -f "${TMP_PRE_INSTALL_V016}" || return 1
  wait_until_batch_job_complete ${TEST_EVENTING_NAMESPACE} || return 1
}
# Install the MT Channel Based Broker: first the default-broker ConfigMap,
# then the broker manifests, all rewritten to the test namespace.
function install_mt_broker() {
  local TMP_MT_CHANNEL_BASED_BROKER_DEFAULT_CONFIG=${TMP_DIR}/${MT_CHANNEL_BASED_BROKER_DEFAULT_CONFIG##*/}
  sed "s/namespace: ${KNATIVE_DEFAULT_NAMESPACE}/namespace: ${TEST_EVENTING_NAMESPACE}/g" ${MT_CHANNEL_BASED_BROKER_DEFAULT_CONFIG} > ${TMP_MT_CHANNEL_BASED_BROKER_DEFAULT_CONFIG}
  ko apply --strict -f ${TMP_MT_CHANNEL_BASED_BROKER_DEFAULT_CONFIG} || return 1
  local TMP_MT_CHANNEL_BASED_BROKER_CONFIG_DIR=${TMP_DIR}/channel_based_config
  mkdir -p ${TMP_MT_CHANNEL_BASED_BROKER_CONFIG_DIR}
  cp -r ${MT_CHANNEL_BASED_BROKER_CONFIG_DIR}/* ${TMP_MT_CHANNEL_BASED_BROKER_CONFIG_DIR}
  find ${TMP_MT_CHANNEL_BASED_BROKER_CONFIG_DIR} -type f -name "*.yaml" -exec sed -i "s/namespace: ${KNATIVE_DEFAULT_NAMESPACE}/namespace: ${TEST_EVENTING_NAMESPACE}/g" {} +
  ko apply --strict -f ${TMP_MT_CHANNEL_BASED_BROKER_CONFIG_DIR} || return 1
  # TODO(https://github.com/knative/eventing/issues/3591): Enable once MT Broker chaos issues are fixed.
  # scale_controlplane mt-broker-controller
  wait_until_pods_running ${TEST_EVENTING_NAMESPACE} || fail_test "Knative Eventing with MT Broker did not come up"
}
# Install the Sugar Controller (label/annotation magic), enable default
# broker injection on it, and scale it up for the HA tests.
function install_sugar() {
  local TMP_SUGAR_CONTROLLER_CONFIG_DIR=${TMP_DIR}/${SUGAR_CONTROLLER_CONFIG_DIR}
  mkdir -p ${TMP_SUGAR_CONTROLLER_CONFIG_DIR}
  cp -r ${SUGAR_CONTROLLER_CONFIG_DIR}/* ${TMP_SUGAR_CONTROLLER_CONFIG_DIR}
  # Point all manifests at the test namespace instead of the default one.
  find ${TMP_SUGAR_CONTROLLER_CONFIG_DIR} -type f -name "*.yaml" -exec sed -i "s/namespace: ${KNATIVE_DEFAULT_NAMESPACE}/namespace: ${TEST_EVENTING_NAMESPACE}/g" {} +
  ko apply --strict -f ${TMP_SUGAR_CONTROLLER_CONFIG_DIR} || return 1
  kubectl -n ${TEST_EVENTING_NAMESPACE} set env deployment/sugar-controller BROKER_INJECTION_DEFAULT=true || return 1
  scale_controlplane sugar-controller
  wait_until_pods_running ${TEST_EVENTING_NAMESPACE} || fail_test "Knative Eventing Sugar Controller did not come up"
}
# Deploy the chaos duck, rewriting its namespace on the fly and applying
# straight from the sed stream (no temp file needed).
function unleash_duck() {
  echo "unleash the duck"
  sed "s/namespace: ${KNATIVE_DEFAULT_NAMESPACE}/namespace: ${TEST_EVENTING_NAMESPACE}/g" \
    test/config/chaosduck.yaml \
    | ko apply --strict -f - || return $?
}
# Teardown the Knative environment after tests finish: delete the eventing
# install, wait for the namespace to disappear, then remove every extra YAML
# recorded in UNINSTALL_LIST (in reverse installation order).
function knative_teardown() {
  echo ">> Stopping Knative Eventing"
  echo "Uninstalling Knative Eventing"
  ko delete --ignore-not-found=true --now --timeout 60s -f ${EVENTING_CONFIG}
  wait_until_object_does_not_exist namespaces ${TEST_EVENTING_NAMESPACE}
  echo ">> Uninstalling dependencies"
  # We uninstall elements in the reverse of the order they were installed.
  # BUGFIX: the original indexed with ${#array[@]} — 'array' was never
  # defined, so it expanded to 0 and the computed indices were wrong.
  local i
  for (( i=${#UNINSTALL_LIST[@]}-1; i>=0; i-- )); do
    local YAML="${UNINSTALL_LIST[$i]}"
    echo ">> Bringing down YAML: ${YAML}"
    kubectl delete --ignore-not-found=true -f "${YAML}" || return 1
  done
}
# Add function call to trap
# Parameters: $1 - Function to call
#             $2...$n - Signals for trap
# Any command already trapped for a signal is preserved by prepending it to
# the new command, so multiple add_trap calls compose.
function add_trap() {
  local cmd=$1
  shift
  for trap_signal in $@; do
    # Extract the currently installed trap command for this signal (if any).
    local current_trap="$(trap -p $trap_signal | cut -d\' -f2)"
    local new_cmd="($cmd)"
    [[ -n "${current_trap}" ]] && new_cmd="${current_trap};${new_cmd}"
    trap -- "${new_cmd}" $trap_signal
  done
}
# Setup resources common to all eventing tests: pod-log capture via kail,
# the channel CRDs, and the e2e test images.
function test_setup() {
  echo ">> Setting up logging..."
  # Install kail if needed.
  if ! which kail >/dev/null; then
    bash <(curl -sfL https://raw.githubusercontent.com/boz/kail/master/godownloader.sh) -b "$GOPATH/bin"
  fi
  # Capture all logs.
  kail >${ARTIFACTS}/k8s.log.txt &
  local kail_pid=$!
  # Clean up kail so it doesn't interfere with job shutting down
  add_trap "kill $kail_pid || true" EXIT
  install_test_resources || return 1
  echo ">> Publish test images"
  "$(dirname "$0")/upload-test-images.sh" e2e || fail_test "Error uploading test images"
}
# Tear down resources used in the eventing tests.
function test_teardown() {
  uninstall_test_resources
}
# Install test-only resources (currently just the channel CRDs).
function install_test_resources() {
  install_channel_crds || return 1
}
# Remove the resources created by install_test_resources.
function uninstall_test_resources() {
  uninstall_channel_crds
}
# Install the In-Memory Channel CRD (namespace-rewritten copies of the
# manifests) and wait for its pods to come up.
function install_channel_crds() {
  echo "Installing In-Memory Channel CRD"
  local TMP_IN_MEMORY_CHANNEL_CONFIG_DIR=${TMP_DIR}/in_memory_channel_config
  mkdir -p ${TMP_IN_MEMORY_CHANNEL_CONFIG_DIR}
  cp -r ${IN_MEMORY_CHANNEL_CRD_CONFIG_DIR}/* ${TMP_IN_MEMORY_CHANNEL_CONFIG_DIR}
  find ${TMP_IN_MEMORY_CHANNEL_CONFIG_DIR} -type f -name "*.yaml" -exec sed -i "s/namespace: ${KNATIVE_DEFAULT_NAMESPACE}/namespace: ${TEST_EVENTING_NAMESPACE}/g" {} +
  ko apply --strict -f ${TMP_IN_MEMORY_CHANNEL_CONFIG_DIR} || return 1
  # TODO(https://github.com/knative/eventing/issues/3590): Enable once IMC chaos issues are fixed.
  # scale_controlplane imc-controller imc-dispatcher
  wait_until_pods_running ${TEST_EVENTING_NAMESPACE} || fail_test "Failed to install the In-Memory Channel CRD"
}
# Delete the In-Memory Channel CRD manifests applied by install_channel_crds
# (uses the same temp-dir copies that function created).
function uninstall_channel_crds() {
  echo "Uninstalling In-Memory Channel CRD"
  local TMP_IN_MEMORY_CHANNEL_CONFIG_DIR=${TMP_DIR}/in_memory_channel_config
  ko delete --ignore-not-found=true --now --timeout 60s -f ${TMP_IN_MEMORY_CHANNEL_CONFIG_DIR}
}
# Collecting logs from all knative's eventing pods.
# Dumps current and previous container logs for every running pod in the
# test namespace; used for post-mortem debugging.
function dump_extra_cluster_state() {
  echo "============================================================"
  local namespace=${TEST_EVENTING_NAMESPACE}
  for pod in $(kubectl get pod -n $namespace | grep Running | awk '{print $1}'); do
    for container in $(kubectl get pod "${pod}" -n $namespace -ojsonpath='{.spec.containers[*].name}'); do
      echo "Namespace, Pod, Container: ${namespace}, ${pod}, ${container}"
      kubectl logs -n $namespace "${pod}" -c "${container}" || true
      echo "----------------------------------------------------------"
      echo "Namespace, Pod, Container (Previous instance): ${namespace}, ${pod}, ${container}"
      # -p fetches logs from the previous (crashed/restarted) instance.
      kubectl logs -p -n $namespace "${pod}" -c "${container}" || true
      echo "============================================================"
    done
  done
}
# Block until the given file exists, polling once per second.
# Parameters: $1 - path of the file to wait for.
# Returns 0 once the file exists, or 1 after a 300-second timeout.
function wait_for_file() {
  local file timeout waits
  file="$1"
  waits=300
  timeout=$waits
  # BUGFIX: corrected "existance" typo in the log message.
  echo "Waiting for existence of file: ${file}"
  while [ ! -f "${file}" ]; do
    # When the timeout is equal to zero, show an error and leave the loop.
    if [ "${timeout}" == 0 ]; then
      echo "ERROR: Timeout (${waits}s) while waiting for the file ${file}."
      return 1
    fi
    sleep 1
    # Decrease the timeout by one.
    ((timeout--))
  done
  return 0
}
|
import React, {Component} from 'react';
import { Tabs, Tab, Grid, Cell, Card, CardTitle, CardActions, CardText, Button, CardMenu, IconButton} from 'react-mdl';
class Projects extends Component {
constructor(props) {
super(props);
this.state = { activeTab: 0 };
}
toggleCategories() {
if(this.state.activeTab === 0) {
return(
<div className="projects-grid">
<Card shadow={5} style={{minWidth:'650', margin:'auto'}}>
<CardTitle style={{color:'#000', height:'176px', background: 'url(https://cdn-images-1.medium.com/max/679/1*qUlxDdY3T-rDtJ4LhLGkEg.png) center / cover'}}>React Poject</CardTitle>
<CardText>
Lorem IpsumLorem IpsumLorem IpsumLorem IpsumLorem IpsumLorem IpsumLorem IpsumLorem IpsumLorem IpsumLorem IpsumLorem IpsumLorem IpsumLorem IpsumLorem IpsumLorem Ipsum
</CardText>
<CardActions border>
<Button colored>Github</Button>
<Button colored>Github</Button>
</CardActions>
<CardMenu style={{color:'#fff'}}>
<IconButton name="share" />
</CardMenu>
</Card>
<Card shadow={5} style={{minWidth:'650', margin:'auto'}}>
<CardTitle style={{color:'#000', height:'176px', background: 'url(https://cdn-images-1.medium.com/max/679/1*qUlxDdY3T-rDtJ4LhLGkEg.png) center / cover'}}>React Poject</CardTitle>
<CardText>
Lorem IpsumLorem IpsumLorem IpsumLorem IpsumLorem IpsumLorem IpsumLorem IpsumLorem IpsumLorem IpsumLorem IpsumLorem IpsumLorem IpsumLorem IpsumLorem IpsumLorem Ipsum
</CardText>
<CardActions border>
<Button colored>Github</Button>
<Button colored>Github</Button>
</CardActions>
<CardMenu style={{color:'#fff'}}>
<IconButton name="share" />
</CardMenu>
</Card>
<Card shadow={5} style={{minWidth:'650', margin:'auto'}}>
<CardTitle style={{color:'#000', height:'176px', background: 'url(https://cdn-images-1.medium.com/max/679/1*qUlxDdY3T-rDtJ4LhLGkEg.png) center / cover'}}>React Poject</CardTitle>
<CardText>
Lorem IpsumLorem IpsumLorem IpsumLorem IpsumLorem IpsumLorem IpsumLorem IpsumLorem IpsumLorem IpsumLorem IpsumLorem IpsumLorem IpsumLorem IpsumLorem IpsumLorem Ipsum
</CardText>
<CardActions border>
<Button colored>Github</Button>
<Button colored>Github</Button>
</CardActions>
<CardMenu style={{color:'#fff'}}>
<IconButton name="share" />
</CardMenu>
</Card>
</div>
)
} else if(this.state.activeTab === 1) {
return(
<div><h1>This is Java</h1></div>
)
} else if(this.state.activeTab === 2) {
return(
<div><h1>This is C</h1></div>
)
} else {
return(
<div><h1>This is Node</h1></div>
)
}
}
render() {
return (
<div className="category-tabs">
<Tabs activeTab={this.state.activeTab} onChange={(tabId) => this.setState({activeTab: tabId})} ripple>
<Tab>React</Tab>
<Tab>Java</Tab>
<Tab>C</Tab>
<Tab>Node</Tab>
</Tabs>
<Grid>
<Cell col={12}>
<div className="content">{this.toggleCategories()}</div>
</Cell>
</Grid>
</div>
)
}
}
export default Projects; |
# Unseal every Vault pod of the given Helm release using the unseal keys
# stored in the <release>-vault-keys Kubernetes secret.
if [ $# -lt 2 ]; then
  echo "Invalid arguments provided"
  echo "Valid usage: $(basename "$0") <release-name> <namespace> <flags(optional)>"
  exit 1
fi

# BUGFIX: quote all expansions so release names / namespaces with unusual
# characters do not word-split; prefer $() over backticks.
RELEASE="$1"
NAMESPACE="$2"
COMPONENT="${RELEASE}-vault"
REQUIRED_KEY_COUNT=3
SECRET_NAME="${RELEASE}-vault-keys"

echo "Getting unseal keys from Kubernetes secret"
# The secret stores the keys base64-encoded under key0..keyN; one per line.
UNSEAL_KEYS=$(kubectl get secret -n "$NAMESPACE" "${SECRET_NAME}" -o yaml | grep -e "key[0-9]\:" | awk '{print $2}')

for i in $(seq 1 "$REQUIRED_KEY_COUNT"); do
  # Pick the i-th key and decode it.
  KEY=$(echo "$UNSEAL_KEYS" | sed "${i}q;d" | base64 --decode)
  # Feed the key to `vault operator unseal` inside every pod of the release.
  kubectl get po -l "component=$COMPONENT,release=$RELEASE" -n "$NAMESPACE" \
    | awk '{if(NR>1)print $1}' \
    | xargs -I % kubectl exec -n "$NAMESPACE" -c "$RELEASE" % -- sh -c "vault operator unseal --tls-skip-verify $KEY"
done
|
# Returns n! computed recursively.
# Raises ArgumentError for negative input — the original recursed forever
# (until SystemStackError) when given a negative number.
def factorial(n)
  raise ArgumentError, "factorial is not defined for negative numbers" if n < 0
  if n == 0
    1
  else
    n * factorial(n - 1)
  end
end
# Prompt the user for a number on stdin and print its factorial.
puts "Enter a number: "
num = gets.to_i
fact = factorial(num)
puts "Factorial of #{num} is #{fact}"
<gh_stars>1-10
#ifndef FDR_UDPSERVER_H
#define FDR_UDPSERVER_H

#include <DllHelper.h>
#include <udpsocket.h>

#include <string>

// Server-side UDP socket: extends UDPSocket with Bind() overloads so it can
// listen on a local port and receive datagrams.
// NOTE(review): FDR_ON_ERROR appears to be the project's error-callback
// parameter macro — confirm its definition in DllHelper.h.
class EASYSOCKET_API UDPServer : public UDPSocket
{
public:
    UDPServer();

    // Bind to the given port (all interfaces).
    void Bind(int port, FDR_ON_ERROR);
    // Bind to a specific IPv4 address and port.
    void Bind(std::string IPv4, std::uint16_t port, FDR_ON_ERROR);
};

#endif
|
<reponame>MrHadess/controltool2<filename>controltool/src/main/java/com/mh/controltool2/exceptions/invoke/UnsupportedSerializeObjectException.java
package com.mh.controltool2.exceptions.invoke;
/**
 * Unchecked exception signaling that an object is not supported for
 * serialization by the invoke layer (see the {@code exceptions.invoke}
 * package).
 *
 * <p>Adds the standard message/cause constructors so callers can attach
 * diagnostic context; the original class only had the implicit no-arg
 * constructor, which remains available.
 */
public class UnsupportedSerializeObjectException extends RuntimeException {

    /** Creates the exception without a detail message. */
    public UnsupportedSerializeObjectException() {
        super();
    }

    /**
     * Creates the exception with a detail message.
     *
     * @param message description of the object that could not be serialized
     */
    public UnsupportedSerializeObjectException(String message) {
        super(message);
    }

    /**
     * Creates the exception with a detail message and the underlying cause.
     *
     * @param message description of the failure
     * @param cause the underlying cause
     */
    public UnsupportedSerializeObjectException(String message, Throwable cause) {
        super(message, cause);
    }
}
|
/*
* Copyright (C) 2020 The Dagger Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dagger.functional.assisted;
import static com.google.common.truth.Truth.assertThat;
import dagger.Component;
import dagger.assisted.Assisted;
import dagger.assisted.AssistedFactory;
import dagger.assisted.AssistedInject;
import javax.inject.Inject;
import javax.inject.Provider;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
 * Functional tests for Dagger assisted injection: {@code @AssistedInject}
 * constructors paired with {@code @AssistedFactory} factory types, covering
 * nested/non-nested/extended factories, interface and abstract-class
 * factories, and factories without assisted parameters.
 */
@RunWith(JUnit4.class)
public final class AssistedFactoryTest {
  @Component
  interface ParentComponent {
    // Simple factory using a nested factory.
    SimpleFoo.Factory nestedSimpleFooFactory();

    // Simple factory using a non-nested factory.
    SimpleFooFactory nonNestedSimpleFooFactory();

    // Simple factory using a factory that extends a supertype.
    ExtendedSimpleFooFactory extendedSimpleFooFactory();

    // Factory as interface
    FooFactory fooFactory();

    // Factory as abstract class
    AbstractFooFactory abstractFooFactory();

    // Factory without any assisted parameters
    NoAssistedParametersFooFactory noAssistedParametersFooFactory();

    // Test injecting the factories from another class
    SomeEntryPoint someEntryPoint();
  }

  // This class tests the request of factories from another binding.
  static class SomeEntryPoint {
    private final SimpleFoo.Factory nestedSimpleFooFactory;
    private final SimpleFooFactory nonNestedSimpleFooFactory;
    private final ExtendedSimpleFooFactory extendedSimpleFooFactory;
    private final FooFactory fooFactory;
    private final AbstractFooFactory abstractFooFactory;
    private final NoAssistedParametersFooFactory noAssistedParametersFooFactory;

    @Inject
    SomeEntryPoint(
        SimpleFoo.Factory nestedSimpleFooFactory,
        SimpleFooFactory nonNestedSimpleFooFactory,
        ExtendedSimpleFooFactory extendedSimpleFooFactory,
        FooFactory fooFactory,
        AbstractFooFactory abstractFooFactory,
        NoAssistedParametersFooFactory noAssistedParametersFooFactory) {
      this.nestedSimpleFooFactory = nestedSimpleFooFactory;
      this.nonNestedSimpleFooFactory = nonNestedSimpleFooFactory;
      this.extendedSimpleFooFactory = extendedSimpleFooFactory;
      this.fooFactory = fooFactory;
      this.abstractFooFactory = abstractFooFactory;
      this.noAssistedParametersFooFactory = noAssistedParametersFooFactory;
    }
  }

  // Chain of ordinary @Inject dependencies used to populate Foo below.
  static final class Dep1 {
    @Inject
    Dep1(Dep2 dep2, Dep3 dep3) {}
  }

  static final class Dep2 {
    @Inject
    Dep2(Dep3 dep3) {}
  }

  static final class Dep3 {
    @Inject
    Dep3(Dep4 dep4) {}
  }

  static final class Dep4 {
    @Inject
    Dep4() {}
  }

  // A base interface to test that factories can reference subclasses of the assisted parameter.
  interface AssistedDep {}

  static final class AssistedDep1 implements AssistedDep {}

  static final class AssistedDep2 implements AssistedDep {}

  static final class SimpleFoo {
    private final AssistedDep assistedDep;

    @AssistedInject
    SimpleFoo(@Assisted AssistedDep assistedDep) {
      this.assistedDep = assistedDep;
    }

    @AssistedFactory
    interface Factory {
      // Use different parameter names than Foo to make sure we're not assuming they're the same.
      SimpleFoo createSimpleFoo(AssistedDep factoryAssistedDep);

      // A no-op method to test static methods in assisted factories
      static void staticMethod() {
        return;
      }

      // A no-op method to test default methods in assisted factories
      default void defaultMethod() {
        return;
      }
    }
  }

  @AssistedFactory
  interface SimpleFooFactory {
    // Use different parameter names than Foo to make sure we're not assuming they're the same.
    SimpleFoo createSimpleFoo(AssistedDep factoryAssistedDep1);

    // A no-op method to test static methods are allowed
    static void staticMethod() {
      return;
    }

    // A no-op method to test static methods that return assisted type are allowed
    static SimpleFoo staticSimpleFooMethod() {
      return null;
    }

    // A no-op method to test default methods are allowed
    default void defaultMethod() {
      return;
    }

    // A no-op method to test default methods that return assisted type are allowed
    default SimpleFoo defaultSimpleFooMethod() {
      return null;
    }
  }

  @AssistedFactory
  interface ExtendedSimpleFooFactory extends SimpleFooFactory {}

  abstract static class BaseFoo {
    @Inject Dep4 dep4;
  }

  static final class Foo extends BaseFoo {
    private final Dep1 dep1;
    private final Provider<Dep2> dep2Provider;
    private final AssistedDep1 assistedDep1;
    private final AssistedDep2 assistedDep2;
    private final int assistedInt;
    private final FooFactory factory;
    @Inject Dep3 dep3;

    @AssistedInject
    Foo(
        Dep1 dep1,
        @Assisted AssistedDep1 assistedDep1,
        Provider<Dep2> dep2Provider,
        @Assisted AssistedDep2 assistedDep2,
        @Assisted int assistedInt,
        FooFactory factory) {
      this.dep1 = dep1;
      this.dep2Provider = dep2Provider;
      this.assistedDep1 = assistedDep1;
      this.assistedDep2 = assistedDep2;
      this.assistedInt = assistedInt;
      this.factory = factory;
    }
  }

  @AssistedFactory
  interface FooFactory {
    // Use different parameter names than Foo to make sure we're not assuming they're the same.
    Foo createFoo(
        AssistedDep1 factoryAssistedDep1, AssistedDep2 factoryAssistedDep2, int factoryAssistedInt);
  }

  @AssistedFactory
  abstract static class AbstractFooFactory {
    // Use different parameter names than Foo to make sure we're not assuming they're the same.
    abstract Foo createFoo(
        AssistedDep1 factoryAssistedDep1, AssistedDep2 factoryAssistedDep2, int factoryAssistedInt);

    // A no-op method to test static methods are allowed
    static void staticMethod() {
      return;
    }

    // A no-op method to test static methods that return assisted type are allowed
    static Foo staticFooMethod() {
      return null;
    }

    // A no-op method to test concrete methods are allowed
    void concreteMethod() {
      return;
    }

    // A no-op method to test concrete methods that return assisted type are allowed
    Foo concreteFooMethod() {
      return null;
    }
  }

  static final class NoAssistedParametersFoo extends BaseFoo {
    private final Dep1 dep1;
    private final Provider<Dep2> dep2Provider;
    private final NoAssistedParametersFooFactory factory;
    @Inject Dep3 dep3;

    @AssistedInject
    NoAssistedParametersFoo(
        Dep1 dep1, Provider<Dep2> dep2Provider, NoAssistedParametersFooFactory factory) {
      this.dep1 = dep1;
      this.dep2Provider = dep2Provider;
      this.factory = factory;
    }
  }

  @AssistedFactory
  interface NoAssistedParametersFooFactory {
    NoAssistedParametersFoo createNoAssistedParametersFoo();
  }

  @Test
  public void testNestedSimpleFooFactory() {
    AssistedDep1 assistedDep1 = new AssistedDep1();
    SimpleFoo simpleFoo1 =
        DaggerAssistedFactoryTest_ParentComponent.create()
            .nestedSimpleFooFactory()
            .createSimpleFoo(assistedDep1);
    assertThat(simpleFoo1.assistedDep).isEqualTo(assistedDep1);
    AssistedDep2 assistedDep2 = new AssistedDep2();
    SimpleFoo simpleFoo2 =
        DaggerAssistedFactoryTest_ParentComponent.create()
            .nestedSimpleFooFactory()
            .createSimpleFoo(assistedDep2);
    assertThat(simpleFoo2.assistedDep).isEqualTo(assistedDep2);
  }

  @Test
  public void testNonNestedSimpleFooFactory() {
    AssistedDep1 assistedDep1 = new AssistedDep1();
    SimpleFoo simpleFoo =
        DaggerAssistedFactoryTest_ParentComponent.create()
            .nonNestedSimpleFooFactory()
            .createSimpleFoo(assistedDep1);
    assertThat(simpleFoo.assistedDep).isEqualTo(assistedDep1);
  }

  @Test
  public void testExtendedSimpleFooFactory() {
    AssistedDep1 assistedDep1 = new AssistedDep1();
    SimpleFoo simpleFoo =
        DaggerAssistedFactoryTest_ParentComponent.create()
            .extendedSimpleFooFactory()
            .createSimpleFoo(assistedDep1);
    assertThat(simpleFoo.assistedDep).isEqualTo(assistedDep1);
  }

  @Test
  public void testFooFactory() {
    AssistedDep1 assistedDep1 = new AssistedDep1();
    AssistedDep2 assistedDep2 = new AssistedDep2();
    int assistedInt = 7;
    Foo foo =
        DaggerAssistedFactoryTest_ParentComponent.create()
            .fooFactory()
            .createFoo(assistedDep1, assistedDep2, assistedInt);
    assertThat(foo.dep1).isNotNull();
    assertThat(foo.dep2Provider).isNotNull();
    assertThat(foo.dep2Provider.get()).isNotNull();
    assertThat(foo.dep3).isNotNull();
    assertThat(foo.dep4).isNotNull();
    assertThat(foo.assistedDep1).isEqualTo(assistedDep1);
    assertThat(foo.assistedDep2).isEqualTo(assistedDep2);
    assertThat(foo.assistedInt).isEqualTo(assistedInt);
    assertThat(foo.factory).isNotNull();
  }

  @Test
  public void testNoAssistedParametersFooFactory() {
    NoAssistedParametersFoo foo =
        DaggerAssistedFactoryTest_ParentComponent.create()
            .noAssistedParametersFooFactory()
            .createNoAssistedParametersFoo();
    assertThat(foo.dep1).isNotNull();
    assertThat(foo.dep2Provider).isNotNull();
    assertThat(foo.dep2Provider.get()).isNotNull();
    assertThat(foo.dep3).isNotNull();
    assertThat(foo.dep4).isNotNull();
    assertThat(foo.factory).isNotNull();
  }

  @Test
  public void testAssistedFactoryFromSomeEntryPoint() {
    SomeEntryPoint someEntryPoint =
        DaggerAssistedFactoryTest_ParentComponent.create().someEntryPoint();
    assertThat(someEntryPoint.nestedSimpleFooFactory).isNotNull();
    assertThat(someEntryPoint.nonNestedSimpleFooFactory).isNotNull();
    assertThat(someEntryPoint.extendedSimpleFooFactory).isNotNull();
    assertThat(someEntryPoint.fooFactory).isNotNull();
    assertThat(someEntryPoint.abstractFooFactory).isNotNull();
    assertThat(someEntryPoint.noAssistedParametersFooFactory).isNotNull();
  }
}
|
#!/usr/bin/env bash
#
# Run a test build for all images.

set -uo pipefail
IFS=$'\n\t'

# Print each argument on its own line.
info() {
  printf "%s\n" "$@"
}

# Print a highlighted error message and abort the run.
fatal() {
  printf "**********\n"
  printf "%s\n" "$@"
  printf "**********\n"
  exit 1
}

# Always operate from the directory containing this script.
# BUGFIX: quote expansions so paths with spaces work.
cd "$(cd "${0%/*}" && pwd -P)"

# Build the versions given as arguments, or every version directory.
versions=( "$@" )
if [ ${#versions[@]} -eq 0 ]; then
  versions=( */ )
fi
versions=( "${versions[@]%/}" )

for version in "${versions[@]}"; do
  # The image tag is the NODE_VERSION baked into the Dockerfile
  # (useless `cat` from the original removed).
  tag=$(grep "ENV NODE_VERSION" "$version/Dockerfile" | cut -d' ' -f3)
  info "Building $tag..."
  if ! docker build -q -t "node:$tag" "$version"; then
    fatal "Build of $tag failed!"
  else
    info "Build of $tag succeeded."
  fi

  variants=( onbuild slim wheezy centos )
  for variant in "${variants[@]}"; do
    info "Building $tag-$variant variant..."
    if ! docker build -q -t "node:$tag-$variant" "$version/$variant"; then
      fatal "Build of $tag-$variant failed!"
    else
      info "Build of $tag-$variant succeeded."
    fi
  done
done

info "All builds successful!"
exit 0
|
# status
# Report disk and memory statistics as key=value pairs for monitoring.

# Disk usage of the /kpn volume (1K blocks).
echo "db-disk-space-used=$(df --output=used /kpn | tail -1)"
echo "db-disk-space-available=$(df --output=avail /kpn | tail -1)"

# Second column of the matching /proc/meminfo line (value in kB).
# `grep -E` replaces the deprecated `egrep` alias; same pattern semantics.
meminfo_kb() { grep --color=none -E -e "$1" /proc/meminfo | awk '{print $2}'; }

echo "db-mem-total=$(meminfo_kb MemTotal)"
echo "db-mem-free=$(meminfo_kb MemFree)"
echo "db-mem-swap-total=$(meminfo_kb SwapTotal)"
echo "db-mem-swap-free=$(meminfo_kb SwapFree)"
echo "db-mem-available=$(meminfo_kb MemAvailable)"
echo "db-mem-buffers=$(meminfo_kb Buffers)"
|
<gh_stars>1-10
import * as messaging from "messaging";
import document from "document";
export function SettingsApplier() {
};

// Human-readable names of the "clock_style" selections, indexed by selection.
const CLOCK_STYLE_NAMES = ["thin", "normal", "thick"];

// Font sizes (px) per element id for each "clock_style" selection:
// index 0 = "thin", 1 = "normal", 2 = "thick".
// NOTE(review): in the "thick" style "steps" is 40 while "steps_subtitle" is
// 50 — values copied verbatim from the original switch; confirm intended.
const CLOCK_STYLE_SIZES = [
  { timeLabel: 50, weekdayLabel: 30, dateLabel: 40, monthLabel: 30,
    hrm: 30, hrm_bpm: 20, steps: 30, steps_subtitle: 20 },
  { timeLabel: 80, weekdayLabel: 30, dateLabel: 40, monthLabel: 30,
    hrm: 40, hrm_bpm: 30, steps: 40, steps_subtitle: 30 },
  { timeLabel: 130, weekdayLabel: 50, dateLabel: 60, monthLabel: 50,
    hrm: 60, hrm_bpm: 50, steps: 40, steps_subtitle: 50 }
];

/**
 * Apply a single setting change received from the companion.
 * Only the "clock_style" key is handled: it resizes the watch-face labels
 * according to the selected style (see CLOCK_STYLE_SIZES).
 *
 * @param {Object} settingChange - {key, newValue}; newValue is a JSON string
 *   whose parsed form carries a `selected` array (first entry = style index).
 */
SettingsApplier.prototype.applySettings = function(settingChange) {
  console.log("received settingChange:"+JSON.stringify(settingChange));
  if (settingChange.key === undefined) {
    return;
  }
  else if(settingChange.key === "clock_style"){
    //extract data
    let newData = JSON.parse(settingChange.newValue);
    console.log("new Value:"+JSON.stringify(newData.selected));
    const styleIndex = newData.selected[0];
    const sizes = CLOCK_STYLE_SIZES[styleIndex];
    if (!sizes) {
      // Unknown style index: the original switch had no default case and
      // left the layout unchanged.
      return;
    }
    console.log(CLOCK_STYLE_NAMES[styleIndex]);
    // Insertion order of the table matches the original statement order.
    for (const id in sizes) {
      document.getElementById(id).style.fontSize = sizes[id];
    }
  }
}
<filename>src/lng/TW.js
// Traditional Chinese (zh-TW) locale strings for the Elite Pass campaign page.
// All values are user-facing copy or asset paths — treat them as data.
export default {
  // Page header / hero section.
  headTitle: '全新ELITE PASS',
  headBigTitle: '/images/tw/head-text.png',
  timeLine: '10/2',
  timeLineText: '磅礡登場',
  countText: `<span class="text-highlight">全球</span>Elite Pass擁有者數量`,
  owners: '擁有者',
  recieve: '獲得',
  gold: '金幣',
  voucher: '鑽石轉蛋券',
  eyeBlinder: '裝兇狠眼罩',
  notice: '所有Elite Pass擁有者將可以在蒐集指定數量獎章後獲得多項豐富獎勵',
  // Upgrade call-to-action and the three-step explainer.
  upgrade1: '現在就升級Elite Pass<br>以獲得多項限定獎勵',
  step1: '升級Elite Pass',
  step2: '完成各項挑戰可獲得獎章' ,
  step3: '獲得限定獎勵',
  // NOTE(review): '10.000' uses a dot thousands separator; zh-TW copy usually
  // uses '10,000' — confirm with the content owner before changing.
  upgrade2: '<strong>升級Elite Pass</strong><span>以獲得與</span> <span class="gray">10.000</span> <img src="/images/diamond.png" alt=""/> <span>鑽石等值的豐富獎勵</span>',
  // Reward category titles.
  title1: '限定豪華服飾',
  title2: '滑板與降落傘',
  title3: '背包與快遞盒',
  title4: '頭像、背景與手榴彈造型',
  storyTitle: '探索Elite Pass傳說航路',
  readmore: '歷史回顧',
  diamond1: '/images/tw/diamond1.png',
  diamond2: '/images/tw/diamond2.png',
  // Season-5 story chapters (HTML fragments rendered verbatim).
  story51: `
    <p>凱莉笑得很開心。終於收集滿100枚金幣,第一個挑戰就完成了。</p>
    <p>一道黑影從巨石後面衝向凱莉。眨眼之間,查拉領先一步在老練的海盜刺中凱莉之前保護了她。</p>
    <p>凱莉對她靴子上那顆綠松石所沾染的鮮血皺起了眉頭。</p>
    <p>「查拉,這都是你的錯!他的血灑在我的靴子上!」這是她新的海盜服 - 一雙閃亮的鯨魚皮革靴子,一件無可挑剔的白色亞麻襯衫,還有一個裝飾著羽毛的華麗帽子。</p>
    <p>像往常一樣,查拉帶著一抹淡淡的微笑回應。</p>
    <p>「我們回家吧。無論如何,謝謝你拯救我的生命。你是我真正的兄弟,」凱利帶著笑容說道。</p>
    <p>凱莉迫不及待地告訴她的父親 - 海盜王安德魯 - 關於她的勝利。</p>
    <p>「幹得好,凱莉!你一定很狠的教訓了他們對吧?」 - 海盜王一邊喝著萊姆酒時一邊抖著腳。</p>
    <p>根據海盜法典,海盜王的後代在他們16歲時開始接受挑戰,任何完成三項挑戰的將成為正式繼承人。作為聲名顯著的血腥瑪利號的的船長,海盜王從15歲開始就擊倒了無數的皇家海軍艦艇。</p>
    <p>「你的第二次挑戰,凱莉!劫持華麗艦並將其帶回這裡。」海盜王指出了地圖 ,「3天後,它將穿過百慕達。艦長只是一個15歲的小屁孩罷了。」</p>
    <p>「他可是過去100年來皇家海軍最傑出的艦長。華麗艦每側擁有16門炮。這可是需要一點點詭計才能擊敗它。」查拉推測。</p>
    <p>「華麗艦絕對不是血腥瑪麗號的對手」凱莉吹噓道。</p>
    <p>凱利跳出了房間。查拉轉向海盜王,「陛下,你確定嗎? 華麗艦不是一個容易的獵物。」</p>
    <p>「一如既往的精明和謹慎。這就是我相信你照顧我女兒的原因。我送給你一件禮物。」</p>
    <p>安德魯海盜王起身向查拉了遞了一個盒子。 「別讓我失望。」</p>
  `,
  story52: `
    <p>海盜們湧入華麗艦上,凱莉注意到一位漂白頭髮的皇家衛兵使用平底鍋勇敢地反擊。海盜公主驚訝的向他走去想仔細瞧瞧他的臉。</p>
    <p>一聲槍響救了白頭髮的傢伙。 華麗艦艦長登場了,皇家衛隊士氣大增並且大聲吶喊,此時,凱莉掉到甲板上面,並且面對著兩個對手。 </p>
  `,
  story53: `
    <p>一名皇家衛兵衝向凱莉,凱莉只用了一個迴旋踢就將他踢入海裡。當她靠近馬克西姆的英俊面孔時,公主的心臟砰砰跳。</p>
    <p>那張漂亮的臉龐挨了查拉一拳,並且連站都站不穩,倒在了地上。艦長被海盜淹沒並捆綁。其餘的皇家衛隊在艦長被捕後不久就放下了武器。</p>
    <p>「勝利是我們的!」查拉驕傲的大喊。但凱莉似乎心不在焉的樣子。</p>
    <p>「我們不會殺死那兩個人對吧?查拉」凱莉在回程的路上求情。查拉保持沉默。他不確定海盜王是否會要了他的小命。</p>
    <p>通往海盜王房間的大門打開了。凱莉像風一樣衝向海盜王的身邊,抓住了他的手臂。</p>
    <p>「父親!你最愛我,不是嗎?你會給我世界上的任何東西,不是嗎?」</p>
    <p>安德魯的臉變得嚴肅,似乎知道女兒的意圖。然而他永遠不會放過皇家海軍。</p>
    <p>「凱莉,你還想成為海盜王,不是嗎?」</p>
    <p>「父親!根據海盜的法典,我可能會在最後的挑戰中選擇我的同伴。即便是海盜王也不能反駁。」</p>
    <p>凱莉是對的,海盜王知道這一點。它寫在海盜法典中。</p>
    <p>「那麼我的同伴將是查拉和來自華麗艦的兄弟們。請同意,父親。」</p>
    <p>「凱莉,你是一個厚顏無恥的小流氓。規則是規則。即使是海盜王也不能違背你的意願。就這樣吧。那兩個皇家衛隊將成為你的同伴。現在是你最後挑戰的時候了。」</p>
    <p>海盜王把一張舊地圖推到了桌子上。他指著大西洋上一個奇異的島嶼。</p>
    <p>「最後的挑戰......找回災厄之神克蘇魯的靈魂!」</p>
  `,
  // Season-4 story chapters.
  story41: `
    <p>從前,有對姐弟名為馬克西姆和米莎,居住在加勒比海上的西班牙殖民地。
    由於他們天賦異稟,兩人在很小的時候就被招募到了人人景仰的《皇家侍衛隊》。
    </p>
    <p>從小生長在沿海城鎮的姐弟倆,殘酷地目睹了父母被海盜冷血殺害...
    因此,這兩個孤兒發誓要摧毀加勒比海上所有海盜。
    </p>
    <p>馬克西姆擁有著令人難以置信的才能與毅力 - 他被提拔為海軍軍械負責人。
    米莎則在駕駛方面表現出色 - 她被獲選為《傳奇軍艦》的領導者。
    ( 聽說《傳奇軍艦》以其強大的砲兵而聞名,每側多達16 個加農炮。)</p>
  `,
  story42: `
    <p>在他們上任隔天立即授命啟航,卻遇到了一場奇怪的風暴...
    機警的米莎很快就查覺到了,並前往警告馬克西姆。
    然而,他們卻因天氣不佳而受困於加勒比海中最惡名昭著海盜的家 - 百慕達。</p>
  `,
  story43: `
    <p>不幸地,米莎的軍艦被《血腥瑪麗》所圍困,這是一艘比他們軍艦還要大兩倍的海盜船。
    這些海盜擁有著與他們一樣的戰鬥意志,結果馬克西姆和米莎被野蠻的海盜擊敗了。
    馬克西姆昏倒了,失去了意識...</p>
  `,
  // Community-goal status messages.
  fail: '當全球Elite Pass所有者達到此數量,所有個所有者將會獲得這個獎勵!',
  success: '恭喜您!獎品將在10/12直接派發給有購買Elite pass玩家遊戲內的收信箱,如果您還未購買,趕緊把握機會購買獲得獎勵!'
}
<reponame>Askinkaty/text-readability<gh_stars>0
# -*- coding: utf-8 -*-
from process_gram import process_grammar
import codecs
# Lemma lists used by the counters below.  Tokens are matched by lemma
# (index 2 of the (word, grammar, lemma) tuple), all lowercase.

# First-person personal/possessive pronoun lemmas.
first_pp = ['мы', 'я', 'наш', 'мой']
# Second-person personal/possessive pronoun lemmas.
second_pp = ['ты', 'вы', 'ваш', 'твой']
# Third-person pronoun lemmas.
# NOTE(review): 'ee' is written with Latin letters; the Cyrillic 'её' may be
# intended — confirm against the tagger's lemma alphabet.
third_pp = ['он', 'она', 'они', 'оно', 'их', 'ee', 'его', 'ихний', 'ихним', 'ихнем']
# Indefinite and negative pronouns/adverbs ('незачем' appears twice).
indef_pron = ['некто', 'некого', 'некому', 'некем', 'нечто', 'нечего', 'нечему', 'нечем', 'некоторый', 'некий', 'любой',
              'никто', 'ничто', 'никакой', 'нисколько', 'нигде', 'негде', 'некуда', 'никуда', 'неоткуда', 'ниоткуда',
              'некогда', 'никогда', 'никак', 'незачем', 'незачем']
# Adverbs of place.
place_adverbs = ['близко', 'ближе', 'вблизи', 'вверх', 'вверху', 'ввысь', 'вглубь', 'вдали', 'вдаль', 'везде', 'взад',
                 'влево', 'вне', 'вниз', 'внизу', 'внутри', 'внутрь', 'вовне', 'вовнутрь', 'вокруг', 'вперед',
                 'впереди', 'вправо', 'всюду', 'высоко', 'выше', 'глубоко', 'глубже', 'далеко', 'дальше', 'донизу',
                 'дома', 'здесь', 'издалека', 'издалече', 'издали', 'изнутри', 'кверху', 'книзу', 'кругом', 'левее',
                 'наверх', 'наверху', 'наискосок', 'налево', 'направо', 'напротив', 'наружно', 'наружу', 'невысоко',
                 'неглубоко', 'недалеко', 'неподалеку', 'низко', 'ниже', 'одаль', 'около', 'окрест', 'особняком',
                 'отдельно', 'откуда', 'отсюда', 'поближе', 'поверх', 'повсеместно', 'повсюду', 'повыше', 'поглубже',
                 'подальше', 'позади', 'пониже', 'понизу', 'посередке', 'посередине', 'посреди', 'посредине', 'поодаль',
                 'правее', 'рядом', 'сбоку', 'сверху', 'свыше', 'сзади', 'слева', 'снизу', 'снаружи', 'спереди',
                 'справа', 'стороной', 'супротив']
# Adverbs of time ('редко' appears twice).
time_adverbs = ['бесконечно', 'беспрерывно', 'ввек', 'весной', 'вечно', 'вмиг', 'вначале', 'вовек', 'вовремя', 'впору',
                'впоследствии',
                'впредь', 'враз', 'временно', 'всечасно', 'вскоре', 'встарь', 'вчера', 'вчерась', 'давеча', 'давно',
                'давненько', 'денно', 'длительно', 'днесь', 'доколе', 'долго', 'дольше', 'доныне',
                'досветла', 'доселе', 'досрочно', 'дотемна', 'доутра', 'единовременно', 'ежеквартально', 'ежеминутно',
                'еженощно', 'ежесекундно', 'ежечасно', 'еще', 'заблаговременно', 'завсегда', 'завтра', 'задолго',
                'загодя', 'заранее', 'зараз', 'засим', 'затем', 'зимой', 'извечно', 'издревле', 'изначально', 'иногда',
                'исконно', 'испокон', 'исстари', 'круглосуточно', 'кряду', 'летом', 'мимолетно', 'навек', 'навеки',
                'навсегда', 'надолго', 'назавтра', 'накануне', 'наконец', 'намедни', 'наперед', 'напоследок',
                'напролет', 'насовсем', 'наутро', 'недавно', 'недолго', 'незадолго', 'незамедлительно', 'ненадолго',
                'нескоро', 'неоднократно', 'нонче', 'непрерывно', 'непродолжительно', 'нощно', 'ныне', 'нынче',
                'однажды', 'одновременно', 'осенью', 'отколе', 'отныне', 'отродясь', 'первоначально', 'позавчера',
                'позднее', 'поздно', 'поздновато', 'позже', 'подолгу', 'подряд', 'пожизненно', 'пока', 'покамест',
                'поныне', 'поначалу', 'попозже', 'пораньше', 'после', 'послезавтра',
                'поспешно', 'поскорее', 'постоянно', 'поутру', 'прежде', 'преждевременно', 'присно',
                'продолжительно', 'редко', 'реже', 'ранее', 'рано', 'рановато', 'раньше', 'редко', 'своевременно',
                'сегодня', 'скорее', 'скорей', 'скоро', 'смолоду', 'сначала', 'сперва', 'сразу', 'срочно', 'сроду',
                'теперича', 'часто', 'уже', 'ужо']
# Interrogative pronouns/adverbs.
interrogative_pronoun = ['кто', 'что', 'какой', 'каков', 'чей', 'который', 'почему', 'зачем', 'где', 'куда', 'откуда',
                         'отчего']
def is_have_grammar(e):
    """Return True if the token tuple ``e`` carries a non-empty grammar tag.

    Tokens are (word, grammar, lemma) triples; index 1 holds the tagger's
    grammar string, which is empty for punctuation and unparsed tokens.
    (The commented-out try/except debugging scaffolding has been removed.)
    """
    return e[1] != ''
# 1
# test that the current word is a first person pronoun
def first_person_pronoun(t):
    """Count tokens whose lemma is a first-person pronoun (see ``first_pp``)."""
    return sum(1 for token in t if token[2] in first_pp)
# 2
# test that the current word is a second person pronoun
def second_person_pronoun(t):
    """Count tokens whose lemma is a second-person pronoun (see ``second_pp``)."""
    return sum(1 for token in t if token[2] in second_pp)
# 3
# test that the current word is a third person pronoun
def third_person_pronoun(t):
    """Count tokens whose lemma is a third-person pronoun (see ``third_pp``)."""
    return sum(1 for token in t if token[2] in third_pp)
# 4
# test that the current word is a pronoun
def is_pronoun(t):
    """Count tokens tagged as pronouns (POS tag 'P')."""
    found = 0
    for token in t:
        if not is_have_grammar(token):
            continue
        if process_grammar(token).get('pos') == 'P':
            found += 1
    return found
# 5
# test that the current word is a finite verb
def is_finite_verb(t):
    """Count finite verb forms: POS 'V' with vform 'i'."""
    found = 0
    for token in t:
        if not is_have_grammar(token):
            continue
        tags = process_grammar(token)
        if tags.get('pos') == 'V' and tags.get('vform') == 'i':
            found += 1
    return found
# 6
# test that the current word is an adjective or a participle
# may be we should leave only test for adjectives and add a test that they are modifiers and not parts of predicates
def is_modifier(t):
    """Count adjectives (POS 'A') and participles (POS 'V' with vform 'p')."""
    found = 0
    for token in t:
        if not is_have_grammar(token):
            continue
        tags = process_grammar(token)
        if tags.get('pos') == 'A' or (tags.get('pos') == 'V' and tags.get('vform') == 'p'):
            found += 1
    return found
# 7
# test that the current word has a past tense form
def past_tense(t):
    """Count verbs in the past tense (tense tag 's')."""
    found = 0
    for token in t:
        if not is_have_grammar(token):
            continue
        tags = process_grammar(token)
        if tags.get('pos') == 'V' and tags.get('tense') == 's':
            found += 1
    return found
# 8
# test that the current word has a perfect aspect form
def perf_aspect(t):
    """Count verbs with perfective aspect (aspect tag 'p')."""
    found = 0
    for token in t:
        if not is_have_grammar(token):
            continue
        tags = process_grammar(token)
        if tags.get('pos') == 'V' and tags.get('aspect') == 'p':
            found += 1
    return found
# 9
# test that the current word has a present tense form
def present_tense(t):
    """Count verbs in the present tense (tense tag 'p')."""
    found = 0
    for token in t:
        if not is_have_grammar(token):
            continue
        tags = process_grammar(token)
        if tags.get('pos') == 'V' and tags.get('tense') == 'p':
            found += 1
    return found
# 10
# test that the current word is an adverb
def total_adverb(t):
    """Count tokens tagged as adverbs (POS tag 'R')."""
    found = 0
    for token in t:
        if not is_have_grammar(token):
            continue
        if process_grammar(token).get('pos') == 'R':
            found += 1
    return found
# nouns
# 11
# 12
# test that the current word a verbal noun (отглагольное сущ.) or not verbal noun
def is_nominalization(t):
    """Count nominalizations vs. other nouns; returns (nomz, other_nouns).

    The reference list of nominalization lemmas is loaded from disk at most
    once per call.  (The original reopened and re-read the dictionary file
    for every noun in the sentence.)  Loading stays lazy so sentences with
    no nouns never touch the file, exactly as before.
    """
    nomz = 0
    nouns = 0
    nominalization_lemmas = None  # loaded on first noun only
    for el in t:
        if not is_have_grammar(el):
            continue
        d_el = process_grammar(el)
        if d_el.get('pos') == 'N':
            if nominalization_lemmas is None:
                with codecs.open('dictionaries/final_lemmas_nominalizations.txt', mode='r', encoding='utf-8') as f:
                    nominalization_lemmas = set(s.strip() for s in f.readlines())
            if el[2].lower() in nominalization_lemmas:
                nomz += 1
            else:
                nouns += 1
    return nomz, nouns
# 13
# test that the current word has a genitive case form
def is_genitive(t):
    """Count nominals (noun/pronoun/adjective) in the genitive case ('g')."""
    found = 0
    for token in t:
        if not is_have_grammar(token):
            continue
        tags = process_grammar(token)
        if tags.get('pos') in ('N', 'P', 'A') and tags.get('case') == 'g':
            found += 1
    return found
# 14
# test that the current word has a neuter gender form
def is_neuter(t):
    """Count nominals (noun/pronoun/adjective) with neuter gender ('n')."""
    found = 0
    for token in t:
        if not is_have_grammar(token):
            continue
        tags = process_grammar(token)
        if tags.get('pos') in ('N', 'P', 'A') and tags.get('gender') == 'n':
            found += 1
    return found
# 15
# test that the current word has a passive form
def is_passive(t):
    """Count verbs in the passive voice (voice tag 'p')."""
    found = 0
    for token in t:
        if not is_have_grammar(token):
            continue
        tags = process_grammar(token)
        if tags.get('pos') == 'V' and tags.get('voice') == 'p':
            found += 1
    return found
# 16
# test that the current verb is an infinitive
def infinitives(t):
    """Count infinitive verb forms (vform tag 'n')."""
    found = 0
    for token in t:
        if not is_have_grammar(token):
            continue
        tags = process_grammar(token)
        if tags.get('pos') == 'V' and tags.get('vform') == 'n':
            found += 1
    return found
# 17
# test that the current word is a speech verb
def speech_verb(t):
    """Count verbs whose lemma appears in the speech-verb dictionary file."""
    with codecs.open(r'dictionaries/all_lemmas_verb_speech.txt', mode='r', encoding='utf-8') as f:
        speech_lemmas = set(line.strip() for line in f.readlines())
    counted = 0
    for token in t:
        if not is_have_grammar(token):
            continue
        if process_grammar(token).get('pos') == 'V' and token[2].lower() in speech_lemmas:
            counted += 1
    return counted
# 18
# test that the current word is a mental verb
def mental_verb(t):
    """Count verbs whose lemma appears in the mental-verb dictionary file."""
    with codecs.open(r'dictionaries/all_lemmas_verb_mental.txt', mode='r', encoding='utf-8') as f:
        mental_lemmas = set(line.strip() for line in f.readlines())
    counted = 0
    for token in t:
        if not is_have_grammar(token):
            continue
        if process_grammar(token).get('pos') == 'V' and token[2].lower() in mental_lemmas:
            counted += 1
    return counted
# 19
# test that the current sentence includes that-complement clause
def that_complement(t):
    """Count that-complement clauses ("..., что/чтобы ...") in sentence ``t``.

    A verb (or short-form adjective) followed within a 4-token window by a
    comma and 'что'/'чтобы' counts, after filtering relative readings
    (demonstrative before the comma) and infinitive clauses after 'чтобы'.
    Interrogative sentences are skipped entirely.
    """
    that_compl = 0
    l = len(t)
    for i, el in enumerate(t):
        if not is_have_grammar(el):
            continue
        # skip interrogative sentences
        if t[l - 1][0] == '?':
            continue
        d_el = process_grammar(el)
        # BUGFIX: ``next_el`` was previously unbound when ``el`` is the last
        # token, raising UnboundLocalError below; the last token is now
        # simply skipped (no complement clause can follow it).
        next_el = None
        d_next_el = {}
        if i + 1 < len(t):
            next_el = t[i + 1]
            d_next_el = process_grammar(next_el)
            d_next_el = d_next_el if d_next_el is not None else {}
        # test that current word is verb or short-form adjective and the next word is not a verb or
        # short-form adjective because of sentences like 'Я был счастлив, что она пришла'.
        if d_el.get('pos') == 'V' or (d_el.get('pos') == 'A' and d_el.get('definiteness') == 's'):
            if next_el is not None and is_have_grammar(next_el):
                if (d_next_el.get('pos') != 'V' and
                        (d_next_el.get('pos') != 'A' or d_next_el.get('definiteness') != 's')):
                    for j in range(4):
                        # test that there's no pronouns like 'то, это, такой' between the current word and comma
                        # because of sentences like 'Я не предвидел того, что вы приедете',
                        # which has relative meaning.
                        # test that conjunction like 'что', 'чтобы' directly follow after comma
                        if (i + j + 1 < len(t) and
                                t[i + j][2] not in ['весь', 'все', 'такой', 'то', 'это', 'тот', 'этот'] and
                                t[i + j + 1][0] == ',' and i + j + 2 < len(t) and
                                t[i + j + 2][2] in ['что', 'чтобы']):
                            if i + j + 3 < len(t):
                                # test that if the conjunction is 'чтобы', there's no infinitive verb after it
                                # to check that it's not an infinitive clause
                                if t[i + j + 2][2] == 'чтобы':
                                    if is_have_grammar(t[i + j + 3]):
                                        d_is_inf_el = process_grammar(t[i + j + 3])
                                        if not (d_is_inf_el.get('pos') == 'V' and
                                                d_is_inf_el.get('vform') == 'n'):
                                            that_compl += 1
                                else:
                                    that_compl += 1
    return that_compl
# 20
# test that the current sentence includes wh-relative clause (относительное придаточное)
def wh_relatives(t):
    """Count wh-relative clauses (относительные придаточные) in sentence ``t``.

    A wh-pronoun lemma counts when it immediately follows a comma; 'кто'
    additionally requires a demonstrative head ('тот', 'то', ...) before the
    comma ("Это был тот, кого я не боюсь").  Interrogatives are skipped.
    Fixes: removed the dead ``d_prev_el = {}`` assignment and guarded against
    empty input (the original indexed ``t[-1]`` and crashed).
    """
    wh_relative = 0
    if not t:
        return 0
    l = len(t)
    # test that sentence is not interrogative
    if t[l - 1][0] != '?':
        for i, el in enumerate(t):
            # test that pronoun is in the left periphery of the sentence and preceded by comma
            # NOTE(review): ``i - 1 > 0`` skips a wh-word at index 1 even when
            # index 0 is a comma; preserved as-is — confirm whether intended.
            if el[2] in ['какой', 'чей', 'который', 'почему', 'зачем', 'где', 'куда', 'откуда', 'отчего']:
                if i - 1 > 0:
                    prev_el = t[i - 1]
                    if prev_el[0] == ',':
                        wh_relative += 1
            # test that there's the example of relative clause structure like "Это был тот, кого я не боюсь".
            if el[2] in ['кто']:
                if i - 1 > 0:
                    prev_el = t[i - 1]
                    if prev_el[0] == ',':
                        if i - 2 > 0 and t[i - 2][2] in ['тот', 'то', 'все', 'весь', 'такой']:
                            wh_relative += 1
    return wh_relative
# 21
# test that the current word is preposition
# (we count all prepositional phrases in the sentence by counting prepositions)
def total_PP(t):
    """Count prepositional phrases by counting prepositions.

    A token counts when its lemma is in the preposition dictionary, or when
    the tagger marks it POS 'S' (and it is not a period).
    """
    with codecs.open(r'dictionaries/all_lemmas_prepositions.txt', mode='r', encoding='utf-8') as f:
        prepositions = set(line.strip() for line in f.readlines())
    count = 0
    for token in t:
        if token[2] in prepositions:
            count += 1
        elif is_have_grammar(token):
            if process_grammar(token).get('pos') == 'S' and token[0] != '.':
                count += 1
    return count
# 22
# function for counting mean word length
# it is possibly better to count median word length
def word_length(t):
    """Return (total_letters, word_count) over non-punctuation tokens."""
    punctuation = ['.', ',', '!', '?', ':', ';', '"', '-', '—', '–']
    words = 0
    letters = 0
    for token in t:
        if token[0] not in punctuation:
            words += 1
            letters += len(token[0])
    return letters, words
# 23
# function for counting all syllables in the sentence
def syllables(t):
    """Return (total_syllables, complex_word_count) for sentence ``t``.

    A syllable is one Cyrillic vowel; a word is "complex" when IT has more
    than four syllables.  Bug fix: the original compared the *cumulative*
    syllable total against 4 inside the word loop, so every word after the
    sentence passed four total syllables was counted as complex, making the
    result depend on word order rather than on the words themselves.
    """
    vowels = ['а', 'о', 'у', 'и', 'ы', 'е', 'ё', 'я', 'э', 'ю']
    syll = 0
    complex_w = 0
    for el in t:
        word_syllables = sum(1 for ch in el[0] if ch in vowels)
        syll += word_syllables
        if word_syllables > 4:
            complex_w += 1
    return syll, complex_w
# 24
# interval between punctuation marks
def text_span(t):
    """Return the mean token distance between consecutive punctuation marks.

    Returns 0 when the sentence contains no punctuation at all.
    """
    punctuation = ['.', ',', '!', '?', ':', ';', '"', '-', '—', '–']
    spans = []
    previous = 0
    for index, token in enumerate(t):
        if token[0] in punctuation:
            spans.append(index - previous)
            previous = index
    if not spans:
        return 0
    return sum(spans) / len(spans)
# 25
# function for counting mean sentence length
def sentence_length(t):
    """Count word tokens: everything that is neither punctuation nor a SENT tag."""
    punctuation = ['.', ',', '!', '?', ':', ';', '"', '-', '—', '–']
    return sum(1 for token in t if token[0] not in punctuation and token[1] != 'SENT')
# 26
# function for counting relation between lemmas and tokens (how many original words does the text include?)
def type_token_ratio(t):
    """Return (set_of_lemma_types, token_count), skipping punctuation/symbols."""
    skip = ['.', ',', '!', '?', ':', ';', '"', '-', '—', '–', '@', '#', '"', '$', '%', '*', '+', ')', '(',
            '[', ']', '{', '}', '&']
    types = set()
    tokens = 0
    for token in t:
        if token[0] not in skip:
            types.add(token[2])
            tokens += 1
    return types, tokens
# 27
# test that the current word is a verbal adverb (деепричастие)
def is_verbal_adverb(t):
    """Count verbal adverbs (деепричастия): POS 'V' with vform 'g'."""
    gerund = 0
    for token in t:
        if not is_have_grammar(token):
            continue
        tags = process_grammar(token)
        if tags.get('pos') == 'V' and tags.get('vform') == 'g':
            gerund += 1
    return gerund
# 28
# test that the sentence includes passive participles not in predicate position
def passive_participial_clauses(t):
    """Count passive participles that act as modifiers (participial clauses).

    A participle is excluded when it forms a predicate with a copular verb
    ('быть', 'становиться', ...), either directly preceding it or separated
    by one adverb or one of the particles 'ли'/'бы'/'не'.
    """
    pas_part_clauses = 0
    for i, el in enumerate(t):
        if is_have_grammar(el):
            flag_predicate = False
            d_el = process_grammar(el)
            # test that current word is past participle
            if d_el.get('pos') == 'V' and d_el.get('vform') == 'p' and d_el.get('voice') == 'p':
                d_prev_el = {}
                d_prev_prev_el = {}
                if i > 0:
                    prev_el = t[i - 1]
                    d_prev_el = process_grammar(prev_el)
                    d_prev_el = d_prev_el if d_prev_el is not None else {}
                    # test that the word is not a part of predicate like 'быть уставшим', 'становиться раздраженным'
                    if d_prev_el.get('pos') == 'V' and prev_el[2].lower() in ['быть', 'делаться', 'сделаться',
                                                                              'казаться',
                                                                              'называться', 'становиться', 'являться']:
                        flag_predicate = True
                if i - 1 > 0:
                    prev_el = t[i - 1]
                    prev_prev_el = t[i - 2]
                    d_prev_el = process_grammar(prev_el)
                    d_prev_el = d_prev_el if d_prev_el is not None else {}
                    d_prev_prev_el = process_grammar(prev_prev_el)
                    d_prev_prev_el = d_prev_prev_el if d_prev_prev_el is not None else {}
                    # test that the word is not a part of predicate separated by adverb or patricles from the list
                    if d_prev_prev_el.get('pos') == 'V' and prev_prev_el[2].lower() in ['быть', 'делаться', 'сделаться',
                                                                                        'казаться',
                                                                                        'называться', 'становиться',
                                                                                        'являться']:
                        if d_prev_el.get('pos') == 'R' or prev_el[0].lower() in ['ли', 'бы', 'не']:
                            flag_predicate = True
                if not flag_predicate:
                    pas_part_clauses += 1
        else:
            continue
    return pas_part_clauses
# 29
# test that the sentence includes active participles not in predicate position
def active_participial_clauses(t):
    """Count active/medial participles acting as modifiers.

    Mirror image of :func:`passive_participial_clauses` but for participles
    whose voice tag is NOT 'p'; the same copular-predicate exclusions apply.
    """
    act_part_clauses = 0
    for i, el in enumerate(t):
        if is_have_grammar(el):
            flag_predicate = False
            d_el = process_grammar(el)
            # test that current word is active/medial participle
            if d_el.get('pos') == 'V' and d_el.get('vform') == 'p' and d_el.get('voice') != 'p':
                d_prev_el = {}
                d_prev_prev_el = {}
                if i > 0:
                    prev_el = t[i - 1]
                    d_prev_el = process_grammar(prev_el)
                    d_prev_el = d_prev_el if d_prev_el is not None else {}
                    # test that the word is not a part of predicate like 'быть потрясающим'
                    if d_prev_el.get('pos') == 'V' and prev_el[2].lower() in ['быть', 'делаться', 'сделаться',
                                                                              'казаться',
                                                                              'называться', 'становиться', 'являться']:
                        flag_predicate = True
                if i - 1 > 0:
                    prev_el = t[i - 1]
                    prev_prev_el = t[i - 2]
                    d_prev_el = process_grammar(prev_el)
                    d_prev_el = d_prev_el if d_prev_el is not None else {}
                    d_prev_prev_el = process_grammar(prev_prev_el)
                    d_prev_prev_el = d_prev_prev_el if d_prev_prev_el is not None else {}
                    # test that the word is not a part of predicate separated by adverb or patricles from the list
                    if d_prev_prev_el.get('pos') == 'V' and prev_prev_el[2].lower() in ['быть', 'делаться', 'сделаться',
                                                                                        'казаться',
                                                                                        'называться', 'становиться',
                                                                                        'являться']:
                        if d_prev_el.get('pos') == 'R' or prev_el[0].lower() in ['ли', 'бы', 'не']:
                            flag_predicate = True
                if not flag_predicate:
                    act_part_clauses += 1
        else:
            continue
    return act_part_clauses
# 30
# test that the current word has a imperative mood form
def imperative_mood(t):
    """Count imperative-mood markers in sentence ``t``.

    Handles three patterns: plain imperative verb forms (vform 'm'),
    bare 'давай'/'давайте' not followed by another imperative verb (so
    'давайте пишите' yields a single count), and third-person indicative
    verbs preceded by 'да'/'пусть'/'пускай'.
    """
    imp_mood = 0
    for i, el in enumerate(t):
        if is_have_grammar(el):
            d_el = process_grammar(el)
            # NOTE(review): d_prev is assigned but never used below.
            d_prev = {}
            d_next_el = {}
            # test that catch constructions like "давайте пишите" to assign only one mark of imperative mood
            if d_el.get('pos') == 'V' and d_el.get('vform') == 'm':
                if el[0].lower() == 'давайте' or el[0].lower() == 'давай':
                    pass
                else:
                    imp_mood += 1
            # test that catch only "давай/те" without any verb after it
            if el[0].lower() == 'давайте' or el[0].lower() == 'давай':
                if i + 1 < len(t):
                    next_el = t[i + 1]
                    if is_have_grammar(next_el):
                        d_next_el = process_grammar(next_el)
                        d_next_el = d_next_el if d_next_el is not None else {}
                        if d_next_el.get('pos') != 'V':
                            imp_mood += 1
                        elif d_next_el.get('vform') != 'm':
                            imp_mood += 1
                    else:
                        continue
            if i > 0:
                prev_el = t[i - 1]
                if d_el.get('pos') == 'V' and d_el.get('vform') == 'i' and d_el.get('person') == '3':
                    if prev_el[0].lower() in ['да', 'пусть', 'пускай']:
                        imp_mood += 1
        else:
            continue
    return imp_mood
# 31
# test that the current word is an adjective in predicative position
def predicative_adjectives(t):
    """Count adjectives in predicative position.

    Short-form adjectives (definiteness 's') always count; full-form ones
    (definiteness 'f') count only when preceded by a copular verb lemma
    ('быть', 'становиться', ...).
    """
    pred_adj = 0
    for i, el in enumerate(t):
        if is_have_grammar(el):
            d_el = process_grammar(el)
            if d_el.get('pos') == 'A' and d_el.get('definiteness') == 's':
                pred_adj += 1
            if d_el.get('pos') == 'A' and d_el.get('definiteness') == 'f':
                d_prev_el = {}
                if i > 0:
                    prev_el = t[i - 1]
                    if is_have_grammar(prev_el):
                        d_prev_el = process_grammar(prev_el)
                        d_prev_el = d_prev_el if d_prev_el is not None else {}
                        if d_prev_el.get('pos') == 'V' and prev_el[2] in ['быть', 'делаться', 'сделаться', 'казаться',
                                                                          'называться', 'становиться', 'являться']:
                            pred_adj += 1
                    else:
                        continue
        else:
            continue
    return pred_adj
# 32
# test that the current word is an adjective in attributive position
def attributive_adjective(t):
    """Count full-form adjectives in attributive position.

    Complement of the full-form branch of :func:`predicative_adjectives`:
    a full-form adjective counts unless a copular verb immediately precedes.
    """
    attr_adj = 0
    for i, el in enumerate(t):
        if is_have_grammar(el):
            d_el = process_grammar(el)
            flag_predicate = False
            if d_el.get('pos') == 'A' and d_el.get('definiteness') == 'f':
                d_prev_el = {}
                if i > 0:
                    prev_el = t[i - 1]
                    if is_have_grammar(prev_el):
                        d_prev_el = process_grammar(prev_el)
                        d_prev_el = d_prev_el if d_prev_el is not None else {}
                        if d_prev_el.get('pos') == 'V' and prev_el[2] in ['быть', 'делаться', 'сделаться', 'казаться',
                                                                          'называться', 'становиться', 'являться']:
                            flag_predicate = True
                if not flag_predicate:
                    attr_adj += 1
            else:
                continue
        else:
            continue
    return attr_adj
# 33
# test that the sentence includes causative subordinate clause
def causative_subordinate(t):
    """Count causative subordinate conjunctions in sentence ``t``.

    Recognizes single-word conjunctions ('поскольку', 'ибо'), two-word ones
    ('так как', 'потому что', ...), and multi-word ones with an optional
    comma before 'что' ('ввиду того, что', 'в связи с тем, что', ...).
    Matching is purely positional over (word, grammar, lemma) tuples.
    """
    causative_sub = 0
    for i, el in enumerate(t):
        if el[0] in ['поскольку', 'ибо']:
            causative_sub += 1
        else:
            if i > 0:
                prev_el = t[i - 1]
                # test that conjunction is 'так как'
                if prev_el[0] == 'так' and el[0] == 'как':
                    causative_sub += 1
            if i + 1 < len(t):
                next_el = t[i + 1]
                # test that conjunction is 'затем что', 'потому что', 'оттого что'
                if el[0] in ['затем', 'потому', 'оттого'] and next_el[0] == 'что':
                    causative_sub += 1
            if i + 2 < len(t):
                next_el = t[i + 1]
                next_next_el = t[i + 2]
                # test that conjunction is 'затем что', 'потому что', 'оттого что' separated by comma ('потому, что')
                if el[0] in ['затем', 'потому', 'оттого'] and next_el[0] == ',' and next_next_el[0] == 'что':
                    causative_sub += 1
                # test that conjunction is 'ввиду того что', 'вследствие того что', 'благодаря тому что'
                if el[0] in ['ввиду', 'вследствие', 'благодаря'] and next_el[2] == 'то' and next_next_el[0] == 'что':
                    causative_sub += 1
            if i + 3 < len(t):
                next_el = t[i + 1]
                next_next_el = t[i + 2]
                next_next_next_el = t[i + 3]
                # test that conjunction is 'ввиду того, что', 'вследствие того, что', 'благодаря тому, что'
                if (el[0] in ['ввиду', 'вследствие', 'благодаря'] and next_el[2] == 'то' and
                        next_next_el[0] == ',' and next_next_next_el[0] == 'что'):
                    causative_sub += 1
            if i + 2 < len(t) and i > 0:
                prev_el = t[i - 1]
                next_el = t[i + 1]
                next_next_el = t[i + 2]
                # test that conjunction is 'в силу того что'
                if el[0] == 'силу' and prev_el[0] == 'в' and next_el[2] == 'то' and next_next_el[0] == 'что':
                    causative_sub += 1
            if i + 3 < len(t) and i > 0:
                prev_el = t[i - 1]
                next_el = t[i + 1]
                next_next_el = t[i + 2]
                next_next_next_el = t[i + 3]
                # test that conjunction is 'в силу того, что'
                if (el[0] == 'силу' and prev_el[0] == 'в' and next_el[2] == 'то' and next_next_el[0] == ',' and
                        next_next_next_el[0] == 'что'):
                    causative_sub += 1
                # test that conjunction is 'в связи с тем что'
                if (el[0] == 'связи' and prev_el[0] == 'в' and next_el[0] == 'с' and next_next_el[2] == 'то' and
                        next_next_next_el[0] == 'что'):
                    causative_sub += 1
            if i + 4 < len(t) and i > 0:
                prev_el = t[i - 1]
                next_el = t[i + 1]
                next_next_el = t[i + 2]
                next_next_next_el = t[i + 3]
                next_next_next_next_el = t[i + 4]
                # test that conjunction is 'в связи с тем, что'
                if (el[0] == 'связи' and prev_el[0] == 'в' and next_el[0] == 'с' and next_next_el[2] == 'то' and
                        next_next_next_el[0] == ',' and next_next_next_next_el[0] == 'что'):
                    causative_sub += 1
            else:
                continue
    return causative_sub
# 34
# test that the sentence includes concessive subordinate clause
def concessive_subordinate(t):
    """Count concessive subordinate markers in sentence ``t``.

    Covers 'хоть', 'даром что', 'несмотря/невзирая на', 'только/лишь/добро
    бы', 'пусть/пускай' not introducing a third-person verb (which would be
    imperative instead), and 'хотя' when tagged as a conjunction.
    """
    concessive_sub = 0
    for i, el in enumerate(t):
        d_next_el = {}
        if el[0] == 'хоть':
            concessive_sub += 1
        if i + 1 < len(t):
            next_el = t[i + 1]
            if el[0] == 'даром' and next_el[0] == 'что':
                concessive_sub += 1
            if el[0] in ['несмотря', 'невзирая'] and next_el[0] == 'на':
                concessive_sub += 1
            if el[0] in ['только', 'лишь', 'добро'] and next_el[0] == 'бы':
                concessive_sub += 1
            if is_have_grammar(next_el):
                d_next_el = process_grammar(next_el)
                d_next_el = d_next_el if d_next_el is not None else {}
                if el[0] in ['пусть', 'пускай'] and not (
                        d_next_el.get('pos') == 'V' and d_next_el.get('person') == '3'):
                    concessive_sub += 1
        if is_have_grammar(el):
            d_el = process_grammar(el)
            if el[0] == 'хотя' and d_el.get('pos') == 'C':
                concessive_sub += 1
        else:
            continue
    return concessive_sub
# 35
# test that the sentence includes conditional subordinate clause
def conditional_subordinate(t):
    """Count conditional conjunctions ('если', 'ежели', ...) tagged POS 'C'."""
    conjunctions = ['если', 'ежели', 'кабы', 'коль', 'коли', 'раз']
    found = 0
    for token in t:
        if not is_have_grammar(token):
            continue
        if token[0] in conjunctions and process_grammar(token).get('pos') == 'C':
            found += 1
    return found
# 36
# test that the sentence includes purpose subordinate clause
def purpose_subordinate(t):
    """Count purpose subordinate clauses ('дабы', 'чтобы', 'чтоб').

    'чтобы'/'чтоб' is rejected when the preceding context signals a
    complement clause instead: a desire/request verb ('хотеть', 'просить',
    ...), 'не' + assurance words ('не уверен, что...'), or 'не мочь
    сказать, чтобы...' — each variant checked with and without a comma.
    """
    purpose_sub = 0
    for i, el in enumerate(t):
        if el[0] == 'дабы':
            purpose_sub += 1
        if el[0] in ['чтобы', 'чтоб']:
            if i == 0:
                purpose_sub += 1
            else:
                flag_not_purpose = False
                if i > 0:
                    prev_el = t[i - 1]
                    if prev_el[2] in ['сомневаться', 'хотеть', 'захотеть', 'требовать', 'просить', 'желать',
                                      'ждать', 'мечтать', 'любить', 'загадать', 'захотеться', 'хотеться']:
                        flag_not_purpose = True
                if i - 1 > 0:
                    prev_el = t[i - 1]
                    prev_prev_el = t[i - 2]
                    if prev_el[0] == ',' and prev_prev_el[2] in ['сомневаться', 'хотеть', 'захотеть', 'требовать',
                                                                 'просить', 'желать', 'ждать', 'мечтать', 'любить',
                                                                 'загадать', 'захотеться', 'хотеться']:
                        flag_not_purpose = True
                    if (prev_el[2] in ['уверить', 'уверен', 'уверенный', 'верить', 'сказать', 'то'] and
                            prev_prev_el[0] == 'не'):
                        flag_not_purpose = True
                if i - 2 > 0:
                    prev_el = t[i - 1]
                    prev_prev_el = t[i - 2]
                    prev_prev_prev_el = t[i - 3]
                    if (prev_el[0] == ',' and
                            prev_prev_el[2] in ['уверенный', 'уверен', 'уверить', 'верить', 'сказать', 'то'] and
                            prev_prev_prev_el[0] == 'не'):
                        flag_not_purpose = True
                    if prev_el[2] == 'сказать' and prev_prev_el[2] == 'мочь' and prev_prev_prev_el[0] == 'не':
                        flag_not_purpose = True
                if i - 3 > 0:
                    prev_el = t[i - 1]
                    prev_prev_el = t[i - 2]
                    prev_prev_prev_el = t[i - 3]
                    prev_prev_prev_prev_el = t[i - 4]
                    if (prev_el[0] == ',' and prev_prev_el[2] == 'сказать' and prev_prev_prev_el[2] == 'мочь' and
                            prev_prev_prev_prev_el[0] == 'не'):
                        flag_not_purpose = True
                if not flag_not_purpose:
                    purpose_sub += 1
        else:
            continue
    return purpose_sub
# 37
# test that the current word has a conditional mood form
def conditional_mood(t):
    """Count conditional-mood markers: verbs with vform 'c', or the particle 'бы'.

    NOTE(review): by operator precedence the condition is
    ``(pos=='V' and vform=='c') or word=='бы'``, and the 'бы' branch is
    only reached for tokens that carry a grammar tag (the surrounding
    ``is_have_grammar`` guard) — confirm 'бы' is always tagged.
    """
    cond = 0
    for i, el in enumerate(t):
        if is_have_grammar(el):
            d_el = process_grammar(el)
            if d_el.get('pos') == 'V' and d_el.get('vform') == 'c' or el[0] == 'бы':
                cond += 1
        else:
            continue
    return cond
# 38
# test that the current word is a modal word (possibility)
def modal_possibility(t):
    """Count modal words/constructions expressing possibility.

    Matches the lemma 'мочь' or 'по-видимому' for any token; for tagged
    tokens it also matches two-word constructions ('можно быть', 'едва ли',
    'вряд ли', ...) and modal adverbs ('наверное', 'возможно', ...).
    """
    mod_pos = 0
    for i, el in enumerate(t):
        if el[2] in ['мочь'] or el[0] in ['по-видимому']:
            mod_pos += 1
        if is_have_grammar(el):
            d_el = process_grammar(el)
            if i + 1 < len(t):
                next_el = t[i + 1]
                if ((el[0] == 'можно' and next_el[0] == 'быть') or
                        (el[0] == 'всей' and next_el[0] == 'вероятности') or
                        (el[0] == 'едва' and next_el[0] == 'ли') or
                        (el[0] == 'чуть' and next_el[0] == 'ли') or
                        (el[0] == 'вряд' and next_el[0] == 'ли')):
                    mod_pos += 1
            if d_el.get('pos') == 'R' and el[2] in ['наверное', 'наверно', 'возможно', 'видимо', 'верно', 'вероятно',
                                                    'пожалуй', 'можно']:
                mod_pos += 1
    return mod_pos
# 39
# test that the current word is a modal word (necessity)
def modal_necessity(t):
    """Count modal words/constructions expressing necessity.

    Matches 'требуется', 'следует/надлежит' + infinitive, modal adverbs
    ('нужно', 'надо', ...), and short-form 'должный'/'обязанный'.
    Fix: ``process_grammar(el)`` is now called only under the
    ``is_have_grammar`` guard, consistent with every sibling counter (the
    original called it unconditionally for every token).
    """
    mod_nec = 0
    for i, el in enumerate(t):
        if el[0] == 'требуется':
            mod_nec += 1
        if i + 1 < len(t):
            next_el = t[i + 1]
            if is_have_grammar(next_el):
                d_next_el = process_grammar(next_el)
                d_next_el = d_next_el if d_next_el is not None else {}
                if el[0] in ['следует', 'надлежит'] and d_next_el.get('pos') == 'V' and d_next_el.get('vform') == 'n':
                    mod_nec += 1
        if is_have_grammar(el):
            d_el = process_grammar(el)
            if d_el.get('pos') == 'R' and el[2] in ['нужно', 'надо', 'необходимо', 'нельзя', 'обязательно', 'неизбежно',
                                                    'непременно']:
                mod_nec += 1
            if el[2] in ['должный', 'обязанный'] and d_el.get('pos') == 'A' and d_el.get('definiteness') == 's':
                mod_nec += 1
    return mod_nec
# 40
# test that the current word is evaluative
def evaluative_vocabulary(t):
    """Count tokens whose lemma appears in the evaluative-vocabulary dictionary.

    Fix: the local accumulator was named ``eval``, shadowing the builtin;
    renamed to ``count`` (a purely local change, interface unchanged).
    """
    with codecs.open(r'dictionaries/evaluative_vocab.txt', mode='r', encoding='utf-8') as f:
        evaluative_words = set(s.strip() for s in f.readlines())
    count = 0
    for el in t:
        if el[2] in evaluative_words:
            count += 1
    return count
# 41
# test that the current word is academic
def academic_vocabulary(t):
    """Count tokens whose lemma appears in the academic-words dictionary file."""
    with codecs.open(r'dictionaries/academic_words.txt', mode='r', encoding='utf-8') as f:
        academic_words = set(line.strip() for line in f.readlines())
    return sum(1 for token in t if token[2] in academic_words)
# 42
# test that the sentence includes parenthesis with the meaning of attitude or evaluation
def parenthesis_attitude_evaluation(t):
    """Count parenthetical expressions (вводные слова) conveying the
    speaker's attitude or evaluation, e.g. 'конечно', 'к счастью',
    'по правде говоря'.

    A candidate only counts when surrounding punctuation marks it as
    parenthetical: a comma before/after, or sentence start, depending on
    the pattern.  el[0] is the surface form (assumed — TODO confirm).
    """
    parent = 0
    for i, el in enumerate(t):
        flag = False
        # Case 1: single-word parentheticals set off by a comma.
        if el[0] in ['увы', 'странно', 'удивительно', 'надеюсь', 'думаю', 'полагаю', 'пожалуй', 'думается', 'конечно',
                     'разумеется', 'бесспорно', 'действительно', 'положим', 'предположим', 'допустим', 'признаюсь']:
            if i + 1 < len(t):
                next_el = t[i + 1]
                if i == 0 and next_el[0] == ',':
                    flag = True
            if i > 0:
                prev_el = t[i - 1]
                if prev_el[0] == ',':
                    flag = True
        # Case 2: dative nouns of the 'к счастью' family, with the
        # preposition 'к' one or two tokens back.
        if el[0] in ['счастью', 'радости', 'удовольствию', 'несчастью', 'удивлению', 'сожалению', 'изумлению', 'стыду',
                     'досаде', 'неудовольствю', 'прискорбию', 'огорчению']:
            if i > 0:
                prev_el = t[i - 1]
                if prev_el[0] == 'к':
                    flag = True
            if i - 1 > 0:
                prev_prev_el = t[i - 2]
                if prev_prev_el[0] == 'к':
                    flag = True
        # Case 3: fixed two-word parentheticals; current token must be
        # followed by ',' or '.' and the pair either starts the sentence
        # (i == 1) or follows a comma.
        if i + 1 < len(t):
            next_el = t[i + 1]
            if i == 1 or (i - 1 > 0 and t[i - 2][0] == ','):
                prev_el = t[i - 1]
                if (el[0] in ['хорошо', 'хуже', 'плохо', 'хуже', 'обидно'] and prev_el[0] == 'что' or
                        el[0] in ['несчастью', 'правде', 'существу', 'сути'] and prev_el[0] == 'по' or
                        el[0] == 'дело' and prev_el[0] in ['странное', 'удивительное', 'непонятное'] or
                        el[0] == 'доброго' and prev_el[0] == 'чего' or el[0] == 'полагать' and prev_el[
                            0] == 'надо' or
                        el[0] == 'сомнения' and prev_el[0] == 'без' or el[0] == 'собой' and prev_el[
                            0] == 'само' or
                        el[0] == 'образом' and prev_el[0] == 'некоторым' or el[0] == 'хотите' and prev_el[
                            0] == 'если' or
                        el[0] == 'шуток' and prev_el[0] == 'кроме' or el[0] == 'скажу' and prev_el[
                            0] == 'прямо' or
                        el[0] == 'беду' and prev_el[0] == 'на' or el[0] == 'делом' and prev_el[
                            0] == 'грешным' or
                        el[0] == 'час' and prev_el[0] in ['неровен', 'неровён'] or el[0] == 'нарочно' and
                        prev_el[0] == 'как'):
                    if next_el[0] in [',', '.']:
                        flag = True
        # Case 4: fixed three-word parentheticals, same boundary logic.
        if i + 1 < len(t):
            next_el = t[i + 1]
            if i == 2 or (i - 2 > 0 and t[i - 3][0] == ','):
                prev_el = t[i - 1]
                prev_prev_el = t[i - 2]
                if (el[0] == 'бог' and prev_el[0] == 'дай' and prev_prev_el[0] == 'не' or
                        el[0] == 'разумеется' and prev_el[0] == 'собой' and prev_prev_el[0] == 'само' or
                        el[0] == 'смысле' and prev_el[0] == 'каком-то' and prev_prev_el[0] == 'в' or
                        el[0] == 'совести' and prev_el[0] == 'по' and prev_prev_el[0] == 'говоря' or
                        el[0] == 'чести' and prev_el[0] == 'по' and prev_prev_el[0] == 'сказать' or
                        el[0] == 'говоря' and prev_el[0] == 'нами' and prev_prev_el[0] == 'между' or
                        el[0] == 'сказать' and prev_el[0] == 'правду' and prev_prev_el[0] == 'если' or
                        el[0] == 'говоря' and prev_el[0] == 'правде' and prev_prev_el[0] == 'по' or
                        el[0] == 'говоря' and prev_el[0] == 'сущности' and prev_prev_el[0] == 'в' or
                        el[0] == 'говорить' and prev_el[0] == 'зря' and prev_prev_el[0] == 'нечего' or
                        el[0] in ['хорошо', 'лучше', 'плохо', 'хуже'] and prev_el[0] == 'еще' and
                        prev_prev_el[0] == 'что'):
                    if next_el[0] in [',', '.']:
                        flag = True
        # Count each token at most once even if several patterns matched.
        if flag:
            parent += 1
    return parent
# 43
# test that the current word is an animate noun
def animate_nouns(t):
    """Count tokens whose lemma (el[2]) appears in the animate-nouns
    lemma list (re-read from disk on every call)."""
    with codecs.open('dictionaries/all_lemmas_animate_nouns.txt', mode='r', encoding='utf-8') as f:
        animate = set(line.strip().lower() for line in f.readlines())
    anim_nouns = 0
    for el in t:
        if el[2] in animate:
            anim_nouns += 1
    return anim_nouns
# 44
# test that the sentence includes parenthesis with the meaning of accentuation
def parenthesis_accentuation(t):
    """Count parenthetical expressions with an accentuating/appeal meaning
    ('заметьте', 'представьте', 'к примеру', ...).

    Same boundary logic as the other parenthesis_* features: a pattern
    only counts when comma / sentence-start punctuation frames it.
    """
    parent = 0
    for i, el in enumerate(t):
        flag = False
        # Single-word parentheticals, delimited by a comma.
        if el[0] in ['повторяю', 'повторяем', 'подчеркиваю', 'подчеркиваем', 'представь', 'представьте', 'поверишь',
                     'поверите', 'вообрази', 'вообразите', 'согласись', 'согласитесь', 'заметь', 'заметьте', 'замечу',
                     'заметим', 'например', 'знаешь', 'знаете', 'значит', 'понимаешь', 'понимаете', 'главное',
                     'собственно', 'поверь', 'поверьте']:
            if i + 1 < len(t):
                next_el = t[i + 1]
                if i == 0 and next_el[0] == ',':
                    flag = True
            if i > 0:
                prev_el = t[i - 1]
                if prev_el[0] == ',':
                    flag = True
        # Two-word parentheticals followed by ',' or '.'.
        if i + 1 < len(t):
            next_el = t[i + 1]
            if i == 1 or (i - 1 > 0 and t[i - 2][0] == ','):
                prev_el = t[i - 1]
                if (el[0] in ['важно', 'существенно'] and prev_el[0] == 'что' or
                        el[0] in ['поверишь', 'поверите'] and prev_el[0] == 'не' or
                        el[0] == 'дело' and prev_el[0] == 'главное' or
                        el[0] in ['напоминаю', 'напоминаем'] and prev_el[0] == 'как' or
                        el[0] == 'примеру' and prev_el[0] == 'к' or
                        el[0] == 'сказать' and prev_el[0] == 'так' or
                        el[0] in ['вам', 'тебе'] and prev_el[0] == 'скажу' or
                        el[0] == 'сказать' and prev_el[0] == 'надо' or
                        el[0] == 'общем' and prev_el[0] == 'в' or
                        el[0] == 'говоря' and prev_el[0] == 'собственно'):
                    if next_el[0] in [',', '.']:
                        flag = True
        # Three-word parentheticals, same framing requirement.
        if i + 1 < len(t):
            next_el = t[i + 1]
            if i == 2 or (i - 2 > 0 and t[i - 3][0] == ','):
                prev_el = t[i - 1]
                prev_prev_el = t[i - 2]
                if (el[0] in ['важнее', 'существеннее'] and prev_el[0] == 'еще' and prev_prev_el[0] == 'что' or
                        el[0] == 'представить' and prev_el[0] == 'себе' and prev_prev_el[0] in ['можешь',
                                                                                               'можете']):
                    if next_el[0] in [',', '.']:
                        flag = True
        # Each token contributes at most one count.
        if flag:
            parent += 1
    return parent
# 45
# test that the sentence includes parenthesis with the meaning of relation
def parenthesis_relation(t):
    """Count parenthetical expressions with a discourse-relation meaning
    ('во-первых', 'кроме того', 'в конце концов', ...), framed by commas
    or a sentence boundary.
    """
    parent = 0
    for i, el in enumerate(t):
        flag = False
        # Single-word connectives set off by a comma.
        if el[0] in ['вдобавок', 'притом', 'следовательно', 'напротив', 'наоборот', 'во-первых', 'во-вторых',
                     'в-третьих', 'в-четвертых', 'в-пятых', 'в-шестых', 'в-седьмых', 'в-восьмых', 'в-девятых',
                     'в-десятых', 'значит', 'кстати', 'главное']:
            if i + 1 < len(t):
                next_el = t[i + 1]
                if i == 0 and next_el[0] == ',':
                    flag = True
            if i > 0:
                prev_el = t[i - 1]
                if prev_el[0] == ',':
                    flag = True
        # Two-word connectives followed by ',' or '.'.
        if i + 1 < len(t):
            next_el = t[i + 1]
            if i == 1 or (i - 1 > 0 and t[i - 2][0] == ','):
                prev_el = t[i - 1]
                # NOTE(review): the 'стало быть' clause appears twice below —
                # harmless (same result) but probably a copy-paste leftover.
                if (el[0] == 'того' and prev_el[0] in ['кроме', 'сверх'] or
                        el[0] == 'быть' and prev_el[0] == 'стало' or
                        el[0] == 'более' and prev_el[0] == 'тем' or
                        el[0] in ['водится', 'повелось', 'всегда'] and prev_el[0] == 'как' or
                        el[0] in ['обычаю', 'обыкновению'] and prev_el[0] == 'по' or
                        el[0] in ['твоя', 'ваша'] and prev_el[0] == 'воля' or
                        el[0] == 'воля' and prev_el[0] in ['твоя', 'ваша'] or
                        el[0] == 'быть' and prev_el[0] == 'стало' or
                        el[0] == 'того' and prev_el[0] in ['мало', 'сверх', 'помимо']):
                    if next_el[0] in [',', '.']:
                        flag = True
        # Three-word connectives ('к тому же', 'в конце концов', ...).
        if i + 1 < len(t):
            next_el = t[i + 1]
            if i == 2 or (i - 2 > 0 and t[i - 3][0] == ','):
                prev_el = t[i - 1]
                prev_prev_el = t[i - 2]
                if (el[0] == 'же' and prev_el[0] == 'тому' and prev_prev_el[0] == 'к' or
                        el[0] == 'всего' and prev_el[0] == 'довершение' and prev_prev_el[0] == 'в' or
                        el[0] == 'хочешь' and prev_el[0] == 'ты' and prev_prev_el[0] == 'как' or
                        el[0] == 'же' and prev_el[0] == 'тому' and prev_prev_el[0] == 'же' or
                        el[0] == 'концов' and prev_el[0] == 'конце' and prev_prev_el[0] == 'в'):
                    if next_el[0] in [',', '.']:
                        flag = True
        # Each token contributes at most one count.
        if flag:
            parent += 1
    return parent
# 46
# test that the current word is an adverb with the meaning of degree
def degree_adverb(t):
    """Count adverbs of degree/intensity ('очень', 'чересчур', 'вдвое', ...).

    Three sub-rules: an unconditional lexical list, 'несколько' + adjective,
    and a POS-gated ('R' = adverb) lexical list.  A single token can in
    principle be counted by more than one rule.
    """
    degree = 0
    for i, el in enumerate(t):
        # Rule 1: unambiguous degree words, no POS check needed.
        if el[0] in ['чересчур', 'втрое', 'вчетверо', 'впятеро', 'вшестеро', 'всемеро', 'вдесятеро', 'чуть-чуть',
                     'невыразимо', 'несказанно', 'беспредельно', 'безмерно', 'невыносимо', 'феноменально',
                     'сверхъестественно', 'едва-едва']:
            degree += 1
        # Rule 2: 'несколько' modifying a following adjective.
        if i + 1 < len(t):
            next_el = t[i + 1]
            if is_have_grammar(next_el):
                d_next_el = process_grammar(next_el)
                d_next_el = d_next_el if d_next_el is not None else {}
                if el[0] == 'несколько' and d_next_el.get('pos') == 'A':
                    degree += 1
        # Rule 3: ambiguous words counted only when tagged as adverbs.
        if is_have_grammar(el):
            d_el = process_grammar(el)
            if d_el.get('pos') == 'R' and el[0] in ['крайне', 'очень', 'страшно', 'удивительно', 'исключительно',
                                                    'слишком', 'гораздо', 'абсолютно', 'совершенно', 'необычно',
                                                    'весьма', 'совсем', 'настолько', 'вдвое', 'еле', 'еле-еле',
                                                    'немного', 'необыкновенно', 'необычайно', 'фантастически',
                                                    'чрезвычайно', 'бешено', 'чудовищно', 'неслыханно', 'божественно',
                                                    'бесконечно', 'безумно', 'смертельно', 'ослепительно', 'нестерпимо',
                                                    'блестяще', 'гениально', 'сравнительно', 'относительно',
                                                    'невероятно', 'едва', 'капельку']:
                degree += 1
    return degree
# 47
# test that the current word is a particle
def particles(t):
    """Count particles: POS-tagged single-word particles, a small
    always-particle list, and fixed two-/three-word particle collocations
    ('так и', 'чуть было не', ...).  Each token counts at most once.
    """
    particle = 0
    for i, el in enumerate(t):
        flag = False
        # Single words tagged 'Q' (particle) by the tagger.
        if is_have_grammar(el):
            d_el = process_grammar(el)
            if d_el.get('pos') == 'Q' and el[0] in ['же', 'ну', 'прямо', 'уж', 'вот', 'там', 'разве', 'ли', 'вроде',
                                                    'ж', 'дай', 'только', 'ведь', 'даже', 'лишь']:
                flag = True
        # Forms that are particles regardless of tagging.
        if el[0] in ['таки', 'ка', 'то-то']:
            flag = True
        # Two-word particle collocations.
        if i + 1 < len(t):
            next_el = t[i + 1]
            if is_have_grammar(next_el):
                d_next_el = process_grammar(next_el)
                d_next_el = d_next_el if d_next_el is not None else {}
                if ((el[0] == 'так' and next_el[0] == 'и') or
                        (el[2] in ['какой', 'куда', 'где'] and next_el[0] == 'там' and d_next_el.get('pos') == 'Q') or
                        (el[0] == 'как' and next_el[0] == 'есть') or
                        (el[0] == 'знай' and next_el[0] == 'себе') or
                        (el[0] == 'едва' and next_el[0] == 'не') or
                        (el[0] == 'как' and next_el[0] == 'раз') or
                        (el[0] == 'чуть' and next_el[0] == 'не') or
                        (el[0] == 'нет-нет' and next_el[0] == 'и')):
                    flag = True
        # Three-word particle collocations.
        if i + 2 < len(t):
            next_el = t[i + 1]
            next_next_el = t[i + 2]
            if ((el[0] == 'не' and next_el[0] == 'то' and next_next_el[0] in ['чтоб', 'чтобы']) or
                    (el[0] == 'не' and next_el[0] == 'иначе' and next_next_el[0] in ['как', 'чтобы']) or
                    (el[0] == 'чуть' and next_el[0] == 'было' and next_next_el[0] == 'не') or
                    (el[0] == 'того' and next_el[0] == 'и' and next_next_el[0] in ['гляди', 'жди']) or
                    (el[0] == 'нет-нет' and next_el[0] == 'да' and next_next_el[0] == 'и') or
                    (el[0] == 'ни' and next_el[0] == 'на' and next_next_el[0] == 'есть')):
                flag = True
        if flag:
            particle += 1
    return particle
# 48
# test that the current word is a numeral
def numeral(t):
    """Count tokens whose POS tag is 'M' (numeral)."""
    num = 0
    for el in t:
        if not is_have_grammar(el):
            continue
        if process_grammar(el).get('pos') == 'M':
            num += 1
    return num
# 49
# test that the current word in or not in the list of top 100 most frequent nouns
def top_100_nouns(t):
    """Split tokens of `t` into (in the top-100 noun list, not in it) counts.

    Returns a (top_nouns, not_top) tuple; every token lands in exactly
    one bucket, so the two counts sum to len(t).
    """
    with codecs.open('frequency_dict/frequent_nouns_top_100.txt', mode='r', encoding='utf-8') as f:
        frequent = set(line.strip().lower() for line in f.readlines())
    top_nouns = sum(1 for el in t if el[2] in frequent)
    return top_nouns, len(t) - top_nouns
# 50
# test that the current word in or not in the list of top 1000 most frequent nouns (without the first top 100)
def top_1000_nouns_minus_head(t):
    """Split tokens into (in top-1000-minus-top-100 noun list, not in it).

    Returns a (nouns_minus_head, not_top) tuple summing to len(t).
    """
    with codecs.open('frequency_dict/frequent_nouns_minus_head_100.txt', mode='r', encoding='utf-8') as f:
        frequent = set(line.strip().lower() for line in f.readlines())
    hits = sum(1 for el in t if el[2] in frequent)
    return hits, len(t) - hits
# 51
# test that the current word in or not in the list of top 100 most frequent verbs
def top_100_verbs(t):
    """Split tokens into (in the top-100 verb list, not in it) counts.

    Returns a (top_verbs, not_top) tuple summing to len(t).
    """
    with codecs.open('frequency_dict/frequent_verbs_top_100.txt', mode='r', encoding='utf-8') as f:
        frequent = set(line.strip().lower() for line in f.readlines())
    hits = sum(1 for el in t if el[2] in frequent)
    return hits, len(t) - hits
# 52
# test that the current word in or not in the list of top 100 most frequent verbs (without 100 top verbs)
def top_1000_verbs_minus_head(t):
    """Split tokens into (in top-1000-minus-top-100 verb list, not in it).

    Returns a (verbs_minus_head, not_top) tuple summing to len(t).
    """
    with codecs.open('frequency_dict/frequent_verbs_minus_head_100.txt', mode='r', encoding='utf-8') as f:
        frequent = set(line.strip().lower() for line in f.readlines())
    hits = sum(1 for el in t if el[2] in frequent)
    return hits, len(t) - hits
# 53
# test that the current not in the list of top 100 most frequent words
def top_100(t):
    """Count tokens whose lemma is NOT among the 100 most frequent words."""
    with codecs.open('frequency_dict/top_100_freq.txt', mode='r', encoding='utf-8') as f:
        frequent = set(line.strip().lower() for line in f.readlines())
    # Local renamed from 'top_100' (it shadowed the function's own name).
    missing = 0
    for el in t:
        if el[2] not in frequent:
            missing += 1
    return missing
# 54
# test that the current not in the list of top 300 most frequent words
def top_300(t):
    """Count tokens whose lemma is NOT among the 300 most frequent words."""
    with codecs.open('frequency_dict/top_300_freq.txt', mode='r', encoding='utf-8') as f:
        frequent = set(line.strip().lower() for line in f.readlines())
    return sum(1 for el in t if el[2] not in frequent)
# 55
# test that the current not in the list of top 500 most frequent words
def top_500(t):
    """Count tokens whose lemma is NOT among the 500 most frequent words."""
    with codecs.open('frequency_dict/top_500_freq.txt', mode='r', encoding='utf-8') as f:
        frequent = set(line.strip().lower() for line in f.readlines())
    return sum(1 for el in t if el[2] not in frequent)
# 56
# test that the current not in the list of top 10000 most frequent words
def top_10000(t):
    """Count tokens whose lemma is NOT among the 10000 most frequent words."""
    with codecs.open('frequency_dict/top_10000.txt', mode='r', encoding='utf-8') as f:
        frequent = set(line.strip().lower() for line in f.readlines())
    return sum(1 for el in t if el[2] not in frequent)
# 57
# test that the current not in the list of top 5000 most frequent words
def top_5000(t):
    """Count tokens whose lemma is NOT among the 5000 most frequent words."""
    with codecs.open('frequency_dict/top_5000.txt', mode='r', encoding='utf-8') as f:
        frequent = set(line.strip().lower() for line in f.readlines())
    return sum(1 for el in t if el[2] not in frequent)
# 58
# test that the current word has a complex ending (like приборостро-ение)
def complex_endings(t):
    """Count lemmas (el[2]) longer than five characters that carry a
    complex (bookish/derivational) Russian ending, e.g. приборостро-ение.

    Suffix groups are checked longest first, so each lemma is counted at
    most once even when a shorter suffix is a tail of a longer one.
    """
    suffix_groups = (
        ['ствие', 'енный', 'вание', 'льный', 'ность', 'еский', 'нение', 'шение',
         'ление', 'оящий', 'жение'],
        ['ение', 'ание', 'ьный', 'ость', 'нный', 'ация', 'ский', 'твие', 'ящий'],
        ['ние', 'ный', 'сть', 'ция', 'вие'],
        ['ие', 'ый', 'ия'],
    )
    end = 0
    for el in t:
        lemma = el[2]
        # Short lemmas never count, matching the original length gate.
        if len(lemma) <= 5:
            continue
        for group in suffix_groups:
            if any(lemma.endswith(suffix) for suffix in group):
                end += 1
                break
    return end
# 59
# test that it is a finite verb of 1st or 2nd person
def is_12person_verb(t):
    """Count finite verbs in the 1st or 2nd person."""
    verb = 0
    for el in t:
        if not is_have_grammar(el):
            continue
        d_el = process_grammar(el)
        if d_el.get('pos') == 'V' and d_el.get('person') in ('1', '2'):
            verb += 1
    return verb
|
import java.nio.charset.StandardCharsets;
public class EntryChunk {
    // Raw chunk bytes; interpreted as UTF-8 by StringEntryChunkDto.
    public byte[] content;
    // File offset just past this chunk (assumed from the name — TODO confirm).
    public long endPointer;
    // Other methods and fields are not relevant for this problem
}
public class StringEntryChunkDto {
public String content;
public long endPointer;
public boolean binary;
public StringEntryChunkDto(EntryChunk chunk) {
this.content = new String(chunk.content, StandardCharsets.UTF_8);
this.endPointer = chunk.endPointer;
this.binary = isBinary(chunk.content);
}
private boolean isBinary(byte[] data) {
for (byte b : data) {
if ((b < 0x09 || (b > 0x0D && b < 0x20) || b == 0x7F)) {
return true; // Contains non-printable characters, likely binary
}
}
return false; // Contains only printable characters, likely textual
}
} |
<filename>dac/ui/src/pages/AdminPage/components/modals/AccelerationModal.js
/*
* Copyright (C) 2017-2019 Dremio Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { Component } from 'react';
import PropTypes from 'prop-types';
import Modal from 'components/Modals/Modal';
import AccelerationController from 'components/Acceleration/AccelerationController';
import FormUnsavedWarningHOC from 'components/Modals/FormUnsavedWarningHOC';
import './Modal.less';
// Admin-page modal hosting the acceleration (reflection) editor for one
// dataset. Open/close state is owned by the caller via isOpen/hide.
export class AccelerationModal extends Component {
  static propTypes = {
    isOpen: PropTypes.bool,
    hide: PropTypes.func,
    location: PropTypes.object,
    // connected from FormUnsavedWarningHOC
    updateFormDirtyState: PropTypes.func
  };
  render() {
    const {isOpen, hide, location} = this.props;
    // datasetId travels in router location state; it is undefined when the
    // modal is opened without a dataset context.
    const { datasetId } = location.state || {};
    return (
      <Modal
        size='large'
        title={la('Acceleration')}
        isOpen={isOpen}
        hide={hide}>
        <AccelerationController
          updateFormDirtyState={this.props.updateFormDirtyState}
          onCancel={hide}
          onDone={hide}
          datasetId={datasetId}
        />
      </Modal>
    );
  }
}
// Wrapping with the HOC makes closing with unsaved edits prompt the user.
export default FormUnsavedWarningHOC(AccelerationModal);
|
const StatusTracker = require('../../lib/StatusTracker');
// Post `result` to the configured guild channel. Returns the send promise,
// or undefined when there is nothing to send or the target cannot be found.
const send = (client, config, result) => {
  // Nothing to report — skip silently.
  if (!result) {
    return;
  }
  // Resolve guild and channel defensively. BUG FIX: the original chained
  // .get(...).channels without a null check and threw a TypeError when the
  // guild/channel id was wrong — and the caller's catch block calls send()
  // again, which would throw a second time.
  const guild = client.guilds.cache.get(config.guild);
  if (!guild) {
    return;
  }
  const channel = guild.channels.cache.get(config.channel);
  if (!channel) {
    return;
  }
  return channel.send(result);
};
// Periodically runs the StatusTracker and posts its result to the
// configured channel. The feature is opt-in via client.config.custom.
const Handler = (client) => {
  const config = client.config.custom && client.config.custom.StatusTracker;
  if (!config) {
    return;
  }
  // Poll on a fixed schedule; the multiplier suggests refreshInterval is
  // in hours. NOTE(review): if refreshInterval is missing or non-numeric
  // the delay becomes NaN — confirm the config schema guarantees a number.
  setInterval(async () => {
    try {
      const tracker = new StatusTracker({ client });
      const result = await tracker.track();
      await send(client, config, result);
    } catch (err) {
      console.error(err);
      // Best-effort error report to the same channel.
      await send(client, config, ':x: Something bad happened.');
    }
  }, 60 * 60 * 1000 * config.refreshInterval);
};
module.exports = Handler;
|
<gh_stars>0
// Vuex mutations for the donation/campaign store. SET_* replace a slice
// wholesale, ADD_* append a page of paginated data and bump its cursor,
// RESET_* clear accumulated pages.
export default {
  SET_CURRENT_PATH: (state, { fullPath }) => {
    state.fullPath = fullPath
  },
  SET_HOME_PAGE: (state, { home }) => {
    state.home = home
  },
  SET_NONPROFIT: (state, { nonprofit }) => {
    state.nonprofit = nonprofit
  },
  SET_CAMPAIGN: (state, { campaign }) => {
    state.campaign = campaign
  },
  // Append a page of updates and advance the pagination cursor.
  ADD_UPDATES: (state, { updates }) => {
    state.updates.data = state.updates.data.concat(updates)
    state.updates.current++
  },
  RESET_CAMPAIGNS: (state) => {
    state.campaigns.data = []
  },
  // Clear all per-campaign paginated slices back to page 1.
  RESET_CAMPAIGN: (state) => {
    state.updates.current = 1
    state.updates.data = []
    state.donations.current = 1
    state.donations.data = []
    state.comments.current = 1
    state.comments.data = []
  },
  ADD_COMMENTS: (state, { comments }) => {
    state.comments.data = state.comments.data.concat(comments)
    state.comments.current++
  },
  ADD_DONATIONS: (state, { donations }) => {
    state.donations.data = state.donations.data.concat(donations)
    state.donations.current++
  },
  ADD_TOP_FUNDRAISERS: (state, { fundraisers }) => {
    state.fundraisers.data = state.fundraisers.data.concat(fundraisers)
    state.fundraisers.current++
  },
  SET_CAMPAIGNS: (state, { campaigns }) => {
    state.campaigns.data = campaigns
  },
  SET_COMMON_DATA: (state, { common }) => {
    state.common = common
  },
  // NOTE(review): mutates nothing — looks like a debug leftover; confirm
  // before relying on it.
  UPDATE_COMMENTS: (state, { comment }) => {
    console.log(comment)
  },
  // Record the start of a donation flow, seeding the amount from the
  // initiator's giving level when one is present.
  START_DONATION: (state, { initiator }) => {
    state.userActions.donation.status = "started"
    state.userActions.donation.amount = initiator.givingLevel ? initiator.givingLevel.amount : state.userActions.donation.amount
    state.userActions.donation.initiator = initiator
    state.userActions.donation.campaignId = initiator.campaignId
    state.userActions.donation.nonprofitEin = initiator.nonprofitEin
  }
}
|
// Open the extension's "edit website" page in a new tab when the
// "Add website" button is clicked.
document
  .getElementById('add-website-button')
  .addEventListener('click', () => {
    chrome.tabs.create({ url: '/edit/index.html' });
  });
<reponame>loggi/lap
#!/usr/bin/env python
import os
from pkg_resources import parse_version
import shutil
import subprocess
import sys
import traceback
# Py2/Py3 compatibility: expose the builtins module under one name.
if sys.version_info[0] < 3:
    import __builtin__ as builtins
else:
    import builtins
# Flag read by lap/__init__.py so that importing `lap` during setup does
# not pull in compiled submodules (mirrors numpy's __NUMPY_SETUP__ trick)
# — presumably; TODO confirm against lap/__init__.py.
builtins.__LAP_SETUP__ = True
# Package metadata consumed by setup() at the bottom of this file.
DISTNAME = 'lap'
DESCRIPTION = 'Linear Assignment Problem solver (LAPJV/LAPMOD).'
LONG_DESCRIPTION = """
**lap** is a linear assignment problem solver using Jonker-Volgenant
algorithm for dense (LAPJV) or sparse (LAPMOD) matrices.
"""
MAINTAINER = '<NAME>'
MAINTAINER_EMAIL = '<EMAIL>'
URL = 'https://github.com/gatagat/lap'
LICENSE = 'BSD (2-clause)'
DOWNLOAD_URL = URL
# Safe to import here because builtins.__LAP_SETUP__ was set above.
import lap
VERSION = lap.__version__
NUMPY_MIN_VERSION = '1.10.1'
# Commands that require setuptools rather than plain distutils.
SETUPTOOLS_COMMANDS = set([
    'develop', 'release', 'bdist_egg', 'bdist_rpm',
    'bdist_wininst', 'install_egg_info', 'build_sphinx',
    'egg_info', 'easy_install', 'upload', 'bdist_wheel',
    '--single-version-externally-managed',
])
if SETUPTOOLS_COMMANDS.intersection(sys.argv):
    import setuptools
    extra_setuptools_args = dict(
        zip_safe=False,  # the package can run out of an .egg file
        include_package_data=True,
        install_requires=[
            'numpy >= {0}'.format(NUMPY_MIN_VERSION),
            # BUG FIX: the original spec 'Cython >=0.29.13<1.0.0' lacked
            # the comma PEP 508 requires between version clauses.
            'Cython >=0.29.13,<1.0.0',
        ],
        extras_require={
            'alldeps': (
                'numpy >= {0}'.format(NUMPY_MIN_VERSION),
            ),
        },
    )
else:
    extra_setuptools_args = dict()
from distutils.command.clean import clean as Clean
class CleanCommand(Clean):
    # Custom `python setup.py clean`: also removes build/, generated
    # Cython output, compiled extensions and __pycache__ directories.
    description = "Remove build artifacts from the source tree"
    def run(self):
        Clean.run(self)
        if os.path.exists('build'):
            shutil.rmtree('build')
        # Remove c files if we are not within a sdist package
        cwd = os.path.abspath(os.path.dirname(__file__))
        remove_c_files = not os.path.exists(os.path.join(cwd, 'PKG-INFO'))
        if remove_c_files:
            if os.path.exists('lap/_lapjv.cpp'):
                os.unlink('lap/_lapjv.cpp')
        # Walk the package tree dropping compiled artifacts and caches.
        for dirpath, dirnames, filenames in os.walk('lap'):
            for filename in filenames:
                if any(filename.endswith(suffix) for suffix in
                       (".so", ".pyd", ".dll", ".pyc")):
                    os.unlink(os.path.join(dirpath, filename))
            for dirname in dirnames:
                if dirname == '__pycache__':
                    shutil.rmtree(os.path.join(dirpath, dirname))
# Register the custom clean command with setup().
cmdclass = {'clean': CleanCommand}
from distutils.version import LooseVersion
def cythonize(cython_file, gen_file):
    """Generate `gen_file` (C/C++) from `cython_file` using Cython.

    Tries the `cython` executable first, falling back to invoking the
    Cython compiler through the current interpreter when the executable
    is not on PATH (see scipy gh-2397).

    Raises ImportError when Cython is missing or too old, Exception when
    compilation itself fails, OSError when neither invocation works.
    """
    try:
        from Cython.Compiler.Version import version as cython_version
    except ImportError:
        raise ImportError('Building lapjv requires cython, '
                          'please "pip install cython".')
    # BUG FIX: the version check used to raise ImportError *inside* the try
    # above, so its own `except ImportError` swallowed it and re-raised the
    # wrong "requires cython" message. Checking outside preserves the
    # intended "too old" error. (A dead `pass` statement was also removed.)
    if LooseVersion(cython_version) < LooseVersion('0.21'):
        raise ImportError('Installed cython is too old (0.21 required), '
                          'please "pip install -U cython".')
    flags = ['--fast-fail']
    if gen_file.endswith('.cpp'):
        flags += ['--cplus']
    try:
        try:
            rc = subprocess.call(['cython'] +
                                 flags + ["-o", gen_file, cython_file])
            if rc != 0:
                raise Exception('Cythonizing %s failed' % cython_file)
        except OSError:
            # There are ways of installing Cython that don't result in a
            # cython executable on the path, see scipy issue gh-2397.
            rc = subprocess.call([sys.executable, '-c',
                                  'import sys; from Cython.Compiler.Main '
                                  'import setuptools_main as main;'
                                  ' sys.exit(main())'] + flags +
                                 ["-o", gen_file, cython_file])
            if rc != 0:
                raise Exception('Cythonizing %s failed' % cython_file)
    except OSError:
        raise OSError('Cython needs to be installed')
def get_numpy_status():
    """
    Returns a dictionary containing a boolean specifying whether NumPy
    is up-to-date, along with the version string (empty string if
    not installed).
    """
    numpy_status = {}
    try:
        import numpy
        numpy_version = numpy.__version__
        # Compare against the minimum supported version with PEP 440 rules.
        numpy_status['up_to_date'] = parse_version(
            numpy_version) >= parse_version(NUMPY_MIN_VERSION)
        numpy_status['version'] = numpy_version
    except ImportError:
        # numpy missing: report not-up-to-date and an empty version string.
        traceback.print_exc()
        numpy_status['up_to_date'] = False
        numpy_status['version'] = ""
    return numpy_status
def get_wrapper_pyx():
    # Path of the Cython wrapper source, relative to the repository root.
    return os.path.join('lap', '_lapjv.pyx')
def generate_cython():
    # Derive the .cpp target next to the .pyx and run Cython over it.
    wrapper_pyx_file = get_wrapper_pyx()
    wrapper_c_file = os.path.splitext(wrapper_pyx_file)[0] + '.cpp'
    cythonize(wrapper_pyx_file, wrapper_c_file)
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration declaring the lap._lapjv extension.

    The generated wrapper .cpp must already exist (see generate_cython);
    this function does not create it.
    """
    from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
    config = Configuration(None, parent_package, top_path)
    config.set_options(
        ignore_setup_xxx_py=True,
        assume_default_configuration=True,
        delegate_options_to_subpackages=True,
        quiet=True)
    config.add_data_dir('lap/tests')
    wrapper_pyx_file = get_wrapper_pyx()
    wrapper_c_file = os.path.splitext(wrapper_pyx_file)[0] + '.cpp'
    c_files = [
        os.path.join(os.path.dirname(wrapper_pyx_file), 'lapjv.cpp'),
        os.path.join(os.path.dirname(wrapper_pyx_file), 'lapmod.cpp')]
    # BUG FIX: the original passed nested lists — sources=[wrapper_c_file,
    # c_files] and include_dirs=[get_numpy_include_dirs(), 'lap'] — i.e. a
    # list *containing* a list where flat lists of paths are expected.
    config.add_extension('lap._lapjv', sources=[wrapper_c_file] + c_files,
                         include_dirs=get_numpy_include_dirs() + ['lap'])
    return config
def setup_package():
    """Assemble metadata and invoke setup().

    Informational commands (--help, egg_info, clean, ...) use plain
    setuptools/distutils so they work without numpy; build commands
    require an up-to-date numpy and go through numpy.distutils, with the
    Cython wrapper regenerated unless running from an sdist (PKG-INFO
    present).
    """
    metadata = dict(name=DISTNAME,
                    maintainer=MAINTAINER,
                    maintainer_email=MAINTAINER_EMAIL,
                    description=DESCRIPTION,
                    license=LICENSE,
                    packages=['lap'],
                    url=URL,
                    version=VERSION,
                    download_url=DOWNLOAD_URL,
                    long_description=LONG_DESCRIPTION,
                    classifiers=['Development Status :: 4 - Beta',
                                 'Environment :: Console',
                                 'Intended Audience :: Science/Research',
                                 'Intended Audience :: Developers',
                                 'Programming Language :: C',
                                 'Programming Language :: Python',
                                 'Programming Language :: Python :: 2',
                                 'Programming Language :: Python :: 3',
                                 'Programming Language :: Python :: 2.7',
                                 'Programming Language :: Python :: 3.7',
                                 'Programming Language :: Python :: 3.8',
                                 'Programming Language :: Python :: 3.9',
                                 'Operating System :: Microsoft :: Windows',
                                 'Operating System :: POSIX',
                                 'Operating System :: Unix',
                                 'Operating System :: MacOS',
                                 ],
                    cmdclass=cmdclass,
                    **metadata_extra())
    if len(sys.argv) == 1 or (
            len(sys.argv) >= 2 and ('--help' in sys.argv[1:] or
                                    sys.argv[1] in ('--help-commands',
                                                    'egg_info',
                                                    '--version',
                                                    'clean'))):
        # Informational / cleanup command: plain setup, numpy not needed.
        try:
            from setuptools import setup
        except ImportError:
            from distutils.core import setup
    else:
        numpy_status = get_numpy_status()
        if numpy_status['up_to_date'] is False:
            if numpy_status['version']:
                raise ImportError('Installed numpy is too old, '
                                  'please "pip install -U numpy".')
            else:
                raise ImportError('lap requires numpy, '
                                  'please "pip install numpy".')
        from numpy.distutils.core import setup
        metadata['configuration'] = configuration
        # BUG FIX: the original tested `sys.argv[1] not in 'config'`, a
        # *substring* check — any argument that happens to be a substring
        # of "config" (e.g. 'con', 'fig') would wrongly skip cython
        # generation. Compare for equality instead.
        if len(sys.argv) >= 2 and sys.argv[1] != 'config':
            print('Generating cython files')
            cwd = os.path.abspath(os.path.dirname(__file__))
            if not os.path.exists(os.path.join(cwd, 'PKG-INFO')):
                generate_cython()
    setup(**metadata)


def metadata_extra():
    # Thin indirection kept private to this function's module: returns the
    # setuptools-only keyword arguments computed at import time.
    return extra_setuptools_args
# Script entry point.
if __name__ == "__main__":
    setup_package()
|
<filename>packages/build/src/compile-exec.ts<gh_stars>0
import path from 'path';
import { exec as compile } from 'pkg';
import Platform from './platform';
/**
* The executable name enum.
*/
enum ExecName {
  Windows = 'mongosh.exe',   // Windows binaries need the .exe suffix
  Posix = 'mongosh'
};  // NOTE(review): trailing semicolon after the enum is redundant
/**
 * Target enum.
 * Values are the platform target names understood by `pkg -t`.
 */
enum Target {
  Windows = 'win',
  MacOs = 'macos',
  Linux = 'linux'
}
/**
* Determine the name of the executable based on the
* provided platform.
*
* @param {string} platform - The platform.
*
* @returns {string} The name.
*/
function determineExecName(platform: string): string {
  // Windows binaries need the .exe suffix; every POSIX platform shares
  // the same bare name.
  return platform === Platform.Windows ? ExecName.Windows : ExecName.Posix;
}
/**
* Determine the target name.
*
* @param {string} platform - The platform.
*
* @returns {string} The target name.
*/
const determineTarget = (platform: string): string => {
  // Map our Platform enum onto pkg's target names; anything that is not
  // Windows or macOS is treated as Linux.
  switch(platform) {
    case Platform.Windows: return Target.Windows;
    case Platform.MacOs: return Target.MacOs;
    default: return Target.Linux;
  }
};
/**
* Get the path to the executable itself.
*
* @param {string} outputDir - The directory to save in.
* @param {string} platform - The platform.
*
* @returns {string} The path.
*/
// Full path of the produced executable: output directory plus the
// platform-appropriate binary name.
const executablePath = (outputDir: string, platform: string): string =>
  path.join(outputDir, determineExecName(platform));
/**
* Compile the executable.
*
* @param {string} input - The root js of the app.
* @param {string} outputDir - The output directory for the executable.
* @param {string} platform - The platform.
*/
const compileExec = async(input: string, outputDir: string, platform: string) => {
  const executable = executablePath(outputDir, platform);
  console.log('mongosh: creating binary:', executable);
  // Delegate to pkg: -o names the output file, -t selects the
  // platform-specific base binary.
  await compile([
    input,
    '-o',
    executable,
    '-t',
    determineTarget(platform)
  ]);
};
export default compileExec;
export {
ExecName,
Target,
determineExecName,
determineTarget,
executablePath
};
|
#!/bin/bash
#
# Copyright 2020 IBM Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
echo ">>> Installing Operator SDK"
# Operator SDK release to install (pinned). The original comment said
# "Use version 0.10.0" which contradicted the value below — removed.
RELEASE_VERSION=v0.12.0
# Download the release binary; -f makes curl exit non-zero on HTTP errors
# instead of silently saving an error page as the "binary".
curl -fLO https://github.com/operator-framework/operator-sdk/releases/download/${RELEASE_VERSION}/operator-sdk-${RELEASE_VERSION}-x86_64-linux-gnu
# Install binary
chmod +x operator-sdk-${RELEASE_VERSION}-x86_64-linux-gnu && mkdir -p /usr/local/bin/ && cp operator-sdk-${RELEASE_VERSION}-x86_64-linux-gnu /usr/local/bin/operator-sdk && rm operator-sdk-${RELEASE_VERSION}-x86_64-linux-gnu
|
import re
def extract_copyright(file_path):
    """Print and return copyright notices found in C/C++/Java-style
    comments (// line comments and /* */ block comments) of `file_path`.

    Generalized to also *return* the list of notices (printing is kept
    for backward compatibility with callers relying on stdout).
    """
    with open(file_path, 'r') as file:
        content = file.read()
    # Match a line comment or a block comment that mentions "Copyright".
    # re.DOTALL lets a block comment span multiple lines.
    pattern = r'//.*?Copyright.*?[\r\n]|/\*.*?Copyright.*?\*/'
    matches = re.findall(pattern, content, re.DOTALL)
    # Strip the comment delimiters and surrounding whitespace.
    copyright_notices = [re.sub(r'//|/\*|\*/', '', match).strip() for match in matches]
    for notice in copyright_notices:
        print(notice)
    return copyright_notices
# Example usage
# NOTE(review): runs at import time and raises FileNotFoundError unless
# 'sampleFile.java' exists in the current working directory.
file_path = "sampleFile.java"
extract_copyright(file_path)
/*
Package usecases_test is the test for all UI services: backup, bee, git,
blockchain, graphql, https, pdb. Testing elements start up the delopment
processes of usecase contract. The test file assures available data for the
UI. Run usecases$ go test. Ref https://www.youtube.com/watch?v=0xKGdMCYtvw
*/
package usecases_test
import (
"testing"
// "github.com/khaiphong/mu/backend/entities"
// "fmt"
)
// MockItemsRepo is a placeholder repository stub for the usecases tests;
// fields are expected to be added as the tests grow.
var MockItemsRepo struct {
}
// GetItems is a placeholder helper — not yet implemented.
func GetItems() {
}
// TestGetItems is a placeholder test; it currently asserts nothing.
func TestGetItems (t *testing.T) {
}
package gv.jleon
import akka.http.scaladsl.server.directives.{ DebuggingDirectives }
import akka.http.scaladsl.server.{ Directives, Route }
import akka.http.scaladsl.{ Http ⇒ AkkaHttp }
object Http {
  import Directives._

  // BUG FIX: ActorSystem, ActorMaterializer and Future were referenced
  // without any corresponding import in this file, so it could not
  // compile. Imported locally to keep the change self-contained.
  import akka.actor.ActorSystem
  import akka.stream.ActorMaterializer

  import scala.concurrent.Future

  /** Routes: log each request/result pair under "leon" and answer
    * GET /arch/<path> with an empty body (the path is only printed).
    */
  val route: Route = DebuggingDirectives.logRequestResult("leon") {
    pathPrefix("arch" / RemainingPath) { path =>
      println(s"Asking for $path")
      complete("")
    }
  }

  /** Materializer derived from the ambient actor system; required by
    * bindAndHandle below.
    */
  private[this] implicit def routeHandlingMaterializer(implicit actorSystem: ActorSystem): ActorMaterializer =
    ActorMaterializer()

  /** Bind the HTTP server on all interfaces, port 14000. */
  def serve(implicit actorSystem: ActorSystem): Future[AkkaHttp.ServerBinding] =
    AkkaHttp().bindAndHandle(
      route,
      interface = "0.0.0.0",
      port = 14000
    )
}
|
-- Index the disease-to-gene mapping table on its lookup columns.
call create_index('disease2gene', 'GeneID');
call create_index('disease2gene', 'Symbol');
-- Strip the 'umls:' prefix so ConceptID holds the bare UMLS identifier.
update disease2gene set ConceptID = replace(ConceptID, 'umls:', '');
call create_index('disease2gene', 'ConceptID');
call create_index('disease2gene', 'ConceptName');
-- Composite index for joint GeneID + ConceptID lookups.
call create_index('disease2gene', 'GeneID,ConceptID');
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.