repo_name
stringlengths
6
101
path
stringlengths
4
300
text
stringlengths
7
1.31M
Coderkul2020/kulbot
node_modules/scratch-paint/src/log/log.js
<gh_stars>100-1000 import minilog from 'minilog'; minilog.enable(); export default minilog('scratch-paint');
GeneralZero/CS-577-Final-Project
FreeRadius/freeradius-server-2.1.7/src/modules/rlm_sql/rlm_sql.c
<reponame>GeneralZero/CS-577-Final-Project /* * rlm_sql.c SQL Module * Main SQL module file. Most ICRADIUS code is located in sql.c * * Version: $Id$ * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA * * Copyright 2000,2006 The FreeRADIUS server project * Copyright 2000 <NAME> <<EMAIL>> * Copyright 2000 <NAME> <<EMAIL>> */ #include <freeradius-devel/ident.h> RCSID("$Id$") #include <freeradius-devel/radiusd.h> #include <freeradius-devel/modules.h> #include <freeradius-devel/rad_assert.h> #include <sys/stat.h> #include "rlm_sql.h" static char *allowed_chars = NULL; static const CONF_PARSER module_config[] = { {"driver",PW_TYPE_STRING_PTR, offsetof(SQL_CONFIG,sql_driver), NULL, "mysql"}, {"server",PW_TYPE_STRING_PTR, offsetof(SQL_CONFIG,sql_server), NULL, "localhost"}, {"port",PW_TYPE_STRING_PTR, offsetof(SQL_CONFIG,sql_port), NULL, ""}, {"login", PW_TYPE_STRING_PTR, offsetof(SQL_CONFIG,sql_login), NULL, ""}, {"password", PW_TYPE_STRING_PTR, offsetof(SQL_CONFIG,sql_password), NULL, ""}, {"radius_db", PW_TYPE_STRING_PTR, offsetof(SQL_CONFIG,sql_db), NULL, "radius"}, {"read_groups", PW_TYPE_BOOLEAN, offsetof(SQL_CONFIG,read_groups), NULL, "yes"}, {"sqltrace", PW_TYPE_BOOLEAN, offsetof(SQL_CONFIG,sqltrace), NULL, "no"}, {"sqltracefile", PW_TYPE_STRING_PTR, offsetof(SQL_CONFIG,tracefile), NULL, SQLTRACEFILE}, {"readclients", 
PW_TYPE_BOOLEAN, offsetof(SQL_CONFIG,do_clients), NULL, "no"}, {"deletestalesessions", PW_TYPE_BOOLEAN, offsetof(SQL_CONFIG,deletestalesessions), NULL, "yes"}, {"num_sql_socks", PW_TYPE_INTEGER, offsetof(SQL_CONFIG,num_sql_socks), NULL, "5"}, {"lifetime", PW_TYPE_INTEGER, offsetof(SQL_CONFIG,lifetime), NULL, "0"}, {"max_queries", PW_TYPE_INTEGER, offsetof(SQL_CONFIG,max_queries), NULL, "0"}, {"sql_user_name", PW_TYPE_STRING_PTR, offsetof(SQL_CONFIG,query_user), NULL, ""}, {"default_user_profile", PW_TYPE_STRING_PTR, offsetof(SQL_CONFIG,default_profile), NULL, ""}, {"nas_query", PW_TYPE_STRING_PTR, offsetof(SQL_CONFIG,nas_query), NULL, "SELECT id,nasname,shortname,type,secret FROM nas"}, {"authorize_check_query", PW_TYPE_STRING_PTR, offsetof(SQL_CONFIG,authorize_check_query), NULL, ""}, {"authorize_reply_query", PW_TYPE_STRING_PTR, offsetof(SQL_CONFIG,authorize_reply_query), NULL, NULL}, {"authorize_group_check_query", PW_TYPE_STRING_PTR, offsetof(SQL_CONFIG,authorize_group_check_query), NULL, ""}, {"authorize_group_reply_query", PW_TYPE_STRING_PTR, offsetof(SQL_CONFIG,authorize_group_reply_query), NULL, ""}, {"accounting_onoff_query", PW_TYPE_STRING_PTR, offsetof(SQL_CONFIG,accounting_onoff_query), NULL, ""}, {"accounting_update_query", PW_TYPE_STRING_PTR, offsetof(SQL_CONFIG,accounting_update_query), NULL, ""}, {"accounting_update_query_alt", PW_TYPE_STRING_PTR, offsetof(SQL_CONFIG,accounting_update_query_alt), NULL, ""}, {"accounting_start_query", PW_TYPE_STRING_PTR, offsetof(SQL_CONFIG,accounting_start_query), NULL, ""}, {"accounting_start_query_alt", PW_TYPE_STRING_PTR, offsetof(SQL_CONFIG,accounting_start_query_alt), NULL, ""}, {"accounting_stop_query", PW_TYPE_STRING_PTR, offsetof(SQL_CONFIG,accounting_stop_query), NULL, ""}, {"accounting_stop_query_alt", PW_TYPE_STRING_PTR, offsetof(SQL_CONFIG,accounting_stop_query_alt), NULL, ""}, {"group_membership_query", PW_TYPE_STRING_PTR, offsetof(SQL_CONFIG,groupmemb_query), NULL, NULL}, 
{"connect_failure_retry_delay", PW_TYPE_INTEGER, offsetof(SQL_CONFIG,connect_failure_retry_delay), NULL, "60"}, {"simul_count_query", PW_TYPE_STRING_PTR, offsetof(SQL_CONFIG,simul_count_query), NULL, ""}, {"simul_verify_query", PW_TYPE_STRING_PTR, offsetof(SQL_CONFIG,simul_verify_query), NULL, ""}, {"postauth_query", PW_TYPE_STRING_PTR, offsetof(SQL_CONFIG,postauth_query), NULL, ""}, {"safe-characters", PW_TYPE_STRING_PTR, offsetof(SQL_CONFIG,allowed_chars), NULL, "@abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789.-_: /"}, /* * This only works for a few drivers. */ {"query_timeout", PW_TYPE_INTEGER, offsetof(SQL_CONFIG,query_timeout), NULL, NULL}, {NULL, -1, 0, NULL, NULL} }; /* * Fall-Through checking function from rlm_files.c */ static int fallthrough(VALUE_PAIR *vp) { VALUE_PAIR *tmp; tmp = pairfind(vp, PW_FALL_THROUGH); return tmp ? tmp->vp_integer : 0; } /* * Yucky prototype. */ static int generate_sql_clients(SQL_INST *inst); static size_t sql_escape_func(char *out, size_t outlen, const char *in); /* * sql xlat function. Right now only SELECTs are supported. Only * the first element of the SELECT result will be used. * * For other statements (insert, update, delete, etc.), the * number of affected rows will be returned. */ static int sql_xlat(void *instance, REQUEST *request, char *fmt, char *out, size_t freespace, UNUSED RADIUS_ESCAPE_STRING func) { SQLSOCK *sqlsocket; SQL_ROW row; SQL_INST *inst = instance; char querystr[MAX_QUERY_LEN]; char sqlusername[MAX_STRING_LEN]; size_t ret = 0; RDEBUG("sql_xlat"); /* * Add SQL-User-Name attribute just in case it is needed * We could search the string fmt for SQL-User-Name to see if this is * needed or not */ sql_set_user(inst, request, sqlusername, NULL); /* * Do an xlat on the provided string (nice recursive operation). 
*/ if (!radius_xlat(querystr, sizeof(querystr), fmt, request, sql_escape_func)) { radlog(L_ERR, "rlm_sql (%s): xlat failed.", inst->config->xlat_name); return 0; } query_log(request, inst,querystr); sqlsocket = sql_get_socket(inst); if (sqlsocket == NULL) return 0; /* * If the query starts with any of the following prefixes, * then return the number of rows affected */ if ((strncasecmp(querystr, "insert", 6) == 0) || (strncasecmp(querystr, "update", 6) == 0) || (strncasecmp(querystr, "delete", 6) == 0)) { int numaffected; char buffer[21]; /* 64bit max is 20 decimal chars + null byte */ if (rlm_sql_query(sqlsocket,inst,querystr)) { radlog(L_ERR, "rlm_sql (%s): database query error, %s: %s", inst->config->xlat_name, querystr, (inst->module->sql_error)(sqlsocket, inst->config)); sql_release_socket(inst,sqlsocket); return 0; } numaffected = (inst->module->sql_affected_rows)(sqlsocket, inst->config); if (numaffected < 1) { RDEBUG("rlm_sql (%s): SQL query affected no rows", inst->config->xlat_name); } /* * Don't chop the returned number if freespace is * too small. This hack is necessary because * some implementations of snprintf return the * size of the written data, and others return * the size of the data they *would* have written * if the output buffer was large enough. 
*/ snprintf(buffer, sizeof(buffer), "%d", numaffected); ret = strlen(buffer); if (ret >= freespace){ RDEBUG("rlm_sql (%s): Can't write result, insufficient string space", inst->config->xlat_name); (inst->module->sql_finish_query)(sqlsocket, inst->config); sql_release_socket(inst,sqlsocket); return 0; } memcpy(out, buffer, ret + 1); /* we did bounds checking above */ (inst->module->sql_finish_query)(sqlsocket, inst->config); sql_release_socket(inst,sqlsocket); return ret; } /* else it's a SELECT statement */ if (rlm_sql_select_query(sqlsocket,inst,querystr)){ radlog(L_ERR, "rlm_sql (%s): database query error, %s: %s", inst->config->xlat_name,querystr, (inst->module->sql_error)(sqlsocket, inst->config)); sql_release_socket(inst,sqlsocket); return 0; } ret = rlm_sql_fetch_row(sqlsocket, inst); if (ret) { RDEBUG("SQL query did not succeed"); (inst->module->sql_finish_select_query)(sqlsocket, inst->config); sql_release_socket(inst,sqlsocket); return 0; } row = sqlsocket->row; if (row == NULL) { RDEBUG("SQL query did not return any results"); (inst->module->sql_finish_select_query)(sqlsocket, inst->config); sql_release_socket(inst,sqlsocket); return 0; } if (row[0] == NULL){ RDEBUG("row[0] returned NULL"); (inst->module->sql_finish_select_query)(sqlsocket, inst->config); sql_release_socket(inst,sqlsocket); return 0; } ret = strlen(row[0]); if (ret >= freespace){ RDEBUG("Insufficient string space"); (inst->module->sql_finish_select_query)(sqlsocket, inst->config); sql_release_socket(inst,sqlsocket); return 0; } strlcpy(out,row[0],freespace); RDEBUG("sql_xlat finished"); (inst->module->sql_finish_select_query)(sqlsocket, inst->config); sql_release_socket(inst,sqlsocket); return ret; } static int generate_sql_clients(SQL_INST *inst) { SQLSOCK *sqlsocket; SQL_ROW row; char querystr[MAX_QUERY_LEN]; RADCLIENT *c; char *prefix_ptr = NULL; unsigned int i = 0; int numf = 0; DEBUG("rlm_sql (%s): Processing generate_sql_clients", inst->config->xlat_name); /* NAS query isn't xlat'ed 
*/ strlcpy(querystr, inst->config->nas_query, sizeof(querystr)); DEBUG("rlm_sql (%s) in generate_sql_clients: query is %s", inst->config->xlat_name, querystr); sqlsocket = sql_get_socket(inst); if (sqlsocket == NULL) return -1; if (rlm_sql_select_query(sqlsocket,inst,querystr)){ radlog(L_ERR, "rlm_sql (%s): database query error, %s: %s", inst->config->xlat_name,querystr, (inst->module->sql_error)(sqlsocket, inst->config)); sql_release_socket(inst,sqlsocket); return -1; } while(rlm_sql_fetch_row(sqlsocket, inst) == 0) { i++; row = sqlsocket->row; if (row == NULL) break; /* * The return data for each row MUST be in the following order: * * 0. Row ID (currently unused) * 1. Name (or IP address) * 2. Shortname * 3. Type * 4. Secret * 5. Virtual Server (optional) */ if (!row[0]){ radlog(L_ERR, "rlm_sql (%s): No row id found on pass %d",inst->config->xlat_name,i); continue; } if (!row[1]){ radlog(L_ERR, "rlm_sql (%s): No nasname found for row %s",inst->config->xlat_name,row[0]); continue; } if (!row[2]){ radlog(L_ERR, "rlm_sql (%s): No short name found for row %s",inst->config->xlat_name,row[0]); continue; } if (!row[4]){ radlog(L_ERR, "rlm_sql (%s): No secret found for row %s",inst->config->xlat_name,row[0]); continue; } DEBUG("rlm_sql (%s): Read entry nasname=%s,shortname=%s,secret=%s",inst->config->xlat_name, row[1],row[2],row[4]); c = rad_malloc(sizeof(*c)); memset(c, 0, sizeof(*c)); #ifdef WITH_DYNAMIC_CLIENTS c->dynamic = 1; #endif /* * Look for prefixes */ c->prefix = -1; prefix_ptr = strchr(row[1], '/'); if (prefix_ptr) { c->prefix = atoi(prefix_ptr + 1); if ((c->prefix < 0) || (c->prefix > 128)) { radlog(L_ERR, "rlm_sql (%s): Invalid Prefix value '%s' for IP.", inst->config->xlat_name, prefix_ptr + 1); free(c); continue; } /* Replace '/' with '\0' */ *prefix_ptr = '\0'; } /* * Always get the numeric representation of IP */ if (ip_hton(row[1], AF_UNSPEC, &c->ipaddr) < 0) { radlog(L_CONS|L_ERR, "rlm_sql (%s): Failed to look up hostname %s: %s", 
inst->config->xlat_name, row[1], fr_strerror()); free(c); continue; } else { char buffer[256]; ip_ntoh(&c->ipaddr, buffer, sizeof(buffer)); c->longname = strdup(buffer); } if (c->prefix < 0) switch (c->ipaddr.af) { case AF_INET: c->prefix = 32; break; case AF_INET6: c->prefix = 128; break; default: break; } /* * Other values (secret, shortname, nastype, virtual_server) */ c->secret = strdup(row[4]); c->shortname = strdup(row[2]); if(row[3] != NULL) c->nastype = strdup(row[3]); numf = (inst->module->sql_num_fields)(sqlsocket, inst->config); if ((numf > 5) && (row[5] != NULL)) c->server = strdup(row[5]); DEBUG("rlm_sql (%s): Adding client %s (%s, server=%s) to clients list", inst->config->xlat_name, c->longname,c->shortname, c->server ? c->server : "<none>"); if (!client_add(NULL, c)) { DEBUG("rlm_sql (%s): Failed to add client %s (%s) to clients list. Maybe there's a duplicate?", inst->config->xlat_name, c->longname,c->shortname); client_free(c); return -1; } } (inst->module->sql_finish_select_query)(sqlsocket, inst->config); sql_release_socket(inst, sqlsocket); return 0; } /* * Translate the SQL queries. */ static size_t sql_escape_func(char *out, size_t outlen, const char *in) { size_t len = 0; while (in[0]) { /* * Non-printable characters get replaced with their * mime-encoded equivalents. */ if ((in[0] < 32) || strchr(allowed_chars, *in) == NULL) { /* * Only 3 or less bytes available. */ if (outlen <= 3) { break; } snprintf(out, outlen, "=%02X", (unsigned char) in[0]); in++; out += 3; outlen -= 3; len += 3; continue; } /* * Only one byte left. */ if (outlen <= 1) { break; } /* * Allowed character. */ *out = *in; out++; in++; outlen--; len++; } *out = '\0'; return len; } /* * Set the SQL user name. * * We don't call the escape function here. The resulting string * will be escaped later in the queries xlat so we don't need to * escape it twice. 
(it will make things wrong if we have an * escape candidate character in the username) */ int sql_set_user(SQL_INST *inst, REQUEST *request, char *sqlusername, const char *username) { VALUE_PAIR *vp=NULL; char tmpuser[MAX_STRING_LEN]; tmpuser[0] = '\0'; sqlusername[0]= '\0'; /* Remove any user attr we added previously */ pairdelete(&request->packet->vps, PW_SQL_USER_NAME); if (username != NULL) { strlcpy(tmpuser, username, sizeof(tmpuser)); } else if (strlen(inst->config->query_user)) { radius_xlat(tmpuser, sizeof(tmpuser), inst->config->query_user, request, NULL); } else { return 0; } strlcpy(sqlusername, tmpuser, MAX_STRING_LEN); RDEBUG2("sql_set_user escaped user --> '%s'", sqlusername); vp = radius_pairmake(request, &request->packet->vps, "SQL-User-Name", NULL, 0); if (!vp) { radlog(L_ERR, "%s", fr_strerror()); return -1; } strlcpy(vp->vp_strvalue, tmpuser, sizeof(vp->vp_strvalue)); vp->length = strlen(vp->vp_strvalue); return 0; } static void sql_grouplist_free (SQL_GROUPLIST **group_list) { SQL_GROUPLIST *last; while(*group_list) { last = *group_list; *group_list = (*group_list)->next; free(last); } } static int sql_get_grouplist (SQL_INST *inst, SQLSOCK *sqlsocket, REQUEST *request, SQL_GROUPLIST **group_list) { char querystr[MAX_QUERY_LEN]; int num_groups = 0; SQL_ROW row; SQL_GROUPLIST *group_list_tmp; /* NOTE: sql_set_user should have been run before calling this function */ group_list_tmp = *group_list = NULL; if (!inst->config->groupmemb_query || (inst->config->groupmemb_query[0] == 0)) return 0; if (!radius_xlat(querystr, sizeof(querystr), inst->config->groupmemb_query, request, sql_escape_func)) { radlog_request(L_ERR, 0, request, "xlat \"%s\" failed.", inst->config->groupmemb_query); return -1; } if (rlm_sql_select_query(sqlsocket, inst, querystr) < 0) { radlog_request(L_ERR, 0, request, "database query error, %s: %s", querystr, (inst->module->sql_error)(sqlsocket,inst->config)); return -1; } while (rlm_sql_fetch_row(sqlsocket, inst) == 0) { row = 
sqlsocket->row; if (row == NULL) break; if (row[0] == NULL){ RDEBUG("row[0] returned NULL"); (inst->module->sql_finish_select_query)(sqlsocket, inst->config); sql_grouplist_free(group_list); return -1; } if (*group_list == NULL) { *group_list = rad_malloc(sizeof(SQL_GROUPLIST)); group_list_tmp = *group_list; } else { rad_assert(group_list_tmp != NULL); group_list_tmp->next = rad_malloc(sizeof(SQL_GROUPLIST)); group_list_tmp = group_list_tmp->next; } group_list_tmp->next = NULL; strlcpy(group_list_tmp->groupname, row[0], MAX_STRING_LEN); } (inst->module->sql_finish_select_query)(sqlsocket, inst->config); return num_groups; } /* * sql groupcmp function. That way we can do group comparisons (in the users file for example) * with the group memberships reciding in sql * The group membership query should only return one element which is the username. The returned * username will then be checked with the passed check string. */ static int sql_groupcmp(void *instance, REQUEST *request, VALUE_PAIR *request_vp, VALUE_PAIR *check, VALUE_PAIR *check_pairs, VALUE_PAIR **reply_pairs) { SQLSOCK *sqlsocket; SQL_INST *inst = instance; char sqlusername[MAX_STRING_LEN]; SQL_GROUPLIST *group_list, *group_list_tmp; check_pairs = check_pairs; reply_pairs = reply_pairs; request_vp = request_vp; RDEBUG("sql_groupcmp"); if (!check || !check->vp_strvalue || !check->length){ RDEBUG("sql_groupcmp: Illegal group name"); return 1; } if (!request){ RDEBUG("sql_groupcmp: NULL request"); return 1; } /* * Set, escape, and check the user attr here */ if (sql_set_user(inst, request, sqlusername, NULL) < 0) return 1; /* * Get a socket for this lookup */ sqlsocket = sql_get_socket(inst); if (sqlsocket == NULL) { /* Remove the username we (maybe) added above */ pairdelete(&request->packet->vps, PW_SQL_USER_NAME); return 1; } /* * Get the list of groups this user is a member of */ if (sql_get_grouplist(inst, sqlsocket, request, &group_list) < 0) { radlog_request(L_ERR, 0, request, "Error getting group 
membership"); /* Remove the username we (maybe) added above */ pairdelete(&request->packet->vps, PW_SQL_USER_NAME); sql_release_socket(inst, sqlsocket); return 1; } for (group_list_tmp = group_list; group_list_tmp != NULL; group_list_tmp = group_list_tmp->next) { if (strcmp(group_list_tmp->groupname, check->vp_strvalue) == 0){ RDEBUG("sql_groupcmp finished: User is a member of group %s", check->vp_strvalue); /* Free the grouplist */ sql_grouplist_free(&group_list); /* Remove the username we (maybe) added above */ pairdelete(&request->packet->vps, PW_SQL_USER_NAME); sql_release_socket(inst, sqlsocket); return 0; } } /* Free the grouplist */ sql_grouplist_free(&group_list); /* Remove the username we (maybe) added above */ pairdelete(&request->packet->vps, PW_SQL_USER_NAME); sql_release_socket(inst,sqlsocket); RDEBUG("sql_groupcmp finished: User is NOT a member of group %s", check->vp_strvalue); return 1; } static int rlm_sql_process_groups(SQL_INST *inst, REQUEST *request, SQLSOCK *sqlsocket, int *dofallthrough) { VALUE_PAIR *check_tmp = NULL; VALUE_PAIR *reply_tmp = NULL; SQL_GROUPLIST *group_list, *group_list_tmp; VALUE_PAIR *sql_group = NULL; char querystr[MAX_QUERY_LEN]; int found = 0; int rows; /* * Get the list of groups this user is a member of */ if (sql_get_grouplist(inst, sqlsocket, request, &group_list) < 0) { radlog_request(L_ERR, 0, request, "Error retrieving group list"); return -1; } for (group_list_tmp = group_list; group_list_tmp != NULL && *dofallthrough != 0; group_list_tmp = group_list_tmp->next) { /* * Add the Sql-Group attribute to the request list so we know * which group we're retrieving attributes for */ sql_group = pairmake("Sql-Group", group_list_tmp->groupname, T_OP_EQ); if (!sql_group) { radlog_request(L_ERR, 0, request, "Error creating Sql-Group attribute"); return -1; } pairadd(&request->packet->vps, sql_group); if (!radius_xlat(querystr, sizeof(querystr), inst->config->authorize_group_check_query, request, sql_escape_func)) { 
radlog_request(L_ERR, 0, request, "Error generating query; rejecting user"); /* Remove the grouup we added above */ pairdelete(&request->packet->vps, PW_SQL_GROUP); return -1; } rows = sql_getvpdata(inst, sqlsocket, &check_tmp, querystr); if (rows < 0) { radlog_request(L_ERR, 0, request, "Error retrieving check pairs for group %s", group_list_tmp->groupname); /* Remove the grouup we added above */ pairdelete(&request->packet->vps, PW_SQL_GROUP); pairfree(&check_tmp); return -1; } else if (rows > 0) { /* * Only do this if *some* check pairs were returned */ if (paircompare(request, request->packet->vps, check_tmp, &request->reply->vps) == 0) { found = 1; RDEBUG2("User found in group %s", group_list_tmp->groupname); /* * Now get the reply pairs since the paircompare matched */ if (!radius_xlat(querystr, sizeof(querystr), inst->config->authorize_group_reply_query, request, sql_escape_func)) { radlog_request(L_ERR, 0, request, "Error generating query; rejecting user"); /* Remove the grouup we added above */ pairdelete(&request->packet->vps, PW_SQL_GROUP); pairfree(&check_tmp); return -1; } if (sql_getvpdata(inst, sqlsocket, &reply_tmp, querystr) < 0) { radlog_request(L_ERR, 0, request, "Error retrieving reply pairs for group %s", group_list_tmp->groupname); /* Remove the grouup we added above */ pairdelete(&request->packet->vps, PW_SQL_GROUP); pairfree(&check_tmp); pairfree(&reply_tmp); return -1; } *dofallthrough = fallthrough(reply_tmp); pairxlatmove(request, &request->reply->vps, &reply_tmp); pairxlatmove(request, &request->config_items, &check_tmp); } } else { /* * rows == 0. This is like having the username on a line * in the user's file with no check vp's. 
As such, we treat * it as found and add the reply attributes, so that we * match expected behavior */ found = 1; RDEBUG2("User found in group %s", group_list_tmp->groupname); /* * Now get the reply pairs since the paircompare matched */ if (!radius_xlat(querystr, sizeof(querystr), inst->config->authorize_group_reply_query, request, sql_escape_func)) { radlog_request(L_ERR, 0, request, "Error generating query; rejecting user"); /* Remove the grouup we added above */ pairdelete(&request->packet->vps, PW_SQL_GROUP); pairfree(&check_tmp); return -1; } if (sql_getvpdata(inst, sqlsocket, &reply_tmp, querystr) < 0) { radlog_request(L_ERR, 0, request, "Error retrieving reply pairs for group %s", group_list_tmp->groupname); /* Remove the grouup we added above */ pairdelete(&request->packet->vps, PW_SQL_GROUP); pairfree(&check_tmp); pairfree(&reply_tmp); return -1; } *dofallthrough = fallthrough(reply_tmp); pairxlatmove(request, &request->reply->vps, &reply_tmp); pairxlatmove(request, &request->config_items, &check_tmp); } /* * Delete the Sql-Group we added above * And clear out the pairlists */ pairdelete(&request->packet->vps, PW_SQL_GROUP); pairfree(&check_tmp); pairfree(&reply_tmp); } sql_grouplist_free(&group_list); return found; } static int rlm_sql_detach(void *instance) { SQL_INST *inst = instance; paircompare_unregister(PW_SQL_GROUP, sql_groupcmp); if (inst->config) { int i; if (inst->sqlpool) { sql_poolfree(inst); } if (inst->config->xlat_name) { xlat_unregister(inst->config->xlat_name,(RAD_XLAT_FUNC)sql_xlat); free(inst->config->xlat_name); } /* * Free up dynamically allocated string pointers. */ for (i = 0; module_config[i].name != NULL; i++) { char **p; if (module_config[i].type != PW_TYPE_STRING_PTR) { continue; } /* * Treat 'config' as an opaque array of bytes, * and take the offset into it. There's a * (char*) pointer at that offset, and we want * to point to it. 
*/ p = (char **) (((char *)inst->config) + module_config[i].offset); if (!*p) { /* nothing allocated */ continue; } free(*p); *p = NULL; } /* * Catch multiple instances of the module. */ if (allowed_chars == inst->config->allowed_chars) { allowed_chars = NULL; } free(inst->config); inst->config = NULL; } if (inst->handle) { #if 0 /* * FIXME: Call the modules 'destroy' function? */ lt_dlclose(inst->handle); /* ignore any errors */ #endif } free(inst); return 0; } static int rlm_sql_instantiate(CONF_SECTION * conf, void **instance) { SQL_INST *inst; const char *xlat_name; inst = rad_malloc(sizeof(SQL_INST)); memset(inst, 0, sizeof(SQL_INST)); inst->config = rad_malloc(sizeof(SQL_CONFIG)); memset(inst->config, 0, sizeof(SQL_CONFIG)); /* * Export these methods, too. This avoids RTDL_GLOBAL. */ inst->sql_set_user = sql_set_user; inst->sql_get_socket = sql_get_socket; inst->sql_release_socket = sql_release_socket; inst->sql_escape_func = sql_escape_func; inst->sql_query = rlm_sql_query; inst->sql_select_query = rlm_sql_select_query; inst->sql_fetch_row = rlm_sql_fetch_row; /* * If the configuration parameters can't be parsed, then * fail. */ if (cf_section_parse(conf, inst->config, module_config) < 0) { rlm_sql_detach(inst); return -1; } xlat_name = cf_section_name2(conf); if (xlat_name == NULL) xlat_name = cf_section_name1(conf); if (xlat_name){ inst->config->xlat_name = strdup(xlat_name); xlat_register(xlat_name, (RAD_XLAT_FUNC)sql_xlat, inst); } if (inst->config->num_sql_socks > MAX_SQL_SOCKS) { radlog(L_ERR, "rlm_sql (%s): sql_instantiate: number of sqlsockets cannot exceed MAX_SQL_SOCKS, %d", inst->config->xlat_name, MAX_SQL_SOCKS); rlm_sql_detach(inst); return -1; } /* * Sanity check for crazy people. 
*/ if (strncmp(inst->config->sql_driver, "rlm_sql_", 8) != 0) { radlog(L_ERR, "\"%s\" is NOT an SQL driver!", inst->config->sql_driver); rlm_sql_detach(inst); return -1; } inst->handle = lt_dlopenext(inst->config->sql_driver); if (inst->handle == NULL) { radlog(L_ERR, "Could not link driver %s: %s", inst->config->sql_driver, lt_dlerror()); radlog(L_ERR, "Make sure it (and all its dependent libraries!) are in the search path of your system's ld."); rlm_sql_detach(inst); return -1; } inst->module = (rlm_sql_module_t *) lt_dlsym(inst->handle, inst->config->sql_driver); if (!inst->module) { radlog(L_ERR, "Could not link symbol %s: %s", inst->config->sql_driver, lt_dlerror()); rlm_sql_detach(inst); return -1; } radlog(L_INFO, "rlm_sql (%s): Driver %s (module %s) loaded and linked", inst->config->xlat_name, inst->config->sql_driver, inst->module->name); radlog(L_INFO, "rlm_sql (%s): Attempting to connect to %s@%s:%s/%s", inst->config->xlat_name, inst->config->sql_login, inst->config->sql_server, inst->config->sql_port, inst->config->sql_db); if (sql_init_socketpool(inst) < 0) { rlm_sql_detach(inst); return -1; } paircompare_register(PW_SQL_GROUP, PW_USER_NAME, sql_groupcmp, inst); if (inst->config->do_clients){ if (generate_sql_clients(inst) == -1){ radlog(L_ERR, "Failed to load clients from SQL."); rlm_sql_detach(inst); return -1; } } allowed_chars = inst->config->allowed_chars; *instance = inst; return RLM_MODULE_OK; } static int rlm_sql_authorize(void *instance, REQUEST * request) { VALUE_PAIR *check_tmp = NULL; VALUE_PAIR *reply_tmp = NULL; VALUE_PAIR *user_profile = NULL; int found = 0; int dofallthrough = 1; int rows; SQLSOCK *sqlsocket; SQL_INST *inst = instance; char querystr[MAX_QUERY_LEN]; char sqlusername[MAX_STRING_LEN]; /* * the profile username is used as the sqlusername during * profile checking so that we don't overwrite the orignal * sqlusername string */ char profileusername[MAX_STRING_LEN]; /* * Set, escape, and check the user attr here */ if 
(sql_set_user(inst, request, sqlusername, NULL) < 0) return RLM_MODULE_FAIL; /* * reserve a socket */ sqlsocket = sql_get_socket(inst); if (sqlsocket == NULL) { /* Remove the username we (maybe) added above */ pairdelete(&request->packet->vps, PW_SQL_USER_NAME); return RLM_MODULE_FAIL; } /* * After this point, ALL 'return's MUST release the SQL socket! */ /* * Alright, start by getting the specific entry for the user */ if (!radius_xlat(querystr, sizeof(querystr), inst->config->authorize_check_query, request, sql_escape_func)) { radlog_request(L_ERR, 0, request, "Error generating query; rejecting user"); sql_release_socket(inst, sqlsocket); /* Remove the username we (maybe) added above */ pairdelete(&request->packet->vps, PW_SQL_USER_NAME); return RLM_MODULE_FAIL; } rows = sql_getvpdata(inst, sqlsocket, &check_tmp, querystr); if (rows < 0) { radlog_request(L_ERR, 0, request, "SQL query error; rejecting user"); sql_release_socket(inst, sqlsocket); /* Remove the username we (maybe) added above */ pairdelete(&request->packet->vps, PW_SQL_USER_NAME); pairfree(&check_tmp); return RLM_MODULE_FAIL; } else if (rows > 0) { /* * Only do this if *some* check pairs were returned */ if (paircompare(request, request->packet->vps, check_tmp, &request->reply->vps) == 0) { found = 1; RDEBUG2("User found in radcheck table"); if (inst->config->authorize_reply_query && *inst->config->authorize_reply_query) { /* * Now get the reply pairs since the paircompare matched */ if (!radius_xlat(querystr, sizeof(querystr), inst->config->authorize_reply_query, request, sql_escape_func)) { radlog_request(L_ERR, 0, request, "Error generating query; rejecting user"); sql_release_socket(inst, sqlsocket); /* Remove the username we (maybe) added above */ pairdelete(&request->packet->vps, PW_SQL_USER_NAME); pairfree(&check_tmp); return RLM_MODULE_FAIL; } if (sql_getvpdata(inst, sqlsocket, &reply_tmp, querystr) < 0) { radlog_request(L_ERR, 0, request, "SQL query error; rejecting user"); 
sql_release_socket(inst, sqlsocket); /* Remove the username we (maybe) added above */ pairdelete(&request->packet->vps, PW_SQL_USER_NAME); pairfree(&check_tmp); pairfree(&reply_tmp); return RLM_MODULE_FAIL; } if (!inst->config->read_groups) dofallthrough = fallthrough(reply_tmp); pairxlatmove(request, &request->reply->vps, &reply_tmp); } pairxlatmove(request, &request->config_items, &check_tmp); } } /* * Clear out the pairlists */ pairfree(&check_tmp); pairfree(&reply_tmp); /* * dofallthrough is set to 1 by default so that if the user information * is not found, we will still process groups. If the user information, * however, *is* found, Fall-Through must be set in order to process * the groups as well */ if (dofallthrough) { rows = rlm_sql_process_groups(inst, request, sqlsocket, &dofallthrough); if (rows < 0) { radlog_request(L_ERR, 0, request, "Error processing groups; rejecting user"); sql_release_socket(inst, sqlsocket); /* Remove the username we (maybe) added above */ pairdelete(&request->packet->vps, PW_SQL_USER_NAME); return RLM_MODULE_FAIL; } else if (rows > 0) { found = 1; } } /* * repeat the above process with the default profile or User-Profile */ if (dofallthrough) { int profile_found = 0; /* * Check for a default_profile or for a User-Profile. 
*/ user_profile = pairfind(request->config_items, PW_USER_PROFILE); if (inst->config->default_profile[0] != 0 || user_profile != NULL){ char *profile = inst->config->default_profile; if (user_profile != NULL) profile = user_profile->vp_strvalue; if (profile && strlen(profile)){ RDEBUG("Checking profile %s", profile); if (sql_set_user(inst, request, profileusername, profile) < 0) { radlog_request(L_ERR, 0, request, "Error setting profile; rejecting user"); sql_release_socket(inst, sqlsocket); /* Remove the username we (maybe) added above */ pairdelete(&request->packet->vps, PW_SQL_USER_NAME); return RLM_MODULE_FAIL; } else { profile_found = 1; } } } if (profile_found) { rows = rlm_sql_process_groups(inst, request, sqlsocket, &dofallthrough); if (rows < 0) { radlog_request(L_ERR, 0, request, "Error processing profile groups; rejecting user"); sql_release_socket(inst, sqlsocket); /* Remove the username we (maybe) added above */ pairdelete(&request->packet->vps, PW_SQL_USER_NAME); return RLM_MODULE_FAIL; } else if (rows > 0) { found = 1; } } } /* Remove the username we (maybe) added above */ pairdelete(&request->packet->vps, PW_SQL_USER_NAME); sql_release_socket(inst, sqlsocket); if (!found) { RDEBUG("User %s not found", sqlusername); return RLM_MODULE_NOTFOUND; } else { return RLM_MODULE_OK; } } /* * Accounting: save the account data to our sql table */ static int rlm_sql_accounting(void *instance, REQUEST * request) { SQLSOCK *sqlsocket = NULL; VALUE_PAIR *pair; SQL_INST *inst = instance; int ret = RLM_MODULE_OK; int numaffected = 0; int acctstatustype = 0; char querystr[MAX_QUERY_LEN]; char logstr[MAX_QUERY_LEN]; char sqlusername[MAX_STRING_LEN]; #ifdef CISCO_ACCOUNTING_HACK int acctsessiontime = 0; #endif memset(querystr, 0, MAX_QUERY_LEN); /* * Find the Acct Status Type */ if ((pair = pairfind(request->packet->vps, PW_ACCT_STATUS_TYPE)) != NULL) { acctstatustype = pair->vp_integer; } else { radius_xlat(logstr, sizeof(logstr), "packet has no accounting status type. 
[user '%{User-Name}', nas '%{NAS-IP-Address}']", request, NULL); radlog_request(L_ERR, 0, request, "%s", logstr); return RLM_MODULE_INVALID; } switch (acctstatustype) { /* * The Terminal server informed us that it was rebooted * STOP all records from this NAS */ case PW_STATUS_ACCOUNTING_ON: case PW_STATUS_ACCOUNTING_OFF: RDEBUG("Received Acct On/Off packet"); radius_xlat(querystr, sizeof(querystr), inst->config->accounting_onoff_query, request, sql_escape_func); query_log(request, inst, querystr); sqlsocket = sql_get_socket(inst); if (sqlsocket == NULL) return(RLM_MODULE_FAIL); if (*querystr) { /* non-empty query */ if (rlm_sql_query(sqlsocket, inst, querystr)) { radlog_request(L_ERR, 0, request, "Couldn't update SQL accounting for Acct On/Off packet - %s", (inst->module->sql_error)(sqlsocket, inst->config)); ret = RLM_MODULE_FAIL; } (inst->module->sql_finish_query)(sqlsocket, inst->config); } break; /* * Got an update accounting packet */ case PW_STATUS_ALIVE: /* * Set, escape, and check the user attr here */ sql_set_user(inst, request, sqlusername, NULL); radius_xlat(querystr, sizeof(querystr), inst->config->accounting_update_query, request, sql_escape_func); query_log(request, inst, querystr); sqlsocket = sql_get_socket(inst); if (sqlsocket == NULL) return(RLM_MODULE_FAIL); if (*querystr) { /* non-empty query */ if (rlm_sql_query(sqlsocket, inst, querystr)) { radlog_request(L_ERR, 0, request, "Couldn't update SQL accounting ALIVE record - %s", (inst->module->sql_error)(sqlsocket, inst->config)); ret = RLM_MODULE_FAIL; } else { numaffected = (inst->module->sql_affected_rows)(sqlsocket, inst->config); if (numaffected < 1) { /* * If our update above didn't match anything * we assume it's because we haven't seen a * matching Start record. 
So we have to * insert this update rather than do an update */ radius_xlat(querystr, sizeof(querystr), inst->config->accounting_update_query_alt, request, sql_escape_func); query_log(request, inst, querystr); if (*querystr) { /* non-empty query */ if (rlm_sql_query(sqlsocket, inst, querystr)) { radlog_request(L_ERR, 0, request, "Couldn't insert SQL accounting ALIVE record - %s", (inst->module->sql_error)(sqlsocket, inst->config)); ret = RLM_MODULE_FAIL; } (inst->module->sql_finish_query)(sqlsocket, inst->config); } } } (inst->module->sql_finish_query)(sqlsocket, inst->config); } break; /* * Got accounting start packet */ case PW_STATUS_START: /* * Set, escape, and check the user attr here */ sql_set_user(inst, request, sqlusername, NULL); radius_xlat(querystr, sizeof(querystr), inst->config->accounting_start_query, request, sql_escape_func); query_log(request, inst, querystr); sqlsocket = sql_get_socket(inst); if (sqlsocket == NULL) return(RLM_MODULE_FAIL); if (*querystr) { /* non-empty query */ if (rlm_sql_query(sqlsocket, inst, querystr)) { radlog_request(L_ERR, 0, request, "Couldn't insert SQL accounting START record - %s", (inst->module->sql_error)(sqlsocket, inst->config)); /* * We failed the insert above. It's probably because * the stop record came before the start. 
We try * our alternate query now (typically an UPDATE) */ radius_xlat(querystr, sizeof(querystr), inst->config->accounting_start_query_alt, request, sql_escape_func); query_log(request, inst, querystr); if (*querystr) { /* non-empty query */ if (rlm_sql_query(sqlsocket, inst, querystr)) { radlog_request(L_ERR, 0, request, "Couldn't update SQL accounting START record - %s", (inst->module->sql_error)(sqlsocket, inst->config)); ret = RLM_MODULE_FAIL; } (inst->module->sql_finish_query)(sqlsocket, inst->config); } } (inst->module->sql_finish_query)(sqlsocket, inst->config); } break; /* * Got accounting stop packet */ case PW_STATUS_STOP: /* * Set, escape, and check the user attr here */ sql_set_user(inst, request, sqlusername, NULL); radius_xlat(querystr, sizeof(querystr), inst->config->accounting_stop_query, request, sql_escape_func); query_log(request, inst, querystr); sqlsocket = sql_get_socket(inst); if (sqlsocket == NULL) return(RLM_MODULE_FAIL); if (*querystr) { /* non-empty query */ if (rlm_sql_query(sqlsocket, inst, querystr)) { radlog_request(L_ERR, 0, request, "Couldn't update SQL accounting STOP record - %s", (inst->module->sql_error)(sqlsocket, inst->config)); ret = RLM_MODULE_FAIL; } else { numaffected = (inst->module->sql_affected_rows)(sqlsocket, inst->config); if (numaffected < 1) { /* * If our update above didn't match anything * we assume it's because we haven't seen a * matching Start record. So we have to * insert this stop rather than do an update */ #ifdef CISCO_ACCOUNTING_HACK /* * If stop but zero session length AND no previous * session found, drop it as in invalid packet * This is to fix CISCO's aaa from filling our * table with bogus crap */ if ((pair = pairfind(request->packet->vps, PW_ACCT_SESSION_TIME)) != NULL) acctsessiontime = pair->vp_integer; if (acctsessiontime <= 0) { radius_xlat(logstr, sizeof(logstr), "stop packet with zero session length. 
[user '%{User-Name}', nas '%{NAS-IP-Address}']", request, NULL); radlog_request(L_ERR, 0, request, "%s", logstr); sql_release_socket(inst, sqlsocket); ret = RLM_MODULE_NOOP; } #endif radius_xlat(querystr, sizeof(querystr), inst->config->accounting_stop_query_alt, request, sql_escape_func); query_log(request, inst, querystr); if (*querystr) { /* non-empty query */ if (rlm_sql_query(sqlsocket, inst, querystr)) { radlog_request(L_ERR, 0, request, "Couldn't insert SQL accounting STOP record - %s", (inst->module->sql_error)(sqlsocket, inst->config)); ret = RLM_MODULE_FAIL; } (inst->module->sql_finish_query)(sqlsocket, inst->config); } } } (inst->module->sql_finish_query)(sqlsocket, inst->config); } break; /* * Anything else is ignored. */ default: RDEBUG("Unsupported Acct-Status-Type = %d", acctstatustype); return RLM_MODULE_NOOP; break; } sql_release_socket(inst, sqlsocket); return ret; } /* * See if a user is already logged in. Sets request->simul_count to the * current session count for this user. * * Check twice. If on the first pass the user exceeds his * max. number of logins, do a second pass and validate all * logins by querying the terminal server (using eg. SNMP). 
*/ static int rlm_sql_checksimul(void *instance, REQUEST * request) { SQLSOCK *sqlsocket; SQL_INST *inst = instance; SQL_ROW row; char querystr[MAX_QUERY_LEN]; char sqlusername[MAX_STRING_LEN]; int check = 0; uint32_t ipno = 0; char *call_num = NULL; VALUE_PAIR *vp; int ret; uint32_t nas_addr = 0; int nas_port = 0; /* If simul_count_query is not defined, we don't do any checking */ if (!inst->config->simul_count_query || (inst->config->simul_count_query[0] == 0)) { return RLM_MODULE_NOOP; } if((request->username == NULL) || (request->username->length == 0)) { radlog_request(L_ERR, 0, request, "Zero Length username not permitted\n"); return RLM_MODULE_INVALID; } if(sql_set_user(inst, request, sqlusername, NULL) < 0) return RLM_MODULE_FAIL; radius_xlat(querystr, sizeof(querystr), inst->config->simul_count_query, request, sql_escape_func); /* initialize the sql socket */ sqlsocket = sql_get_socket(inst); if(sqlsocket == NULL) return RLM_MODULE_FAIL; if(rlm_sql_select_query(sqlsocket, inst, querystr)) { radlog(L_ERR, "rlm_sql (%s) sql_checksimul: Database query failed", inst->config->xlat_name); sql_release_socket(inst, sqlsocket); return RLM_MODULE_FAIL; } ret = rlm_sql_fetch_row(sqlsocket, inst); if (ret != 0) { (inst->module->sql_finish_select_query)(sqlsocket, inst->config); sql_release_socket(inst, sqlsocket); return RLM_MODULE_FAIL; } row = sqlsocket->row; if (row == NULL) { (inst->module->sql_finish_select_query)(sqlsocket, inst->config); sql_release_socket(inst, sqlsocket); return RLM_MODULE_FAIL; } request->simul_count = atoi(row[0]); (inst->module->sql_finish_select_query)(sqlsocket, inst->config); if(request->simul_count < request->simul_max) { sql_release_socket(inst, sqlsocket); return RLM_MODULE_OK; } /* * Looks like too many sessions, so let's start verifying * them, unless told to rely on count query only. 
*/ if (!inst->config->simul_verify_query || (inst->config->simul_verify_query[0] == '\0')) { sql_release_socket(inst, sqlsocket); return RLM_MODULE_OK; } radius_xlat(querystr, sizeof(querystr), inst->config->simul_verify_query, request, sql_escape_func); if(rlm_sql_select_query(sqlsocket, inst, querystr)) { radlog_request(L_ERR, 0, request, "Database query error"); sql_release_socket(inst, sqlsocket); return RLM_MODULE_FAIL; } /* * Setup some stuff, like for MPP detection. */ request->simul_count = 0; if ((vp = pairfind(request->packet->vps, PW_FRAMED_IP_ADDRESS)) != NULL) ipno = vp->vp_ipaddr; if ((vp = pairfind(request->packet->vps, PW_CALLING_STATION_ID)) != NULL) call_num = vp->vp_strvalue; while (rlm_sql_fetch_row(sqlsocket, inst) == 0) { row = sqlsocket->row; if (row == NULL) break; if (!row[2]){ (inst->module->sql_finish_select_query)(sqlsocket, inst->config); sql_release_socket(inst, sqlsocket); RDEBUG("Cannot zap stale entry. No username present in entry.", inst->config->xlat_name); return RLM_MODULE_FAIL; } if (!row[1]){ (inst->module->sql_finish_select_query)(sqlsocket, inst->config); sql_release_socket(inst, sqlsocket); RDEBUG("Cannot zap stale entry. No session id in entry.", inst->config->xlat_name); return RLM_MODULE_FAIL; } if (row[3]) nas_addr = inet_addr(row[3]); if (row[4]) nas_port = atoi(row[4]); check = rad_check_ts(nas_addr, nas_port, row[2], row[1]); if (check == 0) { /* * Stale record - zap it. */ if (inst->config->deletestalesessions == TRUE) { uint32_t framed_addr = 0; char proto = 0; int sess_time = 0; if (row[5]) framed_addr = inet_addr(row[5]); if (row[7]){ if (strcmp(row[7], "PPP") == 0) proto = 'P'; else if (strcmp(row[7], "SLIP") == 0) proto = 'S'; } if (row[8]) sess_time = atoi(row[8]); session_zap(request, nas_addr, nas_port, row[2], row[1], framed_addr, proto, sess_time); } } else if (check == 1) { /* * User is still logged in. */ ++request->simul_count; /* * Does it look like a MPP attempt? 
*/ if (row[5] && ipno && inet_addr(row[5]) == ipno) request->simul_mpp = 2; else if (row[6] && call_num && !strncmp(row[6],call_num,16)) request->simul_mpp = 2; } else { /* * Failed to check the terminal server for * duplicate logins: return an error. */ (inst->module->sql_finish_select_query)(sqlsocket, inst->config); sql_release_socket(inst, sqlsocket); radlog_request(L_ERR, 0, request, "Failed to check the terminal server for user '%s'.", row[2]); return RLM_MODULE_FAIL; } } (inst->module->sql_finish_select_query)(sqlsocket, inst->config); sql_release_socket(inst, sqlsocket); /* * The Auth module apparently looks at request->simul_count, * not the return value of this module when deciding to deny * a call for too many sessions. */ return RLM_MODULE_OK; } /* * Execute postauth_query after authentication */ static int rlm_sql_postauth(void *instance, REQUEST *request) { SQLSOCK *sqlsocket = NULL; SQL_INST *inst = instance; char querystr[MAX_QUERY_LEN]; char sqlusername[MAX_STRING_LEN]; if(sql_set_user(inst, request, sqlusername, NULL) < 0) return RLM_MODULE_FAIL; /* If postauth_query is not defined, we stop here */ if (!inst->config->postauth_query || (inst->config->postauth_query[0] == '\0')) return RLM_MODULE_NOOP; /* Expand variables in the query */ memset(querystr, 0, MAX_QUERY_LEN); radius_xlat(querystr, sizeof(querystr), inst->config->postauth_query, request, sql_escape_func); query_log(request, inst, querystr); DEBUG2("rlm_sql (%s) in sql_postauth: query is %s", inst->config->xlat_name, querystr); /* Initialize the sql socket */ sqlsocket = sql_get_socket(inst); if (sqlsocket == NULL) return RLM_MODULE_FAIL; /* Process the query */ if (rlm_sql_query(sqlsocket, inst, querystr)) { radlog(L_ERR, "rlm_sql (%s) in sql_postauth: Database query error - %s", inst->config->xlat_name, (inst->module->sql_error)(sqlsocket, inst->config)); sql_release_socket(inst, sqlsocket); return RLM_MODULE_FAIL; } (inst->module->sql_finish_query)(sqlsocket, inst->config); 
sql_release_socket(inst, sqlsocket); return RLM_MODULE_OK; } /* globally exported name */ module_t rlm_sql = { RLM_MODULE_INIT, "SQL", RLM_TYPE_THREAD_SAFE, /* type: reserved */ rlm_sql_instantiate, /* instantiation */ rlm_sql_detach, /* detach */ { NULL, /* authentication */ rlm_sql_authorize, /* authorization */ NULL, /* preaccounting */ rlm_sql_accounting, /* accounting */ rlm_sql_checksimul, /* checksimul */ NULL, /* pre-proxy */ NULL, /* post-proxy */ rlm_sql_postauth /* post-auth */ }, };
TONY-All/Hangar
src/main/java/io/papermc/hangar/service/internal/users/NotificationService.java
<filename>src/main/java/io/papermc/hangar/service/internal/users/NotificationService.java package io.papermc.hangar.service.internal.users; import io.papermc.hangar.HangarComponent; import io.papermc.hangar.db.dao.HangarDao; import io.papermc.hangar.db.dao.internal.HangarNotificationsDAO; import io.papermc.hangar.db.dao.internal.table.NotificationsDAO; import io.papermc.hangar.db.dao.internal.table.projects.ProjectsDAO; import io.papermc.hangar.model.common.Permission; import io.papermc.hangar.model.db.NotificationTable; import io.papermc.hangar.model.db.UserTable; import io.papermc.hangar.model.db.projects.ProjectTable; import io.papermc.hangar.model.db.versions.ProjectVersionTable; import io.papermc.hangar.model.internal.user.notifications.HangarNotification; import io.papermc.hangar.model.internal.user.notifications.NotificationType; import io.papermc.hangar.service.PermissionService; import org.springframework.stereotype.Service; import java.util.ArrayList; import java.util.List; @Service public class NotificationService extends HangarComponent { private final NotificationsDAO notificationsDAO; private final HangarNotificationsDAO hangarNotificationsDAO; private final ProjectsDAO projectsDAO; private final PermissionService permissionService; public NotificationService(HangarDao<NotificationsDAO> notificationsDAO, HangarDao<HangarNotificationsDAO> hangarNotificationsDAO, HangarDao<ProjectsDAO> projectsDAO, PermissionService permissionService) { this.notificationsDAO = notificationsDAO.get(); this.hangarNotificationsDAO = hangarNotificationsDAO.get(); this.projectsDAO = projectsDAO.get(); this.permissionService = permissionService; } public List<HangarNotification> getUsersNotifications() { return hangarNotificationsDAO.getNotifications(getHangarPrincipal().getId()); } public boolean markNotificationAsRead(long notificationId) { return notificationsDAO.markAsRead(notificationId, getHangarPrincipal().getId()); } public void notifyUsersNewVersion(ProjectTable 
projectTable, ProjectVersionTable projectVersionTable, List<UserTable> projectWatchers) { List<NotificationTable> notificationTables = new ArrayList<>(); for (UserTable projectWatcher : projectWatchers) { notificationTables.add(new NotificationTable( projectWatcher.getId(), projectTable.getOwnerName() + "/" + projectTable.getSlug(), projectTable.getId(), new String[]{"notifications.project.newVersion", projectTable.getName(), projectVersionTable.getVersionString()}, NotificationType.NEUTRAL) ); } notificationsDAO.insert(notificationTables); } public void notifyUsersVersionReviewed(ProjectVersionTable projectVersionTable, boolean partial) { List<NotificationTable> notificationTables = new ArrayList<>(); ProjectTable projectTable = projectsDAO.getById(projectVersionTable.getProjectId()); permissionService.getProjectMemberPermissions(projectVersionTable.getProjectId()).forEach((user, perm) -> { if (perm.has(Permission.EditVersion)) { if (partial) { notificationTables.add(new NotificationTable(user.getId(), null, null, new String[]{"notifications.project.reviewedPartial", projectTable.getSlug(), projectVersionTable.getVersionString()}, NotificationType.SUCCESS)); } else { notificationTables.add(new NotificationTable(user.getId(), null, null, new String[]{"notifications.project.reviewed", projectTable.getSlug(), projectVersionTable.getVersionString()}, NotificationType.SUCCESS)); } } }); notificationsDAO.insert(notificationTables); } }
AudiovisualMetadataPlatform/mico
storage/core/src/main/java/eu/mico/platform/storage/impl/StorageServiceLocalFS.java
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package eu.mico.platform.storage.impl; import eu.mico.platform.storage.api.StorageService; import java.io.*; import java.net.URI; import java.nio.file.Path; import java.nio.file.Paths; /** * Storage implementation for local filesystem * * @author <NAME> */ public class StorageServiceLocalFS implements StorageService { private Path basePath; public StorageServiceLocalFS(URI basePath) { this.basePath = Paths.get(basePath).normalize().toAbsolutePath(); } @Override public OutputStream getOutputStream(URI contentPath) throws IOException { File file = getContentPartPath(contentPath); if (file == null) return null; File parentDir = file.getParentFile(); if (!parentDir.exists()) { parentDir.mkdirs(); //grant read and write permissions for the directory to every user parentDir.setReadable(true, false); parentDir.setWritable(true, false); parentDir.setExecutable(true, false); } return new FileOutputStream(file); } @Override public InputStream getInputStream(URI contentPath) throws IOException { File file = getContentPartPath(contentPath); if (file == null) return null; return new FileInputStream(file); } @Override public boolean delete(URI contentPath) throws IOException { File file = getContentPartPath(contentPath); if (file == null) return false; boolean success = file.delete(); File parent = file.getParentFile(); if (parent.exists() && parent.list().length == 0) { parent.delete(); } return success; } private File getContentPartPath(URI 
contentPath) { String contentPathPath = contentPath.getPath(); if (contentPathPath == null) { throw new IllegalArgumentException("contentPath must contain path"); } // "plain" usage of URI.get keeps trailing slash. Paths.get() therefore fails with // java.nio.file.InvalidPathException: Illegal char <:> at index 2: /X:/workspace/bla // checks for "windows-style" /C:/... paths if(contentPathPath.matches("/\\p{Upper}:/.*")) { // and removes trailing slash: contentPathPath = contentPathPath.substring(1); } // on windows, we have to check both... if (contentPathPath.endsWith(File.separator) || contentPathPath.endsWith("/")) return null; String path = Paths.get(contentPathPath).normalize().toString(); while(path.startsWith(File.separator)) { path = path.substring(File.separator.length()); } if (path.isEmpty()) { return null; } File file = basePath.resolve(path + ".bin").normalize().toFile(); if (!file.toPath().startsWith(basePath)) return null; file.setReadable(true, false); //everybody can read file.setWritable(true, true); //owner can write file.setExecutable(false, false); //no one can execute return file; } }
Royalkol/Xenco-server
src/main/java/com/nasus/mongodb/service/impl/UserServiceImpl.java
package com.nasus.mongodb.service.impl; import com.nasus.mongodb.entity.XencoUser; import com.nasus.mongodb.jpa.UserRepository; import com.nasus.mongodb.requestinfo.RequestLoginInfo; import com.nasus.mongodb.requestinfo.RequestPhoneInfo; import com.nasus.mongodb.responseinfo.ResponseLoginBody; import com.nasus.mongodb.responseinfo.ResponsePhoneBody; import com.nasus.mongodb.service.UserService; import com.nasus.mongodb.vo.LoginInfoVo; import com.nasus.mongodb.vo.PhoneInfoVo; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import java.util.List; /** * Created by Royal on 2020/5/9 */ @Service public class UserServiceImpl implements UserService { @Autowired private UserRepository userRepository; @Override public ResponseLoginBody login(RequestLoginInfo requestLoginInfo) { String backMessage = "登录成功"; String backStatus = "0000"; LoginInfoVo loginInfoVo = new LoginInfoVo(); ResponseLoginBody responseLoginBody = new ResponseLoginBody(); XencoUser xencoUser = this.findUserByTrainnumber(requestLoginInfo.getTrainnumber()); System.err.println(xencoUser); //账号不存在的情况 if (xencoUser==null){ backMessage = "登录失败"; backStatus = "0001"; }else { //账号存在密码不匹配的情况 if (!requestLoginInfo.getPassword().equals(xencoUser.getPassword())) { backMessage = "登录失败"; backStatus = "0001"; } } loginInfoVo.setBackMessage(backMessage); loginInfoVo.setBackStatus(backStatus); responseLoginBody.setResultinfo(loginInfoVo); return responseLoginBody; } @Override public ResponsePhoneBody checkPhonNumber(RequestPhoneInfo requestPhoneInfo) { String backMessage = "该手机号下有绑定培训证号"; String backStatus = "0000"; String trainnumber=""; PhoneInfoVo phoneInfoVo = new PhoneInfoVo(); ResponsePhoneBody responsePhoneBody = new ResponsePhoneBody(); XencoUser xencoUser = this.findUserByPhonenumber(requestPhoneInfo.getPhonenumber()); System.err.println(xencoUser); if (xencoUser==null) { backMessage = 
"该手机号下未绑定培训证号"; backStatus = "0001"; }else { trainnumber =xencoUser.getTrainnumber(); } phoneInfoVo.setTrainnumber(trainnumber); phoneInfoVo.setBackmessage(backMessage); phoneInfoVo.setBackStatus(backStatus); responsePhoneBody.setTraininfo(phoneInfoVo); return responsePhoneBody; } @Override public XencoUser findUserByTrainnumber(String trainnumber) { XencoUser xencoUser = null; List<XencoUser> xencoUserList = userRepository.findXencoUserByTrainnumber(trainnumber); if (xencoUserList!=null&&xencoUserList.size() > 0) { xencoUser = xencoUserList.get(0); } return xencoUser; } @Override @Transactional(rollbackFor = Exception.class) public XencoUser findUserByPhonenumber(String phonenumber) { XencoUser xencoUser = new XencoUser(); List<XencoUser> xencoUserList = userRepository.findXencoUserByPhonenumber(phonenumber); if (xencoUserList!=null&&xencoUserList.size() > 0) { xencoUser = xencoUserList.get(0); } return xencoUser; } @Override @Transactional(rollbackFor = Exception.class) public XencoUser addUser(XencoUser xencoUser) { return userRepository.save(xencoUser); } }
heroletumik/hopis
src/bitbucket.org/atticlab/horizon/ingest/session/ingestion/main.go
package ingestion import ( "bitbucket.org/atticlab/horizon/cache" "bitbucket.org/atticlab/horizon/db2" "bitbucket.org/atticlab/horizon/db2/history" "bitbucket.org/atticlab/horizon/db2/sqx" sq "github.com/lann/squirrel" ) // Ingestion receives write requests from a Session type Ingestion struct { // DB is the sql repo to be used for writing any rows into the horizon // database. DB *db2.Repo CurrentVersion int ledgers *sqx.BatchInsertBuilder transactions *sqx.BatchInsertBuilder transaction_participants *sqx.BatchInsertBuilder operations *sqx.BatchInsertBuilder operation_participants *sqx.BatchInsertBuilder effects *sqx.BatchInsertBuilder accounts *sqx.BatchInsertBuilder statistics *sqx.BatchUpdateBuilder needFlush []sqx.Flushable // cache statisticsCache *cache.AccountStatistics HistoryAccountCache *cache.HistoryAccount } func New(db *db2.Repo, historyAccountCache *cache.HistoryAccount, currentVersion int) *Ingestion { q := &history.Q{ Repo: db, } return &Ingestion{ DB: db, CurrentVersion: currentVersion, HistoryAccountCache: historyAccountCache, statisticsCache: cache.NewAccountStatistics(q), } } func (ingest *Ingestion) HistoryQ() history.QInterface { return &history.Q{ Repo: ingest.DB, } } // Rollback aborts this ingestions transaction func (ingest *Ingestion) Rollback() (err error) { // recreates all inserters to release memory ingest.createInsertBuilders() err = ingest.DB.Rollback() return } // Start makes the ingestion reeady, initializing the insert builders and tx func (ingest *Ingestion) Start() (err error) { err = ingest.DB.Begin() if err != nil { return } ingest.createInsertBuilders() return } // Clear removes data from the ledger func (ingest *Ingestion) Clear(start int64, end int64) error { if start <= 1 { del := sq.Delete("history_accounts").Where("id = 1") ingest.DB.Exec(del) } err := ingest.clearRange(start, end, "history_effects", "history_operation_id") if err != nil { return err } err = ingest.clearRange(start, end, 
"history_operation_participants", "history_operation_id") if err != nil { return err } err = ingest.clearRange(start, end, "history_operations", "id") if err != nil { return err } err = ingest.clearRange(start, end, "history_transaction_participants", "history_transaction_id") if err != nil { return err } err = ingest.clearRange(start, end, "history_transactions", "id") if err != nil { return err } err = ingest.clearRange(start, end, "history_accounts", "id") if err != nil { return err } err = ingest.clearRange(start, end, "history_ledgers", "id") if err != nil { return err } return nil } // Close finishes the current transaction and finishes this ingestion. func (ingest *Ingestion) Close() error { err := ingest.flushInserters() if err != nil { return err } return ingest.commit() } // Flush writes the currently buffered rows to the db, and if successful // starts a new transaction. func (ingest *Ingestion) Flush() error { err := ingest.flushInserters() if err != nil { return err } err = ingest.commit() if err != nil { return err } return ingest.Start() } func (ingest *Ingestion) flushInserters() error { for _, flusher := range ingest.needFlush { err := flusher.Flush() if err != nil { return err } } return nil } func (ingest *Ingestion) createInsertBuilders() { ingest.statistics = sqx.BatchUpdate(sqx.BatchInsertFromInsert(ingest.DB, history.AccountStatisticsInsert), history.AccountStatisticsUpdateParams, history.AccountStatisticsUpdateWhere) ingest.ledgers = sqx.BatchInsertFromInsert(ingest.DB, history.LedgerInsert) ingest.accounts = sqx.BatchInsertFromInsert(ingest.DB, history.AccountInsert) ingest.transactions = sqx.BatchInsertFromInsert(ingest.DB, history.TransactionInsert) ingest.transaction_participants = sqx.BatchInsertFromInsert(ingest.DB, history.TransactionParticipantInsert) ingest.operations = sqx.BatchInsertFromInsert(ingest.DB, history.OperationInsert) ingest.operation_participants = sqx.BatchInsertFromInsert(ingest.DB, 
history.OperationParticipantInsert) ingest.effects = sqx.BatchInsertFromInsert(ingest.DB, history.EffectInsert) ingest.needFlush = []sqx.Flushable{ ingest.statistics, ingest.ledgers, ingest.accounts, ingest.transactions, ingest.transaction_participants, ingest.operations, ingest.operation_participants, ingest.effects, } }
flaweoff/OfflineManager
plugin/src/main/java/net/flawe/offlinemanager/commands/OMCommand.java
<filename>plugin/src/main/java/net/flawe/offlinemanager/commands/OMCommand.java package net.flawe.offlinemanager.commands; import net.flawe.offlinemanager.OfflineManager; import net.flawe.offlinemanager.api.IPlaceholder; import net.flawe.offlinemanager.api.OfflineManagerAPI; import net.flawe.offlinemanager.api.command.ICommand; import net.flawe.offlinemanager.configuration.Messages; import net.flawe.offlinemanager.configuration.Settings; import net.flawe.offlinemanager.placeholders.Placeholder; import net.flawe.offlinemanager.placeholders.PlaceholderUtil; import org.bukkit.entity.Player; import java.util.*; public abstract class OMCommand implements ICommand { private final String name; private final String help; private final String permission; private final String[] aliases; private final Set<IPlaceholder> placeholders = new HashSet<>(); protected final OfflineManagerAPI api = OfflineManager.getApi(); protected final Messages messages = ((OfflineManager) api).getMessages(); protected final Settings settings = ((OfflineManager) api).getSettings(); public OMCommand(String name, String help, String permission) { this(name, help, permission, new String[0]); } public OMCommand(String name, String help, String permission, String[] aliases) { this.name = name; this.help = help; this.permission = permission; this.aliases = aliases; } @Override public String getName() { return name; } @Override public String getHelp() { return help; } @Override public String getPermission() { return permission; } @Override public String[] getAliases() { String[] a = new String[aliases.length + 1]; System.arraycopy(aliases, 0, a, 0, aliases.length); a[aliases.length] = name; return a; } @Override public abstract void execute(Player player, String[] args); @Override public boolean hasPermission(Player player) { return player.hasPermission(permission); } @Override public void addPlaceholder(String key, String value) { addPlaceholders(new Placeholder(key, value)); } @Override public void 
addPlaceholder(IPlaceholder placeholder) { placeholders.remove(placeholder); placeholders.add(placeholder); } @Override public void addPlaceholders(IPlaceholder... placeholders) { List<IPlaceholder> list = Arrays.asList(placeholders); list.forEach(this.placeholders::remove); this.placeholders.addAll(list); } @Override public void removePlaceholder(String key) { placeholders.removeIf(placeholder -> placeholder.getKey().equals(key)); } @Override public void removePlaceholder(IPlaceholder placeholder) { placeholders.remove(placeholder); } @Override public Map<String, String> getPlaceholdersAsMap() { Map<String, String> map = new HashMap<>(); for (IPlaceholder placeholder : placeholders) map.put(placeholder.getKey(), placeholder.getValue()); return map; } @Override public Set<IPlaceholder> getPlaceholders() { return placeholders; } protected void sendPlayerMessage(Player player, String message) { message = api.getConfigManager().fillMessage(player, message); player.sendMessage(PlaceholderUtil.fillPlaceholders(message, placeholders)); } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; return Objects.equals(name, ((ICommand) o).getName()); } @Override public int hashCode() { return Objects.hashCode(name); } }
Omnirobotic/godot
modules/scene_manager/include/map_msgs/srv/get_point_map_roi__response.hpp
// generated from rosidl_generator_cpp/resource/msg.hpp.em // generated code does not contain a copyright notice #ifndef MAP_MSGS__SRV__GET_POINT_MAP_ROI__RESPONSE_HPP_ #define MAP_MSGS__SRV__GET_POINT_MAP_ROI__RESPONSE_HPP_ #include "map_msgs/srv/get_point_map_roi__response__struct.hpp" #include "map_msgs/srv/get_point_map_roi__response__traits.hpp" #endif // MAP_MSGS__SRV__GET_POINT_MAP_ROI__RESPONSE_HPP_
Robbbert/messui
src/mame/includes/wiping.h
<gh_stars>10-100 // license:BSD-3-Clause // copyright-holders:<NAME> #ifndef MAME_INCLUDES_WIPING_H #define MAME_INCLUDES_WIPING_H #pragma once #include "emupal.h" class wiping_state : public driver_device { public: wiping_state(const machine_config &mconfig, device_type type, const char *tag) : driver_device(mconfig, type, tag), m_maincpu(*this, "maincpu"), m_audiocpu(*this, "audiocpu"), m_gfxdecode(*this, "gfxdecode"), m_palette(*this, "palette"), m_videoram(*this, "videoram"), m_colorram(*this, "colorram"), m_spriteram(*this, "spriteram") { } void wiping(machine_config &config); private: required_device<cpu_device> m_maincpu; required_device<cpu_device> m_audiocpu; required_device<gfxdecode_device> m_gfxdecode; required_device<palette_device> m_palette; required_shared_ptr<uint8_t> m_videoram; required_shared_ptr<uint8_t> m_colorram; required_shared_ptr<uint8_t> m_spriteram; int m_flipscreen; uint8_t *m_soundregs; // if 0-ed uint8_t m_main_irq_mask; uint8_t m_sound_irq_mask; uint8_t ports_r(offs_t offset); DECLARE_WRITE_LINE_MEMBER(main_irq_mask_w); DECLARE_WRITE_LINE_MEMBER(sound_irq_mask_w); DECLARE_WRITE_LINE_MEMBER(flipscreen_w); void wiping_palette(palette_device &palette) const; virtual void machine_start() override; uint32_t screen_update(screen_device &screen, bitmap_ind16 &bitmap, const rectangle &cliprect); INTERRUPT_GEN_MEMBER(vblank_irq); INTERRUPT_GEN_MEMBER(sound_timer_irq); void main_map(address_map &map); void sound_map(address_map &map); }; #endif // MAME_INCLUDES_WIPING_H
DAIAD/home-web
common/src/main/java/eu/daiad/common/model/query/UserPopulationFilter.java
<filename>common/src/main/java/eu/daiad/common/model/query/UserPopulationFilter.java package eu.daiad.common.model.query; import java.util.ArrayList; import java.util.List; import java.util.UUID; public class UserPopulationFilter extends PopulationFilter { private List<UUID> users = new ArrayList<UUID>(); public UserPopulationFilter() { super(); } public UserPopulationFilter(String label) { super(label); } public UserPopulationFilter(String label, UUID user) { super(label); users.add(user); } public UserPopulationFilter(String label, UUID[] users) { super(label); for (UUID userKey : users) { this.users.add(userKey); } } public UserPopulationFilter(String label, UUID[] users, Ranking ranking) { super(label, ranking); for (UUID userKey : users) { this.users.add(userKey); } } public UserPopulationFilter(String label, UUID[] users, EnumRankingType ranking, EnumMetric metric, int limit) { super(label, new Ranking(ranking, metric, limit)); for (UUID userKey : users) { this.users.add(userKey); } } public List<UUID> getUsers() { return users; } @Override public EnumPopulationFilterType getType() { return EnumPopulationFilterType.USER; } }
hoover/iu
src/components/search/Results.js
<reponame>hoover/iu import React, { memo } from 'react' import { Box, Fab, Fade, Grid, Typography } from '@material-ui/core' import { makeStyles } from '@material-ui/core/styles' import Pagination from './Pagination' import ResultsList from './ResultsList' import ResultsTable from './ResultsTable' import { useSearch } from './SearchProvider' import { reactIcons } from '../../constants/icons' import ResultsProgress from './ResultsProgress' const useStyles = makeStyles(theme => ({ viewTypeIcon: { flex: 'none', boxShadow: 'none', marginLeft: theme.spacing(2), marginTop: theme.spacing(1), marginBottom: theme.spacing(2), }, })) function Results({ maxCount }) { const classes = useStyles() const { query, results, resultsTask, resultsLoading, resultsViewType, setResultsViewType } = useSearch() let loadingETA = Number.MAX_SAFE_INTEGER if (resultsTask) { if (resultsTask.status === 'done') { loadingETA = 0 } else if (resultsTask.eta.total_sec / resultsTask.initialEta < 1) { loadingETA = Math.min(resultsTask.initialEta, resultsTask.eta.total_sec) } } return ( <> <Pagination maxCount={maxCount} /> <Grid container> <Grid item container justify="flex-end"> <Grid item> <Fab size="small" color={resultsViewType === 'list' ? 'primary' : 'default'} className={classes.viewTypeIcon} onClick={() => setResultsViewType('list')} > {reactIcons.listView} </Fab> </Grid> <Grid item> <Fab size="small" color={resultsViewType === 'table' ? 'primary' : 'default'} className={classes.viewTypeIcon} onClick={() => setResultsViewType('table')} > {reactIcons.tableView} </Fab> </Grid> </Grid> </Grid> <Fade in={resultsLoading} unmountOnExit> <Box display="flex" alignItems="center"> <Box width="100%" mr={1}> <ResultsProgress eta={loadingETA} /> </Box> <Box minWidth={35}> {resultsTask?.eta && ( <Typography variant="body2" color="textSecondary"> ETA:&nbsp;{resultsTask.eta.total_sec}s </Typography> )} </Box> </Box> </Fade> {!!results && !query.collections?.length ? 
<i>no collections selected</i> : resultsViewType === 'list' ? <ResultsList /> : <ResultsTable /> } {!!results?.hits.hits.length && <Pagination maxCount={maxCount} /> } </> ) } export default memo(Results)
ertogrul/ArgMining
tests/loaders/test_THF_sentence_corpus_loader.py
import unittest

from argmining.sentence.loaders.THF_sentence_corpus_loader import parse_tree_tagger_lemma
from argmining.sentence.loaders.THF_sentence_corpus_loader import parse_IWNLP_lemma
from argmining.sentence.loaders.THF_sentence_corpus_loader import parse_polarity


class THFSentenceCorpusLoaderTests(unittest.TestCase):
    """Unit tests for the parse helpers of the THF sentence corpus loader.

    Each helper maps an empty input to ``None`` and passes non-empty
    values through unchanged.
    """

    def test_tree_tagger_lemma_empty(self):
        """An empty lemma list parses to None."""
        input_value = []
        parsed_lemma = parse_tree_tagger_lemma(input_value)
        self.assertEqual(parsed_lemma, None)

    def test_tree_tagger_lemma_one_value(self):
        """A single-entry lemma list is returned unchanged."""
        input_value = ["flächendeckend"]
        parsed_lemma = parse_tree_tagger_lemma(input_value)
        self.assertEqual(parsed_lemma, ["flächendeckend"])

    def test_tree_tagger_lemma_two_values(self):
        """A two-entry lemma list is returned unchanged, preserving order."""
        input_value = ["Rüge", "Rügen"]
        # Fixed copy-paste bug: this test previously called
        # parse_IWNLP_lemma, so parse_tree_tagger_lemma was never
        # exercised with multiple values.
        parsed_lemma = parse_tree_tagger_lemma(input_value)
        self.assertEqual(parsed_lemma, ["Rüge", "Rügen"])

    def test_iwnlp_lemma_empty(self):
        """An empty IWNLP lemma list parses to None."""
        input_value = []
        parsed_lemma = parse_IWNLP_lemma(input_value)
        self.assertEqual(parsed_lemma, None)

    def test_iwnlp_lemma_one_value(self):
        """A single-entry IWNLP lemma list is returned unchanged."""
        input_value = ["Ferkel"]
        parsed_lemma = parse_IWNLP_lemma(input_value)
        self.assertEqual(parsed_lemma, ["Ferkel"])

    def test_iwnlp_lemma_two_values(self):
        """A two-entry IWNLP lemma list is returned unchanged."""
        input_value = ["Rügen", "Rüge"]
        parsed_lemma = parse_IWNLP_lemma(input_value)
        self.assertEqual(parsed_lemma, ["Rügen", "Rüge"])

    def test_polarity_empty(self):
        """A missing polarity (None) parses to None."""
        input_value = None
        parsed_polarity = parse_polarity(input_value)
        self.assertEqual(parsed_polarity, None)

    def test_polarity_with_value(self):
        """A numeric polarity value is passed through unchanged."""
        input_value = 0.004
        parsed_polarity = parse_polarity(input_value)
        self.assertEqual(parsed_polarity, 0.004)


if __name__ == '__main__':
    unittest.main()
lihongjie/tutorials
servlet-jsp-lihongjie/reflectblog/src/main/java/exception/DaoException.java
package exception; /** * DAO层运行时异常 * * @author Administrator */ public class DaoException extends RuntimeException { public DaoException() { super(); // TODO Auto-generated constructor stub } public DaoException(String message, Throwable cause) { super(message, cause); // TODO Auto-generated constructor stub } public DaoException(String message) { super(message); // TODO Auto-generated constructor stub } public DaoException(Throwable cause) { super(cause); // TODO Auto-generated constructor stub } }
AlexRogalskiy/java4you
src/main/java/com/sensiblemetrics/api/alpenidos/pattern/lambda/interpreter/InterpreterLambda.java
package com.sensiblemetrics.api.alpenidos.pattern.lambda.interpreter;

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.HashMap;
import java.util.Map;
import java.util.function.IntBinaryOperator;

/**
 * Interpreter pattern via lambdas: evaluates integer expressions written in
 * postfix (reverse Polish) notation, e.g. {@code "3 4 +"} evaluates to 7.
 * Operators are looked up in a table of {@link IntBinaryOperator} lambdas.
 */
public class InterpreterLambda {

    // Operator table; populated once in the static initializer and never
    // mutated afterwards (made final so it cannot be reassigned).
    static final Map<String, IntBinaryOperator> opMap = new HashMap<>();

    static {
        opMap.put("+", (a, b) -> a + b);
        opMap.put("*", (a, b) -> a * b);
        opMap.put("-", (a, b) -> a - b);
        // Generalization: integer division support (previously any "/" token
        // caused a NumberFormatException, so this is backward compatible).
        opMap.put("/", (a, b) -> a / b);
    }

    /**
     * Evaluates a space-separated postfix expression.
     *
     * @param expression tokens separated by single spaces; each token is
     *                   either an integer literal or one of + - * /
     * @return the value of the expression
     * @throws NumberFormatException  if a token is neither an operator
     *                                nor a valid integer
     * @throws java.util.NoSuchElementException if the expression is
     *                                malformed (operator without enough operands)
     */
    public static int evaluate(String expression) {
        // ArrayDeque replaces the legacy synchronized java.util.Stack.
        Deque<Integer> stack = new ArrayDeque<>();
        for (String token : expression.split(" ")) {
            IntBinaryOperator op = opMap.get(token);
            if (op != null) {
                // Operator: pop right then left (postfix order), push result.
                int right = stack.pop();
                int left = stack.pop();
                stack.push(op.applyAsInt(left, right));
            } else {
                // Operand: push the parsed integer.
                stack.push(Integer.parseInt(token));
            }
        }
        return stack.pop();
    }
}
baovevni/homework
basics2.12/src/fightsong/songoutput.java
<filename>basics2.12/src/fightsong/songoutput.java package fightsong; import static fightsong.FightSong.*; public class songoutput{ public static void main(String[] args) { printFirstLine(); System.out.println(line1); System.out.println(); System.out.println(line1); System.out.println(line2); System.out.println(line1); System.out.println(); System.out.println(line1); System.out.println(line2); System.out.println(line1); System.out.println(); System.out.println(line1); } }
sajustsmile/twitter-blockchain
studio/node_modules/@sanity/portable-text-editor/lib/editor/plugins/createWithUtils.js
// NOTE(review): vendored build artifact (node_modules) — Babel-transpiled
// output of Sanity's portable-text-editor `createWithUtils` plugin. Do not
// hand-edit; fix the package's TypeScript source and rebuild instead. The
// leading functions are Babel object-spread/destructuring helpers; the real
// logic is `createWithUtils`, which installs `editor.pteExpandToWord` — it
// expands a collapsed selection to the whitespace-delimited word around the
// selection focus. Code below is reproduced verbatim.
<reponame>sajustsmile/twitter-blockchain<filename>studio/node_modules/@sanity/portable-text-editor/lib/editor/plugins/createWithUtils.js<gh_stars>1-10 "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.createWithUtils = createWithUtils; var _slate = require("slate"); var _debug = require("../../utils/debug"); function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; } function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; } function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } function _slicedToArray(arr, i) { return _arrayWithHoles(arr) || _iterableToArrayLimit(arr, i) || _unsupportedIterableToArray(arr, i) || _nonIterableRest(); } function _nonIterableRest() { throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n =
o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); } function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; } function _iterableToArrayLimit(arr, i) { var _i = arr == null ? null : typeof Symbol !== "undefined" && arr[Symbol.iterator] || arr["@@iterator"]; if (_i == null) return; var _arr = []; var _n = true; var _d = false; var _s, _e; try { for (_i = _i.call(arr); !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"] != null) _i["return"](); } finally { if (_d) throw _e; } } return _arr; } function _arrayWithHoles(arr) { if (Array.isArray(arr)) return arr; } var debug = (0, _debug.debugWithName)('plugin:withUtils'); /** * This plugin makes various util commands available in the editor * */ function createWithUtils(portableTextFeatures) { return function withUtils(editor) { // Expands the the selection to wrap around the word the focus is at editor.pteExpandToWord = () => { var selection = editor.selection; if (selection && !_slate.Range.isExpanded(selection)) { var _Editor$node = _slate.Editor.node(editor, selection.focus, { depth: 2 }), _Editor$node2 = _slicedToArray(_Editor$node, 1), textNode = _Editor$node2[0]; if (!textNode || !_slate.Text.isText(textNode) || textNode.text.length === 0) { debug("pteExpandToWord: Can't expand to word here"); return; } var focus = selection.focus; var focusOffset = focus.offset; var charsBefore = textNode.text.slice(0, focusOffset); var charsAfter = textNode.text.slice(focusOffset, -1); var isEmpty = str => str.match(/\s/g); var whiteSpaceBeforeIndex = charsBefore.split('').reverse().findIndex(str => isEmpty(str)); var newStartOffset =
whiteSpaceBeforeIndex > -1 ? charsBefore.length - whiteSpaceBeforeIndex : 0; var whiteSpaceAfterIndex = charsAfter.split('').findIndex(obj => isEmpty(obj)); var newEndOffset = charsBefore.length + (whiteSpaceAfterIndex > -1 ? whiteSpaceAfterIndex : charsAfter.length + 1); if (!(newStartOffset === newEndOffset || isNaN(newStartOffset) || isNaN(newEndOffset))) { debug('pteExpandToWord: Expanding to focused word'); _slate.Transforms.setSelection(editor, { anchor: _objectSpread(_objectSpread({}, selection.anchor), {}, { offset: newStartOffset }), focus: _objectSpread(_objectSpread({}, selection.focus), {}, { offset: newEndOffset }) }); return; } debug("pteExpandToWord: Can't expand to word here"); } }; return editor; }; }
vborisoff/reladomo
reladomoserial/src/main/java/com/gs/reladomo/serial/jackson/JacksonReladomoWrappedDeserializer.java
/*
 Copyright 2016 <NAME>.

 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
 You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS,
 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
 */
package com.gs.reladomo.serial.jackson;

import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
import com.fasterxml.jackson.databind.*;
import com.fasterxml.jackson.databind.deser.ContextualDeserializer;
import com.fasterxml.jackson.databind.deser.std.StdDeserializer;
import com.gs.fw.common.mithra.MithraDatedTransactionalObject;
import com.gs.fw.common.mithra.MithraObject;
import com.gs.fw.common.mithra.MithraTransactionalObject;
import com.gs.fw.common.mithra.util.serializer.ReladomoDeserializer;
import com.gs.fw.common.mithra.util.serializer.Serialized;
import com.gs.reladomo.serial.json.IntDateParser;
import com.gs.reladomo.serial.json.JsonDeserializerState;

import java.io.IOException;
import java.util.Date;

/**
 * Jackson deserializer for Reladomo objects wrapped in {@code Serialized<T>}.
 * It streams Jackson tokens one by one into a {@link ReladomoDeserializer},
 * dispatching each token kind through {@link JsonDeserializerState}'s state
 * machine rather than building a Jackson tree.
 */
public class JacksonReladomoWrappedDeserializer extends StdDeserializer<Serialized<?>> implements ContextualDeserializer
{
    // The contained type T of Serialized<T>, resolved in createContextual();
    // null means "deserialize without a known concrete type".
    private JavaType valueType;

    public JacksonReladomoWrappedDeserializer()
    {
        super(Serialized.class);
    }

    /**
     * Resolves the concrete {@code Serialized<T>} type for the property (or
     * the surrounding context when there is no property) and returns a new
     * deserializer instance specialized to that contained type. Falls back
     * to {@code this} when no type information is available.
     */
    @Override
    public JsonDeserializer<?> createContextual(DeserializationContext ctxt, BeanProperty property) throws JsonMappingException
    {
        JavaType wrapperType = null;
        if (property != null)
        {
            wrapperType = property.getType();
        }
        else
        {
            // No property (e.g. root value / collection element): use the
            // contextual type's content type instead.
            wrapperType = ctxt.getContextualType().getContentType();
        }
        if (wrapperType == null)
        {
            wrapperType = ctxt.getContextualType();
        }
        if (wrapperType == null)
        {
            return this;
        }
        // containedType(0) is the T in Serialized<T>; may be null for a raw type.
        JavaType valueType = wrapperType.containedType(0);
        JacksonReladomoWrappedDeserializer deserializer = new JacksonReladomoWrappedDeserializer();
        if (valueType == null)
        {
            valueType = wrapperType;
        }
        deserializer.valueType = valueType;
        return deserializer;
    }

    /**
     * Pulls tokens from the parser until it is closed, forwarding each token
     * to the current {@link JsonDeserializerState}, then returns the finished
     * {@code Serialized} result from the {@link ReladomoDeserializer}.
     */
    @Override
    public Serialized<?> deserialize(JsonParser parser, DeserializationContext ctxt) throws IOException
    {
        ReladomoDeserializer deserializer;
        if (this.valueType == null)
        {
            deserializer = new ReladomoDeserializer();
        }
        else
        {
            Class<?> rawClass = this.valueType.getRawClass();
            // The generic Mithra interfaces carry no concrete class info, so
            // treat them the same as "no type known".
            if (MithraObject.class == rawClass || MithraTransactionalObject.class == rawClass ||
                    MithraDatedTransactionalObject.class == rawClass)
            {
                deserializer = new ReladomoDeserializer();
            }
            else
            {
                deserializer = new ReladomoDeserializer(rawClass);
            }
        }
        // Unknown JSON attributes are skipped instead of raising errors.
        deserializer.setIgnoreUnknown();
        DateParser dateParser = new DateParser(ctxt);
        JsonDeserializerState state = JsonDeserializerState.NormalParserState.INSTANCE;
        do
        {
            // Dispatch the current token to the state machine; each call
            // returns the next state.
            JsonToken jsonToken = parser.getCurrentToken();
            if (JsonToken.START_OBJECT.equals(jsonToken))
            {
                state = state.startObject(deserializer);
            }
            else if (JsonToken.END_OBJECT.equals(jsonToken))
            {
                state = state.endObject(deserializer);
            }
            else if (JsonToken.START_ARRAY.equals(jsonToken))
            {
                state = state.startArray(deserializer);
            }
            else if (JsonToken.END_ARRAY.equals(jsonToken))
            {
                state = state.endArray(deserializer);
            }
            else if (JsonToken.FIELD_NAME.equals(jsonToken))
            {
                state = state.fieldName(parser.getCurrentName(), deserializer);
            }
            else if (JsonToken.VALUE_EMBEDDED_OBJECT.equals(jsonToken))
            {
                state = state.valueEmbeddedObject(deserializer);
            }
            else if (JsonToken.VALUE_FALSE.equals(jsonToken))
            {
                state = state.valueFalse(deserializer);
            }
            else if (JsonToken.VALUE_TRUE.equals(jsonToken))
            {
                state = state.valueTrue(deserializer);
            }
            else if (JsonToken.VALUE_NULL.equals(jsonToken))
            {
                state = state.valueNull(deserializer);
            }
            else if (JsonToken.VALUE_STRING.equals(jsonToken))
            {
                state = state.valueString(parser.getValueAsString(), deserializer);
            }
            else if (JsonToken.VALUE_NUMBER_INT.equals(jsonToken))
            {
                // Integers may encode dates, hence the extra IntDateParser.
                state = state.valueNumberInt(parser.getValueAsString(), deserializer, dateParser);
            }
            else if (JsonToken.VALUE_NUMBER_FLOAT.equals(jsonToken))
            {
                state = state.valueNumberFloat(parser.getValueAsString(), deserializer);
            }
            parser.nextToken();
        } while (!parser.isClosed());
        return deserializer.getDeserializedResult();
    }

    /**
     * Adapter exposing Jackson's contextual date parsing (inherited
     * {@code _parseDate}) through Reladomo's {@link IntDateParser} interface.
     */
    private class DateParser implements IntDateParser
    {
        private DeserializationContext ctxt;

        public DateParser(DeserializationContext ctxt)
        {
            this.ctxt = ctxt;
        }

        @Override
        public Date parseIntAsDate(String value) throws IOException
        {
            return _parseDate(value, ctxt);
        }
    }
}
wangsun1983/Obotcha
test/testUtil/testText/testXmlReflect/TestReflectToSimpleXml.cpp
#include <stdio.h>
#include <unistd.h>
#include <iostream>
#include <type_traits>

#include "Object.hpp"
#include "StrongPointer.hpp"
#include "Reflect.hpp"
#include "String.hpp"
#include "Math.hpp"
#include "Field.hpp"
#include "XmlWriter.hpp"
#include "XmlReader.hpp"
#include "XmlDocument.hpp"

using namespace obotcha;

// Test fixture: one reflectable object covering every primitive field type
// registered with Obotcha's reflection, plus String and bool.
DECLARE_CLASS(ReflectWriteMember) {
public:
    int intData;
    byte byteData;
    double doubleData;
    float floatData;
    long longData;
    String stringData;
    uint8_t uint8Data;
    uint16_t uint16Data;
    uint32_t uint32Data;
    uint64_t uint64Data;
    bool boolData;

    DECLARE_REFLECT_FIELD(ReflectWriteMember,intData,byteData,doubleData,
        floatData,longData,stringData,uint8Data,uint16Data,uint32Data,
        uint64Data,boolData)
};

// Composite fixture with two nested reflectable members.
DECLARE_CLASS(ReflectData) {
public:
    ReflectWriteMember member1;
    ReflectWriteMember member2;

    DECLARE_REFLECT_FIELD(ReflectData,member1,member2)
};

// Round-trip test: populate a ReflectData, reflect it into an XmlDocument,
// write it to "output4.xml", read it back, reflect it into a fresh
// ReflectData and compare member1 field by field.
// NOTE(review): member2 is populated but never verified after the round
// trip, and the final [OK] message is labelled "test12" although it marks
// overall success. The while(1)/break construct is used only as an early
// exit; the loop body never runs twice.
void testReflectToSimpleXml() {
    //test1
    while(1) {
        ReflectData data = createReflectData();
        data->member1 = createReflectWriteMember();
        data->member2 = createReflectWriteMember();

        // Distinct values per field so a mis-mapped field shows up in the
        // comparison below.
        data->member1->intData = 1;
        data->member1->byteData = 2;
        data->member1->doubleData = 1.1;
        data->member1->floatData = 2.2;
        data->member1->longData = 3;
        data->member1->stringData = createString("a");
        data->member1->uint8Data = 4;
        data->member1->uint16Data = 5;
        data->member1->uint32Data = 6;
        data->member1->uint64Data = 7;
        data->member1->boolData = false;

        data->member2->intData = 11;
        data->member2->byteData = 12;
        data->member2->doubleData = 11.1;
        data->member2->floatData = 12.2;
        data->member2->longData = 13;
        data->member2->stringData = createString("b");
        data->member2->uint8Data = 14;
        data->member2->uint16Data = 15;
        data->member2->uint32Data = 16;
        data->member2->uint64Data = 17;
        data->member2->boolData = false;

        // Serialize: object -> XML document -> file in the working directory.
        XmlDocument doc = createXmlDocument();
        doc->importFrom(data);
        XmlWriter writer = createXmlWriter(doc);
        writer->write("output4.xml");

        // Deserialize: file -> XML document -> fresh object.
        XmlReader reader = createXmlReader(createFile("output4.xml"));
        XmlDocument doc2 = reader->get();
        ReflectData rdata3 = createReflectData();
        doc2->reflectTo(rdata3);

        // Field-by-field comparison of member1 (exact equality, including
        // the float/double fields — assumes lossless text round-trip).
        if(data->member1->intData != rdata3->member1->intData) {
            printf("Reflect to xml test1-------[FAIL] \n");
            break;
        }

        if(data->member1->byteData != rdata3->member1->byteData) {
            printf("data1 is %d,data2 is %d \n",data->member1->byteData,rdata3->member1->byteData);
            printf("Reflect to xml test2-------[FAIL] \n");
            break;
        }

        if(data->member1->doubleData != rdata3->member1->doubleData) {
            printf("Reflect to xml test3-------[FAIL] \n");
            break;
        }

        if(data->member1->floatData != rdata3->member1->floatData) {
            printf("Reflect to xml test4-------[FAIL] \n");
            break;
        }

        if(data->member1->longData != rdata3->member1->longData) {
            printf("Reflect to xml test5-------[FAIL] \n");
            break;
        }

        if(!data->member1->stringData->equals(rdata3->member1->stringData)) {
            printf("member string is %s,member2 string is %s \n",data->member1->stringData->toChars(),rdata3->member1->stringData->toChars());
            printf("Reflect to xml test6-------[FAIL] \n");
            break;
        }

        if(data->member1->uint8Data != rdata3->member1->uint8Data) {
            printf("Reflect to xml test7-------[FAIL] \n");
            break;
        }

        if(data->member1->uint16Data != rdata3->member1->uint16Data) {
            printf("Reflect to xml test8-------[FAIL] \n");
            break;
        }

        if(data->member1->uint32Data != rdata3->member1->uint32Data) {
            printf("Reflect to xml test9-------[FAIL] \n");
            break;
        }

        if(data->member1->uint64Data != rdata3->member1->uint64Data) {
            printf("data1 is %lu,data2 is %lu \n",data->member1->uint64Data,rdata3->member1->uint64Data);
            printf("Reflect to xml test10-------[FAIL] \n");
            break;
        }

        if(data->member1->boolData != rdata3->member1->boolData) {
            printf("Reflect to xml test11-------[FAIL] \n");
            break;
        }

        printf("Reflect to xml test12-------[OK] \n");
        break;
    }
}
Earthcomputer/ShittyMinecraftSuggestions
src/main/java/shittymcsuggestions/mixin/item/MixinNameTagItem.java
package shittymcsuggestions.mixin.item;

import net.minecraft.entity.LivingEntity;
import net.minecraft.entity.player.PlayerEntity;
import net.minecraft.item.ItemStack;
import net.minecraft.item.NameTagItem;
import net.minecraft.server.network.ServerPlayerEntity;
import net.minecraft.util.Hand;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Inject;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfoReturnable;
import shittymcsuggestions.entity.NameTagHandler;

/**
 * Mixin into vanilla {@link NameTagItem} that notifies
 * {@link NameTagHandler} whenever a mob is named with a name tag.
 */
@Mixin(NameTagItem.class)
public class MixinNameTagItem {
    // Injected at the ItemStack.decrement(I)V call inside useOnEntity —
    // i.e. at the point where the name tag is consumed, so presumably only
    // when the rename actually took effect (confirm against the vanilla
    // useOnEntity body for this mapping version).
    @Inject(method = "useOnEntity", at = @At(value = "INVOKE", target = "Lnet/minecraft/item/ItemStack;decrement(I)V"))
    private void onUseOnEntity(ItemStack stack, PlayerEntity player, LivingEntity entity, Hand hand, CallbackInfoReturnable<Boolean> ci) {
        // Server side only; the stack's (custom) name is what the mob was named.
        if (player instanceof ServerPlayerEntity)
            NameTagHandler.onMobNamed(player, entity, stack.getName().getString());
    }
}
cyrilfr/java-sdk
src/main/java/ch/postfinance/sdk/model/ShopifySubscriptionSuspensionCreate.java
<filename>src/main/java/ch/postfinance/sdk/model/ShopifySubscriptionSuspensionCreate.java
/**
 * PostFinance Checkout SDK
 *
 * This library allows to interact with the PostFinance Checkout payment service.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */


package ch.postfinance.sdk.model;

import java.util.Objects;
import java.util.Arrays;
import ch.postfinance.sdk.model.ShopifySubscriptionSuspensionType;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonValue;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.time.OffsetDateTime;
import java.util.*;
import java.time.OffsetDateTime;

/**
 * Generated API request model for creating a Shopify subscription
 * suspension: a planned end date, the subscription id, and the suspension
 * type. All three fields are marked required by the API annotations.
 * NOTE(review): generated code — regenerate from the API spec rather than
 * hand-editing.
 */
@ApiModel(description = "")
public class ShopifySubscriptionSuspensionCreate {

  // Date/time at which the suspension is planned to end (required).
  @JsonProperty("plannedEndDate")
  protected OffsetDateTime plannedEndDate = null;


  // Id of the subscription to suspend (required).
  @JsonProperty("subscription")
  protected Long subscription = null;


  // Kind of suspension to create (required).
  @JsonProperty("type")
  protected ShopifySubscriptionSuspensionType type = null;


  /** Fluent setter for {@code plannedEndDate}; returns this instance. */
  public ShopifySubscriptionSuspensionCreate plannedEndDate(OffsetDateTime plannedEndDate) {
    this.plannedEndDate = plannedEndDate;
    return this;
  }

   /**
   * The planned end date/time of the suspension.
   * @return plannedEndDate
  **/
  @ApiModelProperty(required = true, value = "")
  public OffsetDateTime getPlannedEndDate() {
    return plannedEndDate;
  }

  public void setPlannedEndDate(OffsetDateTime plannedEndDate) {
    this.plannedEndDate = plannedEndDate;
  }


  /** Fluent setter for {@code subscription}; returns this instance. */
  public ShopifySubscriptionSuspensionCreate subscription(Long subscription) {
    this.subscription = subscription;
    return this;
  }

   /**
   * The id of the subscription being suspended.
   * @return subscription
  **/
  @ApiModelProperty(required = true, value = "")
  public Long getSubscription() {
    return subscription;
  }

  public void setSubscription(Long subscription) {
    this.subscription = subscription;
  }


  /** Fluent setter for {@code type}; returns this instance. */
  public ShopifySubscriptionSuspensionCreate type(ShopifySubscriptionSuspensionType type) {
    this.type = type;
    return this;
  }

   /**
   * The type of the suspension.
   * @return type
  **/
  @ApiModelProperty(required = true, value = "")
  public ShopifySubscriptionSuspensionType getType() {
    return type;
  }

  public void setType(ShopifySubscriptionSuspensionType type) {
    this.type = type;
  }


  // Value equality over all three fields.
  @Override
  public boolean equals(java.lang.Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    ShopifySubscriptionSuspensionCreate shopifySubscriptionSuspensionCreate = (ShopifySubscriptionSuspensionCreate) o;
    return Objects.equals(this.plannedEndDate, shopifySubscriptionSuspensionCreate.plannedEndDate) &&
        Objects.equals(this.subscription, shopifySubscriptionSuspensionCreate.subscription) &&
        Objects.equals(this.type, shopifySubscriptionSuspensionCreate.type);
  }

  @Override
  public int hashCode() {
    return Objects.hash(plannedEndDate, subscription, type);
  }


  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("class ShopifySubscriptionSuspensionCreate {\n");
    
    sb.append("    plannedEndDate: ").append(toIndentedString(plannedEndDate)).append("\n");
    sb.append("    subscription: ").append(toIndentedString(subscription)).append("\n");
    sb.append("    type: ").append(toIndentedString(type)).append("\n");
    sb.append("}");
    return sb.toString();
  }

  /**
   * Convert the given object to string with each line indented by 4 spaces
   * (except the first line).
   */
  private String toIndentedString(java.lang.Object o) {
    if (o == null) {
      return "null";
    }
    return o.toString().replace("\n", "\n    ");
  }

}
Poulhouse/dev_don360
bitrix/modules/landing/install/js/landing/ui/card/headercard/src/headercard.js
import {BaseCard} from 'landing.ui.card.basecard';
import {Dom, Tag, Type} from 'main.core';

import './css/style.css';

/**
 * Header card: a BaseCard variant styled as a section header, with an
 * optional heading level, optional description text, and a switchable
 * bottom margin.
 *
 * Options (beyond BaseCard's):
 *   - bottomMargin {boolean} pass false to suppress the bottom margin
 *   - level {number} heading level, applied as a CSS modifier class
 *   - description {string} optional description text
 *
 * @memberOf BX.Landing.UI.Card
 */
export class HeaderCard extends BaseCard
{
	constructor(options)
	{
		super(options);
		Dom.addClass(this.getLayout(), 'landing-ui-card-headercard');

		// Only an explicit `false` disables the margin; any other value
		// (including undefined) keeps the default margin.
		if (options.bottomMargin === false)
		{
			this.setBottomMargin(options.bottomMargin);
		}

		if (Type.isNumber(options.level))
		{
			Dom.addClass(this.getLayout(), `landing-ui-card-headercard-${options.level}`);
		}

		if (Type.isStringFilled(options.description))
		{
			this.setDescription(options.description);
		}
	}

	/**
	 * Lazily creates (and caches) the <span> that holds the description.
	 * @returns {HTMLSpanElement}
	 */
	getDescriptionLayout(): HTMLSpanElement
	{
		return this.cache.remember('descriptionLayout', () => {
			return Tag.render`
				<span class="landing-ui-card-headercard-description"></span>
			`;
		});
	}

	/**
	 * Sets the description text, appending the description node to the
	 * card body on first use. Uses textContent, so HTML is not interpreted.
	 * @param {string} descriptionText
	 */
	setDescription(descriptionText: string)
	{
		const descriptionLayout = this.getDescriptionLayout();
		if (!this.body.contains(descriptionLayout))
		{
			Dom.append(descriptionLayout, this.body);
		}

		descriptionLayout.textContent = descriptionText;
	}

	/**
	 * Toggles the bottom margin by adding/removing the
	 * "without-bottom-margin" modifier class.
	 * @param {boolean} value true = keep margin, false = remove it
	 */
	setBottomMargin(value)
	{
		if (value === true)
		{
			Dom.removeClass(this.getLayout(), 'landing-ui-card-headercard-without-bottom-margin');
		}
		else
		{
			Dom.addClass(this.getLayout(), 'landing-ui-card-headercard-without-bottom-margin');
		}
	}
}
tima-q/connectedhomeip
src/crypto/CHIPCryptoPALOpenSSL.cpp
<filename>src/crypto/CHIPCryptoPALOpenSSL.cpp
/*
 *
 *    Copyright (c) 2020-2021 Project CHIP Authors
 *
 *    Licensed under the Apache License, Version 2.0 (the "License");
 *    you may not use this file except in compliance with the License.
 *    You may obtain a copy of the License at
 *
 *        http://www.apache.org/licenses/LICENSE-2.0
 *
 *    Unless required by applicable law or agreed to in writing, software
 *    distributed under the License is distributed on an "AS IS" BASIS,
 *    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *    See the License for the specific language governing permissions and
 *    limitations under the License.
 */

/**
 *    @file
 *      openSSL based implementation of CHIP crypto primitives
 */

#include "CHIPCryptoPAL.h"

#include <type_traits>

#include <openssl/bn.h>
#include <openssl/conf.h>
#include <openssl/ec.h>
#include <openssl/ecdsa.h>
#include <openssl/err.h>
#include <openssl/evp.h>
#include <openssl/hmac.h>
#include <openssl/kdf.h>
#include <openssl/ossl_typ.h>
#include <openssl/pem.h>
#include <openssl/rand.h>
#include <openssl/sha.h>
#include <openssl/x509.h>
#include <openssl/x509v3.h>

#include <lib/core/CHIPSafeCasts.h>
#include <lib/support/BufferWriter.h>
#include <lib/support/CHIPArgParser.hpp>
#include <lib/support/CodeUtils.h>
#include <lib/support/SafeInt.h>
#include <lib/support/SafePointerCast.h>
#include <lib/support/logging/CHIPLogging.h>

#include <string.h>

namespace chip {
namespace Crypto {

#define kKeyLengthInBits 256

typedef struct stack_st_X509 X509_LIST;

enum class DigestType
{
    SHA256
};

enum class ECName
{
    None   = 0,
    P256v1 = 1,
};

// Maps an ECName to the OpenSSL NID for the curve; NID_undef for unknown.
static int _nidForCurve(ECName name)
{
    switch (name)
    {
    case ECName::P256v1:
        return EC_curve_nist2nid("P-256");
        break;

    default:
        return NID_undef;
        break;
    }
}

// CCM tag lengths accepted by this implementation (bytes).
static bool _isValidTagLength(size_t tag_length)
{
    return tag_length == 8 || tag_length == 12 || tag_length == 16;
}

static bool _isValidKeyLength(size_t length)
{
    // 16 bytes key for AES-CCM-128, 32 for AES-CCM-256
    return length == 16 || length == 32;
}

// Drains and logs OpenSSL's thread-local error queue.
// NOTE(review): ERR_func_error_string is deprecated in OpenSSL 3.0 —
// confirm the minimum supported OpenSSL version.
static void _logSSLError()
{
    unsigned long ssl_err_code = ERR_get_error();
    while (ssl_err_code != 0)
    {
        const char * err_str_lib     = ERR_lib_error_string(ssl_err_code);
        const char * err_str_routine = ERR_func_error_string(ssl_err_code);
        const char * err_str_reason  = ERR_reason_error_string(ssl_err_code);
        if (err_str_lib)
        {
            ChipLogError(Crypto, " ssl err  %s %s %s\n", err_str_lib, err_str_routine, err_str_reason);
        }
        ssl_err_code = ERR_get_error();
    }
}

// Maps a DigestType to the corresponding OpenSSL EVP_MD; nullptr if unknown.
static const EVP_MD * _digestForType(DigestType digestType)
{
    switch (digestType)
    {
    case DigestType::SHA256:
        return EVP_sha256();
        break;

    default:
        return nullptr;
        break;
    }
}

// AES-CCM authenticated encryption using the OpenSSL EVP interface.
// key_length selects the cipher: 16 -> AES-128-CCM, 32 -> AES-256-CCM.
// Writes plaintext_length bytes of ciphertext and tag_length bytes of tag.
// A zero-length plaintext with a null ciphertext buffer is allowed
// (auth-only use); placeholders stand in for the null pointers. The CCM
// call order (set IV len, set tag len, set key/IV, announce total length,
// AAD, data, final, get tag) is mandated by the EVP CCM API.
CHIP_ERROR AES_CCM_encrypt(const uint8_t * plaintext, size_t plaintext_length, const uint8_t * aad, size_t aad_length,
                           const uint8_t * key, size_t key_length, const uint8_t * iv, size_t iv_length, uint8_t * ciphertext,
                           uint8_t * tag, size_t tag_length)
{
    EVP_CIPHER_CTX * context = nullptr;
    int bytesWritten         = 0;
    size_t ciphertext_length = 0;
    CHIP_ERROR error         = CHIP_NO_ERROR;
    int result               = 1;
    const EVP_CIPHER * type  = nullptr;

    // Placeholder location for avoiding null params for plaintexts when
    // size is zero.
    uint8_t placeholder_empty_plaintext = 0;

    // Ciphertext block to hold a finalized ciphertext block if output
    // `ciphertext` buffer is nullptr or plaintext_length is zero (i.e.
    // we are only doing auth and don't care about output).
    uint8_t placeholder_ciphertext[kAES_CCM256_Block_Length];
    bool ciphertext_was_null = (ciphertext == nullptr);

    if (plaintext_length == 0)
    {
        if (plaintext == nullptr)
        {
            plaintext = &placeholder_empty_plaintext;
        }
        // Make sure we have at least 1 full block size buffer for the
        // extraction of final block (required by OpenSSL EVP_EncryptFinal_ex)
        if (ciphertext_was_null)
        {
            ciphertext = &placeholder_ciphertext[0];
        }
    }

    VerifyOrExit((key_length == kAES_CCM128_Key_Length) || (key_length == kAES_CCM256_Key_Length),
                 error = CHIP_ERROR_INVALID_ARGUMENT);
    VerifyOrExit((plaintext_length != 0) || ciphertext_was_null, error = CHIP_ERROR_INVALID_ARGUMENT);
    VerifyOrExit(plaintext != nullptr, error = CHIP_ERROR_INVALID_ARGUMENT);
    VerifyOrExit(ciphertext != nullptr, error = CHIP_ERROR_INVALID_ARGUMENT);
    VerifyOrExit(key != nullptr, error = CHIP_ERROR_INVALID_ARGUMENT);
    VerifyOrExit(_isValidKeyLength(key_length), error = CHIP_ERROR_INVALID_ARGUMENT);
    VerifyOrExit(iv != nullptr, error = CHIP_ERROR_INVALID_ARGUMENT);
    VerifyOrExit(iv_length > 0, error = CHIP_ERROR_INVALID_ARGUMENT);
    VerifyOrExit(CanCastTo<int>(iv_length), error = CHIP_ERROR_INVALID_ARGUMENT);
    VerifyOrExit(tag != nullptr, error = CHIP_ERROR_INVALID_ARGUMENT);
    VerifyOrExit(_isValidTagLength(tag_length), error = CHIP_ERROR_INVALID_ARGUMENT);

    // TODO: Remove suport for AES-256 since not in 1.0
    // Determine crypto type by key length
    type = (key_length == kAES_CCM128_Key_Length) ? EVP_aes_128_ccm() : EVP_aes_256_ccm();

    context = EVP_CIPHER_CTX_new();
    VerifyOrExit(context != nullptr, error = CHIP_ERROR_INTERNAL);

    // Pass in cipher
    result = EVP_EncryptInit_ex(context, type, nullptr, nullptr, nullptr);
    VerifyOrExit(result == 1, error = CHIP_ERROR_INTERNAL);

    // Pass in IV length.  Cast is safe because we checked with CanCastTo.
    result = EVP_CIPHER_CTX_ctrl(context, EVP_CTRL_CCM_SET_IVLEN, static_cast<int>(iv_length), nullptr);
    VerifyOrExit(result == 1, error = CHIP_ERROR_INTERNAL);

    // Pass in tag length. Cast is safe because we checked _isValidTagLength.
    result = EVP_CIPHER_CTX_ctrl(context, EVP_CTRL_CCM_SET_TAG, static_cast<int>(tag_length), nullptr);
    VerifyOrExit(result == 1, error = CHIP_ERROR_INTERNAL);

    // Pass in key + iv
    result = EVP_EncryptInit_ex(context, nullptr, nullptr, Uint8::to_const_uchar(key), Uint8::to_const_uchar(iv));
    VerifyOrExit(result == 1, error = CHIP_ERROR_INTERNAL);

    // Pass in plain text length
    VerifyOrExit(CanCastTo<int>(plaintext_length), error = CHIP_ERROR_INVALID_ARGUMENT);
    result = EVP_EncryptUpdate(context, nullptr, &bytesWritten, nullptr, static_cast<int>(plaintext_length));
    VerifyOrExit(result == 1, error = CHIP_ERROR_INTERNAL);

    // Pass in AAD
    if (aad_length > 0 && aad != nullptr)
    {
        VerifyOrExit(CanCastTo<int>(aad_length), error = CHIP_ERROR_INVALID_ARGUMENT);
        result = EVP_EncryptUpdate(context, nullptr, &bytesWritten, Uint8::to_const_uchar(aad), static_cast<int>(aad_length));
        VerifyOrExit(result == 1, error = CHIP_ERROR_INTERNAL);
    }

    // Encrypt
    VerifyOrExit(CanCastTo<int>(plaintext_length), error = CHIP_ERROR_INVALID_ARGUMENT);
    result = EVP_EncryptUpdate(context, Uint8::to_uchar(ciphertext), &bytesWritten, Uint8::to_const_uchar(plaintext),
                               static_cast<int>(plaintext_length));
    VerifyOrExit(result == 1, error = CHIP_ERROR_INTERNAL);
    VerifyOrExit((ciphertext_was_null && bytesWritten == 0) || (bytesWritten >= 0), error = CHIP_ERROR_INTERNAL);
    ciphertext_length = static_cast<unsigned int>(bytesWritten);

    // Finalize encryption
    result = EVP_EncryptFinal_ex(context, ciphertext + ciphertext_length, &bytesWritten);
    VerifyOrExit(result == 1, error = CHIP_ERROR_INTERNAL);
    VerifyOrExit(bytesWritten >= 0 && bytesWritten <= static_cast<int>(plaintext_length), error = CHIP_ERROR_INTERNAL);

    // Get tag
    VerifyOrExit(CanCastTo<int>(tag_length), error = CHIP_ERROR_INVALID_ARGUMENT);
    result = EVP_CIPHER_CTX_ctrl(context, EVP_CTRL_CCM_GET_TAG, static_cast<int>(tag_length), Uint8::to_uchar(tag));
    VerifyOrExit(result == 1, error = CHIP_ERROR_INTERNAL);

exit:
    // Single cleanup point for the goto-based error handling above.
    if (context != nullptr)
    {
        EVP_CIPHER_CTX_free(context);
        context = nullptr;
    }

    return error;
}

// AES-CCM authenticated decryption using the OpenSSL EVP interface.
// Mirrors AES_CCM_encrypt: key_length selects AES-128 vs AES-256 CCM.
// The expected tag is set before decryption; with CCM, EVP_DecryptUpdate
// produces no plaintext when tag verification fails (result != 1).
// A zero-length ciphertext with a null plaintext buffer is allowed
// (auth-only use) via the placeholder buffers.
CHIP_ERROR AES_CCM_decrypt(const uint8_t * ciphertext, size_t ciphertext_length, const uint8_t * aad, size_t aad_length,
                           const uint8_t * tag, size_t tag_length, const uint8_t * key, size_t key_length, const uint8_t * iv,
                           size_t iv_length, uint8_t * plaintext)
{
    EVP_CIPHER_CTX * context = nullptr;
    CHIP_ERROR error         = CHIP_NO_ERROR;
    int bytesOutput          = 0;
    int result               = 1;
    const EVP_CIPHER * type  = nullptr;

    // Placeholder location for avoiding null params for ciphertext when
    // size is zero.
    uint8_t placeholder_empty_ciphertext = 0;

    // Plaintext block to hold a finalized plaintext block if output
    // `plaintext` buffer is nullptr or ciphertext_length is zero (i.e.
    // we are only doing auth and don't care about output).
    uint8_t placeholder_plaintext[kAES_CCM256_Block_Length];
    bool plaintext_was_null = (plaintext == nullptr);

    if (ciphertext_length == 0)
    {
        if (ciphertext == nullptr)
        {
            ciphertext = &placeholder_empty_ciphertext;
        }
        // Make sure we have at least 1 full block size buffer for the
        // extraction of final block (required by OpenSSL EVP_DecryptFinal_ex)
        if (plaintext_was_null)
        {
            plaintext = &placeholder_plaintext[0];
        }
    }

    VerifyOrExit((key_length == kAES_CCM128_Key_Length) || (key_length == kAES_CCM256_Key_Length),
                 error = CHIP_ERROR_INVALID_ARGUMENT);
    VerifyOrExit(ciphertext != nullptr, error = CHIP_ERROR_INVALID_ARGUMENT);
    VerifyOrExit(plaintext != nullptr, error = CHIP_ERROR_INVALID_ARGUMENT);
    VerifyOrExit(tag != nullptr, error = CHIP_ERROR_INVALID_ARGUMENT);
    VerifyOrExit(_isValidTagLength(tag_length), error = CHIP_ERROR_INVALID_ARGUMENT);
    VerifyOrExit(key != nullptr, error = CHIP_ERROR_INVALID_ARGUMENT);
    VerifyOrExit(_isValidKeyLength(key_length), error = CHIP_ERROR_INVALID_ARGUMENT);
    VerifyOrExit(iv != nullptr, error = CHIP_ERROR_INVALID_ARGUMENT);
    VerifyOrExit(iv_length > 0, error = CHIP_ERROR_INVALID_ARGUMENT);

    // TODO: Remove suport for AES-256 since not in 1.0
    // Determine crypto type by key length
    type = (key_length == kAES_CCM128_Key_Length) ? EVP_aes_128_ccm() : EVP_aes_256_ccm();

    context = EVP_CIPHER_CTX_new();
    VerifyOrExit(context != nullptr, error = CHIP_ERROR_INTERNAL);

    // Pass in cipher
    result = EVP_DecryptInit_ex(context, type, nullptr, nullptr, nullptr);
    VerifyOrExit(result == 1, error = CHIP_ERROR_INTERNAL);

    // Pass in IV length
    VerifyOrExit(CanCastTo<int>(iv_length), error = CHIP_ERROR_INVALID_ARGUMENT);
    result = EVP_CIPHER_CTX_ctrl(context, EVP_CTRL_CCM_SET_IVLEN, static_cast<int>(iv_length), nullptr);
    VerifyOrExit(result == 1, error = CHIP_ERROR_INTERNAL);

    // Pass in expected tag
    // Removing "const" from |tag| here should hopefully be safe as
    // we're writing the tag, not reading.
    VerifyOrExit(CanCastTo<int>(tag_length), error = CHIP_ERROR_INVALID_ARGUMENT);
    result = EVP_CIPHER_CTX_ctrl(context, EVP_CTRL_CCM_SET_TAG, static_cast<int>(tag_length),
                                 const_cast<void *>(static_cast<const void *>(tag)));
    VerifyOrExit(result == 1, error = CHIP_ERROR_INTERNAL);

    // Pass in key + iv
    result = EVP_DecryptInit_ex(context, nullptr, nullptr, Uint8::to_const_uchar(key), Uint8::to_const_uchar(iv));
    VerifyOrExit(result == 1, error = CHIP_ERROR_INTERNAL);

    // Pass in cipher text length
    VerifyOrExit(CanCastTo<int>(ciphertext_length), error = CHIP_ERROR_INVALID_ARGUMENT);
    result = EVP_DecryptUpdate(context, nullptr, &bytesOutput, nullptr, static_cast<int>(ciphertext_length));
    VerifyOrExit(result == 1, error = CHIP_ERROR_INTERNAL);
    VerifyOrExit(bytesOutput <= static_cast<int>(ciphertext_length), error = CHIP_ERROR_INTERNAL);

    // Pass in aad
    if (aad_length > 0 && aad != nullptr)
    {
        VerifyOrExit(CanCastTo<int>(aad_length), error = CHIP_ERROR_INVALID_ARGUMENT);
        result = EVP_DecryptUpdate(context, nullptr, &bytesOutput, Uint8::to_const_uchar(aad), static_cast<int>(aad_length));
        VerifyOrExit(result == 1, error = CHIP_ERROR_INTERNAL);
        VerifyOrExit(bytesOutput <= static_cast<int>(aad_length), error = CHIP_ERROR_INTERNAL);
    }

    // Pass in ciphertext. We wont get anything if validation fails.
    VerifyOrExit(CanCastTo<int>(ciphertext_length), error = CHIP_ERROR_INVALID_ARGUMENT);
    result = EVP_DecryptUpdate(context, Uint8::to_uchar(plaintext), &bytesOutput, Uint8::to_const_uchar(ciphertext),
                               static_cast<int>(ciphertext_length));
    if (plaintext_was_null)
    {
        VerifyOrExit(bytesOutput <= static_cast<int>(sizeof(placeholder_plaintext)), error = CHIP_ERROR_INTERNAL);
    }
    VerifyOrExit(result == 1, error = CHIP_ERROR_INTERNAL);

exit:
    // Single cleanup point for the goto-based error handling above.
    if (context != nullptr)
    {
        EVP_CIPHER_CTX_free(context);
        context = nullptr;
    }

    return error;
}

// One-shot SHA-256 digest; out_buffer must hold at least 32 bytes.
CHIP_ERROR Hash_SHA256(const uint8_t * data, const size_t data_length, uint8_t * out_buffer)
{
    // zero data length hash is supported.
    VerifyOrReturnError(data != nullptr, CHIP_ERROR_INVALID_ARGUMENT);
    VerifyOrReturnError(out_buffer != nullptr, CHIP_ERROR_INVALID_ARGUMENT);

    SHA256(data, data_length, Uint8::to_uchar(out_buffer));

    return CHIP_NO_ERROR;
}

CHIP_ERROR Hash_SHA1(const uint8_t * data, const size_t data_length, uint8_t * out_buffer)
{
    // zero data length hash is supported.
VerifyOrReturnError(data != nullptr, CHIP_ERROR_INVALID_ARGUMENT);
    VerifyOrReturnError(out_buffer != nullptr, CHIP_ERROR_INVALID_ARGUMENT);

    SHA1(data, data_length, Uint8::to_uchar(out_buffer));

    return CHIP_NO_ERROR;
}

Hash_SHA256_stream::Hash_SHA256_stream() {}
Hash_SHA256_stream::~Hash_SHA256_stream()
{
    Clear();
}

static_assert(kMAX_Hash_SHA256_Context_Size >= sizeof(SHA256_CTX),
              "kMAX_Hash_SHA256_Context_Size is too small for the size of underlying SHA256_CTX");

// Reinterpret the opaque context storage as the OpenSSL SHA256_CTX it wraps.
static inline SHA256_CTX * to_inner_hash_sha256_context(HashSHA256OpaqueContext * context)
{
    return SafePointerCast<SHA256_CTX *>(context);
}

// Start (or restart) a streaming SHA-256 computation.
CHIP_ERROR Hash_SHA256_stream::Begin()
{
    SHA256_CTX * const context = to_inner_hash_sha256_context(&mContext);
    const int result           = SHA256_Init(context);
    VerifyOrReturnError(result == 1, CHIP_ERROR_INTERNAL);
    return CHIP_NO_ERROR;
}

// Absorb `data` into the running hash.
CHIP_ERROR Hash_SHA256_stream::AddData(const ByteSpan data)
{
    SHA256_CTX * const context = to_inner_hash_sha256_context(&mContext);
    const int result           = SHA256_Update(context, Uint8::to_const_uchar(data.data()), data.size());
    VerifyOrReturnError(result == 1, CHIP_ERROR_INTERNAL);
    return CHIP_NO_ERROR;
}

// Produce the digest of the data absorbed so far WITHOUT ending the stream:
// the context is snapshotted, finalized via Finish(), then restored so that
// AddData() may continue afterwards.
CHIP_ERROR Hash_SHA256_stream::GetDigest(MutableByteSpan & out_buffer)
{
    SHA256_CTX * context = to_inner_hash_sha256_context(&mContext);

    // Back-up context as we are about to finalize the hash to extract digest.
    SHA256_CTX previous_ctx = *context;

    // Pad + compute digest, then finalize context. It is restored next line to continue.
    CHIP_ERROR result = Finish(out_buffer);

    // Restore context prior to finalization.
    *context = previous_ctx;

    return result;
}

// Finalize the stream and emit the digest; out_buffer is shrunk to the
// digest length on success.
CHIP_ERROR Hash_SHA256_stream::Finish(MutableByteSpan & out_buffer)
{
    VerifyOrReturnError(out_buffer.size() >= kSHA256_Hash_Length, CHIP_ERROR_BUFFER_TOO_SMALL);
    SHA256_CTX * const context = to_inner_hash_sha256_context(&mContext);
    const int result           = SHA256_Final(Uint8::to_uchar(out_buffer.data()), context);
    VerifyOrReturnError(result == 1, CHIP_ERROR_INTERNAL);
    out_buffer = out_buffer.SubSpan(0, kSHA256_Hash_Length);
    return CHIP_NO_ERROR;
}

// Scrub the hash state; OPENSSL_cleanse resists being optimized away.
void Hash_SHA256_stream::Clear()
{
    OPENSSL_cleanse(this, sizeof(*this));
}

// HKDF-SHA256 (extract-and-expand) via the OpenSSL EVP_PKEY API.
// `salt` is optional; exactly `out_length` bytes of OKM are written.
CHIP_ERROR HKDF_sha::HKDF_SHA256(const uint8_t * secret, const size_t secret_length, const uint8_t * salt,
                                 const size_t salt_length, const uint8_t * info, const size_t info_length, uint8_t * out_buffer,
                                 size_t out_length)
{
    CHIP_ERROR error = CHIP_NO_ERROR;
    int result       = 1;

    EVP_PKEY_CTX * const context = EVP_PKEY_CTX_new_id(EVP_PKEY_HKDF, nullptr);
    VerifyOrExit(context != nullptr, error = CHIP_ERROR_INTERNAL);

    VerifyOrExit(secret != nullptr, error = CHIP_ERROR_INVALID_ARGUMENT);
    VerifyOrExit(secret_length > 0, error = CHIP_ERROR_INVALID_ARGUMENT);

    // Salt is optional
    if (salt_length > 0)
    {
        VerifyOrExit(salt != nullptr, error = CHIP_ERROR_INVALID_ARGUMENT);
    }

    VerifyOrExit(info_length > 0, error = CHIP_ERROR_INVALID_ARGUMENT);
    VerifyOrExit(info != nullptr, error = CHIP_ERROR_INVALID_ARGUMENT);
    VerifyOrExit(out_length > 0, error = CHIP_ERROR_INVALID_ARGUMENT);
    VerifyOrExit(out_buffer != nullptr, error = CHIP_ERROR_INVALID_ARGUMENT);

    result = EVP_PKEY_derive_init(context);
    VerifyOrExit(result == 1, error = CHIP_ERROR_INTERNAL);

    result = EVP_PKEY_CTX_set_hkdf_md(context, EVP_sha256());
    VerifyOrExit(result == 1, error = CHIP_ERROR_INTERNAL);

    VerifyOrExit(CanCastTo<int>(secret_length), error = CHIP_ERROR_INVALID_ARGUMENT);
    result = EVP_PKEY_CTX_set1_hkdf_key(context, Uint8::to_const_uchar(secret), static_cast<int>(secret_length));
    VerifyOrExit(result == 1, error = CHIP_ERROR_INTERNAL);

    if (salt_length > 0 && salt != nullptr)
    {
        VerifyOrExit(CanCastTo<int>(salt_length), error = CHIP_ERROR_INVALID_ARGUMENT);
        result = EVP_PKEY_CTX_set1_hkdf_salt(context, Uint8::to_const_uchar(salt), static_cast<int>(salt_length));
        VerifyOrExit(result == 1, error = CHIP_ERROR_INTERNAL);
    }

    VerifyOrExit(CanCastTo<int>(info_length), error = CHIP_ERROR_INVALID_ARGUMENT);
    result = EVP_PKEY_CTX_add1_hkdf_info(context, Uint8::to_const_uchar(info), static_cast<int>(info_length));
    VerifyOrExit(result == 1, error = CHIP_ERROR_INTERNAL);

    // "HKDEF" is OpenSSL's own (misspelled) macro name -- do not "fix" it.
    result = EVP_PKEY_CTX_hkdf_mode(context, EVP_PKEY_HKDEF_MODE_EXTRACT_AND_EXPAND);
    VerifyOrExit(result == 1, error = CHIP_ERROR_INTERNAL);

    // Get the OKM (Output Key Material)
    result = EVP_PKEY_derive(context, Uint8::to_uchar(out_buffer), &out_length);
    VerifyOrExit(result == 1, error = CHIP_ERROR_INTERNAL);

exit:
    if (context != nullptr)
    {
        EVP_PKEY_CTX_free(context);
    }
    return error;
}

// HMAC-SHA256 of `message` under `key`; out_buffer receives the full MAC
// (out_length must be at least kSHA256_Hash_Length).
CHIP_ERROR HMAC_sha::HMAC_SHA256(const uint8_t * key, size_t key_length, const uint8_t * message, size_t message_length,
                                 uint8_t * out_buffer, size_t out_length)
{
    VerifyOrReturnError(key != nullptr, CHIP_ERROR_INVALID_ARGUMENT);
    VerifyOrReturnError(key_length > 0, CHIP_ERROR_INVALID_ARGUMENT);
    VerifyOrReturnError(message != nullptr, CHIP_ERROR_INVALID_ARGUMENT);
    VerifyOrReturnError(message_length > 0, CHIP_ERROR_INVALID_ARGUMENT);
    VerifyOrReturnError(out_length >= kSHA256_Hash_Length, CHIP_ERROR_INVALID_ARGUMENT);
    VerifyOrReturnError(out_buffer != nullptr, CHIP_ERROR_INVALID_ARGUMENT);

    CHIP_ERROR error         = CHIP_ERROR_INTERNAL;
    int error_openssl        = 0;
    unsigned int mac_out_len = 0;

    HMAC_CTX * mac_ctx = HMAC_CTX_new();
    VerifyOrExit(mac_ctx != nullptr, error = CHIP_ERROR_INTERNAL);

    error_openssl = HMAC_Init_ex(mac_ctx, Uint8::to_const_uchar(key), static_cast<int>(key_length), EVP_sha256(), nullptr);
    VerifyOrExit(error_openssl == 1, error = CHIP_ERROR_INTERNAL);

    error_openssl = HMAC_Update(mac_ctx, Uint8::to_const_uchar(message), message_length);
    VerifyOrExit(error_openssl == 1, error = CHIP_ERROR_INTERNAL);

    mac_out_len   = static_cast<unsigned int>(CHIP_CRYPTO_HASH_LEN_BYTES);
    error_openssl = HMAC_Final(mac_ctx, Uint8::to_uchar(out_buffer), &mac_out_len);
    VerifyOrExit(error_openssl == 1, error = CHIP_ERROR_INTERNAL);

    error = CHIP_NO_ERROR;

exit:
    HMAC_CTX_free(mac_ctx);
    return error;
}

// PBKDF2-HMAC-SHA256 key derivation via OpenSSL PKCS5_PBKDF2_HMAC.
// Salt length is bounded by [kMin_Salt_Length, kMax_Salt_Length].
CHIP_ERROR PBKDF2_sha256::pbkdf2_sha256(const uint8_t * password, size_t plen, const uint8_t * salt, size_t slen,
                                        unsigned int iteration_count, uint32_t key_length, uint8_t * output)
{
    CHIP_ERROR error  = CHIP_NO_ERROR;
    int result        = 1;
    const EVP_MD * md = nullptr;

    VerifyOrExit(password != nullptr, error = CHIP_ERROR_INVALID_ARGUMENT);
    VerifyOrExit(plen > 0, error = CHIP_ERROR_INVALID_ARGUMENT);
    VerifyOrExit(salt != nullptr, error = CHIP_ERROR_INVALID_ARGUMENT);
    VerifyOrExit(slen >= kMin_Salt_Length, error = CHIP_ERROR_INVALID_ARGUMENT);
    VerifyOrExit(slen <= kMax_Salt_Length, error = CHIP_ERROR_INVALID_ARGUMENT);
    VerifyOrExit(key_length > 0, error = CHIP_ERROR_INVALID_ARGUMENT);
    VerifyOrExit(output != nullptr, error = CHIP_ERROR_INVALID_ARGUMENT);

    md = _digestForType(DigestType::SHA256);
    VerifyOrExit(md != nullptr, error = CHIP_ERROR_INTERNAL);

    VerifyOrExit(CanCastTo<int>(plen), error = CHIP_ERROR_INVALID_ARGUMENT);
    VerifyOrExit(CanCastTo<int>(slen), error = CHIP_ERROR_INVALID_ARGUMENT);
    VerifyOrExit(CanCastTo<int>(iteration_count), error = CHIP_ERROR_INVALID_ARGUMENT);
    VerifyOrExit(CanCastTo<int>(key_length), error = CHIP_ERROR_INVALID_ARGUMENT);
    result = PKCS5_PBKDF2_HMAC(Uint8::to_const_char(password), static_cast<int>(plen), Uint8::to_const_uchar(salt),
                               static_cast<int>(slen), static_cast<int>(iteration_count), md, static_cast<int>(key_length),
                               Uint8::to_uchar(output));

    VerifyOrExit(result == 1, error = CHIP_ERROR_INTERNAL);

exit:
    if (error != CHIP_NO_ERROR)
    {
        _logSSLError();
    }

    return error;
}

// No-op on this platform: OpenSSL's DRBG manages its own entropy sources.
CHIP_ERROR add_entropy_source(entropy_source fn_source, void * p_source, size_t threshold)
{
    return CHIP_NO_ERROR;
}

// Fill `out_buffer` with cryptographically secure random bytes.
CHIP_ERROR DRBG_get_bytes(uint8_t * out_buffer, const
size_t out_length) { VerifyOrReturnError(out_buffer != nullptr, CHIP_ERROR_INVALID_ARGUMENT); VerifyOrReturnError(out_length > 0, CHIP_ERROR_INVALID_ARGUMENT); VerifyOrReturnError(CanCastTo<int>(out_length), CHIP_ERROR_INVALID_ARGUMENT); const int result = RAND_priv_bytes(Uint8::to_uchar(out_buffer), static_cast<int>(out_length)); VerifyOrReturnError(result == 1, CHIP_ERROR_INTERNAL); return CHIP_NO_ERROR; } ECName MapECName(SupportedECPKeyTypes keyType) { switch (keyType) { case SupportedECPKeyTypes::ECP256R1: return ECName::P256v1; default: return ECName::None; } } static inline void from_EC_KEY(EC_KEY * key, P256KeypairContext * context) { *SafePointerCast<EC_KEY **>(context) = key; } static inline EC_KEY * to_EC_KEY(P256KeypairContext * context) { return *SafePointerCast<EC_KEY **>(context); } static inline const EC_KEY * to_const_EC_KEY(const P256KeypairContext * context) { return *SafePointerCast<const EC_KEY * const *>(context); } CHIP_ERROR P256Keypair::ECDSA_sign_msg(const uint8_t * msg, const size_t msg_length, P256ECDSASignature & out_signature) { VerifyOrReturnError((msg != nullptr) && (msg_length > 0), CHIP_ERROR_INVALID_ARGUMENT); uint8_t digest[kSHA256_Hash_Length]; memset(&digest[0], 0, sizeof(digest)); ReturnErrorOnFailure(Hash_SHA256(msg, msg_length, &digest[0])); return ECDSA_sign_hash(&digest[0], sizeof(digest), out_signature); } CHIP_ERROR P256Keypair::ECDSA_sign_hash(const uint8_t * hash, const size_t hash_length, P256ECDSASignature & out_signature) { ERR_clear_error(); CHIP_ERROR error = CHIP_NO_ERROR; int nid = NID_undef; EC_KEY * ec_key = nullptr; ECDSA_SIG * sig = nullptr; const BIGNUM * r = nullptr; const BIGNUM * s = nullptr; static_assert(P256ECDSASignature::Capacity() >= kP256_ECDSA_Signature_Length_Raw, "P256ECDSASignature must be large enough"); VerifyOrExit(mInitialized, error = CHIP_ERROR_INCORRECT_STATE); VerifyOrExit(hash != nullptr, error = CHIP_ERROR_INVALID_ARGUMENT); VerifyOrExit(hash_length == kSHA256_Hash_Length, error = 
CHIP_ERROR_INVALID_ARGUMENT); nid = _nidForCurve(MapECName(mPublicKey.Type())); VerifyOrExit(nid != NID_undef, error = CHIP_ERROR_INVALID_ARGUMENT); ec_key = to_EC_KEY(&mKeypair); VerifyOrExit(ec_key != nullptr, error = CHIP_ERROR_INTERNAL); sig = ECDSA_do_sign(Uint8::to_const_uchar(hash), static_cast<int>(hash_length), ec_key); VerifyOrExit(sig != nullptr, error = CHIP_ERROR_INTERNAL); ECDSA_SIG_get0(sig, &r, &s); VerifyOrExit((r != nullptr) || (s != nullptr) || (BN_num_bytes(r) == kP256_FE_Length) || (BN_num_bytes(s) == kP256_FE_Length), error = CHIP_ERROR_INTERNAL); // Concatenate r and s to output. Sizes were checked above. VerifyOrExit(out_signature.SetLength(kP256_ECDSA_Signature_Length_Raw) == CHIP_NO_ERROR, error = CHIP_ERROR_INTERNAL); BN_bn2binpad(r, out_signature.Bytes() + 0u, kP256_FE_Length); BN_bn2binpad(s, out_signature.Bytes() + kP256_FE_Length, kP256_FE_Length); exit: if (sig != nullptr) { // SIG owns the memory of r, s ECDSA_SIG_free(sig); } if (error != CHIP_NO_ERROR) { _logSSLError(); } return error; } CHIP_ERROR P256PublicKey::ECDSA_validate_msg_signature(const uint8_t * msg, const size_t msg_length, const P256ECDSASignature & signature) const { VerifyOrReturnError((msg != nullptr) && (msg_length > 0), CHIP_ERROR_INVALID_ARGUMENT); uint8_t digest[kSHA256_Hash_Length]; memset(&digest[0], 0, sizeof(digest)); ReturnErrorOnFailure(Hash_SHA256(msg, msg_length, &digest[0])); return ECDSA_validate_hash_signature(&digest[0], sizeof(digest), signature); } CHIP_ERROR P256PublicKey::ECDSA_validate_hash_signature(const uint8_t * hash, const size_t hash_length, const P256ECDSASignature & signature) const { ERR_clear_error(); CHIP_ERROR error = CHIP_ERROR_INTERNAL; int nid = NID_undef; EC_KEY * ec_key = nullptr; EC_POINT * key_point = nullptr; EC_GROUP * ec_group = nullptr; ECDSA_SIG * ec_sig = nullptr; BIGNUM * r = nullptr; BIGNUM * s = nullptr; int result = 0; VerifyOrExit(hash != nullptr, error = CHIP_ERROR_INVALID_ARGUMENT); VerifyOrExit(hash_length == 
kSHA256_Hash_Length, error = CHIP_ERROR_INVALID_ARGUMENT); VerifyOrExit(signature.Length() == kP256_ECDSA_Signature_Length_Raw, error = CHIP_ERROR_INVALID_ARGUMENT); nid = _nidForCurve(MapECName(Type())); VerifyOrExit(nid != NID_undef, error = CHIP_ERROR_INVALID_ARGUMENT); ec_group = EC_GROUP_new_by_curve_name(nid); VerifyOrExit(ec_group != nullptr, error = CHIP_ERROR_NO_MEMORY); key_point = EC_POINT_new(ec_group); VerifyOrExit(key_point != nullptr, error = CHIP_ERROR_NO_MEMORY); result = EC_POINT_oct2point(ec_group, key_point, Uint8::to_const_uchar(*this), Length(), nullptr); VerifyOrExit(result == 1, error = CHIP_ERROR_INTERNAL); ec_key = EC_KEY_new_by_curve_name(nid); VerifyOrExit(ec_key != nullptr, error = CHIP_ERROR_NO_MEMORY); result = EC_KEY_set_public_key(ec_key, key_point); VerifyOrExit(result == 1, error = CHIP_ERROR_INTERNAL); result = EC_KEY_check_key(ec_key); VerifyOrExit(result == 1, error = CHIP_ERROR_INTERNAL); // Build-up the signature object from raw <r,s> tuple r = BN_bin2bn(Uint8::to_const_uchar(signature.ConstBytes()) + 0u, kP256_FE_Length, nullptr); VerifyOrExit(r != nullptr, error = CHIP_ERROR_NO_MEMORY); s = BN_bin2bn(Uint8::to_const_uchar(signature.ConstBytes()) + kP256_FE_Length, kP256_FE_Length, nullptr); VerifyOrExit(s != nullptr, error = CHIP_ERROR_NO_MEMORY); ec_sig = ECDSA_SIG_new(); VerifyOrExit(ec_sig != nullptr, error = CHIP_ERROR_NO_MEMORY); result = ECDSA_SIG_set0(ec_sig, r, s); VerifyOrExit(result == 1, error = CHIP_ERROR_INTERNAL); result = ECDSA_do_verify(Uint8::to_const_uchar(hash), static_cast<int>(hash_length), ec_sig, ec_key); VerifyOrExit(result == 1, error = CHIP_ERROR_INVALID_SIGNATURE); error = CHIP_NO_ERROR; exit: _logSSLError(); if (ec_sig != nullptr) { ECDSA_SIG_free(ec_sig); // After ECDSA_SIG_set0 succeeds, r and s memory is managed by ECDSA_SIG object. 
// We set to nullptr so that we don't try to double-free r = nullptr; s = nullptr; } if (s != nullptr) { BN_clear_free(s); } if (r != nullptr) { BN_clear_free(r); } if (ec_key != nullptr) { EC_KEY_free(ec_key); } if (key_point != nullptr) { EC_POINT_clear_free(key_point); } if (ec_group != nullptr) { EC_GROUP_free(ec_group); } return error; } // helper function to populate octet key into EVP_PKEY out_evp_pkey. Caller must free out_evp_pkey static CHIP_ERROR _create_evp_key_from_binary_p256_key(const P256PublicKey & key, EVP_PKEY ** out_evp_pkey) { CHIP_ERROR error = CHIP_NO_ERROR; EC_KEY * ec_key = nullptr; int result = -1; EC_POINT * point = nullptr; EC_GROUP * group = nullptr; int nid = NID_undef; VerifyOrExit(*out_evp_pkey == nullptr, error = CHIP_ERROR_INVALID_ARGUMENT); nid = _nidForCurve(MapECName(key.Type())); VerifyOrExit(nid != NID_undef, error = CHIP_ERROR_INTERNAL); ec_key = EC_KEY_new_by_curve_name(nid); VerifyOrExit(ec_key != nullptr, error = CHIP_ERROR_INTERNAL); group = EC_GROUP_new_by_curve_name(nid); VerifyOrExit(group != nullptr, error = CHIP_ERROR_INTERNAL); point = EC_POINT_new(group); VerifyOrExit(point != nullptr, error = CHIP_ERROR_INTERNAL); result = EC_POINT_oct2point(group, point, Uint8::to_const_uchar(key), key.Length(), nullptr); VerifyOrExit(result == 1, error = CHIP_ERROR_INTERNAL); result = EC_KEY_set_public_key(ec_key, point); VerifyOrExit(result == 1, error = CHIP_ERROR_INTERNAL); *out_evp_pkey = EVP_PKEY_new(); VerifyOrExit(*out_evp_pkey != nullptr, error = CHIP_ERROR_INTERNAL); result = EVP_PKEY_set1_EC_KEY(*out_evp_pkey, ec_key); VerifyOrExit(result == 1, error = CHIP_ERROR_INTERNAL); exit: if (ec_key != nullptr) { EC_KEY_free(ec_key); ec_key = nullptr; } if (error != CHIP_NO_ERROR && *out_evp_pkey) { EVP_PKEY_free(*out_evp_pkey); out_evp_pkey = nullptr; } if (point != nullptr) { EC_POINT_free(point); point = nullptr; } if (group != nullptr) { EC_GROUP_free(group); group = nullptr; } return error; } CHIP_ERROR 
// ECDH: derive the shared secret from our private key and the peer's public
// key, via the OpenSSL EVP_PKEY derive API.
P256Keypair::ECDH_derive_secret(const P256PublicKey & remote_public_key, P256ECDHDerivedSecret & out_secret) const
{
    ERR_clear_error();
    CHIP_ERROR error      = CHIP_NO_ERROR;
    int result            = -1;
    EVP_PKEY * local_key  = nullptr;
    EVP_PKEY * remote_key = nullptr;

    EVP_PKEY_CTX * context = nullptr;
    size_t out_buf_length  = 0;

    EC_KEY * ec_key = EC_KEY_dup(to_const_EC_KEY(&mKeypair));
    VerifyOrExit(ec_key != nullptr, error = CHIP_ERROR_INTERNAL);

    VerifyOrExit(mInitialized, error = CHIP_ERROR_INCORRECT_STATE);

    local_key = EVP_PKEY_new();
    VerifyOrExit(local_key != nullptr, error = CHIP_ERROR_INTERNAL);

    result = EVP_PKEY_set1_EC_KEY(local_key, ec_key);
    VerifyOrExit(result == 1, error = CHIP_ERROR_INTERNAL);

    error = _create_evp_key_from_binary_p256_key(remote_public_key, &remote_key);
    SuccessOrExit(error);

    context = EVP_PKEY_CTX_new(local_key, nullptr);
    VerifyOrExit(context != nullptr, error = CHIP_ERROR_INTERNAL);

    result = EVP_PKEY_derive_init(context);
    VerifyOrExit(result == 1, error = CHIP_ERROR_INTERNAL);

    result = EVP_PKEY_derive_set_peer(context, remote_key);
    VerifyOrExit(result == 1, error = CHIP_ERROR_INTERNAL);

    out_buf_length = (out_secret.Length() == 0) ? out_secret.Capacity() : out_secret.Length();
    result         = EVP_PKEY_derive(context, Uint8::to_uchar(out_secret), &out_buf_length);
    VerifyOrExit(result == 1, error = CHIP_ERROR_INTERNAL);
    SuccessOrExit(out_secret.SetLength(out_buf_length));

exit:
    if (ec_key != nullptr)
    {
        EC_KEY_free(ec_key);
        ec_key = nullptr;
    }
    if (local_key != nullptr)
    {
        EVP_PKEY_free(local_key);
        local_key = nullptr;
    }
    if (remote_key != nullptr)
    {
        EVP_PKEY_free(remote_key);
        remote_key = nullptr;
    }
    if (context != nullptr)
    {
        EVP_PKEY_CTX_free(context);
        context = nullptr;
    }
    _logSSLError();
    return error;
}

// Scrub secret material; OPENSSL_cleanse resists being optimized away,
// unlike a plain memset before free.
void ClearSecretData(uint8_t * buf, size_t len)
{
    OPENSSL_cleanse(buf, len);
}

// Serialize an EC_KEY's public point (uncompressed form) into `pubkey`.
static CHIP_ERROR P256PublicKeyFromECKey(EC_KEY * ec_key, P256PublicKey & pubkey)
{
    ERR_clear_error();
    CHIP_ERROR error = CHIP_NO_ERROR;

    int nid            = NID_undef;
    ECName curve       = MapECName(pubkey.Type());
    EC_GROUP * group   = nullptr;
    size_t pubkey_size = 0;

    const EC_POINT * pubkey_ecp = EC_KEY_get0_public_key(ec_key);
    VerifyOrExit(pubkey_ecp != nullptr, error = CHIP_ERROR_INVALID_ARGUMENT);

    nid = _nidForCurve(curve);
    VerifyOrExit(nid != NID_undef, error = CHIP_ERROR_INVALID_ARGUMENT);

    group = EC_GROUP_new_by_curve_name(nid);
    VerifyOrExit(group != nullptr, error = CHIP_ERROR_INTERNAL);

    pubkey_size =
        EC_POINT_point2oct(group, pubkey_ecp, POINT_CONVERSION_UNCOMPRESSED, Uint8::to_uchar(pubkey), pubkey.Length(), nullptr);
    // pubkey_ecp is a get0 (borrowed) pointer; just drop the reference.
    pubkey_ecp = nullptr;
    VerifyOrExit(pubkey_size == pubkey.Length(), error = CHIP_ERROR_INVALID_ARGUMENT);

exit:
    if (group != nullptr)
    {
        EC_GROUP_free(group);
        group = nullptr;
    }
    _logSSLError();
    return error;
}

// Generate a fresh P-256 keypair and cache its public key in mPublicKey.
CHIP_ERROR P256Keypair::Initialize()
{
    ERR_clear_error();
    Clear();

    CHIP_ERROR error = CHIP_NO_ERROR;
    int result       = 0;
    EC_KEY * ec_key  = nullptr;
    ECName curve     = MapECName(mPublicKey.Type());

    int nid = _nidForCurve(curve);
    VerifyOrExit(nid != NID_undef, error = CHIP_ERROR_INVALID_ARGUMENT);

    ec_key = EC_KEY_new_by_curve_name(nid);
    VerifyOrExit(ec_key != nullptr, error = CHIP_ERROR_INTERNAL);

    result = EC_KEY_generate_key(ec_key);
    VerifyOrExit(result == 1, error = CHIP_ERROR_INTERNAL);

    error = P256PublicKeyFromECKey(ec_key, mPublicKey);
    SuccessOrExit(error);

    from_EC_KEY(ec_key, &mKeypair);
    mInitialized = true;
    // Ownership transferred into mKeypair; prevent the cleanup below from freeing it.
    ec_key = nullptr;

exit:
    if (ec_key != nullptr)
    {
        EC_KEY_free(ec_key);
        ec_key = nullptr;
    }
    _logSSLError();
    return error;
}

// Serialize as public key bytes followed by the raw private key into `output`.
CHIP_ERROR P256Keypair::Serialize(P256SerializedKeypair & output) const
{
    CHIP_ERROR error      = CHIP_NO_ERROR;
    const EC_KEY * ec_key = to_const_EC_KEY(&mKeypair);
    uint8_t privkey[kP256_PrivateKey_Length];
    int privkey_size = 0;

    const BIGNUM * privkey_bn = EC_KEY_get0_private_key(ec_key);
    VerifyOrExit(privkey_bn != nullptr, error = CHIP_ERROR_INTERNAL);

    privkey_size = BN_bn2binpad(privkey_bn, privkey, sizeof(privkey));
    // Borrowed (get0) pointer; just drop the reference.
    privkey_bn = nullptr;
    VerifyOrExit(privkey_size > 0, error = CHIP_ERROR_INTERNAL);
    VerifyOrExit((size_t) privkey_size == sizeof(privkey), error = CHIP_ERROR_INTERNAL);

    {
        size_t len = output.Length() == 0 ? output.Capacity() : output.Length();
        Encoding::BufferWriter bbuf(output, len);
        bbuf.Put(mPublicKey, mPublicKey.Length());
        bbuf.Put(privkey, sizeof(privkey));
        VerifyOrExit(bbuf.Fit(), error = CHIP_ERROR_NO_MEMORY);
        output.SetLength(bbuf.Needed());
    }

exit:
    // Scrub the on-stack copy of the private key on all paths.
    ClearSecretData(privkey, sizeof(privkey));
    _logSSLError();
    return error;
}

// Rebuild the keypair from Serialize()'s public||private layout.
CHIP_ERROR P256Keypair::Deserialize(P256SerializedKeypair & input)
{
    Encoding::BufferWriter bbuf(mPublicKey, mPublicKey.Length());

    Clear();

    BIGNUM * pvt_key     = nullptr;
    EC_GROUP * group     = nullptr;
    EC_POINT * key_point = nullptr;

    EC_KEY * ec_key = nullptr;
    ECName curve    = MapECName(mPublicKey.Type());

    ERR_clear_error();
    CHIP_ERROR error = CHIP_NO_ERROR;
    int result       = 0;
    int nid          = NID_undef;

    // The raw private key follows the public key in the serialized layout.
    const uint8_t * privkey = Uint8::to_const_uchar(input) + mPublicKey.Length();

    VerifyOrExit(input.Length() == mPublicKey.Length() + kP256_PrivateKey_Length, error = CHIP_ERROR_INVALID_ARGUMENT);
    bbuf.Put(input, mPublicKey.Length());
    VerifyOrExit(bbuf.Fit(), error = CHIP_ERROR_NO_MEMORY);

    nid = _nidForCurve(curve);
VerifyOrExit(nid != NID_undef, error = CHIP_ERROR_INVALID_ARGUMENT); group = EC_GROUP_new_by_curve_name(nid); VerifyOrExit(group != nullptr, error = CHIP_ERROR_INTERNAL); key_point = EC_POINT_new(group); VerifyOrExit(key_point != nullptr, error = CHIP_ERROR_INTERNAL); result = EC_POINT_oct2point(group, key_point, Uint8::to_const_uchar(mPublicKey), mPublicKey.Length(), nullptr); VerifyOrExit(result == 1, error = CHIP_ERROR_INTERNAL); ec_key = EC_KEY_new_by_curve_name(nid); VerifyOrExit(ec_key != nullptr, error = CHIP_ERROR_INTERNAL); result = EC_KEY_set_public_key(ec_key, key_point); VerifyOrExit(result == 1, error = CHIP_ERROR_INTERNAL); pvt_key = BN_bin2bn(privkey, kP256_PrivateKey_Length, nullptr); VerifyOrExit(pvt_key != nullptr, error = CHIP_ERROR_INTERNAL); result = EC_KEY_set_private_key(ec_key, pvt_key); VerifyOrExit(result == 1, error = CHIP_ERROR_INTERNAL); from_EC_KEY(ec_key, &mKeypair); mInitialized = true; ec_key = nullptr; exit: if (ec_key != nullptr) { EC_KEY_free(ec_key); ec_key = nullptr; } if (group != nullptr) { EC_GROUP_free(group); group = nullptr; } if (pvt_key != nullptr) { BN_free(pvt_key); pvt_key = nullptr; } if (key_point != nullptr) { EC_POINT_free(key_point); key_point = nullptr; } _logSSLError(); return error; } void P256Keypair::Clear() { if (mInitialized) { EC_KEY * ec_key = to_EC_KEY(&mKeypair); EC_KEY_free(ec_key); mInitialized = false; } } P256Keypair::~P256Keypair() { Clear(); } CHIP_ERROR P256Keypair::NewCertificateSigningRequest(uint8_t * out_csr, size_t & csr_length) { ERR_clear_error(); CHIP_ERROR error = CHIP_NO_ERROR; int result = 0; X509_REQ * x509_req = X509_REQ_new(); EVP_PKEY * evp_pkey = nullptr; EC_KEY * ec_key = to_EC_KEY(&mKeypair); X509_NAME * subject = X509_NAME_new(); VerifyOrExit(subject != nullptr, error = CHIP_ERROR_INTERNAL); VerifyOrExit(mInitialized, error = CHIP_ERROR_INCORRECT_STATE); result = X509_REQ_set_version(x509_req, 0); VerifyOrExit(result == 1, error = CHIP_ERROR_INTERNAL); result = 
EC_KEY_check_key(ec_key); VerifyOrExit(result == 1, error = CHIP_ERROR_INTERNAL); evp_pkey = EVP_PKEY_new(); VerifyOrExit(evp_pkey != nullptr, error = CHIP_ERROR_INTERNAL); result = EVP_PKEY_set1_EC_KEY(evp_pkey, ec_key); VerifyOrExit(result == 1, error = CHIP_ERROR_INTERNAL); result = X509_REQ_set_pubkey(x509_req, evp_pkey); VerifyOrExit(result == 1, error = CHIP_ERROR_INTERNAL); // TODO: mbedTLS CSR parser fails if the subject name is not set (or if empty). // CHIP Spec doesn't specify the subject name that can be used. // Figure out the correct value and update this code. result = X509_NAME_add_entry_by_txt(subject, "O", MBSTRING_ASC, Uint8::from_const_char("CSR"), -1, -1, 0); VerifyOrExit(result == 1, error = CHIP_ERROR_INTERNAL); result = X509_REQ_set_subject_name(x509_req, subject); VerifyOrExit(result == 1, error = CHIP_ERROR_INTERNAL); result = X509_REQ_sign(x509_req, evp_pkey, EVP_sha256()); VerifyOrExit(result > 0, error = CHIP_ERROR_INTERNAL); csr_length = static_cast<size_t>(i2d_X509_REQ(x509_req, &out_csr)); exit: ec_key = nullptr; if (evp_pkey != nullptr) { EVP_PKEY_free(evp_pkey); evp_pkey = nullptr; } X509_NAME_free(subject); subject = nullptr; X509_REQ_free(x509_req); _logSSLError(); return error; } CHIP_ERROR VerifyCertificateSigningRequest(const uint8_t * csr, size_t csr_length, P256PublicKey & pubkey) { ERR_clear_error(); CHIP_ERROR error = CHIP_NO_ERROR; int result = 0; EVP_PKEY * evp_pkey = nullptr; EC_KEY * ec_key = nullptr; const unsigned char * csr_buf = Uint8::to_const_uchar(csr); X509_REQ * x509_req = d2i_X509_REQ(nullptr, &csr_buf, (int) csr_length); VerifyOrExit(x509_req != nullptr, error = CHIP_ERROR_INVALID_ARGUMENT); VerifyOrExit(X509_REQ_get_version(x509_req) == 0, error = CHIP_ERROR_INVALID_ARGUMENT); evp_pkey = X509_REQ_get0_pubkey(x509_req); VerifyOrExit(evp_pkey != nullptr, error = CHIP_ERROR_INVALID_ARGUMENT); result = X509_REQ_verify(x509_req, evp_pkey); VerifyOrExit(result == 1, error = CHIP_ERROR_INVALID_ARGUMENT); ec_key = 
EVP_PKEY_get1_EC_KEY(evp_pkey); VerifyOrExit(ec_key != nullptr, error = CHIP_ERROR_INVALID_ARGUMENT); error = P256PublicKeyFromECKey(ec_key, pubkey); SuccessOrExit(error); exit: if (x509_req != nullptr) { X509_REQ_free(x509_req); } _logSSLError(); return error; } #define init_point(_point_) \ do \ { \ _point_ = EC_POINT_new(context->curve); \ VerifyOrReturnError(_point_ != nullptr, CHIP_ERROR_INTERNAL); \ } while (0) #define init_bn(_bn_) \ do \ { \ _bn_ = BN_new(); \ VerifyOrReturnError(_bn_ != nullptr, CHIP_ERROR_INTERNAL); \ } while (0) #define free_point(_point_) \ do \ { \ if (_point_ != nullptr) \ { \ EC_POINT_clear_free(static_cast<EC_POINT *>(_point_)); \ } \ } while (0) #define free_bn(_bn_) \ do \ { \ if (_bn_ != nullptr) \ { \ BN_clear_free(static_cast<BIGNUM *>(_bn_)); \ } \ } while (0) typedef struct Spake2p_Context { EC_GROUP * curve; BN_CTX * bn_ctx; const EVP_MD * md_info; } Spake2p_Context; static inline Spake2p_Context * to_inner_spake2p_context(Spake2pOpaqueContext * context) { return SafePointerCast<Spake2p_Context *>(context); } CHIP_ERROR Spake2p_P256_SHA256_HKDF_HMAC::InitInternal() { Spake2p_Context * const context = to_inner_spake2p_context(&mSpake2pContext); context->curve = nullptr; context->bn_ctx = nullptr; context->md_info = nullptr; context->curve = EC_GROUP_new_by_curve_name(NID_X9_62_prime256v1); VerifyOrReturnError(context->curve != nullptr, CHIP_ERROR_INTERNAL); G = EC_GROUP_get0_generator(context->curve); VerifyOrReturnError(G != nullptr, CHIP_ERROR_INTERNAL); context->bn_ctx = BN_CTX_secure_new(); VerifyOrReturnError(context->bn_ctx != nullptr, CHIP_ERROR_INTERNAL); context->md_info = EVP_sha256(); VerifyOrReturnError(context->md_info != nullptr, CHIP_ERROR_INTERNAL); init_point(M); init_point(N); init_point(X); init_point(Y); init_point(L); init_point(V); init_point(Z); init_bn(w0); init_bn(w1); init_bn(xy); init_bn(tempbn); init_bn(order); const int error_openssl = EC_GROUP_get_order(context->curve, static_cast<BIGNUM 
*>(order), context->bn_ctx);
    VerifyOrReturnError(error_openssl == 1, CHIP_ERROR_INTERNAL);

    return CHIP_NO_ERROR;
}

// Free all SPAKE2+ state; clear_free variants scrub secret material.
void Spake2p_P256_SHA256_HKDF_HMAC::Clear()
{
    VerifyOrReturn(state != CHIP_SPAKE2P_STATE::PREINIT);

    Spake2p_Context * const context = to_inner_spake2p_context(&mSpake2pContext);
    if (context->curve != nullptr)
    {
        EC_GROUP_clear_free(context->curve);
    }
    if (context->bn_ctx != nullptr)
    {
        BN_CTX_free(context->bn_ctx);
    }

    free_point(M);
    free_point(N);
    free_point(X);
    free_point(Y);
    free_point(L);
    free_point(V);
    free_point(Z);

    free_bn(w0);
    free_bn(w1);
    free_bn(xy);
    free_bn(tempbn);
    free_bn(order);

    state = CHIP_SPAKE2P_STATE::PREINIT;
}

// MAC = HMAC-SHA256(key, in) written to `out`.
CHIP_ERROR Spake2p_P256_SHA256_HKDF_HMAC::Mac(const uint8_t * key, size_t key_len, const uint8_t * in, size_t in_len,
                                              uint8_t * out)
{
    HMAC_sha hmac;
    return hmac.HMAC_SHA256(key, key_len, in, in_len, out, kSHA256_Hash_Length);
}

// Recompute the MAC and compare in constant time via CRYPTO_memcmp.
CHIP_ERROR Spake2p_P256_SHA256_HKDF_HMAC::MacVerify(const uint8_t * key, size_t key_len, const uint8_t * mac, size_t mac_len,
                                                    const uint8_t * in, size_t in_len)
{
    VerifyOrReturnError(mac_len == kSHA256_Hash_Length, CHIP_ERROR_INVALID_ARGUMENT);

    uint8_t computed_mac[kSHA256_Hash_Length];
    ReturnErrorOnFailure(Mac(key, key_len, in, in_len, computed_mac));
    VerifyOrReturnError(CRYPTO_memcmp(mac, computed_mac, mac_len) == 0, CHIP_ERROR_INTERNAL);

    return CHIP_NO_ERROR;
}

// Load a big-endian byte string into field element `fe`, reduced mod the
// group order.
CHIP_ERROR Spake2p_P256_SHA256_HKDF_HMAC::FELoad(const uint8_t * in, size_t in_len, void * fe)
{
    BIGNUM * const bn_fe      = static_cast<BIGNUM *>(fe);
    Spake2p_Context * context = to_inner_spake2p_context(&mSpake2pContext);

    VerifyOrReturnError(CanCastTo<int>(in_len), CHIP_ERROR_INTERNAL);
    BN_bin2bn(Uint8::to_const_uchar(in), static_cast<int>(in_len), bn_fe);
    const int error_openssl = BN_mod(bn_fe, bn_fe, (BIGNUM *) order, context->bn_ctx);
    VerifyOrReturnError(error_openssl == 1, CHIP_ERROR_INTERNAL);

    return CHIP_NO_ERROR;
}

// Write field element `fe` as a zero-padded big-endian string of exactly
// out_len bytes.
CHIP_ERROR Spake2p_P256_SHA256_HKDF_HMAC::FEWrite(const void * fe, uint8_t * out, size_t out_len)
{
    VerifyOrReturnError(CanCastTo<int>(out_len), CHIP_ERROR_INTERNAL);
    const int bn_out_len = BN_bn2binpad(static_cast<const BIGNUM *>(fe), Uint8::to_uchar(out), static_cast<int>(out_len));
    VerifyOrReturnError(bn_out_len == static_cast<int>(out_len), CHIP_ERROR_INTERNAL);
    return CHIP_NO_ERROR;
}

// Sample a uniformly random field element in [0, order).
CHIP_ERROR Spake2p_P256_SHA256_HKDF_HMAC::FEGenerate(void * fe)
{
    const int error_openssl = BN_rand_range(static_cast<BIGNUM *>(fe), static_cast<BIGNUM *>(order));
    VerifyOrReturnError(error_openssl == 1, CHIP_ERROR_INTERNAL);
    return CHIP_NO_ERROR;
}

// fer = fe1 * fe2 mod order.
CHIP_ERROR Spake2p_P256_SHA256_HKDF_HMAC::FEMul(void * fer, const void * fe1, const void * fe2)
{
    const Spake2p_Context * const context = to_inner_spake2p_context(&mSpake2pContext);
    const int error_openssl = BN_mod_mul(static_cast<BIGNUM *>(fer), static_cast<const BIGNUM *>(fe1),
                                         static_cast<const BIGNUM *>(fe2), static_cast<BIGNUM *>(order), context->bn_ctx);
    VerifyOrReturnError(error_openssl == 1, CHIP_ERROR_INTERNAL);
    return CHIP_NO_ERROR;
}

// Decode an octet string into EC point R.
CHIP_ERROR Spake2p_P256_SHA256_HKDF_HMAC::PointLoad(const uint8_t * in, size_t in_len, void * R)
{
    const Spake2p_Context * const context = to_inner_spake2p_context(&mSpake2pContext);
    const int error_openssl =
        EC_POINT_oct2point(context->curve, static_cast<EC_POINT *>(R), Uint8::to_const_uchar(in), in_len, context->bn_ctx);
    VerifyOrReturnError(error_openssl == 1, CHIP_ERROR_INTERNAL);
    return CHIP_NO_ERROR;
}

// Encode EC point R as an uncompressed octet string of exactly out_len bytes.
CHIP_ERROR Spake2p_P256_SHA256_HKDF_HMAC::PointWrite(const void * R, uint8_t * out, size_t out_len)
{
    const Spake2p_Context * const context = to_inner_spake2p_context(&mSpake2pContext);
    const size_t ec_out_len = EC_POINT_point2oct(context->curve, static_cast<const EC_POINT *>(R), POINT_CONVERSION_UNCOMPRESSED,
                                                 Uint8::to_uchar(out), out_len, context->bn_ctx);
    VerifyOrReturnError(ec_out_len == out_len, CHIP_ERROR_INTERNAL);
    return CHIP_NO_ERROR;
}

// R = fe1 * P1 (scalar-point multiplication).
CHIP_ERROR Spake2p_P256_SHA256_HKDF_HMAC::PointMul(void * R, const void * P1, const void * fe1)
{
    const Spake2p_Context * const context = to_inner_spake2p_context(&mSpake2pContext);
    const int error_openssl = EC_POINT_mul(context->curve, static_cast<EC_POINT *>(R), nullptr, static_cast<const EC_POINT *>(P1),
                                           static_cast<const BIGNUM *>(fe1), context->bn_ctx);
    VerifyOrReturnError(error_openssl == 1, CHIP_ERROR_INTERNAL);
    return CHIP_NO_ERROR;
}

// R = fe1 * P1 + fe2 * P2, using a scratch point for the first product.
CHIP_ERROR Spake2p_P256_SHA256_HKDF_HMAC::PointAddMul(void * R, const void * P1, const void * fe1, const void * P2,
                                                      const void * fe2)
{
    CHIP_ERROR error   = CHIP_ERROR_INTERNAL;
    int error_openssl  = 0;
    EC_POINT * scratch = nullptr;

    Spake2p_Context * context = to_inner_spake2p_context(&mSpake2pContext);

    scratch = EC_POINT_new(context->curve);
    VerifyOrExit(scratch != nullptr, error = CHIP_ERROR_INTERNAL);

    SuccessOrExit(error = PointMul(scratch, P1, fe1));
    SuccessOrExit(error = PointMul(R, P2, fe2));

    error_openssl = EC_POINT_add(context->curve, static_cast<EC_POINT *>(R), static_cast<EC_POINT *>(R),
                                 static_cast<const EC_POINT *>(scratch), context->bn_ctx);
    VerifyOrExit(error_openssl == 1, error = CHIP_ERROR_INTERNAL);

    error = CHIP_NO_ERROR;
exit:
    EC_POINT_clear_free(scratch);
    return error;
}

// R = -R.
CHIP_ERROR Spake2p_P256_SHA256_HKDF_HMAC::PointInvert(void * R)
{
    const Spake2p_Context * const context = to_inner_spake2p_context(&mSpake2pContext);
    const int error_openssl               = EC_POINT_invert(context->curve, static_cast<EC_POINT *>(R), context->bn_ctx);
    VerifyOrReturnError(error_openssl == 1, CHIP_ERROR_INTERNAL);
    return CHIP_NO_ERROR;
}

CHIP_ERROR Spake2p_P256_SHA256_HKDF_HMAC::PointCofactorMul(void * R)
{
    // Cofactor on P256 is 1 so this is a NOP
    return CHIP_NO_ERROR;
}

// Compute the SPAKE2+ verifier L = w1 * G (w1 reduced mod order) and
// serialize it, uncompressed, into `Lout`/`L_len`.
CHIP_ERROR Spake2p_P256_SHA256_HKDF_HMAC::ComputeL(uint8_t * Lout, size_t * L_len, const uint8_t * w1in, size_t w1in_len)
{
    CHIP_ERROR error      = CHIP_ERROR_INTERNAL;
    int error_openssl     = 0;
    BIGNUM * w1_bn        = nullptr;
    EC_POINT * Lout_point = nullptr;

    Spake2p_Context * context = to_inner_spake2p_context(&mSpake2pContext);

    w1_bn = BN_new();
    VerifyOrExit(w1_bn != nullptr, error = CHIP_ERROR_INTERNAL);

    Lout_point = EC_POINT_new(context->curve);
    VerifyOrExit(Lout_point != nullptr, error = CHIP_ERROR_INTERNAL);

    VerifyOrExit(CanCastTo<int>(w1in_len), error = CHIP_ERROR_INTERNAL);
    BN_bin2bn(Uint8::to_const_uchar(w1in), static_cast<int>(w1in_len), w1_bn);
    error_openssl = BN_mod(w1_bn, w1_bn, (BIGNUM *) order, context->bn_ctx);
    VerifyOrExit(error_openssl == 1, error = CHIP_ERROR_INTERNAL);

    // Passing w1_bn as the generator scalar computes w1 * G.
    error_openssl = EC_POINT_mul(context->curve, Lout_point, w1_bn, nullptr, nullptr, context->bn_ctx);
    VerifyOrExit(error_openssl == 1, error = CHIP_ERROR_INTERNAL);

    *L_len = EC_POINT_point2oct(context->curve, Lout_point, POINT_CONVERSION_UNCOMPRESSED, Uint8::to_uchar(Lout), *L_len,
                                context->bn_ctx);
    VerifyOrExit(*L_len != 0, error = CHIP_ERROR_INTERNAL);

    error = CHIP_NO_ERROR;
exit:
    BN_clear_free(w1_bn);
    EC_POINT_clear_free(Lout_point);
    return error;
}

// Check that R lies on the curve.
CHIP_ERROR Spake2p_P256_SHA256_HKDF_HMAC::PointIsValid(void * R)
{
    const Spake2p_Context * const context = to_inner_spake2p_context(&mSpake2pContext);
    const int error_openssl               = EC_POINT_is_on_curve(context->curve, static_cast<EC_POINT *>(R), context->bn_ctx);
    VerifyOrReturnError(error_openssl == 1, CHIP_ERROR_INTERNAL);
    return CHIP_NO_ERROR;
}

// Free an OpenSSL X509 stack along with the certificates it owns.
static void security_free_cert_list(X509_LIST * certs)
{
    if (certs)
    {
        sk_X509_pop_free(certs, X509_free);
    }
}

// Extract DER certificates from a PEM-encoded PKCS#7 bundle into `x509list`.
CHIP_ERROR LoadCertsFromPKCS7(const char * pkcs7, X509DerCertificate * x509list, uint32_t * max_certs)
{
    CHIP_ERROR err    = CHIP_NO_ERROR;
    X509_LIST * certs = NULL;
    BIO * bio_cert    = NULL;
    PKCS7 * p7        = NULL;
    int p7_type       = 0;

    VerifyOrExit(x509list != nullptr, err = CHIP_ERROR_INVALID_ARGUMENT);
    VerifyOrExit(max_certs != nullptr, err = CHIP_ERROR_INVALID_ARGUMENT);

    // NOTE(review): bio_cert is not null-checked before use -- confirm
    // PEM_read_bio_PKCS7 tolerates a null BIO, or add a check.
    bio_cert = BIO_new_mem_buf(pkcs7, -1);

    p7 = PEM_read_bio_PKCS7(bio_cert, NULL, NULL, NULL);
    VerifyOrExit(p7 != nullptr, err = CHIP_ERROR_WRONG_CERT_TYPE);

    p7_type = OBJ_obj2nid(p7->type);
    // Certificates live in different PKCS#7 content-type unions.
    if (p7_type == NID_pkcs7_signed)
    {
        certs = p7->d.sign->cert;
    }
    else if (p7_type == NID_pkcs7_signedAndEnveloped)
    {
        certs = p7->d.signed_and_enveloped->cert;
    }
VerifyOrExit(certs != NULL, err = CHIP_ERROR_WRONG_CERT_TYPE); VerifyOrExit(static_cast<uint32_t>(sk_X509_num(certs)) <= *max_certs, err = CHIP_ERROR_WRONG_CERT_TYPE); *max_certs = static_cast<uint32_t>(sk_X509_num(certs)); certs = X509_chain_up_ref(certs); for (uint32_t i = 0; i < *max_certs; ++i) { size_t bytes_written = 0; unsigned char * pX509ListEnd = x509list[i]; unsigned char ** pX509ListAux = &pX509ListEnd; bytes_written = static_cast<size_t>(i2d_X509(sk_X509_value(certs, static_cast<int>(i)), pX509ListAux)); VerifyOrExit(bytes_written <= x509list[i].Capacity(), err = CHIP_ERROR_NO_MEMORY); x509list[i].SetLength(bytes_written); } exit: BIO_free_all(bio_cert); PKCS7_free(p7); security_free_cert_list(certs); return err; } CHIP_ERROR LoadCertFromPKCS7(const char * pkcs7, X509DerCertificate * x509list, uint32_t n_cert) { CHIP_ERROR err = CHIP_NO_ERROR; X509_LIST * certs = NULL; BIO * bio_cert = NULL; PKCS7 * p7 = NULL; int p7_type = 0; VerifyOrExit(x509list != nullptr, err = CHIP_ERROR_INVALID_ARGUMENT); bio_cert = BIO_new_mem_buf(pkcs7, -1); p7 = PEM_read_bio_PKCS7(bio_cert, NULL, NULL, NULL); VerifyOrExit(p7 != nullptr, err = CHIP_ERROR_WRONG_CERT_TYPE); p7_type = OBJ_obj2nid(p7->type); if (p7_type == NID_pkcs7_signed) { certs = p7->d.sign->cert; } else if (p7_type == NID_pkcs7_signedAndEnveloped) { certs = p7->d.signed_and_enveloped->cert; } VerifyOrExit(certs != NULL, err = CHIP_ERROR_WRONG_CERT_TYPE); VerifyOrExit(n_cert < static_cast<uint32_t>(sk_X509_num(certs)), err = CHIP_ERROR_INVALID_ARGUMENT); certs = X509_chain_up_ref(certs); { size_t bytes_written = 0; unsigned char * pX509ListEnd = reinterpret_cast<unsigned char *>(x509list); unsigned char ** pX509ListAux = &pX509ListEnd; bytes_written = static_cast<size_t>(i2d_X509(sk_X509_value(certs, static_cast<int>(n_cert)), pX509ListAux)); VerifyOrExit(bytes_written <= x509list->Capacity(), err = CHIP_ERROR_NO_MEMORY); x509list->SetLength(bytes_written); } exit: BIO_free_all(bio_cert); PKCS7_free(p7); 
security_free_cert_list(certs); return err; } CHIP_ERROR GetNumberOfCertsFromPKCS7(const char * pkcs7, uint32_t * n_certs) { CHIP_ERROR err = CHIP_NO_ERROR; X509_LIST * certs = NULL; BIO * bio_cert = NULL; PKCS7 * p7 = NULL; int p7_type = 0; VerifyOrExit(n_certs != nullptr, err = CHIP_ERROR_INVALID_ARGUMENT); bio_cert = BIO_new_mem_buf(pkcs7, -1); p7 = PEM_read_bio_PKCS7(bio_cert, NULL, NULL, NULL); VerifyOrExit(p7 != nullptr, err = CHIP_ERROR_WRONG_CERT_TYPE); p7_type = OBJ_obj2nid(p7->type); if (p7_type == NID_pkcs7_signed) { certs = p7->d.sign->cert; } else if (p7_type == NID_pkcs7_signedAndEnveloped) { certs = p7->d.signed_and_enveloped->cert; } VerifyOrExit(certs != NULL, err = CHIP_ERROR_WRONG_CERT_TYPE); *n_certs = static_cast<uint32_t>(sk_X509_num(certs)); exit: BIO_free_all(bio_cert); PKCS7_free(p7); return err; } CHIP_ERROR ValidateCertificateChain(const uint8_t * rootCertificate, size_t rootCertificateLen, const uint8_t * caCertificate, size_t caCertificateLen, const uint8_t * leafCertificate, size_t leafCertificateLen) { CHIP_ERROR err = CHIP_NO_ERROR; int status = 0; X509_STORE_CTX * verifyCtx = nullptr; X509_STORE * store = nullptr; X509 * x509RootCertificate = nullptr; X509 * x509CACertificate = nullptr; X509 * x509LeafCertificate = nullptr; store = X509_STORE_new(); VerifyOrExit(store != nullptr, err = CHIP_ERROR_NO_MEMORY); verifyCtx = X509_STORE_CTX_new(); VerifyOrExit(verifyCtx != nullptr, err = CHIP_ERROR_NO_MEMORY); x509RootCertificate = d2i_X509(NULL, &rootCertificate, static_cast<long>(rootCertificateLen)); VerifyOrExit(x509RootCertificate != nullptr, err = CHIP_ERROR_NO_MEMORY); status = X509_STORE_add_cert(store, x509RootCertificate); VerifyOrExit(status == 1, err = CHIP_ERROR_INTERNAL); if (caCertificate != nullptr && caCertificateLen != 0) { x509CACertificate = d2i_X509(NULL, &caCertificate, static_cast<long>(caCertificateLen)); VerifyOrExit(x509CACertificate != nullptr, err = CHIP_ERROR_NO_MEMORY); status = X509_STORE_add_cert(store, 
x509CACertificate); VerifyOrExit(status == 1, err = CHIP_ERROR_INTERNAL); } x509LeafCertificate = d2i_X509(NULL, &leafCertificate, static_cast<long>(leafCertificateLen)); VerifyOrExit(x509LeafCertificate != nullptr, err = CHIP_ERROR_NO_MEMORY); status = X509_STORE_CTX_init(verifyCtx, store, x509LeafCertificate, NULL); VerifyOrExit(status == 1, err = CHIP_ERROR_INTERNAL); // TODO: If any specific error occurs here, it should be flagged accordingly status = X509_verify_cert(verifyCtx); VerifyOrExit(status == 1, err = CHIP_ERROR_CERT_NOT_TRUSTED); err = CHIP_NO_ERROR; exit: X509_free(x509LeafCertificate); X509_free(x509CACertificate); X509_free(x509RootCertificate); X509_STORE_CTX_free(verifyCtx); X509_STORE_free(store); return err; } CHIP_ERROR IsCertificateValidAtIssuance(const ByteSpan & referenceCertificate, const ByteSpan & toBeEvaluatedCertificate) { CHIP_ERROR error = CHIP_NO_ERROR; X509 * x509ReferenceCertificate = nullptr; X509 * x509toBeEvaluatedCertificate = nullptr; const unsigned char * pReferenceCertificate = referenceCertificate.data(); const unsigned char * pToBeEvaluatedCertificate = toBeEvaluatedCertificate.data(); ASN1_TIME * refNotBeforeTime = nullptr; ASN1_TIME * tbeNotBeforeTime = nullptr; ASN1_TIME * tbeNotAfterTime = nullptr; // int result = 0; VerifyOrReturnError(!referenceCertificate.empty() && !toBeEvaluatedCertificate.empty(), CHIP_ERROR_INVALID_ARGUMENT); x509ReferenceCertificate = d2i_X509(NULL, &pReferenceCertificate, static_cast<long>(referenceCertificate.size())); VerifyOrExit(x509ReferenceCertificate != nullptr, error = CHIP_ERROR_NO_MEMORY); x509toBeEvaluatedCertificate = d2i_X509(NULL, &pToBeEvaluatedCertificate, static_cast<long>(toBeEvaluatedCertificate.size())); VerifyOrExit(x509toBeEvaluatedCertificate != nullptr, error = CHIP_ERROR_NO_MEMORY); refNotBeforeTime = X509_get_notBefore(x509ReferenceCertificate); tbeNotBeforeTime = X509_get_notBefore(x509toBeEvaluatedCertificate); tbeNotAfterTime = 
X509_get_notAfter(x509toBeEvaluatedCertificate); VerifyOrExit(refNotBeforeTime && tbeNotBeforeTime && tbeNotAfterTime, error = CHIP_ERROR_INTERNAL); // TODO: Handle PAA/PAI re-issue and enable below time validations // result = ASN1_TIME_compare(refNotBeforeTime, tbeNotBeforeTime); // check if referenceCertificate is issued at or after tbeCertificate's notBefore timestamp // VerifyOrExit(result >= 0, error = CHIP_ERROR_CERT_EXPIRED); // result = ASN1_TIME_compare(refNotBeforeTime, tbeNotAfterTime); // check if referenceCertificate is issued at or before tbeCertificate's notAfter timestamp // VerifyOrExit(result <= 0, error = CHIP_ERROR_CERT_EXPIRED); exit: X509_free(x509ReferenceCertificate); X509_free(x509toBeEvaluatedCertificate); return error; } CHIP_ERROR IsCertificateValidAtCurrentTime(const ByteSpan & certificate) { CHIP_ERROR error = CHIP_NO_ERROR; X509 * x509Certificate = nullptr; const unsigned char * pCertificate = certificate.data(); ASN1_TIME * time = nullptr; int result = 0; VerifyOrReturnError(!certificate.empty(), CHIP_ERROR_INVALID_ARGUMENT); x509Certificate = d2i_X509(NULL, &pCertificate, static_cast<long>(certificate.size())); VerifyOrExit(x509Certificate != nullptr, error = CHIP_ERROR_NO_MEMORY); time = X509_get_notBefore(x509Certificate); VerifyOrExit(time, error = CHIP_ERROR_INTERNAL); result = X509_cmp_current_time(time); // check if certificate's notBefore timestamp is earlier than or equal to current time. VerifyOrExit(result == -1, error = CHIP_ERROR_CERT_EXPIRED); time = X509_get_notAfter(x509Certificate); VerifyOrExit(time, error = CHIP_ERROR_INTERNAL); result = X509_cmp_current_time(time); // check if certificate's notAfter timestamp is later than current time. 
VerifyOrExit(result == 1, error = CHIP_ERROR_CERT_EXPIRED); exit: X509_free(x509Certificate); return error; } CHIP_ERROR ExtractPubkeyFromX509Cert(const ByteSpan & certificate, Crypto::P256PublicKey & pubkey) { CHIP_ERROR err = CHIP_NO_ERROR; EVP_PKEY * pkey = nullptr; X509 * x509certificate = nullptr; const unsigned char * pCertificate = certificate.data(); const unsigned char ** ppCertificate = &pCertificate; unsigned char * pPubkey = pubkey; unsigned char ** ppPubkey = &pPubkey; int pkeyLen; x509certificate = d2i_X509(NULL, ppCertificate, static_cast<long>(certificate.size())); VerifyOrExit(x509certificate != nullptr, err = CHIP_ERROR_NO_MEMORY); pkey = X509_get_pubkey(x509certificate); VerifyOrExit(pkey != nullptr, err = CHIP_ERROR_INTERNAL); VerifyOrExit(EVP_PKEY_base_id(pkey) == EVP_PKEY_EC, err = CHIP_ERROR_INTERNAL); VerifyOrExit(EVP_PKEY_bits(pkey) == 256, err = CHIP_ERROR_INTERNAL); pkeyLen = i2d_PublicKey(pkey, NULL); VerifyOrExit(pkeyLen == static_cast<int>(pubkey.Length()), err = CHIP_ERROR_INTERNAL); VerifyOrExit(i2d_PublicKey(pkey, ppPubkey) == pkeyLen, err = CHIP_ERROR_INTERNAL); exit: EVP_PKEY_free(pkey); X509_free(x509certificate); return err; } namespace { CHIP_ERROR ExtractKIDFromX509Cert(bool isSKID, const ByteSpan & certificate, MutableByteSpan & kid) { CHIP_ERROR err = CHIP_NO_ERROR; X509 * x509certificate = nullptr; const unsigned char * pCertificate = certificate.data(); const unsigned char ** ppCertificate = &pCertificate; const ASN1_OCTET_STRING * kidString = nullptr; x509certificate = d2i_X509(NULL, ppCertificate, static_cast<long>(certificate.size())); VerifyOrExit(x509certificate != nullptr, err = CHIP_ERROR_NO_MEMORY); kidString = isSKID ? 
X509_get0_subject_key_id(x509certificate) : X509_get0_authority_key_id(x509certificate); VerifyOrExit(kidString != nullptr, err = CHIP_ERROR_INVALID_ARGUMENT); VerifyOrExit(kidString->length <= static_cast<int>(kid.size()), err = CHIP_ERROR_BUFFER_TOO_SMALL); VerifyOrExit(CanCastTo<size_t>(kidString->length), err = CHIP_ERROR_INVALID_ARGUMENT); memcpy(kid.data(), kidString->data, static_cast<size_t>(kidString->length)); kid.reduce_size(static_cast<size_t>(kidString->length)); exit: X509_free(x509certificate); return err; } } // namespace CHIP_ERROR ExtractSKIDFromX509Cert(const ByteSpan & certificate, MutableByteSpan & skid) { return ExtractKIDFromX509Cert(true, certificate, skid); } CHIP_ERROR ExtractAKIDFromX509Cert(const ByteSpan & certificate, MutableByteSpan & akid) { return ExtractKIDFromX509Cert(false, certificate, akid); } namespace { CHIP_ERROR ExtractDNAttributeFromX509Cert(const char * oidString, const ByteSpan & certificate, uint16_t & id) { CHIP_ERROR err = CHIP_NO_ERROR; X509 * x509certificate = nullptr; const unsigned char * pCertificate = certificate.data(); size_t oidStringSize = strlen(oidString) + 1; constexpr size_t sOidStringSize = 22; char dnAttributeOidString[sOidStringSize] = { 0 }; X509_NAME * subject = nullptr; int x509EntryCountIdx = 0; VerifyOrReturnError(oidStringSize == sOidStringSize, CHIP_ERROR_INVALID_ARGUMENT); x509certificate = d2i_X509(NULL, &pCertificate, static_cast<long>(certificate.size())); VerifyOrExit(x509certificate != nullptr, err = CHIP_ERROR_NO_MEMORY); subject = X509_get_subject_name(x509certificate); VerifyOrExit(subject != nullptr, err = CHIP_ERROR_INTERNAL); for (x509EntryCountIdx = 0; x509EntryCountIdx < X509_NAME_entry_count(subject); ++x509EntryCountIdx) { X509_NAME_ENTRY * name_entry = X509_NAME_get_entry(subject, x509EntryCountIdx); VerifyOrExit(name_entry != nullptr, err = CHIP_ERROR_INTERNAL); ASN1_OBJECT * object = X509_NAME_ENTRY_get_object(name_entry); VerifyOrExit(object != nullptr, err = 
CHIP_ERROR_INTERNAL); VerifyOrExit(OBJ_obj2txt(dnAttributeOidString, sizeof(dnAttributeOidString), object, 0) != 0, err = CHIP_ERROR_INTERNAL); if (strncmp(oidString, dnAttributeOidString, sizeof(dnAttributeOidString)) == 0) { ASN1_STRING * data_entry = X509_NAME_ENTRY_get_data(name_entry); VerifyOrExit(data_entry != nullptr, err = CHIP_ERROR_INTERNAL); unsigned char * str = ASN1_STRING_data(data_entry); VerifyOrExit(str != nullptr, err = CHIP_ERROR_INTERNAL); VerifyOrExit(ArgParser::ParseInt(reinterpret_cast<const char *>(str), id, 16), err = CHIP_ERROR_INTERNAL); break; } } // returning CHIP_ERROR_KEY_NOT_FOUND to indicate VID is not present in the certificate. VerifyOrExit(x509EntryCountIdx < X509_NAME_entry_count(subject), err = CHIP_ERROR_KEY_NOT_FOUND); exit: X509_free(x509certificate); return err; } } // namespace CHIP_ERROR ExtractDNAttributeFromX509Cert(MatterOid matterOid, const ByteSpan & certificate, uint16_t & id) { constexpr char vidOidString[] = "1.3.6.1.4.1.37244.2.1"; // Matter VID OID - taken from Spec constexpr char pidOidString[] = "1.3.6.1.4.1.37244.2.2"; // Matter PID OID - taken from Spec switch (matterOid) { case MatterOid::kVendorId: id = VendorId::NotSpecified; return ExtractDNAttributeFromX509Cert(vidOidString, certificate, id); case MatterOid::kProductId: id = 0; // PID not specified value return ExtractDNAttributeFromX509Cert(pidOidString, certificate, id); default: return CHIP_ERROR_INVALID_ARGUMENT; } } } // namespace Crypto } // namespace chip
AlhonGelios/AO
org/openxmlformats/schemas/spreadsheetml/x2006/main/CTFonts.java
package org.openxmlformats.schemas.spreadsheetml.x2006.main;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.net.URL;
import java.util.List;
import javax.xml.stream.XMLStreamReader;
import org.apache.poi.POIXMLTypeLoader;
import org.apache.xmlbeans.SchemaType;
import org.apache.xmlbeans.XmlBeans;
import org.apache.xmlbeans.XmlException;
import org.apache.xmlbeans.XmlObject;
import org.apache.xmlbeans.XmlOptions;
import org.apache.xmlbeans.XmlUnsignedInt;
import org.apache.xmlbeans.xml.stream.XMLInputStream;
import org.apache.xmlbeans.xml.stream.XMLStreamException;
import org.openxmlformats.schemas.spreadsheetml.x2006.main.CTFont;
import org.w3c.dom.Node;

/**
 * XMLBeans-generated binding interface for the SpreadsheetML CT_Fonts complex
 * type: a collection of {@link CTFont} children plus an optional "count"
 * attribute. Auto-generated from the OOXML schema — do not hand-edit the
 * member signatures; regenerate from the schema instead.
 */
public interface CTFonts extends XmlObject {
    // Schema type handle resolved from the compiled XMLBeans type system
    // embedded on the classpath.
    SchemaType type = (SchemaType)XmlBeans.typeSystemForClassLoader(CTFonts.class.getClassLoader(), "schemaorg_apache_xmlbeans.system.sF1327CCA741569E70F9CA8C9AF9B44B2").resolveHandle("ctfonts6623type");

    // --- Accessors for the repeated <font> child element ---

    List getFontList();

    CTFont[] getFontArray();

    CTFont getFontArray(int var1);

    int sizeOfFontArray();

    void setFontArray(CTFont[] var1);

    void setFontArray(int var1, CTFont var2);

    CTFont insertNewFont(int var1);

    CTFont addNewFont();

    void removeFont(int var1);

    // --- Accessors for the optional "count" attribute (xsd:unsignedInt) ---

    long getCount();

    XmlUnsignedInt xgetCount();

    boolean isSetCount();

    void setCount(long var1);

    void xsetCount(XmlUnsignedInt var1);

    void unsetCount();

    /**
     * Generated factory: instance-creation and parsing entry points, all
     * delegating to {@link POIXMLTypeLoader} with this interface's schema type.
     */
    public static final class Factory {
        public static CTFonts newInstance() {
            return (CTFonts)POIXMLTypeLoader.newInstance(CTFonts.type, (XmlOptions)null);
        }

        public static CTFonts newInstance(XmlOptions var0) {
            return (CTFonts)POIXMLTypeLoader.newInstance(CTFonts.type, var0);
        }

        public static CTFonts parse(String var0) throws XmlException {
            return (CTFonts)POIXMLTypeLoader.parse(var0, CTFonts.type, (XmlOptions)null);
        }

        public static CTFonts parse(String var0, XmlOptions var1) throws XmlException {
            return (CTFonts)POIXMLTypeLoader.parse(var0, CTFonts.type, var1);
        }

        public static CTFonts parse(File var0) throws XmlException, IOException {
            return (CTFonts)POIXMLTypeLoader.parse(var0, CTFonts.type, (XmlOptions)null);
        }

        public static CTFonts parse(File var0, XmlOptions var1) throws XmlException, IOException {
            return (CTFonts)POIXMLTypeLoader.parse(var0, CTFonts.type, var1);
        }

        public static CTFonts parse(URL var0) throws XmlException, IOException {
            return (CTFonts)POIXMLTypeLoader.parse(var0, CTFonts.type, (XmlOptions)null);
        }

        public static CTFonts parse(URL var0, XmlOptions var1) throws XmlException, IOException {
            return (CTFonts)POIXMLTypeLoader.parse(var0, CTFonts.type, var1);
        }

        public static CTFonts parse(InputStream var0) throws XmlException, IOException {
            return (CTFonts)POIXMLTypeLoader.parse(var0, CTFonts.type, (XmlOptions)null);
        }

        public static CTFonts parse(InputStream var0, XmlOptions var1) throws XmlException, IOException {
            return (CTFonts)POIXMLTypeLoader.parse(var0, CTFonts.type, var1);
        }

        public static CTFonts parse(Reader var0) throws XmlException, IOException {
            return (CTFonts)POIXMLTypeLoader.parse(var0, CTFonts.type, (XmlOptions)null);
        }

        public static CTFonts parse(Reader var0, XmlOptions var1) throws XmlException, IOException {
            return (CTFonts)POIXMLTypeLoader.parse(var0, CTFonts.type, var1);
        }

        public static CTFonts parse(XMLStreamReader var0) throws XmlException {
            return (CTFonts)POIXMLTypeLoader.parse(var0, CTFonts.type, (XmlOptions)null);
        }

        public static CTFonts parse(XMLStreamReader var0, XmlOptions var1) throws XmlException {
            return (CTFonts)POIXMLTypeLoader.parse(var0, CTFonts.type, var1);
        }

        public static CTFonts parse(Node var0) throws XmlException {
            return (CTFonts)POIXMLTypeLoader.parse(var0, CTFonts.type, (XmlOptions)null);
        }

        public static CTFonts parse(Node var0, XmlOptions var1) throws XmlException {
            return (CTFonts)POIXMLTypeLoader.parse(var0, CTFonts.type, var1);
        }

        public static CTFonts parse(XMLInputStream var0) throws XmlException, XMLStreamException {
            return (CTFonts)POIXMLTypeLoader.parse(var0, CTFonts.type, (XmlOptions)null);
        }

        public static CTFonts parse(XMLInputStream var0, XmlOptions var1) throws XmlException, XMLStreamException {
            return (CTFonts)POIXMLTypeLoader.parse(var0, CTFonts.type, var1);
        }

        public static XMLInputStream newValidatingXMLInputStream(XMLInputStream var0) throws XmlException, XMLStreamException {
            return POIXMLTypeLoader.newValidatingXMLInputStream(var0, CTFonts.type, (XmlOptions)null);
        }

        public static XMLInputStream newValidatingXMLInputStream(XMLInputStream var0, XmlOptions var1) throws XmlException, XMLStreamException {
            return POIXMLTypeLoader.newValidatingXMLInputStream(var0, CTFonts.type, var1);
        }
    }
}
tinamil/Dungeon-Crawl
DungeonCrawl/test/pavlik/john/dungeoncrawl/model/events/EventTest.java
<gh_stars>0
package pavlik.john.dungeoncrawl.model.events;

import java.util.HashSet;
import java.util.Set;

import pavlik.john.dungeoncrawl.model.events.Action;
import pavlik.john.dungeoncrawl.model.events.Conditional;
import pavlik.john.dungeoncrawl.model.events.Event;

import junit.framework.TestCase;

/**
 * Unit tests for {@link Event}: verifies that executing an event runs the
 * {@link Action}s it was constructed with.
 *
 * @author <NAME>
 * @see Action
 */
public class EventTest extends TestCase {

  // Event under test, rebuilt for every test method by setUp().
  Event event;
  // Side channel written by the anonymous Action below; stays null until the
  // action actually runs, so tests can observe execution.
  String message;

  @Override
  protected void setUp() throws Exception {
    super.setUp();
    message = null;
    final Set<Action> actions = new HashSet<>();
    // Stub action that records it fired by flipping the shared message field.
    actions.add(new Action("ActionTag", "Action") {
      /**
       *
       */
      private static final long serialVersionUID = 1L;

      @Override
      public boolean performAction(pavlik.john.dungeoncrawl.model.Character c) {
        message = "Not null";
        return true;
      }
    });
    // Stub conditional that only passes while the action has not yet run.
    final Set<Conditional> conditionals = new HashSet<>();
    conditionals.add(new Conditional("ConditionalTag", "Conditional") {
      /**
       *
       */
      private static final long serialVersionUID = 1L;

      @Override
      public boolean meetsConditions(pavlik.john.dungeoncrawl.model.Character c) {
        return message == null;
      }
    });
    // NOTE(review): 'conditionals' is built but never handed to Event — only
    // 'actions' reaches the constructor. Presumably leftover from an older
    // Event(actions, conditionals) signature; confirm before deleting.
    event = new Event(actions);
  }

  /**
   * Test method for pavlik.john.dungeoncrawl.model.events.Action constructor
   */
  public void testConstructor() {
    // SetUp is sufficient
  }

  /**
   * Test method for pavlik.john.dungeoncrawl.model.Event#execute()
   */
  public void testExecute() {
    // The action has not run yet, so the side channel must still be null.
    assertNull(message);
    event.execute(null);
    // execute() must have invoked the stub action exactly as registered.
    assertEquals("Not null", message);
  }
}
xiao-ren-wu/ono
ono-toolkit/src/main/java/org/ywb/ono/toolkit/crypto/AES.java
package org.ywb.ono.toolkit.crypto;

import javax.crypto.Cipher;
import javax.crypto.spec.IvParameterSpec;
import javax.crypto.spec.SecretKeySpec;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.Base64;
import java.util.Locale;
import java.util.UUID;

/**
 * AES CBC-mode encryption/decryption utility (original Chinese doc: "AES CBC
 * mode encryption utility class").
 *
 * <p>NOTE(security): this class reuses the key bytes as the CBC IV
 * ({@code new IvParameterSpec(key)}). A CBC IV should be random and unique per
 * message; reusing the key as IV leaks equality of plaintext prefixes across
 * messages. Kept as-is for compatibility with existing ciphertexts — flagged,
 * not changed.
 *
 * <p>Because the same bytes serve as the 16-byte IV, the key must be exactly
 * 16 bytes (AES-128); other lengths fail at {@code Cipher.init}.
 *
 * @author yuwenbo1
 * @date 2021/3/18 21:43
 * @since 1.0.0
 */
public class AES {

    /** Cipher transformation shared by all operations. */
    private static final String TRANSFORMATION = "AES/CBC/PKCS5Padding";

    /**
     * Encrypts raw bytes with AES/CBC/PKCS5Padding.
     *
     * @param data plaintext bytes
     * @param key  16-byte key, also (mis)used as the IV — see class note
     * @return ciphertext bytes
     * @throws RuntimeException wrapping any crypto failure
     */
    public static byte[] encrypt(byte[] data, byte[] key) {
        try {
            // The original wrapped the key in a SecretKeySpec, called
            // getEncoded() (which returns a copy of the same bytes) and wrapped
            // that again; a single SecretKeySpec is equivalent.
            SecretKeySpec keySpec = new SecretKeySpec(key, "AES");
            Cipher cipher = Cipher.getInstance(TRANSFORMATION);
            cipher.init(Cipher.ENCRYPT_MODE, keySpec, new IvParameterSpec(key));
            return cipher.doFinal(data);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Decrypts raw bytes with AES/CBC/PKCS5Padding.
     *
     * @param data ciphertext bytes
     * @param key  16-byte key, also used as the IV — see class note
     * @return plaintext bytes
     * @throws RuntimeException wrapping any crypto failure (including bad padding)
     */
    public static byte[] decrypt(byte[] data, byte[] key) {
        try {
            SecretKeySpec keySpec = new SecretKeySpec(key, "AES");
            Cipher cipher = Cipher.getInstance(TRANSFORMATION);
            cipher.init(Cipher.DECRYPT_MODE, keySpec, new IvParameterSpec(key));
            return cipher.doFinal(data);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Encrypts a string (UTF-8) and returns Base64-encoded ciphertext.
     *
     * @param data plaintext
     * @param key  16-character key (UTF-8 bytes used as key and IV)
     * @return Base64 ciphertext
     */
    public static String encrypt(String data, String key) {
        byte[] valueByte = encrypt(data.getBytes(StandardCharsets.UTF_8), key.getBytes(StandardCharsets.UTF_8));
        return Base64.getEncoder().encodeToString(valueByte);
    }

    /**
     * Decrypts Base64 ciphertext produced by {@link #encrypt(String, String)}.
     *
     * @param data Base64 ciphertext
     * @param key  16-character key
     * @return plaintext
     */
    public static String decrypt(String data, String key) {
        byte[] originalData = Base64.getDecoder().decode(data.getBytes(StandardCharsets.UTF_8));
        byte[] valueByte = decrypt(originalData, key.getBytes(StandardCharsets.UTF_8));
        // Fix: encrypt(String, String) encodes the plaintext as UTF-8, but the
        // original decoded here with the platform-default charset, breaking
        // round-trips on non-UTF-8 platforms.
        return new String(valueByte, StandardCharsets.UTF_8);
    }

    /**
     * Encrypts a string and returns Base64 ciphertext, encoding the KEY with
     * the supplied charset.
     *
     * <p>NOTE(review): only the key honours {@code charset}; the plaintext is
     * always encoded as UTF-8, mirroring the original behaviour. Confirm
     * whether the data should use the charset too before changing this.
     */
    public static String encrypt(String data, String key, Charset charset) {
        byte[] valueByte = encrypt(data.getBytes(StandardCharsets.UTF_8), key.getBytes(charset));
        return Base64.getEncoder().encodeToString(valueByte);
    }

    /**
     * Decrypts Base64 ciphertext produced by
     * {@link #encrypt(String, String, Charset)}; the key is encoded with the
     * supplied charset, the plaintext is decoded as UTF-8 (matching encrypt).
     */
    public static String decrypt(String data, String key, Charset charset) {
        byte[] originalData = Base64.getDecoder().decode(data.getBytes(StandardCharsets.UTF_8));
        byte[] valueByte = decrypt(originalData, key.getBytes(charset));
        // Same round-trip fix as decrypt(String, String): the sibling encrypt
        // overload always produced UTF-8 plaintext bytes.
        return new String(valueByte, StandardCharsets.UTF_8);
    }

    /**
     * Generates a random 16-character lowercase hex key string.
     *
     * <p>NOTE(security): derived from a type-4 UUID substring, so one nibble
     * is the fixed version digit and the effective entropy is roughly 60 bits.
     * Acceptable for casual use; prefer a SecureRandom-generated key for real
     * secrets. Kept as-is to avoid changing the established key format.
     */
    public static String generateRandomKey() {
        return UUID.randomUUID()
                .toString()
                .replaceAll("-", "")
                .toLowerCase(Locale.ROOT)
                .substring(0, 16);
    }
}
jasonzhang2022/algorithm
src/main/java/jason/datastructure/EnglishDictTrie.java
package jason.datastructure;

import java.util.function.Consumer;

/**
 * Minimal trie over the lowercase ASCII alphabet ('a'..'z'), with exact lookup
 * and a fuzzy lookup that tolerates one extra letter in the query.
 *
 * <p>NOTE(review): nodes carry no end-of-word marker, so lookups match any
 * stored PATH, not only complete inserted words (e.g. after adding "cat",
 * findMissingLetterWord("cat") reports "ca"). Confirm whether a terminal flag
 * is wanted before relying on exact-word semantics.
 *
 * <p>(Cleanup: dropped the scraper artifact prefix and the unused junit
 * imports that resolved nothing in this non-test class.)
 */
public class EnglishDictTrie {

    /** Character stored at this node; the root's value is arbitrary. */
    char value;

    /** 26-slot child table indexed by (c - 'a'); null until something is inserted below this node. */
    public EnglishDictTrie[] children;

    public EnglishDictTrie(char value) {
        this.value = value;
    }

    /**
     * Inserts word[offset..] beneath this node, creating nodes as needed.
     *
     * <p>Fix: the original dereferenced {@code children} without a null check,
     * so adding the first word to a freshly constructed root threw
     * NullPointerException unless the caller pre-allocated the table (the read
     * paths all guard {@code children == null}). The table is now allocated
     * lazily; callers that pre-allocated it are unaffected.
     */
    public void addWord(char[] word, int offset) {
        if (children == null) {
            children = new EnglishDictTrie[26];
        }
        char c = word[offset++];
        int index = c - 'a';
        if (children[index] == null) {
            children[index] = new EnglishDictTrie(c);
        }
        if (offset < word.length) {
            children[index].addWord(word, offset);
        }
    }

    /**
     * Follows word[offset..] down the trie; if the entire remainder is present
     * as a path, passes prefix + remainder to the consumer, otherwise emits
     * nothing.
     *
     * @param word     query characters ('a'..'z' only)
     * @param offset   position in word to match at this node
     * @param prefix   characters consumed so far (mutated by appending)
     * @param consumer receives the matched string
     */
    public void findWord(char[] word, int offset, StringBuilder prefix, Consumer<String> consumer) {
        if (offset == word.length) {
            consumer.accept(prefix.toString());
            return;
        }
        char c = word[offset++];
        int index = c - 'a';
        if (children == null || children[index] == null) {
            return;
        }
        children[index].findWord(word, offset, prefix.append(c), consumer);
    }

    /**
     * Reports trie entries obtainable from {@code word} by deleting exactly
     * one of its letters (i.e. dictionary entries one letter SHORTER than the
     * query, with the remaining letters in the same order).
     */
    public void findMissingLetterWord(char[] word, int offset, Consumer<String> consumer) {
        if (offset == word.length) {
            // Reached the end of the query without skipping a letter; a match
            // here would be the full word, not a one-letter-shorter one.
            return;
        }
        char c = word[offset++];
        int index = c - 'a';
        if (children == null || children[index] == null) {
            // Current letter is absent below this node, so the only option is
            // to treat it as the skipped one and match the rest exactly.
            StringBuilder sb = new StringBuilder();
            sb.append(word, 0, offset - 1);
            findWord(word, offset, sb, consumer);
            return;
        }
        // Option 1: skip the current letter at this position.
        StringBuilder sb = new StringBuilder();
        sb.append(word, 0, offset - 1);
        findWord(word, offset, sb, consumer);
        // Option 2: consume the current letter and let a deeper node skip one.
        children[index].findMissingLetterWord(word, offset, consumer);
    }
}
fnl/txtfnnl
txtfnnl-bin/src/test/java/txtfnnl/pipelines/TestPipeline.java
package txtfnnl.pipelines;

import java.io.BufferedWriter;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintStream;
import java.io.UnsupportedEncodingException;
import java.util.logging.Level;
import java.util.logging.Logger;

import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;

import org.apache.uima.UIMAException;
import org.apache.uima.analysis_engine.AnalysisEngine;
import org.apache.uima.cas.CAS;
import org.apache.uima.collection.CollectionException;
import org.apache.uima.collection.CollectionReader;
import org.apache.uima.collection.CollectionReaderDescription;
import org.apache.uima.resource.ResourceInitializationException;
import org.apache.uima.util.Progress;

import org.easymock.EasyMock;
import org.uimafit.component.CasCollectionReader_ImplBase;
import org.uimafit.factory.CollectionReaderFactory;

import txtfnnl.pipelines.Pipeline.XmlHandler;
import txtfnnl.tika.uima.TikaAnnotator;
import txtfnnl.tika.uima.TikaExtractor;
import txtfnnl.uima.collection.DirectoryReader;
import txtfnnl.uima.collection.FileReader;
import txtfnnl.uima.collection.TextWriter;

/**
 * Unit tests for {@link Pipeline}: command-line option registration, logging
 * setup, reader selection, Tika configuration, engine slots and a full run.
 */
public class TestPipeline {
  @Before
  public void setUp() throws Exception {}

  // --- command-line option registration -----------------------------------

  @Test
  public final void testAddLogAndHelpOptions() {
    final Options opts = new Options();
    Pipeline.addLogHelpAndInputOptions(opts);
    // every expected short/long option must be registered
    for (final String o : new String[] { "h", "help", "i", "info", "q", "quiet", "v", "verbose",
        "R", "recursive", "mime" }) {
      Assert.assertNotNull(o, opts.getOption(o));
    }
    Assert.assertNull(opts.getOption("dummy"));
  }

  @Test
  public final void testAddTikaOptions() {
    final Options opts = new Options();
    Pipeline.addTikaOptions(opts);
    for (final String o : new String[] { "e", "input-encoding", "x", "xml-handler",
        "normalgreek", }) {
      Assert.assertNotNull(o, opts.getOption(o));
    }
    Assert.assertNull(opts.getOption("dummy"));
  }

  // --- Tika XML handler selection -----------------------------------------

  @Test
  public final void testGetHandler() throws ParseException {
    final Options opts = new Options();
    Pipeline.addTikaOptions(opts);
    // all known handler names resolve to a handler
    for (final String o : new String[] { "clean", "elsevier", "default" }) {
      final CommandLine cmd = (new PosixParser()).parse(opts, new String[] { "-x", o });
      Assert.assertNotNull(Pipeline.getTikaXmlHandler(cmd));
    }
  }

  @Test(expected = IllegalArgumentException.class)
  public final void testGetHandler_IllegalHandler() throws ParseException,
      UnsupportedEncodingException {
    final Options opts = new Options();
    Pipeline.addTikaOptions(opts);
    final CommandLine cmd = (new PosixParser()).parse(opts, new String[] { "-x", "illegal" });
    final ByteArrayOutputStream tmperr = new ByteArrayOutputStream();
    final PrintStream errout = System.err;
    System.setErr(new PrintStream(tmperr, true, "UTF-8")); // redirect STDERR
    try {
      Pipeline.getTikaXmlHandler(cmd);
    } finally {
      System.setErr(errout);
      String capture = tmperr.toString("UTF-8"); // capture STDERR
      Assert.assertEquals("no such XML handler: illegal\n", capture);
    }
  }

  // --- logging setup -------------------------------------------------------

  @Test
  public final void testQuietLoggingSetup() throws ParseException {
    final Options opts = new Options();
    Pipeline.addLogHelpAndInputOptions(opts);
    final CommandLine cmd = (new PosixParser()).parse(opts, new String[] { "-q" });
    final Logger l = Pipeline.loggingSetup(cmd, opts, "USAGE");
    // -q keeps SEVERE only
    Assert.assertTrue(l.isLoggable(Level.SEVERE));
    Assert.assertFalse(l.isLoggable(Level.WARNING));
    Assert.assertFalse(l.isLoggable(Level.FINE));
    Assert.assertEquals(Pipeline.class.getName(), l.getName());
  }

  @Test
  public final void testVerboseLoggingSetup() throws ParseException {
    final Options opts = new Options();
    Pipeline.addLogHelpAndInputOptions(opts);
    final CommandLine cmd = (new PosixParser()).parse(opts, new String[] { "-v" });
    final Logger l = Pipeline.loggingSetup(cmd, opts, "USAGE");
    // -v enables everything down to FINE
    Assert.assertTrue(l.isLoggable(Level.SEVERE));
    Assert.assertTrue(l.isLoggable(Level.WARNING));
    Assert.assertTrue(l.isLoggable(Level.FINE));
    Assert.assertEquals(Pipeline.class.getName(), l.getName());
  }

  /** Minimal no-op reader used to construct pipelines in the tests below. */
  public static class DummyReader extends CasCollectionReader_ImplBase {
    public void getNext(CAS aCAS) throws IOException, CollectionException {}

    public boolean hasNext() throws IOException, CollectionException {
      return false;
    }

    public Progress[] getProgress() {
      return null;
    }

    public static CollectionReaderDescription create() throws ResourceInitializationException {
      return CollectionReaderFactory.createDescription(DummyReader.class);
    }
  }

  // --- constructors ---------------------------------------------------------

  @Test
  public final void testPipeline_ReaderInt() throws ResourceInitializationException {
    final CollectionReaderDescription crd = DummyReader.create();
    final CollectionReader cr = CollectionReaderFactory.createCollectionReader(crd);
    final Pipeline p = new Pipeline(0, crd);
    Assert.assertEquals(cr.getClass(), p.getReader().getClass());
    Assert.assertEquals(0, p.size());
  }

  @Test
  public final void testPipeline_Int() {
    final Pipeline p = new Pipeline(0);
    Assert.assertEquals(null, p.getReader());
    Assert.assertEquals(0, p.size());
  }

  @Test(expected = IllegalArgumentException.class)
  public final void testPipeline_NegativeInt() {
    new Pipeline(-1);
  }

  @Test
  public final void testPipeline() {
    // default pipeline has one engine slot and no reader
    final Pipeline p = new Pipeline();
    Assert.assertEquals(null, p.getReader());
    Assert.assertEquals(1, p.size());
  }

  @Test
  public final void testPipeline_Reader() throws ResourceInitializationException {
    final CollectionReaderDescription crd = DummyReader.create();
    final CollectionReader cr = CollectionReaderFactory.createCollectionReader(crd);
    final Pipeline p = new Pipeline(cr);
    Assert.assertEquals(cr, p.getReader());
    Assert.assertEquals(1, p.size());
  }

  @Test
  public final void testPipeline_ReaderEngines() throws ResourceInitializationException {
    final CollectionReader r = CollectionReaderFactory
        .createCollectionReader(DummyReader.create());
    // last engine slot is reserved for the consumer, hence size() == 2
    final Pipeline p = new Pipeline(r, new AnalysisEngine[3]);
    Assert.assertEquals(r, p.getReader());
    Assert.assertEquals(2, p.size());
  }

  @Test
  public final void testPipeline_Engines() {
    final Pipeline p = new Pipeline(new AnalysisEngine[1]);
    Assert.assertEquals(0, p.size());
  }

  // --- reader configuration -------------------------------------------------

  @Test
  public final void testGetReader() throws ResourceInitializationException {
    final CollectionReader r = CollectionReaderFactory
        .createCollectionReader(DummyReader.create());
    final Pipeline p = new Pipeline(r);
    Assert.assertEquals(r, p.getReader());
  }

  @Test
  public final void testSetReader_Reader() throws ResourceInitializationException {
    final Pipeline p = new Pipeline();
    final CollectionReaderDescription r = DummyReader.create();
    final CollectionReader cr = CollectionReaderFactory.createCollectionReader(r);
    // first call returns null, second returns the previously set reader
    Assert.assertNull(p.setReader(r));
    Assert.assertEquals(cr.getClass(), p.setReader(r).getClass());
  }

  @Test
  public final void testSetReader_CommandLineDefault() throws ParseException, UIMAException,
      IOException {
    final Options opts = new Options();
    Pipeline.addLogHelpAndInputOptions(opts);
    final CommandLine cmd = (new PosixParser()).parse(opts, new String[] {});
    final Pipeline p = new Pipeline();
    Assert.assertNull(p.setReader(cmd));
    final CollectionReader r = p.getReader();
    // no arguments -> current directory -> DirectoryReader
    Assert.assertEquals(DirectoryReader.class.getName(), r.getClass().getName());
  }

  @Test
  public final void testSetReader_CommandLineDir() throws ParseException, UIMAException,
      IOException {
    final Options opts = new Options();
    Pipeline.addLogHelpAndInputOptions(opts);
    final CommandLine cmd = (new PosixParser()).parse(opts,
        new String[] { System.getProperty("user.dir") });
    final Pipeline p = new Pipeline();
    Assert.assertNull(p.setReader(cmd));
    final CollectionReader r = p.getReader();
    Assert.assertEquals(DirectoryReader.class.getName(), r.getClass().getName());
  }

  @Test(expected = IOException.class)
  public final void testSetReader_CommandLineDirMissing() throws ParseException, UIMAException,
      IOException {
    final Options opts = new Options();
    Pipeline.addLogHelpAndInputOptions(opts);
    final CommandLine cmd = (new PosixParser()).parse(opts, new String[] { "a" });
    final Pipeline p = new Pipeline();
    final ByteArrayOutputStream tmperr = new ByteArrayOutputStream();
    final PrintStream stderr = System.err;
    System.setErr(new PrintStream(tmperr, true, "UTF-8"));
    try {
      Assert.assertNull(p.setReader(cmd));
    } finally {
      System.setErr(stderr);
      // a missing path is reported on STDERR before the IOException
      Assert.assertEquals("cannot read 'a'\n", tmperr.toString("UTF-8"));
    }
  }

  @Test
  public final void testSetReader_CommandLineFile() throws ParseException, UIMAException,
      IOException {
    final File file = File.createTempFile("test_", null);
    file.deleteOnExit();
    final Options opts = new Options();
    Pipeline.addLogHelpAndInputOptions(opts);
    final CommandLine cmd = (new PosixParser()).parse(opts, new String[] { file.getPath() });
    final Pipeline p = new Pipeline();
    Assert.assertNull(p.setReader(cmd));
    final CollectionReader r = p.getReader();
    // file arguments select the FileReader
    Assert.assertEquals(FileReader.class.getName(), r.getClass().getName());
  }

  @Test
  public final void testSetReader_CommandLineFiles() throws ParseException, UIMAException,
      IOException {
    final File file = File.createTempFile("test_", null);
    file.deleteOnExit();
    final Options opts = new Options();
    Pipeline.addLogHelpAndInputOptions(opts);
    final CommandLine cmd = (new PosixParser()).parse(opts,
        new String[] { file.getPath(), file.getPath() });
    final Pipeline p = new Pipeline();
    Assert.assertNull(p.setReader(cmd));
    final CollectionReader r = p.getReader();
    Assert.assertEquals(FileReader.class.getName(), r.getClass().getName());
  }

  @Test(expected = IOException.class)
  public final void testSetReader_CommandLineFileMissing() throws ParseException, UIMAException,
      IOException {
    final Options opts = new Options();
    Pipeline.addLogHelpAndInputOptions(opts);
    final CommandLine cmd = (new PosixParser()).parse(opts, new String[] { "a", "b" });
    final Pipeline p = new Pipeline();
    final ByteArrayOutputStream tmperr = new ByteArrayOutputStream();
    final PrintStream stderr = System.err;
    System.setErr(new PrintStream(tmperr, true, "UTF-8"));
    try {
      Assert.assertNull(p.setReader(cmd));
    } finally {
      System.setErr(stderr);
      Assert.assertEquals("cannot read 'a'\n", tmperr.toString("UTF-8"));
    }
  }

  @Test
  public final void testSetReader_FilesMime() throws UIMAException, IOException {
    final Pipeline p = new Pipeline();
    final File tmpfile = File.createTempFile("test", "pipeline");
    Assert.assertTrue(tmpfile.exists() && tmpfile.canRead());
    Assert.assertNull(p.setReader(new String[] { tmpfile.getAbsolutePath() }, "MIME"));
    Assert.assertEquals(FileReader.class, p.getReader().getClass());
  }

  @Test
  public final void testSetReader_Files() throws UIMAException, IOException {
    final Pipeline p = new Pipeline();
    final File tmpfile = File.createTempFile("test", "pipeline");
    Assert.assertTrue(tmpfile.exists() && tmpfile.canRead());
    Assert.assertNull(p.setReader(new String[] { tmpfile.getAbsolutePath() }));
    Assert.assertEquals(FileReader.class, p.getReader().getClass());
  }

  @Test
  public final void testSetReader_DirMimeRecursive() throws UIMAException, IOException {
    final Pipeline p = new Pipeline();
    final File tmpdir = File.createTempFile("test", "pipeline").getParentFile();
    Assert.assertTrue(tmpdir.exists() && tmpdir.canRead());
    Assert.assertNull(p.setReader(tmpdir, "MIME", true));
    Assert.assertEquals(DirectoryReader.class, p.getReader().getClass());
  }

  @Test
  public final void testSetReader_DirMime() throws UIMAException, IOException {
    final Pipeline p = new Pipeline();
    final File tmpdir = File.createTempFile("test", "pipeline").getParentFile();
    Assert.assertTrue(tmpdir.exists() && tmpdir.canRead());
    Assert.assertNull(p.setReader(tmpdir, "MIME"));
    Assert.assertEquals(DirectoryReader.class, p.getReader().getClass());
  }

  @Test
  public final void testSetReader_DirRecursive() throws UIMAException, IOException {
    final Pipeline p = new Pipeline();
    final File tmpdir = File.createTempFile("test", "pipeline").getParentFile();
    Assert.assertTrue(tmpdir.exists() && tmpdir.canRead());
    Assert.assertNull(p.setReader(tmpdir, true));
    Assert.assertEquals(DirectoryReader.class, p.getReader().getClass());
  }

  @Test
  public final void testSetReader_Dir() throws UIMAException, IOException {
    final Pipeline p = new Pipeline();
    final File tmpdir = File.createTempFile("test", "pipeline").getParentFile();
    Assert.assertTrue(tmpdir.exists() && tmpdir.canRead());
    Assert.assertNull(p.setReader(tmpdir));
    Assert.assertEquals(DirectoryReader.class, p.getReader().getClass());
  }

  // --- consumer and Tika engine slots --------------------------------------

  @Test
  public final void testSetConsumer() {
    final Pipeline p = new Pipeline();
    final AnalysisEngine ae = EasyMock.createMock(AnalysisEngine.class);
    Assert.assertNull(p.setConsumer(ae));
    // the consumer occupies the slot at index size()
    Assert.assertEquals(ae, p.get(p.size()));
  }

  @Test
  public final void testConfigureTika_UsingDefaultValues() throws UIMAException, IOException {
    final Pipeline p = new Pipeline();
    Assert.assertNull(p.configureTika(0, true, "encoding", true, XmlHandler.DEFAULT));
    Assert.assertEquals(TikaExtractor.class.getName(), p.get(0).getAnalysisEngineMetaData()
        .getName());
  }

  @Test
  public final void testConfigureTika_UsingTikaAnnotator() throws UIMAException, IOException {
    final Pipeline p = new Pipeline();
    // second argument false selects the annotator instead of the extractor
    Assert.assertNull(p.configureTika(0, false, "encoding", true, XmlHandler.DEFAULT));
    Assert.assertEquals(TikaAnnotator.class.getName(), p.get(0).getAnalysisEngineMetaData()
        .getName());
  }

  @Test
  public final void testConfigureTike_CommandLineDefault() throws ParseException, UIMAException,
      IOException {
    final Options opts = new Options();
    Pipeline.addTikaOptions(opts);
    final CommandLine cmd = (new PosixParser()).parse(opts, new String[] {});
    final Pipeline p = new Pipeline();
    Assert.assertNull(p.configureTika(cmd));
    Assert.assertEquals(TikaExtractor.class.getName(), p.get(0).getAnalysisEngineMetaData()
        .getName());
  }

  @Test(expected = IllegalStateException.class)
  public final void testConfigureTika_OnTooShortPipeline() throws UIMAException, IOException {
    // a zero-engine pipeline has no slot for Tika
    final Pipeline p = new Pipeline(0);
    p.configureTika(0, true, "encoding", true, XmlHandler.DEFAULT);
  }

  @Test(expected = IllegalStateException.class)
  public final void testConfigureTika_AsLastElement() throws UIMAException, IOException {
    // Tika may not occupy the consumer slot
    final Pipeline p = new Pipeline();
    p.configureTika(1, true, "encoding", true, XmlHandler.DEFAULT);
  }

  @Test
  public final void testConfigureTika_InFirstPosUsingDefaultValues() throws UIMAException,
      IOException {
    final Pipeline p = new Pipeline();
    Assert.assertNull(p.configureTika(true, "encoding", true, XmlHandler.DEFAULT));
    Assert.assertEquals(TikaExtractor.class.getName(), p.get(0).getAnalysisEngineMetaData()
        .getName());
  }

  // --- engine slot accessors ------------------------------------------------

  @Test
  public final void testSetFirst() {
    final Pipeline p = new Pipeline();
    final AnalysisEngine ae = EasyMock.createMock(AnalysisEngine.class);
    Assert.assertNull(p.setFirst(ae));
    Assert.assertEquals(ae, p.get(0));
  }

  @Test
  public final void testSet() {
    final Pipeline p = new Pipeline();
    final AnalysisEngine ae = EasyMock.createMock(AnalysisEngine.class);
    Assert.assertNull(p.set(0, ae));
    Assert.assertEquals(ae, p.get(0));
  }

  @Test
  public final void testGet() {
    final AnalysisEngine ae = EasyMock.createMock(AnalysisEngine.class);
    final Pipeline p = new Pipeline(new AnalysisEngine[] { ae });
    Assert.assertEquals(ae, p.get(0));
  }

  @Test
  public final void testSetEngineArray() {
    final Pipeline p = new Pipeline();
    final AnalysisEngine ae = EasyMock.createMock(AnalysisEngine.class);
    // set() returns the previously configured (empty) engine array
    Assert.assertArrayEquals(new AnalysisEngine[2], p.set(new AnalysisEngine[] { ae }));
    Assert.assertEquals(ae, p.get(0));
  }

  @Test
  public final void testSize() {
    Pipeline p = new Pipeline(3);
    Assert.assertEquals(3, p.size());
    p = new Pipeline();
    Assert.assertEquals(1, p.size());
  }

  @Test
  public final void testIsReady() {
    final Pipeline p = new Pipeline(EasyMock.createMock(CollectionReader.class),
        EasyMock.createMock(AnalysisEngine.class));
    Assert.assertTrue(p.isReady());
    // clearing any slot makes the pipeline not ready
    p.set(0, null);
    Assert.assertFalse(p.isReady());
  }

  // --- end-to-end run -------------------------------------------------------

  @Test
  public final void testRun() throws IOException, UIMAException {
    // make a tmpfile
    final File tmp = File.createTempFile("test_", "txt");
    tmp.deleteOnExit();
    final BufferedWriter bw = new BufferedWriter(new FileWriter(tmp));
    bw.write("this is a test");
    bw.close();
    // setup to capture STDOUT
    final ByteArrayOutputStream tmpout = new ByteArrayOutputStream();
    final PrintStream stdout = System.out;
    String result = null;
    // super-simple pipeline: file reader -> Tika -> text writer on STDOUT
    final Pipeline p = new Pipeline();
    p.setReader(new String[] { tmp.getAbsolutePath() });
    p.configureTika();
    p.setConsumer(Pipeline.textEngine(TextWriter.configure().printToStdout().create()));
    try {
      System.setOut(new PrintStream(tmpout, true, "UTF-8")); // redirect STDOUT
      p.run();
      result = tmpout.toString("UTF-8"); // capture STDOUT
    } finally {
      System.setOut(stdout);
      Assert.assertEquals("this is a test", result);
    }
  }
}
nvasc/BCT_ORG
app/commons/dataprovider/dataprovider.js
<gh_stars>0 function dataProvider($resource, oauthDataFactory, $rootScope) { var service = {}; var _provider = function (model, key) { var modelUrl = ''; if (model) { modelUrl = oauthDataFactory.urlMain() + 'api/' + model; } else { return null; } if (key) { return $resource('', {}, { 'get': { method: 'GET', headers: { 'nk': $rootScope.base64.encode(key)}, params: { id: '@id' }, url: modelUrl + '?id=:id'}, 'create': { method: 'POST', headers: { 'nk': $rootScope.base64.encode(key)}, params: { }, url: modelUrl}, 'update': { method: 'PUT', headers: { 'nk': $rootScope.base64.encode(key)}, params: { id: '@id' }, url: modelUrl + '?id=:id'}, 'delete': { method: 'DELETE', headers: { 'nk': $rootScope.base64.encode(key)}, params: { id: '@id' }, url: modelUrl + '?id=:id'} }); } else { return $resource('', {}, { 'get': { method: 'GET', params: { id: '@id' }, url: modelUrl + '?id=:id'}, 'create': { method: 'POST', params: { }, url: modelUrl}, 'update': { method: 'PUT', params: { id: '@id' }, url: modelUrl + '?id=:id'}, 'delete': { method: 'DELETE', params: { id: '@id' }, url: modelUrl + '?id=:id'} }); } } service.provider = _provider; return service; } /* @ngInject */ export default dataProvider;
blurbyte/hack-the-news
src/components/StoryHeader/Details.js
import styled from 'styled-components'; import media from '../../styles/media'; const Details = styled.div` display: flex; align-items: center; margin: 2.4rem 0 2.2rem 0; ${media.phone`margin: 1.2rem 0 1rem 0;`} `; export default Details;
mohamedsamara/React-Starter-App
src/theme.js
/** * * theme.js * Theme setup --> Material UI */ import { createMuiTheme } from '@material-ui/core/styles'; import indigo from '@material-ui/core/colors/indigo'; import blue from '@material-ui/core/colors/blue'; // import purple from '@material-ui/core/colors/purple'; // import green from '@material-ui/core/colors/green'; // import pink from '@material-ui/core/colors/pink'; // import red from '@material-ui/core/colors/red'; const theme = createMuiTheme({ palette: { primary: indigo, secondary: blue, }, typography: { useNextVariants: true, }, }); export default theme;
ericallfesta/kirarirenew
proto.git 2/app/models/image.rb
class Image < ActiveRecord::Base belongs_to :writing validates_presence_of :src mount_uploader :src, DefaultUploader end
The-Forecaster/Spark-Client
src/main/java/me/wallhacks/spark/util/player/itemswitcher/itemswitchers/ItemForMineSwitchItem.java
<filename>src/main/java/me/wallhacks/spark/util/player/itemswitcher/itemswitchers/ItemForMineSwitchItem.java package me.wallhacks.spark.util.player.itemswitcher.itemswitchers; import me.wallhacks.spark.util.player.InventoryUtil; import me.wallhacks.spark.util.player.itemswitcher.SwitchItem; import net.minecraft.block.state.IBlockState; import net.minecraft.item.ItemStack; public class ItemForMineSwitchItem extends SwitchItem { public ItemForMineSwitchItem(IBlockState b){ super(); block = b; } final IBlockState block; public float isItemGood(ItemStack item){ float speed = InventoryUtil.getDestroySpeed(item,block); return speed; } }
RIdotCOM/restaurant-review
src/js/helpers/filter.js
'use strict'

import m from 'mithril'
import _ from 'lodash'

/**
 * Restaurant list filter factory. Must be invoked with `this` bound to a
 * controller exposing `restaurants`, `unfilteredRestaurants` and
 * `categories` as m.prop-style getter/setters.
 *
 * Returned API:
 *   add(type, value) – toggle a 'price' | 'rating' | 'category' filter
 *   reset()          – clear all filters and restore the unfiltered list
 *   status(type)     – query whether a filter (or any filter) is active
 *
 * Fixes: removed leftover debug console.log calls and commented-out dead
 * code; removed an unused `_.find` lookup in the category filter whose
 * `.active()` read threw a TypeError when the category was not present in
 * ctrl.categories(); removed a stray no-op statement in reset().
 */
const Filter = function() {
  const ctrl = this

  // 0 = no filter active, 1 = at least one filter applied.
  let status = 0

  const filter = {
    // With a number: set the overall status flag.
    // With a string: report whether that particular filter type is active.
    status(type) {
      if (typeof type === 'number') {
        status = type
      } else if (typeof type === 'string') {
        switch (type) {
          case 'price':
            return filter.active().price()
          case 'rating':
            return filter.active().rating()
          case 'category':
            return filter.active().category().length
          default:
            return false
        }
      }
      return status
    },
    // Currently selected filter values (0 / empty array mean "off").
    active: m.prop({
      price: m.prop(0),
      rating: m.prop(0),
      category: m.prop([]),
    }),
  }

  // Keep restaurants matching the active price tier.
  // Tiers 1–3 match exactly; the top tier (>3) matches anything above 3.
  const filterPrice = function(rests) {
    filter.status(1)
    return _.filter(rests(), (restaurant) => {
      const activePriceFilter = filter.active().price()
      const price = restaurant.priceTier
      if (price <= 3 && activePriceFilter === price) {
        return 1
      } else if (activePriceFilter > 3) {
        return price > 3
      }
    })
  }

  // Same matching rules as filterPrice, applied to the floored rating.
  const filterRating = function(rests) {
    filter.status(1)
    return _.filter(rests(), (restaurant) => {
      const rating = Math.floor(restaurant.rating)
      const activeRatingFilter = filter.active().rating()
      if (rating <= 3 && activeRatingFilter === rating) {
        return 1
      } else if (activeRatingFilter > 3) {
        return rating > 3
      }
    })
  }

  // Keep restaurants whose categories include `category`.
  // NOTE(review): the original also looked up the category in
  // ctrl.categories() (apparently to flag the menu entry active) but only
  // logged the result — confirm whether that flag should be set here.
  const filterCategory = function(rests, category) {
    filter.status(1)
    return _.filter(rests(), (restaurant) => {
      const index = _.indexOf(restaurant.categories, category)
      if (index !== -1) {
        return 1
      }
    })
  }

  // Re-apply every active filter to a fresh copy of the unfiltered list,
  // then publish the result and update the status flag.
  const applyFilter = function() {
    const rests = m.prop(_.cloneDeep(ctrl.unfilteredRestaurants()))
    if (filter.active().price()) {
      rests(filterPrice(rests))
    }
    if (filter.active().rating()) {
      rests(filterRating(rests))
    }
    if (filter.active().category().length) {
      _.forEach(filter.active().category(), (category) => {
        rests(filterCategory(rests, category))
      })
    }
    if (!filter.active().price() && !filter.active().rating() &&
        filter.active().category().length === 0) {
      filter.status(0)
    } else {
      filter.status(1)
    }
    ctrl.restaurants(rests())
    m.redraw()
  }

  return {
    add(type, value) {
      switch (type) {
        case 'price': {
          const newPriceValue = value + 1
          if (filter.active().price()) {
            // Same tier clicked twice toggles the filter off;
            // a different tier replaces it.
            if (filter.active().price() !== newPriceValue) {
              filter.active().price(newPriceValue)
              applyFilter()
            } else {
              filter.active().price(0)
              applyFilter()
            }
          } else {
            // First price filter: narrow the currently shown list directly.
            filter.active().price(newPriceValue)
            ctrl.restaurants(filterPrice(ctrl.restaurants))
            m.redraw()
          }
          break
        }
        case 'rating': {
          const newRatingValue = value + 1
          if (filter.active().rating()) {
            if (filter.active().rating() !== newRatingValue) {
              filter.active().rating(newRatingValue)
              applyFilter()
            } else {
              filter.active().rating(0)
              applyFilter()
            }
          } else {
            filter.active().rating(newRatingValue)
            ctrl.restaurants(filterRating(ctrl.restaurants))
            m.redraw()
          }
          break
        }
        case 'category':
          if (filter.active().category().length) {
            const index = _.indexOf(filter.active().category(), value)
            if (index === -1) {
              filter.active().category().push(value)
              ctrl.restaurants(filterCategory(ctrl.restaurants, value))
              m.redraw()
            } else {
              // Deselecting a category: drop it and rebuild from scratch.
              _.pullAt(filter.active().category(), index)
              applyFilter()
            }
          } else {
            filter.active().category().push(value)
            ctrl.restaurants(filterCategory(ctrl.restaurants, value))
            m.redraw()
          }
          break
        default:
          break
      }
    },

    reset() {
      filter.status(0)
      filter.active().price(0)
      filter.active().rating(0)
      filter.active().category([])
      ctrl.restaurants(ctrl.unfilteredRestaurants())
      _.forEach(ctrl.categories(), (category) => {
        category.active(false)
      })
    },

    status(type) {
      return filter.status(type)
    },
  }
}

export default Filter
tyranitar/eureka
client/src/actions/career-actions.js
import {
  asyncGetCareerDetails,
  asyncGetCareerEducationPaths,
  asyncGetCareerComments,
  asyncGetCareerPointOfContact,
  asyncGetCareerAdvertisements,
  asyncGetCareerVideos,
  asyncToggleCareerCommentLike,
  asyncAddCareerComment,
  asyncSetTargetCareer,
  asyncUnsetTargetCareer,
} from '../api/career-api';

// --- thunks: fetch a slice of career data, then store it via the
// --- matching plain action creator defined further down.

export const getCareerDetails = (careerId) => async (dispatch, getState) => {
  const details = await asyncGetCareerDetails(careerId);
  dispatch(setCareerDetails(details));
};

export const getCareerEducationPaths = (careerId) => async (dispatch, getState) => {
  const paths = await asyncGetCareerEducationPaths(careerId);
  dispatch(setCareerEducationPaths(paths));
};

export const getCareerComments = (careerId) => async (dispatch, getState) => {
  const comments = await asyncGetCareerComments(careerId);
  dispatch(setCareerComments(comments));
};

export const getCareerPointOfContact = (careerId) => async (dispatch, getState) => {
  const pointOfContact = await asyncGetCareerPointOfContact(careerId);
  dispatch(setCareerPointOfContact(pointOfContact));
};

export const getCareerAdvertisements = (careerId) => async (dispatch, getState) => {
  const advertisements = await asyncGetCareerAdvertisements(careerId);
  dispatch(setCareerAdvertisements(advertisements));
};

export const getCareerVideos = (careerId) => async (dispatch, getState) => {
  const videos = await asyncGetCareerVideos(careerId);
  dispatch(setCareerVideos(videos));
};

export const setCareerVideos = (careerVideos) => ({
  type: 'SET_CAREER_VIDEOS',
  careerVideos,
});

// Persist the like toggle, then flip the flag on the matching comment
// in the store (optimistic local copy of the server state).
export const toggleCareerCommentLike = (commentId) => async (dispatch, getState) => {
  await asyncToggleCareerCommentLike(commentId);
  const updated = getState().career.comments.map((existing) =>
    existing.id === commentId ? { ...existing, liked: !existing.liked } : existing
  );
  dispatch(setCareerComments(updated));
};

// Persist the new comment, then append a local stand-in entry for it.
export const addCareerComment = (careerId, comment) => async (dispatch, getState) => {
  await asyncAddCareerComment(careerId, comment);
  const { comments } = getState().career;
  const newEntry = {
    user: { name: 'You' },
    createdAt: new Date(),
    id: comments.length,
    content: comment,
  };
  dispatch(setCareerComments(comments.concat(newEntry)));
};

export const addTargetCareer = (career) => ({
  type: 'ADD_TARGET_CAREER',
  career,
});

export const removeTargetCareer = () => ({
  type: 'REMOVE_TARGET_CAREER',
});

export const setTargetCareer = (career) => async (dispatch, getState) => {
  await asyncSetTargetCareer(career.id);
  dispatch(addTargetCareer(career));
};

export const unsetTargetCareer = () => async (dispatch, getState) => {
  await asyncUnsetTargetCareer();
  dispatch(removeTargetCareer());
};

// --- plain action creators -------------------------------------------------

export const setCareerDetails = (careerDetails) => ({
  type: 'SET_CAREER_DETAILS',
  careerDetails,
});

export const resetCareerDetails = () => ({
  type: 'RESET_CAREER_DETAILS',
});

export const setCareerEducationPaths = (careerEducationPaths) => ({
  type: 'SET_CAREER_EDUCATION_PATHS',
  careerEducationPaths,
});

export const setCareerComments = (careerComments) => ({
  type: 'SET_CAREER_COMMENTS',
  careerComments,
});

export const setCareerPointOfContact = (careerPointOfContact) => ({
  type: 'SET_CAREER_POINT_OF_CONTACT',
  careerPointOfContact,
});

export const setCareerAdvertisements = (careerAdvertisements) => ({
  type: 'SET_CAREER_ADVERTISEMENTS',
  careerAdvertisements,
});
ChaosPaladin/l2mapconv-public
rendering/src/EntityTree.cpp
<reponame>ChaosPaladin/l2mapconv-public<filename>rendering/src/EntityTree.cpp #include "pch.h" #include <rendering/EntityTree.h> namespace rendering { void EntityTree::add(const Entity &entity) { ASSERT(entity.mesh() != nullptr, "Rendering", "Entity must have mesh"); m_entities.push_front(entity); const auto *inserted = &*m_entities.begin(); for (const auto &surface : entity.mesh()->surfaces()) { m_tree[surface.type][entity.shader()][surface.material.texture] [entity.mesh()] .push_back({inserted, &surface}); } } void EntityTree::remove(std::uint64_t surface_filter) { for (auto it = m_tree.cbegin(); it != m_tree.cend();) { if ((it->first & surface_filter) == it->first) { m_tree.erase(it++); } else { ++it; } } m_entities.remove_if([surface_filter](const Entity &entity) { for (const auto &surface : entity.mesh()->surfaces()) { if ((surface.type & surface_filter) == surface.type) { return true; } } return false; }); } auto EntityTree::tree() const -> const Tree & { return m_tree; } } // namespace rendering
rsumner31/pymc3-23
PyMC2/database/pickle.py
###
# Pickle backend module
# Traces are stored in memory during sampling and saved to a
# pickle file at the end of sampling.
###

"""Allow the possibility to load an existing pickle file to continue
sampling where it was left.

The database is initialized with the name of the pickled file. Since the
nodes and parameters (the model definition) cannot be pickled, the stored
traces cannot be attached to the objects right away; this happens in
_initialize(), when the model instance is passed.

model.state = dict(_current_iter, _iter, _burn, _thin, sampling, ready,
tuning parameters.)

sampling is True during sampling. ready is True if it is possible to
sample without initializing the database (GUI stopped computation and
wants to restart it); it is False if the database must be initialized
first (sampling was interrupted, the database finalized and the session
closed). At the end of a sampling loop, ready is set to False.
"""

import ram, no_trace
import os, datetime, numpy
import string
# BUG FIX: cPickle was used below but never imported (NameError on
# reload and on _finalize).
import cPickle


class Trace(ram.Trace):
    """In-memory trace; identical to the RAM backend's trace."""
    pass


class Database(object):
    """Pickle database backend. Saves the trace to a pickle file."""

    def __init__(self, filename=None):
        """If `filename` names an existing pickle file in the current
        directory, load its traces so sampling can be resumed; the traces
        are attached to the PyMC objects in _initialize()."""
        if filename in os.listdir('.'):
            file = open(filename, 'r')
            self.container = cPickle.load(file)
            file.close()
            self.reloading = True
        else:
            self.reloading = False
        self.filename = filename

    def _initialize(self, length, model):
        """Define the filename to store simulation results and attach
        (or initialize) the trace of each tallied object."""
        self.model = model

        if self.filename is None:
            modname = self.model.__name__.split('.')[-1] + '.pymc'
            name = modname
            i = 0
            existing_names = os.listdir(".")
            # BUG FIX: the loop condition referenced an undefined name
            # (`while again:`); loop until an unused file name is found.
            while True:
                if name in existing_names:
                    name = modname + '_%d' % i
                    i += 1
                else:
                    break
            self.filename = name

        for object in self.model._pymc_objects_to_tally:
            if self.reloading:
                # Resume: attach the previously pickled trace.
                object._trace = self.container[object.__name__]
            else:
                object.trace._initialize(length)
        self.reloading = False

    def _finalize(self):
        """Dump all tallied traces to the pickle file."""
        container = {}
        for object in self.model._pymc_objects_to_tally:
            object.trace._finalize()
            container[object.__name__] = object.gettrace()
        file = open(self.filename, 'w')
        cPickle.dump(container, file)
        file.close()
apjanke/janklab-opp
jide-oss/jide-oss-3.7.6/project/jide-oss-3.7.6/src/com/jidesoft/comparator/PrioritizedObjectComparator.java
<reponame>apjanke/janklab-opp package com.jidesoft.comparator; import com.jidesoft.swing.Prioritized; import java.util.Comparator; /** * <code>Comparator</code> for objects that implements {@link com.jidesoft.swing.Prioritized} * interface. It is a singleton pattern. You use {@link #getInstance()} to get an instance. */ public class PrioritizedObjectComparator implements Comparator<Object> { private static PrioritizedObjectComparator singleton = null; protected PrioritizedObjectComparator() { } /** * Gets an instance of <code>PrioritizedObjectComparator</code>. * * @return an instance Cof <code>PrioritizedObjectComparator</code>. */ public static PrioritizedObjectComparator getInstance() { if (singleton == null) { singleton = new PrioritizedObjectComparator(); } return singleton; } public int compare(Object o1, Object o2) { int p1 = 0; if (o1 instanceof Prioritized) { p1 = ((Prioritized) o1).getPriority(); } int p2 = 0; if (o2 instanceof Prioritized) { p2 = ((Prioritized) o2).getPriority(); } return p1 - p2; } }
PircDef/accumulo
server/manager/src/main/java/org/apache/accumulo/manager/tableOps/TableInfo.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.accumulo.manager.tableOps;

import java.io.Serializable;
import java.util.Map;

import org.apache.accumulo.core.client.admin.InitialTableState;
import org.apache.accumulo.core.client.admin.TimeType;
import org.apache.accumulo.core.data.NamespaceId;
import org.apache.accumulo.core.data.TableId;
import org.apache.hadoop.fs.Path;

/**
 * Serializable holder for the parameters of a table-creation FATE operation:
 * identity (name, table id, namespace id), requested time type and initial
 * state, the requesting user, initial-split bookkeeping, and table properties.
 * Path fields are stored as Strings so the object stays Java-serializable.
 */
public class TableInfo implements Serializable {

  private static final long serialVersionUID = 1L;

  private String tableName;
  private TableId tableId;
  private NamespaceId namespaceId;
  private TimeType timeType;
  private String user;

  // Record requested initial state at creation
  private InitialTableState initialTableState;

  // Track information related to initial split creation
  private int initialSplitSize;
  private String splitFile;
  private String splitDirsFile;

  // Table properties to apply at creation time.
  public Map<String,String> props;

  public String getTableName() {
    return tableName;
  }

  public void setTableName(String tableName) {
    this.tableName = tableName;
  }

  public TableId getTableId() {
    return tableId;
  }

  public void setTableId(TableId tableId) {
    this.tableId = tableId;
  }

  public NamespaceId getNamespaceId() {
    return namespaceId;
  }

  public void setNamespaceId(NamespaceId namespaceId) {
    this.namespaceId = namespaceId;
  }

  public TimeType getTimeType() {
    return timeType;
  }

  public void setTimeType(TimeType timeType) {
    this.timeType = timeType;
  }

  public String getUser() {
    return user;
  }

  public void setUser(String user) {
    this.user = user;
  }

  // NOTE(review): if splitFile is null (no splits were set), new Path(null)
  // throws; callers presumably check getInitialSplitSize() first — TODO confirm.
  public Path getSplitPath() {
    return new Path(splitFile);
  }

  // stored as string for Java serialization
  public void setSplitPath(Path splitPath) {
    this.splitFile = splitPath == null ? null : splitPath.toString();
  }

  // NOTE(review): same null caveat as getSplitPath() applies here.
  public Path getSplitDirsPath() {
    return new Path(splitDirsFile);
  }

  // stored as string for Java serialization
  public void setSplitDirsPath(Path splitDirsPath) {
    this.splitDirsFile = splitDirsPath == null ? null : splitDirsPath.toString();
  }

  public InitialTableState getInitialTableState() {
    return initialTableState;
  }

  public void setInitialTableState(InitialTableState initialTableState) {
    this.initialTableState = initialTableState;
  }

  public int getInitialSplitSize() {
    return initialSplitSize;
  }

  public void setInitialSplitSize(int initialSplitSize) {
    this.initialSplitSize = initialSplitSize;
  }
}
MephistoMMM/gAlgorithmAndDesignPattern
leetcode/0021_merge_two_sorted_list/main.go
<filename>leetcode/0021_merge_two_sorted_list/main.go // Copyright © 2019 <NAME> <<EMAIL>> // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. package main import "fmt" // https://leetcode.com/problems/merge-two-sorted-lists/ type ListNode struct { Val int Next *ListNode } // 0 ms 2.5 MB func mergeTwoLists(l1 *ListNode, l2 *ListNode) *ListNode { head := &ListNode{} n0 := head for l1 != nil && l2 != nil { if l1.Val < l2.Val { n0.Next = l1 l1 = l1.Next } else { n0.Next = l2 l2 = l2.Next } n0 = n0.Next } if l1 != nil { n0.Next = l1 } if l2 != nil { n0.Next = l2 } return head.Next } func showList(head *ListNode) { for head != nil { fmt.Printf("%d->", head.Val) head = head.Next } fmt.Println("") } func main() { list1 := &ListNode{ Val: 1, Next: &ListNode{ Val: 2, Next: &ListNode{ Val: 4, Next: nil, }, }, } list2 := &ListNode{ Val: 1, Next: &ListNode{ Val: 3, Next: &ListNode{ Val: 4, Next: nil, }, }, } showList(mergeTwoLists(list1, list2)) }
dingjianhui1013/operator
src/main/java/cn/topca/tca/ra/service/CertInfo.java
package cn.topca.tca.ra.service;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlType;

/**
 * JAXB binding class for the {@code certInfo} XML complex type.
 *
 * <p>Mirrors the schema type of the same name: a sequence of optional string
 * elements describing a certificate's lifecycle (request, approval/rejection,
 * signing, validity, subject/issuer identity, suspension, revocation and
 * renewal data, plus KMC key-management request/response buffers), and four
 * required {@code int} elements: {@code certRenewalPrevId},
 * {@code certRenewalNextId}, {@code certReqOverrideValidity} and {@code id}.
 *
 * <p>Generated-style code: fields are bound by name via
 * {@code XmlAccessType.FIELD} in the order given by {@code propOrder}.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "certInfo", propOrder = {
    "certStatus", "certReqDate", "certReqTransid", "certReqNonce",
    "certReqBufType", "certReqChallenge", "certReqComment",
    "certApproveDate", "certRejectDate", "certSignDate",
    "certSerialNumber", "certNotBefore", "certNotAfter",
    "certIssuerHashMd5", "certIssuerDn", "certSubjectHashMd5",
    "certSubjectDn", "certSuspendDate", "certRevokeDate",
    "certRevokeReason", "certRenewalDate", "certRenewalPrevId",
    "certRenewalNextId", "certReqOverrideValidity", "id",
    "certReqBuf", "certSignBuf", "certPin", "certType",
    "certKmcReq1", "certKmcReq2", "certKmcReq3",
    "certKmcRep1", "certKmcRep2", "certKmcRep3",
    "certDeliverMode", "certApproveAdmin1", "certApproveAdmin2",
    "certRenewemailDeliver", "certReqBufKmc", "certReqBufTypeKmc",
    "certSignBufKmc", "certSignBufP7Kmc", "certSignBufP7",
    "certSerialnumberKmc", "certTypeKmc", "certReqPublickey",
    "certReqPublickeyHashMd5", "certIdRandom"
})
public class CertInfo {

    // Certificate lifecycle status and request metadata.
    protected String certStatus;
    protected String certReqDate;
    protected String certReqTransid;
    protected String certReqNonce;
    protected String certReqBufType;
    protected String certReqChallenge;
    protected String certReqComment;
    // Approval / rejection / signing timestamps.
    protected String certApproveDate;
    protected String certRejectDate;
    protected String certSignDate;
    // Issued-certificate identity and validity.
    protected String certSerialNumber;
    protected String certNotBefore;
    protected String certNotAfter;
    protected String certIssuerHashMd5;
    protected String certIssuerDn;
    protected String certSubjectHashMd5;
    protected String certSubjectDn;
    // Suspension / revocation / renewal bookkeeping.
    protected String certSuspendDate;
    protected String certRevokeDate;
    protected String certRevokeReason;
    protected String certRenewalDate;
    protected int certRenewalPrevId;
    protected int certRenewalNextId;
    protected int certReqOverrideValidity;
    protected int id;
    // Request / signed-certificate buffers and delivery info.
    protected String certReqBuf;
    protected String certSignBuf;
    protected String certPin;
    protected String certType;
    // KMC (key management center) request/response buffers.
    protected String certKmcReq1;
    protected String certKmcReq2;
    protected String certKmcReq3;
    protected String certKmcRep1;
    protected String certKmcRep2;
    protected String certKmcRep3;
    protected String certDeliverMode;
    protected String certApproveAdmin1;
    protected String certApproveAdmin2;
    protected String certRenewemailDeliver;
    protected String certReqBufKmc;
    protected String certReqBufTypeKmc;
    protected String certSignBufKmc;
    protected String certSignBufP7Kmc;
    protected String certSignBufP7;
    protected String certSerialnumberKmc;
    protected String certTypeKmc;
    protected String certReqPublickey;
    protected String certReqPublickeyHashMd5;
    protected String certIdRandom;

    /** Gets the certStatus value. */
    public String getCertStatus() { return certStatus; }
    /** Sets the certStatus value. */
    public void setCertStatus(String value) { this.certStatus = value; }

    /** Gets the certReqDate value. */
    public String getCertReqDate() { return certReqDate; }
    /** Sets the certReqDate value. */
    public void setCertReqDate(String value) { this.certReqDate = value; }

    /** Gets the certReqTransid value. */
    public String getCertReqTransid() { return certReqTransid; }
    /** Sets the certReqTransid value. */
    public void setCertReqTransid(String value) { this.certReqTransid = value; }

    /** Gets the certReqNonce value. */
    public String getCertReqNonce() { return certReqNonce; }
    /** Sets the certReqNonce value. */
    public void setCertReqNonce(String value) { this.certReqNonce = value; }

    /** Gets the certReqBufType value. */
    public String getCertReqBufType() { return certReqBufType; }
    /** Sets the certReqBufType value. */
    public void setCertReqBufType(String value) { this.certReqBufType = value; }

    /** Gets the certReqChallenge value. */
    public String getCertReqChallenge() { return certReqChallenge; }
    /** Sets the certReqChallenge value. */
    public void setCertReqChallenge(String value) { this.certReqChallenge = value; }

    /** Gets the certReqComment value. */
    public String getCertReqComment() { return certReqComment; }
    /** Sets the certReqComment value. */
    public void setCertReqComment(String value) { this.certReqComment = value; }

    /** Gets the certApproveDate value. */
    public String getCertApproveDate() { return certApproveDate; }
    /** Sets the certApproveDate value. */
    public void setCertApproveDate(String value) { this.certApproveDate = value; }

    /** Gets the certRejectDate value. */
    public String getCertRejectDate() { return certRejectDate; }
    /** Sets the certRejectDate value. */
    public void setCertRejectDate(String value) { this.certRejectDate = value; }

    /** Gets the certSignDate value. */
    public String getCertSignDate() { return certSignDate; }
    /** Sets the certSignDate value. */
    public void setCertSignDate(String value) { this.certSignDate = value; }

    /** Gets the certSerialNumber value. */
    public String getCertSerialNumber() { return certSerialNumber; }
    /** Sets the certSerialNumber value. */
    public void setCertSerialNumber(String value) { this.certSerialNumber = value; }

    /** Gets the certNotBefore value. */
    public String getCertNotBefore() { return certNotBefore; }
    /** Sets the certNotBefore value. */
    public void setCertNotBefore(String value) { this.certNotBefore = value; }

    /** Gets the certNotAfter value. */
    public String getCertNotAfter() { return certNotAfter; }
    /** Sets the certNotAfter value. */
    public void setCertNotAfter(String value) { this.certNotAfter = value; }

    /** Gets the certIssuerHashMd5 value. */
    public String getCertIssuerHashMd5() { return certIssuerHashMd5; }
    /** Sets the certIssuerHashMd5 value. */
    public void setCertIssuerHashMd5(String value) { this.certIssuerHashMd5 = value; }

    /** Gets the certIssuerDn value. */
    public String getCertIssuerDn() { return certIssuerDn; }
    /** Sets the certIssuerDn value. */
    public void setCertIssuerDn(String value) { this.certIssuerDn = value; }

    /** Gets the certSubjectHashMd5 value. */
    public String getCertSubjectHashMd5() { return certSubjectHashMd5; }
    /** Sets the certSubjectHashMd5 value. */
    public void setCertSubjectHashMd5(String value) { this.certSubjectHashMd5 = value; }

    /** Gets the certSubjectDn value. */
    public String getCertSubjectDn() { return certSubjectDn; }
    /** Sets the certSubjectDn value. */
    public void setCertSubjectDn(String value) { this.certSubjectDn = value; }

    /** Gets the certSuspendDate value. */
    public String getCertSuspendDate() { return certSuspendDate; }
    /** Sets the certSuspendDate value. */
    public void setCertSuspendDate(String value) { this.certSuspendDate = value; }

    /** Gets the certRevokeDate value. */
    public String getCertRevokeDate() { return certRevokeDate; }
    /** Sets the certRevokeDate value. */
    public void setCertRevokeDate(String value) { this.certRevokeDate = value; }

    /** Gets the certRevokeReason value. */
    public String getCertRevokeReason() { return certRevokeReason; }
    /** Sets the certRevokeReason value. */
    public void setCertRevokeReason(String value) { this.certRevokeReason = value; }

    /** Gets the certRenewalDate value. */
    public String getCertRenewalDate() { return certRenewalDate; }
    /** Sets the certRenewalDate value. */
    public void setCertRenewalDate(String value) { this.certRenewalDate = value; }

    /** Gets the certRenewalPrevId value. */
    public int getCertRenewalPrevId() { return certRenewalPrevId; }
    /** Sets the certRenewalPrevId value. */
    public void setCertRenewalPrevId(int value) { this.certRenewalPrevId = value; }

    /** Gets the certRenewalNextId value. */
    public int getCertRenewalNextId() { return certRenewalNextId; }
    /** Sets the certRenewalNextId value. */
    public void setCertRenewalNextId(int value) { this.certRenewalNextId = value; }

    /** Gets the certReqOverrideValidity value. */
    public int getCertReqOverrideValidity() { return certReqOverrideValidity; }
    /** Sets the certReqOverrideValidity value. */
    public void setCertReqOverrideValidity(int value) { this.certReqOverrideValidity = value; }

    /** Gets the id value. */
    public int getId() { return id; }
    /** Sets the id value. */
    public void setId(int value) { this.id = value; }

    /** Gets the certReqBuf value. */
    public String getCertReqBuf() { return certReqBuf; }
    /** Sets the certReqBuf value. */
    public void setCertReqBuf(String value) { this.certReqBuf = value; }

    /** Gets the certSignBuf value. */
    public String getCertSignBuf() { return certSignBuf; }
    /** Sets the certSignBuf value. */
    public void setCertSignBuf(String value) { this.certSignBuf = value; }

    /** Gets the certPin value. */
    public String getCertPin() { return certPin; }
    /** Sets the certPin value. */
    public void setCertPin(String value) { this.certPin = value; }

    /** Gets the certType value. */
    public String getCertType() { return certType; }
    /** Sets the certType value. */
    public void setCertType(String value) { this.certType = value; }

    /** Gets the certKmcReq1 value. */
    public String getCertKmcReq1() { return certKmcReq1; }
    /** Sets the certKmcReq1 value. */
    public void setCertKmcReq1(String value) { this.certKmcReq1 = value; }

    /** Gets the certKmcReq2 value. */
    public String getCertKmcReq2() { return certKmcReq2; }
    /** Sets the certKmcReq2 value. */
    public void setCertKmcReq2(String value) { this.certKmcReq2 = value; }

    /** Gets the certKmcReq3 value. */
    public String getCertKmcReq3() { return certKmcReq3; }
    /** Sets the certKmcReq3 value. */
    public void setCertKmcReq3(String value) { this.certKmcReq3 = value; }

    /** Gets the certKmcRep1 value. */
    public String getCertKmcRep1() { return certKmcRep1; }
    /** Sets the certKmcRep1 value. */
    public void setCertKmcRep1(String value) { this.certKmcRep1 = value; }

    /** Gets the certKmcRep2 value. */
    public String getCertKmcRep2() { return certKmcRep2; }
    /** Sets the certKmcRep2 value. */
    public void setCertKmcRep2(String value) { this.certKmcRep2 = value; }

    /** Gets the certKmcRep3 value. */
    public String getCertKmcRep3() { return certKmcRep3; }
    /** Sets the certKmcRep3 value. */
    public void setCertKmcRep3(String value) { this.certKmcRep3 = value; }

    /** Gets the certDeliverMode value. */
    public String getCertDeliverMode() { return certDeliverMode; }
    /** Sets the certDeliverMode value. */
    public void setCertDeliverMode(String value) { this.certDeliverMode = value; }

    /** Gets the certApproveAdmin1 value. */
    public String getCertApproveAdmin1() { return certApproveAdmin1; }
    /** Sets the certApproveAdmin1 value. */
    public void setCertApproveAdmin1(String value) { this.certApproveAdmin1 = value; }

    /** Gets the certApproveAdmin2 value. */
    public String getCertApproveAdmin2() { return certApproveAdmin2; }
    /** Sets the certApproveAdmin2 value. */
    public void setCertApproveAdmin2(String value) { this.certApproveAdmin2 = value; }

    /** Gets the certRenewemailDeliver value. */
    public String getCertRenewemailDeliver() { return certRenewemailDeliver; }
    /** Sets the certRenewemailDeliver value. */
    public void setCertRenewemailDeliver(String value) { this.certRenewemailDeliver = value; }

    /** Gets the certReqBufKmc value. */
    public String getCertReqBufKmc() { return certReqBufKmc; }
    /** Sets the certReqBufKmc value. */
    public void setCertReqBufKmc(String value) { this.certReqBufKmc = value; }

    /** Gets the certReqBufTypeKmc value. */
    public String getCertReqBufTypeKmc() { return certReqBufTypeKmc; }
    /** Sets the certReqBufTypeKmc value. */
    public void setCertReqBufTypeKmc(String value) { this.certReqBufTypeKmc = value; }

    /** Gets the certSignBufKmc value. */
    public String getCertSignBufKmc() { return certSignBufKmc; }
    /** Sets the certSignBufKmc value. */
    public void setCertSignBufKmc(String value) { this.certSignBufKmc = value; }

    /** Gets the certSignBufP7Kmc value. */
    public String getCertSignBufP7Kmc() { return certSignBufP7Kmc; }
    /** Sets the certSignBufP7Kmc value. */
    public void setCertSignBufP7Kmc(String value) { this.certSignBufP7Kmc = value; }

    /** Gets the certSignBufP7 value. */
    public String getCertSignBufP7() { return certSignBufP7; }
    /** Sets the certSignBufP7 value. */
    public void setCertSignBufP7(String value) { this.certSignBufP7 = value; }

    /** Gets the certSerialnumberKmc value. */
    public String getCertSerialnumberKmc() { return certSerialnumberKmc; }
    /** Sets the certSerialnumberKmc value. */
    public void setCertSerialnumberKmc(String value) { this.certSerialnumberKmc = value; }

    /** Gets the certTypeKmc value. */
    public String getCertTypeKmc() { return certTypeKmc; }
    /** Sets the certTypeKmc value. */
    public void setCertTypeKmc(String value) { this.certTypeKmc = value; }

    /** Gets the certReqPublickey value. */
    public String getCertReqPublickey() { return certReqPublickey; }
    /** Sets the certReqPublickey value. */
    public void setCertReqPublickey(String value) { this.certReqPublickey = value; }

    /** Gets the certReqPublickeyHashMd5 value. */
    public String getCertReqPublickeyHashMd5() { return certReqPublickeyHashMd5; }
    /** Sets the certReqPublickeyHashMd5 value. */
    public void setCertReqPublickeyHashMd5(String value) { this.certReqPublickeyHashMd5 = value; }

    /** Gets the certIdRandom value. */
    public String getCertIdRandom() { return certIdRandom; }
    /** Sets the certIdRandom value. */
    public void setCertIdRandom(String value) { this.certIdRandom = value; }
}
handsome0hell/ckb-sdk-java
ckb-mercury-sdk/src/main/java/org/nervos/mercury/model/common/Range.java
package org.nervos.mercury.model.common;

import java.math.BigInteger;

/**
 * A simple numeric range with a lower and an upper bound, used as a filter in
 * Mercury query models.
 *
 * NOTE(review): from/to presumably denote block numbers and the bounds'
 * inclusivity is defined by the Mercury RPC — confirm against the Mercury
 * API documentation before relying on either.
 */
public class Range {

  // Lower bound of the range.
  public BigInteger from;
  // Upper bound of the range.
  public BigInteger to;

  public Range(BigInteger from, BigInteger to) {
    this.from = from;
    this.to = to;
  }
}
bryceharrington/caskbench
src/cairo-tests/image-rotate.cpp
/*
 * Copyright 2014 © Samsung Research America, Silicon Valley
 *
 * Use of this source code is governed by the 3-Clause BSD license
 * specified in the COPYING file included with this source code.
 */
#include <config.h>
#include <cairo.h>

#include "caskbench.h"
#include "caskbench_context.h"
#include "cairo-shapes.h"

/* Sample image and its cached (surface-compatible) copy, created in setup. */
static cairo_surface_t *image;
static cairo_surface_t *cached_image;

/* Create the sample image and cache it for fast painting. */
int
ca_setup_image_rotate(caskbench_context_t *ctx)
{
    image = cairoCreateSampleImage (ctx);
    cached_image = cairoCacheImageSurface (ctx, image);

    return 1;
}

/* Release the surfaces created by setup. */
void
ca_teardown_image_rotate(void)
{
    cairo_surface_destroy (image);
    cairo_surface_destroy (cached_image);
}

/*
 * Benchmark body: paint the cached image ctx->size times, rotated about the
 * canvas centre by a slowly advancing angle.
 *
 * Fix: the previous version computed pw/ph and two random coordinates x/y on
 * every iteration but never used them (a leftover from a random-placement
 * variant); the dead computation — including two rnd() draws per iteration —
 * has been removed.
 */
int
ca_test_image_rotate(caskbench_context_t* ctx)
{
    cairo_t *cr = ctx->cairo_cr;
    static int counter = 0;
    double radian = 0;
    int w = ctx->canvas_width;
    int h = ctx->canvas_height;
    int iw = cairo_image_surface_get_width (image);
    int ih = cairo_image_surface_get_height (image);

    for (int i=0; i<ctx->size; i++) {
        cairo_new_path(cr);
        cairo_save(cr);

        /* Rotate about the canvas centre, then centre the image there. */
        cairo_translate(cr, w/2, h/2);
        /* counter/50 is integer division, so the angle advances in steps of
         * 1/57.29 rad (~1 degree) once every 50 paints — NOTE(review):
         * presumably intentional for a slow spin; confirm. */
        radian = (1/57.29) * (counter/50);
        cairo_rotate(cr, radian);
        cairo_translate(cr, -iw/2, -ih/2);

        cairo_set_source_surface (cr, cached_image, 0, 0);
        cairo_paint (cr);
        counter++;
        cairo_restore(cr);
    }

    return 1;
}

/*
  Local Variables:
  mode:c++
  c-file-style:"stroustrup"
  c-file-offsets:((innamespace . 0)(inline-open . 0)(case-label . +))
  indent-tabs-mode:nil
  fill-column:99
  End:
*/
// vim: filetype=cpp:expandtab:shiftwidth=4:tabstop=8:softtabstop=4:fileencoding=utf-8:textwidth=99 :
mosheCrespin/moodle_plugin
moodle/lib/form/amd/src/showadvanced.js
// This file is part of Moodle - http://moodle.org/
//
// Moodle is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// Moodle is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with Moodle.  If not, see <http://www.gnu.org/licenses/>.

/**
 * A class to help show and hide advanced form content.
 *
 * @module     core_form/showadvanced
 * @copyright  2016 <NAME> <<EMAIL>>
 * @license    http://www.gnu.org/copyleft/gpl.html GNU GPL v3 or later
 */
define(['jquery', 'core/log', 'core/str', 'core/notification'],
       function($, Log, Strings, Notification) {

    // CSS selectors, class names, wrapper markup and the id prefix used by
    // this module. Kept in one place so markup changes are localised.
    var SELECTORS = {
            FIELDSETCONTAINSADVANCED: 'fieldset.containsadvancedelements',
            DIVFITEMADVANCED: 'div.fitem.advanced',
            DIVADVANCEDSECTION: 'div#form-advanced-div',
            DIVFCONTAINER: 'div.fcontainer',
            MORELESSLINK: 'fieldset.containsadvancedelements .moreless-toggler'
        },
        CSS = {
            SHOW: 'show',
            MORELESSACTIONS: 'moreless-actions',
            MORELESSTOGGLER: 'moreless-toggler',
            SHOWLESS: 'moreless-less'
        },
        WRAPPERS = {
            FITEM: '<div class="fitem"></div>',
            FELEMENT: '<div class="felement"></div>',
            ADVANCEDDIV: '<div id="form-advanced-div"></div>'
        },
        IDPREFIX = 'showadvancedid-';

    /** @property {Integer} uniqIdSeed Auto incrementing number used to generate ids. */
    var uniqIdSeed = 0;

    /**
     * ShowAdvanced behaviour class. Enhances the form with the given id on
     * construction.
     *
     * @class core_form/showadvanced
     * @param {String} id The id of the form.
     */
    var ShowAdvanced = function(id) {
        this.id = id;

        var form = $(document.getElementById(id));
        this.enhanceForm(form);
    };

    /** @property {String} id The form id to enhance. */
    ShowAdvanced.prototype.id = '';

    /**
     * Enhance every qualifying fieldset in the form and attach delegated
     * click/keyboard handlers for the more/less toggler links.
     *
     * @method enhanceForm
     * @param {JQuery} form JQuery selector representing the form
     * @return {ShowAdvanced}
     */
    ShowAdvanced.prototype.enhanceForm = function(form) {
        var fieldsets = form.find(SELECTORS.FIELDSETCONTAINSADVANCED);

        // Enhance each fieldset in the form matching the selector.
        fieldsets.each(function(index, item) {
            this.enhanceFieldset($(item));
        }.bind(this));

        // Attach some event listeners.
        // Subscribe more/less links to click event.
        form.on('click', SELECTORS.MORELESSLINK, this.switchState);

        // Subscribe to key events but filter for space or enter.
        form.on('keydown', SELECTORS.MORELESSLINK, function(e) {
            // Enter or space.
            if (e.which == 13 || e.which == 32) {
                return this.switchState(e);
            }
            return true;
        }.bind(this));

        return this;
    };

    /**
     * Generates a uniq id for the dom element it's called on unless the element already has an id.
     * The id is set on the dom node before being returned.
     *
     * NOTE(review): jQuery's .prop('id') returns '' (not undefined) for an
     * element with no id, so the typeof check below may never match and no id
     * would be generated in that case — confirm against the jQuery version in
     * use before relying on this.
     *
     * @method generateId
     * @param {JQuery} node JQuery selector representing a single DOM Node.
     * @return {String}
     */
    ShowAdvanced.prototype.generateId = function(node) {
        var id = node.prop('id');
        if (typeof id === 'undefined') {
            id = IDPREFIX + (uniqIdSeed++);
            node.prop('id', id);
        }
        return id;
    };

    /**
     * Enhance a single fieldset: create the more/less toggler link (with aria
     * attributes) and wrap the advanced items in an advanced-section div.
     * Does nothing if the fieldset has no mform_showmore_* status input.
     *
     * @method enhanceFieldset
     * @param {JQuery} fieldset JQuery selector representing a fieldset
     * @return {ShowAdvanced}
     */
    ShowAdvanced.prototype.enhanceFieldset = function(fieldset) {
        // Hidden input holding the persisted expanded ('1') / collapsed ('0') state.
        var statuselement = $('input[name=mform_showmore_' + fieldset.prop('id') + ']');
        if (!statuselement.length) {
            Log.debug("M.form.showadvanced::processFieldset was called on an fieldset without a status field: '" +
                fieldset.prop('id') + "'");
            return this;
        }
        // Fetch some strings.
        Strings.get_strings([{
            key: 'showmore',
            component: 'core_form'
        }, {
            key: 'showless',
            component: 'core_form'
        }]).then(function(results) {
            var showmore = results[0],
                showless = results[1];

            // Generate more/less links.
            var morelesslink = $('<a href="#"></a>');
            morelesslink.addClass(CSS.MORELESSTOGGLER);

            // Initial link text and aria state reflect the persisted status.
            if (statuselement.val() === '0') {
                morelesslink.html(showmore);
                morelesslink.attr('aria-expanded', 'false');
            } else {
                morelesslink.html(showless);
                morelesslink.attr('aria-expanded', 'true');
                morelesslink.addClass(CSS.SHOWLESS);
                fieldset.find(SELECTORS.DIVFITEMADVANCED).addClass(CSS.SHOW);
            }
            // Build a list of advanced fieldsets.
            // NOTE(review): idlist is populated but never read afterwards —
            // looks like a leftover (possibly from an aria-controls id list);
            // confirm before removing.
            var idlist = [];
            fieldset.find(SELECTORS.DIVFITEMADVANCED).each(function(index, node) {
                idlist[idlist.length] = this.generateId($(node));
            }.bind(this));

            // Set aria attributes.
            morelesslink.attr('role', 'button');
            morelesslink.attr('aria-controls', 'form-advanced-div');

            // Wrap all advanced items in a single advanced-section container.
            var formadvancedsection = $(WRAPPERS.ADVANCEDDIV);
            fieldset.find(SELECTORS.DIVFITEMADVANCED).wrapAll(formadvancedsection);

            // Add elements to the DOM.
            var fitem = $(WRAPPERS.FITEM);
            fitem.addClass(CSS.MORELESSACTIONS);
            var felement = $(WRAPPERS.FELEMENT);
            felement.append(morelesslink);
            fitem.append(felement);
            fieldset.find(SELECTORS.DIVADVANCEDSECTION).before(fitem);

            return true;
        }.bind(this)).fail(Notification.exception);

        return this;
    };

    /**
     * Toggle the expanded/collapsed state of the fieldset containing the
     * clicked more/less link: flips the CSS class on the advanced items,
     * inverts the hidden status input, and updates link text and aria state.
     *
     * @method switchState
     * @param {Event} e Event that triggered this action.
     * @return {Boolean}
     */
    ShowAdvanced.prototype.switchState = function(e) {
        e.preventDefault();

        // Fetch some strings.
        Strings.get_strings([{
            key: 'showmore',
            component: 'core_form'
        }, {
            key: 'showless',
            component: 'core_form'
        }]).then(function(results) {
            var showmore = results[0],
                showless = results[1],
                fieldset = $(e.target).closest(SELECTORS.FIELDSETCONTAINSADVANCED);

            // Toggle collapsed class.
            fieldset.find(SELECTORS.DIVFITEMADVANCED).toggleClass(CSS.SHOW);

            // Get corresponding hidden variable.
            var statuselement = $('input[name=mform_showmore_' + fieldset.prop('id') + ']');

            // Invert it and change the link text.
            if (statuselement.val() === '0') {
                statuselement.val(1);
                $(e.target).addClass(CSS.SHOWLESS);
                $(e.target).html(showless);
                $(e.target).attr('aria-expanded', 'true');
            } else {
                statuselement.val(0);
                $(e.target).removeClass(CSS.SHOWLESS);
                $(e.target).html(showmore);
                $(e.target).attr('aria-expanded', 'false');
            }
            return true;
        }).fail(Notification.exception);

        return this;
    };

    return {
        /**
         * Initialise this module.
         * @method init
         * @param {String} formid
         * @return {ShowAdvanced}
         */
        init: function(formid) {
            return new ShowAdvanced(formid);
        }
    };
});
NaverCloudPlatform/ncloud-sdk-python
lib/services/clouddb/ncloud_clouddb/model/backup_file.py
# coding: utf-8 """ clouddb Generated by: https://github.com/swagger-api/swagger-codegen.git """ import pprint import re # noqa: F401 import six from ncloud_clouddb.model.common_code import CommonCode # noqa: F401,E501 class BackupFile(object): """NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. """ """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'host_name': 'str', 'file_name': 'str', 'database_name': 'str', 'first_lsn': 'str', 'last_lsn': 'str', 'backup_type': 'CommonCode', 'backup_start_time': 'str', 'backup_end_time': 'str' } attribute_map = { 'host_name': 'hostName', 'file_name': 'fileName', 'database_name': 'databaseName', 'first_lsn': 'firstLsn', 'last_lsn': 'LastLsn', 'backup_type': 'backupType', 'backup_start_time': 'backupStartTime', 'backup_end_time': 'backupEndTime' } def __init__(self, host_name=None, file_name=None, database_name=None, first_lsn=None, last_lsn=None, backup_type=None, backup_start_time=None, backup_end_time=None): # noqa: E501 """BackupFile - a model defined in Swagger""" # noqa: E501 self._host_name = None self._file_name = None self._database_name = None self._first_lsn = None self._last_lsn = None self._backup_type = None self._backup_start_time = None self._backup_end_time = None self.discriminator = None if host_name is not None: self.host_name = host_name if file_name is not None: self.file_name = file_name if database_name is not None: self.database_name = database_name if first_lsn is not None: self.first_lsn = first_lsn if last_lsn is not None: self.last_lsn = last_lsn if backup_type is not None: self.backup_type = backup_type if backup_start_time is not None: self.backup_start_time = backup_start_time if backup_end_time is not None: self.backup_end_time = backup_end_time @property def host_name(self): """Gets the 
host_name of this BackupFile. # noqa: E501 호스트이름 # noqa: E501 :return: The host_name of this BackupFile. # noqa: E501 :rtype: str """ return self._host_name @host_name.setter def host_name(self, host_name): """Sets the host_name of this BackupFile. 호스트이름 # noqa: E501 :param host_name: The host_name of this BackupFile. # noqa: E501 :type: str """ self._host_name = host_name @property def file_name(self): """Gets the file_name of this BackupFile. # noqa: E501 파일이름 # noqa: E501 :return: The file_name of this BackupFile. # noqa: E501 :rtype: str """ return self._file_name @file_name.setter def file_name(self, file_name): """Sets the file_name of this BackupFile. 파일이름 # noqa: E501 :param file_name: The file_name of this BackupFile. # noqa: E501 :type: str """ self._file_name = file_name @property def database_name(self): """Gets the database_name of this BackupFile. # noqa: E501 데이터베이스이름 # noqa: E501 :return: The database_name of this BackupFile. # noqa: E501 :rtype: str """ return self._database_name @database_name.setter def database_name(self, database_name): """Sets the database_name of this BackupFile. 데이터베이스이름 # noqa: E501 :param database_name: The database_name of this BackupFile. # noqa: E501 :type: str """ self._database_name = database_name @property def first_lsn(self): """Gets the first_lsn of this BackupFile. # noqa: E501 시작LSN # noqa: E501 :return: The first_lsn of this BackupFile. # noqa: E501 :rtype: str """ return self._first_lsn @first_lsn.setter def first_lsn(self, first_lsn): """Sets the first_lsn of this BackupFile. 시작LSN # noqa: E501 :param first_lsn: The first_lsn of this BackupFile. # noqa: E501 :type: str """ self._first_lsn = first_lsn @property def last_lsn(self): """Gets the last_lsn of this BackupFile. # noqa: E501 마지막LSN # noqa: E501 :return: The last_lsn of this BackupFile. # noqa: E501 :rtype: str """ return self._last_lsn @last_lsn.setter def last_lsn(self, last_lsn): """Sets the last_lsn of this BackupFile. 
마지막LSN # noqa: E501 :param last_lsn: The last_lsn of this BackupFile. # noqa: E501 :type: str """ self._last_lsn = last_lsn @property def backup_type(self): """Gets the backup_type of this BackupFile. # noqa: E501 백업유형 # noqa: E501 :return: The backup_type of this BackupFile. # noqa: E501 :rtype: CommonCode """ return self._backup_type @backup_type.setter def backup_type(self, backup_type): """Sets the backup_type of this BackupFile. 백업유형 # noqa: E501 :param backup_type: The backup_type of this BackupFile. # noqa: E501 :type: CommonCode """ self._backup_type = backup_type @property def backup_start_time(self): """Gets the backup_start_time of this BackupFile. # noqa: E501 백업시작시간 # noqa: E501 :return: The backup_start_time of this BackupFile. # noqa: E501 :rtype: str """ return self._backup_start_time @backup_start_time.setter def backup_start_time(self, backup_start_time): """Sets the backup_start_time of this BackupFile. 백업시작시간 # noqa: E501 :param backup_start_time: The backup_start_time of this BackupFile. # noqa: E501 :type: str """ self._backup_start_time = backup_start_time @property def backup_end_time(self): """Gets the backup_end_time of this BackupFile. # noqa: E501 백업종료시간 # noqa: E501 :return: The backup_end_time of this BackupFile. # noqa: E501 :rtype: str """ return self._backup_end_time @backup_end_time.setter def backup_end_time(self, backup_end_time): """Sets the backup_end_time of this BackupFile. 백업종료시간 # noqa: E501 :param backup_end_time: The backup_end_time of this BackupFile. 
# noqa: E501 :type: str """ self._backup_end_time = backup_end_time def to_dict(self): """Returns the model properties as a dict""" result = {} for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value return result def to_str(self): """Returns the string representation of the model""" return pprint.pformat(self.to_dict()) def __repr__(self): """For `print` and `pprint`""" return self.to_str() def __eq__(self, other): """Returns true if both objects are equal""" if not isinstance(other, BackupFile): return False return self.__dict__ == other.__dict__ def __ne__(self, other): """Returns true if both objects are not equal""" return not self == other
helderhernandez/apaw-practice
src/test/java/es/upm/miw/apaw_practice/adapters/rest/tree_conservation/InspectorResourceIT.java
package es.upm.miw.apaw_practice.adapters.rest.tree_conservation; import es.upm.miw.apaw_practice.adapters.rest.RestTestConfig; import es.upm.miw.apaw_practice.domain.models.tree_conservation.InspectorName; import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.test.web.reactive.server.WebTestClient; import org.springframework.web.reactive.function.BodyInserters; @RestTestConfig class InspectorResourceIT { @Autowired private WebTestClient webTestClient; @Test void testUpdate() { InspectorName inspectorNameTest = new InspectorName(); inspectorNameTest.setDni("fakeDni"); inspectorNameTest.setFirstName("fakeFirstName"); inspectorNameTest.setLastName("fakeLastName"); this.webTestClient .patch() .uri(InspectorResource.INSPECTORS) .body(BodyInserters.fromValue(inspectorNameTest)) .exchange() .expectStatus().isNotFound(); } }
pylipp/flask-jwt-extended
tests/test_get_methods_in_optional_endpoint.py
<filename>tests/test_get_methods_in_optional_endpoint.py<gh_stars>1000+ import pytest from flask import Flask from flask import jsonify from flask_jwt_extended import current_user from flask_jwt_extended import get_current_user from flask_jwt_extended import get_jwt from flask_jwt_extended import get_jwt_header from flask_jwt_extended import get_jwt_identity from flask_jwt_extended import jwt_required from flask_jwt_extended import JWTManager @pytest.fixture(scope="function") def app(): app = Flask(__name__) app.config["JWT_SECRET_KEY"] = "foobarbaz" jwt = JWTManager(app) @app.route("/optional", methods=["GET"]) @jwt_required(optional=True) def access_protected(): assert get_jwt() == {} assert get_jwt_header() == {} assert get_jwt_identity() == None # noqa: E711 assert get_current_user() == None # noqa: E711 assert current_user == None # noqa: E711 return jsonify(foo="bar") @jwt.user_lookup_loader def user_lookup_callback(_jwt_header, _jwt_data): assert True == False # noqa: E712 return app def test_get_jwt_in_optional_route(app): test_client = app.test_client() response = test_client.get("/optional") assert response.status_code == 200 assert response.get_json() == {"foo": "bar"}
yoikosugi/rails_tutorial_rspec
spec/system/following_spec.rb
<gh_stars>0 require 'rails_helper' RSpec.describe "FollowingPageFollowersPage", type: :system do let!(:user) { FactoryBot.create(:user) } let!(:other_user) { FactoryBot.create_list(:user, 50, :other_user)} let!(:relationship) { FactoryBot.create_list(:relationship, 5)} let!(:passive) { FactoryBot.create_list(:relationship, 5, :passive) } describe "following page" do it "正しく表示される" do visit login_path click_link "Log in" fill_in "Email", with: user.email fill_in "Password", with: <PASSWORD> click_button "Log in" expect(current_path).to eq user_path(user) visit following_user_path(user) expect(user.following).to_not be_empty expect(page).to have_content user.following.count.to_s user.following.each do |u| expect(page).to have_link u.name, user_path(u) end end end describe "followers page" do it "正しく表示される" do visit login_path click_link "Log in" fill_in "Email", with: user.email fill_in "Password", with: <PASSWORD> click_button "Log in" expect(current_path).to eq user_path(user) visit followers_user_path(user) expect(user.followers).to_not be_empty expect(page).to have_content user.followers.count.to_s user.followers.each do |u| expect(page).to have_link u.name, user_path(u) end end end end
trompamusic/trompa-campaign-manager
src/redux/Startup/Startup.types.js
<gh_stars>0 const namespace = 'STARTUP'; export const STARTUP = `${namespace}/STARTUP`; export const STARTUP_SUCCESS = `${namespace}/STARTUP_SUCCESS`;
salesforce/bazel-jdt-java-toolchain
compiler/src/main/ecj/org/eclipse/jdt/internal/compiler/lookup/SortedCompoundNameVector.java
<reponame>salesforce/bazel-jdt-java-toolchain /******************************************************************************* * Copyright (c) 2019 <NAME> and others. * * This program and the accompanying materials * are made available under the terms of the Eclipse Public License 2.0 * which accompanies this distribution, and is available at * https://www.eclipse.org/legal/epl-2.0/ * * SPDX-License-Identifier: EPL-2.0 * * Contributors: * <NAME> - initial API and implementation *******************************************************************************/ package org.eclipse.jdt.internal.compiler.lookup; import java.util.Arrays; import org.eclipse.jdt.core.compiler.CharOperation; import org.eclipse.jdt.internal.compiler.util.SortedCharArrays; /** * Sorted and simplified version of previously existed CompoundNameVector */ final class SortedCompoundNameVector { static int INITIAL_SIZE = 10; int size; char[][][] elements; public SortedCompoundNameVector() { this.size = 0; this.elements = new char[INITIAL_SIZE][][]; } public boolean add(char[][] newElement) { int idx = Arrays.binarySearch(this.elements, 0, this.size, newElement, SortedCharArrays.CHAR_CHAR_ARR_COMPARATOR); if (idx < 0) { this.elements = SortedCharArrays.insertIntoArray( this.elements, this.size < this.elements.length ? this.elements : new char[this.elements.length * 2][][], newElement, -(idx + 1), this.size++); return true; } return false; } public char[][] elementAt(int index) { return this.elements[index]; } @Override public String toString() { StringBuilder buffer = new StringBuilder(); for (int i = 0; i < this.size; i++) { buffer.append(CharOperation.toString(this.elements[i])).append("\n"); //$NON-NLS-1$ } return buffer.toString(); } }
cwinand/diplomacy
db/seeds.rb
# This file should contain all the record creation needed to seed the database with its default values. # The data can then be loaded with the rails db:seed command (or created alongside the database with db:setup). Country.create([ { country_code: 'de', name: 'germany' }, { country_code: 'fr', name: 'france' }, { country_code: 'en', name: 'england' }, { country_code: 'ru', name: 'russia' }, { country_code: 'tr', name: 'turkey' }, { country_code: 'it', name: 'italy' }, { country_code: 'au', name: 'austria' } ]) Province.create([ { 'province_code':'adr', 'name':'adriatic sea', 'province_type':'sea', 'is_sc':0, 'home_of':nil, 'coast_1':nil, 'coast_2':nil }, { 'province_code':'aeg', 'name':'aegean sea', 'province_type':'coast', 'is_sc':0, 'home_of':nil, 'coast_1':nil, 'coast_2':nil }, { 'province_code':'alb', 'name':'albania', 'province_type':'coast', 'is_sc':0, 'home_of':nil, 'coast_1':nil, 'coast_2':nil }, { 'province_code':'ank', 'name':'ankara', 'province_type':'coast', 'is_sc':1, 'home_of':'tr', 'coast_1':nil, 'coast_2':nil }, { 'province_code':'apu', 'name':'apulia', 'province_type':'coast', 'is_sc':0, 'home_of':nil, 'coast_1':nil, 'coast_2':nil }, { 'province_code':'arm', 'name':'armenia', 'province_type':'coast', 'is_sc':0, 'home_of':'tr', 'coast_1':nil, 'coast_2':nil }, { 'province_code':'bal', 'name':'baltic sea', 'province_type':'sea', 'is_sc':0, 'home_of':nil, 'coast_1':nil, 'coast_2':nil }, { 'province_code':'bar', 'name':'barents sea', 'province_type':'sea', 'is_sc':0, 'home_of':nil, 'coast_1':nil, 'coast_2':nil }, { 'province_code':'bel', 'name':'belgium', 'province_type':'coast', 'is_sc':1, 'home_of':nil, 'coast_1':nil, 'coast_2':nil }, { 'province_code':'ber', 'name':'berlin', 'province_type':'coast', 'is_sc':1, 'home_of':'de', 'coast_1':nil, 'coast_2':nil }, { 'province_code':'bla', 'name':'black sea', 'province_type':'sea', 'is_sc':0, 'home_of':nil, 'coast_1':nil, 'coast_2':nil }, { 'province_code':'boh', 'name':'bohemia', 'province_type':'land', 
'is_sc':0, 'home_of':'au', 'coast_1':nil, 'coast_2':nil }, { 'province_code':'bot', 'name':'gulf of bothnia', 'province_type':'sea', 'is_sc':0, 'home_of':nil, 'coast_1':nil, 'coast_2':nil }, { 'province_code':'bre', 'name':'brest', 'province_type':'coast', 'is_sc':1, 'home_of':'fr', 'coast_1':nil, 'coast_2':nil }, { 'province_code':'bud', 'name':'budapest', 'province_type':'land', 'is_sc':1, 'home_of':'au', 'coast_1':nil, 'coast_2':nil }, { 'province_code':'bul', 'name':'bulgaria', 'province_type':'coast', 'is_sc':1, 'home_of':nil, 'coast_1':'sc', 'coast_2':'ec' }, { 'province_code':'bur', 'name':'burgundy', 'province_type':'land', 'is_sc':0, 'home_of':'fr', 'coast_1':nil, 'coast_2':nil }, { 'province_code':'cly', 'name':'clyde', 'province_type':'coast', 'is_sc':0, 'home_of':'en', 'coast_1':nil, 'coast_2':nil }, { 'province_code':'con', 'name':'constantinople', 'province_type':'coast', 'is_sc':1, 'home_of':'tr', 'coast_1':nil, 'coast_2':nil }, { 'province_code':'den', 'name':'denmark', 'province_type':'coast', 'is_sc':1, 'home_of':nil, 'coast_1':nil, 'coast_2':nil }, { 'province_code':'eas', 'name':'eastern mediterranean', 'province_type':'sea', 'is_sc':0, 'home_of':nil, 'coast_1':nil, 'coast_2':nil }, { 'province_code':'edi', 'name':'edinburgh', 'province_type':'coast', 'is_sc':1, 'home_of':'en', 'coast_1':nil, 'coast_2':nil }, { 'province_code':'eng', 'name':'english channel', 'province_type':'sea', 'is_sc':0, 'home_of':nil, 'coast_1':nil, 'coast_2':nil }, { 'province_code':'fin', 'name':'finland', 'province_type':'coast', 'is_sc':0, 'home_of':nil, 'coast_1':nil, 'coast_2':nil }, { 'province_code':'gal', 'name':'galicia', 'province_type':'land', 'is_sc':0, 'home_of':'au', 'coast_1':nil, 'coast_2':nil }, { 'province_code':'gas', 'name':'gascony', 'province_type':'coast', 'is_sc':0, 'home_of':'fr', 'coast_1':nil, 'coast_2':nil }, { 'province_code':'gre', 'name':'greece', 'province_type':'coast', 'is_sc':1, 'home_of':nil, 'coast_1':nil, 'coast_2':nil }, { 
'province_code':'hel', 'name':'<NAME>', 'province_type':'sea', 'is_sc':0, 'home_of':nil, 'coast_1':nil, 'coast_2':nil }, { 'province_code':'hol', 'name':'holland', 'province_type':'coast', 'is_sc':1, 'home_of':nil, 'coast_1':nil, 'coast_2':nil }, { 'province_code':'ion', 'name':'ionian sea', 'province_type':'sea', 'is_sc':0, 'home_of':nil, 'coast_1':nil, 'coast_2':nil }, { 'province_code':'iri', 'name':'irish sea', 'province_type':'sea', 'is_sc':0, 'home_of':nil, 'coast_1':nil, 'coast_2':nil }, { 'province_code':'kie', 'name':'kiel', 'province_type':'coast', 'is_sc':1, 'home_of':'de', 'coast_1':nil, 'coast_2':nil }, { 'province_code':'lon', 'name':'london', 'province_type':'coast', 'is_sc':1, 'home_of':'en', 'coast_1':nil, 'coast_2':nil }, { 'province_code':'lvp', 'name':'liverpool', 'province_type':'coast', 'is_sc':1, 'home_of':'en', 'coast_1':nil, 'coast_2':nil }, { 'province_code':'lvn', 'name':'livonia', 'province_type':'coast', 'is_sc':0, 'home_of':'ru', 'coast_1':nil, 'coast_2':nil }, { 'province_code':'lyo', 'name':'gulf of lyons', 'province_type':'sea', 'is_sc':0, 'home_of':nil, 'coast_1':nil, 'coast_2':nil }, { 'province_code':'mar', 'name':'marseilles', 'province_type':'coast', 'is_sc':1, 'home_of':'fr', 'coast_1':nil, 'coast_2':nil }, { 'province_code':'mid', 'name':'mid-atlantic ocean', 'province_type':'sea', 'is_sc':0, 'home_of':nil, 'coast_1':nil, 'coast_2':nil }, { 'province_code':'mos', 'name':'moscow', 'province_type':'land', 'is_sc':1, 'home_of':'ru', 'coast_1':nil, 'coast_2':nil }, { 'province_code':'mun', 'name':'munich', 'province_type':'land', 'is_sc':1, 'home_of':'de', 'coast_1':nil, 'coast_2':nil }, { 'province_code':'naf', 'name':'north africa', 'province_type':'coast', 'is_sc':0, 'home_of':nil, 'coast_1':nil, 'coast_2':nil }, { 'province_code':'nap', 'name':'naples', 'province_type':'coast', 'is_sc':1, 'home_of':'it', 'coast_1':nil, 'coast_2':nil }, { 'province_code':'nat', 'name':'north atlantic ocean', 'province_type':'sea', 'is_sc':0, 
'home_of':nil, 'coast_1':nil, 'coast_2':nil }, { 'province_code':'nrg', 'name':'norwegian sea', 'province_type':'sea', 'is_sc':0, 'home_of':nil, 'coast_1':nil, 'coast_2':nil }, { 'province_code':'nth', 'name':'north sea', 'province_type':'sea', 'is_sc':0, 'home_of':nil, 'coast_1':nil, 'coast_2':nil }, { 'province_code':'nor', 'name':'norway', 'province_type':'coast', 'is_sc':1, 'home_of':nil, 'coast_1':nil, 'coast_2':nil }, { 'province_code':'par', 'name':'paris', 'province_type':'land', 'is_sc':1, 'home_of':'fr', 'coast_1':nil, 'coast_2':nil }, { 'province_code':'pic', 'name':'picardy', 'province_type':'coast', 'is_sc':0, 'home_of':'fr', 'coast_1':nil, 'coast_2':nil }, { 'province_code':'pie', 'name':'piedmont', 'province_type':'coast', 'is_sc':0, 'home_of':'it', 'coast_1':nil, 'coast_2':nil }, { 'province_code':'por', 'name':'portugal', 'province_type':'coast', 'is_sc':1, 'home_of':nil, 'coast_1':nil, 'coast_2':nil }, { 'province_code':'pru', 'name':'prussia', 'province_type':'coast', 'is_sc':0, 'home_of':'de', 'coast_1':nil, 'coast_2':nil }, { 'province_code':'rom', 'name':'rome', 'province_type':'coast', 'is_sc':1, 'home_of':'it', 'coast_1':nil, 'coast_2':nil }, { 'province_code':'ruh', 'name':'ruhr', 'province_type':'land', 'is_sc':0, 'home_of':'de', 'coast_1':nil, 'coast_2':nil }, { 'province_code':'rum', 'name':'rumania', 'province_type':'coast', 'is_sc':1, 'home_of':nil, 'coast_1':nil, 'coast_2':nil }, { 'province_code':'ser', 'name':'serbia', 'province_type':'land', 'is_sc':1, 'home_of':nil, 'coast_1':nil, 'coast_2':nil }, { 'province_code':'sev', 'name':'sevastopol', 'province_type':'coast', 'is_sc':1, 'home_of':'ru', 'coast_1':nil, 'coast_2':nil }, { 'province_code':'sil', 'name':'silesia', 'province_type':'land', 'is_sc':0, 'home_of':'de', 'coast_1':nil, 'coast_2':nil }, { 'province_code':'ska', 'name':'skagerrack', 'province_type':'sea', 'is_sc':0, 'home_of':nil, 'coast_1':nil, 'coast_2':nil }, { 'province_code':'smy', 'name':'smyrna', 
'province_type':'coast', 'is_sc':1, 'home_of':'tr', 'coast_1':nil, 'coast_2':nil }, { 'province_code':'spa', 'name':'spain', 'province_type':'coast', 'is_sc':1, 'home_of':nil, 'coast_1':'sc', 'coast_2':'nc' }, { 'province_code':'stp', 'name':'<NAME>', 'province_type':'coast', 'is_sc':1, 'home_of':'ru', 'coast_1':'sc', 'coast_2':'nc' }, { 'province_code':'swe', 'name':'sweden', 'province_type':'coast', 'is_sc':1, 'home_of':nil, 'coast_1':nil, 'coast_2':nil }, { 'province_code':'syr', 'name':'syria', 'province_type':'coast', 'is_sc':0, 'home_of':'tr', 'coast_1':nil, 'coast_2':nil }, { 'province_code':'tri', 'name':'trieste', 'province_type':'coast', 'is_sc':1, 'home_of':'au', 'coast_1':nil, 'coast_2':nil }, { 'province_code':'tro', 'name':'tyrolia', 'province_type':'land', 'is_sc':0, 'home_of':nil, 'coast_1':nil, 'coast_2':nil }, { 'province_code':'tun', 'name':'tunis', 'province_type':'coast', 'is_sc':1, 'home_of':nil, 'coast_1':nil, 'coast_2':nil }, { 'province_code':'tus', 'name':'tuscany', 'province_type':'coast', 'is_sc':0, 'home_of':'it', 'coast_1':nil, 'coast_2':nil }, { 'province_code':'trn', 'name':'tyrrhenian sea', 'province_type':'sea', 'is_sc':0, 'home_of':nil, 'coast_1':nil, 'coast_2':nil }, { 'province_code':'ukr', 'name':'ukraine', 'province_type':'land', 'is_sc':0, 'home_of':'ru', 'coast_1':nil, 'coast_2':nil }, { 'province_code':'ven', 'name':'venice', 'province_type':'coast', 'is_sc':1, 'home_of':'it', 'coast_1':nil, 'coast_2':nil }, { 'province_code':'vie', 'name':'vienna', 'province_type':'land', 'is_sc':1, 'home_of':'au', 'coast_1':nil, 'coast_2':nil }, { 'province_code':'wal', 'name':'wales', 'province_type':'coast', 'is_sc':0, 'home_of':'en', 'coast_1':nil, 'coast_2':nil }, { 'province_code':'war', 'name':'warsaw', 'province_type':'land', 'is_sc':1, 'home_of':'ru', 'coast_1':nil, 'coast_2':nil }, { 'province_code':'wes', 'name':'western mediterranean', 'province_type':'sea', 'is_sc':0, 'home_of':nil, 'coast_1':nil, 'coast_2':nil }, { 
'province_code':'yor', 'name':'yorkshire', 'province_type':'coast', 'is_sc':0, 'home_of':'en', 'coast_1':nil, 'coast_2':nil } ]) ProvinceBorder.create([ {'province_code':'adr', 'border_province_code':'tri', 'border_coastal_code':nil}, {'province_code':'adr', 'border_province_code':'ven', 'border_coastal_code':nil}, {'province_code':'adr', 'border_province_code':'ion', 'border_coastal_code':nil}, {'province_code':'adr', 'border_province_code':'apu', 'border_coastal_code':nil}, {'province_code':'adr', 'border_province_code':'alb', 'border_coastal_code':nil}, {'province_code':'aeg', 'border_province_code':'gre', 'border_coastal_code':nil}, {'province_code':'aeg', 'border_province_code':'bul', 'border_coastal_code':'sc'}, {'province_code':'aeg', 'border_province_code':'con', 'border_coastal_code':nil}, {'province_code':'aeg', 'border_province_code':'ion', 'border_coastal_code':nil}, {'province_code':'aeg', 'border_province_code':'eas', 'border_coastal_code':nil}, {'province_code':'aeg', 'border_province_code':'smy', 'border_coastal_code':nil}, {'province_code':'alb', 'border_province_code':'tri', 'border_coastal_code':nil}, {'province_code':'alb', 'border_province_code':'adr', 'border_coastal_code':nil}, {'province_code':'alb', 'border_province_code':'ion', 'border_coastal_code':nil}, {'province_code':'alb', 'border_province_code':'gre', 'border_coastal_code':nil}, {'province_code':'alb', 'border_province_code':'ser', 'border_coastal_code':nil}, {'province_code':'ank', 'border_province_code':'arm', 'border_coastal_code':nil}, {'province_code':'ank', 'border_province_code':'bla', 'border_coastal_code':nil}, {'province_code':'ank', 'border_province_code':'con', 'border_coastal_code':nil}, {'province_code':'ank', 'border_province_code':'smy', 'border_coastal_code':nil}, {'province_code':'apu', 'border_province_code':'adr', 'border_coastal_code':nil}, {'province_code':'apu', 'border_province_code':'ion', 'border_coastal_code':nil}, {'province_code':'apu', 
'border_province_code':'rom', 'border_coastal_code':nil}, {'province_code':'apu', 'border_province_code':'nap', 'border_coastal_code':nil}, {'province_code':'apu', 'border_province_code':'ven', 'border_coastal_code':nil}, {'province_code':'arm', 'border_province_code':'sev', 'border_coastal_code':nil}, {'province_code':'arm', 'border_province_code':'bla', 'border_coastal_code':nil}, {'province_code':'arm', 'border_province_code':'ank', 'border_coastal_code':nil}, {'province_code':'arm', 'border_province_code':'syr', 'border_coastal_code':nil}, {'province_code':'arm', 'border_province_code':'smy', 'border_coastal_code':nil}, {'province_code':'bal', 'border_province_code':'lvn', 'border_coastal_code':nil}, {'province_code':'bal', 'border_province_code':'pru', 'border_coastal_code':nil}, {'province_code':'bal', 'border_province_code':'ber', 'border_coastal_code':nil}, {'province_code':'bal', 'border_province_code':'kie', 'border_coastal_code':nil}, {'province_code':'bal', 'border_province_code':'den', 'border_coastal_code':nil}, {'province_code':'bal', 'border_province_code':'swe', 'border_coastal_code':nil}, {'province_code':'bal', 'border_province_code':'bot', 'border_coastal_code':nil}, {'province_code':'bar', 'border_province_code':'stp', 'border_coastal_code':'nc'}, {'province_code':'bar', 'border_province_code':'nor', 'border_coastal_code':nil}, {'province_code':'bar', 'border_province_code':'nrg', 'border_coastal_code':nil}, {'province_code':'bel', 'border_province_code':'hol', 'border_coastal_code':nil}, {'province_code':'bel', 'border_province_code':'ruh', 'border_coastal_code':nil}, {'province_code':'bel', 'border_province_code':'bur', 'border_coastal_code':nil}, {'province_code':'bel', 'border_province_code':'pic', 'border_coastal_code':nil}, {'province_code':'bel', 'border_province_code':'eng', 'border_coastal_code':nil}, {'province_code':'bel', 'border_province_code':'nth', 'border_coastal_code':nil}, {'province_code':'ber', 'border_province_code':'pru', 
'border_coastal_code':nil}, {'province_code':'ber', 'border_province_code':'sil', 'border_coastal_code':nil}, {'province_code':'ber', 'border_province_code':'mun', 'border_coastal_code':nil}, {'province_code':'ber', 'border_province_code':'bal', 'border_coastal_code':nil}, {'province_code':'ber', 'border_province_code':'kie', 'border_coastal_code':nil}, {'province_code':'bla', 'border_province_code':'sev', 'border_coastal_code':nil}, {'province_code':'bla', 'border_province_code':'bul', 'border_coastal_code':'ec'}, {'province_code':'bla', 'border_province_code':'rum', 'border_coastal_code':nil}, {'province_code':'bla', 'border_province_code':'con', 'border_coastal_code':nil}, {'province_code':'bla', 'border_province_code':'ank', 'border_coastal_code':nil}, {'province_code':'bla', 'border_province_code':'arm', 'border_coastal_code':nil}, {'province_code':'boh', 'border_province_code':'mun', 'border_coastal_code':nil}, {'province_code':'boh', 'border_province_code':'sil', 'border_coastal_code':nil}, {'province_code':'boh', 'border_province_code':'gal', 'border_coastal_code':nil}, {'province_code':'boh', 'border_province_code':'vie', 'border_coastal_code':nil}, {'province_code':'boh', 'border_province_code':'tro', 'border_coastal_code':nil}, {'province_code':'bot', 'border_province_code':'swe', 'border_coastal_code':nil}, {'province_code':'bot', 'border_province_code':'fin', 'border_coastal_code':nil}, {'province_code':'bot', 'border_province_code':'stp', 'border_coastal_code':'sc'}, {'province_code':'bot', 'border_province_code':'lvn', 'border_coastal_code':nil}, {'province_code':'bot', 'border_province_code':'bal', 'border_coastal_code':nil}, {'province_code':'bre', 'border_province_code':'mid', 'border_coastal_code':nil}, {'province_code':'bre', 'border_province_code':'eng', 'border_coastal_code':nil}, {'province_code':'bre', 'border_province_code':'gas', 'border_coastal_code':nil}, {'province_code':'bre', 'border_province_code':'pic', 'border_coastal_code':nil}, 
{'province_code':'bre', 'border_province_code':'par', 'border_coastal_code':nil}, {'province_code':'bud', 'border_province_code':'gal', 'border_coastal_code':nil}, {'province_code':'bud', 'border_province_code':'rum', 'border_coastal_code':nil}, {'province_code':'bud', 'border_province_code':'ser', 'border_coastal_code':nil}, {'province_code':'bud', 'border_province_code':'vie', 'border_coastal_code':nil}, {'province_code':'bud', 'border_province_code':'tri', 'border_coastal_code':nil}, {'province_code':'bul', 'border_province_code':'rum', 'border_coastal_code':nil}, {'province_code':'bul', 'border_province_code':'con', 'border_coastal_code':nil}, {'province_code':'bul', 'border_province_code':'ser', 'border_coastal_code':nil}, {'province_code':'bul', 'border_province_code':'bla', 'border_coastal_code':'ec'}, {'province_code':'bul', 'border_province_code':'aeg', 'border_coastal_code':'sc'}, {'province_code':'bur', 'border_province_code':'mun', 'border_coastal_code':nil}, {'province_code':'bur', 'border_province_code':'par', 'border_coastal_code':nil}, {'province_code':'bur', 'border_province_code':'mar', 'border_coastal_code':nil}, {'province_code':'bur', 'border_province_code':'gas', 'border_coastal_code':nil}, {'province_code':'bur', 'border_province_code':'ruh', 'border_coastal_code':nil}, {'province_code':'bur', 'border_province_code':'bel', 'border_coastal_code':nil}, {'province_code':'bur', 'border_province_code':'pic', 'border_coastal_code':nil}, {'province_code':'cly', 'border_province_code':'nat', 'border_coastal_code':nil}, {'province_code':'cly', 'border_province_code':'nrg', 'border_coastal_code':nil}, {'province_code':'cly', 'border_province_code':'edi', 'border_coastal_code':nil}, {'province_code':'cly', 'border_province_code':'lvp', 'border_coastal_code':nil}, {'province_code':'con', 'border_province_code':'bul', 'border_coastal_code':nil}, {'province_code':'con', 'border_province_code':'bla', 'border_coastal_code':nil}, {'province_code':'con', 
'border_province_code':'ank', 'border_coastal_code':nil}, {'province_code':'con', 'border_province_code':'smy', 'border_coastal_code':nil}, {'province_code':'con', 'border_province_code':'aeg', 'border_coastal_code':nil}, {'province_code':'den', 'border_province_code':'ska', 'border_coastal_code':nil}, {'province_code':'den', 'border_province_code':'bal', 'border_coastal_code':nil}, {'province_code':'den', 'border_province_code':'kie', 'border_coastal_code':nil}, {'province_code':'den', 'border_province_code':'hel', 'border_coastal_code':nil}, {'province_code':'den', 'border_province_code':'nth', 'border_coastal_code':nil}, {'province_code':'den', 'border_province_code':'swe', 'border_coastal_code':nil}, {'province_code':'eas', 'border_province_code':'smy', 'border_coastal_code':nil}, {'province_code':'eas', 'border_province_code':'syr', 'border_coastal_code':nil}, {'province_code':'eas', 'border_province_code':'aeg', 'border_coastal_code':nil}, {'province_code':'eas', 'border_province_code':'ion', 'border_coastal_code':nil}, {'province_code':'edi', 'border_province_code':'cly', 'border_coastal_code':nil}, {'province_code':'edi', 'border_province_code':'lvp', 'border_coastal_code':nil}, {'province_code':'edi', 'border_province_code':'yor', 'border_coastal_code':nil}, {'province_code':'edi', 'border_province_code':'nrg', 'border_coastal_code':nil}, {'province_code':'edi', 'border_province_code':'nth', 'border_coastal_code':nil}, {'province_code':'eng', 'border_province_code':'wal', 'border_coastal_code':nil}, {'province_code':'eng', 'border_province_code':'lon', 'border_coastal_code':nil}, {'province_code':'eng', 'border_province_code':'bre', 'border_coastal_code':nil}, {'province_code':'eng', 'border_province_code':'bel', 'border_coastal_code':nil}, {'province_code':'eng', 'border_province_code':'pic', 'border_coastal_code':nil}, {'province_code':'eng', 'border_province_code':'iri', 'border_coastal_code':nil}, {'province_code':'eng', 'border_province_code':'mid', 
'border_coastal_code':nil}, {'province_code':'fin', 'border_province_code':'stp', 'border_coastal_code':nil}, {'province_code':'fin', 'border_province_code':'nor', 'border_coastal_code':nil}, {'province_code':'fin', 'border_province_code':'swe', 'border_coastal_code':nil}, {'province_code':'fin', 'border_province_code':'bot', 'border_coastal_code':nil}, {'province_code':'gal', 'border_province_code':'war', 'border_coastal_code':nil}, {'province_code':'gal', 'border_province_code':'ukr', 'border_coastal_code':nil}, {'province_code':'gal', 'border_province_code':'rum', 'border_coastal_code':nil}, {'province_code':'gal', 'border_province_code':'bud', 'border_coastal_code':nil}, {'province_code':'gal', 'border_province_code':'vie', 'border_coastal_code':nil}, {'province_code':'gal', 'border_province_code':'boh', 'border_coastal_code':nil}, {'province_code':'gal', 'border_province_code':'sil', 'border_coastal_code':nil}, {'province_code':'gas', 'border_province_code':'spa', 'border_coastal_code':nil}, {'province_code':'gas', 'border_province_code':'bre', 'border_coastal_code':nil}, {'province_code':'gas', 'border_province_code':'bur', 'border_coastal_code':nil}, {'province_code':'gas', 'border_province_code':'par', 'border_coastal_code':nil}, {'province_code':'gas', 'border_province_code':'mar', 'border_coastal_code':nil}, {'province_code':'gas', 'border_province_code':'mid', 'border_coastal_code':nil}, {'province_code':'gre', 'border_province_code':'aeg', 'border_coastal_code':nil}, {'province_code':'gre', 'border_province_code':'bul', 'border_coastal_code':nil}, {'province_code':'gre', 'border_province_code':'ser', 'border_coastal_code':nil}, {'province_code':'gre', 'border_province_code':'alb', 'border_coastal_code':nil}, {'province_code':'gre', 'border_province_code':'ion', 'border_coastal_code':nil}, {'province_code':'hel', 'border_province_code':'den', 'border_coastal_code':nil}, {'province_code':'hel', 'border_province_code':'nth', 'border_coastal_code':nil}, 
{'province_code':'hel', 'border_province_code':'hol', 'border_coastal_code':nil}, {'province_code':'hel', 'border_province_code':'kie', 'border_coastal_code':nil}, {'province_code':'hol', 'border_province_code':'bel', 'border_coastal_code':nil}, {'province_code':'hol', 'border_province_code':'ruh', 'border_coastal_code':nil}, {'province_code':'hol', 'border_province_code':'kie', 'border_coastal_code':nil}, {'province_code':'hol', 'border_province_code':'hel', 'border_coastal_code':nil}, {'province_code':'hol', 'border_province_code':'nth', 'border_coastal_code':nil}, {'province_code':'ion', 'border_province_code':'aeg', 'border_coastal_code':nil}, {'province_code':'ion', 'border_province_code':'nap', 'border_coastal_code':nil}, {'province_code':'ion', 'border_province_code':'adr', 'border_coastal_code':nil}, {'province_code':'ion', 'border_province_code':'apu', 'border_coastal_code':nil}, {'province_code':'ion', 'border_province_code':'eas', 'border_coastal_code':nil}, {'province_code':'ion', 'border_province_code':'gre', 'border_coastal_code':nil}, {'province_code':'ion', 'border_province_code':'alb', 'border_coastal_code':nil}, {'province_code':'ion', 'border_province_code':'tun', 'border_coastal_code':nil}, {'province_code':'ion', 'border_province_code':'trn', 'border_coastal_code':nil}, {'province_code':'iri', 'border_province_code':'lvp', 'border_coastal_code':nil}, {'province_code':'iri', 'border_province_code':'wal', 'border_coastal_code':nil}, {'province_code':'iri', 'border_province_code':'eng', 'border_coastal_code':nil}, {'province_code':'iri', 'border_province_code':'mid', 'border_coastal_code':nil}, {'province_code':'iri', 'border_province_code':'nat', 'border_coastal_code':nil}, {'province_code':'kie', 'border_province_code':'den', 'border_coastal_code':nil}, {'province_code':'kie', 'border_province_code':'ber', 'border_coastal_code':nil}, {'province_code':'kie', 'border_province_code':'mun', 'border_coastal_code':nil}, {'province_code':'kie', 
'border_province_code':'ruh', 'border_coastal_code':nil}, {'province_code':'kie', 'border_province_code':'hol', 'border_coastal_code':nil}, {'province_code':'kie', 'border_province_code':'hel', 'border_coastal_code':nil}, {'province_code':'kie', 'border_province_code':'bal', 'border_coastal_code':nil}, {'province_code':'lon', 'border_province_code':'yor', 'border_coastal_code':nil}, {'province_code':'lon', 'border_province_code':'wal', 'border_coastal_code':nil}, {'province_code':'lon', 'border_province_code':'eng', 'border_coastal_code':nil}, {'province_code':'lon', 'border_province_code':'nth', 'border_coastal_code':nil}, {'province_code':'lvn', 'border_province_code':'bal', 'border_coastal_code':nil}, {'province_code':'lvn', 'border_province_code':'stp', 'border_coastal_code':nil}, {'province_code':'lvn', 'border_province_code':'mos', 'border_coastal_code':nil}, {'province_code':'lvn', 'border_province_code':'bot', 'border_coastal_code':nil}, {'province_code':'lvn', 'border_province_code':'war', 'border_coastal_code':nil}, {'province_code':'lvn', 'border_province_code':'pru', 'border_coastal_code':nil}, {'province_code':'lvp', 'border_province_code':'wal', 'border_coastal_code':nil}, {'province_code':'lvp', 'border_province_code':'yor', 'border_coastal_code':nil}, {'province_code':'lvp', 'border_province_code':'edi', 'border_coastal_code':nil}, {'province_code':'lvp', 'border_province_code':'cly', 'border_coastal_code':nil}, {'province_code':'lvp', 'border_province_code':'iri', 'border_coastal_code':nil}, {'province_code':'lvp', 'border_province_code':'nat', 'border_coastal_code':nil}, {'province_code':'lyo', 'border_province_code':'mar', 'border_coastal_code':nil}, {'province_code':'lyo', 'border_province_code':'pie', 'border_coastal_code':nil}, {'province_code':'lyo', 'border_province_code':'tus', 'border_coastal_code':nil}, {'province_code':'lyo', 'border_province_code':'trn', 'border_coastal_code':nil}, {'province_code':'lyo', 'border_province_code':'wes', 
'border_coastal_code':nil}, {'province_code':'lyo', 'border_province_code':'spa', 'border_coastal_code':nil}, {'province_code':'mar', 'border_province_code':'spa', 'border_coastal_code':nil}, {'province_code':'mar', 'border_province_code':'gas', 'border_coastal_code':nil}, {'province_code':'mar', 'border_province_code':'bur', 'border_coastal_code':nil}, {'province_code':'mar', 'border_province_code':'pie', 'border_coastal_code':nil}, {'province_code':'mar', 'border_province_code':'lyo', 'border_coastal_code':nil}, {'province_code':'mid', 'border_province_code':'iri', 'border_coastal_code':nil}, {'province_code':'mid', 'border_province_code':'nat', 'border_coastal_code':nil}, {'province_code':'mid', 'border_province_code':'bre', 'border_coastal_code':nil}, {'province_code':'mid', 'border_province_code':'gas', 'border_coastal_code':nil}, {'province_code':'mid', 'border_province_code':'naf', 'border_coastal_code':nil}, {'province_code':'mid', 'border_province_code':'wes', 'border_coastal_code':nil}, {'province_code':'mid', 'border_province_code':'spa', 'border_coastal_code':'nc'}, {'province_code':'mid', 'border_province_code':'eng', 'border_coastal_code':nil}, {'province_code':'mid', 'border_province_code':'por', 'border_coastal_code':nil}, {'province_code':'mos', 'border_province_code':'stp', 'border_coastal_code':nil}, {'province_code':'mos', 'border_province_code':'war', 'border_coastal_code':nil}, {'province_code':'mos', 'border_province_code':'lvn', 'border_coastal_code':nil}, {'province_code':'mos', 'border_province_code':'ukr', 'border_coastal_code':nil}, {'province_code':'mos', 'border_province_code':'sev', 'border_coastal_code':nil}, {'province_code':'mun', 'border_province_code':'bur', 'border_coastal_code':nil}, {'province_code':'mun', 'border_province_code':'boh', 'border_coastal_code':nil}, {'province_code':'mun', 'border_province_code':'sil', 'border_coastal_code':nil}, {'province_code':'mun', 'border_province_code':'ber', 'border_coastal_code':nil}, 
{'province_code':'mun', 'border_province_code':'kie', 'border_coastal_code':nil}, {'province_code':'mun', 'border_province_code':'ruh', 'border_coastal_code':nil}, {'province_code':'mun', 'border_province_code':'tro', 'border_coastal_code':nil}, {'province_code':'naf', 'border_province_code':'tun', 'border_coastal_code':nil}, {'province_code':'naf', 'border_province_code':'wes', 'border_coastal_code':nil}, {'province_code':'naf', 'border_province_code':'mid', 'border_coastal_code':nil}, {'province_code':'nap', 'border_province_code':'rom', 'border_coastal_code':nil}, {'province_code':'nap', 'border_province_code':'apu', 'border_coastal_code':nil}, {'province_code':'nap', 'border_province_code':'ion', 'border_coastal_code':nil}, {'province_code':'nap', 'border_province_code':'trn', 'border_coastal_code':nil}, {'province_code':'nat', 'border_province_code':'mid', 'border_coastal_code':nil}, {'province_code':'nat', 'border_province_code':'cly', 'border_coastal_code':nil}, {'province_code':'nat', 'border_province_code':'nrg', 'border_coastal_code':nil}, {'province_code':'nat', 'border_province_code':'lvp', 'border_coastal_code':nil}, {'province_code':'nat', 'border_province_code':'iri', 'border_coastal_code':nil}, {'province_code':'nor', 'border_province_code':'nrg', 'border_coastal_code':nil}, {'province_code':'nor', 'border_province_code':'swe', 'border_coastal_code':nil}, {'province_code':'nor', 'border_province_code':'fin', 'border_coastal_code':nil}, {'province_code':'nor', 'border_province_code':'stp', 'border_coastal_code':nil}, {'province_code':'nor', 'border_province_code':'ska', 'border_coastal_code':nil}, {'province_code':'nor', 'border_province_code':'nth', 'border_coastal_code':nil}, {'province_code':'nor', 'border_province_code':'bar', 'border_coastal_code':nil}, {'province_code':'nrg', 'border_province_code':'nor', 'border_coastal_code':nil}, {'province_code':'nrg', 'border_province_code':'bar', 'border_coastal_code':nil}, {'province_code':'nrg', 
'border_province_code':'nth', 'border_coastal_code':nil}, {'province_code':'nrg', 'border_province_code':'nat', 'border_coastal_code':nil}, {'province_code':'nrg', 'border_province_code':'edi', 'border_coastal_code':nil}, {'province_code':'nrg', 'border_province_code':'cly', 'border_coastal_code':nil}, {'province_code':'nth', 'border_province_code':'nrg', 'border_coastal_code':nil}, {'province_code':'nth', 'border_province_code':'nor', 'border_coastal_code':nil}, {'province_code':'nth', 'border_province_code':'edi', 'border_coastal_code':nil}, {'province_code':'nth', 'border_province_code':'yor', 'border_coastal_code':nil}, {'province_code':'nth', 'border_province_code':'lon', 'border_coastal_code':nil}, {'province_code':'nth', 'border_province_code':'eng', 'border_coastal_code':nil}, {'province_code':'nth', 'border_province_code':'hel', 'border_coastal_code':nil}, {'province_code':'nth', 'border_province_code':'ska', 'border_coastal_code':nil}, {'province_code':'nth', 'border_province_code':'bel', 'border_coastal_code':nil}, {'province_code':'nth', 'border_province_code':'hol', 'border_coastal_code':nil}, {'province_code':'nth', 'border_province_code':'den', 'border_coastal_code':nil}, {'province_code':'par', 'border_province_code':'bur', 'border_coastal_code':nil}, {'province_code':'par', 'border_province_code':'bre', 'border_coastal_code':nil}, {'province_code':'par', 'border_province_code':'pic', 'border_coastal_code':nil}, {'province_code':'par', 'border_province_code':'gas', 'border_coastal_code':nil}, {'province_code':'pic', 'border_province_code':'par', 'border_coastal_code':nil}, {'province_code':'pic', 'border_province_code':'bel', 'border_coastal_code':nil}, {'province_code':'pic', 'border_province_code':'bre', 'border_coastal_code':nil}, {'province_code':'pic', 'border_province_code':'eng', 'border_coastal_code':nil}, {'province_code':'pic', 'border_province_code':'bur', 'border_coastal_code':nil}, {'province_code':'pie', 'border_province_code':'mar', 
'border_coastal_code':nil}, {'province_code':'pie', 'border_province_code':'ven', 'border_coastal_code':nil}, {'province_code':'pie', 'border_province_code':'tro', 'border_coastal_code':nil}, {'province_code':'pie', 'border_province_code':'lyo', 'border_coastal_code':nil}, {'province_code':'pie', 'border_province_code':'tus', 'border_coastal_code':nil}, {'province_code':'por', 'border_province_code':'mid', 'border_coastal_code':nil}, {'province_code':'por', 'border_province_code':'spa', 'border_coastal_code':'both'}, {'province_code':'pru', 'border_province_code':'ber', 'border_coastal_code':nil}, {'province_code':'pru', 'border_province_code':'bal', 'border_coastal_code':nil}, {'province_code':'pru', 'border_province_code':'war', 'border_coastal_code':nil}, {'province_code':'pru', 'border_province_code':'sil', 'border_coastal_code':nil}, {'province_code':'pru', 'border_province_code':'lvn', 'border_coastal_code':nil}, {'province_code':'rom', 'border_province_code':'tus', 'border_coastal_code':nil}, {'province_code':'rom', 'border_province_code':'trn', 'border_coastal_code':nil}, {'province_code':'rom', 'border_province_code':'ven', 'border_coastal_code':nil}, {'province_code':'rom', 'border_province_code':'apu', 'border_coastal_code':nil}, {'province_code':'rom', 'border_province_code':'nap', 'border_coastal_code':nil}, {'province_code':'ruh', 'border_province_code':'mun', 'border_coastal_code':nil}, {'province_code':'ruh', 'border_province_code':'bel', 'border_coastal_code':nil}, {'province_code':'ruh', 'border_province_code':'hol', 'border_coastal_code':nil}, {'province_code':'ruh', 'border_province_code':'kie', 'border_coastal_code':nil}, {'province_code':'ruh', 'border_province_code':'bur', 'border_coastal_code':nil}, {'province_code':'rum', 'border_province_code':'bul', 'border_coastal_code':nil}, {'province_code':'rum', 'border_province_code':'bud', 'border_coastal_code':nil}, {'province_code':'rum', 'border_province_code':'sev', 'border_coastal_code':nil}, 
{'province_code':'rum', 'border_province_code':'bla', 'border_coastal_code':nil}, {'province_code':'rum', 'border_province_code':'gal', 'border_coastal_code':nil}, {'province_code':'rum', 'border_province_code':'ukr', 'border_coastal_code':nil}, {'province_code':'rum', 'border_province_code':'ser', 'border_coastal_code':nil}, {'province_code':'ser', 'border_province_code':'tri', 'border_coastal_code':nil}, {'province_code':'ser', 'border_province_code':'bud', 'border_coastal_code':nil}, {'province_code':'ser', 'border_province_code':'rum', 'border_coastal_code':nil}, {'province_code':'ser', 'border_province_code':'bul', 'border_coastal_code':nil}, {'province_code':'ser', 'border_province_code':'gre', 'border_coastal_code':nil}, {'province_code':'ser', 'border_province_code':'alb', 'border_coastal_code':nil}, {'province_code':'sev', 'border_province_code':'mos', 'border_coastal_code':nil}, {'province_code':'sev', 'border_province_code':'ukr', 'border_coastal_code':nil}, {'province_code':'sev', 'border_province_code':'rum', 'border_coastal_code':nil}, {'province_code':'sev', 'border_province_code':'bla', 'border_coastal_code':nil}, {'province_code':'sev', 'border_province_code':'arm', 'border_coastal_code':nil}, {'province_code':'sil', 'border_province_code':'ber', 'border_coastal_code':nil}, {'province_code':'sil', 'border_province_code':'pru', 'border_coastal_code':nil}, {'province_code':'sil', 'border_province_code':'war', 'border_coastal_code':nil}, {'province_code':'sil', 'border_province_code':'gal', 'border_coastal_code':nil}, {'province_code':'sil', 'border_province_code':'boh', 'border_coastal_code':nil}, {'province_code':'sil', 'border_province_code':'mun', 'border_coastal_code':nil}, {'province_code':'ska', 'border_province_code':'nor', 'border_coastal_code':nil}, {'province_code':'ska', 'border_province_code':'swe', 'border_coastal_code':nil}, {'province_code':'ska', 'border_province_code':'bal', 'border_coastal_code':nil}, {'province_code':'ska', 
'border_province_code':'nth', 'border_coastal_code':nil}, {'province_code':'ska', 'border_province_code':'den', 'border_coastal_code':nil}, {'province_code':'smy', 'border_province_code':'cons', 'border_coastal_code':nil}, {'province_code':'smy', 'border_province_code':'ank', 'border_coastal_code':nil}, {'province_code':'smy', 'border_province_code':'syr', 'border_coastal_code':nil}, {'province_code':'smy', 'border_province_code':'arm', 'border_coastal_code':nil}, {'province_code':'smy', 'border_province_code':'eas', 'border_coastal_code':nil}, {'province_code':'smy', 'border_province_code':'aeg', 'border_coastal_code':nil}, {'province_code':'spa', 'border_province_code':'por', 'border_coastal_code':'both'}, {'province_code':'spa', 'border_province_code':'mid', 'border_coastal_code':'nc'}, {'province_code':'spa', 'border_province_code':'wes', 'border_coastal_code':'sc'}, {'province_code':'spa', 'border_province_code':'lyo', 'border_coastal_code':nil}, {'province_code':'spa', 'border_province_code':'mar', 'border_coastal_code':nil}, {'province_code':'spa', 'border_province_code':'gas', 'border_coastal_code':nil}, {'province_code':'stp', 'border_province_code':'fin', 'border_coastal_code':nil}, {'province_code':'stp', 'border_province_code':'nor', 'border_coastal_code':nil}, {'province_code':'stp', 'border_province_code':'mos', 'border_coastal_code':nil}, {'province_code':'stp', 'border_province_code':'bar', 'border_coastal_code':'nc'}, {'province_code':'stp', 'border_province_code':'bot', 'border_coastal_code':'sc'}, {'province_code':'stp', 'border_province_code':'lvn', 'border_coastal_code':nil}, {'province_code':'swe', 'border_province_code':'nor', 'border_coastal_code':nil}, {'province_code':'swe', 'border_province_code':'fin', 'border_coastal_code':nil}, {'province_code':'swe', 'border_province_code':'den', 'border_coastal_code':nil}, {'province_code':'swe', 'border_province_code':'bot', 'border_coastal_code':nil}, {'province_code':'swe', 
'border_province_code':'ska', 'border_coastal_code':nil}, {'province_code':'syr', 'border_province_code':'arm', 'border_coastal_code':nil}, {'province_code':'syr', 'border_province_code':'syr', 'border_coastal_code':nil}, {'province_code':'syr', 'border_province_code':'eas', 'border_coastal_code':nil}, {'province_code':'tri', 'border_province_code':'adr', 'border_coastal_code':nil}, {'province_code':'tri', 'border_province_code':'ven', 'border_coastal_code':nil}, {'province_code':'tri', 'border_province_code':'tro', 'border_coastal_code':nil}, {'province_code':'tri', 'border_province_code':'vie', 'border_coastal_code':nil}, {'province_code':'tri', 'border_province_code':'bud', 'border_coastal_code':nil}, {'province_code':'tri', 'border_province_code':'ser', 'border_coastal_code':nil}, {'province_code':'tri', 'border_province_code':'alb', 'border_coastal_code':nil}, {'province_code':'trn', 'border_province_code':'tus', 'border_coastal_code':nil}, {'province_code':'trn', 'border_province_code':'rom', 'border_coastal_code':nil}, {'province_code':'trn', 'border_province_code':'nap', 'border_coastal_code':nil}, {'province_code':'trn', 'border_province_code':'wes', 'border_coastal_code':nil}, {'province_code':'trn', 'border_province_code':'lyo', 'border_coastal_code':nil}, {'province_code':'trn', 'border_province_code':'ion', 'border_coastal_code':nil}, {'province_code':'trn', 'border_province_code':'tun', 'border_coastal_code':nil}, {'province_code':'tro', 'border_province_code':'tri', 'border_coastal_code':nil}, {'province_code':'tro', 'border_province_code':'mun', 'border_coastal_code':nil}, {'province_code':'tro', 'border_province_code':'boh', 'border_coastal_code':nil}, {'province_code':'tro', 'border_province_code':'pie', 'border_coastal_code':nil}, {'province_code':'tro', 'border_province_code':'ven', 'border_coastal_code':nil}, {'province_code':'tun', 'border_province_code':'trn', 'border_coastal_code':nil}, {'province_code':'tun', 'border_province_code':'ion', 
'border_coastal_code':nil}, {'province_code':'tun', 'border_province_code':'wes', 'border_coastal_code':nil}, {'province_code':'tun', 'border_province_code':'naf', 'border_coastal_code':nil}, {'province_code':'tus', 'border_province_code':'pie', 'border_coastal_code':nil}, {'province_code':'tus', 'border_province_code':'ven', 'border_coastal_code':nil}, {'province_code':'tus', 'border_province_code':'rom', 'border_coastal_code':nil}, {'province_code':'tus', 'border_province_code':'trn', 'border_coastal_code':nil}, {'province_code':'tus', 'border_province_code':'lyo', 'border_coastal_code':nil}, {'province_code':'ukr', 'border_province_code':'mos', 'border_coastal_code':nil}, {'province_code':'ukr', 'border_province_code':'sev', 'border_coastal_code':nil}, {'province_code':'ukr', 'border_province_code':'rum', 'border_coastal_code':nil}, {'province_code':'ukr', 'border_province_code':'gal', 'border_coastal_code':nil}, {'province_code':'ukr', 'border_province_code':'war', 'border_coastal_code':nil}, {'province_code':'ven', 'border_province_code':'rom', 'border_coastal_code':nil}, {'province_code':'ven', 'border_province_code':'tus', 'border_coastal_code':nil}, {'province_code':'ven', 'border_province_code':'pie', 'border_coastal_code':nil}, {'province_code':'ven', 'border_province_code':'apu', 'border_coastal_code':nil}, {'province_code':'ven', 'border_province_code':'tro', 'border_coastal_code':nil}, {'province_code':'ven', 'border_province_code':'tri', 'border_coastal_code':nil}, {'province_code':'vie', 'border_province_code':'boh', 'border_coastal_code':nil}, {'province_code':'vie', 'border_province_code':'bud', 'border_coastal_code':nil}, {'province_code':'vie', 'border_province_code':'gal', 'border_coastal_code':nil}, {'province_code':'vie', 'border_province_code':'tri', 'border_coastal_code':nil}, {'province_code':'vie', 'border_province_code':'tro', 'border_coastal_code':nil}, {'province_code':'wal', 'border_province_code':'iri', 'border_coastal_code':nil}, 
{'province_code':'wal', 'border_province_code':'lvp', 'border_coastal_code':nil}, {'province_code':'wal', 'border_province_code':'yor', 'border_coastal_code':nil}, {'province_code':'wal', 'border_province_code':'lon', 'border_coastal_code':nil}, {'province_code':'wal', 'border_province_code':'eng', 'border_coastal_code':nil}, {'province_code':'war', 'border_province_code':'pru', 'border_coastal_code':nil}, {'province_code':'war', 'border_province_code':'lvn', 'border_coastal_code':nil}, {'province_code':'war', 'border_province_code':'mos', 'border_coastal_code':nil}, {'province_code':'war', 'border_province_code':'ukr', 'border_coastal_code':nil}, {'province_code':'war', 'border_province_code':'gal', 'border_coastal_code':nil}, {'province_code':'war', 'border_province_code':'sil', 'border_coastal_code':nil}, {'province_code':'wes', 'border_province_code':'mid', 'border_coastal_code':nil}, {'province_code':'wes', 'border_province_code':'spa', 'border_coastal_code':'sc'}, {'province_code':'wes', 'border_province_code':'naf', 'border_coastal_code':nil}, {'province_code':'wes', 'border_province_code':'tun', 'border_coastal_code':nil}, {'province_code':'wes', 'border_province_code':'trn', 'border_coastal_code':nil}, {'province_code':'wes', 'border_province_code':'lyo', 'border_coastal_code':nil}, {'province_code':'yor', 'border_province_code':'edi', 'border_coastal_code':nil}, {'province_code':'yor', 'border_province_code':'lvp', 'border_coastal_code':nil}, {'province_code':'yor', 'border_province_code':'wal', 'border_coastal_code':nil}, {'province_code':'yor', 'border_province_code':'lon', 'border_coastal_code':nil}, {'province_code':'yor', 'border_province_code':'nth', 'border_coastal_code':nil} ])
Bholdus/explorer
packages/next/components/charts/lineChart.js
<reponame>Bholdus/explorer import { Axis, Chart, LineAdvance, Tooltip } from "bizcharts"; import styled from "styled-components"; import Image from "next/image"; const ChartWrapper = styled.div` svg, img { margin-top: 17px; } `; const Title = styled.h2` margin: 0; font-size: 12px; text-align: right; color: rgba(255, 255, 255, 0.2); font-weight: 400; `; export default function LineChart({ token = "", data = [], color = "#F22279", }) { return ( <ChartWrapper> <Title>{token} · Last 30d</Title> {data.length ? ( <Chart padding={[2, 0, 0, 0]} width={227} height={34} data={data}> <Axis name="time" visible={false} /> <Axis name="price" visible={false} /> <LineAdvance shape="smooth" area position="time*price" color={color} /> <Tooltip custom={true} containerTpl={`<i></i>`} /> </Chart> ) : ( <img src="/imgs/nochart.svg" alt="NoChartDataLoaded" /> )} </ChartWrapper> ); }
Xellowse/Macaca
Module/HAL/ctor/src/ctor_hal_module.c
///////////////////////////////////////////////////////////////////////////////// // // File name: Module/CTOR/ctor_module.c // The CTOR (Constructor) is used to create/destroy Module's handle(instance). // Because of the HW Layer is the lower layer under HAL, HAL Module CTOR is not only // create HAL handle but also create HW handle as HAL's private data. // ///////////////////////////////////////////////////////////////////////////////// #include <hal_module_pub.h> #include <hal_module.h> #include <osal_pub.h> #include <ctor_osal_pub.h> #include <ctor_hal_module_pub.h> #include <ctor_hw_module_pub.h> //ctor_hal_module_version is used to embed the version information string into binary. //sv# is the special term to let the user can use to search the version string //in binary easier. char *ctor_hal_module_version="sv#ctor_hal_module_ver:1.0.0+1"; extern int HalModuleInit(void *handle); extern int HalModuleDeinit(void *handle); extern int HalModuleEnable(void *handle); extern int HalModuleDisable(void *handle); extern int HalModuleRegisterHalCbMethod1(void *handle, void *CbMethodHandle,int (*CbMethod)(void *handle, HalModuleS32 param1, HalModuleS32 param2)); extern int HalModuleMethod1(void *handle); extern HalModuleS32 HalModuleRegisterISRCbMethod(void *handle, void *CalleeCB, HalModuleS32 (*CbMethod)(void *handle, HalModuleS32 param1, HalModuleS32 param2)); //---------------------------------------------------------------------------- //Function Name: CtorHalModuleCreateHandle //Description : Constructor of Module HAL's handle. HAL's handle is created by // this function. API assignment of HAL is implement in this function. // This function is also create HW Layer because of HW Layer is the // lower layer under HAL. //Param : // 1. HwModuleHandle **handle: HAL Handle pointer which is declared by user. // 2. none // 3. none //return value : True if HAL handle is created correctly or False if it's not. 
//---------------------------------------------------------------------------- CtorHalModuleS32 CtorHalModuleCreateHandle(HalModuleHandle **handle){ OsalHandle *osal_handle; HalModuleHandle *handle_out; HalModulePrivateData *hal_fd; CtorOsalCreateHandle(&osal_handle); handle_out = (HalModuleHandle *)osal_handle->OsalMalloc(sizeof(HalModuleHandle)); handle_out->fd = osal_handle->OsalMalloc(sizeof(HalModulePrivateData)); *handle = handle_out; hal_fd = (HalModulePrivateData *)handle_out->fd; hal_fd->OsalHandle = osal_handle; handle_out->HalModuleInit = HalModuleInit; handle_out->HalModuleDeinit = HalModuleDeinit; handle_out->HalModuleEnable = HalModuleEnable; handle_out->HalModuleDisable = HalModuleDisable; handle_out->HalModuleRegisterHalCbMethod1= HalModuleRegisterHalCbMethod1; handle_out->HalModuleMethod1 = HalModuleMethod1; handle_out->HalModuleRegisterISRCbMethod = HalModuleRegisterISRCbMethod; CtorHwModuleCreateHandle(&(hal_fd->HwModuleHandle)); return CTOR_HAL_MODULE_TRUE; } //---------------------------------------------------------------------------- //Function Name: CtorHalModuleDestroyHandle //Description : Destructor of HAL's handle. User can use this function to remove // the HAL's handle. The HW Layer's handle will be also removed by // this function, because of HW Layer is the lower layer under HAL. //Param : // 1. HalModuleHandle *handle: HAL handle's pointer which user declared. // 2. none // 3. none //return value : True if HAL handle is removed correctly or False if it's not. 
//---------------------------------------------------------------------------- CtorHalModuleS32 CtorHalModuleDestroyHandle(HalModuleHandle *handle){ OsalHandle *osal_handle; HalModulePrivateData *hal_fd = (HalModulePrivateData *)handle->fd; //hal_fd->OsalHandle->OsalPrint("[%s]\n",__FUNCTION__); CtorHwModuleDestroyHandle(hal_fd->HwModuleHandle); CtorOsalCreateHandle(&osal_handle); osal_handle->OsalFree(handle->fd); osal_handle->OsalFree(handle); CtorOsalDestroyHandle(osal_handle); return CTOR_HAL_MODULE_TRUE; }
maxiko/gitlabhq
spec/models/project_services/alerts_service_spec.rb
# frozen_string_literal: true require 'spec_helper' RSpec.describe AlertsService do let_it_be(:project) { create(:project) } let(:service_params) { { project: project, active: active } } let(:active) { true } let(:service) { described_class.new(service_params) } shared_context 'when active' do let(:active) { true } end shared_context 'when inactive' do let(:active) { false } end shared_context 'when persisted' do before do service.save! service.reload end end describe '#url' do include Gitlab::Routing subject { service.url } it { is_expected.to eq(project_alerts_notify_url(project, format: :json)) } end describe '#json_fields' do subject { service.json_fields } it { is_expected.to eq(%w(active token)) } end describe '#as_json' do subject { service.as_json(only: service.json_fields) } it { is_expected.to eq('active' => true, 'token' => nil) } end describe '#token' do shared_context 'reset token' do before do service.token = '' service.valid? end end shared_context 'assign token' do |token| before do service.token = token service.valid? end end shared_examples 'valid token' do it { is_expected.to match(/\A\h{32}\z/) } end shared_examples 'no token' do it { is_expected.to be_blank } end subject { service.token } context 'when active' do include_context 'when active' context 'when resetting' do let!(:previous_token) { service.token } include_context 'reset token' it_behaves_like 'valid token' it { is_expected.not_to eq(previous_token) } end context 'when assigning' do include_context 'assign token', 'random token' it_behaves_like 'valid token' end end context 'when inactive' do include_context 'when inactive' context 'when resetting' do let!(:previous_token) { service.token } include_context 'reset token' it_behaves_like 'no token' end end context 'when persisted' do include_context 'when persisted' it_behaves_like 'valid token' end end end
ArtTriumph/RxJavaDemo
app/src/main/java/com/nd/android/rxjavademo/data/impl/observable/MainListWithExample_Observable_toBlocking.java
package com.nd.android.rxjavademo.data.impl.observable;

import com.nd.android.rxjavademo.R;

import java.util.concurrent.TimeUnit;

import rx.Observable;
import rx.observables.BlockingObservable;

/**
 * Main-list entry describing the {@code Observable.toBlocking} example.
 * Supplies the title and detail-info string resources for this demo item.
 * <p>
 * Created by HuangYK on 16/10/27.
 */
public class MainListWithExample_Observable_toBlocking extends MainListWithExample_Observable {

    public MainListWithExample_Observable_toBlocking() {
    }

    /** @return string resource id of the detailed description for this example. */
    @Override
    public int getDetailInfo() {
        return R.string.str_mainlist_Observable_toBlocking_info;
    }

    /** @return string resource id of the list-item subtitle for this example. */
    @Override
    public int getSubTitle() {
        return R.string.str_mainlist_Observable_toBlocking;
    }

    // NOTE(review): never invoked within this class — presumably kept as sample
    // code for the demo. Uses the raw BlockingObservable type; the wrapped
    // source is Observable.interval, which ticks every 300 ms once consumed.
    private BlockingObservable example1() {
        return Observable.interval(300, TimeUnit.MILLISECONDS).toBlocking();
    }
}
6923403/C
test/shared_ptr/shared/Strblob.h
// StrBlob: a shared, reference-counted collection of strings. Copies of a
// StrBlob share the same underlying vector via a shared_ptr, so mutations
// through one copy are visible through all copies.
//
// Fix: the original include guard `_STRBLOB_H_` used an identifier reserved
// to the implementation (leading underscore followed by an uppercase letter,
// [lex.name]/[global.names]); renamed to STRBLOB_H.
#ifndef STRBLOB_H
#define STRBLOB_H

#include <vector>
#include <string>
#include <memory>
#include <initializer_list>

class StrBlob {
public:
    typedef std::vector<std::string>::size_type size_type;

    StrBlob();
    StrBlob(std::initializer_list<std::string> il);

    // Number of stored strings.
    size_type size() const { return data->size(); }

    // Append a string to the shared vector.
    void push_back(const std::string &t) { data->push_back(t); }

    // Remove the last element; definitions elsewhere (Strblob.cpp) are
    // expected to call check() so an empty blob raises rather than UB.
    void pop_back();

    std::string & front();
    std::string & back();

private:
    // Shared storage: all copies of this StrBlob point at the same vector.
    std::shared_ptr<std::vector<std::string>> data;

    // Throw an out_of_range-style error (msg) if index i is invalid.
    void check(size_type i, const std::string &msg) const;
};

#endif // STRBLOB_H
muelli/scapi
src/java/edu/biu/scapi/primitives/dlog/cryptopp/CryptoPpDlogZpSafePrime.java
<gh_stars>1-10
/**
 * %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 *
 * Copyright (c) 2012 - SCAPI (http://crypto.biu.ac.il/scapi)
 * This file is part of the SCAPI project.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
 * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
 * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 * We request that any publication and/or code referring to and/or based on SCAPI contain an appropriate citation to SCAPI, including a reference to
 * http://crypto.biu.ac.il/SCAPI.
 *
 * SCAPI uses Crypto++, Miracl, NTL and Bouncy Castle. Please see these projects for any further licensing issues.
 * %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 *
 */
package edu.biu.scapi.primitives.dlog.cryptopp;

import java.math.BigInteger;
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;

import edu.biu.scapi.primitives.dlog.DlogGroupAbs;
import edu.biu.scapi.primitives.dlog.DlogZpSafePrime;
import edu.biu.scapi.primitives.dlog.GroupElement;
import edu.biu.scapi.primitives.dlog.GroupElementSendableData;
import edu.biu.scapi.primitives.dlog.ZpElement;
import edu.biu.scapi.primitives.dlog.ZpElementSendableData;
import edu.biu.scapi.primitives.dlog.groupParams.ZpGroupParams;
import edu.biu.scapi.securityLevel.DDH;
import edu.biu.scapi.tools.math.MathAlgorithms;

/**
 * This class implements a Dlog group over Zp* utilizing Crypto++'s implementation.<p>
 * It uses JNI technology to call Crypto++'s native code. The native group object is
 * referenced via {@link #pointerToGroup} and released in {@link #finalize()}.
 *
 * @author Cryptography and Computer Security Research Group Department of Computer Science Bar-Ilan University (<NAME>)
 */
public class CryptoPpDlogZpSafePrime extends DlogGroupAbs implements DlogZpSafePrime, DDH{

	private long pointerToGroup = 0; // pointer to the native group object (0 = not yet created)

	/* native functions for the Dlog functionality — implemented in the
	 * CryptoPPJavaInterface JNI library loaded in the static initializer below */
	private native long createDlogZp(byte[] p, byte[] q, byte[] g);
	private native long createRandomDlogZp(int numBits);
	private native long getGenerator(long group);
	private native byte[] getP(long group);
	private native byte[] getQ(long group);
	private native long inverseElement(long group, long element);
	private native long exponentiateElement(long group, long element, byte[] exponent);
	private native long multiplyElements(long group, long element1, long element2);
	private native void deleteDlogZp(long group);
	private native boolean validateZpGroup(long group);
	private native boolean validateZpGenerator(long group);
	private native boolean validateZpElement(long group, long element);

	/**
	 * Initializes the CryptoPP implementation of Dlog over Zp* with the given groupParams,
	 * using a default SecureRandom.
	 * @param groupParams - contains the group parameters
	 */
	public CryptoPpDlogZpSafePrime(ZpGroupParams groupParams) {
		this(groupParams, new SecureRandom());
	}

	/**
	 * Initializes the CryptoPP implementation of Dlog over Zp* with the given groupParams.
	 * Validates that p = 2q+1 and that both p and q are (probable) primes before creating
	 * the native group, and that g is a valid generator afterwards.
	 * @param groupParams - contains the group parameters
	 * @param random source of randomness for subsequent group operations
	 * @throws IllegalArgumentException if the parameters or the generator are invalid
	 */
	public CryptoPpDlogZpSafePrime(ZpGroupParams groupParams, SecureRandom random) {
		this.random = random;
		BigInteger p = groupParams.getP();
		BigInteger q = groupParams.getQ();
		BigInteger g = groupParams.getXg();

		// if p is not 2q+1 throw exception
		if (!q.multiply(new BigInteger("2")).add(BigInteger.ONE).equals(p)) {
			throw new IllegalArgumentException("p must be equal to 2q+1");
		}
		// if p is not a prime throw exception (40 Miller-Rabin-style rounds)
		if (!p.isProbablePrime(40)) {
			throw new IllegalArgumentException("p must be a prime");
		}
		// if q is not a prime throw exception
		if (!q.isProbablePrime(40)) {
			throw new IllegalArgumentException("q must be a prime");
		}
		// set the inner parameters
		this.groupParams = groupParams;

		// Create CryptoPP Dlog group with p, q, g.
		// The validity of g will be checked after the creation of the group because the check needs the pointer to the group
		pointerToGroup = createDlogZp(p.toByteArray(), q.toByteArray(), g.toByteArray());

		// If the generator is not valid, delete the allocated native memory and throw exception
		if (!validateZpGenerator(pointerToGroup)) {
			deleteDlogZp(pointerToGroup);
			throw new IllegalArgumentException("generator value is not valid");
		}
		// Create the GroupElement - generator (membership already verified above, so no re-check)
		generator = new ZpSafePrimeElementCryptoPp(g, p, false);
		// Now that we have p, we can calculate k, the maximum length in bytes of a string
		// that can be encoded to a group element of this group.
		k = calcK(p);
	}

	/**
	 * Initializes the CryptoPP implementation of Dlog over Zp* with the given string parameters.
	 * @param q the order of the group
	 * @param g the generator of the group
	 * @param p the prime of the group
	 */
	public CryptoPpDlogZpSafePrime(String q, String g, String p) {
		// creates ZpGroupParams from the given arguments and calls the appropriate constructor
		this(new ZpGroupParams(new BigInteger(q), new BigInteger(g), new BigInteger(p)));
	}

	/**
	 * Initializes the CryptoPP implementation of Dlog over Zp* with the given string parameters
	 * and a named SecureRandom algorithm.
	 * @param q the order of the group
	 * @param g the generator of the group
	 * @param p the prime of the group
	 * @param randNumGenAlg name of the SecureRandom algorithm to use
	 * @throws NoSuchAlgorithmException if randNumGenAlg is unknown
	 */
	public CryptoPpDlogZpSafePrime(String q, String g, String p, String randNumGenAlg) throws NoSuchAlgorithmException {
		// creates ZpGroupParams from the given arguments and calls the appropriate constructor
		this(new ZpGroupParams(new BigInteger(q), new BigInteger(g), new BigInteger(p)), SecureRandom.getInstance(randNumGenAlg));
	}

	/**
	 * Default constructor. Initializes this object with 1024 bit size.
	 */
	public CryptoPpDlogZpSafePrime() {
		this(1024);
	}

	/**
	 * Initializes the CryptoPP implementation of Dlog over Zp* with randomly generated group parameters.
	 * @param numBits - number of the prime p bits to generate
	 */
	public CryptoPpDlogZpSafePrime(int numBits) {
		this(numBits, new SecureRandom());
	}

	/**
	 * Initializes the CryptoPP implementation of Dlog over Zp* with randomly generated group
	 * parameters; p, q and the generator are produced by the native library.
	 * @param numBits - number of the prime p bits to generate
	 * @param random source of randomness for subsequent group operations
	 */
	public CryptoPpDlogZpSafePrime(int numBits, SecureRandom random){
		this.random = random;

		// create random Zp dlog group natively
		pointerToGroup = createRandomDlogZp(numBits);

		// get the generator value (native pointer to the element)
		long pGenerator = getGenerator(pointerToGroup);
		// create the GroupElement - generator - from the returned native pointer
		generator = new ZpSafePrimeElementCryptoPp(pGenerator);

		BigInteger p = new BigInteger(getP(pointerToGroup));
		BigInteger q = new BigInteger(getQ(pointerToGroup));
		BigInteger xG = ((ZpElement) generator).getElementValue();

		groupParams = new ZpGroupParams(q, xG, p);

		// Now that we have p, we can calculate k, the maximum length in bytes of a string
		// to be converted to a group element of this group.
		k = calcK(p);
	}

	/** String-argument convenience overload of {@link #CryptoPpDlogZpSafePrime(int)}. */
	public CryptoPpDlogZpSafePrime(String numBits) {
		// creates an int from the given string and calls the appropriate constructor
		this(new Integer(numBits));
	}

	/**
	 * String-argument convenience overload with a named SecureRandom algorithm.
	 * @throws NoSuchAlgorithmException if randNumGenAlg is unknown
	 */
	public CryptoPpDlogZpSafePrime(String numBits, String randNumGenAlg) throws NoSuchAlgorithmException {
		// creates an int from the given string and calls the appropriate constructor
		this(new Integer(numBits), SecureRandom.getInstance(randNumGenAlg));
	}

	/**
	 * Computes k, the maximum length in bytes of a binary string that can be encoded
	 * into a group element (capped at 255 so the length fits in the one-byte pad).
	 */
	private int calcK(BigInteger p){
		int bitsInp = p.bitLength();
		// any string of length k has a numeric value that is less than (p-1)/2 - 1
		int k = (bitsInp - 3)/8;
		// The actual k that we allow is one byte less. This gives an extra byte used to pad
		// the binary string passed to encode with a 0x01 byte (removed again at decode), so
		// the encode/decode functions always work with positive numbers even if the original
		// string would translate to a negative BigInteger.
		k--;
		// Because the least significant byte of the padded encoding stores the original
		// string's size (used to strip the padding when decoding), k must fit in one byte.
		if( k > 255){
			k = 255;
		}
		return k;
	}

	/**
	 * @return the type of the group - Zp*
	 */
	public String getGroupType() {
		return "Zp*";
	}

	/**
	 * @return the identity of this Zp group - 1
	 */
	public GroupElement getIdentity() {
		return new ZpSafePrimeElementCryptoPp(BigInteger.ONE, ((ZpGroupParams) groupParams).getP(), false);
	}

	/**
	 * Creates a random member of this Dlog group.
	 * @return the random element
	 */
	public GroupElement createRandomElement() {
		// This overrides the basic implementation of DlogGroupAbs with a more efficient one
		// for Zp safe primes: the package-private ZpSafePrimeElementCryptoPp constructor
		// draws a random element in Zp directly.
		return new ZpSafePrimeElementCryptoPp(((ZpGroupParams) groupParams).getP(), random);
	}

	/**
	 * Checks if the given element is a member of this Dlog group (via the native library).
	 * @param element
	 * @return true if the given element is member of that group. false, otherwise.
	 * @throws IllegalArgumentException if the element is not a ZpSafePrimeElementCryptoPp
	 */
	public boolean isMember(GroupElement element) {
		// check if element is ZpElementCryptoPp
		if (!(element instanceof ZpSafePrimeElementCryptoPp)) {
			throw new IllegalArgumentException("element type doesn't match the group type");
		}
		return validateZpElement(pointerToGroup, ((ZpSafePrimeElementCryptoPp) element).getPointerToElement());
	}

	/**
	 * Checks if the configured generator is indeed a generator of the group.
	 * @return true, if the generator is valid, false otherwise.
	 */
	public boolean isGenerator() {
		return validateZpGenerator(pointerToGroup);
	}

	/**
	 * Checks if the parameters of the group are correct.
	 * @return true if valid, false otherwise.
	 */
	public boolean validateGroup() {
		return validateZpGroup(pointerToGroup);
	}

	/**
	 * Calculates the inverse of the given GroupElement.
	 * @param groupElement to inverse
	 * @return the inverse element of the given GroupElement
	 * @throws IllegalArgumentException if the element is not a ZpSafePrimeElementCryptoPp
	 */
	public GroupElement getInverse(GroupElement groupElement) throws IllegalArgumentException{
		if (groupElement instanceof ZpSafePrimeElementCryptoPp){
			// call to native inverse function
			long invertVal = inverseElement(pointerToGroup, ((ZpSafePrimeElementCryptoPp) groupElement).getPointerToElement());

			// build a ZpElementCryptoPp element from the result value
			ZpSafePrimeElementCryptoPp inverseElement = new ZpSafePrimeElementCryptoPp(invertVal);

			return inverseElement;

		}else throw new IllegalArgumentException("element type doesn't match the group type");
	}

	/**
	 * Raises the base GroupElement to the exponent. The result is another GroupElement.
	 * @param exponent
	 * @param base
	 * @return the result of the exponentiation
	 * @throws IllegalArgumentException if the base is not a ZpSafePrimeElementCryptoPp
	 */
	public GroupElement exponentiate(GroupElement base, BigInteger exponent) throws IllegalArgumentException{
		if (base instanceof ZpSafePrimeElementCryptoPp){
			// call to native exponentiate function
			long exponentiateVal = exponentiateElement(pointerToGroup, ((ZpSafePrimeElementCryptoPp) base).getPointerToElement(), exponent.toByteArray());

			// build a ZpElementCryptoPp element from the result value
			ZpSafePrimeElementCryptoPp exponentiateElement = new ZpSafePrimeElementCryptoPp(exponentiateVal);

			return exponentiateElement;

		}else throw new IllegalArgumentException("element type doesn't match the group type");
	}

	/**
	 * Multiplies two GroupElements.
	 * @param groupElement1
	 * @param groupElement2
	 * @return the multiplication result
	 * @throws IllegalArgumentException if either element is not a ZpSafePrimeElementCryptoPp
	 */
	public GroupElement multiplyGroupElements(GroupElement groupElement1, GroupElement groupElement2) throws IllegalArgumentException {

		if ((groupElement1 instanceof ZpSafePrimeElementCryptoPp) && (groupElement2 instanceof ZpSafePrimeElementCryptoPp)){
			// call to native multiply function
			long mulVal = multiplyElements(pointerToGroup, ((ZpSafePrimeElementCryptoPp) groupElement1).getPointerToElement(),
					((ZpSafePrimeElementCryptoPp) groupElement2).getPointerToElement());

			// build a ZpElementCryptoPp element from the result value
			ZpSafePrimeElementCryptoPp mulElement = new ZpSafePrimeElementCryptoPp(mulVal);

			return mulElement;

		}else throw new IllegalArgumentException("element type doesn't match the group type");
	}

	/**
	 * Computes the product of several exponentiations with distinct bases
	 * and distinct exponents. Instead of computing each part separately, an
	 * optimization could be used to compute it simultaneously.
	 * @param groupElements
	 * @param exponentiations
	 * @return the exponentiation result
	 */
	@Override
	public GroupElement simultaneousMultipleExponentiations (GroupElement[] groupElements, BigInteger[] exponentiations){

		for (int i=0; i < groupElements.length; i++){
			if (!(groupElements[i] instanceof ZpSafePrimeElementCryptoPp)){
				throw new IllegalArgumentException("groupElement doesn't match the DlogGroup");
			}
		}
		// Currently, in cryptoPpDlogZpSafePrime the naive algorithm is faster than the
		// optimized one due to the many JNI calls the latter requires, so the naive
		// algorithm is used. This may change in the future.
		return computeNaive(groupElements, exponentiations);
	}

	/**
	 * @deprecated As of SCAPI-V2_0_0 use generateElment(boolean bCheckMembership, BigInteger...values)
	 */
	@Deprecated public ZpElement generateElement(Boolean bCheckMembership, BigInteger x) {

		return new ZpSafePrimeElementCryptoPp(x, ((ZpGroupParams) groupParams).getP(), bCheckMembership);
	}

	/* (non-Javadoc)
	 * @see edu.biu.scapi.primitives.dlog.DlogGroup#generateElement(boolean, java.math.BigInteger[])
	 * Expects exactly one value: the x value of the element.
	 */
	@Override
	public GroupElement generateElement(boolean bCheckMembership, BigInteger... values) throws IllegalArgumentException {
		if(values.length != 1){
			throw new IllegalArgumentException("To generate an ZpElement you should pass the x value of the point");
		}
		return new ZpSafePrimeElementCryptoPp(values[0], ((ZpGroupParams) groupParams).getP(), bCheckMembership);
	}

	/**
	 * @see edu.biu.scapi.primitives.dlog.DlogGroup#generateElement(boolean, edu.biu.scapi.primitives.dlog.GroupElementSendableData)
	 * @deprecated The name of this function was changed. As of SCAPI-V1-0-2-2 use {@link reconstructElement(boolean bCheckMembership, GroupElementSendableData data)} instead.
	 */
	@Override
	@Deprecated
	public GroupElement generateElement(boolean bCheckMembership, GroupElementSendableData data) {
		if (!(data instanceof ZpElementSendableData))
			throw new IllegalArgumentException("data type doesn't match the group type");
		return generateElement(bCheckMembership, ((ZpElementSendableData)data).getX());
	}

	/**
	 * @see edu.biu.scapi.primitives.dlog.DlogGroup#reconstructElement(boolean, edu.biu.scapi.primitives.dlog.GroupElementSendableData)
	 * @throws IllegalArgumentException if bCheckMembership is true and the data does not correspond to a legal value of this group
	 */
	@Override
	public GroupElement reconstructElement(boolean bCheckMembership, GroupElementSendableData data) {
		if (!(data instanceof ZpElementSendableData))
			throw new IllegalArgumentException("data type doesn't match the group type");
		return generateElement(bCheckMembership, ((ZpElementSendableData)data).getX());
	}

	/**
	 * Deletes the related native Dlog group object.
	 * NOTE(review): finalize() is deprecated in modern Java; consider an explicit
	 * close()/Cleaner in future versions — TODO confirm against project policy.
	 */
	protected void finalize() throws Throwable {

		// delete from the dll the dynamic allocation of the native group.
		deleteDlogZp(pointerToGroup);

		super.finalize();
	}

	/**
	 * This function takes any string of length up to k bytes and encodes it to a Group Element.<p>
	 * k is calculated upon construction of this group and it depends on the length in bits of p.<p>
	 * The encoding-decoding functionality is not a bijection, that is, it is a 1-1 function but is not onto.<p>
	 * Therefore, any string of length in bytes up to k can be encoded to a group element but not<p>
	 * every group element can be decoded to a binary string in the group of binary strings of length up to 2^k.<p>
	 * Thus, the right way to use this functionality is first to encode a byte array and then to decode it, and not the opposite.
	 * @throws IndexOutOfBoundsException if the length of the binary array to encode is longer than k
	 */
	public GroupElement encodeByteArrayToGroupElement(byte[] binaryString) {

		// Any string of length up to k has numeric value less than (p-1)/2 - 1.
		// If longer than k then throw exception.
		if (binaryString.length > k){
			throw new IndexOutOfBoundsException("The binary array to encode is too long.");
		}

		// Pad the binaryString with a 0x01 byte in the most significant position so the
		// encoding and decoding always work with positive numbers.
		byte[] newString = new byte[binaryString.length + 1];
		newString[0] = 1;
		System.arraycopy(binaryString, 0, newString, 1, binaryString.length);

		// Denote the padded string by s.
		// Set the group element to be y=(s+1)^2 (this ensures that the result is not 0 and is a square)
		BigInteger s = new BigInteger(newString);
		BigInteger y = (s.add(BigInteger.ONE)).pow(2).mod(((ZpGroupParams) groupParams).getP());

		// There is no need to check membership since the "element" was generated so that it is always an element.
		ZpSafePrimeElementCryptoPp element = new ZpSafePrimeElementCryptoPp(y, ((ZpGroupParams) groupParams).getP(), false);
		return element;
	}

	/**
	 * This function decodes a group element to a byte array.<p>
	 * This function is guaranteed to work properly ONLY if the group element was obtained as a result
	 * of encoding a binary string of length in bytes up to k, because the encoding-decoding
	 * functionality is not a bijection (1-1 but not onto).
	 * @param groupElement the GroupElement to decode
	 * @return a byte[] decoding of the group element
	 */
	public byte[] decodeGroupElementToByteArray(GroupElement groupElement) {
		if (!(groupElement instanceof ZpSafePrimeElementCryptoPp)){
			throw new IllegalArgumentException("element type doesn't match the group type");
		}

		// Given a group element y, find its two square roots z,-z. Take z to be the value
		// between 1 and (p-1)/2, and return s = z-1 (undoing the +1 applied at encoding).
		BigInteger y = ((ZpElement) groupElement).getElementValue();
		BigInteger p = ((ZpGroupParams) groupParams).getP();

		MathAlgorithms.SquareRootResults roots = MathAlgorithms.sqrtModP_3_4(y, p);

		BigInteger goodRoot;
		BigInteger halfP = (p.subtract(BigInteger.ONE)).divide(BigInteger.valueOf(2));
		if(roots.getRoot1().compareTo(BigInteger.ONE)>= 0 && roots.getRoot1().compareTo(halfP) < 0)
			goodRoot = roots.getRoot1();
		else
			goodRoot = roots.getRoot2();

		goodRoot = goodRoot.subtract(BigInteger.ONE);

		// Remove the padding byte at the most significant position (added while encoding)
		byte[] rootByteArray = goodRoot.toByteArray();
		byte[] oneByteLess = new byte[rootByteArray.length -1];
		System.arraycopy(rootByteArray, 1, oneByteLess, 0,oneByteLess.length );
		return oneByteLess;
	}

	/**
	 * This function maps a group element of this dlog group to a byte array.<p>
	 * This function does not have an inverse function, that is, it is not possible to
	 * re-construct the original group element from the resulting byte array.
	 * @return a byte array representation of the given group element
	 */
	public byte[] mapAnyGroupElementToByteArray(GroupElement groupElement){
		if (!(groupElement instanceof ZpSafePrimeElementCryptoPp)){
			throw new IllegalArgumentException("element type doesn't match the group type");
		}
		return ((ZpElement) groupElement).getElementValue().toByteArray();
	}

	// load the Crypto++ JNI bridge library
	static {
		System.loadLibrary("CryptoPPJavaInterface");
	}

}
sakamoto-neko/butterfly
butterflymodel/src/main/java/com/buttongames/butterflymodel/model/UserPhases.java
package com.buttongames.butterflymodel.model;

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.OneToOne;
import javax.persistence.Table;
import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;

/**
 * Model class to represent the phases a user has set for different games
 * on the network. This applies to all PCBs belonging to a particular user.
 * @author skogaby (<EMAIL>)
 */
@Entity
@Table(name = "user_phases")
public class UserPhases implements Externalizable {

    private static final long serialVersionUID = 1L;

    /**
     * ID of the phases, primary key.
     */
    @Id
    @GeneratedValue
    @Column(name = "id")
    private long id;

    /**
     * The user these phases belong to.
     */
    @OneToOne
    @JoinColumn(name = "user_id")
    private ButterflyUser user;

    /**
     * The user's phase for DDR Ace.
     */
    @Column(name = "ddr_16_phase")
    private int ddr16Phase;

    /** No-arg constructor required by JPA and Externalizable. */
    public UserPhases() { }

    public UserPhases(final ButterflyUser user, final int ddr16Phase) {
        this.user = user;
        this.ddr16Phase = ddr16Phase;
    }

    // NOTE(review): writes `user` via writeObject — assumes ButterflyUser is
    // serializable/externalizable; confirm, otherwise this throws at runtime.
    @Override
    public void writeExternal(ObjectOutput out) throws IOException {
        out.writeLong(this.id);
        out.writeObject(this.user);
        out.writeInt(this.ddr16Phase);
    }

    // Reads fields in the exact order writeExternal wrote them.
    @Override
    public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
        this.setId(in.readLong());
        this.setUser((ButterflyUser) in.readObject());
        this.setDdr16Phase(in.readInt());
    }

    // Fix: was `private`, unlike every other accessor on this entity (setId is
    // public). Widening to public is backward compatible and lets callers read
    // the generated primary key.
    public long getId() {
        return id;
    }

    public void setId(long id) {
        this.id = id;
    }

    public ButterflyUser getUser() {
        return user;
    }

    public void setUser(ButterflyUser user) {
        this.user = user;
    }

    public int getDdr16Phase() {
        return ddr16Phase;
    }

    public void setDdr16Phase(int ddr16Phase) {
        this.ddr16Phase = ddr16Phase;
    }
}
code-dot-org/code-dot-org
apps/test/unit/util/getScriptDataTest.js
import getScriptData from '@cdo/apps/util/getScriptData';
import {expect} from '../../util/reconfiguredChai';

// Tests for getScriptData, which reads JSON out of <script data-*> attributes.
describe('the getScriptData function', () => {
  beforeEach(() => {
    const someDiv = document.createElement('div');
    document.body.appendChild(someDiv);
    someDiv.innerHTML = `
      <script data-foo="1"></script>
      <script data-bar='{"userId": "neato"}'></script>
      <script data-malformed='{malformed}'></script>
    `;
  });

  it('extracts data from a script tag', () => {
    expect(getScriptData('foo')).to.equal(1);
    expect(getScriptData('bar')).to.deep.equal({userId: 'neato'});
  });

  it('is case-insensitive', () => {
    expect(getScriptData('FOO')).to.equal(1);
  });

  // Fix: chai's `throw` assertion must be invoked. The original
  // `expect(fn).to.throw;` only accessed the property and asserted nothing,
  // so these two tests silently passed regardless of behavior.
  it('throws an error if the script tag does not exist', () => {
    expect(() => getScriptData('does-not-exist')).to.throw();
  });

  it('throws an error if the json is malformed', () => {
    expect(() => getScriptData('malformed')).to.throw();
  });
});
cragkhit/elasticsearch
references/bcb_chosen_clones/selected#1529627#250#270.java
<gh_stars>10-100 public static Model downloadModel(String url) { Model model = ModelFactory.createDefaultModel(); try { URLConnection connection = new URL(url).openConnection(); if (connection instanceof HttpURLConnection) { HttpURLConnection httpConnection = (HttpURLConnection) connection; httpConnection.setRequestProperty("Accept", "application/rdf+xml, */*;q=.1"); httpConnection.setRequestProperty("Accept-Language", "en"); } InputStream in = connection.getInputStream(); model.read(in, url); in.close(); return model; } catch (MalformedURLException e) { logger.debug("Unable to download model from " + url, e); throw new RuntimeException(e); } catch (IOException e) { logger.debug("Unable to download model from " + url, e); throw new RuntimeException(e); } }
cpswan/docker
daemon/create_unix.go
<gh_stars>0
// +build !windows

package daemon

import (
	"os"
	"path/filepath"

	containertypes "github.com/docker/docker/api/types/container"
	"github.com/docker/docker/container"
	derr "github.com/docker/docker/errors"
	"github.com/docker/docker/image"
	"github.com/docker/docker/pkg/stringid"
	"github.com/docker/docker/volume"
	"github.com/opencontainers/runc/libcontainer/label"
)

// createContainerPlatformSpecificSettings performs platform specific container create functionality.
// On Unix this means creating (anonymous) volumes for every destination listed in the image/config
// Volumes map that is not already covered by an existing mount, relabeling them for SELinux, and
// seeding local-driver volumes with the image content at that path.
// The container FS is mounted for the duration of the call so paths can be resolved and copied.
func (daemon *Daemon) createContainerPlatformSpecificSettings(container *container.Container, config *containertypes.Config, hostConfig *containertypes.HostConfig, img *image.Image) error {
	if err := daemon.Mount(container); err != nil {
		return err
	}
	// Unmount again regardless of how we exit below.
	defer daemon.Unmount(container)

	for spec := range config.Volumes {
		// Each anonymous volume gets a random (non-cryptographic) name.
		name := stringid.GenerateNonCryptoID()
		destination := filepath.Clean(spec)

		// Skip volumes for which we already have something mounted on that
		// destination because of a --volume-from.
		if container.IsDestinationMounted(destination) {
			continue
		}
		path, err := container.GetResourcePath(destination)
		if err != nil {
			return err
		}

		// Refuse to mount a volume over an existing regular file in the image.
		// A stat error (e.g. path does not exist yet) is intentionally ignored here.
		stat, err := os.Stat(path)
		if err == nil && !stat.IsDir() {
			return derr.ErrorCodeMountOverFile.WithArgs(path)
		}

		volumeDriver := hostConfig.VolumeDriver
		if destination != "" && img != nil {
			if _, ok := img.ContainerConfig.Volumes[destination]; ok {
				// check for whether bind is not specified and then set to local:
				// image-declared volumes without an explicit mount fall back to
				// the default (local) driver rather than hostConfig's driver.
				if _, ok := container.MountPoints[destination]; !ok {
					volumeDriver = volume.DefaultDriverName
				}
			}
		}

		v, err := daemon.createVolume(name, volumeDriver, nil)
		if err != nil {
			return err
		}

		// Apply the container's SELinux mount label (shared relabel) to the volume path.
		if err := label.Relabel(v.Path(), container.MountLabel, true); err != nil {
			return err
		}

		// never attempt to copy existing content in a container FS to a shared volume
		if v.DriverName() == volume.DefaultDriverName {
			if err := container.CopyImagePathContent(v, destination); err != nil {
				return err
			}
		}

		// Record the mount point as read-write (`true`).
		container.AddMountPointWithVolume(destination, v, true)
	}
	return nil
}
MartyMcAir/-WebApps-Experimental-
!_ServersExpExamples/z_TestsExamples/SprBoot_Java_RestTest_MockMvc_habr527330/src/test/java/com/mscharhag/mockmvc/ProductControllerTest.java
package com.mscharhag.mockmvc; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.http.MediaType; import org.springframework.test.web.servlet.MockMvc; import static com.mscharhag.mockmvc.TestUtil.*; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.*; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*; // https://habr.com/ru/post/527330/ // https://github.com/mscharhag/blog-examples/tree/master/mockmvc-testing @SpringBootTest @AutoConfigureMockMvc public class ProductControllerTest { @Autowired private MockMvc mvc; @Test @Disabled public void requestTextBlocks() throws Exception { mvc.perform(put("/products/42") .contentType(MediaType.APPLICATION_JSON) .accept(MediaType.APPLICATION_JSON) .content("{\"name\": \"Cool Gadget\", \"description\": \"Looks cool\"}") .header("Authorization", getBasicAuthHeader("John", "<PASSWORD>"))) .andExpect(status().isOk()); mvc.perform(put("/products/42") .contentType(MediaType.APPLICATION_JSON) // .content(""" // { // "name": "Cool Gadget", // "description": "Looks cool" // }""".stripIndent()) .header("Authorization", getBasicAuthHeader("John", "secr3t"))) .andExpect(status().isOk()) .andExpect(jsonPath("$.name").value("Cool Gadget")) .andExpect(jsonPath("$.description").value("Looks cool")); } @Test public void requestCustomJacksonMapping() throws Exception { Product product = new Product("Cool Gadget", "Looks cool"); mvc.perform(put("/products/42") .contentType(MediaType.APPLICATION_JSON) .accept(MediaType.APPLICATION_JSON) .content(objectToJson(product)) .header("Authorization", getBasicAuthHeader("John", "se<PASSWORD>"))) .andExpect(status().isOk()) .andExpect(jsonPath("$.name").value("Cool Gadget")) 
.andExpect(jsonPath("$.description").value("Looks cool")); } @Test public void requestPutJson() throws Exception { Product product = new Product("Cool Gadget", "Looks cool"); mvc.perform(putJson("/products/42", product) .header("Authorization", getBasicAuthHeader("John", "secr3t"))) .andExpect(status().isOk()) .andExpect(jsonPath("$.name").value("Cool Gadget")) .andExpect(jsonPath("$.description").value("Looks cool")); } @Test public void requestPutJsonWithAuthentication() throws Exception { Product product = new Product("Cool Gadget", "Looks cool"); mvc.perform(putJson("/products/42", product).with(authentication())) .andExpect(status().isOk()) .andExpect(jsonPath("$.name").value("Cool Gadget")) .andExpect(jsonPath("$.description").value("Looks cool")); } @Test public void response() throws Exception { mvc.perform(get("/products/42")) .andExpect(status().isOk()) .andExpect(header().string("Cache-Control", "no-cache")) .andExpect(jsonPath("$.name").value("Cool Gadget")) .andExpect(jsonPath("$.description").value("Looks cool")); } @Test public void responseNoCacheHeader() throws Exception { mvc.perform(get("/products/42")) .andExpect(status().isOk()) .andExpect(noCacheHeader()) .andExpect(jsonPath("$.name").value("Cool Gadget")) .andExpect(jsonPath("$.description").value("Looks cool")); } @Test public void responseCustomResultMatcher() throws Exception { Product product = new Product("Cool Gadget", "Looks cool"); mvc.perform(get("/products/42")) .andExpect(status().isOk()) .andExpect(noCacheHeader()) .andExpect(product("$", product)); } }
gfphoenix/win32
shobj/shobj_methods_64.go
<reponame>gfphoenix/win32 // Copyright 2012 The win Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. // +build windows,amd64 windows,arm64 package shobj import ( "syscall" "unsafe" "github.com/gfphoenix/win32/handle" "github.com/gfphoenix/win32/win" ) func (obj *ITaskbarList3) SetProgressValue(hwnd handle.HWND, current uint32, length uint32) win.HRESULT { ret, _, _ := syscall.Syscall6(obj.LpVtbl.SetProgressValue, 4, uintptr(unsafe.Pointer(obj)), uintptr(hwnd), uintptr(current), uintptr(length), 0, 0) return win.HRESULT(ret) }
vectordb-io/vstl
origin_code/currencyNew.cpp
<reponame>vectordb-io/vstl<gh_stars>0 // test currency class with single data member amount #include <iostream> #include "currencyNew.h" using namespace std; int main() { currency g, h(plus, 3, 50), i, j; // try out both forms of setValue g.setValue(minus, 2, 25); i.setValue(-6.45); // do an add and output j = h.add(g); h.output(); cout << " + "; g.output(); cout << " = "; j.output(); cout << endl; // do an increment and output i.output(); cout << " incremented by "; h.output(); cout << " is "; i.increment(h); i.output(); cout << endl; // do two adds in a sequence j = i.add(g).add(h); i.output(); cout << " + "; g.output(); cout << " + "; h.output(); cout << " = "; j.output(); cout << endl; // do an increment and add cout << "Increment "; i.output(); cout << " by "; g.output(); cout << " and then add "; h.output(); cout << endl << "Result is "; j = i.increment(g).add(h); j.output(); cout << endl; cout << "Incremented object is "; i.output(); cout << endl; // test the exception cout << "Attempting to initialize with cents = 152" << endl; try {i.setValue(plus, 3, 152);} catch (illegalParameterValue e) { cout << "Caught thrown exception" << endl; e.outputMessage(); } return 0; }
mackwic/go-scalingo
scalingomock/signupservice_mock.go
<gh_stars>1-10 // Code generated by MockGen. DO NOT EDIT. // Source: github.com/Scalingo/go-scalingo/v4 (interfaces: SignUpService) // Package scalingomock is a generated GoMock package. package scalingomock import ( reflect "reflect" gomock "github.com/golang/mock/gomock" ) // MockSignUpService is a mock of SignUpService interface type MockSignUpService struct { ctrl *gomock.Controller recorder *MockSignUpServiceMockRecorder } // MockSignUpServiceMockRecorder is the mock recorder for MockSignUpService type MockSignUpServiceMockRecorder struct { mock *MockSignUpService } // NewMockSignUpService creates a new mock instance func NewMockSignUpService(ctrl *gomock.Controller) *MockSignUpService { mock := &MockSignUpService{ctrl: ctrl} mock.recorder = &MockSignUpServiceMockRecorder{mock} return mock } // EXPECT returns an object that allows the caller to indicate expected use func (m *MockSignUpService) EXPECT() *MockSignUpServiceMockRecorder { return m.recorder } // SignUp mocks base method func (m *MockSignUpService) SignUp(arg0, arg1 string) error { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "SignUp", arg0, arg1) ret0, _ := ret[0].(error) return ret0 } // SignUp indicates an expected call of SignUp func (mr *MockSignUpServiceMockRecorder) SignUp(arg0, arg1 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SignUp", reflect.TypeOf((*MockSignUpService)(nil).SignUp), arg0, arg1) }
matoruru/purescript-react-material-ui-svgicon
src/MaterialUI/SVGIcon/Icon/Brightness1Sharp.js
exports.brightness1SharpImpl = require('@material-ui/icons/Brightness1Sharp').default;
pradeepk1905/Joindesk
Backend/src/main/java/com/ariseontech/joindesk/issues/domain/WorkflowTransitionProperties.java
package com.ariseontech.joindesk.issues.domain; import com.ariseontech.joindesk.AuditModel; import com.fasterxml.jackson.annotation.JsonIgnore; import lombok.Data; import lombok.EqualsAndHashCode; import javax.persistence.*; import javax.validation.constraints.NotNull; @Entity @Data @EqualsAndHashCode(callSuper = false) @Table(indexes = { @Index(name = "JD_WFTRANSP_IDX", columnList = "transition") }) public class WorkflowTransitionProperties extends AuditModel { @Id @GeneratedValue(strategy = GenerationType.IDENTITY) private Long id; @NotNull @Enumerated(EnumType.STRING) private WorkflowTransitionPropertyTypes type; @NotNull @Enumerated(EnumType.STRING) private WorkflowTransitionPropertySubTypes subType; private String key; private String value; private String condition = "OR"; @Transient private String displayValue; @OneToOne(cascade = CascadeType.DETACH, fetch = FetchType.LAZY) @JoinColumn(name = "transition", nullable = false) @JsonIgnore private WorkflowTransition transition; @Transient private String fromStep, toStep, transitionName; public WorkflowTransitionProperties() { } public WorkflowTransitionProperties(@NotNull WorkflowTransitionPropertyTypes type, @NotNull WorkflowTransitionPropertySubTypes subType, String key, String value, @NotNull(message = "Transition is required") WorkflowTransition transition) { this.type = type; this.subType = subType; this.key = key; this.value = value; this.transition = transition; } }
SafonovMikhail/python_000577
001146StepikPyBegin/Stepik001146PyBeginсh09p01st02TASK01_20210125.py
s = 'abcdefg' print(s[0] + s[2] + s[4] + s[6])
localwaves/node
lang/shared/src/main/scala/com/localplatform/lang/v1/evaluator/ctx/LazyVal.scala
package com.localplatform.lang.v1.evaluator.ctx import cats.data.EitherT import cats.implicits._ import com.localplatform.lang.TrampolinedExecResult import com.localplatform.lang.v1.task.CoevalRef import monix.eval.Coeval sealed trait LazyVal { val evaluated: CoevalRef[Boolean] = CoevalRef.of(false) val value: TrampolinedExecResult[Any] override def toString: String = { val valueStringRepr: String = evaluated.read .map(ev => { if (ev) { value.value .attempt() .fold( err => s"Error evaluating value: $err", _.fold( err => s"Error evaluating value: $err", v => v.toString ) ) } else "Not evaluated" }) .value s"Value: $valueStringRepr" } } object LazyVal { private case class LazyValImpl(v: TrampolinedExecResult[Any]) extends LazyVal { override val value: TrampolinedExecResult[Any] = EitherT( Coeval.evalOnce( evaluated.write(true).apply() ) *> Coeval.evalOnce( v.value.apply() ) ) } def apply(v: TrampolinedExecResult[Any]): LazyVal = LazyValImpl(v) }
wesleyfeitosa/devbarber
app/src/services/api.js
import AsyncStorage from '@react-native-community/async-storage'; const BASE_API = 'https://api.b7web.com.br/devbarber/api'; const api = { checkToken: async function (token) { const response = await fetch(`${BASE_API}/auth/refresh`, { method: 'POST', headers: { Accept: 'application/json', 'Content-type': 'application/json', }, body: JSON.stringify({token}), }); const json = await response.json(); return json; }, signIn: async function ({email, password}) { const response = await fetch(`${BASE_API}/auth/login`, { method: 'POST', headers: { Accept: 'application/json', 'Content-type': 'application/json', }, body: JSON.stringify({email, password}), }); const json = await response.json(); return json; }, signUp: async function ({name, email, password}) { const response = await fetch(`${BASE_API}/user`, { method: 'POST', headers: { Accept: 'application/json', 'Content-type': 'application/json', }, body: JSON.stringify({name, email, password}), }); const json = await response.json(); return json; }, logout: async function () { const token = await AsyncStorage.getItem('@DevBarber:token'); const response = await fetch(`${BASE_API}/auth/logout`, { method: 'POST', headers: { Accept: 'application/json', 'Content-type': 'application/json', }, body: JSON.stringify({token}), }); const json = await response.json(); return json; }, getBarbers: async function (latitude = null, longitude = null, address = '') { const token = await AsyncStorage.getItem('@DevBarber:token'); const response = await fetch( `${BASE_API}/barbers?token=${token}&lat=${latitude}&lng=${longitude}&address=${address}`, ); const json = await response.json(); return json; }, getBarber: async function (id) { const token = await AsyncStorage.getItem('@DevBarber:token'); const response = await fetch(`${BASE_API}/barber/${id}?token=${token}`); const json = await response.json(); return json; }, setAppointment: async function ( userId, service, selectedDay, selectedHour, selectedMonth, selectedYear, ) { const token = 
await AsyncStorage.getItem('@DevBarber:token'); const response = await fetch(`${BASE_API}/user/appointment`, { method: 'POST', headers: { Accept: 'application/json', 'Content-type': 'application/json', }, body: JSON.stringify({ token, id: userId, service, day: selectedDay, hour: selectedHour, month: selectedMonth, year: selectedYear, }), }); const json = await response.json(); return json; }, }; export {api};
niroshw/jenkins-artifactory-plugin-repo
src/main/java/org/jfrog/hudson/pipeline/steps/CreateGradleBuildStep.java
package org.jfrog.hudson.pipeline.steps; import hudson.Extension; import org.jenkinsci.plugins.workflow.steps.AbstractStepDescriptorImpl; import org.jenkinsci.plugins.workflow.steps.AbstractStepImpl; import org.jenkinsci.plugins.workflow.steps.AbstractSynchronousStepExecution; import org.jfrog.hudson.pipeline.types.GradleBuild; import org.kohsuke.stapler.DataBoundConstructor; public class CreateGradleBuildStep extends AbstractStepImpl { @DataBoundConstructor public CreateGradleBuildStep() { } public static class Execution extends AbstractSynchronousStepExecution<GradleBuild> { private static final long serialVersionUID = 1L; @Override protected GradleBuild run() throws Exception { return new GradleBuild(); } } @Extension public static final class DescriptorImpl extends AbstractStepDescriptorImpl { public DescriptorImpl() { super(CreateGradleBuildStep.Execution.class); } @Override public String getFunctionName() { return "newGradleBuild"; } @Override public String getDisplayName() { return "New Artifactory gradle executor"; } @Override public boolean isAdvanced() { return true; } } }
Altiscale/incubator-tez
tez-runtime-internals/src/main/java/org/apache/tez/runtime/metrics/GcTimeUpdater.java
<reponame>Altiscale/incubator-tez<filename>tez-runtime-internals/src/main/java/org/apache/tez/runtime/metrics/GcTimeUpdater.java<gh_stars>10-100 /** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with this * work for additional information regarding copyright ownership. The ASF * licenses this file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package org.apache.tez.runtime.metrics; import java.lang.management.GarbageCollectorMXBean; import java.lang.management.ManagementFactory; import java.util.List; import org.apache.tez.common.counters.TezCounter; import org.apache.tez.common.counters.TezCounters; import org.apache.tez.common.counters.TaskCounter; /** * An updater that tracks the amount of time this task has spent in GC. */ class GcTimeUpdater { private long lastGcMillis = 0; private List<GarbageCollectorMXBean> gcBeans = null; TezCounters counters; public GcTimeUpdater(TezCounters counters) { this.gcBeans = ManagementFactory.getGarbageCollectorMXBeans(); getElapsedGc(); // Initialize 'lastGcMillis' with the current time spent. this.counters = counters; } /** * @return the number of milliseconds that the gc has used for CPU since the * last time this method was called. 
*/ protected long getElapsedGc() { long thisGcMillis = 0; for (GarbageCollectorMXBean gcBean : gcBeans) { thisGcMillis += gcBean.getCollectionTime(); } long delta = thisGcMillis - lastGcMillis; this.lastGcMillis = thisGcMillis; return delta; } /** * Increment the gc-elapsed-time counter. */ void incrementGcCounter() { if (null == counters) { return; // nothing to do. } TezCounter gcCounter = counters.findCounter(TaskCounter.GC_TIME_MILLIS); if (null != gcCounter) { gcCounter.increment(getElapsedGc()); } } }
NevzatBOL/ROS-Intermediate
mybot_ws/src/mybot_control/scripts/lidar_data_read.py
<reponame>NevzatBOL/ROS-Intermediate<gh_stars>0 #!/usr/bin/env python import numpy as np import math import rospy from sensor_msgs.msg import LaserScan def scancallback(scan): #print "Lidar Data : ", scan rangesCount = len(scan.ranges) #print "Ranges Count : ", rangesCount #print "Scan Ranges : ", scan.ranges data = np.array(scan.ranges[:60]) #print "Data : ", data #print [math.isinf(i) for i in data] #print np.isinf(data) print "Max Data : ", data.max() print "Max Index : ", data.argmax() print "Min Data : ", data.min() print "Min Index : ", data.argmin() if __name__ == '__main__': rospy.init_node('Lidar_read', anonymous=True) rospy.Subscriber('/mybot/laser/scan',LaserScan,scancallback) rospy.spin()
brunolauze/openpegasus-providers-old
src/Providers/UNIXProviders/RejectConnectionAction/UNIX_RejectConnectionAction_SOLARIS.hxx
#ifdef PEGASUS_OS_SOLARIS #ifndef __UNIX_REJECTCONNECTIONACTION_PRIVATE_H #define __UNIX_REJECTCONNECTIONACTION_PRIVATE_H #endif #endif
wuweiweiwu/babel
packages/babel-plugin-proposal-object-rest-spread/test/fixtures/object-rest/nested-array-2/output.js
const [a, [_ref], _ref2, [_ref3, { h: [i, _ref4] }]] = x; const { b } = _ref, c = babelHelpers.objectWithoutProperties(_ref, ["b"]), { d } = _ref2, e = babelHelpers.objectWithoutProperties(_ref2, ["d"]), { f } = _ref3, g = babelHelpers.objectWithoutProperties(_ref3, ["f"]), { j } = _ref4, k = babelHelpers.objectWithoutProperties(_ref4, ["j"]);
Lunaticf/LeetCode
dynamicProgramming/wordBreak/WordBreak.java
<reponame>Lunaticf/LeetCode package leetCode.dynamicProgramming.wordBreak; import java.util.HashSet; import java.util.List; public class WordBreak { // TLE public boolean wordBreak(String s, List<String> wordDict) { // corner case if (wordDict.size() == 0 || s.length() == 0) return false; HashSet<String> set = new HashSet<>(wordDict); return dfs(s, wordDict,set); } // 递归定义 private boolean dfs(String remainS, List<String> wordDict, HashSet<String> set) { // 出口 if (remainS.length() == 0) return true; // go // 去拿wordDict里面的字符串长度来切分 boolean res = false; for (String word : wordDict) { // 同时要判断子串是否在wordDict中 if (remainS.length() >= word.length() && set.contains(remainS.substring(0, word.length()))) { res = res || dfs(remainS.substring(word.length()), wordDict,set); } } return res; } // dfs public boolean wordBreak1(String s, List<String> wordDict) { boolean[] dp = new boolean[s.length()]; boolean[] computed = new boolean[s.length()]; HashSet<String> set = new HashSet<>(wordDict); return recur(s, dp, set, s.length() - 1,computed, wordDict); } // 自顶向下递推 private boolean recur(String s, boolean[] dp, HashSet<String> set, int i, boolean[] computed, List<String> wordDict) { if (i == -1) { return true; } // if computed before if (computed[i]) { return dp[i]; } // 递推 boolean res = false; for (String word : wordDict) { if (i - word.length() >= -1 && set.contains(s.substring(i - word.length() + 1, i + 1)) ) { res = res || recur(s, dp, set, i - word.length(), computed, wordDict); } } dp[i] = res; computed[i] = true; return res; } // dp1 public boolean wordBreak2(String s, List<String> wordDict) { // 假设dp[i]代表到i-1位置能否wordBreak比较方便 boolean[] dp = new boolean[s.length() + 1]; dp[0] = true; for (int i = 1; i < s.length() + 1; i++) { // 对于每个位置来说 后退一个单词看看 for (String word : wordDict) { if (i >= word.length()) { if (word.equals(s.substring(i - word.length(), i))) { if (dp[i - word.length()]) { dp[i] = true; break; } } } } } return dp[s.length()]; } // dp2 public boolean wordBreak3(String 
s, List<String> wordDict) { // dp[i]表示到i-1位置的是否能break boolean[] dp = new boolean[s.length() + 1]; dp[0] = true; HashSet set = new HashSet(wordDict); for (int i = 1; i < s.length() + 1; i++) { for (int j = i - 1; j >= 0; j--) { if (dp[j] && set.contains(s.substring(j, i))) { dp[i] = true; break; } } } return dp[s.length()]; } }
ivosh/jcm2018
ui/web/src/registrator/EmailComposer/EmailComposerActions.js
<filename>ui/web/src/registrator/EmailComposer/EmailComposerActions.js import { API_SEND_EMAIL } from 'ui-common/common'; import { WS_API } from 'ui-common/store/wsAPI'; export const SEND_EMAIL = 'SEND_EMAIL'; export const sendEmail = ({ mailTo, subject, body }) => ({ [WS_API]: { type: SEND_EMAIL, endpoint: API_SEND_EMAIL, request: { mailTo, subject, body }, title: 'posílání emailu', }, });
Vizzuality/helix-scope
app/components/modals/MenuModal.js
import React, { Component } from 'react'; import PropTypes from 'prop-types'; import Modal from 'components/common/Modal'; import Button from 'components/common/Button'; import NavLink from 'components/common/NavLink'; class MenuModal extends Component { handleShareOpen() { this.props.setShareModal(true); this.props.setMenuModal(false); } render() { return ( <div> <Modal className="menu" modalOpen={this.props.menuModalOpen} onSetModal={this.props.setMenuModal} btnStyle="primary" > <ul className="mobile-menu"> <li> <NavLink to="/global-maps" className="-green" onClick={() => this.props.setMenuModal(false)}>Global Maps</NavLink> </li> <li> <NavLink to="/countries" className="-orange" onClick={() => this.props.setMenuModal(false)}>Countries</NavLink> </li> <li> <NavLink to="/compare" className="-red" onClick={() => this.props.setMenuModal(false)}>Compare</NavLink> </li> <li> <Button icon="share" style="none" size="small" onClick={() => this.handleShareOpen} /> </li> </ul> </Modal> </div> ); } } MenuModal.propTypes = { /** * Define whether modal is open or not **/ menuModalOpen: PropTypes.bool, /** * Callback when closing or opening modal **/ setMenuModal: PropTypes.func, /** * Callback when closing or opening modal **/ setShareModal: PropTypes.func, /** * Title for Modal component **/ title: PropTypes.string, /** * Route of current location **/ shareUrl: PropTypes.string }; export default MenuModal;
dopplershift/advent-of-code
2018/day16.py
<reponame>dopplershift/advent-of-code from ast import literal_eval class Computer: def __init__(self, regs=None): if regs is None: regs = [0, 0, 0, 0] self.regs = regs def addr(self, a, b, c): self.regs[c] = self.regs[a] + self.regs[b] def addi(self, a, b, c): self.regs[c] = self.regs[a] + b def mulr(self, a, b, c): self.regs[c] = self.regs[a] * self.regs[b] def muli(self, a, b, c): self.regs[c] = self.regs[a] * b def banr(self, a, b, c): self.regs[c] = self.regs[a] & self.regs[b] def bani(self, a, b, c): self.regs[c] = self.regs[a] & b def borr(self, a, b, c): self.regs[c] = self.regs[a] | self.regs[b] def bori(self, a, b, c): self.regs[c] = self.regs[a] | b def setr(self, a, b, c): self.regs[c] = self.regs[a] def seti(self, a, b, c): self.regs[c] = a def gtir(self, a, b, c): self.regs[c] = int(a > self.regs[b]) def gtri(self, a, b, c): self.regs[c] = int(self.regs[a] > b) def gtrr(self, a, b, c): self.regs[c] = int(self.regs[a] > self.regs[b]) def eqir(self, a, b, c): self.regs[c] = int(a == self.regs[b]) def eqri(self, a, b, c): self.regs[c] = int(self.regs[a] == b) def eqrr(self, a, b, c): self.regs[c] = int(self.regs[a] == self.regs[b]) def train(self, commands, before, after): all_funcs = (self.addr, self.addi, self.mulr, self.muli, self.banr, self.bani, self.borr, self.bori, self.setr, self.seti, self.gtir, self.gtri, self.gtrr, self.eqir, self.eqri, self.eqrr) opcode_map = dict() for b, c, a in zip(before, commands, after): opcode, *params = c options = opcode_map.setdefault(opcode, list(all_funcs)) works = [] for cmd in options: self.regs = b.copy() cmd(*params) if self.regs == a: works.append(cmd) opcode_map[opcode] = works # Determine what options exist for each funciton rather than each code func_options = dict() for code, options in opcode_map.items(): for o in options: func_options.setdefault(o, []).append(code) self._code_map = dict() done = set() while func_options: for func, options in sorted(func_options.items(), key=lambda i: len(i[1])): left = 
set(options) - done if len(left) == 1: done |= left self._code_map[list(left)[0]] = func for func in self._code_map.values(): if func in func_options: func_options.pop(func) self.regs = [0, 0, 0, 0] def __call__(self, opcode, a, b, c): self._code_map[opcode](a, b, c) def count_works(command, before, after): c = Computer() works = 0 for cmd in (c.addr, c.addi, c.mulr, c.muli, c.banr, c.bani, c.borr, c.bori, c.setr, c.seti, c.gtir, c.gtri, c.gtrr, c.eqir, c.eqri, c.eqrr): c.regs = before.copy() cmd(*command[1:]) works += int(c.regs == after) return works def parse(f): before = [] command = [] after = [] data = list(f) for i in range(0, len(data), 4): before.append(literal_eval(data[i].split(':')[-1].strip())) command.append(list(map(int, data[i+1].split()))) after.append(literal_eval(data[i+2].split(':')[-1].strip())) return before, command, after def total(before, command, after): count = 0 for b,c,a in zip(before, command, after): count += int(count_works(c, b, a) >= 3) return count if __name__ == '__main__': from aocd.models import Puzzle assert count_works([9, 2, 1, 2], [3, 2, 1, 1], [3, 2, 2, 1]) == 3 puz = Puzzle(2018, 16) inp1, inp2 = puz.input_data.split('\n\n\n\n') before, command, after = parse(inp1.split('\n')) puz.answer_a = total(before, command, after) print(f'Part 1: {puz.answer_a}') comp = Computer() comp.train(command, before, after) for line in inp2.split('\n'): comp(*map(int, line.split())) puz.answer_b = comp.regs[0] print(f'Part 2: {puz.answer_b}')
earaujoassis/watchman
console.rb
#!/usr/bin/env ruby # frozen_string_literal: true require "./lib/watchman" require "./lib/executor"
idoshveki/magma
symphony/app/fbcnms-projects/inventory/app/mutations/EditServiceMutation.js
/** * Copyright 2004-present Facebook. All Rights Reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. * * @flow * @format */ import RelayEnvironemnt from '../common/RelayEnvironment.js'; import {commitMutation, graphql} from 'react-relay'; import type { EditServiceMutation, EditServiceMutationResponse, EditServiceMutationVariables, } from './__generated__/EditServiceMutation.graphql'; import type {MutationCallbacks} from './MutationCallbacks.js'; import type {SelectorStoreUpdater} from 'relay-runtime'; const mutation = graphql` mutation EditServiceMutation($data: ServiceEditData!) { editService(data: $data) { ...ServiceCard_service } } `; export default ( variables: EditServiceMutationVariables, callbacks?: MutationCallbacks<EditServiceMutationResponse>, updater?: SelectorStoreUpdater, ) => { const {onCompleted, onError} = callbacks ? callbacks : {}; commitMutation<EditServiceMutation>(RelayEnvironemnt, { mutation, variables, updater, onCompleted, onError, }); };
nowkoai/test
ee/spec/models/project_security_setting_spec.rb
<filename>ee/spec/models/project_security_setting_spec.rb # frozen_string_literal: true require 'spec_helper' RSpec.describe ProjectSecuritySetting do using RSpec::Parameterized::TableSyntax describe 'associations' do subject { create(:project).security_setting } it { is_expected.to belong_to(:project) } end describe '#auto_fix_enabled?' do subject { setting.auto_fix_enabled? } let(:setting) { build(:project_security_setting) } where(:license, :feature_flag, :auto_fix_container_scanning, :auto_fix_dependency_scanning, :auto_fix_sast, :auto_fix_enabled?) do true | true | true | true | true | true false | true | true | true | true | false true | false | true | true | true | false true | true | false | true | true | true true | true | true | false | true | true true | true | false | false | true | false true | true | true | true | false | true end with_them do before do stub_licensed_features(vulnerability_auto_fix: license) stub_feature_flags(security_auto_fix: feature_flag) setting.auto_fix_container_scanning = auto_fix_container_scanning setting.auto_fix_dependency_scanning = auto_fix_dependency_scanning setting.auto_fix_sast = auto_fix_sast end it { is_expected.to eq(auto_fix_enabled?) } end end describe '#auto_fix_enabled_types' do subject { setting.auto_fix_enabled_types } let_it_be(:setting) { build(:project_security_setting) } before do setting.auto_fix_container_scanning = false setting.auto_fix_dependency_scanning = true setting.auto_fix_sast = true end it 'return status only for available types' do is_expected.to eq([:dependency_scanning]) end end end
ContextLogic/cldr
resources/locales/es_HN/calendar.go
<gh_stars>0 package es_HN import "github.com/ContextLogic/cldr" var calendar = cldr.Calendar{ Formats: cldr.CalendarFormats{ Date: cldr.CalendarDateFormat{Full: "EEEE dd 'de' MMMM 'de' y", Long: "dd 'de' MMMM 'de' y", Medium: "", Short: ""}, Time: cldr.CalendarDateFormat{}, DateTime: cldr.CalendarDateFormat{}, }, FormatNames: cldr.CalendarFormatNames{}, }
jpluscplusm/paas-cf
platform-tests/src/platform/vendor/github.com/concourse/atc/db/migrations/11_replace_builds_abort_hijack_urls_with_guid_and_endpoint.go
package migrations import ( "database/sql" "net/url" "strings" "github.com/BurntSushi/migration" ) func ReplaceBuildsAbortHijackURLsWithGuidAndEndpoint(tx migration.LimitedTx) error { _, err := tx.Exec(`ALTER TABLE builds ADD COLUMN guid varchar(36)`) if err != nil { return err } _, err = tx.Exec(`ALTER TABLE builds ADD COLUMN endpoint varchar(128)`) if err != nil { return err } cursor := 0 for { var id int var abortURLStr sql.NullString err := tx.QueryRow(` SELECT id, abort_url FROM builds WHERE id > $1 LIMIT 1 `, cursor).Scan(&id, &abortURLStr) if err != nil { if err == sql.ErrNoRows { break } return err } cursor = id if !abortURLStr.Valid { continue } // determine guid + endpoint from abort url // // format should be http://foo.com:5050/builds/some-guid/abort // // best-effort; skip if not possible, not a big deal abortURL, err := url.Parse(abortURLStr.String) if err != nil { continue } pathSegments := strings.Split(abortURL.Path, "/") if len(pathSegments) != 4 { continue } guid := pathSegments[2] endpoint := abortURL.Scheme + "://" + abortURL.Host _, err = tx.Exec(` UPDATE builds SET guid = $1, endpoint = $2 WHERE id = $3 `, guid, endpoint, id) if err != nil { continue } } _, err = tx.Exec(`ALTER TABLE builds DROP COLUMN abort_url`) if err != nil { return err } _, err = tx.Exec(`ALTER TABLE builds DROP COLUMN hijack_url`) if err != nil { return err } return nil }
Adam-sHub/cfn-lint
test/unit/module/maintenance/test_update_iam_policies.py
<filename>test/unit/module/maintenance/test_update_iam_policies.py """ Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. SPDX-License-Identifier: MIT-0 """ import sys import logging from test.testlib.testcase import BaseTestCase from mock import patch import cfnlint.maintenance LOGGER = logging.getLogger('cfnlint.maintenance') LOGGER.addHandler(logging.NullHandler()) class TestUpdateIamPolicies(BaseTestCase): """Used for Testing Rules""" @patch('cfnlint.maintenance.get_url_content') @patch('cfnlint.maintenance.json.dump') def test_update_iam_policies(self, mock_json_dump, mock_content): """Success update iam policies""" mock_content.return_value = 'app.PolicyEditorConfig={"serviceMap":{"Manage Amazon API Gateway":{"Actions":[]},"Amazon Kinesis Video Streams":{"Actions":[]}}}' if sys.version_info.major == 3: builtin_module_name = 'builtins' else: builtin_module_name = '__builtin__' with patch('{}.open'.format(builtin_module_name)) as mock_builtin_open: cfnlint.maintenance.update_iam_policies() mock_json_dump.assert_called_with( { 'serviceMap': { 'Manage Amazon API Gateway': { 'Actions': ['HEAD', 'OPTIONS'] }, 'Amazon Kinesis Video Streams': { 'Actions': ['StartStreamEncryption'] } } }, mock_builtin_open.return_value.__enter__.return_value, indent=2, separators=(',', ': '), sort_keys=True )
atomic-reactor-cloud/Reactium-UI
src/app/components/common-ui/DatePicker/enums.js
// Shared constants for the DatePicker component.

// Horizontal alignment options for the picker popup.
const ALIGN = {
    LEFT: 'left',
    RIGHT: 'right',
    CENTER: 'center',
};

// Event names dispatched by the component.
const EVENT = {
    CHANGE: 'change',
    INIT: 'init',
    NEXT: 'next',
    NAV: 'nav',
    PREV: 'prev',
    TODAY: 'today',
};

// moment.js format strings used for dates and the calendar header.
const FORMAT = {
    DATE: 'L',
    HEADER: 'MMMM YYYY',
};

// Day-of-week column headers, Sunday first.
const LABELS = ['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat'];

export default {
    ALIGN,
    DEBUG: false,
    EVENT,
    FORMAT,
    LABELS,
};
wsxiaozhang/open-service-broker-alibabacloud
vendor/github.com/aliyun/alibaba-cloud-sdk-go/services/cdn/migrate_domain_to_https_delivery.go
package cdn

//Licensed under the Apache License, Version 2.0 (the "License");
//you may not use this file except in compliance with the License.
//You may obtain a copy of the License at
//
//http://www.apache.org/licenses/LICENSE-2.0
//
//Unless required by applicable law or agreed to in writing, software
//distributed under the License is distributed on an "AS IS" BASIS,
//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//See the License for the specific language governing permissions and
//limitations under the License.
//
// Code generated by Alibaba Cloud SDK Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.

import (
	"github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests"
	"github.com/aliyun/alibaba-cloud-sdk-go/sdk/responses"
)

// MigrateDomainToHttpsDelivery invokes the cdn.MigrateDomainToHttpsDelivery API synchronously
// api document: https://help.aliyun.com/api/cdn/migratedomaintohttpsdelivery.html
func (client *Client) MigrateDomainToHttpsDelivery(request *MigrateDomainToHttpsDeliveryRequest) (response *MigrateDomainToHttpsDeliveryResponse, err error) {
	response = CreateMigrateDomainToHttpsDeliveryResponse()
	err = client.DoAction(request, response)
	return
}

// MigrateDomainToHttpsDeliveryWithChan invokes the cdn.MigrateDomainToHttpsDelivery API asynchronously
// api document: https://help.aliyun.com/api/cdn/migratedomaintohttpsdelivery.html
// asynchronous document: https://help.aliyun.com/document_detail/66220.html
func (client *Client) MigrateDomainToHttpsDeliveryWithChan(request *MigrateDomainToHttpsDeliveryRequest) (<-chan *MigrateDomainToHttpsDeliveryResponse, <-chan error) {
	// Both channels are buffered (capacity 1) so the async task never blocks
	// if the caller has not started receiving yet.
	responseChan := make(chan *MigrateDomainToHttpsDeliveryResponse, 1)
	errChan := make(chan error, 1)
	err := client.AddAsyncTask(func() {
		defer close(responseChan)
		defer close(errChan)
		response, err := client.MigrateDomainToHttpsDelivery(request)
		if err != nil {
			errChan <- err
		} else {
			responseChan <- response
		}
	})
	if err != nil {
		// The task could not even be queued: report the error and close both
		// channels immediately since the task body will never run.
		errChan <- err
		close(responseChan)
		close(errChan)
	}
	return responseChan, errChan
}

// MigrateDomainToHttpsDeliveryWithCallback invokes the cdn.MigrateDomainToHttpsDelivery API asynchronously
// api document: https://help.aliyun.com/api/cdn/migratedomaintohttpsdelivery.html
// asynchronous document: https://help.aliyun.com/document_detail/66220.html
func (client *Client) MigrateDomainToHttpsDeliveryWithCallback(request *MigrateDomainToHttpsDeliveryRequest, callback func(response *MigrateDomainToHttpsDeliveryResponse, err error)) <-chan int {
	// result receives 1 on completion of the async task, 0 if it could not be
	// queued; capacity 1 so the send never blocks.
	result := make(chan int, 1)
	err := client.AddAsyncTask(func() {
		var response *MigrateDomainToHttpsDeliveryResponse
		var err error
		defer close(result)
		response, err = client.MigrateDomainToHttpsDelivery(request)
		callback(response, err)
		result <- 1
	})
	if err != nil {
		defer close(result)
		callback(nil, err)
		result <- 0
	}
	return result
}

// MigrateDomainToHttpsDeliveryRequest is the request struct for api MigrateDomainToHttpsDelivery
type MigrateDomainToHttpsDeliveryRequest struct {
	*requests.RpcRequest
	PrivateKey        string           `position:"Query" name:"PrivateKey"`
	ServerCertificate string           `position:"Query" name:"ServerCertificate"`
	SecurityToken     string           `position:"Query" name:"SecurityToken"`
	OwnerAccount      string           `position:"Query" name:"OwnerAccount"`
	DomainName        string           `position:"Query" name:"DomainName"`
	OwnerId           requests.Integer `position:"Query" name:"OwnerId"`
}

// MigrateDomainToHttpsDeliveryResponse is the response struct for api MigrateDomainToHttpsDelivery
type MigrateDomainToHttpsDeliveryResponse struct {
	*responses.BaseResponse
	RequestId string `json:"RequestId" xml:"RequestId"`
}

// CreateMigrateDomainToHttpsDeliveryRequest creates a request to invoke MigrateDomainToHttpsDelivery API
func CreateMigrateDomainToHttpsDeliveryRequest() (request *MigrateDomainToHttpsDeliveryRequest) {
	request = &MigrateDomainToHttpsDeliveryRequest{
		RpcRequest: &requests.RpcRequest{},
	}
	request.InitWithApiInfo("Cdn", "2014-11-11", "MigrateDomainToHttpsDelivery", "", "")
	return
}

// CreateMigrateDomainToHttpsDeliveryResponse creates a response to parse from MigrateDomainToHttpsDelivery response
func CreateMigrateDomainToHttpsDeliveryResponse() (response *MigrateDomainToHttpsDeliveryResponse) {
	response = &MigrateDomainToHttpsDeliveryResponse{
		BaseResponse: &responses.BaseResponse{},
	}
	return
}
blinc/hibernate-memcached
src/main/java/com/googlecode/hibernate/memcached/strategy/Item.java
package com.googlecode.hibernate.memcached.strategy;

import java.io.Serializable;
import java.util.Comparator;
import java.util.UUID;

import org.hibernate.cache.spi.access.SoftLock;

/**
 * Wrapper type representing unlocked items.
 */
public final class Item implements Serializable, Lockable {

    private static final long serialVersionUID = 1L;

    private final long timestamp;
    private final Object value;
    private final Object version;

    /**
     * Creates an unlocked item wrapping the given value with a version and creation timestamp.
     */
    Item(Object value, Object version, long timestamp) {
        this.timestamp = timestamp;
        this.value = value;
        this.version = version;
    }

    /** Returns the cached value wrapped by this item. */
    public Object getValue() {
        return value;
    }

    /**
     * An item is readable by transactions that started strictly after it was
     * created.
     */
    public boolean isReadable(long txTimestamp) {
        return timestamp < txTimestamp;
    }

    /** Plain items carry no lock, so they can never be unlocked. */
    public boolean isUnlockable(SoftLock lock) {
        return false;
    }

    /**
     * Writeable only when a version is present and it compares strictly less
     * than the incoming version (i.e. the new data is newer).
     */
    public boolean isWriteable(long txTimestamp, Object newVersion, Comparator versionComparator) {
        if (version == null) {
            return false;
        }
        return versionComparator.compare(version, newVersion) < 0;
    }

    /** Promotes this item to a locked entry, preserving its version. */
    public Lock lock(long timeout, UUID uuid, long lockId) {
        return new Lock(timeout, uuid, lockId, version);
    }
}
memfault/particle-firmware-library
src/memfault-firmware-sdk/components/core/src/memfault_log_data_source.c
<gh_stars>0 //! @file //! //! Copyright (c) Memfault, Inc. //! See License.txt for details //! #include "memfault-firmware-sdk/components/include/memfault/config.h" #if MEMFAULT_LOG_DATA_SOURCE_ENABLED #include "memfault_log_data_source_private.h" #include "memfault_log_private.h" #include <stdint.h> #include <string.h> #include "memfault-firmware-sdk/components/include/memfault/core/compiler.h" #include "memfault-firmware-sdk/components/include/memfault/core/data_packetizer_source.h" #include "memfault-firmware-sdk/components/include/memfault/core/log.h" #include "memfault-firmware-sdk/components/include/memfault/core/math.h" #include "memfault-firmware-sdk/components/include/memfault/core/platform/overrides.h" #include "memfault-firmware-sdk/components/include/memfault/core/platform/system_time.h" #include "memfault-firmware-sdk/components/include/memfault/core/serializer_helper.h" #include "memfault-firmware-sdk/components/include/memfault/core/serializer_key_ids.h" #include "memfault-firmware-sdk/components/include/memfault/util/cbor.h" typedef struct { bool triggered; size_t num_logs; sMemfaultCurrentTime trigger_time; } sMfltLogDataSourceCtx; static sMfltLogDataSourceCtx s_memfault_log_data_source_ctx; static bool prv_log_is_sent(uint8_t hdr) { return hdr & MEMFAULT_LOG_HDR_SENT_MASK; } typedef struct { size_t num_logs; } sMfltLogCountingCtx; static bool prv_log_iterate_counting_callback(sMfltLogIterator *iter) { sMfltLogCountingCtx *const ctx = (sMfltLogCountingCtx *)(iter->user_ctx); if (!prv_log_is_sent(iter->entry.hdr)) { ++ctx->num_logs; } return true; } void memfault_log_trigger_collection(void) { if (s_memfault_log_data_source_ctx.triggered) { return; } sMfltLogCountingCtx ctx = { 0 }; sMfltLogIterator iter = { .user_ctx = &ctx }; memfault_log_iterate(prv_log_iterate_counting_callback, &iter); if (ctx.num_logs == 0) { return; } memfault_lock(); { // Check again in the unlikely case this function was called concurrently: if 
(s_memfault_log_data_source_ctx.triggered) { memfault_unlock(); return; } s_memfault_log_data_source_ctx.triggered = true; if (!memfault_platform_time_get_current(&s_memfault_log_data_source_ctx.trigger_time)) { s_memfault_log_data_source_ctx.trigger_time.type = kMemfaultCurrentTimeType_Unknown; } s_memfault_log_data_source_ctx.num_logs = ctx.num_logs; } memfault_unlock(); } bool memfault_log_data_source_has_been_triggered(void) { // Note: memfault_lock() is held when this is called by memfault_log return s_memfault_log_data_source_ctx.triggered; } typedef struct { size_t num_logs; sMemfaultCurrentTime trigger_time; sMemfaultCborEncoder encoder; bool has_encoding_error; bool should_stop_encoding; union { size_t num_encoded_logs; size_t num_marked_sent_logs; }; } sMfltLogEncodingCtx; static bool prv_copy_msg_callback(sMfltLogIterator *iter, MEMFAULT_UNUSED size_t offset, const char *buf, size_t buf_len) { sMfltLogEncodingCtx *const ctx = (sMfltLogEncodingCtx *)iter->user_ctx; return memfault_cbor_join(&ctx->encoder, buf, buf_len); } static bool prv_encode_current_log(sMemfaultCborEncoder *encoder, sMfltLogIterator *iter) { if (!memfault_cbor_encode_unsigned_integer(encoder, memfault_log_get_level_from_hdr(iter->entry.hdr))) { return false; } eMemfaultLogRecordType type = memfault_log_get_type_from_hdr(iter->entry.hdr); bool success; // Note: We encode "preformatted" logs (i.e logs that have run through printf) as cbor text // string and "compact" logs as a cbor byte array so we can differentiate between the two while // decoding if (type == kMemfaultLogRecordType_Preformatted) { success = memfault_cbor_encode_string_begin(encoder, iter->entry.len); } else { // kMemfaultLogRecordType_Compact success = memfault_cbor_encode_byte_string_begin(encoder, iter->entry.len); } return (success && memfault_log_iter_copy_msg(iter, prv_copy_msg_callback)); } static bool prv_log_iterate_encode_callback(sMfltLogIterator *iter) { sMfltLogEncodingCtx *const ctx = (sMfltLogEncodingCtx 
*)iter->user_ctx; if (ctx->should_stop_encoding) { return false; } if (!prv_log_is_sent(iter->entry.hdr)) { ctx->has_encoding_error |= !prv_encode_current_log(&ctx->encoder, iter); // It's possible more logs have been added to the buffer // after the memfault_log_data_source_has_been_triggered() call. They cannot be included, // because the total message size has already been communicated to the packetizer. if (++ctx->num_encoded_logs == ctx->num_logs) { return false; } } return true; } static bool prv_encode(sMemfaultCborEncoder *encoder, void *iter) { sMfltLogEncodingCtx *ctx = (sMfltLogEncodingCtx *)((sMfltLogIterator *)iter)->user_ctx; if (!memfault_serializer_helper_encode_metadata_with_time( encoder, kMemfaultEventType_Logs, &ctx->trigger_time)) { return false; } if (!memfault_cbor_encode_unsigned_integer(encoder, kMemfaultEventKey_EventInfo)) { return false; } // To save space, all logs are encoded into a single array (as opposed to using a map or // array per log): const size_t elements_per_log = 2; // level, msg if (!memfault_cbor_encode_array_begin(encoder, elements_per_log * ctx->num_logs)) { return false; } memfault_log_iterate(prv_log_iterate_encode_callback, iter); return ctx->has_encoding_error; } static void prv_init_encoding_ctx(sMfltLogEncodingCtx *ctx) { *ctx = (sMfltLogEncodingCtx) { .num_logs = s_memfault_log_data_source_ctx.num_logs, .trigger_time = s_memfault_log_data_source_ctx.trigger_time, }; } static bool prv_has_logs(size_t *total_size) { if (!s_memfault_log_data_source_ctx.triggered) { return false; } sMfltLogEncodingCtx ctx; prv_init_encoding_ctx(&ctx); sMfltLogIterator iter = { .read_offset = 0, .user_ctx = &ctx }; *total_size = memfault_serializer_helper_compute_size(&ctx.encoder, prv_encode, &iter); return true; } typedef struct { uint32_t offset; uint8_t *buf; size_t buf_len; size_t data_source_bytes_written; sMfltLogEncodingCtx encoding_ctx; } sMfltLogsDestCtx; static void prv_encoder_callback(void *encoder_ctx, uint32_t 
src_offset, const void *src_buf, size_t src_buf_len) { sMfltLogsDestCtx *dest = (sMfltLogsDestCtx *)encoder_ctx; const size_t dest_end_offset = dest->offset + dest->buf_len; // Optimization: stop encoding if the encoder writes are past the destination buffer: if (src_offset > dest_end_offset) { dest->encoding_ctx.should_stop_encoding = true; return; } const size_t src_end_offset = src_offset + src_buf_len; const size_t intersection_start_offset = MEMFAULT_MAX(src_offset, dest->offset); const size_t intersection_end_offset = MEMFAULT_MIN(src_end_offset, dest_end_offset); if (intersection_end_offset <= intersection_start_offset) { return; // no intersection } const size_t intersection_len = intersection_end_offset - intersection_start_offset; memcpy(dest->buf + (intersection_start_offset - dest->offset), ((const uint8_t *)src_buf) + (intersection_start_offset - src_offset), intersection_len); dest->data_source_bytes_written += intersection_len; } static bool prv_logs_read(uint32_t offset, void *buf, size_t buf_len) { sMfltLogsDestCtx dest_ctx = (sMfltLogsDestCtx) { .offset = offset, .buf = buf, .buf_len = buf_len, }; sMfltLogIterator iter = { .user_ctx = &dest_ctx.encoding_ctx, }; prv_init_encoding_ctx(&dest_ctx.encoding_ctx); // Note: UINT_MAX is passed as length, because it is possible and expected that the output is written // partially by the callback. The callback takes care of not overrunning the output buffer itself. 
memfault_cbor_encoder_init(&dest_ctx.encoding_ctx.encoder, prv_encoder_callback, &dest_ctx, UINT32_MAX); prv_encode(&dest_ctx.encoding_ctx.encoder, &iter); return buf_len == dest_ctx.data_source_bytes_written; } static bool prv_log_iterate_mark_sent_callback(sMfltLogIterator *iter) { sMfltLogEncodingCtx *const ctx = (sMfltLogEncodingCtx *)iter->user_ctx; if (!prv_log_is_sent(iter->entry.hdr)) { iter->entry.hdr |= MEMFAULT_LOG_HDR_SENT_MASK; memfault_log_iter_update_entry(iter); if (++ctx->num_marked_sent_logs == ctx->num_logs) { return false; } } return true; } static void prv_logs_mark_sent(void) { sMfltLogEncodingCtx ctx; sMfltLogIterator iter = { .read_offset = 0, .user_ctx = &ctx }; prv_init_encoding_ctx(&ctx); memfault_log_iterate(prv_log_iterate_mark_sent_callback, &iter); memfault_lock(); s_memfault_log_data_source_ctx = (sMfltLogDataSourceCtx) { 0 }; memfault_unlock(); } //! Expose a data source for use by the Memfault Packetizer const sMemfaultDataSourceImpl g_memfault_log_data_source = { .has_more_msgs_cb = prv_has_logs, .read_msg_cb = prv_logs_read, .mark_msg_read_cb = prv_logs_mark_sent, }; void memfault_log_data_source_reset(void) { s_memfault_log_data_source_ctx = (sMfltLogDataSourceCtx) { 0 }; } size_t memfault_log_data_source_count_unsent_logs(void) { sMfltLogCountingCtx ctx = { 0 }; sMfltLogIterator iter = { .user_ctx = &ctx }; memfault_log_iterate(prv_log_iterate_counting_callback, &iter); return ctx.num_logs; } #endif /* MEMFAULT_LOG_DATA_SOURCE_ENABLED */
RCoon/CodingBat
Java/array1/MakeMiddleTest.java
package array1;

import java.util.Arrays;

/*
 * Given an array of ints of even length, return a new array length 2 containing
 * the middle two elements from the original array. The original array will be
 * length 2 or more.
 *
 * makeMiddle({1, 2, 3, 4}) --> {2, 3}
 * makeMiddle({7, 1, 2, 3, 4, 9}) --> {2, 3}
 * makeMiddle({1, 2}) --> {1, 2}
 */
public class MakeMiddleTest {

    public static void main(String[] args) {
        MakeMiddleTest test = new MakeMiddleTest();
        System.out.println(Arrays.toString(test.makeMiddle(
                new int[] {1, 2, 3, 4})));
        System.out.println(Arrays.toString(test.makeMiddle(
                new int[] {7, 1, 2, 3, 4, 9})));
        System.out.println(Arrays.toString(test.makeMiddle(
                new int[] {1, 2})));
    }

    /**
     * Returns a new length-2 array holding the middle two elements of nums.
     * Always allocates a fresh array: the previous special case for length 2
     * returned the caller's own array (aliasing it), even though the general
     * formula already yields {nums[0], nums[1]} for that input.
     */
    public int[] makeMiddle(int[] nums) {
        int mid = nums.length / 2;
        return new int[] { nums[mid - 1], nums[mid] };
    }
}
Mu-L/Castor3D
tools/GuiCommon/Properties/Math/MatrixProperties.inl
#include "GuiCommon/Properties/Math/MatrixProperties.hpp"

#include "GuiCommon/Properties/Math/PointProperties.hpp"

namespace GuiCommon
{
	//************************************************************************************************

	// Const-reference extraction of a SquareMatrix from a wxVariant,
	// specialized per supported dimension (2/3/4, float).

	template<>
	inline castor::SquareMatrix< float, 2 > const & matrixRefFromVariant< float, 2 >( wxVariant const & variant )
	{
		return Matrix2x2fRefFromVariant( variant );
	}

	template<>
	inline castor::SquareMatrix< float, 3 > const & matrixRefFromVariant< float, 3 >( wxVariant const & variant )
	{
		return Matrix3x3fRefFromVariant( variant );
	}

	template<>
	inline castor::SquareMatrix< float, 4 > const & matrixRefFromVariant< float, 4 >( wxVariant const & variant )
	{
		return Matrix4x4fRefFromVariant( variant );
	}

	//************************************************************************************************

	// Mutable-reference counterparts of the specializations above.

	template<>
	inline castor::SquareMatrix< float, 2 > & matrixRefFromVariant< float, 2 >( wxVariant & variant )
	{
		return Matrix2x2fRefFromVariant( variant );
	}

	template<>
	inline castor::SquareMatrix< float, 3 > & matrixRefFromVariant< float, 3 >( wxVariant & variant )
	{
		return Matrix3x3fRefFromVariant( variant );
	}

	template<>
	inline castor::SquareMatrix< float, 4 > & matrixRefFromVariant< float, 4 >( wxVariant & variant )
	{
		return Matrix4x4fRefFromVariant( variant );
	}

	//************************************************************************************************

	// Copies a matrix value into an existing wxVariant in place.
	template< typename Type, uint32_t Count >
	void setVariantFromMatrix( wxVariant & variant
		, castor::SquareMatrix< Type, Count > const & value )
	{
		matrixRefFromVariant< Type, Count >( variant ) = value;
	}

	//************************************************************************************************

	// Helper bundling the child-property management for MatrixProperty:
	// one read-only PointProperty child per matrix row/column.
	template< typename T, uint32_t Count >
	struct MatrixPropertyHelper
	{
		// Creates Count child PointProperty entries, one per matrix line.
		// NOTE(review): each child's component names combine colNames[i] with
		// rowNames[j], while the child's label uses rowNames[i] — looks
		// asymmetric; confirm the intended row/column naming convention.
		static void addChildren( MatrixProperty< T, Count > * prop
			, wxString const * rowNames
			, wxString const * colNames
			, castor::SquareMatrix< T, Count > const & value )
		{
			for ( uint32_t i = 0; i < Count; ++i )
			{
				wxString names[Count];

				for ( uint32_t j = 0; j < Count; ++j )
				{
					names[j] << colNames[i] << wxT( "." ) << rowNames[j];
				}

				// Children are display-only; the matrix is edited as a whole.
				wxPGProperty * property = new PointProperty< T, Count >( names
					, wxString() << _( "Col " ) << rowNames[i]
					, wxPG_LABEL
					, castor::Point< T, Count >( value[i].constPtr() ) );
				property->Enable( false );
				prop->AddPrivateChild( property );
			}
		}

		// Pushes the parent's current matrix value back into each child.
		static void refreshChildren( MatrixProperty< T, Count > * prop )
		{
			castor::SquareMatrix< T, Count > const & matrix = matrixRefFromVariant< T, Count >( prop->GetValue() );

			for ( uint32_t i = 0; i < Count; ++i )
			{
				wxVariant value = prop->Item( i )->GetValue();
				setVariantFromPoint< T, Count >( value, castor::Point< T, Count >( matrix[i].constPtr() ) );
				prop->Item( i )->SetValue( value );
			}
		}

		// Folds a changed child (one row) back into the parent matrix value.
		static wxVariant childChanged( wxVariant & thisValue
			, int index
			, wxVariant & newValue )
		{
			castor::SquareMatrix< T, Count > & matrix = matrixRefFromVariant< T, Count >( thisValue );
			castor::Point< T, Count > const & row = PointRefFromVariant< T, Count >( newValue );
			matrix.setRow( uint32_t( index ), row );
			wxVariant result;
			result << matrix;
			return result;
		}
	};

	//************************************************************************************************

	// Default row component names per dimension ("1".."4" style labels).
	template< uint32_t Count > wxString const * getMatrixRowDefaultNames();

	template<>
	inline wxString const * getMatrixRowDefaultNames< 2 >()
	{
		return GC_POINT_12;
	}

	template<>
	inline wxString const * getMatrixRowDefaultNames< 3 >()
	{
		return GC_POINT_123;
	}

	template<>
	inline wxString const * getMatrixRowDefaultNames< 4 >()
	{
		return GC_POINT_1234;
	}

	//************************************************************************************************

	// Default column component names per dimension (same sets as rows).
	template< uint32_t Count > wxString const * getMatrixColDefaultNames();

	template<>
	inline wxString const * getMatrixColDefaultNames< 2 >()
	{
		return GC_POINT_12;
	}

	template<>
	inline wxString const * getMatrixColDefaultNames< 3 >()
	{
		return GC_POINT_123;
	}

	template<>
	inline wxString const * getMatrixColDefaultNames< 4 >()
	{
		return GC_POINT_1234;
	}

	//************************************************************************************************

	// Constructor using the default row/column names for this dimension.
	template< typename T, uint32_t Count >
	MatrixProperty< T, Count >::MatrixProperty( wxString const & label
		, wxString const & name
		, castor::SquareMatrix< T, Count > const & value )
		: wxPGProperty( label, name )
	{
		setValueI( value );
		MatrixPropertyHelper< T, Count >::addChildren( this
			, getMatrixRowDefaultNames< Count >()
			, getMatrixColDefaultNames< Count >()
			, value );
	}

	// Constructor with caller-supplied row/column names.
	template< typename T, uint32_t Count >
	MatrixProperty< T, Count >::MatrixProperty( wxString const( & rowNames )[Count]
		, wxString const( & colNames )[Count]
		, wxString const & label
		, wxString const & name
		, castor::SquareMatrix< T, Count > const & value )
		: wxPGProperty( label, name )
	{
		setValueI( value );
		MatrixPropertyHelper< T, Count >::addChildren( this
			, rowNames
			, colNames
			, value );
	}

	template< typename T, uint32_t Count >
	void MatrixProperty< T, Count >::RefreshChildren()
	{
		if ( GetChildCount() )
		{
			MatrixPropertyHelper< T, Count >::refreshChildren( this );
		}
	}

	template< typename T, uint32_t Count >
	wxVariant MatrixProperty< T, Count >::ChildChanged( wxVariant & thisValue
		, int childIndex
		, wxVariant & childValue )const
	{
		return MatrixPropertyHelper< T, Count >::childChanged( thisValue
			, childIndex
			, childValue );
	}

	// Stores the matrix into this property's wxVariant value.
	template< typename T, uint32_t Count >
	inline void MatrixProperty< T, Count >::setValueI( castor::SquareMatrix< T, Count > const & value )
	{
		m_value = WXVARIANT( value );
	}

	//************************************************************************************************
}
Winguweb/atuservicio_rails
db/migrate/20181005184847_create_survey.rb
# Creates the surveys table: one row per survey response, linking a client,
# branch, step, and answer with their raw string values.
class CreateSurvey < ActiveRecord::Migration[5.2]
  def change
    create_table :surveys do |t|
      t.string :client_id, index: true
      t.integer :branch_id, index: true
      t.integer :step_id, index: true
      t.string :question_value
      t.integer :answer_id, index: true
      t.string :answer_value

      t.timestamps
    end
  end
end
CKOGIT/cko-java-library
checkout/src/com/checkout/api/services/customer/request/CustomerUpdate.java
<reponame>CKOGIT/cko-java-library package com.checkout.api.services.customer.request; public class CustomerUpdate extends BaseCustomer { }
cisco-open/kubei
backend/pkg/database/resource_test.go
// Copyright © 2022 Cisco Systems, Inc. and its affiliates. // All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package database import ( "reflect" "sort" "testing" dockle_types "github.com/Portshift/dockle/pkg/types" "github.com/google/go-cmp/cmp/cmpopts" "gotest.tools/assert" "github.com/openclarity/kubeclarity/api/server/models" "github.com/openclarity/kubeclarity/backend/pkg/types" runtime_scan_models "github.com/openclarity/kubeclarity/runtime_scan/api/server/models" ) type vulnerabilityInfo struct { // cvss Cvss *types.CVSS `json:"cvss,omitempty"` // description Description string `json:"description,omitempty"` // links Links []string `json:"links"` // severity Severity types.VulnerabilitySeverity `json:"severity,omitempty"` // vulnerability name Name string `json:"vulnerabilityName,omitempty"` } func TestCreateResourceFromVulnerabilityScan(t *testing.T) { resourceInfo := &types.ResourceInfo{ ResourceHash: "ResourceHash", ResourceName: "ResourceName", ResourceType: "ResourceType", } resourceID := CreateResourceID(resourceInfo) pkgInfo := &types.PackageInfo{ Language: "pkg.language", License: "pkg.license", Name: "pkg.name", Version: "pkg.version", } pkgID := CreatePackageID(pkgInfo) pkgInfo2 := &types.PackageInfo{ Language: "pkg2.language", License: "pkg2.license", Name: "pkg2.name", Version: "pkg2.version", } pkgID2 := CreatePackageID(pkgInfo2) vulInfo := vulnerabilityInfo{ Cvss: createTestCVSS(), Description: "Description", Links: 
[]string{"link1", "link2"}, Severity: types.VulnerabilitySeverityCRITICAL, Name: "VulnerabilityName", } vulnerabilityID := CreateVulnerabilityID(&types.PackageVulnerabilityScan{VulnerabilityName: vulInfo.Name}) vulInfo2 := vulnerabilityInfo{ Cvss: createTestCVSS(), Description: "Description2", Links: []string{"link3", "link4"}, Severity: types.VulnerabilitySeverityCRITICAL, Name: "VulnerabilityName2", } vulnerabilityID2 := CreateVulnerabilityID(&types.PackageVulnerabilityScan{VulnerabilityName: vulInfo2.Name}) scannersList := []string{"scanner1", "scanner2"} type args struct { resource *types.ResourceVulnerabilityScan params *TransactionParams } tests := []struct { name string args args want *Resource expectedTransactionParams *TransactionParams }{ { name: "sanity", args: args{ resource: &types.ResourceVulnerabilityScan{ CisDockerBenchmarkResults: []*types.CISDockerBenchmarkResult{ { Code: "code1", Level: int64(dockle_types.InfoLevel), Descriptions: "desc1", }, { Code: "code2", Level: int64(dockle_types.WarnLevel), Descriptions: "desc2", }, }, PackageVulnerabilities: []*types.PackageVulnerabilityScan{ { Cvss: vulInfo.Cvss, Description: vulInfo.Description, FixVersion: "FixVersion", Links: vulInfo.Links, Package: pkgInfo, Scanners: scannersList, Severity: vulInfo.Severity, VulnerabilityName: vulInfo.Name, }, }, Resource: resourceInfo, }, params: &TransactionParams{ FixVersions: map[PkgVulID]string{}, Scanners: map[ResourcePkgID][]string{}, VulnerabilitySource: models.VulnerabilitySourceCICD, }, }, want: &Resource{ ID: resourceID, Hash: resourceInfo.ResourceHash, Name: resourceInfo.ResourceName, Type: resourceInfo.ResourceType, Packages: []Package{ { ID: pkgID, Name: pkgInfo.Name, Version: pkgInfo.Version, License: pkgInfo.License, Language: pkgInfo.Language, Vulnerabilities: []Vulnerability{ { ID: vulnerabilityID, Name: vulInfo.Name, Severity: int(TypesVulnerabilitySeverityToInt[vulInfo.Severity]), Description: vulInfo.Description, Links: 
ArrayToDBArray(vulInfo.Links), CVSS: CreateCVSSString(vulInfo.Cvss), CVSSBaseScore: vulInfo.Cvss.GetBaseScore(), CVSSSeverity: int(ModelsVulnerabilitySeverityToInt[vulInfo.Cvss.GetCVSSSeverity()]), ReportingScanners: ArrayToDBArray(scannersList), Source: models.VulnerabilitySourceCICD, }, }, }, }, CISDockerBenchmarkChecks: []CISDockerBenchmarkCheck{ { ID: "code1", Code: "code1", Level: int(CISDockerBenchmarkLevelINFO), Descriptions: "desc1", }, { ID: "code2", Code: "code2", Level: int(CISDockerBenchmarkLevelWARN), Descriptions: "desc2", }, }, }, expectedTransactionParams: &TransactionParams{ FixVersions: map[PkgVulID]string{ CreatePkgVulID(pkgID, vulnerabilityID): "FixVersion", }, Scanners: map[ResourcePkgID][]string{ CreateResourcePkgID(resourceID, pkgID): scannersList, }, VulnerabilitySource: models.VulnerabilitySourceCICD, }, }, { name: "no cis docker benchmark results", args: args{ resource: &types.ResourceVulnerabilityScan{ PackageVulnerabilities: []*types.PackageVulnerabilityScan{ { Cvss: vulInfo.Cvss, Description: vulInfo.Description, FixVersion: "FixVersion", Links: vulInfo.Links, Package: pkgInfo, Scanners: scannersList, Severity: vulInfo.Severity, VulnerabilityName: vulInfo.Name, }, }, Resource: resourceInfo, }, params: &TransactionParams{ FixVersions: map[PkgVulID]string{}, Scanners: map[ResourcePkgID][]string{}, VulnerabilitySource: models.VulnerabilitySourceCICD, }, }, want: &Resource{ ID: resourceID, Hash: resourceInfo.ResourceHash, Name: resourceInfo.ResourceName, Type: resourceInfo.ResourceType, Packages: []Package{ { ID: pkgID, Name: pkgInfo.Name, Version: pkgInfo.Version, License: pkgInfo.License, Language: pkgInfo.Language, Vulnerabilities: []Vulnerability{ { ID: vulnerabilityID, Name: vulInfo.Name, Severity: int(TypesVulnerabilitySeverityToInt[vulInfo.Severity]), Description: vulInfo.Description, Links: ArrayToDBArray(vulInfo.Links), CVSS: CreateCVSSString(vulInfo.Cvss), CVSSBaseScore: vulInfo.Cvss.GetBaseScore(), CVSSSeverity: 
int(ModelsVulnerabilitySeverityToInt[vulInfo.Cvss.GetCVSSSeverity()]), ReportingScanners: ArrayToDBArray(scannersList), Source: models.VulnerabilitySourceCICD, }, }, }, }, }, expectedTransactionParams: &TransactionParams{ FixVersions: map[PkgVulID]string{ CreatePkgVulID(pkgID, vulnerabilityID): "FixVersion", }, Scanners: map[ResourcePkgID][]string{ CreateResourcePkgID(resourceID, pkgID): scannersList, }, VulnerabilitySource: models.VulnerabilitySourceCICD, }, }, { name: "no fix version", args: args{ resource: &types.ResourceVulnerabilityScan{ PackageVulnerabilities: []*types.PackageVulnerabilityScan{ { Cvss: vulInfo.Cvss, Description: vulInfo.Description, FixVersion: "", Links: vulInfo.Links, Package: pkgInfo, Scanners: scannersList, Severity: vulInfo.Severity, VulnerabilityName: vulInfo.Name, }, }, Resource: resourceInfo, }, params: &TransactionParams{ FixVersions: map[PkgVulID]string{}, Scanners: map[ResourcePkgID][]string{}, VulnerabilitySource: models.VulnerabilitySourceCICD, }, }, want: &Resource{ ID: resourceID, Hash: resourceInfo.ResourceHash, Name: resourceInfo.ResourceName, Type: resourceInfo.ResourceType, Packages: []Package{ { ID: pkgID, Name: pkgInfo.Name, Version: pkgInfo.Version, License: pkgInfo.License, Language: pkgInfo.Language, Vulnerabilities: []Vulnerability{ { ID: vulnerabilityID, Name: vulInfo.Name, Severity: int(TypesVulnerabilitySeverityToInt[vulInfo.Severity]), Description: vulInfo.Description, Links: ArrayToDBArray(vulInfo.Links), CVSS: CreateCVSSString(vulInfo.Cvss), CVSSBaseScore: vulInfo.Cvss.GetBaseScore(), CVSSSeverity: int(ModelsVulnerabilitySeverityToInt[vulInfo.Cvss.GetCVSSSeverity()]), ReportingScanners: ArrayToDBArray(scannersList), Source: models.VulnerabilitySourceCICD, }, }, }, }, }, expectedTransactionParams: &TransactionParams{ FixVersions: map[PkgVulID]string{}, Scanners: map[ResourcePkgID][]string{ CreateResourcePkgID(resourceID, pkgID): scannersList, }, VulnerabilitySource: models.VulnerabilitySourceCICD, }, }, { name: 
"same package different vul", args: args{ resource: &types.ResourceVulnerabilityScan{ PackageVulnerabilities: []*types.PackageVulnerabilityScan{ { Cvss: vulInfo.Cvss, Description: vulInfo.Description, FixVersion: "FixVersion", Links: vulInfo.Links, Package: pkgInfo, Scanners: scannersList, Severity: vulInfo.Severity, VulnerabilityName: vulInfo.Name, }, { Cvss: vulInfo2.Cvss, Description: vulInfo2.Description, FixVersion: "FixVersion", Links: vulInfo2.Links, Package: pkgInfo, Scanners: scannersList, Severity: vulInfo2.Severity, VulnerabilityName: vulInfo2.Name, }, }, Resource: resourceInfo, }, params: &TransactionParams{ FixVersions: map[PkgVulID]string{}, Scanners: map[ResourcePkgID][]string{}, VulnerabilitySource: models.VulnerabilitySourceCICD, }, }, want: &Resource{ ID: resourceID, Hash: resourceInfo.ResourceHash, Name: resourceInfo.ResourceName, Type: resourceInfo.ResourceType, Packages: []Package{ { ID: pkgID, Name: pkgInfo.Name, Version: pkgInfo.Version, License: pkgInfo.License, Language: pkgInfo.Language, Vulnerabilities: []Vulnerability{ { ID: vulnerabilityID, Name: vulInfo.Name, Severity: int(TypesVulnerabilitySeverityToInt[vulInfo.Severity]), Description: vulInfo.Description, Links: ArrayToDBArray(vulInfo.Links), CVSS: CreateCVSSString(vulInfo.Cvss), CVSSBaseScore: vulInfo.Cvss.GetBaseScore(), CVSSSeverity: int(ModelsVulnerabilitySeverityToInt[vulInfo.Cvss.GetCVSSSeverity()]), ReportingScanners: ArrayToDBArray(scannersList), Source: models.VulnerabilitySourceCICD, }, { ID: vulnerabilityID2, Name: vulInfo2.Name, Severity: int(TypesVulnerabilitySeverityToInt[vulInfo2.Severity]), Description: vulInfo2.Description, Links: ArrayToDBArray(vulInfo2.Links), CVSS: CreateCVSSString(vulInfo2.Cvss), CVSSBaseScore: vulInfo2.Cvss.GetBaseScore(), CVSSSeverity: int(ModelsVulnerabilitySeverityToInt[vulInfo2.Cvss.GetCVSSSeverity()]), ReportingScanners: ArrayToDBArray(scannersList), Source: models.VulnerabilitySourceCICD, }, }, }, }, }, expectedTransactionParams: 
&TransactionParams{ FixVersions: map[PkgVulID]string{ CreatePkgVulID(pkgID, vulnerabilityID): "FixVersion", CreatePkgVulID(pkgID, vulnerabilityID2): "FixVersion", }, Scanners: map[ResourcePkgID][]string{ CreateResourcePkgID(resourceID, pkgID): scannersList, }, VulnerabilitySource: models.VulnerabilitySourceCICD, }, }, { name: "different package different vul", args: args{ resource: &types.ResourceVulnerabilityScan{ PackageVulnerabilities: []*types.PackageVulnerabilityScan{ { Cvss: vulInfo.Cvss, Description: vulInfo.Description, FixVersion: "FixVersion", Links: vulInfo.Links, Package: pkgInfo, Scanners: scannersList, Severity: vulInfo.Severity, VulnerabilityName: vulInfo.Name, }, { Cvss: vulInfo2.Cvss, Description: vulInfo2.Description, FixVersion: "FixVersion", Links: vulInfo2.Links, Package: pkgInfo2, Scanners: scannersList, Severity: vulInfo2.Severity, VulnerabilityName: vulInfo2.Name, }, }, Resource: resourceInfo, }, params: &TransactionParams{ FixVersions: map[PkgVulID]string{}, Scanners: map[ResourcePkgID][]string{}, VulnerabilitySource: models.VulnerabilitySourceCICD, }, }, want: &Resource{ ID: resourceID, Hash: resourceInfo.ResourceHash, Name: resourceInfo.ResourceName, Type: resourceInfo.ResourceType, Packages: []Package{ { ID: pkgID, Name: pkgInfo.Name, Version: pkgInfo.Version, License: pkgInfo.License, Language: pkgInfo.Language, Vulnerabilities: []Vulnerability{ { ID: vulnerabilityID, Name: vulInfo.Name, Severity: int(TypesVulnerabilitySeverityToInt[vulInfo.Severity]), Description: vulInfo.Description, Links: ArrayToDBArray(vulInfo.Links), CVSS: CreateCVSSString(vulInfo.Cvss), CVSSBaseScore: vulInfo.Cvss.GetBaseScore(), CVSSSeverity: int(ModelsVulnerabilitySeverityToInt[vulInfo.Cvss.GetCVSSSeverity()]), ReportingScanners: ArrayToDBArray(scannersList), Source: models.VulnerabilitySourceCICD, }, }, }, { ID: pkgID2, Name: pkgInfo2.Name, Version: pkgInfo2.Version, License: pkgInfo2.License, Language: pkgInfo2.Language, Vulnerabilities: []Vulnerability{ { 
ID: vulnerabilityID2, Name: vulInfo2.Name, Severity: int(TypesVulnerabilitySeverityToInt[vulInfo2.Severity]), Description: vulInfo2.Description, Links: ArrayToDBArray(vulInfo2.Links), CVSS: CreateCVSSString(vulInfo2.Cvss), CVSSBaseScore: vulInfo2.Cvss.GetBaseScore(), CVSSSeverity: int(ModelsVulnerabilitySeverityToInt[vulInfo2.Cvss.GetCVSSSeverity()]), ReportingScanners: ArrayToDBArray(scannersList), Source: models.VulnerabilitySourceCICD, }, }, }, }, }, expectedTransactionParams: &TransactionParams{ FixVersions: map[PkgVulID]string{ CreatePkgVulID(pkgID, vulnerabilityID): "FixVersion", CreatePkgVulID(pkgID2, vulnerabilityID2): "FixVersion", }, Scanners: map[ResourcePkgID][]string{ CreateResourcePkgID(resourceID, pkgID): scannersList, CreateResourcePkgID(resourceID, pkgID2): scannersList, }, VulnerabilitySource: models.VulnerabilitySourceCICD, }, }, { name: "different package same vul", args: args{ resource: &types.ResourceVulnerabilityScan{ PackageVulnerabilities: []*types.PackageVulnerabilityScan{ { Cvss: vulInfo.Cvss, Description: vulInfo.Description, FixVersion: "FixVersion", Links: vulInfo.Links, Package: pkgInfo, Scanners: scannersList, Severity: vulInfo.Severity, VulnerabilityName: vulInfo.Name, }, { Cvss: vulInfo.Cvss, Description: vulInfo.Description, FixVersion: "FixVersion", Links: vulInfo.Links, Package: pkgInfo2, Scanners: scannersList, Severity: vulInfo.Severity, VulnerabilityName: vulInfo.Name, }, }, Resource: resourceInfo, }, params: &TransactionParams{ FixVersions: map[PkgVulID]string{}, Scanners: map[ResourcePkgID][]string{}, VulnerabilitySource: models.VulnerabilitySourceCICD, }, }, want: &Resource{ ID: resourceID, Hash: resourceInfo.ResourceHash, Name: resourceInfo.ResourceName, Type: resourceInfo.ResourceType, Packages: []Package{ { ID: pkgID, Name: pkgInfo.Name, Version: pkgInfo.Version, License: pkgInfo.License, Language: pkgInfo.Language, Vulnerabilities: []Vulnerability{ { ID: vulnerabilityID, Name: vulInfo.Name, Severity: 
int(TypesVulnerabilitySeverityToInt[vulInfo.Severity]), Description: vulInfo.Description, Links: ArrayToDBArray(vulInfo.Links), CVSS: CreateCVSSString(vulInfo.Cvss), CVSSBaseScore: vulInfo.Cvss.GetBaseScore(), CVSSSeverity: int(ModelsVulnerabilitySeverityToInt[vulInfo.Cvss.GetCVSSSeverity()]), ReportingScanners: ArrayToDBArray(scannersList), Source: models.VulnerabilitySourceCICD, }, }, }, { ID: pkgID2, Name: pkgInfo2.Name, Version: pkgInfo2.Version, License: pkgInfo2.License, Language: pkgInfo2.Language, Vulnerabilities: []Vulnerability{ { ID: vulnerabilityID, Name: vulInfo.Name, Severity: int(TypesVulnerabilitySeverityToInt[vulInfo.Severity]), Description: vulInfo.Description, Links: ArrayToDBArray(vulInfo.Links), CVSS: CreateCVSSString(vulInfo.Cvss), CVSSBaseScore: vulInfo.Cvss.GetBaseScore(), CVSSSeverity: int(ModelsVulnerabilitySeverityToInt[vulInfo.Cvss.GetCVSSSeverity()]), ReportingScanners: ArrayToDBArray(scannersList), Source: models.VulnerabilitySourceCICD, }, }, }, }, }, expectedTransactionParams: &TransactionParams{ FixVersions: map[PkgVulID]string{ CreatePkgVulID(pkgID, vulnerabilityID): "FixVersion", CreatePkgVulID(pkgID2, vulnerabilityID): "FixVersion", }, Scanners: map[ResourcePkgID][]string{ CreateResourcePkgID(resourceID, pkgID): scannersList, CreateResourcePkgID(resourceID, pkgID2): scannersList, }, VulnerabilitySource: models.VulnerabilitySourceCICD, }, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { got := CreateResourceFromVulnerabilityScan(tt.args.resource, tt.args.params) sort.Slice(got.Packages, func(i, j int) bool { return got.Packages[i].Name < got.Packages[j].Name }) assert.DeepEqual(t, got, tt.want, cmpopts.IgnoreTypes(Vulnerability{}.ScannedAt)) for id := range tt.args.params.Scanners { sort.Strings(tt.args.params.Scanners[id]) } for id := range tt.expectedTransactionParams.Scanners { sort.Strings(tt.expectedTransactionParams.Scanners[id]) } assert.DeepEqual(t, tt.args.params, tt.expectedTransactionParams) }) } } 
func createTestCVSS() *types.CVSS { return &types.CVSS{ CvssV3Metrics: &types.CVSSV3Metrics{ BaseScore: 1.1, ExploitabilityScore: 2.2, ImpactScore: 3.3, }, CvssV3Vector: &types.CVSSV3Vector{ AttackComplexity: types.AttackComplexityHIGH, AttackVector: types.AttackVectorNETWORK, Availability: types.AvailabilityLOW, Confidentiality: types.ConfidentialityHIGH, Integrity: types.IntegrityHIGH, PrivilegesRequired: types.PrivilegesRequiredHIGH, Scope: types.ScopeCHANGED, UserInteraction: types.UserInteractionNONE, Vector: "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:C/C:H/I:H/A:H", }, } } func TestUpdateResourceAnalyzers(t *testing.T) { resourceInfo := &types.ResourceInfo{ ResourceHash: "ResourceHash", ResourceName: "ResourceName", ResourceType: "ResourceType", } resourceID := CreateResourceID(resourceInfo) pkgInfo := &types.PackageInfo{ Language: "pkg.language", License: "pkg.license", Name: "pkg.name", Version: "pkg.version", } pkgID := CreatePackageID(pkgInfo) pkgInfo2 := &types.PackageInfo{ Language: "pkg2.language", License: "pkg2.license", Name: "pkg2.name", Version: "pkg2.version", } pkgID2 := CreatePackageID(pkgInfo2) type args struct { resources []Resource resourcePkgIDToAnalyzers map[ResourcePkgID][]string } tests := []struct { name string args args want []Resource }{ { name: "sanity", args: args{ resources: []Resource{ *(CreateResource(resourceInfo).WithPackages([]Package{ *CreatePackage(pkgInfo, nil), *CreatePackage(pkgInfo2, nil), })), }, resourcePkgIDToAnalyzers: map[ResourcePkgID][]string{ CreateResourcePkgID(resourceID, pkgID): {"analyzer1"}, CreateResourcePkgID(resourceID, pkgID2): {"analyzer2"}, }, }, want: []Resource{ *(CreateResource(resourceInfo). WithPackages([]Package{ *CreatePackage(pkgInfo, nil), *CreatePackage(pkgInfo2, nil), }). 
WithAnalyzers([]string{"analyzer1", "analyzer2"})), }, }, { name: "multiple resources", args: args{ resources: []Resource{ *(CreateResource(resourceInfo).WithPackages([]Package{ *CreatePackage(pkgInfo, nil), })), *(CreateResource(resourceInfo).WithPackages([]Package{ *CreatePackage(pkgInfo2, nil), })), *(CreateResource(resourceInfo).WithPackages([]Package{ *CreatePackage(pkgInfo, nil), *CreatePackage(pkgInfo2, nil), })), }, resourcePkgIDToAnalyzers: map[ResourcePkgID][]string{ CreateResourcePkgID(resourceID, pkgID): {"analyzer1"}, CreateResourcePkgID(resourceID, pkgID2): {"analyzer2"}, }, }, want: []Resource{ *(CreateResource(resourceInfo). WithPackages([]Package{ *CreatePackage(pkgInfo, nil), }). WithAnalyzers([]string{"analyzer1"})), *(CreateResource(resourceInfo). WithPackages([]Package{ *CreatePackage(pkgInfo2, nil), }). WithAnalyzers([]string{"analyzer2"})), *(CreateResource(resourceInfo). WithPackages([]Package{ *CreatePackage(pkgInfo, nil), *CreatePackage(pkgInfo2, nil), }). WithAnalyzers([]string{"analyzer1", "analyzer2"})), }, }, { name: "empty resourcePkgIDToAnalyzers", args: args{ resources: []Resource{ *(CreateResource(resourceInfo).WithPackages([]Package{ *CreatePackage(pkgInfo, nil), *CreatePackage(pkgInfo2, nil), })), }, resourcePkgIDToAnalyzers: nil, }, want: []Resource{ *(CreateResource(resourceInfo). WithPackages([]Package{ *CreatePackage(pkgInfo, nil), *CreatePackage(pkgInfo2, nil), })), }, }, { name: "only one resource+pkg match", args: args{ resources: []Resource{ *(CreateResource(resourceInfo).WithPackages([]Package{ *CreatePackage(pkgInfo, nil), })), }, resourcePkgIDToAnalyzers: map[ResourcePkgID][]string{ CreateResourcePkgID(resourceID, pkgID): {"analyzer1"}, CreateResourcePkgID(resourceID, pkgID2): {"analyzer2"}, }, }, want: []Resource{ *(CreateResource(resourceInfo). WithPackages([]Package{ *CreatePackage(pkgInfo, nil), }). 
WithAnalyzers([]string{"analyzer1"})), }, }, { name: "remove duplicates", args: args{ resources: []Resource{ *(CreateResource(resourceInfo).WithPackages([]Package{ *CreatePackage(pkgInfo, nil), *CreatePackage(pkgInfo2, nil), })), }, resourcePkgIDToAnalyzers: map[ResourcePkgID][]string{ CreateResourcePkgID(resourceID, pkgID): {"analyzer1", "analyzer2"}, CreateResourcePkgID(resourceID, pkgID2): {"analyzer2", "analyzer3"}, }, }, want: []Resource{ *(CreateResource(resourceInfo). WithPackages([]Package{ *CreatePackage(pkgInfo, nil), *CreatePackage(pkgInfo2, nil), }). WithAnalyzers([]string{"analyzer1", "analyzer2", "analyzer3"})), }, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { got := UpdateResourceAnalyzers(tt.args.resources, tt.args.resourcePkgIDToAnalyzers) if !reflect.DeepEqual(got, tt.want) { t.Errorf("UpdateResourceAnalyzers() = %v, want %v", got, tt.want) } }) } } func TestCreateResourceFromRuntimeContentAnalysis(t *testing.T) { resourceInfo := &types.ResourceInfo{ ResourceHash: "resource-1-hash", ResourceName: "resource-1-name", ResourceType: "resource-1-type", } resourceID := CreateResourceID(resourceInfo) pkgInfo1 := &types.PackageInfo{ Language: "pkg-1-language", License: "pkg-1-license", Name: "pkg-1-name", Version: "pkg-1-version", } pkgInfo1ID := CreatePackageID(pkgInfo1) pkgInfo2 := &types.PackageInfo{ Language: "pkg-2-language", License: "pkg-2-license", Name: "pkg-2-name", Version: "pkg-2-version", } pkgInfo2ID := CreatePackageID(pkgInfo2) type args struct { resourceContentAnalysis *runtime_scan_models.ResourceContentAnalysis params *TransactionParams } tests := []struct { name string args args want *Resource expectedTransactionParams *TransactionParams }{ { name: "sanity", args: args{ resourceContentAnalysis: &runtime_scan_models.ResourceContentAnalysis{ Packages: []*runtime_scan_models.PackageContentAnalysis{ { Analyzers: []string{"analyzer1", "analyzer11"}, Package: &runtime_scan_models.PackageInfo{ Language: pkgInfo1.Language, 
License: pkgInfo1.License, Name: pkgInfo1.Name, Version: pkgInfo1.Version, }, }, { Analyzers: []string{"analyzer2", "analyzer11"}, Package: &runtime_scan_models.PackageInfo{ Language: pkgInfo2.Language, License: pkgInfo2.License, Name: pkgInfo2.Name, Version: pkgInfo2.Version, }, }, }, Resource: &runtime_scan_models.ResourceInfo{ ResourceHash: resourceInfo.ResourceHash, ResourceName: resourceInfo.ResourceName, ResourceType: runtime_scan_models.ResourceType(resourceInfo.ResourceType), }, }, params: &TransactionParams{ Analyzers: map[ResourcePkgID][]string{}, }, }, want: &Resource{ ID: resourceID, Hash: resourceInfo.ResourceHash, Name: resourceInfo.ResourceName, Type: resourceInfo.ResourceType, ReportingAnalyzers: ArrayToDBArray([]string{"analyzer1", "analyzer11", "analyzer2"}), Packages: []Package{ { ID: pkgInfo1ID, Name: pkgInfo1.Name, Version: pkgInfo1.Version, License: pkgInfo1.License, Language: pkgInfo1.Language, }, { ID: pkgInfo2ID, Name: pkgInfo2.Name, Version: pkgInfo2.Version, License: pkgInfo2.License, Language: pkgInfo2.Language, }, }, }, expectedTransactionParams: &TransactionParams{ Analyzers: map[ResourcePkgID][]string{ CreateResourcePkgID(resourceID, pkgInfo1ID): {"analyzer1", "analyzer11"}, CreateResourcePkgID(resourceID, pkgInfo2ID): {"analyzer2", "analyzer11"}, }, }, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got := CreateResourceFromRuntimeContentAnalysis(tt.args.resourceContentAnalysis, tt.args.params); !reflect.DeepEqual(got, tt.want) { t.Errorf("CreateResourceFromRuntimeContentAnalysis() = %v, want %v", got, tt.want) } assert.DeepEqual(t, tt.args.params, tt.expectedTransactionParams) }) } } func TestCreateResourceFromContentAnalysis(t *testing.T) { resourceInfo := &types.ResourceInfo{ ResourceHash: "resource-1-hash", ResourceName: "resource-1-name", ResourceType: "resource-1-type", } resourceID := CreateResourceID(resourceInfo) pkgInfo1 := &types.PackageInfo{ Language: "pkg-1-language", License: "pkg-1-license", 
Name: "pkg-1-name", Version: "pkg-1-version", } pkgInfo1ID := CreatePackageID(pkgInfo1) pkgInfo2 := &types.PackageInfo{ Language: "pkg-2-language", License: "pkg-2-license", Name: "pkg-2-name", Version: "pkg-2-version", } pkgInfo2ID := CreatePackageID(pkgInfo2) type args struct { resourceContentAnalysis *models.ResourceContentAnalysis params *TransactionParams } tests := []struct { name string args args want *Resource expectedTransactionParams *TransactionParams }{ { name: "sanity", args: args{ resourceContentAnalysis: &models.ResourceContentAnalysis{ Packages: []*models.PackageContentAnalysis{ { Analyzers: []string{"analyzer1", "analyzer11"}, Package: &models.PackageInfo{ Language: pkgInfo1.Language, License: pkgInfo1.License, Name: pkgInfo1.Name, Version: pkgInfo1.Version, }, }, { Analyzers: []string{"analyzer2", "analyzer11"}, Package: &models.PackageInfo{ Language: pkgInfo2.Language, License: pkgInfo2.License, Name: pkgInfo2.Name, Version: pkgInfo2.Version, }, }, }, Resource: &models.ResourceInfo{ ResourceHash: resourceInfo.ResourceHash, ResourceName: resourceInfo.ResourceName, ResourceType: models.ResourceType(resourceInfo.ResourceType), }, }, params: &TransactionParams{ Analyzers: map[ResourcePkgID][]string{}, }, }, want: &Resource{ ID: resourceID, Hash: resourceInfo.ResourceHash, Name: resourceInfo.ResourceName, Type: resourceInfo.ResourceType, ReportingAnalyzers: ArrayToDBArray([]string{"analyzer1", "analyzer11", "analyzer2"}), Packages: []Package{ { ID: pkgInfo1ID, Name: pkgInfo1.Name, Version: pkgInfo1.Version, License: pkgInfo1.License, Language: pkgInfo1.Language, }, { ID: pkgInfo2ID, Name: pkgInfo2.Name, Version: pkgInfo2.Version, License: pkgInfo2.License, Language: pkgInfo2.Language, }, }, }, expectedTransactionParams: &TransactionParams{ Analyzers: map[ResourcePkgID][]string{ CreateResourcePkgID(resourceID, pkgInfo1ID): {"analyzer1", "analyzer11"}, CreateResourcePkgID(resourceID, pkgInfo2ID): {"analyzer2", "analyzer11"}, }, }, }, } for _, tt := 
range tests { t.Run(tt.name, func(t *testing.T) { if got := CreateResourceFromContentAnalysis(tt.args.resourceContentAnalysis, tt.args.params); !reflect.DeepEqual(got, tt.want) { t.Errorf("CreateResourceFromContentAnalysis() = %v, want %v", got, tt.want) } assert.DeepEqual(t, tt.args.params, tt.expectedTransactionParams) }) } } func Test_createResourceCISDockerBenchmarkChecks(t *testing.T) { type args struct { results []*types.CISDockerBenchmarkResult } tests := []struct { name string args args want []CISDockerBenchmarkCheck }{ { name: "sanity", args: args{ results: []*types.CISDockerBenchmarkResult{ { Code: "PassLevel", Level: int64(dockle_types.PassLevel), Descriptions: "PassLevel", }, { Code: "IgnoreLevel", Level: int64(dockle_types.IgnoreLevel), Descriptions: "IgnoreLevel", }, { Code: "SkipLevel", Level: int64(dockle_types.SkipLevel), Descriptions: "SkipLevel", }, { Code: "InfoLevel", Level: int64(dockle_types.InfoLevel), Descriptions: "InfoLevel", }, { Code: "WarnLevel", Level: int64(dockle_types.WarnLevel), Descriptions: "WarnLevel", }, { Code: "FatalLevel", Level: int64(dockle_types.FatalLevel), Descriptions: "FatalLevel", }, }, }, want: []CISDockerBenchmarkCheck{ { ID: "InfoLevel", Code: "InfoLevel", Level: int(FromDockleTypeToLevel(int64(dockle_types.InfoLevel))), Descriptions: "InfoLevel", }, { ID: "WarnLevel", Code: "WarnLevel", Level: int(FromDockleTypeToLevel(int64(dockle_types.WarnLevel))), Descriptions: "WarnLevel", }, { ID: "FatalLevel", Code: "FatalLevel", Level: int(FromDockleTypeToLevel(int64(dockle_types.FatalLevel))), Descriptions: "FatalLevel", }, }, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got := createResourceCISDockerBenchmarkChecks(tt.args.results); !reflect.DeepEqual(got, tt.want) { t.Errorf("createResourceCISDockerBenchmarkChecks() = %v, want %v", got, tt.want) } }) } }
nikramakrishnan/codechef-solutions
Solutions/SALARY.cpp
<filename>Solutions/SALARY.cpp #include<iostream> #include<algorithm> using namespace std; int main(){ ios::sync_with_stdio(false); int t,n,fix; cin>>t; while(t--){ cin>>n; int flag=1,count=0,fix2; int w[n]; for(int i=0;i<n;i++){ cin>>w[i]; } sort(w,w+n); fix=n-1; fix2=fix; for(int i=0;i<n-1;i++){ //cout<<"Comparing "<<w[i]<<" and "<<w[i+1]<<endl; if(w[i]!=w[i+1]) {flag=0;break;} } while(flag!=1){ //cout<<"Flag is 0. While started\n\nArray after Increment-"<<endl; for(int i=0;i<n;i++){ if(i!=fix){ w[i]++; } //cout<<w[i]<<" "; if(w[i]>w[fix]) fix2=i; } //cout<<"Fix2="<<w[fix2]<<" at w["<<fix2<<"]"<<endl; //cout<<"\nIncrement complete"<<endl; count++; flag=1; fix=fix2; for(int i=0;i<n-1;i++){ if(w[i]!=w[i+1]) {flag=0;break;} } } cout<<count<<endl; } }
chandu0101/sri
mobile-examples/src/main/scala/sri/mobile/examples/uiexplorer/apis/AppStateExample.scala
<filename>mobile-examples/src/main/scala/sri/mobile/examples/uiexplorer/apis/AppStateExample.scala package sri.mobile.examples.uiexplorer.apis import sri.core.ElementFactory._ import sri.core.ReactComponent import sri.mobile.ReactNative import sri.mobile.examples.uiexplorer.{UIExample, UIExplorerBlock, UIExplorerPage} import sri.universal.components._ import scala.scalajs.js import scala.scalajs.js.UndefOr import scala.scalajs.js.annotation.ScalaJSDefined object AppStateExample extends UIExample { val AppState = ReactNative.AppState object AppStateSubscription { case class State(appState: String = AppState.currentState.get, previousAppSates: js.Array[String] = js.Array()) @ScalaJSDefined class Component extends ReactComponent[Boolean, State] { initialState(State()) def render() = View()( if (props) Text()(state.appState) else Text()(state.previousAppSates.mkString(",")) ) val handleAppStateChange = (appState: String) => { setState(state.copy(appState, state.previousAppSates.+:(appState))) } override def componentDidMount(): Unit = AppState.addEventListener("change", handleAppStateChange) override def componentWillUnmount(): Unit = AppState.removeEventListener("change", handleAppStateChange) } def apply(state: Boolean, key: UndefOr[String] = js.undefined, ref: js.Function1[Component, Unit] = null) = makeElement[Component](state, key = key, ref = ref) } val Component = () => { UIExplorerPage( UIExplorerBlock("AppState.currentState")( Text()(AppState.currentState.get) ), UIExplorerBlock("Subscribed AppState:")( AppStateSubscription(true) ), UIExplorerBlock("Previous states:")( AppStateSubscription(false) ) ) } val component = () => createStatelessFunctionElementNoProps(Component) override def title: String = "AppState" override def description: String = "app background status" }
ramacarlucho/evmos
x/recovery/types/keys.go
package types // constants const ( // ModuleName defines the recovery module name ModuleName = "recovery" // StoreKey to be used when creating the KVStore StoreKey = ModuleName // RouterKey to be used for message routing RouterKey = ModuleName )
cantona/NT6
nitan/clone/misc/mafu.c
<filename>nitan/clone/misc/mafu.c // mafu.c 馬伕 #include <ansi.h> inherit NPC; void create() { set_name("馬伕", ({ "<NAME>", "mafu", "ma"})); set("age", 32); set("gender", "男性"); set("long", "這是馬廄馴馬和小夥計,平時打理照料馬匹。\n"); set("attitude", "peaceful"); set("str", 24); set("dex", 16); set("combat_exp", 50000); set("shen_type", 1); set("chat_chance",2); setup(); } void init() { object ob; ::init(); if (interactive(ob = this_player()) && ! is_fighting()) { remove_call_out("greeting"); call_out("greeting", 1, ob); } } void greeting(object ob) { if (! ob || environment(ob) != environment()) return; say(CYN "馬伕點頭哈腰地説道:這位客官,您好,打算去哪兒啊?\n" NOR); } int accept_object(object who, object ob) { if( query("money_id", ob) ) { message_vision(CYN "$N" CYN "對$n" CYN "説道:你想去" NOR + HIY "(goto)" NOR + CYN "哪兒?先告訴小的再付" "錢也不遲。\n" NOR, this_object(), who); return 0; } return 0; }
computop-services/mobile-android-sdk
sdk/src/main/java/com/wallee/android/sdk/util/HttpError.java
package com.wallee.android.sdk.util; /** * This error is thrown when there is a HTTP error in loading a web page. */ public final class HttpError extends Exception{ private final String url; private final String details; private final int httpStatusCode; public HttpError(String url, String details, int httpStatusCode) { super("The invocation of the URL " + url + " leads to a HTTP error " + httpStatusCode + ": " + details); this.url = Check.requireNonEmpty(url, "The url is required."); this.details = Check.requireNonEmpty(details, "The details is required."); this.httpStatusCode = httpStatusCode; } public String getUrl() { return url; } public String getDetails() { return this.details; } public int getHttpStatusCode() { return httpStatusCode; } @Override public String toString() { return "HttpError{" + "url='" + url + '\'' + ", details='" + details + '\'' + ", httpStatusCode=" + httpStatusCode + '}'; } }
openharmony-sig-ci/security_permission
services/permission_standard/distributedpermissionmanagerservice/test/mock/src/soft_bus_center_mock.cpp
/* * Copyright (c) 2021 Huawei Device Co., Ltd. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "softbus_bus_center.h" #include <string> #include <cstring> #include "constant.h" #include "permission_log.h" using namespace OHOS::Security::Permission; namespace { static constexpr OHOS::HiviewDFX::HiLogLabel LABEL = {LOG_CORE, SECURITY_DOMAIN_PERMISSION, "SoftBusCenterMock"}; static const int REG_COUNT_LIMIT = 10; } // namespace static int regCount_ = -1; static INodeStateCb *callback_ = nullptr; bool IsRegCountOK() { return regCount_ >= 0 && regCount_ < REG_COUNT_LIMIT; } int32_t RegNodeDeviceStateCb(const char *pkgName, INodeStateCb *callback) { regCount_++; // reg:0-9 if (IsRegCountOK()) { callback_ = const_cast<INodeStateCb *>(callback); PERMISSION_LOG_DEBUG(LABEL, "success, pkg:%{public}s, count: %{public}d", pkgName, regCount_); return Constant::SUCCESS; } // count 10 above alway return failure for retry. 
PERMISSION_LOG_DEBUG(LABEL, "failure, count: %{public}d", regCount_); return Constant::FAILURE; } int32_t UnregNodeDeviceStateCb(INodeStateCb *callback) { // unreg: 0-9 if (IsRegCountOK()) { regCount_--; callback_ = nullptr; PERMISSION_LOG_DEBUG(LABEL, "success, count: %{public}d", regCount_); return Constant::SUCCESS; } if (regCount_ >= 0) { regCount_--; } PERMISSION_LOG_DEBUG(LABEL, "failure, count: %{public}d", regCount_); return Constant::SUCCESS; } int32_t GetLocalNodeDeviceInfo(const char *pkgName, NodeBasicInfo *info) { if (IsRegCountOK()) { strcpy(info->deviceName, "test-device-id-001"); strcpy(info->networkId, "test-device-id-001:network-id-001"); info->deviceTypeId = 1; PERMISSION_LOG_DEBUG(LABEL, "success, count: %{public}d", regCount_); return Constant::SUCCESS; } PERMISSION_LOG_DEBUG(LABEL, "failure, pkg: %{public}s, count: %{public}d", pkgName, regCount_); return Constant::FAILURE; } int32_t GetNodeKeyInfo( const char *pkgName, const char *networkId, NodeDeivceInfoKey key, uint8_t *info, int32_t infoLen) { if (networkId == nullptr || networkId[0] == '\0') { PERMISSION_LOG_DEBUG(LABEL, "failure, invalid networkId, pkg name: %{public}s", pkgName); return Constant::FAILURE; } if (IsRegCountOK()) { if (key == NodeDeivceInfoKey::NODE_KEY_UDID) { std::string temp = networkId; temp += ":udid-001"; strncpy((char *)info, temp.c_str(), temp.length()); infoLen = temp.length(); } if (key == NodeDeivceInfoKey::NODE_KEY_UUID) { std::string temp = networkId; temp += ":uuid-001"; strncpy((char *)info, temp.c_str(), temp.length()); } PERMISSION_LOG_DEBUG(LABEL, "success, count: %{public}d, id: %{public}s", regCount_, info); return Constant::SUCCESS; } PERMISSION_LOG_DEBUG(LABEL, "failure, count: %{public}d", regCount_); return Constant::FAILURE; }
uk-gov-mirror/alphagov.content-data-api
app/domain/healthchecks/etl_metric_values.rb
module Healthchecks class EtlMetricValues include ActiveModel::Model include Concerns::Deactivable attr_accessor :metric def self.build(metric) new(metric: metric) end def name "etl_metric_values_#{metric}".to_sym end def status if number_of_metric_values.positive? :ok else :critical end end def message "ETL :: no #{metric} for yesterday" if status == :critical end private def number_of_metric_values Facts::Metric.for_yesterday.where("#{metric} > 0").count end end end