/*
 * Copyright (c) 2006-2021, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 * 2006-02-24     Bernard      first version
 * 2006-05-03     Bernard      add IRQ_DEBUG
 * 2016-08-09     ArdaFu       add interrupt enter and leave hook.
 * 2018-11-22     Jesven       rt_interrupt_get_nest function add disable irq
 */

#include <rthw.h>
#include <rtthread.h>

#ifdef RT_USING_HOOK

static void (*rt_interrupt_enter_hook)(void);
static void (*rt_interrupt_leave_hook)(void);

/**
 * @ingroup Hook
 * This function sets a hook function invoked when the system enters an interrupt.
 *
 * @note the hook function must be simple and must never block or suspend.
 */
void rt_interrupt_enter_sethook(void (*hook)(void))
{
    rt_interrupt_enter_hook = hook;
}

/**
 * @ingroup Hook
 * This function sets a hook function invoked when the system exits an interrupt.
 *
 * @note the hook function must be simple and must never block or suspend.
 */
void rt_interrupt_leave_sethook(void (*hook)(void))
{
    rt_interrupt_leave_hook = hook;
}
#endif /* RT_USING_HOOK */

/* #define IRQ_DEBUG */

/**
 * @addtogroup Kernel
 */

/**@{*/

#ifdef RT_USING_SMP
#define rt_interrupt_nest rt_cpu_self()->irq_nest
#else
volatile rt_uint8_t rt_interrupt_nest = 0;
#endif

/**
 * This function will be invoked by BSP when entering an interrupt service routine.
 *
 * @note please don't invoke this routine in application code
 *
 * @see rt_interrupt_leave
 */
void rt_interrupt_enter(void)
{
    rt_base_t level;

    level = rt_hw_interrupt_disable();
    rt_interrupt_nest++;
    RT_OBJECT_HOOK_CALL(rt_interrupt_enter_hook, ());
    rt_hw_interrupt_enable(level);

    RT_DEBUG_LOG(RT_DEBUG_IRQ, ("irq has come..., irq current nest:%d\n",
                                rt_interrupt_nest));
}
RTM_EXPORT(rt_interrupt_enter);

/**
 * This function will be invoked by BSP when leaving an interrupt service routine.
 *
 * @note please don't invoke this routine in application code
 *
 * @see rt_interrupt_enter
 */
void rt_interrupt_leave(void)
{
    rt_base_t level;

    RT_DEBUG_LOG(RT_DEBUG_IRQ, ("irq is going to leave, irq current nest:%d\n",
                                rt_interrupt_nest));

    level = rt_hw_interrupt_disable();
    rt_interrupt_nest--;
    RT_OBJECT_HOOK_CALL(rt_interrupt_leave_hook, ());
    rt_hw_interrupt_enable(level);
}
RTM_EXPORT(rt_interrupt_leave);

/**
 * This function returns the interrupt nesting level.
 *
 * A user application can invoke this function to check whether the current
 * context is an interrupt context.
 *
 * @return the number of nested interrupts.
 */
RT_WEAK rt_uint8_t rt_interrupt_get_nest(void)
{
    rt_uint8_t ret;
    rt_base_t level;

    level = rt_hw_interrupt_disable();
    ret = rt_interrupt_nest;
    rt_hw_interrupt_enable(level);
    return ret;
}
RTM_EXPORT(rt_interrupt_get_nest);

RTM_EXPORT(rt_hw_interrupt_disable);
RTM_EXPORT(rt_hw_interrupt_enable);

/**@}*/
module.exports = 'react';
# -*- coding: utf-8 -*-
"""
Copyright (c) 2008, appengine-utilities project
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

- Redistributions of source code must retain the above copyright notice, this
  list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright notice,
  this list of conditions and the following disclaimer in the documentation
  and/or other materials provided with the distribution.
- Neither the name of the appengine-utilities project nor the names of its
  contributors may be used to endorse or promote products derived from this
  software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
"""

# main python imports
import datetime
import pickle
import random
import __main__

# google appengine imports
from google.appengine.ext import db
from google.appengine.api import memcache

# settings
DEFAULT_TIMEOUT = 3600    # cache expires after one hour (3600 sec)
CLEAN_CHECK_PERCENT = 50  # 50% of all requests will clean the database
MAX_HITS_TO_CLEAN = 100   # the maximum number of stale cache hits to clean per attempt


class _AppEngineUtilities_Cache(db.Model):
    # It's up to the application to determine the format of its keys
    cachekey = db.StringProperty()
    createTime = db.DateTimeProperty(auto_now_add=True)
    timeout = db.DateTimeProperty()
    value = db.BlobProperty()


class Cache(object):
    """
    Cache is used for storing pregenerated output and/or objects in the Big
    Table datastore to minimize the number of queries needed for page
    displays. The idea is that complex queries that generate the same
    results really should only be run once. Cache can be used to store
    pregenerated values made from queries (or other calls such as
    urlFetch()), or the query objects themselves.
    """

    def __init__(self, clean_check_percent=CLEAN_CHECK_PERCENT,
                 max_hits_to_clean=MAX_HITS_TO_CLEAN,
                 default_timeout=DEFAULT_TIMEOUT):
        """
        Initializer

        Args:
            clean_check_percent: how often cache initialization should run
                the cache cleanup
            max_hits_to_clean: maximum number of stale hits to clean
            default_timeout: default length a cache item is good for
        """
        self.clean_check_percent = clean_check_percent
        self.max_hits_to_clean = max_hits_to_clean
        self.default_timeout = default_timeout

        if random.randint(1, 100) < self.clean_check_percent:
            self._clean_cache()

        if 'AEU_Events' in __main__.__dict__:
            __main__.AEU_Events.fire_event('cacheInitialized')

    def _clean_cache(self):
        """
        _clean_cache finds and deletes cache items that have expired. This
        helps keep the overall size of your datastore down.
        """
        query = _AppEngineUtilities_Cache.all()
        query.filter('timeout < ', datetime.datetime.now())
        results = query.fetch(self.max_hits_to_clean)
        db.delete(results)
        # for result in results:
        #     result.delete()

    def _validate_key(self, key):
        if key is None:
            raise KeyError

    def _validate_value(self, value):
        if value is None:
            raise ValueError

    def _validate_timeout(self, timeout):
        if timeout is None:
            timeout = datetime.datetime.now() + \
                datetime.timedelta(seconds=DEFAULT_TIMEOUT)
        if isinstance(timeout, int):
            timeout = datetime.datetime.now() + \
                datetime.timedelta(seconds=timeout)
        if not isinstance(timeout, datetime.datetime):
            raise TypeError
        if timeout < datetime.datetime.now():
            raise ValueError
        return timeout

    def add(self, key=None, value=None, timeout=None):
        """
        add adds an entry to the cache, if one does not already exist.
        """
        self._validate_key(key)
        self._validate_value(value)
        timeout = self._validate_timeout(timeout)

        if key in self:
            raise KeyError

        cacheEntry = _AppEngineUtilities_Cache()
        cacheEntry.cachekey = key
        cacheEntry.value = pickle.dumps(value)
        cacheEntry.timeout = timeout

        # Try to put the entry; if it fails, silently pass. Failures may
        # happen due to timeouts, the datastore being read-only for
        # maintenance, or other reasons. However, the cache not being able
        # to write to the datastore should not break the application.
        try:
            cacheEntry.put()
        except:
            pass

        memcache_timeout = timeout - datetime.datetime.now()
        memcache.set('cache-' + key, value, int(memcache_timeout.seconds))

        if 'AEU_Events' in __main__.__dict__:
            __main__.AEU_Events.fire_event('cacheAdded')

    def set(self, key=None, value=None, timeout=None):
        """
        set adds an entry to the cache, overwriting an existing value
        if one already exists.
        """
        self._validate_key(key)
        self._validate_value(value)
        timeout = self._validate_timeout(timeout)

        cacheEntry = self._read(key)
        if not cacheEntry:
            cacheEntry = _AppEngineUtilities_Cache()
            cacheEntry.cachekey = key
        cacheEntry.value = pickle.dumps(value)
        cacheEntry.timeout = timeout

        try:
            cacheEntry.put()
        except:
            pass

        memcache_timeout = timeout - datetime.datetime.now()
        memcache.set('cache-' + key, value, int(memcache_timeout.seconds))

        if 'AEU_Events' in __main__.__dict__:
            __main__.AEU_Events.fire_event('cacheSet')

    def _read(self, key=None):
        """
        _read returns a cache object determined by the key. It's set to
        private because it returns a db.Model object and does not handle
        the unpickling of objects, making it not the best candidate for
        general use. The special method __getitem__ is the preferred access
        method for cache data.
        """
        query = _AppEngineUtilities_Cache.all()
        query.filter('cachekey', key)
        query.filter('timeout > ', datetime.datetime.now())
        results = query.fetch(1)
        if len(results) == 0:
            return None

        if 'AEU_Events' in __main__.__dict__:
            __main__.AEU_Events.fire_event('cacheReadFromDatastore')
        if 'AEU_Events' in __main__.__dict__:
            __main__.AEU_Events.fire_event('cacheRead')

        return results[0]

    def delete(self, key=None):
        """
        Deletes a cache object determined by the key.
        """
        memcache.delete('cache-' + key)
        result = self._read(key)
        if result:
            if 'AEU_Events' in __main__.__dict__:
                __main__.AEU_Events.fire_event('cacheDeleted')
            result.delete()

    def get(self, key):
        """
        get is used to return the cache value associated with the key passed.
        """
        mc = memcache.get('cache-' + key)
        if mc:
            if 'AEU_Events' in __main__.__dict__:
                __main__.AEU_Events.fire_event('cacheReadFromMemcache')
            if 'AEU_Events' in __main__.__dict__:
                __main__.AEU_Events.fire_event('cacheRead')
            return mc
        result = self._read(key)
        if result:
            timeout = result.timeout - datetime.datetime.now()
            memcache.set('cache-' + key, pickle.loads(result.value),
                         int(timeout.seconds))
            return pickle.loads(result.value)
        else:
            raise KeyError

    def get_many(self, keys):
        """
        Returns a dict mapping each key in keys to its value. If the given
        key is missing, it will be missing from the response dict.
        """
        values = {}
        for key in keys:
            # get() raises KeyError for missing keys, so skip those to
            # match the documented behaviour
            try:
                values[key] = self.get(key)
            except KeyError:
                pass
        return values

    def __getitem__(self, key):
        """
        __getitem__ is necessary for this object to emulate a container.
        """
        return self.get(key)

    def __setitem__(self, key, value):
        """
        __setitem__ is necessary for this object to emulate a container.
        """
        return self.set(key, value)

    def __delitem__(self, key):
        """
        Implements the 'del' keyword.
        """
        return self.delete(key)

    def __contains__(self, key):
        """
        Implements the 'in' operator.
        """
        try:
            self.__getitem__(key)
        except KeyError:
            return False
        return True

    def has_key(self, keyname):
        """
        Equivalent to 'keyname in cache'; use that form in new code.
        """
        return self.__contains__(keyname)
exports.extend = function (target) {
    // Copy own properties from each source object onto the target.
    var sources = Array.prototype.slice.call(arguments, 1);
    for (var i = 0; i < sources.length; i += 1) {
        var source = sources[i];
        for (var key in source) {
            if (source.hasOwnProperty(key)) {
                target[key] = source[key];
            }
        }
    }
    return target;
};
/*-
 * Copyright (c) 2008-2015 Juan Romero Pardines.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include <stdio.h>
#include <stdbool.h>
#include <stdlib.h>
#include <string.h>
#include <strings.h>
#include <errno.h>
#include <assert.h>
#include <unistd.h>
#include <dirent.h>
#include <regex.h>

#include <xbps.h>
#include "defs.h"

static int
cleaner_cb(struct xbps_handle *xhp, xbps_object_t obj,
           const char *key UNUSED, void *arg, bool *done UNUSED)
{
	xbps_dictionary_t repo_pkgd;
	const char *binpkg, *rsha256;
	char *binpkgsig, *pkgver, *arch;
	bool drun = false;

	/* Extract the drun (dry-run) flag from arg */
	if (arg != NULL)
		drun = *(bool *)arg;

	/* Internalize props.plist dictionary from binary pkg */
	binpkg = xbps_string_cstring_nocopy(obj);
	arch = xbps_binpkg_arch(binpkg);
	assert(arch);

	if (!xbps_pkg_arch_match(xhp, arch, NULL)) {
		xbps_dbg_printf(xhp, "%s: ignoring binpkg with unmatched arch (%s)\n",
		    binpkg, arch);
		free(arch);
		return 0;
	}
	free(arch);

	/*
	 * Remove the binary pkg if it's not registered in any repository
	 * or if its hash doesn't match.
	 */
	pkgver = xbps_binpkg_pkgver(binpkg);
	assert(pkgver);
	repo_pkgd = xbps_rpool_get_pkg(xhp, pkgver);
	free(pkgver);
	if (repo_pkgd) {
		xbps_dictionary_get_cstring_nocopy(repo_pkgd,
		    "filename-sha256", &rsha256);
		if (xbps_file_sha256_check(binpkg, rsha256) == 0) {
			/* hash matched */
			return 0;
		}
	}
	binpkgsig = xbps_xasprintf("%s.sig", binpkg);
	if (!drun && unlink(binpkg) == -1) {
		fprintf(stderr, "Failed to remove `%s': %s\n",
		    binpkg, strerror(errno));
	} else {
		printf("Removed %s from cachedir (obsolete)\n", binpkg);
	}
	if (!drun && unlink(binpkgsig) == -1) {
		if (errno != ENOENT) {
			fprintf(stderr, "Failed to remove `%s': %s\n",
			    binpkgsig, strerror(errno));
		}
	}
	free(binpkgsig);

	return 0;
}

int
clean_cachedir(struct xbps_handle *xhp, bool drun, xbps_array_t *excludes)
{
	xbps_array_t array = NULL;
	DIR *dirp;
	struct dirent *dp;
	char *ext;
	int rv = 0;
	bool flagged = false;

	if (chdir(xhp->cachedir) == -1)
		return -1;

	if ((dirp = opendir(xhp->cachedir)) == NULL)
		return 0;

	array = xbps_array_create();
	while ((dp = readdir(dirp)) != NULL) {
		flagged = false;
		if ((strcmp(dp->d_name, ".") == 0) ||
		    (strcmp(dp->d_name, "..") == 0))
			continue;

		/* only process xbps binary packages, ignore anything else */
		if ((ext = strrchr(dp->d_name, '.')) == NULL)
			continue;
		if (strcmp(ext, ".xbps")) {
			xbps_dbg_printf(xhp, "ignoring unknown file: %s\n", dp->d_name);
			continue;
		}
		/* filter and flag excluded files */
		for (unsigned int ind = 0; ind < xbps_array_count(*excludes); ind++) {
			const char *contp = NULL;
			regex_t searchex = {0};
			(void)xbps_array_get_cstring_nocopy(*excludes, ind, &contp);
			(void)regcomp(&searchex, contp, (REG_EXTENDED | REG_NOSUB));
			if (regexec(&searchex, dp->d_name, 0, NULL, 0) == 0) {
				flagged = true;
				regfree(&searchex);
				break;
			}
			regfree(&searchex);
		}
		if (!flagged)
			xbps_array_add_cstring(array, dp->d_name);
	}
	(void)closedir(dirp);

	if (xbps_array_count(array)) {
		rv = xbps_array_foreach_cb_multi(xhp, array, NULL,
		    cleaner_cb, (void *)&drun);
		xbps_object_release(array);
	}
	return rv;
}
// SPDX-License-Identifier: BSD-3-Clause
// Copyright Contributors to the OpenEXR Project.

// This file is auto-generated by the cmake configure step

#ifndef INCLUDED_ILMBASE_CONFIG_H
#define INCLUDED_ILMBASE_CONFIG_H 1

#pragma once

//
// Options / configuration based on O.S. / compiler
/////////////////////

#define HAVE_PTHREAD 1
#define HAVE_POSIX_SEMAPHORES 1

//
// Define and set to 1 if the target system has support for large
// stack sizes.
//
/* #undef ILMBASE_HAVE_LARGE_STACK */

//////////////////////
//
// C++ namespace configuration / options
//
// Current (internal) library namespace name and corresponding public
// client namespaces.

#define ILMBASE_INTERNAL_NAMESPACE_CUSTOM 0
#define IMATH_INTERNAL_NAMESPACE Imath_2_5
#define IEX_INTERNAL_NAMESPACE Iex_2_5
#define ILMTHREAD_INTERNAL_NAMESPACE IlmThread_2_5

#define ILMBASE_NAMESPACE_CUSTOM 0
#define IMATH_NAMESPACE Imath
#define IEX_NAMESPACE Iex
#define ILMTHREAD_NAMESPACE IlmThread

//
// Version information
//

#define ILMBASE_VERSION_STRING "2.5.3"
#define ILMBASE_PACKAGE_STRING "IlmBase 2.5.3"

#define ILMBASE_VERSION_MAJOR 2
#define ILMBASE_VERSION_MINOR 5
#define ILMBASE_VERSION_PATCH 3

#define ILMBASE_VERSION_HEX ((uint32_t(ILMBASE_VERSION_MAJOR) << 24) | \
                             (uint32_t(ILMBASE_VERSION_MINOR) << 16) | \
                             (uint32_t(ILMBASE_VERSION_PATCH) <<  8))

#endif // INCLUDED_ILMBASE_CONFIG_H
// Emacs style mode select   -*- C++ -*-
//-----------------------------------------------------------------------------
//
// Copyright(C) 1993-1996 Id Software, Inc.
// Copyright(C) 2005 Simon Howard
//
// This program is free software; you can redistribute it and/or
// modify it under the terms of the GNU General Public License
// as published by the Free Software Foundation; either version 2
// of the License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
// 02111-1307, USA.
//
// DESCRIPTION:
//    Duh.
//
//-----------------------------------------------------------------------------

#ifndef __G_GAME__
#define __G_GAME__

#include "doomdef.h"
#include "d_event.h"
#include "d_ticcmd.h"

//
// GAME
//
void G_DeathMatchSpawnPlayer (int playernum);

void G_InitNew (skill_t skill, int episode, int map);

// Can be called by the startup code or M_Responder.
// A normal game starts at map 1,
// but a warp test can start elsewhere
void G_DeferedInitNew (skill_t skill, int episode, int map);

void G_DeferedPlayDemo (char* demo);

// Can be called by the startup code or M_Responder,
// calls P_SetupLevel or W_EnterWorld.
void G_LoadGame (char* name);

void G_DoLoadGame (void);

// Called by M_Responder.
void G_SaveGame (int slot, char* description);

// Only called by startup code.
void G_RecordDemo (char* name);

void G_BeginRecording (void);

void G_PlayDemo (char* name);
void G_TimeDemo (char* name);
boolean G_CheckDemoStatus (void);

void G_ExitLevel (void);
void G_SecretExitLevel (void);

void G_WorldDone (void);

// Read current data from inputs and build a player movement command.
void G_BuildTiccmd (ticcmd_t *cmd);

void G_Ticker (void);
boolean G_Responder (event_t* ev);

void G_ScreenShot (void);

void G_DrawMouseSpeedBox(void);

#endif
/* Copyright (c) 2015, The Linux Foundation. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met:
 *   * Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 *   * Redistributions in binary form must reproduce the above
 *     copyright notice, this list of conditions and the following
 *     disclaimer in the documentation and/or other materials provided
 *     with the distribution.
 *   * Neither the name of The Linux Foundation, Inc. nor the names of its
 *     contributors may be used to endorse or promote products derived
 *     from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 */

#include "mdtp_defs.h"

#define MDTP_EFUSE_ADDRESS_MSM8994  0xFC4BC1F8 // QFPROM_CORR_SPARE_REG19_LSB_ADDR
#define MDTP_EFUSE_START_MSM8994    16

struct mdtp_ui_defs mdtp_ui_defs_msm8994 = {
    // Image dimensions
    1412,     // error_msg_width
    212,      // error_msg_height
    1364,     // main_text_width
    288,      // main_text_height
    180,      // pin_digit_width
    180,      // pin_digit_height
    644,      // ok_button_width
    158,      // ok_button_height
    1384,     // digits_instructions_width
    166,      // digits_instructions_height
    920,      // pin_instructions_width
    204,      // pin_instructions_height

    // Image offsets
    0x1000,   // error_msg_offset
    0xDD000,  // initial_delay_offset
    0x1FD000, // enter_pin_offset
    0x31D000, // invalid_pin_offset
    0x43D000, // pin_digit_0_offset
    0x18000,  // pin_digits_offset
    0x52D000, // pin_selected_digit_0_offset
    0x61D000, // ok_button_offset
    0x668000, // selected_ok_button_offset
    0x6B3000, // digits_instructions_offset
    0x75C000, // pin_instructions_offset

    // Display settings
    12        // digit_space
};

struct mdtp_ui_defs mdtp_get_target_ui_defs()
{
    return mdtp_ui_defs_msm8994;
}

int mdtp_get_target_efuse(struct mdtp_target_efuse* target_efuse)
{
    if (target_efuse == NULL) {
        dprintf(CRITICAL, "mdtp: mdtp_get_target_efuse: ERROR, target_efuse is NULL\n");
        return -1;
    }

    target_efuse->address = MDTP_EFUSE_ADDRESS_MSM8994;
    target_efuse->start = MDTP_EFUSE_START_MSM8994;

    return 0;
}
define("ace/snippets/haskell_cabal",["require","exports","module"], function(require, exports, module) { "use strict"; exports.snippetText =undefined; exports.scope = "haskell_cabal"; }); (function() { window.require(["ace/snippets/haskell_cabal"], function(m) { if (typeof module == "object" && typeof exports == "object" && module) { module.exports = m; } }); })();
from objectives.conditions._objective_condition import *
import random


class Condition(ObjectiveCondition):
    NAME = "Espers"

    def __init__(self, min_count, max_count):
        self.count = random.randint(min_count, max_count)
        super().__init__(ConditionType.EventWord, event_word.ESPERS_FOUND, self.count)

    def __str__(self):
        return super().__str__(self.count)
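An illustrative instantiation of the condition above; the bounds are hypothetical and the printed form is delegated to ObjectiveCondition.__str__.

condition = Condition(4, 8)  # hypothetical bounds: requires 4..8 espers, rolled once
print(condition)             # formatting comes from ObjectiveCondition.__str__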
$(document).ready(function () {
    var date = new Date(),
        d = date.getDate(),
        m = date.getMonth(),
        y = date.getFullYear();

    // Make each external event draggable and attach its event object.
    $("#external-events div.external-event").each(function () {
        var eventObject = {title: $.trim($(this).text())};
        $(this).data("eventObject", eventObject);
        $(this).draggable({zIndex: 999, revert: true, revertDuration: 0});
    });

    var calendar = $("#calendar").fullCalendar({
        header: {left: "title", center: "agendaDay,agendaWeek,month", right: "prev,next today"},
        editable: true,
        firstDay: 1,
        selectable: true,
        defaultView: "month",
        axisFormat: "h:mm",
        columnFormat: {month: "ddd", week: "ddd d", day: "dddd M/d", agendaDay: "dddd d"},
        titleFormat: {month: "MMMM YYYY", week: "MMMM YYYY", day: "MMMM YYYY"},
        allDaySlot: false,
        locale: 'ru',
        selectHelper: true,
        select: function (start, end, allDay) {
            var title = prompt("Add event:");
            if (title) {
                calendar.fullCalendar("renderEvent",
                    {title: title, start: start, end: end, allDay: allDay}, true);
            }
            calendar.fullCalendar("unselect");
        },
        droppable: true,
        drop: function (date, allDay) {
            // Copy the stored event object, set its date, and render it.
            var originalEventObject = $(this).data("eventObject"),
                copiedEventObject = $.extend({}, originalEventObject);
            copiedEventObject.start = date;
            copiedEventObject.allDay = allDay;
            $("#calendar").fullCalendar("renderEvent", copiedEventObject, true);
            if ($("#drop-remove").is(":checked")) {
                $(this).remove();
            }
        },
        events: [
            {title: "All Day Event", start: new Date(y, m, 1)},
            {id: 999, title: "Repeating Event", start: new Date(y, m, d - 5, 18, 0), allDay: false, className: "bg-teal"},
            {id: 999, title: "Meeting", start: new Date(y, m, d - 3, 16, 0), allDay: false, className: "bg-purple"},
            {id: 999, title: "Meeting", start: new Date(y, m, d + 4, 16, 0), allDay: false, className: "bg-warning"},
            {title: "Meeting", start: new Date(y, m, d, 10, 30), allDay: false, className: "bg-danger"},
            {title: "Lunch", start: new Date(y, m, d, 12, 0), end: new Date(y, m, d, 14, 0), allDay: false, className: "bg-success"},
            {title: "Birthday Party", start: new Date(y, m, d + 1, 19, 0), end: new Date(y, m, d + 1, 22, 30), allDay: false, className: "bg-brown"},
            {title: "Click for Google", start: new Date(y, m, 28), end: new Date(y, m, 29), url: "http://google.com/", className: "bg-pink"}
        ]
    });
});
import pandas as pd
from datetime import datetime, timedelta
from sqlalchemy import create_engine

IMPULSE_KWH = 1000        # impulses per kWh; depends on the prepaid meter
RUPIAH_PER_KWH = 1444.70  # R-1/TR 1.301 - 2.200 VA tariff


def run_cron():
    now_date = datetime.now().strftime('%Y-%m-%d')
    yesterday_date = datetime.now() - timedelta(days=1)
    yesterday_date = yesterday_date.strftime('%Y-%m-%d')

    conn = create_engine(
        "postgresql+psycopg2://root:12345678@localhost:5432/plnstats"
    ).connect()
    df = pd.read_sql(
        sql="SELECT color_percentage, created_on FROM public.home_pln_kwh_sensor "
            "WHERE created_on >= '{yesterday} 00:00:00' "
            "AND created_on < '{currdate} 00:00:00'".format(
                yesterday=yesterday_date, currdate=now_date
            ),
        con=conn,
    )

    df['created_on'] = pd.to_datetime(df.created_on)
    df['created_on'] = df['created_on'].dt.strftime('%Y-%m-%d %H:%M:%S')
    df = df.drop_duplicates(subset=['created_on'])
    df['created_on'] = pd.to_datetime(df.created_on)
    df['hour'] = df['created_on'].dt.strftime('%H')
    df['day'] = df['created_on'].dt.strftime('%Y-%m-%d')

    list_day = list(df['day'].unique())
    df_summary = pd.DataFrame(columns=['day', 'hour', 'sum_impulse', 'charges_amount'])

    # DETAIL PER HOUR
    for dday in list_day:
        list_hour = list(df[df['day'] == str(dday)]['hour'].unique())
        for y in list_hour:
            wSumKWH = df[(df['day'] == str(dday)) & (df['hour'] == str(y))]
            impulses = wSumKWH.groupby('hour').count()['day'][0]
            df_summary = df_summary.append(
                {
                    'hour': y,
                    'day': dday,
                    'sum_impulse': impulses,
                    'charges_amount': impulses * RUPIAH_PER_KWH / IMPULSE_KWH,
                },
                ignore_index=True,
            )
    df_summary.to_sql('home_pln_kwh_hour', schema='public', con=conn,
                      if_exists='append', index=False)

    credit_summary = df_summary[df_summary['day'] == yesterday_date].groupby('day').sum()
    credit_summary.to_sql('home_pln_kwh_summary', schema='public', con=conn,
                          if_exists='append', index=True)


if __name__ == '__main__':
    run_cron()
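A worked example of the charge conversion used in the hourly loop above; the impulse count is hypothetical. Each row in the sensor table is one meter impulse, so cost = impulses * RUPIAH_PER_KWH / IMPULSE_KWH.

impulses = 250  # hypothetical hourly impulse count
kwh = impulses / IMPULSE_KWH                              # 250 / 1000 = 0.25 kWh
charges_amount = impulses * RUPIAH_PER_KWH / IMPULSE_KWH  # 250 * 1444.70 / 1000 = Rp 361.175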
/*******************************************************************************
 *   (c) 2018 ZondaX GmbH
 *
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 ********************************************************************************/
#include "zxmacros.h"
#include "utf8.h"

#ifdef LEDGER_SPECIFIC
#include <stdio.h>
#include "stdint.h"

void __logstack() {
    uint8_t st;
    uint32_t tmp1 = (uint32_t)&st - (uint32_t)&app_stack_canary;
    uint32_t tmp2 = 0x20002800 - (uint32_t)&st;
    // buffer must be at least as large as the size passed to snprintf
    // (the original passed 40 with a 30-byte buffer)
    char buffer[40];
    snprintf(buffer, sizeof(buffer), "%d / %d", tmp1, tmp2);
    LOG(buffer);
}
#else
void __logstack() {}
#endif

size_t asciify(char *utf8_in_ascii_out) {
    return asciify_ext(utf8_in_ascii_out, utf8_in_ascii_out);
}

size_t asciify_ext(const char *utf8_in, char *ascii_only_out) {
    void *p = (void *) utf8_in;
    char *q = ascii_only_out;

    // utf8valid returns zero on success
    while (*((char *) p) && utf8valid(p) == 0) {
        utf8_int32_t tmp_codepoint = 0;
        p = utf8codepoint(p, &tmp_codepoint);
        // Keep printable ASCII, replace everything else with '.'
        *q = (tmp_codepoint >= 32 && tmp_codepoint <= 0x7F) ? (char) tmp_codepoint : '.';
        q++;
    }

    // Terminate string
    *q = 0;

    return q - ascii_only_out;
}
import logging
from pathlib import Path
from typing import Dict, Text, List, Any, Optional, Union

from ruamel.yaml.parser import ParserError

import rasa.utils.common as common_utils
import rasa.utils.io
from rasa.constants import DOCS_URL_STORIES
from rasa.core.constants import INTENT_MESSAGE_PREFIX, RULE_SNIPPET_ACTION_NAME
from rasa.core.events import UserUttered, SlotSet, Form
from rasa.core.training.story_reader.story_reader import StoryReader
from rasa.core.training.structures import StoryStep
from rasa.data import YAML_FILE_EXTENSIONS

logger = logging.getLogger(__name__)

KEY_STORIES = "stories"
KEY_STORY_NAME = "story"
KEY_RULES = "rules"
KEY_RULE_NAME = "rule"
KEY_STEPS = "steps"
KEY_ENTITIES = "entities"
KEY_USER_INTENT = "intent"
KEY_SLOT_NAME = "slot"
KEY_SLOT_VALUE = "value"
KEY_FORM = "form"
KEY_ACTION = "action"
KEY_CHECKPOINT = "checkpoint"
KEY_CHECKPOINT_SLOTS = "slots"
KEY_METADATA = "metadata"
KEY_OR = "or"


class YAMLStoryReader(StoryReader):
    """Class that reads Core training data and rule data in YAML format."""

    async def read_from_file(self, filename: Text) -> List[StoryStep]:
        """Read stories or rules from file.

        Args:
            filename: Path to the story/rule file.

        Returns:
            `StoryStep`s read from `filename`.
        """
        try:
            yaml_content = rasa.utils.io.read_yaml_file(filename)
        except (ValueError, ParserError) as e:
            common_utils.raise_warning(
                f"Failed to read YAML from '{filename}', it will be skipped. Error: {e}"
            )
            return []

        if not isinstance(yaml_content, dict):
            common_utils.raise_warning(
                f"Failed to read '{filename}'. It should be a YAML dictionary."
            )
            return []

        return self.read_from_parsed_yaml(yaml_content)

    def read_from_parsed_yaml(
        self, parsed_content: Dict[Text, Union[Dict, List]]
    ) -> List[StoryStep]:
        """Read stories from parsed YAML.

        Args:
            parsed_content: The parsed YAML as a dictionary.

        Returns:
            The parsed stories or rules.
        """
        stories = parsed_content.get(KEY_STORIES, [])
        self._parse_data(stories, is_rule_data=False)

        rules = parsed_content.get(KEY_RULES, [])
        self._parse_data(rules, is_rule_data=True)

        self._add_current_stories_to_result()
        return self.story_steps

    def _parse_data(self, data: List[Dict[Text, Any]], is_rule_data: bool) -> None:
        item_title = self._get_item_title(is_rule_data)

        for item in data:
            if not isinstance(item, dict):
                common_utils.raise_warning(
                    f"Unexpected block found in '{self.source_name}':\n"
                    f"{item}\nItems under the "
                    f"'{self._get_plural_item_title(is_rule_data)}' key must be YAML "
                    f"dictionaries. It will be skipped.",
                    docs=self._get_docs_link(is_rule_data),
                )
                continue

            if item_title in item.keys():
                self._parse_plain_item(item, is_rule_data)

    def _parse_plain_item(self, item: Dict[Text, Any], is_rule_data: bool) -> None:
        item_name = item.get(self._get_item_title(is_rule_data), "")

        if not item_name:
            common_utils.raise_warning(
                f"Issue found in '{self.source_name}': \n"
                f"{item}\n"
                f"The {self._get_item_title(is_rule_data)} has an empty name. "
                f"{self._get_plural_item_title(is_rule_data).capitalize()} should "
                f"have a name defined under '{self._get_item_title(is_rule_data)}' "
                f"key. It will be skipped.",
                docs=self._get_docs_link(is_rule_data),
            )

        steps: List[Union[Text, Dict[Text, Any]]] = item.get(KEY_STEPS, [])

        if not steps:
            common_utils.raise_warning(
                f"Issue found in '{self.source_name}': "
                f"The {self._get_item_title(is_rule_data)} has no steps. "
                f"It will be skipped.",
                docs=self._get_docs_link(is_rule_data),
            )
            return

        if is_rule_data:
            self._new_rule_part(item_name, self.source_name)
        else:
            self._new_story_part(item_name, self.source_name)

        for step in steps:
            self._parse_step(step, is_rule_data)

    def _parse_step(
        self, step: Union[Text, Dict[Text, Any]], is_rule_data: bool
    ) -> None:
        if step == RULE_SNIPPET_ACTION_NAME:
            self._parse_rule_snippet_action()
        elif isinstance(step, str):
            common_utils.raise_warning(
                f"Issue found in '{self.source_name}':\n"
                f"Found an unexpected step in the {self._get_item_title(is_rule_data)} "
                f"description:\n{step}\nThe step is of type `str` "
                f"which is only allowed for the rule snippet action "
                f"'{RULE_SNIPPET_ACTION_NAME}'. It will be skipped.",
                docs=self._get_docs_link(is_rule_data),
            )
        elif KEY_USER_INTENT in step.keys():
            self._parse_user_utterance(step, is_rule_data)
        elif KEY_OR in step.keys():
            self._parse_or_statement(step, is_rule_data)
        elif KEY_SLOT_NAME in step.keys():
            self._parse_slot(step, is_rule_data)
        elif KEY_ACTION in step.keys():
            self._parse_action(step, is_rule_data)
        elif KEY_CHECKPOINT in step.keys():
            self._parse_checkpoint(step, is_rule_data)
        elif KEY_FORM in step.keys():
            self._parse_form(step[KEY_FORM])
        elif KEY_METADATA in step.keys():
            pass
        else:
            common_utils.raise_warning(
                f"Issue found in '{self.source_name}':\n"
                f"Found an unexpected step in the {self._get_item_title(is_rule_data)} "
                f"description:\n{step}\nIt will be skipped.",
                docs=self._get_docs_link(is_rule_data),
            )

    @staticmethod
    def _get_item_title(is_rule_data: bool) -> Text:
        return KEY_RULE_NAME if is_rule_data else KEY_STORY_NAME

    @staticmethod
    def _get_plural_item_title(is_rule_data: bool) -> Text:
        return KEY_RULES if is_rule_data else KEY_STORIES

    @staticmethod
    def _get_docs_link(is_rule_data: bool) -> Text:
        # TODO: update docs link to point to rules
        return "" if is_rule_data else DOCS_URL_STORIES

    def _parse_user_utterance(self, step: Dict[Text, Any], is_rule_data: bool) -> None:
        utterance = self._parse_raw_user_utterance(step, is_rule_data=is_rule_data)
        if utterance:
            self._validate_that_utterance_is_in_domain(utterance)
            self.current_step_builder.add_user_messages([utterance])

    def _validate_that_utterance_is_in_domain(self, utterance: UserUttered) -> None:
        intent_name = utterance.intent.get("name")

        if not self.domain:
            logger.debug(
                "Skipped validating if intent is in domain as domain is `None`."
            )
            return

        if intent_name not in self.domain.intents:
            common_utils.raise_warning(
                f"Issue found in '{self.source_name}': \n"
                f"Found intent '{intent_name}' in stories which is not part of the "
                f"domain.",
                docs=DOCS_URL_STORIES,
            )

    def _parse_or_statement(self, step: Dict[Text, Any], is_rule_data: bool) -> None:
        utterances = []

        for utterance in step.get(KEY_OR):
            if KEY_USER_INTENT in utterance.keys():
                utterance = self._parse_raw_user_utterance(
                    utterance, is_rule_data=is_rule_data
                )
                if utterance:
                    utterances.append(utterance)
            else:
                common_utils.raise_warning(
                    f"Issue found in '{self.source_name}': \n"
                    f"`OR` statement can only have '{KEY_USER_INTENT}' "
                    f"as a sub-element. This step will be skipped:\n"
                    f"'{utterance}'\n",
                    docs=self._get_docs_link(is_rule_data),
                )
                return

        self.current_step_builder.add_user_messages(utterances)

    def _parse_raw_user_utterance(
        self, step: Dict[Text, Any], is_rule_data: bool
    ) -> Optional[UserUttered]:
        user_utterance = step.get(KEY_USER_INTENT, "").strip()

        if not user_utterance:
            common_utils.raise_warning(
                f"Issue found in '{self.source_name}':\n"
                f"User utterance cannot be empty. "
                f"This {self._get_item_title(is_rule_data)} step will be skipped:\n"
                f"{step}",
                docs=self._get_docs_link(is_rule_data),
            )

        raw_entities = step.get(KEY_ENTITIES, [])
        final_entities = YAMLStoryReader._parse_raw_entities(raw_entities)

        if user_utterance.startswith(INTENT_MESSAGE_PREFIX):
            common_utils.raise_warning(
                f"Issue found in '{self.source_name}':\n"
                f"User intent '{user_utterance}' starts with "
                f"'{INTENT_MESSAGE_PREFIX}'. This is not required.",
                docs=self._get_docs_link(is_rule_data),
            )
            # Remove leading slash
            user_utterance = user_utterance[1:]

        intent = {"name": user_utterance, "confidence": 1.0}
        return UserUttered(user_utterance, intent, final_entities)

    @staticmethod
    def _parse_raw_entities(
        raw_entities: Union[List[Dict[Text, Text]], List[Text]]
    ) -> List[Dict[Text, Text]]:
        final_entities = []
        for entity in raw_entities:
            if isinstance(entity, dict):
                for key, value in entity.items():
                    final_entities.append({"entity": key, "value": value})
            else:
                final_entities.append({"entity": entity, "value": ""})

        return final_entities

    def _parse_slot(self, step: Dict[Text, Any], is_rule_data: bool) -> None:
        slot_name = step.get(KEY_SLOT_NAME, "")

        if not slot_name or KEY_SLOT_VALUE not in step:
            common_utils.raise_warning(
                f"Issue found in '{self.source_name}': \n"
                f"Slots should have a name and a value. "
                f"This {self._get_item_title(is_rule_data)} step will be skipped:\n"
                f"{step}",
                docs=self._get_docs_link(is_rule_data),
            )
            return

        slot_value = step.get(KEY_SLOT_VALUE, "")
        self._add_event(SlotSet.type_name, {slot_name: slot_value})

    def _parse_action(self, step: Dict[Text, Any], is_rule_data: bool) -> None:
        action_name = step.get(KEY_ACTION, "")
        if not action_name:
            common_utils.raise_warning(
                f"Issue found in '{self.source_name}': \n"
                f"Action name cannot be empty. "
                f"This {self._get_item_title(is_rule_data)} step will be skipped:\n"
                f"{step}",
                docs=self._get_docs_link(is_rule_data),
            )
            return

        self._add_event(action_name, {})

    def _parse_rule_snippet_action(self) -> None:
        self._add_event(RULE_SNIPPET_ACTION_NAME, {})

    def _parse_form(self, form_name: Optional[Text]) -> None:
        self._add_event(Form.type_name, {"name": form_name})

    def _parse_checkpoint(self, step: Dict[Text, Any], is_rule_data: bool) -> None:
        checkpoint_name = step.get(KEY_CHECKPOINT, "")
        slots = step.get(KEY_CHECKPOINT_SLOTS, [])

        slots_dict = {}
        for slot in slots:
            if not isinstance(slot, dict):
                common_utils.raise_warning(
                    f"Issue found in '{self.source_name}':\n"
                    f"Checkpoint '{checkpoint_name}' has an invalid slot: "
                    f"{slots}\nItems under the '{KEY_CHECKPOINT_SLOTS}' key must be "
                    f"YAML dictionaries. The checkpoint will be skipped.",
                    docs=self._get_docs_link(is_rule_data),
                )
                return

            for key, value in slot.items():
                slots_dict[key] = value

        self._add_checkpoint(checkpoint_name, slots_dict)

    @staticmethod
    def is_yaml_story_file(file_path: Text) -> bool:
        """Check if file contains Core training data or rule data in YAML format.

        Args:
            file_path: Path of the file to check.

        Returns:
            `True` in case the file is a Core YAML training data or rule data file,
            `False` otherwise.
        """
        suffix = Path(file_path).suffix

        if suffix and suffix not in YAML_FILE_EXTENSIONS:
            return False

        try:
            content = rasa.utils.io.read_yaml_file(file_path)
            return any(key in content for key in [KEY_STORIES, KEY_RULES])
        except Exception as e:
            # Using broad `Exception` because yaml library is not exposing all Errors
            common_utils.raise_warning(
                f"Tried to check if '{file_path}' is a story or rule file, but failed "
                f"to read it. If this file contains story or rule data, you should "
                f"investigate this error, otherwise it is probably best to "
                f"move the file to a different location. Error: {e}"
            )
            return False
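A sketch of the parsed-YAML input shape this reader handles, inferred from the key constants above; the story, intent, and action names are hypothetical.

parsed_content = {
    "stories": [                           # KEY_STORIES
        {
            "story": "greet and respond",  # KEY_STORY_NAME
            "steps": [                     # KEY_STEPS
                {"intent": "greet"},        # routed to _parse_user_utterance
                {"action": "utter_greet"},  # routed to _parse_action
            ],
        }
    ]
}
# story_steps = YAMLStoryReader(...).read_from_parsed_yaml(parsed_content)
# (constructor arguments depend on the StoryReader base class)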
// Dashboard.js
// ====================================================================
// ====================================================================
// - ThemeOn.net -

$(window).on('load', function () {

    // Network chart ( Morris Line Chart )
    // =================================================================
    // Require MorrisJS Chart
    // -----------------------------------------------------------------
    // http://morrisjs.github.io/morris.js/
    // =================================================================
    var day_data = [
        {"elapsed": "2013 - 01", "value": 24, b: 2},
        {"elapsed": "2013 - 02", "value": 34, b: 22},
        {"elapsed": "2013 - 03", "value": 33, b: 7},
        {"elapsed": "2013 - 04", "value": 22, b: 6},
        {"elapsed": "2013 - 05", "value": 28, b: 17},
        {"elapsed": "2013 - 06", "value": 60, b: 15},
        {"elapsed": "2013 - 07", "value": 60, b: 17},
        {"elapsed": "2013 - 08", "value": 70, b: 7},
        {"elapsed": "2013 - 09", "value": 67, b: 18},
        {"elapsed": "2013 - 10", "value": 86, b: 18},
        {"elapsed": "2013 - 11", "value": 86, b: 18},
        {"elapsed": "2013 - 12", "value": 113, b: 29},
        {"elapsed": "2014 - 01", "value": 130, b: 23},
        {"elapsed": "2014 - 02", "value": 114, b: 10},
        {"elapsed": "2014 - 03", "value": 80, b: 22},
        {"elapsed": "2014 - 04", "value": 109, b: 7},
        {"elapsed": "2014 - 05", "value": 100, b: 6},
        {"elapsed": "2014 - 06", "value": 105, b: 17},
        {"elapsed": "2014 - 07", "value": 110, b: 15},
        {"elapsed": "2014 - 08", "value": 102, b: 17},
        {"elapsed": "2014 - 09", "value": 107, b: 7},
        {"elapsed": "2014 - 10", "value": 60, b: 18},
        {"elapsed": "2014 - 11", "value": 67, b: 18},
        {"elapsed": "2014 - 12", "value": 76, b: 18},
        {"elapsed": "2015 - 01", "value": 73, b: 29},
        {"elapsed": "2015 - 02", "value": 94, b: 13},
        {"elapsed": "2015 - 03", "value": 79, b: 24}
    ];

    var chart = Morris.Area({
        element: 'morris-chart-network',
        data: day_data,
        axes: false,
        xkey: 'elapsed',
        ykeys: ['value', 'b'],
        labels: ['Download Speed', 'Upload Speed'],
        yLabelFormat: function (y) {
            return y.toString() + ' Mb/s';
        },
        gridEnabled: false,
        gridLineColor: 'transparent',
        lineColors: ['#7faadd', '#005dad'],
        lineWidth: 0,
        pointSize: 0,
        pointFillColors: ['#3e80bd'],
        pointStrokeColors: '#3e80bd',
        fillOpacity: .7,
        gridTextColor: '#999',
        parseTime: false,
        resize: true,
        behaveLikeLine: true,
        hideHover: 'auto'
    });

    // Services chart ( Morris Donut Chart )
    // =================================================================
    // Require MorrisJS Chart
    // -----------------------------------------------------------------
    // http://morrisjs.github.io/morris.js/
    // =================================================================
    Morris.Donut({
        element: 'demo-morris-donut',
        data: [
            {label: "Supports", value: 12},
            {label: "Sales", value: 30},
            {label: "Comments", value: 20}
        ],
        colors: ['#9d7fe4', '#7553c9', '#8669cc'],
        resize: true
    });

    // Visitor chart ( Sparkline chart )
    // =================================================================
    // Require Sparkline Chart
    // -----------------------------------------------------------------
    // http://omnipotent.net/jquery.sparkline/#s-about
    // =================================================================
    $("#demo-chart-visitors").sparkline([476, 643, 356, 453, 745, 976, 867, 886, 984, 645, 767, 799], {
        type: 'line',
        width: '110',
        height: '22',
        spotRadius: 3,
        lineWidth: 1,
        lineColor: 'rgba(255,255,255,.9)',
        fillColor: 'rgba(0,0,0,0.05)',
        spotColor: 'rgba(255,255,255,.5)',
        minSpotColor: 'rgba(255,255,255,.5)',
        maxSpotColor: 'rgba(255,255,255,.5)',
        highlightLineColor: '#ffffff',
        highlightSpotColor: '#ffffff',
        tooltipChartTitle: 'Visitors',
        tooltipSuffix: ' k'
    });

    // Bounce rate chart ( Sparkline chart )
    // =================================================================
    // Require Sparkline Chart
    // -----------------------------------------------------------------
    // http://omnipotent.net/jquery.sparkline/#s-about
    // =================================================================
    $("#demo-chart-bounce-rate").sparkline([23, 24, 22, 27, 35, 40, 39, 29, 27, 33, 29, 37], {
        type: 'line',
        width: '110',
        height: '22',
        spotRadius: 3,
        lineWidth: 1,
        lineColor: 'rgba(255,255,255,.9)',
        fillColor: 'rgba(0,0,0,0.05)',
        spotColor: 'rgba(255,255,255,.5)',
        minSpotColor: 'rgba(255,255,255,.5)',
        maxSpotColor: 'rgba(255,255,255,.5)',
        highlightLineColor: '#ffffff',
        highlightSpotColor: '#ffffff',
        tooltipChartTitle: 'Bounce rate',
        tooltipSuffix: ' %'
    });

    // EXTRA SMALL WEATHER WIDGET
    // =================================================================
    // Require skycons
    // -----------------------------------------------------------------
    // http://darkskyapp.github.io/skycons/
    // =================================================================
    // on Android, a nasty hack is needed: {"resizeClear": true}
    var skyconsOptions = {
        "color": "#3bb5e8",
        "resizeClear": true
    };

    /* Main Icon */
    var skycons = new Skycons(skyconsOptions);
    skycons.add("demo-weather-xs-icon", Skycons.PARTLY_CLOUDY_DAY);
    skycons.play();

    // HDD USAGE - SPARKLINE LINE AREA CHART
    // =================================================================
    // Require sparkline
    // -----------------------------------------------------------------
    // http://omnipotent.net/jquery.sparkline/#s-about
    // =================================================================
    $("#demo-sparkline-area").sparkline([57, 69, 70, 68, 73, 76, 75, 79, 73, 76, 77, 73], {
        type: 'line',
        width: '140',
        height: '80',
        spotRadius: 2.5,
        lineWidth: 1.5,
        lineColor: 'rgba(255,255,255,.5)',
        fillColor: 'rgba(0,0,0,0.2)',
        spotColor: 'rgba(255,255,255,.5)',
        minSpotColor: 'rgba(255,255,255,.5)',
        maxSpotColor: 'rgba(255,255,255,.5)',
        highlightLineColor: '#ffffff',
        highlightSpotColor: '#ffffff',
        tooltipChartTitle: 'Usage',
        tooltipSuffix: ' %'
    });

    // EARNING - SPARKLINE LINE CHART
    // =================================================================
    // Require sparkline
    // -----------------------------------------------------------------
    // http://omnipotent.net/jquery.sparkline/#s-about
    // =================================================================
    $("#demo-sparkline-line").sparkline([345, 404, 305, 455, 378, 767], {
        type: 'line',
        width: '140',
        height: '80',
        spotRadius: 2.5,
        lineWidth: 1.5,
        lineColor: '#ffffff',
        fillColor: false,
        minSpotColor: false,
        maxSpotColor: false,
        highlightLineColor: '#ffffff',
        highlightSpotColor: '#ffffff',
        tooltipChartTitle: 'Earning',
        tooltipPrefix: '$ ',
        spotColor: '#ffffff',
        valueSpots: {'0:': '#ffffff'}
    });

    // SALES - SPARKLINE BAR CHART
    // =================================================================
    // Require sparkline
    // -----------------------------------------------------------------
    // http://omnipotent.net/jquery.sparkline/#s-about
    // =================================================================
    $("#demo-sparkline-bar").sparkline([40, 32, 53, 45, 67, 45, 56, 34, 67, 76], {
        type: 'bar',
        height: '80',
        barWidth: 9,
        barSpacing: 3,
        zeroAxis: false,
        tooltipChartTitle: 'Daily Sales',
        tooltipSuffix: ' Sales',
        barColor: '#fff'
    });

    // TOP MOVIE - SPARKLINE PIE CHART
    // =================================================================
    // Require sparkline
    // -----------------------------------------------------------------
    // http://omnipotent.net/jquery.sparkline/#s-about
    // =================================================================
    $("#demo-sparkline-pie").sparkline([5, 12, 17, 55], {
        type: 'pie',
        width: '80',
        height: '80',
        tooltipChartTitle: 'Top Movies',
        tooltipFormat: '{{offset:offset}} ({{percent.1}}%)',
        tooltipValueLookups: {
            'offset': {0: 'Drama', 1: 'Action', 2: 'Comedy', 3: 'Adventure'}
        },
        sliceColors: ['#2d4859', '#fe7211', '#7ad689', '#128376']
    });

    // PANEL OVERLAY
    // =================================================================
    // Require Nifty js
    // -----------------------------------------------------------------
    // http://www.themeon.net
    // =================================================================
    $('#demo-panel-network-refresh').niftyOverlay().on('click', function () {
        var $el = $(this), relTime;
        $el.niftyOverlay('show');
        relTime = setInterval(function () {
            $el.niftyOverlay('hide');
            clearInterval(relTime);
        }, 2000);
    });

    // WEATHER WIDGET
    // =================================================================
    // Require skycons
    // -----------------------------------------------------------------
    // http://darkskyapp.github.io/skycons/
    // =================================================================
    // on Android, a nasty hack is needed: {"resizeClear": true}
    skyconsOptions = {
        "color": "#fff",
        "resizeClear": true
    };

    /* Main Icon */
    var skyconsMain = new Skycons(skyconsOptions);
    skyconsMain.add("demo-weather-icon-1", Skycons.PARTLY_CLOUDY_DAY);
    skyconsMain.play();

    /* Small Icons */
    var skycons2 = new Skycons(skyconsOptions);
    skycons2.add("demo-weather-icon-2", Skycons.CLOUDY);
    skycons2.play();

    var skycons3 = new Skycons(skyconsOptions);
    skycons3.add("demo-weather-icon-3", Skycons.WIND);
    skycons3.play();

    var skycons4 = new Skycons(skyconsOptions);
    skycons4.add("demo-weather-icon-4", Skycons.RAIN);
    skycons4.play();

    var skycons5 = new Skycons(skyconsOptions);
    skycons5.add("demo-weather-icon-5", Skycons.PARTLY_CLOUDY_DAY);
    skycons5.play();
});
import VueRouter from 'vue-router';
import APP3D from '../views/3d/index';

const routes = [
    {path: '/', component: APP3D},
    {path: '/3d', component: APP3D},
];

const router = new VueRouter({
    mode: 'history',
    routes,
});

export default router;
# -*- coding: utf-8 -*-

import PySide2.QtCore
from PySide2.QtCore import Signal
from PySide2.QtWidgets import QWidget

from ......Classes.SlotW28 import SlotW28
from ......GUI import gui_option
from ......GUI.Dialog.DMachineSetup.SWSlot.PWSlot28.Gen_PWSlot28 import Gen_PWSlot28
from ......Methods.Slot.Slot import SlotCheckError

translate = PySide2.QtCore.QCoreApplication.translate


class PWSlot28(Gen_PWSlot28, QWidget):
    """Page to set the Slot Type 28"""

    # Signal to DMachineSetup to know that the save popup is needed
    saveNeeded = Signal()
    # Information for Slot combobox
    slot_name = "Slot Type 28"
    slot_type = SlotW28

    def __init__(self, lamination=None):
        """Initialize the GUI according to current lamination

        Parameters
        ----------
        self : PWSlot28
            A PWSlot28 widget
        lamination : Lamination
            current lamination to edit
        """
        # Build the interface according to the .ui file
        QWidget.__init__(self)
        self.setupUi(self)

        self.lamination = lamination
        self.slot = lamination.slot

        # Set FloatEdit unit
        self.lf_W0.unit = "m"
        self.lf_W3.unit = "m"
        self.lf_H0.unit = "m"
        self.lf_H3.unit = "m"
        self.lf_R1.unit = "m"
        # Set unit name (m or mm)
        wid_list = [
            self.unit_W0,
            self.unit_W3,
            self.unit_H0,
            self.unit_H3,
            self.unit_R1,
        ]
        for wid in wid_list:
            wid.setText("[" + gui_option.unit.get_m_name() + "]")

        # Fill the fields with the machine values (if they're filled)
        self.lf_W0.setValue(self.slot.W0)
        self.lf_W3.setValue(self.slot.W3)
        self.lf_R1.setValue(self.slot.R1)
        self.lf_H0.setValue(self.slot.H0)
        self.lf_H3.setValue(self.slot.H3)

        # Display the main output of the slot (surface, height...)
        self.w_out.comp_output()

        # Connect the signal/slot
        self.lf_W0.editingFinished.connect(self.set_W0)
        self.lf_W3.editingFinished.connect(self.set_W3)
        self.lf_R1.editingFinished.connect(self.set_R1)
        self.lf_H0.editingFinished.connect(self.set_H0)
        self.lf_H3.editingFinished.connect(self.set_H3)

    def set_W0(self):
        """Signal to update the value of W0 according to the line edit

        Parameters
        ----------
        self : PWSlot28
            A PWSlot28 object
        """
        self.slot.W0 = self.lf_W0.value()
        self.w_out.comp_output()
        # Notify the machine GUI that the machine has changed
        self.saveNeeded.emit()

    def set_W3(self):
        """Signal to update the value of W3 according to the line edit

        Parameters
        ----------
        self : PWSlot28
            A PWSlot28 object
        """
        self.slot.W3 = self.lf_W3.value()
        self.w_out.comp_output()
        # Notify the machine GUI that the machine has changed
        self.saveNeeded.emit()

    def set_R1(self):
        """Signal to update the value of R1 according to the line edit

        Parameters
        ----------
        self : PWSlot28
            A PWSlot28 object
        """
        self.slot.R1 = self.lf_R1.value()
        self.w_out.comp_output()
        # Notify the machine GUI that the machine has changed
        self.saveNeeded.emit()

    def set_H0(self):
        """Signal to update the value of H0 according to the line edit

        Parameters
        ----------
        self : PWSlot28
            A PWSlot28 object
        """
        self.slot.H0 = self.lf_H0.value()
        self.w_out.comp_output()
        # Notify the machine GUI that the machine has changed
        self.saveNeeded.emit()

    def set_H3(self):
        """Signal to update the value of H3 according to the line edit

        Parameters
        ----------
        self : PWSlot28
            A PWSlot28 object
        """
        self.slot.H3 = self.lf_H3.value()
        self.w_out.comp_output()
        # Notify the machine GUI that the machine has changed
        self.saveNeeded.emit()

    @staticmethod
    def check(lam):
        """Check that the current lamination has all the needed fields set

        Parameters
        ----------
        lam: LamSlotWind
            Lamination to check

        Returns
        -------
        error: str
            Error message (return None if no error)
        """
        # Check that everything is set
        if lam.slot.W0 is None:
            return "You must set W0 !"
        elif lam.slot.W3 is None:
            return "You must set W3 !"
        elif lam.slot.R1 is None:
            return "You must set R1 !"
        elif lam.slot.H0 is None:
            return "You must set H0 !"
        elif lam.slot.H3 is None:
            return "You must set H3 !"

        # Check that everything is set right
        # Constraints
        try:
            lam.slot.check()
        except SlotCheckError as error:
            return str(error)
        # Output
        try:
            yoke_height = lam.comp_height_yoke()
        except Exception as error:
            return "Unable to compute yoke height:" + str(error)
        if yoke_height <= 0:
            return "The slot height is greater than the lamination !"
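A hedged driver for the static check above; `lam` stands for a LamSlotWind instance assembled elsewhere in the machine setup GUI.

error = PWSlot28.check(lam)  # lam: hypothetical LamSlotWind carrying a SlotW28
if error is not None:
    print("Slot W28 definition error: " + error)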
import axios from 'axios';

const api = axios.create({
    baseURL: 'http://localhost:6000/hlf/api',
    responseType: 'json',
    timeout: 50000,
    // axios has no `contentType` option; the header must be set explicitly
    headers: {'Content-Type': 'application/json'},
});

const getMiners = () => api.get('/org.acme.goldchain.Miner');
const getCas = () => api.get('/org.acme.goldchain.CertificateAuthority');
const getUsers = () => api.get('/org.acme.goldchain.RegisteredUser');
const getGolds = () => api.get('/org.acme.goldchain.Gold');
const getDeeds = () => api.get('/org.acme.goldchain.Deed');

const deleteMiner = (id) => api.delete(`/org.acme.goldchain.Miner/${id}`);
const deleteCa = (id) => api.delete(`/org.acme.goldchain.CertificateAuthority/${id}`);
const deleteUser = (id) => api.delete(`/org.acme.goldchain.RegisteredUser/${id}`);
const deleteGold = (id) => api.delete(`/org.acme.goldchain.Gold/${id}`);
const deleteDeed = (id) => api.delete(`/org.acme.goldchain.Deed/${id}`);

const execute = async () => {
    try {
        const m = await getMiners();
        const c = await getCas();
        const u = await getUsers();
        const g = await getGolds();
        const d = await getDeeds();

        const miners = m.data;
        const cas = c.data;
        const users = u.data;
        const golds = g.data;
        const deeds = d.data;

        // Remove every asset and participant from the registry.
        miners.forEach(async miner => await deleteMiner(miner.userId));
        cas.forEach(async ca => await deleteCa(ca.userId));
        users.forEach(async user => await deleteUser(user.userId));
        golds.forEach(async gold => await deleteGold(gold.goldId));
        deeds.forEach(async deed => await deleteDeed(deed.deedId));
    } catch (e) {
        console.log(e);
    }
};

execute();
from seabreeze.pyseabreeze.features._base import SeaBreezeFeature

# Definition
# ==========
#
# TODO: This feature needs to be implemented for pyseabreeze
#


class SeaBreezeRevisionFeature(SeaBreezeFeature):
    identifier = "revision"

    def hardware_revision(self):
        raise NotImplementedError("implement in derived class")

    def revision_firmware_get(self):
        raise NotImplementedError("implement in derived class")
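A minimal sketch of how a derived class might satisfy this interface; the class name and returned strings are hypothetical placeholders for real protocol queries.

class _ExampleRevisionFeature(SeaBreezeRevisionFeature):
    # hypothetical subclass; a real implementation would query the
    # spectrometer's firmware over its transport protocol
    def hardware_revision(self):
        return "1.0"

    def revision_firmware_get(self):
        return "0.1.0"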
#!/usr/bin/env python
# Author: Manivannan Sadhasivam <manivannan.sadhasivam@linaro.org>
# Copyright (c) 2018 Linaro Ltd.
#
# SPDX-License-Identifier: MIT
#
# Example Usage: Reads maximum brightness value for user1 led and turns it
#                on/off depending on current state. Then sets led trigger
#                to heartbeat

import mraa
import time
import sys

led_num = 0 if len(sys.argv) < 2 else int(sys.argv[1])
led_val = 0 if len(sys.argv) < 3 else int(sys.argv[2])

# initialise LED
led_1 = mraa.Led(led_num)

# read maximum brightness
val = led_1.readMaxBrightness()

# turn led on/off depending on read max_brightness value
if val >= 1:
    print("maximum brightness value for LED is: %d" % val)
# never reached mostly
else:
    print("readMaxBrightness is not supported")

# set LED brightness
led_1.setBrightness(led_val)

# sleep for 1 second
time.sleep(1)

if led_1.trigger("heartbeat") == 0:
    print("led trigger set to: heartbeat")
else:
    print("Setting LED to heartbeat is not supported")
import os
from datetime import datetime, timedelta
from selenium import webdriver
from selenium.common.exceptions import TimeoutException
from bs4 import BeautifulSoup
import re
import pandas as pd
import time


def base_url_builder(tag):
    #BUILDS THE BASE URL TO ITERATE ON FROM GIVEN TAG
    url = "https://medium.com/tag/" + tag + "/archive/"
    return url


def get_start_date(year, month, day):
    #CHECKS IF START DATE IS A VALID DATE, CONVERTS TO DATETIME OBJECT
    try:
        start_date = datetime(year, month, day)
    except:
        raise Exception("Start date is in the wrong format or is invalid.")
    return start_date


def get_end_date(year, month, day):
    #CHECKS IF END DATE IS A VALID DATE, CONVERTS TO DATETIME OBJECT
    try:
        end_date = datetime(year, month, day)
    except:
        raise Exception("End date is in the wrong format or is invalid.")
    return end_date


def open_chrome():
    #OPENS A CHROME DRIVER
    driver = webdriver.Chrome()
    driver.implicitly_wait(30)
    return driver


def url_masher(base_url, year, month, day):
    #MAKES A NEW URL FROM GIVEN DATE
    #THE FORMAT OF THE URL IS YYYY/MM/DD, WE MUST MATCH IT
    if len(month) == 1:
        month = "0" + month
    if len(day) == 1:
        day = "0" + day
    #MASH THE STRINGS TOGETHER TO MAKE A PASSABLE URL
    url = base_url + year + "/" + month + "/" + day
    return url


def find_post_cards(soup):
    #PULLS EACH CARD FROM THE FEED. EACH CARD IS A STORY OR COMMENT
    cards = soup.find_all("div", class_="streamItem streamItem--postPreview js-streamItem")
    return cards


def get_titles_from_cards(cards):
    #PULLS TITLE DATA FROM EACH CARD IN CARDS, RETURNS A LIST OF TITLES
    def title_cleaner(title):
        #REMOVE MEDIUM'S ENCODING SYMBOLS AND EMOJIS FROM TITLES
        title = title.replace("\xa0", " ")
        title = title.replace("\u200a", "")
        title = title.replace("\ufe0f", "")
        title = re.sub(r'[^\x00-\x7F]+', '', title)
        return title

    titles = []
    for card in cards:
        #SEARCH FOR TITLE, THERE ARE SEVERAL DIFFERENT CLASSES
        variant1 = card.find("h3", class_="graf graf--h3 graf-after--figure graf--title")
        variant2 = card.find("h3", class_="graf graf--h3 graf-after--figure graf--trailing graf--title")
        variant3 = card.find("h4", class_="graf graf--h4 graf--leading")
        variant4 = card.find("h3", class_="graf graf--h3 graf--leading graf--title")
        variant5 = card.find("p", class_="graf graf--p graf--leading")
        variant6 = card.find("h3", class_="graf graf--h3 graf--startsWithDoubleQuote graf--leading graf--title")
        variant7 = card.find("h3", class_="graf graf--h3 graf--startsWithDoubleQuote graf-after--figure graf--trailing graf--title")
        #EACH CARD MUST HAVE ONE OF THE ABOVE TITLE CLASSES, FIND IT AND CUT OUT MEDIUM'S
        #STYLING CODES
        variants = [variant1, variant2, variant3, variant4, variant5, variant6, variant7]
        saved = False
        #THE FIRST TITLE ENTRY WE MATCH, WE SAVE
        for variant in variants:
            if ((variant is not None) and (not saved)):
                title = variant.text
                title = title_cleaner(title)
                titles.append(title)
                saved = True
        if not saved:
            titles.append("NaN")
    return titles


def get_subtitles_from_cards(cards):
    #PULLS SUBTITLE DATA FROM EACH CARD IN CARDS, RETURNS A LIST OF SUBTITLES
    def subtitle_cleaner(subtitle):
        #REMOVE MEDIUM'S ENCODING SYMBOLS AND EMOJIS FROM SUBTITLES
        subtitle = subtitle.replace("\xa0", " ")
        subtitle = subtitle.replace("\u200a", "")
        subtitle = subtitle.replace("\ufe0f", "")
        subtitle = re.sub(r'[^\x00-\x7F]+', '', subtitle)
        return subtitle

    subtitles = []
    for card in cards:
        #SEARCH FOR SUBTITLE, THERE ARE SEVERAL DIFFERENT CLASSES
        variant1 = card.find("h4", class_="graf graf--h4 graf-after--h3 graf--subtitle")
        variant2 = card.find("h4", class_="graf graf--h4 graf-after--h3 graf--trailing graf--subtitle")
        variant3 = card.find("strong", class_="markup--strong markup--p-strong")
        variant4 = card.find("h4", class_="graf graf--p graf-after--h3 graf--trailing")
        variant5 = card.find("p", class_="graf graf--p graf-after--h3 graf--trailing")
        variant6 = card.find("blockquote", class_="graf graf--pullquote graf-after--figure graf--trailing")
        variant7 = card.find("p", class_="graf graf--p graf-after--figure")
        variant8 = card.find("blockquote", class_="graf graf--blockquote graf-after--h3 graf--trailing")
        variant9 = card.find("p", class_="graf graf--p graf-after--figure graf--trailing")
        variant10 = card.find("em", class_="markup--em markup--p-em")
        variant11 = card.find("p", class_="graf graf--p graf-after--p graf--trailing")
        #EACH CARD MUST HAVE ONE OF THE ABOVE SUBTITLE CLASSES, FIND IT AND CUT OUT MEDIUM'S
        #STYLING CODES
        variants = [variant1, variant2, variant3, variant4, variant5, variant6,
                    variant7, variant8, variant9, variant10, variant11]
        saved = False
        for variant in variants:
            if ((variant is not None) and (not saved)):
                subtitle = variant.text
                subtitle = subtitle_cleaner(subtitle)
                subtitles.append(subtitle)
                saved = True
        if not saved:
            subtitles.append("NaN")
    return subtitles


def get_image_from_cards(cards):
    #RETURNS A 1 IF IMAGE IS PRESENT
    images = []
    for card in cards:
        img = card.find("img", class_="progressiveMedia-image js-progressiveMedia-image")
        if img is not None:
            images.append(1)
        else:
            images.append(0)
    return images


def get_auth_and_pubs_from_cards(cards):
    # PULLS AUTHOR AND PUBLICATION FROM EACH STORY CARD
    authors = []
    pubs = []
    for card in cards:
        # get the author and publication
        author = card.find("a", class_="ds-link ds-link--styleSubtle link link--darken link--accent u-accentColor--textNormal u-accentColor--textDarken")
        pub = card.find("a", class_="ds-link ds-link--styleSubtle link--darken link--accent u-accentColor--textNormal")
        if author is not None:
            text = author.text
            text = re.sub(r'\s+[^A-Za-z]', '', text)
            text = re.sub(r'[^\x00-\x7F]+', ' ', text)
            authors.append(text)
        else:
            authors.append("NaN")
        if pub is not None:
            text2 = pub.text
            text2 = re.sub(r'\s+[^A-Za-z]', '', text2)
            text2 = re.sub(r'[^\x00-\x7F]+', ' ', text2)
            pubs.append(text2)
        else:
            pubs.append("NaN")
    return authors, pubs


def get_dates_and_tags(tag, year, month, day, cards):
    #CREATES A LIST OF TAGS AND DATES
    Year = []
    Month = []
    Day = []
    tags = []
    for card in cards:
        tags.append(tag)
        Year.append(year)
        Month.append(month)
        Day.append(day)
    return Year, Month, Day, tags


def get_readTime_from_cards(cards):
    #PULL READTIME FROM EACH CARD IN CARDS
    readingTimes = []
    for card in cards:
        time = card.find("span", class_="readingTime")
        if time is not None:
            time = time['title']
            time = time.replace(" min read", "")
            readingTimes.append(time)
        else:
            readingTimes.append("0")
    return readingTimes


def get_applause_from_cards(cards):
    #PULL CLAPS FROM CARDS
    applause = []
    for card in cards:
        claps = card.find("button", class_="button button--chromeless u-baseColor--buttonNormal js-multirecommendCountButton u-disablePointerEvents")
        if claps is not None:
            applause.append(claps.text)
        else:
            applause.append("0")
    return applause


def get_comment_from_cards(cards):
    #DETERMINES WHETHER THE TIMELINE CARD IS A COMMENT, 1 IF COMMENT
    comments = []
    for card in cards:
        comment = card.find("div", class_="u-fontSize14 u-marginTop10 u-marginBottom20 u-padding14 u-xs-padding12 u-borderRadius3 u-borderCardBackground u-borderLighterHover u-boxShadow1px4pxCardBorder")
        if comment is not None:
            comments.append(1)
        else:
            comments.append(0)
    return comments


def get_urls_from_cards(cards):
    #GETS ARTICLE URLS FROM ALL CARDS
    urls = []
    for card in cards:
        url = card.find("a", class_="")
        if url is not None:
            urls.append(url['href'])
        else:
            raise Exception("couldn't find a url")
    return urls


def get_auth_urls_from_cards(cards):
    #PULLS AUTHOR'S URL ADDRESS FROM EACH CARD
    auth_urls = []
    for card in cards:
        url = card.find("a", class_="ds-link ds-link--styleSubtle link link--darken link--accent u-accentColor--textNormal u-accentColor--textDarken")
        if url is not None:
            auth_urls.append(url['href'])
        else:
            auth_urls.append("NaN")
    return auth_urls


def scrape_tag(tag, yearstart, monthstart, yearstop, monthstop):
    #-------------------------------------------------------------
    #INPUT CHECKS
    #1. MAKE SURE TAG IS VALID (no idea how to do this without an exhaustive list... too much work)
    #2. CHECK VALID FILE PATH
    path = os.getcwd()
    path = path + "/medium_" + tag + ".csv"
    #3. TRY TO OPEN FILE PATH
    try:
        file = open(path, "w")
        file.close()
    except:
        raise Exception("Could not open file.")
    #4. MAKE SURE START DATE <= STOP DATE
    current_date = get_start_date(int(yearstart), int(monthstart), 1)
    end_date = get_end_date(int(yearstop), int(monthstop), 1)
    if current_date > end_date:
        raise Exception("Start date exceeds end date.")
    #-----------------------------------------------------------------
    #BEGIN SCRAPE
    #BUILDS THE BASE URL FROM GIVEN TAG TO ITERATE ON
    base_url = base_url_builder(tag)
    #MEDIUM DENIES ANY COMMANDLINE REQUESTS, NEED BROWSER
    chrome_driver = open_chrome()
    #USE FIRSTPAGE TO ADD HEADERS TO CSV, USE COUNTER TO GET COMMANDLINE PREVIEW OF PROGRESS
    firstPage = True
    counter = 0
    #START ITERATION OVER DATES
    while current_date <= end_date:
        #BUILD URL FROM CURRENT_DATE
        url = url_masher(base_url, str(current_date.year), str(current_date.month), str(current_date.day))
        #LOAD THE PAGE IN THE BROWSER (driver.get RETURNS None; MEDIUM REDIRECTS EMPTY DATES)
        chrome_driver.get(url)
        # Avoid empty dates
        if chrome_driver.current_url != url:
            current_date = current_date + timedelta(days=1)
            time.sleep(2)
        else:
            soup = BeautifulSoup(chrome_driver.page_source, features='lxml')
            #FIND ALL STORY CARDS, EACH IS AN ARTICLE
            cards = find_post_cards(soup)
            #PULL DATA FROM CARDS
            titles = get_titles_from_cards(cards)
            subtitles = get_subtitles_from_cards(cards)
            images = get_image_from_cards(cards)
            authors, pubs = get_auth_and_pubs_from_cards(cards)
            year, month, day, tags = get_dates_and_tags(tag, current_date.year, current_date.month, current_date.day, cards)
            readingTimes = get_readTime_from_cards(cards)
            applause = get_applause_from_cards(cards)
            urls = get_urls_from_cards(cards)
            auth_urls = get_auth_urls_from_cards(cards)
            comment = get_comment_from_cards(cards)
            #ACCUMULATE DATA INTO A DICTIONARY
            data = {"Title": titles, "Subtitle": subtitles, "Image": images, "Author": authors,
                    "Publication": pubs, "Year": year, "Month": month, "Day": day, "Tag": tags,
                    "Reading_Time": readingTimes, "Claps": applause, "Comment": comment,
                    "url": urls, "Author_url": auth_urls}
            #CHECK THAT DATA IN EACH CATEGORY IS THE SAME LENGTH
            vals = list(data.values())
            for col in vals:
                if len(col) != len(cards):
                    raise Exception("Data length does not match number of stories on page.")
            #CREATE DATAFRAME TO ORGANIZE AND SAVE TO CSV
            df = pd.DataFrame.from_dict(data)
            #APPEND DATA TO FILE,
            # IF FIRSTPAGE -> ADD A HEADER
            if firstPage:
                with open(path, 'a') as f:
                    df.to_csv(f, mode="a", header=True, index=False)
                firstPage = False
            #IF NOT FIRSTPAGE -> NO HEADER
            else:
                with open(path, 'a') as f:
                    df.to_csv(f, mode="a", header=False, index=False)
            #PRINTS THE NUMBER OF TOTAL TIMELINE CARDS SAVED TO CSV
            counter = counter + len(cards)
            print(counter, " ", current_date)
            #ADDS A DAY TO THE CURRENT DATE FOR NEXT URL CALL
            current_date = current_date + timedelta(days=1)
            time.sleep(2)
    chrome_driver.close()
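# A minimal usage sketch (added): the tag and date range below are illustrative
# assumptions, not from the original source. scrape_tag writes medium_<tag>.csv
# into the current working directory and needs a local chromedriver for
# selenium's webdriver.Chrome().
if __name__ == '__main__':
    # scrape the "data-science" archive from Jan 2018 through Mar 2018
    scrape_tag("data-science", 2018, 1, 2018, 3)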
"""StyleSheetList implements DOM Level 2 Style Sheets StyleSheetList.""" __all__ = ['StyleSheetList'] class StyleSheetList(list): """Interface `StyleSheetList` (introduced in DOM Level 2) The `StyleSheetList` interface provides the abstraction of an ordered collection of :class:`~cssutils.stylesheets.StyleSheet` objects. The items in the `StyleSheetList` are accessible via an integral index, starting from 0. This Python implementation is based on a standard Python list so e.g. allows ``examplelist[index]`` usage. """ def item(self, index): """ Used to retrieve a style sheet by ordinal `index`. If `index` is greater than or equal to the number of style sheets in the list, this returns ``None``. """ try: return self[index] except IndexError: return None length = property( lambda self: len(self), doc="The number of :class:`StyleSheet` objects in the list. The range" " of valid child stylesheet indices is 0 to length-1 inclusive.", )
# The MIT License (MIT) # Copyright (c) 2019 Ian Buttimer # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. from .DataSet import DataSet from .sj_types import BitArray # if somebody does "from sales_journal.pipelines import *", this is what they will # be able to access: __all__ = [ 'DataSet', 'BitArray', ]
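# Example (added sketch): assuming this file is sales_journal/pipelines/__init__.py,
# as the comment above suggests, consumers import the re-exported names directly:
#
#     from sales_journal.pipelines import DataSet, BitArray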
"""Study addon namespace.""" from contextlib import suppress from itertools import chain from pathlib import Path from time import monotonic from typing import Any, Dict, Iterator, List, Optional, Tuple import pydicom from box import Box from ambra_sdk import ADDON_DOCS_URL from ambra_sdk.addon.dicom import UploadedImageParams from ambra_sdk.deprecated import deprecated from ambra_sdk.exceptions.service import NotFound from ambra_sdk.models import Study as StudyModel from ambra_sdk.service.ws import WSManager class Study: # NOQA:WPS214 """Study addon namespace.""" def __init__(self, api): """Init. :param api: base api """ self._api = api def upload_dir( self, *, study_dir: Path, namespace_id: str, ) -> Tuple[str, List[UploadedImageParams]]: """Upload study to namespace from path. :param study_dir: path to study dir :param namespace_id: uploading to namespace :raises ValueError: Study dir is not directory :return: list of image params """ if not study_dir.is_dir(): raise ValueError('study_dir is not directory') return self.upload_paths( dicom_paths=study_dir.glob('**/*.dcm'), namespace_id=namespace_id, ) def upload_paths( self, *, dicom_paths: Iterator[Path], namespace_id: str, ) -> Tuple[str, List[UploadedImageParams]]: """Upload study to namespace from dicoms iteartor. :param dicom_paths: iterator of dicom paths :param namespace_id: uploading to namespace :raises ValueError: Study dir is not directory :return: list of image params """ images_params = [] first_dicom_path = next(dicom_paths, None) if first_dicom_path is None: raise ValueError('Dicoms iterator is empty') # In pydicom we can pass file path object # But in AI we use old version of pydicom. # For this version we can pass only fp or str. ds = pydicom.dcmread(fp=str(first_dicom_path)) patient_name = ds.PatientName study_uid = ds.StudyInstanceUID study_time = ds.StudyTime study_date = ds.StudyDate # create new study response_data = self._api.Study.add( study_uid=study_uid, study_date=study_date, study_time=study_time, patient_name=patient_name, storage_namespace=namespace_id, phi_namespace=namespace_id, ).get() engine_fqdn = response_data.engine_fqdn uuid: str = response_data.uuid # upload images for dicom_path in chain((first_dicom_path, ), dicom_paths): with dicom_path.open(mode='rb') as dicom: images_params.append( self._api.Addon.Dicom.upload( dicom_file=dicom, namespace_id=namespace_id, engine_fqdn=engine_fqdn, ), ) # then sync data # In api.html sync method have not uuid param... # So we use this hardcode: request = self._api.Study.sync(image_count=1) request_data = request.request_args.data or {} request_data['uuid'] = uuid # NOQA:WPS437 request.request_args.data = request_data # NOQA:WPS110 request.get() return uuid, images_params def wait( self, *, study_uid: str, namespace_id: str, timeout: float, ws_timeout: int, ) -> Box: """Wait study in namespace. 
:param study_uid: study_uid :param namespace_id: namespace :param timeout: time for waiting new study :param ws_timeout: time for waiting in socket :raises TimeoutError: if study not ready by timeout :return: Study box object """ ws_url = self._api.ws_url ws_manager = WSManager(ws_url) study = None start = monotonic() channel_name = 'study.{namespace_id}'.format(namespace_id=namespace_id) sid = self._api.sid with ws_manager.channel(sid, channel_name) as ws: while True: if monotonic() - start >= timeout: break with suppress(NotFound): study = self._api.Study.get( study_uid=study_uid, storage_namespace=namespace_id, ).get() if study and study.phantom == 0: break with suppress(TimeoutError): ws.wait_for_event( channel_name, sid, 'READY', timeout=ws_timeout, ) if not study: raise TimeoutError return study def upload_dir_and_get( self, *, study_dir: Path, namespace_id: str, timeout: float = 200.0, ws_timeout: int = 5, ) -> Box: """Upload study from dir and get. :param study_dir: path to study dir :param namespace_id: uploading to namespace :param timeout: time for waiting new study :param ws_timeout: time for waiting in socket :return: Study box object """ uuid, images_params = self.upload_dir( study_dir=study_dir, namespace_id=namespace_id, ) study_uid = images_params[0].study_uid return self.wait( study_uid=study_uid, namespace_id=namespace_id, timeout=timeout, ws_timeout=ws_timeout, ) def upload_paths_and_get( self, *, dicom_paths: Iterator[Path], namespace_id: str, timeout: float = 200.0, ws_timeout: int = 5, ) -> Box: """Upload study from dir and get. :param dicom_paths: iterator of dicom paths :param namespace_id: uploading to namespace :param timeout: time for waiting new study :param ws_timeout: time for waiting in socket :return: Study box object """ uuid, images_params = self.upload_paths( dicom_paths=dicom_paths, namespace_id=namespace_id, ) study_uid = images_params[0].study_uid return self.wait( study_uid=study_uid, namespace_id=namespace_id, timeout=timeout, ws_timeout=ws_timeout, ) def duplicate_and_get( self, uuid: str, namespace_id: str, include_attachments: bool = False, timeout: float = 200.0, ws_timeout: int = 5, ) -> Box: """Duplicate study to namespace. :param uuid: study_uuid :param namespace_id: to namespace_id :param include_attachments: include attachments :param timeout: waiting timeout :param ws_timeout: waiting from ws timeout :return: duplicated study """ include_attachments_int = int(include_attachments) from_study_uid = self._api.Study \ .get(uuid=uuid) \ .only(StudyModel.study_uid) \ .get() \ .study_uid self._api.Study.duplicate( uuid=uuid, namespace_id=namespace_id, include_attachments=include_attachments_int, ).get() return self.wait( study_uid=from_study_uid, namespace_id=namespace_id, timeout=timeout, ws_timeout=ws_timeout, ) def anonymize_and_wait( self, engine_fqdn: str, namespace: str, study_uid: str, region: Dict[str, Any], phi_namespace: Optional[str] = None, to_namespace: Optional[str] = None, new_study_uid: Optional[str] = None, keep_image_uids: Optional[str] = None, color: Optional[str] = None, x_ambrahealth_job_id: Optional[str] = None, is_ai: bool = False, only_prepare: bool = False, timeout: float = 200.0, ws_timeout: int = 5, ) -> str: """Start anonymization and wait when it completed. :param engine_fqdn: Engine FQDN (Required). :param namespace: Namespace (Required). :param study_uid: Study uid (Required). :param region: Region (Required). 
:param phi_namespace: phi namespace :param to_namespace: The storage namespace into which the new study should be placed (default same as original). :param new_study_uid: The Study Instance UID of the new study (default is randomly generated). :param keep_image_uids: Should SOP Instance UIDs of modified copies be same as originals? (default is false) :param color: HTML-formatted color (rrggbb) of obscured regions (default is black-and-white checkerboard) :param x_ambrahealth_job_id: X-AmbraHealth-Job-Id headers argument :param is_ai: This is request from ai stack flag :param only_prepare: Get prepared request. :param timeout: waiting timeout :param ws_timeout: waiting from ws timeout :returns: new study uid """ anonymize = self._api.Addon.Job.wait_completion( self._api.Storage.Study.anonymize, timeout=timeout, ws_timeout=ws_timeout, engine_fqdn=engine_fqdn, namespace=namespace, study_uid=study_uid, region=region, phi_namespace=phi_namespace, to_namespace=to_namespace, new_study_uid=new_study_uid, keep_image_uids=keep_image_uids, color=color, only_prepare=only_prepare, x_ambrahealth_job_id=x_ambrahealth_job_id, is_ai=is_ai, ) anonymized_study_uid: str = anonymize.text return anonymized_study_uid # NOQA:WPS331 def anonymize_and_get( self, engine_fqdn: str, namespace: str, study_uid: str, region: Dict[str, Any], phi_namespace: Optional[str] = None, to_namespace: Optional[str] = None, new_study_uid: Optional[str] = None, keep_image_uids: Optional[str] = None, color: Optional[str] = None, x_ambrahealth_job_id: Optional[str] = None, is_ai: bool = False, only_prepare: bool = False, timeout: float = 200.0, ws_timeout: int = 5, ) -> Box: """Start anonymization wait and get anonymized study. :param engine_fqdn: Engine FQDN (Required). :param namespace: Namespace (Required). :param study_uid: Study uid (Required). :param region: Region (Required). :param phi_namespace: phi namespace :param to_namespace: The storage namespace into which the new study should be placed (default same as original). :param new_study_uid: The Study Instance UID of the new study (default is randomly generated). :param keep_image_uids: Should SOP Instance UIDs of modified copies be same as originals? (default is false) :param color: HTML-formatted color (rrggbb) of obscured regions (default is black-and-white checkerboard) :param x_ambrahealth_job_id: X-AmbraHealth-Job-Id headers argument :param is_ai: This is request from ai stack flag :param only_prepare: Get prepared request. 
:param timeout: waiting timeout :param ws_timeout: waiting from ws timeout :raises TimeoutError: if job or study not ready by timeout :returns: new study """ start = monotonic() anonymize = self._api.Addon.Job.wait_completion( self._api.Storage.Study.anonymize, timeout=timeout, ws_timeout=ws_timeout, engine_fqdn=engine_fqdn, namespace=namespace, study_uid=study_uid, region=region, phi_namespace=phi_namespace, to_namespace=to_namespace, new_study_uid=new_study_uid, keep_image_uids=keep_image_uids, color=color, only_prepare=only_prepare, x_ambrahealth_job_id=x_ambrahealth_job_id, is_ai=is_ai, ) anonymized_study_uid: str = anonymize.text spend_time = monotonic() - start rest_timeout = timeout - spend_time if rest_timeout <= 0: raise TimeoutError new_namespace = to_namespace if to_namespace is not None else namespace return self.wait( study_uid=anonymized_study_uid, namespace_id=new_namespace, timeout=rest_timeout, ws_timeout=ws_timeout, ) @deprecated( 'Use api.Addon.Job.wait: {addon_docs_url}#job-wait'.format( addon_docs_url=ADDON_DOCS_URL, ), ) def wait_job( self, job_id: str, namespace_id: str, timeout: float, ws_timeout: int, ): """Wait job. :param job_id: job id :param namespace_id: job namespace_id :param timeout: time for waiting new study :param ws_timeout: time for waiting in socket """ self._api.Addon.Job.wait( job_id=job_id, namespace_id=namespace_id, timeout=timeout, ws_timeout=ws_timeout, ) @deprecated( 'Use api.Addon.Dicom.get: {addon_docs_url}#dicom-get'.format( addon_docs_url=ADDON_DOCS_URL, ), ) def dicom( self, namespace_id: str, study_uid: str, image_uid: str, image_version: str = '*', engine_fqdn: Optional[str] = None, pretranscode: Optional[bool] = None, ): """Get dicom. :param namespace_id: uploading to namespace :param study_uid: study_uid :param image_uid: image_uid :param image_version: image_version :param engine_fqdn: fqdn (if None gets namespace fqdn) :param pretranscode: get pretranscoded :return: pydicom object """ return self._api.Addon.Dicom.get( namespace_id=namespace_id, study_uid=study_uid, image_uid=image_uid, image_version=image_version, engine_fqdn=engine_fqdn, pretranscode=pretranscode, ) @deprecated( 'Use api.Addon.Dicom.upload_from_path: ' '{addon_docs_url}#dicom-upload-from-path'.format( addon_docs_url=ADDON_DOCS_URL, ), ) def upload_dicom( self, dicom_path: Path, namespace_id: str, engine_fqdn: Optional[str] = None, ) -> UploadedImageParams: """Upload dicom to namespace. :param dicom_path: path to dicom :param namespace_id: uploading to namespace :param engine_fqdn: fqdn (if None gets namespace fqdn) :return: uploaded image params """ image_params: UploadedImageParams = self._api \ .Addon.Dicom.upload_from_path( dicom_path=dicom_path, namespace_id=namespace_id, engine_fqdn=engine_fqdn, ) return image_params # NOQA:WPS331 @deprecated( 'Use api.Addon.Study.upload_dir: ' '{addon_docs_url}#study-upload-dir'.format( addon_docs_url=ADDON_DOCS_URL, ), ) def upload( self, *, study_dir: Path, namespace_id: str, ) -> Tuple[str, List[UploadedImageParams]]: """Upload study to namespace from path. 
        :param study_dir: path to study dir
        :param namespace_id: uploading to namespace

        :return: list of image params
        """
        return self.upload_dir(
            study_dir=study_dir,
            namespace_id=namespace_id,
        )

    @deprecated(
        'Use api.Addon.Study.upload_dir_and_get: '
        '{addon_docs_url}#study-upload-dir-and-get'.format(
            addon_docs_url=ADDON_DOCS_URL,
        ),
    )
    def upload_and_get(
        self,
        study_dir: Path,
        namespace_id: str,
        timeout: float = 200.0,
        ws_timeout: int = 5,
    ) -> Box:
        """Upload study from dir and get.

        :param study_dir: path to study dir
        :param namespace_id: uploading to namespace
        :param timeout: time for waiting new study
        :param ws_timeout: time for waiting in socket

        :return: Study box object
        """
        return self.upload_dir_and_get(
            study_dir=study_dir,
            namespace_id=namespace_id,
            timeout=timeout,
            ws_timeout=ws_timeout,
        )
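# A usage sketch (added): `api` is an assumed, already-authenticated ambra_sdk
# Api instance, and the directory and namespace id are illustrative, not from
# the original source. upload_dir_and_get blocks until the study is ready.
#
# from pathlib import Path
#
# study = api.Addon.Study.upload_dir_and_get(
#     study_dir=Path('/data/studies/case-001'),  # hypothetical dir of .dcm files
#     namespace_id='<namespace uuid>',           # hypothetical target namespace
#     timeout=200.0,
#     ws_timeout=5,
# )
# print(study.uuid, study.study_uid)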
default_app_config = 'capweb.apps.CapwebConfig'
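# A sketch (added) of the AppConfig this setting points at; Django would look
# for it in capweb/apps.py. The verbose_name is an illustrative assumption.
#
# from django.apps import AppConfig
#
# class CapwebConfig(AppConfig):
#     name = 'capweb'
#     verbose_name = 'Capweb'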
from folium import plugins, Map, Circle, CircleMarker, PolyLine from randomcoords import RandomCoords center_point = [34.062400, -117.894900] # Defining the center of our map as well as our RandomCoords objects # Making our first map, a 100km circle with 5000 points. map1 = Map(location=center_point, zoom_start=12) rc = RandomCoords(center=center_point, diameter=100, units='km') for point in rc.generate_coords(num_coords=5000): CircleMarker(location=point, radius=2, color='blue').add_to(map1) Circle(radius=50000, location=center_point, fill=False, color='red').add_to(map1) map1.save(r'C:\Users\User\Desktop\big_map_km.html') # Making our second map, a 10mi circle with 500 points. map2 = Map(location=center_point, zoom_start=12) rc2 = RandomCoords(center=center_point, diameter=10, units='mi') for point in rc2.generate_coords(num_coords=500): CircleMarker(location=point, radius=2, color='blue').add_to(map2) Circle(radius=8046, location=center_point, fill=False, color='red').add_to(map2) map2.save(r'C:\Users\User\Desktop\smaller_map_mi.html') # Making our third map, a 52,800ft circle with 500 points represented as a heat map. rc3 = RandomCoords(center=center_point, diameter=52800, units='ft') map3 = Map(location=center_point, zoom_start=12) plugins.HeatMap(data=rc3.generate_coords(num_coords=500)).add_to(map3) Circle(radius=8046, location=center_point, fill=False, color='red').add_to(map3) map3.save(r'C:\Users\User\Desktop\smaller_heat_map_ft.html') # Making a map using generate_paths. map4 = Map(location=center_point, zoom_start=12) rc4 = RandomCoords(center=center_point, diameter=10, units='km') for path in rc4.generate_paths(num_paths=15, num_points_range=(10, 50), circle_radius_range=(15, 25)): for point in path: CircleMarker(location=point, radius=2, color='blue', fill=True, fill_opacity=1).add_to(map4) PolyLine(locations=path, color='blue', weight=1).add_to(map4) Circle(radius=5000, location=center_point, fill=False, color='red').add_to(map4) map4.save(r'C:\Users\User\Desktop\path_map_km.html') # Printing out the output for our RandomCoords objects as well as some sample output of the generate_coords function. print('Object \'rc\' __str__ returns:') print(rc, '\n') print('Object \'rc2\' __str__ returns:') print(rc2, '\n') print('Object \'rc3\' __str__ returns:') print(rc3, '\n') print('Object \'rc4\' __str__ returns:') print(rc4, '\n') print('rc.generate_coords(num_coords=5) returns:') print(rc.generate_coords(num_coords=5), '\n') print('rc2.generate_coords(num_coords=5) returns:') print(rc2.generate_coords(num_coords=5), '\n') print('rc3.generate_coords(num_coords=5) returns:') print(rc3.generate_coords(num_coords=5), '\n') print('rc4.generate_paths(num_paths=2, num_points_range=(10, 50), circle_radius_range=(15, 25)) returns:') print(rc4.generate_paths(num_paths=2, num_points_range=(10, 50), circle_radius_range=(15, 25)), '\n')
#ifndef AnalyticalErrorPropagation_H #define AnalyticalErrorPropagation_H #include "FWCore/Utilities/interface/Visibility.h" #include "TrackingTools/AnalyticalJacobians/interface/AnalyticalCurvilinearJacobian.h" #include "TrackingTools/TrajectoryParametrization/interface/GlobalTrajectoryParameters.h" #include "TrackingTools/TrajectoryState/interface/FreeTrajectoryState.h" #include "TrackingTools/TrajectoryState/interface/TrajectoryStateOnSurface.h" #include "TrackingTools/TrajectoryState/interface/SurfaceSideDefinition.h" class Surface; inline std::pair<TrajectoryStateOnSurface,double> analyticalErrorPropagation( const FreeTrajectoryState& startingState, const Surface& surface, SurfaceSideDefinition::SurfaceSide side, const GlobalTrajectoryParameters& destParameters, const double& s) { if unlikely(!startingState.hasError()) // return state without errors return std::pair<TrajectoryStateOnSurface,double>(TrajectoryStateOnSurface(destParameters,surface,side),s); // // compute jacobian // // FIXME: Compute mean B field between startingState and destParameters and pass it to analyticalJacobian //GlobalPoint xStart = startingState.position(); //GlobalPoint xDest = destParameters.position(); //GlobalVector h1 = destParameters.magneticFieldInInverseGeV(xStart); //GlobalVector h2 = destParameters.magneticFieldInInverseGeV(xDest); //GlobalVector h = 0.5*(h1+h2); //LogDebug("RungeKutta") << "AnalyticalErrorPropagation: The Fields are: " << h1 << ", " << h2 << ", " << h ; // AnalyticalCurvilinearJacobian analyticalJacobian(startingState.parameters(), destParameters.position(), destParameters.momentum(), s); auto const & jacobian = analyticalJacobian.jacobian(); return std::pair<TrajectoryStateOnSurface,double>( TrajectoryStateOnSurface(destParameters, ROOT::Math::Similarity(jacobian, startingState.curvilinearError().matrix()), surface,side), s); } #endif
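// Note (added): ROOT::Math::Similarity(J, C) computes J * C * J^T, i.e. the
// standard linearized error propagation C_dest = J * C_start * J^T, where J is
// the 5x5 analytical curvilinear Jacobian of the destination parameters with
// respect to the starting parameters, and C_start is the starting curvilinear
// covariance matrix obtained from startingState.curvilinearError().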
/* ======================================================== * * Londinium - premium responsive admin template * * ======================================================== * * File: application.js; * Description: General plugins and layout settings. * Version: 1.0 * * ======================================================== */ $(function() { /* # Data tables ================================================== */ //===== Default datatable =====// oTable = $('.datatable table').dataTable({ "bJQueryUI": false, "bAutoWidth": false, "sPaginationType": "full_numbers", "sDom": '<"datatable-header"fl><"datatable-scroll"t><"datatable-footer"ip>', "oLanguage": { "sSearch": "<span>Filter:</span> _INPUT_", "sLengthMenu": "<span>Show entries:</span> _MENU_", "oPaginate": { "sFirst": "First", "sLast": "Last", "sNext": ">", "sPrevious": "<" } } }); //===== Table with selectable rows =====// oTable = $('.datatable-selectable table').dataTable({ "bJQueryUI": false, "bAutoWidth": false, "sPaginationType": "full_numbers", "sDom": '<"datatable-header"Tfl><"datatable-scroll"t><"datatable-footer"ip>', "oLanguage": { "sSearch": "<span>Filter:</span> _INPUT_", "sLengthMenu": "<span>Show:</span> _MENU_", "oPaginate": { "sFirst": "First", "sLast": "Last", "sNext": ">", "sPrevious": "<" } }, "oTableTools": { "sRowSelect": "multi", "aButtons": [{ "sExtends": "collection", "sButtonText": "Tools <span class='caret'></span>", "sButtonClass": "btn btn-primary", "aButtons": ["select_all", "select_none"] }] } }); //===== Table with media elements =====// oTable = $('.datatable-media table').dataTable({ "bJQueryUI": false, "bAutoWidth": false, "sPaginationType": "full_numbers", "sDom": '<"datatable-header"fl><"datatable-scroll"t><"datatable-footer"ip>', "oLanguage": { "sSearch": "<span>Filter:</span> _INPUT_", "sLengthMenu": "<span>Show:</span> _MENU_", "oPaginate": { "sFirst": "First", "sLast": "Last", "sNext": ">", "sPrevious": "<" } }, "aoColumnDefs": [{ "bSortable": false, "aTargets": [0, 4] }] }); //===== Table with pager =====// oTable = $('.datatable-pager table').dataTable({ "bJQueryUI": false, "bAutoWidth": false, "sPaginationType": "two_button", "sDom": '<"datatable-header"fl><"datatable-scroll"t><"datatable-footer"ip>', "oLanguage": { "sSearch": "<span>Filter:</span> _INPUT_", "sLengthMenu": "<span>Show entries:</span> _MENU_", "oPaginate": { "sNext": "Next →", "sPrevious": "← Previous" } } }); //===== Table with tools =====// oTable = $('.datatable-tools table').dataTable({ "bJQueryUI": false, "bAutoWidth": true, "sPaginationType": "full_numbers", // "sDom": '<"datatable-header"Tfl><"datatable-scroll"t><"datatable-footer"ip>', // 修改上下面都有切換頁面功能 "sDom": '<"datatable-header"Tfl><"datatable-footer"ip><"datatable-scroll"t><"datatable-footer"ip>', "aaSorting": [ [0, "desc"] ], // 修改上下面都有切換頁面功能 "oLanguage": { "sSearch": "<span>查詢資料:</span> _INPUT_", "sLengthMenu": "<span>顯示筆數:</span> _MENU_", "oPaginate": { "sFirst": "最前", "sLast": "最後", "sNext": ">", "sPrevious": "<" } }, "oTableTools": { "sRowSelect": "single", // "sSwfPath": "media/swf/copy_csv_xls_pdf.swf", "aButtons": [ // "aButtons": [{ // "sExtends": "print", // "sButtonText": "列印", // "sButtonClass": "btn" //}] ] } }); //===== Table with custom sorting columns =====// oTable = $('.datatable-custom-sort table').dataTable({ "bJQueryUI": false, "bAutoWidth": false, "sPaginationType": "full_numbers", "sDom": '<"datatable-header"fl><"datatable-scroll"t><"datatable-footer"ip>', "oLanguage": { "sSearch": "<span>Filter:</span> _INPUT_", "sLengthMenu": "<span>Show:</span> 
_MENU_", "oPaginate": { "sFirst": "First", "sLast": "Last", "sNext": ">", "sPrevious": "<" } }, "aoColumnDefs": [{ "bSortable": false, "aTargets": [0, 1] }] }); //===== Table with invoices =====// oTable = $('.datatable-invoices table').dataTable({ "bJQueryUI": false, "bAutoWidth": false, "sPaginationType": "full_numbers", "sDom": '<"datatable-header"fl><"datatable-scroll"t><"datatable-footer"ip>', "oLanguage": { "sSearch": "<span>Filter:</span> _INPUT_", "sLengthMenu": "<span>Show:</span> _MENU_", "oPaginate": { "sFirst": "First", "sLast": "Last", "sNext": ">", "sPrevious": "<" } }, "aoColumnDefs": [{ "bSortable": false, "aTargets": [1, 6] }], "aaSorting": [ [0, 'desc'] ] }); //===== Table with tasks =====// oTable = $('.datatable-tasks table').dataTable({ "aaSorting": [], "bJQueryUI": false, "bAutoWidth": false, "sPaginationType": "full_numbers", "sDom": '<"datatable-header"fl><"datatable-scroll"t><"datatable-footer"ip>', "oLanguage": { "sSearch": "<span>Filter tasks:</span> _INPUT_", "sLengthMenu": "<span>Show tasks:</span> _MENU_", "oPaginate": { "sFirst": "First", "sLast": "Last", "sNext": ">", "sPrevious": "<" } }, "aoColumnDefs": [{ "bSortable": false, "aTargets": [5] }] }); //===== Datatable with tfoot column filters =====// var asInitVals = new Array(); var oTable = $('.datatable-add-row table').dataTable({ "bJQueryUI": false, "bAutoWidth": false, "sPaginationType": "full_numbers", "sDom": '<"datatable-header"fl><"datatable-scroll"t><"datatable-footer"ip>', "oLanguage": { "sSearch": "<span>Filter all:</span> _INPUT_", "sLengthMenu": "<span>Show entries:</span> _MENU_", "oPaginate": { "sFirst": "First", "sLast": "Last", "sNext": ">", "sPrevious": "<" } } }); $(".dataTables_wrapper tfoot input").keyup(function() { oTable.fnFilter(this.value, $(".dataTables_wrapper tfoot input").index(this)); }); //===== Adding placeholder to Datatable filter input field =====// $('.dataTables_filter input[type=text]').attr('placeholder', 'Type to filter...'); /* # Select2 dropdowns ================================================== */ //===== Datatable select =====// $(".dataTables_length select").select2({ minimumResultsForSearch: "-1" }); //===== Default select =====// $(".select").select2({ minimumResultsForSearch: "-1", width: 200 }); //===== Liquid select =====// $(".select-liquid").select2({ minimumResultsForSearch: "-1", width: "off" }); //===== Full width select =====// $(".select-full").select2({ minimumResultsForSearch: "-1", width: "100%" }); //===== Select with filter input =====// $(".select-search").select2({ width: 200 }); //===== Multiple select =====// $(".select-multiple").select2({ width: "100%" }); //===== Loading data select =====// $("#loading-data").select2({ placeholder: "Enter at least 1 character", allowClear: true, minimumInputLength: 1, query: function(query) { var data = { results: [] }, i, j, s; for (i = 1; i < 5; i++) { s = ""; for (j = 0; j < i; j++) { s = s + query.term; } data.results.push({ id: query.term + i, text: s }); } query.callback(data); } }); //===== Select with maximum =====// $(".maximum-select").select2({ maximumSelectionSize: 3, width: "100%" }); //===== Allow clear results select =====// $(".clear-results").select2({ placeholder: "Select a State", allowClear: true, width: 200 }); //===== Select with minimum =====// $(".minimum-select").select2({ minimumInputLength: 2, width: 200 }); //===== Multiple select with minimum =====// $(".minimum-multiple-select").select2({ minimumInputLength: 2, width: "100%" }); //===== Disabled select =====// 
$(".select-disabled").select2( "enable", false ); /* # Form Validation ================================================== */ $(".validate").validate({ errorPlacement: function(error, element) { if (element.parent().parent().attr("class") == "checker" || element.parent().parent().attr("class") == "choice") { error.appendTo(element.parent().parent().parent().parent().parent()); } else if (element.parent().parent().attr("class") == "checkbox" || element.parent().parent().attr("class") == "radio") { error.appendTo(element.parent().parent().parent()); } else { error.insertAfter(element); } }, rules: { minimum_characters: { required: true, minlength: 3 }, maximum_characters: { required: true, maxlength: 6 }, minimum_number: { required: true, min: 3 }, maximum_number: { required: true, max: 6 }, range: { required: true, range: [6, 16] }, email_field: { required: true, email: true }, url_field: { required: true, url: true }, date_field: { required: true, date: true }, digits_only: { required: true, digits: true }, enter_password: { required: true, minlength: 5 }, repeat_password: { required: true, minlength: 5, equalTo: "#enter_password" }, custom_message: "required", group_styled: { required: true, minlength: 2 }, group_unstyled: { required: true, minlength: 2 }, agree: "required" }, messages: { custom_message: { required: "Bazinga! This message is editable", }, agree: "Please accept our policy" }, success: function(label) { label.text('Success!').addClass('valid'); } }); /* # Bootstrap Multiselects ================================================== */ //===== Default multiselect =====// $('.multi-select').multiselect({ buttonClass: 'btn btn-default', onChange: function(element, checked) { $.uniform.update(); } }); //===== Multiselect with colored button =====// $('.multi-select-color').multiselect({ buttonClass: 'btn btn-info', onChange: function(element, checked) { $.uniform.update(); } }); //===== Multiselect with "Select All" option =====// $('.multi-select-all').multiselect({ buttonClass: 'btn btn-default', includeSelectAllOption: true, onChange: function(element, checked) { $.uniform.update(); } }); //===== onChange function =====// $('.multi-select-onchange').multiselect({ buttonClass: 'btn btn-default', onChange: function(element, checked) { $.uniform.update(); $.jGrowl('Change event invoked!', { header: 'Update', position: 'center', life: 1500 }); } }); //===== Right aligned multiselect dropdown =====// $('.multi-select-right').multiselect({ buttonClass: 'btn btn-default', dropRight: true, onChange: function(element, checked) { $.uniform.update(); } }); //===== Search field select =====// $('.multi-select-search').multiselect({ buttonClass: 'btn btn-link btn-lg btn-icon', dropRight: true, buttonText: function(options) { if (options.length == 0) { return '<b class="caret"></b>'; } else { return ' <b class="caret"></b>'; } }, onChange: function(element, checked) { $.uniform.update(); } }); /* # jQuery UI Components ================================================== */ //===== jQuery UI Autocomplete =====// var availableTags = [ "ActionScript", "AppleScript", "Asp", "BASIC", "C", "C++", "Clojure", "COBOL", "ColdFusion", "Erlang", "Fortran", "Groovy", "Haskell", "Java", "JavaScript", "Lisp", "Perl", "PHP", "Python", "Ruby", "Scala", "Scheme" ]; $(".autocomplete").autocomplete({ source: availableTags }); //===== Jquery UI sliders =====// $("#default-slider").slider(); $("#increments-slider").slider({ value: 100, min: 0, max: 500, step: 50, slide: function(event, ui) { 
$("#donation-amount").val("$" + ui.value); } }); $("#donation-amount").val("$" + $("#increments-slider").slider("value")); $("#range-slider, #range-slider1").slider({ range: true, min: 0, max: 500, values: [75, 300], slide: function(event, ui) { $("#price-amount, #price-amount1").val("$" + ui.values[0] + " - $" + ui.values[1]); } }); $("#price-amount, #price-amount1").val("$" + $("#range-slider, #range-slider1").slider("values", 0) + " - $" + $("#range-slider, #range-slider1").slider("values", 1)); $("#slider-range-min, #slider-range-min1").slider({ range: "min", value: 37, min: 1, max: 700, slide: function(event, ui) { $("#min-amount, #min-amount1").val("$" + ui.value); } }); $("#min-amount, #min-amount1").val("$" + $("#slider-range-min, #slider-range-min1").slider("value")); $("#slider-range-max, #slider-range-max1").slider({ range: "max", min: 1, max: 10, value: 2, slide: function(event, ui) { $("#max-amount, #max-amount1").val(ui.value); } }); $("#max-amount, #max-amount1").val($("#slider-range-max, #slider-range-max1").slider("value")); //===== Spinner options =====// $("#spinner-default").spinner(); $("#spinner-decimal").spinner({ step: 0.01, numberFormat: "n" }); $("#culture").change(function() { var current = $("#spinner-decimal").spinner("value"); Globalize.culture($(this).val()); $("#spinner-decimal").spinner("value", current); }); $("#currency").change(function() { $("#spinner-currency").spinner("option", "culture", $(this).val()); }); $("#spinner-currency").spinner({ min: 5, max: 2500, step: 25, start: 1000, numberFormat: "C" }); $("#spinner-overflow").spinner({ spin: function(event, ui) { if (ui.value > 10) { $(this).spinner("value", -10); return false; } else if (ui.value < -10) { $(this).spinner("value", 10); return false; } } }); $.widget("ui.timespinner", $.ui.spinner, { options: { // seconds step: 60 * 1000, // hours page: 60 }, _parse: function(value) { if (typeof value === "string") { // already a timestamp if (Number(value) == value) { return Number(value); } return +Globalize.parseDate(value); } return value; }, _format: function(value) { return Globalize.format(new Date(value), "t"); } }); $("#spinner-time").timespinner(); $("#culture-time").change(function() { var current = $("#spinner-time").timespinner("value"); Globalize.culture($(this).val()); $("#spinner-time").timespinner("value", current); }); //===== jQuery UI Datepicker =====// $(".datepicker").datepicker({ showOtherMonths: true, dateFormat: 'yy-mm-dd' }); $(".datepicker-inline").datepicker({ showOtherMonths: true }); $(".datepicker-multiple").datepicker({ showOtherMonths: true, numberOfMonths: 3 }); $(".datepicker-trigger").datepicker({ showOn: "button", buttonImage: "images/interface/datepicker_trigger.png", buttonImageOnly: true, showOtherMonths: true }); $(".from-date").datepicker({ defaultDate: "+1w", numberOfMonths: 3, showOtherMonths: true, onClose: function(selectedDate) { $(".to-date").datepicker("option", "minDate", selectedDate); } }); $(".to-date").datepicker({ defaultDate: "+1w", numberOfMonths: 3, showOtherMonths: true, onClose: function(selectedDate) { $(".from-date").datepicker("option", "maxDate", selectedDate); } }); $(".datepicker-restricted").datepicker({ minDate: -20, maxDate: "+1M +10D", showOtherMonths: true }); /* # Bootstrap Plugins ================================================== */ //===== Tooltip =====// $('.tip').tooltip(); //===== Popover =====// $("[data-toggle=popover]").popover().click(function(e) { e.preventDefault() }); //===== Loading button =====// 
$('.btn-loading').click(function() { var btn = $(this) btn.button('loading') setTimeout(function() { btn.button('reset') }, 3000) }); //===== Add fadeIn animation to dropdown =====// $('.dropdown, .btn-group').on('show.bs.dropdown', function(e) { $(this).find('.dropdown-menu').first().stop(true, true).fadeIn(100); }); //===== Add fadeOut animation to dropdown =====// $('.dropdown, .btn-group').on('hide.bs.dropdown', function(e) { $(this).find('.dropdown-menu').first().stop(true, true).fadeOut(100); }); //===== Prevent dropdown from closing on click =====// $('.popup').click(function(e) { e.stopPropagation(); }); /* # Form Related Plugins ================================================== */ //===== Pluploader (multiple file uploader) =====// $(".multiple-uploader").pluploadQueue({ runtimes: 'html5, html4', url: '../upload.php', chunk_size: '1mb', unique_names: true, filters: { max_file_size: '10mb', mime_types: [{ title: "Image files", extensions: "jpg,gif,png" }, { title: "Zip files", extensions: "zip" }] }, resize: { width: 320, height: 240, quality: 90 } }); //===== WYSIWYG editor =====// $('.editor').wysihtml5({ stylesheets: "public/css/wysihtml5/wysiwyg-color.css" }); //===== Elastic textarea =====// $('.elastic').autosize(); //===== Dual select boxes =====// $.configureBoxes(); //===== Input limiter =====// $('.limited').inputlimiter({ limit: 100, boxId: 'limit-text', boxAttach: false }); //===== Tags Input =====// $('.tags').tagsInput({ width: '100%' }); $('.tags-autocomplete').tagsInput({ width: '100%', autocomplete_url: 'tags_autocomplete.html' }); //===== Form elements styling =====// $(".styled, .multiselect-container input").uniform({ radioClass: 'choice', selectAutoWidth: false }); /* # Interface Related Plugins ================================================== */ //===== Sparkline charts =====// $('.bar-danger').sparkline( 'html', { type: 'bar', barColor: '#D65C4F', height: '35px', barWidth: "5px", barSpacing: "2px", zeroAxis: "false" } ); $('.bar-success').sparkline( 'html', { type: 'bar', barColor: '#65B688', height: '35px', barWidth: "5px", barSpacing: "2px", zeroAxis: "false" } ); $('.bar-primary').sparkline( 'html', { type: 'bar', barColor: '#32434D', height: '35px', barWidth: "5px", barSpacing: "2px", zeroAxis: "false" } ); $('.bar-warning').sparkline( 'html', { type: 'bar', barColor: '#EE8366', height: '35px', barWidth: "5px", barSpacing: "2px", zeroAxis: "false" } ); $('.bar-info').sparkline( 'html', { type: 'bar', barColor: '#3CA2BB', height: '35px', barWidth: "5px", barSpacing: "2px", zeroAxis: "false" } ); $('.bar-default').sparkline( 'html', { type: 'bar', barColor: '#ffffff', height: '35px', barWidth: "5px", barSpacing: "2px", zeroAxis: "false" } ); /* Activate hidden Sparkline on tab show */ $('a[data-toggle="tab"]').on('shown.bs.tab', function() { $.sparkline_display_visible(); }); /* Activate hidden Sparkline */ $('.collapse').on('shown.bs.collapse', function() { $.sparkline_display_visible(); }); //===== Fancy box (lightbox plugin) =====// $(".lightbox").fancybox({ padding: 1 }); //===== DateRangePicker plugin =====// $('#reportrange').daterangepicker({ startDate: moment().subtract('days', 29), endDate: moment(), minDate: '01/01/2012', maxDate: '12/31/2014', dateLimit: { days: 60 }, ranges: { 'Today': [moment(), moment()], 'Yesterday': [moment().subtract('days', 1), moment().subtract('days', 1)], 'Last 7 Days': [moment().subtract('days', 6), moment()], 'This Month': [moment().startOf('month'), moment().endOf('month')], 'Last Month': 
[moment().subtract('month', 1).startOf('month'), moment().subtract('month', 1).endOf('month')] }, opens: 'left', buttonClasses: ['btn'], applyClass: 'btn-small btn-info btn-block', cancelClass: 'btn-small btn-default btn-block', format: 'MM/DD/YYYY', separator: ' to ', locale: { applyLabel: 'Submit', fromLabel: 'From', toLabel: 'To', customRangeLabel: 'Custom Range', daysOfWeek: ['Su', 'Mo', 'Tu', 'We', 'Th', 'Fr', 'Sa'], monthNames: ['January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December'], firstDay: 1 } }, function(start, end) { $.jGrowl('A date range was changed', { header: 'Update', position: 'center', life: 1500 }); $('#reportrange .date-range').html(start.format('<i>D</i> <b><i>MMM</i> <i>YYYY</i></b>') + '<em> - </em>' + end.format('<i>D</i> <b><i>MMM</i> <i>YYYY</i></b>')); } ); /* Custom date display layout */ $('#reportrange .date-range').html(moment().subtract('days', 29).format('<i>D</i> <b><i>MMM</i> <i>YYYY</i></b>') + '<em> - </em>' + moment().format('<i>D</i> <b><i>MMM</i> <i>YYYY</i></b>')); $('#reportrange').on('show', function(ev, picker) { $('.range').addClass('range-shown'); }); $('#reportrange').on('hide', function(ev, picker) { $('.range').removeClass('range-shown'); }); //===== Bootstrap switches =====// $('.switch').bootstrapSwitch(); //===== Fullcalendar =====// var date = new Date(); var d = date.getDate(); var m = date.getMonth(); var y = date.getFullYear(); var calendar = $('.fullcalendar').fullCalendar({ header: { left: 'prev,next,today', center: 'title', right: 'month,agendaWeek,agendaDay' }, selectable: true, selectHelper: true, select: function(start, end, allDay) { var title = prompt('Event Title:'); if (title) { calendar.fullCalendar('renderEvent', { title: title, start: start, end: end, allDay: allDay }, true // make the event "stick" ); } calendar.fullCalendar('unselect'); }, editable: true, events: [{ title: 'All Day Event', start: new Date(y, m, 1) }, { title: 'Long Event', start: new Date(y, m, d - 5), end: new Date(y, m, d - 2) }, { id: 999, title: 'Repeating Event', start: new Date(y, m, d - 3, 16, 0), allDay: false }, { id: 999, title: 'Repeating Event', start: new Date(y, m, d + 4, 16, 0), allDay: false }, { title: 'Meeting', start: new Date(y, m, d, 10, 30), allDay: false }, { title: 'Lunch', start: new Date(y, m, d, 12, 0), end: new Date(y, m, d, 14, 0), allDay: false }, { title: 'Birthday Party', start: new Date(y, m, d + 1, 19, 0), end: new Date(y, m, d + 1, 22, 30), allDay: false }] }); /* Render hidden calendar on tab show */ $('a[data-toggle="tab"]').on('shown.bs.tab', function() { $('.fullcalendar').fullCalendar('render'); }); //===== Code prettifier =====// window.prettyPrint && prettyPrint(); //===== Time pickers =====// $('#defaultValueExample, #time').timepicker({ 'scrollDefaultNow': true }); $('#durationExample').timepicker({ 'minTime': '2:00pm', 'maxTime': '11:30pm', 'showDuration': true }); $('#onselectExample').timepicker(); $('#onselectExample').on('changeTime', function() { $('#onselectTarget').text($(this).val()); }); $('#timeformatExample1, #timeformatExample3').timepicker({ 'timeFormat': 'H:i:s' }); $('#timeformatExample2, #timeformatExample4').timepicker({ 'timeFormat': 'h:i A' }); //===== Color picker =====// $('.color-picker').colorpicker(); $('.color-picker-hex').colorpicker({ format: 'hex' }); /* Change navbar background color */ var topStyle = $('.navbar-inverse')[0].style; $('.change-navbar-color').colorpicker().on('changeColor', function(ev) { 
topStyle.background = ev.color.toHex(); }); //===== jGrowl notifications defaults =====// $.jGrowl.defaults.closer = false; $.jGrowl.defaults.easing = 'easeInOutCirc'; //===== Collapsible navigation =====// $('.sidebar-wide li:not(.disabled) .expand, .sidebar-narrow .navigation > li ul .expand').collapsible({ defaultOpen: 'second-level,third-level', cssOpen: 'level-opened', cssClose: 'level-closed', speed: 150 }); /* # Default Layout Options ================================================== */ //===== Appending sidebar to different div's depending on the window width =====// $(window).resize(function() { if ($(window).width() < 992) { $('.sidebar').appendTo('.navbar'); } else { $('.sidebar').appendTo('.page-container'); } }).resize(); //===== Panel Options (collapsing, closing) =====// /* Collapsing */ $('[data-panel=collapse]').click(function(e) { e.preventDefault(); var $target = $(this).parent().parent().next('div'); if ($target.is(':visible')) { $(this).children('i').removeClass('icon-arrow-up9'); $(this).children('i').addClass('icon-arrow-down9'); } else { $(this).children('i').removeClass('icon-arrow-down9'); $(this).children('i').addClass('icon-arrow-up9'); } $target.slideToggle(200); }); /* Closing */ $('[data-panel=close]').click(function(e) { e.preventDefault(); var $panelContent = $(this).parent().parent().parent(); $panelContent.slideUp(200).remove(200); }); //===== Showing spinner animation demo =====// $('.run-first').click(function() { $('body').append('<div class="overlay"><div class="opacity"></div><i class="icon-spinner2 spin"></i></div>'); $('.overlay').fadeIn(150); window.setTimeout(function() { $('.overlay').fadeOut(150, function() { $(this).remove(); }); }, 5000); }); $('.run-second').click(function() { $('body').append('<div class="overlay"><div class="opacity"></div><i class="icon-spinner3 spin"></i></div>'); $('.overlay').fadeIn(150); window.setTimeout(function() { $('.overlay').fadeOut(150, function() { $(this).remove(); }); }, 5000); }); $('.run-third').click(function() { $('body').append('<div class="overlay"><div class="opacity"></div><i class="icon-spinner7 spin"></i></div>'); $('.overlay').fadeIn(150); window.setTimeout(function() { $('.overlay').fadeOut(150, function() { $(this).remove(); }); }, 5000); }); //===== Hiding sidebar =====// $('.sidebar-toggle').click(function() { $('.page-container').toggleClass('sidebar-hidden'); }); //===== Disabling main navigation links =====// $('.navigation li.disabled a, .navbar-nav > .disabled > a').click(function(e) { e.preventDefault(); }); //===== Toggling active class in accordion groups =====// $('.panel-trigger').click(function(e) { e.preventDefault(); $(this).toggleClass('active'); }); });
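/* A markup sketch (added): the [data-panel] handlers bound above assume
   roughly this structure; class names other than the icon-* ones used by the
   script are illustrative:

   <div class="panel">
       <div class="panel-heading">
           <h5>Panel title</h5>
           <span class="panel-tools">
               <a href="#" data-panel="collapse"><i class="icon-arrow-up9"></i></a>
               <a href="#" data-panel="close"><i class="icon-close"></i></a>
           </span>
       </div>
       <div class="panel-body">Collapses via slideToggle, closes via slideUp.</div>
   </div>

   The collapse trigger walks parent().parent().next('div'), so the anchor must
   sit two levels below the heading whose next sibling div is the panel body. */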
#pragma once

#include <memory>

// Abstract base for deleters dispatched through a vtable, so the concrete
// deletion policy can be chosen at runtime.
template<class T>
class UPtrVirtualDeleterBase {
public:
    UPtrVirtualDeleterBase() {}
    virtual ~UPtrVirtualDeleterBase() {}
    virtual void operator()(T *obj) = 0;
};

// Value-type deleter that owns a polymorphic deleter and forwards to it.
template<class T>
struct UPtrVirtualDeleter {
    std::unique_ptr<UPtrVirtualDeleterBase<T>> virtDeleter;
    void operator()(T *obj) { (*this->virtDeleter)(obj); }
};

// unique_ptr whose deletion behaviour is virtual; inherit the base class
// constructors so it can be constructed like a plain std::unique_ptr.
template<class T>
class unique_ptr_v : public std::unique_ptr<T, UPtrVirtualDeleter<T>> {
public:
    using std::unique_ptr<T, UPtrVirtualDeleter<T>>::unique_ptr;
};
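// A usage sketch (added): FileCloser and the FILE* payload are illustrative,
// not part of the original header. The deletion policy is chosen at runtime
// and carried through the vtable above.
//
// #include <cstdio>
//
// struct FileCloser : UPtrVirtualDeleterBase<FILE> {
//     void operator()(FILE *f) override { if (f) std::fclose(f); }
// };
//
// unique_ptr_v<FILE> openLog(const char *path) {
//     UPtrVirtualDeleter<FILE> d{std::make_unique<FileCloser>()};
//     return unique_ptr_v<FILE>{std::fopen(path, "w"), std::move(d)};
// }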
import getpass import os import sys from datetime import datetime from time import sleep import smtplib from email.mime.text import MIMEText import traceback from subprocess import check_output from os.path import exists, getctime from .utils import is_local, hostname log_fpath = None project_name = None project_fpath = None proc_name = None is_debug = False is_silent = False smtp_host = None # set up in source/config.py and system_info.yaml my_address = 'Vlad.Saveliev@astrazeneca.com' address = None error_msgs = [] warning_msgs = [] critical_msgs = [] def init(is_debug_=None, log_fpath_=None, save_previous=False, address_=None): if is_debug_: global is_debug is_debug = is_debug_ if address_: global address address = address_ if log_fpath_: set_log_path(log_fpath_, save_previous=save_previous) info(hostname) with open(os.devnull, 'w') as devnull: username = "NOUSER" try: username = getpass.getuser() username = check_output('finger $(whoami) | head -n1', shell=True, stderr=devnull).decode("utf-8") username = username.strip() except: pass info(username) info() info(' '.join(sys.argv)) info() info('-' * 70) past_msgs = [] def set_log_path(log_fpath_, save_previous=False): assert log_fpath_ global log_fpath, past_msgs log_fpath = log_fpath_ if save_previous: swap_file(log_fpath) for msg in past_msgs: _write_to_file(msg) past_msgs = [] def set_smtp_host(smtp_host_): if smtp_host_: global smtp_host smtp_host = smtp_host_ debug('Set smtp host ' + smtp_host) def timestamp(): return datetime.now().strftime("%Y-%m-%d %H:%M:%S") def step_greetings(name): info() info('-' * 70) info(name) info('-' * 70) def info(msg='', ending='\n', print_date=True, severity='info'): _log(sys.stdout, msg, ending, print_date, severity=severity) def debug(msg='', ending='\n', print_date=True, severity='debug'): _log(sys.stdout, '[DEBUG] ' + msg, ending, print_date, severity=severity) def warn(msg='', ending='\n', print_date=True, severity='warning'): _log(sys.stderr, msg, ending, print_date, severity=severity) def silent_err(msg='', ending='\n', print_date=True, severity='silent_err'): warn(msg, ending, print_date, severity=severity) def err(msg='', ending='\n', print_date=True, severity='error'): warn(msg, ending, print_date, severity=severity) error = err def critical(msg=''): if isinstance(msg, str): err(f'ERROR: {msg}', severity='critical') raise CriticalError(msg) def send_email(msg_other='', subj='', only_me=False, addr_by_username=None, addr=None): if not msg_other or not smtp_host: return debug('Emailing using smtp host ' + smtp_host) username = getpass.getuser() other_address = None if not only_me: if addr: other_address = addr elif address: other_address = address elif addr_by_username and username in addr_by_username: other_address = addr_by_username[username] if other_address == my_address: other_address = None if not other_address and not my_address: return msg_other += '\n' msg_other += '\n' msg_other += 'Ran by ' + username + '\n' msg_other += '\n' if critical_msgs: msg_other += 'Critical errors during the processing:\n' for m in critical_msgs: msg_other += ' ' + m + '\n' msg_other += '\n' if error_msgs: if critical_msgs: msg_other += 'Other e' else: msg_other += 'E' msg_other += 'rrors during the processing:\n' for m in error_msgs: msg_other += ' ' + m + '\n' msg_me = msg_other[:] if warning_msgs: msg_me += 'Warnings during the processing:\n' for m in warning_msgs: msg_me += ' ' + m + '\n' if not subj: subj = '' if project_name: subj += project_name else: subj += 'Reporting' if proc_name: subj += ' - ' + 
proc_name msg_other = MIMEText(msg_other) msg_me = MIMEText(msg_me) msg_other['Subject'] = msg_me['Subject'] = subj msg_other['From'] = msg_me['From'] = 'klpf990@rask.usbod.astrazeneca.com' msg_other['To'] = other_address msg_me['To'] = my_address def try_send(host, msg_): s = smtplib.SMTP(host) s.sendmail(msg_['From'], msg_['To'].split(','), msg_.as_string()) s.quit() # info('Mail sent to ' + msg_['To'] + ' using ' + host) def print_msg(): for line in msg_other.as_string().split('\n'): sys.stdout.write(' | ' + line + '\n') sys.stdout.write('\n') msgs = [] if other_address: msgs.append(msg_other) if my_address: msgs.append(msg_me) for msg in msgs: try: try_send(smtp_host, msg) except smtplib.SMTPException: warn('Could not send email using the sever "' + smtp_host + '" with exception: ') warn(' ' + '; '.join(traceback.format_exception_only(sys.exc_info()[0], sys.exc_info()[1]))) if smtp_host != 'localhost': warn('Trying "localhost" as a server...') try: try_send('localhost', msg) except smtplib.SMTPException: warn('Could not send email using the sever "localhost" with exception: ') warn(' ' + '; '.join(traceback.format_exception_only(sys.exc_info()[0], sys.exc_info()[1]))) print_msg() else: print_msg() class CriticalError(Exception): pass def _log(out, msg='', ending='\n', print_date=True, severity=None): if is_silent and severity != 'critical' and severity != 'error': return if severity == 'debug' and not is_debug: return # Keeping track of all severe log messages if severity == 'critical': critical_msgs.append(msg) if severity == 'error': error_msgs.append(msg) if severity == 'warning': warning_msgs.append(msg) msg_e = msg + ending t_msg_e = timestamp() + ' ' + msg_e # Writing logs to log file if log_fpath: _write_to_file(t_msg_e) else: past_msgs.append(t_msg_e) # Writing to stdout will error out of it can't encode messages properly into ascii, so re-converting msg_e_b = msg_e.encode(encoding='ascii', errors='replace').decode(encoding='ascii') t_msg_e_b = t_msg_e.encode(encoding='ascii', errors='replace').decode(encoding='ascii') # Finally, logging to stdout out.write(t_msg_e_b if print_date else msg_e_b) # For messages to appear in a correct order in output sys.stdout.flush() sys.stderr.flush() if is_debug and is_local(): sleep(0.01) def _write_to_file(text): if log_fpath: try: open(log_fpath, 'a').write(text) except IOError: sys.stderr.write('Logging: cannot append to ' + log_fpath + '\n') try: open(log_fpath, 'w').write(text) except IOError: sys.stderr.write('Logging: cannot write to ' + log_fpath + '\n') def swap_file(fpath): if exists(fpath): last_changed = datetime.fromtimestamp(getctime(fpath)) prev_fpath = fpath + '_' + last_changed.strftime('%Y_%m_%d_%H_%M_%S') try: if os.path.isfile(prev_fpath): os.remove(prev_fpath) if not os.path.isfile(prev_fpath): os.rename(fpath, prev_fpath) except OSError: pass
(function () { 'use strict'; angular .module('erpApp') .factory('JhiLanguageService', JhiLanguageService); JhiLanguageService.$inject = ['$q', '$http', '$translate', 'LANGUAGES']; function JhiLanguageService ($q, $http, $translate, LANGUAGES) { var service = { getAll: getAll, getCurrent: getCurrent }; return service; function getAll () { var deferred = $q.defer(); deferred.resolve(LANGUAGES); return deferred.promise; } function getCurrent () { var deferred = $q.defer(); var language = $translate.storage().get('NG_TRANSLATE_LANG_KEY'); deferred.resolve(language); return deferred.promise; } } })();
import os


def error_CD():
    print('A major error (CD=Config_Damage) has been detected')
    print('Fixing the error...')
    # Remove the damaged config only if it still exists, so a missing
    # file does not raise FileNotFoundError here.
    if os.path.exists('bin/config.json'):
        os.remove('bin/config.json')
    input('Please proceed by pressing Enter and reopening the app')
    quit()
import React from 'react' const AboutPage = () => ( <> <h1>About Us</h1> <h2>Our Mission</h2> <p>Our mission is to train the world’s workforce in the careers of the future. We partner with leading technology companies to learn how technology is transforming industries, and teach the critical tech skills that companies are looking for in their workforce. With our powerful and flexible digital education platform, even the busiest learners can prepare themselves to take on the most in-demand tech roles. </p> </> ) export default AboutPage;
ace.define("ace/snippets/plain_text", ["require", "exports", "module"], function (require, exports, module) { "use strict"; exports.snippetText = undefined; exports.scope = "plain_text"; }); (function () { ace.require(["ace/snippets/plain_text"], function (m) { if (typeof module == "object" && typeof exports == "object" && module) { module.exports = m; } }); })();
from django.db import models
from django.conf import settings
from django.utils import timezone
from django.urls import reverse
from django.utils.text import slugify
from django.utils.formats import localize_input, sanitize_separators
from django.core.validators import MaxLengthValidator, RegexValidator
# from carts.models import Cart
# from purchases.models import Purchase

UserModel = getattr(settings, 'AUTH_USER_MODEL')

# The regex matches the validation message: digits and commas only.
validate_price = RegexValidator('^[0-9,]+$', 'Only numbers and commas are allowed.')


class Merchandise(models.Model):
    title = models.CharField(max_length=60)
    slug = models.SlugField(max_length=90, unique=True, editable=False)
    description = models.TextField(validators=[MaxLengthValidator(limit_value=53)])
    price = models.CharField(max_length=6, validators=[validate_price])
    price_dec = models.DecimalField(max_digits=6, decimal_places=2, default=0.00)
    merchant = models.ForeignKey(UserModel, on_delete=models.CASCADE, related_name='merchandises')
    product_image = models.URLField(max_length=255, default='https://via.placeholder.com/150', blank=True)
    on_stock = models.BooleanField(default=True)
    created_date = models.DateTimeField(default=timezone.now)
    updated_date = models.DateTimeField(auto_now=True)

    class Meta:
        ordering = ['-created_date']

    def __str__(self):
        return '{}'.format(self.title)

    def get_absolute_url(self):
        return reverse('merchandises_detail', kwargs={'slug': self.slug})

    def save(self, *args, **kwargs):
        # The user-facing price uses a comma as the decimal separator
        # (e.g. "12,50"); normalize it to a dot and mirror it into the
        # decimal field. Thousands separators are not supported.
        p = self.price.replace(",", ".")
        price = "{:.2f}".format(float(p))
        self.price_dec = price
        return super().save(*args, **kwargs)

# c = Merchandise.objects.filter(carts__purchases__buyer__username='testuser1').update(on_stock=False)
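The comma-to-dot normalization in Merchandise.save() can be checked in isolation. A small sketch of the same logic; normalize_price is a hypothetical helper, not part of the model:

from decimal import Decimal

def normalize_price(price_str):
    # Mirrors Merchandise.save: comma is the decimal separator,
    # thousands separators are not supported.
    return Decimal("{:.2f}".format(float(price_str.replace(",", "."))))

assert normalize_price("12,50") == Decimal("12.50")
assert normalize_price("7") == Decimal("7.00")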
# /*
# * Copyright 2010-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# *
# * Licensed under the Apache License, Version 2.0 (the "License").
# * You may not use this file except in compliance with the License.
# * A copy of the License is located at
# *
# *  http://aws.amazon.com/apache2.0
# *
# * or in the "license" file accompanying this file. This file is distributed
# * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# * express or implied. See the License for the specific language governing
# * permissions and limitations under the License.
# */

import json
import logging
import uuid
from threading import Timer, Lock, Thread


class _shadowRequestToken:
    URN_PREFIX_LENGTH = 9

    def getNextToken(self):
        return uuid.uuid4().urn[self.URN_PREFIX_LENGTH:]  # We only need the uuid digits, not the urn prefix


def _validateJSON(jsonString):
    try:
        json.loads(jsonString)
    except ValueError:
        return False
    return True


class _basicJSONParser:
    def setString(self, srcString):
        self._rawString = srcString
        self._dictionaryObject = None

    def regenerateString(self):
        return json.dumps(self._dictionaryObject)

    def getAttributeValue(self, srcAttributeKey):
        return self._dictionaryObject.get(srcAttributeKey)

    def setAttributeValue(self, srcAttributeKey, srcAttributeValue):
        self._dictionaryObject[srcAttributeKey] = srcAttributeValue

    def validateJSON(self):
        try:
            self._dictionaryObject = json.loads(self._rawString)
        except ValueError:
            return False
        return True


class deviceShadow:
    _logger = logging.getLogger(__name__)

    def __init__(self, srcShadowName, srcIsPersistentSubscribe, srcShadowManager):
        """

        The class that denotes a local/client-side device shadow instance.
        Users can perform shadow operations on this instance to retrieve and modify the
        corresponding shadow JSON document in AWS IoT Cloud. The following shadow operations
        are available:

        - Get

        - Update

        - Delete

        - Listen on delta

        - Cancel listening on delta

        This is returned from :code:`AWSIoTPythonSDK.MQTTLib.AWSIoTMQTTShadowClient.createShadowHandlerWithName` function call.
        No need to call directly from user scripts.

""" if srcShadowName is None or srcIsPersistentSubscribe is None or srcShadowManager is None: raise TypeError("None type inputs detected.") self._shadowName = srcShadowName # Tool handler self._shadowManagerHandler = srcShadowManager self._basicJSONParserHandler = _basicJSONParser() self._tokenHandler = _shadowRequestToken() # Properties self._isPersistentSubscribe = srcIsPersistentSubscribe self._lastVersionInSync = -1 # -1 means not initialized self._isGetSubscribed = False self._isUpdateSubscribed = False self._isDeleteSubscribed = False self._shadowSubscribeCallbackTable = dict() self._shadowSubscribeCallbackTable["get"] = None self._shadowSubscribeCallbackTable["delete"] = None self._shadowSubscribeCallbackTable["update"] = None self._shadowSubscribeCallbackTable["delta"] = None self._shadowSubscribeStatusTable = dict() self._shadowSubscribeStatusTable["get"] = 0 self._shadowSubscribeStatusTable["delete"] = 0 self._shadowSubscribeStatusTable["update"] = 0 self._tokenPool = dict() self._dataStructureLock = Lock() def _doNonPersistentUnsubscribe(self, currentAction): self._shadowManagerHandler.basicShadowUnsubscribe(self._shadowName, currentAction) self._logger.info("Unsubscribed to " + currentAction + " accepted/rejected topics for deviceShadow: " + self._shadowName) def generalCallback(self, client, userdata, message): # In Py3.x, message.payload comes in as a bytes(string) # json.loads needs a string input with self._dataStructureLock: currentTopic = message.topic currentAction = self._parseTopicAction(currentTopic) # get/delete/update/delta currentType = self._parseTopicType(currentTopic) # accepted/rejected/delta payloadUTF8String = message.payload.decode('utf-8') # get/delete/update: Need to deal with token, timer and unsubscribe if currentAction in ["get", "delete", "update"]: # Check for token self._basicJSONParserHandler.setString(payloadUTF8String) if self._basicJSONParserHandler.validateJSON(): # Filter out invalid JSON currentToken = self._basicJSONParserHandler.getAttributeValue(u"clientToken") if currentToken is not None: self._logger.debug("shadow message clientToken: " + currentToken) if currentToken is not None and currentToken in self._tokenPool.keys(): # Filter out JSON without the desired token # Sync local version when it is an accepted response self._logger.debug("Token is in the pool. Type: " + currentType) if currentType == "accepted": incomingVersion = self._basicJSONParserHandler.getAttributeValue(u"version") # If it is get/update accepted response, we need to sync the local version if incomingVersion is not None and incomingVersion > self._lastVersionInSync and currentAction != "delete": self._lastVersionInSync = incomingVersion # If it is a delete accepted, we need to reset the version else: self._lastVersionInSync = -1 # The version will always be synced for the next incoming delta/GU-accepted response # Cancel the timer and clear the token self._tokenPool[currentToken].cancel() del self._tokenPool[currentToken] # Need to unsubscribe? 
self._shadowSubscribeStatusTable[currentAction] -= 1 if not self._isPersistentSubscribe and self._shadowSubscribeStatusTable.get(currentAction) <= 0: self._shadowSubscribeStatusTable[currentAction] = 0 processNonPersistentUnsubscribe = Thread(target=self._doNonPersistentUnsubscribe, args=[currentAction]) processNonPersistentUnsubscribe.start() # Custom callback if self._shadowSubscribeCallbackTable.get(currentAction) is not None: processCustomCallback = Thread(target=self._shadowSubscribeCallbackTable[currentAction], args=[payloadUTF8String, currentType, currentToken]) processCustomCallback.start() # delta: Watch for version else: currentType += "/" + self._parseTopicShadowName(currentTopic) # Sync local version self._basicJSONParserHandler.setString(payloadUTF8String) if self._basicJSONParserHandler.validateJSON(): # Filter out JSON without version incomingVersion = self._basicJSONParserHandler.getAttributeValue(u"version") if incomingVersion is not None and incomingVersion > self._lastVersionInSync: self._lastVersionInSync = incomingVersion # Custom callback if self._shadowSubscribeCallbackTable.get(currentAction) is not None: processCustomCallback = Thread(target=self._shadowSubscribeCallbackTable[currentAction], args=[payloadUTF8String, currentType, None]) processCustomCallback.start() def _parseTopicAction(self, srcTopic): ret = None fragments = srcTopic.split('/') if fragments[5] == "delta": ret = "delta" else: ret = fragments[4] return ret def _parseTopicType(self, srcTopic): fragments = srcTopic.split('/') return fragments[5] def _parseTopicShadowName(self, srcTopic): fragments = srcTopic.split('/') return fragments[2] def _timerHandler(self, srcActionName, srcToken): with self._dataStructureLock: # Don't crash if we try to remove an unknown token if srcToken not in self._tokenPool: self._logger.warn('Tried to remove non-existent token from pool: %s' % str(srcToken)) return # Remove the token del self._tokenPool[srcToken] # Need to unsubscribe? self._shadowSubscribeStatusTable[srcActionName] -= 1 if not self._isPersistentSubscribe and self._shadowSubscribeStatusTable.get(srcActionName) <= 0: self._shadowSubscribeStatusTable[srcActionName] = 0 self._shadowManagerHandler.basicShadowUnsubscribe(self._shadowName, srcActionName) # Notify time-out issue if self._shadowSubscribeCallbackTable.get(srcActionName) is not None: self._logger.info("Shadow request with token: " + str(srcToken) + " has timed out.") self._shadowSubscribeCallbackTable[srcActionName]("REQUEST TIME OUT", "timeout", srcToken) def shadowGet(self, srcCallback, srcTimeout): """ **Description** Retrieve the device shadow JSON document from AWS IoT by publishing an empty JSON document to the corresponding shadow topics. Shadow response topics will be subscribed to receive responses from AWS IoT regarding the result of the get operation. Retrieved shadow JSON document will be available in the registered callback. If no response is received within the provided timeout, a timeout notification will be passed into the registered callback. **Syntax** .. code:: python # Retrieve the shadow JSON document from AWS IoT, with a timeout set to 5 seconds BotShadow.shadowGet(customCallback, 5) **Parameters** *srcCallback* - Function to be called when the response for this shadow request comes back. 
        Should be in form :code:`customCallback(payload, responseStatus, token)`, where :code:`payload` is the
        JSON document returned, :code:`responseStatus` indicates whether the request has been accepted, rejected
        or is a delta message, :code:`token` is the token used for tracing in this request.

        *srcTimeout* - Timeout to determine whether the request is invalid. When a request gets timeout, a
        timeout notification will be generated and put into the registered callback to notify users.

        **Returns**

        The token used for tracing in this shadow request.

        """
        with self._dataStructureLock:
            # Update callback data structure
            self._shadowSubscribeCallbackTable["get"] = srcCallback
            # Update number of pending feedback
            self._shadowSubscribeStatusTable["get"] += 1
            # clientToken
            currentToken = self._tokenHandler.getNextToken()
            self._tokenPool[currentToken] = Timer(srcTimeout, self._timerHandler, ["get", currentToken])
            self._basicJSONParserHandler.setString("{}")
            self._basicJSONParserHandler.validateJSON()
            self._basicJSONParserHandler.setAttributeValue("clientToken", currentToken)
            currentPayload = self._basicJSONParserHandler.regenerateString()
        # Two subscriptions
        if not self._isPersistentSubscribe or not self._isGetSubscribed:
            self._shadowManagerHandler.basicShadowSubscribe(self._shadowName, "get", self.generalCallback)
            self._isGetSubscribed = True
            self._logger.info("Subscribed to get accepted/rejected topics for deviceShadow: " + self._shadowName)
        # One publish
        self._shadowManagerHandler.basicShadowPublish(self._shadowName, "get", currentPayload)
        # Start the timer
        with self._dataStructureLock:
            if currentToken in self._tokenPool:
                self._tokenPool[currentToken].start()
        return currentToken

    def shadowDelete(self, srcCallback, srcTimeout):
        """
        **Description**

        Delete the device shadow from AWS IoT by publishing an empty JSON document to the corresponding
        shadow topics. Shadow response topics will be subscribed to receive responses from AWS IoT
        regarding the result of the delete operation. Responses will be available in the registered callback.
        If no response is received within the provided timeout, a timeout notification will be passed into
        the registered callback.

        **Syntax**

        .. code:: python

          # Delete the device shadow from AWS IoT, with a timeout set to 5 seconds
          BotShadow.shadowDelete(customCallback, 5)

        **Parameters**

        *srcCallback* - Function to be called when the response for this shadow request comes back. Should be
        in form :code:`customCallback(payload, responseStatus, token)`, where :code:`payload` is the JSON document
        returned, :code:`responseStatus` indicates whether the request has been accepted, rejected or is a
        delta message, :code:`token` is the token used for tracing in this request.

        *srcTimeout* - Timeout to determine whether the request is invalid. When a request gets timeout, a
        timeout notification will be generated and put into the registered callback to notify users.

        **Returns**

        The token used for tracing in this shadow request.

        """
        with self._dataStructureLock:
            # Update callback data structure
            self._shadowSubscribeCallbackTable["delete"] = srcCallback
            # Update number of pending feedback
            self._shadowSubscribeStatusTable["delete"] += 1
            # clientToken
            currentToken = self._tokenHandler.getNextToken()
            self._tokenPool[currentToken] = Timer(srcTimeout, self._timerHandler, ["delete", currentToken])
            self._basicJSONParserHandler.setString("{}")
            self._basicJSONParserHandler.validateJSON()
            self._basicJSONParserHandler.setAttributeValue("clientToken", currentToken)
            currentPayload = self._basicJSONParserHandler.regenerateString()
        # Two subscriptions
        if not self._isPersistentSubscribe or not self._isDeleteSubscribed:
            self._shadowManagerHandler.basicShadowSubscribe(self._shadowName, "delete", self.generalCallback)
            self._isDeleteSubscribed = True
            self._logger.info("Subscribed to delete accepted/rejected topics for deviceShadow: " + self._shadowName)
        # One publish
        self._shadowManagerHandler.basicShadowPublish(self._shadowName, "delete", currentPayload)
        # Start the timer
        with self._dataStructureLock:
            if currentToken in self._tokenPool:
                self._tokenPool[currentToken].start()
        return currentToken

    def shadowUpdate(self, srcJSONPayload, srcCallback, srcTimeout):
        """
        **Description**

        Update the device shadow JSON document string from AWS IoT by publishing the provided JSON
        document to the corresponding shadow topics. Shadow response topics will be subscribed to
        receive responses from AWS IoT regarding the result of the update operation. Response will be
        available in the registered callback. If no response is received within the provided timeout,
        a timeout notification will be passed into the registered callback.

        **Syntax**

        .. code:: python

          # Update the shadow JSON document from AWS IoT, with a timeout set to 5 seconds
          BotShadow.shadowUpdate(newShadowJSONDocumentString, customCallback, 5)

        **Parameters**

        *srcJSONPayload* - JSON document string used to update shadow JSON document in AWS IoT.

        *srcCallback* - Function to be called when the response for this shadow request comes back. Should be
        in form :code:`customCallback(payload, responseStatus, token)`, where :code:`payload` is the JSON document
        returned, :code:`responseStatus` indicates whether the request has been accepted, rejected or is a
        delta message, :code:`token` is the token used for tracing in this request.

        *srcTimeout* - Timeout to determine whether the request is invalid. When a request gets timeout, a
        timeout notification will be generated and put into the registered callback to notify users.

        **Returns**

        The token used for tracing in this shadow request.
""" # Validate JSON if _validateJSON(srcJSONPayload): with self._dataStructureLock: self._basicJSONParserHandler.setString(srcJSONPayload) self._basicJSONParserHandler.validateJSON() # clientToken currentToken = self._tokenHandler.getNextToken() self._tokenPool[currentToken] = Timer(srcTimeout, self._timerHandler, ["update", currentToken]) self._basicJSONParserHandler.setAttributeValue("clientToken", currentToken) JSONPayloadWithToken = self._basicJSONParserHandler.regenerateString() # Update callback data structure self._shadowSubscribeCallbackTable["update"] = srcCallback # Update number of pending feedback self._shadowSubscribeStatusTable["update"] += 1 # Two subscriptions if not self._isPersistentSubscribe or not self._isUpdateSubscribed: self._shadowManagerHandler.basicShadowSubscribe(self._shadowName, "update", self.generalCallback) self._isUpdateSubscribed = True self._logger.info("Subscribed to update accepted/rejected topics for deviceShadow: " + self._shadowName) # One publish self._shadowManagerHandler.basicShadowPublish(self._shadowName, "update", JSONPayloadWithToken) # Start the timer with self._dataStructureLock: if currentToken in self._tokenPool: self._tokenPool[currentToken].start() else: raise ValueError("Invalid JSON file.") return currentToken def shadowRegisterDeltaCallback(self, srcCallback): """ **Description** Listen on delta topics for this device shadow by subscribing to delta topics. Whenever there is a difference between the desired and reported state, the registered callback will be called and the delta payload will be available in the callback. **Syntax** .. code:: python # Listen on delta topics for BotShadow BotShadow.shadowRegisterDeltaCallback(customCallback) **Parameters** *srcCallback* - Function to be called when the response for this shadow request comes back. Should be in form :code:`customCallback(payload, responseStatus, token)`, where :code:`payload` is the JSON document returned, :code:`responseStatus` indicates whether the request has been accepted, rejected or is a delta message, :code:`token` is the token used for tracing in this request. **Returns** None """ with self._dataStructureLock: # Update callback data structure self._shadowSubscribeCallbackTable["delta"] = srcCallback # One subscription self._shadowManagerHandler.basicShadowSubscribe(self._shadowName, "delta", self.generalCallback) self._logger.info("Subscribed to delta topic for deviceShadow: " + self._shadowName) def shadowUnregisterDeltaCallback(self): """ **Description** Cancel listening on delta topics for this device shadow by unsubscribing to delta topics. There will be no delta messages received after this API call even though there is a difference between the desired and reported state. **Syntax** .. code:: python # Cancel listening on delta topics for BotShadow BotShadow.shadowUnregisterDeltaCallback() **Parameters** None **Returns** None """ with self._dataStructureLock: # Update callback data structure del self._shadowSubscribeCallbackTable["delta"] # One unsubscription self._shadowManagerHandler.basicShadowUnsubscribe(self._shadowName, "delta") self._logger.info("Unsubscribed to delta topics for deviceShadow: " + self._shadowName)
var styles=[{"format_version": "1.0", "generated_by": "cytoscape-3.8.1", "target_cytoscapejs_version": "~2.1", "title": "PPIColorByCluster", "style": [{"selector": "node", "css": {"text-valign": "center", "text-halign": "right", "text-opacity": 1.0, "border-opacity": 1.0, "shape": "ellipse", "font-size": 9, "width": 35.0, "background-color": "rgb(0,153,204)", "border-width": 4.0, "height": 35.0, "color": "rgb(0,0,0)", "background-opacity": 1.0, "border-color": "rgb(0,102,153)", "font-family": "Dialog.plain", "font-weight": "normal", "content": "data(Symbol)"}}, {"selector": "node[MCODE_CLUSTER_ID = 0.0]", "css": {"background-color": "rgb(188,189,220)"}}, {"selector": "node[MCODE_CLUSTER_ID = 1.0]", "css": {"background-color": "rgb(228,26,28)"}}, {"selector": "node[MCODE_CLUSTER_ID = 2.0]", "css": {"background-color": "rgb(55,126,184)"}}, {"selector": "node[MCODE_CLUSTER_ID = 4.0]", "css": {"background-color": "rgb(152,78,163)"}}, {"selector": "node[MCODE_CLUSTER_ID = 5.0]", "css": {"background-color": "rgb(255,127,0)"}}, {"selector": "node[MCODE_CLUSTER_ID = 3.0]", "css": {"background-color": "rgb(77,175,74)"}}, {"selector": "node[MCODE_CLUSTER_ID = 6.0]", "css": {"background-color": "rgb(255,255,51)"}}, {"selector": "node:selected", "css": {"background-color": "rgb(255,255,0)"}}, {"selector": "edge", "css": {"target-arrow-color": "rgb(0,0,0)", "width": 3.0, "target-arrow-shape": "none", "line-style": "solid", "font-size": 10, "source-arrow-color": "rgb(0,0,0)", "color": "rgb(0,0,0)", "content": "", "font-family": "Dialog.plain", "font-weight": "normal", "line-color": "rgb(84,39,143)", "text-opacity": 1.0, "source-arrow-shape": "none", "opacity": 0.39215686274509803}}, {"selector": "edge[SCORE > 1]", "css": {"width": 5.0}}, {"selector": "edge[SCORE = 1]", "css": {"width": 5.0}}, {"selector": "edge[SCORE > 0.3][SCORE < 1]", "css": {"width": "mapData(SCORE,0.3,1,2.0,5.0)"}}, {"selector": "edge[SCORE = 0.3]", "css": {"width": 2.0}}, {"selector": "edge[SCORE < 0.3]", "css": {"width": 2.0}}, {"selector": "edge:selected", "css": {"line-color": "rgb(255,0,0)"}}, {"selector": "node[DEGREE<=5]", "css": {"width": 20.0, "height": 20.0}}, {"selector": "node[DEGREE>5][DEGREE<20]", "css": {"width": "mapData(DEGREE,5,20,35.0,50.0)", "height": "mapData(DEGREE,5,20,35.0,50.0)"}}, {"selector": "node[DEGREE>=20]", "css": {"width": 50.0, "height": 50.0}}]}, {"format_version": "1.0", "generated_by": "cytoscape-3.8.1", "target_cytoscapejs_version": "~2.1", "title": "PPIColorByClusterNoLabel", "style": [{"selector": "node", "css": {"text-valign": "center", "text-halign": "right", "text-opacity": 1.0, "border-opacity": 1.0, "shape": "ellipse", "font-size": 20, "content": "", "width": 35.0, "background-color": "rgb(0,153,204)", "border-width": 4.0, "height": 35.0, "color": "rgb(0,0,0)", "background-opacity": 1.0, "border-color": "rgb(0,102,153)", "font-family": "Dialog.plain", "font-weight": "normal"}}, {"selector": "node[MCODE_CLUSTER_ID = 0.0]", "css": {"background-color": "rgb(188,189,220)"}}, {"selector": "node[MCODE_CLUSTER_ID = 1.0]", "css": {"background-color": "rgb(228,26,28)"}}, {"selector": "node[MCODE_CLUSTER_ID = 2.0]", "css": {"background-color": "rgb(55,126,184)"}}, {"selector": "node[MCODE_CLUSTER_ID = 4.0]", "css": {"background-color": "rgb(152,78,163)"}}, {"selector": "node[MCODE_CLUSTER_ID = 5.0]", "css": {"background-color": "rgb(255,127,0)"}}, {"selector": "node[MCODE_CLUSTER_ID = 3.0]", "css": {"background-color": "rgb(77,175,74)"}}, {"selector": "node[MCODE_CLUSTER_ID = 6.0]", "css": 
{"background-color": "rgb(255,255,51)"}}, {"selector": "node:selected", "css": {"background-color": "rgb(255,255,0)"}}, {"selector": "edge", "css": {"target-arrow-color": "rgb(0,0,0)", "width": 3.0, "target-arrow-shape": "none", "line-style": "solid", "font-size": 10, "source-arrow-color": "rgb(0,0,0)", "color": "rgb(0,0,0)", "content": "", "font-family": "Dialog.plain", "font-weight": "normal", "line-color": "rgb(84,39,143)", "text-opacity": 1.0, "source-arrow-shape": "none", "opacity": 0.39215686274509803}}, {"selector": "edge[SCORE > 1]", "css": {"width": 5.0}}, {"selector": "edge[SCORE = 1]", "css": {"width": 5.0}}, {"selector": "edge[SCORE > 0.3][SCORE < 1]", "css": {"width": "mapData(SCORE,0.3,1,2.0,5.0)"}}, {"selector": "edge[SCORE = 0.3]", "css": {"width": 2.0}}, {"selector": "edge[SCORE < 0.3]", "css": {"width": 2.0}}, {"selector": "edge:selected", "css": {"line-color": "rgb(255,0,0)"}}, {"selector": "node[DEGREE<=5]", "css": {"width": 20.0, "height": 20.0}}, {"selector": "node[DEGREE>5][DEGREE<20]", "css": {"width": "mapData(DEGREE,5,20,35.0,50.0)", "height": "mapData(DEGREE,5,20,35.0,50.0)"}}, {"selector": "node[DEGREE>=20]", "css": {"width": 50.0, "height": 50.0}}]}, {"format_version": "1.0", "generated_by": "cytoscape-3.3.0", "target_cytoscapejs_version": "~2.1", "title": "default", "style": [{"selector": "node", "css": {"text-opacity": 1.0, "text-valign": "center", "text-halign": "right", "color": "rgb(0,0,0)", "font-family": "Dialog.plain", "font-weight": "normal", "border-opacity": 1.0, "border-color": "rgb(0,102,153)", "shape": "ellipse", "font-size": 20, "content": "data(Symbol)", "background-color": "rgb(153,204,255)", "height": 35.0, "background-opacity": 1.0, "width": 35.0, "border-width": 4.0}}, {"selector": "node[_GeneInGOAndHitList > 20]", "css": {"width": 50.0}}, {"selector": "node[_GeneInGOAndHitList = 20]", "css": {"width": 50.0}}, {"selector": "node[_GeneInGOAndHitList > 5][_GeneInGOAndHitList < 20]", "css": {"width": "mapData(_GeneInGOAndHitList,5,20,20.0,50.0)"}}, {"selector": "node[_GeneInGOAndHitList = 5]", "css": {"width": 20.0}}, {"selector": "node[_GeneInGOAndHitList < 5]", "css": {"width": 20.0}}, {"selector": "node[_GeneInGOAndHitList > 20]", "css": {"height": 50.0}}, {"selector": "node[_GeneInGOAndHitList = 20]", "css": {"height": 50.0}}, {"selector": "node[_GeneInGOAndHitList > 5][_GeneInGOAndHitList < 20]", "css": {"height": "mapData(_GeneInGOAndHitList,5,20,20.0,50.0)"}}, {"selector": "node[_GeneInGOAndHitList = 5]", "css": {"height": 20.0}}, {"selector": "node[_GeneInGOAndHitList < 5]", "css": {"height": 20.0}}, {"selector": "node:selected", "css": {"background-color": "rgb(255,255,0)"}}, {"selector": "edge", "css": {"font-size": 10, "line-style": "solid", "opacity": 0.39215686274509803, "color": "rgb(0,0,0)", "target-arrow-color": "rgb(0,0,0)", "source-arrow-color": "rgb(0,0,0)", "content": "", "text-opacity": 1.0, "target-arrow-shape": "none", "source-arrow-shape": "none", "font-family": "Dialog.plain", "font-weight": "normal", "width": 3.0, "line-color": "rgb(84,39,143)"}}, {"selector": "edge[SCORE > 1]", "css": {"width": 10.0}}, {"selector": "edge[SCORE = 1]", "css": {"width": 10.0}}, {"selector": "edge[SCORE > 0.3][SCORE < 1]", "css": {"width": "mapData(SCORE,0.3,1,2.0,10.0)"}}, {"selector": "edge[SCORE = 0.3]", "css": {"width": 2.0}}, {"selector": "edge[SCORE < 0.3]", "css": {"width": 2.0}}, {"selector": "edge:selected", "css": {"line-color": "rgb(255,0,0)"}}, {"selector": "node[DEGREE<=5]", "css": {"width": 20.0, "height": 20.0}}, 
{"selector": "node[DEGREE>5][DEGREE<20]", "css": {"width": "mapData(DEGREE,5,20,35.0,50.0)", "height": "mapData(DEGREE,5,20,35.0,50.0)"}}, {"selector": "node[DEGREE>=20]", "css": {"width": 50.0, "height": 50.0}}]}];
""" This script is used for course notes. Author: Erick Marin Date: 10/09/2020 """ # True print (10 > 1) # False print("cat" == "dog") # True print(1 != 2) # This would result in an error because the computer knows that one is a # number and the oter is a string. # TypeError: '<' not supported between instances of 'int' and 'str' # print(1 < "1") # False print(1 == "1") # False print("cat" == "Cat") # False print("cat" < "Cat") # True print("cat" > "Cat") # Logical Operators # To evaluate as true, the 'and' operatore would need both expressions to be # true at at the same time. # False print("Yellow" > "Cyan" and "Brown" > "Magenta") # True print(25 > 60 or 1 != 2) # True print(not 42 == "Answer")
// // Assert.h // FastDTW-x // // Created by Melo Yao on 12/2/13. // Copyright (c) 2013 melo.yao. All rights reserved. // #ifndef FastDTW_x_Assert_h #define FastDTW_x_Assert_h #include <cassert> #include <stdio.h> #define FDASSERT0(__assertion__,__msg__) \ do {if(!(__assertion__)){printf(__msg__);assert(false);}}while(0) #define FDASSERT(__assertion__,__msg__,...) \ do {if(!(__assertion__)){printf(__msg__,__VA_ARGS__);assert(false);}}while(0) #endif
#!/usr/bin/env python import rospy from geometry_msgs.msg import Twist from ackermann_msgs.msg import AckermannDrive import math def cmd_vel_cb(msg): pub_msg = AckermannDrive() pub_msg.speed = msg.linear.x if msg.linear.x != 0 and msg.angular.z != 0: turn_radius = msg.linear.x / msg.angular.z pub_msg.steering_angle = math.atan(front_rear_distance/turn_radius) ackermann_pub.publish(pub_msg) if __name__ == '__main__': rospy.init_node('twist_to_ackermann') front_rear_distance = rospy.get_param('~front_rear_distance', 1.0) ackermann_pub = rospy.Publisher("ackermann", AckermannDrive, queue_size = 1) rospy.Subscriber("cmd_vel", Twist, cmd_vel_cb) rospy.spin()
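For a quick sanity check of the conversion in cmd_vel_cb: with linear.x = 1.0 m/s, angular.z = 0.5 rad/s and front_rear_distance (the wheelbase) = 1.0 m, the turn radius is 1.0 / 0.5 = 2.0 m and the steering angle is atan(1.0 / 2.0) ≈ 0.4636 rad. A standalone sketch of the same math; values and the helper name are illustrative:

import math

def twist_to_steering(v, omega, wheelbase):
    # Same formula as cmd_vel_cb: R = v / omega, steering = atan(L / R)
    if v == 0 or omega == 0:
        return 0.0
    turn_radius = v / omega
    return math.atan(wheelbase / turn_radius)

print(twist_to_steering(1.0, 0.5, 1.0))  # ~0.4636 rad (~26.6 degrees)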
int kvm_arch_vcpu_setup(struct kvm_vcpu *vcpu)
static u64 core_reg_offset_from_id(u64 id)
static int get_core_reg(struct kvm_vcpu *vcpu, const struct kvm_one_reg *reg)
static int set_core_reg(struct kvm_vcpu *vcpu, const struct kvm_one_reg *reg)
int kvm_arch_vcpu_ioctl_get_regs(struct kvm_vcpu *vcpu, struct kvm_regs *regs)
int kvm_arch_vcpu_ioctl_set_regs(struct kvm_vcpu *vcpu, struct kvm_regs *regs)
static bool is_timer_reg(u64 index)
static int copy_timer_indices(struct kvm_vcpu *vcpu, u64 __user *uindices)
static int set_timer_reg(struct kvm_vcpu *vcpu, const struct kvm_one_reg *reg)
static int get_timer_reg(struct kvm_vcpu *vcpu, const struct kvm_one_reg *reg)
static unsigned long num_core_regs(void)
unsigned long kvm_arm_num_regs(struct kvm_vcpu *vcpu)
int kvm_arm_copy_reg_indices(struct kvm_vcpu *vcpu, u64 __user *uindices)
int kvm_arm_get_reg(struct kvm_vcpu *vcpu, const struct kvm_one_reg *reg)
int kvm_arm_set_reg(struct kvm_vcpu *vcpu, const struct kvm_one_reg *reg)
int kvm_arch_vcpu_ioctl_get_sregs(struct kvm_vcpu *vcpu, struct kvm_sregs *sregs)
int kvm_arch_vcpu_ioctl_set_sregs(struct kvm_vcpu *vcpu, struct kvm_sregs *sregs)
int __kvm_arm_vcpu_get_events(struct kvm_vcpu *vcpu, struct kvm_vcpu_events *events)
int __kvm_arm_vcpu_set_events(struct kvm_vcpu *vcpu, struct kvm_vcpu_events *events)
int __attribute_const__ kvm_target_cpu(void)
int kvm_vcpu_preferred_target(struct kvm_vcpu_init *init)
int kvm_arch_vcpu_ioctl_get_fpu(struct kvm_vcpu *vcpu, struct kvm_fpu *fpu)
int kvm_arch_vcpu_ioctl_set_fpu(struct kvm_vcpu *vcpu, struct kvm_fpu *fpu)
int kvm_arch_vcpu_ioctl_translate(struct kvm_vcpu *vcpu, struct kvm_translation *tr)
int kvm_arch_vcpu_ioctl_set_guest_debug(struct kvm_vcpu *vcpu, struct kvm_guest_debug *dbg)
int kvm_arm_vcpu_arch_set_attr(struct kvm_vcpu *vcpu, struct kvm_device_attr *attr)
int kvm_arm_vcpu_arch_get_attr(struct kvm_vcpu *vcpu, struct kvm_device_attr *attr)
int kvm_arm_vcpu_arch_has_attr(struct kvm_vcpu *vcpu, struct kvm_device_attr *attr)

23 struct kvm_vcpu *vcpu
6 const struct kvm_one_reg *reg
3 struct kvm_device_attr *attr
2 void
2 u64 __user *uindices
2 struct kvm_vcpu_events *events
2 struct kvm_sregs *sregs
2 struct kvm_regs *regs
2 struct kvm_fpu *fpu
1 u64 index
1 u64 id
1 struct kvm_vcpu_init *init
1 struct kvm_translation *tr
1 struct kvm_guest_debug *dbg
import _extends from 'babel-runtime/helpers/extends'
import request, { extend } from 'umi-request'
import { notification } from 'ant-design-vue'

var codeMessage = {
  200: 'The server successfully returned the requested data.',
  201: 'Data was created or modified successfully.',
  202: 'The request has been queued in the background (asynchronous task).',
  204: 'Data was deleted successfully.',
  400: 'The request contained an error; the server did not create or modify any data.',
  401: 'The user is not authorized (wrong token, username or password).',
  403: 'The user is authorized, but access is forbidden.',
  404: 'The request targeted a record that does not exist; the server took no action.',
  406: 'The requested format is not available.',
  410: 'The requested resource has been permanently deleted and is no longer available.',
  422: 'A validation error occurred while creating an object.',
  500: 'A server error occurred; please check the server.',
  502: 'Bad gateway.',
  503: 'The service is unavailable; the server is temporarily overloaded or under maintenance.',
  504: 'Gateway timeout.'
}

var errorHandler = function errorHandler (error) {
  var _error$response = error.response
  var response = _error$response === undefined ? {} : _error$response
  var errortext = codeMessage[response.status] || response.statusText
  var status = response.status
  var url = response.url

  notification.error({
    message: 'Request error ' + status + ': ' + url,
    description: errortext
  })
}

export var BASE_URL = process.env.VUE_APP_API_URL || '/api/v1'

var customRequest = extend({
  prefix: BASE_URL,
  timeout: 1000,
  errorHandler: errorHandler
})

// request interceptor
customRequest.interceptors.request.use(function (url, options) {
  return {
    url: url + '&interceptors=yes',
    options: _extends({}, options, { interceptors: true })
  }
})

// response interceptor
customRequest.interceptors.response.use(function (response, options) {
  response.headers.append('interceptors', 'yes yo')
  return response
})

export { request, extend }
export default customRequest
// Copyright (c) 2013 GitHub, Inc. // Use of this source code is governed by the MIT license that can be // found in the LICENSE file. #ifndef SHELL_BROWSER_JAVASCRIPT_ENVIRONMENT_H_ #define SHELL_BROWSER_JAVASCRIPT_ENVIRONMENT_H_ #include <memory> #include "base/macros.h" #include "gin/public/isolate_holder.h" #include "uv.h" // NOLINT(build/include_directory) namespace node { class Environment; class MultiIsolatePlatform; } // namespace node namespace electron { class MicrotasksRunner; // Manage the V8 isolate and context automatically. class JavascriptEnvironment { public: explicit JavascriptEnvironment(uv_loop_t* event_loop); ~JavascriptEnvironment(); void OnMessageLoopCreated(); void OnMessageLoopDestroying(); node::MultiIsolatePlatform* platform() const { return platform_; } v8::Isolate* isolate() const { return isolate_; } v8::Local<v8::Context> context() const { return v8::Local<v8::Context>::New(isolate_, context_); } static v8::Isolate* GetIsolate(); private: v8::Isolate* Initialize(uv_loop_t* event_loop); // Leaked on exit. node::MultiIsolatePlatform* platform_; v8::Isolate* isolate_; gin::IsolateHolder isolate_holder_; v8::Locker locker_; v8::Global<v8::Context> context_; std::unique_ptr<MicrotasksRunner> microtasks_runner_; DISALLOW_COPY_AND_ASSIGN(JavascriptEnvironment); }; // Manage the Node Environment automatically. class NodeEnvironment { public: explicit NodeEnvironment(node::Environment* env); ~NodeEnvironment(); node::Environment* env() { return env_; } private: node::Environment* env_; DISALLOW_COPY_AND_ASSIGN(NodeEnvironment); }; } // namespace electron #endif // SHELL_BROWSER_JAVASCRIPT_ENVIRONMENT_H_
import LinksView from './LinksView'; allure.api.addTestResultBlock(LinksView, {position: 'before'});
/* * Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH * under one or more contributor license agreements. See the NOTICE file * distributed with this work for additional information regarding copyright * ownership. Camunda licenses this file to you under the Apache License, * Version 2.0; you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ 'use strict'; var fs = require('fs'); var angular = require('../../../../camunda-bpm-sdk-js/vendor/angular'), copy = angular.copy, $ = require('jquery'), template = fs.readFileSync(__dirname + '/cam-widget-search.html', 'utf8'); var dateRegex = /(\d\d\d\d)-(\d\d)-(\d\d)T(\d\d):(\d\d):(\d\d)(?:.(\d\d\d)| )?$/; function getType(value) { if (value && typeof value === 'string' && value.match(dateRegex)) { return 'date'; } return typeof value; } var isValid = function(search) { return ( search.type.value && (!search.extended || search.name.value) && (search.basic || search.operator.value) && (search.basic || search.value.value) && (getType(search.value.value) === 'date' || !search.enforceDates) ); }; var validateOperator = function(operator) { if (!operator.value) { operator.value = operator.values[0]; return; } var idx = operator.values .map(function(el) { return el.key; }) .indexOf(operator.value.key); operator.value = operator.values[idx === -1 ? 0 : idx]; }; var parseValue = function(value, enforceString) { if (enforceString) { return '' + value; } if (!isNaN(value) && value.trim() !== '') { // value must be transformed to number return +value; } if (value === 'true') { return true; } if (value === 'false') { return false; } if (value === 'NULL') { return null; } return value; }; // global flag for all instances to ignore URL updates to update searches var IGNORE_URL_UPDATE = false; module.exports = [ '$timeout', '$location', 'search', 'widgetLocalConf', '$translate', function($timeout, $location, searchService, widgetLocalConf, $translate) { // check if browser is affected by IE focus bug: // https://connect.microsoft.com/IE/feedback/details/810538/ie-11-fires-input-event-on-focus var checkIEfocusBug = function(cb) { // special timeout so we do not fall into an apply cycle $timeout( function() { // create input field to make "feature" detection of the bug var el = document.createElement('input'); el.setAttribute('type', 'text'); // bug only happens when placeholder is set el.setAttribute('placeholder', 'set'); // this event listener is only called when the browser is affected by the bug var weAreScrewed = false; el.addEventListener('input', function() { // we are affected by the IE focus bug and cannot use the placeholder attribute on the search input field weAreScrewed = true; }); // perform the test document.body.appendChild(el); el.focus(); document.body.removeChild(el); // the event is handled asynchronously, so we have to wait for the result $timeout(function() { cb(weAreScrewed); }); }, 0, false ); }; return { restrict: 'A', scope: { types: '=camWidgetSearchTypes', translations: '=camWidgetSearchTranslations', operators: '=camWidgetSearchOperators', searches: '=?camWidgetSearchSearches', validSearches: 
'=?camWidgetSearchValidSearches',
        storageGroup: '=?camWidgetSearchStorageGroup',
        searchId: '@camWidgetSearchId',
        total: '=?camWidgetSearchTotal',
        matchAny: '=?camWidgetSearchMatchAny',
        disableTypeaheadAutoselect: '=?camWidgetSearchDisableTypeaheadAutoselect'
      },

      link: function($scope, element) {
        angular.forEach($scope.translations, function(value, key) {
          $scope.translations[key] = $translate.instant(value);
        });

        if ($scope.types)
          $scope.types.map(function(el) {
            el.id.value = $translate.instant(el.id.value);
            if (el.operators) {
              el.operators = el.operators.map(function(op) {
                op.value = $translate.instant(op.value);
                return op;
              });
            }
            if (el.options && typeof el.options[0] === 'object') {
              el.mappedOptions = el.options.map(({key, value}) => {
                return {key: key, value: $translate.instant(value)};
              });
              el.options = el.mappedOptions.map(({value}) => value);
            }
            return el;
          });

        angular.forEach($scope.operators, function(operatorGroupedByType) {
          angular.forEach(operatorGroupedByType, function(operator) {
            operator.value = $translate.instant(operator.value);
          });
        });

        $scope.isMatchAnyActive = typeof $scope.matchAny !== 'undefined';
        $scope.caseHandeling = {};

        $scope.switchMatchType = function() {
          if ($scope.isMatchAnyActive) {
            $scope.matchAny = !$scope.matchAny;
          }
        };

        $scope.focused = false;
        var formElement = angular.element(element).find('form')[0];

        formElement.addEventListener(
          'focus',
          function() {
            $timeout(function() {
              $scope.focused = true;
            });
          },
          true
        );

        formElement.addEventListener(
          'blur',
          function() {
            $timeout(function() {
              $scope.focused = false;
            });
          },
          true
        );

        var searchHasVariableQuery = function() {
          return ($scope.searches || []).some(element => {
            return element.caseOptions;
          });
        };

        $scope.searchHasVariableQuery = searchHasVariableQuery();

        // test for IE focus bug
        checkIEfocusBug(function(hasBug) {
          if (hasBug) {
            // if we are affected by the focus bug, we cannot set a placeholder on the input field
            // add another indication for the search field
            var node = document.createElement('div');
            node.textContent = $scope.translations.inputPlaceholder + ':';
            element[0].insertBefore(node, element[0].firstChild);
            $scope.$root.$broadcast('plugin:search:change');
          } else {
            // if we are not affected by the focus bug, we can set the placeholder on the input field
            element[0]
              .querySelector('input.main-field')
              .setAttribute(
                'placeholder',
                $scope.translations.inputPlaceholder
              );
          }
        });

        $scope.searchTypes = $scope.types.map(function(el) {
          return el.id;
        });

        $scope.getRightPadding = function() {
          if (element.width() > 400) {
            return '125px';
          }
          return '12px';
        };

        var defaultType = $scope.types.reduce(function(done, type) {
          return done || (type.default ? type : null);
        }, null);

        var getTypes = function() {
          // check which classes are allowed
          var aggregatedTypeKeys = $scope.searches
            .map(function(el) {
              return el.type.value.key;
            })
            .reduce(function(aggregatedList, type) {
              if (aggregatedList.indexOf(type) === -1) {
                aggregatedList.push(type);
              }
              return aggregatedList;
            }, []);

          var allowedGroups = aggregatedTypeKeys
            .map(function(el) {
              return getConfigByTypeKey(el) ?
getConfigByTypeKey(el).groups : null; }) .filter(function(el) { return !!el; }) .reduce(function(groupsArray, groups) { if (groupsArray) { if (groupsArray.length === 0) { return angular.copy(groups); } for (var i = 0; i < groupsArray.length; i++) { if (groups.indexOf(groupsArray[i]) === -1) { groupsArray.splice(i, 1); i--; } } if (groupsArray.length === 0) { return null; } else { return groupsArray; } } else { return null; } }, []); if (allowedGroups === null) { return []; } else if (allowedGroups.length === 0) { return $scope.searchTypes; } else { return $scope.searchTypes.filter(function(el) { var groups = getConfigByTypeKey(el.key).groups; if (!groups) return true; for (var i = 0; i < groups.length; i++) { if (allowedGroups.indexOf(groups[i]) > -1) { return true; } } return false; }); } }; var getConfigByTypeKey = function(typeKey) { return $scope.types.reduce(function(done, type) { return done || (type.id.key === typeKey ? type : null); }, null); }; var getOperators = function(config, value) { return ( config.operators || $scope.operators[getType(parseValue(value, config.enforceString))] ); }; var filteredSearches = function(original) { const getKeyAndValue = (mappedOptions, search) => { let key = null; let value = null; if (mappedOptions) { const inOperator = search.operator === 'In'; const options = mappedOptions.filter( option => inOperator && search.value.includes(option.key) ); if (inOperator) { const keys = options.map(option => option.key); if (keys.length) { key = keys; } value = options.map(option => option.value).join(', '); } else { const option = (mappedOptions || []).find( option => option.key === search.value ); key = option?.key; value = option?.value; } } if (!value) { value = search.value; } return {key: key, value: value}; }; return original .map(function(search) { var config = getConfigByTypeKey(search.type); if (config) { var newSearch = { extended: config.extended, basic: config.basic, type: { values: getTypes(), value: getTypes().reduce(function(done, type) { return done || (type.key === search.type ? type : null); }, null), tooltip: $scope.translations.type }, name: { value: search.name, tooltip: $scope.translations.name }, options: config.options, operator: { tooltip: $scope.translations.operator }, value: { ...getKeyAndValue(config.mappedOptions, search), tooltip: $scope.translations.value }, allowDates: config.allowDates, enforceDates: config.enforceDates, potentialNames: config.potentialNames || [], enforceString: config.enforceString, caseOptions: config.caseOptions }; newSearch.operator.values = getOperators( config, newSearch.value.value ); newSearch.operator.value = newSearch.operator.values.reduce( function(done, op) { return done || (op.key === search.operator ? 
op : null); }, null ); newSearch.valid = isValid(newSearch); return newSearch; } else { if (search.type === 'variableNamesIgnoreCase') $scope.caseHandeling.ignoreNames = true; if (search.type === 'variableValuesIgnoreCase') $scope.caseHandeling.ignoreValues = true; } }) .filter(function(search) { return search; }); }; var searchId = $scope.searchId || 'search'; var getSearchesFromURL = function() { var urlSearches = JSON.parse( ($location.search() || {})[searchId + 'Query'] || '[]' ); return filteredSearches(urlSearches); }; $scope.searches = $scope.searches || []; $scope.searches = getSearchesFromURL(); $scope.validSearchesBuffer = $scope.searches.reduce(function( valid, search ) { if (search.valid) { valid.push(search); } return valid; }, []); $scope.validSearches = angular.copy($scope.validSearchesBuffer); var selectNextInvalidElement = function(startIndex, startField) { var search = $scope.searches[startIndex]; if (!search.valid) { if ( search.extended && !search.name.value && startField !== 'name' ) { search.name.inEdit = true; return; } else if (startField !== 'value') { search.value.inEdit = true; return; } } for (var i = 1; i < $scope.searches.length; i++) { var idx = (i + startIndex) % $scope.searches.length; search = $scope.searches[idx]; if (!search.valid) { if (search.extended && !search.name.value) { search.name.inEdit = true; } else { search.value.inEdit = true; } return; } } }; $scope.createSearch = function(type) { if (!type && !$scope.inputQuery) { return; } var value = !type ? $scope.inputQuery : ''; type = (type && getConfigByTypeKey(type.key)) || defaultType; var operators = getOperators(type, value); $scope.searches.push({ extended: type.extended, basic: type.basic, type: { values: getTypes(), value: type.id, tooltip: $scope.translations.type }, name: { value: '', inEdit: type.extended, tooltip: $scope.translations.name }, operator: { value: operators[0], values: operators, tooltip: $scope.translations.operator }, options: type.options, value: { value: value, inEdit: !type.extended && !value, tooltip: $scope.translations.value }, allowDates: type.allowDates, enforceDates: type.enforceDates, potentialNames: type.potentialNames, enforceString: type.enforceString, caseOptions: type.caseOptions }); var search = $scope.searches[$scope.searches.length - 1]; search.valid = isValid(search); // To those who think, WHAT THE HECK IS THIS?!: // // Typeahead thinks, it is a good idea to focus the input field after selecting an option via mouse click // (see https://github.com/angular-ui/bootstrap/blob/e909b922a2ce09792a733652e5131e9a95b35e5b/src/typeahead/typeahead.js#L274) // We do not want this. Since they are registering their focus event per timeout AFTER we register our // blur event per timeout, the field is focussed in the end. How to prevent this? 
More timeouts x_x if (!value) { $timeout(function() { $timeout(function() { $scope.inputQuery = ''; $(element[0].querySelector('.search-container > input')).blur(); }); }); } else { $scope.inputQuery = ''; } }; $scope.deleteSearch = function(idx) { $scope.searches.splice(idx, 1); $timeout(function() { $(element[0].querySelector('.search-container > input')).focus(); }); }; const hasOption = (string, value) => string .toUpperCase() .split(',') .map(strOpt => strOpt.trim()) .includes(value.toUpperCase()); $scope.handleChange = function(idx, field, before, value, evt) { var config; var search = $scope.searches[idx]; if (field === 'type') { config = getConfigByTypeKey(value.key); search.extended = config.extended; search.basic = config.basic; search.allowDates = config.allowDates; if (!search.enforceDates && config.enforceDates) { search.value.value = ''; } search.enforceDates = config.enforceDates; search.operator.values = getOperators(config, search.value.value); validateOperator(search.operator); } else if (field === 'value') { if (idx === $scope.searches.length - 1) { $timeout(function() { $( element[0].querySelector('.search-container > input') ).focus(); }); } config = getConfigByTypeKey(search.type.value.key); if (!config.operators) { search.operator.values = getOperators(config, search.value.value); validateOperator(search.operator); } } search.valid = isValid(search); if (evt && evt.keyCode === 13) { selectNextInvalidElement(idx, field); } const mappedOptions = $scope.types.find( type => type.id.key === search.type.value.key )?.mappedOptions; if (mappedOptions) { if (search.operator.value.key === 'In') { const keys = mappedOptions .filter(option => hasOption(search.value.value, option.value)) .map(option => option.key); search.value.key = keys.length ? 
keys : undefined; } else { search.value.key = mappedOptions.find( option => search.value.value === option.value )?.key; } } }; $scope.onKeydown = function(evt) { if ([38, 40, 13].indexOf(evt.keyCode) !== -1) { var dd = $( element[0].querySelectorAll('.dropdown-menu[id^="typeahead"]') ); if (dd.length === 0) { $timeout(function() { angular.element(evt.target).triggerHandler('input'); }); } } }; var extractSearches = function(searches) { const getValue = search => { const mappedOptions = $scope.types.find( type => type.id.key === search.type.value.key ).mappedOptions; let value = null; if (mappedOptions) { if (search.operator.value.key === 'In') { const values = mappedOptions .filter(option => hasOption(search.value.value, option.value)) .map(option => option.key); if (values.length) { value = values; } } else { value = mappedOptions.find( option => search.value.value === option.value )?.key; } } if (!value) { value = search.value.value; } return value; }; var out = []; angular.forEach(searches, function(search) { out.push({ type: search.type.value.key, operator: search.operator.value.key, value: getValue(search), name: search.name.value }); }); return out; }; var defaultSearchObject = { basic: true, type: { values: getTypes(), value: {}, tooltip: '' }, name: { value: '', inEdit: '', tooltip: '' }, operator: { value: {key: 'eq', value: '='}, values: [], tooltip: $scope.translations.operator }, value: { value: '', inEdit: false, tooltip: $scope.translations.value }, valid: false }; var handleSearchesUpdate = function() { var searches = $scope.searches; // add valid searches to validSearchesBuffer angular.forEach(searches, function(search) { if ( search.valid && $scope.validSearchesBuffer.indexOf(search) === -1 ) { $scope.validSearchesBuffer.push(search); } }); // remove invalid searches from valid searches $scope.validSearchesBuffer = $scope.validSearchesBuffer.filter( search => { return search.valid && searches.indexOf(search) !== -1; } ); if ($scope.searchHasVariableQuery) { if ($scope.caseHandeling.ignoreNames) { let search = angular.copy(defaultSearchObject); search.type.value.key = 'variableNamesIgnoreCase'; $scope.validSearchesBuffer.push(search); } if ($scope.caseHandeling.ignoreValues) { let search = angular.copy(defaultSearchObject); search.type.value.key = 'variableValuesIgnoreCase'; $scope.validSearchesBuffer.push(search); } } var queryObj = {}; queryObj[searchId + 'Query'] = JSON.stringify( extractSearches($scope.validSearchesBuffer) ); if ($scope.isMatchAnyActive) { var newLocation; if ( $scope.matchAny && !$location.search().hasOwnProperty(searchId + 'OrQuery') ) { newLocation = $location.url() + '&' + searchId + 'OrQuery'; } else if (!$scope.matchAny) { newLocation = $location .url() .replace('&' + searchId + 'OrQuery', ''); } $location.url(newLocation); $location.replace(); } $scope.searchHasVariableQuery = searchHasVariableQuery(); // ignore URL updates for all search widget instances for this update IGNORE_URL_UPDATE = true; searchService.updateSilently( queryObj, !$location.search()[searchId + 'Query'] ); // listen to URL changes again AFTER the locationchange event has fired $timeout(function() { IGNORE_URL_UPDATE = false; }); updateSearchTypes(); }; $scope.$watch( '[searches, matchAny, caseHandeling]', handleSearchesUpdate, true ); $scope.$on('$locationChangeSuccess', function() { $scope.matchAny = $location .search() .hasOwnProperty(searchId + 'OrQuery'); if ( !IGNORE_URL_UPDATE && $location.search().hasOwnProperty(searchId + 'Query') ) { // make new array of searches 
from the url var searches = getSearchesFromURL(); // if something has changed in the valid searches var compareSearches = $scope.validSearchesBuffer.filter(search => { return search.valid; }); if (!angular.equals(searches, compareSearches)) { // now add all invalid searches which exist within the original search array, but are not in the URL angular.forEach($scope.searches, function(search) { if (!search.valid) { searches.push(search); } }); // empty the valid searches buffer (will be automatically refilled by the listener on the searches) $scope.validSearchesBuffer = []; // replace the original search array with the new one $scope.searches = searches; } } }); var copyValid; $scope.$watch( 'validSearchesBuffer', function() { $timeout.cancel(copyValid); copyValid = $timeout(function() { $scope.validSearches = angular.copy($scope.validSearchesBuffer); }); }, true ); var updateSearchTypes = function() { var types = getTypes(); $scope.dropdownTypes = types; for (var i = 0; i < $scope.searches.length; i++) { $scope.searches[i].type.values = types; } }; $scope.$watch( 'types', function() { //in case if array of types changed - update dropdown values $scope.searchTypes = $scope.types.map(function(el) { return el.id; }); $scope.dropdownTypes = getTypes(); // Currently we only allow to change the potential names of a type, to support changing the filter // in the tasklist while preserving existing search queries angular.forEach($scope.searches, function(search) { search.potentialNames = getConfigByTypeKey(search.type.value.key) ? getConfigByTypeKey(search.type.value.key).potentialNames || [] : null; }); }, true ); $scope.dropdownTypes = getTypes(); ///////////////////////////////////////////////////////////////////// // criteria persistence ///////////////////////////////////////////////////////////////////// var searchCriteriaStorage = ($scope.searchCriteriaStorage = { group: null, nameInput: '', available: {} }); var stored = {}; var types = $scope.storageGroup ? [$scope.storageGroup] : $scope.types .map(function(item) { return item.groups; }) .reduce(function(current, previous) { return (current || []).concat(previous); }) .filter(function(value) { return value; }); var groups = []; for (var i = 0; i < types.length; i++) { if (groups.indexOf(types[i]) < 0 && types[i]) groups.push(types[i]); } if (!groups.length && $scope.storageGroup) { groups.push($scope.storageGroup); } groups.forEach(function(group) { stored[group] = {}; }); $scope.$watch( 'validSearches', function determineGroup() { if ($scope.storageGroup) { searchCriteriaStorage.group = $scope.storageGroup; filterCriteria(); return; } var _group = null; $scope.validSearches.forEach(function(search) { if (_group) return; var key = search.type.value.key; $scope.types.forEach(function(type) { if (_group) return; // I know that sucks... // I mean... it sucks that type.groups is supposed to be an array // because if the array has more than 1 item, we can't reliably // determine which group is the relevant one // (unless we iterate more... 
which would be the worst nightmare // of the guy who will have to maintain that code) if (type.id.key === key && (type.groups || []).length === 1) { _group = (type.groups || [])[0]; } }); }); searchCriteriaStorage.group = _group; filterCriteria(); }, true ); function filterCriteria() { searchCriteriaStorage.available = {}; if (searchCriteriaStorage.group) { $scope.isSearchCriteriaStorageGrouped = false; searchCriteriaStorage.available = copy( stored[searchCriteriaStorage.group] ); return; } $scope.isSearchCriteriaStorageGrouped = true; groups.forEach(function(group) { searchCriteriaStorage.available[group] = copy(stored[group] || {}); }); } function groupAndName(str, group) { if (group) { return {group: group, name: str}; } else if (searchCriteriaStorage.group) { return {group: searchCriteriaStorage.group, name: str}; } } stored = widgetLocalConf.get('searchCriteria', stored); filterCriteria(); $scope.$watch('storageGroup', function() { if ($scope.storageGroup && groups.indexOf($scope.storageGroup) < 0) { return; } searchCriteriaStorage.group = $scope.storageGroup; filterCriteria(); }); $scope.storedCriteriaInputClick = function($evt) { $evt.stopPropagation(); }; $scope.searchCriteriaInputKeydown = function($evt) { if ($evt.keyCode === 13) { return $scope.storedCriteriaSaveClick($evt); } }; $scope.hasCriteriaSets = function() { if (groups.length > 1) { for (var key in searchCriteriaStorage.available) { if ( Object.keys(searchCriteriaStorage.available[key]).length > 0 ) { return true; } } return false; } else { return !!Object.keys(searchCriteriaStorage.available || {}).length; } }; $scope.loadCriteriaSet = function($evt, name, group) { $scope.caseHandeling = { ignoreNames: false, ignoreValues: false }; var info = groupAndName(name, group); if (!info) return; var original = stored[info.group][info.name]; $scope.searches = filteredSearches(original); // provided by Harry Potter, DO NOT REMOVE if ($scope.isMatchAnyActive) { $scope.matchAny = original[original.length - 1]['matchAny']; } handleSearchesUpdate(); }; $scope.dropCriteriaSet = function($evt, name, group) { $evt.stopPropagation(); var info = groupAndName(name, group); if (!info) return; delete stored[info.group][info.name]; widgetLocalConf.set('searchCriteria', stored); filterCriteria(); }; $scope.storedCriteriaSaveClick = function($evt) { $evt.stopPropagation(); var name = searchCriteriaStorage.nameInput; if (!name) { return; } stored[searchCriteriaStorage.group] = stored[searchCriteriaStorage.group] || {}; stored[searchCriteriaStorage.group][name] = extractSearches( $scope.validSearchesBuffer ); if ($scope.isMatchAnyActive) { stored[searchCriteriaStorage.group][name].push({ matchAny: $scope.matchAny }); } stored[searchCriteriaStorage.group][name].push({ caseHandeling: angular.copy($scope.caseHandeling) }); widgetLocalConf.set('searchCriteria', stored); filterCriteria(); searchCriteriaStorage.nameInput = ''; }; }, template: template }; } ];
""" WSGI config for linkr project. It exposes the WSGI callable as a module-level variable named ``application``. For more information on this file, see https://docs.djangoproject.com/en/3.1/howto/deployment/wsgi/ """ import os from django.core.wsgi import get_wsgi_application os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'linkr.settings') application = get_wsgi_application()
/* $Id: msdosfs_vnops.c,v 1.80 1998/12/07 21:58:35 archie Exp $ */ /* $NetBSD: msdosfs_vnops.c,v 1.68 1998/02/10 14:10:04 mrg Exp $ */ /*- * Copyright (C) 1994, 1995, 1997 Wolfgang Solfrank. * Copyright (C) 1994, 1995, 1997 TooLs GmbH. * All rights reserved. * Original code by Paul Popelka (paulp@uts.amdahl.com) (see below). * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. All advertising materials mentioning features or use of this software * must display the following acknowledgement: * This product includes software developed by TooLs GmbH. * 4. The name of TooLs GmbH may not be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY TOOLS GMBH ``AS IS'' AND ANY EXPRESS OR * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. * IN NO EVENT SHALL TOOLS GMBH BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ /* * Written by Paul Popelka (paulp@uts.amdahl.com) * * You can do anything you want with this software, just don't say you wrote * it, and don't remove this notice. * * This software is provided "as is". * * The author supplies this software to be publicly redistributed on the * understanding that the author is not responsible for the correct * functioning of this software in any circumstances and is not liable for * any damages caused by this software. 
 *
 * October 1992
 */

#include <sys/param.h>
#include <sys/systm.h>
#include <sys/namei.h>
#include <sys/resourcevar.h>	/* defines plimit structure in proc struct */
#include <sys/kernel.h>
#include <sys/stat.h>
#include <sys/buf.h>
#include <sys/proc.h>
#include <sys/mount.h>
#include <sys/unistd.h>
#include <sys/vnode.h>
#include <miscfs/specfs/specdev.h> /* XXX */	/* defines v_rdev */
#include <sys/malloc.h>
#include <sys/dirent.h>
#include <sys/signalvar.h>

#include <vm/vm.h>
#include <vm/vm_extern.h>
#include <vm/vm_zone.h>
#include <vm/vnode_pager.h>

#include <msdosfs/bpb.h>
#include <msdosfs/direntry.h>
#include <msdosfs/denode.h>
#include <msdosfs/msdosfsmount.h>
#include <msdosfs/fat.h>

/*
 * Prototypes for MSDOSFS vnode operations
 */
static int msdosfs_create __P((struct vop_create_args *));
static int msdosfs_mknod __P((struct vop_mknod_args *));
static int msdosfs_close __P((struct vop_close_args *));
static int msdosfs_access __P((struct vop_access_args *));
static int msdosfs_getattr __P((struct vop_getattr_args *));
static int msdosfs_setattr __P((struct vop_setattr_args *));
static int msdosfs_read __P((struct vop_read_args *));
static int msdosfs_write __P((struct vop_write_args *));
static int msdosfs_fsync __P((struct vop_fsync_args *));
static int msdosfs_remove __P((struct vop_remove_args *));
static int msdosfs_link __P((struct vop_link_args *));
static int msdosfs_rename __P((struct vop_rename_args *));
static int msdosfs_mkdir __P((struct vop_mkdir_args *));
static int msdosfs_rmdir __P((struct vop_rmdir_args *));
static int msdosfs_symlink __P((struct vop_symlink_args *));
static int msdosfs_readdir __P((struct vop_readdir_args *));
static int msdosfs_abortop __P((struct vop_abortop_args *));
static int msdosfs_bmap __P((struct vop_bmap_args *));
static int msdosfs_strategy __P((struct vop_strategy_args *));
static int msdosfs_print __P((struct vop_print_args *));
static int msdosfs_pathconf __P((struct vop_pathconf_args *ap));
static int msdosfs_getpages __P((struct vop_getpages_args *));
static int msdosfs_putpages __P((struct vop_putpages_args *));

/*
 * Some general notes:
 *
 * In the ufs filesystem the inodes, superblocks, and indirect blocks are
 * read/written using the vnode for the filesystem. Blocks that represent
 * the contents of a file are read/written using the vnode for the file
 * (including directories when they are read/written as files). This
 * presents problems for the dos filesystem because data that should be in
 * an inode (if dos had them) resides in the directory itself. Since we
 * must update directory entries without the benefit of having the vnode
 * for the directory we must use the vnode for the filesystem. This means
 * that when a directory is actually read/written (via read, write, or
 * readdir, or seek) we must use the vnode for the filesystem instead of
 * the vnode for the directory as would happen in ufs. This is to ensure we
 * retrieve the correct block from the buffer cache since the hash value is
 * based upon the vnode address and the desired block number.
 */

/*
 * Create a regular file. On entry the directory to contain the file being
 * created is locked. We must release before we return. We must also free
 * the pathname buffer pointed at by cnp->cn_pnbuf, always on error, or
 * only if the SAVESTART bit in cn_flags is clear on success.
 */
static int
msdosfs_create(ap)
	struct vop_create_args /* {
		struct vnode *a_dvp;
		struct vnode **a_vpp;
		struct componentname *a_cnp;
		struct vattr *a_vap;
	} */ *ap;
{
	struct componentname *cnp = ap->a_cnp;
	struct denode ndirent;
	struct denode *dep;
	struct denode *pdep = VTODE(ap->a_dvp);
	struct timespec ts;
	int error;

#ifdef MSDOSFS_DEBUG
	printf("msdosfs_create(cnp %p, vap %p)\n", cnp, ap->a_vap);
#endif

	/*
	 * If this is the root directory and there is no space left we
	 * can't do anything. This is because the root directory can not
	 * change size.
	 */
	if (pdep->de_StartCluster == MSDOSFSROOT
	    && pdep->de_fndoffset >= pdep->de_FileSize) {
		error = ENOSPC;
		goto bad;
	}

	/*
	 * Create a directory entry for the file, then call createde() to
	 * have it installed. NOTE: DOS files are always executable. We
	 * use the absence of the owner write bit to make the file
	 * readonly.
	 */
#ifdef DIAGNOSTIC
	if ((cnp->cn_flags & HASBUF) == 0)
		panic("msdosfs_create: no name");
#endif
	bzero(&ndirent, sizeof(ndirent));
	error = uniqdosname(pdep, cnp, ndirent.de_Name);
	if (error)
		goto bad;

	ndirent.de_Attributes = (ap->a_vap->va_mode & VWRITE) ?
				ATTR_ARCHIVE : ATTR_ARCHIVE | ATTR_READONLY;
	ndirent.de_LowerCase = 0;
	ndirent.de_StartCluster = 0;
	ndirent.de_FileSize = 0;
	ndirent.de_dev = pdep->de_dev;
	ndirent.de_devvp = pdep->de_devvp;
	ndirent.de_pmp = pdep->de_pmp;
	ndirent.de_flag = DE_ACCESS | DE_CREATE | DE_UPDATE;
	getnanotime(&ts);
	DETIMES(&ndirent, &ts, &ts, &ts);
	error = createde(&ndirent, pdep, &dep, cnp);
	if (error)
		goto bad;
	if ((cnp->cn_flags & SAVESTART) == 0)
		zfree(namei_zone, cnp->cn_pnbuf);
	*ap->a_vpp = DETOV(dep);
	return (0);

bad:
	zfree(namei_zone, cnp->cn_pnbuf);
	return (error);
}

static int
msdosfs_mknod(ap)
	struct vop_mknod_args /* {
		struct vnode *a_dvp;
		struct vnode **a_vpp;
		struct componentname *a_cnp;
		struct vattr *a_vap;
	} */ *ap;
{
	switch (ap->a_vap->va_type) {
	case VDIR:
		return (msdosfs_mkdir((struct vop_mkdir_args *)ap));
		break;

	case VREG:
		return (msdosfs_create((struct vop_create_args *)ap));
		break;

	default:
		zfree(namei_zone, ap->a_cnp->cn_pnbuf);
		return (EINVAL);
	}
	/* NOTREACHED */
}

static int
msdosfs_close(ap)
	struct vop_close_args /* {
		struct vnode *a_vp;
		int a_fflag;
		struct ucred *a_cred;
		struct proc *a_p;
	} */ *ap;
{
	struct vnode *vp = ap->a_vp;
	struct denode *dep = VTODE(vp);
	struct timespec ts;

	simple_lock(&vp->v_interlock);
	if (vp->v_usecount > 1) {
		getnanotime(&ts);
		DETIMES(dep, &ts, &ts, &ts);
	}
	simple_unlock(&vp->v_interlock);
	return 0;
}

static int
msdosfs_access(ap)
	struct vop_access_args /* {
		struct vnode *a_vp;
		int a_mode;
		struct ucred *a_cred;
		struct proc *a_p;
	} */ *ap;
{
	struct vnode *vp = ap->a_vp;
	struct denode *dep = VTODE(ap->a_vp);
	struct msdosfsmount *pmp = dep->de_pmp;
	struct ucred *cred = ap->a_cred;
	mode_t mask, file_mode, mode = ap->a_mode;
	register gid_t *gp;
	int i;

	file_mode = (S_IXUSR|S_IXGRP|S_IXOTH) | (S_IRUSR|S_IRGRP|S_IROTH) |
	    ((dep->de_Attributes & ATTR_READONLY) ?
	    0 : (S_IWUSR|S_IWGRP|S_IWOTH));
	file_mode &= pmp->pm_mask;

	/*
	 * Disallow write attempts on read-only file systems;
	 * unless the file is a socket, fifo, or a block or
	 * character device resident on the file system.
	 */
	if (mode & VWRITE) {
		switch (vp->v_type) {
		case VDIR:
		case VLNK:
		case VREG:
			if (vp->v_mount->mnt_flag & MNT_RDONLY)
				return (EROFS);
			break;
		default:
			break;
		}
	}

	/* User id 0 always gets access. */
	if (cred->cr_uid == 0)
		return 0;

	mask = 0;

	/* Otherwise, check the owner.
*/ if (cred->cr_uid == pmp->pm_uid) { if (mode & VEXEC) mask |= S_IXUSR; if (mode & VREAD) mask |= S_IRUSR; if (mode & VWRITE) mask |= S_IWUSR; return (file_mode & mask) == mask ? 0 : EACCES; } /* Otherwise, check the groups. */ for (i = 0, gp = cred->cr_groups; i < cred->cr_ngroups; i++, gp++) if (pmp->pm_gid == *gp) { if (mode & VEXEC) mask |= S_IXGRP; if (mode & VREAD) mask |= S_IRGRP; if (mode & VWRITE) mask |= S_IWGRP; return (file_mode & mask) == mask ? 0 : EACCES; } /* Otherwise, check everyone else. */ if (mode & VEXEC) mask |= S_IXOTH; if (mode & VREAD) mask |= S_IROTH; if (mode & VWRITE) mask |= S_IWOTH; return (file_mode & mask) == mask ? 0 : EACCES; } static int msdosfs_getattr(ap) struct vop_getattr_args /* { struct vnode *a_vp; struct vattr *a_vap; struct ucred *a_cred; struct proc *a_p; } */ *ap; { struct denode *dep = VTODE(ap->a_vp); struct msdosfsmount *pmp = dep->de_pmp; struct vattr *vap = ap->a_vap; mode_t mode; struct timespec ts; u_long dirsperblk = pmp->pm_BytesPerSec / sizeof(struct direntry); u_long fileid; getnanotime(&ts); DETIMES(dep, &ts, &ts, &ts); vap->va_fsid = dep->de_dev; /* * The following computation of the fileid must be the same as that * used in msdosfs_readdir() to compute d_fileno. If not, pwd * doesn't work. */ if (dep->de_Attributes & ATTR_DIRECTORY) { fileid = cntobn(pmp, dep->de_StartCluster) * dirsperblk; if (dep->de_StartCluster == MSDOSFSROOT) fileid = 1; } else { fileid = cntobn(pmp, dep->de_dirclust) * dirsperblk; if (dep->de_dirclust == MSDOSFSROOT) fileid = roottobn(pmp, 0) * dirsperblk; fileid += dep->de_diroffset / sizeof(struct direntry); } vap->va_fileid = fileid; if ((dep->de_Attributes & ATTR_READONLY) == 0) mode = S_IRWXU|S_IRWXG|S_IRWXO; else mode = S_IRUSR|S_IXUSR|S_IRGRP|S_IXGRP|S_IROTH|S_IXOTH; vap->va_mode = mode & pmp->pm_mask; vap->va_uid = pmp->pm_uid; vap->va_gid = pmp->pm_gid; vap->va_nlink = 1; vap->va_rdev = 0; vap->va_size = dep->de_FileSize; dos2unixtime(dep->de_MDate, dep->de_MTime, 0, &vap->va_mtime); if (pmp->pm_flags & MSDOSFSMNT_LONGNAME) { dos2unixtime(dep->de_ADate, 0, 0, &vap->va_atime); dos2unixtime(dep->de_CDate, dep->de_CTime, dep->de_CHun, &vap->va_ctime); } else { vap->va_atime = vap->va_mtime; vap->va_ctime = vap->va_mtime; } vap->va_flags = 0; if ((dep->de_Attributes & ATTR_ARCHIVE) == 0) vap->va_flags |= SF_ARCHIVED; vap->va_gen = 0; vap->va_blocksize = pmp->pm_bpcluster; vap->va_bytes = (dep->de_FileSize + pmp->pm_crbomask) & ~pmp->pm_crbomask; vap->va_type = ap->a_vp->v_type; vap->va_filerev = dep->de_modrev; return (0); } static int msdosfs_setattr(ap) struct vop_setattr_args /* { struct vnode *a_vp; struct vattr *a_vap; struct ucred *a_cred; struct proc *a_p; } */ *ap; { struct vnode *vp = ap->a_vp; struct denode *dep = VTODE(ap->a_vp); struct msdosfsmount *pmp = dep->de_pmp; struct vattr *vap = ap->a_vap; struct ucred *cred = ap->a_cred; int error = 0; #ifdef MSDOSFS_DEBUG printf("msdosfs_setattr(): vp %p, vap %p, cred %p, p %p\n", ap->a_vp, vap, cred, ap->a_p); #endif /* * Check for unsettable attributes. 
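	 * (These are fields such as va_nlink or va_gen that have no backing
	 * store in a DOS directory entry; the code below rejects any attempt
	 * to set them with EINVAL.)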
	 */
	if ((vap->va_type != VNON) || (vap->va_nlink != VNOVAL) ||
	    (vap->va_fsid != VNOVAL) || (vap->va_fileid != VNOVAL) ||
	    (vap->va_blocksize != VNOVAL) || (vap->va_rdev != VNOVAL) ||
	    (vap->va_bytes != VNOVAL) || (vap->va_gen != VNOVAL)) {
#ifdef MSDOSFS_DEBUG
		printf("msdosfs_setattr(): returning EINVAL\n");
		printf("    va_type %d, va_nlink %x, va_fsid %lx, va_fileid %lx\n",
		    vap->va_type, vap->va_nlink, vap->va_fsid, vap->va_fileid);
		printf("    va_blocksize %lx, va_rdev %x, va_bytes %qx, va_gen %lx\n",
		    vap->va_blocksize, vap->va_rdev, vap->va_bytes, vap->va_gen);
		printf("    va_uid %x, va_gid %x\n",
		    vap->va_uid, vap->va_gid);
#endif
		return (EINVAL);
	}
	if (vap->va_flags != VNOVAL) {
		if (vp->v_mount->mnt_flag & MNT_RDONLY)
			return (EROFS);
		if (cred->cr_uid != pmp->pm_uid &&
		    (error = suser(cred, &ap->a_p->p_acflag)))
			return (error);
		/*
		 * We are very inconsistent about handling unsupported
		 * attributes. We ignored the access time and the
		 * read and execute bits. We were strict for the other
		 * attributes.
		 *
		 * Here we are strict, stricter than ufs in not allowing
		 * users to attempt to set SF_SETTABLE bits or anyone to
		 * set unsupported bits. However, we ignore attempts to
		 * set ATTR_ARCHIVE for directories; `cp -pr' from a more
		 * sensible file system attempts it a lot.
		 */
		if (cred->cr_uid != 0) {
			if (vap->va_flags & SF_SETTABLE)
				return EPERM;
		}
		if (vap->va_flags & ~SF_ARCHIVED)
			return EOPNOTSUPP;
		if (vap->va_flags & SF_ARCHIVED)
			dep->de_Attributes &= ~ATTR_ARCHIVE;
		else if (!(dep->de_Attributes & ATTR_DIRECTORY))
			dep->de_Attributes |= ATTR_ARCHIVE;
		dep->de_flag |= DE_MODIFIED;
	}

	if (vap->va_uid != (uid_t)VNOVAL || vap->va_gid != (gid_t)VNOVAL) {
		uid_t uid;
		gid_t gid;

		if (vp->v_mount->mnt_flag & MNT_RDONLY)
			return (EROFS);
		uid = vap->va_uid;
		if (uid == (uid_t)VNOVAL)
			uid = pmp->pm_uid;
		gid = vap->va_gid;
		if (gid == (gid_t)VNOVAL)
			gid = pmp->pm_gid;
		if ((cred->cr_uid != pmp->pm_uid || uid != pmp->pm_uid ||
		    (gid != pmp->pm_gid && !groupmember(gid, cred))) &&
		    (error = suser(cred, &ap->a_p->p_acflag)))
			return error;
		if (uid != pmp->pm_uid || gid != pmp->pm_gid)
			return EINVAL;
	}

	if (vap->va_size != VNOVAL) {
		/*
		 * Disallow write attempts on read-only file systems;
		 * unless the file is a socket, fifo, or a block or
		 * character device resident on the file system.
		 */
		switch (vp->v_type) {
		case VDIR:
			return (EISDIR);

		case VLNK:
		case VREG:
			if (vp->v_mount->mnt_flag & MNT_RDONLY)
				return (EROFS);
			break;
		default:
			break;
		}
		error = detrunc(dep, vap->va_size, 0, cred, ap->a_p);
		if (error)
			return error;
	}
	if (vap->va_atime.tv_sec != VNOVAL || vap->va_mtime.tv_sec != VNOVAL) {
		if (vp->v_mount->mnt_flag & MNT_RDONLY)
			return (EROFS);
		if (cred->cr_uid != pmp->pm_uid &&
		    (error = suser(cred, &ap->a_p->p_acflag)) &&
		    ((vap->va_vaflags & VA_UTIMES_NULL) == 0 ||
		    (error = VOP_ACCESS(ap->a_vp, VWRITE, cred, ap->a_p))))
			return (error);
		if (vp->v_type != VDIR) {
			if ((pmp->pm_flags & MSDOSFSMNT_NOWIN95) == 0 &&
			    vap->va_atime.tv_sec != VNOVAL)
				unix2dostime(&vap->va_atime, &dep->de_ADate,
				    NULL, NULL);
			if (vap->va_mtime.tv_sec != VNOVAL)
				unix2dostime(&vap->va_mtime, &dep->de_MDate,
				    &dep->de_MTime, NULL);
			dep->de_Attributes |= ATTR_ARCHIVE;
			dep->de_flag |= DE_MODIFIED;
		}
	}
	/*
	 * DOS files only have the ability to have their writability
	 * attribute set, so we use the owner write bit to set the readonly
	 * attribute.
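	 * For example, a `chmod a-w' on an msdosfs file ends up setting
	 * ATTR_READONLY below, and `chmod u+w' clears it again; the read
	 * and execute bits requested by the caller are simply ignored.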
*/ if (vap->va_mode != (mode_t)VNOVAL) { if (vp->v_mount->mnt_flag & MNT_RDONLY) return (EROFS); if (cred->cr_uid != pmp->pm_uid && (error = suser(cred, &ap->a_p->p_acflag))) return (error); if (vp->v_type != VDIR) { /* We ignore the read and execute bits. */ if (vap->va_mode & VWRITE) dep->de_Attributes &= ~ATTR_READONLY; else dep->de_Attributes |= ATTR_READONLY; dep->de_flag |= DE_MODIFIED; } } return (deupdat(dep, 1)); } static int msdosfs_read(ap) struct vop_read_args /* { struct vnode *a_vp; struct uio *a_uio; int a_ioflag; struct ucred *a_cred; } */ *ap; { int error = 0; int diff; int blsize; int isadir; long n; long on; daddr_t lbn; daddr_t rablock; int rasize; struct buf *bp; struct vnode *vp = ap->a_vp; struct denode *dep = VTODE(vp); struct msdosfsmount *pmp = dep->de_pmp; struct uio *uio = ap->a_uio; /* * If they didn't ask for any data, then we are done. */ if (uio->uio_resid == 0) return (0); if (uio->uio_offset < 0) return (EINVAL); isadir = dep->de_Attributes & ATTR_DIRECTORY; do { lbn = de_cluster(pmp, uio->uio_offset); on = uio->uio_offset & pmp->pm_crbomask; n = min((u_long) (pmp->pm_bpcluster - on), uio->uio_resid); diff = dep->de_FileSize - uio->uio_offset; if (diff <= 0) return (0); if (diff < n) n = diff; /* convert cluster # to block # if a directory */ if (isadir) { error = pcbmap(dep, lbn, &lbn, 0, &blsize); if (error) return (error); } /* * If we are operating on a directory file then be sure to * do i/o with the vnode for the filesystem instead of the * vnode for the directory. */ if (isadir) { error = bread(pmp->pm_devvp, lbn, blsize, NOCRED, &bp); } else { rablock = lbn + 1; if (vp->v_lastr + 1 == lbn && de_cn2off(pmp, rablock) < dep->de_FileSize) { rasize = pmp->pm_bpcluster; error = breadn(vp, lbn, pmp->pm_bpcluster, &rablock, &rasize, 1, NOCRED, &bp); } else error = bread(vp, lbn, pmp->pm_bpcluster, NOCRED, &bp); vp->v_lastr = lbn; } n = min(n, pmp->pm_bpcluster - bp->b_resid); if (error) { brelse(bp); return (error); } error = uiomove(bp->b_data + on, (int) n, uio); brelse(bp); } while (error == 0 && uio->uio_resid > 0 && n != 0); if (!isadir && !(vp->v_mount->mnt_flag & MNT_NOATIME)) dep->de_flag |= DE_ACCESS; return (error); } /* * Write data to a file or directory. */ static int msdosfs_write(ap) struct vop_write_args /* { struct vnode *a_vp; struct uio *a_uio; int a_ioflag; struct ucred *a_cred; } */ *ap; { int n; int croffset; int resid; u_long osize; int error = 0; u_long count; daddr_t bn, lastcn; struct buf *bp; int ioflag = ap->a_ioflag; struct uio *uio = ap->a_uio; struct proc *p = uio->uio_procp; struct vnode *vp = ap->a_vp; struct vnode *thisvp; struct denode *dep = VTODE(vp); struct msdosfsmount *pmp = dep->de_pmp; struct ucred *cred = ap->a_cred; #ifdef MSDOSFS_DEBUG printf("msdosfs_write(vp %p, uio %p, ioflag %x, cred %p\n", vp, uio, ioflag, cred); printf("msdosfs_write(): diroff %lu, dirclust %lu, startcluster %lu\n", dep->de_diroffset, dep->de_dirclust, dep->de_StartCluster); #endif switch (vp->v_type) { case VREG: if (ioflag & IO_APPEND) uio->uio_offset = dep->de_FileSize; thisvp = vp; break; case VDIR: return EISDIR; default: panic("msdosfs_write(): bad file type"); } if (uio->uio_offset < 0) return (EINVAL); if (uio->uio_resid == 0) return (0); /* * If they've exceeded their filesize limit, tell them about it. 
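	 * (As in ufs, the process is sent SIGXFSZ and the write fails with
	 * EFBIG once the resulting size would exceed the RLIMIT_FSIZE limit.)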
	 */
	if (p &&
	    ((uio->uio_offset + uio->uio_resid) >
	    p->p_rlimit[RLIMIT_FSIZE].rlim_cur)) {
		psignal(p, SIGXFSZ);
		return (EFBIG);
	}

	/*
	 * If the offset we are starting the write at is beyond the end of
	 * the file, then they've done a seek. Unix filesystems allow
	 * files with holes in them, DOS doesn't so we must fill the hole
	 * with zeroed blocks.
	 */
	if (uio->uio_offset > dep->de_FileSize) {
		error = deextend(dep, uio->uio_offset, cred);
		if (error)
			return (error);
	}

	/*
	 * Remember some values in case the write fails.
	 */
	resid = uio->uio_resid;
	osize = dep->de_FileSize;

	/*
	 * If we write beyond the end of the file, extend it to its ultimate
	 * size ahead of time to hopefully get a contiguous area.
	 */
	if (uio->uio_offset + resid > osize) {
		count = de_clcount(pmp, uio->uio_offset + resid) -
			de_clcount(pmp, osize);
		error = extendfile(dep, count, NULL, NULL, 0);
		if (error && (error != ENOSPC || (ioflag & IO_UNIT)))
			goto errexit;
		lastcn = dep->de_fc[FC_LASTFC].fc_frcn;
	} else
		lastcn = de_clcount(pmp, osize) - 1;

	do {
		if (de_cluster(pmp, uio->uio_offset) > lastcn) {
			error = ENOSPC;
			break;
		}

		croffset = uio->uio_offset & pmp->pm_crbomask;
		n = min(uio->uio_resid, pmp->pm_bpcluster - croffset);
		if (uio->uio_offset + n > dep->de_FileSize) {
			dep->de_FileSize = uio->uio_offset + n;
			/* The object size needs to be set before buffer is allocated */
			vnode_pager_setsize(vp, dep->de_FileSize);
		}

		bn = de_cluster(pmp, uio->uio_offset);
		if ((uio->uio_offset & pmp->pm_crbomask) == 0
		    && (de_cluster(pmp, uio->uio_offset + uio->uio_resid)
			> de_cluster(pmp, uio->uio_offset)
			|| uio->uio_offset + uio->uio_resid >= dep->de_FileSize)) {
			/*
			 * If either the whole cluster gets written,
			 * or we write the cluster from its start beyond EOF,
			 * then no need to read data from disk.
			 */
			bp = getblk(thisvp, bn, pmp->pm_bpcluster, 0, 0);
			clrbuf(bp);
			/*
			 * Do the bmap now, since pcbmap needs buffers
			 * for the fat table. (see msdosfs_strategy)
			 */
			if (bp->b_blkno == bp->b_lblkno) {
				error = pcbmap(dep, bp->b_lblkno,
					       &bp->b_blkno, 0, 0);
				if (error)
					bp->b_blkno = -1;
			}
			if (bp->b_blkno == -1) {
				brelse(bp);
				if (!error)
					error = EIO;		/* XXX */
				break;
			}
		} else {
			/*
			 * The block we need to write into exists, so read it in.
			 */
			error = bread(thisvp, bn, pmp->pm_bpcluster, cred, &bp);
			if (error) {
				brelse(bp);
				break;
			}
		}

		/*
		 * Should these vnode_pager_* functions be done on dir
		 * files?
		 */

		/*
		 * Copy the data from user space into the buf header.
		 */
		error = uiomove(bp->b_data + croffset, n, uio);

		/*
		 * If they want this synchronous then write it and wait for
		 * it. Otherwise, if on a cluster boundary write it
		 * asynchronously so we can move on to the next block
		 * without delay. Otherwise do a delayed write because we
		 * may want to write some more into the block later.
		 */
		if (ioflag & IO_SYNC)
			(void) bwrite(bp);
		else if (n + croffset == pmp->pm_bpcluster)
			bawrite(bp);
		else
			bdwrite(bp);
		dep->de_flag |= DE_UPDATE;
	} while (error == 0 && uio->uio_resid > 0);

	/*
	 * If the write failed and they want us to, truncate the file back
	 * to the size it was before the write was attempted.
	 */
errexit:
	if (error) {
		if (ioflag & IO_UNIT) {
			detrunc(dep, osize, ioflag & IO_SYNC, NOCRED, NULL);
			uio->uio_offset -= resid - uio->uio_resid;
			uio->uio_resid = resid;
		} else {
			detrunc(dep, dep->de_FileSize, ioflag & IO_SYNC,
			    NOCRED, NULL);
			if (uio->uio_resid != resid)
				error = 0;
		}
	} else if (ioflag & IO_SYNC)
		error = deupdat(dep, 1);
	return (error);
}

/*
 * Flush the blocks of a file to disk.
 *
 * This function is worthless for vnodes that represent directories.
 * Maybe we
 * could just do a sync if they try an fsync on a directory file.
 */
static int
msdosfs_fsync(ap)
	struct vop_fsync_args /* {
		struct vnode *a_vp;
		struct ucred *a_cred;
		int a_waitfor;
		struct proc *a_p;
	} */ *ap;
{
	struct vnode *vp = ap->a_vp;
	int s;
	struct buf *bp, *nbp;

	/*
	 * Flush all dirty buffers associated with a vnode.
	 */
loop:
	s = splbio();
	for (bp = TAILQ_FIRST(&vp->v_dirtyblkhd); bp; bp = nbp) {
		nbp = TAILQ_NEXT(bp, b_vnbufs);
		if ((bp->b_flags & B_BUSY))
			continue;
		if ((bp->b_flags & B_DELWRI) == 0)
			panic("msdosfs_fsync: not dirty");
		bremfree(bp);
		bp->b_flags |= B_BUSY;
		splx(s);
		(void) bwrite(bp);
		goto loop;
	}
	while (vp->v_numoutput) {
		vp->v_flag |= VBWAIT;
		(void) tsleep((caddr_t)&vp->v_numoutput, PRIBIO + 1,
			      "msdosfsn", 0);
	}
#ifdef DIAGNOSTIC
	if (!TAILQ_EMPTY(&vp->v_dirtyblkhd)) {
		vprint("msdosfs_fsync: dirty", vp);
		goto loop;
	}
#endif
	splx(s);
	return (deupdat(VTODE(vp), ap->a_waitfor == MNT_WAIT));
}

static int
msdosfs_remove(ap)
	struct vop_remove_args /* {
		struct vnode *a_dvp;
		struct vnode *a_vp;
		struct componentname *a_cnp;
	} */ *ap;
{
	struct denode *dep = VTODE(ap->a_vp);
	struct denode *ddep = VTODE(ap->a_dvp);
	int error;

	if (ap->a_vp->v_type == VDIR)
		error = EPERM;
	else
		error = removede(ddep, dep);
#ifdef MSDOSFS_DEBUG
	printf("msdosfs_remove(), dep %p, v_usecount %d\n",
	    dep, ap->a_vp->v_usecount);
#endif
	return (error);
}

/*
 * DOS filesystems don't know what links are. But since we already called
 * msdosfs_lookup() with create and lockparent, the parent is locked so we
 * have to free it before we return the error.
 */
static int
msdosfs_link(ap)
	struct vop_link_args /* {
		struct vnode *a_tdvp;
		struct vnode *a_vp;
		struct componentname *a_cnp;
	} */ *ap;
{
	VOP_ABORTOP(ap->a_tdvp, ap->a_cnp);
	return (EOPNOTSUPP);
}

/*
 * Renames on files require moving the denode to a new hash queue since the
 * denode's location is used to compute which hash queue to put the file
 * in. Unless it is a rename in place. For example "mv a b".
 *
 * What follows is the basic algorithm:
 *
 * if (file move) {
 *	if (dest file exists) {
 *		remove dest file
 *	}
 *	if (dest and src in same directory) {
 *		rewrite name in existing directory slot
 *	} else {
 *		write new entry in dest directory
 *		update offset and dirclust in denode
 *		move denode to new hash chain
 *		clear old directory entry
 *	}
 * } else {
 *	directory move
 *	if (dest directory exists) {
 *		if (dest is not empty) {
 *			return ENOTEMPTY
 *		}
 *		remove dest directory
 *	}
 *	if (dest and src in same directory) {
 *		rewrite name in existing entry
 *	} else {
 *		be sure dest is not a child of src directory
 *		write entry in dest directory
 *		update "." and ".." in moved directory
 *		clear old directory entry for moved directory
 *	}
 * }
 *
 * On entry:
 *	source's parent directory is unlocked
 *	source file or directory is unlocked
 *	destination's parent directory is locked
 *	destination file or directory is locked if it exists
 *
 * On exit:
 *	all denodes should be released
 *
 * Notes:
 * I'm not sure how the memory containing the pathnames pointed at by the
 * componentname structures is freed; there may be some memory bleeding
 * for each rename done.
 */
static int
msdosfs_rename(ap)
	struct vop_rename_args /* {
		struct vnode *a_fdvp;
		struct vnode *a_fvp;
		struct componentname *a_fcnp;
		struct vnode *a_tdvp;
		struct vnode *a_tvp;
		struct componentname *a_tcnp;
	} */ *ap;
{
	struct vnode *tdvp = ap->a_tdvp;
	struct vnode *fvp = ap->a_fvp;
	struct vnode *fdvp = ap->a_fdvp;
	struct vnode *tvp = ap->a_tvp;
	struct componentname *tcnp = ap->a_tcnp;
	struct componentname *fcnp = ap->a_fcnp;
	struct proc *p = fcnp->cn_proc;
	struct denode *ip, *xp, *dp, *zp;
	u_char toname[11], oldname[11];
	u_long from_diroffset, to_diroffset;
	u_char to_count;
	int doingdirectory = 0, newparent = 0;
	int error;
	u_long cn;
	daddr_t bn;
	struct denode *fddep;	/* from file's parent directory */
	struct denode *fdep;	/* from file or directory */
	struct denode *tddep;	/* to file's parent directory */
	struct denode *tdep;	/* to file or directory */
	struct msdosfsmount *pmp;
	struct direntry *dotdotp;
	struct buf *bp;

	fddep = VTODE(ap->a_fdvp);
	fdep = VTODE(ap->a_fvp);
	tddep = VTODE(ap->a_tdvp);
	tdep = tvp ? VTODE(tvp) : NULL;
	pmp = fddep->de_pmp;
	pmp = VFSTOMSDOSFS(fdvp->v_mount);

#ifdef DIAGNOSTIC
	if ((tcnp->cn_flags & HASBUF) == 0 ||
	    (fcnp->cn_flags & HASBUF) == 0)
		panic("msdosfs_rename: no name");
#endif
	/*
	 * Check for cross-device rename.
	 */
	if ((fvp->v_mount != tdvp->v_mount) ||
	    (tvp && (fvp->v_mount != tvp->v_mount))) {
		error = EXDEV;
abortit:
		VOP_ABORTOP(tdvp, tcnp);
		if (tdvp == tvp)
			vrele(tdvp);
		else
			vput(tdvp);
		if (tvp)
			vput(tvp);
		VOP_ABORTOP(fdvp, fcnp);
		vrele(fdvp);
		vrele(fvp);
		return (error);
	}

	/*
	 * If source and dest are the same, do nothing.
	 */
	if (tvp == fvp) {
		error = 0;
		goto abortit;
	}

	error = vn_lock(fvp, LK_EXCLUSIVE, p);
	if (error)
		goto abortit;
	dp = VTODE(fdvp);
	ip = VTODE(fvp);

	/*
	 * Be sure we are not renaming ".", "..", or an alias of ".". This
	 * leads to a crippled directory tree. It's pretty tough to do a
	 * "ls" or "pwd" with the "." directory entry missing, and "cd .."
	 * doesn't work if the ".." entry is missing.
	 */
	if (ip->de_Attributes & ATTR_DIRECTORY) {
		/*
		 * Avoid ".", "..", and aliases of "." for obvious reasons.
		 */
		if ((fcnp->cn_namelen == 1 && fcnp->cn_nameptr[0] == '.') ||
		    dp == ip ||
		    (fcnp->cn_flags & ISDOTDOT) ||
		    (tcnp->cn_flags & ISDOTDOT) ||
		    (ip->de_flag & DE_RENAME)) {
			VOP_UNLOCK(fvp, 0, p);
			error = EINVAL;
			goto abortit;
		}
		ip->de_flag |= DE_RENAME;
		doingdirectory++;
	}

	/*
	 * When the target exists, both the directory
	 * and target vnodes are returned locked.
	 */
	dp = VTODE(tdvp);
	xp = tvp ? VTODE(tvp) : NULL;
	/*
	 * Remember direntry place to use for destination
	 */
	to_diroffset = dp->de_fndoffset;
	to_count = dp->de_fndcnt;

	/*
	 * If ".." must be changed (i.e. the directory gets a new
	 * parent) then the source directory must not be in the
	 * directory hierarchy above the target, as this would
	 * orphan everything below the source directory. Also
	 * the user must have write permission in the source so
	 * as to be able to change "..". We must repeat the call
	 * to namei, as the parent directory is unlocked by the
	 * call to doscheckpath().
*/ error = VOP_ACCESS(fvp, VWRITE, tcnp->cn_cred, tcnp->cn_proc); VOP_UNLOCK(fvp, 0, p); if (VTODE(fdvp)->de_StartCluster != VTODE(tdvp)->de_StartCluster) newparent = 1; vrele(fdvp); if (doingdirectory && newparent) { if (error) /* write access check above */ goto bad; if (xp != NULL) vput(tvp); /* * doscheckpath() vput()'s dp, * so we have to do a relookup afterwards */ error = doscheckpath(ip, dp); if (error) goto out; if ((tcnp->cn_flags & SAVESTART) == 0) panic("msdosfs_rename: lost to startdir"); error = relookup(tdvp, &tvp, tcnp); if (error) goto out; dp = VTODE(tdvp); xp = tvp ? VTODE(tvp) : NULL; } if (xp != NULL) { /* * Target must be empty if a directory and have no links * to it. Also, ensure source and target are compatible * (both directories, or both not directories). */ if (xp->de_Attributes & ATTR_DIRECTORY) { if (!dosdirempty(xp)) { error = ENOTEMPTY; goto bad; } if (!doingdirectory) { error = ENOTDIR; goto bad; } cache_purge(tdvp); } else if (doingdirectory) { error = EISDIR; goto bad; } error = removede(dp, xp); if (error) goto bad; vput(tvp); xp = NULL; } /* * Convert the filename in tcnp into a dos filename. We copy this * into the denode and directory entry for the destination * file/directory. */ error = uniqdosname(VTODE(tdvp), tcnp, toname); if (error) goto abortit; /* * Since from wasn't locked at various places above, * have to do a relookup here. */ fcnp->cn_flags &= ~MODMASK; fcnp->cn_flags |= LOCKPARENT | LOCKLEAF; if ((fcnp->cn_flags & SAVESTART) == 0) panic("msdosfs_rename: lost from startdir"); if (!newparent) VOP_UNLOCK(tdvp, 0, p); (void) relookup(fdvp, &fvp, fcnp); if (fvp == NULL) { /* * From name has disappeared. */ if (doingdirectory) panic("rename: lost dir entry"); vrele(ap->a_fvp); if (newparent) VOP_UNLOCK(tdvp, 0, p); vrele(tdvp); return 0; } xp = VTODE(fvp); zp = VTODE(fdvp); from_diroffset = zp->de_fndoffset; /* * Ensure that the directory entry still exists and has not * changed till now. If the source is a file the entry may * have been unlinked or renamed. In either case there is * no further work to be done. If the source is a directory * then it cannot have been rmdir'ed or renamed; this is * prohibited by the DE_RENAME flag. */ if (xp != ip) { if (doingdirectory) panic("rename: lost dir entry"); vrele(ap->a_fvp); VOP_UNLOCK(fvp, 0, p); if (newparent) VOP_UNLOCK(fdvp, 0, p); xp = NULL; } else { vrele(fvp); xp = NULL; /* * First write a new entry in the destination * directory and mark the entry in the source directory * as deleted. Then move the denode to the correct hash * chain for its new location in the filesystem. And, if * we moved a directory, then update its .. entry to point * to the new parent directory. 
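		 * (The ".." fixup itself happens at the tail of this
		 * function: the second direntry in the moved directory's
		 * first cluster is rewritten with the new parent's start
		 * cluster, including the high 16 bits on FAT32.)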
		 */
		bcopy(ip->de_Name, oldname, 11);
		bcopy(toname, ip->de_Name, 11);	/* update denode */
		dp->de_fndoffset = to_diroffset;
		dp->de_fndcnt = to_count;
		error = createde(ip, dp, (struct denode **)0, tcnp);
		if (error) {
			bcopy(oldname, ip->de_Name, 11);
			if (newparent)
				VOP_UNLOCK(fdvp, 0, p);
			VOP_UNLOCK(fvp, 0, p);
			goto bad;
		}
		ip->de_refcnt++;
		zp->de_fndoffset = from_diroffset;
		error = removede(zp, ip);
		if (error) {
			/* XXX should really panic here, fs is corrupt */
			if (newparent)
				VOP_UNLOCK(fdvp, 0, p);
			VOP_UNLOCK(fvp, 0, p);
			goto bad;
		}
		if (!doingdirectory) {
			error = pcbmap(dp, de_cluster(pmp, to_diroffset), 0,
				       &ip->de_dirclust, 0);
			if (error) {
				/* XXX should really panic here, fs is corrupt */
				if (newparent)
					VOP_UNLOCK(fdvp, 0, p);
				VOP_UNLOCK(fvp, 0, p);
				goto bad;
			}
			if (ip->de_dirclust == MSDOSFSROOT)
				ip->de_diroffset = to_diroffset;
			else
				ip->de_diroffset = to_diroffset & pmp->pm_crbomask;
		}
		reinsert(ip);
		if (newparent)
			VOP_UNLOCK(fdvp, 0, p);
	}

	/*
	 * If we moved a directory to a new parent directory, then we must
	 * fixup the ".." entry in the moved directory.
	 */
	if (doingdirectory && newparent) {
		cn = ip->de_StartCluster;
		if (cn == MSDOSFSROOT) {
			/* this should never happen */
			panic("msdosfs_rename(): updating .. in root directory?");
		} else
			bn = cntobn(pmp, cn);
		error = bread(pmp->pm_devvp, bn, pmp->pm_bpcluster,
			      NOCRED, &bp);
		if (error) {
			/* XXX should really panic here, fs is corrupt */
			brelse(bp);
			VOP_UNLOCK(fvp, 0, p);
			goto bad;
		}
		dotdotp = (struct direntry *)bp->b_data + 1;
		putushort(dotdotp->deStartCluster, dp->de_StartCluster);
		if (FAT32(pmp))
			putushort(dotdotp->deHighClust, dp->de_StartCluster >> 16);
		error = bwrite(bp);
		if (error) {
			/* XXX should really panic here, fs is corrupt */
			VOP_UNLOCK(fvp, 0, p);
			goto bad;
		}
	}

	VOP_UNLOCK(fvp, 0, p);
bad:
	if (xp)
		vput(tvp);
	vput(tdvp);
out:
	ip->de_flag &= ~DE_RENAME;
	vrele(fdvp);
	vrele(fvp);
	return (error);

}

static struct {
	struct direntry dot;
	struct direntry dotdot;
} dosdirtemplate = {
	{	".       ", "   ",		/* the . entry */
		ATTR_DIRECTORY,			/* file attribute */
		0,				/* reserved */
		0, { 0, 0 }, { 0, 0 },		/* create time & date */
		{ 0, 0 },			/* access date */
		{ 0, 0 },			/* high bits of start cluster */
		{ 210, 4 }, { 210, 4 },		/* modify time & date */
		{ 0, 0 },			/* startcluster */
		{ 0, 0, 0, 0 }			/* filesize */
	},
	{	"..      ", "   ",		/* the .. entry */
		ATTR_DIRECTORY,			/* file attribute */
		0,				/* reserved */
		0, { 0, 0 }, { 0, 0 },		/* create time & date */
		{ 0, 0 },			/* access date */
		{ 0, 0 },			/* high bits of start cluster */
		{ 210, 4 }, { 210, 4 },		/* modify time & date */
		{ 0, 0 },			/* startcluster */
		{ 0, 0, 0, 0 }			/* filesize */
	}
};

static int
msdosfs_mkdir(ap)
	struct vop_mkdir_args /* {
		struct vnode *a_dvp;
		struct vnode **a_vpp;
		struct componentname *a_cnp;
		struct vattr *a_vap;
	} */ *ap;
{
	struct componentname *cnp = ap->a_cnp;
	struct denode *dep;
	struct denode *pdep = VTODE(ap->a_dvp);
	struct direntry *denp;
	struct msdosfsmount *pmp = pdep->de_pmp;
	struct buf *bp;
	u_long newcluster, pcl;
	int bn;
	int error;
	struct denode ndirent;
	struct timespec ts;

	/*
	 * If this is the root directory and there is no space left we
	 * can't do anything. This is because the root directory can not
	 * change size.
	 */
	if (pdep->de_StartCluster == MSDOSFSROOT
	    && pdep->de_fndoffset >= pdep->de_FileSize) {
		error = ENOSPC;
		goto bad2;
	}

	/*
	 * Allocate a cluster to hold the about to be created directory.
*/ error = clusteralloc(pmp, 0, 1, CLUST_EOFE, &newcluster, NULL); if (error) goto bad2; bzero(&ndirent, sizeof(ndirent)); ndirent.de_pmp = pmp; ndirent.de_flag = DE_ACCESS | DE_CREATE | DE_UPDATE; getnanotime(&ts); DETIMES(&ndirent, &ts, &ts, &ts); /* * Now fill the cluster with the "." and ".." entries. And write * the cluster to disk. This way it is there for the parent * directory to be pointing at if there were a crash. */ bn = cntobn(pmp, newcluster); /* always succeeds */ bp = getblk(pmp->pm_devvp, bn, pmp->pm_bpcluster, 0, 0); bzero(bp->b_data, pmp->pm_bpcluster); bcopy(&dosdirtemplate, bp->b_data, sizeof dosdirtemplate); denp = (struct direntry *)bp->b_data; putushort(denp[0].deStartCluster, newcluster); putushort(denp[0].deCDate, ndirent.de_CDate); putushort(denp[0].deCTime, ndirent.de_CTime); denp[0].deCHundredth = ndirent.de_CHun; putushort(denp[0].deADate, ndirent.de_ADate); putushort(denp[0].deMDate, ndirent.de_MDate); putushort(denp[0].deMTime, ndirent.de_MTime); pcl = pdep->de_StartCluster; if (FAT32(pmp) && pcl == pmp->pm_rootdirblk) pcl = 0; putushort(denp[1].deStartCluster, pcl); putushort(denp[1].deCDate, ndirent.de_CDate); putushort(denp[1].deCTime, ndirent.de_CTime); denp[1].deCHundredth = ndirent.de_CHun; putushort(denp[1].deADate, ndirent.de_ADate); putushort(denp[1].deMDate, ndirent.de_MDate); putushort(denp[1].deMTime, ndirent.de_MTime); if (FAT32(pmp)) { putushort(denp[0].deHighClust, newcluster >> 16); putushort(denp[1].deHighClust, pdep->de_StartCluster >> 16); } error = bwrite(bp); if (error) goto bad; /* * Now build up a directory entry pointing to the newly allocated * cluster. This will be written to an empty slot in the parent * directory. */ #ifdef DIAGNOSTIC if ((cnp->cn_flags & HASBUF) == 0) panic("msdosfs_mkdir: no name"); #endif error = uniqdosname(pdep, cnp, ndirent.de_Name); if (error) goto bad; ndirent.de_Attributes = ATTR_DIRECTORY; ndirent.de_LowerCase = 0; ndirent.de_StartCluster = newcluster; ndirent.de_FileSize = 0; ndirent.de_dev = pdep->de_dev; ndirent.de_devvp = pdep->de_devvp; error = createde(&ndirent, pdep, &dep, cnp); if (error) goto bad; if ((cnp->cn_flags & SAVESTART) == 0) zfree(namei_zone, cnp->cn_pnbuf); *ap->a_vpp = DETOV(dep); return (0); bad: clusterfree(pmp, newcluster, NULL); bad2: zfree(namei_zone, cnp->cn_pnbuf); return (error); } static int msdosfs_rmdir(ap) struct vop_rmdir_args /* { struct vnode *a_dvp; struct vnode *a_vp; struct componentname *a_cnp; } */ *ap; { register struct vnode *vp = ap->a_vp; register struct vnode *dvp = ap->a_dvp; register struct componentname *cnp = ap->a_cnp; register struct denode *ip, *dp; struct proc *p = cnp->cn_proc; int error; ip = VTODE(vp); dp = VTODE(dvp); /* * Verify the directory is empty (and valid). * (Rmdir ".." won't be valid since * ".." will contain a reference to * the current directory and thus be * non-empty.) */ error = 0; if (!dosdirempty(ip) || ip->de_flag & DE_RENAME) { error = ENOTEMPTY; goto out; } /* * Delete the entry from the directory. For dos filesystems this * gets rid of the directory entry on disk, the in memory copy * still exists but the de_refcnt is <= 0. This prevents it from * being found by deget(). When the vput() on dep is done we give * up access and eventually msdosfs_reclaim() will be called which * will remove it from the denode cache. */ error = removede(dp, ip); if (error) goto out; /* * This is where we decrement the link count in the parent * directory. Since dos filesystems don't do this we just purge * the name cache. 
	 */
	cache_purge(dvp);
	VOP_UNLOCK(dvp, 0, p);
	/*
	 * Truncate the directory that is being deleted.
	 */
	error = detrunc(ip, (u_long)0, IO_SYNC, cnp->cn_cred, p);
	cache_purge(vp);
	vn_lock(dvp, LK_EXCLUSIVE | LK_RETRY, p);
out:
	return (error);
}

/*
 * DOS filesystems don't know what symlinks are.
 */
static int
msdosfs_symlink(ap)
	struct vop_symlink_args /* {
		struct vnode *a_dvp;
		struct vnode **a_vpp;
		struct componentname *a_cnp;
		struct vattr *a_vap;
		char *a_target;
	} */ *ap;
{
	zfree(namei_zone, ap->a_cnp->cn_pnbuf);
	/* VOP_ABORTOP(ap->a_dvp, ap->a_cnp); ??? */
	return (EOPNOTSUPP);
}

static int
msdosfs_readdir(ap)
	struct vop_readdir_args /* {
		struct vnode *a_vp;
		struct uio *a_uio;
		struct ucred *a_cred;
		int *a_eofflag;
		int *a_ncookies;
		u_long **a_cookies;
	} */ *ap;
{
	int error = 0;
	int diff;
	long n;
	int blsize;
	long on;
	u_long cn;
	u_long fileno;
	u_long dirsperblk;
	long bias = 0;
	daddr_t bn, lbn;
	struct buf *bp;
	struct denode *dep = VTODE(ap->a_vp);
	struct msdosfsmount *pmp = dep->de_pmp;
	struct direntry *dentp;
	struct dirent dirbuf;
	struct uio *uio = ap->a_uio;
	u_long *cookies = NULL;
	int ncookies = 0;
	off_t offset, off;
	int chksum = -1;

#ifdef MSDOSFS_DEBUG
	printf("msdosfs_readdir(): vp %p, uio %p, cred %p, eofflagp %p\n",
	    ap->a_vp, uio, ap->a_cred, ap->a_eofflag);
#endif

	/*
	 * msdosfs_readdir() won't operate properly on regular files since
	 * it does i/o only with the filesystem vnode, and hence can
	 * retrieve the wrong block from the buffer cache for a plain file.
	 * So, fail attempts to readdir() on a plain file.
	 */
	if ((dep->de_Attributes & ATTR_DIRECTORY) == 0)
		return (ENOTDIR);

	/*
	 * To be safe, initialize dirbuf
	 */
	bzero(dirbuf.d_name, sizeof(dirbuf.d_name));

	/*
	 * If the user buffer is smaller than the size of one dos directory
	 * entry or the file offset is not a multiple of the size of a
	 * directory entry, then we fail the read.
	 */
	off = offset = uio->uio_offset;
	if (uio->uio_resid < sizeof(struct direntry) ||
	    (offset & (sizeof(struct direntry) - 1)))
		return (EINVAL);

	if (ap->a_ncookies) {
		ncookies = uio->uio_resid / 16;
		MALLOC(cookies, u_long *, ncookies * sizeof(u_long), M_TEMP,
		       M_WAITOK);
		*ap->a_cookies = cookies;
		*ap->a_ncookies = ncookies;
	}

	dirsperblk = pmp->pm_BytesPerSec / sizeof(struct direntry);

	/*
	 * If they are reading from the root directory, then we simulate
	 * the . and .. entries since these don't exist in the root
	 * directory. We also set the offset bias to make up for having to
	 * simulate these entries. By this I mean that at file offset 64 we
	 * read the first entry in the root directory that lives on disk.
	 */
	if (dep->de_StartCluster == MSDOSFSROOT
	    || (FAT32(pmp) && dep->de_StartCluster == pmp->pm_rootdirblk)) {
#if 0
		printf("msdosfs_readdir(): going after . or ..
in root dir, offset %d\n", offset); #endif bias = 2 * sizeof(struct direntry); if (offset < bias) { for (n = (int)offset / sizeof(struct direntry); n < 2; n++) { if (FAT32(pmp)) dirbuf.d_fileno = cntobn(pmp, pmp->pm_rootdirblk) * dirsperblk; else dirbuf.d_fileno = 1; dirbuf.d_type = DT_DIR; switch (n) { case 0: dirbuf.d_namlen = 1; strcpy(dirbuf.d_name, "."); break; case 1: dirbuf.d_namlen = 2; strcpy(dirbuf.d_name, ".."); break; } dirbuf.d_reclen = GENERIC_DIRSIZ(&dirbuf); if (uio->uio_resid < dirbuf.d_reclen) goto out; error = uiomove((caddr_t) &dirbuf, dirbuf.d_reclen, uio); if (error) goto out; offset += sizeof(struct direntry); off = offset; if (cookies) { *cookies++ = offset; if (--ncookies <= 0) goto out; } } } } off = offset; while (uio->uio_resid > 0) { lbn = de_cluster(pmp, offset - bias); on = (offset - bias) & pmp->pm_crbomask; n = min(pmp->pm_bpcluster - on, uio->uio_resid); diff = dep->de_FileSize - (offset - bias); if (diff <= 0) break; n = min(n, diff); error = pcbmap(dep, lbn, &bn, &cn, &blsize); if (error) break; error = bread(pmp->pm_devvp, bn, blsize, NOCRED, &bp); if (error) { brelse(bp); return (error); } n = min(n, blsize - bp->b_resid); /* * Convert from dos directory entries to fs-independent * directory entries. */ for (dentp = (struct direntry *)(bp->b_data + on); (char *)dentp < bp->b_data + on + n; dentp++, offset += sizeof(struct direntry)) { #if 0 printf("rd: dentp %08x prev %08x crnt %08x deName %02x attr %02x\n", dentp, prev, crnt, dentp->deName[0], dentp->deAttributes); #endif /* * If this is an unused entry, we can stop. */ if (dentp->deName[0] == SLOT_EMPTY) { brelse(bp); goto out; } /* * Skip deleted entries. */ if (dentp->deName[0] == SLOT_DELETED) { chksum = -1; continue; } /* * Handle Win95 long directory entries */ if (dentp->deAttributes == ATTR_WIN95) { if (pmp->pm_flags & MSDOSFSMNT_SHORTNAME) continue; chksum = win2unixfn((struct winentry *)dentp, &dirbuf, chksum, pmp->pm_flags & MSDOSFSMNT_U2WTABLE, pmp->pm_u2w); continue; } /* * Skip volume labels */ if (dentp->deAttributes & ATTR_VOLUME) { chksum = -1; continue; } /* * This computation of d_fileno must match * the computation of va_fileid in * msdosfs_getattr. */ if (dentp->deAttributes & ATTR_DIRECTORY) { fileno = getushort(dentp->deStartCluster); if (FAT32(pmp)) fileno |= getushort(dentp->deHighClust) << 16; /* if this is the root directory */ if (fileno == MSDOSFSROOT) if (FAT32(pmp)) fileno = cntobn(pmp, pmp->pm_rootdirblk) * dirsperblk; else fileno = 1; else fileno = cntobn(pmp, fileno) * dirsperblk; dirbuf.d_fileno = fileno; dirbuf.d_type = DT_DIR; } else { dirbuf.d_fileno = offset / sizeof(struct direntry); dirbuf.d_type = DT_REG; } if (chksum != winChksum(dentp->deName)) dirbuf.d_namlen = dos2unixfn(dentp->deName, (u_char *)dirbuf.d_name, dentp->deLowerCase | ((pmp->pm_flags & MSDOSFSMNT_SHORTNAME) ? 
				    (LCASE_BASE | LCASE_EXT) : 0),
				    pmp->pm_flags & MSDOSFSMNT_U2WTABLE,
				    pmp->pm_d2u,
				    pmp->pm_flags & MSDOSFSMNT_ULTABLE,
				    pmp->pm_ul);
			else
				dirbuf.d_name[dirbuf.d_namlen] = 0;
			chksum = -1;
			dirbuf.d_reclen = GENERIC_DIRSIZ(&dirbuf);
			if (uio->uio_resid < dirbuf.d_reclen) {
				brelse(bp);
				goto out;
			}
			error = uiomove((caddr_t) &dirbuf,
					dirbuf.d_reclen, uio);
			if (error) {
				brelse(bp);
				goto out;
			}
			if (cookies) {
				*cookies++ = offset + sizeof(struct direntry);
				if (--ncookies <= 0) {
					brelse(bp);
					goto out;
				}
			}
			off = offset + sizeof(struct direntry);
		}
		brelse(bp);
	}
out:
	/* Subtract unused cookies */
	if (ap->a_ncookies)
		*ap->a_ncookies -= ncookies;

	uio->uio_offset = off;

	/*
	 * Set the eofflag (NFS uses it)
	 */
	if (ap->a_eofflag)
		if (dep->de_FileSize - (offset - bias) <= 0)
			*ap->a_eofflag = 1;
		else
			*ap->a_eofflag = 0;
	return (error);
}

static int
msdosfs_abortop(ap)
	struct vop_abortop_args /* {
		struct vnode *a_dvp;
		struct componentname *a_cnp;
	} */ *ap;
{
	if ((ap->a_cnp->cn_flags & (HASBUF | SAVESTART)) == HASBUF)
		zfree(namei_zone, ap->a_cnp->cn_pnbuf);
	return (0);
}

/*
 * vp  - address of the vnode for the file
 * bn  - which cluster we are interested in mapping to a filesystem block number.
 * vpp - returns the vnode for the block special file holding the filesystem
 *	 containing the file of interest
 * bnp - address of where to return the filesystem relative block number
 */
static int
msdosfs_bmap(ap)
	struct vop_bmap_args /* {
		struct vnode *a_vp;
		daddr_t a_bn;
		struct vnode **a_vpp;
		daddr_t *a_bnp;
		int *a_runp;
		int *a_runb;
	} */ *ap;
{
	struct denode *dep = VTODE(ap->a_vp);

	if (ap->a_vpp != NULL)
		*ap->a_vpp = dep->de_devvp;
	if (ap->a_bnp == NULL)
		return (0);
	if (ap->a_runp) {
		/*
		 * Sequential clusters should be counted here.
		 */
		*ap->a_runp = 0;
	}
	if (ap->a_runb) {
		*ap->a_runb = 0;
	}
	return (pcbmap(dep, ap->a_bn, ap->a_bnp, 0, 0));
}

static int
msdosfs_strategy(ap)
	struct vop_strategy_args /* {
		struct vnode *a_vp;
		struct buf *a_bp;
	} */ *ap;
{
	struct buf *bp = ap->a_bp;
	struct denode *dep = VTODE(bp->b_vp);
	struct vnode *vp;
	int error = 0;

	if (bp->b_vp->v_type == VBLK || bp->b_vp->v_type == VCHR)
		panic("msdosfs_strategy: spec");
	/*
	 * If we don't already know the filesystem relative block number
	 * then get it using pcbmap(). If pcbmap() returns the block
	 * number as -1 then we've got a hole in the file. DOS filesystems
	 * don't allow files with holes, so we shouldn't ever see this.
	 */
	if (bp->b_blkno == bp->b_lblkno) {
		error = pcbmap(dep, bp->b_lblkno, &bp->b_blkno, 0, 0);
		if (error) {
			bp->b_error = error;
			bp->b_flags |= B_ERROR;
			biodone(bp);
			return (error);
		}
		if ((long)bp->b_blkno == -1)
			vfs_bio_clrbuf(bp);
	}
	if (bp->b_blkno == -1) {
		biodone(bp);
		return (0);
	}
	/*
	 * Read/write the block from/to the disk that contains the desired
	 * file block.
	 */
	vp = dep->de_devvp;
	bp->b_dev = vp->v_rdev;
	VOP_STRATEGY(vp, bp);
	return (0);
}

static int
msdosfs_print(ap)
	struct vop_print_args /* {
		struct vnode *vp;
	} */ *ap;
{
	struct denode *dep = VTODE(ap->a_vp);

	printf(
	    "tag VT_MSDOSFS, startcluster %lu, dircluster %lu, diroffset %lu ",
	    dep->de_StartCluster, dep->de_dirclust, dep->de_diroffset);
	printf(" dev %d, %d", major(dep->de_dev), minor(dep->de_dev));
	lockmgr_printinfo(&dep->de_lock);
	printf("\n");
	return (0);
}

static int
msdosfs_pathconf(ap)
	struct vop_pathconf_args /* {
		struct vnode *a_vp;
		int a_name;
		int *a_retval;
	} */ *ap;
{
	struct msdosfsmount *pmp = VTODE(ap->a_vp)->de_pmp;

	switch (ap->a_name) {
	case _PC_LINK_MAX:
		*ap->a_retval = 1;
		return (0);
	case _PC_NAME_MAX:
		*ap->a_retval = pmp->pm_flags & MSDOSFSMNT_LONGNAME ?
WIN_MAXLEN : 12; return (0); case _PC_PATH_MAX: *ap->a_retval = PATH_MAX; return (0); case _PC_CHOWN_RESTRICTED: *ap->a_retval = 1; return (0); case _PC_NO_TRUNC: *ap->a_retval = 0; return (0); default: return (EINVAL); } /* NOTREACHED */ } /* * get page routine * * XXX By default, wimp out... note that a_offset is ignored (and always * XXX has been). */ int msdosfs_getpages(ap) struct vop_getpages_args *ap; { return vnode_pager_generic_getpages(ap->a_vp, ap->a_m, ap->a_count, ap->a_reqpage); } /* * put page routine * * XXX By default, wimp out... note that a_offset is ignored (and always * XXX has been). */ int msdosfs_putpages(ap) struct vop_putpages_args *ap; { return vnode_pager_generic_putpages(ap->a_vp, ap->a_m, ap->a_count, ap->a_sync, ap->a_rtvals); } /* Global vfs data structures for msdosfs */ vop_t **msdosfs_vnodeop_p; static struct vnodeopv_entry_desc msdosfs_vnodeop_entries[] = { { &vop_default_desc, (vop_t *) vop_defaultop }, { &vop_abortop_desc, (vop_t *) msdosfs_abortop }, { &vop_access_desc, (vop_t *) msdosfs_access }, { &vop_bmap_desc, (vop_t *) msdosfs_bmap }, { &vop_cachedlookup_desc, (vop_t *) msdosfs_lookup }, { &vop_close_desc, (vop_t *) msdosfs_close }, { &vop_create_desc, (vop_t *) msdosfs_create }, { &vop_fsync_desc, (vop_t *) msdosfs_fsync }, { &vop_getattr_desc, (vop_t *) msdosfs_getattr }, { &vop_inactive_desc, (vop_t *) msdosfs_inactive }, { &vop_islocked_desc, (vop_t *) vop_stdislocked }, { &vop_link_desc, (vop_t *) msdosfs_link }, { &vop_lock_desc, (vop_t *) vop_stdlock }, { &vop_lookup_desc, (vop_t *) vfs_cache_lookup }, { &vop_mkdir_desc, (vop_t *) msdosfs_mkdir }, { &vop_mknod_desc, (vop_t *) msdosfs_mknod }, { &vop_pathconf_desc, (vop_t *) msdosfs_pathconf }, { &vop_print_desc, (vop_t *) msdosfs_print }, { &vop_read_desc, (vop_t *) msdosfs_read }, { &vop_readdir_desc, (vop_t *) msdosfs_readdir }, { &vop_reclaim_desc, (vop_t *) msdosfs_reclaim }, { &vop_remove_desc, (vop_t *) msdosfs_remove }, { &vop_rename_desc, (vop_t *) msdosfs_rename }, { &vop_rmdir_desc, (vop_t *) msdosfs_rmdir }, { &vop_setattr_desc, (vop_t *) msdosfs_setattr }, { &vop_strategy_desc, (vop_t *) msdosfs_strategy }, { &vop_symlink_desc, (vop_t *) msdosfs_symlink }, { &vop_unlock_desc, (vop_t *) vop_stdunlock }, { &vop_write_desc, (vop_t *) msdosfs_write }, { &vop_getpages_desc, (vop_t *) msdosfs_getpages }, { &vop_putpages_desc, (vop_t *) msdosfs_putpages }, { NULL, NULL } }; static struct vnodeopv_desc msdosfs_vnodeop_opv_desc = { &msdosfs_vnodeop_p, msdosfs_vnodeop_entries }; VNODEOP_SET(msdosfs_vnodeop_opv_desc);
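/*
 * Editor's illustration - a minimal, self-contained sketch (not part of the
 * original file) of the permission model implemented by msdosfs_getattr()
 * and msdosfs_access() above: DOS only stores a read-only flag, so a Unix
 * mode is synthesized from ATTR_READONLY and then filtered through the
 * mount's mask option (pm_mask).  ATTR_READONLY and mode_from_attr() below
 * are local stand-ins for illustration, not the kernel definitions.
 */
#include <stdio.h>
#include <sys/types.h>
#include <sys/stat.h>

#define ATTR_READONLY	0x01	/* mirrors the DOS "R" attribute bit */

/* Synthesize a Unix mode the way msdosfs_getattr() does. */
static mode_t
mode_from_attr(unsigned char attr, mode_t pm_mask)
{
	mode_t mode;

	if ((attr & ATTR_READONLY) == 0)
		mode = S_IRWXU | S_IRWXG | S_IRWXO;	/* rwxrwxrwx */
	else
		mode = S_IRUSR | S_IXUSR | S_IRGRP | S_IXGRP |
		    S_IROTH | S_IXOTH;			/* r-xr-xr-x */
	return (mode & pm_mask);	/* apply the mount's mask */
}

int
main(void)
{
	/* With a 0755 mask: a writable file maps to 0755, a R/O one to 0555. */
	printf("writable: %04o\n", (unsigned)mode_from_attr(0, 0755));
	printf("readonly: %04o\n", (unsigned)mode_from_attr(ATTR_READONLY, 0755));
	return (0);
}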
module.exports = /******/ (function(modules) { // webpackBootstrap /******/ // The module cache /******/ var installedModules = require('../../../ssr-module-cache.js'); /******/ /******/ // The require function /******/ function __webpack_require__(moduleId) { /******/ /******/ // Check if module is in cache /******/ if(installedModules[moduleId]) { /******/ return installedModules[moduleId].exports; /******/ } /******/ // Create a new module (and put it into the cache) /******/ var module = installedModules[moduleId] = { /******/ i: moduleId, /******/ l: false, /******/ exports: {} /******/ }; /******/ /******/ // Execute the module function /******/ var threw = true; /******/ try { /******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__); /******/ threw = false; /******/ } finally { /******/ if(threw) delete installedModules[moduleId]; /******/ } /******/ /******/ // Flag the module as loaded /******/ module.l = true; /******/ /******/ // Return the exports of the module /******/ return module.exports; /******/ } /******/ /******/ /******/ // expose the modules object (__webpack_modules__) /******/ __webpack_require__.m = modules; /******/ /******/ // expose the module cache /******/ __webpack_require__.c = installedModules; /******/ /******/ // define getter function for harmony exports /******/ __webpack_require__.d = function(exports, name, getter) { /******/ if(!__webpack_require__.o(exports, name)) { /******/ Object.defineProperty(exports, name, { enumerable: true, get: getter }); /******/ } /******/ }; /******/ /******/ // define __esModule on exports /******/ __webpack_require__.r = function(exports) { /******/ if(typeof Symbol !== 'undefined' && Symbol.toStringTag) { /******/ Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' }); /******/ } /******/ Object.defineProperty(exports, '__esModule', { value: true }); /******/ }; /******/ /******/ // create a fake namespace object /******/ // mode & 1: value is a module id, require it /******/ // mode & 2: merge all properties of value into the ns /******/ // mode & 4: return value when already ns object /******/ // mode & 8|1: behave like require /******/ __webpack_require__.t = function(value, mode) { /******/ if(mode & 1) value = __webpack_require__(value); /******/ if(mode & 8) return value; /******/ if((mode & 4) && typeof value === 'object' && value && value.__esModule) return value; /******/ var ns = Object.create(null); /******/ __webpack_require__.r(ns); /******/ Object.defineProperty(ns, 'default', { enumerable: true, value: value }); /******/ if(mode & 2 && typeof value != 'string') for(var key in value) __webpack_require__.d(ns, key, function(key) { return value[key]; }.bind(null, key)); /******/ return ns; /******/ }; /******/ /******/ // getDefaultExport function for compatibility with non-harmony modules /******/ __webpack_require__.n = function(module) { /******/ var getter = module && module.__esModule ? 
/******/ function getDefault() { return module['default']; } : /******/ function getModuleExports() { return module; }; /******/ __webpack_require__.d(getter, 'a', getter); /******/ return getter; /******/ }; /******/ /******/ // Object.prototype.hasOwnProperty.call /******/ __webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); }; /******/ /******/ // __webpack_public_path__ /******/ __webpack_require__.p = ""; /******/ /******/ /******/ // Load entry module and return exports /******/ return __webpack_require__(__webpack_require__.s = 7); /******/ }) /************************************************************************/ /******/ ({ /***/ "/+P4": /***/ (function(module, exports, __webpack_require__) { var _Object$getPrototypeOf = __webpack_require__("Bhuq"); var _Object$setPrototypeOf = __webpack_require__("TRZx"); function _getPrototypeOf(o) { module.exports = _getPrototypeOf = _Object$setPrototypeOf ? _Object$getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || _Object$getPrototypeOf(o); }; return _getPrototypeOf(o); } module.exports = _getPrototypeOf; /***/ }), /***/ "/+oN": /***/ (function(module, exports) { module.exports = require("core-js/library/fn/object/get-prototype-of"); /***/ }), /***/ "/HRN": /***/ (function(module, exports) { function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } module.exports = _classCallCheck; /***/ }), /***/ "3NBp": /***/ (function(module, __webpack_exports__, __webpack_require__) { "use strict"; /* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "a", function() { return AdminHeader; }); /* harmony import */ var react__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__("cDcd"); /* harmony import */ var react__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(react__WEBPACK_IMPORTED_MODULE_0__); /* harmony import */ var antd__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__("Exp3"); /* harmony import */ var antd__WEBPACK_IMPORTED_MODULE_1___default = /*#__PURE__*/__webpack_require__.n(antd__WEBPACK_IMPORTED_MODULE_1__); /* harmony import */ var next_link__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__("YFqc"); /* harmony import */ var next_link__WEBPACK_IMPORTED_MODULE_2___default = /*#__PURE__*/__webpack_require__.n(next_link__WEBPACK_IMPORTED_MODULE_2__); /* harmony import */ var _redux_helpers_authentication_helper__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__("Shr3"); /* harmony import */ var _static_resources_admin_account_scss__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__("o9iy"); /* harmony import */ var _static_resources_admin_account_scss__WEBPACK_IMPORTED_MODULE_4___default = /*#__PURE__*/__webpack_require__.n(_static_resources_admin_account_scss__WEBPACK_IMPORTED_MODULE_4__); function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? 
"symbol" : typeof obj; }; } return _typeof(obj); } function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; } function _possibleConstructorReturn(self, call) { if (call && (_typeof(call) === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); } function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); } function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; } function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); } function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); } var Header = antd__WEBPACK_IMPORTED_MODULE_1__["Layout"].Header; var confirm = antd__WEBPACK_IMPORTED_MODULE_1__["Modal"].confirm; var AdminHeader = /*#__PURE__*/ function (_Component) { _inherits(AdminHeader, _Component); function AdminHeader(props) { var _this; _classCallCheck(this, AdminHeader); _this = _possibleConstructorReturn(this, _getPrototypeOf(AdminHeader).call(this, props)); _this.state = { user: null }; _this.onLogOut = _this.onLogOut.bind(_assertThisInitialized(_this)); return _this; } _createClass(AdminHeader, [{ key: "componentDidMount", value: function componentDidMount() { if (this.props.hasOwnProperty('user') && typeof this.props.user === "string") { this.setState({ user: JSON.parse(this.props.user) }); } } }, { key: "componentDidUpdate", value: function componentDidUpdate(previous) { if (this.props.hasOwnProperty('user') && previous.user !== this.props.user) { this.setState({ user: JSON.parse(this.props.user) }); } } }, { key: "onLogOut", value: function onLogOut() { console.log('sure ?'); confirm({ title: 'Voulez-vous vraiment vous déconnecter ?', content: 'Souhaitez-vous vous déconnecter ?', onOk: function onOk() { Object(_redux_helpers_authentication_helper__WEBPACK_IMPORTED_MODULE_3__[/* logoutService */ "b"])(true); }, onCancel: function onCancel() {} }); } }, { key: "render", value: function render() { return react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement(react__WEBPACK_IMPORTED_MODULE_0___default.a.Fragment, null, react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement(Header, { style: { background: '#ffffff', padding: 0, height: '36px' }, className: "application-admin-header p-0" }, 
react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement("div", { className: "d-flex flex-row justify-content-end sub-admin-header p-0 align-items-center px-3" }, react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement(next_link__WEBPACK_IMPORTED_MODULE_2___default.a, { href: "/user-account" }, react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement("span", { className: "admin-information-first" }, this.state.user && this.state.user.firstname + ' ' + this.state.user.lastname)), react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement(antd__WEBPACK_IMPORTED_MODULE_1__["Button"], { type: "danger", onClick: this.onLogOut, shape: "circle", className: "ml-2 p-0" }, react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement(next_link__WEBPACK_IMPORTED_MODULE_2___default.a, { href: "/user-cart" }, react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement(antd__WEBPACK_IMPORTED_MODULE_1__["Icon"], { type: "poweroff", className: "button-icon-header" })))))); } }]); return AdminHeader; }(react__WEBPACK_IMPORTED_MODULE_0__["Component"]); AdminHeader.defaultProps = { user: null }; /***/ }), /***/ "4Q3z": /***/ (function(module, exports) { module.exports = require("next/router"); /***/ }), /***/ "4vsW": /***/ (function(module, exports) { module.exports = require("node-fetch"); /***/ }), /***/ "6dKc": /***/ (function(module, __webpack_exports__, __webpack_require__) { "use strict"; /* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "a", function() { return GuestHeader; }); /* harmony import */ var react__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__("cDcd"); /* harmony import */ var react__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(react__WEBPACK_IMPORTED_MODULE_0__); /* harmony import */ var next_head__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__("xnum"); /* harmony import */ var next_head__WEBPACK_IMPORTED_MODULE_1___default = /*#__PURE__*/__webpack_require__.n(next_head__WEBPACK_IMPORTED_MODULE_1__); function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); } function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; } function _possibleConstructorReturn(self, call) { if (call && (_typeof(call) === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); } function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; } function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? 
Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); } function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); } function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); } var GuestHeader = /*#__PURE__*/ function (_Component) { _inherits(GuestHeader, _Component); function GuestHeader(props) { _classCallCheck(this, GuestHeader); return _possibleConstructorReturn(this, _getPrototypeOf(GuestHeader).call(this, props)); } _createClass(GuestHeader, [{ key: "render", value: function render() { return react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement(next_head__WEBPACK_IMPORTED_MODULE_1___default.a, null, react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement("meta", { charSet: "UTF-8" }), react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement("title", null, this.props.title || ''), react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement("meta", { name: "description", content: this.props.description || '' }), react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement("meta", { name: "viewport", content: "width=device-width, initial-scale=1" }), react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement("link", { rel: "icon", type: "image/png", href: "/static/resources/images/logo-fredokav3@2x.png" })); } }]); return GuestHeader; }(react__WEBPACK_IMPORTED_MODULE_0__["Component"]); /***/ }), /***/ 7: /***/ (function(module, exports, __webpack_require__) { module.exports = __webpack_require__("iyB6"); /***/ }), /***/ "9Jkg": /***/ (function(module, exports, __webpack_require__) { module.exports = __webpack_require__("fozc"); /***/ }), /***/ "Bhuq": /***/ (function(module, exports, __webpack_require__) { module.exports = __webpack_require__("/+oN"); /***/ }), /***/ "Exp3": /***/ (function(module, exports) { module.exports = require("antd"); /***/ }), /***/ "K47E": /***/ (function(module, exports) { function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; } module.exports = _assertThisInitialized; /***/ }), /***/ "KI45": /***/ (function(module, exports) { function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } module.exports = _interopRequireDefault; /***/ }), /***/ "N9n2": /***/ (function(module, exports, __webpack_require__) { var _Object$create = __webpack_require__("SqZg"); var setPrototypeOf = __webpack_require__("vjea"); function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = _Object$create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) setPrototypeOf(subClass, superClass); } module.exports = _inherits; /***/ }), /***/ "Shr3": /***/ (function(module, __webpack_exports__, __webpack_require__) { "use strict"; // EXTERNAL MODULE: external "js-cookie" var external_js_cookie_ = __webpack_require__("vmXh"); var external_js_cookie_default = /*#__PURE__*/__webpack_require__.n(external_js_cookie_); // EXTERNAL MODULE: external "next/router" var router_ = __webpack_require__("4Q3z"); var router_default = /*#__PURE__*/__webpack_require__.n(router_); // EXTERNAL MODULE: external "graphql-tag" var external_graphql_tag_ = __webpack_require__("txk1"); // CONCATENATED MODULE: ./source/components/apollo-client/account-apollo/authentication-query.js var retrieveAccount = function retrieveAccount(admin) { var query; if (admin === true) { query = "mutation retrieveAdminAccount($email: String!, $password: String!) { retrieveAdminAccount(email: $email, password: $password) { firstname, lastname, token, authenticateToken, email, city, main_address, secondary_address } }"; } else if (admin === false) { query = "mutation retrieveGuestAccount($email: String!, $password: String!) { retrieveGuestAccount(email: $email, password: $password) { firstname, lastname, token, authenticateToken, email, city, main_address, secondary_address } }"; } return query; }; var registerQuery = "mutation makeGuestAccount($civility: String!, $firstname: String!, $lastname: String!, $phone: String!, $email: String!, $password: String!) 
{ makeGuestAccount(email: $email, password: $password, civility: $civility, firstname: $firstname, lastname: $lastname, phone: $phone) \n { firstname, lastname, token, email } }"; // EXTERNAL MODULE: external "node-fetch" var external_node_fetch_ = __webpack_require__("4vsW"); var external_node_fetch_default = /*#__PURE__*/__webpack_require__.n(external_node_fetch_); // CONCATENATED MODULE: ./source/components/redux-helpers/authentication-helper.js /* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "a", function() { return authenticateService; }); /* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "b", function() { return logoutService; }); /* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "c", function() { return registerService; }); var hostname = 'http://10.188.37.107'; function authenticateService(email, password, admin) { console.log(email + ' ' + password); var authenticationQuery = retrieveAccount(admin); return external_node_fetch_default()(hostname + ':3000/v1/graphql-first-instance/BtL7NQwOt0R7psYw1Fyx', { method: 'POST', headers: { 'Content-Type': 'application/json', 'Accept': 'application/json' }, body: JSON.stringify({ query: authenticationQuery, variables: { email: email, password: password } }) }).then(function (response) { return response.json(); }).then(function (data) { return data; }); } function logoutService(admin) { external_js_cookie_default.a.remove('authentication-token'); external_js_cookie_default.a.remove('authentication-user'); admin ? router_default.a.push('/admin-account') : router_default.a.push('/'); } function registerService(civility, firstname, lastname, email, password, phone) { return external_node_fetch_default()(hostname + ':3000/v1/graphql-first-instance/BtL7NQwOt0R7psYw1Fyx', { method: 'POST', headers: { 'Content-Type': 'application/json', 'Accept': 'application/json' }, body: JSON.stringify({ query: registerQuery, variables: { civility: civility, firstname: firstname, lastname: lastname, email: email, password: password, phone: phone } }) }).then(function (response) { return response.json(); }).then(function (data) { return data; }); } /***/ }), /***/ "SqZg": /***/ (function(module, exports, __webpack_require__) { module.exports = __webpack_require__("o5io"); /***/ }), /***/ "TRZx": /***/ (function(module, exports, __webpack_require__) { module.exports = __webpack_require__("Wk4r"); /***/ }), /***/ "TUA0": /***/ (function(module, exports) { module.exports = require("core-js/library/fn/object/define-property"); /***/ }), /***/ "WIwn": /***/ (function(module, __webpack_exports__, __webpack_require__) { "use strict"; /* unused harmony export AdminNavbar */ /* harmony import */ var react__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__("cDcd"); /* harmony import */ var react__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(react__WEBPACK_IMPORTED_MODULE_0__); /* harmony import */ var antd__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__("Exp3"); /* harmony import */ var antd__WEBPACK_IMPORTED_MODULE_1___default = /*#__PURE__*/__webpack_require__.n(antd__WEBPACK_IMPORTED_MODULE_1__); /* harmony import */ var _static_resources_admin_account_scss__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__("o9iy"); /* harmony import */ var _static_resources_admin_account_scss__WEBPACK_IMPORTED_MODULE_2___default = /*#__PURE__*/__webpack_require__.n(_static_resources_admin_account_scss__WEBPACK_IMPORTED_MODULE_2__); function _typeof(obj) { if (typeof Symbol === 
"function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); } function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; } function _possibleConstructorReturn(self, call) { if (call && (_typeof(call) === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); } function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; } function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); } function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); } function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); } var AdminNavbar = /*#__PURE__*/ function (_Component) { _inherits(AdminNavbar, _Component); function AdminNavbar(props) { _classCallCheck(this, AdminNavbar); return _possibleConstructorReturn(this, _getPrototypeOf(AdminNavbar).call(this, props)); } _createClass(AdminNavbar, [{ key: "render", value: function render() { return react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement(react__WEBPACK_IMPORTED_MODULE_0___default.a.Fragment, null, react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement("div", { className: "container-fluid py-2 m-0 container-top-one " + this.props.className })); } }]); return AdminNavbar; }(react__WEBPACK_IMPORTED_MODULE_0__["Component"]); /***/ }), /***/ "WaGi": /***/ (function(module, exports, __webpack_require__) { var _Object$defineProperty = __webpack_require__("hfKm"); function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; _Object$defineProperty(target, descriptor.key, descriptor); } } function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; } module.exports = _createClass; /***/ }), /***/ "Wk4r": /***/ (function(module, exports) { 
module.exports = require("core-js/library/fn/object/set-prototype-of"); /***/ }), /***/ "XVgq": /***/ (function(module, exports, __webpack_require__) { module.exports = __webpack_require__("gHn/"); /***/ }), /***/ "Xxwp": /***/ (function(module, exports) { module.exports = "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAFYAAABkCAYAAAAc5MdRAAAAAXNSR0IArs4c6QAAGvRJREFUeAHNXFlzHUWWvovkVZYsydq8SjbYsrENY7oBN8YLbmyGHV6YmAceYP7BPM4zrxMxE/PGC0EEPUAEMRB0A2ZrMG4Y9sUYbGxZtiRLsiTbkhfZspY735eqr3xuqupuZTomI+qek+ecPJn53ayszFN5bzpVPKWLm4QW5diGhQIm5wsK5MuxLeDmt1MVA6KYni0rxabcHpQCXCk25dZ70+wLgVJIxwYU0hfS2cYXAyep3tb1d+XjAIiTs3Fxujh5OR2KAzJOTt+FdOXUfVNto8CIkqlSX+fn4+wkj6JRwJQqi/JXqiyqjlLLFrXzgfHzcuDL/TztomRR8kId8nU2T/82T9+VJN+Hn6/E55wyFgzLW0NfXijv66yfQrzfOeXpj3ylfm2d8kmZeJ9a+0S8GizqO/PlNh/H04fV+T5tXh2TzM9bP+RtXmVKofJLqovlfL4UXyXZVBWw8jth8+JFUw8//HBNR0fH07lc7jp9goa6qDrS6bQ6G6pRJnv58uUDL7300gCELO+uZ5999rFsNltHQ/mNKg8bmsxJ1jaTycxcv3796AsvvPClMWRbWNecNhmbslgCWxCAwJtsRCkW7+jS9b//t8/nbf7XK5maoEj5pGVqcHrd5NdPo+Sfg9KZZ5555t6nnnrqvzo7O5sAUOjU8qEQTJxcNufPn099/PHHLyP/VSC7aWCqDtK4EXujBx6AKBOpm0xV3XeqqsP6LptfM9PdPX5p5JugjgzpqlWr7tu8eXPTypUrnT8LnOVVWZRMOlIAO37y5MlXwNL/DC7156YCTOd+UkWUi4+ilLnrueee2zSUbWvwHZWTn5ebSNVOj3W98sorgyjnQAXN1tfX725paSnHVUHbY8eOnenu7j4Mo7D9poD6aUSVsVHAylNcJZKHdGbB0juHsi2rVbASunTmQqrq+sX3UTYE9aGHHlrf0dHRVlVVxbnVuRVlxvJOGSOT7tKlSxyxJ99+++1zkKn9Ut9U6k8FUZVJRmp5NsTJxqtqHxzL1s9L0rLG6ZGhzNjpg/BBYN2Fkbpl3bp17Un82rL9/f25gYGBtyFTX9Qfa3ZTeHZAyVYi3qe0pSy88GBpHM/Wrb2WXiA/FdG2qYH+l19++WfjO3vrrbfub2trq9bI9CkrkqyUSru6uvp+/PHHz4M6oorctHnWAutX5IMqMGXn8kuXLt00WNW2SsJKaN3MaGpmZlIddqN1w4YNtfDdiasSl3PKTE1NpfDQGn7rrbe65ih/A4GAFYiswvKq0oIq3tldXdS6+3y2cZkMK6H10+fHF472vIuybA/9ZrC82rh+/fo2+StnZKqMpZgGUleuXPk0kHFk6rJmN40XsL5DBxqEAlF65UP9dNWivWOZZKOqdaq/Z+Rs74+oxI1WUozYP6xevbqZFVtQxYtSX0rq7e0d++qrr96BrW53n5bipmQbH1gBRgc+b/NO//TTT28YzrYuy+WZlly3M1yQu5paOH256913370Q1Ml60o2NjTtbW1vzQC3Pc771kSNHznz//fecw+1I/c3A1a2X34obuTlgQuU6Tlpd17ZtuKol2fw6PZrLXhs9YPxm7rvvvvYVK1Y0V1dX32gJuHJHqQqPjIxwGvgJD69xusHFjYEA/k3AtSNWIIqqXSGQEOTx1xc07hvNLs3vvUqVSJumB3unRk7ywaUvOY0pYDPm15K/sGKAnzlzZgqbAk0DFtQSW1m+mQXWlhaAVpbH79y5c+nV7OINV9OL8uTlZNIYOE2TQ8Ovv/66ntSsN4Nl1p7ly5fnOS4GXqF6f/31194ffviBW2ULqj9iC7koWydg2SEm0dmcN0IDvQN97dq1nWer2mY38LIuky7FMis3fe1Q4JdtyTY3Ny+ura3dgq2su/UJqA+qny9U7fj4eAorgu6DBw8yYmbBFM/img4KuSpLJ2BtIR9cqyPvgL22sI3LrETxgfqp4dGq4WN2fk1v27ZtAyNZfqWV5rnMwlTAOjha7Yit1GVJ5SywFlAHHjxEUed4Zv6iXYmXWdOD/b/88stROGQ7XF2YW+9as2ZNuH51lSX4wANrAFMB53CNUI1O0QTe44uqQ/EW+RrX+SeeeGLtOdy0M+nowHJ+kejcopkrqeqp8WPYYl6CBf26tmAq2IVlFvOJE6eM48ePj7z55pu/wJkdrQJVNHFdvgM7YqlzwAVG6pxkyqeWNLZuQTSrPbCriCCaNVV9JQyI0Ef6nnvuWYWHVsv8+fMr8ukX4jSAiNZByAlg1KUito9hP6WshCq65TtTXlS+mU9jG/uPo9mGZMusycG+SwPd39Ofro6Ojk2YX9eosjjqB7P9vMr19PSMYjXArbI/WouNVL/fcklarKyz9UesdSA+7DgFt99+e81UdvHG8cxi6cum2dxUqn56ZBBx0dNBYdaRueWWW+5HNCtvmVW2cxTQSgLz9+B3331nd1satXRbEkAR9efhEaF3IgusX0B5WzZ92223bRysbkv0cOEyKzNx6cPAsQO1rq5uHq4ty5ZVHs8RoPR74cKF1NjY2DE8uMaQJYj+qKVZ0hSFkfOpqcCvgAVsCh3k6lb94Xy2KdG7kobpkfO5kROc++Q3fdddd3Ui8NJsKyXP21yXr1Perms1LXC3deLEib/AxgdVxUT9vlYykuUjLEtgJVRFPrX69FR1zf2jCaNZLVNnBr799lsus+jbXdht/R5b2VZbuUCyMssL0Cg7jNTThw8ftrstW9T2ycrJx+lC0PwCJq+yOU0FrnOewRzZY489tvpC1bIV0+m4gW48xLA1M5dSVZPj3yHofA0mqiONuZXRLDUs8jV2FIBR1Vy7di2FMOHgp59+2mv0BMZeRlUSG7a1BOu0gJWtCitPGspql7VsGapqW2OV5fJcZlWNdvNJLRA5DbRgmbV84cKFzl0xADVSaRxli2kgNTo6+leoo+ZV9adU6trkfaisJ76RLTb01HmWSF+rWcGXhomWWc2TAz1n+04fob/gSmGkdmLHtdpVYg5lMF9OEuCIZA1jqiGwGqF0Y/tSjltbjv5sks6Xh1tJa0w+7HSg4GpgwXRm0dbLCU66VOP0Ue3U+dM4icKAiFIGBzL2cJkVNfpkRCrgrEy81R09enQA77a4zGISuLbz6l8pdNbL7KfsrYw85XlJU4EKiFojV2jLli2dZ6vbEq0GuMxKX3O3qPxnFixYkMUyax
u2spI5SpDtZZX2CyCgApXywcFBBrW/hv00Lk0Ftrjl1V+fRtmUIgttBGwoMIwqc6KZulU7ziVcZjVODQ1ND3f9LajD+cc2thMrgkbVKzCVJxVwVhYlpx0eWld/+umn96AnqEoareqTqPQ+lV6UevGkSpanLMzHARsaBB7SuQV1949l3IE/OS2btk6e4ZOaARH6dxfiuls6Ojrc2wI7EuXcgmr1lPt5lsE00It3WzxCxCRAZ3Oz+bBuCIvxKic7m4/iKaNt5KE4p6AySOn9+/evuFhVv3wyXflhl9qZi7g5r3+F908cSWEdWLv+kWezLEisl3leFtigPXNkAhk7rdTQ0NCveHE4IlvQ3O7duxdju7wZ/mBa+HipytFWPCmOf7r810jffPMNj6qyD7KxPM3dgpRCe1GhRHlq2fJVm49kWxO9LaifPjeeuXCSZ7OU0lu3bm3CKqu9pubG0U8BSiMBRpmSZDZPnnJGs7Ai4FKOKZxfn3zyyf+44447/lnLuVl1+Z/wPwEf/wRgtaNjwyLBLbTcCnszUbMCLw0bKo+6oPZmnB04033CLrPSCGjzbYH7wiygUV0WuJYSTCaBjdjrGey4vqXIKUDvvvtufG81m/GOLhBVTl577bXjWB8zIsckUEWtzC23nJX3EYKKEVWdyy7cdilT65mUnp2PI5qLJ0dPf/bZZ8OmVBrLrL1Yw0YuswSasc+bAqQXxSltjtb+Dz/88BTKCNjURiTEIJZbP5XwWGmkcKDu5IsvvtiP8sLHp6HrqIeXjJ0RvumNeGnYFJaogKmbvpDKXjv/AYqqw6wjjZeGvwOwoUeNWkulFIDMW5552qPTqXPnzn2ELKcAJlcXVhy7cWi58pDZrK9UX1/fFJZybyIrfApSC6zrbOBHJJ2rX3nPSLYp0TfeNH22Z2Ko+4ugUawngwcKNlvrw5eRusVZsQXOysWLyo4Uo3UIQW2dzaIbJn55u+2XNysu/xNTTBeA1fF6H9Q5DgWsDOcYzMyr3YttbKx+ToE5glyqZbL/PN47HbMqLLO2dnR0rCRIAsrq43gLJm2UxzJr6NChQ9ptuVH7+OOPr8RUtiLpq57gpOIApoETqFJYiLIZ4kNKYJWhgU1pnKhuuZytX3U9wTJr6fRoanry6mfGMetLt7e3P4BlVli3BdjyplwIosAkpe3w8DCD2odxTcBeU0EKoN6GZdYa66MSnkEd1PFOUNa13/gJ+2BksQ8vZ7O0edVmzK/ttkC5fMPMyIXqcyfeQ7mwAYg7NDQ1Na1ZsmSJAyZuxFIuEONsqMe7ras///wzOx6CCj6HqeYhTAOJgkbsL16hn8Icyx1j2IeA9/M0d0lTgfJ5dKKmbT+WWYneQbVMDvRj/uNuiw8TNsQts/Ckbo0DyzZCNgJYOuYlwzTQgznwJ+lAc3hgZRGHuNOPQRibkll8aSMI6mg35/rgFZ4DcCywOOKTzlTPvzvJNnbhzHhq3tSlri+//JJHNMOEZdYunCasp4DA6bL50DiCEaAsxx9s4KHSiw3REEz55bnVwI4dOzoxDYQPxwg3JYm42rh69ern+FEI7wYLoMpHyeKnAmxjOwezrYmWWYhmTVZf7tdOSN90Bsff70GY0AGq1vmUoFkAqVfe8gA1h4svJgWqAxagbm9vb19B2yQJ08wFvO3lTktJ/RCVPI/G7rzS9au3j2Sbky2zpgb6Lp3t4U4oTFhm4Ycw6xoIHJNPQ0OjE6CylQ3l3G0hmsWlXF7C215uPvJklWQQd+iFf642Zhsc3BExvmjjvtjYqWBqXu0DSaaBTG4m1Tg9NIyzA1yiKKUxkrbiWkGQBJQojcQLTBUUpVy6mZmZFObWIey2jkPPW9WNWnx5jXg4dtgYhMqXQ/nzULxGP4wj9nyFXlaKBPbBBx9cdiW7pGMiM/sOqiyPgbH7QdzEZd6i+qZJeaiYIyl8UkcBKZmtV2BaGYMuWGIpvitVGg+uDVgRuFCkhJVQrATG8UvGP6Osa7uhUe7UT6fjVOCGrrXE/Le5J71gbeuUfYNiLYrzLdMDY5mxnk8C/65SjNQ6/LZgLd4YuJEpAH1K75KpJuZ9cBHUHsWhur/Cxi6zUnjbsQ99SBQ0Yr2YW09hRfOD2mBoHoiBPA/HyDkWgeIv7rwz+y9r01+jLzkGMUNH4kVNZSELEHLYrZx/6U9/4hYwLItI1oZNmzaF0wDBigPQylFXaEeeFxPmv368NORSjimcChYvXnwvH45JEoMuuCO6P/jgg4pGVySwOO80jut/0DCC4l9sbwgWMxGJPVe5UI2RtFvLrFBIw+BBJpmfp1xgkqceARdeR9D5y1RTzvToo4+uw3TTgMD0rKDCTwZdsCLgakB9DesoxaUPrC0sh/Rj5aX4tfYCOI218b18UhMYe2lERgHKyii3I5U8jxDhVj0QNIb1uQsxiG1YdbgYb6CriOCh2H3q1CmepFFiP2y/JI+kAlYF4sAsy2nQAOsrtWfPnrUIbDfyl9w+gH6eLbVgWr1GLjrei/nvCEzz5lc+HBlDpo9K0+TkJLexA2+88YZd0ZTljsASVAuCD7LNy1YyVhYHumypT2MU3cFlFgswESwLmOWpF4C+nDrshBgf7f3iiy8YdGZiXTnEtGvwYNyY9Pe3mF646dDd4Coo8GGxCM00YimQgUC2efFhwYARqFF66ZwpblGefa0WoFGAUUZARVUZZbqo4zLr9OnTXMqFDyzaYqu8/ma8LeBoxdmy/1X9Baj6TSremQtYCssB1K8rz2mgDH1itC7BKFrPnxhZQAWglVnH0lsZAUbHe3FEkx1nHbq4zNqJvzpJHB/A3D3yzjvv2KAOm+D30c/n2fhTAY3tSBPvUzqxya9EX5KjOAWOgbRhuQAktTwdCUTJKSOIUQmL9gvvv//+MZqYK4Uvb1fSvzlh0AWBnUP4N6XpqLoDWXTDTAGNWIpoXAxAUzSPtRXRh4B1RgB1J5dAzPigEjyBKSoZqQ8upwHstr6EK00Dzu3evXvbsOJoRqiQ+YoTf7eATYcCR/Jj+2d56S11ei32ZFwKpY2183mrdxU2NDTs4IK9EKi2ZT5vAcZDaxRrbJ5P0GrA1Yc5fBNeHCZeZmHT0YfLTgPqn9+sgnk7FdCBP2KVL+gkUKoBeaMVI6kDyyz3S26NSJ/6zqn3R6pkCDr3YY49aup09QLUB7DMSrSNZdAFm47DwW/P/GYxrz5Kx7wvc7pSRqwKyklUXjo6ld5VgClgE5ZZa3wwlaeR5V2h4EPgSo/DEikcUTrJORYmrMeNWkSxMosWLUr8toBBF7yNsLFX2xfLs4U2T97m885uUaERKkoHlo/KU6bk+0hjft3HBbvAoSF5HzRfHjoMHmC0x4NlEpuCD6CzHcnt27ePhzKaVKZSirvhNFYbP3r+6c6CZuuOqypnpwI5sEBanno/7zvO02OZtRhP6tsR0QrtfIClsHKBTp39EniSEDuu7yBW51yHsTG4q729PVHUBasA+ytxNYvUB9XqfD601aqAAoLCJGUeSJD7eWfsfdBGVwoH0RgXdR22wLGMzRNIm/d8u
iyPEGHvPoCzAz0QsI1qZw5B7fvtq/So8sVkiD1MYlPwtmenOii2vPK2HXlFBawMCYqSHEmmvPSk1ElueafjS0M8uMLXME6IaYBJo9ICKpn01EnG9SXe7X8CnV1m5XAEqgEbj9V8lZ4kYd4+jaWWgi4WMMu7puFDfVaVNu94C6wKkc72fraYLWTl1n7WMr8cF+y7FReNAtDK5EBUoBJYXhitQ1gGfQ692uModlqYXjckmgYUdAn+k0v+2RSft3k1VTRP58+xvlExIGVPam3TiGatQey1Le54TxyoBFGgWud4sAwi6GKPELmOIHh+P17FJNrGctOBpdZHqM+CE8erWVYvWUg1YmVkwaGR5Crg6315qO/o6OAf5qyWgagPqG51UV/PcjilzY5/gx3XJLJ5UwGiWS7GK/+VUP6DHO6Gg6as7bfPMy+Z5U3xuUflVSAEKM/6hkNP7LIsw/JcG/Of3h5kNMtp8BEFmHSiGqkEWUCT4sEyjpOEXGblgYpfSnZgNdAU90/H8luM4m4YeO+99xjb9YESHnRh+TiXsnHLrSgjGVAXB3JUOdpmcKxnAR4mt+NpPcdGgFlFHOiyRbSpD2FC7rbULtIc7op/wFY20TaWD8WLFy/+De+4bNDF+Q/aaHnbbMurXaFMU0EoiGDmFIqwyQN/165d/KVhS4TdHJEFVUBao+AIURe2mTxCxBR2FCuOPyZ9W8CgC37MzAN1Sra/lqdedYuqzByqLe0cRRkCgUrqLjyld+AWbS7kwwJayI67LcyBDLowcSpw0wGWcovwNnYDY7xJkgm6+GBZUH2dqvRtKHcyrQpkKJCUL5WGoKJAGtGsvVpmWQdxYNqRanmWxRGifgRd5uy28BqdPwxJdIYoJuhiQbS87YrlLbih3J8KrFGpIAtUOk0jmuVOUSsuasH0QQtb4TG04xWcpB7Bb2+7YGIfXClMNdsR4ClpuvHch1kc+LBBFx9EiwXLSC8a+olifGCtje/Y6sjnAao8IllbMBW4I/B+gXLzfLAgmvUJyuWBijy3sXuSHnrDQ/E0phmedLF99YHz8+qGX0ZyR2/2HMs/zGFcNNFhZbWQuy08WPiDDXXOURwxxUzTtlx3hezLoQq64G4YDMqpDrnx85JbShubwvzNAJaOOXoz2GlV45d7v6vkFHXUNIH1ZT+A1fpSozaHB+PmpG8LGHRBCPIvaLcPYFxe8hA8djwuVQosgWTSdODo9u3bNyGEV9K+PQrIWZeznzxChMD2EQafIQlBBZ/DXcFDGYneFiAEyWnAD7pY0ASkbZZ4307ykFYKLB3MARdP6h18DRN6L4GJAxiA8gcbPDtgO5jD77bSiD8kelvAoAunmeDsrgWJLbb1FepBVLnQPgmwdJIHLgLakcussLYymGBE8aWeRqujeFuAVVbnjch5GT5lyqALdlsfIS8QLZWZlVle+kI0/BejQkbFdA7cRx55pAVz62q8eypmX1TPI0SYA3twkvoMjAmowM0hTHgH5tjVRZ0UMMAUMISjqgTWT6UASBsly0vmaNIRGzrDmnIL1pbh2axQUQETHCE6gKIhoOBdp1HPfiyzdKdU4D2VYtAFBz748yILZBRIVm/r8m39fN7LRFuwEB/ZKXT4YQS2F3FvH5X8uVT5KIppoA9zIHdbBDYEF78tqOeJ8CRvC7g2xlLrENrJoIsFLoq3XZHeyqJ4B3KhDUJUoVgZGtr16quv/jeAcifASWHsvgTy/CFGVBKw1GGX5hqPqaDvwIEDXGYJWAcu3xZgmZXolzxB0EXLrMgmQVgIRAdcYBNV3sluBrCuEc8///x/wiP/qZfTiyh5jXBRiFyyeTVWo1M0b1ThofXwxMREA9a28lE2xSmaQdwR/NKYBKCls5rZzzi5b2Pzjq8EWFYmUFQxgZBMlVCmOdzX0SZKJkDlNw9Y/FHPv2NR/6HuCjrBneD8BHcIYwxRfmnq7gj8vOg4/nrkIrKqoxB15YIP2pWcYhtRxAPL2YsA6qJcI1U2Ue6os8k2nLwF2dpZn+LlS9TaW151kPoX7XyZ8lYnnpSJNkohX8mIneMEAoLARMfsnCpQR0VpoySZX4Z6lReVTGWYF29tKFeK0suWVJfslbfU6sRbStvIVCmwdMiGW8cElzJe4sGGAJD3kzpv5fItmeqwtpLRhnKVkVwy6iWzPGWSi7eUtkyysbyVOaOoj0qBtb5UEak671Nrb3nZSSZAouS0UV3SK6/yUdTaiCeN4lVeOuYtL32UPM8uCbByJDCiKhUAVmd56elLvKhkzBeqy/qL4lWWOvGW+ryfjyoXVU+eTJ3IE1aQqdSPLSfeUsuzWcxbmeVtsyWnTEBZXjLSOF72Vk8Zk2SzudnPPBnXm3/PZDuseiUTpZy88pZaXuWLUdth8XGUvnxdMf+2TGj7/wlYNsoHzs9H2YSdCRiVkVxAMR/FUyZ5Mer7YD4y/R+yxDvZh5D49AAAAABJRU5ErkJggg==" /***/ }), /***/ "YFqc": /***/ (function(module, exports, __webpack_require__) { module.exports = __webpack_require__("cTJO") /***/ }), /***/ "Z7t5": /***/ (function(module, exports, __webpack_require__) { module.exports = __webpack_require__("vqFK"); /***/ }), /***/ "ZDA2": /***/ (function(module, exports, __webpack_require__) { var _typeof = __webpack_require__("iZP3"); var assertThisInitialized = __webpack_require__("K47E"); function _possibleConstructorReturn(self, call) { if (call && (_typeof(call) === "object" || typeof call === "function")) { return call; } return assertThisInitialized(self); } module.exports = _possibleConstructorReturn; /***/ }), /***/ "bzos": /***/ (function(module, exports) { module.exports = require("url"); /***/ }), /***/ "cDcd": /***/ (function(module, exports) { module.exports = require("react"); /***/ }), /***/ "cTJO": /***/ (function(module, exports, __webpack_require__) { "use strict"; /* global __NEXT_DATA__ */ var _interopRequireDefault = __webpack_require__("KI45"); var _stringify = 
_interopRequireDefault(__webpack_require__("9Jkg")); var _classCallCheck2 = _interopRequireDefault(__webpack_require__("/HRN")); var _createClass2 = _interopRequireDefault(__webpack_require__("WaGi")); var _possibleConstructorReturn2 = _interopRequireDefault(__webpack_require__("ZDA2")); var _getPrototypeOf2 = _interopRequireDefault(__webpack_require__("/+P4")); var _inherits2 = _interopRequireDefault(__webpack_require__("N9n2")); var __importStar = void 0 && (void 0).__importStar || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) { if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; } result["default"] = mod; return result; }; var __importDefault = void 0 && (void 0).__importDefault || function (mod) { return mod && mod.__esModule ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); var url_1 = __webpack_require__("bzos"); var react_1 = __importStar(__webpack_require__("cDcd")); var prop_types_1 = __importDefault(__webpack_require__("rf6O")); var router_1 = __importStar(__webpack_require__("4Q3z")); var utils_1 = __webpack_require__("p8BD"); function isLocal(href) { var url = url_1.parse(href, false, true); var origin = url_1.parse(utils_1.getLocationOrigin(), false, true); return !url.host || url.protocol === origin.protocol && url.host === origin.host; } function memoizedFormatUrl(formatFunc) { var lastHref = null; var lastAs = null; var lastResult = null; return function (href, as) { if (href === lastHref && as === lastAs) { return lastResult; } var result = formatFunc(href, as); lastHref = href; lastAs = as; lastResult = result; return result; }; } function formatUrl(url) { return url && typeof url === 'object' ? utils_1.formatWithValidation(url) : url; } var Link = /*#__PURE__*/ function (_react_1$Component) { (0, _inherits2.default)(Link, _react_1$Component); function Link() { var _this; (0, _classCallCheck2.default)(this, Link); _this = (0, _possibleConstructorReturn2.default)(this, (0, _getPrototypeOf2.default)(Link).apply(this, arguments)); // The function is memoized so that no extra lifecycles are needed // as per https://reactjs.org/blog/2018/06/07/you-probably-dont-need-derived-state.html _this.formatUrls = memoizedFormatUrl(function (href, asHref) { return { href: formatUrl(href), as: formatUrl(asHref, true) }; }); _this.linkClicked = function (e) { var _e$currentTarget = e.currentTarget, nodeName = _e$currentTarget.nodeName, target = _e$currentTarget.target; if (nodeName === 'A' && (target && target !== '_self' || e.metaKey || e.ctrlKey || e.shiftKey || e.nativeEvent && e.nativeEvent.which === 2)) { // ignore click for new tab / new window behavior return; } var _this$formatUrls = _this.formatUrls(_this.props.href, _this.props.as), href = _this$formatUrls.href, as = _this$formatUrls.as; if (!isLocal(href)) { // ignore click if it's outside our scope return; } var pathname = window.location.pathname; href = url_1.resolve(pathname, href); as = as ? url_1.resolve(pathname, as) : href; e.preventDefault(); // avoid scroll for urls with anchor refs var scroll = _this.props.scroll; if (scroll == null) { scroll = as.indexOf('#') < 0; } // replace state instead of push if prop is present router_1.default[_this.props.replace ? 
'replace' : 'push'](href, as, { shallow: _this.props.shallow }).then(function (success) { if (!success) return; if (scroll) { window.scrollTo(0, 0); document.body.focus(); } }).catch(function (err) { if (_this.props.onError) _this.props.onError(err); }); }; return _this; } (0, _createClass2.default)(Link, [{ key: "componentDidMount", value: function componentDidMount() { this.prefetch(); } }, { key: "componentDidUpdate", value: function componentDidUpdate(prevProps) { if ((0, _stringify.default)(this.props.href) !== (0, _stringify.default)(prevProps.href)) { this.prefetch(); } } }, { key: "prefetch", value: function prefetch() { if (!this.props.prefetch) return; if (typeof window === 'undefined') return; // Prefetch the JSON page if asked (only in the client) var pathname = window.location.pathname; var _this$formatUrls2 = this.formatUrls(this.props.href, this.props.as), parsedHref = _this$formatUrls2.href; var href = url_1.resolve(pathname, parsedHref); router_1.default.prefetch(href); } }, { key: "render", value: function render() { var _this2 = this; var children = this.props.children; var _this$formatUrls3 = this.formatUrls(this.props.href, this.props.as), href = _this$formatUrls3.href, as = _this$formatUrls3.as; // Deprecated. Warning shown by propType check. If the childen provided is a string (<Link>example</Link>) we wrap it in an <a> tag if (typeof children === 'string') { children = react_1.default.createElement("a", null, children); } // This will return the first child, if multiple are provided it will throw an error var child = react_1.Children.only(children); var props = { onClick: function onClick(e) { if (child.props && typeof child.props.onClick === 'function') { child.props.onClick(e); } if (!e.defaultPrevented) { _this2.linkClicked(e); } } }; // If child is an <a> tag and doesn't have a href attribute, or if the 'passHref' property is // defined, we specify the current 'href', so that repetition is not needed by the user if (this.props.passHref || child.type === 'a' && !('href' in child.props)) { props.href = as || href; } // Add the ending slash to the paths. So, we can serve the // "<page>/index.html" directly. if (true) { if (props.href && typeof __NEXT_DATA__ !== 'undefined' && __NEXT_DATA__.nextExport) { props.href = router_1.Router._rewriteUrlForNextExport(props.href); } } return react_1.default.cloneElement(child, props); } }]); return Link; }(react_1.Component); if (false) { var exact, warn; } exports.default = Link; /***/ }), /***/ "fozc": /***/ (function(module, exports) { module.exports = require("core-js/library/fn/json/stringify"); /***/ }), /***/ "gHn/": /***/ (function(module, exports) { module.exports = require("core-js/library/fn/symbol/iterator"); /***/ }), /***/ "h74D": /***/ (function(module, exports) { module.exports = require("react-redux"); /***/ }), /***/ "hfKm": /***/ (function(module, exports, __webpack_require__) { module.exports = __webpack_require__("TUA0"); /***/ }), /***/ "iZP3": /***/ (function(module, exports, __webpack_require__) { var _Symbol$iterator = __webpack_require__("XVgq"); var _Symbol = __webpack_require__("Z7t5"); function _typeof2(obj) { if (typeof _Symbol === "function" && typeof _Symbol$iterator === "symbol") { _typeof2 = function _typeof2(obj) { return typeof obj; }; } else { _typeof2 = function _typeof2(obj) { return obj && typeof _Symbol === "function" && obj.constructor === _Symbol && obj !== _Symbol.prototype ? 
"symbol" : typeof obj; }; } return _typeof2(obj); } function _typeof(obj) { if (typeof _Symbol === "function" && _typeof2(_Symbol$iterator) === "symbol") { module.exports = _typeof = function _typeof(obj) { return _typeof2(obj); }; } else { module.exports = _typeof = function _typeof(obj) { return obj && typeof _Symbol === "function" && obj.constructor === _Symbol && obj !== _Symbol.prototype ? "symbol" : _typeof2(obj); }; } return _typeof(obj); } module.exports = _typeof; /***/ }), /***/ "iyB6": /***/ (function(module, __webpack_exports__, __webpack_require__) { "use strict"; __webpack_require__.r(__webpack_exports__); /* harmony import */ var react__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__("cDcd"); /* harmony import */ var react__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(react__WEBPACK_IMPORTED_MODULE_0__); /* harmony import */ var react_redux__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__("h74D"); /* harmony import */ var react_redux__WEBPACK_IMPORTED_MODULE_1___default = /*#__PURE__*/__webpack_require__.n(react_redux__WEBPACK_IMPORTED_MODULE_1__); /* harmony import */ var antd__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__("Exp3"); /* harmony import */ var antd__WEBPACK_IMPORTED_MODULE_2___default = /*#__PURE__*/__webpack_require__.n(antd__WEBPACK_IMPORTED_MODULE_2__); /* harmony import */ var _source_components_component_header_guest__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__("6dKc"); /* harmony import */ var _source_components_component_navbar_admin__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__("WIwn"); /* harmony import */ var _source_components_fragments_tools_admin_sider__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__("sN95"); /* harmony import */ var _source_components_fragments_tools_admin_header__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__("3NBp"); /* harmony import */ var _static_resources_admin_account_scss__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__("o9iy"); /* harmony import */ var _static_resources_admin_account_scss__WEBPACK_IMPORTED_MODULE_7___default = /*#__PURE__*/__webpack_require__.n(_static_resources_admin_account_scss__WEBPACK_IMPORTED_MODULE_7__); function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? 
"symbol" : typeof obj; }; } return _typeof(obj); } function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; } function _possibleConstructorReturn(self, call) { if (call && (_typeof(call) === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); } function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; } function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); } function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); } function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); } var SAdminOrders = /*#__PURE__*/ function (_Component) { _inherits(SAdminOrders, _Component); function SAdminOrders(props) { var _this; _classCallCheck(this, SAdminOrders); _this = _possibleConstructorReturn(this, _getPrototypeOf(SAdminOrders).call(this, props)); _this.state = { user: null }; return _this; } _createClass(SAdminOrders, [{ key: "componentDidMount", value: function componentDidMount() { if (this.props && typeof this.props.user === "string") { this.setState({ user: JSON.parse(this.props.user) }); } } }, { key: "componentDidUpdate", value: function componentDidUpdate(previous) { if (previous.user !== this.props.user) { this.setState({ user: JSON.parse(this.props.user) }); } } }, { key: "render", value: function render() { return react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement(react__WEBPACK_IMPORTED_MODULE_0___default.a.Fragment, null, react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement(_source_components_component_header_guest__WEBPACK_IMPORTED_MODULE_3__[/* GuestHeader */ "a"], { title: this.state.user !== null ? this.state.user.firstname + ' ' + this.state.user.lastname : 'Muscle Feed | Administrateur', description: "Boutique en ligne de compl\xE9ments alimentaires et de prot\xE9ines pour la musculation \xE0 prix cass\xE9. Le meilleur de la construction musculaire : proteines, gainer, bcaa, bruleur de graisses, booster." 
}), react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement(antd__WEBPACK_IMPORTED_MODULE_2__["Layout"], null, react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement(_source_components_fragments_tools_admin_sider__WEBPACK_IMPORTED_MODULE_5__[/* AdminSider */ "a"], null), react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement(_source_components_fragments_tools_admin_header__WEBPACK_IMPORTED_MODULE_6__[/* AdminHeader */ "a"], { user: JSON.stringify(this.state.user) }))); } }]); return SAdminOrders; }(react__WEBPACK_IMPORTED_MODULE_0__["Component"]); function mapStateToProps(state) { var _state$authentication = state.authenticationReducer, user = _state$authentication.user, logged = _state$authentication.logged; return { user: user, logged: logged }; } var connectReduxAdminOrders = Object(react_redux__WEBPACK_IMPORTED_MODULE_1__["connect"])(mapStateToProps)(SAdminOrders); /* harmony default export */ __webpack_exports__["default"] = (connectReduxAdminOrders); /***/ }), /***/ "o5io": /***/ (function(module, exports) { module.exports = require("core-js/library/fn/object/create"); /***/ }), /***/ "o9iy": /***/ (function(module, exports) { /***/ }), /***/ "p8BD": /***/ (function(module, exports) { module.exports = require("next-server/dist/lib/utils"); /***/ }), /***/ "rf6O": /***/ (function(module, exports) { module.exports = require("prop-types"); /***/ }), /***/ "sN95": /***/ (function(module, __webpack_exports__, __webpack_require__) { "use strict"; /* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "a", function() { return AdminSider; }); /* harmony import */ var react__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__("cDcd"); /* harmony import */ var react__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(react__WEBPACK_IMPORTED_MODULE_0__); /* harmony import */ var antd__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__("Exp3"); /* harmony import */ var antd__WEBPACK_IMPORTED_MODULE_1___default = /*#__PURE__*/__webpack_require__.n(antd__WEBPACK_IMPORTED_MODULE_1__); /* harmony import */ var next_link__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__("YFqc"); /* harmony import */ var next_link__WEBPACK_IMPORTED_MODULE_2___default = /*#__PURE__*/__webpack_require__.n(next_link__WEBPACK_IMPORTED_MODULE_2__); /* harmony import */ var _static_resources_images_logo_fredokav3_2x_png__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__("Xxwp"); /* harmony import */ var _static_resources_images_logo_fredokav3_2x_png__WEBPACK_IMPORTED_MODULE_3___default = /*#__PURE__*/__webpack_require__.n(_static_resources_images_logo_fredokav3_2x_png__WEBPACK_IMPORTED_MODULE_3__); /* harmony import */ var _static_resources_admin_account_scss__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__("o9iy"); /* harmony import */ var _static_resources_admin_account_scss__WEBPACK_IMPORTED_MODULE_4___default = /*#__PURE__*/__webpack_require__.n(_static_resources_admin_account_scss__WEBPACK_IMPORTED_MODULE_4__); function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? 
"symbol" : typeof obj; }; } return _typeof(obj); } function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; } function _possibleConstructorReturn(self, call) { if (call && (_typeof(call) === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); } function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; } function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); } function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); } function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); } var Sider = antd__WEBPACK_IMPORTED_MODULE_1__["Layout"].Sider; var AdminSider = /*#__PURE__*/ function (_Component) { _inherits(AdminSider, _Component); function AdminSider(props) { _classCallCheck(this, AdminSider); return _possibleConstructorReturn(this, _getPrototypeOf(AdminSider).call(this, props)); } _createClass(AdminSider, [{ key: "render", value: function render() { return react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement(react__WEBPACK_IMPORTED_MODULE_0___default.a.Fragment, null, react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement(Sider, { style: { height: '100vh', overflow: 'auto', position: 'fixed', left: 0, width: '140px' }, collapsed: true, collapsedWidth: 60 }, react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement("div", { className: "logo application-admin-logo d-flex flex-row justify-content-center align-items-center py-2" }, react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement("img", { src: _static_resources_images_logo_fredokav3_2x_png__WEBPACK_IMPORTED_MODULE_3___default.a, className: "application-admin-logo" })), react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement(antd__WEBPACK_IMPORTED_MODULE_1__["Menu"], { theme: "dark", mode: "inline", defaultSelectedKeys: ['1'], className: "mt-5" }, react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement(antd__WEBPACK_IMPORTED_MODULE_1__["Menu"].Item, { key: "1" }, react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement(next_link__WEBPACK_IMPORTED_MODULE_2___default.a, { href: "/admin-dashboard" }, react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement(antd__WEBPACK_IMPORTED_MODULE_1__["Icon"], { type: "pie-chart", className: "admin-sider-icon" })), 
react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement("span", { className: "nav-text application-admin-nav-link-text" }, "Panneau de Contr\xF4le")), react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement(antd__WEBPACK_IMPORTED_MODULE_1__["Menu"].SubMenu, { key: "sub1", title: react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement("span", { className: "nav-text application-admin-nav-link-text" }, react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement(antd__WEBPACK_IMPORTED_MODULE_1__["Icon"], { type: "code-sandbox", className: "admin-sider-icon" }), "\xA0", react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement("span", null, "Produits")) }, react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement(antd__WEBPACK_IMPORTED_MODULE_1__["Menu"].Item, { key: "2" }, react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement(antd__WEBPACK_IMPORTED_MODULE_1__["Icon"], { type: "barcode" }), react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement(next_link__WEBPACK_IMPORTED_MODULE_2___default.a, { href: "/admin-products" }, react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement("span", { className: "nav-text application-admin-nav-link-text" }, "Produits"))), react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement(antd__WEBPACK_IMPORTED_MODULE_1__["Menu"].Item, { key: "3" }, react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement(antd__WEBPACK_IMPORTED_MODULE_1__["Icon"], { type: "scan" }), react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement(next_link__WEBPACK_IMPORTED_MODULE_2___default.a, { href: "/admin-product-new" }, react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement("span", { className: "nav-text application-admin-nav-link-text" }, "Ajouter")))), react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement(antd__WEBPACK_IMPORTED_MODULE_1__["Menu"].SubMenu, { key: "sub2", title: react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement("span", { className: "nav-text application-admin-nav-link-text" }, react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement(antd__WEBPACK_IMPORTED_MODULE_1__["Icon"], { type: "box-plot", className: "admin-sider-icon" }), "\xA0", react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement("span", null, "Commandes")) }, react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement(antd__WEBPACK_IMPORTED_MODULE_1__["Menu"].Item, { key: "4" }, react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement(antd__WEBPACK_IMPORTED_MODULE_1__["Icon"], { type: "bars" }), react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement(next_link__WEBPACK_IMPORTED_MODULE_2___default.a, { href: "/admin-orders" }, react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement("span", { className: "nav-text application-admin-nav-link-text" }, "Commandes"))), react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement(antd__WEBPACK_IMPORTED_MODULE_1__["Menu"].Item, { key: "5" }, react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement(antd__WEBPACK_IMPORTED_MODULE_1__["Icon"], { type: "credit-card" }), react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement(next_link__WEBPACK_IMPORTED_MODULE_2___default.a, { href: "/admin-payments" }, react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement("span", { className: "nav-text application-admin-nav-link-text" }, "Paiements"))))))); } }]); return AdminSider; }(react__WEBPACK_IMPORTED_MODULE_0__["Component"]); /***/ }), /***/ "txk1": /***/ (function(module, exports) { module.exports = require("graphql-tag"); /***/ }), /***/ "vjea": /***/ (function(module, exports, __webpack_require__) { var 
_Object$setPrototypeOf = __webpack_require__("TRZx"); function _setPrototypeOf(o, p) { module.exports = _setPrototypeOf = _Object$setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); } module.exports = _setPrototypeOf; /***/ }), /***/ "vmXh": /***/ (function(module, exports) { module.exports = require("js-cookie"); /***/ }), /***/ "vqFK": /***/ (function(module, exports) { module.exports = require("core-js/library/fn/symbol"); /***/ }), /***/ "xnum": /***/ (function(module, exports) { module.exports = require("next/head"); /***/ }) /******/ });
# Pigeonhole argument: with A kinds of items, A*(I-1) draws can still leave
# every kind at only I-1 copies, so A*(I-1)+1 draws guarantee I of one kind.
A, I = map(int, input().split())
print(A * (I - 1) + 1)
$(".slider-header").slick({ lazyLoad: "ondemand", dots: true, infinite: true, speed: 1000, autoplaySpeed: 10000, arrows: false, fade: true, cssEase: "linear", autoplay: true, centerMode: true, slidesToShow: 1, pauseOnFocus: false, pauseOnHover: false, responsive: [ { breakpoint: 480, settings: { arrows: false, centerMode: true, centerPadding: "40px", slidesToShow: 1 } } ] });
'''
Copyright 2013 Cosnita Radu Viorel

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions
of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

.. codeauthor:: Radu Viorel Cosnita <radu.cosnita@gmail.com>
.. py:module:: fantastico.roa.query_parser_exceptions
'''

from fantastico.roa.roa_exceptions import FantasticoRoaError


class QueryParserOperationInvalidError(FantasticoRoaError):
    '''This exception notifies the query parser that something is wrong with the current operation
    arguments.'''
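# --- Illustrative usage sketch (not part of the original module) ---
# A hypothetical parser helper showing where this error would be raised; the
# operation name, helper name, and message text are invented for illustration,
# and it assumes FantasticoRoaError accepts a plain message like a standard
# Exception.
def _parse_binary_operation(name, args):
    if len(args) != 2:
        raise QueryParserOperationInvalidError(
            "Operation %s expects 2 arguments, got %s." % (name, len(args)))
    return name, args

try:
    _parse_binary_operation("eq", ["resource.title"])
except QueryParserOperationInvalidError as ex:
    print("query rejected: %s" % ex)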
'use strict'

if (process.env.NODE_ENV !== 'production') {
  require('dotenv').config()
}

const CONFIG = require('./config/config')
const App = require('./app/app')
const errorHandler = require('./app/middleware/error')

App.use(errorHandler)

// Keep a reference to the HTTP server so the rejection handler below can
// close it; the original called server.close() without ever defining server.
const server = App.listen(CONFIG.PORT, function (error) {
  if (error) return console.log(error)
  console.log(`We are live on ${process.env.NODE_ENV} mode on port ${CONFIG.PORT}`)
  console.log(`Jobs json: ${CONFIG.HOST}:${CONFIG.PORT}/api/v1/getJobs`)
})

// Handle unhandled promise rejections: log the error, then close the server
// and exit. (Dropped the `.red` suffix: the colors package is not required.)
process.on('unhandledRejection', (err, promise) => {
  console.log(`Error: ${err.message}`)
  server.close(() => process.exit(1))
})
guess = None
value = 5
trial = 5

print("WELCOME TO OUR GUESS GAME")

while trial > 0:
    print('You have {} trial(s) remaining'.format(trial))
    guess = int(input('enter a number: '))
    if guess == value:
        print('You win')
        break
    else:
        trial = trial - 1
else:
    # The while/else branch runs only when the loop exits without a break,
    # i.e. when all trials are used up.
    print('You lose')
import htmlgenerator as hg
from django.utils.translation import gettext_lazy as _

from .icon import Icon


class Button(hg.BUTTON):
    """buttontype: "primary", "secondary", "tertiary", "danger", "ghost" """

    def __init__(
        self,
        *children,
        buttontype="primary",
        icon=None,
        notext=False,
        small=False,
        **attributes,
    ):
        attributes["type"] = attributes.get("type", "button")
        attributes["tabindex"] = attributes.get("tabindex", "0")
        attributes["_class"] = hg.BaseElement(
            attributes.get("_class", ""),
            f" bx--btn bx--btn--{buttontype}",
            hg.If(
                hg.F(
                    lambda c: hg.resolve_lazy(self.attributes.get("disabled", False), c)
                ),
                " bx--btn--disabled",
            ),
        )
        if small:
            attributes["_class"] += " bx--btn--sm "
        if notext or not children:
            attributes["_class"] += " bx--btn--icon-only"
            if children:
                attributes[
                    "_class"
                ] += " bx--btn--icon-only bx--tooltip__trigger bx--tooltip--a11y bx--tooltip--bottom bx--tooltip--align-center"
                children = (hg.SPAN(*children, _class="bx--assistive-text"),)
        if icon is not None:
            if isinstance(icon, str):
                icon = Icon(icon)
            if isinstance(icon, Icon):
                icon.attributes["_class"] = (
                    icon.attributes.get("_class", "") + " bx--btn__icon"
                )
            children += (icon,)
        super().__init__(*children, **attributes)

    @staticmethod
    def fromlink(link, **kwargs):
        buttonargs = {
            "icon": link.iconname,
            "notext": not link.label,
            "disabled": hg.F(lambda c: not link.has_permission(c["request"])),
        }
        return Button(
            *([link.label] if link.label else []),
            **{**buttonargs, **link.attributes, **kwargs},
        ).as_href(link.href)

    def as_href(self, href):
        return hg.A(*self, **{**self.attributes, "href": href})


class ButtonSet(hg.DIV):
    def __init__(self, *buttons, **attributes):
        attributes["_class"] = attributes.get("_class", "") + " bx--btn-set"
        super().__init__(*buttons, **attributes)


class PrintPageButton(Button):
    def __init__(self, **attributes):
        if "onclick" not in attributes:
            attributes["onclick"] = "window.print()"
        super().__init__(_("Print"), icon="printer", notext=True, **attributes)
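# --- Illustrative usage sketch (not part of the original module) ---
# Composes the classes defined above into a small toolbar. The icon name
# "save" is an assumption (any name resolvable by Icon works), and the
# commented render call assumes htmlgenerator's render(element, context)
# helper; check the htmlgenerator docs before relying on that signature.
save_button = Button(_("Save"), buttontype="secondary", icon="save", small=True)
toolbar = ButtonSet(save_button, PrintPageButton())
# html = hg.render(toolbar, {})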
from .base import NbGraderPreprocessor
from .headerfooter import IncludeHeaderFooter
from .lockcells import LockCells
from .clearsolutions import ClearSolutions
from .saveautogrades import SaveAutoGrades
from .computechecksums import ComputeChecksums
from .savecells import SaveCells
from .overwritecells import OverwriteCells
from .checkcellmetadata import CheckCellMetadata
from .execute import Execute
from .getgrades import GetGrades
from .clearoutput import ClearOutput
from .limitoutput import LimitOutput
from .deduplicateids import DeduplicateIds
from .latesubmissions import AssignLatePenalties
from .clearhiddentests import ClearHiddenTests
from .overwritekernelspec import OverwriteKernelspec

__all__ = [
    "AssignLatePenalties",
    "IncludeHeaderFooter",
    "LockCells",
    "ClearSolutions",
    "SaveAutoGrades",
    "ComputeChecksums",
    "SaveCells",
    "OverwriteCells",
    "CheckCellMetadata",
    "Execute",
    "GetGrades",
    "ClearOutput",
    "LimitOutput",
    "DeduplicateIds",
    "ClearHiddenTests",
    "OverwriteKernelspec",
]
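# --- Illustrative usage sketch (not from this package) ---
# These classes follow nbconvert's Preprocessor interface, so one plausible
# way to apply them is to register them on an nbconvert exporter. The exporter
# API usage and the filename "submission.ipynb" are assumptions for the sake
# of the example, not taken from nbgrader itself.
from nbconvert.exporters import NotebookExporter

exporter = NotebookExporter()
exporter.register_preprocessor(ClearOutput, enabled=True)
exporter.register_preprocessor(LimitOutput, enabled=True)
output, resources = exporter.from_filename("submission.ipynb")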
import argparse
import os
import shutil
import socket
import subprocess
from typing import List

parser: argparse.ArgumentParser = argparse.ArgumentParser(description="Launch 35 merlin workflow jobs")
parser.add_argument("run_id", type=int, help="The ID of this run")
parser.add_argument("output_path", type=str, help="the output path")
parser.add_argument("spec_path", type=str, help="path to the spec to run")
parser.add_argument("script_path", type=str, help="path to the make samples script")
args: argparse.Namespace = parser.parse_args()

machine: str = socket.gethostbyaddr(socket.gethostname())[0]
if "quartz" in machine:
    machine = "quartz"
elif "pascal" in machine:
    machine = "pascal"
else:
    # Fail early: the account/partition settings below are only defined for
    # known machines, so continuing would raise a NameError much later.
    raise RuntimeError(f"Unsupported machine: {machine}")

# launch n_samples * n_conc merlin workflow jobs
submit_path: str = os.path.abspath(os.path.dirname(os.path.abspath(__file__)))

concurrencies: List[int] = [2 ** 0, 2 ** 1, 2 ** 2, 2 ** 3, 2 ** 4, 2 ** 5, 2 ** 6]
samples: List[int] = [10 ** 1, 10 ** 2, 10 ** 3, 10 ** 4, 10 ** 5]
nodes: List = []
c: int
for c in concurrencies:
    # One node per 32 workers; small concurrencies still get a full node.
    if c > 32:
        nodes.append(int(c / 32))
    else:
        nodes.append(1)

# Alternative parameter sets kept from earlier experiments:
# concurrencies = [2**4, 2**5, 2**6, 2**7]
# samples = [10**1, 10**2, 10**3, 10**4, 10**5, 10**6]
# concurrencies = [2 ** 3]
# samples = [10 ** 5]

output_path: str = os.path.join(args.output_path, f"run_{args.run_id}")
os.makedirs(output_path, exist_ok=True)

ii: int
concurrency: int
for ii, concurrency in enumerate(concurrencies):
    c_name: str = os.path.join(output_path, f"c_{concurrency}")
    if not os.path.isdir(c_name):
        os.mkdir(c_name)
    os.chdir(c_name)
    jj: int
    sample: int
    for jj, sample in enumerate(samples):
        s_name: str = os.path.join(c_name, f"s_{sample}")
        if not os.path.isdir(s_name):
            os.mkdir(s_name)
        os.chdir(s_name)
        os.mkdir("scripts")
        samp_per_worker: float = float(sample) / float(concurrency)
        # Estimate wall time in minutes from the per-worker sample count,
        # clamp small jobs to a sensible minimum, then pad by 50%.
        real_time: int
        if (samp_per_worker / 60) < 1.0:
            real_time = 4
        elif (samp_per_worker / 60) < 3.0:
            real_time = 10
        else:
            real_time = samp_per_worker / 60
        real_time *= 1.5
        real_time = int(round(real_time, 0))
        if machine == "quartz":
            account = "lbpm"
            partition = "pdebug"
        elif machine == "pascal":
            account = "wbronze"
            partition = "pvis"
        if real_time > 60:
            partition = "pbatch"
        if real_time > 1440:
            real_time = 1440
        submit: str = "submit.sbatch"
        command: str = (
            f"sbatch -J c{concurrency}s{sample}r{args.run_id} --time {real_time} -N {nodes[ii]} "
            f"-p {partition} -A {account} {submit} {sample} {int(concurrency / nodes[ii])} {args.run_id} {concurrency}"
        )
        shutil.copyfile(os.path.join(submit_path, submit), submit)
        shutil.copyfile(args.spec_path, "spec.yaml")
        shutil.copyfile(args.script_path, os.path.join("scripts", "make_samples.py"))
        lines: str = subprocess.check_output(command, shell=True).decode("ascii")
        os.chdir("..")
    os.chdir("..")
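# --- Illustrative invocation (script name and paths are made up) ---
#   python launch_jobs.py 3 /p/lustre1/$USER/merlin_runs ./spec.yaml ./make_samples.py
# Based on the argparse definitions above, this creates
# run_3/c_<concurrency>/s_<samples>/ directories and submits one sbatch job
# per (concurrency, samples) pair.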
import { Component } from 'react';
import axios from 'axios';
import { withRouter } from 'react-router';

class FormBuilder extends Component {
	state = { loading: false, payload: [], searchParam: '' };

	componentDidMount() {
		this.setState({ loading: true });
		this.apiGet();
	}

	apiGet = () => {
		axios
			.get(this.props.path)
			.then((res) => {
				// Single setState: payload and loading belong to the same update.
				this.setState({ payload: res.data[this.props.field][0].datos, loading: false });
			})
			.catch((err) => {
				console.log(err);
			});
	};

	apiPut = () => {
		axios({
			method: 'put',
			url: this.props.editPath,
			data: this.state.formattedPayload,
			headers: { 'Content-Type': 'application/json' }
		})
			.then((res) => {
				const history = this.props.history;
				history.goBack();
			})
			.catch((err) => {
				console.log(err);
			});
	};

	// Updates the nested payload immutably using the input's name as the key.
	// setState is asynchronous, so both derived values are computed from the
	// local copy instead of reading this.state back right after setState.
	handleNestedInputChange = (e) => {
		const payload = { ...this.state.payload };
		payload[e.target.name] = e.target.value;
		this.setState({ payload: payload, formattedPayload: { datos: payload } });
	};

	// Updates nested state immutably using the input's name and value.
	handleInputChange = (e) => {
		const data = { ...this.state.data };
		data[e.target.name] = e.target.value;
		this.setState({ formattedPayload: data });
	};

	render() {
		return this.props.render({
			data: this.state,
			handleInputChange: this.handleNestedInputChange,
			apiPut: this.apiPut
		});
	}
}

export default withRouter(FormBuilder);
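// A minimal usage sketch of the render-prop API above. The route paths, the
// `field` prop value, and the input name are hypothetical; apiGet expects the
// response to contain res.data[field][0].datos as shown in the component.
import React from 'react';
import FormBuilder from './FormBuilder';

const EditItem = () => (
	<FormBuilder
		path="/api/items"
		editPath="/api/items/1"
		field="items"
		render={({ data, handleInputChange, apiPut }) => (
			<form onSubmit={(e) => { e.preventDefault(); apiPut(); }}>
				<input name="nombre" value={data.payload.nombre || ''} onChange={handleInputChange} />
				<button type="submit">Save</button>
			</form>
		)}
	/>
);

export default EditItem;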
import asyncio import copy import logging import os import pickle from typing import Dict, List, Optional from happy_bittorrent.algorithms import TorrentManager from happy_bittorrent.models import generate_peer_id, TorrentInfo, TorrentState from happy_bittorrent.network import PeerTCPServer from happy_bittorrent.utils import import_signals QObject, pyqtSignal = import_signals() __all__ = ['ControlManager'] state_filename = '.tstate' logger = logging.getLogger(__name__) logger.setLevel(logging.DEBUG) class ControlManager(QObject): if pyqtSignal: torrents_suggested = pyqtSignal(list) torrent_added = pyqtSignal(TorrentState) torrent_changed = pyqtSignal(TorrentState) torrent_removed = pyqtSignal(bytes) def __init__(self): super().__init__() self._our_peer_id = generate_peer_id() self._torrents = {} # type: Dict[bytes, TorrentInfo] self._torrent_managers = {} # type: Dict[bytes, TorrentManager] self._server = PeerTCPServer(self._our_peer_id, self._torrent_managers) self._torrent_manager_executors = {} # type: Dict[bytes, asyncio.Task] self._state_updating_executor = None # type: Optional[asyncio.Task] self.last_torrent_dir = None # type: Optional[str] self.last_download_dir = None # type: Optional[str] def get_torrents(self) -> List[TorrentInfo]: return list(self._torrents.values()) async def start(self): await self._server.start() def _start_torrent_manager(self, torrent_info: TorrentInfo): info_hash = torrent_info.download_info.info_hash manager = TorrentManager(torrent_info, self._our_peer_id, self._server.port) if pyqtSignal: manager.state_changed.connect(lambda: self.torrent_changed.emit(TorrentState(torrent_info))) self._torrent_managers[info_hash] = manager self._torrent_manager_executors[info_hash] = asyncio.ensure_future(manager.run()) def add(self, torrent_info: TorrentInfo): info_hash = torrent_info.download_info.info_hash if info_hash in self._torrents: raise ValueError('This torrent is already added') if not torrent_info.paused: self._start_torrent_manager(torrent_info) self._torrents[info_hash] = torrent_info if pyqtSignal: self.torrent_added.emit(TorrentState(torrent_info)) def resume(self, info_hash: bytes): if info_hash not in self._torrents: raise ValueError('Torrent not found') torrent_info = self._torrents[info_hash] if not torrent_info.paused: raise ValueError('The torrent is already running') self._start_torrent_manager(torrent_info) torrent_info.paused = False if pyqtSignal: self.torrent_changed.emit(TorrentState(torrent_info)) async def _stop_torrent_manager(self, info_hash: bytes): manager_executor = self._torrent_manager_executors[info_hash] manager_executor.cancel() try: await manager_executor except asyncio.CancelledError: pass del self._torrent_manager_executors[info_hash] manager = self._torrent_managers[info_hash] del self._torrent_managers[info_hash] await manager.stop() async def remove(self, info_hash: bytes): if info_hash not in self._torrents: raise ValueError('Torrent not found') torrent_info = self._torrents[info_hash] del self._torrents[info_hash] if not torrent_info.paused: await self._stop_torrent_manager(info_hash) if pyqtSignal: self.torrent_removed.emit(info_hash) async def pause(self, info_hash: bytes): if info_hash not in self._torrents: raise ValueError('Torrent not found') torrent_info = self._torrents[info_hash] if torrent_info.paused: raise ValueError('The torrent is already paused') await self._stop_torrent_manager(info_hash) torrent_info.paused = True if pyqtSignal: self.torrent_changed.emit(TorrentState(torrent_info)) def _dump_state(self): 
        torrent_list = []
        # self._torrents maps info_hash -> TorrentInfo; only the values are dumped
        for torrent_info in self._torrents.values():
            torrent_info = copy.copy(torrent_info)
            torrent_info.download_info = copy.copy(torrent_info.download_info)
            torrent_info.download_info.reset_run_state()
            torrent_list.append(torrent_info)

        try:
            with open(state_filename, 'wb') as f:
                pickle.dump((self.last_torrent_dir, self.last_download_dir, torrent_list), f)

            logger.info('state saved (%s torrents)', len(torrent_list))
        except Exception as err:
            logger.warning('Failed to save state: %r', err)

    STATE_UPDATE_INTERVAL = 5 * 60

    async def _execute_state_updates(self):
        while True:
            await asyncio.sleep(ControlManager.STATE_UPDATE_INTERVAL)
            self._dump_state()

    def invoke_state_dumps(self):
        self._state_updating_executor = asyncio.ensure_future(self._execute_state_updates())

    def load_state(self):
        if not os.path.isfile(state_filename):
            return

        with open(state_filename, 'rb') as f:
            self.last_torrent_dir, self.last_download_dir, torrent_list = pickle.load(f)

        for torrent_info in torrent_list:
            self.add(torrent_info)
        logger.info('state recovered (%s torrents)', len(torrent_list))

    async def stop(self):
        await self._server.stop()

        tasks = list(self._torrent_manager_executors.values())
        if self._state_updating_executor is not None:
            tasks.append(self._state_updating_executor)
        for task in tasks:
            task.cancel()
        if tasks:
            await asyncio.wait(tasks)

        if self._torrent_managers:
            await asyncio.wait([manager.stop() for manager in self._torrent_managers.values()])

        if self._state_updating_executor is not None:  # only if periodic state dumps were enabled
            self._dump_state()
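# A minimal sketch of the ControlManager lifecycle, assuming a TorrentInfo
# built elsewhere (its construction is not part of this module) and that the
# class is importable from this module's package path (path assumed).
import asyncio

from happy_bittorrent.control import ControlManager  # module path assumed

async def run_session(torrent_info):
    manager = ControlManager()
    manager.load_state()           # restore torrents from .tstate, if present
    await manager.start()          # start the peer TCP server
    manager.invoke_state_dumps()   # periodic snapshots every 5 minutes
    try:
        manager.add(torrent_info)  # raises ValueError if already added
        await asyncio.sleep(3600)
    finally:
        await manager.stop()       # cancels managers and dumps a final state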
export const REGISTER_USER = "REGISTER_USER";
export const LOGIN_USER = "LOGIN_USER";
export const USER_LOADED = "USER_LOADED";
export const LOGOUT = "LOGOUT";
export const AUTH_FAIL = "AUTH_FAIL";
export const SET_ALERT = "SET_ALERT";
export const REMOVE_ALERT = "REMOVE_ALERT";
export const CLEAR_ERRORS = "CLEAR_ERRORS";
export const SET_RESTAURANT = "SET_RESTAURANT";
export const UPDATE_RESTAURANT = "UPDATE_RESTAURANT";
export const CLEAR_RESTAURANT = "CLEAR_RESTAURANT";
export const CREATE_RES = "CREATE_RES";
export const SUBMIT_ORDER = "SUBMIT_ORDER";
export const GET_MY_ORDERS = "GET_MY_ORDERS";
export const GET_ALL_RES = "GET_ALL_RES";
export const GET_MY_RES = "GET_MY_RES";
export const GET_MY_DISHES = "GET_MY_DISHES";
export const CLEAR_RES = "CLEAR_RES";
export const ERROR = "ERROR";
export const SET_LOADING = "SET_LOADING";
import { request } from './request'

export function fetchGetList(params) {
  return request({
    url: '/equipmentMaintain/getList',
    method: 'get',
    params: params
  })
}
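// A minimal usage sketch; `request` is assumed to return an axios-style
// promise resolving to the HTTP response, as the wrapper above implies.
fetchGetList({ page: 1, pageSize: 20 })
  .then(res => {
    console.log(res.data)
  })
  .catch(err => {
    console.error(err)
  })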
// Copyright Joyent, Inc. and other Node contributors. // // Permission is hereby granted, free of charge, to any person obtaining a // copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to permit // persons to whom the Software is furnished to do so, subject to the // following conditions: // // The above copyright notice and this permission notice shall be included // in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. 'use strict'; const common = require('../common'); const assert = require('assert'); const util = require('util'); const net = require('net'); const http = require('http'); let requests_recv = 0; let requests_sent = 0; let request_upgradeHead = null; function createTestServer() { return new testServer(); } function testServer() { http.Server.call(this, common.noop); this.on('connection', function() { requests_recv++; }); this.on('request', function(req, res) { res.writeHead(200, {'Content-Type': 'text/plain'}); res.write('okay'); res.end(); }); this.on('upgrade', function(req, socket, upgradeHead) { socket.write('HTTP/1.1 101 Web Socket Protocol Handshake\r\n' + 'Upgrade: WebSocket\r\n' + 'Connection: Upgrade\r\n' + '\r\n\r\n'); request_upgradeHead = upgradeHead; socket.on('data', function(d) { const data = d.toString('utf8'); if (data === 'kill') { socket.end(); } else { socket.write(data, 'utf8'); } }); }); } util.inherits(testServer, http.Server); function writeReq(socket, data, encoding) { requests_sent++; socket.write(data); } /*----------------------------------------------- connection: Upgrade with listener -----------------------------------------------*/ function test_upgrade_with_listener() { const conn = net.createConnection(server.address().port); conn.setEncoding('utf8'); let state = 0; conn.on('connect', function() { writeReq(conn, 'GET / HTTP/1.1\r\n' + 'Upgrade: WebSocket\r\n' + 'Connection: Upgrade\r\n' + '\r\n' + 'WjN}|M(6'); }); conn.on('data', function(data) { state++; assert.strictEqual('string', typeof data); if (state === 1) { assert.strictEqual('HTTP/1.1 101', data.substr(0, 12)); assert.strictEqual('WjN}|M(6', request_upgradeHead.toString('utf8')); conn.write('test', 'utf8'); } else if (state === 2) { assert.strictEqual('test', data); conn.write('kill', 'utf8'); } }); conn.on('end', function() { assert.strictEqual(2, state); conn.end(); server.removeAllListeners('upgrade'); test_upgrade_no_listener(); }); } /*----------------------------------------------- connection: Upgrade, no listener -----------------------------------------------*/ let test_upgrade_no_listener_ended = false; function test_upgrade_no_listener() { const conn = net.createConnection(server.address().port); conn.setEncoding('utf8'); conn.on('connect', function() { writeReq(conn, 'GET / HTTP/1.1\r\n' + 'Upgrade: WebSocket\r\n' + 'Connection: Upgrade\r\n' + '\r\n'); }); conn.on('end', 
function() { test_upgrade_no_listener_ended = true; conn.end(); }); conn.on('close', function() { test_standard_http(); }); } /*----------------------------------------------- connection: normal -----------------------------------------------*/ function test_standard_http() { const conn = net.createConnection(server.address().port); conn.setEncoding('utf8'); conn.on('connect', function() { writeReq(conn, 'GET / HTTP/1.1\r\n\r\n'); }); conn.once('data', function(data) { assert.strictEqual('string', typeof data); assert.strictEqual('HTTP/1.1 200', data.substr(0, 12)); conn.end(); }); conn.on('close', function() { server.close(); }); } const server = createTestServer(); server.listen(0, function() { // All tests get chained after this: test_upgrade_with_listener(); }); /*----------------------------------------------- Fin. -----------------------------------------------*/ process.on('exit', function() { assert.strictEqual(3, requests_recv); assert.strictEqual(3, requests_sent); assert.ok(test_upgrade_no_listener_ended); });
from tetueSrc.configParser import get_configuration, get_string_list, get_int_element, get_string_element, get_dict, \ get_string_list_only_section, load_configuration from tetueSrc.logger import log_event_info, log_event_error, log_header_info
app = angular.module('app'); app.controller('HistoryCollectedCtrl',[ '$scope', '$rootScope', '$http', 'PublicService', '$sce', '$state', function($scope,$rootScope,$http,PublicService,$sce,$state){ if($rootScope.loginStatus){ var friend = $rootScope.userLogin.name; }else{ var friend = 'A friend'; } window.scrollTo(100,100); $rootScope.showPosts = false; console.log($rootScope.showPosts); $scope.getPosts = function(page){ PublicService.getNewsSite(7,page).then(function(result){ if(result &&result.success){ $scope.currentPage = result.data.current_page; $scope.lastPage = result.data.last_page; $scope.posts = result.data.data; var dataEnter = { content:friend + ' accessed history collected !!!', type:1 }; $scope.insertNoti(dataEnter); } },function(errors){ console.log(errors); }); } $scope.getPosts(1); $scope.loadMore = function(){ $scope.getPosts($scope.currentPage+1); } $scope.previus = function(){ $scope.getPosts($scope.currentPage-1); } $scope.redirec = function(post){ var name = $scope.vietsub(post.name); $state.go('detail', { Id : post.id,Name:name}); } $scope.vietsub = function(str) { str = str.toLowerCase(); str = str.replace(/à|á|ạ|ả|ã|â|ầ|ấ|ậ|ẩ|ẫ|ă|ằ|ắ|ặ|ẳ|ẵ/g, "a"); str = str.replace(/è|é|ẹ|ẻ|ẽ|ê|ề|ế|ệ|ể|ễ/g, "e"); str = str.replace(/ì|í|ị|ỉ|ĩ/g, "i"); str = str.replace(/ò|ó|ọ|ỏ|õ|ô|ồ|ố|ộ|ổ|ỗ|ơ|ờ|ớ|ợ|ở|ỡ/g, "o"); str = str.replace(/ù|ú|ụ|ủ|ũ|ư|ừ|ứ|ự|ử|ữ/g, "u"); str = str.replace(/ỳ|ý|ỵ|ỷ|ỹ/g, "y"); str = str.replace(/đ/g, "d"); str = str.replace(/ /g, "-"); return str; } $scope.insertNoti = function(data){ PublicService.insertNoti(data); } $scope.trustAsHtml = function(value) { return $sce.trustAsHtml(value); }; }]);
# code credit goes to: # https://www.hackerearth.com/practice/notes/beautiful-python-a-simple-ascii-art-generator-from-images/ # code modified to work with Python 3 by @aneagoie from __future__ import division import os import sys import time import click import pyfiglet import pyjokes import random from asciimatics.effects import Print, Clock from asciimatics.exceptions import ResizeScreenError from asciimatics.renderers import FigletText, Rainbow from asciimatics.scene import Scene from asciimatics.screen import Screen from PIL import Image from math import ceil # Silence pygame message. This must precede the lib import from os import environ environ['PYGAME_HIDE_SUPPORT_PROMPT'] = '1' import pygame ASCII_CHARS = ['#', '?', '%', '.', 'S', '+', '.', '*', ':', ',', '@'] ASCII_CHARS_HR = ['-', '_', '+', '<', '>', 'i', '!', 'l', 'I', '?', '/', '\\', '|', '(', ')', '1', '{', '}', '[', ']', 'r', 'c', 'v', 'u', 'n', 'x', 'z', 'j', 'f', 't', 'L', 'C', 'J', 'U', 'Y', 'X', 'Z', 'O', '0', 'Q', 'o', 'a', 'h', 'k', 'b', 'd', 'p', 'q', 'w', 'm', '*', 'W', 'M', 'B', '8', '&', '%', '$', '#', '@'] COLOR_OPTIONS = ['black', 'blue', 'cyan', 'green', 'magenta', 'red', 'white', 'yellow'] FONTS = ['alligator', 'slant', '3-d', '3x5', '5lineoblique', 'banner3-D'] SUPPORTED_IMAGE_TYPES = ('.png', '.jpeg', '.jpg') def scale_image(image, new_width=100): """Resizes an image preserving the aspect ratio.""" (original_width, original_height) = image.size aspect_ratio = original_height / float(original_width) new_height = int(aspect_ratio * new_width * 0.5) return image.resize((new_width, new_height), Image.ANTIALIAS) def convert_to_grayscale(image): return image.convert('L') def map_pixels_to_ascii_chars(image, reverse, highres=False): """Maps each pixel to an ascii char based on the range in which it lies. 0-255 is divided into ranges of pixels based on the number of characters in ASCII_CHARS. """ # We make a local copy on reverse so we don't modify the global array. if highres: ascii_chars = ASCII_CHARS_HR if not reverse else ASCII_CHARS_HR[::-1] else: ascii_chars = ASCII_CHARS if not reverse else ASCII_CHARS[::-1] # Calculates the ranges of pixels based on the number of characters in ascii_chars range_width = ceil(255/len(ascii_chars)) pixels_in_image = list(image.getdata()) pixels_to_chars = [ ascii_chars[int(pixel_value / range_width)] for pixel_value in pixels_in_image] return "".join(pixels_to_chars) def convert_image_to_ascii(image, reverse=False, new_width=None, highres=False): if not new_width: new_width = image.width image = scale_image(image, new_width) image = convert_to_grayscale(image) pixels_to_chars = map_pixels_to_ascii_chars(image, reverse, highres) len_pixels_to_chars = len(pixels_to_chars) image_ascii = [pixels_to_chars[index: index + new_width] for index in range(0, len_pixels_to_chars, new_width)] return "\n".join(image_ascii) def colorText(text): COLORS = { "black": "\u001b[30;1m", "red": "\u001b[31;1m", "green": "\u001b[32m", "yellow": "\u001b[33;1m", "blue": "\u001b[34;1m", "magenta": "\u001b[35m", "cyan": "\u001b[36m", "white": "\u001b[37m", } for color in COLORS: text = text.replace("[[" + color + "]]", COLORS[color]) return text def open_image(path: str) -> Image: """ Wrapper for creation of an Image. We just use this to handle errors when opening the file. 
""" try: return Image.open(path) except Exception as e: print(f"Unable to open image file {path}.\n{e}") return None def demo(screen): effects = [ Print(screen, Rainbow(screen, FigletText("Hacktoberfest")), y=screen.height//2 - 8), Print(screen, Rainbow(screen, FigletText("ASCII Art 2020")), y=screen.height//2 + 3), Clock(screen, screen.width//2, screen.height//2, screen.height//2), ] screen.play([Scene(effects, -1)], stop_on_resize=True) screen.refresh() def show_clock(): try: Screen.wrapper(demo) sys.exit(0) except ResizeScreenError: pass def get_joke(): return pyjokes.get_joke() # typerwriter is the method for running the text def typewriter(message): # the spaces are for format on the splash screen print(pyfiglet.figlet_format(" zTm ", font=random.choice(FONTS)).rstrip()) print(pyfiglet.figlet_format("Community Presents -- ")) print(pyfiglet.figlet_format(" ASCII ART")) # print(pyfiglet.figlet_format("==> ")) for char in message: sys.stdout.write(char) sys.stdout.flush() if char != '\n': time.sleep(0.1) else: time.sleep(1) def ascii_textinput(): """Converts text entered by the user into random font ascii format text""" text = str(input('\n Enter The Text To Convert To Ascii-Art \n')) ascii_text(text) def ascii_text(text: str): """Converts simple text into random font ascii format text""" print(pyfiglet.figlet_format(text, font=random.choice(FONTS)).rstrip()) def is_supported(path): """ Returns True if given path points to a supported image, False otherwise """ if not path: return False _, ext = os.path.splitext(path) return ext.lower() in SUPPORTED_IMAGE_TYPES def check_file(path): """ Does validation. Check if the given path leads to a supported image type. It exits the program (with printed message) if the image is unsupported. """ if not is_supported(path): print(f"{path} is not supported") print("Supported file types: ", end='') print(', '.join(SUPPORTED_IMAGE_TYPES)) sys.exit(1) def write_file(ascii, filename): """Write ascii text to file""" if not ascii or not filename: return False try: with open(filename, "w") as f: f.write(ascii) return True except: return False def output_name(input): """ Works out the ascii filename from the input name. It'll attempt to save the output in the same directory as that of the input file. """ return f"{os.path.splitext(input if input else '')[0]}_output.txt" def show_credits(): """Show credits""" message = (pyjokes.get_joke()) # this is message ie the running text obtained from pyjokes library function pygame.mixer.init() pygame.mixer.music.load("typewriter.wav") pygame.mixer.music.play(loops=-1) click.termui.clear() typewriter(message) pygame.mixer.music.stop() pygame.mixer.quit() def all_supported_files(): return [f for f in os.listdir() if is_supported(f)] def set_color(image_ascii, color): if not color or color == 'black': return image_ascii text = f"[[{color}]]{image_ascii}[[white]]" return colorText(text) def process(input_file, reverse=False, save=False, output=None, width=None, color=None, highres=False): """Orchestrates the conversion of a single image to ascii.""" check_file(input_file) save = save or (output is not None) if save and not output: output = output_name(input_file) image = open_image(input_file) ascii_str = convert_image_to_ascii(image, reverse, width, highres) if save: if write_file(ascii_str, output): print(f"Image saved to -> {output}") else: print(f"Error writing to file: {output}") else: print(set_color(ascii_str, color))
class Rope {
  // Constrains bodyA to a fixed world point with a rope-like constraint.
  // Assumes the matter.js globals (Constraint, World, world) and the p5.js
  // drawing functions available in the surrounding sketch.
  constructor(bodyA, pointB) {
    var options = {
      bodyA: bodyA,
      pointB: pointB,
      stiffness: 1.2,
      length: 250
    };
    this.pointB = pointB;
    this.rope = Constraint.create(options);
    World.add(world, this.rope);
  }

  display() {
    var pointA = this.rope.bodyA.position;
    var pointB = this.pointB;
    push();
    stroke(48, 22, 8);
    strokeWeight(3);
    line(pointB.x, pointB.y, pointA.x, pointA.y);
    pop();
  }
}
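// A minimal usage sketch, assuming the matter.js globals (Bodies, World,
// world) and the p5.js draw loop that the Rope class above relies on.
var stone = Bodies.circle(300, 300, 20);
World.add(world, stone);
var rope = new Rope(stone, { x: 300, y: 50 });

function draw() {
  background(0);
  rope.display(); // draws the line from the anchor point to the body
}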
$().ready(function(){ $("#board_info_file").change(function () { var fileObj = $(this)[0].files[0]; if (typeof (fileObj) == "undefined" || fileObj.size <= 0) { alert("Upload error."); return; } var file_name = $(this).val(); var formFile = new FormData(); formFile.append("name", file_name); formFile.append("file", fileObj); $.ajax({ url: "../upload_board_info", data: formFile, type: "Post", dataType: "json", cache: false, processData: false, contentType: false, success: function (result) { console.log(result); if (result.status == 'success') { if (result.info != 'updated') { alert('Upload successfully.\nA new board type: '+result.info+' created.'); } else { alert('Upload successfully.'); } } else { alert(result.status); } window.location.reload(); }, error: function(e){ console.log(e.status); console.log(e.responseText); alert(e.status+'\n'+e.responseText); } }) }); $("#scenario_file").change(function () { var fileObj = $(this)[0].files[0]; if (typeof (fileObj) == "undefined" || fileObj.size <= 0) { alert("Upload error."); return; } var file_name = $(this).val(); var formFile = new FormData(); formFile.append("name", file_name); formFile.append("file", fileObj); $.ajax({ url: "../upload_scenario", data: formFile, type: "Post", dataType: "json", cache: false, processData: false, contentType: false, success: function (result) { console.log(result); status = result.status; if (status!='success') { alert(status); return; } error_list = result.error_list; file_name = result.file_name; rename = result.rename if(result.rename==true) { alert('Scenario setting existed, import successfully with a new name: '+file_name); } else { alert('Scenario setting import successfully with name: '+file_name); } window.location = 'http://' + window.location.host+"/scenario/" + file_name; }, error: function(e){ console.log(e.status); console.log(e.responseText); alert(e.status+'\n'+e.responseText); } }) }); $("#launch_file").change(function () { var fileObj = $(this)[0].files[0]; if (typeof (fileObj) == "undefined" || fileObj.size <= 0) { alert("Upload error."); return; } var file_name = $(this).val(); var formFile = new FormData(); formFile.append("name", file_name); formFile.append("file", fileObj); $.ajax({ url: "../upload_launch", data: formFile, type: "Post", dataType: "json", cache: false, processData: false, contentType: false, success: function (result) { console.log(result); status = result.status; if (status!='success') { alert(status); return; } error_list = result.error_list; file_name = result.file_name; rename = result.rename if(result.rename==true) { alert('Launch setting existed, import successfully with a new name: '+file_name); } else { alert('Launch setting import successfully with name: '+file_name); } window.location = 'http://' + window.location.host+"/launch/" + file_name; }, error: function(e){ console.log(e.status); console.log(e.responseText); alert(e.status+'\n'+e.responseText); } }) }); $("select#board_info").change(function(){ data = {board_info: $(this).val()}; $.ajax({ type : "POST", contentType: "application/json;charset=UTF-8", url : "../select_board", data : JSON.stringify(data), success : function(result) { console.log(result); window.location.reload(true); }, error : function(e){ console.log(e.status); console.log(e.responseText); } }); }); $("input").on('blur',function(){ $(this).parents(".form-group").removeClass("has-error"); $(this).parents(".form-group").children("p").text(""); }); $("select").on('changed.bs.select',function(){ 
$(this).parents(".form-group").removeClass("has-error"); $(this).parents(".form-group").children("p").text(""); }) $('#save_board').on('click', function() { save_board(); }); $('#save_scenario').on('click', function() { var name = $(this).data('id'); if(name=="generate_config_src") { save_scenario(name); } else { save_scenario(); } }); $('#remove_scenario').on('click', function() { old_scenario_name = $("#old_scenario_name").text(); var board_info = $("select#board_info").val(); if (board_info==null || board_info=='') { alert("Please select one board info before this operation."); return; } scenario_config = { old_setting_name: $("#old_scenario_name").text(), new_setting_name: $("#new_scenario_name").val() } $.ajax({ type : "POST", contentType: "application/json;charset=UTF-8", url : "../remove_setting", data : JSON.stringify(scenario_config), success : function(result) { console.log(result); status = result.status info = result.info if (status == 'success') { alert('Remove current scenario setting from acrn-config app successfully.'); window.location = window.location = 'http://' + window.location.host+"/scenario"; } else { alert('Remove current scenario setting from acrn-config app failed:\n'+info); } }, error : function(e){ console.log(e.status); console.log(e.responseText); alert(e.status+'\n'+e.responseText); } }); }); $('#save_launch').on('click', function() { var name = $(this).data('id'); if(name=="generate_launch_script") { save_launch(name); } else { save_launch(); } }); $('#remove_launch').on('click', function() { old_launch_name = $("#old_launch_name").text(); var board_info = $("select#board_info").val(); if (board_info==null || board_info=='') { alert("Please select one board before this operation."); return; } launch_config = { old_setting_name: $("#old_launch_name").text(), new_setting_name: $("#new_launch_name").val(), } $.ajax({ type : "POST", contentType: "application/json;charset=UTF-8", url : "../remove_setting", data : JSON.stringify(launch_config), success : function(result) { console.log(result); status = result.status info = result.info if (status == 'success') { alert('Remove current launch setting from acrn-config app successfully.'); window.location = window.location = 'http://' + window.location.host+"/launch"; } else { alert('Remove current launch setting from acrn-config app failed:\n'+info); } }, error : function(e){ console.log(e.status); console.log(e.responseText); alert(e.status+'\n'+e.responseText); } }); }); $('#export_scenario_xml').on('click', function() { var dataId = $(this).data('id'); $("#save_scenario").data('id', dataId); $('#src_path_row').addClass('hidden'); }); $('#generate_config_src').on('click', function() { var dataId = $(this).data('id'); $("#save_scenario").data('id', dataId); $('#src_path_row').removeClass('hidden'); }); $('#export_launch_xml').on('click', function() { var dataId = $(this).data('id'); $("#save_launch").data('id', dataId); $('#src_path_row').addClass('hidden'); }); $('#generate_launch_script').on('click', function() { var dataId = $(this).data('id'); $("#save_launch").data('id', dataId); $('#src_path_row').removeClass('hidden'); }); $('a.create_menu').on('click', function() { var type = $(this).data('id'); $("#createModalLabel").text("Create a new " + type + " setting"); var date = new Date(); $("#create_name").val(date.getTime()); $("#create_btn").data('id', type); }); $('#create_btn').on('click', function() { var type = $(this).data('id'); var create_name = $("#create_name").val(); create_setting(type, create_name, 
create_name, 'create'); }); $(document).on('change', "select#load_scenario_name", function() { $('input#load_scenario_name2').val(this.value); }); $(document).on('change', "select#load_launch_name", function() { $('input#load_launch_name2').val(this.value); }); $('#load_scenario_btn').on('click', function() { var type = $(this).data('id'); var default_load_name = $("#load_scenario_name").val(); var load_name = $("#load_scenario_name2").val(); create_setting(type, default_load_name, load_name, 'load') }); $('#load_launch_btn').on('click', function() { var type = $(this).data('id'); var default_load_name = $("#load_launch_name").val(); var load_name = $("#load_launch_name2").val(); create_setting(type, default_load_name, load_name, 'load') }); $(document).on('click', "#add_vm", function() { var curr_vm_id = $(this).data('id'); $("#add_vm_submit").data('id', curr_vm_id); }); $(document).on('click', "#add_vm_submit", function() { var curr_vm_id = $(this).data('id'); save_scenario('add_vm:'+curr_vm_id) }); $(document).on('click', "#remove_vm", function() { var remove_confirm_message = 'Do you want to delete this VM?' if(confirm(remove_confirm_message)) { var curr_vm_id = $(this).data('id'); save_scenario('remove_vm:'+curr_vm_id) } }); $(document).on('click', "#add_launch_vm", function() { var curr_vm_id = $(this).data('id'); $("#add_launch_submit").data('id', curr_vm_id); }); $(document).on('click', "#add_launch_submit", function() { var curr_vm_id = $(this).data('id'); save_launch('add_vm:'+curr_vm_id); }); $('#add_launch_script').on('click', function() { var curr_vm_id = $(this).data('id'); $("#add_launch_submit").data('id', curr_vm_id); }); $(document).on('click', "#remove_launch_vm", function() { var remove_confirm_message = 'Do you want to delete this VM?' 
if(confirm(remove_confirm_message)) { var curr_vm_id = $(this).data('id'); save_launch('remove_vm:'+curr_vm_id) } }); $(document).on('change', "select#scenario_name", function() { data = {scenario_name: $(this).val(), launch_name: $('text#old_launch_name').text()}; $.ajax({ type : "POST", contentType: "application/json;charset=UTF-8", url : "../get_post_launch_vms", data : JSON.stringify(data), success : function(result) { console.log(result); vm_list = result.vm_list $('select#add_launch_type').empty().selectpicker('refresh'); for(i in vm_list) { var option = vm_list[i][1]+' ( ID : '+vm_list[i][0]+' )' $('select#add_launch_type').append( '<option value="'+option+'">'+option+'</option>').selectpicker('refresh'); } }, error : function(e){ console.log(e.status); console.log(e.responseText); } }); }); $("select[ID$='vuart:id=1,base']").change(function(){ var id = $(this).attr('id'); var value = $(this).val(); show_com_target(id, value); }); $("select[ID$='vuart:id=1,base']").each(function(index, item) { var id = $(item).attr('id'); var value = $(item).val(); show_com_target(id, value); }); $("select[ID$='FEATURES,RDT,CDP_ENABLED']").change(function(){ var id = $(this).attr('id'); var value = $(this).val(); update_vcpu_clos_option(id, value); update_rdt_clos_mask(id, value); }); $("select[ID$='FEATURES,RDT,CDP_ENABLED']").each(function(index, item) { var id = $(this).attr('id'); var value = $(item).val(); update_vcpu_clos_option(id, value); update_rdt_clos_mask(id, value); }); $(document).on('click', "button:contains('+')", function() { if($(this).text() != '+') return; var curr_item_id = $(this).attr('id'); var curr_id = curr_item_id.substr(curr_item_id.lastIndexOf('_')+1); var config_item = $(this).parent().parent(); var config_item_added = config_item.clone(); var config_vm = config_item.parent(); var vcpu_index_list = []; config_vm.children().each(function(){ if($(this).find("button:contains('+')").size() > 0) { var btn_add_vm_id = $(this).find("button:contains('+')").attr('id'); vcpu_index_list.push(parseInt(btn_add_vm_id.substr(btn_add_vm_id.lastIndexOf('_')+1))); } }); var id_added = 0; for (i=0; i<100; i++) { if (!vcpu_index_list.includes(i)) { id_added = i; break } } var id_pre_added = curr_item_id.substr(0, curr_item_id.lastIndexOf('_')); config_item_added.find("button:contains('+')").attr('id', id_pre_added+'_'+id_added); config_item_added.find("button:contains('-')").attr('id', id_pre_added.replace('add_', 'remove_')+'_'+id_added); var curr_err_id = config_item_added.find("p").attr('id'); config_item_added.find("p").attr('id', curr_err_id.replace(','+curr_id+'_', ','+id_added+'_')); config_item_added.find("button:contains('-')").prop("disabled", false); config_item_added.find("label:first").text(""); config_item_added.find('.bootstrap-select').replaceWith(function() { return $('select', this); }); config_item_added.find('.selectpicker').val('default').selectpicker('deselectAll');; config_item_added.find('.selectpicker').selectpicker('render'); config_item_added.insertAfter(config_item); if(curr_item_id.indexOf('add_vcpu')>=0) { var config_vm = config_item.parent(); var curr_vcpu_index = vcpu_index_list.indexOf(parseInt(curr_id)) var vcpu_clos_item = config_vm.find("label:contains('vcpu_clos')").first().parent(); while(curr_vcpu_index > 0) { vcpu_clos_item = vcpu_clos_item.next(); curr_vcpu_index -= 1; } var vcpu_clos_item_added = vcpu_clos_item.clone(); vcpu_clos_item_added.find("label:first").text(""); vcpu_clos_item_added.find('.bootstrap-select').replaceWith(function() { return 
$('select', this); }); vcpu_clos_item_added.find('.selectpicker').val('default').selectpicker('deselectAll');; vcpu_clos_item_added.find('.selectpicker').selectpicker('render'); vcpu_clos_item_added.insertAfter(vcpu_clos_item); } }); $(document).on('click', "button:contains('-')", function() { if($(this).text() != '-') return; var config_item = $(this).parent().parent(); var curr_item_id = $(this).attr('id'); if(curr_item_id.indexOf('remove_vcpu')>=0) { var config_vm = config_item.parent(); var vcpu_index_list = []; config_vm.children().each(function(){ if($(this).find("button:contains('+')").size() > 0) { var btn_del_vm_id = $(this).find("button:contains('+')").attr('id'); vcpu_index_list.push(parseInt(btn_del_vm_id.substr(btn_del_vm_id.lastIndexOf('_')+1))); } }); var curr_item_id = $(this).attr('id'); var curr_id = parseInt(curr_item_id.substr(curr_item_id.lastIndexOf('_')+1)); curr_vcpu_index = vcpu_index_list.indexOf(curr_id); var vcpu_clos_item = config_vm.find("label:contains('vcpu_clos')").first().parent(); while(curr_vcpu_index > 0) { vcpu_clos_item = vcpu_clos_item.next(); curr_vcpu_index -= 1; } vcpu_clos_item.remove(); } config_item.remove(); }); $('#remove_vm_kata').on('click', function() { if(confirm("Do you want to remove the VM?")) { save_scenario("remove_vm_kata"); } }); $('#add_vm_kata').on('click', function() { if(confirm("Do you want to add the Kata VM based on generic config?")) { save_scenario("add_vm_kata"); } }); }) $(window).load(function () {   $("select#scenario_name").change(); }); function show_com_target(id, value) { if(id==null || id=='undefined') { return } var id2 = id.replace('base', 'target_vm_id'); var jquerySpecialChars = ["~", "`", "@", "#", "%", "&", "=", "'", "\"", ":", ";", "<", ">", ",", "/"]; for (var i = 0; i < jquerySpecialChars.length; i++) { id2 = id2.replace(new RegExp(jquerySpecialChars[i], "g"), "\\" + jquerySpecialChars[i]); } if (value == 'INVALID_COM_BASE') { $('#'+id2+'_label1').hide(); $('#'+id2+'_label2').hide(); $('#'+id2+'_config').hide(); $('#'+id2+'_err').hide(); } else { $('#'+id2+'_label1').show(); $('#'+id2+'_label2').show(); $('#'+id2+'_config').show(); $('#'+id2+'_err').show(); } } function update_vcpu_clos_option(id, value) { if(value == 'y') { $("select[ID$='clos,vcpu_clos']").each(function(){ len = $(this).find('option').length; option = $(this).find('option').first(); for(i=0; i<len; i++){ if(i>(len-1)/2){ option.attr('disabled','disabled'); } option = option.next(); } $(this).selectpicker('render'); }); } else { $("select[ID$='clos,vcpu_clos']").each(function(){ len = $(this).find('option').length; option = $(this).find('option').first(); for(i=0; i<len; i++){ if(i>(len-1)/2){ option.removeAttr('disabled'); } option = option.next(); } $(this).selectpicker('render'); }); } } function update_rdt_clos_mask(id, value) { $.ajax({ type : "POST", contentType: "application/json;charset=UTF-8", url : "../get_num_of_rdt_res_entries", data : JSON.stringify({'cdp_enabled': value}), success : function(result) { console.log(result); num_clos_mask = result.num_clos_mask; num_mba_delay = result.num_mba_delay; clos_mask_entries = [null]; index = 0; $("input[ID$='hv,FEATURES,RDT,CLOS_MASK']").each(function(){ index += 1; if(index<=num_clos_mask) { clos_mask_entries[0] = $(this).parent().parent(); } if(index>num_clos_mask) { clos_mask_entries.push($(this).parent().parent()); } }); if(index<=num_clos_mask) { last_clos_mask_entry = clos_mask_entries[0]; for(i=0; i<num_clos_mask-index; i++) { clos_mask_entry_added = 
last_clos_mask_entry.clone(); clos_mask_entry_added.insertAfter(last_clos_mask_entry); } } else { for(i=clos_mask_entries.length-1; i>0; i--) { clos_mask_entries[i].remove(); } } mba_delay_entries = [null]; index = 0; $("input[ID$='hv,FEATURES,RDT,MBA_DELAY']").each(function(){ index += 1; if(index<=num_mba_delay) { mba_delay_entries[0] = $(this).parent().parent(); } if(index>num_mba_delay) { mba_delay_entries.push($(this).parent().parent()); } }); if(index<=num_mba_delay) { last_mba_delay_entry = mba_delay_entries[0]; for(i=0; i<num_mba_delay-index; i++) { mba_delay_entry_added = last_mba_delay_entry.clone(); mba_delay_entry_added.insertAfter(last_mba_delay_entry); } } else { for(i=mba_delay_entries.length-1; i>0; i--) { mba_delay_entries[i].remove(); } } }, error : function(e){ console.log(e.status); console.log(e.responseText); alert(e.responseText); } }); } function create_setting(type, default_name, name, mode){ var board_info = $("text#board_type").text(); if (board_info==null || board_info=='') { alert("Please select one board info before this operation."); return; } create_config = { board_info: board_info, type: type, default_name: default_name, create_name: name, mode: mode } $.ajax({ type : "POST", contentType: "application/json;charset=UTF-8", url : "../check_setting_exist", data : JSON.stringify(create_config), success : function(result) { exist = result.exist create_flag = true if(exist == "yes") { overwirte_confirm_message = 'Setting name: ' + create_config['create_name'] + ' existed in ' + 'acrn-hypervisor/misc/vm_configs/xmls/config-xmls/'+board_info+'/user_defined/.\n'+ 'Do you want to overwrite it?\nClick OK to overwrite it; click Cancel to rename it.' if(!confirm(overwirte_confirm_message)) { create_flag = false } } if(create_flag == true) { $.ajax({ type : "POST", contentType: "application/json;charset=UTF-8", url : "../create_setting", data : JSON.stringify(create_config), success : function(result) { console.log(result); status = result.status setting = result.setting error_list = result.error_list if (status == 'success' && (JSON.stringify(error_list)=='{}' || JSON.stringify(error_list)=='null')) { alert('create a new setting successfully.'); } else { alert('create a new setting failed. 
\nError list:\n'+JSON.stringify(error_list)); } var href = window.location.href if(href.endsWith("/scenario") || href.endsWith("/launch")) { window.location = type + "/" + setting; } else { window.location = "../" + type + "/" + setting; } }, error : function(e){ $("#create_modal").modal("hide"); $("#load_scenario_modal").modal("hide"); $("#load_launch_modal").modal("hide"); console.log(e.status); console.log(e.responseText); alert(e.status+'\n'+e.responseText); } }); } }, error : function(e){ console.log(e.status); console.log(e.responseText); alert(e.status+'\n'+e.responseText); } }); } function save_scenario(generator=null){ var board_info = $("text#board_type").text(); if (board_info==null || board_info=='') { alert("Please select one board info before this operation."); return; } scenario_config = { old_scenario_name: $("#old_scenario_name").text(), generator: generator } if(generator!=null && generator.indexOf('add_vm:')==0) { scenario_config['new_scenario_name'] = $("#new_scenario_name2").val() } else if(generator!=null && generator.indexOf('remove_vm:')==0) { scenario_config['new_scenario_name'] = $("#old_scenario_name").text() } else { scenario_config['new_scenario_name'] = $("#new_scenario_name").val() } $("input").each(function(){ var id = $(this).attr('id'); var value = $(this).val(); if(id.indexOf('CLOS_MASK')>=0 || id.indexOf('MBA_DELAY')>=0 || id.indexOf('IVSHMEM_REGION')>=0) { if(id in scenario_config) { scenario_config[id].push(value); } else { scenario_config[id] = [value]; } } else if(id!='new_scenario_name' && id!='new_scenario_name2' && id!='board_info_file' && id!='board_info_upload' && id!='scenario_file' && id!='create_name' && id!='load_scenario_name2' && id!='load_launch_name2' && id!='src_path') { scenario_config[id] = value; } }) $("textarea").each(function(){ var id = $(this).attr('id'); var value = $(this).val(); scenario_config[id] = value; }) $("select").each(function(){ var id = $(this).attr('id'); var value = $(this).val(); if(id.indexOf('pcpu_id')>=0 || id.indexOf('pci_dev')>=0 || id.indexOf('vcpu_clos')>=0) { if(id in scenario_config) { scenario_config[id].push(value); } else { scenario_config[id] = [value]; } } else if(id!='board_info' && id!='load_scenario_name' && id!='load_launch_name') { scenario_config[id] = value; } }) $.ajax({ type : "POST", contentType: "application/json;charset=UTF-8", url : "../check_setting_exist", data : JSON.stringify(scenario_config), success : function(result) { exist = result.exist create_flag = true if(exist == "yes") { overwirte_confirm_message = 'Setting name: ' + scenario_config['create_name'] + ' existed in ' + 'acrn-hypervisor/misc/vm_configs/xmls/config-xmls/'+board_info+'/user_defined/.\n'+ 'Do you want to overwrite it?\nClick OK to overwrite it; click Cancel to rename it.' 
if(!confirm(overwirte_confirm_message)) { create_flag = false } } if(create_flag == true) { $.ajax({ type : "POST", contentType: "application/json;charset=UTF-8", url : "../save_scenario", data : JSON.stringify(scenario_config), success : function(result) { error_list = result.error_list; status = result.status; var no_err = true; $.each(error_list, function(index,item){ no_err = false; var jquerySpecialChars = ["~", "`", "@", "#", "%", "&", "=", "'", "\"", ":", ";", "<", ">", ",", "/"]; for (var i = 0; i < jquerySpecialChars.length; i++) { index = index.replace(new RegExp(jquerySpecialChars[i], "g"), "\\" + jquerySpecialChars[i]); } $("#"+index+"_err").parents(".form-group").addClass("has-error"); $("#"+index+"_err").text(item); }) if(no_err == true && status == 'success') { file_name = result.file_name; validate_message = 'Scenario setting saved successfully with name: ' +file_name+'\ninto acrn-hypervisor/misc/vm_configs/xmls/config-xmls/'+board_info+'/user_defined/.' if(result.rename==true) { validate_message = 'Scenario setting existed, saved successfully with a new name: ' +file_name+'\ninto acrn-hypervisor/misc/vm_configs/xmls/config-xmls/'+board_info+'/user_defined/.'; } if(generator=="generate_config_src") { var src_path = $("input#src_path").val(); generate_flag = true; if(src_path == null || src_path == '') { overwirte_confirm_message = 'The Source Path for configuration files is not set.\n' + 'Do you want to generate them into the default path: acrn-hypervisor/misc/vm_configs/board/ and acrn-hypervisor/misc/vm_configs/scenarios/,\n'+ 'and overwrite the old ones?\nClick OK to overwrite them; click Cancel to edit the Source Path.' if(!confirm(overwirte_confirm_message)) { generate_flag = false } } if(generate_flag) { generator_config = { type: generator, board_info: $("select#board_info").val(), board_setting: "board_setting", scenario_setting: file_name, src_path: src_path, } $.ajax({ type : "POST", contentType: "application/json;charset=UTF-8", url : "../generate_src", data : JSON.stringify(generator_config), success : function(result) { console.log(result); status = result.status error_list = result.error_list if (status == 'success' && (JSON.stringify(error_list)=='{}' || JSON.stringify(error_list)=='null')) { if(src_path==null || src_path=='') { alert(generator+' successfully into acrn-hypervisor/misc/vm_configs/board/ and acrn-hypervisor/misc/vm_configs/scenarios/ '); } else { alert(generator+' successfully into '+src_path); } } else { alert(generator+' failed. 
\nError list:\n'+JSON.stringify(error_list)); } window.location = "./" + file_name; }, error : function(e){ console.log(e.status); console.log(e.responseText); alert(e.status+'\n'+e.responseText); } }); } } else { alert(validate_message); window.location = "./" + file_name; } } else { $("#save_modal").modal("hide"); alert(JSON.stringify(error_list)); } }, error : function(e){ $("#save_modal").modal("hide"); console.log(e.status); console.log(e.responseText); alert(e.status+'\n'+e.responseText); } }); } }, error : function(e){ console.log(e.status); console.log(e.responseText); alert(e.status+'\n'+e.responseText); } }); } function save_launch(generator=null) { var board_info = $("text#board_type").text(); var scenario_name = $("select#scenario_name").val(); if (board_info==null || board_info=='' || scenario_name==null || scenario_name=='') { alert("Please select one board and scenario before this operation."); return; } launch_config = { old_launch_name: $("#old_launch_name").text(), scenario_name: scenario_name, generator: generator } if(generator!=null && generator.indexOf('add_vm:')==0) { launch_config['new_launch_name'] = $("#new_launch_name2").val() } else if(generator!=null && generator.indexOf('remove_vm:')==0) { launch_config['new_launch_name'] = $("#old_launch_name").text() } else { launch_config['new_launch_name'] = $("#new_launch_name").val() } $("input").each(function(){ var id = $(this).attr('id'); var value = $(this).val(); if(id.indexOf('virtio_devices,network')>=0 || id.indexOf('virtio_devices,block')>=0 || id.indexOf('virtio_devices,input')>=0) { if(id in launch_config) { launch_config[id].push(value); } else { launch_config[id] = [value]; } } else if(id!='new_launch_name' && id!='new_launch_name2' && id!='board_info_file' && id!='board_info_upload' && id!="launch_file" && id!='create_name' && id!='load_scenario_name2' && id!='load_launch_name2' && id!='src_path') { launch_config[id] = value; } }) $("select").each(function(){ var id = $(this).attr('id'); var value = $(this).val(); if(id.indexOf('pcpu_id')>=0 || id.indexOf('shm_region')>=0 || id.indexOf('pci_dev')>=0) { if(id in launch_config) { launch_config[id].push(value); } else { launch_config[id] = [value]; } } else if(id!='board_info' && id!='load_scenario_name' && id!='load_launch_name') { launch_config[id] = value; } }) $("textarea").each(function(){ var id = $(this).attr('id'); var value = $(this).val(); launch_config[id] = value; }) $.ajax({ type : "POST", contentType: "application/json;charset=UTF-8", url : "../check_setting_exist", data : JSON.stringify(launch_config), success : function(result) { exist = result.exist create_flag = true if(exist == "yes") { overwirte_confirm_message = 'Setting name: ' + launch_config['create_name'] + ' existed in ' + 'acrn-hypervisor/misc/vm_configs/xmls/config-xmls/'+board_info+'/user_defined/.\n'+ 'Do you want to overwrite it?\nClick OK to overwrite it; click Cancel to rename it.' 
if(!confirm(overwirte_confirm_message)) { create_flag = false } } if(create_flag == true) { $.ajax({ type : "POST", contentType: "application/json;charset=UTF-8", url : "../save_launch", data : JSON.stringify(launch_config), success : function(result) { console.log(result); error_list = result.error_list; status = result.status; var no_err = true; $.each(error_list, function(index,item){ no_err = false; var jquerySpecialChars = ["~", "`", "@", "#", "%", "&", "=", "'", "\"", ":", ";", "<", ">", ",", "/"]; for (var i = 0; i < jquerySpecialChars.length; i++) { index = index.replace(new RegExp(jquerySpecialChars[i], "g"), "\\" + jquerySpecialChars[i]); } $("#"+index).parents(".form-group").addClass("has-error"); $("#"+index+"_err").text(item); }) if(no_err == true && status == 'success') { file_name = result.file_name; validate_message = 'Launch setting saved successfully with name: ' +file_name+'\nto acrn-hypervisor/misc/vm_configs/xmls/config-xmls/'+board_info+'/user_defined/.' if(result.rename==true) { validate_message = 'Launch setting existed, saved successfully with a new name: ' +file_name+'\nto acrn-hypervisor/misc/vm_configs/xmls/config-xmls/'+board_info+'/user_defined/.'; } if(generator == 'generate_launch_script') { var src_path = $("input#src_path").val(); generate_flag = true; if(src_path == null || src_path == '') { overwirte_confirm_message = 'The Source Path for launch scripts is not set.\n' + 'Do you want to generate them into the default path: misc/vm_configs/xmls/config-xmls/'+board_info+'/output/,\n'+ 'and overwrite the old ones?\nClick OK to overwrite them; click Cancel to edit the Source Path.' if(!confirm(overwirte_confirm_message)) { generate_flag = false } } if(generate_flag) { generator_config = { type: generator, board_info: $("select#board_info").val(), board_setting: "board_setting", scenario_setting: $("select#scenario_name").val(), launch_setting: file_name, src_path: src_path, } $.ajax({ type : "POST", contentType: "application/json;charset=UTF-8", url : "../generate_src", data : JSON.stringify(generator_config), success : function(result) { console.log(result); status = result.status error_list = result.error_list if (status == 'success' && (JSON.stringify(error_list)=='{}' || JSON.stringify(error_list)=='null')) { if(src_path==null || src_path==='') { alert(generator+' successfully into '+ 'acrn-hypervisor/misc/vm_configs/xmls/config-xmls/'+board_info+'/output/.'); } else { alert(generator+' successfully into '+src_path); } } else { alert(generator+' failed. \nError list:\n'+JSON.stringify(error_list)); } window.location = "./" + file_name; }, error : function(e){ console.log(e.status); console.log(e.responseText); alert(e.status+'\n'+e.responseText); } }); } } else { alert(validate_message); window.location = "./" + file_name; } } else { $("#save_modal").modal("hide"); alert(JSON.stringify(error_list)); } }, error : function(e){ $("#save_modal").modal("hide"); console.log(e.status); console.log(e.responseText); alert(e.status+'\n'+e.responseText); } }); } }, error : function(e){ console.log(e.status); console.log(e.responseText); alert(e.status+'\n'+e.responseText); } }); }
from bs4 import BeautifulSoup
import requests as req
import re
import asyncio
import aiohttp
import os

if os.name == 'nt':
    loop = asyncio.ProactorEventLoop()
    asyncio.set_event_loop(loop)
else:
    loop = asyncio.get_event_loop()

# db
from db import handleDB, naiveHandleDB

# global variables such as parsing URL, requests
BASE_URL = 'https://www.rimi.ee'
request = req.get(BASE_URL + "/epood/en")
parser = BeautifulSoup(request.content, 'html.parser')
p_array = list()
headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.138 Safari/537.36'}

# get all the URLs for every product category
def GetCategoryURLS():
    url_array = parser.find_all('ul', id=re.compile("(desktop_category_menu_)."))
    return [x.find('a', class_='base-category-link')['href'] for x in url_array]

# service method just to keep everything clear
# gets product info (name, price) and returns a dictionary
def GetProductInfo(html_code):
    # defining local parser
    product_parser = BeautifulSoup(str(html_code), 'html.parser')
    name = product_parser.find('p', class_='card__name').text
    index = [item['data-product-code'] for item in product_parser.find_all('div', attrs={'data-product-code': True})]
    discount = True if product_parser.find('div', class_='-has-discount') else False
    try:
        pattern = re.compile("[0-9]+")
        price = pattern.findall(f"{product_parser.find('div', class_='price-tag').text}")
        price = f"{price[0]}.{price[1]}"
    except Exception:
        price = 0
    return {'name': f"{index[0]}, {name}", 'price': price, 'discount': discount}

async def getPageData(session, url, semaphore):
    page = 1
    while True:
        # the semaphore is held per request, so it actually bounds the number
        # of simultaneous requests across all category tasks
        async with semaphore:
            async with session.get(url=f"{BASE_URL}/{url}?page={page}", headers=headers) as response:
                response_data = await response.text()
        parser = BeautifulSoup(response_data, 'html.parser')
        items = parser.find_all('li', class_='product-grid__item')
        if len(items) == 0:
            break
        for item in items:
            p_array.append(GetProductInfo(item))
        page += 1

async def gatherData():
    # acquired per request inside getPageData; holding it once around the
    # whole gather would not limit concurrency at all
    semaphore = asyncio.Semaphore(200)
    connector = aiohttp.TCPConnector(force_close=True)
    async with aiohttp.ClientSession(connector=connector, trust_env=True) as session:
        tasks = list()
        for url in GetCategoryURLS():
            task = asyncio.create_task(getPageData(session, url, semaphore))
            tasks.append(task)
        await asyncio.gather(*tasks)

def main(method):
    asyncio.run(gatherData())
    if (method == "naive"):
        naiveHandleDB(p_array, 'rimi')
    else:
        handleDB(p_array, 'rimi')
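# A minimal sketch of running the scraper; handleDB / naiveHandleDB come from
# the local db module imported above.
if __name__ == "__main__":
    main("naive")  # any other value routes the results through handleDB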
""" Find compiled module linking to Tcl / Tk libraries """ import sys from tkinter import _tkinter as tk if hasattr(sys, "pypy_find_executable"): TKINTER_LIB = tk.tklib_cffi.__file__ else: TKINTER_LIB = tk.__file__
#include "../redismodule.h" #include <string.h> int MutexTryAcquire_RedisCommand(RedisModuleCtx *ctx, RedisModuleString **argv, int argc) { if (argc != 4) { return RedisModule_WrongArity(ctx); } RedisModuleKey *lockKey = RedisModule_OpenKey(ctx, argv[1], REDISMODULE_READ|REDISMODULE_WRITE); int keyType = RedisModule_KeyType(lockKey); if (keyType != REDISMODULE_KEYTYPE_STRING && keyType != REDISMODULE_KEYTYPE_EMPTY) { RedisModule_CloseKey(lockKey); return RedisModule_ReplyWithError(ctx, REDISMODULE_ERRORMSG_WRONGTYPE); } RedisModuleString *requestingOwner = argv[2]; RedisModule_ReplyWithArray(ctx, 3); if (keyType == REDISMODULE_KEYTYPE_EMPTY) { mstime_t px; if (RedisModule_StringToLongLong(argv[3], &px) != REDISMODULE_OK) { RedisModule_CloseKey(lockKey); return RedisModule_ReplyWithError(ctx, "ERR invalid pexire"); } RedisModule_StringSet(lockKey, requestingOwner); RedisModule_SetExpire(lockKey, px); RedisModule_ReplyWithNull(ctx); RedisModule_ReplyWithString(ctx, requestingOwner); RedisModule_ReplyWithLongLong(ctx, px); RedisModule_CloseKey(lockKey); return REDISMODULE_OK; } size_t currentOwnerLen, requestingOwnerLen; const char *currentOwnerStringPtr = RedisModule_StringDMA(lockKey, &currentOwnerLen, REDISMODULE_READ); const char *requestingOwnerStringPtr = RedisModule_StringPtrLen(requestingOwner, &requestingOwnerLen); if (currentOwnerLen == requestingOwnerLen && memcmp(currentOwnerStringPtr, requestingOwnerStringPtr, currentOwnerLen) == 0) { mstime_t px; if (RedisModule_StringToLongLong(argv[3], &px) != REDISMODULE_OK) { RedisModule_CloseKey(lockKey); return RedisModule_ReplyWithError(ctx, "ERR invalid pexire"); } RedisModule_SetExpire(lockKey, px); RedisModule_ReplyWithString(ctx, requestingOwner); RedisModule_ReplyWithString(ctx, requestingOwner); RedisModule_ReplyWithLongLong(ctx, px); RedisModule_CloseKey(lockKey); return REDISMODULE_OK; } RedisModule_ReplyWithStringBuffer(ctx, currentOwnerStringPtr, currentOwnerLen); RedisModule_ReplyWithStringBuffer(ctx, currentOwnerStringPtr, currentOwnerLen); RedisModule_ReplyWithLongLong(ctx, RedisModule_GetExpire(lockKey)); RedisModule_CloseKey(lockKey); return REDISMODULE_OK; } int MutexTryRelease_RedisCommand(RedisModuleCtx *ctx, RedisModuleString **argv, int argc) { if (argc != 3) { return RedisModule_WrongArity(ctx); } RedisModuleKey *lockKey = RedisModule_OpenKey(ctx, argv[1], REDISMODULE_READ|REDISMODULE_WRITE); int keyType = RedisModule_KeyType(lockKey); if (keyType == REDISMODULE_KEYTYPE_EMPTY) { RedisModule_CloseKey(lockKey); RedisModule_ReplyWithNull(ctx); return REDISMODULE_OK; } if (keyType != REDISMODULE_KEYTYPE_STRING) { RedisModule_CloseKey(lockKey); return RedisModule_ReplyWithError(ctx, REDISMODULE_ERRORMSG_WRONGTYPE); } RedisModuleString *requestingOwner = argv[2]; size_t currentOwnerLen, requestingOwnerLen; const char *currentOwnerStringPtr = RedisModule_StringDMA(lockKey, &currentOwnerLen, REDISMODULE_READ); const char *requestingOwnerStringPtr = RedisModule_StringPtrLen(requestingOwner, &requestingOwnerLen); if (currentOwnerLen == requestingOwnerLen && memcmp(currentOwnerStringPtr, requestingOwnerStringPtr, currentOwnerLen) == 0) { RedisModule_DeleteKey(lockKey); RedisModule_ReplyWithString(ctx, requestingOwner); } else { RedisModule_ReplyWithStringBuffer(ctx, currentOwnerStringPtr, currentOwnerLen); } RedisModule_CloseKey(lockKey); return REDISMODULE_OK; } int RedisModule_OnLoad(RedisModuleCtx *ctx, RedisModuleString **argv, int argc) { if (RedisModule_Init(ctx, "locks", 1, REDISMODULE_APIVER_1) == REDISMODULE_ERR) { 
return REDISMODULE_ERR; } if (RedisModule_CreateCommand(ctx, "locks.mutex.try.acquire", MutexTryAcquire_RedisCommand, "write deny-oom fast", 1, 1, 1) == REDISMODULE_ERR) { return REDISMODULE_ERR; } if (RedisModule_CreateCommand(ctx, "locks.mutex.try.release", MutexTryRelease_RedisCommand, "write fast", 1, 1, 1) == REDISMODULE_ERR) { return REDISMODULE_ERR; } return REDISMODULE_OK; }
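/*
 * Illustrative redis-cli session (assumed output, derived from the reply
 * layout above: [previous holder or nil, current holder, pexpire in ms]):
 *
 *   127.0.0.1:6379> LOCKS.MUTEX.TRY.ACQUIRE mylock worker-1 30000
 *   1) (nil)               <- lock was free
 *   2) "worker-1"          <- worker-1 now holds it
 *   3) (integer) 30000
 *   127.0.0.1:6379> LOCKS.MUTEX.TRY.ACQUIRE mylock worker-2 30000
 *   1) "worker-1"          <- acquisition failed, worker-1 still holds it
 *   2) "worker-1"
 *   3) (integer) 29874     <- remaining TTL as reported by GetExpire
 *   127.0.0.1:6379> LOCKS.MUTEX.TRY.RELEASE mylock worker-1
 *   "worker-1"             <- released by its owner
 */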
function init(){ var idEncuesta=$("#idEncuesta").val(); Generar(idEncuesta); $("#FormularioEncuesta").on("submit", function (e) { RegistroEncuesta(e); }); } function Generar(idEncuesta){ $.post("Gestion/Controlador/CEncuesta.php?op=RecuperarEncuestaCompleta", { "idEncuesta": idEncuesta }, function (data, status) { data = JSON.parse(data); var Encuesta = ""; var TituloEncuesta = data.EncuestaNombre; var Detalle = data.EncuestaDetalle; $("#tituloEncuesta").empty(); $("#tituloEncuesta").text(TituloEncuesta); $("#DetalleEncuesta").empty(); $("#DetalleEncuesta").text(Detalle); var ArregloPreguntas = data.Preguntas.split("&"); ArregloPreguntas.pop(); for (var i = 0; i < ArregloPreguntas.length; i++) { var Pregunta = ArregloPreguntas[i].split("|"); Encuesta = AgregarTipoPregunta(Pregunta, Encuesta); } Encuesta=Encuesta+"<hr>"; $("#CuerpoEncuesta").html(Encuesta); }); } function AgregarTipoPregunta(Pregunta, Encuesta) { Encuesta=Encuesta+' <div class="row Titulopregunta center_element">'+ '<div class="col-10 col-12-xsmall">'+ '<p class="texto-12px" align="justify">'+Pregunta[1]+'</p>'+ '</div>'+ ' </div>'; if (Pregunta[2] == 1) { Encuesta=Encuesta+'<div class="row Titulopregunta center_element m-5">'+ '<div class="col-2 col-2-small">'+ '<input class="pregunta1" data-tipo="1" data-pregunta="'+Pregunta[0]+'" type="radio" id="opcion1-'+Pregunta[0]+'" name="satisfaccion-'+Pregunta[0]+'" value="1" checked>'+ '<label class="texto-12px" for="opcion1-'+Pregunta[0]+'">1</label>'+ '</div>'+ '<div class="col-2 col-2-small">'+ '<input class="pregunta1" data-tipo="1" data-pregunta="'+Pregunta[0]+'" type="radio" id="opcion2-'+Pregunta[0]+'" name="satisfaccion-'+Pregunta[0]+'" value="2">'+ '<label class="texto-12px" for="opcion2-'+Pregunta[0]+'">2</label>'+ '</div>'+ '<div class="col-2 col-2-small">'+ '<input class="pregunta1" data-tipo="1" data-pregunta="'+Pregunta[0]+'" type="radio" id="opcion3-'+Pregunta[0]+'" name="satisfaccion-'+Pregunta[0]+'" value="3">'+ '<label class="texto-12px" for="opcion3-'+Pregunta[0]+'">3</label>'+ '</div>'+ '<div class="col-2 col-2-small">'+ '<input class="pregunta1" data-tipo="1" data-pregunta="'+Pregunta[0]+'" type="radio" id="opcion4-'+Pregunta[0]+'" name="satisfaccion-'+Pregunta[0]+'" value="4">'+ '<label class="texto-12px" for="opcion4-'+Pregunta[0]+'">4</label>'+ '</div>'+ '<div class="col-2 col-2-small">'+ '<input class="pregunta1" data-tipo="1" data-pregunta="'+Pregunta[0]+'" type="radio" id="opcion5-'+Pregunta[0]+'" name="satisfaccion-'+Pregunta[0]+'" value="5">'+ '<label class="texto-12px" for="opcion5-'+Pregunta[0]+'">5</label>'+ '</div>' + '</div>'; } else if (Pregunta[2] == 2) { Encuesta=Encuesta+' <div class="row Titulopregunta center_element m-5">'+ '<div class="col-4 col-4-small">'+ '<input class="pregunta2" data-tipo="2" data-pregunta="'+Pregunta[0]+'" type="radio" id="opcionA-'+Pregunta[0]+'" name="condicion-'+Pregunta[0]+'" value="1" checked>'+ '<label class="texto-12px" for="opcionA-'+Pregunta[0]+'">SI</label>'+ '</div>'+ '<div class="col-4 col-4-small">'+ '<input class="pregunta2" data-tipo="2" data-pregunta="'+Pregunta[0]+'" type="radio" id="opcionB-'+Pregunta[0]+'" name="condicion-'+Pregunta[0]+'" value="2">'+ '<label class="texto-12px" for="opcionB-'+Pregunta[0]+'">NO</label> '+ '</div>'+ '<div class="col-4 col-4-small">'+ '<input class="pregunta2" data-tipo="2" data-pregunta="'+Pregunta[0]+'" type="radio" id="opcionC-'+Pregunta[0]+'" name="condicion-'+Pregunta[0]+'" value="3">'+ '<label class="texto-12px" for="opcionC-'+Pregunta[0]+'">NO SABE/NO 
OPINA</label>'+ '</div>'+ '</div>'; } else if (Pregunta[2] == 3) { Encuesta=Encuesta+'<div class="row Titulopregunta center_element m-5">'+ '<div class="col-10 col-12-xsmall">'+ ' <textarea class="pregunta3" data-tipo="3" data-pregunta="'+Pregunta[0]+'" required placeholder="Ingrese Respuesta" rows="2" name="descripcion-'+Pregunta[0]+'"></textarea>'+ '</div> '+ '</div>'; } else if(Pregunta[2] == 4){ Encuesta=Encuesta+'<div class="row Titulopregunta center_element m-5">'+ '<div class="col-3 col-3-small">'+ '<input class="pregunta1" data-tipo="1" data-pregunta="'+Pregunta[0]+'" type="radio" id="opcion1-'+Pregunta[0]+'" name="satisfaccion2-'+Pregunta[0]+'" value="1" checked>'+ '<label class="texto-12px" for="opcion1-'+Pregunta[0]+'">MALO</label>'+ '</div>'+ '<div class="col-3 col-3-small">'+ '<input class="pregunta1" data-tipo="1" data-pregunta="'+Pregunta[0]+'" type="radio" id="opcion2-'+Pregunta[0]+'" name="satisfaccion2-'+Pregunta[0]+'" value="2">'+ '<label class="texto-12px" for="opcion2-'+Pregunta[0]+'">REGULAR</label>'+ '</div>'+ '<div class="col-3 col-3-small">'+ '<input class="pregunta1" data-tipo="1" data-pregunta="'+Pregunta[0]+'" type="radio" id="opcion3-'+Pregunta[0]+'" name="satisfaccion2-'+Pregunta[0]+'" value="3">'+ '<label class="texto-12px" for="opcion3-'+Pregunta[0]+'">BUENO</label>'+ '</div>'+ '<div class="col-3 col-3-small">'+ '<input class="pregunta1" data-tipo="1" data-pregunta="'+Pregunta[0]+'" type="radio" id="opcion4-'+Pregunta[0]+'" name="satisfaccion2-'+Pregunta[0]+'" value="4">'+ '<label class="texto-12px" for="opcion4-'+Pregunta[0]+'">MUY BUENO</label>'+ '</div>'+ '</div>'; } return Encuesta; } function RegistroEncuesta(event) { //cargar(true); event.preventDefault(); // The event's default action will not be triggered var error = ""; var ArregloRespuesta=""; $("input[class='pregunta1']:checked").each(function () { var tipo=$(this).data("tipo"); var pregunta=$(this).data("pregunta"); var valor=$(this).val(); pregunta=pregunta+"||"+valor+"-"; ArregloRespuesta=ArregloRespuesta+pregunta; }); $("input[class='pregunta2']:checked").each(function () { var tipo=$(this).data("tipo"); var pregunta=$(this).data("pregunta"); var valor=$(this).val(); pregunta=pregunta+"||"+valor+"-"; ArregloRespuesta=ArregloRespuesta+pregunta; }); $("textarea[class='pregunta3']").each(function () { var tipo=$(this).data("tipo"); var pregunta=$(this).data("pregunta"); var valor=$(this).val(); pregunta=pregunta+"|"+valor+"|0-"; ArregloRespuesta=ArregloRespuesta+pregunta; }); ArregloRespuesta=ArregloRespuesta.substring(0,ArregloRespuesta.length-1); console.log(ArregloRespuesta); var idEnviado=$("#idEnviado").val(); $.post("Gestion/Controlador/CEncuesta.php?op=RegistrarResultados", { "ArregloRespuesta": ArregloRespuesta, "CodigoEnvio":idEnviado }, function (data, status) { data = JSON.parse(data); console.log(data); var Respuesta=data.Respuesta; var Mensaje=data.Mensaje; if(Respuesta){ $("#ModuloRespuesta").show(); $("#FormularioEncuesta").hide(); }else{ $('#FormularioEncuesta')[0].reset(); notificar_danger(Mensaje); } }); } function AjaxRegistroCliente() { var formData = new FormData($("#FormularioCliente")[0]); console.log(formData); $.ajax({ url: "../../Controlador/CCliente.php?op=AccionCliente" , type: "POST" , data: formData , contentType: false , processData: false , success: function (data, status) { data = JSON.parse(data); console.log(data); var Mensaje = data.Mensaje; var Error = data.Registro; if (!Error) { $("#ModalCliente #cuerpo").removeClass("whirl"); $("#ModalCliente 
#cuerpo").removeClass("ringed"); $("#ModalCliente").modal("hide"); swal("Error:", Mensaje); LimpiarCliente(); tablaCliente.ajax.reload(); } else { $("#ModalCliente #cuerpo").removeClass("whirl"); $("#ModalCliente #cuerpo").removeClass("ringed"); $("#ModalCliente").modal("hide"); swal("Acción:", Mensaje); LimpiarCliente(); tablaCliente.ajax.reload(); } } }); } init();
const express = require('express');
const path = require('path');
const favicon = require('serve-favicon');
const logger = require('morgan');
const cookieParser = require('cookie-parser');
const bodyParser = require('body-parser');

const routes = require('./routes/index');
const latex = require('./routes/latex');

const app = express();

// view engine setup
app.set('views', path.join(__dirname, 'views'));
app.set('view engine', 'jade');

// uncomment after placing your favicon in /public
//app.use(favicon(path.join(__dirname, 'public', 'favicon.ico')));
app.use(logger('dev'));
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: false }));
app.use(cookieParser());
app.use(express.static(path.join(__dirname, 'public')));

app.use('/', routes);
app.use('/latex', latex);

// catch 404 and forward to error handler
app.use(function(req, res, next) {
  const err = new Error('Not Found');
  err.status = 404;
  next(err);
});

// error handlers

// development error handler
// will print stacktrace
// if (app.get('env') === 'development') {
//   app.use(function(err, req, res, next) {
//     res.status(err.status || 500);
//     res.send(err);
//   });
// }

// production error handler
// no stacktraces leaked to user
app.use(function(err, req, res, next) {
  res.status(err.status || 500);
  res.send(err.message);
});

module.exports = app;
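// Illustrative bootstrap for the exported app (a sketch of the bin/www
// entry point that express-generator normally pairs with this file; the
// filename and port below are assumptions, not part of this module):
//
//   const http = require('http');
//   const app = require('./app');
//   const port = process.env.PORT || 3000;
//   http.createServer(app).listen(port, () => {
//     console.log('Listening on port ' + port);
//   });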
# -*- coding: utf-8 -*- import json import os import shutil import tempfile import time import zipfile import multiprocessing import contextlib from unittest import mock from django import forms from django.conf import settings from django.forms import ValidationError from django.test.utils import override_settings import lxml import pytest from defusedxml.common import EntitiesForbidden, NotSupportedError from waffle.testutils import override_switch from olympia import amo from olympia.amo.tests import TestCase, user_factory from olympia.amo.tests.test_helpers import get_addon_file from olympia.applications.models import AppVersion from olympia.files import utils pytestmark = pytest.mark.django_db def _touch(fname): open(fname, 'a').close() os.utime(fname, None) class AppVersionsMixin(object): @classmethod def setUpTestData(cls): cls.create_webext_default_versions() @classmethod def create_appversion(cls, name, version): return AppVersion.objects.create(application=amo.APPS[name].id, version=version) @classmethod def create_webext_default_versions(cls): cls.create_appversion('firefox', '36.0') # Incompatible with webexts. cls.create_appversion('firefox', amo.DEFAULT_WEBEXT_MIN_VERSION) cls.create_appversion('firefox', amo.DEFAULT_WEBEXT_MAX_VERSION) cls.create_appversion('firefox', amo.DEFAULT_WEBEXT_MIN_VERSION_NO_ID) cls.create_appversion( 'android', amo.DEFAULT_WEBEXT_MIN_VERSION_ANDROID) cls.create_appversion( 'android', amo.DEFAULT_WEBEXT_MAX_VERSION) cls.create_appversion( 'firefox', amo.DEFAULT_STATIC_THEME_MIN_VERSION_FIREFOX) cls.create_appversion( 'android', amo.DEFAULT_STATIC_THEME_MIN_VERSION_ANDROID) class TestExtractor(AppVersionsMixin, TestCase): def test_no_manifest(self): fake_zip = utils.make_xpi({'dummy': 'dummy'}) with self.assertRaises(utils.NoManifestFound) as exc: utils.Extractor.parse(fake_zip) assert isinstance(exc.exception, forms.ValidationError) assert exc.exception.message == ( 'No install.rdf or manifest.json found') @mock.patch('olympia.files.utils.ManifestJSONExtractor') @mock.patch('olympia.files.utils.RDFExtractor') def test_parse_install_rdf(self, rdf_extractor, manifest_json_extractor): fake_zip = utils.make_xpi({'install.rdf': ''}) utils.Extractor.parse(fake_zip) assert rdf_extractor.called assert not manifest_json_extractor.called @mock.patch('olympia.files.utils.ManifestJSONExtractor') @mock.patch('olympia.files.utils.RDFExtractor') def test_ignore_package_json(self, rdf_extractor, manifest_json_extractor): # Previously we preferred `package.json` to `install.rdf` which # we don't anymore since # https://github.com/mozilla/addons-server/issues/2460 fake_zip = utils.make_xpi({'install.rdf': '', 'package.json': ''}) utils.Extractor.parse(fake_zip) assert rdf_extractor.called assert not manifest_json_extractor.called @mock.patch('olympia.files.utils.ManifestJSONExtractor') @mock.patch('olympia.files.utils.RDFExtractor') def test_parse_manifest_json(self, rdf_extractor, manifest_json_extractor): fake_zip = utils.make_xpi({'manifest.json': ''}) utils.Extractor.parse(fake_zip) assert not rdf_extractor.called assert manifest_json_extractor.called @mock.patch('olympia.files.utils.ManifestJSONExtractor') @mock.patch('olympia.files.utils.RDFExtractor') def test_prefers_manifest_to_install_rdf(self, rdf_extractor, manifest_json_extractor): fake_zip = utils.make_xpi({'install.rdf': '', 'manifest.json': ''}) utils.Extractor.parse(fake_zip) assert not rdf_extractor.called assert manifest_json_extractor.called @mock.patch('olympia.files.utils.os.path.getsize') def 
test_static_theme_max_size(self, getsize_mock): getsize_mock.return_value = settings.MAX_STATICTHEME_SIZE manifest = utils.ManifestJSONExtractor( '/fake_path', '{"theme": {}}').parse() # Calling to check it doesn't raise. assert utils.check_xpi_info(manifest, xpi_file=mock.Mock()) # Increase the size though and it should raise an error. getsize_mock.return_value = settings.MAX_STATICTHEME_SIZE + 1 with pytest.raises(forms.ValidationError) as exc: utils.check_xpi_info(manifest, xpi_file=mock.Mock()) assert ( exc.value.message == u'Maximum size for WebExtension themes is 7.0 MB.') # Double-check that only static themes are size-limited. manifest = utils.ManifestJSONExtractor( '/fake_path', '{}').parse() assert utils.check_xpi_info(manifest, xpi_file=mock.Mock()) class TestRDFExtractor(TestCase): def setUp(self): self.firefox_versions = [ AppVersion.objects.create(application=amo.APPS['firefox'].id, version='38.0a1'), AppVersion.objects.create(application=amo.APPS['firefox'].id, version='43.0'), ] self.thunderbird_versions = [ AppVersion.objects.create(application=amo.APPS['android'].id, version='42.0'), AppVersion.objects.create(application=amo.APPS['android'].id, version='45.0'), ] def test_apps_disallow_thunderbird_and_seamonkey(self): zip_file = utils.SafeZip(get_addon_file( 'valid_firefox_and_thunderbird_addon.xpi')) extracted = utils.RDFExtractor(zip_file).parse() apps = extracted['apps'] assert len(apps) == 1 assert apps[0].appdata == amo.FIREFOX assert apps[0].min.version == '38.0a1' assert apps[0].max.version == '43.0' class TestManifestJSONExtractor(AppVersionsMixin, TestCase): def parse(self, base_data): return utils.ManifestJSONExtractor( '/fake_path', json.dumps(base_data)).parse() def test_instantiate_without_data(self): """Without data, we load the data from the file path.""" data = {'id': 'some-id'} fake_zip = utils.make_xpi({'manifest.json': json.dumps(data)}) extractor = utils.ManifestJSONExtractor(zipfile.ZipFile(fake_zip)) assert extractor.data == data def test_guid_from_applications(self): """Use applications>gecko>id for the guid.""" assert self.parse( {'applications': { 'gecko': { 'id': 'some-id'}}})['guid'] == 'some-id' def test_guid_from_browser_specific_settings(self): """Use browser_specific_settings>gecko>id for the guid.""" assert self.parse( {'browser_specific_settings': { 'gecko': { 'id': 'some-id'}}})['guid'] == 'some-id' def test_name_for_guid_if_no_id(self): """Don't use the name for the guid if there is no id.""" assert self.parse({'name': 'addon-name'})['guid'] is None def test_type(self): """manifest.json addons are always ADDON_EXTENSION.""" assert self.parse({})['type'] == amo.ADDON_EXTENSION def test_is_restart_required(self): """manifest.json addons never require a restart.""" assert self.parse({})['is_restart_required'] is False def test_name(self): """Use name for the name.""" assert self.parse({'name': 'addon-name'})['name'] == 'addon-name' def test_version(self): """Use version for the version.""" assert self.parse({'version': '23.0.1'})['version'] == '23.0.1' def test_homepage(self): """Use homepage_url for the homepage.""" assert ( self.parse({'homepage_url': 'http://my-addon.org'})['homepage'] == 'http://my-addon.org') def test_summary(self): """Use description for the summary.""" assert ( self.parse({'description': 'An addon.'})['summary'] == 'An addon.') def test_invalid_strict_min_version(self): data = { 'applications': { 'gecko': { 'strict_min_version': 'A', 'id': '@invalid_strict_min_version' } } } with pytest.raises(forms.ValidationError) as exc: self.parse(data) 
assert ( exc.value.message == 'Lowest supported "strict_min_version" is 42.0.') def test_unknown_strict_min_version(self): data = { 'applications': { 'gecko': { 'strict_min_version': '76.0', 'id': '@unknown_strict_min_version' } } } with pytest.raises(forms.ValidationError) as exc: self.parse(data) assert exc.value.message == ( u'Unknown "strict_min_version" 76.0 for Firefox') def test_unknown_strict_max_version(self): data = { 'applications': { 'gecko': { 'strict_max_version': '76.0', 'id': '@unknown_strict_min_version' } } } apps = self.parse(data)['apps'] assert len(apps) == 2 app = apps[0] assert app.appdata == amo.FIREFOX assert app.min.version == amo.DEFAULT_WEBEXT_MIN_VERSION assert app.max.version == amo.DEFAULT_WEBEXT_MAX_VERSION app = apps[1] assert app.appdata == amo.ANDROID assert app.min.version == amo.DEFAULT_WEBEXT_MIN_VERSION_ANDROID assert app.max.version == amo.DEFAULT_WEBEXT_MAX_VERSION def test_strict_min_version_needs_to_be_higher_than_42_if_specified(self): """strict_min_version needs to be higher than 42.0 if specified.""" data = { 'applications': { 'gecko': { 'strict_min_version': '36.0', 'id': '@too_old_strict_min_version' } } } with pytest.raises(forms.ValidationError) as exc: self.parse(data) assert ( exc.value.message == 'Lowest supported "strict_min_version" is 42.0.') def test_apps_use_provided_versions(self): """Use the min and max versions if provided.""" firefox_min_version = self.create_appversion('firefox', '47.0') firefox_max_version = self.create_appversion('firefox', '47.*') data = { 'applications': { 'gecko': { 'strict_min_version': '>=47.0', 'strict_max_version': '=47.*', 'id': '@random' } } } apps = self.parse(data)['apps'] assert len(apps) == 2 app = apps[0] assert app.appdata == amo.FIREFOX assert app.min == firefox_min_version assert app.max == firefox_max_version # We have no way of specifying a different version for Android when an # explicit version number is provided... That being said, we know that # 47.0 is too low for Android, so we silently cap it at 48.0. That # forces us to also change the max version for android. app = apps[1] assert app.appdata == amo.ANDROID assert app.min.version == amo.DEFAULT_WEBEXT_MIN_VERSION_ANDROID assert app.max.version == amo.DEFAULT_WEBEXT_MIN_VERSION_ANDROID def test_apps_use_default_versions_if_none_provided(self): """Use the default min and max versions if none provided.""" data = {'applications': {'gecko': {'id': 'some-id'}}} apps = self.parse(data)['apps'] assert len(apps) == 2 app = apps[0] assert app.appdata == amo.FIREFOX assert app.min.version == amo.DEFAULT_WEBEXT_MIN_VERSION assert app.max.version == amo.DEFAULT_WEBEXT_MAX_VERSION app = apps[1] assert app.appdata == amo.ANDROID assert app.min.version == amo.DEFAULT_WEBEXT_MIN_VERSION_ANDROID assert app.max.version == amo.DEFAULT_WEBEXT_MAX_VERSION # But if 'browser_specific_settings' is used, a higher min version is used. 
data = {'browser_specific_settings': {'gecko': {'id': 'some-id'}}} apps = self.parse(data)['apps'] assert len(apps) == 2 app = apps[0] assert app.appdata == amo.FIREFOX assert app.min.version == ( amo.DEFAULT_WEBEXT_MIN_VERSION_BROWSER_SPECIFIC) assert app.max.version == amo.DEFAULT_WEBEXT_MAX_VERSION app = apps[1] assert app.appdata == amo.ANDROID assert app.min.version == ( amo.DEFAULT_WEBEXT_MIN_VERSION_BROWSER_SPECIFIC) assert app.max.version == amo.DEFAULT_WEBEXT_MAX_VERSION def test_is_webextension(self): assert self.parse({})['is_webextension'] def test_allow_static_theme_waffle(self): manifest = utils.ManifestJSONExtractor( '/fake_path', '{"theme": {}}').parse() utils.check_xpi_info(manifest) assert self.parse({'theme': {}})['type'] == amo.ADDON_STATICTHEME def test_extensions_dont_have_strict_compatibility(self): assert self.parse({})['strict_compatibility'] is False def test_moz_signed_extension_no_strict_compat(self): addon = amo.tests.addon_factory() user = amo.tests.user_factory(email='foo@mozilla.com') file_obj = addon.current_version.all_files[0] file_obj.update(is_mozilla_signed_extension=True) fixture = ( 'src/olympia/files/fixtures/files/' 'legacy-addon-already-signed-0.1.0.xpi') with amo.tests.copy_file(fixture, file_obj.file_path): parsed = utils.parse_xpi(file_obj.file_path, user=user) assert parsed['is_mozilla_signed_extension'] assert not parsed['strict_compatibility'] def test_moz_signed_extension_reuse_strict_compat(self): addon = amo.tests.addon_factory() user = amo.tests.user_factory(email='foo@mozilla.com') file_obj = addon.current_version.all_files[0] file_obj.update(is_mozilla_signed_extension=True) fixture = ( 'src/olympia/files/fixtures/files/' 'legacy-addon-already-signed-strict-compat-0.1.0.xpi') with amo.tests.copy_file(fixture, file_obj.file_path): parsed = utils.parse_xpi(file_obj.file_path, user=user) assert parsed['is_mozilla_signed_extension'] # We set `strictCompatibility` in install.rdf assert parsed['strict_compatibility'] @mock.patch('olympia.addons.models.resolve_i18n_message') def test_mozilla_trademark_disallowed(self, resolve_message): resolve_message.return_value = 'Notify Mozilla' addon = amo.tests.addon_factory() file_obj = addon.current_version.all_files[0] fixture = ( 'src/olympia/files/fixtures/files/notify-link-clicks-i18n.xpi') with amo.tests.copy_file(fixture, file_obj.file_path): with pytest.raises(forms.ValidationError) as exc: utils.parse_xpi(file_obj.file_path) assert dict(exc.value.messages)['en-us'].startswith( u'Add-on names cannot contain the Mozilla or' ) @mock.patch('olympia.addons.models.resolve_i18n_message') @override_switch('content-optimization', active=False) def test_mozilla_trademark_for_prefix_allowed(self, resolve_message): resolve_message.return_value = 'Notify for Mozilla' addon = amo.tests.addon_factory() file_obj = addon.current_version.all_files[0] fixture = ( 'src/olympia/files/fixtures/files/notify-link-clicks-i18n.xpi') with amo.tests.copy_file(fixture, file_obj.file_path): utils.parse_xpi(file_obj.file_path) def test_apps_use_default_versions_if_applications_is_omitted(self): """ WebExtensions are allowed to omit `applications[/gecko]` and we previously skipped defaulting to any `AppVersion` when it is not defined. That resulted in none of our platforms being selectable. See https://github.com/mozilla/addons-server/issues/2586 and probably many others. 
""" data = {} apps = self.parse(data)['apps'] assert len(apps) == 2 assert apps[0].appdata == amo.FIREFOX assert apps[0].min.version == amo.DEFAULT_WEBEXT_MIN_VERSION_NO_ID assert apps[0].max.version == amo.DEFAULT_WEBEXT_MAX_VERSION assert apps[1].appdata == amo.ANDROID assert apps[1].min.version == amo.DEFAULT_WEBEXT_MIN_VERSION_ANDROID assert apps[1].max.version == amo.DEFAULT_WEBEXT_MAX_VERSION def test_handle_utf_bom(self): manifest = b'\xef\xbb\xbf{"manifest_version": 2, "name": "..."}' parsed = utils.ManifestJSONExtractor(None, manifest).parse() assert parsed['name'] == '...' def test_raise_error_if_no_optional_id_support(self): """ We only support optional ids in Firefox 48+ and will throw an error otherwise. """ data = { 'applications': { 'gecko': { 'strict_min_version': '42.0', 'strict_max_version': '49.0', } } } with pytest.raises(forms.ValidationError) as exc: self.parse(data)['apps'] assert ( exc.value.message == 'Add-on ID is required for Firefox 47 and below.') def test_comments_are_allowed(self): json_string = """ { // Required "manifest_version": 2, "name": "My Extension", "version": "versionString", // Recommended "default_locale": "en", "description": "A plain text description" } """ manifest = utils.ManifestJSONExtractor( '/fake_path', json_string).parse() assert manifest['is_webextension'] is True assert manifest.get('name') == 'My Extension' def test_dont_skip_apps_because_of_strict_version_incompatibility(self): # We shouldn't skip adding specific apps to the WebExtension # no matter any potential incompatibility, e.g # browser_specific_settings is only supported from Firefox 48.0 # onwards, now if the user specifies strict_min_compat as 42.0 # we shouldn't skip the app because of that. Instead we override the # value with the known min version that started supporting that. data = { 'browser_specific_settings': { 'gecko': { 'strict_min_version': '42.0', 'id': '@random' } } } apps = self.parse(data)['apps'] assert len(apps) == 2 assert apps[0].appdata == amo.FIREFOX assert apps[0].min.version == ( amo.DEFAULT_WEBEXT_MIN_VERSION_BROWSER_SPECIFIC) assert apps[1].appdata == amo.ANDROID assert apps[1].min.version == ( amo.DEFAULT_WEBEXT_MIN_VERSION_BROWSER_SPECIFIC) class TestLanguagePackAndDictionaries(AppVersionsMixin, TestCase): def test_parse_langpack(self): self.create_appversion('firefox', '60.0') self.create_appversion('firefox', '60.*') self.create_appversion('android', '60.0') self.create_appversion('android', '60.*') data = { 'applications': { 'gecko': { 'strict_min_version': '>=60.0', 'strict_max_version': '=60.*', 'id': '@langp' } }, 'langpack_id': 'foo' } parsed_data = utils.ManifestJSONExtractor( '/fake_path', json.dumps(data)).parse() assert parsed_data['type'] == amo.ADDON_LPAPP assert parsed_data['strict_compatibility'] is True assert parsed_data['is_webextension'] is True apps = parsed_data['apps'] assert len(apps) == 1 # Langpacks are not compatible with android. assert apps[0].appdata == amo.FIREFOX assert apps[0].min.version == '60.0' assert apps[0].max.version == '60.*' def test_parse_langpack_not_targeting_versions_explicitly(self): data = { 'applications': { 'gecko': { 'id': '@langp' } }, 'langpack_id': 'foo' } parsed_data = utils.ManifestJSONExtractor( '/fake_path', json.dumps(data)).parse() assert parsed_data['type'] == amo.ADDON_LPAPP assert parsed_data['strict_compatibility'] is True assert parsed_data['is_webextension'] is True apps = parsed_data['apps'] assert len(apps) == 1 # Langpacks are not compatible with android. 
assert apps[0].appdata == amo.FIREFOX assert apps[0].min.version == '42.0' # The linter should force the langpack to have a strict_max_version, # so the value here doesn't matter much. assert apps[0].max.version == '*' def test_parse_dictionary(self): self.create_appversion('firefox', '61.0') data = { 'applications': { 'gecko': { 'id': '@dict' } }, 'dictionaries': {'en-US': '/path/to/en-US.dic'} } parsed_data = utils.ManifestJSONExtractor( '/fake_path', json.dumps(data)).parse() assert parsed_data['type'] == amo.ADDON_DICT assert parsed_data['strict_compatibility'] is False assert parsed_data['is_webextension'] is True assert parsed_data['target_locale'] == 'en-US' apps = parsed_data['apps'] assert len(apps) == 1 # Dictionaries are not compatible with android. assert apps[0].appdata == amo.FIREFOX assert apps[0].min.version == '61.0' assert apps[0].max.version == '*' def test_parse_broken_dictionary(self): data = { 'dictionaries': {} } with self.assertRaises(forms.ValidationError): utils.ManifestJSONExtractor('/fake_path', json.dumps(data)).parse() def test_check_xpi_info_langpack_submission_restrictions(self): user = user_factory() self.create_appversion('firefox', '60.0') self.create_appversion('firefox', '60.*') data = { 'applications': { 'gecko': { 'strict_min_version': '>=60.0', 'strict_max_version': '=60.*', 'id': '@langp' } }, 'langpack_id': 'foo' } parsed_data = utils.ManifestJSONExtractor( '/fake_path.xpi', json.dumps(data)).parse() with self.assertRaises(ValidationError): # Regular users aren't allowed to submit langpacks. utils.check_xpi_info(parsed_data, xpi_file=mock.Mock(), user=user) # Shouldn't raise for users with proper permissions self.grant_permission(user, ':'.join(amo.permissions.LANGPACK_SUBMIT)) utils.check_xpi_info(parsed_data, xpi_file=mock.Mock(), user=user) class TestManifestJSONExtractorStaticTheme(TestManifestJSONExtractor): def parse(self, base_data): if 'theme' not in base_data.keys(): base_data.update(theme={}) return super( TestManifestJSONExtractorStaticTheme, self).parse(base_data) def test_type(self): assert self.parse({})['type'] == amo.ADDON_STATICTHEME def test_apps_use_default_versions_if_applications_is_omitted(self): """ Override this because static themes have a higher default version. 
""" data = {} apps = self.parse(data)['apps'] assert len(apps) == 2 assert apps[0].appdata == amo.FIREFOX assert apps[0].min.version == ( amo.DEFAULT_STATIC_THEME_MIN_VERSION_FIREFOX) assert apps[0].max.version == amo.DEFAULT_WEBEXT_MAX_VERSION assert apps[1].appdata == amo.ANDROID assert apps[1].min.version == ( amo.DEFAULT_STATIC_THEME_MIN_VERSION_ANDROID) assert apps[1].max.version == amo.DEFAULT_WEBEXT_MAX_VERSION def test_apps_use_default_versions_if_none_provided(self): """Use the default min and max versions if none provided.""" data = {'applications': {'gecko': {'id': 'some-id'}}} apps = self.parse(data)['apps'] assert len(apps) == 2 assert apps[0].appdata == amo.FIREFOX assert apps[0].min.version == ( amo.DEFAULT_STATIC_THEME_MIN_VERSION_FIREFOX) assert apps[0].max.version == amo.DEFAULT_WEBEXT_MAX_VERSION assert apps[1].appdata == amo.ANDROID assert apps[1].min.version == ( amo.DEFAULT_STATIC_THEME_MIN_VERSION_ANDROID) assert apps[1].max.version == amo.DEFAULT_WEBEXT_MAX_VERSION def test_apps_use_provided_versions(self): """Use the min and max versions if provided.""" firefox_min_version = self.create_appversion('firefox', '66.0') firefox_max_version = self.create_appversion('firefox', '66.*') android_min_version = self.create_appversion('android', '66.0') android_max_version = self.create_appversion('android', '66.*') data = { 'applications': { 'gecko': { 'strict_min_version': '>=66.0', 'strict_max_version': '=66.*', 'id': '@random' } } } apps = self.parse(data)['apps'] assert len(apps) == 2 assert apps[0].appdata == amo.FIREFOX assert apps[0].min == firefox_min_version assert apps[0].max == firefox_max_version assert apps[1].appdata == amo.ANDROID assert apps[1].min == android_min_version assert apps[1].max == android_max_version def test_theme_json_extracted(self): # Check theme data is extracted from the manifest and returned. data = {'theme': {'colors': {'tab_background_text': "#3deb60"}}} assert self.parse(data)['theme'] == data['theme'] def test_unknown_strict_max_version(self): data = { 'applications': { 'gecko': { 'strict_max_version': '76.0', 'id': '@unknown_strict_min_version' } } } apps = self.parse(data)['apps'] assert len(apps) == 2 app = apps[0] assert app.appdata == amo.FIREFOX assert app.min.version == amo.DEFAULT_STATIC_THEME_MIN_VERSION_FIREFOX assert app.max.version == amo.DEFAULT_WEBEXT_MAX_VERSION app = apps[1] assert app.appdata == amo.ANDROID assert app.min.version == amo.DEFAULT_STATIC_THEME_MIN_VERSION_ANDROID assert app.max.version == amo.DEFAULT_WEBEXT_MAX_VERSION def test_dont_skip_apps_because_of_strict_version_incompatibility(self): # In the parent class this method would bump the min_version to 48.0 # because that's the first version to support # browser_specific_settings, but in static themes we bump it even # higher because of the minimum version when we started supporting # static themes themselves. 
data = { 'browser_specific_settings': { 'gecko': { 'strict_min_version': '42.0', 'id': '@random' } } } apps = self.parse(data)['apps'] assert len(apps) == 2 assert apps[0].appdata == amo.FIREFOX assert apps[0].min.version == ( amo.DEFAULT_STATIC_THEME_MIN_VERSION_FIREFOX) assert apps[0].max.version == amo.DEFAULT_WEBEXT_MAX_VERSION assert apps[1].appdata == amo.ANDROID assert apps[1].min.version == ( amo.DEFAULT_STATIC_THEME_MIN_VERSION_ANDROID) assert apps[1].max.version == amo.DEFAULT_WEBEXT_MAX_VERSION @pytest.mark.parametrize('filename, expected_files', [ ('webextension_no_id.xpi', [ 'README.md', 'beasts', 'button', 'content_scripts', 'manifest.json', 'popup' ]), ('webextension_no_id.zip', [ 'README.md', 'beasts', 'button', 'content_scripts', 'manifest.json', 'popup' ]), ('webextension_no_id.tar.gz', [ 'README.md', 'beasts', 'button', 'content_scripts', 'manifest.json', 'popup' ]), ('webextension_no_id.tar.bz2', [ 'README.md', 'beasts', 'button', 'content_scripts', 'manifest.json', 'popup' ]), ('search.xml', [ 'search.xml', ]) ]) def test_extract_extension_to_dest(filename, expected_files): extension_file = 'src/olympia/files/fixtures/files/{fname}'.format( fname=filename) with mock.patch('olympia.files.utils.os.fsync') as fsync_mock: temp_folder = utils.extract_extension_to_dest(extension_file) assert sorted(os.listdir(temp_folder)) == expected_files # fsync isn't called by default assert not fsync_mock.called @pytest.mark.parametrize('filename', [ 'webextension_no_id.xpi', 'webextension_no_id.zip', 'webextension_no_id.tar.bz2', 'webextension_no_id.tar.gz', 'search.xml', ]) def test_extract_extension_to_dest_call_fsync(filename): extension_file = 'src/olympia/files/fixtures/files/{fname}'.format( fname=filename) with mock.patch('olympia.files.utils.os.fsync') as fsync_mock: utils.extract_extension_to_dest(extension_file, force_fsync=True) # fsync is called when force_fsync=True is passed assert fsync_mock.called def test_extract_extension_to_dest_invalid_archive(): extension_file = 'src/olympia/files/fixtures/files/doesntexist.zip' with mock.patch('olympia.files.utils.shutil.rmtree') as mock_rmtree: with pytest.raises(forms.ValidationError): utils.extract_extension_to_dest(extension_file) # Make sure we are cleaning up our temporary directory if possible assert mock_rmtree.called @pytest.fixture def file_obj(): addon = amo.tests.addon_factory() addon.update(guid='xxxxx') version = addon.current_version return version.all_files[0] @pytest.mark.django_db def test_bump_version_in_manifest_json(file_obj): AppVersion.objects.create(application=amo.FIREFOX.id, version=amo.DEFAULT_WEBEXT_MIN_VERSION) AppVersion.objects.create(application=amo.FIREFOX.id, version=amo.DEFAULT_WEBEXT_MAX_VERSION) AppVersion.objects.create(application=amo.ANDROID.id, version=amo.DEFAULT_WEBEXT_MIN_VERSION_ANDROID) AppVersion.objects.create(application=amo.ANDROID.id, version=amo.DEFAULT_WEBEXT_MAX_VERSION) with amo.tests.copy_file( 'src/olympia/files/fixtures/files/webextension.xpi', file_obj.file_path): utils.update_version_number(file_obj, '0.0.1.1-signed') parsed = utils.parse_xpi(file_obj.file_path) assert parsed['version'] == '0.0.1.1-signed' def test_extract_translations_simple(file_obj): extension = 'src/olympia/files/fixtures/files/notify-link-clicks-i18n.xpi' with amo.tests.copy_file(extension, file_obj.file_path): messages = utils.extract_translations(file_obj) assert list(sorted(messages.keys())) == [ 'de', 'en-US', 'ja', 'nb-NO', 'nl', 'ru', 'sv-SE'] @mock.patch('olympia.files.utils.zipfile.ZipFile.read') def 
test_extract_translations_fail_silent_invalid_file(read_mock, file_obj): extension = 'src/olympia/files/fixtures/files/notify-link-clicks-i18n.xpi' with amo.tests.copy_file(extension, file_obj.file_path): read_mock.side_effect = KeyError # Does not raise an exception utils.extract_translations(file_obj) read_mock.side_effect = IOError # Does not raise an exception either utils.extract_translations(file_obj) # We don't fail on invalid JSON either; that is addons-linter's domain read_mock.side_effect = ValueError utils.extract_translations(file_obj) # But everything else... read_mock.side_effect = TypeError with pytest.raises(TypeError): utils.extract_translations(file_obj) def test_get_all_files(): tempdir = tempfile.mkdtemp(dir=settings.TMP_PATH) os.mkdir(os.path.join(tempdir, 'dir1')) _touch(os.path.join(tempdir, 'foo1')) _touch(os.path.join(tempdir, 'dir1', 'foo2')) assert utils.get_all_files(tempdir) == [ os.path.join(tempdir, 'dir1'), os.path.join(tempdir, 'dir1', 'foo2'), os.path.join(tempdir, 'foo1'), ] shutil.rmtree(tempdir) assert not os.path.exists(tempdir) def test_get_all_files_strip_prefix_no_prefix_silent(): tempdir = tempfile.mkdtemp(dir=settings.TMP_PATH) os.mkdir(os.path.join(tempdir, 'dir1')) _touch(os.path.join(tempdir, 'foo1')) _touch(os.path.join(tempdir, 'dir1', 'foo2')) # strip_prefix alone doesn't do anything. assert utils.get_all_files(tempdir, strip_prefix=tempdir) == [ os.path.join(tempdir, 'dir1'), os.path.join(tempdir, 'dir1', 'foo2'), os.path.join(tempdir, 'foo1'), ] def test_get_all_files_prefix(): tempdir = tempfile.mkdtemp(dir=settings.TMP_PATH) os.mkdir(os.path.join(tempdir, 'dir1')) _touch(os.path.join(tempdir, 'foo1')) _touch(os.path.join(tempdir, 'dir1', 'foo2')) # prefix is prepended to every returned path. assert utils.get_all_files(tempdir, prefix='/foo/bar') == [ '/foo/bar' + os.path.join(tempdir, 'dir1'), '/foo/bar' + os.path.join(tempdir, 'dir1', 'foo2'), '/foo/bar' + os.path.join(tempdir, 'foo1'), ] def test_get_all_files_prefix_with_strip_prefix(): tempdir = tempfile.mkdtemp(dir=settings.TMP_PATH) os.mkdir(os.path.join(tempdir, 'dir1')) _touch(os.path.join(tempdir, 'foo1')) _touch(os.path.join(tempdir, 'dir1', 'foo2')) # strip_prefix is removed before prefix is prepended. result = utils.get_all_files( tempdir, strip_prefix=tempdir, prefix='/foo/bar') assert result == [ os.path.join('/foo', 'bar', 'dir1'), os.path.join('/foo', 'bar', 'dir1', 'foo2'), os.path.join('/foo', 'bar', 'foo1'), ] def test_lock_with_lock_attained(): with utils.lock(settings.TMP_PATH, 'test-lock-lock2') as lock_attained: assert lock_attained @contextlib.contextmanager def _run_lock_holding_process(lock_name, sleep): def _other_process_holding_lock(): with utils.lock(settings.TMP_PATH, lock_name) as lock_attained: assert lock_attained time.sleep(sleep) other_process = multiprocessing.Process(target=_other_process_holding_lock) other_process.start() # Give the process some time to acquire the lock time.sleep(0.2) yield other_process other_process.join() def test_lock_timeout(): with _run_lock_holding_process('test-lock-lock3', sleep=2): # Waiting for 3 seconds allows us to attain the lock from the parent # process. 
lock = utils.lock(settings.TMP_PATH, 'test-lock-lock3', timeout=3) with lock as lock_attained: assert lock_attained with _run_lock_holding_process('test-lock-lock3', sleep=2): # Waiting only 1 second fails to acquire the lock lock = utils.lock(settings.TMP_PATH, 'test-lock-lock3', timeout=1) with lock as lock_attained: assert not lock_attained def test_parse_search_empty_shortname(): from olympia.files.tests.test_file_viewer import get_file fname = get_file('search_empty_shortname.xml') with pytest.raises(forms.ValidationError) as excinfo: utils.parse_search(fname) assert ( str(excinfo.value.message) == 'Could not parse uploaded file, missing or empty <ShortName> element') class TestResolvei18nMessage(object): def test_no_match(self): assert utils.resolve_i18n_message('foo', {}, '') == 'foo' def test_locale_found(self): messages = { 'de': { 'foo': {'message': 'bar'} } } result = utils.resolve_i18n_message('__MSG_foo__', messages, 'de') assert result == 'bar' def test_uses_default_locale(self): messages = { 'en-US': { 'foo': {'message': 'bar'} } } result = utils.resolve_i18n_message( '__MSG_foo__', messages, 'de', 'en') assert result == 'bar' def test_no_locale_match(self): # Neither `locale` nor `default_locale` is found; "message" is returned # unchanged messages = { 'fr': { 'foo': {'message': 'bar'} } } result = utils.resolve_i18n_message( '__MSG_foo__', messages, 'de', 'en') assert result == '__MSG_foo__' def test_field_not_set(self): """Make sure we don't fail on messages that are `None` Fixes https://github.com/mozilla/addons-server/issues/3067 """ result = utils.resolve_i18n_message(None, {}, 'de', 'en') assert result is None def test_field_no_string(self): """Make sure we don't fail on messages that are not strings""" result = utils.resolve_i18n_message([], {}, 'de', 'en') assert result == [] def test_corrects_locales(self): messages = { 'en-US': { 'foo': {'message': 'bar'} } } result = utils.resolve_i18n_message('__MSG_foo__', messages, 'en') assert result == 'bar' def test_ignore_wrong_format(self): messages = { 'en-US': { 'foo': 'bar' } } result = utils.resolve_i18n_message('__MSG_foo__', messages, 'en') assert result == '__MSG_foo__' class TestXMLVulnerabilities(TestCase): """Test a few known vulnerabilities to make sure our defusedxml patching is applied automatically. This doesn't replicate all defusedxml tests. """ def test_quadratic_xml(self): quadratic_xml = os.path.join( os.path.dirname(__file__), '..', 'fixtures', 'files', 'quadratic.xml') with pytest.raises(forms.ValidationError) as exc: utils.extract_search(quadratic_xml) assert exc.value.message == u'OpenSearch: XML Security error.' def test_general_entity_expansion_is_disabled(self): zip_file = utils.SafeZip(os.path.join( os.path.dirname(__file__), '..', 'fixtures', 'files', 'xxe-example-install.zip')) # This asserts that the malicious install.rdf blows up with # a parse error. If it gets as far as this specific parse error # it means that the external entity was not processed. # # Before the patch in files/utils.py, this would raise an IOError # from the test suite refusing to make an external HTTP request to # the entity ref. with pytest.raises(EntitiesForbidden): utils.RDFExtractor(zip_file) def test_lxml_XMLParser_no_resolve_entities(self): with pytest.raises(NotSupportedError): lxml.etree.XMLParser(resolve_entities=True) # not setting it works lxml.etree.XMLParser() # Setting it explicitly to `False` is fine too. 
lxml.etree.XMLParser(resolve_entities=False) class TestGetBackgroundImages(TestCase): file_obj = os.path.join( settings.ROOT, 'src/olympia/devhub/tests/addons/static_theme.zip') file_obj_dep = os.path.join( settings.ROOT, 'src/olympia/devhub/tests/addons/static_theme_deprecated.zip') def test_get_background_images(self): data = {'images': {'theme_frame': 'weta.png'}} images = utils.get_background_images(self.file_obj, data) assert 'weta.png' in images assert len(images.items()) == 1 assert len(images['weta.png']) == 126447 def test_get_background_deprecated(self): data = {'images': {'headerURL': 'weta.png'}} images = utils.get_background_images(self.file_obj_dep, data) assert 'weta.png' in images assert len(images.items()) == 1 assert len(images['weta.png']) == 126447 def test_get_background_images_no_theme_data_provided(self): images = utils.get_background_images(self.file_obj, theme_data=None) assert 'weta.png' in images assert len(images.items()) == 1 assert len(images['weta.png']) == 126447 def test_get_background_images_missing(self): data = {'images': {'theme_frame': 'missing_file.png'}} images = utils.get_background_images(self.file_obj, data) assert not images def test_get_background_images_not_image(self): self.file_obj = os.path.join( settings.ROOT, 'src/olympia/devhub/tests/addons/static_theme_non_image.zip') data = {'images': {'theme_frame': 'not_an_image.js'}} images = utils.get_background_images(self.file_obj, data) assert not images def test_get_background_images_with_additional_imgs(self): self.file_obj = os.path.join( settings.ROOT, 'src/olympia/devhub/tests/addons/static_theme_tiled.zip') data = {'images': { 'theme_frame': 'empty.png', 'additional_backgrounds': [ 'transparent.gif', 'missing_&_ignored.png', 'weta_for_tiling.png'] }} images = utils.get_background_images(self.file_obj, data) assert len(images.items()) == 3 assert len(images['empty.png']) == 332 assert len(images['transparent.gif']) == 42 assert len(images['weta_for_tiling.png']) == 93371 # And again but only with the header image images = utils.get_background_images( self.file_obj, data, header_only=True) assert len(images.items()) == 1 assert len(images['empty.png']) == 332 @pytest.mark.parametrize('value, expected', [ (1, '1/1/1'), (1, '1/1/1'), (12, '2/12/12'), (123, '3/23/123'), (123456789, '9/89/123456789'), ]) def test_id_to_path(value, expected): assert utils.id_to_path(value) == expected class TestSafeZip(TestCase): def test_raises_error_for_invalid_webextension_xpi(self): with pytest.raises(forms.ValidationError): utils.SafeZip(get_addon_file('invalid_webextension.xpi')) def test_raises_validation_error_when_uncompressed_size_is_too_large(self): with override_settings(MAX_ZIP_UNCOMPRESSED_SIZE=1000): with pytest.raises(forms.ValidationError): # total uncompressed size of this xpi is: 2269 bytes utils.SafeZip(get_addon_file( 'valid_firefox_and_thunderbird_addon.xpi')) class TestArchiveMemberValidator(TestCase): # We cannot easily test `archive_member_validator` so let's test # `_validate_archive_member_name_and_size` instead. 
def test_raises_when_filename_is_none(self): with pytest.raises(forms.ValidationError): utils._validate_archive_member_name_and_size(None, 123) def test_raises_when_filesize_is_none(self): with pytest.raises(forms.ValidationError): utils._validate_archive_member_name_and_size('filename', None) def test_raises_when_filename_is_dot_dot_slash(self): with pytest.raises(forms.ValidationError): utils._validate_archive_member_name_and_size('../', 123) def test_raises_when_filename_starts_with_slash(self): with pytest.raises(forms.ValidationError): utils._validate_archive_member_name_and_size('/..', 123) def test_raises_when_filename_is_dot_dot(self): with pytest.raises(forms.ValidationError): utils._validate_archive_member_name_and_size('..', 123) def test_does_not_raise_when_filename_is_dot_dot_extension(self): utils._validate_archive_member_name_and_size('foo..svg', 123) @override_settings(FILE_UNZIP_SIZE_LIMIT=100) def test_raises_when_filesize_is_above_limit(self): with pytest.raises(forms.ValidationError): utils._validate_archive_member_name_and_size( 'filename', settings.FILE_UNZIP_SIZE_LIMIT + 100 )
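# A minimal sketch of the behavior pinned down by the parametrized
# test_id_to_path expectations above (the real implementation lives in
# olympia.files.utils and may differ in details):
def id_to_path_sketch(value):
    # last digit / last two digits / full id, e.g. 123 -> '3/23/123'
    pk = str(value)
    return '/'.join([pk[-1], pk[-2:], pk])

assert id_to_path_sketch(1) == '1/1/1'
assert id_to_path_sketch(123456789) == '9/89/123456789'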
// Account Handling!
var crypto = require('crypto');

// Data Models
var User = require('../models/user.js');
var Session = require('../models/session.js');

function Account(c_socket, c_io) {
    this.m_io = c_io;
    this.m_socket = c_socket;
}

Account.prototype.VerifyEmailAddress = function (email) {
    var re = /^([\w-]+(?:\.[\w-]+)*)@((?:[\w-]+\.)*\w[\w-]{0,66})\.([a-z]{2,6}(?:\.[a-z]{2})?)$/i;
    return re.test(email);
};

Account.prototype.SanitizeUserDocument = function (userDocument) {
    var sanitizedUserDocument = JSON.parse(JSON.stringify(userDocument));
    sanitizedUserDocument.salt = undefined;
    sanitizedUserDocument.password = undefined;
    return sanitizedUserDocument;
};

Account.prototype.CreateUser = function (email, username, password) {
    var a_accountObj = this;
    var a_socket = this.m_socket;

    if (email == undefined || password == undefined) {
        a_socket.emit('registrationResult', { success: false, msg: 'Need more data...' });
        return;
    }
    if (!this.VerifyEmailAddress(email)) {
        a_socket.emit('registrationResult', { success: false, msg: 'Invalid email address...' });
        return;
    }

    User.findOne({ 'email': email }, function (err, existingUser) {
        if (existingUser !== null) {
            a_socket.emit('registrationResult', { success: false, msg: 'User already exists' });
            return;
        }

        var userSalt = crypto.randomBytes(128).toString('base64');
        // The value from ss.json acts as an application-wide secret (pepper)
        // mixed into the password before key derivation.
        var salty_password = require('../../ss.json').ss + password + userSalt;
        // pbkdf2Sync requires an explicit digest on modern Node.js; the
        // original call omitted it, which throws. 'sha512' is used here and
        // must match the derivation in Login below.
        var key = crypto.pbkdf2Sync(salty_password, userSalt, 10000, 512, 'sha512');
        var derivedKey = key.toString('hex');

        var user = new User({
            email: email,
            username: username,
            salt: userSalt,
            password: derivedKey
        });

        // save the user and check for errors
        user.save(function (err) {
            if (err) {
                a_socket.emit('registrationResult', { success: false, msg: 'Unknown Error', err: err });
                return;
            }
            a_socket.emit('registrationResult', { success: true, userid: user._id, email: email });
            return;
        });
    });
};

Account.prototype.Login = function (email, password) {
    var a_accountObj = this;
    var a_socket = this.m_socket;

    if (email === undefined || password === undefined) {
        a_socket.emit('loginResult', { success: false, msg: 'Need more data...' });
        return;
    }
    if (!this.VerifyEmailAddress(email)) {
        a_socket.emit('loginResult', { success: false, msg: 'Need a valid email...' });
        return;
    }

    User.findOne({ 'email': email }).exec(function (err, existingUser) {
        if (existingUser === null) {
            a_socket.emit('loginResult', { success: false, msg: 'I don\'t think I know you, try registering?' });
            return;
        }

        // Must mirror the derivation in CreateUser exactly.
        var salty_password = require('../../ss.json').ss + password + existingUser.salt;
        var key = crypto.pbkdf2Sync(salty_password, existingUser.salt, 10000, 512, 'sha512');
        var derivedKey = key.toString('hex');
        if (existingUser.password !== derivedKey) {
            a_socket.emit('loginResult', { success: false, msg: 'Access Denied...' });
            return;
        }

        // Find Old Session
        var a_clientIp = a_socket.client.conn.remoteAddress;
        Session.findOne({ 'ip': a_clientIp }, function (err, existingSession) {
            if (existingSession === null) {
                // Create New Session
                existingSession = new Session({
                    ip: a_clientIp,
                    token: crypto.randomBytes(128).toString('base64'),
                    _user: existingUser._id
                });
                existingSession.save(function (err) {
                    if (err) {
                        a_socket.emit('loginResult', { success: false, msg: 'Could not save your session...' });
                        return;
                    }
                    existingUser._sessions.push(existingSession._id);
                    existingUser.save();
                    a_accountObj.ProcessLogin(existingSession.token, existingUser);
                });
            } else {
                a_accountObj.ProcessLogin(existingSession.token, existingUser);
            }
        });
    });
};

Account.prototype.ProcessLogin = function (token, user) {
    var a_cleanUser = this.SanitizeUserDocument(user);
    this.m_socket._user = a_cleanUser;
    this.m_socket._isAuthed = true;
    this.m_socket.emit('write-string-memory', { key: 'session', value: token });
    this.m_socket.emit('set-user', a_cleanUser);
    this.m_socket.emit('loginResult', { success: true });
    this.m_socket.emit('set-state', 'lobby');
};

Account.prototype.ConsumeToken = function (token, callback) {
    var a_accountObj = this;
    process.nextTick(function () {
        Session.findOne({ 'token': token }).populate('_user').exec(function (err, existingSession) {
            if (callback === undefined) { return; }
            if (err !== null) { callback(err); return; }
            if (existingSession === null) { callback('session not found'); return; }

            // Rotate the token on every use: a consumed token is replaced.
            var newToken = crypto.randomBytes(128).toString('base64');
            existingSession.token = newToken;
            existingSession.save(function (err) {
                if (err) { callback(err); return; }
                callback(null, {
                    'token': newToken,
                    'user': a_accountObj.SanitizeUserDocument(existingSession._user)
                });
            });
        });
    });
};

module.exports = Account;
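// Illustrative wiring for the rotating session token (the socket event
// names and server setup here are assumptions for demonstration, not part
// of this module):
//
//   var account = new Account(socket, io);
//   socket.on('resume-session', function (token) {
//     account.ConsumeToken(token, function (err, result) {
//       if (err) {
//         socket.emit('loginResult', { success: false, msg: 'Session expired' });
//         return;
//       }
//       // Every successful resume invalidates the old token and hands the
//       // client a fresh one via ProcessLogin -> 'write-string-memory'.
//       account.ProcessLogin(result.token, result.user);
//     });
//   });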
'use strict'; /* jshint esnext: true */ import React, { Component } from 'react'; import PropTypes from 'prop-types'; import ReactNative, { ListView, StyleSheet, View, NativeModules, } from 'react-native'; import merge from 'merge'; import SectionHeader from './SectionHeader'; import SectionList from './SectionList'; import CellWrapper from './CellWrapper'; const { UIManager } = NativeModules; export default class SelectableSectionsListView extends Component { constructor(props, context) { super(props, context); this.state = { dataSource: new ListView.DataSource({ rowHasChanged: (row1, row2) => row1 !== row2, sectionHeaderHasChanged: (prev, next) => prev !== next }), offsetY: 0 }; this.renderFooter = this.renderFooter.bind(this); this.renderHeader = this.renderHeader.bind(this); this.renderRow = this.renderRow.bind(this); this.renderSectionHeader = this.renderSectionHeader.bind(this); this.onScroll = this.onScroll.bind(this); this.onScrollAnimationEnd = this.onScrollAnimationEnd.bind(this); this.scrollToSection = this.scrollToSection.bind(this); // used for dynamic scrolling // always the first cell of a section keyed by section id this.cellTagMap = {}; this.sectionTagMap = {}; this.updateTagInCellMap = this.updateTagInCellMap.bind(this); this.updateTagInSectionMap = this.updateTagInSectionMap.bind(this); } componentWillMount() { this.calculateTotalHeight(); } componentDidMount() { // push measuring into the next tick setTimeout(() => { UIManager.measure(ReactNative.findNodeHandle(this.refs.view), (x,y,w,h) => { this.containerHeight = h; if (this.props.contentInset && this.props.data && this.props.data.length > 0) { this.scrollToSection(Object.keys(this.props.data)[0]); } }); }, 0); } componentWillReceiveProps(nextProps) { if (nextProps.data && nextProps.data !== this.props.data) { this.calculateTotalHeight(nextProps.data); } } calculateTotalHeight(data) { data = data || this.props.data; if (Array.isArray(data)) { return; } this.sectionItemCount = {}; this.totalHeight = Object.keys(data) .reduce((carry, key) => { var itemCount = data[key].length; carry += itemCount * this.props.cellHeight; carry += this.props.sectionHeaderHeight; this.sectionItemCount[key] = itemCount; return carry; }, 0); } updateTagInSectionMap(tag, section) { this.sectionTagMap[section] = tag; } updateTagInCellMap(tag, section) { this.cellTagMap[section] = tag; } scrollToSection(section) { let y = 0; let headerHeight = this.props.headerHeight || 0; y += headerHeight; if(this.props.contentInset) { y -= this.props.contentInset.top - headerHeight; } if (!this.props.useDynamicHeights) { const cellHeight = this.props.cellHeight; let sectionHeaderHeight = this.props.sectionHeaderHeight; let keys = Object.keys(this.props.data); if (typeof(this.props.compareFunction) === "function") { keys = keys.sort(this.props.compareFunction); } const index = keys.indexOf(section); let numcells = 0; for (var i = 0; i < index; i++) { numcells += this.props.data[keys[i]].length; } sectionHeaderHeight = index * sectionHeaderHeight; y += numcells * cellHeight + sectionHeaderHeight; // Bug fix: when the total list height is less than the container height, the UI layout used to break. let isTotalHeightMoreThanContainer = this.totalHeight - this.containerHeight < 0 ? 0 : this.totalHeight - this.containerHeight ; const maxY = isTotalHeightMoreThanContainer + headerHeight; y = y > maxY ? 
maxY : y; this.refs.listview.scrollTo({ x:0, y, animated: true }); } else { UIManager.measureLayout(this.cellTagMap[section], ReactNative.findNodeHandle(this.refs.listview), () => {}, (x, y, w, h) => { y = y - this.props.sectionHeaderHeight; this.refs.listview.scrollTo({ x:0, y, animated: true }); }); } this.props.onScrollToSection && this.props.onScrollToSection(section); } renderSectionHeader(sectionData, sectionId) { const updateTag = this.props.useDynamicHeights ? this.updateTagInSectionMap : null; const title = this.props.getSectionTitle ? this.props.getSectionTitle(sectionId) : sectionId; return ( <SectionHeader component={this.props.sectionHeader} title={title} sectionId={sectionId} sectionData={sectionData} updateTag={updateTag} /> ); } renderFooter() { const Footer = this.props.footer; return <Footer />; } renderHeader() { const Header = this.props.header; return <Header />; } renderRow(item, sectionId, index) { const CellComponent = this.props.cell; index = parseInt(index, 10); const isFirst = index === 0; const isLast = this.sectionItemCount && this.sectionItemCount[sectionId]-1 === index; const props = { isFirst, isLast, sectionId, index, item, offsetY: this.state.offsetY, onSelect: this.props.onCellSelect }; return index === 0 && this.props.useDynamicHeights ? <CellWrapper updateTag={this.updateTagInCellMap} component={CellComponent} {...props} {...this.props.cellProps} /> : <CellComponent {...props} {...this.props.cellProps} />; } onScroll(e) { const offsetY = e.nativeEvent.contentOffset.y; if (this.props.updateScrollState) { this.setState({ offsetY }); } this.props.onScroll && this.props.onScroll(e); } onScrollAnimationEnd(e) { if (this.props.updateScrollState) { this.setState({ offsetY: e.nativeEvent.contentOffset.y }); } } render() { const { data } = this.props; const dataIsArray = Array.isArray(data); let sectionList; let renderSectionHeader; let dataSource; let sections = Object.keys(data); if (typeof(this.props.compareFunction) === "function") { sections = sections.sort(this.props.compareFunction); } if (dataIsArray) { dataSource = this.state.dataSource.cloneWithRows(data); } else { sectionList = !this.props.hideSectionList ? <SectionList style={this.props.sectionListStyle} onSectionSelect={this.scrollToSection} sections={sections} data={data} getSectionListTitle={this.props.getSectionListTitle} component={this.props.sectionListItem} fontStyle={this.props.sectionListFontStyle} /> : null; renderSectionHeader = this.renderSectionHeader; dataSource = this.state.dataSource.cloneWithRowsAndSections(data, sections); } const renderFooter = this.props.footer ? this.renderFooter : this.props.renderFooter; const renderHeader = this.props.header ? 
this.renderHeader : this.props.renderHeader; const props = merge({}, this.props, { onScroll: this.onScroll, onScrollAnimationEnd: this.onScrollAnimationEnd, dataSource, renderFooter, renderHeader, renderRow: this.renderRow, renderSectionHeader }); props.style = void 0; return ( <View ref="view" style={[styles.container, this.props.style]}> <ListView ref="listview" {...props} /> {sectionList} </View> ); } } const styles = StyleSheet.create({ container: { flex: 1 } }); const stylesheetProp = PropTypes.oneOfType([ PropTypes.number, PropTypes.object, ]); SelectableSectionsListView.propTypes = { /** * The data to render in the listview */ data: PropTypes.oneOfType([ PropTypes.array, PropTypes.object, ]).isRequired, /** * Whether to show the section listing or not */ hideSectionList: PropTypes.bool, /** * Functions to provide a title for the section header and the section list * items. If not provided, the section ids will be used (the keys from the data object) */ getSectionTitle: PropTypes.func, getSectionListTitle: PropTypes.func, /** * Function to sort sections. If not provided, the sections order will match data source */ compareFunction: PropTypes.func, /** * Callback which should be called when a cell has been selected */ onCellSelect: PropTypes.func, /** * Callback which should be called when the user scrolls to a section */ onScrollToSection: PropTypes.func, /** * The cell element to render for each row */ cell: PropTypes.func.isRequired, /** * A custom element to render for each section list item */ sectionListItem: PropTypes.func, /** * A custom element to render for each section header */ sectionHeader: PropTypes.func, /** * A custom element to render as footer */ footer: PropTypes.func, /** * A custom element to render as header */ header: PropTypes.func, /** * The height of the header element to render. Is required if a * header element is used, so the positions can be calculated correctly */ headerHeight: PropTypes.number, /** * A custom function to render as header */ renderHeader: PropTypes.func, /** * A custom function to render as footer */ renderFooter: PropTypes.func, /** * An object containing additional props, which will be passed * to each cell component */ cellProps: PropTypes.object, /** * The height of the section header component */ sectionHeaderHeight: PropTypes.number.isRequired, /** * The height of the cell component */ cellHeight: PropTypes.number.isRequired, /** * Whether to determine the y position to scroll to by calculating header and * cell heights or by using the UIManager to measure the position of the * destination element. This is an experimental feature */ useDynamicHeights: PropTypes.bool, /** * Whether to set the current y offset as state and pass it to each * cell during re-rendering */ updateScrollState: PropTypes.bool, /** * Styles to pass to the container */ style: stylesheetProp, /** * Styles to pass to the section list container */ sectionListStyle: stylesheetProp, /** * Selector styles */ sectionListFontStyle: stylesheetProp, };
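// Illustrative usage of the component above (the cell component, heights
// and data shape are assumptions for demonstration):
//
//   const data = {
//     A: [{ name: 'Aaron' }, { name: 'Alice' }],
//     B: [{ name: 'Bob' }],
//   };
//   const Cell = ({ item }) => <Text style={{ height: 40 }}>{item.name}</Text>;
//
//   <SelectableSectionsListView
//     data={data}
//     cell={Cell}
//     cellHeight={40}
//     sectionHeaderHeight={24}
//   />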
# Natural Language Toolkit: Tokenizer Interface
#
# Copyright (C) 2001-2008 University of Pennsylvania
# Author: Edward Loper <edloper@gradient.cis.upenn.edu>
# URL: <http://nltk.sf.net>
# For license information, see LICENSE.TXT

"""
Tokenizer Interface
"""

from nltk.internals import overridden

class TokenizerI(object):
    """
    A processing interface for I{tokenizing} a string, or dividing it
    into a list of substrings.

    Subclasses must define:
      - either L{tokenize()} or L{batch_tokenize()} (or both)
    """
    def tokenize(self, s):
        """
        Divide the given string into a list of substrings.

        @return: C{list} of C{str}
        """
        if overridden(self.batch_tokenize):
            return self.batch_tokenize([s])[0]
        else:
            raise NotImplementedError()

    def batch_tokenize(self, strings):
        """
        Apply L{self.tokenize()} to each element of C{strings}.  I.e.:

            >>> return [self.tokenize(s) for s in strings]

        @rtype: C{list} of C{list} of C{str}
        """
        return [self.tokenize(s) for s in strings]
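# Illustrative sketch (not part of the NLTK source above): a minimal
# TokenizerI subclass. Only tokenize() is overridden, so batch_tokenize()
# falls back to the list-comprehension default shown in the interface.
# The class name WhitespaceExampleTokenizer is hypothetical.

class WhitespaceExampleTokenizer(TokenizerI):
    def tokenize(self, s):
        # Split on runs of whitespace; empty strings are dropped by split().
        return s.split()

# Usage:
#   t = WhitespaceExampleTokenizer()
#   t.tokenize("a b  c")            -> ['a', 'b', 'c']
#   t.batch_tokenize(["a b", "c"])  -> [['a', 'b'], ['c']]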
module.exports = {
  purge: ['./components/**/*.js', './pages/**/*.js'],
  plugins: [require('@tailwindcss/forms')],
  theme: {
    extend: {
      height: (theme) => ({
        'screen-1/2': '50vh',
        'screen-1/5': '20vh',
        'screen-2/5': '40vh',
        'screen-3/5': '60vh',
        'screen-4/5': '80vh',
      }),
      padding: { 'fluid-video': '56.25%' },
    },
  },
};
import csv import tmdbsimple as tmdb from ast import literal_eval BASE_URL = 'https://image.tmdb.org/t/p/w300' def readCsvFile(filename): # initializing the titles and rows list rows = [] # reading csv file with open(filename, 'r') as csvfile: # creating a csv reader object csvreader = csv.DictReader(csvfile) # extracting each data row one by one for row in csvreader: rows.append(row) return rows def writeToFile(filename, fields, rows): with open(filename, 'w') as csvfile: writer = csv.DictWriter(csvfile, fieldnames=fields) writer.writeheader() for row in rows: writer.writerow(row) def columnsToObject(rows, fields): for row in rows: for field in fields: row[field] = literal_eval(row[field]) def genGenreRelations(rows): res = [] fields = ['movie_id', 'movie_title', 'genre_id', 'genre_name'] for row in rows: movie_title = row['title'] movie_id = row['id'] for genre in row['genres']: entry = { 'movie_id': movie_id, 'movie_title': movie_title, 'genre_id': genre['id'], 'genre_name': genre['name'] } res.append(entry) return fields, res def genKeywordRelations(rows): res = [] fields = ['movie_id', 'movie_title', 'keyword_id', 'keyword_name'] for row in rows: movie_title = row['title'] movie_id = row['id'] for i, keyword in enumerate(row['keywords']): if i > 4: break entry = { 'movie_id': movie_id, 'movie_title': movie_title, 'keyword_id': keyword['id'], 'keyword_name': keyword['name'] } res.append(entry) return fields, res def genDirectorRelations(rows): res = [] fields = ['movie_id', 'movie_title', 'director_id', 'director_name'] for row in rows: movie_title = row['title'] movie_id = row['movie_id'] for crew in row['crew']: if crew['job'] == 'Director': entry = { 'movie_id': movie_id, 'movie_title': movie_title, 'director_id': crew['id'], 'director_name': crew['name'] } res.append(entry) return fields, res def genActorRelations(rows): res = [] fields = ['movie_id', 'movie_title', 'actor_id', 'actor_name'] for row in rows: movie_title = row['title'] movie_id = row['movie_id'] # Add up to 5 actors per movie for i, cast in enumerate(row['cast']): if i > 4: break entry = { 'movie_id': movie_id, 'movie_title': movie_title, 'actor_id': cast['id'], 'actor_name': cast['name'] } res.append(entry) return fields, res def getMoviePosters(rows): res = [] fields = ['movie_id', 'movie_title', 'poster_url'] for i, row in enumerate(rows): movie_title = row['title'] movie_id = row['id'] # Add up to 5 actors per movie movie = tmdb.Movies(movie_id) try: response = movie.info() except: continue url = response['poster_path'] if url is None: continue poster_url = BASE_URL + url entry = { 'movie_id': movie_id, 'movie_title': movie_title, 'poster_url': poster_url } res.append(entry) return fields, res def main(): tmdb.API_KEY = 'f191d6e48bdc99b7c4efe47235236f3a' rows_d = readCsvFile("dataset/tmdb_5000_movies.csv") rows_c = readCsvFile("dataset/tmdb_5000_credits.csv") columnsToObject(rows_d, ['genres', 'keywords']) columnsToObject(rows_c, ['cast', 'crew']) posters_f, posters_r = getMoviePosters(rows_d) writeToFile('gen/posters.csv', posters_f, posters_r) genres_f, genres_r = genGenreRelations(rows_d) writeToFile('gen/genres.csv', genres_f, genres_r) keyword_f, keyword_r = genKeywordRelations(rows_d) writeToFile('gen/keyword.csv', keyword_f, keyword_r) director_f, director_r = genDirectorRelations(rows_c) writeToFile('gen/director.csv', director_f, director_r) actor_f, actor_r = genActorRelations(rows_c) writeToFile('gen/actor.csv', actor_f, actor_r) main()
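# Illustrative sketch (not part of the script above): what columnsToObject()
# relies on. The TMDB CSVs store list-valued columns (genres, keywords, cast,
# crew) as literal strings, and ast.literal_eval turns them back into real
# lists of dicts safely -- unlike eval, it accepts only Python literals.
# The cell value below is a made-up example in the same shape.
from ast import literal_eval

raw_cell = '[{"id": 28, "name": "Action"}, {"id": 12, "name": "Adventure"}]'
genres = literal_eval(raw_cell)
assert genres[0]['name'] == 'Action'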
from utils import utils import numpy as np import pandas as pd from sklearn import metrics #matplotlib.use('Agg') import matplotlib.pyplot as plt from keras.models import load_model from config import settings as cnst from .predict_args import DefaultPredictArguments, Predict as pObj from plots.plots import display_probability_chart from analyzers.collect_exe_files import get_partition_data, partition_pkl_files_by_count, partition_pkl_files_by_size import gc import os from trend import activation_trend_identification as ati import logging import math import datetime, time def predict_byte(model, partition, xfiles, args): """ Function to perform prediction process on entire byte sequence of PE samples in Tier-1 Params: model: Trained Tier-1 model to be used for prediction xfiles: list of sample names to be used for prediction partition: t1 partition containing the data for sample list in xfiles args: contains various config parameters Returns: pred: object with predicted probabilities """ xlen = len(xfiles) pred_steps = xlen//args.batch_size if xlen % args.batch_size == 0 else xlen//args.batch_size + 1 # tst = time.time() pred = model.predict( utils.data_generator(partition, xfiles, np.ones(xfiles.shape), args.max_len, args.batch_size, shuffle=False), steps=pred_steps, verbose=args.verbose ) # tet = time.time() # print("[][][][][][][][][][][][][][][][][][][][][][][1] SAMPLE-WISE Tier-1 Prediction Time :", int((tet - tst)*1000)/1000, "ms") return pred def predict_byte_by_section_by_samples(model, xfiles, q_sections, args): """ Function to perform prediction process on PE section level byte sequence of PE samples in Tier-2 Params: model: Trained Tier-2 model to be used for prediction xfiles: list of sample names to be used for prediction spartition: t2 partition containing the section data for sample list in xfiles q_sections: list of qualified sections to be used - remaining PE sections' data will be set to 0 section_map: not used here args: contains various config parameters Returns: pred: object with predicted probabilities """ xlen = len(xfiles) pred_steps = xlen//args.batch_size if xlen % args.batch_size == 0 else xlen//args.batch_size + 1 #tst = time.time() pred = model.predict( utils.data_generator_by_section_by_samples(q_sections, xfiles, np.ones(xfiles.shape), args.max_len, args.batch_size, shuffle=False), steps=pred_steps, verbose=args.verbose ) #tet = time.time() #print("[][][][][][][][][][][][][][][][][][][][][][][2] SAMPLE-WISE Tier-2 Prediction Time :", int((tet - tst)*1000)/187, "ms") return pred def predict_byte_by_section(model, spartition, xfiles, q_sections, section_map, args): """ Function to perform prediction process on PE section level byte sequence of PE samples in Tier-2 Params: model: Trained Tier-2 model to be used for prediction xfiles: list of sample names to be used for prediction spartition: t2 partition containing the section data for sample list in xfiles q_sections: list of qualified sections to be used - remaining PE sections' data will be set to 0 section_map: not used here args: contains various config parameters Returns: pred: object with predicted probabilities """ xlen = len(xfiles) pred_steps = xlen//args.batch_size if xlen % args.batch_size == 0 else xlen//args.batch_size + 1 tst = time.time() pred = model.predict( utils.data_generator_by_section(spartition, q_sections, section_map, xfiles, np.ones(xfiles.shape), args.max_len, args.batch_size, shuffle=False), steps=pred_steps, verbose=args.verbose ) tet = time.time() 
print("[][][][][][][][][][][][][][][][][][][][][][][3] SAMPLE-WISE Tier-2 Prediction Time :", int((tet - tst) * 1000) / 187, "ms") return pred def predict_nn(args): """Obsolete: For this block implementation""" try: p = list() p.append(np.array(args.t2_nn_x_predict)) if cnst.USE_SECTION_ID_EMB_FOR_NN: p.append(np.array(args.sec_id_emb_predict)) if cnst.USE_ACT_MAG_FOR_NN: p.append(np.array(args.act_mag_predict)) pred = args.t2_nn_model.predict(p, verbose=1) except Exception as e: logging.exception("Error during NN prediction") return pred def predict_by_features(model, fn_list, label, batch_size, verbose, features_to_drop=None): """Obsolete: For this block implementation""" pred = model.predict_generator( utils.data_generator_by_features(fn_list, np.ones(fn_list.shape), batch_size, False, features_to_drop), steps=len(fn_list), verbose=verbose ) return pred def predict_by_fusion(model, fn_list, label, batch_size, verbose): """Obsolete: For this block implementation""" byte_sequence_max_len = model.input[0].shape[1] pred = model.predict_generator( utils.data_generator_by_fusion(fn_list, np.ones(fn_list.shape), byte_sequence_max_len, batch_size, shuffle=False), steps=len(fn_list), verbose=verbose ) return pred def calculate_prediction_metrics(predict_obj): """ Function to calculate the prediction metrics like confusion matrix, auc, restricted auc """ predict_obj.ypred = (predict_obj.yprob >= (predict_obj.thd / 100)).astype(int) cm = metrics.confusion_matrix(predict_obj.ytrue, predict_obj.ypred, labels=[cnst.BENIGN, cnst.MALWARE]) tn = cm[0][0] fp = cm[0][1] fn = cm[1][0] tp = cm[1][1] predict_obj.tpr = (tp / (tp + fn)) * 100 predict_obj.fpr = (fp / (fp + tn)) * 100 # print("Before AUC score computation:", predict_obj.thd, predict_obj.tpr, predict_obj.fpr) try: predict_obj.auc = metrics.roc_auc_score(predict_obj.ytrue, predict_obj.ypred) predict_obj.rauc = metrics.roc_auc_score(predict_obj.ytrue, predict_obj.ypred, max_fpr=cnst.OVERALL_TARGET_FPR/100) except Exception as e: logging.exception("Error during prediction metrics calculation.") return predict_obj def select_decision_threshold(predict_obj): """ Function to calibrate and find a decision threshold for classification that satisfies supplied Target FPR Returns: predict_obj: prediction object updated with selected decision threshold value ,y_pred, TPR and FPR """ predict_obj.target_fpr = (np.floor((predict_obj.target_fpr / 100) * len(predict_obj.yprob[predict_obj.ytrue == 0])) * 100) / len(predict_obj.yprob[predict_obj.ytrue == 0]) calibrated_threshold = (np.percentile(predict_obj.yprob[predict_obj.ytrue == 0], q=[100 - predict_obj.target_fpr]) * 100)[0] logging.debug("Initial Calibrated Threshold: %s %s", calibrated_threshold, predict_obj.target_fpr) pow = str(calibrated_threshold)[::-1].find('.') calibrated_threshold = math.ceil(calibrated_threshold * 10 ** (pow - 1)) / (10 ** (pow - 1)) logging.debug("Ceiled Threshold: %s", calibrated_threshold) selected_threshold = calibrated_threshold if calibrated_threshold < 100.0 else 100.0 temp_ypred = (predict_obj.yprob >= (selected_threshold / 100)).astype(int) cm = metrics.confusion_matrix(predict_obj.ytrue, temp_ypred, labels=[cnst.BENIGN, cnst.MALWARE]) tn = cm[0][0] fp = cm[0][1] fn = cm[1][0] tp = cm[1][1] TPR = (tp / (tp + fn)) * 100 FPR = (fp / (fp + tn)) * 100 logging.info("Selected Threshold: " + str(selected_threshold) + " TPR: {:6.3f}\tFPR: {:6.3f}".format(TPR, FPR)) predict_obj.thd = selected_threshold predict_obj.ypred = temp_ypred predict_obj.tpr = TPR predict_obj.fpr = FPR return 
predict_obj def get_bfn_mfp(pObj): """ Function to perfom filtering of FNs and FPs in Tier-1 results (in pObj) to form the B1 and M1 sets. It also performs identifying a possible boosting bound during training process, and promotes the samples with yprob < boosting bound to B2 set directly - as they are treated as obvious benign files. Returns: pObj: predict object updated with B1 and M1 set information """ prediction = (pObj.yprob >= (pObj.thd / 100)).astype(int) if cnst.PERFORM_B2_BOOSTING: if pObj.boosting_upper_bound is None: fn_indices = np.all([pObj.ytrue.ravel() == cnst.MALWARE, prediction.ravel() == cnst.BENIGN], axis=0) pObj.boosting_upper_bound = np.min(pObj.yprob[fn_indices]) if np.sum(fn_indices) > 0 else 0 logging.info("Setting B2 boosting threshold: %s", pObj.boosting_upper_bound) # To filter the predicted Benign FN files from prediction results brow_indices = np.where(np.all([prediction == cnst.BENIGN, pObj.yprob >= pObj.boosting_upper_bound], axis=0))[0] pObj.xB1 = pObj.xtrue[brow_indices] pObj.yB1 = pObj.ytrue[brow_indices] pObj.yprobB1 = pObj.yprob[brow_indices] pObj.ypredB1 = prediction[brow_indices] # To filter the benign files that can be boosted directly to B2 set boosted_indices = np.where(np.all([prediction == cnst.BENIGN, pObj.yprob < pObj.boosting_upper_bound], axis=0))[0] fn_escaped_by_boosting = np.where(np.all([prediction.ravel() == cnst.BENIGN, pObj.yprob.ravel() < pObj.boosting_upper_bound, pObj.ytrue.ravel() == cnst.MALWARE], axis=0))[0] pObj.boosted_xB2 = pObj.xtrue[boosted_indices] pObj.boosted_yB2 = pObj.ytrue[boosted_indices] pObj.boosted_yprobB2 = pObj.yprob[boosted_indices] pObj.boosted_ypredB2 = prediction[boosted_indices] logging.info("Number of files boosted to B2=" + str(len(boosted_indices)) + " \t[ " + str(len(np.where(prediction == cnst.BENIGN)[0])) + " - " + str(len(brow_indices)) + " ] Boosting Bound used: " + str(pObj.boosting_upper_bound) + " Escaped FNs:" + str(len(fn_escaped_by_boosting))) else: logging.info("NO BOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOSTING") # To filter the predicted Benign FN files from prediction results brow_indices = np.where(prediction == cnst.BENIGN)[0] pObj.xB1 = pObj.xtrue[brow_indices] pObj.yB1 = pObj.ytrue[brow_indices] pObj.yprobB1 = pObj.yprob[brow_indices] pObj.ypredB1 = prediction[brow_indices] # print("\nPREDICT MODULE Total B1 [{0}]\tGroundTruth [{1}:{2}]".format(len(brow_indices), # len(np.where(pObj.yB1 == cnst.BENIGN)[0]), len(np.where(pObj.yB1 == cnst.MALWARE)[0]))) mrow_indices = np.where(prediction == cnst.MALWARE)[0] pObj.xM1 = pObj.xtrue[mrow_indices] pObj.yM1 = pObj.ytrue[mrow_indices] pObj.yprobM1 = pObj.yprob[mrow_indices] pObj.ypredM1 = prediction[mrow_indices] return pObj def predict_tier1(model_idx, pobj, fold_index): """ Function to initiate the Tier-1 prediction process Params: model_idx: Default 0. 
Do not change
            pobj: initialized with parameters for prediction
            fold_index: current fold index of cross validation
        Returns:
            pobj: object containing the predict results
    """
    predict_args = DefaultPredictArguments()
    tier1_model = load_model(predict_args.model_path + cnst.TIER1_MODELS[model_idx] + "_" + str(fold_index) + ".h5")
    # model.summary()
    if cnst.EXECUTION_TYPE[model_idx] == cnst.BYTE:
        pobj.yprob = predict_byte(tier1_model, pobj.partition, pobj.xtrue, predict_args)
    elif cnst.EXECUTION_TYPE[model_idx] == cnst.FEATURISTIC:
        pobj.yprob = predict_by_features(tier1_model, pobj.xtrue, predict_args)
    elif cnst.EXECUTION_TYPE[model_idx] == cnst.FUSION:
        pobj.yprob = predict_by_fusion(tier1_model, pobj.xtrue, predict_args)
    del tier1_model
    gc.collect()
    return pobj


def predict_tier2_block(model_idx, pobj, fold_index):
    """ Function to initiate the Tier-2's block-based prediction process
        Params:
            model_idx: Default 0. Do not change
            pobj: initialized with parameters for prediction
            fold_index: current fold index of cross validation
        Returns:
            pobj: object containing the predict results
    """
    predict_args = DefaultPredictArguments()
    predict_args.max_len = cnst.TIER2_NEW_INPUT_SHAPE
    tier2_model = load_model(predict_args.model_path + cnst.TIER2_MODELS[model_idx] + "_" + str(fold_index) + ".h5")
    if cnst.EXECUTION_TYPE[model_idx] == cnst.BYTE:
        pobj.yprob = predict_byte(tier2_model, pobj.partition, pobj.xtrue, predict_args)
    elif cnst.EXECUTION_TYPE[model_idx] == cnst.FEATURISTIC:
        pobj.yprob = predict_by_features(tier2_model, pobj.xtrue, predict_args)
    elif cnst.EXECUTION_TYPE[model_idx] == cnst.FUSION:
        pobj.yprob = predict_by_fusion(tier2_model, pobj.xtrue, predict_args)
    del tier2_model
    gc.collect()
    return pobj


def select_thd_get_metrics_bfn_mfp(tier, pobj):
    """ Function used to select the decision threshold during the training process,
        or to obtain prediction metrics (TPR, FPR) during the testing phase.
        It also filters the FNs and FPs in Tier-1 results to form the B1 and M1 sets.
        Params:
            tier: TIER1 or TIER2
            pobj: object with prediction outcomes
        Returns:
            pobj: object updated with calculated prediction metrics
    """
    if pobj.thd is None:
        pobj = select_decision_threshold(pobj)  # +++ returned pobj also includes ypred based on selected threshold
    else:
        pobj = calculate_prediction_metrics(pobj)
    if tier == cnst.TIER1:
        pobj = get_bfn_mfp(pobj)
    return pobj


def select_thd_get_metrics(pobj):
    """ Function used to select the decision threshold during the training process,
        or to obtain prediction metrics (TPR, FPR) during the testing phase.
        Params:
            pobj: object with prediction outcomes
        Returns:
            pobj: object updated with calculated prediction metrics
    """
    if pobj.thd is None:
        pobj = select_decision_threshold(pobj)  # +++ returned pobj also includes ypred based on selected threshold
    else:
        pobj = calculate_prediction_metrics(pobj)
    return pobj


def predict_tier2_by_samples(model_idx, pobj, fold_index):
    """ Function to initiate the Tier-2 prediction process
        Params:
            model_idx: Default 0.
Do not change pobj: initialized with parameters for prediction fold_index: current fold index of cross validation Returns: pobj: object containing the predict results """ predict_args = DefaultPredictArguments() tier2_model = load_model(predict_args.model_path + cnst.TIER2_MODELS[model_idx] + "_" + str(fold_index) + ".h5") '''if cnst.EXECUTION_TYPE[model_idx] == cnst.BYTE: # pbs.trigger_predict_by_section() pobj.yprob = predict_byte_by_section(tier2_model, pobj.spartition, pobj.xtrue, pobj.q_sections, pobj.predict_section_map, predict_args) elif cnst.EXECUTION_TYPE[model_idx] == cnst.FEATURISTIC: pobj.yprob = predict_by_features(tier2_model, pobj.xtrue, predict_args) elif cnst.EXECUTION_TYPE[model_idx] == cnst.FUSION: pobj.yprob = predict_by_fusion(tier2_model, pobj.xtrue, predict_args)''' if cnst.EXECUTION_TYPE[model_idx] == cnst.BYTE: # pbs.trigger_predict_by_section() pobj.yprob = predict_byte_by_section_by_samples(tier2_model, pobj.xtrue, pobj.q_sections, predict_args) elif cnst.EXECUTION_TYPE[model_idx] == cnst.FEATURISTIC: pobj.yprob = predict_by_features(tier2_model, pobj.xtrue, predict_args) elif cnst.EXECUTION_TYPE[model_idx] == cnst.FUSION: pobj.yprob = predict_by_fusion(tier2_model, pobj.xtrue, predict_args) del tier2_model gc.collect() return pobj def predict_tier2(model_idx, pobj, fold_index): """ Function to initiate the Tier-2 prediction process Params: model_idx: Default 0. Do not change pobj: initialized with parameters for prediction fold_index: current fold index of cross validation Returns: pobj: object containing the predict results """ predict_args = DefaultPredictArguments() tier2_model = load_model(predict_args.model_path + cnst.TIER2_MODELS[model_idx] + "_" + str(fold_index) + ".h5") '''if cnst.EXECUTION_TYPE[model_idx] == cnst.BYTE: # pbs.trigger_predict_by_section() pobj.yprob = predict_byte_by_section(tier2_model, pobj.spartition, pobj.xtrue, pobj.q_sections, pobj.predict_section_map, predict_args) elif cnst.EXECUTION_TYPE[model_idx] == cnst.FEATURISTIC: pobj.yprob = predict_by_features(tier2_model, pobj.xtrue, predict_args) elif cnst.EXECUTION_TYPE[model_idx] == cnst.FUSION: pobj.yprob = predict_by_fusion(tier2_model, pobj.xtrue, predict_args)''' if cnst.EXECUTION_TYPE[model_idx] == cnst.BYTE: # pbs.trigger_predict_by_section() pobj.yprob = predict_byte_by_section(tier2_model, pobj.spartition, pobj.xtrue, pobj.q_sections, pobj.predict_section_map, predict_args) elif cnst.EXECUTION_TYPE[model_idx] == cnst.FEATURISTIC: pobj.yprob = predict_by_features(tier2_model, pobj.xtrue, predict_args) elif cnst.EXECUTION_TYPE[model_idx] == cnst.FUSION: pobj.yprob = predict_by_fusion(tier2_model, pobj.xtrue, predict_args) del tier2_model gc.collect() return pobj def get_reconciled_tpr_fpr(yt1, yp1, yt2, yp2): """ Function to obtain TPR and FPR of reconciled Tier-1 & Tier-2's y_true, y_pred """ cm1 = metrics.confusion_matrix(yt1, yp1, labels=[cnst.BENIGN, cnst.MALWARE]) tn1 = cm1[0][0] fp1 = cm1[0][1] fn1 = cm1[1][0] tp1 = cm1[1][1] cm2 = metrics.confusion_matrix(yt2, yp2, labels=[cnst.BENIGN,cnst.MALWARE]) tn2 = cm2[0][0] fp2 = cm2[0][1] fn2 = cm2[1][0] tp2 = cm2[1][1] tpr = ((tp1+tp2) / (tp1+tp2+fn1+fn2)) * 100 fpr = ((fp1+fp2) / (fp1+fp2+tn1+tn2)) * 100 return tpr, fpr def get_tpr_fpr(yt, yp): """ Function to obtain TPR and FPR based on y_true and y_pred """ cm = metrics.confusion_matrix(yt, yp, labels=[cnst.BENIGN, cnst.MALWARE]) tn = cm[0][0] fp = cm[0][1] fn = cm[1][0] tp = cm[1][1] tpr = (tp / (tp+fn)) * 100 fpr = (fp / (fp+tn)) * 100 return tpr, fpr def reconcile(pt1, 
pt2, cv_obj, fold_index): """ Funtion to reconcile the prediction results of Tier-1 and Tier-2 for the current fold of cross validation """ logging.info("\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ RECONCILING DATA ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~") # RECONCILE - xM1, yprobM1, xB1, pred_proba2 logging.info("BEFORE RECONCILIATION: [Total True Malwares:" + str(np.sum(pt1.ytrue)) + "] M1 has => " + str(np.shape(pt1.xM1)[0]) + "\tTPs:" + str(np.sum(np.all([pt1.ytrue.ravel() == cnst.MALWARE, pt1.ypred.ravel() == cnst.MALWARE], axis=0))) + "\tFPs:" + str(np.sum(np.all([pt1.ytrue.ravel() == cnst.BENIGN, pt1.ypred.ravel() == cnst.MALWARE], axis=0)))) logging.info(" [Total True Benign :" + str(len(np.where(pt1.ytrue.ravel() == cnst.BENIGN)[0])) + "] B1 has => " + str(len(np.where(pt1.ypred == cnst.BENIGN)[0])) + "\tTNs:" + str(np.sum(np.all([pt1.ytrue.ravel() == cnst.BENIGN, pt1.ypred.ravel() == cnst.BENIGN], axis=0))) + "\tFNs:" + str(np.sum(np.all([pt1.ytrue.ravel() == cnst.MALWARE, pt1.ypred.ravel() == cnst.BENIGN], axis=0)))) if cnst.PERFORM_B2_BOOSTING: logging.info(" Boosted: " + str(np.shape(pt1.boosted_xB2)[0]) + "\tRemaining B1:" + str(np.shape(pt2.xtrue)[0])) xtruereconciled = np.concatenate((pt1.xM1, pt1.boosted_xB2, pt2.xtrue)) # pt2.xtrue contains xB1 ytruereconciled = np.concatenate((pt1.yM1, pt1.boosted_yB2, pt2.ytrue)) ypredreconciled = np.concatenate((pt1.ypredM1, pt1.boosted_ypredB2, pt2.ypred)) yprobreconciled = np.concatenate((pt1.yprobM1, pt1.boosted_yprobB2, pt2.yprob)) # yprobreconciled = np.concatenate((scaled_pt1_yprobM1, scaled_pt1_boosted_yprobB2, scaled_pt2_yprob)) else: xtruereconciled = np.concatenate((pt1.xM1, pt2.xtrue)) # pt2.xtrue contains xB1 ytruereconciled = np.concatenate((pt1.yM1, pt2.ytrue)) ypredreconciled = np.concatenate((pt1.ypredM1, pt2.ypred)) yprobreconciled = np.concatenate((pt1.yprobM1, pt2.yprob)) # yprobreconciled = np.concatenate((scaled_pt1_yprobM1, scaled_pt2_yprob)) logging.info("AFTER RECONCILIATION : [M1+B1] => [ M1+(M2+B2)" + ("+B2_Boosted" if cnst.PERFORM_B2_BOOSTING else "") + " ] = " + str(np.shape(xtruereconciled)[0]) + " New TPs found: [M2] =>" + str(np.sum(np.all([pt2.ytrue.ravel() == cnst.MALWARE, pt2.ypred.ravel() == cnst.MALWARE], axis=0))) + "\tNew FPs: =>" + str(np.sum(np.all([pt2.ytrue.ravel() == cnst.BENIGN, pt2.ypred.ravel() == cnst.MALWARE], axis=0))) + "\n[RECON TPs] =>" + str(np.sum(np.all([ytruereconciled.ravel() == cnst.MALWARE, ypredreconciled.ravel() == cnst.MALWARE], axis=0))) + "\tFPs:" + str(np.sum(np.all([ytruereconciled.ravel() == cnst.BENIGN, ypredreconciled.ravel() == cnst.MALWARE], axis=0))) + "\n[RECON TNs] =>" + str(np.sum(np.all([ytruereconciled.ravel() == cnst.BENIGN, ypredreconciled.ravel() == cnst.BENIGN], axis=0))) + "\tFNs:" + str(np.sum(np.all([ytruereconciled.ravel() == cnst.MALWARE, ypredreconciled.ravel() == cnst.BENIGN], axis=0)))) try: pd.DataFrame().to_csv(cnst.PROJECT_BASE_PATH+cnst.ESC+'out'+cnst.ESC+'result'+cnst.ESC + "overall_pred_results_" + str(fold_index) + ".csv", header=None, index=None) reconciled_df = pd.concat([pd.DataFrame(xtruereconciled), pd.DataFrame(ytruereconciled), pd.DataFrame(yprobreconciled), pd.DataFrame(ypredreconciled)], axis=1) reconciled_df.to_csv(cnst.PROJECT_BASE_PATH+cnst.ESC+'out'+cnst.ESC+'result'+cnst.ESC + "overall_pred_results_" + str(fold_index) + ".csv", header=None, index=None, mode='a') pd.DataFrame().to_csv(cnst.PROJECT_BASE_PATH+cnst.ESC+'out'+cnst.ESC+'result'+cnst.ESC + "tier2_test_" + str(fold_index) + ".csv", header=None, index=None) ftier2_df = 
pd.concat([pd.DataFrame(pt2.xtrue), pd.DataFrame(pt2.ytrue), pd.DataFrame(pt2.yprob), pd.DataFrame(pt2.ypred)], axis=1) ftier2_df.to_csv(cnst.PROJECT_BASE_PATH+cnst.ESC+'out'+cnst.ESC+'result'+cnst.ESC + "tier2_test_" + str(fold_index) + ".csv", header=None, index=None, mode='a') print("Successfully saved overall prediction results in 'data' directory !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!") except Exception as e: print(str(e), "\n\nFailed to save overall prediction results !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!") reconciled_tpr = np.array([]) reconciled_fpr = np.array([]) tier1_tpr = np.array([]) tier1_fpr = np.array([]) probability_score = np.arange(0, 100.01, 0.1) # 0.1 for p in probability_score: rtpr, rfpr = None, None if cnst.PERFORM_B2_BOOSTING: ytrue_M1B2_Boosted = np.concatenate((pt1.yM1, pt1.boosted_yB2)) yprob_M1B2_Boosted = np.concatenate((pt1.yprobM1, pt1.boosted_yprobB2)) rtpr, rfpr = get_reconciled_tpr_fpr(ytrue_M1B2_Boosted, yprob_M1B2_Boosted > (p/100), pt2.ytrue, pt2.yprob > (p/100)) else: rtpr, rfpr = get_reconciled_tpr_fpr(pt1.yM1, pt1.yprobM1 > (p/100), pt2.ytrue, pt2.yprob > (p/100)) reconciled_tpr = np.append(reconciled_tpr, rtpr) reconciled_fpr = np.append(reconciled_fpr, rfpr) tpr1, fpr1 = get_tpr_fpr(pt1.ytrue, pt1.yprob > (p/100)) tier1_tpr = np.append(tier1_tpr, tpr1) tier1_fpr = np.append(tier1_fpr, fpr1) cv_obj.t1_tpr_folds[fold_index] = tier1_tpr cv_obj.t1_fpr_folds[fold_index] = tier1_fpr cv_obj.recon_tpr_folds[fold_index] = reconciled_tpr cv_obj.recon_fpr_folds[fold_index] = reconciled_fpr cv_obj.t1_mean_tpr_auc = tier1_tpr if cv_obj.t1_mean_tpr_auc is None else np.sum([cv_obj.t1_mean_tpr_auc, tier1_tpr], axis=0) / 2 cv_obj.t1_mean_fpr_auc = tier1_fpr if cv_obj.t1_mean_fpr_auc is None else np.sum([cv_obj.t1_mean_fpr_auc, tier1_fpr], axis=0) / 2 cv_obj.recon_mean_tpr_auc = reconciled_tpr if cv_obj.recon_mean_tpr_auc is None else np.sum([cv_obj.recon_mean_tpr_auc, reconciled_tpr], axis=0) / 2 cv_obj.recon_mean_fpr_auc = reconciled_fpr if cv_obj.recon_mean_fpr_auc is None else np.sum([cv_obj.recon_mean_fpr_auc, reconciled_fpr], axis=0) / 2 tpr1, fpr1 = get_tpr_fpr(pt1.ytrue, pt1.ypred) rtpr, rfpr = None, None if cnst.PERFORM_B2_BOOSTING: ytrue_M1B2_Boosted = np.concatenate((pt1.yM1, pt1.boosted_yB2)) ypred_M1B2_Boosted = np.concatenate((pt1.ypredM1, pt1.boosted_ypredB2)) rtpr, rfpr = get_reconciled_tpr_fpr(ytrue_M1B2_Boosted, ypred_M1B2_Boosted, pt2.ytrue, pt2.ypred) else: rtpr, rfpr = get_reconciled_tpr_fpr(pt1.yM1, pt1.ypredM1, pt2.ytrue, pt2.ypred) logging.info("FOLD: %s\tTIER1 TPR: %s\tFPR: %s\tOVERALL TPR: %s\tFPR: %s", fold_index+1, tpr1, fpr1, rtpr, rfpr) cv_obj.t1_tpr_list = np.append(cv_obj.t1_tpr_list, tpr1) cv_obj.t1_fpr_list = np.append(cv_obj.t1_fpr_list, fpr1) cv_obj.recon_tpr_list = np.append(cv_obj.recon_tpr_list, rtpr) cv_obj.recon_fpr_list = np.append(cv_obj.recon_fpr_list, rfpr) cv_obj.t1_mean_auc_score = np.append(cv_obj.t1_mean_auc_score, metrics.roc_auc_score(pt1.ytrue, pt1.yprob)) cv_obj.t1_mean_auc_score_restricted = np.append(cv_obj.t1_mean_auc_score_restricted, metrics.roc_auc_score(pt1.ytrue, pt1.yprob, max_fpr=cnst.OVERALL_TARGET_FPR/100)) cv_obj.recon_mean_auc_score = np.append(cv_obj.recon_mean_auc_score, metrics.roc_auc_score(ytruereconciled, yprobreconciled)) cv_obj.recon_mean_auc_score_restricted = np.append(cv_obj.recon_mean_auc_score_restricted, metrics.roc_auc_score(ytruereconciled, yprobreconciled, max_fpr=cnst.OVERALL_TARGET_FPR/100)) logging.info(r'Tier1 Restricted AUC = %0.3f ± %0.2f ' % ( # [Full AUC: %0.3f] 
metrics.roc_auc_score(pt1.ytrue, pt1.yprob, max_fpr=cnst.OVERALL_TARGET_FPR/100) , np.std(metrics.roc_auc_score(pt1.ytrue, pt1.yprob, max_fpr=cnst.OVERALL_TARGET_FPR/100)) # , metrics.roc_auc_score(pt1.ytrue, pt1.yprob) )) logging.info(r'Recon Restricted AUC = %0.3f ± %0.2f ' % ( # [Full AUC: %0.3f] metrics.roc_auc_score(ytruereconciled, yprobreconciled, max_fpr=cnst.OVERALL_TARGET_FPR/100) , np.std(metrics.roc_auc_score(ytruereconciled, yprobreconciled, max_fpr=cnst.OVERALL_TARGET_FPR/100)) # , metrics.roc_auc_score(ytruereconciled, yprobreconciled) )) return cv_obj, rfpr def init(model_idx, test_partitions, cv_obj, fold_index, scalers=None): """ Module for performing prediction for Testing phase # ################################################################################################################## # OBJECTIVES: # 1) Predict using trained Tier-1 model over Testing data - using selected THD1 found during Tier-1 training process # 2) Obtain B1 set of test samples and collect top activation blocks based dataset using qualified sections found # 3) Predict using trained Tier-2 model over block dataset - using THD2 found during Tier-2 training process # 4) Reconcile results for Tier-1 and Tier-2 and generate overall results # ################################################################################################################## Args: model_idx: Default 0 for byte sequence models. Do not change. test_partitions: list of partition indexes to be used for testing cv_obj: object to hold cross validation results for current fold fold_index: current fold of cross-validation scalers: not used here Returns: cv_obj """ # TIER-1 PREDICTION OVER TEST DATA partition_tracker_df = pd.read_csv(cnst.DATA_SOURCE_PATH + cnst.ESC + "partition_tracker_" + str(fold_index) + ".csv") logging.info("Prediction on Testing Data - TIER1 # Partitions: %s", partition_tracker_df["test"][0]) todf = pd.read_csv(os.path.join(cnst.PROJECT_BASE_PATH + cnst.ESC + "out" + cnst.ESC + "result" + cnst.ESC, "training_outcomes_" + str(fold_index) + ".csv")) thd1 = todf["thd1"][0] thd2 = todf["thd2"][0] boosting_bound = todf["boosting_bound"][0] logging.info("THD1: %s THD2: %s Boosting Bound: %s", thd1, thd2, boosting_bound) if thd2 is None or math.isnan(thd2): logging.critical("Threshold for Tier-2 model is not available. 
Aborting entire prediction process.") return section_map = None q_sections = pd.read_csv(os.path.join(cnst.PROJECT_BASE_PATH + cnst.ESC + "out" + cnst.ESC + "result" + cnst.ESC, "qualified_sections_" + str(fold_index) + ".csv"), header=None)[0] q_sections = list(q_sections) logging.info("Number of Q_sections received for prediction: %s", len(q_sections)) predict_t1_test_data_all = pObj(cnst.TIER2, None, None, None) predict_t1_test_data_all.thd = thd1 total = 0 if not cnst.SKIP_TIER1_PREDICTION: pd.DataFrame().to_csv(cnst.PROJECT_BASE_PATH+cnst.ESC+'out'+cnst.ESC+'result'+cnst.ESC + "tier1_test_"+str(fold_index)+"_pkl.csv", header=None, index=None) pd.DataFrame().to_csv(cnst.PROJECT_BASE_PATH + cnst.ESC + "data" + cnst.ESC + "b1_test_"+str(fold_index)+"_pkl.csv", header=None, index=None) pd.DataFrame().to_csv(cnst.PROJECT_BASE_PATH + cnst.ESC + "data" + cnst.ESC + "m1_test_"+str(fold_index)+"_pkl.csv", header=None, index=None) pd.DataFrame().to_csv(cnst.PROJECT_BASE_PATH + cnst.ESC + "data" + cnst.ESC + "b2_test_"+str(fold_index)+"_pkl.csv", header=None, index=None) for pcount in test_partitions: logging.info("Predicting partition: %s", pcount) tst_datadf = pd.read_csv(cnst.DATA_SOURCE_PATH + cnst.ESC + "p" + str(pcount) + ".csv", header=None) predict_t1_test_data_partition = pObj(cnst.TIER1, None, tst_datadf.iloc[:, 0].values, tst_datadf.iloc[:, 1].values) predict_t1_test_data_partition.thd = thd1 predict_t1_test_data_partition.boosting_upper_bound = boosting_bound predict_t1_test_data_partition.partition = get_partition_data(None, None, pcount, "t1") tst = time.time() predict_t1_test_data_partition = predict_tier1(model_idx, predict_t1_test_data_partition, fold_index) total = time.time() - tst del predict_t1_test_data_partition.partition # Release Memory gc.collect() # predict_t1_test_data_partition = get_bfn_mfp(predict_t1_test_data_partition) predict_t1_test_data_partition = select_thd_get_metrics_bfn_mfp("TIER1", predict_t1_test_data_partition) predict_t1_test_data_all.xtrue = predict_t1_test_data_partition.xtrue if predict_t1_test_data_all.xtrue is None else np.concatenate([predict_t1_test_data_all.xtrue, predict_t1_test_data_partition.xtrue]) predict_t1_test_data_all.ytrue = predict_t1_test_data_partition.ytrue if predict_t1_test_data_all.ytrue is None else np.concatenate([predict_t1_test_data_all.ytrue, predict_t1_test_data_partition.ytrue]) predict_t1_test_data_all.yprob = predict_t1_test_data_partition.yprob if predict_t1_test_data_all.yprob is None else np.concatenate([predict_t1_test_data_all.yprob, predict_t1_test_data_partition.yprob]) predict_t1_test_data_all.ypred = predict_t1_test_data_partition.ypred if predict_t1_test_data_all.ypred is None else np.concatenate([predict_t1_test_data_all.ypred, predict_t1_test_data_partition.ypred]) predict_t1_test_data_all.xB1 = predict_t1_test_data_partition.xB1 if predict_t1_test_data_all.xB1 is None else np.concatenate([predict_t1_test_data_all.xB1, predict_t1_test_data_partition.xB1]) predict_t1_test_data_all.yB1 = predict_t1_test_data_partition.yB1 if predict_t1_test_data_all.yB1 is None else np.concatenate([predict_t1_test_data_all.yB1, predict_t1_test_data_partition.yB1]) predict_t1_test_data_all.yprobB1 = predict_t1_test_data_partition.yprobB1 if predict_t1_test_data_all.yprobB1 is None else np.concatenate([predict_t1_test_data_all.yprobB1, predict_t1_test_data_partition.yprobB1]) predict_t1_test_data_all.ypredB1 = predict_t1_test_data_partition.ypredB1 if predict_t1_test_data_all.ypredB1 is None else 
np.concatenate([predict_t1_test_data_all.ypredB1, predict_t1_test_data_partition.ypredB1]) predict_t1_test_data_all.xM1 = predict_t1_test_data_partition.xM1 if predict_t1_test_data_all.xM1 is None else np.concatenate([predict_t1_test_data_all.xM1, predict_t1_test_data_partition.xM1]) predict_t1_test_data_all.yM1 = predict_t1_test_data_partition.yM1 if predict_t1_test_data_all.yM1 is None else np.concatenate([predict_t1_test_data_all.yM1, predict_t1_test_data_partition.yM1]) predict_t1_test_data_all.yprobM1 = predict_t1_test_data_partition.yprobM1 if predict_t1_test_data_all.yprobM1 is None else np.concatenate([predict_t1_test_data_all.yprobM1, predict_t1_test_data_partition.yprobM1]) predict_t1_test_data_all.ypredM1 = predict_t1_test_data_partition.ypredM1 if predict_t1_test_data_all.ypredM1 is None else np.concatenate([predict_t1_test_data_all.ypredM1, predict_t1_test_data_partition.ypredM1]) predict_t1_test_data_all.boosted_xB2 = predict_t1_test_data_partition.boosted_xB2 if predict_t1_test_data_all.boosted_xB2 is None else np.concatenate([predict_t1_test_data_all.boosted_xB2, predict_t1_test_data_partition.boosted_xB2]) predict_t1_test_data_all.boosted_yB2 = predict_t1_test_data_partition.boosted_yB2 if predict_t1_test_data_all.boosted_yB2 is None else np.concatenate([predict_t1_test_data_all.boosted_yB2, predict_t1_test_data_partition.boosted_yB2]) predict_t1_test_data_all.boosted_yprobB2 = predict_t1_test_data_partition.boosted_yprobB2 if predict_t1_test_data_all.boosted_yprobB2 is None else np.concatenate([predict_t1_test_data_all.boosted_yprobB2, predict_t1_test_data_partition.boosted_yprobB2]) predict_t1_test_data_all.boosted_ypredB2 = predict_t1_test_data_partition.boosted_ypredB2 if predict_t1_test_data_all.boosted_ypredB2 is None else np.concatenate([predict_t1_test_data_all.boosted_ypredB2, predict_t1_test_data_partition.boosted_ypredB2]) test_tier1datadf = pd.concat([pd.DataFrame(predict_t1_test_data_partition.xtrue), pd.DataFrame(predict_t1_test_data_partition.ytrue), pd.DataFrame(predict_t1_test_data_partition.yprob), pd.DataFrame(predict_t1_test_data_partition.ypred)], axis=1) test_tier1datadf.to_csv(cnst.PROJECT_BASE_PATH+cnst.ESC+'out'+cnst.ESC+'result'+cnst.ESC + "tier1_test_"+str(fold_index)+"_pkl.csv", header=None, index=None, mode='a') test_b1datadf = pd.concat([pd.DataFrame(predict_t1_test_data_partition.xB1), pd.DataFrame(predict_t1_test_data_partition.yB1), pd.DataFrame(predict_t1_test_data_partition.yprobB1), pd.DataFrame(predict_t1_test_data_partition.ypredB1)], axis=1) test_b1datadf.to_csv(cnst.PROJECT_BASE_PATH + cnst.ESC + "data" + cnst.ESC + "b1_test_"+str(fold_index)+"_pkl.csv", header=None, index=None, mode='a') test_m1datadf = pd.concat([pd.DataFrame(predict_t1_test_data_partition.xM1), pd.DataFrame(predict_t1_test_data_partition.yM1), pd.DataFrame(predict_t1_test_data_partition.yprobM1), pd.DataFrame(predict_t1_test_data_partition.ypredM1)], axis=1) test_m1datadf.to_csv(cnst.PROJECT_BASE_PATH + cnst.ESC + "data" + cnst.ESC + "m1_test_"+str(fold_index)+"_pkl.csv", header=None, index=None, mode='a') test_b2datadf = pd.concat([pd.DataFrame(predict_t1_test_data_partition.boosted_xB2), pd.DataFrame(predict_t1_test_data_partition.boosted_yB2), pd.DataFrame(predict_t1_test_data_partition.boosted_yprobB2), pd.DataFrame(predict_t1_test_data_partition.boosted_ypredB2)], axis=1) test_b2datadf.to_csv(cnst.PROJECT_BASE_PATH + cnst.ESC + "data" + cnst.ESC + "b2_test_"+str(fold_index)+"_pkl.csv", header=None, index=None, mode='a') print("[][][][][][] FOR ALL PARTITIONS 
[SAMPLE-WISE] Tier-1 Prediction Time :", int(total * 1000) / 1000, "ms") #test_b1_partition_count = partition_pkl_files_by_count("b1_test", fold_index, predict_t1_test_data_all.xB1, predict_t1_test_data_all.yB1) if cnst.PARTITION_BY_COUNT else partition_pkl_files_by_size("b1_test", fold_index, predict_t1_test_data_all.xB1, predict_t1_test_data_all.yB1) #pd.DataFrame([{"b1_pred_partition_count": test_b1_partition_count}]).to_csv(os.path.join(cnst.DATA_SOURCE_PATH, "b1_pred_partitions_" + str(fold_index) + ".csv"), index=False) #test_b1_partition_count = pd.read_csv(os.path.join(cnst.DATA_SOURCE_PATH, "b1_pred_partitions_"+str(fold_index)+".csv"))["b1_pred_partition_count"][0] predict_t1_test_data_all = select_thd_get_metrics(predict_t1_test_data_all) logging.info("Threshold used for Prediction : " + str(predict_t1_test_data_all.thd) + "\t\tTPR: {:6.3f}\tFPR: {:6.3f}\tAUC: {:6.3f}\tRst. AUC: {:6.3f}".format(predict_t1_test_data_all.tpr, predict_t1_test_data_all.fpr, predict_t1_test_data_all.auc, predict_t1_test_data_all.rauc)) else: logging.info("Skipped TIER-1 prediction") test_tier1datadf_all = pd.read_csv(cnst.PROJECT_BASE_PATH+cnst.ESC+'out'+cnst.ESC+'result'+cnst.ESC + "tier1_test_"+str(fold_index)+"_pkl.csv", header=None) test_b1datadf_all = pd.read_csv(cnst.PROJECT_BASE_PATH + cnst.ESC + "data" + cnst.ESC + "b1_test_"+str(fold_index)+"_pkl.csv", header=None) test_m1datadf_all = pd.read_csv(cnst.PROJECT_BASE_PATH + cnst.ESC + "data" + cnst.ESC + "m1_test_"+str(fold_index)+"_pkl.csv", header=None) test_b2datadf_all = pd.read_csv(cnst.PROJECT_BASE_PATH + cnst.ESC + "data" + cnst.ESC + "b2_test_"+str(fold_index)+"_pkl.csv", header=None) predict_t1_test_data_all.xtrue = test_tier1datadf_all.iloc[:, 0] predict_t1_test_data_all.ytrue = test_tier1datadf_all.iloc[:, 1] predict_t1_test_data_all.yprob = test_tier1datadf_all.iloc[:, 2] predict_t1_test_data_all.ypred = test_tier1datadf_all.iloc[:, 3] predict_t1_test_data_all.xB1 = test_b1datadf_all.iloc[:, 0] predict_t1_test_data_all.yB1 = test_b1datadf_all.iloc[:, 1] predict_t1_test_data_all.yprobB1 = test_b1datadf_all.iloc[:, 2] predict_t1_test_data_all.ypredB1 = test_b1datadf_all.iloc[:, 3] predict_t1_test_data_all.xM1 = test_m1datadf_all.iloc[:, 0] predict_t1_test_data_all.yM1 = test_m1datadf_all.iloc[:, 1] predict_t1_test_data_all.yprobM1 = test_m1datadf_all.iloc[:, 2] predict_t1_test_data_all.ypredM1 = test_m1datadf_all.iloc[:, 3] predict_t1_test_data_all.boosted_xB2 = test_b2datadf_all.iloc[:, 0] predict_t1_test_data_all.boosted_yB2 = test_b2datadf_all.iloc[:, 1] predict_t1_test_data_all.boosted_yprobB2 = test_b2datadf_all.iloc[:, 2] predict_t1_test_data_all.boosted_ypredB2 = test_b2datadf_all.iloc[:, 3] logging.info("Loaded old TIER-1 prediction data") predict_t1_test_data_all = select_thd_get_metrics(predict_t1_test_data_all) logging.info("Threshold used for Prediction : " + str(predict_t1_test_data_all.thd) + "\t\tTPR: {:6.3f}\tFPR: {:6.3f}\tAUC: {:6.3f}\tRst. AUC: {:6.3f}".format(predict_t1_test_data_all.tpr, predict_t1_test_data_all.fpr, predict_t1_test_data_all.auc, predict_t1_test_data_all.rauc)) if len(predict_t1_test_data_all.xB1) == 0: logging.info("!!!!! 
Skipping Tier-2 - B1 set is empty") return None total = 0 if thd2 is not None and q_sections is not None: p_args = DefaultPredictArguments() p_args.q_sections = q_sections p_args.t1_model_name = cnst.TIER1_MODELS[model_idx] + "_" + str(fold_index) + ".h5" p_args.t2_model_name = cnst.TIER2_MODELS[model_idx] + "_" + str(fold_index) + ".h5" #if not cnst.SKIP_TIER1_PREDICTION: # logging.info("Collecting Block Data for fold-%s partitions-%s type-%s", fold_index, test_b1_partition_count, "test") # ati.collect_b1_block_dataset(p_args, fold_index, test_b1_partition_count, "test") logging.info("Retrieving stored B1 Data for Block Prediction with selected THD2") '''p_args.t2_nn_model = load_model(cnst.MODEL_PATH + "nn_t2_" + str(fold_index) + ".h5") nn_test_data = pd.read_csv(cnst.PROJECT_BASE_PATH + cnst.ESC + 'data' + cnst.ESC + 'b1_test_'+str(fold_index)+'_qX_nn_dataset.csv', header=None) all_nn_x_predict, p_args.t2_nn_y_predict = nn_test_data.iloc[:, 0:-2], nn_test_data.iloc[:, -1] # p_args.t2_nn_x_predict = pd.DataFrame(scaler.transform(p_args.t2_nn_x_predict), columns=p_args.t2_nn_x_predict.columns) p_args.t2_nn_x_predict = pd.DataFrame(scalers[0].transform(all_nn_x_predict.iloc[:, 0:128]), columns=all_nn_x_predict.columns[0:128]) if cnst.USE_SECTION_ID_EMB_FOR_NN: if cnst.SCALE_SECTION_ID_EMB_FOR_NN: p_args.sec_id_emb_predict = pd.DataFrame(scalers[1].transform(all_nn_x_predict.iloc[:, 128:256]), columns=all_nn_x_predict.columns[128:256]) else: p_args.sec_id_emb_predict = all_nn_x_predict.iloc[:, 128:256] if cnst.USE_ACT_MAG_FOR_NN: if cnst.SCALE_ACT_MAG_FOR_NN: p_args.act_mag_predict = pd.DataFrame(scalers[2].transform(all_nn_x_predict.iloc[:, 256:384]), columns=all_nn_x_predict.columns[256:384]) else: p_args.act_mag_predict = all_nn_x_predict.iloc[:, 256:384] # TIER-2 PREDICTION print("Prediction on Testing Data - TIER2 [B1 data] # Partitions", test_b1_partition_count) # \t\t\tSection Map Length:", len(section_map)) t2_nn_pred = predict_nn(p_args) predict_t2_test_data_all = pObj(cnst.TIER2, None, nn_test_data.iloc[:, -2], p_args.t2_nn_y_predict) predict_t2_test_data_all.thd = thd2 predict_t2_test_data_all.yprob = t2_nn_pred ''' # TIER-2 PREDICTION logging.info("Prediction on Testing Data - TIER2 [B1 data]") # # Partitions: %s", test_b1_partition_count) # \t\t\tSection Map Length:", len(section_map)) predict_t2_test_data_all = pObj(cnst.TIER2, None, None, None) predict_t2_test_data_all.thd = thd2 predict_t2_test_data_partition = pObj(cnst.TIER2, None, predict_t1_test_data_all.xB1, predict_t1_test_data_all.yB1) #test_b1datadf_all.iloc[:, 0], test_b1datadf_all.iloc[:, 1]) predict_t2_test_data_partition.thd = thd2 predict_t2_test_data_partition.q_sections = q_sections predict_t2_test_data_partition.predict_section_map = section_map # predict_t2_test_data_partition.wpartition = get_partition_data("b1_test", fold_index, pcount, "t1") # predict_t2_test_data_partition.spartition = get_partition_data("b1_test", fold_index, pcount, "t2") tst = time.time() predict_t2_test_data_partition = predict_tier2_by_samples(model_idx, predict_t2_test_data_partition, fold_index) total = time.time() - tst print("[][][][][][] FOR ALL SAMPLES [SAMPLE-WISE] Tier-2 Prediction Time :", int(total * 1000) / 187, "ms") # del predict_t2_test_data_partition.wpartition # Release Memory # del predict_t2_test_data_partition.spartition # Release Memory gc.collect() predict_t2_test_data_all.xtrue = predict_t2_test_data_partition.xtrue if predict_t2_test_data_all.xtrue is None else np.concatenate([predict_t2_test_data_all.xtrue, 
predict_t2_test_data_partition.xtrue]) predict_t2_test_data_all.ytrue = predict_t2_test_data_partition.ytrue if predict_t2_test_data_all.ytrue is None else np.concatenate([predict_t2_test_data_all.ytrue, predict_t2_test_data_partition.ytrue]) predict_t2_test_data_all.yprob = predict_t2_test_data_partition.yprob if predict_t2_test_data_all.yprob is None else np.concatenate([predict_t2_test_data_all.yprob, predict_t2_test_data_partition.yprob]) predict_t2_test_data_all.ypred = predict_t2_test_data_partition.ypred if predict_t2_test_data_all.ypred is None else np.concatenate([predict_t2_test_data_all.ypred, predict_t2_test_data_partition.ypred]) logging.info("Overall Tier-2 Test data Size updated: %s", predict_t2_test_data_all.ytrue.shape) ''' for pcount in range(0, test_b1_partition_count): b1_tst_datadf = pd.read_csv(cnst.DATA_SOURCE_PATH + cnst.ESC + "b1_test_" + str(fold_index) + "_p" + str(pcount) + ".csv", header=None) predict_t2_test_data_partition = pObj(cnst.TIER2, None, b1_tst_datadf.iloc[:, 0], b1_tst_datadf.iloc[:, 1]) # predict_t1_test_data.xB1, predict_t1_test_data.yB1) predict_t2_test_data_partition.thd = thd2 predict_t2_test_data_partition.q_sections = q_sections predict_t2_test_data_partition.predict_section_map = section_map # predict_t2_test_data_partition.wpartition = get_partition_data("b1_test", fold_index, pcount, "t1") predict_t2_test_data_partition.spartition = get_partition_data("b1_test", fold_index, pcount, "t2") predict_t2_test_data_partition = predict_tier2(model_idx, predict_t2_test_data_partition, fold_index) # del predict_t2_test_data_partition.wpartition # Release Memory del predict_t2_test_data_partition.spartition # Release Memory gc.collect() predict_t2_test_data_all.xtrue = predict_t2_test_data_partition.xtrue if predict_t2_test_data_all.xtrue is None else np.concatenate([predict_t2_test_data_all.xtrue, predict_t2_test_data_partition.xtrue]) predict_t2_test_data_all.ytrue = predict_t2_test_data_partition.ytrue if predict_t2_test_data_all.ytrue is None else np.concatenate([predict_t2_test_data_all.ytrue, predict_t2_test_data_partition.ytrue]) predict_t2_test_data_all.yprob = predict_t2_test_data_partition.yprob if predict_t2_test_data_all.yprob is None else np.concatenate([predict_t2_test_data_all.yprob, predict_t2_test_data_partition.yprob]) predict_t2_test_data_all.ypred = predict_t2_test_data_partition.ypred if predict_t2_test_data_all.ypred is None else np.concatenate([predict_t2_test_data_all.ypred, predict_t2_test_data_partition.ypred]) logging.info("Overall Tier-2 Test data Size updated: %s", predict_t2_test_data_all.ytrue.shape) ''' predict_t2_test_data_all = select_thd_get_metrics_bfn_mfp(cnst.TIER2, predict_t2_test_data_all) display_probability_chart(predict_t2_test_data_all.ytrue, predict_t2_test_data_all.yprob, predict_t2_test_data_all.thd, "TESTING_TIER2_PROB_PLOT_F" + str(fold_index)) logging.info("List of TPs found: %s", predict_t2_test_data_all.xtrue[np.all([predict_t2_test_data_all.ytrue.ravel() == cnst.MALWARE, predict_t2_test_data_all.ypred.ravel() == cnst.MALWARE], axis=0)]) logging.info("List of New FPs : %s", predict_t2_test_data_all.xtrue[np.all([predict_t2_test_data_all.ytrue.ravel() == cnst.BENIGN, predict_t2_test_data_all.ypred.ravel() == cnst.MALWARE], axis=0)]) # RECONCILIATION OF PREDICTION RESULTS FROM TIER - 1&2 predict_t1_test_data_all.yprobB1 = np.array(predict_t1_test_data_all.yprobB1).ravel() predict_t1_test_data_all.yprobM1 = np.array(predict_t1_test_data_all.yprobM1).ravel() predict_t1_test_data_all.boosted_yprobB2 = 
np.array(predict_t1_test_data_all.boosted_yprobB2).ravel() predict_t1_test_data_all.ypredB1 = np.array(predict_t1_test_data_all.ypredB1).ravel() predict_t1_test_data_all.ypredM1 = np.array(predict_t1_test_data_all.ypredM1).ravel() predict_t1_test_data_all.boosted_ypredB2 = np.array(predict_t1_test_data_all.boosted_ypredB2).ravel() predict_t2_test_data_all.yprob = np.array(predict_t2_test_data_all.yprob).ravel() predict_t2_test_data_all.ypred = np.array(predict_t2_test_data_all.ypred).ravel() still_benign_indices = np.where(np.all([predict_t2_test_data_all.ytrue.ravel() == cnst.BENIGN, predict_t2_test_data_all.ypred.ravel() == cnst.BENIGN], axis=0))[0] predict_t2_test_data_all.yprob[still_benign_indices] = predict_t1_test_data_all.yprobB1[still_benign_indices] # Assign Tier-1 probabilities for samples that are still benign to avoid AUC conflict new_tp_indices = np.where(np.all([predict_t2_test_data_all.ytrue.ravel() == cnst.MALWARE, predict_t2_test_data_all.ypred.ravel() == cnst.MALWARE], axis=0))[0] predict_t2_test_data_all.yprob[new_tp_indices] -= predict_t2_test_data_all.yprob[new_tp_indices] + 1 new_fp_indices = np.where(np.all([predict_t2_test_data_all.ytrue.ravel() == cnst.BENIGN, predict_t2_test_data_all.ypred.ravel() == cnst.MALWARE], axis=0))[0] predict_t2_test_data_all.yprob[new_fp_indices] -= predict_t2_test_data_all.yprob[new_fp_indices] cvobj, benchmark_fpr = reconcile(predict_t1_test_data_all, predict_t2_test_data_all, cv_obj, fold_index) # benchmark_tier1(model_idx, predict_t1_test_data, fold_index, benchmark_fpr) return cvobj else: logging.info("Skipping Tier-2 prediction --- Reconciliation --- Not adding fold entry to CV AUC") return None if __name__ == '__main__': print("PREDICT MAIN") '''print("Prediction on Testing Data") #import Predict as pObj testdata = pd.read_csv(cnst.PROJECT_BASE_PATH + 'small_pkl_1_1.csv', header=None) pObj_testdata = Predict(cnst.TIER1, cnst.TIER1_TARGET_FPR, testdata.iloc[:, 0].values, testdata.iloc[:, 1].values) pObj_testdata.thd = 67.1 pObj_testdata = predict_tier1(0, pObj_testdata) # TIER-1 prediction - on training data print("TPR:", pObj_testdata.tpr, "FPR:", pObj_testdata.fpr)''' ''' def get_prediction_data(result_file): t1 = pd.read_csv(result_file, header=None) y1 = t1[1] p1 = t1[2] pv1 = t1[3] return y1, p1, pv1 '''
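# Illustrative sketch (not part of the module above): the core of
# select_decision_threshold() on toy data. The threshold is the benign-score
# percentile that leaves roughly target_fpr percent of benign samples above
# it, so classifying "prob >= thd" meets the requested false-positive rate.
# (The real function additionally floors the target and ceils the threshold.)
import numpy as np

ytrue = np.array([0, 0, 0, 0, 0, 0, 0, 0, 1, 1])        # 8 benign, 2 malware
yprob = np.array([.05, .1, .2, .3, .4, .6, .7, .9, .8, .95])

target_fpr = 12.5                                        # percent
benign = yprob[ytrue == 0]
thd = np.percentile(benign, q=100 - target_fpr)          # -> 0.725

ypred = (yprob >= thd).astype(int)
fp = np.sum((ytrue == 0) & (ypred == 1))                 # 1 of 8 benign flagged
print(fp / len(benign) * 100)                            # 12.5 (percent FPR)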
# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_config import cfg config_drive_opts = [ cfg.StrOpt('config_drive_format', default='iso9660', deprecated_for_removal=True, deprecated_since='19.0.0', deprecated_reason=""" This option was originally added as a workaround for bug in libvirt, #1246201, that was resolved in libvirt v1.2.17. As a result, this option is no longer necessary or useful. """, choices=[ ('iso9660', 'A file system image standard that is widely ' 'supported across operating systems.'), ('vfat', 'Provided for legacy reasons and to enable live ' 'migration with the libvirt driver and non-shared storage')], help=""" Configuration drive format Configuration drive format that will contain metadata attached to the instance when it boots. Related options: * This option is meaningful when one of the following alternatives occur: 1. ``force_config_drive`` option set to ``true`` 2. the REST API call to create the instance contains an enable flag for config drive option 3. the image used to create the instance requires a config drive, this is defined by ``img_config_drive`` property for that image. * A compute node running Hyper-V hypervisor can be configured to attach configuration drive as a CD drive. To attach the configuration drive as a CD drive, set the ``[hyperv] config_drive_cdrom`` option to true. """), cfg.BoolOpt('force_config_drive', default=False, help=""" Force injection to take place on a config drive When this option is set to true configuration drive functionality will be forced enabled by default, otherwise user can still enable configuration drives via the REST API or image metadata properties. Launched VMs are not affected by this option. Possible values: * True: Force to use of configuration drive regardless the user's input in the REST API call. * False: Do not force use of configuration drive. Config drives can still be enabled via the REST API or image metadata properties. Related options: * Use the 'mkisofs_cmd' flag to set the path where you install the genisoimage program. If genisoimage is in same path as the nova-compute service, you do not need to set this flag. * To use configuration drive with Hyper-V, you must set the 'mkisofs_cmd' value to the full path to an mkisofs.exe installation. Additionally, you must set the qemu_img_cmd value in the hyperv configuration section to the full path to an qemu-img command installation. """), cfg.StrOpt('mkisofs_cmd', default='genisoimage', help=""" Name or path of the tool used for ISO image creation Use the mkisofs_cmd flag to set the path where you install the genisoimage program. If genisoimage is on the system path, you do not need to change the default value. To use configuration drive with Hyper-V, you must set the mkisofs_cmd value to the full path to an mkisofs.exe installation. Additionally, you must set the qemu_img_cmd value in the hyperv configuration section to the full path to an qemu-img command installation. 
Possible values:

* Name of the ISO image creator program, in case it is in the same directory
  as the nova-compute service
* Path to ISO image creator program

Related options:

* This option is meaningful when config drives are enabled.
* To use configuration drive with Hyper-V, you must set the qemu_img_cmd
  value in the hyperv configuration section to the full path to a qemu-img
  command installation.
"""),
]


def register_opts(conf):
    conf.register_opts(config_drive_opts)


def list_opts():
    return {"DEFAULT": config_drive_opts}
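# Illustrative sketch (not part of the nova module above), showing how these
# options are typically consumed with oslo.config: register the list on a
# ConfigOpts instance and read the values back as attributes. Variable names
# here are hypothetical.
from oslo_config import cfg as _cfg

conf = _cfg.ConfigOpts()
register_opts(conf)   # registers config_drive_opts under [DEFAULT]
conf([])              # parse an empty argv, so the defaults apply

assert conf.config_drive_format == 'iso9660'
assert conf.force_config_drive is False
assert conf.mkisofs_cmd == 'genisoimage'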
#!/usr/local/bin/python3
# coding: utf-8

# YYeTsBot - __init__.py
# 9/21/21 18:09
#

__author__ = "Benny <benny.think@gmail.com>"

import requests
import logging

API = "https://yyets.dmesg.app/api/resource?"
logging.basicConfig(
    level=logging.INFO,
    format='[%(asctime)s %(filename)s:%(lineno)d %(levelname).1s] %(message)s',
    datefmt="%Y-%m-%d %H:%M:%S"
)


class Resource:
    def __init__(self):
        self.enname = None
        self.cnname = None

    def __str__(self):
        return f"{self.cnname} - {self.enname}"


class YYeTs:
    def __init__(self, keyword: str):
        self.result = []
        self.keyword = keyword
        self.search_api = f"{API}keyword={self.keyword}"
        self.resource_api = f"{API}id=%s"
        self.search()

    def search(self):
        data = requests.get(self.search_api).json()
        for item in data["data"]:
            r = Resource()
            info = item["data"]["info"]
            setattr(r, "list", self.fetch(info))
            for k, v in info.items():
                setattr(r, k, v)
            self.result.append(r)

    def fetch(self, info):
        rid = info["id"]
        url = self.resource_api % rid
        headers = {"Referer": url}
        logging.info("Fetching %s...%s", info["cnname"], url)
        return requests.get(url, headers=headers).json()["data"]["list"]

    def __str__(self):
        return f"{self.keyword} - {self.search_api}"


if __name__ == '__main__':
    ins = YYeTs("逃避")
    for i in ins.result:
        print(i)
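# Design note (illustrative, not part of YYeTsBot): building query strings by
# hand, as search_api does above, breaks for keywords that need URL escaping.
# A sketch of the safer equivalent using requests' params argument, which
# percent-encodes values automatically; the endpoint is the one assumed above.
import requests

resp = requests.get(
    "https://yyets.dmesg.app/api/resource",
    params={"keyword": "逃避"},   # encoded to keyword=%E9%80%83%E9%81%BF
)
data = resp.json()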
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import numpy as np

chars = [u" ", u"▁", u"▂", u"▃", u"▄", u"▅", u"▆", u"▇", u"█"]


def plot_hinton(arr, max_arr=None):
    if max_arr is None:
        max_arr = arr
    arr = np.array(arr)
    # Scale every cell against the largest absolute value in max_arr.
    max_val = max(abs(np.max(max_arr)), abs(np.min(max_arr)))
    print(np.array2string(arr,
                          formatter={'float_kind': lambda x: visual_hinton(x, max_val)},
                          max_line_width=5000))


def visual_hinton(val, max_val):
    # Map |val| / max_val onto the block-character ramp; the maximum magnitude
    # maps to the last (full) block.
    if abs(val) == max_val:
        step = len(chars) - 1
    else:
        step = int(abs(float(val) / max_val) * len(chars))
    colourstart = ""
    colourend = ""
    if val < 0:
        # Negative values are rendered in grey via ANSI escape codes.
        colourstart, colourend = '\033[90m', '\033[0m'
    return colourstart + chars[step] + colourend
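# Usage sketch (illustrative): render a small random matrix as a text-mode
# Hinton-style diagram. Each cell becomes one block character whose height
# tracks the magnitude; negative entries print in grey.
import numpy as np

w = np.random.randn(3, 4)
plot_hinton(w)
# Passing max_arr lets several matrices share one scale:
# plot_hinton(w[:2], max_arr=w)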
import getpass
import re

from django.core.management.base import BaseCommand
from django.contrib.auth import get_user_model


class Command(BaseCommand):
    help = 'Used to create an admin user.'
    requires_migrations_checks = True

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.UserModel = get_user_model()
        self.users_query = self.UserModel.objects.all()

    def handle(self, *args, **options):
        full_name = self.get_full_name()
        email = self.get_email()
        username = self.get_username()
        password = self.get_password()
        self.UserModel.objects.create_admin(
            full_name, username, email, password)
        self.stdout.write("Superuser created successfully.")

    def get_input_data(self, field, message):
        """ Prompt admin to input details """
        raw_value = input(message)
        return raw_value

    def validate_password(self, password):
        # Require at least one letter, one digit and one special character.
        if not re.match(r'^(?=.*[A-Za-z])(?=.*[0-9])(?=.*[^A-Za-z0-9]).*', password):  # noqa E501
            return False
        return True

    def validate_email(self, email):
        valid = re.match(
            "(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$)", email.strip())  # noqa E501
        if valid is None:
            return False
        return True

    def validate_string(self, string):
        valid = re.match("(^[a-zA-Z]+)", string)
        if valid is None:
            return False
        return True

    def get_full_name(self):
        full_name_valid = False
        while not full_name_valid:
            full_name = self.get_input_data('full_name', 'Full Name : ')
            if not self.validate_string(full_name):
                self.stderr.write(
                    "Error: Full name can only contain letters")
                continue
            full_name_valid = True
        return full_name

    def get_username(self):
        username_exist = False
        while not username_exist:
            username = self.get_input_data('username', 'Username : ')
            if self.users_query.filter(username=username):
                self.stderr.write("Error: Username already taken")
                continue
            username_exist = True
        return username

    def get_email(self):
        email_valid = False
        while not email_valid:
            email = self.get_input_data('email', 'Email : ')
            if not self.validate_email(email):
                self.stderr.write("Error: Invalid email")
                continue
            if self.users_query.filter(email=email):
                self.stderr.write("Error: Email already taken")
                continue
            email_valid = True
        return email

    def get_password(self):
        pass_match_valid = False
        while not pass_match_valid:
            password = getpass.getpass()
            confirm_password = getpass.getpass('Password (again): ')
            if password != confirm_password:
                self.stderr.write("Error: Your passwords didn't match.")
                continue
            if not self.validate_password(password):
                self.stderr.write(
                    "Error: Password must contain a letter, a number and a special character")  # noqa E501
                continue
            pass_match_valid = True
        return password
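# Illustrative sketch (not part of the command above): what the lookahead
# regex in validate_password accepts. Each (?=...) asserts, from the start of
# the string, that some character class occurs somewhere, so all three
# requirements must hold at once.
import re

PATTERN = r'^(?=.*[A-Za-z])(?=.*[0-9])(?=.*[^A-Za-z0-9]).*'

assert re.match(PATTERN, "abc1!")          # letter + digit + special: ok
assert not re.match(PATTERN, "abcdef")     # no digit, no special character
assert not re.match(PATTERN, "abc123")     # no special character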
from ._anvil_designer import html_displayTemplate from anvil import * import anvil.facebook.auth import anvil.google.auth, anvil.google.drive from anvil.google.drive import app_files import anvil.microsoft.auth import anvil.users import anvil.server import anvil.tables as tables import anvil.tables.query as q from anvil.tables import app_tables class html_display(html_displayTemplate): def __init__(self, **properties): # Set Form properties and Data Bindings. self.init_components(**properties) # Any code you write here will run when the form opens.
# -*- coding: utf-8 -*- # # Copyright 2012-2015 Spotify AB # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # """ Run Hadoop Mapreduce jobs using Hadoop Streaming. To run a job, you need to subclass :py:class:`luigi.contrib.hadoop.JobTask` and implement a ``mapper`` and ``reducer`` methods. See :doc:`/example_top_artists` for an example of how to run a Hadoop job. """ from __future__ import print_function import abc import binascii import datetime import glob import logging import os import pickle import random import re import shutil import signal try: from StringIO import StringIO except ImportError: from io import StringIO import subprocess import sys import tempfile import warnings from hashlib import md5 from itertools import groupby from cached_property import cached_property from luigi import six from luigi import configuration import luigi import luigi.contrib.hdfs import luigi.s3 from luigi import mrrunner if six.PY2: from itertools import imap as map try: # See benchmark at https://gist.github.com/mvj3/02dca2bcc8b0ef1bbfb5 import ujson as json except: import json logger = logging.getLogger('luigi-interface') _attached_packages = [] def attach(*packages): """ Attach a python package to hadoop map reduce tarballs to make those packages available on the hadoop cluster. """ _attached_packages.extend(packages) def dereference(f): if os.path.islink(f): # by joining with the dirname we are certain to get the absolute path return dereference(os.path.join(os.path.dirname(f), os.readlink(f))) else: return f def get_extra_files(extra_files): result = [] for f in extra_files: if isinstance(f, str): src, dst = f, os.path.basename(f) elif isinstance(f, tuple): src, dst = f else: raise Exception() if os.path.isdir(src): src_prefix = os.path.join(src, '') for base, dirs, files in os.walk(src): for f in files: f_src = os.path.join(base, f) f_src_stripped = f_src[len(src_prefix):] f_dst = os.path.join(dst, f_src_stripped) result.append((f_src, f_dst)) else: result.append((src, dst)) return result def create_packages_archive(packages, filename): """ Create a tar archive which will contain the files for the packages listed in packages. """ import tarfile tar = tarfile.open(filename, "w") def add(src, dst): logger.debug('adding to tar: %s -> %s', src, dst) tar.add(src, dst) def add_files_for_package(sub_package_path, root_package_path, root_package_name): for root, dirs, files in os.walk(sub_package_path): if '.svn' in dirs: dirs.remove('.svn') for f in files: if not f.endswith(".pyc") and not f.startswith("."): add(dereference(root + "/" + f), root.replace(root_package_path, root_package_name) + "/" + f) for package in packages: # Put a submodule's entire package in the archive. This is the # magic that usually packages everything you need without # having to attach packages/modules explicitly if not getattr(package, "__path__", None) and '.' 
in package.__name__:
            package = __import__(package.__name__.rpartition('.')[0], None, None, 'non_empty')

        n = package.__name__.replace(".", "/")

        if getattr(package, "__path__", None):
            # TODO: (BUG) picking only the first path does not
            # properly deal with namespaced packages in different
            # directories
            p = package.__path__[0]

            if p.endswith('.egg') and os.path.isfile(p):
                raise Exception('egg files not supported!!!')
                # Add the entire egg file
                # p = p[:p.find('.egg') + 4]
                # add(dereference(p), os.path.basename(p))

            else:
                # include __init__ files from parent projects
                root = []
                for parent in package.__name__.split('.')[0:-1]:
                    root.append(parent)
                    module_name = '.'.join(root)
                    directory = '/'.join(root)
                    add(dereference(__import__(module_name, None, None, 'non_empty').__path__[0] + "/__init__.py"),
                        directory + "/__init__.py")

                add_files_for_package(p, p, n)

                # include egg-info directories that are parallel:
                for egg_info_path in glob.glob(p + '*.egg-info'):
                    logger.debug(
                        'Adding package metadata to archive for "%s" found at "%s"',
                        package.__name__,
                        egg_info_path
                    )
                    add_files_for_package(egg_info_path, p, n)
        else:
            f = package.__file__
            if f.endswith("pyc"):
                f = f[:-3] + "py"
            if n.find(".") == -1:
                add(dereference(f), os.path.basename(f))
            else:
                add(dereference(f), n + ".py")
    tar.close()


def flatten(sequence):
    """
    A simple generator which flattens a sequence.

    Only one level is flattened.

    .. code-block:: python

        (1, (2, 3), 4) -> (1, 2, 3, 4)

    """
    for item in sequence:
        if hasattr(item, "__iter__") and not isinstance(item, str) and not isinstance(item, bytes):
            for i in item:
                yield i
        else:
            yield item


class HadoopRunContext(object):

    def __init__(self):
        self.job_id = None

    def __enter__(self):
        self.__old_signal = signal.getsignal(signal.SIGTERM)
        signal.signal(signal.SIGTERM, self.kill_job)
        return self

    def kill_job(self, captured_signal=None, stack_frame=None):
        if self.job_id:
            logger.info('Job interrupted, killing job %s', self.job_id)
            subprocess.call(['mapred', 'job', '-kill', self.job_id])
        if captured_signal is not None:
            # adding 128 gives the exit code corresponding to a signal
            sys.exit(128 + captured_signal)

    def __exit__(self, exc_type, exc_val, exc_tb):
        if exc_type is KeyboardInterrupt:
            self.kill_job()
        signal.signal(signal.SIGTERM, self.__old_signal)


class HadoopJobError(RuntimeError):

    def __init__(self, message, out=None, err=None):
        super(HadoopJobError, self).__init__(message, out, err)
        self.message = message
        self.out = out
        self.err = err


def run_and_track_hadoop_job(arglist, tracking_url_callback=None, env=None):
    """
    Runs the job by invoking the command from the given arglist.
    Finds tracking urls from the output and attempts to fetch errors using those urls if the job fails.
    Throws HadoopJobError with information about the error
    (including stdout and stderr from the process)
    on failure and returns normally otherwise.

    :param arglist:
    :param tracking_url_callback:
    :param env:
    :return:
    """
    logger.info('%s', ' '.join(arglist))

    def write_luigi_history(arglist, history):
        """
        Writes history to a file in the job's output directory in JSON format.
        Currently just for tracking the job ID in a configuration where
        no history is stored in the output directory by Hadoop.
""" history_filename = configuration.get_config().get('core', 'history-filename', '') if history_filename and '-output' in arglist: output_dir = arglist[arglist.index('-output') + 1] f = luigi.contrib.hdfs.HdfsTarget(os.path.join(output_dir, history_filename)).open('w') f.write(json.dumps(history)) f.close() def track_process(arglist, tracking_url_callback, env=None): # Dump stdout to a temp file, poll stderr and log it temp_stdout = tempfile.TemporaryFile('w+t') proc = subprocess.Popen(arglist, stdout=temp_stdout, stderr=subprocess.PIPE, env=env, close_fds=True, universal_newlines=True) # We parse the output to try to find the tracking URL. # This URL is useful for fetching the logs of the job. tracking_url = None job_id = None err_lines = [] with HadoopRunContext() as hadoop_context: while proc.poll() is None: err_line = proc.stderr.readline() err_lines.append(err_line) err_line = err_line.strip() if err_line: logger.info('%s', err_line) err_line = err_line.lower() if err_line.find('tracking url') != -1: tracking_url = err_line.split('tracking url: ')[-1] try: tracking_url_callback(tracking_url) except Exception as e: logger.error("Error in tracking_url_callback, disabling! %s", e) tracking_url_callback = lambda x: None if err_line.find('running job') != -1: # hadoop jar output job_id = err_line.split('running job: ')[-1] if err_line.find('submitted hadoop job:') != -1: # scalding output job_id = err_line.split('submitted hadoop job: ')[-1] hadoop_context.job_id = job_id # Read the rest + stdout err = ''.join(err_lines + [err_line for err_line in proc.stderr]) temp_stdout.seek(0) out = ''.join(temp_stdout.readlines()) if proc.returncode == 0: write_luigi_history(arglist, {'job_id': job_id}) return (out, err) # Try to fetch error logs if possible message = 'Streaming job failed with exit code %d. ' % proc.returncode if not tracking_url: raise HadoopJobError(message + 'Also, no tracking url found.', out, err) try: task_failures = fetch_task_failures(tracking_url) except Exception as e: raise HadoopJobError(message + 'Additionally, an error occurred when fetching data from %s: %s' % (tracking_url, e), out, err) if not task_failures: raise HadoopJobError(message + 'Also, could not fetch output from tasks.', out, err) else: raise HadoopJobError(message + 'Output from tasks below:\n%s' % task_failures, out, err) if tracking_url_callback is None: tracking_url_callback = lambda x: None return track_process(arglist, tracking_url_callback, env) def fetch_task_failures(tracking_url): """ Uses mechanize to fetch the actual task logs from the task tracker. This is highly opportunistic, and we might not succeed. So we set a low timeout and hope it works. If it does not, it's not the end of the world. TODO: Yarn has a REST API that we should probably use instead: http://hadoop.apache.org/docs/current/hadoop-yarn/hadoop-yarn-site/WebServicesIntro.html """ import mechanize timeout = 3.0 failures_url = tracking_url.replace('jobdetails.jsp', 'jobfailures.jsp') + '&cause=failed' logger.debug('Fetching data from %s', failures_url) b = mechanize.Browser() b.open(failures_url, timeout=timeout) links = list(b.links(text_regex='Last 4KB')) # For some reason text_regex='All' doesn't work... 
no idea why links = random.sample(links, min(10, len(links))) # Fetch a random subset of all failed tasks, so not to be biased towards the early fails error_text = [] for link in links: task_url = link.url.replace('&start=-4097', '&start=-100000') # Increase the offset logger.debug('Fetching data from %s', task_url) b2 = mechanize.Browser() try: r = b2.open(task_url, timeout=timeout) data = r.read() except Exception as e: logger.debug('Error fetching data from %s: %s', task_url, e) continue # Try to get the hex-encoded traceback back from the output for exc in re.findall(r'luigi-exc-hex=[0-9a-f]+', data): error_text.append('---------- %s:' % task_url) error_text.append(exc.split('=')[-1].decode('hex')) return '\n'.join(error_text) class JobRunner(object): run_job = NotImplemented class HadoopJobRunner(JobRunner): """ Takes care of uploading & executing a Hadoop job using Hadoop streaming. TODO: add code to support Elastic Mapreduce (using boto) and local execution. """ def __init__(self, streaming_jar, modules=None, streaming_args=None, libjars=None, libjars_in_hdfs=None, jobconfs=None, input_format=None, output_format=None, end_job_with_atomic_move_dir=True): def get(x, default): return x is not None and x or default self.streaming_jar = streaming_jar self.modules = get(modules, []) self.streaming_args = get(streaming_args, []) self.libjars = get(libjars, []) self.libjars_in_hdfs = get(libjars_in_hdfs, []) self.jobconfs = get(jobconfs, {}) self.input_format = input_format self.output_format = output_format self.end_job_with_atomic_move_dir = end_job_with_atomic_move_dir self.tmp_dir = False def run_job(self, job): packages = [luigi] + self.modules + job.extra_modules() + list(_attached_packages) # find the module containing the job packages.append(__import__(job.__module__, None, None, 'dummy')) # find the path to out runner.py runner_path = mrrunner.__file__ # assume source is next to compiled if runner_path.endswith("pyc"): runner_path = runner_path[:-3] + "py" base_tmp_dir = configuration.get_config().get('core', 'tmp-dir', None) if base_tmp_dir: warnings.warn("The core.tmp-dir configuration item is" " deprecated, please use the TMPDIR" " environment variable if you wish" " to control where luigi.contrib.hadoop may" " create temporary files and directories.") self.tmp_dir = os.path.join(base_tmp_dir, 'hadoop_job_%016x' % random.getrandbits(64)) os.makedirs(self.tmp_dir) else: self.tmp_dir = tempfile.mkdtemp() logger.debug("Tmp dir: %s", self.tmp_dir) # build arguments config = configuration.get_config() python_executable = config.get('hadoop', 'python-executable', 'python') map_cmd = '{0} mrrunner.py map'.format(python_executable) cmb_cmd = '{0} mrrunner.py combiner'.format(python_executable) red_cmd = '{0} mrrunner.py reduce'.format(python_executable) output_final = job.output().path # atomic output: replace output with a temporary work directory if self.end_job_with_atomic_move_dir: if isinstance(job.output(), luigi.s3.S3FlagTarget): raise TypeError("end_job_with_atomic_move_dir is not supported" " for S3FlagTarget") output_hadoop = '{output}-temp-{time}'.format( output=output_final, time=datetime.datetime.now().isoformat().replace(':', '-')) else: output_hadoop = output_final arglist = luigi.contrib.hdfs.load_hadoop_cmd() + ['jar', self.streaming_jar] # 'libjars' is a generic option, so place it first libjars = [libjar for libjar in self.libjars] for libjar in self.libjars_in_hdfs: run_cmd = luigi.contrib.hdfs.load_hadoop_cmd() + ['fs', '-get', libjar, self.tmp_dir] logger.debug(' 
'.join(run_cmd)) subprocess.call(run_cmd) libjars.append(os.path.join(self.tmp_dir, os.path.basename(libjar))) if libjars: arglist += ['-libjars', ','.join(libjars)] # Add static files and directories extra_files = get_extra_files(job.extra_files()) files = [] for src, dst in extra_files: dst_tmp = '%s_%09d' % (dst.replace('/', '_'), random.randint(0, 999999999)) files += ['%s#%s' % (src, dst_tmp)] # -files doesn't support subdirectories, so we need to create the dst_tmp -> dst manually job.add_link(dst_tmp, dst) if files: arglist += ['-files', ','.join(files)] jobconfs = job.jobconfs() for k, v in six.iteritems(self.jobconfs): jobconfs.append('%s=%s' % (k, v)) for conf in jobconfs: arglist += ['-D', conf] arglist += self.streaming_args arglist += ['-mapper', map_cmd] if job.combiner != NotImplemented: arglist += ['-combiner', cmb_cmd] if job.reducer != NotImplemented: arglist += ['-reducer', red_cmd] files = [runner_path, self.tmp_dir + '/packages.tar', self.tmp_dir + '/job-instance.pickle'] for f in files: arglist += ['-file', f] if self.output_format: arglist += ['-outputformat', self.output_format] if self.input_format: arglist += ['-inputformat', self.input_format] for target in luigi.task.flatten(job.input_hadoop()): if not isinstance(target, luigi.contrib.hdfs.HdfsTarget) \ and not isinstance(target, luigi.s3.S3Target): raise TypeError('target must be an HdfsTarget or S3Target') arglist += ['-input', target.path] if not isinstance(job.output(), luigi.contrib.hdfs.HdfsTarget) \ and not isinstance(job.output(), luigi.s3.S3FlagTarget): raise TypeError('output must be an HdfsTarget or S3FlagTarget') arglist += ['-output', output_hadoop] # submit job create_packages_archive(packages, self.tmp_dir + '/packages.tar') job.dump(self.tmp_dir) run_and_track_hadoop_job(arglist) if self.end_job_with_atomic_move_dir: luigi.contrib.hdfs.HdfsTarget(output_hadoop).move_dir(output_final) self.finish() def finish(self): # FIXME: check for isdir? if self.tmp_dir and os.path.exists(self.tmp_dir): logger.debug('Removing directory %s', self.tmp_dir) shutil.rmtree(self.tmp_dir) def __del__(self): self.finish() class DefaultHadoopJobRunner(HadoopJobRunner): """ The default job runner just reads from config and sets stuff. """ def __init__(self): config = configuration.get_config() streaming_jar = config.get('hadoop', 'streaming-jar') super(DefaultHadoopJobRunner, self).__init__(streaming_jar=streaming_jar) # TODO: add more configurable options class LocalJobRunner(JobRunner): """ Will run the job locally. This is useful for debugging and also unit testing. Tries to mimic Hadoop Streaming. 
TODO: integrate with JobTask """ def __init__(self, samplelines=None): self.samplelines = samplelines def sample(self, input_stream, n, output): for i, line in enumerate(input_stream): if n is not None and i >= n: break output.write(line) def group(self, input_stream): output = StringIO() lines = [] for i, line in enumerate(input_stream): parts = line.rstrip('\n').split('\t') blob = md5(str(i).encode('ascii')).hexdigest() # pseudo-random blob to make sure the input isn't sorted lines.append((parts[:-1], blob, line)) for _, _, line in sorted(lines): output.write(line) output.seek(0) return output def run_job(self, job): map_input = StringIO() for i in luigi.task.flatten(job.input_hadoop()): self.sample(i.open('r'), self.samplelines, map_input) map_input.seek(0) if job.reducer == NotImplemented: # Map only job; no combiner, no reducer map_output = job.output().open('w') job.run_mapper(map_input, map_output) map_output.close() return job.init_mapper() # run job now... map_output = StringIO() job.run_mapper(map_input, map_output) map_output.seek(0) if job.combiner == NotImplemented: reduce_input = self.group(map_output) else: combine_input = self.group(map_output) combine_output = StringIO() job.run_combiner(combine_input, combine_output) combine_output.seek(0) reduce_input = self.group(combine_output) job.init_reducer() reduce_output = job.output().open('w') job.run_reducer(reduce_input, reduce_output) reduce_output.close() class BaseHadoopJobTask(luigi.Task): pool = luigi.Parameter(is_global=True, default=None, significant=False) # This value can be set to change the default batching increment. Default is 1 for backwards compatibility. batch_counter_default = 1 final_mapper = NotImplemented final_combiner = NotImplemented final_reducer = NotImplemented mr_priority = NotImplemented _counter_dict = {} task_id = None @abc.abstractmethod def job_runner(self): pass def jobconfs(self): jcs = [] jcs.append('mapred.job.name="%s"' % self.task_id) if self.mr_priority != NotImplemented: jcs.append('mapred.job.priority=%s' % self.mr_priority()) pool = self.pool if pool is not None: # Supporting two schedulers: fair (default) and capacity using the same option scheduler_type = configuration.get_config().get('hadoop', 'scheduler', 'fair') if scheduler_type == 'fair': jcs.append('mapred.fairscheduler.pool=%s' % pool) elif scheduler_type == 'capacity': jcs.append('mapred.job.queue.name=%s' % pool) return jcs def init_local(self): """ Implement any work to setup any internal datastructure etc here. You can add extra input using the requires_local/input_local methods. Anything you set on the object will be pickled and available on the Hadoop nodes. """ pass def init_hadoop(self): pass def run(self): self.init_local() self.job_runner().run_job(self) def requires_local(self): """ Default impl - override this method if you need any local input to be accessible in init(). 
""" return [] def requires_hadoop(self): return self.requires() # default impl def input_local(self): return luigi.task.getpaths(self.requires_local()) def input_hadoop(self): return luigi.task.getpaths(self.requires_hadoop()) def deps(self): # Overrides the default implementation return luigi.task.flatten(self.requires_hadoop()) + luigi.task.flatten(self.requires_local()) def on_failure(self, exception): if isinstance(exception, HadoopJobError): return """Hadoop job failed with message: {message} stdout: {stdout} stderr: {stderr} """.format(message=exception.message, stdout=exception.out, stderr=exception.err) else: return super(BaseHadoopJobTask, self).on_failure(exception) DataInterchange = { "python": {"serialize": str, "internal_serialize": repr, "deserialize": eval}, "json": {"serialize": json.dumps, "internal_serialize": json.dumps, "deserialize": json.loads} } class JobTask(BaseHadoopJobTask): n_reduce_tasks = 25 reducer = NotImplemented # available formats are "python" and "json". data_interchange_format = "python" def jobconfs(self): jcs = super(JobTask, self).jobconfs() if self.reducer == NotImplemented: jcs.append('mapred.reduce.tasks=0') else: jcs.append('mapred.reduce.tasks=%s' % self.n_reduce_tasks) return jcs @cached_property def serialize(self): return DataInterchange[self.data_interchange_format]['serialize'] @cached_property def internal_serialize(self): return DataInterchange[self.data_interchange_format]['internal_serialize'] @cached_property def deserialize(self): return DataInterchange[self.data_interchange_format]['deserialize'] def init_mapper(self): pass def init_combiner(self): pass def init_reducer(self): pass def _setup_remote(self): self._setup_links() def job_runner(self): # We recommend that you define a subclass, override this method and set up your own config """ Get the MapReduce runner for this job. If all outputs are HdfsTargets, the DefaultHadoopJobRunner will be used. Otherwise, the LocalJobRunner which streams all data through the local machine will be used (great for testing). """ outputs = luigi.task.flatten(self.output()) for output in outputs: if not isinstance(output, luigi.contrib.hdfs.HdfsTarget): warnings.warn("Job is using one or more non-HdfsTarget outputs" + " so it will be run in local mode") return LocalJobRunner() else: return DefaultHadoopJobRunner() def reader(self, input_stream): """ Reader is a method which iterates over input lines and outputs records. The default implementation yields one argument containing the line for each line in the input.""" for line in input_stream: yield line, def writer(self, outputs, stdout, stderr=sys.stderr): """ Writer format is a method which iterates over the output records from the reducer and formats them for output. The default implementation outputs tab separated items. """ for output in outputs: try: output = flatten(output) if self.data_interchange_format == "json": # Only dump one json string, and skip another one, maybe key or value. output = filter(lambda x: x, output) else: # JSON is already serialized, so we put `self.serialize` in a else statement. output = map(self.serialize, output) print("\t".join(output), file=stdout) except: print(output, file=stderr) raise def mapper(self, item): """ Re-define to process an input item (usually a line of input data). Defaults to identity mapper that sends all lines to the same reducer. """ yield None, item combiner = NotImplemented def incr_counter(self, *args, **kwargs): """ Increments a Hadoop counter. 
Since counters can be a bit slow to update, this batches the updates.
        """
        threshold = kwargs.get("threshold", self.batch_counter_default)
        if len(args) == 2:
            # backwards compatibility with existing hadoop jobs
            group_name, count = args
            key = (group_name,)
        else:
            group, name, count = args
            key = (group, name)

        ct = self._counter_dict.get(key, 0)
        ct += count
        if ct >= threshold:
            new_arg = list(key) + [ct]
            self._incr_counter(*new_arg)
            ct = 0
        self._counter_dict[key] = ct

    def _flush_batch_incr_counter(self):
        """
        Increments any unflushed counter values.
        """
        for key, count in six.iteritems(self._counter_dict):
            if count == 0:
                continue
            args = list(key) + [count]
            self._incr_counter(*args)

    def _incr_counter(self, *args):
        """
        Increments a Hadoop counter.

        Note that this seems to be a bit slow, ~1 ms

        Don't overuse this function by updating very frequently.
        """
        if len(args) == 2:
            # backwards compatibility with existing hadoop jobs
            group_name, count = args
            print('reporter:counter:%s,%s' % (group_name, count), file=sys.stderr)
        else:
            group, name, count = args
            print('reporter:counter:%s,%s,%s' % (group, name, count), file=sys.stderr)

    def extra_modules(self):
        return []  # can be overridden in subclass

    def extra_files(self):
        """
        Can be overridden in subclass.

        Each element is either a string, or a pair of two strings (src, dst).

        * `src` can be a directory (in which case everything will be copied recursively).
        * `dst` can include subdirectories (foo/bar/baz.txt etc)

        Uses Hadoop's -files option so that the same file is reused across tasks.
        """
        return []

    def add_link(self, src, dst):
        if not hasattr(self, '_links'):
            self._links = []
        self._links.append((src, dst))

    def _setup_links(self):
        if hasattr(self, '_links'):
            missing = []
            for src, dst in self._links:
                d = os.path.dirname(dst)
                if d and not os.path.exists(d):
                    os.makedirs(d)
                if not os.path.exists(src):
                    missing.append(src)
                    continue
                if not os.path.exists(dst):
                    # If the combiner runs, the file might already exist,
                    # so no reason to create the link again
                    os.link(src, dst)
            if missing:
                raise HadoopJobError(
                    'Missing files for distributed cache: ' +
                    ', '.join(missing))

    def dump(self, directory=''):
        """
        Dump instance to file.
        """
        file_name = os.path.join(directory, 'job-instance.pickle')
        if self.__module__ == '__main__':
            d = pickle.dumps(self)
            module_name = os.path.basename(sys.argv[0]).rsplit('.', 1)[0]
            # the replacement must stay bytes on Python 3
            d = d.replace(b'(c__main__', b"(c" + module_name.encode('ascii'))
            open(file_name, "wb").write(d)
        else:
            pickle.dump(self, open(file_name, "wb"))

    def _map_input(self, input_stream):
        """
        Iterate over input and call the mapper for each item.
        If the job has a parser defined, the return values from the parser will
        be passed as arguments to the mapper.

        If the input is coded output from a previous run,
        the arguments will be split into key and value.
        """
        for record in self.reader(input_stream):
            for output in self.mapper(*record):
                yield output
        if self.final_mapper != NotImplemented:
            for output in self.final_mapper():
                yield output
        self._flush_batch_incr_counter()

    def _reduce_input(self, inputs, reducer, final=NotImplemented):
        """
        Iterate over input, collect values with the same key, and call the reducer for each unique key.
        """
        for key, values in groupby(inputs, key=lambda x: self.internal_serialize(x[0])):
            for output in reducer(self.deserialize(key), (v[1] for v in values)):
                yield output
        if final != NotImplemented:
            for output in final():
                yield output
        self._flush_batch_incr_counter()

    def run_mapper(self, stdin=sys.stdin, stdout=sys.stdout):
        """
        Run the mapper on the hadoop node.
""" self.init_hadoop() self.init_mapper() outputs = self._map_input((line[:-1] for line in stdin)) if self.reducer == NotImplemented: self.writer(outputs, stdout) else: self.internal_writer(outputs, stdout) def run_reducer(self, stdin=sys.stdin, stdout=sys.stdout): """ Run the reducer on the hadoop node. """ self.init_hadoop() self.init_reducer() outputs = self._reduce_input(self.internal_reader((line[:-1] for line in stdin)), self.reducer, self.final_reducer) self.writer(outputs, stdout) def run_combiner(self, stdin=sys.stdin, stdout=sys.stdout): self.init_hadoop() self.init_combiner() outputs = self._reduce_input(self.internal_reader((line[:-1] for line in stdin)), self.combiner, self.final_combiner) self.internal_writer(outputs, stdout) def internal_reader(self, input_stream): """ Reader which uses python eval on each part of a tab separated string. Yields a tuple of python objects. """ for input_line in input_stream: yield list(map(self.deserialize, input_line.split("\t"))) def internal_writer(self, outputs, stdout): """ Writer which outputs the python repr for each item. """ for output in outputs: print("\t".join(map(self.internal_serialize, output)), file=stdout)
"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.SHELL_EXEC_OPTIONS = { silent: true, }; exports.SHELL_EXEC_RETURN_ARRAY = "array"; exports.SHELL_EXEC_RETURN_STRING = "string";
""" Python 3 compatibility tools. Copied from numpy/compat/py3k. Please prefer the routines in the six module when possible. BSD license """ __all__ = ['bytes', 'asbytes', 'isfileobj', 'getexception', 'strchar', 'unicode', 'asunicode', 'asbytes_nested', 'asunicode_nested', 'asstr', 'open_latin1', 'StringIO', 'BytesIO'] import sys if sys.version_info[0] >= 3: import io StringIO = io.StringIO BytesIO = io.BytesIO bytes = bytes unicode = str asunicode = str def asbytes(s): if isinstance(s, bytes): return s return s.encode('latin1') def asstr(s): if isinstance(s, str): return s return s.decode('latin1') def isfileobj(f): return isinstance(f, io.FileIO) def open_latin1(filename, mode='r'): return open(filename, mode=mode, encoding='iso-8859-1') strchar = 'U' ints2bytes = lambda seq: bytes(seq) ZEROB = bytes([0]) FileNotFoundError = FileNotFoundError import builtins else: import StringIO StringIO = BytesIO = StringIO.StringIO bytes = str unicode = unicode asbytes = str asstr = str strchar = 'S' def isfileobj(f): return isinstance(f, file) def asunicode(s): if isinstance(s, unicode): return s return s.decode('ascii') def open_latin1(filename, mode='r'): return open(filename, mode=mode) ints2bytes = lambda seq: ''.join(chr(i) for i in seq) ZEROB = chr(0) class FileNotFoundError(IOError): pass import __builtin__ as builtins # noqa def getexception(): return sys.exc_info()[1] def asbytes_nested(x): if hasattr(x, '__iter__') and not isinstance(x, (bytes, unicode)): return [asbytes_nested(y) for y in x] else: return asbytes(x) def asunicode_nested(x): if hasattr(x, '__iter__') and not isinstance(x, (bytes, unicode)): return [asunicode_nested(y) for y in x] else: return asunicode(x)
import matplotlib.pyplot as plt from matplotlib.image import imread img = imread('./room.jpg') plt.imshow(img) plt.show()
# -*- coding: utf-8 -*- from pandas_ta import Imports from pandas_ta.overlap import hlc3, sma from pandas_ta.statistics.mad import mad from pandas_ta.utils import get_offset, verify_series def cci(high, low, close, length=None, c=None, offset=None, **kwargs): """Indicator: Commodity Channel Index (CCI)""" # Validate Arguments length = int(length) if length and length > 0 else 14 c = float(c) if c and c > 0 else 0.015 high = verify_series(high, length) low = verify_series(low, length) close = verify_series(close, length) offset = get_offset(offset) if high is None or low is None or close is None: return # Calculate Result if Imports["talib"]: from talib import CCI cci = CCI(high, low, close, length) else: typical_price = hlc3(high=high, low=low, close=close) mean_typical_price = sma(typical_price, length=length) mad_typical_price = mad(typical_price, length=length) cci = typical_price - mean_typical_price cci /= c * mad_typical_price # Offset if offset != 0: cci = cci.shift(offset) # Handle fills if "fillna" in kwargs: cci.fillna(kwargs["fillna"], inplace=True) if "fill_method" in kwargs: cci.fillna(method=kwargs["fill_method"], inplace=True) # Name and Categorize it cci.name = f"CCI_{length}_{c}" cci.category = "momentum" return cci cci.__doc__ = \ """Commodity Channel Index (CCI) Commodity Channel Index is a momentum oscillator used to primarily identify overbought and oversold levels relative to a mean. Sources: https://www.tradingview.com/wiki/Commodity_Channel_Index_(CCI) Calculation: Default Inputs: length=14, c=0.015 SMA = Simple Moving Average MAD = Mean Absolute Deviation tp = typical_price = hlc3 = (high + low + close) / 3 mean_tp = SMA(tp, length) mad_tp = MAD(tp, length) CCI = (tp - mean_tp) / (c * mad_tp) Args: high (pd.Series): Series of 'high's low (pd.Series): Series of 'low's close (pd.Series): Series of 'close's length (int): It's period. Default: 14 c (float): Scaling Constant. Default: 0.015 offset (int): How many periods to offset the result. Default: 0 Kwargs: fillna (value, optional): pd.DataFrame.fillna(value) fill_method (value, optional): Type of fill method Returns: pd.Series: New feature generated. """
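# Usage sketch (illustrative; assumes an OHLC DataFrame `df` with "high",
# "low" and "close" columns is available):
#
#   cci_14 = cci(df["high"], df["low"], df["close"], length=14, c=0.015)
#   print(cci_14.name)   # "CCI_14_0.015"
#   print(cci_14.tail())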
import React from 'react' const Django = () => ( <svg width="256px" height="326px" viewBox="0 0 256 326" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlnsXlink="http://www.w3.org/1999/xlink" preserveAspectRatio="xMidYMid"> <g fill="white"> <path d="M114.78426,0 L168.062259,0 L168.062259,244.191113 C140.771855,249.353285 120.681181,251.384192 98.944855,251.384192 C33.8730137,251.316268 0,222.24509 0,166.412124 C0,112.616857 35.9300792,77.7042732 91.6079875,77.7042732 C100.247663,77.7042732 106.830273,78.3835061 114.78426,80.4212057 L114.78426,0 Z M116.651443,124.426648 C110.411677,122.388948 105.269013,121.709714 98.6864035,121.709714 C71.7388438,121.709714 56.1737143,138.147157 56.1737143,166.953436 C56.1737143,194.998972 71.0531556,210.485489 98.3435585,210.485489 C104.24048,210.485489 109.040299,210.152664 116.651443,209.133814 L116.651443,124.426648 Z"></path> <path d="M255.186899,84.2605714 L255.186899,206.52254 C255.186899,248.628204 252.032731,268.876144 242.775936,286.332436 C234.136261,303.116288 222.753832,313.698741 199.234715,325.388343 L149.796571,302.090646 C173.315688,291.161784 184.698118,281.503088 191.966417,266.763729 C199.577559,251.691545 201.977469,234.235253 201.977469,188.319092 L201.977469,84.2605714 L255.186899,84.2605714 L255.186899,84.2605714 Z"></path> <path d="M196.608,0 L249.885999,0 L249.885999,54.1348831 L196.608,54.1348831 L196.608,0 L196.608,0 Z"></path> </g> </svg> ) export default Django
import angular from 'angular'; import uiRouter from 'angular-ui-router'; import serviceModule from './../../services/util/util.service.module' import applicationModule from './../../services/application/application.module' import homeComponent from './home.component'; let homeModule = angular.module('home', [ uiRouter, serviceModule, applicationModule ]) .config(($stateProvider, $urlRouterProvider) => { 'ngInject' $urlRouterProvider.otherwise('/'); $stateProvider .state('home', { url: '/', component: 'home', resolve:{ homeData: () => { return "data from router !!" } } }); }) .component('home', homeComponent) .name; export default homeModule;
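// Usage sketch (illustrative; the root module name 'app' and the import path
// are hypothetical — they depend on where this file lives in the project):
//
//   import homeModule from './components/home/home.module';
//   angular.module('app', [homeModule]);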
function isString(value) { return typeof value === 'string' || value instanceof String; } function contains(source, value) { if (source == undefined || value == undefined) { return false } return source.indexOf(value) > -1; } function containsAny(source, ...value) { if (source == undefined) { return false } for (let i = 0; i < value.length; i++) { if (source.indexOf(value[i]) > -1) { return true; } } return false; } function containsAll(source, ...value) { if (source == undefined) { return false } for (let i = 0; i < value.length; i++) { if (source.indexOf(value[i]) === -1) { return false; } } return true; } var CaseType = { uppercaseFirst: 1, lowercaseFirst: 2, toggleCase: 3, uppercaseAll: 4, lowercaseAll: 5 }; function toggleCase(str) { var itemText = ""; str.split("").forEach( function (item) { if (/^([a-z]+)/.test(item)) { itemText += item.toUpperCase(); } else if (/^([A-Z]+)/.test(item)) { itemText += item.toLowerCase(); } else { itemText += item; } }); return itemText; } function changeCase(source, caseType) { if (source == undefined) { return undefined } switch (caseType) { case CaseType.uppercaseFirst: return source.replace(/\b\w+\b/g, function (word) { return word.substring(0, 1).toUpperCase() + word.substring(1).toLowerCase(); }); case CaseType.lowercaseFirst: return source.replace(/\b\w+\b/g, function (word) { return word.substring(0, 1).toLowerCase() + word.substring(1).toUpperCase(); }); case CaseType.toggleCase: return toggleCase(source); case CaseType.uppercaseAll: return source.toUpperCase(); case CaseType.lowercaseAll: return source.toLowerCase(); default: return source } } function genUUID(len, radix) { var chars = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'.split( '' ); var uuid = []; var i; radix = radix || chars.length; if (len) { // Compact form for (i = 0; i < len; i++) { uuid[i] = chars[0 | (Math.random() * radix)]; } } else { // rfc4122, version 4 form var r; // rfc4122 requires these characters uuid[8] = uuid[13] = uuid[18] = uuid[23] = '-'; uuid[14] = '4'; // Fill in random data. At i==19 set the high bits of clock sequence as per // rfc4122, sec. 4.1.5 for (i = 0; i < 36; i++) { if (!uuid[i]) { r = 0 | (Math.random() * 16); uuid[i] = chars[i === 19 ? (r & 0x3) | 0x8 : r]; } } } return uuid.join(''); } module.exports = { isString, contains, containsAny, containsAll, changeCase, CaseType, genUUID };
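// Usage sketch (illustrative; assumes this module is required from Node as
// './strings' — the path is hypothetical):
//
//   const { contains, containsAll, changeCase, CaseType, genUUID } = require('./strings');
//   contains('hello world', 'world');                   // true
//   containsAll('hello world', 'hello', 'world');       // true
//   changeCase('hello world', CaseType.uppercaseFirst); // 'Hello World'
//   genUUID(8, 16);                                     // e.g. '3F09A1C4' (8 hex chars)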
#include "config.h" #include "AL/alc.h" #include "AL/al.h" #include "alMain.h" #include "defs.h" extern inline void BiquadFilter_clear(BiquadFilter *filter); extern inline void BiquadFilter_copyParams(BiquadFilter *restrict dst, const BiquadFilter *restrict src); extern inline void BiquadFilter_passthru(BiquadFilter *filter, ALsizei numsamples); extern inline ALfloat calc_rcpQ_from_slope(ALfloat gain, ALfloat slope); extern inline ALfloat calc_rcpQ_from_bandwidth(ALfloat f0norm, ALfloat bandwidth); void BiquadFilter_setParams(BiquadFilter *filter, BiquadType type, ALfloat gain, ALfloat f0norm, ALfloat rcpQ) { ALfloat alpha, sqrtgain_alpha_2; ALfloat w0, sin_w0, cos_w0; ALfloat a[3] = { 1.0f, 0.0f, 0.0f }; ALfloat b[3] = { 1.0f, 0.0f, 0.0f }; // Limit gain to -100dB assert(gain > 0.00001f); w0 = F_TAU * f0norm; sin_w0 = sinf(w0); cos_w0 = cosf(w0); alpha = sin_w0/2.0f * rcpQ; /* Calculate filter coefficients depending on filter type */ switch(type) { case BiquadType_HighShelf: sqrtgain_alpha_2 = 2.0f * sqrtf(gain) * alpha; b[0] = gain*((gain+1.0f) + (gain-1.0f)*cos_w0 + sqrtgain_alpha_2); b[1] = -2.0f*gain*((gain-1.0f) + (gain+1.0f)*cos_w0 ); b[2] = gain*((gain+1.0f) + (gain-1.0f)*cos_w0 - sqrtgain_alpha_2); a[0] = (gain+1.0f) - (gain-1.0f)*cos_w0 + sqrtgain_alpha_2; a[1] = 2.0f* ((gain-1.0f) - (gain+1.0f)*cos_w0 ); a[2] = (gain+1.0f) - (gain-1.0f)*cos_w0 - sqrtgain_alpha_2; break; case BiquadType_LowShelf: sqrtgain_alpha_2 = 2.0f * sqrtf(gain) * alpha; b[0] = gain*((gain+1.0f) - (gain-1.0f)*cos_w0 + sqrtgain_alpha_2); b[1] = 2.0f*gain*((gain-1.0f) - (gain+1.0f)*cos_w0 ); b[2] = gain*((gain+1.0f) - (gain-1.0f)*cos_w0 - sqrtgain_alpha_2); a[0] = (gain+1.0f) + (gain-1.0f)*cos_w0 + sqrtgain_alpha_2; a[1] = -2.0f* ((gain-1.0f) + (gain+1.0f)*cos_w0 ); a[2] = (gain+1.0f) + (gain-1.0f)*cos_w0 - sqrtgain_alpha_2; break; case BiquadType_Peaking: gain = sqrtf(gain); b[0] = 1.0f + alpha * gain; b[1] = -2.0f * cos_w0; b[2] = 1.0f - alpha * gain; a[0] = 1.0f + alpha / gain; a[1] = -2.0f * cos_w0; a[2] = 1.0f - alpha / gain; break; case BiquadType_LowPass: b[0] = (1.0f - cos_w0) / 2.0f; b[1] = 1.0f - cos_w0; b[2] = (1.0f - cos_w0) / 2.0f; a[0] = 1.0f + alpha; a[1] = -2.0f * cos_w0; a[2] = 1.0f - alpha; break; case BiquadType_HighPass: b[0] = (1.0f + cos_w0) / 2.0f; b[1] = -(1.0f + cos_w0); b[2] = (1.0f + cos_w0) / 2.0f; a[0] = 1.0f + alpha; a[1] = -2.0f * cos_w0; a[2] = 1.0f - alpha; break; case BiquadType_BandPass: b[0] = alpha; b[1] = 0; b[2] = -alpha; a[0] = 1.0f + alpha; a[1] = -2.0f * cos_w0; a[2] = 1.0f - alpha; break; } filter->a1 = a[1] / a[0]; filter->a2 = a[2] / a[0]; filter->b0 = b[0] / a[0]; filter->b1 = b[1] / a[0]; filter->b2 = b[2] / a[0]; } void BiquadFilter_processC(BiquadFilter *filter, ALfloat *restrict dst, const ALfloat *restrict src, ALsizei numsamples) { if(LIKELY(numsamples > 1)) { const ALfloat a1 = filter->a1; const ALfloat a2 = filter->a2; const ALfloat b0 = filter->b0; const ALfloat b1 = filter->b1; const ALfloat b2 = filter->b2; ALfloat z1 = filter->z1; ALfloat z2 = filter->z2; ALsizei i; /* Processing loop is transposed direct form II. This requires less * storage versus direct form I (only two delay components, instead of * a four-sample history; the last two inputs and outputs), and works * better for floating-point which favors summing similarly-sized * values while being less bothered by overflow. 
* * See: http://www.earlevel.com/main/2003/02/28/biquads/ */ for(i = 0;i < numsamples;i++) { ALfloat input = src[i]; ALfloat output = input*b0 + z1; z1 = input*b1 - output*a1 + z2; z2 = input*b2 - output*a2; dst[i] = output; } filter->z1 = z1; filter->z2 = z2; } else if(numsamples == 1) { ALfloat input = *src; ALfloat output = input*filter->b0 + filter->z1; filter->z1 = input*filter->b1 - output*filter->a1 + filter->z2; filter->z2 = input*filter->b2 - output*filter->a2; *dst = output; } }
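/* Usage sketch (illustrative; relies only on the declarations above):
 * configure a low-pass biquad at a tenth of the sample rate with Q derived
 * from a one-octave bandwidth, then filter a buffer in place. Processing
 * in place (dst == src) is safe because each output sample is computed
 * before the delay elements are updated.
 *
 *     BiquadFilter lp;
 *     BiquadFilter_clear(&lp);
 *     BiquadFilter_setParams(&lp, BiquadType_LowPass, 1.0f, 0.1f,
 *                            calc_rcpQ_from_bandwidth(0.1f, 1.0f));
 *     BiquadFilter_processC(&lp, samples, samples, numsamples);
 */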
""" Words/Ladder Graph ------------------ Generate an undirected graph over the 5757 5-letter words in the datafile words_dat.txt.gz. Two words are connected by an edge if they differ in one letter, resulting in 14,135 edges. This example is described in Section 1.1 in Knuth's book [1]_,[2]_. References ---------- .. [1] Donald E. Knuth, "The Stanford GraphBase: A Platform for Combinatorial Computing", ACM Press, New York, 1993. .. [2] http://www-cs-faculty.stanford.edu/~knuth/sgb.html """ __author__ = """\n""".join(['Aric Hagberg (hagberg@lanl.gov)', 'Brendt Wohlberg', 'hughdbrown@yahoo.com']) # Copyright (C) 2004-2015 by # Aric Hagberg <hagberg@lanl.gov> # Dan Schult <dschult@colgate.edu> # Pieter Swart <swart@lanl.gov> # All rights reserved. # BSD license. import networkx as nx #------------------------------------------------------------------- # The Words/Ladder graph of Section 1.1 #------------------------------------------------------------------- def generate_graph(words): from string import ascii_lowercase as lowercase G = nx.Graph(name="words") lookup = dict((c,lowercase.index(c)) for c in lowercase) def edit_distance_one(word): for i in range(len(word)): left, c, right = word[0:i], word[i], word[i+1:] j = lookup[c] # lowercase.index(c) for cc in lowercase[j+1:]: yield left + cc + right candgen = ((word, cand) for word in sorted(words) for cand in edit_distance_one(word) if cand in words) G.add_nodes_from(words) for word, cand in candgen: G.add_edge(word, cand) return G def words_graph(): """Return the words example graph from the Stanford GraphBase""" import gzip fh=gzip.open('words_dat.txt.gz','r') words=set() for line in fh.readlines(): line = line.decode() if line.startswith('*'): continue w=str(line[0:5]) words.add(w) return generate_graph(words) if __name__ == '__main__': from networkx import * G=words_graph() print("Loaded words_dat.txt containing 5757 five-letter English words.") print("Two words are connected if they differ in one letter.") print("Graph has %d nodes with %d edges" %(number_of_nodes(G),number_of_edges(G))) print("%d connected components" % number_connected_components(G)) for (source,target) in [('chaos','order'), ('nodes','graph'), ('pound','marks')]: print("Shortest path between %s and %s is"%(source,target)) try: sp=shortest_path(G, source, target) for n in sp: print(n) except nx.NetworkXNoPath: print("None")
# -*- coding: utf-8 -*-
"""Orbit Class"""

import numpy as np
from scipy.integrate import ode
import pyatmos as atm
import mayavi as mya

from . import util as ut


class Orbit:
    """
    A class to represent the orbit of the spacecraft.

    ...

    Attributes
    ----------
    spacecraft : Spacecraft
        the spacecraft that will have its orbit calculated
    environment : Environment
        the environment settings for the simulation
    tspan : int
        span of time that the simulation runs for
    dt : int
        time step for the simulation
    """

    def __init__(
            self,
            spacecraft,
            environment,
            tspan,
            dt
    ):
        self.spacecraft = spacecraft
        self.environment = environment
        self.tspan = tspan
        self.dt = dt

        # convert keplerian orbital elements to orbital state vector if necessary
        if self.spacecraft.state_format == 'koe':
            self.spacecraft.set_state0(ut.koe_to_osv(
                self.spacecraft.state0, self.environment.cb_mu))

        # calculate number of steps required
        self.n_steps = int(np.ceil(self.tspan/self.dt))

        # allocate memory for time and state arrays
        self.ts = np.zeros((self.n_steps, 1))
        self.ys = np.zeros((self.n_steps, 6))

        # initial conditions
        self.ys[0] = self.spacecraft.state0
        self.step = 1

        # set up solver
        self.solver = ode(self.diff_y)
        self.solver.set_integrator('lsoda')
        self.solver.set_initial_value(self.ys[0], self.ts[0])

        self.solve_orbit()

    def solve_orbit(self):
        # propagate the orbit
        while self.solver.successful() and self.step < self.n_steps:
            self.solver.integrate(self.solver.t+self.dt)
            self.ts[self.step] = self.solver.t
            self.ys[self.step] = self.solver.y
            self.step += 1

        self.rs = self.ys[:, :3]
        self.vs = self.ys[:, 3:]

    def diff_y(self, t, y):
        r = y[:3]
        v = y[3:]
        r_norm = np.linalg.norm(r)

        # two body acceleration
        a = -r*self.environment.cb_mu / r_norm**3

        # constant thrust
        if "thrust" in self.environment.perturbations:
            v_dir = v/np.linalg.norm(v)
            a_thrust = v_dir * self.spacecraft.engine.thrust/self.spacecraft.mass_total
            a += a_thrust

        # aerodynamic drag
        if 'aero' in self.environment.perturbations:
            # calculate air density
            z = r_norm - self.environment.cb_radius  # km
            rho = atm.coesa76(z)[0][0]*1e9  # kg / km^3

            # calculate motion of s/c wrt a rotating atmosphere
            v_rel = v - np.cross(self.environment.cb_atm_rot, r)

            # drag acts opposite to the motion relative to the atmosphere
            drag = -0.5 * rho * v_rel*np.linalg.norm(v_rel) * self.spacecraft.drag_coeff\
                * self.spacecraft.area / self.spacecraft.mass_total
            a += drag

        # J2 perturbation
        if 'J2' in self.environment.perturbations:
            z2 = r[2]**2
            r2 = r_norm**2
            tx = r[0] / r_norm*(5*z2/r2 - 1)
            ty = r[1] / r_norm*(5*z2/r2 - 1)
            tz = r[2] / r_norm*(5*z2/r2 - 3)

            a_j2 = 1.5*self.environment.cb_J2*self.environment.cb_mu\
                * self.environment.cb_radius**2 / r_norm**4 * np.array([tx, ty, tz])
            a += a_j2

        return np.concatenate([v, a])

    def plot_orbits(self, orbits):
        ut.plot_n_orbits(orbits, self.environment)
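# Usage sketch (illustrative; `Spacecraft` and `Environment` are assumed to
# be constructed elsewhere in this package with the attributes referenced
# above, e.g. `state0`, `cb_mu` and `perturbations`):
#
#   orbit = Orbit(spacecraft=sc, environment=env, tspan=86400, dt=10.0)
#   orbit.plot_orbits([orbit.rs])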
import Types from '../types' import ThemeDao from "../../expand/dao/ThemeDao"; /** * 主题变更 * @param theme * @returns {{type: string, theme: *}} */ export function onThemeChange(theme) { return { type: Types.THEME_CHANGE, theme: theme, } } /** * 初始化主题 * @returns {Function} */ export function onThemeInit() { return dispatch => { new ThemeDao().getTheme() .then((theme) => { dispatch(onThemeChange(theme)); }) } } /** * 显示或者隐藏自定义主题浮层 * @param show 显示或者隐藏 * @returns {{customThemeViewVisible: *, type: string}} */ export function onShowCustomThemeView(show) { return { type: Types.SHOW_THEME_VIEW, customThemeViewVisible: show, } }
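// Usage sketch (illustrative; assumes a redux store created with thunk
// middleware, since onThemeInit returns a function rather than an action):
//
//   store.dispatch(onThemeInit());              // load saved theme, then THEME_CHANGE
//   store.dispatch(onShowCustomThemeView(true)); // SHOW_THEME_VIEW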
document.addEventListener('DOMContentLoaded', function () { document.getElementById('main-launcher') && startProcessNetteTester(); }); function startProcessNetteTester() { var $mainLauncher = document.getElementById('main-launcher'), runSingleSelector = '.run-single', $display = document.getElementById('display'), $counter = $display.querySelector('span.counter'), total = parseInt($display.querySelector('span.total').innerHTML), $table = document.getElementById('pnt-table'), texts = JSON.parse($display.getAttribute('data-texts')), isScrollIntoViewSupported = document.body.scrollIntoView, $passedItems = document.getElementsByClassName('passed'), $failedItems = document.getElementsByClassName('failed'), $pendingItems = document.getElementsByClassName('pending'), missingAssertMessage = 'Error: This test forgets to execute an assertion.', maxTimeout = parseInt($display.getAttribute('data-max-timeout')), userSettings = { testerStopOnFail: false, testerHidePassed: false, testerCountFailed: false, testerRetryFailed: false, testerAutoScroll: false, testerShowBreadcrumbs: true }, requests = {}, pntFlags = { stop: false, bulk: false, bulkComplete: false }, COLUMN_RESULT = 3, COLUMN_TIME = 4; if (!$table) { return false; } initDom(); setupControls(); addEvents(); applySettings(); function setRowState($row, state) { $row.className = state; } function getRowState($row) { return $row.className; } function resetTable() { var $rows = $table.querySelectorAll('tbody tr'); for (var i = 0; i < $rows.length; i++) { resetRow($rows[i]); } resetDisplay(); } function resetMainLauncherText() { setMainLauncherText(texts.run, texts.stop); } function resetDisplay() { $display.className = ''; $counter.innerHTML = 0; } function clearDisplayAnimStyles() { $display.removeAttribute('data-state'); } function setPass($row) { setRowState($row, 'passed'); } function setFail($row) { setRowState($row, 'failed'); if ($failedItems.length && userSettings.testerStopOnFail) { pntFlags.stop = true; stop(); return false; } } function setTimedout($row) { setRowState($row, 'timedout'); } function resetRow($row, text) { setRowState($row, ''); $row.removeAttribute('data-has-run'); setStatusText($row, COLUMN_RESULT, text); setStatusText($row, COLUMN_TIME, texts.emptyValue); $display.removeAttribute('class'); updateDisplay(); } function setStatusText($row, column, text) { text = text || texts.emptyValue; $row.querySelector('td:nth-child(' + column + ')').innerHTML = text; } function stop($row) { var $pendingRows; if ($row && getRowState($row) === 'pending') { abortTest($row); } if (pntFlags.bulk) { $pendingRows = $($pendingItems); if ($pendingRows.length) { for (var i = 0; i < $pendingRows.length; i++) { abortTest($pendingRows[i]); } pntFlags.stop = true; } } clearDisplayAnimStyles(); return false; } function abortTest($row) { var testName = $row.getAttribute('data-test-name'), req = requests[testName]; if (req) { req.abort(); delete requests[testName]; } resetRow($row, texts.aborted); setRowState($row, 'aborted'); } function setMainLauncherText(text1, text2) { $mainLauncher.querySelector('em:nth-child(1)').innerHTML = text1; $mainLauncher.querySelector('em:nth-child(2)').innerHTML = text2; } function setBulkRunCompleted() { pntFlags.bulkComplete = true; setMainLauncherText(getRestartText(), ''); showTotalTime(); } function showTotalTime() { var totalTime = 0, $timeCells = $table.querySelectorAll('tbody tr td:last-child'); for (var i = 0; i < $timeCells.length; i++) { var time = parseFloat($timeCells[i].innerHTML); if (!isNaN(time)) { 
totalTime += time; } } $display.setAttribute('data-total-value', texts.total.replace('%f', totalTime.toFixed(4))); } function hideTotalTime() { $display.removeAttribute('data-total-value'); } function runNext() { var $row, pendingSelector = 'tbody tr.pending', hasNotRunSelector = 'tbody tr:not([data-has-run]):not(.pass):not(.pending)', $btn; // disallow run multiple (manually run multiple when bulk is running) if ($(pendingSelector).length) return false; if (pntFlags.stop) { pntFlags.stop = true; stop(); return false; } $row = $table.querySelector(hasNotRunSelector); if ($row) { $btn = $row.querySelector(runSingleSelector); if (isScrollIntoViewSupported) { if (pntFlags.bulk && userSettings.testerAutoScroll) { $btn.scrollIntoView({ behavior: 'auto', block: 'center' }); } } $btn.click(); } else { // no more rows clearDisplayAnimStyles(); pntFlags.bulk = false; setBulkRunCompleted(); } } function updateDisplay() { $display.classList.remove('all-passed'); $counter.innerHTML = userSettings.testerCountFailed ? $failedItems.length : $passedItems.length; if ($failedItems.length > 0) { $display.classList.add('has-failed'); } else { $display.classList.remove('has-failed'); } if ($passedItems.length === total) { $display.classList.add('all-passed'); setMainLauncherText(getRestartText(), ''); showTotalTime(); } // update tablesorter if (window.jQuery) { jQuery($table).trigger('update'); } } function initDom() { var $rows = $table.querySelectorAll('tbody tr'); for (var i = 0; i < $rows.length; i++) { var $row = $rows[i]; $row.setAttribute('data-test-name', $row.querySelector('td:first-child span').innerText.replace('.php', '').toLowerCase() + '__' + (i + 1)); } } function addEvents() { // start/stop with space key $(document).on('keydown', function (e) { e = e || window.event; var keyCode = e.keyCode || e.charCode || e.which, target = e.target; if ($(target).is('input, textarea')) { return true; } if (keyCode === 32) { $mainLauncher.click(); return false; } }); // main display $mainLauncher.addEventListener('click', function (e) { var itemSelector, $rows; hideTotalTime(); if (pntFlags.bulkComplete) { pntFlags.bulkComplete = false; itemSelector = userSettings.testerRetryFailed ? 
'tbody tr.failed, tbody tr.timedout' : 'tbody tr'; $rows = $table.querySelectorAll(itemSelector); for (var i = 0; i < $rows.length; i++) { $rows[i].removeAttribute('data-has-run'); } resetMainLauncherText(); if (!userSettings.testerRetryFailed) { resetTable(); resetDisplay(); } } if ($display.getAttribute('data-state')) { pntFlags.stop = true; stop(); pntFlags.bulk = false; return false; } e = e || window.event; if (e.metaKey || e.ctrlKey) { resetTable(); resetDisplay(); return false; } $display.setAttribute('data-state', 'running'); pntFlags.stop = false; pntFlags.bulk = true; runNext(); }); // run single test $table.addEventListener("click", filterEventHandler(runSingleSelector, function (e) { e = e || window.event; e.preventDefault(); var $button = e.filterdTarget, $row = $button.parentElement.parentElement, ajaxUrl = $button.getAttribute('data-url'), state = getRowState($row), testName = $row.getAttribute('data-test-name'), msg = 'Error', response, xhr; resetRow($row); hideTotalTime(); updateDisplay(); resetMainLauncherText(); if (state === 'pending') { abortTest($row); pntFlags.stop = false; return false; } if (e.metaKey || e.ctrlKey) { if (pntFlags.bulkComplete) { pntFlags.bulkComplete = false; } return false; } setRowState($row, 'pending'); xhr = new XMLHttpRequest(); xhr.open('GET', ajaxUrl, true); xhr.timeout = maxTimeout; xhr.setRequestHeader('Content-Type', 'application/json'); xhr.setRequestHeader('X-Requested-With', 'XMLHttpRequest'); xhr.ontimeout = function () { setTimedout($row); setStatusText($row, COLUMN_RESULT, texts.timedout); if (pntFlags.bulk) { runNext(); } }; xhr.onreadystatechange = function () { var DONE = 4, OK = 200, time = ''; if (xhr.readyState === DONE) { var hasMissingAssertText = xhr.responseText && xhr.responseText.indexOf(missingAssertMessage) !== -1; xhr.getAllResponseHeaders(); if (xhr.status === OK) { try { response = JSON.parse(xhr.responseText.replace(missingAssertMessage, '')); if (response.success) { if (hasMissingAssertText) { setFail($row); msg = addPre(missingAssertMessage); } else { setPass($row); msg = response.data.result; time = response.data.time; } } else { setFail($row); msg = addPre(response.data.result); } } catch (ex) { setFail($row); msg = addPre(xhr.responseText); } finally { var $msgRow = $row.querySelector('td:nth-child(3)'); $msgRow.innerHTML = msg; setBlankClass($msgRow); if (time) { var $timeRow = $row.querySelector('td:nth-child(4)'); $timeRow.innerHTML = time; setBlankClass($timeRow); } } } else { // every other error if (xhr.status !== 0) { setFail($row); msg = xhr.responseText.replace(missingAssertMessage, '').trim(); $row.querySelector('td:nth-child(3)').innerHTML = addPre(msg); } } } $row.setAttribute('data-has-run', '1'); if (pntFlags.bulk) { runNext(); } // if (!timedOut) { updateDisplay(); applySettings(); // } }; xhr.send(); requests[testName] = xhr; })); // add Tracy editor links if (document.getElementById("tracy-debug-panel-FileEditorPanel") && window.tracyFileEditorLoader) { $table.setAttribute('data-editor-available', '1'); $table.addEventListener('click', filterEventHandler('[data-editor-url]', function (e) { var url = e.filterdTarget.getAttribute('data-editor-url'); tracyFileEditorLoader.loadFileEditor(url, 1); return false; })); } } function setBlankClass($el) { if ($el.innerHtml === '') { $el.classList.add('blank'); } else { $el.classList.remove('blank'); } } function addPre(msg) { return '<pre>' + msg + '</pre>'; } function applySettings() { var $wrap = document.getElementById('ProcessNetteTester-wrap'); 
$wrap.setAttribute('data-hide-passed', userSettings.testerHidePassed ? '1' : '0'); $wrap.setAttribute('data-show-breadcrumb', userSettings.testerShowBreadcrumbs ? '1' : '0'); $counter.innerHTML = userSettings.testerCountFailed ? $failedItems.length : $passedItems.length; if (pntFlags.bulkComplete) { setMainLauncherText(getRestartText(), ''); } } function getRestartText() { return userSettings.testerRetryFailed ? texts.retryFailed : texts.restart; } function setupControls() { var $controls = document.getElementById('controls'); init(); setEvents(); function init() { var $inputs = $controls.querySelectorAll('input'); for (var i = 0; i < $inputs.length; i++) { var $input = $inputs[i], name = $input.getAttribute('value'), storedData = localStorage.getItem($input.getAttribute('value')); $input.checked = !!storedData; setSetting(name, storedData); } } function setEvents() { $controls.addEventListener('change', filterEventHandler('input', function (e) { storeData(e.filterdTarget); applySettings(); })); } function storeData($input) { var name = $input.getAttribute('value'), isChecked = $input.checked; if (isChecked) { localStorage.setItem(name, 1); } else { localStorage.removeItem(name); } setSetting(name, isChecked); } function setSetting(name, data) { if (data) { userSettings[name] = true; } else { delete userSettings[name]; } } } } // jQuery .on() equivalent var filterEventHandler = function (selector, callback) { return (!callback || !callback.call) ? null : function (e) { var target = e.target || e.srcElement || null; while (target && target.parentElement && target.parentElement.querySelectorAll) { var elms = target.parentElement.querySelectorAll(selector); for (var i = 0; i < elms.length; i++) { if (elms[i] === target) { e.filterdTarget = elms[i]; callback.call(elms[i], e); return; } } target = target.parentElement; } }; };
import React, { useContext } from "react";
import Grid from "@material-ui/core/Grid";
import Button from "@material-ui/core/Button";
import TextField from "@material-ui/core/TextField";
import { QuizCreatorContext } from "./QuizCreatorContext";
import CreateQuestion from "./CreateQuestion";
import { useHistory } from "react-router-dom";

export default function AddQuiz() {
  const { questions, submitQuiz, setName, setDesc, canSubmit } = useContext(
    QuizCreatorContext
  );
  let history = useHistory();

  return (
    <div style={{ padding: 20 }}>
      <Grid
        container
        spacing={5}
        direction="column"
        alignItems="center"
        justify="center"
      >
        <Grid
          container
          alignItems="center"
          style={{ maxWidth: 300 }}
          justify="center"
          alignContent="center"
          item
          xs={12}
        >
          <TextField
            variant="outlined"
            multiline
            placeholder="Quiz Name"
            onChange={(e) => setName(e.target.value)}
          />
          <TextField
            variant="outlined"
            multiline
            placeholder="Quiz Description"
            onChange={(e) => setDesc(e.target.value)}
          />
        </Grid>
        {questions.map((question, idx) => (
          <CreateQuestion questionObj={question} key={idx} />
        ))}
        <CreateQuestion questionObj={null} />
        <Button
          variant="contained"
          color="primary"
          onClick={() => {
            // Submit before navigating away so the quiz is not lost;
            // `disabled` already gates the click when canSubmit is false.
            submitQuiz();
            history.goBack();
          }}
          disabled={!canSubmit}
        >
          Submit Quiz
        </Button>
      </Grid>
    </div>
  );
}
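/*
 * A minimal sketch of the provider that AddQuiz consumes, assumed for
 * illustration only; the project's real QuizCreatorContext implementation is
 * not shown here, and the /api/quizzes endpoint is hypothetical. The point
 * is the contract: questions, setName, setDesc, canSubmit and submitQuiz.
 */
import React, { useState } from "react";
import { QuizCreatorContext } from "./QuizCreatorContext";

export function QuizCreatorProviderSketch({ children }) {
  const [questions, setQuestions] = useState([]);
  const [name, setName] = useState("");
  const [desc, setDesc] = useState("");

  // Assumption: a quiz is submittable once it has a name and a question.
  const canSubmit = name.trim() !== "" && questions.length > 0;

  const submitQuiz = () => {
    // Hypothetical endpoint; the real app presumably posts through its
    // own apiRequest helper instead.
    fetch("/api/quizzes", {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({ name, desc, questions }),
    });
  };

  return (
    <QuizCreatorContext.Provider
      value={{ questions, setQuestions, setName, setDesc, canSubmit, submitQuiz }}
    >
      {children}
    </QuizCreatorContext.Provider>
  );
}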
import PropTypes from 'prop-types';
import React from 'react';

import Editor from '../../../editor';
import Preview from '../../../preview';
import codeText from '../modules/examples/example.txt';
import style from './playground.module.scss';

class Playground extends React.Component {
    static propTypes = {
        className: PropTypes.string
    };

    state = {
        code: codeText
    };

    // Object ref instead of the legacy string ref (this.refs.editor)
    editorRef = React.createRef();

    handleCodeChange = (code) => {
        this.setState({code});
    };

    loadCode (code) {
        this.editorRef.current.setCode(code);
    }

    render () {
        return (
            <aside className={this.props.className}>
                <Editor
                    ref={this.editorRef}
                    className={style.editor}
                    codeText={this.state.code}
                    onChange={this.handleCodeChange}
                />
                <Preview
                    className={style.preview}
                    code={this.state.code}
                />
            </aside>
        );
    }
}

export default Playground;
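/*
 * Usage sketch for Playground above, assumed for illustration: a parent
 * keeps a ref to the class instance and pushes a snippet into the editor
 * through loadCode(). The Docs component and the snippet are hypothetical,
 * and this assumes Editor.setCode() triggers onChange so the Preview
 * updates as well.
 */
import React from 'react';
import Playground from './playground';

class Docs extends React.Component {
    playgroundRef = React.createRef();

    showExample = () => {
        // Replaces the current editor contents with a canned snippet
        this.playgroundRef.current.loadCode('console.log("hello");');
    };

    render () {
        return (
            <div>
                <button onClick={this.showExample}>Load example</button>
                <Playground ref={this.playgroundRef} />
            </div>
        );
    }
}

export default Docs;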
/* Terminal Kit Copyright (c) 2009 - 2018 Cédric Ronvel The MIT License (MIT) Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ "use strict" ; // Characters that are hard to type // Comments explain how to type it on a linux platform, using a fr layout keyboard module.exports = { password: '●' , // Currently: the same as blackCircle forwardSingleQuote: '´' , // Altgr + , overscore: '¯' , // Altgr + Shift + $ multiply: '×' , // Altgr + Shift + ; divide: '÷' , // Altgr + Shift + : // Arrows up: '↑' , // Altgr + Shift + u down: '↓' , // Altgr + u left: '←' , // Altgr + y right: '→' , // Altgr + i leftAndRight: '↔' , upAndDown: '↕' , upLeft: '↖' , upRight: '↗' , downRight: '↘' , downLeft: '↙' , upLeftAndDownRight: '⤡' , upRightAndDownLeft: '⤢' , // Those names are most common in the UTF-8 parlance northWest: '↖' , northEast: '↗' , southEast: '↘' , southWest: '↙' , northWestAndSouthEast: '⤡' , northEastAndSouthWest: '⤢' , fullBlock: '█' , upperHalfBlock: '▀' , lowerHalfBlock: '▄' , // Array of 0-8 growing/enlarging blocks growingBlock: [ ' ' , '▁' , '▂' , '▃' , '▄' , '▅' , '▆' , '▇' , '█' ] , enlargingBlock: [ ' ' , '▏' , '▎' , '▍' , '▌' , '▋' , '▊' , '▉' , '█' ] , box: { light: { vertical: '│' , horizontal: '─' , topLeft: '┌' , topRight: '┐' , bottomLeft: '└' , bottomRight: '┘' , topTee: '┬' , bottomTee: '┴' , leftTee: '├' , rightTee: '┤' , cross: '┼' } , lightRounded: { vertical: '│' , horizontal: '─' , topLeft: '╭' , topRight: '╮' , bottomLeft: '╰' , bottomRight: '╯' , topTee: '┬' , bottomTee: '┴' , leftTee: '├' , rightTee: '┤' , cross: '┼' } , heavy: { vertical: '┃' , horizontal: '━' , topLeft: '┏' , topRight: '┓' , bottomLeft: '┗' , bottomRight: '┛' , topTee: '┳' , bottomTee: '┻' , leftTee: '┣' , rightTee: '┫' , cross: '╋' } , double: { vertical: '║' , horizontal: '═' , topLeft: '╔' , topRight: '╗' , bottomLeft: '╚' , bottomRight: '╝' , topTee: '╦' , bottomTee: '╩' , leftTee: '╠' , rightTee: '╣' , cross: '╬' } , dotted: { vertical: '┊' , horizontal: '┄' , topLeft: '┌' , topRight: '┐' , bottomLeft: '└' , bottomRight: '┘' , topTee: '┬' , bottomTee: '┴' , leftTee: '├' , rightTee: '┤' , cross: '┼' } } , blackSquare: '■' , whiteSquare: '□' , blackCircle: '●' , whiteCircle: '○' , blackUpTriangle: '▲' , whiteUpTriangle: '△' , blackDownTriangle: '▼' , whiteDownTriangle: '▽' , blackLeftTriangle: '◀' , whiteLeftTriangle: '◁' , blackRightTriangle: '▶' , whiteRightTriangle: '▷' , blackDiamond: '◆' , whiteDiamond: '◇' , blackStar: '★' , whiteStar: '☆' , spadeSuit: '♠' , heartSuit: '♥' , diamondSuit: '♦' , clubSuit: '♣' , // Powerline specific 
characters (https://powerline.readthedocs.io)
	// They are displayed only with an appropriate Powerline-patched font.
	// The glyphs sit in the Unicode private use area, so they are written as
	// escapes: the standard Powerline codepoints are U+E0A0 (branch),
	// U+E0A1 (line number), U+E0A2 (read-only padlock) and U+E0B0-U+E0B3
	// (triangle/arrow separators).
	powerline: {
		branch: '\uE0A0' ,
		line: '\uE0A1' ,
		readOnly: '\uE0A2' ,
		rightTriangleSeparator: '\uE0B0' ,
		rightArrowSeparator: '\uE0B1' ,
		leftTriangleSeparator: '\uE0B2' ,
		leftArrowSeparator: '\uE0B3'
	}
} ;
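/*
 * Minimal usage sketch for the box glyph sets above, illustrative only.
 * The require path is hypothetical (in terminal-kit this module lives under
 * lib/); it prints a small frame using the "double" line style.
 */
var spChars = require( './spChars' ) ;
var b = spChars.box.double ;

console.log( b.topLeft + b.horizontal.repeat( 8 ) + b.topRight ) ;
console.log( b.vertical + ' hello  ' + b.vertical ) ;
console.log( b.bottomLeft + b.horizontal.repeat( 8 ) + b.bottomRight ) ;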
from . import views
from django.urls import path

urlpatterns = [
    path('', views.PostList.as_view(), name='home'),
    path('register', views.register_request, name='register'),
    path('login', views.login_request, name='login'),
    path('logout', views.logout_request, name='logout'),
    path('password_reset', views.password_reset_request, name='password_reset'),
    # The slug catch-all goes last so it cannot shadow the named routes above
    # (with APPEND_SLASH, a redirect to e.g. /login/ would otherwise match
    # <slug:slug>/ instead of the login view).
    path('<slug:slug>/', views.PostDetail.as_view(), name='post_detail'),
]