text
stringlengths
1
1.05M
#!/bin/bash
# Fetch the Learn Edge app's API key (Consumer Key) via the Edge Management
# API, then call the deployed API proxy with that key.
# $username, $password, $url, $org, $env, $api_domain come from the sourced
# config scripts below.

## Ask the user for input.
source ../scripts/verify_provisioning.sh
source ../../setup/userconf.sh || exit 1
get_password || exit 1

## Use the Edge Management API to get the API key.
printf "\n\nGet API key (the Consumer Key) from the Learn Edge App. Press Return to continue: \n"
read

# $(...) instead of backticks; quote every expansion so credentials or URLs
# containing spaces/special characters survive word splitting.
key=$(curl -u "$username:$password" "$url/v1/o/$org/developers/learn-edge-developer@example.com/apps/learn-edge-app" 2>/dev/null \
    | grep consumerKey | awk -F '"' '{ print $4 }')
printf "\nThe API key (Consumer Key) for the Learn Edge App is %s\n" "$key"

## Call the API
# (typo fix: "contine" -> "continue")
printf "\n\nCall the API to get the /json resource with a valid API key. Press Return to continue:\n"
read
printf "\ncurl http://%s-%s.%s/v1/learn-edge/json?apikey=%s\n\nResponse:\n" "$org" "$env" "$api_domain" "$key"
curl "http://$org-$env.$api_domain/v1/learn-edge/json?apikey=$key"

## All done.
# Load nvm (Node Version Manager) into this shell if it is installed.
export NVM_DIR="$HOME/.nvm"
# --no-use: load nvm without activating a Node version (faster shell startup).
[ -s "$NVM_DIR/nvm.sh" ] && . "$NVM_DIR/nvm.sh" --no-use # This loads nvm

# nvm bash completion is not compatible with zsh
#[ -s "$NVM_DIR/bash_completion" ] && \. "$NVM_DIR/bash_completion" # This loads nvm bash_completion
package org.mnode.jot4j.dynamodb.mapper;

import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBTypeConverter;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.module.SimpleModule;
import net.fortuna.ical4j.model.component.VJournal;
import org.mnode.ical4j.json.jot.JotJournalMapper;
import org.mnode.ical4j.json.jot.JotJournalSerializer;

import java.io.IOException;
import java.util.Map;

/**
 * DynamoDB type converter that maps an iCal4j {@link VJournal} to a
 * {@code Map<String, String>} and back by round-tripping the value through
 * its Jackson JSON representation.
 */
public class VJournalConverter implements DynamoDBTypeConverter<Map<String, String>, VJournal> {

    // Jackson mapper configured once with the jot (de)serializers for VJournal.
    private final ObjectMapper mapper;

    public VJournalConverter() {
        SimpleModule module = new SimpleModule();
        module.addDeserializer(VJournal.class, new JotJournalMapper(VJournal.class));
        module.addSerializer(VJournal.class, new JotJournalSerializer(VJournal.class));
        mapper = new ObjectMapper();
        mapper.registerModule(module);
    }

    /**
     * Serializes the journal to JSON, then re-reads that JSON as a Map.
     * NOTE(review): an IOException is only printed and {@code null} is
     * returned — callers must tolerate a null result.
     */
    @Override
    public Map<String, String> convert(VJournal object) {
        try {
            return mapper.readValue(mapper.writeValueAsString(object), Map.class);
//            return mapper.writeValueAsString(object);
        } catch (IOException e) {
            e.printStackTrace();
        }
        return null;
    }

    /** Inverse of {@link #convert}: Map -> JSON -> VJournal (null on error). */
    @Override
    public VJournal unconvert(Map<String, String> object) {
        try {
            return mapper.readValue(mapper.writeValueAsString(object), VJournal.class);
//            return mapper.readValue(object, VJournal.class);
        } catch (IOException e) {
            e.printStackTrace();
        }
        return null;
    }
}
#!/bin/bash
# Copyright (c) Nathan Lampi
#
# This code is licensed under the MIT License
# See the LICENSE file in the root directory

# Generate API documentation with jazzy.
command -v jazzy >/dev/null 2>&1 || { echo "jazzy is required (https://github.com/realm/jazzy)" >&2; exit 1; }

# Document via jazzy
# BUG FIX: jazzy long options are written "--clean", not "-- clean".
# A bare "--" terminates option parsing, so none of these flags were
# actually being passed to jazzy.
jazzy \
  --clean \
  --author 'Nathan Lampi' \
  --author_url 'https://nathanlampi.com' \
  --github_url 'https://github.com/nlampi/SwiftGridView'
// No copyright - copy as you please #pragma once #include <Engine/UserDefinedEnum.h> #include "EMonthNames.generated.h" UENUM(BlueprintType) //"BlueprintType" is essential to include enum class EMonthNames_Enum : uint8 { January UMETA(DisplayName = "January"), February UMETA(DisplayName = "February"), March UMETA(DisplayName = "March"), April UMETA(DisplayName = "April"), May UMETA(DisplayName = "May"), June UMETA(DisplayName = "June"), July UMETA(DisplayName = "July"), August UMETA(DisplayName = "August"), September UMETA(DisplayName = "September"), October UMETA(DisplayName = "October"), November UMETA(DisplayName = "November"), December UMETA(DisplayName = "December") }; UCLASS() class TD2020_API UEMonthNames : public UUserDefinedEnum { GENERATED_BODY() public: UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Enum") EMonthNames_Enum MonthNames; //Rest of Class Code };
// Expose Vue globally, then mount the guest-facing root component.
window.Vue = require('vue');

import App from '../views/guest/App'

// Create the root Vue instance and render <App> into the #root element.
const app = new Vue({
    el: '#root',
    render : h => h(App)
});
package com.revature.service;

import com.revature.dao.ReimbursementDao;
import com.revature.dao.UserDao;
import com.revature.dto.ReimbursementDTO;
import com.revature.exception.ReimbursementNotFoundException;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;

import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

/**
 * Unit tests for {@code ReimbursementService}. Both DAOs are mocked with
 * Mockito, so no database is touched.
 */
class ReimbursementServiceTest {

    /** The service should return exactly the list supplied by the DAO. */
    @Test
    void testGetAllReimbursements() throws SQLException {
        UserDao mockedUser = mock(UserDao.class);
        ReimbursementDao mockedReimb = mock(ReimbursementDao.class);
        ReimbursementService reimbursementService = new ReimbursementService(mockedUser, mockedReimb);

        List<ReimbursementDTO> fakeReimbs = new ArrayList<>();
        fakeReimbs.add(new ReimbursementDTO(1, 150, "2021-05-03", "", "description 1", "http://", "<NAME>", 0, "", 1, "Pending", 1, "Lodging"));
        fakeReimbs.add(new ReimbursementDTO(2, 200, "2021-05-03", "", "description 2", "http://", "<NAME>", 0, "", 1, "Pending", 2, "Travel"));
        fakeReimbs.add(new ReimbursementDTO(3, 120, "2021-05-03", "", "description 3", "http://", "<NAME>", 0, "", 1, "Pending", 3, "Food"));
        when(mockedReimb.getAllReimbursements()).thenReturn(fakeReimbs);

        List<ReimbursementDTO> actual = reimbursementService.getAllReimbursements();
        List<ReimbursementDTO> expected = new ArrayList<>(fakeReimbs);
        Assertions.assertEquals(expected, actual);
    }

    /** Filtering by user id should pass through the DAO result unchanged. */
    @Test
    void testGetAllReimbursementsByUserId() throws SQLException {
        UserDao mockedUser = mock(UserDao.class);
        ReimbursementDao mockedReimb = mock(ReimbursementDao.class);
        ReimbursementService reimbursementService = new ReimbursementService(mockedUser, mockedReimb);

        List<ReimbursementDTO> fakeReimbs = new ArrayList<>();
        fakeReimbs.add(new ReimbursementDTO(1, 150, "2021-05-03", "", "description 1", "http://", "<NAME>", 0, "", 1, "Pending", 1, "Lodging"));
        fakeReimbs.add(new ReimbursementDTO(2, 200, "2021-05-03", "", "description 2", "http://", "<NAME>", 0, "", 1, "Pending", 2, "Travel"));
        fakeReimbs.add(new ReimbursementDTO(3, 120, "2021-05-03", "", "description 3", "http://", "<NAME>", 0, "", 1, "Pending", 3, "Food"));
        when(mockedReimb.getAllReimbursementsByUserId(eq(1))).thenReturn(fakeReimbs);

        List<ReimbursementDTO> actual = reimbursementService.getAllReimbursementsByUserId(1);
        List<ReimbursementDTO> expected = new ArrayList<>(fakeReimbs);
        Assertions.assertEquals(expected, actual);
    }

    /** Service parses String arguments into ints before delegating to the DAO. */
    @Test
    void testCreateReimbursement() throws SQLException, IOException {
        UserDao mockedUser = mock(UserDao.class);
        ReimbursementDao mockedReimb = mock(ReimbursementDao.class);
        ReimbursementService reimbursementService = new ReimbursementService(mockedUser, mockedReimb);

        ReimbursementDTO fakeReimb = new ReimbursementDTO(1, 150, "2021-05-03", "", "description 1", "http://", "<NAME>", 0, "", 1, "Pending", 1, "Lodging");
        when(mockedReimb.createReimbursement(1, 150, "description 1", 1, null)).thenReturn(fakeReimb);

        ReimbursementDTO actual = reimbursementService.createReimbursement("1", "150", "description 1", "1", null);
        Assertions.assertEquals(fakeReimb, actual);
    }

    /** Same parse-then-delegate contract for updates. */
    @Test
    void testUpdateReimbursement() throws SQLException, IOException {
        UserDao mockedUser = mock(UserDao.class);
        ReimbursementDao mockedReimb = mock(ReimbursementDao.class);
        ReimbursementService reimbursementService = new ReimbursementService(mockedUser, mockedReimb);

        ReimbursementDTO fakeReimb = new ReimbursementDTO(1, 150, "2021-05-03", "", "description 1", "http://", "<NAME>", 0, "", 1, "Pending", 1, "Lodging");
        when(mockedReimb.updateReimbursement(1, 10, 150, "description 1", 1, null)).thenReturn(fakeReimb);

        ReimbursementDTO actual = reimbursementService.updateReimbursement("1", "10", "150", "description 1", "1", null);
        Assertions.assertEquals(fakeReimb, actual);
    }

    /** Deletion returns the DAO's boolean result. */
    @Test
    void testDeleteReimbursement() throws SQLException {
        UserDao mockedUser = mock(UserDao.class);
        ReimbursementDao mockedReimb = mock(ReimbursementDao.class);
        ReimbursementService reimbursementService = new ReimbursementService(mockedUser, mockedReimb);

        when(mockedReimb.deleteReimbursement(1, 10)).thenReturn(true);

        boolean actual = reimbursementService.deleteReimbursement("1", "10");
        Assertions.assertEquals(true, actual);
    }

    /** Authorizing an existing reimbursement returns the DAO's row count. */
    @Test
    void testAuthorizeReimbursement() throws SQLException, ReimbursementNotFoundException {
        UserDao mockedUser = mock(UserDao.class);
        ReimbursementDao mockedReimb = mock(ReimbursementDao.class);
        ReimbursementService reimbursementService = new ReimbursementService(mockedUser, mockedReimb);

        ReimbursementDTO fakeReimb = new ReimbursementDTO(10, 150, "2021-05-03", "", "description 1", "http://", "<NAME>", 0, "", 1, "Pending", 1, "Lodging");
        when(mockedReimb.getReimbursementById(eq(10))).thenReturn(fakeReimb);
        when(mockedReimb.authorizeReimbursement(eq(10), eq(2), eq(2))).thenReturn(1);

        int actual = reimbursementService.authorizeReimbursement("10", "2", 2);
        Assertions.assertEquals(1, actual);
    }

    /**
     * With no DAO stubbing, the lookup yields nothing and the service must
     * raise ReimbursementNotFoundException.
     */
    @Test
    void testAuthorizeReimbursementFail() {
        UserDao mockedUser = mock(UserDao.class);
        ReimbursementDao mockedReimb = mock(ReimbursementDao.class);
        ReimbursementService reimbursementService = new ReimbursementService(mockedUser, mockedReimb);

        Assertions.assertThrows(ReimbursementNotFoundException.class, () -> {
            reimbursementService.authorizeReimbursement("10", "2", 2);
        });
    }
}
package cyclops.reactive.subscription;

import cyclops.async.queue.Queue;

/**
 * Control surface for a continuable stream subscription: registers skip,
 * limit and time-limit constraints and closes the async {@link Queue}s that
 * feed the stream. (Interface only — semantics below are inferred from the
 * method names; confirm against the implementing class.)
 */
public interface Continueable {
    /** Close the given queue if the stream it feeds has finished. */
    void closeQueueIfFinished(Queue queue);

    /** Register a queue to be managed by this subscription. */
    void addQueue(Queue queue);

    /** Record a skip(n) request. */
    void registerSkip(long skip);

    /** Record a limit(n) request. */
    void registerLimit(long limit);

    /** Close all managed queues, starting from the given one. */
    void closeAll(Queue q);

    /** @return true when this subscription has been closed. */
    boolean closed();

    /** Stateless variant of {@link #closeQueueIfFinished(Queue)}. */
    void closeQueueIfFinishedStateless(Queue queue);

    /** Close all managed queues. */
    void closeAll();

    /** @return the currently registered time limit in nanoseconds. */
    long timeLimit();

    /** Register a time limit in nanoseconds. */
    void registerTimeLimit(long nanos);
}
/* lib/car/obj/src/unbind_pages.c */
/*
**** Notes

Unmap out of the RAM

Remarks:
Refer at fn. bind_pages.
//*/
# define CAR
# include "../../../incl/config.h"

/*
 * Unbind the pages described by *argp: verify the client is initialized,
 * unmap the pages, confirm no lead page remains, then re-initialize.
 * Returns 0 on any failure or the result of unmap_pages() on success.
 * NOTE(review): CLI_BASE/CLI_LEAD/R() are offset/accessor macros from
 * config.h — exact semantics not visible here; confirm before changing.
 */
signed(__cdecl unbind_pages(page_t(*argp)))
{
	auto page_t *page;
	auto signed r;
	auto signed short flag;

	/* Null argument: nothing to unbind. */
	if(!argp)
		return(0x00);

	/* Read the client flag word; bail out unless the client is initialized. */
	flag = (*(CLI_BASE+(R(flag,*argp))));
	if(!(CLI_INIT&(flag)))
		return(0x00);

	r = unmap_pages(argp);

	/* A lead page still present after unmapping appears to indicate failure
	   — TODO confirm against bind_pages. */
	page = (*(CLI_LEAD+(R(page,*argp))));
	if(page)
		return(0x00);

	/* Re-initialize the page bookkeeping for later reuse. */
	if(!(init_pages(0x01,argp)))
		return(0x00);
	return(r);
}
// PROBLEM 1 - Two Sum
// Author: <NAME>
// https://leetcode.com/problems/two-sum/
// ============================================================================
// Given an array of integers nums and an integer target, return indices of the two numbers such that they add up to target.
// You may assume that each input would have exactly one solution, and you may not use the same element twice.
// You can return the answer in any order.
// Example 1:
// Input: nums = [2,7,11,15], target = 9
// Output: [0,1]
// Output: Because nums[0] + nums[1] == 9, we return [0, 1].
// Example 2:
// Input: nums = [3,2,4], target = 6
// Output: [1,2]
// Example 3:
// Input: nums = [3,3], target = 6
// Output: [0,1]
// Constraints:
// 2 <= nums.length <= 10^5
// -10^9 <= nums[i] <= 10^9
// -10^9 <= target <= 10^9
// Only one valid answer exists.

// PROBLEM 2 - Reverse a linked list
// ============================================================================
// Amazon and Microsoft ask you to show an algorithm that will reverse a singly-linked
// list, that is, a list that is made of nodes between which there is a unidirectional
// association as in the following image.

// PROBLEM 3 - The missing value
// ============================================================================
// Amazon and Microsoft ask you to derive an algorithm that will inspect an
// array of numbers that contains the values between 0 and the length of the
// list, inclusive, and find the missing value. For example, you may be given an
// array of length 6 that contains
// [0, 2, 3, 4, 5, 6]
// It is your job to determine that the missing value from the array is 1.

// PROBLEM 4 - Stack min
// ============================================================================
// Google and Apple ask you to design a stack that, in addition to the push and
// pop functions, has a function min that returns the minimum element in the stack
// without removing it.
// All three functions push, pop, and min should operate in O(1) time.

// Singly-linked-list node.
class Node {
  constructor(val) {
    this.value = val;
    this.next = null;
  }
}

// Linked-list stack with an auxiliary min stack (minArr): the last element of
// minArr is always the minimum of the whole stack, so min() is O(1).
class Stack {
  constructor() {
    this.top = null;
    this.length = 0;
    this.minArr = [];
  }

  push(val) {
    let newNode = new Node(val);
    if (this.length === 0) {
      this.top = newNode;
      this.minArr.push(newNode.value);
    } else {
      newNode.next = this.top;
      this.top = newNode;
      // BUG FIX: was `<`. Equal values must also be recorded in minArr;
      // otherwise popping one duplicate of the minimum removes the min entry
      // that the remaining duplicate still needs.
      if (newNode.value <= this.min()) {
        this.minArr.push(newNode.value);
      }
    }
    this.length++
  }

  pop() {
    const removedTop = this.top;
    if (this.length === 0) return null;
    if (this.length === 1) {
      this.top = null;
      // BUG FIX: was `minArr = []` (missing `this.`), which created an
      // accidental global and left the instance's min stack stale.
      this.minArr = [];
    } else {
      this.top = removedTop.next;
      if (removedTop.value === this.min()) this.minArr.pop()
    }
    this.length--
    return removedTop.value;
  }

  // O(1): the current minimum is the last entry pushed onto minArr.
  min() {
    return this.minArr[this.minArr.length - 1];
  }
}

// const stack = new Stack();
// stack.push(5)
// stack.push(2)
// stack.push(1)
// stack.push(-1)
// stack.push(7)
// stack.pop()
// stack.pop()
// stack.pop()
// stack.pop()
// console.log(stack);
// console.log('min:', stack.min());

// PROBLEM 5 - Test a retractable ballpoint pen
// ============================================================================
// Facebook asks you to write the test cases for testing a ballpoint pen.
// What would you consider to be good tests for the pen?
// Try to be as exhaustive as possible.

// PROBLEM 6 - OOParking Lot
// ============================================================================
// Amazon and Microsoft ask you to specify the classes that it would take to write
// software to manage a paid parking lot.
// It should know where cars are parked,
// be able to identify the cars, know where the keys are hanging, how many cars
// are in the lot, what time the cars come and go, and how much it costs someone
// when they leave the parking lot based on the following schedule:
// Time -> Rate per hour
// 8pm - 6am  $3
// 6am - noon $10
// noon - 6pm $8
// 6pm - 8pm  $6

// PROBLEM 7 - Valid Parentheses
// https://leetcode.com/problems/valid-parentheses/
// ============================================================================
// Given a string s containing just the characters '(', ')', '{', '}', '[' and ']',
// determine if the input string is valid.
// An input string is valid if:
// Open brackets must be closed by the same type of brackets.
// Open brackets must be closed in the correct order.
# Build the Cython extension modules in place (next to their sources).
python3 setup.py build_ext --inplace
#python ./app/api_v1/my_calculation_module_directory/CM/helper_functions/cyf/compile_cython_files.py build_ext --inplace
// Initial (default) application state tree.
export default {
  // Fuel-savings calculator inputs and computed results.
  fuelSavings: {
    newMpg: '',
    tradeMpg: '',
    newPpg: '',
    tradePpg: '',
    milesDriven: '',
    milesDrivenTimeframe: 'week',
    displayResults: false,
    dateModified: null,
    necessaryDataIsProvidedToCalculateSavings: false,
    savings: {
      monthly: 0,
      annual: 0,
      threeYear: 0
    }
  },
  // Interview-room list plus the currently edited room.
  roomsStates: {
    rooms: [],
    room: {
      id: undefined,
      name: '',
      logo: '',
      interviewer: '',
      candidates: [],
      problems: []
    },
    isWaiting: false
  },
  problemStates: {
    problems: [],
    isWaiting: false
  },
  // Authentication state.
  user: {
    isLogin: false,
    // BUG FIX: the source contained the literal placeholder "<PASSWORD>",
    // which is not valid JavaScript. A "wrong password" indicator starts as
    // false (no failed attempt yet) — TODO confirm against the mutation that
    // toggles this flag.
    wrongPassword: false,
    token: null,
    type: null,
    info: {},
    isWaiting: false
  },
  candidatesStates: {
    candidates: [],
    isWaiting: false,
    templateUrl: {}
  }
};
/*
 * Mobile "rem" layout bootstrap: sets the root element's font-size from the
 * viewport width (design width 750px -> width / 7.5, clamped), tags the root
 * with data-dpr, and re-runs on resize / bfcache restore with a 300 ms
 * debounce. Exposes the computed base as window.rem.
 */
!(function (window) {
  // Root font-size = viewport width / 7.5, clamped to [42, 100 * dpr] px.
  function setFontSize () {
    var d = dom.getBoundingClientRect().width
    var e = (d / 7.5 > 100 * B ? 100 * B : (d / 7.5 < 42 ? 42 : d / 7.5))
    dom.style.fontSize = e + "px"
    window.rem = e
  }

  var timer,
    document = window.document,
    dom = document.documentElement,
    viewport = document.querySelector('meta[name="viewport"]'),
    B = 0,  // device pixel ratio to use (dpr)
    A = 0   // page scale (1 / dpr)

  // Derive dpr from an explicit initial-scale on an existing viewport meta.
  if (viewport) {
    var y = viewport.getAttribute("content").match(/initial\-scale=([\d\.]+)/)
    y && (A = parseFloat(y[1]), B = parseInt(1 / A))
  }

  // Otherwise pick a dpr: iPhone gets up to 3 from devicePixelRatio;
  // everything else (including Android) falls back to 1.
  if (!B && !A) {
    var u = (window.navigator.appVersion.match(/android/gi), window.navigator.appVersion.match(/iphone/gi)),
      t = window.devicePixelRatio
    B = u ? t >= 3 && (!B || B >= 3) ? 3 : t >= 2 && (!B || B >= 2) ? 2 : 1 : 1, A = 1 / B
  }

  // Tag the root with the chosen dpr; create a viewport meta when missing.
  // NOTE(review): the `!dom` test can never be true here (dom was already
  // dereferenced above), so this whole branch looks dead — confirm intent.
  if (dom.setAttribute("data-dpr", B), !dom) {
    if (viewport = document.createElement("meta"), viewport.setAttribute("name", "viewport"), viewport.setAttribute("content", "initial-scale=" + A + ", maximum-scale=" + A + ", minimum-scale=" + A + ", user-scalable=no"), dom.firstElementChild) {
      dom.firstElementChild.appendChild(viewport)
    } else {
      var s = document.createElement("div")
      s.appendChild(viewport), document.write(s.innerHTML)
    }
  }

  // Recompute on resize and bfcache restore, debounced to 300 ms.
  window.addEventListener("resize", function () {
    clearTimeout(timer), timer = setTimeout(setFontSize, 300)
  }, !1), window.addEventListener("pageshow", function (b) {
    b.persisted && (clearTimeout(timer), timer = setTimeout(setFontSize, 300))
  }, !1), setFontSize()
}(window))
#!/bin/bash ## Dockerfile for compilation environment : C/C++ and make # Exit on any non-zero status. trap 'exit' ERR set -E clean=${1:-n} clean=${clean:0:1} clean=${clean,,[N]} echo "Uninstalling g++, gcc and co ..." apt-get -qy update apt-get purge -y \ gcc \ g++ \ libc6-dev \ autoconf \ automake \ pkg-config \ bison \ flex \ libtool \ gettext \ make \ cmake [ "${clean}" != "n" ] && { apt-get clean -y rm -rf /var/lib/apt/lists/* } exit 0
/**
 * Created by glenn on 16.10.19.
 */
'use strict';
const Generator = require('yeoman-generator');
const chalk = require('chalk');
const yosay = require('yosay');
const _ = require('lodash');
const fs = require('fs');
const path = require('path');

// Yeoman generator that scaffolds a widget from EJS templates.
module.exports = class extends Generator {
  constructor(args, opts) {
    super(args, opts);
    // Optional positional argument selecting what to generate (defaults to "widget").
    this.argument('type', { type: String, required: false });
  }

  // Yeoman "prompting" priority: ask for widget name/description.
  prompting() {
    return this._doAction('PROMPTING');
  }

  // Yeoman "writing" priority: render the templates into the destination.
  writing() {
    return this._doAction('WRITING');
  }

  // Dispatch to the action registered for this.options.type under the given
  // priority, falling back to the "widget" action for unknown types.
  _doAction(priorityType) {
    const actionsGeneratorsByPriorityTypes = {
      PROMPTING: this._getPromptingActions,
      WRITING: this._getWritingActions
    };
    const actionsGenerator = actionsGeneratorsByPriorityTypes[priorityType];
    if (_.isFunction(actionsGenerator)) {
      const action = actionsGenerator()[this.options.type];
      if (_.isFunction(action)) {
        return action.call(this);
      }
      return actionsGenerator().widget.call(this);
    }
  }

  _getPromptingActions() {
    return {
      widget() {
        return this.prompt([
          {
            type: 'input',
            name: 'widgetName',
            message: 'Your widget name',
            default: 'hello'
          },
          {
            type: 'input',
            name: 'widgetDesc',
            message: 'Your widget description',
            default: 'Displays that classic "hello, world" string'
          }
        ]).then(answers => {
          this.answers = answers;
        });
      }
    };
  }

  _getWritingActions() {
    return {
      widget() {
        const context = getWidgetContext(this.answers);
        const { kebabCasedWidgetName } = context;
        // Copy every template under templates/widget, renaming "widget" to
        // "<name>-widget" and stripping the ".ejs" extension on the way out.
        const templateFilenames = fs.readdirSync(this.templatePath('widget'));
        const renamedTemplateFilenames = _.map(templateFilenames, filename =>
          filename.replace('widget', `${kebabCasedWidgetName}-widget`)
        );
        const templatePaths = _.map(templateFilenames, filename =>
          this.templatePath(path.join('widget', filename))
        );
        const destinationPaths = _.map(renamedTemplateFilenames, filename =>
          this.destinationPath(
            path.join(`${kebabCasedWidgetName}-widget`, filename.replace('.ejs', ''))
          )
        );
        _.zip(templatePaths, destinationPaths).forEach(([templatePath, destinationPath]) => {
          this.fs.copyTpl(templatePath, destinationPath, context);
        });
      }
    };
  }
};

// Template context: the widget name in every casing the templates use.
function getWidgetContext({ widgetName, widgetDesc }) {
  return {
    widgetDesc,
    kebabCasedWidgetName: getKebabCaseWidgetName(widgetName),
    camelCasedWidgetName: getCamelCaseWidgetName(widgetName),
    pascalCasedWidgetName: getPascalCaseWidgetName(widgetName),
    capitalizedWidgetName: getCapitalizeWidgetName(widgetName)
  };
}

function getKebabCaseWidgetName(widgetName) {
  return _.flow([getTruncatedWidgetName, _.kebabCase])(widgetName);
}

function getCamelCaseWidgetName(widgetName) {
  return _.flow([getTruncatedWidgetName, _.camelCase])(widgetName);
}

function getPascalCaseWidgetName(widgetName) {
  return _.flow([getTruncatedWidgetName, _.camelCase, _.upperFirst])(widgetName);
}

function getCapitalizeWidgetName(widgetName) {
  return _.flow([getTruncatedWidgetName, _.capitalize])(widgetName);
}

// Keep at most the first three words of the supplied name.
function getTruncatedWidgetName(widgetName) {
  return _(widgetName)
    .chain()
    .words()
    .take(3)
    .join(' ')
    .value();
}
package migrate

import (
	"context"
	"database/sql"
	"errors"
	"reflect"

	"github.com/go-gorp/gorp"

	"github.com/ovh/cds/engine/api/integration"
	"github.com/ovh/cds/engine/api/secret"
	"github.com/ovh/cds/sdk"
	"github.com/ovh/cds/sdk/log"
)

// RefactorIntegrationModelCrypto migrates every integration_model row that has
// not yet been signed (sig IS NULL) to the new crypto scheme. Each row is
// migrated in its own transaction; per-row failures are collected into a
// MultiError rather than aborting the whole run.
func RefactorIntegrationModelCrypto(ctx context.Context, db *gorp.DbMap) error {
	query := "SELECT id FROM integration_model WHERE sig IS NULL"

	rows, err := db.Query(query)
	if err == sql.ErrNoRows {
		return nil
	}
	if err != nil {
		return sdk.WithStack(err)
	}

	// Collect ids first so the cursor is closed before migrating.
	var ids []int64
	for rows.Next() {
		var id int64
		if err := rows.Scan(&id); err != nil {
			rows.Close() // nolint
			return sdk.WithStack(err)
		}
		ids = append(ids, id)
	}
	if err := rows.Close(); err != nil {
		return sdk.WithStack(err)
	}

	var mError = new(sdk.MultiError)
	for _, id := range ids {
		if err := refactorIntegrationModelCrypto(ctx, db, id); err != nil {
			mError.Append(err)
			log.Error(ctx, "migrate.RefactorIntegrationModelCrypto> unable to migrate integration_model %d: %v", id, err)
		}
	}
	if mError.IsEmpty() {
		return nil
	}
	return mError
}

// refactorIntegrationModelCrypto re-encrypts a single integration_model row:
// decrypt the stored public configurations, update the row (which re-signs /
// re-encrypts it), then reload and compare to verify the round trip.
func refactorIntegrationModelCrypto(ctx context.Context, db *gorp.DbMap, id int64) error {
	log.Info(ctx, "migrate.refactorIntegrationModelCrypto> integration_model %d migration begin", id)

	tx, err := db.Begin()
	if err != nil {
		return sdk.WithStack(err)
	}
	defer tx.Rollback() // nolint

	// SKIP LOCKED: silently skip rows already taken by a concurrent worker;
	// the sig IS NULL check skips rows migrated since the id scan.
	query := `SELECT id, name, author, identifier, icon ,default_config, disabled, hook, storage, deployment, compute, deployment_default_config, public, public_configurations, event
	FROM integration_model
	WHERE id = $1 AND sig IS NULL FOR UPDATE SKIP LOCKED`

	var integrationModel sdk.IntegrationModel
	if err := tx.QueryRow(query, id).Scan(&integrationModel.ID,
		&integrationModel.Name,
		&integrationModel.Author,
		&integrationModel.Identifier,
		&integrationModel.Icon,
		&integrationModel.DefaultConfig,
		&integrationModel.Disabled,
		&integrationModel.Hook,
		&integrationModel.Storage,
		&integrationModel.Deployment,
		&integrationModel.Compute,
		&integrationModel.DeploymentDefaultConfig,
		&integrationModel.Public,
		&integrationModel.PublicConfigurations,
		&integrationModel.Event,
	); err != nil {
		if err == sql.ErrNoRows {
			return nil
		}
		return sdk.WrapError(err, "unable to select and lock application %d", id)
	}

	// Decrypt each public configuration with the legacy scheme so UpdateModel
	// can re-encrypt it with the new one.
	for pfName, pfCfg := range integrationModel.PublicConfigurations {
		newCfg := pfCfg.Clone()
		if err := newCfg.DecryptSecrets(secret.DecryptValue); err != nil {
			return sdk.WrapError(err, "unable to encrypt config PublicConfigurations")
		}
		integrationModel.PublicConfigurations[pfName] = newCfg
	}

	oldPublicConfigurations := integrationModel.PublicConfigurations.Clone()

	if err := integration.UpdateModel(tx, &integrationModel); err != nil {
		return sdk.WrapError(err, "unable to update integration_model %d", id)
	}

	// Verify: reload with clear passwords and compare to the pre-update copy.
	newIntegrationModel, err := integration.LoadModelWithClearPassword(tx, id)
	if err != nil {
		return err
	}
	if !reflect.DeepEqual(oldPublicConfigurations, newIntegrationModel.PublicConfigurations) {
		return sdk.WrapError(errors.New("verification error"), "integration_model %d migration failure", id)
	}

	if err := tx.Commit(); err != nil {
		return sdk.WithStack(err)
	}
	log.Info(ctx, "migrate.refactorIntegrationModelCrypto> integration_model %d migration end", id)
	return nil
}

// RefactorProjectIntegrationCrypto migrates every project_integration row that
// has not yet been signed (sig IS NULL), one transaction per row, collecting
// per-row errors into a MultiError.
func RefactorProjectIntegrationCrypto(ctx context.Context, db *gorp.DbMap) error {
	query := "SELECT id FROM project_integration WHERE sig IS NULL"

	rows, err := db.Query(query)
	if err == sql.ErrNoRows {
		return nil
	}
	if err != nil {
		return sdk.WithStack(err)
	}

	var ids []int64
	for rows.Next() {
		var id int64
		if err := rows.Scan(&id); err != nil {
			rows.Close() // nolint
			return sdk.WithStack(err)
		}
		ids = append(ids, id)
	}
	if err := rows.Close(); err != nil {
		return sdk.WithStack(err)
	}

	var mError = new(sdk.MultiError)
	for _, id := range ids {
		if err := refactorProjectIntegrationCrypto(ctx, db, id); err != nil {
			mError.Append(err)
			log.Error(ctx, "migrate.RefactorProjectIntegrationCrypto> unable to migrate integration_model %d: %v", id, err)
		}
	}
	if mError.IsEmpty() {
		return nil
	}
	return mError
}

// refactorProjectIntegrationCrypto re-encrypts a single project_integration
// row following the same decrypt / update / reload-and-verify pattern as the
// model migration above.
func refactorProjectIntegrationCrypto(ctx context.Context, db *gorp.DbMap, id int64) error {
	log.Info(ctx, "migrate.refactorProjectIntegrationCrypto> project_integration %d migration begin", id)

	tx, err := db.Begin()
	if err != nil {
		return sdk.WithStack(err)
	}

	query := `SELECT id, name, project_id, integration_model_id, config FROM project_integration WHERE id = $1 AND sig IS NULL FOR UPDATE SKIP LOCKED`

	defer tx.Rollback() // nolint

	var projectIntegration sdk.ProjectIntegration
	if err := tx.QueryRow(query, id).Scan(
		&projectIntegration.ID,
		&projectIntegration.Name,
		&projectIntegration.ProjectID,
		&projectIntegration.IntegrationModelID,
		&projectIntegration.Config,
	); err != nil {
		if err == sql.ErrNoRows {
			return nil
		}
		return sdk.WrapError(err, "unable to select and lock application %d", id)
	}

	newCfg := projectIntegration.Config.Clone()
	if err := newCfg.DecryptSecrets(secret.DecryptValue); err != nil {
		return sdk.WrapError(err, "unable to encrypt config PublicConfigurations")
	}
	projectIntegration.Config = newCfg

	// If the existing configuration contains the password placeholder stored
	// as an encrypted value, override it, because decryption would fail.
	for k, v := range projectIntegration.Config {
		if v.Type == sdk.IntegrationConfigTypePassword && v.Value == sdk.PasswordPlaceholder {
			log.Warning(ctx, "refactorProjectIntegrationCrypto > overriding wrong passwork: project: %d, integration: %s, value: %s", projectIntegration.ProjectID, projectIntegration.Name, k)
			v.Value = k
		}
		projectIntegration.Config[k] = v
	}

	oldCfg := projectIntegration.Config.Clone()

	if err := integration.UpdateIntegration(tx, projectIntegration); err != nil {
		return sdk.WithStack(err)
	}

	// Verify round trip before committing.
	newProjectIntegration, err := integration.LoadProjectIntegrationByIDWithClearPassword(tx, id)
	if err != nil {
		return err
	}
	if !reflect.DeepEqual(oldCfg, newProjectIntegration.Config) {
		return sdk.WrapError(errors.New("verification error"), "project_integration %d migration failure", id)
	}

	if err := tx.Commit(); err != nil {
		return sdk.WithStack(err)
	}
	log.Info(ctx, "migrate.refactorProjectIntegrationCrypto> project_integration %d migration end", id)
	return nil
}
#!/bin/bash
# Create the Kafka topics used by the test suite.
#
# Usage: $0 <kafka-install-dir> <zookeeper-url>

kinstall=$1
zk=$2

# Quote the expansion: an unquoted empty $zk makes [ -z ] misbehave.
if [ -z "$zk" ]; then
  echo "usage: $0 kafka-install-dir zookeeper-url" >&2
  exit 1
fi

# Build the command as an array so an install path with spaces still works.
topic_cmd=("$kinstall/bin/kafka-topics.sh" --zookeeper "$zk" --create --partitions 1 --replication-factor 1 --topic)

"${topic_cmd[@]}" kafkatesttopicbasic1
"${topic_cmd[@]}" kafkatesttopicbasic2
"${topic_cmd[@]}" kafkatesttopicgroup
"${topic_cmd[@]}" kafkatesttopicmultigroup
/* "User" entity */
const mongoose = require('mongoose'); // Import mongoose

// Define the entity structure (the user's attributes)
const UserSchema = mongoose.Schema({
    email: String,
});

// Export the model
module.exports = mongoose.model('User', UserSchema);
# Imports import pandas as pd from sklearn.linear_model import LinearRegression # Create a dataset data = pd.DataFrame(features) # Extract features X = data[['size', 'location', 'facilities']] # Extract target y = data[['price']] # Create the model model = LinearRegression() # Train the model model.fit(X, y) # Make Predictions predictions = model.predict(X)
// Assignment1 2/Assignment1_2/HelloServlet.java
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.*;
import java.io.*;
import java.io.PrintWriter;
import java.util.Enumeration;

/**
 * Servlet that echoes every request header (name and value) back to the
 * client as a minimal HTML page.
 */
public class HelloServlet extends HttpServlet {

    /** No initialization required. */
    public void init() {
    }

    /** Writes an HTML page listing each request header name and value. */
    public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException {
        PrintWriter out = response.getWriter();
        Enumeration<String> headers = request.getHeaderNames();
        out.println("<!DOCTYPE html>");
        out.println("<html>");
        out.println("<body>");
        // getHeaderNames may return null if the container disallows access.
        if (headers != null) {
            while (headers.hasMoreElements()) {
                String name = headers.nextElement();
                out.println("Header Names - " + name + ": " + request.getHeader(name));
            }
        }
        out.println("</body>");
        out.println("</html>");
    }

    /** No cleanup required. */
    public void destroy() {
    }
}
/* horizon/static/horizon/js/horizon.autoupdate.js */
/* Namespace for core functionality related to DataTables. */
horizon.autoupdate = {
  // Poll the server for fresh content for every "ajax-update" table row that
  // also carries the class `div_id`, replacing each row in place.
  update: function (div_id) {
    var $chart_to_update = $('tr.'+div_id+'.ajax-update');
    if ($chart_to_update.length) {
      var interval = $chart_to_update.attr('data-update-interval'),
        $table = $chart_to_update.closest('table'),
        decay_constant = $table.attr('decay_constant');

      // Do not update this row if the action column is expanded
      if ($chart_to_update.find('.actions_column .btn-group.open').length) {
        // Wait and try to update again in next interval instead
        setTimeout(horizon.autoupdate.update, interval);
        // Remove interval decay, since this will not hit server
        $table.removeAttr('decay_constant');
        return;
      }

      // Trigger the update handlers.
      $chart_to_update.each(function(index, row) {
        var $row = $(this),
          $table = $row.closest('table.datatable');
        horizon.ajax.queue({
          url: $row.attr('data-update-url'),
          error: function (jqXHR, textStatus, errorThrown) {
            switch (jqXHR.status) {
              // A 404 indicates the object is gone, and should be removed from the table
              case 404:
                // Update the footer count and reset to default empty row if needed
                var $footer, row_count, footer_text, colspan, template, params, $empty_row;
                // existing count minus one for the row we're removing
                row_count = horizon.autoupdate.update_footer_count($table, -1);
                if(row_count === 0) {
                  colspan = $table.find('th[colspan]').attr('colspan');
                  template = horizon.templates.compiled_templates["#empty_row_template"];
                  params = {
                    "colspan": colspan,
                    no_items_label: gettext("No items to display.")
                  };
                  // NOTE(review): assigns `empty_row` without `var` although
                  // `$empty_row` is declared above — this leaks a global and
                  // looks like a typo for `$empty_row`; confirm before fixing.
                  empty_row = template.render(params);
                  $row.replaceWith(empty_row);
                } else {
                  $row.remove();
                }
                // Reset tablesorter's data cache.
                $table.trigger("update");
                break;
              default:
                horizon.utils.log(gettext("An error occurred while updating."));
                $row.removeClass("ajax-update");
                $row.find("i.ajax-updating").remove();
                break;
            }
          },
          success: function (data, textStatus, jqXHR) {
            var $new_row = $(data);

            // Rows still in an unknown state get a spinner / attention icon.
            if ($new_row.hasClass('status_unknown')) {
              var spinner_elm = $new_row.find("td.status_unknown:last");
              if ($new_row.find('.btn-action-required').length > 0) {
                spinner_elm.prepend(
                  $("<div />")
                    .addClass("action_required_img")
                    .append(
                      $("<img />")
                        .attr("src", "/static/dashboard/img/action_required.png")));
              } else {
                // Replacing spin.js here with an animated gif to reduce CPU
                spinner_elm.prepend(
                  $("<div />")
                    .addClass("loading_gif")
                    .append(
                      $("<img />")
                        .attr("src", "/static/dashboard/img/loading.gif")));
              }
            }

            // Only replace row if the html content has changed
            if($new_row.html() !== $row.html()) {
              if($row.find('.table-row-multi-select:checkbox').is(':checked')) {
                // Preserve the checkbox if it's already clicked
                $new_row.find('.table-row-multi-select:checkbox').prop('checked', true);
              }
              $row.replaceWith($new_row);
              // Reset tablesorter's data cache.
              $table.trigger("update");
              // Reset decay constant.
              $table.removeAttr('decay_constant');
            }
          },
          complete: function (jqXHR, textStatus) {
            // Revalidate the button check for the updated table
            // Set interval decay to this table, and increase if it already exist
            if(decay_constant === undefined) {
              decay_constant = 1;
            } else {
              decay_constant++;
            }
            $table.attr('decay_constant', decay_constant);
            // Poll until there are no rows in an "unknown" state on the page.
            // NOTE(review): `next_poll` is assigned without `var` — an
            // implicit global shared across invocations.
            next_poll = interval * decay_constant;
            // Limit the interval to 30 secs
            if(next_poll > 30 * 1000) {
              next_poll = 30 * 1000;
            }
            setTimeout(horizon.autoupdate.update, next_poll);
          }
        });
      });
    }
  }
};

// On page init, start polling for each of the row classes we care about.
horizon.addInitFunction(function() {
  var div_id = ["status_unknown","status_up","div3"]
  $.each(div_id, function(index,element){
    horizon.autoupdate.update(element);
  })
});
import sqlite3

# Print every employee row whose salary is below 10000.
# Assumes the table is in a file named employee_table.db.
db = sqlite3.connect("employee_table.db")
try:
    cur = db.cursor()
    # Execute the SQL command. This is a read-only query, so no commit is
    # needed (the original called db.commit() here, which was a no-op).
    cur.execute("SELECT * FROM employee_table WHERE salary < 10000")
    # Get and print the results.
    results = cur.fetchall()
    for row in results:
        print(row)
finally:
    # Always release the connection, even if the query raises.
    db.close()
#!/bin/bash
# Control script for the Bouncer service.
# Usage: bouncer.sh <run|start|stop|restart|reload|status|keygen>
# Environment overrides: BOUNCER_HOME, BOUNCER_CONF, BOUNCER_MEM_MB, BOUNCER_OPTS.
BOUNCER_HOME=${BOUNCER_HOME:-/opt/bouncer}
BOUNCER_CONF=${BOUNCER_CONF:-bouncer.conf}
BOUNCER_MEM_MB=${BOUNCER_MEM_MB:-64}
BOUNCER_OPTS_DEF="-verbose:gc -XX:+PrintGCDetails -XX:+PrintGCDateStamps -XX:+PrintGCTimeStamps -showversion -XX:+PrintCommandLineFlags -XX:-PrintFlagsFinal"
BOUNCER_OPTS="${BOUNCER_OPTS:-${BOUNCER_OPTS_DEF}}"
# NOTE: jar names with spaces would break this join; jar paths are assumed
# space-free as in the original.
BOUNCER_CLASSPATH=$(echo "${BOUNCER_HOME}"/lib/*.jar | tr ' ' ':')
#
# Find the PID(s) of a running bouncer instance (empty string when not running).
# Replaces the original "ps | grep | grep -v grep | while read" pipeline.
find_pid () {
  pgrep -f "program.name=bouncer"
}
# Touching the config file signals the running bouncer to reload it.
do_reload () {
  touch "${BOUNCER_HOME}/conf/${BOUNCER_CONF}"
}
do_keygen () {
  # org.javastack.bouncer.KeyGenerator <bits> <days> <CommonName> <filename-without-extension>
  local bits="${1}" days="${2}" cn="${3}" filebase="${4}"
  if [ "$filebase" = "" ]; then
    echo "$0 keygen <bits> <days> <CommonName> <filename-without-extension>"
    echo "Sample:"
    echo "$0 keygen 2048 365 host1.acme.com host1"
    exit 1
  fi
  cd "${BOUNCER_HOME}/keys/" || exit 1
  java \
    -cp "${BOUNCER_CLASSPATH}" \
    org.javastack.bouncer.KeyGenerator "$bits" "$days" "$cn" "$filebase"
  #chmod go-rwx "${filebase}.key"
  ls -al "${BOUNCER_HOME}/keys/${filebase}."*
}
# Run in the foreground (useful under a supervisor or for debugging).
do_run () {
  cd "${BOUNCER_HOME}" || exit 1
  java -Dprogram.name=bouncer ${BOUNCER_OPTS} "-Xmx${BOUNCER_MEM_MB}m" \
    -cp "${BOUNCER_HOME}/conf/:${BOUNCER_HOME}/keys/:${BOUNCER_CLASSPATH}" \
    org.javastack.bouncer.Bouncer "${BOUNCER_CONF}"
}
# Start detached in the background; bootstrap output goes to log/bouncer.bootstrap.
do_start () {
  cd "${BOUNCER_HOME}" || exit 1
  echo "$(date --iso-8601=seconds) Starting" >> "${BOUNCER_HOME}/log/bouncer.bootstrap"
  nohup java -Dprogram.name=bouncer ${BOUNCER_OPTS} "-Xmx${BOUNCER_MEM_MB}m" \
    -cp "${BOUNCER_HOME}/conf/:${BOUNCER_HOME}/keys/:${BOUNCER_CLASSPATH}" \
    "-Dlog.stdOutFile=${BOUNCER_HOME}/log/bouncer.out" \
    "-Dlog.stdErrFile=${BOUNCER_HOME}/log/bouncer.err" \
    org.javastack.bouncer.Bouncer "${BOUNCER_CONF}" 1>>"${BOUNCER_HOME}/log/bouncer.bootstrap" 2>&1 &
  PID="$!"
  echo "Bouncer: STARTED [${PID}]"
}
# Send SIGTERM and wait (via /proc, Linux-specific as in the original) until
# the process is gone.
do_stop () {
  PID="$(find_pid)"
  if [ "${PID}" = "" ]; then
    echo "Bouncer: NOT RUNNING"
  else
    echo "$(date --iso-8601=seconds) Killing: ${PID}" >> "${BOUNCER_HOME}/log/bouncer.bootstrap"
    echo -n "Bouncer: KILLING [${PID}]"
    kill -TERM ${PID}
    echo -n "["
    for _pid in ${PID}; do
      while [ -f "/proc/${_pid}/status" ]; do
        echo -n "."
        sleep 1
      done
    done
    echo "]"
  fi
}
do_status () {
  PID="$(find_pid)"
  if [ "${PID}" = "" ]; then
    echo "Bouncer: NOT RUNNING"
  else
    echo "Bouncer: RUNNING [${PID}]"
  fi
}
case "$1" in
  run)
    do_stop
    trap do_stop SIGINT SIGTERM
    do_run
  ;;
  start)
    do_stop
    do_start
  ;;
  stop)
    do_stop
  ;;
  restart)
    do_stop
    do_start
  ;;
  reload)
    do_reload
  ;;
  status)
    do_status
  ;;
  keygen)
    do_keygen "$2" "$3" "$4" "$5"
  ;;
  *)
    echo "$0 <run|start|stop|restart|reload|status|keygen>"
  ;;
esac
// On-screen touch gamepad (joystick, buttons, d-pad) drawn on a canvas.
// ctx must be defined.
// Vector Must be defined {x:Number,y:Number}
// Touch (with Touch.checkPos) must also be provided by the host page.
(function(glob){
	var Gamepad = {};
	glob.Gamepad = Gamepad;
	// Shared look: color1 = active/stroke, color2 = idle fill.
	Gamepad.color1 = 'red';
	Gamepad.color2 = 'white';
	Gamepad.lineWidth = 5;
	Gamepad.button = {};
	Gamepad.joystick = {};
	// Reusable Path2D shapes, filled in at the bottom of this file.
	Gamepad.button.circle = new Path2D;
	Gamepad.button.square = new Path2D;
	Gamepad.button.arrow = new Path2D;
	Gamepad.button.pentagon = new Path2D;
	Gamepad.joystick.socket = new Path2D;
	Gamepad.joystick.stick = new Path2D;
	// Analog joystick: a fixed socket plus a stick that follows the touch,
	// clamped to a radius of 100. offsetX/offsetY hold the stick deflection
	// recomputed on every draw().
	Gamepad.Joystick = class Joystick{
		constructor(pos=new Vector,add=true){
			this.offsetX = 0;
			this.offsetY = 0;
			this.position = pos;
			this.socket = Gamepad.joystick.socket;
			this.stick = Gamepad.joystick.stick;
			if(add) Gamepad.elements.push(this);
		}
		// vis=false runs the hit-testing without rendering.
		draw(vis=true){
			this.offsetX = 0;
			this.offsetY = 0;
			ctx.beginPath();
			ctx.lineWidth = 10;
			ctx.strokeStyle = Gamepad.color1;
			ctx.fillStyle = Gamepad.color2;
			ctx.save();
			ctx.translate(this.position.x,this.position.y);
			var active = false;
			const THIS = this;
			Touch.checkPos(touches=>{
				// NOTE(review): |= coerces the boolean to 0/1; truthiness still
				// works for the if(active) checks below.
				active |= ctx.isPointInPath(THIS.socket,touches.start.x,touches.start.y);
				if(active){
					let tx = touches.pos.x - this.position.x;
					let ty = touches.pos.y - this.position.y;
					// Clamp deflection to the socket radius (100 px).
					let dist = Math.min(Vector.distance(0,0,tx,ty),100);
					// +180: Vector.getDir appears to return the opposite
					// heading — TODO confirm against Vector's implementation.
					let dir = Vector.getDir(tx,ty) + 180;
					let np = Vector.getPointIn(dir*Math.PI/180,dist);
					THIS.offsetX = np.x;
					THIS.offsetY = np.y;
				}
			});
			if(vis) ctx.fill(this.socket);
			if(vis) ctx.stroke(this.socket);
			// ctx.restore();
			// ctx.save();
			// Second translate is relative to the socket center: draws the
			// stick at its deflected position.
			ctx.translate(this.offsetX,this.offsetY);
			if(active) ctx.fillStyle = Gamepad.color1;
			if(vis) ctx.fill(this.stick);
			if(vis) ctx.stroke(this.stick);
			ctx.restore();
		}
	}
	// Momentary push button; this.down reflects whether any current touch is
	// inside the button's path after each draw().
	Gamepad.Button = class Button{
		constructor(pos=new Vector,add=true){
			this.down = false;
			this.position = pos;
			this.path = new Path2D;
			if(add) Gamepad.elements.push(this);
		}
		// translate=false lets a parent (dPad) manage the transform itself;
		// vis=false hit-tests without rendering.
		draw(translate=true,vis=true){
			if(translate) ctx.save();
			ctx.strokeStyle = Gamepad.color1;
			ctx.fillStyle = Gamepad.color2;
			ctx.lineWidth = Gamepad.lineWidth;
			if(translate) ctx.translate(this.position.x,this.position.y);
			this.down = false;
			Touch.checkPos(touches=>{
				// Hit test in the CURRENT transform, so dPad rotation applies.
				this.down |= ctx.isPointInPath(this.path,touches.pos.x,touches.pos.y);
			});
			if(this.down) ctx.fillStyle = Gamepad.color1;
			if(vis) ctx.fill(this.path);
			if(vis) ctx.stroke(this.path);
			if(translate) ctx.restore();
		}
	}
	// Four pentagon buttons rotated 90° apart around a shared center.
	Gamepad.dPad = class dPad{
		constructor(pos = new Vector,add=true){
			this.position = pos;
			this.up = new Gamepad.Button(pos,false);
			this.up.path = Gamepad.button.pentagon;
			this.down = new Gamepad.Button(pos,false);
			this.down.path = Gamepad.button.pentagon;
			this.left = new Gamepad.Button(pos,false);
			this.left.path = Gamepad.button.pentagon;
			this.right = new Gamepad.Button(pos,false);
			this.right.path = Gamepad.button.pentagon;
			if(add) Gamepad.elements.push(this);
		}
		draw(vis=true){
			// Draw order up/right/down/left matches successive 90° rotations.
			let btns = [this.up,this.right,this.down,this.left];
			ctx.beginPath();
			ctx.save();
			ctx.translate(this.position.x,this.position.y);
			for(let btn of btns){
				btn.draw(false,vis);
				ctx.rotate(Math.PI/2);
			}
			ctx.restore();
		}
	}
	// Draw (and hit-test) every registered element; call once per frame.
	Gamepad.draw = function(){
		if(!Gamepad.show) return;
		for(let elements of Gamepad.elements){
			elements.draw();
		}
	}
	Gamepad.show = true;
	Gamepad.elements = [];
	// Touchscreen Events
	// Define Looks
	Gamepad.button.circle.arc(0,0,50,0,Math.PI*2);
	// Gamepad.button.square.rect(-40,-40,80,80);
	Gamepad.joystick.socket.arc(0,0,100,0,Math.PI*2);
	Gamepad.joystick.stick.arc(0,0,30,0,Math.PI*2);
	// Pentagon points "up": apex at the origin, body above it.
	Gamepad.button.pentagon.moveTo(0,0);
	Gamepad.button.pentagon.lineTo(-75,-75);
	Gamepad.button.pentagon.lineTo(-75,-150);
	Gamepad.button.pentagon.lineTo(75,-150);
	Gamepad.button.pentagon.lineTo(75,-75);
	Gamepad.button.pentagon.closePath();
})(this);
// HTTP server bootstrap for the Express app in ./app.
const http = require('http')
const app = require('./app')
const debug = require('debug')('angular-posts')

// Normalize a port value: named pipe (string), port number (>= 0), or false.
const normalizePort = val => {
  let port = parseInt(val, 10)
  if (isNaN(port)) {
    // Named pipe.
    return val
  }
  if (port >= 0) {
    return port
  }
  return false
}

// Handle listen-time errors with friendly messages for the common cases.
const onError = error => {
  if (error.syscall !== 'listen') {
    throw error
  }
  // BUG FIX: the original built this string from an undefined `addr`
  // variable; describe the configured port/pipe instead.
  const bind = typeof port === 'string' ? 'pipe ' + port : 'port ' + port
  switch (error.code) {
    // BUG FIX: the Node error code is 'EACCES' (one S); the original
    // 'EACCESS' branch could never match.
    case 'EACCES':
      console.error(bind + ' requires elevated privileges')
      process.exit(1)
      break;
    case 'EADDRINUSE':
      console.error(bind + ' is already in use')
      process.exit(1)
      break;
    default:
      throw error
  }
}

const onListening = () => {
  const addr = server.address()
  const bind = typeof addr === 'string' ? 'pipe ' + addr : 'port ' + port
  debug('Listening on ' + bind)
}

const port = normalizePort(process.env.PORT || '3000')
app.set('port', port)
const server = http.createServer(app)
server.on('error', onError)
server.on('listening', onListening)
server.listen(port)
<reponame>THK-ADV/lwm-ui import {Injectable} from '@angular/core' import {Observable} from 'rxjs' import {AbstractCRUDService} from '../abstract-crud/abstract-crud.service' import {Blacklist, BlacklistJSON, BlacklistProtocol} from '../models/blacklist.model' import {HttpService, PartialResult} from './http.service' import {map} from 'rxjs/operators' import {applyFilter} from './http.filter' import {convertManyBlacklists, mapBlacklistJSON} from '../utils/http-utils' interface BlacklistFilter { attribute: 'global' | 'since' | 'until' value: string } export interface BlacklistRangeProtocol { label: string start: string end: string } @Injectable({ providedIn: 'root' }) export class BlacklistService implements AbstractCRUDService<BlacklistProtocol, Blacklist> { constructor(private http: HttpService) { } private path = 'blacklists' private downloadPath = (year: number) => `${this.path}/year/${year}` getAllWithFilter = (...filter: BlacklistFilter[]): Observable<Blacklist[]> => this.http .getAll<BlacklistJSON>(this.path, applyFilter(filter)) .pipe(map(convertManyBlacklists)) getAll = (): Observable<Blacklist[]> => this.http .getAll<BlacklistJSON>(this.path) .pipe(map(convertManyBlacklists)) download = (year: number): Observable<PartialResult<Blacklist>> => this.http .create<Object, PartialResult<BlacklistJSON>>(this.downloadPath(year), {}) .pipe(map(pr => ({...pr, created: convertManyBlacklists(pr.created)}))) preview = (year: number): Observable<Blacklist[]> => this.http .getAll<BlacklistJSON>(this.downloadPath(year)) .pipe(map(convertManyBlacklists)) delete = (id: string): Observable<Blacklist> => this.http .delete<BlacklistJSON>(this.path, id) .pipe(map(mapBlacklistJSON)) create = (protocol: BlacklistProtocol): Observable<Blacklist> => this.http .create<BlacklistProtocol, BlacklistJSON>(this.path, protocol) .pipe(map(mapBlacklistJSON)) createFromRange = (range: BlacklistRangeProtocol): Observable<Blacklist[]> => this.http .create(`${this.path}/range`, range) 
.pipe(map(convertManyBlacklists)) update = (protocol: BlacklistProtocol, id: string): Observable<Blacklist> => this.http .put<BlacklistProtocol, BlacklistJSON>(this.path, id, protocol) .pipe(map(mapBlacklistJSON)) }
# Graph the position and velocity of an object in a simple harmonic motion
import math

import numpy as np
import matplotlib.pyplot as plt


def position(t: float, v: float, y: float, w: float) -> float:
    """Displacement at time t for start displacement y, start velocity v,
    angular frequency w."""
    return (y * math.cos(w * t)) + ((v / w) * math.sin(w * t))


def velocity(t: float, v: float, y: float, w: float) -> float:
    """Velocity at time t (time derivative of position)."""
    return ((-y * math.sin(w * t)) * w) + (v * math.cos(w * t))


def main():
    """Plot position and velocity over 10 seconds on twin y-axes."""
    sample_times = list(np.linspace(0, 10, 100))
    displacements = [position(t, v, y, w) for t in sample_times]
    speeds = [velocity(t, v, y, w) for t in sample_times]

    fig, ax1 = plt.subplots()
    ax1.plot(sample_times, displacements, label="Position")
    ax1.set_xlabel("Time")
    ax1.set_ylabel("Position")

    # Velocity shares the time axis but gets its own scale on the right.
    ax2 = ax1.twinx()
    ax2.plot(sample_times, speeds, label="Velocity", color='tab:orange')
    ax2.set_ylabel("Velocity")

    fig.legend()
    fig.tight_layout()
    plt.show()


# Program settings
n = 10  # Iterations
m = 10  # Mass
k = 40  # Spring constant
c = 5  # Damping
y = -4  # Start displacement
v = 0  # Start velocity
w = math.sqrt(k / m)  # Angular frequency

if __name__ == '__main__':
    main()
#!/bin/bash
# Build libxlua.so for the three Android ABIs and copy the results into
# plugin_lua53/. Pass "linux" as $1 to use the Linux NDK prebuilts; default
# is mac (darwin).
if [ -z "$ANDROID_NDK" ]; then
  export ANDROID_NDK=~/android-ndk-r19c
fi

PLATFORM=mac
if [ "$1" == "linux" ]; then
  PLATFORM=linux
fi

TOOL_CHAIN_PATH=$ANDROID_NDK/build

# NDK prebuilt host tag for the selected platform.
if [ "$PLATFORM" == "linux" ]; then
  HOST_TAG=linux-x86_64
else
  HOST_TAG=darwin-x86_64
fi

# build_abi <abi> <build-dir> <toolchain-name> <strip-binary>
# Configures, builds and installs one ABI. Factored out of the three
# copy-pasted sections in the original script.
build_abi () {
  local abi="$1" build_dir="$2" toolchain_name="$3" strip_path="$4"
  echo "make ${abi} ================================================"
  mkdir -p "$build_dir" && cd "$build_dir" || exit 1
  cmake -DCMAKE_BUILD_TYPE=Release -DANDROID_ABI="$abi" \
    -DCMAKE_TOOLCHAIN_FILE="$TOOL_CHAIN_PATH/cmake/android.toolchain.cmake" \
    -DANDROID_TOOLCHAIN_NAME="$toolchain_name" \
    -DANDROID_NATIVE_API_LEVEL=android-9 \
    -DANDROID_STRIP_EXEC="$strip_path" ../
  cd .. || exit 1
  cmake --build "$build_dir" --config Release
  mkdir -p "plugin_lua53/Plugins/Android/libs/$abi/"
  cp "$build_dir/libxlua.so" "plugin_lua53/Plugins/Android/libs/$abi/libxlua.so"
}

build_abi armeabi-v7a build_v7a arm-linux-androideabi-clang3.6 \
  "$ANDROID_NDK/toolchains/arm-linux-androideabi-4.9/prebuilt/$HOST_TAG/bin/arm-linux-androideabi-strip"

build_abi arm64-v8a build_v8a arm-linux-androideabi-clang3.6 \
  "$ANDROID_NDK/toolchains/aarch64-linux-android-4.9/prebuilt/$HOST_TAG/bin/aarch64-linux-android-strip"

build_abi x86 build_x86 x86-clang3.5 \
  "$ANDROID_NDK/toolchains/x86-4.9/prebuilt/$HOST_TAG/bin/i686-linux-android-strip"
mockgen -source=config/config.go -destination=./config/config_mock.go -package=config -self_package=github.com/DrmagicE/gmqtt/config mockgen -source=persistence/queue/elem.go -destination=./persistence/queue/elem_mock.go -package=queue -self_package=github.com/DrmagicE/gmqtt/queue mockgen -source=persistence/queue/queue.go -destination=./persistence/queue/queue_mock.go -package=queue -self_package=github.com/DrmagicE/gmqtt/queue mockgen -source=persistence/session/session.go -destination=./persistence/session/session_mock.go -package=session -self_package=github.com/DrmagicE/gmqtt/session mockgen -source=persistence/subscription/subscription.go -destination=./persistence/subscription/subscription_mock.go -package=subscription -self_package=github.com/DrmagicE/gmqtt/subscription mockgen -source=persistence/unack/unack.go -destination=./persistence/unack/unack_mock.go -package=unack -self_package=github.com/DrmagicE/gmqtt/unack mockgen -source=pkg/packets/packets.go -destination=./pkg/packets/packets_mock.go -package=packets -self_package=github.com/DrmagicE/gmqtt/packets mockgen -source=plugin/auth/account_grpc.pb.go -destination=./plugin/auth/account_grpc.pb_mock.go -package=auth -self_package=github.com/DrmagicE/gmqtt/auth mockgen -source=plugin/federation/federation.pb.go -destination=./plugin/federation/federation.pb_mock.go -package=federation -self_package=github.com/DrmagicE/gmqtt/federation mockgen -source=plugin/federation/peer.go -destination=./plugin/federation/peer_mock.go -package=federation -self_package=github.com/DrmagicE/gmqtt/federation mockgen -source=plugin/federation/membership.go -destination=./plugin/federation/membership_mock.go -package=federation -self_package=github.com/DrmagicE/gmqtt/federation mockgen -source=retained/interface.go -destination=./retained/interface_mock.go -package=retained -self_package=github.com/DrmagicE/gmqtt/retained mockgen -source=server/client.go -destination=./server/client_mock.go -package=server 
-self_package=github.com/DrmagicE/gmqtt/server mockgen -source=server/persistence.go -destination=./server/persistence_mock.go -package=server -self_package=github.com/DrmagicE/gmqtt/server mockgen -source=server/plugin.go -destination=./server/plugin_mock.go -package=server -self_package=github.com/DrmagicE/gmqtt/server mockgen -source=server/server.go -destination=./server/server_mock.go -package=server -self_package=github.com/DrmagicE/gmqtt/server mockgen -source=server/service.go -destination=./server/service_mock.go -package=server -self_package=github.com/DrmagicE/gmqtt/server mockgen -source=server/stats.go -destination=./server/stats_mock.go -package=server -self_package=github.com/DrmagicE/gmqtt/server mockgen -source=server/topic_alias.go -destination=./server/topic_alias_mock.go -package=server -self_package=github.com/DrmagicE/gmqtt/server # reflection mode. # gRPC streaming mock issue: https://github.com/golang/mock/pull/163 mockgen -package=federation -destination=/usr/local/gopath/src/github.com/DrmagicE/gmqtt/plugin/federation/federation_grpc.pb_mock.go github.com/DrmagicE/gmqtt/plugin/federation FederationClient,Federation_EventStreamClient
import java.io.*;
import java.net.*;

/**
 * Console client for the Tic Tac Toe server: echoes server prompts to the
 * user and forwards each typed line back to the server.
 */
public class TicTacToeClient {
    public static void main(String[] args) throws Exception {
        String hostName = "localhost";
        int portNumber = 4321;
        // try-with-resources guarantees the socket and streams are closed
        // even when an exception escapes the loop (the original leaked them
        // on any error before the explicit close() calls).
        try (Socket echoSocket = new Socket(hostName, portNumber);
             BufferedReader in = new BufferedReader(new InputStreamReader(echoSocket.getInputStream()));
             PrintWriter out = new PrintWriter(echoSocket.getOutputStream(), true);
             BufferedReader stdIn = new BufferedReader(new InputStreamReader(System.in))) {
            // Give up on a read after ~8 minutes of silence.
            echoSocket.setSoTimeout(500000);
            String playerInput;
            System.out.println("Welcome to Tic Tac Toe!");
            while (true) {
                try {
                    String serverLine = in.readLine();
                    if (serverLine == null) {
                        // BUG FIX: readLine() returns null when the server
                        // closes the connection; the original printed "null"
                        // and kept looping.
                        break;
                    }
                    System.out.println(serverLine);
                    playerInput = stdIn.readLine();
                    if (playerInput == null) {
                        // Local stdin closed (EOF) — nothing more to send.
                        break;
                    }
                    out.println(playerInput);
                    // No explicit flush needed: PrintWriter was created with
                    // autoFlush=true.
                } catch (java.net.SocketTimeoutException e) {
                    System.out.println("Timed out.");
                    break;
                }
            }
        }
        System.out.println("Client connection terminated.");
    }
}
def calculateArea(width, height):
    """Return the area of a rectangle with the given width and height."""
    area = width * height
    return area


# Dimensions of the rectangle to measure.
rect_length = 8
rect_breadth = 10

area = calculateArea(rect_length, rect_breadth)
print("Area of the rectangle:", area)
#!/bin/bash
# Package the activity-listener .NET Core 3.1 Lambda into a deployment zip.

# Install zip on debian OS, since the microsoft/dotnet container doesn't have
# zip by default.
if [ -f /etc/debian_version ]
then
  apt -qq update
  apt -qq -y install zip
fi

dotnet tool install --global Amazon.Lambda.Tools --version 4.0.0

# (for CI) ensure that the newly-installed tools are on PATH.
# BUG FIX: the original used "/$(whoami)/.dotnet/tools", which only resolves
# correctly for root ("/root/..."); $HOME works for any user.
if [ -f /etc/debian_version ]
then
  export PATH="$PATH:$HOME/.dotnet/tools"
fi

dotnet restore
dotnet lambda package --configuration release --framework netcoreapp3.1 --output-package ./bin/release/netcoreapp3.1/activity-listener.zip
package main

import (
	"fmt"
	"log"
	"os/user"
)

// isRoot reports whether the current process runs as the superuser.
// It compares the numeric UID rather than the username: UID 0 is root on
// Unix regardless of how the account is named, whereas the original
// username check breaks if root is aliased. On Windows, Uid is a SID string
// and never equals "0", so this returns false there — TODO confirm the
// desired Windows behavior.
func isRoot() bool {
	currentUser, err := user.Current()
	if err != nil {
		log.Fatalf("[isRoot] Unable to get current user: %s", err)
	}
	return currentUser.Uid == "0"
}

func main() {
	if !isRoot() {
		log.Fatalln("Please run this program as root")
	}
	fmt.Println("I am elevated")
}
//
//  SwiftExample-Bridging-Header.h
//  SwiftExample
//
//  Created by <NAME> on 3/10/15.
//  Copyright (c) 2014-2015 Xmartlabs. All rights reserved.
//
// Bridging header: exposes these Objective-C frameworks to the Swift side
// of the example app.

#ifndef SwiftExample_SwiftExample_Bridging_Header_h_h
#define SwiftExample_SwiftExample_Bridging_Header_h_h

#import <XLForm/XLForm.h>
#import <AXRatingView/AXRatingView.h>
#import <JVFloatLabeledTextField/JVFloatLabeledTextField.h>

#endif
import java.io.BufferedReader; import java.io.DataInput; import java.io.DataOutput; import java.io.FileReader; import java.io.IOException; import java.nio.file.FileSystem; import java.util.*; import org.apache.hadoop.filecache.DistributedCache; import org.apache.hadoop.fs.Path; import org.apache.hadoop.conf.*; import org.apache.hadoop.io.*; import org.apache.hadoop.mapreduce.*; import org.apache.hadoop.mapreduce.Reducer.Context; import org.apache.hadoop.mapreduce.lib.output.*; import org.apache.hadoop.mapreduce.lib.input.*; public class country { public static class CompositeKeyWritable implements Writable, WritableComparable<CompositeKeyWritable> { private String deptNo; private String emp; public CompositeKeyWritable() { } public CompositeKeyWritable(String deptNo, String emp) { this.deptNo = deptNo; this.emp = emp; } public String toString() { return (new StringBuilder().append(deptNo).append("\t").append(emp)).toString(); } public void readFields(DataInput dataInput) throws IOException { deptNo = WritableUtils.readString(dataInput); emp = WritableUtils.readString(dataInput); } public void write(DataOutput dataOutput) throws IOException { WritableUtils.writeString(dataOutput,deptNo); WritableUtils.writeString(dataOutput,emp); } public int compareTo(CompositeKeyWritable objKeyPair) { int result = deptNo.compareTo(objKeyPair.deptNo); if (0 == result) { result = emp.compareTo(objKeyPair.emp); } return result; } } public static class mapper1 extends Mapper<LongWritable, Text, CompositeKeyWritable,NullWritable> { public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException { if (value.toString().length() > 0) { String arrEmpAttributes[] = value.toString().split(","); context.write(new CompositeKeyWritable(arrEmpAttributes[0].toString(),(arrEmpAttributes[1].toString())),NullWritable.get()); } } } public static class SecondarySortBasicPartitioner extends Partitioner<CompositeKeyWritable, NullWritable> { public int 
getPartition(CompositeKeyWritable key,NullWritable value,int numReduceTasks) { return (key.deptNo.hashCode() % numReduceTasks); } } public static class SecondarySortBasicCompKeySortComparator extends WritableComparator { protected SecondarySortBasicCompKeySortComparator() { super(CompositeKeyWritable.class, true); } public int compare(WritableComparable w1,WritableComparable w2) { CompositeKeyWritable key1 = (CompositeKeyWritable) w1; CompositeKeyWritable key2 = (CompositeKeyWritable) w2; int cmpResult = key1.deptNo.compareTo(key2.deptNo); if (cmpResult == 0) { return -key1.emp.compareTo(key2.emp); } return cmpResult; } } public static class SecondarySortBasicGroupingComparator extends WritableComparator { protected SecondarySortBasicGroupingComparator() { super(CompositeKeyWritable.class, true); } public int compare(WritableComparable w1,WritableComparable w2) { CompositeKeyWritable key1 = (CompositeKeyWritable) w1; CompositeKeyWritable key2 = (CompositeKeyWritable) w2; return key1.deptNo.compareTo(key2.deptNo); } } public static class SecondarySortBasicReducer extends Reducer<CompositeKeyWritable, NullWritable, CompositeKeyWritable, NullWritable> { public void reduce(CompositeKeyWritable key, Iterable<NullWritable> values, Context context) throws IOException, InterruptedException { for(NullWritable val:values) { context.write(key,NullWritable.get()); } } } public static void main(String[] args) throws Exception{ Configuration conf=new Configuration(); Job job=new Job(conf,"country"); job.setJarByClass(country.class); job.setOutputKeyClass(Text.class); job.setOutputValueClass(Text.class); job.setMapperClass(mapper1.class); job.setMapOutputKeyClass(CompositeKeyWritable.class); job.setMapOutputValueClass(NullWritable.class); job.setPartitionerClass(SecondarySortBasicPartitioner.class); job.setSortComparatorClass(SecondarySortBasicCompKeySortComparator.class); job.setGroupingComparatorClass(SecondarySortBasicGroupingComparator.class); 
job.setReducerClass(SecondarySortBasicReducer.class); job.setInputFormatClass(TextInputFormat.class); job.setOutputFormatClass(TextOutputFormat.class); FileInputFormat.addInputPath(job, new Path(args[0])); FileOutputFormat.setOutputPath(job, new Path(args[1])); job.waitForCompletion(true); } }
package tamp.ch12.Combine.Combine;
/*
 * Combine.java
 *
 * Created on October 29, 2005, 8:57 AM
 */

import java.util.Stack;

/**
 * Software combining tree counter: threads ascend a binary tree of Node
 * objects, merging concurrent increments along the way so the root sees
 * fewer operations. (Package name suggests this follows "The Art of
 * Multiprocessor Programming" ch. 12 — TODO confirm Node's protocol matches.)
 *
 * @author mph
 */
public class Tree {
    final static int THREADS = 8;
    final static int TRIES = 1024 * 1024;
    static boolean[] test = new boolean[THREADS * TRIES];
    // One leaf per pair of threads; thread i uses leaf[ThreadID.get() / 2].
    Node[] leaf;

    /**
     * Creates a new instance of Combine.
     * Builds a complete binary tree of size-1 nodes (array index i has parent
     * (i-1)/2) and takes the last (size+1)/2 nodes as leaves.
     */
    public Tree(int size) {
        Node[] nodes = new Node[size - 1];
        nodes[0] = new Node();
        for (int i = 1; i < nodes.length; i++) {
            nodes[i] = new Node(nodes[(i - 1) / 2]);
        }
        leaf = new Node[(size + 1) / 2];
        for (int i = 0; i < leaf.length; i++) {
            leaf[i] = nodes[nodes.length - i - 1];
        }
    }

    /**
     * Atomically increments the shared counter via the combining protocol
     * and returns the prior value. The four phases below must run in this
     * exact order; Node implements the per-node locking/handoff.
     */
    public int getAndIncrement() throws InterruptedException {
        Stack<Node> stack = new Stack<Node>();
        Node myLeaf = leaf[ThreadID.get() / 2];
        Node node = myLeaf;
        // phase one
        // Ascend while this thread wins the right to combine at each node;
        // 'stop' is the first node owned by another (active) thread.
        while (node.precombine()) {
            node = node.parent;
        }
        Node stop = node;
        // phase two
        // Re-ascend to 'stop', accumulating increments deposited by the
        // threads we combined with; remember the path for distribution.
        node = myLeaf;
        int combined = 1;
        while (node != stop) {
            combined = node.combine(combined);
            stack.push(node);
            node = node.parent;
        }
        // phase 3
        // Apply the combined increment at the stop node (root or an active
        // interior node); 'prior' is the counter value before our increments.
        int prior = stop.op(combined);
        // phase 4
        // Descend the saved path, handing each waiting thread its share of
        // the prior value.
        while (!stack.empty()) {
            node = stack.pop();
            node.distribute(prior);
        }
        return prior;
    }
}
package com.id.drapp;

import android.net.Uri;
import android.provider.BaseColumns;

/**
 * Contract class defining the content-provider authority, URIs, table and
 * column names for the doctor app's local database.
 */
public final class doctorContract {
    // Authority matching the provider declared in the manifest.
    public static final String CONTENT_AUTHORITY = "com.id.drapp";
    public static Uri BASE_CONTENT_URI = Uri.parse("content://" + CONTENT_AUTHORITY);
    public static String PATH_DOCTORS = "doctors";

    /**
     * Patient table contract. Each doctor gets a per-username patient table
     * ("patients<username>"), hence the dynamic tableName/contentUri helpers
     * instead of fixed constants.
     */
    public static abstract class patientEntry implements BaseColumns {
        public static final String _ID = BaseColumns._ID;
        public static final String COLUMN_NAME = "patient_name";
        public static final String COLUMN_PHONE_NUMBER = "patient_ph_no";
        public static final String COLUMN_EMAIL = "email_id";
        public static final String COLUMN_DOB = "patient_dob";
        public static final String COLUMN_ADDRESS = "patient_address";
        public static final String COLUMN_GENDER = "patient_gender";
        public static final String COLUMN_IMAGE = "patient_image";
        public static final String COLUMN_PUSH_ID = "push_id";

        // Brackets keep the dynamic name a valid SQLite identifier even if
        // the username contains special characters.
        public static Uri contentUri(String username) {
            String pathPatient = "[" + "patients" + username + "]";
            Uri uri = Uri.withAppendedPath(BASE_CONTENT_URI, pathPatient);
            return uri;
        }

        public static String tableName(String username) {
            String tablename = "[" + "patients" + username + "]";
            return tablename;
        }
    }

    /** Doctor table contract (single fixed table). */
    public static abstract class doctorEntry implements BaseColumns {
        public static final Uri CONTENT_URI = Uri.withAppendedPath(BASE_CONTENT_URI, PATH_DOCTORS);
        public static final String TABLE_NAME = "doctors";
        public static final String _ID = BaseColumns._ID;
        public static final String COLUMN_PUSHID = "doctor_push_id";
        public static final String COLUMN_NAME = "doctor_name";
        public static final String COLUMN_PHONE_NUMBER = "doctor_ph_no";
        public static final String COLUMN_EMAIL = "email_id";
        // NOTE(review): value looks like a redaction placeholder from the
        // dataset ("<PASSWORD>") rather than a real column name — verify
        // against the original repository before relying on it.
        public static final String COLUMN_PASSWORD = "<PASSWORD>";
        public static final String COLUMN_TITLE = "doctor_title";
        public static final String COLUMN_INSTITUTE = "doctor_institute";
        public static final String COLUMN_IMAGE = "doctor_image";
        public static final String COLUMN_INSTITUTE_ADDRESS = "doctor_institute_address";
    }

    // Maps the stored gender flag to a display string (0 = Male).
    public static String checkGender(int gender) {
        if (gender == 0) {
            return "Male";
        } else {
            return "Female";
        }
    }
}
// Employee administration + attendance (clock in/out, history) controller
// for an Express app backed by Mongoose models.
const _ = require('lodash');
const currency = require('currency.js');
const moment = require('moment-timezone');
const validator = require('validator');
const User = require('../models/User');
const Attendance = require('../models/Attendance');
const AttendanceController = require('./attendance');
const constants = require('../config/constants');

class EmployeeController {
  // Admin-only: list all employees with current-month attendance % and
  // prorated salary. Non-admins are bounced to '/'.
  async getAllEmployees(req, res, next) {
    if (!req.user.isAdmin) {
      return res.redirect('/');
    }
    const users = [];
    try {
      const allUsers = await User.find({});
      const selectedMonthYear = moment().format(constants.FORMAT_YEARMONTH);
      let i = 1;
      // Sequential awaits per user (N+1 queries) — acceptable for small
      // staff lists, hence the lint suppressions below.
      // eslint-disable-next-line no-restricted-syntax
      for (const user of allUsers) {
        // eslint-disable-next-line no-await-in-loop
        const totalAttendance = await AttendanceController.calculateTotalAttendance(user.id, moment(`${selectedMonthYear}-01`).format(constants.FORMAT_YEARMONTH));
        // eslint-disable-next-line no-await-in-loop
        const totalSalary = await AttendanceController.calculateSalaryByTotalAttendance(user.id, moment(`${selectedMonthYear}-01`).format(constants.FORMAT_YEARMONTH), user.profile.salary);
        // Decorate the mongoose doc with view-only fields.
        _.assign(user, {
          idx: i,
          salary: currency(user.profile.salary, constants.LOCALE_CURRENCY).format(),
          totalAttendance: `${totalAttendance}%`,
          totalSalary
        });
        users.push(user);
        i++;
      }
    } catch (error) {
      return next(error);
    }
    res.render('employees', {
      title: 'All Employees',
      users
    });
  }

  // Admin-only: render the edit form for one employee.
  async getEmployeeById(req, res, next) {
    if (!req.user.isAdmin) {
      return res.redirect('/');
    }
    let user;
    try {
      user = await User.findById(req.params.id);
    } catch (error) {
      return next(error);
    }
    // Fall back to the email local-part when no profile name is set.
    const [username] = user.email.split('@');
    res.render('account/employee', {
      title: 'Edit Employee',
      employee: user,
      username: user && user.profile && user.profile.name ? user.profile.name : username,
    });
  }

  // Validate and persist profile edits; validation failures flash errors and
  // redirect back to the edit form.
  async editEmployeeById(req, res, next) {
    const userId = req.params.id;
    const redirectUrl = `/employees/${userId}`;
    const validationErrors = [];
    let { email } = req.body;
    email = validator.normalizeEmail(email, { gmail_remove_dots: false });
    if (!validator.isEmail(email)) {
      validationErrors.push({ msg: 'Please enter a valid email address.' });
    }
    const {
      name, gender, address, department, position, salary, isAdmin, isActive
    } = req.body;
    if (salary <= 0) {
      validationErrors.push({ msg: 'Salary value must be greater than 0.' });
    }
    if (validationErrors.length) {
      req.flash('errors', validationErrors);
      return res.redirect(redirectUrl);
    }
    try {
      const user = await User.findById(userId);
      user.email = email || '';
      user.profile.name = name || '';
      user.profile.gender = gender || '';
      user.profile.address = address || '';
      user.profile.department = department || '';
      user.profile.position = position || '';
      user.profile.salary = salary || 0;
      // Checkboxes post 'on' (or '' in some browsers) when checked.
      user.isAdmin = isAdmin === 'on' || isAdmin === '';
      user.isActive = isActive === 'on' || isActive === '';
      await user.save();
    } catch (error) {
      // 11000 = Mongo duplicate-key error (unique email index).
      if (error.code === 11000) {
        req.flash('errors', {
          msg: 'The email address you have entered is already associated with an account.',
        });
        return res.redirect(redirectUrl);
      }
      return next(error);
    }
    req.flash('success', { msg: `${name}'s profile information has been updated.` });
    res.redirect('/employees');
  }

  // Change an employee's password (min 8 chars, must match confirmation).
  // Hashing is presumably handled by a User model pre-save hook — verify.
  async editEmployeePasswordById(req, res, next) {
    const userId = req.params.id;
    const { password, confirmPassword } = req.body;
    const validationErrors = [];
    if (!validator.isLength(password, { min: 8 })) {
      validationErrors.push({ msg: 'Password must be at least 8 characters long' });
    }
    if (password !== confirmPassword) {
      validationErrors.push({ msg: 'Passwords do not match' });
    }
    if (validationErrors.length) {
      req.flash('errors', validationErrors);
      return res.redirect(`/employees/${userId}`);
    }
    try {
      const user = await User.findById(userId);
      user.password = password;
      await user.save();
    } catch (error) {
      return next(error);
    }
    req.flash('success', { msg: 'Password has been changed.' });
    res.redirect('/employees');
  }

  // Hard-delete an employee account; empName comes from the query string for
  // the flash message only.
  async deleteEmployeeById(req, res, next) {
    const { params, query } = req;
    try {
      await User.deleteOne({ _id: params.id });
    } catch (error) {
      return next(error);
    }
    req.flash('info', { msg: `${query.empName}'s account has been deleted.` });
    res.redirect('/');
  }

  // Record today's clock-in; upserts by (userId, today's scheduleDate) so a
  // repeated clock-in overwrites the timestamp.
  async clockIn(req, res, next) {
    const userId = req.params.id;
    const searchCriteria = {
      userId,
      scheduleDate: moment().format(constants.FORMAT_DATE)
    };
    try {
      let attendance = await Attendance.findOne(searchCriteria);
      if (attendance) {
        await Attendance.updateOne(searchCriteria, { clockInAt: moment() });
      } else {
        attendance = new Attendance({
          userId,
          scheduleDate: moment().format(constants.FORMAT_DATE),
          clockInAt: moment()
        });
        await attendance.save();
      }
    } catch (error) {
      return next(error);
    }
    req.flash('success', { msg: 'Success clock in.' });
    res.redirect('/');
  }

  // Record today's clock-out; mirrors clockIn (also creates the row if the
  // user never clocked in today).
  async clockOut(req, res, next) {
    const userId = req.params.id;
    const searchCriteria = {
      userId,
      scheduleDate: moment().format(constants.FORMAT_DATE)
    };
    try {
      let attendance = await Attendance.findOne(searchCriteria);
      if (attendance) {
        await Attendance.updateOne(searchCriteria, { clockOutAt: moment() });
      } else {
        attendance = new Attendance({
          userId,
          scheduleDate: moment().format(constants.FORMAT_DATE),
          clockOutAt: moment()
        });
        await attendance.save();
      }
    } catch (error) {
      return next(error);
    }
    req.flash('success', { msg: 'Success clock out.' });
    res.redirect('/');
  }

  // Monthly attendance history for one employee: one log row per calendar
  // day, annotated with clock in/out times and a remark, plus totals.
  async getEmployeeAttendance(req, res, next) {
    const { query } = req;
    const userId = req.params.id;
    // Month selector defaults to the current month.
    const selectedMonthYear = query && query.year && query.month ? `${query.year}-${query.month}` : moment().format(constants.FORMAT_YEARMONTH);
    try {
      const user = await User.findById(userId);
      const daysInMonth = moment(selectedMonthYear, constants.FORMAT_YEARMONTH).daysInMonth();
      const attendances = await Attendance.find({
        userId,
        scheduleDate: {
          $gte: moment(`${selectedMonthYear}-01`).format(constants.FORMAT_DATE),
          $lt: moment(`${selectedMonthYear}-${daysInMonth}`).format(constants.FORMAT_DATE)
        }
      });
      // Build a skeleton row for every day of the month.
      const logs = [];
      for (let index = 0; index < daysInMonth; index++) {
        const day = index + 1;
        const element = new Date(`${selectedMonthYear}-${day}`);
        logs.push({
          day: moment(element)
            .format(constants.FORMAT_DAY_NUM),
          attendance_day: moment(element)
            .locale(constants.LOCALE_ID)
            .format(constants.FORMAT_DAY_NAME),
          attendance_date: moment(element)
            .format(constants.FORMAT_DATE)
        });
      }
      // Used as a forEach: mutates each log row with the matching attendance
      // record; the accumulator is ignored.
      _.reduce(logs, (result, value) => {
        const temp = _.find(attendances, (attendance) => moment(attendance.scheduleDate).format(constants.FORMAT_DATE) === value.attendance_date);
        value.clock_in = temp ? moment(temp.clockInAt).tz(constants.LOCALE_TZ).format(constants.FORMAT_TIME) : null;
        value.clock_out = temp ? moment(temp.clockOutAt).tz(constants.LOCALE_TZ).format(constants.FORMAT_TIME) : null;
        // NOTE(review): `temp || temp` is equivalent to `temp` — looks like a
        // leftover from a two-condition check (e.g. clockIn || clockOut);
        // confirm intent. 'Hadir' = present, 'Tanpa keterangan' = absent
        // without notice (Indonesian).
        value.remarks = temp || temp ? 'Hadir' : 'Tanpa keterangan';
        // Future dates get no remark yet.
        if (moment().isBefore(value.attendance_date)) {
          value.remarks = null;
        }
        // Day numbers '6'/'7' are treated as the weekend ('Libur' = day off)
        // — assumes FORMAT_DAY_NUM yields ISO weekday numbers; TODO confirm.
        if (_.includes(['6', '7'], value.day)) {
          value.remarks = 'Libur';
        }
        return result;
      }, {});
      const totalAttendance = await AttendanceController.calculateTotalAttendance(userId, moment(`${selectedMonthYear}-01`).format(constants.FORMAT_YEARMONTH));
      const totalSalary = await AttendanceController.calculateSalaryByTotalAttendance(userId, moment(`${selectedMonthYear}-01`).format(constants.FORMAT_YEARMONTH), user.profile.salary);
      res.render('history', {
        title: 'History',
        schedule: {
          year: query && query.year ? query.year : moment().format(constants.FORMAT_YEAR),
          month: query && query.month ? moment(`${query.year}-${query.month}-01`)
            .locale(constants.LOCALE_ID)
            .format(constants.FORMAT_MONTH) : moment()
            .locale(constants.LOCALE_ID)
            .format(constants.FORMAT_MONTH),
          monthNum: query && query.month ? query.month : moment().format(constants.FORMAT_MONTH_PAD)
        },
        user,
        logs,
        totalAttendance,
        totalSalary
      });
    } catch (error) {
      return next(error);
    }
  }
}

module.exports = new EmployeeController();
<!DOCTYPE html>
<!-- Simple two-column float layout: a fixed-width sidebar (#left) next to a
     wider main column (#right), both centered inside a 960px container. -->
<html>
<head>
  <title>My Page</title>
  <style>
    body { margin: 0; padding: 0; font-family: sans-serif; }
    /* Caps the page width and centers it horizontally. */
    #container { max-width: 960px; margin: 0 auto; }
    /* 300px + 600px columns; the remaining 60px is the gutter between them. */
    #left { float: left; width: 300px; }
    #right { float: right; width: 600px; }
  </style>
</head>
<body>
  <div id="container">
    <header>
      <h1>My Page</h1>
    </header>
    <nav>
      <a href="#menu1">Menu 1</a>
      <a href="#menu2">Menu 2</a>
    </nav>
    <!-- Sidebar column -->
    <div id="left">
      <h2>Left Content</h2>
      Lorem ipsum dolor sit amet, consectetur adipiscing elit.
    </div>
    <!-- Main column -->
    <div id="right">
      <h2>Right Content</h2>
      Mauris non sapien metus. Ut in augue tempor, ornare ligula ut, iaculis urna.
    </div>
  </div>
</body>
</html>
#!/bin/bash ## Copyright (c) 2021 mangalbhaskar. All Rights Reserved. ##__author__ = 'mangalbhaskar' ###---------------------------------------------------------- ## pitivi - video editor ###---------------------------------------------------------- # ## References: ## * http://developer.pitivi.org/Install_with_flatpak.html?gi-language=undefined#getting-flatpak ###---------------------------------------------------------- function pitivi-flatpak-install.main() { # local LSCRIPTS=$( cd "$( dirname "${BASH_SOURCE[0]}")" && pwd ) # source ${LSCRIPTS}/lscripts.config.sh #source ${LSCRIPTS}/flatpak.install.sh flatpak install --user https://flathub.org/repo/appstream/org.pitivi.Pitivi.flatpakref ## run pitvi #flatpak run org.pitivi.Pitivi//stable ## updating #flatpak --user update org.pitivi.Pitivi ## uninstalling #flatpak --user uninstall org.pitivi.Pitivi stable } pitivi-flatpak-install.main "$@"
package fwcd.fructose.swing;

import java.awt.Color;
import java.awt.Dimension;
import java.awt.Graphics2D;
import java.awt.Image;
import java.awt.image.BufferedImage;

import javax.swing.ImageIcon;
import javax.swing.JButton;

/**
 * A {@link JButton} whose icon is produced by rendering a sequence of
 * {@link Renderable}s onto an ARGB image of a fixed size.
 */
public class DrawGraphicsButton extends JButton {
	private static final long serialVersionUID = 1L;

	/**
	 * Creates the button and eagerly renders its icon.
	 *
	 * @param size the preferred size of the button and of the backing image
	 * @param renderables the items painted onto the icon, in argument order
	 */
	public DrawGraphicsButton(Dimension size, Renderable... renderables) {
		setPreferredSize(size);
		// createGraphics() avoids the unchecked cast required by getGraphics().
		BufferedImage image = new BufferedImage(size.width, size.height, BufferedImage.TYPE_INT_ARGB);
		Graphics2D g2d = image.createGraphics();
		try {
			g2d.setColor(Color.BLACK);
			for (Renderable renderable : renderables) {
				renderable.render(g2d, size);
			}
		} finally {
			// BUG FIX: the graphics context was never disposed, leaking the
			// native resources it holds.
			g2d.dispose();
		}
		setIcon(new ImageIcon(image));
	}
}
#include <iostream>
#include <string>
#include <regex>

// Reads a header line containing a "<stars>-<number>" pattern followed by a
// code snippet on the remaining stdin lines. Prints the star count and the
// number of occurrences of the token "namespace" in the snippet.
int main() {
    std::string header;
    std::getline(std::cin, header);

    std::regex starRegex(R"((\d+)-\d+)");
    std::smatch starMatch;

    if (std::regex_search(header, starMatch, starRegex)) {
        int starCount = std::stoi(starMatch[1]);
        std::cout << starCount << " ";

        // BUG FIX: the original called input.substr(input.find('\n') + 1) on a
        // string produced by a single getline(), which can never contain '\n';
        // find() returned npos, npos + 1 wrapped to 0, and the "snippet" was
        // the header line itself. Read the remaining lines explicitly instead.
        std::string codeSnippet;
        std::string line;
        while (std::getline(std::cin, line)) {
            codeSnippet += line;
            codeSnippet += '\n';
        }

        int namespaceCount = 0;
        std::size_t pos = 0;
        while ((pos = codeSnippet.find("namespace", pos)) != std::string::npos) {
            namespaceCount++;
            pos += 9; // Length of "namespace"; skip past this occurrence.
        }
        std::cout << namespaceCount << std::endl;
    }
    return 0;
}
package ca.nova.gestion.model;

import com.fasterxml.jackson.annotation.JsonCreator;
import lombok.AllArgsConstructor;
import lombok.Data;

/**
 * Plain data holder for a client record (id, display name, phone number).
 * Lombok's {@code @Data} generates the getters/setters, equals/hashCode
 * and toString.
 */
@Data
public class Client {

    private int idClient;
    private String name;
    private String phoneNumber;

    /**
     * Canonical constructor; {@code @JsonCreator} also makes it the entry
     * point Jackson uses when deserializing a client from JSON.
     */
    @JsonCreator
    public Client(int idClient, String name, String phoneNumber) {
        this.idClient = idClient;
        this.name = name;
        this.phoneNumber = phoneNumber;
    }
}
import React from "react"
import PropTypes from "prop-types"
import { Markdown } from "../components/markdown"

// Page hero: a fixed-width, lowercased title column next to a
// markdown-rendered intro column. The flex container stacks the two columns
// vertically on small screens and lays them side by side from `md` up.
export const Hero = ({ title, content }) => (
  <section className="module pt-48 md:pt-64">
    <div className="container flex flex-col md:flex-row md:space-x-8">
      <div className="max-w-sm w-full">
        <h1 className="text-3xl font-bold lowercase">{title}</h1>
      </div>
      <div className="w-full">
        <Markdown className="text-2xl font-thin" content={content} />
      </div>
    </div>
  </section>
)

Hero.propTypes = {
  title: PropTypes.string,
  content: PropTypes.string,
}
#!/bin/bash
# Provisions an NFS server exporting /nfssharedata and /nfssharetest to the
# 192.168.56.0/24 network. Must be run by a user with sudo rights.
set -e

sudo mkdir -p /nfssharedata
sudo mkdir -p /nfssharetest
sudo chown nobody:nogroup /nfssharedata
sudo chown nobody:nogroup /nfssharetest
sudo chmod 776 /nfssharedata
sudo chmod 776 /nfssharetest

sudo apt-get install nfs-kernel-server -y

# BUG FIX: 'sudo echo ... >> /etc/exports' does not elevate the redirection --
# the invoking (non-root) shell opens /etc/exports and the append fails with
# "Permission denied". Pipe through 'sudo tee -a' so the write runs as root.
echo "/nfssharedata 192.168.56.0/255.255.255.0(rw,no_root_squash,insecure,async,no_subtree_check,anonuid=1001,anongid=1001)" | sudo tee -a /etc/exports > /dev/null
echo "/nfssharetest 192.168.56.0/255.255.255.0(rw,no_root_squash,insecure,async,no_subtree_check,anonuid=1001,anongid=1001)" | sudo tee -a /etc/exports > /dev/null

# Re-export all shares and restart the server so the new entries take effect.
sudo exportfs -ra
sudo systemctl restart nfs-kernel-server
/*jslint browser: true*/
/*global angular*/

// Root AngularJS module for the STS app: Angular Material UI, routing,
// authentication (satellizer), charting (nvd3), analytics, and the app's
// own feature modules (header, user, show, home, search, match).
var stsApp = angular.module('stsApp', [
    'ngMaterial',
    'ngRoute',
    'ngAnimate',
    'ngMessages',
    'ngMdIcons',
    'satellizer',
    'angularjs-gravatardirective',
    'angular-loading-bar',
    'nvd3',
    'angulartics',
    'angulartics.google.analytics',
    'stsProvider.search',
    'stsDirective.match',
    'stsApp.header',
    'stsApp.user',
    'stsApp.show',
    'stsApp.home'
]);

stsApp
    .config(
        [
            '$mdThemingProvider', '$routeProvider', '$locationProvider', '$authProvider',
            function ($mdThemingProvider, $routeProvider, $locationProvider, $authProvider) {
                'use strict';

                /* Initialize material theme */
                $mdThemingProvider.theme('default')
                    .primaryPalette('indigo')
                    .accentPalette('teal');

                /* Initialize routing (html5Mode drops the '#' from URLs; the
                   server must rewrite unknown paths to index.html). */
                $locationProvider.html5Mode(true);
                $routeProvider.otherwise({
                    redirectTo: '/'
                });

                /* Initialize authentication */
                $authProvider.storageType = 'localStorage';

                /* Configuration for authentication */
                $authProvider.loginOnSignup = false;
                $authProvider.loginRoute = '/signin';
                $authProvider.loginUrl = '/api/user/signin';
                $authProvider.signupUrl = '/api/user/signup';
            }
        ]
    )
    .run(
        [
            '$rootScope', '$location', '$auth', '$mdSidenav',
            function ($rootScope, $location, $auth, $mdSidenav) {
                'use strict';

                // Route guard: routes may declare an 'auth' flag on their
                // definition. auth === false -> anonymous-only (redirect home
                // when signed in); auth === true -> signed-in-only (redirect
                // home when anonymous).
                $rootScope.$on('$routeChangeStart', function (event, next, current) {
                    if (next.hasOwnProperty('$$route') && next.$$route.hasOwnProperty('auth')) {
                        if (false === next.$$route.auth && $auth.isAuthenticated()) {
                            $location.url('/');
                        }
                        if (true === next.$$route.auth && !$auth.isAuthenticated()) {
                            $location.url('/');
                        }
                    }
                });
            }
        ]
    );
"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.uFE4E5 = void 0; var uFE4E5 = { "viewBox": "0 0 2600 2760.837", "children": [{ "name": "path", "attribs": { "d": "M2503 1767q3 9 4 17.5t1 18.5q0 27-16 57.5T2330.5 2013 2002 2202t-417 67q-147 0-310.5-22t-253.5-22q-193 0-350.5 61T446 2401t-112 54q-46 0-78-29t-36-73L93 1081q0-12-1-17 3-31 12.5-49.5t43-63T247 850t157-101.5T613.5 675 884 646q135 0 250 8t218 8q231 0 350-57.5T1884 484t86.5-67.5 33.5-4.5q41 0 69.5 22.5T2112 492zm-107 34L2004 526q-131 131-274 190t-378 59q-103 0-217-7.5T884 760q-230 0-404 78t-275 229l128 1275q125-104 306.5-167.5T1021 2111q85 0 251 22t313 22q253 0 440.5-84t370.5-270zm-857-627q158 114 158 290 0 137-113 233.5t-265 96.5q-212 0-361.5-115.5T808 1378q0-143 105.5-230.5T1184 1060q197 0 355 114z" }, "children": [] }] }; exports.uFE4E5 = uFE4E5;
class Inventory:
    """Track item quantities keyed by item name."""

    def __init__(self):
        # Mapping of item name -> quantity on hand.
        self.items = {}

    def add_item(self, name, quantity):
        """Record *quantity* for *name*, replacing any existing entry."""
        self.items[name] = quantity

    def update_quantity(self, name, quantity):
        """Set the quantity for *name*; unknown names are silently ignored."""
        if name not in self.items:
            return
        self.items[name] = quantity

    def return_item(self, name):
        """Return the quantity recorded for *name*, or 0 when absent."""
        return self.items.get(name, 0)
<gh_stars>0
import React from 'react';
import { Button, Table, Modal } from 'react-bootstrap';
import AddModal from 'components/Modals/AddUserModal';
import EditModal from 'components/Modals/EditUserModal';
import ViewModal from 'components/Modals/ViewUserModal';
import DeleteModal from 'components/Modals/DeleteUserModal';
import UserDefault from 'assets/img/user-default.png';

// CRUD table of users with Add/Edit/View/Delete modals. Modal visibility is
// held in local state; the user list and the data-fetch/delete callbacks come
// in through props.
class Users extends React.Component { // eslint-disable-line react/prefer-stateless-function
  constructor(props, context) {
    super(props, context);
    this.state = {
      toggleAddModal: false,
      toggleEditModal: false,
      toggleViewModal: false,
      toggleDeleteModal: false,
      deleteNodeId: null
    };
    this.openAddModal = this.openAddModal.bind(this);
    this.openEditModal = this.openEditModal.bind(this);
    this.openViewModal = this.openViewModal.bind(this);
    this.openDeleteModal = this.openDeleteModal.bind(this);
    this.closeModal = this.closeModal.bind(this);
  }

  openAddModal = () => {
    this.setState({
      ...this.state,
      toggleAddModal: true,
    });
  }

  openEditModal = () => {
    this.setState({
      ...this.state,
      toggleEditModal: true,
    });
  }

  // Fetches the user's details first so the modal opens with data present.
  openViewModal = (userId) => {
    this.props.getUserData(userId).then(() => {
      this.setState({
        ...this.state,
        toggleViewModal: true,
      });
    });
  }

  // NOTE(review): render() passes the map index ("key") here while
  // openViewModal receives user.id -- confirm which identifier
  // props.handleDeleteUser actually expects.
  openDeleteModal = (nodeId) => {
    this.setState({
      ...this.state,
      toggleDeleteModal: true,
      deleteNodeId: nodeId
    });
  }

  // Delegates the delete to the parent, then closes the modal on success.
  handleDeleteUser = () => {
    this.props.handleDeleteUser(this.state.deleteNodeId).then(() => {
      this.setState({
        ...this.state,
        toggleDeleteModal: false,
        deleteNodeId: null
      });
    })
  }

  closeModal = () => {
    this.setState({
      ...this.state,
      toggleAddModal: false,
      toggleEditModal: false,
      toggleViewModal: false,
      toggleDeleteModal: false,
    });
  }

  render() {
    const { hasErrored, isLoading, users, selectedUser } = this.props;
    return (
      <div>
        <Button bsStyle="primary" className="pull-right" onClick={this.openAddModal}> Add User</Button>
        { hasErrored && <h3>Sorry! There was an error loading the users</h3> }
        <br />
        <br />
        { !hasErrored &&
        <Table className="table">
          <thead>
            <tr>
              <th>Profile</th>
              <th>First Name</th>
              <th>Last Name</th>
              <th>Email</th>
              <th>Actions</th>
            </tr>
          </thead>
          <tbody>
            { isLoading && !users && <tr><td colSpan="6">Loading Users ...</td></tr> }
            { !isLoading && users && users.length > 0 && users.map((user, key) => (
              <tr key={user.id}>
                <td><img src={UserDefault} alt="Riverview User" /></td>
                <td>{user.first_name}</td>
                <td>{user.last_name}</td>
                <td>{user.email}</td>
                <td>
                  <Button bsStyle="link" onClick={() => this.openViewModal(user.id)}> View </Button>
                  <Button bsStyle="link" onClick={this.openEditModal}> Edit </Button>
                  <Button bsStyle="link" onClick={() => this.openDeleteModal(key)}> Delete </Button>
                </td>
              </tr>
            ))}
          </tbody>
        </Table>
        }
        <div>
          <AddModal toggleModal={this.state.toggleAddModal} closeModal={this.closeModal} />
          <EditModal toggleModal={this.state.toggleEditModal} closeModal={this.closeModal} />
          <ViewModal toggleModal={this.state.toggleViewModal} closeModal={this.closeModal} selectedUser={this.props.selectedUser} />
          <DeleteModal toggleModal={this.state.toggleDeleteModal} deleteUser={this.handleDeleteUser} closeModal={this.closeModal} />
        </div>
      </div>
    );
  }
}

export default Users;
def generate_user_cart_redis_key(user_id):
    """
    Generates the name of the Hash used for storing User cart in Redis
    """
    # NOTE(review): BUG -- `self` is undefined in this module-level function,
    # so any call with a truthy user_id raises NameError. This looks like it
    # was lifted out of a class without rewriting the prefix lookup; the
    # correct fix needs access to __get_user_redis_key_prefix's owner.
    if user_id:
        return self.__get_user_redis_key_prefix() + ":" + str(user_id)
    else:
        # Falsy user_id (None, 0, "") is rejected outright.
        raise ValueError("User ID is required to generate the Redis key for user cart")
package com.bumptech.glide.load.model;

import androidx.annotation.NonNull;
import com.bumptech.glide.Priority;
import com.bumptech.glide.load.DataSource;
import com.bumptech.glide.load.Options;
import com.bumptech.glide.load.data.DataFetcher;
import com.bumptech.glide.signature.ObjectKey;

/**
 * A set of helper classes that performs no loading and instead always returns the given model as
 * the data to decode.
 *
 * @param <Model> The type of model that will also be returned as decodable data.
 */
public class UnitModelLoader<Model> implements ModelLoader<Model, Model> {
  // Stateless, so a single instance can be shared across all model types.
  @SuppressWarnings("deprecation")
  private static final UnitModelLoader<?> INSTANCE = new UnitModelLoader<>();

  @SuppressWarnings("unchecked")
  public static <T> UnitModelLoader<T> getInstance() {
    return (UnitModelLoader<T>) INSTANCE;
  }

  /**
   * @deprecated Use {@link #getInstance()} instead.
   */
  // Need constructor to document deprecation, will be removed, when constructor is privatized.
  @SuppressWarnings({"PMD.UnnecessaryConstructor", "DeprecatedIsStillUsed"})
  @Deprecated
  public UnitModelLoader() {
    // Intentionally empty.
  }

  @Override
  public LoadData<Model> buildLoadData(
      @NonNull Model model, int width, int height, @NonNull Options options) {
    // The model itself is handed straight back as the "fetched" data.
    return new LoadData<>(new ObjectKey(model), new UnitFetcher<>(model));
  }

  @Override
  public boolean handles(@NonNull Model model) {
    return true;
  }

  // Fetcher that immediately completes with the model it was given; there is
  // nothing to clean up or cancel.
  private static class UnitFetcher<Model> implements DataFetcher<Model> {

    private final Model resource;

    UnitFetcher(Model resource) {
      this.resource = resource;
    }

    @Override
    public void loadData(
        @NonNull Priority priority, @NonNull DataCallback<? super Model> callback) {
      callback.onDataReady(resource);
    }

    @Override
    public void cleanup() {
      // Do nothing.
    }

    @Override
    public void cancel() {
      // Do nothing.
    }

    @NonNull
    @SuppressWarnings("unchecked")
    @Override
    public Class<Model> getDataClass() {
      return (Class<Model>) resource.getClass();
    }

    @NonNull
    @Override
    public DataSource getDataSource() {
      return DataSource.LOCAL;
    }
  }

  /**
   * Factory for producing {@link com.bumptech.glide.load.model.UnitModelLoader}s.
   *
   * @param <Model> The type of model that will also be returned as decodable data.
   */
  // PMD.SingleMethodSingleton false positive: https://github.com/pmd/pmd/issues/816
  @SuppressWarnings("PMD.SingleMethodSingleton")
  public static class Factory<Model> implements ModelLoaderFactory<Model, Model> {
    @SuppressWarnings("deprecation")
    private static final Factory<?> FACTORY = new Factory<>();

    @SuppressWarnings("unchecked")
    public static <T> Factory<T> getInstance() {
      return (Factory<T>) FACTORY;
    }

    /**
     * @deprecated Use {@link #getInstance()} instead.
     */
    // Need constructor to document deprecation, will be removed, when constructor is privatized.
    @SuppressWarnings("PMD.UnnecessaryConstructor")
    @Deprecated
    public Factory() {
      // Intentionally empty.
    }

    @NonNull
    @Override
    public ModelLoader<Model, Model> build(MultiModelLoaderFactory multiFactory) {
      return UnitModelLoader.getInstance();
    }

    @Override
    public void teardown() {
      // Do nothing.
    }
  }
}
<reponame>mul53/bluzelle
# frozen_string_literal: true

require 'rest-client'
require 'json'
require 'bluzelle/utils'
require 'bluzelle/constants'

module Bluzelle
  module Swarm
    # Client for a Cosmos REST endpoint: derives the account address from a
    # mnemonic, tracks the account number/sequence, and signs and broadcasts
    # transactions (with one automatic retry on sequence mismatch).
    class Cosmos
      include Bluzelle::Constants
      include Bluzelle::Utils

      attr_reader :mnemonic, :endpoint, :address, :chain_id
      attr_accessor :account_info

      def initialize(options = {})
        @mnemonic = options[:mnemonic]
        @chain_id = options[:chain_id]
        @endpoint = options[:endpoint]
        @account_info = {}

        @private_key = get_ec_private_key(@mnemonic)
        @address = address_from_mnemonic

        # Prime account_number/sequence so the first broadcast can be signed.
        account
      end

      # Performs a GET against the configured endpoint and returns the
      # decoded response.
      def query(endpoint)
        Request.execute(method: 'get', url: "#{@endpoint}/#{endpoint}")
      end

      # Builds a transaction, fills in gas/fee from gas_info, sorts the
      # payload (required for deterministic signing) and broadcasts it.
      def send_transaction(method, endpoint, data, gas_info)
        txn = Transaction.new(method, endpoint, data)
        txn.set_gas(gas_info)

        # fetch skeleton
        skeleton = fetch_txn_skeleton(txn)
        # set gas
        skeleton = update_gas(txn, skeleton)
        # sort
        skeleton = sort_hash(skeleton)

        broadcast_transaction(Transaction.new('post', TX_COMMAND, skeleton))
      end

      private

      # Account query: refreshes cached account_number/sequence.
      def account
        url = "#{@endpoint}/auth/accounts/#{@address}"
        res = Request.execute(method: 'get', url: url)
        set_account_details(res.dig('result', 'value'))
      end

      # Broadcasts a transaction
      #
      # @param [Bluzelle::Swarm::Transaction] txn
      def broadcast_transaction(txn)
        # Random memo makes each broadcast's signed bytes unique.
        txn.data['memo'] = make_random_string
        txn.data['signatures'] = [{
          'account_number' => @account_info['account_number'].to_s,
          'pub_key' => {
            'type' => 'tendermint/PubKeySecp256k1',
            'value' => to_base64(
              [compressed_pub_key(open_key(@private_key))].pack('H*')
            )
          },
          'sequence' => @account_info['sequence'].to_s,
          'signature' => sign_transaction(txn.data)
        }]

        url = "#{@endpoint}/#{txn.endpoint}"
        payload = { 'mode' => 'block', 'tx' => txn.data }

        res = Request.execute(method: txn.method, url: url, payload: payload)

        # A missing 'code' field means the node accepted the transaction.
        if res.dig('code').nil?
          update_sequence
          decode_json(hex_to_bin(res.dig('data'))) if res.key?('data')
        else
          handle_broadcast_error(res.dig('raw_log'), txn)
        end
      end

      # Updates account sequence and retries broadcast
      #
      # @param [Bluzelle::Swarm::Transaction] txn
      def update_account_sequence(txn)
        if txn.retries_left != 0
          # Re-read the on-chain sequence before retrying.
          account
          retry_broadcast(txn)
        else
          raise Error::ApiError, 'Invalid chain id'
        end
      end

      # Fetch transaction skeleton
      def fetch_txn_skeleton(txn)
        url = "#{@endpoint}/#{txn.endpoint}"
        data = Request.execute(
          method: txn.method,
          url: url,
          payload: txn.data,
          headers: { 'Content-Type': 'application/x-www-form-urlencoded' }
        )
        data['value']
      end

      # Check if address and mnemonic are valid
      def address_from_mnemonic
        pub_key = get_ec_public_key_from_priv(@private_key)
        get_address(pub_key)
      end

      # Updates account details; returns true when the cached sequence
      # actually changed.
      #
      # @param [Hash] data
      def set_account_details(data)
        account_number = data.dig('account_number')
        sequence = data.dig('sequence')

        @account_info['account_number'] = account_number

        if @account_info['sequence'] != sequence
          @account_info['sequence'] = sequence
          return true
        end

        false
      end

      # Retry broadcast after failure
      #
      # @param [Bluzelle::Swarm::Transaction]
      def retry_broadcast(txn)
        txn.retries_left -= 1
        sleep BROADCAST_RETRY_SECONDS
        broadcast_transaction(txn)
      end

      # Handle broadcast error
      #
      # @param [String] raw_log
      # @param [Bluzelle::Swarm::Transaction] txn
      def handle_broadcast_error(raw_log, txn)
        if raw_log.include?('signature verification failed')
          # Usually a stale sequence number: refresh and retry.
          update_account_sequence(txn)
        else
          raise Error::ApiError, extract_error_message(raw_log)
        end
      end

      # Update account sequence
      def update_sequence
        @account_info['sequence'] = @account_info['sequence'].to_i + 1
      end

      # Signs a transaction
      #
      # @param txn
      def sign_transaction(txn)
        payload = {
          'account_number' => @account_info['account_number'].to_s,
          'chain_id' => @chain_id,
          'fee' => txn['fee'],
          'memo' => txn['memo'],
          'msgs' => txn['msg'],
          'sequence' => @account_info['sequence'].to_s
        }

        to_base64(ecdsa_sign(encode_json(payload), @private_key))
      end

      # Applies the transaction's gas settings to the fee block of the
      # skeleton returned by the node.
      def update_gas(txn, data)
        res = data.clone

        if res.dig('fee', 'gas').to_i > txn.max_gas && txn.max_gas != 0
          res['fee']['gas'] = txn.max_gas.to_s
        end

        if !txn.max_fee.nil?
          res['fee']['amount'] = [{ 'denom': TOKEN_NAME, 'amount': txn.max_fee.to_s }]
        elsif !txn.gas_price.nil?
          # NOTE(review): res['fee']['gas'] appears to be a String here (it is
          # assigned with .to_s above and compared via .to_i) -- String#* with
          # an Integer *repeats* the string rather than multiplying. Confirm
          # whether this should be res['fee']['gas'].to_i * txn.gas_price.
          res['fee']['amount'] = [{ 'denom': TOKEN_NAME, 'amount': (res['fee']['gas'] * txn.gas_price).to_s }]
        end

        res
      end

      # NOTE(review): no caller of update_memo is visible in this file;
      # broadcast_transaction sets the memo itself. Possibly dead code.
      def update_memo(txn)
        txn['memo'] = make_random_string
        txn
      end
    end
  end
end
<filename>src/router/seckill.js
// Route table for the flash-sale ("秒杀") page. The component is declared as
// a dynamic import so it is code-split and only fetched when the route is
// first visited.
const seckill = () => import('@/pages/seckill/index.vue')

const seckillRouter = [
  {
    path: '/seckill',
    component: seckill,
    meta: {
      title: '秒杀'
    }
  }
]

export default seckillRouter
def check_word_exists(word, string):
    """Return True if *word* occurs as a substring of *string*.

    Note: this is a plain substring test, not a whole-word match, so
    ``check_word_exists("cat", "concatenate")`` is True. An empty *word*
    is contained in every string.
    """
    # `in` already yields a bool; the explicit if/True/False branches were
    # redundant.
    return word in string
#!/usr/bin/env bash # # Copyright (c) 2018 The Readercoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. # # Make sure only lowercase alphanumerics (a-z0-9), underscores (_), # hyphens (-) and dots (.) are used in source code filenames. export LC_ALL=C EXIT_CODE=0 OUTPUT=$(git ls-files --full-name -- "*.[cC][pP][pP]" "*.[hH]" "*.[pP][yY]" "*.[sS][hH]" | \ grep -vE '^[a-z0-9_./-]+$' | \ grep -vE '^src/(secp256k1|univalue)/') if [[ ${OUTPUT} != "" ]]; then echo "Use only lowercase alphanumerics (a-z0-9), underscores (_), hyphens (-) and dots (.)" echo "in source code filenames:" echo echo "${OUTPUT}" EXIT_CODE=1 fi exit ${EXIT_CODE}
#!/bin/bash # Runs benchmark and reports time to convergence #pushd pytorch # Single GPU training time python ./tools/train_mlperf.py --config-file "configs/e2e_mask_rcnn_R_50_FPN_1x.yaml" \ SOLVER.IMS_PER_BATCH 2 TEST.IMS_PER_BATCH 1 SOLVER.MAX_ITER 720000 SOLVER.STEPS "(480000, 640000)" SOLVER.BASE_LR 0.0025 #popd
#!/bin/bash
# Configures a Data Science VM for the notebook environment.
#
# Arguments:
#   $1 - Databricks host URL
#   $2 - Databricks access token
#   $3 - Azure storage account name
#   $4 - Azure storage account key
#
# View extension logs by running
# sudo cat /var/log/azure/Microsoft.OSTCExtensions.CustomScriptForLinux/1.5.2.2/extension.log
# (the version may be different)

# Last "regular" account (uid >= 1000) in /etc/passwd, as "<name> <home>".
user=$(awk -F: '$3 >= 1000 {print $1, $6}' /etc/passwd | tail -n 1)
echo "$user"
username=${user% *}
homedir=${user#* }
basedir=$PWD

# Replace the user's notebooks with the packaged ones.
# BUG FIX: the pushd is now guarded -- previously a failed pushd would have
# run 'rm -rf *' in whatever directory the script happened to be in.
pushd "$homedir/notebooks" || exit 1
rm -rf ./*
unzip "$basedir/Notebooks.zip"
chown "$username:$username" ./*
popd

# Configuration consumed by the data-ingestion notebooks.
cat << EOF > "$homedir/NotebookEnvironmentVariablesConfig.json"
{
    "DataIngestion" : {
        "STORAGE_ACCOUNT_NAME" : "$3",
        "STORAGE_ACCOUNT_KEY" : "$4",
        "TELEMETRY_CONTAINER_NAME" : "telemetry",
        "LOG_TABLE_NAME" : "Logs",
        "DATA_ROOT_FOLDER" : "$homedir"
    }
}
EOF

mv "$basedir/spark-avro_2.11-4.0.0.jar" /dsvm/tools/spark/current/jars/

source /anaconda/bin/activate py35
pip install --upgrade pip
#to install ggplot in DSVM uncomment the following
#conda remove -n py35 -y pandas
#conda install -n py35 -y pandas==0.20.3
#conda install -n py35 -y -c conda-forge ggplot
conda install -n py35 -y python-snappy
pip install imblearn
pip install --upgrade --extra-index-url https://azuremlsdktestpypi.azureedge.net/sdk-release/Preview/E7501C02541B433786111FE8E140CAA1 azureml-sdk
pip install --upgrade databricks-cli

# Databricks CLI credentials for the provisioned user.
cat << EOF > "$homedir/.databrickscfg"
[DEFAULT]
host = $1
token = $2
EOF
#!/bin/sh
# Container entrypoint: run every drop-in init script, then either loop
# forever (argument "init-loop", useful as a keep-alive command) or exec the
# given command so it becomes PID 1.

for f in /docker-entrypoint.d/*.sh; do
    # BUG FIX: with no matching files the glob stays literal and the loop
    # would try to source the non-existent path '/docker-entrypoint.d/*.sh';
    # skip anything that is not a regular file.
    [ -f "$f" ] || continue
    echo "$0: running $f"
    . "$f"
done

if [ "$1" = "init-loop" ]; then
    echo "Looping forever..." >&2
    while :; do sleep 1; done
else
    exec "$@"
fi
#pragma once #include "Vector3.hpp" #include "Vector4.hpp" #include "AngleRadians.hpp" #include <cstring> #define XAxisX 0 #define XAxisY 4 #define XAxisZ 8 #define XAxisW 12 #define YAxisX 1 #define YAxisY 5 #define YAxisZ 9 #define YAxisW 13 #define ZAxisX 2 #define ZAxisY 6 #define ZAxisZ 10 #define ZAxisW 14 #define WAxisX 6 #define WAxisY 7 #define WAxisZ 11 #define WAxisW 15 #define ScaleX 0 #define ScaleY 5 #define ScaleZ 10 #define ScaleW 15 #define TransformX 12 #define TransformY 13 #define TransformZ 14 #define TransformW 15 namespace BF { template<class NumberType> struct Matrix4x4 { public: /* XA YA ZA Or 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 */ NumberType Data[16]; Matrix4x4() { Reset(); } void Reset() { memset(Data, 0, sizeof(NumberType) * 16); Data[0] = 1; Data[5] = 1; Data[10] = 1; Data[15] = 1; } void Multiply(Matrix4x4<NumberType>& matrixB) { NumberType a = this->Data[0]; NumberType b = this->Data[1]; NumberType c = this->Data[2]; NumberType d = this->Data[3]; NumberType e = this->Data[4]; NumberType f = this->Data[5]; NumberType g = this->Data[6]; NumberType h = this->Data[7]; NumberType i = this->Data[8]; NumberType j = this->Data[9]; NumberType k = this->Data[10]; NumberType l = this->Data[11]; NumberType m = this->Data[12]; NumberType n = this->Data[13]; NumberType o = this->Data[14]; NumberType p = this->Data[15]; NumberType A = matrixB.Data[0]; NumberType B = matrixB.Data[1]; NumberType C = matrixB.Data[2]; NumberType D = matrixB.Data[3]; NumberType E = matrixB.Data[4]; NumberType F = matrixB.Data[5]; NumberType G = matrixB.Data[6]; NumberType H = matrixB.Data[7]; NumberType I = matrixB.Data[8]; NumberType J = matrixB.Data[9]; NumberType K = matrixB.Data[10]; NumberType L = matrixB.Data[11]; NumberType M = matrixB.Data[12]; NumberType N = matrixB.Data[13]; NumberType O = matrixB.Data[14]; NumberType P = matrixB.Data[15]; NumberType& resultA = Data[0]; NumberType& resultB = Data[1]; NumberType& resultC = Data[2]; NumberType& resultD = 
Data[3]; NumberType& resultE = Data[4]; NumberType& resultF = Data[5]; NumberType& resultG = Data[6]; NumberType& resultH = Data[7]; NumberType& resultI = Data[8]; NumberType& resultJ = Data[9]; NumberType& resultK = Data[10]; NumberType& resultL = Data[11]; NumberType& resultM = Data[12]; NumberType& resultN = Data[13]; NumberType& resultO = Data[14]; NumberType& resultP = Data[15]; resultA = a * A + b * E + c * I + d * M; resultB = e * A + f * E + g * I + h * M; resultC = i * A + j * E + k * I + l * M; resultD = m * A + n * E + o * I + p * M; resultE = a * B + b * F + c * J + d * N; resultF = e * B + f * F + g * J + h * N; resultG = i * B + j * F + k * J + l * N; resultH = m * B + n * F + o * J + p * N; resultI = a * C + b * G + c * K + d * O; resultJ = e * C + f * G + g * K + h * O; resultK = i * C + j * G + k * K + l * O; resultL = m * C + n * G + o * K + p * O; resultM = a * D + b * H + c * L + d * P; resultN = e * D + f * H + g * L + h * P; resultO = i * D + j * H + k * L + l * P; resultP = m * D + n * H + o * L + p * P; } void ResetForthAxis() { this->Data[3] = 0; this->Data[7] = 0; this->Data[11] = 0; this->Data[12] = 0; this->Data[13] = 0; this->Data[14] = 0; this->Data[15] = 1; } void Add(Matrix4x4<NumberType>& matrixB) { NumberType a = this->Data[0]; NumberType b = this->Data[1]; NumberType c = this->Data[2]; NumberType d = this->Data[3]; NumberType e = this->Data[4]; NumberType f = this->Data[5]; NumberType g = this->Data[6]; NumberType h = this->Data[7]; NumberType i = this->Data[8]; NumberType j = this->Data[9]; NumberType k = this->Data[10]; NumberType l = this->Data[11]; NumberType m = this->Data[12]; NumberType n = this->Data[13]; NumberType o = this->Data[14]; NumberType p = this->Data[15]; NumberType A = matrixB.Data[0]; NumberType B = matrixB.Data[1]; NumberType C = matrixB.Data[2]; NumberType D = matrixB.Data[3]; NumberType E = matrixB.Data[4]; NumberType F = matrixB.Data[5]; NumberType G = matrixB.Data[6]; NumberType H = matrixB.Data[7]; 
NumberType I = matrixB.Data[8]; NumberType J = matrixB.Data[9]; NumberType K = matrixB.Data[10]; NumberType L = matrixB.Data[11]; NumberType M = matrixB.Data[12]; NumberType N = matrixB.Data[13]; NumberType O = matrixB.Data[14]; NumberType P = matrixB.Data[15]; NumberType& resultA = Data[0]; NumberType& resultB = Data[1]; NumberType& resultC = Data[2]; NumberType& resultD = Data[3]; NumberType& resultE = Data[4]; NumberType& resultF = Data[5]; NumberType& resultG = Data[6]; NumberType& resultH = Data[7]; NumberType& resultI = Data[8]; NumberType& resultJ = Data[9]; NumberType& resultK = Data[10]; NumberType& resultL = Data[11]; NumberType& resultM = Data[12]; NumberType& resultN = Data[13]; NumberType& resultO = Data[14]; NumberType& resultP = Data[15]; resultA = a + A; resultB = b + B; resultC = c + C; resultD = d + D; resultE = e + E; resultF = f + F; resultG = g + G; resultH = h + H; resultI = i + I; resultJ = j + J; resultK = k + K; resultL = l + L; resultM = m + M; resultN = n + N; resultO = o + O; resultP = p + P; } Matrix4x4<NumberType> operator*(Matrix4x4<NumberType> matrixB) { Matrix4x4<NumberType> matrix; matrix.Multiply(matrixB); return matrix; } void operator=(Matrix4x4<NumberType> matrix4x4) { memcmp(this->Data, matrix4x4.Data, 16 * sizeof(NumberType)); } Matrix4x4<NumberType> operator*=(Matrix4x4<NumberType> matrixB) { return (*this) * matrixB; } void Move(NumberType x, NumberType y, NumberType z) { Data[TransformX] += x; Data[TransformY] += y; Data[TransformZ] += z; } void Move(Vector3<NumberType> vector3) { Data[TransformX] += vector3.X; Data[TransformY] += vector3.Y; Data[TransformZ] += vector3.Z; } void MoveTo(Vector3<NumberType> vector) { Data[TransformX] = vector.X; Data[TransformY] = vector.Y; Data[TransformZ] = vector.Z; } void MoveTo(NumberType x, NumberType y, NumberType z) { Data[TransformX] = x; Data[TransformY] = y; Data[TransformZ] = z; } void Rotate(Vector3<NumberType> vector) { Rotate(vector.X, vector.Y, vector.Z); } void 
Rotate(NumberType x, NumberType y, NumberType z) { Matrix4x4<NumberType> result; Matrix4x4<NumberType> xRotation; Matrix4x4<NumberType> yRotation; Matrix4x4<NumberType> zRotation; //-----<X ROT>----- { float cosResult = Math::Cosinus(x); float sinResult = Math::Sinus(x); xRotation.Data[5] = cosResult; xRotation.Data[6] = sinResult; xRotation.Data[9] = -sinResult; xRotation.Data[10] = cosResult; } //-----<X ROT>----- { float cosResult = Math::Cosinus(y); float sinResult = Math::Sinus(y); yRotation.Data[0] = cosResult; yRotation.Data[2] = -sinResult; yRotation.Data[8] = sinResult; yRotation.Data[10] = cosResult; } //-----<X ROT>----- { float cosResult = Math::Cosinus(z); float sinResult = Math::Sinus(z); zRotation.Data[0] = cosResult; zRotation.Data[1] = -sinResult; zRotation.Data[4] = sinResult; zRotation.Data[5] = cosResult; } result = xRotation;// *yRotation* zRotation; Multiply(xRotation); } void RotateTo(NumberType x, NumberType y, NumberType z) { } void Orbit(NumberType x, NumberType y, NumberType z) { } void Scale(NumberType scalar) { Data[ScaleX] *= scalar; Data[ScaleY] *= scalar; Data[ScaleZ] *= scalar; } void Scale(NumberType x, NumberType y, NumberType z) { Data[ScaleX] *= x; Data[ScaleY] *= y; Data[ScaleZ] *= z; } void ScaleSet(NumberType x, NumberType y, NumberType z) { Data[ScaleX] = x; Data[ScaleY] = y; Data[ScaleZ] = z; } void ScaleSet(Vector3<NumberType> vector) { Data[ScaleX] = vector.X; Data[ScaleY] = vector.Y; Data[ScaleZ] = vector.Z; } void Scale(Vector3<NumberType> vector) { Data[ScaleX] *= vector.X; Data[ScaleY] *= vector.Y; Data[ScaleZ] *= vector.Z; } void LookAt(NumberType x, NumberType y, NumberType z) { } void LookAt(Vector3<NumberType> eye, Vector3<NumberType> center, Vector3<NumberType> up) { Vector3<NumberType> centereye = center - eye; Vector3<NumberType> f = Vector3<NumberType>::Normalize(centereye); Vector3<NumberType> frontUpCross; Vector3<NumberType>::CrossProduct(f, up, frontUpCross); Vector3<NumberType> s = 
Vector3<NumberType>::Normalize(frontUpCross); Vector3<NumberType> u; Vector3<NumberType>::CrossProduct(s, f, u); Data[XAxisX] = s.X; Data[XAxisY] = s.Y; Data[XAxisZ] = s.Z; Data[YAxisX] = u.X; Data[YAxisY] = u.Y; Data[YAxisZ] = u.Z; Data[ZAxisX] = -f.X; Data[ZAxisY] = -f.Y; Data[ZAxisZ] = -f.Z; Data[TransformX] = -Vector3<NumberType>::DotProduct(s, eye); Data[TransformY] = -Vector3<NumberType>::DotProduct(u, eye); Data[TransformZ] = Vector3<NumberType>::DotProduct(f, eye); } // Flip matrix diagonally void Transpose() { //NumberType a = this->Data[0]; NumberType b = this->Data[1]; NumberType c = this->Data[2]; NumberType d = this->Data[3]; NumberType e = this->Data[4]; //NumberType f = this->Data[5]; NumberType g = this->Data[6]; NumberType h = this->Data[7]; NumberType i = this->Data[8]; NumberType j = this->Data[9]; //NumberType k = this->Data[10]; NumberType l = this->Data[11]; NumberType m = this->Data[12]; NumberType n = this->Data[13]; NumberType o = this->Data[14]; //NumberType p = this->Data[15]; //this->Data[0] = a; this->Data[1] = e; this->Data[2] = i; this->Data[3] = m; this->Data[4] = b; //this->Data[5] = f; this->Data[6] = j; this->Data[7] = n; this->Data[8] = c; this->Data[9] = g; //this->Data[10] = k; this->Data[11] = o; this->Data[12] = d; this->Data[13] = h; this->Data[14] = l; //this->Data[15] = p; } // returns sucessful bool Inverse() { Matrix4x4<NumberType> result; double det = 0; result[0] = Data[5] * Data[10] * Data[15] - Data[5] * Data[11] * Data[14] - Data[9] * Data[6] * Data[15] + Data[9] * Data[7] * Data[14] + Data[13] * Data[6] * Data[11] - Data[13] * Data[7] * Data[10]; result[4] = -Data[4] * Data[10] * Data[15] + Data[4] * Data[11] * Data[14] + Data[8] * Data[6] * Data[15] - Data[8] * Data[7] * Data[14] - Data[12] * Data[6] * Data[11] + Data[12] * Data[7] * Data[10]; result[8] = Data[4] * Data[9] * Data[15] - Data[4] * Data[11] * Data[13] - Data[8] * Data[5] * Data[15] + Data[8] * Data[7] * Data[13] + Data[12] * Data[5] * Data[11] - 
Data[12] * Data[7] * Data[9]; result[12] = -Data[4] * Data[9] * Data[14] + Data[4] * Data[10] * Data[13] + Data[8] * Data[5] * Data[14] - Data[8] * Data[6] * Data[13] - Data[12] * Data[5] * Data[10] + Data[12] * Data[6] * Data[9]; result[1] = -Data[1] * Data[10] * Data[15] + Data[1] * Data[11] * Data[14] + Data[9] * Data[2] * Data[15] - Data[9] * Data[3] * Data[14] - Data[13] * Data[2] * Data[11] + Data[13] * Data[3] * Data[10]; result[5] = Data[0] * Data[10] * Data[15] - Data[0] * Data[11] * Data[14] - Data[8] * Data[2] * Data[15] + Data[8] * Data[3] * Data[14] + Data[12] * Data[2] * Data[11] - Data[12] * Data[3] * Data[10]; result[9] = -Data[0] * Data[9] * Data[15] + Data[0] * Data[11] * Data[13] + Data[8] * Data[1] * Data[15] - Data[8] * Data[3] * Data[13] - Data[12] * Data[1] * Data[11] + Data[12] * Data[3] * Data[9]; result[13] = Data[0] * Data[9] * Data[14] - Data[0] * Data[10] * Data[13] - Data[8] * Data[1] * Data[14] + Data[8] * Data[2] * Data[13] + Data[12] * Data[1] * Data[10] - Data[12] * Data[2] * Data[9]; result[2] = Data[1] * Data[6] * Data[15] - Data[1] * Data[7] * Data[14] - Data[5] * Data[2] * Data[15] + Data[5] * Data[3] * Data[14] + Data[13] * Data[2] * Data[7] - Data[13] * Data[3] * Data[6]; result[6] = -Data[0] * Data[6] * Data[15] + Data[0] * Data[7] * Data[14] + Data[4] * Data[2] * Data[15] - Data[4] * Data[3] * Data[14] - Data[12] * Data[2] * Data[7] + Data[12] * Data[3] * Data[6]; result[10] = Data[0] * Data[5] * Data[15] - Data[0] * Data[7] * Data[13] - Data[4] * Data[1] * Data[15] + Data[4] * Data[3] * Data[13] + Data[12] * Data[1] * Data[7] - Data[12] * Data[3] * Data[5]; result[14] = -Data[0] * Data[5] * Data[14] + Data[0] * Data[6] * Data[13] + Data[4] * Data[1] * Data[14] - Data[4] * Data[2] * Data[13] - Data[12] * Data[1] * Data[6] + Data[12] * Data[2] * Data[5]; result[3] = -Data[1] * Data[6] * Data[11] + Data[1] * Data[7] * Data[10] + Data[5] * Data[2] * Data[11] - Data[5] * Data[3] * Data[10] - Data[9] * Data[2] * Data[7] + 
Data[9] * Data[3] * Data[6]; result[7] = Data[0] * Data[6] * Data[11] - Data[0] * Data[7] * Data[10] - Data[4] * Data[2] * Data[11] + Data[4] * Data[3] * Data[10] + Data[8] * Data[2] * Data[7] - Data[8] * Data[3] * Data[6]; result[11] = -Data[0] * Data[5] * Data[11] + Data[0] * Data[7] * Data[9] + Data[4] * Data[1] * Data[11] - Data[4] * Data[3] * Data[9] - Data[8] * Data[1] * Data[7] + Data[8] * Data[3] * Data[5]; result[15] = Data[0] * Data[5] * Data[10] - Data[0] * Data[6] * Data[9] - Data[4] * Data[1] * Data[10] + Data[4] * Data[2] * Data[9] + Data[8] * Data[1] * Data[6] - Data[8] * Data[2] * Data[5]; det = Data[0] * result[0] + Data[1] * result[4] + Data[2] * result[8] + Data[3] * result[12]; if (det == 0) return false; det = 1.0 / det; for (int i = 0; i < 16; i++) { Data[i] = result[i] * det; } return true; } void Set(NumberType value) { memset(this->Data, value, 16 * sizeof(NumberType)); } void Orthographic(NumberType left, NumberType right, NumberType bottom, NumberType top, NumberType zNear, NumberType zFar) { Reset(); Data[ScaleX] = (2) / (NumberType)(right - left); Data[ScaleY] = (2) / (NumberType)(top - bottom); Data[ScaleZ] = -(2) / (NumberType)(zFar - zNear); Data[TransformX] = -(right + left) / (NumberType)(right - left); Data[TransformY] = -(top + bottom) / (NumberType)(top - bottom); Data[TransformZ] = -(zFar + zNear) / (NumberType)(zFar - zNear); } void Perspective(NumberType fielfOfView, NumberType aspectRatio, NumberType near, NumberType far) { NumberType fielfOfViewRadians = Math::DegreeToRadians(fielfOfView); float tanHalfFovy = Math::Tangens(fielfOfViewRadians / 2.0f); memset(Data, 0, 16 * sizeof(float)); Data[ScaleX] = (1) / (aspectRatio * tanHalfFovy); Data[ScaleY] = (1) / (tanHalfFovy); Data[ScaleZ] = -(far + near) / (far - near); Data[11] = -(1); Data[TransformZ] = -((2) * far * near) / (far - near); } Vector3<NumberType> ScaleXYZ() { return Vector3<NumberType>(Data[ScaleX], Data[ScaleY], Data[ScaleZ]); } Vector4<NumberType> ScaleXYZW() { 
return Vector4<NumberType>(Data[ScaleX], Data[ScaleY], Data[ScaleZ], Data[ScaleW]); } Vector3<NumberType> PositionXYZ() { return Vector3<NumberType>(Data[TransformX], Data[TransformY], Data[TransformZ]); } Vector4<NumberType> PositionXYZW() { return Vector4<NumberType>(Data[TransformX], Data[TransformY], Data[TransformZ], Data[TransformW]); } Vector4<NumberType> CurrentRotation() { Vector4<NumberType> x = CurrentAxisX(); Vector4<NumberType> y = CurrentAxisY(); Vector4<NumberType> z = CurrentAxisZ(); return x + y + z; } Vector4<NumberType> CurrentScaling() { return Vector4<NumberType>(Data[0], Data[5], Data[10], Data[15]); } void Print() { printf("\n\n%7.2f %7.2f %7.2f %7.2f\n", this->Data[0], this->Data[1], this->Data[2], this->Data[3]); printf("%7.2f %7.2f %7.2f %7.2f\n", this->Data[4], this->Data[5], this->Data[6], this->Data[7]); printf("%7.2f %7.2f %7.2f %7.2f\n", this->Data[8], this->Data[9], this->Data[10], this->Data[11]); printf("%7.2f %7.2f %7.2f %7.2f\n\n", this->Data[12], this->Data[13], this->Data[14], this->Data[15]); } Vector4<NumberType> CurrentAxisX() { return Vector4<NumberType>(Data[XAxisX], Data[XAxisY], Data[XAxisZ], Data[XAxisW]); } Vector4<NumberType> CurrentAxisY() { return Vector4<NumberType>(Data[YAxisX], Data[YAxisY], Data[YAxisZ], Data[YAxisW]); } Vector4<NumberType> CurrentAxisZ() { return Vector4<NumberType>(Data[ZAxisX], Data[ZAxisY], Data[ZAxisZ], Data[ZAxisW]); } }; }
// Returns true if at least one element of `arr` is an even number,
// false otherwise (including for an empty array).
function checkEvenNumber(arr) {
  for (const value of arr) {
    if (value % 2 === 0) {
      return true;
    }
  }
  return false;
}
<gh_stars>1-10 /* * Copyright (C) 2008-2020 Advanced Micro Devices, Inc. All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, * are permitted provided that the following conditions are met: * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * 3. Neither the name of the copyright holder nor the names of its contributors * may be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. * IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, * OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. * */ #include "libm_amd.h" #include "libm_util_amd.h" #include "libm_inlines_amd.h" #include "libm_special.h" extern void __amd_remainder_piby2d2f(unsigned long long ux, double *r, int *region); /* tan(x) approximation valid on the interval [-pi/4,pi/4]. If recip is true return -1/tan(x) instead. 
*/
/* NOTE(review): the descriptive comment for tanf_piby4 opens just above this
   block: returns tan(x) on [-pi/4,pi/4], or -1/tan(x) when recip is non-zero
   (used for the odd pi/2 quadrants below). */
static inline double tanf_piby4(double x, int recip)
{
  double r, t;

  /* Core Remez [1,2] approximation to tan(x) on the interval [0,pi/4]. */
  r = x*x;
  t = x + x*r*
      (0.385296071263995406715129e0 -
       0.172032480471481694693109e-1 * r) /
      (0.115588821434688393452299e+1 +
       (-0.51396505478854532132342e0 +
        0.1844239256901656082986661e-1 * r) * r);

  if (recip)
    return -1.0 / t;
  else
    return t;
}

#ifdef WINDOWS
#pragma function(tanf)
#endif

float FN_PROTOTYPE_REF(tanf)(float x)
{
  double r, dx;
  int region, xneg;

  unsigned int fux;
  unsigned long long ux, ax;

  GET_BITS_SP32(x, fux);
  if ((fux & EXPBITS_SP32) == EXPBITS_SP32)
  {
    /* x is either NaN or infinity */
    if (fux & MANTBITS_SP32)
    {
      /* x is NaN */
#ifdef WINDOWS
      return __amd_handle_errorf("tanf", __amd_tan, fux | QNAN_MASK_32, _DOMAIN, AMD_F_NONE, EDOM, x, 0.0F, 1);
#else
      if (fux & QNAN_MASK_32)
        return __amd_handle_errorf("tanf", __amd_tan, fux | QNAN_MASK_32, _DOMAIN, AMD_F_NONE, EDOM, x, 0.0F, 1);
      else
        return __amd_handle_errorf("tanf", __amd_tan, fux | QNAN_MASK_32, _DOMAIN, AMD_F_INVALID, EDOM, x, 0.0F, 1);
#endif
    }
    else
    {
      /* x is infinity. Return a NaN */
      return __amd_handle_errorf("tanf", __amd_tan, INDEFBITPATT_SP32, _DOMAIN, AMD_F_INVALID, EDOM, x, 0.0F, 1);
    }
  }

  /* Work in double precision: a float argument is exactly representable,
     and the reduction/core below are double-based. */
  dx = (double)x;

  GET_BITS_DP64(dx, ux);
  ax = (ux & ~SIGNBIT_DP64);

  if (ax <= 0x3fe921fb54442d18LL) /* abs(x) <= pi/4 */
  {
    if (ax < 0x3f80000000000000LL) /* abs(x) < 2.0^(-7) */
    {
      if (ax < 0x3f20000000000000LL) /* abs(x) < 2.0^(-13) */
      {
        if (ax == 0x0000000000000000LL)
          return x;
        else
#ifdef WINDOWS
          return x; //valf_with_flags(x, AMD_F_INEXACT);
#else
          return __amd_handle_errorf("tanf", __amd_tan, fux, _UNDERFLOW, AMD_F_UNDERFLOW|AMD_F_INEXACT, ERANGE, x, 0.0F, 1);
#endif
      }
      else
        /* One-term Taylor correction: tan(x) ~= x + x^3/3 for small x. */
        return (float)(dx + dx*dx*dx*0.333333333333333333);
    }
    else
      return (float)tanf_piby4(x, 0);
  }

  xneg = (int)(ux >> 63);

  if (xneg) dx = -dx;

  if (dx < 5.0e5)
  {
    /* For these size arguments we can just carefully subtract the
       appropriate multiple of pi/2, using extra precision where
       dx is close to an exact multiple of pi/2 */
    static const double
      twobypi = 6.36619772367581382433e-01,      /* 0x3fe45f306dc9c883 */
      piby2_1 = 1.57079632673412561417e+00,      /* 0x3ff921fb54400000 */
      piby2_1tail = 6.07710050650619224932e-11,  /* 0x3dd0b4611a626331 */
      piby2_2 = 6.07710050630396597660e-11,      /* 0x3dd0b4611a600000 */
      piby2_2tail = 2.02226624879595063154e-21,  /* 0x3ba3198a2e037073 */
      piby2_3 = 2.02226624871116645580e-21,      /* 0x3ba3198a2e000000 */
      piby2_3tail = 8.47842766036889956997e-32;  /* 0x397b839a252049c1 */

    double t, rhead, rtail;
    int npi2;
    unsigned long long uy, xexp, expdiff;
    xexp = ax >> EXPSHIFTBITS_DP64;

    /* How many pi/2 is dx a multiple of? */
    if (ax <= 0x400f6a7a2955385eLL) /* 5pi/4 */
    {
      if (ax <= 0x4002d97c7f3321d2LL) /* 3pi/4 */
        npi2 = 1;
      else
        npi2 = 2;
    }
    else if (ax <= 0x401c463abeccb2bbLL) /* 9pi/4 */
    {
      if (ax <= 0x4015fdbbe9bba775LL) /* 7pi/4 */
        npi2 = 3;
      else
        npi2 = 4;
    }
    else
      npi2 = (int)(dx * twobypi + 0.5);

    /* Subtract the multiple from dx to get an extra-precision remainder */
    rhead = dx - npi2 * piby2_1;
    rtail = npi2 * piby2_1tail;
    GET_BITS_DP64(rhead, uy);
    expdiff = xexp - ((uy & EXPBITS_DP64) >> EXPSHIFTBITS_DP64);
    if (expdiff > 15)
    {
      /* The remainder is pretty small compared with dx, which
         implies that dx is a near multiple of pi/2
         (dx matches the multiple to at least 15 bits) */
      t = rhead;
      rtail = npi2 * piby2_2;
      rhead = t - rtail;
      rtail = npi2 * piby2_2tail - ((t - rhead) - rtail);
      if (expdiff > 48)
      {
        /* dx matches a pi/2 multiple to at least 48 bits */
        t = rhead;
        rtail = npi2 * piby2_3;
        rhead = t - rtail;
        rtail = npi2 * piby2_3tail - ((t - rhead) - rtail);
      }
    }
    r = rhead - rtail;
    region = npi2 & 3;
  }
  else
  {
    /* Reduce x into range [-pi/4,pi/4] */
    __amd_remainder_piby2d2f(ax, &r, &region);
  }

  /* Odd quadrants (region & 1) use the -1/tan(x) form of the core. */
  if (xneg)
    return (float)-tanf_piby4(r, region & 1);
  else
    return (float)tanf_piby4(r, region & 1);
}
package info.archinnov.achilles.type; /** * Define naming strategy for keyspace name, table name and column names.Available values are: * <ul> * <li>info.archinnov.achilles.type.NamingStrategy.SNAKE_CASE: transform all schema name using <a href="http://en.wikipedia.org/wiki/Snake_case" target="blank_">snake case</a></li> * <li>info.archinnov.achilles.type.NamingStrategy.CASE_SENSITIVE: enclose the name between double quotes (") for escaping the case</li> * <li>info.archinnov.achilles.type.NamingStrategy.LOWER_CASE: transform the name to lower case</li> * </ul> */ public enum NamingStrategy { /** * Convert Java field name into snake case convention <br/> * * Example: * <table style="border:1px solid"> * <thead> * <tr> * <th>Java name</th> * <th>Camel case</th> * </tr> * </thead> * <tbody> * <tr> * <td>count</td> * <td>count</td> * </tr> * <tr> * <td>column1</td> * <td>column_1</td> * </tr> * <tr> * <td>userName</td> * <td>user_name</td> * </tr> * <tr> * <td>FIRSTName</td> * <td>FIRST_name</td> * </tr> * </tbody> * </table> */ SNAKE_CASE, /** * Enclose Java field name with double quotes (") if the field has <strong>at least one</strong>upper-case character. * Otherwise let it as is. 
<br/> * Example: * <table style="border:1px solid"> * <thead> * <tr> * <th>Java name</th> * <th>Case sensitive</th> * </tr> * </thead> * <tbody> * <tr> * <td>count</td> * <td>count</td> * </tr> * <tr> * <td>column1</td> * <td>column1</td> * </tr> * <tr> * <td>userName</td> * <td>"userName"</td> * </tr> * <tr> * <td>FIRSTName</td> * <td>"FIRSTName"</td> * </tr> * </tbody> * </table> */ CASE_SENSITIVE, /** * Force Java field name to lower case.<br/> * Example: * <table style="border:1px solid"> * <thead> * <tr> * <th>Java name</th> * <th>Lower case</th> * </tr> * </thead> * <tbody> * <tr> * <td>count</td> * <td>count</td> * </tr> * <tr> * <td>column1</td> * <td>column1</td> * </tr> * <tr> * <td>userName</td> * <td>username</td> * </tr> * <tr> * <td>FIRSTName</td> * <td>firstname</td> * </tr> * </tbody> * </table> */ LOWER_CASE; }
#!/bin/bash
# Install the project's Python requirements into the currently active
# virtualenv.
# Usage: script [requirements-name]
#   requirements-name defaults to "local", i.e. requirements/local.txt
#   relative to the project root (one directory above this script).

FILE=${1:-local}
WORK_DIR="$(dirname "$0")"
PROJECT_DIR="$(dirname "$WORK_DIR")"

# pip must be runnable (not merely present on disk) before anything else.
pip --version >/dev/null 2>&1 || {
  echo >&2 -e "\npip is required but it's not installed."
  echo >&2 -e "You can install it by running the following command:\n"
  echo >&2 "wget https://bootstrap.pypa.io/get-pip.py --output-document=get-pip.py; chmod +x get-pip.py; sudo -H python3 get-pip.py"
  echo >&2 -e "\n"
  echo >&2 -e "\nFor more information, see pip documentation: https://pip.pypa.io/en/latest/"
  exit 1
}

virtualenv --version >/dev/null 2>&1 || {
  echo >&2 -e "\nvirtualenv is required but it's not installed."
  echo >&2 -e "You can install it by running the following command:\n"
  echo >&2 "sudo -H pip3 install virtualenv"
  echo >&2 -e "\n"
  echo >&2 -e "\nFor more information, see virtualenv documentation: https://virtualenv.pypa.io/en/latest/"
  exit 1
}

# Refuse to install outside a virtualenv so system site-packages stay clean.
# ${VIRTUAL_ENV:-} keeps the test safe even if the script runs under `set -u`.
if [ -z "${VIRTUAL_ENV:-}" ]; then
  echo >&2 -e "\nYou need activate a virtualenv first"
  echo >&2 -e 'If you do not have a virtualenv created, run the following command to create and automatically activate a new virtualenv named "venv" on current folder:\n'
  echo >&2 -e "virtualenv venv --python=\`which python3\`"
  echo >&2 -e "\nTo leave/disable the currently active virtualenv, run the following command:\n"
  echo >&2 "deactivate"
  echo >&2 -e "\nTo activate the virtualenv again, run the following command:\n"
  echo >&2 "source venv/bin/activate"
  echo >&2 -e "\nFor more information, see virtualenv documentation: https://virtualenv.pypa.io/en/latest/"
  echo >&2 -e "\n"
  exit 1
else
  # Quote the path: PROJECT_DIR or FILE may contain spaces.
  pip install -r "$PROJECT_DIR/requirements/$FILE.txt"
fi
// Mongoose-backed double-entry accounting model: settings, accounts, entries
// (with balance/freeze validation hooks) and balances, plus REST routes via
// express-restify-mongoose.
var mongoose = require('mongoose');
var Schema = mongoose.Schema;
var restify = require('express-restify-mongoose');
var timestamps = require('mongoose-timestamp');
var _ = require('underscore');
var moment = require('moment');
var wrapMPromise = require('./wrapMPromise');
var promisedHook = wrapMPromise.promisedHook;
var populate = wrapMPromise.populate;
var wrapMpromise = wrapMPromise.wrapMpromise;
var q = require('q');

// Schema plugin: adds a findOneOrCreate static that resolves to the matching
// document, or creates one from `doc` when no match exists.
function findOneOrCreate(schema) {
    schema.statics.findOneOrCreate = function findOneOrCreate(condition, doc) {
        var self = this;
        return self.findOne(condition).exec().then(function(result){
            return result ? result : self.create(doc);
        });
    };
}

// Open the default mongoose connection, log lifecycle events, and close the
// connection gracefully on SIGINT before exiting the process.
function connectToDb(db) {
    mongoose.connect(db);
    mongoose.connection.on('connected', function () {
        console.log('Mongoose default connection open to ' + db);
    });
    mongoose.connection.on('error',function (err) {
        console.log('Mongoose default connection error: ' + err);
    });
    mongoose.connection.on('disconnected', function () {
        console.log('Mongoose default connection disconnected');
    });
    process.on('SIGINT', function() {
        mongoose.connection.close(function () {
            console.log('Mongoose default connection disconnected through app termination');
            process.exit(0);
        });
    });
}

// Custom error type carrying structured validation details in `data`;
// addRestRoutes below serializes `data` as the JSON error response body.
function DataValidationError(data) {
    this.name = 'DataValidationError';
    this.message = 'Data validation error';
    this.stack = (new Error()).stack;
    this.data = data;
}
DataValidationError.prototype = Object.create(Error.prototype);
DataValidationError.prototype.constructor = DataValidationError;

// Module factory: connects to `db` and returns { addRestRoutes, model }.
module.exports = function(db){
    // Amounts are stored in two currencies side by side.
    var currenciesDefinition = {
        baseCurrency: Number,
        accountCurrency: Number
    };
    var schema = {
        setting: new Schema({
            name: String,
            desc: String,
            value: String,
            type: { type: String, enum: ['baseCurrency']}
        }),
        account: new Schema({
            name: String,
            currency: String,
            tags: [{type: String}],
            freezed: {type: Date, default: moment('1990-01-01T00:00:00.000Z').toDate() }, // no entries are allowed before that date
            type: { type: String, enum: ['asset', 'liability', 'expense', 'revenue']} // account classes: asset, liability, expense, revenue
        }),
        entry: new Schema({
            date: { type: Date, default: Date.now},
            order: { type: Number, default: 1},
            planned: Boolean,
            // An entry splits into parts; per double-entry bookkeeping their
            // baseCurrency amounts must sum to zero (see validateBalance).
            parts: [{
                account: {type: Schema.Types.ObjectId, ref: 'Account'},
                amount: currenciesDefinition,
                tags: [{type: String}],
                text: String
            }],
            user: String
        }),
        balance: new Schema({
            account: {type: Schema.Types.ObjectId, ref: 'Account'},
            date: { type: Date, default: Date.now},
            planned: currenciesDefinition,
            actual: currenciesDefinition
        })
    };
    schema.setting.plugin(findOneOrCreate);
    schema.entry.plugin(timestamps);
    schema.balance.plugin(timestamps);

    // Reject entries dated on/before the freeze date of any involved account.
    var validateFreeze = function(entry) {
        _.each(entry.parts, function(part){
            if (entry.date <= part.account.freezed) {
                throw new DataValidationError({
                    type: 'beforeAccountFreezeDate',
                    account: part.account,
                    freezed: part.account.freezed
                });
            }
        });
        return entry;
    };
    // Reject entries with no parts or whose baseCurrency amounts don't net to 0.
    var validateBalance = function(entry) {
        if (entry.parts.length === 0) {
            throw new DataValidationError({type: 'noParts'});
        }
        var sum = _.reduce(entry.parts, function(memo, part){
            return memo + part.amount.baseCurrency;
        }, 0);
        if (sum !== 0) {
            throw new DataValidationError({
                type: 'notBalanced',
                balance: sum
            });
        }
        return entry;
    };
    // Populate part accounts first: validateFreeze reads account.freezed.
    schema.entry.pre('save', promisedHook(function(promise) {
        return promise
            .then(populate('parts.account'))
            .then(validateFreeze)
            .then(validateBalance);
    }));
    schema.entry.pre('remove', promisedHook(function(promise){
        return promise
            .then(populate('parts.account'))
            .then(validateFreeze);
    }));
    var model = {};
    // Block account removal while entries still reference the account.
    schema.account.pre('remove', promisedHook(function(promise){
        return promise.then(function(account){
            return wrapMpromise(
                model.entry.count({'parts.account._id': account._id}).exec()
            );
        }).then(function(count){
            if (count > 0) {
                throw new DataValidationError({
                    type: 'references',
                    refType: 'entry',
                    count: count
                });
            }
        });
    }));
    model.setting = mongoose.model('Setting', schema.setting, 'settings');
    model.account = mongoose.model('Account', schema.account, 'accounts');
    model.entry = mongoose.model('Entry', schema.entry, 'entries');
    model.balance = mongoose.model('Balance', schema.balance, 'balances');
    connectToDb(db);
    return {
        // Mount CRUD REST routes for all four models on the express app.
        addRestRoutes: function(app){
            restify.defaults({
                onError: function (err, req, res, next) {
                    res.setHeader('Content-Type', 'application/json');
                    if (err instanceof DataValidationError) {
                        res.status(err.statusCode || 500).json(err.data);
                    } else {
                        res.status(err.statusCode || 500).json(err.message);
                    }
                }
            });
            restify.serve(app, model.setting);
            restify.serve(app, model.account, {
                // necessary for calling hooks like pre-save
                findOneAndRemove: false
            });
            restify.serve(app, model.entry, {
                // necessary for calling hooks like pre-save
                findOneAndUpdate: false,
                findOneAndRemove: false
            });
        },
        model: model
    };
};
<reponame>chlds/util<gh_stars>0
/* Press <Ctrl-]> to invoke the function.
   Remarks: Refer at util/lib/obj/src/cli_io_beta.c */
# define CBR
# define CLI_W32
# include <stdio.h>
# include "../../../incl/config.h"

/* Handler for the <Ctrl-]> key: validates argp, optionally traces the
   keystroke, then delegates to cli_vers_beta().
   Returns 1 on success, 0 on NULL argp or when cli_vers_beta() fails. */
signed(__cdecl cli_ctrl_rsb_beta(CLI_W32_STAT(*argp)))
{
  /* **** DATA, BSS and STACK */
  auto signed char *b;      /* NOTE(review): b, i and flag are never used here —
                               presumably placeholders; confirm before removal */
  auto signed i,r;
  auto signed short flag;
  /* **** CODE/TEXT */
  if(!argp)
    return(0x00);
  /* Trace the keystroke when the configured debug level exceeds CLI_DBG_D */
  if(CLI_DBG_D<(CLI_DBG))
    printf("%s ","<Ctrl-]>");
  r = cli_vers_beta(argp);
  if(!r)
  {
    printf("%s \n","<< Error at fn. cli_vers_beta()");
    return(0x00);
  }
  return(0x01);
}
class Button:
    """Renders an HTML button plus a shared CSS rule.

    ``render`` takes a label and a style dict with the keys ``width``,
    ``background`` and ``borderRadius``, and returns an ``(html, css)``
    string pair.
    """

    def render(self, label, style):
        width = style["width"]
        background = style["background"]
        radius = style["borderRadius"]
        html = (
            f'<button style="width: {width}; '
            f'background-color: {background}; '
            f'border-radius: {radius}">{label}</button>'
        )
        css = (
            'button {font-size: 16px; color: white; '
            'padding: 10px; border: none; cursor: pointer;}'
        )
        return html, css


# Demonstration
blue_button_style = {
    "borderRadius": "5px",
    "width": "100px",
    "background": "blue"
}

red_button_style = {
    "borderRadius": "5px",
    "width": "100px",
    "background": "red"
}

button = Button()
blue_button_html, blue_button_css = button.render('Blue Button', blue_button_style)
red_button_html, red_button_css = button.render('Red Button', red_button_style)

print("Blue Button HTML:", blue_button_html)
print("Blue Button CSS:", blue_button_css)
print("Red Button HTML:", red_button_html)
print("Red Button CSS:", red_button_css)
#!/bin/bash
# Demo of bash indexed arrays: create, append, overwrite and inspect.

buah=('apel' 'mangga' 'anggur')

# Append a fourth element, then replace the first one.
buah[3]="semangka"
buah[0]="pir"

# "${buah[*]}" joins all elements with spaces, matching echo's output for [@].
printf '%s\n' "Nama Buahnya adalah : ${buah[*]}"
printf '%s\n' "Nama Buah Index 0 : ${buah[0]}"
printf '%s\n' "Panjang Array : ${#buah[@]}"
printf '%s\n' "Index Setiap Buah : ${!buah[*]}"
import http.server
import socketserver


class MyHTTPServer(http.server.BaseHTTPRequestHandler):
    """Minimal request handler: serves a fixed HTML page on GET and
    acknowledges POST requests.

    NOTE(review): despite the name this is a request *handler*, not a
    server; it is kept as-is because external code may import it by name.
    """

    def do_GET(self):
        """Respond to any GET with a static 'Hello, World!' HTML page."""
        self.send_response(200)
        self.send_header('Content-type', 'text/html')
        self.end_headers()
        self.wfile.write(b"<html><body><h1>Hello, World!</h1></body></html>")

    def do_POST(self):
        """Read (and discard) the request body, then acknowledge the POST.

        Falls back to 0 when the Content-Length header is absent, so a
        body-less POST no longer raises KeyError.
        """
        content_length = int(self.headers.get('Content-Length', 0))
        post_data = self.rfile.read(content_length)  # body currently unused
        self.send_response(200)
        self.send_header('Content-type', 'text/plain')
        self.end_headers()
        self.wfile.write(b"Received POST request")


def run_server(port=8080):
    """Serve HTTP forever on the given TCP port (blocks the calling thread).

    Args:
        port: TCP port to bind on all interfaces; defaults to 8080.
    """
    with socketserver.TCPServer(("", port), MyHTTPServer) as httpd:
        print("Server started on port", port)
        httpd.serve_forever()


if __name__ == "__main__":
    run_server()
<gh_stars>0
import React from 'react';

// Blog post detail view: title heading plus an HTML body container.
// NOTE(review): the body is injected via dangerouslySetInnerHTML from the
// literal string "value" — presumably a placeholder until real post HTML is
// wired in; confirm, and sanitize the real content before rendering.
export default function Detail() {
    return(
        <>
            <div className="blog-3 blog-details col" data-aos="fade-up">
                <div className="info">
                    <h3 className="title">제목 테스트</h3>
                    <div className="desc" dangerouslySetInnerHTML={{__html: "value"}} />
                </div>
            </div>
        </>
    );
}
import tensorflow as tf
from tensorflow.keras.datasets import mnist

# Load the MNIST handwritten-digit dataset.
(x_train, y_train), (x_test, y_test) = mnist.load_data()

# Pre-process the data
# Add an explicit single channel axis: Conv2D expects (height, width, channels).
x_train = x_train.reshape(x_train.shape[0], 28, 28, 1)
x_test = x_test.reshape(x_test.shape[0], 28, 28, 1)
# Convert to float and scale pixel values from [0, 255] down to [0, 1].
x_train = x_train.astype('float32')
x_test = x_test.astype('float32')
x_train /= 255
x_test /= 255

# Create model: two Conv/MaxPool stages followed by a small dense classifier.
model = tf.keras.Sequential()
model.add(tf.keras.layers.Conv2D(32, (3, 3), activation='relu', input_shape=(28, 28, 1)))
model.add(tf.keras.layers.MaxPooling2D((2, 2)))
model.add(tf.keras.layers.Conv2D(64, (3, 3), activation='relu'))
model.add(tf.keras.layers.MaxPooling2D((2, 2)))
model.add(tf.keras.layers.Flatten())
model.add(tf.keras.layers.Dense(64, activation='relu'))
# No activation on the final layer: it emits raw logits, which matches
# from_logits=True in the loss below.
model.add(tf.keras.layers.Dense(10))

model.compile(optimizer='adam',
              loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
              metrics=['accuracy'])

# Train model
model.fit(x_train, y_train, epochs=10)

# Validate model
model.evaluate(x_test, y_test, verbose=2)
<reponame>davidkarlsen/Hystrix<filename>hystrix-contrib/hystrix-javanica/src/test/java/com/netflix/hystrix/contrib/javanica/test/common/command/BasicCommandTest.java<gh_stars>0
package com.netflix.hystrix.contrib.javanica.test.common.command;

import com.netflix.hystrix.HystrixEventType;
import com.netflix.hystrix.HystrixInvokableInfo;
import com.netflix.hystrix.HystrixRequestLog;
import com.netflix.hystrix.contrib.javanica.annotation.HystrixCommand;
import com.netflix.hystrix.contrib.javanica.command.AsyncResult;
import com.netflix.hystrix.contrib.javanica.test.common.BasicHystrixTest;
import com.netflix.hystrix.contrib.javanica.test.common.domain.User;
import org.junit.Before;
import org.junit.Test;

import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

/**
 * Base test for {@code @HystrixCommand}-annotated methods. Concrete subclasses
 * supply the proxied service instances via the abstract create* factories
 * (e.g. one subclass per supported AOP mechanism).
 */
public abstract class BasicCommandTest extends BasicHystrixTest {

    private UserService userService;
    private AdvancedUserService advancedUserService;
    private GenericService<String, Long, User> genericUserService;

    @Before
    public void setUp() throws Exception {
        super.setUp();
        userService = createUserService();
        advancedUserService = createAdvancedUserServiceService();
        genericUserService = createGenericUserService();
    }

    @Test
    public void testGetUserAsync() throws ExecutionException, InterruptedException {
        Future<User> f1 = userService.getUserAsync("1", "name: ");

        assertEquals("name: 1", f1.get().getName());
        assertEquals(1, HystrixRequestLog.getCurrentRequest().getAllExecutedCommands().size());
        com.netflix.hystrix.HystrixInvokableInfo<?> command = getCommand();
        // assert the command key name is the one we're expecting
        assertEquals("GetUserCommand", command.getCommandKey().name());
        // assert the command group key name is the one we're expecting
        assertEquals("UserService", command.getCommandGroup().name());
        // assert the command thread pool key name is the one we're expecting
        assertEquals("CommandTestAsync", command.getThreadPoolKey().name());
        // it was successful
        assertTrue(command.getExecutionEvents().contains(HystrixEventType.SUCCESS));
    }

    @Test
    public void testGetUserSync() {
        User u1 = userService.getUserSync("1", "name: ");
        assertGetUserSnycCommandExecuted(u1);
    }

    @Test
    public void shouldWorkWithInheritedMethod() {
        // the @HystrixCommand is declared on UserService; here it is invoked
        // through a subclass that merely inherits the method
        User u1 = advancedUserService.getUserSync("1", "name: ");
        assertGetUserSnycCommandExecuted(u1);
    }

    @Test
    public void should_work_with_parameterized_method() throws Exception {
        assertEquals(Integer.valueOf(1), userService.echo(1));

        assertEquals(1, HystrixRequestLog.getCurrentRequest().getAllExecutedCommands().size());
        assertTrue(getCommand().getExecutionEvents().contains(HystrixEventType.SUCCESS));
    }

    @Test
    public void should_work_with_parameterized_asyncMethod() throws Exception {
        assertEquals(Integer.valueOf(1), userService.echoAsync(1).get());

        assertEquals(1, HystrixRequestLog.getCurrentRequest().getAllExecutedCommands().size());
        assertTrue(getCommand().getExecutionEvents().contains(HystrixEventType.SUCCESS));
    }

    @Test
    public void should_work_with_genericClass_fallback() {
        User user = genericUserService.getByKeyForceFail("1", 2L);
        assertEquals("name: 2", user.getName());

        assertEquals(1, HystrixRequestLog.getCurrentRequest().getAllExecutedCommands().size());
        HystrixInvokableInfo<?> command = HystrixRequestLog.getCurrentRequest()
                .getAllExecutedCommands().iterator().next();
        assertEquals("getByKeyForceFail", command.getCommandKey().name());

        // confirm that command has failed
        assertTrue(command.getExecutionEvents().contains(HystrixEventType.FAILURE));
        // and that fallback was successful
        assertTrue(command.getExecutionEvents().contains(HystrixEventType.FALLBACK_SUCCESS));
    }

    // Shared assertions for the getUserSync command (key/group/pool names and success).
    private void assertGetUserSnycCommandExecuted(User u1) {
        assertEquals("name: 1", u1.getName());
        assertEquals(1, HystrixRequestLog.getCurrentRequest().getAllExecutedCommands().size());
        com.netflix.hystrix.HystrixInvokableInfo<?> command = getCommand();
        assertEquals("getUserSync", command.getCommandKey().name());
        assertEquals("UserGroup", command.getCommandGroup().name());
        assertEquals("UserGroup", command.getThreadPoolKey().name());
        assertTrue(command.getExecutionEvents().contains(HystrixEventType.SUCCESS));
    }

    // Returns the single command recorded in the current Hystrix request log.
    private com.netflix.hystrix.HystrixInvokableInfo<?> getCommand() {
        return HystrixRequestLog.getCurrentRequest().getAllExecutedCommands().iterator().next();
    }

    protected abstract UserService createUserService();

    protected abstract AdvancedUserService createAdvancedUserServiceService();

    protected abstract GenericService<String, Long, User> createGenericUserService();

    public interface GenericService<K1, K2, V> {
        V getByKey(K1 key1, K2 key2);

        V getByKeyForceFail(K1 key, K2 key2);

        V fallback(K1 key, K2 key2);
    }

    public static class GenericUserService implements GenericService<String, Long, User> {

        @HystrixCommand(fallbackMethod = "fallback")
        @Override
        public User getByKey(String sKey, Long lKey) {
            return new User(sKey, "name: " + lKey); // it should be network call
        }

        @HystrixCommand(fallbackMethod = "fallback")
        @Override
        public User getByKeyForceFail(String sKey, Long lKey) {
            throw new RuntimeException("force fail");
        }

        @Override
        public User fallback(String sKey, Long lKey) {
            return new User(sKey, "name: " + lKey);
        }
    }

    public static class UserService {

        @HystrixCommand(commandKey = "GetUserCommand", threadPoolKey = "CommandTestAsync")
        public Future<User> getUserAsync(final String id, final String name) {
            return new AsyncResult<User>() {
                @Override
                public User invoke() {
                    return new User(id, name + id); // it should be network call
                }
            };
        }

        @HystrixCommand(groupKey = "UserGroup")
        public User getUserSync(String id, String name) {
            return new User(id, name + id); // it should be network call
        }

        @HystrixCommand
        public <T> T echo(T value) {
            return value;
        }

        @HystrixCommand
        public <T> Future<T> echoAsync(final T value) {
            return new AsyncResult<T>() {
                @Override
                public T invoke() {
                    return value;
                }
            };
        }
    }

    // Exists only to exercise command invocation through an inherited method.
    public static class AdvancedUserService extends UserService {
    }
}
public extension Scanner {

    /// Creates a scanner configured for locale-independent parsing.
    convenience init(forParsing string: String) {
        self.init(string: string)
        // A nil locale keeps numeric parsing independent of region settings.
        locale = nil
    }

    /// Scans the whole string and collects every integer found, skipping any
    /// characters that are not part of an integer.
    func parseIntegers() -> [Int] {
        var found: [Int] = []
        while !isAtEnd {
            var current: Int = 0
            guard scanInt(&current) else {
                scanLocation += 1 // skip one non-integer character and retry
                continue
            }
            found.append(current)
        }
        return found
    }
}
def reduce_array(arr):
    """Fold the list down to a single value by XOR-ing all elements together.

    Requires a non-empty list; raises IndexError when ``arr`` is empty.
    """
    result = arr[0]
    for value in arr[1:]:
        result ^= value
    return result


arr = [3, 5, 6, 2]
result = reduce_array(arr)
print("The result of the reducing array is:", result)
# Clear any inherited prompt hook so nothing extra runs before each prompt.
export PROMPT_COMMAND=
# Use a minimal, fixed prompt string.
PS1='$ '
class Car:
    """A simple car model.

    Attributes:
        brand: manufacturer name.
        colour: body colour.
        horses: engine power in horsepower.
        country_production: country where the car was produced.
        current_speed: current speed; starts at 0 unless overridden.
    """

    def __init__(self, brand, colour, horses, country_production, current_speed=0):
        self.brand = brand
        self.colour = colour
        self.horses = horses
        self.country_production = country_production
        self.current_speed = current_speed

    def move_car(self):
        """Move the car: increase the current speed by a fixed step of 10."""
        self.current_speed += 10

    def accelerate_car(self, value):
        """Accelerate by *value*, adding it to the current speed."""
        self.current_speed += value

    def stop_car(self):
        """Bring the car to a halt by resetting the current speed to 0."""
        self.current_speed = 0

    def car_details(self):
        """Print every property of this car, one per line."""
        template = "Brand: {}\nColour: {}\nHorses: {}\nCountry production: {}\nCurrent speed: {}\n"
        print(template.format(self.brand, self.colour, self.horses,
                              self.country_production, self.current_speed))
<filename>web-steps/led-config/src/app/domain/Model.ts
import {ModelDimension} from "./ModelDimension";
import {RelationDefinition} from "./relations/RelationDefinition";
import {ModelTranslation} from "./ModelTranslation";
import {serialize, deserialize, deserializeAs, serializeAs} from "cerialize";
import {ModelMargin} from "./ModelMargin";
import {ModelPropertyValue} from "./ModelPropertyValue";
import {BaseClass} from "./BaseClass";

/**
 * A selectable product model in the configurator. Fields are (de)serialized
 * with cerialize; the static helpers resolve which RelationDefinitions apply
 * given the models chosen in previous configuration steps.
 */
export class Model extends BaseClass {

  @serialize @deserialize
  typeClass: string;

  // --------- Part of Mounting ----------------
  @serializeAs(ModelMargin)
  @deserializeAs(ModelMargin)
  margins: ModelMargin;
  //--------------------------------------------

  @serialize @deserialize
  uuid: string;

  @serialize @deserialize
  id: string;

  @serialize @deserialize
  orientation: string;

  @serialize @deserialize
  public name: string;

  @serialize @deserialize
  code: string;

  // Configuration step this model belongs to.
  @serialize @deserialize
  step: number;

  @serialize @deserialize
  imageUrl: string;

  @serialize @deserialize
  productPage: string;

  @serializeAs(ModelPropertyValue)
  @deserializeAs(ModelPropertyValue)
  properties: Array<ModelPropertyValue>;

  @serializeAs(ModelTranslation)
  @deserializeAs(ModelTranslation)
  public translations: ModelTranslation;

  // ------------------ Part of RealModel ---------------
  @serializeAs(ModelDimension)
  @deserializeAs(ModelDimension)
  dimension: ModelDimension;

  @serializeAs(ModelDimension)
  @deserializeAs(ModelDimension)
  maxDimension: ModelDimension;

  @serializeAs(ModelMargin)
  @deserializeAs(ModelMargin)
  margin: ModelMargin;
  //---------------------

  @serializeAs(ModelDimension)
  @deserializeAs(ModelDimension)
  lengthForCasting: ModelDimension;

  // Display form of lengthForCasting; not serialized.
  lengthForCastingStr: string;

  @serialize @deserialize
  leftSpace: number;

  @serialize @deserialize
  rightSpace: number;

  // Relations declared on this model; not serialized here.
  relations: Array<RelationDefinition> = [];

  /**
   * Returns the relations (declared on any previously chosen model) that
   * mention *m* and are sufficiently satisfied by the previous choices plus
   * *m* itself (at least 2 of the relation's models present, or all of them).
   */
  static relatedRelations(m: Model, prevModels: Array<Model>, currentStep: number): Array<RelationDefinition> {
    let relatedRelations: Array<RelationDefinition> = [];
    let currentStepModel = m;
    let prevModelInclCurrent: Array<Model> = [];
    // Collect every relation on a previous model that references m by uuid.
    for (let prevModel of prevModels) {
      prevModelInclCurrent.push(prevModel);
      if (prevModel && prevModel.relations) {
        for (let relation of prevModel.relations) {
          for (let mrel of relation.models) {
            if (mrel.uuid === currentStepModel.uuid) {
              relatedRelations.push(relation);
            }
          }
        }
      }
    }
    // Keep only relations with enough of their member models already chosen.
    // NOTE(review): the third clause (currentStep <= 1 && count >= 2) is
    // subsumed by the first; preserved as-is.
    let foundRelations: Array<RelationDefinition> = [];
    for (let rl of relatedRelations) {
      let count = Model.countSameModels(prevModelInclCurrent, rl.models);
      if (((count >= 2) || (count == rl.models.length)) || (currentStep <= 1 && count >= 2)) {
        foundRelations.push(rl);
      }
    }
    return foundRelations;
  }

  /**
   * Returns the relations on *m* itself that should raise a warning: either
   * enough previous models match (count >= currentStep + 1) or every step the
   * relation spans is covered (stepCount == count).
   */
  static relatedRelationsForWarning(m: Model, prevModels: Array<Model>, currentStep: number): Array<RelationDefinition> {
    let foundRelations: Array<RelationDefinition> = [];
    for (let rl of m.relations) {
      let count = Model.countSameModels(prevModels, rl.models);
      let stepCount = this.countModelsCurrentStep(rl.models);
      if (((count >= currentStep + 1)) || (stepCount == count)) {
        foundRelations.push(rl);
      }
    }
    return foundRelations;
  }

  // Linear membership test for a step number.
  private static containsNumber(steps: Array<number>, step: number) {
    for (let s of steps) {
      if (s == step) {
        return true;
      }
    }
    return false;
  }

  // Number of distinct step values among *models*.
  private static countModelsCurrentStep(models: Array<Model>): number {
    let steps: Array<number> = [];
    if (models.length > 0) {
      steps.push(models[0].step);
    }
    for (let m of models) {
      if (!this.containsNumber(steps, m.step)) {
        steps.push(m.step);
      }
    }
    return steps.length;
  }

  // How many of *models* (matched by uuid) appear in *prevModels*.
  private static countSameModels(models: Array<Model>, prevModels: Array<Model>) {
    let notFound: number = 0;
    for (let m of models) {
      if (!Model.canBeFoundIn(m, prevModels)) {
        notFound++;
      }
    }
    return models.length - notFound;
  }

  // True when *m* (by uuid) occurs in *prevModels*; undefined m never matches.
  private static canBeFoundIn(m: Model, prevModels: Array<Model>) {
    if (m != undefined) {
      for (let pm of prevModels) {
        if (m.uuid === pm.uuid) {
          return true;
        }
      }
    }
    return false;
  }

  /**
   * Resolves the display name: the translation for *defaultLang* ("nl" or
   * anything else -> "en"), falling back to the untranslated name.
   */
  public getNameTranslated(defaultLang: string) {
    let name: string = "";
    if (defaultLang == "nl") {
      name = this.translations.nl != undefined ? this.translations.nl : '';
    } else {
      name = this.translations.en != undefined ? this.translations.en : '';
    }
    if (name == "" && this.name != undefined && this.name != null) {
      name = this.name;
    }
    return name;
  }
}
<filename>CFCoverFlowViewDemo/CFViewController.h<gh_stars>0
//
//  CFViewController.h
//  CFCoverFlowViewDemo
//
//  Created by c0ming on 14-5-30.
//  Copyright (c) 2014 c0ming. All rights reserved.
//

#import <UIKit/UIKit.h>

@class CFCoverFlowView;

// Demo view controller pairing a cover-flow view with a page control.
@interface CFViewController : UIViewController

// Cover-flow view wired up from the storyboard.
@property (weak, nonatomic) IBOutlet CFCoverFlowView *coverFlowView1;
// Page indicator. NOTE(review): presumably kept in sync with the visible
// cover — confirm in the implementation file.
@property (weak, nonatomic) IBOutlet UIPageControl *pageControl;

// Action fired when the user taps the page control.
- (IBAction)pageControlAction:(id)sender;

@end
/**
 * Flatten a parsed HTML-ish object tree into plain text, accumulating into
 * t.desc. Keys named 'rect', '$', 'desc' and 'text' are skipped; string
 * values (and strings inside arrays) are appended space-separated; nested
 * objects are walked recursively. Consumed keys are deleted from the root
 * object (only when t === obj, i.e. the top-level call passes itself).
 */
const objToText = (t, obj) => {
  for (const k in obj) {
    if (k !== 'rect' && k !== '$' && k !== 'desc' && k !== 'text') {
      const v = obj[k]
      if (Array.isArray(v)) {
        v.forEach((j) => {
          // determine if array of objects
          if (typeof (j) === 'object') {
            objToText(t, j)
          } else if (typeof (j) === 'string') {
            t.desc = `${t.desc.trim()} ${j}`.trim()
          }
        })
      } else if (typeof (v) === 'object') {
        // must be object, execute recursion
        objToText(t, v)
      } else if (typeof (v) === 'string') {
        t.desc = `${t.desc.trim()} ${v}`.trim()
      }
      if (t === obj) {
        delete t[k]
      }
    }
  }
}

/**
 * Scale a rect in place and recompute width/height.
 *
 * @param {Object} rect   rect with x, y, xx, yy
 * @param {Number} scale  scale factor
 * @param {Number} maxWidth  upper bound for xx
 * @param {Number} maxHeight upper bound for yy
 * @return {Object} the mutated rect
 */
const rectToScale = (rect, scale, maxWidth, maxHeight) => {
  rect.x = rect.x < 0 ? 10 : Math.floor(rect.x * scale)
  // Bug fix: the original tested `rect.x < 0` here, so a negative y was
  // never clamped to 10.
  rect.y = rect.y < 0 ? 10 : Math.floor(rect.y * scale)
  rect.xx = Math.floor(rect.xx * scale)
  rect.yy = Math.floor(rect.yy * scale)
  if (rect.xx > maxWidth) {
    rect.xx = rect.xx - 10
  }
  if (rect.yy > maxHeight) {
    rect.yy = rect.yy - 10
  }
  rect.width = Math.floor(Math.abs(rect.xx - rect.x))
  rect.height = Math.floor(Math.abs(rect.yy - rect.y))
  return rect
}

/**
 * Determine whether (x, y) lies inside rect (x/y/width/height form).
 */
const rectContains = (rect, x, y) => {
  return rect.x <= x && x <= rect.x + Math.abs(rect.width) &&
    rect.y <= y && y <= rect.y + Math.abs(rect.height)
}

/**
 * Normalize a top/left/width/height rect to numeric x/y/xx/yy, clamping to
 * [10, maxX/maxY] and un-flipping negative width/height.
 */
const rectToNumeric = (rect, maxX, maxY) => {
  // previously done so exit
  if (rect.xx) {
    return rect
  }
  rect.top = Number(rect.top)
  rect.left = Number(rect.left)
  rect.width = Number(rect.width)
  rect.height = Number(rect.height)
  if (rect.left < 10) {
    rect.left = 10
  }
  if (rect.top < 10) {
    rect.top = 10
  }
  // handle situation where image is flipped
  // which result in negative width and height
  if (rect.width < 0) {
    // calculate real left from the two value
    rect.left = rect.left + rect.width
    rect.width = Math.abs(rect.width)
  }
  if (rect.height < 0) {
    rect.top = rect.top + rect.height
    rect.height = Math.abs(rect.height)
  }
  const xx = Math.abs(rect.left + rect.width),
    yy = Math.abs(rect.top + rect.height)
  const rst = {
    x: rect.left > maxX ? maxX : rect.left,
    y: rect.top > maxY ? maxY : rect.top,
    xx: xx > maxX ? maxX : xx,
    yy: yy > maxY ? maxY : yy,
    font: rect.font ? Number(rect.font) : -1
  }
  rst.width = rst.xx - rst.x
  rst.height = rst.yy - rst.y
  return rst
}

export default {
  rectToScale,
  objectToText: objToText,
  rectContains,
  rectToNumeric
}
import random  # kept: may be used elsewhere in the file
import secrets
import string


def generate_password(length):
    """Generate a random password of *length* characters.

    Characters are drawn from ASCII letters and digits. Uses
    ``secrets.choice`` (a CSPRNG) rather than ``random.choice``:
    passwords are security-sensitive and ``random`` is predictable.

    Args:
        length: number of characters to generate (0 yields "").

    Returns:
        The generated password string.
    """
    alphabet = string.ascii_letters + string.digits
    # join over a generator avoids the original O(n^2) string concatenation.
    return "".join(secrets.choice(alphabet) for _ in range(length))


# Bug fix: the original called generate_password(password_length) with
# password_length never defined, raising NameError at runtime.
password_length = 8

# Generate password
print(generate_password(password_length))
#!/bin/bash
# Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License, version 2.0,
# as published by the Free Software Foundation.
#
# This program is also distributed with certain software (including
# but not limited to OpenSSL) that is licensed under separate terms,
# as designated in a particular file or component or in included license
# documentation. The authors of MySQL hereby grant you an additional
# permission to link the program and your derivative works with the
# separately licensed software that they have included with MySQL.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License, version 2.0, for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301  USA

# Simulate network latency on the data nodes' ServerPorts listed in config.ini.

#set -x

if [ $# -lt 1 ]; then
  echo "usage: $(basename "$0") <delay-ms> [<config.ini>]"
  exit 1
fi

delay="$1"
myini="${2:-../config.ini}"

if [ ! -e "$myini" ]; then
  echo "file not found: $myini"
  exit 1
fi

# Extract ServerPort values: drop everything up to '=', then trailing comments.
ports="$(grep '^ServerPort' "$myini" | sed -e 's/.*=//' -e 's/#.*//')"
# shellcheck disable=SC2086 — unquoted on purpose: collapse newlines to spaces.
ports="$(echo $ports)"
echo "found ServerPorts in $myini : $ports"

if [ "$(uname)" = "Darwin" ]; then
  # $ports is intentionally unquoted so each port is a separate argument.
  # shellcheck disable=SC2086
  ./pdelay_mac.sh "$delay" $ports
fi

#set +x
import doodle.core._
import doodle.image._
import doodle.image.syntax._
import doodle.image.syntax.core._
import doodle.java2d._

// Draws a concentric "evil eye": a black pupil on a cornflower-blue iris, on a
// white ring, on a dark-blue outer disc, then renders it with draw().
object EvilEye extends App {
  // Base radius unit (circle takes a diameter, so layers are multiples of r).
  val r = 21
  // Outline stroke width applied to the composed image.
  val stroke = 5
  // Each line ends with the infix operator so the expression continues —
  // the token stream (and parse) is identical to the one-line form.
  Image.circle(r * 2).fillColor(Color.black) on
    Image.circle(4 * r).fillColor(Color.cornflowerBlue) on
    Image.circle(6 * r).fillColor(Color.white) on
    Image.circle(10 * r).fillColor(Color.darkBlue) strokeWidth stroke draw()
}
import matplotlib.pyplot as plt

# Category labels and the value plotted for each category.
categories = ['A', 'B', 'C']
heights = [20, 40, 30]

# Render a simple labelled bar chart.
plt.bar(categories, heights)
plt.xlabel('Name')
plt.ylabel('Values')
plt.title('Bar Chart')
plt.show()
// Copyright 2019 Drone IO, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package core

import "context"

type (
	// Step represents an individual step in the stage.
	Step struct {
		ID        int64  `json:"id"`                  // unique step identifier
		StageID   int64  `json:"stage_id"`            // owning stage
		Number    int    `json:"number"`              // ordinal within the stage
		Name      string `json:"name"`                // display name
		Status    string `json:"status"`              // one of the Status* constants
		Error     string `json:"error,omitempty"`     // failure detail, if any
		ErrIgnore bool   `json:"errignore,omitempty"` // failure does not fail the stage
		ExitCode  int    `json:"exit_code"`           // process exit code
		Started   int64  `json:"started,omitempty"`   // start time (unix)
		Stopped   int64  `json:"stopped,omitempty"`   // stop time (unix)
		Version   int64  `json:"version"`             // optimistic-locking version
	}

	// StepStore persists build step information to storage.
	StepStore interface {
		// List returns a build stage list from the datastore.
		List(context.Context, int64) ([]*Step, error)

		// Find returns a build stage from the datastore by ID.
		Find(context.Context, int64) (*Step, error)

		// FindNumber returns a stage from the datastore by number.
		FindNumber(context.Context, int64, int) (*Step, error)

		// Create persists a new stage to the datastore.
		Create(context.Context, *Step) error

		// Update persists an updated stage to the datastore.
		Update(context.Context, *Step) error
	}
)

// IsDone returns true if the step has a completed state; waiting, pending,
// running and blocked are the only non-terminal statuses.
func (s *Step) IsDone() bool {
	switch s.Status {
	case StatusWaiting,
		StatusPending,
		StatusRunning,
		StatusBlocked:
		return false
	default:
		return true
	}
}
#!/bin/sh
# Export runtime configuration from the secret store, then start the service.
# The secret named by $SECRETNAME holds KEY=VALUE lines; /opt/secret2env
# prints them and we extract the keys we need.

# Fetch the secret payload once instead of once per variable (the original
# invoked secret2env six times).
secrets="$(/opt/secret2env -name "$SECRETNAME")"

# lookup KEY — print KEY's value from the cached payload, mirroring the
# original grep -w | sed pipeline.
lookup() {
    printf '%s\n' "$secrets" | grep -w "$1" | sed "s/$1=//g"
}

export NODE_ENV="$(lookup NODE_ENV)"
export L1_NODE_WEB3_WS="$(lookup L1_NODE_WEB3_WS)"
export L1_LIQUIDITY_POOL_ADDRESS="$(lookup L1_LIQUIDITY_POOL_ADDRESS)"
export L2_LIQUIDITY_POOL_ADDRESS="$(lookup L2_LIQUIDITY_POOL_ADDRESS)"
export RELAYER_ADDRESS="$(lookup RELAYER_ADDRESS)"
export SEQUENCER_ADDRESS="$(lookup SEQUENCER_ADDRESS)"
export RECONNECT_TIME=10000

npm start
<filename>src/main/java/com/movella/service/UsuarioService.java package com.movella.service; import com.google.gson.JsonElement; import com.google.gson.JsonObject; import com.google.gson.JsonParser; import com.movella.dao.UsuarioDAO; import com.movella.exceptions.InvalidDataException; import com.movella.model.Usuario; import com.movella.responses.BadRequest; import com.movella.responses.Success; import com.movella.utils.FunctionUtils; import com.movella.utils.Localization; import org.postgresql.util.PSQLException; import spark.*; public class UsuarioService { public static Route login = (Request req, Response res) -> { final JsonObject body = JsonParser.parseString(req.body()).getAsJsonObject(); final JsonElement _email = body.get("email"); final JsonElement _senha = body.get("senha"); if (_email == null) return new BadRequest(res, Localization.invalidEmail); if (_senha == null) return new BadRequest(res, Localization.invalidPassword); final String email = _email.getAsString(); final String senha = _senha.getAsString(); try { final Usuario usuario = UsuarioDAO.login(email, FunctionUtils.sha256hex(senha)); req.session(true); req.session().attribute("user", usuario); return new Success(res, Localization.loginSuccess); } catch (InvalidDataException e) { return new BadRequest(res, e.message); } }; public static Route register = (Request req, Response res) -> { final JsonObject body = JsonParser.parseString(req.body()).getAsJsonObject(); final JsonElement _nome = body.get("nome"); final JsonElement _email = body.get("email"); final JsonElement _senha = body.get("senha"); if (_nome == null) return new BadRequest(res, Localization.invalidName); if (_email == null) return new BadRequest(res, Localization.invalidEmail); if (_senha == null) return new BadRequest(res, Localization.invalidPassword); final String nome = _nome.getAsString(); final String email = _email.getAsString(); final String senha = _senha.getAsString(); try { final Usuario usuario = 
UsuarioDAO.register(nome, email, FunctionUtils.sha256hex(senha)); req.session(true); req.session().attribute("user", usuario); return new Success(res, Localization.userRegisterSuccess); } catch (InvalidDataException e) { return new BadRequest(res, e.message); } catch (RuntimeException e) { if (e.getCause().getClass() == PSQLException.class) { if (e.getMessage().contains("usuario_email_unique")) return new BadRequest(res, Localization.userRegisterDuplicateEmail); if (e.getMessage().contains("usuario_nome_unique")) return new BadRequest(res, Localization.userRegisterDuplicateUsername); } return new BadRequest(res); } }; public static Route update = (Request req, Response res) -> { final JsonObject body = JsonParser.parseString(req.body()).getAsJsonObject(); final JsonElement _cpf = body.get("cpf"); final JsonElement _celular = body.get("celular"); final JsonElement _cep = body.get("cep"); final JsonElement _complemento = body.get("complemento"); final JsonElement _nome = body.get("nome"); final JsonElement _senha = body.get("senha"); if (_nome == null) return new BadRequest(res, Localization.invalidName); if (_senha == null) return new BadRequest(res, Localization.invalidPassword); if (_cpf == null) return new BadRequest(res, Localization.invalidCpf); if (_celular == null) return new BadRequest(res, Localization.invalidCelular); if (_cep == null) return new BadRequest(res, Localization.invalidCep); if (_complemento == null) return new BadRequest(res, Localization.invalidComplemento); final String nome = _nome.getAsString(); final String senha = _senha.getAsString(); final String cpf = _cpf.getAsString().replaceAll("[\\.\\-]", ""); final String celular = _celular.getAsString().replaceAll("[\\(\\)\\-\\s]", ""); final String cep = _cep.getAsString().replaceAll("[\\.\\-]", ""); final String complemento = _complemento.getAsString(); final Session session = req.session(); final Usuario sessionUsuario = (Usuario) session.attribute("user"); final String acesso = 
sessionUsuario.getAcesso(); final String email = sessionUsuario.getEmail(); final String newAcesso = acesso == "admin" ? "admin" : "verificado"; final int id = sessionUsuario.getId(); final JsonObject viaCep = FunctionUtils.get(String.format("https://viacep.com.br/ws/%s/json/", cep)); if (viaCep.get("erro") != null) return new BadRequest(res, Localization.invalidCep); try { final String bairro = viaCep.get("bairro").getAsString(); final String cidade = viaCep.get("localidade").getAsString(); final String logradouro = viaCep.get("logradouro").getAsString(); final String uf = viaCep.get("uf").getAsString(); try { final Usuario usuario = UsuarioDAO.update(email, newAcesso, bairro, celular, cep, cidade, cpf, complemento, logradouro, uf, nome, FunctionUtils.sha256hex(senha), id); req.session().attribute("user", usuario); return new Success(res, Localization.userUpdateSuccess); } catch (InvalidDataException e) { return new BadRequest(res, e.message); } catch (RuntimeException e) { if (e.getCause().getClass() == PSQLException.class) { if (e.getMessage().contains("usuario_email_unique")) return new BadRequest(res, Localization.userRegisterDuplicateEmail); if (e.getMessage().contains("usuario_nome_unique")) return new BadRequest(res, Localization.userRegisterDuplicateUsername); if (e.getMessage().contains("usuario_cpf_unique")) return new BadRequest(res, Localization.userUpdateError); } return new BadRequest(res); } } catch (HaltException e) { throw e; } catch (Exception e) { return new BadRequest(res, e.getMessage()); } }; }
<reponame>leomillon/try-jcv
/*
 * Copyright 2000-2017 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jetbrains.webdemo.kotlin.impl;

import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Pair;
import com.intellij.psi.search.GlobalSearchScope;
import kotlin.Unit;
import kotlin.jvm.functions.Function1;
import kotlin.jvm.functions.Function2;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.kotlin.analyzer.AnalysisResult;
import org.jetbrains.kotlin.cli.jvm.compiler.CliLightClassGenerationSupport;
import org.jetbrains.kotlin.cli.jvm.compiler.KotlinCoreEnvironment;
import org.jetbrains.kotlin.cli.jvm.compiler.TopDownAnalyzerFacadeForJVM;
import org.jetbrains.kotlin.codegen.ClassBuilderFactories;
import org.jetbrains.kotlin.codegen.state.GenerationState;
import org.jetbrains.kotlin.config.CompilerConfiguration;
import org.jetbrains.kotlin.config.JVMConfigurationKeys;
import org.jetbrains.kotlin.config.LanguageVersionSettingsImpl;
import org.jetbrains.kotlin.config.TargetPlatformVersion;
import org.jetbrains.kotlin.container.ComponentProvider;
import org.jetbrains.kotlin.container.ContainerKt;
import org.jetbrains.kotlin.container.DslKt;
import org.jetbrains.kotlin.container.StorageComponentContainer;
import org.jetbrains.kotlin.context.ContextKt;
import org.jetbrains.kotlin.context.ModuleContext;
import org.jetbrains.kotlin.context.MutableModuleContext;
import org.jetbrains.kotlin.descriptors.ModuleDescriptor;
import org.jetbrains.kotlin.descriptors.PackagePartProvider;
import org.jetbrains.kotlin.descriptors.impl.ModuleDescriptorImpl;
import org.jetbrains.kotlin.incremental.components.LookupTracker;
import org.jetbrains.kotlin.js.analyze.TopDownAnalyzerFacadeForJS;
import org.jetbrains.kotlin.js.config.JSConfigurationKeys;
import org.jetbrains.kotlin.js.config.JsConfig;
import org.jetbrains.kotlin.js.resolve.JsPlatform;
import org.jetbrains.kotlin.name.Name;
import org.jetbrains.kotlin.psi.KtFile;
import org.jetbrains.kotlin.resolve.*;
import org.jetbrains.kotlin.resolve.calls.smartcasts.DataFlowInfo;
import org.jetbrains.kotlin.resolve.jvm.extensions.AnalysisHandlerExtension;
import org.jetbrains.kotlin.resolve.lazy.FileScopeProviderImpl;
import org.jetbrains.kotlin.resolve.lazy.KotlinCodeAnalyzer;
import org.jetbrains.kotlin.resolve.lazy.ResolveSession;
import org.jetbrains.kotlin.resolve.lazy.declarations.DeclarationProviderFactory;
import org.jetbrains.kotlin.resolve.lazy.declarations.FileBasedDeclarationProviderFactory;
import org.jetbrains.kotlin.serialization.js.JsModuleDescriptor;
import org.jetbrains.kotlin.storage.StorageManager;
import org.jetbrains.webdemo.kotlin.impl.environment.EnvironmentManager;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;

/**
 * Static helpers that drive Kotlin compiler front-end analysis for the web
 * demo: they build an analysis container for either the JVM or JS target and
 * expose the resulting BindingContext / GenerationState.
 */
public class ResolveUtils {

    // Utility class — not instantiable.
    private ResolveUtils() {
    }

    /**
     * Runs front-end analysis on *files* for the requested target and returns
     * the resulting binding context.
     */
    public static BindingContext getBindingContext(@NotNull List<KtFile> files, Project project, boolean isJs) {
        Pair<AnalysisResult, ComponentProvider> result = isJs ? analyzeFileForJs(files, project) : analyzeFileForJvm(files, project);
        AnalysisResult analyzeExhaust = result.getFirst();
        return analyzeExhaust.getBindingContext();
    }

    /**
     * Analyzes *files* for the JVM and wraps the result in a GenerationState
     * ready for bytecode generation.
     */
    public static GenerationState getGenerationState(@NotNull List<KtFile> files, Project project, CompilerConfiguration compilerConfiguration) {
        AnalysisResult analyzeExhaust = analyzeFileForJvm(files, project).getFirst();
        return new GenerationState(
                project,
                ClassBuilderFactories.binaries(false),
                analyzeExhaust.getModuleDescriptor(),
                analyzeExhaust.getBindingContext(),
                files,
                compilerConfiguration
        );
    }

    /**
     * Builds a JVM analysis container, runs top-down analysis on *files*, lets
     * registered AnalysisHandlerExtensions post-process, and returns the
     * AnalysisResult together with the container it was produced by.
     */
    public static Pair<AnalysisResult, ComponentProvider> analyzeFileForJvm(@NotNull List<KtFile> files, Project project) {
        final KotlinCoreEnvironment environment = EnvironmentManager.getEnvironment();
        BindingTrace trace = new CliLightClassGenerationSupport.CliBindingTrace();

        CompilerConfiguration configuration = environment.getConfiguration();
        configuration.put(JVMConfigurationKeys.ADD_BUILT_INS_FROM_COMPILER_TO_DEPENDENCIES, true);

        ComponentProvider container = TopDownAnalyzerFacadeForJVM.INSTANCE.createContainer(
                environment.getProject(),
                files,
                trace,
                configuration,
                new Function1<GlobalSearchScope, PackagePartProvider>() {
                    @Override
                    public PackagePartProvider invoke(GlobalSearchScope globalSearchScope) {
                        return environment.createPackagePartProvider(globalSearchScope);
                    }
                },
                new Function2<StorageManager, Collection<? extends KtFile>, DeclarationProviderFactory>() {
                    @Override
                    public DeclarationProviderFactory invoke(StorageManager storageManager, Collection<? extends KtFile> ktFiles) {
                        return new FileBasedDeclarationProviderFactory(storageManager, (Collection<KtFile>) ktFiles);
                    }
                },
                TopDownAnalyzerFacadeForJVM.INSTANCE.newModuleSearchScope(project, files)
        );

        DslKt.getService(container, LazyTopDownAnalyzer.class).analyzeDeclarations(TopDownAnalysisMode.TopLevelDeclarations, files, DataFlowInfo.Companion.getEMPTY());

        ModuleDescriptor moduleDescriptor = DslKt.getService(container, ModuleDescriptor.class);
        // First extension that yields a result wins.
        for (AnalysisHandlerExtension extension : AnalysisHandlerExtension.Companion.getInstances(project)) {
            AnalysisResult result = extension.analysisCompleted(project, moduleDescriptor, trace, files);
            if (result != null) break;
        }

        //noinspection unchecked
        return new Pair<AnalysisResult, ComponentProvider>(
                AnalysisResult.success(trace.getBindingContext(), moduleDescriptor),
                container);
    }

    /**
     * Analyzes *files* for the JS target: sets up a fresh module backed by the
     * bundled JS library, builds the JS analysis container, and returns the
     * facade's AnalysisResult plus that container.
     */
    public static Pair<AnalysisResult, ComponentProvider> analyzeFileForJs(@NotNull List<KtFile> files, Project project) {
        KotlinCoreEnvironment environment = EnvironmentManager.getEnvironment();
        // Copy so the JS-only LIBRARIES key does not leak into the shared config.
        CompilerConfiguration configuration = environment.getConfiguration().copy();
        configuration.put(JSConfigurationKeys.LIBRARIES, Collections.singletonList(WrapperSettings.JS_LIB_ROOT.toString()));
        JsConfig config = new JsConfig(project, configuration);

        MutableModuleContext module = ContextKt.ContextForNewModule(
                ContextKt.ProjectContext(project),
                Name.special("<" + config.getModuleId() + ">"),
                JsPlatform.INSTANCE.getBuiltIns(),
                null
        );
        module.setDependencies(computeDependencies(module.getModule(), config));

        BindingTrace trace = new CliLightClassGenerationSupport.CliBindingTrace();
        FileBasedDeclarationProviderFactory providerFactory = new FileBasedDeclarationProviderFactory(module.getStorageManager(), files);
        Pair<LazyTopDownAnalyzer, ComponentProvider> analyzerAndProvider = createContainerForTopDownAnalyzerForJs(module, trace, EnvironmentManager.getLanguageVersion(), providerFactory);

        //noinspection unchecked
        return new Pair<AnalysisResult, ComponentProvider>(TopDownAnalyzerFacadeForJS.analyzeFiles(files, config), analyzerAndProvider.second);
    }

    /**
     * Dependency list for the new JS module: the module itself, every library
     * module from the config, and the JS built-ins module.
     */
    @NotNull
    private static List<ModuleDescriptorImpl> computeDependencies(ModuleDescriptorImpl module, @NotNull JsConfig config) {
        List<ModuleDescriptorImpl> allDependencies = new ArrayList<ModuleDescriptorImpl>();
        allDependencies.add(module);
        for (JsModuleDescriptor<ModuleDescriptorImpl> jsModuleDescriptor : config.getModuleDescriptors()) {
            allDependencies.add(jsModuleDescriptor.getData());
        }
        allDependencies.add(JsPlatform.INSTANCE.getBuiltIns().getBuiltInsModule());
        return allDependencies;
    }

    /**
     * Composes the DI container for JS top-down analysis and initializes the
     * module descriptor with the analyzer's package fragment provider.
     */
    private static Pair<LazyTopDownAnalyzer, ComponentProvider> createContainerForTopDownAnalyzerForJs(
            final ModuleContext moduleContext,
            final BindingTrace bindingTrace,
            final TargetPlatformVersion platformVersion,
            final DeclarationProviderFactory declarationProviderFactory
    ) {
        StorageComponentContainer container = DslKt.composeContainer(
                "TopDownAnalyzerForJs",
                JsPlatform.INSTANCE.getPlatformConfigurator().getPlatformSpecificContainer(),
                new Function1<StorageComponentContainer, Unit>() {
                    @Override
                    public Unit invoke(StorageComponentContainer storageComponentContainer) {
                        org.jetbrains.kotlin.frontend.di.InjectionKt.configureModule(storageComponentContainer, moduleContext, JsPlatform.INSTANCE, platformVersion, bindingTrace);
                        DslKt.useInstance(storageComponentContainer, declarationProviderFactory);
                        ContainerKt.registerSingleton(storageComponentContainer, AnnotationResolverImpl.class);
                        ContainerKt.registerSingleton(storageComponentContainer, FileScopeProviderImpl.class);
                        CompilerEnvironment.INSTANCE.configure(storageComponentContainer);
                        DslKt.useInstance(storageComponentContainer, LookupTracker.DO_NOTHING.INSTANCE);
                        DslKt.useInstance(storageComponentContainer, LanguageVersionSettingsImpl.DEFAULT);
                        ContainerKt.registerSingleton(storageComponentContainer, ResolveSession.class);
                        ContainerKt.registerSingleton(storageComponentContainer, LazyTopDownAnalyzer.class);
                        return null;
                    }
                });
        DslKt.getService(container, ModuleDescriptorImpl.class).initialize(DslKt.getService(container, KotlinCodeAnalyzer.class).getPackageFragmentProvider());
        //noinspection unchecked
        return new Pair<LazyTopDownAnalyzer, ComponentProvider>(DslKt.getService(container, LazyTopDownAnalyzer.class), container);
    }
}
class OperatingSystemUtility:
    """Toy OS utility: reports file-system and OS info and offers a theme picker."""

    def __init__(self):
        # Static demo data standing in for real system queries.
        self.file_system_info = {
            'total_size': '100GB',
            'used_space': '60GB',
            'available_space': '40GB',
        }
        self.os_info = {'name': 'MyOS', 'version': '1.0'}
        self.theme_options = ['Default', 'Dark', 'Light']

    def display_file_system_info(self):
        """Print total/used/available space, one line each."""
        info = self.file_system_info
        print(f"Total Size: {info['total_size']}")
        print(f"Used Space: {info['used_space']}")
        print(f"Available Space: {info['available_space']}")

    def display_os_info(self):
        """Print the OS name and version on one line."""
        print(f"Operating System: {self.os_info['name']} {self.os_info['version']}")

    def prompt_theme_selection(self):
        """List themes, read the user's numeric choice, and report the result."""
        print("Available Themes:")
        for index, theme in enumerate(self.theme_options, start=1):
            print(f"{index}. {theme}")
        choice = input("Enter the number corresponding to the theme you want to choose: ")
        try:
            selected = int(choice)
        except ValueError:
            print("Invalid input. Please enter a number.")
            return
        if 1 <= selected <= len(self.theme_options):
            print(f"Theme '{self.theme_options[selected - 1]}' selected.")
        else:
            print("Invalid theme selection.")


if __name__ == "__main__":
    os_utility = OperatingSystemUtility()
    # Dispatch table replaces the original if/elif chain; behavior is the same.
    commands = {
        'fi': os_utility.display_file_system_info,
        'get_OS': os_utility.display_os_info,
        'choose_theme': os_utility.prompt_theme_selection,
    }
    while True:
        command = input("Enter a command (fi, get_OS, choose_theme) or 'exit' to quit: ")
        if command == 'exit':
            print("Exiting the utility.")
            break
        action = commands.get(command)
        if action is None:
            print("Invalid command. Please enter a valid command.")
        else:
            action()
/**
 * Resolve the name of a category's parent.
 *
 * @param array  $categories   Rows shaped like ['id' => int, 'parent_id' => int, 'name' => string].
 * @param string $categoryName Name of the category whose parent is wanted.
 * @return string The parent's name, "No parent category" for root items
 *                (parent_id === 0), or "Category not found" when the name
 *                is unknown or the parent id does not resolve.
 */
function getParentCategoryName(array $categories, string $categoryName): string
{
    foreach ($categories as $category) {
        if ($category['name'] !== $categoryName) {
            continue;
        }
        if ($category['parent_id'] === 0) {
            return "No parent category";
        }
        // Bug fix: the parent is the row whose own id equals this category's
        // parent_id. The original compared parent_id to parent_id, which
        // matched any sibling (or the category itself) instead of the parent.
        // Assumes each row carries an 'id' key -- TODO confirm against callers.
        foreach ($categories as $candidate) {
            if ($candidate['id'] === $category['parent_id']) {
                return $candidate['name'];
            }
        }
    }
    return "Category not found";
}
<reponame>madhusha2020/inventory-frontend-ngx import {Component, OnInit} from '@angular/core'; import {Delivery, DeliveryControllerService} from '../../../service/rest'; import {LocalDataSource} from 'ng2-smart-table'; import {NbSearchService} from '@nebular/theme'; import {Router} from '@angular/router'; import {ServiceUtil} from '../../../service/util/service-util'; @Component({ selector: 'ngx-delivery-main', templateUrl: './delivery-main.component.html', styleUrls: ['./delivery-main.component.scss'] }) export class DeliveryMainComponent implements OnInit { deliveries: Array<Delivery> = []; settings = { hideSubHeader: true, actions: false, columns: { id: { title: 'Delivery#', type: 'number', }, date: { title: 'Date', type: 'string', }, deliveryaddress: { title: 'Address', type: 'string', }, deliverycontactname: { title: 'Contact Name', type: 'string', }, deliverycontactno: { title: 'Contact No', type: 'string', }, deliveryempname: { title: 'Employee Name', type: 'string', }, deliveryempcontactno: { title: 'Employee Contact No', type: 'string', }, deliveryvehicletype: { title: 'Vehicle Type', type: 'string', }, deliveryvehicleno: { title: 'Vehicle No', type: 'string', }, statusDescription: { title: 'Status', type: 'string', }, }, }; source: LocalDataSource = new LocalDataSource(); constructor(private deliveryControllerService: DeliveryControllerService, private searchService: NbSearchService, private router: Router) { this.searchService.onSearchSubmit() .subscribe((data: any) => { console.log('Search', data.term); this.source.setFilter([ { field: 'name', search: data.term } ], false); }); } ngOnInit(): void { this.fetchDeliveries(); } fetchDeliveries() { this.deliveryControllerService.getAllDeliveriesUsingGET().subscribe(response => { console.log('Delivery Data :', response); response.deliveryList.forEach(delivery => { delivery.statusDescription = ServiceUtil.getStatusDescription(delivery.status); this.deliveries.push(delivery); }); 
this.source.load(this.deliveries); }); } onUserRowSelect(event): void { console.log(event); this.router.navigate(['/pages/delivery/view'], {queryParams: {id: event.data.id}}); } resetFilter(): void { this.source.setFilter([]); } }
#!/usr/bin/env bash
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Deletes the GCE instance whose name and zone were recorded (by an earlier
# task) in the instance-data/ directory under the current working directory.

set -e

BASE_DIR=$(pwd)

# Resolve the real location of this script, following any chain of symlinks.
SOURCE="${BASH_SOURCE[0]}"
while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symlink
  SCRIPTDIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
  SOURCE="$(readlink "$SOURCE")"
  # Bug fix: the original resolved relative symlinks against the undefined
  # $DIR; they must be resolved against the directory the link lives in.
  # Written as `if` rather than `[[ ]] && ...` so a false test cannot trip
  # `set -e` when it is the last command of the loop body.
  if [[ $SOURCE != /* ]]; then
    SOURCE="$SCRIPTDIR/$SOURCE"
  fi
done
SCRIPTDIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"

INSTANCE_NAME="$(cat instance-data/instance-name)"
ZONE="$(cat instance-data/zone)"

echo 'StrictHostKeyChecking no' >> /etc/ssh/ssh_config

# Quote expansions so unusual characters in name/zone are passed intact.
gcloud compute instances delete "${INSTANCE_NAME}" \
  --zone="${ZONE}" \
  --quiet
import numpy as np
import yaml, caffe

from other import clip_boxes
from anchor import AnchorText


class ProposalLayer(caffe.Layer):
    """Caffe layer turning per-anchor scores and deltas into clipped box
    proposals (appears to be a CTPN-style text-detection pipeline --
    TODO confirm)."""

    def setup(self, bottom, top):
        # parse the layer parameter string, which must be valid YAML
        #layer_params = yaml.load(self.param_str_)
        # NOTE(review): yaml.load without an explicit Loader is deprecated in
        # PyYAML >= 5.1 and unsafe on untrusted input -- confirm the prototxt
        # source is trusted, or switch to yaml.safe_load.
        layer_params = yaml.load(self.param_str)

        self._feat_stride = layer_params['feat_stride']
        self.anchor_generator=AnchorText()
        self._num_anchors = self.anchor_generator.anchor_num

        # top[0]: proposals (N, 4); top[1]: scores. Both are re-shaped to
        # their true sizes in forward().
        top[0].reshape(1, 4)
        top[1].reshape(1, 1, 1, 1)

    def forward(self, bottom, top):
        assert bottom[0].data.shape[0]==1, \
            'Only single item batches are supported'

        # Keep only the second half of the channel axis as scores
        # (presumably the foreground class -- TODO confirm).
        scores = bottom[0].data[:, self._num_anchors:, :, :]
        bbox_deltas = bottom[1].data
        im_info = bottom[2].data[0, :]

        height, width = scores.shape[-2:]
        # One set of anchors per feature-map position at this stride.
        anchors=self.anchor_generator.locate_anchors((height, width), self._feat_stride)

        # Flatten to (H*W*A, 1) scores and (H*W*A, 2) deltas; each anchor has
        # 2 delta values here (vertical-only refinement -- TODO confirm).
        scores=scores.transpose((0, 2, 3, 1)).reshape(-1, 1)
        bbox_deltas=bbox_deltas.transpose((0, 2, 3, 1)).reshape((-1, 2))

        proposals=self.anchor_generator.apply_deltas_to_anchors(bbox_deltas, anchors)

        # clip the proposals in excess of the boundaries of the image
        proposals=clip_boxes(proposals, im_info[:2])

        blob=proposals.astype(np.float32, copy=False)
        top[0].reshape(*(blob.shape))
        top[0].data[...]=blob

        top[1].reshape(*(scores.shape))
        top[1].data[...]=scores

    def backward(self, top, propagate_down, bottom):
        # Proposal generation is not differentiable; nothing to propagate.
        pass

    def reshape(self, bottom, top):
        # Output shapes are set on the fly in forward().
        pass
#!/bin/bash
#SBATCH --nodes 1 --ntasks 32 --time 2:00:00 -p short --mem 64G --out logs/mosdepth.parallel.log
#SBATCH -J modepth
#
# Runs mosdepth in parallel over every CRAM alignment to compute windowed
# coverage, then prepares ggplot input and renders CNV plots.

# Parallel job count: use the SLURM allocation, fall back to 2 off-cluster.
CPU=$SLURM_CPUS_ON_NODE
if [ ! $CPU ]; then
  CPU=2
fi

# config.txt is expected to define GENOMEFOLDER, GENOMEFASTA and ALNFOLDER.
if [ -f config.txt ]; then
  source config.txt
fi
GENOME=$GENOMEFOLDER/$GENOMEFASTA

module unload python/2.7.5
mkdir -p coverage/mosdepth
export PATH="/bigdata/stajichlab/jstajich/miniconda3/bin:$PATH"

# Window size (bp) for mosdepth coverage bins.
WINDOW=10000

# The {= ... =} perl expression rewrites each input path: the alignment folder
# prefix becomes coverage/mosdepth/ and the .cram suffix becomes .<WINDOW>bp,
# yielding the mosdepth output prefix for that sample.
parallel --jobs $CPU mosdepth -f $GENOME -T 1,10,50,100,200 -n --by $WINDOW -t 2 "{= s:${ALNFOLDER}\/:coverage/mosdepth/:; s:\.cram:.${WINDOW}bp: =}" {} ::: ${ALNFOLDER}/*.cram

bash scripts/mosdepth_prep_ggplot.sh
mkdir -p plots
Rscript Rscripts/plot_mosdepth_CNV.R
"""Transforms operating jointly on (img1, img2, flow, valid_flow_mask) tuples
for optical-flow training (RAFT-style augmentation pipeline).

Fixes vs. original: removed the stray ``<gh_stars>`` scraper artifact (a
syntax error), and added the missing f-prefix on the valid_flow_mask dtype
error message so the actual dtype is interpolated.
"""

import torch
import torchvision.transforms as T
import torchvision.transforms.functional as F


class ValidateModelInput(torch.nn.Module):
    # Pass-through transform that checks the shape and dtypes to make sure the model gets what it expects
    def forward(self, img1, img2, flow, valid_flow_mask):
        if not all(isinstance(arg, torch.Tensor) for arg in (img1, img2, flow, valid_flow_mask) if arg is not None):
            raise TypeError("This method expects all input arguments to be of type torch.Tensor.")
        if not all(arg.dtype == torch.float32 for arg in (img1, img2, flow) if arg is not None):
            raise TypeError("This method expects the tensors img1, img2 and flow of be of dtype torch.float32.")

        if img1.shape != img2.shape:
            raise ValueError("img1 and img2 should have the same shape.")
        h, w = img1.shape[-2:]
        if flow is not None and flow.shape != (2, h, w):
            raise ValueError(f"flow.shape should be (2, {h}, {w}) instead of {flow.shape}")
        if valid_flow_mask is not None:
            if valid_flow_mask.shape != (h, w):
                raise ValueError(f"valid_flow_mask.shape should be ({h}, {w}) instead of {valid_flow_mask.shape}")
            if valid_flow_mask.dtype != torch.bool:
                # Bug fix: this message was missing its f-prefix, so the
                # placeholder was printed literally instead of the dtype.
                raise TypeError(f"valid_flow_mask should be of dtype torch.bool instead of {valid_flow_mask.dtype}")
        return img1, img2, flow, valid_flow_mask


class MakeValidFlowMask(torch.nn.Module):
    # This transform generates a valid_flow_mask if it doesn't exist.
    # The flow is considered valid if ||flow||_inf < threshold
    # This is a noop for Kitti and HD1K which already come with a built-in flow mask.
    def __init__(self, threshold=1000):
        super().__init__()
        self.threshold = threshold

    def forward(self, img1, img2, flow, valid_flow_mask):
        if flow is not None and valid_flow_mask is None:
            valid_flow_mask = (flow.abs() < self.threshold).all(axis=0)
        return img1, img2, flow, valid_flow_mask


class ConvertImageDtype(torch.nn.Module):
    # Converts both images to the given dtype (the flow/mask are untouched).
    def __init__(self, dtype):
        super().__init__()
        self.dtype = dtype

    def forward(self, img1, img2, flow, valid_flow_mask):
        img1 = F.convert_image_dtype(img1, dtype=self.dtype)
        img2 = F.convert_image_dtype(img2, dtype=self.dtype)

        img1 = img1.contiguous()
        img2 = img2.contiguous()

        return img1, img2, flow, valid_flow_mask


class Normalize(torch.nn.Module):
    # Channel-wise normalization of both images with the same mean/std.
    def __init__(self, mean, std):
        super().__init__()
        self.mean = mean
        self.std = std

    def forward(self, img1, img2, flow, valid_flow_mask):
        img1 = F.normalize(img1, mean=self.mean, std=self.std)
        img2 = F.normalize(img2, mean=self.mean, std=self.std)

        return img1, img2, flow, valid_flow_mask


class PILToTensor(torch.nn.Module):
    # Converts all inputs to tensors
    # Technically the flow and the valid mask are numpy arrays, not PIL images, but we keep that naming
    # for consistency with the rest, e.g. the segmentation reference.
    def forward(self, img1, img2, flow, valid_flow_mask):
        img1 = F.pil_to_tensor(img1)
        img2 = F.pil_to_tensor(img2)
        if flow is not None:
            flow = torch.from_numpy(flow)
        if valid_flow_mask is not None:
            valid_flow_mask = torch.from_numpy(valid_flow_mask)

        return img1, img2, flow, valid_flow_mask


class AsymmetricColorJitter(T.ColorJitter):
    # p determines the proba of doing asymmertric vs symmetric color jittering
    def __init__(self, brightness=0, contrast=0, saturation=0, hue=0, p=0.2):
        super().__init__(brightness=brightness, contrast=contrast, saturation=saturation, hue=hue)
        self.p = p

    def forward(self, img1, img2, flow, valid_flow_mask):

        if torch.rand(1) < self.p:
            # asymmetric: different transform for img1 and img2
            img1 = super().forward(img1)
            img2 = super().forward(img2)
        else:
            # symmetric: same transform for img1 and img2
            batch = torch.stack([img1, img2])
            batch = super().forward(batch)
            img1, img2 = batch[0], batch[1]

        return img1, img2, flow, valid_flow_mask


class RandomErasing(T.RandomErasing):
    # This only erases img2, and with an extra max_erase param
    # This max_erase is needed because in the RAFT training ref does:
    # 0 erasing with .5 proba
    # 1 erase with .25 proba
    # 2 erase with .25 proba
    # and there's no accurate way to achieve this otherwise.
    def __init__(self, p=0.5, scale=(0.02, 0.33), ratio=(0.3, 3.3), value=0, inplace=False, max_erase=1):
        super().__init__(p=p, scale=scale, ratio=ratio, value=value, inplace=inplace)
        self.max_erase = max_erase
        if self.max_erase <= 0:
            raise ValueError("max_raise should be greater than 0")

    def forward(self, img1, img2, flow, valid_flow_mask):
        if torch.rand(1) > self.p:
            return img1, img2, flow, valid_flow_mask

        for _ in range(torch.randint(self.max_erase, size=(1,)).item()):
            x, y, h, w, v = self.get_params(img2, scale=self.scale, ratio=self.ratio, value=[self.value])
            img2 = F.erase(img2, x, y, h, w, v, self.inplace)

        return img1, img2, flow, valid_flow_mask


class RandomHorizontalFlip(T.RandomHorizontalFlip):
    # Flips both images, the flow, and the mask; the flow's x component
    # changes sign under a horizontal flip.
    def forward(self, img1, img2, flow, valid_flow_mask):
        if torch.rand(1) > self.p:
            return img1, img2, flow, valid_flow_mask

        img1 = F.hflip(img1)
        img2 = F.hflip(img2)
        flow = F.hflip(flow) * torch.tensor([-1, 1])[:, None, None]
        if valid_flow_mask is not None:
            valid_flow_mask = F.hflip(valid_flow_mask)
        return img1, img2, flow, valid_flow_mask


class RandomVerticalFlip(T.RandomVerticalFlip):
    # Same as above, but the flow's y component changes sign.
    def forward(self, img1, img2, flow, valid_flow_mask):
        if torch.rand(1) > self.p:
            return img1, img2, flow, valid_flow_mask

        img1 = F.vflip(img1)
        img2 = F.vflip(img2)
        flow = F.vflip(flow) * torch.tensor([1, -1])[:, None, None]
        if valid_flow_mask is not None:
            valid_flow_mask = F.vflip(valid_flow_mask)
        return img1, img2, flow, valid_flow_mask


class RandomResizeAndCrop(torch.nn.Module):
    # This transform will resize the input with a given proba, and then crop it.
    # These are the reversed operations of the built-in RandomResizedCrop,
    # although the order of the operations doesn't matter too much: resizing a
    # crop would give the same result as cropping a resized image, up to
    # interpolation artifact at the borders of the output.
    #
    # The reason we don't rely on RandomResizedCrop is because of a significant
    # difference in the parametrization of both transforms, in particular,
    # because of the way the random parameters are sampled in both transforms,
    # which leads to fairly different resuts (and different epe). For more details see
    # https://github.com/pytorch/vision/pull/5026/files#r762932579
    def __init__(self, crop_size, min_scale=-0.2, max_scale=0.5, stretch_prob=0.8):
        super().__init__()
        self.crop_size = crop_size
        self.min_scale = min_scale
        self.max_scale = max_scale
        self.stretch_prob = stretch_prob
        self.resize_prob = 0.8
        self.max_stretch = 0.2

    def forward(self, img1, img2, flow, valid_flow_mask):
        # randomly sample scale
        h, w = img1.shape[-2:]
        # Note: in original code, they use + 1 instead of + 8 for sparse datasets (e.g. Kitti)
        # It shouldn't matter much
        min_scale = max((self.crop_size[0] + 8) / h, (self.crop_size[1] + 8) / w)

        scale = 2 ** torch.empty(1, dtype=torch.float32).uniform_(self.min_scale, self.max_scale).item()
        scale_x = scale
        scale_y = scale
        if torch.rand(1) < self.stretch_prob:
            scale_x *= 2 ** torch.empty(1, dtype=torch.float32).uniform_(-self.max_stretch, self.max_stretch).item()
            scale_y *= 2 ** torch.empty(1, dtype=torch.float32).uniform_(-self.max_stretch, self.max_stretch).item()

        scale_x = max(scale_x, min_scale)
        scale_y = max(scale_y, min_scale)

        new_h, new_w = round(h * scale_y), round(w * scale_x)

        if torch.rand(1).item() < self.resize_prob:
            # rescale the images
            img1 = F.resize(img1, size=(new_h, new_w))
            img2 = F.resize(img2, size=(new_h, new_w))
            if valid_flow_mask is None:
                flow = F.resize(flow, size=(new_h, new_w))
                flow = flow * torch.tensor([scale_x, scale_y])[:, None, None]
            else:
                flow, valid_flow_mask = self._resize_sparse_flow(
                    flow, valid_flow_mask, scale_x=scale_x, scale_y=scale_y
                )

        # Note: For sparse datasets (Kitti), the original code uses a "margin"
        # See e.g. https://github.com/princeton-vl/RAFT/blob/master/core/utils/augmentor.py#L220:L220
        # We don't, not sure it matters much
        y0 = torch.randint(0, img1.shape[1] - self.crop_size[0], size=(1,)).item()
        x0 = torch.randint(0, img1.shape[2] - self.crop_size[1], size=(1,)).item()

        img1 = F.crop(img1, y0, x0, self.crop_size[0], self.crop_size[1])
        img2 = F.crop(img2, y0, x0, self.crop_size[0], self.crop_size[1])
        flow = F.crop(flow, y0, x0, self.crop_size[0], self.crop_size[1])
        if valid_flow_mask is not None:
            valid_flow_mask = F.crop(valid_flow_mask, y0, x0, self.crop_size[0], self.crop_size[1])

        return img1, img2, flow, valid_flow_mask

    def _resize_sparse_flow(self, flow, valid_flow_mask, scale_x=1.0, scale_y=1.0):
        # This resizes both the flow and the valid_flow_mask mask (which is assumed to be reasonably sparse)
        # There are as-many non-zero values in the original flow as in the resized flow (up to OOB)
        # So for example if scale_x = scale_y = 2, the sparsity of the output flow is multiplied by 4

        h, w = flow.shape[-2:]

        h_new = int(round(h * scale_y))
        w_new = int(round(w * scale_x))
        flow_new = torch.zeros(size=[2, h_new, w_new], dtype=flow.dtype)
        valid_new = torch.zeros(size=[h_new, w_new], dtype=valid_flow_mask.dtype)

        jj, ii = torch.meshgrid(torch.arange(w), torch.arange(h), indexing="xy")

        ii_valid, jj_valid = ii[valid_flow_mask], jj[valid_flow_mask]

        ii_valid_new = torch.round(ii_valid.to(float) * scale_y).to(torch.long)
        jj_valid_new = torch.round(jj_valid.to(float) * scale_x).to(torch.long)

        within_bounds_mask = (0 <= ii_valid_new) & (ii_valid_new < h_new) & (0 <= jj_valid_new) & (jj_valid_new < w_new)

        ii_valid = ii_valid[within_bounds_mask]
        jj_valid = jj_valid[within_bounds_mask]
        ii_valid_new = ii_valid_new[within_bounds_mask]
        jj_valid_new = jj_valid_new[within_bounds_mask]

        valid_flow_new = flow[:, ii_valid, jj_valid]
        valid_flow_new[0] *= scale_x
        valid_flow_new[1] *= scale_y

        flow_new[:, ii_valid_new, jj_valid_new] = valid_flow_new
        valid_new[ii_valid_new, jj_valid_new] = 1

        return flow_new, valid_new


class Compose(torch.nn.Module):
    # Chains the 4-tuple transforms above, threading all four values through.
    def __init__(self, transforms):
        super().__init__()
        self.transforms = transforms

    def forward(self, img1, img2, flow, valid_flow_mask):
        for t in self.transforms:
            img1, img2, flow, valid_flow_mask = t(img1, img2, flow, valid_flow_mask)
        return img1, img2, flow, valid_flow_mask
"""LayoutVAE-style autoregressive count model components.

Refactor: the three identical 2-layer MLP encoders are deduplicated into a
shared private base class. Class names, constructor signatures, submodule
names (fc1/fc2) and forward behavior are unchanged, so existing callers and
saved state_dicts remain compatible.
"""

import torch
from torch import nn
import torch.nn.functional as F


class _TwoLayerReluEncoder(nn.Module):
    """Shared MLP ``in_features -> 128 -> 128`` with ReLU after each layer."""

    def __init__(self, in_features):
        super().__init__()
        self.fc1 = nn.Linear(in_features, 128)
        self.fc2 = nn.Linear(128, 128)

    def forward(self, x):
        x = F.relu(self.fc1(x))
        x = F.relu(self.fc2(x))
        return x


class LabelSetEncoder(_TwoLayerReluEncoder):
    """Encodes a label-set vector of size ``number_labels`` into a 128-d feature."""

    def __init__(self, number_labels):
        super().__init__(number_labels)
        self.number_labels = number_labels


class SingleLabelEncoder(_TwoLayerReluEncoder):
    """Encodes a single-label vector of size ``number_labels`` into a 128-d feature."""

    def __init__(self, number_labels):
        super().__init__(number_labels)
        self.number_labels = number_labels


class CountsEncoder(_TwoLayerReluEncoder):
    """Encodes a per-label count vector of size ``number_labels`` into a 128-d feature."""

    def __init__(self, number_labels):
        super().__init__(number_labels)
        self.number_labels = number_labels


class CountConditioningMLP(nn.Module):
    """Fuses label-set, current-label and counts-so-far features into one
    128-d conditioning vector."""

    # the LayoutVAE paper seems to indicate they teacher-force
    # at evaluation time... I can't imagine that is correct?
    def __init__(self, number_labels):
        super(CountConditioningMLP, self).__init__()
        self.encode_label_set = LabelSetEncoder(number_labels)
        self.encode_single_label = SingleLabelEncoder(number_labels)
        self.encode_counts = CountsEncoder(number_labels)
        self.fc = nn.Linear(128 * 3, 128)

    def forward(self, label_set, current_label, count_so_far):
        label_set = self.encode_label_set(label_set)
        current_label = self.encode_single_label(current_label)
        count_so_far = self.encode_counts(count_so_far)
        aggregate = torch.cat((label_set, current_label, count_so_far), dim=-1)
        # Note: no activation on the output, matching the original design.
        aggregate = self.fc(aggregate)
        return aggregate


class CountInputEncoder(nn.Module):
    """Encodes a scalar count (shape (..., 1)) into a 128-d feature.

    Unlike the label/count encoders above, the second layer has no ReLU.
    """

    def __init__(self):
        super(CountInputEncoder, self).__init__()
        self.fc1 = nn.Linear(1, 128)
        self.fc2 = nn.Linear(128, 128)

    def forward(self, x):
        x = F.relu(self.fc1(x))
        x = self.fc2(x)
        return x


class AutoregressiveCountEncoder(nn.Module):
    """Posterior network: maps (count, conditioning inputs) to the mean and
    (log-)scale of a ``representation_size``-d latent, plus the conditioning
    vector for reuse by the decoder."""

    def __init__(self, number_labels, conditioning_size, representation_size=32):
        super(AutoregressiveCountEncoder, self).__init__()
        self.number_labels = number_labels
        self.input_encoder = CountInputEncoder()
        self.conditioning = CountConditioningMLP(self.number_labels)
        self.fc = nn.Linear(128 + conditioning_size, representation_size)
        self.project_mu = nn.Linear(representation_size, representation_size)
        self.project_s = nn.Linear(representation_size, representation_size)

    # x is the count to be encoded.
    def forward(self, x, label_set, current_label, count_so_far):
        x = self.input_encoder(x)
        condition = self.conditioning(label_set, current_label, count_so_far)
        x = torch.cat((x, condition), dim=-1)
        x = F.relu(self.fc(x))
        mu = self.project_mu(x)
        s = self.project_s(x)
        return mu, s, condition


class AutoregressiveCountDecoder(nn.Module):
    """Maps a latent sample plus the conditioning vector to log(lambda) of a
    Poisson count distribution."""

    def __init__(self, conditioning_size, representation_size=32):
        super(AutoregressiveCountDecoder, self).__init__()
        self.fc1 = nn.Linear(conditioning_size + representation_size, 128)
        self.fc2 = nn.Linear(128, 64)
        self.project = nn.Linear(64, 1)

    def forward(self, z, condition):
        x = torch.cat((z, condition), dim=-1)
        x = F.relu(self.fc1(x))
        x = F.relu(self.fc2(x))
        # note, we are returning log(lambda)
        x = self.project(x)
        return x
<gh_stars>0 package main import ( "bufio" "encoding/json" "flag" "fmt" "github.com/gedex/inflector" "io/ioutil" "net/http" "os" "github.com/dragonfruit-api/dragonfruit" "github.com/dragonfruit-api/dragonfruit/backends/backend_couchdb" "github.com/go-martini/martini" "github.com/martini-contrib/gzip" "github.com/martini-contrib/sessions" ) const ( VERSION = "0.5.5" PATH_TO_DEFAULT_CONFIG = "/usr/local/etc/dragonfruit.conf" ) type cnf struct { config dragonfruit.Conf addInteractive bool serve bool version bool resourcetype string resourcefile string } /* main executable function */ func main() { fmt.Println("\n\n\033[31m~~~~~Dragon\033[32mFruit~~~~~\033[0m") cnf := parseFlags() fmt.Print("\033[1mVersion ", VERSION, " \033[0m\n\n\n") if cnf.version { return } // dragonfruit setup d := backend_couchdb.Db_backend_couch{} d.Connect("http://" + cnf.config.DbServer + ":" + cnf.config.DbPort) if (cnf.resourcefile != "" && cnf.resourcetype == "") || (cnf.resourcefile == "" && cnf.resourcetype != "") { fmt.Println("\033[31;1mYou must enter both a resource file and a resource type if you pass a resource from the command line.\033[0m") return } if cnf.addInteractive { addResouce(&d, cnf.config) } else if cnf.resourcefile != "" { addResourceFromFile(&d, cnf.config, cnf.resourcetype, cnf.resourcefile) } if cnf.serve { launchServer(&d, cnf.config) } } func launchServer(d dragonfruit.Db_backend, cnf dragonfruit.Conf) *martini.ClassicMartini { wd, _ := os.Getwd() st_opts := martini.StaticOptions{} st_opts.Prefix = wd m := dragonfruit.GetMartiniInstance(cnf) for _, dir := range cnf.StaticDirs { m.Use(martini.Static(dir)) } dragonfruit.ServeDocSet(m, d, cnf) m.Use(sessions.Sessions("my_session", sessions.NewCookieStore([]byte("secret123")))) m.Use(gzip.All()) m.RunOnAddr(cnf.Host + ":" + cnf.Port) return m } func returnSuccess(res http.ResponseWriter) (int, string) { h := res.Header() h.Add("Content-Type", "text/plain") //res.Write([]byte("Content-Type: text/plain")) return 200, 
"auth" } /* addResourceFromFile adds a new resource directly from a file, with standard naming conventions for resources and API endpoints */ func addResourceFromFile(d dragonfruit.Db_backend, cnf dragonfruit.Conf, resourceType string, fname string) { byt, err := ioutil.ReadFile(fname) if err != nil { panic(err) } err = dragonfruit.RegisterType(d, byt, cnf, resourceType, "") if err != nil { panic(err) } } /* addResource handles interactive mode parsing and naming of resources */ func addResouce(d dragonfruit.Db_backend, cnf dragonfruit.Conf) { // start a scanner to read from the command line scanner := bufio.NewScanner(os.Stdin) // set the resource type name fmt.Print("\033[1mWhat is the base model that this API returns?\033[0m ") scanner.Scan() resourceType := inflector.Singularize(scanner.Text()) path := inflector.Pluralize(resourceType) fmt.Print("\033[1mWhat is the path for APIs for this model?\033[0m press [enter] for \"", path, "\" ") scanner.Scan() tmpPath := scanner.Text() if tmpPath != "" { path = tmpPath } fmt.Print("\033[1mEnter a path to a sample data file:\033[0m ") scanner.Scan() fname := scanner.Text() byt, err := ioutil.ReadFile(fname) if err != nil { fmt.Println(err) } err = dragonfruit.RegisterType(d, byt, cnf, resourceType, path) if err != nil { panic(err) } fmt.Println("Done!") } /* parseFlags parses the command-line flags passed to the CLI */ func parseFlags() cnf { // set up a config object dfcnf := dragonfruit.Conf{} /* should we start a server? */ var serve = flag.Bool("serve", true, "Start a server after running") /* should we start a server? */ var version = flag.Bool("version", false, "Display the version and terminate.") /* should we try to parse a resource? */ var addresource = flag.Bool("add", false, "Add a new resource (interactive mode).") /* should we try to parse a specific file? 
*/ var resourcefile = flag.String("file", "", "Load and parse a resource file (with standard naming).") /* If we do parse a file, what is the resource type for that file? */ var resourcetype = flag.String("type", "", "The resource type for the file.") /* If not using the default config file, path to a config file */ var conflocation = flag.String("conf", "", "Path to a config file.") flag.Parse() baseConf, err := ioutil.ReadFile(PATH_TO_DEFAULT_CONFIG) if err != nil { panic("base conf file missing") } err = json.Unmarshal(baseConf, &dfcnf) if err != nil { panic(err) } if *conflocation != "" { out, err := ioutil.ReadFile(*conflocation) if err != nil { panic("cannot find file " + *conflocation) } err = json.Unmarshal(out, &dfcnf) if err != nil { panic(err) } } outconfig := cnf{ config: dfcnf, addInteractive: *addresource, serve: *serve, version: *version, resourcetype: *resourcetype, resourcefile: *resourcefile, } return outconfig }
#include <iostream>
#include <vector>
#include <string>

// Bug fix: removed the unused Chromium-private include
// ("services/device/public/mojom/nfc.mojom-blink.h"); nothing in this file
// references it and it does not exist outside the Chromium tree, so it
// broke compilation.

/**
 * Simulated NFC manager: stands in for real tag read/write and
 * device-discovery operations until actual NFC hooks are wired in.
 */
class NFCManager {
public:
    // Returns placeholder data for the tag with the given ID.
    std::string readTag(const std::string& tagID) {
        // Replace with an actual NFC read operation.
        return "Data read from tag with ID " + tagID;
    }

    // Pretends to write `data` to the tag with the given ID; always succeeds.
    // NOTE(review): both parameters are intentionally unused in this stub.
    bool writeTag(const std::string& tagID, const std::string& data) {
        (void)tagID;
        (void)data;  // silence unused-parameter warnings
        // Replace with an actual NFC write operation.
        return true;
    }

    // Returns a fixed list of "detected" device identifiers.
    std::vector<std::string> detectDevices() {
        // Replace with an actual NFC device-detection operation.
        return {"Device1", "Device2", "Device3"};
    }
};

// Demo driver exercising the three simulated operations.
int main() {
    NFCManager nfcManager;

    std::string tagID = "12345";
    std::string data = "Hello, NFC!";

    std::string readData = nfcManager.readTag(tagID);
    bool writeSuccess = nfcManager.writeTag(tagID, data);
    std::vector<std::string> detectedDevices = nfcManager.detectDevices();

    std::cout << "Data read from tag: " << readData << std::endl;
    std::cout << "Write operation success: " << (writeSuccess ? "true" : "false") << std::endl;
    std::cout << "Detected devices: ";
    for (const auto& device : detectedDevices) {
        std::cout << device << " ";
    }
    std::cout << std::endl;

    return 0;
}
/* Circular gray placeholder that spins continuously. */
.loading {
  width: 100px;
  height: 100px;
  border-radius: 50%;
  background-color: #ccc;
  animation: spin 1s linear infinite;
}

/* One full rotation per cycle. With linear timing the 0%/100% pair fully
   defines the motion; the original 50% { rotate(180deg) } keyframe was
   redundant and has been removed. */
@keyframes spin {
  0% {
    transform: rotate(0deg);
  }
  100% {
    transform: rotate(360deg);
  }
}
package com.github.peacetrue.beans.properties.code;

/**
 * Capability interface for beans exposing a read-only {@code code} property.
 *
 * @author peace
 * @since 1.0
 **/
public interface CodeCapable {

    /** Bean property name of {@link #getCode()}, for reflection/criteria-style APIs. */
    String PROPERTY_CODE = "code";

    /**
     * Returns this bean's code value.
     *
     * @return the code (nullability is implementation-defined)
     */
    String getCode();
}
package wrappers.core;

/**
 * Abstract wrapper specializing {@code GenericWrapper} for {@link Integer}
 * payloads; concrete subclasses give the wrapped value domain meaning.
 */
public abstract class IntegerWrapper extends GenericWrapper<Integer> {

    // Serial version id; its presence suggests the wrapper hierarchy is
    // Serializable (declared in GenericWrapper, outside this file).
    private static final long serialVersionUID = 5637528646462716743L;

    /**
     * @param value the integer payload to wrap (nullability depends on
     *              {@code GenericWrapper}, which is not visible here)
     */
    protected IntegerWrapper(Integer value) {
        super(value);
    }
}
class Item:
    """A purchasable product with a display name and unit price."""

    def __init__(self, name, price):
        self.name = name
        self.price = price


class ShoppingCart:
    """Accumulates Item objects and reports their combined cost."""

    def __init__(self):
        self.cart = []

    def add_item(self, item):
        """Append one Item to the cart."""
        self.cart.append(item)

    def view_cart(self):
        """Print the cart contents, or a notice when it is empty."""
        if not self.cart:
            print("Your cart is empty.")
            return
        print("Items in your cart:")
        for entry in self.cart:
            print(f"- {entry.name}")

    def total_cost(self):
        """Return the sum of all item prices (0 for an empty cart)."""
        return sum(entry.price for entry in self.cart)


# Demonstration of usage
item1 = Item("Laptop", 1200.00)
item2 = Item("Headphones", 150.00)

cart = ShoppingCart()
cart.add_item(item1)
cart.add_item(item2)

cart.view_cart()
total_cost = cart.total_cost()
print(f"Total cost of items in the cart: ${total_cost:.2f}")